From 459130a26f7d214ffc8b00a207f71bd74795b5bb Mon Sep 17 00:00:00 2001
From: Sean Hammond
Date: Tue, 28 Jan 2025 14:12:12 +0000
Subject: [PATCH 1/2] Enable many more Ruff linter rules

---
 .../ruff/lint/per_file_ignores/tests/tail | 6 +
 bin/run_data_task.py | 10 +-
 h/_version.py | 19 ++-
 h/accounts/schemas.py | 10 +-
 h/accounts/util.py | 8 +-
 h/activity/bucketing.py | 4 +-
 h/activity/query.py | 2 +-
 h/app.py | 2 +-
 h/cli/__init__.py | 4 +-
 h/cli/commands/authclient.py | 2 +-
 h/cli/commands/create_annotations.py | 14 +-
 h/cli/commands/devdata.py | 19 ++-
 h/cli/commands/move_uri.py | 2 +-
 h/cli/commands/normalize_uris.py | 2 +-
 h/cli/commands/search.py | 2 +-
 h/cli/commands/user.py | 6 +-
 h/config.py | 2 +-
 h/db/__init__.py | 2 +-
 h/db/types.py | 20 +--
 h/emails/test.py | 2 +-
 h/feeds/atom.py | 2 +-
 h/feeds/render.py | 4 +-
 h/feeds/rss.py | 2 +-
 h/form.py | 2 +-
 h/jinja_extensions/svg_icon.py | 8 +-
 h/links.py | 3 +-
 ...71ec81d18c_delete_empty_document_titles.py | 8 +-
 ...fill_in_missing_password_updated_fields.py | 2 +-
 ...8660_fix_document_uri_unique_constraint.py | 6 +-
 ..._in_missing_denormalized_document_title.py | 4 +-
 ...5d1abac3c1a1_revert_annotation_metadata.py | 1 -
 ...e535a075f16_remove_null_document_titles.py | 8 +-
 ...move_all_groups_to_default_organization.py | 14 +-
 .../63e8b1fe1d4b_clean_up_document_uris.py | 4 +-
 ...8_strip_whitespace_from_document_titles.py | 8 +-
 ...610d_delete_empty_array_document_titles.py | 4 +-
 .../6df1c8c3e423_revert_annotation_user_id.py | 1 -
 .../77bc5b4f2205_revert_annotation_pk.py | 1 -
 ...ct_timestamps_of_elife_test_annotations.py | 4 +-
 ...8fcdcefd8c6f_delete_orphaned_api_tokens.py | 2 +-
 ...d23d_fill_in_missing_annotation_deleted.py | 2 +-
 .../9e47da806421_remove_old_tokens.py | 2 +-
 ...f70f588_removing_trailing_from_pdf_urns.py | 2 +-
 ...5e274b202c_update_all_document_web_uris.py | 8 +-
 ...n_missing_denormalized_document_web_uri.py | 4 +-
 ...2c50b1133_clean_up_moderation_extra_key.py | 2 +-
 ..._authclient_grant_response_type_columns.py | 4 +-
 ..._fill_in_missing_annotation_document_id.py | 6 +-
 ...e5_update_imported_elife_ann_timestamps.py | 4 +-
 ...fill_in_annotation_text_rendered_column.py | 4 +-
 .../f32200e2e496_backfill_user_pubid.py | 2 +-
 h/models/auth_client.py | 4 +-
 h/models/document/_document.py | 18 +-
 h/models/document/_meta.py | 14 +-
 h/models/document/_uri.py | 15 +-
 h/models/feature.py | 2 +-
 h/models/group.py | 18 +-
 h/models/group_scope.py | 2 +-
 h/models/organization.py | 4 +-
 h/models/token.py | 8 +-
 h/models/user.py | 16 +-
 h/models/user_identity.py | 4 +-
 h/notification/reply.py | 6 +-
 h/paginator.py | 2 +-
 h/presenters/__init__.py | 4 +-
 h/presenters/annotation_jsonld.py | 4 +-
 h/presenters/annotation_searchindex.py | 2 +-
 h/presenters/document_html.py | 2 +-
 h/presenters/organization_json.py | 2 +-
 h/realtime.py | 6 +-
 h/renderers.py | 2 +-
 h/routes.py | 2 +-
 h/schemas/analytics.py | 2 +-
 h/schemas/annotation.py | 8 +-
 h/schemas/api/group.py | 12 +-
 h/schemas/api/group_membership.py | 2 +-
 h/schemas/api/user.py | 4 +-
 h/schemas/base.py | 4 +-
 h/schemas/forms/accounts/edit_profile.py | 4 +-
 h/schemas/forms/accounts/login.py | 2 +-
 h/schemas/forms/admin/group.py | 6 +-
 h/schemas/forms/admin/organization.py | 6 +-
 h/scripts/init_elasticsearch.py | 2 +-
 h/search/__init__.py | 10 +-
 h/search/config.py | 12 +-
 h/search/core.py | 10 +-
 h/search/index.py | 2 +-
 h/search/parser.py | 4 +-
 h/search/query.py | 16 +-
 h/search/util.py | 2 +-
 h/security/__init__.py | 6 +-
 h/security/encryption.py | 6 +-
 h/security/identity.py | 10 +-
 h/security/permits.py | 6 +-
 h/security/policy/_auth_client.py | 2 +-
 h/security/policy/_cookie.py | 4 +-
 h/security/policy/helpers.py | 2 +-
 h/security/predicates.py | 6 +-
 h/services/analytics.py | 2 +-
 h/services/annotation_delete.py | 4 +-
 h/services/annotation_read.py | 9 +-
 h/services/annotation_stats.py | 2 +-
 h/services/annotation_write.py | 10 +-
 h/services/auth_ticket.py | 8 +-
 h/services/auth_token.py | 5 +-
 h/services/bulk_api/_helpers.py | 4 +-
 h/services/bulk_api/annotation.py | 3 +-
 h/services/bulk_api/group.py | 5 +-
 h/services/bulk_executor/_actions.py | 32 ++--
 h/services/bulk_executor/_executor.py | 14 +-
 h/services/feature.py | 4 +-
 h/services/group_create.py | 4 +-
 h/services/group_delete.py | 2 +-
 h/services/group_members.py | 4 +-
 h/services/group_update.py | 4 +-
 h/services/job_queue.py | 23 ++-
 h/services/job_queue_metrics.py | 2 +-
 h/services/links.py | 2 +-
 h/services/oauth/__init__.py | 4 +-
 h/services/oauth/_bearer_token.py | 2 +-
 h/services/oauth/_jwt_grant.py | 10 +-
 h/services/oauth/_jwt_grant_token.py | 28 ++--
 h/services/oauth/_validator.py | 55 ++++---
 h/services/oauth/service.py | 6 +-
 h/services/search_index.py | 2 +-
 h/services/subscription.py | 6 +-
 h/services/user.py | 4 +-
 h/services/user_delete.py | 8 +-
 h/services/user_password.py | 4 +-
 h/services/user_rename.py | 4 +-
 h/services/user_signup.py | 8 +-
 h/services/user_unique.py | 2 +-
 h/services/user_update.py | 6 +-
 h/settings.py | 10 +-
 h/storage.py | 2 +-
 h/streamer/__init__.py | 2 +-
 h/streamer/filter.py | 4 +-
 h/streamer/messages.py | 8 +-
 h/streamer/websocket.py | 22 +--
 h/streamer/worker.py | 2 +-
 h/tasks/celery.py | 2 +-
 h/tasks/cleanup.py | 6 +-
 h/tasks/indexer.py | 2 +-
 h/tasks/job_queue.py | 10 +-
 h/traversal/__init__.py | 12 +-
 h/traversal/annotation.py | 2 +-
 h/traversal/group.py | 5 +-
 h/traversal/group_membership.py | 8 +-
 h/traversal/organization.py | 2 +-
 h/traversal/user.py | 2 +-
 h/tweens.py | 17 +-
 h/util/datetime.py | 4 +-
 h/util/db.py | 2 +-
 h/util/document_claims.py | 17 +-
 h/util/group.py | 2 +-
 h/util/group_scope.py | 2 +-
 h/util/logging_filters.py | 4 +-
 h/util/markdown_render.py | 14 +-
 h/util/redirects.py | 6 +-
 h/util/uri.py | 16 +-
 h/util/view.py | 2 +-
 h/viewderivers.py | 3 +-
 h/views/account_signup.py | 6 +-
 h/views/accounts.py | 16 +-
 h/views/activity.py | 6 +-
 h/views/admin/admins.py | 2 +-
 h/views/admin/features.py | 6 +-
 h/views/admin/groups.py | 10 +-
 h/views/admin/nipsa.py | 4 +-
 h/views/admin/oauthclients.py | 2 +-
 h/views/admin/organizations.py | 6 +-
 h/views/admin/search.py | 4 +-
 h/views/admin/staff.py | 2 +-
 h/views/admin/users.py | 4 +-
 h/views/api/analytics.py | 2 +-
 h/views/api/annotations.py | 2 +-
 h/views/api/auth.py | 16 +-
 h/views/api/bulk/_ndjson.py | 4 +-
 h/views/api/bulk/annotation.py | 2 +-
 h/views/api/config.py | 6 +-
 h/views/api/decorators/__init__.py | 2 +-
 h/views/api/decorators/client_errors.py | 6 +-
 h/views/api/group_members.py | 4 +-
 h/views/api/groups.py | 4 +-
 h/views/api/helpers/cors.py | 8 +-
 h/views/api/helpers/json_payload.py | 2 +-
 h/views/api/helpers/media_types.py | 4 +-
 h/views/api/profile.py | 4 +-
 h/views/api/users.py | 4 +-
 h/views/badge.py | 4 +-
 h/views/home.py | 2 +-
 h/views/main.py | 2 +-
 h/views/notification.py | 2 +-
 h/views/organizations.py | 2 +-
 h/views/status.py | 8 +-
 pyproject.toml | 55 ++-----
 tests/common/factories/annotation.py | 17 +-
 tests/common/factories/auth_ticket.py | 2 +-
 tests/common/factories/authz_code.py | 2 +-
 tests/common/factories/base.py | 4 +-
 tests/common/factories/group.py | 4 +-
 tests/common/factories/job.py | 2 +-
 tests/common/factories/token.py | 6 +-
 tests/common/fixtures/elasticsearch.py | 4 +-
 tests/common/fixtures/services.py | 14 +-
 tests/conftest.py | 2 +-
 tests/functional/accounts_test.py | 34 ++--
 tests/functional/api/annotations_test.py | 4 +-
 tests/functional/api/api_test.py | 4 +-
 tests/functional/api/bulk/action_test.py | 6 +-
 tests/functional/api/bulk/annotation_test.py | 2 +-
 tests/functional/api/conftest.py | 2 +-
 tests/functional/api/errors_test.py | 4 +-
 tests/functional/api/groups/__init__.py | 0
 tests/functional/api/groups/create_test.py | 10 +-
 tests/functional/api/groups/members_test.py | 71 ++++----
 tests/functional/api/groups/read_test.py | 30 ++--
 tests/functional/api/groups/update_test.py | 30 ++--
 tests/functional/api/profile_test.py | 2 +-
 tests/functional/api/users_test.py | 2 +-
 tests/functional/api/versions_test.py | 12 +-
 tests/functional/bin/run_data_task_test.py | 10 +-
 tests/functional/client_login_test.py | 4 +-
 tests/functional/fixtures/authentication.py | 12 +-
 tests/functional/fixtures/groups.py | 2 +-
 .../h/views/admin/permissions_test.py | 7 +-
 tests/functional/oauth_test.py | 8 +-
 tests/unit/h/accounts/schemas_test.py | 4 +-
 tests/unit/h/accounts/util_test.py | 8 +-
 tests/unit/h/activity/bucketing_test.py | 8 +-
 tests/unit/h/activity/query_test.py | 7 +-
 tests/unit/h/app_test.py | 6 +-
 tests/unit/h/celery_test.py | 4 +-
 tests/unit/h/cli/commands/authclient_test.py | 2 +-
 tests/unit/h/cli/commands/user_test.py | 4 +-
 tests/unit/h/conftest.py | 4 +-
 tests/unit/h/emails/flag_notification_test.py | 4 +-
 .../unit/h/emails/reply_notification_test.py | 32 ++--
 tests/unit/h/emails/reset_password_test.py | 4 +-
 tests/unit/h/emails/signup_test.py | 4 +-
 tests/unit/h/emails/test_test.py | 4 +-
 tests/unit/h/eventqueue_test.py | 2 +-
 tests/unit/h/feeds/atom_test.py | 25 ++-
 tests/unit/h/feeds/rss_test.py | 14 +-
 tests/unit/h/feeds/util_test.py | 2 +-
 .../unit/h/jinja2_extensions/__init___test.py | 2 +-
 tests/unit/h/jinja2_extensions/filter_test.py | 6 +-
 .../unit/h/models/document/_document_test.py | 8 +-
 tests/unit/h/models/document/_meta_test.py | 12 +-
 tests/unit/h/models/document/_uri_test.py | 16 +-
 tests/unit/h/models/feature_test.py | 2 +-
 tests/unit/h/models/group_test.py | 8 +-
 tests/unit/h/models/organization_test.py | 4 +-
 tests/unit/h/models/token_test.py | 10 +-
 tests/unit/h/models/user_test.py | 14 +-
 tests/unit/h/paginator_test.py | 4 +-
 .../unit/h/presenters/annotation_html_test.py | 2 +-
 .../h/presenters/annotation_jsonld_test.py | 4 +-
 tests/unit/h/presenters/document_json_test.py | 2 +-
 .../presenters/group_membership_json_test.py | 4 +-
 tests/unit/h/schemas/annotation_test.py | 2 +-
 tests/unit/h/schemas/base_test.py | 4 +-
 .../forms/accounts/reset_password_test.py | 6 +-
 .../unit/h/schemas/forms/admin/group_test.py | 2 +-
 tests/unit/h/search/config_test.py | 4 +-
 tests/unit/h/search/conftest.py | 2 +-
 tests/unit/h/search/core_test.py | 10 +-
 tests/unit/h/search/index_test.py | 2 +-
 tests/unit/h/search/parser_test.py | 4 +-
 tests/unit/h/search/query_test.py | 10 +-
 tests/unit/h/security/permits_test.py | 2 +-
 tests/unit/h/security/policy/_api_test.py | 2 +-
 .../h/security/policy/_auth_client_test.py | 4 +-
 tests/unit/h/security/policy/_cookie_test.py | 8 +-
 .../unit/h/security/policy/top_level_test.py | 10 +-
 .../unit/h/services/annotation_delete_test.py | 4 +-
 tests/unit/h/services/annotation_json_test.py | 4 +-
 tests/unit/h/services/annotation_read_test.py | 2 +-
 tests/unit/h/services/annotation_sync_test.py | 10 +-
 .../unit/h/services/annotation_write_test.py | 8 +-
 tests/unit/h/services/auth_ticket_test.py | 10 +-
 tests/unit/h/services/auth_token_test.py | 6 +-
 tests/unit/h/services/bulk_api/__init__.py | 0
 tests/unit/h/services/bulk_api/group_test.py | 2 +-
 .../h/services/bulk_api/lms_stats_test.py | 2 +-
 .../services/bulk_executor/_executor_test.py | 2 +-
 tests/unit/h/services/group_create_test.py | 8 +-
 tests/unit/h/services/group_members_test.py | 8 +-
 tests/unit/h/services/group_test.py | 8 +-
 .../unit/h/services/job_queue_metrics_test.py | 2 +-
 tests/unit/h/services/job_queue_test.py | 10 +-
 tests/unit/h/services/links_test.py | 19 ++-
 .../unit/h/services/oauth/_jwt_grant_test.py | 6 +-
 .../h/services/oauth/_jwt_grant_token_test.py | 6 +-
 .../unit/h/services/oauth/_validator_test.py | 34 ++--
 tests/unit/h/services/oauth/service_test.py | 2 +-
 tests/unit/h/services/user_delete_test.py | 2 +-
 tests/unit/h/services/user_password_test.py | 12 +-
 tests/unit/h/services/user_rename_test.py | 2 +-
 tests/unit/h/services/user_signup_test.py | 8 +-
 tests/unit/h/services/user_test.py | 2 +-
 tests/unit/h/settings_test.py | 4 +-
 tests/unit/h/streamer/db_test.py | 2 +-
 tests/unit/h/streamer/filter_test.py | 20 +--
 tests/unit/h/streamer/messages_speed_test.py | 6 +-
 tests/unit/h/streamer/messages_test.py | 6 +-
 tests/unit/h/streamer/tweens_test.py | 4 +-
 tests/unit/h/streamer/websocket_test.py | 2 +-
 tests/unit/h/subscribers_test.py | 2 +-
 tests/unit/h/tasks/cleanup_test.py | 40 ++---
 tests/unit/h/tweens_test.py | 10 +-
 tests/unit/h/util/datetime_test.py | 4 +-
 tests/unit/h/util/db_test.py | 8 +-
 tests/unit/h/util/document_claims_test.py | 6 +-
 tests/unit/h/util/metrics_test.py | 6 +-
 tests/unit/h/util/test_logging_filters.py | 8 +-
 tests/unit/h/util/uri_test.py | 2 +-
 tests/unit/h/views/account_signup_test.py | 2 +-
 tests/unit/h/views/accounts_test.py | 6 +-
 tests/unit/h/views/activity_test.py | 155 +++++++++++++-----
 tests/unit/h/views/admin/badge_test.py | 2 +-
 tests/unit/h/views/admin/groups_test.py | 16 +-
 tests/unit/h/views/admin/nipsa_test.py | 2 +-
 tests/unit/h/views/admin/oauthclients_test.py | 12 +-
 .../unit/h/views/admin/organizations_test.py | 4 +-
 tests/unit/h/views/admin/search_test.py | 6 +-
 tests/unit/h/views/admin/users_test.py | 2 +-
 tests/unit/h/views/api/auth_test.py | 14 +-
 tests/unit/h/views/api/bulk/_ndjson_test.py | 6 +-
 tests/unit/h/views/api/bulk/stats_test.py | 2 +-
 .../unit/h/views/api/helpers/angular_test.py | 2 +-
 tests/unit/h/views/api/helpers/cors_test.py | 2 +-
 tests/unit/h/views/api/helpers/links_test.py | 38 +----
 tests/unit/h/views/badge_test.py | 12 +-
 tests/unit/h/views/main_test.py | 2 +-
 335 files changed, 1256 insertions(+), 1207 deletions(-)
 create mode 100644 .cookiecutter/includes/ruff/lint/per_file_ignores/tests/tail
 create mode 100644 tests/functional/api/groups/__init__.py
 create mode 100644 tests/unit/h/services/bulk_api/__init__.py

diff --git a/.cookiecutter/includes/ruff/lint/per_file_ignores/tests/tail b/.cookiecutter/includes/ruff/lint/per_file_ignores/tests/tail
new file mode 100644
index 00000000000..fb041f64f46
--- /dev/null
+++ b/.cookiecutter/includes/ruff/lint/per_file_ignores/tests/tail
@@ -0,0 +1,6 @@
+"PT006", # Enforces a consistent style for the type of the `argnames` parameter to
+         # pytest.mark.parametrize. We have too many pre-existing violations of
+         # this.
+"PT007", # Enforces a consistent style for the type of the `argvalues` parameter to
+         # pytest.mark.parametrize. We have too many pre-existing violations of
+         # this.
diff --git a/bin/run_data_task.py b/bin/run_data_task.py index 391d1925ca6..8981d3289cf 100755 --- a/bin/run_data_task.py +++ b/bin/run_data_task.py @@ -4,7 +4,7 @@ This is a general mechanism for running tasks defined in SQL, however it's currently only used to perform the aggregations and mappings required for reporting. -""" +""" # noqa: EXE002 from argparse import ArgumentParser @@ -60,18 +60,18 @@ def main(): ) # Run the update in a transaction, so we roll back if it goes wrong - with request.db.bind.connect() as connection: + with request.db.bind.connect() as connection: # noqa: SIM117 with connection.begin(): for script in scripts: if args.no_python and isinstance(script, PythonScript): - print(f"Skipping: {script}") + print(f"Skipping: {script}") # noqa: T201 continue for step in script.execute(connection, dry_run=args.dry_run): if args.dry_run: - print("Dry run!") + print("Dry run!") # noqa: T201 - print(step.dump(indent=" ") + "\n") + print(step.dump(indent=" ") + "\n") # noqa: T201 if __name__ == "__main__": diff --git a/h/_version.py b/h/_version.py index c988b4291e4..921a7ad9266 100644 --- a/h/_version.py +++ b/h/_version.py @@ -15,23 +15,24 @@ def fetch_git_ref(): - return subprocess.check_output( - ["git", "rev-parse", "--short", "HEAD"], stderr=DEVNULL + return subprocess.check_output( # noqa: S603 + ["git", "rev-parse", "--short", "HEAD"], # noqa: S607 + stderr=DEVNULL, ).strip() def fetch_git_date(ref): - output = subprocess.check_output(["git", "show", "-s", "--format=%ct", ref]) - return datetime.datetime.fromtimestamp(int(output)) + output = subprocess.check_output(["git", "show", "-s", "--format=%ct", ref]) # noqa: S603, S607 + return datetime.datetime.fromtimestamp(int(output)) # noqa: DTZ006 def fetch_git_dirty(): # Ensure git index is up-to-date first. This usually isn't necessary, but # can be needed inside a docker container where the index is out of date. - subprocess.call(["git", "update-index", "-q", "--refresh"]) - dirty_tree = bool(subprocess.call(["git", "diff-files", "--quiet"])) + subprocess.call(["git", "update-index", "-q", "--refresh"]) # noqa: S603, S607 + dirty_tree = bool(subprocess.call(["git", "diff-files", "--quiet"])) # noqa: S603, S607 dirty_index = bool( - subprocess.call(["git", "diff-index", "--quiet", "--cached", "HEAD"]) + subprocess.call(["git", "diff-index", "--quiet", "--cached", "HEAD"]) # noqa: S603, S607 ) return dirty_tree or dirty_index @@ -45,11 +46,11 @@ def git_version(): def git_archive_version(): # pragma: no cover ref = VERSION_GIT_REF - date = datetime.datetime.fromtimestamp(int(VERSION_GIT_DATE)) + date = datetime.datetime.fromtimestamp(int(VERSION_GIT_DATE)) # noqa: DTZ006 return pep440_version(date, ref) -def pep440_version(date, ref, dirty=False): +def pep440_version(date, ref, dirty=False): # noqa: FBT002 """Build a PEP440-compliant version number from the passed information.""" return f"{date.strftime('%Y%m%d')}+g{ref}{'.dirty' if dirty else ''}" diff --git a/h/accounts/schemas.py b/h/accounts/schemas.py index b5357071640..b9ce92481b6 100644 --- a/h/accounts/schemas.py +++ b/h/accounts/schemas.py @@ -1,7 +1,7 @@ import codecs import logging from datetime import datetime, timedelta -from functools import lru_cache +from functools import cache import colander import deform @@ -24,7 +24,7 @@ log = logging.getLogger(__name__) -@lru_cache(maxsize=None) +@cache def get_blacklist(): # Try to load the blacklist file from disk. 
If, for whatever reason, we # can't load the file, then don't crash out, just log a warning about @@ -32,10 +32,10 @@ def get_blacklist(): try: with codecs.open("h/accounts/blacklist", encoding="utf-8") as handle: blacklist = handle.readlines() - except (IOError, ValueError): # pragma: no cover + except (IOError, ValueError): # pragma: no cover # noqa: UP024 log.exception("unable to load blacklist") blacklist = [] - return set(line.strip().lower() for line in blacklist) + return set(line.strip().lower() for line in blacklist) # noqa: C401 def unique_email(node, value): @@ -68,7 +68,7 @@ def unique_username(node, value): ) # 31 days is an arbitrary time delta that should be more than enough # time for all the previous user's data to be expunged. - .where(models.UserDeletion.requested_at > datetime.now() - timedelta(days=31)) + .where(models.UserDeletion.requested_at > datetime.now() - timedelta(days=31)) # noqa: DTZ005 ).first(): raise exc diff --git a/h/accounts/util.py b/h/accounts/util.py index 2c0643c3c31..113030f4979 100644 --- a/h/accounts/util.py +++ b/h/accounts/util.py @@ -23,10 +23,10 @@ def validate_url(url): parsed_url = urlparse("http://" + url) if not re.match("https?", parsed_url.scheme): - raise ValueError('Links must have an "http" or "https" prefix') + raise ValueError('Links must have an "http" or "https" prefix') # noqa: EM101, TRY003 if not parsed_url.netloc: - raise ValueError("Links must include a domain name") + raise ValueError("Links must include a domain name") # noqa: EM101, TRY003 return parsed_url.geturl() @@ -44,10 +44,10 @@ def validate_orcid(orcid): orcid_regex = r"\A[0-9]{4}-[0-9]{4}-[0-9]{4}-[0-9]{3}[0-9X]\Z" if not re.match(orcid_regex, orcid): - raise ValueError(f"The format of this ORCID is incorrect: {orcid}") + raise ValueError(f"The format of this ORCID is incorrect: {orcid}") # noqa: EM102, TRY003 if _orcid_checksum_digit(orcid[:-1]) != orcid[-1:]: - raise ValueError(f"{orcid} is not a valid ORCID") + raise ValueError(f"{orcid} is not a valid ORCID") # noqa: EM102, TRY003 return True diff --git a/h/activity/bucketing.py b/h/activity/bucketing.py index 1b8313ea1f8..832e10a13c5 100644 --- a/h/activity/bucketing.py +++ b/h/activity/bucketing.py @@ -146,7 +146,7 @@ def next(self, annotation): if timeframe.within_cutoff(annotation): return timeframe - cutoff_time = datetime.datetime( + cutoff_time = datetime.datetime( # noqa: DTZ001 year=annotation.updated.year, month=annotation.updated.month, day=1 ) timeframe = Timeframe(annotation.updated.strftime("%b %Y"), cutoff_time) @@ -179,4 +179,4 @@ def bucket(annotations): def utcnow(): # pragma: no cover - return datetime.datetime.utcnow() + return datetime.datetime.utcnow() # noqa: DTZ003 diff --git a/h/activity/query.py b/h/activity/query.py index ab98f574702..3945143b413 100644 --- a/h/activity/query.py +++ b/h/activity/query.py @@ -17,7 +17,7 @@ from h.services.annotation_read import AnnotationReadService -class ActivityResults( +class ActivityResults( # noqa: SLOT002 namedtuple("ActivityResults", ["total", "aggregations", "timeframes"]) # noqa: PYI024 ): pass diff --git a/h/app.py b/h/app.py index 244a24f89b7..c9db4b8c2c0 100644 --- a/h/app.py +++ b/h/app.py @@ -63,7 +63,7 @@ def includeme(config): # pragma: no cover config.add_settings( { - "tm.manager_hook": lambda request: transaction.TransactionManager(), + "tm.manager_hook": lambda request: transaction.TransactionManager(), # noqa: ARG005 "tm.annotate_user": False, } ) diff --git a/h/cli/__init__.py b/h/cli/__init__.py index 4c1ed6a2f80..8791b8c22d9 
100644 --- a/h/cli/__init__.py +++ b/h/cli/__init__.py @@ -22,7 +22,7 @@ ) -def bootstrap(app_url, dev=False): +def bootstrap(app_url, dev=False): # noqa: FBT002 """ Bootstrap the application from the given arguments. @@ -34,7 +34,7 @@ def bootstrap(app_url, dev=False): if dev: app_url = "http://localhost:5000" else: - raise click.ClickException("the app URL must be set in production mode!") + raise click.ClickException("the app URL must be set in production mode!") # noqa: EM101, TRY003 config = "conf/development.ini" if dev else "conf/production.ini" diff --git a/h/cli/commands/authclient.py b/h/cli/commands/authclient.py index 8d425436e07..f0b124414c8 100644 --- a/h/cli/commands/authclient.py +++ b/h/cli/commands/authclient.py @@ -36,7 +36,7 @@ def authclient(): help="An allowable grant type", ) @click.pass_context -def add(ctx, name, authority, type_, redirect_uri, grant_type): +def add(ctx, name, authority, type_, redirect_uri, grant_type): # noqa: PLR0913 """Create a new OAuth client.""" request = ctx.obj["bootstrap"]() diff --git a/h/cli/commands/create_annotations.py b/h/cli/commands/create_annotations.py index e55f820ec85..70211c46230 100644 --- a/h/cli/commands/create_annotations.py +++ b/h/cli/commands/create_annotations.py @@ -15,13 +15,13 @@ def create_annotations(ctx, number): tm = request.tm for _ in range(number): - created = updated = datetime.datetime( - year=random.randint(2015, 2020), - month=random.randint(1, 12), - day=random.randint(1, 27), - hour=random.randint(1, 12), - minute=random.randint(0, 59), - second=random.randint(0, 59), + created = updated = datetime.datetime( # noqa: DTZ001 + year=random.randint(2015, 2020), # noqa: S311 + month=random.randint(1, 12), # noqa: S311 + day=random.randint(1, 27), # noqa: S311 + hour=random.randint(1, 12), # noqa: S311 + minute=random.randint(0, 59), # noqa: S311 + second=random.randint(0, 59), # noqa: S311 ) db.add( factories.Annotation.build(created=created, updated=updated, shared=True) diff --git a/h/cli/commands/devdata.py b/h/cli/commands/devdata.py index ee6a2541159..771660ec38b 100644 --- a/h/cli/commands/devdata.py +++ b/h/cli/commands/devdata.py @@ -55,7 +55,7 @@ def create_all(self): elif type_ == "restricted_group": self.upsert_restricted_group(data_dict) else: - raise RuntimeError(f"Unrecognized type: {type_}") + raise RuntimeError(f"Unrecognized type: {type_}") # noqa: EM102, TRY003 self.tm.commit() @@ -113,7 +113,7 @@ def upsert_group(self, group_data, group_create_method): creator = models.User.get_by_username( self.db, group_data.pop("creator_username"), group_data["authority"] ) - assert creator + assert creator # noqa: S101 organization = ( self.db.query(models.Organization) @@ -145,23 +145,24 @@ def setattrs(object_, attrs): def devdata(ctx): with tempfile.TemporaryDirectory() as tmpdirname: # The directory that we'll clone the devdata git repo into. - git_dir = os.path.join(tmpdirname, "devdata") + git_dir = os.path.join(tmpdirname, "devdata") # noqa: PTH118 # Clone the private devdata repo from GitHub. # This will fail if Git->GitHub HTTPS authentication isn't set up or if # your GitHub account doesn't have access to the private repo. - subprocess.check_call( - ["git", "clone", "https://github.com/hypothesis/devdata.git", git_dir] + subprocess.check_call( # noqa: S603 + ["git", "clone", "https://github.com/hypothesis/devdata.git", git_dir] # noqa: S607 ) # Copy environment variables file into place. 
shutil.copyfile( - os.path.join(git_dir, "h", "devdata.env"), - os.path.join(pathlib.Path(h.__file__).parent.parent, ".devdata.env"), + os.path.join(git_dir, "h", "devdata.env"), # noqa: PTH118 + os.path.join(pathlib.Path(h.__file__).parent.parent, ".devdata.env"), # noqa: PTH118 ) - with open( - os.path.join(git_dir, "h", "devdata.json"), "r", encoding="utf8" + with open( # noqa: PTH123 + os.path.join(git_dir, "h", "devdata.json"), # noqa: PTH118 + encoding="utf8", ) as handle: DevDataFactory( ctx.obj["bootstrap"](), diff --git a/h/cli/commands/move_uri.py b/h/cli/commands/move_uri.py index 26d462845fc..8d7b4dd3d47 100644 --- a/h/cli/commands/move_uri.py +++ b/h/cli/commands/move_uri.py @@ -40,7 +40,7 @@ def move_uri(ctx, old, new): ) answer = click.prompt(prompt, default="n", show_default=False) if answer != "y": - print("Aborted") + print("Aborted") # noqa: T201 return for annotation in annotations: diff --git a/h/cli/commands/normalize_uris.py b/h/cli/commands/normalize_uris.py index 885b2ae5de4..2144d4f2d7d 100644 --- a/h/cli/commands/normalize_uris.py +++ b/h/cli/commands/normalize_uris.py @@ -8,7 +8,7 @@ from h.util import uri -class Window(namedtuple("Window", ["start", "end"])): # noqa: PYI024 +class Window(namedtuple("Window", ["start", "end"])): # noqa: PYI024, SLOT002 pass diff --git a/h/cli/commands/search.py b/h/cli/commands/search.py index d9d6fb3f6ed..ba4db3d98a6 100644 --- a/h/cli/commands/search.py +++ b/h/cli/commands/search.py @@ -23,4 +23,4 @@ def update_settings(ctx): try: config.update_index_settings(request.es) except RuntimeError as exc: - raise click.ClickException(str(exc)) + raise click.ClickException(str(exc)) # noqa: B904 diff --git a/h/cli/commands/user.py b/h/cli/commands/user.py index a562720621d..86fda601f06 100644 --- a/h/cli/commands/user.py +++ b/h/cli/commands/user.py @@ -39,7 +39,7 @@ def add(ctx, username, email, password, authority): message = ( f"could not create user due to integrity constraint.\n\n{upstream_error}" ) - raise click.ClickException(message) + raise click.ClickException(message) # noqa: B904 click.echo(f"{username} created", err=True) @@ -92,8 +92,8 @@ def password(ctx, username, authority, password): user = models.User.get_by_username(request.db, username, authority) if user is None: - raise click.ClickException( - f'no user with username "{username}" and authority "{authority}"' + raise click.ClickException( # noqa: TRY003 + f'no user with username "{username}" and authority "{authority}"' # noqa: EM102 ) password_service.update_password(user, password) diff --git a/h/config.py b/h/config.py index eb971061ff9..2d7d6b02183 100644 --- a/h/config.py +++ b/h/config.py @@ -23,7 +23,7 @@ ) -def configure(environ=None, settings=None): +def configure(environ=None, settings=None): # noqa: PLR0915 if environ is None: # pragma: no cover environ = os.environ if settings is None: # pragma: no cover diff --git a/h/db/__init__.py b/h/db/__init__.py index 75468e34e23..b8bcbd5354b 100644 --- a/h/db/__init__.py +++ b/h/db/__init__.py @@ -20,7 +20,7 @@ from sqlalchemy import text from sqlalchemy.orm import declarative_base, sessionmaker -__all__ = ("Base", "Session", "pre_create", "post_create", "create_engine") +__all__ = ("Base", "Session", "create_engine", "post_create", "pre_create") log = logging.getLogger(__name__) diff --git a/h/db/types.py b/h/db/types.py index 400adec5194..d7d35e7b82d 100644 --- a/h/db/types.py +++ b/h/db/types.py @@ -1,4 +1,4 @@ -"""Custom SQLAlchemy types for use with the Annotations API database.""" +"""Custom SQLAlchemy 
types for use with the Annotations API database.""" # noqa: A005 import base64 import binascii @@ -34,10 +34,10 @@ def __init__(self): # We handle the UUID conversion, explicitly not use as_uuid super().__init__(as_uuid=False) - def process_bind_param(self, value, dialect): + def process_bind_param(self, value, dialect): # noqa: ARG002 return self.url_safe_to_hex(value) - def process_result_value(self, value, dialect): + def process_result_value(self, value, dialect): # noqa: ARG002 return self.hex_to_url_safe(value) @classmethod @@ -52,7 +52,7 @@ def url_safe_to_hex(cls, value): return None if not isinstance(value, str): - raise InvalidUUID(f"`url_safe` is {type(value)}, expected str") + raise InvalidUUID(f"`url_safe` is {type(value)}, expected str") # noqa: EM102, TRY003 byte_str = value.encode() @@ -61,7 +61,7 @@ def url_safe_to_hex(cls, value): base64.urlsafe_b64decode(byte_str + b"==") ).decode() except binascii.Error as err: - raise InvalidUUID(f"{value!r} is not a valid encoded UUID") from err + raise InvalidUUID(f"{value!r} is not a valid encoded UUID") from err # noqa: EM102, TRY003 lengths = len(byte_str), len(hex_str) @@ -71,7 +71,7 @@ def url_safe_to_hex(cls, value): if lengths == (20, 30): # ElasticSearch flake ID return cls._add_magic_byte(hex_str) - raise InvalidUUID(f"{value!r} is not a valid encoded UUID") + raise InvalidUUID(f"{value!r} is not a valid encoded UUID") # noqa: EM102, TRY003 @classmethod def hex_to_url_safe(cls, value): @@ -112,7 +112,7 @@ def hex_to_url_safe(cls, value): # with any specified UUID so the resulting UUID can be distinguished from # those generated by, for example, PostgreSQL's uuid_generate_v1mc(), and # mapped back to a 20-char ElasticSearch flake ID. - _MAGIC_BYTE = ["e", "5"] + _MAGIC_BYTE = ["e", "5"] # noqa: RUF012 @classmethod def _has_magic_byte(cls, hex_str): @@ -143,10 +143,10 @@ class AnnotationSelectorJSONB(types.TypeDecorator): impl = postgresql.JSONB cache_ok = False - def process_bind_param(self, value, dialect): + def process_bind_param(self, value, dialect): # noqa: ARG002 return _transform_quote_selector(value, _escape_null_byte) - def process_result_value(self, value, dialect): + def process_result_value(self, value, dialect): # noqa: ARG002 return _transform_quote_selector(value, _unescape_null_byte) @@ -161,7 +161,7 @@ def _transform_quote_selector(selectors, transform_func): # pragma: no cover if not isinstance(selector, dict): continue - if not selector.get("type") == "TextQuoteSelector": + if selector.get("type") != "TextQuoteSelector": continue if "prefix" in selector: diff --git a/h/emails/test.py b/h/emails/test.py index aba4f67b011..fbe430af030 100644 --- a/h/emails/test.py +++ b/h/emails/test.py @@ -19,7 +19,7 @@ def generate(request, recipient): """ context = { - "time": datetime.datetime.utcnow().isoformat(), + "time": datetime.datetime.utcnow().isoformat(), # noqa: DTZ003 "hostname": platform.node(), "python_version": platform.python_version(), "version": __version__, diff --git a/h/feeds/atom.py b/h/feeds/atom.py index c2a9a3c1c14..8dd46624efb 100644 --- a/h/feeds/atom.py +++ b/h/feeds/atom.py @@ -54,7 +54,7 @@ def _feed_entry_from_annotation(annotation, annotation_url, annotation_api_url=N return entry -def feed_from_annotations( +def feed_from_annotations( # noqa: PLR0913 annotations, atom_url, annotation_url, diff --git a/h/feeds/render.py b/h/feeds/render.py index 8e3d5d96ed1..051d7c34c74 100644 --- a/h/feeds/render.py +++ b/h/feeds/render.py @@ -3,7 +3,7 @@ from h.feeds import atom, rss -def 
render_atom(request, annotations, atom_url, html_url, title, subtitle): +def render_atom(request, annotations, atom_url, html_url, title, subtitle): # noqa: PLR0913 """ Return a rendered Atom feed of the given annotations. @@ -51,7 +51,7 @@ def annotation_api_url(annotation): # pragma: no cover return response -def render_rss(request, annotations, rss_url, html_url, title, description): +def render_rss(request, annotations, rss_url, html_url, title, description): # noqa: PLR0913 """ Return a rendered RSS feed of the given annotations. diff --git a/h/feeds/rss.py b/h/feeds/rss.py index 81a93c43273..46387f7df4b 100644 --- a/h/feeds/rss.py +++ b/h/feeds/rss.py @@ -49,7 +49,7 @@ def _feed_item_from_annotation(annotation, annotation_url): } -def feed_from_annotations( +def feed_from_annotations( # noqa: PLR0913 annotations, annotation_url, rss_url, html_url, title, description ): """ diff --git a/h/form.py b/h/form.py index bd15aa9306c..e40736b38db 100644 --- a/h/form.py +++ b/h/form.py @@ -87,7 +87,7 @@ def configure_environment(config): # pragma: no cover config.registry[ENVIRONMENT_KEY] = create_environment(base) -def handle_form_submission(request, form, on_success, on_failure, flash_success=True): +def handle_form_submission(request, form, on_success, on_failure, flash_success=True): # noqa: FBT002 """ Handle the submission of the given form in a standard way. diff --git a/h/jinja_extensions/svg_icon.py b/h/jinja_extensions/svg_icon.py index eed78e2f3fe..179d5f9e5a2 100644 --- a/h/jinja_extensions/svg_icon.py +++ b/h/jinja_extensions/svg_icon.py @@ -1,4 +1,4 @@ -from xml.etree import ElementTree +from xml.etree import ElementTree # noqa: ICN001 from markupsafe import Markup @@ -30,12 +30,12 @@ def svg_icon(name, css_class=""): # See http://stackoverflow.com/questions/8983041 ElementTree.register_namespace("", SVG_NAMESPACE_URI) - with open(f"build/images/icons/{name}.svg", encoding="utf8") as handle: + with open(f"build/images/icons/{name}.svg", encoding="utf8") as handle: # noqa: PTH123 svg_data = handle.read() - root = ElementTree.fromstring(svg_data) + root = ElementTree.fromstring(svg_data) # noqa: S314 - if css_class: + if css_class: # noqa: SIM108 css_class = "svg-icon " + css_class else: css_class = "svg-icon" diff --git a/h/links.py b/h/links.py index 848b69f833d..ad55bf6464a 100644 --- a/h/links.py +++ b/h/links.py @@ -14,8 +14,7 @@ def pretty_link(url): if parsed.scheme not in ["http", "https"]: return url netloc = parsed.netloc - if netloc.startswith("www."): - netloc = netloc[4:] + netloc = netloc.removeprefix("www.") return unquote(netloc + parsed.path) diff --git a/h/migrations/versions/3d71ec81d18c_delete_empty_document_titles.py b/h/migrations/versions/3d71ec81d18c_delete_empty_document_titles.py index d59e005a52d..2e4eebb01f0 100644 --- a/h/migrations/versions/3d71ec81d18c_delete_empty_document_titles.py +++ b/h/migrations/versions/3d71ec81d18c_delete_empty_document_titles.py @@ -40,17 +40,13 @@ def upgrade(): for original_title in document_meta.value: if original_title == "": n += 1 - log.info( - "removing empty title from document_meta {id}".format( - id=document_meta.id - ) - ) + log.info(f"removing empty title from document_meta {document_meta.id}") # noqa: G004 else: new_titles.append(original_title) if len(new_titles) != len(document_meta.value): document_meta.value = new_titles session.commit() - log.info("deleted {n} empty-string document titles".format(n=n)) + log.info(f"deleted {n} empty-string document titles") # noqa: G004 def downgrade(): diff --git 
a/h/migrations/versions/42bd46b9b1ea_fill_in_missing_password_updated_fields.py b/h/migrations/versions/42bd46b9b1ea_fill_in_missing_password_updated_fields.py index d9e26cfed62..f0cc3968717 100644 --- a/h/migrations/versions/42bd46b9b1ea_fill_in_missing_password_updated_fields.py +++ b/h/migrations/versions/42bd46b9b1ea_fill_in_missing_password_updated_fields.py @@ -36,7 +36,7 @@ def upgrade(): user.update() .where(user.c.id == id_) .where(user.c.password_updated == None) # noqa: E711 - .values(password_updated=datetime.datetime.utcnow()) + .values(password_updated=datetime.datetime.utcnow()) # noqa: DTZ003 ) diff --git a/h/migrations/versions/467ea2898660_fix_document_uri_unique_constraint.py b/h/migrations/versions/467ea2898660_fix_document_uri_unique_constraint.py index 74927f81703..7fb6558d67c 100644 --- a/h/migrations/versions/467ea2898660_fix_document_uri_unique_constraint.py +++ b/h/migrations/versions/467ea2898660_fix_document_uri_unique_constraint.py @@ -111,7 +111,7 @@ def merge_duplicate_document_uris(session): ) n += batch_delete(document_uris, session) - log.info("deleted %d duplicate rows from document_uri (NULL)" % n) + log.info("deleted %d duplicate rows from document_uri (NULL)" % n) # noqa: G002, UP031 def delete_conflicting_document_uris(session): @@ -169,7 +169,7 @@ def delete_conflicting_document_uris(session): n += batch_delete(conflicting_doc_uris, session) - log.info("deleted %d duplicate rows from document_uri (empty string/NULL)" % n) + log.info("deleted %d duplicate rows from document_uri (empty string/NULL)" % n) # noqa: G002, UP031 def change_nulls_to_empty_strings(session): @@ -204,7 +204,7 @@ def change_nulls_to_empty_strings(session): session.commit() - log.info("replaced NULL with '' in %d rows" % n) + log.info("replaced NULL with '' in %d rows" % n) # noqa: G002, UP031 def upgrade(): diff --git a/h/migrations/versions/58bb601c390f_fill_in_missing_denormalized_document_title.py b/h/migrations/versions/58bb601c390f_fill_in_missing_denormalized_document_title.py index 27f0c546e50..f2742585c17 100644 --- a/h/migrations/versions/58bb601c390f_fill_in_missing_denormalized_document_title.py +++ b/h/migrations/versions/58bb601c390f_fill_in_missing_denormalized_document_title.py @@ -20,7 +20,7 @@ Session = sessionmaker() -class Window(namedtuple("Window", ["start", "end"])): # noqa: PYI024 +class Window(namedtuple("Window", ["start", "end"])): # noqa: PYI024, SLOT002 pass @@ -71,7 +71,7 @@ def downgrade(): def _document_title(document): - for meta in document.meta_titles: + for meta in document.meta_titles: # noqa: RET503 if meta.value: return meta.value[0] diff --git a/h/migrations/versions/5d1abac3c1a1_revert_annotation_metadata.py b/h/migrations/versions/5d1abac3c1a1_revert_annotation_metadata.py index 67c06946fef..1b0f8597f89 100644 --- a/h/migrations/versions/5d1abac3c1a1_revert_annotation_metadata.py +++ b/h/migrations/versions/5d1abac3c1a1_revert_annotation_metadata.py @@ -13,4 +13,3 @@ def upgrade(): def downgrade(): """No downgrade, see orignal migration 5d1abac3c1a1.""" - pass diff --git a/h/migrations/versions/5e535a075f16_remove_null_document_titles.py b/h/migrations/versions/5e535a075f16_remove_null_document_titles.py index 86fcce80900..a6c062ca5de 100644 --- a/h/migrations/versions/5e535a075f16_remove_null_document_titles.py +++ b/h/migrations/versions/5e535a075f16_remove_null_document_titles.py @@ -40,17 +40,13 @@ def upgrade(): for original_title in document_meta.value: if original_title is None: n += 1 - log.info( - "removing null title from 
document_meta {id}".format( - id=document_meta.id - ) - ) + log.info(f"removing null title from document_meta {document_meta.id}") # noqa: G004 else: new_titles.append(original_title) if len(new_titles) != len(document_meta.value): document_meta.value = new_titles session.commit() - log.info("deleted {n} null document titles".format(n=n)) + log.info(f"deleted {n} null document titles") # noqa: G004 def downgrade(): diff --git a/h/migrations/versions/615358b6c428_move_all_groups_to_default_organization.py b/h/migrations/versions/615358b6c428_move_all_groups_to_default_organization.py index ce02d2ef8f2..90f8f0d10a1 100644 --- a/h/migrations/versions/615358b6c428_move_all_groups_to_default_organization.py +++ b/h/migrations/versions/615358b6c428_move_all_groups_to_default_organization.py @@ -79,7 +79,7 @@ class Organization(Base): def new_org(authority, session): organization = Organization(authority=authority, name=authority) session.add(organization) - log.info("Created new organization {name}".format(name=organization.name)) + log.info(f"Created new organization {organization.name}") # noqa: G004 return organization @@ -119,17 +119,11 @@ def upgrade(): session.commit() - log.info("Skipped {n} groups that already had an organization".format(n=skipped)) + log.info(f"Skipped {skipped} groups that already had an organization") # noqa: G004 log.info( - "Assigned {n} groups to the __default__ organization".format( - n=assigned_to_default_org - ) - ) - log.info( - "Assigned {n} groups to authority organizations".format( - n=assigned_to_authority_org - ) + f"Assigned {assigned_to_default_org} groups to the __default__ organization" # noqa: G004 ) + log.info(f"Assigned {assigned_to_authority_org} groups to authority organizations") # noqa: G004 def downgrade(): diff --git a/h/migrations/versions/63e8b1fe1d4b_clean_up_document_uris.py b/h/migrations/versions/63e8b1fe1d4b_clean_up_document_uris.py index ee9752fff30..4744a171c5d 100644 --- a/h/migrations/versions/63e8b1fe1d4b_clean_up_document_uris.py +++ b/h/migrations/versions/63e8b1fe1d4b_clean_up_document_uris.py @@ -46,8 +46,8 @@ def upgrade(): session.commit() - log.info("Removed whitespace from {n} document_uris".format(n=len(changed))) - log.info("Deleted {n} document_uris with empty uris".format(n=len(to_delete))) + log.info(f"Removed whitespace from {len(changed)} document_uris") # noqa: G004 + log.info(f"Deleted {len(to_delete)} document_uris with empty uris") # noqa: G004 def downgrade(): diff --git a/h/migrations/versions/6964a8237c88_strip_whitespace_from_document_titles.py b/h/migrations/versions/6964a8237c88_strip_whitespace_from_document_titles.py index dc5d07d56e3..e274b81a73d 100644 --- a/h/migrations/versions/6964a8237c88_strip_whitespace_from_document_titles.py +++ b/h/migrations/versions/6964a8237c88_strip_whitespace_from_document_titles.py @@ -41,18 +41,14 @@ def upgrade(): stripped_title = original_title.strip() if original_title != stripped_title: n += 1 - log.info( - "updated '{original_title}' to '{stripped_title}'".format( - original_title=original_title, stripped_title=stripped_title - ) - ) + log.info(f"updated '{original_title}' to '{stripped_title}'") # noqa: G004 new_titles.append(stripped_title) if new_titles != document_meta.value: document_meta.value = new_titles session.commit() - log.info("updated {n} document titles".format(n=n)) + log.info(f"updated {n} document titles") # noqa: G004 def downgrade(): diff --git a/h/migrations/versions/6d9257ad610d_delete_empty_array_document_titles.py 
b/h/migrations/versions/6d9257ad610d_delete_empty_array_document_titles.py index fb4124971f5..e35c1c012c3 100644 --- a/h/migrations/versions/6d9257ad610d_delete_empty_array_document_titles.py +++ b/h/migrations/versions/6d9257ad610d_delete_empty_array_document_titles.py @@ -37,11 +37,11 @@ def upgrade(): to_delete = [] for document_meta in session.query(DocumentMeta).filter_by(type="title"): if document_meta.value == []: - to_delete.append(document_meta) + to_delete.append(document_meta) # noqa: PERF401 for document_meta in to_delete: session.delete(document_meta) session.commit() - log.info("deleted {n} empty-array document titles".format(n=len(to_delete))) + log.info(f"deleted {len(to_delete)} empty-array document titles") # noqa: G004 def downgrade(): diff --git a/h/migrations/versions/6df1c8c3e423_revert_annotation_user_id.py b/h/migrations/versions/6df1c8c3e423_revert_annotation_user_id.py index 28e60072ef7..aa0c79612c8 100644 --- a/h/migrations/versions/6df1c8c3e423_revert_annotation_user_id.py +++ b/h/migrations/versions/6df1c8c3e423_revert_annotation_user_id.py @@ -16,4 +16,3 @@ def upgrade(): def downgrade(): """No downgrade, see the upgrade for migration 8250dce465f2.""" - pass diff --git a/h/migrations/versions/77bc5b4f2205_revert_annotation_pk.py b/h/migrations/versions/77bc5b4f2205_revert_annotation_pk.py index 63ceb856887..42ab26b642b 100644 --- a/h/migrations/versions/77bc5b4f2205_revert_annotation_pk.py +++ b/h/migrations/versions/77bc5b4f2205_revert_annotation_pk.py @@ -15,4 +15,3 @@ def upgrade(): def downgrade(): """No downgrade, check version f064c2b2e04a.""" - pass diff --git a/h/migrations/versions/7f3d80550fff_correct_timestamps_of_elife_test_annotations.py b/h/migrations/versions/7f3d80550fff_correct_timestamps_of_elife_test_annotations.py index ac84f5675d9..4190053c319 100644 --- a/h/migrations/versions/7f3d80550fff_correct_timestamps_of_elife_test_annotations.py +++ b/h/migrations/versions/7f3d80550fff_correct_timestamps_of_elife_test_annotations.py @@ -50,8 +50,8 @@ def upgrade(): # Pre-parse the timestamps to reduce the amount of time spent in the DB # transaction. for timestamp in TIMESTAMPS: - timestamp["created"] = datetime.strptime(timestamp["created"], FORMAT) - timestamp["updated"] = datetime.strptime(timestamp["updated"], FORMAT) + timestamp["created"] = datetime.strptime(timestamp["created"], FORMAT) # noqa: DTZ007 + timestamp["updated"] = datetime.strptime(timestamp["updated"], FORMAT) # noqa: DTZ007 # Now make the DB changes. 
corrected = 0 diff --git a/h/migrations/versions/8fcdcefd8c6f_delete_orphaned_api_tokens.py b/h/migrations/versions/8fcdcefd8c6f_delete_orphaned_api_tokens.py index 91b451b5839..d550fc51cb6 100644 --- a/h/migrations/versions/8fcdcefd8c6f_delete_orphaned_api_tokens.py +++ b/h/migrations/versions/8fcdcefd8c6f_delete_orphaned_api_tokens.py @@ -49,7 +49,7 @@ def upgrade(): ).all() op.execute(delete(Token).where(Token.userid.in_(userids))) - log.info(f"Deleted %d orphaned tokens", len(userids)) # noqa: F541 + log.info(f"Deleted %d orphaned tokens", len(userids)) # noqa: F541, G004 def downgrade(): diff --git a/h/migrations/versions/9cbc5c5ad23d_fill_in_missing_annotation_deleted.py b/h/migrations/versions/9cbc5c5ad23d_fill_in_missing_annotation_deleted.py index ab5323356fd..148db4bbcd0 100644 --- a/h/migrations/versions/9cbc5c5ad23d_fill_in_missing_annotation_deleted.py +++ b/h/migrations/versions/9cbc5c5ad23d_fill_in_missing_annotation_deleted.py @@ -19,7 +19,7 @@ def upgrade(): op.execute( annotation.update() .where(annotation.c.deleted == None) # noqa: E711 - .values(deleted=False) # noqa: E711 + .values(deleted=False) ) diff --git a/h/migrations/versions/9e47da806421_remove_old_tokens.py b/h/migrations/versions/9e47da806421_remove_old_tokens.py index 5c3a0cada1f..92f54e80b08 100644 --- a/h/migrations/versions/9e47da806421_remove_old_tokens.py +++ b/h/migrations/versions/9e47da806421_remove_old_tokens.py @@ -56,7 +56,7 @@ def upgrade(): for deleted_token_id in deleted_token_ids: count += 1 - log.info(f"Deleted token (%d): %s", count, deleted_token_id) # noqa: F541 + log.info(f"Deleted token (%d): %s", count, deleted_token_id) # noqa: F541, G004 def downgrade(): diff --git a/h/migrations/versions/9e6b4f70f588_removing_trailing_from_pdf_urns.py b/h/migrations/versions/9e6b4f70f588_removing_trailing_from_pdf_urns.py index 3c166fe6f8d..cd206070b95 100644 --- a/h/migrations/versions/9e6b4f70f588_removing_trailing_from_pdf_urns.py +++ b/h/migrations/versions/9e6b4f70f588_removing_trailing_from_pdf_urns.py @@ -45,7 +45,7 @@ class DocumentURI(Base): content_type = sa.Column(sa.UnicodeText) -def upgrade(): +def upgrade(): # noqa: PLR0912 session = Session(bind=op.get_bind()) document_uris = session.query(DocumentURI).filter( diff --git a/h/migrations/versions/9f5e274b202c_update_all_document_web_uris.py b/h/migrations/versions/9f5e274b202c_update_all_document_web_uris.py index 31708ceb731..24d792ee0cb 100644 --- a/h/migrations/versions/9f5e274b202c_update_all_document_web_uris.py +++ b/h/migrations/versions/9f5e274b202c_update_all_document_web_uris.py @@ -24,7 +24,7 @@ log = logging.getLogger(__name__) -class Window(namedtuple("Window", ["start", "end"])): # noqa: PYI024 +class Window(namedtuple("Window", ["start", "end"])): # noqa: PYI024, SLOT002 pass @@ -39,7 +39,7 @@ class Document(Base): def updated_web_uri(self): def first_http_url(type_=None): - for document_uri in self.document_uris: + for document_uri in self.document_uris: # noqa: RET503 uri = document_uri.uri if type_ is not None and document_uri.type != type_: continue @@ -89,8 +89,8 @@ def upgrade(): session.commit() - log.info("Updated {updated} web_uris".format(updated=updated)) - log.info("Left {not_changed} web_uris unchanged".format(not_changed=not_changed)) + log.info(f"Updated {updated} web_uris") # noqa: G004 + log.info(f"Left {not_changed} web_uris unchanged") # noqa: G004 def downgrade(): diff --git a/h/migrations/versions/a44ef07b085a_fill_in_missing_denormalized_document_web_uri.py 
b/h/migrations/versions/a44ef07b085a_fill_in_missing_denormalized_document_web_uri.py index 2116a09e14c..e240c5770a6 100644 --- a/h/migrations/versions/a44ef07b085a_fill_in_missing_denormalized_document_web_uri.py +++ b/h/migrations/versions/a44ef07b085a_fill_in_missing_denormalized_document_web_uri.py @@ -21,7 +21,7 @@ Session = sessionmaker() -class Window(namedtuple("Window", ["start", "end"])): # noqa: PYI024 +class Window(namedtuple("Window", ["start", "end"])): # noqa: PYI024, SLOT002 pass @@ -68,7 +68,7 @@ def downgrade(): def _document_web_uri(document): - for docuri in document.document_uris: + for docuri in document.document_uris: # noqa: RET503 uri = urlparse(docuri.uri) if uri.scheme in ["http", "https"]: return docuri.uri diff --git a/h/migrations/versions/b102c50b1133_clean_up_moderation_extra_key.py b/h/migrations/versions/b102c50b1133_clean_up_moderation_extra_key.py index 8c77ed33bba..76ad97b22f7 100644 --- a/h/migrations/versions/b102c50b1133_clean_up_moderation_extra_key.py +++ b/h/migrations/versions/b102c50b1133_clean_up_moderation_extra_key.py @@ -41,7 +41,7 @@ def upgrade(): session = Session(bind=op.get_bind()) anns = session.query(Annotation).filter( - Annotation.extra.has_key("moderation"), # noqa + Annotation.extra.has_key("moderation"), ) found = 0 for ann in anns: diff --git a/h/migrations/versions/b980b1a8f6af_add_authclient_grant_response_type_columns.py b/h/migrations/versions/b980b1a8f6af_add_authclient_grant_response_type_columns.py index dc0fce3c064..a54d9a370ac 100644 --- a/h/migrations/versions/b980b1a8f6af_add_authclient_grant_response_type_columns.py +++ b/h/migrations/versions/b980b1a8f6af_add_authclient_grant_response_type_columns.py @@ -28,7 +28,7 @@ class GrantType(enum.Enum): authorization_code = "authorization_code" client_credentials = "client_credentials" jwt_bearer = "urn:ietf:params:oauth:grant-type:jwt-bearer" - password = "password" + password = "password" # noqa: S105 grant_type = sa.Enum(GrantType, name="authclient_grant_type") @@ -36,7 +36,7 @@ class GrantType(enum.Enum): class ResponseType(enum.Enum): code = "code" - token = "token" + token = "token" # noqa: S105 response_type = sa.Enum(ResponseType, name="authclient_response_type") diff --git a/h/migrations/versions/bcdd81e23920_fill_in_missing_annotation_document_id.py b/h/migrations/versions/bcdd81e23920_fill_in_missing_annotation_document_id.py index 18eb0e5106b..b131da0702b 100644 --- a/h/migrations/versions/bcdd81e23920_fill_in_missing_annotation_document_id.py +++ b/h/migrations/versions/bcdd81e23920_fill_in_missing_annotation_document_id.py @@ -26,7 +26,7 @@ Session = sessionmaker() -class Window(namedtuple("Window", ["start", "end"])): # noqa: PYI024 +class Window(namedtuple("Window", ["start", "end"])): # noqa: PYI024, SLOT002 pass @@ -118,8 +118,8 @@ def upgrade(): session.commit() - log.debug("Created %d new documents" % new_documents) - log.debug("Filled in %d existing document ids" % document_id_updated) + log.debug("Created %d new documents" % new_documents) # noqa: G002, UP031 + log.debug("Filled in %d existing document ids" % document_id_updated) # noqa: G002, UP031 def downgrade(): diff --git a/h/migrations/versions/c943c3f8a7e5_update_imported_elife_ann_timestamps.py b/h/migrations/versions/c943c3f8a7e5_update_imported_elife_ann_timestamps.py index f5e0b55dd8d..8cc96e4dda1 100644 --- a/h/migrations/versions/c943c3f8a7e5_update_imported_elife_ann_timestamps.py +++ b/h/migrations/versions/c943c3f8a7e5_update_imported_elife_ann_timestamps.py @@ -51,8 +51,8 @@ def 
upgrade(): # Pre-parse the timestamps to reduce the amount of time spent in the DB # transaction. for timestamp in TIMESTAMPS: - timestamp["created"] = datetime.strptime(timestamp["created"], FORMAT) - timestamp["updated"] = datetime.strptime(timestamp["updated"], FORMAT) + timestamp["created"] = datetime.strptime(timestamp["created"], FORMAT) # noqa: DTZ007 + timestamp["updated"] = datetime.strptime(timestamp["updated"], FORMAT) # noqa: DTZ007 # Now make the DB changes. corrected = 0 diff --git a/h/migrations/versions/d536d9a342f3_fill_in_annotation_text_rendered_column.py b/h/migrations/versions/d536d9a342f3_fill_in_annotation_text_rendered_column.py index f2995540bc5..d1cca0ec9da 100644 --- a/h/migrations/versions/d536d9a342f3_fill_in_annotation_text_rendered_column.py +++ b/h/migrations/versions/d536d9a342f3_fill_in_annotation_text_rendered_column.py @@ -23,7 +23,7 @@ Session = sessionmaker() -class Window(namedtuple("Window", ["start", "end"])): # noqa: PYI024 +class Window(namedtuple("Window", ["start", "end"])): # noqa: PYI024, SLOT002 pass @@ -55,7 +55,7 @@ def fill_annotations_text_rendered(session): _fill_annotation_window_text_rendered(session, window) session.commit() - print(".", end="") + print(".", end="") # noqa: T201 sys.stdout.flush() diff --git a/h/migrations/versions/f32200e2e496_backfill_user_pubid.py b/h/migrations/versions/f32200e2e496_backfill_user_pubid.py index b9433a7bc2c..8af0aa58fa9 100644 --- a/h/migrations/versions/f32200e2e496_backfill_user_pubid.py +++ b/h/migrations/versions/f32200e2e496_backfill_user_pubid.py @@ -69,7 +69,7 @@ def backfill_users( ) session.rollback() else: - raise RuntimeError(f"Failed to generate {batch_count} unique pubids") + raise RuntimeError(f"Failed to generate {batch_count} unique pubids") # noqa: EM102, TRY003 logger.info("Back-filled %d user.pubid's", count) diff --git a/h/models/auth_client.py b/h/models/auth_client.py index ebc60d1e2d2..bf3dca830e7 100644 --- a/h/models/auth_client.py +++ b/h/models/auth_client.py @@ -40,7 +40,7 @@ class GrantType(enum.Enum): #: Resource owner credentials grant. Can be used by trusted clients that #: are allowed to ask users for their login credentials directly. - password = "password" + password = "password" # noqa: S105 class ResponseType(enum.Enum): @@ -56,7 +56,7 @@ class ResponseType(enum.Enum): #: "Implicit" grant, in which an authorization request receives an access #: token directly. - token = "token" + token = "token" # noqa: S105 class AuthClient(Base, Timestamps): diff --git a/h/models/document/_document.py b/h/models/document/_document.py index b8652305c92..6d94fa07e96 100644 --- a/h/models/document/_document.py +++ b/h/models/document/_document.py @@ -24,7 +24,7 @@ class Document(Base, mixins.Timestamps): #: The denormalized value of the "best" http(s) DocumentURI for this Document. web_uri = sa.Column("web_uri", sa.UnicodeText()) - # FIXME: This relationship should be named `uris` again after the + # FIXME: This relationship should be named `uris` again after the # noqa: FIX001, TD001, TD002, TD003 # dependency on the annotator-store is removed, as it clashes with # making the Postgres and Elasticsearch interface of a Document # object behave the same way. @@ -56,7 +56,7 @@ def first_http_url(type_=None): If no type is given just return this document's first http(s) URL, or None. 
""" - for document_uri in self.document_uris: + for document_uri in self.document_uris: # noqa: RET503 uri = document_uri.uri if type_ is not None and document_uri.type != type_: continue @@ -97,7 +97,7 @@ def find_or_create_by_uris( responsibility to create any other document uris. """ - finduris = [claimant_uri] + uris + finduris = [claimant_uri] + uris # noqa: RUF005 documents = cls.find_by_uris(session, finduris) if not documents.count(): @@ -115,7 +115,7 @@ def find_or_create_by_uris( try: session.flush() except sa.exc.IntegrityError as err: - raise ConcurrentUpdateError("concurrent document creation") from err + raise ConcurrentUpdateError("concurrent document creation") from err # noqa: EM101, TRY003 return documents @@ -129,7 +129,7 @@ def merge_documents(session, documents, updated=None): """ if updated is None: - updated = datetime.utcnow() + updated = datetime.utcnow() # noqa: DTZ003 master = documents[0] duplicates = documents[1:] @@ -163,12 +163,12 @@ def merge_documents(session, documents, updated=None): ) except sa.exc.IntegrityError as err: - raise ConcurrentUpdateError("concurrent document merges") from err + raise ConcurrentUpdateError("concurrent document merges") from err # noqa: EM101, TRY003 return master -def update_document_metadata( +def update_document_metadata( # noqa: PLR0913 session, target_uri, document_meta_dicts, @@ -199,9 +199,9 @@ def update_document_metadata( :rtype: h.models.Document """ if created is None: - created = datetime.utcnow() + created = datetime.utcnow() # noqa: DTZ003 if updated is None: - updated = datetime.utcnow() + updated = datetime.utcnow() # noqa: DTZ003 documents = Document.find_or_create_by_uris( session, diff --git a/h/models/document/_meta.py b/h/models/document/_meta.py index 096b39fa50d..0026a7d9774 100644 --- a/h/models/document/_meta.py +++ b/h/models/document/_meta.py @@ -48,8 +48,14 @@ def __repr__(self): return f"" -def create_or_update_document_meta( - session, claimant, type, value, document, created, updated +def create_or_update_document_meta( # noqa: PLR0913 + session, + claimant, + type, # noqa: A002 + value, + document, + created, + updated, ): """ Create or update a DocumentMeta with the given parameters. @@ -103,7 +109,7 @@ def create_or_update_document_meta( else: existing_dm.value = value existing_dm.updated = updated - if not existing_dm.document == document: + if not existing_dm.document == document: # noqa: SIM201 log.warning( "Found DocumentMeta (id: %s)'s document_id (%s) doesn't " "match given Document's id (%s)", @@ -118,4 +124,4 @@ def create_or_update_document_meta( try: session.flush() except sa.exc.IntegrityError as err: - raise ConcurrentUpdateError("concurrent document meta updates") from err + raise ConcurrentUpdateError("concurrent document meta updates") from err # noqa: EM101, TRY003 diff --git a/h/models/document/_uri.py b/h/models/document/_uri.py index b20d526c976..9d375b2a411 100644 --- a/h/models/document/_uri.py +++ b/h/models/document/_uri.py @@ -75,8 +75,15 @@ def __repr__(self): return f"" -def create_or_update_document_uri( - session, claimant, uri, type, content_type, document, created, updated +def create_or_update_document_uri( # noqa: PLR0913 + session, + claimant, + uri, + type, # noqa: A002 + content_type, + document, + created, + updated, ): """ Create or update a DocumentURI with the given parameters. 
@@ -129,7 +136,7 @@ def create_or_update_document_uri( updated=updated, ) session.add(docuri) - elif not docuri.document == document: + elif not docuri.document == document: # noqa: SIM201 log.warning( "Found DocumentURI (id: %s)'s document_id (%s) doesn't match " "given Document's id (%s)", @@ -143,4 +150,4 @@ def create_or_update_document_uri( try: session.flush() except sa.exc.IntegrityError as err: - raise ConcurrentUpdateError("concurrent document uri updates") from err + raise ConcurrentUpdateError("concurrent document uri updates") from err # noqa: EM101, TRY003 diff --git a/h/models/feature.py b/h/models/feature.py index 4e52945ec0e..d50cd73889b 100644 --- a/h/models/feature.py +++ b/h/models/feature.py @@ -95,4 +95,4 @@ def remove_old_flags(cls, session): log.info("removed %d old/unknown feature flags from database", count) def __repr__(self): # pragma: no cover - return "".format(f=self) + return f"" diff --git a/h/models/group.py b/h/models/group.py index 44c362872b6..e4212d2294e 100644 --- a/h/models/group.py +++ b/h/models/group.py @@ -219,8 +219,8 @@ def members(self) -> tuple[User, ...]: @sa.orm.validates("name") def validate_name(self, _key, name): if not GROUP_NAME_MIN_LENGTH <= len(name) <= GROUP_NAME_MAX_LENGTH: - raise ValueError( - f"name must be between {GROUP_NAME_MIN_LENGTH} and {GROUP_NAME_MAX_LENGTH} characters long" + raise ValueError( # noqa: TRY003 + f"name must be between {GROUP_NAME_MIN_LENGTH} and {GROUP_NAME_MAX_LENGTH} characters long" # noqa: EM102 ) return name @@ -230,14 +230,14 @@ def validate_authority_provided_id(self, _key, authority_provided_id): return None if not re.match(AUTHORITY_PROVIDED_ID_PATTERN, authority_provided_id): - raise ValueError( - "authority_provided_id must only contain characters allowed" + raise ValueError( # noqa: TRY003 + "authority_provided_id must only contain characters allowed" # noqa: EM101 r" in encoded URIs: [a-zA-Z0-9._\-+!~*()']" ) if len(authority_provided_id) > AUTHORITY_PROVIDED_ID_MAX_LENGTH: - raise ValueError( - f"authority_provided_id must be {AUTHORITY_PROVIDED_ID_MAX_LENGTH} characters or fewer" + raise ValueError( # noqa: TRY003 + f"authority_provided_id must be {AUTHORITY_PROVIDED_ID_MAX_LENGTH} characters or fewer" # noqa: EM102 " characters long" ) @@ -267,8 +267,8 @@ def type(self): if self_type_flags == type_flags: return type_ - raise ValueError( - "This group doesn't seem to match any known type of group. " + raise ValueError( # noqa: TRY003 + "This group doesn't seem to match any known type of group. " # noqa: EM101 "This shouldn't be in the database!" 
) @@ -277,7 +277,7 @@ def type(self, value): try: new_type_flags = GROUP_TYPE_FLAGS[value] except KeyError as err: - raise ValueError() from err + raise ValueError from err for index, flag in enumerate(new_type_flags._fields): setattr(self, flag, new_type_flags[index]) diff --git a/h/models/group_scope.py b/h/models/group_scope.py index ba990975921..27227374dbc 100644 --- a/h/models/group_scope.py +++ b/h/models/group_scope.py @@ -67,7 +67,7 @@ def scope(self, value): """ parsed_origin, parsed_path = parse_scope_from_url(value) if parsed_origin is None: - raise ValueError("Invalid URL for scope: missing origin component") + raise ValueError("Invalid URL for scope: missing origin component") # noqa: EM101, TRY003 self._origin = parsed_origin self._path = parsed_path diff --git a/h/models/organization.py b/h/models/organization.py index fa55e0725c3..b9fa11b561c 100644 --- a/h/models/organization.py +++ b/h/models/organization.py @@ -29,8 +29,8 @@ def validate_name(self, _key, name): if not ( Organization.NAME_MIN_CHARS <= len(name) <= Organization.NAME_MAX_CHARS ): - raise ValueError( - f"name must be between {Organization.NAME_MIN_CHARS} and {Organization.NAME_MAX_CHARS} characters long" + raise ValueError( # noqa: TRY003 + f"name must be between {Organization.NAME_MIN_CHARS} and {Organization.NAME_MAX_CHARS} characters long" # noqa: EM102 ) return name diff --git a/h/models/token.py b/h/models/token.py index 0101094dfb4..df744ce6518 100644 --- a/h/models/token.py +++ b/h/models/token.py @@ -1,4 +1,4 @@ -import datetime +import datetime # noqa: A005 import sqlalchemy from sqlalchemy.dialects import postgresql @@ -65,7 +65,7 @@ class Token(Base, mixins.Timestamps): def expired(self): """Return True if this access token has expired, False otherwise.""" if self.expires: - return datetime.datetime.utcnow() > self.expires + return datetime.datetime.utcnow() > self.expires # noqa: DTZ003 return False @@ -73,7 +73,7 @@ def expired(self): def refresh_token_expired(self): """Return True if this refresh token has expired, False otherwise.""" if self.refresh_token_expires: - return datetime.datetime.utcnow() > self.refresh_token_expires + return datetime.datetime.utcnow() > self.refresh_token_expires # noqa: DTZ003 return False @@ -83,7 +83,7 @@ def ttl(self): if not self.expires: return None - now = datetime.datetime.utcnow() + now = datetime.datetime.utcnow() # noqa: DTZ003 ttl = self.expires - now ttl_in_seconds = ttl.total_seconds() # We truncate (rather than round) ttl_in_seconds to get an int. 
diff --git a/h/models/user.py b/h/models/user.py index d883e0629db..d14c7d29164 100644 --- a/h/models/user.py +++ b/h/models/user.py @@ -267,7 +267,7 @@ def userid(cls): # noqa: N805 @property def is_activated(self): - if self.activation_id is None: + if self.activation_id is None: # noqa: SIM103 return True return False @@ -276,7 +276,7 @@ def activate(self): """Activate the user by deleting any activation they have.""" session = sa.orm.object_session(self) - self.activation_date = datetime.datetime.utcnow() + self.activation_date = datetime.datetime.utcnow() # noqa: DTZ003 session.delete(self.activation) #: Hashed password @@ -335,22 +335,22 @@ def validate_email(self, _key, email): return email if len(email) > EMAIL_MAX_LENGTH: - raise ValueError( - f"email must be less than {EMAIL_MAX_LENGTH} characters long" + raise ValueError( # noqa: TRY003 + f"email must be less than {EMAIL_MAX_LENGTH} characters long" # noqa: EM102 ) return email @sa.orm.validates("_username") def validate_username(self, _key, username): if not USERNAME_MIN_LENGTH <= len(username) <= USERNAME_MAX_LENGTH: - raise ValueError( - f"username must be between {USERNAME_MIN_LENGTH} and {USERNAME_MAX_LENGTH} " + raise ValueError( # noqa: TRY003 + f"username must be between {USERNAME_MIN_LENGTH} and {USERNAME_MAX_LENGTH} " # noqa: EM102 "characters long" ) if not re.match(USERNAME_PATTERN, username): - raise ValueError( - "username must have only letters, numbers, periods, and underscores." + raise ValueError( # noqa: TRY003 + "username must have only letters, numbers, periods, and underscores." # noqa: EM101 ) return username diff --git a/h/models/user_identity.py b/h/models/user_identity.py index ff1cd10ceec..c2801bc3a74 100644 --- a/h/models/user_identity.py +++ b/h/models/user_identity.py @@ -15,6 +15,4 @@ class UserIdentity(Base): ) def __repr__(self): - return "{}(provider={!r}, provider_unique_id={!r})".format( - self.__class__.__name__, self.provider, self.provider_unique_id - ) + return f"{self.__class__.__name__}(provider={self.provider!r}, provider_unique_id={self.provider_unique_id!r})" diff --git a/h/notification/reply.py b/h/notification/reply.py index a956cfc752d..bdbcbd8819a 100644 --- a/h/notification/reply.py +++ b/h/notification/reply.py @@ -8,7 +8,7 @@ log = logging.getLogger(__name__) -class Notification( +class Notification( # noqa: SLOT002 namedtuple( # noqa: PYI024 "Notification", ["reply", "reply_user", "parent", "parent_user", "document"] ) @@ -29,7 +29,7 @@ class Notification( """ -def get_notification( # noqa: C901 +def get_notification( # noqa: C901, PLR0911 request, annotation, action ): """ @@ -94,7 +94,7 @@ def get_notification( # noqa: C901 if not reply.shared: return None - # FIXME: we should be retrieving the document from the root annotation, not + # FIXME: we should be retrieving the document from the root annotation, not # noqa: FIX001, TD001, TD002, TD003 # the reply, and dealing with the possibility that we have no document # metadata. if reply.document is None: diff --git a/h/paginator.py b/h/paginator.py index 2fc77bfe671..a1de99412da 100644 --- a/h/paginator.py +++ b/h/paginator.py @@ -23,7 +23,7 @@ def paginate(request, total, page_size=PAGE_SIZE): # last pages are always shown. There should be at most 3 pages # to the left and 3 to the right of the current page. Any more # pages than that are represented by ellipses on either side. 
- # Ex: [1, '...',27, 28, 29, 30, 31, 32, 33, '...', 60] + # Ex: [1, '...',27, 28, 29, 30, 31, 32, 33, '...', 60] # noqa: ERA001 page_numbers = [] buffer = 3 diff --git a/h/presenters/__init__.py b/h/presenters/__init__.py index 37a9cdb2c5d..b97c047eb7a 100644 --- a/h/presenters/__init__.py +++ b/h/presenters/__init__.py @@ -16,8 +16,8 @@ "DocumentJSONPresenter", "DocumentSearchIndexPresenter", "GroupJSONPresenter", - "GroupsJSONPresenter", - "UserJSONPresenter", "GroupMembershipJSONPresenter", + "GroupsJSONPresenter", "TrustedUserJSONPresenter", + "UserJSONPresenter", ) diff --git a/h/presenters/annotation_jsonld.py b/h/presenters/annotation_jsonld.py index ef7ee81b5a4..63084627f8d 100644 --- a/h/presenters/annotation_jsonld.py +++ b/h/presenters/annotation_jsonld.py @@ -39,7 +39,7 @@ def _bodies(self): ] if self.annotation.tags: # pragma: no cover for tag in self.annotation.tags: - bodies.append( + bodies.append( # noqa: PERF401 {"type": "TextualBody", "value": tag, "purpose": "tagging"} ) @@ -65,7 +65,7 @@ def _target(self): except KeyError: continue if type_ == "RangeSelector": - selector = _convert_range_selector(selector) + selector = _convert_range_selector(selector) # noqa: PLW2901 if selector is None: continue selectors.append(selector) diff --git a/h/presenters/annotation_searchindex.py b/h/presenters/annotation_searchindex.py index 18ddbba69d4..ca23c0fad25 100644 --- a/h/presenters/annotation_searchindex.py +++ b/h/presenters/annotation_searchindex.py @@ -47,7 +47,7 @@ def asdict(self): def _add_hidden(self, result): # Mark an annotation as hidden if it and all of it's children have been # moderated and hidden. - parents_and_replies = [self.annotation.id] + self.annotation.thread_ids + parents_and_replies = [self.annotation.id] + self.annotation.thread_ids # noqa: RUF005 ann_mod_svc = self.request.find_service(name="annotation_moderation") is_hidden = len(ann_mod_svc.all_hidden(parents_and_replies)) == len( diff --git a/h/presenters/document_html.py b/h/presenters/document_html.py index fd8f1ec0074..be91aedc051 100644 --- a/h/presenters/document_html.py +++ b/h/presenters/document_html.py @@ -135,7 +135,7 @@ def link_text(self): # has no title). In those cases we want to remove the http(s):// from # the front and unquote it for link text. lower = title.lower() - if lower.startswith("http://") or lower.startswith("https://"): + if lower.startswith("http://") or lower.startswith("https://"): # noqa: PIE810 parts = urlparse(title) return unquote(parts.netloc + parts.path) diff --git a/h/presenters/organization_json.py b/h/presenters/organization_json.py index c5a75c9585c..f06370bac0b 100644 --- a/h/presenters/organization_json.py +++ b/h/presenters/organization_json.py @@ -5,7 +5,7 @@ def __init__(self, organization, request): self.request = request self.organization = organization - def asdict(self, summary=False): + def asdict(self, summary=False): # noqa: FBT002 """ Create a dict of the organization. 
diff --git a/h/realtime.py b/h/realtime.py index 374e908549b..0a7f9feaf48 100644 --- a/h/realtime.py +++ b/h/realtime.py @@ -31,7 +31,7 @@ def __init__(self, connection, routing_key, handler): self.handler = handler self.exchange = get_exchange() - def get_consumers(self, consumer_factory, channel): + def get_consumers(self, consumer_factory, channel): # noqa: ARG002 name = self.generate_queue_name() queue = kombu.Queue( name, @@ -106,7 +106,7 @@ def _publish(self, routing_key, payload): except (OperationalError, LimitExceeded) as err: # If we fail to connect (OperationalError), or we don't get a # producer from the pool in time (LimitExceeded) raise - raise RealtimeMessageQueueError() from err + raise RealtimeMessageQueueError() from err # noqa: RSE102 def get_exchange(): @@ -117,7 +117,7 @@ def get_exchange(): ) -def get_connection(settings, fail_fast=False): +def get_connection(settings, fail_fast=False): # noqa: FBT002 """ Return a `kombu.Connection` based on the application's settings. diff --git a/h/renderers.py b/h/renderers.py index 21b94df0999..348b01baad0 100644 --- a/h/renderers.py +++ b/h/renderers.py @@ -32,7 +32,7 @@ def __call__(self, value, system): # file to user agents that don't support compression, or vice-versa. if response.vary: if "Accept-Encoding" not in response.vary: - response.vary = response.vary + ("Accept-Encoding",) + response.vary = response.vary + ("Accept-Encoding",) # noqa: RUF005 else: response.vary = ("Accept-Encoding",) diff --git a/h/routes.py b/h/routes.py index 107c1713f04..a53154761d6 100644 --- a/h/routes.py +++ b/h/routes.py @@ -1,4 +1,4 @@ -def includeme(config): +def includeme(config): # noqa: PLR0915 # Core config.add_route("index", "/") config.add_route("robots", "/robots.txt") diff --git a/h/schemas/analytics.py b/h/schemas/analytics.py index 43b836e7119..a0885e1d8f6 100644 --- a/h/schemas/analytics.py +++ b/h/schemas/analytics.py @@ -4,7 +4,7 @@ class EventSchema(JSONSchema): - schema = { + schema = { # noqa: RUF012 "type": "object", "required": ["event"], "properties": { diff --git a/h/schemas/annotation.py b/h/schemas/annotation.py index a86ea64bdae..313024f2b4b 100644 --- a/h/schemas/annotation.py +++ b/h/schemas/annotation.py @@ -69,7 +69,7 @@ def _validate_wildcard_uri(node, value): class AnnotationSchema(JSONSchema): """Validate an annotation object.""" - schema = { + schema = { # noqa: RUF012 "type": "object", "properties": { "document": copy.deepcopy(DOCUMENT_SCHEMA), @@ -107,7 +107,7 @@ class URLMigrationSchema(JSONSchema): schema_version = 7 # Required for `propertyNames` - schema = { + schema = { # noqa: RUF012 "type": "object", # The restriction to HTTP(S) URLs is just to help catch mistakes # in the input. We could relax this constraint if needed. @@ -311,7 +311,7 @@ def _target_selectors(targets): for target_selector in selectors: for field in ["suffix", "prefix"]: - if value := target_selector.get(field): + if value := target_selector.get(field): # noqa: SIM102 if not is_valid_unicode(value): raise ValidationError( f"{field}: " + _(f"'{field}' must be valid unicode") # noqa: INT001 @@ -482,7 +482,7 @@ def _date_is_parsable(value): # than 9999 it is assumed to be a year and not ms since the epoch. 
try: if float(value) < 9999: - raise ValueError("This is not in the form ms since the epoch.") + raise ValueError("This is not in the form ms since the epoch.") # noqa: EM101, TRY003, TRY301 except ValueError: try: parse(value) diff --git a/h/schemas/api/group.py b/h/schemas/api/group.py index 02373701351..b076130b5f9 100644 --- a/h/schemas/api/group.py +++ b/h/schemas/api/group.py @@ -24,7 +24,7 @@ class GroupAPISchema(JSONSchema): """Base class for validating group resource API data.""" - schema = {"type": "object", "properties": GROUP_SCHEMA_PROPERTIES} + schema = {"type": "object", "properties": GROUP_SCHEMA_PROPERTIES} # noqa: RUF012 def __init__(self, group_authority=None, default_authority=None): """ @@ -63,8 +63,8 @@ def _validate_name(self, appstruct): name = appstruct.get("name") if name and name.strip() != name: - raise ValidationError( - "Group names can't have leading or trailing whitespace." + raise ValidationError( # noqa: TRY003 + "Group names can't have leading or trailing whitespace." # noqa: EM101 ) def _validate_groupid(self, appstruct): @@ -88,7 +88,7 @@ def _validate_groupid(self, appstruct): ): # This is a first-party group raise ValidationError( - "{err_msg} '{authority}'".format( + "{err_msg} '{authority}'".format( # noqa: EM103 err_msg=_( "groupid may only be set on groups outside of the default authority" ), @@ -102,7 +102,7 @@ def _validate_groupid(self, appstruct): # The authority part of the ``groupid`` doesn't match the # group's authority raise ValidationError( - "{err_msg} '{groupid}'".format( + "{err_msg} '{groupid}'".format( # noqa: EM103 err_msg=_("Invalid authority specified in groupid"), groupid=groupid ) ) @@ -123,7 +123,7 @@ def _whitelisted_fields_only(appstruct): class CreateGroupAPISchema(GroupAPISchema): """Schema for validating create-group API data.""" - schema = { + schema = { # noqa: RUF012 "type": "object", "properties": GROUP_SCHEMA_PROPERTIES, "required": ["name"], # ``name`` is a required field when creating diff --git a/h/schemas/api/group_membership.py b/h/schemas/api/group_membership.py index 167acaed0c6..8c7fb0cdedb 100644 --- a/h/schemas/api/group_membership.py +++ b/h/schemas/api/group_membership.py @@ -2,7 +2,7 @@ class EditGroupMembershipAPISchema(JSONSchema): - schema = { + schema = { # noqa: RUF012 "type": "object", "properties": { "roles": { diff --git a/h/schemas/api/user.py b/h/schemas/api/user.py index eb0160f4767..116450486a0 100644 --- a/h/schemas/api/user.py +++ b/h/schemas/api/user.py @@ -10,7 +10,7 @@ class CreateUserAPISchema(JSONSchema): """Validate a user JSON object.""" - schema = { + schema = { # noqa: RUF012 "type": "object", "properties": { "authority": {"type": "string", "format": "hostname"}, @@ -53,7 +53,7 @@ def validate(self, data): class UpdateUserAPISchema(JSONSchema): """Validate a user JSON object.""" - schema = { + schema = { # noqa: RUF012 "type": "object", "properties": { "email": { diff --git a/h/schemas/base.py b/h/schemas/base.py index 97f8ae30f74..e1d118acf68 100644 --- a/h/schemas/base.py +++ b/h/schemas/base.py @@ -47,7 +47,7 @@ class JSONSchema: valid JSON schema. 
""" - schema = {} + schema = {} # noqa: RUF012 schema_version = 4 """The JSON Schema version used by this schema.""" @@ -60,7 +60,7 @@ def __init__(self): elif self.schema_version == 7: validator_cls = jsonschema.Draft7Validator else: - raise ValueError("Unsupported schema version") + raise ValueError("Unsupported schema version") # noqa: EM101, TRY003 self.validator = validator_cls(self.schema, format_checker=format_checker) diff --git a/h/schemas/forms/accounts/edit_profile.py b/h/schemas/forms/accounts/edit_profile.py index b7bcf4e657a..826f5e81a7a 100644 --- a/h/schemas/forms/accounts/edit_profile.py +++ b/h/schemas/forms/accounts/edit_profile.py @@ -14,14 +14,14 @@ def validate_url(node, cstruct): try: util.validate_url(cstruct) except ValueError as exc: - raise colander.Invalid(node, str(exc)) + raise colander.Invalid(node, str(exc)) # noqa: B904 def validate_orcid(node, cstruct): try: util.validate_orcid(cstruct) except ValueError as exc: - raise colander.Invalid(node, str(exc)) + raise colander.Invalid(node, str(exc)) # noqa: B904 class EditProfileSchema(CSRFSchema): diff --git a/h/schemas/forms/accounts/login.py b/h/schemas/forms/accounts/login.py index 3841a21fbed..ba950bedaa7 100644 --- a/h/schemas/forms/accounts/login.py +++ b/h/schemas/forms/accounts/login.py @@ -83,7 +83,7 @@ def default_values(request): def _should_autofocus_username(kwargs): # pragma: no cover """Return True if the username widget should be autofocused.""" - if LoginSchema.default_values(kwargs["request"]).get("username"): + if LoginSchema.default_values(kwargs["request"]).get("username"): # noqa: SIM103 # The username widget is going to be pre-filled, so don't autofocus it. # (This allows the password widget, which the user still has to type # into, to be autofocused instead.) diff --git a/h/schemas/forms/admin/group.py b/h/schemas/forms/admin/group.py index 4701abef295..4ea132f1b60 100644 --- a/h/schemas/forms/admin/group.py +++ b/h/schemas/forms/admin/group.py @@ -134,17 +134,17 @@ def group_organization_select_widget(_node, kwargs): # `zip` returns an iterator. The `SelectWidget` constructor requires an # actual list. 
- return SelectWidget(values=list(zip(org_pubids, org_labels))) + return SelectWidget(values=list(zip(org_pubids, org_labels, strict=False))) class AdminGroupSchema(CSRFSchema): def __init__(self, *args): - super().__init__(validator=username_validator, *args) + super().__init__(validator=username_validator, *args) # noqa: B026 group_type = colander.SchemaNode( colander.String(), title=_("Group Type"), - widget=SelectWidget(values=(("", _("Select")),) + VALID_GROUP_TYPES), + widget=SelectWidget(values=(("", _("Select")),) + VALID_GROUP_TYPES), # noqa: RUF005 validator=group_type_validator, ) diff --git a/h/schemas/forms/admin/organization.py b/h/schemas/forms/admin/organization.py index 63476d31052..d26322a9a5f 100644 --- a/h/schemas/forms/admin/organization.py +++ b/h/schemas/forms/admin/organization.py @@ -1,4 +1,4 @@ -from xml.etree import ElementTree +from xml.etree import ElementTree # noqa: ICN001 import colander from deform.widget import TextAreaWidget, TextInputWidget @@ -22,13 +22,13 @@ def validate_logo(node, value): raise colander.Invalid( node, _( - "Logo is larger than {:,d} characters".format( # noqa: INT002 + "Logo is larger than {:,d} characters".format( # noqa: INT002, UP032 Organization.LOGO_MAX_CHARS ) ), ) try: - root = ElementTree.fromstring(value) + root = ElementTree.fromstring(value) # noqa: S314 except ElementTree.ParseError as err: raise colander.Invalid(node, _("Logo is not parsable XML")) from err diff --git a/h/scripts/init_elasticsearch.py b/h/scripts/init_elasticsearch.py index 1f5e1389bff..26edb8528e4 100755 --- a/h/scripts/init_elasticsearch.py +++ b/h/scripts/init_elasticsearch.py @@ -35,7 +35,7 @@ def main(): settings = env["registry"].settings client = search.get_client(settings) - print("Initializing Elasticsearch index") + print("Initializing Elasticsearch index") # noqa: T201 search.init(client, check_icu_plugin=settings.get("es.check_icu_plugin", True)) diff --git a/h/search/__init__.py b/h/search/__init__.py index bd03b20763f..61b109d1a1b 100644 --- a/h/search/__init__.py +++ b/h/search/__init__.py @@ -13,17 +13,17 @@ ) __all__ = ( - "Search", - "TopLevelAnnotationsFilter", + "AuthorityFilter", "DeletedFilter", "Limiter", - "UserFilter", - "AuthorityFilter", + "Search", + "SharedAnnotationsFilter", "TagsAggregation", + "TopLevelAnnotationsFilter", + "UserFilter", "UsersAggregation", "get_client", "init", - "SharedAnnotationsFilter", ) diff --git a/h/search/config.py b/h/search/config.py index 5ad3421449f..d3e69745a26 100644 --- a/h/search/config.py +++ b/h/search/config.py @@ -105,7 +105,7 @@ } -def init(client, check_icu_plugin=True): +def init(client, check_icu_plugin=True): # noqa: FBT002 """Initialise Elasticsearch, creating necessary indices and aliases.""" # Ensure the ICU analysis plugin is installed if check_icu_plugin: @@ -151,11 +151,11 @@ def get_aliased_index(client): except elasticsearch.exceptions.NotFoundError: # no alias with that name return None if len(result) > 1: - raise RuntimeError( - "We don't support managing aliases that " + raise RuntimeError( # noqa: TRY003 + "We don't support managing aliases that " # noqa: EM101 "point to multiple indices at the moment!" ) - return list(result.keys())[0] + return list(result.keys())[0] # noqa: RUF015 def update_aliased_index(client, new_target): @@ -167,8 +167,8 @@ def update_aliased_index(client, new_target): """ old_target = get_aliased_index(client) if old_target is None: - raise RuntimeError( - "Cannot update aliased index for index that is not already aliased." 
+ raise RuntimeError( # noqa: TRY003 + "Cannot update aliased index for index that is not already aliased." # noqa: EM101 ) client.conn.indices.update_aliases( diff --git a/h/search/core.py b/h/search/core.py index 5802016bf88..a6b62cbd71f 100644 --- a/h/search/core.py +++ b/h/search/core.py @@ -36,8 +36,8 @@ class Search: def __init__( self, request, - separate_replies=False, - separate_wildcard_uri_keys=True, + separate_replies=False, # noqa: FBT002 + separate_wildcard_uri_keys=True, # noqa: FBT002 _replies_limit=200, ): self.es = request.es @@ -99,7 +99,7 @@ def _search(self, modifiers, aggregations, params): # Don't return any fields, just the metadata so set _source=False. search = elasticsearch_dsl.Search( using=self.es.conn, index=self.es.index - ).source(False) + ).source(False) # noqa: FBT003 for agg in aggregations: agg(search, params) @@ -112,7 +112,7 @@ def _search_annotations(self, params): # If separate_replies is True, don't return any replies to annotations. modifiers = self._modifiers if self.separate_replies: - modifiers = [query.TopLevelAnnotationsFilter()] + modifiers + modifiers = [query.TopLevelAnnotationsFilter()] + modifiers # noqa: RUF005 response = self._search(modifiers, self._aggregations, params) @@ -129,7 +129,7 @@ def _search_replies(self, annotation_ids): # replies to annotations is the RepliesMatcher and the params passed to # the modifiers. response = self._search( - [query.RepliesMatcher(annotation_ids)] + self._modifiers, + [query.RepliesMatcher(annotation_ids)] + self._modifiers, # noqa: RUF005 [], # Aggregations aren't used in replies. MultiDict({"limit": self._replies_limit}), ) diff --git a/h/search/index.py b/h/search/index.py index 78a105329cc..04606ba78b4 100644 --- a/h/search/index.py +++ b/h/search/index.py @@ -159,7 +159,7 @@ def _log_status(stream, log_every=1000): then = time.time() for item in stream: yield item - i += 1 + i += 1 # noqa: SIM113 if not i % log_every: now = time.time() delta = now - then diff --git a/h/search/parser.py b/h/search/parser.py index 142bdfb0149..8db801394d4 100644 --- a/h/search/parser.py +++ b/h/search/parser.py @@ -6,7 +6,7 @@ """ from collections import namedtuple -from functools import lru_cache +from functools import cache import pyparsing as pp from webob.multidict import MultiDict @@ -92,7 +92,7 @@ def unparse(query): return " ".join(terms) -@lru_cache(maxsize=None) +@cache def _make_parser(): word = pp.CharsNotIn("".join(whitespace)) word.skipWhitespace = True diff --git a/h/search/query.py b/h/search/query.py index a5e27ac41b8..22b2b20f396 100644 --- a/h/search/query.py +++ b/h/search/query.py @@ -13,7 +13,7 @@ # Elasticsearch requires offset + limit must be <= 10,000. LIMIT_MAX = 200 OFFSET_MAX = 9800 -DEFAULT_DATE = dt(1970, 1, 1, 0, 0, 0, 0).replace(tzinfo=tz.tzutc()) +DEFAULT_DATE = dt(1970, 1, 1, 0, 0, 0, 0).replace(tzinfo=tz.tzutc()) # noqa: DTZ001 def popall(multidict, key): @@ -100,7 +100,7 @@ def __call__(self, search, params): # Since search_after depends on the field that the annotations are # being sorted by, it is set here rather than in a separate class. 
search_after = params.pop("search_after", None) - if search_after: + if search_after: # noqa: SIM102 if sort_by in ["updated", "created"]: search_after = self._parse_date(search_after) @@ -137,8 +137,8 @@ def _parse_date(str_value): try: epoch = float(str_value) if epoch < 9999: - raise ValueError("This is not in the form ms since the epoch.") - return epoch + raise ValueError("This is not in the form ms since the epoch.") # noqa: EM101, TRY003, TRY301 + return epoch # noqa: TRY300 except ValueError: try: date = parse(str_value, default=DEFAULT_DATE) @@ -163,7 +163,7 @@ class AuthorityFilter: def __init__(self, authority): self.authority = authority - def __call__(self, search, params): + def __call__(self, search, params): # noqa: ARG002 return search.filter("term", authority=self.authority) @@ -183,7 +183,7 @@ def __init__(self, request): """ self.request = request - def __call__(self, search, params): + def __call__(self, search, params): # noqa: ARG002 userid = self.request.authenticated_userid if userid is None: return search.filter("term", shared=True) @@ -203,7 +203,7 @@ class SharedAnnotationsFilter: belong to the authenticated user. """ - def __call__(self, search, params): + def __call__(self, search, params): # noqa: ARG002 return search.filter("term", shared=True) @@ -242,7 +242,7 @@ class UriCombinedWildcardFilter: any single character. """ - def __init__(self, request, separate_keys=False): + def __init__(self, request, separate_keys=False): # noqa: FBT002 """ Initialize a new UriFilter. diff --git a/h/search/util.py b/h/search/util.py index 34860bc5122..72b3978e40c 100644 --- a/h/search/util.py +++ b/h/search/util.py @@ -20,7 +20,7 @@ def wildcard_uri_is_valid(wildcard_uri): # Note: according to the URL spec _'s are allowed in the domain so this may be # something that needs to be supported at a later date. 
normalized_uri = urlparse(wildcard_uri) - if ( + if ( # noqa: SIM103 not normalized_uri.scheme or "*" in normalized_uri.netloc or "_" in normalized_uri.netloc diff --git a/h/security/__init__.py b/h/security/__init__.py index a74ddb510fe..bf1c5d6f61a 100644 --- a/h/security/__init__.py +++ b/h/security/__init__.py @@ -2,13 +2,13 @@ import logging -from h.security.encryption import ( # noqa:F401 +from h.security.encryption import ( derive_key, password_context, token_urlsafe, ) -from h.security.identity import Identity # noqa:F401 -from h.security.permissions import Permission # noqa:F401 +from h.security.identity import Identity +from h.security.permissions import Permission from h.security.permits import identity_permits from h.security.policy import StreamerPolicy, TopLevelPolicy diff --git a/h/security/encryption.py b/h/security/encryption.py index 034a743d436..a5c74c6e058 100644 --- a/h/security/encryption.py +++ b/h/security/encryption.py @@ -50,12 +50,12 @@ def derive_key(key_material, salt, info): # If we have problems with PyCryptodome, an alternative implementation using # cryptography would be: -# from cryptography.hazmat.primitives.kdf.hkdf import HKDF as HKDF2 -# from cryptography.hazmat.primitives import hashes +# from cryptography.hazmat.primitives.kdf.hkdf import HKDF as HKDF2 # noqa: ERA001 +# from cryptography.hazmat.primitives import hashes # noqa: ERA001 # # def derive_key(key_material, salt, info): # if not isinstance(key_material, bytes): -# key_material = key_material.encode() +# key_material = key_material.encode() # noqa: ERA001 # # return HKDF2( # algorithm=hashes.SHA512(), length=64, salt=salt, info=info diff --git a/h/security/identity.py b/h/security/identity.py index ac0220a6b56..dc83ec79aa8 100644 --- a/h/security/identity.py +++ b/h/security/identity.py @@ -1,7 +1,7 @@ """Data classes used to represent authenticated users.""" from dataclasses import dataclass, field -from typing import List, Optional, Self +from typing import Self from h.models import AuthClient, Group, GroupMembershipRoles, User @@ -12,7 +12,7 @@ class LongLivedMembership: group: "LongLivedGroup" user: "LongLivedUser" - roles: List[str] + roles: list[str] @dataclass @@ -46,7 +46,7 @@ class LongLivedUser: authority: str staff: bool admin: bool - memberships: List[LongLivedMembership] = field(default_factory=list) + memberships: list[LongLivedMembership] = field(default_factory=list) @classmethod def from_model(cls, user: User): @@ -105,8 +105,8 @@ class Identity: pre-shared key, or both. """ - user: Optional[LongLivedUser] = None - auth_client: Optional[LongLivedAuthClient] = None + user: LongLivedUser | None = None + auth_client: LongLivedAuthClient | None = None @classmethod def from_models(cls, user: User = None, auth_client: AuthClient = None): diff --git a/h/security/permits.py b/h/security/permits.py index bd6efc23b5f..87294f635c5 100644 --- a/h/security/permits.py +++ b/h/security/permits.py @@ -1,5 +1,3 @@ -from typing import Optional - from pyramid.security import Allowed, Denied from h.security.identity import Identity @@ -7,7 +5,7 @@ def identity_permits( - identity: Optional[Identity], context, permission + identity: Identity | None, context, permission ) -> Allowed | Denied: """ Check whether a given identity has permission to operate on a context. 
@@ -19,7 +17,7 @@ def identity_permits( :param context: Context object representing the objects acted upon :param permission: Permission requested """ - if clauses := PERMISSION_MAP.get(permission): + if clauses := PERMISSION_MAP.get(permission): # noqa: SIM102 # Grant the permissions if for *any* single clause... if any( # .. *all* elements in it are true diff --git a/h/security/policy/_auth_client.py b/h/security/policy/_auth_client.py index 3c1f3debb4b..2cdb79ab630 100644 --- a/h/security/policy/_auth_client.py +++ b/h/security/policy/_auth_client.py @@ -26,7 +26,7 @@ class AuthClientPolicy: #: List of route name-method combinations that should #: allow AuthClient authentication - API_WHITELIST = [ + API_WHITELIST = [ # noqa: RUF012 ("api.groups", "POST"), ("api.group", "PATCH"), ("api.group", "GET"), diff --git a/h/security/policy/_cookie.py b/h/security/policy/_cookie.py index c0025def0ce..9e97f011e53 100644 --- a/h/security/policy/_cookie.py +++ b/h/security/policy/_cookie.py @@ -54,7 +54,7 @@ def identity(self, request): def authenticated_userid(self, request): return Identity.authenticated_userid(self.identity(request)) - def remember(self, request, userid, **kw): + def remember(self, request, userid, **kw): # noqa: ARG002 self.helper.add_vary_by_cookie(request) previous_userid = self.authenticated_userid(request) @@ -111,7 +111,7 @@ def _issue_api_authcookie(self, identity, request, auth_ticket): ) def add_api_authcookie_headers( - request, + request, # noqa: ARG001 response, ): log.info("Fixing missing API auth cookie") diff --git a/h/security/policy/helpers.py b/h/security/policy/helpers.py index 4936616a023..91b6f771c24 100644 --- a/h/security/policy/helpers.py +++ b/h/security/policy/helpers.py @@ -55,7 +55,7 @@ def forget(self, cookie: SignedCookieProfile, request: Request): @staticmethod @lru_cache # Ensure we only add this once per request def add_vary_by_cookie(request: Request): - def vary_add(request, response): + def vary_add(request, response): # noqa: ARG001 vary = set(response.vary if response.vary is not None else []) vary.add("Cookie") response.vary = list(vary) diff --git a/h/security/predicates.py b/h/security/predicates.py index 53224d25da4..cbfbc4af60c 100644 --- a/h/security/predicates.py +++ b/h/security/predicates.py @@ -209,8 +209,8 @@ def group_member_remove(identity, context: GroupMembershipContext): @requires(authenticated_user, group_found) -def group_member_edit(identity, context: EditGroupMembershipContext): - assert context.new_roles is not None, ( +def group_member_edit(identity, context: EditGroupMembershipContext): # noqa: PLR0911 + assert context.new_roles is not None, ( # noqa: S101 "new_roles must be set before checking permissions" ) @@ -247,7 +247,7 @@ def group_member_edit(identity, context: EditGroupMembershipContext): if GroupMembershipRoles.ADMIN in authenticated_users_roles: # Admins can change the role of anyone but owners or admins to anything # but owner or admin. 
- if ( + if ( # noqa: SIM103 GroupMembershipRoles.OWNER in old_roles + new_roles or GroupMembershipRoles.ADMIN in old_roles + new_roles ): diff --git a/h/services/analytics.py b/h/services/analytics.py index 02440d8bd4c..4c6918a73c1 100644 --- a/h/services/analytics.py +++ b/h/services/analytics.py @@ -8,7 +8,7 @@ def __init__(self): self._log = logging.getLogger(__name__) def create(self, event: Event): - # TODO Enhance this + # TODO Enhance this # noqa: FIX002, TD002, TD003, TD004 self._log.info(event) diff --git a/h/services/annotation_delete.py b/h/services/annotation_delete.py index 87190281054..c667882c395 100644 --- a/h/services/annotation_delete.py +++ b/h/services/annotation_delete.py @@ -27,7 +27,7 @@ def delete(self, annotation): :param annotation: the annotation to be deleted :type annotation: h.models.Annotation """ - annotation.updated = datetime.utcnow() + annotation.updated = datetime.utcnow() # noqa: DTZ003 annotation.deleted = True self.job_queue.add_by_id( name="sync_annotation", @@ -63,7 +63,7 @@ def bulk_delete(self): # give the streamer time to process the deletion. .where( Annotation.updated - < datetime.utcnow() - timedelta(minutes=10) + < datetime.utcnow() - timedelta(minutes=10) # noqa: DTZ003 ) # Only expunge up to 1000 annotations at a time to # avoid long-running DB queries. This method is called diff --git a/h/services/annotation_read.py b/h/services/annotation_read.py index ad614883fd3..1634a0d4519 100644 --- a/h/services/annotation_read.py +++ b/h/services/annotation_read.py @@ -1,4 +1,4 @@ -from typing import Iterable, List, Optional +from collections.abc import Iterable from sqlalchemy import select from sqlalchemy.orm import Query, Session, subqueryload @@ -13,7 +13,7 @@ class AnnotationReadService: def __init__(self, db_session: Session): self._db = db_session - def get_annotation_by_id(self, id_: str) -> Optional[Annotation]: + def get_annotation_by_id(self, id_: str) -> Annotation | None: """ Fetch the annotation with the given id. @@ -25,7 +25,7 @@ def get_annotation_by_id(self, id_: str) -> Optional[Annotation]: return None def get_annotations_by_id( - self, ids: List[str], eager_load: Optional[List] = None + self, ids: list[str], eager_load: list | None = None ) -> Iterable[Annotation]: """ Get annotations in the same order as the provided ids. @@ -46,7 +46,8 @@ def get_annotations_by_id( @staticmethod def _annotation_search_query( - ids: List[str] = None, eager_load: Optional[List] = None + ids: list[str] = None, # noqa: RUF013 + eager_load: list | None = None, ) -> Query: """Create a query for searching for annotations.""" diff --git a/h/services/annotation_stats.py b/h/services/annotation_stats.py index 93557d2757a..894d973d93f 100644 --- a/h/services/annotation_stats.py +++ b/h/services/annotation_stats.py @@ -49,7 +49,7 @@ def group_annotation_count(self, pubid): params = MultiDict({"limit": 0, "group": pubid}) return self._search(params) - def total_group_annotation_count(self, pubid, unshared=True): + def total_group_annotation_count(self, pubid, unshared=True): # noqa: FBT002 """ Return the count of all annotations for a group. 
diff --git a/h/services/annotation_write.py b/h/services/annotation_write.py index 10d5497ecb9..dade8d1bac0 100644 --- a/h/services/annotation_write.py +++ b/h/services/annotation_write.py @@ -65,7 +65,7 @@ def create_annotation(self, data: dict) -> Annotation: self._db.enable_relationship_loading(annotation) self._validate_group(annotation) - annotation.created = annotation.updated = datetime.utcnow() + annotation.created = annotation.updated = datetime.utcnow() # noqa: DTZ003 annotation.document = update_document_metadata( self._db, annotation.target_uri, @@ -94,9 +94,9 @@ def update_annotation( self, annotation: Annotation, data: dict, - update_timestamp: bool = True, + update_timestamp: bool = True, # noqa: FBT001, FBT002 reindex_tag: str = "storage.update_annotation", - enforce_write_permission: bool = True, + enforce_write_permission: bool = True, # noqa: FBT001, FBT002 ) -> Annotation: """ Update an annotation and its associated document metadata. @@ -115,7 +115,7 @@ def update_annotation( annotation_metadata = data.pop("metadata", None) self._update_annotation_values(annotation, data) if update_timestamp: - annotation.updated = datetime.utcnow() + annotation.updated = datetime.utcnow() # noqa: DTZ003 # Expire the group relationship, so we get the most up-to-date value # instead of the one which was present when we loaded the model @@ -189,7 +189,7 @@ def _update_annotation_values(annotation: Annotation, data: dict): extra = data.get("extra", {}) annotation.extra.update(extra) - def _validate_group(self, annotation: Annotation, enforce_write_permission=True): + def _validate_group(self, annotation: Annotation, enforce_write_permission=True): # noqa: FBT002 group = annotation.group if not group: raise ValidationError( diff --git a/h/services/auth_ticket.py b/h/services/auth_ticket.py index 09d624b2af9..774229b4a1b 100644 --- a/h/services/auth_ticket.py +++ b/h/services/auth_ticket.py @@ -51,8 +51,8 @@ def verify_ticket( # We don't want to update the `expires` column of an auth ticket on # every single request, but only when the ticket hasn't been touched # within a the defined `TICKET_REFRESH_INTERVAL`. 
- if (datetime.utcnow() - ticket.updated) > self.TICKET_REFRESH_INTERVAL: - ticket.expires = datetime.utcnow() + self.TICKET_TTL + if (datetime.utcnow() - ticket.updated) > self.TICKET_REFRESH_INTERVAL: # noqa: DTZ003 + ticket.expires = datetime.utcnow() + self.TICKET_TTL # noqa: DTZ003 # Update the cache to allow quick checking if we are called again self._ticket = ticket @@ -65,13 +65,13 @@ def add_ticket(self, userid: str, ticket_id: str) -> None: user = self._user_service.fetch(userid) if user is None: - raise ValueError(f"Cannot find user with userid {userid}") + raise ValueError(f"Cannot find user with userid {userid}") # noqa: EM102, TRY003 ticket = AuthTicket( id=ticket_id, user=user, user_userid=user.userid, - expires=datetime.utcnow() + self.TICKET_TTL, + expires=datetime.utcnow() + self.TICKET_TTL, # noqa: DTZ003 ) self._session.add(ticket) diff --git a/h/services/auth_token.py b/h/services/auth_token.py index e91c406c411..03d9ff8d026 100644 --- a/h/services/auth_token.py +++ b/h/services/auth_token.py @@ -1,5 +1,4 @@ from datetime import datetime -from typing import Optional import newrelic.agent @@ -27,7 +26,7 @@ def is_valid(self): if self.expires is None: return True - return datetime.utcnow() < self.expires + return datetime.utcnow() < self.expires # noqa: DTZ003 class AuthTokenService: @@ -35,7 +34,7 @@ def __init__(self, session): self._session = session self._validate_cache = {} - def validate(self, token_str) -> Optional[LongLivedToken]: + def validate(self, token_str) -> LongLivedToken | None: """ Get a validated token from the token string or None. diff --git a/h/services/bulk_api/_helpers.py b/h/services/bulk_api/_helpers.py index d77292c0e97..ad8e24100f6 100644 --- a/h/services/bulk_api/_helpers.py +++ b/h/services/bulk_api/_helpers.py @@ -22,7 +22,7 @@ def date_match(column: sa.Column, spec: dict): :raises BadDateFilter: For unrecognised operators or no spec """ if not spec: - raise BadDateFilter(f"No spec given to filter '{column}' on") + raise BadDateFilter(f"No spec given to filter '{column}' on") # noqa: EM102, TRY003 clauses = [] @@ -40,6 +40,6 @@ def date_match(column: sa.Column, spec: dict): elif op_key == "ne": clauses.append(column != value) else: - raise BadDateFilter(f"Unknown date filter operator: {op_key}") + raise BadDateFilter(f"Unknown date filter operator: {op_key}") # noqa: EM102, TRY003 return sa.and_(*clauses) diff --git a/h/services/bulk_api/annotation.py b/h/services/bulk_api/annotation.py index a30879d40e6..1da4ec8d992 100644 --- a/h/services/bulk_api/annotation.py +++ b/h/services/bulk_api/annotation.py @@ -1,5 +1,4 @@ from dataclasses import dataclass -from typing import List import sqlalchemy as sa from sqlalchemy.orm import Session @@ -34,7 +33,7 @@ def annotation_search( username: str, created: dict, limit=100000, - ) -> List[BulkAnnotation]: + ) -> list[BulkAnnotation]: """ Get a list of annotations or rows viewable by a given user. diff --git a/h/services/bulk_api/group.py b/h/services/bulk_api/group.py index 10b11a90d4b..14b9c632930 100644 --- a/h/services/bulk_api/group.py +++ b/h/services/bulk_api/group.py @@ -1,5 +1,4 @@ from dataclasses import dataclass -from typing import List import sqlalchemy as sa from sqlalchemy.orm import Session @@ -20,8 +19,8 @@ def __init__(self, db_replica: Session): self._db_replica = db_replica def group_search( - self, groups: List[str], annotations_created: dict - ) -> List[BulkGroup]: + self, groups: list[str], annotations_created: dict + ) -> list[BulkGroup]: """ Get a list of groups. 
diff --git a/h/services/bulk_executor/_actions.py b/h/services/bulk_executor/_actions.py index 0bcaa526537..6e4c86063eb 100644 --- a/h/services/bulk_executor/_actions.py +++ b/h/services/bulk_executor/_actions.py @@ -34,7 +34,7 @@ def execute(self, batch, **kwargs): The commands are assumed to be appropriate for this action type. """ - raise NotImplementedError() # pragma: no cover + raise NotImplementedError # pragma: no cover @staticmethod def _check_upsert_queries(batch, expected_keys): @@ -65,16 +65,16 @@ def _check_upsert_queries(batch, expected_keys): # This is technically overkill as the schema should make sure we # can't receive queries we aren't expecting if set(query.keys()) != set(expected_keys): - raise UnsupportedOperationError( - f"Upserting by query fields '{query.keys()}' is not supported" + raise UnsupportedOperationError( # noqa: TRY003 + f"Upserting by query fields '{query.keys()}' is not supported" # noqa: EM102 ) # Checking that the values are the same is a bit more important, as # this happens post schema, and could therefore be wrong for key, expected in query.items(): if command.body.attributes[key] != expected: - raise UnsupportedOperationError( - "Upserting different values to the query is not supported. " + raise UnsupportedOperationError( # noqa: TRY003 + "Upserting different values to the query is not supported. " # noqa: EM102 f"Different value found in key '{key}'" ) @@ -96,8 +96,8 @@ class GroupUpsertAction(DBAction): def execute(self, batch, effective_user_id=None, **_): if effective_user_id is None: - raise CommandSequenceError( - "Effective user must be configured before upserting groups" + raise CommandSequenceError( # noqa: TRY003 + "Effective user must be configured before upserting groups" # noqa: EM101 ) # Check that we can actually process this batch @@ -133,8 +133,8 @@ def execute(self, batch, effective_user_id=None, **_): # https://www.postgresql.org/docs/9.4/errcodes-appendix.html # 21000 == cardinality violation if err.orig.pgcode == "21000": - raise ConflictingDataError( - "Attempted to create two groups with the same authority and id" + raise ConflictingDataError( # noqa: TRY003 + "Attempted to create two groups with the same authority and id" # noqa: EM101 ) from err raise @@ -167,8 +167,8 @@ def execute(self, batch, on_duplicate="continue", **_): default is "continue" """ if on_duplicate != "continue": - raise UnsupportedOperationError( - "Create modes other than 'continue' have not been implemented" + raise UnsupportedOperationError( # noqa: TRY003 + "Create modes other than 'continue' have not been implemented" # noqa: EM101 ) values = [ @@ -195,8 +195,8 @@ def execute(self, batch, on_duplicate="continue", **_): # https://www.postgresql.org/docs/9.1/errcodes-appendix.html # 23503 = foreign_key_violation if err.orig.pgcode == "23503": - raise ConflictingDataError( - "Cannot insert group membership as either the user or " + raise ConflictingDataError( # noqa: TRY003 + "Cannot insert group membership as either the user or " # noqa: EM102 f"group specified does not exist: {err.params}" ) from err @@ -257,7 +257,7 @@ def _upsert_identities(self, identities, user_ids): flat_identities = [] # Flatten the nested lists into a single list with user ids - for id_, identity_list in zip(user_ids, identities): + for id_, identity_list in zip(user_ids, identities, strict=False): for identity in identity_list: identity["user_id"] = id_ flat_identities.append(identity) @@ -284,8 +284,8 @@ def _upsert_identities(self, identities, user_ids): # 21000 == 
cardinality violation # This indicates the identity belongs to another user if err.orig.pgcode == "21000": - raise ConflictingDataError( - "Attempted to assign existing identity to a different user" + raise ConflictingDataError( # noqa: TRY003 + "Attempted to assign existing identity to a different user" # noqa: EM101 ) from err raise diff --git a/h/services/bulk_executor/_executor.py b/h/services/bulk_executor/_executor.py index 7d110be55f8..3424f99e4b1 100644 --- a/h/services/bulk_executor/_executor.py +++ b/h/services/bulk_executor/_executor.py @@ -47,8 +47,8 @@ def configure(self, config): ) except NoResultFound as err: - raise InvalidDeclarationError( - f"No user found for effective user: '{config.effective_user}'" + raise InvalidDeclarationError( # noqa: TRY003 + f"No user found for effective user: '{config.effective_user}'" # noqa: EM102 ) from err self.effective_user_id = user.id @@ -70,8 +70,8 @@ def execute_batch(self, command_type, data_type, default_config, batch): handler = self.handlers.get((command_type, data_type), None) if handler is None: - raise UnsupportedOperationError( - f"No implementation for {command_type.value} {data_type.value}" + raise UnsupportedOperationError( # noqa: TRY003 + f"No implementation for {command_type.value} {data_type.value}" # noqa: EM102 ) # Do it @@ -79,13 +79,13 @@ def execute_batch(self, command_type, data_type, default_config, batch): batch, effective_user_id=self.effective_user_id, **default_config ) - def _assert_authority(self, field, value, embedded=False): + def _assert_authority(self, field, value, embedded=False): # noqa: FBT002 if embedded and value.endswith(f"@{self.authority}"): return if value == self.authority: return - raise InvalidDeclarationError( - f"The {field} '{value}' does not match the expected authority" + raise InvalidDeclarationError( # noqa: TRY003 + f"The {field} '{value}' does not match the expected authority" # noqa: EM102 ) diff --git a/h/services/feature.py b/h/services/feature.py index f168b9e94bb..de89d50c52e 100644 --- a/h/services/feature.py +++ b/h/services/feature.py @@ -63,7 +63,7 @@ def enabled(self, name, user=None): features = self.all(user=user) if name not in features: - raise UnknownFeatureError(f"{name} is not a valid feature name") + raise UnknownFeatureError(f"{name} is not a valid feature name") # noqa: EM102, TRY003 return features[name] @@ -75,7 +75,7 @@ def _load(self): """Load the feature flags from the database.""" return models.Feature.all(self.session) - def _state(self, feature, user=None): + def _state(self, feature, user=None): # noqa: PLR0911 # Features that are explicitly overridden are on. if self.overrides is not None and feature.name in self.overrides: return True diff --git a/h/services/group_create.py b/h/services/group_create.py index fef6034bf7b..f7011526989 100644 --- a/h/services/group_create.py +++ b/h/services/group_create.py @@ -134,8 +134,8 @@ def _create(self, name, userid, type_flags, scopes, **kwargs): @staticmethod def _validate_authorities_match(group_authority, org_authority): if group_authority != org_authority: - raise ValueError( - f"Organization's authority {org_authority} must match the group creator's authority {group_authority}." + raise ValueError( # noqa: TRY003 + f"Organization's authority {org_authority} must match the group creator's authority {group_authority}." 
# noqa: EM102 ) diff --git a/h/services/group_delete.py b/h/services/group_delete.py index 5ae82716064..456a7b59081 100644 --- a/h/services/group_delete.py +++ b/h/services/group_delete.py @@ -22,7 +22,7 @@ def delete(self, group): def _delete_annotations(self, group): if group.pubid == "__world__": - raise DeletePublicGroupError("Public group can not be deleted") + raise DeletePublicGroupError("Public group can not be deleted") # noqa: EM101, TRY003 annotations = self.request.db.query(Annotation).filter_by(groupid=group.pubid) self._annotation_delete_service.delete_annotations(annotations) diff --git a/h/services/group_members.py b/h/services/group_members.py index f5cfa5345fd..56542ae9d96 100644 --- a/h/services/group_members.py +++ b/h/services/group_members.py @@ -135,8 +135,8 @@ def member_join(self, group, userid, roles=None) -> GroupMembership: if existing_membership := self.get_membership(group, user): for key, value in kwargs.items(): if getattr(existing_membership, key) != value: - raise ConflictError( - "The user is already a member of the group, with conflicting membership attributes" + raise ConflictError( # noqa: TRY003 + "The user is already a member of the group, with conflicting membership attributes" # noqa: EM101 ) return existing_membership diff --git a/h/services/group_update.py b/h/services/group_update.py index b78733a01aa..fc207c9a6b2 100644 --- a/h/services/group_update.py +++ b/h/services/group_update.py @@ -41,8 +41,8 @@ def update(self, group, **kwargs): 'duplicate key value violates unique constraint "ix__group__groupid"' in repr(err) ): - raise ConflictError( - f"""authority_provided_id '{kwargs["authority_provided_id"]}' is already in use""" + raise ConflictError( # noqa: TRY003 + f"""authority_provided_id '{kwargs["authority_provided_id"]}' is already in use""" # noqa: EM102 ) from err # Re-raise as this is an unexpected problem diff --git a/h/services/job_queue.py b/h/services/job_queue.py index 77976a4a843..65b3ab409c4 100644 --- a/h/services/job_queue.py +++ b/h/services/job_queue.py @@ -19,7 +19,7 @@ def __init__(self, db): self._db = db def get(self, name, limit): - now = datetime.utcnow() + now = datetime.utcnow() # noqa: DTZ003 query = self._db.query(Job).filter( Job.name == name, Job.expires_at >= now, Job.scheduled_at < now @@ -36,7 +36,7 @@ def delete(self, jobs): for job in jobs: self._db.delete(job) - def add_between_times(self, name, start_time, end_time, tag, force=False): + def add_between_times(self, name, start_time, end_time, tag, force=False): # noqa: FBT002 """ Queue all annotations between two times. @@ -48,7 +48,7 @@ def add_between_times(self, name, start_time, end_time, tag, force=False): where = [Annotation.updated >= start_time, Annotation.updated <= end_time] self.add_where(name, where, tag, Priority.BETWEEN_TIMES, force) - def add_by_id(self, name, annotation_id, tag, force=False, schedule_in=None): + def add_by_id(self, name, annotation_id, tag, force=False, schedule_in=None): # noqa: FBT002 """ Queue an annotation. @@ -61,7 +61,12 @@ def add_by_id(self, name, annotation_id, tag, force=False, schedule_in=None): self.add_where(name, where, tag, Priority.SINGLE_ITEM, force, schedule_in) def add_by_ids( - self, name, annotation_ids: list[str], tag, force=False, schedule_in=None + self, + name, + annotation_ids: list[str], + tag, + force=False, # noqa: FBT002 + schedule_in=None, ): """ Queue annotations by ID. 
@@ -74,7 +79,7 @@ def add_by_ids( name, where, tag, Priority.BY_IDS, force=force, schedule_in=schedule_in ) - def add_by_user(self, name, userid: str, tag, force=False, schedule_in=None): + def add_by_user(self, name, userid: str, tag, force=False, schedule_in=None): # noqa: FBT002 """ Queue all a user's annotations. @@ -85,7 +90,7 @@ def add_by_user(self, name, userid: str, tag, force=False, schedule_in=None): where = [Annotation.userid == userid] self.add_where(name, where, tag, Priority.SINGLE_USER, force, schedule_in) - def add_by_group(self, name, groupid: str, tag, force=False, schedule_in=None): + def add_by_group(self, name, groupid: str, tag, force=False, schedule_in=None): # noqa: FBT002 """ Queue all annotations in a group. @@ -96,13 +101,13 @@ def add_by_group(self, name, groupid: str, tag, force=False, schedule_in=None): where = [Annotation.groupid == groupid] self.add_where(name, where, tag, Priority.SINGLE_GROUP, force, schedule_in) - def add_where( + def add_where( # noqa: PLR0913 self, name, where, tag, priority, - force=False, + force=False, # noqa: FBT002 schedule_in=None, ): """ @@ -122,7 +127,7 @@ def add_where( until at least `schedule_in` seconds from now """ where_clause = and_(*where) if len(where) > 1 else where[0] - schedule_at = datetime.utcnow() + timedelta(seconds=schedule_in or 0) + schedule_at = datetime.utcnow() + timedelta(seconds=schedule_in or 0) # noqa: DTZ003 query = Job.__table__.insert().from_select( [Job.name, Job.scheduled_at, Job.priority, Job.tag, Job.kwargs], diff --git a/h/services/job_queue_metrics.py b/h/services/job_queue_metrics.py index 294d19cca98..59f10ac52c2 100644 --- a/h/services/job_queue_metrics.py +++ b/h/services/job_queue_metrics.py @@ -20,7 +20,7 @@ def metrics(self): newrelic.agent.record_custom_metrics(). """ metrics = defaultdict(int) - now = datetime.utcnow() + now = datetime.utcnow() # noqa: DTZ003 # Expired jobs. metrics["Custom/JobQueue/Count/Expired"] = ( diff --git a/h/services/links.py b/h/services/links.py index 8268714df74..28b0caa1ab2 100644 --- a/h/services/links.py +++ b/h/services/links.py @@ -74,7 +74,7 @@ def links_factory(_context, request): return LinksService(base_url=base_url, registry=request.registry) -def add_annotation_link_generator(config, name, generator, hidden=False): +def add_annotation_link_generator(config, name, generator, hidden=False): # noqa: FBT002 """ Register a function which generates a named link for an annotation. 
diff --git a/h/services/oauth/__init__.py b/h/services/oauth/__init__.py index e980aebf591..59057a4bf68 100644 --- a/h/services/oauth/__init__.py +++ b/h/services/oauth/__init__.py @@ -1,3 +1,3 @@ -ACCESS_TOKEN_PREFIX = "5768-" -REFRESH_TOKEN_PREFIX = "4657-" +ACCESS_TOKEN_PREFIX = "5768-" # noqa: S105 +REFRESH_TOKEN_PREFIX = "4657-" # noqa: S105 DEFAULT_SCOPES = ["annotation:read", "annotation:write"] diff --git a/h/services/oauth/_bearer_token.py b/h/services/oauth/_bearer_token.py index 485e12ea9c0..f8f2f3736d9 100644 --- a/h/services/oauth/_bearer_token.py +++ b/h/services/oauth/_bearer_token.py @@ -19,7 +19,7 @@ def __init__( self.refresh_token_expires_in = refresh_token_expires_in - def create_token(self, request, refresh_token=False, **kwargs): + def create_token(self, request, refresh_token=False, **kwargs): # noqa: FBT002, ARG002 if request.extra_credentials is None: request.extra_credentials = {} request.extra_credentials["refresh_token_expires_in"] = ( diff --git a/h/services/oauth/_jwt_grant.py b/h/services/oauth/_jwt_grant.py index ce1ae65d78b..a6713fb7d3f 100644 --- a/h/services/oauth/_jwt_grant.py +++ b/h/services/oauth/_jwt_grant.py @@ -98,7 +98,7 @@ def validate_token_request(self, request): try: assertion = request.assertion except AttributeError as err: - raise errors.InvalidRequestFatalError("Missing assertion.") from err + raise errors.InvalidRequestFatalError("Missing assertion.") from err # noqa: EM101, TRY003 token = JWTGrantToken(assertion) @@ -119,13 +119,13 @@ def validate_token_request(self, request): user = self.user_svc.fetch(verified_token.subject) if user is None: - raise errors.InvalidGrantError( - "Grant token subject (sub) could not be found." + raise errors.InvalidGrantError( # noqa: TRY003 + "Grant token subject (sub) could not be found." # noqa: EM101 ) if user.authority != authclient.authority: - raise errors.InvalidGrantError( - "Grant token subject (sub) does not match issuer (iss)." + raise errors.InvalidGrantError( # noqa: TRY003 + "Grant token subject (sub) does not match issuer (iss)." 
# noqa: EM101 ) request.user = user diff --git a/h/services/oauth/_jwt_grant_token.py b/h/services/oauth/_jwt_grant_token.py index 73e3f273368..3d65b0f87a6 100644 --- a/h/services/oauth/_jwt_grant_token.py +++ b/h/services/oauth/_jwt_grant_token.py @@ -34,13 +34,13 @@ def __init__(self, token): options={"verify_signature": False}, ) except jwt.DecodeError as err: - raise InvalidRequestFatalError("Invalid JWT grant token format.") from err + raise InvalidRequestFatalError("Invalid JWT grant token format.") from err # noqa: EM101, TRY003 @property def issuer(self): iss = self._claims.get("iss", None) if not iss: - raise MissingJWTGrantTokenClaimError("iss", "issuer") + raise MissingJWTGrantTokenClaimError("iss", "issuer") # noqa: EM101 return iss def verified(self, key, audience): @@ -65,7 +65,7 @@ def __init__(self, token, key, audience): def _verify(self, key, audience): # noqa: C901 if self.expiry - self.not_before > self.MAX_LIFETIME: - raise InvalidGrantError("Grant token lifetime is too long.") + raise InvalidGrantError("Grant token lifetime is too long.") # noqa: EM101, TRY003 try: jwt.decode( self._token, @@ -75,25 +75,25 @@ def _verify(self, key, audience): # noqa: C901 leeway=self.LEEWAY, ) except TypeError as err: - raise InvalidClientError("Client is invalid.") from err + raise InvalidClientError("Client is invalid.") from err # noqa: EM101, TRY003 except jwt.DecodeError as err: - raise InvalidGrantError("Invalid grant token signature.") from err + raise InvalidGrantError("Invalid grant token signature.") from err # noqa: EM101, TRY003 except jwt.exceptions.InvalidAlgorithmError as err: - raise InvalidGrantError("Invalid grant token signature algorithm.") from err + raise InvalidGrantError("Invalid grant token signature algorithm.") from err # noqa: EM101, TRY003 except jwt.MissingRequiredClaimError as err: # pragma: no cover if err.claim == "aud": - raise MissingJWTGrantTokenClaimError("aud", "audience") from err + raise MissingJWTGrantTokenClaimError("aud", "audience") from err # noqa: EM101 raise MissingJWTGrantTokenClaimError(err.claim) from err except jwt.InvalidAudienceError as err: - raise InvalidJWTGrantTokenClaimError("aud", "audience") from err + raise InvalidJWTGrantTokenClaimError("aud", "audience") from err # noqa: EM101 except jwt.ImmatureSignatureError as err: - raise InvalidGrantError("Grant token is not yet valid.") from err + raise InvalidGrantError("Grant token is not yet valid.") from err # noqa: EM101, TRY003 except jwt.ExpiredSignatureError as err: - raise InvalidGrantError("Grant token is expired.") from err + raise InvalidGrantError("Grant token is expired.") from err # noqa: EM101, TRY003 except jwt.InvalidIssuedAtError as err: # pragma: no cover - raise InvalidGrantError( - "Grant token issue time (iat) is in the future." + raise InvalidGrantError( # noqa: TRY003 + "Grant token issue time (iat) is in the future." 
# noqa: EM101 ) from err @property @@ -109,7 +109,7 @@ def _timestamp_claim(self, key, description): if claim is None: raise MissingJWTGrantTokenClaimError(key, description) try: - return datetime.datetime.utcfromtimestamp(claim) + return datetime.datetime.utcfromtimestamp(claim) # noqa: DTZ004 except (TypeError, ValueError) as err: raise InvalidJWTGrantTokenClaimError(key, description) from err @@ -117,5 +117,5 @@ def _timestamp_claim(self, key, description): def subject(self): sub = self._claims.get("sub", None) if not sub: - raise MissingJWTGrantTokenClaimError("sub", "subject") + raise MissingJWTGrantTokenClaimError("sub", "subject") # noqa: EM101 return sub diff --git a/h/services/oauth/_validator.py b/h/services/oauth/_validator.py index 4901606aee7..ae5460a9893 100644 --- a/h/services/oauth/_validator.py +++ b/h/services/oauth/_validator.py @@ -45,7 +45,7 @@ def __init__(self, session): self._find_token ) - def authenticate_client(self, request, *args, **kwargs): + def authenticate_client(self, request, *args, **kwargs): # noqa: ARG002 """Authenticate a client, returns True if the client exists and its secret matches the request.""" client = self.find_client(request.client_id) provided_secret = request.client_secret @@ -59,7 +59,7 @@ def authenticate_client(self, request, *args, **kwargs): request.client = Client(client) return True - def authenticate_client_id(self, client_id, request, *args, **kwargs): + def authenticate_client_id(self, client_id, request, *args, **kwargs): # noqa: ARG002 """Authenticate a client_id, returns True if the client_id exists.""" client = self.find_client(client_id) @@ -69,7 +69,7 @@ def authenticate_client_id(self, client_id, request, *args, **kwargs): request.client = Client(client) return True - def client_authentication_required(self, request, *args, **kwargs): + def client_authentication_required(self, request, *args, **kwargs): # noqa: ARG002 """ Determine if client authentication is required for an access token request. @@ -99,7 +99,13 @@ def client_authentication_required(self, request, *args, **kwargs): return client.secret is not None def confirm_redirect_uri( - self, client_id, code, redirect_uri, client, *args, **kwargs + self, + client_id, # noqa: ARG002 + code, # noqa: ARG002 + redirect_uri, + client, + *args, # noqa: ARG002 + **kwargs, # noqa: ARG002 ): """ Validate that the redirect_uri didn't get tampered with. 
@@ -135,7 +141,7 @@ def find_refresh_token(self, value): def find_token(self, value): return self._cached_find_token(value) - def get_default_redirect_uri(self, client_id, request, *args, **kwargs): + def get_default_redirect_uri(self, client_id, request, *args, **kwargs): # noqa: ARG002 """Return the ``redirect_uri`` stored on the client with the given id.""" client = self.find_client(client_id) @@ -144,15 +150,15 @@ def get_default_redirect_uri(self, client_id, request, *args, **kwargs): return render_url_template(client.redirect_uri, example_url=request.uri) - def get_default_scopes(self, client_id, request, *args, **kwargs): + def get_default_scopes(self, client_id, request, *args, **kwargs): # noqa: ARG002 """Return the default scopes for the provided client.""" return DEFAULT_SCOPES - def get_original_scopes(self, refresh_token, request, *args, **kwargs): + def get_original_scopes(self, refresh_token, request, *args, **kwargs): # noqa: ARG002 """As we don't supports scopes, this returns the default scopes.""" return self.get_default_scopes(self, request.client_id, request) - def invalidate_authorization_code(self, client_id, code, request, *args, **kwargs): + def invalidate_authorization_code(self, client_id, code, request, *args, **kwargs): # noqa: ARG002 """Delete authorization code once it has been exchanged for an access token.""" authz_code = self.find_authz_code(code) if authz_code: @@ -172,7 +178,7 @@ def invalidate_refresh_token(self, refresh_token, _request, *_args, **_kwargs): if (token.refresh_token_expires - now) > new_ttl: token.refresh_token_expires = now + new_ttl - def revoke_token(self, token, token_type_hint, request, *args, **kwargs): + def revoke_token(self, token, token_type_hint, request, *args, **kwargs): # noqa: ARG002 """ Revoke a token. @@ -195,10 +201,10 @@ def revoke_token(self, token, token_type_hint, request, *args, **kwargs): if tok: self.session.delete(tok) - def save_authorization_code(self, client_id, code, request, *args, **kwargs): + def save_authorization_code(self, client_id, code, request, *args, **kwargs): # noqa: ARG002 client = self.find_client(client_id) if client is None: - raise InvalidClientIdError() + raise InvalidClientIdError() # noqa: RSE102 codestr = code.get("code") expires = utcnow() + AUTHZ_CODE_TTL @@ -208,7 +214,7 @@ def save_authorization_code(self, client_id, code, request, *args, **kwargs): self.session.add(authzcode) return authzcode - def save_bearer_token(self, token, request, *args, **kwargs): + def save_bearer_token(self, token, request, *args, **kwargs): # noqa: ARG002 """Save a generated bearer token for the authenticated user to the database.""" expires = utcnow() + datetime.timedelta(seconds=token["expires_in"]) @@ -235,13 +241,13 @@ def save_bearer_token(self, token, request, *args, **kwargs): return oauth_token - def validate_client_id(self, client_id, request, *args, **kwargs): + def validate_client_id(self, client_id, request, *args, **kwargs): # noqa: ARG002 """Check if the provided client_id belongs to a valid AuthClient.""" client = self.find_client(client_id) return client is not None - def validate_code(self, client_id, code, client, request, *args, **kwargs): + def validate_code(self, client_id, code, client, request, *args, **kwargs): # noqa: ARG002 """ Validate an authorization code. 
@@ -271,7 +277,13 @@ def validate_code(self, client_id, code, client, request, *args, **kwargs): return True def validate_grant_type( - self, client_id, grant_type, client, request, *args, **kwargs + self, + client_id, # noqa: ARG002 + grant_type, + client, + request, # noqa: ARG002 + *args, # noqa: ARG002 + **kwargs, # noqa: ARG002 ): """Validate that the given client is allowed to use the give grant type.""" if client.authclient.grant_type is None: @@ -282,7 +294,7 @@ def validate_grant_type( return grant_type == client.authclient.grant_type.value - def validate_redirect_uri(self, client_id, redirect_uri, request, *args, **kwargs): + def validate_redirect_uri(self, client_id, redirect_uri, request, *args, **kwargs): # noqa: ARG002 """Validate that the provided ``redirect_uri`` matches the one stored on the client.""" client = self.find_client(client_id) @@ -296,7 +308,7 @@ def validate_redirect_uri(self, client_id, redirect_uri, request, *args, **kwarg return False - def validate_refresh_token(self, refresh_token, client, request, *args, **kwargs): + def validate_refresh_token(self, refresh_token, client, request, *args, **kwargs): # noqa: ARG002 """ Validate a supplied refresh token. @@ -321,7 +333,12 @@ def validate_refresh_token(self, refresh_token, client, request, *args, **kwargs return True def validate_response_type( - self, client_id, response_type, request, *args, **kwargs + self, + client_id, + response_type, + request, # noqa: ARG002 + *args, # noqa: ARG002 + **kwargs, # noqa: ARG002 ): """Validate that the provided ``response_type`` matches the one stored on the client.""" @@ -383,4 +400,4 @@ def _find_token(self, value): def utcnow(): - return datetime.datetime.utcnow() + return datetime.datetime.utcnow() # noqa: DTZ003 diff --git a/h/services/oauth/service.py b/h/services/oauth/service.py index bc15624f2e4..1920e6a1072 100644 --- a/h/services/oauth/service.py +++ b/h/services/oauth/service.py @@ -100,7 +100,7 @@ def validate_revocation_request(self, request): # Mark this request as a revocation request so we can know _not_ # to trigger full client validation later on in - # OAuthValidatorService.client_authentication_required() + # OAuthValidatorService.client_authentication_required() # noqa: ERA001 request.h_revoke_request = True return super().validate_revocation_request(request) @@ -128,10 +128,10 @@ def _load_client_id_from_refresh_token(self, request): if token: request.client_id = token.authclient.id else: - raise InvalidRefreshTokenError() + raise InvalidRefreshTokenError() # noqa: RSE102 @staticmethod - def _generate_access_token(oauth_request): + def _generate_access_token(oauth_request): # noqa: ARG004 return ACCESS_TOKEN_PREFIX + token_urlsafe() @staticmethod diff --git a/h/services/search_index.py b/h/services/search_index.py index 15f19fb5118..b2d5f6ca10b 100644 --- a/h/services/search_index.py +++ b/h/services/search_index.py @@ -65,7 +65,7 @@ def add_annotation(self, annotation): self._index_annotation_body(annotation.id, body, refresh=False) - def delete_annotation_by_id(self, annotation_id, refresh=False): + def delete_annotation_by_id(self, annotation_id, refresh=False): # noqa: FBT002 """ Mark an annotation as deleted in the search index. 
diff --git a/h/services/subscription.py b/h/services/subscription.py index 03b64f58e90..b35cd95f880 100644 --- a/h/services/subscription.py +++ b/h/services/subscription.py @@ -1,5 +1,3 @@ -from typing import List - from sqlalchemy import func from sqlalchemy.orm import Session from webob.cookies import SignedSerializer @@ -49,7 +47,7 @@ def get_subscription( return subscription - def get_all_subscriptions(self, user_id: str) -> List[Subscriptions]: + def get_all_subscriptions(self, user_id: str) -> list[Subscriptions]: """ Get all subscriptions for a particular user, creating any missing ones. @@ -79,7 +77,7 @@ def unsubscribe_using_token(self, token: str): try: payload = self._token_serializer.loads(token) except ValueError as err: - raise InvalidUnsubscribeToken() from err + raise InvalidUnsubscribeToken from err self.get_subscription( user_id=payload["uri"], type_=Subscriptions.Type(payload["type"]) diff --git a/h/services/user.py b/h/services/user.py index f1659658a02..ae3e5eba4ed 100644 --- a/h/services/user.py +++ b/h/services/user.py @@ -157,7 +157,7 @@ def fetch_for_login(self, username_or_email): return None if not user.is_activated: - raise UserNotActivated() + raise UserNotActivated return user @@ -166,7 +166,7 @@ def update_preferences(user, **kwargs): invalid_keys = set(kwargs.keys()) - UPDATE_PREFS_ALLOWED_KEYS if invalid_keys: keys = ", ".join(sorted(invalid_keys)) - raise TypeError(f"settings with keys {keys} are not allowed") + raise TypeError(f"settings with keys {keys} are not allowed") # noqa: EM102, TRY003 if "show_sidebar_tutorial" in kwargs: # pragma: no cover user.sidebar_tutorial_dismissed = not kwargs["show_sidebar_tutorial"] diff --git a/h/services/user_delete.py b/h/services/user_delete.py index 001bf02b970..03b4c547372 100644 --- a/h/services/user_delete.py +++ b/h/services/user_delete.py @@ -31,12 +31,12 @@ def make_log_message(user, message): def log_updated_rows(user, log_message, updated_ids): if updated_ids: log.info( - f"{make_log_message(user, log_message)}: %s", + f"{make_log_message(user, log_message)}: %s", # noqa: G004 ", ".join(str(id_) for id_ in updated_ids), ) -def log_deleted_rows(user, model_class, deleted_ids, log_ids=True): +def log_deleted_rows(user, model_class, deleted_ids, log_ids=True): # noqa: FBT002 if deleted_ids: if log_ids: log.info( @@ -180,7 +180,7 @@ def delete_featurecohort_memberships(self, user): def delete_annotations(self, user): """Delete all of `user`'s annotations from both Postgres and Elasticsearch.""" - now = datetime.utcnow() + now = datetime.utcnow() # noqa: DTZ003 deleted_annotation_ids = self.worker.update( Annotation, @@ -409,7 +409,7 @@ def delete(self, model_class, select_stmnt) -> list: def _execute(self, stmnt): if self.limit < 1: - raise LimitReached() + raise LimitReached affected_ids = self.db.scalars(stmnt).all() self.limit -= len(affected_ids) diff --git a/h/services/user_password.py b/h/services/user_password.py index 378eefe77e0..f979a8b34cd 100644 --- a/h/services/user_password.py +++ b/h/services/user_password.py @@ -23,7 +23,7 @@ def check_password(self, user, password): # Old-style separate salt. # - # TODO: remove this deprecated code path when a suitable proportion of + # TODO: remove this deprecated code path when a suitable proportion of # noqa: FIX002, TD002, TD003 # users have updated their password by logging-in. (Check how many # users still have a non-null salt in the database.) 
if user.salt is not None: @@ -51,7 +51,7 @@ def update_password(self, user, new_password): # password automatically). user.salt = None user.password = self.hasher.hash(new_password) - user.password_updated = datetime.datetime.utcnow() + user.password_updated = datetime.datetime.utcnow() # noqa: DTZ003 def user_password_service_factory(_context, _request): diff --git a/h/services/user_rename.py b/h/services/user_rename.py index 75c67859291..d43e1744a34 100644 --- a/h/services/user_rename.py +++ b/h/services/user_rename.py @@ -30,8 +30,8 @@ def __init__(self, db): def check(self, user, new_username): existing_user = User.get_by_username(self.db, new_username, user.authority) if existing_user and existing_user != user: - raise UserRenameError( - f'Another user already has the username "{new_username}"' + raise UserRenameError( # noqa: TRY003 + f'Another user already has the username "{new_username}"' # noqa: EM102 ) return True diff --git a/h/services/user_signup.py b/h/services/user_signup.py index 98949d1f4a4..a8652d7801b 100644 --- a/h/services/user_signup.py +++ b/h/services/user_signup.py @@ -37,7 +37,7 @@ def __init__( self.password_service = password_service self.subscription_service = subscription_service - def signup(self, require_activation: bool = True, **kwargs) -> User: + def signup(self, require_activation: bool = True, **kwargs) -> User: # noqa: FBT001, FBT002 """ Create a new user. @@ -97,13 +97,13 @@ def signup(self, require_activation: bool = True, **kwargs) -> User: "concurrent account signup conflict error occurred during user signup %s", err.args[0], ) - raise ConflictError( - f"The email address {user.email} has already been registered." + raise ConflictError( # noqa: TRY003 + f"The email address {user.email} has already been registered." # noqa: EM102 ) from err # If the exception is not related to the email or username, re-raise it. raise - # FIXME: this is horrible, but is needed until the + # FIXME: this is horrible, but is needed until the # noqa: FIX001, TD001, TD002, TD003 # notification/subscription system is made opt-out rather than opt-in # (at least from the perspective of the database). for subscription in self.subscription_service.get_all_subscriptions( diff --git a/h/services/user_unique.py b/h/services/user_unique.py index f521dca5500..70f254c9ddc 100644 --- a/h/services/user_unique.py +++ b/h/services/user_unique.py @@ -57,7 +57,7 @@ def ensure_unique(self, data, authority): if self.user_service.fetch_by_identity( identity["provider"], identity["provider_unique_id"] ): - errors.append( + errors.append( # noqa: PERF401 _( "user with provider '{}' and unique id '{}' already exists".format( # noqa: INT002 identity["provider"], identity["provider_unique_id"] diff --git a/h/services/user_update.py b/h/services/user_update.py index 231bfe9c0b0..c32db9770de 100644 --- a/h/services/user_update.py +++ b/h/services/user_update.py @@ -31,7 +31,7 @@ def update(self, user, **kwargs): # May wish to re-evaluate later if users need to be moved between # authorities. if "authority" in kwargs: - raise ValidationError("A user's authority may not be changed") + raise ValidationError("A user's authority may not be changed") # noqa: EM101, TRY003 for key, value in kwargs.items(): try: @@ -51,8 +51,8 @@ def update(self, user, **kwargs): # This conflict can arise from changes to either username or authority. # We know this isn't authority, because the presence of authority # would have already raised. 
- raise ConflictError( - f"""username '{kwargs["username"]}' is already in use""" + raise ConflictError( # noqa: TRY003 + f"""username '{kwargs["username"]}' is already in use""" # noqa: EM102 ) from err # Re-raise as this is an unexpected problem diff --git a/h/settings.py b/h/settings.py index 26092168cfa..fde77562dee 100644 --- a/h/settings.py +++ b/h/settings.py @@ -38,12 +38,12 @@ def __init__(self, settings=None, environ=None): self._environ = environ - def set( + def set( # noqa: PLR0913 self, name, envvar, type_=str, - required=False, + required=False, # noqa: FBT002 default=None, deprecated_msg=None, ): @@ -83,11 +83,11 @@ def set( val = default cast_message = f"{name}'s default {val!r}" elif required and name not in self.settings: - raise SettingError(f"error parsing environment variable {envvar} not found") + raise SettingError(f"error parsing environment variable {envvar} not found") # noqa: EM102, TRY003 if val: try: self.settings[name] = type_(val) except ValueError as err: - raise SettingError( - f"error casting {cast_message} as {type_.__name__}" + raise SettingError( # noqa: TRY003 + f"error casting {cast_message} as {type_.__name__}" # noqa: EM102 ) from err diff --git a/h/storage.py b/h/storage.py index 6a601148739..3f3a0f03dd9 100644 --- a/h/storage.py +++ b/h/storage.py @@ -14,7 +14,7 @@ _ = i18n.TranslationStringFactory(__package__) -def expand_uri(session, uri, normalized=False): +def expand_uri(session, uri, normalized=False): # noqa: FBT002 """ Return all URIs which refer to the same underlying document as `uri`. diff --git a/h/streamer/__init__.py b/h/streamer/__init__.py index 4dc59f4f1d5..b060fe8baca 100644 --- a/h/streamer/__init__.py +++ b/h/streamer/__init__.py @@ -1,4 +1,4 @@ from h.streamer.app import create_app from h.streamer.worker import Worker -__all__ = ("create_app", "Worker") +__all__ = ("Worker", "create_app") diff --git a/h/streamer/filter.py b/h/streamer/filter.py index 0362326e091..77ef65655ef 100644 --- a/h/streamer/filter.py +++ b/h/streamer/filter.py @@ -26,7 +26,7 @@ class SocketFilter: - KNOWN_FIELDS = {"/id", "/group", "/uri", "/references"} + KNOWN_FIELDS = {"/id", "/group", "/uri", "/references"} # noqa: RUF012 @classmethod def matching(cls, sockets, annotation, session): @@ -94,6 +94,6 @@ def _rows_for(cls, filter_): for value in values: if field == "/uri": - value = normalize_uri(value) + value = normalize_uri(value) # noqa: PLW2901 yield field, value diff --git a/h/streamer/messages.py b/h/streamer/messages.py index 7c2d3f35f54..0427bad085c 100644 --- a/h/streamer/messages.py +++ b/h/streamer/messages.py @@ -20,7 +20,7 @@ Message = namedtuple("Message", ["topic", "payload"]) # noqa: PYI024 -def process_messages(settings, routing_key, work_queue, raise_error=True): +def process_messages(settings, routing_key, work_queue, raise_error=True): # noqa: FBT002 """ Configure, start, and monitor a realtime consumer for the specified routing key. 
@@ -44,7 +44,7 @@ def _handler(payload): consumer.run() if raise_error: - raise RuntimeError("Realtime consumer quit unexpectedly!") + raise RuntimeError("Realtime consumer quit unexpectedly!") # noqa: EM101, TRY003 def handle_message(message, registry, session, topic_handlers): @@ -61,8 +61,8 @@ def handle_message(message, registry, session, topic_handlers): try: handler = topic_handlers[message.topic] except KeyError as err: - raise RuntimeError( - f"Don't know how to handle message from topic: {message.topic}" + raise RuntimeError( # noqa: TRY003 + f"Don't know how to handle message from topic: {message.topic}" # noqa: EM102 ) from err # N.B. We iterate over a non-weak list of instances because there's nothing diff --git a/h/streamer/websocket.py b/h/streamer/websocket.py index bfbbf20a339..a04228e4278 100644 --- a/h/streamer/websocket.py +++ b/h/streamer/websocket.py @@ -18,8 +18,8 @@ # An incoming message from a WebSocket client. -class Message(namedtuple("Message", ["socket", "payload"])): # noqa: PYI024 - def reply(self, payload, ok=True): +class Message(namedtuple("Message", ["socket", "payload"])): # noqa: PYI024, SLOT002 + def reply(self, payload, ok=True): # noqa: FBT002 """ Send a response to this message. @@ -28,7 +28,7 @@ def reply(self, payload, ok=True): """ reply_to = self.payload.get("id") # Short-circuit if message is missing an ID or has a non-numeric ID. - if not isinstance(reply_to, (int, float)): + if not isinstance(reply_to, (int, float)): # noqa: UP038 return data = copy.deepcopy(payload) data["ok"] = ok @@ -64,7 +64,7 @@ def __init__(self, sock, protocols=None, extensions=None, environ=None): self._work_queue = environ["h.ws.streamer_work_queue"] def __new__(cls, *_args, **_kwargs): - instance = super(WebSocket, cls).__new__(cls) + instance = super(WebSocket, cls).__new__(cls) # noqa: UP008 cls.instances.add(instance) return instance @@ -91,7 +91,7 @@ def received_message(self, message): def closed(self, code, reason=None): if self.debug: log.info("Closed connection code=%s reason=%s", code, reason) - try: + try: # noqa: SIM105 self.instances.remove(self) except KeyError: pass @@ -123,7 +123,7 @@ def handle_message(message, session=None): payload = message.payload type_ = payload.get("type") - # FIXME: This code is here to tolerate old and deprecated message formats. + # FIXME: This code is here to tolerate old and deprecated message formats. 
# noqa: FIX001, TD001, TD002, TD003 if type_ is None: # pragma: no cover if "messageType" in payload and payload["messageType"] == "client_id": type_ = "client_id" @@ -136,7 +136,7 @@ def handle_message(message, session=None): handler(message, session=session) -def handle_client_id_message(message, session=None): +def handle_client_id_message(message, session=None): # noqa: ARG001 """Answer to a client telling us its client ID.""" if "value" not in message.payload: message.reply( @@ -153,7 +153,7 @@ def handle_client_id_message(message, session=None): MESSAGE_HANDLERS["client_id"] = handle_client_id_message -def handle_filter_message(message, session=None): +def handle_filter_message(message, session=None): # noqa: ARG001 """Answer to a client updating its streamer filter.""" if "filter" not in message.payload: message.reply( @@ -187,7 +187,7 @@ def handle_filter_message(message, session=None): MESSAGE_HANDLERS["filter"] = handle_filter_message -def handle_ping_message(message, session=None): +def handle_ping_message(message, session=None): # noqa: ARG001 """Reply to a client requesting a pong.""" message.reply({"type": "pong"}) @@ -195,7 +195,7 @@ def handle_ping_message(message, session=None): MESSAGE_HANDLERS["ping"] = handle_ping_message -def handle_whoami_message(message, session=None): +def handle_whoami_message(message, session=None): # noqa: ARG001 """Reply to a client requesting information on its auth state.""" message.reply( @@ -211,7 +211,7 @@ def handle_whoami_message(message, session=None): MESSAGE_HANDLERS["whoami"] = handle_whoami_message -def handle_unknown_message(message, session=None): +def handle_unknown_message(message, session=None): # noqa: ARG001 """Handle the message type being missing or not recognised.""" type_ = json.dumps(message.payload.get("type")) message.reply( diff --git a/h/streamer/worker.py b/h/streamer/worker.py index d579f30d925..4163d064e29 100644 --- a/h/streamer/worker.py +++ b/h/streamer/worker.py @@ -114,7 +114,7 @@ def clear(self): # pragma: no cover websocket = greenlet._run.__self__ # noqa: SLF001 if websocket: websocket.close(1001, "Server is shutting down") - except: # noqa: E722 + except: # noqa: E722, S110 pass finally: self.discard(greenlet) diff --git a/h/tasks/celery.py b/h/tasks/celery.py index eb81fb34d8d..d53ea3723b6 100644 --- a/h/tasks/celery.py +++ b/h/tasks/celery.py @@ -179,7 +179,7 @@ def add_task_name_and_id_to_log_messages( root_loggers_handler.setFormatter( logging.Formatter( - "[%(asctime)s: %(levelname)s/%(processName)s] " + "[%(asctime)s: %(levelname)s/%(processName)s] " # noqa: ISC003 + f"{task.name}[{task_id}] " + "%(message)s" ) diff --git a/h/tasks/cleanup.py b/h/tasks/cleanup.py index 343321b443d..2132bd5d100 100644 --- a/h/tasks/cleanup.py +++ b/h/tasks/cleanup.py @@ -44,20 +44,20 @@ def report_num_deleted_annotations(): @celery.task def purge_expired_auth_tickets(): celery.request.db.query(models.AuthTicket).filter( - models.AuthTicket.expires < datetime.utcnow() + models.AuthTicket.expires < datetime.utcnow() # noqa: DTZ003 ).delete() @celery.task def purge_expired_authz_codes(): celery.request.db.query(models.AuthzCode).filter( - models.AuthzCode.expires < datetime.utcnow() + models.AuthzCode.expires < datetime.utcnow() # noqa: DTZ003 ).delete() @celery.task def purge_expired_tokens(): - now = datetime.utcnow() + now = datetime.utcnow() # noqa: DTZ003 celery.request.db.query(models.Token).filter( models.Token.expires < now, models.Token.refresh_token_expires < now ).delete() diff --git a/h/tasks/indexer.py 
b/h/tasks/indexer.py index 92737bc005f..2b2def6599d 100644 --- a/h/tasks/indexer.py +++ b/h/tasks/indexer.py @@ -12,7 +12,7 @@ # See: https://docs.celeryproject.org/en/stable/userguide/tasks.html#automatic-retry-for-known-exceptions class _BaseTaskWithRetry(ABC, Task): autoretry_for = (Exception,) - retry_kwargs = {"countdown": 5, "max_retries": 1} + retry_kwargs = {"countdown": 5, "max_retries": 1} # noqa: RUF012 @celery.task(base=_BaseTaskWithRetry, acks_late=True) diff --git a/h/tasks/job_queue.py b/h/tasks/job_queue.py index ab970b28a12..e13542c339c 100644 --- a/h/tasks/job_queue.py +++ b/h/tasks/job_queue.py @@ -11,14 +11,14 @@ def add_annotations_between_times(name, start_time, end_time, tag): @celery.task -def add_annotations_from_user(name, userid, tag, force=False, schedule_in=None): +def add_annotations_from_user(name, userid, tag, force=False, schedule_in=None): # noqa: FBT002 celery.request.find_service(name="queue_service").add_by_user( name, userid, tag, force=force, schedule_in=schedule_in ) @celery.task -def add_annotations_from_group(name, groupid, tag, force=False, schedule_in=None): +def add_annotations_from_group(name, groupid, tag, force=False, schedule_in=None): # noqa: FBT002 celery.request.find_service(name="queue_service").add_by_group( name, groupid, tag, force=force, schedule_in=schedule_in ) @@ -26,7 +26,11 @@ def add_annotations_from_group(name, groupid, tag, force=False, schedule_in=None @celery.task def add_annotations_by_ids( - name, annotation_ids: list[str], tag, force=False, schedule_in=None + name, + annotation_ids: list[str], + tag, + force=False, # noqa: FBT002 + schedule_in=None, ): celery.request.find_service(name="queue_service").add_by_ids( name, annotation_ids, tag, force=force, schedule_in=schedule_in diff --git a/h/traversal/__init__.py b/h/traversal/__init__.py index b3e5493f378..16529475e56 100644 --- a/h/traversal/__init__.py +++ b/h/traversal/__init__.py @@ -72,19 +72,19 @@ from h.traversal.user import UserByIDRoot, UserByNameRoot, UserContext, UserRoot __all__ = ( + "AddGroupMembershipContext", "AnnotationContext", "AnnotationRoot", + "EditGroupMembershipContext", + "GroupContext", + "GroupMembershipContext", "GroupRequiredRoot", "GroupRoot", "OrganizationContext", "OrganizationRoot", - "UserContext", - "UserByNameRoot", "UserByIDRoot", + "UserByNameRoot", + "UserContext", "UserRoot", - "GroupContext", - "AddGroupMembershipContext", - "EditGroupMembershipContext", - "GroupMembershipContext", "group_membership_api_factory", ) diff --git a/h/traversal/annotation.py b/h/traversal/annotation.py index 48476dad654..dd48fa83c39 100644 --- a/h/traversal/annotation.py +++ b/h/traversal/annotation.py @@ -26,6 +26,6 @@ def __init__(self, request): def __getitem__(self, annotation_id): annotation = self._annotation_read_service.get_annotation_by_id(annotation_id) if annotation is None: - raise KeyError() + raise KeyError return AnnotationContext(annotation) diff --git a/h/traversal/group.py b/h/traversal/group.py index 919fc4dd137..7e1db96b342 100644 --- a/h/traversal/group.py +++ b/h/traversal/group.py @@ -1,5 +1,4 @@ from dataclasses import dataclass -from typing import Optional from h.models import Group @@ -8,7 +7,7 @@ class GroupContext: """Context for a single (optional) group.""" - group: Optional[Group] = None + group: Group | None = None class GroupRoot: @@ -29,6 +28,6 @@ class GroupRequiredRoot(GroupRoot): def __getitem__(self, pubid_or_groupid): group_context = super().__getitem__(pubid_or_groupid) if group_context.group is None: - raise 
KeyError() + raise KeyError return group_context diff --git a/h/traversal/group_membership.py b/h/traversal/group_membership.py index 16bfd639e44..af64589bab2 100644 --- a/h/traversal/group_membership.py +++ b/h/traversal/group_membership.py @@ -63,10 +63,10 @@ def group_membership_api_factory( group = _get_group(request, pubid) if not user: - raise HTTPNotFound(f"User not found: {userid}") + raise HTTPNotFound(f"User not found: {userid}") # noqa: EM102, TRY003 if not group: - raise HTTPNotFound(f"Group not found: {pubid}") + raise HTTPNotFound(f"Group not found: {pubid}") # noqa: EM102, TRY003 if request.method == "POST": return AddGroupMembershipContext(group, user, new_roles=None) @@ -74,10 +74,10 @@ def group_membership_api_factory( membership = _get_membership(request, group, user) if not membership: - raise HTTPNotFound(f"Membership not found: ({pubid}, {userid})") + raise HTTPNotFound(f"Membership not found: ({pubid}, {userid})") # noqa: EM102, TRY003 if request.method in ("GET", "DELETE"): return GroupMembershipContext(group=group, user=user, membership=membership) - assert request.method == "PATCH" + assert request.method == "PATCH" # noqa: S101 return EditGroupMembershipContext(group, user, membership, new_roles=None) diff --git a/h/traversal/organization.py b/h/traversal/organization.py index b099a4aea15..91d27a17624 100644 --- a/h/traversal/organization.py +++ b/h/traversal/organization.py @@ -21,6 +21,6 @@ def __getitem__(self, pubid): pubid ) if organization is None: - raise KeyError() + raise KeyError return OrganizationContext(organization=organization) diff --git a/h/traversal/user.py b/h/traversal/user.py index 00f1489faf3..174720dd367 100644 --- a/h/traversal/user.py +++ b/h/traversal/user.py @@ -28,7 +28,7 @@ def get_user_context(self, userid_or_username, authority): raise HTTPBadRequest(err.args[0]) from err if not user: - raise KeyError() + raise KeyError return UserContext(user) diff --git a/h/tweens.py b/h/tweens.py index 445712b1695..3283a93d94a 100644 --- a/h/tweens.py +++ b/h/tweens.py @@ -12,7 +12,10 @@ resolver = DottedNameResolver(None) -def conditional_http_tween_factory(handler, registry): # pragma: no cover +def conditional_http_tween_factory( + handler, + registry, # noqa: ARG001 +): # pragma: no cover """Set up conditional response handling for some requests.""" def conditional_http_tween(request): @@ -46,7 +49,7 @@ def conditional_http_tween(request): return conditional_http_tween -def invalid_path_tween_factory(handler, registry): # pragma: no cover +def invalid_path_tween_factory(handler, registry): # pragma: no cover # noqa: ARG001 def invalid_path_tween(request): # Due to a bug in WebOb accessing request.path (or request.path_info # etc) will raise UnicodeDecodeError if the requested path doesn't @@ -59,7 +62,7 @@ def invalid_path_tween(request): # https://github.com/Pylons/webob/issues/115 # https://github.com/hypothesis/h/issues/4915 try: - request.path + request.path # noqa: B018 except UnicodeDecodeError: return httpexceptions.HTTPBadRequest() @@ -68,7 +71,7 @@ def invalid_path_tween(request): return invalid_path_tween -def redirect_tween_factory(handler, registry, redirects=None): +def redirect_tween_factory(handler, registry, redirects=None): # noqa: ARG001 if redirects is None: # N.B. If we fail to load or parse the redirects file, the application # will fail to boot. 
This is deliberate: a missing/corrupt redirects @@ -88,7 +91,7 @@ def redirect_tween(request): return redirect_tween -def security_header_tween_factory(handler, registry): +def security_header_tween_factory(handler, registry): # noqa: ARG001 """Add security-related headers to every response.""" def security_header_tween(request): @@ -109,7 +112,7 @@ def security_header_tween(request): return security_header_tween -def cache_header_tween_factory(handler, registry): +def cache_header_tween_factory(handler, registry): # noqa: ARG001 """Set default caching headers on responses depending on the content type.""" def cache_header_tween(request): @@ -124,7 +127,7 @@ def cache_header_tween(request): return cache_header_tween -def rollback_db_session_on_exception_factory(handler, registry): +def rollback_db_session_on_exception_factory(handler, registry): # noqa: ARG001 """ Catch exceptions and rolls the database back. diff --git a/h/util/datetime.py b/h/util/datetime.py index 56bf4db04d1..0e788f57c50 100644 --- a/h/util/datetime.py +++ b/h/util/datetime.py @@ -1,4 +1,4 @@ -"""Shared utility functions for manipulating dates and times.""" +"""Shared utility functions for manipulating dates and times.""" # noqa: A005 def utc_iso8601(datetime): @@ -12,4 +12,4 @@ def utc_iso8601(datetime): def utc_us_style_date(datetime): """Convert a UTC datetime into a Month day, year (August 1, 1990).""" - return "{d:%B} {d.day}, {d:%Y}".format(d=datetime) + return f"{datetime:%B} {datetime.day}, {datetime:%Y}" diff --git a/h/util/db.py b/h/util/db.py index 7a2392b07c5..c3cd9878e76 100644 --- a/h/util/db.py +++ b/h/util/db.py @@ -32,7 +32,7 @@ def fetch_user(userid): fetch_user('acct:foo@example.com') # => executes a query """ - def __init__(self, session, maxsize=128, typed=False): + def __init__(self, session, maxsize=128, typed=False): # noqa: FBT002 self._session = session self._maxsize = maxsize self._typed = typed diff --git a/h/util/document_claims.py b/h/util/document_claims.py index 9374c39f996..09deb1a8bde 100644 --- a/h/util/document_claims.py +++ b/h/util/document_claims.py @@ -110,7 +110,7 @@ def transform_meta_(document_meta_dicts, items, path_prefix=None): transform_meta_(document_meta_dicts, value, path_prefix=keypath) else: if not isinstance(value, list): - value = [value] + value = [value] # noqa: PLW2901 type_ = ".".join(keypath) @@ -118,7 +118,7 @@ def transform_meta_(document_meta_dicts, items, path_prefix=None): # We don't allow None, empty strings, whitespace-only # strings, leading or trailing whitespaces, or empty arrays # in document title values. - value = [v.strip() for v in value if v and v.strip()] + value = [v.strip() for v in value if v and v.strip()] # noqa: PLW2901 if not value: continue @@ -158,14 +158,14 @@ def document_uris_from_links(link_dicts, claimant): # Disregard Highwire PDF links as these are being added separately from # the highwire metadata later on. - if set(link_keys) == {"href", "type"}: + if set(link_keys) == {"href", "type"}: # noqa: SIM102 if link["type"] == "application/pdf": continue uri_ = link["href"] # Handle rel="..." links. 
- if "rel" in link: + if "rel" in link: # noqa: SIM108 type_ = f"rel-{link['rel']}" else: type_ = "" @@ -197,7 +197,7 @@ def document_uris_from_highwire_pdf(highwire_dict, claimant): document_uris = [] hwpdfvalues = highwire_dict.get("pdf_url", []) for pdf in hwpdfvalues: - document_uris.append( + document_uris.append( # noqa: PERF401 { "claimant": claimant, "uri": pdf, @@ -220,7 +220,7 @@ def document_uris_from_highwire_doi(highwire_dict, claimant): document_uris = [] hwdoivalues = highwire_dict.get("doi", []) for doi in hwdoivalues: - doi = doi_uri_from_string(doi) + doi = doi_uri_from_string(doi) # noqa: PLW2901 if doi is not None: document_uris.append( { @@ -245,7 +245,7 @@ def document_uris_from_dc(dc_dict, claimant): document_uris = [] dcdoivalues = dc_dict.get("identifier", []) for doi in dcdoivalues: - doi = doi_uri_from_string(doi) + doi = doi_uri_from_string(doi) # noqa: PLW2901 if doi is not None: document_uris.append( {"claimant": claimant, "uri": doi, "type": "dc-doi", "content_type": ""} @@ -278,8 +278,7 @@ def doi_uri_from_string(string): """ string = string.strip() - if string.startswith("doi:"): - string = string[len("doi:") :] + string = string.removeprefix("doi:") string = string.strip() diff --git a/h/util/group.py b/h/util/group.py index 2d39ea5b3c0..cbe39cd7b42 100644 --- a/h/util/group.py +++ b/h/util/group.py @@ -21,7 +21,7 @@ def split_groupid(groupid): "authority_provided_id": match.groups()[0], "authority": match.groups()[1], } - raise ValueError(f"{groupid} isn't a valid groupid") + raise ValueError(f"{groupid} isn't a valid groupid") # noqa: EM102, TRY003 def is_groupid(maybe_groupid): diff --git a/h/util/group_scope.py b/h/util/group_scope.py index 54c52fa8c7e..e8fe27824d7 100644 --- a/h/util/group_scope.py +++ b/h/util/group_scope.py @@ -13,7 +13,7 @@ def url_in_scope(url, scope_urls): :type scope_urls: list(str) :rtype: bool """ - return any((url.startswith(scope_url) for scope_url in scope_urls)) + return any(url.startswith(scope_url) for scope_url in scope_urls) def parse_scope_from_url(url): diff --git a/h/util/logging_filters.py b/h/util/logging_filters.py index 3217e8d1ebf..3b70ce46743 100644 --- a/h/util/logging_filters.py +++ b/h/util/logging_filters.py @@ -35,8 +35,8 @@ def __init__(self, ignore_exceptions): try: self._ignore_exceptions.append((logging_levels[exc_level], exc_name)) except KeyError as err: - raise ValueError( - f"""The logging level provided ({exc_level}) is invalid. Valid options: {logging_levels.keys()}""" + raise ValueError( # noqa: TRY003 + f"""The logging level provided ({exc_level}) is invalid. Valid options: {logging_levels.keys()}""" # noqa: EM102 ) from err def filter(self, record): diff --git a/h/util/markdown_render.py b/h/util/markdown_render.py index d33a4af1257..c51566b891d 100644 --- a/h/util/markdown_render.py +++ b/h/util/markdown_render.py @@ -1,4 +1,4 @@ -from functools import lru_cache, partial +from functools import cache, partial import bleach from bleach.linkifier import LinkifyFilter @@ -48,7 +48,7 @@ def render(text): # them we'll double escape them. 
text = text.replace("\\(", "\\\\(").replace("\\)", "\\\\)") - return _get_cleaner().clean((RENDER_MARKDOWN(text))) + return _get_cleaner().clean(RENDER_MARKDOWN(text)) def _filter_link_attributes(_tag, name, value): @@ -58,14 +58,14 @@ def _filter_link_attributes(_tag, name, value): if name == "target" and value == "_blank": return True - if name == "rel" and value == LINK_REL: + if name == "rel" and value == LINK_REL: # noqa: SIM103 return True return False -def _linkify_target_blank(attrs, new=False): - # FIXME: when bleach>2.0.0 is released we can use +def _linkify_target_blank(attrs, new=False): # noqa: FBT002, ARG001 + # FIXME: when bleach>2.0.0 is released we can use # noqa: FIX001, TD001, TD002 # bleach.callbacks.target_blank instead of this function. We have our own # copy to work around a bug in 2.0.0: # @@ -83,7 +83,7 @@ def _linkify_target_blank(attrs, new=False): return attrs -def _linkify_rel(attrs, new=False): +def _linkify_rel(attrs, new=False): # noqa: FBT002, ARG001 href_key = (None, "href") if href_key not in attrs: @@ -103,7 +103,7 @@ def _linkify_rel(attrs, new=False): ALLOWED_ATTRIBUTES.update(MARKDOWN_ATTRIBUTES) -@lru_cache(maxsize=None) +@cache def _get_cleaner(): linkify_filter = partial( LinkifyFilter, callbacks=[_linkify_target_blank, _linkify_rel] diff --git a/h/util/redirects.py b/h/util/redirects.py index 9880501643c..a3eb5e0531c 100644 --- a/h/util/redirects.py +++ b/h/util/redirects.py @@ -25,7 +25,7 @@ from collections import namedtuple -class Redirect( +class Redirect( # noqa: SLOT002 namedtuple( # noqa: PYI024 "Redirect", [ @@ -75,7 +75,7 @@ def parse(specs): try: src, typ, dst = line.split(None, 3) except ValueError as err: - raise ParseError(f"invalid redirect specification: {line!r}") from err + raise ParseError(f"invalid redirect specification: {line!r}") from err # noqa: EM102, TRY003 if typ == "internal-exact": redirect = Redirect(prefix=False, internal=True, src=src, dst=dst) elif typ == "internal-prefix": @@ -85,7 +85,7 @@ def parse(specs): elif typ == "prefix": redirect = Redirect(prefix=True, internal=False, src=src, dst=dst) else: - raise ParseError(f"unknown redirect type: {typ!r}") + raise ParseError(f"unknown redirect type: {typ!r}") # noqa: EM102, TRY003 result.append(redirect) return result diff --git a/h/util/uri.py b/h/util/uri.py index 114f10fd1b4..1d18fcdb2b7 100644 --- a/h/util/uri.py +++ b/h/util/uri.py @@ -112,11 +112,11 @@ # # path-abempty = *( "/" segment ) # ... -# segment = *pchar +# segment = *pchar # noqa: ERA001 # ... -# pchar = unreserved / pct-encoded / sub-delims / ":" / "@" +# pchar = unreserved / pct-encoded / sub-delims / ":" / "@" # noqa: ERA001 # ... -# unreserved = ALPHA / DIGIT / "-" / "." / "_" / "~" +# unreserved = ALPHA / DIGIT / "-" / "." / "_" / "~" # noqa: ERA001 # sub-delims = "!" / "$" / "&" / "'" / "(" / ")" # / "*" / "+" / "," / ";" / "=" # @@ -126,7 +126,7 @@ # From RFC3986. The ABNF for query strings is # -# query = *( pchar / "/" / "?" ) +# query = *( pchar / "/" / "?" ) # noqa: ERA001 # # Where the definition of pchar is as given above. # @@ -211,9 +211,7 @@ def _normalize_netloc(uri): hostname = hostname.lower() # Remove port if default for the scheme - if uri.scheme == "http" and port == 80: - port = None - elif uri.scheme == "https" and port == 443: + if (uri.scheme == "http" and port == 80) or (uri.scheme == "https" and port == 443): port = None # Put it all back together again... 
@@ -230,8 +228,8 @@ def _normalize_netloc(uri): if port is not None: hostinfo += ":" + str(port) - if userinfo is not None: - netloc = "@".join([userinfo, hostinfo]) + if userinfo is not None: # noqa: SIM108 + netloc = "@".join([userinfo, hostinfo]) # noqa: FLY002 else: netloc = hostinfo diff --git a/h/util/view.py b/h/util/view.py index 96f96ae0846..cfd6eb23163 100644 --- a/h/util/view.py +++ b/h/util/view.py @@ -1,7 +1,7 @@ from pyramid.view import view_config -def handle_exception(request, exception): +def handle_exception(request, exception): # noqa: ARG001 """ Handle an uncaught exception for the passed request. diff --git a/h/viewderivers.py b/h/viewderivers.py index d27f56a374d..10ada820f27 100644 --- a/h/viewderivers.py +++ b/h/viewderivers.py @@ -17,7 +17,8 @@ def csp_protected_view(view, info): policy = info.registry.settings.get("csp", {}) clauses = [ - " ".join([directive] + values) for directive, values in sorted(policy.items()) + " ".join([directive] + values) # noqa: RUF005 + for directive, values in sorted(policy.items()) ] header_value = "; ".join(clauses) diff --git a/h/views/account_signup.py b/h/views/account_signup.py index 8c6a598509d..b7658bca980 100644 --- a/h/views/account_signup.py +++ b/h/views/account_signup.py @@ -55,14 +55,12 @@ def post(self): username=appstruct["username"], email=appstruct["email"], password=appstruct["password"], - privacy_accepted=datetime.datetime.utcnow(), + privacy_accepted=datetime.datetime.utcnow(), # noqa: DTZ003 comms_opt_in=appstruct["comms_opt_in"], ) except ConflictError as exc: template_context["heading"] = _("Account already registered") - template_context["message"] = _( - "{failure_reason}".format(failure_reason=exc.args[0]) # noqa: INT002 - ) + template_context["message"] = _(f"{exc.args[0]}") # noqa: INT001 return template_context diff --git a/h/views/accounts.py b/h/views/accounts.py index bd4be108458..1a3a3cb9762 100644 --- a/h/views/accounts.py +++ b/h/views/accounts.py @@ -151,7 +151,7 @@ def _login_redirect(self): return self.request.params.get("next", _login_redirect_url(self.request)) def _login(self, user): - user.last_login_date = datetime.datetime.utcnow() + user.last_login_date = datetime.datetime.utcnow() # noqa: DTZ003 self.request.registry.notify(LoginEvent(self.request, user)) headers = security.remember(self.request, user.userid) return headers @@ -244,7 +244,7 @@ def get_with_prefilled_code(self): try: user = ResetCode().deserialize(self.schema, code) except colander.Invalid as err: - raise httpexceptions.HTTPNotFound() from err + raise httpexceptions.HTTPNotFound() from err # noqa: RSE102 # N.B. the form field for the reset code is called 'user'. See the # comment in `~h.schemas.forms.accounts.ResetPasswordSchema` for details. 
@@ -320,7 +320,7 @@ def get_when_not_logged_in(self): try: id_ = int(id_) except ValueError as err: - raise httpexceptions.HTTPNotFound() from err + raise httpexceptions.HTTPNotFound() from err # noqa: RSE102 activation = models.Activation.get_by_code(self.request.db, code) if activation is None: @@ -339,7 +339,7 @@ def get_when_not_logged_in(self): user = models.User.get_by_activation(self.request.db, activation) if user is None or user.id != id_: - raise httpexceptions.HTTPNotFound() + raise httpexceptions.HTTPNotFound() # noqa: RSE102 user.activate() @@ -367,7 +367,7 @@ def get_when_logged_in(self): try: id_ = int(id_) except ValueError as err: - raise httpexceptions.HTTPNotFound() from err + raise httpexceptions.HTTPNotFound() from err # noqa: RSE102 if id_ == self.request.user.id: # The user is already logged in to the account (so the account @@ -602,7 +602,7 @@ def post(self): """(Re-)generate the user's API token.""" token = self.svc.fetch(self.userid) - if token: + if token: # noqa: SIM108 # The user already has an API token, regenerate it. token = self.svc.regenerate(token) else: @@ -684,7 +684,7 @@ def query(column): } -# TODO: This can be removed after October 2016, which will be >1 year from the +# TODO: This can be removed after October 2016, which will be >1 year from the # noqa: FIX002, TD002, TD003 # date that the last account claim emails were sent out. At this point, # if we have not done so already, we should remove all unclaimed # usernames from the accounts tables. @@ -703,7 +703,7 @@ def claim_account_legacy(_request): # pragma: no cover ) def dismiss_sidebar_tutorial(request): # pragma: no cover if request.authenticated_userid is None: - raise accounts.JSONError() + raise accounts.JSONError() # noqa: RSE102 request.user.sidebar_tutorial_dismissed = True return ajax_payload(request, {"status": "okay"}) diff --git a/h/views/activity.py b/h/views/activity.py index 3f0d48a5421..b7d69067bbd 100644 --- a/h/views/activity.py +++ b/h/views/activity.py @@ -53,7 +53,7 @@ def search(self): # pragma: no cover if self.request.user: for group in self.request.user.groups: - groups_suggestions.append({"name": group.name, "pubid": group.pubid}) + groups_suggestions.append({"name": group.name, "pubid": group.pubid}) # noqa: PERF401 def tag_link(tag): tag = parser.unparse({"tag": tag}) @@ -203,7 +203,7 @@ def join(self): browser to the search page. 
""" if not self.request.has_permission(Permission.Group.JOIN, context=self.context): - raise httpexceptions.HTTPNotFound() + raise httpexceptions.HTTPNotFound() # noqa: RSE102 group_members_service = self.request.find_service(name="group_members") group_members_service.member_join(self.group, self.request.authenticated_userid) @@ -308,7 +308,7 @@ def _check_access_permissions(self): self.request.override_renderer = "h:templates/groups/join.html.jinja2" return {"group": self.group} - raise httpexceptions.HTTPNotFound() + raise httpexceptions.HTTPNotFound() # noqa: RSE102 return None diff --git a/h/views/admin/admins.py b/h/views/admin/admins.py index 58a2e86b21e..ca7eae5c4b1 100644 --- a/h/views/admin/admins.py +++ b/h/views/admin/admins.py @@ -36,7 +36,7 @@ def admins_add(request): user = models.User.get_by_username(request.db, username, authority) if user is None: request.session.flash( - _("User {username} doesn't exist.".format(username=username)), # noqa: INT002 + _(f"User {username} doesn't exist."), # noqa: INT001 "error", ) else: diff --git a/h/views/admin/features.py b/h/views/admin/features.py index 6447b4bd26d..28af3528bb9 100644 --- a/h/views/admin/features.py +++ b/h/views/admin/features.py @@ -2,7 +2,7 @@ from pyramid.view import view_config from h import models, paginator -from h.i18n import TranslationString as _ # noqa: N813 +from h.i18n import TranslationString as _ from h.security import Permission @@ -123,7 +123,7 @@ def cohorts_edit_add(request): if member is None: # pragma: no cover request.session.flash( _( - "User {member_name} with authority {authority} doesn't exist.".format( # noqa: INT002 + "User {member_name} with authority {authority} doesn't exist.".format( # noqa: INT002, UP032 member_name=member_name, authority=member_authority ) ), @@ -156,7 +156,7 @@ def cohorts_edit_remove(request): except ValueError: # pragma: no cover request.session.flash( _( - "User {member_userid} doesn't exist.".format( # noqa: INT002 + "User {member_userid} doesn't exist.".format( # noqa: INT002, UP032 member_userid=member_userid ) ), diff --git a/h/views/admin/groups.py b/h/views/admin/groups.py index 72e9149df3e..480ac9f62f4 100644 --- a/h/views/admin/groups.py +++ b/h/views/admin/groups.py @@ -3,7 +3,7 @@ from pyramid.view import view_config, view_defaults from h import ( - form, # noqa F401 + form, i18n, models, paginator, @@ -94,7 +94,7 @@ def on_success(appstruct): type_ = appstruct["group_type"] if type_ not in ["open", "restricted"]: # pragma: no cover - raise ValueError(f"Unsupported group type {type_}") + raise ValueError(f"Unsupported group type {type_}") # noqa: EM102, TRY003 group = create_fns[type_]( name=appstruct["name"], @@ -109,7 +109,7 @@ def on_success(appstruct): # because that check is part of form schema validation. 
member_userids = [] for username in appstruct["members"]: - member_userids.append( + member_userids.append( # noqa: PERF401 self.user_svc.fetch(username, organization.authority).userid ) @@ -171,7 +171,7 @@ def delete(self): self.request.find_service(name="group_delete").delete(self.group) self.request.session.flash( - _("Successfully deleted group %s" % (self.group.name), "success"), # noqa: INT003 + _("Successfully deleted group %s" % (self.group.name), "success"), # noqa: INT003, UP031 queue="success", ) @@ -199,7 +199,7 @@ def on_success(appstruct): memberids = [] for username in appstruct["members"]: - memberids.append(self.user_svc.fetch(username, group.authority).userid) + memberids.append(self.user_svc.fetch(username, group.authority).userid) # noqa: PERF401 self.group_members_svc.update_members(group, memberids) diff --git a/h/views/admin/nipsa.py b/h/views/admin/nipsa.py index 23d8d80dbf7..480826f491e 100644 --- a/h/views/admin/nipsa.py +++ b/h/views/admin/nipsa.py @@ -39,7 +39,7 @@ def nipsa_add(request): if user is None: raise UserNotFoundError( _( - "Could not find user with username %s and authority %s" # noqa: INT003 + "Could not find user with username %s and authority %s" # noqa: INT003, UP031 % (username, authority) ) ) @@ -62,7 +62,7 @@ def nipsa_remove(request): userid = request.params["remove"] user = request.db.query(models.User).filter_by(userid=userid).first() if user is None: - raise UserNotFoundError(_("Could not find user with userid %s" % userid)) # noqa: INT003 + raise UserNotFoundError(_("Could not find user with userid %s" % userid)) # noqa: INT003, UP031 nipsa_service = request.find_service(name="nipsa") nipsa_service.unflag(user) diff --git a/h/views/admin/oauthclients.py b/h/views/admin/oauthclients.py index 573d6dff8d7..3db99fee7ba 100644 --- a/h/views/admin/oauthclients.py +++ b/h/views/admin/oauthclients.py @@ -163,4 +163,4 @@ def _get_client(cls, request): return request.db.query(AuthClient).filter_by(id=client_id).one() except (NoResultFound, StatementError) as err: # Statement errors happen if the id is invalid - raise HTTPNotFound() from err + raise HTTPNotFound() from err # noqa: RSE102 diff --git a/h/views/admin/organizations.py b/h/views/admin/organizations.py index a42823b93e4..5bafa5fe16c 100644 --- a/h/views/admin/organizations.py +++ b/h/views/admin/organizations.py @@ -60,7 +60,7 @@ def on_success(appstruct): self.request.db.add(organization) self.request.session.flash( - Markup(_("Created new organization {}".format(name))), # noqa: INT002 + Markup(_(f"Created new organization {name}")), # noqa: INT001 "success", ) @@ -106,7 +106,7 @@ def delete(self): self.request.response.status_int = 400 self.request.session.flash( _( - "Cannot delete organization because it is associated with {} groups".format( # noqa: INT002 + "Cannot delete organization because it is associated with {} groups".format( # noqa: INT002, UP032 group_count ) ), @@ -118,7 +118,7 @@ def delete(self): self.request.db.delete(self.organization) self.request.session.flash( _( - "Successfully deleted organization %s" % (self.organization.name), # noqa: INT003 + "Successfully deleted organization %s" % (self.organization.name), # noqa: INT003, UP031 "success", ) ) diff --git a/h/views/admin/search.py b/h/views/admin/search.py index 197623eecd8..f474202b851 100644 --- a/h/views/admin/search.py +++ b/h/views/admin/search.py @@ -61,7 +61,7 @@ def reindex_user(self): self.request.db, username, self.request.default_authority ) if not user: - raise NotFoundError(f"User {username} not 
found") + raise NotFoundError(f"User {username} not found") # noqa: EM102, TRY003 tasks.job_queue.add_annotations_from_user.delay( self.request.params["name"], @@ -85,7 +85,7 @@ def reindex_group(self): group = self.request.find_service(name="group").fetch_by_pubid(groupid) if not group: - raise NotFoundError(f"Group {groupid} not found") + raise NotFoundError(f"Group {groupid} not found") # noqa: EM102, TRY003 tasks.job_queue.add_annotations_from_group.delay( self.request.params["name"], diff --git a/h/views/admin/staff.py b/h/views/admin/staff.py index cec9ba1dd3d..89b677012a9 100644 --- a/h/views/admin/staff.py +++ b/h/views/admin/staff.py @@ -36,7 +36,7 @@ def staff_add(request): user = models.User.get_by_username(request.db, username, authority) if user is None: request.session.flash( - _("User {username} doesn't exist.".format(username=username)), # noqa: INT002 + _(f"User {username} doesn't exist."), # noqa: INT001 "error", ) else: diff --git a/h/views/admin/users.py b/h/views/admin/users.py index 419a344c9b1..e289d1fcf6b 100644 --- a/h/views/admin/users.py +++ b/h/views/admin/users.py @@ -86,7 +86,7 @@ def users_activate(request): user.activate() request.session.flash( - Markup(_("User {name} has been activated!".format(name=user.username))), # noqa: INT002 + Markup(_(f"User {user.username} has been activated!")), # noqa: INT001 "success", ) @@ -172,6 +172,6 @@ def _form_request_user(request): user = user_service.fetch(userid) if user is None: - raise UserNotFoundError(f"Could not find user with userid {userid}") + raise UserNotFoundError(f"Could not find user with userid {userid}") # noqa: EM102, TRY003 return user diff --git a/h/views/api/analytics.py b/h/views/api/analytics.py index c49007314cc..30bf00f9c93 100644 --- a/h/views/api/analytics.py +++ b/h/views/api/analytics.py @@ -2,7 +2,7 @@ from pyramid.request import Request from h.schemas.analytics import CreateEventSchema -from h.services.analytics import AnalyticsService +from h.services.analytics import AnalyticsService # noqa: TC001 from h.views.api.config import api_config diff --git a/h/views/api/annotations.py b/h/views/api/annotations.py index 424f0cd9aa2..df8e512ab81 100644 --- a/h/views/api/annotations.py +++ b/h/views/api/annotations.py @@ -162,7 +162,7 @@ def delete(context, request): annotation_delete_service = request.find_service(name="annotation_delete") annotation_delete_service.delete(context.annotation) - # TODO: Track down why we don't return an HTTP 204 like other DELETEs + # TODO: Track down why we don't return an HTTP 204 like other DELETEs # noqa: FIX002, TD002, TD003 return {"id": context.annotation.id, "deleted": True} diff --git a/h/views/api/auth.py b/h/views/api/auth.py index 3b1233cea84..54bb97af10d 100644 --- a/h/views/api/auth.py +++ b/h/views/api/auth.py @@ -172,15 +172,15 @@ def _authorized_response(self): return HTTPFound(location=headers["Location"]) except KeyError as err: # pragma: no cover client_id = self.request.params.get("client_id") - raise RuntimeError( - f'created authorisation code for client "{client_id}" but got no redirect location' + raise RuntimeError( # noqa: TRY003 + f'created authorisation code for client "{client_id}" but got no redirect location' # noqa: EM102 ) from err @classmethod def _render_web_message_response(cls, redirect_uri): location = urlparse(redirect_uri) params = parse_qs(location.query) - origin = "{url.scheme}://{url.netloc}".format(url=location) + origin = f"{location.scheme}://{location.netloc}" state = None states = params.get("state", []) @@ -239,13 
+239,15 @@ def debug_token(request): bearer_token = svc.get_bearer_token(request) if not bearer_token: - raise OAuthTokenError( - "Bearer token is missing in Authorization HTTP header", "missing_token" + raise OAuthTokenError( # noqa: TRY003 + "Bearer token is missing in Authorization HTTP header", # noqa: EM101 + "missing_token", ) if not svc.validate(bearer_token): - raise OAuthTokenError( - "Bearer token does not exist or is expired", "missing_token" + raise OAuthTokenError( # noqa: TRY003 + "Bearer token does not exist or is expired", # noqa: EM101 + "missing_token", ) token = svc.fetch(bearer_token) diff --git a/h/views/api/bulk/_ndjson.py b/h/views/api/bulk/_ndjson.py index 11d58157a0d..8aba352140f 100644 --- a/h/views/api/bulk/_ndjson.py +++ b/h/views/api/bulk/_ndjson.py @@ -1,11 +1,11 @@ import json +from collections.abc import Iterable from itertools import chain -from typing import Iterable, Optional from pyramid.response import Response -def get_ndjson_response(results: Optional[Iterable]) -> Response: +def get_ndjson_response(results: Iterable | None) -> Response: """ Create a streaming response for an NDJSON based end-point. diff --git a/h/views/api/bulk/annotation.py b/h/views/api/bulk/annotation.py index a621aff650e..1e081d20718 100644 --- a/h/views/api/bulk/annotation.py +++ b/h/views/api/bulk/annotation.py @@ -51,7 +51,7 @@ def bulk_annotation(request): raise ValidationError(str(err)) from err return get_ndjson_response( - (_present_annotation(annotation) for annotation in annotations) + _present_annotation(annotation) for annotation in annotations ) diff --git a/h/views/api/config.py b/h/views/api/config.py index f160482217e..28e7298e28a 100644 --- a/h/views/api/config.py +++ b/h/views/api/config.py @@ -27,13 +27,13 @@ ) -def add_api_view( +def add_api_view( # noqa: PLR0913 config, view, versions, link_name=None, description=None, - enable_preflight=True, + enable_preflight=True, # noqa: FBT002 subtype="json", **settings, ): @@ -122,7 +122,7 @@ def wrapper(wrapped): # pragma: no cover # Support use as a class method decorator. # Taken from Pyramid's `view_config` decorator implementation. 
- if info.scope == "class": + if info.scope == "class": # noqa: SIM102 if settings.get("attr") is None: settings["attr"] = wrapped.__name__ diff --git a/h/views/api/decorators/__init__.py b/h/views/api/decorators/__init__.py index a866ec9ed3f..0dc3947fad9 100644 --- a/h/views/api/decorators/__init__.py +++ b/h/views/api/decorators/__init__.py @@ -6,8 +6,8 @@ from h.views.api.decorators.response import version_media_type_header __all__ = ( - "unauthorized_to_not_found", "normalize_not_found", + "unauthorized_to_not_found", "validate_media_types", "version_media_type_header", ) diff --git a/h/views/api/decorators/client_errors.py b/h/views/api/decorators/client_errors.py index b15d998cfb1..745a842615a 100644 --- a/h/views/api/decorators/client_errors.py +++ b/h/views/api/decorators/client_errors.py @@ -10,7 +10,7 @@ def unauthorized_to_not_found(wrapped): def wrapper(_context, request): # We convert all 403s to 404s—replace the current context with a 404 - # FIXME: We should be more nuanced about when we do this + # FIXME: We should be more nuanced about when we do this # noqa: FIX001, TD001, TD002, TD003 response = wrapped(_standard_not_found(), request) return response @@ -33,9 +33,9 @@ def validate_media_types(wrapped): def wrapper(context, request): # If Accept has been set - if request.accept: + if request.accept: # noqa: SIM102 # At least one of the media types in Accept must be known to the app - if not any((t in valid_media_types() for t in request.accept)): + if not any(t in valid_media_types() for t in request.accept): # If no Accept media types are known, convert to a 406 error context = HTTPNotAcceptable("Not acceptable") response = wrapped(context, request) diff --git a/h/views/api/group_members.py b/h/views/api/group_members.py index eb85e90c5ed..5b27140a9ac 100644 --- a/h/views/api/group_members.py +++ b/h/views/api/group_members.py @@ -117,7 +117,7 @@ def remove_member(context: GroupMembershipContext, request): ) def add_member(context: AddGroupMembershipContext, request): if context.user.authority != context.group.authority: - raise HTTPNotFound() + raise HTTPNotFound() # noqa: RSE102 if request.body: appstruct = EditGroupMembershipAPISchema().validate(json_payload(request)) @@ -151,7 +151,7 @@ def edit_member(context: EditGroupMembershipContext, request): context.new_roles = appstruct["roles"] if not request.has_permission(Permission.Group.MEMBER_EDIT, context): - raise HTTPNotFound() + raise HTTPNotFound() # noqa: RSE102 if context.membership.roles != context.new_roles: old_roles = context.membership.roles diff --git a/h/views/api/groups.py b/h/views/api/groups.py index 8334b38e3c4..cd765ea90ba 100644 --- a/h/views/api/groups.py +++ b/h/views/api/groups.py @@ -73,13 +73,13 @@ def create(request): if group_type == "private": method = group_create_service.create_private_group else: - assert group_type in ("restricted", "open") + assert group_type in ("restricted", "open") # noqa: S101 kwargs["scopes"] = [] if group_type == "restricted": method = group_create_service.create_restricted_group else: - assert group_type == "open" + assert group_type == "open" # noqa: S101 method = group_create_service.create_open_group group = method(**kwargs) diff --git a/h/views/api/helpers/cors.py b/h/views/api/helpers/cors.py index 654a3c2eb96..24e1d47c52a 100644 --- a/h/views/api/helpers/cors.py +++ b/h/views/api/helpers/cors.py @@ -3,7 +3,7 @@ def policy( - allow_credentials=False, + allow_credentials=False, # noqa: FBT002 allow_headers=None, allow_methods=None, expose_headers=None, @@ 
-44,10 +44,10 @@ def wrapper(context, request): return cors_decorator -def set_cors_headers( +def set_cors_headers( # noqa: PLR0913 request, response, - allow_credentials=False, + allow_credentials=False, # noqa: FBT002 allow_headers=None, allow_methods=None, expose_headers=None, @@ -64,7 +64,7 @@ def raise_bad_request(missing_header): # Don't raise an exception if Pyramid is already processing an # exception view, because that will cause Pyramid to crash. return response - raise HTTPBadRequest(f"CORS preflight request lacks {missing_header} header.") + raise HTTPBadRequest(f"CORS preflight request lacks {missing_header} header.") # noqa: EM102, TRY003 # Otherwise, we're dealing with a CORS preflight request, which, # according to the spec: diff --git a/h/views/api/helpers/json_payload.py b/h/views/api/helpers/json_payload.py index a43073ff92f..7f0f379a9d8 100644 --- a/h/views/api/helpers/json_payload.py +++ b/h/views/api/helpers/json_payload.py @@ -10,4 +10,4 @@ def json_payload(request): try: return request.json_body except ValueError as err: - raise PayloadError() from err + raise PayloadError() from err # noqa: RSE102 diff --git a/h/views/api/helpers/media_types.py b/h/views/api/helpers/media_types.py index 02f441fc6a6..d77eb9e9645 100644 --- a/h/views/api/helpers/media_types.py +++ b/h/views/api/helpers/media_types.py @@ -30,7 +30,7 @@ def valid_media_types(): :rtype: list(str) """ - valid_types = ["*/*", "application/json"] + version_media_types() + valid_types = ["*/*", "application/json"] + version_media_types() # noqa: RUF005 return valid_types @@ -46,5 +46,5 @@ def version_media_types(versions=None): versions = versions or API_VERSIONS version_types = [] for version in versions: - version_types.append(media_type_for_version(version)) + version_types.append(media_type_for_version(version)) # noqa: PERF401 return version_types diff --git a/h/views/api/profile.py b/h/views/api/profile.py index 1448bab6ca8..a6488433db7 100644 --- a/h/views/api/profile.py +++ b/h/views/api/profile.py @@ -1,4 +1,4 @@ -from pyramid.httpexceptions import HTTPBadRequest +from pyramid.httpexceptions import HTTPBadRequest # noqa: A005 from h import session as h_session from h.presenters import GroupsJSONPresenter @@ -53,7 +53,7 @@ def update_preferences(request): preferences = request.json_body.get("preferences", {}) svc = request.find_service(name="user") - # TODO: The following exception doesn't match convention for validation + # TODO: The following exception doesn't match convention for validation # noqa: FIX002, TD002, TD003 # used in other endpoints try: svc.update_preferences(request.user, **preferences) diff --git a/h/views/api/users.py b/h/views/api/users.py index e9888929733..7b723ef2c05 100644 --- a/h/views/api/users.py +++ b/h/views/api/users.py @@ -58,8 +58,8 @@ def create(request): # Enforce authority match client_authority = request.identity.auth_client.authority if appstruct["authority"] != client_authority: - raise ValidationError( - f"""authority '{appstruct["authority"]}' does not match client authority""" + raise ValidationError( # noqa: TRY003 + f"""authority '{appstruct["authority"]}' does not match client authority""" # noqa: EM102 ) user_unique_service = request.find_service(name="user_unique") diff --git a/h/views/badge.py b/h/views/badge.py index 43bd4736ec3..19f22c27662 100644 --- a/h/views/badge.py +++ b/h/views/badge.py @@ -83,12 +83,12 @@ def badge(request): that there are 0 annotations. """ # Disable NewRelic for this function. 
- # newrelic.agent.ignore_transaction(flag=True) + # newrelic.agent.ignore_transaction(flag=True) # noqa: ERA001 uri = request.params.get("uri") if not uri: - raise httpexceptions.HTTPBadRequest() + raise httpexceptions.HTTPBadRequest() # noqa: RSE102 if Blocklist.is_blocked(uri): count = 0 diff --git a/h/views/home.py b/h/views/home.py index 4eb634a23eb..790f3c4709e 100644 --- a/h/views/home.py +++ b/h/views/home.py @@ -9,7 +9,7 @@ def via_redirect(_context, request): # pragma: no cover url = request.params.get("url") if url is None: - raise httpexceptions.HTTPBadRequest('"url" parameter missing') + raise httpexceptions.HTTPBadRequest('"url" parameter missing') # noqa: EM101, TRY003 via_link = f"https://via.hypothes.is/{url}" raise httpexceptions.HTTPFound(location=via_link) diff --git a/h/views/main.py b/h/views/main.py index 79cce02a33f..4bf3d673664 100644 --- a/h/views/main.py +++ b/h/views/main.py @@ -103,7 +103,7 @@ def stream_user_redirect(request): # The client generates /u/ links which include the full account ID if user.startswith("acct:"): - try: + try: # noqa: SIM105 user = split_user(user)["username"] except InvalidUserId: # If it's not a valid userid, catch the exception and just treat diff --git a/h/views/notification.py b/h/views/notification.py index a3e650169bd..791d8fff289 100644 --- a/h/views/notification.py +++ b/h/views/notification.py @@ -21,6 +21,6 @@ def unsubscribe(request): token=request.matchdict["token"] ) except InvalidUnsubscribeToken as err: - raise HTTPNotFound() from err + raise HTTPNotFound() from err # noqa: RSE102 return {} diff --git a/h/views/organizations.py b/h/views/organizations.py index a008a169af4..54c98b3b32b 100644 --- a/h/views/organizations.py +++ b/h/views/organizations.py @@ -9,4 +9,4 @@ def organization_logo(context, _request): if logo := context.organization.logo: return logo - raise NotFound() + raise NotFound() # noqa: RSE102 diff --git a/h/views/status.py b/h/views/status.py index aa612b5290c..90721ac4052 100644 --- a/h/views/status.py +++ b/h/views/status.py @@ -14,15 +14,15 @@ def status(request): try: request.db.execute(text("SELECT 1")) except Exception as err: - log.exception(err) - raise HTTPInternalServerError("Database connection failed") from err + log.exception(err) # noqa: TRY401 + raise HTTPInternalServerError("Database connection failed") from err # noqa: EM101, TRY003 if "replica" in request.params: try: request.db_replica.execute(text("SELECT 1")) except Exception as err: - log.exception(err) - raise HTTPInternalServerError("Replica database connection failed") from err + log.exception(err) # noqa: TRY401 + raise HTTPInternalServerError("Replica database connection failed") from err # noqa: EM101, TRY003 if "sentry" in request.params: capture_message("Test message from h's status view") diff --git a/pyproject.toml b/pyproject.toml index 724b249591e..2432cbdb2be 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -27,49 +27,9 @@ target-version = "py311" [tool.ruff.lint] select = ["ALL"] ignore = [ - "UP", # pyupgrade - "YTT", # flake8-2020 (checks for misuse of sys.version or sys.version_info "ANN", # flake8-annotations (checks for absence of type annotations on functions) - "ASYNC", # flake8-async (checks for asyncio-related problems) - "S", # flake8-bandit (checks for security issues) - "FBT", # flake8-boolean-trap (checks for the "boolean trap" anti-pattern) - "B", # flake8-bugbear (checks for bugs and design problems) - "A", # flake8-builtins (checks for builtins being overridden) "CPY", # flake8-copyright (checks 
for missing copyright notices) - "C4", # flake8-comprehensions (helps write better list/set/dict comprehensions) - "DTZ", # flake8-datetimez (checks for usages of unsafe naive datetime class) - "T10", # flake8-debugger (checks for set traces etc) - "EM", # flake8-errmsg (checks for error message formatting issues) - "EXE", # flake8-executable (checks for incorrect executable permissions and shebangs) - "FA", # flake8-future-annotations (checks for missing from __future__ import annotations) - "ISC", # flake8-implicit-str-concat (checks for style problems with string literal concatenation) - "ICN", # flake8-import-conventions (checks for unconventional imports and aliases) - "LOG", # flake8-logging (checks for issues with using the logging module) - "G", # flake8-logging-format (enforce usage of `extra` in logging calls) - "INP", # flake8-no-pep420 (checks for missing __init__.py files) - "PIE", # flake8-pie (miscellaneous) - "T20", # flake8-print (checks for print and pprint statements) - "PT", # flake8-pytest-style (checks for common pytest style and consistency issues) - "RSE", # flake8-raise (checks for issues with raising exceptions) - "RET", # flake8-return (checks for issues with return values) - "SLOT", # flake8-slots (requires __slots__ in subclasses of immutable types) - "SIM", # flake8-simplify (lots of code simplification checks) - "TID", # flake8-tidy-imports (checks for issues with imports) - "TC", # flake8-type-checking (checks for type checking imports that aren't in TYPE_CHECKING blocks) - "ARG", # flake8-unused-arguments (checks for unused arguments) - "PTH", # flake8-use-pathlib (checks for cases with pathlib could be used but isn't) - "TD", # flake8-todos (enforces good style for "# TODO" comments) - "FIX", # flake8-fixme (checks for FIXMEs, TODOs, HACKs, etc) - "ERA", # eradicate (checks for commented-out code) - "PGH", # pygrep-hooks (miscellaneous) - "PL", # pylint (miscellaneous rules from pylint) - "TRY", # tryceratops (various try/except-related checks) - "FLY", # flynt (checks for old-style %-formatted strings) - "PERF", # perflint (checks for performance anti-patterns) - "FURB", # refurb (various "refurbishing and modernizing" checks) - "DOC", # pydoclint (docstring checks) - "RUF", # Ruff-specific rules - "COM", # flake8-commas (we used a code formatter so we don't need a linter to check this) + "COM", # flake8-commas (we use a code formatter so we don't need a linter to check this) "D100","D101","D102","D103","D104","D105","D106","D107", # Missing docstrings. "D202", # "No blank lines allowed after function docstring" conflicts with the Ruff code formatter. # "Multi-line docstring summary should start at the first line" (D212) @@ -101,10 +61,23 @@ ignore = [ "PLR0913", # Too many arguments. Tests often have lots of arguments. "PLR0917", # Too many positional arguments. Tests often have lots of arguments. "PLR0904", # Too many public methods. Test classes often have lots of test methods. + "S101", # Use of `assert` detected. + "PT006", # Enforces a consistent style for the type of the `argnames` parameter to + # pytest.mark.parametrize. We have too many pre-existing violations of + # this. + "PT007", # Enforces a consistent style for the type of the `argvalues` parameter to + # pytest.mark.parametrize. We have too many pre-existing violations of + # this. ] "__init__.py" = [ "F401", # Ignore unused import errors on __init__ files to avoid having to add either a noqa stament or an __all__ declaration. 
] +"h/migrations/*" = [ + "INP001", +] +"bin/*" = [ + "INP001", +] [tool.coverage.run] branch = true diff --git a/tests/common/factories/annotation.py b/tests/common/factories/annotation.py index 90d6b034fd7..3b3e4b1161e 100644 --- a/tests/common/factories/annotation.py +++ b/tests/common/factories/annotation.py @@ -20,7 +20,7 @@ class Meta: "flush" # Always flush the db to generate annotation.id. ) - tags = factory.LazyFunction(lambda: list(FAKER.words(nb=random.randint(0, 5)))) + tags = factory.LazyFunction(lambda: list(FAKER.words(nb=random.randint(0, 5)))) # noqa: S311 target_uri = factory.Faker("uri") text = factory.Faker("paragraph") userid = factory.LazyFunction(lambda: f"acct:{FAKER.user_name()}@localhost") @@ -47,7 +47,7 @@ def target_selectors(self): ] @factory.post_generation - def make_metadata(self, create, extracted, **kwargs): + def make_metadata(self, create, extracted, **kwargs): # noqa: ARG002 """Create associated document metadata for the annotation.""" # The metadata objects are going to be added to the db, so if we're not # using the create strategy then simply don't make any. @@ -70,7 +70,7 @@ def document_uri_dict(): "content_type": document_uri.content_type, } - document_uri_dicts = [document_uri_dict() for _ in range(random.randint(1, 3))] + document_uri_dicts = [document_uri_dict() for _ in range(random.randint(1, 3))] # noqa: S311 def document_meta_dict(type_=None): """ @@ -92,7 +92,8 @@ def document_meta_dict(type_=None): } document_meta_dicts = [ - document_meta_dict() for _ in range(random.randint(1, 3)) + document_meta_dict() + for _ in range(random.randint(1, 3)) # noqa: S311 ] # Make sure that there's always at least one DocumentMeta with @@ -110,7 +111,7 @@ def document_meta_dict(type_=None): ) @factory.post_generation - def make_id(self, create, extracted, **kwargs): + def make_id(self, create, extracted, **kwargs): # noqa: ARG002 """Add a randomly ID if the annotation doesn't have one yet.""" # If using the create strategy don't generate an id. # models.Annotation.id's server_default function will generate one @@ -126,7 +127,7 @@ def make_id(self, create, extracted, **kwargs): self.id = URLSafeUUID().process_result_value(uuid.uuid4().hex, None) @factory.post_generation - def timestamps(self, create, extracted, **kwargs): + def timestamps(self, create, extracted, **kwargs): # noqa: ARG002 # If using the create strategy let sqlalchemy set the created and # updated times when saving to the DB. if create: @@ -139,5 +140,5 @@ def timestamps(self, create, extracted, **kwargs): # instead of just once) so created and updated won't be exactly the # same. This is consistent with how models.Annotation does it when # saving to the DB. 
- self.created = self.created or datetime.datetime.now() - self.updated = self.updated or datetime.datetime.now() + self.created = self.created or datetime.datetime.now() # noqa: DTZ005 + self.updated = self.updated or datetime.datetime.now() # noqa: DTZ005 diff --git a/tests/common/factories/auth_ticket.py b/tests/common/factories/auth_ticket.py index 2948970d653..1cd0925c7da 100644 --- a/tests/common/factories/auth_ticket.py +++ b/tests/common/factories/auth_ticket.py @@ -20,7 +20,7 @@ class Meta: ) user = factory.SubFactory(User) expires = factory.LazyAttribute( - lambda _: (datetime.utcnow() + timedelta(minutes=10)) + lambda _: (datetime.utcnow() + timedelta(minutes=10)) # noqa: DTZ003 ) @factory.lazy_attribute diff --git a/tests/common/factories/authz_code.py b/tests/common/factories/authz_code.py index 48bb90f0854..5cd053c1ee2 100644 --- a/tests/common/factories/authz_code.py +++ b/tests/common/factories/authz_code.py @@ -27,5 +27,5 @@ class Meta: authclient = factory.SubFactory(AuthClient) code = factory.LazyAttribute(generate_code) expires = factory.LazyAttribute( - lambda _: (datetime.utcnow() + timedelta(minutes=10)) + lambda _: (datetime.utcnow() + timedelta(minutes=10)) # noqa: DTZ003 ) diff --git a/tests/common/factories/base.py b/tests/common/factories/base.py index b7342036a92..2b7c48813e0 100644 --- a/tests/common/factories/base.py +++ b/tests/common/factories/base.py @@ -6,7 +6,7 @@ def set_session(value): - global SESSION + global SESSION # noqa: PLW0603 SESSION = value @@ -23,7 +23,7 @@ def _create(cls, model_class, *args, **kwargs): # which is dynamically filled out by the `factories` fixture when # used. if SESSION is None: - raise RuntimeError("no session: did you use the factories fixture?") + raise RuntimeError("no session: did you use the factories fixture?") # noqa: EM101, TRY003 obj = model_class(*args, **kwargs) SESSION.add(obj) if cls._meta.sqlalchemy_session_persistence == "flush": diff --git a/tests/common/factories/group.py b/tests/common/factories/group.py index b5573caa045..e4d1d203723 100644 --- a/tests/common/factories/group.py +++ b/tests/common/factories/group.py @@ -24,9 +24,9 @@ class Meta: enforce_scope = True @factory.post_generation - def scopes(self, create, scopes=0, **kwargs): + def scopes(self, create, scopes=0, **kwargs): # noqa: ARG002 if isinstance(scopes, int): - scopes = [GroupScope(group=self) for _ in range(0, scopes)] + scopes = [GroupScope(group=self) for _ in range(scopes)] self.scopes = scopes or [] diff --git a/tests/common/factories/job.py b/tests/common/factories/job.py index a8c30e93693..6679f40b051 100644 --- a/tests/common/factories/job.py +++ b/tests/common/factories/job.py @@ -41,7 +41,7 @@ class Meta: name = "sync_annotation" scheduled_at = LazyFunction( - lambda: datetime.datetime.utcnow() - datetime.timedelta(hours=1) + lambda: datetime.datetime.utcnow() - datetime.timedelta(hours=1) # noqa: DTZ003 ) kwargs = LazyAttribute( lambda o: { diff --git a/tests/common/factories/token.py b/tests/common/factories/token.py index 3238922b926..ff7d24107c8 100644 --- a/tests/common/factories/token.py +++ b/tests/common/factories/token.py @@ -1,4 +1,4 @@ -from datetime import datetime, timedelta +from datetime import datetime, timedelta # noqa: A005 import factory @@ -34,8 +34,8 @@ class Meta: refresh_token = factory.LazyAttribute( lambda _: (REFRESH_TOKEN_PREFIX + security.token_urlsafe()) ) - expires = factory.LazyAttribute(lambda _: (datetime.utcnow() + timedelta(hours=1))) + expires = factory.LazyAttribute(lambda _: 
(datetime.utcnow() + timedelta(hours=1))) # noqa: DTZ003 refresh_token_expires = factory.LazyAttribute( - lambda _: (datetime.utcnow() + timedelta(days=7)) + lambda _: (datetime.utcnow() + timedelta(days=7)) # noqa: DTZ003 ) authclient = factory.SubFactory(AuthClient) diff --git a/tests/common/fixtures/elasticsearch.py b/tests/common/fixtures/elasticsearch.py index 717a4864896..3f000af6ee9 100644 --- a/tests/common/fixtures/elasticsearch.py +++ b/tests/common/fixtures/elasticsearch.py @@ -10,7 +10,7 @@ ELASTICSEARCH_INDEX = os.environ["ELASTICSEARCH_INDEX"] ELASTICSEARCH_URL = os.environ.get("ELASTICSEARCH_URL", "http://localhost:9200") -__all__ = ("es_client", "mock_es_client", "init_elasticsearch") +__all__ = ("es_client", "init_elasticsearch", "mock_es_client") @pytest.fixture @@ -68,7 +68,7 @@ def maybe_delete_index(): es_client.conn.indices.delete(index=index) # Delete the test search index at the end of the test run. - request.addfinalizer(maybe_delete_index) + request.addfinalizer(maybe_delete_index) # noqa: PT021 # Delete the test search index at the start of the run, just in case it # was somehow left behind by a previous test run. diff --git a/tests/common/fixtures/services.py b/tests/common/fixtures/services.py index 6d008002ff7..13646b591e6 100644 --- a/tests/common/fixtures/services.py +++ b/tests/common/fixtures/services.py @@ -44,7 +44,6 @@ from h.services.user_update import UserUpdateService __all__ = ( - "mock_service", "analytics_service", "annotation_delete_service", "annotation_json_service", @@ -59,8 +58,6 @@ "bulk_group_service", "bulk_stats_service", "developer_token_service", - "links_service", - "list_organizations_service", "flag_service", "group_create_service", "group_delete_service", @@ -69,18 +66,21 @@ "group_members_service", "group_service", "group_update_service", - "nipsa_service", + "links_service", + "list_organizations_service", + "mock_service", "moderation_service", + "nipsa_service", "oauth_provider_service", "organization_service", - "search_index", "queue_service", + "search_index", "subscription_service", "url_migration_service", "user_delete_service", "user_password_service", - "user_service", "user_password_service", + "user_service", "user_signup_service", "user_unique_service", "user_update_service", @@ -89,7 +89,7 @@ @pytest.fixture def mock_service(pyramid_config): - def mock_service(service_class, name=None, iface=None, spec_set=True, **kwargs): + def mock_service(service_class, name=None, iface=None, spec_set=True, **kwargs): # noqa: FBT002 service = create_autospec( service_class, instance=True, spec_set=spec_set, **kwargs ) diff --git a/tests/conftest.py b/tests/conftest.py index 90ff0f9735f..21d760c5c46 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -46,4 +46,4 @@ def db_session(db_engine, db_sessionfactory): @pytest.fixture def db_session_replica(db_session): db_session.execute(text("SET SESSION CHARACTERISTICS AS TRANSACTION READ ONLY;")) - yield db_session + yield db_session # noqa: PT022 diff --git a/tests/functional/accounts_test.py b/tests/functional/accounts_test.py index 4f2aaf62717..eddb832ea91 100644 --- a/tests/functional/accounts_test.py +++ b/tests/functional/accounts_test.py @@ -9,7 +9,7 @@ def test_submit_email_form_without_xhr_returns_full_html_page(self, app): email_form = res.forms["email"] email_form["email"] = "new_email1@example.com" - email_form["password"] = "pass" + email_form["password"] = "pass" # noqa: S105 res = email_form.submit().follow() @@ -20,7 +20,7 @@ def 
test_submit_email_form_with_xhr_returns_partial_html_snippet(self, app): email_form = res.forms["email"] email_form["email"] = "new_email2@example.com" - email_form["password"] = "pass" + email_form["password"] = "pass" # noqa: S105 res = email_form.submit(xhr=True, status=200) @@ -31,7 +31,7 @@ def test_submit_email_form_with_xhr_returns_plain_text(self, app): email_form = res.forms["email"] email_form["email"] = "new_email3@example.com" - email_form["password"] = "pass" + email_form["password"] = "pass" # noqa: S105 res = email_form.submit(xhr=True) @@ -41,9 +41,9 @@ def test_submit_password_form_without_xhr_returns_full_html_page(self, app): res = app.get("/account/settings") password_form = res.forms["password"] - password_form["password"] = "pass" - password_form["new_password"] = "new_password" - password_form["new_password_confirm"] = "new_password" + password_form["password"] = "pass" # noqa: S105 + password_form["new_password"] = "new_password" # noqa: S105 + password_form["new_password_confirm"] = "new_password" # noqa: S105 res = password_form.submit().follow() @@ -53,9 +53,9 @@ def test_submit_password_form_with_xhr_returns_partial_html_snippet(self, app): res = app.get("/account/settings") password_form = res.forms["password"] - password_form["password"] = "pass" - password_form["new_password"] = "new_password" - password_form["new_password_confirm"] = "new_password" + password_form["password"] = "pass" # noqa: S105 + password_form["new_password"] = "new_password" # noqa: S105 + password_form["new_password_confirm"] = "new_password" # noqa: S105 res = password_form.submit(xhr=True) @@ -65,9 +65,9 @@ def test_submit_password_form_with_xhr_returns_plain_text(self, app): res = app.get("/account/settings") password_form = res.forms["password"] - password_form["password"] = "pass" - password_form["new_password"] = "new_password" - password_form["new_password_confirm"] = "new_password" + password_form["password"] = "pass" # noqa: S105 + password_form["new_password"] = "new_password" # noqa: S105 + password_form["new_password_confirm"] = "new_password" # noqa: S105 res = password_form.submit(xhr=True) @@ -77,9 +77,9 @@ def test_submit_invalid_password_form_with_xhr_returns_400(self, app): res = app.get("/account/settings") password_form = res.forms["password"] - password_form["password"] = "pass" - password_form["new_password"] = "new_password" - password_form["new_password_confirm"] = "WRONG" + password_form["password"] = "pass" # noqa: S105 + password_form["new_password"] = "new_password" # noqa: S105 + password_form["new_password_confirm"] = "WRONG" # noqa: S105 password_form.submit(xhr=True, status=400) @@ -87,7 +87,7 @@ def test_submit_invalid_password_form_with_xhr_returns_400(self, app): def user(self, db_session, factories): # Password is 'pass' user = factories.User( - password="$2b$12$21I1LjTlGJmLXzTDrQA8gusckjHEMepTmLY5WN3Kx8hSaqEEKj9V6" + password="$2b$12$21I1LjTlGJmLXzTDrQA8gusckjHEMepTmLY5WN3Kx8hSaqEEKj9V6" # noqa: S106 ) db_session.commit() return user @@ -96,6 +96,6 @@ def user(self, db_session, factories): def app(self, app, user): res = app.get("/login") res.form["username"] = user.username - res.form["password"] = "pass" + res.form["password"] = "pass" # noqa: S105 res.form.submit() return app diff --git a/tests/functional/api/annotations_test.py b/tests/functional/api/annotations_test.py index d38d34695f7..10557a16120 100644 --- a/tests/functional/api/annotations_test.py +++ b/tests/functional/api/annotations_test.py @@ -83,7 +83,7 @@ def 
test_it_returns_http_404_for_private_annotation_when_unauthorized( class TestPostAnnotation: def test_it_returns_http_404_if_unauthorized(self, app): - # FIXME: This should return a 403 + # FIXME: This should return a 403 # noqa: FIX001, TD001, TD002, TD003 # This isn't a valid payload, but it won't get validated because the # authorization will fail first @@ -131,7 +131,7 @@ def test_it_returns_http_400_if_group_forbids_write( assert res.status_code == 400 assert res.json["reason"].startswith("group:") - # TODO: This endpoint should return a 201 + # TODO: This endpoint should return a 201 # noqa: FIX002, TD002, TD003 def test_it_returns_http_200_when_annotation_created(self, app, user_with_token): _, token = user_with_token diff --git a/tests/functional/api/api_test.py b/tests/functional/api/api_test.py index ea754aedc60..19e16718656 100644 --- a/tests/functional/api/api_test.py +++ b/tests/functional/api/api_test.py @@ -9,8 +9,8 @@ def test_cors_preflight(self, app): # Note that no `Authorization` header is set. origin = "https://custom-client.herokuapp.com" headers = { - "Access-Control-Request-Headers": str("authorization,content-type"), - "Access-Control-Request-Method": str("POST"), + "Access-Control-Request-Headers": "authorization,content-type", + "Access-Control-Request-Method": "POST", "Origin": str(origin), } diff --git a/tests/functional/api/bulk/action_test.py b/tests/functional/api/bulk/action_test.py index 235c16f2a52..0a821ba9467 100644 --- a/tests/functional/api/bulk/action_test.py +++ b/tests/functional/api/bulk/action_test.py @@ -127,7 +127,7 @@ def commands(self, user): ] for i in range(group_count): - commands.append( + commands.append( # noqa: PERF401 CommandBuilder.group.upsert( { "authority": self.AUTHORITY, @@ -139,7 +139,7 @@ def commands(self, user): ) for i in range(group_count): - commands.append( + commands.append( # noqa: PERF401 CommandBuilder.group_membership.create("user_ref", f"group_ref_{i}") ) @@ -158,7 +158,7 @@ def nd_json(self, commands): @pytest.fixture(params=[None, "token", "non_lms_auth"]) def bad_header(self, request, token_auth_header, auth_header): - yield {"token": token_auth_header, "non_lms_auth": auth_header}.get( + yield {"token": token_auth_header, "non_lms_auth": auth_header}.get( # noqa: PT022 request.param, request.param ) diff --git a/tests/functional/api/bulk/annotation_test.py b/tests/functional/api/bulk/annotation_test.py index 57eb4dd7a97..236583c2a60 100644 --- a/tests/functional/api/bulk/annotation_test.py +++ b/tests/functional/api/bulk/annotation_test.py @@ -71,7 +71,7 @@ def test_it_accepts_a_valid_request(self, make_request, factories): @pytest.fixture def make_request(self, app, auth_header_for_authority): - def make_request(json_body=None, expect_errors=False, headers=None): + def make_request(json_body=None, expect_errors=False, headers=None): # noqa: FBT002 return app.post( "/api/bulk/annotation", json.dumps(json_body or {}), diff --git a/tests/functional/api/conftest.py b/tests/functional/api/conftest.py index a75dd03e0be..9c6e8ac12d3 100644 --- a/tests/functional/api/conftest.py +++ b/tests/functional/api/conftest.py @@ -28,7 +28,7 @@ def _make_headers(authority): ) db_session.commit() - user_pass = f"{auth_client.id}:{auth_client.secret}".encode("utf-8") + user_pass = f"{auth_client.id}:{auth_client.secret}".encode() encoded = base64.standard_b64encode(user_pass).decode("ascii") return {"Authorization": f"Basic {encoded}"} diff --git a/tests/functional/api/errors_test.py b/tests/functional/api/errors_test.py index 
74f6720c9c6..d1de2a5a0cf 100644 --- a/tests/functional/api/errors_test.py +++ b/tests/functional/api/errors_test.py @@ -29,7 +29,7 @@ def test_it_400s_for_create_group_if_groupid_set_on_default_authority( headers = append_token_auth() res = app.post_json("/api/groups", group, headers=headers, expect_errors=True) reason = res.json["reason"] - # FIXME: The `reason` is double-escaped + # FIXME: The `reason` is double-escaped # noqa: FIX001, TD001, TD002, TD003 expected = ( "groupid: '3434kjkjk'" ' does not match "^group:([a-zA-Z0-9._\\\\-+!~*()\']{1,1024})@(.*)$"' @@ -40,7 +40,7 @@ def test_it_400s_for_create_group_if_groupid_set_on_default_authority( class Test404Errors: - # TODO: Some of these 404s should really be 403s + # TODO: Some of these 404s should really be 403s # noqa: FIX002, TD002, TD003 reason_message = ( "Either the resource you requested doesn't exist," " or you are not currently authorized to see it." diff --git a/tests/functional/api/groups/__init__.py b/tests/functional/api/groups/__init__.py new file mode 100644 index 00000000000..e69de29bb2d diff --git a/tests/functional/api/groups/create_test.py b/tests/functional/api/groups/create_test.py index 4e14b6ff74f..1ea38d5a11c 100644 --- a/tests/functional/api/groups/create_test.py +++ b/tests/functional/api/groups/create_test.py @@ -49,7 +49,7 @@ def test_it_returns_http_400_if_groupid_set_on_default_authority( def test_it_returns_http_404_if_no_authenticated_user( self, app, auth_client_header, group_payload ): - # FIXME: This should return a 403 + # FIXME: This should return a 403 # noqa: FIX001, TD001, TD002, TD003 res = app.post_json( "/api/groups", group_payload, headers=auth_client_header, expect_errors=True ) @@ -107,7 +107,7 @@ def test_it_returns_HTTP_Conflict_if_groupid_is_duplicate( def test_it_returns_http_404_with_invalid_forwarded_user_format( self, app, auth_client_header ): - # FIXME: This should return a 403 + # FIXME: This should return a 403 # noqa: FIX001, TD001, TD002, TD003 headers = auth_client_header headers["X-Forwarded-User"] = "floopflarp" group = {} @@ -142,9 +142,7 @@ def auth_client(db_session, factories): @pytest.fixture def auth_client_header(auth_client): - user_pass = "{client_id}:{secret}".format( - client_id=auth_client.id, secret=auth_client.secret - ) + user_pass = f"{auth_client.id}:{auth_client.secret}" encoded = base64.standard_b64encode(user_pass.encode("utf-8")) return {"Authorization": "Basic {creds}".format(creds=encoded.decode("ascii"))} @@ -161,4 +159,4 @@ def user_with_token(db_session, factories): @pytest.fixture def token_auth_header(user_with_token): user, token = user_with_token - return {"Authorization": "Bearer {}".format(token.value)} + return {"Authorization": f"Bearer {token.value}"} diff --git a/tests/functional/api/groups/members_test.py b/tests/functional/api/groups/members_test.py index cb868bb3e84..49f37de736d 100644 --- a/tests/functional/api/groups/members_test.py +++ b/tests/functional/api/groups/members_test.py @@ -17,8 +17,8 @@ def test_it_returns_list_of_members_for_restricted_group_without_authn( memberships=[ GroupMembership( user=user, - created=datetime(1970, 1, 1, 0, 0, second), - updated=datetime(1970, 1, 2, 0, 0, second), + created=datetime(1970, 1, 1, 0, 0, second), # noqa: DTZ001 + updated=datetime(1970, 1, 2, 0, 0, second), # noqa: DTZ001 ) for second, user in enumerate(factories.User.create_batch(size=3)) ] @@ -26,7 +26,7 @@ def test_it_returns_list_of_members_for_restricted_group_without_authn( db_session.commit() res = app.get( - 
"/api/groups/{pubid}/members".format(pubid=group.pubid), + f"/api/groups/{group.pubid}/members", headers={"User-Agent": "test_user_agent", "Referer": "test_referer"}, ) @@ -58,20 +58,20 @@ def test_it_returns_list_of_members_if_user_has_access_to_private_group( [ GroupMembership( user=user, - created=datetime(1970, 1, 1, 0, 0, 0), - updated=datetime(1970, 1, 1, 0, 0, 1), + created=datetime(1970, 1, 1, 0, 0, 0), # noqa: DTZ001 + updated=datetime(1970, 1, 1, 0, 0, 1), # noqa: DTZ001 ), GroupMembership( user=other_user, - created=datetime(1971, 1, 2, 0, 0, 0), - updated=datetime(1971, 1, 2, 0, 0, 1), + created=datetime(1971, 1, 2, 0, 0, 0), # noqa: DTZ001 + updated=datetime(1971, 1, 2, 0, 0, 1), # noqa: DTZ001 ), ] ) db_session.commit() res = app.get( - "/api/groups/{pubid}/members".format(pubid=group.pubid), + f"/api/groups/{group.pubid}/members", headers=token_authorization_header(token), ) @@ -106,7 +106,7 @@ def test_it_returns_404_if_user_does_not_have_read_access_to_group( db_session.commit() res = app.get( - "/api/groups/{pubid}/members".format(pubid=group.pubid), + f"/api/groups/{group.pubid}/members", headers=token_authorization_header(factories.DeveloperToken()), expect_errors=True, ) @@ -127,8 +127,8 @@ def test_it_returns_list_of_members_for_restricted_group_without_auth( memberships=[ GroupMembership( user=user, - created=datetime(1970, 1, 1, 0, 0, second), - updated=datetime(1970, 1, 2, 0, 0, second), + created=datetime(1970, 1, 1, 0, 0, second), # noqa: DTZ001 + updated=datetime(1970, 1, 2, 0, 0, second), # noqa: DTZ001 ) for second, user in enumerate(factories.User.create_batch(size=9)) ] @@ -136,7 +136,7 @@ def test_it_returns_list_of_members_for_restricted_group_without_auth( db_session.commit() res = app.get( - "/api/groups/{pubid}/members".format(pubid=group.pubid), + f"/api/groups/{group.pubid}/members", params={"page[number]": 2, "page[size]": 3}, headers={"User-Agent": "test_user_agent", "Referer": "test_referer"}, ) @@ -169,20 +169,20 @@ def test_it_returns_list_of_members_if_user_has_access_to_private_group( [ GroupMembership( user=user, - created=datetime(1970, 1, 1, 0, 0, 0), - updated=datetime(1970, 1, 1, 0, 0, 1), + created=datetime(1970, 1, 1, 0, 0, 0), # noqa: DTZ001 + updated=datetime(1970, 1, 1, 0, 0, 1), # noqa: DTZ001 ), GroupMembership( user=other_user, - created=datetime(1971, 1, 2, 0, 0, 0), - updated=datetime(1971, 1, 2, 0, 0, 1), + created=datetime(1971, 1, 2, 0, 0, 0), # noqa: DTZ001 + updated=datetime(1971, 1, 2, 0, 0, 1), # noqa: DTZ001 ), ] ) db_session.commit() res = app.get( - "/api/groups/{pubid}/members".format(pubid=group.pubid), + f"/api/groups/{group.pubid}/members", params={"page[number]": 1}, headers=token_authorization_header(token), ) @@ -221,8 +221,8 @@ def test_it_returns_empty_list_if_page_number_beyond_last_page( memberships=[ GroupMembership( user=user, - created=datetime(1970, 1, 1, 0, 0, second), - updated=datetime(1970, 1, 2, 0, 0, second), + created=datetime(1970, 1, 1, 0, 0, second), # noqa: DTZ001 + updated=datetime(1970, 1, 2, 0, 0, second), # noqa: DTZ001 ) for second, user in enumerate(factories.User.create_batch(size=2)) ] @@ -230,7 +230,7 @@ def test_it_returns_empty_list_if_page_number_beyond_last_page( db_session.commit() res = app.get( - "/api/groups/{pubid}/members".format(pubid=group.pubid), + f"/api/groups/{group.pubid}/members", params={"page[number]": 2, "page[size]": 10}, headers={"User-Agent": "test_user_agent", "Referer": "test_referer"}, ) @@ -245,7 +245,7 @@ def 
test_it_returns_404_if_user_does_not_have_read_access_to_group( db_session.commit() res = app.get( - "/api/groups/{pubid}/members".format(pubid=group.pubid), + f"/api/groups/{group.pubid}/members", params={"page[number]": 1}, headers=token_authorization_header(factories.DeveloperToken()), expect_errors=True, @@ -271,7 +271,7 @@ def test_it_returns_an_error_if_number_and_size_are_invalid( db_session.commit() res = app.get( - "/api/groups/{pubid}/members".format(pubid=group.pubid), + f"/api/groups/{group.pubid}/members", params={"page[number]": 0, "page[size]": 0}, headers=token_authorization_header(token), expect_errors=True, @@ -285,7 +285,7 @@ def test_it_returns_an_error_if_number_and_size_are_invalid( class TestGetMember: - def test_it(self, app, db_session, do_request, group, target_user): + def test_it(self, app, db_session, do_request, group, target_user): # noqa: ARG002 response = do_request() assert response.json == { @@ -357,8 +357,8 @@ def target_user(self, factories, group): group.memberships.append( GroupMembership( user=target_user, - created=datetime(1970, 1, 1, 0, 0, 0), - updated=datetime(1970, 1, 1, 0, 0, 1), + created=datetime(1970, 1, 1, 0, 0, 0), # noqa: DTZ001 + updated=datetime(1970, 1, 1, 0, 0, 1), # noqa: DTZ001 ) ) return target_user @@ -370,8 +370,8 @@ def authenticated_user(self, factories, group): GroupMembership( user=authenticated_user, roles=[GroupMembershipRoles.OWNER], - created=datetime(1971, 1, 1, 0, 0, 0), - updated=datetime(1971, 1, 1, 0, 0, 1), + created=datetime(1971, 1, 1, 0, 0, 0), # noqa: DTZ001 + updated=datetime(1971, 1, 1, 0, 0, 1), # noqa: DTZ001 ) ) return authenticated_user @@ -381,7 +381,7 @@ def token(self, factories, authenticated_user): return factories.DeveloperToken(user=authenticated_user) @pytest.fixture - def headers(self, factories, token): + def headers(self, factories, token): # noqa: ARG002 return token_authorization_header(token) @pytest.fixture @@ -413,7 +413,7 @@ def test_it(self, do_request, group, user, json, expected_roles): assert membership.roles == expected_roles break else: - assert False, "No membership was created" + assert False, "No membership was created" # noqa: B011, PT015 def test_it_does_nothing_if_the_user_is_already_a_member_of_the_group( self, do_request, group, user @@ -443,7 +443,7 @@ def test_it_when_a_conflicting_membership_already_exists( def test_it_errors_if_the_pubid_is_unknown(self, do_request): do_request(pubid="UNKNOWN_PUBID", status=404) - def test_it_errors_if_the_userid_is_unknown(self, do_request, authclient): + def test_it_errors_if_the_userid_is_unknown(self, do_request, authclient): # noqa: ARG002 do_request(userid="acct:UNKOWN_USERNAME@{authclient.authority}", status=404) def test_it_errors_if_the_userid_is_invalid(self, do_request): @@ -528,8 +528,7 @@ def do_request(pubid=group.pubid, userid=user.userid, status=200, json=None): if json is None: return app.post(path, headers=headers, status=status) - else: - return app.post_json(path, json, headers=headers, status=status) + return app.post_json(path, json, headers=headers, status=status) return do_request @@ -541,9 +540,7 @@ def authclient(self, factories): @pytest.fixture def headers(self, authclient): - user_pass = "{client_id}:{secret}".format( - client_id=authclient.id, secret=authclient.secret - ) + user_pass = f"{authclient.id}:{authclient.secret}" encoded = base64.standard_b64encode(user_pass.encode("utf-8")) return {"Authorization": "Basic {creds}".format(creds=encoded.decode("ascii"))} @@ -878,7 +875,7 @@ def do_request(self, 
app, db_session, group, target_user, headers): def do_request( pubid=group.pubid, userid=target_user.userid, - json={"roles": ["member"]}, + json={"roles": ["member"]}, # noqa: B006 headers=headers, status=200, ): @@ -895,7 +892,7 @@ def do_request( def token_authorization_header(token) -> dict: """Return an Authorization header for the given developer token.""" - return {"Authorization": "Bearer {}".format(token.value)} + return {"Authorization": f"Bearer {token.value}"} @pytest.fixture diff --git a/tests/functional/api/groups/read_test.py b/tests/functional/api/groups/read_test.py index be1845c331e..e78e7f3c75e 100644 --- a/tests/functional/api/groups/read_test.py +++ b/tests/functional/api/groups/read_test.py @@ -7,7 +7,7 @@ class TestReadGroups: - # TODO: In subsequent versions of the API, this should really be a group + # TODO: In subsequent versions of the API, this should really be a group # noqa: FIX002, TD002, TD003 # search endpoint and should have its own functional test module def test_it_returns_world_group(self, app): @@ -61,7 +61,7 @@ def test_it_returns_http_200_for_world_readable_group_pubid( group = factories.OpenGroup() db_session.commit() - res = app.get("/api/groups/{pubid}".format(pubid=group.pubid)) + res = app.get(f"/api/groups/{group.pubid}") assert res.status_code == 200 @@ -83,9 +83,7 @@ def test_it_returns_http_404_for_private_group_no_authentication( group = factories.Group() db_session.commit() - res = app.get( - "/api/groups/{pubid}".format(pubid=group.pubid), expect_errors=True - ) + res = app.get(f"/api/groups/{group.pubid}", expect_errors=True) assert res.status_code == 404 @@ -96,9 +94,7 @@ def test_it_returns_http_200_for_private_group_with_creator_authentication( group = factories.Group(creator=user, memberships=[GroupMembership(user=user)]) db_session.commit() - res = app.get( - "/api/groups/{pubid}".format(pubid=group.pubid), headers=token_auth_header - ) + res = app.get(f"/api/groups/{group.pubid}", headers=token_auth_header) assert res.status_code == 200 @@ -110,9 +106,7 @@ def test_it_returns_http_200_for_private_group_with_member_authentication( group.memberships.append(GroupMembership(user=user)) db_session.commit() - res = app.get( - "/api/groups/{pubid}".format(pubid=group.pubid), headers=token_auth_header - ) + res = app.get(f"/api/groups/{group.pubid}", headers=token_auth_header) assert res.status_code == 200 @@ -123,7 +117,7 @@ def test_it_returns_http_404_for_private_group_if_token_user_not_creator( db_session.commit() res = app.get( - "/api/groups/{pubid}".format(pubid=group.pubid), + f"/api/groups/{group.pubid}", headers=token_auth_header, expect_errors=True, ) @@ -136,9 +130,7 @@ def test_it_returns_http_200_for_private_group_with_auth_client_matching_authori group = factories.Group(authority="thirdparty.com") db_session.commit() - res = app.get( - "/api/groups/{pubid}".format(pubid=group.pubid), headers=auth_client_header - ) + res = app.get(f"/api/groups/{group.pubid}", headers=auth_client_header) assert res.status_code == 200 @@ -149,7 +141,7 @@ def test_it_returns_http_404_for_private_group_with_auth_client_mismatched_autho db_session.commit() res = app.get( - "/api/groups/{pubid}".format(pubid=group.pubid), + f"/api/groups/{group.pubid}", headers=auth_client_header, expect_errors=True, ) @@ -168,9 +160,7 @@ def auth_client(db_session, factories): @pytest.fixture def auth_client_header(auth_client): - user_pass = "{client_id}:{secret}".format( - client_id=auth_client.id, secret=auth_client.secret - ) + user_pass = 
f"{auth_client.id}:{auth_client.secret}" encoded = base64.standard_b64encode(user_pass.encode("utf-8")) return {"Authorization": "Basic {creds}".format(creds=encoded.decode("ascii"))} @@ -187,4 +177,4 @@ def user_with_token(db_session, factories): @pytest.fixture def token_auth_header(user_with_token): user, token = user_with_token - return {"Authorization": "Bearer {}".format(token.value)} + return {"Authorization": f"Bearer {token.value}"} diff --git a/tests/functional/api/groups/update_test.py b/tests/functional/api/groups/update_test.py index faed7250907..f92bde7d5c8 100644 --- a/tests/functional/api/groups/update_test.py +++ b/tests/functional/api/groups/update_test.py @@ -31,7 +31,7 @@ def test_it_returns_http_200_with_valid_payload_and_user_token( db_session.commit() res = app.patch_json( - "/api/groups/{id}".format(id=first_party_group.pubid), + f"/api/groups/{first_party_group.pubid}", {"name": "Rename My Group"}, headers=token_auth_header, ) @@ -50,7 +50,7 @@ def test_it_does_not_update_group_if_empty_payload_and_user_token( db_session.commit() res = app.patch_json( - "/api/groups/{id}".format(id=first_party_group.pubid), + f"/api/groups/{first_party_group.pubid}", {}, headers=token_auth_header, ) @@ -75,7 +75,7 @@ def test_it_ignores_non_whitelisted_fields_in_payload_and_user_token( "joinable_by": "whoever", } res = app.patch_json( - "/api/groups/{id}".format(id=first_party_group.pubid), + f"/api/groups/{first_party_group.pubid}", group, headers=token_auth_header, ) @@ -94,7 +94,7 @@ def test_it_returns_http_400_with_invalid_payload_and_user_token( db_session.commit() res = app.patch_json( - "/api/groups/{id}".format(id=first_party_group.pubid), + f"/api/groups/{first_party_group.pubid}", { "name": "Oooopoooooooooooooooooooooooooooooooooooooooooooooooooooooooooooo" }, @@ -118,7 +118,7 @@ def test_it_returns_http_400_if_groupid_set_on_default_authority_and_user_token( db_session.commit() res = app.patch_json( - "/api/groups/{id}".format(id=first_party_group.pubid), + f"/api/groups/{first_party_group.pubid}", {"groupid": "3434kjkjk"}, headers=token_auth_header, expect_errors=True, @@ -133,7 +133,7 @@ def test_it_returns_http_400_if_groupid_set_on_default_authority_and_user_token( def test_it_returns_http_404_if_no_authenticated_user(self, app, first_party_group): group = {"name": "My Group"} res = app.patch_json( - "/api/groups/{id}".format(id=first_party_group.pubid), + f"/api/groups/{first_party_group.pubid}", group, expect_errors=True, ) @@ -149,7 +149,7 @@ def test_it_returns_http_404_if_token_user_unauthorized( group_payload = {"name": "My Group"} res = app.patch_json( - "/api/groups/{id}".format(id=group.pubid), + f"/api/groups/{group.pubid}", group_payload, headers=token_auth_header, expect_errors=True, @@ -169,7 +169,7 @@ def test_it_allows_auth_client_with_valid_forwarded_user( headers["X-Forwarded-User"] = third_party_user.userid group_payload = {"name": "My Group"} - path = "/api/groups/{id}".format(id=group.pubid) + path = f"/api/groups/{group.pubid}" res = app.patch_json(path, group_payload, headers=headers) assert res.status_code == 200 @@ -185,7 +185,7 @@ def test_it_allows_auth_client_with_matching_authority( group_payload = {"name": "My Group"} - path = "/api/groups/{id}".format(id=group.pubid) + path = f"/api/groups/{group.pubid}" res = app.patch_json(path, group_payload, headers=auth_client_header) assert res.status_code == 200 @@ -199,7 +199,7 @@ def test_it_does_not_allow_auth_client_with_mismatched_authority( group_payload = {"name": "My Group"} - path = 
"/api/groups/{id}".format(id=group.pubid) + path = f"/api/groups/{group.pubid}" res = app.patch_json( path, group_payload, headers=auth_client_header, expect_errors=True ) @@ -221,7 +221,7 @@ def test_it_allows_groupid_from_auth_client_with_forwarded_user( "groupid": "group:98762557@thirdparty.com", } - path = "/api/groups/{id}".format(id=group.pubid) + path = f"/api/groups/{group.pubid}" res = app.patch_json(path, group_payload, headers=headers) assert res.status_code == 200 @@ -248,7 +248,7 @@ def test_it_returns_HTTP_Conflict_if_groupid_is_duplicate( group_payload = {"groupid": "group:update_one@thirdparty.com"} # Attempting to set group2's `groupid` to one already taken by group1 - path = "/api/groups/{id}".format(id=group2.pubid) + path = f"/api/groups/{group2.pubid}" res = app.patch_json(path, group_payload, headers=headers, expect_errors=True) assert group1.groupid in res.json_body["reason"] @@ -286,7 +286,7 @@ def user_with_token(db_session, factories, first_party_user): @pytest.fixture def token_auth_header(user_with_token): user, token = user_with_token - return {"Authorization": "Bearer {}".format(token.value)} + return {"Authorization": f"Bearer {token.value}"} @pytest.fixture @@ -307,8 +307,6 @@ def auth_client(db_session, factories): @pytest.fixture def auth_client_header(auth_client): - user_pass = "{client_id}:{secret}".format( - client_id=auth_client.id, secret=auth_client.secret - ) + user_pass = f"{auth_client.id}:{auth_client.secret}" encoded = base64.standard_b64encode(user_pass.encode("utf-8")) return {"Authorization": "Basic {creds}".format(creds=encoded.decode("ascii"))} diff --git a/tests/functional/api/profile_test.py b/tests/functional/api/profile_test.py index 9d93a90d658..55daeb84ada 100644 --- a/tests/functional/api/profile_test.py +++ b/tests/functional/api/profile_test.py @@ -108,7 +108,7 @@ def test_it_updates_user_profile(self, app, user_with_token): assert res.status_code == 200 def test_it_raises_http_404_if_unauthenticated(self, app): - # FIXME: This should return a 403 + # FIXME: This should return a 403 # noqa: FIX001, TD001, TD002, TD003 profile = {"preferences": {"show_sidebar_tutorial": False}} res = app.patch_json("/api/profile", profile, expect_errors=True) diff --git a/tests/functional/api/users_test.py b/tests/functional/api/users_test.py index c37f438b8de..22d4d1cd1ac 100644 --- a/tests/functional/api/users_test.py +++ b/tests/functional/api/users_test.py @@ -31,7 +31,7 @@ def test_it_returns_http_200_when_successful( assert res.status_code == 200 def test_it_returns_404_if_missing_auth_client(self, app, user_payload): - # FIXME: This should return a 403; our exception views squash it into a 404 + # FIXME: This should return a 403; our exception views squash it into a 404 # noqa: FIX001, TD001, TD002, TD003 res = app.post_json("/api/users", user_payload, expect_errors=True) assert res.status_code == 404 diff --git a/tests/functional/api/versions_test.py b/tests/functional/api/versions_test.py index a560678cbf9..6facea477e8 100644 --- a/tests/functional/api/versions_test.py +++ b/tests/functional/api/versions_test.py @@ -26,7 +26,7 @@ def test_index_200s_with_application_json(self, app): # Send ``application/json`` and we should get a 200 response from the # default version. 
- headers = {"Accept": str("application/json")} + headers = {"Accept": "application/json"} res = app.get("/api/", headers=headers) @@ -36,7 +36,7 @@ def test_index_200s_with_application_json(self, app): def test_index_200s_with_v1_header(self, app): # Set a v1 Accept header and we should get a 200 response. - headers = {"Accept": str("application/vnd.hypothesis.v1+json")} + headers = {"Accept": "application/vnd.hypothesis.v1+json"} res = app.get("/api/", headers=headers) @@ -46,7 +46,7 @@ def test_index_200s_with_v1_header(self, app): def test_index_200s_with_v2_header(self, app): # Set a v2 Accept header and we should get a 200 response. - headers = {"Accept": str("application/vnd.hypothesis.v2+json")} + headers = {"Accept": "application/vnd.hypothesis.v2+json"} res = app.get("/api/", headers=headers) @@ -57,7 +57,7 @@ def test_index_406s_with_invalid_version_header(self, app): # Set a v3 Accept header and we should get a 406 response. # (For now because the version doesn't exist yet) - headers = {"Accept": str("application/vnd.hypothesis.v3+json")} + headers = {"Accept": "application/vnd.hypothesis.v3+json"} res = app.get("/api/", headers=headers, expect_errors=True) @@ -66,7 +66,7 @@ def test_index_406s_with_invalid_version_header(self, app): def test_index_200s_with_invalid_accept_header_value(self, app): # Set a generally-invalid Accept header and we should get a 200. - headers = {"Accept": str("nonsensical")} + headers = {"Accept": "nonsensical"} res = app.get("/api/", headers=headers, expect_errors=True) @@ -90,7 +90,7 @@ def test_index_adds_v2_response_header(self, app): # Set a v2 Accept header and we should get a version media type # response header. - headers = {"Accept": str("application/vnd.hypothesis.v2+json")} + headers = {"Accept": "application/vnd.hypothesis.v2+json"} res = app.get("/api/", headers=headers) diff --git a/tests/functional/bin/run_data_task_test.py b/tests/functional/bin/run_data_task_test.py index c3efc443b34..7109f88f63c 100644 --- a/tests/functional/bin/run_data_task_test.py +++ b/tests/functional/bin/run_data_task_test.py @@ -4,7 +4,7 @@ import pytest from importlib_resources import files -from pytest import fixture +from pytest import fixture # noqa: PT013 from tests.functional.conftest import TEST_ENVIRONMENT @@ -24,7 +24,7 @@ def test_reporting_tasks(self, environ): # Ensure we can run the "create from scratch" after everything "report/create_from_scratch", ): - result = check_output( + result = check_output( # noqa: S603 [ sys.executable, "bin/run_data_task.py", @@ -38,8 +38,8 @@ def test_reporting_tasks(self, environ): assert result - print(f"Task {task_name} OK!") - print(result.decode("utf-8")) + print(f"Task {task_name} OK!") # noqa: T201 + print(result.decode("utf-8")) # noqa: T201 @fixture def environ(self): @@ -54,7 +54,7 @@ def environ(self): def run_in_root(self): # A context manager to ensure we work from the root, but return the # path to where it was before - current_dir = os.getcwd() + current_dir = os.getcwd() # noqa: PTH109 os.chdir(str(files("h") / "..")) yield diff --git a/tests/functional/client_login_test.py b/tests/functional/client_login_test.py index 4b85a0ca4c3..c4db51295d1 100644 --- a/tests/functional/client_login_test.py +++ b/tests/functional/client_login_test.py @@ -88,7 +88,7 @@ def _parse_url(cls, url): def login(cls, app, user): res = app.get("/login") res.form["username"] = user.username - res.form["password"] = "pass" + res.form["password"] = "pass" # noqa: S105 res.form.submit() @pytest.fixture @@ -117,7 +117,7 @@ def 
authclient(self, db_session, factories): def user(self, db_session, factories): # Password is 'pass' user = factories.User( - password="$2b$12$21I1LjTlGJmLXzTDrQA8gusckjHEMepTmLY5WN3Kx8hSaqEEKj9V6" + password="$2b$12$21I1LjTlGJmLXzTDrQA8gusckjHEMepTmLY5WN3Kx8hSaqEEKj9V6" # noqa: S106 ) db_session.commit() return user diff --git a/tests/functional/fixtures/authentication.py b/tests/functional/fixtures/authentication.py index c7783e17ab4..5b46bf56b94 100644 --- a/tests/functional/fixtures/authentication.py +++ b/tests/functional/fixtures/authentication.py @@ -1,11 +1,11 @@ import pytest __all__ = ( - "user", "login_user", - "with_logged_in_user", - "with_logged_in_staff_member", + "user", "with_logged_in_admin", + "with_logged_in_staff_member", + "with_logged_in_user", ) @@ -16,16 +16,16 @@ def user(factories): @pytest.fixture def login_user(db_session, app, user): - def login_user(staff=False, admin=False): + def login_user(staff=False, admin=False): # noqa: FBT002 # This is the hash for `pass` used below - user.password = "$2b$12$21I1LjTlGJmLXzTDrQA8gusckjHEMepTmLY5WN3Kx8hSaqEEKj9V6" + user.password = "$2b$12$21I1LjTlGJmLXzTDrQA8gusckjHEMepTmLY5WN3Kx8hSaqEEKj9V6" # noqa: S105 user.staff = staff user.admin = admin db_session.commit() login_page = app.get("/login") login_page.form["username"] = user.username - login_page.form["password"] = "pass" + login_page.form["password"] = "pass" # noqa: S105 login_page.form.submit() return login_user diff --git a/tests/functional/fixtures/groups.py b/tests/functional/fixtures/groups.py index 1d9fcb8379a..a8679c00bda 100644 --- a/tests/functional/fixtures/groups.py +++ b/tests/functional/fixtures/groups.py @@ -1,6 +1,6 @@ import pytest -__all__ = ("group", "other_authority_group", "open_group", "user_owned_group") +__all__ = ("group", "open_group", "other_authority_group", "user_owned_group") @pytest.fixture diff --git a/tests/functional/h/views/admin/permissions_test.py b/tests/functional/h/views/admin/permissions_test.py index 4b20204c0b9..52ce2a6e69f 100644 --- a/tests/functional/h/views/admin/permissions_test.py +++ b/tests/functional/h/views/admin/permissions_test.py @@ -61,7 +61,12 @@ def test_group_end_points_accessible_by_staff( @pytest.mark.usefixtures("with_logged_in_user") @pytest.mark.parametrize("method,url_template,_", GROUP_PAGES) def test_group_end_points_not_accessible_by_regular_user( - self, app, group, method, url_template, _ + self, + app, + group, + method, + url_template, + _, # noqa: PT019 ): url = url_template.format(pubid=group.pubid) diff --git a/tests/functional/oauth_test.py b/tests/functional/oauth_test.py index a8b1ddf15d0..284fe0189f8 100644 --- a/tests/functional/oauth_test.py +++ b/tests/functional/oauth_test.py @@ -19,11 +19,11 @@ def test_getting_an_access_token(self, app, authclient, userid): self.assert_is_authorized(app, userid, access_token) def test_request_fails_if_access_token_wrong(self, app): - self.assert_is_not_authorised(app, access_token="wrong") + self.assert_is_not_authorised(app, access_token="wrong") # noqa: S106 def test_request_fails_if_access_token_expired(self, app, db_session, factories): token = factories.DeveloperToken( - expires=datetime.datetime.utcnow() - datetime.timedelta(hours=1) + expires=datetime.datetime.utcnow() - datetime.timedelta(hours=1) # noqa: DTZ003 ) token = token.value db_session.commit() @@ -61,7 +61,7 @@ def test_refresh_token_request_fails_if_token_expired( self, app, db_session, factories ): token = factories.DeveloperToken( - expires=datetime.datetime.utcnow() - 
datetime.timedelta(hours=1) + expires=datetime.datetime.utcnow() - datetime.timedelta(hours=1) # noqa: DTZ003 ) refresh_token = token.refresh_token db_session.commit() @@ -145,7 +145,7 @@ def get_access_token(self, app, authclient, userid): def epoch(self, delta=None): """Get a Unix timestamp for the current time, with optional offset.""" - timestamp = datetime.datetime.utcnow() + timestamp = datetime.datetime.utcnow() # noqa: DTZ003 if delta is not None: timestamp = timestamp + delta diff --git a/tests/unit/h/accounts/schemas_test.py b/tests/unit/h/accounts/schemas_test.py index 2e62dbe2bf1..80781a523a6 100644 --- a/tests/unit/h/accounts/schemas_test.py +++ b/tests/unit/h/accounts/schemas_test.py @@ -80,7 +80,7 @@ def test_it_is_invalid_when_password_too_short(self, pyramid_request): with pytest.raises(colander.Invalid) as exc: schema.deserialize({"password": "a"}) - assert exc.value.asdict()["password"] == ("Must be 8 characters or more.") + assert exc.value.asdict()["password"] == ("Must be 8 characters or more.") # noqa: S105 def test_it_is_invalid_when_username_too_short(self, pyramid_request): schema = schemas.RegisterSchema().bind(request=pyramid_request) @@ -151,7 +151,7 @@ def test_it_validates_with_valid_payload( userid=format_userid( valid_params["username"], pyramid_csrf_request.default_authority ), - requested_at=datetime.now() - timedelta(days=32), + requested_at=datetime.now() - timedelta(days=32), # noqa: DTZ005 ) result = schema.deserialize(valid_params) diff --git a/tests/unit/h/accounts/util_test.py b/tests/unit/h/accounts/util_test.py index 2e8c5e6131a..58584da0e63 100644 --- a/tests/unit/h/accounts/util_test.py +++ b/tests/unit/h/accounts/util_test.py @@ -4,7 +4,7 @@ def test_validate_url_rejects_urls_without_domains(): - with pytest.raises(ValueError): + with pytest.raises(ValueError): # noqa: PT011 validate_url("http:///path") @@ -17,7 +17,7 @@ def test_validate_url_accepts_http_urls(): def test_validate_url_rejects_non_http_urls(): - with pytest.raises(ValueError): + with pytest.raises(ValueError): # noqa: PT011 validate_url("mailto:jim@smith.org") @@ -29,10 +29,10 @@ def test_validate_orcid_accepts_valid_ids(orcid_id): def test_validate_orcid_rejects_malformed_ids(): - with pytest.raises(ValueError): + with pytest.raises(ValueError): # noqa: PT011 validate_orcid("not-an-orcid") def test_validate_orcid_rejects_mismatching_check_digit(): - with pytest.raises(ValueError): + with pytest.raises(ValueError): # noqa: PT011 validate_orcid("1000-0002-1825-0097") diff --git a/tests/unit/h/activity/bucketing_test.py b/tests/unit/h/activity/bucketing_test.py index 9225ccb25e0..72c55cbf6df 100644 --- a/tests/unit/h/activity/bucketing_test.py +++ b/tests/unit/h/activity/bucketing_test.py @@ -6,11 +6,11 @@ from h.activity import bucketing from tests.common import factories -UTCNOW = datetime.datetime(year=1970, month=2, day=21, hour=19, minute=30) +UTCNOW = datetime.datetime(year=1970, month=2, day=21, hour=19, minute=30) # noqa: DTZ001 FIVE_MINS_AGO = UTCNOW - datetime.timedelta(minutes=5) YESTERDAY = UTCNOW - datetime.timedelta(days=1) -THIRD_MARCH_1968 = datetime.datetime(year=1968, month=3, day=3) -FIFTH_NOVEMBER_1969 = datetime.datetime(year=1969, month=11, day=5) +THIRD_MARCH_1968 = datetime.datetime(year=1968, month=3, day=3) # noqa: DTZ001 +FIFTH_NOVEMBER_1969 = datetime.datetime(year=1969, month=11, day=5) # noqa: DTZ001 class timeframe_with: @@ -24,7 +24,7 @@ def __eq__(self, timeframe): and self.document_buckets == timeframe.document_buckets ) - # pragma: nocover + # 
pragma: nocover # noqa: ERA001 def __repr__(self): # pragma: nocover return f'{self.__class__} "{self.label}" with {len(self.document_buckets)} document buckets' # pragma: nocover diff --git a/tests/unit/h/activity/query_test.py b/tests/unit/h/activity/query_test.py index a146f8c21b6..cf051de9e2b 100644 --- a/tests/unit/h/activity/query_test.py +++ b/tests/unit/h/activity/query_test.py @@ -373,7 +373,10 @@ def test_it_buckets_the_annotations( assert result.timeframes == bucketing.bucket.return_value def test_it_fetches_the_groups_from_the_database( - self, _fetch_groups, group_pubids, pyramid_request + self, + _fetch_groups, # noqa: PT019 + group_pubids, + pyramid_request, ): execute(pyramid_request, MultiDict(), self.PAGE_SIZE) @@ -396,7 +399,7 @@ def test_it_returns_each_annotation_presented(self, annotations, pyramid_request } ) - def test_it_returns_each_annotations_group(self, _fetch_groups, pyramid_request): + def test_it_returns_each_annotations_group(self, _fetch_groups, pyramid_request): # noqa: PT019 result = execute(pyramid_request, MultiDict(), self.PAGE_SIZE) presented_annotations = [] diff --git a/tests/unit/h/app_test.py b/tests/unit/h/app_test.py index 52b5c3aa588..e1185b24028 100644 --- a/tests/unit/h/app_test.py +++ b/tests/unit/h/app_test.py @@ -24,7 +24,7 @@ def test_it_configures_pyramid_sentry_plugin(self, pyramid_config): def pyramid_config(self, pyramid_config): # Mock out jinja2 related stuff pyramid_config.get_jinja2_environment = mock.create_autospec( - # pragma: nocover + # pragma: nocover # noqa: ERA001 spec=lambda: JinjaEnvironment() ) @@ -36,12 +36,12 @@ def pyramid_config(self, pyramid_config): ) pyramid_config.add_jinja2_extension = mock.create_autospec( - lambda name: True + lambda name: True # noqa: ARG005 ) # pragma: nocover # Prevent us from really loading the includes pyramid_config.include = mock.create_autospec( - lambda name: True + lambda name: True # noqa: ARG005 ) # pragma: nocover return pyramid_config diff --git a/tests/unit/h/celery_test.py b/tests/unit/h/celery_test.py index 6cc9f41b3de..1d9e02ddbd7 100644 --- a/tests/unit/h/celery_test.py +++ b/tests/unit/h/celery_test.py @@ -53,7 +53,7 @@ def test_report_failure_reports_failure_in_debug_mode(self, patch): einfo = None # Make a fake ExceptionInfo object try: - raise RuntimeError("asplode!") + raise RuntimeError("asplode!") # noqa: EM101, TRY301 except RuntimeError: einfo = ExceptionInfo() @@ -70,7 +70,7 @@ def test_report_failure_skipped_when_not_in_debug_mode(self, patch): einfo = None # Make a fake ExceptionInfo object try: - raise RuntimeError("asplode!") + raise RuntimeError("asplode!") # noqa: EM101, TRY301 except RuntimeError: einfo = ExceptionInfo() diff --git a/tests/unit/h/cli/commands/authclient_test.py b/tests/unit/h/cli/commands/authclient_test.py index 93e42585367..d93afc86a96 100644 --- a/tests/unit/h/cli/commands/authclient_test.py +++ b/tests/unit/h/cli/commands/authclient_test.py @@ -28,7 +28,7 @@ def test_it_creates_a_confidential_authclient( assert authclient.authority == "publisher.org" assert authclient.name == "Publisher" - assert authclient.secret == "fixed-secret-token" + assert authclient.secret == "fixed-secret-token" # noqa: S105 def test_it_prints_the_id_for_public_client(self, cli, cliconfig, db_session): (authclient, output) = self._add_authclient( diff --git a/tests/unit/h/cli/commands/user_test.py b/tests/unit/h/cli/commands/user_test.py index 62edc0af5ef..276c2779f44 100644 --- a/tests/unit/h/cli/commands/user_test.py +++ 
b/tests/unit/h/cli/commands/user_test.py @@ -25,7 +25,7 @@ def test_it_adds_user_with_default_authority(self, invoke_cli, user_signup_servi user_signup_service.signup.assert_called_with( username="admin", email="admin@localhost", - password="admin", + password="admin", # noqa: S106 require_activation=False, ) @@ -51,7 +51,7 @@ def test_it_adds_user_with_specific_authority( user_signup_service.signup.assert_called_with( username="admin", email="admin@localhost", - password="admin", + password="admin", # noqa: S106 authority="publisher.org", require_activation=False, ) diff --git a/tests/unit/h/conftest.py b/tests/unit/h/conftest.py index 9966bc2c196..9c93fa4bc84 100644 --- a/tests/unit/h/conftest.py +++ b/tests/unit/h/conftest.py @@ -30,7 +30,7 @@ def __init__(self): self.flags = {} self.loaded = False - def __call__(self, name, *args, **kwargs): + def __call__(self, name, *args, **kwargs): # noqa: ARG002 return self.flags.get(name, True) def all(self): @@ -114,7 +114,7 @@ def matchers(): @pytest.fixture def notify(pyramid_config, request): patcher = mock.patch.object(pyramid_config.registry, "notify", autospec=True) - request.addfinalizer(patcher.stop) + request.addfinalizer(patcher.stop) # noqa: PT021 return patcher.start() diff --git a/tests/unit/h/emails/flag_notification_test.py b/tests/unit/h/emails/flag_notification_test.py index 74fc70838b4..86a3e7e2038 100644 --- a/tests/unit/h/emails/flag_notification_test.py +++ b/tests/unit/h/emails/flag_notification_test.py @@ -14,8 +14,8 @@ def test_calls_renderers_with_appropriate_context( ) expected_context = {"incontext_link": "http://hyp.is/a/ann1"} - html_renderer.assert_(**expected_context) - text_renderer.assert_(**expected_context) + html_renderer.assert_(**expected_context) # noqa: PT009 + text_renderer.assert_(**expected_context) # noqa: PT009 def test_appropriate_return_values( self, pyramid_request, html_renderer, text_renderer diff --git a/tests/unit/h/emails/reply_notification_test.py b/tests/unit/h/emails/reply_notification_test.py index cdbc09206a4..af634e6dea2 100644 --- a/tests/unit/h/emails/reply_notification_test.py +++ b/tests/unit/h/emails/reply_notification_test.py @@ -41,8 +41,8 @@ def test_it( "reply_user_url": "http://example.com/stream/user/ron", "unsubscribe_url": "http://example.com/unsub/FAKETOKEN", } - html_renderer.assert_(**expected_context) - text_renderer.assert_(**expected_context) + html_renderer.assert_(**expected_context) # noqa: PT009 + text_renderer.assert_(**expected_context) # noqa: PT009 def test_falls_back_to_target_uri_for_document_title( self, notification, pyramid_request, html_renderer, text_renderer @@ -51,8 +51,8 @@ def test_falls_back_to_target_uri_for_document_title( generate(pyramid_request, notification) - html_renderer.assert_(document_title="http://example.org/") - text_renderer.assert_(document_title="http://example.org/") + html_renderer.assert_(document_title="http://example.org/") # noqa: PT009 + text_renderer.assert_(document_title="http://example.org/") # noqa: PT009 def test_falls_back_to_individual_page_if_no_bouncer( self, @@ -86,8 +86,8 @@ def test_falls_back_to_individual_page_if_no_bouncer( "reply_user_url": "http://example.com/stream/user/ron", "unsubscribe_url": "http://example.com/unsub/FAKETOKEN", } - html_renderer.assert_(**expected_context) - text_renderer.assert_(**expected_context) + html_renderer.assert_(**expected_context) # noqa: PT009 + text_renderer.assert_(**expected_context) # noqa: PT009 def test_supports_non_ascii_display_names( self, @@ -121,8 +121,8 @@ def 
test_returns_usernames_if_no_display_names( "parent_user_display_name": parent_user.username, "reply_user_display_name": reply_user.username, } - html_renderer.assert_(**expected_context) - text_renderer.assert_(**expected_context) + html_renderer.assert_(**expected_context) # noqa: PT009 + text_renderer.assert_(**expected_context) # noqa: PT009 def test_returns_text_and_body_results_from_renderers( self, notification, pyramid_request, html_renderer, text_renderer @@ -172,8 +172,8 @@ def test_urls_not_set_for_third_party_users( generate(pyramid_request, notification) - html_renderer.assert_(**expected_context) - text_renderer.assert_(**expected_context) + html_renderer.assert_(**expected_context) # noqa: PT009 + text_renderer.assert_(**expected_context) # noqa: PT009 def test_urls_set_for_first_party_users( self, notification, pyramid_request, html_renderer, text_renderer @@ -185,8 +185,8 @@ def test_urls_set_for_first_party_users( generate(pyramid_request, notification) - html_renderer.assert_(**expected_context) - text_renderer.assert_(**expected_context) + html_renderer.assert_(**expected_context) # noqa: PT009 + text_renderer.assert_(**expected_context) # noqa: PT009 @pytest.fixture def document(self, db_session): @@ -213,8 +213,8 @@ def notification(self, reply, reply_user, parent, parent_user, document): def parent(self): common = { "id": "foo123", - "created": datetime.datetime.utcnow(), - "updated": datetime.datetime.utcnow(), + "created": datetime.datetime.utcnow(), # noqa: DTZ003 + "updated": datetime.datetime.utcnow(), # noqa: DTZ003 "text": "Foo is true", } return Annotation(target_uri="http://example.org/", **common) @@ -229,8 +229,8 @@ def parent_user(self, factories): def reply(self): common = { "id": "bar456", - "created": datetime.datetime.utcnow(), - "updated": datetime.datetime.utcnow(), + "created": datetime.datetime.utcnow(), # noqa: DTZ003 + "updated": datetime.datetime.utcnow(), # noqa: DTZ003 "text": "No it is not!", } return Annotation(target_uri="http://example.org/", **common) diff --git a/tests/unit/h/emails/reset_password_test.py b/tests/unit/h/emails/reset_password_test.py index 8e73ad3d64e..d70d8b5ff05 100644 --- a/tests/unit/h/emails/reset_password_test.py +++ b/tests/unit/h/emails/reset_password_test.py @@ -19,8 +19,8 @@ def test_calls_renderers_with_appropriate_context( "reset_code": "s3cr3t-r3s3t-c0d3", "reset_link": "http://example.com/reset/s3cr3t-r3s3t-c0d3", } - html_renderer.assert_(**expected_context) - text_renderer.assert_(**expected_context) + html_renderer.assert_(**expected_context) # noqa: PT009 + text_renderer.assert_(**expected_context) # noqa: PT009 @pytest.mark.usefixtures("html_renderer", "text_renderer") def test_generates_token_using_username(self, pyramid_request, serializer, user): diff --git a/tests/unit/h/emails/signup_test.py b/tests/unit/h/emails/signup_test.py index bb4c1de6413..2e690c18494 100644 --- a/tests/unit/h/emails/signup_test.py +++ b/tests/unit/h/emails/signup_test.py @@ -18,8 +18,8 @@ def test_calls_renderers_with_appropriate_context( expected_context = { "activate_link": "http://example.com/activate/1234/abcd4567" } - html_renderer.assert_(**expected_context) - text_renderer.assert_(**expected_context) + html_renderer.assert_(**expected_context) # noqa: PT009 + text_renderer.assert_(**expected_context) # noqa: PT009 def test_appropriate_return_values( self, pyramid_request, html_renderer, text_renderer diff --git a/tests/unit/h/emails/test_test.py b/tests/unit/h/emails/test_test.py index bcc79fb0d03..1ae9495ab01 
100644 --- a/tests/unit/h/emails/test_test.py +++ b/tests/unit/h/emails/test_test.py @@ -17,8 +17,8 @@ def test_calls_renderers_with_appropriate_context( "python_version": Any.string(), "version": __version__, } - html_renderer.assert_(**expected_context) - text_renderer.assert_(**expected_context) + html_renderer.assert_(**expected_context) # noqa: PT009 + text_renderer.assert_(**expected_context) # noqa: PT009 def test_appropriate_return_values( self, pyramid_request, html_renderer, text_renderer diff --git a/tests/unit/h/eventqueue_test.py b/tests/unit/h/eventqueue_test.py index 4ae47c08bc1..6f129cd78ae 100644 --- a/tests/unit/h/eventqueue_test.py +++ b/tests/unit/h/eventqueue_test.py @@ -72,7 +72,7 @@ def test_publish_all_reraises_in_debug_mode(self, subscriber, pyramid_request): pyramid_request.debug = True subscriber.side_effect = ValueError("boom!") - with pytest.raises(ValueError) as excinfo: + with pytest.raises(ValueError) as excinfo: # noqa: PT011, PT012 queue(DummyEvent(pyramid_request)) queue.publish_all() assert str(excinfo.value) == "boom!" diff --git a/tests/unit/h/feeds/atom_test.py b/tests/unit/h/feeds/atom_test.py index 7bfce2488ba..131e5ef1393 100644 --- a/tests/unit/h/feeds/atom_test.py +++ b/tests/unit/h/feeds/atom_test.py @@ -25,7 +25,7 @@ def test_feed_subtitle(): @mock.patch("h.feeds.atom._feed_entry_from_annotation") -def test_feed_contains_entries(_feed_entry_from_annotation, factories): +def test_feed_contains_entries(_feed_entry_from_annotation, factories): # noqa: PT019 """The feed should contain an entry for each annotation.""" annotations = [ factories.Annotation(), @@ -40,7 +40,7 @@ def test_feed_contains_entries(_feed_entry_from_annotation, factories): "feed entry for annotation 3", ] - def pop(*args, **kwargs): + def pop(*args, **kwargs): # noqa: ARG001 return entries.pop(0) _feed_entry_from_annotation.side_effect = pop @@ -127,11 +127,14 @@ def test_entry_title(factories): def test_entry_dates(factories): annotation = factories.Annotation( - created=datetime.utcnow(), updated=datetime.utcnow() + timedelta(hours=1) + created=datetime.utcnow(), # noqa: DTZ003 + updated=datetime.utcnow() + timedelta(hours=1), # noqa: DTZ003 ) feed = atom.feed_from_annotations( - [annotation], "atom_url", lambda annotation: "annotation url" + [annotation], + "atom_url", + lambda annotation: "annotation url", # noqa: ARG005 ) assert feed["entries"][0]["published"] == f"utc_iso8601_return:{annotation.created}" @@ -147,7 +150,9 @@ def test_entry_content(factories): annotation = factories.Annotation() feed = atom.feed_from_annotations( - [annotation], "atom_url", lambda annotation: "annotation url" + [annotation], + "atom_url", + lambda annotation: "annotation url", # noqa: ARG005 ) mock_description.assert_called_once_with() @@ -155,7 +160,7 @@ def test_entry_content(factories): @mock.patch("h.feeds.util") -def test_annotation_url_links(_, factories): +def test_annotation_url_links(_, factories): # noqa: PT019 """Entries should contain links to the HTML pages for the annotations.""" annotation = factories.Annotation() annotation_url = mock.Mock() @@ -171,7 +176,7 @@ def test_annotation_url_links(_, factories): @mock.patch("h.feeds.util") -def test_annotation_api_url_links(_, factories): +def test_annotation_api_url_links(_, factories): # noqa: PT019 """Entries should contain links to the JSON pages for the annotations.""" annotation = factories.Annotation() annotation_api_url = mock.Mock() @@ -190,10 +195,12 @@ def test_annotation_api_url_links(_, factories): def 
test_feed_updated(factories): annotations = factories.Annotation.build_batch(3) - annotations[0].updated = datetime.utcnow() + annotations[0].updated = datetime.utcnow() # noqa: DTZ003 feed = atom.feed_from_annotations( - annotations, "atom_url", lambda annotation: "annotation url" + annotations, + "atom_url", + lambda annotation: "annotation url", # noqa: ARG005 ) assert feed["updated"] == f"utc_iso8601_return:{annotations[0].updated}" diff --git a/tests/unit/h/feeds/rss_test.py b/tests/unit/h/feeds/rss_test.py index 8d09c095c85..c2723ac9926 100644 --- a/tests/unit/h/feeds/rss_test.py +++ b/tests/unit/h/feeds/rss_test.py @@ -22,8 +22,8 @@ def _annotation(**kwargs): args = { "userid": "acct:janebloggs@hypothes.is", "target_selectors": [], - "created": datetime.datetime.utcnow(), - "updated": datetime.datetime.utcnow(), + "created": datetime.datetime.utcnow(), # noqa: DTZ003 + "updated": datetime.datetime.utcnow(), # noqa: DTZ003 "document": models.Document(), } args.update(**kwargs) @@ -51,7 +51,7 @@ def test_feed_from_annotations_item_author(userid, name): def test_feed_annotations_pubDate(): """It should render the pubDates of annotations correctly.""" ann = _annotation( - created=datetime.datetime( + created=datetime.datetime( # noqa: DTZ001 year=2015, month=3, day=11, hour=10, minute=43, second=54 ) ) @@ -101,7 +101,7 @@ def test_feed_from_annotations_item_descriptions(factories): def test_feed_from_annotations_item_guid(factories): """Feed items should use the annotation's HTML URL as their GUID.""" annotation = factories.Annotation( - created=datetime.datetime(year=2015, month=3, day=11) + created=datetime.datetime(year=2015, month=3, day=11) # noqa: DTZ001 ) feed = rss.feed_from_annotations( @@ -173,7 +173,7 @@ def test_feed_from_annotations_pubDate(): """The pubDate should be the updated time of the most recent annotation.""" annotations = [ _annotation( - updated=datetime.datetime( + updated=datetime.datetime( # noqa: DTZ001 year=2015, month=3, day=11, @@ -184,7 +184,7 @@ def test_feed_from_annotations_pubDate(): ) ), _annotation( - updated=datetime.datetime( + updated=datetime.datetime( # noqa: DTZ001 year=2015, month=2, day=11, @@ -195,7 +195,7 @@ def test_feed_from_annotations_pubDate(): ) ), _annotation( - updated=datetime.datetime( + updated=datetime.datetime( # noqa: DTZ001 year=2015, month=1, day=11, diff --git a/tests/unit/h/feeds/util_test.py b/tests/unit/h/feeds/util_test.py index 240ec093272..e197be643c9 100644 --- a/tests/unit/h/feeds/util_test.py +++ b/tests/unit/h/feeds/util_test.py @@ -7,7 +7,7 @@ def test_tag_uri_for_annotation(factories): """Entry IDs should be tag URIs based on domain, day and annotation ID.""" annotation = factories.Annotation( - created=datetime.datetime(year=2015, month=3, day=19) + created=datetime.datetime(year=2015, month=3, day=19) # noqa: DTZ001 ) tag_uri = util.tag_uri_for_annotation( diff --git a/tests/unit/h/jinja2_extensions/__init___test.py b/tests/unit/h/jinja2_extensions/__init___test.py index e48d0af3e1c..84b8b0f6ad9 100644 --- a/tests/unit/h/jinja2_extensions/__init___test.py +++ b/tests/unit/h/jinja2_extensions/__init___test.py @@ -11,7 +11,7 @@ class TestSetupJinja2Env: def test_it(self): - environment = Environment() + environment = Environment() # noqa: S701 setup_jinja2_env(environment) diff --git a/tests/unit/h/jinja2_extensions/filter_test.py b/tests/unit/h/jinja2_extensions/filter_test.py index 042be9091d0..d145ee49151 100644 --- a/tests/unit/h/jinja2_extensions/filter_test.py +++ 
b/tests/unit/h/jinja2_extensions/filter_test.py @@ -27,14 +27,14 @@ def test_to_json(value_in, json_out): "timestamp_in,string_out", [ # Basic format for recent timestamps - (datetime(2016, 4, 14, 16, 45, 36, 529730), "14 April at 16:45"), + (datetime(2016, 4, 14, 16, 45, 36, 529730), "14 April at 16:45"), # noqa: DTZ001 # For times more than a year ago, add the year - (datetime(2012, 4, 14, 16, 45, 36, 529730), "14 April 2012 at 16:45"), + (datetime(2012, 4, 14, 16, 45, 36, 529730), "14 April 2012 at 16:45"), # noqa: DTZ001 ], ) def test_human_timestamp(timestamp_in, string_out): assert ( - human_timestamp(timestamp_in, now=lambda: datetime(2016, 4, 14)) == string_out + human_timestamp(timestamp_in, now=lambda: datetime(2016, 4, 14)) == string_out # noqa: DTZ001 ) diff --git a/tests/unit/h/models/document/_document_test.py b/tests/unit/h/models/document/_document_test.py index a0c82929241..4a56f787330 100644 --- a/tests/unit/h/models/document/_document_test.py +++ b/tests/unit/h/models/document/_document_test.py @@ -100,7 +100,7 @@ def err(): monkeypatch.setattr(db_session, "flush", err) - with pytest.raises(ConcurrentUpdateError): + with pytest.raises(ConcurrentUpdateError): # noqa: SIM117 with db_session.no_autoflush: # prevent premature IntegrityError Document.find_or_create_by_uris( db_session, @@ -194,7 +194,7 @@ def test_it_deletes_all_but_the_first(self, db_session, duplicate_docs): .count() ) - @pytest.mark.parametrize("updated", (None, _datetime(2001, 1, 1))) + @pytest.mark.parametrize("updated", (None, _datetime(2001, 1, 1))) # noqa: DTZ001 @pytest.mark.parametrize("sub_item", ("document_uris", "meta")) def test_it_moves_sub_items_to_the_first( self, db_session, duplicate_docs, datetime, updated, sub_item @@ -217,7 +217,7 @@ def test_it_moves_annotations_to_the_first(self, db_session, duplicate_docs): merge_documents(db_session, duplicate_docs) db_session.flush() - for document, expected_count in zip(duplicate_docs, (3, 0, 0)): + for document, expected_count in zip(duplicate_docs, (3, 0, 0), strict=False): count = ( db_session.query(models.Annotation) .filter_by(document_id=document.id) @@ -432,6 +432,6 @@ def merge_documents(self, patch): @pytest.fixture def datetime(patch): datetime = patch("h.models.document._document.datetime") - datetime.utcnow.return_value = _datetime.utcnow() + datetime.utcnow.return_value = _datetime.utcnow() # noqa: DTZ003 return datetime diff --git a/tests/unit/h/models/document/_meta_test.py b/tests/unit/h/models/document/_meta_test.py index dd83b7e301f..789650ea8e4 100644 --- a/tests/unit/h/models/document/_meta_test.py +++ b/tests/unit/h/models/document/_meta_test.py @@ -41,8 +41,8 @@ def test_it_updates_an_existing_DocumentMeta_if_there_is_one( value="new value", # This should be ignored either way. document=meta_attrs["document"] if correct_document else Document(), - created=datetime.now(), # This should be ignored. - updated=datetime.now(), + created=datetime.now(), # This should be ignored. 
# noqa: DTZ005 + updated=datetime.now(), # noqa: DTZ005 ) document_meta = DocumentMeta(**original_attrs) db_session.add(document_meta) @@ -96,7 +96,7 @@ def err(): monkeypatch.setattr(db_session, "flush", err) - with pytest.raises(ConcurrentUpdateError): + with pytest.raises(ConcurrentUpdateError): # noqa: SIM117 with db_session.no_autoflush: # prevent premature IntegrityError create_or_update_document_meta(session=db_session, **meta_attrs) @@ -107,11 +107,11 @@ def meta_attrs(self): "type": "title", "value": "the title", "document": Document(), - "created": datetime.now() - timedelta(days=1), - "updated": datetime.now(), + "created": datetime.now() - timedelta(days=1), # noqa: DTZ005 + "updated": datetime.now(), # noqa: DTZ005 } - @pytest.fixture() + @pytest.fixture def mock_db_session(self, db_session): return Mock(spec=db_session) diff --git a/tests/unit/h/models/document/_uri_test.py b/tests/unit/h/models/document/_uri_test.py index 97b1bc65bca..38235b98a28 100644 --- a/tests/unit/h/models/document/_uri_test.py +++ b/tests/unit/h/models/document/_uri_test.py @@ -87,7 +87,9 @@ def test_it_updates_the_existing_DocumentURI_if_there_is_one( ): original_attrs = doc_uri_attrs updated_attrs = dict( - original_attrs, created=datetime.now(), updated=datetime.now() + original_attrs, + created=datetime.now(), # noqa: DTZ005 + updated=datetime.now(), # noqa: DTZ005 ) document_uri = DocumentURI(**original_attrs) db_session.add(document_uri) @@ -105,7 +107,9 @@ def test_it_creates_a_new_DocumentURI_if_there_is_no_existing_one( ): original_attrs = doc_uri_attrs updated_attrs = dict( - original_attrs, created=datetime.now(), updated=datetime.now() + original_attrs, + created=datetime.now(), # noqa: DTZ005 + updated=datetime.now(), # noqa: DTZ005 ) # Add one non-matching DocumentURI to the database. 
db_session.add(DocumentURI(**dict(original_attrs, content_type="different"))) @@ -154,7 +158,7 @@ def err(): monkeypatch.setattr(db_session, "flush", err) - with pytest.raises(ConcurrentUpdateError): + with pytest.raises(ConcurrentUpdateError): # noqa: SIM117 with db_session.no_autoflush: # prevent premature IntegrityError create_or_update_document_uri(session=db_session, **doc_uri_attrs) @@ -166,11 +170,11 @@ def doc_uri_attrs(self): "type": "self-claim", "content_type": "", "document": Document(), - "created": datetime.now() - timedelta(days=1), - "updated": datetime.now() - timedelta(days=1), + "created": datetime.now() - timedelta(days=1), # noqa: DTZ005 + "updated": datetime.now() - timedelta(days=1), # noqa: DTZ005 } - @pytest.fixture() + @pytest.fixture def mock_db_session(self, db_session): return Mock(spec=db_session) diff --git a/tests/unit/h/models/feature_test.py b/tests/unit/h/models/feature_test.py index 3fc2818e457..8bc8421640c 100644 --- a/tests/unit/h/models/feature_test.py +++ b/tests/unit/h/models/feature_test.py @@ -60,4 +60,4 @@ def features_override(self, request): clear=True, ) patcher.start() - request.addfinalizer(patcher.stop) + request.addfinalizer(patcher.stop) # noqa: PT021 diff --git a/tests/unit/h/models/group_test.py b/tests/unit/h/models/group_test.py index 9d3ef233d2e..8fa3b3c876b 100644 --- a/tests/unit/h/models/group_test.py +++ b/tests/unit/h/models/group_test.py @@ -20,13 +20,13 @@ def test_init_sets_given_attributes(): def test_with_short_name(): """Should raise ValueError if name shorter than 3 characters.""" - with pytest.raises(ValueError): + with pytest.raises(ValueError): # noqa: PT011 models.Group(name="ab") def test_with_long_name(): """Should raise ValueError if name longer than 25 characters.""" - with pytest.raises(ValueError): + with pytest.raises(ValueError): # noqa: PT011 models.Group(name="abcdefghijklmnopqrstuvwxyz") @@ -172,7 +172,7 @@ def test_you_can_set_type(factories, original_type, new_type): def test_you_cant_set_type_to_an_invalid_value(factories): - with pytest.raises(ValueError): + with pytest.raises(ValueError): # noqa: PT011 factories.Group().type = "invalid" @@ -309,6 +309,6 @@ def group(self, factories): return factories.Group() -@pytest.fixture() +@pytest.fixture def organization(factories): return factories.Organization() diff --git a/tests/unit/h/models/organization_test.py b/tests/unit/h/models/organization_test.py index 9bd19912b58..7739c50c7d1 100644 --- a/tests/unit/h/models/organization_test.py +++ b/tests/unit/h/models/organization_test.py @@ -24,12 +24,12 @@ def test_null_logo(): def test_too_short_name_raises_value_error(): - with pytest.raises(ValueError): + with pytest.raises(ValueError): # noqa: PT011 models.Organization(name="") def test_too_long_name_raises_value_error(): - with pytest.raises(ValueError): + with pytest.raises(ValueError): # noqa: PT011 models.Organization(name="abcdefghijklmnopqrstuvwxyz") diff --git a/tests/unit/h/models/token_test.py b/tests/unit/h/models/token_test.py index 0da63b592d5..6449cd3c3d4 100644 --- a/tests/unit/h/models/token_test.py +++ b/tests/unit/h/models/token_test.py @@ -8,13 +8,13 @@ def test_ttl_is_none_if_token_has_no_expires(self): assert Token().ttl is None def test_ttl_when_token_does_expire(self): - expires = datetime.datetime.utcnow() + datetime.timedelta(hours=1) + expires = datetime.datetime.utcnow() + datetime.timedelta(hours=1) # noqa: DTZ003 token = Token(expires=expires) assert 0 < token.ttl < 3601 def 
test_expired_is_false_if_expires_is_in_the_future(self): - expires = datetime.datetime.utcnow() + datetime.timedelta(hours=1) + expires = datetime.datetime.utcnow() + datetime.timedelta(hours=1) # noqa: DTZ003 token = Token(expires=expires) assert not token.expired @@ -25,13 +25,13 @@ def test_expired_is_false_if_expires_is_none(self): assert not token.expired def test_expired_is_true_if_expires_is_in_the_past(self): - expires = datetime.datetime.utcnow() - datetime.timedelta(hours=1) + expires = datetime.datetime.utcnow() - datetime.timedelta(hours=1) # noqa: DTZ003 token = Token(expires=expires) assert token.expired is True def test_refresh_token_expired_is_false_if_in_future(self): - refresh_token_expires = datetime.datetime.utcnow() + datetime.timedelta(hours=1) + refresh_token_expires = datetime.datetime.utcnow() + datetime.timedelta(hours=1) # noqa: DTZ003 token = Token(refresh_token_expires=refresh_token_expires) assert not token.refresh_token_expired @@ -42,7 +42,7 @@ def test_refresh_token_expired_is_false_if_none(self): assert not token.refresh_token_expired def test_refresh_token_expired_is_true_if_in_past(self): - refresh_token_expires = datetime.datetime.utcnow() - datetime.timedelta(hours=1) + refresh_token_expires = datetime.datetime.utcnow() - datetime.timedelta(hours=1) # noqa: DTZ003 token = Token(refresh_token_expires=refresh_token_expires) assert token.refresh_token_expired is True diff --git a/tests/unit/h/models/user_test.py b/tests/unit/h/models/user_test.py index b00eb6ec541..b39543202c9 100644 --- a/tests/unit/h/models/user_test.py +++ b/tests/unit/h/models/user_test.py @@ -21,7 +21,7 @@ def test__eq__returns_a_BinaryExpression(self, comparator, other): ("not_a_valid_user_id", "acct:DIFFERENT@authority", "acct:username@DIFFERENT"), ) def test__eq___returns_False(self, comparator, non_matching): - assert not comparator == non_matching + assert not comparator == non_matching # noqa: SIM201 @pytest.fixture def comparator(self): @@ -72,19 +72,19 @@ def test_userid_derived_from_username_and_authority(self, fred): assert fred.userid == "acct:fredbloggs@example.com" def test_cannot_create_user_with_too_short_username(self): - with pytest.raises(ValueError): + with pytest.raises(ValueError): # noqa: PT011 User(username="aa") def test_cannot_create_user_with_too_long_username(self): - with pytest.raises(ValueError): + with pytest.raises(ValueError): # noqa: PT011 User(username="1234567890123456789012345678901") def test_cannot_create_user_with_invalid_chars(self): - with pytest.raises(ValueError): + with pytest.raises(ValueError): # noqa: PT011 User(username="foo-bar") def test_cannot_create_user_with_too_long_email(self): - with pytest.raises(ValueError): + with pytest.raises(ValueError): # noqa: PT011 User(email="bob@b" + "o" * 100 + "b.com") def test_can_create_user_with_null_email(self): @@ -197,11 +197,11 @@ def test_activate_updates_activation_date(self, user): assert isinstance(user.activation_date, datetime) # We can't test for the exact time, but this should be close - assert user.activation_date - datetime.utcnow() < timedelta(seconds=1) + assert user.activation_date - datetime.utcnow() < timedelta(seconds=1) # noqa: DTZ003 def test_privacy_accepted_defaults_to_None(self): # nullable - assert getattr(User(), "privacy_accepted") is None + assert getattr(User(), "privacy_accepted") is None # noqa: B009 def test_repr(self, user): assert repr(user) == f"User(id={user.id})" diff --git a/tests/unit/h/paginator_test.py b/tests/unit/h/paginator_test.py index 
621a5e12e51..710f430fd67 100644 --- a/tests/unit/h/paginator_test.py +++ b/tests/unit/h/paginator_test.py @@ -227,8 +227,8 @@ def query(self): """Return a mock SQLAlchemy Query object.""" mock_query = mock.Mock(spec_set=["count", "offset", "limit", "all"]) mock_query.count.side_effect = lambda: mock.sentinel.total - mock_query.offset.side_effect = lambda n: mock_query - mock_query.limit.side_effect = lambda n: mock_query + mock_query.offset.side_effect = lambda n: mock_query # noqa: ARG005 + mock_query.limit.side_effect = lambda n: mock_query # noqa: ARG005 mock_query.all.side_effect = lambda: mock.sentinel.all return mock_query diff --git a/tests/unit/h/presenters/annotation_html_test.py b/tests/unit/h/presenters/annotation_html_test.py index 86a2861bfd0..35a56a456e3 100644 --- a/tests/unit/h/presenters/annotation_html_test.py +++ b/tests/unit/h/presenters/annotation_html_test.py @@ -76,7 +76,7 @@ def test_description(self, annotation, presenter): ) def test_created_day_string_from_annotation(self, annotation, presenter): - annotation.created = datetime.datetime(2015, 9, 4, 17, 37, 49, 517852) + annotation.created = datetime.datetime(2015, 9, 4, 17, 37, 49, 517852) # noqa: DTZ001 assert presenter.created_day_string == "2015-09-04" diff --git a/tests/unit/h/presenters/annotation_jsonld_test.py b/tests/unit/h/presenters/annotation_jsonld_test.py index 94e16656d51..6fa30438b82 100644 --- a/tests/unit/h/presenters/annotation_jsonld_test.py +++ b/tests/unit/h/presenters/annotation_jsonld_test.py @@ -8,8 +8,8 @@ class TestAnnotationJSONLDPresenter: def test_it(self, presenter, annotation, links_service): - annotation.created = datetime.datetime(2016, 2, 24, 18, 3, 25, 768) - annotation.updated = datetime.datetime(2016, 2, 29, 10, 24, 5, 564) + annotation.created = datetime.datetime(2016, 2, 24, 18, 3, 25, 768) # noqa: DTZ001 + annotation.updated = datetime.datetime(2016, 2, 29, 10, 24, 5, 564) # noqa: DTZ001 expected = { "@context": "http://www.w3.org/ns/anno.jsonld", diff --git a/tests/unit/h/presenters/document_json_test.py b/tests/unit/h/presenters/document_json_test.py index b4a0855dd1b..0cd1e2064da 100644 --- a/tests/unit/h/presenters/document_json_test.py +++ b/tests/unit/h/presenters/document_json_test.py @@ -46,4 +46,4 @@ def test_asdict_does_not_render_other_meta_than_title(self, db_session): db_session.flush() presenter = DocumentJSONPresenter(document) - assert {"title": ["Foo"]} == presenter.asdict() + assert {"title": ["Foo"]} == presenter.asdict() # noqa: SIM300 diff --git a/tests/unit/h/presenters/group_membership_json_test.py b/tests/unit/h/presenters/group_membership_json_test.py index 5e3a919f3a9..8f807088f8d 100644 --- a/tests/unit/h/presenters/group_membership_json_test.py +++ b/tests/unit/h/presenters/group_membership_json_test.py @@ -69,6 +69,6 @@ def membership(self, user, group): return GroupMembership( user=user, group=group, - created=datetime(1970, 1, 1, 0, 0, 0), - updated=datetime(1970, 1, 1, 0, 0, 1), + created=datetime(1970, 1, 1, 0, 0, 0), # noqa: DTZ001 + updated=datetime(1970, 1, 1, 0, 0, 1), # noqa: DTZ001 ) diff --git a/tests/unit/h/schemas/annotation_test.py b/tests/unit/h/schemas/annotation_test.py index b74cfd0c74a..ef94c15fb30 100644 --- a/tests/unit/h/schemas/annotation_test.py +++ b/tests/unit/h/schemas/annotation_test.py @@ -342,7 +342,7 @@ def test_it_does_not_pass_modified_dict_to_document_metas_from_data( def document_uris_from_data( document, - claimant, + claimant, # noqa: ARG001 ): document["new_key"] = "new_value" document["top_level_key"] = 
"new_value" diff --git a/tests/unit/h/schemas/base_test.py b/tests/unit/h/schemas/base_test.py index 01ecdbf08d8..e660a12705f 100644 --- a/tests/unit/h/schemas/base_test.py +++ b/tests/unit/h/schemas/base_test.py @@ -17,7 +17,7 @@ class ExampleCSRFSchema(CSRFSchema): class ExampleJSONSchema(JSONSchema): - schema = { + schema = { # noqa: RUF012 "$schema": "http://json-schema.org/draft-04/schema#", "type": "object", "properties": {"foo": {"type": "string"}, "bar": {"type": "integer"}}, @@ -52,7 +52,7 @@ def test_it_raises_for_unsupported_schema_versions(self): class BadSchema(JSONSchema): schema_version = 95 - with pytest.raises(ValueError): + with pytest.raises(ValueError): # noqa: PT011 BadSchema() def test_it_returns_data_when_valid(self): diff --git a/tests/unit/h/schemas/forms/accounts/reset_password_test.py b/tests/unit/h/schemas/forms/accounts/reset_password_test.py index e1f5d856f16..c13557cc13c 100644 --- a/tests/unit/h/schemas/forms/accounts/reset_password_test.py +++ b/tests/unit/h/schemas/forms/accounts/reset_password_test.py @@ -64,7 +64,7 @@ def test_it_is_invalid_with_expired_token(self, schema, serializer): ( # This situation triggers if the users password has not been used since # the token was issued. Note our DB dates are not timezone aware. - datetime.now() - timedelta(days=1), + datetime.now() - timedelta(days=1), # noqa: DTZ005 # ... or if it's never been reset None, ), @@ -88,7 +88,7 @@ def test_it_returns_user_when_valid( def test_it_is_invalid_if_user_has_already_reset_their_password(self, schema, user): # This situation triggers if the users password has been used since # the token was issued. Note our DB dates are not timezone aware. - user.password_updated = datetime.now() + timedelta(days=1) + user.password_updated = datetime.now() + timedelta(days=1) # noqa: DTZ005 with pytest.raises(colander.Invalid) as exc: schema.deserialize({"user": "EXPIRED_TOKEN", "password": "new-password"}) @@ -101,7 +101,7 @@ def schema(self, pyramid_csrf_request): return ResetPasswordSchema().bind(request=pyramid_csrf_request) @pytest.fixture(autouse=True) - def serializer(self, pyramid_csrf_request, pyramid_config): + def serializer(self, pyramid_csrf_request, pyramid_config): # noqa: ARG002 # We must be after `pyramid_config` in the queue, as it replaces the # registry object with another one which undoes our changes here diff --git a/tests/unit/h/schemas/forms/admin/group_test.py b/tests/unit/h/schemas/forms/admin/group_test.py index 1bc9e67843d..b92a9c766c8 100644 --- a/tests/unit/h/schemas/forms/admin/group_test.py +++ b/tests/unit/h/schemas/forms/admin/group_test.py @@ -192,7 +192,7 @@ def group_data(org): @pytest.fixture def user_service(user_service, factories): - def fetch(username, authority): + def fetch(username, authority): # noqa: ARG001 if "invalid" in username: return False diff --git a/tests/unit/h/search/config_test.py b/tests/unit/h/search/config_test.py index 72f3dbe410a..ff96e176408 100644 --- a/tests/unit/h/search/config_test.py +++ b/tests/unit/h/search/config_test.py @@ -35,7 +35,7 @@ def test_strip_scheme_char_filter(): assert re.sub(p, r, "uri:x-pdf:1234") == "x-pdf:1234" assert re.sub(p, r, "example.com") == "example.com" # This is ambiguous, and possibly cannot be expected to work. 
- # assert(re.sub(p, r, 'localhost:5000') == 'localhost:5000') + # assert(re.sub(p, r, 'localhost:5000') == 'localhost:5000') # noqa: ERA001 def test_path_url_filter(): @@ -137,7 +137,7 @@ def mock_es_client(self, mock_es_client): # Simulate the ICU Analysis plugin, and get around some funky stuff # the ES client library does which confuses autospeccing mock_es_client.conn.cat.plugins = MagicMock() - mock_es_client.conn.cat.plugins.return_value = "\n".join( + mock_es_client.conn.cat.plugins.return_value = "\n".join( # noqa: FLY002 ["foo", "analysis-icu"] ) return mock_es_client diff --git a/tests/unit/h/search/conftest.py b/tests/unit/h/search/conftest.py index f0512a03f9c..77c3ae04748 100644 --- a/tests/unit/h/search/conftest.py +++ b/tests/unit/h/search/conftest.py @@ -48,7 +48,7 @@ def _index(*annotations): def search_index( es_client, pyramid_request, - moderation_service, + moderation_service, # noqa: ARG001 annotation_read_service, ): return SearchIndexService( diff --git a/tests/unit/h/search/core_test.py b/tests/unit/h/search/core_test.py index 086679bfb90..67b24276758 100644 --- a/tests/unit/h/search/core_test.py +++ b/tests/unit/h/search/core_test.py @@ -121,7 +121,7 @@ def test_replies_can_come_before_annotations(self, pyramid_request, Annotation): Things are returned in updated order so normally a reply would appear before the annotation that it is a reply to in the search results. """ - now = datetime.datetime.now() + now = datetime.datetime.now() # noqa: DTZ005 five_mins = datetime.timedelta(minutes=5) annotation = Annotation(updated=now, shared=True) reply = Annotation( @@ -141,7 +141,7 @@ def test_replies_can_come_after_annotations(self, pyramid_request, Annotation): updated the top-level annotation since the reply was created, then the annotation would come before the reply in the search results. """ - now = datetime.datetime.now() + now = datetime.datetime.now() # noqa: DTZ005 five_mins = datetime.timedelta(minutes=5) annotation = Annotation(updated=now + five_mins, shared=True) reply = Annotation(updated=now, references=[annotation.id], shared=True) @@ -194,7 +194,7 @@ def test_replies_are_ordered_most_recently_updated_first( self, Annotation, pyramid_request ): annotation = Annotation(shared=True) - now = datetime.datetime.now() + now = datetime.datetime.now() # noqa: DTZ005 five_mins = datetime.timedelta(minutes=5) reply_1 = Annotation( updated=now + (five_mins * 2), references=[annotation.id], shared=True @@ -212,7 +212,7 @@ def test_replies_are_ordered_most_recently_updated_first( def test_replies_ignore_the_sort_param(self, Annotation, pyramid_request): annotation = Annotation(shared=True) - now = datetime.datetime.now() + now = datetime.datetime.now() # noqa: DTZ005 five_mins = datetime.timedelta(minutes=5) reply_1 = Annotation( id="3", updated=now, references=[annotation.id], shared=True @@ -263,7 +263,7 @@ def test_replies_from_different_pages_are_included( ): """Replies that would not be on the same page are included.""" # First create an annotation and a reply. 
- now = datetime.datetime.now() + now = datetime.datetime.now() # noqa: DTZ005 five_mins = datetime.timedelta(minutes=5) annotation = Annotation(updated=now + five_mins, shared=True) reply = Annotation(updated=now, references=[annotation.id], shared=True) diff --git a/tests/unit/h/search/index_test.py b/tests/unit/h/search/index_test.py index 803a2d802e0..86f2815144d 100644 --- a/tests/unit/h/search/index_test.py +++ b/tests/unit/h/search/index_test.py @@ -143,7 +143,7 @@ def test_delete(self, batch_indexer, factories, get_indexed_ann): @pytest.fixture -def batch_indexer(db_session, es_client, pyramid_request, moderation_service): +def batch_indexer(db_session, es_client, pyramid_request, moderation_service): # noqa: ARG001 return BatchIndexer(db_session, es_client, pyramid_request) diff --git a/tests/unit/h/search/parser_test.py b/tests/unit/h/search/parser_test.py index 15a27017e47..f16a39ad2f8 100644 --- a/tests/unit/h/search/parser_test.py +++ b/tests/unit/h/search/parser_test.py @@ -169,8 +169,8 @@ def test_parse_with_any_nonwhitespace_text(kw, value): # Items which used escape sequences rather than using alternate quotes, # e.g. original queries such as: # - # group:"foo \"hello\" bar" - # tag:'wibble \'giraffe\' bang' + # group:"foo \"hello\" bar" # noqa: ERA001 + # tag:'wibble \'giraffe\' bang' # noqa: ERA001 {"group": 'foo \\"hello\\" bar'}, {"tag": "wibble \\'giraffe\\' bang"}, # Items which contain both single and double quotes diff --git a/tests/unit/h/search/query_test.py b/tests/unit/h/search/query_test.py index 4321bbae98d..2c6d5b2c38c 100644 --- a/tests/unit/h/search/query_test.py +++ b/tests/unit/h/search/query_test.py @@ -378,7 +378,7 @@ def test_matches_only_annotations_from_specified_groups( annotation_ids = [Annotation(groupid=pubid).id for pubid in group_pubids] result = search.run( - webob.multidict.MultiDict((("group", pubid) for pubid in group_pubids)) + webob.multidict.MultiDict(("group", pubid) for pubid in group_pubids) ) group_service.groupids_readable_by.assert_called_with( @@ -455,7 +455,7 @@ def search(self, search): class TestUriCombinedWildcardFilter: - # TODO - Explicit test of URL normalization (ie. that search normalizes input + # TODO - Explicit test of URL normalization (ie. that search normalizes input # noqa: FIX002, TD002, TD003, TD004 # URL using `h.util.uri.normalize` and queries with that). @pytest.mark.parametrize("field", ("uri", "url")) @@ -496,7 +496,7 @@ def test_returns_all_annotations_with_equivalent_uris( ): search = get_search() # Mark all these uri's as equivalent uri's. - storage.expand_uri.side_effect = lambda _, x: [ + storage.expand_uri.side_effect = lambda _, x: [ # noqa: ARG005 "urn:x-pdf:1234", "file:///Users/june/article.pdf", "doi:10.1.1/1234", @@ -659,7 +659,7 @@ def test_pops_params(self, es_dsl_search, pyramid_request, params, separate_keys @pytest.fixture def get_search(self, search, pyramid_request): - def _get_search(separate_keys=True): + def _get_search(separate_keys=True): # noqa: FBT002 search.append_modifier( query.UriCombinedWildcardFilter(pyramid_request, separate_keys) ) @@ -1003,7 +1003,7 @@ def test_it_returns_annotation_counts_by_user( @pytest.fixture -def search(pyramid_request, group_service): +def search(pyramid_request, group_service): # noqa: ARG001 search = Search(pyramid_request) # Remove all default modifiers and aggregators except Sorter. 
search.clear() diff --git a/tests/unit/h/security/permits_test.py b/tests/unit/h/security/permits_test.py index 07f89bce309..8408a57110a 100644 --- a/tests/unit/h/security/permits_test.py +++ b/tests/unit/h/security/permits_test.py @@ -18,7 +18,7 @@ def always_false(_identity, _context): def explode(_identity, _context): - assert False, "This should not be reached" # pragma: no cover + assert False, "This should not be reached" # pragma: no cover # noqa: B011, PT015 class TestIdentityPermits: diff --git a/tests/unit/h/security/policy/_api_test.py b/tests/unit/h/security/policy/_api_test.py index 8e42d4ba7a4..3f4100b681d 100644 --- a/tests/unit/h/security/policy/_api_test.py +++ b/tests/unit/h/security/policy/_api_test.py @@ -127,7 +127,7 @@ def handles(request) -> bool: create_autospec(Spec, spec_set=True), create_autospec(Spec, spec_set=True), ] - for policy, return_value in zip(policies, return_values): + for policy, return_value in zip(policies, return_values, strict=False): policy.handles.return_value = return_value returned_policies = applicable_policies(pyramid_request, policies) diff --git a/tests/unit/h/security/policy/_auth_client_test.py b/tests/unit/h/security/policy/_auth_client_test.py index f39f86f90c9..8ec90f6f690 100644 --- a/tests/unit/h/security/policy/_auth_client_test.py +++ b/tests/unit/h/security/policy/_auth_client_test.py @@ -136,9 +136,7 @@ def with_credentials(self, pyramid_request, auth_client): @classmethod def set_http_credentials(cls, pyramid_request, client_id, client_secret): - encoded = base64.standard_b64encode( - f"{client_id}:{client_secret}".encode("utf-8") - ) + encoded = base64.standard_b64encode(f"{client_id}:{client_secret}".encode()) creds = encoded.decode("ascii") pyramid_request.headers["Authorization"] = f"Basic {creds}" diff --git a/tests/unit/h/security/policy/_cookie_test.py b/tests/unit/h/security/policy/_cookie_test.py index 090e329645f..3e2603c1d7f 100644 --- a/tests/unit/h/security/policy/_cookie_test.py +++ b/tests/unit/h/security/policy/_cookie_test.py @@ -117,7 +117,9 @@ def remember(cookie, *_args, **_kwargs): sentinel.api_authcookie_header_2, ] - assert False, "Should never reach here" # pragma: no cover + assert False, ( # noqa: B011, PT015 + "Should never reach here" + ) # pragma: no cover helper.remember.side_effect = remember @@ -165,7 +167,9 @@ def forget(cookie, *_args, **_kwargs): sentinel.api_authcookie_header_2, ] - assert False, "Should never reach here" # pragma: no cover + assert False, ( # noqa: B011, PT015 + "Should never reach here" + ) # pragma: no cover helper.forget.side_effect = forget diff --git a/tests/unit/h/security/policy/top_level_test.py b/tests/unit/h/security/policy/top_level_test.py index 5bd7fa6d48c..d56cc511ddf 100644 --- a/tests/unit/h/security/policy/top_level_test.py +++ b/tests/unit/h/security/policy/top_level_test.py @@ -81,7 +81,7 @@ def test_api_request( AuthClientPolicy.assert_called_once_with() assert webob.cookies.SignedCookieProfile.call_args_list == [ call( - secret="test_h_api_auth_cookie_secret", + secret="test_h_api_auth_cookie_secret", # noqa: S106 salt="test_h_api_auth_cookie_salt", cookie_name="h_api_authcookie.v2", max_age=31539600, @@ -129,7 +129,7 @@ def test_non_api_request( assert webob.cookies.SignedCookieProfile.call_args_list == [ call( - secret="test_h_api_auth_cookie_secret", + secret="test_h_api_auth_cookie_secret", # noqa: S106 salt="test_h_api_auth_cookie_salt", cookie_name="h_api_authcookie.v2", max_age=31539600, @@ -138,7 +138,7 @@ def test_non_api_request( 
samesite="strict", ), call( - secret="test_h_auth_cookie_secret", + secret="test_h_auth_cookie_secret", # noqa: S106 salt="authsanity", cookie_name="auth", max_age=31536000, @@ -196,8 +196,8 @@ def CookiePolicy(mocker): @pytest.fixture def pyramid_settings(pyramid_settings): - pyramid_settings["h_auth_cookie_secret"] = "test_h_auth_cookie_secret" - pyramid_settings["h_api_auth_cookie_secret"] = "test_h_api_auth_cookie_secret" + pyramid_settings["h_auth_cookie_secret"] = "test_h_auth_cookie_secret" # noqa: S105 + pyramid_settings["h_api_auth_cookie_secret"] = "test_h_api_auth_cookie_secret" # noqa: S105 pyramid_settings["h_api_auth_cookie_salt"] = "test_h_api_auth_cookie_salt" return pyramid_settings diff --git a/tests/unit/h/services/annotation_delete_test.py b/tests/unit/h/services/annotation_delete_test.py index 299f8113308..16cfdb6dca6 100644 --- a/tests/unit/h/services/annotation_delete_test.py +++ b/tests/unit/h/services/annotation_delete_test.py @@ -68,7 +68,7 @@ def test_it_deletes_all_annotations(self, svc, annotation): ], ) def test_bulk_delete(self, db_session, svc, factories, deleted, mins_ago, purged): - updated = datetime.utcnow() - timedelta(minutes=mins_ago) + updated = datetime.utcnow() - timedelta(minutes=mins_ago) # noqa: DTZ003 annotation = factories.Annotation(deleted=deleted, updated=updated) annotation_slim = factories.AnnotationSlim( deleted=deleted, updated=updated, annotation=annotation @@ -96,7 +96,7 @@ def annotation(factories): @pytest.fixture -def svc(db_session, pyramid_request, annotation_write_service, queue_service): +def svc(db_session, pyramid_request, annotation_write_service, queue_service): # noqa: ARG001 pyramid_request.db = db_session return annotation_delete_service_factory({}, pyramid_request) diff --git a/tests/unit/h/services/annotation_json_test.py b/tests/unit/h/services/annotation_json_test.py index 20095051308..5b563200dbf 100644 --- a/tests/unit/h/services/annotation_json_test.py +++ b/tests/unit/h/services/annotation_json_test.py @@ -15,8 +15,8 @@ class TestAnnotationJSONService: def test_present( self, service, annotation, links_service, user_service, DocumentJSONPresenter ): - annotation.created = datetime(2016, 2, 24, 18, 3, 25, 768) - annotation.updated = datetime(2016, 2, 29, 10, 24, 5, 564) + annotation.created = datetime(2016, 2, 24, 18, 3, 25, 768) # noqa: DTZ001 + annotation.updated = datetime(2016, 2, 29, 10, 24, 5, 564) # noqa: DTZ001 annotation.references = ["referenced-id-1", "referenced-id-2"] annotation.extra = {"extra-1": "foo", "extra-2": "bar"} diff --git a/tests/unit/h/services/annotation_read_test.py b/tests/unit/h/services/annotation_read_test.py index 7eaab7b98fc..8b0e12a39b3 100644 --- a/tests/unit/h/services/annotation_read_test.py +++ b/tests/unit/h/services/annotation_read_test.py @@ -58,7 +58,7 @@ def query_counter(self, db_engine): class QueryCounter: count = 0 - def __call__(self, *args, **kwargs): + def __call__(self, *args, **kwargs): # noqa: ARG002 self.count += 1 def reset(self): diff --git a/tests/unit/h/services/annotation_sync_test.py b/tests/unit/h/services/annotation_sync_test.py index 6bea8ab4ff0..1ccd80ed3ca 100644 --- a/tests/unit/h/services/annotation_sync_test.py +++ b/tests/unit/h/services/annotation_sync_test.py @@ -259,7 +259,7 @@ def test_deleting_multiple_jobs_with_the_same_annotation_id( def test_metrics(self, factories, index, now, svc, queue_service): queue_service.get.return_value = [] - def add_job(indexed=True, updated=False, deleted=False, **kwargs): + def add_job(indexed=True, 
updated=False, deleted=False, **kwargs): # noqa: FBT002 annotation = factories.Annotation() job = factories.SyncAnnotationJob(annotation=annotation, **kwargs) queue_service.get.return_value.append(job) @@ -299,7 +299,7 @@ def add_job(indexed=True, updated=False, deleted=False, **kwargs): @pytest.fixture def now(self): """Return the current UTC time.""" - return datetime.datetime.utcnow() + return datetime.datetime.utcnow() # noqa: DTZ003 @pytest.fixture(autouse=True) def noise_annotations(self, factories, index): @@ -387,7 +387,7 @@ def test_get(self, db_helper, db_session, factories): ), } - # TODO: Annotations that don't exist in the DB. + # TODO: Annotations that don't exist in the DB. # noqa: FIX002, TD002, TD003 @pytest.fixture def db_helper(self, db_session): @@ -660,8 +660,8 @@ def pyramid_request(self, pyramid_request): def search_index_service( pyramid_request, es_client, - moderation_service, - nipsa_service, + moderation_service, # noqa: ARG001 + nipsa_service, # noqa: ARG001 ): # Construct a real (not mock) SearchIndexService so we can call its # methods to index annotations. diff --git a/tests/unit/h/services/annotation_write_test.py b/tests/unit/h/services/annotation_write_test.py index 65d6c6b2e9a..c28781b1bcc 100644 --- a/tests/unit/h/services/annotation_write_test.py +++ b/tests/unit/h/services/annotation_write_test.py @@ -20,7 +20,7 @@ def test_create_annotation( update_document_metadata, queue_service, annotation_read_service, - _validate_group, + _validate_group, # noqa: PT019 db_session, ): root_annotation = factories.Annotation() @@ -95,9 +95,9 @@ def test_update_annotation( annotation, update_document_metadata, queue_service, - _validate_group, + _validate_group, # noqa: PT019 ): - then = datetime.now() - timedelta(days=1) + then = datetime.now() - timedelta(days=1) # noqa: DTZ005 annotation.extra = {"key": "value"} annotation.updated = then @@ -142,7 +142,7 @@ def test_update_annotation( self.assert_annotation_slim(db_session, anno) def test_update_annotation_with_non_defaults(self, svc, annotation, queue_service): - then = datetime.now() - timedelta(days=1) + then = datetime.now() - timedelta(days=1) # noqa: DTZ005 annotation.updated = then result = svc.update_annotation( diff --git a/tests/unit/h/services/auth_ticket_test.py b/tests/unit/h/services/auth_ticket_test.py index 46a143c122a..f73ad781bf4 100644 --- a/tests/unit/h/services/auth_ticket_test.py +++ b/tests/unit/h/services/auth_ticket_test.py @@ -47,7 +47,7 @@ def test_verify_ticket_when_theres_no_ticket_id(self, service, user): def test_verify_ticket_returns_None_if_the_ticket_has_expired( self, service, auth_ticket ): - auth_ticket.expires = datetime.utcnow() - timedelta(hours=1) + auth_ticket.expires = datetime.utcnow() - timedelta(hours=1) # noqa: DTZ003 assert service.verify_ticket(auth_ticket.user.userid, auth_ticket.id) is None @@ -63,14 +63,15 @@ def test_verify_ticket_returns_None_if_the_ticket_has_expired( def test_verify_ticket_updates_the_expiry_time( self, service, auth_ticket, offset, expect_update ): - auth_ticket.updated = datetime.utcnow() - offset + auth_ticket.updated = datetime.utcnow() - offset # noqa: DTZ003 expires = auth_ticket.expires service.verify_ticket(auth_ticket.user.userid, auth_ticket.id) if expect_update: assert_nearly_equal( - auth_ticket.expires, datetime.utcnow() + AuthTicketService.TICKET_TTL + auth_ticket.expires, + datetime.utcnow() + AuthTicketService.TICKET_TTL, # noqa: DTZ003 ) else: assert auth_ticket.expires == expires @@ -85,7 +86,8 @@ def test_add_ticket(self, 
service, user, user_service): assert auth_ticket.user_userid == user.userid assert auth_ticket.id == "test_ticket_id" assert_nearly_equal( - auth_ticket.expires, datetime.utcnow() + AuthTicketService.TICKET_TTL + auth_ticket.expires, + datetime.utcnow() + AuthTicketService.TICKET_TTL, # noqa: DTZ003 ) assert service._ticket == auth_ticket # noqa: SLF001 assert inspect(auth_ticket).pending is True diff --git a/tests/unit/h/services/auth_token_test.py b/tests/unit/h/services/auth_token_test.py index 212dbff8b4d..19ffe8ec344 100644 --- a/tests/unit/h/services/auth_token_test.py +++ b/tests/unit/h/services/auth_token_test.py @@ -1,7 +1,7 @@ import datetime import pytest -from pytest import param +from pytest import param # noqa: PT013 from h.services.auth_token import ( AuthTokenService, @@ -86,11 +86,11 @@ def token(self, factories): return factories.DeveloperToken() def time(self, days_delta=0): - return datetime.datetime.utcnow() + datetime.timedelta(days=days_delta) + return datetime.datetime.utcnow() + datetime.timedelta(days=days_delta) # noqa: DTZ003 def _seconds_from_now(seconds): - return datetime.datetime.utcnow() + datetime.timedelta(seconds=seconds) + return datetime.datetime.utcnow() + datetime.timedelta(seconds=seconds) # noqa: DTZ003 class TestLongLivedToken: diff --git a/tests/unit/h/services/bulk_api/__init__.py b/tests/unit/h/services/bulk_api/__init__.py new file mode 100644 index 00000000000..e69de29bb2d diff --git a/tests/unit/h/services/bulk_api/group_test.py b/tests/unit/h/services/bulk_api/group_test.py index b8736babfe1..94a7a0bc510 100644 --- a/tests/unit/h/services/bulk_api/group_test.py +++ b/tests/unit/h/services/bulk_api/group_test.py @@ -8,7 +8,7 @@ class TestBulkGroupService: def test_it(self, svc, factories): - since = datetime(2023, 1, 1) + since = datetime(2023, 1, 1) # noqa: DTZ001 group = factories.Group() group_without_annos = factories.Group() group_with_annos_in_other_dates = factories.Group() diff --git a/tests/unit/h/services/bulk_api/lms_stats_test.py b/tests/unit/h/services/bulk_api/lms_stats_test.py index 7865d68c07f..2a5c3c00df5 100644 --- a/tests/unit/h/services/bulk_api/lms_stats_test.py +++ b/tests/unit/h/services/bulk_api/lms_stats_test.py @@ -85,7 +85,7 @@ def test_get_annotation_counts_filter_by_h_userids( svc, group, annotation_reply, - annotation_in_another_assignment, + annotation_in_another_assignment, # noqa: ARG002 reply_user, ): stats = svc.get_annotation_counts( diff --git a/tests/unit/h/services/bulk_executor/_executor_test.py b/tests/unit/h/services/bulk_executor/_executor_test.py index a065165280f..b0c058df247 100644 --- a/tests/unit/h/services/bulk_executor/_executor_test.py +++ b/tests/unit/h/services/bulk_executor/_executor_test.py @@ -2,7 +2,7 @@ from h_api.bulk_api.model.config_body import Configuration from h_api.enums import CommandType, DataType from h_api.exceptions import InvalidDeclarationError, UnsupportedOperationError -from pytest import param +from pytest import param # noqa: PT013 from h.services.bulk_executor._executor import BulkExecutor from tests.unit.h.services.bulk_executor.conftest import ( diff --git a/tests/unit/h/services/group_create_test.py b/tests/unit/h/services/group_create_test.py index 3cccd7d9a76..5ee317ddc37 100644 --- a/tests/unit/h/services/group_create_test.py +++ b/tests/unit/h/services/group_create_test.py @@ -83,7 +83,7 @@ def test_it_creates_group_with_no_organization_by_default( and if the caller passes `organization=None`. It's convenient for the caller to be able to do it either way. 
""" - if pass_kwarg: + if pass_kwarg: # noqa: SIM108 kwargs = {"organization": None} else: kwargs = {} @@ -203,7 +203,7 @@ def test_it_creates_group_with_no_organization_by_default( and if the caller passes `organization=None`. It's convenient for the caller to be able to do it either way. """ - if pass_kwarg: + if pass_kwarg: # noqa: SIM108 kwargs = {"organization": None} else: kwargs = {} @@ -371,7 +371,7 @@ def test_it_creates_group_with_no_organization_by_default( and if the caller passes `organization=None`. It's convenient for the caller to be able to do it either way. """ - if pass_kwarg: + if pass_kwarg: # noqa: SIM108 kwargs = {"organization": None} else: kwargs = {} @@ -452,7 +452,7 @@ def test_it_with_mismatched_authorities_raises_value_error( self, svc, origins, creator, factories ): org = factories.Organization(name="My organization", authority="bar.com") - with pytest.raises(ValueError): + with pytest.raises(ValueError): # noqa: PT011 svc.create_restricted_group( name="test_group", userid=creator.userid, diff --git a/tests/unit/h/services/group_members_test.py b/tests/unit/h/services/group_members_test.py index 461566ecc1a..55a7e3e8731 100644 --- a/tests/unit/h/services/group_members_test.py +++ b/tests/unit/h/services/group_members_test.py @@ -141,12 +141,12 @@ def membership_with_no_created_time(**kwargs): GroupMembership( group=group, user=users[0], - created=datetime(1970, 1, 1, 0, 0, 1), + created=datetime(1970, 1, 1, 0, 0, 1), # noqa: DTZ001 ), GroupMembership( group=group, user=users[1], - created=datetime(1970, 1, 1, 0, 0, 0), + created=datetime(1970, 1, 1, 0, 0, 0), # noqa: DTZ001 ), membership_with_no_created_time(group=group, user=users[3]), membership_with_no_created_time(group=group, user=users[2]), @@ -163,8 +163,8 @@ def membership_with_no_created_time(**kwargs): (None, "user_2"), (None, "user_3"), # Non-NULLs are ordered by created time, oldest first. 
- (datetime(1970, 1, 1, 0, 0, 0), "user_1"), - (datetime(1970, 1, 1, 0, 0, 1), "user_0"), + (datetime(1970, 1, 1, 0, 0, 0), "user_1"), # noqa: DTZ001 + (datetime(1970, 1, 1, 0, 0, 1), "user_0"), # noqa: DTZ001 ] diff --git a/tests/unit/h/services/group_test.py b/tests/unit/h/services/group_test.py index debe85d5d27..dc39b8ba649 100644 --- a/tests/unit/h/services/group_test.py +++ b/tests/unit/h/services/group_test.py @@ -15,7 +15,7 @@ def test_it_proxies_to_fetch_by_groupid_if_groupid_valid(self, svc): result = svc.fetch("group:something@somewhere.com") - assert svc.fetch_by_groupid.called_once_with("group:something@somewhere.com") + assert svc.fetch_by_groupid.called_once_with("group:something@somewhere.com") # noqa: PGH005 assert result == svc.fetch_by_groupid.return_value def test_it_proxies_to_fetch_by_pubid_if_not_groupid_syntax(self, svc): @@ -23,7 +23,7 @@ def test_it_proxies_to_fetch_by_pubid_if_not_groupid_syntax(self, svc): result = svc.fetch("abcdppp") - assert svc.fetch_by_pubid.called_once_with("abcdppp") + assert svc.fetch_by_pubid.called_once_with("abcdppp") # noqa: PGH005 assert result == svc.fetch_by_pubid.return_value @@ -95,8 +95,8 @@ def test_results_sorted_by_created_desc(self, svc): @pytest.fixture def groups(self, factories): return [ - factories.Group(name="Finger", created=datetime.datetime(2015, 8, 2)), - factories.Group(name="Fingers", created=datetime.datetime(2018, 2, 1)), + factories.Group(name="Finger", created=datetime.datetime(2015, 8, 2)), # noqa: DTZ001 + factories.Group(name="Fingers", created=datetime.datetime(2018, 2, 1)), # noqa: DTZ001 factories.Group(name="Hello"), factories.Group(name="Amber"), factories.Group(name="amber"), diff --git a/tests/unit/h/services/job_queue_metrics_test.py b/tests/unit/h/services/job_queue_metrics_test.py index f7b79001255..247aa5cc676 100644 --- a/tests/unit/h/services/job_queue_metrics_test.py +++ b/tests/unit/h/services/job_queue_metrics_test.py @@ -8,7 +8,7 @@ class TestJobQueue: def test_metrics_queue_length(self, factories, job_queue_metrics): - now = datetime.datetime.utcnow() + now = datetime.datetime.utcnow() # noqa: DTZ003 one_minute = datetime.timedelta(minutes=1) class JobFactory(factories.Job): diff --git a/tests/unit/h/services/job_queue_test.py b/tests/unit/h/services/job_queue_test.py index a3024759161..f0f4911c250 100644 --- a/tests/unit/h/services/job_queue_test.py +++ b/tests/unit/h/services/job_queue_test.py @@ -17,14 +17,14 @@ class TestQueueService: def test_get_ignores_jobs_that_are_expired(self, factories, svc): - now = datetime.utcnow() + now = datetime.utcnow() # noqa: DTZ003 factories.SyncAnnotationJob(expires_at=now - timedelta(hours=1)) assert not svc.get("sync_annotation", limit=100) def test_it_ignores_jobs_that_arent_scheduled_yet(self, factories, svc): - now = datetime.utcnow() + now = datetime.utcnow() # noqa: DTZ003 factories.SyncAnnotationJob(scheduled_at=now + timedelta(hours=1)) assert not svc.get("sync_annotation", limit=100) @@ -39,7 +39,7 @@ def test_it_ignores_jobs_beyond_limit(self, factories, svc): @freeze_time("2023-01-01") def test_add_where(self, factories, db_session, svc): - now = datetime.utcnow() + now = datetime.utcnow() # noqa: DTZ003 matching = [ factories.Annotation(shared=True), factories.Annotation(shared=True), @@ -222,11 +222,11 @@ def database_id(self, annotation): """Return `annotation.id` in the internal format used within the database.""" return str(uuid.UUID(URLSafeUUID.url_safe_to_hex(annotation.id))) - @pytest.fixture() + @pytest.fixture def 
add_where(self, svc): with patch.object(svc, "add_where") as add_where: yield add_where - @pytest.fixture() + @pytest.fixture def svc(self, db_session): return JobQueueService(db_session) diff --git a/tests/unit/h/services/links_test.py b/tests/unit/h/services/links_test.py index 186c9cc5971..a1dddc6b9c3 100644 --- a/tests/unit/h/services/links_test.py +++ b/tests/unit/h/services/links_test.py @@ -88,17 +88,26 @@ def registry(pyramid_config): pyramid_config.add_route("param.route", "/annotations/{id}") add_annotation_link_generator( - pyramid_config, "giraffe", lambda r, a: "http://giraffes.com" + pyramid_config, + "giraffe", + lambda r, a: "http://giraffes.com", # noqa: ARG005 ) add_annotation_link_generator( - pyramid_config, "elephant", lambda r, a: "https://elephant.org" + pyramid_config, + "elephant", + lambda r, a: "https://elephant.org", # noqa: ARG005 ) add_annotation_link_generator( - pyramid_config, "kiwi", lambda r, a: "http://kiwi.net", hidden=True + pyramid_config, + "kiwi", + lambda r, a: "http://kiwi.net", # noqa: ARG005 + hidden=True, ) - add_annotation_link_generator(pyramid_config, "returnsnone", lambda r, a: None) + add_annotation_link_generator(pyramid_config, "returnsnone", lambda r, a: None) # noqa: ARG005 add_annotation_link_generator( - pyramid_config, "namedroute", lambda r, a: r.route_url("some.named.route") + pyramid_config, + "namedroute", + lambda r, a: r.route_url("some.named.route"), # noqa: ARG005 ) add_annotation_link_generator( pyramid_config, diff --git a/tests/unit/h/services/oauth/_jwt_grant_test.py b/tests/unit/h/services/oauth/_jwt_grant_test.py index 2a7b801ef1b..d6cdb56c9ac 100644 --- a/tests/unit/h/services/oauth/_jwt_grant_test.py +++ b/tests/unit/h/services/oauth/_jwt_grant_test.py @@ -115,7 +115,7 @@ def test_validates_grant_type(self, grant, oauth_request, request_validator): grant.validate_token_request(oauth_request) def test_verifies_grant_token(self, grant, oauth_request): - oauth_request.client.authclient.secret = "bogus" + oauth_request.client.authclient.secret = "bogus" # noqa: S105 with pytest.raises(errors.InvalidGrantError) as exc: grant.validate_token_request(oauth_request) @@ -162,8 +162,8 @@ def grant(pyramid_request, request_validator): def _oauth_request(authclient, user): - exp = datetime.utcnow() + timedelta(minutes=5) - nbf = datetime.utcnow() - timedelta(seconds=2) + exp = datetime.utcnow() + timedelta(minutes=5) # noqa: DTZ003 + nbf = datetime.utcnow() - timedelta(seconds=2) # noqa: DTZ003 claims = { "aud": "domain.test", "exp": timegm(exp.utctimetuple()), diff --git a/tests/unit/h/services/oauth/_jwt_grant_token_test.py b/tests/unit/h/services/oauth/_jwt_grant_token_test.py index b9f51647724..2139e449c0f 100644 --- a/tests/unit/h/services/oauth/_jwt_grant_token_test.py +++ b/tests/unit/h/services/oauth/_jwt_grant_token_test.py @@ -179,7 +179,7 @@ def test_init_raises_for_nbf_claim_in_future(self, claims): assert exc.value.description == "Grant token is not yet valid." 
def test_expiry_returns_exp_claim(self, claims): - now = datetime.utcnow().replace(microsecond=0) + now = datetime.utcnow().replace(microsecond=0) # noqa: DTZ003 delta = timedelta(minutes=2) claims["exp"] = epoch(timestamp=now, delta=delta) @@ -190,7 +190,7 @@ def test_expiry_returns_exp_claim(self, claims): assert grant_token.expiry == (now + delta) def test_not_before_returns_nbf_claim(self, claims): - now = datetime.utcnow().replace(microsecond=0) + now = datetime.utcnow().replace(microsecond=0) # noqa: DTZ003 delta = timedelta(minutes=-2) claims["nbf"] = epoch(timestamp=now, delta=delta) @@ -246,7 +246,7 @@ def claims(self): def epoch(timestamp=None, delta=None): if timestamp is None: - timestamp = datetime.utcnow() + timestamp = datetime.utcnow() # noqa: DTZ003 if delta is not None: timestamp = timestamp + delta diff --git a/tests/unit/h/services/oauth/_validator_test.py b/tests/unit/h/services/oauth/_validator_test.py index 842c3c68e7d..d93c9e618de 100644 --- a/tests/unit/h/services/oauth/_validator_test.py +++ b/tests/unit/h/services/oauth/_validator_test.py @@ -50,7 +50,7 @@ def test_returns_False_for_missing_client(self, svc, client, oauth_request): def test_returns_False_when_secrets_do_not_match(self, svc, client, oauth_request): oauth_request.client_id = client.id - oauth_request.client_secret = "this-is-invalid" + oauth_request.client_secret = "this-is-invalid" # noqa: S105 assert not svc.authenticate_client(oauth_request) @@ -283,19 +283,19 @@ def token(self, factories): class TestInvalidateRefreshToken: def test_it_shortens_refresh_token_expires(self, svc, oauth_request, token, utcnow): - utcnow.return_value = datetime.datetime(2017, 8, 2, 18, 36, 53) + utcnow.return_value = datetime.datetime(2017, 8, 2, 18, 36, 53) # noqa: DTZ001 svc.invalidate_refresh_token(token.refresh_token, oauth_request) - assert token.refresh_token_expires == datetime.datetime(2017, 8, 2, 18, 39, 53) + assert token.refresh_token_expires == datetime.datetime(2017, 8, 2, 18, 39, 53) # noqa: DTZ001 def test_it_is_noop_when_refresh_token_expires_within_new_ttl( self, svc, oauth_request, token, utcnow ): - utcnow.return_value = datetime.datetime(2017, 8, 2, 18, 36, 53) - token.refresh_token_expires = datetime.datetime(2017, 8, 2, 18, 37, 53) + utcnow.return_value = datetime.datetime(2017, 8, 2, 18, 36, 53) # noqa: DTZ001 + token.refresh_token_expires = datetime.datetime(2017, 8, 2, 18, 37, 53) # noqa: DTZ001 svc.invalidate_refresh_token(token.refresh_token, oauth_request) - assert token.refresh_token_expires == datetime.datetime(2017, 8, 2, 18, 37, 53) + assert token.refresh_token_expires == datetime.datetime(2017, 8, 2, 18, 37, 53) # noqa: DTZ001 @pytest.fixture def token(self, factories): @@ -358,10 +358,10 @@ def test_it_sets_authclient(self, svc, client, code, oauth_request): assert authz_code.authclient == client def test_it_sets_expires(self, svc, client, code, oauth_request, utcnow): - utcnow.return_value = datetime.datetime(2017, 7, 13, 18, 29, 28) + utcnow.return_value = datetime.datetime(2017, 7, 13, 18, 29, 28) # noqa: DTZ001 authz_code = svc.save_authorization_code(client.id, code, oauth_request) - assert authz_code.expires == datetime.datetime(2017, 7, 13, 18, 39, 28) + assert authz_code.expires == datetime.datetime(2017, 7, 13, 18, 39, 28) # noqa: DTZ001 def test_it_sets_code(self, svc, client, code, oauth_request): authz_code = svc.save_authorization_code(client.id, code, oauth_request) @@ -391,22 +391,22 @@ def test_it_sets_value(self, svc, token_payload, oauth_request): assert token.value 
== "test-access-token" def test_it_sets_expires(self, svc, token_payload, oauth_request, utcnow): - utcnow.return_value = datetime.datetime(2017, 7, 13, 18, 29, 28) + utcnow.return_value = datetime.datetime(2017, 7, 13, 18, 29, 28) # noqa: DTZ001 token = svc.save_bearer_token(token_payload, oauth_request) - assert token.expires == datetime.datetime(2017, 7, 13, 19, 29, 28) + assert token.expires == datetime.datetime(2017, 7, 13, 19, 29, 28) # noqa: DTZ001 def test_it_sets_refresh_token_expires( self, svc, token_payload, oauth_request, utcnow ): - utcnow.return_value = datetime.datetime(2017, 7, 13, 18, 29, 28) + utcnow.return_value = datetime.datetime(2017, 7, 13, 18, 29, 28) # noqa: DTZ001 token = svc.save_bearer_token(token_payload, oauth_request) - assert token.refresh_token_expires == datetime.datetime(2017, 7, 13, 20, 29, 28) + assert token.refresh_token_expires == datetime.datetime(2017, 7, 13, 20, 29, 28) # noqa: DTZ001 def test_it_sets_authclient(self, svc, token_payload, oauth_request): token = svc.save_bearer_token(token_payload, oauth_request) - assert token.refresh_token == "test-refresh-token" + assert token.refresh_token == "test-refresh-token" # noqa: S105 def test_it_removes_refresh_token_expires_in_from_payload( self, svc, token_payload, oauth_request @@ -422,7 +422,7 @@ def test_it_invalidates_old_refresh_token( "h.services.oauth._validator.OAuthValidator.invalidate_refresh_token" ) oauth_request.grant_type = "refresh_token" - oauth_request.refresh_token = "the-refresh-token" + oauth_request.refresh_token = "the-refresh-token" # noqa: S105 svc.save_bearer_token(token_payload, oauth_request) @@ -502,7 +502,7 @@ def test_returns_False_when_clients_do_not_match( assert not result def test_returns_False_when_expired(self, svc, authz_code, client, oauth_request): - authz_code.expires = datetime.datetime.utcnow() - datetime.timedelta(minutes=5) + authz_code.expires = datetime.datetime.utcnow() - datetime.timedelta(minutes=5) # noqa: DTZ003 result = svc.validate_code( client.client_id, authz_code.code, client, oauth_request @@ -612,7 +612,7 @@ def test_returns_False_when_token_not_found(self, svc, client, oauth_request): def test_returns_False_when_refresh_token_expired( self, svc, client, oauth_request, token ): - token.refresh_token_expires = datetime.datetime.utcnow() - datetime.timedelta( + token.refresh_token_expires = datetime.datetime.utcnow() - datetime.timedelta( # noqa: DTZ003 minutes=2 ) result = svc.validate_refresh_token(token.refresh_token, client, oauth_request) @@ -634,7 +634,7 @@ def test_returns_True_when_token_valid(self, svc, client, oauth_request, token): def test_returns_True_when_access_token_expired( self, svc, client, oauth_request, token ): - token.expires = datetime.datetime.utcnow() - datetime.timedelta(minutes=2) + token.expires = datetime.datetime.utcnow() - datetime.timedelta(minutes=2) # noqa: DTZ003 result = svc.validate_refresh_token(token.refresh_token, client, oauth_request) assert result is True diff --git a/tests/unit/h/services/oauth/service_test.py b/tests/unit/h/services/oauth/service_test.py index 699bd66fb3e..8d81ee512ae 100644 --- a/tests/unit/h/services/oauth/service_test.py +++ b/tests/unit/h/services/oauth/service_test.py @@ -42,7 +42,7 @@ def test_load_client_id_raises_for_missing_refresh_token( self, svc, oauth_request, oauth_validator ): oauth_validator.find_refresh_token.return_value = None - oauth_request.refresh_token = "missing" + oauth_request.refresh_token = "missing" # noqa: S105 with 
pytest.raises(InvalidRefreshTokenError): svc._load_client_id_from_refresh_token(oauth_request) # noqa: SLF001 diff --git a/tests/unit/h/services/user_delete_test.py b/tests/unit/h/services/user_delete_test.py index bcae9448339..953c38209f8 100644 --- a/tests/unit/h/services/user_delete_test.py +++ b/tests/unit/h/services/user_delete_test.py @@ -130,7 +130,7 @@ def test_purge_deleted_users( logging.INFO, f"Purging {user!r} - completed job: {job!r}", ) - for user, job in zip(users[:2], jobs[:2]) + for user, job in zip(users[:2], jobs[:2], strict=False) ] assert purger.delete_authtickets.call_args_list == [ call(user) for user in users diff --git a/tests/unit/h/services/user_password_test.py b/tests/unit/h/services/user_password_test.py index 605fa3ca78d..78cd3107104 100644 --- a/tests/unit/h/services/user_password_test.py +++ b/tests/unit/h/services/user_password_test.py @@ -29,7 +29,7 @@ def test_check_password_false_with_incorrect_password(self, svc, user): def test_check_password_validates_old_style_passwords(self, svc, user): user.salt = "somesalt" # Generated with passlib.hash.bcrypt.hash('foobar' + 'somesalt', rounds=4) - user.password = "$2a$04$zDQnlV/YBG.ju2i14V15p.5nWYL52ZBqjGsBWgLAisGkEJw812BHy" + user.password = "$2a$04$zDQnlV/YBG.ju2i14V15p.5nWYL52ZBqjGsBWgLAisGkEJw812BHy" # noqa: S105 assert not svc.check_password(user, "somethingelse") assert svc.check_password(user, "foobar") @@ -37,7 +37,7 @@ def test_check_password_validates_old_style_passwords(self, svc, user): def test_check_password_upgrades_old_style_passwords(self, hasher, svc, user): user.salt = "somesalt" # Generated with passlib.hash.bcrypt.hash('foobar' + 'somesalt', rounds=4) - user.password = "$2a$04$zDQnlV/YBG.ju2i14V15p.5nWYL52ZBqjGsBWgLAisGkEJw812BHy" + user.password = "$2a$04$zDQnlV/YBG.ju2i14V15p.5nWYL52ZBqjGsBWgLAisGkEJw812BHy" # noqa: S105 svc.check_password(user, "foobar") @@ -49,7 +49,7 @@ def test_check_password_only_upgrades_when_password_is_correct( ): user.salt = "somesalt" # Generated with passlib.hash.bcrypt.hash('foobar' + 'somesalt', rounds=4) - user.password = "$2a$04$zDQnlV/YBG.ju2i14V15p.5nWYL52ZBqjGsBWgLAisGkEJw812BHy" + user.password = "$2a$04$zDQnlV/YBG.ju2i14V15p.5nWYL52ZBqjGsBWgLAisGkEJw812BHy" # noqa: S105 svc.check_password(user, "donkeys") @@ -59,7 +59,7 @@ def test_check_password_only_upgrades_when_password_is_correct( def test_check_password_works_after_upgrade(self, svc, user): user.salt = "somesalt" # Generated with passlib.hash.bcrypt.hash('foobar' + 'somesalt', rounds=4) - user.password = "$2a$04$zDQnlV/YBG.ju2i14V15p.5nWYL52ZBqjGsBWgLAisGkEJw812BHy" + user.password = "$2a$04$zDQnlV/YBG.ju2i14V15p.5nWYL52ZBqjGsBWgLAisGkEJw812BHy" # noqa: S105 svc.check_password(user, "foobar") @@ -67,7 +67,7 @@ def test_check_password_works_after_upgrade(self, svc, user): def test_check_password_upgrades_new_style_passwords(self, hasher, svc, user): # Generated with passlib.hash.bcrypt.hash('foobar', rounds=4, ident='2b') - user.password = "$2b$04$L2j.vXxlLt9JJNHHsy0EguslcaphW7vssSpHbhqCmf9ECsMiuTd1y" + user.password = "$2b$04$L2j.vXxlLt9JJNHHsy0EguslcaphW7vssSpHbhqCmf9ECsMiuTd1y" # noqa: S105 svc.check_password(user, "foobar") @@ -75,7 +75,7 @@ def test_check_password_upgrades_new_style_passwords(self, hasher, svc, user): def test_updating_password_unsets_salt(self, svc, user): user.salt = "somesalt" - user.password = "whatever" + user.password = "whatever" # noqa: S105 svc.update_password(user, "flibble") diff --git a/tests/unit/h/services/user_rename_test.py 
b/tests/unit/h/services/user_rename_test.py index 984ac4bccb4..9b15dc97876 100644 --- a/tests/unit/h/services/user_rename_test.py +++ b/tests/unit/h/services/user_rename_test.py @@ -111,7 +111,7 @@ def user(self, factories, db_session): def annotations(self, user, factories, db_session): anns = [] for _ in range(8): - anns.append(factories.Annotation(userid=user.userid)) + anns.append(factories.Annotation(userid=user.userid)) # noqa: PERF401 db_session.add_all(anns) db_session.flush() diff --git a/tests/unit/h/services/user_signup_test.py b/tests/unit/h/services/user_signup_test.py index 523c9d80cb5..c75b4c2832d 100644 --- a/tests/unit/h/services/user_signup_test.py +++ b/tests/unit/h/services/user_signup_test.py @@ -55,7 +55,7 @@ def test_signup_allows_user_with_empty_identities(self, svc): assert user.identities == [] def test_signup_passes_through_privacy_acceptance(self, svc): - now = datetime.datetime.utcnow() + now = datetime.datetime.utcnow() # noqa: DTZ003 user = svc.signup(username="foo", email="foo@bar.com", privacy_accepted=now) assert user.privacy_accepted == now @@ -89,7 +89,7 @@ def test_signup_raises_with_invalid_identities(self, svc): def test_signup_sets_password_using_password_service( self, svc, user_password_service ): - user = svc.signup(username="foo", email="foo@bar.com", password="wibble") + user = svc.signup(username="foo", email="foo@bar.com", password="wibble") # noqa: S106 user_password_service.update_password.assert_called_once_with(user, "wibble") @@ -130,7 +130,7 @@ def test_signup_logs_conflict_error_when_account_with_email_already_exists( ): log = patch("h.services.user_signup.log") - with pytest.raises(ConflictError): + with pytest.raises(ConflictError): # noqa: PT012 svc.signup(username="foo", email="foo@bar.com") svc.signup(username="foo", email="foo@bar.com") @@ -157,7 +157,7 @@ def test_signup_raises_conflict_error_when_account_already_exists( ): # This happens when two or more identical # concurrent signup requests race each other to the db. 
- with pytest.raises( + with pytest.raises( # noqa: PT012 ConflictError, match=f"The email address {email} has already been registered.", ): diff --git a/tests/unit/h/services/user_test.py b/tests/unit/h/services/user_test.py index 0d2b8e005ab..331b79505c1 100644 --- a/tests/unit/h/services/user_test.py +++ b/tests/unit/h/services/user_test.py @@ -121,7 +121,7 @@ def test_clears_cache_on_transaction_end(self, patch, db_session, users): # We need to capture the inline `clear_cache` function so we can # call it manually later - def on_transaction_end_decorator(session): + def on_transaction_end_decorator(session): # noqa: ARG001 def on_transaction_end(func): funcs["clear_cache"] = func diff --git a/tests/unit/h/settings_test.py b/tests/unit/h/settings_test.py index a04574dad57..2355d5aa5d8 100644 --- a/tests/unit/h/settings_test.py +++ b/tests/unit/h/settings_test.py @@ -7,7 +7,7 @@ class TestSettingsManager: def test_set_does_not_warn_when_deprecated_setting_is_not_used(self, caplog): - with caplog.at_level(logging.WARN): + with caplog.at_level(logging.WARNING): settings_manager = SettingsManager({}, {}) settings_manager.set("foo", "FOO", deprecated_msg="what to do instead") assert not caplog.records @@ -21,7 +21,7 @@ def test_set_sets_value_when_deprecated_setting_is_used(self): assert result == "bar" def test_set_warns_when_deprecated_setting_is_used(self, caplog): - with caplog.at_level(logging.WARN): + with caplog.at_level(logging.WARNING): settings_manager = SettingsManager({}, {"FOO": "bar"}) settings_manager.set("foo", "FOO", deprecated_msg="what to do instead") assert "what to do instead" in caplog.text diff --git a/tests/unit/h/streamer/db_test.py b/tests/unit/h/streamer/db_test.py index 4d7160cad36..f8b6bc9a2ba 100644 --- a/tests/unit/h/streamer/db_test.py +++ b/tests/unit/h/streamer/db_test.py @@ -45,7 +45,7 @@ def test_it_rolls_back_on_handler_exception(self, session, exception): @pytest.mark.parametrize("exception", (KeyboardInterrupt, SystemExit)) def test_it_reraises_certain_exceptions(self, session, exception): - with pytest.raises(exception): + with pytest.raises(exception): # noqa: SIM117 with read_only_transaction(session): raise exception diff --git a/tests/unit/h/streamer/filter_test.py b/tests/unit/h/streamer/filter_test.py index 855ad1a1d79..ed34daa04bd 100644 --- a/tests/unit/h/streamer/filter_test.py +++ b/tests/unit/h/streamer/filter_test.py @@ -3,7 +3,7 @@ import pytest from h_matchers import Any -from pytest import param +from pytest import param # noqa: PT013 from h.streamer.filter import SocketFilter @@ -199,13 +199,13 @@ def test_speed(self, factories, db_session): # pragma: no cover ann = factories.Annotation(target_uri="https://example.org") - start = datetime.utcnow() + start = datetime.utcnow() # noqa: DTZ003 # This returns a generator, we need to force it to produce answers tuple(SocketFilter.matching(sockets, ann, db_session)) - diff = datetime.utcnow() - start + diff = datetime.utcnow() - start # noqa: DTZ003 ms = diff.seconds * 1000 + diff.microseconds / 1000 - print(ms, "ms") + print(ms, "ms") # noqa: T201 def get_randomized_filter(self): # pragma: no cover return { @@ -215,14 +215,14 @@ def get_randomized_filter(self): # pragma: no cover { "field": "/id", "operator": "equals", - "value": "3jgSANNlEeebpLMf36MACw" + str(random()), + "value": "3jgSANNlEeebpLMf36MACw" + str(random()), # noqa: S311 }, { "field": "/references", "operator": "one_of", "value": [ - "3jgSANNlEeebpLMf36MACw" + str(random()), - "3jgSANNlEeebpLMf36MACw" + str(random()), + 
"3jgSANNlEeebpLMf36MACw" + str(random()), # noqa: S311 + "3jgSANNlEeebpLMf36MACw" + str(random()), # noqa: S311 ], }, { @@ -230,15 +230,15 @@ def get_randomized_filter(self): # pragma: no cover "operator": "one_of", "value": [ "https://example.com", - "https://example.org" + str(random()), + "https://example.org" + str(random()), # noqa: S311 ], }, { "field": "/group", "operator": "equals", "value": [ - "3jgSANNlEeebpLMf36MACw" + str(random()), - "3jgSANNlEeebpLMf36MACw" + str(random()), + "3jgSANNlEeebpLMf36MACw" + str(random()), # noqa: S311 + "3jgSANNlEeebpLMf36MACw" + str(random()), # noqa: S311 ], }, ], diff --git a/tests/unit/h/streamer/messages_speed_test.py b/tests/unit/h/streamer/messages_speed_test.py index c1563fbdc5e..3470a5c5da2 100644 --- a/tests/unit/h/streamer/messages_speed_test.py +++ b/tests/unit/h/streamer/messages_speed_test.py @@ -20,7 +20,7 @@ def test_load_request(self): @pytest.mark.parametrize("reps", (1, 16, 256, 4096)) @pytest.mark.parametrize("action", ("create", "delete")) def test_speed(self, db_session, pyramid_request, socket, message, action, reps): - sockets = list(socket for _ in range(reps)) + sockets = list(socket for _ in range(reps)) # noqa: C400 message["action"] = action start = datetime.utcnow() @@ -35,7 +35,7 @@ def test_speed(self, db_session, pyramid_request, socket, message, action, reps) assert socket.send_json.count == reps millis = diff.seconds * 1000 + diff.microseconds / 1000 - print( + print( # noqa: T201 f"{action} x {reps}: {millis} ms, {millis / reps} ms/item, {reps / millis * 1000} items/sec" ) @@ -73,7 +73,7 @@ def SocketFilter(self, patch): # We aren't interested in the speed of the socket filter, as that has # it's own speed tests SocketFilter = patch("h.streamer.messages.SocketFilter") - SocketFilter.matching.side_effect = lambda sockets, annotation: iter(sockets) + SocketFilter.matching.side_effect = lambda sockets, annotation: iter(sockets) # noqa: ARG005 return SocketFilter @pytest.fixture diff --git a/tests/unit/h/streamer/messages_test.py b/tests/unit/h/streamer/messages_test.py index dd379619537..4e935c5144b 100644 --- a/tests/unit/h/streamer/messages_test.py +++ b/tests/unit/h/streamer/messages_test.py @@ -26,14 +26,14 @@ def test_it_creates_and_runs_a_consumer(self, Consumer, realtime, work_queue): consumer = Consumer.return_value consumer.run.assert_called_once_with() - def test_it_puts_message_on_queue(self, _handler, work_queue): + def test_it_puts_message_on_queue(self, _handler, work_queue): # noqa: PT019 _handler({"foo": "bar"}) result = work_queue.get_nowait() assert result.topic == "routing_key" # Set by _handler fixture assert result.payload == {"foo": "bar"} - def test_it_handles_a_full_queue(self, _handler, work_queue): + def test_it_handles_a_full_queue(self, _handler, work_queue): # noqa: PT019 work_queue.put(messages.Message(topic="queue_is_full", payload={})) _handler({"foo": "bar"}) @@ -269,7 +269,7 @@ def identity_permits(self, patch): def SocketFilter(self, patch): SocketFilter = patch("h.streamer.messages.SocketFilter") SocketFilter.matching.side_effect = ( - lambda sockets, annotation, db_session: iter(sockets) + lambda sockets, annotation, db_session: iter(sockets) # noqa: ARG005 ) return SocketFilter diff --git a/tests/unit/h/streamer/tweens_test.py b/tests/unit/h/streamer/tweens_test.py index 6fe854de1d8..0a560691280 100644 --- a/tests/unit/h/streamer/tweens_test.py +++ b/tests/unit/h/streamer/tweens_test.py @@ -34,7 +34,9 @@ def close_db_session_tween(self, handler): @pytest.fixture def 
handler(self): - handler = mock.create_autospec(lambda request: None) # pragma: nocover + handler = mock.create_autospec( + lambda request: None # noqa: ARG005 + ) # pragma: nocover return handler @pytest.fixture diff --git a/tests/unit/h/streamer/websocket_test.py b/tests/unit/h/streamer/websocket_test.py index 7fdd6d0e9bf..7fe5206480a 100644 --- a/tests/unit/h/streamer/websocket_test.py +++ b/tests/unit/h/streamer/websocket_test.py @@ -251,7 +251,7 @@ def handlers(self, request, foo_handler, unknown_handler): clear=True, ) handlers = patcher.start() - request.addfinalizer(patcher.stop) + request.addfinalizer(patcher.stop) # noqa: PT021 return handlers diff --git a/tests/unit/h/subscribers_test.py b/tests/unit/h/subscribers_test.py index 931ccdfb688..a6a95835af3 100644 --- a/tests/unit/h/subscribers_test.py +++ b/tests/unit/h/subscribers_test.py @@ -97,7 +97,7 @@ def test_it_exits_cleanly_when_RealtimeMessageQueueError_is_raised(self, event): def test_it_raises_for_other_errors(self, event): event.request.realtime.publish_annotation.side_effect = EnvironmentError - with pytest.raises(EnvironmentError): + with pytest.raises(EnvironmentError): # noqa: PT011 subscribers.publish_annotation_event(event) @pytest.fixture diff --git a/tests/unit/h/tasks/cleanup_test.py b/tests/unit/h/tasks/cleanup_test.py index 1b4f3c5d821..f420764ae8c 100644 --- a/tests/unit/h/tasks/cleanup_test.py +++ b/tests/unit/h/tasks/cleanup_test.py @@ -41,8 +41,8 @@ def test_report_num_deleted_annotations(self, factories, newrelic): class TestPurgeExpiredAuthTickets: def test_it_removes_expired_tickets(self, db_session, factories): tickets = [ - factories.AuthTicket(expires=datetime(2014, 5, 6, 7, 8, 9)), - factories.AuthTicket(expires=(datetime.utcnow() - timedelta(seconds=1))), + factories.AuthTicket(expires=datetime(2014, 5, 6, 7, 8, 9)), # noqa: DTZ001 + factories.AuthTicket(expires=(datetime.utcnow() - timedelta(seconds=1))), # noqa: DTZ003 ] db_session.add_all(tickets) @@ -52,8 +52,8 @@ def test_it_removes_expired_tickets(self, db_session, factories): def test_it_leaves_valid_tickets(self, db_session, factories): tickets = [ - factories.AuthTicket(expires=datetime(2014, 5, 6, 7, 8, 9)), - factories.AuthTicket(expires=(datetime.utcnow() + timedelta(hours=1))), + factories.AuthTicket(expires=datetime(2014, 5, 6, 7, 8, 9)), # noqa: DTZ001 + factories.AuthTicket(expires=(datetime.utcnow() + timedelta(hours=1))), # noqa: DTZ003 ] db_session.add_all(tickets) @@ -66,8 +66,8 @@ def test_it_leaves_valid_tickets(self, db_session, factories): class TestPurgeExpiredAuthzCodes: def test_it_removes_expired_authz_codes(self, db_session, factories): authz_codes = [ - factories.AuthzCode(expires=datetime(2014, 5, 6, 7, 8, 9)), - factories.AuthzCode(expires=(datetime.utcnow() - timedelta(seconds=1))), + factories.AuthzCode(expires=datetime(2014, 5, 6, 7, 8, 9)), # noqa: DTZ001 + factories.AuthzCode(expires=(datetime.utcnow() - timedelta(seconds=1))), # noqa: DTZ003 ] db_session.add_all(authz_codes) @@ -77,8 +77,8 @@ def test_it_removes_expired_authz_codes(self, db_session, factories): def test_it_leaves_valid_authz_codes(self, db_session, factories): authz_codes = [ - factories.AuthzCode(expires=datetime(2014, 5, 6, 7, 8, 9)), - factories.AuthzCode(expires=(datetime.utcnow() + timedelta(hours=1))), + factories.AuthzCode(expires=datetime(2014, 5, 6, 7, 8, 9)), # noqa: DTZ001 + factories.AuthzCode(expires=(datetime.utcnow() + timedelta(hours=1))), # noqa: DTZ003 ] db_session.add_all(authz_codes) @@ -91,12 +91,12 @@ def 
test_it_leaves_valid_authz_codes(self, db_session, factories): class TestPurgeExpiredTokens: def test_it_removes_expired_tokens(self, db_session, factories): factories.DeveloperToken( - expires=datetime(2014, 5, 6, 7, 8, 9), - refresh_token_expires=datetime(2014, 5, 13, 7, 8, 9), + expires=datetime(2014, 5, 6, 7, 8, 9), # noqa: DTZ001 + refresh_token_expires=datetime(2014, 5, 13, 7, 8, 9), # noqa: DTZ001 ) factories.DeveloperToken( - expires=(datetime.utcnow() - timedelta(hours=2)), - refresh_token_expires=(datetime.utcnow() - timedelta(seconds=1)), + expires=(datetime.utcnow() - timedelta(hours=2)), # noqa: DTZ003 + refresh_token_expires=(datetime.utcnow() - timedelta(seconds=1)), # noqa: DTZ003 ) assert db_session.query(Token).count() == 2 @@ -105,20 +105,20 @@ def test_it_removes_expired_tokens(self, db_session, factories): def test_it_leaves_valid_tickets(self, db_session, factories): factories.DeveloperToken( - expires=datetime(2014, 5, 6, 7, 8, 9), - refresh_token_expires=datetime(2014, 5, 13, 7, 8, 9), + expires=datetime(2014, 5, 6, 7, 8, 9), # noqa: DTZ001 + refresh_token_expires=datetime(2014, 5, 13, 7, 8, 9), # noqa: DTZ001 ) factories.DeveloperToken( - expires=(datetime.utcnow() + timedelta(hours=1)), - refresh_token_expires=datetime.utcnow() + timedelta(days=7), + expires=(datetime.utcnow() + timedelta(hours=1)), # noqa: DTZ003 + refresh_token_expires=datetime.utcnow() + timedelta(days=7), # noqa: DTZ003 ) factories.DeveloperToken( - expires=(datetime.utcnow() - timedelta(hours=1)), - refresh_token_expires=datetime.utcnow() + timedelta(days=7), + expires=(datetime.utcnow() - timedelta(hours=1)), # noqa: DTZ003 + refresh_token_expires=datetime.utcnow() + timedelta(days=7), # noqa: DTZ003 ) factories.DeveloperToken( - expires=(datetime.utcnow() + timedelta(hours=1)), - refresh_token_expires=datetime.utcnow() - timedelta(days=7), + expires=(datetime.utcnow() + timedelta(hours=1)), # noqa: DTZ003 + refresh_token_expires=datetime.utcnow() - timedelta(days=7), # noqa: DTZ003 ) assert db_session.query(Token).count() == 4 diff --git a/tests/unit/h/tweens_test.py b/tests/unit/h/tweens_test.py index 996bc78bf53..a2f3ad3d774 100644 --- a/tests/unit/h/tweens_test.py +++ b/tests/unit/h/tweens_test.py @@ -46,7 +46,7 @@ def test_it_redirects_for_redirected_routes(self, pyramid_request): pyramid_request.path = "/foo" tween = tweens.redirect_tween_factory( - # pragma: nocover + # pragma: nocover # noqa: ERA001 lambda req: req.response, pyramid_request.registry, redirects, @@ -61,7 +61,7 @@ def test_it_redirects_for_redirected_routes(self, pyramid_request): class TestSecurityHeaderTween: def test_it_adds_security_headers_to_the_response(self, pyramid_request): tween = tweens.security_header_tween_factory( - # pragma: nocover + # pragma: nocover # noqa: ERA001 lambda req: req.response, pyramid_request.registry, ) @@ -121,7 +121,7 @@ def test_it_calls_db_rollback_on_exception(self, handler, pyramid_request): handler, pyramid_request.registry ) - with pytest.raises(IOError): + with pytest.raises(IOError): # noqa: PT011 tween(pyramid_request) handler.assert_called_once_with(pyramid_request) @@ -129,7 +129,9 @@ def test_it_calls_db_rollback_on_exception(self, handler, pyramid_request): @pytest.fixture def handler(self): - return mock.create_autospec(lambda request: None) # pragma: nocover + return mock.create_autospec( + lambda request: None # noqa: ARG005 + ) # pragma: nocover @pytest.fixture def pyramid_request(self, pyramid_request): diff --git a/tests/unit/h/util/datetime_test.py 
b/tests/unit/h/util/datetime_test.py index 3f1b16afb6b..6e305f7a241 100644 --- a/tests/unit/h/util/datetime_test.py +++ b/tests/unit/h/util/datetime_test.py @@ -11,7 +11,7 @@ "date,expected", ( ( - datetime.datetime(2016, 2, 24, 18, 3, 25, 7685), + datetime.datetime(2016, 2, 24, 18, 3, 25, 7685), # noqa: DTZ001 "2016-02-24T18:03:25.007685+00:00", ), # We ignore timezones @@ -27,5 +27,5 @@ def test_utc_iso8601(date, expected): def test_utc_us_style_date(): - t = datetime.datetime(2016, 2, 4) + t = datetime.datetime(2016, 2, 4) # noqa: DTZ001 assert utc_us_style_date(t) == "February 4, 2016" diff --git a/tests/unit/h/util/db_test.py b/tests/unit/h/util/db_test.py index 9eab9b5ffad..cf0141788c1 100644 --- a/tests/unit/h/util/db_test.py +++ b/tests/unit/h/util/db_test.py @@ -13,8 +13,8 @@ def test_caches_during_transaction(self, db_session, mock_transaction): # cleared when the transaction ends. @lru_cache_in_transaction(db_session) - def random_float(*args, **kwargs): - return random.random() + def random_float(*args, **kwargs): # noqa: ARG001 + return random.random() # noqa: S311 a = random_float("a") b = random_float("b") @@ -36,8 +36,8 @@ def test_cache_not_cleared_for_nested_transaction( """The cache should not be cleared when a nested transaction ends.""" @lru_cache_in_transaction(db_session) - def random_float(*args, **kwargs): - return random.random() + def random_float(*args, **kwargs): # noqa: ARG001 + return random.random() # noqa: S311 a = random_float("a") b = random_float("b") diff --git a/tests/unit/h/util/document_claims_test.py b/tests/unit/h/util/document_claims_test.py index d8e86ed54b8..b348e4da448 100644 --- a/tests/unit/h/util/document_claims_test.py +++ b/tests/unit/h/util/document_claims_test.py @@ -264,7 +264,7 @@ def test_document_metas_from_data_allows_null_non_titles(self): if not isinstance(value, list): # We expect it to turn non-lists into length-1 lists. - value = [value] + value = [value] # noqa: PLW2901 assert document_metas == [ {"type": "foo", "value": value, "claimant": "http://example/claimant"} @@ -292,7 +292,7 @@ def test_document_metas_from_data_allows_empty_string_non_titles(self): if not isinstance(value, list): # We expect it to turn non-lists into length-1 lists. - value = [value] + value = [value] # noqa: PLW2901 assert document_metas == [ {"type": "foo", "value": value, "claimant": "http://example/claimant"} @@ -320,7 +320,7 @@ def test_document_metas_from_data_allows_whitespace_only_non_titles(self): if not isinstance(value, list): # We expect it to turn non-lists into length-1 lists. 
- value = [value] + value = [value] # noqa: PLW2901 assert document_metas == [ {"type": "foo", "value": value, "claimant": "http://example/claimant"} diff --git a/tests/unit/h/util/metrics_test.py b/tests/unit/h/util/metrics_test.py index 370e47c6d20..9643615cb13 100644 --- a/tests/unit/h/util/metrics_test.py +++ b/tests/unit/h/util/metrics_test.py @@ -9,7 +9,7 @@ class TestRecordSearchQueryParams: def test_it_passes_parameters_to_newrelic(self, newrelic_agent): params = MultiDict(tag="tagsvalue", _separate_replies=True, url="urlvalue") - metrics.record_search_query_params(params, True) + metrics.record_search_query_params(params, True) # noqa: FBT003 newrelic_agent.add_custom_attributes.assert_called_once_with( [ ("es_url", "urlvalue"), @@ -20,14 +20,14 @@ def test_it_passes_parameters_to_newrelic(self, newrelic_agent): def test_it_does_not_pass_unrecognized_parameters_to_newrelic(self, newrelic_agent): params = MultiDict(bad="unwanted") - metrics.record_search_query_params(params, True) + metrics.record_search_query_params(params, True) # noqa: FBT003 newrelic_agent.add_custom_attributes.assert_called_once_with( [("es__separate_replies", True)] ) def test_it_does_not_record_separate_replies_if_False(self, newrelic_agent): params = MultiDict({}) - metrics.record_search_query_params(params, False) + metrics.record_search_query_params(params, False) # noqa: FBT003 newrelic_agent.add_custom_attributes.assert_called_once_with([]) @pytest.fixture diff --git a/tests/unit/h/util/test_logging_filters.py b/tests/unit/h/util/test_logging_filters.py index 56f017acf42..6d870a01449 100644 --- a/tests/unit/h/util/test_logging_filters.py +++ b/tests/unit/h/util/test_logging_filters.py @@ -8,7 +8,7 @@ class TestExceptionFilter: def test_raises_if_invalid_level_name(self): - with pytest.raises(ValueError): + with pytest.raises(ValueError): # noqa: PT011 ExceptionFilter((("ReadTimeoutError", "WARNI"),)) def test_specify_level_as_int(self): @@ -34,7 +34,7 @@ def test_does_log_if_log_level_mismatch(self, logger, read_timeout_exception): def test_does_log_if_exception_mismatch(self, logger): try: - raise ValueError("Not a read timeout") + raise ValueError("Not a read timeout") # noqa: EM101, TRY003, TRY301 except ValueError: logger.warning("warning", exc_info=True) assert logger.handlers[0].handler_called, ( @@ -62,10 +62,10 @@ def logger(): class TestHandler(logging.Handler): handler_called = False - def emit(self, record): + def emit(self, record): # noqa: ARG002 self.handler_called = True - log = logging.Logger("test_logger") + log = logging.Logger("test_logger") # noqa: LOG001 log.addHandler(TestHandler()) log.addFilter(ExceptionFilter((("ReadTimeoutError", "WARNING"),))) return log diff --git a/tests/unit/h/util/uri_test.py b/tests/unit/h/util/uri_test.py index d09f4fa6866..baeeb891b5e 100644 --- a/tests/unit/h/util/uri_test.py +++ b/tests/unit/h/util/uri_test.py @@ -210,7 +210,7 @@ def test_it_handles_invalid_params(self, url_in, url_out): ("http://example.com?foo=[bar]baz", "httpx://example.com?foo=%5Bbar%5Dbaz"), # Query: ensure OTHER characters are encoded ( - "http://example.com?你好世界=γειά σου κόσμος", + "http://example.com?你好世界=γειά σου κόσμος", # noqa: RUF001 "httpx://example.com?%E4%BD%A0%E5%A5%BD%E4%B8%96%E7%95%8C=%CE%B3%CE%B5%CE%B9%CE%AC+%CF%83%CE%BF%CF%85+%CE%BA%CF%8C%CF%83%CE%BC%CE%BF%CF%82", ), ("http://example.com?love=♥", "httpx://example.com?love=%E2%99%A5"), diff --git a/tests/unit/h/views/account_signup_test.py b/tests/unit/h/views/account_signup_test.py index 909fc33f271..218353e930b 
100644 --- a/tests/unit/h/views/account_signup_test.py +++ b/tests/unit/h/views/account_signup_test.py @@ -40,7 +40,7 @@ def test_post_creates_user_from_form_data( user_signup_service.signup.assert_called_with( username="bob", email="bob@example.com", - password="s3crets", + password="s3crets", # noqa: S106 privacy_accepted=datetime.datetime.utcnow.return_value, comms_opt_in=True, ) diff --git a/tests/unit/h/views/accounts_test.py b/tests/unit/h/views/accounts_test.py index 8bc168779cd..688c976d606 100644 --- a/tests/unit/h/views/accounts_test.py +++ b/tests/unit/h/views/accounts_test.py @@ -887,10 +887,12 @@ def test_get( self, authenticated_user, controller, factories, pyramid_request, schemas ): oldest_annotation = factories.Annotation( - userid=authenticated_user.userid, created=datetime(1970, 1, 1) + userid=authenticated_user.userid, + created=datetime(1970, 1, 1), # noqa: DTZ001 ) newest_annotation = factories.Annotation( - userid=authenticated_user.userid, created=datetime(1990, 1, 1) + userid=authenticated_user.userid, + created=datetime(1990, 1, 1), # noqa: DTZ001 ) # An annotation by another user. This shouldn't be counted. factories.Annotation(created=oldest_annotation.created - timedelta(days=1)) diff --git a/tests/unit/h/views/activity_test.py b/tests/unit/h/views/activity_test.py index 90a9d9e1012..29f65c1772c 100644 --- a/tests/unit/h/views/activity_test.py +++ b/tests/unit/h/views/activity_test.py @@ -114,7 +114,7 @@ class TestGroupSearchController: """Tests unique to GroupSearchController.""" @staticmethod - def fake_has_permission(permission, context=None): + def fake_has_permission(permission, context=None): # noqa: ARG004 return False def test_renders_join_template_when_no_read_permission( @@ -149,7 +149,11 @@ def test_renders_join_template_when_not_logged_in( ) @pytest.mark.usefixtures("toggle_user_facet_request") def test_raises_not_found_when_no_read_or_join_permissions( - self, controller, pyramid_request, test_group, test_user + self, + controller, + pyramid_request, + test_group, # noqa: ARG002 + test_user, # noqa: ARG002 ): pyramid_request.has_permission = mock.Mock(side_effect=self.fake_has_permission) @@ -182,7 +186,11 @@ def test_search_redirects_if_slug_wrong( indirect=["test_group", "test_user"], ) def test_search_calls_search_with_the_request( - self, controller, test_group, test_user, search + self, + controller, + test_group, # noqa: ARG002 + test_user, # noqa: ARG002 + search, ): controller.search() @@ -196,7 +204,12 @@ def test_search_calls_search_with_the_request( indirect=["test_group", "test_user"], ) def test_search_just_returns_search_result_if_group_does_not_exist( - self, controller, test_group, test_user, pyramid_request, search + self, + controller, + test_group, # noqa: ARG002 + test_user, # noqa: ARG002 + pyramid_request, + search, ): pyramid_request.matchdict["pubid"] = "does_not_exist" @@ -204,7 +217,10 @@ def test_search_just_returns_search_result_if_group_does_not_exist( @pytest.mark.parametrize("test_group", GROUP_TYPE_OPTIONS, indirect=["test_group"]) def test_search_just_returns_search_result_if_user_not_logged_in( - self, controller, test_group, search + self, + controller, + test_group, # noqa: ARG002 + search, ): assert controller.search() == search.return_value @@ -214,7 +230,11 @@ def test_search_just_returns_search_result_if_user_not_logged_in( indirect=["test_group", "test_user"], ) def test_search_just_returns_search_result_if_user_not_a_member_of_group( - self, controller, test_group, test_user, search + self, + controller, + 
test_group, # noqa: ARG002 + test_user, # noqa: ARG002 + search, ): assert controller.search() == search.return_value @@ -224,7 +244,10 @@ def test_search_just_returns_search_result_if_user_not_a_member_of_group( indirect=["test_group", "test_user"], ) def test_search_returns_group_creator_is_none_if_group_creator_is_empty( - self, controller, test_group, test_user + self, + controller, + test_group, # noqa: ARG002 + test_user, # noqa: ARG002 ): group_info = controller.search()["group_users_args"] @@ -236,12 +259,16 @@ def test_search_returns_group_creator_is_none_if_group_creator_is_empty( indirect=["test_group", "test_user"], ) def test_search_returns_group_info_if_user_has_read_permissions( - self, controller, test_group, test_user + self, + controller, + test_group, + test_user, # noqa: ARG002 ): group_info = controller.search()["group"] - assert group_info["created"] == "{d:%B} {d.day}, {d:%Y}".format( - d=test_group.created + assert ( + group_info["created"] + == f"{test_group.created:%B} {test_group.created.day}, {test_group.created:%Y}" ) assert group_info["description"] == test_group.description assert group_info["name"] == test_group.name @@ -257,12 +284,13 @@ def test_search_does_not_return_organization_info_if_missing( self, controller, test_group, - test_user, + test_user, # noqa: ARG002 ): group_info = controller.search()["group"] - assert group_info["created"] == "{d:%B} {d.day}, {d:%Y}".format( - d=test_group.created + assert ( + group_info["created"] + == f"{test_group.created:%B} {test_group.created.day}, {test_group.created:%Y}" ) assert group_info["description"] == test_group.description assert group_info["name"] == test_group.name @@ -275,9 +303,13 @@ def test_search_does_not_return_organization_info_if_missing( indirect=["test_group", "test_user"], ) def test_search_does_not_show_the_edit_link_to_non_admin_users( - self, controller, test_group, test_user, pyramid_request + self, + controller, + test_group, # noqa: ARG002 + test_user, # noqa: ARG002 + pyramid_request, ): - def fake_has_permission(permission, context=None): + def fake_has_permission(permission, context=None): # noqa: ARG001 return permission != Permission.Group.EDIT pyramid_request.has_permission = mock.Mock(side_effect=fake_has_permission) @@ -292,7 +324,11 @@ def fake_has_permission(permission, context=None): indirect=["test_group", "test_user"], ) def test_search_does_show_the_group_edit_link_to_group_creators( - self, controller, test_group, test_user, pyramid_request + self, + controller, + test_group, # noqa: ARG002 + test_user, # noqa: ARG002 + pyramid_request, ): pyramid_request.has_permission = mock.Mock(return_value=True) @@ -306,7 +342,11 @@ def test_search_does_show_the_group_edit_link_to_group_creators( indirect=["test_group", "test_user"], ) def test_search_shows_the_more_info_version_of_the_page_if_more_info_is_in_the_request_params( - self, controller, test_group, test_user, pyramid_request + self, + controller, + test_group, # noqa: ARG002 + test_user, # noqa: ARG002 + pyramid_request, ): pyramid_request.params["more_info"] = "" @@ -318,7 +358,10 @@ def test_search_shows_the_more_info_version_of_the_page_if_more_info_is_in_the_r indirect=["test_group", "test_user"], ) def test_search_shows_the_normal_version_of_the_page_if_more_info_is_not_in_the_request_params( - self, controller, test_group, test_user + self, + controller, + test_group, # noqa: ARG002 + test_user, # noqa: ARG002 ): assert not controller.search()["more_info"] @@ -333,7 +376,7 @@ def 
test_search_returns_name_in_opts(self, controller, test_group): [("group", "member"), ("open_group", "user")], indirect=["test_group", "test_user"], ) - def test_search_returns_group_creator(self, controller, test_user, test_group): + def test_search_returns_group_creator(self, controller, test_user, test_group): # noqa: ARG002 result = controller.search() assert result["group_users_args"][2] == test_group.creator.userid @@ -344,7 +387,10 @@ def test_search_returns_group_creator(self, controller, test_user, test_group): indirect=["test_group", "test_user"], ) def test_search_returns_group_members_usernames( - self, controller, test_user, test_group + self, + controller, + test_user, # noqa: ARG002 + test_group, ): result = controller.search() @@ -358,7 +404,10 @@ def test_search_returns_group_members_usernames( indirect=["test_group", "test_user"], ) def test_search_returns_group_members_userid( - self, controller, test_user, test_group + self, + controller, + test_user, # noqa: ARG002 + test_group, ): result = controller.search() @@ -373,7 +422,10 @@ def test_search_returns_group_members_userid( ) @pytest.mark.usefixtures("query") def test_search_returns_group_members_faceted_by( - self, controller, test_user, test_group + self, + controller, + test_user, # noqa: ARG002 + test_group, ): faceted_user = test_group.members[0] controller.parsed_query_params = MultiDict({"user": faceted_user.username}) @@ -414,7 +466,10 @@ def test_search_returns_annotation_count_for_group_members( indirect=["test_group", "test_user"], ) def test_search_returns_group_moderators_usernames( - self, controller, test_user, test_group + self, + controller, + test_user, # noqa: ARG002 + test_group, ): result = controller.search() @@ -428,7 +483,10 @@ def test_search_returns_group_moderators_usernames( indirect=["test_group", "test_user"], ) def test_search_returns_group_moderators_userid( - self, controller, test_user, test_group + self, + controller, + test_user, # noqa: ARG002 + test_group, ): result = controller.search() @@ -442,7 +500,11 @@ def test_search_returns_group_moderators_userid( indirect=["test_group", "test_user"], ) def test_search_returns_group_moderators_faceted_by( - self, controller, pyramid_request, test_user, test_group + self, + controller, + pyramid_request, + test_user, # noqa: ARG002 + test_group, # noqa: ARG002 ): pyramid_request.params = {"q": "user:does_not_matter"} @@ -456,7 +518,12 @@ def test_search_returns_group_moderators_faceted_by( indirect=["test_group", "test_user"], ) def test_search_returns_annotation_count_for_group_moderators( - self, controller, test_group, test_user, search, factories + self, + controller, + test_group, + test_user, # noqa: ARG002 + search, + factories, ): user_1 = test_group.creator user_2 = factories.User() @@ -482,7 +549,11 @@ def test_search_returns_annotation_count_for_group_moderators( indirect=["test_group", "test_user"], ) def test_search_returns_the_default_zero_message_to_the_template( - self, controller, test_group, test_user, search + self, + controller, + test_group, # noqa: ARG002 + test_user, # noqa: ARG002 + search, ): """If there's a non-empty query it uses the default zero message.""" search.return_value["q"] = "foo" @@ -497,7 +568,11 @@ def test_search_returns_the_default_zero_message_to_the_template( indirect=["test_group", "test_user"], ) def test_search_returns_the_group_zero_message_to_the_template( - self, controller, test_group, test_user, search + self, + controller, + test_group, + test_user, # noqa: ARG002 + search, ): """If the 
         search.return_value["q"] = ""
@@ -584,7 +659,7 @@ def test_search_passes_the_group_annotation_count_to_the_template(
         self,
         controller,
         test_group,
-        test_user,
+        test_user,  # noqa: ARG002
         annotation_stats_service,
     ):
         result = controller.search()["stats"]
@@ -603,7 +678,7 @@ def test_search_reuses_group_annotation_count_if_able(
         self,
         controller,
         test_group,
-        test_user,
+        test_user,  # noqa: ARG002
         annotation_stats_service,
     ):
         # In cases where the annotation count returned from search is the same calc
@@ -646,7 +721,7 @@ def test_search_sets_display_strings_for_group(
         self,
         controller,
         test_group,
-        test_user,
+        test_user,  # noqa: ARG002
         test_heading,
         test_subtitle,
         test_share_msg,
@@ -667,7 +742,11 @@ def test_search_sets_display_strings_for_group(
         indirect=["test_group", "test_user"],
     )
     def test_search_sets_display_members_for_group(
-        self, controller, test_group, test_user, search
+        self,
+        controller,
+        test_group,
+        test_user,  # noqa: ARG002
+        search,
     ):
         info = controller.search()["group_users_args"]
         userids = [i["userid"] for i in info[1]]
@@ -801,11 +880,11 @@ def test_toggle_user_facet_removes_empty_query(
             f"http://example.com/groups/{test_group.pubid}/{test_group.slug}"
         )

-    @pytest.fixture(scope="function")
+    @pytest.fixture(scope="function")  # noqa: PT003
     def test_group(self, request, groups):
         return groups[request.param]

-    @pytest.fixture(scope="function")
+    @pytest.fixture(scope="function")  # noqa: PT003
     def test_user(self, request, users):
         # Since open groups don't have members we only eval this
         # if member was specifically requested.
@@ -814,12 +893,12 @@ def test_user(self, request, users):
         return users[request.param]

     @pytest.fixture
-    def users(self, request, user, factories):
+    def users(self, request, user, factories):  # noqa: ARG002
         group = request.getfixturevalue("test_group")
         return {None: None, "creator": group.creator, "user": factories.User()}

     @pytest.fixture
-    def controller(self, request, group, pyramid_request, query):
+    def controller(self, request, group, pyramid_request, query):  # noqa: ARG002
         test_group = group
         if "test_group" in request.fixturenames:
             test_group = request.getfixturevalue("test_group")
@@ -1026,7 +1105,7 @@ def controller(
         self,
         user,
         pyramid_request,
-        query,
+        query,  # noqa: ARG002
     ):
         return activity.UserSearchController(UserContext(user), pyramid_request)

@@ -1039,7 +1118,7 @@ def pyramid_request(self, pyramid_request, user):
     @pytest.fixture
     def user(self, factories):
         return factories.User(
-            registered_date=datetime.datetime(year=2016, month=8, day=1),
+            registered_date=datetime.datetime(year=2016, month=8, day=1),  # noqa: DTZ001
             uri="http://www.example.com/me",
             orcid="0000-0000-0000-0000",
         )
diff --git a/tests/unit/h/views/admin/badge_test.py b/tests/unit/h/views/admin/badge_test.py
index 42789b232e1..ad9a6f0b279 100644
--- a/tests/unit/h/views/admin/badge_test.py
+++ b/tests/unit/h/views/admin/badge_test.py
@@ -80,7 +80,7 @@ def test_remove_redirects_to_index_even_if_not_blocked(self, pyramid_request):
 def blocked_uris(db_session):
     uris = []
     for uri in ["blocked1", "blocked2", "blocked3"]:
-        uris.append(models.Blocklist(uri=uri))
+        uris.append(models.Blocklist(uri=uri))  # noqa: PERF401

     db_session.add_all(uris)
     db_session.flush()
diff --git a/tests/unit/h/views/admin/groups_test.py b/tests/unit/h/views/admin/groups_test.py
index 9778c6b224f..8dd57c9d07d 100644
--- a/tests/unit/h/views/admin/groups_test.py
+++ b/tests/unit/h/views/admin/groups_test.py
@@ -86,7 +86,7 @@ def test_post_handles_form_submission(
     def test_post_redirects_to_list_view_on_success(
         self, pyramid_request, matchers, handle_form_submission, base_appstruct
     ):
-        def call_on_success(request, form, on_success, on_failure):
+        def call_on_success(request, form, on_success, on_failure):  # noqa: ARG001
             return on_success(base_appstruct)

         handle_form_submission.side_effect = call_on_success
@@ -106,7 +106,7 @@ def test_post_creates_open_group_on_success(
         user_service,
         base_appstruct,
     ):
-        def call_on_success(request, form, on_success, on_failure):
+        def call_on_success(request, form, on_success, on_failure):  # noqa: ARG001
             base_appstruct["group_type"] = "open"
             return on_success(base_appstruct)

@@ -133,7 +133,7 @@ def test_post_creates_restricted_group_on_success(
         user_service,
         base_appstruct,
     ):
-        def call_on_success(request, form, on_success, on_failure):
+        def call_on_success(request, form, on_success, on_failure):  # noqa: ARG001
             base_appstruct["group_type"] = "restricted"
             return on_success(base_appstruct)

@@ -164,7 +164,7 @@ def test_post_adds_members_on_success(
         user = factories.User()
         user_service.fetch.return_value = user

-        def call_on_success(request, form, on_success, on_failure):
+        def call_on_success(request, form, on_success, on_failure):  # noqa: ARG001
             base_appstruct["members"] = ["someusername"]
             return on_success(base_appstruct)

@@ -188,7 +188,7 @@ def test_post_with_no_organization(
         """Test creating a new group with no organization."""
         base_appstruct["organization"] = None

-        def call_on_success(request, form, on_success, on_failure):
+        def call_on_success(request, form, on_success, on_failure):  # noqa: ARG001
             return on_success(base_appstruct)

         handle_form_submission.side_effect = call_on_success
@@ -308,7 +308,7 @@ def test_update_proxies_to_update_service_on_success(

         list_organizations_service.organizations.return_value.append(updated_org)

-        def call_on_success(request, form, on_success, on_failure):
+        def call_on_success(request, form, on_success, on_failure):  # noqa: ARG001
             return on_success(
                 {
                     "creator": fetched_user.username,
@@ -346,7 +346,7 @@ def test_update_when_group_has_no_organization(
     ):
         group.organization = None

-        def call_on_success(request, form, on_success, on_failure):
+        def call_on_success(request, form, on_success, on_failure):  # noqa: ARG001
             return on_success(
                 {
                     "creator": "creator",
@@ -386,7 +386,7 @@ def test_update_updates_group_members_on_success(
         fetched_user = factories.User()
         user_service.fetch.return_value = fetched_user

-        def call_on_success(request, form, on_success, on_failure):
+        def call_on_success(request, form, on_success, on_failure):  # noqa: ARG001
             return on_success(
                 {
                     "authority": pyramid_request.default_authority,
diff --git a/tests/unit/h/views/admin/nipsa_test.py b/tests/unit/h/views/admin/nipsa_test.py
index 693a883d146..ef53b22cd66 100644
--- a/tests/unit/h/views/admin/nipsa_test.py
+++ b/tests/unit/h/views/admin/nipsa_test.py
@@ -16,7 +16,7 @@ def test_lists_flagged_usernames(self, pyramid_request):
         }

     def test_lists_flagged_usernames_no_results(self, nipsa_service, pyramid_request):
-        nipsa_service.flagged = set([])
+        nipsa_service.flagged = set([])  # noqa: C405

         result = nipsa_index(pyramid_request)

diff --git a/tests/unit/h/views/admin/oauthclients_test.py b/tests/unit/h/views/admin/oauthclients_test.py
index c01b10d1bd4..e201b802bf7 100644
--- a/tests/unit/h/views/admin/oauthclients_test.py
+++ b/tests/unit/h/views/admin/oauthclients_test.py
@@ -26,9 +26,9 @@ def validate(self, items):
         self.appstruct = {}
         for name, value in items:
             if name == "grant_type":
== "grant_type": - value = GrantType[value] + value = GrantType[value] # noqa: PLW2901 elif name == "response_type": - value = ResponseType[value] + value = ResponseType[value] # noqa: PLW2901 self.appstruct[name] = value return self.appstruct @@ -95,7 +95,7 @@ def test_post_generates_secret_for_jwt_clients(self, form_post, pyramid_request) ctrl.post() client = pyramid_request.db.query(AuthClient).one() - assert client.secret == "keep-me-secret" + assert client.secret == "keep-me-secret" # noqa: S105 def test_post_generates_secret_for_client_credentials_clients( self, form_post, pyramid_request @@ -108,7 +108,7 @@ def test_post_generates_secret_for_client_credentials_clients( ctrl.post() client = pyramid_request.db.query(AuthClient).one() - assert client.secret == "keep-me-secret" + assert client.secret == "keep-me-secret" # noqa: S105 def test_post_does_not_generate_secret_for_authcode_clients( self, form_post, pyramid_request @@ -196,7 +196,7 @@ def test_update_does_not_update_read_only_fields( old_id = auth_client.id old_secret = auth_client.secret form_post["client_id"] = "new-id" - form_post["client_secret"] = "new-secret" + form_post["client_secret"] = "new-secret" # noqa: S105 pyramid_request.POST = form_post controller = AuthClientEditController(sentinel.context, pyramid_request) @@ -249,7 +249,7 @@ def auth_client(self, factories): return factories.AuthClient( name="testclient", authority="annotator.org", - secret="not_a_secret", + secret="not_a_secret", # noqa: S106 trusted=False, grant_type=GrantType.authorization_code, response_type=ResponseType.code, diff --git a/tests/unit/h/views/admin/organizations_test.py b/tests/unit/h/views/admin/organizations_test.py index b806dba5e83..f1bcaf2b0aa 100644 --- a/tests/unit/h/views/admin/organizations_test.py +++ b/tests/unit/h/views/admin/organizations_test.py @@ -54,7 +54,7 @@ def orgs(self, factories): @pytest.mark.usefixtures("routes") class TestOrganizationCreateController: @staticmethod - def call_on_success(request, form, on_success, on_failure): + def call_on_success(request, form, on_success, on_failure): # noqa: ARG004 return on_success( { "name": "New organization", @@ -128,7 +128,7 @@ def test_read_does_not_show_delete_button_for_default_org( def test_update_saves_org( self, get_controller, organization, handle_form_submission ): - def call_on_success(request, form, on_success, on_failure): + def call_on_success(request, form, on_success, on_failure): # noqa: ARG001 return on_success( { "name": "Updated name", diff --git a/tests/unit/h/views/admin/search_test.py b/tests/unit/h/views/admin/search_test.py index 1d150192e21..1957ec37978 100644 --- a/tests/unit/h/views/admin/search_test.py +++ b/tests/unit/h/views/admin/search_test.py @@ -22,8 +22,8 @@ def test_reindex_date(self, views, tasks, pyramid_request): tasks.job_queue.add_annotations_between_times.delay.assert_called_once_with( "sync_annotation", - datetime.datetime(year=2020, month=9, day=9), - datetime.datetime(year=2020, month=9, day=11), + datetime.datetime(year=2020, month=9, day=9), # noqa: DTZ001 + datetime.datetime(year=2020, month=9, day=11), # noqa: DTZ001 tag="reindex_date", ) assert pyramid_request.session.peek_flash("success") == [ @@ -117,7 +117,7 @@ def test_queue_annotaions_by_id(self, views, tasks, pyramid_request): ] @pytest.fixture - def views(self, pyramid_request, queue_service): + def views(self, pyramid_request, queue_service): # noqa: ARG002 return SearchAdminViews(pyramid_request) @pytest.fixture(autouse=True) diff --git 
index 30119e4e9b3..93a70e651fb 100644
--- a/tests/unit/h/views/admin/users_test.py
+++ b/tests/unit/h/views/admin/users_test.py
@@ -20,7 +20,7 @@

 @pytest.mark.parametrize(
     "input_date,expected",
-    ((datetime(2001, 11, 29, 21, 50, 59, 999999), "2001-11-29 21:50"), (None, "")),
+    ((datetime(2001, 11, 29, 21, 50, 59, 999999), "2001-11-29 21:50"), (None, "")),  # noqa: DTZ001
 )
 def test_format_date(input_date, expected):
     assert format_date(input_date) == expected
diff --git a/tests/unit/h/views/api/auth_test.py b/tests/unit/h/views/api/auth_test.py
index 0a87aefc34e..a1fe18b687a 100644
--- a/tests/unit/h/views/api/auth_test.py
+++ b/tests/unit/h/views/api/auth_test.py
@@ -32,7 +32,7 @@ def test_get_raises_for_invalid_request(self, controller, view_name):
             InvalidRequestFatalError("boom!")
         )

-        with pytest.raises(OAuthAuthorizeError) as exc:
+        with pytest.raises(OAuthAuthorizeError) as exc:  # noqa: PT012
             view = getattr(controller, view_name)
             view()

@@ -42,7 +42,7 @@ def test_get_redirects_to_login_when_not_authenticated(
         self, controller, pyramid_request, view_name
     ):
-        with pytest.raises(httpexceptions.HTTPFound) as exc:
+        with pytest.raises(httpexceptions.HTTPFound) as exc:  # noqa: PT012
             view = getattr(controller, view_name)
             view()

@@ -147,7 +147,7 @@ def test_post_creates_authorization_response(
         self, controller, pyramid_request, authenticated_user, view_name
     ):
         pyramid_request.url = (
-            "http://example.com/auth?client_id=the-client-id"
+            "http://example.com/auth?client_id=the-client-id"  # noqa: ISC003
             + "&response_type=code"
             + "&state=foobar"
             + "&scope=exploit"
@@ -169,7 +169,7 @@ def test_post_raises_for_invalid_request(self, controller, view_name):
             InvalidRequestFatalError("boom!")
         )

-        with pytest.raises(OAuthAuthorizeError) as exc:
+        with pytest.raises(OAuthAuthorizeError) as exc:  # noqa: PT012
             view = getattr(controller, view_name)
             view()

@@ -355,7 +355,7 @@ def test_get_raises_for_invalid_request(self, controller):
     @pytest.fixture
     def controller(self, pyramid_request):
         pyramid_request.method = "POST"
-        pyramid_request.POST["token"] = "the-token"
+        pyramid_request.POST["token"] = "the-token"  # noqa: S105
         pyramid_request.headers = {"X-Test-ID": "1234"}

         return views.OAuthRevocationController(pyramid_request)
@@ -411,8 +411,8 @@ def test_it_with_invalid_token_string(self, pyramid_request, auth_token_service)
     def oauth_token(self, factories, auth_token_service):
         oauth_token = factories.DeveloperToken(
             authclient=factories.AuthClient(),
-            expires=datetime.datetime(2001, 11, 30, 17, 45, 50),
-            created=datetime.datetime(2000, 10, 16, 15, 51, 59),
+            expires=datetime.datetime(2001, 11, 30, 17, 45, 50),  # noqa: DTZ001
+            created=datetime.datetime(2000, 10, 16, 15, 51, 59),  # noqa: DTZ001
         )
         auth_token_service.fetch.return_value = oauth_token

diff --git a/tests/unit/h/views/api/bulk/_ndjson_test.py b/tests/unit/h/views/api/bulk/_ndjson_test.py
index d820515c58c..7ae0a296f21 100644
--- a/tests/unit/h/views/api/bulk/_ndjson_test.py
+++ b/tests/unit/h/views/api/bulk/_ndjson_test.py
@@ -31,11 +31,11 @@ def test_it_returns_204_if_no_content_is_to_be_returned(self):
         assert result.status == "204 No Content"

     def test_it_captures_initial_errors(self):
-        def failing_method(fail=True):
+        def failing_method(fail=True):  # noqa: FBT002
             if fail:
-                raise ValueError("Oh no!")
+                raise ValueError("Oh no!")  # noqa: EM101, TRY003
             yield 1  # pragma: nocover

-        with pytest.raises(ValueError):
+        with pytest.raises(ValueError):  # noqa: PT011
             get_ndjson_response(failing_method())

diff --git a/tests/unit/h/views/api/bulk/stats_test.py b/tests/unit/h/views/api/bulk/stats_test.py
index ce3d0104282..ad955c160e2 100644
--- a/tests/unit/h/views/api/bulk/stats_test.py
+++ b/tests/unit/h/views/api/bulk/stats_test.py
@@ -29,7 +29,7 @@ def test_get_annotation_counts(
                 annotations=i,
                 replies=i,
                 page_notes=i,
-                last_activity=datetime.now(),
+                last_activity=datetime.now(),  # noqa: DTZ005
             )
             for i in range(3)
         ]
diff --git a/tests/unit/h/views/api/helpers/angular_test.py b/tests/unit/h/views/api/helpers/angular_test.py
index 6c394a1a370..a4e159ab004 100644
--- a/tests/unit/h/views/api/helpers/angular_test.py
+++ b/tests/unit/h/views/api/helpers/angular_test.py
@@ -3,7 +3,7 @@

 class TestAngularRouteTemplater:
     def test_static_route(self):
-        def route_url(route_name, **kwargs):
+        def route_url(route_name, **kwargs):  # noqa: ARG001
             return "/" + route_name

         templater = AngularRouteTemplater(route_url, params=[])
diff --git a/tests/unit/h/views/api/helpers/cors_test.py b/tests/unit/h/views/api/helpers/cors_test.py
index c707aeed173..118a39acb43 100644
--- a/tests/unit/h/views/api/helpers/cors_test.py
+++ b/tests/unit/h/views/api/helpers/cors_test.py
@@ -227,7 +227,7 @@ def test_it_adds_one_preflight_view_per_route(self, pyramid_config):


 # A tiny WSGI application used for testing the middleware
-def wsgi_testapp(environ, start_response):
+def wsgi_testapp(environ, start_response):  # noqa: ARG001
     start_response("200 OK", [("Content-Type", "text/plain")])
     return ["OK"]

diff --git a/tests/unit/h/views/api/helpers/links_test.py b/tests/unit/h/views/api/helpers/links_test.py
index f09c24d3d5b..2ae640503f4 100644
--- a/tests/unit/h/views/api/helpers/links_test.py
+++ b/tests/unit/h/views/api/helpers/links_test.py
@@ -52,43 +52,7 @@ def test_it_does_not_register_link_for_unsupported_versions(

         assert second_service not in pyramid_config.registry.api_links["v2"]

-# TODO: Fix these tests
-# class TestFormatNestedLinks:
-#     def test_it_formats_link_objects_as_dicts(self, templater):
-#         link = _service_link(name="flat")
-#
-#         formatted = links.format_nested_links([link], "v1", templater)
-#
-#         assert "flat" in formatted
-#         assert formatted["flat"] == {
-#             "method": link.primary_method(),
-#             "url": templater.route_template(link.route_name),
-#             "desc": link.description,
-#         }
-#
-#     def test_it_nests_links_based_on_service_name_split_on_periods(self, templater):
-#         api_links = [
-#             _service_link(name="1"),
-#             _service_link(name="1.2"),
-#             _service_link(name="1.2.3"),
-#             _service_link(name="1.2.A"),
-#             _service_link(name="1.B"),
-#         ]
-#
-#         formatted = links.format_nested_links([api_links], "v1", templater)
-#
-#         assert "1" in formatted
-#         assert "2" in formatted["1"]
-#         assert "B" in formatted["1"]
-#         assert "3" in formatted["1"]["2"]
-#         assert "A" in formatted["1"]["2"]
-
-# @pytest.fixture
-# def templater():
-#     return mock.create_autospec(AngularRouteTemplater, spec_set=True, instance=True)
-
-
-def _service_link(name="api.example_service"):
+def _service_link(name="api.example_service"):  # noqa: ARG001
     return links.ServiceLink(
         name="name",
         route_name="api.example_service",
diff --git a/tests/unit/h/views/badge_test.py b/tests/unit/h/views/badge_test.py
index 7101b9129d0..dddf9297063 100644
--- a/tests/unit/h/views/badge_test.py
+++ b/tests/unit/h/views/badge_test.py
@@ -48,21 +48,15 @@ def test_its_fast(self):
         # Check any modifications haven't made this significantly slower
         reps = 10000

-        start = datetime.utcnow()
+        start = datetime.utcnow()  # noqa: DTZ003

         for _ in range(reps):
             Blocklist.is_blocked("http://example.com/this/is/fine")

-        diff = datetime.utcnow() - start
+        diff = datetime.utcnow() - start  # noqa: DTZ003
         seconds = diff.seconds + diff.microseconds / 1000000
         calls_per_second = int(reps // seconds)

-        # Handy to know while tinkering
-        # print(
-        #     f"Calls per second: {calls_per_second}, "
-        #     f"{1000000 / calls_per_second:.03f} μs/call"
-        # )
-
         # It should be above this number by quite a margin (20x), but we
         # don't want flaky tests
         assert calls_per_second > 50000
@@ -105,7 +99,7 @@ def test_it_raises_if_no_uri(self):

     @pytest.fixture
     def badge_request(self, pyramid_request, factories, Blocklist):
-        def caller(uri, annotated=True, blocked=False):
+        def caller(uri, annotated=True, blocked=False):  # noqa: FBT002
             if annotated:
                 factories.DocumentURI(uri=uri)
                 pyramid_request.db.flush()
diff --git a/tests/unit/h/views/main_test.py b/tests/unit/h/views/main_test.py
index 4d9c9ebd43d..f85a93deab6 100644
--- a/tests/unit/h/views/main_test.py
+++ b/tests/unit/h/views/main_test.py
@@ -7,7 +7,7 @@
 from h.views import main


-def _fake_sidebar_app(request, extra):
+def _fake_sidebar_app(request, extra):  # noqa: ARG001
     return extra


From cfa6e2a5a9da7885e32f3db0ea58215ed98ac6d6 Mon Sep 17 00:00:00 2001
From: Sean Hammond
Date: Thu, 6 Feb 2025 11:37:55 +0000
Subject: [PATCH 2/2] Remove executable bit from run_data_task.py

This isn't needed (it's always run with commands like `python
run_data_task.py`) and Ruff complains if executable files don't have a
shebang.
---
 bin/run_data_task.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)
 mode change 100755 => 100644 bin/run_data_task.py

diff --git a/bin/run_data_task.py b/bin/run_data_task.py
old mode 100755
new mode 100644
index 8981d3289cf..928584b563c
--- a/bin/run_data_task.py
+++ b/bin/run_data_task.py
@@ -4,7 +4,7 @@
 This is a general mechanism for running tasks defined in SQL, however it's
 currently only used to perform the aggregations and mappings required for
 reporting.
-"""  # noqa: EXE002
+"""

 from argparse import ArgumentParser