ruff modernization #218

Merged (1 commit, Oct 29, 2024)
4 changes: 2 additions & 2 deletions .github/workflows/lint.yml
@@ -33,10 +33,10 @@ jobs:
# We could also use the official GitHub Actions integration.
# https://beta.ruff.rs/docs/usage/#github-action
# uses: chartboost/ruff-action@v1
run: ruff check --output-format github ./webapp ./tests
run: ruff check --output-format github ./webapp ./tests ./migrations

- name: format using ruff formatter
# We could also use the official GitHub Actions integration.
# https://beta.ruff.rs/docs/usage/#github-action
# uses: chartboost/ruff-action@v1
run: ruff check ./webapp ./tests
run: ruff check ./webapp ./tests ./migrations
15 changes: 15 additions & 0 deletions .pre-commit-config.yaml
@@ -0,0 +1,15 @@
repos:
- repo: https://github.com/pre-commit/pre-commit-hooks
rev: v5.0.0
hooks:
- id: trailing-whitespace
- id: end-of-file-fixer
- id: check-yaml

- repo: https://github.com/astral-sh/ruff-pre-commit
# Ruff version.
rev: v0.7.1
hooks:
- id: ruff-format
- id: ruff
args: [ --fix, --exit-non-zero-on-fix ]
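For context, the two ruff hooks roughly mirror the Makefile targets below: ruff-format rewrites formatting, and ruff with --fix applies autofixes for the selected lint rules. A rough illustration on a hypothetical snippet (not code from this repository), given the single-quote preference configured in pyproject.toml:

    # Hypothetical module, before the hooks run.
    import os  # F401: unused import, removed by `ruff check --fix`
    import sqlalchemy as sa


    def status_column():
        # Q000 flags the double quotes because pyproject.toml sets inline-quotes = "single".
        return sa.Column("status", sa.String(length=32), nullable=False)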
4 changes: 2 additions & 2 deletions Makefile
@@ -179,9 +179,9 @@ open-coverage:
.PHONY: lint-fix
lint-fix:
$(FLASK_RUN) ruff format ./webapp
$(FLASK_RUN) ruff check --fix ./webapp
$(FLASK_RUN) ruff check --fix ./webapp ./tests ./migrations

.PHONY: lint-check
lint-check:
$(FLASK_RUN) ruff format --check --diff webapp
$(FLASK_RUN) ruff check ./webapp
$(FLASK_RUN) ruff check ./webapp ./tests ./migrations
21 changes: 18 additions & 3 deletions migrations/versions/2023-10-25-12-33-52_687daced5384_init.py
@@ -34,7 +34,11 @@ def upgrade():
sa.Column('attribution_license', sa.Text(), nullable=True),
sa.Column('attribution_contributor', sa.String(length=256), nullable=True),
sa.Column('attribution_url', sa.String(length=256), nullable=True),
sa.Column('status', sa.Enum('DISABLED', 'ACTIVE', 'FAILED', 'PROVISIONED', name='sourcestatus'), nullable=False),
sa.Column(
'status',
sa.Enum('DISABLED', 'ACTIVE', 'FAILED', 'PROVISIONED', name='sourcestatus'),
nullable=False,
),
sa.Column('static_parking_site_error_count', sa.Integer(), nullable=False),
sa.Column('realtime_parking_site_error_count', sa.Integer(), nullable=False),
sa.PrimaryKeyConstraint('id', name=op.f('pk_source')),
@@ -59,7 +63,14 @@ def upgrade():
sa.Column('description', sa.String(length=4096), nullable=True),
sa.Column(
'type',
sa.Enum('ON_STREET', 'OFF_STREET_PARKING_GROUND', 'UNDERGROUND', 'CAR_PARK', 'OTHER', name='parkingsitetype'),
sa.Enum(
'ON_STREET',
'OFF_STREET_PARKING_GROUND',
'UNDERGROUND',
'CAR_PARK',
'OTHER',
name='parkingsitetype',
),
nullable=True,
),
sa.Column('max_stay', sa.Integer(), nullable=True),
@@ -71,7 +82,11 @@
sa.Column('has_realtime_data', sa.Boolean(), nullable=False),
sa.Column('static_data_updated_at', sqlalchemy_utc.sqltypes.UtcDateTime(timezone=True), nullable=True),
sa.Column('realtime_data_updated_at', sqlalchemy_utc.sqltypes.UtcDateTime(timezone=True), nullable=True),
sa.Column('realtime_opening_status', sa.Enum('OPEN', 'CLOSED', 'UNKNOWN', name='openingstatus'), nullable=False),
sa.Column(
'realtime_opening_status',
sa.Enum('OPEN', 'CLOSED', 'UNKNOWN', name='openingstatus'),
nullable=False,
),
sa.Column('lat', sa.Numeric(precision=10, scale=7), nullable=False),
sa.Column('lon', sa.Numeric(precision=10, scale=7), nullable=False),
sa.Column('capacity', sa.Integer(), nullable=True),
@@ -19,14 +19,24 @@
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
with op.batch_alter_table('parking_site', schema=None) as batch_op:
batch_op.alter_column('fee_description', existing_type=sa.VARCHAR(length=256), type_=sa.String(length=4096), existing_nullable=True)
batch_op.alter_column(
'fee_description',
existing_type=sa.VARCHAR(length=256),
type_=sa.String(length=4096),
existing_nullable=True,
)

# ### end Alembic commands ###


def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
with op.batch_alter_table('parking_site', schema=None) as batch_op:
batch_op.alter_column('fee_description', existing_type=sa.String(length=4096), type_=sa.VARCHAR(length=256), existing_nullable=True)
batch_op.alter_column(
'fee_description',
existing_type=sa.String(length=4096),
type_=sa.VARCHAR(length=256),
existing_nullable=True,
)

# ### end Alembic commands ###
@@ -44,7 +44,11 @@ def upgrade():
sa.Column('id', sa.BigInteger(), nullable=False),
sa.Column('created_at', sqlalchemy_utc.sqltypes.UtcDateTime(timezone=True), nullable=False),
sa.Column('modified_at', sqlalchemy_utc.sqltypes.UtcDateTime(timezone=True), nullable=False),
sa.ForeignKeyConstraint(['parking_site_id'], ['parking_site.id'], name=op.f('fk_external_identifier_parking_site_id')),
sa.ForeignKeyConstraint(
['parking_site_id'],
['parking_site.id'],
name=op.f('fk_external_identifier_parking_site_id'),
),
sa.PrimaryKeyConstraint('id', name=op.f('pk_external_identifier')),
mysql_charset='utf8mb4',
mysql_collate='utf8mb4_unicode_ci',
@@ -65,9 +69,22 @@
sa.Enum(*old_parking_site_types, name='_parkingsitetype').drop(op.get_bind())

with op.batch_alter_table('parking_site', schema=None) as batch_op:
batch_op.add_column(sa.Column('purpose', sa.Enum('CAR', 'BIKE', 'ITEM', name='purposetype'), server_default='CAR', nullable=False))
batch_op.add_column(
sa.Column(
'purpose',
sa.Enum('CAR', 'BIKE', 'ITEM', name='purposetype'),
server_default='CAR',
nullable=False,
),
)
batch_op.add_column(sa.Column('is_covered', sa.Boolean(), nullable=True))
batch_op.add_column(sa.Column('supervision_type', sa.Enum('YES', 'NO', 'VIDEO', 'ATTENDED', name='supervisiontype'), nullable=True))
batch_op.add_column(
sa.Column(
'supervision_type',
sa.Enum('YES', 'NO', 'VIDEO', 'ATTENDED', name='supervisiontype'),
nullable=True,
),
)
batch_op.add_column(sa.Column('related_location', sa.String(length=256), nullable=True))
batch_op.drop_column('is_supervised')
batch_op.alter_column(
@@ -88,7 +88,10 @@ def upgrade():
batch_op.add_column(sa.Column('duplicate_of_parking_site_id', sa.BigInteger(), nullable=True))
batch_op.add_column(sa.Column('photo_url', sa.String(length=4096), nullable=True))
batch_op.create_foreign_key(
batch_op.f('fk_parking_site_duplicate_of_parking_site_id'), 'parking_site', ['duplicate_of_parking_site_id'], ['id']
batch_op.f('fk_parking_site_duplicate_of_parking_site_id'),
'parking_site',
['duplicate_of_parking_site_id'],
['id'],
)

# ### end Alembic commands ###
@@ -24,7 +24,11 @@ def upgrade():
sa.Column('parking_site_id', sa.BigInteger(), nullable=False),
sa.Column('static_data_updated_at', sqlalchemy_utc.sqltypes.UtcDateTime(timezone=True), nullable=True),
sa.Column('realtime_data_updated_at', sqlalchemy_utc.sqltypes.UtcDateTime(timezone=True), nullable=True),
sa.Column('realtime_opening_status', sa.Enum('OPEN', 'CLOSED', 'UNKNOWN', name='history_openingstatus'), nullable=False),
sa.Column(
'realtime_opening_status',
sa.Enum('OPEN', 'CLOSED', 'UNKNOWN', name='history_openingstatus'),
nullable=False,
),
sa.Column('capacity', sa.Integer(), nullable=True),
sa.Column('capacity_disabled', sa.Integer(), nullable=True),
sa.Column('capacity_woman', sa.Integer(), nullable=True),
@@ -52,7 +56,11 @@
sa.Column('id', sa.BigInteger(), nullable=False),
sa.Column('created_at', sqlalchemy_utc.sqltypes.UtcDateTime(timezone=True), nullable=False),
sa.Column('modified_at', sqlalchemy_utc.sqltypes.UtcDateTime(timezone=True), nullable=False),
sa.ForeignKeyConstraint(['parking_site_id'], ['parking_site.id'], name=op.f('fk_parking_site_history_parking_site_id')),
sa.ForeignKeyConstraint(
['parking_site_id'],
['parking_site.id'],
name=op.f('fk_parking_site_history_parking_site_id'),
),
sa.PrimaryKeyConstraint('id', name=op.f('pk_parking_site_history')),
mysql_charset='utf8mb4',
mysql_collate='utf8mb4_unicode_ci',
@@ -5,9 +5,9 @@
Create Date: 2024-08-23 10:50:44.402057

"""
from alembic import op

import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
from alembic import op

# revision identifiers, used by Alembic.
revision = '3e92c13d297e'
@@ -5,9 +5,9 @@
Create Date: 2024-09-22 16:53:00.630618

"""
from alembic import op

import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
from alembic import op

# revision identifiers, used by Alembic.
revision = '95992608c5d1'
@@ -50,6 +50,7 @@
'OTHER',
]


def upgrade():
# Set all generic bike to other
op.execute("UPDATE parking_site SET type = 'OTHER' WHERE type = 'GENERIC_BIKE'")
@@ -87,4 +88,3 @@ def downgrade():
type_=sa.Enum(*old_parking_site_types, name='parkingsitetype'),
existing_nullable=True,
)

38 changes: 28 additions & 10 deletions pyproject.toml
@@ -1,20 +1,26 @@
[tool.ruff]
lint.select = [
"E", # pycodestyle errors
"W", # pycodestyle warnings
"F", # pyflakes
"Q", # pydocstyle
"T", # mypy
"R", # pylint
"S", # flake8
"I", # isort
"C", # flake8-comprehensions
"B", # flake8-bugbear
"S", # flake8-bandit
"ASYNC", # flake8-async
"ISC", # flake8-implicit-str-concat
"LOG", # flake8-logging
"INP", # flake8-no-pep420 (missing __init__.py)
"PIE", # flake8-pie
"T20", # flake8-print
"Q", # flake8-quotes
"TID", # flake8-tidy-imports
"FLY", # flynt (use f-string instead of static join)
]

# Enable preview rules since a lot of basic pycodestyle rules are in preview mode for some reason
preview = true

lint.ignore = [
"B008", # do not perform function calls in argument defaults
"C901", # too complex
"ISC001", # single-line-implicit-string-concatenation - conflicts with formatter
]


@@ -41,11 +47,23 @@ exclude = [
"venv",
]

line-length = 140
line-length = 120

[tool.ruff.lint.per-file-ignores]
"__init__.py" = ["F401"]
"tests/*" = ["S101", "S105", "S106"]
"tests/*" = [
# Allow assert
"S101",
# Ignore unsafe practices like hardcoded passwords
"S101", "S105", "S106",
# Don't require __init__.py files
"INP",
]
"migrations/*" = [
# Don't require __init__.py files
"INP",
]


[tool.ruff.lint.flake8-quotes]
inline-quotes = "single"
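Compared with the old list, the newly selected families mostly add hygiene checks: stray prints (T20), implicit string concatenation (ISC), static joins that should be f-strings (FLY), missing __init__.py packages (INP), and the flake8-bandit checks (S). The per-file-ignores then relax exactly the rules that do not make sense in tests and migrations. As a rough illustration, a hypothetical snippet (not from this repository) that the added rules would flag:

    def report(names: list[str]) -> str:
        # T201 (flake8-print): print calls are flagged in application code.
        print('checking', names)

        # FLY002 (flynt): a static str.join is better written as an f-string or a single literal.
        prefix = '-'.join(['park', 'api'])

        # S105 (flake8-bandit): a hardcoded password string is flagged outside of tests/*.
        password = 'hunter2'

        return f'{prefix}: {len(names)} sources ({password})'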
2 changes: 1 addition & 1 deletion requirements-dev.txt
@@ -1,2 +1,2 @@
pytest~=8.3.3
ruff~=0.6.5
ruff~=0.7.1
10 changes: 5 additions & 5 deletions requirements.txt
@@ -2,25 +2,25 @@ Flask~=3.0.3
Flask-Failsafe~=0.2
Flask-Migrate~=4.0.7
Flask-SQLAlchemy~=3.1.1
SQLAlchemy~=2.0.35
SQLAlchemy~=2.0.36
SQLAlchemy-Utc~=0.14.0
pytz~=2024.2
psycopg2-binary~=2.9.9
psycopg2-binary~=2.9.10
pymysql~=1.1.1
requests~=2.32.3
alembic~=1.13.3
gunicorn~=23.0.0
pyyaml~=6.0.2
celery~=5.4.0
werkzeug~=3.0.4
werkzeug~=3.0.6
validataclass~=0.11.0
python-dotenv~=1.0.1
click~=8.1.7
openpyxl~=3.1.5
opening-hours-py~=0.6.18
kombu~=5.4.1
kombu~=5.4.2
lxml~=5.3.0
parkapi-sources~=0.14.0
parkapi-sources~=0.14.1

# required for converters
beautifulsoup4~=4.12.3
@@ -47,7 +47,9 @@ def handle_json_data(
data: dict | list,
) -> tuple[list[StaticParkingSiteInput | RealtimeParkingSiteInput], list[ImportParkingSiteException]]:
source = self.parking_site_generic_import_service.get_upserted_source(source_uid)
import_service: JsonConverter = self.parking_site_generic_import_service.park_api_sources.converter_by_uid[source_uid] # type: ignore
import_service: JsonConverter = self.parking_site_generic_import_service.park_api_sources.converter_by_uid[
source_uid
] # type: ignore

parking_site_inputs, parking_site_errors = import_service.handle_json(data)
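An aside on the # type: ignore comments that these re-wrapped lookups keep: converter_by_uid is presumably annotated with a common converter base class, so narrowing the lookup result to JsonConverter (or XmlConverter, CsvConverter, XlsxConverter) is not something the type checker can verify. A minimal sketch of that pattern, with hypothetical stand-in classes rather than the real parkapi-sources types:

    class BaseConverter:
        """Stand-in for the shared converter base class."""


    class JsonConverter(BaseConverter):
        def handle_json(self, data: dict | list) -> list[dict]:
            return []


    # The registry is typed against the base class ...
    converter_by_uid: dict[str, BaseConverter] = {'example_source': JsonConverter()}

    # ... so assigning a lookup result to a JsonConverter-annotated name is an unchecked
    # narrowing, hence the type: ignore in the handlers above.
    import_service: JsonConverter = converter_by_uid['example_source']  # type: ignore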

@@ -59,7 +61,9 @@ def handle_xml_data(
self, source_uid: str, data: bytes
) -> tuple[list[StaticParkingSiteInput | RealtimeParkingSiteInput], list[ImportParkingSiteException]]:
source = self.parking_site_generic_import_service.get_upserted_source(source_uid)
import_service: XmlConverter = self.parking_site_generic_import_service.park_api_sources.converter_by_uid[source_uid] # type: ignore
import_service: XmlConverter = self.parking_site_generic_import_service.park_api_sources.converter_by_uid[
source_uid
] # type: ignore

try:
root_element = etree.fromstring(data, parser=etree.XMLParser(resolve_entities=False)) # noqa: S320
@@ -76,7 +80,9 @@ def handle_csv_data(
self, source_uid: str, data: str
) -> tuple[list[StaticParkingSiteInput | RealtimeParkingSiteInput], list[ImportParkingSiteException]]:
source = self.parking_site_generic_import_service.get_upserted_source(source_uid)
import_service: CsvConverter = self.parking_site_generic_import_service.park_api_sources.converter_by_uid[source_uid] # type: ignore
import_service: CsvConverter = self.parking_site_generic_import_service.park_api_sources.converter_by_uid[
source_uid
] # type: ignore

try:
parking_site_inputs, parking_site_errors = import_service.handle_csv_string(StringIO(data))
@@ -93,7 +99,9 @@ def handle_xlsx_data(
data: bytes,
) -> tuple[list[StaticParkingSiteInput | RealtimeParkingSiteInput], list[ImportParkingSiteException]]:
source = self.parking_site_generic_import_service.get_upserted_source(source_uid)
import_service: XlsxConverter = self.parking_site_generic_import_service.park_api_sources.converter_by_uid[source_uid] # type: ignore
import_service: XlsxConverter = self.parking_site_generic_import_service.park_api_sources.converter_by_uid[
source_uid
] # type: ignore

try:
workbook = load_workbook(filename=BytesIO(data))
@@ -121,9 +129,15 @@ def _handle_import_results(
static_parking_site_inputs = [item for item in parking_site_inputs if isinstance(item, StaticParkingSiteInput)]

if len(static_parking_site_inputs):
self.parking_site_generic_import_service.handle_static_import_results(source, static_parking_site_inputs, parking_site_errors)
self.parking_site_generic_import_service.handle_static_import_results(
source,
static_parking_site_inputs,
parking_site_errors,
)

realtime_parking_site_inputs = [item for item in parking_site_inputs if isinstance(item, RealtimeParkingSiteInput)]
realtime_parking_site_inputs = [
item for item in parking_site_inputs if isinstance(item, RealtimeParkingSiteInput)
]

if len(realtime_parking_site_inputs):
self.parking_site_generic_import_service.handle_realtime_import_results(
@@ -77,7 +77,9 @@ def _generate_response(
parking_site_errors: list[ImportParkingSiteException],
) -> dict:
static_parking_site_inputs = [item for item in parking_site_inputs if isinstance(item, StaticParkingSiteInput)]
realtime_parking_site_inputs = [item for item in parking_site_inputs if isinstance(item, RealtimeParkingSiteInput)]
realtime_parking_site_inputs = [
item for item in parking_site_inputs if isinstance(item, RealtimeParkingSiteInput)
]

return {
'summary': {