Merge branch 'master' into TP2000-1434--packaging-queue-race-condition
dalecannon committed Jul 24, 2024
2 parents 64e3221 + 8d07ef2 commit 5635e3c
Showing 28 changed files with 412 additions and 173 deletions.
6 changes: 3 additions & 3 deletions .pre-commit-config.yaml
@@ -41,17 +41,17 @@ repos:
- id: pii_secret_filename
files: ''
language: python
language_version: python3.9
language_version: python3.12
pass_filenames: true
require_serial: true
- id: pii_secret_file_content
files: ''
language: python
language_version: python3.9
language_version: python3.12
pass_filenames: true
require_serial: true
- id: hooks_version_check
name: Checking local hooks against latest release
verbose: true
require_serial: true


2 changes: 1 addition & 1 deletion Dockerfile
@@ -12,7 +12,7 @@

############################################################

FROM python:3.9-bookworm
FROM python:3.12-bookworm

LABEL maintainer="[email protected]"

2 changes: 1 addition & 1 deletion Dockerfile.pytest
@@ -2,7 +2,7 @@
###############
# Build on Python-based image using default target platform (TARGETPLATFORM).

FROM python:3.9-bookworm
FROM python:3.12-bookworm

ARG TARGETPLATFORM
ARG TARGETARCH
2 changes: 1 addition & 1 deletion README.rst
@@ -17,7 +17,7 @@ Prerequisites

The following dependencies are required to run this app:

- Python_ 3.9.x
- Python_ 3.12.x
- Node.js_ 20.10.0 (LTS)
- PostgreSQL_ 12
- Redis_ 5.x
3 changes: 2 additions & 1 deletion common/inspect_tap_tasks.py
@@ -59,9 +59,10 @@ def clean_tasks(self, tasks, task_status="", task_name="") -> List[Dict]:

return tasks_cleaned

def current_rule_checks(self, task_name="") -> List[CeleryTask]:
def current_tasks(self, task_name="") -> List[CeleryTask]:
"""Return the list of tasks queued or started, ready to display in the
view."""

inspect = app.control.inspect()
if not inspect:
return []
4 changes: 3 additions & 1 deletion common/tests/test_files/bomb.xml
@@ -3,5 +3,7 @@
<!ENTITY b "&a;&a;&a;&a;&a;&a;&a;&a;">
<!ENTITY c "&b;&b;&b;&b;&b;&b;&b;&b;">
<!ENTITY d "&c;&c;&c;&c;&c;&c;&c;&c;">
<!ENTITY e "&d;&d;&d;&d;&d;&d;&d;&d;">
<!ENTITY f "&e;&e;&e;&e;&e;&e;&e;&e;">
]>
<bomb>&c;</bomb>
<bomb>&f;</bomb>
62 changes: 48 additions & 14 deletions common/util.py
@@ -2,7 +2,9 @@

from __future__ import annotations

import os
import re
import typing
from datetime import date
from datetime import datetime
from datetime import timedelta
@@ -44,27 +46,16 @@
from django.db.models.functions.text import Upper
from django.db.transaction import atomic
from django.template import loader
from django.utils import timezone
from lxml import etree
from psycopg.types.range import DateRange
from psycopg.types.range import TimestampRange

major, minor, patch = python_version_tuple()

# The preferred style of combining @classmethod and @property is only in 3.9.
# When we stop support for 3.8, we should remove both of these branches.
if int(major) == 3 and int(minor) < 9:
# https://stackoverflow.com/a/13624858
class classproperty(object):
def __init__(self, fget):
self.fget = fget

def __get__(self, owner_self, owner_cls):
return self.fget(owner_cls)

else:

def classproperty(fn):
return classmethod(property(fn))
def classproperty(fn):
return classmethod(property(fn))


def is_truthy(value: Union[str, bool]) -> bool:
@@ -616,3 +607,46 @@ def format_date_string(date_string: str, short_format=False) -> str:
return date_parser.parse(date_string).strftime(settings.DATE_FORMAT)
except:
return ""


def log_timing(logger_function: typing.Callable):
"""
Decorator function to log start and end times of a decorated function.
When decorating a function, `logger_function` must be passed in to the
decorator to ensure the correct logger instance and function are applied.
`logger_function` may be any one of the logging output functions, but is
likely to be either `debug` or `info`.
Example:
```
import logging
logger = logging.getLogger(__name__)
@log_timing(logger_function=logger.info)
def my_function():
...
```
"""

@wrapt.decorator
def wrapper(wrapped, instance, args, kwargs):
start_time = timezone.localtime()
logger_function(
f"Entering the function {wrapped.__name__}() on process "
f"pid={os.getpid()} at {start_time.isoformat()}",
)

result = wrapped(*args, **kwargs)

end_time = timezone.localtime()
elapsed_time = end_time - start_time
logger_function(
f"Exited the function {wrapped.__name__}() on "
f"process pid={os.getpid()} at {end_time.isoformat()} after "
f"an elapsed time of {elapsed_time}.",
)

return result

return wrapper
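
For context, a minimal usage sketch of the new `log_timing` decorator added above; the decorated function name, pid, and timestamps are illustrative placeholders, not output captured from this commit.

```python
import logging

from common.util import log_timing

logger = logging.getLogger(__name__)


@log_timing(logger_function=logger.info)
def rebuild_report():
    ...


# Calling rebuild_report() emits two log records built from the f-strings in
# the decorator above, roughly of the form:
#   Entering the function rebuild_report() on process pid=42 at 2024-07-24T09:00:00+01:00
#   Exited the function rebuild_report() on process pid=42 at 2024-07-24T09:00:02+01:00 after an elapsed time of 0:00:02.
```
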
6 changes: 2 additions & 4 deletions common/views.py
@@ -47,7 +47,7 @@
from commodities.models import GoodsNomenclature
from common.business_rules import BusinessRule
from common.business_rules import BusinessRuleViolation
from common.celery import app
from common.celery import app as celery_app
from common.forms import HomeSearchForm
from common.models import TrackedModel
from common.models import Transaction
@@ -65,8 +65,6 @@
from workbaskets.models import WorkflowStatus
from workbaskets.views.mixins import WithCurrentWorkBasket

from .celery import app as celery_app


class HomeView(LoginRequiredMixin, FormView):
template_name = "common/homepage.jinja"
@@ -350,7 +348,7 @@ class AppInfoView(
DATETIME_FORMAT = "%d %b %Y, %H:%M"

def active_tasks(self) -> Dict:
inspect = app.control.inspect()
inspect = celery_app.control.inspect()
if not inspect:
return {}

8 changes: 4 additions & 4 deletions conftest.py
@@ -1099,7 +1099,7 @@ def make_storage_mock(s3, storage_class, **override_settings):
"LocationConstraint": settings.AWS_S3_REGION_NAME,
},
)
except s3.exceptions.BucketAlreadyExists:
except (s3.exceptions.BucketAlreadyExists, s3.exceptions.BucketAlreadyOwnedByYou):
return storage

return storage
@@ -1119,13 +1119,13 @@ def hmrc_storage(s3):

@pytest.fixture
def sqlite_storage(s3, s3_bucket_names):
"""Patch SQLiteStorage with moto so that nothing is really uploaded to
"""Patch SQLiteS3VFSStorage with moto so that nothing is really uploaded to
s3."""
from exporter.storages import SQLiteStorage
from exporter.storages import SQLiteS3VFSStorage

storage = make_storage_mock(
s3,
SQLiteStorage,
SQLiteS3VFSStorage,
bucket_name=settings.SQLITE_STORAGE_BUCKET_NAME,
)
assert storage.endpoint_url is settings.SQLITE_S3_ENDPOINT_URL
29 changes: 24 additions & 5 deletions exporter/management/commands/dump_sqlite.py
@@ -11,20 +11,39 @@


class Command(BaseCommand):
help = (
"Create a snapshot of the application database to a file in SQLite "
"format. Snapshot file names take the form <transaction-order>.db, "
"where <transaction-order> is the value of the last published "
"transaction's order attribute. Care should be taken to ensure that "
"there is sufficient local file system storage to accomodate the "
"SQLite file - if you choose to target remote S3 storage, then a "
"temporary local copy of the file will be created and cleaned up."
)

def add_arguments(self, parser: CommandParser) -> None:
parser.add_argument(
"--immediately",
"--asynchronous",
action="store_const",
help="Run the task in this process now rather than queueing it up",
help="Queue the snapshot task to run in an asynchronous process.",
const=True,
default=False,
)
parser.add_argument(
"--save-local",
help=(
"Save the SQLite snapshot to the local file system under the "
"(existing) directory given by DIRECTORY_PATH."
),
dest="DIRECTORY_PATH",
)
return super().add_arguments(parser)

def handle(self, *args: Any, **options: Any) -> Optional[str]:
logger.info(f"Triggering tariff database export to SQLite")

if options["immediately"]:
export_and_upload_sqlite()
local_path = options["DIRECTORY_PATH"]
if options["asynchronous"]:
export_and_upload_sqlite.delay(local_path)
else:
export_and_upload_sqlite.delay()
export_and_upload_sqlite(local_path)
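
The reworked arguments imply usage along these lines; a sketch using Django's call_command, where the target directory is a placeholder and must already exist, per the --save-local help text:

```python
from django.core.management import call_command

# Run the export synchronously in this process, saving the snapshot under an
# existing local directory (equivalent to: manage.py dump_sqlite --save-local <dir>).
call_command("dump_sqlite", "--save-local", "/tmp/sqlite-snapshots")

# Queue the snapshot task to run in an asynchronous worker process instead.
call_command("dump_sqlite", "--asynchronous")
```
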
33 changes: 21 additions & 12 deletions exporter/sqlite/__init__.py
@@ -41,32 +41,41 @@
}


def make_export_plan(sqlite: runner.Runner) -> plan.Plan:
names = (
def make_export_plan(sqlite_runner: runner.Runner) -> plan.Plan:
app_names = (
name.split(".")[0]
for name in settings.DOMAIN_APPS
if name not in settings.SQLITE_EXCLUDED_APPS
)
all_models = chain(*[apps.get_app_config(name).get_models() for name in names])
all_models = chain(*[apps.get_app_config(name).get_models() for name in app_names])
models_by_table = {model._meta.db_table: model for model in all_models}

import_script = plan.Plan()
for table, sql in sqlite.tables:
for table, create_table_statement in sqlite_runner.tables:
model = models_by_table.get(table)
if model is None or model.__name__ in SKIPPED_MODELS:
continue

columns = list(sqlite.read_column_order(model._meta.db_table))
import_script.add_schema(sql)
columns = list(sqlite_runner.read_column_order(model._meta.db_table))
import_script.add_schema(create_table_statement)
import_script.add_data(model, columns)

return import_script


def make_export(connection: apsw.Connection):
with NamedTemporaryFile() as db_name:
sqlite = runner.Runner.make_tamato_database(Path(db_name.name))
plan = make_export_plan(sqlite)

export = runner.Runner(connection)
export.run_operations(plan.operations)
with NamedTemporaryFile() as temp_sqlite_db:
# Create Runner instance with its SQLite file name pointing at a path on
# the local file system. This is only required temporarily in order to
# create an in-memory plan that can be run against a target database
# object.
plan_runner = runner.Runner.make_tamato_database(
Path(temp_sqlite_db.name),
)
plan = make_export_plan(plan_runner)
# make_tamato_database() creates a Connection instance that needs
# closing once an in-memory plan has been created from it.
plan_runner.database.close()

export_runner = runner.Runner(connection)
export_runner.run_operations(plan.operations)
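
For orientation, a rough sketch of driving the refactored make_export() against a target SQLite file; the output path is illustrative and the real call sites live elsewhere in the exporter package.

```python
import apsw

from exporter.sqlite import make_export

# Open (or create) the target SQLite database, run the export plan into it,
# then close the connection.
target_connection = apsw.Connection("/tmp/tariff-snapshot.db")
make_export(target_connection)
target_connection.close()
```
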
2 changes: 2 additions & 0 deletions exporter/sqlite/plan.py
@@ -100,9 +100,11 @@ def operations(self) -> Iterable[Operation]:
]

def add_schema(self, sql: str):
"""Add sql schema (table) creation statements to this Plan instance."""
self._operations.append((sql, [[]]))

def add_data(self, model: Type[Model], columns: Iterable[str]):
"""Add data insert statements to this Plan instance."""
queryset = model.objects
output_columns = []
for column in columns:
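
As the new docstrings describe, a Plan is assembled from schema statements followed by per-model data inserts, mirroring make_export_plan() above; a condensed sketch in which the model, table, and column names are placeholders:

```python
from exporter.sqlite.plan import Plan
from regulations.models import Regulation  # illustrative model choice

export_plan = Plan()
# Schema first: the CREATE TABLE statement read from the template database...
export_plan.add_schema('CREATE TABLE "regulations_regulation" (...)')
# ...then the data insert operations for the matching model and column order.
export_plan.add_data(Regulation, ["trackedmodel_ptr_id", "regulation_id", "role_type"])

# export_plan.operations yields the ordered operations that a Runner can
# execute against a target SQLite connection (see make_export() above).
```
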