Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion .pre-commit-config.yaml
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
repos:
# Using this mirror lets us use mypyc-compiled black, which is about 2x faster
- repo: https://github.com/psf/black-pre-commit-mirror
rev: 25.1.0
rev: 26.1.0
hooks:
- id: black
# It is recommended to specify the latest version of Python
Expand Down
6 changes: 3 additions & 3 deletions python/nwsc_proxy/ncp_web_service.py
Original file line number Diff line number Diff line change
Expand Up @@ -66,7 +66,7 @@ def handler(self):
if request.method == "DELETE":
return self._handle_delete()

if request.method == "PATCH":
if request.method == "PUT":
return self._handle_update()

# otherwise, must be 'GET' operation
Expand Down Expand Up @@ -133,7 +133,7 @@ def _handle_create(self) -> Response:

def _handle_update(self) -> Response:
if not request.data:
return jsonify({"message": "PATCH requires request body"}), 400
return jsonify({"message": "PUT requires request body"}), 400

request_body: dict = request.json
profile_id = request.args.get("id", request.args.get("uuid"))
Expand Down Expand Up @@ -168,7 +168,7 @@ def __init__(self, base_dir: str):
"/all-events",
"events",
view_func=events_route.handler,
methods=["GET", "POST", "PATCH", "DELETE"],
methods=["GET", "POST", "PUT", "DELETE"],
)

def run(self, **kwargs):
Expand Down
130 changes: 68 additions & 62 deletions python/nwsc_proxy/src/profile_store.py
Original file line number Diff line number Diff line change
Expand Up @@ -12,15 +12,13 @@
import os
import json
import logging
from dataclasses import dataclass

from datetime import datetime, UTC
from glob import glob
from math import inf

from dateutil.parser import parse as dt_parse

from src.utils import deep_update

# constants controlling the subdirectory where new vs. existing Profiles are saved
NEW_SUBDIR = "new"
EXISTING_SUBDIR = "existing"
Expand All @@ -29,7 +27,6 @@
logger = logging.getLogger(__name__)


@dataclass
class CachedProfile:
"""Data class to hold Support Profile's data and metadata ("new" vs "existing" status),
as well as some derived properties extracted from the `data` JSON (e.g.
Expand All @@ -41,13 +38,14 @@ class CachedProfile:
is_new (bool): track if Support Profile has ever been processed. Ought to start as True
"""

# pylint: disable=invalid-name
data: dict
is_new: bool
def __init__(self, data: dict, is_new: bool):
self.data = data
self.is_new = is_new

@property
def id(self) -> str:
"""The Support Profile UUID"""
# pylint: disable=invalid-name
return self.data.get("id")

@property
Expand Down Expand Up @@ -116,7 +114,7 @@ def __init__(self, base_dir: str):
logger.info("Scanning base directory for raw NWS Connect API response files: %s", base_dir)
for response_filename in glob("*.json", root_dir=base_dir):
response_filepath = os.path.join(base_dir, response_filename)
logger.warning("Loading profiles from raw API response file: %s", response_filepath)
logger.info("Loading profiles from raw API response file: %s", response_filepath)

with open(response_filepath, "r", encoding="utf-8") as infile:
data: dict = json.load(infile)
Expand All @@ -131,16 +129,16 @@ def __init__(self, base_dir: str):
json.dump(profile, outfile)

# populate cache of JSON data of all Support Profiles, marked as new vs. existing
existing_profiles = [
CachedProfile(profile, is_new=False)
existing_profiles = {
profile["id"]: CachedProfile(profile, is_new=False)
for profile in self._load_profiles_from_filesystem(self._existing_dir)
]
new_profiles = [
CachedProfile(profile, is_new=True)
}
new_profiles = {
profile["id"]: CachedProfile(profile, is_new=True)
for profile in self._load_profiles_from_filesystem(self._new_dir)
]
}

self.profile_cache = existing_profiles + new_profiles
self.profile_cache: dict[str, CachedProfile] = {**existing_profiles, **new_profiles}

def get_all(self, data_source="ANY", is_new=False, include_inactive=False) -> list[dict]:
"""Get all Support Profile JSONs persisted in this API, filtering by status='new'
Expand All @@ -154,7 +152,7 @@ def get_all(self, data_source="ANY", is_new=False, include_inactive=False) -> li
"""
profiles_by_status = [
cached_profile
for cached_profile in self.profile_cache
for cached_profile in self.profile_cache.values()
if (
# is new, if client requested new profiles, or is existing
cached_profile.is_new == is_new
Expand Down Expand Up @@ -186,31 +184,15 @@ def save(self, profile: dict, is_new=True) -> str | None:
logger.debug("Now saving new profile: %s", profile)

# if profile ID is already in the cache, reject this save
existing_profile = next(
(
(
cached_obj
for cached_obj in self.profile_cache
if cached_obj.id == profile.get("id")
)
),
None,
)
if existing_profile:
if existing_profile := self.profile_cache.get(profile.get("id")):
logger.warning("Cannot save profile; already exists %s", existing_profile.id)
return None

cached_profile = CachedProfile(profile, is_new=is_new)

# save Profile JSON to filesystem
file_dir = self._new_dir if is_new else self._existing_dir
filepath = os.path.join(file_dir, f"{cached_profile.id}.json")
logger.debug("Now saving profile to path: %s", filepath)
with open(filepath, "w", encoding="utf-8") as file:
json.dump(profile, file)
filepath = self._save_profile_to_filesystem(cached_profile)

# add profile to in-memory cache
self.profile_cache.append(cached_profile)
self.profile_cache[cached_profile.id] = cached_profile
logger.info("Saved profile to cache, file location: %s", filepath)
return cached_profile.id

Expand All @@ -222,9 +204,7 @@ def mark_as_existing(self, profile_id: str) -> bool:
bool: True on success. False if JSON with this profile_id not found on filesystem
"""
# find the profile data from the new_profiles cache and move it to existing_profiles
cached_profile = next(
(profile for profile in self.profile_cache if profile.id == profile_id), None
)
cached_profile = self.profile_cache.get(profile_id)
if not cached_profile:
# profile is not in cache; it must not exist
logger.warning(
Expand All @@ -242,7 +222,9 @@ def mark_as_existing(self, profile_id: str) -> bool:
# move the JSON file from the "new" to the "existing" directory and update cache
existing_filepath = os.path.join(self._existing_dir, f"{profile_id}.json")
os.rename(new_filepath, existing_filepath)

cached_profile.is_new = False
self.profile_cache[profile_id] = cached_profile

return True

Expand All @@ -251,37 +233,40 @@ def update(self, profile_id: str, data: dict) -> dict:

Args:
profile_id (str): The UUID of the Support Profile to update
data (dict): The JSON attributes to apply. Can be partial Support Profile
data (dict): The JSON attributes to apply. Must be complete Support Profile

Returns:
dict: the latest version of the Profile, with all attribute changes applied
dict: the latest version of the Profile, with the profile overwritten by `data`, or
`None` on error (existing profile will be unchanged)

Raises:
FileNotFoundError: if no Support Profile exists with the provided id
"""
logger.info("Updating profile_id %s with new attributes: %s", profile_id, data)
logger.info("Updating profile_id %s with new data: %s", profile_id, data)

# find the profile data from the new_profiles cache, then apply updates and save over it
cached_profile = next(
(profile for profile in self.profile_cache if profile.id == profile_id), None
)
# find the profile data from the new_profiles cache, then save over it
cached_profile = self.profile_cache.get(profile_id)
if not cached_profile:
raise FileNotFoundError # Profile with this ID does not exist in cache

# Apply dict of edits on top of existing cached profile (combining any nested attributes)
new_profile_data = deep_update(cached_profile.data, data)
is_new_profile = cached_profile.is_new

# a bit hacky, but the fastest and least duplicative way to update a Profile
# in cache + disk is to delete the existing profile and re-save with modified JSON data
self.delete(profile_id)
update_success = self.save(new_profile_data, is_new_profile)
if profile_id != data.get("id"):
raise ValueError(
(
f"Refusing to overwrite existing profile ID {profile_id} with mismatched data "
f"including id {data.get('id')}"
)
)

if not update_success:
updated_profile = CachedProfile(data, cached_profile.is_new)
# update disk with latest data; if the write fails, reject update
saved_file = self._save_profile_to_filesystem(updated_profile)
if not saved_file:
logger.warning("Unable to update Profile ID %s for some reason", profile_id)
return None

return new_profile_data
# update in-memory cache to overwrite previous profile by ID
self.profile_cache[profile_id] = updated_profile
return updated_profile.data

def delete(self, profile_id: str) -> bool:
"""Delete a Support Profile profile from storage, based on its UUID.
Expand All @@ -305,18 +290,39 @@ def delete(self, profile_id: str) -> bool:
)
return False

# drop profile from disk
logger.debug("Attempting to delete profile at path: %s", filepath)
os.remove(filepath)

# drop profile from cache
self.profile_cache = [
cached_profile
for cached_profile in self.profile_cache
if cached_profile.id != profile_id
]
del self.profile_cache[profile_id]
return True

def _load_profiles_from_filesystem(self, dir_: str) -> list[dict]:
def _save_profile_to_filesystem(self, profile: CachedProfile) -> str | None:
    """Write a CachedProfile's JSON data to disk so it persists across service restarts.

    The file lands in the "new" or "existing" subdirectory (depending on
    `profile.is_new`) and is named `<profile_id>.json`.

    Args:
        profile (CachedProfile): profile to persist; its `data` dict must contain an "id"

    Returns:
        str | None: path of the written file, or None if serialization or the
            filesystem write failed. On a serialization failure, any previously
            saved file for this profile is left untouched.

    Raises:
        ValueError: if the profile data has no "id" attribute to name the file by
    """
    profile_id = profile.data.get("id")
    if not profile_id:
        raise ValueError("Cannot save CachedProfile to file that has no `id` attribute")

    file_dir = self._new_dir if profile.is_new else self._existing_dir
    filepath = os.path.join(file_dir, f"{profile_id}.json")
    logger.debug("Now saving profile to path: %s", filepath)
    try:
        # Serialize BEFORE opening the file: open(..., "w") truncates immediately,
        # so a mid-dump serialization error would otherwise corrupt the
        # previously-saved copy on disk
        serialized = json.dumps(profile.data)
        with open(filepath, "w", encoding="utf-8") as file:
            file.write(serialized)
    except (OSError, TypeError, ValueError) as exc:
        # json.dumps raises TypeError (unserializable object) or ValueError
        # (e.g. circular reference); OSError covers PermissionError and every
        # other filesystem failure. Note json.JSONDecodeError applies only to
        # *reading* JSON, so it cannot occur here.
        logger.error(
            "Failed to save Profile %s to file %s due to: (%s) %s",
            profile_id,
            filepath,
            type(exc),
            str(exc),
        )
        return None

    return filepath

@staticmethod
def _load_profiles_from_filesystem(dir_: str) -> list[dict]:
"""Read all JSON files from one of this ProfileStore's subdirectories, and return list of
the discovered files' json data.

Expand Down
14 changes: 7 additions & 7 deletions python/nwsc_proxy/test/test_ncp_web_service.py
Original file line number Diff line number Diff line change
Expand Up @@ -233,20 +233,20 @@ def test_delete_profile_failure(wrapper: AppWrapper, mock_request: Mock, mock_pr


def test_update_profile_success(wrapper: AppWrapper, mock_request: Mock, mock_profile_store: Mock):
mock_request.method = "PATCH"
mock_request.method = "PUT"
mock_request.args = MultiDict({"id": EXAMPLE_UUID})
mock_request.json = {"name": "Some new name"}
updated_profile = {"id": EXAMPLE_UUID, "name": "Some new name"}
expected_data = {"id": EXAMPLE_UUID, "name": "Some new name"}
mock_request.json = expected_data
mock_profile_store.return_value.update.return_value = expected_data

mock_profile_store.return_value.update.return_value = updated_profile
result: tuple[Response, int] = wrapper.app.view_functions["events"]()

assert result[1] == 200
assert result[0].json["profile"] == updated_profile
assert result[0].json["profile"] == expected_data


def test_update_no_body(wrapper: AppWrapper, mock_request: Mock, mock_profile_store: Mock):
mock_request.method = "PATCH"
mock_request.method = "PUT"
mock_request.args = MultiDict({"uuid": EXAMPLE_UUID})
mock_request.data = None

Expand All @@ -256,7 +256,7 @@ def test_update_no_body(wrapper: AppWrapper, mock_request: Mock, mock_profile_st


def test_update_profile_missing(wrapper: AppWrapper, mock_request: Mock, mock_profile_store: Mock):
mock_request.method = "PATCH"
mock_request.method = "PUT"
mock_request.args = MultiDict({"uuid": EXAMPLE_UUID})
mock_profile_store.return_value.update.side_effect = FileNotFoundError

Expand Down
35 changes: 24 additions & 11 deletions python/nwsc_proxy/test/test_profile_store.py
Original file line number Diff line number Diff line change
Expand Up @@ -73,16 +73,16 @@ def store():

# tests
def test_profile_store_loads_api_responses(store: ProfileStore):
assert sorted([c.id for c in store.profile_cache]) == [
assert sorted(list(store.profile_cache.keys())) == [
"a08370c6-ab87-4808-bd51-a8597e58410d",
"e1033860-f198-4c6a-a91b-beaec905132f",
"fd35adec-d2a0-49a9-a320-df20a7b6d681",
]

for cache_obj in store.profile_cache:
for cache_id, cache_obj in store.profile_cache.items():
# should have loaded all profiles as status "existing", file should exist in that subdir
assert not cache_obj.is_new
filepath = os.path.join(STORE_BASE_DIR, EXISTING_SUBDIR, f"{cache_obj.id}.json")
filepath = os.path.join(STORE_BASE_DIR, EXISTING_SUBDIR, f"{cache_id}.json")
assert os.path.exists(filepath)

# new directory should be empty to begin with
Expand Down Expand Up @@ -186,24 +186,37 @@ def test_update_profile_success(store: ProfileStore):
profile_id = EXAMPLE_SUPPORT_PROFILE["id"]
new_start_dt = "2026-01-01T12:00:00Z"
new_name = "A different name"
new_profile_data = {"name": new_name, "setting": {"timing": {"start": new_start_dt}}}
new_profile = deepcopy(EXAMPLE_SUPPORT_PROFILE)
new_profile["name"] = new_name
new_profile["setting"] = {"timing": {"start": new_start_dt}}

updated_profile = store.update(profile_id, new_profile_data)
updated_profile = store.update(profile_id, new_profile)

# data returned should have updated attributes
assert updated_profile["name"] == new_name
assert updated_profile["setting"]["timing"]["start"] == new_start_dt
# attributes at same level as any nested updated ones should not have changed
assert (
updated_profile["setting"]["timing"].get("durationInMinutes")
== EXAMPLE_SUPPORT_PROFILE["setting"]["timing"]["durationInMinutes"]
)
# profile in cache should have indeed been changed
refetched_profile = next((p for p in store.profile_cache if p.id == profile_id), None)
refetched_profile = store.profile_cache.get(profile_id)
assert refetched_profile.name == new_name
assert datetime.fromtimestamp(refetched_profile.start_timestamp, UTC) == dt_parse(new_start_dt)


def test_update_profile_error_rollback(store: ProfileStore):
    """update() must return None and leave the cached profile untouched when persisting fails"""
    profile_id = EXAMPLE_SUPPORT_PROFILE["id"]
    new_name = "A different name"
    new_profile = deepcopy(EXAMPLE_SUPPORT_PROFILE)
    new_profile["name"] = new_name
    # inject JSON-unserializable content (a set is not JSON-serializable, so json.dumps
    # raises TypeError); this makes the filesystem save fail and update() return None
    new_profile["unserializable"] = {"foo", "bar"}

    updated_profile = store.update(profile_id, new_profile)

    assert updated_profile is None
    # profile in cache is still there, no changes were made to data
    refetched_profile = store.profile_cache.get(profile_id)
    assert refetched_profile.name != new_name


def test_update_profile_not_found(store: ProfileStore):
profile_id = "11111111-2222-3333-444444444444" # fake ID does not exist in ProfileStore
new_profile_data = {"name": "A different name"}
Expand Down