This repository has been archived by the owner on Nov 19, 2024. It is now read-only.

Commit 3e3af7a

more auditing of dandi to lincbrain renaming
Aaron Kanzer authored and Aaron Kanzer committed Jan 10, 2024
1 parent 61ea2b0 commit 3e3af7a
Showing 9 changed files with 33 additions and 32 deletions.
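
The rename is mechanical: each import of dandi becomes the corresponding lincbrain import. A quick way to audit for stragglers is to scan the package for imports that still reference dandi. The script below is a hypothetical sketch of such an audit, not part of this commit; the lincbrain/ directory and the regex are assumptions, and it deliberately ignores dandischema, which keeps its name.

#!/usr/bin/env python3
# Hypothetical audit helper: list lines under lincbrain/ that still import dandi.
import re
from pathlib import Path

# Matches "import dandi" or "from dandi[.x] import ...", but not lincbrain or dandischema.
PATTERN = re.compile(r"^\s*(?:from|import)\s+dandi(?:\.|\s|$)")

for path in Path("lincbrain").rglob("*.py"):
    for line_no, line in enumerate(path.read_text(encoding="utf-8").splitlines(), start=1):
        if PATTERN.match(line):
            print(f"{path}:{line_no}: {line.strip()}")

Run from the repository root, this would flag, for example, the "from dandi import __version__" line that is left untouched in the first hunk below.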
2 changes: 1 addition & 1 deletion lincbrain/cli/tests/test_service_scripts.py
@@ -14,7 +14,7 @@
import vcr

from dandi import __version__
-from dandi.tests.fixtures import SampleDandiset
+from lincbrain.tests.fixtures import SampleDandiset

from ..cmd_service_scripts import service_scripts

1 change: 1 addition & 0 deletions lincbrain/dandiapi.py
@@ -1684,6 +1684,7 @@ def from_data(
raise ValueError("Asset data contains both `blob` and `zarr`'")
else:
raise ValueError("Asset data contains neither `blob` nor `zarr`")
+# TODO: Aaron -- validation starts here...
return klass( # type: ignore[call-arg]
client=dandiset.client,
dandiset_id=dandiset.identifier,
6 changes: 3 additions & 3 deletions lincbrain/files/__init__.py
@@ -16,9 +16,9 @@
import os.path
from pathlib import Path

-from dandi import get_logger
-from dandi.consts import BIDS_DATASET_DESCRIPTION, dandiset_metadata_file
-from dandi.exceptions import UnknownAssetError
+from lincbrain import get_logger
+from lincbrain.consts import BIDS_DATASET_DESCRIPTION, dandiset_metadata_file
+from lincbrain.exceptions import UnknownAssetError

from ._private import BIDSFileFactory, DandiFileFactory
from .bases import (
4 changes: 2 additions & 2 deletions lincbrain/files/_private.py
@@ -7,12 +7,12 @@
from typing import ClassVar
import weakref

-from dandi.consts import (
+from lincbrain.consts import (
BIDS_DATASET_DESCRIPTION,
VIDEO_FILE_EXTENSIONS,
ZARR_EXTENSIONS,
)
-from dandi.exceptions import UnknownAssetError
+from lincbrain.exceptions import UnknownAssetError

from .bases import DandiFile, GenericAsset, LocalAsset, NWBAsset, VideoAsset
from .bids import (
26 changes: 13 additions & 13 deletions lincbrain/files/bases.py
@@ -23,14 +23,14 @@
from pydantic import ValidationError
import requests

-import dandi
-from dandi.dandiapi import RemoteAsset, RemoteDandiset, RESTFullAPIClient
-from dandi.metadata.core import get_default_metadata
-from dandi.misctypes import DUMMY_DANDI_ETAG, Digest, LocalReadableFile, P
-from dandi.utils import yaml_load
-from dandi.validate_types import Scope, Severity, ValidationOrigin, ValidationResult
+import lincbrain
+from lincbrain.dandiapi import RemoteAsset, RemoteDandiset, RESTFullAPIClient
+from lincbrain.metadata.core import get_default_metadata
+from lincbrain.misctypes import DUMMY_DANDI_ETAG, Digest, LocalReadableFile, P
+from lincbrain.utils import yaml_load
+from lincbrain.validate_types import Scope, Severity, ValidationOrigin, ValidationResult

-lgr = dandi.get_logger()
+lgr = lincbrain.get_logger()

# TODO -- should come from schema. This is just a simplistic example for now
_required_dandiset_metadata_fields = ["identifier", "name", "description"]
@@ -204,7 +204,7 @@ def get_validation_errors(
ValidationResult(
origin=ValidationOrigin(
name="dandi",
-version=dandi.__version__,
+version=lincbrain.__version__,
),
severity=Severity.ERROR,
id="dandi.SOFTWARE_ERROR",
@@ -299,7 +299,7 @@ def get_metadata(

def get_digest(self) -> Digest:
"""Calculate a dandi-etag digest for the asset"""
-from dandi.support.digests import get_digest
+from lincbrain.support.digests import get_digest

value = get_digest(self.filepath, digest="dandi-etag")
return Digest.dandi_etag(value)
@@ -331,7 +331,7 @@ def iter_upload(
``"done"`` and an ``"asset"`` key containing the resulting
`RemoteAsset`.
"""
-from dandi.support.digests import get_dandietag
+from lincbrain.support.digests import get_dandietag

asset_path = metadata.setdefault("path", self.path)
client = dandiset.client
@@ -469,7 +469,7 @@ def get_metadata(
digest: Digest | None = None,
ignore_errors: bool = True,
) -> BareAsset:
-from dandi.metadata.nwb import nwb2asset
+from lincbrain.metadata.nwb import nwb2asset

try:
metadata = nwb2asset(self.filepath, digest=digest)
@@ -501,7 +501,7 @@ def get_validation_errors(
"""
from nwbinspector import Importance, inspect_nwbfile, load_config

-from dandi.pynwb_utils import validate as pynwb_validate
+from lincbrain.pynwb_utils import validate as pynwb_validate

errors: list[ValidationResult] = pynwb_validate(
self.filepath, devel_debug=devel_debug
@@ -559,7 +559,7 @@ def get_validation_errors(
[e], self.filepath, scope=Scope.FILE
)

-from dandi.organize import validate_organized_path
+from lincbrain.organize import validate_organized_path

from .bids import NWBBIDSAsset

2 changes: 1 addition & 1 deletion lincbrain/files/bids.py
@@ -66,7 +66,7 @@ def _validate(self) -> None:
with self._lock:
if self._dataset_errors is None:
# Import here to avoid circular import
-from dandi.validate import validate_bids
+from lincbrain.validate import validate_bids

bids_paths = [str(self.filepath)] + [
str(asset.filepath) for asset in self.dataset_files
10 changes: 5 additions & 5 deletions lincbrain/files/zarr.py
@@ -93,7 +93,7 @@ def get_digest(self) -> Digest:
it is a file, it will be MD5.
"""

-from dandi.support.digests import get_digest, get_zarr_checksum
+from lincbrain.support.digests import get_digest, get_zarr_checksum

if self.is_dir():
return Digest.dandi_zarr(get_zarr_checksum(self.filepath))
@@ -151,7 +151,7 @@ def stat(self) -> ZarrStat:
"""Return various details about the Zarr asset"""

def dirstat(dirpath: LocalZarrEntry) -> ZarrStat:
-from dandi.support.digests import md5file_nocache
+from lincbrain.support.digests import md5file_nocache

size = 0
dir_md5s = {}
@@ -177,7 +177,7 @@ def dirstat(dirpath: LocalZarrEntry) -> ZarrStat:

def get_digest(self) -> Digest:
"""Calculate a dandi-zarr-checksum digest for the asset"""
-from dandi.support.digests import get_zarr_checksum
+from lincbrain.support.digests import get_zarr_checksum

return Digest.dandi_zarr(get_zarr_checksum(self.filepath))

@@ -594,7 +594,7 @@ def register(self, e: LocalZarrEntry, digest: str | None = None) -> None:

@staticmethod
def _mkitem(e: LocalZarrEntry) -> UploadItem:
-from dandi.support.digests import md5file_nocache
+from lincbrain.support.digests import md5file_nocache

digest = md5file_nocache(e.filepath)
return UploadItem.from_entry(e, digest)
@@ -646,7 +646,7 @@ def upload_request(self) -> dict[str, str]:
def _cmp_digests(
asset_path: str, local_entry: LocalZarrEntry, remote_digest: str
) -> tuple[LocalZarrEntry, str, bool]:
-from dandi.support.digests import md5file_nocache
+from lincbrain.support.digests import md5file_nocache

local_digest = md5file_nocache(local_entry.filepath)
if local_digest != remote_digest:
4 changes: 2 additions & 2 deletions tools/migrate-dandisets.py
@@ -2,8 +2,8 @@
import click
import requests

-from dandi.dandiapi import DandiAPIClient
-from dandi.dandiset import APIDandiset
+from lincbrain.dandiapi import DandiAPIClient
+from lincbrain.dandiset import APIDandiset


@click.command()
10 changes: 5 additions & 5 deletions tox.ini
@@ -13,7 +13,7 @@ commands =
# Using pytest-cov instead of using coverage directly leaves a bunch of
# .coverage.$HOSTNAME.#.# files lying around for some reason
coverage erase
-coverage run -m pytest -v {posargs} dandi
+coverage run -m pytest -v {posargs} lincbrain
coverage combine
coverage report

@@ -23,16 +23,16 @@
codespell~=2.0
flake8
commands =
-codespell dandi setup.py
-flake8 --config=setup.cfg {posargs} dandi setup.py
+codespell lincbrain setup.py
+flake8 --config=setup.cfg {posargs} lincbrain setup.py

[testenv:typing]
deps =
mypy
types-python-dateutil
types-requests
commands =
-mypy dandi
+mypy lincbrain

[testenv:docs]
basepython = python3
@@ -69,7 +69,7 @@

[coverage:run]
parallel = True
-source = dandi
+source = lincbrain

[coverage:report]
precision = 2
