Skip to content

Commit

Permalink
Merge pull request #1368 from dandi/move-imports
Browse files Browse the repository at this point in the history
Move imports in functions to top level or annotate why they can't be moved
  • Loading branch information
yarikoptic authored Nov 28, 2023
2 parents 0971d02 + 5ae3731 commit 6f18a4b
Show file tree
Hide file tree
Showing 23 changed files with 63 additions and 73 deletions.
1 change: 1 addition & 0 deletions dandi/cli/cmd_digest.py
Original file line number Diff line number Diff line change
Expand Up @@ -22,6 +22,7 @@
@map_to_click_exceptions
def digest(paths: tuple[str, ...], digest_alg: str) -> None:
"""Calculate file digests"""
# Avoid heavy import by importing within function:
from ..support.digests import get_digest

for p in paths:
Expand Down
3 changes: 1 addition & 2 deletions dandi/cli/cmd_download.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,6 +7,7 @@

from .base import ChoiceList, IntColonInt, instance_option, map_to_click_exceptions
from ..dandiarchive import _dandi_url_parser, parse_dandi_url
from ..dandiset import Dandiset
from ..download import DownloadExisting, DownloadFormat, PathType
from ..utils import get_instance

Expand Down Expand Up @@ -123,8 +124,6 @@ def download(
f"{u} does not point to {dandi_instance!r} instance"
)
else:
from ..dandiset import Dandiset

try:
dandiset_id = Dandiset(os.curdir).identifier
except ValueError:
Expand Down
23 changes: 6 additions & 17 deletions dandi/cli/cmd_ls.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,12 +3,16 @@
import os.path as op

import click
from dandischema import models

from .base import devel_option, lgr, map_to_click_exceptions
from .formatter import JSONFormatter, JSONLinesFormatter, PYOUTFormatter, YAMLFormatter
from ..consts import ZARR_EXTENSIONS, metadata_all_fields
from ..dandiarchive import DandisetURL, _dandi_url_parser, parse_dandi_url
from ..dandiset import Dandiset
from ..misctypes import Digest
from ..utils import is_url
from ..support.pyout import PYOUT_SHORT_NAMES, PYOUT_SHORT_NAMES_rev
from ..utils import find_files, is_url

# TODO: all the recursion options etc

Expand Down Expand Up @@ -87,21 +91,8 @@ def ls(
"""List .nwb files and dandisets metadata."""

# TODO: more logical ordering in case of fields = None
from .formatter import (
JSONFormatter,
JSONLinesFormatter,
PYOUTFormatter,
YAMLFormatter,
)

# TODO: avoid
from ..support.pyout import PYOUT_SHORT_NAMES_rev
from ..utils import find_files

common_fields = ("path", "size")
if schema is not None:
from dandischema import models

all_fields = tuple(
sorted(
set(common_fields)
Expand Down Expand Up @@ -249,8 +240,6 @@ def _add_exc_error(asset, rec, errors, exc):


def display_known_fields(all_fields):
from ..support.pyout import PYOUT_SHORT_NAMES

# Display all known fields
click.secho("Known fields:")
for field in all_fields:
Expand Down Expand Up @@ -330,7 +319,7 @@ def flatten_meta_to_pyout(meta):
def get_metadata_ls(
path, keys, errors, flatten=False, schema=None, use_fake_digest=False
):
from ..dandiset import Dandiset
# Avoid heavy import by importing within function:
from ..metadata.nwb import get_metadata, nwb2asset
from ..pynwb_utils import get_nwb_version, ignore_benign_pynwb_warnings
from ..support.digests import get_digest
Expand Down
1 change: 1 addition & 0 deletions dandi/cli/cmd_shell_completion.py
Original file line number Diff line number Diff line change
Expand Up @@ -48,6 +48,7 @@ def shell_completion(shell):
varfmt = "{shell}_source"
os.environ["_DANDI_COMPLETE"] = varfmt.format(shell=shell)

# Avoid circular import by importing within function:
from .command import main

main.main(args=[])
1 change: 1 addition & 0 deletions dandi/cli/cmd_upload.py
Original file line number Diff line number Diff line change
Expand Up @@ -91,6 +91,7 @@ def upload(
directories starting with a period) will be considered for the upload. You
can point to specific files you would like to validate and have uploaded.
"""
# Avoid heavy imports by importing within function:
from ..upload import upload

if jobs_pair is None:
Expand Down
3 changes: 2 additions & 1 deletion dandi/cli/cmd_validate.py
Original file line number Diff line number Diff line change
Expand Up @@ -11,6 +11,7 @@

from .base import devel_debug_option, devel_option, map_to_click_exceptions
from ..utils import pluralize
from ..validate import validate as validate_
from ..validate_types import Severity, ValidationResult


Expand Down Expand Up @@ -101,8 +102,8 @@ def validate(
Exits with non-0 exit code if any file is not compliant.
"""
# Avoid heavy import by importing within function:
from ..pynwb_utils import ignore_benign_pynwb_warnings
from ..validate import validate as validate_

# Don't log validation warnings, as this command reports them to the user
# anyway:
Expand Down
6 changes: 1 addition & 5 deletions dandi/cli/command.py
Original file line number Diff line number Diff line change
Expand Up @@ -15,7 +15,7 @@

from .base import lgr, map_to_click_exceptions
from .. import __version__, set_logger_level
from ..utils import get_module_version
from ..utils import check_dandi_version, get_module_version, setup_exceptionhook

# Delay imports leading to import of heavy modules such as pynwb and h5py
# Import at the point of use
Expand Down Expand Up @@ -130,12 +130,8 @@ def main(ctx, log_level, pdb=False):

if pdb:
map_to_click_exceptions._do_map = False
from ..utils import setup_exceptionhook

setup_exceptionhook()

from ..utils import check_dandi_version

check_dandi_version()


Expand Down
11 changes: 4 additions & 7 deletions dandi/cli/formatter.py
Original file line number Diff line number Diff line change
@@ -1,5 +1,9 @@
import datetime
import json
import sys
from textwrap import indent

import ruamel.yaml

from .. import get_logger
from ..support import pyout as pyouts
Expand Down Expand Up @@ -39,9 +43,6 @@ def __exit__(self, exc_type, exc_value, traceback):
print("]", file=self.out)

def __call__(self, rec):
import json
from textwrap import indent

if self.first:
print(file=self.out)
self.first = False
Expand All @@ -66,8 +67,6 @@ def _serializer(o):
return o

def __call__(self, rec):
import json

print(
json.dumps(
rec, indent=self.indent, sort_keys=True, default=self._serializer
Expand All @@ -82,8 +81,6 @@ def __init__(self, out=None):
self.records = []

def __exit__(self, exc_type, exc_value, traceback):
import ruamel.yaml

yaml = ruamel.yaml.YAML(typ="safe")
yaml.default_flow_style = False
yaml.dump(self.records, self.out)
Expand Down
8 changes: 2 additions & 6 deletions dandi/cli/tests/test_cmd_validate.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,8 +4,9 @@
from click.testing import CliRunner
import pytest

from ..cmd_validate import validate
from ..cmd_validate import _process_issues, validate
from ...tests.fixtures import BIDS_ERROR_TESTDATA_SELECTION
from ...validate_types import Scope, Severity, ValidationOrigin, ValidationResult


@pytest.mark.parametrize("dataset", BIDS_ERROR_TESTDATA_SELECTION)
Expand Down Expand Up @@ -67,11 +68,6 @@ def test_validate_nwb_path_grouping(organized_nwb_dir4: Path) -> None:


def test_process_issues(capsys):
from pathlib import Path

from ..cmd_validate import _process_issues
from ...validate_types import Scope, Severity, ValidationOrigin, ValidationResult

issues = [
ValidationResult(
id="NWBI.check_data_orientation",
Expand Down
2 changes: 2 additions & 0 deletions dandi/dandiapi.py
Original file line number Diff line number Diff line change
Expand Up @@ -1194,6 +1194,7 @@ def upload_raw_asset(
:param RemoteAsset replace_asset: If set, replace the given asset,
which must have the same path as the new asset
"""
# Avoid circular import by importing within function:
from .files import LocalAsset, dandi_file

df = dandi_file(filepath)
Expand Down Expand Up @@ -1235,6 +1236,7 @@ def iter_upload_raw_asset(
``"done"`` and an ``"asset"`` key containing the resulting
`RemoteAsset`.
"""
# Avoid circular import by importing within function:
from .files import LocalAsset, dandi_file

df = dandi_file(filepath)
Expand Down
6 changes: 2 additions & 4 deletions dandi/delete.py
Original file line number Diff line number Diff line change
Expand Up @@ -10,7 +10,9 @@
from .consts import DRAFT, ZARR_EXTENSIONS, DandiInstance, dandiset_metadata_file
from .dandiapi import DandiAPIClient, RemoteAsset, RemoteDandiset
from .dandiarchive import BaseAssetIDURL, DandisetURL, ParsedDandiURL, parse_dandi_url
from .dandiset import Dandiset
from .exceptions import NotFoundError
from .support import pyout as pyouts
from .utils import get_instance, is_url


Expand Down Expand Up @@ -214,8 +216,6 @@ def delete(
for r in gen:
print(r, flush=True)
else:
from .support import pyout as pyouts

pyout_style = pyouts.get_style(hide_if_missing=False)
rec_fields = ("path", "status", "message")
out = pyouts.LogSafeTabular(
Expand All @@ -232,8 +232,6 @@ def find_local_asset(filepath: str) -> tuple[str, str]:
located and the path to the file relative to the root of said Dandiset. If
the file is a directory, the path will end with a trailing slash.
"""
from .dandiset import Dandiset

path = Path(filepath).absolute()
dandiset = Dandiset.find(path.parent)
if dandiset is None:
Expand Down
8 changes: 4 additions & 4 deletions dandi/download.py
Original file line number Diff line number Diff line change
Expand Up @@ -18,6 +18,7 @@
from types import TracebackType
from typing import IO, Any, Literal

from dandischema.digests.dandietag import ETagHashlike
from dandischema.models import DigestType
from fasteners import InterProcessLock
import humanize
Expand All @@ -31,6 +32,7 @@
from .dandiset import Dandiset
from .exceptions import NotFoundError
from .files import LocalAsset, find_dandi_files
from .support import pyout as pyouts
from .support.iterators import IteratorWithAggregation
from .support.pyout import naturalsize
from .utils import (
Expand Down Expand Up @@ -91,8 +93,6 @@ def download(
# TODO: unduplicate with upload. For now stole from that one
# We will again use pyout to provide a neat table summarizing our progress
# with upload etc
from .support import pyout as pyouts

urls = flattened([urls])
if not urls:
# if no paths provided etc, we will download dandiset path
Expand Down Expand Up @@ -551,6 +551,7 @@ def _download_file(
possible checksums or other digests provided for the file. Only one
will be used to verify download
"""
# Avoid heavy import by importing within function:
from .support.digests import get_digest

if op.lexists(path):
Expand Down Expand Up @@ -650,8 +651,6 @@ def _download_file(
# TODO: reuse that sorting based on speed
for algo, digest in digests.items():
if algo == "dandi-etag" and size is not None:
from dandischema.digests.dandietag import ETagHashlike

# Instantiate outside the lambda so that mypy is assured that
# `size` is not None:
hasher = ETagHashlike(size)
Expand Down Expand Up @@ -855,6 +854,7 @@ def _download_zarr(
lock: Lock,
jobs: int | None = None,
) -> Iterator[dict]:
# Avoid heavy import by importing within function:
from .support.digests import get_zarr_checksum

download_gens = {}
Expand Down
10 changes: 8 additions & 2 deletions dandi/files/bases.py
Original file line number Diff line number Diff line change
Expand Up @@ -299,6 +299,7 @@ def get_metadata(

def get_digest(self) -> Digest:
"""Calculate a dandi-etag digest for the asset"""
# Avoid heavy import by importing within function:
from dandi.support.digests import get_digest

value = get_digest(self.filepath, digest="dandi-etag")
Expand Down Expand Up @@ -331,6 +332,7 @@ def iter_upload(
``"done"`` and an ``"asset"`` key containing the resulting
`RemoteAsset`.
"""
# Avoid heavy import by importing within function:
from dandi.support.digests import get_dandietag

asset_path = metadata.setdefault("path", self.path)
Expand Down Expand Up @@ -469,6 +471,7 @@ def get_metadata(
digest: Digest | None = None,
ignore_errors: bool = True,
) -> BareAsset:
# Avoid heavy import by importing within function:
from dandi.metadata.nwb import nwb2asset

try:
Expand Down Expand Up @@ -499,8 +502,10 @@ def get_validation_errors(
If ``schema_version`` was provided, we only validate basic metadata,
and completely skip validation using nwbinspector.inspect_nwbfile
"""
# Avoid heavy import by importing within function:
from nwbinspector import Importance, inspect_nwbfile, load_config

# Avoid heavy import by importing within function:
from dandi.pynwb_utils import validate as pynwb_validate

errors: list[ValidationResult] = pynwb_validate(
Expand Down Expand Up @@ -559,9 +564,9 @@ def get_validation_errors(
[e], self.filepath, scope=Scope.FILE
)

from dandi.organize import validate_organized_path

# Avoid circular imports by importing within function:
from .bids import NWBBIDSAsset
from ..organize import validate_organized_path

if not isinstance(self, NWBBIDSAsset) and self.dandiset_path is not None:
errors.extend(
Expand Down Expand Up @@ -716,6 +721,7 @@ def _check_required_fields(


def _get_nwb_inspector_version():
# Avoid heavy import by importing within function:
from nwbinspector.utils import get_package_version

global _current_nwbinspector_version
Expand Down
7 changes: 6 additions & 1 deletion dandi/files/zarr.py
Original file line number Diff line number Diff line change
Expand Up @@ -93,7 +93,7 @@ def get_digest(self) -> Digest:
directory, the algorithm will be the Dandi Zarr checksum algorithm; if
it is a file, it will be MD5.
"""

# Avoid heavy import by importing within function:
from dandi.support.digests import get_digest, get_zarr_checksum

if self.is_dir():
Expand Down Expand Up @@ -152,6 +152,7 @@ def stat(self) -> ZarrStat:
"""Return various details about the Zarr asset"""

def dirstat(dirpath: LocalZarrEntry) -> ZarrStat:
# Avoid heavy import by importing within function:
from dandi.support.digests import md5file_nocache

size = 0
Expand All @@ -178,6 +179,7 @@ def dirstat(dirpath: LocalZarrEntry) -> ZarrStat:

def get_digest(self) -> Digest:
"""Calculate a dandi-zarr-checksum digest for the asset"""
# Avoid heavy import by importing within function:
from dandi.support.digests import get_zarr_checksum

return Digest.dandi_zarr(get_zarr_checksum(self.filepath))
Expand All @@ -197,6 +199,7 @@ def get_validation_errors(
schema_version: str | None = None,
devel_debug: bool = False,
) -> list[ValidationResult]:
# Avoid heavy import by importing within function:
import zarr

errors: list[ValidationResult] = []
Expand Down Expand Up @@ -591,6 +594,7 @@ def register(self, e: LocalZarrEntry, digest: str | None = None) -> None:

@staticmethod
def _mkitem(e: LocalZarrEntry) -> UploadItem:
# Avoid heavy import by importing within function:
from dandi.support.digests import md5file_nocache

digest = md5file_nocache(e.filepath)
Expand Down Expand Up @@ -643,6 +647,7 @@ def upload_request(self) -> dict[str, str]:
def _cmp_digests(
asset_path: str, local_entry: LocalZarrEntry, remote_digest: str
) -> tuple[LocalZarrEntry, str, bool]:
# Avoid heavy import by importing within function:
from dandi.support.digests import md5file_nocache

local_digest = md5file_nocache(local_entry.filepath)
Expand Down
Loading

0 comments on commit 6f18a4b

Please sign in to comment.