From 5ae3731d1e88b1d194d079c89ade7b795b4ea667 Mon Sep 17 00:00:00 2001
From: "John T. Wodder II"
Date: Tue, 28 Nov 2023 11:04:35 -0500
Subject: [PATCH] Move imports in functions to top level or annotate why they
 can't be moved

---
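Notes:

The rule applied throughout, as a minimal sketch. The `file_size` and
`dump_record` helpers below are illustrative only, and `json` merely stands
in for a genuinely heavy dependency such as pynwb or h5py:

    from pathlib import Path  # cheap and cycle-free: import at top level


    def file_size(path: str) -> int:
        # Top-level imports are used directly; their cost is paid once,
        # when the module is first imported.
        return Path(path).stat().st_size


    def dump_record(rec: dict) -> str:
        # Avoid heavy import by importing within function:
        import json  # stand-in for a slow import such as pynwb

        return json.dumps(rec, sort_keys=True)

Imports that are cheap and free of import cycles move to the module top
level; imports that are slow (pynwb, h5py, cv2, numpy, zarr, nwbinspector),
optional (fsspec), or circular (e.g. dandi.files from dandi.dandiapi) stay
inside the functions that need them, annotated with the reason.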
 dandi/cli/cmd_digest.py              |  1 +
 dandi/cli/cmd_download.py            |  3 +--
 dandi/cli/cmd_ls.py                  | 23 ++++++-----------------
 dandi/cli/cmd_shell_completion.py    |  1 +
 dandi/cli/cmd_upload.py              |  1 +
 dandi/cli/cmd_validate.py            |  3 ++-
 dandi/cli/command.py                 |  6 +-----
 dandi/cli/formatter.py               | 11 ++++-------
 dandi/cli/tests/test_cmd_validate.py |  8 ++------
 dandi/dandiapi.py                    |  2 ++
 dandi/delete.py                      |  6 ++----
 dandi/download.py                    |  8 ++++----
 dandi/files/bases.py                 | 10 ++++++++--
 dandi/files/zarr.py                  |  7 ++++++-
 dandi/metadata/nwb.py                |  3 +--
 dandi/misctypes.py                   |  1 +
 dandi/move.py                        |  3 +--
 dandi/organize.py                    | 10 ++++++++--
 dandi/pynwb_utils.py                 |  3 +--
 dandi/tests/fixtures.py              |  1 +
 dandi/tests/test_download.py         |  5 +----
 dandi/upload.py                      | 14 ++++++--------
 dandi/utils.py                       |  6 ++----
 23 files changed, 63 insertions(+), 73 deletions(-)

diff --git a/dandi/cli/cmd_digest.py b/dandi/cli/cmd_digest.py
index 7bcceff22..639f1bda9 100644
--- a/dandi/cli/cmd_digest.py
+++ b/dandi/cli/cmd_digest.py
@@ -22,6 +22,7 @@
 @map_to_click_exceptions
 def digest(paths: tuple[str, ...], digest_alg: str) -> None:
     """Calculate file digests"""
+    # Avoid heavy import by importing within function:
     from ..support.digests import get_digest
 
     for p in paths:
diff --git a/dandi/cli/cmd_download.py b/dandi/cli/cmd_download.py
index 9c8c6f6d6..7bd4bf76d 100644
--- a/dandi/cli/cmd_download.py
+++ b/dandi/cli/cmd_download.py
@@ -7,6 +7,7 @@
 
 from .base import ChoiceList, IntColonInt, instance_option, map_to_click_exceptions
 from ..dandiarchive import _dandi_url_parser, parse_dandi_url
+from ..dandiset import Dandiset
 from ..download import DownloadExisting, DownloadFormat, PathType
 from ..utils import get_instance
 
@@ -123,8 +124,6 @@ def download(
                     f"{u} does not point to {dandi_instance!r} instance"
                 )
     else:
-        from ..dandiset import Dandiset
-
         try:
             dandiset_id = Dandiset(os.curdir).identifier
         except ValueError:
diff --git a/dandi/cli/cmd_ls.py b/dandi/cli/cmd_ls.py
index 0cfacaeb3..72e933aad 100644
--- a/dandi/cli/cmd_ls.py
+++ b/dandi/cli/cmd_ls.py
@@ -3,12 +3,16 @@
 import os.path as op
 
 import click
+from dandischema import models
 
 from .base import devel_option, lgr, map_to_click_exceptions
+from .formatter import JSONFormatter, JSONLinesFormatter, PYOUTFormatter, YAMLFormatter
 from ..consts import ZARR_EXTENSIONS, metadata_all_fields
 from ..dandiarchive import DandisetURL, _dandi_url_parser, parse_dandi_url
+from ..dandiset import Dandiset
 from ..misctypes import Digest
-from ..utils import is_url
+from ..support.pyout import PYOUT_SHORT_NAMES, PYOUT_SHORT_NAMES_rev
+from ..utils import find_files, is_url
 
 # TODO: all the recursion options etc
 
@@ -87,21 +91,8 @@ def ls(
     """List .nwb files and dandisets metadata."""
 
     # TODO: more logical ordering in case of fields = None
-    from .formatter import (
-        JSONFormatter,
-        JSONLinesFormatter,
-        PYOUTFormatter,
-        YAMLFormatter,
-    )
-
-    # TODO: avoid
-    from ..support.pyout import PYOUT_SHORT_NAMES_rev
-    from ..utils import find_files
-
     common_fields = ("path", "size")
     if schema is not None:
-        from dandischema import models
-
         all_fields = tuple(
             sorted(
                 set(common_fields)
@@ -249,8 +240,6 @@ def _add_exc_error(asset, rec, errors, exc):
 
 
 def display_known_fields(all_fields):
-    from ..support.pyout import PYOUT_SHORT_NAMES
-
     # Display all known fields
     click.secho("Known fields:")
     for field in all_fields:
@@ -330,7 +319,7 @@ def flatten_meta_to_pyout(meta):
 def get_metadata_ls(
     path, keys, errors, flatten=False, schema=None, use_fake_digest=False
 ):
-    from ..dandiset import Dandiset
+    # Avoid heavy import by importing within function:
     from ..metadata.nwb import get_metadata, nwb2asset
     from ..pynwb_utils import get_nwb_version, ignore_benign_pynwb_warnings
     from ..support.digests import get_digest
diff --git a/dandi/cli/cmd_shell_completion.py b/dandi/cli/cmd_shell_completion.py
index 9e627e813..c858c0461 100644
--- a/dandi/cli/cmd_shell_completion.py
+++ b/dandi/cli/cmd_shell_completion.py
@@ -48,6 +48,7 @@ def shell_completion(shell):
     varfmt = "{shell}_source"
 
     os.environ["_DANDI_COMPLETE"] = varfmt.format(shell=shell)
+    # Avoid circular import by importing within function:
     from .command import main
 
     main.main(args=[])
diff --git a/dandi/cli/cmd_upload.py b/dandi/cli/cmd_upload.py
index a17a69c3c..3629e1c77 100644
--- a/dandi/cli/cmd_upload.py
+++ b/dandi/cli/cmd_upload.py
@@ -91,6 +91,7 @@ def upload(
     directories starting with a period) will be considered for the upload. You
     can point to specific files you would like to validate and have uploaded.
     """
+    # Avoid heavy imports by importing within function:
     from ..upload import upload
 
     if jobs_pair is None:
diff --git a/dandi/cli/cmd_validate.py b/dandi/cli/cmd_validate.py
index 2bc9946e8..749a1fc8c 100644
--- a/dandi/cli/cmd_validate.py
+++ b/dandi/cli/cmd_validate.py
@@ -11,6 +11,7 @@
 
 from .base import devel_debug_option, devel_option, map_to_click_exceptions
 from ..utils import pluralize
+from ..validate import validate as validate_
 from ..validate_types import Severity, ValidationResult
 
 
@@ -101,8 +102,8 @@ def validate(
 
     Exits with non-0 exit code if any file is not compliant.
     """
+    # Avoid heavy import by importing within function:
     from ..pynwb_utils import ignore_benign_pynwb_warnings
-    from ..validate import validate as validate_
 
     # Don't log validation warnings, as this command reports them to the user
     # anyway:
diff --git a/dandi/cli/command.py b/dandi/cli/command.py
index 2791c38de..bc2ef2e80 100644
--- a/dandi/cli/command.py
+++ b/dandi/cli/command.py
@@ -15,7 +15,7 @@
 
 from .base import lgr, map_to_click_exceptions
 from .. import __version__, set_logger_level
-from ..utils import get_module_version
+from ..utils import check_dandi_version, get_module_version, setup_exceptionhook
 
 # Delay imports leading to import of heavy modules such as pynwb and h5py
 # Import at the point of use
@@ -130,12 +130,8 @@ def main(ctx, log_level, pdb=False):
 
     if pdb:
         map_to_click_exceptions._do_map = False
-        from ..utils import setup_exceptionhook
-
         setup_exceptionhook()
 
-    from ..utils import check_dandi_version
-
     check_dandi_version()
 
 
diff --git a/dandi/cli/formatter.py b/dandi/cli/formatter.py
index 05d6992b8..9bf71db13 100644
--- a/dandi/cli/formatter.py
+++ b/dandi/cli/formatter.py
@@ -1,5 +1,9 @@
 import datetime
+import json
 import sys
+from textwrap import indent
+
+import ruamel.yaml
 
 from .. import get_logger
 from ..support import pyout as pyouts
@@ -39,9 +43,6 @@ def __exit__(self, exc_type, exc_value, traceback):
             print("]", file=self.out)
 
     def __call__(self, rec):
-        import json
-        from textwrap import indent
-
         if self.first:
             print(file=self.out)
             self.first = False
@@ -66,8 +67,6 @@ def _serializer(o):
         return o
 
     def __call__(self, rec):
-        import json
-
         print(
             json.dumps(
                 rec, indent=self.indent, sort_keys=True, default=self._serializer
@@ -82,8 +81,6 @@ def __init__(self, out=None):
         self.records = []
 
     def __exit__(self, exc_type, exc_value, traceback):
-        import ruamel.yaml
-
         yaml = ruamel.yaml.YAML(typ="safe")
         yaml.default_flow_style = False
         yaml.dump(self.records, self.out)
diff --git a/dandi/cli/tests/test_cmd_validate.py b/dandi/cli/tests/test_cmd_validate.py
index 8f63254ce..66e0cbb0c 100644
--- a/dandi/cli/tests/test_cmd_validate.py
+++ b/dandi/cli/tests/test_cmd_validate.py
@@ -4,8 +4,9 @@
 from click.testing import CliRunner
 import pytest
 
-from ..cmd_validate import validate
+from ..cmd_validate import _process_issues, validate
 from ...tests.fixtures import BIDS_ERROR_TESTDATA_SELECTION
+from ...validate_types import Scope, Severity, ValidationOrigin, ValidationResult
 
 
 @pytest.mark.parametrize("dataset", BIDS_ERROR_TESTDATA_SELECTION)
@@ -67,11 +68,6 @@ def test_validate_nwb_path_grouping(organized_nwb_dir4: Path) -> None:
 
 
 def test_process_issues(capsys):
-    from pathlib import Path
-
-    from ..cmd_validate import _process_issues
-    from ...validate_types import Scope, Severity, ValidationOrigin, ValidationResult
-
     issues = [
         ValidationResult(
             id="NWBI.check_data_orientation",
diff --git a/dandi/dandiapi.py b/dandi/dandiapi.py
index 609e5dbc5..2f644fd22 100644
--- a/dandi/dandiapi.py
+++ b/dandi/dandiapi.py
@@ -1194,6 +1194,7 @@ def upload_raw_asset(
         :param RemoteAsset replace_asset: If set, replace the given asset,
             which must have the same path as the new asset
         """
+        # Avoid circular import by importing within function:
         from .files import LocalAsset, dandi_file
 
         df = dandi_file(filepath)
@@ -1235,6 +1236,7 @@ def iter_upload_raw_asset(
             ``"done"`` and an ``"asset"`` key containing the resulting
             `RemoteAsset`.
         """
+        # Avoid circular import by importing within function:
         from .files import LocalAsset, dandi_file
 
         df = dandi_file(filepath)
diff --git a/dandi/delete.py b/dandi/delete.py
index eb3684bf1..cae325968 100644
--- a/dandi/delete.py
+++ b/dandi/delete.py
@@ -10,7 +10,9 @@
 
 from .consts import DRAFT, ZARR_EXTENSIONS, DandiInstance, dandiset_metadata_file
 from .dandiapi import DandiAPIClient, RemoteAsset, RemoteDandiset
 from .dandiarchive import BaseAssetIDURL, DandisetURL, ParsedDandiURL, parse_dandi_url
+from .dandiset import Dandiset
 from .exceptions import NotFoundError
+from .support import pyout as pyouts
 from .utils import get_instance, is_url
 
@@ -214,8 +216,6 @@ def delete(
         for r in gen:
             print(r, flush=True)
     else:
-        from .support import pyout as pyouts
-
         pyout_style = pyouts.get_style(hide_if_missing=False)
         rec_fields = ("path", "status", "message")
         out = pyouts.LogSafeTabular(
@@ -232,8 +232,6 @@ def find_local_asset(filepath: str) -> tuple[str, str]:
     located and the path to the file relative to the root of said Dandiset.
     If the file is a directory, the path will end with a trailing slash.
     """
-    from .dandiset import Dandiset
-
     path = Path(filepath).absolute()
     dandiset = Dandiset.find(path.parent)
     if dandiset is None:
diff --git a/dandi/download.py b/dandi/download.py
index f9cd05167..f1611fcef 100644
--- a/dandi/download.py
+++ b/dandi/download.py
@@ -18,6 +18,7 @@
 from types import TracebackType
 from typing import IO, Any, Literal
 
+from dandischema.digests.dandietag import ETagHashlike
 from dandischema.models import DigestType
 from fasteners import InterProcessLock
 import humanize
@@ -31,6 +32,7 @@
 from .dandiset import Dandiset
 from .exceptions import NotFoundError
 from .files import LocalAsset, find_dandi_files
+from .support import pyout as pyouts
 from .support.iterators import IteratorWithAggregation
 from .support.pyout import naturalsize
 from .utils import (
@@ -91,8 +93,6 @@ def download(
     # TODO: unduplicate with upload. For now stole from that one
     # We will again use pyout to provide a neat table summarizing our progress
     # with upload etc
-    from .support import pyout as pyouts
-
     urls = flattened([urls])
     if not urls:
         # if no paths provided etc, we will download dandiset path
@@ -551,6 +551,7 @@ def _download_file(
         possible checksums or other digests provided for the file. Only one
         will be used to verify download
     """
+    # Avoid heavy import by importing within function:
    from .support.digests import get_digest
 
    if op.lexists(path):
@@ -650,8 +651,6 @@ def _download_file(
             # TODO: reuse that sorting based on speed
             for algo, digest in digests.items():
                 if algo == "dandi-etag" and size is not None:
-                    from dandischema.digests.dandietag import ETagHashlike
-
                     # Instantiate outside the lambda so that mypy is assured that
                     # `size` is not None:
                     hasher = ETagHashlike(size)
@@ -855,6 +854,7 @@ def _download_zarr(
     lock: Lock,
     jobs: int | None = None,
 ) -> Iterator[dict]:
+    # Avoid heavy import by importing within function:
     from .support.digests import get_zarr_checksum
 
     download_gens = {}
diff --git a/dandi/files/bases.py b/dandi/files/bases.py
index 10f887563..7236b55d8 100644
--- a/dandi/files/bases.py
+++ b/dandi/files/bases.py
@@ -299,6 +299,7 @@ def get_metadata(
 
     def get_digest(self) -> Digest:
         """Calculate a dandi-etag digest for the asset"""
+        # Avoid heavy import by importing within function:
         from dandi.support.digests import get_digest
 
         value = get_digest(self.filepath, digest="dandi-etag")
@@ -331,6 +332,7 @@ def iter_upload(
             ``"done"`` and an ``"asset"`` key containing the resulting
             `RemoteAsset`.
         """
+        # Avoid heavy import by importing within function:
         from dandi.support.digests import get_dandietag
 
         asset_path = metadata.setdefault("path", self.path)
@@ -469,6 +471,7 @@ def get_metadata(
         digest: Digest | None = None,
         ignore_errors: bool = True,
     ) -> BareAsset:
+        # Avoid heavy import by importing within function:
         from dandi.metadata.nwb import nwb2asset
 
         try:
@@ -499,8 +502,10 @@ def get_validation_errors(
         If ``schema_version`` was provided, we only validate basic metadata,
         and completely skip validation using nwbinspector.inspect_nwbfile
         """
+        # Avoid heavy import by importing within function:
         from nwbinspector import Importance, inspect_nwbfile, load_config
 
+        # Avoid heavy import by importing within function:
         from dandi.pynwb_utils import validate as pynwb_validate
 
         errors: list[ValidationResult] = pynwb_validate(
@@ -559,9 +564,9 @@ def get_validation_errors(
                 [e], self.filepath, scope=Scope.FILE
             )
 
-        from dandi.organize import validate_organized_path
-
+        # Avoid circular imports by importing within function:
         from .bids import NWBBIDSAsset
+        from ..organize import validate_organized_path
 
         if not isinstance(self, NWBBIDSAsset) and self.dandiset_path is not None:
             errors.extend(
@@ -716,6 +721,7 @@ def _check_required_fields(
 
 
 def _get_nwb_inspector_version():
+    # Avoid heavy import by importing within function:
     from nwbinspector.utils import get_package_version
 
     global _current_nwbinspector_version
diff --git a/dandi/files/zarr.py b/dandi/files/zarr.py
index 8ea610df0..a39b5153d 100644
--- a/dandi/files/zarr.py
+++ b/dandi/files/zarr.py
@@ -93,7 +93,7 @@ def get_digest(self) -> Digest:
         directory, the algorithm will be the Dandi Zarr checksum algorithm; if
         it is a file, it will be MD5.
         """
-
+        # Avoid heavy import by importing within function:
         from dandi.support.digests import get_digest, get_zarr_checksum
 
         if self.is_dir():
@@ -152,6 +152,7 @@ def stat(self) -> ZarrStat:
         """Return various details about the Zarr asset"""
 
         def dirstat(dirpath: LocalZarrEntry) -> ZarrStat:
+            # Avoid heavy import by importing within function:
             from dandi.support.digests import md5file_nocache
 
             size = 0
@@ -178,6 +179,7 @@ def dirstat(dirpath: LocalZarrEntry) -> ZarrStat:
 
     def get_digest(self) -> Digest:
         """Calculate a dandi-zarr-checksum digest for the asset"""
+        # Avoid heavy import by importing within function:
         from dandi.support.digests import get_zarr_checksum
 
         return Digest.dandi_zarr(get_zarr_checksum(self.filepath))
@@ -197,6 +199,7 @@ def get_validation_errors(
         schema_version: str | None = None,
         devel_debug: bool = False,
     ) -> list[ValidationResult]:
+        # Avoid heavy import by importing within function:
         import zarr
 
         errors: list[ValidationResult] = []
@@ -591,6 +594,7 @@ def register(self, e: LocalZarrEntry, digest: str | None = None) -> None:
 
     @staticmethod
     def _mkitem(e: LocalZarrEntry) -> UploadItem:
+        # Avoid heavy import by importing within function:
         from dandi.support.digests import md5file_nocache
 
         digest = md5file_nocache(e.filepath)
@@ -643,6 +647,7 @@ def upload_request(self) -> dict[str, str]:
 def _cmp_digests(
     asset_path: str, local_entry: LocalZarrEntry, remote_digest: str
 ) -> tuple[LocalZarrEntry, str, bool]:
+    # Avoid heavy import by importing within function:
     from dandi.support.digests import md5file_nocache
 
     local_digest = md5file_nocache(local_entry.filepath)
diff --git a/dandi/metadata/nwb.py b/dandi/metadata/nwb.py
index 0c7583922..ae3ef6654 100644
--- a/dandi/metadata/nwb.py
+++ b/dandi/metadata/nwb.py
@@ -12,6 +12,7 @@
 from .util import process_ndtypes
 from .. import get_logger
 from ..consts import metadata_all_fields
+from ..files import bids, dandi_file, find_bids_dataset_description
 from ..misctypes import DUMMY_DANDI_ETAG, Digest, LocalReadableFile, Readable
 from ..pynwb_utils import (
     _get_pynwb_metadata,
@@ -43,8 +44,6 @@ def get_metadata(
 
     dict
     """
-    from ..files import bids, dandi_file, find_bids_dataset_description
-
     # when we run in parallel, these annoying warnings appear
     ignore_benign_pynwb_warnings()
 
diff --git a/dandi/misctypes.py b/dandi/misctypes.py
index df1eaf2d2..20887a6ab 100644
--- a/dandi/misctypes.py
+++ b/dandi/misctypes.py
@@ -342,6 +342,7 @@ class RemoteReadableAsset(Readable):
     name: str
 
     def open(self) -> IO[bytes]:
+        # Optional dependency:
         import fsspec
 
         return cast(IO[bytes], fsspec.open(self.url, mode="rb"))
diff --git a/dandi/move.py b/dandi/move.py
index b98c5fb19..9996212bb 100644
--- a/dandi/move.py
+++ b/dandi/move.py
@@ -19,6 +19,7 @@
 from .dandiset import Dandiset
 from .exceptions import NotFoundError
 from .files import DandisetMetadataFile, LocalAsset, find_dandi_files
+from .support import pyout as pyouts
 
 lgr = get_logger()
 
@@ -872,8 +873,6 @@ def move(
         for r in gen:
             print(r, flush=True)
     else:
-        from .support import pyout as pyouts
-
         pyout_style = pyouts.get_style(hide_if_missing=False)
         out = pyouts.LogSafeTabular(
             style=pyout_style, columns=mover.columns, max_workers=jobs
diff --git a/dandi/organize.py b/dandi/organize.py
index 73d5fcc30..c7217437c 100644
--- a/dandi/organize.py
+++ b/dandi/organize.py
@@ -15,6 +15,8 @@
 import re
 import uuid
 
+import ruamel.yaml
+
 from . import __version__, get_logger
 from .consts import dandi_layout_fields
 from .dandiset import Dandiset
@@ -302,6 +304,7 @@ def organize_external_files(
 
 
 def _assign_obj_id(metadata, non_unique):
+    # Avoid heavy import by importing within function:
     from .pynwb_utils import get_object_id
 
     msg = "%d out of %d paths are not unique" % (len(non_unique), len(metadata))
@@ -371,6 +374,7 @@ def _get_unique_values_among_non_unique(metadata, non_unique_paths, field):
 
 def get_obj_id(object_id):
     """Given full object_id, get its shortened version"""
+    # Avoid heavy import by importing within function:
     import numpy as np
 
     return np.base_repr(binascii.crc32(object_id.encode("ascii")), 36).lower()
@@ -435,6 +439,7 @@ def _sanitize_value(value, field):
 
 
 def _populate_modalities(metadata):
+    # Avoid heavy import by importing within function:
     from .pynwb_utils import get_neurodata_types_to_modalities_map
 
     ndtypes_to_modalities = get_neurodata_types_to_modalities_map()
@@ -593,8 +598,6 @@ def create_dataset_yml_template(filepath):
 
 def populate_dataset_yml(filepath, metadata):
     # To preserve comments, let's use ruamel
-    import ruamel.yaml
-
     yaml = ruamel.yaml.YAML()  # defaults to round-trip if no parameters given
     if not op.lexists(filepath):
         # Create an empty one, which we would populate with information
@@ -801,6 +804,7 @@ def act(func, *args, **kwargs):
             % dandiset_path
         )
 
+    # Avoid heavy import by importing within function:
     from .pynwb_utils import ignore_benign_pynwb_warnings
 
     ignore_benign_pynwb_warnings()
@@ -840,6 +844,7 @@ def act(func, *args, **kwargs):
     failed = []
 
     def _get_metadata(path):
+        # Avoid heavy import by importing within function:
         from .metadata.nwb import get_metadata
 
         try:
@@ -1041,6 +1046,7 @@ def _get_metadata(path):
 
     # create video file name and re write nwb file external files:
     if update_external_file_paths:
+        # Avoid heavy import by importing within function:
         from .pynwb_utils import rename_nwb_external_files
 
         rename_nwb_external_files(metadata, dandiset_path)
diff --git a/dandi/pynwb_utils.py b/dandi/pynwb_utils.py
index d504f82ef..3d0a8b3ec 100644
--- a/dandi/pynwb_utils.py
+++ b/dandi/pynwb_utils.py
@@ -2,6 +2,7 @@
 
 from collections import Counter
 from collections.abc import Callable
+import inspect
 import os
 import os.path as op
 from pathlib import Path
@@ -137,8 +138,6 @@ def get_neurodata_types_to_modalities_map() -> dict[str, str]:
     It would base modality on the filename within pynwb providing that
     neural data type
     """
-    import inspect
-
     ndtypes: dict[str, str] = {}
 
     # TODO: if there are extensions, they might have types subclassed from the base
diff --git a/dandi/tests/fixtures.py b/dandi/tests/fixtures.py
index 01612b826..2981ddd62 100644
--- a/dandi/tests/fixtures.py
+++ b/dandi/tests/fixtures.py
@@ -655,6 +655,7 @@ def bids_dandiset_invalid(
 
 @pytest.fixture()
 def video_files(tmp_path: Path) -> list[tuple[Path, Path]]:
+    # Avoid heavy import by importing within function:
     import cv2
 
     video_paths = []
diff --git a/dandi/tests/test_download.py b/dandi/tests/test_download.py
index c72ef4a6d..d5bc1f9a9 100644
--- a/dandi/tests/test_download.py
+++ b/dandi/tests/test_download.py
@@ -30,6 +30,7 @@
     download,
 )
 from ..exceptions import NotFoundError
+from ..support.digests import Digester
 from ..utils import list_paths
 
 
@@ -56,8 +57,6 @@ def test_download_000027(
         dsdir / "sub-RAT123" / "sub-RAT123.nwb",
     ]
     # and checksum should be correct as well
-    from ..support.digests import Digester
-
     assert (
         Digester(["md5"])(dsdir / "sub-RAT123" / "sub-RAT123.nwb")["md5"]
         == "33318fd510094e4304868b4a481d4a5a"
@@ -122,8 +121,6 @@ def test_download_000027_assets_only(url: str, tmp_path: Path) -> None:
 def test_download_000027_resume(
     tmp_path: Path, resizer: Callable[[int], int], version: str
 ) -> None:
-    from ..support.digests import Digester
-
     url = f"https://dandiarchive.org/dandiset/000027/{version}"
     digester = Digester()
     download(url, tmp_path, get_metadata=False)
diff --git a/dandi/upload.py b/dandi/upload.py
index bf9eea14b..9ee543aa4 100644
--- a/dandi/upload.py
+++ b/dandi/upload.py
@@ -22,7 +22,8 @@
     dandiset_identifier_regex,
     dandiset_metadata_file,
 )
-from .dandiapi import RemoteAsset
+from .dandiapi import DandiAPIClient, RemoteAsset
+from .dandiset import Dandiset
 from .exceptions import NotFoundError, UploadError
 from .files import (
     DandiFile,
@@ -32,7 +33,9 @@
     ZarrAsset,
 )
 from .misctypes import Digest
-from .utils import ensure_datetime, pluralize
+from .support import pyout as pyouts
+from .support.pyout import naturalsize
+from .utils import ensure_datetime, path_is_subpath, pluralize
 from .validate_types import Severity
 
 
@@ -73,9 +76,6 @@ def upload(
     jobs_per_file: int | None = None,
     sync: bool = False,
 ) -> None:
-    from .dandiapi import DandiAPIClient
-    from .dandiset import Dandiset
-
     if paths:
         paths = [Path(p).absolute() for p in paths]
         dandiset = Dandiset.find(os.path.commonpath(paths))
@@ -121,9 +121,8 @@ def new_super_len(o):
             f"convention {dandiset_identifier_regex!r}."
         )
 
+    # Avoid heavy import by importing within function:
     from .pynwb_utils import ignore_benign_pynwb_warnings
-    from .support.pyout import naturalsize
-    from .utils import path_is_subpath
 
     ignore_benign_pynwb_warnings()  # so validate doesn't whine
 
@@ -316,7 +315,6 @@ def process_path(dfile: DandiFile) -> Iterator[dict]:
 
     # We will again use pyout to provide a neat table summarizing our progress
     # with upload etc
-    from .support import pyout as pyouts
 
     # for the upload speeds we need to provide a custom aggregate
     t0 = time.time()
diff --git a/dandi/utils.py b/dandi/utils.py
index afd13c4fc..0046b7a5c 100644
--- a/dandi/utils.py
+++ b/dandi/utils.py
@@ -13,11 +13,13 @@
 import os
 import os.path as op
 from pathlib import Path, PurePath, PurePosixPath
+import pdb
 import platform
 import re
 import shutil
 import subprocess
 import sys
+import traceback
 import types
 from typing import IO, Any, List, Optional, Protocol, TypeVar, Union
 from urllib.parse import parse_qs, urlparse, urlunparse
@@ -87,13 +89,9 @@ def _pdb_excepthook(
     value: BaseException,
     tb: types.TracebackType | None,
 ) -> None:
-    import traceback
-
     traceback.print_exception(exc_type, value, tb)
     print()
     if is_interactive():
-        import pdb
-
         pdb.post_mortem(tb)
 
 if ipython:
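
Assuming a local install of the package, one quick way to confirm that plain
CLI startup no longer pays for the heavy modules is CPython's built-in import
profiler:

    python -X importtime -c "import dandi.cli.command"

pynwb, h5py, and the other deferred imports should not appear in its trace
until a command that actually needs them runs.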