diff --git a/doc/api/log.rst b/doc/api/log.rst index e51be16..411a84b 100644 --- a/doc/api/log.rst +++ b/doc/api/log.rst @@ -12,8 +12,11 @@ Setting and retrieving the pshell logger .. autodata:: pshell.context_logger :class:`~contextvars.ContextVar`. Context-local logger, for use in multithreaded and -asynchronous code. This is not inherited when creating a new thread. -See :mod:`contextvars` for more information on how context variables propagate. +asynchronous code. On Python 3.14t and later free-threading interpreters, this is +inherited when creating a new thread; on most other interpreters, it's not; see +:attr:`sys.flags.thread_inherit_context`. See :mod:`contextvars` for more information on +how context variables propagate. + Set to None to use the global logger instead. .. autofunction:: pshell.get_logger @@ -25,3 +28,4 @@ Using the pshell logger .. autofunction:: pshell.log.warning .. autofunction:: pshell.log.error .. autofunction:: pshell.log.critical +.. autofunction:: pshell.log.inc_stacklevel diff --git a/doc/index.rst b/doc/index.rst index f076b8b..b312f53 100644 --- a/doc/index.rst +++ b/doc/index.rst @@ -38,15 +38,16 @@ Quick start >>> import pshell as sh >>> logging.basicConfig( ... level=logging.INFO, -... format='%(asctime)s %(levelname)s [%(filename)s:%(lineno)d] %(message)s') ->>> +... format='%(asctime)s %(levelname)s [%(filename)s:%(lineno)d] %(message)s' +... ) >>> with sh.open("hello.txt", "w") as fh: ... 
fh.write("Hello world!") -2018-10-06 22:09:06,161 INFO [open.py:70] Opening 'hello.txt' for write +2026-01-14 13:15:43,045 INFO [myscript.py:7] Opening 'hello.txt' for write >>> sh.mkdir("somedir") -2018-10-06 22:10:28,969 INFO [file.py:298] Creating directory somedir +2026-01-14 13:16:08,503 INFO [myscript.py:9] Creating directory somedir >>> sh.copy("hello.txt", "somedir/") -2018-10-06 22:10:37,354 INFO [file.py:152] Copying hello.txt to somedir/ +2026-01-14 13:16:30,468 INFO [myscript.py:10] Copying hello.txt to somedir/ + Index ----- diff --git a/doc/whats-new.rst b/doc/whats-new.rst index f2059a7..9178ca7 100644 --- a/doc/whats-new.rst +++ b/doc/whats-new.rst @@ -5,6 +5,8 @@ What's New v1.6.0 (unreleased) ------------------- +- All log messages are reported as coming from the user's frames instead of from pshell +- New function :func:`log.inc_stacklevel` v1.5.0 (2025-12-28) diff --git a/pshell/call.py b/pshell/call.py index 634cb1d..c28cd5d 100644 --- a/pshell/call.py +++ b/pshell/call.py @@ -137,7 +137,7 @@ def _call_cmd( ] shell = False - log.info("Executing: %s", log_cmd) + log.info("Executing: %s", log_cmd, stacklevel=3) return cmd, shell diff --git a/pshell/env.py b/pshell/env.py index 7aa0d70..a79dfe4 100644 --- a/pshell/env.py +++ b/pshell/env.py @@ -15,6 +15,7 @@ __all__ = ("override_env", "putenv", "resolve_env", "source") +@log.inc_stacklevel() def source(bash_file: str | Path, *, stderr: IO | None = None) -> None: """Emulate the bash command ``source ``. The stdout of the command, if any, will be redirected to stderr. @@ -78,11 +79,20 @@ def putenv(key: str, value: str | Path | None) -> None: It can be a reference other variables, e.g. ``${FOO}.${BAR}``. :class:`~pathlib.Path` objects are transparently converted to strings. 
""" + _putenv(key, value, set_msg="Setting", stacklevel=3) + + +def _putenv( + key: str, value: str | Path | None, *, set_msg: str, stacklevel: int +) -> None: + """Helper of putenv() and override_env() to have the correct stacklevel.""" if value is None: - log.info("Deleting environment variable %s", key) + log.info("Deleting environment variable %s", key, stacklevel=stacklevel) os.environ.pop(key, None) else: - log.info("Setting environment variable %s=%s", key, value) + log.info( + "%s environment variable %s=%s", set_msg, key, value, stacklevel=stacklevel + ) # Do NOT use os.putenv() - see python documentation os.environ[key] = resolve_env(str(value)) @@ -111,12 +121,12 @@ def override_env(key: str, value: str | Path | None) -> Iterator[None]: foo """ orig = os.getenv(key) - putenv(key, value) + _putenv(key, value, set_msg="Setting", stacklevel=4) try: yield finally: - putenv(key, orig) + _putenv(key, orig, set_msg="Restoring", stacklevel=4) @overload diff --git a/pshell/file.py b/pshell/file.py index 58800f1..5a46c44 100644 --- a/pshell/file.py +++ b/pshell/file.py @@ -36,6 +36,7 @@ def _unix_only() -> None: raise OSError("Not supported on Windows") # pragma: nocover +@log.inc_stacklevel() def remove( path: str | Path, *, @@ -138,7 +139,7 @@ def chdir(path: str | Path) -> None: is thread-unsafe and not context-aware. """ path = Path(path) - log.info("chdir %s", path) + log.info("chdir %s", path, stacklevel=2) os.chdir(resolve_env(path)) @@ -168,12 +169,12 @@ def pushd(path: str | Path) -> Iterator[None]: """ path = Path(path) cwd = os.getcwd() - log.info("pushd %s", path) + log.info("pushd %s", path, stacklevel=3) os.chdir(resolve_env(path)) try: yield finally: - log.info("popd") + log.info("popd", stacklevel=3) os.chdir(cwd) @@ -184,7 +185,7 @@ def move(src: str | Path, dst: str | Path) -> None: exist. If the destination already exists but is not a directory, it may be overwritten depending on :func:`os.rename` semantics. 
""" - log.info("Moving %s to %s", src, dst) + log.info("Moving %s to %s", src, dst, stacklevel=2) shutil.move(resolve_env(str(src)), resolve_env(str(dst))) @@ -220,7 +221,7 @@ def copy(src: str | Path, dst: str | Path, *, ignore: Callable | None = None) -> :param ignore: Only effective when copying a directory. See :func:`shutil.copytree`. """ - log.info("Copying %s to %s", src, dst) + log.info("Copying %s to %s", src, dst, stacklevel=2) src = resolve_env(src) dst = resolve_env(dst) if os.path.isdir(src): @@ -251,6 +252,7 @@ def backup( ) -> Path | None: ... +@log.inc_stacklevel() def backup( path: str | Path, *, @@ -291,7 +293,7 @@ def backup( # In case of collision, call the subsequent backups as .2, .3, etc. i = 2 while os.path.lexists(resolve_env(path_bak)): - log.info("%s already exists, generating a unique name") + log.info("%s already exists, generating a unique name", path_bak) path_bak = f"{path}.{suffix}.{i}" i += 1 @@ -303,6 +305,7 @@ def backup( return type(path)(path_bak) +@log.inc_stacklevel() def symlink( src: str | Path, dst: str | Path, *, force: bool = False, abspath: bool = False ) -> None: @@ -363,9 +366,9 @@ def exists(path: str | Path) -> bool: """ respath = resolve_env(path) if os.path.exists(respath): - log.debug("File exists: %s", path) + log.debug("File exists: %s", path, stacklevel=2) return True - log.debug("File does not exist or is a broken symlink: %s", path) + log.debug("File does not exist or is a broken symlink: %s", path, stacklevel=2) return False @@ -375,9 +378,9 @@ def lexists(path: str | Path) -> bool: """ respath = resolve_env(path) if os.path.lexists(respath): - log.debug("File exists: %s", path) + log.debug("File exists: %s", path, stacklevel=2) return True - log.debug("File does not exist: %s", path) + log.debug("File does not exist: %s", path, stacklevel=2) return False @@ -396,7 +399,7 @@ def mkdir(path: str | Path, *, parents: bool = True, force: bool = True) -> None """ respath = resolve_env(path) - log.info("Creating 
directory %s", path) + log.info("Creating directory %s", path, stacklevel=2) try: if parents: os.makedirs(respath) @@ -406,7 +409,7 @@ def mkdir(path: str | Path, *, parents: bool = True, force: bool = True) -> None # Cannot rely on checking for EEXIST, since the operating system # could give priority to other errors like EACCES or EROFS if force and os.path.isdir(respath): - log.info("Directory %s already exists", path) + log.info("Directory %s already exists", path, stacklevel=2) else: raise diff --git a/pshell/log.py b/pshell/log.py index 44ca7f0..c37a1f7 100644 --- a/pshell/log.py +++ b/pshell/log.py @@ -2,13 +2,28 @@ from __future__ import annotations +import contextlib +from collections.abc import Generator from contextvars import ContextVar from logging import Logger, getLogger from typing import Any +__all__ = ( + "context_logger", + "critical", + "debug", + "error", + "get_logger", + "inc_stacklevel", + "info", + "set_global_logger", + "warning", +) + _global_logger: Logger | None = None context_logger: ContextVar[Logger | None] = ContextVar("context_logger", default=None) +_inc_stacklevel: ContextVar[int] = ContextVar("_inc_stacklevel", default=1) def set_global_logger(logger: Logger | str | None) -> Logger | None: @@ -41,36 +56,67 @@ def get_logger() -> Logger: return getLogger("pshell") -def debug(msg: str, *args: Any, **kwargs: Any) -> None: - """Wrapper around :meth:`logging.Logger.debug` which uses the logger returned by - :func:`~pshell.get_logger`. +@contextlib.contextmanager +def inc_stacklevel(levels: int = 2) -> Generator[None]: + """Function decorator to be added to helper functions. + Will cause all log messages to be logged as if they were emitted + by the function's caller instead of the function itself. + + :param levels: + Number of stack levels to increase. Default: 2 (1 for contextlib, 1 for the + decorated function) + + Example: + + >>> @sh.log.inc_stacklevel() + ... def f(): + ... 
sh.log.info("This is logged as coming from g()") + >>> def g(): + ... f() + """ + prev = _inc_stacklevel.get() + tok = _inc_stacklevel.set(prev + levels) + try: + yield + finally: + _inc_stacklevel.reset(tok) + + +def debug(msg: str, *args: Any, stacklevel: int = 1, **kwargs: Any) -> None: + """Wrapper around :meth:`logging.Logger.debug` which uses the logger set by + :func:`~pshell.set_global_logger` or by :attr:`~pshell.context_logger`. """ - get_logger().debug(msg, *args, **kwargs) + stacklevel += _inc_stacklevel.get() + get_logger().debug(msg, *args, stacklevel=stacklevel, **kwargs) -def info(msg: str, *args: Any, **kwargs: Any) -> None: - """Wrapper around :meth:`logging.Logger.info` which uses the logger returned by - :func:`~pshell.get_logger`. +def info(msg: str, *args: Any, stacklevel: int = 1, **kwargs: Any) -> None: + """Wrapper around :meth:`logging.Logger.info` which uses the logger set by + :func:`~pshell.set_global_logger` or by :attr:`~pshell.context_logger`. """ - get_logger().info(msg, *args, **kwargs) + stacklevel += _inc_stacklevel.get() + get_logger().info(msg, *args, stacklevel=stacklevel, **kwargs) -def warning(msg: str, *args: Any, **kwargs: Any) -> None: - """Wrapper around :meth:`logging.Logger.warning` which uses the logger returned by - :func:`~pshell.get_logger`. +def warning(msg: str, *args: Any, stacklevel: int = 1, **kwargs: Any) -> None: + """Wrapper around :meth:`logging.Logger.warning` which uses the logger set by + :func:`~pshell.set_global_logger` or by :attr:`~pshell.context_logger`. """ - get_logger().warning(msg, *args, **kwargs) + stacklevel += _inc_stacklevel.get() + get_logger().warning(msg, *args, stacklevel=stacklevel, **kwargs) -def error(msg: str, *args: Any, **kwargs: Any) -> None: - """Wrapper around :meth:`logging.Logger.error` which uses the logger returned by - :func:`~pshell.get_logger`. 
+def error(msg: str, *args: Any, stacklevel: int = 1, **kwargs: Any) -> None: + """Wrapper around :meth:`logging.Logger.error` which uses the logger set by + :func:`~pshell.set_global_logger` or by :attr:`~pshell.context_logger`. """ - get_logger().error(msg, *args, **kwargs) + stacklevel += _inc_stacklevel.get() + get_logger().error(msg, *args, stacklevel=stacklevel, **kwargs) -def critical(msg: str, *args: Any, **kwargs: Any) -> None: - """Wrapper around :meth:`logging.Logger.critical` which uses the logger returned by - :func:`~pshell.get_logger`. +def critical(msg: str, *args: Any, stacklevel: int = 1, **kwargs: Any) -> None: + """Wrapper around :meth:`logging.Logger.critical` which uses the logger set by + :func:`~pshell.set_global_logger` or by :attr:`~pshell.context_logger`. """ - get_logger().critical(msg, *args, **kwargs) + stacklevel += _inc_stacklevel.get() + get_logger().critical(msg, *args, stacklevel=stacklevel, **kwargs) diff --git a/pshell/manipulate.py b/pshell/manipulate.py index 8da6116..6c9d017 100644 --- a/pshell/manipulate.py +++ b/pshell/manipulate.py @@ -12,6 +12,7 @@ __all__ = ("concatenate",) +@log.inc_stacklevel(3) def concatenate( input_fnames: Sequence[str | Path], output_fname: str | Path, @@ -44,7 +45,7 @@ def concatenate( in binary mode, the inputs will too; no extra bytes will be added between files. """ - log.info("Appending files: %s to: %s", input_fnames, output_fname) + log.info("Appending files: %s to: %s", input_fnames, output_fname, stacklevel=0) if "b" in mode: _concatenate_binary(input_fnames, output_fname, mode, **kwargs) diff --git a/pshell/open.py b/pshell/open.py index 19f12d6..bb13927 100644 --- a/pshell/open.py +++ b/pshell/open.py @@ -126,14 +126,22 @@ def pshell_open( # resolve env variables and write log message. 
if isinstance(file, (str, Path)): - log.info("Opening '%s' for %s%s", file, mode_label, compress_label) + log.info( + "Opening '%s' for %s%s", file, mode_label, compress_label, stacklevel=2 + ) file = resolve_env(file) elif isinstance(file, int): if compression: raise TypeError("compression not supported when opening a file descriptor") - log.info("Opening file descriptor %d for %s", file, mode_label) + log.info("Opening file descriptor %d for %s", file, mode_label, stacklevel=2) else: - log.info("Opening file handle for %s%s%s", file, mode_label, compress_label) + log.info( + "Opening file handle for %s%s%s", + file, + mode_label, + compress_label, + stacklevel=2, + ) open_func: Callable[..., IO] if compression is False: diff --git a/pshell/procs.py b/pshell/procs.py index 11cc568..00a3382 100644 --- a/pshell/procs.py +++ b/pshell/procs.py @@ -56,7 +56,9 @@ def find_procs_by_cmdline(*cmdlines: str | Path) -> list[psutil.Process]: matches = [resolve_env(str(x)) for x in cmdlines] log.debug( - "Finding processes that match command lines:\n - %s", "\n - ".join(matches) + "Finding processes that match command lines:\n - %s", + "\n - ".join(matches), + stacklevel=2, ) procs = [] @@ -69,7 +71,7 @@ def find_procs_by_cmdline(*cmdlines: str | Path) -> list[psutil.Process]: for match in matches: if cmdline.find(match) != -1: - log.debug("Process %d matches: %s", proc.pid, cmdline) + log.debug("Process %d matches: %s", proc.pid, cmdline, stacklevel=2) procs.append(proc) break except (psutil.NoSuchProcess, psutil.ZombieProcess): @@ -83,6 +85,7 @@ def find_procs_by_cmdline(*cmdlines: str | Path) -> list[psutil.Process]: return procs +@log.inc_stacklevel() def kill(*procs: int | psutil.Process | None, term_timeout: float = 10) -> None: """Send SIGTERM to one or more processes. After ``term_timeout`` seconds, send SIGKILL to the surviving processes. 
@@ -178,6 +181,7 @@ def kill(*procs: int | psutil.Process | None, term_timeout: float = 10) -> None: log.info("All processes terminated") +@log.inc_stacklevel() def killall(*cmdlines: str | Path, term_timeout: float = 10) -> None: """Find all processes with the target command line(s), send SIGTERM, and then send SIGKILL to the survivors. diff --git a/pshell/search.py b/pshell/search.py index 00b1963..1033a98 100644 --- a/pshell/search.py +++ b/pshell/search.py @@ -96,7 +96,7 @@ def glob( ): raise FileMatchError(pathname, min_results, max_results, len(results)) - log.info("File match %s produced %d results", pathname, len(results)) + log.info("File match %s produced %d results", pathname, len(results), stacklevel=2) return [Path(r) for r in results] if isinstance(pathname, Path) else results @@ -151,4 +151,4 @@ def iglob( if count < min_results: raise FileMatchError(pathname, min_results, max_results, count) - log.info("File match %s produced %d results", pathname, count) + log.info("File match %s produced %d results", pathname, count, stacklevel=2) diff --git a/pshell/tests/conftest.py b/pshell/tests/conftest.py index 93e85a3..ea1c2ec 100644 --- a/pshell/tests/conftest.py +++ b/pshell/tests/conftest.py @@ -1,3 +1,4 @@ +import logging import os import sys from pathlib import Path @@ -31,3 +32,15 @@ def clear_names(): @pytest.fixture def thread_index(): return 0 + + +@pytest.fixture(autouse=True) +def assert_log_stacklevel(request, caplog): + """Test that all log messages emitted by the test that uses this fixture are logged + as emitted by the test function itself, and not by pshell. 
+ """ + with caplog.at_level(logging.DEBUG): + yield + records = caplog.get_records("call") + for record in records: + assert record.filename == os.path.basename(request.node.fspath) diff --git a/pshell/tests/test_call.py b/pshell/tests/test_call.py index 9cc9d36..7032c84 100644 --- a/pshell/tests/test_call.py +++ b/pshell/tests/test_call.py @@ -1,4 +1,5 @@ import io +import logging import os import sys import tempfile @@ -96,11 +97,10 @@ def test_call_pipefail(): @unix_only def test_call_obfuscate_pwd(caplog): - caplog.set_level(20) n = get_name() assert sh.call(f"echo -P mypass {n}", obfuscate_pwd="mypass") == 0 tups = [row for row in caplog.record_tuples if n in row[2]] - assert tups == [("pshell", 20, f"Executing: echo -P XXXX {n}")] + assert tups == [("pshell", logging.INFO, f"Executing: echo -P XXXX {n}")] def test_call_noshell1(): @@ -167,11 +167,10 @@ def test_check_call_pipefail(): @unix_only def test_check_call_obfuscate_pwd(caplog): - caplog.set_level(20) n = get_name() sh.check_call(f"echo -P mypass {n}", obfuscate_pwd="mypass") tups = [row for row in caplog.record_tuples if n in row[2]] - assert tups == [("pshell", 20, f"Executing: echo -P XXXX {n}")] + assert tups == [("pshell", logging.INFO, f"Executing: echo -P XXXX {n}")] def test_check_call_noshell1(): @@ -260,12 +259,11 @@ def test_check_output_pipefail(): @unix_only def test_check_output_obfuscate_pwd(caplog): - caplog.set_level(20) n = get_name() out = sh.check_output(f"echo Hello world {n}", obfuscate_pwd="world") assert out.strip() == f"Hello world {n}" tups = [row for row in caplog.record_tuples if n in row[2]] - assert tups == [("pshell", 20, f"Executing: echo Hello XXXX {n}")] + assert tups == [("pshell", logging.INFO, f"Executing: echo Hello XXXX {n}")] @unix_only diff --git a/pshell/tests/test_log.py b/pshell/tests/test_log.py index d61f62c..b528006 100644 --- a/pshell/tests/test_log.py +++ b/pshell/tests/test_log.py @@ -9,7 +9,6 @@ @pytest.mark.thread_unsafe(reason="Modifies global 
logger") def test_global_log(caplog): - caplog.set_level(10) log.debug("%d", 1) assert sh.get_logger().name == "pshell" sh.set_global_logger(logging.getLogger("g1")) @@ -23,10 +22,10 @@ def test_global_log(caplog): assert sh.get_logger().name == "pshell" assert caplog.record_tuples == [ - ("pshell", 10, "1"), - ("g1", 20, "2"), - ("g2", 30, "3"), - ("pshell", 20, "4"), + ("pshell", logging.DEBUG, "1"), + ("g1", logging.INFO, "2"), + ("g2", logging.WARNING, "3"), + ("pshell", logging.INFO, "4"), ] @@ -40,7 +39,7 @@ def test_context_log(caplog): log.error("%s %d", foo, 2) assert sh.get_logger().name == bar sh.context_logger.reset(tok) - log.error("%s %d", foo, 3) + log.critical("%s %d", foo, 3) assert sh.get_logger().name == "pshell" # caplog records all log calls from all threads; @@ -48,7 +47,67 @@ def test_context_log(caplog): tups = [t for t in caplog.record_tuples if foo in t[2]] assert tups == [ - ("pshell", 40, f"{foo} 1"), - (bar, 40, f"{foo} 2"), - ("pshell", 40, f"{foo} 3"), + ("pshell", logging.ERROR, f"{foo} 1"), + (bar, logging.ERROR, f"{foo} 2"), + ("pshell", logging.CRITICAL, f"{foo} 3"), ] + + +def test_default_stacklevel(): + """Test that the default stacklevel captures the function calling pshell""" + log.debug("debug") + log.info("info") + log.warning("warning") + log.error("error") + log.critical("critical") + + +def test_custom_stacklevel(): + """Test that if the user passes the stacklevel parameter, it is respected + and matches the behaviour of the bare logging module. 
+ """ + + def f(): + log.info("from pshell", stacklevel=2) + logging.info("from logging", stacklevel=2) + + f() + + +def test_inc_stacklevel_simple(): + @log.inc_stacklevel() + def f(): + log.info("inside f()") + + f() + + +def test_inc_stacklevel_nested(): + @log.inc_stacklevel(3) + def f(): + g() + + def g(): + log.info("inside g()") + + f() + + +def test_inc_stacklevel_context_manager(): + def f(): + with log.inc_stacklevel(1): + log.info("inside f()") + + f() + + +def test_inc_stacklevel_nested_decorators(): + @log.inc_stacklevel() + def f(): + g() + + @log.inc_stacklevel() + def g(): + log.info("inside g()") + + f()