
Commit 6b0bacc

docs(python): Address ignored Ruff doc rules (pola-rs#9919)
1 parent 1a4eaa5 commit 6b0bacc

14 files changed (+140, -51 lines)


py-polars/docs/source/conf.py

Lines changed: 2 additions & 2 deletions
@@ -235,14 +235,14 @@ def _minify_classpaths(s: str) -> str:
     )
 
 
-def process_signature(app, what, name, obj, opts, sig, ret):
+def process_signature(app, what, name, obj, opts, sig, ret):  # noqa: D103
     return (
         _minify_classpaths(sig) if sig else sig,
         _minify_classpaths(ret) if ret else ret,
     )
 
 
-def setup(app):
+def setup(app):  # noqa: D103
     # TODO: a handful of methods do not seem to trigger the event for
     # some reason (possibly @overloads?) - investigate further...
     app.connect("autodoc-process-signature", process_signature)
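
Note: Ruff's D103 rule ("missing docstring in public function") would otherwise flag these two Sphinx hooks, and the commit suppresses the rule rather than documenting them. A purely hypothetical alternative (not part of this commit) would be to satisfy D103 with short docstrings instead, e.g. within conf.py where _minify_classpaths is defined:

    def process_signature(app, what, name, obj, opts, sig, ret):
        """Shorten fully-qualified class paths in autodoc signatures."""
        return (
            _minify_classpaths(sig) if sig else sig,
            _minify_classpaths(ret) if ret else ret,
        )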

py-polars/polars/dataframe/frame.py

Lines changed: 3 additions & 3 deletions
@@ -3306,9 +3306,9 @@ def write_delta(
         ... )  # doctest: +SKIP
 
         """
-        from polars.io.delta import check_if_delta_available, resolve_delta_lake_uri
+        from polars.io.delta import _check_if_delta_available, _resolve_delta_lake_uri
 
-        check_if_delta_available()
+        _check_if_delta_available()
 
         from deltalake.writer import (
             try_get_deltatable,
@@ -3319,7 +3319,7 @@ def write_delta(
             delta_write_options = {}
 
         if isinstance(target, (str, Path)):
-            target = resolve_delta_lake_uri(str(target), strict=False)
+            target = _resolve_delta_lake_uri(str(target), strict=False)
 
         unsupported_cols = {}
         unsupported_types = [Time, Categorical, Null]
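
Note: only private helpers in polars.io.delta are renamed here; the public DataFrame.write_delta API is unchanged. A minimal usage sketch, assuming the optional deltalake dependency is installed and that "./delta-table" is just an illustrative, writable local target:

    import polars as pl

    df = pl.DataFrame({"id": [1, 2, 3], "value": ["a", "b", "c"]})
    # The target path is normalised internally (now via _resolve_delta_lake_uri)
    # before the frame is written out as a Delta Lake table.
    df.write_delta("./delta-table", mode="overwrite")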

py-polars/polars/dataframe/groupby.py

Lines changed: 26 additions & 0 deletions
@@ -845,6 +845,19 @@ def agg(
         *aggs: IntoExpr | Iterable[IntoExpr],
         **named_aggs: IntoExpr,
     ) -> DataFrame:
+        """
+        Compute aggregations for each group of a groupby operation.
+
+        Parameters
+        ----------
+        *aggs
+            Aggregations to compute for each group of the groupby operation,
+            specified as positional arguments.
+            Accepts expression input. Strings are parsed as column names.
+        **named_aggs
+            Additional aggregations, specified as keyword arguments.
+            The resulting columns will be renamed to the keyword used.
+        """
         return (
             self.df.lazy()
             .groupby_rolling(
@@ -1046,6 +1059,19 @@ def agg(
         *aggs: IntoExpr | Iterable[IntoExpr],
         **named_aggs: IntoExpr,
     ) -> DataFrame:
+        """
+        Compute aggregations for each group of a groupby operation.
+
+        Parameters
+        ----------
+        *aggs
+            Aggregations to compute for each group of the groupby operation,
+            specified as positional arguments.
+            Accepts expression input. Strings are parsed as column names.
+        **named_aggs
+            Additional aggregations, specified as keyword arguments.
+            The resulting columns will be renamed to the keyword used.
+        """
         return (
             self.df.lazy()
             .groupby_dynamic(
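
Note: the two agg methods documented above belong to the rolling and dynamic groupby objects returned by groupby_rolling and groupby_dynamic. A rough sketch of what the new docstrings describe, using illustrative column names:

    from datetime import datetime

    import polars as pl

    df = pl.DataFrame(
        {
            "time": [datetime(2021, 12, 16, hour) for hour in range(4)],
            "n": [0, 1, 2, 3],
        }
    ).sort("time")  # the index column must be sorted for groupby_dynamic

    # Positional aggregations accept expressions (or column names as strings);
    # keyword aggregations are renamed to the keyword used (here: "n_max").
    out = df.groupby_dynamic("time", every="2h").agg(
        pl.col("n").sum(), n_max=pl.col("n").max()
    )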

py-polars/polars/datatypes/classes.py

Lines changed: 34 additions & 5 deletions
@@ -3,7 +3,7 @@
 import contextlib
 from datetime import timezone
 from inspect import isclass
-from typing import TYPE_CHECKING, Any, Callable, Iterator, Mapping, Sequence
+from typing import TYPE_CHECKING, Any, Callable, Iterable, Iterator, Mapping, Sequence
 
 import polars.datatypes
 
@@ -31,7 +31,7 @@ def __init__(self, method: Callable[..., Any] | None = None) -> None:
     def __get__(self, instance: Any, cls: type | None = None) -> Any:
         return self.fget(cls)  # type: ignore[misc]
 
-    def getter(self, method: Callable[..., Any]) -> Any:
+    def getter(self, method: Callable[..., Any]) -> Any:  # noqa: D102
         self.fget = method
         return self
 
@@ -46,25 +46,29 @@ def _string_repr(cls) -> str:
         return _dtype_str_repr(cls)
 
     def base_type(cls) -> PolarsDataType:
+        """Return the base type."""
         return cls
 
     @classproperty
     def is_nested(self) -> bool:
+        """Check if this data type is nested."""
         return False
 
     @classmethod
     def is_(cls, other: PolarsDataType) -> bool:
+        """Check if this DataType is the same as another DataType."""
         return cls == other and hash(cls) == hash(other)
 
     @classmethod
     def is_not(cls, other: PolarsDataType) -> bool:
+        """Check if this DataType is NOT the same as another DataType."""
         return not cls.is_(other)
 
 
 class DataType(metaclass=DataTypeClass):
     """Base class for all Polars data types."""
 
-    def __new__(cls, *args: Any, **kwargs: Any) -> PolarsDataType:  # type: ignore[misc]
+    def __new__(cls, *args: Any, **kwargs: Any) -> PolarsDataType:  # type: ignore[misc]  # noqa: D102
         # this formulation allows for equivalent use of "pl.Type" and "pl.Type()", while
         # still respecting types that take initialisation params (eg: Duration/Datetime)
         if args or kwargs:
@@ -95,6 +99,7 @@ def base_type(cls) -> DataTypeClass:
 
     @classproperty
     def is_nested(self) -> bool:
+        """Check if this data type is nested."""
         return False
 
     @classinstmethod  # type: ignore[arg-type]
@@ -158,15 +163,30 @@ def _custom_reconstruct(
 
 
 class DataTypeGroup(frozenset):  # type: ignore[type-arg]
+    """Group of data types."""
+
     _match_base_type: bool
 
-    def __new__(cls, items: Any, *, match_base_type: bool = True) -> DataTypeGroup:
+    def __new__(
+        cls, items: Iterable[DataType | DataTypeClass], *, match_base_type: bool = True
+    ) -> DataTypeGroup:
+        """
+        Construct a DataTypeGroup.
+
+        Parameters
+        ----------
+        items :
+            iterable of data types
+        match_base_type:
+            match the base type
+
+        """
         for it in items:
             if not isinstance(it, (DataType, DataTypeClass)):
                 raise TypeError(
                     f"DataTypeGroup items must be dtypes; found {type(it).__name__!r}"
                 )
-        dtype_group = super().__new__(cls, items)
+        dtype_group = super().__new__(cls, items)  # type: ignore[arg-type]
         dtype_group._match_base_type = match_base_type
         return dtype_group
 
@@ -201,6 +221,7 @@ class NestedType(DataType):
 
     @classproperty
     def is_nested(self) -> bool:
+        """Check if this data type is nested."""
         return True
 
 
@@ -406,6 +427,8 @@ class Unknown(DataType):
 
 
 class List(NestedType):
+    """Nested list/array type with variable length of inner lists."""
+
     inner: PolarsDataType | None = None
 
     def __init__(self, inner: PolarsDataType | PythonDataType):
@@ -466,6 +489,8 @@ def __repr__(self) -> str:
 
 
 class Array(NestedType):
+    """Nested list/array type with fixed length of inner arrays."""
+
     inner: PolarsDataType | None = None
     width: int
 
@@ -524,6 +549,8 @@ def __repr__(self) -> str:
 
 
 class Field:
+    """Definition of a single field within a `Struct` DataType."""
+
     def __init__(self, name: str, dtype: PolarsDataType):
         """
         Definition of a single field within a `Struct` DataType.
@@ -551,6 +578,8 @@
 
 
 class Struct(NestedType):
+    """Struct composite type."""
+
     def __init__(self, fields: Sequence[Field] | SchemaDict):
         """
         Struct composite type.
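
Note: the newly documented classes and helpers live in polars.datatypes. A small sketch of the behaviour the added docstrings describe; importing DataTypeGroup from polars.datatypes and the membership check matching parametrised dtypes by their base type (per the match_base_type parameter above) are assumptions about this version of the codebase:

    import polars as pl
    from polars.datatypes import DataTypeGroup

    # Nested vs. scalar data types.
    assert pl.List(pl.Int64).is_nested
    assert not pl.Int64.is_nested

    # is_ / is_not compare data types strictly.
    assert pl.Int64.is_(pl.Int64)
    assert pl.Int64.is_not(pl.Float64)

    # With match_base_type=True (the default), a parametrised dtype such as
    # Datetime("ms") is assumed to match the plain Datetime entry in the group.
    temporal = DataTypeGroup([pl.Datetime, pl.Date])
    assert pl.Datetime("ms") in temporal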

py-polars/polars/datatypes/convert.py

Lines changed: 8 additions & 5 deletions
@@ -73,7 +73,7 @@
 T = TypeVar("T")
 
 
-def cache(function: Callable[..., T]) -> T:
+def cache(function: Callable[..., T]) -> T:  # noqa: D103
     # need this to satisfy mypy issue with "@property/@cache combination"
     # See: https://github.com/python/mypy/issues/5858
     return functools.lru_cache()(function)  # type: ignore[return-value]
@@ -98,7 +98,10 @@ def cache(function: Callable[..., T]) -> T:
 
 
 @functools.lru_cache(16)
-def map_py_type_to_dtype(python_dtype: PythonDataType | type[object]) -> PolarsDataType:
+def _map_py_type_to_dtype(
+    python_dtype: PythonDataType | type[object],
+) -> PolarsDataType:
+    """Convert Python data type to Polars data type."""
     if python_dtype is float:
         return Float64
     if python_dtype is int:
@@ -134,14 +137,14 @@ def map_py_type_to_dtype(python_dtype: PythonDataType | type[object]) -> PolarsD
     if hasattr(python_dtype, "__origin__") and hasattr(python_dtype, "__args__"):
         base_type = python_dtype.__origin__
         if base_type is not None:
-            dtype = map_py_type_to_dtype(base_type)
+            dtype = _map_py_type_to_dtype(base_type)
             nested = python_dtype.__args__
             if len(nested) == 1:
                 nested = nested[0]
             return (
                 dtype
                 if nested is None
-                else dtype(map_py_type_to_dtype(nested))  # type: ignore[operator]
+                else dtype(_map_py_type_to_dtype(nested))  # type: ignore[operator]
             )
 
     raise TypeError("Invalid type")
@@ -424,7 +427,7 @@ def py_type_to_dtype(
     if is_polars_dtype(data_type):
         return data_type
     try:
-        return map_py_type_to_dtype(data_type)
+        return _map_py_type_to_dtype(data_type)
     except (KeyError, TypeError):  # pragma: no cover
         if not raise_unmatched:
             return None
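
Note: the public py_type_to_dtype wrapper keeps its behaviour; only the cached helper became private. A rough sketch of the mapping, assuming py_type_to_dtype is importable from polars.datatypes as in this version of the codebase (the list[int] line needs Python 3.9+ generic syntax):

    from datetime import date

    import polars as pl
    from polars.datatypes import py_type_to_dtype

    # Plain Python types map to the corresponding Polars data types.
    assert py_type_to_dtype(int) == pl.Int64
    assert py_type_to_dtype(float) == pl.Float64
    assert py_type_to_dtype(date) == pl.Date

    # Parametrised generics are resolved recursively (now via _map_py_type_to_dtype).
    assert py_type_to_dtype(list[int]) == pl.List(pl.Int64)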
