
Commit 743e874

[3.14] gh-138281: Run ruff on Tools/peg_generator (GH-138282)

Committed by sobolevn and AA-Turner.
(cherry picked from commit 0d1f4e1)
Co-authored-by: sobolevn <[email protected]>
Co-authored-by: Adam Turner <[email protected]>

1 parent: d7c5aa9

16 files changed: +251, -237 lines.

.pre-commit-config.yaml

Lines changed: 4 additions & 0 deletions
@@ -18,6 +18,10 @@ repos:
         name: Run Ruff (lint) on Argument Clinic
         args: [--exit-non-zero-on-fix, --config=Tools/clinic/.ruff.toml]
         files: ^Tools/clinic/|Lib/test/test_clinic.py
+      - id: ruff
+        name: Run Ruff (lint) on Tools/peg_generator/
+        args: [--exit-non-zero-on-fix, --config=Tools/peg_generator/.ruff.toml]
+        files: ^Tools/peg_generator/
       - id: ruff-format
         name: Run Ruff (format) on Doc/
         args: [--check]
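(Note: with pre-commit installed, `pre-commit run ruff --all-files` runs every hook with the id `ruff`, including the new one added here; the `files` pattern restricts it to Tools/peg_generator/.)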

Tools/peg_generator/.ruff.toml

Lines changed: 27 additions & 0 deletions
@@ -0,0 +1,27 @@
+extend = "../../.ruff.toml"  # Inherit the project-wide settings
+
+extend-exclude = [
+    # Generated files:
+    "Tools/peg_generator/pegen/grammar_parser.py",
+]
+
+[lint]
+select = [
+    "F",  # pyflakes
+    "I",  # isort
+    "UP",  # pyupgrade
+    "RUF100",  # Ban unused `# noqa` comments
+    "PGH004",  # Ban blanket `# noqa` comments (only ignore specific error codes)
+]
+ignore = [
+    # Use PEP-604 unions rather than tuples for isinstance() checks.
+    # Makes code slower and more verbose. https://github.com/astral-sh/ruff/issues/7871.
+    "UP038",
+]
+unfixable = [
+    # The autofixes sometimes do the wrong things for these;
+    # it's better to have to manually look at the code and see how it needs fixing
+    "F841",  # Detects unused variables
+    "F601",  # Detects dictionaries that have duplicate keys
+    "F602",  # Also detects dictionaries that have duplicate keys
+]
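For context on the `UP038` ignore above: that rule rewrites tuple arguments to isinstance() into PEP 604 unions, which the linked ruff issue reports as slower and more verbose. A minimal sketch of the two spellings (the variable is illustrative):

x: object = 42

# Tuple form, which this config keeps by ignoring UP038:
if isinstance(x, (int, float)):
    print("number")

# PEP 604 union form (valid at runtime on Python 3.10+),
# which the UP038 autofix would produce:
if isinstance(x, int | float):
    print("number")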

Tools/peg_generator/pegen/__main__.py

Lines changed: 3 additions & 4 deletions
@@ -10,7 +10,6 @@
 import time
 import token
 import traceback
-from typing import Tuple

 from pegen.grammar import Grammar
 from pegen.parser import Parser
@@ -21,7 +20,7 @@

 def generate_c_code(
     args: argparse.Namespace,
-) -> Tuple[Grammar, Parser, Tokenizer, ParserGenerator]:
+) -> tuple[Grammar, Parser, Tokenizer, ParserGenerator]:
     from pegen.build import build_c_parser_and_generator

     verbose = args.verbose
@@ -50,7 +49,7 @@ def generate_c_code(

 def generate_python_code(
     args: argparse.Namespace,
-) -> Tuple[Grammar, Parser, Tokenizer, ParserGenerator]:
+) -> tuple[Grammar, Parser, Tokenizer, ParserGenerator]:
     from pegen.build import build_python_parser_and_generator

     verbose = args.verbose
@@ -188,7 +187,7 @@ def main() -> None:


 if __name__ == "__main__":
-    if sys.version_info < (3, 8):
+    if sys.version_info < (3, 8):  # noqa: UP036
         print("ERROR: using pegen requires at least Python 3.8!", file=sys.stderr)
         sys.exit(1)
     main()
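Two mechanical changes recur in this file. Replacing `typing.Tuple` with the builtin `tuple[...]` follows PEP 585, under which the builtin collection types are directly subscriptable on Python 3.9+; a minimal illustration (the function is hypothetical):

def first_pair() -> tuple[int, str]:
    # Builtin generic: no `from typing import Tuple` needed on Python 3.9+
    return 1, "one"

The added `# noqa: UP036` suppresses pyupgrade's outdated-version-block rule on the `sys.version_info < (3, 8)` guard, which is kept deliberately so that older interpreters exit with a clear error message rather than a syntax error.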

Tools/peg_generator/pegen/ast_dump.py

Lines changed: 9 additions & 9 deletions
@@ -6,17 +6,17 @@
 TODO: Remove the above-described hack.
 """

-from typing import Any, Optional, Tuple
+from typing import Any


 def ast_dump(
     node: Any,
     annotate_fields: bool = True,
     include_attributes: bool = False,
     *,
-    indent: Optional[str] = None,
+    indent: str | None = None,
 ) -> str:
-    def _format(node: Any, level: int = 0) -> Tuple[str, bool]:
+    def _format(node: Any, level: int = 0) -> tuple[str, bool]:
         if indent is not None:
             level += 1
             prefix = "\n" + indent * level
@@ -41,7 +41,7 @@ def _format(node: Any, level: int = 0) -> Tuple[str, bool]:
                 value, simple = _format(value, level)
                 allsimple = allsimple and simple
                 if keywords:
-                    args.append("%s=%s" % (name, value))
+                    args.append(f"{name}={value}")
                 else:
                     args.append(value)
             if include_attributes and node._attributes:
@@ -54,16 +54,16 @@ def _format(node: Any, level: int = 0) -> Tuple[str, bool]:
                         continue
                 value, simple = _format(value, level)
                 allsimple = allsimple and simple
-                args.append("%s=%s" % (name, value))
+                args.append(f"{name}={value}")
             if allsimple and len(args) <= 3:
-                return "%s(%s)" % (node.__class__.__name__, ", ".join(args)), not args
-            return "%s(%s%s)" % (node.__class__.__name__, prefix, sep.join(args)), False
+                return "{}({})".format(node.__class__.__name__, ", ".join(args)), not args
+            return f"{node.__class__.__name__}({prefix}{sep.join(args)})", False
         elif isinstance(node, list):
             if not node:
                 return "[]", True
-            return "[%s%s]" % (prefix, sep.join(_format(x, level)[0] for x in node)), False
+            return f"[{prefix}{sep.join(_format(x, level)[0] for x in node)}]", False
         return repr(node), True

     if all(cls.__name__ != "AST" for cls in node.__class__.__mro__):
-        raise TypeError("expected AST, got %r" % node.__class__.__name__)
+        raise TypeError(f"expected AST, got {node.__class__.__name__!r}")
     return _format(node)[0]
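These rewrites trade %-formatting for f-strings without changing the output; in particular, `!r` inside an f-string reproduces `%r`. A quick sanity check with hypothetical values:

name, value = "lineno", "3"
assert "%s=%s" % (name, value) == f"{name}={value}"

cls_name = "Module"
assert "expected AST, got %r" % cls_name == f"expected AST, got {cls_name!r}"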

Tools/peg_generator/pegen/build.py

Lines changed: 11 additions & 12 deletions
@@ -6,7 +6,7 @@
 import sysconfig
 import tempfile
 import tokenize
-from typing import IO, Any, Dict, List, Optional, Set, Tuple
+from typing import IO, Any

 from pegen.c_generator import CParserGenerator
 from pegen.grammar import Grammar
@@ -18,11 +18,11 @@

 MOD_DIR = pathlib.Path(__file__).resolve().parent

-TokenDefinitions = Tuple[Dict[int, str], Dict[str, int], Set[str]]
+TokenDefinitions = tuple[dict[int, str], dict[str, int], set[str]]
 Incomplete = Any  # TODO: install `types-setuptools` and remove this alias


-def get_extra_flags(compiler_flags: str, compiler_py_flags_nodist: str) -> List[str]:
+def get_extra_flags(compiler_flags: str, compiler_py_flags_nodist: str) -> list[str]:
     flags = sysconfig.get_config_var(compiler_flags)
     py_flags_nodist = sysconfig.get_config_var(compiler_py_flags_nodist)
     if flags is None or py_flags_nodist is None:
@@ -71,11 +71,11 @@ def fixup_build_ext(cmd: Incomplete) -> None:

 def compile_c_extension(
     generated_source_path: str,
-    build_dir: Optional[str] = None,
+    build_dir: str | None = None,
     verbose: bool = False,
     keep_asserts: bool = True,
     disable_optimization: bool = False,
-    library_dir: Optional[str] = None,
+    library_dir: str | None = None,
 ) -> pathlib.Path:
     """Compile the generated source for a parser generator into an extension module.

@@ -93,11 +93,10 @@ def compile_c_extension(
     """
     import setuptools.command.build_ext
     import setuptools.logging
-
-    from setuptools import Extension, Distribution
-    from setuptools.modified import newer_group
+    from setuptools import Distribution, Extension
     from setuptools._distutils.ccompiler import new_compiler
     from setuptools._distutils.sysconfig import customize_compiler
+    from setuptools.modified import newer_group

     if verbose:
         setuptools.logging.set_threshold(logging.DEBUG)
@@ -241,7 +240,7 @@

 def build_parser(
     grammar_file: str, verbose_tokenizer: bool = False, verbose_parser: bool = False
-) -> Tuple[Grammar, Parser, Tokenizer]:
+) -> tuple[Grammar, Parser, Tokenizer]:
     with open(grammar_file) as file:
         tokenizer = Tokenizer(tokenize.generate_tokens(file.readline), verbose=verbose_tokenizer)
         parser = GrammarParser(tokenizer, verbose=verbose_parser)
@@ -292,7 +291,7 @@ def build_c_generator(
     keep_asserts_in_extension: bool = True,
     skip_actions: bool = False,
 ) -> ParserGenerator:
-    with open(tokens_file, "r") as tok_file:
+    with open(tokens_file) as tok_file:
         all_tokens, exact_tok, non_exact_tok = generate_token_definitions(tok_file)
     with open(output_file, "w") as file:
         gen: ParserGenerator = CParserGenerator(
@@ -333,7 +332,7 @@ def build_c_parser_and_generator(
     verbose_c_extension: bool = False,
     keep_asserts_in_extension: bool = True,
     skip_actions: bool = False,
-) -> Tuple[Grammar, Parser, Tokenizer, ParserGenerator]:
+) -> tuple[Grammar, Parser, Tokenizer, ParserGenerator]:
     """Generate rules, C parser, tokenizer, parser generator for a given grammar

     Args:
@@ -373,7 +372,7 @@ def build_python_parser_and_generator(
     verbose_tokenizer: bool = False,
     verbose_parser: bool = False,
     skip_actions: bool = False,
-) -> Tuple[Grammar, Parser, Tokenizer, ParserGenerator]:
+) -> tuple[Grammar, Parser, Tokenizer, ParserGenerator]:
     """Generate rules, python parser, tokenizer, parser generator for a given grammar

     Args:
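Two patterns repeat throughout this file: `Optional[X]` becomes the PEP 604 spelling `X | None`, and the redundant `"r"` mode argument is dropped from open(), since text read mode is already its default. A minimal sketch combining both (the function and its arguments are hypothetical):

def read_grammar(path: str, fallback: str | None = None) -> str:
    # open(path) is equivalent to open(path, "r"): text mode, read-only
    try:
        with open(path) as f:
            return f.read()
    except FileNotFoundError:
        if fallback is None:
            raise
        return fallback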
