Skip to content

Commit 08e463a

Browse files
committed
2025-01-16T18:54:28Z
1 parent c9da0d4 commit 08e463a

File tree

18 files changed

+162
-158
lines changed

18 files changed

+162
-158
lines changed

omdev/.manifests.json

Lines changed: 15 additions & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -326,11 +326,25 @@
326326
}
327327
}
328328
},
329+
{
330+
"module": ".tools.mkenv",
331+
"attr": "_CLI_MODULE",
332+
"file": "omdev/tools/mkenv.py",
333+
"line": 123,
334+
"value": {
335+
"$.cli.types.CliModule": {
336+
"cmd_name": [
337+
"mkenv"
338+
],
339+
"mod_name": "omdev.tools.mkenv"
340+
}
341+
}
342+
},
329343
{
330344
"module": ".tools.mkrelimp",
331345
"attr": "_CLI_MODULE",
332346
"file": "omdev/tools/mkrelimp.py",
333-
"line": 148,
347+
"line": 146,
334348
"value": {
335349
"$.cli.types.CliModule": {
336350
"cmd_name": "py/mkrelimp",

omdev/amalg/gen.py

Lines changed: 1 addition & 2 deletions
Original file line number | Diff line number | Diff line change
@@ -12,7 +12,6 @@
1212
from .srcfiles import SrcFile
1313
from .srcfiles import make_src_file
1414
from .strip import strip_main_lines
15-
from .types import Tokens
1615
from .typing import Typing
1716

1817

@@ -71,7 +70,7 @@ def _main_file(self) -> SrcFile:
7170
return self._src_files()[self._main_path]
7271

7372
@cached.function
74-
def _header_lines(self) -> list[Tokens]:
73+
def _header_lines(self) -> list[str]:
7574
header_lines = []
7675

7776
if self._main_file().header_lines:

omdev/amalg/imports.py

Lines changed: 3 additions & 4 deletions
Original file line number | Diff line number | Diff line change
@@ -5,7 +5,6 @@
55
from omlish import check
66

77
from ..tokens import all as tks
8-
from .types import Tokens
98

109

1110
##
@@ -22,11 +21,11 @@ class Import:
2221

2322
mod_path: str | None
2423

25-
toks: Tokens = dc.field(repr=False)
24+
toks: tks.Tokens = dc.field(repr=False)
2625

2726

2827
def make_import(
29-
lts: Tokens,
28+
lts: tks.Tokens,
3029
*,
3130
src_path: str,
3231
mounts: ta.Mapping[str, str],
@@ -90,7 +89,7 @@ def make_import(
9089
as_=as_,
9190

9291
src_path=src_path,
93-
line=ft.line,
92+
line=check.not_none(ft.line),
9493

9594
mod_path=mod_path,
9695

omdev/amalg/manifests.py

Lines changed: 6 additions & 9 deletions
Original file line number | Diff line number | Diff line change
@@ -1,17 +1,14 @@
11
import ast
22

3-
import tokenize_rt as trt
4-
53
from omlish import check
64

75
from ..tokens import all as tks
8-
from .types import Tokens
96

107

118
##
129

1310

14-
def is_manifest_comment(line: Tokens) -> bool:
11+
def is_manifest_comment(line: tks.Tokens) -> bool:
1512
if not line:
1613
return False
1714

@@ -22,10 +19,10 @@ def is_manifest_comment(line: Tokens) -> bool:
2219

2320

2421
def comment_out_manifest_comment(
25-
line: Tokens,
26-
cls: list[Tokens],
22+
line: tks.Tokens,
23+
cls: list[tks.Tokens],
2724
i: int,
28-
) -> tuple[list[Tokens], int]:
25+
) -> tuple[list[tks.Tokens], int]:
2926
mls = [line]
3027
while True:
3128
mls.append(cls[i])
@@ -41,8 +38,8 @@ def comment_out_manifest_comment(
4138
check.isinstance(check.single(mmod.body), ast.Assign)
4239
break
4340

44-
out: list[Tokens] = [
45-
[trt.Token('COMMENT', '# ' + tks.join_toks(ml))]
41+
out: list[tks.Tokens] = [
42+
[tks.Token('COMMENT', '# ' + tks.join_toks(ml))]
4643
for ml in mls
4744
]
4845

omdev/amalg/resources.py

Lines changed: 23 additions & 26 deletions
Original file line number | Diff line number | Diff line change
@@ -4,10 +4,7 @@
44
import os.path
55
import typing as ta
66

7-
import tokenize_rt as trt
8-
97
from ..tokens import all as tks
10-
from .types import Tokens
118

129

1310
##
@@ -19,7 +16,7 @@ class RootLevelResourcesRead(ta.NamedTuple):
1916
resource: str
2017

2118

22-
def is_root_level_resources_read(lts: Tokens) -> RootLevelResourcesRead | None:
19+
def is_root_level_resources_read(lts: tks.Tokens) -> RootLevelResourcesRead | None:
2320
wts = list(tks.ignore_ws(lts, keep=['INDENT']))
2421

2522
if not tks.match_toks(wts, [
@@ -47,36 +44,36 @@ def is_root_level_resources_read(lts: Tokens) -> RootLevelResourcesRead | None:
4744
def build_resource_lines(
4845
rsrc: RootLevelResourcesRead,
4946
path: str,
50-
) -> list[Tokens]:
47+
) -> list[tks.Tokens]:
5148
rf = os.path.join(os.path.dirname(path), rsrc.resource)
5249

5350
if rsrc.kind == 'binary':
5451
with open(rf, 'rb') as bf:
5552
rb = bf.read() # noqa
5653

57-
out: list[Tokens] = [[
58-
trt.Token(name='NAME', src=rsrc.variable),
59-
trt.Token(name='UNIMPORTANT_WS', src=' '),
60-
trt.Token(name='OP', src='='),
61-
trt.Token(name='UNIMPORTANT_WS', src=' '),
62-
trt.Token(name='NAME', src='base64'),
63-
trt.Token(name='OP', src='.'),
64-
trt.Token(name='NAME', src='b64decode'),
65-
trt.Token(name='OP', src='('),
66-
trt.Token(name='NL', src='\n'),
54+
out: list[tks.Tokens] = [[
55+
tks.Token(name='NAME', src=rsrc.variable),
56+
tks.Token(name='UNIMPORTANT_WS', src=' '),
57+
tks.Token(name='OP', src='='),
58+
tks.Token(name='UNIMPORTANT_WS', src=' '),
59+
tks.Token(name='NAME', src='base64'),
60+
tks.Token(name='OP', src='.'),
61+
tks.Token(name='NAME', src='b64decode'),
62+
tks.Token(name='OP', src='('),
63+
tks.Token(name='NL', src='\n'),
6764
]]
6865

6966
rb64 = base64.b64encode(rb).decode('ascii')
7067
for chunk in itertools.batched(rb64, 96):
7168
out.append([
72-
trt.Token(name='UNIMPORTANT_WS', src=' '),
73-
trt.Token(name='STRING', src=f"'{''.join(chunk)}'"),
74-
trt.Token(name='NL', src='\n'),
69+
tks.Token(name='UNIMPORTANT_WS', src=' '),
70+
tks.Token(name='STRING', src=f"'{''.join(chunk)}'"),
71+
tks.Token(name='NL', src='\n'),
7572
])
7673

7774
out.append([
78-
trt.Token(name='OP', src=')'),
79-
trt.Token(name='NEWLINE', src='\n'),
75+
tks.Token(name='OP', src=')'),
76+
tks.Token(name='NEWLINE', src='\n'),
8077
])
8178

8279
return out
@@ -87,12 +84,12 @@ def build_resource_lines(
8784
rt = rt.replace('\\', '\\\\') # Escape backslashes
8885
rt = rt.replace('"""', r'\"\"\"')
8986
return [[
90-
trt.Token(name='NAME', src=rsrc.variable),
91-
trt.Token(name='UNIMPORTANT_WS', src=' '),
92-
trt.Token(name='OP', src='='),
93-
trt.Token(name='UNIMPORTANT_WS', src=' '),
94-
trt.Token(name='STRING', src=f'"""\\\n{rt}""" # noqa\n'),
95-
trt.Token(name='NEWLINE', src=''),
87+
tks.Token(name='NAME', src=rsrc.variable),
88+
tks.Token(name='UNIMPORTANT_WS', src=' '),
89+
tks.Token(name='OP', src='='),
90+
tks.Token(name='UNIMPORTANT_WS', src=' '),
91+
tks.Token(name='STRING', src=f'"""\\\n{rt}""" # noqa\n'),
92+
tks.Token(name='NEWLINE', src=''),
9693
]]
9794

9895
else:

omdev/amalg/srcfiles.py

Lines changed: 6 additions & 9 deletions
Original file line number | Diff line number | Diff line change
@@ -1,8 +1,6 @@
11
import dataclasses as dc
22
import typing as ta
33

4-
import tokenize_rt as trt
5-
64
from omlish import collections as col
75
from omlish import lang
86

@@ -15,7 +13,6 @@
1513
from .resources import is_root_level_resources_read
1614
from .strip import split_header_lines
1715
from .strip import strip_header_lines
18-
from .types import Tokens
1916
from .typing import Typing
2017
from .typing import is_root_level_if_type_checking_block
2118
from .typing import make_typing
@@ -30,13 +27,13 @@ class SrcFile:
3027
path: str
3128

3229
src: str = dc.field(repr=False)
33-
tokens: Tokens = dc.field(repr=False)
34-
lines: ta.Sequence[Tokens] = dc.field(repr=False)
30+
tokens: tks.Tokens = dc.field(repr=False)
31+
lines: ta.Sequence[tks.Tokens] = dc.field(repr=False)
3532

36-
header_lines: ta.Sequence[Tokens] = dc.field(repr=False)
33+
header_lines: ta.Sequence[tks.Tokens] = dc.field(repr=False)
3734
imports: ta.Sequence[Import] = dc.field(repr=False)
3835
typings: ta.Sequence[Typing] = dc.field(repr=False)
39-
content_lines: ta.Sequence[Tokens] = dc.field(repr=False)
36+
content_lines: ta.Sequence[tks.Tokens] = dc.field(repr=False)
4037

4138
ruff_noqa: ta.AbstractSet[str] = dc.field(repr=False)
4239

@@ -51,7 +48,7 @@ def make_src_file(
5148
with open(path) as f:
5249
src = f.read().strip()
5350

54-
tokens = trt.src_to_tokens(src)
51+
tokens = tks.src_to_tokens(src)
5552
lines = tks.split_lines(tokens)
5653

5754
header_lines, cls = split_header_lines(lines)
@@ -61,7 +58,7 @@ def make_src_file(
6158

6259
imps: list[Import] = []
6360
tys: list[Typing] = []
64-
ctls: list[Tokens] = []
61+
ctls: list[tks.Tokens] = []
6562

6663
has_binary_resources = False
6764

omdev/amalg/strip.py

Lines changed: 3 additions & 4 deletions
Original file line number | Diff line number | Diff line change
@@ -3,7 +3,6 @@
33

44
from .. import magic
55
from ..tokens import all as tks
6-
from .types import Tokens
76

87

98
##
@@ -12,7 +11,7 @@
1211
HEADER_NAMES = (*tks.WS_NAMES, 'COMMENT', 'STRING')
1312

1413

15-
def split_header_lines(lines: ta.Iterable[Tokens]) -> tuple[list[Tokens], list[Tokens]]:
14+
def split_header_lines(lines: ta.Iterable[tks.Tokens]) -> tuple[list[tks.Tokens], list[tks.Tokens]]:
1615
ws = []
1716
nws = []
1817
for line in (it := iter(lines)):
@@ -31,7 +30,7 @@ def split_header_lines(lines: ta.Iterable[Tokens]) -> tuple[list[Tokens], list[T
3130
IF_MAIN_PAT = re.compile(r'if\s+__name__\s+==\s+[\'"]__main__[\'"]\s*:')
3231

3332

34-
def strip_main_lines(cls: ta.Sequence[Tokens]) -> list[Tokens]:
33+
def strip_main_lines(cls: ta.Sequence[tks.Tokens]) -> list[tks.Tokens]:
3534
out = []
3635

3736
for l in (it := iter(cls)):
@@ -59,7 +58,7 @@ def strip_main_lines(cls: ta.Sequence[Tokens]) -> list[Tokens]:
5958
)
6059

6160

62-
def strip_header_lines(hls: ta.Sequence[Tokens]) -> list[Tokens]:
61+
def strip_header_lines(hls: ta.Sequence[tks.Tokens]) -> list[tks.Tokens]:
6362
if hls and tks.join_toks(hls[0]).startswith('#!'):
6463
hls = hls[1:]
6564
out = []

omdev/amalg/typing.py

Lines changed: 8 additions & 7 deletions
Original file line number | Diff line number | Diff line change
@@ -1,7 +1,8 @@
11
import dataclasses as dc
22

3+
from omlish import check
4+
35
from ..tokens import all as tks
4-
from .types import Tokens
56

67

78
##
@@ -18,11 +19,11 @@ class Typing:
1819
src_path: str
1920
line: int
2021

21-
toks: Tokens = dc.field(repr=False)
22+
toks: tks.Tokens = dc.field(repr=False)
2223

2324

2425
def _is_typing(
25-
lts: Tokens,
26+
lts: tks.Tokens,
2627
*,
2728
exclude_newtypes: bool = False,
2829
) -> bool:
@@ -48,7 +49,7 @@ def _is_typing(
4849

4950

5051
def make_typing(
51-
lts: Tokens,
52+
lts: tks.Tokens,
5253
*,
5354
src_path: str,
5455
) -> Typing | None:
@@ -63,7 +64,7 @@ def make_typing(
6364
src=tks.join_toks(lts),
6465

6566
src_path=src_path,
66-
line=ft.line,
67+
line=check.not_none(ft.line),
6768

6869
toks=lts,
6970
)
@@ -72,7 +73,7 @@ def make_typing(
7273
##
7374

7475

75-
def is_root_level_if_type_checking_block(lts: Tokens) -> bool:
76+
def is_root_level_if_type_checking_block(lts: tks.Tokens) -> bool:
7677
return tks.match_toks(tks.ignore_ws(lts, keep=['INDENT']), [
7778
('NAME', 'if'),
7879
('NAME', 'ta'),
@@ -83,7 +84,7 @@ def is_root_level_if_type_checking_block(lts: Tokens) -> bool:
8384

8485

8586
def skip_root_level_if_type_checking_block(
86-
cls: list[Tokens],
87+
cls: list[tks.Tokens],
8788
i: int,
8889
) -> int:
8990
def skip_block():

omdev/ci/compose.py

Lines changed: 4 additions & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -200,7 +200,10 @@ def run(self) -> None:
200200
'-f', compose_file,
201201
'run',
202202
'--rm',
203-
*itertools.chain.from_iterable(['-e', k] for k in (self._cfg.cmd.env or [])),
203+
*itertools.chain.from_iterable(
204+
['-e', k]
205+
for k in (self._cfg.cmd.env or [])
206+
),
204207
*(self._cfg.run_options or []),
205208
self._cfg.service,
206209
'sh', '-c', shlex.quote(self._cfg.cmd.s),

omdev/ci/requirements.py

Lines changed: 2 additions & 2 deletions
Original file line number | Diff line number | Diff line change
@@ -73,8 +73,8 @@ def download_requirements(
7373
'pip',
7474
'download',
7575
'-d', '/requirements',
76-
*itertools.chain.from_iterable([
76+
*itertools.chain.from_iterable(
7777
['-r', f'/requirements_txt/{os.path.basename(rt)}']
7878
for rt in requirements_txts
79-
]),
79+
),
8080
)

0 commit comments

Comments (0)