 import os.path
 import typing as ta
 
-import tokenize_rt as trt
-
 from ..tokens import all as tks
-from .types import Tokens
 
 
 ##
@@ -19,7 +16,7 @@ class RootLevelResourcesRead(ta.NamedTuple):
     resource: str
 
 
-def is_root_level_resources_read(lts: Tokens) -> RootLevelResourcesRead | None:
+def is_root_level_resources_read(lts: tks.Tokens) -> RootLevelResourcesRead | None:
     wts = list(tks.ignore_ws(lts, keep=['INDENT']))
 
     if not tks.match_toks(wts, [
@@ -47,36 +44,36 @@ def is_root_level_resources_read(lts: Tokens) -> RootLevelResourcesRead | None:
 def build_resource_lines(
         rsrc: RootLevelResourcesRead,
         path: str,
-) -> list[Tokens]:
+) -> list[tks.Tokens]:
     rf = os.path.join(os.path.dirname(path), rsrc.resource)
 
     if rsrc.kind == 'binary':
         with open(rf, 'rb') as bf:
             rb = bf.read()  # noqa
 
-        out: list[Tokens] = [[
-            trt.Token(name='NAME', src=rsrc.variable),
-            trt.Token(name='UNIMPORTANT_WS', src=' '),
-            trt.Token(name='OP', src='='),
-            trt.Token(name='UNIMPORTANT_WS', src=' '),
-            trt.Token(name='NAME', src='base64'),
-            trt.Token(name='OP', src='.'),
-            trt.Token(name='NAME', src='b64decode'),
-            trt.Token(name='OP', src='('),
-            trt.Token(name='NL', src='\n'),
+        out: list[tks.Tokens] = [[
+            tks.Token(name='NAME', src=rsrc.variable),
+            tks.Token(name='UNIMPORTANT_WS', src=' '),
+            tks.Token(name='OP', src='='),
+            tks.Token(name='UNIMPORTANT_WS', src=' '),
+            tks.Token(name='NAME', src='base64'),
+            tks.Token(name='OP', src='.'),
+            tks.Token(name='NAME', src='b64decode'),
+            tks.Token(name='OP', src='('),
+            tks.Token(name='NL', src='\n'),
         ]]
 
         rb64 = base64.b64encode(rb).decode('ascii')
         for chunk in itertools.batched(rb64, 96):
             out.append([
-                trt.Token(name='UNIMPORTANT_WS', src=' '),
-                trt.Token(name='STRING', src=f"'{''.join(chunk)}'"),
-                trt.Token(name='NL', src='\n'),
+                tks.Token(name='UNIMPORTANT_WS', src=' '),
+                tks.Token(name='STRING', src=f"'{''.join(chunk)}'"),
+                tks.Token(name='NL', src='\n'),
             ])
 
         out.append([
-            trt.Token(name='OP', src=')'),
-            trt.Token(name='NEWLINE', src='\n'),
+            tks.Token(name='OP', src=')'),
+            tks.Token(name='NEWLINE', src='\n'),
         ])
 
         return out
@@ -87,12 +84,12 @@ def build_resource_lines(
         rt = rt.replace('\\', '\\\\')  # Escape backslashes
         rt = rt.replace('"""', r'\"\"\"')
         return [[
-            trt.Token(name='NAME', src=rsrc.variable),
-            trt.Token(name='UNIMPORTANT_WS', src=' '),
-            trt.Token(name='OP', src='='),
-            trt.Token(name='UNIMPORTANT_WS', src=' '),
-            trt.Token(name='STRING', src=f'"""\\\n{rt}"""  # noqa\n'),
-            trt.Token(name='NEWLINE', src=''),
+            tks.Token(name='NAME', src=rsrc.variable),
+            tks.Token(name='UNIMPORTANT_WS', src=' '),
+            tks.Token(name='OP', src='='),
+            tks.Token(name='UNIMPORTANT_WS', src=' '),
+            tks.Token(name='STRING', src=f'"""\\\n{rt}"""  # noqa\n'),
+            tks.Token(name='NEWLINE', src=''),
         ]]
 
     else: