@@ -58,13 +58,12 @@
 __credits__ = "GvR, ESR, Tim Peters, Thomas Wouters, Fred Drake, Skip Montanaro"
 
 import re
-from codecs import BOM_UTF8, lookup
 import token
+from codecs import BOM_UTF8, lookup
 
 import pytokens
 from pytokens import TokenType
 
-
 from . import token
 
 __all__ = [x for x in dir(token) if x[0] != "_"] + [
@@ -109,10 +108,10 @@ def token_type(token: pytokens.Token, source: str) -> int:
     if tok_type == NAME:
         if source == "async":
             return ASYNC
-
+
         if source == "await":
             return AWAIT
-
+
     return tok_type
 
 def tokenize(source: str, grammar: Grammar | None = None) -> Iterator[TokenInfo]:
@@ -126,7 +125,7 @@ def tokenize(source: str, grammar: Grammar | None = None) -> Iterator[TokenInfo]
                 continue
 
             token_string = source[token.start_index : token.end_index]
-
+
             if token.type == TokenType.newline and token_string == '':
                 # Black doesn't yield empty newline tokens at the end of a file
                 # if there's no newline at the end of a file.
@@ -145,7 +144,7 @@ def tokenize(source: str, grammar: Grammar | None = None) -> Iterator[TokenInfo]
                 yield (token_type(token, token_string), token_string, (token.start_line, start_col), (token.end_line, end_col), source_line)
             else:
                 yield (token_type(token, token_string), token_string, (token.start_line, token.start_col), (token.end_line, token.end_col), source_line)
-    except Exception as exc:  # TODO:
+    except Exception as exc:  # TODO:
         raise TokenError(repr(exc), (line, column))
 
 def printtoken(
@@ -162,6 +161,6 @@ def printtoken(
         token_iterator = tokenize(open(sys.argv[1]).read())
     else:
         token_iterator = tokenize(sys.stdin.read())
-
+
     for tok in token_iterator:
-        printtoken(*tok)
+        printtoken(*tok)
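A minimal usage sketch of the rewritten tokenizer, assuming the module is importable as blib2to3.pgen2.tokenize and that the sibling token module exposes a tok_name mapping (both import paths are assumptions about the surrounding package layout; the 5-tuple shape follows the yield statements in the diff above):

    # Usage sketch under the assumptions above: feed a source string to
    # tokenize() and unpack the 5-tuples it yields.
    from blib2to3.pgen2.token import tok_name
    from blib2to3.pgen2.tokenize import tokenize

    source = "async def f():\n    await g()\n"
    for tok_type, tok_str, start, end, source_line in tokenize(source):
        # NAME tokens spelled "async"/"await" come back as ASYNC/AWAIT
        # via token_type() above; everything else keeps its mapped type.
        print(tok_name[tok_type], repr(tok_str), start, end)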