
Commit 1174fbc

[pre-commit.ci] auto fixes from pre-commit.com hooks
for more information, see https://pre-commit.ci
1 parent: b37624f

File tree

1 file changed: +7 −8

src/blib2to3/pgen2/tokenize.py

@@ -58,13 +58,12 @@
 __credits__ = "GvR, ESR, Tim Peters, Thomas Wouters, Fred Drake, Skip Montanaro"
 
 import re
-from codecs import BOM_UTF8, lookup
 import token
+from codecs import BOM_UTF8, lookup
 
 import pytokens
 from pytokens import TokenType
 
-
 from . import token
 
 __all__ = [x for x in dir(token) if x[0] != "_"] + [
@@ -109,10 +108,10 @@ def token_type(token: pytokens.Token, source: str) -> int:
     if tok_type == NAME:
         if source == "async":
             return ASYNC
-
+
         if source == "await":
             return AWAIT
-
+
     return tok_type
 
 def tokenize(source: str, grammar: Grammar | None = None) -> Iterator[TokenInfo]:
@@ -126,7 +125,7 @@ def tokenize(source: str, grammar: Grammar | None = None) -> Iterator[TokenInfo]:
                 continue
 
             token_string = source[token.start_index:token.end_index]
-
+
             if token.type == TokenType.newline and token_string == '':
                 # Black doesn't yield empty newline tokens at the end of a file
                 # if there's no newline at the end of a file.
@@ -145,7 +144,7 @@ def tokenize(source: str, grammar: Grammar | None = None) -> Iterator[TokenInfo]:
                     yield (token_type(token, token_string), token_string, (token.start_line, start_col), (token.end_line, end_col), source_line)
             else:
                 yield (token_type(token, token_string), token_string, (token.start_line, token.start_col), (token.end_line, token.end_col), source_line)
-    except Exception as exc:  # TODO:
+    except Exception as exc:  # TODO:
         raise TokenError(repr(exc), (line, column))
 
 def printtoken(
@@ -162,6 +161,6 @@ def printtoken(
         token_iterator = tokenize(open(sys.argv[1]).read())
     else:
        token_iterator = tokenize(sys.stdin.read())
-
+
     for tok in token_iterator:
-        printtoken(*tok)
+        printtoken(*tok)

(Where a removed line and the added line that replaces it look blank or identical above, the difference appears to be whitespace-only, the kind of trailing-whitespace and formatting cleanup these hooks apply, so nothing visible changes in the rendered text.)
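
For reference, the tokenize() generator touched here is built on pytokens and yields 5-tuples of (type, string, (start_line, start_col), (end_line, end_col), source_line), as the yield statements in the diff show. Below is a minimal usage sketch; it is not part of this commit, and the import path assumes Black's src/ layout is installed or on sys.path so the vendored module resolves as blib2to3.pgen2.tokenize.

# Usage sketch only, not part of this commit. Assumes the vendored package
# is importable as blib2to3.pgen2.tokenize (Black's src/ layout).
from blib2to3.pgen2.tokenize import tokenize

source = "def f(x):\n    return x + 1\n"

# tokenize() yields (type, string, (start_line, start_col),
# (end_line, end_col), source_line) tuples, the same shape printtoken() prints.
for tok_type, tok_str, start, end, _line in tokenize(source):
    print(tok_type, repr(tok_str), start, end)

Passing no grammar leaves the optional grammar argument at its default of None, which is also how the __main__ block at the bottom of the file calls tokenize() on stdin or on a file's contents.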
