
Commit 55a2fb1

Merge in 2 changes from psf/black (#4)
1 parent 8d9f1b3 commit 55a2fb1

2 files changed (+26, -16 lines)

src/blib2to3/pgen2/tokenize.py

Lines changed: 22 additions & 16 deletions
@@ -34,6 +34,7 @@
     Iterator,
     List,
     Optional,
+    Set,
     Text,
     Tuple,
     Pattern,
@@ -66,19 +67,19 @@
 del token


-def group(*choices):
+def group(*choices: str) -> str:
     return "(" + "|".join(choices) + ")"


-def any(*choices):
+def any(*choices: str) -> str:
     return group(*choices) + "*"


-def maybe(*choices):
+def maybe(*choices: str) -> str:
     return group(*choices) + "?"


-def _combinations(*l):
+def _combinations(*l: str) -> Set[str]:
     return set(x + y for x in l for y in l + ("",) if x.casefold() != y.casefold())


@@ -163,7 +164,6 @@ def _combinations(*l):
     '"""': double3prog,
     **{f"{prefix}'''": single3prog for prefix in _strprefixes},
     **{f'{prefix}"""': double3prog for prefix in _strprefixes},
-    **{prefix: None for prefix in _strprefixes},
 }

 triple_quoted: Final = (
@@ -188,15 +188,19 @@ class StopTokenizing(Exception):
     pass


-def printtoken(type, token, xxx_todo_changeme, xxx_todo_changeme1, line):  # for testing
-    (srow, scol) = xxx_todo_changeme
-    (erow, ecol) = xxx_todo_changeme1
+Coord = Tuple[int, int]
+
+
+def printtoken(
+    type: int, token: Text, srow_col: Coord, erow_col: Coord, line: Text
+) -> None:  # for testing
+    (srow, scol) = srow_col
+    (erow, ecol) = erow_col
     print(
         "%d,%d-%d,%d:\t%s\t%s" % (srow, scol, erow, ecol, tok_name[type], repr(token))
     )


-Coord = Tuple[int, int]
 TokenEater = Callable[[int, Text, Coord, Coord, Text], None]


@@ -220,7 +224,7 @@ def tokenize(readline: Callable[[], Text], tokeneater: TokenEater = printtoken)


 # backwards compatible interface
-def tokenize_loop(readline, tokeneater):
+def tokenize_loop(readline: Callable[[], Text], tokeneater: TokenEater) -> None:
     for token_info in generate_tokens(readline):
         tokeneater(*token_info)

@@ -230,7 +234,6 @@ def tokenize_loop(readline, tokeneater):


 class Untokenizer:
-
     tokens: List[Text]
     prev_row: int
     prev_col: int
@@ -599,11 +602,15 @@ def generate_tokens(
                 ):
                     if token[-1] == "\n":  # continued string
                         strstart = (lnum, start)
-                        endprog = (
-                            endprogs[initial]
-                            or endprogs[token[1]]
-                            or endprogs[token[2]]
+                        maybe_endprog = (
+                            endprogs.get(initial)
+                            or endprogs.get(token[1])
+                            or endprogs.get(token[2])
                         )
+                        assert (
+                            maybe_endprog is not None
+                        ), f"endprog not found for {token}"
+                        endprog = maybe_endprog
                         contstr, needcont = line[start:], 1
                         contline = line
                         break
@@ -631,7 +638,6 @@ def generate_tokens(

                 if token in ("def", "for"):
                     if stashed and stashed[0] == NAME and stashed[1] == "async":
-
                         if token == "def":
                             async_def = True
                             async_def_indent = indents[-1]
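A note on the second hunk: group, any, and maybe are tiny regex-source builders, and _combinations enumerates case-distinct string-prefix pairs. A runnable sketch of what the newly annotated signatures describe (illustration only, not part of the commit):

from typing import Set


def group(*choices: str) -> str:
    return "(" + "|".join(choices) + ")"


def maybe(*choices: str) -> str:
    return group(*choices) + "?"


def _combinations(*l: str) -> Set[str]:
    # Pair up prefixes whose casefolded forms differ, plus each prefix alone.
    return set(x + y for x in l for y in l + ("",) if x.casefold() != y.casefold())


print(group("a", "b"))                  # (a|b)
print(maybe("0x"))                      # (0x)?
print(sorted(_combinations("r", "b")))  # ['b', 'br', 'r', 'rb']

The Set import added in the first hunk exists only to spell the return type of _combinations.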

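The generate_tokens hunk is the behavioral heart of the change. The earlier hunk drops the **{prefix: None for prefix in _strprefixes} entries from the endprogs table, so a bare prefix such as "f" is now an absent key rather than a stored None; plain endprogs[initial] indexing would raise KeyError where it previously yielded None and fell through the or-chain. Switching to .get() restores the fall-through, and the assert keeps the old "an end pattern must exist" invariant while letting a type checker narrow the Optional result. A minimal sketch of the pattern with toy values (the real mapping holds compiled end-of-string regexes):

import re
from typing import Dict, Optional, Pattern

# Toy stand-in for the module's endprogs table.
endprogs: Dict[str, Pattern[str]] = {"'": re.compile(r".*'"), '"': re.compile(r'.*"')}

token = "f'abc"     # hypothetical: an f-string left open at end of line
initial = token[0]  # "f" is no longer a key after the None entries were dropped

# endprogs[initial] would raise KeyError here; .get() returns None and the
# or-chain falls through to the quote character at token[1].
maybe_endprog: Optional[Pattern[str]] = (
    endprogs.get(initial) or endprogs.get(token[1]) or endprogs.get(token[2])
)
assert maybe_endprog is not None, f"endprog not found for {token}"
endprog = maybe_endprog  # the type checker now sees Pattern[str], not Optional
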
tests/data/simple_cases/fstring.py

Lines changed: 4 additions & 0 deletions
@@ -7,6 +7,8 @@
 f"\"{f'{nested} inner'}\" outer"
 f"space between opening braces: { {a for a in (1, 2, 3)}}"
 f'Hello \'{tricky + "example"}\''
+f"Tried directories {str(rootdirs)} \
+but none started with prefix {parentdir_prefix}"

 # output

@@ -19,3 +21,5 @@
 f"\"{f'{nested} inner'}\" outer"
 f"space between opening braces: { {a for a in (1, 2, 3)}}"
 f'Hello \'{tricky + "example"}\''
+f"Tried directories {str(rootdirs)} \
+but none started with prefix {parentdir_prefix}"
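The new test pairs with the tokenizer change above: an f-string whose literal text continues onto the next physical line via a trailing backslash takes the continued-string path, where initial is "f" and the endprogs lookup must fall through to the quote character. (The case appears twice because these test files give the input and then the expected # output.) A rough illustration with the stdlib tokenizer, as a sketch; the result is version-dependent, since Python 3.12's PEP 701 splits f-strings into FSTRING_* tokens while earlier versions emit one STRING token:

import io
import tokenize

src = (
    'f"Tried directories {str(rootdirs)} \\\n'
    'but none started with prefix {parentdir_prefix}"\n'
)
toks = list(tokenize.generate_tokens(io.StringIO(src).readline))
# On Python <= 3.11 this prints one STRING token spanning rows 1 to 2.
print(tokenize.tok_name[toks[0].type], toks[0].start, toks[0].end)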
