3434 Iterator ,
3535 List ,
3636 Optional ,
37+ Set ,
3738 Text ,
3839 Tuple ,
3940 Pattern ,
6667del token
6768
6869
def group(*choices: str) -> str:
    """Wrap *choices* in a regex alternation group, e.g. ``("a", "b")`` -> ``(a|b)``."""
    return "({})".format("|".join(choices))
7172
7273
def any(*choices: str) -> str:
    """Return a regex matching zero or more repetitions of any of *choices*.

    NOTE: intentionally shadows the ``any`` builtin — historical module API,
    so the name must stay.
    """
    # Equivalent to group(*choices) + "*", inlined.
    return "(" + "|".join(choices) + ")*"
7576
7677
def maybe(*choices: str) -> str:
    """Return a regex optionally matching one of *choices* (group + ``?``)."""
    # Equivalent to group(*choices) + "?", inlined.
    return "(%s)?" % "|".join(choices)
7980
8081
81- def _combinations (* l ) :
82+ def _combinations (* l : str ) -> Set [ str ] :
8283 return set (x + y for x in l for y in l + ("" ,) if x .casefold () != y .casefold ())
8384
8485
@@ -163,7 +164,6 @@ def _combinations(*l):
163164 '"""' : double3prog ,
164165 ** {f"{ prefix } '''" : single3prog for prefix in _strprefixes },
165166 ** {f'{ prefix } """' : double3prog for prefix in _strprefixes },
166- ** {prefix : None for prefix in _strprefixes },
167167}
168168
169169triple_quoted : Final = (
class StopTokenizing(Exception):
    """Marker exception used to stop tokenization early (raised by a callback)."""
189189
190190
# (row, column) position within the source being tokenized.
Coord = Tuple[int, int]


def printtoken(
    type: int, token: Text, srow_col: Coord, erow_col: Coord, line: Text
) -> None:  # for testing
    """Debug helper: print one token as ``srow,scol-erow,ecol: NAME  repr``.

    ``line`` is accepted to match the TokenEater signature but is unused.
    """
    srow, scol = srow_col
    erow, ecol = erow_col
    location = "%d,%d-%d,%d:" % (srow, scol, erow, ecol)
    print("%s\t%s\t%s" % (location, tok_name[type], repr(token)))


# Callback signature fed by tokenize(): (type, string, start, end, line).
TokenEater = Callable[[int, Text, Coord, Coord, Text], None]
201205
202206
@@ -220,7 +224,7 @@ def tokenize(readline: Callable[[], Text], tokeneater: TokenEater = printtoken)
220224
221225
# Backwards-compatible interface: push tokens to a callback instead of yielding.
def tokenize_loop(readline: Callable[[], Text], tokeneater: TokenEater) -> None:
    """Drive generate_tokens() over *readline*, handing each token tuple to *tokeneater*."""
    for tok in generate_tokens(readline):
        tokeneater(*tok)
226230
@@ -230,7 +234,6 @@ def tokenize_loop(readline, tokeneater):
230234
231235
232236class Untokenizer :
233-
234237 tokens : List [Text ]
235238 prev_row : int
236239 prev_col : int
@@ -599,11 +602,15 @@ def generate_tokens(
599602 ):
600603 if token [- 1 ] == "\n " : # continued string
601604 strstart = (lnum , start )
602- endprog = (
603- endprogs [ initial ]
604- or endprogs [ token [1 ]]
605- or endprogs [ token [2 ]]
605+ maybe_endprog = (
606+ endprogs . get ( initial )
607+ or endprogs . get ( token [1 ])
608+ or endprogs . get ( token [2 ])
606609 )
610+ assert (
611+ maybe_endprog is not None
612+ ), f"endprog not found for { token } "
613+ endprog = maybe_endprog
607614 contstr , needcont = line [start :], 1
608615 contline = line
609616 break
@@ -631,7 +638,6 @@ def generate_tokens(
631638
632639 if token in ("def" , "for" ):
633640 if stashed and stashed [0 ] == NAME and stashed [1 ] == "async" :
634-
635641 if token == "def" :
636642 async_def = True
637643 async_def_indent = indents [- 1 ]
0 commit comments