Lines Matching refs:tokenize
2 import tokenize
10 def shorttok(tok: tokenize.TokenInfo) -> str:
15 """Caching wrapper for the tokenize module.
20 _tokens: List[tokenize.TokenInfo]
23 self, tokengen: Iterator[tokenize.TokenInfo], *, path: str = "", verbose: bool = False
34 def getnext(self) -> tokenize.TokenInfo:
43 def peek(self) -> tokenize.TokenInfo:
47 if tok.type in (tokenize.NL, tokenize.COMMENT):
62 def diagnose(self) -> tokenize.TokenInfo:
67 def get_last_non_whitespace_token(self) -> tokenize.TokenInfo:
69 if tok.type != tokenize.ENDMARKER and (
70 tok.type < tokenize.NEWLINE or tok.type > tokenize.DEDENT
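
The matches above all point at a Tokenizer class that caches tokens pulled from the standard tokenize module so a parser can peek ahead and rewind (getnext, peek, diagnose, get_last_non_whitespace_token). The following is a minimal sketch, reconstructed only from the signatures listed above: the method bodies, the shorttok formatting, and the usage at the bottom are assumptions for illustration, not the file's actual implementation.

    import io
    import token
    import tokenize
    from typing import Iterator, List


    def shorttok(tok: tokenize.TokenInfo) -> str:
        # Assumed formatting: position, token type name, and token string on one line.
        return f"{tok.start[0]}.{tok.start[1]}: {token.tok_name[tok.type]}:{tok.string!r}"


    class Tokenizer:
        """Caching wrapper for the tokenize module (sketch).

        Tokens are pulled lazily from `tokengen` and cached in `_tokens`,
        so a parser can look ahead and back up by adjusting `_index`.
        """

        _tokens: List[tokenize.TokenInfo]

        def __init__(
            self, tokengen: Iterator[tokenize.TokenInfo], *, path: str = "", verbose: bool = False
        ):
            self._tokengen = tokengen
            self._tokens = []
            self._index = 0
            self._path = path
            self._verbose = verbose

        def getnext(self) -> tokenize.TokenInfo:
            # Return the next significant token and advance the index.
            tok = self.peek()
            self._index += 1
            return tok

        def peek(self) -> tokenize.TokenInfo:
            # Return the next significant token without advancing the index.
            while self._index == len(self._tokens):
                tok = next(self._tokengen)
                # NL and COMMENT tokens are never cached, so callers only
                # ever see tokens that matter to the grammar.
                if tok.type in (tokenize.NL, tokenize.COMMENT):
                    continue
                self._tokens.append(tok)
            return self._tokens[self._index]

        def diagnose(self) -> tokenize.TokenInfo:
            # Assumed behavior: return the most recent token, for error reporting.
            if not self._tokens:
                self.peek()
            return self._tokens[-1]


    if __name__ == "__main__":
        src = "x = 1  # comment\n"
        gen = tokenize.generate_tokens(io.StringIO(src).readline)
        t = Tokenizer(gen)
        print(shorttok(t.getnext()))   # NAME:'x'
        print(shorttok(t.peek()))      # OP:'=' (comment was skipped, index unchanged)

Caching every significant token in a list, rather than consuming the generator directly, is what makes backtracking cheap: a PEG-style parser can record the current index as a mark and later reset to it without re-tokenizing the source.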