Searched refs:generate_tokens (Results 1 - 22 of 22) sorted by relevance
/third_party/python/Lib/lib2to3/pgen2/

driver.py
    88: tokens = tokenize.generate_tokens(stream.readline)
    102: tokens = tokenize.generate_tokens(io.StringIO(text).readline)

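The two driver.py call sites show the usual ways to feed generate_tokens(): a file object's readline, or io.StringIO(text).readline for in-memory source. A minimal sketch of the pattern (the source string is illustrative, not from driver.py):

    import io
    import tokenize

    source = "x = 1\n"   # illustrative in-memory source
    # generate_tokens() takes any zero-argument callable returning
    # successive lines, so StringIO's readline works like a file's.
    for tok in tokenize.generate_tokens(io.StringIO(source).readline):
        print(tokenize.tok_name[tok.type], repr(tok.string))
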
tokenize.py
    6: generate_tokens(readline) is a generator that breaks a stream of
    38: "generate_tokens", "untokenize"]
    159: tuples generated by generate_tokens().
    168: for token_info in generate_tokens(readline):
    326: t1 = [tok[:2] for tok in generate_tokens(f.readline)]
    329: t2 = [tok[:2] for tok in generate_tokens(readline)]
    335: def generate_tokens(readline):
    337: The generate_tokens() generator requires one argument, readline, which

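The matches at lines 326 and 329 come from the module's round-trip doctest: untokenize() of the (type, string) pairs must produce source that tokenizes back to the same pairs. A sketch of that property, using only the stdlib tokenize module:

    import io
    import tokenize

    source = "a = b + 1\n"
    readline = io.StringIO(source).readline
    t1 = [tok[:2] for tok in tokenize.generate_tokens(readline)]
    # untokenize() accepts (type, string) pairs and returns source
    # text guaranteed to tokenize back to the same pairs.
    readline2 = io.StringIO(tokenize.untokenize(t1)).readline
    t2 = [tok[:2] for tok in tokenize.generate_tokens(readline2)]
    assert t1 == t2
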
pgen.py
    19: self.generator = tokenize.generate_tokens(stream.readline)

/third_party/python/Tools/scripts/

finddiv.py
    59: g = tokenize.generate_tokens(fp.readline)

cleanfuture.py
    164: get = tokenize.generate_tokens(self.getline).__next__

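cleanfuture.py binds the generator's __next__ so it can pull tokens one at a time under explicit control instead of in a for loop. A sketch of the same pattern (the source string is illustrative):

    import io
    import tokenize

    source = "from __future__ import annotations\n"
    get = tokenize.generate_tokens(io.StringIO(source).readline).__next__
    # Each call yields the next 5-tuple on demand.
    ttype, tstring, start, end, line = get()
    print(tokenize.tok_name[ttype], repr(tstring))   # NAME 'from'
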
highlight.py
    37: for tok in tokenize.generate_tokens(readline):

fixdiv.py
    218: g = tokenize.generate_tokens(f.readline)

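Both finddiv.py and fixdiv.py walk the token stream looking for division operators. A sketch of that kind of scan (the filename is hypothetical, and the exact filtering in those scripts may differ):

    import tokenize

    with open("example.py") as f:   # hypothetical input file
        for ttype, tstring, start, end, line in tokenize.generate_tokens(f.readline):
            # OP tokens carry the operator text in tstring.
            if ttype == tokenize.OP and tstring in ("/", "/="):
                print("%d:%d: %s" % (start[0], start[1], line.rstrip()))
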
reindent.py
    202: tokens = tokenize.generate_tokens(self.getline)

/third_party/python/Lib/test/

test_tabnanny.py
    266: tabnanny.process_tokens(tokenize.generate_tokens(f.readline))
    282: tokens = tokenize.generate_tokens(f.readline)

test_tokenize.py
    5: open as tokenize_open, Untokenizer, generate_tokens,
    954: result = stringify_tokens_from_source(generate_tokens(f.readline), s)

/third_party/python/Tools/peg_generator/pegen/

testutil.py
    40: tokenizer = Tokenizer(tokenize.generate_tokens(file.readline))  # type: ignore # typeshed issue #3515

build.py
    172: tokenizer = Tokenizer(tokenize.generate_tokens(file.readline), verbose=verbose_tokenizer)

parser.py
    293: tokengen = tokenize.generate_tokens(file.readline)

/third_party/python/Lib/idlelib/

runscript.py
    65: tabnanny.process_tokens(tokenize.generate_tokens(f.readline))

editor.py
    1647: tokens = tokenize.generate_tokens(self.readline)

/third_party/python/Lib/lib2to3/

patcomp.py
    31: tokens = tokenize.generate_tokens(io.StringIO(input).readline)

refactor.py
    113: gen = tokenize.generate_tokens(io.StringIO(source).readline)
    644: tokens = tokenize.generate_tokens(self.gen_lines(block, indent).__next__)

/third_party/python/Lib/

tabnanny.py
    105: process_tokens(tokenize.generate_tokens(f.readline))

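tabnanny hands the token stream straight to process_tokens(), which raises NannyNag on ambiguous indentation. A sketch of that use, again with a hypothetical example.py:

    import tabnanny
    import tokenize

    with open("example.py") as f:   # hypothetical input file
        try:
            tabnanny.process_tokens(tokenize.generate_tokens(f.readline))
        except tabnanny.NannyNag as nag:
            print("line %d: %s" % (nag.get_lineno(), nag.get_msg()))
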
cgitb.py
    88: for ttype, token, start, end, line in tokenize.generate_tokens(reader):

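The cgitb.py match shows how each token unpacks as a 5-tuple: (type, string, start, end, logical line). A sketch with an illustrative reader:

    import io
    import tokenize

    reader = io.StringIO("total = price * qty\n").readline
    for ttype, token, start, end, line in tokenize.generate_tokens(reader):
        # start and end are (row, col) positions within the source.
        if ttype == tokenize.NAME:
            print(token, "at", start)
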
tokenize.py
    42: __all__ = token.__all__ + ["tokenize", "generate_tokens", "detect_encoding",
    616: def generate_tokens(readline):

trace.py
    363: tok = tokenize.generate_tokens(f.readline)

inspect.py
    1228: tokens = tokenize.generate_tokens(iter(lines).__next__)

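inspect.py shows that readline need not come from a file at all: iter(lines).__next__ over a list of source lines works, because generate_tokens() treats StopIteration from readline as end of input. A sketch:

    import tokenize

    lines = ["def f():\n", "    return 1\n"]   # illustrative source lines
    tokens = tokenize.generate_tokens(iter(lines).__next__)
    print(sum(1 for _ in tokens), "tokens")
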
Completed in 20 milliseconds