Home
last modified time | relevance | path

Searched refs:generate_tokens (Results 1 - 22 of 22) sorted by relevance

/third_party/python/Lib/lib2to3/pgen2/
H A Ddriver.py88 tokens = tokenize.generate_tokens(stream.readline)
102 tokens = tokenize.generate_tokens(io.StringIO(text).readline)
H A Dtokenize.py6 generate_tokens(readline) is a generator that breaks a stream of
38 "generate_tokens", "untokenize"]
159 tuples generated by generate_tokens().
168 for token_info in generate_tokens(readline):
326 t1 = [tok[:2] for tok in generate_tokens(f.readline)]
329 t2 = [tok[:2] for tok in generate_tokens(readline)]
335 def generate_tokens(readline): function
337 The generate_tokens() generator requires one argument, readline, which
H A Dpgen.py19 self.generator = tokenize.generate_tokens(stream.readline)
/third_party/python/Tools/scripts/
H A Dfinddiv.py59 g = tokenize.generate_tokens(fp.readline)
H A Dcleanfuture.py164 get = tokenize.generate_tokens(self.getline).__next__
H A Dhighlight.py37 for tok in tokenize.generate_tokens(readline):
H A Dfixdiv.py218 g = tokenize.generate_tokens(f.readline)
H A Dreindent.py202 tokens = tokenize.generate_tokens(self.getline)
/third_party/python/Lib/test/
H A Dtest_tabnanny.py266 tabnanny.process_tokens(tokenize.generate_tokens(f.readline))
282 tokens = tokenize.generate_tokens(f.readline)
H A Dtest_tokenize.py5 open as tokenize_open, Untokenizer, generate_tokens,
954 result = stringify_tokens_from_source(generate_tokens(f.readline), s)
/third_party/python/Tools/peg_generator/pegen/
H A Dtestutil.py40 tokenizer = Tokenizer(tokenize.generate_tokens(file.readline)) # type: ignore # typeshed issue #3515
H A Dbuild.py172 tokenizer = Tokenizer(tokenize.generate_tokens(file.readline), verbose=verbose_tokenizer)
H A Dparser.py293 tokengen = tokenize.generate_tokens(file.readline)
/third_party/python/Lib/idlelib/
H A Drunscript.py65 tabnanny.process_tokens(tokenize.generate_tokens(f.readline))
H A Deditor.py1647 tokens = tokenize.generate_tokens(self.readline)
/third_party/python/Lib/lib2to3/
H A Dpatcomp.py31 tokens = tokenize.generate_tokens(io.StringIO(input).readline)
H A Drefactor.py113 gen = tokenize.generate_tokens(io.StringIO(source).readline)
644 tokens = tokenize.generate_tokens(self.gen_lines(block, indent).__next__)
/third_party/python/Lib/
H A Dtabnanny.py105 process_tokens(tokenize.generate_tokens(f.readline))
H A Dcgitb.py88 for ttype, token, start, end, line in tokenize.generate_tokens(reader):
H A Dtokenize.py42 __all__ = token.__all__ + ["tokenize", "generate_tokens", "detect_encoding",
616 def generate_tokens(readline): function
H A Dtrace.py363 tok = tokenize.generate_tokens(f.readline)
H A Dinspect.py1228 tokens = tokenize.generate_tokens(iter(lines).__next__)

Completed in 20 milliseconds