/third_party/node/deps/v8/tools/cppgc/
  gen_cmake.py:470    tokens = lexer.tokenize(contents)

/third_party/ltp/tools/sparse/sparse-src/
  lib.c:353         token = tokenize(NULL, filename, fd, NULL, includepath);   in sparse_file()
  tokenize.c:1018   struct token * tokenize(const struct position *pos, const char *name, int fd, struct token *endtoken, const char **next_path)   in tokenize() function

/third_party/vk-gl-cts/framework/randomshaders/
  rsgBinaryOps.hpp:48         void tokenize (GeneratorState& state, TokenStream& str) const;
  rsgShaderGenerator.cpp:343  shader.tokenize(m_state, tokenStr);   in generate()

/third_party/glslang/glslang/HLSL/
  hlslScanContext.cpp:542  void HlslScanContext::tokenize(HlslToken& token)   in tokenize() function in glslang::HlslScanContext
  hlslScanContext.cpp:567  int token = ppContext.tokenize(ppToken);   in tokenizeClass()

/third_party/libwebsockets/include/
  libwebsockets.h:637  #include <libwebsockets/lws-tokenize.h>

/third_party/python/Tools/peg_generator/pegen/
  grammar_parser.py:6  import tokenize   namespace

/third_party/python/Lib/importlib/
  _bootstrap_external.py:766  import tokenize  # To avoid bootstrap issues.   namespace
  _bootstrap_external.py:768  encoding = tokenize.detect_encoding(source_bytes_readline)

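The importlib call site above uses the stdlib tokenize module's encoding detection. A minimal sketch of the same call, with a hypothetical byte string standing in for the real source bytes:

    import io
    import tokenize

    # detect_encoding() takes a readline callable over bytes and inspects the
    # PEP 263 coding cookie (or a BOM) in the first two lines of the source.
    source_bytes = b"# -*- coding: latin-1 -*-\nx = 1\n"
    encoding, first_lines = tokenize.detect_encoding(io.BytesIO(source_bytes).readline)
    print(encoding)  # -> 'latin-1'
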
/third_party/glslang/glslang/MachineIndependent/
  Scan.cpp:296  return parseContext.getScanContext()->tokenize(parseContext.getPpContext(), token);   in yylex()
  Scan.cpp:824  int TScanContext::tokenize(TPpContext* pp, TParserToken& token)   in tokenize() function in glslang::TScanContext
  Scan.cpp:829  int token = pp->tokenize(ppToken);   in tokenize()

/third_party/selinux/libsepol/src/
  module_to_cil.c:3023  matched = tokenize(levelstr, ':', 2, &sens, &cats);   in level_string_to_cil()
  module_to_cil.c:3088  matched = tokenize(contextstr, ':', 4, &user, &role, &type, &level);   in context_string_to_cil()
  module_to_cil.c:3148  matched = tokenize(tmp, ':', 3, &user, &seuser, &level);   in seusers_to_cil()
  module_to_cil.c:3239  matched = tokenize(tmp, ' ', 4, &user_str, &user, &prefix_str, &prefix);   in user_extra_to_cil()
  module_to_cil.c:3309  matched = tokenize(tmp, ' ', 3, &regex, &mode, &context);   in file_contexts_to_cil()

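The call sites above suggest libsepol's internal tokenize() helper splits a string on a single delimiter into at most n fields and returns how many it matched. A hedged Python analogue of that observed behavior, not libsepol's actual C implementation:

    def tokenize(s, delim, n):
        # Split on delim into at most n fields and report how many matched,
        # modeling call sites like tokenize(levelstr, ':', 2, &sens, &cats).
        # This is an illustrative stand-in for a private libsepol helper.
        fields = s.split(delim, n - 1)
        return len(fields), fields

    matched, fields = tokenize("s0:c0.c255", ":", 2)
    print(matched, fields)  # -> 2 ['s0', 'c0.c255']
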
/third_party/glslang/glslang/MachineIndependent/preprocessor/
  PpContext.h:203  int tokenize(TPpToken& ppToken);

/third_party/jinja2/
  lexer.py:602        def tokenize(   member in Lexer
  lexer.py:609        """Calls tokeniter + tokenize and wraps it in a token stream."""
  lexer.py:619        """This is called with the stream as returned by `tokenize` and wraps
  lexer.py:675        generator. Use this method if you just want to tokenize a template.
  environment.py:667  stream = self.lexer.tokenize(source, name, filename, state)

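Lexer.tokenize() is internal to Jinja2; the environment.py hit shows compilation going through it. The docstring hit at lexer.py:675 belongs to tokeniter(), the raw-tuple generator that Environment.lex() exposes publicly in upstream Jinja2. A minimal sketch of that public path:

    from jinja2 import Environment

    env = Environment()
    # lex() yields (lineno, token_type, value) tuples from the raw stream;
    # compilation instead goes through self.lexer.tokenize(...), which wraps
    # the same stream in a TokenStream.
    for lineno, token_type, value in env.lex("Hello {{ name }}!"):
        print(lineno, token_type, repr(value))
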
/third_party/node/deps/v8/third_party/jinja2/
  lexer.py:613        def tokenize(self, source, name=None, filename=None, state=None):   member in Lexer
  lexer.py:614        """Calls tokeniter + tokenize and wraps it in a token stream."""
  lexer.py:619        """This is called with the stream as returned by `tokenize` and wraps
  lexer.py:664        generator. Use this method if you just want to tokenize a template.
  environment.py:573  stream = self.lexer.tokenize(source, name, filename, state)

/third_party/node/tools/inspector_protocol/jinja2/
  lexer.py:552        def tokenize(self, source, name=None, filename=None, state=None):   member in Lexer
  lexer.py:553        """Calls tokeniter + tokenize and wraps it in a token stream.
  lexer.py:559        """This is called with the stream as returned by `tokenize` and wraps
  lexer.py:601        generator. Use this method if you just want to tokenize a template.
  environment.py:529  stream = self.lexer.tokenize(source, name, filename, state)

/third_party/skia/third_party/externals/jinja2/
  lexer.py:613        def tokenize(self, source, name=None, filename=None, state=None):   member in Lexer
  lexer.py:614        """Calls tokeniter + tokenize and wraps it in a token stream."""
  lexer.py:619        """This is called with the stream as returned by `tokenize` and wraps
  lexer.py:664        generator. Use this method if you just want to tokenize a template.
  environment.py:573  stream = self.lexer.tokenize(source, name, filename, state)

/third_party/python/Lib/test/test_peg_generator/
  test_pegen.py:9  from tokenize import TokenInfo, NAME, NEWLINE, NUMBER, OP

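Both pegen hits (grammar_parser.py above and test_pegen.py here) build on the stdlib tokenize module. A minimal sketch of the TokenInfo tuples and token-type constants it produces:

    import io
    from tokenize import generate_tokens, NAME, NEWLINE, NUMBER, OP

    # generate_tokens() takes a readline callable over str and yields
    # TokenInfo(type, string, start, end, line) named tuples.
    for tok in generate_tokens(io.StringIO("x = 42\n").readline):
        if tok.type in (NAME, NUMBER, OP, NEWLINE):
            print(tok.type, repr(tok.string), tok.start)
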
/third_party/python/Lib/idlelib/
  pyshell.py:38   import tokenize   namespace
  pyshell.py:669  with tokenize.open(filename) as fp:

/third_party/python/Lib/
  pdb.py:82     import tokenize   namespace
  pdb.py:100    fp = tokenize.open(filename)
  pydoc.py:72   import tokenize   namespace
  pydoc.py:366  file = tokenize.open(filename)

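pyshell, pdb, and pydoc all open source files the same way: tokenize.open() reads the file in the encoding that detect_encoding() finds, rather than the locale default. A minimal sketch, using this script's own path as a stand-in filename:

    import tokenize

    # tokenize.open() returns a text-mode file object whose encoding was
    # chosen by detect_encoding() (PEP 263 cookie or BOM, else UTF-8).
    with tokenize.open(__file__) as fp:
        print(fp.encoding, fp.readline().rstrip())
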
/third_party/mesa3d/src/gallium/frontends/clover/spirv/
  invocation.cpp:802  std::vector<std::string> options = tokenize(opts);   in link_program()