/third_party/python/Tools/peg_generator/scripts/ |
test_parse_directory.py | 9 import tokenize namespace 117 with tokenize.open(file) as f:
|
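Several of the Python hits in this listing (test_parse_directory.py here, linecache.py and trace.py below) go through tokenize.open(), which opens a source file read-only using the encoding declared in its PEP 263 coding cookie or BOM. A minimal sketch of that call, assuming a hypothetical example.py on disk:

    import tokenize

    # tokenize.open() reads the coding cookie / BOM and returns a
    # text-mode file object already decoded with that encoding.
    with tokenize.open("example.py") as f:   # example.py is hypothetical
        source = f.read()
    print(source[:80])
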
/third_party/python/Lib/distutils/command/ |
build_scripts.py | 12 import tokenize namespace 82 encoding, lines = tokenize.detect_encoding(f.readline)
|
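build_scripts.py calls tokenize.detect_encoding() to learn a script's encoding before rewriting its shebang line. The function takes a readline callable over a file opened in binary mode and returns the encoding name plus the raw lines it consumed. A minimal sketch, again with a hypothetical file:

    import tokenize

    # detect_encoding() expects bytes from readline and returns
    # (encoding, list_of_lines_it_read).
    with open("example.py", "rb") as f:      # example.py is hypothetical
        encoding, consumed = tokenize.detect_encoding(f.readline)
    print(encoding, consumed)
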
/third_party/python/Lib/ |
linecache.py | 11 import tokenize namespace 136 with tokenize.open(fullname) as fp:
|
cgitb.py | 32 import tokenize namespace 88 for ttype, token, start, end, line in tokenize.generate_tokens(reader): 89 if ttype == tokenize.NEWLINE: break 90 if ttype == tokenize.NAME and token not in keyword.kwlist:
|
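The cgitb.py match scans one logical line of source, collecting NAME tokens that are not keywords and stopping at the NEWLINE token. Reconstructed as a self-contained sketch (the sample source line is invented):

    import io
    import keyword
    import tokenize

    source = "total = price * quantity\n"   # invented sample line
    reader = io.StringIO(source).readline

    # Same loop shape as cgitb.py: stop at the logical-line NEWLINE,
    # keep identifiers that are not Python keywords.
    names = []
    for ttype, token, start, end, line in tokenize.generate_tokens(reader):
        if ttype == tokenize.NEWLINE:
            break
        if ttype == tokenize.NAME and token not in keyword.kwlist:
            names.append(token)
    print(names)   # ['total', 'price', 'quantity']
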
trace.py | 58 import tokenize namespace 274 encoding, _ = tokenize.detect_encoding(fp.readline) 363 tok = tokenize.generate_tokens(f.readline) 377 with tokenize.open(filename) as f:
|
/third_party/vk-gl-cts/framework/randomshaders/ |
rsgShader.hpp | 54 void tokenize (GeneratorState& state, TokenStream& stream) const; 106 void tokenize (GeneratorState& state, TokenStream& str) const;
|
rsgExpression.cpp | 423 void FloatLiteral::tokenize (GeneratorState& state, TokenStream& str) const in tokenize() function in rsg::FloatLiteral 477 void IntLiteral::tokenize (GeneratorState& state, TokenStream& str) const in tokenize() function in rsg::IntLiteral 527 void BoolLiteral::tokenize (GeneratorState& state, TokenStream& str) const in tokenize() function in rsg::BoolLiteral 800 void ConstructorOp::tokenize (GeneratorState& state, TokenStream& str) const in tokenize() function in rsg::ConstructorOp 812 (*i)->tokenize(state, str); in tokenize() 954 void AssignOp::tokenize (GeneratorState& state, TokenStream& str) const in tokenize() function in rsg::AssignOp 956 m_lvalueExpr->tokenize(state, str); in tokenize() 958 m_rvalueExpr->tokenize(state, str); in tokenize() 1271 void ParenOp::tokenize (GeneratorState& state, TokenStream& str) const in tokenize() function in rsg::ParenOp 1274 m_child->tokenize(state, str); in tokenize() 1365 void SwizzleOp::tokenize (GeneratorState& state, TokenStream& str) const in tokenize() function in rsg::SwizzleOp 1575 void TexLookup::tokenize (GeneratorState& state, TokenStream& str) const in tokenize() function in rsg::TexLookup [all...]
/third_party/python/Lib/test/ |
test_tokenize.py | 3 from tokenize import (tokenize, _tokenize, untokenize, NUMBER, NAME, OP, namespace 39 # Tests for the tokenize module. 49 result = stringify_tokens_from_source(tokenize(f.readline), s) 58 tokens = list(tokenize(f.readline)) 96 for tok in tokenize(readline): 223 for toktype, token, start, end, line in tokenize(f.readline): 960 g = tokenize(BytesIO(s.encode('utf-8')).readline) # tokenize the string 1320 with mock.patch('tokenize [all...]
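Line 960 of test_tokenize.py shows the bytes-level entry point: tokenize.tokenize() wants a readline over bytes, which a BytesIO wrapper provides, and untokenize() can rebuild the source from the full token stream. A round-trip sketch along those lines:

    import io
    import tokenize

    s = "x = 1 + 2\n"
    tokens = list(tokenize.tokenize(io.BytesIO(s.encode("utf-8")).readline))
    # With full 5-tuples (including the leading ENCODING token),
    # untokenize() returns bytes matching the original source.
    assert tokenize.untokenize(tokens) == s.encode("utf-8")
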
test_tabnanny.py | 10 import tokenize namespace 204 ' (<tokenize>, line 3)\n') 209 """A python source code file eligible for raising 'tokenize.TokenError'.""" 252 ' (<tokenize>, line 3)\n') 266 tabnanny.process_tokens(tokenize.generate_tokens(f.readline)) 272 # "tab_space_errored_1": executes block under type == tokenize.INDENT 282 tokens = tokenize.generate_tokens(f.readline) 312 ' (<tokenize>, line 3)')
|
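test_tabnanny.py drives tabnanny.process_tokens() over tokenize.generate_tokens() and exercises the tokenize.TokenError path. One way such an error arises is an unterminated bracket at end of file; a sketch of that failure mode (the sample source is invented):

    import io
    import tokenize

    bad = "result = (1 +\n"   # invented: the bracket is never closed
    try:
        list(tokenize.generate_tokens(io.StringIO(bad).readline))
    except tokenize.TokenError as err:
        print("TokenError:", err)   # typically 'EOF in multi-line statement'
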
/third_party/python/Tools/peg_generator/pegen/ |
testutil.py | 8 import tokenize namespace 40 tokenizer = Tokenizer(tokenize.generate_tokens(file.readline)) # type: ignore # typeshed issue #3515
|
/third_party/ltp/lib/ |
tst_bool_expr.c | 64 static unsigned int tokenize(const char *expr, struct tst_expr_tok *last) in tokenize() function 340 unsigned int tok_cnt = tokenize(expr, NULL); in tst_bool_expr_parse() 349 tokenize(expr, ret->buf); in tst_bool_expr_parse()
|
/third_party/python/Tools/scripts/ |
fixdiv.py | 91 This really shouldn't happen. It means that the tokenize module 136 import tokenize namespace 218 g = tokenize.generate_tokens(f.readline) 367 if type == tokenize.NEWLINE:
|
/third_party/node/deps/npm/node_modules/diff/lib/diff/ |
css.js | 34 cssDiff.tokenize = function (value) {
|
sentence.js | 34 sentenceDiff.tokenize = function (value) {
|
array.js | 34 arrayDiff.tokenize = function (value) {
|
word.js | 70 wordDiff.tokenize = function (value) {
|
line.js | 41 lineDiff.tokenize = function (value) {
|
/third_party/selinux/libsepol/include/sepol/policydb/ |
util.h | 40 * The tokenize function may be used to 43 extern int tokenize(char *line_buf, char delim, int num_args, ...);
|
/third_party/python/Lib/lib2to3/pgen2/ |
pgen.py | 5 from . import grammar, token, tokenize namespace 19 self.generator = tokenize.generate_tokens(stream.readline) 323 while tup[0] in (tokenize.COMMENT, tokenize.NL):
|
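pgen.py pulls tokens one at a time and skips tokenize.COMMENT and tokenize.NL so the grammar parser only sees logically significant tokens. The same filtering in isolation, over an invented snippet:

    import io
    import tokenize

    source = "# a comment\n\nx = 1\n"   # invented sample
    for tup in tokenize.generate_tokens(io.StringIO(source).readline):
        if tup[0] in (tokenize.COMMENT, tokenize.NL):
            continue                    # skip what pgen.py skips
        print(tokenize.tok_name[tup[0]], repr(tup[1]))
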
/third_party/glslang/glslang/MachineIndependent/ |
ScanContext.h | 62 int tokenize(TPpContext*, TParserToken&);
|
/third_party/glslang/glslang/HLSL/ |
hlslScanContext.h | 83 void tokenize(HlslToken&);
|
/third_party/python/Lib/lib2to3/ |
patcomp.py | 17 from .pgen2 import driver, literals, token, tokenize, parse, grammar namespace 31 tokens = tokenize.generate_tokens(io.StringIO(input).readline)
|
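patcomp.py tokenizes an in-memory string rather than a file by handing generate_tokens() the readline method of an io.StringIO wrapper, a pattern refactor.py below also uses. The trick in isolation, with an invented input:

    import io
    import tokenize

    src = "f(x) + 1\n"                  # invented input string
    for tok in tokenize.generate_tokens(io.StringIO(src).readline):
        print(tok)
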
refactor.py | 25 from .pgen2 import driver, tokenize, token namespace 113 gen = tokenize.generate_tokens(io.StringIO(source).readline) 117 ignore = frozenset({token.NEWLINE, tokenize.NL, token.COMMENT}) 318 encoding = tokenize.detect_encoding(f.readline)[0] 643 """Wraps a tokenize stream to systematically modify start/end.""" 644 tokens = tokenize.generate_tokens(self.gen_lines(block, indent).__next__) 657 """Generates lines as expected by tokenize from a list of lines.
|
/third_party/skia/third_party/externals/swiftshader/third_party/subzero/src/ |
IceRangeSpec.h | 62 // Helper function to tokenize a string into a vector of string tokens, given 64 // vector. Zero-length tokens are allowed, e.g. ",a,,,b," may tokenize to 66 static std::vector<std::string> tokenize(const std::string &Spec,
|
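The IceRangeSpec.h comment notes that zero-length tokens are allowed, so ",a,,,b," splits into six tokens, four of them empty. Python's str.split shows the same delimiter semantics; this is only an illustration of the documented behavior, not of the C++ implementation:

    # ',a,,,b,' -> ['', 'a', '', '', 'b', '']: six tokens, zero-length kept
    print(",a,,,b,".split(","))
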
/third_party/f2fs-tools/fsck/ |
dict.c | 1206 static int tokenize(char *string, ...) in tokenize() function 1301 if (tokenize(in+1, &tok1, &tok2, (char **) 0) != 2) { in construct() 1372 if (tokenize(in+1, &tok1, &tok2, (char **) 0) != 2) { in main() 1393 if (tokenize(in+1, &tok1, (char **) 0) != 1) { in main() 1415 if (tokenize(in+1, &tok1, (char **) 0) != 1) { in main() 1465 if (tokenize(in+1, &tok1, (char **) 0) != 1) { in main() 1478 if (tokenize(in+1, &tok1, &tok2, (char **) 0) != 2) { in main()
|