
Searched refs:tokenize (Results 26 - 50 of 104) sorted by relevance


/third_party/python/Tools/peg_generator/scripts/
test_parse_directory.py
9 import tokenize
117 with tokenize.open(file) as f:
/third_party/python/Lib/distutils/command/
build_scripts.py
12 import tokenize
82 encoding, lines = tokenize.detect_encoding(f.readline)
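
Both hits above read Python source under its declared encoding. A minimal sketch of the same two stdlib calls, assuming a hypothetical source file example.py:

    import tokenize

    # tokenize.open() opens a source file using the encoding declared in
    # its PEP 263 coding cookie, defaulting to UTF-8.
    with tokenize.open("example.py") as f:
        source = f.read()

    # tokenize.detect_encoding() inspects at most the first two lines and
    # returns the encoding plus the raw lines it consumed.
    with open("example.py", "rb") as f:
        encoding, lines = tokenize.detect_encoding(f.readline)
    print(encoding)
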
/third_party/python/Lib/
linecache.py
11 import tokenize
136 with tokenize.open(fullname) as fp:
cgitb.py
32 import tokenize
88 for ttype, token, start, end, line in tokenize.generate_tokens(reader):
89 if ttype == tokenize.NEWLINE: break
90 if ttype == tokenize.NAME and token not in keyword.kwlist:
trace.py
58 import tokenize
274 encoding, _ = tokenize.detect_encoding(fp.readline)
363 tok = tokenize.generate_tokens(f.readline)
377 with tokenize.open(filename) as f:
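
cgitb and trace above both walk the stream from tokenize.generate_tokens() and branch on token types. A minimal sketch of that loop over in-memory source rather than a file:

    import io
    import keyword
    import tokenize

    source = "total = price * qty  # compute\n"
    reader = io.StringIO(source).readline

    # generate_tokens() takes a str-yielding readline and yields 5-tuples:
    # (type, string, start, end, logical_line).
    for ttype, token, start, end, line in tokenize.generate_tokens(reader):
        if ttype == tokenize.NEWLINE:
            break  # stop at the end of the first logical line, as cgitb does
        if ttype == tokenize.NAME and token not in keyword.kwlist:
            print("identifier:", token)
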
/third_party/vk-gl-cts/framework/randomshaders/
rsgShader.hpp
54 void tokenize (GeneratorState& state, TokenStream& stream) const;
106 void tokenize (GeneratorState& state, TokenStream& str) const;
rsgExpression.cpp
423 void FloatLiteral::tokenize (GeneratorState& state, TokenStream& str) const
477 void IntLiteral::tokenize (GeneratorState& state, TokenStream& str) const
527 void BoolLiteral::tokenize (GeneratorState& state, TokenStream& str) const
800 void ConstructorOp::tokenize (GeneratorState& state, TokenStream& str) const
812 (*i)->tokenize(state, str);
954 void AssignOp::tokenize (GeneratorState& state, TokenStream& str) const
956 m_lvalueExpr->tokenize(state, str);
958 m_rvalueExpr->tokenize(state, str);
1271 void ParenOp::tokenize (GeneratorState& state, TokenStream& str) const
1274 m_child->tokenize(state, str);
1365 void SwizzleOp::tokenize (GeneratorState& state, TokenStream& str) const
1575 void TexLookup::tokenize (GeneratorState& state, TokenStream& str) const
[all...]
/third_party/python/Lib/test/
test_tokenize.py
3 from tokenize import (tokenize, _tokenize, untokenize, NUMBER, NAME, OP,
39 # Tests for the tokenize module.
49 result = stringify_tokens_from_source(tokenize(f.readline), s)
58 tokens = list(tokenize(f.readline))
96 for tok in tokenize(readline):
223 for toktype, token, start, end, line in tokenize(f.readline):
960 g = tokenize(BytesIO(s.encode('utf-8')).readline) # tokenize the string
1320 with mock.patch('tokenize
[all...]
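
test_tokenize.py above also exercises the bytes-level entry point, where tokenize() detects the encoding itself. A minimal round-trip sketch along the lines of the hit at line 960:

    import tokenize
    from io import BytesIO

    s = "x = 1 + 2\n"
    # tokenize.tokenize() takes a bytes-yielding readline; the first token
    # it emits is an ENCODING token.
    tokens = list(tokenize.tokenize(BytesIO(s.encode("utf-8")).readline))
    print(tokens[0].type == tokenize.ENCODING)               # True

    # untokenize() rebuilds the source; it returns bytes here because the
    # token stream starts with ENCODING.
    print(tokenize.untokenize(tokens).decode("utf-8") == s)  # True
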
test_tabnanny.py
10 import tokenize
204 ' (<tokenize>, line 3)\n')
209 """A python source code file eligible for raising 'tokenize.TokenError'."""
252 ' (<tokenize>, line 3)\n')
266 tabnanny.process_tokens(tokenize.generate_tokens(f.readline))
272 # "tab_space_errored_1": executes block under type == tokenize.INDENT
282 tokens = tokenize.generate_tokens(f.readline)
312 ' (<tokenize>, line 3)')
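
These tests drive tabnanny through the same tokenize stream. A minimal sketch of feeding process_tokens() a file's tokens, again assuming a hypothetical example.py:

    import tabnanny
    import tokenize

    try:
        with tokenize.open("example.py") as f:
            tabnanny.process_tokens(tokenize.generate_tokens(f.readline))
    except tabnanny.NannyNag as nag:
        # Raised when tabs and spaces mix ambiguously.
        print("ambiguous indentation at line", nag.get_lineno())
    except tokenize.TokenError as err:
        # E.g. an unclosed bracket, as the expected messages above show.
        print("tokenize failed:", err)
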
/third_party/python/Tools/peg_generator/pegen/
testutil.py
8 import tokenize
40 tokenizer = Tokenizer(tokenize.generate_tokens(file.readline)) # type: ignore # typeshed issue #3515
/third_party/ltp/lib/
tst_bool_expr.c
64 static unsigned int tokenize(const char *expr, struct tst_expr_tok *last)
340 unsigned int tok_cnt = tokenize(expr, NULL);
349 tokenize(expr, ret->buf);
/third_party/python/Tools/scripts/
fixdiv.py
91 This really shouldn't happen. It means that the tokenize module
136 import tokenize namespace
218 g = tokenize.generate_tokens(f.readline)
367 if type == tokenize.NEWLINE:
/third_party/node/deps/npm/node_modules/diff/lib/diff/
css.js
34 cssDiff.tokenize = function (value) {
sentence.js
34 sentenceDiff.tokenize = function (value) {
array.js
34 arrayDiff.tokenize = function (value) {
word.js
70 wordDiff.tokenize = function (value) {
line.js
41 lineDiff.tokenize = function (value) {
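
Each of these jsdiff variants customizes diff granularity by overriding the object's tokenize hook (CSS, sentences, arrays, words, lines). A rough Python analogue of the same idea, tokenizing first and diffing the token lists with difflib (illustrative only, not the jsdiff API):

    import difflib
    import re

    def tokenize_words(value):
        # Keep whitespace runs as tokens, roughly like jsdiff's word mode.
        return re.findall(r"\S+|\s+", value)

    old = tokenize_words("the quick brown fox")
    new = tokenize_words("the quick red fox")

    matcher = difflib.SequenceMatcher(None, old, new)
    for op, i1, i2, j1, j2 in matcher.get_opcodes():
        print(op, repr("".join(old[i1:i2])), "->", repr("".join(new[j1:j2])))
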
/third_party/selinux/libsepol/include/sepol/policydb/
util.h
40 * The tokenize function may be used to
43 extern int tokenize(char *line_buf, char delim, int num_args, ...);
/third_party/python/Lib/lib2to3/pgen2/
pgen.py
5 from . import grammar, token, tokenize
19 self.generator = tokenize.generate_tokens(stream.readline)
323 while tup[0] in (tokenize.COMMENT, tokenize.NL):
/third_party/glslang/glslang/MachineIndependent/
ScanContext.h
62 int tokenize(TPpContext*, TParserToken&);
/third_party/glslang/glslang/HLSL/
hlslScanContext.h
83 void tokenize(HlslToken&);
/third_party/python/Lib/lib2to3/
patcomp.py
17 from .pgen2 import driver, literals, token, tokenize, parse, grammar
31 tokens = tokenize.generate_tokens(io.StringIO(input).readline)
refactor.py
25 from .pgen2 import driver, tokenize, token
113 gen = tokenize.generate_tokens(io.StringIO(source).readline)
117 ignore = frozenset({token.NEWLINE, tokenize.NL, token.COMMENT})
318 encoding = tokenize.detect_encoding(f.readline)[0]
643 """Wraps a tokenize stream to systematically modify start/end."""
644 tokens = tokenize.generate_tokens(self.gen_lines(block, indent).__next__)
657 """Generates lines as expected by tokenize from a list of lines.
/third_party/skia/third_party/externals/swiftshader/third_party/subzero/src/
IceRangeSpec.h
62 // Helper function to tokenize a string into a vector of string tokens, given
64 // vector. Zero-length tokens are allowed, e.g. ",a,,,b," may tokenize to
66 static std::vector<std::string> tokenize(const std::string &Spec,
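
The comment above pins down the helper's contract: splitting on the delimiter keeps zero-length tokens. Python's str.split has the same semantics for the comment's own ",a,,,b," example (an analogue only, not the Subzero implementation):

    spec = ",a,,,b,"
    # Empty fields survive at the ends and between adjacent delimiters.
    print(spec.split(","))   # ['', 'a', '', '', 'b', '']
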
/third_party/f2fs-tools/fsck/
dict.c
1206 static int tokenize(char *string, ...)
1301 if (tokenize(in+1, &tok1, &tok2, (char **) 0) != 2) {
1372 if (tokenize(in+1, &tok1, &tok2, (char **) 0) != 2) {
1393 if (tokenize(in+1, &tok1, (char **) 0) != 1) {
1415 if (tokenize(in+1, &tok1, (char **) 0) != 1) {
1465 if (tokenize(in+1, &tok1, (char **) 0) != 1) {
1478 if (tokenize(in+1, &tok1, &tok2, (char **) 0) != 2) {

Completed in 17 milliseconds
