
Searched refs:tokenize (Results 51 - 75 of 104) sorted by relevance


/third_party/glslang/glslang/HLSL/
hlslTokenStream.cpp
108 scanner.tokenize(token); in advanceToken()
/third_party/python/Lib/idlelib/
iomenu.py
6 import tokenize namespace
126 with tokenize.open(filename) as f:
286 enc, _ = tokenize.detect_encoding(io.BytesIO(encoded).readline)
editor.py
8 import tokenize namespace
1629 INDENT=tokenize.INDENT,
1630 NAME=tokenize.NAME,
1647 tokens = tokenize.generate_tokens(self.readline)
1650 except (tokenize.TokenError, SyntaxError):
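The idlelib hits rely on three stdlib helpers: tokenize.open() opens a source file using its PEP 263 coding cookie, detect_encoding() sniffs that cookie from a bytes readline, and generate_tokens() yields the token stream that editor.py scans for INDENT and NAME tokens. A minimal sketch of the same calls (the file name is hypothetical):

    import io
    import tokenize

    # Open a source file honoring its coding cookie, as iomenu.py does.
    with tokenize.open("example.py") as f:   # hypothetical file
        source = f.read()

    # detect_encoding() reads at most two lines through the readline
    # callable and returns (encoding, lines_consumed).
    enc, _ = tokenize.detect_encoding(io.BytesIO(source.encode()).readline)

    # generate_tokens() takes a *text* readline and yields TokenInfo tuples.
    for tok in tokenize.generate_tokens(io.StringIO(source).readline):
        if tok.type == tokenize.NAME:
            print(tok.string)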
/third_party/python/Lib/lib2to3/pgen2/
tokenize.py
23 tokenize(readline, tokeneater=printtoken)
37 __all__ = [x for x in dir(token) if x[0] != '_'] + ["tokenize",
148 def tokenize(readline, tokeneater=printtoken): function
150 The tokenize() function accepts two parameters: one representing the
151 input stream, and one providing an output mechanism for tokenize().
245 in the same way as the tokenize() generator.
325 # Output text will tokenize the back to the input
426 ("<tokenize>", lnum, pos, line))
563 if len(sys.argv) > 1: tokenize(open(sys.argv[1]).readline)
564 else: tokenize(sy
[all...]
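pgen2's tokenize() is the older callback style: rather than yielding tokens, it pushes each one into a tokeneater function (printtoken by default), per the signature at line 148 above. A sketch, assuming an interpreter that still ships lib2to3 (it was deprecated and removed in Python 3.13):

    import io
    from lib2to3.pgen2 import tokenize as pgen2_tokenize

    def eater(type, string, start, end, line):
        # Invoked once per token with its type, text, (row, col)
        # start/end positions, and the physical source line.
        print(type, repr(string), start, end)

    pgen2_tokenize.tokenize(io.StringIO("x = 1\n").readline, tokeneater=eater)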
/third_party/vk-gl-cts/framework/randomshaders/
rsgBuiltinFunctions.hpp
43 void tokenize (GeneratorState& state, TokenStream& str) const;
95 void UnaryBuiltinVecFunc<GetValueRangeWeight, ComputeValueRange, Evaluate>::tokenize (GeneratorState& state, TokenStream& str) const in tokenize() function in rsg::UnaryBuiltinVecFunc
98 m_child->tokenize(state, str); in tokenize()
rsgBinaryOps.cpp
46 void tokenize (GeneratorState& state, TokenStream& str) const;
82 void CustomAbsOp::tokenize (GeneratorState& state, TokenStream& str) const in tokenize() function in rsg::CustomAbsOp
85 m_child->tokenize(state, str); in tokenize()
343 void BinaryOp<Precedence, Assoc>::tokenize (GeneratorState& state, TokenStream& str) const in tokenize() function in rsg::BinaryOp
345 m_leftValueExpr->tokenize(state, str); in tokenize()
347 m_rightValueExpr->tokenize(state, str); in tokenize()
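The randomshaders hits show a recursive pattern: every expression node writes its own tokens to the shared TokenStream and then calls tokenize() on its children (m_child->tokenize(state, str)). A rough Python analogue of that shape, with all names hypothetical:

    class TokenStream:
        # Stand-in for rsg::TokenStream: an append-only token list.
        def __init__(self):
            self.tokens = []
        def add(self, tok):
            self.tokens.append(tok)

    class UnaryOp:
        # Emits "name ( <child> )", recursing like CustomAbsOp::tokenize().
        def __init__(self, name, child):
            self.name, self.child = name, child
        def tokenize(self, stream):
            stream.add(self.name)
            stream.add("(")
            self.child.tokenize(stream)
            stream.add(")")

    class Literal:
        def __init__(self, value):
            self.value = value
        def tokenize(self, stream):
            stream.add(str(self.value))

    s = TokenStream()
    UnaryOp("abs", Literal(-3)).tokenize(s)
    print(" ".join(s.tokens))   # abs ( -3 )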
/third_party/node/deps/openssl/openssl/
Configure
2028 my @a = tokenize($attr_str, qr|\s*,\s*|);
2060 @indexes = tokenize($index_str);
2145 foreach (tokenize($expand_variables->($+{VALUE}))) {
2153 tokenize($expand_variables->($+{VALUE})))
2158 tokenize($expand_variables->($+{VALUE})))
2163 tokenize($expand_variables->($+{VALUE})))
2168 tokenize($expand_variables->($+{VALUE})))
2173 tokenize($expand_variables->($+{VALUE})))
2178 tokenize($expand_variables->($+{VALUE})))
2183 tokenize(
3566 sub tokenize { global() subroutine
[all...]
/third_party/openssl/
Configure
2023 my @a = tokenize($attr_str, qr|\s*,\s*|);
2055 @indexes = tokenize($index_str);
2140 foreach (tokenize($expand_variables->($+{VALUE}))) {
2148 tokenize($expand_variables->($+{VALUE})))
2153 tokenize($expand_variables->($+{VALUE})))
2158 tokenize($expand_variables->($+{VALUE})))
2163 tokenize($expand_variables->($+{VALUE})))
2168 tokenize($expand_variables->($+{VALUE})))
2173 tokenize($expand_variables->($+{VALUE})))
2178 tokenize(
3561 sub tokenize { global() subroutine
[all...]
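In both OpenSSL copies, Configure's tokenize (the subroutine defined near line 3566/3561) splits a string on a separator regexp, defaulting to whitespace; the real subroutine also honors shell-style quoting. A rough Python analogue under those assumptions, not OpenSSL's actual code:

    import re
    import shlex

    def tokenize(s, sep=None):
        # With an explicit separator regexp, split on it and drop empties;
        # otherwise fall back to shell-style, quote-aware splitting.
        if sep is not None:
            return [t for t in re.split(sep, s) if t]
        return shlex.split(s)

    print(tokenize("a, b ,c", r"\s*,\s*"))   # ['a', 'b', 'c']
    print(tokenize('gcc -DFOO="a b" -O2'))   # ['gcc', '-DFOO=a b', '-O2']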
/third_party/node/deps/npm/node_modules/diff/lib/
index.es6.js
31 oldString = this.removeEmpty(this.tokenize(oldString));
32 newString = this.removeEmpty(this.tokenize(newString));
233 tokenize: function tokenize(value) {
358 wordDiff.tokenize = function (value) {
386 lineDiff.tokenize = function (value) {
429 sentenceDiff.tokenize = function (value) {
439 cssDiff.tokenize = function (value) {
550 jsonDiff.tokenize = lineDiff.tokenize;
[all...]
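In the diff package, tokenize is the hook that sets a differ's granularity: wordDiff, lineDiff, sentenceDiff and cssDiff each override it, and jsonDiff simply reuses lineDiff's. A minimal Python sketch of that pluggable-tokenizer design (names hypothetical):

    import re

    class Differ:
        def tokenize(self, value):
            return list(value)   # default: character granularity
        def changed(self, old, new):
            return self.tokenize(old) != self.tokenize(new)

    class LineDiffer(Differ):
        def tokenize(self, value):
            # Line granularity, keeping newlines with their lines.
            return value.splitlines(keepends=True)

    class WordDiffer(Differ):
        def tokenize(self, value):
            # Word granularity: split on whitespace but keep it as tokens.
            return [t for t in re.split(r"(\s+)", value) if t]

    print(WordDiffer().changed("a  b", "a b"))   # True
    print(LineDiffer().changed("a\nb", "a\nb"))  # False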
/third_party/node/deps/npm/node_modules/postcss-selector-parser/dist/
tokenize.js
5 exports["default"] = tokenize;
78 function tokenize(input) { function
/third_party/python/Tools/peg_generator/pegen/
build.py
6 import tokenize namespace
172 tokenizer = Tokenizer(tokenize.generate_tokens(file.readline), verbose=verbose_tokenizer)
/third_party/python/Lib/
imp.py
27 import tokenize namespace
302 encoding = tokenize.detect_encoding(file.readline)[0]
inspect.py
147 import tokenize namespace
1192 elif type == tokenize.NEWLINE:
1203 elif type == tokenize.INDENT:
1208 elif type == tokenize.DEDENT:
1215 elif type == tokenize.COMMENT:
1219 elif self.indent == 0 and type not in (tokenize.COMMENT, tokenize.NL):
1228 tokens = tokenize.generate_tokens(iter(lines).__next__)
2123 token_stream = tokenize.tokenize(generato
[all...]
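The inspect.py hits come from its block finder, which walks generate_tokens() output and tracks INDENT/DEDENT to locate the end of a function's source. A simplified sketch of that idea, not inspect's actual BlockFinder:

    import io
    import tokenize

    def block_end(source):
        # Return the line where the first indented block closes.
        depth = 0
        for tok in tokenize.generate_tokens(io.StringIO(source).readline):
            if tok.type == tokenize.INDENT:
                depth += 1
            elif tok.type == tokenize.DEDENT:
                depth -= 1
                if depth == 0:
                    return tok.start[0]
        return None

    src = "def f():\n    x = 1\n    return x\n\nprint(f())\n"
    print(block_end(src))   # line of the DEDENT that closes f()'s body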
tokenize.py
3 tokenize(readline) is a generator that breaks a stream of bytes into
42 __all__ = token.__all__ + ["tokenize", "generate_tokens", "detect_encoding",
264 token, which is the first token sequence output by tokenize.
274 # Output bytes will tokenize back to the input
275 t1 = [tok[:2] for tok in tokenize(f.readline)]
278 t2 = [tok[:2] for tok in tokenize(readline)]
303 in the same way as the tokenize() generator.
408 def tokenize(readline): function
410 The tokenize() generator requires one argument, readline, which
518 ("<tokenize>", lnu
[all...]
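The Lib/tokenize.py hits include the module's own round-trip doctest (lines 274-278 above): untokenize() of the (type, string) pairs yields source that tokenizes back to the same pairs. Expanded into a runnable sketch:

    import io
    import tokenize

    src = b"x = 1\nprint(x)\n"   # tokenize() needs a *bytes* readline

    # First pass, keeping only (type, string) pairs, as in the doctest.
    t1 = [tok[:2] for tok in tokenize.tokenize(io.BytesIO(src).readline)]

    # untokenize() rebuilds compilable source (bytes, because the stream
    # begins with an ENCODING token); whitespace may differ from src...
    code = tokenize.untokenize(t1)

    # ...but it tokenizes back to the identical (type, string) sequence.
    t2 = [tok[:2] for tok in tokenize.tokenize(io.BytesIO(code).readline)]
    assert t1 == t2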
/third_party/ltp/tools/sparse/sparse-src/
token.h
229 extern struct token * tokenize(const struct position *pos, const char *, int, struct token *, const char **next_path);
Makefile
89 LIB_OBJS += tokenize.o
/third_party/python/Lib/idlelib/idle_test/
test_iomenu.py
11 # Fail if either tokenize.open and t.detect_encoding does not exist.
14 from tokenize import open, detect_encoding
/third_party/mesa3d/src/gallium/frontends/clover/llvm/
invocation.cpp
410 tokenize(opts + " input.cl"), r_log); in compile_program()
475 std::vector<std::string> options = tokenize(opts + " input.cl"); in link_program()
521 tokenize(opts + " -O0 -fgnu89-inline input.cl"), r_log); in compile_to_spirv()
/third_party/mesa3d/src/gallium/frontends/clover/util/
algorithm.hpp
223 tokenize(const std::string &s) { in tokenize() function
/third_party/mesa3d/src/amd/compiler/tests/
glsl_scraper.py
203 def tokenize(f): function
229 self.token_iter = tokenize(self.infile)
/third_party/python/Lib/test/
test_unparse.py
7 import tokenize namespace
14 with tokenize.open(filename) as stream:
test_linecache.py
7 import tokenize namespace
59 with tokenize.open(self.file_name) as fp:
/third_party/python/Lib/lib2to3/tests/
test_parser.py
28 from lib2to3.pgen2 import tokenize namespace
595 encoding = tokenize.detect_encoding(fp.readline)[0]
/third_party/skia/third_party/externals/swiftshader/third_party/subzero/src/
IceBrowserCompileServer.cpp
110 auto Args = RangeSpec::tokenize(ArgsList, '|'); in getExternalArgs()
/third_party/selinux/libsepol/src/
util.c
200 * The tokenize and tokenize_str functions may be used to
244 * line_buf - Buffer containing string to tokenize.
245 * delim - The delimiter used to tokenize line_buf. A whitespace delimiter will
253 * function will not tokenize more than num_args and the last argument will
257 int tokenize(char *line_buf, char delim, int num_args, ...) in tokenize() function
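libsepol's tokenize() is a variadic C helper: it splits line_buf on delim into at most num_args out-parameters, and per the comment at line 253 the last argument keeps the untokenized remainder. Python's str.split with maxsplit behaves analogously; a sketch, not libsepol's implementation:

    def tokenize(line_buf, delim, num_args):
        # At most num_args fields; the last keeps the rest of the line.
        if delim.isspace():
            # A whitespace delimiter splits on any whitespace run.
            return line_buf.split(None, num_args - 1)
        return line_buf.split(delim, num_args - 1)

    print(tokenize("allow domain type : class perms", " ", 3))
    # ['allow', 'domain', 'type : class perms']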
