/third_party/mesa3d/src/gallium/drivers/i915/
  i915_fpc_optimize.c
    289  struct i915_token_list *tokens) in liveness_analysis()
    303  for (i = 0; i < tokens->NumTokens; i++) { in liveness_analysis()
    304  current = &tokens->Tokens[i]; in liveness_analysis()
    325  for (i = tokens->NumTokens - 1; i >= 0; i--) { in liveness_analysis()
    326  current = &tokens->Tokens[i]; in liveness_analysis()
    396  struct i915_token_list *tokens, int index) in i915_fpc_optimize_mov_before_tex()
    398  union i915_full_token *current = &tokens->Tokens[index - 1]; in i915_fpc_optimize_mov_before_tex()
    399  union i915_full_token *next = &tokens->Tokens[index]; in i915_fpc_optimize_mov_before_tex()
    589  struct i915_token_list *tokens, in i915_fpc_optimize_useless_mov_after_inst()
    592  union i915_full_token *current = &tokens in i915_fpc_optimize_useless_mov_after_inst()
    288  liveness_analysis(struct i915_optimize_context *ctx, struct i915_token_list *tokens) liveness_analysis() argument
    395  i915_fpc_optimize_mov_before_tex(struct i915_optimize_context *ctx, struct i915_token_list *tokens, int index) i915_fpc_optimize_mov_before_tex() argument
    588  i915_fpc_optimize_useless_mov_after_inst(struct i915_optimize_context *ctx, struct i915_token_list *tokens, int index) i915_fpc_optimize_useless_mov_after_inst() argument
    620  i915_optimize(const struct tgsi_token *tokens) i915_optimize() argument
    677  i915_optimize_free(struct i915_token_list *tokens) i915_optimize_free() argument
    [all...]
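The i915 hits above come from a liveness pass that walks the token list forward (line 303) and then backward (line 325) to find registers that are written but never read again. As a rough illustration of the backward half, here is a minimal TypeScript sketch; the `Inst` shape and its `uses` field are hypothetical stand-ins for the driver's i915_full_token stream, not the real structures.

```ts
// Hypothetical instruction shape standing in for i915's token union.
interface Inst {
  def?: number;   // register written by this instruction, if any
  uses: number[]; // registers read by this instruction
}

// Walk the instructions in reverse, recording for each register the index
// of its last use. The first time a register appears in this reverse scan
// is, by construction, its last use in program order; a def with no later
// use is the kind of dead write the real pass goes looking for.
function lastUses(insts: Inst[]): Map<number, number> {
  const lastUse = new Map<number, number>();
  for (let i = insts.length - 1; i >= 0; i--) {
    for (const r of insts[i].uses) {
      if (!lastUse.has(r)) lastUse.set(r, i);
    }
  }
  return lastUse;
}
```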
/third_party/rust/crates/proc-macro2/tests/
  test.rs
    162  let mut tokens = strings.parse::<TokenStream>().unwrap().into_iter(); in literal_c_string() variable
    170  match tokens.next().unwrap() { in literal_c_string()
    178  if let Some(unexpected) = tokens.next() { in literal_c_string()
    265  let tokens = TokenStream::from(TokenTree::Literal(negative_literal)); in literal_iter_negative()
    266  let mut iter = tokens.into_iter(); in literal_iter_negative()
    332  let mut tokens = input in source_text() variable
    337  let first = tokens.next().unwrap(); in source_text()
    340  let second = tokens.next().unwrap(); in source_text()
    341  let third = tokens.next().unwrap(); in source_text()
    655  let tokens in tokenstream_size_hint()
    663  let mut tokens = "tuple.0.0".parse::<TokenStream>().unwrap().into_iter(); tuple_indexing() variable
    [all...]
/third_party/PyYAML/tests/lib/
  test_canonical.py
    7   tokens = list(yaml.canonical_scan(data))
    8   assert tokens, tokens
    10  for token in tokens:
/third_party/mesa3d/src/gallium/auxiliary/tgsi/
  tgsi_build.c
    405  struct tgsi_token *tokens, in tgsi_build_full_declaration()
    415  declaration = (struct tgsi_declaration *) &tokens[size]; in tgsi_build_full_declaration()
    433  dr = (struct tgsi_declaration_range *) &tokens[size]; in tgsi_build_full_declaration()
    448  dd = (struct tgsi_declaration_dimension *)&tokens[size]; in tgsi_build_full_declaration()
    462  di = (struct tgsi_declaration_interp *)&tokens[size]; in tgsi_build_full_declaration()
    476  ds = (struct tgsi_declaration_semantic *) &tokens[size]; in tgsi_build_full_declaration()
    496  di = (struct tgsi_declaration_image *)&tokens[size]; in tgsi_build_full_declaration()
    513  dsv = (struct tgsi_declaration_sampler_view *)&tokens[size]; in tgsi_build_full_declaration()
    532  da = (struct tgsi_declaration_array *)&tokens[size]; in tgsi_build_full_declaration()
    603  struct tgsi_token *tokens, in tgsi_build_full_immediate()
    403  tgsi_build_full_declaration( const struct tgsi_full_declaration *full_decl, struct tgsi_token *tokens, struct tgsi_header *header, unsigned maxsize ) tgsi_build_full_declaration() argument
    601  tgsi_build_full_immediate( const struct tgsi_full_immediate *full_imm, struct tgsi_token *tokens, struct tgsi_header *header, unsigned maxsize ) tgsi_build_full_immediate() argument
    1053 tgsi_build_full_instruction( const struct tgsi_full_instruction *full_inst, struct tgsi_token *tokens, struct tgsi_header *header, unsigned maxsize ) tgsi_build_full_instruction() argument
    1357 tgsi_build_full_property( const struct tgsi_full_property *full_prop, struct tgsi_token *tokens, struct tgsi_header *header, unsigned maxsize ) tgsi_build_full_property() argument
    [all...]
  tgsi_dump.h
    45  const struct tgsi_token *tokens,
    51  tgsi_dump_to_file(const struct tgsi_token *tokens, uint flags, FILE *file);
    55  const struct tgsi_token *tokens,
/third_party/skia/third_party/externals/tint/tools/src/cmd/intrinsic-gen/parser/
  parser.go
    27   // Parse produces a list of tokens for the given source code
    30   tokens, err := lexer.Lex(runes, filepath)
    35   p := parser{tokens: tokens}
    40   tokens []tok.Token
    258  if len(p.tokens) > 0 {
    260  p.tokens[0].Source, kind, use, p.tokens[0].Kind)
    288  if p.err != nil || len(p.tokens) == 0 {
    291  t := p.tokens[
    [all...]
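The tint parser keeps the lexed token list on the parser struct and reports errors against the head token's source position (line 260, the "expected X, got Y" message). A minimal TypeScript sketch of that shape; the `Token` type and the error format are hypothetical, chosen only to mirror the pattern visible above.

```ts
interface Token { kind: string; lexeme: string; line: number }

class Parser {
  constructor(private tokens: Token[]) {}

  // Look at the head of the token list without consuming it.
  peek(): Token | undefined { return this.tokens[0]; }

  // Consume the head token if it has the expected kind; otherwise report
  // what was found and where, mirroring the "expected X, got Y" pattern.
  expect(kind: string, use: string): Token {
    const t = this.tokens[0];
    if (t === undefined) {
      throw new Error(`expected ${kind} for ${use}, got end of input`);
    }
    if (t.kind !== kind) {
      throw new Error(`${t.line}: expected ${kind} for ${use}, got ${t.kind}`);
    }
    this.tokens.shift();
    return t;
  }
}
```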
/third_party/node/deps/v8/src/torque/
  earley-parser.h
    233  const Item* completed_item, const LexerResult& tokens) const;
    269  const Item* item, const LexerResult& tokens);
    323  std::string SplitByChildren(const LexerResult& tokens) const;
    325  void CheckAmbiguity(const Item& other, const LexerResult& tokens) const;
    327  MatchedInput GetMatchedInput(const LexerResult& tokens) const { in GetMatchedInput()
    328  const MatchedInput& start = tokens.token_contents[start_]; in GetMatchedInput()
    329  const MatchedInput& end = start_ == pos_ ? tokens.token_contents[start_] in GetMatchedInput()
    330  : tokens.token_contents[pos_ - 1]; in GetMatchedInput()
    365  const Item* item, const LexerResult& tokens) { in RunAction()
    368  return item->rule()->RunAction(item, tokens); in RunAction()
    364  RunAction( const Item* item, const LexerResult& tokens) RunAction() argument
    375  ParseTokens(Symbol* start, const LexerResult& tokens) ParseTokens() argument
    425  LexerResult tokens = lexer().RunLexer(input); Parse() local
    [all...]
/third_party/libuv/test/
  test-strtok.c
    34  const char* tokens[] = { variable
    68  size_t tokens_len = ARRAY_SIZE(tokens); in TEST_IMPL()
    80  ASSERT_STRCMP(tok_r, tokens[j]); in TEST_IMPL()
    85  ASSERT_STRCMP(tok_r, tokens[j]); in TEST_IMPL()
/third_party/mesa3d/src/compiler/nir/
  nir_lower_patch_vertices.c
    28  make_uniform(nir_shader *nir, const gl_state_index16 *tokens) in make_uniform() argument
    38  memcpy(var->state_slots[0].tokens, tokens, sizeof(*tokens) * STATE_LENGTH); in make_uniform()
/third_party/mesa3d/src/gallium/drivers/llvmpipe/
  lp_state_tess.c
    54   tgsi_dump(templ->tokens, 0); in llvmpipe_create_tcs_state()
    58   state->no_tokens = !templ->tokens; in llvmpipe_create_tcs_state()
    61   if (templ->tokens || templ->type == PIPE_SHADER_IR_NIR) { in llvmpipe_create_tcs_state()
    122  tgsi_dump(templ->tokens, 0); in llvmpipe_create_tes_state()
    126  state->no_tokens = !templ->tokens; in llvmpipe_create_tes_state()
    129  if (templ->tokens || templ->type == PIPE_SHADER_IR_NIR) { in llvmpipe_create_tes_state()
/third_party/skia/src/sksl/lex/
  Main.cpp
    35   const std::vector<std::string>& tokens, const char* hPath) { in writeH()
    48   for (const std::string& t : tokens) { in writeH()
    148  // tokens. Our grammar doesn't have this property, so we can simplify the logic in writeCPP()
    187  std::vector<std::string> tokens; in process() local
    188  tokens.push_back("END_OF_FILE"); in process()
    205  tokens.push_back(name); in process()
    222  writeH(dfa, lexer, token, tokens, hPath); in process()
    34   writeH(const DFA& dfa, const char* lexer, const char* token, const std::vector<std::string>& tokens, const char* hPath) writeH() argument
/third_party/toybox/toys/pending/
  crond.c
    210  char *name, *val, *tokens[5] = {0,}; in parse_line() local
    234  tokens[count++] = line; in parse_line()
    242  name = tokens[0]; in parse_line()
    247  name = tokens[0]; in parse_line()
    250  val = tokens[1]; in parse_line()
    252  if (*(tokens[1]) != '=') return; in parse_line()
    253  val = tokens[1] + 1; in parse_line()
    258  name = tokens[0]; in parse_line()
    259  val = tokens[2]; in parse_line()
    260  if (*(tokens[ in parse_line()
    [all...]
/third_party/mesa3d/src/gallium/auxiliary/draw/
  draw_vs_llvm.c
    80   FREE((void*) dvs->state.tokens); in vs_llvm_delete()
    101  /* we make a private copy of the tokens */ in draw_create_vs_llvm()
    102  vs->base.state.tokens = tgsi_dup_tokens(state->tokens); in draw_create_vs_llvm()
    103  if (!vs->base.state.tokens) { in draw_create_vs_llvm()
    108  tgsi_scan_shader(state->tokens, &vs->base.info); in draw_create_vs_llvm()
  draw_vs_exec.c
    75   if (evs->machine->Tokens != shader->state.tokens) { in vs_exec_prepare()
    77   shader->state.tokens, in vs_exec_prepare()
    213  FREE((void*) dvs->state.tokens); in vs_exec_delete()
    229  vs->base.state.tokens = nir_to_tgsi(state->ir.nir, draw->pipe->screen); in draw_create_vs_exec()
    234  /* we need to keep a local copy of the tokens */ in draw_create_vs_exec()
    235  vs->base.state.tokens = tgsi_dup_tokens(state->tokens); in draw_create_vs_exec()
    236  if (!vs->base.state.tokens) { in draw_create_vs_exec()
    242  tgsi_scan_shader(vs->base.state.tokens, &vs->base.info); in draw_create_vs_exec()
/third_party/rust/crates/syn/tests/
  test_path.rs
    13   let tokens = TokenStream::from_iter(vec![ in parse_interpolated_leading_component()
    20   snapshot!(tokens.clone() as Expr, @r###" in parse_interpolated_leading_component()
    36   snapshot!(tokens as Type, @r###" in parse_interpolated_leading_component()
    108  let tokens = quote!(dyn FnOnce::() -> !); in parse_parenthesized_path_arguments_with_disambiguator()
    109  snapshot!(tokens as Type, @r###" in parse_parenthesized_path_arguments_with_disambiguator()
/third_party/node/deps/npm/node_modules/@pkgjs/parseargs/examples/
  limit-long-syntax.js
    3   // This is an example of using tokens to add a custom behaviour.
    20  const { values, tokens } = parseArgs({ options, tokens: true });
    22  const badToken = tokens.find((token) => token.kind === 'option' &&
  no-repeated-options.js
    3   // This is an example of using tokens to add a custom behaviour.
    15  const { values, tokens } = parseArgs({ options, tokens: true });
    18  tokens.forEach((token) => {
  is-default-value.js
    13  const { values, tokens } = parseArgs({ options, tokens: true });
    15  const isFileDefault = !tokens.some((token) => token.kind === 'option' &&
  negate.js
    17  const { values, tokens } = parseArgs({ options, tokens: true });
    19  // Reprocess the option tokens and overwrite the returned values.
    20  tokens
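All four parseargs examples share one pattern: pass `tokens: true` to get the raw token stream back alongside the parsed values, then post-process those tokens. Here is a self-contained TypeScript sketch of the no-repeated-options variant; the `--ditto` option and the thrown error are illustrative, not taken from the npm examples themselves.

```ts
import { parseArgs } from 'node:util';

const options = {
  ditto: { type: 'boolean' as const, short: 'd' },
};

// tokens: true asks parseArgs to also return the raw option tokens.
const { values, tokens } = parseArgs({
  args: ['-d', '--ditto'], // repeated on purpose; the check below fires
  options,
  tokens: true,
});
console.log(values); // { ditto: true } -- the repetition is invisible here

// Count option tokens per name; the values object alone cannot
// distinguish a repeated flag from a single occurrence.
const seen = new Map<string, number>();
for (const token of tokens) {
  if (token.kind === 'option') {
    seen.set(token.name, (seen.get(token.name) ?? 0) + 1);
  }
}
for (const [name, count] of seen) {
  if (count > 1) throw new Error(`option '--${name}' must not be repeated`);
}
```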
/third_party/icu/tools/unicode/c/genprops/
  namespropsbuilder.cpp
    194  static int16_t tokens[LEADBYTE_LIMIT*256]; variable
    225  tokens[i]=0; in NamesPropsBuilder()
    508  if(tokens[i]==-1) { in compress()
    516  /* do we need double-byte tokens? */ in compress()
    518  /* no, single-byte tokens are enough */ in compress()
    521  if(tokens[i]!=-1) { in compress()
    522  tokens[i]=wordNumber; in compress()
    524  printf("tokens[0x%03x]: word%8ld \"%.*s\"\n", in compress()
    534  * The tokens that need two token bytes in compress()
    553  /* how many tokens an in compress()
    [all...]
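The comments at lines 516-534 sketch the character-name compression used for the names data: frequently used words get a single token byte, and once that space runs out the remaining words are addressed with a lead byte plus a trail byte. A toy TypeScript sketch of that encoding decision follows; the byte budget here is illustrative, not ICU's actual layout.

```ts
// Illustrative byte budget, not ICU's actual layout: some byte values
// act as one-byte tokens, the rest as lead bytes for two-byte tokens.
const SINGLE_BYTE_TOKENS = 200;
const LEAD_BYTES = 56;

function encodeWordIndex(wordNumber: number): number[] {
  if (wordNumber < SINGLE_BYTE_TOKENS) {
    // Frequent word: the token is a single byte.
    return [wordNumber];
  }
  // Rare word: a lead byte selects a 256-entry page and a trail byte
  // indexes into it -- the "double-byte tokens" the comments mention.
  const rest = wordNumber - SINGLE_BYTE_TOKENS;
  const lead = SINGLE_BYTE_TOKENS + Math.floor(rest / 256);
  if (lead >= SINGLE_BYTE_TOKENS + LEAD_BYTES) {
    throw new Error(`word index ${wordNumber} exceeds the two-byte token space`);
  }
  return [lead, rest % 256];
}
```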
/third_party/rust/crates/log/src/
  serde.rs
    303  for &(value, tokens) in &cases { in test_level_de_bytes()
    304  assert_de_tokens(&value, &tokens); in test_level_de_bytes()
    318  for &(value, tokens) in &cases { in test_level_de_variant_index()
    319  assert_de_tokens(&value, &tokens); in test_level_de_variant_index()
    373  for &(value, tokens) in &cases { in test_level_filter_de_bytes()
    374  assert_de_tokens(&value, &tokens); in test_level_filter_de_bytes()
    389  for &(value, tokens) in &cases { in test_level_filter_de_variant_index()
    390  assert_de_tokens(&value, &tokens); in test_level_filter_de_variant_index()
/third_party/protobuf/csharp/src/Google.Protobuf/
  JsonTokenizer.cs
    45   /// This tokenizer is stateful, and only returns "useful" tokens - names, values etc.
    46   /// It does not create tokens for the separator between names and values, or for the comma
    48   /// tokens it produces are appropriate. For example, it would never produce "start object, end array."
    66   /// Creates a tokenizer that first replays the given list of tokens, then continues reading
    71   internal static JsonTokenizer FromReplayedTokens(IList<JsonToken> tokens, JsonTokenizer continuation) in FromReplayedTokens() argument
    73   return new JsonReplayTokenizer(tokens, continuation); in FromReplayedTokens()
    169  /// Tokenizer which first exhausts a list of tokens, then consults another tokenizer.
    173  private readonly IList<JsonToken> tokens; field in Google.Protobuf.JsonTokenizer.JsonReplayTokenizer
    177  internal JsonReplayTokenizer(IList<JsonToken> tokens, JsonTokenizer nextTokenizer) in JsonReplayTokenizer() argument
    179  this.tokens in JsonReplayTokenizer()
    [all...]
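The doc comments above describe a replay tokenizer: it first serves tokens from a saved list, then falls through to the live tokenizer, which lets the JSON parser push back lookahead it has already consumed. A minimal TypeScript analog of that pattern; the `Tokenizer` interface is hypothetical, standing in for the C# class.

```ts
interface Tokenizer<T> {
  next(): T;
}

// Serve the replayed tokens first, then delegate to the underlying
// tokenizer -- the same shape as JsonTokenizer.FromReplayedTokens.
class ReplayTokenizer<T> implements Tokenizer<T> {
  private index = 0;

  constructor(
    private readonly replay: readonly T[],
    private readonly continuation: Tokenizer<T>,
  ) {}

  next(): T {
    if (this.index < this.replay.length) {
      return this.replay[this.index++];
    }
    return this.continuation.next();
  }
}
```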
/third_party/littlefs/scripts/
  prettyasserts.py
    214  tokens = []
    221  tokens.append((None, data[:m.start()], line, col))
    222  tokens.append((m.lastgroup, m.group(), line, col))
    225  tokens.append((None, data, line, col))
    227  self.tokens = tokens
    231  if self.off < len(self.tokens):
    232  token = self.tokens[self.off]
    248  raise ParseFailure(patterns, self.tokens[self.off:])
    422  for i in range(p.off, len(p.tokens))
    [all...]
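prettyasserts.py tokenizes with one alternation regex, recording (type, text, line, col) tuples and keeping unmatched stretches as untyped tokens (lines 221, 222, and 225 above). A compact TypeScript analog using named capture groups; the token classes in the pattern are illustrative, not littlefs's actual grammar.

```ts
type Tok = { type: string | null; text: string; line: number; col: number };

function tokenize(data: string): Tok[] {
  // One alternation, one named capture group per token class.
  const pattern = /(?<ws>\s+)|(?<ident>[A-Za-z_]\w*)|(?<op>==|!=|<=|>=|[=<>()])/g;
  const tokens: Tok[] = [];
  let line = 1;
  let col = 1;
  let last = 0;

  const push = (type: string | null, text: string) => {
    tokens.push({ type, text, line, col });
    for (const ch of text) {
      if (ch === '\n') { line += 1; col = 1; } else { col += 1; }
    }
  };

  for (const m of data.matchAll(pattern)) {
    if (m.index! > last) push(null, data.slice(last, m.index)); // untyped gap
    // The one group that matched names the token type (like m.lastgroup).
    const type = Object.entries(m.groups!).find(([, v]) => v !== undefined)![0];
    push(type, m[0]);
    last = m.index! + m[0].length;
  }
  if (last < data.length) push(null, data.slice(last)); // trailing gap
  return tokens;
}
```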
/third_party/typescript/tests/baselines/reference/
  arrayAssignmentTest6.js
    9   tokens: IToken[];
    16  public tokenize(line:string, tokens:IToken[], includeStates:boolean):ILineTokens {
    29  Bug.prototype.tokenize = function (line, tokens, includeStates) {
  objectLitArrayDeclNoNew.js
    12  tokens: IToken[];
    23  tokens: Gar[],//IToken[], // Missing new. Correct syntax is: tokens: new IToken[]
    44  tokens: Gar[],
|