/third_party/mesa3d/src/gallium/auxiliary/tgsi/ |
H A D | tgsi_parse.h | 122 const struct tgsi_token *tokens ); 137 tgsi_num_tokens(const struct tgsi_token *tokens) in tgsi_num_tokens() argument 140 memcpy(&header, tokens, sizeof(header)); in tgsi_num_tokens() 145 tgsi_dump_tokens(const struct tgsi_token *tokens); 148 tgsi_dup_tokens(const struct tgsi_token *tokens); 154 tgsi_free_tokens(const struct tgsi_token *tokens); 157 tgsi_get_processor_type(const struct tgsi_token *tokens);
|
H A D | tgsi_ureg.c | 77 union tgsi_any_token *tokens; member 215 static void tokens_error( struct ureg_tokens *tokens ) in tokens_error() 217 if (tokens->tokens && tokens->tokens != error_tokens) in tokens_error() 218 FREE(tokens->tokens); in tokens_error() 220 tokens->tokens in tokens_error() 226 tokens_expand( struct ureg_tokens *tokens, unsigned count ) tokens_expand() argument 258 struct ureg_tokens *tokens = &ureg->domain[domain]; get_tokens() local 2113 const struct tgsi_token *tokens; ureg_finalize() local 2204 const struct tgsi_token *tokens; ureg_get_tokens() local [all...] |
H A D | tgsi_build.h | 60 struct tgsi_token *tokens, 74 struct tgsi_token *tokens, 88 struct tgsi_token *tokens, 105 struct tgsi_token *tokens,
|
/third_party/rust/crates/syn/benches/ |
H A D | file.rs | 40 let tokens = get_tokens(); in baseline() 41 b.iter(|| drop(tokens.clone())); in baseline() 46 let tokens = get_tokens(); in create_token_buffer() 50 b.iter(|| immediate_fail.parse2(tokens.clone())); in create_token_buffer() 55 let tokens = get_tokens(); in parse_file() 56 b.iter(|| syn::parse2::<syn::File>(tokens.clone())); in parse_file()
|
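The benchmark above lexes the source once via `get_tokens()` and then re-parses a clone of the resulting `proc_macro2::TokenStream` on every iteration with `syn::parse2::<syn::File>`. A minimal sketch of that round trip, with a made-up source string standing in for the benchmarked file:

```rust
// Assumed deps: syn = { version = "2", features = ["full"] }, proc-macro2.
use std::str::FromStr;

use proc_macro2::TokenStream;

fn main() -> Result<(), Box<dyn std::error::Error>> {
    // Lex the text into tokens once (roughly what the bench's get_tokens() provides).
    let src = "fn answer() -> u32 { 42 }";
    let tokens = TokenStream::from_str(src)?;

    // Re-parse a clone of the tokens into a full syntax tree, as parse_file() does per iteration.
    let file: syn::File = syn::parse2(tokens.clone())?;
    assert_eq!(file.items.len(), 1);
    Ok(())
}
```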
/third_party/rust/crates/syn/src/ |
H A D | attr.rs | 45 /// - Meta::List — attributes that carry arbitrary tokens after the 72 /// # Parsing from tokens to Attribute 117 /// `Meta::List` variety of attribute are held in an arbitrary `tokens: 119 /// decide whether they recognize it, and then parse the remaining tokens 121 /// attribute. Use [`parse_args()`] to parse those tokens into the expected 185 /// doing `syn::parse2::<T>(meta_list.tokens)`, except that using 186 /// `parse_args` the error message has a more useful span when `tokens` is 395 /// [*Parsing from tokens to Attribute*](#parsing-from-tokens-to-attribute). 411 /// [*Parsing from tokens t [all...] |
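The doc comment above contrasts `syn::parse2::<T>(meta_list.tokens)` with `Attribute::parse_args`, which reports a more useful error span when the token list is empty. A hedged sketch of that call on a hypothetical `#[my_attr(...)]` attribute:

```rust
// Assumed deps: syn = { version = "2", features = ["full"] }, quote; #[my_attr] is made up.
use syn::{parse_quote, Attribute, Expr};

fn main() -> syn::Result<()> {
    // A Meta::List style attribute: arbitrary tokens inside the delimiter.
    let attr: Attribute = parse_quote!(#[my_attr(1 + 2)]);

    // parse_args() parses the tokens between the parentheses as the requested type.
    let arg: Expr = attr.parse_args()?;
    println!("argument expression: {}", quote::quote!(#arg));
    Ok(())
}
```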
H A D | macros.rs | 84 tokens 112 ($($arms:tt)*) $tokens:ident $name:ident { 121 $tokens $name { $($next)* } 126 ($($arms:tt)*) $tokens:ident $name:ident { 134 ($($arms)* $(#[cfg $cfg_attr])* $name::$variant(_e) => _e.to_tokens($tokens),) 135 $tokens $name { $($next)* } 139 (($($arms:tt)*) $tokens:ident $name:ident {}) => { 142 fn to_tokens(&self, $tokens: &mut ::proc_macro2::TokenStream) { in to_tokens()
|
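The macro arms above accumulate one `match` arm per enum variant of the form `$name::$variant(_e) => _e.to_tokens($tokens)` and wrap them in an `impl ToTokens`. A hand-written approximation of what one such expansion might look like, using a made-up two-variant enum:

```rust
// Assumed deps: syn = { version = "2", features = ["full"] }, proc-macro2, quote.
// The Shape enum is hypothetical; syn's real enums are produced by the macro above.
use proc_macro2::TokenStream;
use quote::{quote, ToTokens};

enum Shape {
    Expr(syn::Expr),
    Type(syn::Type),
}

impl ToTokens for Shape {
    fn to_tokens(&self, tokens: &mut TokenStream) {
        // Each arm just forwards to the wrapped node's own ToTokens impl.
        match self {
            Shape::Expr(e) => e.to_tokens(tokens),
            Shape::Type(t) => t.to_tokens(tokens),
        }
    }
}

fn main() {
    let shape = Shape::Expr(syn::parse_quote!(a + b));
    println!("{}", quote!(#shape)); // prints: a + b
}
```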
/third_party/node/lib/internal/util/parse_args/ |
H A D | parse_args.js | 76 * @param {object} token - from tokens as available from parseArgs 94 * @param {object} token - from tokens as available from parseArgs 164 * Process args and turn into identified tokens: 172 const tokens = []; 189 ArrayPrototypePush(tokens, { kind: 'option-terminator', index }); 191 tokens, ArrayPrototypeMap(remainingArgs, (arg) => { 211 tokens, 246 tokens, 264 tokens, 277 tokens, [all...] |
/third_party/node/deps/npm/node_modules/@pkgjs/parseargs/ |
H A D | index.js | 76 * @param {object} token - from tokens as available from parseArgs 95 * @param {object} token - from tokens as available from parseArgs 167 * Process args and turn into identified tokens: 176 const tokens = []; 193 ArrayPrototypePush(tokens, { kind: 'option-terminator', index }); 195 tokens, ArrayPrototypeMap(remainingArgs, (arg) => { 215 tokens, 250 tokens, 268 tokens, 281 tokens, [all...] |
/third_party/gn/src/gn/ |
H A D | parser_unittest.cc | 22 std::vector<Token> tokens; in DoParserPrintTest() local 25 ASSERT_TRUE(GetTokens(&input_file, &tokens)); in DoParserPrintTest() 28 std::unique_ptr<ParseNode> result = Parser::Parse(tokens, &err); in DoParserPrintTest() 40 std::vector<Token> tokens; in DoExpressionPrintTest() local 43 ASSERT_TRUE(GetTokens(&input_file, &tokens)); in DoExpressionPrintTest() 46 std::unique_ptr<ParseNode> result = Parser::ParseExpression(tokens, &err); in DoExpressionPrintTest() 62 std::vector<Token> tokens = Tokenizer::Tokenize(&input_file, &err); in DoParserErrorTest() local 64 std::unique_ptr<ParseNode> result = Parser::Parse(tokens, &err); in DoParserErrorTest() 80 std::vector<Token> tokens = Tokenizer::Tokenize(&input_file, &err); in DoExpressionErrorTest() local 82 std::unique_ptr<ParseNode> result = Parser::ParseExpression(tokens, in DoExpressionErrorTest() [all...] |
H A D | input_file_manager.cc | 42 std::vector<Token>* tokens, in DoLoadFile() 87 *tokens = Tokenizer::Tokenize(file, err); in DoLoadFile() 92 *root = Parser::Parse(*tokens, err); in DoLoadFile() 250 std::vector<Token>** tokens, in AddDynamicInput() 254 *tokens = &data->tokens; in AddDynamicInput() 291 std::vector<Token> tokens; in LoadFile() local 294 file, &tokens, &root, err); in LoadFile() 310 data->tokens = std::move(tokens); in LoadFile() 37 DoLoadFile(const LocationRange& origin, const BuildSettings* build_settings, const SourceFile& name, InputFileManager::SyncLoadFileCallback load_file_callback, InputFile* file, std::vector<Token>* tokens, std::unique_ptr<ParseNode>* root, Err* err) DoLoadFile() argument 247 AddDynamicInput( const SourceFile& name, InputFile** file, std::vector<Token>** tokens, std::unique_ptr<ParseNode>** parse_root) AddDynamicInput() argument [all...] |
/third_party/glslang/glslang/MachineIndependent/preprocessor/ |
H A D | PpAtom.cpp | 97 } tokens[] = { member 163 // Add single character tokens to the atom table: in TStringAtomMap() 174 // Add multiple character scanner tokens : in TStringAtomMap() 175 for (size_t ii = 0; ii < sizeof(tokens)/sizeof(tokens[0]); ii++) in TStringAtomMap() 176 addAtomFixed(tokens[ii].str, tokens[ii].val); in TStringAtomMap()
|
/third_party/node/deps/npm/lib/commands/ |
H A D | token.js | 11 static description = 'Manage your authentication tokens' 54 const tokens = await pulseTillDone.withPromise(profile.listTokens(conf)) 56 this.npm.output(JSON.stringify(tokens, null, 2)) 60 tokens.forEach(token => { 73 this.generateTokenIds(tokens, 6) 74 const idWidth = tokens.reduce((acc, token) => Math.max(acc, token.id.length), 0) 79 tokens.forEach(token => { 98 const progress = log.newItem('removing tokens', toRemove.length) 100 const tokens = await pulseTillDone.withPromise(profile.listTokens(conf)) 102 const matches = tokens [all...] |
/third_party/jerryscript/tools/ |
H A D | gen-doctest.py | 64 tokens = list(shlex.shlex(params)) 67 for i in range(0, len(tokens), 4): 68 if i + 2 >= len(tokens) or tokens[i + 1] != '=' or (i + 3 < len(tokens) and tokens[i + 3] != ','): 72 decl[tokens[i]] = tokens[i + 2].strip('\'"')
|
/third_party/rust/crates/bindgen/bindgen/codegen/ |
H A D | impl_partialeq.rs | 14 let mut tokens = vec![]; in gen_partialeq_impl() variables 17 tokens.push(quote! { in gen_partialeq_impl() 22 tokens.push(quote! { in gen_partialeq_impl() 36 tokens.push(quote! { in gen_partialeq_impl() 40 tokens.push(gen_field(ctx, ty_item, field_name)); in gen_partialeq_impl() 49 tokens.push(gen_field(ctx, ty_item, name)); in gen_partialeq_impl() 56 tokens.push(quote! { in gen_partialeq_impl() 68 #( #tokens )&&* in gen_partialeq_impl()
|
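`gen_partialeq_impl` pushes one `quote!` fragment per compared field into a `Vec` and later splices them with the `#( #tokens )&&*` repetition, so the generated `eq` body becomes a chain of `&&` comparisons. A reduced sketch of that pattern with made-up field names:

```rust
// Assumed deps: proc-macro2, quote. The field names are hypothetical.
use proc_macro2::TokenStream;
use quote::{format_ident, quote};

fn main() {
    // One comparison fragment per field, as impl_partialeq.rs accumulates them.
    let mut tokens: Vec<TokenStream> = vec![];
    for field in ["x", "y", "z"] {
        let ident = format_ident!("{}", field);
        tokens.push(quote! { self.#ident == other.#ident });
    }

    // The #( ... )&&* repetition joins the fragments with `&&` between them.
    let body = quote! { #( #tokens )&&* };
    println!("{}", body); // self . x == other . x && self . y == other . y && ...
}
```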
/third_party/alsa-lib/src/topology/ |
H A D | data.c | 515 struct tplg_vendor_tokens *tokens) in get_token_value() 519 for (i = 0; i < tokens->num_tokens; i++) { in get_token_value() 520 if (strcmp(token_id, tokens->token[i].id) == 0) in get_token_value() 521 return tokens->token[i].value; in get_token_value() 528 /* get the vendor tokens referred by the vendor tuples */ 589 struct tplg_vendor_tokens *tokens) in copy_tuples() 643 token_val = get_token_value(tuple->token, tokens); in copy_tuples() 678 struct tplg_elem *tuples, *tokens; in build_tuples() local 700 tokens = get_tokens(tplg, tuples); in build_tuples() 701 if (!tokens) { in build_tuples() 514 get_token_value(const char *token_id, struct tplg_vendor_tokens *tokens) get_token_value() argument 587 copy_tuples(struct tplg_elem *elem, struct tplg_vendor_tuples *tuples, struct tplg_vendor_tokens *tokens) copy_tuples() argument 1043 struct tplg_vendor_tokens *tokens; tplg_parse_tokens() local 1090 struct tplg_vendor_tokens *tokens = elem->tokens; tplg_save_tokens() local 1624 struct tplg_vendor_tokens *tokens; tplg_add_token() local [all...] |
/third_party/littlefs/scripts/ |
H A D | explode_asserts.py | 209 tokens = [] 214 tokens.append((None, data[:m.start()])) 215 tokens.append((m.lastgroup, m.group())) 218 tokens.append((None, data)) 220 self.tokens = tokens 224 if self.off < len(self.tokens): 225 token = self.tokens[self.off] 241 raise ParseFailure(patterns, self.tokens[self.off:]) 368 for i in range(p.off, len(p.tokens)) [all...] |
/third_party/mesa3d/src/gallium/drivers/r300/ |
H A D | r300_vs_draw.c | 317 const uint newLen = tgsi_num_tokens(vs->state.tokens) + 100; in r300_draw_init_vertex_shader() 320 .tokens = tgsi_alloc_tokens(newLen) in r300_draw_init_vertex_shader() 324 tgsi_scan_shader(vs->state.tokens, &info); in r300_draw_init_vertex_shader() 350 new_vs.tokens = tgsi_transform_shader(vs->state.tokens, newLen, &transform.base); in r300_draw_init_vertex_shader() 351 if (!new_vs.tokens) in r300_draw_init_vertex_shader() 356 tgsi_dump(vs->state.tokens, 0); in r300_draw_init_vertex_shader() 358 tgsi_dump(new_vs.tokens, 0); in r300_draw_init_vertex_shader() 362 /* Free old tokens. */ in r300_draw_init_vertex_shader() 363 FREE((void*)vs->state.tokens); in r300_draw_init_vertex_shader() [all...] |
/third_party/rust/crates/rust-cexpr/tests/ |
H A D | clang.rs | 25 tokens: &[Token], in test_definition() 105 match fn_macro_declaration(&tokens) { in test_definition() 133 .macro_definition(&tokens) in test_definition() 266 let tokens = slice::from_raw_parts(token_ptr, num as usize); in file_visit_macros() 267 let tokens: Vec<_> = tokens in file_visit_macros() 278 visitor(clang_str_to_vec(clang_getCursorSpelling(cur)), tokens) in file_visit_macros() 290 file_visit_macros(file, fix_bug_9069(), |ident, tokens| { in test_file() 291 all_succeeded &= test_definition(ident, &tokens, &mut idents) in test_file() 302 |ident, tokens| { in fix_bug_9069() [all...] |
/third_party/python/Parser/ |
H A D | pegen.c | 54 m->next = p->tokens[mark]->memo; in _PyPegen_insert_memo() 55 p->tokens[mark]->memo = m; in _PyPegen_insert_memo() 63 for (Memo *m = p->tokens[mark]->memo; m != NULL; m = m->next) { in _PyPegen_update_memo() 189 Token **new_tokens = PyMem_Realloc(p->tokens, newsize * sizeof(Token *)); in _resize_tokens_array() 194 p->tokens = new_tokens; in _resize_tokens_array() 197 p->tokens[i] = PyMem_Calloc(1, sizeof(Token)); in _resize_tokens_array() 198 if (p->tokens[i] == NULL) { in _resize_tokens_array() 252 Token *t = p->tokens[p->fill]; in _PyPegen_fill_token() 258 // The array counts the number of tokens skipped by memoization, 305 Token *t = p->tokens[ in _PyPegen_is_memoized() [all...] |
/third_party/PyYAML/lib/yaml/ |
H A D | scanner.py | 2 # Scanner produces tokens of the following types: 30 from .tokens import * 66 # List of processed tokens that are not yet emitted. 67 self.tokens = [] 72 # Number of tokens that were emitted through the `get_token` method. 108 # '[', or '{' tokens. 117 if self.tokens: 121 if isinstance(self.tokens[0], choice): 127 # Return None if no more tokens. 130 if self.tokens [all...] |
/third_party/musl/libc-test/src/functionalext/supplement/misc/ |
H A D | getsubopt.c | 28 char *tokens[] = {"getsubopt", "test", NULL}; in getsubopt_0100() local 33 EXPECT_EQ("getsubopt_0100", 0, getsubopt(&subopts, tokens, &value)); in getsubopt_0100() 44 char *tokens[] = {"getsubopt", "test", NULL}; in getsubopt_0200() local 49 EXPECT_EQ("getsubopt_0100", -1, getsubopt(&subopts, tokens, &value)); in getsubopt_0200()
|
/third_party/mesa3d/src/gallium/auxiliary/util/ |
H A D | u_simple_shaders.c | 147 struct tgsi_token tokens[1000]; in util_make_layered_clear_helper_vertex_shader() local 150 if (!tgsi_text_translate(text, tokens, ARRAY_SIZE(tokens))) { in util_make_layered_clear_helper_vertex_shader() 154 pipe_shader_state_from_tgsi(&state, tokens); in util_make_layered_clear_helper_vertex_shader() 191 struct tgsi_token tokens[1000]; in util_make_layered_clear_geometry_shader() local 194 if (!tgsi_text_translate(text, tokens, ARRAY_SIZE(tokens))) { in util_make_layered_clear_geometry_shader() 198 pipe_shader_state_from_tgsi(&state, tokens); in util_make_layered_clear_geometry_shader() 460 struct tgsi_token tokens[1000]; in util_make_fragment_passthrough_shader() local 468 if (!tgsi_text_translate(text, tokens, ARRAY_SIZ in util_make_fragment_passthrough_shader() 556 struct tgsi_token tokens[1000]; util_make_fs_blit_msaa_gen() local 685 struct tgsi_token tokens[1000]; util_make_fs_blit_msaa_depthstencil() local 1205 struct tgsi_token tokens[1000]; util_make_fs_stencil_blit() local 1235 struct tgsi_token tokens[1000]; util_make_fs_clear_all_cbufs() local [all...] |
/third_party/rust/crates/cfg-if/src/ |
H A D | lib.rs | 37 if #[cfg($meta:meta)] { $($tokens:tt)* } 44 $( ( ($meta) ($($tokens)*) ), )* 70 (@__items ($($not:meta,)*) ; ( ($($m:meta),*) ($($tokens:tt)*) ), $($rest:tt)*) => { 74 #[cfg(all($($m,)* not(any($($not),*))))] $crate::cfg_if! { @__identity $($tokens)* } 84 (@__identity $($tokens:tt)*) => { 85 $($tokens)*
|
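The rules above rewrite an `if #[cfg(...)] { ... } else { ... }` chain into items guarded by mutually exclusive `#[cfg(all(..., not(any(...))))]` attributes. Typical usage, which is where the matched `$tokens` come from:

```rust
// Assumed dep: cfg-if = "1".
use cfg_if::cfg_if;

cfg_if! {
    // Exactly one branch's tokens are emitted; later branches are guarded by
    // not(any(...)) over the predicates that came before them.
    if #[cfg(unix)] {
        fn platform() -> &'static str { "unix" }
    } else if #[cfg(windows)] {
        fn platform() -> &'static str { "windows" }
    } else {
        fn platform() -> &'static str { "other" }
    }
}

fn main() {
    println!("built for: {}", platform());
}
```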
/third_party/rust/crates/syn/tests/ |
H A D | test_generics.rs | 130 let tokens = quote!('a); in test_ty_param_bound() 131 snapshot!(tokens as TypeParamBound, @r###" in test_ty_param_bound() 137 let tokens = quote!('_); in test_ty_param_bound() 138 snapshot!(tokens as TypeParamBound, @r###" in test_ty_param_bound() 144 let tokens = quote!(Debug); in test_ty_param_bound() 145 snapshot!(tokens as TypeParamBound, @r###" in test_ty_param_bound() 157 let tokens = quote!(?Sized); in test_ty_param_bound() 158 snapshot!(tokens as TypeParamBound, @r###" in test_ty_param_bound()
|
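Each case in the test builds a small token stream with `quote!` and parses it as a `TypeParamBound`: a lifetime (`'a`, `'_`), a plain trait bound (`Debug`), or a relaxed `?Sized` bound. A minimal reproduction outside the snapshot harness:

```rust
// Assumed deps: syn = "2" (default features), quote.
use quote::quote;
use syn::{TraitBoundModifier, TypeParamBound};

fn main() -> syn::Result<()> {
    // Lifetime bound, as in the quote!('a) case.
    let bound: TypeParamBound = syn::parse2(quote!('a))?;
    assert!(matches!(bound, TypeParamBound::Lifetime(_)));

    // Plain trait bound, as in the quote!(Debug) case.
    let bound: TypeParamBound = syn::parse2(quote!(Debug))?;
    assert!(matches!(bound, TypeParamBound::Trait(_)));

    // Relaxed bound, as in the quote!(?Sized) case.
    let bound: TypeParamBound = syn::parse2(quote!(?Sized))?;
    assert!(matches!(
        bound,
        TypeParamBound::Trait(t) if matches!(t.modifier, TraitBoundModifier::Maybe(_))
    ));
    Ok(())
}
```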
/third_party/rust/crates/syn/tests/macros/ |
H A D | mod.rs | 39 let tokens = crate::macros::TryIntoTokens::try_into_tokens($expr).unwrap(); 40 let $expr: $t = syn::parse_quote!(#tokens); 50 let tokens = crate::macros::TryIntoTokens::try_into_tokens($($expr)*).unwrap(); 51 let syntax_tree: $t = syn::parse_quote!(#tokens); 83 let tokens = proc_macro2::TokenStream::from_str(self)?; in try_into_tokens() 84 Ok(tokens) in try_into_tokens()
|
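The helper above lexes a `&str` into a `proc_macro2::TokenStream` (`TryIntoTokens::try_into_tokens`) and then interpolates it into `syn::parse_quote!(#tokens)` to obtain the typed syntax tree node the test asked for. A small sketch of those two steps with a made-up snippet of source text:

```rust
// Assumed deps: syn = { version = "2", features = ["full"] }, proc-macro2, quote.
use std::str::FromStr;

use proc_macro2::TokenStream;
use syn::Expr;

fn main() -> Result<(), Box<dyn std::error::Error>> {
    // Step 1: lex the text into a token stream.
    let tokens = TokenStream::from_str("1 + 2 * 3")?;

    // Step 2: interpolate the tokens into parse_quote! to build the typed node.
    let expr: Expr = syn::parse_quote!(#tokens);
    println!("{}", quote::quote!(#expr));
    Ok(())
}
```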