/third_party/pulseaudio/src/tests/ |
H A D | ladspa-dbus.py | 57 tokens = args[2].split(",") 59 for token in tokens:
|
/third_party/rust/crates/cxx/macro/src/syntax/ |
H A D | symbol.rs | 18 fn to_tokens(&self, tokens: &mut TokenStream) { in to_tokens() 19 ToTokens::to_tokens(&self.0, tokens); in to_tokens()
|
/third_party/rust/crates/cxx/syntax/ |
H A D | symbol.rs | 18 fn to_tokens(&self, tokens: &mut TokenStream) { in to_tokens() 19 ToTokens::to_tokens(&self.0, tokens); in to_tokens()
|
/third_party/gn/src/gn/ |
H A D | parser.cc |
    20   GN build files are read as sequences of tokens. While splitting the file
    21   into tokens, the next token is the longest sequence of characters that form a
    31   White space and comments are ignored except that they may separate tokens
    97   The input tokens form a syntax tree following a context-free grammar:
    291  Parser::Parser(const std::vector<Token>& tokens, Err* err) in Parser() argument
    295  for (const auto& token : tokens) { in Parser()
    315  std::unique_ptr<ParseNode> Parser::Parse(const std::vector<Token>& tokens, in Parse() argument
    317  Parser p(tokens, err); in Parse()
    323  const std::vector<Token>& tokens, in ParseExpression()
    325  Parser p(tokens, er in ParseExpression()
    322  ParseExpression( const std::vector<Token>& tokens, Err* err) ParseExpression() argument
    335  ParseValue(const std::vector<Token>& tokens, Err* err) ParseValue() argument
    [all...] |
H A D | input_file_manager.h |
    75   // the file, tokens, and parse node that this class created. The calling
    84   // associated parse nodes, tokens, and file data in memory. This function
    88   std::vector<Token>** tokens,
    132  std::vector<Token> tokens; member
|
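The parser.cc hits above quote GN's own grammar notes: while a build file is split into tokens, the scanner always takes the longest character sequence that still forms a valid token, whitespace and comments only separate tokens, and Parser::Parse then turns the token vector into a syntax tree. A minimal Python sketch of that longest-match (maximal-munch) rule (a toy scanner for illustration only, not GN's C++ tokenizer, and the operator subset here is made up):

```python
import re

# Toy maximal-munch scanner illustrating the rule quoted from parser.cc:
# at each position, the next token is the longest candidate that still
# forms a valid token. The operator list is a small made-up subset,
# not GN's real token set.
OPERATORS = sorted(["!=", "==", "<=", ">=", "&&", "||", "=", "<", ">", "!", "+", "-"],
                   key=len, reverse=True)   # try longest candidates first
IDENT = re.compile(r"[A-Za-z_][A-Za-z0-9_]*")

def tokenize(text):
    tokens, i = [], 0
    while i < len(text):
        ch = text[i]
        if ch.isspace():                    # whitespace only separates tokens
            i += 1
        elif ch == "#":                     # comments run to the end of the line
            nl = text.find("\n", i)
            i = len(text) if nl == -1 else nl
        elif (m := IDENT.match(text, i)):   # identifiers are matched greedily
            tokens.append(m.group())
            i = m.end()
        else:
            for op in OPERATORS:            # "!=" wins over "!" because of the ordering
                if text.startswith(op, i):
                    tokens.append(op)
                    i += len(op)
                    break
            else:
                raise ValueError(f"unexpected character {ch!r}")
    return tokens

print(tokenize("a != b  # longest match wins"))   # ['a', '!=', 'b']
```

Checking the two-character operators before the one-character ones is what makes "!=" come out as a single token rather than "!" followed by "=", which is the behaviour the quoted comment in parser.cc describes.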
/third_party/mesa3d/src/gallium/include/pipe/ |
H A D | p_state.h |
    286  * tokens, etc. If the driver returns 'PIPE_SHADER_IR_TGSI' for the
    288  'PIPE_SHADER_IR_TGSI' and the tokens ptr will be valid. If the driver
    290  enum to see if it is getting TGSI tokens or its preferred IR.
    303  /* TODO move tokens into union. */
    304  const struct tgsi_token *tokens; member
    314  const struct tgsi_token *tokens) in pipe_shader_state_from_tgsi()
    317  state->tokens = tokens; in pipe_shader_state_from_tgsi()
    313  pipe_shader_state_from_tgsi(struct pipe_shader_state *state, const struct tgsi_token *tokens) pipe_shader_state_from_tgsi() argument
|
/third_party/mesa3d/src/gallium/drivers/llvmpipe/ |
H A D | lp_state_fs_analysis.c |
    88   match_aero_minification_shader(const struct tgsi_token *tokens, in match_aero_minification_shader() argument
    131  tgsi_parse_init(&parse, tokens); in match_aero_minification_shader()
    456  const struct tgsi_token *tokens) in llvmpipe_fs_analyse()
    477  match_aero_minification_shader(tokens, &shader->info)) { in llvmpipe_fs_analyse()
    455  llvmpipe_fs_analyse(struct lp_fragment_shader *shader, const struct tgsi_token *tokens) llvmpipe_fs_analyse() argument
|
/third_party/mesa3d/src/gallium/drivers/d3d12/ |
H A D | d3d12_lower_point_sprite.c |
    239  const gl_state_index16 tokens[4] = { STATE_INTERNAL_DRIVER, in d3d12_lower_point_sprite() local
    259  memcpy(state.uniform->state_slots[0].tokens, tokens, in d3d12_lower_point_sprite()
    260  sizeof(state.uniform->state_slots[0].tokens)); in d3d12_lower_point_sprite()
|
H A D | d3d12_nir_passes.c |
    47   const gl_state_index16 tokens[STATE_LENGTH] = { STATE_INTERNAL_DRIVER, var_enum }; in d3d12_get_state_var() local
    56   memcpy(var->state_slots[0].tokens, tokens, in d3d12_get_state_var()
    57   sizeof(var->state_slots[0].tokens)); in d3d12_get_state_var()
    546  variable->state_slots[0].tokens[0] != STATE_INTERNAL_DRIVER) in lower_instr()
    549  enum d3d12_state_var var = variable->state_slots[0].tokens[1]; in lower_instr()
    590  var->state_slots[0].tokens[0] == STATE_INTERNAL_DRIVER) { in d3d12_lower_state_vars()
    624  var->state_slots[0].tokens[0] == STATE_INTERNAL_DRIVER) { in d3d12_lower_state_vars()
    630  const gl_state_index16 tokens[STATE_LENGTH] = { STATE_INTERNAL_DRIVER }; in d3d12_lower_state_vars() local
    640  memcpy(ubo->state_slots[0].tokens, token in d3d12_lower_state_vars()
    [all...] |
/third_party/rust/crates/bindgen/bindgen/codegen/ |
H A D | helpers.rs |
    119  let mut tokens = quote! {}; in bitfield_unit() variables
    122  tokens.append_all(quote! { root:: }); in bitfield_unit()
    126  tokens.append_all(quote! { in bitfield_unit()
    130  tokens in bitfield_unit()
|
/third_party/rust/crates/syn/src/ |
H A D | error.rs |
    16   /// Error returned when a Syn parser cannot parse the input tokens.
    178  /// `tokens` which is a syntax tree node. This allows the resulting `Error`
    179  /// to attempt to span all tokens inside of `tokens`. While you would
    189  pub fn new_spanned<T: ToTokens, U: Display>(tokens: T, message: U) -> Self { in new_spanned()
    190  return new_spanned(tokens.into_token_stream(), message.to_string()); in new_spanned()
    192  fn new_spanned(tokens: TokenStream, message: String) -> Error { in new_spanned()
    193  let mut iter = tokens.into_iter(); in new_spanned()
|
/third_party/mesa3d/src/gallium/drivers/r300/ |
H A D | r300_vs.c |
    177  tgsi_scan_shader(vs->state.tokens, &vs->shader->info); in r300_init_vs_outputs()
    211  tgsi_dump(shader->state.tokens, 0); in r300_translate_vertex_shader()
    219  r300_tgsi_to_rc(&ttr, shader->state.tokens); in r300_translate_vertex_shader()
|
/third_party/python/Doc/tools/ |
H A D | rstlint.py |
    293  tokens = line.split("``")
    294  for token_no, token in enumerate(tokens):
    299  if token_no == len(tokens) - 1:
|
/third_party/jinja2/ |
H A D | ext.py |
    111  to filter tokens returned. This method has to return an iterable of
    741  self, tokens: t.Sequence[t.Tuple[int, str, str]], comment_tags: t.Sequence[str]
    743  self.tokens = tokens
    751  self.tokens[self.offset : offset]
    767  for idx, (token_lineno, _, _) in enumerate(self.tokens[self.offset :]):
    770  return self.find_backwards(len(self.tokens))
    853  tokens = list(environment.lex(environment.preprocess(source)))
    860  finder = _CommentFinder(tokens, comment_tags)
|
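The ext.py hits trace Jinja2's message extraction path: babel_extract lexes the preprocessed template source into (lineno, token_type, value) tuples and hands the list to _CommentFinder, which scans it for translator comments. A small sketch of that lexing step using the public Environment API (the template string below is invented for illustration):

```python
from jinja2 import Environment

env = Environment()
source = "{# i18n: greeting shown on the landing page #}\n{{ _('Hello, %(name)s!') }}\n"

# Same call chain the extractor uses: preprocess the source, then lex it
# into (lineno, token_type, value) tuples.
tokens = list(env.lex(env.preprocess(source)))

for lineno, token_type, value in tokens:
    print(lineno, token_type, repr(value))

# The 'comment' tokens in this stream are what _CommentFinder later
# compares against the configured comment_tags (e.g. "i18n:").
```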
/third_party/mesa3d/src/gallium/drivers/svga/ |
H A D | svga_state_fs.c |
    62   const struct tgsi_token *tokens; in get_dummy_fragment_shader() local
    75   tokens = ureg_get_tokens(ureg, NULL); in get_dummy_fragment_shader()
    79   return tokens; in get_dummy_fragment_shader()
    100  FREE((void *) fs->base.tokens); in svga_get_compiled_dummy_fragment_shader()
    101  fs->base.tokens = dummy; in svga_get_compiled_dummy_fragment_shader()
|
/third_party/python/Lib/lib2to3/pgen2/ |
H A D | tokenize.py |
    7    text into Python tokens. It accepts a readline-like method which is called
    18   that it produces COMMENT tokens for comments and gives type OP for all
    24   are the same, except instead of generating tokens, tokeneater is a callback
    174  self.tokens = []
    183  self.tokens.append(" " * col_offset)
    192  self.tokens.append(token)
    197  return "".join(self.tokens)
    202  toks_append = self.tokens.append
    315  """Transform tokens back into Python source code.
    319  only two tokens ar
    [all...] |
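The tokenize.py hits combine the pgen2 tokenizer's docstring (readline-driven input, COMMENT tokens, a single OP type for operators) with the Untokenizer helper whose self.tokens list is joined back into source text. A minimal round trip through the module's generate_tokens/untokenize pair (lib2to3 is deprecated and has been removed from recent CPython releases, so treat this as a sketch of the legacy API):

```python
import io
from lib2to3.pgen2 import token, tokenize

source = "x = 1  # answer\n"

# generate_tokens() pulls lines through a readline-like callable and
# yields 5-tuples: (type, string, start, end, logical_line).
toks = list(tokenize.generate_tokens(io.StringIO(source).readline))

for tok_type, tok_str, start, end, line in toks:
    print(token.tok_name[tok_type], repr(tok_str))
# The comment comes back as a COMMENT token and '=' as a generic OP,
# as the docstring quoted above says.

# untokenize() feeds the tuples to Untokenizer, which pads with
# " " * col_offset and joins its self.tokens list back into text.
print(tokenize.untokenize(toks) == source)  # True
```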
/third_party/glslang/glslang/MachineIndependent/preprocessor/ |
H A D | PpContext.h |
    215  virtual bool peekContinuedPasting(int) { return false; } // true when non-spaced tokens can paste in peekContinuedPasting()
    220  // Will be called when we start reading tokens from this instance
    222  // Will be called when we do not read tokens from this instance anymore
    286  // separates numeric-literals plus bad suffix into two tokens, which in peekContinuedPasting()
    362  // that are out of tokens, down until an input source is found that has a token.
    363  // Return EndOfInput when there are no more tokens to be found by doing this.
    509  tokens(t), in tTokenInput()
    513  int token = tokens->getToken(pp->parseContext, ppToken);
    515  if (tokens->atEnd() && token == PpAtomIdentifier) {
    525  virtual bool peekPasting() override { return tokens
    528  TokenStream* tokens; global() member in glslang::TPpContext::tTokenInput
    [all...] |
/third_party/mesa3d/src/compiler/nir/ |
H A D | nir_lower_pntc_ytransform.c | 54 memcpy(var->state_slots[0].tokens, state->pntc_state_tokens, in get_pntc_transform() 55 sizeof(var->state_slots[0].tokens)); in get_pntc_transform()
|
H A D | nir_lower_point_size_mov.c | 50 memcpy(in->state_slots[0].tokens, in lower_impl() 52 sizeof(in->state_slots[0].tokens)); in lower_impl()
|
/third_party/node/tools/msvs/ |
H A D | find_python.cmd | 38 for /f "skip=2 tokens=1* delims=)" %%a in ('reg query "%1\InstallPath" /ve /t REG_SZ 2^> nul') do (
39 for /f "tokens=1*" %%c in ("%%b") do (
|
/third_party/mesa3d/src/gallium/drivers/softpipe/ |
H A D | sp_state.h | 74 const struct tgsi_token *tokens; member 136 struct tgsi_token *tokens; member
|
/third_party/rust/crates/syn/tests/ |
H A D | test_visibility.rs | 105 let tokens = TokenStream::from_iter(vec![ in test_empty_group_vis() 125 snapshot!(tokens as DeriveInput, @r###" in test_empty_group_vis()
|
H A D | test_parse_buffer.rs | 72 let tokens = TokenStream::from_iter(vec![ in trailing_empty_none_group() 91 parse.parse2(tokens).unwrap(); in trailing_empty_none_group()
|
/third_party/rust/crates/quote/src/ |
H A D | runtime.rs |
    179  fn to_tokens(&self, tokens: &mut TokenStream) { in to_tokens()
    180  self.0.to_tokens(tokens); in to_tokens()
    242  pub fn push_group(tokens: &mut TokenStream, delimiter: Delimiter, inner: TokenStream) { in push_group()
    243  tokens.append(Group::new(delimiter, inner)); in push_group()
    248  tokens: &mut TokenStream, in push_group_spanned()
    255  tokens.append(g); in push_group_spanned()
    259  pub fn parse(tokens: &mut TokenStream, s: &str) { in parse()
    261  tokens.extend(iter::once(s)); in parse()
    265  pub fn parse_spanned(tokens: &mut TokenStream, span: Span, s: &str) { in parse_spanned()
    267  tokens in parse_spanned()
    [all...] |
/third_party/vk-gl-cts/modules/egl/ |
H A D | teglInfoTests.cpp | 94 std::vector<std::string> tokens = de::splitString(result, ' '); in validateString() local 101 const std::vector<std::string> versionTokens = de::splitString(tokens[0], '.'); in validateString()
|