Searched refs:tokens (Results 101 - 125 of 644) sorted by relevance

/third_party/node/deps/npm/node_modules/@pkgjs/parseargs/examples/
limit-long-syntax.js
3 // This is an example of using tokens to add a custom behaviour.
20 const { values, tokens } = parseArgs({ options, tokens: true });
22 const badToken = tokens.find((token) => token.kind === 'option' &&
no-repeated-options.js
3 // This is an example of using tokens to add a custom behaviour.
15 const { values, tokens } = parseArgs({ options, tokens: true });
18 tokens.forEach((token) => {
is-default-value.js
13 const { values, tokens } = parseArgs({ options, tokens: true });
15 const isFileDefault = !tokens.some((token) => token.kind === 'option' &&
negate.js
17 const { values, tokens } = parseArgs({ options, tokens: true });
19 // Reprocess the option tokens and overwrite the returned values.
20 tokens
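
All four parseArgs examples above share one pattern: pass tokens: true, then post-process the returned token array. A minimal TypeScript sketch of that pattern, using Node's util.parseArgs; the option set and the repeated-option check are illustrative, loosely modelled on no-repeated-options.js rather than copied from it:

import { parseArgs } from 'node:util';

// Hypothetical option set; each example file above defines its own.
const options = {
  file: { type: 'string' },
  verbose: { type: 'boolean', short: 'v' },
} as const;

// tokens: true asks parseArgs to also return the raw parse tokens,
// each tagged with kind: 'option', 'positional', or 'option-terminator'.
const { values, tokens } = parseArgs({
  args: ['--file', 'a.txt', '--file', 'b.txt'],
  options,
  tokens: true,
});

console.log(values.file); // 'b.txt': the last occurrence wins

// Reject repeated options, roughly what no-repeated-options.js does.
const seen = new Set<string>();
for (const token of tokens) {
  if (token.kind !== 'option') continue;
  if (seen.has(token.name)) {
    throw new Error(`option '--${token.name}' used more than once`);
  }
  seen.add(token.name);
}
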
/third_party/mesa3d/src/gallium/auxiliary/tgsi/
tgsi_dump.h
45 const struct tgsi_token *tokens,
51 tgsi_dump_to_file(const struct tgsi_token *tokens, uint flags, FILE *file);
55 const struct tgsi_token *tokens,
/third_party/typescript/tests/baselines/reference/
arrayAssignmentTest6.js
9 tokens: IToken[];
16 public tokenize(line:string, tokens:IToken[], includeStates:boolean):ILineTokens {
29 Bug.prototype.tokenize = function (line, tokens, includeStates) {
objectLitArrayDeclNoNew.js
12 tokens: IToken[];
23 tokens: Gar[],//IToken[], // Missing new. Correct syntax is: tokens: new IToken[]
44 tokens: Gar[],
/third_party/icu/tools/unicode/c/genprops/
namespropsbuilder.cpp
194 static int16_t tokens[LEADBYTE_LIMIT*256]; variable
225 tokens[i]=0; in NamesPropsBuilder()
508 if(tokens[i]==-1) { in compress()
516 /* do we need double-byte tokens? */ in compress()
518 /* no, single-byte tokens are enough */ in compress()
521 if(tokens[i]!=-1) { in compress()
522 tokens[i]=wordNumber; in compress()
524 printf("tokens[0x%03x]: word%8ld \"%.*s\"\n", in compress()
534 * The tokens that need two token bytes in compress()
553 /* how many tokens an in compress()
[all...]
/third_party/mesa3d/src/gallium/auxiliary/draw/
draw_vs_exec.c
75 if (evs->machine->Tokens != shader->state.tokens) { in vs_exec_prepare()
77 shader->state.tokens, in vs_exec_prepare()
213 FREE((void*) dvs->state.tokens); in vs_exec_delete()
229 vs->base.state.tokens = nir_to_tgsi(state->ir.nir, draw->pipe->screen); in draw_create_vs_exec()
234 /* we need to keep a local copy of the tokens */ in draw_create_vs_exec()
235 vs->base.state.tokens = tgsi_dup_tokens(state->tokens); in draw_create_vs_exec()
236 if (!vs->base.state.tokens) { in draw_create_vs_exec()
242 tgsi_scan_shader(vs->base.state.tokens, &vs->base.info); in draw_create_vs_exec()
/third_party/rust/crates/log/src/
serde.rs
303 for &(value, tokens) in &cases { in test_level_de_bytes()
304 assert_de_tokens(&value, &tokens); in test_level_de_bytes()
318 for &(value, tokens) in &cases { in test_level_de_variant_index()
319 assert_de_tokens(&value, &tokens); in test_level_de_variant_index()
373 for &(value, tokens) in &cases { in test_level_filter_de_bytes()
374 assert_de_tokens(&value, &tokens); in test_level_filter_de_bytes()
389 for &(value, tokens) in &cases { in test_level_filter_de_variant_index()
390 assert_de_tokens(&value, &tokens); in test_level_filter_de_variant_index()
/third_party/protobuf/csharp/src/Google.Protobuf/
JsonTokenizer.cs
45 /// This tokenizer is stateful, and only returns "useful" tokens - names, values etc.
46 /// It does not create tokens for the separator between names and values, or for the comma
48 /// tokens it produces are appropriate. For example, it would never produce "start object, end array."
66 /// Creates a tokenizer that first replays the given list of tokens, then continues reading
71 internal static JsonTokenizer FromReplayedTokens(IList<JsonToken> tokens, JsonTokenizer continuation) in FromReplayedTokens() argument
73 return new JsonReplayTokenizer(tokens, continuation); in FromReplayedTokens()
169 /// Tokenizer which first exhausts a list of tokens, then consults another tokenizer.
173 private readonly IList<JsonToken> tokens; field in Google.Protobuf.JsonTokenizer.JsonReplayTokenizer
177 internal JsonReplayTokenizer(IList<JsonToken> tokens, JsonTokenizer nextTokenizer) in JsonReplayTokenizer() argument
179 this.tokens in JsonReplayTokenizer()
[all...]
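
The JsonTokenizer.cs doc comments above describe two properties worth restating: the tokenizer is stateful and emits only semantically meaningful tokens, and FromReplayedTokens wraps a tokenizer so that a buffered token list is drained before live reading resumes. A rough TypeScript sketch of that replay pattern follows; the Token and Tokenizer shapes are invented for illustration, the real implementation being the C# JsonReplayTokenizer above:

interface Tokenizer<Token> {
  next(): Token | undefined; // undefined once the input is exhausted
}

// Serves the buffered tokens first, then falls through to the wrapped
// tokenizer: the same shape as JsonReplayTokenizer's list + continuation.
class ReplayTokenizer<Token> implements Tokenizer<Token> {
  private index = 0;

  constructor(
    private readonly tokens: readonly Token[],
    private readonly continuation: Tokenizer<Token>,
  ) {}

  next(): Token | undefined {
    return this.index < this.tokens.length
      ? this.tokens[this.index++]
      : this.continuation.next();
  }
}
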
/kernel/linux/linux-6.6/tools/arch/x86/kcpuid/
kcpuid.c
311 char *tokens[6]; in parse_line() local
325 tokens[i] = strtok(str, ","); in parse_line()
326 if (!tokens[i]) in parse_line()
330 tokens[5] = strtok(str, "\n"); in parse_line()
331 if (!tokens[5]) in parse_line()
335 index = strtoull(tokens[0], NULL, 0); in parse_line()
354 sub = strtoul(tokens[1], NULL, 0); in parse_line()
359 buf = tokens[2]; in parse_line()
376 buf = tokens[3]; in parse_line()
387 strcpy(bdesc->simp, tokens[ in parse_line()
[all...]
/third_party/littlefs/scripts/
prettyasserts.py
214 tokens = []
221 tokens.append((None, data[:m.start()], line, col))
222 tokens.append((m.lastgroup, m.group(), line, col))
225 tokens.append((None, data, line, col))
227 self.tokens = tokens
231 if self.off < len(self.tokens):
232 token = self.tokens[self.off]
248 raise ParseFailure(patterns, self.tokens[self.off:])
422 for i in range(p.off, len(p.tokens))
[all...]
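
prettyasserts.py builds a flat list of (kind, text, line, column) tokens by scanning regexes over the input, keeping any unmatched text as kind-less tokens; its parser then walks self.tokens by offset and reports the leftover tokens on failure. A rough TypeScript sketch of that tokenizing shape; the lexical grammar here is a placeholder, not the script's real patterns:

// Placeholder grammar; prettyasserts.py defines its own patterns.
const TOKEN_RE = /(?<ident>[A-Za-z_]\w*)|(?<op>==|!=|[()])|(?<ws>\s+)/y;

type Tok = { kind: string | null; text: string; line: number; col: number };

function tokenize(data: string): Tok[] {
  const tokens: Tok[] = [];
  let line = 1, col = 1, pos = 0;
  while (pos < data.length) {
    TOKEN_RE.lastIndex = pos;
    const m = TOKEN_RE.exec(data); // sticky: matches at pos or not at all
    let kind: string | null = null;
    let text: string;
    if (m) {
      // The named group that matched becomes the token kind.
      kind = Object.keys(m.groups!).find((k) => m.groups![k] !== undefined)!;
      text = m[0];
    } else {
      text = data[pos]; // unmatched text is kept as a kind-less token
    }
    tokens.push({ kind, text, line, col });
    for (const ch of text) {
      if (ch === '\n') { line += 1; col = 1; } else { col += 1; }
    }
    pos += text.length;
  }
  return tokens;
}
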
/third_party/rust/crates/cxx/gen/build/src/syntax/
doc.rs
39 fn to_tokens(&self, tokens: &mut TokenStream) { in to_tokens()
41 tokens.extend(quote! { #(#[doc = #fragments])* }); in to_tokens()
43 tokens.extend(quote! { #[doc(hidden)] }); in to_tokens()
/third_party/rust/crates/cxx/gen/lib/src/syntax/
doc.rs
39 fn to_tokens(&self, tokens: &mut TokenStream) { in to_tokens()
41 tokens.extend(quote! { #(#[doc = #fragments])* }); in to_tokens()
43 tokens.extend(quote! { #[doc(hidden)] }); in to_tokens()
/third_party/rust/crates/cxx/macro/src/syntax/
doc.rs
39 fn to_tokens(&self, tokens: &mut TokenStream) { in to_tokens()
41 tokens.extend(quote! { #(#[doc = #fragments])* }); in to_tokens()
43 tokens.extend(quote! { #[doc(hidden)] }); in to_tokens()
/third_party/rust/crates/cxx/syntax/
doc.rs
39 fn to_tokens(&self, tokens: &mut TokenStream) { in to_tokens()
41 tokens.extend(quote! { #(#[doc = #fragments])* }); in to_tokens()
43 tokens.extend(quote! { #[doc(hidden)] }); in to_tokens()
/third_party/rust/crates/cxx/gen/cmd/src/syntax/
doc.rs
39 fn to_tokens(&self, tokens: &mut TokenStream) { in to_tokens()
41 tokens.extend(quote! { #(#[doc = #fragments])* }); in to_tokens()
43 tokens.extend(quote! { #[doc(hidden)] }); in to_tokens()
/third_party/rust/crates/cxx/macro/src/
type_id.rs
12 fn to_tokens(&self, tokens: &mut TokenStream) { in to_tokens()
14 Crate::Cxx => tokens.extend(quote!(::cxx)), in to_tokens()
15 Crate::DollarCrate(krate) => krate.to_tokens(tokens), in to_tokens()
/third_party/rust/crates/syn/src/
file.rs
120 fn to_tokens(&self, tokens: &mut TokenStream) { in to_tokens()
121 tokens.append_all(self.attrs.inner()); in to_tokens()
122 tokens.append_all(&self.items); in to_tokens()
/third_party/rust/crates/syn/tests/
test_grouping.rs
11 let tokens: TokenStream = TokenStream::from_iter(vec![ in test_grouping()
26 assert_eq!(tokens.to_string(), "1i32 + 2i32 + 3i32 * 4i32"); in test_grouping()
28 snapshot!(tokens as Expr, @r###" in test_grouping()
/third_party/json/include/nlohmann/detail/
exceptions.hpp
66 std::vector<std::string> tokens; in diagnostics() local
77 tokens.emplace_back(std::to_string(i)); in diagnostics()
90 tokens.emplace_back(element.first.c_str()); in diagnostics()
110 if (tokens.empty()) in diagnostics()
115 auto str = std::accumulate(tokens.rbegin(), tokens.rend(), std::string{}, in diagnostics()
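
In the exceptions.hpp snippet, path tokens are collected while walking from the failing element up to the root, so they arrive in reverse order; the std::accumulate over tokens.rbegin()..tokens.rend() then folds them back into a root-first "/a/0/b"-style JSON pointer for the diagnostic message. The same fold in TypeScript, as a sketch of the idea only, not the nlohmann API:

// Tokens as collected child-to-root, e.g. for users[2].name:
const tokens = ['name', '2', 'users'];

// Fold in reverse order to rebuild the root-first JSON pointer.
const pointer = [...tokens]
  .reverse()
  .reduce((acc, token) => acc + '/' + token, '');

console.log(pointer); // "/users/2/name"
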
/third_party/rust/crates/bindgen/bindgen/codegen/
impl_debug.rs
14 let mut tokens = vec![]; in gen_debug_impl() variables
33 tokens.extend(toks); in gen_debug_impl()
41 tokens.insert(0, quote! { #format_string }); in gen_debug_impl()
47 write!(f, #( #tokens ),*) in gen_debug_impl()
52 /// A trait for the things which we can codegen tokens that contribute towards a
92 let mut tokens = vec![]; in impl_debug() variables
102 tokens.push(quote! { in impl_debug()
108 Some((format_string, tokens)) in impl_debug()
/third_party/rust/crates/clap/clap_derive/src/
attr.rs
62 AttrValue::LitStr(tokens) => Ok(tokens), in lit_str_or_abort()
182 fn to_tokens(&self, tokens: &mut TokenStream) { in to_tokens()
184 Self::LitStr(t) => t.to_tokens(tokens), in to_tokens()
185 Self::Expr(t) => t.to_tokens(tokens), in to_tokens()
188 t.to_tokens(tokens) in to_tokens()
/third_party/python/Lib/lib2to3/pgen2/
driver.py
38 def parse_tokens(self, tokens, debug=False):
39 """Parse a series of tokens and return the syntax tree."""
47 for quintuple in tokens:
88 tokens = tokenize.generate_tokens(stream.readline)
89 return self.parse_tokens(tokens, debug)
102 tokens = tokenize.generate_tokens(io.StringIO(text).readline)
103 return self.parse_tokens(tokens, debug)
