/third_party/json/tests/src/ |
H A D | unit-class_lexer.cpp |
    18  json::lexer::token_type scan_string(const char* s, bool ignore_comments = false);
    19  json::lexer::token_type scan_string(const char* s, const bool ignore_comments) in scan_string()
    22  return nlohmann::detail::lexer<json, decltype(ia)>(std::move(ia), ignore_comments).scan(); // NOLINT(hicpp-move-const-arg,performance-move-const-arg) in scan_string()
    30  auto lexer = nlohmann::detail::lexer<json, decltype(ia)>(std::move(ia), ignore_comments); // NOLINT(hicpp-move-const-arg,performance-move-const-arg) in get_error_message() local
    31  lexer.scan(); in get_error_message()
    32  return lexer.get_error_message(); in get_error_message()
    35  TEST_CASE("lexer class")
    41  CHECK((scan_string("[") == json::lexer::token_type::begin_array));
    42  CHECK((scan_string("]") == json::lexer [all...] |
/third_party/ninja/src/ |
H A D | lexer_test.cc |
    15  #include "lexer.h"
    23  Lexer lexer("plain text $var $VaR ${x}\n"); in TEST()
    26  EXPECT_TRUE(lexer.ReadVarValue(&eval, &err)); in TEST()
    33  Lexer lexer("$ $$ab c$: $\ncde\n"); in TEST()
    36  EXPECT_TRUE(lexer.ReadVarValue(&eval, &err)); in TEST()
    43  Lexer lexer("foo baR baz_123 foo-bar"); in TEST()
    45  EXPECT_TRUE(lexer.ReadIdent(&ident)); in TEST()
    47  EXPECT_TRUE(lexer.ReadIdent(&ident)); in TEST()
    49  EXPECT_TRUE(lexer.ReadIdent(&ident)); in TEST()
    51  EXPECT_TRUE(lexer in TEST() [all...] |
/third_party/python/Lib/ |
H A D | netrc.py |
    81  lexer = _netrclex(fp)
    84  saved_lineno = lexer.lineno
    85  toplevel = tt = lexer.get_token()
    89  if lexer.lineno == saved_lineno and len(tt) == 1:
    90  lexer.instream.readline()
    93  entryname = lexer.get_token()
    97  entryname = lexer.get_token()
    100  line = lexer.instream.readline()
    104  file, lexer.lineno)
    114  "bad toplevel token %r" % tt, file, lexer [all...] |
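
The netrc.py hits above are from CPython's netrc parser, which drives a small shlex-based tokenizer (`_netrclex`) over the .netrc file and raises NetrcParseError on a bad toplevel token. As a hedged sketch of how that machinery is normally reached from user code (the file path and host below are made-up placeholders):

```python
# Minimal sketch of consuming Python's netrc module; its internal
# _netrclex lexer produces the tokens matched in the hits above.
# "example.netrc" and "example.com" are hypothetical placeholders.
import netrc

try:
    auth = netrc.netrc("example.netrc")          # parse with the internal lexer
    entry = auth.authenticators("example.com")   # (login, account, password) or None
    if entry is not None:
        login, account, password = entry
        print("login for example.com:", login)
except netrc.NetrcParseError as exc:
    # Raised from the "bad toplevel token" path visible in the hits above.
    print("could not parse netrc:", exc)
```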
/third_party/skia/src/sksl/lex/ |
H A D | Main.cpp |
    34  static void writeH(const DFA& dfa, const char* lexer, const char* token, in writeH() argument
    39  out << "#ifndef SKSL_" << lexer << "\n"; in writeH() local
    40  out << "#define SKSL_" << lexer << "\n"; in writeH() local
    69  class )" << lexer << R"( { in writeH()
    104  static void writeCPP(const DFA& dfa, const char* lexer, const char* token, const char* include, in writeCPP() argument
    143  out << token << " " << lexer << "::next() {"; in writeCPP() local
    145  // note that we cheat here: normally a lexer needs to worry about the case in writeCPP()
    184  static void process(const char* inPath, const char* lexer, const char* token, const char* hPath, in process() argument
    222  writeH(dfa, lexer, token, tokens, hPath); in process()
    223  writeCPP(dfa, lexer, toke in process() [all...] |
/third_party/skia/third_party/externals/tint/tools/src/cmd/intrinsic-gen/lexer/ |
H A D | lexer.go |
    15  // Package lexer provides a basic lexer for the Tint intrinsic definition
    17  package lexer
    28  l := lexer{
    39  type lexer struct { type
    46  func (l *lexer) lex() error {
    106  func (l *lexer) next() rune {
    123  func (l *lexer) skip(n int) {
    130  func (l *lexer) peek(i int) rune {
    142  func (l *lexer) coun [all...] |
/third_party/PyYAML/examples/pygments-lexer/ |
H A D | yaml.py |
    13  from pygments.lexer import \
    22  """Indentation context for the YAML lexer."""
    34  def callback(lexer, match, context):
    44  def callback(lexer, match, context):
    56  def callback(lexer, match, context):
    78  def callback(lexer, match, context):
    91  def callback(lexer, match, context):
    108  def callback(lexer, match, context):
    125  def callback(lexer, match, context):
    145  def callback(lexer, matc [all...] |
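
The yaml.py hits come from PyYAML's example Pygments lexer, where each `callback(lexer, match, context)` implements one indentation-aware tokenizing rule. A minimal sketch of how a Pygments lexer of this kind is driven, assuming the stock YamlLexer shipped with Pygments rather than the example module itself:

```python
# Sketch of running a Pygments YAML lexer over a small document.
# Uses pygments.lexers.YamlLexer; the example module above defines
# its own variant built from per-rule callbacks.
from pygments.lexers import YamlLexer

source = "key: value\nitems:\n  - one\n  - two\n"

# get_tokens() yields (token_type, text) pairs produced by the
# lexer's state-machine rules.
for token_type, text in YamlLexer().get_tokens(source):
    if text.strip():
        print(token_type, repr(text))
```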
/third_party/vk-gl-cts/external/vulkan-docs/src/config/chunkindex/ |
H A D | lunr.js |
    3083  lunr.QueryLexer.lexField = function (lexer) {
    3084  lexer.backup()
    3085  lexer.emit(lunr.QueryLexer.FIELD)
    3086  lexer.ignore()
    3090  lunr.QueryLexer.lexTerm = function (lexer) {
    3091  if (lexer.width() > 1) {
    3092  lexer.backup()
    3093  lexer.emit(lunr.QueryLexer.TERM)
    3096  lexer.ignore()
    3098  if (lexer [all...] |
/third_party/skia/third_party/externals/spirv-tools/utils/vscode/src/parser/ |
H A D | parser.go |
    164  type lexer struct { type
    178  func (l *lexer) err(tok *Token, msg string, args ...interface{}) {
    192  func (l *lexer) next() rune {
    213  func (l *lexer) save() lexerState {
    217  // restore restores the current lexer state with s.
    218  func (l *lexer) restore(s lexerState) {
    223  // The lexer *must* know the next token is a PIdent before calling.
    224  func (l *lexer) pident() {
    227  log.Fatalf("lexer expected '%%', got '%v'", r)
    244  func (l *lexer) numberOrIden [all...] |
/third_party/skia/third_party/externals/swiftshader/third_party/SPIRV-Tools/utils/vscode/src/parser/ |
H A D | parser.go |
    164  type lexer struct { type
    178  func (l *lexer) err(tok *Token, msg string, args ...interface{}) {
    192  func (l *lexer) next() rune {
    213  func (l *lexer) save() lexerState {
    217  // restore restores the current lexer state with s.
    218  func (l *lexer) restore(s lexerState) {
    223  // The lexer *must* know the next token is a PIdent before calling.
    224  func (l *lexer) pident() {
    227  log.Fatalf("lexer expected '%%', got '%v'", r)
    244  func (l *lexer) numberOrIden [all...] |
/third_party/spirv-tools/utils/vscode/src/parser/ |
H A D | parser.go |
    164  type lexer struct { type
    178  func (l *lexer) err(tok *Token, msg string, args ...interface{}) {
    192  func (l *lexer) next() rune {
    213  func (l *lexer) save() lexerState {
    217  // restore restores the current lexer state with s.
    218  func (l *lexer) restore(s lexerState) {
    223  // The lexer *must* know the next token is a PIdent before calling.
    224  func (l *lexer) pident() {
    227  log.Fatalf("lexer expected '%%', got '%v'", r)
    244  func (l *lexer) numberOrIden [all...] |
/third_party/json/include/nlohmann/detail/input/ |
H A D | lexer.hpp |
    30  // lexer //
    111  class lexer : public lexer_base<BasicJsonType> class
    123  explicit lexer(InputAdapterType&& adapter, bool ignore_comments_ = false) noexcept
    130  lexer(const lexer&) = delete;
    131  lexer(lexer&&) = default; // NOLINT(hicpp-noexcept-move,performance-noexcept-move-constructor)
    132  lexer& operator=(lexer&) = delete;
    133  lexer [all...] |
H A D | parser.hpp |
    21  #include <nlohmann/detail/input/lexer.hpp>
    66  using lexer_t = lexer<BasicJsonType, InputAdapterType>;
    460  /// get next token from lexer
    500  /// the lexer
|
/third_party/ninja/misc/ |
H A D | ninja.vim |
    11  " ninja lexer and parser are at
    12  " https://github.com/ninja-build/ninja/blob/master/src/lexer.in.cc
    31  " lexer.in.cc, ReadToken() and manifest_parser.cc, Parse()
    53  " lexer.in.cc, ReadEvalString()
|
/third_party/skia/third_party/externals/angle2/src/compiler/preprocessor/ |
H A D | preprocessor.y |
    68  angle::pp::Lexer *lexer;
    343  context->lexer->lex(token);
    428  ExpressionParser::ExpressionParser(Lexer *lexer, Diagnostics *diagnostics)
    429  : mLexer(lexer),
    442  context.lexer = mLexer;
|
H A D | MacroExpander.cpp |
    87  MacroExpander::MacroExpander(Lexer *lexer, in MacroExpander() argument
    92  : mLexer(lexer), in MacroExpander()
    435  TokenLexer lexer(&arg); in collectMacroArgs()
    444  MacroExpander expander(&lexer, mMacroSet, mDiagnostics, nestedSettings, mParseDefined); in collectMacroArgs()
|
H A D | ExpressionParser.h | 31 ExpressionParser(Lexer *lexer, Diagnostics *diagnostics);
|
/third_party/skia/third_party/externals/swiftshader/src/OpenGL/compiler/preprocessor/ |
H A D | ExpressionParser.y |
    80  pp::Lexer* lexer;
    355  context->lexer->lex(token);
    438  ExpressionParser::ExpressionParser(Lexer *lexer, Diagnostics *diagnostics)
    439  : mLexer(lexer),
    452  context.lexer = mLexer;
|
H A D | MacroExpander.cpp |
    96  MacroExpander::MacroExpander(Lexer *lexer, in MacroExpander() argument
    101  : mLexer(lexer), in MacroExpander()
    441  TokenLexer lexer(&arg); in collectMacroArgs()
    448  MacroExpander expander(&lexer, mMacroSet, mDiagnostics, mParseDefined, mAllowedMacroExpansionDepth - 1); in collectMacroArgs()
|
H A D | ExpressionParser.h | 35 ExpressionParser(Lexer *lexer, Diagnostics *diagnostics);
|
/third_party/node/tools/dep_updaters/ |
H A D | update-cjs-module-lexer.sh |
    3  # Shell script to update cjs-module-lexer in the source tree to a specific version
    14  const res = await fetch('https://api.github.com/repos/nodejs/cjs-module-lexer/tags');
    22  CURRENT_VERSION=$("$NODE" -p "require('./deps/cjs-module-lexer/package.json').version")
    27  echo "Skipped because cjs-module-lexer is on the latest version."
    47  "$NODE" "$NPM" install --global-style --no-bin-links --ignore-scripts "cjs-module-lexer@$NEW_VERSION"
    49  rm -rf "$DEPS_DIR/cjs-module-lexer"
    51  mv node_modules/cjs-module-lexer "$DEPS_DIR/cjs-module-lexer"
    55  echo "Please git add cjs-module-lexer, commit the new version:"
    57  echo "$ git add -A deps/cjs-module-lexer sr [all...] |
/third_party/node/deps/v8/third_party/jinja2/ |
H A D | lexer.py |
    2  """Implements a Jinja / Python combination lexer. The ``Lexer`` class
    20  # environments with the same lexer
    400  argument as :meth:`jinja2.lexer.Token.test`.
    424  """Return a lexer which is probably cached."""
    439  lexer = _lexer_cache.get(key)
    440  if lexer is None:
    441  lexer = Lexer(environment)
    442  _lexer_cache[key] = lexer
    443  return lexer
    460  """Class that implements a lexer fo [all...] |
H A D | environment.py |
    46  from .lexer import get_lexer
    47  from .lexer import TokenStream
    324  # lexer / parser information
    443  lexer = property(get_lexer, doc="The lexer for this environment.") variable in Environment
    553  return self.lexer.tokeniter(source, name, filename)
    570  for all the extensions. Returns a :class:`~jinja2.lexer.TokenStream`.
    573  stream = self.lexer.tokenize(source, name, filename, state)
|
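
In the environment.py hits above, the Environment exposes its cached lexer as a property and wraps it in `lex()` / `_tokenize()`. A short sketch of reaching that lexer through Jinja2's public API:

```python
# Sketch of tokenizing a template via jinja2's public Environment API,
# which delegates to the cached Lexer shown in the hits above.
from jinja2 import Environment

env = Environment()
source = "Hello {{ name }}!{% if admin %} (admin){% endif %}"

# Environment.lex() yields (lineno, token_type, value) tuples
# produced by env.lexer, the property visible in the hits above.
for lineno, token_type, value in env.lex(source):
    print(lineno, token_type, repr(value))
```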
/third_party/node/tools/inspector_protocol/jinja2/ |
H A D | lexer.py |
    3  jinja2.lexer
    6  This module implements a Jinja / Python combination lexer. The
    26  # environments with the same lexer
    372  argument as :meth:`jinja2.lexer.Token.test`.
    392  """Return a lexer which is probably cached."""
    405  lexer = _lexer_cache.get(key)
    406  if lexer is None:
    407  lexer = Lexer(environment)
    408  _lexer_cache[key] = lexer
    409  return lexer [all...] |
/third_party/skia/third_party/externals/jinja2/ |
H A D | lexer.py |
    2  """Implements a Jinja / Python combination lexer. The ``Lexer`` class
    20  # environments with the same lexer
    400  argument as :meth:`jinja2.lexer.Token.test`.
    424  """Return a lexer which is probably cached."""
    439  lexer = _lexer_cache.get(key)
    440  if lexer is None:
    441  lexer = Lexer(environment)
    442  _lexer_cache[key] = lexer
    443  return lexer
    460  """Class that implements a lexer fo [all...] |
/third_party/jinja2/ |
H A D | lexer.py |
    1  """Implements a Jinja / Python combination lexer. The ``Lexer`` class
    21  # environments with the same lexer
    403  argument as :meth:`jinja2.lexer.Token.test`.
    427  """Return a lexer which is probably cached."""
    442  lexer = _lexer_cache.get(key)
    444  if lexer is None:
    445  _lexer_cache[key] = lexer = Lexer(environment)
    447  return lexer
    470  """Class that implements a lexer for a given environment. Automatically
    473  Note that the lexer i [all...] |
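
The several vendored copies of jinja2/lexer.py above all show the same memoization idea: a module-level `_lexer_cache` keyed by the environment's lexing-relevant settings, so equivalent environments share one Lexer. A generic sketch of that caching pattern follows; `LexerConfig` and `SimpleLexer` are illustrative stand-ins, not jinja2's actual cache key or class.

```python
# Illustrative sketch of the lexer-cache pattern seen in jinja2/lexer.py:
# build a lexer once per distinct configuration and reuse it afterwards.
# LexerConfig and SimpleLexer are hypothetical stand-ins for jinja2's types.
from typing import Dict, NamedTuple

class LexerConfig(NamedTuple):
    block_start: str
    block_end: str
    variable_start: str
    variable_end: str

class SimpleLexer:
    def __init__(self, config: LexerConfig) -> None:
        self.config = config

_lexer_cache: Dict[LexerConfig, SimpleLexer] = {}

def get_lexer(config: LexerConfig) -> SimpleLexer:
    # The hashable config tuple doubles as the cache key, mirroring the
    # `_lexer_cache.get(key)` / `Lexer(environment)` logic in the hits above.
    lexer = _lexer_cache.get(config)
    if lexer is None:
        _lexer_cache[config] = lexer = SimpleLexer(config)
    return lexer

# Usage: repeated calls with equal configs return the same lexer object.
cfg = LexerConfig("{%", "%}", "{{", "}}")
assert get_lexer(cfg) is get_lexer(cfg)
```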