/third_party/lwip/src/apps/http/makefsdata/
  makefsdata.c
    81:  #define NEWLINE "\r\n"   (macro)
    158: printf(" Usage: htmlgen [targetdir] [-s] [-e] [-11] [-nossi] [-ssi:<filename>] [-c] [-f:<filename>] [-m] [-svr:<name>] [-x:<ext_list>] [-xc:<ext_list>" USAGE_ARG_DEFLATE NEWLINE NEWLINE);   (in print_usage())
    159: printf(" targetdir: relative or absolute path to files to convert" NEWLINE);   (in print_usage())
    160: printf(" switch -s: toggle processing of subdirectories (default is on)" NEWLINE);   (in print_usage())
    161: printf(" switch -e: exclude HTTP header from file (header is created at runtime, default is off)" NEWLINE);   (in print_usage())
    162: printf(" switch -11: include HTTP 1.1 header (1.0 is default)" NEWLINE);   (in print_usage())
    163: printf(" switch -nossi: no support for SSI (cannot calculate Content-Length for SSI)" NEWLINE);   (in print_usage())
    164: printf(" switch -ssi: ssi filename (ssi support controlled by file list, not by extension)" NEWLINE);   (in print_usage())
    165: printf(" switch -c: precalculate checksums for all pages (default is off)" NEWLINE);   (in print_usage())
    [all...]
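The CRLF choice here is not incidental: by default makefsdata bakes an HTTP response header into each generated file (the -e switch turns that off), and HTTP/1.x header lines are terminated by CRLF. A minimal sketch of building such a header block, in Python, purely illustrative and not part of makefsdata; the header values are placeholders:

CRLF = "\r\n"

def http10_header(content_type, content_length):
    # HTTP/1.x header lines are joined with CRLF; a blank line (one more
    # CRLF) terminates the header section.
    lines = [
        "HTTP/1.0 200 OK",
        "Server: example-embedded-server",   # placeholder value
        "Content-Type: " + content_type,
        "Content-Length: " + str(content_length),
    ]
    return (CRLF.join(lines) + CRLF + CRLF).encode("ascii")

print(http10_header("text/html", 1024))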
/third_party/gn/src/gn/
  rust_project_writer.cc
    24:  #define NEWLINE "\r\n"   (macro)
    26:  #define NEWLINE "\n"   (macro)
    252: rust_project << "{" NEWLINE;   (in WriteCrates())
    262: rust_project << " \"sysroot\": \"" << sysroot_path << "\"," NEWLINE;   (in WriteCrates())
    275: rust_project << NEWLINE << " {" NEWLINE   (in WriteCrates())
    276:     << " \"crate_id\": " << crate.index() << "," NEWLINE
    277:     << " \"root_module\": \"" << crate_module << "\"," NEWLINE
    278:     << " \"label\": \"" << crate.label() << "\"," NEWLINE
    279:     << " \"source\": {" NEWLINE
    290:     << "\"" NEWLINE;   (in WriteCrates())
    292: rust_project << NEWLINE;   (in WriteCrates())
    296:     << " }," NEWLINE;   (in WriteCrates())
    318: rust_project << "]," << NEWLINE;   (in WriteCrates())
    333: rust_project << NEWLINE " ]," NEWLINE;  // end dep list   (in WriteCrates())
    339: rust_project << " \"is_proc_macro\": true," NEWLINE;   (in WriteCrates())
    343:     << "\"," NEWLINE;   (in WriteCrates())
    356: rust_project << NEWLINE;   (in WriteCrates())
    359: rust_project << NEWLINE;   (in WriteCrates())
    363: rust_project << "," NEWLINE;   (in WriteCrates())
    373: rust_project << NEWLINE;   (in WriteCrates())
    378: rust_project << NEWLINE;   (in WriteCrates())
    379: rust_project << " }" NEWLINE;  // end env vars   (in WriteCrates())
    381: rust_project << NEWLINE;   (in WriteCrates())
    385: rust_project << NEWLINE " ]" NEWLINE;  // end crate list   (in WriteCrates())
    386: rust_project << "}" NEWLINE;   (in WriteCrates())
    [all...]
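The two competing definitions at lines 24 and 26 are selected by a conditional that is elided from these hits, presumably a platform check (CRLF on Windows, LF elsewhere), so the generated rust-project.json uses the native line ending. A rough Python equivalent of that choice; the function name and the simplified JSON shape are illustrative, not taken from gn:

import json
import sys

# "\r\n" on Windows, "\n" elsewhere -- mirroring the two #define NEWLINE lines.
NEWLINE = "\r\n" if sys.platform == "win32" else "\n"

def write_rust_project(path, crates):
    # rust-analyzer reads rust-project.json as ordinary JSON; the separator
    # only affects how the file looks in a platform-native editor.
    doc = {"sysroot": "/usr/lib/rustlib", "crates": crates}   # placeholder sysroot
    text = json.dumps(doc, indent=2)
    with open(path, "w", newline="") as f:   # newline="" disables translation
        f.write(text.replace("\n", NEWLINE) + NEWLINE)

write_rust_project("rust-project.json",
                   [{"root_module": "src/lib.rs", "edition": "2021"}])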
/third_party/python/Lib/test/
  test_tokenize.py
    6:   NEWLINE, _generate_tokens_from_c_tokenizer, DEDENT)
    31:  if missing_trailing_nl and type == NEWLINE and end[0] == num_lines:
    43:  # final NEWLINE are omitted for brevity.
    47:  # The ENDMARKER and final NEWLINE are omitted.
    55:  # Make sure that the tokenizer puts in an implicit NEWLINE
    59:  self.assertEqual(tokens[-2].type, NEWLINE)
    71:  " True = False # NEWLINE\n", """\
    75:  NEWLINE '\\n' (1, 9) (1, 10)
    83:  COMMENT '# NEWLINE' (4, 17) (4, 26)
    84:  NEWLINE '\\
    [all...]
  test_univnewlines.py
    43:  # and a class variable NEWLINE to set the expected newlines value
    64:  self.assertEqual(repr(fp.newlines), repr(self.NEWLINE))
    70:  self.assertEqual(repr(fp.newlines), repr(self.NEWLINE))
    80:  self.assertEqual(repr(fp.newlines), repr(self.NEWLINE))
    94:  NEWLINE = '\r'   (variable in TestCRNewlines)
    100: NEWLINE = '\n'   (variable in TestLFNewlines)
    106: NEWLINE = '\r\n'   (variable in TestCRLFNewlines)
    114: self.assertEqual(repr(fp.newlines), repr(self.NEWLINE))
    119: NEWLINE = ('\r', '\n')   (variable in TestMixedNewlines)
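Two behaviours exercised by these tests are easy to reproduce directly: tokenize appends an implicit NEWLINE token when the source does not end in one, and a text file opened in universal-newlines mode records which endings it has actually seen in its newlines attribute. A small sketch in Python; the temporary file path is only for illustration:

import io
import os
import tempfile
import tokenize

# 1) tokenize inserts an implicit NEWLINE when the source lacks a trailing "\n".
toks = list(tokenize.generate_tokens(io.StringIO("x = 1").readline))
assert toks[-2].type == tokenize.NEWLINE and toks[-2].string == ""

# 2) In universal-newlines mode (newline=None, the default for text files),
#    the file object remembers which endings it has seen.
path = os.path.join(tempfile.mkdtemp(), "mixed.txt")
with open(path, "wb") as f:
    f.write(b"one\r\ntwo\n")
with open(path, "r", newline=None) as f:
    f.read()
    print(f.newlines)   # a tuple such as ('\n', '\r\n') once both endings appeared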
/third_party/python/Lib/test/test_peg_generator/
  test_pegen.py
    9:   from tokenize import TokenInfo, NAME, NEWLINE, NUMBER, OP
    25:  start: sum NEWLINE
    30:  start: sum NEWLINE
    38:  self.assertEqual(str(rules["start"]), "start: sum NEWLINE")
    65:  start[int]: sum NEWLINE
    71:  self.assertEqual(str(rules["start"]), "start: sum NEWLINE")
    80:  start: ','.thing+ NEWLINE
    84:  self.assertEqual(str(rules["start"]), "start: ','.thing+ NEWLINE")
    106: NEWLINE, string="\n", start=(1, 4), end=(1, 5), line="1, 2\n"
    113: start: sum NEWLINE
    [all...]
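In these toy grammars NEWLINE is the ordinary tokenize token, so an input such as "1, 2\n" ends with a NEWLINE TokenInfo spanning the "\n", with exactly the coordinates the hit at line 106 spells out. Reproducing that token directly:

import io
import tokenize

toks = list(tokenize.generate_tokens(io.StringIO("1, 2\n").readline))
nl = [t for t in toks if t.type == tokenize.NEWLINE][-1]
print(repr(nl.string), nl.start, nl.end, repr(nl.line))
# '\n' (1, 4) (1, 5) '1, 2\n'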
/third_party/mesa3d/src/compiler/glsl/glcpp/
  glcpp-parse.y
    196: %token DEFINED ELIF_EXPANDED HASH_TOKEN DEFINE_TOKEN FUNC_IDENTIFIER OBJ_IDENTIFIER ELIF ELSE ENDIF ERROR_TOKEN IF IFDEF IFNDEF LINE PRAGMA UNDEF VERSION_TOKEN GARBAGE IDENTIFIER IF_EXPANDED INTEGER INTEGER_STRING LINE_EXPANDED NEWLINE OTHER PLACEHOLDER SPACE PLUS_PLUS MINUS_MINUS PATH INCLUDE
    236: IF_EXPANDED expression NEWLINE {
    241: | ELIF_EXPANDED expression NEWLINE {
    246: | LINE_EXPANDED integer_constant NEWLINE {
    251: | LINE_EXPANDED integer_constant integer_constant NEWLINE {
    260: | LINE_EXPANDED integer_constant PATH NEWLINE {
    270: OBJ_IDENTIFIER replacement_list NEWLINE {
    273: | FUNC_IDENTIFIER '(' ')' replacement_list NEWLINE {
    276: | FUNC_IDENTIFIER '(' identifier_list ')' replacement_list NEWLINE {
    286: | HASH_TOKEN LINE pp_tokens NEWLINE {
    [all...]
/third_party/vk-gl-cts/framework/randomshaders/
  rsgShader.cpp
    97:  str << Token::PRECISION << Token::MEDIUM_PRECISION << Token::FLOAT << Token::SEMICOLON << Token::NEWLINE;   (in tokenize())
    106: str << Token::NEWLINE;   (in tokenize())
    111: str << Token::NEWLINE;   (in tokenize())
    144: str << Token::RIGHT_PAREN << Token::NEWLINE;   (in tokenize())
  rsgStatement.cpp
    205: str << Token::LEFT_BRACE << Token::NEWLINE << Token::INDENT_INC;   (in tokenize())
    210: str << Token::INDENT_DEC << Token::RIGHT_BRACE << Token::NEWLINE;   (in tokenize())
    223: str << Token::SEMICOLON << Token::NEWLINE;   (in tokenize())
    342: str << Token::SEMICOLON << Token::NEWLINE;   (in tokenize())
    462: str << Token::RIGHT_PAREN << Token::NEWLINE;   (in tokenize())
    484: str << Token::NEWLINE;   (in tokenize())
    489: str << Token::NEWLINE << Token::INDENT_INC;   (in tokenize())
    559: str << Token::SEMICOLON << Token::NEWLINE;   (in tokenize())
/third_party/python/Tools/scripts/
  cleanfuture.py
    158: NEWLINE = tokenize.NEWLINE
    168: while type in (COMMENT, NL, NEWLINE):
    178: while type in (COMMENT, NL, NEWLINE):
    210: if type is not NEWLINE:
  reindent.py
    288: NEWLINE=tokenize.NEWLINE,
    292: if type == NEWLINE:
    316: # This is the first "real token" following a NEWLINE, so it
/third_party/python/Include/
  token.h
    17: #define NEWLINE 4   (macro)
    83: (x) == NEWLINE || \
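The numeric value in the C header is mirrored by the Python-level token module, so the constant can be inspected without touching C:

import token

print(token.NEWLINE)                     # 4, matching "#define NEWLINE 4"
print(token.tok_name[token.NEWLINE])     # 'NEWLINE'
print(token.ISTERMINAL(token.NEWLINE))   # True -- a terminal token, not a grammar symbol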
/third_party/python/Tools/peg_generator/pegen/
  tokenizer.py
    52: tok.type == token.NEWLINE
    54: and self._tokens[-1].type == token.NEWLINE
    70: tok.type < tokenize.NEWLINE or tok.type > tokenize.DEDENT
/third_party/icu/icu4j/main/classes/core/src/com/ibm/icu/util/
  VTimeZone.java
    243:  bw.write(NEWLINE);   (in write())
    250:  bw.write(NEWLINE);   (in write())
    254:  bw.write(NEWLINE);   (in write())
    419:  private static final String NEWLINE = "\r\n"; // CRLF   (field in VTimeZone)
    1253: w.write(NEWLINE);   (in writeZone())
    1533: writer.write(NEWLINE);   (in writeZonePropsByTime())
    1553: writer.write(NEWLINE);   (in writeZonePropsByDOM())
    1574: writer.write(NEWLINE);   (in writeZonePropsByDOW())
    1659: writer.write(NEWLINE);   (in writeZonePropsByDOW_GEQ_DOM_sub())
    1811: writer.write(NEWLINE);   (in beginZoneProps())
    [all...]
/third_party/icu/ohos_icu4j/src/main/java/ohos/global/icu/util/
  VTimeZone.java
    224:  bw.write(NEWLINE);   (in write())
    231:  bw.write(NEWLINE);   (in write())
    235:  bw.write(NEWLINE);   (in write())
    390:  private static final String NEWLINE = "\r\n"; // CRLF   (field in VTimeZone)
    1224: w.write(NEWLINE);   (in writeZone())
    1504: writer.write(NEWLINE);   (in writeZonePropsByTime())
    1524: writer.write(NEWLINE);   (in writeZonePropsByDOM())
    1545: writer.write(NEWLINE);   (in writeZonePropsByDOW())
    1630: writer.write(NEWLINE);   (in writeZonePropsByDOW_GEQ_DOM_sub())
    1782: writer.write(NEWLINE);   (in beginZoneProps())
    [all...]
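Both copies of VTimeZone hard-code CRLF because the iCalendar data they emit (RFC 5545) terminates every content line with CRLF regardless of platform. A stripped-down illustration in Python, not the ICU implementation; a real VTIMEZONE would also carry STANDARD/DAYLIGHT sub-components:

NEWLINE = "\r\n"   # RFC 5545: iCalendar content lines end with CRLF

def write_vtimezone(tzid):
    lines = [
        "BEGIN:VTIMEZONE",
        "TZID:" + tzid,
        "END:VTIMEZONE",
    ]
    return NEWLINE.join(lines) + NEWLINE

print(repr(write_vtimezone("America/Los_Angeles")))
# 'BEGIN:VTIMEZONE\r\nTZID:America/Los_Angeles\r\nEND:VTIMEZONE\r\n'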
/third_party/ninja/src/
  manifest_parser.cc
    87:  case Lexer::NEWLINE:   (in Parse())
    103: if (!ExpectToken(Lexer::NEWLINE, err))   (in ParsePool())
    140: if (!ExpectToken(Lexer::NEWLINE, err))   (in ParseRule())
    208: return ExpectToken(Lexer::NEWLINE, err);   (in ParseDefault())
    305: if (!ExpectToken(Lexer::NEWLINE, err))   (in ParseEdge())
    443: if (!ExpectToken(Lexer::NEWLINE, err))   (in ParseFileInclude())
  lexer.in.cc
    84:  case NEWLINE: return "newline";   (in TokenName())
    137: [ ]*"\r\n" { token = NEWLINE; break; }   (in ReadToken())
    138: [ ]*"\n" { token = NEWLINE; break; }   (in ReadToken())
    159: if (token != NEWLINE && token != TEOF)   (in ReadToken())
  lexer.cc
    85:  case NEWLINE: return "newline";   (in TokenName())
    232: { token = NEWLINE; break; }   (in ReadToken())
    300: { token = NEWLINE; break; }   (in ReadToken())
    458: if (token != NEWLINE && token != TEOF)   (in ReadToken())
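The re2c rules in lexer.in.cc (lexer.cc is the generated form) accept optional trailing spaces followed by either "\r\n" or "\n" and collapse both into a single NEWLINE token, which the manifest parser then requires at the end of each declaration. A rough Python equivalent of just that rule, illustrative and not ninja's lexer:

import re

# Optional trailing spaces, then CRLF or LF, lexed as one NEWLINE token.
NEWLINE_RE = re.compile(r"[ ]*(\r\n|\n)")

def read_newline(text, pos):
    m = NEWLINE_RE.match(text, pos)
    return ("NEWLINE", m.end()) if m else (None, pos)

print(read_newline("  \r\n", 0))   # ('NEWLINE', 4)
print(read_newline("\n", 0))       # ('NEWLINE', 1)
print(read_newline("x\n", 0))      # (None, 0) -- no newline at this position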
/third_party/python/Lib/lib2to3/fixes/
  fix_metaclass.py
    96:  if node.children and node.children[-1].type == token.NEWLINE:
    220: node.append_child(Leaf(token.NEWLINE, '\n'))
    228: suite.insert_child(-1, Leaf(token.NEWLINE, '\n'))
/third_party/python/Lib/
  tokenize.py
    208: elif tok_type in (NEWLINE, NL):
    219: if tok_type in (NEWLINE, NL):
    227: startline = token[0] in (NEWLINE, NL)
    253: elif toknum in (NEWLINE, NL):
    544: yield TokenInfo(NEWLINE, token, spos, epos, line)
    608: # Add an implicit NEWLINE if the input doesn't end in one
    610: yield TokenInfo(NEWLINE, '', (lnum - 1, len(last_line)), (lnum - 1, len(last_line) + 1), '')
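tokenize.py keeps two distinct newline tokens: NEWLINE terminates a logical line that contained code, while NL marks blank lines, comment-only lines, and line breaks inside an open bracket. The distinction is easy to see by filtering a token stream:

import io
import tokenize

src = "x = (1 +\n     2)\n\n# comment\ny = 3\n"
for t in tokenize.generate_tokens(io.StringIO(src).readline):
    if t.type in (tokenize.NEWLINE, tokenize.NL):
        print(tokenize.tok_name[t.type], t.start)

# NL (1, 8)       -- line break inside the parenthesised continuation
# NEWLINE (2, 7)  -- end of the logical line "x = (1 + 2)"
# NL (3, 0)       -- blank line
# NL (4, 9)       -- line holding only a comment
# NEWLINE (5, 5)  -- end of the logical line "y = 3"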
  tabnanny.py
    278: NEWLINE = tokenize.NEWLINE
    284: if type == NEWLINE:
    310: # assert check_equal # else no earlier NEWLINE, or an earlier INDENT
    316: # this is the first "real token" following a NEWLINE, so it
/third_party/selinux/libsepol/cil/src/
  cil_parser.c
    176: if (tok.type != NEWLINE) {   (in add_hll_linemark())
    182: /* Need to increment because of the NEWLINE */   (in add_hll_linemark())
    278: case NEWLINE :   (in cil_parser())
    284: while (tok.type != NEWLINE && tok.type != END_OF_FILE) {   (in cil_parser())
  cil_lexer.h
    41:  #define NEWLINE 7   (macro)
/third_party/python/Lib/lib2to3/pgen2/
  tokenize.py
    194: if tok_type in (NEWLINE, NL):
    206: if toknum in (NEWLINE, NL):
    220: elif toknum in (NEWLINE, NL):
    457: newline = NEWLINE
/third_party/skia/third_party/externals/spirv-tools/test/
  text_word_get_test.cpp
    25:  #define NEWLINE "\n"   (macro)
    166: AutoText input(QUOTE "white " NEWLINE TAB " space" QUOTE);
    215: AutoText input("w" BACKSLASH QUOTE "o" BACKSLASH NEWLINE "r" BACKSLASH ";d");
/third_party/skia/third_party/externals/swiftshader/third_party/SPIRV-Tools/test/
  text_word_get_test.cpp
    25:  #define NEWLINE "\n"   (macro)
    166: AutoText input(QUOTE "white " NEWLINE TAB " space" QUOTE);
    215: AutoText input("w" BACKSLASH QUOTE "o" BACKSLASH NEWLINE "r" BACKSLASH ";d");