Lines matching references to check_tokenize

45 def check_tokenize(self, s, expected):
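All of the call sites listed below go through this helper, which (in what looks like CPython's tokenize test suite) compares a source string's token stream against an expected table. A minimal sketch of that comparison, assuming only the documented tokenize APIs; the function name, column widths, and filtering details here are illustrative, not the file's actual code:

    # Illustrative sketch: tokenize a source string and render each token as
    # "NAME  'string'  (srow, scol) (erow, ecol)", the shape of the expected
    # tables passed to check_tokenize. The real helper may differ in details.
    from io import BytesIO
    from token import tok_name
    from tokenize import tokenize, ENCODING, ENDMARKER, NEWLINE

    def render_token_table(source):
        rows = []
        missing_nl = not source.endswith(("\n", "\r"))
        last_line = len(source.splitlines())
        for tok in tokenize(BytesIO(source.encode("utf-8")).readline):
            if tok.type in (ENCODING, ENDMARKER):
                continue  # bookkeeping tokens are not part of the expected text
            if missing_nl and tok.type == NEWLINE and tok.end[0] == last_line:
                continue  # implicit NEWLINE appended when the source lacks one
            rows.append(f"    {tok_name[tok.type]:10} {tok.string!r:13} "
                        f"{tok.start} {tok.end}")
        return rows

A test would then assert that render_token_table(s) matches expected.rstrip().splitlines(); that is an assumed shape of the check, not a quote of the helper.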
63 self.check_tokenize("1 + 1", """\
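Each match above and below is the opening line of a call whose expected table continues in a triple-quoted string; the call at 63, for instance, reads roughly as follows (reconstructed from the token format for illustration, not quoted verbatim from the file):

    self.check_tokenize("1 + 1", """\
        NUMBER     '1'           (1, 0) (1, 1)
        OP         '+'           (1, 2) (1, 3)
        NUMBER     '1'           (1, 4) (1, 5)
        """)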
68 self.check_tokenize("if False:\n"
101 self.check_tokenize("0xff <= 255", """\
106 self.check_tokenize("0b10 <= 255", """\
111 self.check_tokenize("0o123 <= 0O123", """\
116 self.check_tokenize("1234567 > ~0x15", """\
122 self.check_tokenize("2134568 != 1231515", """\
127 self.check_tokenize("(-124561-1) & 200000000", """\
137 self.check_tokenize("0xdeadbeef != -1", """\
143 self.check_tokenize("0xdeadc0de & 12345", """\
148 self.check_tokenize("0xFF & 0x15 | 1234", """\
158 self.check_tokenize("x = 0", """\
163 self.check_tokenize("x = 0xfffffffffff", """\
168 self.check_tokenize("x = 123141242151251616110", """\
173 self.check_tokenize("x = -15921590215012591", """\
182 self.check_tokenize("x = 3.14159", """\
187 self.check_tokenize("x = 314159.", """\
192 self.check_tokenize("x = .314159", """\
197 self.check_tokenize("x = 3e14159", """\
202 self.check_tokenize("x = 3E123", """\
207 self.check_tokenize("x+y = 3e-1230", """\
214 self.check_tokenize("x = 3.14e159", """\
237 self.check_tokenize("x = ''; y = \"\"", """\
246 self.check_tokenize("x = '\"'; y = \"'\"", """\
255 self.check_tokenize("x = \"doesn't \"shrink\", does it\"", """\
262 self.check_tokenize("x = 'abc' + 'ABC'", """\
269 self.check_tokenize('y = "ABC" + "ABC"', """\
276 self.check_tokenize("x = r'abc' + r'ABC' + R'ABC' + R'ABC'", """\
287 self.check_tokenize('y = r"abc" + r"ABC" + R"ABC" + R"ABC"', """\
299 self.check_tokenize("u'abc' + U'abc'", """\
304 self.check_tokenize('u"abc" + U"abc"', """\
310 self.check_tokenize("b'abc' + B'abc'", """\
315 self.check_tokenize('b"abc" + B"abc"', """\
320 self.check_tokenize("br'abc' + bR'abc' + Br'abc' + BR'abc'", """\
329 self.check_tokenize('br"abc" + bR"abc" + Br"abc" + BR"abc"', """\
338 self.check_tokenize("rb'abc' + rB'abc' + Rb'abc' + RB'abc'", """\
347 self.check_tokenize('rb"abc" + rB"abc" + Rb"abc" + RB"abc"', """\
357 self.check_tokenize(r'"a\
362 self.check_tokenize(r'u"a\
366 self.check_tokenize(r'rb"a\
370 self.check_tokenize(r'"""a\
374 self.check_tokenize(r'u"""a\
378 self.check_tokenize(r'rb"""a\
383 self.check_tokenize('f"abc"', """\
386 self.check_tokenize('fR"a{b}c"', """\
389 self.check_tokenize('f"""abc"""', """\
392 self.check_tokenize(r'f"abc\
396 self.check_tokenize(r'Rf"abc\
402 self.check_tokenize("def d22(a, b, c=2, d=2, *k): pass", """\
424 self.check_tokenize("def d01v_(a=1, *k, **w): pass", """\
441 self.check_tokenize("def d23(a: str, b: int=3) -> int: pass", """\
463 self.check_tokenize("if 1 < 1 > 1 == 1 >= 5 <= 0x15 <= 0x12 != "
501 self.check_tokenize("x = 1 << 1 >> 5", """\
513 self.check_tokenize("x = 1 - y + 15 - 1 + 0x124 + z + a[5]", """\
536 self.check_tokenize("x = 1//1*1/5*12%0x12@42", """\
556 self.check_tokenize("~1 ^ 1 & 1 |1 ^ -1", """\
569 self.check_tokenize("-1*1/1+1*1//1 - ---1**1", """\
593 self.check_tokenize("import sys, time\nx = sys.modules['time'].time()", """\
615 self.check_tokenize("@staticmethod\ndef foo(x,y): pass", """\
632 self.check_tokenize("def f():\n"
653 self.check_tokenize("Örter = 'places'\ngrün = 'green'", """\
665 self.check_tokenize("Örter = u'places'\ngrün = U'green'", """\
677 self.check_tokenize("async = 1", """\
683 self.check_tokenize("a = (async = 1)", """\
693 self.check_tokenize("async()", """\
699 self.check_tokenize("class async(Bar):pass", """\
709 self.check_tokenize("class async:pass", """\
716 self.check_tokenize("await = 1", """\
722 self.check_tokenize("foo.async", """\
728 self.check_tokenize("async for a in b: pass", """\
738 self.check_tokenize("async with a as b: pass", """\
748 self.check_tokenize("async.foo", """\
754 self.check_tokenize("async", """\
758 self.check_tokenize("async\n#comment\nawait", """\
766 self.check_tokenize("async\n...\nawait", """\
774 self.check_tokenize("async\nawait", """\
780 self.check_tokenize("foo.async + 1", """\
788 self.check_tokenize("async def foo(): pass", """\
798 self.check_tokenize('''\
842 self.check_tokenize('''\
863 self.check_tokenize('''async def foo(async): await''', """\
874 self.check_tokenize('''\
911 self.check_tokenize('''\
950 def check_tokenize(self, s, expected):
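Only the definition at 950 matches here, with no call sites of its own, which is consistent with a subclass that overrides the helper and inherits the calls listed above; plausibly a str-based variant driving generate_tokens() instead of tokenize(). A hypothetical sketch under that assumption (the name and body are illustrative, not the file's):

    from io import StringIO
    from tokenize import generate_tokens

    def tokenize_str(s):
        # Assumption: feed a str source to generate_tokens(), which takes a
        # readline callable returning str, mirroring the bytes-based helper.
        return list(generate_tokens(StringIO(s).readline))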
1659 def check_tokenize(self, s, expected):
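The definition at 1659 opens a second block of call sites (1670 onward) that repeat the earlier inputs, mostly with single-quoted source strings, so this is presumably another test class running the same expected-table checks through a different tokenizer entry point. As a generic illustration of that kind of cross-checking, using only documented APIs (not code from the file):

    from io import BytesIO, StringIO
    from tokenize import tokenize, generate_tokens, ENCODING

    def cross_check(source):
        # Both entry points should agree on (type, string, start, end) once
        # the bytes path's leading ENCODING token is dropped.
        via_bytes = [t[:4] for t in tokenize(BytesIO(source.encode("utf-8")).readline)
                     if t.type != ENCODING]
        via_str = [t[:4] for t in generate_tokens(StringIO(source).readline)]
        assert via_bytes == via_str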
1670 self.check_tokenize('0xff <= 255', """\
1676 self.check_tokenize('0b10 <= 255', """\
1682 self.check_tokenize('0o123 <= 0O123', """\
1688 self.check_tokenize('1234567 > ~0x15', """\
1695 self.check_tokenize('2134568 != 1231515', """\
1701 self.check_tokenize('(-124561-1) & 200000000', """\
1712 self.check_tokenize('0xdeadbeef != -1', """\
1719 self.check_tokenize('0xdeadc0de & 12345', """\
1725 self.check_tokenize('0xFF & 0x15 | 1234', """\
1735 self.check_tokenize('x = 3.14159', """\
1741 self.check_tokenize('x = 314159.', """\
1747 self.check_tokenize('x = .314159', """\
1753 self.check_tokenize('x = 3e14159', """\
1759 self.check_tokenize('x = 3E123', """\
1765 self.check_tokenize('x+y = 3e-1230', """\
1773 self.check_tokenize('x = 3.14e159', """\
1781 self.check_tokenize('x = \'\'; y = ""', """\
1791 self.check_tokenize('x = \'"\'; y = "\'"', """\
1801 self.check_tokenize('x = "doesn\'t "shrink", does it"', """\
1809 self.check_tokenize("x = 'abc' + 'ABC'", """\
1817 self.check_tokenize('y = "ABC" + "ABC"', """\
1825 self.check_tokenize("x = r'abc' + r'ABC' + R'ABC' + R'ABC'", """\
1837 self.check_tokenize('y = r"abc" + r"ABC" + R"ABC" + R"ABC"', """\
1849 self.check_tokenize("u'abc' + U'abc'", """\
1855 self.check_tokenize('u"abc" + U"abc"', """\
1861 self.check_tokenize("b'abc' + B'abc'", """\
1867 self.check_tokenize('b"abc" + B"abc"', """\
1873 self.check_tokenize("br'abc' + bR'abc' + Br'abc' + BR'abc'", """\
1883 self.check_tokenize('br"abc" + bR"abc" + Br"abc" + BR"abc"', """\
1893 self.check_tokenize("rb'abc' + rB'abc' + Rb'abc' + RB'abc'", """\
1903 self.check_tokenize('rb"abc" + rB"abc" + Rb"abc" + RB"abc"', """\
1913 self.check_tokenize('"a\\\nde\\\nfg"', """\
1917 self.check_tokenize('u"a\\\nde"', """\
1921 self.check_tokenize('rb"a\\\nd"', """\
1925 self.check_tokenize(r'"""a\
1929 self.check_tokenize(r'u"""a\
1933 self.check_tokenize(r'rb"""a\
1939 self.check_tokenize('f"abc"', """\
1943 self.check_tokenize('fR"a{b}c"', """\
1947 self.check_tokenize('f"""abc"""', """\
1951 self.check_tokenize(r'f"abc\
1956 self.check_tokenize(r'Rf"abc\
1963 self.check_tokenize('def d22(a, b, c=2, d=2, *k): pass', """\
1986 self.check_tokenize('def d01v_(a=1, *k, **w): pass', """\
2004 self.check_tokenize('def d23(a: str, b: int=3) -> int: pass', """\
2026 self.check_tokenize("if 1 < 1 > 1 == 1 >= 5 <= 0x15 <= 0x12 != "
2064 self.check_tokenize('x = 1 - y + 15 - 1 + 0x124 + z + a[5]', """\
2087 self.check_tokenize('x = 1//1*1/5*12%0x12@42', """\
2107 self.check_tokenize('~1 ^ 1 & 1 |1 ^ -1', """\
2121 self.check_tokenize('-1*1/1+1*1//1 - ---1**1', """\
2145 self.check_tokenize("import sys, time\nx = sys.modules['time'].time()", """\
2167 self.check_tokenize('@staticmethod\ndef foo(x,y): pass', """\
2184 self.check_tokenize('@staticmethod\ndef foo(x,y): pass', """\
2201 self.check_tokenize('async = 1', """\
2207 self.check_tokenize('a = (async = 1)', """\
2217 self.check_tokenize('async()', """\
2223 self.check_tokenize('class async(Bar):pass', """\
2233 self.check_tokenize('class async:pass', """\
2240 self.check_tokenize('await = 1', """\
2246 self.check_tokenize('foo.async', """\
2252 self.check_tokenize('async for a in b: pass', """\
2262 self.check_tokenize('async with a as b: pass', """\
2272 self.check_tokenize('async.foo', """\
2278 self.check_tokenize('async', """\
2282 self.check_tokenize('async\n#comment\nawait', """\
2288 self.check_tokenize('async\n...\nawait', """\
2296 self.check_tokenize('async\nawait', """\
2302 self.check_tokenize('foo.async + 1', """\
2310 self.check_tokenize('async def foo(): pass', """\
2320 self.check_tokenize('''\
2364 self.check_tokenize('async def foo():\n async for i in 1: pass', """\
2383 self.check_tokenize('async def foo(async): await', """\
2394 self.check_tokenize('''\
2429 self.check_tokenize('''\
2467 self.check_tokenize("Örter = u'places'\ngrün = U'green'", """\
2549 self.check_tokenize(code, """\
2586 self.check_tokenize(code, """\
2610 self.check_tokenize(code, """\