Lines Matching refs:self
24 def logger_wrapper(self: P, *args: object) -> T:
25 if not self._verbose:
26 return method(self, *args)
28 fill = "  " * self._level
29 print(f"{fill}{method_name}({argsr}) .... (looking at {self.showpeek()})")
30 self._level += 1
31 tree = method(self, *args)
32 self._level -= 1
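
These matches appear to come from pegen's parser.py, the base class behind Python's PEG parser generator. The fragment at lines 24-32 is the verbose-tracing wrapper applied to non-memoized rule methods. A minimal sketch of the full logger decorator follows; the lines that did not match refs:self (the argsr assembly and the exit print) are filled in as assumptions:

    from typing import Any, Callable, TypeVar, cast

    F = TypeVar("F", bound=Callable[..., Any])

    def logger(method: F) -> F:
        """Trace entry and exit of a rule method when the parser is verbose."""
        method_name = method.__name__

        def logger_wrapper(self: Any, *args: object) -> Any:
            if not self._verbose:
                return method(self, *args)
            argsr = ",".join(repr(arg) for arg in args)  # assumed: source of argsr below
            fill = "  " * self._level
            print(f"{fill}{method_name}({argsr}) .... (looking at {self.showpeek()})")
            self._level += 1
            tree = method(self, *args)
            self._level -= 1
            print(f"{fill}... {method_name}({argsr}) --> {tree}")  # assumed exit trace
            return tree

        return cast(F, logger_wrapper)
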
44 def memoize_wrapper(self: P, *args: object) -> T:
45 mark = self._mark()
48 if key in self._cache and not self._verbose:
49 tree, endmark = self._cache[key]
50 self._reset(endmark)
53 verbose = self._verbose
55 fill = "  " * self._level
56 if key not in self._cache:
58 print(f"{fill}{method_name}({argsr}) ... (looking at {self.showpeek()})")
59 self._level += 1
60 tree = method(self, *args)
61 self._level -= 1
64 endmark = self._mark()
65 self._cache[key] = tree, endmark
67 tree, endmark = self._cache[key]
70 self._reset(endmark)
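
The memoize fragment above implements packrat caching: the tokenizer position (mark), the method name, and the argument tuple form the cache key, and a hit restores the recorded end position instead of re-parsing. A sketch of how the decorator plausibly fits together; the key construction is an assumption (matching the _cache annotation at line 170), and the verbose print statements are dropped for brevity:

    from typing import Any, Callable, TypeVar, cast

    F = TypeVar("F", bound=Callable[..., Any])

    def memoize(method: F) -> F:
        """Packrat-memoize a rule method on (position, rule name, args)."""
        method_name = method.__name__

        def memoize_wrapper(self: Any, *args: object) -> Any:
            mark = self._mark()
            key = mark, method_name, args  # assumed key shape
            # Fast path: cache hit and no tracing wanted.
            if key in self._cache and not self._verbose:
                tree, endmark = self._cache[key]
                self._reset(endmark)
                return tree
            if key not in self._cache:
                # First visit: run the rule, then record the result and end position.
                self._level += 1
                tree = method(self, *args)
                self._level -= 1
                endmark = self._mark()
                self._cache[key] = tree, endmark
            else:
                # Verbose-mode hit: replay the cached result.
                tree, endmark = self._cache[key]
                self._reset(endmark)
            return tree

        return cast(F, memoize_wrapper)

Keying on the start position means each (rule, position) pair is parsed at most once, which is what makes unlimited backtracking affordable.
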
81 def memoize_left_rec_wrapper(self: P) -> Optional[T]:
82 mark = self._mark()
85 if key in self._cache and not self._verbose:
86 tree, endmark = self._cache[key]
87 self._reset(endmark)
90 verbose = self._verbose
91 fill = "  " * self._level
92 if key not in self._cache:
94 print(f"{fill}{method_name} ... (looking at {self.showpeek()})")
95 self._level += 1
106 self._cache[key] = None, mark
113 self._reset(mark)
114 self.in_recursive_rule += 1
116 result = method(self)
118 self.in_recursive_rule -= 1
119 endmark = self._mark()
133 self._cache[key] = lastresult, lastmark = result, endmark
135 self._reset(lastmark)
138 self._level -= 1
142 endmark = self._mark()
145 self._reset(endmark)
146 self._cache[key] = tree, endmark
148 tree, endmark = self._cache[key]
152 self._reset(endmark)
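
The left-recursive variant (lines 81-152) uses the seed-growing technique usually credited to Warth et al.: prime the cache with a failure at the starting position, re-run the rule repeatedly so each recursive call picks up the previous pass's result from the cache, and stop once a pass fails or stops consuming input. A sketch of the core loop, reconstructed around the matched lines; the loop structure, the try/finally, and the stop conditions are assumptions:

    from typing import Any, Callable, Optional, TypeVar

    T = TypeVar("T")

    def grow_left_recursive(
        parser: Any, method: Callable[[Any], Optional[T]], key: Any, mark: Any
    ) -> Optional[T]:
        """Seed-growing loop for one left-recursive rule (sketch)."""
        # Prime the cache with a failure so the first recursive call bottoms out.
        parser._cache[key] = None, mark
        lastresult, lastmark = None, mark
        while True:
            parser._reset(mark)
            parser.in_recursive_rule += 1  # lets error reporting know where it is
            try:
                result = method(parser)
            finally:
                parser.in_recursive_rule -= 1
            endmark = parser._mark()
            # Stop when the rule fails or no longer advances past the last seed.
            if not result or endmark <= lastmark:
                break
            parser._cache[key] = lastresult, lastmark = result, endmark
        parser._reset(lastmark)
        # Lines 142-146 then re-record the final answer under the same key.
        parser._cache[key] = lastresult, parser._mark()
        return lastresult
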
166 def __init__(self, tokenizer: Tokenizer, *, verbose: bool = False):
167 self._tokenizer = tokenizer
168 self._verbose = verbose
169 self._level = 0
170 self._cache: Dict[Tuple[Mark, str, Tuple[Any, ...]], Tuple[Any, Mark]] = {}
173 self.in_recursive_rule = 0
175 self._mark = self._tokenizer.mark
176 self._reset = self._tokenizer.reset
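
For context, here is how a parser built on this base class is typically driven (a sketch: GeneratedParser is hypothetical, standing in for whatever class pegen emits for a grammar, and the Tokenizer construction follows pegen's usual pattern of wrapping the stdlib token stream):

    import io
    import tokenize

    from pegen.tokenizer import Tokenizer  # same module the Mark type comes from
    from my_grammar_parser import GeneratedParser  # hypothetical generated subclass

    source = "1 + 2 * 3\n"
    tok_stream = tokenize.generate_tokens(io.StringIO(source).readline)
    parser = GeneratedParser(Tokenizer(tok_stream), verbose=True)  # verbose is keyword-only
    tree = parser.start()  # the abstract entry point declared at line 179
    print(tree)
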
179 def start(self) -> Any:
182 def showpeek(self) -> str:
183 tok = self._tokenizer.peek()
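
showpeek renders the upcoming token for the trace messages above. Its return line did not match the search; the formatting below is an assumption, chosen to produce the "row.col: TYPE:'text'" style the traces suggest:

    import token

    def showpeek(self) -> str:
        tok = self._tokenizer.peek()
        # Assumed: position, token-type name, and raw text of the peeked token.
        return f"{tok.start[0]}.{tok.start[1]}: {token.tok_name[tok.type]}:{tok.string!r}"
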
187 def name(self) -> Optional[tokenize.TokenInfo]:
188 tok = self._tokenizer.peek()
189 if tok.type == token.NAME and tok.string not in self.KEYWORDS:
190 return self._tokenizer.getnext()
194 def number(self) -> Optional[tokenize.TokenInfo]:
195 tok = self._tokenizer.peek()
197 return self._tokenizer.getnext()
201 def string(self) -> Optional[tokenize.TokenInfo]:
202 tok = self._tokenizer.peek()
204 return self._tokenizer.getnext()
208 def op(self) -> Optional[tokenize.TokenInfo]:
209 tok = self._tokenizer.peek()
211 return self._tokenizer.getnext()
215 def type_comment(self) -> Optional[tokenize.TokenInfo]:
216 tok = self._tokenizer.peek()
218 return self._tokenizer.getnext()
222 def soft_keyword(self) -> Optional[tokenize.TokenInfo]:
223 tok = self._tokenizer.peek()
224 if tok.type == token.NAME and tok.string in self.SOFT_KEYWORDS:
225 return self._tokenizer.getnext()
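
Each of the token-level methods above (name, number, string, op, type_comment, soft_keyword) follows one shape: peek, test, and consume on success. The guards for name and soft_keyword are visible at lines 189 and 224; for the others the test line did not match the search, so the check below is an assumption mirroring the method name:

    import token
    import tokenize
    from typing import Optional

    def number(self) -> Optional[tokenize.TokenInfo]:
        tok = self._tokenizer.peek()
        if tok.type == token.NUMBER:  # assumed guard
            return self._tokenizer.getnext()
        return None

Returning the TokenInfo (truthy) on success and None on failure is what lets generated rules test alternatives with plain if statements.
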
229 def expect(self, type: str) -> Optional[tokenize.TokenInfo]:
230 tok = self._tokenizer.peek()
232 return self._tokenizer.getnext()
235 return self._tokenizer.getnext()
238 return self._tokenizer.getnext()
240 return self._tokenizer.getnext()
243 def expect_forced(self, res: Any, expectation: str) -> Optional[tokenize.TokenInfo]:
245 raise self.make_syntax_error(f"expected {expectation}")
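
The four getnext() returns inside expect (lines 232-240) suggest a chain of progressively more general fallbacks for matching a grammar literal. A plausible reconstruction follows; the guard conditions are elided from the listing and are assumptions, using only stdlib token tables:

    import token
    import tokenize
    from typing import Any, Optional

    def expect(self, type: str) -> Optional[tokenize.TokenInfo]:
        tok = self._tokenizer.peek()
        if tok.string == type:  # literal text match, e.g. "if" or "+"
            return self._tokenizer.getnext()
        if type in token.EXACT_TOKEN_TYPES:  # operator table, e.g. "+" -> PLUS
            if tok.type == token.EXACT_TOKEN_TYPES[type]:
                return self._tokenizer.getnext()
        if type in token.__dict__:  # named token type, e.g. "NEWLINE"
            if tok.type == token.__dict__[type]:
                return self._tokenizer.getnext()
        if tok.type == token.OP and tok.string == type:
            return self._tokenizer.getnext()
        return None

    def expect_forced(self, res: Any, expectation: str) -> Any:
        # "Forced" items turn failure into a hard error instead of a backtrack.
        if res is None:  # assumed guard; only the raise matched the search
            raise self.make_syntax_error(f"expected {expectation}")
        return res
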
248 def positive_lookahead(self, func: Callable[..., T], *args: object) -> T:
249 mark = self._mark()
251 self._reset(mark)
254 def negative_lookahead(self, func: Callable[..., object], *args: object) -> bool:
255 mark = self._mark()
257 self._reset(mark)
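
Both lookaheads save the position, run the rule, and rewind, so neither consumes input; the positive form returns the rule's result and the negative form inverts it. The call and return lines are elided from the listing, so those lines below are assumptions:

    from typing import Callable, TypeVar

    T = TypeVar("T")

    def positive_lookahead(self, func: Callable[..., T], *args: object) -> T:
        mark = self._mark()
        ok = func(*args)  # assumed: run the rule...
        self._reset(mark)  # ...then rewind regardless of outcome
        return ok

    def negative_lookahead(self, func: Callable[..., object], *args: object) -> bool:
        mark = self._mark()
        ok = func(*args)  # assumed
        self._reset(mark)
        return not ok  # assumed: succeed only if the rule failed
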
260 def make_syntax_error(self, message: str, filename: str = "<unknown>") -> SyntaxError:
261 tok = self._tokenizer.diagnose()
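
make_syntax_error builds the exception from the tokenizer's diagnosis of the failure point. The constructor call is elided; the tuple below is an assumption modeled on SyntaxError's standard (filename, lineno, offset, text) details format:

    def make_syntax_error(self, message: str, filename: str = "<unknown>") -> SyntaxError:
        tok = self._tokenizer.diagnose()
        # The offset is 1-based, hence the +1 on the column.
        return SyntaxError(message, (filename, tok.start[0], 1 + tok.start[1], tok.line))
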