Lines matching refs: self

import re
import sys
import typing as t
from collections import deque

from .exceptions import TemplateSyntaxError

if t.TYPE_CHECKING:
    import typing_extensions as te

    from .environment import Environment

TOKEN_INITIAL = sys.intern("initial")
TOKEN_EOF = sys.intern("eof")


class Failure:
    """Raises a TemplateSyntaxError when called; the lexer uses it to
    defer reporting a known error until line information is available."""

    def __init__(
        self, message: str, cls: t.Type[TemplateSyntaxError] = TemplateSyntaxError
    ) -> None:
        self.message = message
        self.error_class = cls

    def __call__(self, lineno: int, filename: str) -> "te.NoReturn":
        raise self.error_class(self.message, lineno, filename)
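Failure instances sit in the lexer's rule tables and are called only when a rule matches known-bad input, which is what defers the raise until a position is known. A minimal sketch of that behavior, assuming jinja2 is installed so the internal helper can be imported:

    from jinja2 import TemplateSyntaxError
    from jinja2.lexer import Failure

    fail = Failure("unexpected character")
    try:
        fail(3, "demo.html")
    except TemplateSyntaxError as e:
        print(e.lineno)  # 3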
class Token(t.NamedTuple):
    lineno: int
    type: str
    value: str

    def __str__(self) -> str:
        return describe_token(self)  # module-level helper, elided here

    def test(self, expr: str) -> bool:
        # A token expression is either a bare type ("name") or a
        # "type:value" pair such as "name:if".
        if self.type == expr:
            return True

        if ":" in expr:
            return expr.split(":", 1) == [self.type, self.value]

        return False

    def test_any(self, *iterable: str) -> bool:
        return any(self.test(expr) for expr in iterable)
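A quick check of the token-expression matching above, assuming jinja2 is installed so Token can be imported directly:

    from jinja2.lexer import Token

    tok = Token(1, "name", "if")
    assert tok.test("name")                    # bare type matches
    assert tok.test("name:if")                 # type:value matches
    assert not tok.test("name:for")            # same type, different value
    assert tok.test_any("integer", "name:if")  # any-of convenience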
class TokenStreamIterator:
    """Iterates over a TokenStream until the EOF token is reached."""

    def __init__(self, stream: "TokenStream") -> None:
        self.stream = stream

    def __iter__(self) -> "TokenStreamIterator":
        return self

    def __next__(self) -> Token:
        token = self.stream.current

        if token.type is TOKEN_EOF:
            self.stream.close()
            raise StopIteration

        next(self.stream)
        return token
class TokenStream:
    """An iterable of Tokens. The parser does not iterate over it but
    calls next() to advance; the active token is kept in ``current``."""

    def __init__(
        self,
        generator: t.Iterable[Token],
        name: t.Optional[str],
        filename: t.Optional[str],
    ):
        self._iter = iter(generator)
        self._pushed: "te.Deque[Token]" = deque()
        self.name = name
        self.filename = filename
        self.closed = False
        self.current = Token(1, TOKEN_INITIAL, "")
        next(self)  # advance past the initial token
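Because __init__ immediately calls next(self), the first real token is already in ``current`` when the constructor returns. A construction sketch with hand-built (hypothetical) tokens:

    from jinja2.lexer import Token, TokenStream

    toks = [Token(1, "name", "x"), Token(2, "name", "y")]
    stream = TokenStream(iter(toks), None, None)
    assert stream.current.value == "x"  # first token loaded eagerly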
    def __iter__(self) -> TokenStreamIterator:
        return TokenStreamIterator(self)

    def __bool__(self) -> bool:
        return bool(self._pushed) or self.current.type is not TOKEN_EOF

    @property
    def eos(self) -> bool:
        """Are we at the end of the stream?"""
        return not self
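Continuing the sketch above: iterating hands out tokens until EOF, after which ``eos`` reports exhaustion:

    for token in stream:
        print(token.lineno, token.type, token.value)

    assert stream.eos  # the iterator closed the stream at EOF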
    def push(self, token: Token) -> None:
        """Push a token back onto the stream."""
        self._pushed.append(token)

    def look(self) -> Token:
        """Look at the next token without consuming anything."""
        old_token = next(self)   # returns the old current, advances
        result = self.current    # the lookahead token
        self.push(result)        # queue it to be re-delivered
        self.current = old_token # restore the original current
        return result
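look therefore gives one token of lookahead without consuming anything. A sketch against a real template, assuming a default Environment (env.lexer is the environment's shared Lexer):

    from jinja2 import Environment

    env = Environment()
    stream = env.lexer.tokenize("{{ a + b }}")
    assert stream.current.type == "variable_begin"
    peeked = stream.look()
    assert (peeked.type, peeked.value) == ("name", "a")
    assert stream.current.type == "variable_begin"  # unchanged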
    def skip(self, n: int = 1) -> None:
        """Skip n tokens ahead."""
        for _ in range(n):
            next(self)
    def next_if(self, expr: str) -> t.Optional[Token]:
        """Perform the token test and return the token if it matched,
        otherwise return None."""
        if self.current.test(expr):
            return next(self)

        return None

    def skip_if(self, expr: str) -> bool:
        """Like next_if, but only report whether a token was consumed."""
        return self.next_if(expr) is not None
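next_if and skip_if are the conditional forms a parser uses for optional syntax. A sketch with a real stream ("dot" is the token type the lexer assigns the "." operator):

    from jinja2 import Environment

    stream = Environment().lexer.tokenize("{{ user.name }}")
    stream.skip(1)                      # step past variable_begin
    tok = stream.next_if("name")
    assert tok is not None and tok.value == "user"
    if stream.skip_if("dot"):
        assert stream.current.value == "name"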
    def __next__(self) -> Token:
        """Go one token ahead and return the old current token.
        Use the builtin next() rather than calling this directly."""
        rv = self.current

        if self._pushed:
            self.current = self._pushed.popleft()
        elif self.current.type is not TOKEN_EOF:
            try:
                self.current = next(self._iter)
            except StopIteration:
                self.close()

        return rv
    def close(self) -> None:
        self.current = Token(self.current.lineno, TOKEN_EOF, "")
        self._iter = iter(())
        self.closed = True
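Together, __next__ and close guarantee that a drained stream parks on a synthetic EOF token instead of raising. A draining sketch:

    from jinja2 import Environment

    stream = Environment().lexer.tokenize("static text")
    while not stream.eos:
        next(stream)
    assert stream.closed and stream.current.type == "eof"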
    def expect(self, expr: str) -> Token:
        """Expect a given token type and return it. Accepts the same
        expressions as Token.test."""
        if not self.current.test(expr):
            expr = describe_token_expr(expr)  # module-level helper, elided here

            if self.current.type is TOKEN_EOF:
                raise TemplateSyntaxError(
                    f"unexpected end of template, expected {expr!r}.",
                    self.current.lineno,
                    self.name,
                    self.filename,
                )

            raise TemplateSyntaxError(
                f"expected token {expr!r}, got {describe_token(self.current)!r}",
                self.current.lineno,
                self.name,
                self.filename,
            )

        return next(self)
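expect accepts the same expressions as Token.test, so a parser can assert structure and grab values in one call:

    from jinja2 import Environment

    stream = Environment().lexer.tokenize("{% if x %}")
    stream.expect("block_begin")
    stream.expect("name:if")          # type:value expression
    cond = stream.expect("name")      # raises TemplateSyntaxError on mismatch
    assert cond.value == "x"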
newline_re = re.compile(r"(\r\n|\r|\n)")


class Lexer:
    """Implements a lexer for a given environment; environments with the
    same options can share one lexer."""

    def __init__(self, environment: "Environment") -> None:
        ...
        self.lstrip_blocks = environment.lstrip_blocks

        self.newline_sequence = environment.newline_sequence
        self.keep_trailing_newline = environment.keep_trailing_newline
        ...
        self.rules: t.Dict[str, t.List[_Rule]] = {
            ...  # per-state rule tables; _Rule is a module-level NamedTuple
        }
    def _normalize_newlines(self, value: str) -> str:
        """Replace all newlines with the configured sequence in strings
        and template data."""
        return newline_re.sub(self.newline_sequence, value)
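The configured sequence comes from Environment(newline_sequence=...). A sketch of the effect; note _normalize_newlines is a private helper, called here only for illustration:

    from jinja2 import Environment

    env = Environment(newline_sequence="\r\n")
    assert env.lexer._normalize_newlines("a\nb\rc") == "a\r\nb\r\nc"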
    def tokenize(
        self, source: str, name: t.Optional[str] = None,
        filename: t.Optional[str] = None, state: t.Optional[str] = None,
    ) -> TokenStream:
        stream = self.tokeniter(source, name, filename, state)
        return TokenStream(self.wrap(stream, name, filename), name, filename)
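End to end, tokenize turns template source into a TokenStream. A sketch of the sequence the default lexer produces for a small template:

    from jinja2 import Environment

    for tok in Environment().lexer.tokenize("Hello {{ name }}!"):
        print(tok.type, repr(tok.value))
    # data 'Hello ', variable_begin '{{', name 'name',
    # variable_end '}}', data '!'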
    def wrap(self, stream, name=None, filename=None) -> t.Iterator[Token]:
        """Wrap the (lineno, token, value) tuples from tokeniter in Tokens."""
        for lineno, token, value_str in stream:
            ...
            value = self._normalize_newlines(value_str)
            ...
            self._normalize_newlines(value_str[1:-1])  # string literal: quotes dropped
            ...
    def tokeniter(self, source, name, filename=None, state=None):
        lines = newline_re.split(source)[::2]

        if not self.keep_trailing_newline and lines[-1] == "":
            del lines[-1]
        ...
        statetokens = self.rules[stack[-1]]
        ...
        ... and self.lstrip_blocks  # gates stripping whitespace before block tags
        ...
        statetokens = self.rules[stack[-1]]  # refresh after a state change
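The keep_trailing_newline check above is observable from the data tokens. A sketch (the option defaults to True):

    from jinja2 import Environment

    toks = list(Environment(keep_trailing_newline=False).lexer.tokenize("hi\n"))
    assert toks[-1].value == "hi"      # trailing newline dropped

    toks = list(Environment(keep_trailing_newline=True).lexer.tokenize("hi\n"))
    assert toks[-1].value == "hi\n"    # trailing newline preserved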