Lines Matching refs:self

224     def __init__(self, message, cls=TemplateSyntaxError):
225         self.message = message
226         self.error_class = cls
228     def __call__(self, lineno, filename):
229         raise self.error_class(self.message, lineno, filename)
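
These fragments track the Failure class; because only lines matching `self` are listed, docstrings, blank lines, and some statements are elided throughout. The identifiers match Jinja2's lexer module, so the sketches below assume jinja2 import paths. A Failure is a deferred error: the message is fixed when the rule table is built, and the callable raises once the lexer knows the position of the offending input. A minimal sketch:

    from jinja2.exceptions import TemplateSyntaxError
    from jinja2.lexer import Failure

    # The message is fixed up front; the line number and filename arrive
    # later, when the lexer actually hits the bad input.
    fail = Failure('unexpected end of comment')
    try:
        fail(3, 'example.html')
    except TemplateSyntaxError as exc:
        print(exc.lineno)  # 3
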
240     def __str__(self):
241         if self.type in reverse_operators:
242             return reverse_operators[self.type]
243         elif self.type == 'name':
244             return self.value
245         return self.type
247     def test(self, expr):
254         if self.type == expr:
257             return expr.split(':', 1) == [self.type, self.value]
260     def test_any(self, *iterable):
263             if self.test(expr):
267     def __repr__(self):
269             self.lineno,
270             self.type,
271             self.value
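
Token.test accepts either a bare token type ('name') or a 'type:value' pair ('name:if'); the elided lines between 254 and 257 are the early `return True` and the `elif ':' in expr:` guard. A usage sketch (building a Token by hand is purely illustrative, the lexer normally constructs them):

    from jinja2.lexer import Token

    tok = Token(1, 'name', 'if')                # (lineno, type, value)
    print(str(tok))                             # 'if': names print their value
    print(tok.test('name'))                     # True, bare type match
    print(tok.test('name:if'))                  # True, type and value match
    print(tok.test_any('integer', 'name:for'))  # False, nothing matches
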
281     def __init__(self, stream):
282         self.stream = stream
284     def __iter__(self):
285         return self
287     def __next__(self):
288         token = self.stream.current
290             self.stream.close()
292         next(self.stream)
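
TokenStreamIterator is what a for loop gets from a TokenStream: it yields the current token and advances, and at EOF (the check on the elided line 289) closes the stream and raises StopIteration. Assuming a default jinja2 Environment, which exposes its lexer as env.lexer:

    from jinja2 import Environment

    env = Environment()
    for token in env.lexer.tokenize('{{ foo }}'):  # drives TokenStreamIterator
        print(token.lineno, token.type, token.value)
    # 1 variable_begin {{
    # 1 name foo
    # 1 variable_end }}
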
303     def __init__(self, generator, name, filename):
304         self._iter = iter(generator)
305         self._pushed = deque()
306         self.name = name
307         self.filename = filename
308         self.closed = False
309         self.current = Token(1, TOKEN_INITIAL, '')
310         next(self)
312     def __iter__(self):
313         return TokenStreamIterator(self)
315     def __bool__(self):
316         return bool(self._pushed) or self.current.type is not TOKEN_EOF
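
The constructor primes the stream with a synthetic TOKEN_INITIAL token and immediately advances, so `current` always holds a real token afterwards. `__bool__` makes the stream falsy exactly when no pushed-back tokens remain and the current token is EOF, so a parser can drain it with a plain while loop:

    from jinja2 import Environment

    stream = Environment().lexer.tokenize('{{ a + b }}')
    while stream:                # falsy once current is the EOF token
        token = next(stream)     # next() returns the outgoing current token
        print(token.type, token.value)
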
321     def push(self, token):
323         self._pushed.append(token)
325     def look(self):
327         old_token = next(self)
328         result = self.current
329         self.push(result)
330         self.current = old_token
333     def skip(self, n=1):
336             next(self)
338     def next_if(self, expr):
342         if self.current.test(expr):
343             return next(self)
345     def skip_if(self, expr):
347         return self.next_if(expr) is not None
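
look advances, records the next token, pushes it back onto the deque, and restores `current`, which gives one token of lookahead without consuming anything (its final `return result` on line 331 is elided here). next_if and skip_if layer conditional consumption on top of Token.test. A sketch of the three together:

    from jinja2 import Environment

    stream = Environment().lexer.tokenize('{% for x in seq %}{% endfor %}')
    print(stream.skip_if('block_begin'))  # True, the '{%' token is consumed
    print(stream.look().value)            # 'x', peeked without consuming
    tok = stream.next_if('name:for')      # consumed only because it matched
    print(tok.value)                      # 'for'
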
349     def __next__(self):
354         rv = self.current
355         if self._pushed:
356             self.current = self._pushed.popleft()
357         elif self.current.type is not TOKEN_EOF:
359                 self.current = next(self._iter)
361                 self.close()
364     def close(self):
366         self.current = Token(self.current.lineno, TOKEN_EOF, '')
367         self._iter = None
368         self.closed = True
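
__next__ hands back the old current token and pulls its replacement from the pushback deque first, then from the underlying generator; the elided lines around 359 and 361 appear to be the try/except StopIteration that routes generator exhaustion into close(). Once closed, the stream pins an EOF token at the last line number, so advancing past the end is a no-op rather than an error:

    from jinja2 import Environment

    stream = Environment().lexer.tokenize('{{ x }}')
    stream.skip(3)                # variable_begin, name, variable_end
    print(stream.current.type)    # 'eof', close() pinned an EOF token
    print(next(stream).type)      # still 'eof', never raises StopIteration
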
370     def expect(self, expr):
374         if not self.current.test(expr):
376             if self.current.type is TOKEN_EOF:
379                                           self.current.lineno,
380                                           self.name, self.filename)
382                                       (expr, describe_token(self.current)),
383                                       self.current.lineno,
384                                       self.name, self.filename)
386             return self.current
388             next(self)
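
expect is the parser's workhorse: when the current token fails the test it raises TemplateSyntaxError, with a dedicated message if the stream already hit EOF; otherwise the elided try/finally around lines 385-388 returns the current token and then advances. Typical parser-style usage:

    from jinja2 import Environment
    from jinja2.exceptions import TemplateSyntaxError

    stream = Environment().lexer.tokenize('{{ price }}')
    stream.expect('variable_begin')     # matches, returns the token, advances
    print(stream.expect('name').value)  # 'price'
    try:
        stream.expect('integer')        # current token is variable_end
    except TemplateSyntaxError as exc:
        print(exc)                      # expected token 'integer', got ...
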
420     def __init__(self, environment):
482         self.newline_sequence = environment.newline_sequence
483         self.keep_trailing_newline = environment.keep_trailing_newline
486         self.rules = {
548     def _normalize_newlines(self, value):
550         return newline_re.sub(self.newline_sequence, value)
552     def tokenize(self, source, name=None, filename=None, state=None):
555         stream = self.tokeniter(source, name, filename, state)
556         return TokenStream(self.wrap(stream, name, filename), name, filename)
558     def wrap(self, stream, name=None, filename=None):
573                 value = self._normalize_newlines(value)
585                     value = self._normalize_newlines(value[1:-1]) \
599     def tokeniter(self, source, name, filename=None, state=None):
605         if self.keep_trailing_newline and source:
619         statetokens = self.rules[stack[-1]]
720                     statetokens = self.rules[stack[-1]]
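
Taken together these methods form the lexing pipeline: tokeniter is a generator of raw (lineno, token, value) tuples driven by per-state regex rule tables (the `statetokens` reassignments at 619 and 720 are the state-stack switches), wrap converts the values (newline normalization for data, string unescaping in the continuation elided after line 585) and emits Token objects, and tokenize glues both into a TokenStream. The raw layer can be inspected directly, again assuming a default Environment:

    from jinja2 import Environment

    env = Environment()
    source = "{% if user %}{{ user.name }}{% endif %}"

    # Raw tuples, before wrap() converts values and builds Token objects;
    # whitespace tokens still appear here and are filtered out by wrap().
    for lineno, token, value in env.lexer.tokeniter(source, 'demo'):
        print(lineno, token, repr(value))
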