Lines Matching refs:tokens

339 # TODO(nnorwitz): bases are tokens, do name comparison.
382 # TODO(nnorwitz): parameters are tokens, do name comparison.
461 def _GetTemplateEnd(self, tokens, start):
465 token = tokens[end]
473 return tokens[start:end-1], end
475 def ToType(self, tokens):
488 # Partition tokens into name and modifier tokens.
504 end = len(tokens)
506 token = tokens[i]
508 new_tokens, new_end = self._GetTemplateEnd(tokens, i+1)
530 # No '<' in the tokens, just a simple name and no template.
589 def ToParameters(self, tokens):
590 if not tokens:
615 for s in tokens:
659 AddParameter(tokens[-1].end)
689 self.tokens = token_stream
873 tokens = []
876 tokens.append(last_token)
878 return tokens, last_token
885 tokens = []
890 tokens.append(last_token)
896 return tokens, last_token
946 return next(self.tokens)
958 def _AddBackTokens(self, tokens):
959 if tokens:
960 if tokens[-1].whence == tokenize.WHENCE_STREAM:
961 for token in tokens:
963 self.token_queue[:0] = reversed(tokens)
965 assert tokens[-1].whence == tokenize.WHENCE_QUEUE, tokens
966 self.token_queue.extend(reversed(tokens))
969 """Returns ([tokens], next_token_info)."""
975 tokens = []
985 tokens.append(next_token)
988 tokens.extend(self._GetMatchingChar('<', '>', GetNextToken))
991 return tokens, next_token
1138 # TODO(nnorwitz): store tokens and improve parsing.
1140 tokens = list(self._GetMatchingChar('[', ']'))
1365 tokens = self._GetTokensUpTo(tokenize.SYNTAX, ';')
1366 assert tokens
1367 t0 = tokens[0]
1368 return Friend(t0.start, t0.end, tokens, self.namespace_stack)
1386 tokens = self._GetTokensUpTo(tokenize.SYNTAX, ';')
1387 assert tokens
1388 return Delete(tokens[0].start, tokens[0].end, tokens)
1397 tokens = [method()]
1400 tokens = [token]
1403 tokens.extend(self._GetTokensUpTo(tokenize.SYNTAX, ';'))
1406 assert tokens
1407 name = tokens.pop()
1409 if tokens:
1410 indices = tokens[0]
1415 if (len(tokens) >= 4 and
1416 tokens[1].name == '(' and tokens[2].name == '*'):
1417 tokens.append(name)
1418 name = tokens[3]
1421 if len(tokens) >= 2:
1422 tokens.append(name)
1423 name = tokens[1]
1424 new_type = tokens
1425 if tokens and isinstance(tokens[0], tokenize.Token):
1426 new_type = self.converter.ToType(tokens)[0]
1438 tokens = list(self._GetMatchingChar('<', '>'))
1439 len_tokens = len(tokens) - 1 # Ignore trailing '>'.
1442 key = tokens[i].name
1449 if tokens[i-1].name == '=':
1450 assert i < len_tokens, '%s %s' % (i, tokens)
1451 default, unused_next_token = self.GetName(tokens[i:])
1454 if tokens[i-1].name != ',':
1457 key = tokens[i-1].name
1458 type_name = tokens[i-2]
1478 tokens, last = self._GetVarTokensUpTo(tokenize.SYNTAX, '(', ';')
1479 tokens.append(last)
1480 self._AddBackTokens(tokens)
1565 tokens = (class_token, token, name_token, next_token)
1566 self._AddBackTokens(tokens)
1621 tokens = list(self.GetScope())
1623 tokens[-1] = internal_token
1625 self._AddBackTokens(tokens)
1629 tokens = self._GetTokensUpTo(tokenize.SYNTAX, ';')
1630 assert tokens
1631 return Using(tokens[0].start, tokens[0].end, tokens)
1668 tokens = self._GetTokensUpTo(tokenize.SYNTAX, ';')
1669 if not tokens:
1671 return Return(tokens[0].start, tokens[0].end, tokens)
1674 tokens = self._GetTokensUpTo(tokenize.SYNTAX, ';')
1675 assert len(tokens) == 1, str(tokens)
1676 return Goto(tokens[0].start, tokens[0].end, tokens[0].name)