Lines Matching refs:token

285 for token in self.alias:
286 if token is not None and name == token.name:
340 for token in token_list:
341 if token.name == node.name:
465 token = tokens[end]
467 if token.name == '<':
469 elif token.name == '>':
506 token = tokens[i]
507 if token.name == '<':
514 elif token.name == ',':
517 elif token.name == '*':
519 elif token.name == '&':
521 elif token.name == '[':
523 elif token.name == ']':
526 name_tokens.append(token)
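
The hits at 465-526 are two token scans: 465-469 balance template angle brackets with a nesting count, and 506-526 walk a type's tokens, splitting modifier syntax ('*', '&', '[', ']') from the tokens that form the type name. A minimal sketch of the second scan, with plain strings standing in for the real token objects (which carry .name):

    # Sketch of the modifier/name split at lines 506-526; the real
    # code appends token objects, strings are used here for brevity.
    def split_type_tokens(tokens):
        name_tokens = []
        pointer = reference = array = False
        for token in tokens:
            if token == '*':
                pointer = True
            elif token == '&':
                reference = True
            elif token == '[':
                array = True
            elif token == ']':
                pass  # the matching '[' already set the flag
            else:
                name_tokens.append(token)
        return name_tokens, pointer, reference, array

    print(split_type_tokens(['const', 'char', '*']))
    # (['const', 'char'], True, False, False)
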
709 def HandleError(self, msg, token):
712 (msg, self.filename, token, printable_queue))
716 token = self._GetNextToken()
717 if not token:
720 # Get the next token.
721 self.current_token = token
723 # Dispatch on the next token type.
724 if token.token_type == _INTERNAL_TOKEN:
725 if token.name == _NAMESPACE_POP:
730 result = self._GenerateOne(token)
734 self.HandleError('exception', token)
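
Lines 709-734 are the parser's main loop: pull the next token, short-circuit the internal namespace-pop sentinel, and hand everything else to _GenerateOne, reporting failures through HandleError. A minimal sketch of that control flow, with stand-in definitions for the token type and sentinels and the error handling omitted:

    # Sketch of the generate loop at lines 716-734. Token,
    # _INTERNAL_TOKEN, and _NAMESPACE_POP are stand-ins inferred
    # from the listing.
    import collections

    Token = collections.namedtuple('Token', 'token_type name')
    _INTERNAL_TOKEN = 'INTERNAL'
    _NAMESPACE_POP = 'namespace-pop'

    class Parser(object):
        def __init__(self, tokens):
            self._tokens = iter(tokens)
            self.namespace_stack = ['outer']

        def _GetNextToken(self):
            return next(self._tokens, None)

        def _GenerateOne(self, token):
            return token.name  # the real parser builds an AST node here

        def Generate(self):
            while True:
                token = self._GetNextToken()
                if not token:
                    break
                # Dispatch on the next token type.
                if token.token_type == _INTERNAL_TOKEN:
                    if token.name == _NAMESPACE_POP:
                        self.namespace_stack.pop()
                    continue
                result = self._GenerateOne(token)
                if result is not None:
                    yield result

    tokens = [Token('NAME', 'int'), Token(_INTERNAL_TOKEN, _NAMESPACE_POP)]
    print(list(Parser(tokens).Generate()))  # ['int']
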
748 def _GenerateOne(self, token):
749 if token.token_type == tokenize.NAME:
750 if (keywords.IsKeyword(token.name) and
751 not keywords.IsBuiltinType(token.name)):
752 if token.name == 'enum':
753 # Pop the next token and only put it back if it's not
754 # 'class'. This allows us to support the two-token
760 method = getattr(self, 'handle_' + token.name)
762 elif token.name == self.in_class_name_only:
763 # The token name is the same as the class, must be a ctor if
765 # Peek ahead to get the next token to figure out which.
769 return self._GetMethod([token], FUNCTION_CTOR, None, True)
777 temp_tokens.insert(0, token)
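
Lines 748-760 route keyword tokens by name: anything that is a keyword but not a builtin type is dispatched to a handle_<keyword> method looked up with getattr. A hedged sketch of that idiom (the handler set here is illustrative, not the parser's real one):

    # Sketch of the getattr dispatch at line 760; handler names mirror
    # the 'handle_' + token.name convention, the bodies are placeholders.
    class KeywordHandlers(object):
        def handle_enum(self):
            return 'parsed an enum'

        def handle_namespace(self):
            return 'parsed a namespace'

        def dispatch(self, keyword):
            method = getattr(self, 'handle_' + keyword, None)
            if method is None:
                raise ValueError('no handler for %r' % keyword)
            return method()

    print(KeywordHandlers().dispatch('enum'))  # parsed an enum
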
825 elif token.token_type == tokenize.SYNTAX:
826 if token.name == '~' and self.in_class:
828 token = self._GetNextToken()
831 if (token.token_type == tokenize.NAME and
832 token.name == self.in_class_name_only):
833 return self._GetMethod([token], FUNCTION_DTOR, None, True)
835 elif token.token_type == tokenize.PREPROCESSOR:
837 # token starts with a #, so remove it and strip whitespace.
838 name = token.name[1:].lstrip()
846 assert name[0] in '<"', token
847 assert name[-1] in '>"', token
850 return Include(token.start, token.end, filename, system)
861 return Define(token.start, token.end, name, value)
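
The run at 835-861 handles preprocessor tokens: strip the leading '#', then build an Include node (asserting the <...> or "..." delimiters at 846-847) or a Define node. A sketch of that classification, with tuples standing in for the Include/Define node constructors:

    # Sketch of the preprocessor handling at lines 835-861; tuples
    # replace the real Include/Define node types.
    def classify_directive(text):
        name = text[1:].lstrip()          # token starts with '#'
        if name.startswith('include'):
            filename = name[len('include'):].strip()
            assert filename[0] in '<"' and filename[-1] in '>"', filename
            system = filename[0] == '<'   # <...> means a system header
            return ('include', filename[1:-1], system)
        if name.startswith('define'):
            rest = name[len('define'):].strip()
            macro, _, value = rest.partition(' ')
            return ('define', macro, value.strip())
        return ('other', name, None)

    print(classify_directive('#include <vector>'))
    print(classify_directive('#define MAX_SIZE 42'))
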
881 # expected token.
899 def _IgnoreUpTo(self, token_type, token):
900 unused_tokens = self._GetTokensUpTo(token_type, token)
905 token = self._GetNextToken()
906 if token.token_type != tokenize.PREPROCESSOR:
909 name = token.name[1:].lstrip()
920 # Assumes the current token is open_paren and we will consume
923 token = GetNextToken()
925 if token.token_type == tokenize.SYNTAX:
926 if token.name == open_paren:
928 elif token.name == close_paren:
932 yield token
933 token = GetNextToken()
934 yield token
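
Lines 920-934 are a balanced-delimiter generator: assuming the current token is open_paren, it yields tokens while counting nesting, and finally yields the matching close. The same counting idea appears for '<'/'>' at 465-469. A minimal sketch over plain string tokens:

    # Sketch of the balanced scan at lines 920-934: yields everything
    # through the matching close delimiter, tracking nesting depth.
    def get_matching(tokens, open_paren, close_paren):
        count = 1
        for token in tokens:
            if token == open_paren:
                count += 1
            elif token == close_paren:
                count -= 1
                if count == 0:
                    yield token  # the closing delimiter itself
                    return
            yield token

    tokens = iter(['a', '(', 'b', ')', ')', 'x'])
    print(list(get_matching(tokens, '(', ')')))
    # ['a', '(', 'b', ')', ')']
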
950 def _AddBackToken(self, token):
951 if token.whence == tokenize.WHENCE_STREAM:
952 token.whence = tokenize.WHENCE_QUEUE
953 self.token_queue.insert(0, token)
955 assert token.whence == tokenize.WHENCE_QUEUE, token
956 self.token_queue.append(token)
961 for token in tokens:
962 token.whence = tokenize.WHENCE_QUEUE
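
The hits at 950-962 implement token push-back: a token read too far is re-tagged from the stream to the queue and served again before the stream resumes. A simplified sketch (the real code also tracks each token's .whence; a plain list models the same ordering):

    # Sketch of the push-back queue at lines 950-962.
    class TokenSource(object):
        def __init__(self, stream):
            self._stream = iter(stream)
            self.token_queue = []

        def GetNextToken(self):
            if self.token_queue:
                return self.token_queue.pop(0)
            return next(self._stream, None)

        def AddBackToken(self, token):
            self.token_queue.insert(0, token)  # returned next

    src = TokenSource(['class', 'Foo', '{'])
    peek = src.GetNextToken()
    src.AddBackToken(peek)
    print(src.GetNextToken())  # 'class' again
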
1003 token = self._GetNextToken()
1004 assert token.token_type == tokenize.SYNTAX, token
1005 if token.name == '<':
1007 template_portion = [token]
1009 token = self._GetNextToken()
1010 assert token.token_type == tokenize.SYNTAX, token
1011 assert token.name == '(', token
1047 token = self._GetNextToken()
1048 assert token.name == '(', token
1052 token = self._GetNextToken()
1053 while token.token_type == tokenize.NAME:
1054 modifier_token = token
1055 token = self._GetNextToken()
1061 assert token.name == '(', token
1064 token = self._GetNextToken()
1067 assert token.name == '(', token
1070 token = self._GetNextToken()
1078 self.HandleError('unexpected token', modifier_token)
1080 assert token.token_type == tokenize.SYNTAX, token
1082 if token.name == ':':
1084 while token.name != ';' and token.name != '{':
1085 token = self._GetNextToken()
1089 if token.name == '(':
1099 token = self._GetNextToken()
1100 assert token.token_type == tokenize.SYNTAX, token
1101 assert token.name == ';', token
1118 if token.name == '{':
1123 if token.name == '=':
1124 token = self._GetNextToken()
1126 if token.name == 'default' or token.name == 'delete':
1129 token = self._GetNextToken()
1132 assert token.token_type == tokenize.CONSTANT, token
1133 assert token.name == '0', token
1135 token = self._GetNextToken()
1137 if token.name == '[':
1141 token = self._GetNextToken()
1143 assert token.name == ';', (token, return_type_and_name, parameters)
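
The cluster at 1118-1143 decides how a declaration ends: '{' starts a body, '= default'/'= delete' mark defaulted or deleted functions, '= 0' marks a pure virtual, and ';' closes a plain declaration. A compact sketch of that decision over simplified string tokens:

    # Sketch of the declaration-tail handling at lines 1118-1143.
    def classify_tail(tokens):
        it = iter(tokens)
        token = next(it)
        if token == '{':
            return 'definition with body'
        if token == '=':
            token = next(it)
            if token in ('default', 'delete'):
                return '= ' + token + ' function'
            assert token == '0', token
            return 'pure virtual'
        assert token == ';', token
        return 'declaration'

    print(classify_tail(['=', '0', ';']))  # pure virtual
    print(classify_tail(['{']))            # definition with body
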
1194 # Flatten the token sequence for the return type.
1235 name_tokens, token = self.GetName()
1240 if token.token_type == tokenize.SYNTAX and token.name == ';':
1241 return ctor(token.start, token.end, name, None,
1244 if token.token_type == tokenize.NAME and self._handling_typedef:
1245 self._AddBackToken(token)
1246 return ctor(token.start, token.end, name, None,
1252 if token.token_type == tokenize.SYNTAX and token.name == '{':
1254 new_type = ctor(token.start, token.end, name, fields,
1261 token = next
1264 assert token.token_type == tokenize.NAME, token
1265 return self._CreateVariable(token, token.name, name, [], '', None)
1331 token = token2 = self._GetNextToken()
1332 if token.name == 'inline':
1337 assert token.token_type == tokenize.NAME or token.name == '::', token
1340 return_type_and_name.insert(0, token)
1341 if token2 is not token:
1391 token = self._GetNextToken()
1392 if (token.token_type == tokenize.NAME and
1393 keywords.IsKeyword(token.name)):
1395 method = getattr(self, 'handle_' + token.name)
1400 tokens = [token]
1412 indices = token
1464 token = self._GetNextToken()
1465 assert token.token_type == tokenize.SYNTAX, token
1466 assert token.name == '<', token
1469 token = self._GetNextToken()
1470 if token.token_type == tokenize.NAME:
1471 if token.name == 'class':
1473 elif token.name == 'struct':
1475 elif token.name == 'friend':
1477 self._AddBackToken(token)
1502 token = self._GetNextToken()
1503 assert token.token_type == tokenize.NAME, token
1505 if token.name not in ('public', 'protected', 'private'):
1507 # Just put the token back so we can form a name.
1509 self._AddBackToken(token)
1512 token = self._GetNextToken()
1513 if token.name != 'virtual':
1514 self._AddBackToken(token)
1524 token = next_token
1528 return bases, token
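
Lines 1502-1528 read a base-class list: an access specifier is consumed if present, an optional 'virtual' is skipped, and anything else is pushed back so it can become part of the base name. A sketch of that lookahead-and-push-back pattern, with a list in place of the parser's token queue:

    # Sketch of the base-list scan at lines 1502-1528.
    def parse_base(tokens):
        access = 'private'                     # default for 'class'
        if tokens and tokens[0] in ('public', 'protected', 'private'):
            access = tokens.pop(0)
        if tokens and tokens[0] == 'virtual':  # skipped at 1512-1514
            tokens.pop(0)
        name = tokens.pop(0)                   # remaining tokens form a name
        return access, name

    print(parse_base(['public', 'virtual', 'Base']))  # ('public', 'Base')
    print(parse_base(['Base']))                       # ('private', 'Base')
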
1535 token = class_token
1544 name_tokens, token = self.GetName()
1547 if token.token_type == tokenize.SYNTAX:
1548 if token.name == ';':
1553 if token.name in '*&':
1562 modifiers, token.name, None)
1565 tokens = (class_token, token, name_token, next_token)
1568 if token.name == ':':
1569 bases, token = self._GetBases()
1572 if token.token_type == tokenize.SYNTAX and token.name == '{':
1573 assert token.token_type == tokenize.SYNTAX, token
1574 assert token.name == '{', token
1581 token = self._GetNextToken()
1582 if token.token_type != tokenize.NAME:
1583 assert token.token_type == tokenize.SYNTAX, token
1584 assert token.name == ';', token
1592 token.name, new_class,
1593 modifiers, token.name, None)
1596 self.HandleError('non-typedef token', token)
1597 self._AddBackToken(token)
1605 name_tokens, token = self.GetName()
1609 assert token.token_type == tokenize.SYNTAX, token
1610 # Create an internal token that denotes when the namespace is complete.
1613 internal_token.whence = token.whence
1614 if token.name == '=':
1620 assert token.name == '{', token
1622 # Replace the trailing } with the internal namespace pop token.
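
At 1605-1622 the namespace handler manufactures an internal token marking where the namespace closes: the body tokens are kept, but the trailing '}' is replaced with the _NAMESPACE_POP sentinel that the generate loop (lines 724-727) uses to pop the stack. A sketch of that substitution:

    # Sketch of the sentinel substitution at lines 1610-1622: the
    # internal pop token lets a flat token stream carry nesting info.
    _NAMESPACE_POP = '<namespace-pop>'

    def splice_namespace_body(tokens):
        assert tokens[0] == '{', tokens[0]
        body = tokens[1:]
        assert body and body[-1] == '}', body
        # Replace the trailing } with the internal namespace pop token.
        body[-1] = _NAMESPACE_POP
        return body

    print(splice_namespace_body(['{', 'int', 'x', ';', '}']))
    # ['int', 'x', ';', '<namespace-pop>']
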
1644 # Pull off the next token(s?) and make that part of the method name.
1657 token = self._GetNextToken()
1658 assert token.token_type == tokenize.SYNTAX
1659 assert token.name == ':'
1721 should_print: predicate with signature: bool Function(token)
1745 should_print: predicate with signature: bool Function(token)
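
The last two hits (1721, 1745) are docstring lines for a should_print parameter: a predicate called per token to decide what gets printed. A small sketch of how such a hook plugs in:

    # Sketch of the should_print hook described at lines 1721/1745:
    # any callable taking one item and returning bool filters output.
    def print_filtered(items, should_print):
        for item in items:
            if should_print(item):
                print(item)

    print_filtered(['Include', 'Class', 'Define'],
                   should_print=lambda name: name != 'Include')
    # Class
    # Define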