Lines Matching refs:tokens

341                 # TODO(nnorwitz): bases are tokens, do name comparison.
384 # TODO(nnorwitz): parameters are tokens, do name comparison.
463 def _GetTemplateEnd(self, tokens, start):
467 token = tokens[end]
475 return tokens[start:end-1], end
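
The _GetTemplateEnd fragments above (count the nesting, return tokens[start:end-1] to drop the closing '>') suggest the following shape. This is a minimal sketch, assuming each token carries a .name attribute and that start points just past the opening '<', as the call with i+1 at line 510 implies; Token here is a stand-in, not the module's tokenize.Token.

    class Token(object):
        def __init__(self, name):
            self.name = name

    def get_template_end(tokens, start):
        """Return (tokens inside <...> without the closing '>', index past '>')."""
        count = 1
        end = start
        while count:
            token = tokens[end]
            if token.name == '<':
                count += 1
            elif token.name == '>':
                count -= 1
            end += 1
        return tokens[start:end - 1], end

    toks = [Token(n) for n in ['int', ',', 'map', '<', 'int', ',', 'int', '>', '>', 'x']]
    inner, end = get_template_end(toks, 0)
    print([t.name for t in inner], end)   # nested '<...>' kept intact; end == 9
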
477 def ToType(self, tokens):
490 # Partition tokens into name and modifier tokens.
506 end = len(tokens)
508 token = tokens[i]
510 new_tokens, new_end = self._GetTemplateEnd(tokens, i+1)
532 # No '<' in the tokens, just a simple name and no template.
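
Lines 490-532 only hint at ToType's shape: partition the tokens into name tokens versus modifier tokens, recursing through _GetTemplateEnd when a '<' appears. A hedged sketch of just the partition step; the MODIFIERS set below is an assumption for illustration, not the module's actual list.

    MODIFIERS = frozenset(['const', 'volatile', 'mutable', 'static',
                           'unsigned', 'signed', 'register'])

    def split_name_and_modifiers(names):
        """Partition flat token names into (name parts, modifiers)."""
        name_parts, modifiers = [], []
        for name in names:
            if name in MODIFIERS:
                modifiers.append(name)
            else:
                name_parts.append(name)
        return name_parts, modifiers

    print(split_name_and_modifiers(['const', 'unsigned', 'int']))
    # (['int'], ['const', 'unsigned'])
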
591 def ToParameters(self, tokens):
592 if not tokens:
616 for s in tokens:
649 AddParameter(tokens[-1].end)
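
ToParameters (lines 591-649) appears to walk the tokens and flush a parameter at each top-level comma, with a final AddParameter(tokens[-1].end) for the trailing one. A sketch of that splitting on plain names, tracking '<'/'(' nesting so commas inside template arguments do not split; the depth handling is an assumption based on the surrounding template code.

    def split_parameters(names):
        """Split parameter token names on top-level commas only."""
        params, current, depth = [], [], 0
        for name in names:
            if name in ('<', '('):
                depth += 1
            elif name in ('>', ')'):
                depth -= 1
            if name == ',' and depth == 0:
                params.append(current)
                current = []
            else:
                current.append(name)
        if current:
            params.append(current)
        return params

    print(split_parameters(['int', 'a', ',',
                            'map', '<', 'int', ',', 'int', '>', 'b']))
    # [['int', 'a'], ['map', '<', 'int', ',', 'int', '>', 'b']]
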
679 self.tokens = token_stream
854 tokens = []
857 tokens.append(last_token)
859 return tokens, last_token
908 return next(self.tokens)
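
Lines 679-908 show two token sources: the raw stream (self.tokens = token_stream, consumed with next(self.tokens)) and a token_queue holding pushed-back tokens. A plausible reconstruction of the accessor, assuming the queue is drained before the stream; the class and method names here are stand-ins.

    class TokenSource(object):
        """Hypothetical reconstruction: queued tokens win over the stream."""

        def __init__(self, token_stream):
            self.tokens = token_stream   # iterator of tokens
            self.token_queue = []        # lookahead tokens pushed back

        def get_next_token(self):
            if self.token_queue:
                return self.token_queue.pop()
            return next(self.tokens)

    src = TokenSource(iter(['a', 'b']))
    src.token_queue.append('pushed')
    print(src.get_next_token(), src.get_next_token())  # pushed a
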
918 def _AddBackTokens(self, tokens):
919 if tokens:
920 if tokens[-1].whence == tokenize.WHENCE_STREAM:
921 for token in tokens:
923 self.token_queue[:0] = reversed(tokens)
925 assert tokens[-1].whence == tokenize.WHENCE_QUEUE, tokens
926 self.token_queue.extend(reversed(tokens))
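
_AddBackTokens reverses in both branches (lines 923 and 926). If the consumer pops from the end of token_queue, as sketched above, storing the tokens reversed is exactly what makes them come back out in their original order:

    queue = []
    tokens = ['t1', 't2', 't3']
    queue.extend(reversed(tokens))                 # queue: ['t3', 't2', 't1']
    print(queue.pop(), queue.pop(), queue.pop())   # t1 t2 t3

The token_queue[:0] = reversed(tokens) variant at line 923 files the new tokens at the front of the list, so with end-pops anything already queued is consumed first.
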
929 """Returns ([tokens], next_token_info)."""
935 tokens = []
945 tokens.append(next_token)
948 tokens.extend(self._GetMatchingChar('<', '>', GetNextToken))
951 return tokens, next_token
1098 # TODO(nnorwitz): store tokens and improve parsing.
1100 tokens = list(self._GetMatchingChar('[', ']'))
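
_GetMatchingChar is used with both '[' ']' (line 1100) and '<' '>' (lines 948 and 1397). A sketch of a nesting-aware generator of that shape, yielding every token through the close char that balances an already-consumed open char; plain strings stand in for tokens here.

    def get_matching_char(open_char, close_char, get_next_token):
        count = 1
        while count:
            token = get_next_token()
            if token == open_char:
                count += 1
            elif token == close_char:
                count -= 1
            yield token

    stream = iter(['a', '[', 'b', ']', ']', 'rest'])
    print(list(get_matching_char('[', ']', lambda: next(stream))))
    # ['a', '[', 'b', ']', ']'] -- stops at the ']' matching the open '['
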
1324 tokens = self._GetTokensUpTo(tokenize.SYNTAX, ';')
1325 assert tokens
1326 t0 = tokens[0]
1327 return Friend(t0.start, t0.end, tokens, self.namespace_stack)
1345 tokens = self._GetTokensUpTo(tokenize.SYNTAX, ';')
1346 assert tokens
1347 return Delete(tokens[0].start, tokens[0].end, tokens)
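
The Friend and Delete handlers share one pattern: collect everything up to ';', assert the list is non-empty, then stamp the node with the first token's source span. A self-contained sketch; Token and Friend below are hypothetical namedtuples, not the module's real classes.

    import collections

    Token = collections.namedtuple('Token', 'name start end')
    Friend = collections.namedtuple('Friend', 'start end tokens')

    def make_friend(tokens):
        assert tokens
        t0 = tokens[0]                        # span comes from the first token
        return Friend(t0.start, t0.end, tokens)

    print(make_friend([Token('class', 0, 5), Token('Foo', 6, 9)]))
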
1356 tokens = [method()]
1359 tokens = [token]
1362 tokens.extend(self._GetTokensUpTo(tokenize.SYNTAX, ';'))
1365 assert tokens
1366 name = tokens.pop()
1368 if tokens:
1369 indices = tokens[0]
1374 if (len(tokens) >= 4 and
1375 tokens[1].name == '(' and tokens[2].name == '*'):
1376 tokens.append(name)
1377 name = tokens[3]
1380 if len(tokens) >= 2:
1381 tokens.append(name)
1382 name = tokens[1]
1383 new_type = tokens
1384 if tokens and isinstance(tokens[0], tokenize.Token):
1385 new_type = self.converter.ToType(tokens)[0]
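
Lines 1366-1385 pop the last token as the typedef name, then patch it up for pointer-to-function declarations: when tokens[1] is '(' and tokens[2] is '*', the popped token is really the trailing ')' and the name sits at index 3. A sketch of that fix-up on plain names:

    def typedef_name(names):
        name = names.pop()
        if len(names) >= 4 and names[1] == '(' and names[2] == '*':
            names.append(name)      # put the popped ')' back
            name = names[3]         # the real name follows '(' and '*'
        return name

    # typedef void (*Fn)(int);  ->  tokens up to ';' below
    print(typedef_name(['void', '(', '*', 'Fn', ')', '(', 'int', ')']))  # Fn
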
1397 tokens = list(self._GetMatchingChar('<', '>'))
1398 len_tokens = len(tokens) - 1 # Ignore trailing '>'.
1401 key = tokens[i].name
1408 if tokens[i-1].name == '=':
1409 assert i < len_tokens, '%s %s' % (i, tokens)
1410 default, unused_next_token = self.GetName(tokens[i:])
1413 if tokens[i-1].name != ',':
1416 key = tokens[i-1].name
1417 type_name = tokens[i-2]
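
The template scan (lines 1397-1417) walks the tokens between '<' and '>', treating the token before '=' as a parameter with a default and otherwise taking the last token of each comma group as the name. A simplified, hedged sketch that maps parameter names to their defaults; it ignores nested templates inside defaults, which the real code handles via GetName.

    def template_params(names):
        """Map each template parameter name to its default (or None)."""
        params, group = {}, []
        for name in names + [',']:          # sentinel flushes the last group
            if name == ',':
                if '=' in group:
                    eq = group.index('=')
                    params[group[eq - 1]] = ' '.join(group[eq + 1:])
                else:
                    params[group[-1]] = None
                group = []
            else:
                group.append(name)
        return params

    print(template_params(['typename', 'T', ',', 'int', 'N', '=', '4']))
    # {'T': None, 'N': '4'}
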
1437 tokens, last = self._GetVarTokensUpTo(tokenize.SYNTAX, '(', ';')
1438 tokens.append(last)
1439 self._AddBackTokens(tokens)
1524 tokens = (class_token, token, name_token, next_token)
1525 self._AddBackTokens(tokens)
1581 tokens = list(self.GetScope())
1583 tokens[-1] = internal_token
1585 self._AddBackTokens(tokens)
1589 tokens = self._GetTokensUpTo(tokenize.SYNTAX, ';')
1590 assert tokens
1591 return Using(tokens[0].start, tokens[0].end, tokens)
1628 tokens = self._GetTokensUpTo(tokenize.SYNTAX, ';')
1629 if not tokens:
1631 return Return(tokens[0].start, tokens[0].end, tokens)
1634 tokens = self._GetTokensUpTo(tokenize.SYNTAX, ';')
1635 assert len(tokens) == 1, str(tokens)
1636 return Goto(tokens[0].start, tokens[0].end, tokens[0].name)
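
Return and Goto close out the same shape as Friend, Delete, and Using: tokens up to ';', node stamped with the first token's span. Goto additionally asserts exactly one token, the label. A sketch with stand-in namedtuples rather than the module's real node classes:

    import collections

    Token = collections.namedtuple('Token', 'name start end')
    Goto = collections.namedtuple('Goto', 'start end label')

    def make_goto(tokens):
        assert len(tokens) == 1, str(tokens)   # only the label precedes ';'
        t0 = tokens[0]
        return Goto(t0.start, t0.end, t0.name)

    print(make_goto([Token('done', 10, 14)]))
    # Goto(start=10, end=14, label='done')
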