Lines Matching refs:tokens

13 # Default preprocessor lexer definitions. These tokens are enough to get
17 tokens = (
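
Line 17 is the standard PLY `tokens` declaration: a tuple naming every token type the lexer can emit, each paired with a `t_<NAME>` rule. A minimal sketch of such a preprocessor lexer, assuming PLY is installed; the specific token names and regular expressions below are illustrative, not copied from the file:

    import ply.lex as lex

    # Token names here are assumptions for illustration; any names work
    # as long as each has a matching t_<NAME> rule below.
    tokens = (
        'CPP_ID', 'CPP_INTEGER', 'CPP_STRING', 'CPP_WS', 'CPP_POUND',
    )

    t_CPP_ID      = r'[A-Za-z_]\w*'
    t_CPP_INTEGER = r'\d+'
    t_CPP_STRING  = r'"[^"\n]*"'
    t_CPP_WS      = r'[ \t]+'
    t_CPP_POUND   = r'\#'

    def t_error(t):
        t.lexer.skip(1)        # drop unrecognized characters

    lexer = lex.lex()
    lexer.input('#define ANSWER 42')
    # Yields CPP_POUND, CPP_ID, CPP_WS, CPP_ID, CPP_WS, CPP_INTEGER.
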
113 # .value - Macro value (a list of tokens)
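
Line 113 is from the comment block describing the Macro container: `.value` holds the replacement text as a list of already-lexed tokens, not a string, so expansion can splice it straight into a token stream. A sketch of such a container; field names other than `.value` should be treated as assumptions here:

    class Macro(object):
        """One preprocessor macro definition (sketch)."""
        def __init__(self, name, value, arglist=None, variadic=False):
            self.name = name            # macro name as a string
            self.value = value          # replacement: a list of tokens
            self.arglist = arglist      # parameter names; None if object-like
            self.variadic = variadic    # True for name(...) style macros
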
149 # Probe the lexer for selected tokens
160 # Utility function. Given a string of text, tokenize into a list of tokens
164 tokens = []
169 tokens.append(tok)
170 return tokens
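
Lines 160-170 are the tokenize() utility: push a string through the lexer and drain it into a list. A standalone sketch, assuming a PLY-style lexer with input()/token() methods; the `.source` tagging mirrors how the module tracks the originating file for error messages:

    def tokenize(lexer, text, source=None):
        """Sketch: lex `text` and return every token as a list."""
        tokens = []
        lexer.input(text)
        while True:
            tok = lexer.token()
            if not tok:
                break               # lexer exhausted
            tok.source = source     # tag for later error reporting
            tokens.append(tok)
        return tokens
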
187 # with any suitable lexer regardless of how tokens have been named.
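
Line 187 is from the description of lexprobe(), which feeds known inputs to the user-supplied lexer and records which token type names come back; later code then compares `token.type` against the recorded names instead of hard-coding them. A sketch of the probing idea; the probe strings and returned keys are illustrative:

    def lexprobe(lexer):
        """Sketch: discover a lexer's token type names by example."""
        def probe(text):
            lexer.input(text)
            tok = lexer.token()
            return tok.type if tok and tok.value == text else None

        names = {
            'ID':      probe('identifier'),
            'INTEGER': probe('12345'),
            'STRING':  probe('"probe"'),
        }
        ws = probe('  ')
        names['WS'] = (ws,) if ws else ()   # a tuple, for `type in ...` tests
        return names
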
292 # Remove leading/trailing whitespace tokens from a token list
295 def tokenstrip(self,tokens):
297 while i < len(tokens) and tokens[i].type in self.t_WS:
299 del tokens[:i]
300 i = len(tokens)-1
301 while i >= 0 and tokens[i].type in self.t_WS:
303 del tokens[i+1:]
304 return tokens
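
Lines 295-304 make up tokenstrip(): one scan finds the end of the leading whitespace run, a second finds the start of the trailing run, and both are deleted in place. Reconstructed as a standalone sketch; the real method tests membership in self.t_WS, passed here as a parameter:

    def tokenstrip(tokens, ws_types):
        """Sketch: delete leading/trailing whitespace tokens in place."""
        i = 0
        while i < len(tokens) and tokens[i].type in ws_types:
            i += 1
        del tokens[:i]              # leading whitespace run
        i = len(tokens) - 1
        while i >= 0 and tokens[i].type in ws_types:
            i -= 1
        del tokens[i+1:]            # trailing whitespace run
        return tokens
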
310 # Collects comma-separated arguments from a list of tokens. The arguments
312 # where tokencount is the number of tokens consumed, args is a list of arguments,
314 # argument. Each argument is represented by a list of tokens.
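
Lines 310-314 describe collect_args(): scan a parenthesized, comma-separated list and return (tokencount, args, positions). The key subtlety is that commas nested inside inner parentheses must not split arguments, which calls for a depth counter. A standalone sketch under that reading; tokens are any objects with a `.value` attribute:

    def collect_args(tokenlist):
        """Sketch: split '( a , b(c,d) , e )' into argument token lists.
        Returns (tokencount, args, positions)."""
        args, positions, current = [], [], []
        depth = 0
        for count, tok in enumerate(tokenlist):
            if tok.value == '(':
                depth += 1
                if depth == 1:              # the opening paren itself
                    positions.append(count + 1)
                    continue
            elif tok.value == ')':
                depth -= 1
                if depth == 0:              # matched the opening paren
                    args.append(current)
                    return (count + 1, args, positions)
            elif tok.value == ',' and depth == 1:
                args.append(current)        # a top-level comma ends an arg
                current = []
                positions.append(count + 1)
                continue
            if depth >= 1:
                current.append(tok)         # nested tokens stay in the arg
        return (0, [], [])                  # unterminated argument list
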
416 # representing the replacement macro tokens
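
Line 416 is from macro_expand_args(), which builds the replacement token sequence for a function-like macro by substituting each collected argument for the matching formal parameter. A simplified sketch that ignores `#` stringizing and `##` pasting; it assumes the Macro fields sketched earlier (`value`, `arglist`):

    import copy

    def macro_expand_args(macro, args):
        """Sketch: substitute argument token lists into a macro body."""
        position = {name: n for n, name in enumerate(macro.arglist)}
        rep = []
        for tok in macro.value:
            if tok.value in position:       # formal parameter: splice the arg
                rep.extend(copy.copy(t) for t in args[position[tok.value]])
            else:
                rep.append(copy.copy(tok))  # ordinary body token
        return rep
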
464 # Given a list of tokens, this function performs macro expansion.
469 def expand_macros(self,tokens,expanded=None):
473 while i < len(tokens):
474 t = tokens[i]
486 tokens[i:i+1] = ex
491 while j < len(tokens) and tokens[j].type in self.t_WS:
493 if tokens[j].value == '(':
494 tokcount,args,positions = self.collect_args(tokens[j:])
509 args[len(m.arglist)-1] = tokens[j+positions[len(m.arglist)-1]:j+tokcount-1]
517 tokens[i:j+tokcount] = rep
526 return tokens
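
Lines 464-526 are the core of expand_macros(): walk the list, and when an identifier names a known macro, splice the expansion back in place, either `tokens[i:i+1] = ex` for an object-like macro (line 486) or, once collect_args() has consumed the argument list, `tokens[i:j+tokcount] = rep` for a function-like one (line 517). A condensed sketch of the object-like case; the recursion guard uses a set where the module may use a dict, and the identifier token type is parameterized:

    import copy

    def expand_object_macros(tokens, macros, id_type='CPP_ID', expanded=None):
        """Sketch: in-place expansion of object-like macros only."""
        if expanded is None:
            expanded = set()
        i = 0
        while i < len(tokens):
            t = tokens[i]
            if t.type == id_type and t.value in macros and t.value not in expanded:
                expanded.add(t.value)       # block infinite self-expansion
                body = [copy.copy(x) for x in macros[t.value].value]
                ex = expand_object_macros(body, macros, id_type, expanded)
                for e in ex:
                    e.lineno = t.lineno     # keep diagnostics on one line
                tokens[i:i+1] = ex          # splice replacement in place
                i += len(ex)
                expanded.discard(t.value)
            else:
                i += 1
        return tokens
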
535 def evalexpr(self,tokens):
536 # tokens = tokenize(line)
539 while i < len(tokens):
540 if tokens[i].type == self.t_ID and tokens[i].value == 'defined':
544 while j < len(tokens):
545 if tokens[j].type in self.t_WS:
548 elif tokens[j].type == self.t_ID:
549 if tokens[j].value in self.macros:
554 elif tokens[j].value == '(':
556 elif tokens[j].value == ')':
559 self.error(self.source,tokens[i].lineno,"Malformed defined()")
561 tokens[i].type = self.t_INTEGER
562 tokens[i].value = self.t_INTEGER_TYPE(result)
563 del tokens[i+1:j+1]
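
Lines 539-563 are the first phase of evalexpr(): rewrite every `defined NAME` and `defined(NAME)` test into a plain integer token before anything else is evaluated, tracking an optional parenthesis with a needparen flag and reporting "Malformed defined()" otherwise. A standalone sketch of that rewriting loop; token type names are parameters rather than the probed self.t_* attributes:

    import copy

    def replace_defined(tokens, macros, id_type='CPP_ID',
                        int_type='CPP_INTEGER', ws_types=('CPP_WS',)):
        """Sketch: turn defined NAME / defined(NAME) into 1 or 0 in place."""
        i = 0
        while i < len(tokens):
            if tokens[i].type == id_type and tokens[i].value == 'defined':
                needparen = False
                result = "0"
                j = i + 1
                while j < len(tokens):
                    if tokens[j].type in ws_types:
                        pass                        # skip whitespace
                    elif tokens[j].type == id_type:
                        if tokens[j].value in macros:
                            result = "1"
                        if not needparen:
                            break                   # bare 'defined NAME'
                    elif tokens[j].value == '(':
                        needparen = True
                    elif tokens[j].value == ')':
                        break                       # closes 'defined(NAME)'
                    else:
                        raise SyntaxError("Malformed defined()")
                    j += 1
                tokens[i] = copy.copy(tokens[i])    # defensive copy
                tokens[i].type = int_type
                tokens[i].value = result
                del tokens[i+1:j+1]                 # consume the operand
            i += 1
        return tokens
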
565 tokens = self.expand_macros(tokens)
566 for i,t in enumerate(tokens):
568 tokens[i] = copy.copy(t)
569 tokens[i].type = self.t_INTEGER
570 tokens[i].value = self.t_INTEGER_TYPE("0L")
572 tokens[i] = copy.copy(t)
574 tokens[i].value = str(tokens[i].value)
575 while tokens[i].value[-1] not in "0123456789abcdefABCDEF":
576 tokens[i].value = tokens[i].value[:-1]
578 expr = "".join([str(x.value) for x in tokens])
585 self.error(self.source,tokens[0].lineno,"Couldn't evaluate expression")
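
Lines 565-585 are the second phase: macro-expand what is left, turn any still-unknown identifier into integer 0 (the "0L" at line 570), strip C integer suffixes such as L or UL back to the last hex digit (the loop at lines 575-576), and join everything into a string for evaluation, reporting "Couldn't evaluate expression" on failure. A sketch of that final step; the `&&`/`||`/`!` translation into Python operators reflects how such a string must be massaged before eval, not a quote of the module:

    import re

    def eval_cpp_expr(tokens, int_type='CPP_INTEGER'):
        """Sketch: evaluate an #if expression whose 'defined' tests and
        known macros were already resolved; unknown identifiers are
        assumed to have been rewritten to 0 upstream."""
        parts = []
        for tok in tokens:
            value = str(tok.value)
            if tok.type == int_type:
                # Strip C suffixes: 10L -> 10, 0x1FUL -> 0x1F
                while value and value[-1] not in "0123456789abcdefABCDEF":
                    value = value[:-1]
            parts.append(value)
        expr = "".join(parts)
        expr = expr.replace("&&", " and ").replace("||", " or ")
        expr = re.sub(r"!(?!=)", " not ", expr)   # '!' but never '!='
        try:
            return int(bool(eval(expr)))          # C truth value: 1 or 0
        except Exception:
            return 0                              # "couldn't evaluate" fallback
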
724 def include(self,tokens):
726 if not tokens:
728 if tokens:
729 if tokens[0].value != '<' and tokens[0].type != self.t_STRING:
730 tokens = self.expand_macros(tokens)
732 if tokens[0].value == '<':
735 while i < len(tokens):
736 if tokens[i].value == '>':
742 filename = "".join([x.value for x in tokens[1:i]])
744 elif tokens[0].type == self.t_STRING:
745 filename = tokens[0].value[1:-1]
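
Lines 724-745 are from include(): if the operand is neither a `<...>` form nor a string literal it is macro-expanded first (so `#include SOMEMACRO` works), then the filename is recovered by joining every token between `<` and `>` for a system include, or by stripping the quotes from a string token (`value[1:-1]`) for a local one. A sketch of the filename recovery; the string token type name is an assumption:

    def include_filename(tokens, string_type='CPP_STRING'):
        """Sketch: recover the filename from an #include operand.
        Returns (filename, is_system_include)."""
        if tokens[0].value == '<':
            for i, tok in enumerate(tokens):
                if tok.value == '>':        # join everything between < and >
                    name = "".join(str(x.value) for x in tokens[1:i])
                    return (name, True)
            raise SyntaxError("Malformed #include <...>")
        elif tokens[0].type == string_type:
            return (tokens[0].value[1:-1], False)   # strip the quotes
        else:
            raise SyntaxError("Malformed #include statement")
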
773 def define(self,tokens):
774 if isinstance(tokens,(str,unicode)):
775 tokens = self.tokenize(tokens)
777 linetok = tokens
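
Lines 773-777 show that define() accepts either a raw string, which it tokenizes first, or an already-lexed token list; the `(str, unicode)` check on line 774 dates the module to Python 2. A Python 3 sketch of that dispatch, covering only object-like macros and reusing the Macro and tokenstrip sketches above; `tokenize` here is any one-argument callable returning a token list:

    def define(macros, tokens, tokenize, ws_types=('CPP_WS',)):
        """Sketch: register an object-like macro from a '#define' body
        given as a string or a token list."""
        if isinstance(tokens, str):         # py3: plain str replaces unicode
            tokens = tokenize(tokens)
        linetok = tokens
        name = linetok[0]
        # A '(' directly after the name (no whitespace token in between)
        # would make this a function-like macro; not covered in this sketch.
        if len(linetok) > 1 and linetok[1].value == '(':
            raise NotImplementedError("function-like macros omitted")
        value = tokenstrip(linetok[1:], ws_types)
        macros[name.value] = Macro(name.value, value)
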
843 def undef(self,tokens):
844 id = tokens[0].value
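
Lines 843-844 begin undef(): the macro name is simply the first token's value. In C, #undef of a name that was never defined is a no-op, so removal should not raise. A one-line sketch under that assumption:

    def undef(macros, tokens):
        """Sketch: remove a macro; unknown names are silently ignored."""
        macros.pop(tokens[0].value, None)
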
862 # Method to return individual tokens
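
Line 862 introduces the preprocessor's token() method, the hook a parser calls to pull one fully preprocessed token at a time, skipping types the caller asked to ignore (typically whitespace). A sketch over a generator, assuming the preprocessing pipeline is exposed as an iterator of tokens:

    def next_token(parser, ignore=frozenset()):
        """Sketch: return the next interesting token from a preprocessing
        token iterator, or None when it is exhausted."""
        for tok in parser:
            if tok.type not in ignore:      # e.g. skip whitespace tokens
                return tok
        return None
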