Searched refs:token (Results 1 - 16 of 16) sorted by relevance

/gem5/ext/googletest/googlemock/scripts/generator/cpp/
keywords.py
    52  def IsKeyword(token):
    53      return token in ALL
    55  def IsBuiltinType(token):
    56      if token in ('virtual', 'inline'):
    59      return token in TYPES or token in TYPE_MODIFIERS
ast.py
   287  for token in self.alias:
   288      if token is not None and name == token.name:
   342  for token in token_list:
   343      if token.name == node.name:
   467  token = tokens[end]
   469  if token.name == '<':
   471  elif token.name == '>':
   508  token = tokens[i]
   509  if token
[all...]
tokenize.py
    59  # Where the token originated from. This can be used for backtracking.
    65  """Data container to represent a C++ token.
    70  start contains the index of the first char of the token in the source
    71  end contains the index of the last char of the token in the source
   126  Token that represents the next token in the source.
   150  if c.isalpha() or c == '_':  # Find a string token.
   263  sys.stderr.write('Got invalid token in %s @ %d token:%s: %r\n' %
   265  raise RuntimeError('unexpected token')
   281  for token i
[all...]
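
The tokenize.py docstring above describes the container these hits come from; as a point of reference, a minimal sketch of such a token holder (the field names and start/end semantics follow the docstring, everything else is assumed):

    # Minimal sketch of the token container described above; only the
    # documented fields (token_type, name, start, end) are taken from
    # the hits, the constructor and repr are assumptions.
    class Token(object):
        """Data container to represent a C++ token.

        start is the index of the first char of the token in the source;
        end is the index of the last char of the token in the source.
        """

        def __init__(self, token_type, name, start, end):
            self.token_type = token_type
            self.name = name
            self.start = start
            self.end = end

        def __repr__(self):
            return 'Token(%r, %d, %d)' % (self.name, self.start, self.end)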
/gem5/src/unittest/
tokentest.cc
    43  cout << "Usage: " << argv[0] << " <string> <token>\n";
    51  char token = argv[2][0];
    53  cout << "string = \"" << test << "\", token = \'" << token << "\'\n";
    55  tokenize(tokens1, test, token, false);
    69  tokenize(tokens2, test, token, true);
/gem5/ext/ply/example/GardenSnake/
GardenSnake.py
   154  # The original lex token stream contains WS and NEWLINE characters.
   158  # "must_indent" is True if the token must be indented from the
   176  for token in tokens:
   177      token.at_line_start = at_line_start
   179      if token.type == "COLON":
   182          token.must_indent = False
   184      elif token.type == "NEWLINE":
   188          token.must_indent = False
   190      elif token.type == "WS":
   191          assert token
   324  def token(self):    # member of class IndentLexer
[all...]
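
The GardenSnake fragments above tag each token with at_line_start and must_indent before an indentation filter consumes them; a hedged reconstruction of that tagging pass (the three-state indent tracking is inferred from the fragments, not quoted from the file):

    # Hedged reconstruction of the annotation loop hit at lines 176-191:
    # a COLON *may* open an indented suite, and the NEWLINE that follows
    # it makes indentation mandatory for the next real token.
    NO_INDENT, MAY_INDENT, MUST_INDENT = 0, 1, 2

    def annotate_tokens(tokens):
        at_line_start = True
        indent = NO_INDENT
        for token in tokens:
            token.at_line_start = at_line_start
            if token.type == "COLON":
                at_line_start = False
                indent = MAY_INDENT
                token.must_indent = False
            elif token.type == "NEWLINE":
                at_line_start = True
                if indent == MAY_INDENT:
                    indent = MUST_INDENT
                token.must_indent = False
            elif token.type == "WS":
                assert token.at_line_start
                at_line_start = True
                token.must_indent = False
            else:
                # A real token: it must be indented only when it begins
                # the first line after a suite-opening COLON + NEWLINE.
                token.must_indent = (indent == MUST_INDENT)
                at_line_start = False
                indent = NO_INDENT
            yield token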
/gem5/ext/ply/test/
lex_token5.py
     3  # Return a bad token name
    29  t = lex.token()
/gem5/src/base/
str.cc
    69  tokenize(vector<string>& v, const string &s, char token, bool ignore)
    72      string::size_type last = s.find_first_of(token);
    79      last = s.find_first_of(token, ++first);
    92      first = s.find_first_not_of(token, last + 1);
    99      last = s.find_first_of(token, first);
match.cc
    84  const vector<string> &token = tokens[i];
    85  int jstop = token.size();
    92  const string &var = token[j];
str.hh
    97  // Tokenize the string <s> splitting on the character <token>, and
   103      char token, bool ign = true);
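
The str.hh declaration and the str.cc find_first_of/find_first_not_of loop above define gem5's splitting semantics; for illustration, a Python analogue of that behavior (a sketch, not the gem5 code itself):

    # Python analogue of tokenize(v, s, token, ign): split s on the
    # separator character; with ign=True, runs of separators collapse
    # and empty fields are dropped, mirroring the find_first_not_of
    # skipping shown in str.cc.
    def tokenize(v, s, token, ign=True):
        parts = s.split(token)
        if ign:
            parts = [p for p in parts if p]
        v.extend(parts)

    fields = []
    tokenize(fields, 'a::b:c', ':', ign=True)    # fields == ['a', 'b', 'c']
    kept = []
    tokenize(kept, 'a::b:c', ':', ign=False)     # kept == ['a', '', 'b', 'c']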
/gem5/src/arch/generic/
debugfaults.hh
    86  // with the same token from blocking each other.
    95  lookUpToken(const OnceToken &token)
    98      return tokenMap[token];
   103  M5DebugOnceFault(const OnceToken &token, const std::string &format,
   105      M5DebugFault(format, args...), once(lookUpToken<Flavor>(token))
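
debugfaults.hh keys each once-only fault on an OnceToken, so two faults with different tokens cannot suppress each other; the same idea in Python for illustration (warn_once and the token names are assumptions, not gem5 API):

    # Per-token "report once" illustration: each distinct token owns
    # its own fired flag, so one token's report cannot block another's.
    _token_map = {}

    def warn_once(token, message):
        if not _token_map.get(token):
            _token_map[token] = True
            print('warning:', message)

    warn_once('div_by_zero', 'printed the first time')
    warn_once('div_by_zero', 'suppressed on repeat')
    warn_once('bad_align', 'a different token still prints')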
/gem5/ext/googletest/googletest/scripts/
pump.py
   138  """Represents a token in a Pump source file."""
   303  def RStripNewLineFromToken(token):
   304      if token.value.endswith('\n'):
   305          return Token(token.start, token.end, token.value[:-1], token.token_type)
   307      return token
   386  for token in TokenizeLines(lines, Cursor(0, 0)):
   387      yield token
[all...]
/gem5/src/python/m5/util/
grammar.py
    34  def __init__(self, message, token=None):
    36      self.token = token
/gem5/ext/ply/ply/
cpp.py
   118  # When a macro is created, the macro replacement token sequence is
   167  tok = self.lexer.token()
   185  # the token types of symbols that are important to the preprocessor.
   192  # Determine the token type for identifiers
   194  tok = self.lexer.token()
   200  # Determine the token type for integers
   202  tok = self.lexer.token()
   209  # Determine the token type for strings enclosed in double quotes
   211  tok = self.lexer.token()
   217  # Determine the token type
   864  def token(self):    # member of class Preprocessor
[all...]
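
The cpp.py hits at lines 192-217 show the preprocessor learning the host lexer's token-type names by lexing tiny probe inputs; a sketch of that trick (probe_token_type and the usage lines are assumptions, not the actual cpp.py code):

    # Lex a tiny sample and record which token type name the host
    # lexer assigns to it.
    def probe_token_type(lexer, sample):
        lexer.input(sample)
        tok = lexer.token()
        return tok.type if tok is not None else None

    # Roughly what the hits above accomplish:
    #   self.t_ID      = probe_token_type(self.lexer, 'identifier')
    #   self.t_INTEGER = probe_token_type(self.lexer, '12345')
    #   self.t_STRING  = probe_token_type(self.lexer, '"filename"')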
lex.py
    57  # This regular expression is used to match valid token names
    60  # Exception thrown when invalid token encountered and no default error
   107  # token() - Get the next token
   298  # opttoken() - Return the next token from the Lexer
   304  def token(self):    # member of class Lexer
   322  # Create a token for return
   332  # If no token type was set, it's an ignored token
   342  # If token i
[all...]
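
The lex.py token() method shown above is PLY's pull interface; a minimal, self-contained lexer that drives it could look like this (uses only the documented ply.lex API):

    # Minimal PLY lexer exercising lexer.token(): tokens tuple, t_*
    # rules, lex.lex(), and a pull loop that stops on None.
    import ply.lex as lex

    tokens = ('NUMBER', 'PLUS')

    t_PLUS = r'\+'
    t_ignore = ' \t'

    def t_NUMBER(t):
        r'\d+'
        t.value = int(t.value)
        return t

    def t_error(t):
        print('Illegal character %r' % t.value[0])
        t.lexer.skip(1)

    lexer = lex.lex()
    lexer.input('3 + 14')
    while True:
        tok = lexer.token()     # returns None when input is exhausted
        if not tok:
            break
        print(tok.type, tok.value)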
yacc.py
   310  get_token = lexer.token
   322  errtoken = None    # Err token
   334  # the next token off of the lookaheadstack or from the lexer
   343  lookahead = get_token()    # Get the next token
   497  # this, we are going to push the current token onto
   498  # the tokenstack and replace it with an 'error' token.
   502  # In addition to pushing the error token, we call
   513  global errok,token,restart
   515  token = get_token
   520  del errok, token, restart
[all...]
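
The yacc.py fragments show the parser pushing a synthetic 'error' token during recovery; on the grammar side that token can be consumed explicitly to resynchronize. A self-contained sketch using PLY's documented error-recovery convention (the grammar and input are illustrative assumptions):

    # A bad line triggers p_error, the parser substitutes the 'error'
    # token (as in the yacc.py fragments above), and the error rule
    # resynchronizes at the next NEWLINE.
    import ply.lex as lex
    import ply.yacc as yacc

    tokens = ('NUMBER', 'NEWLINE')
    t_ignore = ' \t'

    def t_NUMBER(t):
        r'\d+'
        t.value = int(t.value)
        return t

    def t_NEWLINE(t):
        r'\n'
        return t

    def t_error(t):
        t.lexer.skip(1)

    def p_lines(p):
        '''lines : lines line
                 | line'''

    def p_line(p):
        'line : NUMBER NEWLINE'
        print('ok:', p[1])

    def p_line_error(p):
        'line : error NEWLINE'
        print('recovered: skipped a bad line')

    def p_error(tok):
        if tok:
            print('syntax error at', tok.type)

    lex.lex()
    parser = yacc.yacc()
    parser.parse('1\n2 2\n3\n')   # '2 2' errors, parsing resumes at '3'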
/gem5/ext/googletest/googlemock/scripts/
gmock_doctor.py
   462  r'error: expected `;\' before \'::\' token\n'

Completed in 27 milliseconds