Searched refs:tokens (Results 26 - 50 of 101) sorted by relevance

/gem5/ext/ply/test/
lex_rule2.py:10           tokens = [    (variable)
lex_rule3.py:10           tokens = [    (variable)
lex_token5.py:10          tokens = [    (variable)
lex_token_dup.py:3        # Duplicate token name in tokens
lex_token_dup.py:10       tokens = [    (variable)
lex_state1.py:10          tokens = [    (variable)
lex_state2.py:10          tokens = [    (variable)
lex_state3.py:10          tokens = [    (variable)
lex_state4.py:10          tokens = [    (variable)
lex_state5.py:10          tokens = [    (variable)
lex_state_noerror.py:10   tokens = [    (variable)
lex_state_norule.py:10    tokens = [    (variable)
lex_hedit.py:6            # These tokens can't be easily tokenized because they are of the following
lex_hedit.py:14           # such tokens
lex_hedit.py:21           tokens = (    (variable)
lex_module_import.py:7    tokens = (    (variable)
lex_state_try.py:10       tokens = [    (variable)
yacc_inf.py:11            from calclex import tokens
yacc_nested.py:11         tokens = ('A', 'B', 'C')    (variable)
calclex.py:9              tokens = (    (variable)
lex_closure.py:9          tokens = (    (variable)
lex_object.py:10          tokens = (    (variable in class:CalcLexer)
lex_opt_alias.py:11       tokens = (    (variable)
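
These PLY test inputs all exercise the same contract: ply.lex discovers token names through a module-level `tokens` sequence, and each name must be backed by a t_<NAME> string or function rule. A minimal sketch of that pattern follows; the token names and rules are illustrative stand-ins, not taken from any file above.

    import ply.lex as lex

    # PLY reads this module-level sequence to learn the token names;
    # this is the `tokens` variable the hits above match.
    tokens = ('NUMBER', 'PLUS', 'MINUS')

    t_PLUS = r'\+'
    t_MINUS = r'-'

    def t_NUMBER(t):
        r'\d+'
        t.value = int(t.value)
        return t

    t_ignore = ' \t'          # skip whitespace between tokens

    def t_error(t):
        print("Illegal character %r" % t.value[0])
        t.lexer.skip(1)

    lexer = lex.lex()
    lexer.input('1 + 2 - 3')
    for tok in iter(lexer.token, None):
        print(tok.type, tok.value)

Tests like lex_token_dup.py then feed lex.lex() deliberately malformed declarations (here, a duplicate token name, per the comment at its line 3) and check the reported error.
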
/gem5/ext/googletest/googlemock/scripts/generator/cpp/
ast.py:341    # TODO(nnorwitz): bases are tokens, do name comparision.
ast.py:384    # TODO(nnorwitz): parameters are tokens, do name comparision.
ast.py:463    def _GetTemplateEnd(self, tokens, start):
ast.py:467        token = tokens[end]
ast.py:475        return tokens[start:end-1], end
ast.py:477    def ToType(self, tokens):
ast.py:490    # Partition tokens into name and modifier tokens.
ast.py:506    end = len(tokens)
ast.py:508    token = tokens[
[all...]
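
Judging only by the names in these hits, _GetTemplateEnd scans a token list for the point where a C++ template argument list closes, and ToType then partitions the tokens into name and modifier tokens. A hypothetical sketch of the scanning step; the Token class and the return convention here are assumptions, not the generator's actual code:

    class Token:
        def __init__(self, name):
            self.name = name

    def get_template_end(tokens, start):
        """Return (tokens inside <...>, index just past the closing '>')."""
        depth = 1                    # assume the caller consumed the opening '<'
        end = start
        while depth:
            token = tokens[end]
            if token.name == '<':
                depth += 1
            elif token.name == '>':
                depth -= 1
            end += 1
        return tokens[start:end - 1], end

    toks = [Token(t) for t in ['int', ',', 'vector', '<', 'int', '>', '>', ';']]
    inner, after = get_template_end(toks, 0)
    print([t.name for t in inner], after)   # ['int', ',', 'vector', '<', 'int', '>'] 7
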
/gem5/ext/mcpat/regression/
verify_output.py:212    tokens = line.split()
verify_output.py:214    curr_node.area = toNumber(tokens[2])
verify_output.py:216    curr_node.peak_dynamic_power = toNumber(tokens[4])
verify_output.py:218    curr_node.peak_dynamic_power = toNumber(tokens[3])
verify_output.py:220    curr_node.subthreshold_leakage = toNumber(tokens[4])
verify_output.py:222    curr_node.subthreshold_leakage = toNumber(tokens[3])
verify_output.py:224    curr_node.gate_leakage = toNumber(tokens[4])
verify_output.py:226    curr_node.gate_leakage = toNumber(tokens[3])
verify_output.py:228    curr_node.runtime_dynamic_power = toNumber(tokens[4])
verify_output.py:230    curr_node.runtime_dynamic_energy = toNumber(tokens[
[all...]
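
The verify_output.py hits all follow one parsing pattern: split a McPAT report line on whitespace, then convert a positional field to a number, with the field index (tokens[3] vs tokens[4]) depending on the line's layout. A sketch of that shape; to_number below is a stand-in, since the real toNumber's behavior is not visible in this excerpt:

    def to_number(text):
        # Stand-in for mcpat's toNumber(); assumes plain decimal or
        # scientific notation and returns None for non-numeric fields.
        try:
            return float(text)
        except ValueError:
            return None

    line = 'Area = 12.34 mm^2'      # hypothetical report line
    tokens = line.split()
    area = to_number(tokens[2])     # third whitespace-separated field
    print(area)                     # 12.34
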
/gem5/src/sim/
serialize.hh:500    std::vector<std::string> tokens;    (local)
serialize.hh:502    tokenize(tokens, str, ' ');
serialize.hh:505    // value.resize(tokens.size());
serialize.hh:507    fatal_if(tokens.size() != size,
serialize.hh:509        section, name, tokens.size(), size);
serialize.hh:511    for (std::vector<std::string>::size_type i = 0; i < tokens.size(); i++) {
serialize.hh:517    if (!parseParam(tokens[i], scalar_value)) {
serialize.hh:544    std::vector<std::string> tokens;    (local)
serialize.hh:546    tokenize(tokens, str, ' ');
serialize.hh:549    // value.resize(tokens
serialize.hh:584    std::vector<std::string> tokens;    (local)
serialize.hh:614    std::vector<std::string> tokens;    (local)
[all...]
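
The serialize.hh matches show the checkpoint-restore shape for array parameters: split the stored string on ' ', verify the token count against the expected container size (the fatal_if), then parse each token as a scalar (parseParam). gem5's real code is C++; the sketch below is a Python analogue of that shape only:

    def unserialize_array(stored, size):
        tokens = stored.split(' ')
        if len(tokens) != size:            # mirrors the fatal_if() count check
            raise ValueError('expected %d tokens, got %d' % (size, len(tokens)))
        values = []
        for i in range(len(tokens)):
            values.append(int(tokens[i]))  # stands in for parseParam()
        return values

    print(unserialize_array('1 2 3 4', 4))   # [1, 2, 3, 4]
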
/gem5/ext/ply/example/hedit/
hedit.py:6     # These tokens can't be easily tokenized because they are of the following
hedit.py:14    # such tokens
hedit.py:21    tokens = (    (variable)
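
The hedit example deals with Fortran H-edit descriptors (nH followed by n literal characters), where no single regex can express "read a count, then take that many characters". The standard PLY workaround, reconstructed here as a sketch rather than the example's exact code, is to match greedily with a function rule, truncate the token value, and rewind lexer.lexpos:

    import ply.lex as lex

    tokens = ('H_EDIT_DESCRIPTOR',)
    t_ignore = ' \t\n'

    def t_H_EDIT_DESCRIPTOR(t):
        r'\d+H.*'
        i = t.value.index('H')
        n = int(t.value[:i])                    # how many literal chars follow
        t.value = t.value[i + 1:i + 1 + n]
        t.lexer.lexpos = t.lexpos + i + 1 + n   # rewind past what we kept
        return t

    def t_error(t):
        t.lexer.skip(1)

    lexer = lex.lex()
    lexer.input('3Habc 2Hxy')
    for tok in iter(lexer.token, None):
        print(tok.value)                        # abc, then xy
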
/gem5/ext/ply/example/BASIC/
basiclex.py:10    tokens = keywords + (    (variable)
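
basiclex.py builds its token tuple by concatenating a keywords tuple with the remaining token names. A common companion idiom, assumed here rather than shown in the hit, is to recognize keywords inside the identifier rule instead of writing one regex per keyword:

    import ply.lex as lex

    keywords = ('LET', 'PRINT', 'GOTO')     # illustrative, not BASIC's full set
    tokens = keywords + ('ID', 'EQUALS', 'NUMBER')

    t_EQUALS = r'='
    t_NUMBER = r'\d+'
    t_ignore = ' \t'

    def t_ID(t):
        r'[A-Za-z][A-Za-z0-9]*'
        if t.value.upper() in keywords:     # promote identifiers that are keywords
            t.type = t.value.upper()
        return t

    def t_error(t):
        t.lexer.skip(1)

    lexer = lex.lex()
    lexer.input('LET X = 10')
    for tok in iter(lexer.token, None):
        print(tok.type, tok.value)          # LET LET / ID X / EQUALS = / NUMBER 10
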
