# lex_many_tokens.py
#
# Test lex's ability to handle a large number of tokens (beyond the
# 100-group limit of the re module)

import sys
if ".." not in sys.path: sys.path.insert(0,"..")

import ply.lex as lex
106498Snate@binkert.org
# Generate a large token set (1000 names) to push lex past the 100-group
# limit of a single compiled regex; lex must split its master pattern
# internally for this test to pass.
tokens = ["TOK%d" % i for i in range(1000)]

# Create one simple string rule per token, e.g. t_TOK12 = 'TOK12:'.
# Assigning through globals() behaves identically on Python 2 and 3,
# so the version-checked exec() dance is unnecessary (and exec on
# constructed strings is best avoided anyway).
for tok in tokens:
    globals()["t_%s" % tok] = "%s:" % tok

# Skip spaces and tabs between tokens.
t_ignore = " \t"
206498Snate@binkert.org
def t_error(t):
    """Error rule: silently discard illegal characters (test harness only)."""
236498Snate@binkert.org
# Build the lexer with optimization enabled; the generated tables are
# cached in manytab.py so the 1000-rule master regex is only built once.
lex.lex(lextab="manytab", optimize=1)

# Drive the lexer over a sample that exercises rules spread across the
# full TOK0..TOK999 range, printing each token recognized.
lex.runmain(data="TOK34: TOK143: TOK269: TOK372: TOK452: TOK561: TOK999:")
266498Snate@binkert.org
276498Snate@binkert.org
28