# lex_many_tokens.py
#
# Test lex's ability to handle a large number of tokens (beyond the
# 100-group limit of the re module)

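# Background: older CPython releases (before roughly 3.11) refused to compile
# a regular expression containing more than 100 named groups, failing with
# "AssertionError: sorry, but this version only supports 100 named groups".
# PLY's lexer copes by splitting its master pattern into smaller pieces when
# compilation fails, which is exactly what 1000 token rules exercise.  A
# rough sketch of the old limit (behavior depends on interpreter version):
#
#   import re
#   pat = "|".join("(?P<G%d>x%d)" % (i, i) for i in range(101))
#   re.compile(pat)   # AssertionError on Python < 3.11
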
import sys

# Make the ply package in the parent directory importable when the test
# is run from the test directory
if ".." not in sys.path:
    sys.path.insert(0, "..")

import ply.lex as lex

# 1000 token names: TOK0 ... TOK999
tokens = ["TOK%d" % i for i in range(1000)]

# Define one string rule per token: t_TOKn matches the literal text "TOKn:".
# exec() is used to bind the rules in the module namespace where lex()
# looks for them; on Python 3, globals() is passed explicitly so the
# assignments land at module scope.
for tok in tokens:
    if sys.version_info[0] < 3:
        exec("t_%s = '%s:'" % (tok, tok))
    else:
        exec("t_%s = '%s:'" % (tok, tok), globals())
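
# The same rules could be installed without exec() by assigning into the
# module namespace directly; an equivalent sketch, assuming lex() reads
# string rules by name from the calling module's dictionary:
#
#   for tok in tokens:
#       globals()["t_%s" % tok] = "%s:" % tok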

# Ignore spaces and tabs between tokens
t_ignore = " \t"

# No-op error handler; the test input is expected to match completely
def t_error(t):
    pass

# Build the lexer in optimized mode, caching the generated tables in
# manytab.py, and tokenize a sample drawn from the 1000 token types
lex.lex(optimize=1, lextab="manytab")
lex.runmain(data="TOK34: TOK143: TOK269: TOK372: TOK452: TOK561: TOK999:")
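
# runmain() writes one (type, value, lineno, lexpos) tuple per token to
# stdout, so the first line of output should look roughly like:
#
#   (TOK34,'TOK34:',1,0)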