1# ----------------------------------------------------------------------------- 2# hedit.py 3# 4# Parsing of Fortran H Edit descriptions (Contributed by Pearu Peterson) 5# 6# These tokens can't be easily tokenized because they are of the following 7# form: 8# 9# nHc1...cn 10# 11# where n is a positive integer and c1 ... cn are characters. 12# 13# This example shows how to modify the state of the lexer to parse 14# such tokens 15# ----------------------------------------------------------------------------- 16import sys
| 1# ----------------------------------------------------------------------------- 2# hedit.py 3# 4# Parsing of Fortran H Edit descriptions (Contributed by Pearu Peterson) 5# 6# These tokens can't be easily tokenized because they are of the following 7# form: 8# 9# nHc1...cn 10# 11# where n is a positive integer and c1 ... cn are characters. 12# 13# This example shows how to modify the state of the lexer to parse 14# such tokens 15# ----------------------------------------------------------------------------- 16import sys
|
17sys.path.insert(0,"..")
| 17if ".." not in sys.path: sys.path.insert(0,"..")
|
import ply.lex as lex

# The single token type recognized by this example lexer.
tokens = (
    'H_EDIT_DESCRIPTOR',
)

# Ignored characters (whitespace).
t_ignore = " \t\n"

def t_H_EDIT_DESCRIPTOR(t):
    r"\d+H.*"  # PLY uses this docstring as the token regex; it grabs all remaining text
    # An H edit descriptor has the form nHc1...cn: a decimal count n, the
    # letter 'H', then exactly n literal characters.
    i = t.value.index('H')
    # int() instead of eval(): the prefix is guaranteed by the regex to be
    # \d+, and eval() on scanned input is unsafe and unnecessary.
    n = int(t.value[:i])

    # Adjust the tokenizing position: push back every character after the
    # n characters that belong to this descriptor so the lexer rescans them.
    t.lexer.lexpos -= len(t.value) - (i + 1 + n)
    # The token value becomes just the n literal characters.
    t.value = t.value[i + 1:i + 1 + n]
    return t
def t_error(t):
    # Lexer error handler: report the offending character (Python 3 print
    # function, not the Python 2 print statement) and skip one character
    # so scanning can continue.
    print("Illegal character '%s'" % t.value[0])
    t.lexer.skip(1)
# Build the lexer and exercise it on a sample string containing three
# H edit descriptors (3Habc, 10Habcdefghij, 2Hxy).
lex.lex()
lex.runmain(data="3Habc 10Habcdefghij 2Hxy")