# lexer.input(data). Reset the lexer and store a new input string. # lexer.token(). Return the next token. Returns a special LexToken instance on success or None if the end of the input text has been reached. # lexer = lex.lex() import lex # List of token names. This is always required tokens = ( 'NUMBER', 'RED', 'GREEN', 'BLUE', 'BLACK', 'FORW', 'RIGHT', 'LOOP', 'COLOR', 'PEN', 'LSQB', 'RSQB', 'EMPTY' ) # Regular expression rules for simple tokens t_FORW = r'F' t_RIGHT = r'R' t_LOOP = r'L' t_COLOR = r'COLOR' t_PEN = r'PEN' t_LSQB = r'\[' t_RSQB = r'\]' t_RED = r'K' t_GREEN = r'Y' t_BLUE = r'M' t_BLACK = r'S' t_EMPTY = r'\ ' # A regular expression rule with some action code def t_NUMBER(t): r'\d+' t.value = int(t.value) return t # Define a rule so we can track line numbers def t_newline(t): r'\n+' t.lexer.lineno += len(t.value) # A string containing ignored characters (spaces and tabs) t_ignore = ' \t' # Error handling rule def t_error(t): print("Illegal character '%s'" % t.value[0]) t.lexer.skip(1) # Build the lexer lexer = lex.lex() # Test it out data = '''F 100 90''' # Give the lexer some input lexer.input(data) # Tokenize for tok in lexer: print(tok) ######################################## #PARSER ''' statement : forward | right | loop forward : option F num statement | option F num option : color | pen | empty right : R num statement | R num loop : L num [ statement ] color : Color clrs clrs : m | k | s | y pen: Pen num '''
# Parser for the turtle-command language, built with PLY's yacc module.
# The p_* functions below declare grammar productions in their docstrings
# only; none of them has an action body, so p[0] is never assigned and
# parser.parse(data) therefore returns None.
import yacc
# assumes the lexer script is saved as lexing.py and exposes `data` and
# `tokens` at module level — TODO confirm module name
from lexing import data, tokens

def p_root(p):
    # root: a command (F/R/L) with its numeric argument, a COLOR with a
    # colour code, or PEN with a number — each followed by an option.
    """root : function NUMBER option
    | COLOR colors option
    | PEN NUMBER option
    """

def p_option(p):
    # option: another statement, a bracketed loop body followed by a
    # statement, or the EMPTY token.
    # NOTE(review): EMPTY can never be delivered by the lexer while its
    # t_ignore string contains ' ' — verify against the lexer.
    """option : root
    | LSQB root RSQB root
    | EMPTY
    """

def p_function(p):
    # function: any of the three single-letter commands.
    """function : FORW
    | RIGHT
    | LOOP
    """

def p_colors(p):
    # colors: one of the four single-letter colour codes.
    """colors : RED
    | BLUE
    | GREEN
    | BLACK
    """

def p_error(p):
    # Called by yacc on a token that fits no production.
    print("Syntax error in input!")

# Build the parser
parser = yacc.yacc()
# result is always None here because the productions set no p[0].
result = parser.parse(data)
# print (result)
# var  (stray non-code fragment left over from the page scrape)
# This content, along with any associated source code and files, is licensed under The Code Project Open License (CPOL)