# defines a RuleOutput class
# and its subclasses
# these perform singular operators on Ingredient objects
from .MyNumber import MyNumber
from .Ingredient import Ingredient
from .FormatCheck import listOfStrings, listOfTupleStringBools
class RuleOutput(object):
    """Base class for rules that transform a single Ingredient.

    Direct instantiation is forbidden; subclasses implement apply() and
    priority().
    """

    def __init__(self):
        raise Exception("creating an instance of RuleOutput base class is forbidden")

    @staticmethod
    def check_conformity(ig, match_token, match_token_length):
        """Validate an apply() input: an Ingredient plus an int-tuple of the given length."""
        if not isinstance(ig, Ingredient):
            raise TypeError("Expected ig to be an Ingredient")
        if type(match_token) != tuple:
            raise TypeError("Expected tuple")
        if type(match_token_length) != int:
            raise TypeError("Expected int")
        if len(match_token) != match_token_length:
            raise ValueError("Expected tuple of length " + str(match_token_length))
        for element in match_token:
            if type(element) != int:
                raise TypeError("Expected tuple filled with ints")
class NoneRuleOutput(RuleOutput):
    """Identity rule: leaves the matched Ingredient untouched."""

    def __init__(self):
        # Deliberately skip the base __init__, which raises.
        pass

    @staticmethod
    def apply(ig, match_token):
        # No transformation at all; return the same object.
        return ig

    @staticmethod
    def priority():
        # Lowest priority: a no-op should never outrank a real edit.
        return 1
# Shared singleton; NoneRuleOutput is stateless so one instance suffices.
NoneRuleOutputInstance = NoneRuleOutput()
class RenamingRuleOutput(RuleOutput):
    """Replaces the tail of an Ingredient's unit or name with a new word list.

    output_name may be None, in which case apply() is a no-op.
    """

    def __init__(self, output_name):
        if output_name is not None:
            listOfStrings(output_name)
        self.output_name = output_name

    def apply(self, ig, match_token):
        """Apply the rename; match_token must come from SinglePattern: (field, position)."""
        if self.output_name is None:
            # Wildcard: nothing to rename.
            return ig
        self.check_conformity(ig, match_token, 2)
        cut = max(0, match_token[1])
        output = ig.duplicate()
        if match_token[0] == 1:
            # field 1 -> edit the unit
            output.unit = output.unit[:cut] + self.output_name
        else:
            # any other field -> edit the name
            output.name = output.name[:cut] + self.output_name
        return output

    @staticmethod
    def priority():
        return 2
class PrefixingRuleOutput(RuleOutput):
    """Prepends a prefix to an Ingredient's unit or name when none is present."""

    def __init__(self, prefix):
        listOfStrings(prefix)
        self.prefix = prefix

    def apply(self, ig, match_token):
        """Apply the prefix; match_token must come from SinglePattern: (field, position)."""
        self.check_conformity(ig, match_token, 2)
        if match_token[1] > 0:
            # A positive match position means the input already has a
            # prefix; leave it unchanged.
            return ig
        output = ig.duplicate()
        if match_token[0] == 1:
            # field 1 -> edit the unit
            output.unit = self.prefix + output.unit
        else:
            # otherwise -> edit the name
            output.name = self.prefix + output.name
        return output

    @staticmethod
    def priority():
        return 3
class InsertingRuleOutput(RuleOutput):
    """Splices `insertion` over a matched span of an Ingredient's unit or name.

    pattern_size tokens starting at the match position are replaced by
    `insertion` (a list of strings, possibly empty).
    """

    def __init__(self, pattern_size, insertion):
        if type(pattern_size) != int:
            # Bug fix: error message read "Expeected int".
            raise TypeError("Expected int")
        if insertion != []:
            listOfStrings(insertion)
        self.pattern_size = pattern_size
        self.insertion = insertion

    def apply(self, ig, match_token):
        """Apply the splice; match_token must come from SinglePattern: (field, position)."""
        self.check_conformity(ig, match_token, 2)
        # Hoist the splice bounds so both branches read identically.
        start = max(0, match_token[1])
        end = start + self.pattern_size
        output = ig.duplicate()
        if match_token[0] == 1:
            # field 1 -> edit the unit
            output.unit = output.unit[:start] + self.insertion + output.unit[end:]
        else:
            # otherwise -> edit the name
            output.name = output.name[:start] + self.insertion + output.name[end:]
        return output

    @staticmethod
    def priority():
        return 2
class SingleConvertingRuleOutput(RenamingRuleOutput):
    """Renames like RenamingRuleOutput, then scales the Ingredient's count by a ratio."""

    def __init__(self, ratio, output_name):
        super().__init__(output_name)
        if not isinstance(ratio, MyNumber):
            raise TypeError("Expected MyNumber")
        self.ratio = ratio

    def apply(self, ig, match_token):
        """Rename via the parent rule, then multiply the count by self.ratio."""
        self.check_conformity(ig, match_token, 2)
        renamed = super().apply(ig, match_token)
        output = renamed.duplicate()
        output.count = output.count * self.ratio
        return output

    @staticmethod
    def priority():
        return 5
class DoubleConvertingRuleOutput(RuleOutput):
    """Converts unit and name together, optionally scaling the count.

    Each of ratio / output_unit / output_name may be None to act as a
    wildcard, meaning that component is left alone.
    """

    def __init__(self, ratio, output_unit, output_name):
        if ratio is not None and not isinstance(ratio, MyNumber):
            raise TypeError("Expected MyNumber")
        self.ratio = ratio
        # Wrap the rename targets in RenamingRuleOutput helpers up front.
        self.output_unit = None if output_unit is None else RenamingRuleOutput(output_unit)
        self.output_name = None if output_name is None else RenamingRuleOutput(output_name)

    def apply(self, ig, match_token):
        """Apply both renames and the ratio; match_token must come from DoublePattern (length 3)."""
        self.check_conformity(ig, match_token, 3)
        if self.output_unit is not None:
            ig = self.output_unit.apply(ig, (1, max(0, match_token[1])))
        if self.output_name is not None:
            ig = self.output_name.apply(ig, (2, max(0, match_token[2])))
        if self.ratio is not None:
            ig = ig.duplicate()
            ig.count = ig.count * self.ratio
        return ig

    @staticmethod
    def priority():
        return 5
class PropertiesRuleOutput(RuleOutput):
    """Applies a wrapped rule, then edits property flags on the result.

    edits is a list of (prop, bool) pairs: True adds the prop, False
    removes it, and the special prop "$" with False clears every prop.
    """

    def __init__(self, base, edits):
        if not isinstance(base, RuleOutput):
            raise TypeError("Expected child class of RuleOutput")
        listOfTupleStringBools(edits)
        self.base = base
        self.edits = edits

    def apply(self, ig, match_token):
        """Run the wrapped rule, then apply each property edit in order."""
        output = self.base.apply(ig, match_token).duplicate()
        for prop, should_add in self.edits:
            if should_add:
                output.props.add(prop)
            elif prop == "$":
                # "$" removes every property at once.
                output.props = set()
            elif prop in output.props:
                output.props.remove(prop)
        return output

    def priority(self):
        # Delegate: this wrapper is only as urgent as the rule it wraps.
        return self.base.priority()
class DecRuleOutput(RuleOutput):
    """Converts an Ingredient's count to its decimal (float) form."""

    def __init__(self):
        # Deliberately skip the base __init__, which raises.
        pass

    @staticmethod
    def apply(ig, match_token):
        """Return a copy of ig whose count is count.as_float()."""
        output = ig.duplicate()
        output.count = output.count.as_float()
        return output

    @staticmethod
    def priority():
        return 1
# Shared singleton; DecRuleOutput is stateless so one instance suffices.
DecRuleOutputInstance = DecRuleOutput()
class FracRuleOutput(RuleOutput):
    """Converts an Ingredient's count to fraction form (denominator capped at 10)."""

    def __init__(self):
        # Deliberately skip the base __init__, which raises.
        pass

    @staticmethod
    def apply(ig, match_token):
        """Return a copy of ig whose count is count.as_fraction(10)."""
        output = ig.duplicate()
        output.count = output.count.as_fraction(10)
        return output

    @staticmethod
    def priority():
        return 1
# Shared singleton; FracRuleOutput is stateless so one instance suffices.
FracRuleOutputInstance = FracRuleOutput()
""" Parser logic that parses statement nodes """
from myparser.utils import (add_range, log_error, match_token, token_is, ParserError)
from myparser.declaration import parse_declaration
from myparser.expression import parse_expression
import myparser.utils as p
import tree.tree as nodes
import token_kinds
@add_range
def parse_statement(index):
    """Parse a statement.

    Each candidate parser is attempted in turn; a parse failure inside
    log_error() is caught and logged, and the next candidate is tried.
    The final fallback (an expression statement) may raise to the caller.
    """
    candidates = (parse_compound_statement,
                  parse_return,
                  parse_break,
                  parse_continue,
                  parse_if_statement,
                  parse_while_statement,
                  parse_do_while_statement,
                  parse_for_statement)
    for parse_func in candidates:
        with log_error():
            return parse_func(index)
    return parse_expr_statement(index)
@add_range
def parse_compound_statement(index):
    """Parse a compound statement.

    A compound statement is a collection of several statements/declarations,
    enclosed in braces. A new symbol-table scope is opened for its duration.

    index - index of the opening brace token.
    returns - (nodes.Compound, index past the closing brace).
    """
    p.symbols.new_scope()
    index = match_token(index, token_kinds.open_brack, ParserError.GOT)
    # Read block items (statements/declarations) until there are no more.
    items = []
    while True:
        # Try a statement first; a failure inside log_error() is suppressed
        # so control falls through to the declaration attempt below.
        with log_error():
            item, index = parse_statement(index)
            items.append(item)
            continue
        with log_error():
            item, index = parse_declaration(index)
            items.append(item)
            continue
        # Neither a statement nor a declaration parsed: the block is done.
        break
    index = match_token(index, token_kinds.close_brack, ParserError.GOT)
    p.symbols.end_scope()
    return nodes.Compound(items), index
@add_range
def parse_return(index):
    """Parse a return statement, with or without a value.

    Ex: return 5;
    """
    index = match_token(index, token_kinds.return_kw, ParserError.GOT)
    # A bare `return;` carries no expression.
    if token_is(index, token_kinds.semicolon):
        return nodes.Return(None), index
    expr_node, index = parse_expression(index)
    index = match_token(index, token_kinds.semicolon, ParserError.AFTER)
    return nodes.Return(expr_node), index
@add_range
def parse_break(index):
    """Parse a break statement (`break;`)."""
    index = match_token(index, token_kinds.break_kw, ParserError.GOT)
    after_semicolon = match_token(index, token_kinds.semicolon, ParserError.AFTER)
    return nodes.Break(), after_semicolon
@add_range
def parse_continue(index):
    """Parse a continue statement (`continue;`)."""
    index = match_token(index, token_kinds.continue_kw, ParserError.GOT)
    after_semicolon = match_token(index, token_kinds.semicolon, ParserError.AFTER)
    return nodes.Continue(), after_semicolon
@add_range
def parse_if_statement(index):
    """Parse an if statement, including an optional trailing else branch."""
    index = match_token(index, token_kinds.if_kw, ParserError.GOT)
    index = match_token(index, token_kinds.open_paren, ParserError.AFTER)
    cond, index = parse_expression(index)
    index = match_token(index, token_kinds.close_paren, ParserError.AFTER)
    body, index = parse_statement(index)
    # An `else` keyword right after the body introduces the else branch.
    else_body = None
    if token_is(index, token_kinds.else_kw):
        index = match_token(index, token_kinds.else_kw, ParserError.GOT)
        else_body, index = parse_statement(index)
    return nodes.IfStatement(cond, body, else_body), index
@add_range
def parse_while_statement(index):
    """Parse a while statement: `while (cond) body`."""
    index = match_token(index, token_kinds.while_kw, ParserError.GOT)
    index = match_token(index, token_kinds.open_paren, ParserError.AFTER)
    cond, index = parse_expression(index)
    index = match_token(index, token_kinds.close_paren, ParserError.AFTER)
    body, index = parse_statement(index)
    return nodes.WhileStatement(cond, body), index
@add_range
def parse_do_while_statement(index):
    """Parse a do-while statement: `do body while (cond);`."""
    index = match_token(index, token_kinds.do_kw, ParserError.GOT)
    body, index = parse_statement(index)
    index = match_token(index, token_kinds.while_kw, ParserError.GOT)
    index = match_token(index, token_kinds.open_paren, ParserError.AFTER)
    cond, index = parse_expression(index)
    index = match_token(index, token_kinds.close_paren, ParserError.AFTER)
    index = match_token(index, token_kinds.semicolon, ParserError.AFTER)
    return nodes.DoWhileStatement(cond, body), index
@add_range
def parse_for_statement(index):
    """Parse a for statement: `for (init; cond; update) body`."""
    index = match_token(index, token_kinds.for_kw, ParserError.GOT)
    index = match_token(index, token_kinds.open_paren, ParserError.AFTER)
    init, cond, update, index = get_for_clauses(index)
    body, index = parse_statement(index)
    return nodes.ForStatement(init, cond, update, body), index
def get_for_clauses(index):
    """Return the three clauses of a for-statement.

    index - index of the beginning of the first clause.
    returns - (Node, Node, Node, index); each Node is the corresponding
    clause, or None when that clause is empty. The index is that of the
    first token after the close paren terminating the for clauses.
    Raises an exception on malformed input.
    """
    init, index = get_first_for_clause(index)

    # Second clause is empty iff we are sitting on its semicolon.
    cond = None
    if token_is(index, token_kinds.semicolon):
        index += 1
    else:
        cond, index = parse_expression(index)
        index = match_token(index, token_kinds.semicolon, ParserError.AFTER)

    # Third clause is empty iff we are sitting on the close paren.
    update = None
    if token_is(index, token_kinds.close_paren):
        index += 1
    else:
        update, index = parse_expression(index)
        index = match_token(index, token_kinds.close_paren, ParserError.AFTER)

    return init, cond, update, index
def get_first_for_clause(index):
    """Return the first clause of a for-statement.

    index - index of the beginning of the first clause.
    returns - (Node or None, index). The node is None when the clause is
    empty (an immediate semicolon); index points at the next token.
    Raises an exception on malformed input.
    """
    # Empty clause: nothing but the terminating semicolon.
    if token_is(index, token_kinds.semicolon):
        return None, index + 1
    # A declaration consumes its own semicolon. A failure here is
    # suppressed by log_error() so we can retry as an expression.
    with log_error():
        return parse_declaration(index)
    first_clause, index = parse_expression(index)
    index = match_token(index, token_kinds.semicolon, ParserError.AFTER)
    return first_clause, index
@add_range
def parse_expr_statement(index):
    """Parse a statement that is a bare expression, e.g. `a = 3 + 4;`."""
    # A lone semicolon is an empty statement.
    if token_is(index, token_kinds.semicolon):
        return nodes.EmptyStatement(), index + 1
    expr_node, index = parse_expression(index)
    index = match_token(index, token_kinds.semicolon, ParserError.AFTER)
    return nodes.ExprStatement(expr_node), index
from utils import Token_Type
from utils import ExprNode
from utils import syntax_error
from Lexer import Lexer
from utils import set_param, get_param, change_param
import math
import matplotlib.pyplot as plt
# global drawing state, mutated by the set_* helpers below
Origin_x = 0.0   # translation offset, x
Origin_y = 0.0   # translation offset, y
Rot_ang = 0.0    # rotation angle (passed to math.cos/math.sin, so radians)
Scale_x = 1      # scale factor, x
Scale_y = 1      # scale factor, y
Color = 'BLACK'  # current plot color name
def set_origin(x, y):
    """Record the translation origin used by cal_coord(), and log it."""
    global Origin_x, Origin_y
    Origin_x, Origin_y = x, y
    print("^^^^^^^^^^^Set Origin^^^^^^^^^^^^")
    print(str(Origin_x) + ' ' + str(Origin_y))
def set_scale(x, y):
    """Record the axis scale factors used by cal_coord(), and log them."""
    global Scale_x, Scale_y
    Scale_x, Scale_y = x, y
    print("^^^^^^^^^^^Set Scale^^^^^^^^^^^^")
    print(str(Scale_x) + ' ' + str(Scale_y))
def set_rot(x):
    """Record the rotation angle used by cal_coord(), and log it."""
    global Rot_ang
    # Store as-is; cal_coord() feeds this to math.cos/math.sin.
    Rot_ang = x
    print("^^^^^^^^^^^Set Rot^^^^^^^^^^^^")
    print(str(Rot_ang))
def set_color(x):
    """Record the current plot color name used by draw_loop(), and log it."""
    global Color
    Color = x
    print("^^^^^^^^^^^Set Color^^^^^^^^^^^^")
    print(Color)
# get the value of expression
# dfs
def get_expr_value(root):
if root is None:
return 0.0
if root.type == Token_Type.PLUS.name:
return float(get_expr_value(root.left)) + float(get_expr_value(root.right))
elif root.type == Token_Type.MINUS.name:
return float(get_expr_value(root.left)) - float(get_expr_value(root.right))
elif root.type == Token_Type.MUL.name:
return float(get_expr_value(root.left)) * float(get_expr_value(root.right))
elif root.type == Token_Type.DIV.name:
return float(get_expr_value(root.left)) / float(get_expr_value(root.right))
elif root.type == Token_Type.FUNC.name:
return float(root.func(get_expr_value(root.left)))
elif root.type == Token_Type.CONST_ID.name:
return float(root.value)
elif root.type == Token_Type.T.name:
return float(root.get_param())
return 0.0
# calculate (x, y) after transformation
def cal_coord(x_ptr, y_ptr):
global Origin_x, Origin_y, Scale_x, Scale_y
x = get_expr_value(x_ptr)
y = get_expr_value(y_ptr)
# scaling
x *= Scale_x
y *= Scale_y
# rotation
temp = x * math.cos(Rot_ang) + y * math.sin(Rot_ang)
y = y * math.cos(Rot_ang) - x * math.sin(Rot_ang)
x = temp
# translation
x += Origin_x
y += Origin_y
return x, y
# draw the pic dot by dot
def draw_loop(start, end, step, x_ptr, y_ptr):
global Color
set_param(start)
while get_param() <= end:
x, y = cal_coord(x_ptr, y_ptr)
if Color == 'RED':
plt.plot(x, y, 'r.')
elif Color == 'GREEN':
plt.plot(x, y, 'g.')
elif Color == 'BLUE':
plt.plot(x, y, 'b.')
else:
plt.plot(x, y, 'k.')
change_param(step)
def close_scanner():
    """Teardown hook called when parsing finishes; currently only logs."""
    print("Close scanner")
# print the syntax tree
def print_tree(root):
    """Pre-order debug dump of a syntax tree; a None root prints nothing."""
    if root is None:
        return
    root.show()
    print("left_child: ")
    print_tree(root.left)
    print("right_child: ")
    print_tree(root.right)
class Parser:
    """Recursive-descent parser/interpreter for the drawing language.

    Grammar (one statement per line, each terminated by ';'):
        ORIGIN IS (expr, expr)
        SCALE  IS (expr, expr)
        ROT    IS expr
        COLOR  IS <color>
        FOR T FROM expr TO expr STEP expr DRAW (expr, expr)
    Expressions are evaluated immediately; FOR statements plot via draw_loop().
    """

    def __init__(self, filename):
        self.lexer = Lexer(filename)
        self.token = None  # current lookahead token
        self.root = None   # unused here; kept for interface compatibility

    def start(self):
        """Run the lexer, then parse (and execute) the whole program."""
        print("-----Enter Start-----")
        self.lexer.start()
        self.fetch_token()
        self.program()
        close_scanner()
        print("-----Exit Start-----")

    def fetch_token(self):
        """Advance to the next token, skipping comments; report bad tokens."""
        print("-----Enter FetchToken-----")
        self.token = self.lexer.gettoken()
        if self.token.type == Token_Type.ERRTOKEN.name:
            syntax_error(1)
        # skip comment tokens recursively
        if self.token.type == Token_Type.COMMENT.name:
            self.fetch_token()
        print("-----Exit FetchToken-----")

    def match_token(self, ob):
        """Check the current token type against ob; syntax error on mismatch.

        Note: does NOT advance the stream; callers call fetch_token() themselves.
        """
        print("-----Enter MatchToken-----")
        if self.token.type != ob:
            syntax_error(2, sb=self.token.type, ob=ob)
            print("-----Exit MatchToken-----")
            return False
        print("*****MatchToken " + ob + "*****")
        print("-----Exit MatchToken-----")
        return True

    def _log_tree(self, node):
        """Debug-dump an expression subtree between separator lines."""
        print("--------------------------------------------------")
        print_tree(node)
        print("--------------------------------------------------")

    def program(self):
        """program -> { statement ';' }"""
        print("-----Enter Program-----")
        while self.token.type != Token_Type.NONTOKEN.name:
            self.statement()
            # every statement ends with ';'
            self.match_token(Token_Type.SEMICO.name)
            self.fetch_token()
        print("-----Exit Program-----")

    def statement(self):
        """Dispatch on the leading keyword of a statement."""
        print("-----Enter Statement-----")
        if self.token.type == Token_Type.ORIGIN.name:
            self.origin_statement()
        elif self.token.type == Token_Type.SCALE.name:
            self.scale_statement()
        elif self.token.type == Token_Type.ROT.name:
            self.rot_statement()
        elif self.token.type == Token_Type.FOR.name:
            self.for_statement()
        elif self.token.type == Token_Type.COLOR.name:
            self.color_statement()
        else:
            syntax_error(3)
        print("-----Exit Statement-----")

    def origin_statement(self):
        """ORIGIN IS (x, y) -- set the translation origin."""
        print("-----Enter OriginStatement-----")
        self.match_token(Token_Type.ORIGIN.name)
        self.fetch_token()
        self.match_token(Token_Type.IS.name)
        self.fetch_token()
        self.match_token(Token_Type.L_BRACKET.name)
        self.fetch_token()
        tmp_ptr = self.expression()
        self._log_tree(tmp_ptr)
        x = get_expr_value(tmp_ptr)
        self.match_token(Token_Type.COMMA.name)
        self.fetch_token()
        tmp_ptr = self.expression()
        self._log_tree(tmp_ptr)
        y = get_expr_value(tmp_ptr)
        self.match_token(Token_Type.R_BRACKET.name)
        self.fetch_token()
        set_origin(x, y)
        print("-----Exit OriginStatement-----")

    def scale_statement(self):
        """SCALE IS (x, y) -- set the axis scale factors."""
        print("-----Enter ScaleStatement-----")
        self.match_token(Token_Type.SCALE.name)
        self.fetch_token()
        self.match_token(Token_Type.IS.name)
        self.fetch_token()
        self.match_token(Token_Type.L_BRACKET.name)
        self.fetch_token()
        tmp_ptr = self.expression()
        self._log_tree(tmp_ptr)
        x = get_expr_value(tmp_ptr)
        self.match_token(Token_Type.COMMA.name)
        self.fetch_token()
        tmp_ptr = self.expression()
        self._log_tree(tmp_ptr)
        y = get_expr_value(tmp_ptr)
        self.match_token(Token_Type.R_BRACKET.name)
        self.fetch_token()
        set_scale(x, y)
        print("-----Exit ScaleStatement-----")

    def rot_statement(self):
        """ROT IS angle -- set the rotation angle."""
        print("-----Enter RotStatement-----")
        self.match_token(Token_Type.ROT.name)
        self.fetch_token()
        self.match_token(Token_Type.IS.name)
        self.fetch_token()
        tmp_ptr = self.expression()
        self._log_tree(tmp_ptr)
        x = get_expr_value(tmp_ptr)
        set_rot(x)
        print("-----Exit RotStatement-----")

    def for_statement(self):
        """FOR T FROM s TO e STEP d DRAW (x, y) -- plot the curve."""
        print("-----Enter ForStatement-----")
        self.match_token(Token_Type.FOR.name)
        self.fetch_token()
        self.match_token(Token_Type.T.name)
        self.fetch_token()
        self.match_token(Token_Type.FROM.name)
        self.fetch_token()
        start_ptr = self.expression()
        self._log_tree(start_ptr)
        start = get_expr_value(start_ptr)
        self.match_token(Token_Type.TO.name)
        self.fetch_token()
        end_ptr = self.expression()
        self._log_tree(end_ptr)
        end = get_expr_value(end_ptr)
        self.match_token(Token_Type.STEP.name)
        self.fetch_token()
        step_ptr = self.expression()
        self._log_tree(step_ptr)
        step = get_expr_value(step_ptr)
        self.match_token(Token_Type.DRAW.name)
        self.fetch_token()
        self.match_token(Token_Type.L_BRACKET.name)
        self.fetch_token()
        x_ptr = self.expression()
        self._log_tree(x_ptr)
        self.match_token(Token_Type.COMMA.name)
        self.fetch_token()
        y_ptr = self.expression()
        self._log_tree(y_ptr)
        self.match_token(Token_Type.R_BRACKET.name)
        self.fetch_token()
        # The x/y expression trees (not values) are passed on so they can
        # be re-evaluated at every value of T.
        draw_loop(start, end, step, x_ptr, y_ptr)
        print("-----Exit ForStatement-----")

    def color_statement(self):
        """COLOR IS <color> -- set the current plot color."""
        print("-----Enter ColorStatement-----")
        self.match_token(Token_Type.COLOR.name)
        self.fetch_token()
        self.match_token(Token_Type.IS.name)
        self.fetch_token()
        self.match_token(Token_Type.SP_COLOR.name)
        set_color(self.token.lexeme)
        self.fetch_token()
        print("-----Exit ColorStatement-----")

    def expression(self):
        """expression -> term { (PLUS | MINUS) term }"""
        print("-----Enter Expression-----")
        left = self.term()
        while self.token.type == Token_Type.PLUS.name or self.token.type == Token_Type.MINUS.name:
            token_tmp = self.token.type
            self.match_token(token_tmp)
            # BUG FIX: the operator token was never consumed here, so the
            # right operand re-parsed it as a unary sign (turning a - b
            # into a - (0 - b) = a + b). Advance past it, as term() does.
            self.fetch_token()
            right = self.term()
            left = ExprNode(token_tmp, lnode=left, rnode=right)
        print("-----Exit Expression-----")
        return left

    def term(self):
        """term -> factor { (MUL | DIV) factor }"""
        print("-----Enter Term-----")
        left = self.factor()
        while self.token.type == Token_Type.MUL.name or self.token.type == Token_Type.DIV.name:
            token_tmp = self.token.type
            self.match_token(token_tmp)
            self.fetch_token()
            right = self.factor()
            left = ExprNode(token_tmp, lnode=left, rnode=right)
        print("-----Exit Term-----")
        return left

    def factor(self):
        """factor -> (PLUS | MINUS) factor | component

        A unary sign is rewritten as a binary op with a constant-0 left child.
        """
        print("-----Enter Factor-----")
        if self.token.type == Token_Type.PLUS.name or self.token.type == Token_Type.MINUS.name:
            token_tmp = self.token.type
            self.match_token(token_tmp)
            left = ExprNode(Token_Type.CONST_ID.name, 0)
            self.fetch_token()
            right = self.factor()
            res = ExprNode(token_tmp, lnode=left, rnode=right)
            print("-----Exit Factor-----")
            return res
        else:
            res = self.component()
            print("-----Exit Factor-----")
            return res

    def component(self):
        """component -> atom [ POWER component ]  (right-associative power)"""
        print("-----Enter Component-----")
        left = self.atom()
        self.fetch_token()
        while self.token.type == Token_Type.POWER.name:
            token_tmp = self.token.type
            self.match_token(token_tmp)
            self.fetch_token()
            right = self.component()
            left = ExprNode(token_tmp, lnode=left, rnode=right)
        print("-----Exit Component-----")
        return left

    def atom(self):
        """atom -> CONST_ID | T | FUNC ( expression ) | ( expression )"""
        print("-----Enter Atom-----")
        if self.token.type == Token_Type.CONST_ID.name:
            print("leaf: " + str(self.token.value))
            print("-----Exit Atom-----")
            return ExprNode(self.token.type, self.token.value)  # leaf
        elif self.token.type == Token_Type.T.name:
            print("leaf: " + self.token.type)
            print("-----Exit Atom-----")
            return ExprNode(self.token.type, self.token.value)  # leaf
        elif self.token.type == Token_Type.FUNC.name:
            token_tmp = self.token.type
            func_tmp = self.token.func
            self.fetch_token()
            self.match_token(Token_Type.L_BRACKET.name)
            self.fetch_token()
            left = self.expression()
            self.match_token(Token_Type.R_BRACKET.name)
            print("-----Exit Atom-----")
            return ExprNode(token_tmp, lnode=left, func=func_tmp)
        elif self.token.type == Token_Type.L_BRACKET.name:
            # BUG FIX: this branch compared the token's type string against
            # the enum member itself (always unequal), so parenthesized
            # atoms could never parse; compare against .name as elsewhere.
            self.match_token(Token_Type.L_BRACKET.name)
            self.fetch_token()
            left = self.expression()
            self.match_token(Token_Type.R_BRACKET.name)
            print("-----Exit Atom-----")
            return left
        # NOTE(review): any other token falls through and returns None,
        # mirroring the original behavior; the caller does not check.
if __name__ == '__main__':
    # init the parser against the sample program file
    p = Parser("test.txt")
    # run the parser: lexes, parses, and plots the program's statements
    p.start()
    # anchor both axes at 0 before showing the accumulated plot
    plt.xlim(0)
    plt.ylim(0)
    plt.show()