diff --git a/RASP_support/DrawCompFlow.py b/RASP_support/DrawCompFlow.py new file mode 100644 index 0000000..bc370ea --- /dev/null +++ b/RASP_support/DrawCompFlow.py @@ -0,0 +1,505 @@ +from FunctionalSupport import UnfinishedSelect, Unfinished, UnfinishedSequence, \ + guarded_contains, guarded_compare, indices, base_tokens, tokens_asis +from Support import clean_val +import analyse # adds useful functions to all the unfinisheds +from analyse import UnfinishedFunc +import os +from copy import copy +import string + +# fix: in ordering, we always connect bottom FF to top select. but sometimes, there is no FF (if go straight into next select), or there is no rendered select (in special case of full-select) + +layer_color = 'lemonchiffon' +head_color = 'bisque' #'yellow' + +indices_colour = 'bisque3' +comment_colour = 'cornsilk' +select_on_colour = 'plum' +select_off_colour = head_color + +def windows_path_cleaner(s): + if os.name == "nt": # is windows + validchars = "-_.() "+string.ascii_letters+string.digits + def fix(c): + return c if c in validchars else "." + return "".join([fix(c) for c in s]) + else: + return s + +def colour_scheme(row_type): + if row_type == INPUT: + return 'gray', 'gray', 'gray' + if row_type == QVAR: + return 'palegreen4','mediumseagreen', 'palegreen1' + elif row_type == KVAR: + return 'deepskyblue3','darkturquoise','darkslategray1' + elif row_type == VVAR: + return 'palevioletred3','palevioletred2','lightpink' + elif row_type == VREAL: + return 'plum4','plum3','thistle2' + elif row_type == RES: + return 'lightsalmon3','burlywood','burlywood1' + else: + raise Exception("unknown row type: "+str(row_type)) + +QVAR, KVAR, VVAR, VREAL, RES, INPUT = ["QVAR","KVAR","VVAR","VREAL","RES","INPUT"] +POSS_ROWS = [QVAR,KVAR,VVAR,VREAL,RES,INPUT] +ROW_NAMES = {QVAR:"Me",KVAR:"Other",VVAR:"X",VREAL:"f(X)",RES:"FF",INPUT:""} + +def UnfinishedFunc(f): + setattr(Unfinished,f.__name__,f) + +@UnfinishedFunc +def last_val(self): + return self.last_res.get_vals() + +def makeQKStable(qvars,kvars,select,ref_in_g): + qvars = [q.last_val() for q in qvars] + kvars = [k.last_val() for k in kvars] + select = select.last_val() + q_val_len, k_val_len = len(select), len(select[0]) + + qvars_skip = len(kvars) + kvars_skip = len(qvars) + _, _, qvars_colour = colour_scheme(QVAR) + _, _, kvars_colour = colour_scheme(KVAR) + # select has qvars along the rows and kvars along the columns, so we'll do the same. + # i.e. 
top rows will just be the kvars and first columns will just be the qvars
+	# if (not qvars) and (not kvars): # no qvars or kvars -> full select -> dont waste space drawing
+	# 	num_rows, num_columns = 0, 0
+	# 	pass
+	# else:
+	# 	num_rows = qvars_skip+(len(qvars[0]) if qvars else 1)
+	# 	num_columns = kvars_skip+(len(kvars[0]) if kvars else 1)
+	num_rows = qvars_skip+q_val_len
+	num_columns = kvars_skip+k_val_len
+
+	select_cells = {i:[CellVals('',head_color,j,i) for j in range(num_columns)] \
+					for i in range(num_rows)}
+
+
+	for i,seq in enumerate(kvars):
+		for j,v in enumerate(seq):
+			select_cells[i][j+kvars_skip] = CellVals(v,kvars_colour,i,j+kvars_skip)
+	for j,seq in enumerate(qvars):
+		for i,v in enumerate(seq):
+			select_cells[i+qvars_skip][j] = CellVals(v,qvars_colour,i+qvars_skip,j)
+
+	for i in range(num_rows-qvars_skip): # i goes over the q_var values
+		for j in range(num_columns-kvars_skip): # j goes over the k_var values
+			v = select[i][j]
+			colour = select_on_colour if v else select_off_colour
+			select_cells[i+qvars_skip][j+kvars_skip] = CellVals(v,colour,i+qvars_skip,j+kvars_skip,select_internal=True)
+
+	# TODO: make an ugly little q\k triangle thingy in the top corner
+	return GridTable(select_cells,ref_in_g)
+
+class CellVals:
+	def __init__(self,val,colour,i_row,i_col,select_internal=False,known_portstr=None):
+		def mystr(v):
+			if isinstance(v,bool):
+				if select_internal:
+					return ' ' if v else ' ' # color gives it all!
+				else:
+					return 'T' if v else 'F'
+			if isinstance(v,float):
+				v = clean_val(v,3)
+			if isinstance(v,int) and len(str(v))==1:
+				v = " "+str(v) # for pretty square selectors
+			return str(v).replace("<","&lt;").replace(">","&gt;")
+		self.val = mystr(val)
+		self.colour = colour
+		if None is known_portstr:
+			self.portstr = "_col"+str(i_col)+"_row"+str(i_row)
+		else:
+			self.portstr = known_portstr
+	def __str__(self):
+		return '<td port="'+self.portstr+'" bgcolor="'+self.colour+'">'+self.val+'</td>'
+
+
+class GridTable:
+	def __init__(self,cellvals,ref_in_g):
+		self.ref_in_g = ref_in_g
+		self.cellvals = cellvals
+		self.numcols = len(cellvals.get(0,[]))
+		self.numrows = len(cellvals)
+		self.empty = 0 in [self.numcols,self.numrows]
+	def to_str(self,transposed=False):
+		ii = sorted(list(self.cellvals.keys()))
+		rows = [self.cellvals[i] for i in ii]
+		def cells2row(cells):
+			return '<tr>'+''.join(map(str,cells))+'</tr>'
+		return '<<table border="0" cellborder="1" cellspacing="0">'+''.join(map(cells2row,rows))+'</table>>'
+	def bottom_left_portstr(self):
+		return self.access_portstr(0,-1)
+	def bottom_right_portstr(self):
+		return self.access_portstr(-1,-1)
+	def top_left_portstr(self):
+		return self.access_portstr(0,0)
+	def top_right_portstr(self):
+		return self.access_portstr(-1,0)
+	def top_access_portstr(self,i_col):
+		return self.access_portstr(i_col,0)
+	def bottom_access_portstr(self,i_col):
+		return self.access_portstr(i_col,-1)
+	def access_portstr(self,i_col,i_row):
+		return self.ref_in_g + ":" + self.internal_portstr(i_col,i_row)
+	def internal_portstr(self,i_col,i_row):
+		if i_col < 0:
+			i_col = self.numcols + i_col
+		if i_row < 0:
+			i_row = self.numrows + i_row
+		return "_col"+str(i_col)+"_row"+str(i_row)
+	def add_to_graph(self,g):
+		if self.empty:
+			pass
+		else:
+			g.node(name=self.ref_in_g,shape='none',margin='0',label=self.to_str())
+
+class Table:
+	def __init__(self,seqs_by_rowtype,ref_in_g,rowtype_order=[]):
+		self.ref_in_g = ref_in_g
+		# consistent presentation, and v useful for feedforward clarity
+		self.rows = []
+		self.seq_index = {}
+		if len(rowtype_order)>1:
+			self.add_rowtype_cell = True
+		else:
+			assert len(seqs_by_rowtype.keys()) == 1, "table got multiple row types but no order for them"
+			rowtype_order = list(seqs_by_rowtype.keys())
+			self.add_rowtype_cell = not (rowtype_order[0] == RES)
+		self.note_res_dependencies = len(seqs_by_rowtype.get(RES,[]))>1
+		self.leading_metadata_offset = 1 + self.add_rowtype_cell
+		for rt in rowtype_order:
+			seqs = sorted(seqs_by_rowtype[rt],key=lambda seq:seq.creation_order_id)
+			for i,seq in enumerate(seqs):
+				self.n = self.add_row(seq,rt) # each one appends to self.rows.
+				# self.n stores length of a single row, they will all be the same,
+				# just easiest to get like this
+				# add_row has to happen one at a time b/c they care about length of
+				# self.rows at time of addition (to get ports right)
+		self.empty = len(self.rows)==0
+		if self.empty:
+			self.n = 0
+		self.transpose = False # (len(rowtype_order)==1 and rowtype_order[0]==QVAR)
+		# no need to twist Q, just making the table under anyway
+		# transpose affects the port accesses, but think about that later
+	def to_str(self):
+		rows = self.rows if not self.transpose else list(zip(*self.rows))
+		def cells2row(cells):
+			return '<tr>'+''.join(cells)+'</tr>'
+		return '<<table border="0" cellborder="1" cellspacing="0">'+''.join(map(cells2row,rows))+'</table>
>' + def bottom_left_portstr(self): + return self.access_portstr(0,-1) + def bottom_right_portstr(self): + return self.access_portstr(-1,-1) + def top_left_portstr(self): + return self.access_portstr(0,0) + def top_right_portstr(self): + return self.access_portstr(-1,0) + def top_access_portstr(self,i_col,skip_meta=False): + return self.access_portstr(i_col,0,skip_meta=skip_meta) + def bottom_access_portstr(self,i_col,skip_meta=False): + return self.access_portstr(i_col,-1,skip_meta=skip_meta) + def access_portstr(self,i_col,i_row,skip_meta=False): + return self.ref_in_g + ":" + self.internal_portstr(i_col,i_row,skip_meta=skip_meta) + def internal_portstr(self,i_col,i_row,skip_meta=False): + if skip_meta and (i_col >= 0): # before flip things for reverse column access + i_col += self.leading_metadata_offset + if i_col < 0: + i_col = (self.n) + i_col + if i_row < 0: + i_row = len(self.rows) + i_row + return "_col"+str(i_col)+"_row"+str(i_row) + def add_row(self,seq,row_type): + def add_cell(val,colour): + res = CellVals(val,colour,-1,-1, + known_portstr=self.internal_portstr(len(cells),len(self.rows))) + cells.append(str(res)) + + def add_strong_line(): + # after failing to inject css styles in graphviz, + # seeing that their suggestion only creates lines (if at all? unclear) of + # width 1 (same as the border already there) and it wont make multiple VRs, + # and realising their suggestion also does nothing, + # refer to hack at the top of this priceless page: + # http://jkorpela.fi/html/cellborder.html + cells.append('') + + qkvr_colour, name_colour, data_colour = colour_scheme(row_type) + cells = [] # has to be created in advance, and not just be all the results of add_cell, + # because add_cell cares about current length of 'cells' + if self.add_rowtype_cell: + add_cell(ROW_NAMES[row_type],qkvr_colour) + add_cell(seq.name,name_colour) + for v in seq.last_val(): + add_cell(v,data_colour) + if self.note_res_dependencies: + self.seq_index[seq] = len(self.rows) + add_strong_line() + add_cell("("+str(self.seq_index[seq])+")",indices_colour) + add_cell(self.dependencies_str(seq,row_type),comment_colour) + self.rows.append(cells) + return len(cells) + + def dependencies_str(self,seq,row_type): + if not row_type == RES: + return "" + return "from ("+", ".join(str(self.seq_index[m]) for m in seq.get_nonminor_parent_sequences()) +")" + + def add_to_graph(self,g): + if self.empty: + # g.node(name=self.ref_in_g,label="empty table") + pass + else: + g.node(name=self.ref_in_g,shape='none',margin='0',label=self.to_str()) + +def place_above(g,node1,node2): + + g.edge(node1.bottom_left_portstr(),node2.top_left_portstr(),style="invis") + g.edge(node1.bottom_right_portstr(),node2.top_right_portstr(),style="invis") + +def connect(g,top_table,bottom_table,select_vals): + # connects top_table as k and bottom_table as q + if top_table.empty or bottom_table.empty: + return # not doing this for now + place_above(g,top_table,bottom_table) + # just so it positions them one on top of the other, even if select is empty + for q_i in select_vals: + for k_i,b in enumerate(select_vals[q_i]): + if b: + # have to add 2 cause first 2 are data type and row name + g.edge(top_table.bottom_access_portstr(k_i,skip_meta=True), + bottom_table.top_access_portstr(q_i,skip_meta=True), + arrowhead='none') + +class SubHead: + def __init__(self,name,seq): + vvars = seq.get_immediate_parent_sequences() + if not seq.definitely_uses_identity_function: + vreal = seq.pre_aggregate_comp() + vreal(seq.last_w) # run it on same w to fill with 
right results + vreals = [vreal] + else: + vreals = [] + + self.name = name + self.vvars_table = Table({VVAR:vvars,VREAL:vreals},self.name+"_vvars",rowtype_order=[VVAR,VREAL]) + self.res_table = Table({RES:[seq]},self.name+"_res") + self.default = "default: "+str(seq.default) if not None is seq.default else "" + # self.vreals_table = ## ? add partly processed vals, useful for eg conditioned_contains? + + def add_to_graph(self,g): + self.vvars_table.add_to_graph(g) + self.res_table.add_to_graph(g) + if self.default: + g.node(self.name+"_default",shape='rectangle',label=self.default) + g.edge(self.name+"_default",self.res_table.top_left_portstr(), + arrowhead='none') + + def add_edges(self,g,select_vals): + connect(g,self.vvars_table,self.res_table,select_vals) + + def bottom_left_portstr(self): + return self.res_table.bottom_left_portstr() + def bottom_right_portstr(self): + return self.res_table.bottom_right_portstr() + def top_left_portstr(self): + return self.vvars_table.top_left_portstr() + def top_right_portstr(self): + return self.vvars_table.top_right_portstr() + +class Head: + def __init__(self,name,head_primitives,i): + self.name = name + self.i = i + self.head_primitives = head_primitives + select = self.head_primitives.select + q_vars, k_vars = select.q_vars, select.k_vars + q_vars = sorted(list(set(q_vars)),key=lambda a:a.creation_order_id) + k_vars = sorted(list(set(k_vars)),key=lambda a:a.creation_order_id) + self.kq_table = Table({QVAR:q_vars,KVAR:k_vars},self.name+"_qvars",rowtype_order=[KVAR,QVAR]) + # self.k_table = Table({KVAR:k_vars},self.name+"_kvars") + self.select_result_table = makeQKStable(q_vars,k_vars,select,self.name+"_select") + # self.select_table = SelectTable(self.head_primitives.select,self.name+"_select") + self.subheads = [SubHead(self.name+"_subcomp_"+str(i),seq) for i,seq in \ + enumerate(self.head_primitives.sequences)] + + def add_to_graph(self,g): + with g.subgraph(name=self.name) as head: + def headlabel(): + # return self.head_primitives.select.name + return 'head '+str(self.i)+\ + "\n("+self.head_primitives.select.name+")" + head.attr(fillcolor=head_color, label=headlabel(), + fontcolor='black', style='filled') + with head.subgraph(name=self.name+"_select_parts") as sel: + sel.attr(rankdir="LR",label="",style="invis",rank="same") + if True: # not (self.kq_table.empty): + self.select_result_table.add_to_graph(sel) + self.kq_table.add_to_graph(sel) + # sel.edge(self.kq_table.bottom_right_portstr(), + # self.select_result_table.bottom_left_portstr(),style="invis") + + [s.add_to_graph(head) for s in self.subheads] + + def add_organising_edges(self,g): + if self.kq_table.empty: + return + for s in self.subheads: + place_above(g,self.select_result_table,s) + + def bottom_left_portstr(self): + return self.subheads[0].bottom_left_portstr() + def bottom_right_portstr(self): + return self.subheads[-1].bottom_right_portstr() + def top_left_portstr(self): + if not (self.kq_table.empty): + return self.kq_table.top_left_portstr() + else: # no kq (and so no select either) table. 
go into subheads + return self.subheads[0].top_left_portstr() + def top_right_portstr(self): + if not (self.kq_table.empty): + return self.kq_table.top_right_portstr() + else: + return self.subheads[-1].top_right_portstr() + + + def add_edges(self,g): + select_vals = self.head_primitives.select.last_val() + # connect(g,self.k_table,self.q_table,select_vals) + for s in self.subheads: + s.add_edges(g,select_vals) + self.add_organising_edges(g) + +def contains_tokens(mvs): + return next((True for mv in mvs if guarded_contains(base_tokens,mv)),False) + +class Layer: + def __init__(self,depth,d_heads,d_ffs,add_tokens_on_ff=False): + self.heads = [] + self.depth = depth + self.name = self.layer_cluster_name(depth) + for i,h in enumerate(d_heads): + self.heads.append(Head(self.name+"_head"+str(i),h,i)) + ff_parents = [] + for ff in d_ffs: + ff_parents += ff.get_nonminor_parent_sequences() + ff_parents = list(set(ff_parents)) + ff_parents = [p for p in ff_parents if not guarded_contains(d_ffs,p)] + rows_by_type = {RES:d_ffs,VVAR:ff_parents} + rowtype_order = [VVAR,RES] + if add_tokens_on_ff and not contains_tokens(ff_parents): + rows_by_type[INPUT] = [tokens_asis] + rowtype_order = [INPUT] + rowtype_order + self.ff_table = Table(rows_by_type,self.name+"_ffs",rowtype_order) + + def bottom_object(self): + if not self.ff_table.empty: + return self.ff_table + else: + return self.heads[-1] + def top_object(self): + if self.heads: + return self.heads[0] + else: + return self.ff_table + def bottom_left_portstr(self): + return self.bottom_object().bottom_left_portstr() + def bottom_right_portstr(self): + return self.bottom_object().bottom_right_portstr() + def top_left_portstr(self): + return self.top_object().top_left_portstr() + def top_right_portstr(self): + return self.top_object().top_right_portstr() + + def add_to_graph(self,g): + with g.subgraph(name=self.name) as l: + l.attr(fillcolor=layer_color, label='layer '+str(self.depth), + fontcolor='black', style='filled') + for h in self.heads: + h.add_to_graph(l) + self.ff_table.add_to_graph(l) + + def add_organising_edges(self,g): + if self.ff_table.empty: + return + for h in self.heads: + place_above(g,h,self.ff_table) + + def add_edges(self,g): + for h in self.heads: + h.add_edges(g) + self.add_organising_edges(g) + + def layer_cluster_name(self,depth): + return 'cluster_l'+str(depth) # graphviz needs + # cluster names to start with 'cluster' + +class CompFlow: + def __init__(self,all_heads,all_ffs,force_vertical_layers,add_tokens_on_ff=False): + self.force_vertical_layers = force_vertical_layers + self.add_tokens_on_ff = add_tokens_on_ff + self.make_all_layers(all_heads,all_ffs) + def make_all_layers(self,all_heads,all_ffs): + self.layers = [] + ff_depths = [seq.scheduled_comp_depth for seq in all_ffs] + head_depths = [h.comp_depth for h in all_heads] + depths = sorted(list(set(ff_depths+head_depths))) + for d in depths: + d_heads = [h for h in all_heads if h.comp_depth==d] + d_heads = sorted(d_heads,key=lambda h:h.select.creation_order_id) + # only important for determinism to help debug + d_ffs = [f for f in all_ffs if f.scheduled_comp_depth == d] + self.layers.append(Layer(d,d_heads,d_ffs,self.add_tokens_on_ff)) + + def add_all_layers(self,g): + [l.add_to_graph(g) for l in self.layers] + + def add_organising_edges(self,g): + if self.force_vertical_layers: + for l1,l2 in zip(self.layers,self.layers[1:]): + place_above(g,l1,l2) + + def add_edges(self,g): + self.add_organising_edges(g) + [l.add_edges(g) for l in self.layers] + +@UnfinishedFunc +def 
draw_comp_flow(self,w,filename=None, + keep_dot=False,show=True, + force_vertical_layers=True, add_tokens_on_ff=False): + if not None is w: + self(w) # execute seq (and all its ancestors) on the given input w. + # if w==None, assume seq has already been executed on some input. + if not self.last_w == w: + print("evaluating input failed") + return + else: + w = self.last_w + if None is filename: + name = self.name + filename=os.path.join("comp_flows",windows_path_cleaner(name+"("+(str(w) if not isinstance(w,str) else "\""+w+"\"")+")")) + self.mark_all_minor_ancestors() + self.make_display_names_for_all_parents(skip_minors=True) + + all_heads,all_ffs = self.get_all_ancestor_heads_and_ffs(remove_minors=True) + # this scheduling also marks the analysis parent selects + compflow = CompFlow(all_heads,all_ffs, + force_vertical_layers=force_vertical_layers, + add_tokens_on_ff = add_tokens_on_ff) + + # only import graphviz *inside* this function - + # that way RASP can run even if graphviz setup fails + # (though it will not be able to draw computation flows without it) + from graphviz import Digraph + g = Digraph('g') + g.attr(splines='polyline') # with curved lines it fusses over separating score edges + # and makes weirdly curved ones that start overlapping with the sequences :( + compflow.add_all_layers(g) + compflow.add_edges(g) + img_filename = g.render(filename=filename) # img_filename will end with png or something, filename is an intermediate + if show: + g.view() + if not keep_dot: + os.remove(filename) \ No newline at end of file diff --git a/RASP_support/Environment.py b/RASP_support/Environment.py new file mode 100644 index 0000000..cd35d1e --- /dev/null +++ b/RASP_support/Environment.py @@ -0,0 +1,77 @@ +from Sugar import tokens_asis, tokens_str, tokens_int, tokens_bool, tokens_float, indices, length +from FunctionalSupport import Unfinished, RASPTypeError +from Evaluator import RASPFunction +from copy import deepcopy + +class UndefinedVariable(Exception): + def __init__(self,varname): + super().__init__("Error: Undefined variable: "+varname) + +class ReservedName(Exception): + def __init__(self,varname): + super().__init__("Error: Cannot set reserved name: "+varname) + +class Environment: + def __init__(self,parent_env=None,name=None,stealing_env=None): + self.variables = {} + self.name = name + self.parent_env = parent_env + self.stealing_env = stealing_env + self.base_setup() # nested envs can have them too. makes life simpler, + # instead of checking if they have the constant_variables etc in get. 
bit heavier on memory + # but no one's going to use this language for big nested stuff anyway + + def base_setup(self): + self.constant_variables = {"tokens_asis":tokens_asis, + "tokens_str":tokens_str, + "tokens_int":tokens_int, + "tokens_bool":tokens_bool, + "tokens_float":tokens_float, + "indices":indices, + "length":length, + "True":True, + "False":False} + self.reserved_words=["if","else","not","and","or","out","def","return","range","for","in","zip","len","get"] +\ + list(self.constant_variables.keys()) + + def snapshot(self): + res = Environment(parent_env=self.parent_env,name=self.name,stealing_env=self.stealing_env) + def carefulcopy(val): + if isinstance(val,Unfinished) or isinstance(val,RASPFunction): + return val # non mutable, at least not through rasp commands + elif isinstance(val,float) or isinstance(val,int) or isinstance(val,str) or isinstance(val,bool): + return val # non mutable + elif isinstance(val,list): + return [carefulcopy(v) for v in val] + else: + raise RASPTypeError("environment contains element that is not unfinished,", + "rasp function, float, int, string, bool, or list? :",val) + res.variables = {d:carefulcopy(self.variables[d]) for d in self.variables} + return res + + def make_nested(self,names_vars=[]): + res = Environment(self,name=str(self.name)+"'") + for n,v in names_vars: + res.set_variable(n,v) + return res + + def get_variable(self,name): + if name in self.constant_variables: + return self.constant_variables[name] + if name in self.variables: + return self.variables[name] + if not None is self.parent_env: + return self.parent_env.get_variable(name) + raise UndefinedVariable(name) + + def set_variable(self,name,val): + if name in self.reserved_words: + raise ReservedName(name) + self.variables[name] = val + if not None is self.stealing_env: + if name.startswith("_") or name=="out": # things we don't want to steal + return + self.stealing_env.set_variable(name,val) + + def set_out(self,val): + self.variables["out"] = val diff --git a/RASP_support/Evaluator.py b/RASP_support/Evaluator.py new file mode 100644 index 0000000..35314f7 --- /dev/null +++ b/RASP_support/Evaluator.py @@ -0,0 +1,606 @@ +from Sugar import select, zipmap, aggregate, \ + tplor, tpland, tplnot, toseq, \ + or_selects, and_selects, not_select, full_s, indices, length +from FunctionalSupport import Unfinished, UnfinishedSequence, UnfinishedSelect +from Support import RASPTypeError, RASPError, Select, Sequence +from collections.abc import Iterable +import sys +from zzantlr.RASPParser import RASPParser + +encoder_name = "s-op" + +def strdesc(o,desc_cap=None): + if isinstance(o,Unfinished): + return o.name + if isinstance(o,list): + res = "["+", ".join([strdesc(v) for v in o])+"]" + if not None is desc_cap and len(res)>desc_cap: + return "(list)" + else: + return res + if isinstance(o,dict): + res = "{"+", ".join((strdesc(k)+": "+strdesc(o[k])) for k in o)+"}" + if not None is desc_cap and len(res)>desc_cap: + return "(dict)" + else: + return res + else: + if isinstance(o,str): + return "\""+o+"\"" + else: + return str(o) + + +class RASPValueError(RASPError): + def __init__(self,*a): + super().__init__(*a) + + +debug = True +def debprint(*a,**kw): + if debug: + print(*a,**kw) + +def ast_text(ast): # just so don't have to go remembering this somewhere + # consider seeing if can make it add spaces between the tokens when doing this tho + return ast.getText() + +def isatom(v): + # the legal atoms + return True in [isinstance(v,t) for t in [int,float,str,bool]] + +def name_general_type(v): 
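# Illustrative sketch, not part of the patch above: how the Environment class just
# defined resolves and stores names. The variable names ("base", "inner", "x") are
# hypothetical; behaviour follows get_variable/set_variable as written.
#
#   base = Environment(name="repl")
#   base.set_variable("x", 3)            # stored in base.variables
#   inner = base.make_nested()           # child env, named "repl'"
#   inner.get_variable("x")              # -> 3, found via parent_env lookup
#   inner.set_variable("x", 5)           # shadows x in the child only
#   base.get_variable("x")               # -> still 3
#   inner.get_variable("indices")        # -> the indices s-op, from constant_variables
#   inner.set_variable("length", 1)      # raises ReservedName
#   inner.get_variable("nope")           # raises UndefinedVariable
#
# When a stealing_env is supplied, every set_variable call is also mirrored into it,
# unless the name starts with "_" or is "out".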
+ if isinstance(v,list): + return "list" + if isinstance(v,dict): + return "dict" + if isinstance(v,UnfinishedSequence): + return encoder_name + if isinstance(v,UnfinishedSelect): + return "selector" + if isinstance(v,RASPFunction): + return "function" + if isatom(v): + return "atom" + return "??" + +class ArgsError(Exception): + def __init__(self,name,expected,got): + super().__init__("wrong number of args for "+name+\ + "- expected: "+str(expected)+", got: "+str(got)+".") + +class NamedVal: + def __init__(self,name,val): + self.name = name + self.val = val + +class NamedValList: + def __init__(self,namedvals): + self.nvs = namedvals + +class JustVal: + def __init__(self,val): + self.val = val + +class RASPFunction: + def __init__(self,name,enclosing_env,argnames,statement_trees,returnexpr,creator_name): + self.name = name # just for debug purposes + self.enclosing_env = enclosing_env + self.argnames = argnames + self.statement_trees = statement_trees + self.returnexpr = returnexpr + self.creator = creator_name + def __str__(self): + return self.creator + " function: "+self.name+"("+", ".join(self.argnames)+")" + + def __call__(self,*args): + top_eval = args[-1] + args = args[:-1] + env = self.enclosing_env.make_nested([]) # nesting, because function shouldn't affect the enclosing environment + if not len(args)==len(self.argnames): + raise ArgsError(self.name,len(self.argnames),len(args)) + for n,v in zip(self.argnames,args): + env.set_variable(n,v) + evaluator = Evaluator(env,top_eval.repl) + for at in self.statement_trees: + evaluator.evaluate(at) + res = evaluator.evaluateExprsList(self.returnexpr) + return res[0] if len(res)==1 else res + +class Evaluator: + def __init__(self,env,repl): + self.env = env + self.sequence_running_example = repl.sequence_running_example + self.repl = repl + + def evaluate(self,ast): + if ast.expr(): + return self.evaluateExpr(ast.expr(),from_top=True) + if ast.assign(): + return self.assign(ast.assign()) + if ast.funcDef(): + return self.funcDef(ast.funcDef()) + if ast.draw(): + return self.draw(ast.draw()) + if ast.forLoop(): + return self.forLoop(ast.forLoop()) + if ast.loadFile(): + return self.repl.loadFile(ast.loadFile(),self.env) + + # more to come + raise NotImplementedError + + def draw(self,ast): + # TODO: make at least some rudimentary comparisons of selectors somehow to merge heads idk?????? + # maybe keep trace of operations used to create them and those with exact same parent s-ops and operations + # can get in? 
would still find eg select(0,0,==) and select(1,1,==) different, but its better than nothing at all + unf = self.evaluateExpr(ast.unf) + if not isinstance(unf,UnfinishedSequence): + raise RASPTypeError("draw expects unfinished sequence, got:",unf) + example = self.evaluateExpr(ast.inputseq) if ast.inputseq else self.sequence_running_example + if not isinstance(example,str): + raise RASPTypeError("draw expects to evaluate sequence on string, got:",example) + unf.draw_comp_flow(example) + return JustVal(unf(example)) + + def assign(self,ast): + def set_val_and_name(val,name): + self.env.set_variable(name,val) + if isinstance(val,Unfinished): + val.setname(name) # completely irrelevant really for the REPL, + # but will help maintain sanity when printing computation flows + return NamedVal(name,val) + + varnames = self._names_list(ast.var) + values = self.evaluateExprsList(ast.val) + if len(values)==1: + values = values[0] + + if len(varnames)==1: + return set_val_and_name(values,varnames[0]) + else: + if not len(varnames) == len(values): + raise RASPTypeError("expected",len(varnames),"values, but got:",len(values)) + reslist = [] + for v,name in zip(values,varnames): + reslist.append(set_val_and_name(v,name)) + return NamedValList(reslist) + + def _names_list(self,ast): + idsList = self._get_first_cont_list(ast) + return [i.text for i in idsList] + + def _set_iterator_and_vals(self,iterator_names,iterator_vals): + if len(iterator_names)==1: + self.env.set_variable(iterator_names[0],iterator_vals) + elif isinstance(iterator_vals,Iterable) and (len(iterator_vals)==len(iterator_names)): + for n,v in zip(iterator_names,iterator_vals): + self.env.set_variable(n,v) + else: + if not isinstance(iterator_vals,Iterable): + raise RASPTypeError("iterating with multiple iterator names, but got single iterator value:",iterator_vals) + else: + assert not (len(iterator_vals)==len(iterator_names)), "something wrong with Evaluator logic" # should work out by logic of last failed elif + raise RASPTypeError("iterating with",len(iterator_names),"names but got",len(iterator_vals),"values (",iterator_vals,")") + + def _evaluateDictComp(self,ast): + ast = ast.dictcomp + d = self.evaluateExpr(ast.iterable) + if not (isinstance(d,list) or isinstance(d,dict)): + raise RASPTypeError("dict comprehension should have got a list or dict to loop over, but got:",l) + res = {} + iterator_names = self._names_list(ast.iterator) + for vals in d: + orig_env = self.env + self.env = self.env.make_nested() + self._set_iterator_and_vals(iterator_names,vals) + key = self.make_dict_key(ast.key) + res[key] = self.evaluateExpr(ast.val) + self.env = orig_env + return res + + + def _evaluateListComp(self,ast): + ast = ast.listcomp + l = self.evaluateExpr(ast.iterable) + if not (isinstance(l,list) or isinstance(l,dict)): + raise RASPTypeError("list comprehension should have got a list or dict to loop over, but got:",l) + res = [] + iterator_names = self._names_list(ast.iterator) + for vals in l: + orig_env = self.env + self.env = self.env.make_nested() + self._set_iterator_and_vals(iterator_names,vals) # sets inside the now-nested env - + # don't want to keep the internal iterators after finishing this list comp + res.append(self.evaluateExpr(ast.val)) + self.env = orig_env + return res + + def forLoop(self,ast): + iterator_names = self._names_list(ast.iterator) + iterable = self.evaluateExpr(ast.iterable) + if not (isinstance(iterable,list) or isinstance(iterable,dict)): + raise RASPTypeError("for loop needs to iterate over a list or dict, 
but got:",iterable) + statements = self._get_first_cont_list(ast.mainbody) + for vals in iterable: + self._set_iterator_and_vals(iterator_names,vals) + for s in statements: + self.evaluate(s) + return JustVal(None) + + + def _get_first_cont_list(self,ast): + res = [] + while ast: + if ast.first: + res.append(ast.first) + # sometimes there's no first cause it's just eating a comment + ast = ast.cont + return res + + def funcDef(self,ast): + funcname = ast.name.text + argname_trees = self._get_first_cont_list(ast.arguments) + argnames = [a.text for a in argname_trees] + statement_trees = self._get_first_cont_list(ast.mainbody) + returnexpr = ast.retstatement.res + res = RASPFunction(funcname,self.env,argnames,statement_trees,returnexpr,self.env.name) + self.env.set_variable(funcname,res) + return NamedVal(funcname,res) + + def _evaluateUnaryExpr(self,ast): + uexpr = self.evaluateExpr(ast.uexpr) + uop = ast.uop.text + if uop =="not": + if isinstance(uexpr,UnfinishedSequence): + return tplnot(uexpr) + elif isinstance(uexpr,UnfinishedSelect): + return not_select(uexpr) + else: + return not uexpr + if uop =="-": + return -uexpr + if uop == "+": + return +uexpr + if uop =="round": + return round(uexpr) + if uop == "indicator": + if isinstance(uexpr,UnfinishedSequence): + name = "I("+uexpr.name+")" + return zipmap(uexpr,lambda a:1 if a else 0,name=name).allow_suppressing_display() + # naming res makes RASP think it is important, i.e., + # must always be displayed. but here it has only been named for clarity, so + # correct RASP using .allow_suppressing_display() + + raise RASPTypeError("indicator operator expects "+encoder_name+", got:",uexpr) + raise NotImplementedError + + def _evaluateRange(self,ast): + valsList = self.evaluateExprsList(ast.rangevals) + if not len(valsList) in [1,2,3]: + raise RASPTypeError("wrong number of inputs to range, expected: 1, 2, or 3, got:",len(valsList)) + for v in valsList: + if not isinstance(v,int): + raise RASPTypeError("range expects all integer inputs, but got:",strdesc(valsList)) + return list(range(*valsList)) + + def _index_into_dict(self,d,index): + if not isatom(index): + raise RASPTypeError("index into dict has to be atom (i.e., string, int, float, bool), got:",strdesc(index)) + if index not in d: + raise RASPValueError("index [",strdesc(index),"] not in dict.") + else: + return d[index] + + def _index_into_list_or_str(self,l,index): + lname = "list" if isinstance(l,list) else "string" + if not isinstance(index,int): + raise RASPTypeError("index into",lname,"has to be integer, got:",strdesc(index)) + if index>=len(l) or (-index)>len(l): + raise RASPValueError("index",index,"out of range for",lname,"of length",len(l)) + return l[index] + + def _index_into_sequence(self,s,index): + if isinstance(index,int): + if index>=0: + sel = select(toseq(index),indices,lambda q,k:q==k,name="load from "+str(index)) + else: + real_index = length + index + real_index.setname(length.name+str(index)) + sel = select(real_index,indices,lambda q,k:q==k,name="load from "+str(index)) + return aggregate(sel,s,name=s.name+"["+str(index)+"]").allow_suppressing_display() + else: + raise RASPValueError("index into sequence has to be integer, got:",strdesc(index)) + + + + + def _evaluateIndexing(self,ast): + indexable = self.evaluateExpr(ast.indexable) + index = self.evaluateExpr(ast.index) + + if isinstance(indexable,list) or isinstance(indexable,str): + return self._index_into_list_or_str(indexable,index) + elif isinstance(indexable,dict): + return 
self._index_into_dict(indexable,index)
+		elif isinstance(indexable,UnfinishedSequence):
+			return self._index_into_sequence(indexable,index)
+		else:
+			raise RASPTypeError("can only index into a list, dict, string, or sequence, "+\
+				"but instead got:",strdesc(indexable))
+
+	def _evaluateSelectExpr(self,ast):
+		key = self.evaluateExpr(ast.key)
+		query = self.evaluateExpr(ast.query)
+		sop = ast.selop.text
+		key = toseq(key) # in case got an atom in one of these,
+		query = toseq(query) # e.g. selecting 0th index: indices @= 0
+		if sop=="<":
+			return select(query,key,lambda q,k:q>k)
+		if sop==">":
+			return select(query,key,lambda q,k:q<k)
+		if sop=="==":
+			return select(query,key,lambda q,k:q==k)
+		if sop=="!=":
+			return select(query,key,lambda q,k:not (q==k))
+		if sop=="<=":
+			return select(query,key,lambda q,k:q>=k)
+		if sop==">=":
+			return select(query,key,lambda q,k:q<=k)
+
+	def _evaluateBinaryExpr(self,ast):
+		def has_sequence(l,r):
+			return isinstance(l,UnfinishedSequence) or isinstance(r,UnfinishedSequence)
+		def has_selector(l,r):
+			return isinstance(l,UnfinishedSelect) or isinstance(r,UnfinishedSelect)
+		def both_selectors(l,r):
+			return isinstance(l,UnfinishedSelect) and isinstance(r,UnfinishedSelect)
+		left = self.evaluateExpr(ast.left)
+		right = self.evaluateExpr(ast.right)
+		bop = ast.bop.text
+		bad_pair = RASPTypeError("Cannot apply and/or between selector and non-selector")
+		if bop=="and":
+			if has_sequence(left,right):
+				if has_selector(left,right):
+					raise bad_pair
+				return tpland(left,right)
+			elif has_selector(left,right):
+				if not both_selectors(left,right):
+					raise bad_pair
+				return and_selects(left,right)
+			else:
+				return (left and right)
+		elif bop=="or":
+			if has_sequence(left,right):
+				if has_selector(left,right):
+					raise bad_pair
+				return tplor(left,right)
+			elif has_selector(left,right):
+				if not both_selectors(left,right):
+					raise bad_pair
+				return or_selects(left,right)
+			else:
+				return (left or right)
+		if has_selector(left,right):
+			raise RASPTypeError("Cannot apply",bop,"to selector(s)")
+		elif bop == "+":
+			return left + right
+		elif bop == "-":
+			return left - right
+		elif bop == "*":
+			return left * right
+		elif bop == "/":
+			return left/right
+		elif bop=="^":
+			return pow(left,right)
+		elif bop=='%':
+			return left%right
+		elif bop=="==":
+			return left==right
+		elif bop=="<=":
+			return left<=right
+		elif bop==">=":
+			return left>=right
+		elif bop=="<":
+			return left<right
+		elif bop==">":
+			return left>right
+		# more, like modulo and power and all the other operators, to come
+		raise NotImplementedError
+
+	def _evaluateStandalone(self,ast):
+		if ast.anint:
+			return int(ast.anint.text)
+		if ast.afloat:
+			return float(ast.afloat.text)
+		if ast.astring:
+			return ast.astring.text[1:-1]
+		raise NotImplementedError
+
+	def _evaluateTernaryExpr(self,ast):
+		cond = self.evaluateExpr(ast.cond)
+		if isinstance(cond,Unfinished):
+			res1 = self.evaluateExpr(ast.res1)
+			res2 = self.evaluateExpr(ast.res2)
+			cond, res1, res2 = tuple(map(toseq,(cond,res1,res2)))
+			return zipmap((cond,res1,res2),lambda c,r1,r2:r1 \
+				if c else r2,name=res1.name+" if "+cond.name+" else "+res2.name).allow_suppressing_display()
+		else:
+			return self.evaluateExpr(ast.res1) if cond else self.evaluateExpr(ast.res2)
+			# lazy eval when cond is non-unfinished allows legal loops over actual atoms
+
+	def _evaluateAggregateExpr(self,ast):
+		sel = self.evaluateExpr(ast.sel)
+		seq = self.evaluateExpr(ast.seq)
+		seq = toseq(seq) # just in case its an atom
+		default = self.evaluateExpr(ast.default) if ast.default else None
+
+		if not isinstance(sel,UnfinishedSelect):
+			raise RASPTypeError("Expected selector, got:",strdesc(sel))
+		if not isinstance(seq,UnfinishedSequence):
+			raise 
RASPTypeError("Expected sequence, got:",strdesc(seq)) + if isinstance(default,Unfinished): + raise RASPTypeError("Expected atom, got:",strdesc(default)) + return aggregate(sel,seq,default=default) + + + + def _evaluateZip(self,ast): + list_exps = self._get_first_cont_list(ast.lists) + lists = [self.evaluateExpr(e) for e in list_exps] + if not lists: + raise RASPTypeError("zip needs at least one list") + for i,l in enumerate(lists): + if not isinstance(l,list): + raise RASPTypeError("attempting to zip lists, but",i,"-th element is not list:",strdesc(l)) + n = len(lists[0]) + for i,l in enumerate(lists): + if not len(l)==n: + raise RASPTypeError("attempting to zip lists of length",n,", but",i,"-th list has length",len(l)) + return [list(v) for v in zip(*lists)] # keep everything lists, no tuples/lists mixing here, all the same to rasp (no stuff like append etc) + + def make_dict_key(self,ast): + res = self.evaluateExpr(ast) + if not isatom(res): + raise RASPTypeError("dictionary keys can only be atoms, but instead got:",strdesc(res)) + return res + + def _evaluateDict(self,ast): + named_exprs_list = self._get_first_cont_list(ast.dictContents) + return {self.make_dict_key(e.key):self.evaluateExpr(e.val) for e in named_exprs_list} + + def _evaluateList(self,ast): + exprs_list = self._get_first_cont_list(ast.listContents) + return [self.evaluateExpr(e) for e in exprs_list] + + def _evaluateApplication(self,ast,unf): + input_vals = self._get_first_cont_list(ast.inputexprs) + if not len(input_vals) == 1: + raise ArgsError("evaluate unfinished",1,len(input_vals)) + input_val = self.evaluateExpr(input_vals[0]) + if not isinstance(unf,Unfinished): + raise RASPTypeError("Applying unfinished expects to apply",encoder_name,"or selector, got:",strdesc(sel)) + if not isinstance(input_val,Iterable): + raise RASPTypeError("Applying unfinished expects iterable input, got:",strdesc(input_val)) + res = unf(input_val) + res.created_from_input = input_val + return res + + def _evaluateRASPFunction(self,ast,raspfun): + args_trees = self._get_first_cont_list(ast.inputexprs) + args = tuple(self.evaluateExpr(t) for t in args_trees) + (self,) + real_args = args[:-1] + res = raspfun(*args) + if isinstance(res,Unfinished): + res.setname(raspfun.name+"("+" , ".join(strdesc(a,desc_cap=20) for a in real_args)+")") + return res + + + def _evaluateContains(self,ast): + contained = self.evaluateExpr(ast.contained) + container = self.evaluateExpr(ast.container) + container_name = ast.container.var.text if ast.container.var \ + else str(container) + if isinstance(contained,UnfinishedSequence): + if not isinstance(container,list): + raise RASPTypeError("\"["+encoder_name+"] in X\" expects X to be "\ + "list of atoms, but got non-list:",strdesc(container)) + for v in container: + if not isatom(v): + raise RASPTypeError("\"["+encoder_name+"] in X\" expects X to be "\ + "list of atoms, but got list with values:",strdesc(container)) + return zipmap(contained,lambda c:c in container, + name=contained.name+" in "+container_name).allow_suppressing_display() + elif isatom(contained): # contained is now an atom + if isinstance(container,list): + return contained in container + elif isinstance(container,UnfinishedSequence): + indicator = zipmap(container,lambda v:int(v==contained)) + return aggregate(full_s,indicator)>0 + else: + raise RASPTypeError("\"[atom] in X\" expects X to be list or "+encoder_name+", but got:",strdesc(container)) + if isinstance(contained,UnfinishedSelect) or isinstance(contained,RASPFunction): + obj_name = 
"select" if isinstance(contained,UnfinishedSelect) else "function" + raise RASPTypeError("don't check if",obj_name, + "is contained in list/dict: unless exact same instance,", + "unable to check equivalence of",obj_name+"s") + else: + raise RASPTypeError("\"A in X\" expects A to be",encoder_name,"or atom, but got A:",strdesc(contained)) + + def _evaluateLen(self,ast): + singleList = self.evaluateExpr(ast.singleList) + if not isinstance(singleList,list) or isinstance(singleList,dict): + raise RASPTypeError("attempting to compute length of non-list:",strdesc(singleList)) + return len(singleList) + + def evaluateExprsList(self,ast): + exprsList = self._get_first_cont_list(ast) + return [self.evaluateExpr(v) for v in exprsList] + + def evaluateExpr(self,ast,from_top=False): + def format_return(res,resname="out",is_application_of_unfinished=False): + ast.evaled_value = res + if is_application_of_unfinished: + return JustVal(res) + else: + self.env.set_out(res) + if from_top: + return NamedVal(resname, res) # this is when an expression has been evaled + else: + return res + if ast.bracketed: # in parentheses - get out of them + return self.evaluateExpr(ast.bracketed,from_top=from_top) + if ast.var: # calling single variable + varname = ast.var.text + return format_return(self.env.get_variable(varname),resname=varname) + if ast.standalone: + return format_return(self._evaluateStandalone(ast.standalone)) + if ast.bop: + return format_return(self._evaluateBinaryExpr(ast)) + if ast.uop: + return format_return(self._evaluateUnaryExpr(ast)) + if ast.cond: + return format_return(self._evaluateTernaryExpr(ast)) + if ast.aggregate: + return format_return(self._evaluateAggregateExpr(ast.aggregate)) + if ast.unfORfun: + unfORfun = self.evaluateExpr(ast.unfORfun) + if isinstance(unfORfun,Unfinished): + return format_return(self._evaluateApplication(ast,unfORfun), + is_application_of_unfinished=True) + elif isinstance(unfORfun,RASPFunction): + return format_return(self._evaluateRASPFunction(ast,unfORfun)) + if ast.selop: + return format_return(self._evaluateSelectExpr(ast)) + if ast.aList(): + return format_return(self._evaluateList(ast.aList())) + if ast.aDict(): + return format_return(self._evaluateDict(ast.aDict())) + if ast.indexable: # indexing into a list, dict, or s-op + return format_return(self._evaluateIndexing(ast)) + if ast.rangevals: + return format_return(self._evaluateRange(ast)) + if ast.listcomp: + return format_return(self._evaluateListComp(ast)) + if ast.dictcomp: + return format_return(self._evaluateDictComp(ast)) + if ast.container: + return format_return(self._evaluateContains(ast)) + if ast.lists: + return format_return(self._evaluateZip(ast)) + if ast.singleList: + return format_return(self._evaluateLen(ast)) + raise NotImplementedError + + + + +# new ast getText function for expressions +def new_getText(self): # original getText function stored as self._getText + if hasattr(self,"evaled_value") and isatom(self.evaled_value): + return str(self.evaled_value) + else: + return self._getText() + +RASPParser.ExprContext._getText = RASPParser.ExprContext.getText +RASPParser.ExprContext.getText = new_getText \ No newline at end of file diff --git a/RASP_support/FunctionalSupport.py b/RASP_support/FunctionalSupport.py new file mode 100644 index 0000000..adf8c08 --- /dev/null +++ b/RASP_support/FunctionalSupport.py @@ -0,0 +1,408 @@ +from Support import aggregate as _aggregate +from Support import Sequence, RASPTypeError +from Support import select as _select +from Support import zipmap as 
_zipmap +import traceback, sys # for readable exception handling +from collections.abc import Iterable + +name_maxlen = 30 +plain_unfinished_name = "unf" +plain_unfinished_select_name = "sel" +plain_unfinished_sequence_name = "s-op" +plain_indices = "indices" +plain_tokens = "tokens" + + +# unique ids for all Unfinished objects, numbered by order of creation. ends up very useful sometimes +class NextId: + def __init__(self): + self.i = 0 + def get_next(self): + self.i += 1 + return self.i + +unique_id_maker = NextId() +def creation_order_id(): + return unique_id_maker.get_next() + + + + +class AlreadyPrintedTheException: + def __init__(self): + self.b = False + def __bool__(self): + return self.b + +global_printed = AlreadyPrintedTheException() + +# various unfinished objects +class Unfinished: + def __init__(self,parents_tuple,parents2self,name=plain_unfinished_name,is_toplevel_input=False,min_poss_depth=-1): + self.parents_tuple = parents_tuple + self.parents2self = parents2self + self.last_w = None + self.last_res = None + self.is_toplevel_input = is_toplevel_input + self.setname(name if not self.is_toplevel_input else "input") + self.creation_order_id = creation_order_id() + self.min_poss_depth = min_poss_depth + + def setname(self,name,always_display_when_named=True): + if not None is name: + if len(name)>name_maxlen: + if isinstance(self,UnfinishedSequence): + name = plain_unfinished_sequence_name + elif isinstance(self,UnfinishedSelect): + name = plain_unfinished_select_name + else: + name = plain_unfinished_name + self.name = name + self.always_display = always_display_when_named # if you set something's name, you probably want to see it + return self # return self to allow chaining with other calls and throwing straight into a return statement etc + + def __call__(self,w,print_all_named_sequences=False,print_input=False, + print_all_sequences=False,print_all=False,have_printed=global_printed,topcall=True): + if (not isinstance(w,Iterable)) or (not w): + raise RASPTypeError("RASP sequences/selectors expect non-empty iterables, got: "+str(w)) + global_printed.b = False + if w == self.last_w: + return self.last_res # don't print same calculation multiple times + else: + if self.is_toplevel_input: + res = w + self.last_w, self.last_res = w, w + else: + try: + res = self.parents2self(*tuple(p(w, + print_all_named_sequences=print_all_named_sequences, + print_input=print_input, + print_all_sequences=print_all_sequences, + print_all=print_all, + topcall=False) + for p in self.parents_tuple)) + except Exception as e: + if isinstance(e,RASPTypeError): + raise e + if not global_printed.b: + print("===============================================================") + print("===============================================================") + print("evaluation failed in: [",self.name,"] with exception:\n",e) + print("===============================================================") + print("parent values are:") + for p in self.parents_tuple: + print("=============") + print(p.name) + print(p.last_res) + print("===============================================================") + print("===============================================================") + a,b,tb = sys.exc_info() + tt = traceback.extract_tb(tb) + last_call = max([i for i,t in enumerate(tt) if "__call__" in str(t)]) + print(''.join(traceback.format_list(tt[last_call+1:]))) + + # traceback.print_exception(a,b,tb) + + global_printed.b = True + if not topcall: + raise + else: + return "EVALUATION FAILURE" + + self.last_w, self.last_res = 
w, res + + def should_print(): + if isinstance(res,Sequence): + if print_all_named_sequences and not (self.name in plain_names): + return True + if print_all_sequences: + return True + if self.is_toplevel_input and print_input: + return True + return print_all + if should_print(): + print("resolved \""+self.name+\ + (("\" from:\" "+str(self.get_own_root_input(w))+" \"") if print_root_inputs_too else ""),\ + ":\n\t",res) + return res + +class UnfinishedSequence(Unfinished): + def __init__(self,parents_tuple,parents2self,name=plain_unfinished_sequence_name, + elementwise_function=None,default=None,min_poss_depth=0,from_zipmap=False, + output_index=-1,definitely_uses_identity_function=False): + # min_poss_depth=0 starts all of the base sequences (eg indices) off right + if None is name: # might have got none from some default value, fix it before continuing because later things eg DrawCompFlow + name = plain_unfinished_sequence_name # will expect name to be a string + super(UnfinishedSequence, self).__init__(parents_tuple,parents2self,name=name,min_poss_depth=min_poss_depth) + self.from_zipmap = from_zipmap # can be inferred (by seeing if there are parent selects), but this is simple enough. helpful for rendering comp flow visualisations + self.elementwise_function = elementwise_function # useful for analysis later + self.output_index = output_index + self.default = default # useful for analysis later + self.definitely_uses_identity_function = definitely_uses_identity_function + self.never_display = False + self._constant = False + + def __str__(self): + return "UnfinishedSequence object, name: "+self.name+" id: "+str(self.creation_order_id) + def mark_as_constant(self): + self._constant = True + return self + def is_constant(self): + return self._constant + + +class UnfinishedSelect(Unfinished): + def __init__(self,parents_tuple,parents2self, + name=plain_unfinished_select_name,compare_string=None,min_poss_depth=-1, + q_vars=None,k_vars=None,orig_selector=None): # selects should be told their depth, -1 will warn of problems properly + if None is name: # as in unfinishedsequence, some other function might have passed in a None somewhere + name = plain_unfinished_select_name # so fix before a print goes wrong + super(UnfinishedSelect, self).__init__(parents_tuple,parents2self,name=name,min_poss_depth=min_poss_depth) + self.compare_string = str(self.creation_order_id) if None is compare_string else compare_string + assert not None in [q_vars,k_vars] # they're not really optional i just dont want to add more mess to the func + self.q_vars = q_vars # don't actually need them, but useful for + self.k_vars = k_vars # drawing comp flow + # use compare string for comparison/uniqueness rather than overloading __eq__ of unfinishedselect, + # to avoid breaking things in unknown locations, and to be able to put selects in dictionaries + # and stuff (overloading __eq__ makes an object unhasheable unless i guess you overload the + # hash too?). 
need these comparisons for optimisations in analysis eg if two selects are identical + # they can be same head + self.orig_selector = orig_selector # for comfortable compositions of selectors + def __str__(self): + return "UnfinishedSelect object, name: "+self.name+" id: "+str(self.creation_order_id) + + +# some tiny bit of sugar that fits here: +def is_sequence_of_unfinishedseqs(seqs): + if not isinstance(seqs,Iterable): + return False + return False not in [isinstance(seq,UnfinishedSequence) for seq in seqs] + +class BareBonesFunctionalSupportException(Exception): + def __init__(self,m): + Exception.__init__(self,m) + +def to_tuple_of_unfinishedseqs(seqs): + if is_sequence_of_unfinishedseqs(seqs): + return tuple(seqs) + if isinstance(seqs,UnfinishedSequence): + return (seqs,) + print("seqs:",seqs) + raise BareBonesFunctionalSupportException( + "input to select/aggregate not an unfinished sequence or sequence of unfinished sequences") + +tup2tup = lambda *x:tuple([*x]) +class UnfinishedSequencesTuple(Unfinished): + def __init__(self,parents_tuple,parents2self=None): + # sequence tuples only exist in here, user doesn't 'see' them. can have lots of default values + # they're just a convenience for me + if None is parents2self: # just sticking a bunch of unfinished sequences together into one thing for reasons + parents2self = tup2tup + parents_tuple = to_tuple_of_unfinishedseqs(parents_tuple) + assert is_sequence_of_unfinishedseqs(parents_tuple) and isinstance(parents_tuple,tuple) + # else - probably creating several sequences at once from one aggregate + super(UnfinishedSequencesTuple, self).__init__(parents_tuple,parents2self,name="plain unfinished tuple") + def __add__(self,other): + assert isinstance(other,UnfinishedSequencesTuple) + assert self.parents2self is tup2tup + assert other.parents2self is tup2tup + return UnfinishedSequencesTuple(self.parents_tuple+other.parents_tuple) + + +_input = Unfinished((),None,is_toplevel_input=True) +#### and now, the actual exposed functions +indices = UnfinishedSequence((_input,),lambda w:Sequence(list(range(len(w)))),name=plain_indices) +tokens_str = UnfinishedSequence((_input,),lambda w:Sequence(list(map(str,w))),name=plain_tokens+"_str") +tokens_int = UnfinishedSequence((_input,),lambda w:Sequence(list(map(int,w))),name=plain_tokens+"_int") +tokens_float = UnfinishedSequence((_input,),lambda w:Sequence(list(map(float,w))),name=plain_tokens+"_float") +tokens_bool = UnfinishedSequence((_input,),lambda w:Sequence(list(map(bool,w))),name=plain_tokens+"_bool") +tokens_asis = UnfinishedSequence((_input,),lambda w:Sequence(w),name=plain_tokens+"_asis") +base_tokens = [tokens_str,tokens_int,tokens_float,tokens_bool,tokens_asis] + +def _min_poss_depth(unfs): + if isinstance(unfs,Unfinished): # got single unfinished and not iterable of them + unfs = [unfs] + return max([u.min_poss_depth for u in unfs]+[0]) # max b/c cant go less deep than deepest + # add that 0 thing so list is never empty and max complains. 
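# Illustrative sketch, not part of the patch above: evaluating the base s-ops
# defined here. The inputs ("hey", [1, 0, 1]) are hypothetical; behaviour follows
# Unfinished.__call__ and _min_poss_depth as written.
#
#   indices("hey")         # -> Sequence([0, 1, 2]), computed by pulling w through _input
#   tokens_str([1, 0, 1])  # -> Sequence(["1", "0", "1"])
#   indices("hey")         # same w again: returns the cached last_res, no recomputation
#
#   _min_poss_depth([indices, tokens_str])  # -> 0: the base s-ops come straight from the
#                                           #    embedding, so they need no layers of their own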
+ +def tupleise(v): + if isinstance(v,tuple) or isinstance(v,list): + return tuple(v) + return (v,) + +def select(q_vars,k_vars,selector,name=None,compare_string=None): + if None is name: + name = "plain select" + # potentially here check the qvars all reference the same input sequence as each other and same for the kvars, + # technically dont *have* to but is helpful for the user so consider maybe adding a tiny bit of mess here + # (including markings inside sequences and selectors so they know which index they're gathering to and from) + # to allow it + q_vars = tupleise(q_vars) # we're ok with getting a single q or k var, not in a tuple, + k_vars = tupleise(k_vars) # but important to fix it before '+' on two UnfinishedSequences + # (as opposed to two tuples) sends everything sideways + new_depth = _min_poss_depth(q_vars+k_vars)+1 # attn layer is one after values it needs to be calculated + res = UnfinishedSelect((_input, # need input seq length to create select of correct size + UnfinishedSequencesTuple(q_vars), + UnfinishedSequencesTuple(k_vars) ), + lambda input_seq,qv,kv: _select(len(input_seq),qv,kv,selector), + name=name,compare_string=compare_string,min_poss_depth=new_depth, + q_vars=q_vars,k_vars=k_vars,orig_selector=selector) + return res + +def _compose_selects(select1,select2,compose_op=None,name=None,compare_string=None): + nq1 = len(select1.q_vars) + nq2 = len(select2.q_vars)+nq1 + nk1 = len(select1.k_vars)+nq2 + + def new_selector(*qqkk): + q1 = qqkk[:nq1] + q2 = qqkk[nq1:nq2] + k1 = qqkk[nq2:nk1] + k2 = qqkk[nk1:] + return compose_op(select1.orig_selector(*q1,*k1), select2.orig_selector(*q2,*k2)) + return select(select1.q_vars+select2.q_vars, + select1.k_vars+select2.k_vars, + new_selector,name=name,compare_string=compare_string) + +def _compose_select(select1,compose_op=None,name=None,compare_string=None): + def new_selector(*qk): + return compose_op(select1.orig_selector(*qk)) + return select(select1.q_vars, + select1.k_vars, + new_selector,name=name,compare_string=compare_string) + +def not_select(select,name=None,compare_string=None): + return _compose_select(select,lambda a:not a, + name=name,compare_string=compare_string) + +def and_selects(select1,select2,name=None,compare_string=None): + return _compose_selects(select1,select2,lambda a,b:a and b, + name=name,compare_string=compare_string) + +def or_selects(select1,select2,name=None,compare_string=None): + return _compose_selects(select1,select2,lambda a,b:a or b, + name=name,compare_string=compare_string) + +def example_input(): + return [100] # all the input types (str, int, float, bool), should be able to convert 0 to their own type when go to evaluate tokens + # when come to have multiple inputs, this wont fly, becuase will be expecting list of iterables. 
consider making optional argument + # that says 'i have not broken it into a list of the different inputs, it is presented here with seperators' which of course it cant + # be because the seperators will be special classes to avoid trouble, but basically it will just interpret it as all sequences except + # the first being empty + +def format_output(example_output,parents_tuple,parents2res,name,elementwise_function=None, + default=None,min_poss_depth=0,from_zipmap=False, + definitely_uses_identity_function=False): + if not isinstance(example_output,tuple): + return UnfinishedSequence(parents_tuple,parents2res, + elementwise_function=elementwise_function,default=default, + name=name,min_poss_depth=min_poss_depth,from_zipmap=from_zipmap, + definitely_uses_identity_function=definitely_uses_identity_function) + else: + num_outputs = len(example_output) + names = name + if not isinstance(names,list) or isinstance(names,tuple): + names = [names]*num_outputs + assert len(names) == num_outputs + def get_ith_output(i): + return lambda x:x[i] # would have this lambda directly below, + # but python will have this horrible thing where it then takes + # the last value that the variable 'i' had for the lambda + unfinished_results_tuple = UnfinishedSequencesTuple(parents_tuple,parents2res) + return tuple( UnfinishedSequence((unfinished_results_tuple,), + get_ith_output(i), + elementwise_function=elementwise_function, + default=default, + name=names[i], + min_poss_depth=min_poss_depth, + from_zipmap=from_zipmap, + output_index=i, + definitely_uses_identity_function=definitely_uses_identity_function) + for i in range(num_outputs)) + +def get_identity_function(num_params): + def identity1(a): + return a + def identityx(*a): + return a + return identity1 if num_params==1 else identityx + +def zipmap(sequences_tuple,elementwise_function,name=plain_unfinished_sequence_name): + sequences_tuple = tupleise(sequences_tuple) + unfinished_parents_tuple = UnfinishedSequencesTuple(sequences_tuple) # this also takes care of turning the + # value in sequences_tuple to indeed a tuple of sequences and not eg a single sequence which will + # cause weird behaviour later + example_parents = unfinished_parents_tuple(example_input()) + example_output = _zipmap(len(example_input()),example_parents,elementwise_function) + parents_tuple = (_input,unfinished_parents_tuple) + parents2res = lambda w,vt: _zipmap(len(w),vt,elementwise_function) + min_poss_depth = _min_poss_depth(sequences_tuple) # feedforward doesn't increase layer + # new assumption, to be revised later: can do arbitrary zipmap even before first feed-forward, + # i.e. in build up to first attention. truth is can do 'simple' zipmap towards first attention + # (no xor, but yes things like 'and' or 'indicator for ==' or whatever) based on initial linear + # translation done for Q,K in attention (not deep enough for xor, but deep enough for simple stuff) + # alongside use of initial embedding. honestly literally can just put everything in initial embedding + # if need it so bad its the first layer and its zipmap its only a function of the token and indices, + # so long as its not computing any weird combination between them you can do it in the embedding + # if len(sequences_tuple)>0: + # min_poss_depth = max(min_poss_depth,1) # except for the very specific case where + # # it is the very first thing to be done, in which case we do have to go through + # # one layer to get to the first feedforward. 
+ # # the 'if' is there to rule out increasing when doing a feedforward on nothing, + # # ie, when making a constant. constants are allowed to be created on layer 0, they're + # # part of the embedding or the weights that will use them later or whatever, it's fine + return format_output(example_output,parents_tuple,parents2res,name, + min_poss_depth=min_poss_depth,elementwise_function=elementwise_function, + from_zipmap=True) # at least as deep as needed MVs, but no + # deeper cause FF (which happens at end of layer) + +def aggregate(select,sequences_tuple,elementwise_function=None, + default=None,name=plain_unfinished_sequence_name): + sequences_tuple = tupleise(sequences_tuple) + definitely_uses_identity_function = None is elementwise_function + if definitely_uses_identity_function: + elementwise_function = get_identity_function(len(sequences_tuple)) + unfinished_parents_tuple = UnfinishedSequencesTuple(sequences_tuple) + example_output = _aggregate(select(example_input()), + unfinished_parents_tuple(example_input()),elementwise_function,default=default) + parents_tuple = (select,unfinished_parents_tuple) + parents2res = lambda s,vt:_aggregate(s,vt,elementwise_function,default=default) + return format_output(example_output,parents_tuple,parents2res,name, + elementwise_function=elementwise_function,default=default, + min_poss_depth=max(_min_poss_depth(sequences_tuple)+1,select.min_poss_depth), + definitely_uses_identity_function=definitely_uses_identity_function) + # at least as deep as needed attention and at least one deeper than needed MVs + +########### up to here was just plain transformer 'assembly'. any addition is a lie ############## +##################### now begin the bells and whistles ########################################### + +def UnfinishedSequenceFunc(f): + setattr(UnfinishedSequence,f.__name__,f) + +def UnfinishedFunc(f): + setattr(Unfinished,f.__name__,f) + + + +@UnfinishedSequenceFunc +def allow_suppressing_display(self): + self.always_display = False + return self # return self to allow chaining with other calls and throwing straight into a return statement etc + +# later, we will overload == for unfinished sequences, such that it always returns another +# unfinished sequence. unfortunately this creates the following upsetting behaviour: +# "a in l" and "a==b" always evaluates to true for any unfinishedsequences a,b and non-empty +# list l, and any item a and list l containing at least one unfinished sequence. hence, to +# check if a sequence is really in a list we have to do it ourselves, some other way. + +def guarded_compare(seq1,seq2): + if isinstance(seq1,UnfinishedSequence) or isinstance(seq2,UnfinishedSequence): + return seq1 is seq2 + return seq1 == seq2 + +def guarded_contains(l,a): + if isinstance(a,Unfinished): + return True in [(a is e) for e in l] + else: + l = [e for e in l if not isinstance(e,Unfinished)] + return a in l diff --git a/RASP_support/RASP.g4 b/RASP_support/RASP.g4 new file mode 100644 index 0000000..751d580 --- /dev/null +++ b/RASP_support/RASP.g4 @@ -0,0 +1,80 @@ +//compile with: +// antlr4 -Dlanguage=Python3 -visitor RASP.g4 -o zzantlr/ +grammar RASP; +r : (statement)+? EOF ; +statement : raspstatement Comment? | replstatement Comment? | Comment ; +raspstatement : expr ';' | assign ';' | draw ';' | loadFile ';'| funcDef | forLoop ; +replstatement : setExample | showExample | toggleExample | toggleSeqVerbose | exit ; +setExample : 'set' subset=('s-op'|'selector')? 'example' example=expr ; +showExample : 'show' subset=('s-op'|'selector')? 
'example'; +toggleSeqVerbose : 'full seq display' switch=('on'|'off'); +toggleExample : subset=('s-op'|'selector')? 'examples' switch=('on'|'off'); +exit : 'exit()' | 'exit' | 'quit' | 'quit()' ; + +loadFile : 'load' filename=String; +assign : var=idsList '=' val=exprsList; +draw : 'draw' '(' unf=expr (',' inputseq=expr)?')'; +exprsList : first=expr (',' cont=exprsList)?; +namedExprsList: first=namedExpr (',' cont=namedExprsList)?; +namedExpr: key=expr ':' val=expr; +raspstatementsList : first=raspstatement Comment? (cont=raspstatementsList)? | Comment (cont=raspstatementsList)? ; +funcDef: 'def' name=ID '(' (arguments=idsList)? ')' '{' + commentsList? (mainbody=raspstatementsList)? //(mainbody=assignsAndCommentsList)? + retstatement=returnStatement commentsList? '}'; +forLoop: 'for' iterator=idsList 'in' iterable=expr '{' mainbody=raspstatementsList '}'; +commentsList: Comment (cont=commentsList)?; +assignsAndCommentsList : first=assign ';' Comment? (cont=assignsAndCommentsList)? + | Comment (cont=assignsAndCommentsList)?; +returnStatement : 'return' res=exprsList ';'; +idsList : first=ID (',' cont=idsList)?; +aggregateExpr: 'aggregate(' sel=expr ',' seq=expr (',' default=expr)? ')'; +atom : anint=PosInt | afloat=Float | astring=String; + + +expr + : '(' bracketed=expr ')' + | indexable=expr '[' index=expr ']' // just fails if bad index + | unfORfun=expr '(' (inputexprs=exprsList)? ')' // bit problematic cause if unfORfun is an unf + //this is actually not an expression, make sure such cases + // get caught and handled properly + | uop=('not'|'-'|'+') uexpr=expr + | uop=('round'|'indicator') '(' uexpr=expr ')' + | left=expr bop='^' right=expr + | left=expr bop=('*'|'/') right=expr + | left=expr bop='%' right=expr + | left=expr bop=('+'|'-') right=expr + | left=expr bop=('=='|'<='|'>='|'>'|'<') right=expr + | 'select(' key=expr ',' query=expr ',' selop=('=='|'<'|'>'|'>='|'<='|'!=') ')' + | left=expr bop=('and'|'or') right=expr + | res1=expr 'if' cond=expr 'else' res2=expr + | var=ID + | standalone=atom + | aList | aDict + | aggregate=aggregateExpr + | 'range(' rangevals=exprsList ')' + | listcomp=listCompExpr + | dictcomp=dictCompExpr + | contained=expr 'in' container=expr + | 'zip(' lists=exprsList ')' + | 'len(' singleList=expr ')' + ; + +aList: '[' listContents=exprsList? ']'; +aDict: '{' dictContents=namedExprsList? '}'; + + +listCompExpr : '[' val=expr 'for' iterator=idsList 'in' iterable=expr ']'; +dictCompExpr : '{' key=expr ':' val=expr 'for' iterator=idsList 'in' iterable=expr '}'; +// negative ints come from the expression expr -> '-' expr -> '-' atom -> '-' PosInt +// bools are stored in the environment as reserved words, don't need to be in the grammar (only 2 vals) +// (recognised by the grammar as an identifier) + +Float : PosInt'.'PosInt ;// no fancy floats here sir, this is a simple operation +PosInt : [0-9]+ ; +// CommentContent: ~[\r\n]+ ; // keep going until newline +String: '"' ~["\r\n]* '"'; +Comment : '#' ~( '\r' | '\n' )* ; + +ID: [a-zA-Z_] [a-zA-Z_0-9]*; +WS : [ \t\r\n]+ -> skip ; // skip spaces, tabs, newlines +// ErrorChars : .+? 
; \ No newline at end of file diff --git a/RASP_support/RASP.sublime-syntax b/RASP_support/RASP.sublime-syntax new file mode 100644 index 0000000..ccb6080 --- /dev/null +++ b/RASP_support/RASP.sublime-syntax @@ -0,0 +1,128 @@ +%YAML 1.2 +--- +## for Mac, keep in: ~/Library/Application Support/Sublime Text 3/Packages/User/RASP.sublime-syntax + + + +# See http://www.sublimetext.com/docs/3/syntax.html + +# how to name the scopes: https://www.sublimetext.com/docs/3/scope_naming.html +# (easiest solution though: open some python file in sublime, +# write a 'parallel' python expression to what you want to name, +# and cmd-alt-p while cursor on it to see how python sublime-syntax scopes it) +# not very helpful, but: what this file looks like for python: +# https://github.com/sublimehq/Packages/blob/master/Python/Python.sublime-syntax + +name: RASP +file_extensions: [rasp] +scope: source.rasp + +variables: + identifier_continue: '[[:alnum:]_]' + identifier: '\b[[:alpha:]_]{{identifier_continue}}*\b' + +contexts: + # The prototype context is prepended to all contexts but those setting + # meta_include_prototype: false. + prototype: + - include: comments + + main: + # The main context is the initial starting point of our syntax. + # Include other contexts from here (or specify them directly). + - include: keywords + - include: numbers + - include: strings + - include: functions + - include: function_call + - include: constants + + constants: + - match: '\b(indices|length|tokens_str|tokens_int|tokens_bool|tokens_float|True|False)\b' + scope: constant.language.rasp + + keywords: + # Keywords are if, else for and while. + # Note that blackslashes don't need to be escaped within single quoted + # strings in YAML. When using single quoted strings, only single quotes + # need to be escaped: this is done by using two single quotes next to each + # other. + - include: operators + - match: '\b(if|else|for|in|return)\b' + scope: keyword.control.rasp + - match: '^\s*(?:(async)\s+)?(def)\b' + scope: storage.type.function.rasp + push: function_name + - match: '\b(select|aggregate|selector_width)\b' + scope: support.function.builtin.base.rasp + - match: '\b(zip|range|len)\b' + scope: support.function.builtin.lists.rasp + - match: '\b(round|indicator)\b' + scope: support.function.builtin.elementwise.rasp + - match: '({{identifier}})\.(get)' + captures: + 1 : meta.generic-name.rasp + 2 : support.function.builtin.rasp + + operators: + - match: '(-|\+|\*|/|\^|%)' + scope: keyword.operator.arithmetic.rasp + - match: '(=)' + scope: keyword.operator.assignment.rasp + - match: '(==|!=|>=|<=|>|<)' + scope: keyword.operator.comparison.rasp + - match: '\b(and|or|not)\b' + scope: keyword.operator.logical.rasp + + function_call: + - match: '({{identifier}})(\()' + captures: + 1 : variable.function.rasp + 2 : punctuation.section.arguments.begin.rasp + + + function_name: + - match: '{{identifier}}' + scope: entity.name.function.rasp + - match: '\(' + push: function_params + - match: '{' + pop: true + + function_params: + - match: '({{identifier}})' + scope: variable.parameter.rasp + - match: ',' + - match: '\)' + pop: true + + numbers: + - match: '\b(-)?[0-9]+([.][0-9])?+\b' + scope: constant.numeric.rasp + + strings: + # Strings begin and end with quotes, and use backslashes as an escape + # character. + - match: '"' + scope: punctuation.definition.string.begin.rasp + push: inside_string + + inside_string: + - meta_include_prototype: false + - meta_scope: string.quoted.double.rasp + - match: '\.' 
+ scope: constant.character.escape.rasp + - match: '"' + scope: punctuation.definition.string.end.rasp + pop: true + + comments: + # Comments begin with a '//' and finish at the end of the line. + - match: '#' + scope: punctuation.definition.comment.rasp + push: + # This is an anonymous context push for brevity. + - meta_scope: comment.line.hashtag.rasp + - match: $\n? + pop: true + diff --git a/RASP_support/REPL.py b/RASP_support/REPL.py new file mode 100644 index 0000000..10a4213 --- /dev/null +++ b/RASP_support/REPL.py @@ -0,0 +1,503 @@ +from antlr4 import CommonTokenStream, InputStream +from collections.abc import Iterable + +from zzantlr.RASPLexer import RASPLexer +from zzantlr.RASPParser import RASPParser +from zzantlr.RASPVisitor import RASPVisitor + +from Environment import Environment, UndefinedVariable, ReservedName +from FunctionalSupport import UnfinishedSequence, UnfinishedSelect, Unfinished +from Evaluator import Evaluator, NamedVal, NamedValList, JustVal, \ + RASPFunction, ArgsError, RASPTypeError, RASPValueError +from Support import Select, Sequence + +encoder_name = "s-op" + +class ResultToPrint: + def __init__(self,res,to_print): + self.res, self.print = res, to_print + +class LazyPrint: + def __init__(self,*a,**kw): + self.a, self.kw = a, kw + def print(self): + print(*self.a,**self.kw) + +class StopException(Exception): + def __init__(self): + super().__init__() + +debug = True + +def debprint(*a,**kw): + if debug: + print(*a,**kw) + +class ReturnExample: + def __init__(self,subset): + self.subset = subset + +class LoadError(Exception): + def __init__(self,msg): + super().__init__(msg) + +def is_comment(line): + if not isinstance(line,str): + return False + return line.strip().startswith("#") + +def formatstr(res): + if isinstance(res,str): + return "\""+res+"\"" + return str(res) + +class REPL: + def __init__(self): + self.env = Environment(name="console") + self.sequence_running_example = "hello" + self.selector_running_example = "hello" + self.sequence_prints_verbose = False + self.show_sequence_examples = True + self.show_selector_examples = True + self.results_to_print = [] + self.print_welcome() + self.load_base_libraries_and_make_base_env() + + def load_base_libraries_and_make_base_env(self): + self.silent = True + self.base_env = self.env.snapshot() # base env: the env from which every load begins + # bootstrap base_env with current (basically empty except indices etc) env, then load + # the base libraries to build the actual base env + for l in ["RASP_support/rasplib"]: + self.run_given_line("load \""+l+"\";") + self.base_env = self.env.snapshot() + self.silent = False + + + def set_running_example(self,example,which="both"): + if which in ["both",encoder_name]: + self.sequence_running_example = example + if which in ["both","selector"]: + self.selector_running_example = example + + def print_welcome(self): + print("RASP 0.0") + print("running example is:",self.sequence_running_example) + + def print_just_val(self,justval): + val = justval.val + if None is val: + return + if isinstance(val,Select): + print("\t = ") + print_select(val.created_from_input,val) + elif isinstance(val,Sequence) and self.sequence_prints_verbose: + print("\t = ",end="") + print_seq(val.created_from_input,val,still_on_prev_line=True) + else: + print("\t = ",str(val).replace("\n","\n\t\t\t")) + + def print_named_val(self,name,val,ntabs=0,extra_first_pref=""): + pref="\t"*ntabs + if (None is name) and isinstance(val,Unfinished): + name = val.name + if isinstance(val,UnfinishedSequence): + 
print(pref,extra_first_pref," "+encoder_name+":",name) + if self.show_sequence_examples: + if self.sequence_prints_verbose: + print(pref,"\t Example:",end="") + optional_exampledesc = name+"("+formatstr(self.sequence_running_example)+") =" + print_seq(self.selector_running_example,val(self.sequence_running_example),still_on_prev_line=True, + extra_pref=pref,lastpref_if_shortprint=optional_exampledesc) + else: + print(pref,"\t Example:",name+"("+formatstr(self.sequence_running_example)+\ + ") =",val(self.sequence_running_example)) + elif isinstance(val,UnfinishedSelect): + print(pref,extra_first_pref," selector:",name) + if self.show_selector_examples: + print(pref,"\t Example:")#,name+"("+formatstr(self.selector_running_example)+") =") + print_select(self.selector_running_example,val(self.selector_running_example),extra_pref=pref) + elif isinstance(val,RASPFunction): + print(pref,extra_first_pref," "+str(val)) + elif isinstance(val,list): + named = " list: "+((name+" = ") if not None is name else "") + print(pref,extra_first_pref,named,end="") + flat = True not in [isinstance(v,list) or isinstance(v,dict) or isinstance(v,Unfinished) for v in val] + if flat: + print(val) + else: + print(pref,"[") + for v in val: + self.print_named_val(None,v,ntabs=ntabs+2) + print(pref," "*len(named),"]") + elif isinstance(val,dict): + named = " dict: "+((name+" = ") if not None is name else "") + print(pref,extra_first_pref,named,end="") + flat = True not in [isinstance(val[v],list) or isinstance(val[v],dict) or isinstance(val[v],Unfinished) for v in val] + if flat: + print(val) + else: + print(pref,"{") + for v in val: + self.print_named_val(None,val[v],ntabs=ntabs+3,extra_first_pref=formatstr(v)+" : ") + print(pref," "*len(named),"}") + + else: + print(pref," value:",((name+" = ") if not None is name else ""),formatstr(val)) + + def print_example(self,nres): + if nres.subset in ["both",encoder_name]: + print("\t"+encoder_name+" example:",formatstr(self.sequence_running_example)) + if nres.subset in ["both","selector"]: + print("\tselector example:",formatstr(self.selector_running_example)) + + def print_result(self,rp): + if self.silent: + return + if isinstance(rp,LazyPrint): + return rp.print() + if isinstance(rp,list): # a list of multiple ResultToPrint s -- probably the result of a multi-assignment + for v in rp: + self.print_result(v) + return + if not rp.print: + return + res = rp.res + if isinstance(res,NamedVal): + self.print_named_val(res.name,res.val) + elif isinstance(res,ReturnExample): + self.print_example(res) + elif isinstance(res,JustVal): + self.print_just_val(res) + + def evaluate_replstatement(self,ast): + if ast.setExample(): + return ResultToPrint(self.setExample(ast.setExample()), False) + if ast.showExample(): + return ResultToPrint(self.showExample(ast.showExample()), True) + if ast.toggleExample(): + return ResultToPrint(self.toggleExample(ast.toggleExample()), False) + if ast.toggleSeqVerbose: + return ResultToPrint(self.toggleSeqVerbose(ast.toggleSeqVerbose()), False) + if ast.exit(): + raise StopException() + + def toggleSeqVerbose(self,ast): + switch = ast.switch.text + self.sequence_prints_verbose = switch == "on" + + def toggleExample(self,ast): + subset = ast.subset + subset = "both" if not subset else subset.text + switch = ast.switch.text + examples_on = switch=="on" + if subset in ["both",encoder_name]: + self.show_sequence_examples = examples_on + if subset in ["both","selector"]: + self.show_selector_examples = examples_on + + def showExample(self,ast): + subset = 
ast.subset + subset = "both" if not subset else subset.text + return ReturnExample(subset) + + def setExample(self,ast): + example = Evaluator(self.env,self).evaluateExpr(ast.example) + if not isinstance(example,Iterable): + raise RASPTypeError("example not iterable: "+str(example)) + subset = ast.subset + subset = "both" if not subset else subset.text + self.set_running_example(example,subset) + return ReturnExample(subset) + + def loadFile(self,ast,calling_env=None): + if None is calling_env: + calling_env = self.env + libname = ast.filename.text[1:-1] + filename = libname + ".rasp" + try: + with open(filename,"r") as f: + prev_example_settings = self.show_sequence_examples, self.show_selector_examples + self.show_sequence_examples, self.show_selector_examples = False, False + self.run(fromfile=f,env = Environment(name=libname,parent_env=self.base_env,stealing_env=calling_env),store_prints=True) + self.filter_and_dump_prints() + self.show_sequence_examples, self.show_selector_examples = prev_example_settings + except FileNotFoundError: + raise LoadError("could not find file: "+filename) + + def get_tree(self,fromfile=None): + try: + return LineReader(fromfile=fromfile).get_input_tree() + except AntlrException as e: + print("\t!! antlr exception:",e.msg,"\t-- ignoring input") + return None + + def run_given_line(self,line): + try: + tree = LineReader(given_line=line).get_input_tree() + if isinstance(tree,Stop): + return None + rp = self.evaluate_tree(tree) + if isinstance(rp,LazyPrint): + rp.print() # error messages get raised, but ultimately have to be printed somewhere if not caught? idk + except AntlrException as e: + print("\t!! REPL failed to run initiating line:",line) + print("\t --got antlr exception:",e.msg) + return None + + def assigned_to_top(self,res,env): + if env is self.env: + return True + # we are now definitely inside some file, the question is whether we have taken + # the result and kept it in the top level too, i.e., whether we have imported a non-private value. + # checking whether it is also in self.env, even identical, will not tell us much as it may have been here and the same + # already. so we have to replicate the logic here. + if not isinstance(res,NamedVal): + return False # only namedvals get set to begin with + if res.name.startswith("_") or (res.name=="out"): + return False + return True + + def evaluate_tree(self,tree,env=None): + if None is env: + env = self.env # otherwise, can pass custom env + # (e.g. when loading from a file, make env for that file, + # to keep that file's private (i.e. underscore-prefixed) variables to itself) + if None is tree: + return ResultToPrint(None,False) + try: + if tree.replstatement(): + return self.evaluate_replstatement(tree.replstatement()) + elif tree.raspstatement(): + res = Evaluator(env,self).evaluate(tree.raspstatement()) + if isinstance(res,NamedValList): + return [ResultToPrint(r,self.assigned_to_top(r,env)) for r in res.nvs] + return ResultToPrint(res, self.assigned_to_top(res,env)) + except (UndefinedVariable, ReservedName) as e: + return LazyPrint("\t\t!!ignoring input:\n\t",e) + except NotImplementedError: + return LazyPrint("not implemented this command yet! 
ignoring") + except (ArgsError,RASPTypeError,LoadError,RASPValueError) as e: + return LazyPrint("\t\t!!ignoring input:\n\t",e) + # if not replstatement or raspstatement, then comment + return ResultToPrint(None,False) + + def filter_and_dump_prints(self): + # TODO: some error messages are still rising up and getting printed before reaching this position :( + def filter_named_val_reps(rps): + # do the filtering. no namedvallists here - those are converted into a list of ResultToPrint s + # containing NamedVal s immediately after receiving them in evaluate_tree + res = [] + names = set() + for r in rps[::-1]: # go backwards - want to print the last occurence of each named item, not first, so filter works backwards + if isinstance(r.res,NamedVal): + if r.res.name in names: + continue + names.add(r.res.name) + res.append(r) + return res[::-1] # flip back forwards + + if not True in [isinstance(v,LazyPrint) for v in self.results_to_print]: + self.results_to_print = filter_named_val_reps(self.results_to_print) + # if isinstance(res,NamedVal): + # self.print_named_val(res.name,res.val) + # + # print all that needs to be printed: + for r in self.results_to_print: + if isinstance(r,LazyPrint): + r.print() + else: + self.print_result(r) + # clear the list + self.results_to_print = [] + + + + + def run(self,fromfile=None,env=None,store_prints=False): + def careful_print(*a,**kw): + if store_prints: + self.results_to_print.append(LazyPrint(*a,**kw)) + else: + print(*a,**kw) + while True: + try: + tree = self.get_tree(fromfile) + if isinstance(tree,Stop): + break + rp = self.evaluate_tree(tree,env) + if store_prints: + if isinstance(rp,list): + self.results_to_print += rp # multiple results given - a multi-assignment + else: + self.results_to_print.append(rp) + else: + self.print_result(rp) + except RASPTypeError as e: + careful_print("\t!!statement executed, but result fails on evaluation:\n\t\t",e) + except EOFError: + careful_print("") + break + except StopException: + break + except KeyboardInterrupt: + careful_print("") # makes newline + except Exception as e: + if debug: + raise e + careful_print("something went wrong:",e) + + + + +from antlr4.error.ErrorListener import ErrorListener + + +class AntlrException(Exception): + def __init__(self,msg): + self.msg = msg + +class InputNotFinished(Exception): + def __init__(self): + pass + +class MyErrorListener( ErrorListener ): + def __init__(self): + super(MyErrorListener, self).__init__() + + def syntaxError(self, recognizer, offendingSymbol, line, column, msg, e): + if offendingSymbol and offendingSymbol.text == "": + raise InputNotFinished() + if msg.startswith("missing ';' at"): + raise InputNotFinished() + if "mismatched input" in msg: + a=str(offendingSymbol) + b=a[a.find("=")+2:] + c=b[:b.find(",<")-1] + ae = AntlrException(msg) + ae.recognizer, ae.offendingSymbol, ae.line, ae.column, ae.msg, ae.e = recognizer, offendingSymbol, line, column, msg, e + raise ae + + # def reportAmbiguity(self, recognizer, dfa, startIndex, stopIndex, exact, ambigAlts, configs): + # raise AntlrException("ambiguity") + + # def reportAttemptingFullContext(self, recognizer, dfa, startIndex, stopIndex, conflictingAlts, configs): + # we're ok with this: happens with func defs it seems + + # def reportContextSensitivity(self, recognizer, dfa, startIndex, stopIndex, prediction, configs): + # we're ok with this: happens with func defs it seems + +class Stop: + def __init__(self): + pass + +class LineReader: + def __init__(self,prompt=">>",fromfile=None,given_line=None): + 
self.fromfile = fromfile + self.given_line = given_line + self.prompt = prompt + " " + self.cont_prompt = "."*len(prompt)+" " + + def str_to_antlr_parser(self,s): + antlrinput = InputStream(s) + lexer = RASPLexer(antlrinput) + lexer.removeErrorListeners() + lexer.addErrorListener( MyErrorListener() ) + stream = CommonTokenStream(lexer) + parser = RASPParser(stream) + parser.removeErrorListeners() + parser.addErrorListener( MyErrorListener() ) + return parser + + + def read_line(self,continuing=False,nest_depth=0): + prompt = self.cont_prompt if continuing else self.prompt + if not None is self.fromfile: + res = self.fromfile.readline() + if not res: # python files return "" on last line (as opposed to "\n" on empty lines) + return Stop() + return res + if not None is self.given_line: + res = self.given_line + self.given_line = Stop() + return res + else: + return input(prompt+(" "*nest_depth)) + + + def get_input_tree(self): + pythoninput="" + multiline = False + while True: + newinput = self.read_line(continuing=multiline, + nest_depth=pythoninput.split().count("def")) + if isinstance(newinput,Stop): # input stream ended + return Stop() + if is_comment(newinput): + newinput = "" # don't let comments get in and ruin things somehow + pythoninput += newinput # don't replace newlines! this is how in-function comments get broken .replace("\n","")+" " + parser = self.str_to_antlr_parser(pythoninput) + try: + res = parser.r().statement() + if isinstance(res,list): + # TODO: this seems to happen when there's ambiguity. figure out what is going on!! + assert len(res)==1 + res = res[0] + return res + except InputNotFinished: + multiline = True + pythoninput+=" " + + +def print_seq(example,seq,still_on_prev_line=False,extra_pref="",lastpref_if_shortprint=""): + if len(set(seq.get_vals()))==1: + print(extra_pref if not still_on_prev_line else "", + lastpref_if_shortprint, + str(seq)) # when there is only one value, it's nicer to just print that than the full list, verbosity be damned + return + if still_on_prev_line: + print("") + + seq = seq.get_vals() + def cleanboolslist(seq): + if isinstance(seq[0],bool): + tstr = "T" if seq.count(True) <= seq.count(False) else "" + fstr = "F" if seq.count(False) <= seq.count(True) else "" + return [tstr if v else fstr for v in seq] + else: + return seq + + example = cleanboolslist(example) + seq = cleanboolslist(seq) + example = [str(v) for v in example] + seq = [str(v) for v in seq] + maxlen = max(len(v) for v in example+seq) + + + def neatline(seq): + def padded(s): + return " "*(maxlen-len(s))+s + return " ".join(padded(v) for v in seq) + print(extra_pref,"\t\tinput: ",neatline(example)) + print(extra_pref,"\t\toutput: ",neatline(seq)) + +def print_select(example,select,extra_pref=""): + # .replace("\n","\n\t\t\t") + def nice_matrix_line(m): + return " ".join("1" if v else " " for v in m) + print(extra_pref,"\t\t\t "," ".join(str(v) for v in example)) + matrix = select.get_vals() + [print(extra_pref,"\t\t\t",v,"|",nice_matrix_line(matrix[m])) for v,m in zip(example,matrix)] + + +if __name__ == "__main__": + REPL().run() + +def runner(): + a = REPL() + try: + a.run() + except Exception as e: + print(e) + return a,e + return a,None \ No newline at end of file diff --git a/RASP_support/Sugar.py b/RASP_support/Sugar.py new file mode 100644 index 0000000..ef4905e --- /dev/null +++ b/RASP_support/Sugar.py @@ -0,0 +1,307 @@ +from FunctionalSupport import indices, tokens_str, tokens_int, tokens_float, tokens_asis, \ +tokens_bool, or_selects, and_selects, not_select 
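+# rough usage sketch, for orientation only (flip_s and the operator overloads are defined further down this file):
+#   doubled = indices * 2                          # '*' is sugar for zipmap(indices, lambda a: a*2), added by add_ops
+#   reversed_toks = aggregate(flip_s, tokens_str)  # read each position from its mirror-image index
+#   doubled("abc")                                 # unfinisheds are evaluated by calling them on an input, here giving [0, 2, 4]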
+from FunctionalSupport import select, aggregate, zipmap +from FunctionalSupport import UnfinishedSequence as _UnfinishedSequence +from FunctionalSupport import guarded_compare as _guarded_compare +from FunctionalSupport import guarded_contains as _guarded_contains +import DrawCompFlow # not at all necessary for sugar, but sugar is really the top-level tpl file we import, +# and nice to have draw_comp_flow added into the sequences already on load +from collections.abc import Iterable +from make_operators import add_ops + +def select_i(q_vars,f_get_index,name=None,compare_string=None): + return select(q_vars,indices,lambda *a:a[-1]==f_get_index(*(a[:-1]))) + +def _apply_unary_op(self,f): + return zipmap(self,f) + +def _apply_binary_op(self,other,f): + def seq_and_other_op(self,other,f): + return zipmap(self,lambda a:f(a,other)) + def seq_and_seq_op(self,other_seq,f): + return zipmap((self,other_seq),f) + if isinstance(other,_UnfinishedSequence): + return seq_and_seq_op(self,other,f) + else: + return seq_and_other_op(self,other,f) + +add_ops(_UnfinishedSequence,_apply_unary_op,_apply_binary_op) + + +def _seqname(seq): + def __seqname(seq): + return getattr(seq,"name",str(seq)) + return __seqname(seq) if not isinstance(seq,Iterable) else "("+", ".join(_seqname(m) for m in seq)+")" + +def _tupleise(seqs): + return tuple(seqs) if isinstance(seqs,Iterable) else (seqs,) + +def _addname(seq,name,default_name,always_display_when_named=True): + if None is name: + res = seq.setname(default_name,always_display_when_named=always_display_when_named).allow_suppressing_display() + else: + res = seq.setname(name,always_display_when_named=always_display_when_named) + return res + + +full_s = select((),(),lambda :True,name="full average",compare_string="full average") + +def tplconst(v,name=None): + return _addname(zipmap((),lambda :v),name,"constant: "+str(v),always_display_when_named=False).mark_as_constant() + # always_display_when_named = False : constants aren't worth displaying, but still going to name them in background, + # in case change mind about this + +def fixorder(seq1,seq2): + if isinstance(seq1,_UnfinishedSequence): + return seq1,seq2 + else: + return seq2,seq1 + +def tplmax(seq1,seq2,name=None): + seq1,seq2 = fixorder(seq1,seq2) # order doesnt matter in max, but need to have a sequence on left for _apply_binary_op + return _addname( _apply_binary_op(seq1,seq2,lambda x,y:max(x,y)),name, + "max("+_seqname(seq1)+", "+_seqname(seq2)+")") + +def tplmin(seq1,seq2,name=None): + seq1,seq2 = fixorder(seq1,seq2) # order doesnt matter in min, but need to have a sequence on left for _apply_binary_op + return _addname( _apply_binary_op(seq1,seq2,lambda x,y:min(x,y)),name, + "min("+_seqname(seq1)+", "+_seqname(seq2)+")") + +def average(seqs,f,name=None): + return _addname(aggregate(full_s,seqs,f),name,"full average") + +def frac_quality(seqs,f,name=None,allow_suppressing_display=False): + if None is name: + name = "(#quality)/length" + res = average(seqs,lambda *x:int(f(*x)),name=name) + if allow_suppressing_display: + res.always_display = False + return res + +def frac(seq,t,name=None): + return _addname(average(seq,lambda v:int(v==t)),name, + "(#"+str(t)+")/length") + +length = round(1/frac(indices,0)).setname("length") +flip_s = select_i((indices,length),lambda i,n:n-(i+1), + name="flip select",compare_string="full flip") + +# allow suppressing display for bool, not, and, or : all of these would have been boring operators if +# only python let me overload them + +# always have to call 
allow_suppressing_display after setname because setname marks the variable as +# crucial to display under assumption user named it + +def toseq(seq): + if not isinstance(seq,_UnfinishedSequence): + seq = tplconst(seq,str(seq)) + return seq + +def asbool(seq,name=None): + res = zipmap(seq,lambda a:bool(a)) + return _addname(res,name,"bool("+seq.name+")") + # would do res = seq==True but it seems this has different behaviour to bool eg 'bool(2)' + # is True but '2==True' returns False + +def asint(seq,name=None): + res = zipmap(seq,lambda a:int(a)) + return _addname(res,name,"int("+seq.name+")") + # would do res = seq==True but it seems this has different behaviour to bool eg 'bool(2)' + # is True but '2==True' returns False + +def tplnot(seq,name=None): + res = asbool(seq) == False # this one does correct conversion using asbool and then we really can just do ==False + return _addname(res,name,"( not "+str(seq.name)+" )") + +def count_trues_across_vars(*seqs,name=None): + res = asbool(seqs[0])*1 + for seq in seqs[1:]: + res += 1*asbool(seq) + return _addname(res,name,"num trues") + +def tpland(*seqs,name=None): + seqs = tuple(toseq(s) for s in seqs) + res = count_trues_across_vars(*seqs) == len(seqs) + return _addname(res,name,"( "+" and ".join(seq.name for seq in seqs)+")") + +def tplor(*seqs,name=None): + seqs = tuple(toseq(s) for s in seqs) + res = count_trues_across_vars(*seqs) > 0 + return _addname(res,name,"( "+" or ".join(seq.name for seq in seqs)+")") + +def tplxor(seq1,seq2,name=None): + res = tplor( tpland(seq1,tplnot(seq2)) , tpland(tplnot(seq1),seq2) ) + return _addname(res,name,"( "+seq1.name+" xor "+seq2.name+" )") + +def conditioned_contains(qseqs,kseqs,f,name=None,allow_suppressing_display=False,sel_name=None): + if None is name: + name = "conditioned contains" + s = select(qseqs,kseqs,f,name=sel_name) + res = aggregate(s,(),lambda :1,default=0,name=name+" (as number)") > 0 + res.setname(name) + if allow_suppressing_display: + res.allow_suppressing_display() + return res + +def count_conditioned(qseqs,kseqs,f,name=None): + # output 1 at 0th index and 0 everywhere else + # focus on 0th index and on all hits + # obtained fraction helps deduce how many hits you had except for 0th index + # add 1/0 depending on whether 0 is a hit + if not isinstance(kseqs,Iterable): + kseqs = (kseqs,) + kseqs = kseqs + (indices,) + s = select(qseqs,kseqs,lambda *a:f(*(a[:-1])) or (a[-1]==0),name="find all conditions, and 0.") + frac = aggregate(s,indices,lambda i:int(i==0), + name="1/(1+(n outside 0)), for "+(name if not None is name else "condition")) + count_outside_0 = (1/frac)-1 + count_outside_0.setname("count outside 0") + contains_in_0 = conditioned_contains(qseqs,kseqs,lambda *a:f(*(a[:-1])) and (a[-1]==0), + name="check 0, for "+(name if not None is name else "condition"), + allow_suppressing_display=True,sel_name="find condition, but only at 0") + contains_in_0.setname("count at 0") + res = count_outside_0 + contains_in_0 + if None is name: + name = "count conditioned" + return res.setname(name).allow_suppressing_display() # if goes straight into something else + +def contains_quality(seqs,f,name=None): + if None is name: + name = "contains quality" + return conditioned_contains((),seqs,f,name=name) + +def contains(seq,t,name=None): + if None is name: + name = "contains "+str(t) + return contains_quality(seq,lambda v:v==t,name=name) + +def count_quality(seqs,f,name=None,allow_suppressing_display=False): + if None is name: + name = "count of quality" + res = round( 
frac_quality(seqs,f,allow_suppressing_display=True) * length ) + res.setname(name) + if allow_suppressing_display: + res.allow_suppressing_display() + return res + +def count(seq,t,name=None): + if None is name: + name = "count "+str(t) + return count_quality(seq,lambda v:v==t,name=name) + +def shift_select(n): + # plain! selects from global indices + return select_i(indices,lambda i:i+n,name="shift "+str(n), + compare_string="full shift by "+str(n)) + +def index_select(i): + # plain! selects from global indices + name="select index "+str(i) + compare_string="full index seek ("+str(i)+")" + if i<0: + locseq = length+i + return select_i(locseq,lambda a:a,name=name,compare_string=compare_string) + else: + return select_i((),lambda :i,name=name,compare_string=compare_string) + + + +def load_from_target_index(i,seq,default,name=None): # can probably do a getitem overload for this, if not too confusing.. + if None is name: + name = seq.name+"["+str(i)+"]" + return aggregate(index_select(i),seq,default=default,name=name) + +def load_from_target_indices(locseq,seq,default,name=None): + assert not isinstance(locseq,Iterable) and not isinstance(seq,Iterable) + s = select_i(locseq,lambda a:a,compare_string="load from indices given by seq #"+str(locseq.creation_order_id)) + return aggregate(s,seq,default=default,name=name) + +def item_select(seq,val,name=None): + if None is name: + name = "focus on "+seq.name + return select((),seq,lambda v:v==val,"full seek of ["+repr(val)+"] in seq #"+str(seq.creation_order_id)) + +def get_shifted(seq,n,filler,name=None): + assert not isinstance(seq,Iterable) # just want the one here, makes more sense or maybe just am lazy + if None is name: + name = str(n)+"-shifted "+seq.name + return aggregate(shift_select(n),seq,default=filler,name=name) + +def mark_last_condition(seqs,f,name=None): + if None is name: + name = "last to satisfy f" + if not isinstance(seqs,Iterable): # just one seq + seqs = (seqs,) + satisfies_f = zipmap(seqs,f,name="satisfies f").allow_suppressing_display() + has_later = conditioned_contains(indices,(satisfies_f,indices),lambda i,sf,j:sf and (ij),name="exists earlier satisfying f").allow_suppressing_display() + return tpland(satisfies_f,tplnot(has_earlier),name=name) # the fact that we have given it a name will prevent suppression in display + +def mark_last_value(seq,v,name=None): + if None is name: + name = "last "+str(v)+" in "+seq.name + return mark_last_condition(seq,lambda e:e==v,name=name) + +def mark_first_value(seq,v,name=None): + if None is name: + name = "first "+str(v)+" in "+seq.name + return mark_first_condition(seq,lambda e:e==v,name=name) + +# def find_last_instance(seq,v,name=None): +# if None is name: +# name = "index of last instance of "+str(v) +# return find_last_condition(seq,lambda e:e==v,name=name) + +def select_from_last_condition(k_vars,f,name=None): # todo: generalise to select from i'th condition, with i's both positive and negative + # will write everywhere, but read only from k_vars + if None is name: + name = "select last satisfying f from k_vars" + return select((),mark_last_condition(k_vars,f).allow_suppressing_display(), + lambda a:a,name=name) + +def select_from_first_condition(k_vars,f,name=None): # todo: generalise to select from i'th condition, with i's both positive and negative + # will write everywhere, but read only from k_vars + if None is name: + name = "select first satisfying f from k_vars" + return select((),mark_first_condition(k_vars,f).allow_suppressing_display(), + lambda a:a,name=name) + +def 
select_from_last_value(k_var,v,name=None): + # will write everywhere, but read only from k_vars + if None is name: + name = "select last "+str(v) + assert not isinstance(k_var,Iterable), "got iterable k vars in select last "+str(v)+" in "+k_var.name + return select_from_last_condition(k_var,lambda e:e==v,name=name) + +def select_from_first_value(k_var,v,name=None): + # will write everywhere, but read only from k_vars + if None is name: + name = "select first "+str(v) + assert not isinstance(k_var,Iterable), "got iterable k vars in select last "+str(v)+" in "+k_var.name + return select_from_first_condition(k_var,lambda e:e==v,name=name) + +def sort(seqs,key=None,name=None): + keyname = "" if None is key else ", key="+_seqname(key) + if None is key: + assert not isinstance(seqs,Iterable) + key = seqs + num_smaller = count_conditioned((key,indices),(key,indices), + lambda vq,iq,vk,ik:(vk1): + return "["+small_str(self._vals[0])+"]*"+str(len(self._vals)) + return "["+", ".join(small_str(v) for v in self._vals)+"]" + + def __repr__(self): + return str(self) + + def __len__(self): + return len(self._vals) + + def get_vals(self): + return deepcopy(self._vals) + + +def dims_match(seqs,expected_dim): + return False not in [expected_dim == len(seq) for seq in seqs] + +class Select: + def __init__(self, n, q_vars, k_vars, f): + self.n = n + self.makeselect(q_vars,k_vars,f) + self.niceprint = None + + def get_vals(self): + if None is self.select: + self.makeselect() + return deepcopy(self.select) + + def makeselect(self,q_vars=None,k_vars=None,f=None): + if None is q_vars: + assert (None is k_vars) and (None is f) + q_vars = (Sequence(self.target_index),) + k_vars = (Sequence(list(range(self.n))),) + f = lambda t,i:t==i + self.select = {i:[f(*get(q_vars,i),*get(k_vars,j)) for j in range(self.n)] + for i in range(self.n)} # outputs of f should be + # True or False. j goes along input dim, i along output + + def __str__(self): + select = self.get_vals() + if None is self.niceprint: + d = {i:list(map(int,self.select[i])) for i in self.select} + self.niceprint = str(self.niceprint) + if len(str(d)) > 40: + starter = "\n" + self.niceprint = pprint.pformat(d) + else: + starter = "" + self.niceprint = str(d) + self.niceprint = starter + self.niceprint + return self.niceprint + + def __repr__(self): + return str(self) + +def select(n,q_vars,k_vars,f): + return Select(n,q_vars,k_vars,f) + +## applying selects or feedforward (map) +def aggregate(select,k_vars,func,default=None): + return to_sequences(apply_average_select(select,k_vars,func,default)) + +def to_sequences(results_by_index): + def totup(r): + if not isinstance(r,tuple): + return (r,) + return r + results_by_index = list(map(totup,results_by_index)) # convert scalar results to tuples of length 1 + results_by_output_val = list(zip(*results_by_index)) # one list (sequence) per output value + res = tuple(map(Sequence,results_by_output_val)) + if len(res) == 1: + return res[0] + else: + return res + +def zipmap(n,k_vars,func): + # assert len(k_vars) >= 1, "dont make a whole sequence for a plain constant you already know the value of.." 
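+	# evaluate func position by position: at each index i it is called on the i-th value of every sequence in k_vars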
+ results_by_index = [func(*get(k_vars,i)) for i in range(n)] + return to_sequences(results_by_index) + +def verify_default_size(default,num_output_vars): + assert num_output_vars > 0 + if num_output_vars == 1: + assert not isinstance(default,tuple), "aggregates on functions with single output should have scalar default" + elif num_output_vars > 1: + assert isinstance(default,tuple) and len(default)==num_output_vars,\ + "for function with >1 output values, default should be tuple of default \ + values, of equal length to passed function's output values (for function \ + with single output value, default should be single value too)" + +def apply_average_select(select,k_vars,func,default=0): + def apply_func_to_each_index(): + kvs = [get(k_vars,i) for i in list(range(select.n))] # kvs is list [by index] of lists [by varname] of values + candidate_i = [func(*kvi) for kvi in kvs] # candidate output per index + if num_output_vars > 1: + candidates_by_varname = list(zip(*candidate_i)) + else: + candidates_by_varname = (candidate_i,) # expect tuples of values for conversions in return_sequences + return candidates_by_varname + + def prep_default(default,num_output_vars): + if None is default: + default = 0 + # output of average is always floats, so will be converting all + # to floats here else we'll fail the lazy type check in the Sequences. + # (and float(None) doesn't 'compile' ) + # TODO: maybe just lose the lazy type check? + if not isinstance(default,tuple) and (num_output_vars>1): + default = tuple([default]*num_output_vars) + # *specifically* in apply_average, where values have to be floats, + # allow default to be single val, + #that will be repeated for all wanted outputs + verify_default_size(default,num_output_vars) + if not isinstance(default,tuple): + default = (default,) # specifically with how we're going to do things here in the average aggregate, + # will help to actually have the outputs get passed around as tuples, even if they're scalars really. + # but do this after the size check for the scalar one so it doesn't get filled with weird ifs... 
this + # tupled scalar thing is only a convenience in this implementation in this here function + return default + + def apply_and_average_single_index(outputs_by_varname,index, + index_scores,num_output_vars,default): + def mean(scores,vals): + n = scores.count(True) # already >0 by earlier + if n == 1: + return vals[scores.index(True)] + # else # n>1 + if not _lazy_type_check(vals)==TNUM: + raise Exception("asked to average multiple values, but they are non-numbers: "+str(vals)) + return sum([v for s,v in zip(scores,vals) if s])*1.0/n + + num_influencers = index_scores.count(True) + if num_influencers == 0: + return default + else: + return tuple(mean(index_scores,o_by_i) for o_by_i in outputs_by_varname) # return_sequences expects multiple outputs to be in tuple form + num_output_vars = get_num_outputs(func(*get(k_vars,0))) + candidates_by_varname = apply_func_to_each_index() + default = prep_default(default,num_output_vars) + means_per_index = [apply_and_average_single_index(candidates_by_varname, + i,select.select[i],num_output_vars,default) + for i in range(select.n)] + # list (per index) of all the new variable values (per varname) + return means_per_index + +def get_num_outputs(dummy_out): # user's responsibility to give functions that always have same number of outputs + if isinstance(dummy_out,tuple): + return len(dummy_out) + return 1 + +def small_str(v): + if isinstance(v,float): + return str(clean_val(v,3)) + if isinstance(v,bool): + return "T" if v else "F" + return str(v) + + +def get(vars_list,index): # index should be within range to access +# v._vals and if not absolutely should raise an error, as it will here +# by the attempted access + res = deepcopy([v._vals[index] for v in vars_list]) + return res + + + diff --git a/RASP_support/analyse.py b/RASP_support/analyse.py new file mode 100644 index 0000000..1a6f96b --- /dev/null +++ b/RASP_support/analyse.py @@ -0,0 +1,333 @@ +from FunctionalSupport import Unfinished, UnfinishedSequence, UnfinishedSelect, \ +guarded_contains, guarded_compare, zipmap # need these for actually comparing sequences and not just making more sequences +from collections import defaultdict, Counter +from copy import copy + +def UnfinishedFunc(f): + setattr(Unfinished,f.__name__,f) + +def is_real_unfinished(unf): # as opposed to intermediate unfinisheds like tuples of sequences + return isinstance(unf,UnfinishedSequence) or isinstance(unf,UnfinishedSelect) + +Unfinished._parents = None +@UnfinishedFunc +def get_parents(self): + if not None is self._parents: + return self._parents + real_parents_part1 = [p for p in self.parents_tuple if is_real_unfinished(p)] + other_parents = [p for p in self.parents_tuple if not is_real_unfinished(p)] + res = real_parents_part1 + for p in other_parents: + res += p.get_parents() # recursion: branch back through all the parents of the unf, + # always stopping wherever hit something 'real' ie a select or a sequence + assert len([p for p in res if isinstance(p,UnfinishedSelect)]) <= 1 # nothing is made from more than one select... 
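+	# dedupe and cache the result, so repeated calls don't redo the recursive climb through the intermediate unfinisheds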
+ self._parents = list(set(res)) + return copy(self._parents) # in case someone messes with the list eg popping through it + +@UnfinishedFunc +def get_parent_sequences(self): + # for UnfinishedSequences, this should get just the tuple of sequences the aggregate is applied to, + # and I think in order (as the parents will only be a select and a sequencestuple, and the seqs in the + # sequencestuple will be added in order and the select will be removed in this function) + return [p for p in self.get_parents() if isinstance(p,UnfinishedSequence)] # i.e. drop the selects + +Unfinished._full_parents = None +@UnfinishedFunc +def get_full_parents(self): + if None is self._full_parents: + explored = set() + not_explored = [self] + while not_explored: + p = not_explored.pop(0) + new_parents = p.get_parents() + explored.add(p) + not_explored += [p for p in new_parents if not guarded_contains(explored,p)] + self._full_parents = explored + return copy(self._full_parents) + +Unfinished._full_seq_parents = None +@UnfinishedFunc +def get_full_seq_parents(self): + if None is self._full_seq_parents: + self._full_seq_parents = [u for u in self.get_full_parents() \ + if isinstance(u,UnfinishedSequence)] + return copy(self._full_seq_parents) + +@UnfinishedFunc +def get_parent_select(self): + if not hasattr(self,"parent_select"): + real_parents = self.get_parents() + self.parent_select = next((s for s in real_parents if \ + isinstance(s,UnfinishedSelect)), None) + return self.parent_select + +@UnfinishedFunc +def set_analysis_parent_select(self,options): + # doesn't really need to be a function but feels clearer visually to have it + # out here so i can see this variable is being registered to the unfinisheds + if None is self.parent_select: + self.analysis_parent_select = self.parent_select + else: + self.analysis_parent_select = next((ps for ps in options if \ + ps.compare_string==self.get_parent_select().compare_string), None) + assert not None is self.analysis_parent_select, "parent options given to seq: "+self.name+" did not"+\ + "include anything equivalent to actual seq's parent"+\ + " select ("+self.get_parent_select().compare_string+")" + +def squeeze_selects(selects): + compstrs = set([s.compare_string for s in selects]) + if len(compstrs) == len(selects): + return selects + return [next(s for s in selects if s.compare_string==cs) for cs in compstrs] + +@UnfinishedFunc +def schedule(self,scheduler='best',remove_minors=False): +# recall attentions can be created on level 1 but still generate seqs on level 3 etc +# hence width is number of *seqs* with different attentions per level. + def choose_scheduler(scheduler): + if scheduler == 'best': + return 'greedy' + # TODO: implement lastminute, maybe others, and choose narrowest + # result of all options + return scheduler + scheduler = choose_scheduler(scheduler) + seq_layers = self.greedy_seq_scheduler() if scheduler == 'greedy' \ + else self.lastminute_seq_scheduler() + + if remove_minors: + for i in seq_layers: + seq_layers[i] = [seq for seq in seq_layers[i] if not seq.is_minor] + + num_layers = max(seq_layers.keys()) + + def get_seqs_selects(seqs): + # all the selects needed to compute a set of seqs + all_selects = set(seq.get_parent_select() for seq in seqs) + all_selects -= set([None]) # some of the seqs may not have parent matches, + # eg, indices. 
these will return None, which we don't want to count + return squeeze_selects(all_selects) # squeeze identical parents + + + layer_selects = { i:get_seqs_selects(seq_layers[i]) for i in seq_layers } + + # mark remaining parent select after squeeze + for i in seq_layers: + for seq in seq_layers[i]: + seq.set_analysis_parent_select(layer_selects[i]) + + return seq_layers, layer_selects + +@UnfinishedFunc +def greedy_seq_scheduler(self): + all_seqs = sorted(self.get_full_seq_parents(), + key = lambda seq:seq.creation_order_id) + # sorting in order of creation automatically sorts by order of in-layer + # dependencies (i.e. things got through feedforwards), makes prints clearer + # and eventually is helpful for drawcompflow + levels = defaultdict(lambda :[]) + for seq in all_seqs: + levels[seq.min_poss_depth].append(seq) # schedule all seqs as early as possible + return levels + + +Unfinished.max_poss_depth_for_seq = (None,None) +@UnfinishedFunc +def lastminute_for_seq(self,seq): + raise NotImplementedError + + +@UnfinishedFunc +def lastminute_seq_scheduler(self): + all_seqs = self.get_full_seq_parents() + + +@UnfinishedFunc +def typestr(self): + if isinstance(self,UnfinishedSelect): + return "select" + elif isinstance(self,UnfinishedSequence): + return "seq" + else: + return "internal" + +@UnfinishedFunc +def width_and_depth(self,scheduler='greedy',loud=True,print_tree_too=False,remove_minors=False): + seq_layers, layer_selects = self.schedule(scheduler=scheduler,remove_minors=remove_minors) + widths = {i:len(layer_selects[i]) for i in layer_selects} + n_layers = max(seq_layers.keys()) + max_width = max(widths[i] for i in widths) + if loud: + print("analysing unfinished",self.typestr()+":",self.name) + print("using scheduler:",scheduler) + print("num layers:",n_layers,"max width:",max_width) + print("width per layer:") + print("\n".join( str(i)+"\t: "+str(widths[i]) \ + for i in range(1,n_layers+1) )) + # start from 1 to skip layer 0, which has width 0 + # and is just the inputs (tokens and indices) + if print_tree_too: + def print_layer(i,d): + print(i,"\t:",", ".join(seq.name for seq in d[i])) + print("==== seqs at each layer: ====") + [print_layer(i,seq_layers) for i in range(1,n_layers+1)] + print("==== selects at each layer: ====") + [print_layer(i,layer_selects) for i in range(1,n_layers+1)] + return n_layers, max_width, widths + +@UnfinishedFunc +def schedule_comp_depth(self,d): + self.scheduled_comp_depth = d + +@UnfinishedFunc +def get_all_ancestor_heads_and_ffs(self,remove_minors=False): + class Head: + def __init__(self,select,sequences,comp_depth): + self.comp_depth = comp_depth + self.name = str([m.name for m in sequences]) + self.sequences = sequences + self.select = select + seq_layers, layer_selects = self.schedule('best',remove_minors=remove_minors) + + all_ffs = [m for m in self.get_full_seq_parents() if m.from_zipmap] + if remove_minors: + all_ffs = [ff for ff in all_ffs if not ff.is_minor] + + + for i in seq_layers: + for m in seq_layers[i]: + if guarded_contains(all_ffs,m): + m.schedule_comp_depth(i) # mark comp depths of the ffs... drawcompflow wants to know + + heads = [] + for i in layer_selects: + for s in layer_selects[i]: + seqs = [m for m in seq_layers[i] if m.analysis_parent_select==s] + heads.append(Head(s,seqs,i)) + + return heads,all_ffs + +@UnfinishedFunc +def set_display_name(self,display_name): + self.display_name = display_name + # again just making it more visible??? 
that there's an attribute being set somewhere + +@UnfinishedFunc +def make_display_names_for_all_parents(self,skip_minors=False): + all_unfs = self.get_full_parents() + all_seqs = [u for u in set(all_unfs) if isinstance(u,UnfinishedSequence)] + all_selects = [u for u in set(all_unfs) if isinstance(u,UnfinishedSelect)] + if skip_minors: + num_orig = len(all_seqs) + all_seqs = [seq for seq in all_seqs if not seq.is_minor] + name_counts = Counter([m.name for m in all_seqs]) + name_suff = Counter() + for m in sorted(all_seqs+all_selects,key=lambda u:u.creation_order_id): + # yes, even the non-seqs need display names, albeit for now only worry about repeats in the seqs + # and sort by creation order to get name suffixes with chronological (and so non-confusing) order + if name_counts[m.name]>1: + m.set_display_name(m.name+"_"+str(name_suff[m.name])) + name_suff[m.name] += 1 + + else: + m.set_display_name(m.name) + +@UnfinishedFunc +def note_if_seeker(self): + if not isinstance(self,UnfinishedSequence): + return + + if (not self.get_parent_sequences()) and (not None is self.get_parent_select()): + # no parent sequences, but yes parent select: this value is a function + # of only its parent select, i.e., a seeker (marks whether select found something or not) + self.is_seeker = True + self.seeker_flag = self.elementwise_function() + self.seeker_default = self._default + else: + self.is_seeker = False + +@UnfinishedFunc +def mark_all_ancestor_seekers(self): + [u.note_if_seeker() for u in self.get_full_parents()] + +Unfinished._full_descendants_for_seq = (None,None) +@UnfinishedFunc +def descendants_towards_seq(self,seq): + if not guarded_compare(self._full_descendants_for_seq[0],seq): + + relevant = seq.get_full_parents() + res = [r for r in relevant if guarded_contains(r.get_parents(),self)] + + self._full_descendants_for_seq = (seq,res) + return self._full_descendants_for_seq[1] + +@UnfinishedFunc +def is_minor_comp_towards_seq(self,seq): + if not isinstance(self,UnfinishedSequence): + return False # selects are always important + if self.never_display: # priority: never over always + return True + if self.always_display: + if self.is_constant(): + print("displaying constant:",self.name) + return False + if self.is_constant(): # e.g. 1 or "a" etc, just stuff created around constants by REPL behind the scenes + return True + children = self.descendants_towards_seq(seq) + if len(children)>1: + return False # this sequence was used twice -> must have been actually + # named as a real variable in the code (and not part of some bunch of operators) + # -> make it visible in the comp flow too + if len(children)==0: + return not guarded_compare(self,seq) # if it's the seq itself then clearly + # we're very interested in it. 
if it has no children and isnt the seq then we're checking out + # a weird dangly unused leaf, we shouldn't reach such a scenario through any of functions + # we'll be using to call this one, but might as well make this function complete just in case + # we forget + child = children[0] + if isinstance(child,UnfinishedSelect): + return False # this thing feeds directly into a select, lets make it visible + return (child.from_zipmap and self.from_zipmap) # obtained through zipmap and feeds + # directly into another zipmap: minor operation as part of something more complicated + +Unfinished.is_minor = False +@UnfinishedFunc +def set_minor_for_seq(self,seq): # another func just to be very explicit about an attribute that's getting set + self.is_minor = self.is_minor_comp_towards_seq(seq) + +@UnfinishedFunc +def mark_all_minor_ancestors(self): + all_ancestors = self.get_full_parents() + for a in all_ancestors: + a.set_minor_for_seq(self) + +@UnfinishedFunc +def get_nonminor_parents(self): # assumes have already marked the minor parents +# according to current interests. +# otherwise, may remain marked according to a different seq, or possibly all on default value +# (none are minor, all are important) + potentials = copy(self.get_parents()) + nonminors = [] + while potentials: + p = potentials.pop() + if not p.is_minor: + nonminors.append(p) + else: + potentials += p.get_parents() + return list(set(nonminors)) + +@UnfinishedFunc +def get_nonminor_parent_sequences(self): + return [p for p in self.get_nonminor_parents() if isinstance(p,UnfinishedSequence)] + +@UnfinishedFunc +def get_immediate_parent_sequences(self): # gets both minor and nonminor sequences + return [p for p in self.get_parents() if isinstance(p,UnfinishedSequence)] + +@UnfinishedFunc +def pre_aggregate_comp(seq): + vvars = seq.get_parent_sequences() + vreal = zipmap(vvars,seq.elementwise_function) + if isinstance(vreal,tuple): # equivalently, if seq.output_index >= 0: + vreal = vreal[seq.output_index] + return vreal + diff --git a/RASP_support/make_operators.py b/RASP_support/make_operators.py new file mode 100644 index 0000000..d6a3e66 --- /dev/null +++ b/RASP_support/make_operators.py @@ -0,0 +1,146 @@ +# extend UnfinishedSequence with a bunch of operators, +# provided the unary and binary ops. +# make them fully named functions instead of lambdas, even though +# it's more lines, because the debug prints are so much clearer +# this way +def add_ops(Class,apply_unary_op,apply_binary_op): + + def addsetname(f,opname,rev): + def f_with_setname(*a): + + assert len(a) in [1,2] + if len(a)==2: + a0,a1 = a if not rev else (a[1],a[0]) + name0 = a0.name if hasattr(a0,"name") else str(a0) + name1 = a1.name if hasattr(a1,"name") else str(a1) + # a0/a1 might not be an seq, just having an op on it with an seq. + name = name0 + " " + opname + " " + name1 + else: # len(a)==1 + name = opname + " " +a[0].name + name = "( "+name+" )" # probably going to be composed with more ops, so... + return f(*a).setname(name).allow_suppressing_display() + # seqs created as parts of long sequences of operators + # may be suppressed in display, the final name of the whole composition will be + # sufficiently informative. 
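+	# e.g. composing further ops just nests the parentheses: "( ( x + y ) == z )"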
+ # have to set always_display to false *after* the setname, because setname marks + # always_display as True (under assumption it is normally being called by the user, + # who must clearly be naming some variable they care about) + return f_with_setname + + def listop(f,listing_name): + setattr(Class,listing_name,f) + + def addop(opname,rev=False): + return lambda f:listop(addsetname(f,opname,rev),f.__name__) + + @addop("==") + def __eq__(self,other): + return apply_binary_op(self,other,lambda a,b:a==b) + + @addop("!=") + def __ne__(self,other): + return apply_binary_op(self,other,lambda a,b:a!=b) + + @addop("<") + def __lt__(self,other): + return apply_binary_op(self,other,lambda a,b:a<b) + + @addop(">") + def __gt__(self,other): + return apply_binary_op(self,other,lambda a,b:a>b) + + @addop("<=") + def __le__(self,other): + return apply_binary_op(self,other,lambda a,b:a<=b) + + @addop(">=") + def __ge__(self,other): + return apply_binary_op(self,other,lambda a,b:a>=b) + + + @addop("+") + def __add__(self,other): + return apply_binary_op(self,other,lambda a,b:a+b) + + @addop("+",True) + def __radd__(self,other): + return apply_binary_op(self,other,lambda a,b:b+a) + + @addop("-") + def __sub__(self,other): + return apply_binary_op(self,other,lambda a,b:a-b) + + @addop("-",True) + def __rsub__(self,other): + return apply_binary_op(self,other,lambda a,b:b-a) + + @addop("*") + def __mul__(self,other): + return apply_binary_op(self,other,lambda a,b:a*b) + + @addop("*",True) + def __rmul__(self,other): + return apply_binary_op(self,other,lambda a,b:b*a) + + @addop("//") + def __floordiv__(self,other): + return apply_binary_op(self,other,lambda a,b:a//b) + + @addop("//",True) + def __rfloordiv__(self,other): + return apply_binary_op(self,other,lambda a,b:b//a) + + @addop("/") + def __truediv__(self,other): + return apply_binary_op(self,other,lambda a,b:a/b) + + @addop("/",True) + def __rtruediv__(self,other): + return apply_binary_op(self,other,lambda a,b:b/a) + + @addop("%") + def __mod__(self,other): + return apply_binary_op(self,other,lambda a,b:a%b) + + @addop("%",True) + def __rmod__(self,other): + return apply_binary_op(self,other,lambda a,b:b%a) + + @addop("divmod") + def __divmod__(self,other): + return apply_binary_op(self,other,lambda a,b:divmod(a,b)) + + @addop("divmod",True) + def __rdivmod__(self,other): + return apply_binary_op(self,other,lambda a,b:divmod(b,a)) + + @addop("pow") + def __pow__(self,other): + return apply_binary_op(self,other,lambda a,b:pow(a,b)) + + @addop("pow",True) + def __rpow__(self,other): + return apply_binary_op(self,other,lambda a,b:pow(b,a)) + + # skipping and, or, xor, which are bitwise and dont implement 'and' and 'or' but rather & and | + # similarly skipping lshift, rshift cause who wants them + # wish i had not, and, or primitives, but can accept that dont. + # if people really want to do 'not' they can do '==False' instead, can do a little macro for it in the other sugar file or whatever + + @addop("+") + def __pos__(self): + return apply_unary_op(self,lambda a:+a) + + @addop("-") + def __neg__(self): + return apply_unary_op(self,lambda a:-a) + + @addop("abs") + def __abs__(self): + return apply_unary_op(self,abs) + + @addop("round") + def __round__(self): # not sure if python will get upset if round doesnt return an actual int tbh... will have to check.
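+ # (fwiw: the round() builtin just returns whatever __round__ gives back and does not force an int, so a sequence result is fine here; only round(seq, ndigits) would break, since this __round__ takes no ndigits argument)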
+ return apply_unary_op(self,round) + + # defining floor, ceil, trunc showed up funny (green instead of blue), gonna go ahead and avoid diff --git a/RASP_support/rasplib.rasp b/RASP_support/rasplib.rasp new file mode 100644 index 0000000..6e958f7 --- /dev/null +++ b/RASP_support/rasplib.rasp @@ -0,0 +1,94 @@ +tokens = tokens_str; + +def selector_width(sel) { + at0 = select(indices,0,==); + sAND0 = sel and at0; + sOR0 = sel or at0; + inverted = aggregate(sOR0,indicator(indices==0)); + except0 = (1/inverted)-1; + valat0 = aggregate(sAND0,1,0); + return round(except0 + valat0); +} + +def has_focus(sel) { + return aggregate(sel,1,0)>0; +} + +full_s = select(1,1,==); + +def count(seq,atom) { + return round( + length * aggregate( + full_s, indicator(seq==atom))); +} + +def contains(seq,atom) { + return aggregate(full_s,indicator(seq==atom))>0; +} + +def max(a,b) { + return a if a>b else b; +} + +def min(a,b) { + return a if a<b else b; +} + +def mark_last_instance(seq,val) { + has_val = seq == val; + find_val = select(seq,val,==); + find_later = select(indices,indices,>); + find_later_val = find_val and find_later; + has_later_val = has_focus(find_later_val); + return has_val and not has_later_val; +} + +def mark_first_instance(seq,val) { + has_val = seq == val; + find_val = select(seq,val,==); + find_earlier = select(indices,indices,>); + find_earlier_val = find_val and find_earlier; + has_earlier_val = has_focus(find_earlier_val); + return has_val and not has_earlier_val; +} + +def select_from_last(seq,val) { + return select(mark_last_instance(seq,val),True,==); +} + +def select_from_first(seq,val) { + return select(mark_first_instance(seq,val),True,==); +} + +def sort(seq,key) { + select_earlier_in_sorted = + select(key,key,<) or (select(key,key,==) and select(indices,indices,<)); + target_position = + selector_width(select_earlier_in_sorted); + select_new_val = + select(target_position,indices,==); + return aggregate(select_new_val,seq); +} + +def select_next_identical(seq) { + select_prev_identical = + select(seq,seq,==) and select(indices,indices,<); + num_prev_identical = + selector_width(select_prev_identical); + return select(seq,seq,==) and select(num_prev_identical,num_prev_identical+1,==); +} + + diff --git a/RASP_support/zzantlr/RASP.interp b/RASP_support/zzantlr/RASP.interp new file mode 100644 index 0000000..6603dac --- /dev/null +++ b/RASP_support/zzantlr/RASP.interp @@ -0,0 +1,158 @@ +token literal names: +null +';' +'set' +'s-op' +'selector' +'example' +'show' +'full seq display' +'on' +'off' +'examples' +'exit()' +'exit' +'quit' +'quit()' +'load' +'=' +'draw' +'(' +',' +')' +':' +'def' +'{' +'}' +'for' +'in' +'return' +'aggregate(' +'[' +']' +'not' +'-' +'+' +'round' +'indicator' +'^' +'*' +'/' +'%' +'==' +'<=' +'>=' +'>' +'<' +'select(' +'!=' +'and' +'or' +'if' +'else' +'range(' +'zip(' +'len(' +null +null +null +null +null +null + +token symbolic names: +null +null +null +null +null +null +null +null +null +null +null +null +null +null +null +null +null +null +null +null +null +null +null +null +null +null +null +null +null +null +null +null +null +null +null +null +null +null +null +null +null +null +null +null +null +null +null +null +null +null +null +null +null +null +Float +PosInt +String +Comment +ID +WS + +rule names: +r +statement +raspstatement +replstatement +setExample +showExample +toggleSeqVerbose +toggleExample +exit +loadFile +assign +draw +exprsList +namedExprsList +namedExpr +raspstatementsList +funcDef +forLoop +commentsList +assignsAndCommentsList +returnStatement +idsList +aggregateExpr +atom +expr +aList +aDict +listCompExpr +dictCompExpr + + +atn: +[3, 24715, 42794, 33075, 47597, 16764, 15335, 
30598, 22884, 3, 61, 353, 4, 2, 9, 2, 4, 3, 9, 3, 4, 4, 9, 4, 4, 5, 9, 5, 4, 6, 9, 6, 4, 7, 9, 7, 4, 8, 9, 8, 4, 9, 9, 9, 4, 10, 9, 10, 4, 11, 9, 11, 4, 12, 9, 12, 4, 13, 9, 13, 4, 14, 9, 14, 4, 15, 9, 15, 4, 16, 9, 16, 4, 17, 9, 17, 4, 18, 9, 18, 4, 19, 9, 19, 4, 20, 9, 20, 4, 21, 9, 21, 4, 22, 9, 22, 4, 23, 9, 23, 4, 24, 9, 24, 4, 25, 9, 25, 4, 26, 9, 26, 4, 27, 9, 27, 4, 28, 9, 28, 4, 29, 9, 29, 4, 30, 9, 30, 3, 2, 6, 2, 62, 10, 2, 13, 2, 14, 2, 63, 3, 2, 3, 2, 3, 3, 3, 3, 5, 3, 70, 10, 3, 3, 3, 3, 3, 5, 3, 74, 10, 3, 3, 3, 5, 3, 77, 10, 3, 3, 4, 3, 4, 3, 4, 3, 4, 3, 4, 3, 4, 3, 4, 3, 4, 3, 4, 3, 4, 3, 4, 3, 4, 3, 4, 3, 4, 5, 4, 93, 10, 4, 3, 5, 3, 5, 3, 5, 3, 5, 3, 5, 5, 5, 100, 10, 5, 3, 6, 3, 6, 5, 6, 104, 10, 6, 3, 6, 3, 6, 3, 6, 3, 7, 3, 7, 5, 7, 111, 10, 7, 3, 7, 3, 7, 3, 8, 3, 8, 3, 8, 3, 9, 5, 9, 119, 10, 9, 3, 9, 3, 9, 3, 9, 3, 10, 3, 10, 3, 11, 3, 11, 3, 11, 3, 12, 3, 12, 3, 12, 3, 12, 3, 13, 3, 13, 3, 13, 3, 13, 3, 13, 5, 13, 138, 10, 13, 3, 13, 3, 13, 3, 14, 3, 14, 3, 14, 5, 14, 145, 10, 14, 3, 15, 3, 15, 3, 15, 5, 15, 150, 10, 15, 3, 16, 3, 16, 3, 16, 3, 16, 3, 17, 3, 17, 5, 17, 158, 10, 17, 3, 17, 5, 17, 161, 10, 17, 3, 17, 3, 17, 5, 17, 165, 10, 17, 5, 17, 167, 10, 17, 3, 18, 3, 18, 3, 18, 3, 18, 5, 18, 173, 10, 18, 3, 18, 3, 18, 3, 18, 5, 18, 178, 10, 18, 3, 18, 5, 18, 181, 10, 18, 3, 18, 3, 18, 5, 18, 185, 10, 18, 3, 18, 3, 18, 3, 19, 3, 19, 3, 19, 3, 19, 3, 19, 3, 19, 3, 19, 3, 19, 3, 20, 3, 20, 5, 20, 199, 10, 20, 3, 21, 3, 21, 3, 21, 5, 21, 204, 10, 21, 3, 21, 5, 21, 207, 10, 21, 3, 21, 3, 21, 5, 21, 211, 10, 21, 5, 21, 213, 10, 21, 3, 22, 3, 22, 3, 22, 3, 22, 3, 23, 3, 23, 3, 23, 5, 23, 222, 10, 23, 3, 24, 3, 24, 3, 24, 3, 24, 3, 24, 3, 24, 5, 24, 230, 10, 24, 3, 24, 3, 24, 3, 25, 3, 25, 3, 25, 5, 25, 237, 10, 25, 3, 26, 3, 26, 3, 26, 3, 26, 3, 26, 3, 26, 3, 26, 3, 26, 3, 26, 3, 26, 3, 26, 3, 26, 3, 26, 3, 26, 3, 26, 3, 26, 3, 26, 3, 26, 3, 26, 3, 26, 3, 26, 3, 26, 3, 26, 3, 26, 3, 26, 3, 26, 3, 26, 3, 26, 3, 26, 3, 26, 3, 26, 3, 26, 3, 26, 3, 26, 3, 26, 3, 26, 3, 26, 3, 26, 3, 26, 5, 26, 278, 10, 26, 3, 26, 3, 26, 3, 26, 3, 26, 3, 26, 3, 26, 3, 26, 3, 26, 3, 26, 3, 26, 3, 26, 3, 26, 3, 26, 3, 26, 3, 26, 3, 26, 3, 26, 3, 26, 3, 26, 3, 26, 3, 26, 3, 26, 3, 26, 3, 26, 3, 26, 3, 26, 3, 26, 3, 26, 3, 26, 3, 26, 3, 26, 3, 26, 3, 26, 3, 26, 3, 26, 5, 26, 315, 10, 26, 3, 26, 7, 26, 318, 10, 26, 12, 26, 14, 26, 321, 11, 26, 3, 27, 3, 27, 5, 27, 325, 10, 27, 3, 27, 3, 27, 3, 28, 3, 28, 5, 28, 331, 10, 28, 3, 28, 3, 28, 3, 29, 3, 29, 3, 29, 3, 29, 3, 29, 3, 29, 3, 29, 3, 29, 3, 30, 3, 30, 3, 30, 3, 30, 3, 30, 3, 30, 3, 30, 3, 30, 3, 30, 3, 30, 3, 30, 3, 63, 3, 50, 31, 2, 4, 6, 8, 10, 12, 14, 16, 18, 20, 22, 24, 26, 28, 30, 32, 34, 36, 38, 40, 42, 44, 46, 48, 50, 52, 54, 56, 58, 2, 12, 3, 2, 5, 6, 3, 2, 10, 11, 3, 2, 13, 16, 3, 2, 33, 35, 3, 2, 36, 37, 4, 2, 42, 46, 48, 48, 3, 2, 39, 40, 3, 2, 34, 35, 3, 2, 42, 46, 3, 2, 49, 50, 2, 386, 2, 61, 3, 2, 2, 2, 4, 76, 3, 2, 2, 2, 6, 92, 3, 2, 2, 2, 8, 99, 3, 2, 2, 2, 10, 101, 3, 2, 2, 2, 12, 108, 3, 2, 2, 2, 14, 114, 3, 2, 2, 2, 16, 118, 3, 2, 2, 2, 18, 123, 3, 2, 2, 2, 20, 125, 3, 2, 2, 2, 22, 128, 3, 2, 2, 2, 24, 132, 3, 2, 2, 2, 26, 141, 3, 2, 2, 2, 28, 146, 3, 2, 2, 2, 30, 151, 3, 2, 2, 2, 32, 166, 3, 2, 2, 2, 34, 168, 3, 2, 2, 2, 36, 188, 3, 2, 2, 2, 38, 196, 3, 2, 2, 2, 40, 212, 3, 2, 2, 2, 42, 214, 3, 2, 2, 2, 44, 218, 3, 2, 2, 2, 46, 223, 3, 2, 2, 2, 48, 236, 3, 2, 2, 2, 50, 277, 3, 2, 2, 2, 52, 322, 3, 2, 2, 2, 54, 328, 3, 2, 2, 2, 56, 334, 3, 2, 2, 2, 58, 342, 3, 2, 2, 2, 60, 62, 5, 4, 3, 2, 61, 60, 3, 2, 2, 2, 62, 63, 3, 
2, 2, 2, 63, 64, 3, 2, 2, 2, 63, 61, 3, 2, 2, 2, 64, 65, 3, 2, 2, 2, 65, 66, 7, 2, 2, 3, 66, 3, 3, 2, 2, 2, 67, 69, 5, 6, 4, 2, 68, 70, 7, 59, 2, 2, 69, 68, 3, 2, 2, 2, 69, 70, 3, 2, 2, 2, 70, 77, 3, 2, 2, 2, 71, 73, 5, 8, 5, 2, 72, 74, 7, 59, 2, 2, 73, 72, 3, 2, 2, 2, 73, 74, 3, 2, 2, 2, 74, 77, 3, 2, 2, 2, 75, 77, 7, 59, 2, 2, 76, 67, 3, 2, 2, 2, 76, 71, 3, 2, 2, 2, 76, 75, 3, 2, 2, 2, 77, 5, 3, 2, 2, 2, 78, 79, 5, 50, 26, 2, 79, 80, 7, 3, 2, 2, 80, 93, 3, 2, 2, 2, 81, 82, 5, 22, 12, 2, 82, 83, 7, 3, 2, 2, 83, 93, 3, 2, 2, 2, 84, 85, 5, 24, 13, 2, 85, 86, 7, 3, 2, 2, 86, 93, 3, 2, 2, 2, 87, 88, 5, 20, 11, 2, 88, 89, 7, 3, 2, 2, 89, 93, 3, 2, 2, 2, 90, 93, 5, 34, 18, 2, 91, 93, 5, 36, 19, 2, 92, 78, 3, 2, 2, 2, 92, 81, 3, 2, 2, 2, 92, 84, 3, 2, 2, 2, 92, 87, 3, 2, 2, 2, 92, 90, 3, 2, 2, 2, 92, 91, 3, 2, 2, 2, 93, 7, 3, 2, 2, 2, 94, 100, 5, 10, 6, 2, 95, 100, 5, 12, 7, 2, 96, 100, 5, 16, 9, 2, 97, 100, 5, 14, 8, 2, 98, 100, 5, 18, 10, 2, 99, 94, 3, 2, 2, 2, 99, 95, 3, 2, 2, 2, 99, 96, 3, 2, 2, 2, 99, 97, 3, 2, 2, 2, 99, 98, 3, 2, 2, 2, 100, 9, 3, 2, 2, 2, 101, 103, 7, 4, 2, 2, 102, 104, 9, 2, 2, 2, 103, 102, 3, 2, 2, 2, 103, 104, 3, 2, 2, 2, 104, 105, 3, 2, 2, 2, 105, 106, 7, 7, 2, 2, 106, 107, 5, 50, 26, 2, 107, 11, 3, 2, 2, 2, 108, 110, 7, 8, 2, 2, 109, 111, 9, 2, 2, 2, 110, 109, 3, 2, 2, 2, 110, 111, 3, 2, 2, 2, 111, 112, 3, 2, 2, 2, 112, 113, 7, 7, 2, 2, 113, 13, 3, 2, 2, 2, 114, 115, 7, 9, 2, 2, 115, 116, 9, 3, 2, 2, 116, 15, 3, 2, 2, 2, 117, 119, 9, 2, 2, 2, 118, 117, 3, 2, 2, 2, 118, 119, 3, 2, 2, 2, 119, 120, 3, 2, 2, 2, 120, 121, 7, 12, 2, 2, 121, 122, 9, 3, 2, 2, 122, 17, 3, 2, 2, 2, 123, 124, 9, 4, 2, 2, 124, 19, 3, 2, 2, 2, 125, 126, 7, 17, 2, 2, 126, 127, 7, 58, 2, 2, 127, 21, 3, 2, 2, 2, 128, 129, 5, 44, 23, 2, 129, 130, 7, 18, 2, 2, 130, 131, 5, 26, 14, 2, 131, 23, 3, 2, 2, 2, 132, 133, 7, 19, 2, 2, 133, 134, 7, 20, 2, 2, 134, 137, 5, 50, 26, 2, 135, 136, 7, 21, 2, 2, 136, 138, 5, 50, 26, 2, 137, 135, 3, 2, 2, 2, 137, 138, 3, 2, 2, 2, 138, 139, 3, 2, 2, 2, 139, 140, 7, 22, 2, 2, 140, 25, 3, 2, 2, 2, 141, 144, 5, 50, 26, 2, 142, 143, 7, 21, 2, 2, 143, 145, 5, 26, 14, 2, 144, 142, 3, 2, 2, 2, 144, 145, 3, 2, 2, 2, 145, 27, 3, 2, 2, 2, 146, 149, 5, 30, 16, 2, 147, 148, 7, 21, 2, 2, 148, 150, 5, 28, 15, 2, 149, 147, 3, 2, 2, 2, 149, 150, 3, 2, 2, 2, 150, 29, 3, 2, 2, 2, 151, 152, 5, 50, 26, 2, 152, 153, 7, 23, 2, 2, 153, 154, 5, 50, 26, 2, 154, 31, 3, 2, 2, 2, 155, 157, 5, 6, 4, 2, 156, 158, 7, 59, 2, 2, 157, 156, 3, 2, 2, 2, 157, 158, 3, 2, 2, 2, 158, 160, 3, 2, 2, 2, 159, 161, 5, 32, 17, 2, 160, 159, 3, 2, 2, 2, 160, 161, 3, 2, 2, 2, 161, 167, 3, 2, 2, 2, 162, 164, 7, 59, 2, 2, 163, 165, 5, 32, 17, 2, 164, 163, 3, 2, 2, 2, 164, 165, 3, 2, 2, 2, 165, 167, 3, 2, 2, 2, 166, 155, 3, 2, 2, 2, 166, 162, 3, 2, 2, 2, 167, 33, 3, 2, 2, 2, 168, 169, 7, 24, 2, 2, 169, 170, 7, 60, 2, 2, 170, 172, 7, 20, 2, 2, 171, 173, 5, 44, 23, 2, 172, 171, 3, 2, 2, 2, 172, 173, 3, 2, 2, 2, 173, 174, 3, 2, 2, 2, 174, 175, 7, 22, 2, 2, 175, 177, 7, 25, 2, 2, 176, 178, 5, 38, 20, 2, 177, 176, 3, 2, 2, 2, 177, 178, 3, 2, 2, 2, 178, 180, 3, 2, 2, 2, 179, 181, 5, 32, 17, 2, 180, 179, 3, 2, 2, 2, 180, 181, 3, 2, 2, 2, 181, 182, 3, 2, 2, 2, 182, 184, 5, 42, 22, 2, 183, 185, 5, 38, 20, 2, 184, 183, 3, 2, 2, 2, 184, 185, 3, 2, 2, 2, 185, 186, 3, 2, 2, 2, 186, 187, 7, 26, 2, 2, 187, 35, 3, 2, 2, 2, 188, 189, 7, 27, 2, 2, 189, 190, 5, 44, 23, 2, 190, 191, 7, 28, 2, 2, 191, 192, 5, 50, 26, 2, 192, 193, 7, 25, 2, 2, 193, 194, 5, 32, 17, 2, 194, 195, 7, 26, 2, 2, 195, 37, 3, 2, 2, 2, 196, 198, 7, 59, 2, 2, 197, 199, 
5, 38, 20, 2, 198, 197, 3, 2, 2, 2, 198, 199, 3, 2, 2, 2, 199, 39, 3, 2, 2, 2, 200, 201, 5, 22, 12, 2, 201, 203, 7, 3, 2, 2, 202, 204, 7, 59, 2, 2, 203, 202, 3, 2, 2, 2, 203, 204, 3, 2, 2, 2, 204, 206, 3, 2, 2, 2, 205, 207, 5, 40, 21, 2, 206, 205, 3, 2, 2, 2, 206, 207, 3, 2, 2, 2, 207, 213, 3, 2, 2, 2, 208, 210, 7, 59, 2, 2, 209, 211, 5, 40, 21, 2, 210, 209, 3, 2, 2, 2, 210, 211, 3, 2, 2, 2, 211, 213, 3, 2, 2, 2, 212, 200, 3, 2, 2, 2, 212, 208, 3, 2, 2, 2, 213, 41, 3, 2, 2, 2, 214, 215, 7, 29, 2, 2, 215, 216, 5, 26, 14, 2, 216, 217, 7, 3, 2, 2, 217, 43, 3, 2, 2, 2, 218, 221, 7, 60, 2, 2, 219, 220, 7, 21, 2, 2, 220, 222, 5, 44, 23, 2, 221, 219, 3, 2, 2, 2, 221, 222, 3, 2, 2, 2, 222, 45, 3, 2, 2, 2, 223, 224, 7, 30, 2, 2, 224, 225, 5, 50, 26, 2, 225, 226, 7, 21, 2, 2, 226, 229, 5, 50, 26, 2, 227, 228, 7, 21, 2, 2, 228, 230, 5, 50, 26, 2, 229, 227, 3, 2, 2, 2, 229, 230, 3, 2, 2, 2, 230, 231, 3, 2, 2, 2, 231, 232, 7, 22, 2, 2, 232, 47, 3, 2, 2, 2, 233, 237, 7, 57, 2, 2, 234, 237, 7, 56, 2, 2, 235, 237, 7, 58, 2, 2, 236, 233, 3, 2, 2, 2, 236, 234, 3, 2, 2, 2, 236, 235, 3, 2, 2, 2, 237, 49, 3, 2, 2, 2, 238, 239, 8, 26, 1, 2, 239, 240, 7, 20, 2, 2, 240, 241, 5, 50, 26, 2, 241, 242, 7, 22, 2, 2, 242, 278, 3, 2, 2, 2, 243, 244, 9, 5, 2, 2, 244, 278, 5, 50, 26, 23, 245, 246, 9, 6, 2, 2, 246, 247, 7, 20, 2, 2, 247, 248, 5, 50, 26, 2, 248, 249, 7, 22, 2, 2, 249, 278, 3, 2, 2, 2, 250, 251, 7, 47, 2, 2, 251, 252, 5, 50, 26, 2, 252, 253, 7, 21, 2, 2, 253, 254, 5, 50, 26, 2, 254, 255, 7, 21, 2, 2, 255, 256, 9, 7, 2, 2, 256, 257, 7, 22, 2, 2, 257, 278, 3, 2, 2, 2, 258, 278, 7, 60, 2, 2, 259, 278, 5, 48, 25, 2, 260, 278, 5, 52, 27, 2, 261, 278, 5, 54, 28, 2, 262, 278, 5, 46, 24, 2, 263, 264, 7, 53, 2, 2, 264, 265, 5, 26, 14, 2, 265, 266, 7, 22, 2, 2, 266, 278, 3, 2, 2, 2, 267, 278, 5, 56, 29, 2, 268, 278, 5, 58, 30, 2, 269, 270, 7, 54, 2, 2, 270, 271, 5, 26, 14, 2, 271, 272, 7, 22, 2, 2, 272, 278, 3, 2, 2, 2, 273, 274, 7, 55, 2, 2, 274, 275, 5, 50, 26, 2, 275, 276, 7, 22, 2, 2, 276, 278, 3, 2, 2, 2, 277, 238, 3, 2, 2, 2, 277, 243, 3, 2, 2, 2, 277, 245, 3, 2, 2, 2, 277, 250, 3, 2, 2, 2, 277, 258, 3, 2, 2, 2, 277, 259, 3, 2, 2, 2, 277, 260, 3, 2, 2, 2, 277, 261, 3, 2, 2, 2, 277, 262, 3, 2, 2, 2, 277, 263, 3, 2, 2, 2, 277, 267, 3, 2, 2, 2, 277, 268, 3, 2, 2, 2, 277, 269, 3, 2, 2, 2, 277, 273, 3, 2, 2, 2, 278, 319, 3, 2, 2, 2, 279, 280, 12, 21, 2, 2, 280, 281, 7, 38, 2, 2, 281, 318, 5, 50, 26, 22, 282, 283, 12, 20, 2, 2, 283, 284, 9, 8, 2, 2, 284, 318, 5, 50, 26, 21, 285, 286, 12, 19, 2, 2, 286, 287, 7, 41, 2, 2, 287, 318, 5, 50, 26, 20, 288, 289, 12, 18, 2, 2, 289, 290, 9, 9, 2, 2, 290, 318, 5, 50, 26, 19, 291, 292, 12, 17, 2, 2, 292, 293, 9, 10, 2, 2, 293, 318, 5, 50, 26, 18, 294, 295, 12, 15, 2, 2, 295, 296, 9, 11, 2, 2, 296, 318, 5, 50, 26, 16, 297, 298, 12, 14, 2, 2, 298, 299, 7, 51, 2, 2, 299, 300, 5, 50, 26, 2, 300, 301, 7, 52, 2, 2, 301, 302, 5, 50, 26, 15, 302, 318, 3, 2, 2, 2, 303, 304, 12, 5, 2, 2, 304, 305, 7, 28, 2, 2, 305, 318, 5, 50, 26, 6, 306, 307, 12, 25, 2, 2, 307, 308, 7, 31, 2, 2, 308, 309, 5, 50, 26, 2, 309, 310, 7, 32, 2, 2, 310, 318, 3, 2, 2, 2, 311, 312, 12, 24, 2, 2, 312, 314, 7, 20, 2, 2, 313, 315, 5, 26, 14, 2, 314, 313, 3, 2, 2, 2, 314, 315, 3, 2, 2, 2, 315, 316, 3, 2, 2, 2, 316, 318, 7, 22, 2, 2, 317, 279, 3, 2, 2, 2, 317, 282, 3, 2, 2, 2, 317, 285, 3, 2, 2, 2, 317, 288, 3, 2, 2, 2, 317, 291, 3, 2, 2, 2, 317, 294, 3, 2, 2, 2, 317, 297, 3, 2, 2, 2, 317, 303, 3, 2, 2, 2, 317, 306, 3, 2, 2, 2, 317, 311, 3, 2, 2, 2, 318, 321, 3, 2, 2, 2, 319, 317, 3, 2, 2, 2, 319, 320, 3, 2, 2, 2, 320, 
51, 3, 2, 2, 2, 321, 319, 3, 2, 2, 2, 322, 324, 7, 31, 2, 2, 323, 325, 5, 26, 14, 2, 324, 323, 3, 2, 2, 2, 324, 325, 3, 2, 2, 2, 325, 326, 3, 2, 2, 2, 326, 327, 7, 32, 2, 2, 327, 53, 3, 2, 2, 2, 328, 330, 7, 25, 2, 2, 329, 331, 5, 28, 15, 2, 330, 329, 3, 2, 2, 2, 330, 331, 3, 2, 2, 2, 331, 332, 3, 2, 2, 2, 332, 333, 7, 26, 2, 2, 333, 55, 3, 2, 2, 2, 334, 335, 7, 31, 2, 2, 335, 336, 5, 50, 26, 2, 336, 337, 7, 27, 2, 2, 337, 338, 5, 44, 23, 2, 338, 339, 7, 28, 2, 2, 339, 340, 5, 50, 26, 2, 340, 341, 7, 32, 2, 2, 341, 57, 3, 2, 2, 2, 342, 343, 7, 25, 2, 2, 343, 344, 5, 50, 26, 2, 344, 345, 7, 23, 2, 2, 345, 346, 5, 50, 26, 2, 346, 347, 7, 27, 2, 2, 347, 348, 5, 44, 23, 2, 348, 349, 7, 28, 2, 2, 349, 350, 5, 50, 26, 2, 350, 351, 7, 26, 2, 2, 351, 59, 3, 2, 2, 2, 36, 63, 69, 73, 76, 92, 99, 103, 110, 118, 137, 144, 149, 157, 160, 164, 166, 172, 177, 180, 184, 198, 203, 206, 210, 212, 221, 229, 236, 277, 314, 317, 319, 324, 330] \ No newline at end of file diff --git a/RASP_support/zzantlr/RASP.tokens b/RASP_support/zzantlr/RASP.tokens new file mode 100644 index 0000000..04616df --- /dev/null +++ b/RASP_support/zzantlr/RASP.tokens @@ -0,0 +1,112 @@ +T__0=1 +T__1=2 +T__2=3 +T__3=4 +T__4=5 +T__5=6 +T__6=7 +T__7=8 +T__8=9 +T__9=10 +T__10=11 +T__11=12 +T__12=13 +T__13=14 +T__14=15 +T__15=16 +T__16=17 +T__17=18 +T__18=19 +T__19=20 +T__20=21 +T__21=22 +T__22=23 +T__23=24 +T__24=25 +T__25=26 +T__26=27 +T__27=28 +T__28=29 +T__29=30 +T__30=31 +T__31=32 +T__32=33 +T__33=34 +T__34=35 +T__35=36 +T__36=37 +T__37=38 +T__38=39 +T__39=40 +T__40=41 +T__41=42 +T__42=43 +T__43=44 +T__44=45 +T__45=46 +T__46=47 +T__47=48 +T__48=49 +T__49=50 +T__50=51 +T__51=52 +T__52=53 +Float=54 +PosInt=55 +String=56 +Comment=57 +ID=58 +WS=59 +';'=1 +'set'=2 +'s-op'=3 +'selector'=4 +'example'=5 +'show'=6 +'full seq display'=7 +'on'=8 +'off'=9 +'examples'=10 +'exit()'=11 +'exit'=12 +'quit'=13 +'quit()'=14 +'load'=15 +'='=16 +'draw'=17 +'('=18 +','=19 +')'=20 +':'=21 +'def'=22 +'{'=23 +'}'=24 +'for'=25 +'in'=26 +'return'=27 +'aggregate('=28 +'['=29 +']'=30 +'not'=31 +'-'=32 +'+'=33 +'round'=34 +'indicator'=35 +'^'=36 +'*'=37 +'/'=38 +'%'=39 +'=='=40 +'<='=41 +'>='=42 +'>'=43 +'<'=44 +'select('=45 +'!='=46 +'and'=47 +'or'=48 +'if'=49 +'else'=50 +'range('=51 +'zip('=52 +'len('=53 diff --git a/RASP_support/zzantlr/RASPLexer.interp b/RASP_support/zzantlr/RASPLexer.interp new file mode 100644 index 0000000..923be33 --- /dev/null +++ b/RASP_support/zzantlr/RASPLexer.interp @@ -0,0 +1,194 @@ +token literal names: +null +';' +'set' +'s-op' +'selector' +'example' +'show' +'full seq display' +'on' +'off' +'examples' +'exit()' +'exit' +'quit' +'quit()' +'load' +'=' +'draw' +'(' +',' +')' +':' +'def' +'{' +'}' +'for' +'in' +'return' +'aggregate(' +'[' +']' +'not' +'-' +'+' +'round' +'indicator' +'^' +'*' +'/' +'%' +'==' +'<=' +'>=' +'>' +'<' +'select(' +'!=' +'and' +'or' +'if' +'else' +'range(' +'zip(' +'len(' +null +null +null +null +null +null + +token symbolic names: +null +null +null +null +null +null +null +null +null +null +null +null +null +null +null +null +null +null +null +null +null +null +null +null +null +null +null +null +null +null +null +null +null +null +null +null +null +null +null +null +null +null +null +null +null +null +null +null +null +null +null +null +null +null +Float +PosInt +String +Comment +ID +WS + +rule names: +T__0 +T__1 +T__2 +T__3 +T__4 +T__5 +T__6 +T__7 +T__8 +T__9 +T__10 +T__11 +T__12 +T__13 +T__14 +T__15 +T__16 +T__17 +T__18 +T__19 +T__20 +T__21 +T__22 +T__23 +T__24 +T__25 +T__26 +T__27 +T__28 +T__29 +T__30 
+T__31 +T__32 +T__33 +T__34 +T__35 +T__36 +T__37 +T__38 +T__39 +T__40 +T__41 +T__42 +T__43 +T__44 +T__45 +T__46 +T__47 +T__48 +T__49 +T__50 +T__51 +T__52 +Float +PosInt +String +Comment +ID +WS + +channel names: +DEFAULT_TOKEN_CHANNEL +HIDDEN + +mode names: +DEFAULT_MODE + +atn: +[3, 24715, 42794, 33075, 47597, 16764, 15335, 30598, 22884, 2, 61, 395, 8, 1, 4, 2, 9, 2, 4, 3, 9, 3, 4, 4, 9, 4, 4, 5, 9, 5, 4, 6, 9, 6, 4, 7, 9, 7, 4, 8, 9, 8, 4, 9, 9, 9, 4, 10, 9, 10, 4, 11, 9, 11, 4, 12, 9, 12, 4, 13, 9, 13, 4, 14, 9, 14, 4, 15, 9, 15, 4, 16, 9, 16, 4, 17, 9, 17, 4, 18, 9, 18, 4, 19, 9, 19, 4, 20, 9, 20, 4, 21, 9, 21, 4, 22, 9, 22, 4, 23, 9, 23, 4, 24, 9, 24, 4, 25, 9, 25, 4, 26, 9, 26, 4, 27, 9, 27, 4, 28, 9, 28, 4, 29, 9, 29, 4, 30, 9, 30, 4, 31, 9, 31, 4, 32, 9, 32, 4, 33, 9, 33, 4, 34, 9, 34, 4, 35, 9, 35, 4, 36, 9, 36, 4, 37, 9, 37, 4, 38, 9, 38, 4, 39, 9, 39, 4, 40, 9, 40, 4, 41, 9, 41, 4, 42, 9, 42, 4, 43, 9, 43, 4, 44, 9, 44, 4, 45, 9, 45, 4, 46, 9, 46, 4, 47, 9, 47, 4, 48, 9, 48, 4, 49, 9, 49, 4, 50, 9, 50, 4, 51, 9, 51, 4, 52, 9, 52, 4, 53, 9, 53, 4, 54, 9, 54, 4, 55, 9, 55, 4, 56, 9, 56, 4, 57, 9, 57, 4, 58, 9, 58, 4, 59, 9, 59, 4, 60, 9, 60, 3, 2, 3, 2, 3, 3, 3, 3, 3, 3, 3, 3, 3, 4, 3, 4, 3, 4, 3, 4, 3, 4, 3, 5, 3, 5, 3, 5, 3, 5, 3, 5, 3, 5, 3, 5, 3, 5, 3, 5, 3, 6, 3, 6, 3, 6, 3, 6, 3, 6, 3, 6, 3, 6, 3, 6, 3, 7, 3, 7, 3, 7, 3, 7, 3, 7, 3, 8, 3, 8, 3, 8, 3, 8, 3, 8, 3, 8, 3, 8, 3, 8, 3, 8, 3, 8, 3, 8, 3, 8, 3, 8, 3, 8, 3, 8, 3, 8, 3, 8, 3, 9, 3, 9, 3, 9, 3, 10, 3, 10, 3, 10, 3, 10, 3, 11, 3, 11, 3, 11, 3, 11, 3, 11, 3, 11, 3, 11, 3, 11, 3, 11, 3, 12, 3, 12, 3, 12, 3, 12, 3, 12, 3, 12, 3, 12, 3, 13, 3, 13, 3, 13, 3, 13, 3, 13, 3, 14, 3, 14, 3, 14, 3, 14, 3, 14, 3, 15, 3, 15, 3, 15, 3, 15, 3, 15, 3, 15, 3, 15, 3, 16, 3, 16, 3, 16, 3, 16, 3, 16, 3, 17, 3, 17, 3, 18, 3, 18, 3, 18, 3, 18, 3, 18, 3, 19, 3, 19, 3, 20, 3, 20, 3, 21, 3, 21, 3, 22, 3, 22, 3, 23, 3, 23, 3, 23, 3, 23, 3, 24, 3, 24, 3, 25, 3, 25, 3, 26, 3, 26, 3, 26, 3, 26, 3, 27, 3, 27, 3, 27, 3, 28, 3, 28, 3, 28, 3, 28, 3, 28, 3, 28, 3, 28, 3, 29, 3, 29, 3, 29, 3, 29, 3, 29, 3, 29, 3, 29, 3, 29, 3, 29, 3, 29, 3, 29, 3, 30, 3, 30, 3, 31, 3, 31, 3, 32, 3, 32, 3, 32, 3, 32, 3, 33, 3, 33, 3, 34, 3, 34, 3, 35, 3, 35, 3, 35, 3, 35, 3, 35, 3, 35, 3, 36, 3, 36, 3, 36, 3, 36, 3, 36, 3, 36, 3, 36, 3, 36, 3, 36, 3, 36, 3, 37, 3, 37, 3, 38, 3, 38, 3, 39, 3, 39, 3, 40, 3, 40, 3, 41, 3, 41, 3, 41, 3, 42, 3, 42, 3, 42, 3, 43, 3, 43, 3, 43, 3, 44, 3, 44, 3, 45, 3, 45, 3, 46, 3, 46, 3, 46, 3, 46, 3, 46, 3, 46, 3, 46, 3, 46, 3, 47, 3, 47, 3, 47, 3, 48, 3, 48, 3, 48, 3, 48, 3, 49, 3, 49, 3, 49, 3, 50, 3, 50, 3, 50, 3, 51, 3, 51, 3, 51, 3, 51, 3, 51, 3, 52, 3, 52, 3, 52, 3, 52, 3, 52, 3, 52, 3, 52, 3, 53, 3, 53, 3, 53, 3, 53, 3, 53, 3, 54, 3, 54, 3, 54, 3, 54, 3, 54, 3, 55, 3, 55, 3, 55, 3, 55, 3, 56, 6, 56, 362, 10, 56, 13, 56, 14, 56, 363, 3, 57, 3, 57, 7, 57, 368, 10, 57, 12, 57, 14, 57, 371, 11, 57, 3, 57, 3, 57, 3, 58, 3, 58, 7, 58, 377, 10, 58, 12, 58, 14, 58, 380, 11, 58, 3, 59, 3, 59, 7, 59, 384, 10, 59, 12, 59, 14, 59, 387, 11, 59, 3, 60, 6, 60, 390, 10, 60, 13, 60, 14, 60, 391, 3, 60, 3, 60, 2, 2, 61, 3, 3, 5, 4, 7, 5, 9, 6, 11, 7, 13, 8, 15, 9, 17, 10, 19, 11, 21, 12, 23, 13, 25, 14, 27, 15, 29, 16, 31, 17, 33, 18, 35, 19, 37, 20, 39, 21, 41, 22, 43, 23, 45, 24, 47, 25, 49, 26, 51, 27, 53, 28, 55, 29, 57, 30, 59, 31, 61, 32, 63, 33, 65, 34, 67, 35, 69, 36, 71, 37, 73, 38, 75, 39, 77, 40, 79, 41, 81, 42, 83, 43, 85, 44, 87, 45, 89, 46, 91, 47, 93, 48, 95, 49, 97, 50, 99, 51, 101, 52, 103, 53, 105, 54, 107, 55, 109, 56, 111, 57, 113, 
58, 115, 59, 117, 60, 119, 61, 3, 2, 8, 3, 2, 50, 59, 5, 2, 12, 12, 15, 15, 36, 36, 4, 2, 12, 12, 15, 15, 5, 2, 67, 92, 97, 97, 99, 124, 6, 2, 50, 59, 67, 92, 97, 97, 99, 124, 5, 2, 11, 12, 15, 15, 34, 34, 2, 399, 2, 3, 3, 2, 2, 2, 2, 5, 3, 2, 2, 2, 2, 7, 3, 2, 2, 2, 2, 9, 3, 2, 2, 2, 2, 11, 3, 2, 2, 2, 2, 13, 3, 2, 2, 2, 2, 15, 3, 2, 2, 2, 2, 17, 3, 2, 2, 2, 2, 19, 3, 2, 2, 2, 2, 21, 3, 2, 2, 2, 2, 23, 3, 2, 2, 2, 2, 25, 3, 2, 2, 2, 2, 27, 3, 2, 2, 2, 2, 29, 3, 2, 2, 2, 2, 31, 3, 2, 2, 2, 2, 33, 3, 2, 2, 2, 2, 35, 3, 2, 2, 2, 2, 37, 3, 2, 2, 2, 2, 39, 3, 2, 2, 2, 2, 41, 3, 2, 2, 2, 2, 43, 3, 2, 2, 2, 2, 45, 3, 2, 2, 2, 2, 47, 3, 2, 2, 2, 2, 49, 3, 2, 2, 2, 2, 51, 3, 2, 2, 2, 2, 53, 3, 2, 2, 2, 2, 55, 3, 2, 2, 2, 2, 57, 3, 2, 2, 2, 2, 59, 3, 2, 2, 2, 2, 61, 3, 2, 2, 2, 2, 63, 3, 2, 2, 2, 2, 65, 3, 2, 2, 2, 2, 67, 3, 2, 2, 2, 2, 69, 3, 2, 2, 2, 2, 71, 3, 2, 2, 2, 2, 73, 3, 2, 2, 2, 2, 75, 3, 2, 2, 2, 2, 77, 3, 2, 2, 2, 2, 79, 3, 2, 2, 2, 2, 81, 3, 2, 2, 2, 2, 83, 3, 2, 2, 2, 2, 85, 3, 2, 2, 2, 2, 87, 3, 2, 2, 2, 2, 89, 3, 2, 2, 2, 2, 91, 3, 2, 2, 2, 2, 93, 3, 2, 2, 2, 2, 95, 3, 2, 2, 2, 2, 97, 3, 2, 2, 2, 2, 99, 3, 2, 2, 2, 2, 101, 3, 2, 2, 2, 2, 103, 3, 2, 2, 2, 2, 105, 3, 2, 2, 2, 2, 107, 3, 2, 2, 2, 2, 109, 3, 2, 2, 2, 2, 111, 3, 2, 2, 2, 2, 113, 3, 2, 2, 2, 2, 115, 3, 2, 2, 2, 2, 117, 3, 2, 2, 2, 2, 119, 3, 2, 2, 2, 3, 121, 3, 2, 2, 2, 5, 123, 3, 2, 2, 2, 7, 127, 3, 2, 2, 2, 9, 132, 3, 2, 2, 2, 11, 141, 3, 2, 2, 2, 13, 149, 3, 2, 2, 2, 15, 154, 3, 2, 2, 2, 17, 171, 3, 2, 2, 2, 19, 174, 3, 2, 2, 2, 21, 178, 3, 2, 2, 2, 23, 187, 3, 2, 2, 2, 25, 194, 3, 2, 2, 2, 27, 199, 3, 2, 2, 2, 29, 204, 3, 2, 2, 2, 31, 211, 3, 2, 2, 2, 33, 216, 3, 2, 2, 2, 35, 218, 3, 2, 2, 2, 37, 223, 3, 2, 2, 2, 39, 225, 3, 2, 2, 2, 41, 227, 3, 2, 2, 2, 43, 229, 3, 2, 2, 2, 45, 231, 3, 2, 2, 2, 47, 235, 3, 2, 2, 2, 49, 237, 3, 2, 2, 2, 51, 239, 3, 2, 2, 2, 53, 243, 3, 2, 2, 2, 55, 246, 3, 2, 2, 2, 57, 253, 3, 2, 2, 2, 59, 264, 3, 2, 2, 2, 61, 266, 3, 2, 2, 2, 63, 268, 3, 2, 2, 2, 65, 272, 3, 2, 2, 2, 67, 274, 3, 2, 2, 2, 69, 276, 3, 2, 2, 2, 71, 282, 3, 2, 2, 2, 73, 292, 3, 2, 2, 2, 75, 294, 3, 2, 2, 2, 77, 296, 3, 2, 2, 2, 79, 298, 3, 2, 2, 2, 81, 300, 3, 2, 2, 2, 83, 303, 3, 2, 2, 2, 85, 306, 3, 2, 2, 2, 87, 309, 3, 2, 2, 2, 89, 311, 3, 2, 2, 2, 91, 313, 3, 2, 2, 2, 93, 321, 3, 2, 2, 2, 95, 324, 3, 2, 2, 2, 97, 328, 3, 2, 2, 2, 99, 331, 3, 2, 2, 2, 101, 334, 3, 2, 2, 2, 103, 339, 3, 2, 2, 2, 105, 346, 3, 2, 2, 2, 107, 351, 3, 2, 2, 2, 109, 356, 3, 2, 2, 2, 111, 361, 3, 2, 2, 2, 113, 365, 3, 2, 2, 2, 115, 374, 3, 2, 2, 2, 117, 381, 3, 2, 2, 2, 119, 389, 3, 2, 2, 2, 121, 122, 7, 61, 2, 2, 122, 4, 3, 2, 2, 2, 123, 124, 7, 117, 2, 2, 124, 125, 7, 103, 2, 2, 125, 126, 7, 118, 2, 2, 126, 6, 3, 2, 2, 2, 127, 128, 7, 117, 2, 2, 128, 129, 7, 47, 2, 2, 129, 130, 7, 113, 2, 2, 130, 131, 7, 114, 2, 2, 131, 8, 3, 2, 2, 2, 132, 133, 7, 117, 2, 2, 133, 134, 7, 103, 2, 2, 134, 135, 7, 110, 2, 2, 135, 136, 7, 103, 2, 2, 136, 137, 7, 101, 2, 2, 137, 138, 7, 118, 2, 2, 138, 139, 7, 113, 2, 2, 139, 140, 7, 116, 2, 2, 140, 10, 3, 2, 2, 2, 141, 142, 7, 103, 2, 2, 142, 143, 7, 122, 2, 2, 143, 144, 7, 99, 2, 2, 144, 145, 7, 111, 2, 2, 145, 146, 7, 114, 2, 2, 146, 147, 7, 110, 2, 2, 147, 148, 7, 103, 2, 2, 148, 12, 3, 2, 2, 2, 149, 150, 7, 117, 2, 2, 150, 151, 7, 106, 2, 2, 151, 152, 7, 113, 2, 2, 152, 153, 7, 121, 2, 2, 153, 14, 3, 2, 2, 2, 154, 155, 7, 104, 2, 2, 155, 156, 7, 119, 2, 2, 156, 157, 7, 110, 2, 2, 157, 158, 7, 110, 2, 2, 158, 159, 7, 34, 2, 2, 159, 160, 7, 117, 2, 2, 160, 161, 7, 103, 2, 2, 161, 162, 7, 115, 2, 2, 162, 163, 
7, 34, 2, 2, 163, 164, 7, 102, 2, 2, 164, 165, 7, 107, 2, 2, 165, 166, 7, 117, 2, 2, 166, 167, 7, 114, 2, 2, 167, 168, 7, 110, 2, 2, 168, 169, 7, 99, 2, 2, 169, 170, 7, 123, 2, 2, 170, 16, 3, 2, 2, 2, 171, 172, 7, 113, 2, 2, 172, 173, 7, 112, 2, 2, 173, 18, 3, 2, 2, 2, 174, 175, 7, 113, 2, 2, 175, 176, 7, 104, 2, 2, 176, 177, 7, 104, 2, 2, 177, 20, 3, 2, 2, 2, 178, 179, 7, 103, 2, 2, 179, 180, 7, 122, 2, 2, 180, 181, 7, 99, 2, 2, 181, 182, 7, 111, 2, 2, 182, 183, 7, 114, 2, 2, 183, 184, 7, 110, 2, 2, 184, 185, 7, 103, 2, 2, 185, 186, 7, 117, 2, 2, 186, 22, 3, 2, 2, 2, 187, 188, 7, 103, 2, 2, 188, 189, 7, 122, 2, 2, 189, 190, 7, 107, 2, 2, 190, 191, 7, 118, 2, 2, 191, 192, 7, 42, 2, 2, 192, 193, 7, 43, 2, 2, 193, 24, 3, 2, 2, 2, 194, 195, 7, 103, 2, 2, 195, 196, 7, 122, 2, 2, 196, 197, 7, 107, 2, 2, 197, 198, 7, 118, 2, 2, 198, 26, 3, 2, 2, 2, 199, 200, 7, 115, 2, 2, 200, 201, 7, 119, 2, 2, 201, 202, 7, 107, 2, 2, 202, 203, 7, 118, 2, 2, 203, 28, 3, 2, 2, 2, 204, 205, 7, 115, 2, 2, 205, 206, 7, 119, 2, 2, 206, 207, 7, 107, 2, 2, 207, 208, 7, 118, 2, 2, 208, 209, 7, 42, 2, 2, 209, 210, 7, 43, 2, 2, 210, 30, 3, 2, 2, 2, 211, 212, 7, 110, 2, 2, 212, 213, 7, 113, 2, 2, 213, 214, 7, 99, 2, 2, 214, 215, 7, 102, 2, 2, 215, 32, 3, 2, 2, 2, 216, 217, 7, 63, 2, 2, 217, 34, 3, 2, 2, 2, 218, 219, 7, 102, 2, 2, 219, 220, 7, 116, 2, 2, 220, 221, 7, 99, 2, 2, 221, 222, 7, 121, 2, 2, 222, 36, 3, 2, 2, 2, 223, 224, 7, 42, 2, 2, 224, 38, 3, 2, 2, 2, 225, 226, 7, 46, 2, 2, 226, 40, 3, 2, 2, 2, 227, 228, 7, 43, 2, 2, 228, 42, 3, 2, 2, 2, 229, 230, 7, 60, 2, 2, 230, 44, 3, 2, 2, 2, 231, 232, 7, 102, 2, 2, 232, 233, 7, 103, 2, 2, 233, 234, 7, 104, 2, 2, 234, 46, 3, 2, 2, 2, 235, 236, 7, 125, 2, 2, 236, 48, 3, 2, 2, 2, 237, 238, 7, 127, 2, 2, 238, 50, 3, 2, 2, 2, 239, 240, 7, 104, 2, 2, 240, 241, 7, 113, 2, 2, 241, 242, 7, 116, 2, 2, 242, 52, 3, 2, 2, 2, 243, 244, 7, 107, 2, 2, 244, 245, 7, 112, 2, 2, 245, 54, 3, 2, 2, 2, 246, 247, 7, 116, 2, 2, 247, 248, 7, 103, 2, 2, 248, 249, 7, 118, 2, 2, 249, 250, 7, 119, 2, 2, 250, 251, 7, 116, 2, 2, 251, 252, 7, 112, 2, 2, 252, 56, 3, 2, 2, 2, 253, 254, 7, 99, 2, 2, 254, 255, 7, 105, 2, 2, 255, 256, 7, 105, 2, 2, 256, 257, 7, 116, 2, 2, 257, 258, 7, 103, 2, 2, 258, 259, 7, 105, 2, 2, 259, 260, 7, 99, 2, 2, 260, 261, 7, 118, 2, 2, 261, 262, 7, 103, 2, 2, 262, 263, 7, 42, 2, 2, 263, 58, 3, 2, 2, 2, 264, 265, 7, 93, 2, 2, 265, 60, 3, 2, 2, 2, 266, 267, 7, 95, 2, 2, 267, 62, 3, 2, 2, 2, 268, 269, 7, 112, 2, 2, 269, 270, 7, 113, 2, 2, 270, 271, 7, 118, 2, 2, 271, 64, 3, 2, 2, 2, 272, 273, 7, 47, 2, 2, 273, 66, 3, 2, 2, 2, 274, 275, 7, 45, 2, 2, 275, 68, 3, 2, 2, 2, 276, 277, 7, 116, 2, 2, 277, 278, 7, 113, 2, 2, 278, 279, 7, 119, 2, 2, 279, 280, 7, 112, 2, 2, 280, 281, 7, 102, 2, 2, 281, 70, 3, 2, 2, 2, 282, 283, 7, 107, 2, 2, 283, 284, 7, 112, 2, 2, 284, 285, 7, 102, 2, 2, 285, 286, 7, 107, 2, 2, 286, 287, 7, 101, 2, 2, 287, 288, 7, 99, 2, 2, 288, 289, 7, 118, 2, 2, 289, 290, 7, 113, 2, 2, 290, 291, 7, 116, 2, 2, 291, 72, 3, 2, 2, 2, 292, 293, 7, 96, 2, 2, 293, 74, 3, 2, 2, 2, 294, 295, 7, 44, 2, 2, 295, 76, 3, 2, 2, 2, 296, 297, 7, 49, 2, 2, 297, 78, 3, 2, 2, 2, 298, 299, 7, 39, 2, 2, 299, 80, 3, 2, 2, 2, 300, 301, 7, 63, 2, 2, 301, 302, 7, 63, 2, 2, 302, 82, 3, 2, 2, 2, 303, 304, 7, 62, 2, 2, 304, 305, 7, 63, 2, 2, 305, 84, 3, 2, 2, 2, 306, 307, 7, 64, 2, 2, 307, 308, 7, 63, 2, 2, 308, 86, 3, 2, 2, 2, 309, 310, 7, 64, 2, 2, 310, 88, 3, 2, 2, 2, 311, 312, 7, 62, 2, 2, 312, 90, 3, 2, 2, 2, 313, 314, 7, 117, 2, 2, 314, 315, 7, 103, 2, 2, 315, 316, 7, 110, 2, 2, 316, 317, 7, 
103, 2, 2, 317, 318, 7, 101, 2, 2, 318, 319, 7, 118, 2, 2, 319, 320, 7, 42, 2, 2, 320, 92, 3, 2, 2, 2, 321, 322, 7, 35, 2, 2, 322, 323, 7, 63, 2, 2, 323, 94, 3, 2, 2, 2, 324, 325, 7, 99, 2, 2, 325, 326, 7, 112, 2, 2, 326, 327, 7, 102, 2, 2, 327, 96, 3, 2, 2, 2, 328, 329, 7, 113, 2, 2, 329, 330, 7, 116, 2, 2, 330, 98, 3, 2, 2, 2, 331, 332, 7, 107, 2, 2, 332, 333, 7, 104, 2, 2, 333, 100, 3, 2, 2, 2, 334, 335, 7, 103, 2, 2, 335, 336, 7, 110, 2, 2, 336, 337, 7, 117, 2, 2, 337, 338, 7, 103, 2, 2, 338, 102, 3, 2, 2, 2, 339, 340, 7, 116, 2, 2, 340, 341, 7, 99, 2, 2, 341, 342, 7, 112, 2, 2, 342, 343, 7, 105, 2, 2, 343, 344, 7, 103, 2, 2, 344, 345, 7, 42, 2, 2, 345, 104, 3, 2, 2, 2, 346, 347, 7, 124, 2, 2, 347, 348, 7, 107, 2, 2, 348, 349, 7, 114, 2, 2, 349, 350, 7, 42, 2, 2, 350, 106, 3, 2, 2, 2, 351, 352, 7, 110, 2, 2, 352, 353, 7, 103, 2, 2, 353, 354, 7, 112, 2, 2, 354, 355, 7, 42, 2, 2, 355, 108, 3, 2, 2, 2, 356, 357, 5, 111, 56, 2, 357, 358, 7, 48, 2, 2, 358, 359, 5, 111, 56, 2, 359, 110, 3, 2, 2, 2, 360, 362, 9, 2, 2, 2, 361, 360, 3, 2, 2, 2, 362, 363, 3, 2, 2, 2, 363, 361, 3, 2, 2, 2, 363, 364, 3, 2, 2, 2, 364, 112, 3, 2, 2, 2, 365, 369, 7, 36, 2, 2, 366, 368, 10, 3, 2, 2, 367, 366, 3, 2, 2, 2, 368, 371, 3, 2, 2, 2, 369, 367, 3, 2, 2, 2, 369, 370, 3, 2, 2, 2, 370, 372, 3, 2, 2, 2, 371, 369, 3, 2, 2, 2, 372, 373, 7, 36, 2, 2, 373, 114, 3, 2, 2, 2, 374, 378, 7, 37, 2, 2, 375, 377, 10, 4, 2, 2, 376, 375, 3, 2, 2, 2, 377, 380, 3, 2, 2, 2, 378, 376, 3, 2, 2, 2, 378, 379, 3, 2, 2, 2, 379, 116, 3, 2, 2, 2, 380, 378, 3, 2, 2, 2, 381, 385, 9, 5, 2, 2, 382, 384, 9, 6, 2, 2, 383, 382, 3, 2, 2, 2, 384, 387, 3, 2, 2, 2, 385, 383, 3, 2, 2, 2, 385, 386, 3, 2, 2, 2, 386, 118, 3, 2, 2, 2, 387, 385, 3, 2, 2, 2, 388, 390, 9, 7, 2, 2, 389, 388, 3, 2, 2, 2, 390, 391, 3, 2, 2, 2, 391, 389, 3, 2, 2, 2, 391, 392, 3, 2, 2, 2, 392, 393, 3, 2, 2, 2, 393, 394, 8, 60, 2, 2, 394, 120, 3, 2, 2, 2, 8, 2, 363, 369, 378, 385, 391, 3, 8, 2, 2] \ No newline at end of file diff --git a/RASP_support/zzantlr/RASPLexer.py b/RASP_support/zzantlr/RASPLexer.py new file mode 100644 index 0000000..76fa3e9 --- /dev/null +++ b/RASP_support/zzantlr/RASPLexer.py @@ -0,0 +1,278 @@ +# Generated from RASP.g4 by ANTLR 4.9 +from antlr4 import * +from io import StringIO +from typing.io import TextIO +import sys + + + +def serializedATN(): + with StringIO() as buf: + buf.write("\3\u608b\ua72a\u8133\ub9ed\u417c\u3be7\u7786\u5964\2=") + buf.write("\u018b\b\1\4\2\t\2\4\3\t\3\4\4\t\4\4\5\t\5\4\6\t\6\4\7") + buf.write("\t\7\4\b\t\b\4\t\t\t\4\n\t\n\4\13\t\13\4\f\t\f\4\r\t\r") + buf.write("\4\16\t\16\4\17\t\17\4\20\t\20\4\21\t\21\4\22\t\22\4\23") + buf.write("\t\23\4\24\t\24\4\25\t\25\4\26\t\26\4\27\t\27\4\30\t\30") + buf.write("\4\31\t\31\4\32\t\32\4\33\t\33\4\34\t\34\4\35\t\35\4\36") + buf.write("\t\36\4\37\t\37\4 \t \4!\t!\4\"\t\"\4#\t#\4$\t$\4%\t%") + buf.write("\4&\t&\4\'\t\'\4(\t(\4)\t)\4*\t*\4+\t+\4,\t,\4-\t-\4.") + buf.write("\t.\4/\t/\4\60\t\60\4\61\t\61\4\62\t\62\4\63\t\63\4\64") + buf.write("\t\64\4\65\t\65\4\66\t\66\4\67\t\67\48\t8\49\t9\4:\t:") + buf.write("\4;\t;\4<\t<\3\2\3\2\3\3\3\3\3\3\3\3\3\4\3\4\3\4\3\4\3") + buf.write("\4\3\5\3\5\3\5\3\5\3\5\3\5\3\5\3\5\3\5\3\6\3\6\3\6\3\6") + buf.write("\3\6\3\6\3\6\3\6\3\7\3\7\3\7\3\7\3\7\3\b\3\b\3\b\3\b\3") + buf.write("\b\3\b\3\b\3\b\3\b\3\b\3\b\3\b\3\b\3\b\3\b\3\b\3\b\3\t") + buf.write("\3\t\3\t\3\n\3\n\3\n\3\n\3\13\3\13\3\13\3\13\3\13\3\13") + buf.write("\3\13\3\13\3\13\3\f\3\f\3\f\3\f\3\f\3\f\3\f\3\r\3\r\3") + buf.write("\r\3\r\3\r\3\16\3\16\3\16\3\16\3\16\3\17\3\17\3\17\3\17") + 
buf.write("\3\17\3\17\3\17\3\20\3\20\3\20\3\20\3\20\3\21\3\21\3\22") + buf.write("\3\22\3\22\3\22\3\22\3\23\3\23\3\24\3\24\3\25\3\25\3\26") + buf.write("\3\26\3\27\3\27\3\27\3\27\3\30\3\30\3\31\3\31\3\32\3\32") + buf.write("\3\32\3\32\3\33\3\33\3\33\3\34\3\34\3\34\3\34\3\34\3\34") + buf.write("\3\34\3\35\3\35\3\35\3\35\3\35\3\35\3\35\3\35\3\35\3\35") + buf.write("\3\35\3\36\3\36\3\37\3\37\3 \3 \3 \3 \3!\3!\3\"\3\"\3") + buf.write("#\3#\3#\3#\3#\3#\3$\3$\3$\3$\3$\3$\3$\3$\3$\3$\3%\3%\3") + buf.write("&\3&\3\'\3\'\3(\3(\3)\3)\3)\3*\3*\3*\3+\3+\3+\3,\3,\3") + buf.write("-\3-\3.\3.\3.\3.\3.\3.\3.\3.\3/\3/\3/\3\60\3\60\3\60\3") + buf.write("\60\3\61\3\61\3\61\3\62\3\62\3\62\3\63\3\63\3\63\3\63") + buf.write("\3\63\3\64\3\64\3\64\3\64\3\64\3\64\3\64\3\65\3\65\3\65") + buf.write("\3\65\3\65\3\66\3\66\3\66\3\66\3\66\3\67\3\67\3\67\3\67") + buf.write("\38\68\u016a\n8\r8\168\u016b\39\39\79\u0170\n9\f9\169") + buf.write("\u0173\139\39\39\3:\3:\7:\u0179\n:\f:\16:\u017c\13:\3") + buf.write(";\3;\7;\u0180\n;\f;\16;\u0183\13;\3<\6<\u0186\n<\r<\16") + buf.write("<\u0187\3<\3<\2\2=\3\3\5\4\7\5\t\6\13\7\r\b\17\t\21\n") + buf.write("\23\13\25\f\27\r\31\16\33\17\35\20\37\21!\22#\23%\24\'") + buf.write("\25)\26+\27-\30/\31\61\32\63\33\65\34\67\359\36;\37= ") + buf.write("?!A\"C#E$G%I&K\'M(O)Q*S+U,W-Y.[/]\60_\61a\62c\63e\64g") + buf.write("\65i\66k\67m8o9q:s;u\3\2\2") + buf.write("\2\u010c\u010d\7p\2\2\u010d\u010e\7q\2\2\u010e\u010f\7") + buf.write("v\2\2\u010f@\3\2\2\2\u0110\u0111\7/\2\2\u0111B\3\2\2\2") + buf.write("\u0112\u0113\7-\2\2\u0113D\3\2\2\2\u0114\u0115\7t\2\2") + buf.write("\u0115\u0116\7q\2\2\u0116\u0117\7w\2\2\u0117\u0118\7p") + buf.write("\2\2\u0118\u0119\7f\2\2\u0119F\3\2\2\2\u011a\u011b\7k") + buf.write("\2\2\u011b\u011c\7p\2\2\u011c\u011d\7f\2\2\u011d\u011e") + buf.write("\7k\2\2\u011e\u011f\7e\2\2\u011f\u0120\7c\2\2\u0120\u0121") + buf.write("\7v\2\2\u0121\u0122\7q\2\2\u0122\u0123\7t\2\2\u0123H\3") + buf.write("\2\2\2\u0124\u0125\7`\2\2\u0125J\3\2\2\2\u0126\u0127\7") + buf.write(",\2\2\u0127L\3\2\2\2\u0128\u0129\7\61\2\2\u0129N\3\2\2") + buf.write("\2\u012a\u012b\7\'\2\2\u012bP\3\2\2\2\u012c\u012d\7?\2") + buf.write("\2\u012d\u012e\7?\2\2\u012eR\3\2\2\2\u012f\u0130\7>\2") + buf.write("\2\u0130\u0131\7?\2\2\u0131T\3\2\2\2\u0132\u0133\7@\2") + buf.write("\2\u0133\u0134\7?\2\2\u0134V\3\2\2\2\u0135\u0136\7@\2") + buf.write("\2\u0136X\3\2\2\2\u0137\u0138\7>\2\2\u0138Z\3\2\2\2\u0139") + buf.write("\u013a\7u\2\2\u013a\u013b\7g\2\2\u013b\u013c\7n\2\2\u013c") + buf.write("\u013d\7g\2\2\u013d\u013e\7e\2\2\u013e\u013f\7v\2\2\u013f") + buf.write("\u0140\7*\2\2\u0140\\\3\2\2\2\u0141\u0142\7#\2\2\u0142") + buf.write("\u0143\7?\2\2\u0143^\3\2\2\2\u0144\u0145\7c\2\2\u0145") + buf.write("\u0146\7p\2\2\u0146\u0147\7f\2\2\u0147`\3\2\2\2\u0148") + buf.write("\u0149\7q\2\2\u0149\u014a\7t\2\2\u014ab\3\2\2\2\u014b") + buf.write("\u014c\7k\2\2\u014c\u014d\7h\2\2\u014dd\3\2\2\2\u014e") + buf.write("\u014f\7g\2\2\u014f\u0150\7n\2\2\u0150\u0151\7u\2\2\u0151") + buf.write("\u0152\7g\2\2\u0152f\3\2\2\2\u0153\u0154\7t\2\2\u0154") + buf.write("\u0155\7c\2\2\u0155\u0156\7p\2\2\u0156\u0157\7i\2\2\u0157") + buf.write("\u0158\7g\2\2\u0158\u0159\7*\2\2\u0159h\3\2\2\2\u015a") + buf.write("\u015b\7|\2\2\u015b\u015c\7k\2\2\u015c\u015d\7r\2\2\u015d") + buf.write("\u015e\7*\2\2\u015ej\3\2\2\2\u015f\u0160\7n\2\2\u0160") + buf.write("\u0161\7g\2\2\u0161\u0162\7p\2\2\u0162\u0163\7*\2\2\u0163") + buf.write("l\3\2\2\2\u0164\u0165\5o8\2\u0165\u0166\7\60\2\2\u0166") + buf.write("\u0167\5o8\2\u0167n\3\2\2\2\u0168\u016a\t\2\2\2\u0169") + 
buf.write("\u0168\3\2\2\2\u016a\u016b\3\2\2\2\u016b\u0169\3\2\2\2") + buf.write("\u016b\u016c\3\2\2\2\u016cp\3\2\2\2\u016d\u0171\7$\2\2") + buf.write("\u016e\u0170\n\3\2\2\u016f\u016e\3\2\2\2\u0170\u0173\3") + buf.write("\2\2\2\u0171\u016f\3\2\2\2\u0171\u0172\3\2\2\2\u0172\u0174") + buf.write("\3\2\2\2\u0173\u0171\3\2\2\2\u0174\u0175\7$\2\2\u0175") + buf.write("r\3\2\2\2\u0176\u017a\7%\2\2\u0177\u0179\n\4\2\2\u0178") + buf.write("\u0177\3\2\2\2\u0179\u017c\3\2\2\2\u017a\u0178\3\2\2\2") + buf.write("\u017a\u017b\3\2\2\2\u017bt\3\2\2\2\u017c\u017a\3\2\2") + buf.write("\2\u017d\u0181\t\5\2\2\u017e\u0180\t\6\2\2\u017f\u017e") + buf.write("\3\2\2\2\u0180\u0183\3\2\2\2\u0181\u017f\3\2\2\2\u0181") + buf.write("\u0182\3\2\2\2\u0182v\3\2\2\2\u0183\u0181\3\2\2\2\u0184") + buf.write("\u0186\t\7\2\2\u0185\u0184\3\2\2\2\u0186\u0187\3\2\2\2") + buf.write("\u0187\u0185\3\2\2\2\u0187\u0188\3\2\2\2\u0188\u0189\3") + buf.write("\2\2\2\u0189\u018a\b<\2\2\u018ax\3\2\2\2\b\2\u016b\u0171") + buf.write("\u017a\u0181\u0187\3\b\2\2") + return buf.getvalue() + + +class RASPLexer(Lexer): + + atn = ATNDeserializer().deserialize(serializedATN()) + + decisionsToDFA = [ DFA(ds, i) for i, ds in enumerate(atn.decisionToState) ] + + T__0 = 1 + T__1 = 2 + T__2 = 3 + T__3 = 4 + T__4 = 5 + T__5 = 6 + T__6 = 7 + T__7 = 8 + T__8 = 9 + T__9 = 10 + T__10 = 11 + T__11 = 12 + T__12 = 13 + T__13 = 14 + T__14 = 15 + T__15 = 16 + T__16 = 17 + T__17 = 18 + T__18 = 19 + T__19 = 20 + T__20 = 21 + T__21 = 22 + T__22 = 23 + T__23 = 24 + T__24 = 25 + T__25 = 26 + T__26 = 27 + T__27 = 28 + T__28 = 29 + T__29 = 30 + T__30 = 31 + T__31 = 32 + T__32 = 33 + T__33 = 34 + T__34 = 35 + T__35 = 36 + T__36 = 37 + T__37 = 38 + T__38 = 39 + T__39 = 40 + T__40 = 41 + T__41 = 42 + T__42 = 43 + T__43 = 44 + T__44 = 45 + T__45 = 46 + T__46 = 47 + T__47 = 48 + T__48 = 49 + T__49 = 50 + T__50 = 51 + T__51 = 52 + T__52 = 53 + Float = 54 + PosInt = 55 + String = 56 + Comment = 57 + ID = 58 + WS = 59 + + channelNames = [ u"DEFAULT_TOKEN_CHANNEL", u"HIDDEN" ] + + modeNames = [ "DEFAULT_MODE" ] + + literalNames = [ "", + "';'", "'set'", "'s-op'", "'selector'", "'example'", "'show'", + "'full seq display'", "'on'", "'off'", "'examples'", "'exit()'", + "'exit'", "'quit'", "'quit()'", "'load'", "'='", "'draw'", "'('", + "','", "')'", "':'", "'def'", "'{'", "'}'", "'for'", "'in'", + "'return'", "'aggregate('", "'['", "']'", "'not'", "'-'", "'+'", + "'round'", "'indicator'", "'^'", "'*'", "'/'", "'%'", "'=='", + "'<='", "'>='", "'>'", "'<'", "'select('", "'!='", "'and'", + "'or'", "'if'", "'else'", "'range('", "'zip('", "'len('" ] + + symbolicNames = [ "", + "Float", "PosInt", "String", "Comment", "ID", "WS" ] + + ruleNames = [ "T__0", "T__1", "T__2", "T__3", "T__4", "T__5", "T__6", + "T__7", "T__8", "T__9", "T__10", "T__11", "T__12", "T__13", + "T__14", "T__15", "T__16", "T__17", "T__18", "T__19", + "T__20", "T__21", "T__22", "T__23", "T__24", "T__25", + "T__26", "T__27", "T__28", "T__29", "T__30", "T__31", + "T__32", "T__33", "T__34", "T__35", "T__36", "T__37", + "T__38", "T__39", "T__40", "T__41", "T__42", "T__43", + "T__44", "T__45", "T__46", "T__47", "T__48", "T__49", + "T__50", "T__51", "T__52", "Float", "PosInt", "String", + "Comment", "ID", "WS" ] + + grammarFileName = "RASP.g4" + + def __init__(self, input=None, output:TextIO = sys.stdout): + super().__init__(input, output) + self.checkVersion("4.9") + self._interp = LexerATNSimulator(self, self.atn, self.decisionsToDFA, PredictionContextCache()) + self._actions = None + self._predicates = None + + diff --git 
a/RASP_support/zzantlr/RASPLexer.tokens b/RASP_support/zzantlr/RASPLexer.tokens new file mode 100644 index 0000000..04616df --- /dev/null +++ b/RASP_support/zzantlr/RASPLexer.tokens @@ -0,0 +1,112 @@ +T__0=1 +T__1=2 +T__2=3 +T__3=4 +T__4=5 +T__5=6 +T__6=7 +T__7=8 +T__8=9 +T__9=10 +T__10=11 +T__11=12 +T__12=13 +T__13=14 +T__14=15 +T__15=16 +T__16=17 +T__17=18 +T__18=19 +T__19=20 +T__20=21 +T__21=22 +T__22=23 +T__23=24 +T__24=25 +T__25=26 +T__26=27 +T__27=28 +T__28=29 +T__29=30 +T__30=31 +T__31=32 +T__32=33 +T__33=34 +T__34=35 +T__35=36 +T__36=37 +T__37=38 +T__38=39 +T__39=40 +T__40=41 +T__41=42 +T__42=43 +T__43=44 +T__44=45 +T__45=46 +T__46=47 +T__47=48 +T__48=49 +T__49=50 +T__50=51 +T__51=52 +T__52=53 +Float=54 +PosInt=55 +String=56 +Comment=57 +ID=58 +WS=59 +';'=1 +'set'=2 +'s-op'=3 +'selector'=4 +'example'=5 +'show'=6 +'full seq display'=7 +'on'=8 +'off'=9 +'examples'=10 +'exit()'=11 +'exit'=12 +'quit'=13 +'quit()'=14 +'load'=15 +'='=16 +'draw'=17 +'('=18 +','=19 +')'=20 +':'=21 +'def'=22 +'{'=23 +'}'=24 +'for'=25 +'in'=26 +'return'=27 +'aggregate('=28 +'['=29 +']'=30 +'not'=31 +'-'=32 +'+'=33 +'round'=34 +'indicator'=35 +'^'=36 +'*'=37 +'/'=38 +'%'=39 +'=='=40 +'<='=41 +'>='=42 +'>'=43 +'<'=44 +'select('=45 +'!='=46 +'and'=47 +'or'=48 +'if'=49 +'else'=50 +'range('=51 +'zip('=52 +'len('=53 diff --git a/RASP_support/zzantlr/RASPListener.py b/RASP_support/zzantlr/RASPListener.py new file mode 100644 index 0000000..2d404b9 --- /dev/null +++ b/RASP_support/zzantlr/RASPListener.py @@ -0,0 +1,273 @@ +# Generated from RASP.g4 by ANTLR 4.9 +from antlr4 import * +if __name__ is not None and "." in __name__: + from .RASPParser import RASPParser +else: + from RASPParser import RASPParser + +# This class defines a complete listener for a parse tree produced by RASPParser. +class RASPListener(ParseTreeListener): + + # Enter a parse tree produced by RASPParser#r. + def enterR(self, ctx:RASPParser.RContext): + pass + + # Exit a parse tree produced by RASPParser#r. + def exitR(self, ctx:RASPParser.RContext): + pass + + + # Enter a parse tree produced by RASPParser#statement. + def enterStatement(self, ctx:RASPParser.StatementContext): + pass + + # Exit a parse tree produced by RASPParser#statement. + def exitStatement(self, ctx:RASPParser.StatementContext): + pass + + + # Enter a parse tree produced by RASPParser#raspstatement. + def enterRaspstatement(self, ctx:RASPParser.RaspstatementContext): + pass + + # Exit a parse tree produced by RASPParser#raspstatement. + def exitRaspstatement(self, ctx:RASPParser.RaspstatementContext): + pass + + + # Enter a parse tree produced by RASPParser#replstatement. + def enterReplstatement(self, ctx:RASPParser.ReplstatementContext): + pass + + # Exit a parse tree produced by RASPParser#replstatement. + def exitReplstatement(self, ctx:RASPParser.ReplstatementContext): + pass + + + # Enter a parse tree produced by RASPParser#setExample. + def enterSetExample(self, ctx:RASPParser.SetExampleContext): + pass + + # Exit a parse tree produced by RASPParser#setExample. + def exitSetExample(self, ctx:RASPParser.SetExampleContext): + pass + + + # Enter a parse tree produced by RASPParser#showExample. + def enterShowExample(self, ctx:RASPParser.ShowExampleContext): + pass + + # Exit a parse tree produced by RASPParser#showExample. + def exitShowExample(self, ctx:RASPParser.ShowExampleContext): + pass + + + # Enter a parse tree produced by RASPParser#toggleSeqVerbose. 
+ def enterToggleSeqVerbose(self, ctx:RASPParser.ToggleSeqVerboseContext): + pass + + # Exit a parse tree produced by RASPParser#toggleSeqVerbose. + def exitToggleSeqVerbose(self, ctx:RASPParser.ToggleSeqVerboseContext): + pass + + + # Enter a parse tree produced by RASPParser#toggleExample. + def enterToggleExample(self, ctx:RASPParser.ToggleExampleContext): + pass + + # Exit a parse tree produced by RASPParser#toggleExample. + def exitToggleExample(self, ctx:RASPParser.ToggleExampleContext): + pass + + + # Enter a parse tree produced by RASPParser#exit. + def enterExit(self, ctx:RASPParser.ExitContext): + pass + + # Exit a parse tree produced by RASPParser#exit. + def exitExit(self, ctx:RASPParser.ExitContext): + pass + + + # Enter a parse tree produced by RASPParser#loadFile. + def enterLoadFile(self, ctx:RASPParser.LoadFileContext): + pass + + # Exit a parse tree produced by RASPParser#loadFile. + def exitLoadFile(self, ctx:RASPParser.LoadFileContext): + pass + + + # Enter a parse tree produced by RASPParser#assign. + def enterAssign(self, ctx:RASPParser.AssignContext): + pass + + # Exit a parse tree produced by RASPParser#assign. + def exitAssign(self, ctx:RASPParser.AssignContext): + pass + + + # Enter a parse tree produced by RASPParser#draw. + def enterDraw(self, ctx:RASPParser.DrawContext): + pass + + # Exit a parse tree produced by RASPParser#draw. + def exitDraw(self, ctx:RASPParser.DrawContext): + pass + + + # Enter a parse tree produced by RASPParser#exprsList. + def enterExprsList(self, ctx:RASPParser.ExprsListContext): + pass + + # Exit a parse tree produced by RASPParser#exprsList. + def exitExprsList(self, ctx:RASPParser.ExprsListContext): + pass + + + # Enter a parse tree produced by RASPParser#namedExprsList. + def enterNamedExprsList(self, ctx:RASPParser.NamedExprsListContext): + pass + + # Exit a parse tree produced by RASPParser#namedExprsList. + def exitNamedExprsList(self, ctx:RASPParser.NamedExprsListContext): + pass + + + # Enter a parse tree produced by RASPParser#namedExpr. + def enterNamedExpr(self, ctx:RASPParser.NamedExprContext): + pass + + # Exit a parse tree produced by RASPParser#namedExpr. + def exitNamedExpr(self, ctx:RASPParser.NamedExprContext): + pass + + + # Enter a parse tree produced by RASPParser#raspstatementsList. + def enterRaspstatementsList(self, ctx:RASPParser.RaspstatementsListContext): + pass + + # Exit a parse tree produced by RASPParser#raspstatementsList. + def exitRaspstatementsList(self, ctx:RASPParser.RaspstatementsListContext): + pass + + + # Enter a parse tree produced by RASPParser#funcDef. + def enterFuncDef(self, ctx:RASPParser.FuncDefContext): + pass + + # Exit a parse tree produced by RASPParser#funcDef. + def exitFuncDef(self, ctx:RASPParser.FuncDefContext): + pass + + + # Enter a parse tree produced by RASPParser#forLoop. + def enterForLoop(self, ctx:RASPParser.ForLoopContext): + pass + + # Exit a parse tree produced by RASPParser#forLoop. + def exitForLoop(self, ctx:RASPParser.ForLoopContext): + pass + + + # Enter a parse tree produced by RASPParser#commentsList. + def enterCommentsList(self, ctx:RASPParser.CommentsListContext): + pass + + # Exit a parse tree produced by RASPParser#commentsList. + def exitCommentsList(self, ctx:RASPParser.CommentsListContext): + pass + + + # Enter a parse tree produced by RASPParser#assignsAndCommentsList. + def enterAssignsAndCommentsList(self, ctx:RASPParser.AssignsAndCommentsListContext): + pass + + # Exit a parse tree produced by RASPParser#assignsAndCommentsList. 
+ def exitAssignsAndCommentsList(self, ctx:RASPParser.AssignsAndCommentsListContext): + pass + + + # Enter a parse tree produced by RASPParser#returnStatement. + def enterReturnStatement(self, ctx:RASPParser.ReturnStatementContext): + pass + + # Exit a parse tree produced by RASPParser#returnStatement. + def exitReturnStatement(self, ctx:RASPParser.ReturnStatementContext): + pass + + + # Enter a parse tree produced by RASPParser#idsList. + def enterIdsList(self, ctx:RASPParser.IdsListContext): + pass + + # Exit a parse tree produced by RASPParser#idsList. + def exitIdsList(self, ctx:RASPParser.IdsListContext): + pass + + + # Enter a parse tree produced by RASPParser#aggregateExpr. + def enterAggregateExpr(self, ctx:RASPParser.AggregateExprContext): + pass + + # Exit a parse tree produced by RASPParser#aggregateExpr. + def exitAggregateExpr(self, ctx:RASPParser.AggregateExprContext): + pass + + + # Enter a parse tree produced by RASPParser#atom. + def enterAtom(self, ctx:RASPParser.AtomContext): + pass + + # Exit a parse tree produced by RASPParser#atom. + def exitAtom(self, ctx:RASPParser.AtomContext): + pass + + + # Enter a parse tree produced by RASPParser#expr. + def enterExpr(self, ctx:RASPParser.ExprContext): + pass + + # Exit a parse tree produced by RASPParser#expr. + def exitExpr(self, ctx:RASPParser.ExprContext): + pass + + + # Enter a parse tree produced by RASPParser#aList. + def enterAList(self, ctx:RASPParser.AListContext): + pass + + # Exit a parse tree produced by RASPParser#aList. + def exitAList(self, ctx:RASPParser.AListContext): + pass + + + # Enter a parse tree produced by RASPParser#aDict. + def enterADict(self, ctx:RASPParser.ADictContext): + pass + + # Exit a parse tree produced by RASPParser#aDict. + def exitADict(self, ctx:RASPParser.ADictContext): + pass + + + # Enter a parse tree produced by RASPParser#listCompExpr. + def enterListCompExpr(self, ctx:RASPParser.ListCompExprContext): + pass + + # Exit a parse tree produced by RASPParser#listCompExpr. + def exitListCompExpr(self, ctx:RASPParser.ListCompExprContext): + pass + + + # Enter a parse tree produced by RASPParser#dictCompExpr. + def enterDictCompExpr(self, ctx:RASPParser.DictCompExprContext): + pass + + # Exit a parse tree produced by RASPParser#dictCompExpr. 
+ def exitDictCompExpr(self, ctx:RASPParser.DictCompExprContext): + pass + + + +del RASPParser \ No newline at end of file diff --git a/RASP_support/zzantlr/RASPParser.py b/RASP_support/zzantlr/RASPParser.py new file mode 100644 index 0000000..adb15c6 --- /dev/null +++ b/RASP_support/zzantlr/RASPParser.py @@ -0,0 +1,2785 @@ +# Generated from RASP.g4 by ANTLR 4.9 +# encoding: utf-8 +from antlr4 import * +from io import StringIO +import sys +if sys.version_info[1] > 5: + from typing import TextIO +else: + from typing.io import TextIO + + +def serializedATN(): + with StringIO() as buf: + buf.write("\3\u608b\ua72a\u8133\ub9ed\u417c\u3be7\u7786\u5964\3=") + buf.write("\u0161\4\2\t\2\4\3\t\3\4\4\t\4\4\5\t\5\4\6\t\6\4\7\t\7") + buf.write("\4\b\t\b\4\t\t\t\4\n\t\n\4\13\t\13\4\f\t\f\4\r\t\r\4\16") + buf.write("\t\16\4\17\t\17\4\20\t\20\4\21\t\21\4\22\t\22\4\23\t\23") + buf.write("\4\24\t\24\4\25\t\25\4\26\t\26\4\27\t\27\4\30\t\30\4\31") + buf.write("\t\31\4\32\t\32\4\33\t\33\4\34\t\34\4\35\t\35\4\36\t\36") + buf.write("\3\2\6\2>\n\2\r\2\16\2?\3\2\3\2\3\3\3\3\5\3F\n\3\3\3\3") + buf.write("\3\5\3J\n\3\3\3\5\3M\n\3\3\4\3\4\3\4\3\4\3\4\3\4\3\4\3") + buf.write("\4\3\4\3\4\3\4\3\4\3\4\3\4\5\4]\n\4\3\5\3\5\3\5\3\5\3") + buf.write("\5\5\5d\n\5\3\6\3\6\5\6h\n\6\3\6\3\6\3\6\3\7\3\7\5\7o") + buf.write("\n\7\3\7\3\7\3\b\3\b\3\b\3\t\5\tw\n\t\3\t\3\t\3\t\3\n") + buf.write("\3\n\3\13\3\13\3\13\3\f\3\f\3\f\3\f\3\r\3\r\3\r\3\r\3") + buf.write("\r\5\r\u008a\n\r\3\r\3\r\3\16\3\16\3\16\5\16\u0091\n\16") + buf.write("\3\17\3\17\3\17\5\17\u0096\n\17\3\20\3\20\3\20\3\20\3") + buf.write("\21\3\21\5\21\u009e\n\21\3\21\5\21\u00a1\n\21\3\21\3\21") + buf.write("\5\21\u00a5\n\21\5\21\u00a7\n\21\3\22\3\22\3\22\3\22\5") + buf.write("\22\u00ad\n\22\3\22\3\22\3\22\5\22\u00b2\n\22\3\22\5\22") + buf.write("\u00b5\n\22\3\22\3\22\5\22\u00b9\n\22\3\22\3\22\3\23\3") + buf.write("\23\3\23\3\23\3\23\3\23\3\23\3\23\3\24\3\24\5\24\u00c7") + buf.write("\n\24\3\25\3\25\3\25\5\25\u00cc\n\25\3\25\5\25\u00cf\n") + buf.write("\25\3\25\3\25\5\25\u00d3\n\25\5\25\u00d5\n\25\3\26\3\26") + buf.write("\3\26\3\26\3\27\3\27\3\27\5\27\u00de\n\27\3\30\3\30\3") + buf.write("\30\3\30\3\30\3\30\5\30\u00e6\n\30\3\30\3\30\3\31\3\31") + buf.write("\3\31\5\31\u00ed\n\31\3\32\3\32\3\32\3\32\3\32\3\32\3") + buf.write("\32\3\32\3\32\3\32\3\32\3\32\3\32\3\32\3\32\3\32\3\32") + buf.write("\3\32\3\32\3\32\3\32\3\32\3\32\3\32\3\32\3\32\3\32\3\32") + buf.write("\3\32\3\32\3\32\3\32\3\32\3\32\3\32\3\32\3\32\3\32\3\32") + buf.write("\5\32\u0116\n\32\3\32\3\32\3\32\3\32\3\32\3\32\3\32\3") + buf.write("\32\3\32\3\32\3\32\3\32\3\32\3\32\3\32\3\32\3\32\3\32") + buf.write("\3\32\3\32\3\32\3\32\3\32\3\32\3\32\3\32\3\32\3\32\3\32") + buf.write("\3\32\3\32\3\32\3\32\3\32\3\32\5\32\u013b\n\32\3\32\7") + buf.write("\32\u013e\n\32\f\32\16\32\u0141\13\32\3\33\3\33\5\33\u0145") + buf.write("\n\33\3\33\3\33\3\34\3\34\5\34\u014b\n\34\3\34\3\34\3") + buf.write("\35\3\35\3\35\3\35\3\35\3\35\3\35\3\35\3\36\3\36\3\36") + buf.write("\3\36\3\36\3\36\3\36\3\36\3\36\3\36\3\36\3?\3\62\37\2") + buf.write("\4\6\b\n\f\16\20\22\24\26\30\32\34\36 \"$&(*,.\60\62\64") + buf.write("\668:\2\f\3\2\5\6\3\2\n\13\3\2\r\20\3\2!#\3\2$%\4\2*.") + buf.write("\60\60\3\2\'(\3\2\"#\3\2*.\3\2\61\62\2\u0182\2=\3\2\2") + buf.write("\2\4L\3\2\2\2\6\\\3\2\2\2\bc\3\2\2\2\ne\3\2\2\2\fl\3\2") + buf.write("\2\2\16r\3\2\2\2\20v\3\2\2\2\22{\3\2\2\2\24}\3\2\2\2\26") + buf.write("\u0080\3\2\2\2\30\u0084\3\2\2\2\32\u008d\3\2\2\2\34\u0092") + buf.write("\3\2\2\2\36\u0097\3\2\2\2 \u00a6\3\2\2\2\"\u00a8\3\2\2") + 
buf.write("\2$\u00bc\3\2\2\2&\u00c4\3\2\2\2(\u00d4\3\2\2\2*\u00d6") + buf.write("\3\2\2\2,\u00da\3\2\2\2.\u00df\3\2\2\2\60\u00ec\3\2\2") + buf.write("\2\62\u0115\3\2\2\2\64\u0142\3\2\2\2\66\u0148\3\2\2\2") + buf.write("8\u014e\3\2\2\2:\u0156\3\2\2\2<>\5\4\3\2=<\3\2\2\2>?\3") + buf.write("\2\2\2?@\3\2\2\2?=\3\2\2\2@A\3\2\2\2AB\7\2\2\3B\3\3\2") + buf.write("\2\2CE\5\6\4\2DF\7;\2\2ED\3\2\2\2EF\3\2\2\2FM\3\2\2\2") + buf.write("GI\5\b\5\2HJ\7;\2\2IH\3\2\2\2IJ\3\2\2\2JM\3\2\2\2KM\7") + buf.write(";\2\2LC\3\2\2\2LG\3\2\2\2LK\3\2\2\2M\5\3\2\2\2NO\5\62") + buf.write("\32\2OP\7\3\2\2P]\3\2\2\2QR\5\26\f\2RS\7\3\2\2S]\3\2\2") + buf.write("\2TU\5\30\r\2UV\7\3\2\2V]\3\2\2\2WX\5\24\13\2XY\7\3\2") + buf.write("\2Y]\3\2\2\2Z]\5\"\22\2[]\5$\23\2\\N\3\2\2\2\\Q\3\2\2") + buf.write("\2\\T\3\2\2\2\\W\3\2\2\2\\Z\3\2\2\2\\[\3\2\2\2]\7\3\2") + buf.write("\2\2^d\5\n\6\2_d\5\f\7\2`d\5\20\t\2ad\5\16\b\2bd\5\22") + buf.write("\n\2c^\3\2\2\2c_\3\2\2\2c`\3\2\2\2ca\3\2\2\2cb\3\2\2\2") + buf.write("d\t\3\2\2\2eg\7\4\2\2fh\t\2\2\2gf\3\2\2\2gh\3\2\2\2hi") + buf.write("\3\2\2\2ij\7\7\2\2jk\5\62\32\2k\13\3\2\2\2ln\7\b\2\2m") + buf.write("o\t\2\2\2nm\3\2\2\2no\3\2\2\2op\3\2\2\2pq\7\7\2\2q\r\3") + buf.write("\2\2\2rs\7\t\2\2st\t\3\2\2t\17\3\2\2\2uw\t\2\2\2vu\3\2") + buf.write("\2\2vw\3\2\2\2wx\3\2\2\2xy\7\f\2\2yz\t\3\2\2z\21\3\2\2") + buf.write("\2{|\t\4\2\2|\23\3\2\2\2}~\7\21\2\2~\177\7:\2\2\177\25") + buf.write("\3\2\2\2\u0080\u0081\5,\27\2\u0081\u0082\7\22\2\2\u0082") + buf.write("\u0083\5\32\16\2\u0083\27\3\2\2\2\u0084\u0085\7\23\2\2") + buf.write("\u0085\u0086\7\24\2\2\u0086\u0089\5\62\32\2\u0087\u0088") + buf.write("\7\25\2\2\u0088\u008a\5\62\32\2\u0089\u0087\3\2\2\2\u0089") + buf.write("\u008a\3\2\2\2\u008a\u008b\3\2\2\2\u008b\u008c\7\26\2") + buf.write("\2\u008c\31\3\2\2\2\u008d\u0090\5\62\32\2\u008e\u008f") + buf.write("\7\25\2\2\u008f\u0091\5\32\16\2\u0090\u008e\3\2\2\2\u0090") + buf.write("\u0091\3\2\2\2\u0091\33\3\2\2\2\u0092\u0095\5\36\20\2") + buf.write("\u0093\u0094\7\25\2\2\u0094\u0096\5\34\17\2\u0095\u0093") + buf.write("\3\2\2\2\u0095\u0096\3\2\2\2\u0096\35\3\2\2\2\u0097\u0098") + buf.write("\5\62\32\2\u0098\u0099\7\27\2\2\u0099\u009a\5\62\32\2") + buf.write("\u009a\37\3\2\2\2\u009b\u009d\5\6\4\2\u009c\u009e\7;\2") + buf.write("\2\u009d\u009c\3\2\2\2\u009d\u009e\3\2\2\2\u009e\u00a0") + buf.write("\3\2\2\2\u009f\u00a1\5 \21\2\u00a0\u009f\3\2\2\2\u00a0") + buf.write("\u00a1\3\2\2\2\u00a1\u00a7\3\2\2\2\u00a2\u00a4\7;\2\2") + buf.write("\u00a3\u00a5\5 \21\2\u00a4\u00a3\3\2\2\2\u00a4\u00a5\3") + buf.write("\2\2\2\u00a5\u00a7\3\2\2\2\u00a6\u009b\3\2\2\2\u00a6\u00a2") + buf.write("\3\2\2\2\u00a7!\3\2\2\2\u00a8\u00a9\7\30\2\2\u00a9\u00aa") + buf.write("\7<\2\2\u00aa\u00ac\7\24\2\2\u00ab\u00ad\5,\27\2\u00ac") + buf.write("\u00ab\3\2\2\2\u00ac\u00ad\3\2\2\2\u00ad\u00ae\3\2\2\2") + buf.write("\u00ae\u00af\7\26\2\2\u00af\u00b1\7\31\2\2\u00b0\u00b2") + buf.write("\5&\24\2\u00b1\u00b0\3\2\2\2\u00b1\u00b2\3\2\2\2\u00b2") + buf.write("\u00b4\3\2\2\2\u00b3\u00b5\5 \21\2\u00b4\u00b3\3\2\2\2") + buf.write("\u00b4\u00b5\3\2\2\2\u00b5\u00b6\3\2\2\2\u00b6\u00b8\5") + buf.write("*\26\2\u00b7\u00b9\5&\24\2\u00b8\u00b7\3\2\2\2\u00b8\u00b9") + buf.write("\3\2\2\2\u00b9\u00ba\3\2\2\2\u00ba\u00bb\7\32\2\2\u00bb") + buf.write("#\3\2\2\2\u00bc\u00bd\7\33\2\2\u00bd\u00be\5,\27\2\u00be") + buf.write("\u00bf\7\34\2\2\u00bf\u00c0\5\62\32\2\u00c0\u00c1\7\31") + buf.write("\2\2\u00c1\u00c2\5 \21\2\u00c2\u00c3\7\32\2\2\u00c3%\3") + buf.write("\2\2\2\u00c4\u00c6\7;\2\2\u00c5\u00c7\5&\24\2\u00c6\u00c5") + 
buf.write("\3\2\2\2\u00c6\u00c7\3\2\2\2\u00c7\'\3\2\2\2\u00c8\u00c9") + buf.write("\5\26\f\2\u00c9\u00cb\7\3\2\2\u00ca\u00cc\7;\2\2\u00cb") + buf.write("\u00ca\3\2\2\2\u00cb\u00cc\3\2\2\2\u00cc\u00ce\3\2\2\2") + buf.write("\u00cd\u00cf\5(\25\2\u00ce\u00cd\3\2\2\2\u00ce\u00cf\3") + buf.write("\2\2\2\u00cf\u00d5\3\2\2\2\u00d0\u00d2\7;\2\2\u00d1\u00d3") + buf.write("\5(\25\2\u00d2\u00d1\3\2\2\2\u00d2\u00d3\3\2\2\2\u00d3") + buf.write("\u00d5\3\2\2\2\u00d4\u00c8\3\2\2\2\u00d4\u00d0\3\2\2\2") + buf.write("\u00d5)\3\2\2\2\u00d6\u00d7\7\35\2\2\u00d7\u00d8\5\32") + buf.write("\16\2\u00d8\u00d9\7\3\2\2\u00d9+\3\2\2\2\u00da\u00dd\7") + buf.write("<\2\2\u00db\u00dc\7\25\2\2\u00dc\u00de\5,\27\2\u00dd\u00db") + buf.write("\3\2\2\2\u00dd\u00de\3\2\2\2\u00de-\3\2\2\2\u00df\u00e0") + buf.write("\7\36\2\2\u00e0\u00e1\5\62\32\2\u00e1\u00e2\7\25\2\2\u00e2") + buf.write("\u00e5\5\62\32\2\u00e3\u00e4\7\25\2\2\u00e4\u00e6\5\62") + buf.write("\32\2\u00e5\u00e3\3\2\2\2\u00e5\u00e6\3\2\2\2\u00e6\u00e7") + buf.write("\3\2\2\2\u00e7\u00e8\7\26\2\2\u00e8/\3\2\2\2\u00e9\u00ed") + buf.write("\79\2\2\u00ea\u00ed\78\2\2\u00eb\u00ed\7:\2\2\u00ec\u00e9") + buf.write("\3\2\2\2\u00ec\u00ea\3\2\2\2\u00ec\u00eb\3\2\2\2\u00ed") + buf.write("\61\3\2\2\2\u00ee\u00ef\b\32\1\2\u00ef\u00f0\7\24\2\2") + buf.write("\u00f0\u00f1\5\62\32\2\u00f1\u00f2\7\26\2\2\u00f2\u0116") + buf.write("\3\2\2\2\u00f3\u00f4\t\5\2\2\u00f4\u0116\5\62\32\27\u00f5") + buf.write("\u00f6\t\6\2\2\u00f6\u00f7\7\24\2\2\u00f7\u00f8\5\62\32") + buf.write("\2\u00f8\u00f9\7\26\2\2\u00f9\u0116\3\2\2\2\u00fa\u00fb") + buf.write("\7/\2\2\u00fb\u00fc\5\62\32\2\u00fc\u00fd\7\25\2\2\u00fd") + buf.write("\u00fe\5\62\32\2\u00fe\u00ff\7\25\2\2\u00ff\u0100\t\7") + buf.write("\2\2\u0100\u0101\7\26\2\2\u0101\u0116\3\2\2\2\u0102\u0116") + buf.write("\7<\2\2\u0103\u0116\5\60\31\2\u0104\u0116\5\64\33\2\u0105") + buf.write("\u0116\5\66\34\2\u0106\u0116\5.\30\2\u0107\u0108\7\65") + buf.write("\2\2\u0108\u0109\5\32\16\2\u0109\u010a\7\26\2\2\u010a") + buf.write("\u0116\3\2\2\2\u010b\u0116\58\35\2\u010c\u0116\5:\36\2") + buf.write("\u010d\u010e\7\66\2\2\u010e\u010f\5\32\16\2\u010f\u0110") + buf.write("\7\26\2\2\u0110\u0116\3\2\2\2\u0111\u0112\7\67\2\2\u0112") + buf.write("\u0113\5\62\32\2\u0113\u0114\7\26\2\2\u0114\u0116\3\2") + buf.write("\2\2\u0115\u00ee\3\2\2\2\u0115\u00f3\3\2\2\2\u0115\u00f5") + buf.write("\3\2\2\2\u0115\u00fa\3\2\2\2\u0115\u0102\3\2\2\2\u0115") + buf.write("\u0103\3\2\2\2\u0115\u0104\3\2\2\2\u0115\u0105\3\2\2\2") + buf.write("\u0115\u0106\3\2\2\2\u0115\u0107\3\2\2\2\u0115\u010b\3") + buf.write("\2\2\2\u0115\u010c\3\2\2\2\u0115\u010d\3\2\2\2\u0115\u0111") + buf.write("\3\2\2\2\u0116\u013f\3\2\2\2\u0117\u0118\f\25\2\2\u0118") + buf.write("\u0119\7&\2\2\u0119\u013e\5\62\32\26\u011a\u011b\f\24") + buf.write("\2\2\u011b\u011c\t\b\2\2\u011c\u013e\5\62\32\25\u011d") + buf.write("\u011e\f\23\2\2\u011e\u011f\7)\2\2\u011f\u013e\5\62\32") + buf.write("\24\u0120\u0121\f\22\2\2\u0121\u0122\t\t\2\2\u0122\u013e") + buf.write("\5\62\32\23\u0123\u0124\f\21\2\2\u0124\u0125\t\n\2\2\u0125") + buf.write("\u013e\5\62\32\22\u0126\u0127\f\17\2\2\u0127\u0128\t\13") + buf.write("\2\2\u0128\u013e\5\62\32\20\u0129\u012a\f\16\2\2\u012a") + buf.write("\u012b\7\63\2\2\u012b\u012c\5\62\32\2\u012c\u012d\7\64") + buf.write("\2\2\u012d\u012e\5\62\32\17\u012e\u013e\3\2\2\2\u012f") + buf.write("\u0130\f\5\2\2\u0130\u0131\7\34\2\2\u0131\u013e\5\62\32") + buf.write("\6\u0132\u0133\f\31\2\2\u0133\u0134\7\37\2\2\u0134\u0135") + buf.write("\5\62\32\2\u0135\u0136\7 \2\2\u0136\u013e\3\2\2\2\u0137") + 
buf.write("\u0138\f\30\2\2\u0138\u013a\7\24\2\2\u0139\u013b\5\32") + buf.write("\16\2\u013a\u0139\3\2\2\2\u013a\u013b\3\2\2\2\u013b\u013c") + buf.write("\3\2\2\2\u013c\u013e\7\26\2\2\u013d\u0117\3\2\2\2\u013d") + buf.write("\u011a\3\2\2\2\u013d\u011d\3\2\2\2\u013d\u0120\3\2\2\2") + buf.write("\u013d\u0123\3\2\2\2\u013d\u0126\3\2\2\2\u013d\u0129\3") + buf.write("\2\2\2\u013d\u012f\3\2\2\2\u013d\u0132\3\2\2\2\u013d\u0137") + buf.write("\3\2\2\2\u013e\u0141\3\2\2\2\u013f\u013d\3\2\2\2\u013f") + buf.write("\u0140\3\2\2\2\u0140\63\3\2\2\2\u0141\u013f\3\2\2\2\u0142") + buf.write("\u0144\7\37\2\2\u0143\u0145\5\32\16\2\u0144\u0143\3\2") + buf.write("\2\2\u0144\u0145\3\2\2\2\u0145\u0146\3\2\2\2\u0146\u0147") + buf.write("\7 \2\2\u0147\65\3\2\2\2\u0148\u014a\7\31\2\2\u0149\u014b") + buf.write("\5\34\17\2\u014a\u0149\3\2\2\2\u014a\u014b\3\2\2\2\u014b") + buf.write("\u014c\3\2\2\2\u014c\u014d\7\32\2\2\u014d\67\3\2\2\2\u014e") + buf.write("\u014f\7\37\2\2\u014f\u0150\5\62\32\2\u0150\u0151\7\33") + buf.write("\2\2\u0151\u0152\5,\27\2\u0152\u0153\7\34\2\2\u0153\u0154") + buf.write("\5\62\32\2\u0154\u0155\7 \2\2\u01559\3\2\2\2\u0156\u0157") + buf.write("\7\31\2\2\u0157\u0158\5\62\32\2\u0158\u0159\7\27\2\2\u0159") + buf.write("\u015a\5\62\32\2\u015a\u015b\7\33\2\2\u015b\u015c\5,\27") + buf.write("\2\u015c\u015d\7\34\2\2\u015d\u015e\5\62\32\2\u015e\u015f") + buf.write("\7\32\2\2\u015f;\3\2\2\2$?EIL\\cgnv\u0089\u0090\u0095") + buf.write("\u009d\u00a0\u00a4\u00a6\u00ac\u00b1\u00b4\u00b8\u00c6") + buf.write("\u00cb\u00ce\u00d2\u00d4\u00dd\u00e5\u00ec\u0115\u013a") + buf.write("\u013d\u013f\u0144\u014a") + return buf.getvalue() + + +class RASPParser ( Parser ): + + grammarFileName = "RASP.g4" + + atn = ATNDeserializer().deserialize(serializedATN()) + + decisionsToDFA = [ DFA(ds, i) for i, ds in enumerate(atn.decisionToState) ] + + sharedContextCache = PredictionContextCache() + + literalNames = [ "", "';'", "'set'", "'s-op'", "'selector'", + "'example'", "'show'", "'full seq display'", "'on'", + "'off'", "'examples'", "'exit()'", "'exit'", "'quit'", + "'quit()'", "'load'", "'='", "'draw'", "'('", "','", + "')'", "':'", "'def'", "'{'", "'}'", "'for'", "'in'", + "'return'", "'aggregate('", "'['", "']'", "'not'", + "'-'", "'+'", "'round'", "'indicator'", "'^'", "'*'", + "'/'", "'%'", "'=='", "'<='", "'>='", "'>'", "'<'", + "'select('", "'!='", "'and'", "'or'", "'if'", "'else'", + "'range('", "'zip('", "'len('" ] + + symbolicNames = [ "", "", "", "", + "", "", "", "", + "", "", "", "", + "", "", "", "", + "", "", "", "", + "", "", "", "", + "", "", "", "", + "", "", "", "", + "", "", "", "", + "", "", "", "", + "", "", "", "", + "", "", "", "", + "", "", "", "", + "", "", "Float", "PosInt", "String", + "Comment", "ID", "WS" ] + + RULE_r = 0 + RULE_statement = 1 + RULE_raspstatement = 2 + RULE_replstatement = 3 + RULE_setExample = 4 + RULE_showExample = 5 + RULE_toggleSeqVerbose = 6 + RULE_toggleExample = 7 + RULE_exit = 8 + RULE_loadFile = 9 + RULE_assign = 10 + RULE_draw = 11 + RULE_exprsList = 12 + RULE_namedExprsList = 13 + RULE_namedExpr = 14 + RULE_raspstatementsList = 15 + RULE_funcDef = 16 + RULE_forLoop = 17 + RULE_commentsList = 18 + RULE_assignsAndCommentsList = 19 + RULE_returnStatement = 20 + RULE_idsList = 21 + RULE_aggregateExpr = 22 + RULE_atom = 23 + RULE_expr = 24 + RULE_aList = 25 + RULE_aDict = 26 + RULE_listCompExpr = 27 + RULE_dictCompExpr = 28 + + ruleNames = [ "r", "statement", "raspstatement", "replstatement", "setExample", + "showExample", "toggleSeqVerbose", "toggleExample", "exit", + 
"loadFile", "assign", "draw", "exprsList", "namedExprsList", + "namedExpr", "raspstatementsList", "funcDef", "forLoop", + "commentsList", "assignsAndCommentsList", "returnStatement", + "idsList", "aggregateExpr", "atom", "expr", "aList", + "aDict", "listCompExpr", "dictCompExpr" ] + + EOF = Token.EOF + T__0=1 + T__1=2 + T__2=3 + T__3=4 + T__4=5 + T__5=6 + T__6=7 + T__7=8 + T__8=9 + T__9=10 + T__10=11 + T__11=12 + T__12=13 + T__13=14 + T__14=15 + T__15=16 + T__16=17 + T__17=18 + T__18=19 + T__19=20 + T__20=21 + T__21=22 + T__22=23 + T__23=24 + T__24=25 + T__25=26 + T__26=27 + T__27=28 + T__28=29 + T__29=30 + T__30=31 + T__31=32 + T__32=33 + T__33=34 + T__34=35 + T__35=36 + T__36=37 + T__37=38 + T__38=39 + T__39=40 + T__40=41 + T__41=42 + T__42=43 + T__43=44 + T__44=45 + T__45=46 + T__46=47 + T__47=48 + T__48=49 + T__49=50 + T__50=51 + T__51=52 + T__52=53 + Float=54 + PosInt=55 + String=56 + Comment=57 + ID=58 + WS=59 + + def __init__(self, input:TokenStream, output:TextIO = sys.stdout): + super().__init__(input, output) + self.checkVersion("4.9") + self._interp = ParserATNSimulator(self, self.atn, self.decisionsToDFA, self.sharedContextCache) + self._predicates = None + + + + + class RContext(ParserRuleContext): + + def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1): + super().__init__(parent, invokingState) + self.parser = parser + + def EOF(self): + return self.getToken(RASPParser.EOF, 0) + + def statement(self, i:int=None): + if i is None: + return self.getTypedRuleContexts(RASPParser.StatementContext) + else: + return self.getTypedRuleContext(RASPParser.StatementContext,i) + + + def getRuleIndex(self): + return RASPParser.RULE_r + + def enterRule(self, listener:ParseTreeListener): + if hasattr( listener, "enterR" ): + listener.enterR(self) + + def exitRule(self, listener:ParseTreeListener): + if hasattr( listener, "exitR" ): + listener.exitR(self) + + def accept(self, visitor:ParseTreeVisitor): + if hasattr( visitor, "visitR" ): + return visitor.visitR(self) + else: + return visitor.visitChildren(self) + + + + + def r(self): + + localctx = RASPParser.RContext(self, self._ctx, self.state) + self.enterRule(localctx, 0, self.RULE_r) + try: + self.enterOuterAlt(localctx, 1) + self.state = 59 + self._errHandler.sync(self) + _alt = 1+1 + while _alt!=1 and _alt!=ATN.INVALID_ALT_NUMBER: + if _alt == 1+1: + self.state = 58 + self.statement() + + else: + raise NoViableAltException(self) + self.state = 61 + self._errHandler.sync(self) + _alt = self._interp.adaptivePredict(self._input,0,self._ctx) + + self.state = 63 + self.match(RASPParser.EOF) + except RecognitionException as re: + localctx.exception = re + self._errHandler.reportError(self, re) + self._errHandler.recover(self, re) + finally: + self.exitRule() + return localctx + + + class StatementContext(ParserRuleContext): + + def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1): + super().__init__(parent, invokingState) + self.parser = parser + + def raspstatement(self): + return self.getTypedRuleContext(RASPParser.RaspstatementContext,0) + + + def Comment(self): + return self.getToken(RASPParser.Comment, 0) + + def replstatement(self): + return self.getTypedRuleContext(RASPParser.ReplstatementContext,0) + + + def getRuleIndex(self): + return RASPParser.RULE_statement + + def enterRule(self, listener:ParseTreeListener): + if hasattr( listener, "enterStatement" ): + listener.enterStatement(self) + + def exitRule(self, listener:ParseTreeListener): + if hasattr( listener, "exitStatement" ): 
+ listener.exitStatement(self) + + def accept(self, visitor:ParseTreeVisitor): + if hasattr( visitor, "visitStatement" ): + return visitor.visitStatement(self) + else: + return visitor.visitChildren(self) + + + + + def statement(self): + + localctx = RASPParser.StatementContext(self, self._ctx, self.state) + self.enterRule(localctx, 2, self.RULE_statement) + try: + self.state = 74 + self._errHandler.sync(self) + token = self._input.LA(1) + if token in [RASPParser.T__14, RASPParser.T__16, RASPParser.T__17, RASPParser.T__21, RASPParser.T__22, RASPParser.T__24, RASPParser.T__27, RASPParser.T__28, RASPParser.T__30, RASPParser.T__31, RASPParser.T__32, RASPParser.T__33, RASPParser.T__34, RASPParser.T__44, RASPParser.T__50, RASPParser.T__51, RASPParser.T__52, RASPParser.Float, RASPParser.PosInt, RASPParser.String, RASPParser.ID]: + self.enterOuterAlt(localctx, 1) + self.state = 65 + self.raspstatement() + self.state = 67 + self._errHandler.sync(self) + la_ = self._interp.adaptivePredict(self._input,1,self._ctx) + if la_ == 1: + self.state = 66 + self.match(RASPParser.Comment) + + + pass + elif token in [RASPParser.T__1, RASPParser.T__2, RASPParser.T__3, RASPParser.T__5, RASPParser.T__6, RASPParser.T__9, RASPParser.T__10, RASPParser.T__11, RASPParser.T__12, RASPParser.T__13]: + self.enterOuterAlt(localctx, 2) + self.state = 69 + self.replstatement() + self.state = 71 + self._errHandler.sync(self) + la_ = self._interp.adaptivePredict(self._input,2,self._ctx) + if la_ == 1: + self.state = 70 + self.match(RASPParser.Comment) + + + pass + elif token in [RASPParser.Comment]: + self.enterOuterAlt(localctx, 3) + self.state = 73 + self.match(RASPParser.Comment) + pass + else: + raise NoViableAltException(self) + + except RecognitionException as re: + localctx.exception = re + self._errHandler.reportError(self, re) + self._errHandler.recover(self, re) + finally: + self.exitRule() + return localctx + + + class RaspstatementContext(ParserRuleContext): + + def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1): + super().__init__(parent, invokingState) + self.parser = parser + + def expr(self): + return self.getTypedRuleContext(RASPParser.ExprContext,0) + + + def assign(self): + return self.getTypedRuleContext(RASPParser.AssignContext,0) + + + def draw(self): + return self.getTypedRuleContext(RASPParser.DrawContext,0) + + + def loadFile(self): + return self.getTypedRuleContext(RASPParser.LoadFileContext,0) + + + def funcDef(self): + return self.getTypedRuleContext(RASPParser.FuncDefContext,0) + + + def forLoop(self): + return self.getTypedRuleContext(RASPParser.ForLoopContext,0) + + + def getRuleIndex(self): + return RASPParser.RULE_raspstatement + + def enterRule(self, listener:ParseTreeListener): + if hasattr( listener, "enterRaspstatement" ): + listener.enterRaspstatement(self) + + def exitRule(self, listener:ParseTreeListener): + if hasattr( listener, "exitRaspstatement" ): + listener.exitRaspstatement(self) + + def accept(self, visitor:ParseTreeVisitor): + if hasattr( visitor, "visitRaspstatement" ): + return visitor.visitRaspstatement(self) + else: + return visitor.visitChildren(self) + + + + + def raspstatement(self): + + localctx = RASPParser.RaspstatementContext(self, self._ctx, self.state) + self.enterRule(localctx, 4, self.RULE_raspstatement) + try: + self.state = 90 + self._errHandler.sync(self) + la_ = self._interp.adaptivePredict(self._input,4,self._ctx) + if la_ == 1: + self.enterOuterAlt(localctx, 1) + self.state = 76 + self.expr(0) + self.state = 77 + 
self.match(RASPParser.T__0) + pass + + elif la_ == 2: + self.enterOuterAlt(localctx, 2) + self.state = 79 + self.assign() + self.state = 80 + self.match(RASPParser.T__0) + pass + + elif la_ == 3: + self.enterOuterAlt(localctx, 3) + self.state = 82 + self.draw() + self.state = 83 + self.match(RASPParser.T__0) + pass + + elif la_ == 4: + self.enterOuterAlt(localctx, 4) + self.state = 85 + self.loadFile() + self.state = 86 + self.match(RASPParser.T__0) + pass + + elif la_ == 5: + self.enterOuterAlt(localctx, 5) + self.state = 88 + self.funcDef() + pass + + elif la_ == 6: + self.enterOuterAlt(localctx, 6) + self.state = 89 + self.forLoop() + pass + + + except RecognitionException as re: + localctx.exception = re + self._errHandler.reportError(self, re) + self._errHandler.recover(self, re) + finally: + self.exitRule() + return localctx + + + class ReplstatementContext(ParserRuleContext): + + def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1): + super().__init__(parent, invokingState) + self.parser = parser + + def setExample(self): + return self.getTypedRuleContext(RASPParser.SetExampleContext,0) + + + def showExample(self): + return self.getTypedRuleContext(RASPParser.ShowExampleContext,0) + + + def toggleExample(self): + return self.getTypedRuleContext(RASPParser.ToggleExampleContext,0) + + + def toggleSeqVerbose(self): + return self.getTypedRuleContext(RASPParser.ToggleSeqVerboseContext,0) + + + def exit(self): + return self.getTypedRuleContext(RASPParser.ExitContext,0) + + + def getRuleIndex(self): + return RASPParser.RULE_replstatement + + def enterRule(self, listener:ParseTreeListener): + if hasattr( listener, "enterReplstatement" ): + listener.enterReplstatement(self) + + def exitRule(self, listener:ParseTreeListener): + if hasattr( listener, "exitReplstatement" ): + listener.exitReplstatement(self) + + def accept(self, visitor:ParseTreeVisitor): + if hasattr( visitor, "visitReplstatement" ): + return visitor.visitReplstatement(self) + else: + return visitor.visitChildren(self) + + + + + def replstatement(self): + + localctx = RASPParser.ReplstatementContext(self, self._ctx, self.state) + self.enterRule(localctx, 6, self.RULE_replstatement) + try: + self.state = 97 + self._errHandler.sync(self) + token = self._input.LA(1) + if token in [RASPParser.T__1]: + self.enterOuterAlt(localctx, 1) + self.state = 92 + self.setExample() + pass + elif token in [RASPParser.T__5]: + self.enterOuterAlt(localctx, 2) + self.state = 93 + self.showExample() + pass + elif token in [RASPParser.T__2, RASPParser.T__3, RASPParser.T__9]: + self.enterOuterAlt(localctx, 3) + self.state = 94 + self.toggleExample() + pass + elif token in [RASPParser.T__6]: + self.enterOuterAlt(localctx, 4) + self.state = 95 + self.toggleSeqVerbose() + pass + elif token in [RASPParser.T__10, RASPParser.T__11, RASPParser.T__12, RASPParser.T__13]: + self.enterOuterAlt(localctx, 5) + self.state = 96 + self.exit() + pass + else: + raise NoViableAltException(self) + + except RecognitionException as re: + localctx.exception = re + self._errHandler.reportError(self, re) + self._errHandler.recover(self, re) + finally: + self.exitRule() + return localctx + + + class SetExampleContext(ParserRuleContext): + + def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1): + super().__init__(parent, invokingState) + self.parser = parser + self.subset = None # Token + self.example = None # ExprContext + + def expr(self): + return self.getTypedRuleContext(RASPParser.ExprContext,0) + + + def 
getRuleIndex(self): + return RASPParser.RULE_setExample + + def enterRule(self, listener:ParseTreeListener): + if hasattr( listener, "enterSetExample" ): + listener.enterSetExample(self) + + def exitRule(self, listener:ParseTreeListener): + if hasattr( listener, "exitSetExample" ): + listener.exitSetExample(self) + + def accept(self, visitor:ParseTreeVisitor): + if hasattr( visitor, "visitSetExample" ): + return visitor.visitSetExample(self) + else: + return visitor.visitChildren(self) + + + + + def setExample(self): + + localctx = RASPParser.SetExampleContext(self, self._ctx, self.state) + self.enterRule(localctx, 8, self.RULE_setExample) + self._la = 0 # Token type + try: + self.enterOuterAlt(localctx, 1) + self.state = 99 + self.match(RASPParser.T__1) + self.state = 101 + self._errHandler.sync(self) + _la = self._input.LA(1) + if _la==RASPParser.T__2 or _la==RASPParser.T__3: + self.state = 100 + localctx.subset = self._input.LT(1) + _la = self._input.LA(1) + if not(_la==RASPParser.T__2 or _la==RASPParser.T__3): + localctx.subset = self._errHandler.recoverInline(self) + else: + self._errHandler.reportMatch(self) + self.consume() + + + self.state = 103 + self.match(RASPParser.T__4) + self.state = 104 + localctx.example = self.expr(0) + except RecognitionException as re: + localctx.exception = re + self._errHandler.reportError(self, re) + self._errHandler.recover(self, re) + finally: + self.exitRule() + return localctx + + + class ShowExampleContext(ParserRuleContext): + + def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1): + super().__init__(parent, invokingState) + self.parser = parser + self.subset = None # Token + + + def getRuleIndex(self): + return RASPParser.RULE_showExample + + def enterRule(self, listener:ParseTreeListener): + if hasattr( listener, "enterShowExample" ): + listener.enterShowExample(self) + + def exitRule(self, listener:ParseTreeListener): + if hasattr( listener, "exitShowExample" ): + listener.exitShowExample(self) + + def accept(self, visitor:ParseTreeVisitor): + if hasattr( visitor, "visitShowExample" ): + return visitor.visitShowExample(self) + else: + return visitor.visitChildren(self) + + + + + def showExample(self): + + localctx = RASPParser.ShowExampleContext(self, self._ctx, self.state) + self.enterRule(localctx, 10, self.RULE_showExample) + self._la = 0 # Token type + try: + self.enterOuterAlt(localctx, 1) + self.state = 106 + self.match(RASPParser.T__5) + self.state = 108 + self._errHandler.sync(self) + _la = self._input.LA(1) + if _la==RASPParser.T__2 or _la==RASPParser.T__3: + self.state = 107 + localctx.subset = self._input.LT(1) + _la = self._input.LA(1) + if not(_la==RASPParser.T__2 or _la==RASPParser.T__3): + localctx.subset = self._errHandler.recoverInline(self) + else: + self._errHandler.reportMatch(self) + self.consume() + + + self.state = 110 + self.match(RASPParser.T__4) + except RecognitionException as re: + localctx.exception = re + self._errHandler.reportError(self, re) + self._errHandler.recover(self, re) + finally: + self.exitRule() + return localctx + + + class ToggleSeqVerboseContext(ParserRuleContext): + + def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1): + super().__init__(parent, invokingState) + self.parser = parser + self.switch = None # Token + + + def getRuleIndex(self): + return RASPParser.RULE_toggleSeqVerbose + + def enterRule(self, listener:ParseTreeListener): + if hasattr( listener, "enterToggleSeqVerbose" ): + listener.enterToggleSeqVerbose(self) + + def 
exitRule(self, listener:ParseTreeListener): + if hasattr( listener, "exitToggleSeqVerbose" ): + listener.exitToggleSeqVerbose(self) + + def accept(self, visitor:ParseTreeVisitor): + if hasattr( visitor, "visitToggleSeqVerbose" ): + return visitor.visitToggleSeqVerbose(self) + else: + return visitor.visitChildren(self) + + + + + def toggleSeqVerbose(self): + + localctx = RASPParser.ToggleSeqVerboseContext(self, self._ctx, self.state) + self.enterRule(localctx, 12, self.RULE_toggleSeqVerbose) + self._la = 0 # Token type + try: + self.enterOuterAlt(localctx, 1) + self.state = 112 + self.match(RASPParser.T__6) + self.state = 113 + localctx.switch = self._input.LT(1) + _la = self._input.LA(1) + if not(_la==RASPParser.T__7 or _la==RASPParser.T__8): + localctx.switch = self._errHandler.recoverInline(self) + else: + self._errHandler.reportMatch(self) + self.consume() + except RecognitionException as re: + localctx.exception = re + self._errHandler.reportError(self, re) + self._errHandler.recover(self, re) + finally: + self.exitRule() + return localctx + + + class ToggleExampleContext(ParserRuleContext): + + def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1): + super().__init__(parent, invokingState) + self.parser = parser + self.subset = None # Token + self.switch = None # Token + + + def getRuleIndex(self): + return RASPParser.RULE_toggleExample + + def enterRule(self, listener:ParseTreeListener): + if hasattr( listener, "enterToggleExample" ): + listener.enterToggleExample(self) + + def exitRule(self, listener:ParseTreeListener): + if hasattr( listener, "exitToggleExample" ): + listener.exitToggleExample(self) + + def accept(self, visitor:ParseTreeVisitor): + if hasattr( visitor, "visitToggleExample" ): + return visitor.visitToggleExample(self) + else: + return visitor.visitChildren(self) + + + + + def toggleExample(self): + + localctx = RASPParser.ToggleExampleContext(self, self._ctx, self.state) + self.enterRule(localctx, 14, self.RULE_toggleExample) + self._la = 0 # Token type + try: + self.enterOuterAlt(localctx, 1) + self.state = 116 + self._errHandler.sync(self) + _la = self._input.LA(1) + if _la==RASPParser.T__2 or _la==RASPParser.T__3: + self.state = 115 + localctx.subset = self._input.LT(1) + _la = self._input.LA(1) + if not(_la==RASPParser.T__2 or _la==RASPParser.T__3): + localctx.subset = self._errHandler.recoverInline(self) + else: + self._errHandler.reportMatch(self) + self.consume() + + + self.state = 118 + self.match(RASPParser.T__9) + self.state = 119 + localctx.switch = self._input.LT(1) + _la = self._input.LA(1) + if not(_la==RASPParser.T__7 or _la==RASPParser.T__8): + localctx.switch = self._errHandler.recoverInline(self) + else: + self._errHandler.reportMatch(self) + self.consume() + except RecognitionException as re: + localctx.exception = re + self._errHandler.reportError(self, re) + self._errHandler.recover(self, re) + finally: + self.exitRule() + return localctx + + + class ExitContext(ParserRuleContext): + + def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1): + super().__init__(parent, invokingState) + self.parser = parser + + + def getRuleIndex(self): + return RASPParser.RULE_exit + + def enterRule(self, listener:ParseTreeListener): + if hasattr( listener, "enterExit" ): + listener.enterExit(self) + + def exitRule(self, listener:ParseTreeListener): + if hasattr( listener, "exitExit" ): + listener.exitExit(self) + + def accept(self, visitor:ParseTreeVisitor): + if hasattr( visitor, "visitExit" ): + return 
visitor.visitExit(self) + else: + return visitor.visitChildren(self) + + + + + def exit(self): + + localctx = RASPParser.ExitContext(self, self._ctx, self.state) + self.enterRule(localctx, 16, self.RULE_exit) + self._la = 0 # Token type + try: + self.enterOuterAlt(localctx, 1) + self.state = 121 + _la = self._input.LA(1) + if not((((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << RASPParser.T__10) | (1 << RASPParser.T__11) | (1 << RASPParser.T__12) | (1 << RASPParser.T__13))) != 0)): + self._errHandler.recoverInline(self) + else: + self._errHandler.reportMatch(self) + self.consume() + except RecognitionException as re: + localctx.exception = re + self._errHandler.reportError(self, re) + self._errHandler.recover(self, re) + finally: + self.exitRule() + return localctx + + + class LoadFileContext(ParserRuleContext): + + def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1): + super().__init__(parent, invokingState) + self.parser = parser + self.filename = None # Token + + def String(self): + return self.getToken(RASPParser.String, 0) + + def getRuleIndex(self): + return RASPParser.RULE_loadFile + + def enterRule(self, listener:ParseTreeListener): + if hasattr( listener, "enterLoadFile" ): + listener.enterLoadFile(self) + + def exitRule(self, listener:ParseTreeListener): + if hasattr( listener, "exitLoadFile" ): + listener.exitLoadFile(self) + + def accept(self, visitor:ParseTreeVisitor): + if hasattr( visitor, "visitLoadFile" ): + return visitor.visitLoadFile(self) + else: + return visitor.visitChildren(self) + + + + + def loadFile(self): + + localctx = RASPParser.LoadFileContext(self, self._ctx, self.state) + self.enterRule(localctx, 18, self.RULE_loadFile) + try: + self.enterOuterAlt(localctx, 1) + self.state = 123 + self.match(RASPParser.T__14) + self.state = 124 + localctx.filename = self.match(RASPParser.String) + except RecognitionException as re: + localctx.exception = re + self._errHandler.reportError(self, re) + self._errHandler.recover(self, re) + finally: + self.exitRule() + return localctx + + + class AssignContext(ParserRuleContext): + + def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1): + super().__init__(parent, invokingState) + self.parser = parser + self.var = None # IdsListContext + self.val = None # ExprsListContext + + def idsList(self): + return self.getTypedRuleContext(RASPParser.IdsListContext,0) + + + def exprsList(self): + return self.getTypedRuleContext(RASPParser.ExprsListContext,0) + + + def getRuleIndex(self): + return RASPParser.RULE_assign + + def enterRule(self, listener:ParseTreeListener): + if hasattr( listener, "enterAssign" ): + listener.enterAssign(self) + + def exitRule(self, listener:ParseTreeListener): + if hasattr( listener, "exitAssign" ): + listener.exitAssign(self) + + def accept(self, visitor:ParseTreeVisitor): + if hasattr( visitor, "visitAssign" ): + return visitor.visitAssign(self) + else: + return visitor.visitChildren(self) + + + + + def assign(self): + + localctx = RASPParser.AssignContext(self, self._ctx, self.state) + self.enterRule(localctx, 20, self.RULE_assign) + try: + self.enterOuterAlt(localctx, 1) + self.state = 126 + localctx.var = self.idsList() + self.state = 127 + self.match(RASPParser.T__15) + self.state = 128 + localctx.val = self.exprsList() + except RecognitionException as re: + localctx.exception = re + self._errHandler.reportError(self, re) + self._errHandler.recover(self, re) + finally: + self.exitRule() + return localctx + + + class DrawContext(ParserRuleContext): + + 
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1): + super().__init__(parent, invokingState) + self.parser = parser + self.unf = None # ExprContext + self.inputseq = None # ExprContext + + def expr(self, i:int=None): + if i is None: + return self.getTypedRuleContexts(RASPParser.ExprContext) + else: + return self.getTypedRuleContext(RASPParser.ExprContext,i) + + + def getRuleIndex(self): + return RASPParser.RULE_draw + + def enterRule(self, listener:ParseTreeListener): + if hasattr( listener, "enterDraw" ): + listener.enterDraw(self) + + def exitRule(self, listener:ParseTreeListener): + if hasattr( listener, "exitDraw" ): + listener.exitDraw(self) + + def accept(self, visitor:ParseTreeVisitor): + if hasattr( visitor, "visitDraw" ): + return visitor.visitDraw(self) + else: + return visitor.visitChildren(self) + + + + + def draw(self): + + localctx = RASPParser.DrawContext(self, self._ctx, self.state) + self.enterRule(localctx, 22, self.RULE_draw) + self._la = 0 # Token type + try: + self.enterOuterAlt(localctx, 1) + self.state = 130 + self.match(RASPParser.T__16) + self.state = 131 + self.match(RASPParser.T__17) + self.state = 132 + localctx.unf = self.expr(0) + self.state = 135 + self._errHandler.sync(self) + _la = self._input.LA(1) + if _la==RASPParser.T__18: + self.state = 133 + self.match(RASPParser.T__18) + self.state = 134 + localctx.inputseq = self.expr(0) + + + self.state = 137 + self.match(RASPParser.T__19) + except RecognitionException as re: + localctx.exception = re + self._errHandler.reportError(self, re) + self._errHandler.recover(self, re) + finally: + self.exitRule() + return localctx + + + class ExprsListContext(ParserRuleContext): + + def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1): + super().__init__(parent, invokingState) + self.parser = parser + self.first = None # ExprContext + self.cont = None # ExprsListContext + + def expr(self): + return self.getTypedRuleContext(RASPParser.ExprContext,0) + + + def exprsList(self): + return self.getTypedRuleContext(RASPParser.ExprsListContext,0) + + + def getRuleIndex(self): + return RASPParser.RULE_exprsList + + def enterRule(self, listener:ParseTreeListener): + if hasattr( listener, "enterExprsList" ): + listener.enterExprsList(self) + + def exitRule(self, listener:ParseTreeListener): + if hasattr( listener, "exitExprsList" ): + listener.exitExprsList(self) + + def accept(self, visitor:ParseTreeVisitor): + if hasattr( visitor, "visitExprsList" ): + return visitor.visitExprsList(self) + else: + return visitor.visitChildren(self) + + + + + def exprsList(self): + + localctx = RASPParser.ExprsListContext(self, self._ctx, self.state) + self.enterRule(localctx, 24, self.RULE_exprsList) + self._la = 0 # Token type + try: + self.enterOuterAlt(localctx, 1) + self.state = 139 + localctx.first = self.expr(0) + self.state = 142 + self._errHandler.sync(self) + _la = self._input.LA(1) + if _la==RASPParser.T__18: + self.state = 140 + self.match(RASPParser.T__18) + self.state = 141 + localctx.cont = self.exprsList() + + + except RecognitionException as re: + localctx.exception = re + self._errHandler.reportError(self, re) + self._errHandler.recover(self, re) + finally: + self.exitRule() + return localctx + + + class NamedExprsListContext(ParserRuleContext): + + def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1): + super().__init__(parent, invokingState) + self.parser = parser + self.first = None # NamedExprContext + self.cont = None # NamedExprsListContext + 
+ def namedExpr(self): + return self.getTypedRuleContext(RASPParser.NamedExprContext,0) + + + def namedExprsList(self): + return self.getTypedRuleContext(RASPParser.NamedExprsListContext,0) + + + def getRuleIndex(self): + return RASPParser.RULE_namedExprsList + + def enterRule(self, listener:ParseTreeListener): + if hasattr( listener, "enterNamedExprsList" ): + listener.enterNamedExprsList(self) + + def exitRule(self, listener:ParseTreeListener): + if hasattr( listener, "exitNamedExprsList" ): + listener.exitNamedExprsList(self) + + def accept(self, visitor:ParseTreeVisitor): + if hasattr( visitor, "visitNamedExprsList" ): + return visitor.visitNamedExprsList(self) + else: + return visitor.visitChildren(self) + + + + + def namedExprsList(self): + + localctx = RASPParser.NamedExprsListContext(self, self._ctx, self.state) + self.enterRule(localctx, 26, self.RULE_namedExprsList) + self._la = 0 # Token type + try: + self.enterOuterAlt(localctx, 1) + self.state = 144 + localctx.first = self.namedExpr() + self.state = 147 + self._errHandler.sync(self) + _la = self._input.LA(1) + if _la==RASPParser.T__18: + self.state = 145 + self.match(RASPParser.T__18) + self.state = 146 + localctx.cont = self.namedExprsList() + + + except RecognitionException as re: + localctx.exception = re + self._errHandler.reportError(self, re) + self._errHandler.recover(self, re) + finally: + self.exitRule() + return localctx + + + class NamedExprContext(ParserRuleContext): + + def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1): + super().__init__(parent, invokingState) + self.parser = parser + self.key = None # ExprContext + self.val = None # ExprContext + + def expr(self, i:int=None): + if i is None: + return self.getTypedRuleContexts(RASPParser.ExprContext) + else: + return self.getTypedRuleContext(RASPParser.ExprContext,i) + + + def getRuleIndex(self): + return RASPParser.RULE_namedExpr + + def enterRule(self, listener:ParseTreeListener): + if hasattr( listener, "enterNamedExpr" ): + listener.enterNamedExpr(self) + + def exitRule(self, listener:ParseTreeListener): + if hasattr( listener, "exitNamedExpr" ): + listener.exitNamedExpr(self) + + def accept(self, visitor:ParseTreeVisitor): + if hasattr( visitor, "visitNamedExpr" ): + return visitor.visitNamedExpr(self) + else: + return visitor.visitChildren(self) + + + + + def namedExpr(self): + + localctx = RASPParser.NamedExprContext(self, self._ctx, self.state) + self.enterRule(localctx, 28, self.RULE_namedExpr) + try: + self.enterOuterAlt(localctx, 1) + self.state = 149 + localctx.key = self.expr(0) + self.state = 150 + self.match(RASPParser.T__20) + self.state = 151 + localctx.val = self.expr(0) + except RecognitionException as re: + localctx.exception = re + self._errHandler.reportError(self, re) + self._errHandler.recover(self, re) + finally: + self.exitRule() + return localctx + + + class RaspstatementsListContext(ParserRuleContext): + + def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1): + super().__init__(parent, invokingState) + self.parser = parser + self.first = None # RaspstatementContext + self.cont = None # RaspstatementsListContext + + def raspstatement(self): + return self.getTypedRuleContext(RASPParser.RaspstatementContext,0) + + + def Comment(self): + return self.getToken(RASPParser.Comment, 0) + + def raspstatementsList(self): + return self.getTypedRuleContext(RASPParser.RaspstatementsListContext,0) + + + def getRuleIndex(self): + return RASPParser.RULE_raspstatementsList + + def enterRule(self, 
listener:ParseTreeListener): + if hasattr( listener, "enterRaspstatementsList" ): + listener.enterRaspstatementsList(self) + + def exitRule(self, listener:ParseTreeListener): + if hasattr( listener, "exitRaspstatementsList" ): + listener.exitRaspstatementsList(self) + + def accept(self, visitor:ParseTreeVisitor): + if hasattr( visitor, "visitRaspstatementsList" ): + return visitor.visitRaspstatementsList(self) + else: + return visitor.visitChildren(self) + + + + + def raspstatementsList(self): + + localctx = RASPParser.RaspstatementsListContext(self, self._ctx, self.state) + self.enterRule(localctx, 30, self.RULE_raspstatementsList) + self._la = 0 # Token type + try: + self.state = 164 + self._errHandler.sync(self) + token = self._input.LA(1) + if token in [RASPParser.T__14, RASPParser.T__16, RASPParser.T__17, RASPParser.T__21, RASPParser.T__22, RASPParser.T__24, RASPParser.T__27, RASPParser.T__28, RASPParser.T__30, RASPParser.T__31, RASPParser.T__32, RASPParser.T__33, RASPParser.T__34, RASPParser.T__44, RASPParser.T__50, RASPParser.T__51, RASPParser.T__52, RASPParser.Float, RASPParser.PosInt, RASPParser.String, RASPParser.ID]: + self.enterOuterAlt(localctx, 1) + self.state = 153 + localctx.first = self.raspstatement() + self.state = 155 + self._errHandler.sync(self) + la_ = self._interp.adaptivePredict(self._input,12,self._ctx) + if la_ == 1: + self.state = 154 + self.match(RASPParser.Comment) + + + self.state = 158 + self._errHandler.sync(self) + _la = self._input.LA(1) + if (((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << RASPParser.T__14) | (1 << RASPParser.T__16) | (1 << RASPParser.T__17) | (1 << RASPParser.T__21) | (1 << RASPParser.T__22) | (1 << RASPParser.T__24) | (1 << RASPParser.T__27) | (1 << RASPParser.T__28) | (1 << RASPParser.T__30) | (1 << RASPParser.T__31) | (1 << RASPParser.T__32) | (1 << RASPParser.T__33) | (1 << RASPParser.T__34) | (1 << RASPParser.T__44) | (1 << RASPParser.T__50) | (1 << RASPParser.T__51) | (1 << RASPParser.T__52) | (1 << RASPParser.Float) | (1 << RASPParser.PosInt) | (1 << RASPParser.String) | (1 << RASPParser.Comment) | (1 << RASPParser.ID))) != 0): + self.state = 157 + localctx.cont = self.raspstatementsList() + + + pass + elif token in [RASPParser.Comment]: + self.enterOuterAlt(localctx, 2) + self.state = 160 + self.match(RASPParser.Comment) + self.state = 162 + self._errHandler.sync(self) + _la = self._input.LA(1) + if (((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << RASPParser.T__14) | (1 << RASPParser.T__16) | (1 << RASPParser.T__17) | (1 << RASPParser.T__21) | (1 << RASPParser.T__22) | (1 << RASPParser.T__24) | (1 << RASPParser.T__27) | (1 << RASPParser.T__28) | (1 << RASPParser.T__30) | (1 << RASPParser.T__31) | (1 << RASPParser.T__32) | (1 << RASPParser.T__33) | (1 << RASPParser.T__34) | (1 << RASPParser.T__44) | (1 << RASPParser.T__50) | (1 << RASPParser.T__51) | (1 << RASPParser.T__52) | (1 << RASPParser.Float) | (1 << RASPParser.PosInt) | (1 << RASPParser.String) | (1 << RASPParser.Comment) | (1 << RASPParser.ID))) != 0): + self.state = 161 + localctx.cont = self.raspstatementsList() + + + pass + else: + raise NoViableAltException(self) + + except RecognitionException as re: + localctx.exception = re + self._errHandler.reportError(self, re) + self._errHandler.recover(self, re) + finally: + self.exitRule() + return localctx + + + class FuncDefContext(ParserRuleContext): + + def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1): + super().__init__(parent, invokingState) + self.parser = parser + self.name = None # Token + 
self.arguments = None # IdsListContext + self.mainbody = None # RaspstatementsListContext + self.retstatement = None # ReturnStatementContext + + def ID(self): + return self.getToken(RASPParser.ID, 0) + + def returnStatement(self): + return self.getTypedRuleContext(RASPParser.ReturnStatementContext,0) + + + def commentsList(self, i:int=None): + if i is None: + return self.getTypedRuleContexts(RASPParser.CommentsListContext) + else: + return self.getTypedRuleContext(RASPParser.CommentsListContext,i) + + + def idsList(self): + return self.getTypedRuleContext(RASPParser.IdsListContext,0) + + + def raspstatementsList(self): + return self.getTypedRuleContext(RASPParser.RaspstatementsListContext,0) + + + def getRuleIndex(self): + return RASPParser.RULE_funcDef + + def enterRule(self, listener:ParseTreeListener): + if hasattr( listener, "enterFuncDef" ): + listener.enterFuncDef(self) + + def exitRule(self, listener:ParseTreeListener): + if hasattr( listener, "exitFuncDef" ): + listener.exitFuncDef(self) + + def accept(self, visitor:ParseTreeVisitor): + if hasattr( visitor, "visitFuncDef" ): + return visitor.visitFuncDef(self) + else: + return visitor.visitChildren(self) + + + + + def funcDef(self): + + localctx = RASPParser.FuncDefContext(self, self._ctx, self.state) + self.enterRule(localctx, 32, self.RULE_funcDef) + self._la = 0 # Token type + try: + self.enterOuterAlt(localctx, 1) + self.state = 166 + self.match(RASPParser.T__21) + self.state = 167 + localctx.name = self.match(RASPParser.ID) + self.state = 168 + self.match(RASPParser.T__17) + self.state = 170 + self._errHandler.sync(self) + _la = self._input.LA(1) + if _la==RASPParser.ID: + self.state = 169 + localctx.arguments = self.idsList() + + + self.state = 172 + self.match(RASPParser.T__19) + self.state = 173 + self.match(RASPParser.T__22) + self.state = 175 + self._errHandler.sync(self) + la_ = self._interp.adaptivePredict(self._input,17,self._ctx) + if la_ == 1: + self.state = 174 + self.commentsList() + + + self.state = 178 + self._errHandler.sync(self) + _la = self._input.LA(1) + if (((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << RASPParser.T__14) | (1 << RASPParser.T__16) | (1 << RASPParser.T__17) | (1 << RASPParser.T__21) | (1 << RASPParser.T__22) | (1 << RASPParser.T__24) | (1 << RASPParser.T__27) | (1 << RASPParser.T__28) | (1 << RASPParser.T__30) | (1 << RASPParser.T__31) | (1 << RASPParser.T__32) | (1 << RASPParser.T__33) | (1 << RASPParser.T__34) | (1 << RASPParser.T__44) | (1 << RASPParser.T__50) | (1 << RASPParser.T__51) | (1 << RASPParser.T__52) | (1 << RASPParser.Float) | (1 << RASPParser.PosInt) | (1 << RASPParser.String) | (1 << RASPParser.Comment) | (1 << RASPParser.ID))) != 0): + self.state = 177 + localctx.mainbody = self.raspstatementsList() + + + self.state = 180 + localctx.retstatement = self.returnStatement() + self.state = 182 + self._errHandler.sync(self) + _la = self._input.LA(1) + if _la==RASPParser.Comment: + self.state = 181 + self.commentsList() + + + self.state = 184 + self.match(RASPParser.T__23) + except RecognitionException as re: + localctx.exception = re + self._errHandler.reportError(self, re) + self._errHandler.recover(self, re) + finally: + self.exitRule() + return localctx + + + class ForLoopContext(ParserRuleContext): + + def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1): + super().__init__(parent, invokingState) + self.parser = parser + self.iterator = None # IdsListContext + self.iterable = None # ExprContext + self.mainbody = None # RaspstatementsListContext + + 
def idsList(self): + return self.getTypedRuleContext(RASPParser.IdsListContext,0) + + + def expr(self): + return self.getTypedRuleContext(RASPParser.ExprContext,0) + + + def raspstatementsList(self): + return self.getTypedRuleContext(RASPParser.RaspstatementsListContext,0) + + + def getRuleIndex(self): + return RASPParser.RULE_forLoop + + def enterRule(self, listener:ParseTreeListener): + if hasattr( listener, "enterForLoop" ): + listener.enterForLoop(self) + + def exitRule(self, listener:ParseTreeListener): + if hasattr( listener, "exitForLoop" ): + listener.exitForLoop(self) + + def accept(self, visitor:ParseTreeVisitor): + if hasattr( visitor, "visitForLoop" ): + return visitor.visitForLoop(self) + else: + return visitor.visitChildren(self) + + + + + def forLoop(self): + + localctx = RASPParser.ForLoopContext(self, self._ctx, self.state) + self.enterRule(localctx, 34, self.RULE_forLoop) + try: + self.enterOuterAlt(localctx, 1) + self.state = 186 + self.match(RASPParser.T__24) + self.state = 187 + localctx.iterator = self.idsList() + self.state = 188 + self.match(RASPParser.T__25) + self.state = 189 + localctx.iterable = self.expr(0) + self.state = 190 + self.match(RASPParser.T__22) + self.state = 191 + localctx.mainbody = self.raspstatementsList() + self.state = 192 + self.match(RASPParser.T__23) + except RecognitionException as re: + localctx.exception = re + self._errHandler.reportError(self, re) + self._errHandler.recover(self, re) + finally: + self.exitRule() + return localctx + + + class CommentsListContext(ParserRuleContext): + + def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1): + super().__init__(parent, invokingState) + self.parser = parser + self.cont = None # CommentsListContext + + def Comment(self): + return self.getToken(RASPParser.Comment, 0) + + def commentsList(self): + return self.getTypedRuleContext(RASPParser.CommentsListContext,0) + + + def getRuleIndex(self): + return RASPParser.RULE_commentsList + + def enterRule(self, listener:ParseTreeListener): + if hasattr( listener, "enterCommentsList" ): + listener.enterCommentsList(self) + + def exitRule(self, listener:ParseTreeListener): + if hasattr( listener, "exitCommentsList" ): + listener.exitCommentsList(self) + + def accept(self, visitor:ParseTreeVisitor): + if hasattr( visitor, "visitCommentsList" ): + return visitor.visitCommentsList(self) + else: + return visitor.visitChildren(self) + + + + + def commentsList(self): + + localctx = RASPParser.CommentsListContext(self, self._ctx, self.state) + self.enterRule(localctx, 36, self.RULE_commentsList) + try: + self.enterOuterAlt(localctx, 1) + self.state = 194 + self.match(RASPParser.Comment) + self.state = 196 + self._errHandler.sync(self) + la_ = self._interp.adaptivePredict(self._input,20,self._ctx) + if la_ == 1: + self.state = 195 + localctx.cont = self.commentsList() + + + except RecognitionException as re: + localctx.exception = re + self._errHandler.reportError(self, re) + self._errHandler.recover(self, re) + finally: + self.exitRule() + return localctx + + + class AssignsAndCommentsListContext(ParserRuleContext): + + def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1): + super().__init__(parent, invokingState) + self.parser = parser + self.first = None # AssignContext + self.cont = None # AssignsAndCommentsListContext + + def assign(self): + return self.getTypedRuleContext(RASPParser.AssignContext,0) + + + def Comment(self): + return self.getToken(RASPParser.Comment, 0) + + def 
assignsAndCommentsList(self): + return self.getTypedRuleContext(RASPParser.AssignsAndCommentsListContext,0) + + + def getRuleIndex(self): + return RASPParser.RULE_assignsAndCommentsList + + def enterRule(self, listener:ParseTreeListener): + if hasattr( listener, "enterAssignsAndCommentsList" ): + listener.enterAssignsAndCommentsList(self) + + def exitRule(self, listener:ParseTreeListener): + if hasattr( listener, "exitAssignsAndCommentsList" ): + listener.exitAssignsAndCommentsList(self) + + def accept(self, visitor:ParseTreeVisitor): + if hasattr( visitor, "visitAssignsAndCommentsList" ): + return visitor.visitAssignsAndCommentsList(self) + else: + return visitor.visitChildren(self) + + + + + def assignsAndCommentsList(self): + + localctx = RASPParser.AssignsAndCommentsListContext(self, self._ctx, self.state) + self.enterRule(localctx, 38, self.RULE_assignsAndCommentsList) + try: + self.state = 210 + self._errHandler.sync(self) + token = self._input.LA(1) + if token in [RASPParser.ID]: + self.enterOuterAlt(localctx, 1) + self.state = 198 + localctx.first = self.assign() + self.state = 199 + self.match(RASPParser.T__0) + self.state = 201 + self._errHandler.sync(self) + la_ = self._interp.adaptivePredict(self._input,21,self._ctx) + if la_ == 1: + self.state = 200 + self.match(RASPParser.Comment) + + + self.state = 204 + self._errHandler.sync(self) + la_ = self._interp.adaptivePredict(self._input,22,self._ctx) + if la_ == 1: + self.state = 203 + localctx.cont = self.assignsAndCommentsList() + + + pass + elif token in [RASPParser.Comment]: + self.enterOuterAlt(localctx, 2) + self.state = 206 + self.match(RASPParser.Comment) + self.state = 208 + self._errHandler.sync(self) + la_ = self._interp.adaptivePredict(self._input,23,self._ctx) + if la_ == 1: + self.state = 207 + localctx.cont = self.assignsAndCommentsList() + + + pass + else: + raise NoViableAltException(self) + + except RecognitionException as re: + localctx.exception = re + self._errHandler.reportError(self, re) + self._errHandler.recover(self, re) + finally: + self.exitRule() + return localctx + + + class ReturnStatementContext(ParserRuleContext): + + def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1): + super().__init__(parent, invokingState) + self.parser = parser + self.res = None # ExprsListContext + + def exprsList(self): + return self.getTypedRuleContext(RASPParser.ExprsListContext,0) + + + def getRuleIndex(self): + return RASPParser.RULE_returnStatement + + def enterRule(self, listener:ParseTreeListener): + if hasattr( listener, "enterReturnStatement" ): + listener.enterReturnStatement(self) + + def exitRule(self, listener:ParseTreeListener): + if hasattr( listener, "exitReturnStatement" ): + listener.exitReturnStatement(self) + + def accept(self, visitor:ParseTreeVisitor): + if hasattr( visitor, "visitReturnStatement" ): + return visitor.visitReturnStatement(self) + else: + return visitor.visitChildren(self) + + + + + def returnStatement(self): + + localctx = RASPParser.ReturnStatementContext(self, self._ctx, self.state) + self.enterRule(localctx, 40, self.RULE_returnStatement) + try: + self.enterOuterAlt(localctx, 1) + self.state = 212 + self.match(RASPParser.T__26) + self.state = 213 + localctx.res = self.exprsList() + self.state = 214 + self.match(RASPParser.T__0) + except RecognitionException as re: + localctx.exception = re + self._errHandler.reportError(self, re) + self._errHandler.recover(self, re) + finally: + self.exitRule() + return localctx + + + class 
IdsListContext(ParserRuleContext): + + def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1): + super().__init__(parent, invokingState) + self.parser = parser + self.first = None # Token + self.cont = None # IdsListContext + + def ID(self): + return self.getToken(RASPParser.ID, 0) + + def idsList(self): + return self.getTypedRuleContext(RASPParser.IdsListContext,0) + + + def getRuleIndex(self): + return RASPParser.RULE_idsList + + def enterRule(self, listener:ParseTreeListener): + if hasattr( listener, "enterIdsList" ): + listener.enterIdsList(self) + + def exitRule(self, listener:ParseTreeListener): + if hasattr( listener, "exitIdsList" ): + listener.exitIdsList(self) + + def accept(self, visitor:ParseTreeVisitor): + if hasattr( visitor, "visitIdsList" ): + return visitor.visitIdsList(self) + else: + return visitor.visitChildren(self) + + + + + def idsList(self): + + localctx = RASPParser.IdsListContext(self, self._ctx, self.state) + self.enterRule(localctx, 42, self.RULE_idsList) + self._la = 0 # Token type + try: + self.enterOuterAlt(localctx, 1) + self.state = 216 + localctx.first = self.match(RASPParser.ID) + self.state = 219 + self._errHandler.sync(self) + _la = self._input.LA(1) + if _la==RASPParser.T__18: + self.state = 217 + self.match(RASPParser.T__18) + self.state = 218 + localctx.cont = self.idsList() + + + except RecognitionException as re: + localctx.exception = re + self._errHandler.reportError(self, re) + self._errHandler.recover(self, re) + finally: + self.exitRule() + return localctx + + + class AggregateExprContext(ParserRuleContext): + + def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1): + super().__init__(parent, invokingState) + self.parser = parser + self.sel = None # ExprContext + self.seq = None # ExprContext + self.default = None # ExprContext + + def expr(self, i:int=None): + if i is None: + return self.getTypedRuleContexts(RASPParser.ExprContext) + else: + return self.getTypedRuleContext(RASPParser.ExprContext,i) + + + def getRuleIndex(self): + return RASPParser.RULE_aggregateExpr + + def enterRule(self, listener:ParseTreeListener): + if hasattr( listener, "enterAggregateExpr" ): + listener.enterAggregateExpr(self) + + def exitRule(self, listener:ParseTreeListener): + if hasattr( listener, "exitAggregateExpr" ): + listener.exitAggregateExpr(self) + + def accept(self, visitor:ParseTreeVisitor): + if hasattr( visitor, "visitAggregateExpr" ): + return visitor.visitAggregateExpr(self) + else: + return visitor.visitChildren(self) + + + + + def aggregateExpr(self): + + localctx = RASPParser.AggregateExprContext(self, self._ctx, self.state) + self.enterRule(localctx, 44, self.RULE_aggregateExpr) + self._la = 0 # Token type + try: + self.enterOuterAlt(localctx, 1) + self.state = 221 + self.match(RASPParser.T__27) + self.state = 222 + localctx.sel = self.expr(0) + self.state = 223 + self.match(RASPParser.T__18) + self.state = 224 + localctx.seq = self.expr(0) + self.state = 227 + self._errHandler.sync(self) + _la = self._input.LA(1) + if _la==RASPParser.T__18: + self.state = 225 + self.match(RASPParser.T__18) + self.state = 226 + localctx.default = self.expr(0) + + + self.state = 229 + self.match(RASPParser.T__19) + except RecognitionException as re: + localctx.exception = re + self._errHandler.reportError(self, re) + self._errHandler.recover(self, re) + finally: + self.exitRule() + return localctx + + + class AtomContext(ParserRuleContext): + + def __init__(self, parser, parent:ParserRuleContext=None, 
invokingState:int=-1): + super().__init__(parent, invokingState) + self.parser = parser + self.anint = None # Token + self.afloat = None # Token + self.astring = None # Token + + def PosInt(self): + return self.getToken(RASPParser.PosInt, 0) + + def Float(self): + return self.getToken(RASPParser.Float, 0) + + def String(self): + return self.getToken(RASPParser.String, 0) + + def getRuleIndex(self): + return RASPParser.RULE_atom + + def enterRule(self, listener:ParseTreeListener): + if hasattr( listener, "enterAtom" ): + listener.enterAtom(self) + + def exitRule(self, listener:ParseTreeListener): + if hasattr( listener, "exitAtom" ): + listener.exitAtom(self) + + def accept(self, visitor:ParseTreeVisitor): + if hasattr( visitor, "visitAtom" ): + return visitor.visitAtom(self) + else: + return visitor.visitChildren(self) + + + + + def atom(self): + + localctx = RASPParser.AtomContext(self, self._ctx, self.state) + self.enterRule(localctx, 46, self.RULE_atom) + try: + self.state = 234 + self._errHandler.sync(self) + token = self._input.LA(1) + if token in [RASPParser.PosInt]: + self.enterOuterAlt(localctx, 1) + self.state = 231 + localctx.anint = self.match(RASPParser.PosInt) + pass + elif token in [RASPParser.Float]: + self.enterOuterAlt(localctx, 2) + self.state = 232 + localctx.afloat = self.match(RASPParser.Float) + pass + elif token in [RASPParser.String]: + self.enterOuterAlt(localctx, 3) + self.state = 233 + localctx.astring = self.match(RASPParser.String) + pass + else: + raise NoViableAltException(self) + + except RecognitionException as re: + localctx.exception = re + self._errHandler.reportError(self, re) + self._errHandler.recover(self, re) + finally: + self.exitRule() + return localctx + + + class ExprContext(ParserRuleContext): + + def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1): + super().__init__(parent, invokingState) + self.parser = parser + self.indexable = None # ExprContext + self.unfORfun = None # ExprContext + self.left = None # ExprContext + self.res1 = None # ExprContext + self.contained = None # ExprContext + self.bracketed = None # ExprContext + self.uop = None # Token + self.uexpr = None # ExprContext + self.key = None # ExprContext + self.query = None # ExprContext + self.selop = None # Token + self.var = None # Token + self.standalone = None # AtomContext + self.aggregate = None # AggregateExprContext + self.rangevals = None # ExprsListContext + self.listcomp = None # ListCompExprContext + self.dictcomp = None # DictCompExprContext + self.lists = None # ExprsListContext + self.singleList = None # ExprContext + self.bop = None # Token + self.right = None # ExprContext + self.cond = None # ExprContext + self.res2 = None # ExprContext + self.container = None # ExprContext + self.index = None # ExprContext + self.inputexprs = None # ExprsListContext + + def expr(self, i:int=None): + if i is None: + return self.getTypedRuleContexts(RASPParser.ExprContext) + else: + return self.getTypedRuleContext(RASPParser.ExprContext,i) + + + def ID(self): + return self.getToken(RASPParser.ID, 0) + + def atom(self): + return self.getTypedRuleContext(RASPParser.AtomContext,0) + + + def aList(self): + return self.getTypedRuleContext(RASPParser.AListContext,0) + + + def aDict(self): + return self.getTypedRuleContext(RASPParser.ADictContext,0) + + + def aggregateExpr(self): + return self.getTypedRuleContext(RASPParser.AggregateExprContext,0) + + + def exprsList(self): + return self.getTypedRuleContext(RASPParser.ExprsListContext,0) + + + def 
listCompExpr(self): + return self.getTypedRuleContext(RASPParser.ListCompExprContext,0) + + + def dictCompExpr(self): + return self.getTypedRuleContext(RASPParser.DictCompExprContext,0) + + + def getRuleIndex(self): + return RASPParser.RULE_expr + + def enterRule(self, listener:ParseTreeListener): + if hasattr( listener, "enterExpr" ): + listener.enterExpr(self) + + def exitRule(self, listener:ParseTreeListener): + if hasattr( listener, "exitExpr" ): + listener.exitExpr(self) + + def accept(self, visitor:ParseTreeVisitor): + if hasattr( visitor, "visitExpr" ): + return visitor.visitExpr(self) + else: + return visitor.visitChildren(self) + + + + def expr(self, _p:int=0): + _parentctx = self._ctx + _parentState = self.state + localctx = RASPParser.ExprContext(self, self._ctx, _parentState) + _prevctx = localctx + _startState = 48 + self.enterRecursionRule(localctx, 48, self.RULE_expr, _p) + self._la = 0 # Token type + try: + self.enterOuterAlt(localctx, 1) + self.state = 275 + self._errHandler.sync(self) + la_ = self._interp.adaptivePredict(self._input,28,self._ctx) + if la_ == 1: + self.state = 237 + self.match(RASPParser.T__17) + self.state = 238 + localctx.bracketed = self.expr(0) + self.state = 239 + self.match(RASPParser.T__19) + pass + + elif la_ == 2: + self.state = 241 + localctx.uop = self._input.LT(1) + _la = self._input.LA(1) + if not((((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << RASPParser.T__30) | (1 << RASPParser.T__31) | (1 << RASPParser.T__32))) != 0)): + localctx.uop = self._errHandler.recoverInline(self) + else: + self._errHandler.reportMatch(self) + self.consume() + self.state = 242 + localctx.uexpr = self.expr(21) + pass + + elif la_ == 3: + self.state = 243 + localctx.uop = self._input.LT(1) + _la = self._input.LA(1) + if not(_la==RASPParser.T__33 or _la==RASPParser.T__34): + localctx.uop = self._errHandler.recoverInline(self) + else: + self._errHandler.reportMatch(self) + self.consume() + self.state = 244 + self.match(RASPParser.T__17) + self.state = 245 + localctx.uexpr = self.expr(0) + self.state = 246 + self.match(RASPParser.T__19) + pass + + elif la_ == 4: + self.state = 248 + self.match(RASPParser.T__44) + self.state = 249 + localctx.key = self.expr(0) + self.state = 250 + self.match(RASPParser.T__18) + self.state = 251 + localctx.query = self.expr(0) + self.state = 252 + self.match(RASPParser.T__18) + self.state = 253 + localctx.selop = self._input.LT(1) + _la = self._input.LA(1) + if not((((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << RASPParser.T__39) | (1 << RASPParser.T__40) | (1 << RASPParser.T__41) | (1 << RASPParser.T__42) | (1 << RASPParser.T__43) | (1 << RASPParser.T__45))) != 0)): + localctx.selop = self._errHandler.recoverInline(self) + else: + self._errHandler.reportMatch(self) + self.consume() + self.state = 254 + self.match(RASPParser.T__19) + pass + + elif la_ == 5: + self.state = 256 + localctx.var = self.match(RASPParser.ID) + pass + + elif la_ == 6: + self.state = 257 + localctx.standalone = self.atom() + pass + + elif la_ == 7: + self.state = 258 + self.aList() + pass + + elif la_ == 8: + self.state = 259 + self.aDict() + pass + + elif la_ == 9: + self.state = 260 + localctx.aggregate = self.aggregateExpr() + pass + + elif la_ == 10: + self.state = 261 + self.match(RASPParser.T__50) + self.state = 262 + localctx.rangevals = self.exprsList() + self.state = 263 + self.match(RASPParser.T__19) + pass + + elif la_ == 11: + self.state = 265 + localctx.listcomp = self.listCompExpr() + pass + + elif la_ == 12: + self.state = 266 + localctx.dictcomp = 
self.dictCompExpr() + pass + + elif la_ == 13: + self.state = 267 + self.match(RASPParser.T__51) + self.state = 268 + localctx.lists = self.exprsList() + self.state = 269 + self.match(RASPParser.T__19) + pass + + elif la_ == 14: + self.state = 271 + self.match(RASPParser.T__52) + self.state = 272 + localctx.singleList = self.expr(0) + self.state = 273 + self.match(RASPParser.T__19) + pass + + + self._ctx.stop = self._input.LT(-1) + self.state = 317 + self._errHandler.sync(self) + _alt = self._interp.adaptivePredict(self._input,31,self._ctx) + while _alt!=2 and _alt!=ATN.INVALID_ALT_NUMBER: + if _alt==1: + if self._parseListeners is not None: + self.triggerExitRuleEvent() + _prevctx = localctx + self.state = 315 + self._errHandler.sync(self) + la_ = self._interp.adaptivePredict(self._input,30,self._ctx) + if la_ == 1: + localctx = RASPParser.ExprContext(self, _parentctx, _parentState) + localctx.left = _prevctx + self.pushNewRecursionContext(localctx, _startState, self.RULE_expr) + self.state = 277 + if not self.precpred(self._ctx, 19): + from antlr4.error.Errors import FailedPredicateException + raise FailedPredicateException(self, "self.precpred(self._ctx, 19)") + self.state = 278 + localctx.bop = self.match(RASPParser.T__35) + self.state = 279 + localctx.right = self.expr(20) + pass + + elif la_ == 2: + localctx = RASPParser.ExprContext(self, _parentctx, _parentState) + localctx.left = _prevctx + self.pushNewRecursionContext(localctx, _startState, self.RULE_expr) + self.state = 280 + if not self.precpred(self._ctx, 18): + from antlr4.error.Errors import FailedPredicateException + raise FailedPredicateException(self, "self.precpred(self._ctx, 18)") + self.state = 281 + localctx.bop = self._input.LT(1) + _la = self._input.LA(1) + if not(_la==RASPParser.T__36 or _la==RASPParser.T__37): + localctx.bop = self._errHandler.recoverInline(self) + else: + self._errHandler.reportMatch(self) + self.consume() + self.state = 282 + localctx.right = self.expr(19) + pass + + elif la_ == 3: + localctx = RASPParser.ExprContext(self, _parentctx, _parentState) + localctx.left = _prevctx + self.pushNewRecursionContext(localctx, _startState, self.RULE_expr) + self.state = 283 + if not self.precpred(self._ctx, 17): + from antlr4.error.Errors import FailedPredicateException + raise FailedPredicateException(self, "self.precpred(self._ctx, 17)") + self.state = 284 + localctx.bop = self.match(RASPParser.T__38) + self.state = 285 + localctx.right = self.expr(18) + pass + + elif la_ == 4: + localctx = RASPParser.ExprContext(self, _parentctx, _parentState) + localctx.left = _prevctx + self.pushNewRecursionContext(localctx, _startState, self.RULE_expr) + self.state = 286 + if not self.precpred(self._ctx, 16): + from antlr4.error.Errors import FailedPredicateException + raise FailedPredicateException(self, "self.precpred(self._ctx, 16)") + self.state = 287 + localctx.bop = self._input.LT(1) + _la = self._input.LA(1) + if not(_la==RASPParser.T__31 or _la==RASPParser.T__32): + localctx.bop = self._errHandler.recoverInline(self) + else: + self._errHandler.reportMatch(self) + self.consume() + self.state = 288 + localctx.right = self.expr(17) + pass + + elif la_ == 5: + localctx = RASPParser.ExprContext(self, _parentctx, _parentState) + localctx.left = _prevctx + self.pushNewRecursionContext(localctx, _startState, self.RULE_expr) + self.state = 289 + if not self.precpred(self._ctx, 15): + from antlr4.error.Errors import FailedPredicateException + raise FailedPredicateException(self, "self.precpred(self._ctx, 15)") + 
self.state = 290 + localctx.bop = self._input.LT(1) + _la = self._input.LA(1) + if not((((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << RASPParser.T__39) | (1 << RASPParser.T__40) | (1 << RASPParser.T__41) | (1 << RASPParser.T__42) | (1 << RASPParser.T__43))) != 0)): + localctx.bop = self._errHandler.recoverInline(self) + else: + self._errHandler.reportMatch(self) + self.consume() + self.state = 291 + localctx.right = self.expr(16) + pass + + elif la_ == 6: + localctx = RASPParser.ExprContext(self, _parentctx, _parentState) + localctx.left = _prevctx + self.pushNewRecursionContext(localctx, _startState, self.RULE_expr) + self.state = 292 + if not self.precpred(self._ctx, 13): + from antlr4.error.Errors import FailedPredicateException + raise FailedPredicateException(self, "self.precpred(self._ctx, 13)") + self.state = 293 + localctx.bop = self._input.LT(1) + _la = self._input.LA(1) + if not(_la==RASPParser.T__46 or _la==RASPParser.T__47): + localctx.bop = self._errHandler.recoverInline(self) + else: + self._errHandler.reportMatch(self) + self.consume() + self.state = 294 + localctx.right = self.expr(14) + pass + + elif la_ == 7: + localctx = RASPParser.ExprContext(self, _parentctx, _parentState) + localctx.res1 = _prevctx + self.pushNewRecursionContext(localctx, _startState, self.RULE_expr) + self.state = 295 + if not self.precpred(self._ctx, 12): + from antlr4.error.Errors import FailedPredicateException + raise FailedPredicateException(self, "self.precpred(self._ctx, 12)") + self.state = 296 + self.match(RASPParser.T__48) + self.state = 297 + localctx.cond = self.expr(0) + self.state = 298 + self.match(RASPParser.T__49) + self.state = 299 + localctx.res2 = self.expr(13) + pass + + elif la_ == 8: + localctx = RASPParser.ExprContext(self, _parentctx, _parentState) + localctx.contained = _prevctx + self.pushNewRecursionContext(localctx, _startState, self.RULE_expr) + self.state = 301 + if not self.precpred(self._ctx, 3): + from antlr4.error.Errors import FailedPredicateException + raise FailedPredicateException(self, "self.precpred(self._ctx, 3)") + self.state = 302 + self.match(RASPParser.T__25) + self.state = 303 + localctx.container = self.expr(4) + pass + + elif la_ == 9: + localctx = RASPParser.ExprContext(self, _parentctx, _parentState) + localctx.indexable = _prevctx + self.pushNewRecursionContext(localctx, _startState, self.RULE_expr) + self.state = 304 + if not self.precpred(self._ctx, 23): + from antlr4.error.Errors import FailedPredicateException + raise FailedPredicateException(self, "self.precpred(self._ctx, 23)") + self.state = 305 + self.match(RASPParser.T__28) + self.state = 306 + localctx.index = self.expr(0) + self.state = 307 + self.match(RASPParser.T__29) + pass + + elif la_ == 10: + localctx = RASPParser.ExprContext(self, _parentctx, _parentState) + localctx.unfORfun = _prevctx + self.pushNewRecursionContext(localctx, _startState, self.RULE_expr) + self.state = 309 + if not self.precpred(self._ctx, 22): + from antlr4.error.Errors import FailedPredicateException + raise FailedPredicateException(self, "self.precpred(self._ctx, 22)") + self.state = 310 + self.match(RASPParser.T__17) + self.state = 312 + self._errHandler.sync(self) + _la = self._input.LA(1) + if (((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << RASPParser.T__17) | (1 << RASPParser.T__22) | (1 << RASPParser.T__27) | (1 << RASPParser.T__28) | (1 << RASPParser.T__30) | (1 << RASPParser.T__31) | (1 << RASPParser.T__32) | (1 << RASPParser.T__33) | (1 << RASPParser.T__34) | (1 << RASPParser.T__44) | (1 << 
RASPParser.T__50) | (1 << RASPParser.T__51) | (1 << RASPParser.T__52) | (1 << RASPParser.Float) | (1 << RASPParser.PosInt) | (1 << RASPParser.String) | (1 << RASPParser.ID))) != 0): + self.state = 311 + localctx.inputexprs = self.exprsList() + + + self.state = 314 + self.match(RASPParser.T__19) + pass + + + self.state = 319 + self._errHandler.sync(self) + _alt = self._interp.adaptivePredict(self._input,31,self._ctx) + + except RecognitionException as re: + localctx.exception = re + self._errHandler.reportError(self, re) + self._errHandler.recover(self, re) + finally: + self.unrollRecursionContexts(_parentctx) + return localctx + + + class AListContext(ParserRuleContext): + + def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1): + super().__init__(parent, invokingState) + self.parser = parser + self.listContents = None # ExprsListContext + + def exprsList(self): + return self.getTypedRuleContext(RASPParser.ExprsListContext,0) + + + def getRuleIndex(self): + return RASPParser.RULE_aList + + def enterRule(self, listener:ParseTreeListener): + if hasattr( listener, "enterAList" ): + listener.enterAList(self) + + def exitRule(self, listener:ParseTreeListener): + if hasattr( listener, "exitAList" ): + listener.exitAList(self) + + def accept(self, visitor:ParseTreeVisitor): + if hasattr( visitor, "visitAList" ): + return visitor.visitAList(self) + else: + return visitor.visitChildren(self) + + + + + def aList(self): + + localctx = RASPParser.AListContext(self, self._ctx, self.state) + self.enterRule(localctx, 50, self.RULE_aList) + self._la = 0 # Token type + try: + self.enterOuterAlt(localctx, 1) + self.state = 320 + self.match(RASPParser.T__28) + self.state = 322 + self._errHandler.sync(self) + _la = self._input.LA(1) + if (((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << RASPParser.T__17) | (1 << RASPParser.T__22) | (1 << RASPParser.T__27) | (1 << RASPParser.T__28) | (1 << RASPParser.T__30) | (1 << RASPParser.T__31) | (1 << RASPParser.T__32) | (1 << RASPParser.T__33) | (1 << RASPParser.T__34) | (1 << RASPParser.T__44) | (1 << RASPParser.T__50) | (1 << RASPParser.T__51) | (1 << RASPParser.T__52) | (1 << RASPParser.Float) | (1 << RASPParser.PosInt) | (1 << RASPParser.String) | (1 << RASPParser.ID))) != 0): + self.state = 321 + localctx.listContents = self.exprsList() + + + self.state = 324 + self.match(RASPParser.T__29) + except RecognitionException as re: + localctx.exception = re + self._errHandler.reportError(self, re) + self._errHandler.recover(self, re) + finally: + self.exitRule() + return localctx + + + class ADictContext(ParserRuleContext): + + def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1): + super().__init__(parent, invokingState) + self.parser = parser + self.dictContents = None # NamedExprsListContext + + def namedExprsList(self): + return self.getTypedRuleContext(RASPParser.NamedExprsListContext,0) + + + def getRuleIndex(self): + return RASPParser.RULE_aDict + + def enterRule(self, listener:ParseTreeListener): + if hasattr( listener, "enterADict" ): + listener.enterADict(self) + + def exitRule(self, listener:ParseTreeListener): + if hasattr( listener, "exitADict" ): + listener.exitADict(self) + + def accept(self, visitor:ParseTreeVisitor): + if hasattr( visitor, "visitADict" ): + return visitor.visitADict(self) + else: + return visitor.visitChildren(self) + + + + + def aDict(self): + + localctx = RASPParser.ADictContext(self, self._ctx, self.state) + self.enterRule(localctx, 52, self.RULE_aDict) + self._la = 0 # Token type + 
try: + self.enterOuterAlt(localctx, 1) + self.state = 326 + self.match(RASPParser.T__22) + self.state = 328 + self._errHandler.sync(self) + _la = self._input.LA(1) + if (((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << RASPParser.T__17) | (1 << RASPParser.T__22) | (1 << RASPParser.T__27) | (1 << RASPParser.T__28) | (1 << RASPParser.T__30) | (1 << RASPParser.T__31) | (1 << RASPParser.T__32) | (1 << RASPParser.T__33) | (1 << RASPParser.T__34) | (1 << RASPParser.T__44) | (1 << RASPParser.T__50) | (1 << RASPParser.T__51) | (1 << RASPParser.T__52) | (1 << RASPParser.Float) | (1 << RASPParser.PosInt) | (1 << RASPParser.String) | (1 << RASPParser.ID))) != 0): + self.state = 327 + localctx.dictContents = self.namedExprsList() + + + self.state = 330 + self.match(RASPParser.T__23) + except RecognitionException as re: + localctx.exception = re + self._errHandler.reportError(self, re) + self._errHandler.recover(self, re) + finally: + self.exitRule() + return localctx + + + class ListCompExprContext(ParserRuleContext): + + def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1): + super().__init__(parent, invokingState) + self.parser = parser + self.val = None # ExprContext + self.iterator = None # IdsListContext + self.iterable = None # ExprContext + + def expr(self, i:int=None): + if i is None: + return self.getTypedRuleContexts(RASPParser.ExprContext) + else: + return self.getTypedRuleContext(RASPParser.ExprContext,i) + + + def idsList(self): + return self.getTypedRuleContext(RASPParser.IdsListContext,0) + + + def getRuleIndex(self): + return RASPParser.RULE_listCompExpr + + def enterRule(self, listener:ParseTreeListener): + if hasattr( listener, "enterListCompExpr" ): + listener.enterListCompExpr(self) + + def exitRule(self, listener:ParseTreeListener): + if hasattr( listener, "exitListCompExpr" ): + listener.exitListCompExpr(self) + + def accept(self, visitor:ParseTreeVisitor): + if hasattr( visitor, "visitListCompExpr" ): + return visitor.visitListCompExpr(self) + else: + return visitor.visitChildren(self) + + + + + def listCompExpr(self): + + localctx = RASPParser.ListCompExprContext(self, self._ctx, self.state) + self.enterRule(localctx, 54, self.RULE_listCompExpr) + try: + self.enterOuterAlt(localctx, 1) + self.state = 332 + self.match(RASPParser.T__28) + self.state = 333 + localctx.val = self.expr(0) + self.state = 334 + self.match(RASPParser.T__24) + self.state = 335 + localctx.iterator = self.idsList() + self.state = 336 + self.match(RASPParser.T__25) + self.state = 337 + localctx.iterable = self.expr(0) + self.state = 338 + self.match(RASPParser.T__29) + except RecognitionException as re: + localctx.exception = re + self._errHandler.reportError(self, re) + self._errHandler.recover(self, re) + finally: + self.exitRule() + return localctx + + + class DictCompExprContext(ParserRuleContext): + + def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1): + super().__init__(parent, invokingState) + self.parser = parser + self.key = None # ExprContext + self.val = None # ExprContext + self.iterator = None # IdsListContext + self.iterable = None # ExprContext + + def expr(self, i:int=None): + if i is None: + return self.getTypedRuleContexts(RASPParser.ExprContext) + else: + return self.getTypedRuleContext(RASPParser.ExprContext,i) + + + def idsList(self): + return self.getTypedRuleContext(RASPParser.IdsListContext,0) + + + def getRuleIndex(self): + return RASPParser.RULE_dictCompExpr + + def enterRule(self, listener:ParseTreeListener): + if hasattr( 
listener, "enterDictCompExpr" ): + listener.enterDictCompExpr(self) + + def exitRule(self, listener:ParseTreeListener): + if hasattr( listener, "exitDictCompExpr" ): + listener.exitDictCompExpr(self) + + def accept(self, visitor:ParseTreeVisitor): + if hasattr( visitor, "visitDictCompExpr" ): + return visitor.visitDictCompExpr(self) + else: + return visitor.visitChildren(self) + + + + + def dictCompExpr(self): + + localctx = RASPParser.DictCompExprContext(self, self._ctx, self.state) + self.enterRule(localctx, 56, self.RULE_dictCompExpr) + try: + self.enterOuterAlt(localctx, 1) + self.state = 340 + self.match(RASPParser.T__22) + self.state = 341 + localctx.key = self.expr(0) + self.state = 342 + self.match(RASPParser.T__20) + self.state = 343 + localctx.val = self.expr(0) + self.state = 344 + self.match(RASPParser.T__24) + self.state = 345 + localctx.iterator = self.idsList() + self.state = 346 + self.match(RASPParser.T__25) + self.state = 347 + localctx.iterable = self.expr(0) + self.state = 348 + self.match(RASPParser.T__23) + except RecognitionException as re: + localctx.exception = re + self._errHandler.reportError(self, re) + self._errHandler.recover(self, re) + finally: + self.exitRule() + return localctx + + + + def sempred(self, localctx:RuleContext, ruleIndex:int, predIndex:int): + if self._predicates == None: + self._predicates = dict() + self._predicates[24] = self.expr_sempred + pred = self._predicates.get(ruleIndex, None) + if pred is None: + raise Exception("No predicate with index:" + str(ruleIndex)) + else: + return pred(localctx, predIndex) + + def expr_sempred(self, localctx:ExprContext, predIndex:int): + if predIndex == 0: + return self.precpred(self._ctx, 19) + + + if predIndex == 1: + return self.precpred(self._ctx, 18) + + + if predIndex == 2: + return self.precpred(self._ctx, 17) + + + if predIndex == 3: + return self.precpred(self._ctx, 16) + + + if predIndex == 4: + return self.precpred(self._ctx, 15) + + + if predIndex == 5: + return self.precpred(self._ctx, 13) + + + if predIndex == 6: + return self.precpred(self._ctx, 12) + + + if predIndex == 7: + return self.precpred(self._ctx, 3) + + + if predIndex == 8: + return self.precpred(self._ctx, 23) + + + if predIndex == 9: + return self.precpred(self._ctx, 22) + + + + + diff --git a/RASP_support/zzantlr/RASPVisitor.py b/RASP_support/zzantlr/RASPVisitor.py new file mode 100644 index 0000000..2f4ba8c --- /dev/null +++ b/RASP_support/zzantlr/RASPVisitor.py @@ -0,0 +1,158 @@ +# Generated from RASP.g4 by ANTLR 4.9 +from antlr4 import * +if __name__ is not None and "." in __name__: + from .RASPParser import RASPParser +else: + from RASPParser import RASPParser + +# This class defines a complete generic visitor for a parse tree produced by RASPParser. + +class RASPVisitor(ParseTreeVisitor): + + # Visit a parse tree produced by RASPParser#r. + def visitR(self, ctx:RASPParser.RContext): + return self.visitChildren(ctx) + + + # Visit a parse tree produced by RASPParser#statement. + def visitStatement(self, ctx:RASPParser.StatementContext): + return self.visitChildren(ctx) + + + # Visit a parse tree produced by RASPParser#raspstatement. + def visitRaspstatement(self, ctx:RASPParser.RaspstatementContext): + return self.visitChildren(ctx) + + + # Visit a parse tree produced by RASPParser#replstatement. + def visitReplstatement(self, ctx:RASPParser.ReplstatementContext): + return self.visitChildren(ctx) + + + # Visit a parse tree produced by RASPParser#setExample. 
+ def visitSetExample(self, ctx:RASPParser.SetExampleContext): + return self.visitChildren(ctx) + + + # Visit a parse tree produced by RASPParser#showExample. + def visitShowExample(self, ctx:RASPParser.ShowExampleContext): + return self.visitChildren(ctx) + + + # Visit a parse tree produced by RASPParser#toggleSeqVerbose. + def visitToggleSeqVerbose(self, ctx:RASPParser.ToggleSeqVerboseContext): + return self.visitChildren(ctx) + + + # Visit a parse tree produced by RASPParser#toggleExample. + def visitToggleExample(self, ctx:RASPParser.ToggleExampleContext): + return self.visitChildren(ctx) + + + # Visit a parse tree produced by RASPParser#exit. + def visitExit(self, ctx:RASPParser.ExitContext): + return self.visitChildren(ctx) + + + # Visit a parse tree produced by RASPParser#loadFile. + def visitLoadFile(self, ctx:RASPParser.LoadFileContext): + return self.visitChildren(ctx) + + + # Visit a parse tree produced by RASPParser#assign. + def visitAssign(self, ctx:RASPParser.AssignContext): + return self.visitChildren(ctx) + + + # Visit a parse tree produced by RASPParser#draw. + def visitDraw(self, ctx:RASPParser.DrawContext): + return self.visitChildren(ctx) + + + # Visit a parse tree produced by RASPParser#exprsList. + def visitExprsList(self, ctx:RASPParser.ExprsListContext): + return self.visitChildren(ctx) + + + # Visit a parse tree produced by RASPParser#namedExprsList. + def visitNamedExprsList(self, ctx:RASPParser.NamedExprsListContext): + return self.visitChildren(ctx) + + + # Visit a parse tree produced by RASPParser#namedExpr. + def visitNamedExpr(self, ctx:RASPParser.NamedExprContext): + return self.visitChildren(ctx) + + + # Visit a parse tree produced by RASPParser#raspstatementsList. + def visitRaspstatementsList(self, ctx:RASPParser.RaspstatementsListContext): + return self.visitChildren(ctx) + + + # Visit a parse tree produced by RASPParser#funcDef. + def visitFuncDef(self, ctx:RASPParser.FuncDefContext): + return self.visitChildren(ctx) + + + # Visit a parse tree produced by RASPParser#forLoop. + def visitForLoop(self, ctx:RASPParser.ForLoopContext): + return self.visitChildren(ctx) + + + # Visit a parse tree produced by RASPParser#commentsList. + def visitCommentsList(self, ctx:RASPParser.CommentsListContext): + return self.visitChildren(ctx) + + + # Visit a parse tree produced by RASPParser#assignsAndCommentsList. + def visitAssignsAndCommentsList(self, ctx:RASPParser.AssignsAndCommentsListContext): + return self.visitChildren(ctx) + + + # Visit a parse tree produced by RASPParser#returnStatement. + def visitReturnStatement(self, ctx:RASPParser.ReturnStatementContext): + return self.visitChildren(ctx) + + + # Visit a parse tree produced by RASPParser#idsList. + def visitIdsList(self, ctx:RASPParser.IdsListContext): + return self.visitChildren(ctx) + + + # Visit a parse tree produced by RASPParser#aggregateExpr. + def visitAggregateExpr(self, ctx:RASPParser.AggregateExprContext): + return self.visitChildren(ctx) + + + # Visit a parse tree produced by RASPParser#atom. + def visitAtom(self, ctx:RASPParser.AtomContext): + return self.visitChildren(ctx) + + + # Visit a parse tree produced by RASPParser#expr. + def visitExpr(self, ctx:RASPParser.ExprContext): + return self.visitChildren(ctx) + + + # Visit a parse tree produced by RASPParser#aList. + def visitAList(self, ctx:RASPParser.AListContext): + return self.visitChildren(ctx) + + + # Visit a parse tree produced by RASPParser#aDict. 
+ def visitADict(self, ctx:RASPParser.ADictContext): + return self.visitChildren(ctx) + + + # Visit a parse tree produced by RASPParser#listCompExpr. + def visitListCompExpr(self, ctx:RASPParser.ListCompExprContext): + return self.visitChildren(ctx) + + + # Visit a parse tree produced by RASPParser#dictCompExpr. + def visitDictCompExpr(self, ctx:RASPParser.DictCompExprContext): + return self.visitChildren(ctx) + + + +del RASPParser \ No newline at end of file diff --git a/RASP_support/zzantlr/__init__.py b/RASP_support/zzantlr/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/README.md b/README.md new file mode 100644 index 0000000..84fe750 --- /dev/null +++ b/README.md @@ -0,0 +1,156 @@ +# RASP + +## Setup +#### Mac or Linux +- Run `./setup.sh` . It will create a python3 virtual environment and install the dependencies for RASP. It will also try to install graphviz (the non-python part) and rlwrap on your machine. If these fail, you will still be able to use RASP, however: the interface will not be as nice without `rlwrap`, and drawing s-op computation flows will not be possible without `graphviz`. +- After having set up, you can run `./rasp.sh` to start the RASP read-evaluate-print-loop. +#### Windows +Follow the instructions given in `windows instructions.txt` + +## The REPL +Use Ctrl+C to quit a partially entered command, and Ctrl+D to exit the REPL. + +#### Initial Environment +RASP starts with the base s-ops: `tokens`, `indices`, and `length`. It also has the base functions `select`, `aggregate`, and `selector_width` as described in the paper, a selector `full_s` created through `select(1,1,==)` that creates a "full" attention pattern, and several other library functions (check out `RASP_support/rasplib.rasp` to see them). + +Additionally, the REPL begins with a base example, `"hello"`, on which it shows the output for each created s-op or selector. This example can be changed, and toggled on and off, through commands to the REPL. + +All RASP commands end with a semicolon. Commands to the REPL -- such as changing the base example -- do not. + +#### Examples + +Play around! Try simple elementwise manipulations of s-ops: +``` +>> 3xindices =3 * indices; + s-op: 3xindices + Example: 3xindices("hello") = [0, 3, 6, 9, 12] +>> indices+indices; + s-op: out + Example: out("hello") = [0, 2, 4, 6, 8] +``` + +Change the base example, and create a selector that focuses each position on all positions before it: +``` +>> set example "hey" +>> prevs=select(indices,indices,<); + selector: prevs + Example: prevs("hey") = + {0: [0, 0, 0], 1: [1, 0, 0], 2: [1, 1, 0]} +``` + +Check the output of an s-op on your new base example: +``` +>> 3xindices; + s-op: 3xindices + Example: 3xindices("hey") = [0, 3, 6] +``` + +Or on specific inputs: +``` +>> 3xindices(["hi","there"]); + = [0, 3] +>> 3xindices("hiya"); + = [0, 3, 6, 9] +``` + +Aggregate with the full selection pattern to compute the proportion of a letter in your input: +``` +>> full_s; + selector: full_s + Example: full_s("hey") = + {0: [1, 1, 1], 1: [1, 1, 1], 2: [1, 1, 1]} +>> my_frac=aggregate(full_s,indicator(tokens=="e")); + s-op: my_frac + Example: my_frac("hey") = [0.333]*3 +``` +Note: when an s-op's output is identical in all positions, RASP simply prints the output of one position, followed by "` * X`" (where X is the sequence length) to mark the repetition. 
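+ +Since s-ops combine with ordinary elementwise arithmetic, a fraction like this can also be scaled back up into a count by multiplying with the base s-op `length` (a small illustrative sketch; the name `e_count` is arbitrary, and on `"hey"` it comes out as 1 at every position, up to floating-point rounding): +``` +>> e_count = my_frac * length; +```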
+ + +Check if a letter is in your input at all: +``` +>> "e" in tokens; + s-op: out + Example: out("hey") = [T]*3 +``` + +Alternately, in an elementwise fashion, check if each of your input tokens belongs to some group: +``` +>> vowels = ["a","e","i","o","u"]; + list: vowels = ['a', 'e', 'i', 'o', 'u'] +>> tokens in vowels; + s-op: out + Example: out("hey") = [F, T, F] +``` + +Draw the computation flow for an s-op you have created, on an input of your choice: +(this will create a pdf in a subfolder `comp_flows` of the current directory) +``` +>> draw(my_frac,"abcdeeee"); + = [0.5]*8 +``` + +Or simply on the base example: +``` +>> draw(my_frac); + = [0.333]*3 +``` + +If they bother you, turn the examples off, and bring them back when you need them: +``` +>> examples off +>> indices; + s-op: indices +>> full_s; + selector: full_s +>> examples on +>> indices; + s-op: indices + Example: indices("hello") = [0, 1, 2, 3, 4] +``` +You can also do this selectively, turning only selector or s-op examples on and off, e.g.: `selector examples off`. + +Create a selector that focuses each position on all other positions containing the same token. But first, set the base example to `"hello"` for a better idea of what's happening: +``` +>> set example "hello" +>> same_token=select(tokens,tokens,==); + selector: same_token + Example: same_token("hello") = + {0: [1, 0, 0, 0, 0], + 1: [0, 1, 0, 0, 0], + 2: [0, 0, 1, 1, 0], + 3: [0, 0, 1, 1, 0], + 4: [0, 0, 0, 0, 1]} +``` + +Then, use `selector_width` to compute, for each position, how many other positions the selector `same_token` focuses it on. This effectively computes an in-place histogram over the input: +``` +>> histogram=selector_width(same_token); + s-op: histogram + Example: histogram("hello") = [1, 1, 2, 2, 1] +``` + +For more complicated examples, check out `paper_examples.rasp`! + +#### Note on input types: +RASP expects inputs in four forms: strings, integers, floats, or booleans, handled respectively by `tokens_str`, `tokens_int`, `tokens_float`, and `tokens_bool`. Initially, RASP loads with `tokens` set to `tokens_str`, this can be changed by assignment, e.g.: `tokens=tokens_int;`. When changing the input type, you will also want to change the base example, e.g.: `set example [0,1,2]`. + +Note that assignments do not retroactively change the computation trees of existing s-ops! + + +## Writing and Loading RASP files + +To keep and load RASP code from files, save them with `.rasp` as the extension, and use the 'load' command without the extension. For example, you can load the examples file `paper_examples.rasp` in this repository to the REPL as follows: +``` +>> load "paper_examples"; +``` +This will make (almost) all values in the file available in the loading environment (whether the REPL, or a different `.rasp` file): values whose names begin with an underscore remain private to the file they are written in. +Loading files in the REPL will also print a list of all loaded values. + +#### Syntax Highlighting +For the Sublime Text editor, you can get syntax highlighting for `.rasp` files as follows: +1. Install package control for sublime (you might already have it: look in the menu [Sublime Text]->[Preferences] and see if it's there. If not, follow the instructions at https://packagecontrol.io/installation). +2. Install the 'packagedev' package through package control ([Sublime Text]->[Preferences]->[Package Control], then type [install package], then [packagedev]) +3. 
After installing PackageDev, create a new syntax definition file through [Tools]->[Packages]->[Package Development]->[New Syntax Definition]. +4. Copy the contents of `RASP_support/RASP.sublime-syntax` into the new syntax definition file, and save it as `RASP.sublime-syntax`. + +[Above is basically following the instructions in http://ilkinulas.github.io/programming/2016/02/05/sublime-text-syntax-highlighting.html , and then copying in the contents of the provided `RASP.sublime-syntax` file] diff --git a/paper_examples.rasp b/paper_examples.rasp new file mode 100644 index 0000000..ff5eb7c --- /dev/null +++ b/paper_examples.rasp @@ -0,0 +1,96 @@ +def _with_bos_selector_width(s) { + s = s or select(indices,0,==); + return (1/aggregate(s,indicator(indices==0)))-1; +} + +hist_bos = _with_bos_selector_width( select(tokens_str,tokens_str,==)); + +hist_nobos = selector_width(select(tokens_str,tokens_str,==)); + +_flip_s = select(indices,length-1-indices,==); +reverse = aggregate(_flip_s,tokens_str); + + +def _sort_bos(seq,key) { + should_be_earlier = select(key,key,<) or (select(key,key,==) and select(indices,indices,<)); + num_before = _with_bos_selector_width(should_be_earlier); + num_before = -1 if indices==0 else num_before; # this is to set bos to + # realise it is at position 0, else it confuses whatever position it ends up thinking it's in + main_seq_grab_output = select(num_before,indices-1,==); # indices-1 because they have to skip the bos-token + bos_grab_self = select(indices,0,==) and select(indices,indices,==); + grab_output = main_seq_grab_output or bos_grab_self; + return aggregate(grab_output,seq); +} + +sort_bos = _sort_bos(tokens_str,tokens_str); + +def _has_earlier_with_bos() { + earlier_copy = select(tokens_str,tokens_str,==) and select(indices,indices,<); + num_prev_copies = _with_bos_selector_width(earlier_copy); + return num_prev_copies > 0; +} + +def _hist2_bos() { + has_prev_copy = _has_earlier_with_bos(); + repr_with_own_count = select(hist_bos,hist_bos,==) and select(has_prev_copy,False,==); + return _with_bos_selector_width(repr_with_own_count); +} + +hist2_bos = _hist2_bos(); + + +def _sort_by_freq_bos(non_token,max_len) { + has_earlier = _has_earlier_with_bos(); + filtered_freq = hist_bos - (indicator(has_earlier)*max_len); + filtered_tokens = non_token if has_earlier else tokens_str; + return _sort_bos(filtered_tokens,-filtered_freq); +} + +sort_by_most_freq = _sort_by_freq_bos("§",300); + + +def _dyck1_ptf() { + up_to_self = select(indices,indices,<=); + n_opens = (indices+1)*aggregate(up_to_self,indicator(tokens_str=="(")); + n_closes = (indices+1)*aggregate(up_to_self,indicator(tokens_str==")")); + balance = n_opens - n_closes; + prev_imbalances = aggregate(up_to_self,indicator(balance<0)); + return "F" if prev_imbalances>0 else + ("T" if balance==0 else "P"); +} + +dyck1_ptf = _dyck1_ptf(); + +def dyckk_ptf(paren_pairs) { + # paren pairs should come as list of strings of length 2, e.g.: ["()","{}"] + openers = [p[0] for p in paren_pairs]; + closers = [p[1] for p in paren_pairs]; + opens = indicator(tokens_str in openers); + closes = indicator(tokens_str in closers); + up_to_self = select(indices,indices,<=); + n_opens = (indices+1)*aggregate(up_to_self,opens); + n_closes = (indices+1)*aggregate(up_to_self,closes); + depth = n_opens - n_closes; + delay_closer = depth + closes; + depth_index = selector_width(select(delay_closer,delay_closer,==) and up_to_self); + open_for_close = select(opens,True,==) and + select(delay_closer,delay_closer,==) and + 
select(depth_index,depth_index-1,==); + matched_opener = aggregate(open_for_close,tokens_str,"-"); + opener_matches = matched_opener+tokens_str in paren_pairs; + mismatch = closes and not opener_matches; + had_problem = aggregate(up_to_self,indicator(mismatch or (depth<0)))>0; + return "F" if had_problem else ("T" if depth==0 else "P"); +} +# your opener is the last opener in the sequence with depth one greater than yours (you reduce depth by closing). +# (if more closers happen between you they will seek less-deep openers. if more openers happen +# between you they will seek deeper closers, i.e. they will not have your depth. +# if both happen between you, then your depth-indices will not be subsequent.) +# ideally, would not use depth_index. instead, would have: +# potential_openers_for_closer = select(opens,True,==) and select(depth_delay_closer,depth_delay_closer,==) and up_to_self +# closer_grab_opener = best(potential_openers_for_closer,score(indices,0,+)) # where best(s1,sc) takes a selector s1 and a scorer sc (which is created like a selector, only it makes numbers instead of booleans) and returns a new selector s2 such that in each row of s2, at most one position is chosen, and it is the chosen position in s1 with the maximum score according to sc +# right now this function creates dyck2 with 4 layers and 2 heads, as follows: 1/2/1/1. but the second layer is just computing the depth_index which is then used in layer 3. so if we had 'best' and scorers, it would be 3 layers and 1 head. + + +dyck2_ptf = dyckk_ptf(["()","{}"]); +dyck3_ptf = dyckk_ptf(["()","{}","[]"]); \ No newline at end of file diff --git a/rasp.sh b/rasp.sh new file mode 100755 index 0000000..ad1edd8 --- /dev/null +++ b/rasp.sh @@ -0,0 +1,10 @@ +source raspenv/bin/activate + +if [[ $(rlwrap -v) == rlwrap* ]]; then + # the better option. 
requires rlwrap + rlwrap python3 RASP_support/REPL.py +else + python3 RASP_support/REPL.py +fi + +deactivate \ No newline at end of file diff --git a/rover.rasp b/rover.rasp new file mode 100644 index 0000000..b162af1 --- /dev/null +++ b/rover.rasp @@ -0,0 +1,280 @@ +# will make big ole rover function, with this number of logical leaps, once done: +depth=3; + +#### simplified version assumes reasonable simple inferences, i.e.: +# no "xb0; + } + + return {i:shared_sighting(sightings[i]) for i in sightings}; +} + +def receive(new,base,combine_condition) { + combined = {n:base[n] or new[n] for n in base}; + return {n: combined[n] if combine_condition else base[n] + for n in base}; +} + +def setchanger_update(contains,notcontains) { + # eg x0; + res = res or (e_REL_item and (tokens==e)); + } + return res; +} + +def getall(sightings_dict){ + res = ""; + for i in sightings_dict { + res = res + (i if sightings_dict[i] else ""); + } + return res; +} + +########## setup: mark elements, relations, clauses, etc ############## +clause_index = selector_width(select(tokens,",",==) and select(indices,indices,<)); +relative_clause_pos = selector_width(select(clause_index,clause_index,==) and select(indices,indices,<)); +num_clauses = clause_index[-1]; +is_sep = tokens==sep; + +inference_clause = selector_width(select(clause_index,clause_index,==) and + select(tokens,infers_token,==))>0 and not is_sep; +question_clause = (clause_index == num_clauses) and not is_sep; +base_clause = (not (question_clause or inference_clause)) and not is_sep; + + +is_set = tokens in sets; +is_element = tokens in elements; + +inference_lefthalf = relative_clause_pos<3 and inference_clause; +inference_righthalf = relative_clause_pos>3 and inference_clause; + +load_from_token = {v:select(tokens,v,==) for v in elements+sets}; + +######### first step: all base clauses load their values to their elements/sets ########### +item_in_base_clause = (is_set or is_element) and base_clause; +load_other_base_item = select(indices,indices,!=) and + select(clause_index,clause_index,==) and + select(item_in_base_clause,True,==) and + select(True,item_in_base_clause,==); +other_base_item = aggregate(load_other_base_item,tokens,","); +load_base_op = select(clause_index,clause_index,==) and + select(item_in_base_clause,False,==) and + select(base_clause,True,==); +base_op = aggregate(load_base_op,tokens,","); + + +contains = {e:checkfor(e,in_token) for e in elements}; +notcontains = {e:checkfor(e,notin_token) for e in elements}; +isin = {s:checkfor(s,in_token) for s in sets}; +notin = {s:checkfor(s,notin_token) for s in sets}; + + +inference_leftel = get_inference_clause_token(0); +inference_leftop = get_inference_clause_token(1); +inference_leftset = get_inference_clause_token(2); +inference_rightel = get_inference_clause_token(4); +inference_rightop = get_inference_clause_token(5); +inference_rightset = get_inference_clause_token(6); + +setchanger = inference_leftel==element_variable and + inference_rightel==element_variable and + (not (inference_leftset==set_variable)) and + (not (inference_rightset==set_variable)); +elementchanger = (not (inference_leftel==element_variable)) and + (not (inference_rightel==element_variable)) and + inference_leftset==set_variable and + inference_rightset==set_variable; +is_inference_receiver = inference_clause and + (relative_clause_pos in [0,2,4,6]) and + not tokens in [set_variable,element_variable]; + +setchanger_lr = "nc" if inference_leftop == in_token else "c"; +setchanger_rr = "c" if inference_rightop == 
in_token else "nc"; +elementchanger_lr = "ni" if inference_leftop == in_token else "i"; +elementchanger_rr = "i" if inference_rightop == in_token else "ni"; +left_receiver = elementchanger_lr if elementchanger else setchanger_lr; +right_receiver = elementchanger_rr if elementchanger else setchanger_rr; + +receive_to_contains = receiving_to_group("c"); +receive_to_notcontains = receiving_to_group("nc"); +receive_to_isin = receiving_to_group("i"); +receive_to_notin = receiving_to_group("ni"); + + +mutual_inference_set_loader = select(clause_index,clause_index,==) and + select(inference_clause,True,==) and + select(indices,indices,!=) and + (select(relative_clause_pos,2,==) or + select(relative_clause_pos,6,==)) and + (select(2,relative_clause_pos,==) or + select(6,relative_clause_pos,==)); + +mutual_inference_element_loader = select(clause_index,clause_index,==) and + select(inference_clause,True,==) and + select(indices,indices,!=) and + (select(relative_clause_pos,0,==) or + select(relative_clause_pos,4,==)) and + (select(0,relative_clause_pos,==) or + select(4,relative_clause_pos,==)); + +same_item = select(tokens,tokens,==); + +stages = []; +for _ in range(depth) { + ######## second step: all element/set values share their information!! (except for query) ####### + contains,notcontains,isin,notin = + [share_sightings(c) for c in [contains,notcontains,isin,notin]]; + stages = stages + [{"c":contains,"nc":notcontains,"i":isin,"ni":notin}]; + + ######## third step: inference clauses shift values inside their sharepoints! ########## + new_contains,new_notcontains = setchanger_update(contains,notcontains); + new_isin,new_notin = elementchanger_update(isin,notin); + + # make new values canon for correct clauses (e.g. dont make new_contains canon in an element-updating clause) + contains = {n:new_contains[n] if setchanger else contains[n] for n in contains}; + notcontains = {n:new_notcontains[n] if setchanger else notcontains[n] for n in contains}; + isin = {n:new_isin[n] if elementchanger else isin[n] for n in isin}; + notin = {n:new_notin[n] if elementchanger else notin[n] for n in isin}; + + + ###### fourth step: sets share info with elements and vice versa ####### + ###### (e.g. if xx