desk calculator
% desk calculator
% parser
% https://gist.github.com/yamasushi/28fb60613726b55b7bea51d867b67fac
-module(calc).
-export([parse/3]).

% parse one expression; returns the final environment, lexer, and result
parse(Env,Emitter,Lexer) ->
    {{Env1,_,Lexer1},Syn} = expr({Env,Emitter,Lexer}),
    {Env1,Lexer1,Syn}.
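% Usage sketch (assuming the lex module below; values traced by hand):
%   Lexer   = lex:spawn_lexer("1+2"),
%   Emitter = fun(E,Node) -> {E,Node} end,   % identity emitter: build a parse tree
%   {_Env,_L,Syn} = calc:parse(#{}, Emitter, Lexer).
%   % Syn = ['+',1,2]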
% emitter dispatch helpers: the Emitter callback receives the environment and
% a node ([Op,X,Y] for a binary operator, [Fn,X] for a function application,
% [Vsym] for a variable) and returns {Env',Value}
op ({Env,Emitter,Lexer},Op,X,Y) ->
    {E,V} = Emitter(Env,[Op,X,Y]), {{E,Emitter,Lexer},V}.
fn ({Env,Emitter,Lexer},Fn,X) ->
    {E,V} = Emitter(Env,[Fn,X]),   {{E,Emitter,Lexer},V}.
var({Env,Emitter,Lexer},Vsym) ->
    {E,V} = Emitter(Env,[Vsym]),   {{E,Emitter,Lexer},V}.
% lexer helpers: thread the {Env,Emitter,Lexer} context through lex calls
is_lexer_empty({_,_,Lexer}) -> lex:is_lexer_empty(Lexer).
get_token({Env,Emitter,Lexer}) ->
    {L,T} = lex:get_token(Lexer), {{Env,Emitter,L},T}.
unget_token({Env,Emitter,Lexer},T) ->
    L = lex:unget_token(Lexer,T), {Env,Emitter,L}.
% restx -> <op> termx {restx_1.inh = restx.inh <op> termx.syn}
%          restx_1    {restx.syn = restx_1.syn}
%        | ε          {restx.syn = restx.inh}
restx(Context,Syms,TermX,Inh) when is_tuple(Context) andalso is_list(Syms) ->
    RestX = fun(Self,C,I) ->
        %io:format("RestX Syms=~w,I=~w~n",[Syms,I]),
        case is_lexer_empty(C) of
            true  -> {C,I}; % ε
            false ->
                {C0,T} = get_token(C),
                %io:format("RestX T=~w~n",[T]),
                case T of
                    false -> {C,I}; % ε
                    {op,TVal} -> % operator
                        %io:format("RestX lists:member(~w,~w)=~w ~n",
                        %          [TVal,Syms,lists:member(TVal,Syms)]),
                        case lists:member(TVal,Syms) of
                            true ->
                                {C1,Syn1} = TermX(C0),
                                {C2,V}    = op(C1,TVal,I,Syn1),
                                Self(Self,C2,V);
                            false -> C1 = unget_token(C0,T), {C1,I} % ε
                        end;
                    _ -> C1 = unget_token(C0,T), {C1,I} % ε
                end
        end end,
    RestX(RestX,Context,Inh).
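% Worked example: restx folds left-associatively, so with the node-building
% emitter from the sketch above, "1-2-3" parses as ['-',['-',1,2],3],
% not ['-',1,['-',2,3]].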
% expr -> term1 {rest1.inh = term1.syn}
%         rest1 {expr.syn  = rest1.syn}
expr(Context) ->
    {C0,Syn0} = term1(Context),
    {C1,Syn1} = rest1(C0,Syn0),
    {C1,Syn1}.
% rest1 -> +       {term1.inh = rest1.inh}
%          term1   {rest1_1.inh = rest1.inh + term1.syn}
%          rest1_1 {rest1.syn = rest1_1.syn}
%          ...
%        | ε       {rest1.syn = rest1.inh}
rest1(Context,Inh) -> restx(Context,['+','-'],fun term1/1,Inh).
% term1 -> {term2.inh = term1.inh}
%          term2 {rest2.inh = term2.syn}
%          rest2 {term1.syn = rest2.syn}
term1(Context) ->
    {C0,Syn0} = term2(Context),
    {C1,Syn1} = rest2(C0,Syn0),
    {C1,Syn1}.
% rest2 -> *       {term2.inh = rest2.inh}
%          term2   {rest2_1.inh = rest2.inh * term2.syn}
%          rest2_1 {rest2.syn = rest2_1.syn}
%          ...
%        | ε       {rest2.syn = rest2.inh}
rest2(Context,Inh) -> restx(Context,['*','/','%'],fun term2/1,Inh).
% term2 -> {term3.inh = term2.inh}
%          term3 {rest3.inh = term3.syn}
%          rest3 {term2.syn = rest3.syn}
term2(Context) ->
    {C0,Syn0} = term3(Context),
    {C1,Syn1} = rest3(C0,Syn0),
    {C1,Syn1}.
% rest3 -> ^ term3 rest3_1 | ε
rest3(Context,Inh) -> restx(Context,['^'],fun term3/1,Inh).
% term3 -> factor {term3.syn = factor.syn}
term3(Context) ->
    factor(Context).
% consume one expected token T or raise a syntax error
match_term(Context,T) ->
    case is_lexer_empty(Context) of
        true  -> error({syntax_error,T,Context});
        false ->
            {C0,Hd} = get_token(Context),
            if
                Hd =/= T -> error({syntax_error,T,Hd,C0});
                true     -> C0
            end
    end.
% factor -> <integer>           {factor.syn = <integer>.syn}
%         | ( expr )            {factor.syn = expr.syn}
%         | - factor1           {factor.syn = -factor1.syn}
%         | <function> ( expr ) {factor.syn = <function>(expr.syn)}
factor(Context) ->
    case is_lexer_empty(Context) of
        true  -> error({factor,Context});
        false ->
            {C0,Hd} = get_token(Context),
            %io:format("factor Hd=~w~n",[Hd]),
            case Hd of
                {op,'-'} -> % negation
                    {C1,Syn1} = factor(C0),
                    fn(C1,'-',Syn1);
                {paren,'('} -> % parenthesized expression
                    {C1,Syn1} = expr(C0),
                    C2 = match_term(C1,{paren,')'}),
                    {C2,Syn1};
                {id,Id} -> % constant
                    var(C0,Id);
                {id_func,Id} -> % function application
                    C1 = match_term(C0,{paren,'('}),
                    {C2,Syn2} = expr(C1),
                    C3 = match_term(C2,{paren,')'}),
                    fn(C3,Id,Syn2);
                {number,Num} -> % numbers bypass the emitter
                    {C0,Num};
                _ -> % anything else (including end of input) is a syntax error
                    error({factor_error,Context})
            end
    end.
% desk calculator
% main
% https://gist.github.com/yamasushi/28fb60613726b55b7bea51d867b67fac
-module(calc_main).
-export([calc/1,sexp/1]).

% parse S into an s-expression-style tree: the emitter unwraps variable
% nodes [Sym] and returns every other node unchanged
sexp(S) ->
    Lexer = lex:spawn_lexer(S),
    {_,_,Syn} = calc:parse(#{},
        fun(E,[Sym]) -> {E,Sym};
           (E,Arg)   -> {E,Arg} end,
        Lexer),
    Syn.
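% e.g. calc_main:sexp("1+2*3") should return ['+',1,['*',2,3]].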
% parse and evaluate S under the default environment
calc(S) ->
    Lexer = lex:spawn_lexer(S),
    Env   = default_env(),
    {_,_,Syn} = calc:parse(Env,fun emit/2,Lexer),
    Syn.
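% e.g. calc_main:calc("1+2*3") should return 7, and calc_main:calc("sin(90)")
% should return 1.0 (trigonometric arguments are in degrees, see default_env/0).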
default_env() ->
    #{ op_symmap =>
           #{ '+' => fun(X,Y) -> X + Y end,
              '-' => fun(X,Y) -> X - Y end,
              '*' => fun(X,Y) -> X * Y end,
              '/' => fun(X,Y) -> X / Y end,
              '%' => fun(X,Y) -> X rem Y end,
              '^' => fun(X,Y) -> math:pow(X,Y) end },
       fn_symmap =>
           #{ '-'   => fun(X) -> -X end,
              'sin' => fun(D) -> math:sin(D*math:pi()/180.0) end,
              'cos' => fun(D) -> math:cos(D*math:pi()/180.0) end,
              'tan' => fun(D) -> math:tan(D*math:pi()/180.0) end,
              'ln'  => fun(X) -> math:log(X) end,
              'log' => fun(X) -> math:log10(X) end },
       vars =>
           #{ 'pi' => math:pi(),
              'e'  => math:exp(1) } }.
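% The environment is an ordinary map, so it can be extended before calling
% calc:parse/3. A sketch from inside this module ('sqrt' is a hypothetical
% addition, not part of the gist):
%   Env0 = default_env(),
%   Fns  = map_get(fn_symmap,Env0),
%   Env1 = Env0#{fn_symmap := Fns#{'sqrt' => fun math:sqrt/1}},
%   {_,_,V} = calc:parse(Env1,fun emit/2,lex:spawn_lexer("sqrt(2)")).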
var_(Env,V)      -> map_get(V ,map_get(vars     ,Env)).
fn_ (Env,Fn,X)   -> (map_get(Fn,map_get(fn_symmap,Env)))(X).
op_ (Env,Op,X,Y) -> (map_get(Op,map_get(op_symmap,Env)))(X,Y).

% evaluating emitter: look each node up in the environment
emit(Env,[V])      -> {Env,var_(Env,V)};
emit(Env,[Fn,X])   -> {Env,fn_ (Env,Fn,X)};
emit(Env,[Op,X,Y]) -> {Env,op_ (Env,Op,X,Y)}.
% desk calculator
% lexical analysis
% https://gist.github.com/yamasushi/28fb60613726b55b7bea51d867b67fac
-module(lex).
-export([lexer_process/1,
         spawn_lexer/1,
         is_lexer_empty/1,
         get_token/1,
         unget_token/2,
         test/1]).

-define(is_blank(X), ((X==$\s) or (X==$\n) or (X==$\t))).
% print every token in S (debug helper)
test(S) ->
    Lexer = spawn_lexer(S),
    F = fun(Self,Lexer0) ->
            {Lexer1,T} = get_token(Lexer0),
            case T of
                false -> done;
                _     -> io:format("~w ",[T]), Self(Self,Lexer1)
            end
        end,
    F(F,Lexer).
% token constructors
token_number(V)  -> {number ,V}.
token_op(V)      -> {op     ,V}.
token_id(V)      -> {id     ,V}.
token_id_func(V) -> {id_func,V}.
token_paren(V)   -> {paren  ,V}.

skip_blank([])                      -> [];
skip_blank([H|B]) when ?is_blank(H) -> skip_blank(B);
skip_blank(Xs)                      -> Xs.
-define(BASE, 10).
-define(is_digit(X)      , ((X>=$0) andalso (X=<$9))).
-define(digit2int(X)     , (X-$0)).
-define(is_id_head(X)    , ((X>=$a) andalso (X=<$z))).
-define(is_id_body(X)    , (?is_id_head(X) orelse ?is_digit(X))).
-define(is_parentheses(X), ((X==$() orelse (X==$)))).
-define(is_op(X), ((X==$+) orelse
                   (X==$-) orelse
                   (X==$*) orelse
                   (X==$/) orelse
                   (X==$%) orelse
                   (X==$^))).
lexer(Xs,Yield) ->
    Xs0 = skip_blank(Xs),
    case Xs0 of
        [] ->
            Yield(false), % signal end of input
            ok;
        [H|B] when ?is_blank(H) -> lexer(B,Yield);
        [$.,D|Xs1] when ?is_digit(D) -> % leading decimal point, e.g. ".5"
            {Xs2,J} = decimal(Xs1,D),
            Yield(token_number(J)),
            lexer(Xs2,Yield);
        [H|B] when ?is_digit(H) ->
            {Xs1,I,_} = pos_int(B,?digit2int(H),1),
            case Xs1 of
                [$.,D|Xs2] when ?is_digit(D) -> % fraction follows, e.g. "3.25"
                    {Xs3,J} = decimal(Xs2,D),
                    Yield(token_number(I+J)),
                    lexer(Xs3,Yield);
                [$.|Xs2] -> % bare trailing decimal point, e.g. "3."
                    Yield(token_number(I)),
                    lexer(Xs2,Yield);
                _ -> % plain integer
                    Yield(token_number(I)),
                    lexer(Xs1,Yield)
            end;
        [H|Xs1] when ?is_id_head(H) -> % identifier
            {Xs2,Sym} = id(Xs1,[H]),
            Yield(
                case skip_blank(Xs2) of % look ahead for a call
                    [$(|_] -> token_id_func(Sym); % function call
                    _      -> token_id(Sym)
                end),
            lexer(Xs2,Yield);
        [P|Xs1] when ?is_parentheses(P) ->
            Yield(token_paren(list_to_atom([P]))),
            lexer(Xs1,Yield);
        [O|Xs1] when ?is_op(O) ->
            Yield(token_op(list_to_atom([O]))),
            lexer(Xs1,Yield);
        Xs1 -> error({lexer_error,xs_is,Xs1})
    end.
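% Worked example: "sin(90)+2.5" is tokenized as
%   {id_func,sin} {paren,'('} {number,90} {paren,')'} {op,'+'} {number,2.5}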
% positive integer; K counts the digits consumed (used for decimals)
pos_int(Xs=[],Acc,K) -> {Xs,Acc,K};
pos_int([H|B],Acc,K) when ?is_digit(H) ->
    pos_int(B,Acc*?BASE + ?digit2int(H),K+1);
pos_int(Xs,Acc,K) -> {Xs,Acc,K}.

% fractional part after the decimal point
decimal(Xs,Hd) ->
    {Xs0,I,K} = pos_int(Xs,?digit2int(Hd),1),
    {Xs0,I/math:pow(?BASE,K)}.
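% e.g. decimal("5",$2) reads the digits "25" (K=2), so it returns
% {[],25/10^2} = {[],0.25}; the integer part is added back in lexer/2.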
% identifier
id(_,[])      -> error({id_error,acc_is_nil});
id(Xs=[],Acc) -> {Xs,list_to_atom(Acc)};
id([H|Xs0],Acc) when ?is_id_body(H) ->
    id(Xs0,Acc++[H]);
id(Xs,Acc)    -> {Xs,list_to_atom(Acc)}.
% lexer process: the lexer runs as a coroutine-style process; each Yield
% blocks until a {get,_,Pid} request arrives, then replies {token,T}
spawn_lexer(Xs) ->
    #{ stack => [], process => spawn(lex,lexer_process,[Xs]) }.
lexer_process(Xs) ->
    lexer(Xs,
          fun(T) ->
              receive {get,V,P} -> P ! {token,T}, V end
          end).
% is X a lexer?
-define(is_lexer(X), (is_map(X) andalso is_pid(map_get(process,X)))).

% lexer process
process_of_lexer(Lexer) when ?is_lexer(Lexer) ->
    map_get(process,Lexer).
% unget stack
stack_of_lexer(Lexer) when ?is_lexer(Lexer) ->
    map_get(stack,Lexer).
% is the lexer exhausted?
is_lexer_empty(Lexer) when ?is_lexer(Lexer) ->
    case stack_of_lexer(Lexer) of
        [] -> not is_process_alive(process_of_lexer(Lexer));
        _  -> false
    end.
% get token: pop the unget stack first, otherwise ask the lexer process
get_token(Lexer) when ?is_lexer(Lexer) ->
    case is_lexer_empty(Lexer) of
        false ->
            case stack_of_lexer(Lexer) of
                [] -> process_of_lexer(Lexer) ! {get,nil,self()},
                      receive {token,T} -> {Lexer,T} end;
                S  -> {Lexer#{stack := tl(S)}, hd(S)}
            end;
        true ->
            io:format("empty lexer~n",[]),
            {Lexer,false}
    end.
% unget token: push T back so the next get_token returns it
unget_token(Lexer,T) when ?is_lexer(Lexer) ->
    Lexer#{stack := [T|map_get(stack,Lexer)]}.
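% One-token look-ahead sketch (this is how the parser peeks at operators):
%   L0 = lex:spawn_lexer("1+2"),
%   {L1,T} = lex:get_token(L0),    % T = {number,1}
%   L2 = lex:unget_token(L1,T),    % push it back
%   {_L3,T} = lex:get_token(L2).   % the same token comes out again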