diff --git a/src/passes/1-parser/cameligo.ml b/src/passes/1-parser/cameligo.ml index cd8bc94f2..d69da91b4 100644 --- a/src/passes/1-parser/cameligo.ml +++ b/src/passes/1-parser/cameligo.ml @@ -35,7 +35,7 @@ module Errors = struct let message () = str in let loc = if start.pos_cnum = -1 then Region.make - ~start: Pos.min + ~start:(Pos.min ~file:source) ~stop:(Pos.from_byte stop) else Region.make diff --git a/src/passes/1-parser/cameligo/AST.ml b/src/passes/1-parser/cameligo/AST.ml index b8ac37bd2..d00cf9cd7 100644 --- a/src/passes/1-parser/cameligo/AST.ml +++ b/src/passes/1-parser/cameligo/AST.ml @@ -131,7 +131,7 @@ type t = { and ast = t -and attributes = attribute list +and attributes = attribute list and declaration = Let of (kwd_let * let_binding * attributes) reg diff --git a/src/passes/1-parser/cameligo/LexToken.mli b/src/passes/1-parser/cameligo/LexToken.mli index b71398b62..3e3460bc2 100644 --- a/src/passes/1-parser/cameligo/LexToken.mli +++ b/src/passes/1-parser/cameligo/LexToken.mli @@ -85,7 +85,7 @@ type t = | Mutez of (string * Z.t) Region.reg | String of string Region.reg | Bytes of (string * Hex.t) Region.reg -| Attr2 of string Region.reg +| Attr of string Region.reg (* Keywords *) @@ -150,8 +150,7 @@ val mk_kwd : lexeme -> Region.t -> (token, kwd_err) result val mk_string : lexeme -> Region.t -> token val mk_bytes : lexeme -> Region.t -> token val mk_constr : lexeme -> Region.t -> token -val mk_attr : lexeme -> Region.t -> (token, attr_err) result -val mk_attr2 : lexeme -> Region.t -> (token, attr_err) result +val mk_attr : string -> lexeme -> Region.t -> (token, attr_err) result val eof : Region.t -> token (* Predicates *) diff --git a/src/passes/1-parser/cameligo/LexToken.mll b/src/passes/1-parser/cameligo/LexToken.mll index f0bd0d319..d16388591 100644 --- a/src/passes/1-parser/cameligo/LexToken.mll +++ b/src/passes/1-parser/cameligo/LexToken.mll @@ -69,7 +69,7 @@ type t = | Mutez of (string * Z.t) Region.reg | String of string Region.reg | Bytes of (string * Hex.t) Region.reg -| Attr2 of string Region.reg +| Attr of string Region.reg (* Keywords *) @@ -147,6 +147,8 @@ let proj_token = function region, sprintf "Bytes (\"%s\", \"0x%s\")" s (Hex.show b) +| Attr Region.{region; value} -> + region, sprintf "Attr \"%s\"" value | Begin region -> region, "Begin" | Else region -> region, "Else" | End region -> region, "End" @@ -166,7 +168,6 @@ let proj_token = function | With region -> region, "With" | C_None region -> region, "C_None" | C_Some region -> region, "C_Some" -| Attr2 Region.{region; value} -> region, sprintf "Attr2 %s" value | EOF region -> region, "EOF" let to_lexeme = function @@ -205,6 +206,7 @@ let to_lexeme = function | Mutez i -> fst i.Region.value | String s -> String.escaped s.Region.value | Bytes b -> fst b.Region.value +| Attr a -> a.Region.value | Begin _ -> "begin" | Else _ -> "else" @@ -226,7 +228,7 @@ let to_lexeme = function | C_None _ -> "None" | C_Some _ -> "Some" -| Attr2 a -> a.Region.value + | EOF _ -> "" let to_string token ?(offsets=true) mode = @@ -469,11 +471,10 @@ let mk_constr lexeme region = (* Attributes *) -let mk_attr _lexeme _region = - Error Invalid_attribute - -let mk_attr2 lexeme region = - Ok (Attr2 { value = lexeme; region }) +let mk_attr header lexeme region = + if header = "[@" then + Error Invalid_attribute + else Ok (Attr Region.{value=lexeme; region}) (* Predicates *) diff --git a/src/passes/1-parser/cameligo/ParErr.ml b/src/passes/1-parser/cameligo/ParErr.ml index 7debe48ef..f1be602f1 100644 --- 
a/src/passes/1-parser/cameligo/ParErr.ml +++ b/src/passes/1-parser/cameligo/ParErr.ml @@ -46,7 +46,7 @@ let message = "\n" | 33 -> "\n" - | 460 -> + | 478 -> "\n" | 27 -> "\n" @@ -68,9 +68,13 @@ let message = "\n" | 133 -> "\n" - | 373 -> + | 379 -> "\n" - | 375 -> + | 381 -> + "\n" + | 472 -> + "\n" + | 169 -> "\n" | 134 -> "\n" @@ -80,7 +84,7 @@ let message = "\n" | 153 -> "\n" - | 374 -> + | 380 -> "\n" | 63 -> "\n" @@ -144,137 +148,141 @@ let message = "\n" | 156 -> "\n" - | 463 -> + | 481 -> "\n" - | 465 -> - "\n" - | 217 -> - "\n" - | 242 -> - "\n" - | 219 -> + | 483 -> "\n" | 221 -> "\n" - | 215 -> + | 246 -> "\n" - | 226 -> + | 223 -> "\n" - | 255 -> + | 225 -> "\n" - | 256 -> + | 219 -> "\n" - | 243 -> + | 230 -> "\n" - | 264 -> + | 259 -> "\n" - | 228 -> + | 260 -> "\n" - | 257 -> - "\n" - | 258 -> - "\n" - | 266 -> + | 247 -> "\n" | 268 -> "\n" + | 232 -> + "\n" + | 261 -> + "\n" + | 262 -> + "\n" | 270 -> "\n" | 272 -> "\n" | 274 -> "\n" - | 192 -> + | 276 -> "\n" - | 259 -> + | 278 -> "\n" - | 285 -> + | 195 -> "\n" - | 288 -> + | 263 -> "\n" - | 245 -> + | 289 -> "\n" - | 293 -> + | 292 -> "\n" - | 262 -> + | 249 -> + "\n" + | 297 -> + "\n" + | 266 -> "\n" | 160 -> "\n" | 164 -> "\n" - | 429 -> + | 445 -> "\n" - | 332 -> - "\n" - | 313 -> - "\n" - | 431 -> - "\n" - | 315 -> - "\n" - | 316 -> + | 337 -> "\n" | 317 -> "\n" - | 432 -> + | 447 -> "\n" - | 445 -> + | 319 -> "\n" - | 446 -> + | 320 -> "\n" - | 433 -> + | 321 -> "\n" - | 434 -> + | 448 -> "\n" - | 435 -> + | 462 -> "\n" - | 436 -> + | 463 -> "\n" - | 437 -> + | 449 -> "\n" - | 438 -> + | 450 -> "\n" - | 440 -> + | 452 -> "\n" - | 328 -> + | 451 -> "\n" - | 330 -> + | 453 -> + "\n" + | 454 -> + "\n" + | 455 -> + "\n" + | 457 -> + "\n" + | 333 -> + "\n" + | 335 -> + "\n" + | 339 -> + "\n" + | 336 -> "\n" | 334 -> "\n" - | 331 -> - "\n" - | 329 -> - "\n" - | 340 -> - "\n" - | 341 -> - "\n" - | 342 -> - "\n" - | 343 -> - "\n" - | 344 -> - "\n" | 345 -> "\n" - | 367 -> - "\n" | 346 -> "\n" | 348 -> "\n" - | 441 -> + | 347 -> "\n" - | 443 -> + | 349 -> "\n" - | 447 -> + | 350 -> "\n" - | 430 -> + | 351 -> "\n" - | 312 -> + | 373 -> "\n" - | 428 -> + | 352 -> + "\n" + | 354 -> + "\n" + | 458 -> + "\n" + | 460 -> + "\n" + | 464 -> + "\n" + | 446 -> + "\n" + | 316 -> + "\n" + | 444 -> "\n" | 165 -> "\n" @@ -282,65 +290,71 @@ let message = "\n" | 168 -> "\n" - | 169 -> + | 172 -> + "\n" + | 171 -> "\n" | 163 -> "\n" - | 448 -> + | 465 -> "\n" - | 450 -> + | 467 -> "\n" - | 451 -> + | 468 -> "\n" | 166 -> "\n" - | 235 -> - "\n" - | 236 -> - "\n" | 239 -> "\n" | 240 -> "\n" - | 425 -> + | 243 -> "\n" - | 170 -> + | 244 -> "\n" - | 171 -> + | 441 -> "\n" - | 172 -> + | 173 -> "\n" - | 418 -> + | 428 -> "\n" - | 419 -> + | 429 -> + "\n" + | 174 -> + "\n" + | 175 -> + "\n" + | 434 -> + "\n" + | 435 -> + "\n" + | 438 -> + "\n" + | 439 -> + "\n" + | 427 -> + "\n" + | 421 -> "\n" | 422 -> "\n" | 423 -> "\n" - | 174 -> - "\n" - | 304 -> - "\n" - | 305 -> - "\n" - | 405 -> - "\n" - | 412 -> - "\n" - | 404 -> - "\n" - | 306 -> + | 177 -> "\n" | 308 -> "\n" - | 320 -> + | 309 -> "\n" - | 321 -> + | 412 -> "\n" - | 322 -> + | 419 -> "\n" - | 323 -> + | 411 -> + "\n" + | 310 -> + "\n" + | 312 -> "\n" | 324 -> "\n" @@ -350,67 +364,79 @@ let message = "\n" | 327 -> "\n" - | 378 -> + | 329 -> "\n" - | 379 -> + | 328 -> "\n" - | 381 -> + | 330 -> "\n" - | 335 -> + | 331 -> "\n" - | 310 -> + | 332 -> "\n" - | 307 -> + | 384 -> "\n" - | 395 -> + | 385 -> "\n" - | 396 -> + | 387 -> "\n" - | 397 -> + | 340 -> "\n" - | 398 -> + | 314 -> "\n" - | 
399 -> - "\n" - | 400 -> - "\n" - | 408 -> + | 311 -> "\n" | 401 -> "\n" + | 402 -> + "\n" + | 404 -> + "\n" | 403 -> "\n" - | 175 -> + | 405 -> "\n" - | 176 -> + | 406 -> + "\n" + | 407 -> + "\n" + | 415 -> + "\n" + | 408 -> + "\n" + | 410 -> + "\n" + | 178 -> "\n" | 179 -> "\n" - | 180 -> + | 182 -> "\n" | 183 -> "\n" - | 302 -> + | 186 -> "\n" - | 300 -> + | 306 -> "\n" - | 185 -> - "\n" - | 187 -> + | 304 -> "\n" | 188 -> "\n" - | 189 -> - "\n" | 190 -> "\n" - | 195 -> + | 191 -> + "\n" + | 192 -> + "\n" + | 193 -> + "\n" + | 198 -> + "\n" + | 218 -> + "\n" + | 197 -> "\n" | 214 -> "\n" - | 194 -> - "\n" - | 210 -> - "\n" | _ -> raise Not_found diff --git a/src/passes/1-parser/cameligo/ParToken.mly b/src/passes/1-parser/cameligo/ParToken.mly index 0368fad57..8319d166e 100644 --- a/src/passes/1-parser/cameligo/ParToken.mly +++ b/src/passes/1-parser/cameligo/ParToken.mly @@ -12,7 +12,7 @@ %token <(string * Z.t) Region.reg> Mutez "" %token Ident "" %token Constr "" -%token Attr2 "" +%token Attr "" (* Symbols *) diff --git a/src/passes/1-parser/cameligo/Parser.mly b/src/passes/1-parser/cameligo/Parser.mly index ced3d28f3..239d418cd 100644 --- a/src/passes/1-parser/cameligo/Parser.mly +++ b/src/passes/1-parser/cameligo/Parser.mly @@ -206,10 +206,10 @@ field_decl: (* Top-level non-recursive definitions *) let_declaration: - "let" let_binding seq(Attr2) { + "let" let_binding seq(Attr) { let kwd_let = $1 in let attributes = $3 in - let binding = $2 in + let binding = $2 in let value = kwd_let, binding, attributes in let stop = expr_to_region binding.let_rhs in let region = cover $1 stop @@ -452,10 +452,10 @@ case_clause(right_expr): {pattern=$1; arrow=$2; rhs=$3} } let_expr(right_expr): - "let" let_binding seq(Attr2) "in" right_expr { - let kwd_let = $1 + "let" let_binding seq(Attr) "in" right_expr { + let kwd_let = $1 and binding = $2 - and attributes = $3 + and attributes = $3 and kwd_in = $4 and body = $5 in let stop = expr_to_region body in @@ -634,9 +634,9 @@ update_record: lbrace = $1; record = $2; kwd_with = $3; - updates = { value = {compound = Braces($1,$5); - ne_elements; - terminator}; + updates = {value = {compound = Braces($1,$5); + ne_elements; + terminator}; region = cover $3 $5}; rbrace = $5} in {region; value} } @@ -664,5 +664,5 @@ sequence: in {region; value} } path : - "" {Name $1} -| projection { Path $1} + "" { Name $1 } +| projection { Path $1 } diff --git a/src/passes/1-parser/cameligo/ParserLog.ml b/src/passes/1-parser/cameligo/ParserLog.ml index 0ee1bd3e6..e0b7fd09b 100644 --- a/src/passes/1-parser/cameligo/ParserLog.ml +++ b/src/passes/1-parser/cameligo/ParserLog.ml @@ -128,15 +128,15 @@ let rec print_tokens state {decl;eof} = Utils.nseq_iter (print_statement state) decl; print_token state eof "EOF" -and print_attributes state attributes = +and print_attributes state attributes = List.iter ( - fun ({value = attribute; region}) -> - let attribute_formatted = sprintf "[@%s]" attribute in + fun ({value = attribute; region}) -> + let attribute_formatted = sprintf "[@@%s]" attribute in print_token state region attribute_formatted ) attributes and print_statement state = function - Let {value=kwd_let, let_binding, attributes; _} -> + Let {value=kwd_let, let_binding, attributes; _} -> print_token state kwd_let "let"; print_let_binding state let_binding; print_attributes state attributes @@ -538,7 +538,7 @@ and print_case_clause state {value; _} = print_expr state rhs and print_let_in state {value; _} = - let {kwd_let; binding; kwd_in; body; attributes} = value in + let 
{kwd_let; binding; kwd_in; body; attributes} = value in print_token state kwd_let "let"; print_let_binding state binding; print_attributes state attributes; @@ -610,31 +610,41 @@ let rec pp_ast state {decl; _} = List.iteri (List.length decls |> apply) decls and pp_declaration state = function - Let {value = (_, let_binding, _); region} -> + Let {value = (_, let_binding, attr); region} -> pp_loc_node state "Let" region; - pp_let_binding state let_binding + pp_let_binding state let_binding attr; | TypeDecl {value; region} -> pp_loc_node state "TypeDecl" region; pp_type_decl state value -and pp_let_binding state node = +and pp_let_binding state node attr = let {binders; lhs_type; let_rhs; _} = node in let fields = if lhs_type = None then 2 else 3 in - let () = + let fields = if attr = [] then fields else fields+1 in + let arity = let state = state#pad fields 0 in pp_node state ""; - pp_binders state binders in - let () = + pp_binders state binders; 0 in + let arity = match lhs_type with - None -> () + None -> arity | Some (_, type_expr) -> - let state = state#pad fields 1 in + let state = state#pad fields (arity+1) in pp_node state ""; - pp_type_expr (state#pad 1 0) type_expr in - let () = - let state = state#pad fields (fields - 1) in + pp_type_expr (state#pad 1 0) type_expr; + arity+1 in + let arity = + let state = state#pad fields (arity+1) in pp_node state ""; - pp_expr (state#pad 1 0) let_rhs + pp_expr (state#pad 1 0) let_rhs; + arity+1 in + let () = + if attr <> [] then + let state = state#pad fields (arity+1) in + pp_node state ""; + let length = List.length attr in + let apply len rank = pp_ident (state#pad len rank) + in List.iteri (apply length) attr in () and pp_type_decl state decl = @@ -838,28 +848,39 @@ and pp_fun_expr state node = in () and pp_let_in state node = - let {binding; body; _} = node in + let {binding; body; attributes; _} = node in let {binders; lhs_type; let_rhs; _} = binding in let fields = if lhs_type = None then 3 else 4 in - let () = + let fields = if attributes = [] then fields else fields+1 in + let arity = let state = state#pad fields 0 in pp_node state ""; - pp_binders state binders in - let () = + pp_binders state binders; 0 in + let arity = match lhs_type with - None -> () + None -> arity | Some (_, type_expr) -> - let state = state#pad fields 1 in + let state = state#pad fields (arity+1) in pp_node state ""; - pp_type_expr (state#pad 1 0) type_expr in - let () = - let state = state#pad fields (fields - 2) in + pp_type_expr (state#pad 1 0) type_expr; + arity+1 in + let arity = + let state = state#pad fields (arity+1) in pp_node state ""; - pp_expr (state#pad 1 0) let_rhs in - let () = - let state = state#pad fields (fields - 1) in + pp_expr (state#pad 1 0) let_rhs; + arity+1 in + let arity = + let state = state#pad fields (arity+1) in pp_node state ""; - pp_expr (state#pad 1 0) body + pp_expr (state#pad 1 0) body; + arity+1 in + let () = + if attributes <> [] then + let state = state#pad fields (arity+1) in + pp_node state ""; + let length = List.length attributes in + let apply len rank = pp_ident (state#pad len rank) + in List.iteri (apply length) attributes in () and pp_tuple_expr state {value; _} = diff --git a/src/passes/1-parser/cameligo/check_dot_git_is_dir.sh b/src/passes/1-parser/cameligo/check_dot_git_is_dir.sh deleted file mode 100755 index 7df363999..000000000 --- a/src/passes/1-parser/cameligo/check_dot_git_is_dir.sh +++ /dev/null @@ -1,10 +0,0 @@ -#!/bin/sh - -set -e - -if test -d ../../.git; then - echo true > dot_git_is_dir -else - echo 
false > dot_git_is_dir - cat .git >> dot_git_is_dir -fi diff --git a/src/passes/1-parser/pascaligo.ml b/src/passes/1-parser/pascaligo.ml index 343f9195d..a464c6875 100644 --- a/src/passes/1-parser/pascaligo.ml +++ b/src/passes/1-parser/pascaligo.ml @@ -1,48 +1,10 @@ open Trace -(*module Parser = Parser_pascaligo.Parser*) -(*module ParserLog = Parser_pascaligo.ParserLog*) module AST = Parser_pascaligo.AST -module ParErr = Parser_pascaligo.ParErr module LexToken = Parser_pascaligo.LexToken module Lexer = Lexer.Make(LexToken) module Scoping = Parser_pascaligo.Scoping -module SSet = Utils.String.Set - -(* Mock options. TODO: Plug in cmdliner. *) - -let pre_options = - EvalOpt.make - ~libs:[] - ~verbose:SSet.empty - ~offsets:true - ~mode:`Point - ~cmd:EvalOpt.Quiet - ~mono:true (* Monolithic API of Menhir for now *) -(* ~input:None *) -(* ~expr:true *) - -module Parser = - struct - type ast = AST.t - type expr = AST.expr - include Parser_pascaligo.Parser - end - -module ParserLog = - struct - type ast = AST.t - type expr = AST.expr - include Parser_pascaligo.ParserLog - end - -module PreUnit = ParserUnit.Make (Lexer)(AST)(Parser)(ParErr)(ParserLog) -module Front = ParserAPI.Make (Lexer)(Parser)(ParErr) - -let issue_error point = - let error = Front.format_error ~offsets:true (* TODO: CLI *) - `Point (* TODO: CLI *) point - in Stdlib.Error error +module Parser = Parser_pascaligo.Parser module Errors = struct @@ -103,14 +65,6 @@ module Errors = fun () -> Format.asprintf "%a" Location.pp_lift @@ loc)] in error ~data title message - let detached_attributes (attrs: AST.attributes) = - let title () = "detached attributes" in - let message () = "" in - let data = [ - ("location", - fun () -> Format.asprintf "%a" Location.pp_lift @@ attrs.region)] - in error ~data title message - let parser_error source (start: Lexing.position) (stop: Lexing.position) lexbuf = let title () = "parser error" in @@ -127,7 +81,8 @@ module Errors = file in let loc = if start.pos_cnum = -1 then - Region.make ~start: Pos.min ~stop:(Pos.from_byte stop) + Region.make + ~start:(Pos.min ~file:source) ~stop:(Pos.from_byte stop) else Region.make ~start:(Pos.from_byte start) ~stop:(Pos.from_byte stop) in @@ -167,14 +122,6 @@ let parse (parser: 'a parser) source lexbuf = fail @@ duplicate_variant name | Scoping.Error (Reserved_name name) -> fail @@ reserved_name name - | Scoping.Error (Detached_attributes attrs) -> - fail @@ detached_attributes attrs - | Parser.Error -> - let start = Lexing.lexeme_start_p lexbuf in - let end_ = Lexing.lexeme_end_p lexbuf in - fail @@ (parser_error source start end_ lexbuf) - | Lexer.Error e -> - fail @@ lexer_error e | _ -> let () = Printexc.print_backtrace Pervasives.stdout in let start = Lexing.lexeme_start_p lexbuf in @@ -198,6 +145,7 @@ let parse_file (source: string) : AST.t result = let lexbuf = Lexing.from_channel channel in parse (Parser.contract) source lexbuf +(* let parse_file' (source: string) : AST.t result = let module IO = struct @@ -208,6 +156,7 @@ let parse_file' (source: string) : AST.t result = match Unit.parse Unit.parse_contract with Ok ast -> ok ast | Error error -> failwith "TODO" (* fail @@ parser_or_lexer_error error *) + *) let parse_string (s:string) : AST.t result = let lexbuf = Lexing.from_string s in diff --git a/src/passes/1-parser/pascaligo/AST.ml b/src/passes/1-parser/pascaligo/AST.ml index ff00926d7..5f95dc3e5 100644 --- a/src/passes/1-parser/pascaligo/AST.ml +++ b/src/passes/1-parser/pascaligo/AST.ml @@ -145,12 +145,13 @@ type t = { and ast = t -and attributes = 
attribute ne_injection reg - and declaration = TypeDecl of type_decl reg | ConstDecl of const_decl reg | FunDecl of fun_decl reg +| AttrDecl of attr_decl + +and attr_decl = string reg ne_injection reg and const_decl = { kwd_const : kwd_const; @@ -160,7 +161,7 @@ and const_decl = { equal : equal; init : expr; terminator : semi option; - attributes : attributes option + attributes : attr_decl option } (* Type declarations *) @@ -218,7 +219,7 @@ and fun_decl = { block_with : (block reg * kwd_with) option; return : expr; terminator : semi option; - attributes : attributes option; + attributes : attr_decl option } and parameters = (param_decl, semi) nsepseq par reg @@ -261,11 +262,12 @@ and statements = (statement, semi) nsepseq and statement = Instr of instruction | Data of data_decl +| Attr of attr_decl and data_decl = LocalConst of const_decl reg -| LocalVar of var_decl reg -| LocalFun of fun_decl reg +| LocalVar of var_decl reg +| LocalFun of fun_decl reg and var_decl = { kwd_var : kwd_var; diff --git a/src/passes/1-parser/pascaligo/LexToken.mli b/src/passes/1-parser/pascaligo/LexToken.mli index f2ce709ae..620be977c 100644 --- a/src/passes/1-parser/pascaligo/LexToken.mli +++ b/src/passes/1-parser/pascaligo/LexToken.mli @@ -43,7 +43,6 @@ type t = | Mutez of (lexeme * Z.t) Region.reg | Ident of lexeme Region.reg | Constr of lexeme Region.reg -| Attr of attribute (* Symbols *) diff --git a/src/passes/1-parser/pascaligo/LexToken.mll b/src/passes/1-parser/pascaligo/LexToken.mll index 5a7020105..542a36c1e 100644 --- a/src/passes/1-parser/pascaligo/LexToken.mll +++ b/src/passes/1-parser/pascaligo/LexToken.mll @@ -41,7 +41,6 @@ type t = | Mutez of (lexeme * Z.t) Region.reg | Ident of lexeme Region.reg | Constr of lexeme Region.reg -| Attr of attribute (* Symbols *) @@ -150,8 +149,10 @@ let proj_token = function | Constr Region.{region; value} -> region, sprintf "Constr \"%s\"" value +(* | Attr {header; string={region; value}} -> region, sprintf "Attr (\"%s\",\"%s\")" header value + *) (* Symbols *) @@ -242,7 +243,6 @@ let to_lexeme = function | Mutez i -> fst i.Region.value | Ident id | Constr id -> id.Region.value -| Attr {string; _} -> string.Region.value (* Symbols *) diff --git a/src/passes/1-parser/pascaligo/Misc/pascaligo.ml b/src/passes/1-parser/pascaligo/Misc/pascaligo.ml new file mode 100644 index 000000000..c323496e5 --- /dev/null +++ b/src/passes/1-parser/pascaligo/Misc/pascaligo.ml @@ -0,0 +1,39 @@ + +module ParserLog = Parser_pascaligo.ParserLog +module ParErr = Parser_pascaligo.ParErr +module SSet = Utils.String.Set + +(* Mock options. TODO: Plug in cmdliner. 
*) + +let pre_options = + EvalOpt.make + ~libs:[] + ~verbose:SSet.empty + ~offsets:true + ~mode:`Point + ~cmd:EvalOpt.Quiet + ~mono:true (* Monolithic API of Menhir for now *) +(* ~input:None *) +(* ~expr:true *) + +module Parser = + struct + type ast = AST.t + type expr = AST.expr + include Parser_pascaligo.Parser + end + +module ParserLog = + struct + type ast = AST.t + type expr = AST.expr + include Parser_pascaligo.ParserLog + end + +module PreUnit = ParserUnit.Make (Lexer)(AST)(Parser)(ParErr)(ParserLog) +module Front = ParserAPI.Make (Lexer)(Parser)(ParErr) + +let issue_error point = + let error = Front.format_error ~offsets:true (* TODO: CLI *) + `Point (* TODO: CLI *) point + in Stdlib.Error error diff --git a/src/passes/1-parser/pascaligo/ParErr.ml b/src/passes/1-parser/pascaligo/ParErr.ml index 1e07f3913..a19b6aac2 100644 --- a/src/passes/1-parser/pascaligo/ParErr.ml +++ b/src/passes/1-parser/pascaligo/ParErr.ml @@ -58,13 +58,13 @@ let message = "\n" | 64 -> "\n" - | 517 -> + | 543 -> "\n" | 29 -> "\n" | 32 -> "\n" - | 515 -> + | 541 -> "\n" | 35 -> "\n" @@ -78,23 +78,9 @@ let message = "\n" | 67 -> "\n" - | 70 -> + | 68 -> "\n" - | 71 -> - "\n" - | 72 -> - "\n" - | 73 -> - "\n" - | 80 -> - "\n" - | 81 -> - "\n" - | 76 -> - "\n" - | 77 -> - "\n" - | 78 -> + | 84 -> "\n" | 85 -> "\n" @@ -102,241 +88,225 @@ let message = "\n" | 87 -> "\n" - | 88 -> - "\n" - | 512 -> - "\n" - | 358 -> - "\n" - | 359 -> - "\n" - | 499 -> - "\n" - | 362 -> - "\n" - | 360 -> - "\n" - | 361 -> - "\n" - | 363 -> - "\n" - | 364 -> - "\n" - | 365 -> - "\n" - | 366 -> - "\n" - | 367 -> - "\n" - | 475 -> - "\n" - | 476 -> - "\n" - | 477 -> - "\n" - | 478 -> - "\n" - | 496 -> - "\n" - | 503 -> - "\n" - | 502 -> - "\n" - | 371 -> - "\n" - | 372 -> + | 514 -> "\n" | 373 -> "\n" | 374 -> "\n" + | 507 -> + "\n" + | 377 -> + "\n" + | 375 -> + "\n" + | 376 -> + "\n" | 378 -> "\n" + | 379 -> + "\n" | 380 -> "\n" + | 381 -> + "\n" | 382 -> "\n" - | 383 -> + | 484 -> + "\n" + | 485 -> + "\n" + | 486 -> + "\n" + | 487 -> + "\n" + | 504 -> + "\n" + | 511 -> + "\n" + | 510 -> + "\n" + | 386 -> "\n" | 387 -> "\n" - | 384 -> - "\n" - | 385 -> + | 388 -> "\n" | 389 -> "\n" - | 390 -> - "\n" - | 391 -> - "\n" | 393 -> "\n" | 395 -> "\n" - | 399 -> - "\n" - | 396 -> - "\n" | 397 -> "\n" - | 375 -> + | 398 -> "\n" - | 381 -> + | 402 -> + "\n" + | 399 -> + "\n" + | 400 -> "\n" | 404 -> "\n" + | 408 -> + "\n" | 405 -> "\n" | 406 -> "\n" - | 492 -> + | 390 -> "\n" - | 493 -> - "\n" - | 494 -> - "\n" - | 407 -> - "\n" - | 488 -> - "\n" - | 408 -> - "\n" - | 452 -> - "\n" - | 447 -> - "\n" - | 453 -> - "\n" - | 409 -> - "\n" - | 410 -> - "\n" - | 416 -> - "\n" - | 420 -> - "\n" - | 421 -> - "\n" - | 411 -> - "\n" - | 424 -> - "\n" - | 425 -> - "\n" - | 426 -> + | 396 -> "\n" | 413 -> "\n" + | 414 -> + "\n" | 415 -> "\n" - | 435 -> + | 500 -> "\n" - | 436 -> + | 501 -> "\n" - | 437 -> + | 502 -> "\n" - | 440 -> + | 416 -> "\n" - | 441 -> + | 496 -> "\n" - | 469 -> + | 417 -> "\n" - | 470 -> + | 461 -> "\n" - | 473 -> + | 456 -> "\n" - | 472 -> + | 462 -> "\n" - | 438 -> + | 418 -> "\n" - | 467 -> + | 419 -> "\n" - | 439 -> - "\n" - | 69 -> - "\n" - | 428 -> + | 425 -> "\n" | 429 -> "\n" | 430 -> "\n" - | 431 -> + | 420 -> "\n" - | 432 -> + | 433 -> "\n" - | 508 -> + | 434 -> "\n" - | 521 -> + | 435 -> "\n" - | 159 -> + | 422 -> "\n" - | 523 -> + | 424 -> "\n" - | 137 -> + | 444 -> "\n" - | 150 -> + | 445 -> "\n" - | 166 -> + | 446 -> "\n" - | 167 -> + | 449 -> "\n" - | 158 -> + | 450 -> "\n" - | 173 -> + | 478 -> "\n" - | 152 -> + | 
479 -> "\n" - | 168 -> + | 482 -> "\n" - | 169 -> + | 481 -> "\n" - | 175 -> + | 447 -> "\n" - | 177 -> + | 476 -> "\n" - | 179 -> + | 448 -> + "\n" + | 437 -> + "\n" + | 438 -> + "\n" + | 439 -> + "\n" + | 440 -> + "\n" + | 441 -> + "\n" + | 536 -> + "\n" + | 515 -> + "\n" + | 516 -> + "\n" + | 517 -> + "\n" + | 518 -> + "\n" + | 519 -> + "\n" + | 520 -> + "\n" + | 529 -> + "\n" + | 532 -> + "\n" + | 524 -> + "\n" + | 525 -> + "\n" + | 547 -> "\n" | 181 -> "\n" - | 183 -> + | 549 -> "\n" - | 160 -> + | 159 -> "\n" - | 170 -> + | 172 -> "\n" - | 157 -> + | 188 -> "\n" - | 163 -> + | 189 -> "\n" - | 187 -> + | 180 -> "\n" - | 92 -> + | 195 -> "\n" - | 318 -> + | 174 -> "\n" - | 319 -> + | 190 -> "\n" - | 322 -> + | 191 -> "\n" - | 323 -> + | 197 -> "\n" - | 356 -> + | 199 -> "\n" - | 351 -> + | 201 -> "\n" - | 353 -> + | 203 -> "\n" - | 93 -> + | 205 -> "\n" - | 94 -> + | 182 -> "\n" - | 338 -> + | 192 -> "\n" - | 95 -> + | 179 -> "\n" - | 96 -> + | 185 -> + "\n" + | 209 -> + "\n" + | 91 -> "\n" | 342 -> "\n" @@ -346,169 +316,231 @@ let message = "\n" | 347 -> "\n" - | 349 -> + | 371 -> "\n" - | 97 -> + | 366 -> "\n" - | 136 -> + | 368 -> + "\n" + | 92 -> + "\n" + | 93 -> + "\n" + | 362 -> + "\n" + | 94 -> + "\n" + | 95 -> + "\n" + | 144 -> + "\n" + | 145 -> + "\n" + | 148 -> + "\n" + | 149 -> + "\n" + | 364 -> + "\n" + | 96 -> + "\n" + | 158 -> + "\n" + | 100 -> + "\n" + | 217 -> + "\n" + | 218 -> + "\n" + | 220 -> + "\n" + | 221 -> + "\n" + | 224 -> + "\n" + | 225 -> + "\n" + | 358 -> + "\n" + | 353 -> + "\n" + | 355 -> "\n" | 101 -> "\n" - | 195 -> - "\n" - | 196 -> - "\n" - | 198 -> - "\n" - | 199 -> - "\n" - | 202 -> - "\n" - | 203 -> - "\n" - | 334 -> - "\n" - | 329 -> - "\n" - | 331 -> - "\n" | 102 -> "\n" + | 350 -> + "\n" + | 336 -> + "\n" + | 338 -> + "\n" | 103 -> "\n" - | 326 -> + | 332 -> "\n" - | 312 -> + | 330 -> "\n" - | 314 -> + | 333 -> "\n" - | 104 -> + | 334 -> "\n" - | 308 -> + | 328 -> "\n" - | 306 -> + | 156 -> + "\n" + | 105 -> + "\n" + | 320 -> + "\n" + | 321 -> + "\n" + | 322 -> + "\n" + | 323 -> + "\n" + | 324 -> + "\n" + | 137 -> + "\n" + | 138 -> + "\n" + | 139 -> + "\n" + | 140 -> + "\n" + | 151 -> + "\n" + | 106 -> + "\n" + | 107 -> "\n" | 309 -> "\n" | 310 -> "\n" - | 304 -> + | 154 -> "\n" - | 134 -> + | 177 -> "\n" - | 106 -> + | 312 -> "\n" - | 296 -> + | 315 -> "\n" - | 297 -> + | 316 -> "\n" - | 298 -> - "\n" - | 299 -> - "\n" - | 300 -> - "\n" - | 107 -> + | 133 -> "\n" | 108 -> "\n" - | 285 -> + | 69 -> "\n" - | 286 -> + | 70 -> "\n" - | 132 -> + | 71 -> "\n" - | 155 -> + | 72 -> + "\n" + | 79 -> + "\n" + | 80 -> + "\n" + | 75 -> + "\n" + | 76 -> + "\n" + | 77 -> + "\n" + | 109 -> + "\n" + | 110 -> + "\n" + | 111 -> + "\n" + | 112 -> + "\n" + | 114 -> + "\n" + | 117 -> + "\n" + | 230 -> + "\n" + | 231 -> + "\n" + | 269 -> + "\n" + | 293 -> + "\n" + | 270 -> + "\n" + | 272 -> + "\n" + | 273 -> + "\n" + | 294 -> + "\n" + | 300 -> + "\n" + | 299 -> + "\n" + | 303 -> + "\n" + | 302 -> + "\n" + | 240 -> + "\n" + | 283 -> + "\n" + | 284 -> + "\n" + | 287 -> "\n" | 288 -> "\n" | 291 -> "\n" - | 292 -> - "\n" - | 128 -> - "\n" - | 110 -> - "\n" - | 113 -> - "\n" - | 208 -> - "\n" - | 209 -> - "\n" - | 247 -> - "\n" - | 271 -> - "\n" - | 248 -> - "\n" - | 250 -> - "\n" - | 251 -> - "\n" - | 272 -> - "\n" - | 278 -> - "\n" | 277 -> "\n" - | 281 -> - "\n" - | 280 -> - "\n" - | 218 -> - "\n" - | 261 -> - "\n" - | 262 -> - "\n" - | 265 -> - "\n" - | 266 -> - "\n" - | 269 -> - "\n" - | 255 -> - "\n" - | 257 -> - "\n" - | 219 -> - "\n" - | 244 -> - "\n" - | 245 -> - 
"\n" - | 253 -> + | 279 -> "\n" | 241 -> "\n" - | 210 -> + | 266 -> + "\n" + | 267 -> "\n" | 275 -> "\n" - | 211 -> + | 263 -> "\n" - | 223 -> + | 232 -> "\n" - | 224 -> + | 297 -> "\n" - | 240 -> + | 233 -> "\n" - | 225 -> + | 245 -> "\n" - | 226 -> + | 246 -> "\n" - | 234 -> + | 262 -> "\n" - | 114 -> + | 247 -> + "\n" + | 248 -> + "\n" + | 256 -> "\n" | 118 -> "\n" - | 206 -> + | 122 -> "\n" - | 119 -> + | 228 -> "\n" - | 125 -> + | 123 -> + "\n" + | 130 -> "\n" | _ -> raise Not_found diff --git a/src/passes/1-parser/pascaligo/ParToken.mly b/src/passes/1-parser/pascaligo/ParToken.mly index 61397c7e3..11275b76e 100644 --- a/src/passes/1-parser/pascaligo/ParToken.mly +++ b/src/passes/1-parser/pascaligo/ParToken.mly @@ -12,7 +12,6 @@ %token <(LexToken.lexeme * Z.t) Region.reg> Mutez "" %token Ident "" %token Constr "" -%token Attr "" (* Symbols *) diff --git a/src/passes/1-parser/pascaligo/Parser.mly b/src/passes/1-parser/pascaligo/Parser.mly index 21e1947bf..9b41ba242 100644 --- a/src/passes/1-parser/pascaligo/Parser.mly +++ b/src/passes/1-parser/pascaligo/Parser.mly @@ -5,40 +5,6 @@ open Region open AST -(* -type statement_attributes_mixed = - PInstr of instruction -| PData of data_decl -| PAttr of attributes - -let attributes_to_statement (statement, statements) = - match statements with - [] -> - (match statement with - | PInstr i -> Instr i, [] - | PData d -> Data d, [] - | PAttr a -> - raise (Scoping.Error (Scoping.Detached_attributes a))) - | _ -> ( - let statements = (Region.ghost, statement) :: statements in - let rec inner result = function - | (t, PData (LocalConst const)) :: (_, PAttr a) :: rest -> - inner (result @ [(t, Data (LocalConst {const with value = {const.value with attributes = a}}))]) rest - | (t, PData (LocalFun func)) :: (_, PAttr a) :: rest -> - inner (result @ [(t, Data (LocalFun {func with value = {func.value with attributes = a}}))]) rest - | (t, PData d) :: rest -> - inner (result @ [(t, Data d)]) rest - | (t, PInstr i) :: rest -> - inner (result @ [(t, Instr i)]) rest - | (_, PAttr _) :: rest -> - inner result rest - | [] -> - result - in - let result = inner [] statements in - (snd (List.hd result), List.tl result) - ) - *) (* END HEADER *) %} @@ -144,9 +110,18 @@ contract: nseq(declaration) EOF { {decl=$1; eof=$2} } declaration: - type_decl { TypeDecl $1 } -| const_decl { ConstDecl $1 } -| fun_decl { FunDecl $1 } + type_decl { TypeDecl $1 } +| const_decl { ConstDecl $1 } +| fun_decl { FunDecl $1 } +| attr_decl { AttrDecl $1 } + +(* Attribute declarations *) + +attr_decl: + open_attr_decl ";"? 
{ $1 } + +open_attr_decl: + ne_injection("attributes","") { $1 } (* Type declarations *) @@ -269,60 +244,54 @@ fun_expr: colon = $3; ret_type = $4; kwd_is = $5; - return = $6 - } + return = $6} in {region; value} } (* Function declarations *) open_fun_decl: "function" fun_name parameters ":" type_expr "is" - block - "with" expr { + block "with" expr { Scoping.check_reserved_name $2; - let stop = expr_to_region $9 in - let region = cover $1 stop - and value = {kwd_function = $1; - fun_name = $2; - param = $3; - colon = $4; - ret_type = $5; - kwd_is = $6; - block_with = Some ($7, $8); - return = $9; - terminator = None; - attributes = None} - in {region; value} } + let stop = expr_to_region $9 in + let region = cover $1 stop + and value = {kwd_function = $1; + fun_name = $2; + param = $3; + colon = $4; + ret_type = $5; + kwd_is = $6; + block_with = Some ($7, $8); + return = $9; + terminator = None; + attributes = None} + in {region; value} + } | "function" fun_name parameters ":" type_expr "is" expr { Scoping.check_reserved_name $2; - let stop = expr_to_region $7 in - let region = cover $1 stop - and value = {kwd_function = $1; - fun_name = $2; - param = $3; - colon = $4; - ret_type = $5; - kwd_is = $6; - block_with = None; - return = $7; - terminator = None; - attributes = None} + let stop = expr_to_region $7 in + let region = cover $1 stop + and value = {kwd_function = $1; + fun_name = $2; + param = $3; + colon = $4; + ret_type = $5; + kwd_is = $6; + block_with = None; + return = $7; + terminator = None; + attributes = None} in {region; value} } fun_decl: - open_fun_decl maybe_attributes? { - match $2 with - None -> $1 - | Some (terminator, attributes) -> - let value = {$1.value with terminator; attributes} - in {$1 with value} } + open_fun_decl ";"? { + {$1 with value = {$1.value with terminator=$2}} } parameters: par(nsepseq(param_decl,";")) { let params = Utils.nsepseq_to_list ($1.value: _ par).inside - in Scoping.check_parameters params; - $1 } + in Scoping.check_parameters params; $1 } param_decl: "var" var ":" param_type { @@ -352,25 +321,25 @@ block: "begin" sep_or_term_list(statement,";") "end" { let statements, terminator = $2 in let region = cover $1 $3 - and value = {opening = Begin $1; - statements (*= attributes_to_statement statements*); + and value = {opening = Begin $1; + statements; terminator; - closing = End $3} + closing = End $3} in {region; value} } | "block" "{" sep_or_term_list(statement,";") "}" { let statements, terminator = $3 in let region = cover $1 $4 - and value = {opening = Block ($1,$2); - statements (*= attributes_to_statement statements*); + and value = {opening = Block ($1,$2); + statements; terminator; - closing = Block $4} + closing = Block $4} in {region; value} } statement: - instruction { (*P*)Instr $1 } -| open_data_decl { (*P*)Data $1 } - (*| attributes { PAttr $1 }*) + instruction { Instr $1 } +| open_data_decl { Data $1 } +| open_attr_decl { Attr $1 } open_data_decl: open_const_decl { LocalConst $1 } @@ -410,20 +379,9 @@ unqualified_decl(OP): let region = expr_to_region $5 in $1, $2, $3, $4, $5, region } -attributes: - ne_injection("attributes","") { $1 } - -maybe_attributes: - ";" { Some $1, None } -| ";" attributes ";" { Some $1, Some $2 } - const_decl: - open_const_decl maybe_attributes? { - match $2 with - None -> $1 - | Some (terminator, attributes) -> - let value = {$1.value with terminator; attributes} - in {$1 with value} } + open_const_decl ";"? 
{ + {$1 with value = {$1.value with terminator=$2}} } instruction: conditional { Cond $1 } @@ -587,7 +545,7 @@ clause_block: let statements, terminator = $2 in let region = cover $1 $3 in let value = {lbrace = $1; - inside = (*attributes_to_statement*) statements, terminator; + inside = statements, terminator; rbrace = $3} in ShortBlock {value; region} } diff --git a/src/passes/1-parser/pascaligo/ParserLog.ml b/src/passes/1-parser/pascaligo/ParserLog.ml index bacb39a4f..06c42718a 100644 --- a/src/passes/1-parser/pascaligo/ParserLog.ml +++ b/src/passes/1-parser/pascaligo/ParserLog.ml @@ -114,27 +114,25 @@ let rec print_tokens state ast = Utils.nseq_iter (print_decl state) decl; print_token state eof "EOF" -and print_attributes state = function - None -> () -| Some attr -> - print_ne_injection state "attributes" print_string attr +and print_attr_decl state = + print_ne_injection state "attributes" print_string and print_decl state = function TypeDecl decl -> print_type_decl state decl | ConstDecl decl -> print_const_decl state decl | FunDecl decl -> print_fun_decl state decl +| AttrDecl decl -> print_attr_decl state decl and print_const_decl state {value; _} = let {kwd_const; name; colon; const_type; - equal; init; terminator; attributes} = value in + equal; init; terminator; _} = value in print_token state kwd_const "const"; print_var state name; print_token state colon ":"; print_type_expr state const_type; print_token state equal "="; print_expr state init; - print_terminator state terminator; - print_attributes state attributes + print_terminator state terminator and print_type_decl state {value; _} = let {kwd_type; name; kwd_is; @@ -204,7 +202,7 @@ and print_type_tuple state {value; _} = and print_fun_decl state {value; _} = let {kwd_function; fun_name; param; colon; ret_type; kwd_is; block_with; - return; terminator; attributes } = value in + return; terminator; _} = value in print_token state kwd_function "function"; print_var state fun_name; print_parameters state param; @@ -218,7 +216,6 @@ and print_fun_decl state {value; _} = print_token state kwd_with "with"); print_expr state return; print_terminator state terminator; - print_attributes state attributes and print_fun_expr state {value; _} = let {kwd_function; param; colon; @@ -294,6 +291,7 @@ and print_statements state sequence = and print_statement state = function Instr instr -> print_instruction state instr | Data data -> print_data_decl state data +| Attr attr -> print_attr_decl state attr and print_instruction state = function Cond {value; _} -> print_conditional state value @@ -686,10 +684,10 @@ and print_opening state lexeme = function print_token state kwd lexeme | KwdBracket (kwd, lbracket) -> print_token state kwd lexeme; - print_token state lbracket "{" + print_token state lbracket "[" and print_closing state = function - RBracket rbracket -> print_token state rbracket "}" + RBracket rbracket -> print_token state rbracket "]" | End kwd_end -> print_token state kwd_end "end" and print_binding state {value; _} = @@ -846,12 +844,14 @@ and pp_declaration state = function | FunDecl {value; region} -> pp_loc_node state "FunDecl" region; pp_fun_decl state value +| AttrDecl {value; region} -> + pp_loc_node state "AttrDecl" region; + pp_attr_decl state value + +and pp_attr_decl state = pp_ne_injection pp_string state and pp_fun_decl state decl = - let arity = - match decl.attributes with - None -> 5 - | Some _ -> 6 in + let arity = 5 in let () = let state = state#pad arity 0 in pp_ident state decl.fun_name in @@ -874,33 +874,14 
@@ and pp_fun_decl state decl = let () = let state = state#pad arity 4 in pp_node state ""; - pp_expr (state#pad 1 0) decl.return in - let () = - match decl.attributes with - None -> () - | Some attr -> - let state = state#pad arity 5 in - pp_node state ""; - pp_attributes (state#pad 1 0) attr + pp_expr (state#pad 1 0) decl.return in () -and pp_attributes state {value; _} = - pp_ne_injection pp_string state value - and pp_const_decl state decl = - let arity = - match decl.attributes with - None -> 3 - | Some _ -> 4 in + let arity = 3 in pp_ident (state#pad arity 0) decl.name; pp_type_expr (state#pad arity 1) decl.const_type; - pp_expr (state#pad arity 2) decl.init; - match decl.attributes with - None -> () - | Some attr -> - let state = state#pad arity 3 in - pp_node state ""; - pp_attributes (state#pad 1 0) attr + pp_expr (state#pad arity 2) decl.init and pp_type_expr state = function TProd cartesian -> @@ -1001,6 +982,9 @@ and pp_statement state = function | Data data_decl -> pp_node state "Data"; pp_data_decl (state#pad 1 0) data_decl +| Attr attr_decl -> + pp_node state "Attr"; + pp_attr_decl state attr_decl.value and pp_instruction state = function Cond {value; region} -> @@ -1183,18 +1167,18 @@ and pp_bin_cons state (head, _, tail) = and pp_injection : 'a.(state -> 'a -> unit) -> state -> 'a injection -> unit = fun printer state inj -> - let elements = Utils.sepseq_to_list inj.elements in - let length = List.length elements in - let apply len rank = printer (state#pad len rank) - in List.iteri (apply length) elements + let elements = Utils.sepseq_to_list inj.elements in + let length = List.length elements in + let apply len rank = printer (state#pad len rank) + in List.iteri (apply length) elements and pp_ne_injection : 'a.(state -> 'a -> unit) -> state -> 'a ne_injection -> unit = fun printer state inj -> - let ne_elements = Utils.nsepseq_to_list inj.ne_elements in - let length = List.length ne_elements in - let apply len rank = printer (state#pad len rank) - in List.iteri (apply length) ne_elements + let ne_elements = Utils.nsepseq_to_list inj.ne_elements in + let length = List.length ne_elements in + let apply len rank = printer (state#pad len rank) + in List.iteri (apply length) ne_elements and pp_tuple_pattern state tuple = let patterns = Utils.nsepseq_to_list tuple.inside in diff --git a/src/passes/1-parser/pascaligo/Scoping.ml b/src/passes/1-parser/pascaligo/Scoping.ml index a69b83381..73a7012ac 100644 --- a/src/passes/1-parser/pascaligo/Scoping.ml +++ b/src/passes/1-parser/pascaligo/Scoping.ml @@ -7,7 +7,6 @@ type t = | Duplicate_variant of AST.variable | Non_linear_pattern of AST.variable | Duplicate_field of AST.variable -| Detached_attributes of AST.attributes type error = t diff --git a/src/passes/1-parser/pascaligo/Scoping.mli b/src/passes/1-parser/pascaligo/Scoping.mli index b62ef7dd2..71f8c1244 100644 --- a/src/passes/1-parser/pascaligo/Scoping.mli +++ b/src/passes/1-parser/pascaligo/Scoping.mli @@ -6,7 +6,6 @@ type t = | Duplicate_variant of AST.variable | Non_linear_pattern of AST.variable | Duplicate_field of AST.variable -| Detached_attributes of AST.attributes type error = t diff --git a/src/passes/1-parser/pascaligo/check_dot_git_is_dir.sh b/src/passes/1-parser/pascaligo/check_dot_git_is_dir.sh deleted file mode 100755 index 7df363999..000000000 --- a/src/passes/1-parser/pascaligo/check_dot_git_is_dir.sh +++ /dev/null @@ -1,10 +0,0 @@ -#!/bin/sh - -set -e - -if test -d ../../.git; then - echo true > dot_git_is_dir -else - echo false > dot_git_is_dir - cat .git >> 
dot_git_is_dir -fi diff --git a/src/passes/1-parser/reasonligo.ml b/src/passes/1-parser/reasonligo.ml index b3cc3cc7d..c60a3367c 100644 --- a/src/passes/1-parser/reasonligo.ml +++ b/src/passes/1-parser/reasonligo.ml @@ -45,7 +45,7 @@ module Errors = let loc = if start.pos_cnum = -1 then Region.make - ~start: Pos.min + ~start:(Pos.min ~file:source) ~stop:(Pos.from_byte end_) else Region.make ~start:(Pos.from_byte start) diff --git a/src/passes/1-parser/reasonligo/LexToken.mli b/src/passes/1-parser/reasonligo/LexToken.mli index 3c8aadb96..09142e23d 100644 --- a/src/passes/1-parser/reasonligo/LexToken.mli +++ b/src/passes/1-parser/reasonligo/LexToken.mli @@ -143,8 +143,7 @@ val mk_int : lexeme -> Region.t -> (token, int_err) result val mk_nat : lexeme -> Region.t -> (token, nat_err) result val mk_mutez : lexeme -> Region.t -> (token, int_err) result val mk_ident : lexeme -> Region.t -> (token, ident_err) result -val mk_attr : lexeme -> Region.t -> (token, attr_err) result -val mk_attr2 : lexeme -> Region.t -> (token, attr_err) result +val mk_attr : string -> lexeme -> Region.t -> (token, attr_err) result val mk_sym : lexeme -> Region.t -> (token, sym_err) result val mk_kwd : lexeme -> Region.t -> (token, kwd_err) result val mk_string : lexeme -> Region.t -> token diff --git a/src/passes/1-parser/reasonligo/LexToken.mll b/src/passes/1-parser/reasonligo/LexToken.mll index 8949dc64f..e4689082a 100644 --- a/src/passes/1-parser/reasonligo/LexToken.mll +++ b/src/passes/1-parser/reasonligo/LexToken.mll @@ -453,11 +453,10 @@ let mk_constr lexeme region = mk_constr' lexeme region lexicon (* Attributes *) -let mk_attr lexeme region = - Ok (Attr { value = lexeme; region }) - -let mk_attr2 _lexeme _region = - Error Invalid_attribute +let mk_attr header lexeme region = + if header = "[@" then + Ok (Attr Region.{value=lexeme; region}) + else Error Invalid_attribute (* Predicates *) diff --git a/src/passes/1-parser/reasonligo/Misc/Misc.ml b/src/passes/1-parser/reasonligo/Misc/Misc.ml new file mode 100644 index 000000000..9e0ac54bf --- /dev/null +++ b/src/passes/1-parser/reasonligo/Misc/Misc.ml @@ -0,0 +1,65 @@ +type error = + IntErr of LexToken.int_err +| IdentErr of LexToken.ident_err +| NatErr of LexToken.nat_err +| SymErr of LexToken.sym_err +| KwdErr of LexToken.kwd_err + +let rec first_of_expr = function + ECase {value; _} -> + (match LexToken.mk_kwd "switch" value.kwd_match with + Error e -> Error (KwdErr e) + | Ok token -> Ok token) +| ECond {value; _} -> + (match LexToken.mk_kwd "if" value.kwd_if with + Error e -> Error (KwdErr e) + | Ok token -> Ok token) +| EPar {value; _} -> + (match LexToken.mk_sym "(" value.lpar with + Error e -> Error (SymErr e) + | Ok token -> Ok token) +| EAnnot {value; _} -> + (match LexToken.mk_sym "(" value.lpar with + Error e -> Error (SymErr e) + | Ok token -> Ok token) +| EUnit {value=opening, _; _} -> + (match LexToken.mk_sym "(" opening with + Error e -> Error (SymErr e) + | Ok token -> Ok token) +| EBytes b -> + Ok (LexToken.mk_bytes (fst b.value) b.region) +| EVar v -> + (match LexToken.mk_ident v.value v.region with + Error e -> Error (IdentErr e) + | Ok token -> Ok token) +| ESeq {value; _} -> + let opening = + match value.compound with + BeginEnd (opening, _) + | Braces (opening, _) + | Brackets (opening, _) -> opening + in (match LexToken.mk_sym "{" opening with + Error e -> Error (SymErr e) + | Ok token -> Ok token) +| EProj {value; _} -> + let structure = value.struct_name in + (match LexToken.mk_ident structure.value structure.region with + Error e -> Error 
(IdentErr e) + | Ok token -> Ok token) +| EFun {value; _} -> + (match LexToken.mk_kwd "fun" value.kwd_fun with + Error e -> Error (KwdErr e) + | Ok token -> Ok token) +| _ -> failwith "TODO" +(* + +| ELogic expr -> first_of_logic_expr expr +| EArith expr -> first_of_arith_expr expr +| EString expr -> first_of_string_expr expr +| EList expr -> first_of_list_expr expr +| EConstr expr -> first_of_constr_expr expr +| ECall {value=expr,_; _} -> first_of_expr expr +| ERecord {value; _} -> (*field_assign reg ne_injection *) +| ETuple {value; _} -> (* (expr, comma) nsepseq *) +| ELetIn {value; _} -> first_of_let_in value + *) diff --git a/src/passes/1-parser/reasonligo/ParErr.ml b/src/passes/1-parser/reasonligo/ParErr.ml index 594f9ecd4..18b32b373 100644 --- a/src/passes/1-parser/reasonligo/ParErr.ml +++ b/src/passes/1-parser/reasonligo/ParErr.ml @@ -46,9 +46,9 @@ let message = "\n" | 11 -> "\n" - | 509 -> + | 528 -> "\n" - | 503 -> + | 61 -> "\n" | 48 -> "\n" @@ -68,335 +68,387 @@ let message = "\n" | 14 -> "\n" - | 60 -> - "\n" | 65 -> "\n" - | 505 -> + | 70 -> "\n" - | 145 -> + | 524 -> "\n" - | 146 -> + | 185 -> "\n" - | 144 -> + | 186 -> "\n" - | 329 -> + | 184 -> "\n" - | 331 -> + | 302 -> "\n" - | 330 -> + | 304 -> "\n" - | 61 -> + | 303 -> + "\n" + | 66 -> + "\n" + | 69 -> "\n" | 64 -> "\n" - | 59 -> + | 183 -> "\n" - | 143 -> + | 311 -> "\n" - | 338 -> + | 313 -> "\n" - | 340 -> + | 312 -> "\n" - | 339 -> + | 191 -> "\n" - | 151 -> - "\n" - | 152 -> - "\n" - | 78 -> - "\n" - | 325 -> - "\n" - | 327 -> - "\n" - | 326 -> - "\n" - | 92 -> - "\n" - | 155 -> + | 192 -> "\n" | 118 -> "\n" - | 125 -> + | 298 -> "\n" - | 87 -> + | 300 -> "\n" - | 105 -> - "\n" - | 107 -> - "\n" - | 108 -> - "\n" - | 106 -> - "\n" - | 88 -> - "\n" - | 93 -> - "\n" - | 80 -> - "\n" - | 81 -> - "\n" - | 82 -> + | 299 -> "\n" | 132 -> "\n" - | 334 -> - "\n" - | 336 -> - "\n" - | 335 -> - "\n" - | 133 -> - "\n" - | 136 -> - "\n" - | 137 -> - "\n" - | 157 -> - "\n" - | 159 -> + | 195 -> "\n" | 158 -> "\n" - | 512 -> + | 165 -> "\n" - | 218 -> + | 127 -> "\n" - | 514 -> + | 145 -> "\n" - | 216 -> + | 147 -> "\n" - | 250 -> + | 148 -> "\n" - | 248 -> + | 146 -> "\n" - | 249 -> + | 128 -> "\n" - | 230 -> + | 133 -> "\n" - | 235 -> - "\n" - | 252 -> - "\n" - | 254 -> - "\n" - | 255 -> - "\n" - | 258 -> - "\n" - | 219 -> - "\n" - | 226 -> - "\n" - | 227 -> - "\n" - | 260 -> - "\n" - | 262 -> - "\n" - | 264 -> - "\n" - | 266 -> - "\n" - | 194 -> - "\n" - | 195 -> - "\n" - | 206 -> - "\n" - | 215 -> - "\n" - | 199 -> - "\n" - | 207 -> - "\n" - | 208 -> - "\n" - | 196 -> - "\n" - | 197 -> - "\n" - | 198 -> - "\n" - | 256 -> - "\n" - | 257 -> - "\n" - | 277 -> - "\n" - | 233 -> - "\n" - | 279 -> - "\n" - | 67 -> - "\n" - | 463 -> - "\n" - | 464 -> - "\n" - | 387 -> + | 120 -> "\n" | 121 -> "\n" | 122 -> "\n" - | 120 -> + | 172 -> "\n" - | 466 -> + | 307 -> "\n" - | 467 -> + | 309 -> + "\n" + | 308 -> + "\n" + | 173 -> + "\n" + | 176 -> + "\n" + | 177 -> + "\n" + | 197 -> + "\n" + | 199 -> + "\n" + | 198 -> + "\n" + | 59 -> + "\n" + | 531 -> + "\n" + | 225 -> + "\n" + | 533 -> + "\n" + | 223 -> + "\n" + | 257 -> + "\n" + | 255 -> + "\n" + | 256 -> + "\n" + | 237 -> + "\n" + | 242 -> + "\n" + | 259 -> + "\n" + | 261 -> + "\n" + | 262 -> + "\n" + | 265 -> + "\n" + | 226 -> + "\n" + | 233 -> + "\n" + | 234 -> + "\n" + | 267 -> + "\n" + | 269 -> + "\n" + | 271 -> + "\n" + | 273 -> + "\n" + | 201 -> + "\n" + | 202 -> + "\n" + | 213 -> + "\n" + | 222 -> + "\n" + | 206 -> + "\n" + | 214 -> + "\n" + | 215 -> + "\n" + | 203 -> + "\n" + | 
204 -> + "\n" + | 205 -> + "\n" + | 263 -> + "\n" + | 284 -> + "\n" + | 240 -> + "\n" + | 286 -> + "\n" + | 72 -> "\n" | 483 -> "\n" - | 492 -> + | 484 -> "\n" - | 469 -> + | 423 -> "\n" - | 470 -> + | 161 -> "\n" - | 468 -> + | 162 -> "\n" - | 471 -> + | 160 -> "\n" - | 472 -> - "\n" - | 473 -> - "\n" - | 475 -> - "\n" - | 476 -> - "\n" - | 477 -> - "\n" - | 478 -> + | 486 -> "\n" | 487 -> "\n" - | 488 -> + | 504 -> "\n" - | 474 -> + | 513 -> + "\n" + | 498 -> "\n" | 499 -> "\n" | 497 -> "\n" - | 465 -> + | 488 -> "\n" - | 321 -> + | 489 -> + "\n" + | 490 -> + "\n" + | 492 -> + "\n" + | 493 -> + "\n" + | 494 -> + "\n" + | 495 -> + "\n" + | 509 -> + "\n" + | 510 -> + "\n" + | 491 -> + "\n" + | 520 -> + "\n" + | 518 -> + "\n" + | 485 -> + "\n" + | 372 -> + "\n" + | 366 -> + "\n" + | 367 -> + "\n" + | 369 -> + "\n" + | 368 -> + "\n" + | 365 -> + "\n" + | 76 -> + "\n" + | 446 -> + "\n" + | 326 -> + "\n" + | 332 -> + "\n" + | 333 -> + "\n" + | 336 -> + "\n" + | 337 -> + "\n" + | 328 -> + "\n" + | 339 -> + "\n" + | 100 -> + "\n" + | 78 -> + "\n" + | 80 -> "\n" | 315 -> "\n" | 316 -> "\n" - | 318 -> + | 117 -> "\n" - | 317 -> - "\n" - | 314 -> - "\n" - | 71 -> - "\n" - | 410 -> - "\n" - | 298 -> - "\n" - | 304 -> - "\n" - | 305 -> - "\n" - | 308 -> - "\n" - | 309 -> - "\n" - | 300 -> - "\n" - | 178 -> - "\n" - | 73 -> - "\n" - | 75 -> - "\n" - | 419 -> - "\n" - | 420 -> - "\n" - | 77 -> - "\n" - | 160 -> - "\n" - | 412 -> - "\n" - | 413 -> - "\n" - | 415 -> - "\n" - | 416 -> - "\n" - | 193 -> - "\n" - | 229 -> - "\n" - | 74 -> - "\n" - | 447 -> + | 82 -> "\n" | 448 -> "\n" - | 456 -> + | 449 -> "\n" - | 457 -> + | 451 -> "\n" - | 459 -> + | 452 -> + "\n" + | 200 -> + "\n" + | 236 -> + "\n" + | 79 -> + "\n" + | 467 -> + "\n" + | 468 -> + "\n" + | 476 -> + "\n" + | 477 -> + "\n" + | 479 -> + "\n" + | 480 -> + "\n" + | 469 -> + "\n" + | 470 -> + "\n" + | 81 -> "\n" | 460 -> "\n" - | 449 -> + | 461 -> "\n" - | 450 -> + | 455 -> "\n" - | 76 -> + | 454 -> + "\n" + | 458 -> + "\n" + | 348 -> + "\n" + | 356 -> + "\n" + | 360 -> + "\n" + | 359 -> + "\n" + | 355 -> + "\n" + | 349 -> + "\n" + | 457 -> + "\n" + | 340 -> + "\n" + | 341 -> + "\n" + | 346 -> + "\n" + | 347 -> + "\n" + | 342 -> + "\n" + | 343 -> + "\n" + | 344 -> + "\n" + | 84 -> + "\n" + | 85 -> + "\n" + | 318 -> + "\n" + | 323 -> + "\n" + | 324 -> + "\n" + | 389 -> + "\n" + | 436 -> + "\n" + | 437 -> + "\n" + | 438 -> + "\n" + | 439 -> "\n" | 440 -> "\n" | 441 -> "\n" - | 425 -> + | 435 -> "\n" - | 422 -> + | 325 -> "\n" - | 428 -> + | 362 -> "\n" - | 429 -> + | 363 -> "\n" - | 434 -> + | 373 -> "\n" - | 438 -> + | 374 -> "\n" - | 437 -> + | 413 -> "\n" - | 433 -> + | 420 -> "\n" - | 423 -> + | 408 -> "\n" - | 427 -> + | 409 -> "\n" - | 162 -> + | 407 -> "\n" - | 163 -> + | 375 -> "\n" - | 290 -> + | 376 -> "\n" - | 295 -> - "\n" - | 296 -> - "\n" - | 357 -> - "\n" - | 400 -> - "\n" - | 401 -> + | 377 -> "\n" | 402 -> "\n" @@ -406,105 +458,69 @@ let message = "\n" | 405 -> "\n" - | 399 -> + | 417 -> "\n" - | 297 -> + | 418 -> "\n" - | 311 -> + | 401 -> "\n" - | 312 -> + | 429 -> "\n" - | 322 -> + | 427 -> "\n" - | 323 -> - "\n" - | 377 -> + | 364 -> "\n" | 384 -> "\n" - | 342 -> + | 385 -> "\n" - | 343 -> + | 383 -> "\n" - | 324 -> - "\n" - | 344 -> - "\n" - | 345 -> - "\n" - | 346 -> - "\n" - | 370 -> - "\n" - | 371 -> - "\n" - | 372 -> - "\n" - | 373 -> + | 378 -> "\n" | 379 -> "\n" | 380 -> "\n" - | 369 -> + | 394 -> + "\n" + | 395 -> + "\n" + | 396 -> + "\n" + | 397 -> + "\n" + | 399 -> + "\n" + | 398 -> "\n" | 393 -> "\n" - | 391 -> + | 
320 -> "\n" - | 313 -> + | 321 -> "\n" - | 348 -> + | 86 -> "\n" - | 349 -> + | 87 -> "\n" - | 347 -> + | 88 -> "\n" - | 350 -> + | 89 -> "\n" - | 351 -> + | 90 -> "\n" - | 352 -> + | 91 -> "\n" - | 359 -> + | 96 -> "\n" - | 360 -> + | 97 -> "\n" - | 361 -> + | 98 -> "\n" - | 362 -> + | 111 -> "\n" - | 364 -> - "\n" - | 363 -> - "\n" - | 358 -> - "\n" - | 292 -> - "\n" - | 293 -> - "\n" - | 164 -> - "\n" - | 165 -> - "\n" - | 166 -> - "\n" - | 167 -> - "\n" - | 168 -> - "\n" - | 169 -> - "\n" - | 174 -> - "\n" - | 175 -> - "\n" - | 176 -> - "\n" - | 188 -> - "\n" - | 237 -> + | 244 -> "\n" | _ -> raise Not_found diff --git a/src/passes/1-parser/reasonligo/ParserMain.ml b/src/passes/1-parser/reasonligo/ParserMain.ml index 5cf3bef69..c2df027e2 100644 --- a/src/passes/1-parser/reasonligo/ParserMain.ml +++ b/src/passes/1-parser/reasonligo/ParserMain.ml @@ -43,10 +43,10 @@ let parse parser : ('a,string) Stdlib.result = Examples of valid functions:\n\ let x = (a: string, b: int) : int => 3;\n\ let x = (a: string) : string => \"Hello, \" ++ a;\n" - and reg = AST.expr_to_region expr in - let error = Unit.short_error ~offsets:IO.options#offsets - IO.options#mode msg reg - in Stdlib.Error error + and reg = AST.expr_to_region expr in + let error = Unit.short_error ~offsets:IO.options#offsets + IO.options#mode msg reg + in Stdlib.Error error (* Scoping errors *) diff --git a/src/passes/1-parser/shared/Lexer.mll b/src/passes/1-parser/shared/Lexer.mll index 5b4a335e9..73b33b804 100644 --- a/src/passes/1-parser/shared/Lexer.mll +++ b/src/passes/1-parser/shared/Lexer.mll @@ -868,7 +868,7 @@ let open_token_stream file_path_opt = let file_path = match file_path_opt with None | Some "-" -> "" | Some file_path -> file_path in - let pos = Pos.min#set_file file_path in + let pos = Pos.min ~file:file_path in let buf_reg = ref (pos#byte, pos#byte) and first_call = ref true and decoder = Uutf.decoder ~encoding:`UTF_8 `Manual in diff --git a/src/passes/1-parser/shared/ParserAPI.ml b/src/passes/1-parser/shared/ParserAPI.ml index 50cb6d748..aabb1efef 100644 --- a/src/passes/1-parser/shared/ParserAPI.ml +++ b/src/passes/1-parser/shared/ParserAPI.ml @@ -77,44 +77,6 @@ module Make (Lexer: Lexer.S) exception Point of error - let failure get_win checkpoint = - let message = ParErr.message (state checkpoint) in - match get_win () with - Lexer.Nil -> assert false - | Lexer.One invalid -> - raise (Point (message, None, invalid)) - | Lexer.Two (invalid, valid) -> - raise (Point (message, Some valid, invalid)) - - (* The two Menhir APIs are called from the following functions. *) - - module Incr = Parser.Incremental - - let incr_contract memo Lexer.{read; buffer; get_win; close; _} = - let supplier = I.lexer_lexbuf_to_supplier read buffer - and failure = failure get_win in - let parser = Incr.contract buffer.Lexing.lex_curr_p in - let ast = - try I.loop_handle success failure supplier parser with - Point (message, valid_opt, invalid) -> - let error = Memo. 
(* TODO *) - in Stdlib.Error () - - in close (); ast - - let mono_contract = Parser.contract - - let incr_expr Lexer.{read; buffer; get_win; close; _} : Parser.expr = - let supplier = I.lexer_lexbuf_to_supplier read buffer - and failure = failure get_win in - let parser = Incr.interactive_expr buffer.Lexing.lex_curr_p in - let expr = I.loop_handle success failure supplier parser - in close (); expr - - let mono_expr = Parser.interactive_expr - - (* Errors *) - let format_error ?(offsets=true) mode (msg, valid_opt, invalid) = let invalid_region = Lexer.Token.to_region invalid in let header = @@ -135,4 +97,37 @@ module Make (Lexer: Lexer.S) let header = header ^ trailer in header ^ (if msg = "" then ".\n" else ":\n" ^ msg) + let failure get_win checkpoint = + let message = ParErr.message (state checkpoint) in + match get_win () with + Lexer.Nil -> assert false + | Lexer.One invalid -> + raise (Point (message, None, invalid)) + | Lexer.Two (invalid, valid) -> + raise (Point (message, Some valid, invalid)) + + (* The monolithic API of Menhir *) + + let mono_contract = Parser.contract + + let mono_expr = Parser.interactive_expr + + (* Incremental API of Menhir *) + + module Incr = Parser.Incremental + + let incr_contract Lexer.{read; buffer; get_win; close; _} = + let supplier = I.lexer_lexbuf_to_supplier read buffer + and failure = failure get_win in + let parser = Incr.contract buffer.Lexing.lex_curr_p in + let ast = I.loop_handle success failure supplier parser + in close (); ast + + let incr_expr Lexer.{read; buffer; get_win; close; _} = + let supplier = I.lexer_lexbuf_to_supplier read buffer + and failure = failure get_win in + let parser = Incr.interactive_expr buffer.Lexing.lex_curr_p in + let expr = I.loop_handle success failure supplier parser + in close (); expr + end diff --git a/src/passes/1-parser/shared/ParserAPI.mli b/src/passes/1-parser/shared/ParserAPI.mli index cbd26e317..396a8698c 100644 --- a/src/passes/1-parser/shared/ParserAPI.mli +++ b/src/passes/1-parser/shared/ParserAPI.mli @@ -47,24 +47,25 @@ module Make (Lexer: Lexer.S) (Parser: PARSER with type token = Lexer.Token.token) (ParErr: sig val message : int -> string end) : sig - (* The monolithic API of Menhir with memos *) + (* The monolithic API of Menhir *) val mono_contract : - (Lexing.lexbuf -> Lexer.token) -> - Lexing.lexbuf -> - (Parser.ast, string) Stdlib.result + (Lexing.lexbuf -> Lexer.token) -> Lexing.lexbuf -> Parser.ast val mono_expr : - (Lexing.lexbuf -> Lexer.token) -> - Lexing.lexbuf -> - (Parser.expr, string) Stdlib.result + (Lexing.lexbuf -> Lexer.token) -> Lexing.lexbuf -> Parser.expr - (* Incremental API of Menhir with memos *) + (* Incremental API of Menhir *) - val incr_contract : - Lexer.instance -> (Parser.ast, string) Stdlib.result + type message = string + type valid = Parser.token + type invalid = Parser.token + type error = message * valid option * invalid - val incr_expr : - Lexer.instance -> - (Parser.expr, string) Stdlib.result + exception Point of error + + val incr_contract : Lexer.instance -> Parser.ast + val incr_expr : Lexer.instance -> Parser.expr + + val format_error : ?offsets:bool -> [`Point | `Byte] -> error -> string end diff --git a/src/passes/1-parser/shared/ParserUnit.ml b/src/passes/1-parser/shared/ParserUnit.ml index 2aaae54ee..dff827a56 100644 --- a/src/passes/1-parser/shared/ParserUnit.ml +++ b/src/passes/1-parser/shared/ParserUnit.ml @@ -89,6 +89,9 @@ module Make (Lexer: Lexer.S) let format_error = Front.format_error + let short_error ?(offsets=true) mode msg (reg: 
Region.t) = + sprintf "Parse error %s:\n%s" (reg#to_string ~offsets mode) msg + (* Parsing an expression *) let parse_expr lexer_inst tokeniser output state : diff --git a/src/passes/1-parser/shared/ParserUnit.mli b/src/passes/1-parser/shared/ParserUnit.mli index 59e665273..9c04d4885 100644 --- a/src/passes/1-parser/shared/ParserUnit.mli +++ b/src/passes/1-parser/shared/ParserUnit.mli @@ -46,6 +46,9 @@ module Make (Lexer: Lexer.S) val format_error : ?offsets:bool -> [`Byte | `Point] -> error -> string + val short_error : + ?offsets:bool -> [`Point | `Byte] -> string -> Region.t -> string + (* Parsers *) val parse : diff --git a/src/passes/2-simplify/pascaligo.ml b/src/passes/2-simplify/pascaligo.ml index afcab47ce..66f7f7de9 100644 --- a/src/passes/2-simplify/pascaligo.ml +++ b/src/passes/2-simplify/pascaligo.ml @@ -11,8 +11,8 @@ open Combinators let nseq_to_list (hd, tl) = hd :: tl let npseq_to_list (hd, tl) = hd :: (List.map snd tl) let pseq_to_list = function - | None -> [] - | Some lst -> npseq_to_list lst + None -> [] +| Some lst -> npseq_to_list lst let get_value : 'a Raw.reg -> 'a = fun x -> x.value let is_compiler_generated name = String.contains (Var.to_name name) '#' @@ -468,9 +468,9 @@ let rec simpl_expression (t:Raw.expr) : expr result = and simpl_update = fun (u:Raw.update Region.reg) -> let (u, loc) = r_split u in let (name, path) = simpl_path u.record in - let record = match path with + let record = match path with | [] -> e_variable (Var.of_name name) - | _ -> e_accessor (e_variable (Var.of_name name)) path in + | _ -> e_accessor (e_variable (Var.of_name name)) path in let updates = u.updates.value.ne_elements in let%bind updates' = let aux (f:Raw.field_assign Raw.reg) = @@ -478,7 +478,7 @@ and simpl_update = fun (u:Raw.update Region.reg) -> let%bind expr = simpl_expression f.field_expr in ok (f.field_name.value, expr) in - bind_map_list aux @@ npseq_to_list updates + bind_map_list aux @@ npseq_to_list updates in ok @@ e_update ~loc record updates' @@ -563,31 +563,43 @@ and simpl_tuple_expression ?loc (lst:Raw.expr list) : expression result = | [] -> return @@ e_literal Literal_unit | [hd] -> simpl_expression hd | lst -> - let%bind lst = bind_list @@ List.map simpl_expression lst in - return @@ e_tuple ?loc lst + let%bind lst = bind_list @@ List.map simpl_expression lst + in return @@ e_tuple ?loc lst -and simpl_data_declaration : Raw.data_decl -> _ result = fun t -> +and simpl_data_declaration : Raw.data_decl -> _ result = + fun t -> match t with | LocalVar x -> let (x , loc) = r_split x in let name = x.name.value in let%bind t = simpl_type_expression x.var_type in let%bind expression = simpl_expression x.init in - return_let_in ~loc (Var.of_name name , Some t) false expression + return_let_in ~loc (Var.of_name name, Some t) false expression | LocalConst x -> let (x , loc) = r_split x in let name = x.name.value in let%bind t = simpl_type_expression x.const_type in let%bind expression = simpl_expression x.init in - let inline = List.exists (fun (f: Raw.attribute) -> f.value = "\"inline\"") x.attributes.value in - return_let_in ~loc (Var.of_name name , Some t) inline expression + let inline = + match x.attributes with + None -> false + | Some {value; _} -> + npseq_to_list value.ne_elements + |> List.exists (fun Region.{value; _} -> value = "\"inline\"") + in return_let_in ~loc (Var.of_name name, Some t) inline expression | LocalFun f -> let (f , loc) = r_split f in let%bind (binder, expr) = simpl_fun_decl ~loc f in - let inline = List.exists (fun (f: Raw.attribute) -> 
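(* A minimal standalone sketch (not from the patch) of the new "inline"
   detection: declarations now carry an optional attribute list, and the
   simplifier scans it for the lexeme "inline" (kept with its quotes by
   the lexer, hence the comparison with "\"inline\"").  The attribute
   payload is simplified to a plain [string list option] here. *)

let is_inline (attributes : string list option) : bool =
  match attributes with
    None -> false
  | Some lexemes -> List.exists (fun lexeme -> lexeme = "\"inline\"") lexemes

let () =
  assert (is_inline (Some ["\"inline\""; "\"other\""]));
  assert (not (is_inline None))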
f.value = "\"inline\"") f.attributes.value in - return_let_in ~loc binder inline expr + let inline = + match f.attributes with + None -> false + | Some {value; _} -> + npseq_to_list value.ne_elements + |> List.exists (fun Region.{value; _} -> value = "\"inline\"") + in return_let_in ~loc binder inline expr -and simpl_param : Raw.param_decl -> (expression_variable * type_expression) result = +and simpl_param : + Raw.param_decl -> (expression_variable * type_expression) result = fun t -> match t with | ParamConst c -> @@ -602,11 +614,18 @@ and simpl_param : Raw.param_decl -> (expression_variable * type_expression) resu ok (type_name , type_expression) and simpl_fun_decl : - loc:_ -> Raw.fun_decl -> ((expression_variable * type_expression option) * expression) result = + loc:_ -> Raw.fun_decl -> + ((expression_variable * type_expression option) * expression) result = fun ~loc x -> let open! Raw in - let {fun_name;param;ret_type;block_with;return; attributes} : fun_decl = x in - let inline = List.exists (fun (a: Raw.attribute) -> a.value = "\"inline\"") attributes.value in + let {fun_name; param; ret_type; block_with; + return; attributes} : fun_decl = x in + let inline = + match attributes with + None -> false + | Some {value; _} -> + npseq_to_list value.ne_elements + |> List.exists (fun Region.{value; _} -> value = "\"inline\"") in let statements = match block_with with | Some (block,_) -> npseq_to_list block.value.statements @@ -616,9 +635,7 @@ and simpl_fun_decl : a, [] -> ( let%bind input = simpl_param a in let (binder , input_type) = input in - let%bind instructions = bind_list - @@ List.map simpl_statement - @@ statements in + let%bind instructions = simpl_statement_list statements in let%bind result = simpl_expression return in let%bind output_type = simpl_type_expression ret_type in let body = instructions in @@ -648,9 +665,7 @@ and simpl_fun_decl : ass in bind_list @@ List.mapi aux params in - let%bind instructions = bind_list - @@ List.map simpl_statement - @@ statements in + let%bind instructions = simpl_statement_list statements in let%bind result = simpl_expression return in let%bind output_type = simpl_type_expression ret_type in let body = tpl_declarations @ instructions in @@ -674,9 +689,7 @@ and simpl_fun_expression : a, [] -> ( let%bind input = simpl_param a in let (binder , input_type) = input in - let%bind instructions = bind_list - @@ List.map simpl_statement - @@ statements in + let%bind instructions = simpl_statement_list statements in let%bind result = simpl_expression return in let%bind output_type = simpl_type_expression ret_type in let body = instructions in @@ -706,9 +719,7 @@ and simpl_fun_expression : ass in bind_list @@ List.mapi aux params in - let%bind instructions = bind_list - @@ List.map simpl_statement - @@ statements in + let%bind instructions = simpl_statement_list statements in let%bind result = simpl_expression return in let%bind output_type = simpl_type_expression ret_type in let body = tpl_declarations @ instructions in @@ -722,44 +733,39 @@ and simpl_fun_expression : ) ) -and simpl_declaration : Raw.declaration -> declaration Location.wrap result = - fun t -> - let open! 
Raw in - match t with - | TypeDecl x -> - let decl, loc = r_split x in - let {name;type_expr} : Raw.type_decl = decl in - let%bind type_expression = simpl_type_expression type_expr in - ok @@ Location.wrap ~loc (Declaration_type - (Var.of_name name.value, type_expression)) - - | ConstDecl x -> - let simpl_const_decl = fun {name;const_type; init; attributes} -> - let%bind expression = simpl_expression init in - let%bind t = simpl_type_expression const_type in - let type_annotation = Some t in - let inline = List.exists (fun (a: Raw.attribute) -> a.value = "\"inline\"") attributes.value in - ok @@ Declaration_constant - (Var.of_name name.value, type_annotation, inline, expression) - in bind_map_location simpl_const_decl (Location.lift_region x) - | FunDecl x -> - let decl, loc = r_split x in - let%bind ((name, ty_opt), expr) = simpl_fun_decl ~loc decl in - let inline = List.exists (fun (a: Raw.attribute) -> a.value = "\"inline\"") x.value.attributes.value in - ok @@ Location.wrap ~loc (Declaration_constant (name, ty_opt, inline, expr)) - -and simpl_statement : Raw.statement -> (_ -> expression result) result = - fun s -> - match s with - | Instr i -> simpl_instruction i - | Data d -> simpl_data_declaration d +and simpl_statement_list statements = + let open Raw in + let rec hook acc = function + [] -> acc + | [Attr _] -> + (* Detached attributes are erased. TODO: Warning. *) + acc + | Attr _ :: (Attr _ :: _ as statements) -> + (* Detached attributes are erased. TODO: Warning. *) + hook acc statements + | Attr decl :: Data (LocalConst {value; region}) :: statements -> + let new_const = + Data (LocalConst {value = {value with attributes = Some decl}; region}) + in hook acc (new_const :: statements) + | Attr decl :: Data (LocalFun {value; region}) :: statements -> + let new_fun = + Data (LocalFun {value = {value with attributes = Some decl}; region}) + in hook acc (new_fun :: statements) + | Attr _ :: statements -> + (* Detached attributes are erased. TODO: Warning. *) + hook acc statements + | Instr i :: statements -> + hook (simpl_instruction i :: acc) statements + | Data d :: statements -> + hook (simpl_data_declaration d :: acc) statements + in bind_list @@ hook [] (List.rev statements) and simpl_single_instruction : Raw.instruction -> (_ -> expression result) result = fun t -> match t with | ProcCall x -> ( - let ((f, args) , loc) = r_split x in - let (args , args_loc) = r_split args in + let (f, args) , loc = r_split x in + let args, args_loc = r_split args in let args' = npseq_to_list args.inside in match f with | EVar name -> ( @@ -1058,7 +1064,7 @@ and simpl_cases : type a . (Raw.pattern * a) list -> (a, unit) matching result = let aux (x , y) = let error = let title () = "Pattern" in - (** TODO: The labelled arguments should be flowing from the CLI. *) + (* TODO: The labelled arguments should be flowing from the CLI. *) let content () = Printf.sprintf "Pattern : %s" (ParserLog.pattern_to_string @@ -1072,23 +1078,22 @@ and simpl_cases : type a . 
(Raw.pattern * a) list -> (a, unit) matching result = ok @@ ez_match_variant constrs and simpl_instruction : Raw.instruction -> (_ -> expression result) result = - fun t -> - trace (simplifying_instruction t) @@ simpl_single_instruction t + fun t -> trace (simplifying_instruction t) @@ simpl_single_instruction t and simpl_statements : Raw.statements -> (_ -> expression result) result = - fun ss -> - let lst = npseq_to_list ss in - let%bind fs = bind_map_list simpl_statement lst in - let aux : _ -> (expression option -> expression result) -> _ = - fun prec cur -> - let%bind res = cur prec in - ok @@ Some res in - ok @@ fun (expr' : _ option) -> - let%bind ret = bind_fold_right_list aux expr' fs in - ok @@ Option.unopt_exn ret + fun statements -> + let lst = npseq_to_list statements in + let%bind fs = simpl_statement_list lst in + let aux : _ -> (expression option -> expression result) -> _ = + fun prec cur -> + let%bind res = cur prec + in ok @@ Some res in + ok @@ fun (expr' : _ option) -> + let%bind ret = bind_fold_right_list aux expr' fs in + ok @@ Option.unopt_exn ret -and simpl_block : Raw.block -> (_ -> expression result) result = fun t -> - simpl_statements t.statements +and simpl_block : Raw.block -> (_ -> expression result) result = + fun t -> simpl_statements t.statements and simpl_for_int : Raw.for_int -> (_ -> expression result) result = fun fi -> (* cond part *) @@ -1264,11 +1269,13 @@ and simpl_for_collect : Raw.for_collect -> (_ -> expression result) result = fun (* STEP 5 *) let rec add_return (expr : expression) = match expr.expression with | E_sequence (a,b) -> e_sequence a (add_return b) - | _ -> e_sequence expr (e_variable (Var.of_name "#COMPILER#acc")) in (* TODO fresh *) + | _ -> (* TODO fresh *) + e_sequence expr (e_variable (Var.of_name "#COMPILER#acc")) in let for_body = add_return for_body in (* STEP 6 *) let for_body = - let ( arg_access: Types.access_path -> expression ) = e_accessor (e_variable (Var.of_name "arguments")) in (* TODO fresh *) + let ( arg_access: Types.access_path -> expression ) = + e_accessor (e_variable (Var.of_name "arguments")) in (* TODO fresh *) ( match fc.collection with | Map _ -> let acc = arg_access [Access_tuple 0 ] in @@ -1291,7 +1298,8 @@ and simpl_for_collect : Raw.for_collect -> (_ -> expression result) result = fun let fold = e_constant op_name [lambda; collect ; init_record] in (* STEP 8 *) let assign_back (prev : expression option) (captured_varname : string) : expression option = - let access = e_accessor (e_variable (Var.of_name "#COMPILER#folded_record")) (* TODO fresh *) + let access = (* TODO fresh *) + e_accessor (e_variable (Var.of_name "#COMPILER#folded_record")) [Access_record captured_varname] in let assign = e_assign captured_varname [] access in match prev with @@ -1304,6 +1312,73 @@ and simpl_for_collect : Raw.for_collect -> (_ -> expression result) result = fun | None -> e_skip () | Some seq -> e_let_in (Var.of_name "#COMPILER#folded_record", None) false fold seq in (* TODO fresh *) return_statement @@ final_sequence +(* +and simpl_declaration : Raw.declaration -> declaration Location.wrap result = + *) -let simpl_program : Raw.ast -> program result = fun t -> - bind_list @@ List.map simpl_declaration @@ nseq_to_list t.decl +and simpl_declaration_list declarations : + Ast_simplified.declaration Location.wrap list result = + let open Raw in + let rec hook acc = function + [] -> acc + | [AttrDecl _] -> + (* Detached attributes are erased. TODO: Warning. 
*) + acc + | AttrDecl _ :: (AttrDecl _ :: _ as declarations) -> + (* Detached attributes are erased. TODO: Warning. *) + hook acc declarations + | AttrDecl decl :: ConstDecl {value; region} :: declarations -> + let new_const = + ConstDecl {value = {value with attributes = Some decl}; region} + in hook acc (new_const :: declarations) + | AttrDecl decl :: FunDecl {value; region} :: declarations -> + let new_fun = + FunDecl {value = {value with attributes = Some decl}; region} + in hook acc (new_fun :: declarations) + | AttrDecl _ :: declarations -> + (* Detached attributes are erased. TODO: Warning. *) + hook acc declarations + | TypeDecl decl :: declarations -> + let decl, loc = r_split decl in + let {name; type_expr} : Raw.type_decl = decl in + let%bind type_expression = simpl_type_expression type_expr in + let new_decl = + Declaration_type (Var.of_name name.value, type_expression) in + let res = ok @@ Location.wrap ~loc new_decl + in hook (res::acc) declarations + | ConstDecl decl :: declarations -> + let simpl_const_decl = + fun {name;const_type; init; attributes} -> + let%bind expression = simpl_expression init in + let%bind t = simpl_type_expression const_type in + let type_annotation = Some t in + let inline = + match attributes with + None -> false + | Some {value; _} -> + npseq_to_list value.ne_elements + |> List.exists (fun Region.{value; _} -> value = "\"inline\"") in + let new_decl = + Declaration_constant + (Var.of_name name.value, type_annotation, inline, expression) + in ok new_decl in + let res = + bind_map_location simpl_const_decl (Location.lift_region decl) + in hook (res::acc) declarations + | FunDecl fun_decl :: declarations -> + let decl, loc = r_split fun_decl in + let%bind ((name, ty_opt), expr) = simpl_fun_decl ~loc decl in + let inline = + match fun_decl.value.attributes with + None -> false + | Some {value; _} -> + npseq_to_list value.ne_elements + |> List.exists (fun Region.{value; _} -> value = "\"inline\"") in + let new_decl = + Declaration_constant (name, ty_opt, inline, expr) in + let res = ok @@ Location.wrap ~loc new_decl + in hook (res::acc) declarations + in bind_list @@ hook [] (List.rev declarations) + +let simpl_program : Raw.ast -> program result = + fun t -> simpl_declaration_list @@ nseq_to_list t.decl diff --git a/src/test/contracts/attributes.ligo b/src/test/contracts/attributes.ligo index 95aea1880..561e8f5ff 100644 --- a/src/test/contracts/attributes.ligo +++ b/src/test/contracts/attributes.ligo @@ -1,17 +1,19 @@ -const x: int = 1; attributes ["inline"]; +const x : int = 1; attributes ["inline"] function foo (const a : int) : int is - block { - const test: int = 2 + a; attributes ["inline"]; - } with test; + begin + const test : int = 2 + a; + attributes ["inline"]; + end with test; attributes ["inline"]; - -const y: int = 1; attributes ["inline"; "other"]; + +const y : int = 1; attributes ["inline"; "other"] function bar (const b : int) : int is - block { - function test (const z : int) : int is begin - const r : int = 2 + b + z - end with r; - attributes ["inline"; "foo"; "bar"]; - } with test(b); + begin + function test (const z : int) : int is + begin + const r : int = 2 + b + z + end with r; + attributes ["inline"; "foo"; "bar"] + end with test(b) diff --git a/src/test/contracts/attributes.mligo b/src/test/contracts/attributes.mligo index 72038110f..8f582a725 100644 --- a/src/test/contracts/attributes.mligo +++ b/src/test/contracts/attributes.mligo @@ -1,13 +1,10 @@ let x = 1 [@@inline] -let foo (a: int): int = ( - let test = 2 + a 
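(* A minimal standalone sketch (not from the patch) of the hooking fold
   shared by [simpl_statement_list] and [simpl_declaration_list]: the list
   is reversed so that an [attributes [...]] marker, which follows its
   declaration in the source, ends up just before it; the fold then
   attaches each marker to the declaration it follows and silently drops
   detached markers.  The [decl] type below is a hypothetical stand-in for
   the Raw AST. *)

type decl =
  Attr  of string list               (* attributes ["inline"; ...]        *)
| Type  of string                    (* type declaration (no attributes)  *)
| Const of string * string list      (* constant: name, attributes        *)
| Fun   of string * string list      (* function: name, attributes        *)

let hook_attributes (decls : decl list) : decl list =
  let rec hook acc = function
    [] -> acc
  | [Attr _] -> acc                          (* detached: erased *)
  | Attr _ :: (Attr _ :: _ as rest) -> hook acc rest
  | Attr attrs :: Const (name, _) :: rest ->
      hook acc (Const (name, attrs) :: rest)
  | Attr attrs :: Fun (name, _) :: rest ->
      hook acc (Fun (name, attrs) :: rest)
  | Attr _ :: rest -> hook acc rest          (* detached: erased *)
  | decl :: rest -> hook (decl :: acc) rest
  in hook [] (List.rev decls)

let () =
  match hook_attributes
          [Type "t"; Const ("x", []); Attr ["inline"];
           Fun ("f", []); Attr ["inline"; "foo"]]
  with
    [Type "t"; Const ("x", ["inline"]); Fun ("f", ["inline"; "foo"])] -> ()
  | _ -> assert false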
[@@inline] in - test -) [@@inline] +let foo (a: int): int = + (let test = 2 + a [@@inline] in test) [@@inline] let y = 1 [@@inline][@@other] -let bar (b: int): int = ( - let test = fun (z: int) -> 2 + b + z [@@inline][@@foo][@@bar] in - test b -) \ No newline at end of file +let bar (b: int): int = + let test = fun (z: int) -> 2 + b + z [@@inline][@@foo][@@bar] + in test b diff --git a/src/test/contracts/attributes.religo b/src/test/contracts/attributes.religo index c08916813..33062220f 100644 --- a/src/test/contracts/attributes.religo +++ b/src/test/contracts/attributes.religo @@ -2,7 +2,7 @@ let x = 1; [@inline] -let foo = (a: int): int => { +let foo = (a: int): int => { [@inline] let test = 2 + a; test; @@ -11,8 +11,8 @@ let foo = (a: int): int => { [@inline][@other] let y = 1; -let bar = (b: int): int => { +let bar = (b: int): int => { [@inline][@foo][@bar] let test = (z: int) => 2 + b + z; test(b); -}; \ No newline at end of file +}; diff --git a/src/test/integration_tests.ml b/src/test/integration_tests.ml index 394bcc4f0..afd93bb74 100644 --- a/src/test/integration_tests.ml +++ b/src/test/integration_tests.ml @@ -1846,31 +1846,31 @@ let deep_access_ligo () : unit result = let make_expected = e_string "one" in expect_eq program "nested_record" make_input make_expected in ok () - + let attributes_ligo () : unit result = let%bind program = type_file "./contracts/attributes.ligo" in - let%bind () = - let input = e_int 3 in + let%bind () = + let input = e_int 3 in let expected = e_int 5 in - expect_eq program "foo" input expected + expect_eq program "foo" input expected in ok () let attributes_mligo () : unit result = let%bind program = mtype_file "./contracts/attributes.mligo" in - let%bind () = - let input = e_int 3 in + let%bind () = + let input = e_int 3 in let expected = e_int 5 in - expect_eq program "foo" input expected + expect_eq program "foo" input expected in ok () let attributes_religo () : unit result = let%bind program = retype_file "./contracts/attributes.religo" in - let%bind () = - let input = e_int 3 in + let%bind () = + let input = e_int 3 in let expected = e_int 5 in - expect_eq program "foo" input expected + expect_eq program "foo" input expected in ok () diff --git a/vendors/ligo-utils/simple-utils/pos.ml b/vendors/ligo-utils/simple-utils/pos.ml index 4c4677bd8..cedf26050 100644 --- a/vendors/ligo-utils/simple-utils/pos.ml +++ b/vendors/ligo-utils/simple-utils/pos.ml @@ -56,11 +56,8 @@ let make ~byte ~point_num ~point_bol = method set_offset offset = {< byte = Lexing.{byte with pos_cnum = byte.pos_bol + offset} >} - method set ?file ~line ~offset = - let pos = - match file with - None -> self - | Some name -> self#set_file name in + method set ~file ~line ~offset = + let pos = self#set_file file in let pos = pos#set_line line in let pos = pos#set_offset offset in pos @@ -136,7 +133,7 @@ let from_byte byte = let ghost = make ~byte:Lexing.dummy_pos ~point_num:(-1) ~point_bol:(-1) -let min file = +let min ~file = let pos = make ~byte:Lexing.dummy_pos ~point_num:0 ~point_bol:0 in pos#set_file file diff --git a/vendors/ligo-utils/simple-utils/pos.mli b/vendors/ligo-utils/simple-utils/pos.mli index 376cb3202..70be477bd 100644 --- a/vendors/ligo-utils/simple-utils/pos.mli +++ b/vendors/ligo-utils/simple-utils/pos.mli @@ -58,17 +58,7 @@ {li The call [pos#byte_offset] is the offset of the position [pos] since the begininng of the file, counted in bytes.}} *) - -type invalid_pos = [ - `Invalid_line -| `Invalid_offset -] - -type invalid_line = `Invalid_line -type 
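(* A minimal standalone sketch (not from the patch) of the revised [Pos]
   constructor: [Pos.min] now takes the source file as a labelled argument,
   so a minimal position is tied to a file from the start instead of being
   patched afterwards with [#set_file].  A record with illustrative fields
   stands in for the real object type. *)

type pos_sketch = {file : string; line : int; offset : int}

let min_sketch ~file = {file; line = 1; offset = 0}

let () =
  let pos = min_sketch ~file:"attributes.ligo" in
  assert (pos.file = "attributes.ligo" && pos.line = 1 && pos.offset = 0)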
invalid_offset = `Invalid_offset -type invalid_nl = `Invalid_newline - -type t = private < +type t = < (* Payload *) byte : Lexing.position; @@ -80,14 +70,12 @@ type t = private < (* Setters *) set_file : string -> t; - set_line : int -> (t, invalid_line) Stdlib.result; - set_offset : int -> (t, invalid_offset) Stdlib.result; + set_line : int -> t; + set_offset : int -> t; - set : ?file:string -> line:int -> offset:int -> - (t, invalid_pos) Stdlib.result; + set : file:string -> line:int -> offset:int -> t; - (* String must be "\n" or "\c\r" *) - new_line : string -> (t, invalid_newline) Stdlib.result + new_line : string -> t; (* String must be "\n" or "\c\r" *) add_nl : t; shift_bytes : int -> t; @@ -119,11 +107,9 @@ type pos = t (** {1 Constructors} *) val make : - byte:Lexing.position -> point_num:int -> point_bol:int -> - (t, invalid_pos) Stdlin.result + byte:Lexing.position -> point_num:int -> point_bol:int -> t -val from_byte : - Lexing.position -> (t, invalid_pos) Stdlib.result +val from_byte : Lexing.position -> t (** {1 Special positions} *) diff --git a/vendors/ligo-utils/simple-utils/region.ml b/vendors/ligo-utils/simple-utils/region.ml index 21e47a605..a90c51604 100644 --- a/vendors/ligo-utils/simple-utils/region.ml +++ b/vendors/ligo-utils/simple-utils/region.ml @@ -120,7 +120,7 @@ let ghost = make ~start:Pos.ghost ~stop:Pos.ghost let wrap_ghost value = {value ; region = ghost} -let min = make ~start:Pos.min ~stop:Pos.min +let min ~file = make ~start:(Pos.min ~file) ~stop:(Pos.min ~file) (* Comparisons *) diff --git a/vendors/ligo-utils/simple-utils/region.mli b/vendors/ligo-utils/simple-utils/region.mli index f9e1d5a53..378830350 100644 --- a/vendors/ligo-utils/simple-utils/region.mli +++ b/vendors/ligo-utils/simple-utils/region.mli @@ -56,7 +56,7 @@ convention as [to_string], except that the resulting string is shorter (usually for debugging or tracing).}} *) -type t = private < +type t = < start : Pos.t; stop : Pos.t; @@ -119,7 +119,7 @@ val wrap_ghost : 'a -> 'a reg (** Occasionnally, we may need a minimum region. It is here made of two minimal positions. *) -val min : t +val min : file:string -> t (** {1 Comparisons} *) diff --git a/vendors/ligo-utils/simple-utils/trace.ml b/vendors/ligo-utils/simple-utils/trace.ml index dc80894d4..10efb5401 100644 --- a/vendors/ligo-utils/simple-utils/trace.ml +++ b/vendors/ligo-utils/simple-utils/trace.ml @@ -1,4 +1,4 @@ -(** Trace tutorial +(* Trace tutorial This module guides the reader through the writing of a simplified version of the trace monad [result], and the definition of a few @@ -6,53 +6,53 @@ *) module Trace_tutorial = struct - (** The trace monad is fairly similar to the predefined [option] - type. It is an instance of the predefined [result] type. *) + (* The trace monad is fairly similar to the predefined [option] + type. It is an instance of the predefined [result] type. *) type annotation = string type error = string - (** The type ['a result] is used by the trace monad to both model an - expected value of type ['a] or the failure to obtain it, instead - of working directly with ['a] values and handling separately - errors, for example by means of exceptions. (See the type [('a,'b) - result] in the module [Pervasives] of the OCaml system for a - comparable approach to error handling.) 
+ (* The type ['a result] is used by the trace monad to both model an + expected value of type ['a] or the failure to obtain it, instead + of working directly with ['a] values and handling separately + errors, for example by means of exceptions. (See the type [('a,'b) + result] in the module [Pervasives] of the OCaml system for a + comparable approach to error handling.) + + The type ['a result] carries either a value of type ['a], with a + list of annotations (information about past successful + computations), or it is a list of errors accumulated so far. + The former case is denoted by the data constructor [Ok], and the + second by [Error]. *) - The type ['a result] carries either a value of type ['a], with a - list of annotations (information about past successful - computations), or it is a list of errors accumulated so far. - The former case is denoted by the data constructor [Ok], and the - second by [Error]. - *) type nonrec 'a result = ('a * annotation list, error list) result (* = Ok of 'a * annotation list | Error of error list *) - (** The function [divide_trace] shows the basic use of the trace - monad. - *) + (* The function [divide_trace] shows the basic use of the trace + monad. *) + let divide_trace a b = if b = 0 then Error [Printf.sprintf "division by zero: %d/%d" a b] else Ok (a/b, []) - (** The function [divide_three] shows that when composing two - functions, if the first call fails, the error is passed along - and the second call is not evaluated. (A pattern called - "error-passing style"). - *) + (* The function [divide_three] shows that when composing two + functions, if the first call fails, the error is passed along + and the second call is not evaluated. (A pattern called + "error-passing style"). *) + let divide_three a b c = match divide_trace a b with Ok (a_div_b , _) -> divide_trace a_div_b c | errors -> errors - (** The function [divide_three_annot] shows that when composing two - functions, if both calls are successful, the lists of - annotations are joined. - *) + (* The function [divide_three_annot] shows that when composing two + functions, if both calls are successful, the lists of + annotations are joined. *) + let divide_three_annot a b c = match divide_trace a b with Ok (a_div_b, annot1) -> ( @@ -62,21 +62,19 @@ module Trace_tutorial = struct | errors -> errors) | errors -> errors - (** The systematic matching of the result of each call in a function + (* The systematic matching of the result of each call in a function composition is bulky, so we define a [bind] function which takes a function [f: 'a -> 'b result] and applies it to a current ['a result] (not ['a]). - {ul - {li If the current result is an error, then [bind] - returns that same error without calling [f];} - {li otherwise [bind] unwraps the [Ok] of the current result - and calls [f] on it: - {ul - {li That call itself may return an error;} - {li if not, [bind] combines the annotations and returns the last - result.}}}} - *) + * If the current result is an error, then [bind] + returns that same error without calling [f]; + + * otherwise [bind] unwraps the [Ok] of the current result + and calls [f] on it: + * That call itself may return an error;} + * if not, [bind] combines the annotations and returns the + last result. 
*) let bind (f: 'a -> 'b result) : 'a result -> 'b result = function Ok (x, annot) -> ( @@ -85,64 +83,64 @@ module Trace_tutorial = struct | errors -> ignore annot; errors) | Error _ as e -> e - (** The function [divide_three_bind] is equivalent to the verbose - [divide_three] above, but makes use of [bind]. - *) + (* The function [divide_three_bind] is equivalent to the verbose + [divide_three] above, but makes use of [bind]. *) + let divide_three_bind a b c = let maybe_a_div_b = divide_trace a b in let continuation a_div_b = divide_trace a_div_b c in bind continuation maybe_a_div_b - (** The operator [(>>?)] is a redefinition of [bind] that makes the - program shorter, at the cost of a slightly - awkward reading because the two parameters are swapped. - *) + (* The operator [(>>?)] is a redefinition of [bind] that makes the + program shorter, at the cost of a slightly + awkward reading because the two parameters are swapped. *) + let (>>?) x f = bind f x - (** The function [divide_three_bind_symbol] is equivalent to - [divide_three_bind], but makes use of the operator [(>>?)]. - *) + (* The function [divide_three_bind_symbol] is equivalent to + [divide_three_bind], but makes use of the operator [(>>?)]. *) + let divide_three_bind_symbol a b c = let maybe_a_div_b = divide_trace a b in let continuation a_div_b = divide_trace a_div_b c in maybe_a_div_b >>? continuation - (** The function [divide_three_bind_symbol'] is equivalent to - [divide_three_bind_symbol], where the two temporary [let] - definitions are inlined for a more compact reading. - *) + (* The function [divide_three_bind_symbol'] is equivalent to + [divide_three_bind_symbol], where the two temporary [let] + definitions are inlined for a more compact reading. *) + let divide_three_bind_symbol' a b c = divide_trace a b >>? (fun a_div_b -> divide_trace a_div_b c) - (** This is now fairly legible, but chaining many such functions is - not the usual way of writing code. We use the PPX extension to - the OCaml compiler [ppx_let] to add some syntactic sugar. - The extension framework PPX is enabled by adding the following - lines inside the section [(library ...)] or [(executable ...)] - of the [dune] file for the project that uses [ppx_let], like so: - [(preprocess - (pps simple-utils.ppx_let_generalized))] - The extension [ppx_let] requires the module [Let_syntax] to be - defined. - *) + (* This is now fairly legible, but chaining many such functions is + not the usual way of writing code. We use the PPX extension to + the OCaml compiler [ppx_let] to add some syntactic sugar. + The extension framework PPX is enabled by adding the following + lines inside the section [(library ...)] or [(executable ...)] + of the [dune] file for the project that uses [ppx_let], like so: + [(preprocess + (pps simple-utils.ppx_let_generalized))] + The extension [ppx_let] requires the module [Let_syntax] to be + defined. *) + module Let_syntax = struct let bind m ~f = m >>? f module Open_on_rhs_bind = struct end end - (** The function [divide_three_bind_ppx_let] is equivalent to the - function [divide_three_bind_symbol']. The only difference is - that the module [Open_on_rhs_bind] is implicitly opened around - the expression on the righ-hand side of the [=] sign, namely - [divide_trace a b]. - *) + (* The function [divide_three_bind_ppx_let] is equivalent to the + function [divide_three_bind_symbol']. 
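(* A self-contained replay (not from the patch) of the [(>>?)] chaining
   described in the comments above, runnable without [ppx_let]; the
   sugared form [let%bind x = e in body] stands for [e >>? fun x -> body],
   which is written out explicitly here.  The annotation list carried by
   the tutorial's [result] is dropped for brevity. *)

let divide a b =
  if b = 0 then Error ["division by zero"] else Ok (a / b)

let ( >>? ) x f =
  match x with Ok v -> f v | Error e -> Error e

let divide_three a b c =
  divide a b >>? fun a_div_b -> divide a_div_b c

let () =
  assert (divide_three 12 3 2 = Ok 2);
  assert (divide_three 12 0 2 = Error ["division by zero"])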
The only difference is + that the module [Open_on_rhs_bind] is implicitly opened around + the expression on the righ-hand side of the [=] sign, namely + [divide_trace a b]. *) + let divide_three_bind_ppx_let a b c = let%bind a_div_b = divide_trace a b in divide_trace a_div_b c (** The function [divide_many_bind_ppx_let] shows how well this - notation composes. - *) + notation composes. *) + let divide_many_bind_ppx_let a b c d e f = let x = a in let%bind x = divide_trace x b in @@ -153,34 +151,35 @@ module Trace_tutorial = struct in Ok (x, []) (** The function [ok] is a shorthand for an [Ok] without - annotations. - *) + annotations. *) + let ok x = Ok (x, []) - (** The function [map] lifts a regular ['a -> 'b] function on values to - a function on results, of type ['a result -> 'b result]. - *) + (* The function [map] lifts a regular ['a -> 'b] function on values to + a function on results, of type ['a result -> 'b result]. *) + let map f = function Ok (x, annotations) -> Ok (f x, annotations) | e -> e - (** The function [bind_list] turns a list of results of type [('a + (* The function [bind_list] turns a list of results of type [('a result) list] into a result of list, of type [('a list) result], as follows. - {ul - {li If the list only contains [Ok] values, it strips the [Ok] - of each element and returns that list wrapped with [Ok].} - {li Otherwise, one or more of the elements of the input list - is [Error], then [bind_list] returns the first error in the - list.}} + * If the list only contains [Ok] values, it strips the [Ok] + of each element and returns that list wrapped with [Ok].} + + * Otherwise, one or more of the elements of the input list + is [Error], then [bind_list] returns the first error in the + list. *) + let rec bind_list = function [] -> ok [] | hd::tl -> hd >>? fun hd -> bind_list tl >>? fun tl -> ok @@ hd::tl - (** A major feature of [Trace] is that it enables having a stack of + (* A major feature of [Trace] is that it enables having a stack of errors (that should act as a simplified stack frame), rather than a unique error. It is done by using the function [trace]. For instance, let's say that you have a function that @@ -198,17 +197,17 @@ module Trace_tutorial = struct trace (simple_error "error getting key") @@ get key map in ...] - And this will pass along the error triggered by [get key map]. - *) + And this will pass along the error triggered by [get key map]. *) + let trace err = function Error e -> Error (err::e) | ok -> ok - (** The real trace monad is very similar to the one that we have + (* The real trace monad is very similar to the one that we have defined above. The main difference is that the errors and annotations are structured data (instead of plain strings) and - are generated lazily. - *) + are generated lazily. *) + let the_end = "End of the tutorial." end (* end Trace_tutorial. *) @@ -239,8 +238,7 @@ module JSON_string_utils = struct match assoc j with None -> j | Some assoc -> `Assoc ( - List.map (fun (k', v') -> (k', if k = k' then v else v')) assoc - ) + List.map (fun (k', v') -> (k', if k = k' then v else v')) assoc) let swap f l r = f r l @@ -264,38 +262,39 @@ module JSON_string_utils = struct let (||) l r = l |> default r let (|^) = bind2 (^) - end type 'a thunk = unit -> 'a -(** Errors are encoded in JSON. This is because different libraries - will implement their own helpers, and we do not want to hardcode - in their type how they are supposed to interact. - *) +(* Errors are encoded in JSON. 
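(* A self-contained replay (not from the patch) of two combinators shown
   above: [bind_list] turns a list of results into a result of list,
   stopping at the first error, and [trace] pushes one more frame onto the
   error stack.  Plain string errors stand in for the structured, lazily
   built errors of the real module. *)

let ok x = Ok x
let ( >>? ) x f =
  match x with Ok v -> f v | Error e -> Error e

let rec bind_list = function
  [] -> ok []
| hd :: tl -> hd >>? fun hd -> bind_list tl >>? fun tl -> ok (hd :: tl)

let trace frame = function
  Error stack -> Error (frame :: stack)
| ok -> ok

let () =
  assert (bind_list [ok 1; ok 2] = Ok [1; 2]);
  assert (bind_list [ok 1; Error ["boom"]; Error ["later"]] = Error ["boom"]);
  assert (trace "while simplifying" (Error ["boom"])
          = Error ["while simplifying"; "boom"])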
This is because different libraries + will implement their own helpers, and we do not want to hardcode + in their type how they are supposed to interact. *) + type error = J.t -(** Thunks are used because computing some errors can be costly, and - we do not want to spend most of our time building errors. Instead, - their computation is deferred. - *) +(* Thunks are used because computing some errors can be costly, and + we do not want to spend most of our time building errors. Instead, + their computation is deferred.*) + type error_thunk = error thunk -(** Annotations should be used in debug mode to aggregate information - about some value history. Where it was produced, when it was - modified, etc. It is currently not being used. *) +(* Annotations should be used in debug mode to aggregate information + about some value history. Where it was produced, when it was + modified, etc. It is currently not being used. *) + type annotation = J.t -(** Even in debug mode, building annotations can be quite +(* Even in debug mode, building annotations can be quite resource-intensive. Instead, a thunk is passed, that is computed - only when debug information is queried (typically before a print). - *) + only when debug information is queried (typically before a print).*) + type annotation_thunk = annotation thunk -(** Types of traced elements. It might be good to rename it [trace] at - some point. - *) +(* Types of traced elements. It might be good to rename it [trace] at + some point. *) + type nonrec 'a result = ('a * annotation_thunk list, error_thunk) result + (* = Ok of 'a * annotation_thunk list | Error of error_thunk @@ -308,29 +307,28 @@ let ok x = Ok (x, []) let fail err = Error err -(** {1 Monadic operators} *) +(* Monadic operators *) let bind f = function - Ok (x, ann) -> ( + Error _ as e -> e +| Ok (x, ann) -> match f x with Ok (x', ann') -> Ok (x', ann' @ ann) - | Error _ as e' -> ignore ann; e') -| Error _ as e -> e + | Error _ as e' -> ignore ann; e' let map f = function Ok (x, annotations) -> Ok (f x, annotations) | Error _ as e -> e -(** The lexical convention usually adopted for the bind function is +(* The lexical convention usually adopted for the bind function is [>>=], but ours comes from the Tezos code base, where the [result] bind is [>>?], and [Lwt]'s (threading library) is [>>=], and the - combination of both is [>>=?]. - *) + combination of both is [>>=?]. *) + let (>>?) x f = bind f x let (>>|?) x f = map f x -(** - Used by PPX_let, an OCaml preprocessor. +(* Used by PPX_let, an OCaml preprocessor. What it does is that, when you only care about the case where a result isn't an error, instead of writing: [ @@ -344,21 +342,20 @@ let (>>|?) x f = map f x ] This is much more typical of OCaml. This makes the code more readable, easy to write and refactor. It is used pervasively in - LIGO. - *) + LIGO. *) + module Let_syntax = struct let bind m ~f = m >>? f module Open_on_rhs_bind = struct end end +(* Build a thunk from a constant. *) -(** Build a thunk from a constant. - *) let thunk x () = x -(** Build a standard error, with a title, a message, an error code and - some data. - *) +(* Build a standard error, with a title, a message, an error code and + some data. 
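(* A self-contained illustration (not from the patch) of why errors are
   thunked: building a costly diagnostic is deferred until the error is
   actually rendered, so the success path pays nothing.  The counter below
   shows that the thunk only runs on the error path. *)

let forced = ref 0

let expensive_error () =
  incr forced;
  Printf.sprintf "expensive diagnostic (forced %d time(s))" !forced

let check x : (int, unit -> string) result =
  if x >= 0 then Ok x else Error expensive_error

let () =
  assert (check 42 = Ok 42);
  assert (!forced = 0);                       (* never forced on success *)
  (match check (-1) with
     Error thunk -> ignore (thunk ())         (* forced only here *)
   | Ok _ -> assert false);
  assert (!forced = 1)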
*) + let mk_error ?(error_code : int thunk option) ?(message : string thunk option) ?(data : (string * string thunk) list option) @@ -373,9 +370,11 @@ let mk_error let type' = Some ("type" , `String "error") in let children' = Some ("children" , `List children) in let infos' = Some ("infos" , `List infos) in - `Assoc (X_option.collapse_list [ error_code' ; title' ; message' ; data' ; type' ; children' ; infos' ]) + `Assoc (X_option.collapse_list + [error_code'; title'; message'; data'; type'; children'; infos']) -let error ?data ?error_code ?children ?infos title message () = mk_error ?data ?error_code ?children ?infos ~title:(title) ~message:(message) () +let error ?data ?error_code ?children ?infos title message () = + mk_error ?data ?error_code ?children ?infos ~title ~message () let prepend_child = fun child err -> let open JSON_string_utils in @@ -389,9 +388,8 @@ let patch_children = fun children err -> let open JSON_string_utils in patch err "children" (`List (List.map (fun f -> f ()) children)) -(** - Build a standard info, with a title, a message, an info code and some data. -*) +(* Build a standard info, with a title, a message, an info code and some data. *) + let mk_info ?(info_code : int thunk option) ?(message : string thunk option) ?(data : (string * string thunk) list option) @@ -405,7 +403,8 @@ let mk_info let type' = Some ("type" , `String "info") in `Assoc (X_option.collapse_list [ error_code' ; title' ; message' ; data' ; type' ]) -let info ?data ?info_code title message () = mk_info ?data ?info_code ~title:(title) ~message:(message) () +let info ?data ?info_code title message () = + mk_info ?data ?info_code ~title ~message () let prepend_info = fun info err -> let open JSON_string_utils in @@ -416,32 +415,31 @@ let prepend_info = fun info err -> patch err "infos" (`List infos) -(** Helpers that ideally should not be used in production. -*) +(* Helpers that ideally should not be used in production. *) + let simple_error str () = mk_error ~title:(thunk str) () let simple_info str () = mk_info ~title:(thunk str) () let simple_fail str = fail @@ simple_error str let internal_assertion_failure str = simple_error ("assertion failed: " ^ str) -(** To be used when you only want to signal an error. It can be useful - when followed by [trace_strong]. - *) +(* To be used when you only want to signal an error. It can be useful + when followed by [trace_strong]. *) + let dummy_fail = simple_fail "dummy" let trace info = function Ok _ as o -> o | Error err -> Error (fun () -> prepend_info (info ()) (err ())) -(** Erase the current error stack, and replace it by the given +(* Erase the current error stack, and replace it by the given error. It's useful when using [Assert] and you want to discard its - autogenerated message. - *) + autogenerated message. *) + let trace_strong err = function Ok _ as o -> o | Error _ -> Error err -(** - Sometimes, when you have a list of potentially erroneous elements, you need +(* Sometimes, when you have a list of potentially erroneous elements, you need to retrieve all the errors, instead of just the first one. In that case, do: [let type_list lst = let%bind lst' = @@ -451,8 +449,7 @@ let trace_strong err = function Where before you would have written: [let type_list lst = let%bind lst' = bind_map_list type_element lst in - ...] -*) + ...] 
*) let trace_list err lst = let oks = let aux = function @@ -468,30 +465,24 @@ let trace_list err lst = | [] -> ok oks | errs -> fail (fun () -> patch_children errs err) -(** - Trace, but with an error which generation may itself fail. -*) +(* Trace, but with an error which generation may itself fail. *) + let trace_r err_thunk_may_fail = function - | Ok _ as o -> o - | Error _ -> ( - match err_thunk_may_fail () with - | Ok (err, annotations) -> ignore annotations; Error (err) - | Error errors_while_generating_error -> - (* TODO: the complexity could be O(n*n) in the worst case, - this should use some catenable lists. *) - Error (errors_while_generating_error) - ) + Ok _ as o -> o +| Error _ -> + match err_thunk_may_fail () with + Ok (err, annotations) -> ignore annotations; Error (err) + | Error errors_while_generating_error -> + (* TODO: the complexity could be O(n*n) in the worst case, + this should use some catenable lists. *) + Error (errors_while_generating_error) -(** - `trace_f f error` yields a function that acts the same as `f`, but with an - error frame that has one more error. -*) -let trace_f f error x = - trace error @@ f x +(* [trace_f f error] yields a function that acts the same as `f`, but with an + error frame that has one more error. *) + +let trace_f f error x = trace error @@ f x +(* Same, but for functions with 2 parameters. *) -(** - Same, but for functions with 2 parameters. -*) let trace_f_2 f error x y = trace error @@ f x y @@ -520,8 +511,8 @@ let to_option = function Convert an option to a result, with a given error if the parameter is None. *) let trace_option error = function - | None -> fail error - | Some s -> ok s + None -> fail error +| Some s -> ok s (** Utilities to interact with other data-structure. [bind_t] takes an ['a result t] and makes a ['a t result] out of it. It "lifts" the @@ -535,22 +526,14 @@ let trace_option error = function *) let bind_map_option f = function - | None -> ok None - | Some s -> f s >>? fun x -> ok (Some x) + None -> ok None +| Some s -> f s >>? fun x -> ok (Some x) let rec bind_list = function - | [] -> ok [] - | hd :: tl -> ( - hd >>? fun hd -> - bind_list tl >>? fun tl -> - ok @@ hd :: tl - ) - -let bind_ne_list = fun (hd , tl) -> - hd >>? fun hd -> - bind_list tl >>? fun tl -> - ok @@ (hd , tl) - + [] -> ok [] +| hd::tl -> hd >>? fun hd -> bind_list tl >>? fun tl -> ok @@ hd :: tl +let bind_ne_list (hd, tl) = + hd >>? fun hd -> bind_list tl >>? fun tl -> ok @@ (hd, tl) let bind_smap (s:_ X_map.String.t) = let open X_map.String in let aux k v prev =
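(* A self-contained replay (not from the patch) of the option/result
   bridges defined above: [trace_option] converts an option into a result
   with the supplied error, and [bind_map_option] maps a result-producing
   function under an option.  Plain string errors stand in for the thunked
   ones of the real module. *)

let ok x = Ok x
let fail err = Error err
let ( >>? ) x f =
  match x with Ok v -> f v | Error e -> Error e

let trace_option error = function
  None -> fail error
| Some s -> ok s

let bind_map_option f = function
  None -> ok None
| Some s -> f s >>? fun x -> ok (Some x)

let () =
  assert (trace_option "not found" (Some 3) = Ok 3);
  assert (trace_option "not found" None = Error "not found");
  assert (bind_map_option (fun n -> ok (n + 1)) (Some 1) = Ok (Some 2));
  assert (bind_map_option (fun n -> ok (n + 1)) None = Ok None)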