diff --git a/CHANGES.md b/CHANGES.md index ef23a392c..45df48866 100644 --- a/CHANGES.md +++ b/CHANGES.md @@ -10,6 +10,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/). - `simplify` now fails if the goal cannot be simplified. - Hover messages are now formated in Markdown. Position of the error is removed from diagnostics when the error occurs in the file currently open in the editor. +- Replace the menhir-based lp parser by a parser written by hand to provide more helpful error messages while being equally efficient. ## 3.0.0 (2025-07-16) diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index fdd32e49e..4178986f6 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -66,8 +66,9 @@ Changing the syntax of Lambdapi When changing the syntax of Lambdapi, make sure to update the following files: +- `doc/lambdapi.bnf` - `src/core/lpLexer.ml` -- `src/core/lpParser.mly` +- `src/core/lpParser.ml` - `src/core/pretty.ml` - `src/core/print.ml` - `editors/vim/syntax/lambdapi.vim` @@ -76,8 +77,6 @@ following files: - `editors/vscode/lp.configuration.json` (comments configuration), - `editors/vscode/syntaxes/lp.tmLanguage.json` (syntax highlighting), - `misc/lambdapi.tex` -- `doc/Makefile.bnf` -- `doc/lambdapi.bnf` by doing `make bnf` - the User Manual files in the `doc/` repository (do `make doc` to generate and check the Sphynx documentation). 
Adding a tactic diff --git a/Makefile b/Makefile index bc1a60b25..3c017b996 100644 --- a/Makefile +++ b/Makefile @@ -15,13 +15,9 @@ odoc: @dune build --only-packages lambdapi @doc .PHONY: doc -doc: bnf +doc: $(MAKE) -C doc html -.PHONY: bnf -bnf: - $(MAKE) -C doc -f Makefile.bnf - #### Unit tests and sanity check ############################################# .PHONY: sanity_check diff --git a/doc/Makefile.bnf b/doc/Makefile.bnf deleted file mode 100644 index 599c96a93..000000000 --- a/doc/Makefile.bnf +++ /dev/null @@ -1,110 +0,0 @@ -default: lambdapi.bnf - -BNF_GEN := obelisk - -lambdapi.bnf: Makefile.bnf ../src/parsing/lpParser.mly - printf "; DO NOT EDIT THIS FILE MANUALLY\n" > $@ - printf "; It is generated automatically with 'make bnf'.\n\n" >> $@ - printf "QID ::= [UID \".\"]+ UID\n\n" >> $@ - printf " ::= \"on\"\n | \"off\"\n\n" >> $@ - printf " ::= \"left\"\n | \"right\"\n\n" >> $@ - printf " ::= \"assert\"\n | \"assertnot\"\n\n" >> $@ - $(BNF_GEN) ../src/parsing/lpParser.mly | sed \ - -e 's/SWITCH//g' \ - -e 's/ABORT/"abort"/g' \ - -e 's/ADMITTED/"admitted"/g' \ - -e 's/ADMIT/"admit"/g' \ - -e 's/APPLY/"apply"/g' \ - -e 's/ASSERT//g' \ - -e 's/ASSOCIATIVE/"associative"/g' \ - -e 's/ASSUME/"assume"/g' \ - -e 's/ASSIGN/"≔"/g' \ - -e 's/AS/"as"/g' \ - -e 's/BEGIN/"begin"/g' \ - -e 's/BUILTIN/"builtin"/g' \ - -e 's/CHANGE/"change"/g' \ - -e 's/COERCE_RULE/"coerce_rule"/g' \ - -e 's/COMMUTATIVE/"commutative"/g' \ - -e 's/COMPUTE/"compute"/g' \ - -e 's/CONSTANT/"constant"/g' \ - -e 's/DEBUG_FLAGS/("+"|"-") +/g' \ - -e 's/DEBUG/"debug"/g' \ - -e 's/DOT/"."/g' \ - -e 's/END/"end"/g' \ - -e 's/EVAL/"eval"/g' \ - -e 's/FAIL/"fail"/g' \ - -e 's/FLAG/"flag"/g' \ - -e 's/GENERALIZE/"generalize"/g' \ - -e 's/HAVE/"have"/g' \ - -e 's/INDUCTION/"induction"/g' \ - -e 's/INDUCTIVE/"inductive"/g' \ - -e 's/INFIX/"infix"/g' \ - -e 's/INJECTIVE/"injective"/g' \ - -e 's/LET/"let"/g' \ - -e 's/NEG_NAT/"-"/g' \ - -e 's/NAT//g' \ - -e 's/NOTATION/"notation"/g' \ - -e 
's/OPEN/"open"/g' \ - -e 's/OPAQUE/"opaque"/g' \ - -e 's/ORELSE/"orelse"/g' \ - -e 's/POSTFIX/"postfix"/g' \ - -e 's/PREFIX/"prefix"/g' \ - -e 's/PRINT/"print"/g' \ - -e 's/PRIVATE/"private"/g' \ - -e 's/PROOFTERM/"proofterm"/g' \ - -e 's/PROTECTED/"protected"/g' \ - -e 's/PROVER_TIMEOUT/"prover_timeout"/g' \ - -e 's/PROVER/"prover"/g' \ - -e 's/QUANTIFIER/"quantifier"/g' \ - -e 's/REFLEXIVITY/"reflexivity"/g' \ - -e 's/REFINE/"refine"/g' \ - -e 's/REMOVE/"remove"/g' \ - -e 's/REPEAT/"repeat"/g' \ - -e 's/REQUIRE/"require"/g' \ - -e 's/RESOLVE/"resolve"/g' \ - -e 's/REWRITE/"rewrite"/g' \ - -e 's/SET/"set"/g' \ - -e 's/UNIF_RULE/"unif_rule"/g' \ - -e 's/RULE/"rule"/g' \ - -e 's/SIDE//g' \ - -e 's/SEARCH/"search"/g' \ - -e 's/SEQUENTIAL/"sequential"/g' \ - -e 's/SIMPLIFY/"simplify"/g' \ - -e 's/SOLVE/"solve"/g' \ - -e 's/STRINGLIT//g' \ - -e 's/SYMBOL/"symbol"/g' \ - -e 's/SYMMETRY/"symmetry"/g' \ - -e 's/TRY/"try"/g' \ - -e 's/TYPE_QUERY/"type"/g' \ - -e 's/TYPE_TERM/"TYPE"/g' \ - -e 's/VERBOSE/"verbose"/g' \ - -e 's/WHY3/"why3"/g' \ - -e 's/WITH/"with"/g' \ - -e 's/INT//g' \ - -e 's/IN/"in"/g' \ - -e 's/FLOAT//g' \ - -e 's/HOOK_ARROW/"↪"/g' \ - -e 's/ARROW/"→"/g' \ - -e 's/BACKQUOTE/"`"/g' \ - -e 's/COMMA/","/g' \ - -e 's/SEMICOLON/";"/g' \ - -e 's/COLON/":"/g' \ - -e 's/EQUIV/"≡"/g' \ - -e 's/LAMBDA/"λ"/g' \ - -e 's/L_CU_BRACKET/"{"/g' \ - -e 's/L_PAREN/"("/g' \ - -e 's/L_SQ_BRACKET/"["/g' \ - -e 's/PI/"Π"/g' \ - -e 's/R_CU_BRACKET/"}"/g' \ - -e 's/R_PAREN/")"/g' \ - -e 's/R_SQ_BRACKET/"]"/g' \ - -e 's/TURNSTILE/"⊢"/g' \ - -e 's/VBAR/"|"/g' \ - -e 's/UNDERSCORE/"_"/g' \ - -e 's/UID_EXPL/"@" UID/g' \ - -e 's/UID_META/"?" 
UID/g' \ - -e 's/UID_PATT/"$$" UID/g' \ - -e 's/QID_EXPL/"@" QID/g' \ - -e 's/| EOF//g' \ - -e 's/"rule" /"rule" "off"/' \ - >> $@ diff --git a/doc/lambdapi.bnf b/doc/lambdapi.bnf index 950debeec..92329d0c4 100644 --- a/doc/lambdapi.bnf +++ b/doc/lambdapi.bnf @@ -1,51 +1,32 @@ -; DO NOT EDIT THIS FILE MANUALLY -; It is generated automatically with 'make bnf'. + ::= [ "."]+ -QID ::= [UID "."]+ UID + ::= | - ::= "on" - | "off" + ::= "on" | "off" - ::= "left" - | "right" + ::= "left" | "right" ::= "assert" | "assertnot" - ::= EOF - - ::= EOF - - ::= EOF - - ::= EOF - ::= "opaque" ";" - | "require" * ";" - | "require" * ";" - | "require" "as" ";" - | * ";" - | * "symbol" * ":" [] - ";" - | * "symbol" * [":" ] "≔" - ";" - | [] * "inductive" ("with" - )* ";" + | "require" * ";" + | "require" [["private"] "open"] * ";" + | "require" "as" ";" + | ["private"] "open" * ";" + | * "symbol" * ":" [] ";" + | * "symbol" * [":" ] "≔" ";" + | [] * "inductive" ("with" )* ";" | "rule" ("with" )* ";" - | "builtin" "≔" ";" + | "builtin" "≔" ";" | "coerce_rule" ";" | "unif_rule" ";" - | "notation" ";" + | "notation" ";" | ";" - - ::= "open" - | "private" "open" - - ::= * "⊢" ":" - | * "⊢" "≡" + ::= * "⊢" (":"|"≡") | "compute" - | "print" [] + | "print" [ | "unif_rule" | "coerce_rule"] | "proofterm" | "debug" | "debug" ("+"|"-") + @@ -57,32 +38,20 @@ QID ::= [UID "."]+ UID | "type" | "search" - ::= - | "unif_rule" - | "coerce_rule" - - ::= UID - | QID - ::= [] "associative" | "commutative" | "constant" | "injective" | "opaque" | "sequential" - | - - ::= "private" - | "protected" - - ::= UID + | "private" + | "protected" ::= | "(" + ":" ")" | "[" + [":" ] "]" - ::= - | "_" + ::= | "_" ::= | @@ -99,23 +68,16 @@ QID ::= [UID "."]+ UID ::= | "_" | "TYPE" - | "?" UID [] - | "$" UID [] + | "?" [] + | "$" [] | "(" ")" | "[" "]" | | - ::= "." "[" [ (";" )*] "]" - - ::= - | + ::= ["@"] - ::= UID - | QID - - ::= "@" UID - | "@" QID + ::= "." 
"[" [ (";" )*] "]" ::= + "," | ":" "," @@ -154,33 +116,31 @@ QID ::= [UID "."]+ UID | "reflexivity" | "remove" + | "repeat" - | "rewrite" [] [] + | "rewrite" [] [] | "set" "≔" | "simplify" - | "simplify" + | "simplify" | "simplify" "rule" "off" | "solve" | "symmetry" | "try" | "why3" [] - ::= + ::= | "in" | "in" "in" | "in" ["in" ] | "as" "in" - ::= "." "[" "]" + ::= "." "[" "]" - ::= * ":" "≔" ["|"] [ - ("|" )*] + ::= * ":" "≔" ["|"] [ ("|" )*] ::= * ":" ::= "↪" - ::= "↪" "[" (";" - )* "]" + ::= "↪" "[" (";" )* "]" ::= "≡" @@ -189,16 +149,15 @@ QID ::= [UID "."]+ UID | "prefix" | "quantifier" - ::= - | + ::= | - ::= ["generalize"] + ::= "=" | ">" | ">=" | "≥" - ::= UID + ::= "concl" | "hyp" | "spine" | "rule" | "lhs" | "rhs" - ::= "type" - | "rule" - | UID + ::= "name" "=" + | ("type"|"anywhere") ("≥"|">=") ["generalize"] + | ["generalize"] | "(" ")" ::= ("," )* @@ -206,6 +165,4 @@ QID ::= [UID "."]+ UID ::= (";" )* ::= - | "|" - - + | "|" diff --git a/src/common/logger.ml b/src/common/logger.ml index b2324323b..c12761161 100644 --- a/src/common/logger.ml +++ b/src/common/logger.ml @@ -32,9 +32,11 @@ let update_log_enabled () = (** [make key name desc] registers a new logger and returns its pp. *) let make logger_key logger_name logger_desc = + (* Sanity checks. *) if String.length logger_name <> 4 then invalid_arg "Logger.make: name must be 4 characters long"; + let check data = if logger_key = data.logger_key then invalid_arg "Logger.make: key is already used"; @@ -44,6 +46,7 @@ let make logger_key logger_name logger_desc = List.iter check Stdlib.(!loggers); let logger_enabled = ref false in + (* Actual printing function. 
*) let pp fmt = Color.update_with_color Stdlib.(!Error.err_fmt); diff --git a/src/common/pos.ml b/src/common/pos.ml index f957a9a74..7996894fe 100644 --- a/src/common/pos.ml +++ b/src/common/pos.ml @@ -23,7 +23,60 @@ let cmp : pos cmp = fun p1 p2 -> (p1.start_line, p1.start_col, (p1.fname, p1.end_line, p1.end_col)) (p2.start_line, p2.start_col, (p2.fname, p2.end_line, p2.end_col)) -(** Convenient short name for an optional position. *) +(** [cat p1 p2] returns a position starting from [p1] start and ending with + [p2] end. The result position uses [p2] fname. *) +let cat : pos -> pos -> pos = fun p1 p2 -> + { fname = p2.fname + ; start_line = p1.start_line + ; start_col = p1.start_col + ; start_offset = p1.start_offset + ; end_line = p2.end_line + ; end_col = p2.end_col + ; end_offset = p2.end_offset } + +(** [locate ?fname (p1,p2)] converts the pair of Lexing positions [p1,p2] and + filename [fname] into a {!type:pos}. *) +let locate : ?fname:string -> Lexing.position * Lexing.position -> pos = + fun ?fname (p1, p2) -> + let fname = if p1.pos_fname = "" then fname else Some(p1.pos_fname) in + let start_line = p1.pos_lnum in + let start_col = p1.pos_cnum - p1.pos_bol in + let start_offset = p1.pos_cnum in + let end_line = p2.pos_lnum in + let end_col = p2.pos_cnum - p2.pos_bol in + let end_offset = p2.pos_cnum in + {fname; start_line; start_col; start_offset; end_line; end_col; end_offset } + +(** [lexing p] converts a [pos] into a [Lexing.position]. *) +let lexing (p:pos): Lexing.position = + { pos_fname = (match p.fname with None -> "" | Some s -> s) + ; pos_lnum = p.start_line + ; pos_bol = p.start_offset - p.start_col + ; pos_cnum = p.start_offset } + +(** [to_string ?print_fname pos] transforms [pos] into a readable string. If + [print_fname] is [true] (the default), the filename contained in [pos] is + printed. 
*) +let to_string : ?print_dirname:bool -> ?print_fname:bool -> pos -> string = + fun ?(print_dirname=true) ?(print_fname=true) + {fname; start_line; start_col; end_line; end_col} -> + let fname = + if print_fname then + match fname with + | None -> "" + | Some n -> (if print_dirname then n else Filename.basename n) ^ ":" + else "" + in + if start_line <> end_line then + Printf.sprintf "%s%d:%d-%d:%d" fname start_line start_col end_line end_col + else if start_col = end_col then + Printf.sprintf "%s%d:%d" fname start_line start_col + else + Printf.sprintf "%s%d:%d-%d" fname start_line start_col end_col + + + +(** Type of optional positions. *) type popt = pos option (** [equal p1 p2] tells whether [p1] and [p2] denote the same position. *) @@ -33,46 +86,13 @@ let equal : popt -> popt -> bool = fun p1 p2 -> | (None , None ) -> true | (_ , _ ) -> false -(** Type constructor extending a type (e.g. a piece of abstract syntax) with a - a source code position. *) -type 'a loc = - { elt : 'a (** The element that is being localised. *) - ; pos : popt (** Position of the element in the source code. *) } - -(** Localised string type (widely used). *) -type strloc = string loc - -(** [make pos elt] associates the position [pos] to [elt]. *) -let make : popt -> 'a -> 'a loc = fun pos elt -> { elt ; pos } - -(** [none elt] wraps [elt] in a ['a loc] structure without any specific source - code position. *) -let none : 'a -> 'a loc = fun elt -> make None elt - -(** [in_pos pos elt] associates the position [pos] to [elt]. *) -let in_pos : pos -> 'a -> 'a loc = fun p elt -> make (Some p) elt - (** [pos_end po] creates a position from the end of position [po]. *) let pos_end : popt -> popt = fun po -> match po with | None -> None | Some p -> Some {p with start_line = p.end_line; start_col = p.end_col} -(** [cat p1 p2] returns a position starting from [p1] start and ending with - [p2] end. [p1] and [p2] must have the same filename. 
*) -let cat : pos -> pos -> pos = fun p1 p2 -> - { fname = p2.fname - (*FIXME: temporary fix for - https://github.com/Deducteam/lambdapi/issues/1001 - if p1.fname <> p2.fname then invalid_arg __LOC__ else p1.fname*) - ; start_line = p1.start_line - ; start_col = p1.start_col - ; start_offset = p1.start_offset - ; end_line = p2.end_line - ; end_col = p2.end_col - ; end_offset = p2.end_offset - } - +(** [cat] extends and hide the above [cat] function from [pos] to [popt]. *) let cat : popt -> popt -> popt = fun p1 p2 -> match p1, p2 with | Some p1, Some p2 -> Some (cat p1 p2) @@ -89,27 +109,42 @@ let shift : int -> popt -> popt = fun k p -> let after = shift 1 let before = shift (-1) -(** [to_string ?print_fname pos] transforms [pos] into a readable string. If - [print_fname] is [true] (the default), the filename contained in [pos] is - printed. *) -let to_string : ?print_dirname:bool -> ?print_fname:bool -> pos -> string = - fun ?(print_dirname=true) ?(print_fname=true) - {fname; start_line; start_col; end_line; end_col} -> - let fname = - if not print_fname then "" else - match fname with - | None -> "" - | Some(n) -> - if print_dirname then n ^ ":" - else - Filename.basename n ^ ":" - in - if start_line <> end_line then - Printf.sprintf "%s%d:%d-%d:%d" fname start_line start_col end_line end_col - else if start_col = end_col then - Printf.sprintf "%s%d:%d" fname start_line start_col - else - Printf.sprintf "%s%d:%d-%d" fname start_line start_col end_col +(** [lexing_opt p] converts a [popt] into a [Lexing.position]. *) +let lexing_opt (p:popt): Lexing.position = + match p with + | None -> {pos_fname=""; pos_lnum=1; pos_bol=0; pos_cnum=0} + | Some p -> lexing p + + + +(** Type constructor extending a type (e.g. a piece of abstract syntax) with a + an optional source code position. *) +type 'a loc = + { elt : 'a (** The element that is being localised. *) + ; pos : popt (** Position of the element in the source code. *) } + +(** Localised string type (widely used). 
*) +type strloc = string loc + +(** [make pos elt] associates the position [pos] to [elt]. *) +let make : popt -> 'a -> 'a loc = fun pos elt -> { elt ; pos } + +(** [none elt] wraps [elt] in a ['a loc] structure without any specific source + code position. *) +let none : 'a -> 'a loc = fun elt -> make None elt + +(** [map f loc] applies function [f] on the value of [loc] and keeps the + position unchanged. *) +let map : ('a -> 'b) -> 'a loc -> 'b loc = fun f loc -> + {loc with elt = f loc.elt} + +(** [in_pos pos elt] associates the position [pos] to [elt]. *) +let in_pos : pos -> 'a -> 'a loc = fun p elt -> make (Some p) elt + +(** [make_pos lps elt] creates a located element from the lexing positions + [lps] and the element [elt]. *) +let make_pos : Lexing.position * Lexing.position -> 'a -> 'a loc = + fun lps elt -> in_pos (locate lps) elt let popt_to_string : ?print_dirname:bool -> ?print_fname:bool -> popt -> string = @@ -119,36 +154,19 @@ let popt_to_string : | Some (p) -> to_string ~print_dirname ~print_fname p (** [pp ppf pos] prints the optional position [pos] on [ppf]. *) -let pp : popt Lplib.Base.pp = fun ppf p -> +let pp : popt pp = fun ppf p -> string ppf (popt_to_string p) (** [short ppf pos] prints the optional position [pos] on [ppf]. *) -let short : popt Lplib.Base.pp = fun ppf p -> +let short : popt pp = fun ppf p -> let print_fname=false in string ppf (popt_to_string ~print_fname p) -(** [map f loc] applies function [f] on the value of [loc] and keeps the - position unchanged. *) -let map : ('a -> 'b) -> 'a loc -> 'b loc = fun f loc -> - {loc with elt = f loc.elt} +(** [pp_lexing ppf lps] prints the Lexing.position pair [lps] on [ppf]. *) +let pp_lexing : (Lexing.position * Lexing.position) pp = + fun ppf lps -> short ppf (Some (locate lps)) -(** [locate ?fname loc] converts the pair of position [loc] and filename - [fname] of the Lexing library into a {!type:pos}. 
*) -let locate : ?fname:string -> Lexing.position * Lexing.position -> pos = - fun ?fname (p1, p2) -> - let fname = if p1.pos_fname = "" then fname else Some(p1.pos_fname) in - let start_line = p1.pos_lnum in - let start_col = p1.pos_cnum - p1.pos_bol in - let start_offset = p1.pos_cnum in - let end_line = p2.pos_lnum in - let end_col = p2.pos_cnum - p2.pos_bol in - let end_offset = p2.pos_cnum in - {fname; start_line; start_col; start_offset; end_line; end_col; end_offset } -(** [make_pos lps elt] creates a located element from the lexing positions - [lps] and the element [elt]. *) -let make_pos : Lexing.position * Lexing.position -> 'a -> 'a loc = - fun lps elt -> in_pos (locate lps) elt (** [print_file_contents escape sep delimiters pos] prints the contents of the file at position [pos]. [sep] is the separator replacing each newline @@ -156,8 +174,7 @@ let make_pos : Lexing.position * Lexing.position -> 'a -> 'a loc = "unknown location" message returned when the position does not refer to a file. [escape] is used to escape the file contents.*) let print_file_contents : - escape:(string -> string) -> - delimiters:(string*string) -> popt Lplib.Base.pp = + escape:(string -> string) -> delimiters:(string*string) -> popt pp = fun ~escape ~delimiters:(db,de) ppf pos -> match pos with | Some { fname=Some fname; start_line; start_col; end_line; end_col } -> diff --git a/src/handle/command.ml b/src/handle/command.ml index 2d97840c3..617601e5c 100644 --- a/src/handle/command.ml +++ b/src/handle/command.ml @@ -626,8 +626,8 @@ let get_proof_data : compiler -> sig_state -> p_command -> cmd_output = with | Timeout as e -> raise e | Fatal(Some(Some(_)),_) as e -> raise e - | Fatal(None ,m) -> fatal pos "Error on command.@.%s" m - | Fatal(Some(None) ,m) -> fatal pos "Error on command.@.%s" m + | Fatal(None ,m) -> fatal pos "%s" m + | Fatal(Some(None) ,m) -> fatal pos "%s" m | e -> fatal pos "Uncaught exception: %s." 
(Printexc.to_string e) diff --git a/src/handle/query.ml b/src/handle/query.ml index 9fbf34e52..c88dfb338 100644 --- a/src/handle/query.ml +++ b/src/handle/query.ml @@ -184,9 +184,9 @@ let handle : Sig_state.t -> proof_state option -> p_query -> result = let ctxt = Env.to_ctxt env in let p = new_problem() in match elt with - | P_query_search s -> + | P_query_search q -> let dbpath = Path.default_dbpath in - return string (Tool.Indexing.search_cmd_txt ss s ~dbpath) + return string (Tool.Indexing.search_cmd_txt_query ss q ~dbpath) | P_query_debug(_,_) | P_query_verbose(_) | P_query_flag(_,_) diff --git a/src/handle/rewrite.ml b/src/handle/rewrite.ml index 245b696e6..9bce82d8d 100644 --- a/src/handle/rewrite.ml +++ b/src/handle/rewrite.ml @@ -372,7 +372,7 @@ let swap : eq_config -> term -> term -> term -> term -> term = equational lemma that is appied. It handles the full set of SSReflect patterns. *) let rewrite : Sig_state.t -> problem -> popt -> goal_typ -> bool -> - (term, binder) Parsing.Syntax.rw_patt option -> term -> term = + (term, binder) Parsing.Syntax.rwpatt option -> term -> term = fun ss p pos {goal_hyps=g_env; goal_type=g_type; _} l2r pat t -> (* Obtain the required symbols from the current signature. *) diff --git a/src/handle/tactic.ml b/src/handle/tactic.ml index 4392c23ab..3e56dde0d 100644 --- a/src/handle/tactic.ml +++ b/src/handle/tactic.ml @@ -307,29 +307,32 @@ let p_query_of_term (c:config) (pos:popt) (t:term) :p_query = | Symb s, ts -> p_query c pos s ts | _ -> fatal pos "Unhandled query expression: %a." term t*) +(** [p_term_of_string pos t] turns into a p_term a string literal term [t] + that is part of a bigger term obtained by scoping and normalizing of a + p_term at position [pos]. 
*) let p_term_of_string (pos:popt) (t:term): p_term = match t with | Symb s when String.is_string_literal s.sym_name -> begin let string = remove_quotes s.sym_name in - let fname = match pos with Some{fname=Some fn;_} -> fn | _ -> "" in - let stream = Parsing.Parser.Lp.parse_term_string fname string in - try Stream.next stream with Stream.Failure -> assert false + let p = lexing_opt (after s.sym_pos) in + Parsing.Parser.Lp.parse_term_string p string end - | _ -> fatal pos "refine tactic not applied to a term string literal" + | _ -> fatal pos "not a string literal" -let p_rw_patt_of_string (pos:popt) (t:term): p_rw_patt option = +(** [p_rwpatt_of_string pos t] turns into a p_rwpatt option a string literal + term [t] that is part of a bigger term obtained by scoping and normalizing + of a p_term at position [pos]. *) +let p_rwpatt_of_string (pos:popt) (t:term): p_rwpatt option = + if Logger.log_enabled() then + log "p_rwpatt_of_string %a %a" Pos.short pos term t; match t with | Symb s when String.is_string_literal s.sym_name -> let string = remove_quotes s.sym_name in if string = "" then None - else - begin - let fname = match pos with Some{fname=Some fn;_} -> fn | _ -> "" in - let stream = Parsing.Parser.Lp.parse_rwpatt_string fname string in - try Some (Stream.next stream) with Stream.Failure -> assert false - end - | _ -> fatal pos "rewrite tactic not applied to a pattern string literal" + else let p = lexing_opt (after s.sym_pos) in + Some (Parsing.Parser.Lp.parse_rwpatt_string p string) + | _ -> fatal pos "not a string literal" let is_right (pos:popt) (t:term): bool = match t with @@ -343,8 +346,9 @@ let is_right (pos:popt) (t:term): bool = end | _ -> fatal pos "rewrite tactic not applied to a side string literal" -(** [p_tactic t] interprets the term [t] as a tactic. 
*) -let p_tactic (ss:Sig_state.t) (pos:popt) :int StrMap.t -> term -> p_tactic = +(** [p_tactic ss pos idmap t] interprets as a tactic the term [t] obtained by + scoping and normalization of a p_term at position [pos]. *) +let p_tactic (ss:Sig_state.t) (pos:popt): int StrMap.t -> term -> p_tactic = let c = get_config ss pos in let rec tac idmap t = Pos.make pos (tac_aux idmap t) and tac_aux idmap t = @@ -383,7 +387,7 @@ let p_tactic (ss:Sig_state.t) (pos:popt) :int StrMap.t -> term -> p_tactic = | T_repeat, _ -> assert false | T_rewrite, [side;pat;_;t] -> P_tac_rewrite(is_right pos side, - p_rw_patt_of_string pos pat, p_term pos idmap t) + p_rwpatt_of_string pos pat, p_term pos idmap t) | T_rewrite, _ -> assert false | T_set, [t1;_;t2] -> P_tac_set(p_ident_of_sym pos t1, p_term pos idmap t2) @@ -632,7 +636,7 @@ let rec handle : let g = List.fold_left remove g ids in {ps with proof_goals = g::gs} | P_tac_rewrite(l2r,pat,eq) -> - let pat = Option.map (Scope.scope_rw_patt ss env) pat in + let pat = Option.map (Scope.scope_rwpatt ss env) pat in let p = new_problem() in tac_refine pos ps gt gs p (Rewrite.rewrite ss p pos gt l2r pat (scope eq)) @@ -683,7 +687,7 @@ let rec handle : | P_tac_eval pt -> let t = Eval.snf (Env.to_ctxt env) (scope pt) in let idmap = get_names g in - handle ss sym_pos prv ps (p_tactic ss pos idmap t) + handle ss sym_pos prv ps (p_tactic ss pt.pos idmap t) (** Representation of a tactic output. *) type tac_output = proof_state * Query.result @@ -700,7 +704,7 @@ let handle : ps, Query.handle ss (Some ps) q | _ -> match ps.proof_goals with - | [] -> fatal pos "No remaining goals." + | [] -> fatal pos "No remaining goal." 
| g::_ -> if Logger.log_enabled() then log ("goal %a") Goal.pp_no_hyp g; handle ss sym_pos prv ps tac, None diff --git a/src/parsing/dune b/src/parsing/dune index dbd73fc61..1ac0824ba 100644 --- a/src/parsing/dune +++ b/src/parsing/dune @@ -6,8 +6,6 @@ (libraries camlp-streams lambdapi.core menhirLib pratter sedlex sedlex.ppx lambdapi.common) (flags -w +3)) -(menhir (flags --explain --external-tokens LpLexer) (modules lpParser)) - (ocamllex dkLexer) (menhir (flags --explain --external-tokens DkTokens) (modules dkParser)) diff --git a/src/parsing/lpLexer.ml b/src/parsing/lpLexer.ml index d36c547a4..caa16cb35 100644 --- a/src/parsing/lpLexer.ml +++ b/src/parsing/lpLexer.ml @@ -4,13 +4,13 @@ open Lplib open Sedlexing open Common open Pos -let remove_first : Sedlexing.lexbuf -> string = fun lb -> +let remove_first : lexbuf -> string = fun lb -> Utf8.sub_lexeme lb 1 (lexeme_length lb - 1) -let remove_last : Sedlexing.lexbuf -> string = fun lb -> +let remove_last : lexbuf -> string = fun lb -> Utf8.sub_lexeme lb 0 (lexeme_length lb - 1) -let remove_ends : Sedlexing.lexbuf -> string = fun lb -> +let remove_ends : lexbuf -> string = fun lb -> Utf8.sub_lexeme lb 1 (lexeme_length lb - 2) exception SyntaxError of strloc @@ -18,10 +18,10 @@ exception SyntaxError of strloc let syntax_error : Lexing.position * Lexing.position -> string -> 'a = fun pos msg -> raise (SyntaxError (Pos.make_pos pos msg)) -let fail : Sedlexing.lexbuf -> string -> 'a = fun lb msg -> - syntax_error (Sedlexing.lexing_positions lb) msg +let fail : lexbuf -> string -> 'a = fun lb msg -> + syntax_error (lexing_positions lb) msg -let invalid_character : Sedlexing.lexbuf -> 'a = fun lb -> +let invalid_character : lexbuf -> 'a = fun lb -> fail lb "Invalid character" (** Tokens. *) @@ -340,14 +340,15 @@ and comment next i lb = | any -> comment next i lb | _ -> invalid_character lb -(** [token buf] is a lexing function on buffer [buf] that can be passed to - a parser. 
*) -let token : lexbuf -> unit -> token * Lexing.position * Lexing.position = - fun lb () -> try with_tokenizer token lb () with - | Sedlexing.MalFormed -> fail lb "Not Utf8 encoded file" - | Sedlexing.InvalidCodepoint k -> +(** [token lb] is a lexing function on [lb] that can be passed to a parser. *) +let token : lexbuf -> token * Lexing.position * Lexing.position = + fun lb -> try Sedlexing.with_tokenizer token lb () with + | MalFormed -> fail lb "Not Utf8 encoded file" + | InvalidCodepoint k -> fail lb ("Invalid Utf8 code point " ^ string_of_int k) +let dummy_token = (EOF, Lexing.dummy_pos, Lexing.dummy_pos) + let token = - let r = ref (EOF, Lexing.dummy_pos, Lexing.dummy_pos) in fun lb () -> - Debug.(record_time Lexing (fun () -> r := token lb ())); !r + let r = ref dummy_token in fun lb -> + Debug.(record_time Lexing (fun () -> r := token lb)); !r diff --git a/src/parsing/lpParser.ml b/src/parsing/lpParser.ml new file mode 100644 index 000000000..5f18081b3 --- /dev/null +++ b/src/parsing/lpParser.ml @@ -0,0 +1,1636 @@ +open Lplib +open Common open Pos open Logger +open Syntax +open Core +open LpLexer +open Lexing +open Sedlexing + +let log = Logger.make 'n' "pars" "parsing" +let log = log.pp + +(* token management *) + +let string_of_token = function + | EOF -> "end of file" + | ABORT -> "abort" + | ADMIT -> "admit" + | ADMITTED -> "admitted" + | APPLY -> "apply" + | ARROW -> "→" + | AS -> "as" + | ASSERT _ -> "assert or assertnot" + | ASSIGN -> "≔" + | ASSOCIATIVE -> "associative" + | ASSUME -> "assume" + | BACKQUOTE -> "`" + | BEGIN -> "begin" + | BUILTIN -> "builtin" + | CHANGE -> "change" + | COERCE_RULE -> "coerce_rule" + | COLON -> ":" + | COMMA -> "," + | COMMUTATIVE -> "commutative" + | COMPUTE -> "compute" + | CONSTANT -> "constant" + | DEBUG -> "debug" + | DEBUG_FLAGS _ -> "debug flags" + | DOT -> "." 
+ | END -> "end" + | EQUIV -> "≡" + | EVAL -> "eval" + | FAIL -> "fail" + | FLAG -> "flag" + | FLOAT _ -> "float" + | GENERALIZE -> "generalize" + | HAVE -> "have" + | HOOK_ARROW -> "↪" + | IN -> "in" + | INDUCTION -> "induction" + | INDUCTIVE -> "inductive" + | INFIX -> "infix" + | INJECTIVE -> "injective" + | INT _ -> "integer" + | LAMBDA -> "λ" + | LET -> "let" + | L_CU_BRACKET -> "{" + | L_PAREN -> "(" + | L_SQ_BRACKET -> "[" + | NOTATION -> "notation" + | OPAQUE -> "opaque" + | OPEN -> "open" + | ORELSE -> "orelse" + | PI -> "Π" + | POSTFIX -> "postfix" + | PREFIX -> "prefix" + | PRINT -> "print" + | PRIVATE -> "private" + | PROOFTERM -> "proofterm" + | PROTECTED -> "protected" + | PROVER -> "prover" + | PROVER_TIMEOUT -> "prover_timeout" + | QID _ -> "qualified identifier" + | QID_EXPL _ -> "@-prefixed qualified identifier" + | QUANTIFIER -> "quantifier" + | REFINE -> "refine" + | REFLEXIVITY -> "reflexivity" + | REMOVE -> "remove" + | REPEAT -> "repeat" + | REQUIRE -> "require" + | REWRITE -> "rewrite" + | RULE -> "rule" + | R_CU_BRACKET -> "}" + | R_PAREN -> ")" + | R_SQ_BRACKET -> "]" + | SEARCH -> "search" + | SEQUENTIAL -> "sequential" + | SEMICOLON -> ";" + | SET -> "set" + | SIDE _ -> "left or right" + | SIMPLIFY -> "simplify" + | SOLVE -> "solve" + | STRINGLIT _ -> "string literal" + | SWITCH false -> "off" + | SWITCH true -> "on or off" + | SYMBOL -> "symbol" + | SYMMETRY -> "symmetry" + | TRY -> "try" + | TURNSTILE -> "⊢" + | TYPE_QUERY -> "type" + | TYPE_TERM -> "TYPE" + | UID _ -> "non-qualified identifier" + | UID_EXPL _ -> "@-prefixed non-qualified identifier" + | UID_META _ -> "?-prefixed metavariable number" + | UID_PATT _ -> "$-prefixed non-qualified identifier" + | UNDERSCORE -> "_" + | UNIF_RULE -> "unif_rule" + | VBAR -> "|" + | VERBOSE -> "verbose" + | WHY3 -> "why3" + | WITH -> "with" + +let pp_token ppf t = Base.string ppf (string_of_token t) + +let the_current_token : (token * position * position) Stdlib.ref = + Stdlib.ref dummy_token 
+ +let current_token() : token = let (t,_,_) = !the_current_token in t + +let current_pos() : position * position = + let (_,p1,p2) = !the_current_token in (p1,p2) + +let new_parsing (entry:lexbuf -> 'a) (lb:lexbuf): 'a = + let t = !the_current_token in + let reset() = the_current_token := t in + the_current_token := LpLexer.token lb; + try let r = entry lb in begin reset(); r end + with e -> begin reset(); raise e end + +let expected (msg:string) (tokens:token list): 'a = + if msg <> "" then syntax_error (current_pos()) ("Expected: "^msg^".") + else + match tokens with + | [] -> assert false + | t::ts -> + let soft = string_of_token in + syntax_error (current_pos()) + (List.fold_left (fun s t -> s^", "^soft t) ("Expected: "^soft t) ts + ^".") + +let consume_token (lb:lexbuf) : unit = + the_current_token := LpLexer.token lb; + if log_enabled() then + let (t,p1,p2) = !the_current_token in + let p = locate (p1,p2) in + log "read new token %a %a" Pos.short (Some p) pp_token t + +(* building positions and terms *) + +let extend_pos (*s:string*) (lps:position * position): 'a -> 'a loc = + let p1 = fst lps and p2 = fst (current_pos()) in + let p2 = + if p2.pos_cnum > p2.pos_bol then + {p2 with pos_cnum = p2.pos_cnum - 1} + else p2 + in + (*if log_enabled() then + log "extend_pos %s %a -> %a" s Pos.pp_lexing lps Pos.pp_lexing lps2;*) + make_pos (p1,p2) + +let qid_of_path (lps: position * position): + string list -> (string list * string) loc = function + | [] -> assert false + | id::mp -> make_pos lps (List.rev mp, id) + +let make_abst (pos1:position) (ps:p_params list) (t:p_term) (pos2:position) + :p_term = + if ps = [] then t + else extend_pos (*__FUNCTION__*) (pos1,pos2) (P_Abst(ps,t)) + +let make_prod (pos1:position) (ps:p_params list) (t:p_term) (pos2:position) + :p_term = + if ps = [] then t + else extend_pos (*__FUNCTION__*) (pos1,pos2) (P_Prod(ps,t)) + +let ident_of_term pos1 {elt; _} = + match elt with + | P_Iden({elt=([], x); pos}, _) -> Pos.make pos x + | _ -> 
LpLexer.syntax_error pos1 "not an unqualified identifier." + +(* generic parsing functions *) + +let list (elt:lexbuf -> 'a) (lb:lexbuf): 'a list = + if log_enabled() then log "%s" __FUNCTION__; + let acc = ref [] in + (try while true do acc := elt lb :: !acc done with SyntaxError _ -> ()); + List.rev !acc + +let nelist (elt:lexbuf -> 'a) (lb:lexbuf): 'a list = + if log_enabled() then log "%s" __FUNCTION__; + let x = elt lb in + x :: list elt lb + +let consume (token:token) (lb:lexbuf): unit = + if current_token() = token then consume_token lb + else expected "" [token] + +let prefix (token:token) (elt:lexbuf -> 'a) (lb:lexbuf): 'a = + consume token lb; elt lb + +let alone (entry:lexbuf -> 'a) (lb:lexbuf): 'a = + let x = entry lb in if current_token() != EOF then expected "" [EOF] else x + +(* parsing functions *) + +let consume_STRINGLIT (lb:lexbuf): string = + match current_token() with + | STRINGLIT s -> + consume_token lb; + s + | _ -> + expected "" [STRINGLIT""] + +let consume_SWITCH (lb:lexbuf): bool = + match current_token() with + | SWITCH b -> + consume_token lb; + b + | _ -> + expected "" [SWITCH true] + +let consume_INT (lb:lexbuf): string = + match current_token() with + | INT s -> + consume_token lb; + s + | _ -> + expected "" [INT""] + +let consume_DEBUG_FLAGS (lb:lexbuf): bool * string = + match current_token() with + | DEBUG_FLAGS(b,s) -> + consume_token lb; + b,s + | _ -> + expected "" [DEBUG_FLAGS(true,"")] + +let qid (lb:lexbuf): (string list * string) loc = + if log_enabled() then log "%s" __FUNCTION__; + match current_token() with + | UID s -> + let pos1 = current_pos() in + consume_token lb; + make_pos pos1 ([], s) + | QID p -> + let pos1 = current_pos() in + consume_token lb; + qid_of_path pos1 p + | _ -> + expected "" [UID"";QID[]] + +let qid_expl (lb:lexbuf): (string list * string) loc = + if log_enabled() then log "%s" __FUNCTION__; + match current_token() with + | UID_EXPL s -> + let pos1 = current_pos() in + consume_token lb; + make_pos 
pos1 ([], s) + | QID_EXPL p -> + let pos1 = current_pos() in + consume_token lb; + qid_of_path pos1 p + | _ -> + expected "" [UID_EXPL"";QID_EXPL[]] + +let uid (lb:lexbuf): string loc = + if log_enabled() then log "%s" __FUNCTION__; + match current_token() with + | UID s -> + let pos1 = current_pos() in + consume_token lb; + make_pos pos1 s + | _ -> + expected "" [UID""] + +let param (lb:lexbuf): string loc option = + if log_enabled() then log "%s" __FUNCTION__; + match current_token() with + | UID s -> + let pos1 = current_pos() in + consume_token lb; + Some (make_pos pos1 s) + | UNDERSCORE -> + consume_token lb; + None + | _ -> + expected "non-qualified identifier or \"_\"" [UID"";UNDERSCORE] + +let int (lb:lexbuf): string = + if log_enabled() then log "%s" __FUNCTION__; + match current_token() with + | INT s -> + consume_token lb; + s + | _ -> + expected "integer" [INT""] + +let float_or_int (lb:lexbuf): string = + if log_enabled() then log "%s" __FUNCTION__; + match current_token() with + | INT s + | FLOAT s -> + consume_token lb; + s + | _ -> + expected "integer or float" [INT"";FLOAT""] + +let path (lb:lexbuf): string list loc = + if log_enabled() then log "%s" __FUNCTION__; + match current_token() with + (*| UID s -> + let pos1 = current_pos() in + LpLexer.syntax_error pos1 "Unqualified identifier"*) + | QID p -> + let pos1 = current_pos() in + consume_token lb; + make_pos pos1 (List.rev p) + | _ -> + expected "" [QID[]] + +let qid_or_rule (lb:lexbuf): (string list * string) loc = + if log_enabled() then log "%s" __FUNCTION__; + match current_token() with + | UID s -> + let pos1 = current_pos() in + consume_token lb; + make_pos pos1 ([], s) + | QID p -> + let pos1 = current_pos() in + consume_token lb; + qid_of_path pos1 p + | UNIF_RULE -> + let pos1 = current_pos() in + consume_token lb; + make_pos pos1 (Sign.Ghost.path, Unif_rule.equiv.sym_name) + | COERCE_RULE -> + let pos1 = current_pos() in + consume_token lb; + make_pos pos1 (Sign.Ghost.path, 
Coercion.coerce.sym_name) + | _ -> + expected "" [UID"";QID[];UNIF_RULE;COERCE_RULE] + +let term_id (lb:lexbuf): p_term = + if log_enabled() then log "%s" __FUNCTION__; + match current_token() with + | UID _ + | QID _ -> + let i = qid lb in + {i with elt=P_Iden(i, false)} + | UID_EXPL _ + | QID_EXPL _ -> + let i = qid_expl lb in + {i with elt=P_Iden(i, true)} + | _ -> + expected "" [UID"";QID[];UID_EXPL"";QID_EXPL[]] + +(* commands *) + +let rec command pos1 (p_sym_mod:p_modifier list) (lb:lexbuf): p_command = + if log_enabled() then log "%s" __FUNCTION__; + match current_token() with + | SIDE _ + | ASSOCIATIVE + | COMMUTATIVE + | CONSTANT + | INJECTIVE + | SEQUENTIAL + | PRIVATE + | OPAQUE + | PROTECTED -> + assert (p_sym_mod = []); + let pos1 = current_pos() in + command pos1 (nelist modifier lb) lb + (* qid *) + | UID _ + | QID _ -> + begin + match p_sym_mod with + | [{elt=P_opaq;_}] -> + let i = qid lb in + extend_pos (*__FUNCTION__*) pos1 (P_opaque i) + | [] -> + expected "command keyword missing" [] + | {elt=P_opaq;_}::{pos;_}::_ -> + expected "an opaque command must be followed by an identifier" [] + | _ -> + expected "" [SYMBOL] + end + | REQUIRE -> + if p_sym_mod <> [] then expected "" [SYMBOL]; (*or modifiers*) + let pos1 = current_pos() in + consume_token lb; + begin + match current_token() with + | OPEN -> + consume_token lb; + let ps = nelist path lb in + extend_pos (*__FUNCTION__*) pos1 (P_require(Some false,ps)) + | PRIVATE -> + consume_token lb; + begin + match current_token() with + | OPEN -> consume_token lb + | _ -> expected "" [OPEN] + end; + let ps = nelist path lb in + extend_pos (*__FUNCTION__*) pos1 (P_require(Some true,ps)) + | _ -> + let ps = nelist path lb in + begin + match current_token() with + | AS -> + let p = + match ps with + | [p] -> p + | _ -> expected "a single module before \"as\"" [] + in + consume_token lb; + let i = uid lb in + extend_pos (*__FUNCTION__*) pos1 (P_require_as(p,i)) + | _ -> + extend_pos (*__FUNCTION__*) pos1 
(P_require(None,ps)) + end + end + | OPEN -> + let prv = + match p_sym_mod with + | [] -> false + | {elt=P_expo Term.Privat;_}::_ -> true + | _ -> expected "" [SYMBOL] + in + let pos1 = current_pos() in + consume_token lb; + let l = list path lb in + extend_pos (*__FUNCTION__*) pos1 (P_open(prv,l)) + | SYMBOL -> + let pos1 = current_pos() in + consume_token lb; + let p_sym_nam = uid lb in + let p_sym_arg = list params lb in + begin + match current_token() with + | COLON -> + consume_token lb; + let p_sym_typ = Some(term lb) in + begin + match current_token() with + | BEGIN -> + consume_token lb; + let p_sym_prf = Some (proof lb) in + let p_sym_def = false in + let sym = + {p_sym_mod; p_sym_nam; p_sym_arg; p_sym_typ; + p_sym_trm=None; p_sym_def; p_sym_prf} + in extend_pos (*__FUNCTION__*) pos1 (P_symbol(sym)) + | ASSIGN -> + consume_token lb; + let p_sym_trm, p_sym_prf = term_proof lb in + let p_sym_def = true in + let sym = + {p_sym_mod; p_sym_nam; p_sym_arg; p_sym_typ; + p_sym_trm; p_sym_def; p_sym_prf} + in extend_pos (*__FUNCTION__*) pos1 (P_symbol(sym)) + | SEMICOLON -> + let p_sym_trm = None in + let p_sym_def = false in + let p_sym_prf = None in + let sym = + {p_sym_mod; p_sym_nam; p_sym_arg; p_sym_typ; + p_sym_trm; p_sym_def; p_sym_prf} + in extend_pos (*__FUNCTION__*) pos1 (P_symbol(sym)) + | _ -> + expected "" [BEGIN;ASSIGN] + end + | ASSIGN -> + consume_token lb; + let p_sym_trm, p_sym_prf = term_proof lb in + let p_sym_def = true in + let p_sym_typ = None in + let sym = + {p_sym_mod; p_sym_nam; p_sym_arg; p_sym_typ; + p_sym_trm; p_sym_def; p_sym_prf} + in extend_pos (*__FUNCTION__*) pos1 (P_symbol(sym)) + | _ -> + expected "" [COLON;ASSIGN] + end + | L_PAREN + | L_SQ_BRACKET -> + let pos1 = current_pos() in + let xs = nelist params lb in + consume INDUCTIVE lb; + let i = inductive lb in + let is = list (prefix WITH inductive) lb in + extend_pos (*__FUNCTION__*) pos1 (P_inductive(p_sym_mod,xs,i::is)) + | INDUCTIVE -> + let pos1 = current_pos() in + 
consume_token lb; + let i = inductive lb in + let is = list (prefix WITH inductive) lb in + extend_pos (*__FUNCTION__*) pos1 (P_inductive(p_sym_mod,[],i::is)) + | RULE -> + if p_sym_mod <> [] then expected "" [SYMBOL]; (*or modifiers*) + let pos1 = current_pos() in + consume_token lb; + let r = rule lb in + let rs = list (prefix WITH rule) lb in + extend_pos (*__FUNCTION__*) pos1 (P_rules(r::rs)) + | UNIF_RULE -> + if p_sym_mod <> [] then expected "" [SYMBOL]; (*or modifiers*) + let pos1 = current_pos() in + consume_token lb; + let e = equation lb in + consume HOOK_ARROW lb; + consume L_SQ_BRACKET lb; + let eq1 = equation lb in + let eqs = list (prefix SEMICOLON equation) lb in + let es = eq1::eqs in + consume R_SQ_BRACKET lb; + (* FIXME: give sensible positions instead of Pos.none and P.appl. *) + let equiv = P.qiden Sign.Ghost.path Unif_rule.equiv.sym_name in + let cons = P.qiden Sign.Ghost.path Unif_rule.cons.sym_name in + let mk_equiv (t, u) = P.appl (P.appl equiv t) u in + let lhs = mk_equiv e in + let es = List.rev_map mk_equiv es in + let (en, es) = List.(hd es, tl es) in + let cat e es = P.appl (P.appl cons e) es in + let rhs = List.fold_right cat es en in + let r = extend_pos (*__FUNCTION__*) pos1 (lhs, rhs) in + extend_pos (*__FUNCTION__*) pos1 (P_unif_rule(r)) + | COERCE_RULE -> + if p_sym_mod <> [] then expected "" [SYMBOL]; (*or modifiers*) + let pos1 = current_pos() in + consume_token lb; + let r = rule lb in + extend_pos (*__FUNCTION__*) pos1 (P_coercion r) + | BUILTIN -> + if p_sym_mod <> [] then expected "" [SYMBOL]; (*or modifiers*) + let pos1 = current_pos() in + consume_token lb; + begin + match current_token() with + | STRINGLIT s -> + consume_token lb; + consume ASSIGN lb; + let i = qid lb in + extend_pos (*__FUNCTION__*) pos1 (P_builtin(s,i)) + | _ -> + expected "" [STRINGLIT""] + end + | NOTATION -> + if p_sym_mod <> [] then expected "" [SYMBOL]; (*or modifiers*) + let pos1 = current_pos() in + consume_token lb; + let i = qid lb in + let n = 
notation lb in + extend_pos (*__FUNCTION__*) pos1 (P_notation(i,n)) + | _ -> + if p_sym_mod <> [] then expected "" [SYMBOL]; (*or modifiers*) + let pos1 = current_pos() in + let q = query lb in + extend_pos (*__FUNCTION__*) pos1 (P_query(q)) + +and inductive (lb:lexbuf): p_inductive = + let pos0 = current_pos() in + let i = uid lb in + let pos1 = current_pos() in + let ps = list params lb in + consume COLON lb; + let t = term lb in + let pos2 = current_pos() in + let t = make_prod (fst pos1) ps t (snd pos2) in + consume ASSIGN lb; + begin + match current_token() with + | UID _ -> + let c = constructor lb in + let cs = list (prefix VBAR constructor) lb in + let l = c::cs in + extend_pos (*__FUNCTION__*) pos0 (i,t,l) + | VBAR -> + let l = list (prefix VBAR constructor) lb in + extend_pos (*__FUNCTION__*) pos0 (i,t,l) + | SEMICOLON -> + let l = [] in + extend_pos (*__FUNCTION__*) pos0 (i,t,l) + | _ -> + expected "identifier" [] + end + +and constructor (lb:lexbuf): p_ident * p_term = + let i = uid lb in + let pos1 = current_pos() in + let ps = list params lb in + consume COLON lb; + let t = term lb in + i, make_prod (fst pos1) ps t (snd (current_pos())) + +and modifier (lb:lexbuf): p_modifier = + if log_enabled() then log "%s" __FUNCTION__; + match current_token() with + | SIDE d -> + let pos1 = current_pos() in + consume_token lb; + begin + match current_token() with + | ASSOCIATIVE -> + consume_token lb; + extend_pos (*__FUNCTION__*) pos1 + (P_prop (Term.Assoc((d = Pratter.Left)))) + | _ -> + expected "" [ASSOCIATIVE] + end + | ASSOCIATIVE -> + let pos1 = current_pos() in + consume_token lb; + extend_pos (*__FUNCTION__*) pos1 (P_prop (Term.Assoc false)) + | COMMUTATIVE -> + let pos1 = current_pos() in + consume_token lb; + extend_pos (*__FUNCTION__*) pos1 (P_prop Term.Commu) + | CONSTANT -> + let pos1 = current_pos() in + consume_token lb; + extend_pos (*__FUNCTION__*) pos1 (P_prop Term.Const) + | INJECTIVE -> + let pos1 = current_pos() in + consume_token lb; + 
extend_pos (*__FUNCTION__*) pos1 (P_prop Term.Injec) + | OPAQUE -> + let pos1 = current_pos() in + consume_token lb; + extend_pos (*__FUNCTION__*) pos1 P_opaq + | SEQUENTIAL -> + let pos1 = current_pos() in + consume_token lb; + extend_pos (*__FUNCTION__*) pos1 (P_mstrat Term.Sequen) + | _ -> + exposition lb + +and exposition (lb:lexbuf): p_modifier = + if log_enabled() then log "%s" __FUNCTION__; + match current_token() with + | PRIVATE -> + let pos1 = current_pos() in + consume_token lb; + extend_pos (*__FUNCTION__*) pos1 (P_expo Term.Privat) + | PROTECTED -> + let pos1 = current_pos() in + consume_token lb; + extend_pos (*__FUNCTION__*) pos1 (P_expo Term.Protec) + | _ -> + expected "" [PRIVATE;PROTECTED] + +and notation (lb:lexbuf): string Term.notation = + if log_enabled() then log "%s" __FUNCTION__; + match current_token() with + | INFIX -> + consume_token lb; + begin + match current_token() with + | SIDE d -> + consume_token lb; + let p = float_or_int lb in + Term.Infix(d, p) + | _ -> + let p = float_or_int lb in + Term.Infix(Pratter.Neither, p) + end + | POSTFIX -> + consume_token lb; + let p = float_or_int lb in + Term.Postfix p + | PREFIX -> + consume_token lb; + let p = float_or_int lb in + Term.Prefix p + | QUANTIFIER -> + consume_token lb; + Term.Quant + | _ -> + expected "" [INFIX;POSTFIX;PREFIX;QUANTIFIER] + +and rule (lb:lexbuf): (p_term * p_term) loc = + if log_enabled() then log "%s" __FUNCTION__; + let pos1 = current_pos() in + let l = term lb in + consume HOOK_ARROW lb; + let r = term lb in + extend_pos (*__FUNCTION__*) pos1 (l, r) + +and equation (lb:lexbuf): p_term * p_term = + if log_enabled() then log "%s" __FUNCTION__; + let l = term lb in + consume EQUIV lb; + let r = term lb in + (l, r) + +(* queries *) + +and query (lb:lexbuf): p_query = + if log_enabled() then log "%s" __FUNCTION__; + match current_token() with + | ASSERT b -> + let pos1 = current_pos() in + consume_token lb; + let ps = list params lb in + consume TURNSTILE lb; + let t = 
term lb in + begin + match current_token() with + | COLON -> + consume_token lb; + let a = term lb in + let pos2 = current_pos() in + let t = make_abst (snd pos1) ps t (fst pos2) in + let a = make_prod (snd pos1) ps a (fst pos2) in + extend_pos (*__FUNCTION__*) pos1 + (P_query_assert(b, P_assert_typing(t,a))) + | EQUIV -> + consume_token lb; + let u = term lb in + let pos2 = current_pos() in + let t = make_abst (snd pos1) ps t (fst pos2) in + let u = make_abst (snd pos1) ps u (fst pos2) in + extend_pos (*__FUNCTION__*) pos1 + (P_query_assert(b, P_assert_conv(t, u))) + | _ -> + expected "" [COLON;EQUIV] + end + | COMPUTE -> + let pos1 = current_pos() in + consume_token lb; + let t = term lb in + extend_pos (*__FUNCTION__*) pos1 + (P_query_normalize(t, {strategy=SNF; steps=None})) + | PRINT -> + let pos1 = current_pos() in + consume_token lb; + begin + match current_token() with + | SEMICOLON -> + extend_pos (*__FUNCTION__*) pos1 (P_query_print None) + | _ -> + let i = qid_or_rule lb in + extend_pos (*__FUNCTION__*) pos1 (P_query_print (Some i)) + end + | PROOFTERM -> + let pos1 = current_pos() in + consume_token lb; + extend_pos (*__FUNCTION__*) pos1 P_query_proofterm + | DEBUG -> + let pos1 = current_pos() in + consume_token lb; + begin + match current_token() with + | SEMICOLON -> + extend_pos (*__FUNCTION__*) pos1 (P_query_debug(true,"")) + | _ -> + let b,s = consume_DEBUG_FLAGS lb in + extend_pos (*__FUNCTION__*) pos1 (P_query_debug(b,s)) + end + | FLAG -> + let pos1 = current_pos() in + consume_token lb; + let s = consume_STRINGLIT lb in + let b = consume_SWITCH lb in + extend_pos (*__FUNCTION__*) pos1 (P_query_flag(s,b)) + | PROVER -> + let pos1 = current_pos() in + consume_token lb; + let s = consume_STRINGLIT lb in + extend_pos (*__FUNCTION__*) pos1 (P_query_prover(s)) + | PROVER_TIMEOUT -> + let pos1 = current_pos() in + consume_token lb; + let n = consume_INT lb in + extend_pos (*__FUNCTION__*) pos1 (P_query_prover_timeout n) + | VERBOSE -> + let pos1 = 
current_pos() in + consume_token lb; + let n = consume_INT lb in + extend_pos (*__FUNCTION__*) pos1 (P_query_verbose n) + | TYPE_QUERY -> + let pos1 = current_pos() in + consume_token lb; + let t = term lb in + extend_pos (*__FUNCTION__*) pos1 + (P_query_infer(t, {strategy=NONE; steps=None})) + | SEARCH -> + let pos1 = current_pos() in + consume_token lb; + let q = search lb in + extend_pos (*__FUNCTION__*) pos1 (P_query_search q) + | _ -> + expected "query" [] + +and term_proof (lb:lexbuf): p_term option * (p_proof * p_proof_end) option = + if log_enabled() then log "%s" __FUNCTION__; + match current_token() with + | BEGIN -> + consume_token lb; + let p = proof lb in + None, Some p + | _ -> + let t = term lb in + begin + match current_token() with + | BEGIN -> + consume_token lb; + let p = proof lb in + Some t, Some p + | _ -> + Some t, None + end + +(* proofs *) + +and proof (lb:lexbuf): p_proof * p_proof_end = + if log_enabled() then log "%s" __FUNCTION__; + match current_token() with + | L_CU_BRACKET -> + let l = nelist subproof lb in + if current_token() = SEMICOLON then consume_token lb; + let pe = proof_end lb in + l, pe + (*queries*) + | ASSERT _ + | COMPUTE + | DEBUG + | FLAG + | PRINT + | PROOFTERM + | PROVER + | PROVER_TIMEOUT + | SEARCH + | TYPE_QUERY + | VERBOSE + (*tactics*) + | ADMIT + | APPLY + | ASSUME + | CHANGE + | EVAL + | FAIL + | GENERALIZE + | HAVE + | INDUCTION + | ORELSE + | REFINE + | REFLEXIVITY + | REMOVE + | REPEAT + | REWRITE + | SET + | SIMPLIFY + | SOLVE + | SYMMETRY + | TRY + | WHY3 -> + let l = steps lb in + let pe = proof_end lb in + [l], pe + | END + | ABORT + | ADMITTED -> + let pe = proof_end lb in + [], pe + | _ -> + expected "subproof, tactic or query" [] + +and subproof (lb:lexbuf): p_proofstep list = + if log_enabled() then log "%s" __FUNCTION__; + match current_token() with + | L_CU_BRACKET -> + consume_token lb; + let l = steps lb in + consume R_CU_BRACKET lb; + l + | _ -> + expected "" [L_CU_BRACKET] + +and steps 
(lb:lexbuf): p_proofstep list = + if log_enabled() then log "%s" __FUNCTION__; + match current_token() with + (*queries*) + | ASSERT _ + | COMPUTE + | DEBUG + | FLAG + | PRINT + | PROOFTERM + | PROVER + | PROVER_TIMEOUT + | SEARCH + | TYPE_QUERY + | VERBOSE + (*tactics*) + | ADMIT + | APPLY + | ASSUME + | CHANGE + | EVAL + | FAIL + | GENERALIZE + | HAVE + | INDUCTION + | ORELSE + | REFINE + | REFLEXIVITY + | REMOVE + | REPEAT + | REWRITE + | SET + | SIMPLIFY + | SOLVE + | SYMMETRY + | TRY + | WHY3 -> + let a = step lb in + let acc = list (prefix SEMICOLON step) lb in + if current_token() = SEMICOLON then consume_token lb; + a::acc + | END + | ABORT + | ADMITTED -> + [] + | _ -> + expected "tactic or query" [] + +and step (lb:lexbuf): p_proofstep = + if log_enabled() then log "%s" __FUNCTION__; + let t = tactic lb in + let l = list subproof lb in + Tactic(t, l) + +and proof_end (lb:lexbuf): p_proof_end = + if log_enabled() then log "%s" __FUNCTION__; + match current_token() with + | ABORT -> + let pos1 = current_pos() in + consume_token lb; + make_pos pos1 Syntax.P_proof_abort + | ADMITTED -> + let pos1 = current_pos() in + consume_token lb; + make_pos pos1 Syntax.P_proof_admitted + | END -> + let pos1 = current_pos() in + consume_token lb; + make_pos pos1 Syntax.P_proof_end + | _ -> + expected "" [ABORT;ADMITTED;END] + +and tactic (lb:lexbuf): p_tactic = + if log_enabled() then log "%s" __FUNCTION__; + match current_token() with + (*queries*) + | ASSERT _ + | COMPUTE + | DEBUG + | FLAG + | PRINT + | PROOFTERM + | PROVER + | PROVER_TIMEOUT + | SEARCH + | TYPE_QUERY + | VERBOSE -> + let pos1 = current_pos() in + extend_pos (*__FUNCTION__*) pos1 (P_tac_query (query lb)) + | ADMIT -> + let pos1 = current_pos() in + consume_token lb; + make_pos pos1 P_tac_admit + | APPLY -> + let pos1 = current_pos() in + consume_token lb; + let t = term lb in + extend_pos (*__FUNCTION__*) pos1 (P_tac_apply t) + | ASSUME -> + let pos1 = current_pos() in + consume_token lb; + let xs = 
nelist param lb in + extend_pos (*__FUNCTION__*) pos1 (P_tac_assume xs) + | CHANGE -> + let pos1 = current_pos() in + consume_token lb; + let t = term lb in + extend_pos (*__FUNCTION__*) pos1 (P_tac_change t) + | EVAL -> + let pos1 = current_pos() in + consume_token lb; + let t = term lb in + extend_pos (*__FUNCTION__*) pos1 (P_tac_eval t) + | FAIL -> + let pos1 = current_pos() in + consume_token lb; + make_pos pos1 P_tac_fail + | GENERALIZE -> + let pos1 = current_pos() in + consume_token lb; + let i = uid lb in + extend_pos (*__FUNCTION__*) pos1 (P_tac_generalize i) + | HAVE -> + let pos1 = current_pos() in + consume_token lb; + let i = uid lb in + consume COLON lb; + let t = term lb in + extend_pos (*__FUNCTION__*) pos1 (P_tac_have(i,t)) + | INDUCTION -> + let pos1 = current_pos() in + consume_token lb; + make_pos pos1 P_tac_induction + | ORELSE -> + let pos1 = current_pos() in + consume_token lb; + let t1 = tactic lb in + let t2 = tactic lb in + extend_pos (*__FUNCTION__*) pos1 (P_tac_orelse(t1,t2)) + | REFINE -> + let pos1 = current_pos() in + consume_token lb; + let t = term lb in + extend_pos (*__FUNCTION__*) pos1 (P_tac_refine t) + | REFLEXIVITY -> + let pos1 = current_pos() in + consume_token lb; + make_pos pos1 P_tac_refl + | REMOVE -> + let pos1 = current_pos() in + consume_token lb; + let xs = nelist uid lb in + extend_pos (*__FUNCTION__*) pos1 (P_tac_remove xs) + | REPEAT -> + let pos1 = current_pos() in + consume_token lb; + let t = tactic lb in + extend_pos (*__FUNCTION__*) pos1 (P_tac_repeat t) + | REWRITE -> + let pos1 = current_pos() in + consume_token lb; + begin + match current_token() with + | SIDE d -> + consume_token lb; + begin + match current_token() with + | DOT -> + consume_token lb; + let p = rwpatt_bracket lb in + let t = term lb in + let b = d <> Pratter.Left in + extend_pos (*__FUNCTION__*) pos1 (P_tac_rewrite(b,Some p,t)) + | _ -> + let t = term lb in + let b = d <> Pratter.Left in + extend_pos (*__FUNCTION__*) pos1 
(P_tac_rewrite(b,None,t)) + end + | DOT -> + consume_token lb; + let p = rwpatt_bracket lb in + let t = term lb in + extend_pos (*__FUNCTION__*) pos1 (P_tac_rewrite(true,Some p,t)) + | _ -> + let t = term lb in + extend_pos (*__FUNCTION__*) pos1 (P_tac_rewrite(true,None,t)) + end + | SET -> + let pos1 = current_pos() in + consume_token lb; + let i = uid lb in + consume ASSIGN lb; + let t = term lb in + extend_pos (*__FUNCTION__*) pos1 (P_tac_set(i,t)) + | SIMPLIFY -> + let pos1 = current_pos() in + consume_token lb; + begin + match current_token() with + | UID _ + | QID _ -> + extend_pos (*__FUNCTION__*) pos1 (P_tac_simpl(SimpSym(qid lb))) + | RULE -> + consume_token lb; + begin + match current_token() with + | SWITCH false -> + consume_token lb; + extend_pos (*__FUNCTION__*) pos1 (P_tac_simpl SimpBetaOnly) + | _ -> expected "" [SWITCH false] + end + | _ -> + extend_pos (*__FUNCTION__*) pos1 (P_tac_simpl SimpAll) + end + | SOLVE -> + let pos1 = current_pos() in + consume_token lb; + make_pos pos1 P_tac_solve + | SYMMETRY -> + let pos1 = current_pos() in + consume_token lb; + make_pos pos1 P_tac_sym + | TRY -> + let pos1 = current_pos() in + consume_token lb; + let t = tactic lb in + extend_pos (*__FUNCTION__*) pos1 (P_tac_try t) + | WHY3 -> + let pos1 = current_pos() in + consume_token lb; + begin + match current_token() with + | STRINGLIT s -> consume_token lb; + extend_pos (*__FUNCTION__*) pos1 (P_tac_why3 (Some s)) + | _ -> + make_pos pos1 (P_tac_why3 None) + end + | _ -> + expected "tactic" [] + +and rwpatt_content (lb:lexbuf): p_rwpatt = + if log_enabled() then log "%s" __FUNCTION__; + match current_token() with + (* bterm *) + | BACKQUOTE + | PI + | LAMBDA + | LET + (* aterm *) + | UID _ + | QID _ + | UID_EXPL _ + | QID_EXPL _ + | UNDERSCORE + | TYPE_TERM + | UID_META _ + | UID_PATT _ + | L_PAREN + | L_SQ_BRACKET + | INT _ + | STRINGLIT _ -> + let pos1 = current_pos() in + let t1 = term lb in + begin + match current_token() with + | IN -> + consume_token lb; + let t2 = term lb 
in + begin + match current_token() with + | IN -> + consume_token lb; + let t3 = term lb in + let x = ident_of_term pos1 t2 in + extend_pos (*__FUNCTION__*) pos1 + (Rw_TermInIdInTerm(t1,(x,t3))) + | _ -> + let x = ident_of_term pos1 t1 in + extend_pos (*__FUNCTION__*) pos1 (Rw_IdInTerm(x,t2)) + end + | AS -> + consume_token lb; + let t2 = term lb in + begin + match current_token() with + | IN -> + consume_token lb; + let t3 = term lb in + let x = ident_of_term pos1 t2 in + extend_pos (*__FUNCTION__*) pos1 + (Rw_TermAsIdInTerm(t1,(x,t3))) + | _ -> + expected "" [IN] + end + | _ -> + extend_pos (*__FUNCTION__*) pos1 (Rw_Term(t1)) + end + | IN -> + let pos1 = current_pos() in + consume_token lb; + let t1 = term lb in + begin + match current_token() with + | IN -> + consume_token lb; + let t2 = term lb in + let x = ident_of_term pos1 t1 in + extend_pos (*__FUNCTION__*) pos1 (Rw_InIdInTerm(x,t2)) + | _ -> + extend_pos (*__FUNCTION__*) pos1 (Rw_InTerm(t1)) + end + | _ -> + expected "term or keyword \"in\"" [] + +and rwpatt_bracket (lb:lexbuf): p_rwpatt = + if log_enabled() then log "%s" __FUNCTION__; + match current_token() with + | L_SQ_BRACKET -> + consume_token lb; + let p = rwpatt_content lb in + consume R_SQ_BRACKET lb; (*add info on opening bracket*) + p + | _ -> + expected "" [L_SQ_BRACKET] + +and rwpatt (lb:lexbuf): p_rwpatt = + if log_enabled() then log "%s" __FUNCTION__; + match current_token() with + | DOT -> + consume_token lb; + rwpatt_bracket lb + | _ -> + expected "" [DOT] + +(* terms *) + +and params (lb:lexbuf): p_params = + if log_enabled() then log "%s" __FUNCTION__; + match current_token() with + | L_PAREN -> + consume_token lb; + let ps = nelist param lb in + begin + match current_token() with + | COLON -> + consume_token lb; + let typ = term lb in + consume R_PAREN lb; + ps, Some typ, false + | R_PAREN -> + consume_token lb; + ps, None, false + | _ -> + expected "" [COLON;R_PAREN] + end + | L_SQ_BRACKET -> + consume_token lb; + let ps = nelist param 
lb in + begin + match current_token() with + | COLON -> + consume_token lb; + let typ = term lb in + consume R_SQ_BRACKET lb; + ps, Some typ, true + | R_SQ_BRACKET -> + consume_token lb; + ps, None, true + | _ -> + expected "" [COLON;R_SQ_BRACKET] + end + | _ -> + let x = param lb in + [x], None, false + +and term (lb:lexbuf): p_term = + if log_enabled() then log "%s" __FUNCTION__; + match current_token() with + (* bterm *) + | BACKQUOTE + | PI + | LAMBDA + | LET -> + bterm lb + (* aterm *) + | UID _ + | QID _ + | UID_EXPL _ + | QID_EXPL _ + | UNDERSCORE + | TYPE_TERM + | UID_META _ + | UID_PATT _ + | L_PAREN + | L_SQ_BRACKET + | INT _ + | STRINGLIT _ -> + let pos1 = current_pos() in + let h = aterm lb in + app pos1 h lb + | _ -> + expected "term" [] + +and app (pos1:position * position) (t: p_term) (lb:lexbuf): p_term = + if log_enabled() then log "%s" __FUNCTION__; + match current_token() with + (* aterm *) + | UID _ + | QID _ + | UID_EXPL _ + | QID_EXPL _ + | UNDERSCORE + | TYPE_TERM + | UID_META _ + | UID_PATT _ + | L_PAREN + | L_SQ_BRACKET + | INT _ + | STRINGLIT _ -> + let u = aterm lb in + app pos1 (extend_pos (*__FUNCTION__*) pos1 (P_Appl(t,u))) lb + (* bterm *) + | BACKQUOTE + | PI + | LAMBDA + | LET -> + let u = bterm lb in + extend_pos (*__FUNCTION__*) pos1 (P_Appl(t,u)) + (* other cases *) + | ARROW -> + consume_token lb; + let u = term lb in + extend_pos (*__FUNCTION__*) pos1 (P_Arro(t,u)) + | _ -> + t + +and bterm (lb:lexbuf): p_term = + if log_enabled() then log "%s" __FUNCTION__; + match current_token() with + | BACKQUOTE -> + let pos1 = current_pos() in + consume_token lb; + let q = term_id lb in + let b = binder lb in + let b = extend_pos (*__FUNCTION__*) pos1 (P_Abst(fst b, snd b)) in + extend_pos (*__FUNCTION__*) pos1 (P_Appl(q, b)) + | PI -> + let pos1 = current_pos() in + consume_token lb; + let b = binder lb in + extend_pos (*__FUNCTION__*) pos1 (P_Prod(fst b, snd b)) + | LAMBDA -> + let pos1 = current_pos() in + consume_token lb; + let b = 
binder lb in + extend_pos (*__FUNCTION__*) pos1 (P_Abst(fst b, snd b)) + | LET -> + let pos1 = current_pos() in + consume_token lb; + let x = uid lb in + let a = list params lb in + begin + match current_token() with + | COLON -> + consume_token lb; + let b = Some (term lb) in + consume ASSIGN lb; + let t = term lb in + consume IN lb; + let u = term lb in + extend_pos (*__FUNCTION__*) pos1 (P_LLet(x, a, b, t, u)) + | _ -> + let b = None in + consume ASSIGN lb; + let t = term lb in + consume IN lb; + let u = term lb in + extend_pos (*__FUNCTION__*) pos1 (P_LLet(x, a, b, t, u)) + end + | _ -> + expected "" [BACKQUOTE;PI;LAMBDA;LET] + +and aterm (lb:lexbuf): p_term = + if log_enabled() then log "%s" __FUNCTION__; + match current_token() with + | UID _ + | QID _ + | UID_EXPL _ + | QID_EXPL _ -> + term_id lb + | UNDERSCORE -> + let pos1 = current_pos() in + consume_token lb; + make_pos pos1 P_Wild + | TYPE_TERM -> + let pos1 = current_pos() in + consume_token lb; + make_pos pos1 P_Type + | UID_META s -> + let pos1 = current_pos() in + consume_token lb; + let i = make_pos pos1 s in + begin + match current_token() with + | DOT -> + consume_token lb; + extend_pos (*__FUNCTION__*) pos1 + (P_Meta(i,Array.of_list (env lb))) + | _ -> + {i with elt=P_Meta(i,[||])} + end + | UID_PATT s -> + let pos1 = current_pos() in + consume_token lb; + let i = + if s = "_" then None else Some(make_pos pos1 s) in + begin + match current_token() with + | DOT -> + consume_token lb; + extend_pos (*__FUNCTION__*) pos1 + (P_Patt(i, Some(Array.of_list (env lb)))) + | _ -> + make_pos pos1 (P_Patt(i, None)) + end + | L_PAREN -> + let pos1 = current_pos() in + consume_token lb; + let t = term lb in + consume R_PAREN lb; + extend_pos (*__FUNCTION__*) pos1 (P_Wrap(t)) + | L_SQ_BRACKET -> + let pos1 = current_pos() in + consume_token lb; + let t = term lb in + consume R_SQ_BRACKET lb; + extend_pos (*__FUNCTION__*) pos1 (P_Expl(t)) + | INT n -> + let pos1 = current_pos() in + consume_token lb; + make_pos 
pos1 (P_NLit n) + | STRINGLIT s -> + let pos1 = current_pos() in + consume_token lb; + make_pos pos1 (P_SLit s) + | _ -> + expected "identifier, \"_\", or term between parentheses or square \ + brackets" [] + +and env (lb:lexbuf): p_term list = + if log_enabled() then log "%s" __FUNCTION__; + match current_token() with + | L_SQ_BRACKET -> + consume_token lb; + begin + match current_token() with + | R_SQ_BRACKET -> + consume_token lb; + [] + | _ -> + let t = term lb in + let ts = list (prefix SEMICOLON term) lb in + consume R_SQ_BRACKET lb; + t::ts + end + | _ -> + expected "" [L_SQ_BRACKET] + +and binder (lb:lexbuf): p_params list * p_term = + if log_enabled() then log "%s" __FUNCTION__; + match current_token() with + | UID _ + | UNDERSCORE -> + let s = param lb in + begin + match current_token() with + | UID _ + | UNDERSCORE + | L_PAREN + | L_SQ_BRACKET -> + let ps = list params lb in + consume COMMA lb; + let p = [s], None, false in + p::ps, term lb + | COMMA -> + consume_token lb; + let p = [s], None, false in + [p], term lb + | COLON -> + consume_token lb; + let a = term lb in + consume COMMA lb; + let p = [s], Some a, false in + [p], term lb + | _ -> + expected "parameter list" + [UID"";UNDERSCORE;L_PAREN;L_SQ_BRACKET;COMMA] + end + | L_PAREN -> + let ps = nelist params lb in + begin + match current_token() with + | COMMA -> + consume_token lb; + ps, term lb + | _ -> + expected "" [COMMA] + end + | L_SQ_BRACKET -> + let ps = nelist params lb in + begin + match current_token() with + | COMMA -> + consume_token lb; + ps, term lb + | _ -> + expected "" [COMMA] + end + | _ -> + expected "" [UID"";UNDERSCORE;L_PAREN;L_SQ_BRACKET] + +(* search *) + +and generalize (lb:lexbuf): bool = + if log_enabled() then log "%s" __FUNCTION__; + match current_token() with + | GENERALIZE -> consume_token lb; true + | _ -> false + +and relation (lb:lexbuf): relation option = + if log_enabled() then log "%s" __FUNCTION__; + match current_token() with + | UID "=" -> consume_token lb; 
Some Exact + | UID ">" -> consume_token lb; Some Inside + | UID ("≥"|">=") -> consume_token lb; None + | _ -> expected "\">\", \"=\", \"≥\",\">=\"" [] + +and where (lb:lexbuf): bool * relation option = + if log_enabled() then log "%s" __FUNCTION__; + let r = relation lb in + let g = generalize lb in + g,r + +and asearch (lb:lexbuf): search = + if log_enabled() then log "%s" __FUNCTION__; + match current_token() with + | UID "name" -> + begin + consume_token lb; + match current_token() with + | UID "=" -> + consume_token lb; + QBase(QName (uid lb).elt) + | _ -> expected "\"=\"" [] + end + | TYPE_QUERY -> + begin + consume_token lb; + match current_token() with + | UID ("≥"|">=") -> + consume_token lb; + let g = generalize lb in + let t = aterm lb in + QBase(QSearch(t,g,Some(QType None))) + | _ -> expected "\"≥\",\">=\"" [] + end + | UID "anywhere" -> + begin + consume_token lb; + match current_token() with + | UID ("≥"|">=") -> + consume_token lb; + let g = generalize lb in + let t = aterm lb in + QBase(QSearch(t,g,None)) + | _ -> expected "\"≥\",\">=\"" [] + end + | RULE -> + consume_token lb; + let r = relation lb in + let g = generalize lb in + let t = aterm lb in + QBase(QSearch(t,g,Some(QXhs(r,None)))) + | UID "spine" -> consume_token lb; + let r = relation lb in + let g = generalize lb in + let t = aterm lb in + QBase(QSearch(t,g,Some(QType(Some(Spine r))))) + | UID "concl" -> consume_token lb; + let r = relation lb in + let g = generalize lb in + let t = aterm lb in + QBase(QSearch(t,g,Some(QType(Some(Conclusion r))))) + | UID "hyp" -> consume_token lb; + let r = relation lb in + let g = generalize lb in + let t = aterm lb in + QBase(QSearch(t,g,Some(QType(Some(Hypothesis r))))) + | UID "lhs" -> consume_token lb; + let r = relation lb in + let g = generalize lb in + let t = aterm lb in + QBase(QSearch(t,g,Some(QXhs(r,Some Lhs)))) + | UID "rhs" -> consume_token lb; + let r = relation lb in + let g = generalize lb in + let t = aterm lb in + QBase(QSearch(t,g,Some(QXhs(r,Some Rhs)))) + | L_PAREN -> + consume_token lb; + let q = search lb in + consume 
R_PAREN lb; + q + | _ -> + expected "name, anywhere, rule, lhs, rhs, type, concl, hyp, spine" [] + +and csearch (lb:lexbuf): search = + if log_enabled() then log "%s" __FUNCTION__; + let aq = asearch lb in + match current_token() with + | COMMA -> + let aqs = list (prefix COMMA asearch) lb in + List.fold_left (fun x aq -> QOp(x,Intersect,aq)) aq aqs + | _ -> + aq + +and ssearch (lb:lexbuf): search = + if log_enabled() then log "%s" __FUNCTION__; + let cq = csearch lb in + match current_token() with + | SEMICOLON -> + let cqs = list (prefix SEMICOLON csearch) lb in + List.fold_left (fun x cq -> QOp(x,Union,cq)) cq cqs + | _ -> + cq + +and search (lb:lexbuf): search = + if log_enabled() then log "%s" __FUNCTION__; + let q = ssearch lb in + let qids = list (prefix VBAR qid) lb in + let path_of_qid qid = + let p,n = qid.elt in + if p = [] then n + else Format.asprintf "%a.%a" Print.path p Print.uid n + in + List.fold_left (fun x qid -> QFilter(x,Path(path_of_qid qid))) q qids + +let command (lb:lexbuf): p_command = + if log_enabled() then log "------------------- start reading command"; + consume_token lb; + if current_token() = EOF then raise End_of_file + else + let c = command (Lexing.dummy_pos,Lexing.dummy_pos) [] lb in + match current_token() with + | SEMICOLON -> c + | _ -> expected "" [SEMICOLON] diff --git a/src/parsing/lpParser.mly b/src/parsing/lpParser.mly deleted file mode 100644 index d4e720f0d..000000000 --- a/src/parsing/lpParser.mly +++ /dev/null @@ -1,514 +0,0 @@ -(** Lambdapi parser, using the parser generator Menhir. 
*) - -%{ - open Lplib - open Common open Pos - open Syntax - open Core - - let qid_of_path lps = function - | [] -> assert false - | id::mp -> make_pos lps (List.rev mp, id) - - let make_abst startpos ps t endpos = - if ps = [] then t else make_pos (startpos,endpos) (P_Abst(ps,t)) - - let make_prod startpos ps t endpos = - if ps = [] then t else make_pos (startpos,endpos) (P_Prod(ps,t)) -%} - -// end of file - -%token EOF - -// keywords in alphabetical order - -%token ABORT -%token ADMIT -%token ADMITTED -%token APPLY -%token AS -%token ASSERT -%token ASSOCIATIVE -%token ASSUME -%token BEGIN -%token BUILTIN -%token CHANGE -%token COERCE_RULE -%token COMMUTATIVE -%token COMPUTE -%token CONSTANT -%token DEBUG -%token END -%token EVAL -%token FAIL -%token FLAG -%token GENERALIZE -%token HAVE -%token IN -%token INDUCTION -%token INDUCTIVE -%token INFIX -%token INJECTIVE -%token LET -%token NOTATION -%token OPAQUE -%token OPEN -%token ORELSE -%token POSTFIX -%token PREFIX -%token PRINT -%token PRIVATE -%token PROOFTERM -%token PROTECTED -%token PROVER -%token PROVER_TIMEOUT -%token QUANTIFIER -%token REFINE -%token REFLEXIVITY -%token REMOVE -%token REPEAT -%token REQUIRE -%token REWRITE -%token RULE -%token SEARCH -%token SEQUENTIAL -%token SET -%token SIMPLIFY -%token SOLVE -%token SYMBOL -%token SYMMETRY -%token TRY -%token TYPE_QUERY -%token TYPE_TERM -%token UNIF_RULE -%token VERBOSE -%token WHY3 -%token WITH - -// other tokens - -%token DEBUG_FLAGS -%token INT -%token FLOAT -%token SIDE -%token STRINGLIT -%token SWITCH - -// symbols - -%token ARROW -%token ASSIGN -%token BACKQUOTE -%token COMMA -%token COLON -%token DOT -%token EQUIV -%token HOOK_ARROW -%token LAMBDA -%token L_CU_BRACKET -%token L_PAREN -%token L_SQ_BRACKET -%token PI -%token R_CU_BRACKET -%token R_PAREN -%token R_SQ_BRACKET -%token SEMICOLON -%token TURNSTILE -%token UNDERSCORE -%token VBAR - -// identifiers - -%token UID -%token UID_EXPL -%token UID_META -%token UID_PATT -%token QID -%token 
QID_EXPL - -// types - -%start command -%start qid -%start qid_alone -%start term_alone -%start rw_patt_spec_alone -%start search_query_alone - -// patch (see https://github.com/Deducteam/lambdapi/pull/798) -%type equation - -%% - -term_alone: - | t=term EOF - { t } - -rw_patt_spec_alone: - | p=rw_patt_spec EOF - { p } - -qid_alone: - | q=qid EOF - { q } - -search_query_alone: - | q=search_query EOF - { q } - -command: - | OPAQUE i=qid SEMICOLON { make_pos $sloc (P_opaque i) } - | REQUIRE l=list(path) SEMICOLON - { make_pos $sloc (P_require(None,l)) } - | REQUIRE b=open_cmd l=list(path) SEMICOLON - { make_pos $sloc (P_require(Some b,l)) } - | REQUIRE p=path AS i=uid SEMICOLON - { make_pos $sloc (P_require_as(p,i)) } - | b=open_cmd l=list(path) SEMICOLON - { make_pos $sloc (P_open(b,l)) } - | ms=modifier* SYMBOL s=uid al=param_list* COLON a=term - po=proof? SEMICOLON - { let sym = - {p_sym_mod=ms; p_sym_nam=s; p_sym_arg=al; p_sym_typ=Some(a); - p_sym_trm=None; p_sym_def=false; p_sym_prf=po} - in make_pos $sloc (P_symbol(sym)) } - | ms=modifier* SYMBOL s=uid al=param_list* ao=preceded(COLON, term)? - ASSIGN tp=term_proof SEMICOLON - { let sym = - {p_sym_mod=ms; p_sym_nam=s; p_sym_arg=al; p_sym_typ=ao; - p_sym_trm=fst tp; p_sym_prf=snd tp; p_sym_def=true} - in make_pos $sloc (P_symbol(sym)) } - | exp=exposition? 
xs=param_list* INDUCTIVE - is=separated_nonempty_list(WITH, inductive) SEMICOLON - { make_pos $sloc (P_inductive(Option.to_list exp,xs,is)) } - | RULE rs=separated_nonempty_list(WITH, rule) SEMICOLON - { make_pos $sloc (P_rules(rs)) } - | BUILTIN s=STRINGLIT ASSIGN i=qid SEMICOLON - { make_pos $loc (P_builtin(s,i)) } - | COERCE_RULE r=rule SEMICOLON { make_pos $loc (P_coercion r) } - | UNIF_RULE r=unif_rule SEMICOLON { make_pos $loc (P_unif_rule(r)) } - | NOTATION i=qid n=notation SEMICOLON - { make_pos $loc (P_notation(i,n)) } - | q=query SEMICOLON { make_pos $sloc (P_query(q)) } - | EOF { raise End_of_file } - -open_cmd: - | OPEN { false } - | PRIVATE OPEN { true } - -query: - | k=ASSERT ps=param_list* TURNSTILE t=term COLON a=term - { let t = make_abst $startpos(ps) ps t $endpos(t) in - let a = make_prod $startpos(ps) ps a $endpos(a) in - make_pos $sloc (P_query_assert(k, P_assert_typing(t, a))) } - | k=ASSERT ps=param_list* TURNSTILE t=term EQUIV u=term - { let t = make_abst $startpos(ps) ps t $endpos(t) in - let u = make_abst $startpos(ps) ps u $endpos(u) in - make_pos $sloc (P_query_assert(k, P_assert_conv(t, u))) } - | COMPUTE t=term - { make_pos $sloc (P_query_normalize(t, {strategy=SNF; steps=None})) } - | PRINT i=qid_or_rule? 
{ make_pos $sloc (P_query_print i) } - | PROOFTERM { make_pos $sloc P_query_proofterm } - | DEBUG { make_pos $sloc (P_query_debug(true,"")) } - | DEBUG fl=DEBUG_FLAGS - { let (b, s) = fl in make_pos $sloc (P_query_debug(b, s)) } - | FLAG { make_pos $sloc (P_query_flag("",true)) } - | FLAG s=STRINGLIT b=SWITCH { make_pos $sloc (P_query_flag(s,b)) } - | PROVER s=STRINGLIT { make_pos $sloc (P_query_prover(s)) } - | PROVER_TIMEOUT n=INT - { make_pos $sloc (P_query_prover_timeout n) } - | VERBOSE n=INT { make_pos $sloc (P_query_verbose n) } - | TYPE_QUERY t=term - { make_pos $sloc (P_query_infer(t, {strategy=NONE; steps=None}))} - | SEARCH s=STRINGLIT - { make_pos $sloc (P_query_search s) } - -qid_or_rule: - | i=qid { i } - | UNIF_RULE - { make_pos $sloc (Sign.Ghost.path, Unif_rule.equiv.sym_name) } - | COERCE_RULE - { make_pos $sloc (Sign.Ghost.path, Coercion.coerce.sym_name) } - -path: - | UID { LpLexer.syntax_error $sloc "Unqualified identifier" } - | p=QID { make_pos $sloc (List.rev p) } - -modifier: - | d=ioption(SIDE) ASSOCIATIVE - { let b = match d with Some Pratter.Left -> true | _ -> false in - make_pos $sloc (P_prop (Term.Assoc b)) } - | COMMUTATIVE { make_pos $sloc (P_prop Term.Commu) } - | CONSTANT { make_pos $sloc (P_prop Term.Const) } - | INJECTIVE { make_pos $sloc (P_prop Term.Injec) } - | OPAQUE { make_pos $sloc P_opaq } - | SEQUENTIAL { make_pos $sloc (P_mstrat Term.Sequen) } - | exp=exposition { exp } - -exposition: -| PRIVATE { make_pos $sloc (P_expo Term.Privat) } -| PROTECTED { make_pos $sloc (P_expo Term.Protec) } - -uid: s=UID { make_pos $sloc s} - -param_list: - | x=param { ([x], None, false) } - | L_PAREN xs=param+ COLON a=term R_PAREN { (xs, Some(a), false) } - | L_SQ_BRACKET xs=param+ a=preceded(COLON, term)? 
R_SQ_BRACKET - { (xs, a, true) } - -param: - | s=uid { Some s } - | UNDERSCORE { None } - -term: - | t=bterm { t } - | t=saterm { t } - | t=saterm u=bterm { make_pos $sloc (P_Appl(t,u)) } - | t=saterm ARROW u=term { make_pos $sloc (P_Arro(t, u)) } - -bterm: - | BACKQUOTE q=term_id b=binder - { let b = make_pos $loc(b) (P_Abst(fst b, snd b)) in - make_pos $sloc (P_Appl(q, b)) } - | PI b=binder { make_pos $sloc (P_Prod(fst b, snd b)) } - | LAMBDA b=binder { make_pos $sloc (P_Abst(fst b, snd b)) } - | LET x=uid a=param_list* b=preceded(COLON, term)? ASSIGN t=term IN u=term - { make_pos $sloc (P_LLet(x, a, b, t, u)) } - -saterm: - | t=saterm u=aterm { make_pos $sloc (P_Appl(t,u)) } - | t=aterm { t } - -aterm: - | ti=term_id { ti } - | UNDERSCORE { make_pos $sloc P_Wild } - | TYPE_TERM { make_pos $sloc P_Type } - | s=UID_META ts=env? - { let i = make_pos $loc(s) s - and ts = match ts with None -> [||] | Some ts -> Array.of_list ts in - make_pos $sloc (P_Meta(i,ts)) } - | s=UID_PATT e=env? - { let i = if s = "_" then None else Some(make_pos $loc(s) s) in - make_pos $sloc (P_Patt(i, Option.map Array.of_list e)) } - | L_PAREN t=term R_PAREN { make_pos $sloc (P_Wrap(t)) } - | L_SQ_BRACKET t=term R_SQ_BRACKET { make_pos $sloc (P_Expl(t)) } - | n=INT { make_pos $sloc (P_NLit n) } - | s=STRINGLIT { make_pos $sloc (P_SLit s) } - -env: DOT L_SQ_BRACKET ts=separated_list(SEMICOLON, term) R_SQ_BRACKET { ts } - -term_id: - | i=qid { make_pos $sloc (P_Iden(i, false)) } - | i=qid_expl { make_pos $sloc (P_Iden(i, true)) } - -qid: - | s=UID { make_pos $sloc ([], s) } - | p=QID { qid_of_path $sloc p } - -qid_expl: - | s=UID_EXPL { make_pos $sloc ([], s) } - | p=QID_EXPL { qid_of_path $sloc p } - -binder: - | ps=param_list+ COMMA t=term { (ps, t) } - | p=param COLON a=term COMMA t=term { ([[p], Some a, false], t) } - -term_proof: - | t=term { Some t, None } - | p=proof { None, Some p } - | t=term p=proof { Some t, Some p } - -proof: - | BEGIN l=subproof+ pe=proof_end { l, pe } - | BEGIN 
l=loption(proof_steps) pe=proof_end { [l], pe } - -subproof: L_CU_BRACKET l=loption(proof_steps) R_CU_BRACKET { l } - -proof_steps: - | a=proof_step { [a] } - | a=proof_step SEMICOLON { [a] } - | a=proof_step SEMICOLON l=proof_steps { a :: l } - -proof_step: t=tactic l=subproof* { Tactic(t, l) } - -proof_end: - | ABORT { make_pos $sloc Syntax.P_proof_abort } - | ADMITTED { make_pos $sloc Syntax.P_proof_admitted } - | END { make_pos $sloc Syntax.P_proof_end } - -tactic: - | q=query { make_pos $sloc (P_tac_query q) } - | ADMIT { make_pos $sloc P_tac_admit } - | APPLY t=term { make_pos $sloc (P_tac_apply t) } - | ASSUME xs=param+ { make_pos $sloc (P_tac_assume xs) } - | CHANGE t=term { make_pos $sloc (P_tac_change t) } - | EVAL t=term { make_pos $sloc (P_tac_eval t) } - | FAIL { make_pos $sloc P_tac_fail } - | GENERALIZE i=uid { make_pos $sloc (P_tac_generalize i) } - | HAVE i=uid COLON t=term { make_pos $sloc (P_tac_have(i,t)) } - | INDUCTION { make_pos $sloc P_tac_induction } - | ORELSE t1=tactic t2=tactic { make_pos $sloc (P_tac_orelse(t1,t2)) } - | REFINE t=term { make_pos $sloc (P_tac_refine t) } - | REFLEXIVITY { make_pos $sloc P_tac_refl } - | REMOVE xs=uid+ { make_pos $sloc (P_tac_remove xs) } - | REPEAT t=tactic { make_pos $sloc (P_tac_repeat t) } - | REWRITE d=SIDE? p=rw_patt_spec? t=term - { let b = match d with Some Pratter.Left -> false | _ -> true in - make_pos $sloc (P_tac_rewrite(b,p,t)) } - | SET i=uid ASSIGN t=term { make_pos $sloc (P_tac_set(i,t)) } - | SIMPLIFY { make_pos $sloc (P_tac_simpl SimpAll) } - | SIMPLIFY i=qid { make_pos $sloc (P_tac_simpl (SimpSym i)) } - | SIMPLIFY RULE s=SWITCH - { if s then LpLexer.syntax_error $sloc "Invalid tactic" - else make_pos $sloc (P_tac_simpl SimpBetaOnly) } - | SOLVE { make_pos $sloc P_tac_solve } - | SYMMETRY { make_pos $sloc P_tac_sym } - | TRY t=tactic { make_pos $sloc (P_tac_try t) } - | WHY3 s=STRINGLIT? 
{ make_pos $sloc (P_tac_why3 s) } - -rw_patt: - | t=term { make_pos $sloc (Rw_Term(t)) } - | IN t=term { make_pos $sloc (Rw_InTerm(t)) } - | IN x=uid IN t=term { make_pos $sloc (Rw_InIdInTerm(x, t)) } - | u=term IN x=term t=preceded(IN, term)? - { let ident_of_term {elt; _} = - match elt with - | P_Iden({elt=([], x); pos}, _) -> Pos.make pos x - | _ -> LpLexer.syntax_error $sloc "Not an identifier" - in - match t with - | Some(t) -> make_pos $sloc (Rw_TermInIdInTerm(u, (ident_of_term x, t))) - | None -> make_pos $sloc (Rw_IdInTerm(ident_of_term u, x)) - } - | u=term AS x=uid IN t=term { make_pos $sloc (Rw_TermAsIdInTerm(u,(x,t))) } - -rw_patt_spec: DOT L_SQ_BRACKET p=rw_patt R_SQ_BRACKET { p } - -inductive: i=uid ps=param_list* COLON t=term ASSIGN - VBAR? l=separated_list(VBAR, constructor) - { let t = make_prod $startpos(ps) ps t $endpos(t) in - make_pos $sloc (i,t,l) } - -constructor: i=uid ps=param_list* COLON t=term - { (i, make_prod $startpos(ps) ps t $endpos(t)) } - -rule: l=term HOOK_ARROW r=term { make_pos $sloc (l, r) } - -unif_rule: e=equation HOOK_ARROW - L_SQ_BRACKET es=separated_nonempty_list(SEMICOLON, equation) R_SQ_BRACKET - { (* FIXME: give sensible positions instead of Pos.none and P.appl. *) - let equiv = P.qiden Sign.Ghost.path Unif_rule.equiv.sym_name in - let cons = P.qiden Sign.Ghost.path Unif_rule.cons.sym_name in - let mk_equiv (t, u) = P.appl (P.appl equiv t) u in - let lhs = mk_equiv e in - let es = List.rev_map mk_equiv es in - let (en, es) = List.(hd es, tl es) in - let cat e es = P.appl (P.appl cons e) es in - let rhs = List.fold_right cat es en in - make_pos $sloc (lhs, rhs) } - -equation: l=term EQUIV r=term { (l, r) } - -notation: - | INFIX a=SIDE? p=float_or_int - { Term.Infix(Option.get Pratter.Neither a, p) } - | POSTFIX p=float_or_int { Term.Postfix(p) } - | PREFIX p=float_or_int { Term.Prefix(p) } - | QUANTIFIER { Term.Quant } - -float_or_int: - | s=FLOAT { s } - | s=INT { s } - -maybe_generalize: - | g = GENERALIZE? 
- { g <> None } - -where: - | u = UID g=maybe_generalize - { g, match u with - | "=" -> Some SearchQuerySyntax.Exact - | ">" -> Some SearchQuerySyntax.Inside - | "≥" - | ">=" -> None - | _ -> - LpLexer.syntax_error $sloc - "Only \">\", \"=\", \"≥\" and \">=\" accepted" - } - -asearch_query: - (* "type" is a keyword... *) - | TYPE_QUERY gw=where t=aterm - { let g,w = gw in - if w <> None then - LpLexer.syntax_error $sloc - "Only \"≥\" and \">=\" accepted for \"type\"" - else - SearchQuerySyntax.QBase(QSearch(t,g,Some (QType None))) } - | RULE gw=where t=aterm - { let g,w = gw in - SearchQuerySyntax.QBase(QSearch(t,g,Some (QXhs(w,None)))) } - | k=UID gw=where t=aterm - { let open SearchQuerySyntax in - let g,w = gw in - match k,t.elt with - | "name",P_Iden(id,false) -> - assert (fst id.elt = []) ; - if w <> Some Exact then - LpLexer.syntax_error $sloc - "Only \"=\" accepted for \"name\"" - else if g = true then - LpLexer.syntax_error $sloc - "\"generalize\" cannot be used with \"name\"" - else - QBase(QName (snd id.elt)) - | "name",_ -> - LpLexer.syntax_error $sloc "Path prefix expected after \"name:\"" - | "anywhere",_ -> - if w <> None then - LpLexer.syntax_error $sloc - "Only \"≥\" and \">=\" accepted for \"anywhere\"" - else - QBase(QSearch(t,g,None)) - | "spine",_ -> - QBase(QSearch(t,g,Some (QType (Some (Spine w))))) - | "concl",_ -> - QBase(QSearch(t,g,Some (QType (Some (Conclusion w))))) - | "hyp",_ -> - QBase(QSearch(t,g,Some (QType (Some (Hypothesis w))))) - | "lhs",_ -> - QBase(QSearch(t,g,Some (QXhs(w,Some Lhs)))) - | "rhs",_ -> - QBase(QSearch(t,g,Some (QXhs(w,Some Rhs)))) - | _,_ -> - LpLexer.syntax_error $sloc ("Unknown keyword: " ^ k) - } - | L_PAREN q=search_query R_PAREN - { q } - -csearch_query: - | q=asearch_query - { q } - | q1=csearch_query COMMA q2=asearch_query - { SearchQuerySyntax.QOpp (q1,SearchQuerySyntax.Intersect,q2) } - -ssearch_query: - | q=csearch_query - { q } - | q1=ssearch_query SEMICOLON q2=csearch_query - { SearchQuerySyntax.QOpp 
(q1,SearchQuerySyntax.Union,q2) } - -search_query: - | q=ssearch_query - { q } - | q=search_query VBAR qid=qid - { let p,n = qid.elt in - let path = - if p = [] then n - else - Format.asprintf "%a.%a" Core.Print.path p Core.Print.uid n in - SearchQuerySyntax.QFilter (q,Path path) } - -%% diff --git a/src/parsing/parser.ml b/src/parsing/parser.ml index af8a97856..949117101 100644 --- a/src/parsing/parser.ml +++ b/src/parsing/parser.ml @@ -7,6 +7,8 @@ open Lplib open Base open Common +open Syntax +open Lexing (** [parser_fatal pos fmt] is a wrapper for [Error.fatal] that enforces that the error has an attached source code position. *) @@ -15,199 +17,178 @@ let parser_fatal : Pos.pos -> ('a,'b) koutfmt -> 'a = fun pos fmt -> (** Module type of a parser. *) module type PARSER = sig + type lexbuf - val parse : in_channel -> Syntax.ast - (** [parse inchan] returns a stream of commands parsed from - channel [inchan]. Commands are parsed lazily and the channel is + val parse_lexbuf : lexbuf -> ast + (** [parse_lexbuf lb] is the same as [parse_string] but with an already + created lexbuf. *) + + val parse_in_channel : string -> in_channel -> ast + (** [parse f ic] returns a stream of commands parsed from channel [ic] + created from file [f]. Commands are parsed lazily and the channel is closed once all entries are parsed. *) - val parse_file : string -> Syntax.ast - (** [parse_file fname] returns a stream of parsed commands of file - [fname]. Commands are parsed lazily. *) + val parse_file : string -> ast + (** [parse_file f] returns a stream of parsed commands of file [f]. Commands + are parsed lazily. *) - val parse_string : string -> string -> Syntax.ast + val parse_string : string -> string -> ast (** [parse_string f s] returns a stream of parsed commands from string [s] which comes from file [f] ([f] can be anything). 
*) - val parse_from_lexbuf : lexbuf -> Syntax.ast - (** [parse_from_lexbuf lexbuf] is the same than [parse_string] but with an - already created lexbuf *) - end -module Lp : -sig - include PARSER with type lexbuf := Sedlexing.lexbuf +(* defined in OCaml >= 4.11 only *) +let set_filename (lb:lexbuf) (fname:string): unit = + lb.lex_curr_p <- {lb.lex_curr_p with pos_fname = fname} - val parse_term : in_channel -> Syntax.p_term Stream.t - (** [parse inchan] returns a stream of terms parsed from - channel [inchan]. Terms are parsed lazily and the channel is - closed once all entries are parsed. *) +(** Parsing dk syntax. *) +module Dk : PARSER with type lexbuf := Lexing.lexbuf = struct - val parse_term_file : string -> Syntax.p_term Stream.t - (** [parse_file fname] returns a stream of parsed terms of file - [fname]. Terms are parsed lazily. *) + open Lexing + + let parse_lexbuf (icopt:in_channel option) + (entry:lexbuf -> 'a) (lb:lexbuf) : 'a Stream.t = + let generator _ = + try Some(entry lb) + with + | End_of_file -> Option.iter close_in icopt; None + | DkParser.Error -> + let pos = Pos.locate (lb.lex_start_p, lb.lex_curr_p) in + parser_fatal pos "Unexpected token \"%s\"." 
(lexeme lb) + in + Stream.from generator + + let parse_in_channel (entry:lexbuf -> 'a) (fname:string) (ic:in_channel) + : 'a Stream.t = + let lb = from_channel ic in + set_filename lb fname; + parse_lexbuf (Some ic) entry lb + + let parse_file entry fname = parse_in_channel entry fname (open_in fname) + + let parse_string (entry: lexbuf -> 'a) (fname:string) (s:string) + : 'a Stream.t = + let lb = from_string s in + set_filename lb fname; + parse_lexbuf None entry lb + + let command = + let r = ref (Pos.none (P_open(false,[]))) in + fun (lb:lexbuf): p_command -> + Debug.(record_time Parsing + (fun () -> r := DkParser.line DkLexer.token lb)); !r + + (* exported functions *) + let parse_string = parse_string command + let parse_in_channel = parse_in_channel command + let parse_file = parse_file command + let parse_lexbuf = parse_lexbuf None command +end - val parse_term_string : string -> string -> Syntax.p_term Stream.t - (** [parse_term_string f s] returns a stream of parsed terms from string [s] - which comes from file [f] ([f] can be anything). *) +open LpLexer +open Sedlexing - val parse_rwpatt_string : string -> string -> Syntax.p_rw_patt Stream.t - (** [parse_rwpatt_string f s] returns a stream of parsed rewrite pattern - specifications from string [s] which comes from file [f] ([f] can be - anything). *) +(** Parsing lp syntax. *) +module Lp : +sig + include PARSER with type lexbuf := Sedlexing.lexbuf - val parse_search_query_string : - string -> string -> SearchQuerySyntax.query Stream.t - (** [parse_search_query_string f s] returns a stream of parsed terms from - string [s] which comes from file [f] ([f] can be anything). *) + val parse_term_string: Lexing.position -> string -> Syntax.p_term + (** [parse_rwpatt_string p s] parses a term from string [s] assuming that + [s] starts at position [p]. 
*) - val parse_qid : string -> Core.Term.qident + val parse_rwpatt_string: Lexing.position -> string -> Syntax.p_rwpatt + (** [parse_rwpatt_string f s] parses a rewrite pattern specification from + string [s] assuming that [s] starts at position [p]. *) - val parse_from_lexbuf : Sedlexing.lexbuf -> Syntax.ast - (** [parse_from_lexbuf lexbuf] is the same as [parse_string] but with an - already created lexbuf *) + val parse_search_string: Lexing.position -> string -> Syntax.search + (** [parse_search_string f s] parses a query from string [s] assuming + that [s] starts at position [p]. *) end = struct - let stream_of_lexbuf : - grammar_entry:(LpLexer.token,'b) MenhirLib.Convert.traditional -> - ?inchan:in_channel -> ?fname:string -> Sedlexing.lexbuf -> - (* Input channel passed as parameter to be closed at the end of stream. *) - 'a Stream.t = - fun ~grammar_entry ?inchan ?fname lb -> - Option.iter (Sedlexing.set_filename lb) fname; - let parse = - MenhirLib.Convert.Simplified.traditional2revised - grammar_entry - in - let token = LpLexer.token lb in - let generator _ = - try Some(parse token) - with - | End_of_file -> Option.iter close_in inchan; None - | LpLexer.SyntaxError {pos=None; _} -> assert false - | LpLexer.SyntaxError {pos=Some pos; elt} -> parser_fatal pos "%s" elt - | LpParser.Error -> - let pos = Pos.locate (Sedlexing.lexing_positions lb) in - parser_fatal pos "Unexpected token: \"%s\"." 
- (Sedlexing.Utf8.lexeme lb) - in - Stream.from generator - - let parse ~grammar_entry inchan = - stream_of_lexbuf ~grammar_entry ~inchan - (Sedlexing.Utf8.from_channel inchan) - - let parse_file ~grammar_entry fname = - let inchan = open_in fname in - stream_of_lexbuf ~grammar_entry ~inchan ~fname - (Sedlexing.Utf8.from_channel inchan) - - let parse_string ~grammar_entry fname s = - stream_of_lexbuf ~grammar_entry ~fname (Sedlexing.Utf8.from_string s) - - let parse_from_lexbuf ~grammar_entry lexbuf = - stream_of_lexbuf ~grammar_entry ?fname:None lexbuf - - let parse_term = parse ~grammar_entry:LpParser.term_alone - let parse_term_string = parse_string ~grammar_entry:LpParser.term_alone - let parse_rwpatt_string = - parse_string ~grammar_entry:LpParser.rw_patt_spec_alone - let parse_search_query_string = - parse_string ~grammar_entry:LpParser.search_query_alone - let parse_term_file = parse_file ~grammar_entry:LpParser.term_alone - - let parse_qid s = - let stream = parse_string ~grammar_entry:LpParser.qid_alone "LPSearch" s in - (Stream.next stream).elt - - let parse = parse ~grammar_entry:LpParser.command - let parse_string = parse_string ~grammar_entry:LpParser.command - let parse_file = parse_file ~grammar_entry:LpParser.command - - let parse_from_lexbuf = parse_from_lexbuf ~grammar_entry:LpParser.command - -end - -(** Parsing dk syntax. *) -module Dk : PARSER with type lexbuf := Lexing.lexbuf = struct + let handle_error (icopt: in_channel option) + (entry: lexbuf -> 'a) (lb: lexbuf): 'a option = + try Some(entry lb) + with + | End_of_file -> Option.iter close_in icopt; None + | SyntaxError{pos=None; _} -> assert false + | SyntaxError{pos=Some pos; elt} -> + parser_fatal pos "Syntax error. 
%s" elt + + let parse_lexbuf (icopt: in_channel option) + (entry: lexbuf -> 'a) (lb: lexbuf): 'a Stream.t = + Stream.from (fun _ -> handle_error icopt entry lb) + + let parse_string (entry: lexbuf -> 'a) (fname: string) (s: string) + : 'a Stream.t = + let lb = Utf8.from_string s in + set_filename lb fname; + parse_lexbuf None entry lb + + let parse_in_channel (entry: lexbuf -> 'a) (fname:string) (ic: in_channel) + : 'a Stream.t = + let lb = Utf8.from_channel ic in + set_filename lb fname; + parse_lexbuf (Some ic) entry lb + + let parse_file (entry: lexbuf -> 'a) (fname: string): 'a Stream.t = + parse_in_channel entry fname (open_in fname) + + let parse_entry_string (entry:lexbuf -> 'a) (lexpos:Lexing.position) + (s:string): 'a = + let lb = Utf8.from_string s in + set_position lb lexpos; + set_filename lb lexpos.pos_fname; + Stream.next (parse_lexbuf None (LpParser.new_parsing entry) lb) + + (* exported functions *) + let parse_term_string = parse_entry_string LpParser.term + let parse_rwpatt_string = parse_entry_string LpParser.rwpatt + let parse_search_string = parse_entry_string LpParser.search + + let parse_in_channel = parse_in_channel LpParser.command + let parse_file = parse_file LpParser.command + let parse_string = parse_string LpParser.command + let parse_lexbuf = parse_lexbuf None LpParser.command - let token : Lexing.lexbuf -> DkTokens.token = - let r = ref DkTokens.EOF in fun lb -> - Debug.(record_time Lexing (fun () -> r := DkLexer.token lb)); !r - - let command : - (Lexing.lexbuf -> DkTokens.token) -> Lexing.lexbuf -> Syntax.p_command = - let r = ref (Pos.none (Syntax.P_open(false,[]))) in fun token lb -> - Debug.(record_time Parsing (fun () -> r := DkParser.line token lb)); !r - - let stream_of_lexbuf : - ?inchan:in_channel -> ?fname:string -> Lexing.lexbuf -> - (* Input channel passed as parameter to be closed at the end of stream. 
*) - Syntax.p_command Stream.t = - fun ?inchan ?fname lb -> - let fn n = - lb.lex_curr_p <- {lb.lex_curr_p with pos_fname = n} - in - Option.iter fn fname; - (*In OCaml >= 4.11: Lexing.set_filename lb fname;*) - let generator _ = - try Some (command token lb) - with - | End_of_file -> Option.iter close_in inchan; None - | DkParser.Error -> - let pos = Pos.locate (Lexing.(lb.lex_start_p, lb.lex_curr_p)) in - parser_fatal pos "Unexpected token \"%s\"." (Lexing.lexeme lb) - in - Stream.from generator - - let parse inchan = - try stream_of_lexbuf ~inchan (Lexing.from_channel inchan) - with e -> close_in inchan; raise e - - let parse_file fname = - let inchan = open_in fname in - stream_of_lexbuf ~inchan ~fname (Lexing.from_channel inchan) - - let parse_string fname s = stream_of_lexbuf ~fname (Lexing.from_string s) - - let parse_from_lexbuf lexbuf = stream_of_lexbuf ?fname:None lexbuf end -(* Include parser of new syntax so that functions are directly available.*) include Lp +open Error + (** [path_of_string s] converts the string [s] into a path. *) let path_of_string : string -> Path.t = fun s -> - let open LpLexer in - let lb = Sedlexing.Utf8.from_string s in + let lb = Utf8.from_string s in try - begin match token lb () with + begin match token lb with | UID s, _, _ -> [s] | QID p, _, _ -> List.rev p - | _ -> Error.fatal_no_pos "\"%s\" is not a path." s + | _ -> fatal_no_pos "Syntax error: \"%s\" is not a path." s end - with SyntaxError _ -> Error.fatal_no_pos "\"%s\" is not a path." s + with SyntaxError _ -> + fatal_no_pos "Syntax error: \"%s\" is not a path." s (** [qident_of_string s] converts the string [s] into a qident. 
*) let qident_of_string : string -> Core.Term.qident = fun s -> - let open LpLexer in - let lb = Sedlexing.Utf8.from_string s in + let lb = Utf8.from_string s in try - begin match token lb () with + begin match token lb with | QID [], _, _ -> assert false | QID (s::p), _, _ -> (List.rev p, s) - | _ -> Error.fatal_no_pos "\"%s\" is not a qualified identifier." s + | _ -> + fatal_no_pos "Syntax error: \"%s\" is not a qualified identifier." s end with SyntaxError _ -> - Error.fatal_no_pos "\"%s\" is not a qualified identifier." s + fatal_no_pos "Syntax error: \"%s\" is not a qualified identifier." s (** [parse_file fname] selects and runs the correct parser on file [fname], by looking at its extension. *) -let parse_file : string -> Syntax.ast = fun fname -> +let parse_file : string -> ast = fun fname -> match Filename.check_suffix fname Library.lp_src_extension with | true -> Lp.parse_file fname | false -> Dk.parse_file fname diff --git a/src/parsing/pretty.ml b/src/parsing/pretty.ml index 7d0522834..e50ad40a6 100644 --- a/src/parsing/pretty.ml +++ b/src/parsing/pretty.ml @@ -226,7 +226,7 @@ let proof_end : p_proof_end pp = fun ppf pe -> | P_proof_admitted -> "admitted" | P_proof_abort -> "abort") -let rw_patt : p_rw_patt pp = fun ppf p -> +let rwpatt : p_rwpatt pp = fun ppf p -> match p.elt with | Rw_Term(t) -> term ppf t | Rw_InTerm(t) -> out ppf "in %a" term t @@ -242,6 +242,36 @@ let assertion : p_assertion pp = fun ppf a -> | P_assert_typing (t, a) -> out ppf "@[%a@ : %a@]" term t term a | P_assert_conv (t, u) -> out ppf "@[%a@ ≡ %a@]" term t term u +let relation ppf s = string ppf (match s with Exact -> " =" | Inside -> " >") + +let where elt ppf = function + | Spine x -> out ppf "spine%a" elt x + | Conclusion x -> out ppf "concl%a" elt x + | Hypothesis x -> out ppf "hyp%a" elt x + +let search_constr ppf = function + | QType x -> out ppf "type%a" (Option.pp (where (Option.pp relation))) x + | QXhs(i,None) -> out ppf "rule%a" (Option.pp relation) i + | 
QXhs(i,Some Lhs) -> out ppf "lhs%a" (Option.pp relation) i + | QXhs(i,Some Rhs) -> out ppf "rhs%a" (Option.pp relation) i + +let generalize ppf b = if b then string ppf " generalize" + +let search_base ppf = function + | QName s -> out ppf "name %s" s + | QSearch(t,g,None) -> out ppf "anywhere%a%a" generalize g term t + | QSearch(t,g,Some c) -> + out ppf "%a%a%a" search_constr c generalize g term t + +let op ppf o = string ppf (match o with Union -> "; " | Intersect -> ", ") + +let filter ppf (Path s) = out ppf " | %s" s + +let rec search ppf = function + | QBase b -> search_base ppf b + | QOp(q1,o,q2) -> out ppf "%a%a%a" search q1 op o search q2 + | QFilter(q,f) -> out ppf "%a%a" search q filter f + let query : p_query pp = fun ppf { elt; _ } -> match elt with | P_query_assert(true, a) -> out ppf "assertnot ⊢ %a" assertion a @@ -259,7 +289,7 @@ let query : p_query pp = fun ppf { elt; _ } -> | P_query_print(Some qid) -> out ppf "print %a" qident qid | P_query_proofterm -> out ppf "proofterm" | P_query_verbose i -> out ppf "verbose %s" i - | P_query_search s -> out ppf "search \"%s\"" s + | P_query_search q -> out ppf "search %a" search q let rec tactic : p_tactic pp = fun ppf { elt; _ } -> begin match elt with @@ -283,7 +313,7 @@ let rec tactic : p_tactic pp = fun ppf { elt; _ } -> | P_tac_repeat t -> out ppf "repeat %a" tactic t | P_tac_rewrite(b,p,t) -> let dir ppf b = if not b then out ppf " left" in - let pat ppf p = out ppf " .[%a]" rw_patt p in + let pat ppf p = out ppf " .[%a]" rwpatt p in out ppf "rewrite%a%a %a" dir b (Option.pp pat) p term t | P_tac_set (id, t) -> out ppf "set %a ≔ %a" ident id term t | P_tac_simpl SimpAll -> out ppf "simplify" diff --git a/src/parsing/scope.ml b/src/parsing/scope.ml index b7140eb41..104e4b3d3 100644 --- a/src/parsing/scope.ml +++ b/src/parsing/scope.ml @@ -631,9 +631,9 @@ let scope_rule : let scope_pattern : sig_state -> env -> p_term -> term = fun ss env t -> scope 0 M_Patt ss env t -(** [scope_rw_patt ss env s] scopes 
the parser-level rewrite tactic +(** [scope_rwpatt ss env s] scopes the parser-level rewrite tactic specification [s] into an actual rewrite specification. *) -let scope_rw_patt : sig_state -> env -> p_rw_patt -> (term, binder) rw_patt = +let scope_rwpatt : sig_state -> env -> p_rwpatt -> (term, binder) rwpatt = fun ss env s -> match s.elt with | Rw_Term(t) -> Rw_Term(scope_pattern ss env t) diff --git a/src/parsing/scope.mli b/src/parsing/scope.mli index 6f87a9263..fbedb632a 100644 --- a/src/parsing/scope.mli +++ b/src/parsing/scope.mli @@ -34,7 +34,7 @@ val scope_search_pattern : val scope_rule : ?find_sym: find_sym -> bool -> sig_state -> p_rule -> sym_rule -(** [scope_rw_patt ss env t] turns a parser-level rewrite tactic specification +(** [scope_rwpatt ss env t] turns a parser-level rewrite tactic specification [s] into an actual rewrite specification (possibly containing variables of [env] and using [ss] for aliasing). *) -val scope_rw_patt : sig_state -> env -> p_rw_patt -> (term, binder) rw_patt +val scope_rwpatt : sig_state -> env -> p_rwpatt -> (term, binder) rwpatt diff --git a/src/parsing/searchQuerySyntax.ml b/src/parsing/searchQuerySyntax.ml deleted file mode 100644 index ab32e66ba..000000000 --- a/src/parsing/searchQuerySyntax.ml +++ /dev/null @@ -1,22 +0,0 @@ -(* query language *) -type side = Lhs | Rhs -type inside = Exact | Inside -type 'inside where = - | Spine of 'inside - | Conclusion of 'inside - | Hypothesis of 'inside -type constr = - | QType of (inside option) where option - | QXhs of inside option * side option -type base_query = - | QName of string - | QSearch of Syntax.p_term * (*generalize:*)bool * constr option -type op = - | Intersect - | Union -type filter = - | Path of string -type query = - | QBase of base_query - | QOpp of query * op * query - | QFilter of query * filter diff --git a/src/parsing/syntax.ml b/src/parsing/syntax.ml index 7ce8b9ade..f1601edd1 100644 --- a/src/parsing/syntax.ml +++ b/src/parsing/syntax.ml @@ -189,7 
+189,7 @@ end Reflection Extension for the Coq system", by Georges Gonthier, Assia Mahboubi and Enrico Tassi, INRIA Research Report 6455, 2016, @see , section 8, p. 48. *) -type ('term, 'binder) rw_patt = +type ('term, 'binder) rwpatt = | Rw_Term of 'term | Rw_InTerm of 'term | Rw_InIdInTerm of 'binder @@ -197,7 +197,7 @@ type ('term, 'binder) rw_patt = | Rw_TermInIdInTerm of 'term * 'binder | Rw_TermAsIdInTerm of 'term * 'binder -type p_rw_patt = (p_term, p_ident * p_term) rw_patt loc +type p_rwpatt = (p_term, p_ident * p_term) rwpatt loc (** Parser-level representation of an assertion. *) type p_assertion = @@ -206,6 +206,29 @@ type p_assertion = | P_assert_conv of p_term * p_term (** The two given terms should be convertible. *) +(** Search queries. *) +type side = Lhs | Rhs +type relation = Exact | Inside +type 'a where = + | Spine of 'a + | Conclusion of 'a + | Hypothesis of 'a +type search_constr = + | QType of relation option where option + | QXhs of relation option * side option +type search_base = + | QName of string + | QSearch of p_term * (*generalize:*)bool * search_constr option +type op = + | Intersect + | Union +type filter = + | Path of string +type search = + | QBase of search_base + | QOp of search * op * search + | QFilter of search * filter + (** Parser-level representation of a query command. *) type p_query_aux = | P_query_verbose of string @@ -228,9 +251,8 @@ type p_query_aux = (** Print information about a symbol or the current goals. *) | P_query_proofterm (** Print the current proof term (possibly containing open goals). 
*) - | P_query_search of string - (** Runs a search query *) (* I use a string here to be parsed later - to avoid polluting LambdaPi code with index and retrieval code *) + | P_query_search of search + (** Runs a search query *) type p_query = p_query_aux loc @@ -257,7 +279,7 @@ type p_tactic_aux = | P_tac_refl | P_tac_remove of p_ident list | P_tac_repeat of p_tactic - | P_tac_rewrite of bool * p_rw_patt option * p_term + | P_tac_rewrite of bool * p_rwpatt option * p_term (* The boolean indicates if the equation is applied from left to right. *) | P_tac_set of p_ident * p_term | P_tac_simpl of simp_flag @@ -265,6 +287,7 @@ type p_tactic_aux = | P_tac_sym | P_tac_try of p_tactic | P_tac_why3 of string option + and p_tactic = p_tactic_aux loc (** [is_destructive t] says whether tactic [t] changes the current goal. *) @@ -383,7 +406,7 @@ let eq_p_inductive : p_inductive eq = fun {elt=(i1,t1,l1);_} {elt=(i2,t2,l2);_} -> List.eq eq_cons ((i1,t1)::l1) ((i2,t2)::l2) -let eq_p_rw_patt : p_rw_patt eq = fun {elt=r1;_} {elt=r2;_} -> +let eq_p_rwpatt : p_rwpatt eq = fun {elt=r1;_} {elt=r2;_} -> match r1, r2 with | Rw_Term t1, Rw_Term t2 | Rw_InTerm t1, Rw_InTerm t2 -> eq_p_term t1 t2 @@ -415,6 +438,7 @@ let eq_p_query : p_query eq = fun {elt=q1;_} {elt=q2;_} -> | P_query_verbose n1, P_query_verbose n2 -> n1 = n2 | P_query_debug (b1,s1), P_query_debug (b2,s2) -> b1 = b2 && s1 = s2 | P_query_proofterm, P_query_proofterm -> true + | P_query_search q1, P_query_search q2 -> q1 = q2 | _, _ -> false let eq_simp_flag : simp_flag eq = fun s1 s2 -> @@ -433,7 +457,7 @@ let eq_p_tactic : p_tactic eq = fun {elt=t1;_} {elt=t2;_} -> | P_tac_assume xs1, P_tac_assume xs2 -> List.eq (Option.eq eq_p_ident) xs1 xs2 | P_tac_rewrite(b1,p1,t1), P_tac_rewrite(b2,p2,t2) -> - b1 = b2 && Option.eq eq_p_rw_patt p1 p2 && eq_p_term t1 t2 + b1 = b2 && Option.eq eq_p_rwpatt p1 p2 && eq_p_term t1 t2 | P_tac_query q1, P_tac_query q2 -> eq_p_query q1 q2 | P_tac_why3 so1, P_tac_why3 so2 -> so1 = so2 | 
P_tac_simpl s1, P_tac_simpl s2 -> eq_simp_flag s1 s2 @@ -589,7 +613,7 @@ let fold_idents : ('a -> p_qident -> 'a) -> 'a -> p_command list -> 'a = fold_term (fold_term a l) r in - let fold_rw_patt_vars : StrSet.t -> 'a -> p_rw_patt -> 'a = fun vs a p -> + let fold_rwpatt_vars : StrSet.t -> 'a -> p_rwpatt -> 'a = fun vs a p -> match p.elt with | Rw_Term t | Rw_InTerm t -> fold_term_vars vs a t @@ -627,7 +651,7 @@ let fold_idents : ('a -> p_qident -> 'a) -> 'a -> p_command list -> 'a = | P_tac_change t | P_tac_rewrite (_, None, t) -> (vs, fold_term_vars vs a t) | P_tac_rewrite (_, Some p, t) -> - (vs, fold_term_vars vs (fold_rw_patt_vars vs a p) t) + (vs, fold_term_vars vs (fold_rwpatt_vars vs a p) t) | P_tac_query q -> (vs, fold_query_vars vs a q) | P_tac_assume idopts -> (add_idopts vs idopts, a) | P_tac_remove ids -> diff --git a/src/pure/pure.ml b/src/pure/pure.ml index 3e53ce3ba..bb18ed3e0 100644 --- a/src/pure/pure.ml +++ b/src/pure/pure.ml @@ -360,7 +360,7 @@ end = struct let loc pa = Sedlexing.lexing_position_curr pa let parse pa = - let p = Parser.parse_from_lexbuf pa in + let p = Parser.parse_lexbuf pa in parse_command p end diff --git a/src/tool/indexing.ml b/src/tool/indexing.ml index 5c07d7efe..0ea53c853 100644 --- a/src/tool/indexing.ml +++ b/src/tool/indexing.ml @@ -190,39 +190,39 @@ end module DB = struct (* fix codomain type *) - type side = Parsing.SearchQuerySyntax.side = Lhs | Rhs + type side = Parsing.Syntax.side = Lhs | Rhs - type inside = Parsing.SearchQuerySyntax.inside = Exact | Inside + type relation = Parsing.Syntax.relation = Exact | Inside - type 'inside where = 'inside Parsing.SearchQuerySyntax.where = - | Spine of 'inside - | Conclusion of 'inside - | Hypothesis of 'inside - (* the "name" in the sym_name of rules is just the printed position of - the rule; the associated position is never None *) + type 'relation where = 'relation Parsing.Syntax.where = + | Spine of 'relation + | Conclusion of 'relation + | Hypothesis of 'relation type 
position = | Name - | Type of inside where - | Xhs of inside * side + | Type of relation where + | Xhs of relation * side type item = sym_name * Common.Pos.pos option + (* the "name" in the sym_name of rules is just the printed position of + the rule; the associated position is never None *) let pp_side fmt = function | Lhs -> Lplib.Base.out fmt "lhs" | Rhs -> Lplib.Base.out fmt "rhs" - let pp_inside fmt = + let pp_relation fmt = function | Exact -> Lplib.Base.out fmt "as the exact" | Inside -> Lplib.Base.out fmt "inside the" let pp_where fmt = function - | Spine ins -> Lplib.Base.out fmt "%a spine of" pp_inside ins - | Hypothesis ins -> Lplib.Base.out fmt "%a hypothesis of" pp_inside ins - | Conclusion ins -> Lplib.Base.out fmt "%a conclusion of" pp_inside ins + | Spine ins -> Lplib.Base.out fmt "%a spine of" pp_relation ins + | Hypothesis ins -> Lplib.Base.out fmt "%a hypothesis of" pp_relation ins + | Conclusion ins -> Lplib.Base.out fmt "%a conclusion of" pp_relation ins module ItemSet = struct @@ -263,11 +263,11 @@ module DB = struct (escaper.run Print.term) term (if generalize then "generalized " else "") pp_where where - | generalize,term,Xhs (inside,side) -> + | generalize,term,Xhs (relation,side) -> Lplib.Base.out ppf "%a occurs %s %a %a" (escaper.run Print.term) term (if generalize then "generalized" else "") - pp_inside inside pp_side side)) + pp_relation relation pp_side side)) sep let generic_pp_of_item_list ~escape ~escaper ~separator ~sep ~delimiters @@ -528,14 +528,14 @@ let index_rule sym ({Core.Term.lhs=lhsargs ; rule_pos ; _} as rule) = | Some pos -> pos in let lhs = Core.Term.add_args (Core.Term.mk_Symb sym) lhsargs in let rhs = rule.rhs in - let get_inside = function | DB.Conclusion ins -> ins | _ -> assert false in + let get_relation = function | DB.Conclusion r -> r | _ -> assert false in let filename = Option.get rule_pos.fname in let path = Library.path_of_file Parsing.LpLexer.escape filename in let rule_name = (path,Common.Pos.to_string 
~print_fname:false rule_pos) in index_term_and_subterms ~is_spine:false lhs - (fun where -> ((rule_name,Some rule_pos),[Xhs(get_inside where,Lhs)])) ; + (fun where -> ((rule_name,Some rule_pos),[Xhs(get_relation where,Lhs)])) ; index_term_and_subterms ~is_spine:false rhs - (fun where -> ((rule_name,Some rule_pos),[Xhs(get_inside where,Rhs)])) + (fun where -> ((rule_name,Some rule_pos),[Xhs(get_relation where,Rhs)])) let index_sym sym = let qname = name_of_sym sym in @@ -574,7 +574,7 @@ include DB module QueryLanguage = struct - open Parsing.SearchQuerySyntax + open Parsing.Syntax let match_opt p x = match p,x with @@ -637,7 +637,7 @@ module QueryLanguage = struct let rec aux = function | QBase bq -> answer_base_query ~mok ss env bq - | QOpp (q1,op,q2) -> perform_op op (aux q1) (aux q2) + | QOp (q1,op,q2) -> perform_op op (aux q1) (aux q2) | QFilter (q,f) -> filter (aux q) f in aux @@ -649,20 +649,10 @@ include QueryLanguage module UserLevelQueries = struct - (**transform_ascii_to_unicode [s] replaces all the occurences of '->' - and 'forall' with '→' and 'Π' in the search query [s] *) - let transform_ascii_to_unicode : string -> string = fun s -> - let s = Str.global_replace (Str.regexp_string " -> ") " → " s in - Str.global_replace (Str.regexp "\\bforall\\b") "Π" s - - let search_cmd_gen ss ~from ~how_many ~fail ~pp_results - ~title_tag:(hb,he) s = - let s = transform_ascii_to_unicode s in + let search_cmd_gen ss ~from ~how_many ~fail ~pp_results ~tag:(hb,he) q = try - let pstream = Parsing.Parser.Lp.parse_search_query_string "LPSearch" s in - let pq = Stream.next pstream in let mok _ = None in - let items = ItemSet.bindings (answer_query ~mok ss [] pq) in + let items = ItemSet.bindings (answer_query ~mok ss [] q) in let resultsno = List.length items in let _,items = Lplib.List.cut items from in let items,_ = Lplib.List.cut items how_many in @@ -685,26 +675,43 @@ module UserLevelQueries = struct | exn -> fail (Format.asprintf "Error: %s@." 
(Printexc.to_string exn)) - let search_cmd_html ss ~from ~how_many s ~dbpath = + let search_cmd_txt_query ss ~dbpath q = + the_dbpath := dbpath; + search_cmd_gen ss ~from:0 ~how_many:999999 + ~fail:(fun x -> Common.Error.fatal_no_pos "%s" x) + ~pp_results:pp_results_list ~tag:("","") q + + (** [transform_ascii_to_unicode s] replaces all the occurences of ["->"] and + ["forall"] with ["→"] and ["Π"] in the search query [s] *) + let transform_ascii_to_unicode : string -> string = + let arrow = Str.regexp_string " -> " + and forall = Str.regexp "\\bforall\\b" in + fun s -> Str.global_replace forall "Π" (Str.global_replace arrow " → " s) + + (* unit tests *) + let _ = + assert (transform_ascii_to_unicode "a -> b" = "a → b"); + assert (transform_ascii_to_unicode " forall x, y" = " Π x, y"); + assert (transform_ascii_to_unicode "forall x, y" = "Π x, y"); + assert (transform_ascii_to_unicode "forall.x, y" = "Π.x, y"); + assert (transform_ascii_to_unicode "((forall x, y" = "((Π x, y") + + let search_cmd_html ss ~from ~how_many ~dbpath s = the_dbpath := dbpath; search_cmd_gen ss ~from ~how_many ~fail:(fun x -> "" ^ x ^ "") - ~pp_results:(html_of_results_list from) ~title_tag:("

","

") s + ~pp_results:(html_of_results_list from) + ~tag:("

"," Common.Error.fatal_no_pos "%s" x) - ~pp_results:pp_results_list ~title_tag:("","") s + let search_cmd_txt ss ~dbpath s = + the_dbpath := dbpath; + search_cmd_txt_query ss ~dbpath + (Parsing.Parser.Lp.parse_search_string (lexing_opt None) + (transform_ascii_to_unicode s)) end (* let's flatten the interface *) include UserLevelQueries - -let _ = - assert (transform_ascii_to_unicode "a -> b" = "a → b"); - assert (transform_ascii_to_unicode " forall x, y" = " Π x, y"); - assert (transform_ascii_to_unicode "forall x, y" = "Π x, y"); - assert (transform_ascii_to_unicode "forall.x, y" = "Π.x, y"); - assert (transform_ascii_to_unicode "((forall x, y" = "((Π x, y") diff --git a/src/tool/indexing.mli b/src/tool/indexing.mli index 63ee8febb..eb61bdedb 100644 --- a/src/tool/indexing.mli +++ b/src/tool/indexing.mli @@ -1,4 +1,5 @@ -open Core +open Core open Sig_state +open Parsing open Syntax (* indexing *) val empty : unit -> unit @@ -7,9 +8,9 @@ val index_sign : Sign.t -> unit val dump : dbpath:string -> unit -> unit (* search command used by cli *) -val search_cmd_txt: Sig_state.sig_state -> string -> dbpath:string -> string +val search_cmd_txt: sig_state -> dbpath:string -> string -> string +val search_cmd_txt_query: sig_state -> dbpath:string -> search -> string (* search command used by websearch *) val search_cmd_html: - Sig_state.sig_state - -> from:int -> how_many:int -> string -> dbpath:string -> string + sig_state -> from:int -> how_many:int -> dbpath:string -> string -> string