diff --git a/src/HOL/Tools/Predicate_Compile/code_prolog.ML b/src/HOL/Tools/Predicate_Compile/code_prolog.ML --- a/src/HOL/Tools/Predicate_Compile/code_prolog.ML +++ b/src/HOL/Tools/Predicate_Compile/code_prolog.ML @@ -1,1084 +1,1084 @@ (* Title: HOL/Tools/Predicate_Compile/code_prolog.ML Author: Lukas Bulwahn, TU Muenchen Prototype of an code generator for logic programming languages (a.k.a. Prolog). *) signature CODE_PROLOG = sig type code_options = {ensure_groundness : bool, limit_globally : int option, limited_types : (typ * int) list, limited_predicates : (string list * int) list, replacing : ((string * string) * string) list, manual_reorder : ((string * int) * int list) list} val set_ensure_groundness : code_options -> code_options val map_limit_predicates : ((string list * int) list -> (string list * int) list) -> code_options -> code_options val code_options_of : theory -> code_options val map_code_options : (code_options -> code_options) -> theory -> theory val prolog_system: string Config.T val prolog_timeout: real Config.T datatype arith_op = Plus | Minus datatype prol_term = Var of string | Cons of string | AppF of string * prol_term list | Number of int | ArithOp of arith_op * prol_term list; datatype prem = Conj of prem list | Rel of string * prol_term list | NotRel of string * prol_term list | Eq of prol_term * prol_term | NotEq of prol_term * prol_term | ArithEq of prol_term * prol_term | NotArithEq of prol_term * prol_term | Ground of string * typ; type clause = ((string * prol_term list) * prem); type logic_program = clause list; type constant_table = (string * string) list val generate : Predicate_Compile_Aux.mode option * bool -> Proof.context -> string -> (logic_program * constant_table) val write_program : logic_program -> string val run : Proof.context -> logic_program -> (string * prol_term list) -> string list -> int option -> prol_term list list val active : bool Config.T val test_goals : Proof.context -> bool -> (string * typ) list -> (term * term list) list -> Quickcheck.result list val trace : bool Unsynchronized.ref val replace : ((string * string) * string) -> logic_program -> logic_program end; structure Code_Prolog : CODE_PROLOG = struct (* diagnostic tracing *) val trace = Unsynchronized.ref false fun tracing s = if !trace then Output.tracing s else () (* code generation options *) type code_options = {ensure_groundness : bool, limit_globally : int option, limited_types : (typ * int) list, limited_predicates : (string list * int) list, replacing : ((string * string) * string) list, manual_reorder : ((string * int) * int list) list} fun set_ensure_groundness {ensure_groundness, limit_globally, limited_types, limited_predicates, replacing, manual_reorder} = {ensure_groundness = true, limit_globally = limit_globally, limited_types = limited_types, limited_predicates = limited_predicates, replacing = replacing, manual_reorder = manual_reorder} fun map_limit_predicates f {ensure_groundness, limit_globally, limited_types, limited_predicates, replacing, manual_reorder} = {ensure_groundness = ensure_groundness, limit_globally = limit_globally, limited_types = limited_types, limited_predicates = f limited_predicates, replacing = replacing, manual_reorder = manual_reorder} fun merge_global_limit (NONE, NONE) = NONE | merge_global_limit (NONE, SOME n) = SOME n | merge_global_limit (SOME n, NONE) = SOME n | merge_global_limit (SOME n, SOME m) = SOME (Int.max (n, m)) (* FIXME odd merge *) structure Options = Theory_Data ( type T = code_options val empty = 
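(* Overview sketch, not part of the original code (query arguments are placeholders): the exported entry points compose as in the values command further below, roughly generate (use_modes, groundness) ctxt pred |> post_process ctxt options pred |> (fn (p, table) => run ctxt p (translate_const table pred, args) output_names nsols); write_program is used by run to print the clauses that are sent to the Prolog system. *)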
{ensure_groundness = false, limit_globally = NONE, limited_types = [], limited_predicates = [], replacing = [], manual_reorder = []} val extend = I; fun merge ({ensure_groundness = ensure_groundness1, limit_globally = limit_globally1, limited_types = limited_types1, limited_predicates = limited_predicates1, replacing = replacing1, manual_reorder = manual_reorder1}, {ensure_groundness = ensure_groundness2, limit_globally = limit_globally2, limited_types = limited_types2, limited_predicates = limited_predicates2, replacing = replacing2, manual_reorder = manual_reorder2}) = {ensure_groundness = ensure_groundness1 orelse ensure_groundness2 (* FIXME odd merge *), limit_globally = merge_global_limit (limit_globally1, limit_globally2), limited_types = AList.merge (op =) (K true) (limited_types1, limited_types2), limited_predicates = AList.merge (op =) (K true) (limited_predicates1, limited_predicates2), manual_reorder = AList.merge (op =) (K true) (manual_reorder1, manual_reorder2), replacing = Library.merge (op =) (replacing1, replacing2)}; ); val code_options_of = Options.get val map_code_options = Options.map (* system configuration *) datatype prolog_system = SWI_PROLOG | YAP fun string_of_system SWI_PROLOG = "swiprolog" | string_of_system YAP = "yap" val prolog_system = Attrib.setup_config_string \<^binding>\prolog_system\ (K "swiprolog") fun get_prolog_system ctxt = (case Config.get ctxt prolog_system of "swiprolog" => SWI_PROLOG | "yap" => YAP | name => error ("Bad prolog system: " ^ quote name ^ " (\"swiprolog\" or \"yap\" expected)")) val prolog_timeout = Attrib.setup_config_real \<^binding>\prolog_timeout\ (K 10.0) fun get_prolog_timeout ctxt = seconds (Config.get ctxt prolog_timeout) (* internal program representation *) datatype arith_op = Plus | Minus datatype prol_term = Var of string | Cons of string | AppF of string * prol_term list | Number of int | ArithOp of arith_op * prol_term list; fun dest_Var (Var v) = v fun add_vars (Var v) = insert (op =) v | add_vars (ArithOp (_, ts)) = fold add_vars ts | add_vars (AppF (_, ts)) = fold add_vars ts | add_vars _ = I fun map_vars f (Var v) = Var (f v) | map_vars f (ArithOp (opr, ts)) = ArithOp (opr, map (map_vars f) ts) | map_vars f (AppF (fs, ts)) = AppF (fs, map (map_vars f) ts) | map_vars f t = t fun maybe_AppF (c, []) = Cons c | maybe_AppF (c, xs) = AppF (c, xs) fun is_Var (Var _) = true | is_Var _ = false fun is_arith_term (Var _) = true | is_arith_term (Number _) = true | is_arith_term (ArithOp (_, operands)) = forall is_arith_term operands | is_arith_term _ = false fun string_of_prol_term (Var s) = "Var " ^ s | string_of_prol_term (Cons s) = "Cons " ^ s | string_of_prol_term (AppF (f, args)) = f ^ "(" ^ commas (map string_of_prol_term args) ^ ")" | string_of_prol_term (Number n) = "Number " ^ string_of_int n datatype prem = Conj of prem list | Rel of string * prol_term list | NotRel of string * prol_term list | Eq of prol_term * prol_term | NotEq of prol_term * prol_term | ArithEq of prol_term * prol_term | NotArithEq of prol_term * prol_term | Ground of string * typ; fun dest_Rel (Rel (c, ts)) = (c, ts) fun map_term_prem f (Conj prems) = Conj (map (map_term_prem f) prems) | map_term_prem f (Rel (r, ts)) = Rel (r, map f ts) | map_term_prem f (NotRel (r, ts)) = NotRel (r, map f ts) | map_term_prem f (Eq (l, r)) = Eq (f l, f r) | map_term_prem f (NotEq (l, r)) = NotEq (f l, f r) | map_term_prem f (ArithEq (l, r)) = ArithEq (f l, f r) | map_term_prem f (NotArithEq (l, r)) = NotArithEq (f l, f r) | map_term_prem f (Ground (v, T)) = 
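(* Illustrative sketch of this representation, with indicative constant names (the real names come from the constant table): an introduction rule such as "append [] ys ys" yields the clause (("append", [Cons "nil", Var "ys", Var "ys"]), Conj []), and a premise "append xs ys zs" yields Rel ("append", [Var "xs", Var "ys", Var "zs"]). *)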
Ground (dest_Var (f (Var v)), T) fun fold_prem_terms f (Conj prems) = fold (fold_prem_terms f) prems | fold_prem_terms f (Rel (_, ts)) = fold f ts | fold_prem_terms f (NotRel (_, ts)) = fold f ts | fold_prem_terms f (Eq (l, r)) = f l #> f r | fold_prem_terms f (NotEq (l, r)) = f l #> f r | fold_prem_terms f (ArithEq (l, r)) = f l #> f r | fold_prem_terms f (NotArithEq (l, r)) = f l #> f r | fold_prem_terms f (Ground (v, T)) = f (Var v) type clause = ((string * prol_term list) * prem); type logic_program = clause list; (* translation from introduction rules to internal representation *) fun mk_conform f empty avoid name = let fun dest_Char (Symbol.Char c) = c val name' = space_implode "" (map (dest_Char o Symbol.decode) (filter (fn s => Symbol.is_ascii_letter s orelse Symbol.is_ascii_digit s) (Symbol.explode name))) val name'' = f (if name' = "" then empty else name') in if member (op =) avoid name'' then singleton (Name.variant_list avoid) name'' else name'' end (** constant table **) type constant_table = (string * string) list fun declare_consts consts constant_table = let fun update' c table = if AList.defined (op =) table c then table else let val c' = mk_conform (Name.enforce_case false) "pred" (map snd table) (Long_Name.base_name c) in AList.update (op =) (c, c') table end in fold update' consts constant_table end fun translate_const constant_table c = (case AList.lookup (op =) constant_table c of SOME c' => c' | NONE => error ("No such constant: " ^ c)) fun inv_lookup _ [] _ = NONE | inv_lookup eq ((key, value)::xs) value' = if eq (value', value) then SOME key else inv_lookup eq xs value' fun restore_const constant_table c = (case inv_lookup (op =) constant_table c of SOME c' => c' | NONE => error ("No constant corresponding to " ^ c)) (** translation of terms, literals, premises, and clauses **) fun translate_arith_const \<^const_name>\Groups.plus_class.plus\ = SOME Plus | translate_arith_const \<^const_name>\Groups.minus_class.minus\ = SOME Minus | translate_arith_const _ = NONE fun mk_nat_term constant_table n = let val zero = translate_const constant_table \<^const_name>\Groups.zero_class.zero\ val Suc = translate_const constant_table \<^const_name>\Suc\ in funpow n (fn t => AppF (Suc, [t])) (Cons zero) end fun translate_term ctxt constant_table t = (case try HOLogic.dest_number t of SOME (\<^typ>\int\, n) => Number n | SOME (\<^typ>\nat\, n) => mk_nat_term constant_table n | NONE => (case strip_comb t of (Free (v, T), []) => Var v | (Const (c, _), []) => Cons (translate_const constant_table c) | (Const (c, _), args) => (case translate_arith_const c of SOME aop => ArithOp (aop, map (translate_term ctxt constant_table) args) | NONE => AppF (translate_const constant_table c, map (translate_term ctxt constant_table) args)) | _ => error ("illegal term for translation: " ^ Syntax.string_of_term ctxt t))) fun translate_literal ctxt constant_table t = (case strip_comb t of (Const (\<^const_name>\HOL.eq\, _), [l, r]) => let val l' = translate_term ctxt constant_table l val r' = translate_term ctxt constant_table r in (if is_Var l' andalso is_arith_term r' andalso not (is_Var r') then ArithEq else Eq) (l', r') end | (Const (c, _), args) => Rel (translate_const constant_table c, map (translate_term ctxt constant_table) args) | _ => error ("illegal literal for translation: " ^ Syntax.string_of_term ctxt t)) fun NegRel_of (Rel lit) = NotRel lit | NegRel_of (Eq eq) = NotEq eq | NegRel_of (ArithEq eq) = NotArithEq eq fun mk_groundness_prems t = map Ground (Term.add_frees t []) fun 
translate_prem ensure_groundness ctxt constant_table t = (case try HOLogic.dest_not t of SOME t => if ensure_groundness then Conj (mk_groundness_prems t @ [NegRel_of (translate_literal ctxt constant_table t)]) else NegRel_of (translate_literal ctxt constant_table t) | NONE => translate_literal ctxt constant_table t) fun imp_prems_conv cv ct = (case Thm.term_of ct of Const (\<^const_name>\Pure.imp\, _) $ _ $ _ => Conv.combination_conv (Conv.arg_conv cv) (imp_prems_conv cv) ct | _ => Conv.all_conv ct) fun preprocess_intro thy rule = Conv.fconv_rule (imp_prems_conv (HOLogic.Trueprop_conv (Conv.try_conv (Conv.rewr_conv @{thm Predicate.eq_is_eq})))) (Thm.transfer thy rule) fun translate_intros ensure_groundness ctxt gr const constant_table = let val intros = map (preprocess_intro (Proof_Context.theory_of ctxt)) (Graph.get_node gr const) val (intros', ctxt') = Variable.import_terms true (map Thm.prop_of intros) ctxt val constant_table' = declare_consts (fold Term.add_const_names intros' []) constant_table fun translate_intro intro = let val head = HOLogic.dest_Trueprop (Logic.strip_imp_concl intro) val prems = map HOLogic.dest_Trueprop (Logic.strip_imp_prems intro) val prems' = Conj (map (translate_prem ensure_groundness ctxt' constant_table') prems) val clause = (dest_Rel (translate_literal ctxt' constant_table' head), prems') in clause end in (map translate_intro intros', constant_table') end fun depending_preds_of (key, intros) = fold Term.add_const_names (map Thm.prop_of intros) [] fun add_edges edges_of key G = let fun extend' key (G, visited) = (case try (Graph.get_node G) key of SOME v => let val new_edges = filter (fn k => is_some (try (Graph.get_node G) k)) (edges_of (key, v)) val (G', visited') = fold extend' (subtract (op =) (key :: visited) new_edges) (G, key :: visited) in (fold (Graph.add_edge o (pair key)) new_edges G', visited') end | NONE => (G, visited)) in fst (extend' key (G, [])) end fun print_intros ctxt gr consts = tracing (cat_lines (map (fn const => "Constant " ^ const ^ "has intros:\n" ^ cat_lines (map (Thm.string_of_thm ctxt) (Graph.get_node gr const))) consts)) (* translation of moded predicates *) (** generating graph of moded predicates **) (* could be moved to Predicate_Compile_Core *) fun requires_modes polarity cls = let fun req_mode_of pol (t, derivation) = (case fst (strip_comb t) of Const (c, _) => SOME (c, (pol, Predicate_Compile_Core.head_mode_of derivation)) | _ => NONE) fun req (Predicate_Compile_Aux.Prem t, derivation) = req_mode_of polarity (t, derivation) | req (Predicate_Compile_Aux.Negprem t, derivation) = req_mode_of (not polarity) (t, derivation) | req _ = NONE in maps (fn (_, prems) => map_filter req prems) cls end structure Mode_Graph = Graph( type key = string * (bool * Predicate_Compile_Aux.mode) val ord = prod_ord fast_string_ord (prod_ord bool_ord Predicate_Compile_Aux.mode_ord) ) fun mk_moded_clauses_graph ctxt scc gr = let val options = Predicate_Compile_Aux.default_options val mode_analysis_options = {use_generators = true, reorder_premises = true, infer_pos_and_neg_modes = true} fun infer prednames (gr, (pos_modes, neg_modes, random)) = let val (lookup_modes, lookup_neg_modes, needs_random) = ((fn s => the (AList.lookup (op =) pos_modes s)), (fn s => the (AList.lookup (op =) neg_modes s)), (fn s => member (op =) (the (AList.lookup (op =) random s)))) val (preds, all_vs, param_vs, all_modes, clauses) = Predicate_Compile_Core.prepare_intrs options ctxt prednames (maps (Core_Data.intros_of ctxt) prednames) val ((moded_clauses, random'), _) = 
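(* Mode inference returns, for every predicate, the inferred (polarity, mode) pairs together with the correspondingly moded clauses; random' records which predicates additionally need random value generators. *)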
Mode_Inference.infer_modes mode_analysis_options options (lookup_modes, lookup_neg_modes, needs_random) ctxt preds all_modes param_vs clauses val modes = map (fn (p, mps) => (p, map fst mps)) moded_clauses val pos_modes' = map (apsnd (map_filter (fn (true, m) => SOME m | _ => NONE))) modes val neg_modes' = map (apsnd (map_filter (fn (false, m) => SOME m | _ => NONE))) modes val _ = tracing ("Inferred modes:\n" ^ cat_lines (map (fn (s, ms) => s ^ ": " ^ commas (map (fn (p, m) => Predicate_Compile_Aux.string_of_mode m ^ (if p then "pos" else "neg")) ms)) modes)) val gr' = gr |> fold (fn (p, mps) => fold (fn (mode, cls) => Mode_Graph.new_node ((p, mode), cls)) mps) moded_clauses |> fold (fn (p, mps) => fold (fn (mode, cls) => fold (fn req => Mode_Graph.add_edge ((p, mode), req)) (requires_modes (fst mode) cls)) mps) moded_clauses in (gr', (AList.merge (op =) (op =) (pos_modes, pos_modes'), AList.merge (op =) (op =) (neg_modes, neg_modes'), AList.merge (op =) (op =) (random, random'))) end in fst (fold infer (rev scc) (Mode_Graph.empty, ([], [], []))) end fun declare_moded_predicate moded_preds table = let fun update' (p as (pred, (pol, mode))) table = if AList.defined (op =) table p then table else let val name = Long_Name.base_name pred ^ (if pol then "p" else "n") ^ Predicate_Compile_Aux.ascii_string_of_mode mode val p' = mk_conform (Name.enforce_case false) "pred" (map snd table) name in AList.update (op =) (p, p') table end in fold update' moded_preds table end fun mk_program ctxt moded_gr moded_preds (prog, (moded_pred_table, constant_table)) = let val moded_pred_table' = declare_moded_predicate moded_preds moded_pred_table fun mk_literal pol derivation constant_table' t = let val (p, args) = strip_comb t val mode = Predicate_Compile_Core.head_mode_of derivation val name = fst (dest_Const p) val p' = the (AList.lookup (op =) moded_pred_table' (name, (pol, mode))) val args' = map (translate_term ctxt constant_table') args in Rel (p', args') end fun mk_prem pol (indprem, derivation) constant_table = (case indprem of Predicate_Compile_Aux.Generator (s, T) => (Ground (s, T), constant_table) | _ => declare_consts (Term.add_const_names (Predicate_Compile_Aux.dest_indprem indprem) []) constant_table |> (fn constant_table' => (case indprem of Predicate_Compile_Aux.Negprem t => NegRel_of (mk_literal (not pol) derivation constant_table' t) | _ => mk_literal pol derivation constant_table' (Predicate_Compile_Aux.dest_indprem indprem), constant_table'))) fun mk_clause pred_name pol (ts, prems) (prog, constant_table) = let val constant_table' = declare_consts (fold Term.add_const_names ts []) constant_table val args = map (translate_term ctxt constant_table') ts val (prems', constant_table'') = fold_map (mk_prem pol) prems constant_table' in (((pred_name, args), Conj prems') :: prog, constant_table'') end fun mk_clauses (pred, mode as (pol, _)) = let val clauses = Mode_Graph.get_node moded_gr (pred, mode) val pred_name = the (AList.lookup (op =) moded_pred_table' (pred, mode)) in fold (mk_clause pred_name pol) clauses end in apsnd (pair moded_pred_table') (fold mk_clauses moded_preds (prog, constant_table)) end fun generate (use_modes, ensure_groundness) ctxt const = let fun strong_conn_of gr keys = Graph.strong_conn (Graph.restrict (member (op =) (Graph.all_succs gr keys)) gr) val gr = Core_Data.intros_graph_of ctxt val gr' = add_edges depending_preds_of const gr val scc = strong_conn_of gr' [const] val initial_constant_table = declare_consts [\<^const_name>\Groups.zero_class.zero\, 
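(* zero and Suc are pre-declared because mk_nat_term encodes natural-number literals in terms of them *)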
\<^const_name>\Suc\] [] in (case use_modes of SOME mode => let val moded_gr = mk_moded_clauses_graph ctxt scc gr val moded_gr' = Mode_Graph.restrict (member (op =) (Mode_Graph.all_succs moded_gr [(const, (true, mode))])) moded_gr val scc = Mode_Graph.strong_conn moded_gr' in apfst rev (apsnd snd (fold (mk_program ctxt moded_gr') (rev scc) ([], ([], initial_constant_table)))) end | NONE => let val _ = print_intros ctxt gr (flat scc) val constant_table = declare_consts (flat scc) initial_constant_table in apfst flat (fold_map (translate_intros ensure_groundness ctxt gr) (flat scc) constant_table) end) end (* implementation for fully enumerating predicates and for size-limited predicates for enumerating the values of a datatype upto a specific size *) fun add_ground_typ (Conj prems) = fold add_ground_typ prems | add_ground_typ (Ground (_, T)) = insert (op =) T | add_ground_typ _ = I fun mk_relname (Type (Tcon, Targs)) = Name.enforce_case false (Long_Name.base_name Tcon) ^ space_implode "_" (map mk_relname Targs) | mk_relname _ = raise Fail "unexpected type" fun mk_lim_relname T = "lim_" ^ mk_relname T fun is_recursive_constr T (Const (constr_name, T')) = member (op =) (binder_types T') T fun mk_ground_impl ctxt limited_types (T as Type (Tcon, Targs)) (seen, constant_table) = if member (op =) seen T then ([], (seen, constant_table)) else let val (limited, size) = (case AList.lookup (op =) limited_types T of SOME s => (true, s) | NONE => (false, 0)) val rel_name = (if limited then mk_lim_relname else mk_relname) T fun mk_impl (Const (constr_name, cT), recursive) (seen, constant_table) = let val constant_table' = declare_consts [constr_name] constant_table val Ts = binder_types cT val (rec_clauses, (seen', constant_table'')) = fold_map (mk_ground_impl ctxt limited_types) Ts (seen, constant_table') val vars = map (fn i => Var ("x" ^ string_of_int i)) (1 upto (length Ts)) val lim_var = if limited then if recursive then [AppF ("suc", [Var "Lim"])] else [Var "Lim"] else [] fun mk_prem v T' = if limited andalso T' = T then Rel (mk_lim_relname T', [Var "Lim", v]) else Rel (mk_relname T', [v]) val clause = ((rel_name, lim_var @ [maybe_AppF (translate_const constant_table'' constr_name, vars)]), Conj (map2 mk_prem vars Ts)) in (clause :: flat rec_clauses, (seen', constant_table'')) end val constrs = Function_Lib.inst_constrs_of ctxt T val constrs' = (constrs ~~ map (is_recursive_constr T) constrs) |> (fn cs => filter_out snd cs @ filter snd cs) val (clauses, constant_table') = apfst flat (fold_map mk_impl constrs' (T :: seen, constant_table)) val size_term = funpow size (fn t => AppF ("suc", [t])) (Cons "zero") in ((if limited then cons ((mk_relname T, [Var "x"]), Rel (mk_lim_relname T, [size_term, Var "x"])) else I) clauses, constant_table') end | mk_ground_impl ctxt _ T (seen, constant_table) = raise Fail ("unexpected type :" ^ Syntax.string_of_typ ctxt T) fun replace_ground (Conj prems) = Conj (map replace_ground prems) | replace_ground (Ground (x, T)) = Rel (mk_relname T, [Var x]) | replace_ground p = p fun add_ground_predicates ctxt limited_types (p, constant_table) = let val ground_typs = fold (add_ground_typ o snd) p [] val (grs, (_, constant_table')) = fold_map (mk_ground_impl ctxt limited_types) ground_typs ([], constant_table) val p' = map (apsnd replace_ground) p in ((flat grs) @ p', constant_table') end (* make depth-limited version of predicate *) fun mk_lim_rel_name rel_name = "lim_" ^ rel_name fun mk_depth_limited rel_names ((rel_name, ts), prem) = let fun has_positive_recursive_prems 
(Conj prems) = exists has_positive_recursive_prems prems | has_positive_recursive_prems (Rel (rel, ts)) = member (op =) rel_names rel | has_positive_recursive_prems _ = false fun mk_lim_prem (Conj prems) = Conj (map mk_lim_prem prems) | mk_lim_prem (p as Rel (rel, ts)) = if member (op =) rel_names rel then Rel (mk_lim_rel_name rel, Var "Lim" :: ts) else p | mk_lim_prem p = p in if has_positive_recursive_prems prem then ((mk_lim_rel_name rel_name, (AppF ("suc", [Var "Lim"])) :: ts), mk_lim_prem prem) else ((mk_lim_rel_name rel_name, (Var "Lim") :: ts), prem) end fun nat_term_of n = funpow n (fn t => AppF ("suc", [t])) (Cons "zero") fun add_limited_predicates limited_predicates (p, constant_table) = let fun add (rel_names, limit) p = let val clauses = filter (fn ((rel, _), _) => member (op =) rel_names rel) p val clauses' = map (mk_depth_limited rel_names) clauses fun mk_entry_clause rel_name = let val nargs = length (snd (fst (the (find_first (fn ((rel, _), _) => rel = rel_name) clauses)))) val vars = map (fn i => Var ("x" ^ string_of_int i)) (1 upto nargs) in (("limited_" ^ rel_name, vars), Rel ("lim_" ^ rel_name, nat_term_of limit :: vars)) end in (p @ (map mk_entry_clause rel_names) @ clauses') end in (fold add limited_predicates p, constant_table) end (* replace predicates in clauses *) (* replace (A, B, C) p = replace A by B in clauses of C *) fun replace ((from, to), location) p = let fun replace_prem (Conj prems) = Conj (map replace_prem prems) | replace_prem (r as Rel (rel, ts)) = if rel = from then Rel (to, ts) else r | replace_prem r = r in map (fn ((rel, args), prem) => ((rel, args), (if rel = location then replace_prem else I) prem)) p end (* reorder manually : reorder premises of ith clause of predicate p by a permutation perm *) fun reorder_manually reorder p = let fun reorder' ((rel, args), prem) seen = let val seen' = AList.map_default (op =) (rel, 0) (fn x => x + 1) seen val i = the (AList.lookup (op =) seen' rel) val perm = AList.lookup (op =) reorder (rel, i) val prem' = (case perm of SOME p => (case prem of Conj prems => Conj (map (nth prems) p) | _ => prem) | NONE => prem) in (((rel, args), prem'), seen') end in fst (fold_map reorder' p []) end (* rename variables to prolog-friendly names *) fun rename_vars_term renaming = map_vars (fn v => the (AList.lookup (op =) renaming v)) fun rename_vars_prem renaming = map_term_prem (rename_vars_term renaming) fun mk_renaming v renaming = (v, mk_conform (Name.enforce_case true) "Var" (map snd renaming) v) :: renaming fun rename_vars_clause ((rel, args), prem) = let val vars = fold_prem_terms add_vars prem (fold add_vars args []) val renaming = fold mk_renaming vars [] in ((rel, map (rename_vars_term renaming) args), rename_vars_prem renaming prem) end (* limit computation globally by some threshold *) fun limit_globally limit const_name (p, constant_table) = let val rel_names = fold (fn ((r, _), _) => insert (op =) r) p [] val p' = map (mk_depth_limited rel_names) p val rel_name = translate_const constant_table const_name val nargs = length (snd (fst (the (find_first (fn ((rel, _), _) => rel = rel_name) p)))) val vars = map (fn i => Var ("x" ^ string_of_int i)) (1 upto nargs) val entry_clause = ((rel_name, vars), Rel ("lim_" ^ rel_name, nat_term_of limit :: vars)) val p'' = filter_out (fn ((rel, _), _) => rel = rel_name) p in (entry_clause :: p' @ p'', constant_table) end (* post processing of generated prolog program *) fun post_process ctxt (options: code_options) const_name (p, constant_table) = (p, constant_table) |> (case 
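(* Processing order: optional global depth limit, groundness predicates for negated premises (if requested), per-predicate depth limits, predicate replacement, manual premise reordering, and Prolog-friendly variable renaming. Illustrative sketch of the depth limiting above, with an indicative predicate name: a clause (("append", ts), prem) with a recursive premise becomes (("lim_append", AppF ("suc", [Var "Lim"]) :: ts), prem') where recursive premises in prem' call "lim_append" with Var "Lim"; add_limited_predicates then adds an entry clause (("limited_append", vars), Rel ("lim_append", nat_term_of limit :: vars)) supplying the configured depth. *)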
#limit_globally options of SOME limit => limit_globally limit const_name | NONE => I) |> (if #ensure_groundness options then add_ground_predicates ctxt (#limited_types options) else I) |> tap (fn _ => tracing "Adding limited predicates...") |> add_limited_predicates (#limited_predicates options) |> tap (fn _ => tracing "Replacing predicates...") |> apfst (fold replace (#replacing options)) |> apfst (reorder_manually (#manual_reorder options)) |> apfst (map rename_vars_clause) (* code printer *) fun write_arith_op Plus = "+" | write_arith_op Minus = "-" fun write_term (Var v) = v | write_term (Cons c) = c | write_term (AppF (f, args)) = f ^ "(" ^ space_implode ", " (map write_term args) ^ ")" | write_term (ArithOp (oper, [a1, a2])) = write_term a1 ^ " " ^ write_arith_op oper ^ " " ^ write_term a2 | write_term (Number n) = string_of_int n fun write_rel (pred, args) = pred ^ "(" ^ space_implode ", " (map write_term args) ^ ")" fun write_prem (Conj prems) = space_implode ", " (map write_prem prems) | write_prem (Rel p) = write_rel p | write_prem (NotRel p) = "not(" ^ write_rel p ^ ")" | write_prem (Eq (l, r)) = write_term l ^ " = " ^ write_term r | write_prem (NotEq (l, r)) = write_term l ^ " \\= " ^ write_term r | write_prem (ArithEq (l, r)) = write_term l ^ " is " ^ write_term r | write_prem (NotArithEq (l, r)) = write_term l ^ " =\\= " ^ write_term r | write_prem _ = raise Fail "Not a valid prolog premise" fun write_clause (head, prem) = write_rel head ^ (if prem = Conj [] then "." else " :- " ^ write_prem prem ^ ".") fun write_program p = cat_lines (map write_clause p) (* query templates *) (** query and prelude for swi-prolog **) fun swi_prolog_query_first (rel, args) vnames = "eval :- once(" ^ rel ^ "(" ^ space_implode ", " (map write_term args) ^ ")),\n" ^ "writef('" ^ space_implode ";" (map (fn v => v ^ " = %w") vnames) ^ "\\n', [" ^ space_implode ", " vnames ^ "]).\n" fun swi_prolog_query_firstn n (rel, args) vnames = "eval :- findnsols(" ^ string_of_int n ^ ", (" ^ space_implode ", " vnames ^ "), " ^ rel ^ "(" ^ space_implode ", " (map write_term args) ^ "), Sols), writelist(Sols).\n" ^ "writelist([]).\n" ^ "writelist([(" ^ space_implode ", " vnames ^ ")|SolutionTail]) :- " ^ "writef('" ^ space_implode ";" (map (fn v => v ^ " = %w") vnames) ^ "\\n', [" ^ space_implode ", " vnames ^ "]), writelist(SolutionTail).\n" val swi_prolog_prelude = ":- use_module(library('dialect/ciao/aggregates')).\n" ^ ":- style_check(-singleton).\n" ^ ":- style_check(-discontiguous).\n" ^ ":- style_check(-atom).\n\n" ^ "main :- catch(eval, E, (print_message(error, E), fail)), halt.\n" ^ "main :- halt(1).\n" (** query and prelude for yap **) fun yap_query_first (rel, args) vnames = "eval :- once(" ^ rel ^ "(" ^ space_implode ", " (map write_term args) ^ ")),\n" ^ "format('" ^ space_implode ";" (map (fn v => v ^ " = ~w") vnames) ^ "\\n', [" ^ space_implode ", " vnames ^ "]).\n" val yap_prelude = ":- initialization(eval).\n" (* system-dependent query, prelude and invocation *) fun query system nsols = (case system of SWI_PROLOG => (case nsols of NONE => swi_prolog_query_first | SOME n => swi_prolog_query_firstn n) | YAP => (case nsols of NONE => yap_query_first | SOME n => error "No support for querying multiple solutions in the prolog system yap")) fun prelude system = (case system of SWI_PROLOG => swi_prolog_prelude | YAP => yap_prelude) fun invoke system file = let val (env_var, cmd) = (case system of SWI_PROLOG => ("ISABELLE_SWIPL", "\"$ISABELLE_SWIPL\" -q -t main -f ") | YAP => ("ISABELLE_YAP", 
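(* run below writes the generated program to a temporary file and passes its path here; if the required environment variable is unset, invoke only emits a warning and returns an empty output string. *)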
"\"$ISABELLE_YAP\" -L ")) in if getenv env_var = "" then (warning (env_var ^ " not set; could not execute code for " ^ string_of_system system); "") else (case Isabelle_System.bash_output (cmd ^ File.bash_path file) of (out, 0) => out | (_, rc) => error ("Error caused by prolog system " ^ env_var ^ ": return code " ^ string_of_int rc)) end (* parsing prolog solution *) val scan_number = Scan.many1 Symbol.is_ascii_digit val scan_atom = Scan.many1 (fn s => Symbol.is_ascii_lower s orelse Symbol.is_ascii_digit s orelse Symbol.is_ascii_quasi s) val scan_var = Scan.many1 (fn s => Symbol.is_ascii_upper s orelse Symbol.is_ascii_digit s orelse Symbol.is_ascii_quasi s) fun dest_Char (Symbol.Char s) = s val string_of = implode o map (dest_Char o Symbol.decode) fun int_of_symbol_list xs = fold (fn x => fn s => s * 10 + (ord x - ord "0")) xs 0 fun scan_terms xs = (((scan_term --| $$ ",") ::: scan_terms) || (scan_term >> single)) xs and scan_term xs = ((scan_number >> (Number o int_of_symbol_list)) || (scan_var >> (Var o string_of)) || ((scan_atom -- ($$ "(" |-- scan_terms --| $$ ")")) >> (fn (f, ts) => AppF (string_of f, ts))) || (scan_atom >> (Cons o string_of))) xs val parse_term = fst o Scan.finite Symbol.stopper (Scan.error (!! (fn _ => raise Fail "parsing prolog output failed")) scan_term) o raw_explode fun parse_solutions sol = let fun dest_eq s = (case space_explode "=" s of (l :: r :: []) => parse_term (unprefix " " r) | _ => raise Fail "unexpected equation in prolog output") fun parse_solution s = map dest_eq (space_explode ";" s) - in map parse_solution (Library.trim_split_lines sol) end + in map parse_solution (split_lines sol) end (* calling external interpreter and getting results *) fun run ctxt p (query_rel, args) vnames nsols = let val timeout = get_prolog_timeout ctxt val system = get_prolog_system ctxt val renaming = fold mk_renaming (fold add_vars args vnames) [] val vnames' = map (fn v => the (AList.lookup (op =) renaming v)) vnames val args' = map (rename_vars_term renaming) args val prog = prelude system ^ query system nsols (query_rel, args') vnames' ^ write_program p val _ = tracing ("Generated prolog program:\n" ^ prog) val solution = Timeout.apply timeout (fn prog => Isabelle_System.with_tmp_file "prolog_file" "" (fn prolog_file => (File.write prolog_file prog; invoke system prolog_file))) prog val _ = tracing ("Prolog returned solution(s):\n" ^ solution) val tss = parse_solutions solution in tss end (* restoring types in terms *) fun restore_term ctxt constant_table (Var s, T) = Free (s, T) | restore_term ctxt constant_table (Number n, \<^typ>\int\) = HOLogic.mk_number \<^typ>\int\ n | restore_term ctxt constant_table (Number n, _) = raise (Fail "unexpected type for number") | restore_term ctxt constant_table (Cons s, T) = Const (restore_const constant_table s, T) | restore_term ctxt constant_table (AppF (f, args), T) = let val thy = Proof_Context.theory_of ctxt val c = restore_const constant_table f val cT = Sign.the_const_type thy c val (argsT, resT) = strip_type cT val subst = Sign.typ_match thy (resT, T) Vartab.empty val argsT' = map (Envir.subst_type subst) argsT in list_comb (Const (c, Envir.subst_type subst cT), map (restore_term ctxt constant_table) (args ~~ argsT')) end (* restore numerals in natural numbers *) fun restore_nat_numerals t = if fastype_of t = \<^typ>\nat\ andalso is_some (try HOLogic.dest_nat t) then HOLogic.mk_number \<^typ>\nat\ (HOLogic.dest_nat t) else (case t of t1 $ t2 => restore_nat_numerals t1 $ restore_nat_numerals t2 | t => t) (* values 
command *) val preprocess_options = Predicate_Compile_Aux.Options { expected_modes = NONE, proposed_modes = [], proposed_names = [], show_steps = false, show_intermediate_results = false, show_proof_trace = false, show_modes = false, show_mode_inference = false, show_compilation = false, show_caught_failures = false, show_invalid_clauses = false, skip_proof = true, no_topmost_reordering = false, function_flattening = true, specialise = false, fail_safe_function_flattening = false, no_higher_order_predicate = [], inductify = false, detect_switches = true, smart_depth_limiting = true, compilation = Predicate_Compile_Aux.Pred } fun values ctxt soln t_compr = let val options = code_options_of (Proof_Context.theory_of ctxt) val split = (case t_compr of (Const (\<^const_name>\Collect\, _) $ t) => t | _ => error ("Not a set comprehension: " ^ Syntax.string_of_term ctxt t_compr)) val (body, Ts, fp) = HOLogic.strip_ptupleabs split val output_names = Name.variant_list (Term.add_free_names body []) (map (fn i => "x" ^ string_of_int i) (1 upto length Ts)) val output_frees = rev (map2 (curry Free) output_names Ts) val body = subst_bounds (output_frees, body) val (pred as Const (name, T), all_args) = (case strip_comb body of (Const (name, T), all_args) => (Const (name, T), all_args) | (head, _) => error ("Not a constant: " ^ Syntax.string_of_term ctxt head)) val _ = tracing "Preprocessing specification..." val T = Sign.the_const_type (Proof_Context.theory_of ctxt) name val t = Const (name, T) val thy' = Proof_Context.theory_of ctxt |> Predicate_Compile.preprocess preprocess_options t val ctxt' = Proof_Context.init_global thy' val _ = tracing "Generating prolog program..." val (p, constant_table) = generate (NONE, #ensure_groundness options) ctxt' name (* FIXME *) |> post_process ctxt' options name val constant_table' = declare_consts (fold Term.add_const_names all_args []) constant_table val args' = map (translate_term ctxt constant_table') all_args val _ = tracing "Running prolog program..." val tss = run ctxt p (translate_const constant_table' name, args') output_names soln val _ = tracing "Restoring terms..." val empty = Const(\<^const_name>\bot\, fastype_of t_compr) fun mk_insert x S = Const (\<^const_name>\Set.insert\, fastype_of x --> fastype_of S --> fastype_of S) $ x $ S fun mk_set_compr in_insert [] xs = rev ((Free ("dots", fastype_of t_compr)) :: (* FIXME proper name!? *) (if null in_insert then xs else (fold mk_insert in_insert empty) :: xs)) | mk_set_compr in_insert (t :: ts) xs = let val frees = Term.add_frees t [] in if null frees then mk_set_compr (t :: in_insert) ts xs else let val uu as (uuN, uuT) = singleton (Variable.variant_frees ctxt' [t]) ("uu", fastype_of t) val set_compr = HOLogic.mk_Collect (uuN, uuT, fold (fn (s, T) => fn t => HOLogic.mk_exists (s, T, t)) frees (HOLogic.mk_conj (HOLogic.mk_eq (Free uu, t), \<^term>\True\))) in mk_set_compr [] ts (set_compr :: (if null in_insert then xs else (fold mk_insert in_insert empty) :: xs)) end end in foldl1 (HOLogic.mk_binop \<^const_name>\sup\) (mk_set_compr [] (map (fn ts => HOLogic.mk_tuple (map (restore_nat_numerals o restore_term ctxt' constant_table) (ts ~~ Ts))) tss) []) end fun values_cmd print_modes soln raw_t state = let val ctxt = Toplevel.context_of state val t = Syntax.read_term ctxt raw_t val t' = values ctxt soln t val ty' = Term.type_of t' val ctxt' = Proof_Context.augment t' ctxt val _ = tracing "Printing terms..." 
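(* Hypothetical usage sketch (the predicate and its arguments are made up; the predicate must be amenable to the predicate compiler infrastructure): in a theory, values_prolog 3 "{(x, y). append x y [0, 1, 2]}" asks the external Prolog system for at most three pairs satisfying the comprehension; omitting the number requests a single solution. *)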
in Print_Mode.with_modes print_modes (fn () => Pretty.block [Pretty.quote (Syntax.pretty_term ctxt' t'), Pretty.fbrk, Pretty.str "::", Pretty.brk 1, Pretty.quote (Syntax.pretty_typ ctxt' ty')]) () end |> Pretty.writeln (* values command for Prolog queries *) val opt_print_modes = Scan.optional (\<^keyword>\(\ |-- Parse.!!! (Scan.repeat1 Parse.name --| \<^keyword>\)\)) [] val _ = Outer_Syntax.command \<^command_keyword>\values_prolog\ "enumerate and print comprehensions" (opt_print_modes -- Scan.optional (Parse.nat >> SOME) NONE -- Parse.term >> (fn ((print_modes, soln), t) => Toplevel.keep (values_cmd print_modes soln t))) (* quickcheck generator *) (* FIXME: a small clone of Predicate_Compile_Quickcheck - maybe refactor out commons *) val active = Attrib.setup_config_bool \<^binding>\quickcheck_prolog_active\ (K true) fun test_term ctxt (t, eval_terms) = let val t' = fold_rev absfree (Term.add_frees t []) t val options = code_options_of (Proof_Context.theory_of ctxt) val thy = Proof_Context.theory_of ctxt val ((((full_constname, constT), vs'), intro), thy1) = Predicate_Compile_Aux.define_quickcheck_predicate t' thy val thy2 = Context.theory_map (Named_Theorems.add_thm \<^named_theorems>\code_pred_def\ intro) thy1 val thy3 = Predicate_Compile.preprocess preprocess_options (Const (full_constname, constT)) thy2 val ctxt' = Proof_Context.init_global thy3 val _ = tracing "Generating prolog program..." val (p, constant_table) = generate (NONE, true) ctxt' full_constname |> post_process ctxt' (set_ensure_groundness options) full_constname val _ = tracing "Running prolog program..." val tss = run ctxt p (translate_const constant_table full_constname, map (Var o fst) vs') (map fst vs') (SOME 1) val _ = tracing "Restoring terms..." val counterexample = (case tss of [ts] => SOME (map (restore_term ctxt' constant_table) (ts ~~ map snd vs')) | _ => NONE) in Quickcheck.Result {counterexample = Option.map (pair true o curry (op ~~) (Term.add_free_names t [])) counterexample, evaluation_terms = Option.map (K []) counterexample, timings = [], reports = []} end fun test_goals ctxt _ insts goals = let val correct_inst_goals = Quickcheck_Common.instantiate_goals ctxt insts goals in Quickcheck_Common.collect_results (test_term ctxt) (maps (map snd) correct_inst_goals) [] end end diff --git a/src/HOL/Tools/Sledgehammer/sledgehammer_atp_systems.ML b/src/HOL/Tools/Sledgehammer/sledgehammer_atp_systems.ML --- a/src/HOL/Tools/Sledgehammer/sledgehammer_atp_systems.ML +++ b/src/HOL/Tools/Sledgehammer/sledgehammer_atp_systems.ML @@ -1,726 +1,726 @@ (* Title: HOL/Tools/ATP/atp_systems.ML Author: Fabian Immler, TU Muenchen Author: Jasmin Blanchette, TU Muenchen Setup for supported ATPs. 
*) signature SLEDGEHAMMER_ATP_SYSTEMS = sig type term_order = ATP_Problem.term_order type atp_format = ATP_Problem.atp_format type atp_formula_role = ATP_Problem.atp_formula_role type atp_failure = ATP_Proof.atp_failure type slice_spec = (int * string) * atp_format * string * string * bool type atp_config = {exec : bool -> string list * string list, arguments : Proof.context -> bool -> string -> Time.time -> string -> term_order * (unit -> (string * int) list) * (unit -> (string * real) list) -> string, proof_delims : (string * string) list, known_failures : (atp_failure * string) list, prem_role : atp_formula_role, best_slices : Proof.context -> (real * (slice_spec * string)) list, best_max_mono_iters : int, best_max_new_mono_instances : int} val default_max_mono_iters : int val default_max_new_mono_instances : int val force_sos : bool Config.T val term_order : string Config.T val e_smartN : string val e_autoN : string val e_fun_weightN : string val e_sym_offset_weightN : string val e_selection_heuristic : string Config.T val e_default_fun_weight : real Config.T val e_fun_weight_base : real Config.T val e_fun_weight_span : real Config.T val e_default_sym_offs_weight : real Config.T val e_sym_offs_weight_base : real Config.T val e_sym_offs_weight_span : real Config.T val spass_H1SOS : string val spass_H2 : string val spass_H2LR0LT0 : string val spass_H2NuVS0 : string val spass_H2NuVS0Red2 : string val spass_H2SOS : string val is_vampire_noncommercial_license_accepted : unit -> bool option val remote_atp : string -> string -> string list -> (string * string) list -> (atp_failure * string) list -> atp_formula_role -> (Proof.context -> slice_spec * string) -> string * (unit -> atp_config) val add_atp : string * (unit -> atp_config) -> theory -> theory val get_atp : theory -> string -> (unit -> atp_config) val supported_atps : theory -> string list val is_atp_installed : theory -> string -> bool val refresh_systems_on_tptp : unit -> unit val effective_term_order : Proof.context -> string -> term_order end; structure Sledgehammer_ATP_Systems : SLEDGEHAMMER_ATP_SYSTEMS = struct open ATP_Problem open ATP_Proof open ATP_Problem_Generate (* ATP configuration *) val default_max_mono_iters = 3 (* FUDGE *) val default_max_new_mono_instances = 100 (* FUDGE *) type slice_spec = (int * string) * atp_format * string * string * bool type atp_config = {exec : bool -> string list * string list, arguments : Proof.context -> bool -> string -> Time.time -> string -> term_order * (unit -> (string * int) list) * (unit -> (string * real) list) -> string, proof_delims : (string * string) list, known_failures : (atp_failure * string) list, prem_role : atp_formula_role, best_slices : Proof.context -> (real * (slice_spec * string)) list, best_max_mono_iters : int, best_max_new_mono_instances : int} (* "best_slices" must be found empirically, taking a holistic approach since the ATPs are run in parallel. Each slice has the format (time_frac, ((max_facts, fact_filter), format, type_enc, lam_trans, uncurried_aliases), extra) where time_frac = faction of the time available given to the slice (which should add up to 1.0) extra = extra information to the prover (e.g., SOS or no SOS). The last slice should be the most "normal" one, because it will get all the time available if the other slices fail early and also because it is used if slicing is disabled (e.g., by the minimizer). 
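For illustration, one of the Vampire slices below reads (0.333, (((500, meshN), TFF (Without_FOOL, Monomorphic), "mono_native", combs_or_liftingN, false), sosN)), i.e. a third of the time budget, at most 500 facts chosen by the "mesh" filter, monomorphic TFF output with the "mono_native" type encoding, combinators or lifting for lambdas, no uncurried aliases, and set of support enabled as the extra prover option.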
*) val mepoN = "mepo" val mashN = "mash" val meshN = "mesh" val tstp_proof_delims = [("% SZS output start CNFRefutation", "% SZS output end CNFRefutation"), ("% SZS output start Refutation", "% SZS output end Refutation"), ("% SZS output start Proof", "% SZS output end Proof")] val known_perl_failures = [(CantConnect, "HTTP error"), (NoPerl, "env: perl"), (NoLibwwwPerl, "Can't locate HTTP")] fun known_szs_failures wrap = [(Unprovable, wrap "CounterSatisfiable"), (Unprovable, wrap "Satisfiable"), (GaveUp, wrap "GaveUp"), (GaveUp, wrap "Unknown"), (GaveUp, wrap "Incomplete"), (ProofMissing, wrap "Theorem"), (ProofMissing, wrap "Unsatisfiable"), (TimedOut, wrap "Timeout"), (Inappropriate, wrap "Inappropriate"), (OutOfResources, wrap "ResourceOut"), (OutOfResources, wrap "MemoryOut"), (Interrupted, wrap "Forced"), (Interrupted, wrap "User")] val known_szs_status_failures = known_szs_failures (prefix "SZS status ") val known_says_failures = known_szs_failures (prefix " says ") structure Data = Theory_Data ( type T = ((unit -> atp_config) * stamp) Symtab.table val empty = Symtab.empty val extend = I fun merge data : T = Symtab.merge (eq_snd (op =)) data handle Symtab.DUP name => error ("Duplicate ATP: " ^ quote name) ) fun to_secs min time = Int.max (min, (Time.toMilliseconds time + 999) div 1000) val sosN = "sos" val no_sosN = "no_sos" val force_sos = Attrib.setup_config_bool \<^binding>\atp_force_sos\ (K false) val smartN = "smart" (* val kboN = "kbo" *) val lpoN = "lpo" val xweightsN = "_weights" val xprecN = "_prec" val xsimpN = "_simp" (* SPASS-specific *) (* Possible values for "atp_term_order": "smart", "(kbo|lpo)(_weights)?(_prec|_simp)?" *) val term_order = Attrib.setup_config_string \<^binding>\atp_term_order\ (K smartN) (* agsyHOL *) val agsyhol_config : atp_config = {exec = K (["AGSYHOL_HOME"], ["agsyHOL"]), arguments = fn _ => fn _ => fn _ => fn timeout => fn file_name => fn _ => "--proof --time-out " ^ string_of_int (to_secs 1 timeout) ^ " " ^ file_name, proof_delims = tstp_proof_delims, known_failures = known_szs_status_failures, prem_role = Hypothesis, best_slices = (* FUDGE *) K [(1.0, (((60, ""), THF (Without_FOOL, Monomorphic, THF_Without_Choice), "mono_native_higher", keep_lamsN, false), ""))], best_max_mono_iters = default_max_mono_iters - 1 (* FUDGE *), best_max_new_mono_instances = default_max_new_mono_instances} val agsyhol = (agsyholN, fn () => agsyhol_config) (* Alt-Ergo *) val alt_ergo_config : atp_config = {exec = K (["WHY3_HOME"], ["why3"]), arguments = fn _ => fn _ => fn _ => fn timeout => fn file_name => fn _ => "--format tptp --prover 'Alt-Ergo,0.95.2,' --timelimit " ^ string_of_int (to_secs 1 timeout) ^ " " ^ file_name, proof_delims = [], known_failures = [(ProofMissing, ": Valid"), (TimedOut, ": Timeout"), (GaveUp, ": Unknown")], prem_role = Hypothesis, best_slices = fn _ => (* FUDGE *) [(1.0, (((100, ""), TFF (Without_FOOL, Polymorphic), "poly_native", liftingN, false), ""))], best_max_mono_iters = default_max_mono_iters, best_max_new_mono_instances = default_max_new_mono_instances} val alt_ergo = (alt_ergoN, fn () => alt_ergo_config) (* E *) val e_smartN = "smart" val e_autoN = "auto" val e_fun_weightN = "fun_weight" val e_sym_offset_weightN = "sym_offset_weight" val e_selection_heuristic = Attrib.setup_config_string \<^binding>\atp_e_selection_heuristic\ (K e_smartN) (* FUDGE *) val e_default_fun_weight = Attrib.setup_config_real \<^binding>\atp_e_default_fun_weight\ (K 20.0) val e_fun_weight_base = Attrib.setup_config_real \<^binding>\atp_e_fun_weight_base\ 
(K 0.0) val e_fun_weight_span = Attrib.setup_config_real \<^binding>\atp_e_fun_weight_span\ (K 40.0) val e_default_sym_offs_weight = Attrib.setup_config_real \<^binding>\atp_e_default_sym_offs_weight\ (K 1.0) val e_sym_offs_weight_base = Attrib.setup_config_real \<^binding>\atp_e_sym_offs_weight_base\ (K ~20.0) val e_sym_offs_weight_span = Attrib.setup_config_real \<^binding>\atp_e_sym_offs_weight_span\ (K 60.0) fun e_selection_heuristic_case heuristic fw sow = if heuristic = e_fun_weightN then fw else if heuristic = e_sym_offset_weightN then sow else raise Fail ("unexpected " ^ quote heuristic) fun scaled_e_selection_weight ctxt heuristic w = w * Config.get ctxt (e_selection_heuristic_case heuristic e_fun_weight_span e_sym_offs_weight_span) + Config.get ctxt (e_selection_heuristic_case heuristic e_fun_weight_base e_sym_offs_weight_base) |> Real.ceil |> signed_string_of_int fun e_selection_weight_arguments ctxt heuristic sel_weights = if heuristic = e_fun_weightN orelse heuristic = e_sym_offset_weightN then (* supplied by Stephan Schulz *) "--split-clauses=4 --split-reuse-defs --simul-paramod --forward-context-sr \ \--destructive-er-aggressive --destructive-er --presat-simplify \ \--prefer-initial-clauses -winvfreqrank -c1 -Ginvfreqconjmax -F1 \ \--delete-bad-limit=150000000 -WSelectMaxLComplexAvoidPosPred -H'(4*" ^ e_selection_heuristic_case heuristic "FunWeight" "SymOffsetWeight" ^ "(SimulateSOS," ^ (e_selection_heuristic_case heuristic e_default_fun_weight e_default_sym_offs_weight |> Config.get ctxt |> Real.ceil |> signed_string_of_int) ^ ",20,1.5,1.5,1" ^ (sel_weights () |> map (fn (s, w) => "," ^ s ^ ":" ^ scaled_e_selection_weight ctxt heuristic w) |> implode) ^ "),3*ConjectureGeneralSymbolWeight(PreferNonGoals,200,100,200,50,50,1,100,\ \1.5,1.5,1),1*Clauseweight(PreferProcessed,1,1,1),1*\ \FIFOWeight(PreferProcessed))' " else "-xAuto " val e_ord_weights = map (fn (s, w) => s ^ ":" ^ string_of_int w) #> space_implode "," fun e_ord_precedence [_] = "" | e_ord_precedence info = info |> map fst |> space_implode "<" fun e_term_order_info_arguments false false _ = "" | e_term_order_info_arguments gen_weights gen_prec ord_info = let val ord_info = ord_info () in (if gen_weights then "--order-weights='" ^ e_ord_weights ord_info ^ "' " else "") ^ (if gen_prec then "--precedence='" ^ e_ord_precedence ord_info ^ "' " else "") end val e_config : atp_config = {exec = fn _ => (["E_HOME"], ["eprover"]), arguments = fn ctxt => fn _ => fn heuristic => fn timeout => fn file_name => fn ({is_lpo, gen_weights, gen_prec, ...}, ord_info, sel_weights) => "--auto-schedule --tstp-in --tstp-out --silent " ^ e_selection_weight_arguments ctxt heuristic sel_weights ^ e_term_order_info_arguments gen_weights gen_prec ord_info ^ "--term-ordering=" ^ (if is_lpo then "LPO4" else "KBO6") ^ " " ^ "--cpu-limit=" ^ string_of_int (to_secs 2 timeout) ^ " --proof-object=1 " ^ file_name, proof_delims = [("# SZS output start CNFRefutation", "# SZS output end CNFRefutation")] @ tstp_proof_delims, known_failures = [(TimedOut, "Failure: Resource limit exceeded (time)"), (TimedOut, "time limit exceeded")] @ known_szs_status_failures, prem_role = Conjecture, best_slices = fn ctxt => let val heuristic = Config.get ctxt e_selection_heuristic val modern = string_ord (getenv "E_VERSION", "2.3") <> LESS val (format, enc) = if modern then (THF (With_FOOL, Monomorphic, THF_Lambda_Free), "mono_native_higher_fool") else (TFF (Without_FOOL, Monomorphic), "mono_native") in (* FUDGE *) if heuristic = e_smartN then [(0.15, (((128, meshN), 
format, enc, combsN, false), e_fun_weightN)), (0.15, (((128, mashN), format, enc, combsN, false), e_sym_offset_weightN)), (0.15, (((91, mepoN), format, enc, combsN, false), e_autoN)), (0.15, (((1000, meshN), format, "poly_guards??", combsN, false), e_sym_offset_weightN)), (0.15, (((256, mepoN), format, enc, liftingN, false), e_fun_weightN)), (0.25, (((64, mashN), format, enc, combsN, false), e_fun_weightN))] else [(1.0, (((500, ""), format, enc, combsN, false), heuristic))] end, best_max_mono_iters = default_max_mono_iters, best_max_new_mono_instances = default_max_new_mono_instances} val e = (eN, fn () => e_config) (* iProver *) val iprover_config : atp_config = {exec = K (["IPROVER_HOME"], ["iproveropt", "iprover"]), arguments = fn _ => fn _ => fn _ => fn timeout => fn file_name => fn _ => "--clausifier \"$E_HOME\"/eprover " ^ "--clausifier_options \"--tstp-format --silent --cnf\" " ^ "--time_out_real " ^ string_of_real (Time.toReal timeout) ^ " " ^ file_name, proof_delims = tstp_proof_delims, known_failures = [(ProofIncomplete, "% SZS output start CNFRefutation")] @ known_szs_status_failures, prem_role = Hypothesis, best_slices = (* FUDGE *) K [(1.0, (((150, ""), FOF, "mono_guards??", liftingN, false), ""))], best_max_mono_iters = default_max_mono_iters, best_max_new_mono_instances = default_max_new_mono_instances} val iprover = (iproverN, fn () => iprover_config) (* LEO-II *) val leo2_config : atp_config = {exec = K (["LEO2_HOME"], ["leo.opt", "leo"]), arguments = fn _ => fn full_proofs => fn _ => fn timeout => fn file_name => fn _ => "--foatp e --atp e=\"$E_HOME\"/eprover \ \--atp epclextract=\"$E_HOME\"/epclextract \ \--proofoutput 1 --timeout " ^ string_of_int (to_secs 1 timeout) ^ " " ^ (if full_proofs then "--notReplLeibnizEQ --notReplAndrewsEQ --notUseExtCnfCmbd " else "") ^ file_name, proof_delims = tstp_proof_delims, known_failures = [(TimedOut, "CPU time limit exceeded, terminating"), (GaveUp, "No.of.Axioms")] @ known_szs_status_failures, prem_role = Hypothesis, best_slices = (* FUDGE *) K [(1.0, (((40, ""), THF (Without_FOOL, Monomorphic, THF_Without_Choice), "mono_native_higher", keep_lamsN, false), ""))], best_max_mono_iters = default_max_mono_iters - 1 (* FUDGE *), best_max_new_mono_instances = default_max_new_mono_instances} val leo2 = (leo2N, fn () => leo2_config) (* Leo-III *) (* Include choice? Disabled now since it's disabled for Satallax as well. *) val leo3_config : atp_config = {exec = K (["LEO3_HOME"], ["leo3"]), arguments = fn _ => fn full_proofs => fn _ => fn timeout => fn file_name => fn _ => file_name ^ " " ^ "--atp cvc=$CVC4_SOLVER --atp e=\"$E_HOME\"/eprover \ \-p -t " ^ string_of_int (to_secs 1 timeout) ^ " " ^ (if full_proofs then "--nleq --naeq " else ""), proof_delims = tstp_proof_delims, known_failures = known_szs_status_failures, prem_role = Hypothesis, best_slices = (* FUDGE *) K [(1.0, (((150, ""), THF (Without_FOOL, Polymorphic, THF_Without_Choice), "mono_native_higher", keep_lamsN, false), ""))], best_max_mono_iters = default_max_mono_iters - 1 (* FUDGE *), best_max_new_mono_instances = default_max_new_mono_instances} val leo3 = (leo3N, fn () => leo3_config) (* Satallax *) (* Choice is disabled until there is proper reconstruction for it. 
*) val satallax_config : atp_config = {exec = K (["SATALLAX_HOME"], ["satallax.opt", "satallax"]), arguments = fn _ => fn _ => fn _ => fn timeout => fn file_name => fn _ => (case getenv "E_HOME" of "" => "" | home => "-E " ^ home ^ "/eprover ") ^ "-p tstp -t " ^ string_of_int (to_secs 1 timeout) ^ " " ^ file_name, proof_delims = [("% SZS output start Proof", "% SZS output end Proof")], known_failures = known_szs_status_failures, prem_role = Hypothesis, best_slices = (* FUDGE *) K [(1.0, (((150, ""), THF (Without_FOOL, Monomorphic, THF_Without_Choice), "mono_native_higher", keep_lamsN, false), ""))], best_max_mono_iters = default_max_mono_iters - 1 (* FUDGE *), best_max_new_mono_instances = default_max_new_mono_instances} val satallax = (satallaxN, fn () => satallax_config) (* SPASS *) val spass_H1SOS = "-Heuristic=1 -SOS" val spass_H2 = "-Heuristic=2" val spass_H2LR0LT0 = "-Heuristic=2 -LR=0 -LT=0" val spass_H2NuVS0 = "-Heuristic=2 -RNuV=1 -Sorts=0" val spass_H2NuVS0Red2 = "-Heuristic=2 -RNuV=1 -Sorts=0 -RFRew=2 -RBRew=2 -RTaut=2" val spass_H2SOS = "-Heuristic=2 -SOS" val spass_config : atp_config = {exec = K (["SPASS_HOME"], ["SPASS"]), arguments = fn _ => fn full_proofs => fn extra_options => fn timeout => fn file_name => fn _ => "-Isabelle=1 " ^ (if full_proofs then "-CNFRenaming=0 -Splits=0 " else "") ^ "-TimeLimit=" ^ string_of_int (to_secs 1 timeout) ^ " " ^ file_name |> extra_options <> "" ? prefix (extra_options ^ " "), proof_delims = [("Here is a proof", "Formulae used in the proof")], known_failures = [(GaveUp, "SPASS beiseite: Completion found"), (TimedOut, "SPASS beiseite: Ran out of time"), (OutOfResources, "SPASS beiseite: Maximal number of loops exceeded"), (MalformedInput, "Undefined symbol"), (MalformedInput, "Free Variable"), (Unprovable, "No formulae and clauses found in input file"), (InternalError, "Please report this error")] @ known_perl_failures, prem_role = Conjecture, best_slices = fn _ => (* FUDGE *) [(0.1667, (((150, meshN), DFG Monomorphic, "mono_native", combsN, true), "")), (0.1667, (((500, meshN), DFG Monomorphic, "mono_native", liftingN, true), spass_H2SOS)), (0.1666, (((50, meshN), DFG Monomorphic, "mono_native", liftingN, true), spass_H2LR0LT0)), (0.1000, (((250, meshN), DFG Monomorphic, "mono_native", combsN, true), spass_H2NuVS0)), (0.1000, (((1000, mepoN), DFG Monomorphic, "mono_native", liftingN, true), spass_H1SOS)), (0.1000, (((150, meshN), DFG Monomorphic, "poly_guards??", liftingN, false), spass_H2NuVS0Red2)), (0.1000, (((300, meshN), DFG Monomorphic, "mono_native", combsN, true), spass_H2SOS)), (0.1000, (((100, meshN), DFG Monomorphic, "mono_native", combs_and_liftingN, true), spass_H2))], best_max_mono_iters = default_max_mono_iters, best_max_new_mono_instances = default_max_new_mono_instances} val spass = (spassN, fn () => spass_config) (* Vampire *) fun is_vampire_noncommercial_license_accepted () = let val flag = Options.default_string \<^system_option>\vampire_noncommercial\ |> String.map Char.toLower in if flag = "yes" then SOME true else if flag = "no" then SOME false else NONE end fun check_vampire_noncommercial () = (case is_vampire_noncommercial_license_accepted () of SOME true => () | SOME false => error (Pretty.string_of (Pretty.para "The Vampire prover may be used only for noncommercial applications")) | NONE => error (Pretty.string_of (Pretty.para "The Vampire prover is not activated; to activate it, set the Isabelle system option \ \\"vampire_noncommercial\" to \"yes\" (e.g. 
via the Isabelle/jEdit menu Plugin Options \ \/ Isabelle / General)"))) val vampire_basic_options = "--proof tptp --output_axiom_names on $VAMPIRE_EXTRA_OPTIONS" val vampire_full_proof_options = " --proof_extra free --forced_options avatar=off:equality_proxy=off:general_splitting=off:inequality_splitting=0:naming=0" val remote_vampire_command = "vampire " ^ vampire_basic_options ^ " " ^ vampire_full_proof_options ^ " -t %d %s" val vampire_config : atp_config = {exec = K (["VAMPIRE_HOME"], ["vampire"]), arguments = fn _ => fn full_proofs => fn sos => fn timeout => fn file_name => fn _ => (check_vampire_noncommercial (); vampire_basic_options ^ (if full_proofs then " " ^ vampire_full_proof_options else "") ^ " -t " ^ string_of_int (to_secs 1 timeout) ^ " --input_file " ^ file_name |> sos = sosN ? prefix "--sos on "), proof_delims = [("=========== Refutation ==========", "======= End of refutation =======")] @ tstp_proof_delims, known_failures = [(GaveUp, "UNPROVABLE"), (GaveUp, "CANNOT PROVE"), (Unprovable, "Satisfiability detected"), (Unprovable, "Termination reason: Satisfiable"), (Interrupted, "Aborted by signal SIGINT")] @ known_szs_status_failures, prem_role = Hypothesis, best_slices = fn ctxt => (* FUDGE *) [(0.333, (((500, meshN), TFF (Without_FOOL, Monomorphic), "mono_native", combs_or_liftingN, false), sosN)), (0.333, (((150, meshN), TFF (Without_FOOL, Monomorphic), "poly_tags??", combs_or_liftingN, false), sosN)), (0.334, (((50, meshN), TFF (Without_FOOL, Monomorphic), "mono_native", combs_or_liftingN, false), no_sosN))] |> Config.get ctxt force_sos ? (hd #> apfst (K 1.0) #> single), best_max_mono_iters = default_max_mono_iters, best_max_new_mono_instances = 2 * default_max_new_mono_instances (* FUDGE *)} val vampire = (vampireN, fn () => vampire_config) (* Z3 with TPTP syntax (half experimental, half legacy) *) val z3_tptp_config : atp_config = {exec = K (["Z3_TPTP_HOME"], ["z3_tptp"]), arguments = fn _ => fn _ => fn _ => fn timeout => fn file_name => fn _ => "-proof -t:" ^ string_of_int (to_secs 1 timeout) ^ " -file:" ^ file_name, proof_delims = [("SZS status Theorem", "")], known_failures = known_szs_status_failures, prem_role = Hypothesis, best_slices = (* FUDGE *) K [(0.5, (((250, meshN), TFF (Without_FOOL, Monomorphic), "mono_native", combsN, false), "")), (0.25, (((125, mepoN), TFF (Without_FOOL, Monomorphic), "mono_native", combsN, false), "")), (0.125, (((62, mashN), TFF (Without_FOOL, Monomorphic), "mono_native", combsN, false), "")), (0.125, (((31, meshN), TFF (Without_FOOL, Monomorphic), "mono_native", combsN, false), ""))], best_max_mono_iters = default_max_mono_iters, best_max_new_mono_instances = 2 * default_max_new_mono_instances (* FUDGE *)} val z3_tptp = (z3_tptpN, fn () => z3_tptp_config) (* Zipperposition *) val zipperposition_blsimp = "--mode ho-pragmatic --max-inferences 3 --ho-max-app-projections 0 --ho-max-elims 0 --ho-max-rigid-imitations 2 --ho-max-identifications 0 --ho-unif-max-depth 2 --boolean-reasoning no-cases --ext-rules ext-family --ext-rules-max-depth 1 --kbo-weight-fun invdocc --ho-prim-enum tf --ho-prim-enum-early-bird true --tptp-def-as-rewrite --ho-unif-level pragmatic-framework -q '1|const|conjecture-relative-var(1,s,f)' -q '1|prefer-processed|pnrefined(1,1,1,2,2,2,0.5)' -q '1|prefer-sos|staggered(1)' -q '2|prefer-fo|default' -q '1|prefer-neg-unit|orient-lmax(2,1,2,1,1)' -q '2|prefer-easy-ho|conjecture-relative-struct(1.5,3.5,2,3)' --ho-elim-leibniz 2 --ho-fixpoint-decider true --ho-pattern-decider false --ho-solid-decider true 
--ho-max-solidification 12 --select e-selection11 --solve-formulas true --sup-at-vars false --sup-at-var-headed false --lazy-cnf true --lazy-cnf-kind simp --lazy-cnf-renaming-threshold 4 --sine 50 --sine-tolerance 1.7 --sine-depth-max 3 --sine-depth-min 1 --sine-trim-implications true --ho-selection-restriction none --sup-from-var-headed false --sine-trim-implications true" val zipperposition_s6 = "--tptp-def-as-rewrite --rewrite-before-cnf true --mode ho-competitive --boolean-reasoning no-cases --ext-rules off --ho-prim-enum none --recognize-injectivity true --ho-elim-leibniz off --ho-unif-level full-framework --no-max-vars -q '3|const|conjecture-relative-var(1.02,l,f)' -q '1|prefer-ho-steps|conjecture-relative-var(1,s,f)' -q '1|prefer-processed|fifo' -q '3|by-app-var-num|pnrefined(2,1,1,1,2,2,2)' --select ho-selection5 --prec-gen-fun unary_first --solid-subsumption false --ignore-orphans false --ho-solid-decider true --ho-fixpoint-decider true --ho-pattern-decider true --sup-at-vars false --sup-at-var-headed false --sup-from-var-headed false --ho-neg-ext-simpl true" val zipperposition_cdots = "--mode ho-competitive --boolean-reasoning cases-simpl --ext-rules ext-family --ext-rules-max-depth 1 --ho-prim-enum pragmatic --ho-prim-max 1 --bool-subterm-selection A --avatar off --recognize-injectivity true --ho-elim-leibniz 1 --ho-unif-level full-framework --no-max-vars -q '6|prefer-sos|pnrefined(1,1,1,2,2,2,0.5)' -q '6|const|conjecture-relative-var(1.02,l,f)' -q '1|prefer-processed|fifo' -q '1|prefer-non-goals|conjecture-relative-var(1,l,f)' -q '4|prefer-easy-ho|conjecture-relative-var(1.01,s,f)' --select e-selection7 --ho-choice-inst true --sine 50 --sine-tolerance 2 --sine-depth-max 4 --sine-depth-min 1 --scan-clause-ac true --lambdasup 0 --kbo-weight-fun invfreqrank" val zipperposition_config : atp_config = {exec = K (["ZIPPERPOSITION_HOME"], ["zipperposition"]), arguments = fn _ => fn _ => fn extra_options => fn timeout => fn file_name => fn _ => "--input tptp --output tptp --timeout " ^ string_of_int (to_secs 1 timeout) ^ " " ^ file_name |> extra_options <> "" ? 
prefix (extra_options ^ " "), proof_delims = tstp_proof_delims, known_failures = known_szs_status_failures, prem_role = Conjecture, best_slices = fn _ => (* FUDGE *) [(0.333, (((128, "meshN"), THF (Without_FOOL, Monomorphic, THF_Without_Choice), "mono_native_higher", keep_lamsN, false), zipperposition_blsimp)), (0.333, (((32, "meshN"), THF (Without_FOOL, Polymorphic, THF_Without_Choice), "poly_native_higher", keep_lamsN, false), zipperposition_s6)), (0.334, (((512, "meshN"), THF (Without_FOOL, Monomorphic, THF_Without_Choice), "mono_native_higher", keep_lamsN, false), zipperposition_cdots))], best_max_mono_iters = default_max_mono_iters, best_max_new_mono_instances = default_max_new_mono_instances} val zipperposition = (zipperpositionN, fn () => zipperposition_config) (* Remote ATP invocation via SystemOnTPTP *) val remote_systems = Synchronized.var "atp_remote_systems" ([] : string list) fun get_remote_systems () = Timeout.apply (seconds 10.0) (fn () => (case Isabelle_System.bash_output "\"$ISABELLE_ATP/scripts/remote_atp\" -w 2>&1" of (output, 0) => split_lines output | (output, _) => (warning (case extract_known_atp_failure known_perl_failures output of SOME failure => string_of_atp_failure failure - | NONE => trim_line output); []))) () + | NONE => output); []))) () handle Timeout.TIMEOUT _ => [] fun find_remote_system name [] systems = find_first (String.isPrefix (name ^ "---")) systems | find_remote_system name (version :: versions) systems = case find_first (String.isPrefix (name ^ "---" ^ version)) systems of NONE => find_remote_system name versions systems | res => res fun get_remote_system name versions = Synchronized.change_result remote_systems (fn systems => (if null systems then get_remote_systems () else systems) |> `(`(find_remote_system name versions))) fun the_remote_system name versions = (case get_remote_system name versions of (SOME sys, _) => sys | (NONE, []) => error "SystemOnTPTP is currently not available" | (NONE, syss) => (case syss |> filter_out (String.isPrefix "%") |> filter_out (curry (op =) "") of [] => error "SystemOnTPTP is currently not available" | [msg] => error ("SystemOnTPTP is currently not available: " ^ msg) | syss => error ("System " ^ quote name ^ " is not available at SystemOnTPTP.\n(Available systems: " ^ commas_quote syss ^ ".)"))) val max_remote_secs = 1000 (* give Geoff Sutcliffe's servers a break *) fun remote_config system_name system_versions proof_delims known_failures prem_role best_slice = {exec = K (["ISABELLE_ATP"], ["scripts/remote_atp"]), arguments = fn _ => fn _ => fn command => fn timeout => fn file_name => fn _ => (if command <> "" then "-c " ^ quote command ^ " " else "") ^ "-s " ^ the_remote_system system_name system_versions ^ " " ^ "-t " ^ string_of_int (Int.min (max_remote_secs, to_secs 1 timeout)) ^ " " ^ file_name, proof_delims = union (op =) tstp_proof_delims proof_delims, known_failures = known_failures @ known_perl_failures @ known_says_failures, prem_role = prem_role, best_slices = fn ctxt => [(1.0, best_slice ctxt)], best_max_mono_iters = default_max_mono_iters, best_max_new_mono_instances = default_max_new_mono_instances} : atp_config fun remotify_config system_name system_versions best_slice ({proof_delims, known_failures, prem_role, ...} : atp_config) = remote_config system_name system_versions proof_delims known_failures prem_role best_slice fun remote_atp name system_name system_versions proof_delims known_failures prem_role best_slice = (remote_prefix ^ name, fn () => remote_config system_name system_versions 
proof_delims known_failures prem_role best_slice) fun remotify_atp (name, config) system_name system_versions best_slice = (remote_prefix ^ name, remotify_config system_name system_versions best_slice o config) fun gen_remote_waldmeister name type_enc = remote_atp name "Waldmeister" ["710"] tstp_proof_delims ([(OutOfResources, "Too many function symbols"), (Inappropriate, "**** Unexpected end of file."), (Crashed, "Unrecoverable Segmentation Fault")] @ known_szs_status_failures) Hypothesis (K (((50, ""), CNF_UEQ, type_enc, combsN, false), "") (* FUDGE *)) val remote_agsyhol = remotify_atp agsyhol "agsyHOL" ["1.0", "1"] (K (((60, ""), THF (Without_FOOL, Monomorphic, THF_Without_Choice), "mono_native_higher", keep_lamsN, false), "") (* FUDGE *)) val remote_alt_ergo = remotify_atp alt_ergo "Alt-Ergo" ["0.95.2"] (K (((250, ""), TFF (Without_FOOL, Polymorphic), "poly_native", keep_lamsN, false), "") (* FUDGE *)) val remote_e = remotify_atp e "E" ["2.0", "1.9.1", "1.8"] (K (((750, ""), TFF (Without_FOOL, Monomorphic), "mono_native", combsN, false), "") (* FUDGE *)) val remote_iprover = remotify_atp iprover "iProver" ["0.99"] (K (((150, ""), FOF, "mono_guards??", liftingN, false), "") (* FUDGE *)) val remote_leo2 = remotify_atp leo2 "LEO-II" ["1.5.0", "1.4", "1.3", "1.2", "1"] (K (((40, ""), THF (Without_FOOL, Monomorphic, THF_Without_Choice), "mono_native_higher", liftingN, false), "") (* FUDGE *)) val remote_leo3 = remotify_atp leo3 "Leo-III" ["1.1"] (K (((150, ""), THF (Without_FOOL, Polymorphic, THF_Without_Choice), "poly_native_higher", keep_lamsN, false), "") (* FUDGE *)) val remote_vampire = remotify_atp vampire "Vampire" ["THF-4.4"] (K (((400, ""), THF (Without_FOOL, Monomorphic, THF_Without_Choice), "mono_native_higher", keep_lamsN, false), remote_vampire_command) (* FUDGE *)) val remote_waldmeister = gen_remote_waldmeister waldmeisterN "raw_mono_tags??" 
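(* Illustrative sketch, not part of this changeset: a further remote prover would be
   registered with the same combinators, e.g. for a hypothetical prover "foo"

     val remote_foo =
       remotify_atp foo "Foo" ["1.0"]
         (K (((100, ""), FOF, "mono_guards??", liftingN, false), "") (* FUDGE *))

   where "foo", its SystemOnTPTP name, version list, and slice parameters are all
   made up for illustration. *)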
val remote_zipperposition = remotify_atp zipperposition "Zipperpin" ["2.0"] (K (((512, ""), THF (Without_FOOL, Monomorphic, THF_Without_Choice), "mono_native_higher", keep_lamsN, false), "") (* FUDGE *)) (* Dummy prover *) fun dummy_config prem_role format type_enc uncurried_aliases : atp_config = {exec = K (["ISABELLE_ATP"], ["scripts/dummy_atp"]), arguments = K (K (K (K (K (K ""))))), proof_delims = [], known_failures = known_szs_status_failures, prem_role = prem_role, best_slices = K [(1.0, (((200, ""), format, type_enc, if is_format_higher_order format then keep_lamsN else combsN, uncurried_aliases), ""))], best_max_mono_iters = default_max_mono_iters, best_max_new_mono_instances = default_max_new_mono_instances} val dummy_tfx_format = TFF (With_FOOL, Polymorphic) val dummy_tfx_config = dummy_config Hypothesis dummy_tfx_format "mono_native_fool" false val dummy_tfx = (dummy_tfxN, fn () => dummy_tfx_config) (* Setup *) fun add_atp (name, config) thy = Data.map (Symtab.update_new (name, (config, stamp ()))) thy handle Symtab.DUP name => error ("Duplicate ATP: " ^ quote name) fun get_atp thy name = fst (the (Symtab.lookup (Data.get thy) name)) handle Option.Option => error ("Unknown ATP: " ^ name) val supported_atps = Symtab.keys o Data.get fun is_atp_installed thy name = let val {exec, ...} = get_atp thy name () in exists (fn var => getenv var <> "") (fst (exec false)) end fun refresh_systems_on_tptp () = Synchronized.change remote_systems (fn _ => get_remote_systems ()) fun effective_term_order ctxt atp = let val ord = Config.get ctxt term_order in if ord = smartN then {is_lpo = false, gen_weights = (atp = spassN), gen_prec = (atp = spassN), gen_simp = false} else let val is_lpo = String.isSubstring lpoN ord in {is_lpo = is_lpo, gen_weights = not is_lpo andalso String.isSubstring xweightsN ord, gen_prec = String.isSubstring xprecN ord, gen_simp = String.isSubstring xsimpN ord} end end val atps = [agsyhol, alt_ergo, e, iprover, leo2, leo3, satallax, spass, vampire, z3_tptp, zipperposition, remote_agsyhol, remote_alt_ergo, remote_e, remote_iprover, remote_leo2, remote_leo3, remote_vampire, remote_waldmeister, remote_zipperposition, dummy_tfx] val _ = Theory.setup (fold add_atp atps) end; diff --git a/src/Pure/Admin/components.scala b/src/Pure/Admin/components.scala --- a/src/Pure/Admin/components.scala +++ b/src/Pure/Admin/components.scala @@ -1,355 +1,354 @@ /* Title: Pure/Admin/components.scala Author: Makarius Isabelle system components. 
*/ package isabelle import java.io.{File => JFile} object Components { /* archive name */ object Archive { val suffix: String = ".tar.gz" def apply(name: String): String = if (name == "") error("Bad component name: " + quote(name)) else name + suffix def unapply(archive: String): Option[String] = { for { name0 <- Library.try_unsuffix(suffix, archive) name <- proper_string(name0) } yield name } def get_name(archive: String): String = unapply(archive) getOrElse error("Bad component archive name (expecting .tar.gz): " + quote(archive)) } /* component collections */ def default_component_repository: String = Isabelle_System.getenv("ISABELLE_COMPONENT_REPOSITORY") val default_components_base: Path = Path.explode("$ISABELLE_COMPONENTS_BASE") def admin(dir: Path): Path = dir + Path.explode("Admin/components") def contrib(dir: Path = Path.current, name: String = ""): Path = dir + Path.explode("contrib") + Path.explode(name) def unpack(dir: Path, archive: Path, progress: Progress = new Progress): String = { val name = Archive.get_name(archive.file_name) progress.echo("Unpacking " + name) Isabelle_System.gnutar("-xzf " + File.bash_path(archive), dir = dir).check name } def resolve(base_dir: Path, names: List[String], target_dir: Option[Path] = None, copy_dir: Option[Path] = None, progress: Progress = new Progress) { Isabelle_System.make_directory(base_dir) for (name <- names) { val archive_name = Archive(name) val archive = base_dir + Path.explode(archive_name) if (!archive.is_file) { val remote = Components.default_component_repository + "/" + archive_name progress.echo("Getting " + remote) Bytes.write(archive, Url.read_bytes(Url(remote))) } for (dir <- copy_dir) { Isabelle_System.make_directory(dir) File.copy(archive, dir) } unpack(target_dir getOrElse base_dir, archive, progress = progress) } } def purge(dir: Path, platform: Platform.Family.Value) { def purge_platforms(platforms: String*): Set[String] = platforms.flatMap(name => List("arm64-" + name, "x86-" + name, "x86_64_32-" + name, "x86_64-" + name)).toSet + "ppc-darwin" + "arm64-linux" val purge_set = platform match { case Platform.Family.linux => purge_platforms("darwin", "cygwin", "windows") case Platform.Family.macos => purge_platforms("linux", "cygwin", "windows") case Platform.Family.windows => purge_platforms("linux", "darwin") } File.find_files(dir.file, (file: JFile) => file.isDirectory && purge_set(file.getName), include_dirs = true).foreach(Isabelle_System.rm_tree) } /* component directory content */ def settings(dir: Path = Path.current): Path = dir + Path.explode("etc/settings") def components(dir: Path = Path.current): Path = dir + Path.explode("etc/components") def check_dir(dir: Path): Boolean = settings(dir).is_file || components(dir).is_file def read_components(dir: Path): List[String] = split_lines(File.read(components(dir))).filter(_.nonEmpty) def write_components(dir: Path, lines: List[String]): Unit = File.write(components(dir), terminate_lines(lines)) /* component repository content */ val components_sha1: Path = Path.explode("~~/Admin/components/components.sha1") sealed case class SHA1_Digest(sha1: String, file_name: String) { override def toString: String = sha1 + " " + file_name } def read_components_sha1(lines: List[String] = Nil): List[SHA1_Digest] = (proper_list(lines) getOrElse split_lines(File.read(components_sha1))).flatMap(line => Word.explode(line) match { case Nil => None case List(sha1, name) => Some(SHA1_Digest(sha1, name)) case _ => error("Bad components.sha1 entry: " + quote(line)) }) def 
write_components_sha1(entries: List[SHA1_Digest]): Unit = File.write(components_sha1, entries.sortBy(_.file_name).mkString("", "\n", "\n")) /** manage user components **/ val components_path = Path.explode("$ISABELLE_HOME_USER/etc/components") def read_components(): List[String] = if (components_path.is_file) Library.trim_split_lines(File.read(components_path)) else Nil def write_components(lines: List[String]): Unit = { Isabelle_System.make_directory(components_path.dir) File.write(components_path, Library.terminate_lines(lines)) } def update_components(add: Boolean, path0: Path, progress: Progress = new Progress): Unit = { val path = path0.expand.absolute if (!(path + Path.explode("etc/settings")).is_file && !(path + Path.explode("etc/components")).is_file) error("Bad component directory: " + path) val lines1 = read_components() val lines2 = lines1.filter(line => line.isEmpty || line.startsWith("#") || !File.eq(Path.explode(line), path)) val lines3 = if (add) lines2 ::: List(path.implode) else lines2 if (lines1 != lines3) write_components(lines3) val prefix = if (lines1 == lines3) "Unchanged" else if (add) "Added" else "Removed" progress.echo(prefix + " component " + path) } /* main entry point */ def main(args: Array[String]): Unit = { Command_Line.tool { for (arg <- args) { val add = if (arg.startsWith("+")) true else if (arg.startsWith("-")) false else error("Bad argument: " + quote(arg)) val path = Path.explode(arg.substring(1)) update_components(add, path, progress = new Console_Progress) } } } /** build and publish components **/ def build_components( options: Options, components: List[Path], progress: Progress = new Progress, publish: Boolean = false, force: Boolean = false, update_components_sha1: Boolean = false) { val archives: List[Path] = for (path <- components) yield { path.file_name match { case Archive(_) => path case name => if (!path.is_dir) error("Bad component directory: " + path) else if (!check_dir(path)) { error("Malformed component directory: " + path + "\n (requires " + settings() + " or " + Components.components() + ")") } else { val component_path = path.expand val archive_dir = component_path.dir val archive_name = Archive(name) val archive = archive_dir + Path.explode(archive_name) if (archive.is_file && !force) { error("Component archive already exists: " + archive) } progress.echo("Packaging " + archive_name) Isabelle_System.gnutar("-czf " + File.bash_path(archive) + " " + Bash.string(name), dir = archive_dir).check archive } } } if ((publish && archives.nonEmpty) || update_components_sha1) { options.string("isabelle_components_server") match { case SSH.Target(user, host) => using(SSH.open_session(options, host = host, user = user))(ssh => { val components_dir = Path.explode(options.string("isabelle_components_dir")) val contrib_dir = Path.explode(options.string("isabelle_components_contrib_dir")) for (dir <- List(components_dir, contrib_dir) if !ssh.is_dir(dir)) { error("Bad remote directory: " + dir) } if (publish) { for (archive <- archives) { val archive_name = archive.file_name val name = Archive.get_name(archive_name) val remote_component = components_dir + archive.base val remote_contrib = contrib_dir + Path.explode(name) // component archive if (ssh.is_file(remote_component) && !force) { error("Remote component archive already exists: " + remote_component) } progress.echo("Uploading " + archive_name) ssh.write_file(remote_component, archive) // contrib directory val is_standard_component = Isabelle_System.with_tmp_dir("component")(tmp_dir => { 
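// unpack the archive into the temporary directory and test whether it has the
// standard component layout (etc/settings or etc/components)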
Isabelle_System.gnutar("-xzf " + File.bash_path(archive), dir = tmp_dir).check check_dir(tmp_dir + Path.explode(name)) }) if (is_standard_component) { if (ssh.is_dir(remote_contrib)) { if (force) ssh.rm_tree(remote_contrib) else error("Remote component directory already exists: " + remote_contrib) } progress.echo("Unpacking remote " + archive_name) ssh.execute("tar -C " + ssh.bash_path(contrib_dir) + " -xzf " + ssh.bash_path(remote_component)).check } else { progress.echo_warning("No unpacking of non-standard component: " + archive_name) } } } // remote SHA1 digests if (update_components_sha1) { val lines = for { entry <- ssh.read_dir(components_dir) if entry.is_file && entry.name.endsWith(Archive.suffix) } yield { progress.echo("Digesting remote " + entry.name) - Library.trim_line( - ssh.execute("cd " + ssh.bash_path(components_dir) + - "; sha1sum " + Bash.string(entry.name)).check.out) + ssh.execute("cd " + ssh.bash_path(components_dir) + + "; sha1sum " + Bash.string(entry.name)).check.out } write_components_sha1(read_components_sha1(lines)) } }) case s => error("Bad isabelle_components_server: " + quote(s)) } } // local SHA1 digests { val new_entries = for (archive <- archives) yield { val file_name = archive.file_name progress.echo("Digesting local " + file_name) val sha1 = SHA1.digest(archive).rep SHA1_Digest(sha1, file_name) } val new_names = new_entries.map(_.file_name).toSet write_components_sha1( new_entries ::: read_components_sha1().filterNot(entry => new_names.contains(entry.file_name))) } } /* Isabelle tool wrapper */ private val relevant_options = List("isabelle_components_server", "isabelle_components_dir", "isabelle_components_contrib_dir") val isabelle_tool = Isabelle_Tool("build_components", "build and publish Isabelle components", Scala_Project.here, args => { var publish = false var update_components_sha1 = false var force = false var options = Options.init() def show_options: String = cat_lines(relevant_options.map(name => options.options(name).print)) val getopts = Getopts(""" Usage: isabelle build_components [OPTIONS] ARCHIVES... DIRS... Options are: -P publish on SSH server (see options below) -f force: overwrite existing component archives and directories -o OPTION override Isabelle system OPTION (via NAME=VAL or NAME) -u update all SHA1 keys in Isabelle repository Admin/components Build and publish Isabelle components as .tar.gz archives on SSH server, depending on system options: """ + Library.prefix_lines(" ", show_options) + "\n", "P" -> (_ => publish = true), "f" -> (_ => force = true), "o:" -> (arg => options = options + arg), "u" -> (_ => update_components_sha1 = true)) val more_args = getopts(args) if (more_args.isEmpty && !update_components_sha1) getopts.usage() val progress = new Console_Progress build_components(options, more_args.map(Path.explode), progress = progress, publish = publish, force = force, update_components_sha1 = update_components_sha1) }) } diff --git a/src/Pure/System/isabelle_system.ML b/src/Pure/System/isabelle_system.ML --- a/src/Pure/System/isabelle_system.ML +++ b/src/Pure/System/isabelle_system.ML @@ -1,159 +1,159 @@ (* Title: Pure/System/isabelle_system.ML Author: Makarius Isabelle system support. 
*) signature ISABELLE_SYSTEM = sig val bash_process: string -> Process_Result.T val bash_output_check: string -> string val bash_output: string -> string * int val bash: string -> int val bash_functions: unit -> string list val check_bash_function: Proof.context -> string * Position.T -> string val rm_tree: Path.T -> unit val make_directory: Path.T -> Path.T val mkdir: Path.T -> unit val copy_dir: Path.T -> Path.T -> unit val copy_file: Path.T -> Path.T -> unit val copy_file_base: Path.T * Path.T -> Path.T -> unit val create_tmp_path: string -> string -> Path.T val with_tmp_file: string -> string -> (Path.T -> 'a) -> 'a val with_tmp_dir: string -> (Path.T -> 'a) -> 'a val download: string -> string end; structure Isabelle_System: ISABELLE_SYSTEM = struct (* bash *) fun bash_process script = Scala.function_thread "bash_process" ("export ISABELLE_TMP=" ^ Bash.string (getenv "ISABELLE_TMP") ^ "\n" ^ script) |> space_explode "\000" |> (fn [] => raise Exn.Interrupt | [err] => error err | a :: b :: c :: d :: lines => let val rc = Value.parse_int a; val (elapsed, cpu) = apply2 (Time.fromMilliseconds o Value.parse_int) (b, c); val (out_lines, err_lines) = chop (Value.parse_int d) lines; in Process_Result.make {rc = rc, out_lines = out_lines, err_lines = err_lines, timing = {elapsed = elapsed, cpu = cpu, gc = Time.zeroTime}} end | _ => raise Fail "Malformed Isabelle/Scala result"); fun bash_output_check s = let val res = bash_process s in - if Process_Result.ok res then trim_line (Process_Result.out res) - else error (trim_line (Process_Result.err res)) + if Process_Result.ok res then Process_Result.out res + else error (Process_Result.err res) end; fun bash_output s = let val res = bash_process s; - val _ = warning (trim_line (Process_Result.err res)); + val _ = warning (Process_Result.err res); in (Process_Result.out res, Process_Result.rc res) end; fun bash s = let val (out, rc) = bash_output s; - val _ = writeln (trim_line out); + val _ = writeln out; in rc end; (* bash functions *) fun bash_functions () = bash_output_check "declare -Fx" |> split_lines |> map_filter (space_explode " " #> try List.last); fun check_bash_function ctxt arg = Completion.check_entity Markup.bash_functionN (bash_functions () |> map (rpair Position.none)) ctxt arg; (* directory operations *) fun system_command cmd = if bash cmd <> 0 then error ("System command failed: " ^ cmd) else (); fun rm_tree path = system_command ("rm -r -f " ^ File.bash_path path); fun make_directory path = let val _ = if File.is_dir path then () else (bash ("perl -e \"use File::Path make_path; make_path('" ^ File.standard_path path ^ "');\""); if File.is_dir path then () else error ("Failed to create directory: " ^ Path.print path)); in path end; fun mkdir path = if File.is_dir path then () else OS.FileSys.mkDir (File.platform_path path); fun copy_dir src dst = if File.eq (src, dst) then () else (system_command ("cp -p -R -f " ^ File.bash_path src ^ "/. 
" ^ File.bash_path dst); ()); fun copy_file src0 dst0 = let val src = Path.expand src0; val dst = Path.expand dst0; val target = if File.is_dir dst then dst + Path.base src else dst; in if File.eq (src, target) then () else ignore (system_command ("cp -p -f " ^ File.bash_path src ^ " " ^ File.bash_path target)) end; fun copy_file_base (base_dir, src0) target_dir = let val src = Path.expand src0; val src_dir = Path.dir src; val _ = if Path.starts_basic src then () else error ("Illegal path specification " ^ Path.print src ^ " beyond base directory"); in copy_file (base_dir + src) (make_directory (target_dir + src_dir)) end; (* tmp files *) fun create_tmp_path name ext = let val path = File.tmp_path (Path.basic (name ^ serial_string ()) |> Path.ext ext); val _ = File.exists path andalso raise Fail ("Temporary file already exists: " ^ Path.print path); in path end; fun with_tmp_file name ext f = let val path = create_tmp_path name ext in Exn.release (Exn.capture f path before ignore (try File.rm path)) end; (* tmp dirs *) fun with_tmp_dir name f = let val path = create_tmp_path name "" in Exn.release (Exn.capture f (make_directory path) before ignore (try rm_tree path)) end; (* download file *) fun download url = with_tmp_file "download" "" (fn path => let val s = "curl --fail --silent --location " ^ Bash.string url ^ " > " ^ File.bash_path path; val res = bash_process s; in if Process_Result.ok res then File.read path else cat_error ("Failed to download " ^ quote url) (Process_Result.err res) end); end; diff --git a/src/Pure/System/process_result.ML b/src/Pure/System/process_result.ML --- a/src/Pure/System/process_result.ML +++ b/src/Pure/System/process_result.ML @@ -1,61 +1,61 @@ (* Title: Pure/System/process_result.scala Author: Makarius Result of system process. *) signature PROCESS_RESULT = sig type T val make: {rc: int, out_lines: string list, err_lines: string list, timing: Timing.timing} -> T val rc: T -> int val out_lines: T -> string list val err_lines: T -> string list val timing: T -> Timing.timing val out: T -> string val err: T -> string val ok: T -> bool val interrupted: T -> bool val check_rc: (int -> bool) -> T -> T val check: T -> T end; structure Process_Result: PROCESS_RESULT = struct abstype T = Process_Result of {rc: int, out_lines: string list, err_lines: string list, timing: Timing.timing} with val make = Process_Result; fun rep (Process_Result args) = args; val rc = #rc o rep; val out_lines = #out_lines o rep; val err_lines = #err_lines o rep; val timing = #timing o rep; -val out = cat_lines o out_lines; -val err = cat_lines o err_lines; +val out = trim_line o cat_lines o out_lines; +val err = trim_line o cat_lines o err_lines; fun ok result = rc result = 0; fun interrupted result = rc result = Exn.interrupt_return_code; fun check_rc pred result = if pred (rc result) then result else if interrupted result then raise Exn.Interrupt else error (err result); val check = check_rc (fn rc => rc = 0); end; end; diff --git a/src/Pure/System/process_result.scala b/src/Pure/System/process_result.scala --- a/src/Pure/System/process_result.scala +++ b/src/Pure/System/process_result.scala @@ -1,85 +1,85 @@ /* Title: Pure/System/process_result.scala Author: Makarius Result of system process. 
*/ package isabelle object Process_Result { def print_return_code(rc: Int): String = "Return code: " + rc + rc_text(rc) def print_rc(rc: Int): String = "return code " + rc + rc_text(rc) def rc_text(rc: Int): String = return_code_text.get(rc) match { case None => "" case Some(text) => " (" + text + ")" } private val return_code_text = Map(0 -> "OK", 1 -> "ERROR", 2 -> "FAILURE", 127 -> "COMMAND NOT FOUND", 130 -> "INTERRUPT", 131 -> "QUIT SIGNAL", 137 -> "KILL SIGNAL", 138 -> "BUS ERROR", 139 -> "SEGMENTATION VIOLATION", 142 -> "TIMEOUT", 143 -> "TERMINATION SIGNAL") val timeout_rc = 142 } final case class Process_Result( rc: Int, out_lines: List[String] = Nil, err_lines: List[String] = Nil, timing: Timing = Timing.zero) { - def out: String = cat_lines(out_lines) - def err: String = cat_lines(err_lines) + def out: String = Library.trim_line(cat_lines(out_lines)) + def err: String = Library.trim_line(cat_lines(err_lines)) def output(outs: List[String]): Process_Result = copy(out_lines = out_lines ::: outs.flatMap(split_lines)) def errors(errs: List[String]): Process_Result = copy(err_lines = err_lines ::: errs.flatMap(split_lines)) def error(err: String): Process_Result = if (err.isEmpty) this else errors(List(err)) def ok: Boolean = rc == 0 def interrupted: Boolean = rc == Exn.Interrupt.return_code def timeout_rc: Process_Result = copy(rc = Process_Result.timeout_rc) def timeout: Boolean = rc == Process_Result.timeout_rc def error_rc: Process_Result = if (interrupted) this else copy(rc = rc max 1) def errors_rc(errs: List[String]): Process_Result = if (errs.isEmpty) this else errors(errs).error_rc def check_rc(pred: Int => Boolean): Process_Result = if (pred(rc)) this else if (interrupted) throw Exn.Interrupt() else Exn.error(err) def check: Process_Result = check_rc(_ == 0) def print_return_code: String = Process_Result.print_return_code(rc) def print_rc: String = Process_Result.print_rc(rc) def print: Process_Result = { Output.warning(err) Output.writeln(out) copy(out_lines = Nil, err_lines = Nil) } def print_stdout: Process_Result = { Output.warning(err, stdout = true) Output.writeln(out, stdout = true) copy(out_lines = Nil, err_lines = Nil) } } diff --git a/src/Pure/Thy/presentation.scala b/src/Pure/Thy/presentation.scala --- a/src/Pure/Thy/presentation.scala +++ b/src/Pure/Thy/presentation.scala @@ -1,787 +1,787 @@ /* Title: Pure/Thy/present.scala Author: Makarius HTML/PDF presentation of theory documents. 
*/ package isabelle import scala.collection.immutable.SortedMap object Presentation { /** HTML documents **/ val fonts_path = Path.explode("fonts") sealed case class HTML_Document(title: String, content: String) def html_context(fonts_url: String => String = HTML.fonts_url()): HTML_Context = new HTML_Context(fonts_url) final class HTML_Context private[Presentation](fonts_url: String => String) { def init_fonts(dir: Path) { val fonts_dir = Isabelle_System.make_directory(dir + fonts_path) for (entry <- Isabelle_Fonts.fonts(hidden = true)) File.copy(entry.path, fonts_dir) } def head(title: String, rest: XML.Body = Nil): XML.Tree = HTML.div("head", HTML.chapter(title) :: rest) def source(body: XML.Body): XML.Tree = HTML.pre("source", body) def contents(heading: String, items: List[XML.Body], css_class: String = "contents") : List[XML.Elem] = { if (items.isEmpty) Nil else List(HTML.div(css_class, List(HTML.section(heading), HTML.itemize(items)))) } def output_document(title: String, body: XML.Body): String = HTML.output_document( List( HTML.style(HTML.fonts_css(fonts_url) + "\n\n" + File.read(HTML.isabelle_css)), HTML.title(title)), List(HTML.source(body)), css = "", structural = false) def html_document(title: String, body: XML.Body): HTML_Document = HTML_Document(title, output_document(title, body)) } /* presentation elements */ sealed case class Elements( html: Markup.Elements = Markup.Elements.empty, entity: Markup.Elements = Markup.Elements.empty, language: Markup.Elements = Markup.Elements.empty) val elements1: Elements = Elements( html = Rendering.foreground_elements ++ Rendering.text_color_elements + Markup.NUMERAL + Markup.COMMENT + Markup.ENTITY + Markup.LANGUAGE, entity = Markup.Elements(Markup.THEORY)) val elements2: Elements = Elements( html = elements1.html ++ Rendering.markdown_elements, language = Markup.Elements(Markup.Language.DOCUMENT)) /* HTML */ private val div_elements = Set(HTML.div.name, HTML.pre.name, HTML.par.name, HTML.list.name, HTML.enum.name, HTML.descr.name) def make_html( elements: Elements, entity_link: (Properties.T, XML.Body) => Option[XML.Tree], xml: XML.Body): XML.Body = { def html_div(html: XML.Body): Boolean = html exists { case XML.Elem(markup, body) => div_elements.contains(markup.name) || html_div(body) case XML.Text(_) => false } def html_class(c: String, html: XML.Body): XML.Body = if (c == "") html else if (html_div(html)) List(HTML.div(c, html)) else List(HTML.span(c, html)) def html_body(xml_body: XML.Body): XML.Body = xml_body flatMap { case XML.Wrapped_Elem(markup, _, body) => html_body(List(XML.Elem(markup, body))) case XML.Elem(Markup(Markup.ENTITY, props @ Markup.Kind(kind)), body) => val body1 = html_body(body) if (elements.entity(kind)) { entity_link(props, body1) match { case Some(link) => List(link) case None => body1 } } else body1 case XML.Elem(Markup(Markup.LANGUAGE, Markup.Name(name)), body) => html_class(if (elements.language(name)) name else "", html_body(body)) case XML.Elem(Markup(Markup.MARKDOWN_PARAGRAPH, _), body) => List(HTML.par(html_body(body))) case XML.Elem(Markup(Markup.MARKDOWN_ITEM, _), body) => List(HTML.item(html_body(body))) case XML.Elem(Markup(Markup.Markdown_Bullet.name, _), _) => Nil case XML.Elem(Markup.Markdown_List(kind), body) => if (kind == Markup.ENUMERATE) List(HTML.enum(html_body(body))) else List(HTML.list(html_body(body))) case XML.Elem(markup, body) => val name = markup.name val html = markup.properties match { case Markup.Kind(kind) if kind == Markup.COMMAND || kind == Markup.KEYWORD => 
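// commands and keywords get a CSS class named after their markup kind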
html_class(kind, html_body(body)) case _ => html_body(body) } Rendering.foreground.get(name) orElse Rendering.text_color.get(name) match { case Some(c) => html_class(c.toString, html) case None => html_class(name, html) } case XML.Text(text) => HTML.text(Symbol.decode(text)) } html_body(xml) } /* PIDE HTML document */ def html_document( resources: Resources, snapshot: Document.Snapshot, html_context: HTML_Context, elements: Elements, plain_text: Boolean = false): HTML_Document = { require(!snapshot.is_outdated, "document snapshot outdated") val name = snapshot.node_name if (plain_text) { val title = "File " + Symbol.cartouche_decoded(name.path.file_name) val body = HTML.text(snapshot.node.source) html_context.html_document(title, body) } else { resources.html_document(snapshot) getOrElse { val title = if (name.is_theory) "Theory " + quote(name.theory_base_name) else "File " + Symbol.cartouche_decoded(name.path.file_name) val body = make_html(elements, (_, _) => None, snapshot.xml_markup(elements = elements.html)) html_context.html_document(title, body) } } } /** PDF LaTeX documents **/ /* document info */ abstract class Document_Name { def name: String def path: Path = Path.basic(name) override def toString: String = name } object Document_Variant { def parse(name: String, tags: String): Document_Variant = Document_Variant(name, Library.space_explode(',', tags)) def parse(opt: String): Document_Variant = Library.space_explode('=', opt) match { case List(name) => Document_Variant(name, Nil) case List(name, tags) => parse(name, tags) case _ => error("Malformed document variant: " + quote(opt)) } } sealed case class Document_Variant(name: String, tags: List[String]) extends Document_Name { def print_tags: String = tags.mkString(",") def print: String = if (tags.isEmpty) name else name + "=" + print_tags def latex_sty: String = Library.terminate_lines( tags.map(tag => tag.toList match { case '/' :: cs => "\\isafoldtag{" + cs.mkString + "}" case '-' :: cs => "\\isadroptag{" + cs.mkString + "}" case '+' :: cs => "\\isakeeptag{" + cs.mkString + "}" case cs => "\\isakeeptag{" + cs.mkString + "}" })) } sealed case class Document_Input(name: String, sources: SHA1.Digest) extends Document_Name sealed case class Document_Output(name: String, sources: SHA1.Digest, log_xz: Bytes, pdf: Bytes) extends Document_Name { def log: String = log_xz.uncompress().text def log_lines: List[String] = split_lines(log) def write(db: SQL.Database, session_name: String): Unit = write_document(db, session_name, this) } /* SQL data model */ object Data { val session_name = SQL.Column.string("session_name").make_primary_key val name = SQL.Column.string("name").make_primary_key val sources = SQL.Column.string("sources") val log_xz = SQL.Column.bytes("log_xz") val pdf = SQL.Column.bytes("pdf") val table = SQL.Table("isabelle_documents", List(session_name, name, sources, log_xz, pdf)) def where_equal(session_name: String, name: String = ""): SQL.Source = "WHERE " + Data.session_name.equal(session_name) + (if (name == "") "" else " AND " + Data.name.equal(name)) } def read_documents(db: SQL.Database, session_name: String): List[Document_Input] = { val select = Data.table.select(List(Data.name, Data.sources), Data.where_equal(session_name)) db.using_statement(select)(stmt => stmt.execute_query().iterator(res => { val name = res.string(Data.name) val sources = res.string(Data.sources) Document_Input(name, SHA1.fake(sources)) }).toList) } def read_document(db: SQL.Database, session_name: String, name: String): 
Option[Document_Output] = { val select = Data.table.select(sql = Data.where_equal(session_name, name)) db.using_statement(select)(stmt => { val res = stmt.execute_query() if (res.next()) { val name = res.string(Data.name) val sources = res.string(Data.sources) val log_xz = res.bytes(Data.log_xz) val pdf = res.bytes(Data.pdf) Some(Document_Output(name, SHA1.fake(sources), log_xz, pdf)) } else None }) } def write_document(db: SQL.Database, session_name: String, doc: Document_Output) { db.using_statement(Data.table.insert())(stmt => { stmt.string(1) = session_name stmt.string(2) = doc.name stmt.string(3) = doc.sources.toString stmt.bytes(4) = doc.log_xz stmt.bytes(5) = doc.pdf stmt.execute() }) } /* presentation context */ object Context { val none: Context = new Context { def enabled: Boolean = false } val standard: Context = new Context { def enabled: Boolean = true } def dir(path: Path): Context = new Context { def enabled: Boolean = true override def dir(store: Sessions.Store): Path = path } def make(s: String): Context = if (s == ":") standard else dir(Path.explode(s)) } abstract class Context private { def enabled: Boolean def enabled(info: Sessions.Info): Boolean = enabled || info.browser_info def dir(store: Sessions.Store): Path = store.presentation_dir def dir(store: Sessions.Store, info: Sessions.Info): Path = dir(store) + Path.explode(info.chapter_session) } /** HTML presentation **/ /* maintain chapter index */ private val sessions_path = Path.basic(".sessions") private def read_sessions(dir: Path): List[(String, String)] = { val path = dir + sessions_path if (path.is_file) { import XML.Decode._ list(pair(string, string))(Symbol.decode_yxml(File.read(path))) } else Nil } private def write_sessions(dir: Path, sessions: List[(String, String)]) { import XML.Encode._ File.write(dir + sessions_path, YXML.string_of_body(list(pair(string, string))(sessions))) } def update_chapter_index(browser_info: Path, chapter: String, new_sessions: List[(String, String)]) { val dir = Isabelle_System.make_directory(browser_info + Path.basic(chapter)) val sessions0 = try { read_sessions(dir) } catch { case _: XML.Error => Nil } val sessions = (SortedMap.empty[String, String] ++ sessions0 ++ new_sessions).toList write_sessions(dir, sessions) val title = "Isabelle/" + chapter + " sessions" HTML.write_document(dir, "index.html", List(HTML.title(title + " (" + Distribution.version + ")")), HTML.chapter(title) :: (if (sessions.isEmpty) Nil else List(HTML.div("sessions", List(HTML.description( sessions.map({ case (name, description) => val descr = Symbol.trim_blank_lines(description) (List(HTML.link(name + "/index.html", HTML.text(name))), if (descr == "") Nil else HTML.break ::: List(HTML.pre(HTML.text(descr)))) }))))))) } def make_global_index(browser_info: Path) { if (!(browser_info + Path.explode("index.html")).is_file) { Isabelle_System.make_directory(browser_info) File.copy(Path.explode("~~/lib/logo/isabelle.gif"), browser_info + Path.explode("isabelle.gif")) File.write(browser_info + Path.explode("index.html"), File.read(Path.explode("~~/lib/html/library_index_header.template")) + File.read(Path.explode("~~/lib/html/library_index_content.template")) + File.read(Path.explode("~~/lib/html/library_index_footer.template"))) } } /* present session */ val session_graph_path = Path.explode("session_graph.pdf") val readme_path = Path.explode("README.html") val files_path = Path.explode("files") def html_name(name: Document.Node.Name): String = name.theory_base_name + ".html" def theory_link(deps: 
Sessions.Deps, session0: String, name: Document.Node.Name, body: XML.Body): Option[XML.Tree] = { val session1 = deps(session0).theory_qualifier(name) val info0 = deps.sessions_structure.get(session0) val info1 = deps.sessions_structure.get(session1) if (info0.isDefined && info1.isDefined) { Some(HTML.link(info0.get.relative_path(info1.get) + html_name(name), body)) } else None } def session_html( resources: Resources, session: String, deps: Sessions.Deps, db_context: Sessions.Database_Context, progress: Progress = new Progress, verbose: Boolean = false, html_context: HTML_Context, elements: Elements, presentation: Context) { val info = deps.sessions_structure(session) val options = info.options val base = deps(session) val session_dir = presentation.dir(db_context.store, info) html_context.init_fonts(session_dir) Bytes.write(session_dir + session_graph_path, graphview.Graph_File.make_pdf(options, base.session_graph_display)) val documents = for { doc <- info.document_variants document <- db_context.input_database(session)(read_document(_, _, doc.name)) } yield { if (verbose) progress.echo("Presenting document " + session + "/" + doc.name) Bytes.write(session_dir + doc.path.pdf, document.pdf) doc } val view_links = { val deps_link = HTML.link(session_graph_path, HTML.text("theory dependencies")) val readme_links = if ((info.dir + readme_path).is_file) { File.copy(info.dir + readme_path, session_dir + readme_path) List(HTML.link(readme_path, HTML.text("README"))) } else Nil val document_links = documents.map(doc => HTML.link(doc.path.pdf, HTML.text(doc.name))) Library.separate(HTML.break ::: HTML.nl, (deps_link :: readme_links ::: document_links). map(link => HTML.text("View ") ::: List(link))).flatten } val theories: List[XML.Body] = { var seen_files = List.empty[(Path, String, Document.Node.Name)] def entity_link(props: Properties.T, body: XML.Body): Option[XML.Tree] = (props, props, props) match { case (Markup.Kind(Markup.THEORY), Markup.Name(theory), Position.Def_File(thy_file)) => val node_name = resources.file_node(Path.explode(thy_file), theory = theory) theory_link(deps, session, node_name, body) case _ => None } sealed case class Theory( name: Document.Node.Name, command: Command, files_html: List[(Path, XML.Tree)], html: XML.Tree) def read_theory(name: Document.Node.Name): Option[Theory] = { progress.expose_interrupt() if (verbose) progress.echo("Presenting theory " + name) for (command <- Build_Job.read_theory(db_context, resources, session, name.theory)) yield { val snapshot = Document.State.init.snippet(command) val files_html = for { (src_path, xml) <- snapshot.xml_markup_blobs(elements = elements.html) if xml.nonEmpty } yield { progress.expose_interrupt() if (verbose) progress.echo("Presenting file " + src_path) (src_path, html_context.source(make_html(elements, entity_link, xml))) } val html = html_context.source( make_html(elements, entity_link, snapshot.xml_markup(elements = elements.html))) Theory(name, command, files_html, html) } } for (thy <- Par_List.map(read_theory, base.session_theories).flatten) yield { val files = for { (src_path, file_html) <- thy.files_html } yield { val file_name = (files_path + src_path.squash.html).implode seen_files.find(p => p._1 == src_path || p._2 == file_name) match { case None => seen_files ::= (src_path, file_name, thy.name) case Some((_, _, thy_name1)) => error("Incoherent use of file name " + src_path + " as " + quote(file_name) + " in theory " + thy_name1 + " vs. 
" + thy.name) } val file_path = session_dir + Path.explode(file_name) html_context.init_fonts(file_path.dir) val file_title = "File " + Symbol.cartouche_decoded(src_path.implode_short) HTML.write_document(file_path.dir, file_path.file_name, List(HTML.title(file_title)), List(html_context.head(file_title), file_html)) List(HTML.link(file_name, HTML.text(file_title))) } val thy_title = "Theory " + thy.name.theory_base_name HTML.write_document(session_dir, html_name(thy.name), List(HTML.title(thy_title)), List(html_context.head(thy_title), thy.html)) List(HTML.link(html_name(thy.name), HTML.text(thy.name.theory_base_name) ::: (if (files.isEmpty) Nil else List(HTML.itemize(files))))) } } val title = "Session " + session HTML.write_document(session_dir, "index.html", List(HTML.title(title + " (" + Distribution.version + ")")), html_context.head(title, List(HTML.par(view_links))) :: html_context.contents("Theories", theories)) } /** build documents **/ val session_tex_path = Path.explode("session.tex") def tex_name(name: Document.Node.Name): String = name.theory_base_name + ".tex" def document_tex_name(name: Document.Node.Name): String = "document/" + tex_name(name) class Build_Error(val log_lines: List[String], val message: String) extends Exn.User_Error(message) def build_documents( session: String, deps: Sessions.Deps, db_context: Sessions.Database_Context, progress: Progress = new Progress, output_sources: Option[Path] = None, output_pdf: Option[Path] = None, verbose: Boolean = false, verbose_latex: Boolean = false): List[Document_Output] = { /* session info */ val info = deps.sessions_structure(session) val hierarchy = deps.sessions_structure.hierarchy(session) val options = info.options val base = deps(session) val graph_pdf = graphview.Graph_File.make_pdf(options, base.session_graph_display) /* prepare document directory */ lazy val tex_files = for (name <- base.session_theories ::: base.document_theories) yield { val entry = db_context.get_export(hierarchy, name.theory, document_tex_name(name)) Path.basic(tex_name(name)) -> entry.uncompressed } def prepare_dir1(dir: Path, doc: Document_Variant): (Path, String) = { val doc_dir = dir + Path.basic(doc.name) Isabelle_System.make_directory(doc_dir) Isabelle_System.bash("isabelle latex -o sty", cwd = doc_dir.file).check File.write(doc_dir + Path.explode("isabelletags.sty"), doc.latex_sty) for ((base_dir, src) <- info.document_files) File.copy_base(info.dir + base_dir, src, doc_dir) File.write(doc_dir + session_tex_path, Library.terminate_lines( base.session_theories.map(name => "\\input{" + tex_name(name) + "}"))) for ((path, tex) <- tex_files) Bytes.write(doc_dir + path, tex) val root1 = "root_" + doc.name val root = if ((doc_dir + Path.explode(root1).tex).is_file) root1 else "root" (doc_dir, root) } def prepare_dir2(dir: Path, doc: Document_Variant): Unit = { val doc_dir = dir + Path.basic(doc.name) // non-deterministic, but irrelevant Bytes.write(doc_dir + session_graph_path, graph_pdf) } /* produce documents */ val documents = for (doc <- info.documents) yield { Isabelle_System.with_tmp_dir("document")(tmp_dir => { progress.echo("Preparing " + session + "/" + doc.name + " ...") val start = Time.now() // prepare sources val (doc_dir, root) = prepare_dir1(tmp_dir, doc) val digests = File.find_files(doc_dir.file, follow_links = true).map(SHA1.digest) val sources = SHA1.digest_set(digests) prepare_dir2(tmp_dir, doc) for (dir <- output_sources) { prepare_dir1(dir, doc) prepare_dir2(dir, doc) } // old document from database val old_document = 
for { old_doc <- db_context.input_database(session)(read_document(_, _, doc.name)) if old_doc.sources == sources } yield { Bytes.write(doc_dir + doc.path.pdf, old_doc.pdf) old_doc } old_document getOrElse { // bash scripts def root_bash(ext: String): String = Bash.string(root + "." + ext) def latex_bash(fmt: String = "pdf", ext: String = "tex"): String = "isabelle latex -o " + Bash.string(fmt) + " " + Bash.string(root + "." + ext) def bash(items: String*): Process_Result = progress.bash(items.mkString(" && "), cwd = doc_dir.file, echo = verbose_latex, watchdog = Time.seconds(0.5)) // prepare document val result = if ((doc_dir + Path.explode("build")).is_file) { bash("./build pdf " + Bash.string(doc.name)) } else { bash( latex_bash(), "{ [ ! -f " + root_bash("bib") + " ] || " + latex_bash("bbl") + "; }", "{ [ ! -f " + root_bash("idx") + " ] || " + latex_bash("idx") + "; }", latex_bash(), latex_bash()) } // result val root_pdf = Path.basic(root).pdf val result_path = doc_dir + root_pdf val log_lines = result.out_lines ::: result.err_lines if (!result.ok) { val message = Exn.cat_message( - Library.trim_line(result.err), + result.err, cat_lines(Latex.latex_errors(doc_dir, root) ::: Bibtex.bibtex_errors(doc_dir, root)), "Failed to build document " + quote(doc.name)) throw new Build_Error(log_lines, message) } else if (!result_path.is_file) { val message = "Bad document result: expected to find " + root_pdf throw new Build_Error(log_lines, message) } else { val stop = Time.now() val timing = stop - start progress.echo("Finished " + session + "/" + doc.name + " (" + timing.message_hms + " elapsed time)") val log_xz = Bytes(cat_lines(log_lines)).compress() val pdf = Bytes.read(result_path) Document_Output(doc.name, sources, log_xz, pdf) } } }) } for (dir <- output_pdf; doc <- documents) { Isabelle_System.make_directory(dir) val path = dir + doc.path.pdf Bytes.write(path, doc.pdf) progress.echo("Document at " + path.absolute) } documents } /* Isabelle tool wrapper */ val isabelle_tool = Isabelle_Tool("document", "prepare session theory document", Scala_Project.here, args => { var output_sources: Option[Path] = None var output_pdf: Option[Path] = None var verbose_latex = false var dirs: List[Path] = Nil var options = Options.init() var verbose_build = false val getopts = Getopts( """ Usage: isabelle document [OPTIONS] SESSION Options are: -O DIR output directory for LaTeX sources and resulting PDF -P DIR output directory for resulting PDF -S DIR output directory for LaTeX sources -V verbose latex -d DIR include session directory -o OPTION override Isabelle system OPTION (via NAME=VAL or NAME) -v verbose build Prepare the theory document of a session. 
""", "O:" -> (arg => { val dir = Path.explode(arg) output_sources = Some(dir) output_pdf = Some(dir) }), "P:" -> (arg => { output_pdf = Some(Path.explode(arg)) }), "S:" -> (arg => { output_sources = Some(Path.explode(arg)) }), "V" -> (_ => verbose_latex = true), "d:" -> (arg => dirs = dirs ::: List(Path.explode(arg))), "o:" -> (arg => options = options + arg), "v" -> (_ => verbose_build = true)) val more_args = getopts(args) val session = more_args match { case List(a) => a case _ => getopts.usage() } val progress = new Console_Progress(verbose = verbose_build) val store = Sessions.store(options) progress.interrupt_handler { val res = Build.build(options, selection = Sessions.Selection.session(session), dirs = dirs, progress = progress, verbose = verbose_build) if (!res.ok) error("Failed to build session " + quote(session)) val deps = Sessions.load_structure(options + "document=pdf", dirs = dirs). selection_deps(Sessions.Selection.session(session)) if (output_sources.isEmpty && output_pdf.isEmpty) { progress.echo_warning("No output directory") } using(store.open_database_context())(db_context => build_documents(session, deps, db_context, progress = progress, output_sources = output_sources, output_pdf = output_pdf, verbose = true, verbose_latex = verbose_latex)) } }) }