diff --git a/src/HOL/Library/refute.ML b/src/HOL/Library/refute.ML --- a/src/HOL/Library/refute.ML +++ b/src/HOL/Library/refute.ML @@ -1,2991 +1,2991 @@ (* Title: HOL/Library/refute.ML Author: Tjark Weber, TU Muenchen Finite model generation for HOL formulas, using a SAT solver. *) signature REFUTE = sig exception REFUTE of string * string (* ------------------------------------------------------------------------- *) (* Model/interpretation related code (translation HOL -> propositional logic *) (* ------------------------------------------------------------------------- *) type params type interpretation type model type arguments exception MAXVARS_EXCEEDED val add_interpreter : string -> (Proof.context -> model -> arguments -> term -> (interpretation * model * arguments) option) -> theory -> theory val add_printer : string -> (Proof.context -> model -> typ -> interpretation -> (int -> bool) -> term option) -> theory -> theory val interpret : Proof.context -> model -> arguments -> term -> (interpretation * model * arguments) val print : Proof.context -> model -> typ -> interpretation -> (int -> bool) -> term val print_model : Proof.context -> model -> (int -> bool) -> string (* ------------------------------------------------------------------------- *) (* Interface *) (* ------------------------------------------------------------------------- *) val set_default_param : (string * string) -> theory -> theory val get_default_param : Proof.context -> string -> string option val get_default_params : Proof.context -> (string * string) list val actual_params : Proof.context -> (string * string) list -> params val find_model : Proof.context -> params -> term list -> term -> bool -> string (* tries to find a model for a formula: *) val satisfy_term : Proof.context -> (string * string) list -> term list -> term -> string (* tries to find a model that refutes a formula: *) val refute_term : Proof.context -> (string * string) list -> term list -> term -> string val refute_goal : Proof.context -> (string * string) list -> thm -> int -> string (* ------------------------------------------------------------------------- *) (* Additional functions used by Nitpick (to be factored out) *) (* ------------------------------------------------------------------------- *) val get_classdef : theory -> string -> (string * term) option val norm_rhs : term -> term val get_def : theory -> string * typ -> (string * term) option val get_typedef : theory -> typ -> (string * term) option val is_IDT_constructor : theory -> string * typ -> bool val is_IDT_recursor : theory -> string * typ -> bool val is_const_of_class: theory -> string * typ -> bool val string_of_typ : typ -> string end; structure Refute : REFUTE = struct open Prop_Logic; (* We use 'REFUTE' only for internal error conditions that should *) (* never occur in the first place (i.e. errors caused by bugs in our *) (* code). Otherwise (e.g. to indicate invalid input data) we use *) (* 'error'. *) exception REFUTE of string * string; (* ("in function", "cause") *) (* should be raised by an interpreter when more variables would be *) (* required than allowed by 'maxvars' *) exception MAXVARS_EXCEEDED; (* ------------------------------------------------------------------------- *) (* TREES *) (* ------------------------------------------------------------------------- *) (* ------------------------------------------------------------------------- *) (* tree: implements an arbitrarily (but finitely) branching tree as a list *) (* of (lists of ...) 
elements *) (* ------------------------------------------------------------------------- *) datatype 'a tree = Leaf of 'a | Node of ('a tree) list; fun tree_map f tr = case tr of Leaf x => Leaf (f x) | Node xs => Node (map (tree_map f) xs); fun tree_pair (t1, t2) = case t1 of Leaf x => (case t2 of Leaf y => Leaf (x,y) | Node _ => raise REFUTE ("tree_pair", "trees are of different height (second tree is higher)")) | Node xs => (case t2 of (* '~~' will raise an exception if the number of branches in *) (* both trees is different at the current node *) Node ys => Node (map tree_pair (xs ~~ ys)) | Leaf _ => raise REFUTE ("tree_pair", "trees are of different height (first tree is higher)")); (* ------------------------------------------------------------------------- *) (* params: parameters that control the translation into a propositional *) (* formula/model generation *) (* *) (* The following parameters are supported (and required (!), except for *) (* "sizes" and "expect"): *) (* *) (* Name Type Description *) (* *) (* "sizes" (string * int) list *) (* Size of ground types (e.g. 'a=2), or depth of IDTs. *) (* "minsize" int If >0, minimal size of each ground type/IDT depth. *) (* "maxsize" int If >0, maximal size of each ground type/IDT depth. *) (* "maxvars" int If >0, use at most 'maxvars' Boolean variables *) (* when transforming the term into a propositional *) (* formula. *) (* "maxtime" int If >0, terminate after at most 'maxtime' seconds. *) (* "satsolver" string SAT solver to be used. *) (* "no_assms" bool If "true", assumptions in structured proofs are *) (* not considered. *) (* "expect" string Expected result ("genuine", "potential", "none", or *) (* "unknown"). *) (* ------------------------------------------------------------------------- *) type params = { sizes : (string * int) list, minsize : int, maxsize : int, maxvars : int, maxtime : int, satsolver: string, no_assms : bool, expect : string }; (* ------------------------------------------------------------------------- *) (* interpretation: a term's interpretation is given by a variable of type *) (* 'interpretation' *) (* ------------------------------------------------------------------------- *) type interpretation = prop_formula list tree; (* ------------------------------------------------------------------------- *) (* model: a model specifies the size of types and the interpretation of *) (* terms *) (* ------------------------------------------------------------------------- *) type model = (typ * int) list * (term * interpretation) list; (* ------------------------------------------------------------------------- *) (* arguments: additional arguments required during interpretation of terms *) (* ------------------------------------------------------------------------- *) type arguments = { (* just passed unchanged from 'params': *) maxvars : int, (* whether to use 'make_equality' or 'make_def_equality': *) def_eq : bool, (* the following may change during the translation: *) next_idx : int, bounds : interpretation list, wellformed: prop_formula }; structure Data = Theory_Data ( type T = {interpreters: (string * (Proof.context -> model -> arguments -> term -> (interpretation * model * arguments) option)) list, printers: (string * (Proof.context -> model -> typ -> interpretation -> (int -> bool) -> term option)) list, parameters: string Symtab.table}; val empty = {interpreters = [], printers = [], parameters = Symtab.empty}; val extend = I; fun merge ({interpreters = in1, printers = pr1, parameters = pa1}, 
{interpreters = in2, printers = pr2, parameters = pa2}) : T = {interpreters = AList.merge (op =) (K true) (in1, in2), printers = AList.merge (op =) (K true) (pr1, pr2), parameters = Symtab.merge (op =) (pa1, pa2)}; ); val get_data = Data.get o Proof_Context.theory_of; (* ------------------------------------------------------------------------- *) (* interpret: interprets the term 't' using a suitable interpreter; returns *) (* the interpretation and a (possibly extended) model that keeps *) (* track of the interpretation of subterms *) (* ------------------------------------------------------------------------- *) fun interpret ctxt model args t = case get_first (fn (_, f) => f ctxt model args t) (#interpreters (get_data ctxt)) of NONE => raise REFUTE ("interpret", "no interpreter for term " ^ quote (Syntax.string_of_term ctxt t)) | SOME x => x; (* ------------------------------------------------------------------------- *) (* print: converts the interpretation 'intr', which must denote a term of *) (* type 'T', into a term using a suitable printer *) (* ------------------------------------------------------------------------- *) fun print ctxt model T intr assignment = case get_first (fn (_, f) => f ctxt model T intr assignment) (#printers (get_data ctxt)) of NONE => raise REFUTE ("print", "no printer for type " ^ quote (Syntax.string_of_typ ctxt T)) | SOME x => x; (* ------------------------------------------------------------------------- *) (* print_model: turns the model into a string, using a fixed interpretation *) (* (given by an assignment for Boolean variables) and suitable *) (* printers *) (* ------------------------------------------------------------------------- *) fun print_model ctxt model assignment = let val (typs, terms) = model val typs_msg = if null typs then "empty universe (no type variables in term)\n" else "Size of types: " ^ commas (map (fn (T, i) => Syntax.string_of_typ ctxt T ^ ": " ^ string_of_int i) typs) ^ "\n" val show_consts_msg = if not (Config.get ctxt show_consts) andalso Library.exists (is_Const o fst) terms then "enable \"show_consts\" to show the interpretation of constants\n" else "" val terms_msg = if null terms then "empty interpretation (no free variables in term)\n" else cat_lines (map_filter (fn (t, intr) => (* print constants only if 'show_consts' is true *) if Config.get ctxt show_consts orelse not (is_Const t) then SOME (Syntax.string_of_term ctxt t ^ ": " ^ Syntax.string_of_term ctxt (print ctxt model (Term.type_of t) intr assignment)) else NONE) terms) ^ "\n" in typs_msg ^ show_consts_msg ^ terms_msg end; (* ------------------------------------------------------------------------- *) (* PARAMETER MANAGEMENT *) (* ------------------------------------------------------------------------- *) fun add_interpreter name f = Data.map (fn {interpreters, printers, parameters} => case AList.lookup (op =) interpreters name of NONE => {interpreters = (name, f) :: interpreters, printers = printers, parameters = parameters} | SOME _ => error ("Interpreter " ^ name ^ " already declared")); fun add_printer name f = Data.map (fn {interpreters, printers, parameters} => case AList.lookup (op =) printers name of NONE => {interpreters = interpreters, printers = (name, f) :: printers, parameters = parameters} | SOME _ => error ("Printer " ^ name ^ " already declared")); (* ------------------------------------------------------------------------- *) (* set_default_param: stores the '(name, value)' pair in Data's *) (* parameter table *) (* 
------------------------------------------------------------------------- *) fun set_default_param (name, value) = Data.map (fn {interpreters, printers, parameters} => {interpreters = interpreters, printers = printers, parameters = Symtab.update (name, value) parameters}); (* ------------------------------------------------------------------------- *) (* get_default_param: retrieves the value associated with 'name' from *) (* Data's parameter table *) (* ------------------------------------------------------------------------- *) val get_default_param = Symtab.lookup o #parameters o get_data; (* ------------------------------------------------------------------------- *) (* get_default_params: returns a list of all '(name, value)' pairs that are *) (* stored in Data's parameter table *) (* ------------------------------------------------------------------------- *) val get_default_params = Symtab.dest o #parameters o get_data; (* ------------------------------------------------------------------------- *) (* actual_params: takes a (possibly empty) list 'params' of parameters that *) (* override the default parameters currently specified, and *) (* returns a record that can be passed to 'find_model'. *) (* ------------------------------------------------------------------------- *) fun actual_params ctxt override = let fun read_bool (parms, name) = case AList.lookup (op =) parms name of SOME "true" => true | SOME "false" => false | SOME s => error ("parameter " ^ quote name ^ " (value is " ^ quote s ^ ") must be \"true\" or \"false\"") | NONE => error ("parameter " ^ quote name ^ " must be assigned a value") fun read_int (parms, name) = case AList.lookup (op =) parms name of SOME s => (case Int.fromString s of SOME i => i | NONE => error ("parameter " ^ quote name ^ " (value is " ^ quote s ^ ") must be an integer value")) | NONE => error ("parameter " ^ quote name ^ " must be assigned a value") fun read_string (parms, name) = case AList.lookup (op =) parms name of SOME s => s | NONE => error ("parameter " ^ quote name ^ " must be assigned a value") (* 'override' first, defaults last: *) val allparams = override @ get_default_params ctxt val minsize = read_int (allparams, "minsize") val maxsize = read_int (allparams, "maxsize") val maxvars = read_int (allparams, "maxvars") val maxtime = read_int (allparams, "maxtime") val satsolver = read_string (allparams, "satsolver") val no_assms = read_bool (allparams, "no_assms") val expect = the_default "" (AList.lookup (op =) allparams "expect") (* all remaining parameters of the form "string=int" are collected in *) (* 'sizes' *) (* TODO: it is currently not possible to specify a size for a type *) (* whose name is one of the other parameters (e.g. 
'maxvars') *) (* (string * int) list *) val sizes = map_filter (fn (name, value) => Option.map (pair name) (Int.fromString value)) (filter (fn (name, _) => name<>"minsize" andalso name<>"maxsize" andalso name<>"maxvars" andalso name<>"maxtime" andalso name<>"satsolver" andalso name<>"no_assms") allparams) in {sizes=sizes, minsize=minsize, maxsize=maxsize, maxvars=maxvars, maxtime=maxtime, satsolver=satsolver, no_assms=no_assms, expect=expect} end; (* ------------------------------------------------------------------------- *) (* TRANSLATION HOL -> PROPOSITIONAL LOGIC, BOOLEAN ASSIGNMENT -> MODEL *) (* ------------------------------------------------------------------------- *) fun typ_of_dtyp _ typ_assoc (Old_Datatype_Aux.DtTFree a) = the (AList.lookup (op =) typ_assoc (Old_Datatype_Aux.DtTFree a)) | typ_of_dtyp descr typ_assoc (Old_Datatype_Aux.DtType (s, Us)) = Type (s, map (typ_of_dtyp descr typ_assoc) Us) | typ_of_dtyp descr typ_assoc (Old_Datatype_Aux.DtRec i) = let val (s, ds, _) = the (AList.lookup (op =) descr i) in Type (s, map (typ_of_dtyp descr typ_assoc) ds) end val close_form = ATP_Util.close_form val specialize_type = ATP_Util.specialize_type (* ------------------------------------------------------------------------- *) (* is_const_of_class: returns 'true' iff 'Const (s, T)' is a constant that *) (* denotes membership to an axiomatic type class *) (* ------------------------------------------------------------------------- *) fun is_const_of_class thy (s, _) = let val class_const_names = map Logic.const_of_class (Sign.all_classes thy) in (* I'm not quite sure if checking the name 's' is sufficient, *) (* or if we should also check the type 'T'. *) member (op =) class_const_names s end; (* ------------------------------------------------------------------------- *) (* is_IDT_constructor: returns 'true' iff 'Const (s, T)' is the constructor *) (* of an inductive datatype in 'thy' *) (* ------------------------------------------------------------------------- *) fun is_IDT_constructor thy (s, T) = (case body_type T of Type (s', _) => (case BNF_LFP_Compat.get_constrs thy s' of SOME constrs => List.exists (fn (cname, cty) => cname = s andalso Sign.typ_instance thy (T, cty)) constrs | NONE => false) | _ => false); (* ------------------------------------------------------------------------- *) (* is_IDT_recursor: returns 'true' iff 'Const (s, T)' is the recursion *) (* operator of an inductive datatype in 'thy' *) (* ------------------------------------------------------------------------- *) fun is_IDT_recursor thy (s, _) = let val rec_names = Symtab.fold (append o #rec_names o snd) (BNF_LFP_Compat.get_all thy []) [] in (* I'm not quite sure if checking the name 's' is sufficient, *) (* or if we should also check the type 'T'. *) member (op =) rec_names s end; (* ------------------------------------------------------------------------- *) (* norm_rhs: maps f ?t1 ... ?tn == rhs to %t1...tn. 
rhs *) (* ------------------------------------------------------------------------- *) fun norm_rhs eqn = let fun lambda (v as Var ((x, _), T)) t = Abs (x, T, abstract_over (v, t)) | lambda v t = raise TERM ("lambda", [v, t]) val (lhs, rhs) = Logic.dest_equals eqn val (_, args) = Term.strip_comb lhs in fold lambda (rev args) rhs end (* ------------------------------------------------------------------------- *) (* get_def: looks up the definition of a constant *) (* ------------------------------------------------------------------------- *) fun get_def thy (s, T) = let fun get_def_ax [] = NONE | get_def_ax ((axname, ax) :: axioms) = (let val (lhs, _) = Logic.dest_equals ax (* equations only *) val c = Term.head_of lhs val (s', T') = Term.dest_Const c in if s=s' then let val typeSubs = Sign.typ_match thy (T', T) Vartab.empty val ax' = Envir.subst_term_types typeSubs ax val rhs = norm_rhs ax' in SOME (axname, rhs) end else get_def_ax axioms end handle ERROR _ => get_def_ax axioms | TERM _ => get_def_ax axioms | Type.TYPE_MATCH => get_def_ax axioms) in get_def_ax (Theory.all_axioms_of thy) end; (* ------------------------------------------------------------------------- *) (* get_typedef: looks up the definition of a type, as created by "typedef" *) (* ------------------------------------------------------------------------- *) fun get_typedef thy T = let fun get_typedef_ax [] = NONE | get_typedef_ax ((axname, ax) :: axioms) = (let fun type_of_type_definition (Const (s', T')) = if s'= \<^const_name>\type_definition\ then SOME T' else NONE | type_of_type_definition (Free _) = NONE | type_of_type_definition (Var _) = NONE | type_of_type_definition (Bound _) = NONE | type_of_type_definition (Abs (_, _, body)) = type_of_type_definition body | type_of_type_definition (t1 $ t2) = (case type_of_type_definition t1 of SOME x => SOME x | NONE => type_of_type_definition t2) in case type_of_type_definition ax of SOME T' => let val T'' = domain_type (domain_type T') val typeSubs = Sign.typ_match thy (T'', T) Vartab.empty in SOME (axname, Envir.subst_term_types typeSubs ax) end | NONE => get_typedef_ax axioms end handle ERROR _ => get_typedef_ax axioms | TERM _ => get_typedef_ax axioms | Type.TYPE_MATCH => get_typedef_ax axioms) in get_typedef_ax (Theory.all_axioms_of thy) end; (* ------------------------------------------------------------------------- *) (* get_classdef: looks up the defining axiom for an axiomatic type class, as *) (* created by the "axclass" command *) (* ------------------------------------------------------------------------- *) fun get_classdef thy class = let val axname = class ^ "_class_def" in Option.map (pair axname) (AList.lookup (op =) (Theory.all_axioms_of thy) axname) end; (* ------------------------------------------------------------------------- *) (* unfold_defs: unfolds all defined constants in a term 't', beta-eta *) (* normalizes the result term; certain constants are not *) (* unfolded (cf. 'collect_axioms' and the various interpreters *) (* below): if the interpretation respects a definition anyway, *) (* that definition does not need to be unfolded *) (* ------------------------------------------------------------------------- *) (* Note: we could intertwine unfolding of constants and beta-(eta-) *) (* normalization; this would save some unfolding for terms where *) (* constants are eliminated by beta-reduction (e.g. 'K c1 c2'). On *) (* the other hand, this would cause additional work for terms where *) (* constants are duplicated by beta-reduction (e.g. 
'S c1 c2 c3'). *) fun unfold_defs thy t = let fun unfold_loop t = case t of (* Pure *) Const (\<^const_name>\Pure.all\, _) => t | Const (\<^const_name>\Pure.eq\, _) => t | Const (\<^const_name>\Pure.imp\, _) => t | Const (\<^const_name>\Pure.type\, _) => t (* axiomatic type classes *) (* HOL *) | Const (\<^const_name>\Trueprop\, _) => t | Const (\<^const_name>\Not\, _) => t | (* redundant, since 'True' is also an IDT constructor *) Const (\<^const_name>\True\, _) => t | (* redundant, since 'False' is also an IDT constructor *) Const (\<^const_name>\False\, _) => t | Const (\<^const_name>\undefined\, _) => t | Const (\<^const_name>\The\, _) => t | Const (\<^const_name>\Hilbert_Choice.Eps\, _) => t | Const (\<^const_name>\All\, _) => t | Const (\<^const_name>\Ex\, _) => t | Const (\<^const_name>\HOL.eq\, _) => t | Const (\<^const_name>\HOL.conj\, _) => t | Const (\<^const_name>\HOL.disj\, _) => t | Const (\<^const_name>\HOL.implies\, _) => t (* sets *) | Const (\<^const_name>\Collect\, _) => t | Const (\<^const_name>\Set.member\, _) => t (* other optimizations *) | Const (\<^const_name>\Finite_Set.card\, _) => t | Const (\<^const_name>\Finite_Set.finite\, _) => t | Const (\<^const_name>\Orderings.less\, Type ("fun", [\<^typ>\nat\, Type ("fun", [\<^typ>\nat\, \<^typ>\bool\])])) => t | Const (\<^const_name>\Groups.plus\, Type ("fun", [\<^typ>\nat\, Type ("fun", [\<^typ>\nat\, \<^typ>\nat\])])) => t | Const (\<^const_name>\Groups.minus\, Type ("fun", [\<^typ>\nat\, Type ("fun", [\<^typ>\nat\, \<^typ>\nat\])])) => t | Const (\<^const_name>\Groups.times\, Type ("fun", [\<^typ>\nat\, Type ("fun", [\<^typ>\nat\, \<^typ>\nat\])])) => t | Const (\<^const_name>\append\, _) => t | Const (\<^const_name>\fst\, _) => t | Const (\<^const_name>\snd\, _) => t (* simply-typed lambda calculus *) | Const (s, T) => (if is_IDT_constructor thy (s, T) orelse is_IDT_recursor thy (s, T) then t (* do not unfold IDT constructors/recursors *) (* unfold the constant if there is a defining equation *) else case get_def thy (s, T) of SOME ((*axname*) _, rhs) => (* Note: if the term to be unfolded (i.e. 'Const (s, T)') *) (* occurs on the right-hand side of the equation, i.e. in *) (* 'rhs', we must not use this equation to unfold, because *) (* that would loop. Here would be the right place to *) (* check this. However, getting this really right seems *) (* difficult because the user may state arbitrary axioms, *) (* which could interact with overloading to create loops. *) ((*tracing (" unfolding: " ^ axname);*) unfold_loop rhs) | NONE => t) | Free _ => t | Var _ => t | Bound _ => t | Abs (s, T, body) => Abs (s, T, unfold_loop body) | t1 $ t2 => (unfold_loop t1) $ (unfold_loop t2) val result = Envir.beta_eta_contract (unfold_loop t) in result end; (* ------------------------------------------------------------------------- *) (* collect_axioms: collects (monomorphic, universally quantified, unfolded *) (* versions of) all HOL axioms that are relevant w.r.t 't' *) (* ------------------------------------------------------------------------- *) (* Note: to make the collection of axioms more easily extensible, this *) (* function could be based on user-supplied "axiom collectors", *) (* similar to 'interpret'/interpreters or 'print'/printers *) (* Note: currently we use "inverse" functions to the definitional *) (* mechanisms provided by Isabelle/HOL, e.g. for "axclass", *) (* "typedef", "definition". 
A more general approach could consider *) (* *every* axiom of the theory and collect it if it has a constant/ *) (* type/typeclass in common with the term 't'. *) (* Which axioms are "relevant" for a particular term/type goes hand in *) (* hand with the interpretation of that term/type by its interpreter (see *) (* way below): if the interpretation respects an axiom anyway, the axiom *) (* does not need to be added as a constraint here. *) (* To avoid collecting the same axiom multiple times, we use an *) (* accumulator 'axs' which contains all axioms collected so far. *) local fun get_axiom thy xname = Name_Space.check (Context.Theory thy) (Theory.axiom_table thy) (xname, Position.none); val the_eq_trivial = get_axiom \<^theory>\HOL\ "the_eq_trivial"; val someI = get_axiom \<^theory>\Hilbert_Choice\ "someI"; in fun collect_axioms ctxt t = let val thy = Proof_Context.theory_of ctxt val _ = tracing "Adding axioms..." fun collect_this_axiom (axname, ax) axs = let val ax' = unfold_defs thy ax in if member (op aconv) axs ax' then axs else (tracing axname; collect_term_axioms ax' (ax' :: axs)) end and collect_sort_axioms T axs = let val sort = (case T of TFree (_, sort) => sort | TVar (_, sort) => sort | _ => raise REFUTE ("collect_axioms", "type " ^ Syntax.string_of_typ ctxt T ^ " is not a variable")) (* obtain axioms for all superclasses *) val superclasses = sort @ maps (Sign.super_classes thy) sort (* merely an optimization, because 'collect_this_axiom' disallows *) (* duplicate axioms anyway: *) val superclasses = distinct (op =) superclasses val class_axioms = maps (fn class => map (fn ax => ("<" ^ class ^ ">", Thm.prop_of ax)) (#axioms (Axclass.get_info thy class) handle ERROR _ => [])) superclasses (* replace the (at most one) schematic type variable in each axiom *) (* by the actual type 'T' *) val monomorphic_class_axioms = map (fn (axname, ax) => (case Term.add_tvars ax [] of [] => (axname, ax) | [(idx, S)] => (axname, Envir.subst_term_types (Vartab.make [(idx, (S, T))]) ax) | _ => raise REFUTE ("collect_axioms", "class axiom " ^ axname ^ " (" ^ Syntax.string_of_term ctxt ax ^ ") contains more than one type variable"))) class_axioms in fold collect_this_axiom monomorphic_class_axioms axs end and collect_type_axioms T axs = case T of (* simple types *) Type (\<^type_name>\prop\, []) => axs | Type (\<^type_name>\fun\, [T1, T2]) => collect_type_axioms T2 (collect_type_axioms T1 axs) | Type (\<^type_name>\set\, [T1]) => collect_type_axioms T1 axs (* axiomatic type classes *) | Type (\<^type_name>\itself\, [T1]) => collect_type_axioms T1 axs | Type (s, Ts) => (case BNF_LFP_Compat.get_info thy [] s of SOME _ => (* inductive datatype *) (* only collect relevant type axioms for the argument types *) fold collect_type_axioms Ts axs | NONE => (case get_typedef thy T of SOME (axname, ax) => collect_this_axiom (axname, ax) axs | NONE => (* unspecified type, perhaps introduced with "typedecl" *) (* at least collect relevant type axioms for the argument types *) fold collect_type_axioms Ts axs)) (* axiomatic type classes *) | TFree _ => collect_sort_axioms T axs (* axiomatic type classes *) | TVar _ => collect_sort_axioms T axs and collect_term_axioms t axs = case t of (* Pure *) Const (\<^const_name>\Pure.all\, _) => axs | Const (\<^const_name>\Pure.eq\, _) => axs | Const (\<^const_name>\Pure.imp\, _) => axs (* axiomatic type classes *) | Const (\<^const_name>\Pure.type\, T) => collect_type_axioms T axs (* HOL *) | Const (\<^const_name>\Trueprop\, _) => axs | Const (\<^const_name>\Not\, _) => axs 
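(* note: the clauses above and below that simply return 'axs' rely on the fact that these logical constants are interpreted directly by the interpreters further below, so no axioms need to be collected for them *)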
(* redundant, since 'True' is also an IDT constructor *) | Const (\<^const_name>\True\, _) => axs (* redundant, since 'False' is also an IDT constructor *) | Const (\<^const_name>\False\, _) => axs | Const (\<^const_name>\undefined\, T) => collect_type_axioms T axs | Const (\<^const_name>\The\, T) => let val ax = specialize_type thy (\<^const_name>\The\, T) (#2 the_eq_trivial) in collect_this_axiom (#1 the_eq_trivial, ax) axs end | Const (\<^const_name>\Hilbert_Choice.Eps\, T) => let val ax = specialize_type thy (\<^const_name>\Hilbert_Choice.Eps\, T) (#2 someI) in collect_this_axiom (#1 someI, ax) axs end | Const (\<^const_name>\All\, T) => collect_type_axioms T axs | Const (\<^const_name>\Ex\, T) => collect_type_axioms T axs | Const (\<^const_name>\HOL.eq\, T) => collect_type_axioms T axs | Const (\<^const_name>\HOL.conj\, _) => axs | Const (\<^const_name>\HOL.disj\, _) => axs | Const (\<^const_name>\HOL.implies\, _) => axs (* sets *) | Const (\<^const_name>\Collect\, T) => collect_type_axioms T axs | Const (\<^const_name>\Set.member\, T) => collect_type_axioms T axs (* other optimizations *) | Const (\<^const_name>\Finite_Set.card\, T) => collect_type_axioms T axs | Const (\<^const_name>\Finite_Set.finite\, T) => collect_type_axioms T axs | Const (\<^const_name>\Orderings.less\, T as Type ("fun", [\<^typ>\nat\, Type ("fun", [\<^typ>\nat\, \<^typ>\bool\])])) => collect_type_axioms T axs | Const (\<^const_name>\Groups.plus\, T as Type ("fun", [\<^typ>\nat\, Type ("fun", [\<^typ>\nat\, \<^typ>\nat\])])) => collect_type_axioms T axs | Const (\<^const_name>\Groups.minus\, T as Type ("fun", [\<^typ>\nat\, Type ("fun", [\<^typ>\nat\, \<^typ>\nat\])])) => collect_type_axioms T axs | Const (\<^const_name>\Groups.times\, T as Type ("fun", [\<^typ>\nat\, Type ("fun", [\<^typ>\nat\, \<^typ>\nat\])])) => collect_type_axioms T axs | Const (\<^const_name>\append\, T) => collect_type_axioms T axs | Const (\<^const_name>\fst\, T) => collect_type_axioms T axs | Const (\<^const_name>\snd\, T) => collect_type_axioms T axs (* simply-typed lambda calculus *) | Const (s, T) => if is_const_of_class thy (s, T) then (* axiomatic type classes: add "OFCLASS(?'a::c, c_class)" *) (* and the class definition *) let val class = Logic.class_of_const s val of_class = Logic.mk_of_class (TVar (("'a", 0), [class]), class) val ax_in = SOME (specialize_type thy (s, T) of_class) (* type match may fail due to sort constraints *) handle Type.TYPE_MATCH => NONE val ax_1 = Option.map (fn ax => (Syntax.string_of_term ctxt ax, ax)) ax_in val ax_2 = Option.map (apsnd (specialize_type thy (s, T))) (get_classdef thy class) in collect_type_axioms T (fold collect_this_axiom (map_filter I [ax_1, ax_2]) axs) end else if is_IDT_constructor thy (s, T) orelse is_IDT_recursor thy (s, T) then (* only collect relevant type axioms *) collect_type_axioms T axs else (* other constants should have been unfolded, with some *) (* exceptions: e.g. Abs_xxx/Rep_xxx functions for *) (* typedefs, or type-class related constants *) (* only collect relevant type axioms *) collect_type_axioms T axs | Free (_, T) => collect_type_axioms T axs | Var (_, T) => collect_type_axioms T axs | Bound _ => axs | Abs (_, T, body) => collect_term_axioms body (collect_type_axioms T axs) | t1 $ t2 => collect_term_axioms t2 (collect_term_axioms t1 axs) val result = map close_form (collect_term_axioms t []) val _ = tracing " ...done." 
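(* each axiom in 'result' has been unfolded via 'unfold_defs' (see 'collect_this_axiom' above) and is finally closed universally by 'close_form' *)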
in result end; end; (* ------------------------------------------------------------------------- *) (* ground_types: collects all ground types in a term (including argument *) (* types of other types), suppressing duplicates. Does not *) (* return function types, set types, non-recursive IDTs, or *) (* 'propT'. For IDTs, also the argument types of constructors *) (* and all mutually recursive IDTs are considered. *) (* ------------------------------------------------------------------------- *) fun ground_types ctxt t = let val thy = Proof_Context.theory_of ctxt fun collect_types T acc = (case T of Type (\<^type_name>\fun\, [T1, T2]) => collect_types T1 (collect_types T2 acc) | Type (\<^type_name>\prop\, []) => acc | Type (\<^type_name>\set\, [T1]) => collect_types T1 acc | Type (s, Ts) => (case BNF_LFP_Compat.get_info thy [] s of SOME info => (* inductive datatype *) let val index = #index info val descr = #descr info val (_, typs, _) = the (AList.lookup (op =) descr index) val typ_assoc = typs ~~ Ts (* sanity check: every element in 'dtyps' must be a *) (* 'DtTFree' *) val _ = if Library.exists (fn d => case d of Old_Datatype_Aux.DtTFree _ => false | _ => true) typs then raise REFUTE ("ground_types", "datatype argument (for type " ^ Syntax.string_of_typ ctxt T ^ ") is not a variable") else () (* required for mutually recursive datatypes; those need to *) (* be added even if they are an instance of an otherwise non- *) (* recursive datatype *) fun collect_dtyp d acc = let val dT = typ_of_dtyp descr typ_assoc d in case d of Old_Datatype_Aux.DtTFree _ => collect_types dT acc | Old_Datatype_Aux.DtType (_, ds) => collect_types dT (fold_rev collect_dtyp ds acc) | Old_Datatype_Aux.DtRec i => if member (op =) acc dT then acc (* prevent infinite recursion *) else let val (_, dtyps, dconstrs) = the (AList.lookup (op =) descr i) (* if the current type is a recursive IDT (i.e. a depth *) (* is required), add it to 'acc' *) val acc_dT = if Library.exists (fn (_, ds) => Library.exists Old_Datatype_Aux.is_rec_type ds) dconstrs then insert (op =) dT acc else acc (* collect argument types *) val acc_dtyps = fold_rev collect_dtyp dtyps acc_dT (* collect constructor types *) val acc_dconstrs = fold_rev collect_dtyp (maps snd dconstrs) acc_dtyps in acc_dconstrs end end in (* argument types 'Ts' could be added here, but they are also *) (* added by 'collect_dtyp' automatically *) collect_dtyp (Old_Datatype_Aux.DtRec index) acc end | NONE => (* not an inductive datatype, e.g. defined via "typedef" or *) (* "typedecl" *) insert (op =) T (fold collect_types Ts acc)) | TFree _ => insert (op =) T acc | TVar _ => insert (op =) T acc) in fold_types collect_types t [] end; (* ------------------------------------------------------------------------- *) (* string_of_typ: (rather naive) conversion from types to strings, used to *) (* look up the size of a type in 'sizes'. Parameterized *) (* types with different parameters (e.g. "'a list" vs. "bool *) (* list") are identified. *) (* ------------------------------------------------------------------------- *) fun string_of_typ (Type (s, _)) = s | string_of_typ (TFree (s, _)) = s | string_of_typ (TVar ((s,_), _)) = s; (* ------------------------------------------------------------------------- *) (* first_universe: returns the "first" (i.e. 
smallest) universe by assigning *) (* 'minsize' to every type for which no size is specified in *) (* 'sizes' *) (* ------------------------------------------------------------------------- *) fun first_universe xs sizes minsize = let fun size_of_typ T = case AList.lookup (op =) sizes (string_of_typ T) of SOME n => n | NONE => minsize in map (fn T => (T, size_of_typ T)) xs end; (* ------------------------------------------------------------------------- *) (* next_universe: enumerates all universes (i.e. assignments of sizes to *) (* types), where the minimal size of a type is given by *) (* 'minsize', the maximal size is given by 'maxsize', and a *) (* type may have a fixed size given in 'sizes' *) (* ------------------------------------------------------------------------- *) fun next_universe xs sizes minsize maxsize = let (* creates the "first" list of length 'len', where the sum of all list *) (* elements is 'sum', and the length of the list is 'len' *) fun make_first _ 0 sum = if sum = 0 then SOME [] else NONE | make_first max len sum = if sum <= max orelse max < 0 then Option.map (fn xs' => sum :: xs') (make_first max (len-1) 0) else Option.map (fn xs' => max :: xs') (make_first max (len-1) (sum-max)) (* enumerates all int lists with a fixed length, where 0<=x<='max' for *) (* all list elements x (unless 'max'<0) *) fun next _ _ _ [] = NONE | next max len sum [x] = (* we've reached the last list element, so there's no shift possible *) make_first max (len+1) (sum+x+1) (* increment 'sum' by 1 *) | next max len sum (x1::x2::xs) = if x1>0 andalso (x2 n-minsize) mutables in case next (maxsize-minsize) 0 0 diffs of SOME diffs' => (* merge with those types for which the size is fixed *) SOME (fst (fold_map (fn (T, _) => fn ds => case AList.lookup (op =) sizes (string_of_typ T) of (* return the fixed size *) SOME n => ((T, n), ds) (* consume the head of 'ds', add 'minsize' *) | NONE => ((T, minsize + hd ds), tl ds)) xs diffs')) | NONE => NONE end; (* ------------------------------------------------------------------------- *) (* toTrue: converts the interpretation of a Boolean value to a propositional *) (* formula that is true iff the interpretation denotes "true" *) (* ------------------------------------------------------------------------- *) fun toTrue (Leaf [fm, _]) = fm | toTrue _ = raise REFUTE ("toTrue", "interpretation does not denote a Boolean value"); (* ------------------------------------------------------------------------- *) (* toFalse: converts the interpretation of a Boolean value to a *) (* propositional formula that is true iff the interpretation *) (* denotes "false" *) (* ------------------------------------------------------------------------- *) fun toFalse (Leaf [_, fm]) = fm | toFalse _ = raise REFUTE ("toFalse", "interpretation does not denote a Boolean value"); (* ------------------------------------------------------------------------- *) (* find_model: repeatedly calls 'interpret' with appropriate parameters, *) (* applies a SAT solver, and (in case a model is found) displays *) (* the model to the user by calling 'print_model' *) (* {...} : parameters that control the translation/model generation *) (* assm_ts : assumptions to be considered unless "no_assms" is specified *) (* t : term to be translated into a propositional formula *) (* negate : if true, find a model that makes 't' false (rather than true) *) (* ------------------------------------------------------------------------- *) fun find_model ctxt {sizes, minsize, maxsize, maxvars, maxtime, 
satsolver, no_assms, expect} assm_ts t negate = let val thy = Proof_Context.theory_of ctxt fun check_expect outcome_code = if expect = "" orelse outcome_code = expect then outcome_code else error ("Unexpected outcome: " ^ quote outcome_code ^ ".") fun wrapper () = let val time_start = Time.now () val t = if no_assms then t else if negate then Logic.list_implies (assm_ts, t) else Logic.mk_conjunction_list (t :: assm_ts) val u = unfold_defs thy t val _ = tracing ("Unfolded term: " ^ Syntax.string_of_term ctxt u) val axioms = collect_axioms ctxt u val types = fold (union (op =) o ground_types ctxt) (u :: axioms) [] val _ = tracing ("Ground types: " ^ (if null types then "none." else commas (map (Syntax.string_of_typ ctxt) types))) (* we can only consider fragments of recursive IDTs, so we issue a *) (* warning if the formula contains a recursive IDT *) (* TODO: no warning needed for /positive/ occurrences of IDTs *) val maybe_spurious = Library.exists (fn Type (s, _) => (case BNF_LFP_Compat.get_info thy [] s of SOME info => (* inductive datatype *) let val index = #index info val descr = #descr info val (_, _, constrs) = the (AList.lookup (op =) descr index) in (* recursive datatype? *) Library.exists (fn (_, ds) => Library.exists Old_Datatype_Aux.is_rec_type ds) constrs end | NONE => false) | _ => false) types val _ = if maybe_spurious andalso Context_Position.is_visible ctxt then warning "Term contains a recursive datatype; countermodel(s) may be spurious!" else () fun find_model_loop universe = let val time_spent = Time.now () - time_start val _ = maxtime = 0 orelse time_spent < Timeout.scale_time (Time.fromSeconds maxtime) orelse raise Timeout.TIMEOUT time_spent val init_model = (universe, []) val init_args = {maxvars = maxvars, def_eq = false, next_idx = 1, bounds = [], wellformed = True} val _ = tracing ("Translating term (sizes: " ^ commas (map (fn (_, n) => string_of_int n) universe) ^ ") ...") (* translate 'u' and all axioms *) val (intrs, (model, args)) = fold_map (fn t' => fn (m, a) => let val (i, m', a') = interpret ctxt m a t' in (* set 'def_eq' to 'true' *) (i, (m', {maxvars = #maxvars a', def_eq = true, next_idx = #next_idx a', bounds = #bounds a', wellformed = #wellformed a'})) end) (u :: axioms) (init_model, init_args) (* make 'u' either true or false, and make all axioms true, and *) (* add the well-formedness side condition *) val fm_u = (if negate then toFalse else toTrue) (hd intrs) val fm_ax = Prop_Logic.all (map toTrue (tl intrs)) val fm = Prop_Logic.all [#wellformed args, fm_ax, fm_u] val _ = (if member (op =) ["cdclite"] satsolver andalso Context_Position.is_visible ctxt then warning ("Using SAT solver " ^ quote satsolver ^ "; for better performance, consider installing an \ \external solver.") else ()); val solver = SAT_Solver.invoke_solver satsolver handle Option.Option => error ("Unknown SAT solver: " ^ quote satsolver ^ ". 
Available solvers: " ^ commas (map (quote o fst) (SAT_Solver.get_solvers ())) ^ ".") in writeln "Invoking SAT solver..."; (case solver fm of SAT_Solver.SATISFIABLE assignment => (writeln ("Model found:\n" ^ print_model ctxt model (fn i => case assignment i of SOME b => b | NONE => true)); if maybe_spurious then "potential" else "genuine") | SAT_Solver.UNSATISFIABLE _ => (writeln "No model exists."; case next_universe universe sizes minsize maxsize of SOME universe' => find_model_loop universe' | NONE => (writeln "Search terminated, no larger universe within the given limits."; "none")) | SAT_Solver.UNKNOWN => (writeln "No model found."; case next_universe universe sizes minsize maxsize of SOME universe' => find_model_loop universe' | NONE => (writeln "Search terminated, no larger universe within the given limits."; "unknown"))) handle SAT_Solver.NOT_CONFIGURED => (error ("SAT solver " ^ quote satsolver ^ " is not configured."); "unknown") end handle MAXVARS_EXCEEDED => (writeln ("Search terminated, number of Boolean variables (" ^ string_of_int maxvars ^ " allowed) exceeded."); "unknown") val outcome_code = find_model_loop (first_universe types sizes minsize) in check_expect outcome_code end in (* some parameter sanity checks *) minsize>=1 orelse error ("\"minsize\" is " ^ string_of_int minsize ^ ", must be at least 1"); maxsize>=1 orelse error ("\"maxsize\" is " ^ string_of_int maxsize ^ ", must be at least 1"); maxsize>=minsize orelse error ("\"maxsize\" (=" ^ string_of_int maxsize ^ ") is less than \"minsize\" (=" ^ string_of_int minsize ^ ")."); maxvars>=0 orelse error ("\"maxvars\" is " ^ string_of_int maxvars ^ ", must be at least 0"); maxtime>=0 orelse error ("\"maxtime\" is " ^ string_of_int maxtime ^ ", must be at least 0"); (* enter loop with or without time limit *) writeln ("Trying to find a model that " ^ (if negate then "refutes" else "satisfies") ^ ": " ^ Syntax.string_of_term ctxt t); Timeout.apply (Time.fromSeconds maxtime) wrapper () handle Timeout.TIMEOUT _ => (writeln ("Search terminated, time limit (" ^ string_of_int maxtime ^ (if maxtime=1 then " second" else " seconds") ^ ") exceeded."); check_expect "unknown") end; (* ------------------------------------------------------------------------- *) (* INTERFACE, PART 2: FINDING A MODEL *) (* ------------------------------------------------------------------------- *) (* ------------------------------------------------------------------------- *) (* satisfy_term: calls 'find_model' to find a model that satisfies 't' *) (* params : list of '(name, value)' pairs used to override default *) (* parameters *) (* ------------------------------------------------------------------------- *) fun satisfy_term ctxt params assm_ts t = find_model ctxt (actual_params ctxt params) assm_ts t false; (* ------------------------------------------------------------------------- *) (* refute_term: calls 'find_model' to find a model that refutes 't' *) (* params : list of '(name, value)' pairs used to override default *) (* parameters *) (* ------------------------------------------------------------------------- *) fun refute_term ctxt params assm_ts t = let (* disallow schematic type variables, since we cannot properly negate *) (* terms containing them (their logical meaning is that there EXISTS a *) (* type s.t. ...; to refute such a formula, we would have to show that *) (* for ALL types, not ...) 
*) val _ = null (Term.add_tvars t []) orelse error "Term to be refuted contains schematic type variables" (* existential closure over schematic variables *) val vars = sort_by (fst o fst) (Term.add_vars t []) (* Term.term *) val ex_closure = fold (fn ((x, i), T) => fn t' => HOLogic.exists_const T $ Abs (x, T, abstract_over (Var ((x, i), T), t'))) vars t (* Note: If 't' is of type 'propT' (rather than 'boolT'), applying *) (* 'HOLogic.exists_const' is not type-correct. However, this is not *) (* really a problem as long as 'find_model' still interprets the *) (* resulting term correctly, without checking its type. *) (* replace outermost universally quantified variables by Free's: *) (* refuting a term with Free's is generally faster than refuting a *) (* term with (nested) quantifiers, because quantifiers are expanded, *) (* while the SAT solver searches for an interpretation for Free's. *) (* Also we get more information back that way, namely an *) (* interpretation which includes values for the (formerly) *) (* quantified variables. *) (* maps !!x1...xn. !xk...xm. t to t *) fun strip_all_body (Const (\<^const_name>\Pure.all\, _) $ Abs (_, _, t)) = strip_all_body t | strip_all_body (Const (\<^const_name>\Trueprop\, _) $ t) = strip_all_body t | strip_all_body (Const (\<^const_name>\All\, _) $ Abs (_, _, t)) = strip_all_body t | strip_all_body t = t (* maps !!x1...xn. !xk...xm. t to [x1, ..., xn, xk, ..., xm] *) fun strip_all_vars (Const (\<^const_name>\Pure.all\, _) $ Abs (a, T, t)) = (a, T) :: strip_all_vars t | strip_all_vars (Const (\<^const_name>\Trueprop\, _) $ t) = strip_all_vars t | strip_all_vars (Const (\<^const_name>\All\, _) $ Abs (a, T, t)) = (a, T) :: strip_all_vars t | strip_all_vars _ = [] : (string * typ) list val strip_t = strip_all_body ex_closure val frees = Term.rename_wrt_term strip_t (strip_all_vars ex_closure) val subst_t = Term.subst_bounds (map Free frees, strip_t) in find_model ctxt (actual_params ctxt params) assm_ts subst_t true end; (* ------------------------------------------------------------------------- *) (* refute_goal *) (* ------------------------------------------------------------------------- *) fun refute_goal ctxt params th i = let val t = th |> Thm.prop_of in - if Logic.count_prems t = 0 then + if Logic.no_prems t then (writeln "No subgoal!"; "none") else let val assms = map Thm.term_of (Assumption.all_assms_of ctxt) val (t, frees) = Logic.goal_params t i in refute_term ctxt params assms (subst_bounds (frees, t)) end end (* ------------------------------------------------------------------------- *) (* INTERPRETERS: Auxiliary Functions *) (* ------------------------------------------------------------------------- *) (* ------------------------------------------------------------------------- *) (* make_constants: returns all interpretations for type 'T' that consist of *) (* unit vectors with 'True'/'False' only (no Boolean *) (* variables) *) (* ------------------------------------------------------------------------- *) fun make_constants ctxt model T = let (* returns a list with all unit vectors of length n *) fun unit_vectors n = let (* returns the k-th unit vector of length n *) fun unit_vector (k, n) = Leaf ((replicate (k-1) False) @ (True :: (replicate (n-k) False))) fun unit_vectors_loop k = if k>n then [] else unit_vector (k,n) :: unit_vectors_loop (k+1) in unit_vectors_loop 1 end (* returns a list of lists, each one consisting of n (possibly *) (* identical) elements from 'xs' *) fun pick_all 1 xs = map single xs | pick_all n xs = let 
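(* e.g. pick_all 2 [x, y] = [[x, x], [x, y], [y, x], [y, y]], i.e. all (length xs)^n lists of length n over 'xs' *)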
val rec_pick = pick_all (n - 1) xs in maps (fn x => map (cons x) rec_pick) xs end (* returns all constant interpretations that have the same tree *) (* structure as the interpretation argument *) fun make_constants_intr (Leaf xs) = unit_vectors (length xs) | make_constants_intr (Node xs) = map Node (pick_all (length xs) (make_constants_intr (hd xs))) (* obtain the interpretation for a variable of type 'T' *) val (i, _, _) = interpret ctxt model {maxvars=0, def_eq=false, next_idx=1, bounds=[], wellformed=True} (Free ("dummy", T)) in make_constants_intr i end; (* ------------------------------------------------------------------------- *) (* size_of_type: returns the number of elements in a type 'T' (i.e. 'length *) (* (make_constants T)', but implemented more efficiently) *) (* ------------------------------------------------------------------------- *) (* returns 0 for an empty ground type or a function type with empty *) (* codomain, but fails for a function type with empty domain -- *) (* admissibility of datatype constructor argument types (see "Inductive *) (* datatypes in HOL - lessons learned ...", S. Berghofer, M. Wenzel, *) (* TPHOLs 99) ensures that recursive, possibly empty, datatype fragments *) (* never occur as the domain of a function type that is the type of a *) (* constructor argument *) fun size_of_type ctxt model T = let (* returns the number of elements that have the same tree structure as a *) (* given interpretation *) fun size_of_intr (Leaf xs) = length xs | size_of_intr (Node xs) = Integer.pow (length xs) (size_of_intr (hd xs)) (* obtain the interpretation for a variable of type 'T' *) val (i, _, _) = interpret ctxt model {maxvars=0, def_eq=false, next_idx=1, bounds=[], wellformed=True} (Free ("dummy", T)) in size_of_intr i end; (* ------------------------------------------------------------------------- *) (* TT/FF: interpretations that denote "true" or "false", respectively *) (* ------------------------------------------------------------------------- *) val TT = Leaf [True, False]; val FF = Leaf [False, True]; (* ------------------------------------------------------------------------- *) (* make_equality: returns an interpretation that denotes (extensional) *) (* equality of two interpretations *) (* - two interpretations are 'equal' iff they are both defined and denote *) (* the same value *) (* - two interpretations are 'not_equal' iff they are both defined at least *) (* partially, and a defined part denotes different values *) (* - a completely undefined interpretation is neither 'equal' nor *) (* 'not_equal' to another interpretation *) (* ------------------------------------------------------------------------- *) (* We could in principle represent '=' on a type T by a particular *) (* interpretation. However, the size of that interpretation is quadratic *) (* in the size of T. Therefore comparing the interpretations 'i1' and *) (* 'i2' directly is more efficient than constructing the interpretation *) (* for equality on T first, and "applying" this interpretation to 'i1' *) (* and 'i2' in the usual way (cf. 'interpretation_apply') then. 
*) fun make_equality (i1, i2) = let fun equal (i1, i2) = (case i1 of Leaf xs => (case i2 of Leaf ys => Prop_Logic.dot_product (xs, ys) (* defined and equal *) | Node _ => raise REFUTE ("make_equality", "second interpretation is higher")) | Node xs => (case i2 of Leaf _ => raise REFUTE ("make_equality", "first interpretation is higher") | Node ys => Prop_Logic.all (map equal (xs ~~ ys)))) fun not_equal (i1, i2) = (case i1 of Leaf xs => (case i2 of (* defined and not equal *) Leaf ys => Prop_Logic.all ((Prop_Logic.exists xs) :: (Prop_Logic.exists ys) :: (map (fn (x,y) => SOr (SNot x, SNot y)) (xs ~~ ys))) | Node _ => raise REFUTE ("make_equality", "second interpretation is higher")) | Node xs => (case i2 of Leaf _ => raise REFUTE ("make_equality", "first interpretation is higher") | Node ys => Prop_Logic.exists (map not_equal (xs ~~ ys)))) in (* a value may be undefined; therefore 'not_equal' is not just the *) (* negation of 'equal' *) Leaf [equal (i1, i2), not_equal (i1, i2)] end; (* ------------------------------------------------------------------------- *) (* make_def_equality: returns an interpretation that denotes (extensional) *) (* equality of two interpretations *) (* This function treats undefined/partially defined interpretations *) (* different from 'make_equality': two undefined interpretations are *) (* considered equal, while a defined interpretation is considered not equal *) (* to an undefined interpretation. *) (* ------------------------------------------------------------------------- *) fun make_def_equality (i1, i2) = let fun equal (i1, i2) = (case i1 of Leaf xs => (case i2 of (* defined and equal, or both undefined *) Leaf ys => SOr (Prop_Logic.dot_product (xs, ys), SAnd (Prop_Logic.all (map SNot xs), Prop_Logic.all (map SNot ys))) | Node _ => raise REFUTE ("make_def_equality", "second interpretation is higher")) | Node xs => (case i2 of Leaf _ => raise REFUTE ("make_def_equality", "first interpretation is higher") | Node ys => Prop_Logic.all (map equal (xs ~~ ys)))) val eq = equal (i1, i2) in Leaf [eq, SNot eq] end; (* ------------------------------------------------------------------------- *) (* interpretation_apply: returns an interpretation that denotes the result *) (* of applying the function denoted by 'i1' to the *) (* argument denoted by 'i2' *) (* ------------------------------------------------------------------------- *) fun interpretation_apply (i1, i2) = let fun interpretation_disjunction (tr1,tr2) = tree_map (fn (xs,ys) => map (fn (x,y) => SOr(x,y)) (xs ~~ ys)) (tree_pair (tr1,tr2)) fun prop_formula_times_interpretation (fm,tr) = tree_map (map (fn x => SAnd (fm,x))) tr fun prop_formula_list_dot_product_interpretation_list ([fm],[tr]) = prop_formula_times_interpretation (fm,tr) | prop_formula_list_dot_product_interpretation_list (fm::fms,tr::trees) = interpretation_disjunction (prop_formula_times_interpretation (fm,tr), prop_formula_list_dot_product_interpretation_list (fms,trees)) | prop_formula_list_dot_product_interpretation_list (_,_) = raise REFUTE ("interpretation_apply", "empty list (in dot product)") (* returns a list of lists, each one consisting of one element from each *) (* element of 'xss' *) fun pick_all [xs] = map single xs | pick_all (xs::xss) = let val rec_pick = pick_all xss in maps (fn x => map (cons x) rec_pick) xs end | pick_all _ = raise REFUTE ("interpretation_apply", "empty list (in pick_all)") fun interpretation_to_prop_formula_list (Leaf xs) = xs | interpretation_to_prop_formula_list (Node trees) = map Prop_Logic.all (pick_all 
(map interpretation_to_prop_formula_list trees)) in case i1 of Leaf _ => raise REFUTE ("interpretation_apply", "first interpretation is a leaf") | Node xs => prop_formula_list_dot_product_interpretation_list (interpretation_to_prop_formula_list i2, xs) end; (* ------------------------------------------------------------------------- *) (* eta_expand: eta-expands a term 't' by adding 'i' lambda abstractions *) (* ------------------------------------------------------------------------- *) fun eta_expand t i = let val Ts = Term.binder_types (Term.fastype_of t) val t' = Term.incr_boundvars i t in fold_rev (fn T => fn term => Abs ("", T, term)) (List.take (Ts, i)) (Term.list_comb (t', map Bound (i-1 downto 0))) end; (* ------------------------------------------------------------------------- *) (* size_of_dtyp: the size of (an initial fragment of) an inductive data type *) (* is the sum (over its constructors) of the product (over *) (* their arguments) of the size of the argument types *) (* ------------------------------------------------------------------------- *) fun size_of_dtyp ctxt typ_sizes descr typ_assoc constructors = Integer.sum (map (fn (_, dtyps) => Integer.prod (map (size_of_type ctxt (typ_sizes, []) o (typ_of_dtyp descr typ_assoc)) dtyps)) constructors); (* ------------------------------------------------------------------------- *) (* INTERPRETERS: Actual Interpreters *) (* ------------------------------------------------------------------------- *) (* simply typed lambda calculus: Isabelle's basic term syntax, with type *) (* variables, function types, and propT *) fun stlc_interpreter ctxt model args t = let val (typs, terms) = model val {maxvars, def_eq, next_idx, bounds, wellformed} = args fun interpret_groundterm T = let fun interpret_groundtype () = let (* the model must specify a size for ground types *) val size = if T = Term.propT then 2 else the (AList.lookup (op =) typs T) val next = next_idx + size (* check if 'maxvars' is large enough *) val _ = (if next - 1 > maxvars andalso maxvars > 0 then raise MAXVARS_EXCEEDED else ()) val fms = map BoolVar (next_idx upto (next_idx + size - 1)) val intr = Leaf fms fun one_of_two_false [] = True | one_of_two_false (x::xs) = SAnd (Prop_Logic.all (map (fn x' => SOr (SNot x, SNot x')) xs), one_of_two_false xs) val wf = one_of_two_false fms in (* extend the model, increase 'next_idx', add well-formedness *) (* condition *) SOME (intr, (typs, (t, intr)::terms), {maxvars = maxvars, def_eq = def_eq, next_idx = next, bounds = bounds, wellformed = SAnd (wellformed, wf)}) end in case T of Type ("fun", [T1, T2]) => let (* we create 'size_of_type ... 
T1' different copies of the *) (* interpretation for 'T2', which are then combined into a single *) (* new interpretation *) (* make fresh copies, with different variable indices *) (* 'idx': next variable index *) (* 'n' : number of copies *) fun make_copies idx 0 = (idx, [], True) | make_copies idx n = let val (copy, _, new_args) = interpret ctxt (typs, []) {maxvars = maxvars, def_eq = false, next_idx = idx, bounds = [], wellformed = True} (Free ("dummy", T2)) val (idx', copies, wf') = make_copies (#next_idx new_args) (n-1) in (idx', copy :: copies, SAnd (#wellformed new_args, wf')) end val (next, copies, wf) = make_copies next_idx (size_of_type ctxt model T1) (* combine copies into a single interpretation *) val intr = Node copies in (* extend the model, increase 'next_idx', add well-formedness *) (* condition *) SOME (intr, (typs, (t, intr)::terms), {maxvars = maxvars, def_eq = def_eq, next_idx = next, bounds = bounds, wellformed = SAnd (wellformed, wf)}) end | Type _ => interpret_groundtype () | TFree _ => interpret_groundtype () | TVar _ => interpret_groundtype () end in case AList.lookup (op =) terms t of SOME intr => (* return an existing interpretation *) SOME (intr, model, args) | NONE => (case t of Const (_, T) => interpret_groundterm T | Free (_, T) => interpret_groundterm T | Var (_, T) => interpret_groundterm T | Bound i => SOME (nth (#bounds args) i, model, args) | Abs (_, T, body) => let (* create all constants of type 'T' *) val constants = make_constants ctxt model T (* interpret the 'body' separately for each constant *) val (bodies, (model', args')) = fold_map (fn c => fn (m, a) => let (* add 'c' to 'bounds' *) val (i', m', a') = interpret ctxt m {maxvars = #maxvars a, def_eq = #def_eq a, next_idx = #next_idx a, bounds = (c :: #bounds a), wellformed = #wellformed a} body in (* keep the new model m' and 'next_idx' and 'wellformed', *) (* but use old 'bounds' *) (i', (m', {maxvars = maxvars, def_eq = def_eq, next_idx = #next_idx a', bounds = bounds, wellformed = #wellformed a'})) end) constants (model, args) in SOME (Node bodies, model', args') end | t1 $ t2 => let (* interpret 't1' and 't2' separately *) val (intr1, model1, args1) = interpret ctxt model args t1 val (intr2, model2, args2) = interpret ctxt model1 args1 t2 in SOME (interpretation_apply (intr1, intr2), model2, args2) end) end; fun Pure_interpreter ctxt model args t = case t of Const (\<^const_name>\Pure.all\, _) $ t1 => let val (i, m, a) = interpret ctxt model args t1 in case i of Node xs => (* 3-valued logic *) let val fmTrue = Prop_Logic.all (map toTrue xs) val fmFalse = Prop_Logic.exists (map toFalse xs) in SOME (Leaf [fmTrue, fmFalse], m, a) end | _ => raise REFUTE ("Pure_interpreter", "\"Pure.all\" is followed by a non-function") end | Const (\<^const_name>\Pure.all\, _) => SOME (interpret ctxt model args (eta_expand t 1)) | Const (\<^const_name>\Pure.eq\, _) $ t1 $ t2 => let val (i1, m1, a1) = interpret ctxt model args t1 val (i2, m2, a2) = interpret ctxt m1 a1 t2 in (* we use either 'make_def_equality' or 'make_equality' *) SOME ((if #def_eq args then make_def_equality else make_equality) (i1, i2), m2, a2) end | Const (\<^const_name>\Pure.eq\, _) $ _ => SOME (interpret ctxt model args (eta_expand t 1)) | Const (\<^const_name>\Pure.eq\, _) => SOME (interpret ctxt model args (eta_expand t 2)) | Const (\<^const_name>\Pure.imp\, _) $ t1 $ t2 => (* 3-valued logic *) let val (i1, m1, a1) = interpret ctxt model args t1 val (i2, m2, a2) = interpret ctxt m1 a1 t2 val fmTrue = Prop_Logic.SOr (toFalse i1, toTrue 
i2) val fmFalse = Prop_Logic.SAnd (toTrue i1, toFalse i2) in SOME (Leaf [fmTrue, fmFalse], m2, a2) end | Const (\<^const_name>\Pure.imp\, _) $ _ => SOME (interpret ctxt model args (eta_expand t 1)) | Const (\<^const_name>\Pure.imp\, _) => SOME (interpret ctxt model args (eta_expand t 2)) | _ => NONE; fun HOLogic_interpreter ctxt model args t = (* Providing interpretations directly is more efficient than unfolding the *) (* logical constants. In HOL however, logical constants can themselves be *) (* arguments. They are then translated using eta-expansion. *) case t of Const (\<^const_name>\Trueprop\, _) => SOME (Node [TT, FF], model, args) | Const (\<^const_name>\Not\, _) => SOME (Node [FF, TT], model, args) (* redundant, since 'True' is also an IDT constructor *) | Const (\<^const_name>\True\, _) => SOME (TT, model, args) (* redundant, since 'False' is also an IDT constructor *) | Const (\<^const_name>\False\, _) => SOME (FF, model, args) | Const (\<^const_name>\All\, _) $ t1 => (* similar to "Pure.all" *) let val (i, m, a) = interpret ctxt model args t1 in case i of Node xs => (* 3-valued logic *) let val fmTrue = Prop_Logic.all (map toTrue xs) val fmFalse = Prop_Logic.exists (map toFalse xs) in SOME (Leaf [fmTrue, fmFalse], m, a) end | _ => raise REFUTE ("HOLogic_interpreter", "\"All\" is followed by a non-function") end | Const (\<^const_name>\All\, _) => SOME (interpret ctxt model args (eta_expand t 1)) | Const (\<^const_name>\Ex\, _) $ t1 => let val (i, m, a) = interpret ctxt model args t1 in case i of Node xs => (* 3-valued logic *) let val fmTrue = Prop_Logic.exists (map toTrue xs) val fmFalse = Prop_Logic.all (map toFalse xs) in SOME (Leaf [fmTrue, fmFalse], m, a) end | _ => raise REFUTE ("HOLogic_interpreter", "\"Ex\" is followed by a non-function") end | Const (\<^const_name>\Ex\, _) => SOME (interpret ctxt model args (eta_expand t 1)) | Const (\<^const_name>\HOL.eq\, _) $ t1 $ t2 => (* similar to Pure.eq *) let val (i1, m1, a1) = interpret ctxt model args t1 val (i2, m2, a2) = interpret ctxt m1 a1 t2 in SOME (make_equality (i1, i2), m2, a2) end | Const (\<^const_name>\HOL.eq\, _) $ _ => SOME (interpret ctxt model args (eta_expand t 1)) | Const (\<^const_name>\HOL.eq\, _) => SOME (interpret ctxt model args (eta_expand t 2)) | Const (\<^const_name>\HOL.conj\, _) $ t1 $ t2 => (* 3-valued logic *) let val (i1, m1, a1) = interpret ctxt model args t1 val (i2, m2, a2) = interpret ctxt m1 a1 t2 val fmTrue = Prop_Logic.SAnd (toTrue i1, toTrue i2) val fmFalse = Prop_Logic.SOr (toFalse i1, toFalse i2) in SOME (Leaf [fmTrue, fmFalse], m2, a2) end | Const (\<^const_name>\HOL.conj\, _) $ _ => SOME (interpret ctxt model args (eta_expand t 1)) | Const (\<^const_name>\HOL.conj\, _) => SOME (interpret ctxt model args (eta_expand t 2)) (* this would make "undef" propagate, even for formulae like *) (* "False & undef": *) (* SOME (Node [Node [TT, FF], Node [FF, FF]], model, args) *) | Const (\<^const_name>\HOL.disj\, _) $ t1 $ t2 => (* 3-valued logic *) let val (i1, m1, a1) = interpret ctxt model args t1 val (i2, m2, a2) = interpret ctxt m1 a1 t2 val fmTrue = Prop_Logic.SOr (toTrue i1, toTrue i2) val fmFalse = Prop_Logic.SAnd (toFalse i1, toFalse i2) in SOME (Leaf [fmTrue, fmFalse], m2, a2) end | Const (\<^const_name>\HOL.disj\, _) $ _ => SOME (interpret ctxt model args (eta_expand t 1)) | Const (\<^const_name>\HOL.disj\, _) => SOME (interpret ctxt model args (eta_expand t 2)) (* this would make "undef" propagate, even for formulae like *) (* "True | undef": *) (* SOME (Node [Node [TT, TT], Node 
[TT, FF]], model, args) *) | Const (\<^const_name>\HOL.implies\, _) $ t1 $ t2 => (* similar to Pure.imp *) (* 3-valued logic *) let val (i1, m1, a1) = interpret ctxt model args t1 val (i2, m2, a2) = interpret ctxt m1 a1 t2 val fmTrue = Prop_Logic.SOr (toFalse i1, toTrue i2) val fmFalse = Prop_Logic.SAnd (toTrue i1, toFalse i2) in SOME (Leaf [fmTrue, fmFalse], m2, a2) end | Const (\<^const_name>\HOL.implies\, _) $ _ => SOME (interpret ctxt model args (eta_expand t 1)) | Const (\<^const_name>\HOL.implies\, _) => SOME (interpret ctxt model args (eta_expand t 2)) (* this would make "undef" propagate, even for formulae like *) (* "False --> undef": *) (* SOME (Node [Node [TT, FF], Node [TT, TT]], model, args) *) | _ => NONE; (* interprets variables and constants whose type is an IDT (this is *) (* relatively easy and merely requires us to compute the size of the IDT); *) (* constructors of IDTs however are properly interpreted by *) (* 'IDT_constructor_interpreter' *) fun IDT_interpreter ctxt model args t = let val thy = Proof_Context.theory_of ctxt val (typs, terms) = model fun interpret_term (Type (s, Ts)) = (case BNF_LFP_Compat.get_info thy [] s of SOME info => (* inductive datatype *) let (* only recursive IDTs have an associated depth *) val depth = AList.lookup (op =) typs (Type (s, Ts)) (* sanity check: depth must be at least 0 *) val _ = (case depth of SOME n => if n < 0 then raise REFUTE ("IDT_interpreter", "negative depth") else () | _ => ()) in (* termination condition to avoid infinite recursion *) if depth = (SOME 0) then (* return a leaf of size 0 *) SOME (Leaf [], model, args) else let val index = #index info val descr = #descr info val (_, dtyps, constrs) = the (AList.lookup (op =) descr index) val typ_assoc = dtyps ~~ Ts (* sanity check: every element in 'dtyps' must be a 'DtTFree' *) val _ = if Library.exists (fn d => case d of Old_Datatype_Aux.DtTFree _ => false | _ => true) dtyps then raise REFUTE ("IDT_interpreter", "datatype argument (for type " ^ Syntax.string_of_typ ctxt (Type (s, Ts)) ^ ") is not a variable") else () (* if the model specifies a depth for the current type, *) (* decrement it to avoid infinite recursion *) val typs' = case depth of NONE => typs | SOME n => AList.update (op =) (Type (s, Ts), n-1) typs (* recursively compute the size of the datatype *) val size = size_of_dtyp ctxt typs' descr typ_assoc constrs val next_idx = #next_idx args val next = next_idx+size (* check if 'maxvars' is large enough *) val _ = (if next-1 > #maxvars args andalso #maxvars args > 0 then raise MAXVARS_EXCEEDED else ()) val fms = map BoolVar (next_idx upto (next_idx+size-1)) val intr = Leaf fms fun one_of_two_false [] = True | one_of_two_false (x::xs) = SAnd (Prop_Logic.all (map (fn x' => SOr (SNot x, SNot x')) xs), one_of_two_false xs) val wf = one_of_two_false fms in (* extend the model, increase 'next_idx', add well-formedness *) (* condition *) SOME (intr, (typs, (t, intr)::terms), {maxvars = #maxvars args, def_eq = #def_eq args, next_idx = next, bounds = #bounds args, wellformed = SAnd (#wellformed args, wf)}) end end | NONE => (* not an inductive datatype *) NONE) | interpret_term _ = (* a (free or schematic) type variable *) NONE in case AList.lookup (op =) terms t of SOME intr => (* return an existing interpretation *) SOME (intr, model, args) | NONE => (case t of Free (_, T) => interpret_term T | Var (_, T) => interpret_term T | Const (_, T) => interpret_term T | _ => NONE) end; (* This function imposes an order on the elements of a datatype fragment *) (* as follows: 
C_i x_1 ... x_n < C_j y_1 ... y_m iff i < j or *) (* (x_1, ..., x_n) < (y_1, ..., y_m). With this order, a constructor is *) (* a function C_i that maps some argument indices x_1, ..., x_n to the *) (* datatype element given by index C_i x_1 ... x_n. The idea remains the *) (* same for recursive datatypes, although the computation of indices gets *) (* a little tricky. *) fun IDT_constructor_interpreter ctxt model args t = let val thy = Proof_Context.theory_of ctxt (* returns a list of canonical representations for terms of the type 'T' *) (* It would be nice if we could just use 'print' for this, but 'print' *) (* for IDTs calls 'IDT_constructor_interpreter' again, and this could *) (* lead to infinite recursion when we have (mutually) recursive IDTs. *) fun canonical_terms typs T = (case T of Type ("fun", [T1, T2]) => (* 'T2' might contain a recursive IDT, so we cannot use 'print' (at *) (* least not for 'T2' *) let (* returns a list of lists, each one consisting of n (possibly *) (* identical) elements from 'xs' *) fun pick_all 1 xs = map single xs | pick_all n xs = let val rec_pick = pick_all (n-1) xs in maps (fn x => map (cons x) rec_pick) xs end (* ["x1", ..., "xn"] *) val terms1 = canonical_terms typs T1 (* ["y1", ..., "ym"] *) val terms2 = canonical_terms typs T2 (* [[("x1", "y1"), ..., ("xn", "y1")], ..., *) (* [("x1", "ym"), ..., ("xn", "ym")]] *) val functions = map (curry (op ~~) terms1) (pick_all (length terms1) terms2) (* [["(x1, y1)", ..., "(xn, y1)"], ..., *) (* ["(x1, ym)", ..., "(xn, ym)"]] *) val pairss = map (map HOLogic.mk_prod) functions val HOLogic_prodT = HOLogic.mk_prodT (T1, T2) val HOLogic_setT = HOLogic.mk_setT HOLogic_prodT val HOLogic_empty_set = Const (\<^const_abbrev>\Set.empty\, HOLogic_setT) val HOLogic_insert = Const (\<^const_name>\insert\, HOLogic_prodT --> HOLogic_setT --> HOLogic_setT) in (* functions as graphs, i.e. as a (HOL) set of pairs "(x, y)" *) map (fn ps => fold_rev (fn pair => fn acc => HOLogic_insert $ pair $ acc) ps HOLogic_empty_set) pairss end | Type (s, Ts) => (case BNF_LFP_Compat.get_info thy [] s of SOME info => (case AList.lookup (op =) typs T of SOME 0 => (* termination condition to avoid infinite recursion *) [] (* at depth 0, every IDT is empty *) | _ => let val index = #index info val descr = #descr info val (_, dtyps, constrs) = the (AList.lookup (op =) descr index) val typ_assoc = dtyps ~~ Ts (* sanity check: every element in 'dtyps' must be a 'DtTFree' *) val _ = if Library.exists (fn d => case d of Old_Datatype_Aux.DtTFree _ => false | _ => true) dtyps then raise REFUTE ("IDT_constructor_interpreter", "datatype argument (for type " ^ Syntax.string_of_typ ctxt T ^ ") is not a variable") else () (* decrement depth for the IDT 'T' *) val typs' = (case AList.lookup (op =) typs T of NONE => typs | SOME n => AList.update (op =) (T, n-1) typs) fun constructor_terms terms [] = terms | constructor_terms terms (d::ds) = let val dT = typ_of_dtyp descr typ_assoc d val d_terms = canonical_terms typs' dT in (* C_i x_1 ... x_n < C_i y_1 ... y_n if *) (* (x_1, ..., x_n) < (y_1, ..., y_n) *) constructor_terms (map_product (curry op $) terms d_terms) ds end in (* C_i ... < C_j ... 
if i < j *) maps (fn (cname, ctyps) => let val cTerm = Const (cname, map (typ_of_dtyp descr typ_assoc) ctyps ---> T) in constructor_terms [cTerm] ctyps end) constrs end) | NONE => (* not an inductive datatype; in this case the argument types in *) (* 'Ts' may not be IDTs either, so 'print' should be safe *) map (fn intr => print ctxt (typs, []) T intr (K false)) (make_constants ctxt (typs, []) T)) | _ => (* TFree ..., TVar ... *) map (fn intr => print ctxt (typs, []) T intr (K false)) (make_constants ctxt (typs, []) T)) val (typs, terms) = model in case AList.lookup (op =) terms t of SOME intr => (* return an existing interpretation *) SOME (intr, model, args) | NONE => (case t of Const (s, T) => (case body_type T of Type (s', Ts') => (case BNF_LFP_Compat.get_info thy [] s' of SOME info => (* body type is an inductive datatype *) let val index = #index info val descr = #descr info val (_, dtyps, constrs) = the (AList.lookup (op =) descr index) val typ_assoc = dtyps ~~ Ts' (* sanity check: every element in 'dtyps' must be a 'DtTFree' *) val _ = if Library.exists (fn d => case d of Old_Datatype_Aux.DtTFree _ => false | _ => true) dtyps then raise REFUTE ("IDT_constructor_interpreter", "datatype argument (for type " ^ Syntax.string_of_typ ctxt (Type (s', Ts')) ^ ") is not a variable") else () (* split the constructors into those occurring before/after *) (* 'Const (s, T)' *) val (constrs1, constrs2) = chop_prefix (fn (cname, ctypes) => not (cname = s andalso Sign.typ_instance thy (T, map (typ_of_dtyp descr typ_assoc) ctypes ---> Type (s', Ts')))) constrs in case constrs2 of [] => (* 'Const (s, T)' is not a constructor of this datatype *) NONE | (_, ctypes)::_ => let (* only /recursive/ IDTs have an associated depth *) val depth = AList.lookup (op =) typs (Type (s', Ts')) (* this should never happen: at depth 0, this IDT fragment *) (* is definitely empty, and in this case we don't need to *) (* interpret its constructors *) val _ = (case depth of SOME 0 => raise REFUTE ("IDT_constructor_interpreter", "depth is 0") | _ => ()) val typs' = (case depth of NONE => typs | SOME n => AList.update (op =) (Type (s', Ts'), n-1) typs) (* elements of the datatype come before elements generated *) (* by 'Const (s, T)' iff they are generated by a *) (* constructor in constrs1 *) val offset = size_of_dtyp ctxt typs' descr typ_assoc constrs1 (* compute the total (current) size of the datatype *) val total = offset + size_of_dtyp ctxt typs' descr typ_assoc constrs2 (* sanity check *) val _ = if total <> size_of_type ctxt (typs, []) (Type (s', Ts')) then raise REFUTE ("IDT_constructor_interpreter", "total is not equal to current size") else () (* returns an interpretation where everything is mapped to *) (* an "undefined" element of the datatype *) fun make_undef [] = Leaf (replicate total False) | make_undef (d::ds) = let (* compute the current size of the type 'd' *) val dT = typ_of_dtyp descr typ_assoc d val size = size_of_type ctxt (typs, []) dT in Node (replicate size (make_undef ds)) end (* returns the interpretation for a constructor *) fun make_constr [] offset = if offset < total then (Leaf (replicate offset False @ True :: (replicate (total - offset - 1) False)), offset + 1) else raise REFUTE ("IDT_constructor_interpreter", "offset >= total") | make_constr (d::ds) offset = let val dT = typ_of_dtyp descr typ_assoc d (* compute canonical term representations for all *) (* elements of the type 'd' (with the reduced depth *) (* for the IDT) *) val terms' = canonical_terms typs' dT (* sanity check *) val _ 
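(* Note (added for exposition): the sanity checks below verify the invariant
   that this interpreter relies on when a depth bound is decreased: every
   canonical term that exists at the reduced depth must still exist at the
   current depth, and in the same relative order.  For example, for 'nat'
   with depths 1 and 2 one would expect terms' = [0] and terms = [0, Suc 0],
   so terms' embeds into terms order-preservingly. *)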
= if length terms' <> size_of_type ctxt (typs', []) dT then raise REFUTE ("IDT_constructor_interpreter", "length of terms' is not equal to old size") else () (* compute canonical term representations for all *) (* elements of the type 'd' (with the current depth *) (* for the IDT) *) val terms = canonical_terms typs dT (* sanity check *) val _ = if length terms <> size_of_type ctxt (typs, []) dT then raise REFUTE ("IDT_constructor_interpreter", "length of terms is not equal to current size") else () (* sanity check *) val _ = if length terms < length terms' then raise REFUTE ("IDT_constructor_interpreter", "current size is less than old size") else () (* sanity check: every element of terms' must also be *) (* present in terms *) val _ = if forall (member (op =) terms) terms' then () else raise REFUTE ("IDT_constructor_interpreter", "element has disappeared") (* sanity check: the order on elements of terms' is *) (* the same in terms, for those elements *) val _ = let fun search (x::xs) (y::ys) = if x = y then search xs ys else search (x::xs) ys | search (_::_) [] = raise REFUTE ("IDT_constructor_interpreter", "element order not preserved") | search [] _ = () in search terms' terms end val (intrs, new_offset) = fold_map (fn t_elem => fn off => (* if 't_elem' existed at the previous depth, *) (* proceed recursively, otherwise map the entire *) (* subtree to "undefined" *) if member (op =) terms' t_elem then make_constr ds off else (make_undef ds, off)) terms offset in (Node intrs, new_offset) end in SOME (fst (make_constr ctypes offset), model, args) end end | NONE => (* body type is not an inductive datatype *) NONE) | _ => (* body type is a (free or schematic) type variable *) NONE) | _ => (* term is not a constant *) NONE) end; (* Difficult code ahead. Make sure you understand the *) (* 'IDT_constructor_interpreter' and the order in which it enumerates *) (* elements of an IDT before you try to understand this function. *) fun IDT_recursion_interpreter ctxt model args t = let val thy = Proof_Context.theory_of ctxt in (* careful: here we descend arbitrarily deep into 't', possibly before *) (* any other interpreter for atomic terms has had a chance to look at *) (* 't' *) case strip_comb t of (Const (s, T), params) => (* iterate over all datatypes in 'thy' *) Symtab.fold (fn (_, info) => fn result => case result of SOME _ => result (* just keep 'result' *) | NONE => if member (op =) (#rec_names info) s then (* we do have a recursion operator of one of the (mutually *) (* recursive) datatypes given by 'info' *) let (* number of all constructors, including those of different *) (* (mutually recursive) datatypes within the same descriptor *) val mconstrs_count = Integer.sum (map (fn (_, (_, _, cs)) => length cs) (#descr info)) in if mconstrs_count < length params then (* too many actual parameters; for now we'll use the *) (* 'stlc_interpreter' to strip off one application *) NONE else if mconstrs_count > length params then (* too few actual parameters; we use eta expansion *) (* Note that the resulting expansion of lambda abstractions *) (* by the 'stlc_interpreter' may be rather slow (depending *) (* on the argument types and the size of the IDT, of *) (* course). 
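(* Illustrative example (added for exposition): for a datatype with two
   constructors we have mconstrs_count = 2; if its recursion operator 'rec'
   is applied to only one argument z, then eta_expand (rec $ z) 1 yields the
   abstraction "%x. rec z x", and interpreting that abstraction enumerates
   every possible value of the missing argument's type, which explains the
   slowness noted above. *)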
*) SOME (interpret ctxt model args (eta_expand t (mconstrs_count - length params))) else (* mconstrs_count = length params *) let (* interpret each parameter separately *) val (p_intrs, (model', args')) = fold_map (fn p => fn (m, a) => let val (i, m', a') = interpret ctxt m a p in (i, (m', a')) end) params (model, args) val (typs, _) = model' (* 'index' is /not/ necessarily the index of the IDT that *) (* the recursion operator is associated with, but merely *) (* the index of some mutually recursive IDT *) val index = #index info val descr = #descr info val (_, dtyps, _) = the (AList.lookup (op =) descr index) (* sanity check: we assume that the order of constructors *) (* in 'descr' is the same as the order of *) (* corresponding parameters, otherwise the *) (* association code below won't match the *) (* right constructors/parameters; we also *) (* assume that the order of recursion *) (* operators in '#rec_names info' is the *) (* same as the order of corresponding *) (* datatypes in 'descr' *) val _ = if map fst descr <> (0 upto (length descr - 1)) then raise REFUTE ("IDT_recursion_interpreter", "order of constructors and corresponding parameters/" ^ "recursion operators and corresponding datatypes " ^ "different?") else () (* sanity check: every element in 'dtyps' must be a *) (* 'DtTFree' *) val _ = if Library.exists (fn d => case d of Old_Datatype_Aux.DtTFree _ => false | _ => true) dtyps then raise REFUTE ("IDT_recursion_interpreter", "datatype argument is not a variable") else () (* the type of a recursion operator is *) (* [T1, ..., Tn, IDT] ---> Tresult *) val IDT = nth (binder_types T) mconstrs_count (* by our assumption on the order of recursion operators *) (* and datatypes, this is the index of the datatype *) (* corresponding to the given recursion operator *) val idt_index = find_index (fn s' => s' = s) (#rec_names info) (* mutually recursive types must have the same type *) (* parameters, unless the mutual recursion comes from *) (* indirect recursion *) fun rec_typ_assoc acc [] = acc | rec_typ_assoc acc ((d, T)::xs) = (case AList.lookup op= acc d of NONE => (case d of Old_Datatype_Aux.DtTFree _ => (* add the association, proceed *) rec_typ_assoc ((d, T)::acc) xs | Old_Datatype_Aux.DtType (s, ds) => let val (s', Ts) = dest_Type T in if s=s' then rec_typ_assoc ((d, T)::acc) ((ds ~~ Ts) @ xs) else raise REFUTE ("IDT_recursion_interpreter", "DtType/Type mismatch") end | Old_Datatype_Aux.DtRec i => let val (_, ds, _) = the (AList.lookup (op =) descr i) val (_, Ts) = dest_Type T in rec_typ_assoc ((d, T)::acc) ((ds ~~ Ts) @ xs) end) | SOME T' => if T=T' then (* ignore the association since it's already *) (* present, proceed *) rec_typ_assoc acc xs else raise REFUTE ("IDT_recursion_interpreter", "different type associations for the same dtyp")) val typ_assoc = filter (fn (Old_Datatype_Aux.DtTFree _, _) => true | (_, _) => false) (rec_typ_assoc [] (#2 (the (AList.lookup (op =) descr idt_index)) ~~ (snd o dest_Type) IDT)) (* sanity check: typ_assoc must associate types to the *) (* elements of 'dtyps' (and only to those) *) val _ = if not (eq_set (op =) (dtyps, map fst typ_assoc)) then raise REFUTE ("IDT_recursion_interpreter", "type association has extra/missing elements") else () (* interpret each constructor in the descriptor (including *) (* those of mutually recursive datatypes) *) (* (int * interpretation list) list *) val mc_intrs = map (fn (idx, (_, _, cs)) => let val c_return_typ = typ_of_dtyp descr typ_assoc (Old_Datatype_Aux.DtRec idx) in (idx, map (fn (cname, cargs) 
=> (#1 o interpret ctxt (typs, []) {maxvars=0, def_eq=false, next_idx=1, bounds=[], wellformed=True}) (Const (cname, map (typ_of_dtyp descr typ_assoc) cargs ---> c_return_typ))) cs) end) descr (* associate constructors with corresponding parameters *) (* (int * (interpretation * interpretation) list) list *) val (mc_p_intrs, p_intrs') = fold_map (fn (idx, c_intrs) => fn p_intrs' => let val len = length c_intrs in ((idx, c_intrs ~~ List.take (p_intrs', len)), List.drop (p_intrs', len)) end) mc_intrs p_intrs (* sanity check: no 'p_intr' may be left afterwards *) val _ = if p_intrs' <> [] then raise REFUTE ("IDT_recursion_interpreter", "more parameter than constructor interpretations") else () (* The recursion operator, applied to 'mconstrs_count' *) (* arguments, is a function that maps every element of the *) (* inductive datatype to an element of some result type. *) (* Recursion operators for mutually recursive IDTs are *) (* translated simultaneously. *) (* Since the order on datatype elements is given by an *) (* order on constructors (and then by the order on *) (* argument tuples), we can simply copy corresponding *) (* subtrees from 'p_intrs', in the order in which they are *) (* given. *) fun ci_pi (Leaf xs, pi) = (* if the constructor does not match the arguments to a *) (* defined element of the IDT, the corresponding value *) (* of the parameter must be ignored *) if List.exists (equal True) xs then [pi] else [] | ci_pi (Node xs, Node ys) = maps ci_pi (xs ~~ ys) | ci_pi (Node _, Leaf _) = raise REFUTE ("IDT_recursion_interpreter", "constructor takes more arguments than the " ^ "associated parameter") val rec_operators = map (fn (idx, c_p_intrs) => (idx, maps ci_pi c_p_intrs)) mc_p_intrs (* sanity check: every recursion operator must provide as *) (* many values as the corresponding datatype *) (* has elements *) val _ = map (fn (idx, intrs) => let val T = typ_of_dtyp descr typ_assoc (Old_Datatype_Aux.DtRec idx) in if length intrs <> size_of_type ctxt (typs, []) T then raise REFUTE ("IDT_recursion_interpreter", "wrong number of interpretations for rec. operator") else () end) rec_operators (* For non-recursive datatypes, we are pretty much done at *) (* this point. For recursive datatypes however, we still *) (* need to apply the interpretations in 'rec_operators' to *) (* (recursively obtained) interpretations for recursive *) (* constructor arguments. To do so more efficiently, we *) (* copy 'rec_operators' into arrays first. Each Boolean *) (* indicates whether the recursive arguments have been *) (* considered already. 
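(* A minimal sketch of the same memoisation pattern in plain SML
   (hypothetical names, added only for exposition; not used by this file):
   each array entry is computed at most once, on demand.

     val cache = Array.array (10, (false, 1))
     fun entry 0 = 1
       | entry i =
           (case Array.sub (cache, i) of
              (true, v) => v
            | (false, _) =>
                let val v = i * entry (i - 1)
                in Array.update (cache, i, (true, v)); v end)

   Here 'entry i' computes the factorial of i and records it in 'cache',
   just as 'compute_array_entry' below fills 'REC_OPERATORS' entry by
   entry. *)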
*) val REC_OPERATORS = map (fn (idx, intrs) => (idx, Array.fromList (map (pair false) intrs))) rec_operators (* takes an interpretation, and if some leaf of this *) (* interpretation is the 'elem'-th element of the type, *) (* the indices of the arguments leading to this leaf are *) (* returned *) fun get_args (Leaf xs) elem = if find_index (fn x => x = True) xs = elem then SOME [] else NONE | get_args (Node xs) elem = let fun search ([], _) = NONE | search (x::xs, n) = (case get_args x elem of SOME result => SOME (n::result) | NONE => search (xs, n+1)) in search (xs, 0) end (* returns the index of the constructor and indices for *) (* its arguments that generate the 'elem'-th element of *) (* the datatype given by 'idx' *) fun get_cargs idx elem = let fun get_cargs_rec (_, []) = raise REFUTE ("IDT_recursion_interpreter", "no matching constructor found for datatype element") | get_cargs_rec (n, x::xs) = (case get_args x elem of SOME args => (n, args) | NONE => get_cargs_rec (n+1, xs)) in get_cargs_rec (0, the (AList.lookup (op =) mc_intrs idx)) end (* computes one entry in 'REC_OPERATORS', and recursively *) (* all entries needed for it, where 'idx' gives the *) (* datatype and 'elem' the element of it *) fun compute_array_entry idx elem = let val arr = the (AList.lookup (op =) REC_OPERATORS idx) val (flag, intr) = Array.sub (arr, elem) in if flag then (* simply return the previously computed result *) intr else (* we have to apply 'intr' to interpretations for all *) (* recursive arguments *) let val (c, args) = get_cargs idx elem (* find the indices of the constructor's /recursive/ *) (* arguments *) val (_, _, constrs) = the (AList.lookup (op =) descr idx) val (_, dtyps) = nth constrs c val rec_dtyps_args = filter (Old_Datatype_Aux.is_rec_type o fst) (dtyps ~~ args) (* map those indices to interpretations *) val rec_dtyps_intrs = map (fn (dtyp, arg) => let val dT = typ_of_dtyp descr typ_assoc dtyp val consts = make_constants ctxt (typs, []) dT val arg_i = nth consts arg in (dtyp, arg_i) end) rec_dtyps_args (* takes the dtyp and interpretation of an element, *) (* and computes the interpretation for the *) (* corresponding recursive argument *) fun rec_intr (Old_Datatype_Aux.DtRec i) (Leaf xs) = (* recursive argument is "rec_i params elem" *) compute_array_entry i (find_index (fn x => x = True) xs) | rec_intr (Old_Datatype_Aux.DtRec _) (Node _) = raise REFUTE ("IDT_recursion_interpreter", "interpretation for IDT is a node") | rec_intr (Old_Datatype_Aux.DtType ("fun", [_, dt2])) (Node xs) = (* recursive argument is something like *) (* "\x::dt1. rec_? params (elem x)" *) Node (map (rec_intr dt2) xs) | rec_intr (Old_Datatype_Aux.DtType ("fun", [_, _])) (Leaf _) = raise REFUTE ("IDT_recursion_interpreter", "interpretation for function dtyp is a leaf") | rec_intr _ _ = (* admissibility ensures that every recursive type *) (* is of the form 'Dt_1 -> ... 
-> Dt_k -> *) (* (DtRec i)' *) raise REFUTE ("IDT_recursion_interpreter", "non-recursive codomain in recursive dtyp") (* obtain interpretations for recursive arguments *) (* interpretation list *) val arg_intrs = map (uncurry rec_intr) rec_dtyps_intrs (* apply 'intr' to all recursive arguments *) val result = fold (fn arg_i => fn i => interpretation_apply (i, arg_i)) arg_intrs intr (* update 'REC_OPERATORS' *) val _ = Array.update (arr, elem, (true, result)) in result end end val idt_size = Array.length (the (AList.lookup (op =) REC_OPERATORS idt_index)) (* sanity check: the size of 'IDT' should be 'size_idt' *) val _ = if idt_size <> size_of_type ctxt (typs, []) IDT then raise REFUTE ("IDT_recursion_interpreter", "unexpected size of IDT (wrong type associated?)") else () val rec_op = Node (map_range (compute_array_entry idt_index) idt_size) in SOME (rec_op, model', args') end end else NONE (* not a recursion operator of this datatype *) ) (BNF_LFP_Compat.get_all thy []) NONE | _ => (* head of term is not a constant *) NONE end; fun set_interpreter ctxt model args t = let val (typs, terms) = model in case AList.lookup (op =) terms t of SOME intr => (* return an existing interpretation *) SOME (intr, model, args) | NONE => (case t of Free (x, Type (\<^type_name>\set\, [T])) => let val (intr, _, args') = interpret ctxt (typs, []) args (Free (x, T --> HOLogic.boolT)) in SOME (intr, (typs, (t, intr)::terms), args') end | Var ((x, i), Type (\<^type_name>\set\, [T])) => let val (intr, _, args') = interpret ctxt (typs, []) args (Var ((x,i), T --> HOLogic.boolT)) in SOME (intr, (typs, (t, intr)::terms), args') end | Const (s, Type (\<^type_name>\set\, [T])) => let val (intr, _, args') = interpret ctxt (typs, []) args (Const (s, T --> HOLogic.boolT)) in SOME (intr, (typs, (t, intr)::terms), args') end (* 'Collect' == identity *) | Const (\<^const_name>\Collect\, _) $ t1 => SOME (interpret ctxt model args t1) | Const (\<^const_name>\Collect\, _) => SOME (interpret ctxt model args (eta_expand t 1)) (* 'op :' == application *) | Const (\<^const_name>\Set.member\, _) $ t1 $ t2 => SOME (interpret ctxt model args (t2 $ t1)) | Const (\<^const_name>\Set.member\, _) $ _ => SOME (interpret ctxt model args (eta_expand t 1)) | Const (\<^const_name>\Set.member\, _) => SOME (interpret ctxt model args (eta_expand t 2)) | _ => NONE) end; (* only an optimization: 'card' could in principle be interpreted with *) (* interpreters available already (using its definition), but the code *) (* below is more efficient *) fun Finite_Set_card_interpreter ctxt model args t = case t of Const (\<^const_name>\Finite_Set.card\, Type ("fun", [Type (\<^type_name>\set\, [T]), \<^typ>\nat\])) => let fun number_of_elements (Node xs) = fold (fn x => fn n => if x = TT then n + 1 else if x = FF then n else raise REFUTE ("Finite_Set_card_interpreter", "interpretation for set type does not yield a Boolean")) xs 0 | number_of_elements (Leaf _) = raise REFUTE ("Finite_Set_card_interpreter", "interpretation for set type is a leaf") val size_of_nat = size_of_type ctxt model (\<^typ>\nat\) (* takes an interpretation for a set and returns an interpretation *) (* for a 'nat' denoting the set's cardinality *) fun card i = let val n = number_of_elements i in if n < size_of_nat then Leaf ((replicate n False) @ True :: (replicate (size_of_nat-n-1) False)) else Leaf (replicate size_of_nat False) end val set_constants = make_constants ctxt model (HOLogic.mk_setT T) in SOME (Node (map card set_constants), model, args) end | _ => NONE; (* only an 
optimization: 'finite' could in principle be interpreted with *) (* interpreters available already (using its definition), but the code *) (* below is more efficient *) fun Finite_Set_finite_interpreter ctxt model args t = case t of Const (\<^const_name>\Finite_Set.finite\, Type ("fun", [_, \<^typ>\bool\])) $ _ => (* we only consider finite models anyway, hence EVERY set is *) (* "finite" *) SOME (TT, model, args) | Const (\<^const_name>\Finite_Set.finite\, Type ("fun", [set_T, \<^typ>\bool\])) => let val size_of_set = size_of_type ctxt model set_T in (* we only consider finite models anyway, hence EVERY set is *) (* "finite" *) SOME (Node (replicate size_of_set TT), model, args) end | _ => NONE; (* only an optimization: 'less' could in principle be interpreted with *) (* interpreters available already (using its definition), but the code *) (* below is more efficient *) fun Nat_less_interpreter ctxt model args t = case t of Const (\<^const_name>\Orderings.less\, Type ("fun", [\<^typ>\nat\, Type ("fun", [\<^typ>\nat\, \<^typ>\bool\])])) => let val size_of_nat = size_of_type ctxt model (\<^typ>\nat\) (* the 'n'-th nat is not less than the first 'n' nats, while it *) (* is less than the remaining 'size_of_nat - n' nats *) fun less n = Node ((replicate n FF) @ (replicate (size_of_nat - n) TT)) in SOME (Node (map less (1 upto size_of_nat)), model, args) end | _ => NONE; (* only an optimization: 'plus' could in principle be interpreted with *) (* interpreters available already (using its definition), but the code *) (* below is more efficient *) fun Nat_plus_interpreter ctxt model args t = case t of Const (\<^const_name>\Groups.plus\, Type ("fun", [\<^typ>\nat\, Type ("fun", [\<^typ>\nat\, \<^typ>\nat\])])) => let val size_of_nat = size_of_type ctxt model (\<^typ>\nat\) fun plus m n = let val element = m + n in if element > size_of_nat - 1 then Leaf (replicate size_of_nat False) else Leaf ((replicate element False) @ True :: (replicate (size_of_nat - element - 1) False)) end in SOME (Node (map_range (fn m => Node (map_range (plus m) size_of_nat)) size_of_nat), model, args) end | _ => NONE; (* only an optimization: 'minus' could in principle be interpreted *) (* with interpreters available already (using its definition), but the *) (* code below is more efficient *) fun Nat_minus_interpreter ctxt model args t = case t of Const (\<^const_name>\Groups.minus\, Type ("fun", [\<^typ>\nat\, Type ("fun", [\<^typ>\nat\, \<^typ>\nat\])])) => let val size_of_nat = size_of_type ctxt model (\<^typ>\nat\) fun minus m n = let val element = Int.max (m-n, 0) in Leaf ((replicate element False) @ True :: (replicate (size_of_nat - element - 1) False)) end in SOME (Node (map_range (fn m => Node (map_range (minus m) size_of_nat)) size_of_nat), model, args) end | _ => NONE; (* only an optimization: 'times' could in principle be interpreted *) (* with interpreters available already (using its definition), but the *) (* code below is more efficient *) fun Nat_times_interpreter ctxt model args t = case t of Const (\<^const_name>\Groups.times\, Type ("fun", [\<^typ>\nat\, Type ("fun", [\<^typ>\nat\, \<^typ>\nat\])])) => let val size_of_nat = size_of_type ctxt model (\<^typ>\nat\) fun mult m n = let val element = m * n in if element > size_of_nat - 1 then Leaf (replicate size_of_nat False) else Leaf ((replicate element False) @ True :: (replicate (size_of_nat - element - 1) False)) end in SOME (Node (map_range (fn m => Node (map_range (mult m) size_of_nat)) size_of_nat), model, args) end | _ => NONE; (* only an 
optimization: 'append' could in principle be interpreted with *) (* interpreters available already (using its definition), but the code *) (* below is more efficient *) fun List_append_interpreter ctxt model args t = case t of Const (\<^const_name>\append\, Type (\<^type_name>\fun\, [Type (\<^type_name>\list\, [T]), Type (\<^type_name>\fun\, [Type (\<^type_name>\list\, [_]), Type (\<^type_name>\list\, [_])])])) => let val size_elem = size_of_type ctxt model T val size_list = size_of_type ctxt model (Type (\<^type_name>\list\, [T])) (* maximal length of lists; 0 if we only consider the empty list *) val list_length = let fun list_length_acc len lists total = if lists = total then len else if lists < total then list_length_acc (len+1) (lists*size_elem) (total-lists) else raise REFUTE ("List_append_interpreter", "size_list not equal to 1 + size_elem + ... + " ^ "size_elem^len, for some len") in list_length_acc 0 1 size_list end val elements = 0 upto (size_list-1) (* FIXME: there should be a nice formula, which computes the same as *) (* the following, but without all this intermediate tree *) (* length/offset stuff *) (* associate each list with its length and offset in a complete tree *) (* of width 'size_elem' and depth 'length_list' (with 'size_list' *) (* nodes total) *) (* (int * (int * int)) list *) val (lenoff_lists, _) = fold_map (fn elem => fn (offsets, off) => (* corresponds to a pre-order traversal of the tree *) let val len = length offsets (* associate the given element with len/off *) val assoc = (elem, (len, off)) in if len < list_length then (* go to first child node *) (assoc, (off :: offsets, off * size_elem)) else if off mod size_elem < size_elem - 1 then (* go to next sibling node *) (assoc, (offsets, off + 1)) else (* go back up the stack until we find a level where we can go *) (* to the next sibling node *) let val offsets' = drop_prefix (fn off' => off' mod size_elem = size_elem - 1) offsets in case offsets' of [] => (* we're at the last node in the tree; the next value *) (* won't be used anyway *) (assoc, ([], 0)) | off'::offs' => (* go to next sibling node *) (assoc, (offs', off' + 1)) end end) elements ([], 0) (* we also need the reverse association (from length/offset to *) (* index) *) val lenoff'_lists = map Library.swap lenoff_lists (* returns the interpretation for "(list no. m) @ (list no. 
n)" *) fun append m n = let val (len_m, off_m) = the (AList.lookup (op =) lenoff_lists m) val (len_n, off_n) = the (AList.lookup (op =) lenoff_lists n) val len_elem = len_m + len_n val off_elem = off_m * Integer.pow len_n size_elem + off_n in case AList.lookup op= lenoff'_lists (len_elem, off_elem) of NONE => (* undefined *) Leaf (replicate size_list False) | SOME element => Leaf ((replicate element False) @ True :: (replicate (size_list - element - 1) False)) end in SOME (Node (map (fn m => Node (map (append m) elements)) elements), model, args) end | _ => NONE; (* only an optimization: 'fst' could in principle be interpreted with *) (* interpreters available already (using its definition), but the code *) (* below is more efficient *) fun Product_Type_fst_interpreter ctxt model args t = case t of Const (\<^const_name>\fst\, Type ("fun", [Type (\<^type_name>\Product_Type.prod\, [T, U]), _])) => let val constants_T = make_constants ctxt model T val size_U = size_of_type ctxt model U in SOME (Node (maps (replicate size_U) constants_T), model, args) end | _ => NONE; (* only an optimization: 'snd' could in principle be interpreted with *) (* interpreters available already (using its definition), but the code *) (* below is more efficient *) fun Product_Type_snd_interpreter ctxt model args t = case t of Const (\<^const_name>\snd\, Type ("fun", [Type (\<^type_name>\Product_Type.prod\, [T, U]), _])) => let val size_T = size_of_type ctxt model T val constants_U = make_constants ctxt model U in SOME (Node (flat (replicate size_T constants_U)), model, args) end | _ => NONE; (* ------------------------------------------------------------------------- *) (* PRINTERS *) (* ------------------------------------------------------------------------- *) fun stlc_printer ctxt model T intr assignment = let val strip_leading_quote = perhaps (try (unprefix "'")) fun string_of_typ (Type (s, _)) = s | string_of_typ (TFree (x, _)) = strip_leading_quote x | string_of_typ (TVar ((x,i), _)) = strip_leading_quote x ^ string_of_int i fun index_from_interpretation (Leaf xs) = find_index (Prop_Logic.eval assignment) xs | index_from_interpretation _ = raise REFUTE ("stlc_printer", "interpretation for ground type is not a leaf") in case T of Type ("fun", [T1, T2]) => let (* create all constants of type 'T1' *) val constants = make_constants ctxt model T1 val results = (case intr of Node xs => xs | _ => raise REFUTE ("stlc_printer", "interpretation for function type is a leaf")) (* Term.term list *) val pairs = map (fn (arg, result) => HOLogic.mk_prod (print ctxt model T1 arg assignment, print ctxt model T2 result assignment)) (constants ~~ results) val HOLogic_prodT = HOLogic.mk_prodT (T1, T2) val HOLogic_setT = HOLogic.mk_setT HOLogic_prodT val HOLogic_empty_set = Const (\<^const_abbrev>\Set.empty\, HOLogic_setT) val HOLogic_insert = Const (\<^const_name>\insert\, HOLogic_prodT --> HOLogic_setT --> HOLogic_setT) in SOME (fold_rev (fn pair => fn acc => HOLogic_insert $ pair $ acc) pairs HOLogic_empty_set) end | Type (\<^type_name>\prop\, []) => (case index_from_interpretation intr of ~1 => SOME (HOLogic.mk_Trueprop (Const (\<^const_name>\undefined\, HOLogic.boolT))) | 0 => SOME (HOLogic.mk_Trueprop \<^term>\True\) | 1 => SOME (HOLogic.mk_Trueprop \<^term>\False\) | _ => raise REFUTE ("stlc_interpreter", "illegal interpretation for a propositional value")) | Type _ => if index_from_interpretation intr = (~1) then SOME (Const (\<^const_name>\undefined\, T)) else SOME (Const (string_of_typ T ^ string_of_int 
(index_from_interpretation intr), T)) | TFree _ => if index_from_interpretation intr = (~1) then SOME (Const (\<^const_name>\undefined\, T)) else SOME (Const (string_of_typ T ^ string_of_int (index_from_interpretation intr), T)) | TVar _ => if index_from_interpretation intr = (~1) then SOME (Const (\<^const_name>\undefined\, T)) else SOME (Const (string_of_typ T ^ string_of_int (index_from_interpretation intr), T)) end; fun set_printer ctxt model T intr assignment = (case T of Type (\<^type_name>\set\, [T1]) => let (* create all constants of type 'T1' *) val constants = make_constants ctxt model T1 val results = (case intr of Node xs => xs | _ => raise REFUTE ("set_printer", "interpretation for set type is a leaf")) val elements = map_filter (fn (arg, result) => case result of Leaf [fmTrue, (* fmFalse *) _] => if Prop_Logic.eval assignment fmTrue then SOME (print ctxt model T1 arg assignment) else (* if Prop_Logic.eval assignment fmFalse then *) NONE | _ => raise REFUTE ("set_printer", "illegal interpretation for a Boolean value")) (constants ~~ results) val HOLogic_setT1 = HOLogic.mk_setT T1 val HOLogic_empty_set = Const (\<^const_abbrev>\Set.empty\, HOLogic_setT1) val HOLogic_insert = Const (\<^const_name>\insert\, T1 --> HOLogic_setT1 --> HOLogic_setT1) in SOME (Library.foldl (fn (acc, elem) => HOLogic_insert $ elem $ acc) (HOLogic_empty_set, elements)) end | _ => NONE); fun IDT_printer ctxt model T intr assignment = let val thy = Proof_Context.theory_of ctxt in (case T of Type (s, Ts) => (case BNF_LFP_Compat.get_info thy [] s of SOME info => (* inductive datatype *) let val (typs, _) = model val index = #index info val descr = #descr info val (_, dtyps, constrs) = the (AList.lookup (op =) descr index) val typ_assoc = dtyps ~~ Ts (* sanity check: every element in 'dtyps' must be a 'DtTFree' *) val _ = if Library.exists (fn d => case d of Old_Datatype_Aux.DtTFree _ => false | _ => true) dtyps then raise REFUTE ("IDT_printer", "datatype argument (for type " ^ Syntax.string_of_typ ctxt (Type (s, Ts)) ^ ") is not a variable") else () (* the index of the element in the datatype *) val element = (case intr of Leaf xs => find_index (Prop_Logic.eval assignment) xs | Node _ => raise REFUTE ("IDT_printer", "interpretation is not a leaf")) in if element < 0 then SOME (Const (\<^const_name>\undefined\, Type (s, Ts))) else let (* takes a datatype constructor, and if for some arguments this *) (* constructor generates the datatype's element that is given by *) (* 'element', returns the constructor (as a term) as well as the *) (* indices of the arguments *) fun get_constr_args (cname, cargs) = let val cTerm = Const (cname, map (typ_of_dtyp descr typ_assoc) cargs ---> Type (s, Ts)) val (iC, _, _) = interpret ctxt (typs, []) {maxvars=0, def_eq=false, next_idx=1, bounds=[], wellformed=True} cTerm fun get_args (Leaf xs) = if find_index (fn x => x = True) xs = element then SOME [] else NONE | get_args (Node xs) = let fun search ([], _) = NONE | search (x::xs, n) = (case get_args x of SOME result => SOME (n::result) | NONE => search (xs, n+1)) in search (xs, 0) end in Option.map (fn args => (cTerm, cargs, args)) (get_args iC) end val (cTerm, cargs, args) = (* we could speed things up by computing the correct *) (* constructor directly (rather than testing all *) (* constructors), based on the order in which constructors *) (* generate elements of datatypes; the current implementation *) (* of 'IDT_printer' however is independent of the internals *) (* of 'IDT_constructor_interpreter' *) (case get_first 
get_constr_args constrs of SOME x => x | NONE => raise REFUTE ("IDT_printer", "no matching constructor found for element " ^ string_of_int element)) val argsTerms = map (fn (d, n) => let val dT = typ_of_dtyp descr typ_assoc d (* we only need the n-th element of this list, so there *) (* might be a more efficient implementation that does not *) (* generate all constants *) val consts = make_constants ctxt (typs, []) dT in print ctxt (typs, []) dT (nth consts n) assignment end) (cargs ~~ args) in SOME (list_comb (cTerm, argsTerms)) end end | NONE => (* not an inductive datatype *) NONE) | _ => (* a (free or schematic) type variable *) NONE) end; (* ------------------------------------------------------------------------- *) (* Note: the interpreters and printers are used in reverse order; however, *) (* an interpreter that can handle non-atomic terms ends up being *) (* applied before the 'stlc_interpreter' breaks the term apart into *) (* subterms that are then passed to other interpreters! *) (* ------------------------------------------------------------------------- *) (* FIXME formal @{const_name} for some entries (!??) *) val _ = Theory.setup (add_interpreter "stlc" stlc_interpreter #> add_interpreter "Pure" Pure_interpreter #> add_interpreter "HOLogic" HOLogic_interpreter #> add_interpreter "set" set_interpreter #> add_interpreter "IDT" IDT_interpreter #> add_interpreter "IDT_constructor" IDT_constructor_interpreter #> add_interpreter "IDT_recursion" IDT_recursion_interpreter #> add_interpreter "Finite_Set.card" Finite_Set_card_interpreter #> add_interpreter "Finite_Set.finite" Finite_Set_finite_interpreter #> add_interpreter "Nat_Orderings.less" Nat_less_interpreter #> add_interpreter "Nat_HOL.plus" Nat_plus_interpreter #> add_interpreter "Nat_HOL.minus" Nat_minus_interpreter #> add_interpreter "Nat_HOL.times" Nat_times_interpreter #> add_interpreter "List.append" List_append_interpreter #> add_interpreter "Product_Type.prod.fst" Product_Type_fst_interpreter #> add_interpreter "Product_Type.prod.snd" Product_Type_snd_interpreter #> add_printer "stlc" stlc_printer #> add_printer "set" set_printer #> add_printer "IDT" IDT_printer); (** outer syntax commands 'refute' and 'refute_params' **) (* argument parsing *) (*optional list of arguments of the form [name1=value1, name2=value2, ...]*) val scan_parm = Parse.name -- (Scan.optional (\<^keyword>\=\ |-- Parse.name) "true") val scan_parms = Scan.optional (\<^keyword>\[\ |-- Parse.list scan_parm --| \<^keyword>\]\) []; (* 'refute' command *) val _ = Outer_Syntax.command \<^command_keyword>\refute\ "try to find a model that refutes a given subgoal" (scan_parms -- Scan.optional Parse.nat 1 >> (fn (parms, i) => Toplevel.keep_proof (fn state => let val ctxt = Toplevel.context_of state; val {goal = st, ...} = Proof.raw_goal (Toplevel.proof_of state); in refute_goal ctxt parms st i; () end))); (* 'refute_params' command *) val _ = Outer_Syntax.command \<^command_keyword>\refute_params\ "show/store default parameters for the 'refute' command" (scan_parms >> (fn parms => Toplevel.theory (fn thy => let val thy' = fold set_default_param parms thy; val output = (case get_default_params (Proof_Context.init_global thy') of [] => "none" | new_defaults => cat_lines (map (fn (x, y) => x ^ "=" ^ y) new_defaults)); val _ = writeln ("Default parameters for 'refute':\n" ^ output); in thy' end))); end; diff --git a/src/HOL/Tools/Nunchaku/nunchaku.ML b/src/HOL/Tools/Nunchaku/nunchaku.ML --- a/src/HOL/Tools/Nunchaku/nunchaku.ML +++ 
b/src/HOL/Tools/Nunchaku/nunchaku.ML @@ -1,340 +1,340 @@ (* Title: HOL/Tools/Nunchaku/nunchaku.ML Author: Jasmin Blanchette, VU Amsterdam Copyright 2015, 2016, 2017 The core of the Nunchaku integration in Isabelle. *) signature NUNCHAKU = sig type isa_model = Nunchaku_Reconstruct.isa_model datatype mode = Auto_Try | Try | Normal type mode_of_operation_params = {solvers: string list, falsify: bool, assms: bool, spy: bool, overlord: bool, expect: string} type scope_of_search_params = {wfs: ((string * typ) option * bool option) list, whacks: (term option * bool) list, min_bound: int, max_bound: int option, bound_increment: int, cards: (typ option * (int option * int option)) list, monos: (typ option * bool option) list} type output_format_params = {verbose: bool, debug: bool, max_potential: int, max_genuine: int, evals: term list, atomss: (typ option * string list) list} type optimization_params = {specialize: bool, multithread: bool} type timeout_params = {timeout: Time.time, wf_timeout: Time.time} type params = {mode_of_operation_params: mode_of_operation_params, scope_of_search_params: scope_of_search_params, output_format_params: output_format_params, optimization_params: optimization_params, timeout_params: timeout_params} val genuineN: string val quasi_genuineN: string val potentialN: string val noneN: string val unknownN: string val no_nunchakuN: string val run_chaku_on_prop: Proof.state -> params -> mode -> int -> term list -> term -> string * isa_model option val run_chaku_on_subgoal: Proof.state -> params -> mode -> int -> string * isa_model option end; structure Nunchaku : NUNCHAKU = struct open Nunchaku_Util; open Nunchaku_Collect; open Nunchaku_Problem; open Nunchaku_Translate; open Nunchaku_Model; open Nunchaku_Reconstruct; open Nunchaku_Display; open Nunchaku_Tool; datatype mode = Auto_Try | Try | Normal; type mode_of_operation_params = {solvers: string list, falsify: bool, assms: bool, spy: bool, overlord: bool, expect: string}; type scope_of_search_params = {wfs: ((string * typ) option * bool option) list, whacks: (term option * bool) list, min_bound: int, max_bound: int option, bound_increment: int, cards: (typ option * (int option * int option)) list, monos: (typ option * bool option) list}; type output_format_params = {verbose: bool, debug: bool, max_potential: int, max_genuine: int, evals: term list, atomss: (typ option * string list) list}; type optimization_params = {specialize: bool, multithread: bool}; type timeout_params = {timeout: Time.time, wf_timeout: Time.time}; type params = {mode_of_operation_params: mode_of_operation_params, scope_of_search_params: scope_of_search_params, output_format_params: output_format_params, optimization_params: optimization_params, timeout_params: timeout_params}; val genuineN = "genuine"; val quasi_genuineN = "quasi_genuine"; val potentialN = "potential"; val noneN = "none"; val unknownN = "unknown"; val no_nunchakuN = "no_nunchaku"; fun str_of_mode Auto_Try = "Auto_Try" | str_of_mode Try = "Try" | str_of_mode Normal = "Normal"; fun none_true assigns = forall (curry (op <>) (SOME true) o snd) assigns; fun has_lonely_bool_var \<^Const_>\Pure.conjunction for \<^Const_>\Trueprop for \Free _\\ _\ = true | has_lonely_bool_var _ = false; val syntactic_sorts = \<^sort>\{default,zero,one,plus,minus,uminus,times,inverse,abs,sgn,ord,equal}\ @ \<^sort>\numeral\; fun has_tfree_syntactic_sort (TFree (_, S as _ :: _)) = subset (op =) (S, syntactic_sorts) | has_tfree_syntactic_sort _ = false; val has_syntactic_sorts = exists_type (exists_subtype 
has_tfree_syntactic_sort); val unprefix_error = perhaps (try (unprefix "failure: ")) #> perhaps (try (unprefix "Error: ")); (* Give the soft timeout a chance. *) val timeout_slack = seconds 1.0; fun run_chaku_on_prop state ({mode_of_operation_params = {solvers, falsify, assms, spy, overlord, expect}, scope_of_search_params = {wfs, whacks, min_bound, max_bound, bound_increment, cards, monos}, output_format_params = {verbose, debug, evals, atomss, ...}, optimization_params = {specialize, ...}, timeout_params = {timeout, wf_timeout}}) mode i all_assms subgoal = let val ctxt = Proof.context_of state; val time_start = Time.now () val print = writeln; val print_n = if mode = Normal then writeln else K (); fun print_v f = if verbose then writeln (f ()) else (); fun print_d f = if debug then writeln (f ()) else (); val das_wort_Model = if falsify then "Countermodel" else "Model"; val das_wort_model = if falsify then "countermodel" else "model"; val tool_params = {solvers = solvers, overlord = overlord, min_bound = min_bound, max_bound = max_bound, bound_increment = bound_increment, debug = debug, specialize = specialize, timeout = timeout}; fun run () = let val outcome as (outcome_code, _) = let val (poly_axioms, isa_problem as {sound, complete, ...}) = isa_problem_of_subgoal ctxt falsify wfs whacks cards debug wf_timeout evals (if assms then all_assms else []) subgoal; val _ = print_d (fn () => "*** Isabelle problem ***\n" ^ str_of_isa_problem ctxt isa_problem); val ugly_nun_problem = nun_problem_of_isa ctxt isa_problem; val _ = print_d (fn () => "*** Ugly Nunchaku problem ***\n" ^ str_of_nun_problem ugly_nun_problem); val (nice_nun_problem, pool) = nice_nun_problem ugly_nun_problem; val _ = print_d (fn () => "*** Nice Nunchaku problem ***\n" ^ str_of_nun_problem nice_nun_problem); fun print_any_hints () = if has_lonely_bool_var subgoal then print "Hint: Maybe you forgot a colon after the lemma's name?" else if has_syntactic_sorts subgoal then print "Hint: Maybe you forgot a type constraint?" 
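(* Note (added for exposition): the first hint fires when the goal is a
   conjunction whose first conjunct is a lonely Boolean variable, which
   typically happens when the colon after a lemma name is forgotten, e.g.
   writing  lemma foo "P x"  instead of  lemma foo: "P x".  The second hint
   fires when some free type variable carries only "syntactic" sorts such as
   plus or numeral, e.g. stating  lemma "x + y = y + x"  without any type
   constraint, so that the type of x is a variable constrained only by the
   syntactic sort plus. *)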
else (); fun get_isa_model_opt output = let val nice_nun_model = nun_model_of_str output; val _ = print_d (fn () => "*** Nice Nunchaku model ***\n" ^ str_of_nun_model nice_nun_model); val ugly_nun_model = ugly_nun_model pool nice_nun_model; val _ = print_d (fn () => "*** Ugly Nunchaku model ***\n" ^ str_of_nun_model ugly_nun_model); val pat_completes = pat_completes_of_isa_problem isa_problem; val raw_isa_model = isa_model_of_nun ctxt pat_completes atomss ugly_nun_model; val _ = print_d (fn () => "*** Raw Isabelle model ***\n" ^ str_of_isa_model ctxt raw_isa_model); val cleaned_isa_model = clean_up_isa_model ctxt raw_isa_model; val _ = print_d (fn () => "*** Cleaned-up Isabelle model ***\n" ^ str_of_isa_model ctxt cleaned_isa_model); in cleaned_isa_model end; fun isa_model_opt output = if debug then SOME (get_isa_model_opt output) else try get_isa_model_opt output; val model_str = isa_model_opt #> pretty_of_isa_model_opt ctxt #> Pretty.string_of; fun unsat_means_theorem () = null whacks andalso null cards andalso null monos; fun unknown () = (print_n ("No " ^ das_wort_model ^ " found"); (unknownN, NONE)); fun unsat_or_unknown solver complete = if complete then (print_n ("No " ^ das_wort_model ^ " exists (according to " ^ solver ^ ")" ^ (if falsify andalso unsat_means_theorem () then "\nThe goal is a theorem" else "")); (noneN, NONE)) else unknown (); fun sat_or_maybe_sat solver sound output = let val header = if sound then das_wort_Model else "Potential " ^ das_wort_model in (case (null poly_axioms, none_true wfs) of (true, true) => (print (header ^ " (according to " ^ solver ^ "):\n" ^ model_str output); print_any_hints (); (genuineN, isa_model_opt output)) | (no_poly, no_wf) => let val ignorings = [] |> not no_poly ? cons "polymorphic axioms" |> not no_wf ? 
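(* Note (added for exposition): 'b ? f' is Isabelle/ML's conditional
   combinator, equivalent to 'if b then f else I', so each description is
   added to the list of ignored issues only when the corresponding issue is
   actually present. *)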
cons "unchecked well-foundedness"; in (print (header ^ " (according to " ^ solver ^ ", ignoring " ^ space_implode " and " ignorings ^ "):\n" ^ model_str output ^ (if no_poly then "" else "\nIgnored axioms:\n" ^ cat_lines (map (prefix " " o Syntax.string_of_term ctxt) poly_axioms))); print_any_hints (); (quasi_genuineN, isa_model_opt output)) end) end; in (case solve_nun_problem tool_params nice_nun_problem of Unsat solver => unsat_or_unknown solver complete | Sat (solver, output, _) => sat_or_maybe_sat solver sound output | Unknown NONE => unknown () | Unknown (SOME (solver, output, _)) => sat_or_maybe_sat solver false output | Timeout => (print_n "Time out"; (unknownN, NONE)) | Nunchaku_Var_Not_Set => (print_n ("Variable $" ^ nunchaku_home_env_var ^ " not set"); (unknownN, NONE)) | Nunchaku_Cannot_Execute => (print_n "External tool \"nunchaku\" cannot execute"; (unknownN, NONE)) | Nunchaku_Not_Found => (print_n "External tool \"nunchaku\" not found"; (unknownN, NONE)) | CVC4_Cannot_Execute => (print_n "External tool \"cvc4\" cannot execute"; (unknownN, NONE)) | CVC4_Not_Found => (print_n "External tool \"cvc4\" not found"; (unknownN, NONE)) | Unknown_Error (code, msg) => (print_n ("Error: " ^ unprefix_error msg ^ (if code = 0 then "" else " (code " ^ string_of_int code ^ ")")); (unknownN, NONE))) end handle CYCLIC_DEPS () => (print_n "Cyclic dependencies (or bug in Nunchaku)"; (unknownN, NONE)) | TOO_DEEP_DEPS () => (print_n "Too deep dependencies (or bug in Nunchaku)"; (unknownN, NONE)) | TOO_META t => (print_n ("Formula too meta for Nunchaku:\n" ^ Syntax.string_of_term ctxt t); (unknownN, NONE)) | UNEXPECTED_POLYMORPHISM t => (print_n ("Unexpected polymorphism in term\n" ^ Syntax.string_of_term ctxt t); (unknownN, NONE)) | UNEXPECTED_VAR t => (print_n ("Unexpected schematic variables in term\n" ^ Syntax.string_of_term ctxt t); (unknownN, NONE)) | UNSUPPORTED_FUNC t => (print_n ("Unsupported low-level constant in problem: " ^ Syntax.string_of_term ctxt t); (unknownN, NONE)); in if expect = "" orelse outcome_code = expect then outcome else error ("Unexpected outcome: " ^ quote outcome_code) end; val _ = spying spy (fn () => (state, i, "starting " ^ str_of_mode mode ^ " mode")); val outcome as (outcome_code, _) = Timeout.apply (timeout + timeout_slack) run () handle Timeout.TIMEOUT _ => (print_n "Time out"; (unknownN, NONE)); val _ = print_v (fn () => "Total time: " ^ string_of_time (Time.now () - time_start)); val _ = spying spy (fn () => (state, i, "outcome: " ^ outcome_code)); in if expect = "" orelse outcome_code = expect then outcome else error ("Unexpected outcome: " ^ quote outcome_code) end; fun run_chaku_on_subgoal state params mode i = let val ctxt = Proof.context_of state; val goal = Thm.prop_of (#goal (Proof.raw_goal state)); in - if Logic.count_prems goal = 0 then + if Logic.no_prems goal then (writeln "No subgoal!"; (noneN, NONE)) else let val subgoal = fst (Logic.goal_params goal i); val all_assms = map Thm.term_of (Assumption.all_assms_of ctxt); in run_chaku_on_prop state params mode i all_assms subgoal end end; end; diff --git a/src/Pure/Isar/element.ML b/src/Pure/Isar/element.ML --- a/src/Pure/Isar/element.ML +++ b/src/Pure/Isar/element.ML @@ -1,473 +1,473 @@ (* Title: Pure/Isar/element.ML Author: Makarius Explicit data structures for some Isar language elements, with derived logical operations. 
*) signature ELEMENT = sig type ('typ, 'term) obtain = binding * ((binding * 'typ option * mixfix) list * 'term list) type obtains = (string, string) obtain list type obtains_i = (typ, term) obtain list datatype ('typ, 'term) stmt = Shows of (Attrib.binding * ('term * 'term list) list) list | Obtains of ('typ, 'term) obtain list type statement = (string, string) stmt type statement_i = (typ, term) stmt datatype ('typ, 'term, 'fact) ctxt = Fixes of (binding * 'typ option * mixfix) list | Constrains of (string * 'typ) list | Assumes of (Attrib.binding * ('term * 'term list) list) list | Defines of (Attrib.binding * ('term * 'term list)) list | Notes of string * (Attrib.binding * ('fact * Token.src list) list) list | Lazy_Notes of string * (binding * 'fact lazy) type context = (string, string, Facts.ref) ctxt type context_i = (typ, term, thm list) ctxt val map_ctxt: {binding: binding -> binding, typ: 'typ -> 'a, term: 'term -> 'b, pattern: 'term -> 'b, fact: 'fact -> 'c, attrib: Token.src -> Token.src} -> ('typ, 'term, 'fact) ctxt -> ('a, 'b, 'c) ctxt val map_ctxt_attrib: (Token.src -> Token.src) -> ('typ, 'term, 'fact) ctxt -> ('typ, 'term, 'fact) ctxt val transform_ctxt: morphism -> context_i -> context_i val pretty_stmt: Proof.context -> statement_i -> Pretty.T list val pretty_ctxt: Proof.context -> context_i -> Pretty.T list val pretty_ctxt_no_attribs: Proof.context -> context_i -> Pretty.T list val pretty_statement: Proof.context -> string -> thm -> Pretty.T type witness val prove_witness: Proof.context -> term -> tactic -> witness val witness_proof: (witness list list -> Proof.context -> Proof.context) -> term list list -> Proof.context -> Proof.state val witness_proof_eqs: (witness list list -> thm list -> Proof.context -> Proof.context) -> term list list -> term list -> Proof.context -> Proof.state val witness_local_proof: (witness list list -> Proof.state -> Proof.state) -> string -> term list list -> Proof.context -> Proof.state -> Proof.state val witness_local_proof_eqs: (witness list list -> thm list -> Proof.state -> Proof.state) -> string -> term list list -> term list -> Proof.context -> Proof.state -> Proof.state val transform_witness: morphism -> witness -> witness val conclude_witness: Proof.context -> witness -> thm val pretty_witness: Proof.context -> witness -> Pretty.T val instantiate_normalize_morphism: ctyp TFrees.table * cterm Frees.table -> morphism val satisfy_morphism: witness list -> morphism val eq_term_morphism: theory -> term list -> morphism option val eq_morphism: theory -> thm list -> morphism option val init: context_i -> Context.generic -> Context.generic val activate_i: context_i -> Proof.context -> context_i * Proof.context val activate: (typ, term, Facts.ref) ctxt -> Proof.context -> context_i * Proof.context end; structure Element: ELEMENT = struct (** language elements **) (* statement *) type ('typ, 'term) obtain = binding * ((binding * 'typ option * mixfix) list * 'term list); type obtains = (string, string) obtain list; type obtains_i = (typ, term) obtain list; datatype ('typ, 'term) stmt = Shows of (Attrib.binding * ('term * 'term list) list) list | Obtains of ('typ, 'term) obtain list; type statement = (string, string) stmt; type statement_i = (typ, term) stmt; (* context *) datatype ('typ, 'term, 'fact) ctxt = Fixes of (binding * 'typ option * mixfix) list | Constrains of (string * 'typ) list | Assumes of (Attrib.binding * ('term * 'term list) list) list | Defines of (Attrib.binding * ('term * 'term list)) list | Notes of string * 
(Attrib.binding * ('fact * Token.src list) list) list | Lazy_Notes of string * (binding * 'fact lazy); type context = (string, string, Facts.ref) ctxt; type context_i = (typ, term, thm list) ctxt; fun map_ctxt {binding, typ, term, pattern, fact, attrib} = fn Fixes fixes => Fixes (fixes |> map (fn (x, T, mx) => (binding x, Option.map typ T, mx))) | Constrains xs => Constrains (xs |> map (fn (x, T) => (Variable.check_name (binding (Binding.name x)), typ T))) | Assumes asms => Assumes (asms |> map (fn ((a, atts), propps) => ((binding a, map attrib atts), propps |> map (fn (t, ps) => (term t, map pattern ps))))) | Defines defs => Defines (defs |> map (fn ((a, atts), (t, ps)) => ((binding a, map attrib atts), (term t, map pattern ps)))) | Notes (kind, facts) => Notes (kind, facts |> map (fn ((a, atts), bs) => ((binding a, map attrib atts), bs |> map (fn (ths, btts) => (fact ths, map attrib btts))))) | Lazy_Notes (kind, (a, ths)) => Lazy_Notes (kind, (binding a, Lazy.map fact ths)); fun map_ctxt_attrib attrib = map_ctxt {binding = I, typ = I, term = I, pattern = I, fact = I, attrib = attrib}; fun transform_ctxt phi = map_ctxt {binding = Morphism.binding phi, typ = Morphism.typ phi, term = Morphism.term phi, pattern = Morphism.term phi, fact = Morphism.fact phi, attrib = map (Token.transform phi)}; (** pretty printing **) fun pretty_items _ _ [] = [] | pretty_items keyword sep (x :: ys) = Pretty.block [Pretty.keyword2 keyword, Pretty.brk 1, x] :: map (fn y => Pretty.block [Pretty.str " ", Pretty.keyword2 sep, Pretty.brk 1, y]) ys; (* pretty_stmt *) fun pretty_stmt ctxt = let val prt_typ = Pretty.quote o Syntax.pretty_typ ctxt; val prt_term = Pretty.quote o Syntax.pretty_term ctxt; val prt_terms = separate (Pretty.keyword2 "and") o map prt_term; val prt_binding = Attrib.pretty_binding ctxt; val prt_name = Proof_Context.pretty_name ctxt; fun prt_show (a, ts) = Pretty.block (Pretty.breaks (prt_binding a ":" @ prt_terms (map fst ts))); fun prt_var (x, SOME T, _) = Pretty.block [prt_name (Binding.name_of x), Pretty.str " ::", Pretty.brk 1, prt_typ T] | prt_var (x, NONE, _) = prt_name (Binding.name_of x); val prt_vars = separate (Pretty.keyword2 "and") o map prt_var; fun prt_obtain (_, ([], props)) = Pretty.block (Pretty.breaks (prt_terms props)) | prt_obtain (_, (vars, props)) = Pretty.block (Pretty.breaks (prt_vars vars @ [Pretty.keyword2 "where"] @ prt_terms props)); in fn Shows shows => pretty_items "shows" "and" (map prt_show shows) | Obtains obtains => pretty_items "obtains" "|" (map prt_obtain obtains) end; (* pretty_ctxt *) fun gen_pretty_ctxt show_attribs ctxt = let val prt_typ = Pretty.quote o Syntax.pretty_typ ctxt; val prt_term = Pretty.quote o Syntax.pretty_term ctxt; val prt_thm = Pretty.cartouche o Thm.pretty_thm ctxt; val prt_name = Proof_Context.pretty_name ctxt; fun prt_binding (b, atts) = Attrib.pretty_binding ctxt (b, if show_attribs then atts else []); fun prt_fact (ths, atts) = if not show_attribs orelse null atts then map prt_thm ths else Pretty.enclose "(" ")" (Pretty.breaks (map prt_thm ths)) :: Attrib.pretty_attribs ctxt atts; fun prt_mixfix NoSyn = [] | prt_mixfix mx = [Pretty.brk 2, Mixfix.pretty_mixfix mx]; fun prt_fix (x, SOME T, mx) = Pretty.block (prt_name (Binding.name_of x) :: Pretty.str " ::" :: Pretty.brk 1 :: prt_typ T :: prt_mixfix mx) | prt_fix (x, NONE, mx) = Pretty.block (prt_name (Binding.name_of x) :: prt_mixfix mx); fun prt_constrain (x, T) = prt_fix (Binding.name x, SOME T, NoSyn); fun prt_asm (a, ts) = Pretty.block (Pretty.breaks (prt_binding a ":" @ map 
(prt_term o fst) ts)); fun prt_def (a, (t, _)) = Pretty.block (Pretty.breaks (prt_binding a ":" @ [prt_term t])); fun prt_note (a, ths) = Pretty.block (Pretty.breaks (flat (prt_binding a " =" :: map prt_fact ths))); fun notes_kind "" = "notes" | notes_kind kind = "notes " ^ kind; in fn Fixes fixes => pretty_items "fixes" "and" (map prt_fix fixes) | Constrains xs => pretty_items "constrains" "and" (map prt_constrain xs) | Assumes asms => pretty_items "assumes" "and" (map prt_asm asms) | Defines defs => pretty_items "defines" "and" (map prt_def defs) | Notes (kind, facts) => pretty_items (notes_kind kind) "and" (map prt_note facts) | Lazy_Notes (kind, (a, ths)) => pretty_items (notes_kind kind) "and" [prt_note ((a, []), [(Lazy.force ths, [])])] end; val pretty_ctxt = gen_pretty_ctxt true; val pretty_ctxt_no_attribs = gen_pretty_ctxt false; (* pretty_statement *) local fun standard_elim ctxt th = (case Object_Logic.elim_concl ctxt th of SOME C => let val thesis = Var ((Auto_Bind.thesisN, Thm.maxidx_of th + 1), fastype_of C); val insts = (TVars.empty, Vars.make [(Term.dest_Var C, Thm.cterm_of ctxt thesis)]); val th' = Thm.instantiate insts th; in (th', true) end | NONE => (th, false)); fun thm_name ctxt kind th prts = let val head = if Thm.has_name_hint th then Pretty.block [Pretty.keyword1 kind, Pretty.brk 1, Proof_Context.pretty_name ctxt (Long_Name.base_name (Thm.get_name_hint th)), Pretty.str ":"] else Pretty.keyword1 kind in Pretty.block (Pretty.fbreaks (head :: prts)) end; fun obtain prop ctxt = let val ((ps, prop'), ctxt') = Variable.focus NONE prop ctxt; fun fix (x, T) = (Binding.name (Variable.revert_fixed ctxt' x), SOME T, NoSyn); val xs = map (fix o #2) ps; val As = Logic.strip_imp_prems prop'; in ((Binding.empty, (xs, As)), ctxt') end; in fun pretty_statement ctxt kind raw_th = let val (th, is_elim) = standard_elim ctxt (Raw_Simplifier.norm_hhf ctxt raw_th); val ((_, [th']), ctxt') = Variable.import true [th] (Variable.set_body true ctxt); val prop = Thm.prop_of th'; val (prems, concl) = Logic.strip_horn prop; val concl_term = Object_Logic.drop_judgment ctxt concl; val (assumes, cases) = chop_suffix (fn prem => is_elim andalso concl aconv Logic.strip_assums_concl prem) prems; val is_thesis = if null cases then K false else fn v => v aconv concl_term; val fixes = rev (fold_aterms (fn v as Free (x, T) => if Variable.is_newly_fixed ctxt' ctxt x andalso not (is_thesis v) then insert (op =) (Variable.revert_fixed ctxt' x, T) else I | _ => I) prop []); in pretty_ctxt ctxt' (Fixes (map (fn (x, T) => (Binding.name x, SOME T, NoSyn)) fixes)) @ pretty_ctxt ctxt' (Assumes (map (fn t => (Binding.empty_atts, [(t, [])])) assumes)) @ (if null cases then pretty_stmt ctxt' (Shows [(Binding.empty_atts, [(concl, [])])]) else let val (clauses, ctxt'') = fold_map obtain cases ctxt' in pretty_stmt ctxt'' (Obtains clauses) end) end |> thm_name ctxt kind raw_th; end; (** logical operations **) (* witnesses -- hypotheses as protected facts *) datatype witness = Witness of term * thm; val mark_witness = Logic.protect; fun witness_prop (Witness (t, _)) = t; fun witness_hyps (Witness (_, th)) = Thm.hyps_of th; fun map_witness f (Witness witn) = Witness (f witn); fun transform_witness phi = map_witness (fn (t, th) => (Morphism.term phi t, Morphism.thm phi th)); fun prove_witness ctxt t tac = Witness (t, Goal.prove ctxt [] [] (mark_witness t) (fn _ => resolve_tac ctxt [Drule.protectI] 1 THEN tac) |> Thm.close_derivation \<^here>); local val refine_witness = Proof.refine_singleton (Method.Basic (fn ctxt => 
CONTEXT_TACTIC o K (ALLGOALS (CONJUNCTS (ALLGOALS (CONJUNCTS (TRYALL (resolve_tac ctxt [Drule.protectI])))))))); fun gen_witness_proof proof after_qed wit_propss eq_props = let val propss = (map o map) (fn prop => (mark_witness prop, [])) wit_propss @ [map (rpair []) eq_props]; fun after_qed' thmss = let val (wits, eqs) = split_last ((map o map) (Thm.close_derivation \<^here>) thmss); in after_qed ((map2 o map2) (curry Witness) wit_propss wits) eqs end; in proof after_qed' propss #> refine_witness end; fun proof_local cmd goal_ctxt after_qed propp = let fun after_qed' (result_ctxt, results) state' = after_qed (burrow (Proof_Context.export result_ctxt (Proof.context_of state')) results) state'; in Proof.map_context (K goal_ctxt) #> Proof.internal_goal (K (K ())) (Proof_Context.get_mode goal_ctxt) true cmd NONE after_qed' [] [] (map (pair Binding.empty_atts) propp) #> snd end; in fun witness_proof after_qed wit_propss = gen_witness_proof (Proof.theorem NONE) (fn wits => fn _ => after_qed wits) wit_propss []; val witness_proof_eqs = gen_witness_proof (Proof.theorem NONE); fun witness_local_proof after_qed cmd wit_propss goal_ctxt = gen_witness_proof (proof_local cmd goal_ctxt) (fn wits => fn _ => after_qed wits) wit_propss []; fun witness_local_proof_eqs after_qed cmd wit_propss eq_props goal_ctxt = gen_witness_proof (proof_local cmd goal_ctxt) after_qed wit_propss eq_props; end; fun conclude_witness ctxt (Witness (_, th)) = Goal.conclude th |> Raw_Simplifier.norm_hhf_protect ctxt |> Thm.close_derivation \<^here>; fun pretty_witness ctxt witn = let val prt_term = Pretty.quote o Syntax.pretty_term ctxt in Pretty.block (prt_term (witness_prop witn) :: (if Config.get ctxt show_hyps then [Pretty.brk 2, Pretty.list "[" "]" (map prt_term (witness_hyps witn))] else [])) end; (* instantiate frees, with beta normalization *) fun instantiate_normalize_morphism insts = Morphism.instantiate_frees_morphism insts $> Morphism.term_morphism "beta_norm" Envir.beta_norm $> Morphism.thm_morphism "beta_conversion" (Conv.fconv_rule (Thm.beta_conversion true)); (* satisfy hypotheses *) local val norm_term = Envir.beta_eta_contract; val norm_conv = Drule.beta_eta_conversion; val norm_cterm = Thm.rhs_of o norm_conv; fun find_witness witns hyp = (case find_first (fn Witness (t, _) => hyp aconv t) witns of NONE => let val hyp' = norm_term hyp in find_first (fn Witness (t, _) => hyp' aconv norm_term t) witns end | some => some); fun compose_witness (Witness (_, th)) r = let val th' = Goal.conclude th; val A = Thm.cprem_of r 1; in Thm.implies_elim (Conv.gconv_rule norm_conv 1 r) (Conv.fconv_rule norm_conv (Thm.instantiate (Thm.match (apply2 norm_cterm (Thm.cprop_of th', A))) th')) end; in fun satisfy_thm witns thm = (Thm.chyps_of thm, thm) |-> fold (fn hyp => (case find_witness witns (Thm.term_of hyp) of NONE => I | SOME w => Thm.implies_intr hyp #> compose_witness w)); val satisfy_morphism = Morphism.thm_morphism "Element.satisfy" o satisfy_thm; end; (* rewriting with equalities *) (* for activating declarations only *) fun eq_term_morphism _ [] = NONE | eq_term_morphism thy props = let fun decomp_simp prop = let val ctxt = Proof_Context.init_global thy; - val _ = Logic.count_prems prop = 0 orelse + val _ = Logic.no_prems prop orelse error ("Bad conditional rewrite rule " ^ Syntax.string_of_term ctxt prop); val lhsrhs = Logic.dest_equals prop handle TERM _ => error ("Rewrite rule not a meta-equality " ^ Syntax.string_of_term ctxt prop); in lhsrhs end; val phi = Morphism.morphism "Element.eq_term_morphism" {binding = 
[], typ = [], term = [Pattern.rewrite_term thy (map decomp_simp props) []], fact = [fn _ => error "Illegal application of Element.eq_term_morphism"]}; in SOME phi end; fun eq_morphism _ [] = NONE | eq_morphism thy thms = let (* FIXME proper context!? *) fun rewrite th = rewrite_rule (Proof_Context.init_global (Thm.theory_of_thm th)) thms th; val phi = Morphism.morphism "Element.eq_morphism" {binding = [], typ = [], term = [Raw_Simplifier.rewrite_term thy thms []], fact = [map rewrite]}; in SOME phi end; (** activate in context **) (* init *) fun init (Fixes fixes) = Context.map_proof (Proof_Context.add_fixes fixes #> #2) | init (Constrains _) = I | init (Assumes asms) = Context.map_proof (fn ctxt => let val asms' = Attrib.map_specs (map (Attrib.attribute ctxt)) asms; val (_, ctxt') = ctxt |> fold Proof_Context.augment (maps (map #1 o #2) asms') |> Proof_Context.add_assms Assumption.assume_export asms'; in ctxt' end) | init (Defines defs) = Context.map_proof (fn ctxt => let val defs' = Attrib.map_specs (map (Attrib.attribute ctxt)) defs; val asms = defs' |> map (fn (b, (t, ps)) => let val (_, t') = Local_Defs.cert_def ctxt (K []) t (* FIXME adapt ps? *) in (t', (b, [(t', ps)])) end); val (_, ctxt') = ctxt |> fold Proof_Context.augment (map #1 asms) |> Proof_Context.add_assms Local_Defs.def_export (map #2 asms); in ctxt' end) | init (Notes (kind, facts)) = Attrib.generic_notes kind facts #> #2 | init (Lazy_Notes (kind, ths)) = Attrib.lazy_notes kind ths; (* activate *) fun activate_i elem ctxt = let val elem' = (case (map_ctxt_attrib o map) Token.init_assignable elem of Defines defs => Defines (defs |> map (fn ((a, atts), (t, ps)) => ((Thm.def_binding_optional (Binding.name (#1 (#1 (Local_Defs.cert_def ctxt (K []) t)))) a, atts), (t, ps)))) | e => e); val ctxt' = Context.proof_map (init elem') ctxt; in ((map_ctxt_attrib o map) Token.closure elem', ctxt') end; fun activate raw_elem ctxt = let val elem = raw_elem |> map_ctxt {binding = I, typ = I, term = I, pattern = I, fact = Proof_Context.get_fact ctxt, attrib = Attrib.check_src ctxt} in activate_i elem ctxt end; end; diff --git a/src/Pure/logic.ML b/src/Pure/logic.ML --- a/src/Pure/logic.ML +++ b/src/Pure/logic.ML @@ -1,666 +1,670 @@ (* Title: Pure/logic.ML Author: Lawrence C Paulson, Cambridge University Computer Laboratory Author: Makarius Abstract syntax operations of the Pure meta-logic. 
*) signature LOGIC = sig val all_const: typ -> term val all: term -> term -> term val dependent_all_name: string * term -> term -> term val is_all: term -> bool val dest_all: term -> (string * typ) * term val list_all: (string * typ) list * term -> term val all_constraint: (string -> typ option) -> string * string -> term -> term val dependent_all_constraint: (string -> typ option) -> string * string -> term -> term val mk_equals: term * term -> term val dest_equals: term -> term * term val implies: term val mk_implies: term * term -> term val dest_implies: term -> term * term val list_implies: term list * term -> term val strip_imp_prems: term -> term list val strip_imp_concl: term -> term val strip_prems: int * term list * term -> term list * term val count_prems: term -> int + val no_prems: term -> bool val nth_prem: int * term -> term val close_term: (string * term) list -> term -> term val close_prop: (string * term) list -> term list -> term -> term val close_prop_constraint: (string -> typ option) -> (string * string) list -> term list -> term -> term val true_prop: term val conjunction: term val mk_conjunction: term * term -> term val mk_conjunction_list: term list -> term val mk_conjunction_balanced: term list -> term val dest_conjunction: term -> term * term val dest_conjunction_list: term -> term list val dest_conjunction_balanced: int -> term -> term list val dest_conjunctions: term -> term list val strip_horn: term -> term list * term val mk_type: typ -> term val dest_type: term -> typ val type_map: (term -> term) -> typ -> typ val const_of_class: class -> string val class_of_const: string -> class val mk_of_class: typ * class -> term val dest_of_class: term -> typ * class val mk_of_sort: typ * sort -> term list val name_classrel: string * string -> string val mk_classrel: class * class -> term val dest_classrel: term -> class * class val name_arities: arity -> string list val name_arity: string * sort list * class -> string val mk_arities: arity -> term list val mk_arity: string * sort list * class -> term val dest_arity: term -> string * sort list * class val dummy_tfree: sort -> typ type unconstrain_context = {present_map: (typ * typ) list, constraints_map: (sort * typ) list, atyp_map: typ -> typ, map_atyps: typ -> typ, constraints: ((typ * class) * term) list, outer_constraints: (typ * class) list}; val unconstrainT: sort list -> term -> unconstrain_context * term val protectC: term val protect: term -> term val unprotect: term -> term val mk_term: term -> term val dest_term: term -> term val occs: term * term -> bool val close_form: term -> term val combound: term * int * int -> term val rlist_abs: (string * typ) list * term -> term val incr_tvar_same: int -> typ Same.operation val incr_tvar: int -> typ -> typ val incr_indexes_same: string list * typ list * int -> term Same.operation val incr_indexes: string list * typ list * int -> term -> term val lift_abs: int -> term -> term -> term val lift_all: int -> term -> term -> term val strip_assums_hyp: term -> term list val strip_assums_concl: term -> term val strip_params: term -> (string * typ) list val has_meta_prems: term -> bool val flatten_params: int -> term -> term val list_rename_params: string list -> term -> term val assum_pairs: int * term -> (term * term) list val assum_problems: int * term -> (term -> term) * term list * term val bad_schematic: indexname -> string val bad_fixed: string -> string val varifyT_global: typ -> typ val unvarifyT_global: typ -> typ val varify_types_global: term -> term val 
unvarify_types_global: term -> term val varify_global: term -> term val unvarify_global: term -> term val get_goal: term -> int -> term val goal_params: term -> int -> term * term list val prems_of_goal: term -> int -> term list val concl_of_goal: term -> int -> term end; structure Logic : LOGIC = struct (*** Abstract syntax operations on the meta-connectives ***) (** all **) fun all_const T = Const ("Pure.all", (T --> propT) --> propT); fun all v t = all_const (Term.fastype_of v) $ lambda v t; fun dependent_all_name (x, v) t = let val x' = if x = "" then Term.term_name v else x; val T = Term.fastype_of v; val t' = Term.abstract_over (v, t); in if Term.is_dependent t' then all_const T $ Abs (x', T, t') else t end; fun is_all (Const ("Pure.all", _) $ Abs _) = true | is_all _ = false; fun dest_all (Const ("Pure.all", _) $ Abs (abs as (_, T, _))) = let val (x, b) = Term.dest_abs abs (*potentially slow*) in ((x, T), b) end | dest_all t = raise TERM ("dest_all", [t]); fun list_all ([], t) = t | list_all ((a, T) :: vars, t) = all_const T $ Abs (a, T, list_all (vars, t)); (* operations before type-inference *) local fun abs_body default_type z tm = let fun abs lev (Abs (x, T, b)) = Abs (x, T, abs (lev + 1) b) | abs lev (t $ u) = abs lev t $ abs lev u | abs lev (a as Free (x, T)) = if x = z then Type.constraint (the_default dummyT (default_type x)) (Type.constraint T (Bound lev)) else a | abs _ a = a; in abs 0 (Term.incr_boundvars 1 tm) end; in fun all_constraint default_type (y, z) t = all_const dummyT $ Abs (y, dummyT, abs_body default_type z t); fun dependent_all_constraint default_type (y, z) t = let val t' = abs_body default_type z t in if Term.is_dependent t' then all_const dummyT $ Abs (y, dummyT, t') else t end; end; (** equality **) fun mk_equals (t, u) = let val T = Term.fastype_of t in Const ("Pure.eq", T --> T --> propT) $ t $ u end; fun dest_equals (Const ("Pure.eq", _) $ t $ u) = (t, u) | dest_equals t = raise TERM ("dest_equals", [t]); (** implies **) val implies = Const ("Pure.imp", propT --> propT --> propT); fun mk_implies (A, B) = implies $ A $ B; fun dest_implies (Const ("Pure.imp", _) $ A $ B) = (A, B) | dest_implies A = raise TERM ("dest_implies", [A]); (** nested implications **) (* [A1,...,An], B goes to A1\...An\B *) fun list_implies ([], B) = B | list_implies (A::As, B) = implies $ A $ list_implies(As,B); (* A1\...An\B goes to [A1,...,An], where B is not an implication *) fun strip_imp_prems (Const("Pure.imp", _) $ A $ B) = A :: strip_imp_prems B | strip_imp_prems _ = []; (* A1\...An\B goes to B, where B is not an implication *) fun strip_imp_concl (Const("Pure.imp", _) $ A $ B) = strip_imp_concl B | strip_imp_concl A = A : term; (*Strip and return premises: (i, [], A1\...Ai\B) goes to ([Ai, A(i-1),...,A1] , B) (REVERSED) if i<0 or else i too big then raises TERM*) fun strip_prems (0, As, B) = (As, B) | strip_prems (i, As, Const("Pure.imp", _) $ A $ B) = strip_prems (i-1, A::As, B) | strip_prems (_, As, A) = raise TERM("strip_prems", A::As); (*Count premises -- quicker than (length o strip_prems) *) fun count_prems (Const ("Pure.imp", _) $ _ $ B) = 1 + count_prems B | count_prems _ = 0; +fun no_prems (Const ("Pure.imp", _) $ _ $ _) = false + | no_prems _ = true; + (*Select Ai from A1\...Ai\B*) fun nth_prem (1, Const ("Pure.imp", _) $ A $ _) = A | nth_prem (i, Const ("Pure.imp", _) $ _ $ B) = nth_prem (i - 1, B) | nth_prem (_, A) = raise TERM ("nth_prem", [A]); (*strip a proof state (Horn clause): B1 \ ... 
Bn \ C goes to ([B1, ..., Bn], C) *) fun strip_horn A = (strip_imp_prems A, strip_imp_concl A); (* close -- omit vacuous quantifiers *) val close_term = fold_rev Term.dependent_lambda_name; fun close_prop xs As B = fold_rev dependent_all_name xs (list_implies (As, B)); fun close_prop_constraint default_type xs As B = fold_rev (dependent_all_constraint default_type) xs (list_implies (As, B)); (** conjunction **) val true_prop = all_const propT $ Abs ("dummy", propT, mk_implies (Bound 0, Bound 0)); val conjunction = Const ("Pure.conjunction", propT --> propT --> propT); (*A &&& B*) fun mk_conjunction (A, B) = conjunction $ A $ B; (*A &&& B &&& C -- improper*) fun mk_conjunction_list [] = true_prop | mk_conjunction_list ts = foldr1 mk_conjunction ts; (*(A &&& B) &&& (C &&& D) -- balanced*) fun mk_conjunction_balanced [] = true_prop | mk_conjunction_balanced ts = Balanced_Tree.make mk_conjunction ts; (*A &&& B*) fun dest_conjunction (Const ("Pure.conjunction", _) $ A $ B) = (A, B) | dest_conjunction t = raise TERM ("dest_conjunction", [t]); (*A &&& B &&& C -- improper*) fun dest_conjunction_list t = (case try dest_conjunction t of NONE => [t] | SOME (A, B) => A :: dest_conjunction_list B); (*(A &&& B) &&& (C &&& D) -- balanced*) fun dest_conjunction_balanced 0 _ = [] | dest_conjunction_balanced n t = Balanced_Tree.dest dest_conjunction n t; (*((A &&& B) &&& C) &&& D &&& E -- flat*) fun dest_conjunctions t = (case try dest_conjunction t of NONE => [t] | SOME (A, B) => dest_conjunctions A @ dest_conjunctions B); (** types as terms **) fun mk_type ty = Const ("Pure.type", Term.itselfT ty); fun dest_type (Const ("Pure.type", Type ("itself", [ty]))) = ty | dest_type t = raise TERM ("dest_type", [t]); fun type_map f = dest_type o f o mk_type; (** type classes **) (* const names *) val classN = "_class"; val const_of_class = suffix classN; fun class_of_const c = unsuffix classN c handle Fail _ => raise TERM ("class_of_const: bad name " ^ quote c, []); (* class/sort membership *) fun mk_of_class (ty, c) = Const (const_of_class c, Term.itselfT ty --> propT) $ mk_type ty; fun dest_of_class (Const (c_class, _) $ ty) = (dest_type ty, class_of_const c_class) | dest_of_class t = raise TERM ("dest_of_class", [t]); fun mk_of_sort (ty, S) = map (fn c => mk_of_class (ty, c)) S; (* class relations *) fun name_classrel (c1, c2) = Long_Name.base_name c1 ^ "_" ^ Long_Name.base_name c2; fun mk_classrel (c1, c2) = mk_of_class (Term.aT [c1], c2); fun dest_classrel tm = (case dest_of_class tm of (TVar (_, [c1]), c2) => (c1, c2) | _ => raise TERM ("dest_classrel", [tm])); (* type arities *) fun name_arities (t, _, S) = let val b = Long_Name.base_name t in S |> map (fn c => Long_Name.base_name c ^ "_" ^ b) end; fun name_arity (t, dom, c) = hd (name_arities (t, dom, [c])); fun mk_arities (t, Ss, S) = let val T = Type (t, ListPair.map TFree (Name.invent Name.context Name.aT (length Ss), Ss)) in map (fn c => mk_of_class (T, c)) S end; fun mk_arity (t, Ss, c) = the_single (mk_arities (t, Ss, [c])); fun dest_arity tm = let fun err () = raise TERM ("dest_arity", [tm]); val (ty, c) = dest_of_class tm; val (t, tvars) = (case ty of Type (t, tys) => (t, map dest_TVar tys handle TYPE _ => err ()) | _ => err ()); val Ss = if has_duplicates (eq_fst (op =)) tvars then err () else map snd tvars; in (t, Ss, c) end; (* internalized sort constraints *) fun dummy_tfree S = TFree ("'dummy", S); type unconstrain_context = {present_map: (typ * typ) list, constraints_map: (sort * typ) list, atyp_map: typ -> typ, map_atyps: typ -> typ, 
constraints: ((typ * class) * term) list, outer_constraints: (typ * class) list}; fun unconstrainT shyps prop = let val present = rev ((fold_types o fold_atyps_sorts) (insert (eq_fst op =)) prop []); val extra = fold (Sorts.remove_sort o #2) present shyps; val n = length present; val (names1, names2) = Name.invent Name.context Name.aT (n + length extra) |> chop n; val present_map = map2 (fn (T, S) => fn a => (T, TVar ((a, 0), S))) present names1; val constraints_map = map2 (fn (_, S) => fn a => (S, TVar ((a, 0), S))) present names1 @ map2 (fn S => fn a => (S, TVar ((a, 0), S))) extra names2; fun atyp_map T = (case AList.lookup (op =) present_map T of SOME U => U | NONE => (case AList.lookup (op =) constraints_map (Type.sort_of_atyp T) of SOME U => U | NONE => raise TYPE ("Dangling type variable ", [T], [prop]))); val map_atyps = Term.map_atyps (Type.strip_sorts o atyp_map); val constraints = constraints_map |> maps (fn (_, T as TVar (ai, S)) => map (fn c => ((T, c), mk_of_class (TVar (ai, []), c))) S); val outer_constraints = maps (fn (T, S) => map (pair T) S) (present @ map (`dummy_tfree) extra); val ucontext = {present_map = present_map, constraints_map = constraints_map, atyp_map = atyp_map, map_atyps = map_atyps, constraints = constraints, outer_constraints = outer_constraints}; val prop' = prop |> Term.map_types map_atyps |> curry list_implies (map #2 constraints); in (ucontext, prop') end; (** protected propositions and embedded terms **) val protectC = Const ("Pure.prop", propT --> propT); fun protect t = protectC $ t; fun unprotect (Const ("Pure.prop", _) $ t) = t | unprotect t = raise TERM ("unprotect", [t]); fun mk_term t = Const ("Pure.term", Term.fastype_of t --> propT) $ t; fun dest_term (Const ("Pure.term", _) $ t) = t | dest_term t = raise TERM ("dest_term", [t]); (*** Low-level term operations ***) (*Does t occur in u? Or is alpha-convertible to u? The term t must contain no loose bound variables*) fun occs (t, u) = exists_subterm (fn s => t aconv s) u; (*Close up a formula over all free variables by quantification*) fun close_form A = fold_rev (all o Free) (Frees.build (Frees.add_frees A) |> Frees.list_set) A; (*** Specialized operations for resolution... ***) (*computes t(Bound(n+k-1),...,Bound(n)) *) fun combound (t, n, k) = if k>0 then combound (t,n+1,k-1) $ (Bound n) else t; (* ([xn,...,x1], t) goes to \x1 ... xn. 
t *) fun rlist_abs ([], body) = body | rlist_abs ((a,T)::pairs, body) = rlist_abs(pairs, Abs(a, T, body)); fun incr_tvar_same 0 = Same.same | incr_tvar_same k = Term_Subst.map_atypsT_same (fn TVar ((a, i), S) => TVar ((a, i + k), S) | _ => raise Same.SAME); fun incr_tvar k T = incr_tvar_same k T handle Same.SAME => T; (*For all variables in the term, increment indexnames and lift over the Us result is ?Gidx(B.(lev+n-1),...,B.lev) where lev is abstraction level *) fun incr_indexes_same ([], [], 0) = Same.same | incr_indexes_same (fixed, Ts, k) = let val n = length Ts; val incrT = incr_tvar_same k; fun incr lev (Var ((x, i), T)) = combound (Var ((x, i + k), Ts ---> Same.commit incrT T), lev, n) | incr lev (Free (x, T)) = if member (op =) fixed x then combound (Free (x, Ts ---> Same.commit incrT T), lev, n) else Free (x, incrT T) | incr lev (Abs (x, T, body)) = (Abs (x, incrT T, incr (lev + 1) body handle Same.SAME => body) handle Same.SAME => Abs (x, T, incr (lev + 1) body)) | incr lev (t $ u) = (incr lev t $ (incr lev u handle Same.SAME => u) handle Same.SAME => t $ incr lev u) | incr _ (Const (c, T)) = Const (c, incrT T) | incr _ (Bound _) = raise Same.SAME; in incr 0 end; fun incr_indexes arg t = incr_indexes_same arg t handle Same.SAME => t; (* Lifting functions from subgoal and increment: lift_abs operates on terms lift_all operates on propositions *) fun lift_abs inc = let fun lift Ts (Const ("Pure.imp", _) $ _ $ B) t = lift Ts B t | lift Ts (Const ("Pure.all", _) $ Abs (a, T, B)) t = Abs (a, T, lift (T :: Ts) B t) | lift Ts _ t = incr_indexes ([], rev Ts, inc) t; in lift [] end; fun lift_all inc = let fun lift Ts ((c as Const ("Pure.imp", _)) $ A $ B) t = c $ A $ lift Ts B t | lift Ts ((c as Const ("Pure.all", _)) $ Abs (a, T, B)) t = c $ Abs (a, T, lift (T :: Ts) B t) | lift Ts _ t = incr_indexes ([], rev Ts, inc) t; in lift [] end; (*Strips assumptions in goal, yielding list of hypotheses. *) fun strip_assums_hyp B = let fun strip Hs (Const ("Pure.imp", _) $ H $ B) = strip (H :: Hs) B | strip Hs (Const ("Pure.all", _) $ Abs (a, T, t)) = strip (map (incr_boundvars 1) Hs) t | strip Hs B = rev Hs in strip [] B end; (*Strips assumptions in goal, yielding conclusion. *) fun strip_assums_concl (Const("Pure.imp", _) $ H $ B) = strip_assums_concl B | strip_assums_concl (Const("Pure.all", _) $ Abs (a, T, t)) = strip_assums_concl t | strip_assums_concl B = B; (*Make a list of all the parameters in a subgoal, even if nested*) fun strip_params (Const("Pure.imp", _) $ H $ B) = strip_params B | strip_params (Const("Pure.all", _) $ Abs (a, T, t)) = (a, T) :: strip_params t | strip_params B = []; (*test for nested meta connectives in prems*) val has_meta_prems = let fun is_meta (Const ("Pure.eq", _) $ _ $ _) = true | is_meta (Const ("Pure.imp", _) $ _ $ _) = true | is_meta (Const ("Pure.all", _) $ _) = true | is_meta _ = false; fun ex_meta (Const ("Pure.imp", _) $ A $ B) = is_meta A orelse ex_meta B | ex_meta (Const ("Pure.all", _) $ Abs (_, _, B)) = ex_meta B | ex_meta _ = false; in ex_meta end; (*Removes the parameters from a subgoal and renumber bvars in hypotheses, where j is the total number of parameters (precomputed) If n>0 then deletes assumption n. 
*) fun remove_params j n A = if j=0 andalso n<=0 then A (*nothing left to do...*) else case A of Const("Pure.imp", _) $ H $ B => if n=1 then (remove_params j (n-1) B) else implies $ (incr_boundvars j H) $ (remove_params j (n-1) B) | Const("Pure.all",_)$Abs(a,T,t) => remove_params (j-1) n t | _ => if n>0 then raise TERM("remove_params", [A]) else A; (*Move all parameters to the front of the subgoal, renaming them apart; if n>0 then deletes assumption n. *) fun flatten_params n A = let val params = strip_params A; val vars = ListPair.zip (Name.variant_list [] (map #1 params), map #2 params) in list_all (vars, remove_params (length vars) n A) end; (*Makes parameters in a goal have the names supplied by the list cs.*) fun list_rename_params cs (Const ("Pure.imp", _) $ A $ B) = implies $ A $ list_rename_params cs B | list_rename_params (c :: cs) ((a as Const ("Pure.all", _)) $ Abs (_, T, t)) = a $ Abs (c, T, list_rename_params cs t) | list_rename_params cs B = B; (*** Treatment of "assume", "erule", etc. ***) (*Strips assumptions in goal yielding HS = [Hn,...,H1], params = [xm,...,x1], and B, where x1...xm are the parameters. This version (21.1.2005) REQUIRES the parameters to be flattened, but it allows erule to work on assumptions of the form \x. phi. Any \ after the outermost string will be regarded as belonging to the conclusion, and left untouched. Used ONLY by assum_pairs. Unless nasms<0, it can terminate the recursion early; that allows erule to work on assumptions of the form P\Q.*) fun strip_assums_imp (0, Hs, B) = (Hs, B) (*recursion terminated by nasms*) | strip_assums_imp (nasms, Hs, Const("Pure.imp", _) $ H $ B) = strip_assums_imp (nasms-1, H::Hs, B) | strip_assums_imp (_, Hs, B) = (Hs, B); (*recursion terminated by B*) (*Strips OUTER parameters only.*) fun strip_assums_all (params, Const("Pure.all",_)$Abs(a,T,t)) = strip_assums_all ((a,T)::params, t) | strip_assums_all (params, B) = (params, B); (*Produces disagreement pairs, one for each assumption proof, in order. A is the first premise of the lifted rule, and thus has the form H1 \ ... Hk \ B and the pairs are (H1,B),...,(Hk,B). nasms is the number of assumptions in the original subgoal, needed when B has the form B1 \ B2: it stops B1 from being taken as an assumption.
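   For illustration, a sketch of the expected behaviour (the names H1, H2, B, x are chosen here purely for exposition): with A = !!x. H1 x ==> H2 x ==> B x and nasms = 2, the result should be the pairs (%x. H1 x, %x. B x) and (%x. H2 x, %x. B x), i.e. each hypothesis paired with the conclusion, both re-abstracted over the parameters.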
*) fun assum_pairs(nasms,A) = let val (params, A') = strip_assums_all ([],A) val (Hs,B) = strip_assums_imp (nasms,[],A') fun abspar t = rlist_abs(params, t) val D = abspar B fun pairrev ([], pairs) = pairs | pairrev (H::Hs, pairs) = pairrev(Hs, (abspar H, D) :: pairs) in pairrev (Hs,[]) end; fun assum_problems (nasms, A) = let val (params, A') = strip_assums_all ([], A) val (Hs, B) = strip_assums_imp (nasms, [], A') fun abspar t = rlist_abs (params, t) in (abspar, rev Hs, B) end; (* global schematic variables *) fun bad_schematic xi = "Illegal schematic variable: " ^ quote (Term.string_of_vname xi); fun bad_fixed x = "Illegal fixed variable: " ^ quote x; fun varifyT_global_same ty = ty |> Term_Subst.map_atypsT_same (fn TFree (a, S) => TVar ((a, 0), S) | TVar (ai, _) => raise TYPE (bad_schematic ai, [ty], [])); fun unvarifyT_global_same ty = ty |> Term_Subst.map_atypsT_same (fn TVar ((a, 0), S) => TFree (a, S) | TVar (ai, _) => raise TYPE (bad_schematic ai, [ty], []) | TFree (a, _) => raise TYPE (bad_fixed a, [ty], [])); val varifyT_global = Same.commit varifyT_global_same; val unvarifyT_global = Same.commit unvarifyT_global_same; fun varify_types_global tm = tm |> Same.commit (Term_Subst.map_types_same varifyT_global_same) handle TYPE (msg, _, _) => raise TERM (msg, [tm]); fun unvarify_types_global tm = tm |> Same.commit (Term_Subst.map_types_same unvarifyT_global_same) handle TYPE (msg, _, _) => raise TERM (msg, [tm]); fun varify_global tm = tm |> Same.commit (Term_Subst.map_aterms_same (fn Free (x, T) => Var ((x, 0), T) | Var (xi, _) => raise TERM (bad_schematic xi, [tm]) | _ => raise Same.SAME)) |> varify_types_global; fun unvarify_global tm = tm |> Same.commit (Term_Subst.map_aterms_same (fn Var ((x, 0), T) => Free (x, T) | Var (xi, _) => raise TERM (bad_schematic xi, [tm]) | Free (x, _) => raise TERM (bad_fixed x, [tm]) | _ => raise Same.SAME)) |> unvarify_types_global; (* goal states *) fun get_goal st i = nth_prem (i, st) handle TERM _ => error ("Subgoal number " ^ string_of_int i ^ " out of range (a total of " ^ string_of_int (count_prems st) ^ " subgoals)"); (*reverses parameters for substitution*) fun goal_params st i = let val gi = get_goal st i val rfrees = map Free (Term.rename_wrt_term gi (strip_params gi)) in (gi, rfrees) end; fun concl_of_goal st i = let val (gi, rfrees) = goal_params st i val B = strip_assums_concl gi in subst_bounds (rfrees, B) end; fun prems_of_goal st i = let val (gi, rfrees) = goal_params st i val As = strip_assums_hyp gi in map (fn A => subst_bounds (rfrees, A)) As end; end; diff --git a/src/Pure/raw_simplifier.ML b/src/Pure/raw_simplifier.ML --- a/src/Pure/raw_simplifier.ML +++ b/src/Pure/raw_simplifier.ML @@ -1,1466 +1,1466 @@ (* Title: Pure/raw_simplifier.ML Author: Tobias Nipkow and Stefan Berghofer, TU Muenchen Higher-order Simplification. 
*) infix 4 addsimps delsimps addsimprocs delsimprocs setloop addloop delloop setSSolver addSSolver setSolver addSolver; signature BASIC_RAW_SIMPLIFIER = sig val simp_depth_limit: int Config.T val simp_trace_depth_limit: int Config.T val simp_debug: bool Config.T val simp_trace: bool Config.T type cong_name = bool * string type rrule val mk_rrules: Proof.context -> thm list -> rrule list val eq_rrule: rrule * rrule -> bool type proc type solver val mk_solver: string -> (Proof.context -> int -> tactic) -> solver type simpset val empty_ss: simpset val merge_ss: simpset * simpset -> simpset val dest_ss: simpset -> {simps: (string * thm) list, procs: (string * term list) list, congs: (cong_name * thm) list, weak_congs: cong_name list, loopers: string list, unsafe_solvers: string list, safe_solvers: string list} type simproc val eq_simproc: simproc * simproc -> bool val cert_simproc: theory -> string -> {lhss: term list, proc: morphism -> Proof.context -> cterm -> thm option} -> simproc val transform_simproc: morphism -> simproc -> simproc val simpset_of: Proof.context -> simpset val put_simpset: simpset -> Proof.context -> Proof.context val simpset_map: Proof.context -> (Proof.context -> Proof.context) -> simpset -> simpset val map_theory_simpset: (Proof.context -> Proof.context) -> theory -> theory val empty_simpset: Proof.context -> Proof.context val clear_simpset: Proof.context -> Proof.context val addsimps: Proof.context * thm list -> Proof.context val delsimps: Proof.context * thm list -> Proof.context val addsimprocs: Proof.context * simproc list -> Proof.context val delsimprocs: Proof.context * simproc list -> Proof.context val setloop: Proof.context * (Proof.context -> int -> tactic) -> Proof.context val addloop: Proof.context * (string * (Proof.context -> int -> tactic)) -> Proof.context val delloop: Proof.context * string -> Proof.context val setSSolver: Proof.context * solver -> Proof.context val addSSolver: Proof.context * solver -> Proof.context val setSolver: Proof.context * solver -> Proof.context val addSolver: Proof.context * solver -> Proof.context val rewrite_rule: Proof.context -> thm list -> thm -> thm val rewrite_goals_rule: Proof.context -> thm list -> thm -> thm val rewrite_goals_tac: Proof.context -> thm list -> tactic val rewrite_goal_tac: Proof.context -> thm list -> int -> tactic val prune_params_tac: Proof.context -> tactic val fold_rule: Proof.context -> thm list -> thm -> thm val fold_goals_tac: Proof.context -> thm list -> tactic val norm_hhf: Proof.context -> thm -> thm val norm_hhf_protect: Proof.context -> thm -> thm end; signature RAW_SIMPLIFIER = sig include BASIC_RAW_SIMPLIFIER exception SIMPLIFIER of string * thm list type trace_ops val set_trace_ops: trace_ops -> theory -> theory val subgoal_tac: Proof.context -> int -> tactic val loop_tac: Proof.context -> int -> tactic val solvers: Proof.context -> solver list * solver list val map_ss: (Proof.context -> Proof.context) -> Context.generic -> Context.generic val prems_of: Proof.context -> thm list val add_simp: thm -> Proof.context -> Proof.context val del_simp: thm -> Proof.context -> Proof.context val flip_simp: thm -> Proof.context -> Proof.context val init_simpset: thm list -> Proof.context -> Proof.context val add_eqcong: thm -> Proof.context -> Proof.context val del_eqcong: thm -> Proof.context -> Proof.context val add_cong: thm -> Proof.context -> Proof.context val del_cong: thm -> Proof.context -> Proof.context val mksimps: Proof.context -> thm -> thm list val set_mksimps: (Proof.context -> thm -> 
thm list) -> Proof.context -> Proof.context val set_mkcong: (Proof.context -> thm -> thm) -> Proof.context -> Proof.context val set_mksym: (Proof.context -> thm -> thm option) -> Proof.context -> Proof.context val set_mkeqTrue: (Proof.context -> thm -> thm option) -> Proof.context -> Proof.context val set_term_ord: term ord -> Proof.context -> Proof.context val set_subgoaler: (Proof.context -> int -> tactic) -> Proof.context -> Proof.context val solver: Proof.context -> solver -> int -> tactic val default_mk_sym: Proof.context -> thm -> thm option val add_prems: thm list -> Proof.context -> Proof.context val set_reorient: (Proof.context -> term list -> term -> term -> bool) -> Proof.context -> Proof.context val set_solvers: solver list -> Proof.context -> Proof.context val rewrite_cterm: bool * bool * bool -> (Proof.context -> thm -> thm option) -> Proof.context -> conv val rewrite_term: theory -> thm list -> (term -> term option) list -> term -> term val rewrite_thm: bool * bool * bool -> (Proof.context -> thm -> thm option) -> Proof.context -> thm -> thm val generic_rewrite_goal_tac: bool * bool * bool -> (Proof.context -> tactic) -> Proof.context -> int -> tactic val rewrite: Proof.context -> bool -> thm list -> conv end; structure Raw_Simplifier: RAW_SIMPLIFIER = struct (** datatype simpset **) (* congruence rules *) type cong_name = bool * string; fun cong_name (Const (a, _)) = SOME (true, a) | cong_name (Free (a, _)) = SOME (false, a) | cong_name _ = NONE; structure Congtab = Table(type key = cong_name val ord = prod_ord bool_ord fast_string_ord); (* rewrite rules *) type rrule = {thm: thm, (*the rewrite rule*) name: string, (*name of theorem from which rewrite rule was extracted*) lhs: term, (*the left-hand side*) elhs: cterm, (*the eta-contracted lhs*) extra: bool, (*extra variables outside of elhs*) fo: bool, (*use first-order matching*) perm: bool}; (*the rewrite rule is permutative*) fun trim_context_rrule ({thm, name, lhs, elhs, extra, fo, perm}: rrule) = {thm = Thm.trim_context thm, name = name, lhs = lhs, elhs = Thm.trim_context_cterm elhs, extra = extra, fo = fo, perm = perm}; (* Remarks: - elhs is used for matching, lhs only for preservation of bound variable names; - fo is set iff either elhs is first-order (no Var is applied), in which case fo-matching is complete, or elhs is not a pattern, in which case there is nothing better to do; *) fun eq_rrule ({thm = thm1, ...}: rrule, {thm = thm2, ...}: rrule) = Thm.eq_thm_prop (thm1, thm2); (* FIXME: it seems that the conditions on extra variables are too liberal if prems are nonempty: does solving the prems really guarantee instantiation of all its Vars? Better: a dynamic check each time a rule is applied. 
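   For instance, sketching the concern with a made-up rule: a conditional rewrite such as ?y = ?x ==> f ?x == ?y is not classified as having extra variables by rewrite_rule_extra_vars below, because ?y occurs in a premise; applying it then relies on the premise being solved in a way that actually instantiates ?y.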
*) fun rewrite_rule_extra_vars prems elhs erhs = let val elhss = elhs :: prems; val tvars = TVars.build (fold TVars.add_tvars elhss); val vars = Vars.build (fold Vars.add_vars elhss); in erhs |> Term.exists_type (Term.exists_subtype (fn TVar v => not (TVars.defined tvars v) | _ => false)) orelse erhs |> Term.exists_subterm (fn Var v => not (Vars.defined vars v) | _ => false) end; fun rrule_extra_vars elhs thm = rewrite_rule_extra_vars [] (Thm.term_of elhs) (Thm.full_prop_of thm); fun mk_rrule2 {thm, name, lhs, elhs, perm} = let val t = Thm.term_of elhs; val fo = Pattern.first_order t orelse not (Pattern.pattern t); val extra = rrule_extra_vars elhs thm; in {thm = thm, name = name, lhs = lhs, elhs = elhs, extra = extra, fo = fo, perm = perm} end; (*simple test for looping rewrite rules and stupid orientations*) fun default_reorient ctxt prems lhs rhs = rewrite_rule_extra_vars prems lhs rhs orelse is_Var (head_of lhs) orelse (* turns t = x around, which causes a headache if x is a local variable - usually it is very useful :-( is_Free rhs andalso not(is_Free lhs) andalso not(Logic.occs(rhs,lhs)) andalso not(exists_subterm is_Var lhs) orelse *) exists (fn t => Logic.occs (lhs, t)) (rhs :: prems) orelse null prems andalso Pattern.matches (Proof_Context.theory_of ctxt) (lhs, rhs) (*the condition "null prems" is necessary because conditional rewrites with extra variables in the conditions may terminate although the rhs is an instance of the lhs; example: ?m < ?n \ f ?n \ f ?m *) orelse is_Const lhs andalso not (is_Const rhs); (* simplification procedures *) datatype proc = Proc of {name: string, lhs: term, proc: Proof.context -> cterm -> thm option, stamp: stamp}; fun eq_proc (Proc {stamp = stamp1, ...}, Proc {stamp = stamp2, ...}) = stamp1 = stamp2; (* solvers *) datatype solver = Solver of {name: string, solver: Proof.context -> int -> tactic, id: stamp}; fun mk_solver name solver = Solver {name = name, solver = solver, id = stamp ()}; fun solver_name (Solver {name, ...}) = name; fun solver ctxt (Solver {solver = tac, ...}) = tac ctxt; fun eq_solver (Solver {id = id1, ...}, Solver {id = id2, ...}) = (id1 = id2); (* simplification sets *) (*A simpset contains data required during conversion: rules: discrimination net of rewrite rules; prems: current premises; depth: simp_depth and exceeded flag; congs: association list of congruence rules and a list of `weak' congruence constants. A congruence is `weak' if it avoids normalization of some argument. 
procs: discrimination net of simplification procedures (functions that prove rewrite rules on the fly); mk_rews: mk: turn simplification thms into rewrite rules; mk_cong: prepare congruence rules; mk_sym: turn \ around; mk_eq_True: turn P into P \ True; term_ord: for ordered rewriting;*) datatype simpset = Simpset of {rules: rrule Net.net, prems: thm list, depth: int * bool Unsynchronized.ref} * {congs: thm Congtab.table * cong_name list, procs: proc Net.net, mk_rews: {mk: Proof.context -> thm -> thm list, mk_cong: Proof.context -> thm -> thm, mk_sym: Proof.context -> thm -> thm option, mk_eq_True: Proof.context -> thm -> thm option, reorient: Proof.context -> term list -> term -> term -> bool}, term_ord: term ord, subgoal_tac: Proof.context -> int -> tactic, loop_tacs: (string * (Proof.context -> int -> tactic)) list, solvers: solver list * solver list}; fun internal_ss (Simpset (_, ss2)) = ss2; fun make_ss1 (rules, prems, depth) = {rules = rules, prems = prems, depth = depth}; fun map_ss1 f {rules, prems, depth} = make_ss1 (f (rules, prems, depth)); fun make_ss2 (congs, procs, mk_rews, term_ord, subgoal_tac, loop_tacs, solvers) = {congs = congs, procs = procs, mk_rews = mk_rews, term_ord = term_ord, subgoal_tac = subgoal_tac, loop_tacs = loop_tacs, solvers = solvers}; fun map_ss2 f {congs, procs, mk_rews, term_ord, subgoal_tac, loop_tacs, solvers} = make_ss2 (f (congs, procs, mk_rews, term_ord, subgoal_tac, loop_tacs, solvers)); fun make_simpset (args1, args2) = Simpset (make_ss1 args1, make_ss2 args2); fun dest_ss (Simpset ({rules, ...}, {congs, procs, loop_tacs, solvers, ...})) = {simps = Net.entries rules |> map (fn {name, thm, ...} => (name, thm)), procs = Net.entries procs |> map (fn Proc {name, lhs, stamp, ...} => ((name, lhs), stamp)) |> partition_eq (eq_snd op =) |> map (fn ps => (fst (fst (hd ps)), map (snd o fst) ps)), congs = congs |> fst |> Congtab.dest, weak_congs = congs |> snd, loopers = map fst loop_tacs, unsafe_solvers = map solver_name (#1 solvers), safe_solvers = map solver_name (#2 solvers)}; (* empty *) fun init_ss depth mk_rews term_ord subgoal_tac solvers = make_simpset ((Net.empty, [], depth), ((Congtab.empty, []), Net.empty, mk_rews, term_ord, subgoal_tac, [], solvers)); fun default_mk_sym _ th = SOME (th RS Drule.symmetric_thm); val empty_ss = init_ss (0, Unsynchronized.ref false) {mk = fn _ => fn th => if can Logic.dest_equals (Thm.concl_of th) then [th] else [], mk_cong = K I, mk_sym = default_mk_sym, mk_eq_True = K (K NONE), reorient = default_reorient} Term_Ord.term_ord (K (K no_tac)) ([], []); (* merge *) (*NOTE: ignores some fields of 2nd simpset*) fun merge_ss (ss1, ss2) = if pointer_eq (ss1, ss2) then ss1 else let val Simpset ({rules = rules1, prems = prems1, depth = depth1}, {congs = (congs1, weak1), procs = procs1, mk_rews, term_ord, subgoal_tac, loop_tacs = loop_tacs1, solvers = (unsafe_solvers1, solvers1)}) = ss1; val Simpset ({rules = rules2, prems = prems2, depth = depth2}, {congs = (congs2, weak2), procs = procs2, mk_rews = _, term_ord = _, subgoal_tac = _, loop_tacs = loop_tacs2, solvers = (unsafe_solvers2, solvers2)}) = ss2; val rules' = Net.merge eq_rrule (rules1, rules2); val prems' = Thm.merge_thms (prems1, prems2); val depth' = if #1 depth1 < #1 depth2 then depth2 else depth1; val congs' = Congtab.merge (K true) (congs1, congs2); val weak' = merge (op =) (weak1, weak2); val procs' = Net.merge eq_proc (procs1, procs2); val loop_tacs' = AList.merge (op =) (K true) (loop_tacs1, loop_tacs2); val unsafe_solvers' = merge eq_solver 
(unsafe_solvers1, unsafe_solvers2); val solvers' = merge eq_solver (solvers1, solvers2); in make_simpset ((rules', prems', depth'), ((congs', weak'), procs', mk_rews, term_ord, subgoal_tac, loop_tacs', (unsafe_solvers', solvers'))) end; (** context data **) structure Simpset = Generic_Data ( type T = simpset; val empty = empty_ss; val extend = I; val merge = merge_ss; ); val simpset_of = Simpset.get o Context.Proof; fun map_simpset f = Context.proof_map (Simpset.map f); fun map_simpset1 f = map_simpset (fn Simpset (ss1, ss2) => Simpset (map_ss1 f ss1, ss2)); fun map_simpset2 f = map_simpset (fn Simpset (ss1, ss2) => Simpset (ss1, map_ss2 f ss2)); fun put_simpset ss = map_simpset (K ss); fun simpset_map ctxt f ss = ctxt |> put_simpset ss |> f |> simpset_of; val empty_simpset = put_simpset empty_ss; fun map_theory_simpset f thy = let val ctxt' = f (Proof_Context.init_global thy); val thy' = Proof_Context.theory_of ctxt'; in Context.theory_map (Simpset.map (K (simpset_of ctxt'))) thy' end; fun map_ss f = Context.mapping (map_theory_simpset (f o Context_Position.not_really)) f; val clear_simpset = map_simpset (fn Simpset ({depth, ...}, {mk_rews, term_ord, subgoal_tac, solvers, ...}) => init_ss depth mk_rews term_ord subgoal_tac solvers); (* accessors for tactis *) fun subgoal_tac ctxt = (#subgoal_tac o internal_ss o simpset_of) ctxt ctxt; fun loop_tac ctxt = FIRST' (map (fn (_, tac) => tac ctxt) (rev ((#loop_tacs o internal_ss o simpset_of) ctxt))); val solvers = #solvers o internal_ss o simpset_of (* simp depth *) (* The simp_depth_limit is meant to abort infinite recursion of the simplifier early but should not terminate "normal" executions. As of 2017, 25 would suffice; 40 builds in a safety margin. *) val simp_depth_limit = Config.declare_int ("simp_depth_limit", \<^here>) (K 40); val simp_trace_depth_limit = Config.declare_int ("simp_trace_depth_limit", \<^here>) (K 1); fun inc_simp_depth ctxt = ctxt |> map_simpset1 (fn (rules, prems, (depth, exceeded)) => (rules, prems, (depth + 1, if depth = Config.get ctxt simp_trace_depth_limit then Unsynchronized.ref false else exceeded))); fun simp_depth ctxt = let val Simpset ({depth = (depth, _), ...}, _) = simpset_of ctxt in depth end; (* diagnostics *) exception SIMPLIFIER of string * thm list; val simp_debug = Config.declare_bool ("simp_debug", \<^here>) (K false); val simp_trace = Config.declare_bool ("simp_trace", \<^here>) (K false); fun cond_warning ctxt msg = if Context_Position.is_really_visible ctxt then warning (msg ()) else (); fun cond_tracing' ctxt flag msg = if Config.get ctxt flag then let val Simpset ({depth = (depth, exceeded), ...}, _) = simpset_of ctxt; val depth_limit = Config.get ctxt simp_trace_depth_limit; in if depth > depth_limit then if ! 
exceeded then () else (tracing "simp_trace_depth_limit exceeded!"; exceeded := true) else (tracing (enclose "[" "]" (string_of_int depth) ^ msg ()); exceeded := false) end else (); fun cond_tracing ctxt = cond_tracing' ctxt simp_trace; fun print_term ctxt s t = s ^ "\n" ^ Syntax.string_of_term ctxt t; fun print_thm ctxt s (name, th) = print_term ctxt (if name = "" then s else s ^ " " ^ quote name ^ ":") (Thm.full_prop_of th); (** simpset operations **) (* prems *) fun prems_of ctxt = let val Simpset ({prems, ...}, _) = simpset_of ctxt in prems end; fun add_prems ths = map_simpset1 (fn (rules, prems, depth) => (rules, ths @ prems, depth)); (* maintain simp rules *) fun del_rrule loud (rrule as {thm, elhs, ...}) ctxt = ctxt |> map_simpset1 (fn (rules, prems, depth) => (Net.delete_term eq_rrule (Thm.term_of elhs, rrule) rules, prems, depth)) handle Net.DELETE => (if not loud then () else cond_warning ctxt (fn () => print_thm ctxt "Rewrite rule not in simpset:" ("", thm)); ctxt); fun insert_rrule (rrule as {thm, name, ...}) ctxt = (cond_tracing ctxt (fn () => print_thm ctxt "Adding rewrite rule" (name, thm)); ctxt |> map_simpset1 (fn (rules, prems, depth) => let val rrule2 as {elhs, ...} = mk_rrule2 rrule; val rules' = Net.insert_term eq_rrule (Thm.term_of elhs, trim_context_rrule rrule2) rules; in (rules', prems, depth) end) handle Net.INSERT => (cond_warning ctxt (fn () => print_thm ctxt "Ignoring duplicate rewrite rule:" ("", thm)); ctxt)); val vars_set = Vars.build o Vars.add_vars; local fun vperm (Var _, Var _) = true | vperm (Abs (_, _, s), Abs (_, _, t)) = vperm (s, t) | vperm (t1 $ t2, u1 $ u2) = vperm (t1, u1) andalso vperm (t2, u2) | vperm (t, u) = (t = u); fun var_perm (t, u) = vperm (t, u) andalso Vars.eq_set (apply2 vars_set (t, u)); in fun decomp_simp thm = let val prop = Thm.prop_of thm; val prems = Logic.strip_imp_prems prop; val concl = Drule.strip_imp_concl (Thm.cprop_of thm); val (lhs, rhs) = Thm.dest_equals concl handle TERM _ => raise SIMPLIFIER ("Rewrite rule not a meta-equality", [thm]); val elhs = Thm.dest_arg (Thm.cprop_of (Thm.eta_conversion lhs)); val erhs = Envir.eta_contract (Thm.term_of rhs); val perm = var_perm (Thm.term_of elhs, erhs) andalso not (Thm.term_of elhs aconv erhs) andalso not (is_Var (Thm.term_of elhs)); in (prems, Thm.term_of lhs, elhs, Thm.term_of rhs, perm) end; end; fun decomp_simp' thm = let val (_, lhs, _, rhs, _) = decomp_simp thm in if Thm.nprems_of thm > 0 then raise SIMPLIFIER ("Bad conditional rewrite rule", [thm]) else (lhs, rhs) end; fun mk_eq_True ctxt (thm, name) = let val Simpset (_, {mk_rews = {mk_eq_True, ...}, ...}) = simpset_of ctxt in (case mk_eq_True ctxt thm of NONE => [] | SOME eq_True => let val (_, lhs, elhs, _, _) = decomp_simp eq_True; in [{thm = eq_True, name = name, lhs = lhs, elhs = elhs, perm = false}] end) end; (*create the rewrite rule and possibly also the eq_True variant, in case there are extra vars on the rhs*) fun rrule_eq_True ctxt thm name lhs elhs rhs thm2 = let val rrule = {thm = thm, name = name, lhs = lhs, elhs = elhs, perm = false} in if rewrite_rule_extra_vars [] lhs rhs then mk_eq_True ctxt (thm2, name) @ [rrule] else [rrule] end; fun mk_rrule ctxt (thm, name) = let val (prems, lhs, elhs, rhs, perm) = decomp_simp thm in if perm then [{thm = thm, name = name, lhs = lhs, elhs = elhs, perm = true}] else (*weak test for loops*) if rewrite_rule_extra_vars prems lhs rhs orelse is_Var (Thm.term_of elhs) then mk_eq_True ctxt (thm, name) else rrule_eq_True ctxt thm name lhs elhs rhs thm end |> map (fn {thm, 
name, lhs, elhs, perm} => {thm = Thm.trim_context thm, name = name, lhs = lhs, elhs = Thm.trim_context_cterm elhs, perm = perm}); fun orient_rrule ctxt (thm, name) = let val (prems, lhs, elhs, rhs, perm) = decomp_simp thm; val Simpset (_, {mk_rews = {reorient, mk_sym, ...}, ...}) = simpset_of ctxt; in if perm then [{thm = thm, name = name, lhs = lhs, elhs = elhs, perm = true}] else if reorient ctxt prems lhs rhs then if reorient ctxt prems rhs lhs then mk_eq_True ctxt (thm, name) else (case mk_sym ctxt thm of NONE => [] | SOME thm' => let val (_, lhs', elhs', rhs', _) = decomp_simp thm' in rrule_eq_True ctxt thm' name lhs' elhs' rhs' thm end) else rrule_eq_True ctxt thm name lhs elhs rhs thm end; fun extract_rews ctxt sym thms = let val Simpset (_, {mk_rews = {mk, ...}, ...}) = simpset_of ctxt; val mk = if sym then fn ctxt => fn th => (mk ctxt th) RL [Drule.symmetric_thm] else mk in maps (fn thm => map (rpair (Thm.get_name_hint thm)) (mk ctxt thm)) thms end; fun extract_safe_rrules ctxt thm = maps (orient_rrule ctxt) (extract_rews ctxt false [thm]); fun mk_rrules ctxt thms = let val rews = extract_rews ctxt false thms val raw_rrules = flat (map (mk_rrule ctxt) rews) in map mk_rrule2 raw_rrules end (* add/del rules explicitly *) local fun comb_simps ctxt comb mk_rrule sym thms = let val rews = extract_rews ctxt sym (map (Thm.transfer' ctxt) thms); in fold (fold comb o mk_rrule) rews ctxt end; (* This code checks if the symetric version of a rule is already in the simpset. However, the variable names in the two versions of the rule may differ. Thus the current test modulo eq_rrule is too weak to be useful and needs to be refined. fun present ctxt rules (rrule as {thm, elhs, ...}) = (Net.insert_term eq_rrule (Thm.term_of elhs, trim_context_rrule rrule) rules; false) handle Net.INSERT => (cond_warning ctxt (fn () => print_thm ctxt "Symmetric rewrite rule already in simpset:" ("", thm)); true); fun sym_present ctxt thms = let val rews = extract_rews ctxt true (map (Thm.transfer' ctxt) thms); val rrules = map mk_rrule2 (flat(map (mk_rrule ctxt) rews)) val Simpset({rules, ...},_) = simpset_of ctxt in exists (present ctxt rules) rrules end *) in fun ctxt addsimps thms = comb_simps ctxt insert_rrule (mk_rrule ctxt) false thms; fun addsymsimps ctxt thms = comb_simps ctxt insert_rrule (mk_rrule ctxt) true thms; fun ctxt delsimps thms = comb_simps ctxt (del_rrule true) (map mk_rrule2 o mk_rrule ctxt) false thms; fun delsimps_quiet ctxt thms = comb_simps ctxt (del_rrule false) (map mk_rrule2 o mk_rrule ctxt) false thms; fun add_simp thm ctxt = ctxt addsimps [thm]; (* with check for presence of symmetric version: if sym_present ctxt [thm] then (cond_warning ctxt (fn () => print_thm ctxt "Ignoring rewrite rule:" ("", thm)); ctxt) else ctxt addsimps [thm]; *) fun del_simp thm ctxt = ctxt delsimps [thm]; fun flip_simp thm ctxt = addsymsimps (delsimps_quiet ctxt [thm]) [thm]; end; fun init_simpset thms ctxt = ctxt |> Context_Position.set_visible false |> empty_simpset |> fold add_simp thms |> Context_Position.restore_visible ctxt; (* congs *) local fun is_full_cong_prems [] [] = true | is_full_cong_prems [] _ = false | is_full_cong_prems (p :: prems) varpairs = (case Logic.strip_assums_concl p of Const ("Pure.eq", _) $ lhs $ rhs => let val (x, xs) = strip_comb lhs and (y, ys) = strip_comb rhs in is_Var x andalso forall is_Bound xs andalso not (has_duplicates (op =) xs) andalso xs = ys andalso member (op =) varpairs (x, y) andalso is_full_cong_prems prems (remove (op =) (x, y) varpairs) end | _ => false); 
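(* Illustration (a schematic sketch, not taken from any particular theory): a congruence rule of the form ?x == ?x' ==> ?y == ?y' ==> f ?x ?y == f ?x' ?y' relates every argument of f by a premise and hence counts as a full congruence below, whereas ?x == ?x' ==> f ?x ?y == f ?x' ?y omits the premise for ?y and is therefore recorded as a `weak' congruence, i.e. normalization of that argument is avoided. *)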
fun is_full_cong thm = let val prems = Thm.prems_of thm and concl = Thm.concl_of thm; val (lhs, rhs) = Logic.dest_equals concl; val (f, xs) = strip_comb lhs and (g, ys) = strip_comb rhs; in f = g andalso not (has_duplicates (op =) (xs @ ys)) andalso length xs = length ys andalso is_full_cong_prems prems (xs ~~ ys) end; fun mk_cong ctxt = let val Simpset (_, {mk_rews = {mk_cong = f, ...}, ...}) = simpset_of ctxt in f ctxt end; in fun add_eqcong thm ctxt = ctxt |> map_simpset2 (fn (congs, procs, mk_rews, term_ord, subgoal_tac, loop_tacs, solvers) => let val (lhs, _) = Logic.dest_equals (Thm.concl_of thm) handle TERM _ => raise SIMPLIFIER ("Congruence not a meta-equality", [thm]); (*val lhs = Envir.eta_contract lhs;*) val a = the (cong_name (head_of lhs)) handle Option.Option => raise SIMPLIFIER ("Congruence must start with a constant or free variable", [thm]); val (xs, weak) = congs; val xs' = Congtab.update (a, Thm.trim_context thm) xs; val weak' = if is_full_cong thm then weak else a :: weak; in ((xs', weak'), procs, mk_rews, term_ord, subgoal_tac, loop_tacs, solvers) end); fun del_eqcong thm ctxt = ctxt |> map_simpset2 (fn (congs, procs, mk_rews, term_ord, subgoal_tac, loop_tacs, solvers) => let val (lhs, _) = Logic.dest_equals (Thm.concl_of thm) handle TERM _ => raise SIMPLIFIER ("Congruence not a meta-equality", [thm]); (*val lhs = Envir.eta_contract lhs;*) val a = the (cong_name (head_of lhs)) handle Option.Option => raise SIMPLIFIER ("Congruence must start with a constant", [thm]); val (xs, _) = congs; val xs' = Congtab.delete_safe a xs; val weak' = Congtab.fold (fn (a, th) => if is_full_cong th then I else insert (op =) a) xs' []; in ((xs', weak'), procs, mk_rews, term_ord, subgoal_tac, loop_tacs, solvers) end); fun add_cong thm ctxt = add_eqcong (mk_cong ctxt thm) ctxt; fun del_cong thm ctxt = del_eqcong (mk_cong ctxt thm) ctxt; end; (* simprocs *) datatype simproc = Simproc of {name: string, lhss: term list, proc: morphism -> Proof.context -> cterm -> thm option, stamp: stamp}; fun eq_simproc (Simproc {stamp = stamp1, ...}, Simproc {stamp = stamp2, ...}) = stamp1 = stamp2; fun cert_simproc thy name {lhss, proc} = Simproc {name = name, lhss = map (Sign.cert_term thy) lhss, proc = proc, stamp = stamp ()}; fun transform_simproc phi (Simproc {name, lhss, proc, stamp}) = Simproc {name = name, lhss = map (Morphism.term phi) lhss, proc = Morphism.transform phi proc, stamp = stamp}; local fun add_proc (proc as Proc {name, lhs, ...}) ctxt = (cond_tracing ctxt (fn () => print_term ctxt ("Adding simplification procedure " ^ quote name ^ " for") lhs); ctxt |> map_simpset2 (fn (congs, procs, mk_rews, term_ord, subgoal_tac, loop_tacs, solvers) => (congs, Net.insert_term eq_proc (lhs, proc) procs, mk_rews, term_ord, subgoal_tac, loop_tacs, solvers)) handle Net.INSERT => (cond_warning ctxt (fn () => "Ignoring duplicate simplification procedure " ^ quote name); ctxt)); fun del_proc (proc as Proc {name, lhs, ...}) ctxt = ctxt |> map_simpset2 (fn (congs, procs, mk_rews, term_ord, subgoal_tac, loop_tacs, solvers) => (congs, Net.delete_term eq_proc (lhs, proc) procs, mk_rews, term_ord, subgoal_tac, loop_tacs, solvers)) handle Net.DELETE => (cond_warning ctxt (fn () => "Simplification procedure " ^ quote name ^ " not in simpset"); ctxt); fun prep_procs (Simproc {name, lhss, proc, stamp}) = lhss |> map (fn lhs => Proc {name = name, lhs = lhs, proc = Morphism.form proc, stamp = stamp}); in fun ctxt addsimprocs ps = fold (fold add_proc o prep_procs) ps ctxt; fun ctxt delsimprocs ps = fold (fold del_proc o 
prep_procs) ps ctxt; end; (* mk_rews *) local fun map_mk_rews f = map_simpset2 (fn (congs, procs, mk_rews, term_ord, subgoal_tac, loop_tacs, solvers) => let val {mk, mk_cong, mk_sym, mk_eq_True, reorient} = mk_rews; val (mk', mk_cong', mk_sym', mk_eq_True', reorient') = f (mk, mk_cong, mk_sym, mk_eq_True, reorient); val mk_rews' = {mk = mk', mk_cong = mk_cong', mk_sym = mk_sym', mk_eq_True = mk_eq_True', reorient = reorient'}; in (congs, procs, mk_rews', term_ord, subgoal_tac, loop_tacs, solvers) end); in fun mksimps ctxt = let val Simpset (_, {mk_rews = {mk, ...}, ...}) = simpset_of ctxt in mk ctxt end; fun set_mksimps mk = map_mk_rews (fn (_, mk_cong, mk_sym, mk_eq_True, reorient) => (mk, mk_cong, mk_sym, mk_eq_True, reorient)); fun set_mkcong mk_cong = map_mk_rews (fn (mk, _, mk_sym, mk_eq_True, reorient) => (mk, mk_cong, mk_sym, mk_eq_True, reorient)); fun set_mksym mk_sym = map_mk_rews (fn (mk, mk_cong, _, mk_eq_True, reorient) => (mk, mk_cong, mk_sym, mk_eq_True, reorient)); fun set_mkeqTrue mk_eq_True = map_mk_rews (fn (mk, mk_cong, mk_sym, _, reorient) => (mk, mk_cong, mk_sym, mk_eq_True, reorient)); fun set_reorient reorient = map_mk_rews (fn (mk, mk_cong, mk_sym, mk_eq_True, _) => (mk, mk_cong, mk_sym, mk_eq_True, reorient)); end; (* term_ord *) fun set_term_ord term_ord = map_simpset2 (fn (congs, procs, mk_rews, _, subgoal_tac, loop_tacs, solvers) => (congs, procs, mk_rews, term_ord, subgoal_tac, loop_tacs, solvers)); (* tactics *) fun set_subgoaler subgoal_tac = map_simpset2 (fn (congs, procs, mk_rews, term_ord, _, loop_tacs, solvers) => (congs, procs, mk_rews, term_ord, subgoal_tac, loop_tacs, solvers)); fun ctxt setloop tac = ctxt |> map_simpset2 (fn (congs, procs, mk_rews, term_ord, subgoal_tac, _, solvers) => (congs, procs, mk_rews, term_ord, subgoal_tac, [("", tac)], solvers)); fun ctxt addloop (name, tac) = ctxt |> map_simpset2 (fn (congs, procs, mk_rews, term_ord, subgoal_tac, loop_tacs, solvers) => (congs, procs, mk_rews, term_ord, subgoal_tac, AList.update (op =) (name, tac) loop_tacs, solvers)); fun ctxt delloop name = ctxt |> map_simpset2 (fn (congs, procs, mk_rews, term_ord, subgoal_tac, loop_tacs, solvers) => (congs, procs, mk_rews, term_ord, subgoal_tac, (if AList.defined (op =) loop_tacs name then () else cond_warning ctxt (fn () => "No such looper in simpset: " ^ quote name); AList.delete (op =) name loop_tacs), solvers)); fun ctxt setSSolver solver = ctxt |> map_simpset2 (fn (congs, procs, mk_rews, term_ord, subgoal_tac, loop_tacs, (unsafe_solvers, _)) => (congs, procs, mk_rews, term_ord, subgoal_tac, loop_tacs, (unsafe_solvers, [solver]))); fun ctxt addSSolver solver = ctxt |> map_simpset2 (fn (congs, procs, mk_rews, term_ord, subgoal_tac, loop_tacs, (unsafe_solvers, solvers)) => (congs, procs, mk_rews, term_ord, subgoal_tac, loop_tacs, (unsafe_solvers, insert eq_solver solver solvers))); fun ctxt setSolver solver = ctxt |> map_simpset2 (fn (congs, procs, mk_rews, term_ord, subgoal_tac, loop_tacs, (_, solvers)) => (congs, procs, mk_rews, term_ord, subgoal_tac, loop_tacs, ([solver], solvers))); fun ctxt addSolver solver = ctxt |> map_simpset2 (fn (congs, procs, mk_rews, term_ord, subgoal_tac, loop_tacs, (unsafe_solvers, solvers)) => (congs, procs, mk_rews, term_ord, subgoal_tac, loop_tacs, (insert eq_solver solver unsafe_solvers, solvers))); fun set_solvers solvers = map_simpset2 (fn (congs, procs, mk_rews, term_ord, subgoal_tac, loop_tacs, _) => (congs, procs, mk_rews, term_ord, subgoal_tac, loop_tacs, (solvers, solvers))); (* trace operations *) type 
trace_ops = {trace_invoke: {depth: int, term: term} -> Proof.context -> Proof.context, trace_apply: {unconditional: bool, term: term, thm: thm, rrule: rrule} -> Proof.context -> (Proof.context -> (thm * term) option) -> (thm * term) option}; structure Trace_Ops = Theory_Data ( type T = trace_ops; val empty: T = {trace_invoke = fn _ => fn ctxt => ctxt, trace_apply = fn _ => fn ctxt => fn cont => cont ctxt}; val extend = I; fun merge (trace_ops, _) = trace_ops; ); val set_trace_ops = Trace_Ops.put; val trace_ops = Trace_Ops.get o Proof_Context.theory_of; fun trace_invoke args ctxt = #trace_invoke (trace_ops ctxt) args ctxt; fun trace_apply args ctxt = #trace_apply (trace_ops ctxt) args ctxt; (** rewriting **) (* Uses conversions, see: L C Paulson, A higher-order implementation of rewriting, Science of Computer Programming 3 (1983), pages 119-149. *) fun check_conv ctxt msg thm thm' = let val thm'' = Thm.transitive thm thm' handle THM _ => let val nthm' = Thm.transitive (Thm.symmetric (Drule.beta_eta_conversion (Thm.lhs_of thm'))) thm' in Thm.transitive thm nthm' handle THM _ => let val nthm = Thm.transitive thm (Drule.beta_eta_conversion (Thm.rhs_of thm)) in Thm.transitive nthm nthm' end end val _ = if msg then cond_tracing ctxt (fn () => print_thm ctxt "SUCCEEDED" ("", thm')) else (); in SOME thm'' end handle THM _ => let val _ $ _ $ prop0 = Thm.prop_of thm; val _ = cond_tracing ctxt (fn () => print_thm ctxt "Proved wrong theorem (bad subgoaler?)" ("", thm') ^ "\n" ^ print_term ctxt "Should have proved:" prop0); in NONE end; (* mk_procrule *) fun mk_procrule ctxt thm = let val (prems, lhs, elhs, rhs, _) = decomp_simp thm val thm' = Thm.close_derivation \<^here> thm; in if rewrite_rule_extra_vars prems lhs rhs then (cond_warning ctxt (fn () => print_thm ctxt "Extra vars on rhs:" ("", thm)); []) else [mk_rrule2 {thm = thm', name = "", lhs = lhs, elhs = elhs, perm = false}] end; (* rewritec: conversion to apply the meta simpset to a term *) (*Since the rewriting strategy is bottom-up, we avoid re-normalizing already normalized terms by carrying around the rhs of the rewrite rule just applied. This is called the `skeleton'. It is decomposed in parallel with the term. Once a Var is encountered, the corresponding term is already in normal form. skel0 is a dummy skeleton that is to enforce complete normalization.*) val skel0 = Bound 0; (*Use rhs as skeleton only if the lhs does not contain unnormalized bits. The latter may happen iff there are weak congruence rules for constants in the lhs.*) fun uncond_skel ((_, weak), (lhs, rhs)) = if null weak then rhs (*optimization*) else if exists_subterm (fn Const (a, _) => member (op =) weak (true, a) | Free (a, _) => member (op =) weak (false, a) | _ => false) lhs then skel0 else rhs; (*Behaves like unconditional rule if rhs does not contain vars not in the lhs. Otherwise those vars may become instantiated with unnormalized terms while the premises are solved.*) fun cond_skel (args as (_, (lhs, rhs))) = if Vars.subset (vars_set rhs, vars_set lhs) then uncond_skel args else skel0; (* Rewriting -- we try in order: (1) beta reduction (2) unconditional rewrite rules (3) conditional rewrite rules (4) simplification procedures IMPORTANT: rewrite rules must not introduce new Vars or TVars! 
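   For orientation, this is realised in rewritec below as follows: a beta redex
   (Abs _ $ _) is contracted directly; otherwise the rules net is queried, with
   premise-free rules tried before conditional ones (sort_rrules), and a
   conditional rule only contributes once the supplied prover has discharged its
   premises; the procs net of simplification procedures is consulted only if no
   rewrite rule applies.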
*) fun rewritec (prover, maxt) ctxt t = let val thy = Proof_Context.theory_of ctxt; val Simpset ({rules, ...}, {congs, procs, term_ord, ...}) = simpset_of ctxt; val eta_thm = Thm.eta_conversion t; val eta_t' = Thm.rhs_of eta_thm; val eta_t = Thm.term_of eta_t'; fun rew rrule = let val {thm = thm0, name, lhs, elhs = elhs0, extra, fo, perm} = rrule; val thm = Thm.transfer thy thm0; val elhs = Thm.transfer_cterm thy elhs0; val prop = Thm.prop_of thm; val (rthm, elhs') = if maxt = ~1 orelse not extra then (thm, elhs) else (Thm.incr_indexes (maxt + 1) thm, Thm.incr_indexes_cterm (maxt + 1) elhs); val insts = if fo then Thm.first_order_match (elhs', eta_t') else Thm.match (elhs', eta_t'); val thm' = Thm.instantiate insts (Thm.rename_boundvars lhs eta_t rthm); val prop' = Thm.prop_of thm'; - val unconditional = (Logic.count_prems prop' = 0); + val unconditional = Logic.no_prems prop'; val (lhs', rhs') = Logic.dest_equals (Logic.strip_imp_concl prop'); val trace_args = {unconditional = unconditional, term = eta_t, thm = thm', rrule = rrule}; in if perm andalso is_greater_equal (term_ord (rhs', lhs')) then (cond_tracing ctxt (fn () => print_thm ctxt "Cannot apply permutative rewrite rule" (name, thm) ^ "\n" ^ print_thm ctxt "Term does not become smaller:" ("", thm')); NONE) else (cond_tracing ctxt (fn () => print_thm ctxt "Applying instance of rewrite rule" (name, thm)); if unconditional then (cond_tracing ctxt (fn () => print_thm ctxt "Rewriting:" ("", thm')); trace_apply trace_args ctxt (fn ctxt' => let val lr = Logic.dest_equals prop; val SOME thm'' = check_conv ctxt' false eta_thm thm'; in SOME (thm'', uncond_skel (congs, lr)) end)) else (cond_tracing ctxt (fn () => print_thm ctxt "Trying to rewrite:" ("", thm')); if simp_depth ctxt > Config.get ctxt simp_depth_limit then (cond_tracing ctxt (fn () => "simp_depth_limit exceeded - giving up"); NONE) else trace_apply trace_args ctxt (fn ctxt' => (case prover ctxt' thm' of NONE => (cond_tracing ctxt' (fn () => print_thm ctxt' "FAILED" ("", thm')); NONE) | SOME thm2 => (case check_conv ctxt' true eta_thm thm2 of NONE => NONE | SOME thm2' => let val concl = Logic.strip_imp_concl prop; val lr = Logic.dest_equals concl; in SOME (thm2', cond_skel (congs, lr)) end))))) end; fun rews [] = NONE | rews (rrule :: rrules) = let val opt = rew rrule handle Pattern.MATCH => NONE in (case opt of NONE => rews rrules | some => some) end; fun sort_rrules rrs = let fun is_simple ({thm, ...}: rrule) = (case Thm.prop_of thm of Const ("Pure.eq", _) $ _ $ _ => true | _ => false); fun sort [] (re1, re2) = re1 @ re2 | sort (rr :: rrs) (re1, re2) = if is_simple rr then sort rrs (rr :: re1, re2) else sort rrs (re1, rr :: re2); in sort rrs ([], []) end; fun proc_rews [] = NONE | proc_rews (Proc {name, proc, lhs, ...} :: ps) = if Pattern.matches (Proof_Context.theory_of ctxt) (lhs, Thm.term_of t) then (cond_tracing' ctxt simp_debug (fn () => print_term ctxt ("Trying procedure " ^ quote name ^ " on:") eta_t); (case proc ctxt eta_t' of NONE => (cond_tracing' ctxt simp_debug (fn () => "FAILED"); proc_rews ps) | SOME raw_thm => (cond_tracing ctxt (fn () => print_thm ctxt ("Procedure " ^ quote name ^ " produced rewrite rule:") ("", raw_thm)); (case rews (mk_procrule ctxt raw_thm) of NONE => (cond_tracing ctxt (fn () => print_term ctxt ("IGNORED result of simproc " ^ quote name ^ " -- does not match") (Thm.term_of t)); proc_rews ps) | some => some)))) else proc_rews ps; in (case eta_t of Abs _ $ _ => SOME (Thm.transitive eta_thm (Thm.beta_conversion false eta_t'), skel0) | _ => 
(case rews (sort_rrules (Net.match_term rules eta_t)) of NONE => proc_rews (Net.match_term procs eta_t) | some => some)) end; (* conversion to apply a congruence rule to a term *) fun congc prover ctxt maxt cong t = let val rthm = Thm.incr_indexes (maxt + 1) cong; val rlhs = fst (Thm.dest_equals (Drule.strip_imp_concl (Thm.cprop_of rthm))); val insts = Thm.match (rlhs, t) (* Thm.match can raise Pattern.MATCH; is handled when congc is called *) val thm' = Thm.instantiate insts (Thm.rename_boundvars (Thm.term_of rlhs) (Thm.term_of t) rthm); val _ = cond_tracing ctxt (fn () => print_thm ctxt "Applying congruence rule:" ("", thm')); fun err (msg, thm) = (cond_tracing ctxt (fn () => print_thm ctxt msg ("", thm)); NONE); in (case prover thm' of NONE => err ("Congruence proof failed. Could not prove", thm') | SOME thm2 => (case check_conv ctxt true (Drule.beta_eta_conversion t) thm2 of NONE => err ("Congruence proof failed. Should not have proved", thm2) | SOME thm2' => if op aconv (apply2 Thm.term_of (Thm.dest_equals (Thm.cprop_of thm2'))) then NONE else SOME thm2')) end; val vA = (("A", 0), propT); val vB = (("B", 0), propT); val vC = (("C", 0), propT); fun transitive1 NONE NONE = NONE | transitive1 (SOME thm1) NONE = SOME thm1 | transitive1 NONE (SOME thm2) = SOME thm2 | transitive1 (SOME thm1) (SOME thm2) = SOME (Thm.transitive thm1 thm2); fun transitive2 thm = transitive1 (SOME thm); fun transitive3 thm = transitive1 thm o SOME; fun bottomc ((simprem, useprem, mutsimp), prover, maxidx) = let fun botc skel ctxt t = if is_Var skel then NONE else (case subc skel ctxt t of some as SOME thm1 => (case rewritec (prover, maxidx) ctxt (Thm.rhs_of thm1) of SOME (thm2, skel2) => transitive2 (Thm.transitive thm1 thm2) (botc skel2 ctxt (Thm.rhs_of thm2)) | NONE => some) | NONE => (case rewritec (prover, maxidx) ctxt t of SOME (thm2, skel2) => transitive2 thm2 (botc skel2 ctxt (Thm.rhs_of thm2)) | NONE => NONE)) and try_botc ctxt t = (case botc skel0 ctxt t of SOME trec1 => trec1 | NONE => Thm.reflexive t) and subc skel ctxt t0 = let val Simpset (_, {congs, ...}) = simpset_of ctxt in (case Thm.term_of t0 of Abs (a, T, _) => let val (v, ctxt') = Variable.next_bound (a, T) ctxt; val b = #1 (Term.dest_Free v); val (v', t') = Thm.dest_abs (SOME b) t0; val b' = #1 (Term.dest_Free (Thm.term_of v')); val _ = if b <> b' then warning ("Bad Simplifier context: renamed bound variable " ^ quote b ^ " to " ^ quote b' ^ Position.here (Position.thread_data ())) else (); val skel' = (case skel of Abs (_, _, sk) => sk | _ => skel0); in (case botc skel' ctxt' t' of SOME thm => SOME (Thm.abstract_rule a v' thm) | NONE => NONE) end | t $ _ => (case t of Const ("Pure.imp", _) $ _ => impc t0 ctxt | Abs _ => let val thm = Thm.beta_conversion false t0 in (case subc skel0 ctxt (Thm.rhs_of thm) of NONE => SOME thm | SOME thm' => SOME (Thm.transitive thm thm')) end | _ => let fun appc () = let val (tskel, uskel) = (case skel of tskel $ uskel => (tskel, uskel) | _ => (skel0, skel0)); val (ct, cu) = Thm.dest_comb t0; in (case botc tskel ctxt ct of SOME thm1 => (case botc uskel ctxt cu of SOME thm2 => SOME (Thm.combination thm1 thm2) | NONE => SOME (Thm.combination thm1 (Thm.reflexive cu))) | NONE => (case botc uskel ctxt cu of SOME thm1 => SOME (Thm.combination (Thm.reflexive ct) thm1) | NONE => NONE)) end; val (h, ts) = strip_comb t; in (case cong_name h of SOME a => (case Congtab.lookup (fst congs) a of NONE => appc () | SOME cong => (*post processing: some partial applications h t1 ... tj, j <= length ts, may be a redex. 
Example: map (\x. x) = (\xs. xs) wrt map_cong*) (let val thm = congc (prover ctxt) ctxt maxidx cong t0; val t = the_default t0 (Option.map Thm.rhs_of thm); val (cl, cr) = Thm.dest_comb t val dVar = Var(("", 0), dummyT) val skel = list_comb (h, replicate (length ts) dVar) in (case botc skel ctxt cl of NONE => thm | SOME thm' => transitive3 thm (Thm.combination thm' (Thm.reflexive cr))) end handle Pattern.MATCH => appc ())) | _ => appc ()) end) | _ => NONE) end and impc ct ctxt = if mutsimp then mut_impc0 [] ct [] [] ctxt else nonmut_impc ct ctxt and rules_of_prem prem ctxt = if maxidx_of_term (Thm.term_of prem) <> ~1 then (cond_tracing ctxt (fn () => print_term ctxt "Cannot add premise as rewrite rule because it contains (type) unknowns:" (Thm.term_of prem)); (([], NONE), ctxt)) else let val (asm, ctxt') = Thm.assume_hyps prem ctxt in ((extract_safe_rrules ctxt' asm, SOME asm), ctxt') end and add_rrules (rrss, asms) ctxt = (fold o fold) insert_rrule rrss ctxt |> add_prems (map_filter I asms) and disch r prem eq = let val (lhs, rhs) = Thm.dest_equals (Thm.cprop_of eq); val eq' = Thm.implies_elim (Thm.instantiate (TVars.empty, Vars.make [(vA, prem), (vB, lhs), (vC, rhs)]) Drule.imp_cong) (Thm.implies_intr prem eq); in if not r then eq' else let val (prem', concl) = Thm.dest_implies lhs; val (prem'', _) = Thm.dest_implies rhs; in Thm.transitive (Thm.transitive (Thm.instantiate (TVars.empty, Vars.make [(vA, prem'), (vB, prem), (vC, concl)]) Drule.swap_prems_eq) eq') (Thm.instantiate (TVars.empty, Vars.make [(vA, prem), (vB, prem''), (vC, concl)]) Drule.swap_prems_eq) end end and rebuild [] _ _ _ _ eq = eq | rebuild (prem :: prems) concl (_ :: rrss) (_ :: asms) ctxt eq = let val ctxt' = add_rrules (rev rrss, rev asms) ctxt; val concl' = Drule.mk_implies (prem, the_default concl (Option.map Thm.rhs_of eq)); val dprem = Option.map (disch false prem); in (case rewritec (prover, maxidx) ctxt' concl' of NONE => rebuild prems concl' rrss asms ctxt (dprem eq) | SOME (eq', _) => transitive2 (fold (disch false) prems (the (transitive3 (dprem eq) eq'))) (mut_impc0 (rev prems) (Thm.rhs_of eq') (rev rrss) (rev asms) ctxt)) end and mut_impc0 prems concl rrss asms ctxt = let val prems' = strip_imp_prems concl; val ((rrss', asms'), ctxt') = fold_map rules_of_prem prems' ctxt |>> split_list; in mut_impc (prems @ prems') (strip_imp_concl concl) (rrss @ rrss') (asms @ asms') [] [] [] [] ctxt' ~1 ~1 end and mut_impc [] concl [] [] prems' rrss' asms' eqns ctxt changed k = transitive1 (fold (fn (eq1, prem) => fn eq2 => transitive1 eq1 (Option.map (disch false prem) eq2)) (eqns ~~ prems') NONE) (if changed > 0 then mut_impc (rev prems') concl (rev rrss') (rev asms') [] [] [] [] ctxt ~1 changed else rebuild prems' concl rrss' asms' ctxt (botc skel0 (add_rrules (rev rrss', rev asms') ctxt) concl)) | mut_impc (prem :: prems) concl (rrs :: rrss) (asm :: asms) prems' rrss' asms' eqns ctxt changed k = (case (if k = 0 then NONE else botc skel0 (add_rrules (rev rrss' @ rrss, rev asms' @ asms) ctxt) prem) of NONE => mut_impc prems concl rrss asms (prem :: prems') (rrs :: rrss') (asm :: asms') (NONE :: eqns) ctxt changed (if k = 0 then 0 else k - 1) | SOME eqn => let val prem' = Thm.rhs_of eqn; val tprems = map Thm.term_of prems; val i = 1 + fold Integer.max (map (fn p => find_index (fn q => q aconv p) tprems) (Thm.hyps_of eqn)) ~1; val ((rrs', asm'), ctxt') = rules_of_prem prem' ctxt; in mut_impc prems concl rrss asms (prem' :: prems') (rrs' :: rrss') (asm' :: asms') (SOME (fold_rev (disch true) (take i prems) 
(Drule.imp_cong_rule eqn (Thm.reflexive (Drule.list_implies (drop i prems, concl))))) :: eqns) ctxt' (length prems') ~1 end) (*legacy code -- only for backwards compatibility*) and nonmut_impc ct ctxt = let val (prem, conc) = Thm.dest_implies ct; val thm1 = if simprem then botc skel0 ctxt prem else NONE; val prem1 = the_default prem (Option.map Thm.rhs_of thm1); val ctxt1 = if not useprem then ctxt else let val ((rrs, asm), ctxt') = rules_of_prem prem1 ctxt in add_rrules ([rrs], [asm]) ctxt' end; in (case botc skel0 ctxt1 conc of NONE => (case thm1 of NONE => NONE | SOME thm1' => SOME (Drule.imp_cong_rule thm1' (Thm.reflexive conc))) | SOME thm2 => let val thm2' = disch false prem1 thm2 in (case thm1 of NONE => SOME thm2' | SOME thm1' => SOME (Thm.transitive (Drule.imp_cong_rule thm1' (Thm.reflexive conc)) thm2')) end) end; in try_botc end; (* Meta-rewriting: rewrites t to u and returns the theorem t \ u *) (* Parameters: mode = (simplify A, use A in simplifying B, use prems of B (if B is again a meta-impl.) to simplify A) when simplifying A \ B prover: how to solve premises in conditional rewrites and congruences *) fun rewrite_cterm mode prover raw_ctxt raw_ct = let val thy = Proof_Context.theory_of raw_ctxt; val ct = raw_ct |> Thm.transfer_cterm thy |> Thm.adjust_maxidx_cterm ~1; val maxidx = Thm.maxidx_of_cterm ct; val ctxt = raw_ctxt |> Variable.set_body true |> Context_Position.set_visible false |> inc_simp_depth |> (fn ctxt => trace_invoke {depth = simp_depth ctxt, term = Thm.term_of ct} ctxt); val _ = cond_tracing ctxt (fn () => print_term ctxt "SIMPLIFIER INVOKED ON THE FOLLOWING TERM:" (Thm.term_of ct)); in ct |> bottomc (mode, Option.map (Drule.flexflex_unique (SOME ctxt)) oo prover, maxidx) ctxt |> Thm.solve_constraints end; val simple_prover = SINGLE o (fn ctxt => ALLGOALS (resolve_tac ctxt (prems_of ctxt))); fun rewrite _ _ [] = Thm.reflexive | rewrite ctxt full thms = rewrite_cterm (full, false, false) simple_prover (init_simpset thms ctxt); fun rewrite_rule ctxt = Conv.fconv_rule o rewrite ctxt true; (*simple term rewriting -- no proof*) fun rewrite_term thy rules procs = Pattern.rewrite_term thy (map decomp_simp' rules) procs; fun rewrite_thm mode prover ctxt = Conv.fconv_rule (rewrite_cterm mode prover ctxt); (*Rewrite the subgoals of a proof state (represented by a theorem)*) fun rewrite_goals_rule ctxt thms th = Conv.fconv_rule (Conv.prems_conv ~1 (rewrite_cterm (true, true, true) simple_prover (init_simpset thms ctxt))) th; (** meta-rewriting tactics **) (*Rewrite all subgoals*) fun rewrite_goals_tac ctxt defs = PRIMITIVE (rewrite_goals_rule ctxt defs); (*Rewrite one subgoal*) fun generic_rewrite_goal_tac mode prover_tac ctxt i thm = if 0 < i andalso i <= Thm.nprems_of thm then Seq.single (Conv.gconv_rule (rewrite_cterm mode (SINGLE o prover_tac) ctxt) i thm) else Seq.empty; fun rewrite_goal_tac ctxt thms = generic_rewrite_goal_tac (true, false, false) (K no_tac) (init_simpset thms ctxt); (*Prunes all redundant parameters from the proof state by rewriting.*) fun prune_params_tac ctxt = rewrite_goals_tac ctxt [Drule.triv_forall_equality]; (* for folding definitions, handling critical pairs *) (*The depth of nesting in a term*) fun term_depth (Abs (_, _, t)) = 1 + term_depth t | term_depth (f $ t) = 1 + Int.max (term_depth f, term_depth t) | term_depth _ = 0; val lhs_of_thm = #1 o Logic.dest_equals o Thm.prop_of; (*folding should handle critical pairs! E.g. 
K \ Inl 0, S \ Inr (Inl 0) Returns longest lhs first to avoid folding its subexpressions.*) fun sort_lhs_depths defs = let val keylist = AList.make (term_depth o lhs_of_thm) defs val keys = sort_distinct (rev_order o int_ord) (map #2 keylist) in map (AList.find (op =) keylist) keys end; val rev_defs = sort_lhs_depths o map Thm.symmetric; fun fold_rule ctxt defs = fold (rewrite_rule ctxt) (rev_defs defs); fun fold_goals_tac ctxt defs = EVERY (map (rewrite_goals_tac ctxt) (rev_defs defs)); (* HHF normal form: \ before \, outermost \ generalized *) local fun gen_norm_hhf ss ctxt0 th0 = let val (ctxt, th) = Thm.join_transfer_context (ctxt0, th0); val th' = if Drule.is_norm_hhf (Thm.prop_of th) then th else Conv.fconv_rule (rewrite_cterm (true, false, false) (K (K NONE)) (put_simpset ss ctxt)) th; in th' |> Thm.adjust_maxidx_thm ~1 |> Variable.gen_all ctxt end; val hhf_ss = Context.the_local_context () |> init_simpset Drule.norm_hhf_eqs |> simpset_of; val hhf_protect_ss = Context.the_local_context () |> init_simpset Drule.norm_hhf_eqs |> add_eqcong Drule.protect_cong |> simpset_of; in val norm_hhf = gen_norm_hhf hhf_ss; val norm_hhf_protect = gen_norm_hhf hhf_protect_ss; end; end; structure Basic_Meta_Simplifier: BASIC_RAW_SIMPLIFIER = Raw_Simplifier; open Basic_Meta_Simplifier; diff --git a/src/Pure/thm.ML b/src/Pure/thm.ML --- a/src/Pure/thm.ML +++ b/src/Pure/thm.ML @@ -1,2392 +1,2392 @@ (* Title: Pure/thm.ML Author: Lawrence C Paulson, Cambridge University Computer Laboratory Author: Makarius The very core of Isabelle's Meta Logic: certified types and terms, derivations, theorems, inference rules (including lifting and resolution), oracles. *) infix 0 RS RSN; signature BASIC_THM = sig type ctyp type cterm exception CTERM of string * cterm list type thm type conv = cterm -> thm exception THM of string * int * thm list val RSN: thm * (int * thm) -> thm val RS: thm * thm -> thm end; signature THM = sig include BASIC_THM (*certified types*) val typ_of: ctyp -> typ val global_ctyp_of: theory -> typ -> ctyp val ctyp_of: Proof.context -> typ -> ctyp val dest_ctyp: ctyp -> ctyp list val dest_ctypN: int -> ctyp -> ctyp val dest_ctyp0: ctyp -> ctyp val dest_ctyp1: ctyp -> ctyp val make_ctyp: ctyp -> ctyp list -> ctyp (*certified terms*) val term_of: cterm -> term val typ_of_cterm: cterm -> typ val ctyp_of_cterm: cterm -> ctyp val maxidx_of_cterm: cterm -> int val global_cterm_of: theory -> term -> cterm val cterm_of: Proof.context -> term -> cterm val renamed_term: term -> cterm -> cterm val fast_term_ord: cterm ord val term_ord: cterm ord val dest_comb: cterm -> cterm * cterm val dest_fun: cterm -> cterm val dest_arg: cterm -> cterm val dest_fun2: cterm -> cterm val dest_arg1: cterm -> cterm val dest_abs: string option -> cterm -> cterm * cterm val rename_tvar: indexname -> ctyp -> ctyp val var: indexname * ctyp -> cterm val apply: cterm -> cterm -> cterm val lambda_name: string * cterm -> cterm -> cterm val lambda: cterm -> cterm -> cterm val adjust_maxidx_cterm: int -> cterm -> cterm val incr_indexes_cterm: int -> cterm -> cterm val match: cterm * cterm -> ctyp TVars.table * cterm Vars.table val first_order_match: cterm * cterm -> ctyp TVars.table * cterm Vars.table (*theorems*) val fold_terms: {hyps: bool} -> (term -> 'a -> 'a) -> thm -> 'a -> 'a val fold_atomic_ctyps: {hyps: bool} -> (typ -> bool) -> (ctyp -> 'a -> 'a) -> thm -> 'a -> 'a val fold_atomic_cterms: {hyps: bool} -> (term -> bool) -> (cterm -> 'a -> 'a) -> thm -> 'a -> 'a val terms_of_tpairs: (term * term) list -> term list val 
full_prop_of: thm -> term val theory_id: thm -> Context.theory_id val theory_name: thm -> string val maxidx_of: thm -> int val maxidx_thm: thm -> int -> int val shyps_of: thm -> sort Ord_List.T val hyps_of: thm -> term list val prop_of: thm -> term val tpairs_of: thm -> (term * term) list val concl_of: thm -> term val prems_of: thm -> term list val nprems_of: thm -> int val no_prems: thm -> bool val major_prem_of: thm -> term val cprop_of: thm -> cterm val cprem_of: thm -> int -> cterm val cconcl_of: thm -> cterm val cprems_of: thm -> cterm list val chyps_of: thm -> cterm list val thm_ord: thm ord exception CONTEXT of string * ctyp list * cterm list * thm list * Context.generic option val theory_of_cterm: cterm -> theory val theory_of_thm: thm -> theory val trim_context_ctyp: ctyp -> ctyp val trim_context_cterm: cterm -> cterm val transfer_ctyp: theory -> ctyp -> ctyp val transfer_cterm: theory -> cterm -> cterm val transfer: theory -> thm -> thm val transfer': Proof.context -> thm -> thm val transfer'': Context.generic -> thm -> thm val join_transfer: theory -> thm -> thm val join_transfer_context: Proof.context * thm -> Proof.context * thm val renamed_prop: term -> thm -> thm val weaken: cterm -> thm -> thm val weaken_sorts: sort list -> cterm -> cterm val proof_bodies_of: thm list -> proof_body list val proof_body_of: thm -> proof_body val proof_of: thm -> proof val reconstruct_proof_of: thm -> Proofterm.proof val consolidate: thm list -> unit val expose_proofs: theory -> thm list -> unit val expose_proof: theory -> thm -> unit val future: thm future -> cterm -> thm val thm_deps: thm -> Proofterm.thm Ord_List.T val extra_shyps: thm -> sort list val strip_shyps: thm -> thm val derivation_closed: thm -> bool val derivation_name: thm -> string val derivation_id: thm -> Proofterm.thm_id option val raw_derivation_name: thm -> string val expand_name: thm -> Proofterm.thm_header -> string option val name_derivation: string * Position.T -> thm -> thm val close_derivation: Position.T -> thm -> thm val trim_context: thm -> thm val axiom: theory -> string -> thm val all_axioms_of: theory -> (string * thm) list val get_tags: thm -> Properties.T val map_tags: (Properties.T -> Properties.T) -> thm -> thm val norm_proof: thm -> thm val adjust_maxidx_thm: int -> thm -> thm (*type classes*) val the_classrel: theory -> class * class -> thm val the_arity: theory -> string * sort list * class -> thm val classrel_proof: theory -> class * class -> proof val arity_proof: theory -> string * sort list * class -> proof (*oracles*) val add_oracle: binding * ('a -> cterm) -> theory -> (string * ('a -> thm)) * theory val oracle_space: theory -> Name_Space.T val pretty_oracle: Proof.context -> string -> Pretty.T val extern_oracles: bool -> Proof.context -> (Markup.T * xstring) list val check_oracle: Proof.context -> xstring * Position.T -> string (*inference rules*) val assume: cterm -> thm val implies_intr: cterm -> thm -> thm val implies_elim: thm -> thm -> thm val forall_intr: cterm -> thm -> thm val forall_elim: cterm -> thm -> thm val reflexive: cterm -> thm val symmetric: thm -> thm val transitive: thm -> thm -> thm val beta_conversion: bool -> conv val eta_conversion: conv val eta_long_conversion: conv val abstract_rule: string -> cterm -> thm -> thm val combination: thm -> thm -> thm val equal_intr: thm -> thm -> thm val equal_elim: thm -> thm -> thm val solve_constraints: thm -> thm val flexflex_rule: Proof.context option -> thm -> thm Seq.seq val generalize: Names.set * Names.set -> int -> thm -> thm val 
generalize_cterm: Names.set * Names.set -> int -> cterm -> cterm val generalize_ctyp: Names.set -> int -> ctyp -> ctyp val instantiate: ctyp TVars.table * cterm Vars.table -> thm -> thm val instantiate_cterm: ctyp TVars.table * cterm Vars.table -> cterm -> cterm val trivial: cterm -> thm val of_class: ctyp * class -> thm val unconstrainT: thm -> thm val varifyT_global': TFrees.set -> thm -> ((string * sort) * indexname) list * thm val varifyT_global: thm -> thm val legacy_freezeT: thm -> thm val plain_prop_of: thm -> term val dest_state: thm * int -> (term * term) list * term list * term * term val lift_rule: cterm -> thm -> thm val incr_indexes: int -> thm -> thm val assumption: Proof.context option -> int -> thm -> thm Seq.seq val eq_assumption: int -> thm -> thm val rotate_rule: int -> int -> thm -> thm val permute_prems: int -> int -> thm -> thm val bicompose: Proof.context option -> {flatten: bool, match: bool, incremented: bool} -> bool * thm * int -> int -> thm -> thm Seq.seq val biresolution: Proof.context option -> bool -> (bool * thm) list -> int -> thm -> thm Seq.seq val thynames_of_arity: theory -> string * class -> string list val add_classrel: thm -> theory -> theory val add_arity: thm -> theory -> theory end; structure Thm: THM = struct (*** Certified terms and types ***) (** certified types **) datatype ctyp = Ctyp of {cert: Context.certificate, T: typ, maxidx: int, sorts: sort Ord_List.T}; fun typ_of (Ctyp {T, ...}) = T; fun global_ctyp_of thy raw_T = let val T = Sign.certify_typ thy raw_T; val maxidx = Term.maxidx_of_typ T; val sorts = Sorts.insert_typ T []; in Ctyp {cert = Context.Certificate thy, T = T, maxidx = maxidx, sorts = sorts} end; val ctyp_of = global_ctyp_of o Proof_Context.theory_of; fun dest_ctyp (Ctyp {cert, T = Type (_, Ts), maxidx, sorts}) = map (fn T => Ctyp {cert = cert, T = T, maxidx = maxidx, sorts = sorts}) Ts | dest_ctyp cT = raise TYPE ("dest_ctyp", [typ_of cT], []); fun dest_ctypN n (Ctyp {cert, T, maxidx, sorts}) = let fun err () = raise TYPE ("dest_ctypN", [T], []) in (case T of Type (_, Ts) => Ctyp {cert = cert, T = nth Ts n handle General.Subscript => err (), maxidx = maxidx, sorts = sorts} | _ => err ()) end; val dest_ctyp0 = dest_ctypN 0; val dest_ctyp1 = dest_ctypN 1; fun join_certificate_ctyp (Ctyp {cert, ...}) cert0 = Context.join_certificate (cert0, cert); fun union_sorts_ctyp (Ctyp {sorts, ...}) sorts0 = Sorts.union sorts0 sorts; fun maxidx_ctyp (Ctyp {maxidx, ...}) maxidx0 = Int.max (maxidx0, maxidx); fun make_ctyp (Ctyp {cert, T, maxidx = _, sorts = _}) cargs = let val As = map typ_of cargs; fun err () = raise TYPE ("make_ctyp", T :: As, []); in (case T of Type (a, args) => Ctyp { cert = fold join_certificate_ctyp cargs cert, maxidx = fold maxidx_ctyp cargs ~1, sorts = fold union_sorts_ctyp cargs [], T = if length args = length cargs then Type (a, As) else err ()} | _ => err ()) end; (** certified terms **) (*certified terms with checked typ, maxidx, and sorts*) datatype cterm = Cterm of {cert: Context.certificate, t: term, T: typ, maxidx: int, sorts: sort Ord_List.T}; exception CTERM of string * cterm list; fun term_of (Cterm {t, ...}) = t; fun typ_of_cterm (Cterm {T, ...}) = T; fun ctyp_of_cterm (Cterm {cert, T, maxidx, sorts, ...}) = Ctyp {cert = cert, T = T, maxidx = maxidx, sorts = sorts}; fun maxidx_of_cterm (Cterm {maxidx, ...}) = maxidx; fun global_cterm_of thy tm = let val (t, T, maxidx) = Sign.certify_term thy tm; val sorts = Sorts.insert_term t []; in Cterm {cert = Context.Certificate thy, t = t, T = T, maxidx = maxidx, 
sorts = sorts} end; val cterm_of = global_cterm_of o Proof_Context.theory_of; fun join_certificate0 (Cterm {cert = cert1, ...}, Cterm {cert = cert2, ...}) = Context.join_certificate (cert1, cert2); fun renamed_term t' (Cterm {cert, t, T, maxidx, sorts}) = if t aconv t' then Cterm {cert = cert, t = t', T = T, maxidx = maxidx, sorts = sorts} else raise TERM ("renamed_term: terms disagree", [t, t']); val fast_term_ord = Term_Ord.fast_term_ord o apply2 term_of; val term_ord = Term_Ord.term_ord o apply2 term_of; (* destructors *) fun dest_comb (Cterm {t = c $ a, T, cert, maxidx, sorts}) = let val A = Term.argument_type_of c 0 in (Cterm {t = c, T = A --> T, cert = cert, maxidx = maxidx, sorts = sorts}, Cterm {t = a, T = A, cert = cert, maxidx = maxidx, sorts = sorts}) end | dest_comb ct = raise CTERM ("dest_comb", [ct]); fun dest_fun (Cterm {t = c $ _, T, cert, maxidx, sorts}) = let val A = Term.argument_type_of c 0 in Cterm {t = c, T = A --> T, cert = cert, maxidx = maxidx, sorts = sorts} end | dest_fun ct = raise CTERM ("dest_fun", [ct]); fun dest_arg (Cterm {t = c $ a, T = _, cert, maxidx, sorts}) = let val A = Term.argument_type_of c 0 in Cterm {t = a, T = A, cert = cert, maxidx = maxidx, sorts = sorts} end | dest_arg ct = raise CTERM ("dest_arg", [ct]); fun dest_fun2 (Cterm {t = c $ _ $ _, T, cert, maxidx, sorts}) = let val A = Term.argument_type_of c 0; val B = Term.argument_type_of c 1; in Cterm {t = c, T = A --> B --> T, cert = cert, maxidx = maxidx, sorts = sorts} end | dest_fun2 ct = raise CTERM ("dest_fun2", [ct]); fun dest_arg1 (Cterm {t = c $ a $ _, T = _, cert, maxidx, sorts}) = let val A = Term.argument_type_of c 0 in Cterm {t = a, T = A, cert = cert, maxidx = maxidx, sorts = sorts} end | dest_arg1 ct = raise CTERM ("dest_arg1", [ct]); fun dest_abs a (Cterm {t = Abs (x, T, t), T = Type ("fun", [_, U]), cert, maxidx, sorts}) = let val (y', t') = Term.dest_abs (the_default x a, T, t) in (Cterm {t = Free (y', T), T = T, cert = cert, maxidx = maxidx, sorts = sorts}, Cterm {t = t', T = U, cert = cert, maxidx = maxidx, sorts = sorts}) end | dest_abs _ ct = raise CTERM ("dest_abs", [ct]); (* constructors *) fun rename_tvar (a, i) (Ctyp {cert, T, maxidx, sorts}) = let val S = (case T of TFree (_, S) => S | TVar (_, S) => S | _ => raise TYPE ("rename_tvar: no variable", [T], [])); val _ = if i < 0 then raise TYPE ("rename_tvar: bad index", [TVar ((a, i), S)], []) else (); in Ctyp {cert = cert, T = TVar ((a, i), S), maxidx = Int.max (i, maxidx), sorts = sorts} end; fun var ((x, i), Ctyp {cert, T, maxidx, sorts}) = if i < 0 then raise TERM ("var: bad index", [Var ((x, i), T)]) else Cterm {cert = cert, t = Var ((x, i), T), T = T, maxidx = Int.max (i, maxidx), sorts = sorts}; fun apply (cf as Cterm {t = f, T = Type ("fun", [dty, rty]), maxidx = maxidx1, sorts = sorts1, ...}) (cx as Cterm {t = x, T, maxidx = maxidx2, sorts = sorts2, ...}) = if T = dty then Cterm {cert = join_certificate0 (cf, cx), t = f $ x, T = rty, maxidx = Int.max (maxidx1, maxidx2), sorts = Sorts.union sorts1 sorts2} else raise CTERM ("apply: types don't agree", [cf, cx]) | apply cf cx = raise CTERM ("apply: first arg is not a function", [cf, cx]); fun lambda_name (x, ct1 as Cterm {t = t1, T = T1, maxidx = maxidx1, sorts = sorts1, ...}) (ct2 as Cterm {t = t2, T = T2, maxidx = maxidx2, sorts = sorts2, ...}) = let val t = Term.lambda_name (x, t1) t2 in Cterm {cert = join_certificate0 (ct1, ct2), t = t, T = T1 --> T2, maxidx = Int.max (maxidx1, maxidx2), sorts = Sorts.union sorts1 sorts2} end; fun lambda t u = lambda_name ("", 
t) u; (* indexes *) fun adjust_maxidx_cterm i (ct as Cterm {cert, t, T, maxidx, sorts}) = if maxidx = i then ct else if maxidx < i then Cterm {maxidx = i, cert = cert, t = t, T = T, sorts = sorts} else Cterm {maxidx = Int.max (maxidx_of_term t, i), cert = cert, t = t, T = T, sorts = sorts}; fun incr_indexes_cterm i (ct as Cterm {cert, t, T, maxidx, sorts}) = if i < 0 then raise CTERM ("negative increment", [ct]) else if i = 0 then ct else Cterm {cert = cert, t = Logic.incr_indexes ([], [], i) t, T = Logic.incr_tvar i T, maxidx = maxidx + i, sorts = sorts}; (*** Derivations and Theorems ***) (* sort constraints *) type constraint = {theory: theory, typ: typ, sort: sort}; local val constraint_ord : constraint ord = Context.theory_id_ord o apply2 (Context.theory_id o #theory) ||| Term_Ord.typ_ord o apply2 #typ ||| Term_Ord.sort_ord o apply2 #sort; val smash_atyps = map_atyps (fn TVar (_, S) => Term.aT S | TFree (_, S) => Term.aT S | T => T); in val union_constraints = Ord_List.union constraint_ord; fun insert_constraints thy (T, S) = let val ignored = S = [] orelse (case T of TFree (_, S') => S = S' | TVar (_, S') => S = S' | _ => false); in if ignored then I else Ord_List.insert constraint_ord {theory = thy, typ = smash_atyps T, sort = S} end; fun insert_constraints_env thy env = let val tyenv = Envir.type_env env; fun insert ([], _) = I | insert (S, T) = insert_constraints thy (Envir.norm_type tyenv T, S); in tyenv |> Vartab.fold (insert o #2) end; end; (* datatype thm *) datatype thm = Thm of deriv * (*derivation*) {cert: Context.certificate, (*background theory certificate*) tags: Properties.T, (*additional annotations/comments*) maxidx: int, (*maximum index of any Var or TVar*) constraints: constraint Ord_List.T, (*implicit proof obligations for sort constraints*) shyps: sort Ord_List.T, (*sort hypotheses*) hyps: term Ord_List.T, (*hypotheses*) tpairs: (term * term) list, (*flex-flex pairs*) prop: term} (*conclusion*) and deriv = Deriv of {promises: (serial * thm future) Ord_List.T, body: Proofterm.proof_body}; type conv = cterm -> thm; (*errors involving theorems*) exception THM of string * int * thm list; fun rep_thm (Thm (_, args)) = args; fun fold_terms h f (Thm (_, {tpairs, prop, hyps, ...})) = fold (fn (t, u) => f t #> f u) tpairs #> f prop #> #hyps h ? 
fold f hyps; fun fold_atomic_ctyps h g f (th as Thm (_, {cert, maxidx, shyps, ...})) = let fun ctyp T = Ctyp {cert = cert, T = T, maxidx = maxidx, sorts = shyps} in (fold_terms h o fold_types o fold_atyps) (fn T => if g T then f (ctyp T) else I) th end; fun fold_atomic_cterms h g f (th as Thm (_, {cert, maxidx, shyps, ...})) = let fun cterm t T = Cterm {cert = cert, t = t, T = T, maxidx = maxidx, sorts = shyps}; fun apply t T = if g t then f (cterm t T) else I; in (fold_terms h o fold_aterms) (fn t as Const (_, T) => apply t T | t as Free (_, T) => apply t T | t as Var (_, T) => apply t T | _ => I) th end; fun terms_of_tpairs tpairs = fold_rev (fn (t, u) => cons t o cons u) tpairs []; fun eq_tpairs ((t, u), (t', u')) = t aconv t' andalso u aconv u'; fun union_tpairs ts us = Library.merge eq_tpairs (ts, us); val maxidx_tpairs = fold (fn (t, u) => Term.maxidx_term t #> Term.maxidx_term u); fun attach_tpairs tpairs prop = Logic.list_implies (map Logic.mk_equals tpairs, prop); fun full_prop_of (Thm (_, {tpairs, prop, ...})) = attach_tpairs tpairs prop; val union_hyps = Ord_List.union Term_Ord.fast_term_ord; val insert_hyps = Ord_List.insert Term_Ord.fast_term_ord; val remove_hyps = Ord_List.remove Term_Ord.fast_term_ord; fun join_certificate1 (Cterm {cert = cert1, ...}, Thm (_, {cert = cert2, ...})) = Context.join_certificate (cert1, cert2); fun join_certificate2 (Thm (_, {cert = cert1, ...}), Thm (_, {cert = cert2, ...})) = Context.join_certificate (cert1, cert2); (* basic components *) val cert_of = #cert o rep_thm; val theory_id = Context.certificate_theory_id o cert_of; val theory_name = Context.theory_id_name o theory_id; val maxidx_of = #maxidx o rep_thm; fun maxidx_thm th i = Int.max (maxidx_of th, i); val shyps_of = #shyps o rep_thm; val hyps_of = #hyps o rep_thm; val prop_of = #prop o rep_thm; val tpairs_of = #tpairs o rep_thm; val concl_of = Logic.strip_imp_concl o prop_of; val prems_of = Logic.strip_imp_prems o prop_of; val nprems_of = Logic.count_prems o prop_of; -fun no_prems th = nprems_of th = 0; +val no_prems = Logic.no_prems o prop_of; fun major_prem_of th = (case prems_of th of prem :: _ => Logic.strip_assums_concl prem | [] => raise THM ("major_prem_of: rule with no premises", 0, [th])); fun cprop_of (Thm (_, {cert, maxidx, shyps, prop, ...})) = Cterm {cert = cert, maxidx = maxidx, T = propT, t = prop, sorts = shyps}; fun cprem_of (th as Thm (_, {cert, maxidx, shyps, prop, ...})) i = Cterm {cert = cert, maxidx = maxidx, T = propT, sorts = shyps, t = Logic.nth_prem (i, prop) handle TERM _ => raise THM ("cprem_of", i, [th])}; fun cconcl_of (th as Thm (_, {cert, maxidx, shyps, ...})) = Cterm {cert = cert, maxidx = maxidx, T = propT, sorts = shyps, t = concl_of th}; fun cprems_of (th as Thm (_, {cert, maxidx, shyps, ...})) = map (fn t => Cterm {cert = cert, maxidx = maxidx, T = propT, sorts = shyps, t = t}) (prems_of th); fun chyps_of (Thm (_, {cert, shyps, hyps, ...})) = map (fn t => Cterm {cert = cert, maxidx = ~1, T = propT, sorts = shyps, t = t}) hyps; (* thm order: ignores theory context! 
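   It compares only the logical content -- prop, then flex-flex pairs, then hyps
   (via fast term order), then shyps (via sort order) -- so theorems that differ
   only in their background theory certificate compare as equal.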
*) val thm_ord = Term_Ord.fast_term_ord o apply2 prop_of ||| list_ord (prod_ord Term_Ord.fast_term_ord Term_Ord.fast_term_ord) o apply2 tpairs_of ||| list_ord Term_Ord.fast_term_ord o apply2 hyps_of ||| list_ord Term_Ord.sort_ord o apply2 shyps_of; (* implicit theory context *) exception CONTEXT of string * ctyp list * cterm list * thm list * Context.generic option; fun theory_of_cterm (ct as Cterm {cert, ...}) = Context.certificate_theory cert handle ERROR msg => raise CONTEXT (msg, [], [ct], [], NONE); fun theory_of_thm th = Context.certificate_theory (cert_of th) handle ERROR msg => raise CONTEXT (msg, [], [], [th], NONE); fun trim_context_ctyp cT = (case cT of Ctyp {cert = Context.Certificate_Id _, ...} => cT | Ctyp {cert = Context.Certificate thy, T, maxidx, sorts} => Ctyp {cert = Context.Certificate_Id (Context.theory_id thy), T = T, maxidx = maxidx, sorts = sorts}); fun trim_context_cterm ct = (case ct of Cterm {cert = Context.Certificate_Id _, ...} => ct | Cterm {cert = Context.Certificate thy, t, T, maxidx, sorts} => Cterm {cert = Context.Certificate_Id (Context.theory_id thy), t = t, T = T, maxidx = maxidx, sorts = sorts}); fun trim_context_thm th = (case th of Thm (_, {constraints = _ :: _, ...}) => raise THM ("trim_context: pending sort constraints", 0, [th]) | Thm (_, {cert = Context.Certificate_Id _, ...}) => th | Thm (der, {cert = Context.Certificate thy, tags, maxidx, constraints = [], shyps, hyps, tpairs, prop}) => Thm (der, {cert = Context.Certificate_Id (Context.theory_id thy), tags = tags, maxidx = maxidx, constraints = [], shyps = shyps, hyps = hyps, tpairs = tpairs, prop = prop})); fun transfer_ctyp thy' cT = let val Ctyp {cert, T, maxidx, sorts} = cT; val _ = Context.subthy_id (Context.certificate_theory_id cert, Context.theory_id thy') orelse raise CONTEXT ("Cannot transfer: not a super theory", [cT], [], [], SOME (Context.Theory thy')); val cert' = Context.join_certificate (Context.Certificate thy', cert); in if Context.eq_certificate (cert, cert') then cT else Ctyp {cert = cert', T = T, maxidx = maxidx, sorts = sorts} end; fun transfer_cterm thy' ct = let val Cterm {cert, t, T, maxidx, sorts} = ct; val _ = Context.subthy_id (Context.certificate_theory_id cert, Context.theory_id thy') orelse raise CONTEXT ("Cannot transfer: not a super theory", [], [ct], [], SOME (Context.Theory thy')); val cert' = Context.join_certificate (Context.Certificate thy', cert); in if Context.eq_certificate (cert, cert') then ct else Cterm {cert = cert', t = t, T = T, maxidx = maxidx, sorts = sorts} end; fun transfer thy' th = let val Thm (der, {cert, tags, maxidx, constraints, shyps, hyps, tpairs, prop}) = th; val _ = Context.subthy_id (Context.certificate_theory_id cert, Context.theory_id thy') orelse raise CONTEXT ("Cannot transfer: not a super theory", [], [], [th], SOME (Context.Theory thy')); val cert' = Context.join_certificate (Context.Certificate thy', cert); in if Context.eq_certificate (cert, cert') then th else Thm (der, {cert = cert', tags = tags, maxidx = maxidx, constraints = constraints, shyps = shyps, hyps = hyps, tpairs = tpairs, prop = prop}) end; val transfer' = transfer o Proof_Context.theory_of; val transfer'' = transfer o Context.theory_of; fun join_transfer thy th = (Context.subthy_id (theory_id th, Context.theory_id thy) ? 
transfer thy) th; fun join_transfer_context (ctxt, th) = if Context.subthy_id (theory_id th, Context.theory_id (Proof_Context.theory_of ctxt)) then (ctxt, transfer' ctxt th) else (Context.raw_transfer (theory_of_thm th) ctxt, th); (* matching *) local fun gen_match match (ct1 as Cterm {t = t1, sorts = sorts1, ...}, ct2 as Cterm {t = t2, sorts = sorts2, maxidx = maxidx2, ...}) = let val cert = join_certificate0 (ct1, ct2); val thy = Context.certificate_theory cert handle ERROR msg => raise CONTEXT (msg, [], [ct1, ct2], [], NONE); val (Tinsts, tinsts) = match thy (t1, t2) (Vartab.empty, Vartab.empty); val sorts = Sorts.union sorts1 sorts2; fun mk_cTinst ((a, i), (S, T)) = (((a, i), S), Ctyp {T = T, cert = cert, maxidx = maxidx2, sorts = sorts}); fun mk_ctinst ((x, i), (U, t)) = let val T = Envir.subst_type Tinsts U in (((x, i), T), Cterm {t = t, T = T, cert = cert, maxidx = maxidx2, sorts = sorts}) end; in (TVars.build (Vartab.fold (TVars.add o mk_cTinst) Tinsts), Vars.build (Vartab.fold (Vars.add o mk_ctinst) tinsts)) end; in val match = gen_match Pattern.match; val first_order_match = gen_match Pattern.first_order_match; end; (*implicit alpha-conversion*) fun renamed_prop prop' (Thm (der, {cert, tags, maxidx, constraints, shyps, hyps, tpairs, prop})) = if prop aconv prop' then Thm (der, {cert = cert, tags = tags, maxidx = maxidx, constraints = constraints, shyps = shyps, hyps = hyps, tpairs = tpairs, prop = prop'}) else raise TERM ("renamed_prop: props disagree", [prop, prop']); fun make_context ths NONE cert = (Context.Theory (Context.certificate_theory cert) handle ERROR msg => raise CONTEXT (msg, [], [], ths, NONE)) | make_context ths (SOME ctxt) cert = let val thy_id = Context.certificate_theory_id cert; val thy_id' = Context.theory_id (Proof_Context.theory_of ctxt); in if Context.subthy_id (thy_id, thy_id') then Context.Proof ctxt else raise CONTEXT ("Bad context", [], [], ths, SOME (Context.Proof ctxt)) end; fun make_context_certificate ths opt_ctxt cert = let val context = make_context ths opt_ctxt cert; val cert' = Context.Certificate (Context.theory_of context); in (context, cert') end; (*explicit weakening: maps |- B to A |- B*) fun weaken raw_ct th = let val ct as Cterm {t = A, T, sorts, maxidx = maxidxA, ...} = adjust_maxidx_cterm ~1 raw_ct; val Thm (der, {tags, maxidx, constraints, shyps, hyps, tpairs, prop, ...}) = th; in if T <> propT then raise THM ("weaken: assumptions must have type prop", 0, []) else if maxidxA <> ~1 then raise THM ("weaken: assumptions may not contain schematic variables", maxidxA, []) else Thm (der, {cert = join_certificate1 (ct, th), tags = tags, maxidx = maxidx, constraints = constraints, shyps = Sorts.union sorts shyps, hyps = insert_hyps A hyps, tpairs = tpairs, prop = prop}) end; fun weaken_sorts raw_sorts ct = let val Cterm {cert, t, T, maxidx, sorts} = ct; val thy = theory_of_cterm ct; val more_sorts = Sorts.make (map (Sign.certify_sort thy) raw_sorts); val sorts' = Sorts.union sorts more_sorts; in Cterm {cert = cert, t = t, T = T, maxidx = maxidx, sorts = sorts'} end; (** derivations and promised proofs **) fun make_deriv promises oracles thms proof = Deriv {promises = promises, body = PBody {oracles = oracles, thms = thms, proof = proof}}; val empty_deriv = make_deriv [] [] [] MinProof; (* inference rules *) val promise_ord: (serial * thm future) ord = fn ((i, _), (j, _)) => int_ord (j, i); fun bad_proofs i = error ("Illegal level of detail for proof objects: " ^ string_of_int i); fun deriv_rule2 f (Deriv {promises = ps1, body = PBody {oracles 
= oracles1, thms = thms1, proof = prf1}}) (Deriv {promises = ps2, body = PBody {oracles = oracles2, thms = thms2, proof = prf2}}) = let val ps = Ord_List.union promise_ord ps1 ps2; val oracles = Proofterm.unions_oracles [oracles1, oracles2]; val thms = Proofterm.unions_thms [thms1, thms2]; val prf = (case ! Proofterm.proofs of 2 => f prf1 prf2 | 1 => MinProof | 0 => MinProof | i => bad_proofs i); in make_deriv ps oracles thms prf end; fun deriv_rule1 f = deriv_rule2 (K f) empty_deriv; fun deriv_rule0 make_prf = if ! Proofterm.proofs <= 1 then empty_deriv else deriv_rule1 I (make_deriv [] [] [] (make_prf ())); fun deriv_rule_unconditional f (Deriv {promises, body = PBody {oracles, thms, proof}}) = make_deriv promises oracles thms (f proof); (* fulfilled proofs *) fun raw_promises_of (Thm (Deriv {promises, ...}, _)) = promises; fun join_promises [] = () | join_promises promises = join_promises_of (Future.joins (map snd promises)) and join_promises_of thms = join_promises (Ord_List.make promise_ord (maps raw_promises_of thms)); fun fulfill_body (th as Thm (Deriv {promises, body}, _)) = let val fulfilled_promises = map #1 promises ~~ map fulfill_body (Future.joins (map #2 promises)) in Proofterm.fulfill_norm_proof (theory_of_thm th) fulfilled_promises body end; fun proof_bodies_of thms = (join_promises_of thms; map fulfill_body thms); val proof_body_of = singleton proof_bodies_of; val proof_of = Proofterm.proof_of o proof_body_of; fun reconstruct_proof_of thm = Proofterm.reconstruct_proof (theory_of_thm thm) (prop_of thm) (proof_of thm); val consolidate = ignore o proof_bodies_of; fun expose_proofs thy thms = if Proofterm.export_proof_boxes_required thy then Proofterm.export_proof_boxes (proof_bodies_of (map (transfer thy) thms)) else (); fun expose_proof thy = expose_proofs thy o single; (* future rule *) fun future_result i orig_cert orig_shyps orig_prop thm = let fun err msg = raise THM ("future_result: " ^ msg, 0, [thm]); val Thm (Deriv {promises, ...}, {cert, constraints, shyps, hyps, tpairs, prop, ...}) = thm; val _ = Context.eq_certificate (cert, orig_cert) orelse err "bad theory"; val _ = prop aconv orig_prop orelse err "bad prop"; val _ = null constraints orelse err "bad sort constraints"; val _ = null tpairs orelse err "bad flex-flex constraints"; val _ = null hyps orelse err "bad hyps"; val _ = Sorts.subset (shyps, orig_shyps) orelse err "bad shyps"; val _ = forall (fn (j, _) => i <> j) promises orelse err "bad dependencies"; val _ = join_promises promises; in thm end; fun future future_thm ct = let val Cterm {cert = cert, t = prop, T, maxidx, sorts} = ct; val _ = T <> propT andalso raise CTERM ("future: prop expected", [ct]); val _ = if Proofterm.proofs_enabled () then raise CTERM ("future: proof terms enabled", [ct]) else (); val i = serial (); val future = future_thm |> Future.map (future_result i cert sorts prop); in Thm (make_deriv [(i, future)] [] [] MinProof, {cert = cert, tags = [], maxidx = maxidx, constraints = [], shyps = sorts, hyps = [], tpairs = [], prop = prop}) end; (** Axioms **) fun axiom thy name = (case Name_Space.lookup (Theory.axiom_table thy) name of SOME prop => let val der = deriv_rule0 (fn () => Proofterm.axm_proof name prop); val cert = Context.Certificate thy; val maxidx = maxidx_of_term prop; val shyps = Sorts.insert_term prop []; in Thm (der, {cert = cert, tags = [], maxidx = maxidx, constraints = [], shyps = shyps, hyps = [], tpairs = [], prop = prop}) end | NONE => raise THEORY ("No axiom " ^ quote name, [thy])); fun all_axioms_of thy = map (fn (name, 
_) => (name, axiom thy name)) (Theory.all_axioms_of thy); (* tags *) val get_tags = #tags o rep_thm; fun map_tags f (Thm (der, {cert, tags, maxidx, constraints, shyps, hyps, tpairs, prop})) = Thm (der, {cert = cert, tags = f tags, maxidx = maxidx, constraints = constraints, shyps = shyps, hyps = hyps, tpairs = tpairs, prop = prop}); (* technical adjustments *) fun norm_proof (th as Thm (der, args)) = Thm (deriv_rule1 (Proofterm.rew_proof (theory_of_thm th)) der, args); fun adjust_maxidx_thm i (th as Thm (der, {cert, tags, maxidx, constraints, shyps, hyps, tpairs, prop})) = if maxidx = i then th else if maxidx < i then Thm (der, {maxidx = i, cert = cert, tags = tags, constraints = constraints, shyps = shyps, hyps = hyps, tpairs = tpairs, prop = prop}) else Thm (der, {maxidx = Int.max (maxidx_tpairs tpairs (maxidx_of_term prop), i), cert = cert, tags = tags, constraints = constraints, shyps = shyps, hyps = hyps, tpairs = tpairs, prop = prop}); (*** Theory data ***) (* type classes *) structure Aritytab = Table( type key = string * sort list * class; val ord = fast_string_ord o apply2 #1 ||| fast_string_ord o apply2 #3 ||| list_ord Term_Ord.sort_ord o apply2 #2; ); datatype classes = Classes of {classrels: thm Symreltab.table, arities: (thm * string * serial) Aritytab.table}; fun make_classes (classrels, arities) = Classes {classrels = classrels, arities = arities}; val empty_classes = make_classes (Symreltab.empty, Aritytab.empty); (*see Theory.at_begin hook for transitive closure of classrels and arity completion*) fun merge_classes (Classes {classrels = classrels1, arities = arities1}, Classes {classrels = classrels2, arities = arities2}) = let val classrels' = Symreltab.merge (K true) (classrels1, classrels2); val arities' = Aritytab.merge (K true) (arities1, arities2); in make_classes (classrels', arities') end; (* data *) structure Data = Theory_Data ( type T = unit Name_Space.table * (*oracles: authentic derivation names*) classes; (*type classes within the logic*) val empty : T = (Name_Space.empty_table Markup.oracleN, empty_classes); val extend = I; fun merge ((oracles1, sorts1), (oracles2, sorts2)) : T = (Name_Space.merge_tables (oracles1, oracles2), merge_classes (sorts1, sorts2)); ); val get_oracles = #1 o Data.get; val map_oracles = Data.map o apfst; val get_classes = (fn (_, Classes args) => args) o Data.get; val get_classrels = #classrels o get_classes; val get_arities = #arities o get_classes; fun map_classes f = (Data.map o apsnd) (fn Classes {classrels, arities} => make_classes (f (classrels, arities))); fun map_classrels f = map_classes (fn (classrels, arities) => (f classrels, arities)); fun map_arities f = map_classes (fn (classrels, arities) => (classrels, f arities)); (* type classes *) fun the_classrel thy (c1, c2) = (case Symreltab.lookup (get_classrels thy) (c1, c2) of SOME thm => transfer thy thm | NONE => error ("Unproven class relation " ^ Syntax.string_of_classrel (Proof_Context.init_global thy) [c1, c2])); fun the_arity thy (a, Ss, c) = (case Aritytab.lookup (get_arities thy) (a, Ss, c) of SOME (thm, _, _) => transfer thy thm | NONE => error ("Unproven type arity " ^ Syntax.string_of_arity (Proof_Context.init_global thy) (a, Ss, [c]))); val classrel_proof = proof_of oo the_classrel; val arity_proof = proof_of oo the_arity; (* solve sort constraints by pro-forma proof *) local fun union_digest (oracles1, thms1) (oracles2, thms2) = (Proofterm.unions_oracles [oracles1, oracles2], Proofterm.unions_thms [thms1, thms2]); fun thm_digest (Thm (Deriv {body = PBody 
{oracles, thms, ...}, ...}, _)) = (oracles, thms); fun constraint_digest ({theory = thy, typ, sort, ...}: constraint) = Sorts.of_sort_derivation (Sign.classes_of thy) {class_relation = fn _ => fn _ => fn (digest, c1) => fn c2 => if c1 = c2 then ([], []) else union_digest digest (thm_digest (the_classrel thy (c1, c2))), type_constructor = fn (a, _) => fn dom => fn c => let val arity_digest = thm_digest (the_arity thy (a, (map o map) #2 dom, c)) in (fold o fold) (union_digest o #1) dom arity_digest end, type_variable = fn T => map (pair ([], [])) (Type.sort_of_atyp T)} (typ, sort); in fun solve_constraints (thm as Thm (_, {constraints = [], ...})) = thm | solve_constraints (thm as Thm (der, args)) = let val {cert, tags, maxidx, constraints, shyps, hyps, tpairs, prop} = args; val thy = Context.certificate_theory cert; val bad_thys = constraints |> map_filter (fn {theory = thy', ...} => if Context.eq_thy (thy, thy') then NONE else SOME thy'); val () = if null bad_thys then () else raise THEORY ("solve_constraints: bad theories for theorem\n" ^ Syntax.string_of_term_global thy (prop_of thm), thy :: bad_thys); val Deriv {promises, body = PBody {oracles, thms, proof}} = der; val (oracles', thms') = (oracles, thms) |> fold (fold union_digest o constraint_digest) constraints; val body' = PBody {oracles = oracles', thms = thms', proof = proof}; in Thm (Deriv {promises = promises, body = body'}, {constraints = [], cert = cert, tags = tags, maxidx = maxidx, shyps = shyps, hyps = hyps, tpairs = tpairs, prop = prop}) end; end; (*Dangling sort constraints of a thm*) fun extra_shyps (th as Thm (_, {shyps, ...})) = Sorts.subtract (fold_terms {hyps = true} Sorts.insert_term th []) shyps; (*Remove extra sorts that are witnessed by type signature information*) fun strip_shyps thm = (case thm of Thm (_, {shyps = [], ...}) => thm | Thm (der, {cert, tags, maxidx, constraints, shyps, hyps, tpairs, prop}) => let val thy = theory_of_thm thm; val algebra = Sign.classes_of thy; val minimize = Sorts.minimize_sort algebra; val le = Sorts.sort_le algebra; fun lt (S1, S2) = le (S1, S2) andalso not (le (S2, S1)); fun rel (S1, S2) = if S1 = S2 then [] else [(Term.aT S1, S2)]; val present = (fold_terms {hyps = true} o fold_types o fold_atyps_sorts) (insert (eq_fst op =)) thm []; val extra = fold (Sorts.remove_sort o #2) present shyps; val witnessed = Sign.witness_sorts thy present extra; val non_witnessed = fold (Sorts.remove_sort o #2) witnessed extra |> map (`minimize); val extra' = non_witnessed |> map_filter (fn (S, _) => if non_witnessed |> exists (fn (S', _) => lt (S', S)) then NONE else SOME S) |> Sorts.make; val constrs' = non_witnessed |> maps (fn (S1, S2) => let val S0 = the (find_first (fn S => le (S, S1)) extra') in rel (S0, S1) @ rel (S1, S2) end); val constraints' = fold (insert_constraints thy) (witnessed @ constrs') constraints; val shyps' = fold (Sorts.insert_sort o #2) present extra'; in Thm (deriv_rule_unconditional (Proofterm.strip_shyps_proof algebra present witnessed extra') der, {cert = cert, tags = tags, maxidx = maxidx, constraints = constraints', shyps = shyps', hyps = hyps, tpairs = tpairs, prop = prop}) end) |> solve_constraints; (*** Closed theorems with official name ***) (*non-deterministic, depends on unknown promises*) fun derivation_closed (Thm (Deriv {body, ...}, _)) = Proofterm.compact_proof (Proofterm.proof_of body); (*non-deterministic, depends on unknown promises*) fun raw_derivation_name (Thm (Deriv {body, ...}, {shyps, hyps, prop, ...})) = Proofterm.get_approximative_name shyps hyps 
prop (Proofterm.proof_of body); fun expand_name (Thm (Deriv {body, ...}, {shyps, hyps, prop, ...})) = let val self_id = (case Proofterm.get_identity shyps hyps prop (Proofterm.proof_of body) of NONE => K false | SOME {serial, ...} => fn (header: Proofterm.thm_header) => serial = #serial header); fun expand header = if self_id header orelse #name header = "" then SOME "" else NONE; in expand end; (*deterministic name of finished proof*) fun derivation_name (thm as Thm (_, {shyps, hyps, prop, ...})) = Proofterm.get_approximative_name shyps hyps prop (proof_of thm); (*identified PThm node*) fun derivation_id (thm as Thm (_, {shyps, hyps, prop, ...})) = Proofterm.get_id shyps hyps prop (proof_of thm); (*dependencies of PThm node*) fun thm_deps (thm as Thm (Deriv {promises = [], body = PBody {thms, ...}, ...}, _)) = (case (derivation_id thm, thms) of (SOME {serial = i, ...}, [(j, thm_node)]) => if i = j then Proofterm.thm_node_thms thm_node else thms | _ => thms) | thm_deps thm = raise THM ("thm_deps: bad promises", 0, [thm]); fun name_derivation name_pos = strip_shyps #> (fn thm as Thm (der, args) => let val thy = theory_of_thm thm; val Deriv {promises, body} = der; val {shyps, hyps, prop, tpairs, ...} = args; val _ = null tpairs orelse raise THM ("name_derivation: bad flex-flex constraints", 0, [thm]); val ps = map (apsnd (Future.map fulfill_body)) promises; val (pthm, proof) = Proofterm.thm_proof thy (classrel_proof thy) (arity_proof thy) name_pos shyps hyps prop ps body; val der' = make_deriv [] [] [pthm] proof; in Thm (der', args) end); fun close_derivation pos = solve_constraints #> (fn thm => if not (null (tpairs_of thm)) orelse derivation_closed thm then thm else name_derivation ("", pos) thm); val trim_context = solve_constraints #> trim_context_thm; (*** Oracles ***) fun add_oracle (b, oracle_fn) thy = let val (name, oracles') = Name_Space.define (Context.Theory thy) true (b, ()) (get_oracles thy); val thy' = map_oracles (K oracles') thy; fun invoke_oracle arg = let val Cterm {cert = cert2, t = prop, T, maxidx, sorts} = oracle_fn arg in if T <> propT then raise THM ("Oracle's result must have type prop: " ^ name, 0, []) else let val (oracle, prf) = (case ! 
Proofterm.proofs of 2 => (((name, Position.thread_data ()), SOME prop), Proofterm.oracle_proof name prop) | 1 => (((name, Position.thread_data ()), SOME prop), MinProof) | 0 => (((name, Position.none), NONE), MinProof) | i => bad_proofs i); in Thm (make_deriv [] [oracle] [] prf, {cert = Context.join_certificate (Context.Certificate thy', cert2), tags = [], maxidx = maxidx, constraints = [], shyps = sorts, hyps = [], tpairs = [], prop = prop}) end end; in ((name, invoke_oracle), thy') end; val oracle_space = Name_Space.space_of_table o get_oracles; fun pretty_oracle ctxt = Name_Space.pretty ctxt (oracle_space (Proof_Context.theory_of ctxt)); fun extern_oracles verbose ctxt = map #1 (Name_Space.markup_table verbose ctxt (get_oracles (Proof_Context.theory_of ctxt))); fun check_oracle ctxt = Name_Space.check (Context.Proof ctxt) (get_oracles (Proof_Context.theory_of ctxt)) #> #1; (*** Meta rules ***) (** primitive rules **) (*The assumption rule A |- A*) fun assume raw_ct = let val Cterm {cert, t = prop, T, maxidx, sorts} = adjust_maxidx_cterm ~1 raw_ct in if T <> propT then raise THM ("assume: prop", 0, []) else if maxidx <> ~1 then raise THM ("assume: variables", maxidx, []) else Thm (deriv_rule0 (fn () => Proofterm.Hyp prop), {cert = cert, tags = [], maxidx = ~1, constraints = [], shyps = sorts, hyps = [prop], tpairs = [], prop = prop}) end; (*Implication introduction [A] : B ------- A \ B *) fun implies_intr (ct as Cterm {t = A, T, maxidx = maxidx1, sorts, ...}) (th as Thm (der, {maxidx = maxidx2, hyps, constraints, shyps, tpairs, prop, ...})) = if T <> propT then raise THM ("implies_intr: assumptions must have type prop", 0, [th]) else Thm (deriv_rule1 (Proofterm.implies_intr_proof A) der, {cert = join_certificate1 (ct, th), tags = [], maxidx = Int.max (maxidx1, maxidx2), constraints = constraints, shyps = Sorts.union sorts shyps, hyps = remove_hyps A hyps, tpairs = tpairs, prop = Logic.mk_implies (A, prop)}); (*Implication elimination A \ B A ------------ B *) fun implies_elim thAB thA = let val Thm (derA, {maxidx = maxidx1, hyps = hypsA, constraints = constraintsA, shyps = shypsA, tpairs = tpairsA, prop = propA, ...}) = thA and Thm (der, {maxidx = maxidx2, hyps, constraints, shyps, tpairs, prop, ...}) = thAB; fun err () = raise THM ("implies_elim: major premise", 0, [thAB, thA]); in (case prop of Const ("Pure.imp", _) $ A $ B => if A aconv propA then Thm (deriv_rule2 (curry Proofterm.%%) der derA, {cert = join_certificate2 (thAB, thA), tags = [], maxidx = Int.max (maxidx1, maxidx2), constraints = union_constraints constraintsA constraints, shyps = Sorts.union shypsA shyps, hyps = union_hyps hypsA hyps, tpairs = union_tpairs tpairsA tpairs, prop = B}) else err () | _ => err ()) end; (*Forall introduction. The Free or Var x must not be free in the hypotheses. [x] : A ------ \x. 
A *) fun forall_intr (ct as Cterm {maxidx = maxidx1, t = x, T, sorts, ...}) (th as Thm (der, {maxidx = maxidx2, constraints, shyps, hyps, tpairs, prop, ...})) = let fun result a = Thm (deriv_rule1 (Proofterm.forall_intr_proof (a, x) NONE) der, {cert = join_certificate1 (ct, th), tags = [], maxidx = Int.max (maxidx1, maxidx2), constraints = constraints, shyps = Sorts.union sorts shyps, hyps = hyps, tpairs = tpairs, prop = Logic.all_const T $ Abs (a, T, abstract_over (x, prop))}); fun check_occs a x ts = if exists (fn t => Logic.occs (x, t)) ts then raise THM ("forall_intr: variable " ^ quote a ^ " free in assumptions", 0, [th]) else (); in (case x of Free (a, _) => (check_occs a x hyps; check_occs a x (terms_of_tpairs tpairs); result a) | Var ((a, _), _) => (check_occs a x (terms_of_tpairs tpairs); result a) | _ => raise THM ("forall_intr: not a variable", 0, [th])) end; (*Forall elimination \x. A ------ A[t/x] *) fun forall_elim (ct as Cterm {t, T, maxidx = maxidx1, sorts, ...}) (th as Thm (der, {maxidx = maxidx2, constraints, shyps, hyps, tpairs, prop, ...})) = (case prop of Const ("Pure.all", Type ("fun", [Type ("fun", [qary, _]), _])) $ A => if T <> qary then raise THM ("forall_elim: type mismatch", 0, [th]) else Thm (deriv_rule1 (Proofterm.% o rpair (SOME t)) der, {cert = join_certificate1 (ct, th), tags = [], maxidx = Int.max (maxidx1, maxidx2), constraints = constraints, shyps = Sorts.union sorts shyps, hyps = hyps, tpairs = tpairs, prop = Term.betapply (A, t)}) | _ => raise THM ("forall_elim: not quantified", 0, [th])); (* Equality *) (*Reflexivity t \ t *) fun reflexive (Cterm {cert, t, T = _, maxidx, sorts}) = Thm (deriv_rule0 (fn () => Proofterm.reflexive_proof), {cert = cert, tags = [], maxidx = maxidx, constraints = [], shyps = sorts, hyps = [], tpairs = [], prop = Logic.mk_equals (t, t)}); (*Symmetry t \ u ------ u \ t *) fun symmetric (th as Thm (der, {cert, maxidx, constraints, shyps, hyps, tpairs, prop, ...})) = (case prop of (eq as Const ("Pure.eq", _)) $ t $ u => Thm (deriv_rule1 Proofterm.symmetric_proof der, {cert = cert, tags = [], maxidx = maxidx, constraints = constraints, shyps = shyps, hyps = hyps, tpairs = tpairs, prop = eq $ u $ t}) | _ => raise THM ("symmetric", 0, [th])); (*Transitivity t1 \ u u \ t2 ------------------ t1 \ t2 *) fun transitive th1 th2 = let val Thm (der1, {maxidx = maxidx1, hyps = hyps1, constraints = constraints1, shyps = shyps1, tpairs = tpairs1, prop = prop1, ...}) = th1 and Thm (der2, {maxidx = maxidx2, hyps = hyps2, constraints = constraints2, shyps = shyps2, tpairs = tpairs2, prop = prop2, ...}) = th2; fun err msg = raise THM ("transitive: " ^ msg, 0, [th1, th2]); in case (prop1, prop2) of ((eq as Const ("Pure.eq", Type (_, [U, _]))) $ t1 $ u, Const ("Pure.eq", _) $ u' $ t2) => if not (u aconv u') then err "middle term" else Thm (deriv_rule2 (Proofterm.transitive_proof U u) der1 der2, {cert = join_certificate2 (th1, th2), tags = [], maxidx = Int.max (maxidx1, maxidx2), constraints = union_constraints constraints1 constraints2, shyps = Sorts.union shyps1 shyps2, hyps = union_hyps hyps1 hyps2, tpairs = union_tpairs tpairs1 tpairs2, prop = eq $ t1 $ t2}) | _ => err "premises" end; (*Beta-conversion (\x. 
t) u \ t[u/x] fully beta-reduces the term if full = true *) fun beta_conversion full (Cterm {cert, t, T = _, maxidx, sorts}) = let val t' = if full then Envir.beta_norm t else (case t of Abs (_, _, bodt) $ u => subst_bound (u, bodt) | _ => raise THM ("beta_conversion: not a redex", 0, [])); in Thm (deriv_rule0 (fn () => Proofterm.reflexive_proof), {cert = cert, tags = [], maxidx = maxidx, constraints = [], shyps = sorts, hyps = [], tpairs = [], prop = Logic.mk_equals (t, t')}) end; fun eta_conversion (Cterm {cert, t, T = _, maxidx, sorts}) = Thm (deriv_rule0 (fn () => Proofterm.reflexive_proof), {cert = cert, tags = [], maxidx = maxidx, constraints = [], shyps = sorts, hyps = [], tpairs = [], prop = Logic.mk_equals (t, Envir.eta_contract t)}); fun eta_long_conversion (Cterm {cert, t, T = _, maxidx, sorts}) = Thm (deriv_rule0 (fn () => Proofterm.reflexive_proof), {cert = cert, tags = [], maxidx = maxidx, constraints = [], shyps = sorts, hyps = [], tpairs = [], prop = Logic.mk_equals (t, Envir.eta_long [] t)}); (*The abstraction rule. The Free or Var x must not be free in the hypotheses. The bound variable will be named "a" (since x will be something like x320) t \ u -------------- \x. t \ \x. u *) fun abstract_rule a (Cterm {t = x, T, sorts, ...}) (th as Thm (der, {cert, maxidx, hyps, constraints, shyps, tpairs, prop, ...})) = let val (t, u) = Logic.dest_equals prop handle TERM _ => raise THM ("abstract_rule: premise not an equality", 0, [th]); val result = Thm (deriv_rule1 (Proofterm.abstract_rule_proof (a, x)) der, {cert = cert, tags = [], maxidx = maxidx, constraints = constraints, shyps = Sorts.union sorts shyps, hyps = hyps, tpairs = tpairs, prop = Logic.mk_equals (Abs (a, T, abstract_over (x, t)), Abs (a, T, abstract_over (x, u)))}); fun check_occs a x ts = if exists (fn t => Logic.occs (x, t)) ts then raise THM ("abstract_rule: variable " ^ quote a ^ " free in assumptions", 0, [th]) else (); in (case x of Free (a, _) => (check_occs a x hyps; check_occs a x (terms_of_tpairs tpairs); result) | Var ((a, _), _) => (check_occs a x (terms_of_tpairs tpairs); result) | _ => raise THM ("abstract_rule: not a variable", 0, [th])) end; (*The combination rule f \ g t \ u ------------- f t \ g u *) fun combination th1 th2 = let val Thm (der1, {maxidx = maxidx1, constraints = constraints1, shyps = shyps1, hyps = hyps1, tpairs = tpairs1, prop = prop1, ...}) = th1 and Thm (der2, {maxidx = maxidx2, constraints = constraints2, shyps = shyps2, hyps = hyps2, tpairs = tpairs2, prop = prop2, ...}) = th2; fun chktypes fT tT = (case fT of Type ("fun", [T1, _]) => if T1 <> tT then raise THM ("combination: types", 0, [th1, th2]) else () | _ => raise THM ("combination: not function type", 0, [th1, th2])); in (case (prop1, prop2) of (Const ("Pure.eq", Type ("fun", [fT, _])) $ f $ g, Const ("Pure.eq", Type ("fun", [tT, _])) $ t $ u) => (chktypes fT tT; Thm (deriv_rule2 (Proofterm.combination_proof f g t u) der1 der2, {cert = join_certificate2 (th1, th2), tags = [], maxidx = Int.max (maxidx1, maxidx2), constraints = union_constraints constraints1 constraints2, shyps = Sorts.union shyps1 shyps2, hyps = union_hyps hyps1 hyps2, tpairs = union_tpairs tpairs1 tpairs2, prop = Logic.mk_equals (f $ t, g $ u)})) | _ => raise THM ("combination: premises", 0, [th1, th2])) end; (*Equality introduction A \ B B \ A ---------------- A \ B *) fun equal_intr th1 th2 = let val Thm (der1, {maxidx = maxidx1, constraints = constraints1, shyps = shyps1, hyps = hyps1, tpairs = tpairs1, prop = prop1, ...}) = th1 and Thm (der2, {maxidx = 
maxidx2, constraints = constraints2, shyps = shyps2, hyps = hyps2, tpairs = tpairs2, prop = prop2, ...}) = th2; fun err msg = raise THM ("equal_intr: " ^ msg, 0, [th1, th2]); in (case (prop1, prop2) of (Const("Pure.imp", _) $ A $ B, Const("Pure.imp", _) $ B' $ A') => if A aconv A' andalso B aconv B' then Thm (deriv_rule2 (Proofterm.equal_intr_proof A B) der1 der2, {cert = join_certificate2 (th1, th2), tags = [], maxidx = Int.max (maxidx1, maxidx2), constraints = union_constraints constraints1 constraints2, shyps = Sorts.union shyps1 shyps2, hyps = union_hyps hyps1 hyps2, tpairs = union_tpairs tpairs1 tpairs2, prop = Logic.mk_equals (A, B)}) else err "not equal" | _ => err "premises") end; (*The equal propositions rule A \ B A --------- B *) fun equal_elim th1 th2 = let val Thm (der1, {maxidx = maxidx1, constraints = constraints1, shyps = shyps1, hyps = hyps1, tpairs = tpairs1, prop = prop1, ...}) = th1 and Thm (der2, {maxidx = maxidx2, constraints = constraints2, shyps = shyps2, hyps = hyps2, tpairs = tpairs2, prop = prop2, ...}) = th2; fun err msg = raise THM ("equal_elim: " ^ msg, 0, [th1, th2]); in (case prop1 of Const ("Pure.eq", _) $ A $ B => if prop2 aconv A then Thm (deriv_rule2 (Proofterm.equal_elim_proof A B) der1 der2, {cert = join_certificate2 (th1, th2), tags = [], maxidx = Int.max (maxidx1, maxidx2), constraints = union_constraints constraints1 constraints2, shyps = Sorts.union shyps1 shyps2, hyps = union_hyps hyps1 hyps2, tpairs = union_tpairs tpairs1 tpairs2, prop = B}) else err "not equal" | _ => err "major premise") end; (**** Derived rules ****) (*Smash unifies the list of term pairs leaving no flex-flex pairs. Instantiates the theorem and deletes trivial tpairs. Resulting sequence may contain multiple elements if the tpairs are not all flex-flex.*) fun flexflex_rule opt_ctxt = solve_constraints #> (fn th => let val Thm (der, {cert, maxidx, constraints, shyps, hyps, tpairs, prop, ...}) = th; val (context, cert') = make_context_certificate [th] opt_ctxt cert; in Unify.smash_unifiers context tpairs (Envir.empty maxidx) |> Seq.map (fn env => if Envir.is_empty env then th else let val tpairs' = tpairs |> map (apply2 (Envir.norm_term env)) (*remove trivial tpairs, of the form t \ t*) |> filter_out (op aconv); val der' = deriv_rule1 (Proofterm.norm_proof' env) der; val constraints' = insert_constraints_env (Context.certificate_theory cert') env constraints; val prop' = Envir.norm_term env prop; val maxidx = maxidx_tpairs tpairs' (maxidx_of_term prop'); val shyps = Envir.insert_sorts env shyps; in Thm (der', {cert = cert', tags = [], maxidx = maxidx, constraints = constraints', shyps = shyps, hyps = hyps, tpairs = tpairs', prop = prop'}) end) end); (*Generalization of fixed variables A -------------------- A[?'a/'a, ?x/x, ...] 
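  For example (an illustrative reading, not a verbatim call): with tfrees empty,
  frees = {x}, and an index idx greater than the theorem's maxidx, a theorem P x
  becomes P ?x, the new schematic variable carrying index idx.  The rule fails if x
  occurs in the hypotheses or if idx <= maxidx.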
*) fun generalize (tfrees, frees) idx th = if Names.is_empty tfrees andalso Names.is_empty frees then th else let val Thm (der, {cert, maxidx, constraints, shyps, hyps, tpairs, prop, ...}) = th; val _ = idx <= maxidx andalso raise THM ("generalize: bad index", idx, [th]); val bad_type = if Names.is_empty tfrees then K false else Term.exists_subtype (fn TFree (a, _) => Names.defined tfrees a | _ => false); fun bad_term (Free (x, T)) = bad_type T orelse Names.defined frees x | bad_term (Var (_, T)) = bad_type T | bad_term (Const (_, T)) = bad_type T | bad_term (Abs (_, T, t)) = bad_type T orelse bad_term t | bad_term (t $ u) = bad_term t orelse bad_term u | bad_term (Bound _) = false; val _ = exists bad_term hyps andalso raise THM ("generalize: variable free in assumptions", 0, [th]); val generalize = Term_Subst.generalize (tfrees, frees) idx; val prop' = generalize prop; val tpairs' = map (apply2 generalize) tpairs; val maxidx' = maxidx_tpairs tpairs' (maxidx_of_term prop'); in Thm (deriv_rule1 (Proofterm.generalize_proof (tfrees, frees) idx prop) der, {cert = cert, tags = [], maxidx = maxidx', constraints = constraints, shyps = shyps, hyps = hyps, tpairs = tpairs', prop = prop'}) end; fun generalize_cterm (tfrees, frees) idx (ct as Cterm {cert, t, T, maxidx, sorts}) = if Names.is_empty tfrees andalso Names.is_empty frees then ct else if idx <= maxidx then raise CTERM ("generalize_cterm: bad index", [ct]) else Cterm {cert = cert, sorts = sorts, T = Term_Subst.generalizeT tfrees idx T, t = Term_Subst.generalize (tfrees, frees) idx t, maxidx = Int.max (maxidx, idx)}; fun generalize_ctyp tfrees idx (cT as Ctyp {cert, T, maxidx, sorts}) = if Names.is_empty tfrees then cT else if idx <= maxidx then raise CTERM ("generalize_ctyp: bad index", []) else Ctyp {cert = cert, sorts = sorts, T = Term_Subst.generalizeT tfrees idx T, maxidx = Int.max (maxidx, idx)}; (*Instantiation of schematic variables A -------------------- A[t1/v1, ..., tn/vn] *) local fun add_cert cert_of (_, c) cert = Context.join_certificate (cert, cert_of c); val add_instT_cert = add_cert (fn Ctyp {cert, ...} => cert); val add_inst_cert = add_cert (fn Cterm {cert, ...} => cert); fun add_sorts sorts_of (_, c) sorts = Sorts.union (sorts_of c) sorts; val add_instT_sorts = add_sorts (fn Ctyp {sorts, ...} => sorts); val add_inst_sorts = add_sorts (fn Cterm {sorts, ...} => sorts); fun make_instT thy (v as (_, S)) (Ctyp {T = U, maxidx, ...}) = if Sign.of_sort thy (U, S) then (U, maxidx) else raise TYPE ("Type not of sort " ^ Syntax.string_of_sort_global thy S, [U], []); fun make_inst thy (v as (_, T)) (Cterm {t = u, T = U, maxidx, ...}) = if T = U then (u, maxidx) else let fun pretty_typing t ty = Pretty.block [Syntax.pretty_term_global thy t, Pretty.str " ::", Pretty.brk 1, Syntax.pretty_typ_global thy ty]; val msg = Pretty.string_of (Pretty.block [Pretty.str "instantiate: type conflict", Pretty.fbrk, pretty_typing (Var v) T, Pretty.fbrk, pretty_typing u U]) in raise TYPE (msg, [T, U], [Var v, u]) end; fun prep_insts (instT, inst) (cert, sorts) = let val cert' = cert |> TVars.fold add_instT_cert instT |> Vars.fold add_inst_cert inst; val thy' = Context.certificate_theory cert' handle ERROR msg => raise CONTEXT (msg, TVars.dest instT |> map #2, Vars.dest inst |> map #2, [], NONE); val sorts' = sorts |> TVars.fold add_instT_sorts instT |> Vars.fold add_inst_sorts inst; val instT' = TVars.map (make_instT thy') instT; val inst' = Vars.map (make_inst thy') inst; in ((instT', inst'), (cert', sorts')) end; in (*Left-to-right replacements: 
ctpairs = [..., (vi, ti), ...]. Instantiates distinct Vars by terms of same type. Does NOT normalize the resulting theorem!*) fun instantiate (instT, inst) th = if TVars.is_empty instT andalso Vars.is_empty inst then th else let val Thm (der, {cert, hyps, constraints, shyps, tpairs, prop, ...}) = th; val ((instT', inst'), (cert', shyps')) = prep_insts (instT, inst) (cert, shyps) handle CONTEXT (msg, cTs, cts, ths, context) => raise CONTEXT (msg, cTs, cts, th :: ths, context); val subst = Term_Subst.instantiate_maxidx (instT', inst'); val (prop', maxidx1) = subst prop ~1; val (tpairs', maxidx') = fold_map (fn (t, u) => fn i => subst t i ||>> subst u) tpairs maxidx1; val thy' = Context.certificate_theory cert'; val constraints' = TVars.fold (fn ((_, S), (T, _)) => insert_constraints thy' (T, S)) instT' constraints; in Thm (deriv_rule1 (fn d => Proofterm.instantiate (TVars.map (K #1) instT', Vars.map (K #1) inst') d) der, {cert = cert', tags = [], maxidx = maxidx', constraints = constraints', shyps = shyps', hyps = hyps, tpairs = tpairs', prop = prop'}) |> solve_constraints end handle TYPE (msg, _, _) => raise THM (msg, 0, [th]); fun instantiate_cterm (instT, inst) ct = if TVars.is_empty instT andalso Vars.is_empty inst then ct else let val Cterm {cert, t, T, sorts, ...} = ct; val ((instT', inst'), (cert', sorts')) = prep_insts (instT, inst) (cert, sorts); val subst = Term_Subst.instantiate_maxidx (instT', inst'); val substT = Term_Subst.instantiateT_maxidx instT'; val (t', maxidx1) = subst t ~1; val (T', maxidx') = substT T maxidx1; in Cterm {cert = cert', t = t', T = T', sorts = sorts', maxidx = maxidx'} end handle TYPE (msg, _, _) => raise CTERM (msg, [ct]); end; (*The trivial implication A \ A, justified by assume and forall rules. A can contain Vars, not so for assume!*) fun trivial (Cterm {cert, t = A, T, maxidx, sorts}) = if T <> propT then raise THM ("trivial: the term must have type prop", 0, []) else Thm (deriv_rule0 (fn () => Proofterm.trivial_proof), {cert = cert, tags = [], maxidx = maxidx, constraints = [], shyps = sorts, hyps = [], tpairs = [], prop = Logic.mk_implies (A, A)}); (*Axiom-scheme reflecting signature contents T :: c ------------------- OFCLASS(T, c_class) *) fun of_class (cT, raw_c) = let val Ctyp {cert, T, ...} = cT; val thy = Context.certificate_theory cert handle ERROR msg => raise CONTEXT (msg, [cT], [], [], NONE); val c = Sign.certify_class thy raw_c; val Cterm {t = prop, maxidx, sorts, ...} = global_cterm_of thy (Logic.mk_of_class (T, c)); in if Sign.of_sort thy (T, [c]) then Thm (deriv_rule0 (fn () => Proofterm.PClass (T, c)), {cert = cert, tags = [], maxidx = maxidx, constraints = insert_constraints thy (T, [c]) [], shyps = sorts, hyps = [], tpairs = [], prop = prop}) else raise THM ("of_class: type not of class " ^ Syntax.string_of_sort_global thy [c], 0, []) end |> solve_constraints; (*Internalize sort constraints of type variables*) val unconstrainT = strip_shyps #> (fn thm as Thm (der, args) => let val Deriv {promises, body} = der; val {cert, shyps, hyps, tpairs, prop, ...} = args; val thy = theory_of_thm thm; fun err msg = raise THM ("unconstrainT: " ^ msg, 0, [thm]); val _ = null hyps orelse err "bad hyps"; val _ = null tpairs orelse err "bad flex-flex constraints"; val tfrees = build_rev (Term.add_tfree_names prop); val _ = null tfrees orelse err ("illegal free type variables " ^ commas_quote tfrees); val ps = map (apsnd (Future.map fulfill_body)) promises; val (pthm, proof) = Proofterm.unconstrain_thm_proof thy (classrel_proof thy) (arity_proof thy) 
shyps prop ps body; val der' = make_deriv [] [] [pthm] proof; val prop' = Proofterm.thm_node_prop (#2 pthm); in Thm (der', {cert = cert, tags = [], maxidx = maxidx_of_term prop', constraints = [], shyps = [[]], (*potentially redundant*) hyps = [], tpairs = [], prop = prop'}) end); (*Replace all TFrees not fixed or in the hyps by new TVars*) fun varifyT_global' fixed (Thm (der, {cert, maxidx, constraints, shyps, hyps, tpairs, prop, ...})) = let val tfrees = fold TFrees.add_tfrees hyps fixed; val prop1 = attach_tpairs tpairs prop; val (al, prop2) = Type.varify_global tfrees prop1; val (ts, prop3) = Logic.strip_prems (length tpairs, [], prop2); in (al, Thm (deriv_rule1 (Proofterm.varify_proof prop tfrees) der, {cert = cert, tags = [], maxidx = Int.max (0, maxidx), constraints = constraints, shyps = shyps, hyps = hyps, tpairs = rev (map Logic.dest_equals ts), prop = prop3})) end; val varifyT_global = #2 o varifyT_global' TFrees.empty; (*Replace all TVars by TFrees that are often new*) fun legacy_freezeT (Thm (der, {cert, constraints, shyps, hyps, tpairs, prop, ...})) = let val prop1 = attach_tpairs tpairs prop; val prop2 = Type.legacy_freeze prop1; val (ts, prop3) = Logic.strip_prems (length tpairs, [], prop2); in Thm (deriv_rule1 (Proofterm.legacy_freezeT prop1) der, {cert = cert, tags = [], maxidx = maxidx_of_term prop2, constraints = constraints, shyps = shyps, hyps = hyps, tpairs = rev (map Logic.dest_equals ts), prop = prop3}) end; fun plain_prop_of raw_thm = let val thm = strip_shyps raw_thm; fun err msg = raise THM ("plain_prop_of: " ^ msg, 0, [thm]); in if not (null (hyps_of thm)) then err "theorem may not contain hypotheses" else if not (null (extra_shyps thm)) then err "theorem may not contain sort hypotheses" else if not (null (tpairs_of thm)) then err "theorem may not contain flex-flex pairs" else prop_of thm end; (*** Inference rules for tactics ***) (*Destruct proof state into constraints, other goals, goal(i), rest *) fun dest_state (state as Thm (_, {prop,tpairs,...}), i) = (case Logic.strip_prems(i, [], prop) of (B::rBs, C) => (tpairs, rev rBs, B, C) | _ => raise THM("dest_state", i, [state])) handle TERM _ => raise THM("dest_state", i, [state]); (*Prepare orule for resolution by lifting it over the parameters and assumptions of goal.*) fun lift_rule goal orule = let val Cterm {t = gprop, T, maxidx = gmax, sorts, ...} = goal; val inc = gmax + 1; val lift_abs = Logic.lift_abs inc gprop; val lift_all = Logic.lift_all inc gprop; val Thm (der, {maxidx, constraints, shyps, hyps, tpairs, prop, ...}) = orule; val (As, B) = Logic.strip_horn prop; in if T <> propT then raise THM ("lift_rule: the term must have type prop", 0, []) else Thm (deriv_rule1 (Proofterm.lift_proof gprop inc prop) der, {cert = join_certificate1 (goal, orule), tags = [], maxidx = maxidx + inc, constraints = constraints, shyps = Sorts.union shyps sorts, (*sic!*) hyps = hyps, tpairs = map (apply2 lift_abs) tpairs, prop = Logic.list_implies (map lift_all As, lift_all B)}) end; fun incr_indexes i (thm as Thm (der, {cert, maxidx, constraints, shyps, hyps, tpairs, prop, ...})) = if i < 0 then raise THM ("negative increment", 0, [thm]) else if i = 0 then thm else Thm (deriv_rule1 (Proofterm.incr_indexes i) der, {cert = cert, tags = [], maxidx = maxidx + i, constraints = constraints, shyps = shyps, hyps = hyps, tpairs = map (apply2 (Logic.incr_indexes ([], [], i))) tpairs, prop = Logic.incr_indexes ([], [], i) prop}); (*Solve subgoal Bi of proof state B1...Bn/C by assumption. 
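  From outside the kernel a typical invocation (sketch; "ctxt" and "st" are assumed
  to be in scope) is
    Thm.assumption (SOME ctxt) i st
  which yields the lazy sequence of successor states obtained by unifying the
  conclusion of subgoal i with one of its assumptions.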
*) fun assumption opt_ctxt i state = let val Thm (der, {cert, maxidx, constraints, shyps, hyps, ...}) = state; val (context, cert') = make_context_certificate [state] opt_ctxt cert; val (tpairs, Bs, Bi, C) = dest_state (state, i); fun newth n (env, tpairs) = let val normt = Envir.norm_term env; fun assumption_proof prf = Proofterm.assumption_proof (map normt Bs) (normt Bi) n prf; in Thm (deriv_rule1 (assumption_proof #> not (Envir.is_empty env) ? Proofterm.norm_proof' env) der, {tags = [], maxidx = Envir.maxidx_of env, constraints = insert_constraints_env (Context.certificate_theory cert') env constraints, shyps = Envir.insert_sorts env shyps, hyps = hyps, tpairs = if Envir.is_empty env then tpairs else map (apply2 normt) tpairs, prop = if Envir.is_empty env then Logic.list_implies (Bs, C) (*avoid wasted normalizations*) else normt (Logic.list_implies (Bs, C)) (*normalize the new rule fully*), cert = cert'}) end; val (close, asms, concl) = Logic.assum_problems (~1, Bi); val concl' = close concl; fun addprfs [] _ = Seq.empty | addprfs (asm :: rest) n = Seq.make (fn () => Seq.pull (Seq.mapp (newth n) (if Term.could_unify (asm, concl) then (Unify.unifiers (context, Envir.empty maxidx, (close asm, concl') :: tpairs)) else Seq.empty) (addprfs rest (n + 1)))) in addprfs asms 1 end; (*Solve subgoal Bi of proof state B1...Bn/C by assumption. Checks if Bi's conclusion is alpha/eta-convertible to one of its assumptions*) fun eq_assumption i state = let val Thm (der, {cert, maxidx, constraints, shyps, hyps, ...}) = state; val (tpairs, Bs, Bi, C) = dest_state (state, i); val (_, asms, concl) = Logic.assum_problems (~1, Bi); in (case find_index (fn asm => Envir.aeconv (asm, concl)) asms of ~1 => raise THM ("eq_assumption", 0, [state]) | n => Thm (deriv_rule1 (Proofterm.assumption_proof Bs Bi (n + 1)) der, {cert = cert, tags = [], maxidx = maxidx, constraints = constraints, shyps = shyps, hyps = hyps, tpairs = tpairs, prop = Logic.list_implies (Bs, C)})) end; (*For rotate_tac: fast rotation of assumptions of subgoal i*) fun rotate_rule k i state = let val Thm (der, {cert, maxidx, constraints, shyps, hyps, ...}) = state; val (tpairs, Bs, Bi, C) = dest_state (state, i); val params = Term.strip_all_vars Bi; val rest = Term.strip_all_body Bi; val asms = Logic.strip_imp_prems rest val concl = Logic.strip_imp_concl rest; val n = length asms; val m = if k < 0 then n + k else k; val Bi' = if 0 = m orelse m = n then Bi else if 0 < m andalso m < n then let val (ps, qs) = chop m asms in Logic.list_all (params, Logic.list_implies (qs @ ps, concl)) end else raise THM ("rotate_rule", k, [state]); in Thm (deriv_rule1 (Proofterm.rotate_proof Bs Bi' params asms m) der, {cert = cert, tags = [], maxidx = maxidx, constraints = constraints, shyps = shyps, hyps = hyps, tpairs = tpairs, prop = Logic.list_implies (Bs @ [Bi'], C)}) end; (*Rotates a rule's premises to the left by k, leaving the first j premises unchanged. Does nothing if k=0 or if k equals n-j, where n is the number of premises. 
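  For example, if rl has premises [P1, P2, P3, P4], then permute_prems 1 2 rl yields a
  rule with premises [P1, P4, P2, P3]: the first j = 1 premise stays in place and the
  remaining ones are rotated to the left by k = 2.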
Useful with eresolve_tac and underlies defer_tac*) fun permute_prems j k rl = let val Thm (der, {cert, maxidx, constraints, shyps, hyps, tpairs, prop, ...}) = rl; val prems = Logic.strip_imp_prems prop and concl = Logic.strip_imp_concl prop; val moved_prems = List.drop (prems, j) and fixed_prems = List.take (prems, j) handle General.Subscript => raise THM ("permute_prems: j", j, [rl]); val n_j = length moved_prems; val m = if k < 0 then n_j + k else k; val (prems', prop') = if 0 = m orelse m = n_j then (prems, prop) else if 0 < m andalso m < n_j then let val (ps, qs) = chop m moved_prems; val prems' = fixed_prems @ qs @ ps; in (prems', Logic.list_implies (prems', concl)) end else raise THM ("permute_prems: k", k, [rl]); in Thm (deriv_rule1 (Proofterm.permute_prems_proof prems' j m) der, {cert = cert, tags = [], maxidx = maxidx, constraints = constraints, shyps = shyps, hyps = hyps, tpairs = tpairs, prop = prop'}) end; (* strip_apply f B A strips off all assumptions/parameters from A introduced by lifting over B, and applies f to remaining part of A*) fun strip_apply f = let fun strip (Const ("Pure.imp", _) $ _ $ B1) (Const ("Pure.imp", _) $ A2 $ B2) = Logic.mk_implies (A2, strip B1 B2) | strip ((c as Const ("Pure.all", _)) $ Abs (_, _, t1)) ( Const ("Pure.all", _) $ Abs (a, T, t2)) = c $ Abs (a, T, strip t1 t2) | strip _ A = f A in strip end; fun strip_lifted (Const ("Pure.imp", _) $ _ $ B1) (Const ("Pure.imp", _) $ _ $ B2) = strip_lifted B1 B2 | strip_lifted (Const ("Pure.all", _) $ Abs (_, _, t1)) (Const ("Pure.all", _) $ Abs (_, _, t2)) = strip_lifted t1 t2 | strip_lifted _ A = A; (*Use the alist to rename all bound variables and some unknowns in a term dpairs = current disagreement pairs; tpairs = permanent ones (flexflex); Preserves unknowns in tpairs and on lhs of dpairs. 
*) fun rename_bvs [] _ _ _ _ = K I | rename_bvs al dpairs tpairs B As = let val add_var = fold_aterms (fn Var ((x, _), _) => insert (op =) x | _ => I); val vids = [] |> fold (add_var o fst) dpairs |> fold (add_var o fst) tpairs |> fold (add_var o snd) tpairs; val vids' = fold (add_var o strip_lifted B) As []; (*unknowns appearing elsewhere be preserved!*) val al' = distinct ((op =) o apply2 fst) (filter_out (fn (x, y) => not (member (op =) vids' x) orelse member (op =) vids x orelse member (op =) vids y) al); val unchanged = filter_out (AList.defined (op =) al') vids'; fun del_clashing clash xs _ [] qs = if clash then del_clashing false xs xs qs [] else qs | del_clashing clash xs ys ((p as (x, y)) :: ps) qs = if member (op =) ys y then del_clashing true (x :: xs) (x :: ys) ps qs else del_clashing clash xs (y :: ys) ps (p :: qs); val al'' = del_clashing false unchanged unchanged al' []; fun rename (t as Var ((x, i), T)) = (case AList.lookup (op =) al'' x of SOME y => Var ((y, i), T) | NONE => t) | rename (Abs (x, T, t)) = Abs (the_default x (AList.lookup (op =) al x), T, rename t) | rename (f $ t) = rename f $ rename t | rename t = t; fun strip_ren f Ai = f rename B Ai in strip_ren end; (*Function to rename bounds/unknowns in the argument, lifted over B*) fun rename_bvars dpairs = rename_bvs (fold_rev Term.match_bvars dpairs []) dpairs; (*** RESOLUTION ***) (** Lifting optimizations **) (*strip off pairs of assumptions/parameters in parallel -- they are identical because of lifting*) fun strip_assums2 (Const("Pure.imp", _) $ _ $ B1, Const("Pure.imp", _) $ _ $ B2) = strip_assums2 (B1,B2) | strip_assums2 (Const("Pure.all",_)$Abs(a,T,t1), Const("Pure.all",_)$Abs(_,_,t2)) = let val (B1,B2) = strip_assums2 (t1,t2) in (Abs(a,T,B1), Abs(a,T,B2)) end | strip_assums2 BB = BB; (*Faster normalization: skip assumptions that were lifted over*) fun norm_term_skip env 0 t = Envir.norm_term env t | norm_term_skip env n (Const ("Pure.all", _) $ Abs (a, T, t)) = let val T' = Envir.norm_type (Envir.type_env env) T (*Must instantiate types of parameters because they are flattened; this could be a NEW parameter*) in Logic.all_const T' $ Abs (a, T', norm_term_skip env n t) end | norm_term_skip env n (Const ("Pure.imp", _) $ A $ B) = Logic.mk_implies (A, norm_term_skip env (n - 1) B) | norm_term_skip _ _ _ = error "norm_term_skip: too few assumptions??"; (*unify types of schematic variables (non-lifted case)*) fun unify_var_types context (th1, th2) env = let fun unify_vars (T :: Us) = fold (fn U => Pattern.unify_types context (T, U)) Us | unify_vars _ = I; val add_vars = full_prop_of #> fold_aterms (fn Var v => Vartab.insert_list (op =) v | _ => I); val vars = Vartab.build (add_vars th1 #> add_vars th2); in SOME (Vartab.fold (unify_vars o #2) vars env) end handle Pattern.Unif => NONE; (*Composition of object rule r=(A1...Am/B) with proof state s=(B1...Bn/C) Unifies B with Bi, replacing subgoal i (1 <= i <= n) If match then forbid instantiations in proof state If lifted then shorten the dpair using strip_assums2. If eres_flg then simultaneously proves A1 by assumption. nsubgoal is the number of new subgoals (written m above). Curried so that resolution calls dest_state only once. 
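  External callers normally use the wrapper bicompose defined below, e.g.
  (sketch; "ctxt", "rule" and "st" are assumed to be in scope):
    Thm.bicompose (SOME ctxt) {flatten = true, match = false, incremented = false}
      (false, rule, Thm.nprems_of rule) i st
  which enumerates the results of resolving rule against subgoal i of st.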
*) local exception COMPOSE in fun bicompose_aux opt_ctxt {flatten, match, incremented} (state, (stpairs, Bs, Bi, C), lifted) (eres_flg, orule, nsubgoal) = let val Thm (sder, {maxidx=smax, constraints = constraints2, shyps = shyps2, hyps = hyps2, ...}) = state and Thm (rder, {maxidx=rmax, constraints = constraints1, shyps = shyps1, hyps = hyps1, tpairs=rtpairs, prop=rprop,...}) = orule (*How many hyps to skip over during normalization*) and nlift = Logic.count_prems (strip_all_body Bi) + (if eres_flg then ~1 else 0) val (context, cert) = make_context_certificate [state, orule] opt_ctxt (join_certificate2 (state, orule)); (*Add new theorem with prop = "\Bs; As\ \ C" to thq*) fun addth A (As, oldAs, rder', n) ((env, tpairs), thq) = let val normt = Envir.norm_term env; (*perform minimal copying here by examining env*) val (ntpairs, normp) = if Envir.is_empty env then (tpairs, (Bs @ As, C)) else let val ntps = map (apply2 normt) tpairs in if Envir.above env smax then (*no assignments in state; normalize the rule only*) if lifted then (ntps, (Bs @ map (norm_term_skip env nlift) As, C)) else (ntps, (Bs @ map normt As, C)) else if match then raise COMPOSE else (*normalize the new rule fully*) (ntps, (map normt (Bs @ As), normt C)) end val constraints' = union_constraints constraints1 constraints2 |> insert_constraints_env (Context.certificate_theory cert) env; fun bicompose_proof prf1 prf2 = Proofterm.bicompose_proof flatten (map normt Bs) (map normt As) A oldAs n (nlift+1) prf1 prf2 val th = Thm (deriv_rule2 (if Envir.is_empty env then bicompose_proof else if Envir.above env smax then bicompose_proof o Proofterm.norm_proof' env else Proofterm.norm_proof' env oo bicompose_proof) rder' sder, {tags = [], maxidx = Envir.maxidx_of env, constraints = constraints', shyps = Envir.insert_sorts env (Sorts.union shyps1 shyps2), hyps = union_hyps hyps1 hyps2, tpairs = ntpairs, prop = Logic.list_implies normp, cert = cert}) in Seq.cons th thq end handle COMPOSE => thq; val (rAs,B) = Logic.strip_prems(nsubgoal, [], rprop) handle TERM _ => raise THM("bicompose: rule", 0, [orule,state]); (*Modify assumptions, deleting n-th if n>0 for e-resolution*) fun newAs(As0, n, dpairs, tpairs) = let val (As1, rder') = if not lifted then (As0, rder) else let val rename = rename_bvars dpairs tpairs B As0 in (map (rename strip_apply) As0, deriv_rule1 (Proofterm.map_proof_terms (rename K) I) rder) end; in (map (if flatten then (Logic.flatten_params n) else I) As1, As1, rder', n) handle TERM _ => raise THM("bicompose: 1st premise", 0, [orule]) end; val BBi = if lifted then strip_assums2(B,Bi) else (B,Bi); val dpairs = BBi :: (rtpairs@stpairs); (*elim-resolution: try each assumption in turn*) fun eres _ [] = raise THM ("bicompose: no premises", 0, [orule, state]) | eres env (A1 :: As) = let val A = SOME A1; val (close, asms, concl) = Logic.assum_problems (nlift + 1, A1); val concl' = close concl; fun tryasms [] _ = Seq.empty | tryasms (asm :: rest) n = if Term.could_unify (asm, concl) then let val asm' = close asm in (case Seq.pull (Unify.unifiers (context, env, (asm', concl') :: dpairs)) of NONE => tryasms rest (n + 1) | cell as SOME ((_, tpairs), _) => Seq.it_right (addth A (newAs (As, n, [BBi, (concl', asm')], tpairs))) (Seq.make (fn () => cell), Seq.make (fn () => Seq.pull (tryasms rest (n + 1))))) end else tryasms rest (n + 1); in tryasms asms 1 end; (*ordinary resolution*) fun res env = (case Seq.pull (Unify.unifiers (context, env, dpairs)) of NONE => Seq.empty | cell as SOME ((_, tpairs), _) => Seq.it_right (addth NONE 
(newAs (rev rAs, 0, [BBi], tpairs))) (Seq.make (fn () => cell), Seq.empty)); val env0 = Envir.empty (Int.max (rmax, smax)); in (case if incremented then SOME env0 else unify_var_types context (state, orule) env0 of NONE => Seq.empty | SOME env => if eres_flg then eres env (rev rAs) else res env) end; end; fun bicompose opt_ctxt flags arg i state = bicompose_aux opt_ctxt flags (state, dest_state (state,i), false) arg; (*Quick test whether rule is resolvable with the subgoal with hyps Hs and conclusion B. If eres_flg then checks 1st premise of rule also*) fun could_bires (Hs, B, eres_flg, rule) = let fun could_reshyp (A1::_) = exists (fn H => Term.could_unify (A1, H)) Hs | could_reshyp [] = false; (*no premise -- illegal*) in Term.could_unify(concl_of rule, B) andalso (not eres_flg orelse could_reshyp (prems_of rule)) end; (*Bi-resolution of a state with a list of (flag,rule) pairs. Puts the rule above: rule/state. Renames vars in the rules. *) fun biresolution opt_ctxt match brules i state = let val (stpairs, Bs, Bi, C) = dest_state(state,i); val lift = lift_rule (cprem_of state i); val B = Logic.strip_assums_concl Bi; val Hs = Logic.strip_assums_hyp Bi; val compose = bicompose_aux opt_ctxt {flatten = true, match = match, incremented = true} (state, (stpairs, Bs, Bi, C), true); fun res [] = Seq.empty | res ((eres_flg, rule)::brules) = if Config.get_generic (make_context [state] opt_ctxt (cert_of state)) Pattern.unify_trace_failure orelse could_bires (Hs, B, eres_flg, rule) then Seq.make (*delay processing remainder till needed*) (fn()=> SOME(compose (eres_flg, lift rule, nprems_of rule), res brules)) else res brules in Seq.flat (res brules) end; (*Resolution: exactly one resolvent must be produced*) fun tha RSN (i, thb) = (case Seq.chop 2 (biresolution NONE false [(false, tha)] i thb) of ([th], _) => solve_constraints th | ([], _) => raise THM ("RSN: no unifiers", i, [tha, thb]) | _ => raise THM ("RSN: multiple unifiers", i, [tha, thb])); (*Resolution: P \ Q, Q \ R gives P \ R*) fun tha RS thb = tha RSN (1,thb); (**** Type classes ****) fun standard_tvars thm = let val thy = theory_of_thm thm; val tvars = build_rev (Term.add_tvars (prop_of thm)); val names = Name.invent Name.context Name.aT (length tvars); val tinst = map2 (fn (ai, S) => fn b => ((ai, S), global_ctyp_of thy (TVar ((b, 0), S)))) tvars names; in instantiate (TVars.make tinst, Vars.empty) thm end (* class relations *) val is_classrel = Symreltab.defined o get_classrels; fun complete_classrels thy = let fun complete (c, (_, (all_preds, all_succs))) (finished1, thy1) = let fun compl c1 c2 (finished2, thy2) = if is_classrel thy2 (c1, c2) then (finished2, thy2) else (false, thy2 |> (map_classrels o Symreltab.update) ((c1, c2), (the_classrel thy2 (c1, c) RS the_classrel thy2 (c, c2)) |> standard_tvars |> close_derivation \<^here> |> tap (expose_proof thy2) |> trim_context)); val proven = is_classrel thy1; val preds = Graph.Keys.fold (fn c1 => proven (c1, c) ? cons c1) all_preds []; val succs = Graph.Keys.fold (fn c2 => proven (c, c2) ? cons c2) all_succs []; in fold_product compl preds succs (finished1, thy1) end; in (case Graph.fold complete (Sorts.classes_of (Sign.classes_of thy)) (true, thy) of (true, _) => NONE | (_, thy') => SOME thy') end; (* type arities *) fun thynames_of_arity thy (a, c) = build (get_arities thy |> Aritytab.fold (fn ((a', _, c'), (_, name, ser)) => (a = a' andalso c = c') ? 
cons (name, ser))) |> sort (int_ord o apply2 #2) |> map #1; fun insert_arity_completions thy ((t, Ss, c), (th, thy_name, ser)) (finished, arities) = let val completions = Sign.super_classes thy c |> map_filter (fn c1 => if Aritytab.defined arities (t, Ss, c1) then NONE else let val th1 = (th RS the_classrel thy (c, c1)) |> standard_tvars |> close_derivation \<^here> |> tap (expose_proof thy) |> trim_context; in SOME ((t, Ss, c1), (th1, thy_name, ser)) end); val finished' = finished andalso null completions; val arities' = fold Aritytab.update completions arities; in (finished', arities') end; fun complete_arities thy = let val arities = get_arities thy; val (finished, arities') = Aritytab.fold (insert_arity_completions thy) arities (true, get_arities thy); in if finished then NONE else SOME (map_arities (K arities') thy) end; val _ = Theory.setup (Theory.at_begin complete_classrels #> Theory.at_begin complete_arities); (* primitive rules *) fun add_classrel raw_th thy = let val th = strip_shyps (transfer thy raw_th); val th' = th |> unconstrainT |> tap (expose_proof thy) |> trim_context; val prop = plain_prop_of th; val (c1, c2) = Logic.dest_classrel prop; in thy |> Sign.primitive_classrel (c1, c2) |> map_classrels (Symreltab.update ((c1, c2), th')) |> perhaps complete_classrels |> perhaps complete_arities end; fun add_arity raw_th thy = let val th = strip_shyps (transfer thy raw_th); val th' = th |> unconstrainT |> tap (expose_proof thy) |> trim_context; val prop = plain_prop_of th; val (t, Ss, c) = Logic.dest_arity prop; val ar = ((t, Ss, c), (th', Context.theory_name thy, serial ())); in thy |> Sign.primitive_arity (t, Ss, [c]) |> map_arities (Aritytab.update ar #> curry (insert_arity_completions thy ar) true #> #2) end; end; structure Basic_Thm: BASIC_THM = Thm; open Basic_Thm;
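(* Illustrative usage sketch -- not part of the kernel sources.  From an Isabelle/ML
   block inside a theory (where a Proof.context "ctxt" is available, e.g. via the
   @{context} antiquotation), the primitive rules above compose as follows:

     val A    = Thm.cterm_of ctxt (Syntax.read_prop ctxt "PROP A");
     val thA  = Thm.assume A;                (* A |- A *)
     val thAA = Thm.implies_intr A thA;      (* |- PROP A ==> PROP A; hypothesis discharged *)
     val thA' = Thm.implies_elim thAA thA;   (* A |- A again *)

   implies_elim requires the minor premise to be alpha-convertible to the antecedent of
   the major premise; resolution (RS, RSN above) generalizes this by higher-order
   unification of the rule's conclusion against a premise of the second theorem. *)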