diff --git a/thys/Applicative_Lifting/applicative.ML b/thys/Applicative_Lifting/applicative.ML --- a/thys/Applicative_Lifting/applicative.ML +++ b/thys/Applicative_Lifting/applicative.ML @@ -1,1368 +1,1370 @@ (* Author: Joshua Schneider, ETH Zurich *) signature APPLICATIVE = sig type afun val intern: Context.generic -> xstring -> string val extern: Context.generic -> string -> xstring val afun_of_generic: Context.generic -> string -> afun val afun_of: Proof.context -> string -> afun val afuns_of_term_generic: Context.generic -> term -> afun list val afuns_of_term: Proof.context -> term -> afun list val afuns_of_typ_generic: Context.generic -> typ -> afun list val afuns_of_typ: Proof.context -> typ -> afun list val name_of_afun: afun -> binding val unfolds_of_afun: afun -> thm list type afun_inst val match_afun_inst: Proof.context -> afun -> term * int -> afun_inst val import_afun_inst: afun -> Proof.context -> afun_inst * Proof.context val inner_sort_of: afun_inst -> sort val mk_type: afun_inst -> typ -> typ val mk_pure: afun_inst -> typ -> term val lift_term: afun_inst -> term -> term val mk_ap: afun_inst -> typ * typ -> term val mk_comb: afun_inst -> typ -> term * term -> term val mk_set: afun_inst -> typ -> term val dest_type: Proof.context -> afun_inst -> typ -> typ option val dest_type': Proof.context -> afun_inst -> typ -> typ val dest_pure: Proof.context -> afun_inst -> term -> term val dest_comb: Proof.context -> afun_inst -> term -> term * term val infer_comb: Proof.context -> afun_inst -> term * term -> term val subst_lift_term: afun_inst -> (term * term) list -> term -> term val generalize_lift_terms: afun_inst -> term list -> Proof.context -> term list * Proof.context val afun_unfold_tac: Proof.context -> afun -> int -> tactic val afun_fold_tac: Proof.context -> afun -> int -> tactic val unfold_all_tac: Proof.context -> int -> tactic val normalform_conv: Proof.context -> afun -> conv val normalize_rel_tac: Proof.context -> afun -> int -> tactic val general_normalform_conv: Proof.context -> afun -> cterm * cterm -> thm * thm val general_normalize_rel_tac: Proof.context -> afun -> int -> tactic val forward_lift_rule: Proof.context -> afun -> thm -> thm val unfold_wrapper_tac: Proof.context -> afun option -> int -> tactic val fold_wrapper_tac: Proof.context -> afun option -> int -> tactic val normalize_wrapper_tac: Proof.context -> afun option -> int -> tactic val lifting_wrapper_tac: Proof.context -> afun option -> int -> tactic val setup_combinators: (string * thm) list -> local_theory -> local_theory val combinator_rule_attrib: string list option -> attribute val parse_opt_afun: afun option context_parser val applicative_cmd: (((((binding * string list) * string) * string) * string option) * string option) -> local_theory -> Proof.state val print_afuns: Proof.context -> unit val add_unfold_attrib: xstring option -> attribute val forward_lift_attrib: xstring -> attribute end; structure Applicative : APPLICATIVE = struct open Ctr_Sugar_Util (** General utilities **) fun fold_options xs = fold (fn x => (case x of SOME x' => cons x' | NONE => I)) xs []; fun the_pair [x, y] = (x, y) | the_pair _ = raise General.Size; fun strip_comb2 (f $ x $ y) = (f, (x, y)) | strip_comb2 t = raise TERM ("strip_comb2", [t]); fun mk_comb_pattern (t, n) = let val Ts = take n (binder_types (fastype_of t)); val maxidx = maxidx_of_term t; val vars = map (fn (T, i) => ((Name.uu, maxidx + i), T)) (Ts ~~ (1 upto n)); in (vars, Term.betapplys (t, map Var vars)) end; fun match_comb_pattern ctxt tn u = let val 
thy = Proof_Context.theory_of ctxt; val (vars, pat) = mk_comb_pattern tn; val envs = Pattern.match thy (pat, u) (Vartab.empty, Vartab.empty) handle Pattern.MATCH => raise TERM ("match_comb_pattern", [u, pat]); in (vars, envs) end; fun dest_comb_pattern ctxt tn u = let val (vars, (_, env)) = match_comb_pattern ctxt tn u; in map (the o Envir.lookup1 env) vars end; fun norm_term_types tyenv t = Term_Subst.map_types_same (Envir.norm_type_same tyenv) t handle Same.SAME => t; val mk_TFrees_of = mk_TFrees' oo replicate; fun mk_Free name typ ctxt = yield_singleton Variable.variant_fixes name ctxt |>> (fn name' => Free (name', typ)); (*tuples with explicit sentinel*) fun mk_tuple' ts = fold_rev (curry HOLogic.mk_prod) ts HOLogic.unit; fun strip_tuple' (Const (@{const_name Unity}, _)) = [] | strip_tuple' (Const (@{const_name Pair}, _) $ t1 $ t2) = t1 :: strip_tuple' t2 | strip_tuple' t = raise TERM ("strip_tuple'", [t]); fun mk_eq_on S = let val (SA, ST) = `HOLogic.dest_setT (fastype_of S); in Const (@{const_name eq_on}, ST --> BNF_Util.mk_pred2T SA SA) $ S end; (* Polymorphic terms and term groups *) type poly_type = typ list * typ; type poly_term = typ list * term; fun instantiate_poly_type (tvars, T) insts = typ_subst_atomic (tvars ~~ insts) T; fun instantiate_poly_term (tvars, t) insts = subst_atomic_types (tvars ~~ insts) t; fun dest_poly_type ctxt (tvars, T) U = let val thy = Proof_Context.theory_of ctxt; val tyenv = Sign.typ_match thy (T, U) Vartab.empty handle Type.TYPE_MATCH => raise TYPE ("dest_poly_type", [U, T], []); in map (Type.lookup tyenv o dest_TVar) tvars end; fun poly_type_to_term (tvars, T) = (tvars, Logic.mk_type T); fun poly_type_of_term (tvars, t) = (tvars, Logic.dest_type t); (* Schematic variables are treated uniformly in packed terms, thus forming an ad hoc context of type variables. Otherwise, morphisms are allowed to rename schematic variables non-consistently in separate terms, and occasionally will do so. 
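For illustration (a sketch of the encoding, derived from the functions defined just below, not a statement from the original comment): a poly term ([?'a], pure) is packed, roughly, as the HOL pair ((TYPE(?'a), ()), pure), and pack_poly_terms tuples such pairs up with a unit sentinel. A morphism therefore encounters the type variable both in the TYPE(_) annotation and in the accompanying term, so any renaming it performs is applied to both occurrences consistently.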
*) fun pack_poly_term (tvars, t) = HOLogic.mk_prod (mk_tuple' (map Logic.mk_type tvars), t); fun unpack_poly_term t = let val (tvars, t') = HOLogic.dest_prod t; in (map Logic.dest_type (strip_tuple' tvars), t') end; val pack_poly_terms = mk_tuple' o map pack_poly_term; val unpack_poly_terms = map unpack_poly_term o strip_tuple'; (*match and instantiate schematic type variables which are not "quantified" in the packed term*) fun match_poly_terms_type ctxt (pt, i) (U, maxidx) = let val thy = Proof_Context.theory_of ctxt; val pt' = Logic.incr_indexes ([], [], maxidx + 1) pt; val (tvars, T) = poly_type_of_term (nth (unpack_poly_terms pt') i); val tyenv = Sign.typ_match thy (T, U) Vartab.empty handle Type.TYPE_MATCH => raise TYPE ("match_poly_terms", [U, T], []); val tyenv' = fold Vartab.delete_safe (map (#1 o dest_TVar) tvars) tyenv; val pt'' = Envir.subst_term_types tyenv' pt'; in unpack_poly_terms pt'' end; fun match_poly_terms ctxt (pt, i) (t, maxidx) = match_poly_terms_type ctxt (pt, i) (fastype_of t, maxidx); (*fix schematic type variables which are not "quantified", as well as schematic term variables*) fun import_poly_terms pt ctxt = let fun insert_paramTs (tvars, t) = fold_types (fold_atyps (fn TVar v => if member (op =) tvars (TVar v) then I else insert (op =) v | _ => I)) t; val paramTs = rev (fold insert_paramTs (unpack_poly_terms pt) []); val (tfrees, ctxt') = Variable.invent_types (map #2 paramTs) ctxt; - val instT = paramTs ~~ map TFree tfrees; + val instT = Term_Subst.TVars.table (paramTs ~~ map TFree tfrees); val params = map (apsnd (Term_Subst.instantiateT instT)) (rev (Term.add_vars pt [])); val (frees, ctxt'') = Variable.variant_fixes (map (Name.clean o #1 o #1) params) ctxt'; - val inst = params ~~ map Free (frees ~~ map #2 params); + val inst = Term_Subst.Vars.table (params ~~ map Free (frees ~~ map #2 params)); val pt' = Term_Subst.instantiate (instT, inst) pt; in (unpack_poly_terms pt', ctxt'') end; (** Internal representation **) (* Applicative functors *) type rel_thms = { pure_transfer: thm, ap_rel_fun: thm }; fun map_rel_thms f {pure_transfer, ap_rel_fun} = {pure_transfer = f pure_transfer, ap_rel_fun = f ap_rel_fun}; type afun_thms = { hom: thm, ichng: thm, reds: thm Symtab.table, rel_thms: rel_thms option, rel_intros: thm list, pure_comp_conv: thm }; fun map_afun_thms f {hom, ichng, reds, rel_thms, rel_intros, pure_comp_conv} = {hom = f hom, ichng = f ichng, reds = Symtab.map (K f) reds, rel_thms = Option.map (map_rel_thms f) rel_thms, rel_intros = map f rel_intros, pure_comp_conv = f pure_comp_conv}; datatype afun = AFun of { name: binding, terms: term, rel: term option, thms: afun_thms, unfolds: thm Item_Net.T }; fun rep_afun (AFun af) = af; val name_of_afun = #name o rep_afun; val terms_of_afun = #terms o rep_afun; val rel_of_afun = #rel o rep_afun; val thms_of_afun = #thms o rep_afun; val unfolds_of_afun = Item_Net.content o #unfolds o rep_afun; val red_of_afun = Symtab.lookup o #reds o thms_of_afun; val has_red_afun = is_some oo red_of_afun; fun mk_afun name terms rel thms = AFun {name = name, terms = terms, rel = rel, thms = thms, unfolds = Thm.item_net}; fun map_afun f1 f2 f3 f4 f5 (AFun {name, terms, rel, thms, unfolds}) = AFun {name = f1 name, terms = f2 terms, rel = f3 rel, thms = f4 thms, unfolds = f5 unfolds}; fun map_unfolds f thms = fold Item_Net.update (map f (Item_Net.content thms)) Thm.item_net; fun morph_afun phi = let val binding = Morphism.binding phi; val term = Morphism.term phi; val thm = Morphism.thm phi; in map_afun binding term (Option.map 
term) (map_afun_thms thm) (map_unfolds thm) end; val transfer_afun = morph_afun o Morphism.transfer_morphism; fun add_unfolds_afun thms = map_afun I I I I (fold Item_Net.update thms); fun patterns_of_afun af = let val [Tt, (_, pure), (_, ap), _] = unpack_poly_terms (terms_of_afun af); val (_, T) = poly_type_of_term Tt; in [#2 (mk_comb_pattern (pure, 1)), #2 (mk_comb_pattern (ap, 2)), Net.encode_type T] end; (* Combinator rules *) datatype combinator_rule = Combinator_Rule of { strong_premises: string Ord_List.T, weak_premises: bool, conclusion: string, eq_thm: thm }; fun rep_combinator_rule (Combinator_Rule rule) = rule; val conclusion_of_rule = #conclusion o rep_combinator_rule; val thm_of_rule = #eq_thm o rep_combinator_rule; fun eq_combinator_rule (rule1, rule2) = pointer_eq (rule1, rule2) orelse Thm.eq_thm (thm_of_rule rule1, thm_of_rule rule2); fun is_applicable_rule rule have_weak have_premises = let val {strong_premises, weak_premises, ...} = rep_combinator_rule rule; in (have_weak orelse not weak_premises) andalso have_premises strong_premises end; fun map_combinator_rule f1 f2 f3 f4 (Combinator_Rule {strong_premises, weak_premises, conclusion, eq_thm}) = Combinator_Rule {strong_premises = f1 strong_premises, weak_premises = f2 weak_premises, conclusion = f3 conclusion, eq_thm = f4 eq_thm}; fun transfer_combinator_rule thy = map_combinator_rule I I I (Thm.transfer thy); fun mk_combinator_rule comb_names weak_premises thm = let val (lhs, rhs) = Logic.dest_equals (Thm.prop_of thm); val conclusion = the (Symtab.lookup comb_names (#1 (dest_Const lhs))); val premises = Ord_List.make fast_string_ord (fold_options (map (Symtab.lookup comb_names o #1) (Term.add_consts rhs []))); val weak_premises' = Ord_List.make fast_string_ord (these weak_premises); val strong_premises = Ord_List.subtract fast_string_ord weak_premises' premises; in Combinator_Rule {strong_premises = strong_premises, weak_premises = is_some weak_premises, conclusion = conclusion, eq_thm = thm} end; (* Generic data *) (*FIXME: needs tests, especially around theory merging*) fun merge_afuns _ (af1, af2) = if pointer_eq (af1, af2) then raise Change_Table.SAME else map_afun I I I I (fn thms1 => Item_Net.merge (thms1, #unfolds (rep_afun af2))) af1; structure Data = Generic_Data ( type T = { combinators: thm Symtab.table * combinator_rule list, afuns: afun Name_Space.table, patterns: (string * term list) Item_Net.T }; val empty = { combinators = (Symtab.empty, []), afuns = Name_Space.empty_table "applicative functor", patterns = Item_Net.init (op = o apply2 #1) #2 }; val extend = I; fun merge ({combinators = (cd1, cr1), afuns = a1, patterns = p1}, {combinators = (cd2, cr2), afuns = a2, patterns = p2}) = {combinators = (Symtab.merge (K true) (cd1, cd2), Library.merge eq_combinator_rule (cr1, cr2)), afuns = Name_Space.join_tables merge_afuns (a1, a2), patterns = Item_Net.merge (p1, p2)}; ); fun get_combinators context = let val thy = Context.theory_of context; val {combinators = (defs, rules), ...} = Data.get context; in (Symtab.map (K (Thm.transfer thy)) defs, map (transfer_combinator_rule thy) rules) end; val get_afun_table = #afuns o Data.get; val get_afun_space = Name_Space.space_of_table o get_afun_table; val get_patterns = #patterns o Data.get; fun map_data f1 f2 f3 {combinators, afuns, patterns} = {combinators = f1 combinators, afuns = f2 afuns, patterns = f3 patterns}; val intern = Name_Space.intern o get_afun_space; fun extern context = Name_Space.extern (Context.proof_of context) (get_afun_space context); local fun 
undeclared name = error ("Undeclared applicative functor " ^ quote name); in fun afun_of_generic context name = case Name_Space.lookup (get_afun_table context) name of SOME af => transfer_afun (Context.theory_of context) af | NONE => undeclared name; val afun_of = afun_of_generic o Context.Proof; fun update_afun name f context = if Name_Space.defined (get_afun_table context) name then Data.map (map_data I (Name_Space.map_table_entry name f) I) context else undeclared name; end; fun match_term context = map #1 o Item_Net.retrieve_matching (get_patterns context); fun match_typ context = match_term context o Net.encode_type; (*works only with terms which are combinations of pure and ap*) fun afuns_of_term_generic context = map (afun_of_generic context) o match_term context; val afuns_of_term = afuns_of_term_generic o Context.Proof; fun afuns_of_typ_generic context = map (afun_of_generic context) o match_typ context; val afuns_of_typ = afuns_of_typ_generic o Context.Proof; fun all_unfolds_of_generic context = let val unfolds_of = map (Thm.transfer'' context) o unfolds_of_afun; in Name_Space.fold_table (fn (_, af) => append (unfolds_of af)) (get_afun_table context) [] end; val all_unfolds_of = all_unfolds_of_generic o Context.Proof; (** Term construction and destruction **) type afun_inst = { T: poly_type, pure: poly_term, ap: poly_term, set: poly_term }; fun mk_afun_inst [T, pure, ap, set] = {T = poly_type_of_term T, pure = pure, ap = ap, set = set}; fun pack_afun_inst {T, pure, ap, set} = pack_poly_terms [poly_type_to_term T, pure, ap, set]; fun match_afun_inst ctxt af = match_poly_terms ctxt (terms_of_afun af, 0) #> mk_afun_inst; fun import_afun_inst_raw terms = import_poly_terms terms #>> mk_afun_inst; val import_afun_inst = import_afun_inst_raw o terms_of_afun; fun inner_sort_of {T = (tvars, _), ...} = Type.sort_of_atyp (the_single tvars); fun mk_type {T, ...} = instantiate_poly_type T o single; fun mk_pure {pure, ...} = instantiate_poly_term pure o single; fun mk_ap {ap, ...} (T1, T2) = instantiate_poly_term ap [T1, T2]; fun mk_set {set, ...} = instantiate_poly_term set o single; fun lift_term af_inst t = Term.betapply (mk_pure af_inst (Term.fastype_of t), t); fun mk_comb af_inst funT (t1, t2) = Term.betapplys (mk_ap af_inst (dest_funT funT), [t1, t2]); fun dest_type ctxt {T, ...} = the_single o dest_poly_type ctxt T; val dest_type' = the_default HOLogic.unitT ooo dest_type; fun dest_pure ctxt {pure = (_, pure), ...} = the_single o dest_comb_pattern ctxt (pure, 1); fun dest_comb ctxt {ap = (_, ap), ...} = the_pair o dest_comb_pattern ctxt (ap, 2); fun infer_comb ctxt af_inst (t1, t2) = let val funT = the_default (dummyT --> dummyT) (dest_type ctxt af_inst (fastype_of t1)); in mk_comb af_inst funT (t1, t2) end; (*lift a term, except for non-combination subterms mapped by subst*) fun subst_lift_term af_inst subst tm = let fun subst_lift (s $ t) = (case (subst_lift s, subst_lift t) of (NONE, NONE) => NONE | (SOME s', NONE) => SOME (mk_comb af_inst (fastype_of s) (s', lift_term af_inst t)) | (NONE, SOME t') => SOME (mk_comb af_inst (fastype_of s) (lift_term af_inst s, t')) | (SOME s', SOME t') => SOME (mk_comb af_inst (fastype_of s) (s', t'))) | subst_lift t = AList.lookup (op aconv) subst t; in (case subst_lift tm of NONE => lift_term af_inst tm | SOME tm' => tm') end; fun add_lifted_vars (s $ t) = add_lifted_vars s #> add_lifted_vars t | add_lifted_vars (Abs (_, _, t)) = Term.add_vars t | add_lifted_vars _ = I; (*lift terms, where schematic variables are generalized to the functor and then 
fixed*) fun generalize_lift_terms af_inst ts ctxt = let val vars = subtract (op =) (fold add_lifted_vars ts []) (fold Term.add_vars ts []); val (var_names, Ts) = split_list vars; val (free_names, ctxt') = Variable.variant_fixes (map #1 var_names) ctxt; val Ts' = map (mk_type af_inst) Ts; val subst = map Var vars ~~ map Free (free_names ~~ Ts'); in (map (subst_lift_term af_inst subst) ts, ctxt') end; (** Reasoning with applicative functors **) (* Utilities *) val clean_name = perhaps (perhaps_apply [try Name.dest_skolem, try Name.dest_internal]); (*based on term_name from Pure/term.ML*) fun term_to_vname (Const (x, _)) = Long_Name.base_name x | term_to_vname (Free (x, _)) = clean_name x | term_to_vname (Var ((x, _), _)) = clean_name x | term_to_vname _ = "x"; fun afuns_of_rel precise ctxt t = let val (_, (lhs, rhs)) = Variable.focus NONE t ctxt |> #1 |> #2 |> Logic.strip_imp_concl |> Envir.beta_eta_contract |> HOLogic.dest_Trueprop |> strip_comb2; in if precise then (case afuns_of_term ctxt lhs of [] => afuns_of_term ctxt rhs | afs => afs) else afuns_of_typ ctxt (fastype_of lhs) end; fun AUTO_AFUNS precise tac ctxt opt_af = case opt_af of SOME af => tac [af] | NONE => SUBGOAL (fn (goal, i) => (case afuns_of_rel precise ctxt goal of [] => no_tac | afs => tac afs i) handle TERM _ => no_tac); fun AUTO_AFUN precise tac = AUTO_AFUNS precise (tac o hd); fun binop_par_conv cv ct = let val ((binop, arg1), arg2) = Thm.dest_comb ct |>> Thm.dest_comb; val (th1, th2) = cv (arg1, arg2); in Drule.binop_cong_rule binop th1 th2 end; fun binop_par_conv_tac cv = CONVERSION (HOLogic.Trueprop_conv (binop_par_conv cv)); val fold_goal_tac = SELECT_GOAL oo Raw_Simplifier.fold_goals_tac; (* Unfolding of lifted constants *) fun afun_unfold_tac ctxt af = Raw_Simplifier.rewrite_goal_tac ctxt (unfolds_of_afun af); fun afun_fold_tac ctxt af = fold_goal_tac ctxt (unfolds_of_afun af); fun unfold_all_tac ctxt = Raw_Simplifier.rewrite_goal_tac ctxt (all_unfolds_of ctxt); (* Basic conversions *) fun pure_conv ctxt {pure = (_, pure), ...} cv ct = let val ([var], (tyenv, env)) = match_comb_pattern ctxt (pure, 1) (Thm.term_of ct); val arg = the (Envir.lookup1 env var); val thm = cv (Thm.cterm_of ctxt arg); in if Thm.is_reflexive thm then Conv.all_conv ct else let val pure_inst = Envir.subst_term_types tyenv pure; in Drule.arg_cong_rule (Thm.cterm_of ctxt pure_inst) thm end end; fun ap_conv ctxt {ap = (_, ap), ...} cv1 cv2 ct = let val ([var1, var2], (tyenv, env)) = match_comb_pattern ctxt (ap, 2) (Thm.term_of ct); val (arg1, arg2) = apply2 (the o Envir.lookup1 env) (var1, var2); val thm1 = cv1 (Thm.cterm_of ctxt arg1); val thm2 = cv2 (Thm.cterm_of ctxt arg2); in if Thm.is_reflexive thm1 andalso Thm.is_reflexive thm2 then Conv.all_conv ct else let val ap_inst = Envir.subst_term_types tyenv ap; in Drule.binop_cong_rule (Thm.cterm_of ctxt ap_inst) thm1 thm2 end end; (* Normal form conversion *) (*convert a term into applicative normal form*) fun normalform_conv ctxt af ct = let val {hom, ichng, pure_comp_conv, ...} = thms_of_afun af; val the_red = the o red_of_afun af; val leaf_conv = Conv.rewr_conv (mk_meta_eq (the_red "I") |> Thm.symmetric); val merge_conv = Conv.rewr_conv (mk_meta_eq hom); val swap_conv = Conv.rewr_conv (mk_meta_eq ichng); val rotate_conv = Conv.rewr_conv (mk_meta_eq (the_red "B") |> Thm.symmetric); val pure_rotate_conv = Conv.rewr_conv (mk_meta_eq pure_comp_conv); val af_inst = match_afun_inst ctxt af (Thm.term_of ct, Thm.maxidx_of_cterm ct); fun left_conv cv = ap_conv ctxt af_inst cv Conv.all_conv; fun 
norm_pure_nf ct = ((pure_rotate_conv then_conv left_conv norm_pure_nf) else_conv merge_conv) ct; val norm_nf_pure = swap_conv then_conv norm_pure_nf; fun norm_nf_nf ct = ((rotate_conv then_conv left_conv (left_conv norm_pure_nf then_conv norm_nf_nf)) else_conv norm_nf_pure) ct; fun normalize ct = ((ap_conv ctxt af_inst normalize normalize then_conv norm_nf_nf) else_conv pure_conv ctxt af_inst Conv.all_conv else_conv leaf_conv) ct; in normalize ct end; val normalize_rel_tac = binop_par_conv_tac o apply2 oo normalform_conv; (* Bracket abstraction and generalized unlifting *) (*TODO: use proper conversions*) datatype apterm = Pure of term (*includes pure application*) | ApVar of int * term (*unique index, instantiated term*) | Ap of apterm * apterm; fun apterm_vars (Pure _) = I | apterm_vars (ApVar v) = cons v | apterm_vars (Ap (t1, t2)) = apterm_vars t1 #> apterm_vars t2; fun occurs_any _ (Pure _) = false | occurs_any vs (ApVar (i, _)) = exists (fn j => i = j) vs | occurs_any vs (Ap (t1, t2)) = occurs_any vs t1 orelse occurs_any vs t2; fun term_of_apterm ctxt af_inst t = let fun tm_of (Pure t) = t | tm_of (ApVar (_, t)) = t | tm_of (Ap (t1, t2)) = infer_comb ctxt af_inst (tm_of t1, tm_of t2); in tm_of t end; fun apterm_of_term ctxt af_inst t = let fun aptm_of t i = case try (dest_comb ctxt af_inst) t of SOME (t1, t2) => i |> aptm_of t1 ||>> aptm_of t2 |>> Ap | NONE => if can (dest_pure ctxt af_inst) t then (Pure t, i) else (ApVar (i, t), i + 1); in aptm_of t end; (*find a common variable sequence for two applicative terms, depending on available combinators*) fun consolidate ctxt af (t1, t2) = let fun common_inst (i, t) (j, insts) = case Termtab.lookup insts t of SOME k => (((i, t), k), (j, insts)) | NONE => (((i, t), j), (j + 1, Termtab.update (t, j) insts)); val (vars, _) = (0, Termtab.empty) |> fold_map common_inst (apterm_vars t1 []) ||>> fold_map common_inst (apterm_vars t2 []); fun merge_adjacent (([], _), _) [] = [] | merge_adjacent ((is, t), d) [] = [((is, t), d)] | merge_adjacent (([], _), _) (((i, t), d)::xs) = merge_adjacent (([i], t), d) xs | merge_adjacent ((is, t), d) (((i', t'), d')::xs) = if d = d' then merge_adjacent ((i'::is, t), d) xs else ((is, t), d) :: merge_adjacent (([i'], t'), d') xs; fun align _ [] = NONE | align ((i, t), d) (((i', t'), d')::xs) = if d = d' then SOME ([((i @ i', t), d)], xs) else Option.map (apfst (cons ((i', t'), d'))) (align ((i, t), d) xs); fun merge ([], ys) = ys | merge (xs, []) = xs | merge ((xs as ((is1, t1), d1)::xs'), ys as (((is2, t2), d2)::ys')) = if d1 = d2 then ((is1 @ is2, t1), d1) :: merge (xs', ys') else case (align ((is2, t2), d2) xs, align ((is1, t1), d1) ys) of (SOME (zs, xs''), NONE) => zs @ merge (xs'', ys') | (NONE, SOME (zs, ys'')) => zs @ merge (xs', ys'') | _ => ((is1, t1), d1) :: ((is2, t2), d2) :: merge (xs', ys'); fun unbalanced vs = error ("Unbalanced opaque terms " ^ commas_quote (map (Syntax.string_of_term ctxt o #2 o #1) vs)); fun mismatch (t1, t2) = error ("Mismatched opaque terms " ^ quote (Syntax.string_of_term ctxt t1) ^ " and " ^ quote (Syntax.string_of_term ctxt t2)); fun same ([], []) = [] | same ([], ys) = unbalanced ys | same (xs, []) = unbalanced xs | same ((((i1, t1), d1)::xs), (((i2, t2), d2)::ys)) = if d1 = d2 then ((i1 @ i2, t1), d1) :: same (xs, ys) else mismatch (t1, t2); in vars |> has_red_afun af "C" ? 
apply2 (sort (int_ord o apply2 #2)) |> apply2 (if has_red_afun af "W" then merge_adjacent (([], Term.dummy), 0) else map (apfst (apfst single))) |> (if has_red_afun af "K" then merge else same) |> map #1 end; fun ap_cong ctxt af_inst thm1 thm2 = let val funT = the_default (dummyT --> dummyT) (dest_type ctxt af_inst (Thm.typ_of_cterm (Thm.lhs_of thm1))); val ap_inst = Thm.cterm_of ctxt (mk_ap af_inst (dest_funT funT)); in Drule.binop_cong_rule ap_inst thm1 thm2 end; fun rewr_subst_ap ctxt af_inst rewr thm1 thm2 = let val rule1 = ap_cong ctxt af_inst thm1 thm2; val rule2 = Conv.rewr_conv rewr (Thm.rhs_of rule1); in Thm.transitive rule1 rule2 end; fun merge_pures ctxt af_inst merge_thm tt = let fun merge (Pure t) = SOME (Thm.reflexive (Thm.cterm_of ctxt t)) | merge (ApVar _) = NONE | merge (Ap (tt1, tt2)) = case merge tt1 of NONE => NONE | SOME thm1 => case merge tt2 of NONE => NONE | SOME thm2 => SOME (rewr_subst_ap ctxt af_inst merge_thm thm1 thm2); in merge tt end; exception ASSERT of string; (*abstract over a variable (opaque subterm)*) fun eliminate ctxt (af, af_inst) tt (v, v_tm) = let val {hom, ichng, ...} = thms_of_afun af; val the_red = the o red_of_afun af; val hom_conv = mk_meta_eq hom; val ichng_conv = mk_meta_eq ichng; val mk_combI = Thm.symmetric o mk_meta_eq; val id_conv = mk_combI (the_red "I"); val comp_conv = mk_combI (the_red "B"); val flip_conv = Option.map mk_combI (red_of_afun af "C"); val const_conv = Option.map mk_combI (red_of_afun af "K"); val dup_conv = Option.map mk_combI (red_of_afun af "W"); val rewr_subst_ap = rewr_subst_ap ctxt af_inst; fun extract_comb n thm = Pure (thm |> Thm.rhs_of |> funpow n Thm.dest_arg1 |> Thm.term_of); fun refl_step tt = (tt, Thm.reflexive (Thm.cterm_of ctxt (term_of_apterm ctxt af_inst tt))); fun comb2_step def (tt1, thm1) (tt2, thm2) = let val thm = rewr_subst_ap def thm1 thm2; in (Ap (Ap (extract_comb 3 thm, tt1), tt2), thm) end; val B_step = comb2_step comp_conv; fun swap_B_step (tt1, thm1) thm2 = let val thm3 = rewr_subst_ap ichng_conv thm1 thm2; val thm4 = Thm.transitive thm3 (Conv.rewr_conv comp_conv (Thm.rhs_of thm3)); in (Ap (Ap (extract_comb 3 thm4, extract_comb 1 thm3), tt1), thm4) end; fun I_step tm = let val thm = Conv.rewr_conv id_conv (Thm.cterm_of ctxt tm) in (extract_comb 1 thm, thm) end; fun W_step s1 s2 = let val (Ap (Ap (tt1, tt2), tt3), thm1) = B_step s1 s2; val thm2 = Conv.rewr_conv comp_conv (Thm.rhs_of thm1 |> funpow 2 Thm.dest_arg1); val thm3 = merge_pures ctxt af_inst hom_conv tt3 |> the; val (tt4, thm4) = swap_B_step (Ap (Ap (extract_comb 3 thm2, tt1), tt2), thm2) thm3; val var = Thm.rhs_of thm1 |> Thm.dest_arg; val thm5 = rewr_subst_ap (the dup_conv) thm4 (Thm.reflexive var); val thm6 = Thm.transitive thm1 thm5; in (Ap (extract_comb 2 thm6, tt4), thm6) end; fun S_step s1 s2 = let val (Ap (Ap (tt1, tt2), tt3), thm1) = comb2_step (the flip_conv) s1 s2; val thm2 = Conv.rewr_conv comp_conv (Thm.rhs_of thm1 |> Thm.dest_arg1); val var = Thm.rhs_of thm1 |> Thm.dest_arg; val thm3 = rewr_subst_ap (the dup_conv) thm2 (Thm.reflexive var); val thm4 = Thm.transitive thm1 thm3; val tt = Ap (extract_comb 2 thm4, Ap (Ap (extract_comb 3 thm2, Ap (tt1, tt2)), tt3)); in (tt, thm4) end; fun K_step tt tm = let val ct = Thm.cterm_of ctxt tm; val T_opt = Term.fastype_of tm |> dest_type ctxt af_inst |> Option.map (Thm.ctyp_of ctxt); val thm = Thm.instantiate' [T_opt] [SOME ct] (Conv.rewr_conv (the const_conv) (term_of_apterm ctxt af_inst tt |> Thm.cterm_of ctxt)) in (Ap (extract_comb 2 thm, tt), thm) end; fun unreachable _ = raise 
ASSERT "eliminate: assertion failed"; fun elim (Pure _) = unreachable () | elim (ApVar (i, t)) = if exists (fn x => x = i) v then I_step t else unreachable () | elim (Ap (t1, t2)) = (case (occurs_any v t1, occurs_any v t2) of (false, false) => unreachable () | (false, true) => B_step (refl_step t1) (elim t2) | (true, false) => (case merge_pures ctxt af_inst hom_conv t2 of SOME thm => swap_B_step (elim t1) thm | NONE => comb2_step (the flip_conv) (elim t1) (refl_step t2)) | (true, true) => if is_some flip_conv then S_step (elim t1) (elim t2) else W_step (elim t1) (elim t2)); in if occurs_any v tt then elim tt else K_step tt v_tm end; (*convert a pair of terms into equal canonical forms, modulo pure terms*) fun general_normalform_conv ctxt af cts = let val (t1, t2) = apply2 (Thm.term_of) cts; val maxidx = Int.max (apply2 Thm.maxidx_of_cterm cts); (* TODO: is there a better strategy for finding the instantiated functor? *) val af_inst = match_afun_inst ctxt af (t1, maxidx); val ((apt1, apt2), _) = 0 |> apterm_of_term ctxt af_inst t1 ||>> apterm_of_term ctxt af_inst t2; val vs = consolidate ctxt af (apt1, apt2); val merge_thm = mk_meta_eq (#hom (thms_of_afun af)); fun elim_all tt [] = the (merge_pures ctxt af_inst merge_thm tt) | elim_all tt (v::vs) = let val (tt', rule1) = eliminate ctxt (af, af_inst) tt v; val rule2 = elim_all tt' vs; val (_, vartm) = dest_comb ctxt af_inst (Thm.term_of (Thm.rhs_of rule1)); val rule3 = ap_cong ctxt af_inst rule2 (Thm.reflexive (Thm.cterm_of ctxt vartm)); in Thm.transitive rule1 rule3 end; in (elim_all apt1 vs, elim_all apt2 vs) end; val general_normalize_rel_tac = binop_par_conv_tac oo general_normalform_conv; (* Reduce canonical forms to base relation *) fun rename_params names i st = let val (_, Bs, Bi, C) = Thm.dest_state (st, i); val Bi' = Logic.list_rename_params names Bi; in Thm.renamed_prop (Logic.list_implies (Bs @ [Bi'], C)) st end; (* R' (pure f <> x1 <> ... <> xn) (pure g <> x1 <> ... <> xn) ===> !!y1 ... yn. [| yi : setF xi ... |] ==> R (f y1 ... yn) (g y1 ... yn), where either both R and R' are equality, or R' = relF R for relator relF of the functor. The premises yi : setF xi are added only in the latter case and if the set operator is available. Succeeds if partial progress can be made. The names of the new parameters yi are derived from the arguments xi. 
*) fun head_cong_tac ctxt af renames = let val {rel_intros, ...} = thms_of_afun af; fun term_name tm = case AList.lookup (op aconv) renames tm of SOME n => n | NONE => term_to_vname tm; fun gather_vars' af_inst tm = case try (dest_comb ctxt af_inst) tm of SOME (t1, t2) => term_name t2 :: gather_vars' af_inst t1 | NONE => []; fun gather_vars prop = case prop of Const (@{const_name Trueprop}, _) $ (_ $ rhs) => rev (gather_vars' (match_afun_inst ctxt af (rhs, maxidx_of_term prop)) rhs) | _ => []; in SUBGOAL (fn (subgoal, i) => (REPEAT_DETERM (resolve_tac ctxt rel_intros i) THEN REPEAT_DETERM (resolve_tac ctxt [ext, @{thm rel_fun_eq_onI}] i ORELSE eresolve_tac ctxt [@{thm UNIV_E}] i) THEN PRIMITIVE (rename_params (gather_vars subgoal) i))) end; (* Forward lifting *) (* TODO: add limited support for premises, where used variables are not generalized in the conclusion *) fun forward_lift_rule ctxt af thm = let val thm = Object_Logic.rulify ctxt thm; val (af_inst, ctxt_inst) = import_afun_inst af ctxt; val (prop, ctxt_Ts) = yield_singleton Variable.importT_terms (Thm.prop_of thm) ctxt_inst; val (lhs, rhs) = prop |> HOLogic.dest_Trueprop |> HOLogic.dest_eq; val ([lhs', rhs'], ctxt_lifted) = generalize_lift_terms af_inst [lhs, rhs] ctxt_Ts; val lifted = HOLogic.mk_Trueprop (HOLogic.mk_eq (lhs', rhs')); val (lifted', ctxt') = yield_singleton (Variable.import_terms true) lifted ctxt_lifted; fun tac {prems, context} = HEADGOAL (general_normalize_rel_tac context af THEN' head_cong_tac context af [] THEN' resolve_tac context [prems MRS thm]); val thm' = singleton (Variable.export ctxt' ctxt) (Goal.prove ctxt' [] [] lifted' tac); val thm'' = Raw_Simplifier.fold_rule ctxt (unfolds_of_afun af) thm'; in thm'' end; fun forward_lift_attrib name = Thm.rule_attribute [] (fn context => fn thm => let val af = afun_of_generic context (intern context name) (* FIXME !?!? 
*) in forward_lift_rule (Context.proof_of context) af thm end); (* High-level tactics *) fun unfold_wrapper_tac ctxt = AUTO_AFUNS false (fn afs => Simplifier.safe_asm_full_simp_tac (ctxt addsimps flat (map unfolds_of_afun afs))) ctxt; fun fold_wrapper_tac ctxt = AUTO_AFUN true (fold_goal_tac ctxt o unfolds_of_afun) ctxt; fun WRAPPER tac ctxt opt_af = REPEAT_DETERM o resolve_tac ctxt [@{thm allI}] THEN' Subgoal.FOCUS (fn {context = ctxt, params, ...} => let val renames = map (swap o apsnd Thm.term_of) params in AUTO_AFUNS false (EVERY' o map (afun_unfold_tac ctxt)) ctxt opt_af 1 THEN AUTO_AFUN true (fn af => afun_unfold_tac ctxt af THEN' CONVERSION Drule.beta_eta_conversion THEN' tac ctxt af THEN' head_cong_tac ctxt af renames) ctxt opt_af 1 end) ctxt THEN' Raw_Simplifier.rewrite_goal_tac ctxt [Drule.triv_forall_equality]; val normalize_wrapper_tac = WRAPPER normalize_rel_tac; val lifting_wrapper_tac = WRAPPER general_normalize_rel_tac; val parse_opt_afun = Scan.peek (fn context => Scan.option Parse.name >> Option.map (intern context #> afun_of_generic context)); (** Declaration **) (* Combinator setup *) fun declare_combinators combs phi = let val (names, thms) = split_list combs; val thms' = map (Morphism.thm phi) thms; fun add_combs (defs, rules) = (fold (Symtab.insert (K false)) (names ~~ thms') defs, rules); in Data.map (map_data add_combs I I) end; val setup_combinators = Local_Theory.declaration {syntax = false, pervasive = false} o declare_combinators; fun combinator_of_red thm = let val (lhs, _) = Logic.dest_equals (Thm.prop_of thm); val (head, _) = strip_comb lhs; in #1 (dest_Const head) end; fun register_combinator_rule weak_premises thm context = let val (lhs, rhs) = Logic.dest_equals (Thm.prop_of thm); val ltvars = Term.add_tvars lhs []; val rtvars = Term.add_tvars rhs []; val _ = if exists (not o member op = ltvars) rtvars then Pretty.breaks [Pretty.str "Combinator equation", Pretty.quote (Syntax.pretty_term (Context.proof_of context) (Thm.prop_of thm)), Pretty.str "has additional type variables on right-hand side."] |> Pretty.block |> Pretty.string_of |> error else (); val (defs, _) = #combinators (Data.get context); val comb_names = Symtab.make (map (fn (name, thm) => (combinator_of_red thm, name)) (Symtab.dest defs)); val rule = mk_combinator_rule comb_names weak_premises thm; fun add_rule (defs, rules) = (defs, insert eq_combinator_rule rule rules); in Data.map (map_data add_rule I I) context end; val combinator_rule_attrib = Thm.declaration_attribute o register_combinator_rule; (* Derivation of combinator reductions *) fun combinator_closure rules have_weak combs = let fun apply rule (cs, changed) = if not (Ord_List.member fast_string_ord cs (conclusion_of_rule rule)) andalso is_applicable_rule rule have_weak (fn prems => Ord_List.subset fast_string_ord (prems, cs)) then (Ord_List.insert fast_string_ord (conclusion_of_rule rule) cs, true) else (cs, changed); fun loop cs = (case fold apply rules (cs, false) of (cs', true) => loop cs' | (_, false) => cs); in loop combs end; fun derive_combinator_red ctxt af_inst red_thms (base_thm, eq_thm) = let val base_prop = Thm.prop_of base_thm; val tvars = Term.add_tvars base_prop []; val (Ts, ctxt_Ts) = mk_TFrees_of (length tvars) (inner_sort_of af_inst) ctxt; - val base_prop' = Term_Subst.instantiate (tvars ~~ Ts, []) base_prop; + val base_prop' = base_prop + |> Term_Subst.instantiate (Term_Subst.TVars.table (tvars ~~ Ts), Term_Subst.Vars.empty); val (lhs, rhs) = Logic.dest_equals base_prop'; val ([lhs', rhs'], ctxt') = 
generalize_lift_terms af_inst [lhs, rhs] ctxt_Ts; val lifted_prop = (lhs', rhs') |> HOLogic.mk_eq |> HOLogic.mk_Trueprop; val unfold_comb_conv = HOLogic.Trueprop_conv (HOLogic.eq_conv (Conv.top_sweep_conv (K (Conv.rewr_conv eq_thm)) ctxt') Conv.all_conv); val tac = HEADGOAL (CONVERSION unfold_comb_conv THEN' Raw_Simplifier.rewrite_goal_tac ctxt' red_thms THEN' resolve_tac ctxt' [@{thm refl}]); in singleton (Variable.export ctxt' ctxt) (Goal.prove ctxt' [] [] lifted_prop (K tac)) end; (*derive all instantiations with pure terms which can be simplified by homomorphism*) (*FIXME: more of a workaround than a sensible solution*) fun weak_red_closure ctxt (af_inst, merge_thm) strong_red = let val (lhs, _) = Thm.prop_of strong_red |> Logic.dest_equals; val vars = rev (Term.add_vars lhs []); fun closure [] prev thms = (prev::thms) | closure ((v, af_T)::vs) prev thms = (case try (dest_type ctxt af_inst) af_T of NONE => closure vs prev thms | SOME T_opt => let val (T, ctxt') = (case T_opt of NONE => yield_singleton Variable.invent_types (inner_sort_of af_inst) ctxt |>> TFree | SOME T => (T, ctxt)); val (v', ctxt'') = mk_Free (#1 v) T ctxt'; val pure_v = Thm.cterm_of ctxt'' (lift_term af_inst v'); val next = Drule.instantiate_normalize ([], [((v, af_T), pure_v)]) prev; val next' = Raw_Simplifier.rewrite_rule ctxt'' [merge_thm] next; val next'' = singleton (Variable.export ctxt'' ctxt) next'; in closure vs next'' (prev::thms) end); in closure vars strong_red [] end; fun combinator_red_closure ctxt (comb_defs, rules) (af_inst, merge_thm) weak_reds combs = let val have_weak = not (null weak_reds); val red_thms0 = Symtab.fold (fn (_, thm) => cons (mk_meta_eq thm)) combs weak_reds; val red_thms = flat (map (weak_red_closure ctxt (af_inst, merge_thm)) red_thms0); fun apply rule ((cs, rs), changed) = if not (Symtab.defined cs (conclusion_of_rule rule)) andalso is_applicable_rule rule have_weak (forall (Symtab.defined cs)) then let val conclusion = conclusion_of_rule rule; val def = the (Symtab.lookup comb_defs conclusion); val new_red_thm = derive_combinator_red ctxt af_inst rs (def, thm_of_rule rule); val new_red_thms = weak_red_closure ctxt (af_inst, merge_thm) (mk_meta_eq new_red_thm); in ((Symtab.update (conclusion, new_red_thm) cs, new_red_thms @ rs), true) end else ((cs, rs), changed); fun loop xs = (case fold apply rules (xs, false) of (xs', true) => loop xs' | (_, false) => xs); in #1 (loop (combs, red_thms)) end; (* Preparation of AFun data *) fun mk_terms ctxt (raw_pure, raw_ap, raw_rel, raw_set) = let val thy = Proof_Context.theory_of ctxt; val show_typ = quote o Syntax.string_of_typ ctxt; val show_term = quote o Syntax.string_of_term ctxt; fun closed_poly_term t = let val poly_t = singleton (Variable.polymorphic ctxt) t; in case Term.add_vars (singleton (Variable.export_terms (Proof_Context.augment t ctxt) ctxt) t) [] of [] => (case (Term.hidden_polymorphism poly_t) of [] => poly_t | _ => error ("Hidden type variables in term " ^ show_term t)) | _ => error ("Locally free variables in term " ^ show_term t) end; val pure = closed_poly_term raw_pure; val (tvar, T1) = fastype_of pure |> dest_funT |>> dest_TVar handle TYPE _ => error ("Bad type for pure: " ^ show_typ (fastype_of pure)); val maxidx_pure = maxidx_of_term pure; val ap = Logic.incr_indexes ([], [], maxidx_pure + 1) (closed_poly_term raw_ap); fun bad_ap _ = error ("Bad type for ap: " ^ show_typ (fastype_of ap)); val (T23, (T2, T3)) = fastype_of ap |> dest_funT ||> dest_funT handle TYPE _ => bad_ap (); val maxidx_common = Term.maxidx_term 
ap maxidx_pure; (*unify type variables, while keeping the live variables separate*) fun no_unifier (T, U) = error ("Unable to infer common functor type from " ^ commas (map show_typ [T, U])); fun unify_ap_type T (tyenv, maxidx) = let val argT = TVar ((Name.aT, maxidx + 1), []); - val T1' = Term_Subst.instantiateT [(tvar, argT)] T1; + val T1' = Term_Subst.instantiateT (Term_Subst.TVars.table [(tvar, argT)]) T1; val (tyenv', maxidx') = Sign.typ_unify thy (T1', T) (tyenv, maxidx + 1) handle Type.TUNIFY => no_unifier (T1', T); in (argT, (tyenv', maxidx')) end; val (ap_args, (ap_env, maxidx_env)) = fold_map unify_ap_type [T2, T3, T23] (Vartab.empty, maxidx_common); val [T2_arg, T3_arg, T23_arg] = map (Envir.norm_type ap_env) ap_args; val (tvar2, tvar3) = (dest_TVar T2_arg, dest_TVar T3_arg) handle TYPE _ => bad_ap (); val _ = if T23_arg = T2_arg --> T3_arg then () else bad_ap (); val sort = foldl1 (Sign.inter_sort thy) (map #2 [tvar, tvar2, tvar3]); val _ = Sign.of_sort thy (Term.aT sort --> Term.aT sort, sort) orelse error ("Sort constraint " ^ quote (Syntax.string_of_sort ctxt sort) ^ " not closed under function types"); fun update_sort (v, S) (tyenv, maxidx) = (Vartab.update_new (v, (S, TVar ((Name.aT, maxidx + 1), sort))) tyenv, maxidx + 1); val (common_env, _) = fold update_sort [tvar, tvar2, tvar3] (ap_env, maxidx_env); val tvar' = Envir.norm_type common_env (TVar tvar); val pure' = norm_term_types common_env pure; val (tvar2', tvar3') = apply2 (Envir.norm_type common_env) (T2_arg, T3_arg); val ap' = norm_term_types common_env ap; fun bad_set set = error ("Bad type for set: " ^ show_typ (fastype_of set)); fun mk_set set = let val tyenv = Sign.typ_match thy (domain_type (fastype_of set), range_type (fastype_of pure')) Vartab.empty handle Type.TYPE_MATCH => bad_set set; val set' = Envir.subst_term_types tyenv set; val set_tvar = fastype_of set' |> range_type |> HOLogic.dest_setT |> dest_TVar handle TYPE _ => bad_set set; val _ = if Term.eq_tvar (dest_TVar tvar', set_tvar) then () else bad_set set; in ([tvar'], set') end val set = (case raw_set of NONE => ([tvar'], Abs ("x", tvar', HOLogic.mk_UNIV tvar')) | SOME t => mk_set (closed_poly_term t)); val terms = Term_Subst.zero_var_indexes (pack_poly_terms [poly_type_to_term ([tvar'], range_type (fastype_of pure')), ([tvar'], pure'), ([tvar2', tvar3'], ap'), set]); (*TODO: also infer the relator type?*) fun bad_rel rel = error ("Bad type for rel: " ^ show_typ (fastype_of rel)); fun mk_rel rel = let val ((T1, T2), (T1_af, T2_af)) = fastype_of rel |> dest_funT |>> BNF_Util.dest_pred2T ||> BNF_Util.dest_pred2T; val _ = (dest_TVar T1; dest_TVar T2); val _ = if T1 = T2 then bad_rel rel else (); val af_inst = mk_afun_inst (match_poly_terms_type ctxt (terms, 0) (T1_af, maxidx_of_term rel)); val (T1', T2') = apply2 (dest_type ctxt af_inst) (T1_af, T2_af); val _ = if (is_none T1' andalso is_none T2') orelse (T1' = SOME T1 andalso T2' = SOME T2) then () else bad_rel rel; in Term_Subst.zero_var_indexes (pack_poly_terms [([T1, T2], rel)]) end handle TYPE _ => bad_rel rel; val rel = Option.map (mk_rel o closed_poly_term) raw_rel; in (terms, rel) end; fun mk_rel_intros {pure_transfer, ap_rel_fun} = let val pure_rel_intro = pure_transfer RS @{thm rel_funD}; in [pure_rel_intro, ap_rel_fun] end; fun mk_afun_thms ctxt af_inst (hom_thm, ichng_thm, reds, rel_axioms) = let val pure_comp_conv = let val ([T1, T2, T3], ctxt_Ts) = mk_TFrees_of 3 (inner_sort_of af_inst) ctxt; val (((g, f), x), ctxt') = ctxt_Ts |> mk_Free "g" (T2 --> T3) ||>> mk_Free "f" (mk_type af_inst 
(T1 --> T2)) ||>> mk_Free "x" (mk_type af_inst T1); val comb = mk_comb af_inst; val lhs = comb (T2 --> T3) (lift_term af_inst g, comb (T1 --> T2) (f, x)); val B_g = Abs ("f", T1 --> T2, Abs ("x", T1, Term.betapply (g, Bound 1 $ Bound 0))); val rhs = comb (T1 --> T3) (comb ((T1 --> T2) --> T1 --> T3) (lift_term af_inst B_g, f), x); val prop = HOLogic.mk_eq (lhs, rhs) |> HOLogic.mk_Trueprop; val merge_rule = mk_meta_eq hom_thm; val B_intro = the (Symtab.lookup reds "B") |> mk_meta_eq |> Thm.symmetric; val tac = HEADGOAL (Raw_Simplifier.rewrite_goal_tac ctxt' [B_intro, merge_rule] THEN' resolve_tac ctxt' [@{thm refl}]); in singleton (Variable.export ctxt' ctxt) (Goal.prove ctxt' [] [] prop (K tac)) end; val eq_intros = let val ([T1, T2], ctxt_Ts) = mk_TFrees_of 2 (inner_sort_of af_inst) ctxt; val T12 = mk_type af_inst (T1 --> T2); val (((((x, y), x'), f), g), ctxt') = ctxt_Ts |> mk_Free "x" T1 ||>> mk_Free "y" T1 ||>> mk_Free "x" (mk_type af_inst T1) ||>> mk_Free "f" T12 ||>> mk_Free "g" T12; val pure_fun = mk_pure af_inst T1; val pure_cong = Drule.infer_instantiate' ctxt' (map (SOME o Thm.cterm_of ctxt') [x, y, pure_fun]) @{thm arg_cong}; val ap_fun = mk_ap af_inst (T1, T2); val ap_cong1 = Drule.infer_instantiate' ctxt' (map (SOME o Thm.cterm_of ctxt') [f, g, ap_fun, x']) @{thm arg1_cong}; in Variable.export ctxt' ctxt [pure_cong, ap_cong1] end; val rel_intros = case rel_axioms of NONE => [] | SOME axioms => mk_rel_intros axioms; in {hom = hom_thm, ichng = ichng_thm, reds = reds, rel_thms = rel_axioms, rel_intros = eq_intros @ rel_intros, pure_comp_conv = pure_comp_conv} end; fun reuse_TFrees n S (ctxt, Ts) = let val have_n = Int.min (n, length Ts); val (more_Ts, ctxt') = mk_TFrees_of (n - have_n) S ctxt; in (take have_n Ts @ more_Ts, (ctxt', Ts @ more_Ts)) end; fun mk_comb_prop lift_pos thm af_inst ctxt_Ts = let val base = Thm.prop_of thm; val tvars = Term.add_tvars base []; val (Ts, (ctxt', Ts')) = reuse_TFrees (length tvars) (inner_sort_of af_inst) ctxt_Ts; - val base' = Term_Subst.instantiate (tvars ~~ Ts, []) base; + val base' = base + |> Term_Subst.instantiate (Term_Subst.TVars.table (tvars ~~ Ts), Term_Subst.Vars.empty); val (lhs, rhs) = Logic.dest_equals base'; val (_, lhs_args) = strip_comb lhs; val lift_var = Var o apsnd (mk_type af_inst) o dest_Var; val (lhs_args', subst) = fold_index (fn (i, v) => if member (op =) lift_pos i then apfst (cons v) else map_prod (cons (lift_var v)) (cons (v, lift_var v))) lhs_args ([], []); val (lhs', rhs') = apply2 (subst_lift_term af_inst subst) (lhs, rhs); val lifted = (lhs', rhs') |> HOLogic.mk_eq |> HOLogic.mk_Trueprop; in (fold Logic.all lhs_args' lifted, (ctxt', Ts')) end; fun mk_homomorphism_prop af_inst ctxt_Ts = let val ([T1, T2], (ctxt', Ts')) = reuse_TFrees 2 (inner_sort_of af_inst) ctxt_Ts; val ((f, x), _) = ctxt' |> mk_Free "f" (T1 --> T2) ||>> mk_Free "x" T1; val lhs = mk_comb af_inst (T1 --> T2) (lift_term af_inst f, lift_term af_inst x); val rhs = lift_term af_inst (f $ x); val prop = HOLogic.mk_Trueprop (HOLogic.mk_eq (lhs, rhs)); in (Logic.all f (Logic.all x prop), (ctxt', Ts')) end; fun mk_interchange_prop af_inst ctxt_Ts = let val ([T1, T2], (ctxt', Ts')) = reuse_TFrees 2 (inner_sort_of af_inst) ctxt_Ts; val ((f, x), _) = ctxt' |> mk_Free "f" (mk_type af_inst (T1 --> T2)) ||>> mk_Free "x" T1; val lhs = mk_comb af_inst (T1 --> T2) (f, lift_term af_inst x); val T_x = Abs ("f", T1 --> T2, Bound 0 $ x); val rhs = mk_comb af_inst ((T1 --> T2) --> T2) (lift_term af_inst T_x, f); val prop = HOLogic.mk_Trueprop (HOLogic.mk_eq (lhs, 
rhs)); in (Logic.all f (Logic.all x prop), (ctxt', Ts')) end; fun mk_rel_props (af_inst, rel_inst) ctxt_Ts = let fun mk_af_rel tm = let val (T1, T2) = BNF_Util.dest_pred2T (fastype_of tm); in betapply (instantiate_poly_term rel_inst [T1, T2], tm) end; val ([T1, T2, T3], (ctxt', Ts')) = reuse_TFrees 3 (inner_sort_of af_inst) ctxt_Ts; val (pure_R, _) = mk_Free "R" (T1 --> T2 --> @{typ bool}) ctxt'; val rel_pure = BNF_Util.mk_rel_fun pure_R (mk_af_rel pure_R) $ mk_pure af_inst T1 $ mk_pure af_inst T2; val pure_prop = Logic.all pure_R (HOLogic.mk_Trueprop rel_pure); val ((((f, g), x), ap_R), _) = ctxt' |> mk_Free "f" (mk_type af_inst (T1 --> T2)) ||>> mk_Free "g" (mk_type af_inst (T1 --> T3)) ||>> mk_Free "x" (mk_type af_inst T1) ||>> mk_Free "R" (T2 --> T3 --> @{typ bool}); val fun_rel = BNF_Util.mk_rel_fun (mk_eq_on (mk_set af_inst T1 $ x)) ap_R; val rel_ap = Logic.mk_implies (HOLogic.mk_Trueprop (mk_af_rel fun_rel $ f $ g), HOLogic.mk_Trueprop (mk_af_rel ap_R $ mk_comb af_inst (T1 --> T2) (f, x) $ mk_comb af_inst (T1 --> T3) (g, x))); val ap_prop = fold_rev Logic.all [ap_R, f, g, x] rel_ap; in ([pure_prop, ap_prop], (ctxt', Ts')) end; fun mk_interchange ctxt ((comb_defs, _), comb_unfolds) (af_inst, merge_thm) reds = let val T_def = the (Symtab.lookup comb_defs "T"); val T_red = the (Symtab.lookup reds "T"); val (weak_prop, (ctxt', _)) = mk_comb_prop [0] T_def af_inst (ctxt, []); fun tac {context, ...} = HEADGOAL (Raw_Simplifier.rewrite_goal_tac context [Thm.symmetric merge_thm] THEN' resolve_tac context [T_red]); val weak_red = singleton (Variable.export ctxt' ctxt) (Goal.prove ctxt' [] [] weak_prop tac); in Raw_Simplifier.rewrite_rule ctxt (comb_unfolds) weak_red RS sym end; fun mk_weak_reds ctxt ((comb_defs, _), comb_unfolds) af_inst (hom_thm, ichng_thm, reds) = let val unfolded_reds = Symtab.map (K (Raw_Simplifier.rewrite_rule ctxt comb_unfolds)) reds; val af_thms = mk_afun_thms ctxt af_inst (hom_thm, ichng_thm, unfolded_reds, NONE); val af = mk_afun Binding.empty (pack_afun_inst af_inst) NONE af_thms; fun tac {context, ...} = HEADGOAL (normalize_wrapper_tac context (SOME af) THEN' Raw_Simplifier.rewrite_goal_tac context comb_unfolds THEN' resolve_tac context [refl]); fun mk comb lift_pos = let val def = the (Symtab.lookup comb_defs comb); val (prop, (ctxt', _)) = mk_comb_prop lift_pos def af_inst (ctxt, []); val hol_thm = singleton (Variable.export ctxt' ctxt) (Goal.prove ctxt' [] [] prop tac); in mk_meta_eq hol_thm end; val uncurry_thm = mk_meta_eq (forward_lift_rule ctxt af @{thm uncurry_pair}); in [mk "C" [1], mk "C" [2], uncurry_thm] end; fun mk_comb_reds ctxt combss af_inst user_combs (hom_thm, user_thms, ichng_thms) = let val ((comb_defs, comb_rules), comb_unfolds) = combss; val merge_thm = mk_meta_eq hom_thm; val user_reds = Symtab.make (user_combs ~~ user_thms); val reds0 = combinator_red_closure ctxt (comb_defs, comb_rules) (af_inst, merge_thm) [] user_reds; val ichng_thm = case ichng_thms of [] => singleton (Variable.export ctxt ctxt) (mk_interchange ctxt combss (af_inst, merge_thm) reds0) | [thm] => thm; val weak_reds = mk_weak_reds ctxt combss af_inst (hom_thm, ichng_thm, reds0); val reds1 = combinator_red_closure ctxt (comb_defs, comb_rules) (af_inst, merge_thm) weak_reds reds0; val unfold = Raw_Simplifier.rewrite_rule ctxt comb_unfolds; in (Symtab.map (K unfold) reds1, ichng_thm) end; fun note_afun_thms af = let val thms = thms_of_afun af; val named_thms = [("homomorphism", [#hom thms]), ("interchange", [#ichng thms]), ("afun_rel_intros", #rel_intros thms)] @ map (fn 
(name, thm) => ("pure_" ^ name ^ "_conv", [thm])) (Symtab.dest (#reds thms)) @ (case #rel_thms thms of NONE => [] | SOME rel_thms' => [("pure_transfer", [#pure_transfer rel_thms']), ("ap_rel_fun_cong", [#ap_rel_fun rel_thms'])]); val base_name = Binding.name_of (name_of_afun af); fun mk_note (name, thms) = ((Binding.qualify true base_name (Binding.name name), []), [(thms, [])]); in Local_Theory.notes (map mk_note named_thms) #> #2 end; fun register_afun af = let fun decl phi context = Data.map (fn {combinators, afuns, patterns} => let val af' = morph_afun phi af; val (name, afuns') = Name_Space.define context true (name_of_afun af', af') afuns; val patterns' = Item_Net.update (name, patterns_of_afun af') patterns; in {combinators = combinators, afuns = afuns', patterns = patterns'} end) context; in Local_Theory.declaration {syntax = false, pervasive = false} decl end; fun applicative_cmd (((((name, flags), raw_pure), raw_ap), raw_rel), raw_set) lthy = let val comb_unfolds = Named_Theorems.get lthy @{named_theorems combinator_unfold}; val comb_reprs = Named_Theorems.get lthy @{named_theorems combinator_repr}; val (comb_defs, comb_rules) = get_combinators (Context.Proof lthy); val _ = fold (fn name => if Symtab.defined comb_defs name then I else error ("Unknown combinator " ^ quote name)) flags (); val _ = if has_duplicates op = flags then warning "Ignoring duplicate combinators" else (); val user_combs0 = Ord_List.make fast_string_ord flags; val raw_pure' = Syntax.read_term lthy raw_pure; val raw_ap' = Syntax.read_term lthy raw_ap; val raw_rel' = Option.map (Syntax.read_term lthy) raw_rel; val raw_set' = Option.map (Syntax.read_term lthy) raw_set; val (terms, rel) = mk_terms lthy (raw_pure', raw_ap', raw_rel', raw_set'); val derived_combs0 = combinator_closure comb_rules false user_combs0; val required_combs = Ord_List.make fast_string_ord ["B", "I"]; val user_combs = Ord_List.union fast_string_ord user_combs0 (Ord_List.subtract fast_string_ord derived_combs0 required_combs); val derived_combs1 = combinator_closure comb_rules false user_combs; val derived_combs2 = combinator_closure comb_rules true derived_combs1; fun is_redundant comb = eq_list (op =) (derived_combs2, (combinator_closure comb_rules true (Ord_List.remove fast_string_ord comb user_combs))); val redundant_combs = filter is_redundant user_combs; val _ = if null redundant_combs then () else warning ("Redundant combinators: " ^ commas redundant_combs); val prove_interchange = not (Ord_List.member fast_string_ord derived_combs1 "T"); val (af_inst, ctxt_af) = import_afun_inst_raw terms lthy; (* TODO: reuse TFrees from above *) val (rel_insts, ctxt_inst) = (case rel of NONE => (NONE, ctxt_af) | SOME r => let val (rel_inst, ctxt') = import_poly_terms r ctxt_af |>> the_single; val T = fastype_of (#2 rel_inst) |> range_type |> domain_type; val af_inst = match_poly_terms_type ctxt' (terms, 0) (T, ~1) |> mk_afun_inst; in (SOME (af_inst, rel_inst), ctxt') end); val mk_propss = [apfst single o mk_homomorphism_prop af_inst, fold_map (fn comb => mk_comb_prop [] (the (Symtab.lookup comb_defs comb)) af_inst) user_combs, if prove_interchange then apfst single o mk_interchange_prop af_inst else pair [], if is_some rel then mk_rel_props (the rel_insts) else pair []]; val (propss, (ctxt_Ts, _)) = fold_map I mk_propss (ctxt_inst, []); fun repr_tac ctxt = Raw_Simplifier.rewrite_goals_tac ctxt comb_reprs; fun after_qed thmss lthy' = let val [[hom_thm], user_thms, ichng_thms, rel_thms] = map (Variable.export lthy' ctxt_inst) thmss; val (reds, 
ichng_thm) = mk_comb_reds ctxt_inst ((comb_defs, comb_rules), comb_unfolds) af_inst user_combs (hom_thm, user_thms, ichng_thms); val rel_axioms = case rel_thms of [] => NONE | [thm1, thm2] => SOME {pure_transfer = thm1, ap_rel_fun = thm2}; val af_thms = mk_afun_thms ctxt_inst af_inst (hom_thm, ichng_thm, reds, rel_axioms); val af_thms = map_afun_thms (singleton (Variable.export ctxt_inst lthy)) af_thms; val af = mk_afun name terms rel af_thms; in lthy |> register_afun af |> note_afun_thms af end; in Proof.theorem NONE after_qed ((map o map) (rpair []) propss) ctxt_Ts |> Proof.refine (Method.Basic (SIMPLE_METHOD o repr_tac)) |> Seq.the_result "" end; fun print_afuns ctxt = let fun pretty_afun (name, af) = let val [pT, (_, pure), (_, ap), (_, set)] = unpack_poly_terms (terms_of_afun af); val ([tvar], T) = poly_type_of_term pT; val rel = Option.map (#2 o the_single o unpack_poly_terms) (rel_of_afun af); val combinators = Symtab.keys (#reds (thms_of_afun af)); in Pretty.block (Pretty.fbreaks ([Pretty.block [Pretty.str name, Pretty.str ":", Pretty.brk 1, Pretty.quote (Syntax.pretty_typ ctxt T), Pretty.brk 1, Pretty.str "of", Pretty.brk 1, Syntax.pretty_typ ctxt tvar], Pretty.block [Pretty.str "pure:", Pretty.brk 1, Pretty.quote (Syntax.pretty_term ctxt pure)], Pretty.block [Pretty.str "ap:", Pretty.brk 1, Pretty.quote (Syntax.pretty_term ctxt ap)], Pretty.block [Pretty.str "set:", Pretty.brk 1, Pretty.quote (Syntax.pretty_term ctxt set)]] @ (case rel of NONE => [] | SOME rel' => [Pretty.block [Pretty.str "rel:", Pretty.brk 1, Pretty.quote (Syntax.pretty_term ctxt rel')]]) @ [Pretty.block ([Pretty.str "combinators:", Pretty.brk 1] @ Pretty.commas (map Pretty.str combinators))])) end; val afuns = sort_by #1 (Name_Space.fold_table cons (get_afun_table (Context.Proof ctxt)) []); in Pretty.writeln (Pretty.big_list "Registered applicative functors:" (map pretty_afun afuns)) end; (* Unfolding *) fun add_unfold_thm name thm context = let val (lhs, _) = Thm.prop_of thm |> HOLogic.dest_Trueprop |> HOLogic.dest_eq handle TERM _ => error "Not an equation"; val names = case name of SOME n => [intern context n] | NONE => case match_typ context (Term.fastype_of lhs) of ns as (_::_) => ns | [] => error "Unable to determine applicative functor instance"; val _ = map (afun_of_generic context) names; (*TODO: check equation*) val thm' = mk_meta_eq thm; in fold (fn n => update_afun n (add_unfolds_afun [thm'])) names context end; fun add_unfold_attrib name = Thm.declaration_attribute (add_unfold_thm name); (*TODO: attribute to delete unfolds*) end; diff --git a/thys/Generalized_Counting_Sort/Stability.thy b/thys/Generalized_Counting_Sort/Stability.thy --- a/thys/Generalized_Counting_Sort/Stability.thy +++ b/thys/Generalized_Counting_Sort/Stability.thy @@ -1,764 +1,763 @@ (* Title: An Efficient Generalization of Counting Sort for Large, possibly Infinite Key Ranges Author: Pasquale Noce Software Engineer at HID Global, Italy pasquale dot noce dot lavoro at gmail dot com pasquale dot noce at hidglobal dot com *) section "Proof of algorithm's stability" theory Stability imports Sorting begin text \ \null In this section, it is formally proven that GCsort is \emph{stable}, viz. that the sublist of the output of function @{const gcsort} built by picking out the objects having a given key matches the sublist of the input objects' list built in the same way. Here below, steps 5, 6, and 7 of the proof method are accomplished. 
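For instance, as a purely illustrative example: if the key of a pair is its first component, a stable sort of [(2, a), (1, b), (2, c)] yields [(1, b), (2, a), (2, c)]; filtering either list by key 2 returns the same sublist [(2, a), (2, c)], whereas an unstable sort could have swapped (2, a) and (2, c).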
Particularly, @{text stab_inv} is the predicate that will be shown to be invariant over inductive set @{const gcsort_set}. \null \ fun stab_inv :: "('b \ 'a list) \ ('a \ 'b) \ nat \ nat list \ 'a list \ bool" where "stab_inv f key (u, ns, xs) = (\k. [x\xs. key x = k] = f k)" lemma gcsort_stab_input: "stab_inv (\k. [x\xs. key x = k]) key (0, [length xs], xs)" by simp lemma gcsort_stab_intro: "stab_inv f key t \ [x\gcsort_out t. key x = k] = f k" by (cases t, simp add: gcsort_out_def) text \ \null In what follows, step 9 of the proof method is accomplished. First, lemma @{text fill_offs_enum_stable} proves that function @{const fill}, if its input offsets' list is computed via the composition of functions @{const offs} and @{const enum}, does not modify the sublist of its input objects' list formed by the objects having a given key. Moreover, lemmas @{text mini_stable} and @{text maxi_stable} prove that the extraction of the leftmost minimum and the rightmost maximum from an objects' list through functions @{const mini} and @{const maxi} is endowed with the same property. These lemmas are then used to prove lemma @{text gcsort_stab_inv}, which states that the sublist of the objects having a given key within the objects' list is still the same after any recursive round. \null \ lemma fill_stable [rule_format]: assumes A: "index_less index key" and B: "index_same index key" shows "(\x \ set xs. key x \ {mi..ma}) \ ns \ [] \ offs_pred ns ub xs index key mi ma \ map the [w\fill xs ns index key ub mi ma. \x. w = Some x \ key x = k] = [x\xs. k = key x]" proof (induction xs arbitrary: ns, simp_all add: Let_def, rule conjI, (rule_tac [!] impI)+, (erule_tac [!] conjE)+, simp_all) fix x xs and ns :: "nat list" let ?i = "index key x (length ns) mi ma" let ?ns' = "ns[?i := Suc (ns ! ?i)]" let ?ws = "[w\fill xs ?ns' index key ub mi ma. \y. w = Some y \ key y = key x]" let ?ws' = "[w\(fill xs ?ns' index key ub mi ma)[ns ! ?i := Some x]. \y. w = Some y \ key y = key x]" assume C: "\x \ set xs. mi \ key x \ key x \ ma" and D: "mi \ key x" and E: "key x \ ma" and F: "ns \ []" and G: "offs_pred ns ub (x # xs) index key mi ma" have H: "?i < length ns" using A and D and E and F by (simp add: index_less_def) assume "\ns. ns \ [] \ offs_pred ns ub xs index key mi ma \ map the [w\fill xs ns index key ub mi ma. \y. w = Some y \ key y = key x] = [y\xs. key x = key y]" moreover have I: "offs_pred ?ns' ub xs index key mi ma" using G and H by (rule_tac offs_pred_cons, simp_all) ultimately have J: "map the ?ws = [y\xs. key x = key y]" using F by simp have "ns ! ?i + offs_num (length ns) (x # xs) index key mi ma ?i \ ub" using G and H by (rule offs_pred_ub, simp add: offs_num_cons) hence K: "ns ! ?i < ub" by (simp add: offs_num_cons) moreover from this have L: "(fill xs ?ns' index key ub mi ma)[ns ! ?i := Some x] ! (ns ! ?i) = Some x" by (subst nth_list_update_eq, simp_all add: fill_length) ultimately have "0 < length ?ws'" by (simp add: length_filter_conv_card card_gt_0_iff, rule_tac exI [where x = "ns ! ?i"], simp add: fill_length) hence "\w ws. ?ws' = w # ws" by (rule_tac list.exhaust [of ?ws'], simp_all) then obtain w and ws where "?ws' = w # ws" by blast hence "\us vs y. (fill xs ?ns' index key ub mi ma)[ns ! ?i := Some x] = us @ w # vs \ (\u \ set us. \y. u = Some y \ key y \ key x) \ w = Some y \ key y = key x \ ws = [w\vs. \y. w = Some y \ key y = key x]" (is "\us vs y. ?P us vs y") by (simp add: filter_eq_Cons_iff, blast) then obtain us and vs and y where M: "?P us vs y" by blast let ?A = "{i. 
i < length ns \ 0 < offs_num (length ns) xs index key mi ma i \ ns ! i \ length us}" have "length us = ns ! ?i" proof (rule ccontr, erule neqE, cases "?A = {}", cases "0 < offs_num (length ns) xs index key mi ma 0", simp_all only: not_gr0) assume N: "length us < ns ! ?i" and O: "?A = {}" and P: "0 < offs_num (length ns) xs index key mi ma 0" have "length us < ns ! 0" proof (rule ccontr, simp add: not_less) assume "ns ! 0 \ length us" with F and P have "0 \ ?A" by simp with O show False by blast qed hence "length us < ?ns' ! 0" using F by (cases ?i, simp_all) hence "offs_none ?ns' ub xs index key mi ma (length us)" using P by (simp add: offs_none_def) hence "fill xs ?ns' index key ub mi ma ! (length us) = None" using C and F and I by (rule_tac fill_none [OF A], simp_all) hence "(fill xs ?ns' index key ub mi ma)[ns ! ?i := Some x] ! (length us) = None" using N by simp thus False using M by simp next assume N: "length us < ns ! ?i" and O: "?A = {}" and P: "offs_num (length ns) xs index key mi ma 0 = 0" from K and N have "length us < offs_next ?ns' ub xs index key mi ma 0" proof (rule_tac ccontr, simp only: not_less offs_next_def split: if_split_asm) assume "offs_set_next ?ns' xs index key mi ma 0 \ {}" (is "?B \ _") hence "Min ?B \ ?B" by (rule_tac Min_in, simp) moreover assume "?ns' ! Min ?B \ length us" hence "ns ! Min ?B \ length us" using H by (cases "Min ?B = ?i", simp_all) ultimately have "Min ?B \ ?A" by simp with O show False by blast qed hence "offs_none ?ns' ub xs index key mi ma (length us)" using P by (simp add: offs_none_def) hence "fill xs ?ns' index key ub mi ma ! (length us) = None" using C and F and I by (rule_tac fill_none [OF A], simp_all) hence "(fill xs ?ns' index key ub mi ma)[ns ! ?i := Some x] ! (length us) = None" using N by simp thus False using M by simp next assume N: "length us < ns ! ?i" assume "?A \ {}" hence "Max ?A \ ?A" by (rule_tac Max_in, simp) moreover from this have O: "Max ?A \ ?i" using N by (rule_tac notI, simp) ultimately have "index key y (length ?ns') mi ma = Max ?A" using C proof (rule_tac fill_index [OF A _ I, where j = "length us"], simp_all, rule_tac ccontr, simp only: not_less not_le offs_next_def split: if_split_asm) assume "ub \ length us" with N have "ub < ns ! ?i" by simp with K show False by simp next let ?B = "offs_set_next ?ns' xs index key mi ma (Max ?A)" assume "?ns' ! Min ?B \ length us" hence "ns ! Min ?B \ length us" using H by (cases "Min ?B = ?i", simp_all) moreover assume "?B \ {}" hence P: "Min ?B \ ?B" by (rule_tac Min_in, simp) ultimately have "Min ?B \ ?A" by simp hence "Min ?B \ Max ?A" by (rule_tac Max_ge, simp) thus False using P by simp next have "fill xs ?ns' index key ub mi ma ! length us = (fill xs ?ns' index key ub mi ma)[ns ! ?i := Some x] ! length us" using N by simp thus "fill xs ?ns' index key ub mi ma ! length us = Some y" using M by simp qed moreover have "index key y (length ?ns') mi ma = ?i" using B and D and E and M by (cases "y = x", simp_all add: index_same_def) ultimately show False using O by simp next assume N: "ns ! ?i < length us" hence "(fill xs ?ns' index key ub mi ma)[ns ! ?i := Some x] ! (ns ! ?i) = us ! (ns ! ?i)" using M by (simp add: nth_append) hence "(fill xs ?ns' index key ub mi ma)[ns ! ?i := Some x] ! (ns ! ?i) \ set us" using N by simp thus False using L and M by blast qed hence "take (ns ! ?i) ((fill xs ?ns' index key ub mi ma)[ns ! ?i := Some x]) = us" using M by simp hence N: "take (ns ! 
?i) (fill xs ?ns' index key ub mi ma) = us" by simp have "fill xs ?ns' index key ub mi ma = take (ns ! ?i) (fill xs ?ns' index key ub mi ma) @ fill xs ?ns' index key ub mi ma ! (ns ! ?i) # drop (Suc (ns ! ?i)) (fill xs ?ns' index key ub mi ma)" (is "_ = ?ts @ _ # ?ds") using K by (rule_tac id_take_nth_drop, simp add: fill_length) moreover have "fill xs ?ns' index key ub mi ma ! (ns ! ?i) = None" using C and D and E and F and G by (rule_tac fill_index_none [OF A], simp_all) ultimately have O: "fill xs ?ns' index key ub mi ma = us @ None # ?ds" using N by simp have "(fill xs ?ns' index key ub mi ma)[ns ! ?i := Some x] = ?ts @ Some x # ?ds" using K by (rule_tac upd_conv_take_nth_drop, simp add: fill_length) hence "(fill xs ?ns' index key ub mi ma)[ns ! ?i := Some x] = us @ Some x # ?ds" using N by simp hence "?ws' = Some x # [w\?ds. \y. w = Some y \ key y = key x]" using M by simp also have "\ = Some x # ?ws" using M by (subst (2) O, simp) finally show "map the ?ws' = x # [y\xs. key x = key y]" using J by simp next fix x xs and ns :: "nat list" let ?i = "index key x (length ns) mi ma" let ?ns' = "ns[?i := Suc (ns ! ?i)]" let ?ws = "[w\fill xs ?ns' index key ub mi ma. \y. w = Some y \ key y = k]" let ?ws' = "[w\(fill xs ?ns' index key ub mi ma)[ns ! ?i := Some x]. \y. w = Some y \ key y = k]" assume C: "\x \ set xs. mi \ key x \ key x \ ma" and D: "mi \ key x" and E: "key x \ ma" and F: "ns \ []" and G: "offs_pred ns ub (x # xs) index key mi ma" and H: "k \ key x" have I: "?i < length ns" using A and D and E and F by (simp add: index_less_def) assume "\ns. ns \ [] \ offs_pred ns ub xs index key mi ma \ map the [w\fill xs ns index key ub mi ma. \y. w = Some y \ key y = k] = [y\xs. k = key y]" moreover have "offs_pred ?ns' ub xs index key mi ma" using G and I by (rule_tac offs_pred_cons, simp_all) ultimately have J: "map the ?ws = [y\xs. k = key y]" using F by simp have "ns ! ?i + offs_num (length ns) (x # xs) index key mi ma ?i \ ub" using G and I by (rule offs_pred_ub, simp add: offs_num_cons) hence K: "ns ! ?i < ub" by (simp add: offs_num_cons) have "fill xs ?ns' index key ub mi ma = take (ns ! ?i) (fill xs ?ns' index key ub mi ma) @ fill xs ?ns' index key ub mi ma ! (ns ! ?i) # drop (Suc (ns ! ?i)) (fill xs ?ns' index key ub mi ma)" (is "_ = ?ts @ _ # ?ds") using K by (rule_tac id_take_nth_drop, simp add: fill_length) moreover have "fill xs ?ns' index key ub mi ma ! (ns ! ?i) = None" using C and D and E and F and G by (rule_tac fill_index_none [OF A], simp_all) ultimately have L: "fill xs ?ns' index key ub mi ma = ?ts @ None # ?ds" by simp have "(fill xs ?ns' index key ub mi ma)[ns ! ?i := Some x] = ?ts @ Some x # ?ds" using K by (rule_tac upd_conv_take_nth_drop, simp add: fill_length) moreover have "?ws = [w\?ts. \y. w = Some y \ key y = k] @ [w\?ds. \y. w = Some y \ key y = k]" by (subst L, simp) ultimately have "?ws' = ?ws" using H by simp thus "map the ?ws' = [y\xs. k = key y]" using J by simp qed lemma fill_offs_enum_stable [rule_format]: assumes A: "index_less index key" and B: "index_same index key" shows "\x \ set xs. key x \ {mi..ma} \ 0 < n \ [x\map the (fill xs (offs (enum xs index key n mi ma) 0) index key (length xs) mi ma). key x = k] = [x\xs. k = key x]" (is "_ \ _ \ [_\map the ?ys. _] = _" is "_ \ _ \ [_\map the (fill _ ?ns _ _ _ _ _). 
_] = _") proof (subst fill_stable [symmetric, OF A B, of xs mi ma ?ns], simp, simp only: length_greater_0_conv [symmetric] offs_length enum_length, rule offs_enum_pred [OF A], simp, subst filter_map, simp add: filter_eq_nths fill_length) assume C: "\x \ set xs. mi \ key x \ key x \ ma" and D: "0 < n" have "{i. i < length xs \ key (the (?ys ! i)) = k} = {i. i < length xs \ (\x. ?ys ! i = Some x \ key x = k)}" (is "?A = ?B") proof (rule set_eqI, rule iffI, simp_all, erule_tac [!] conjE, erule_tac [2] exE, erule_tac [2] conjE, simp_all) fix i assume E: "i < length xs" and F: "key (the (?ys ! i)) = k" have "\x. ?ys ! i = Some x" proof (cases "?ys ! i", simp_all) assume "?ys ! i = None" moreover have "?ys ! i \ set ?ys" using E by (rule_tac nth_mem, simp add: fill_length) ultimately have "None \ set ?ys" by simp moreover have "count (mset ?ys) None = 0" using C and D by (rule_tac fill_offs_enum_count_none [OF A], simp) ultimately show False by simp qed then obtain x where "?ys ! i = Some x" .. moreover from this have "key x = k" using F by simp ultimately show "\x. ?ys ! i = Some x \ key x = k" by simp qed thus "map the (nths ?ys ?A) = map the (nths ?ys ?B)" by simp qed lemma mini_first [rule_format]: "xs \ [] \ i < mini xs key \ key (xs ! mini xs key) < key (xs ! i)" by (induction xs arbitrary: i, simp_all add: Let_def, (rule impI)+, erule conjE, simp add: not_le nth_Cons split: nat.split) lemma maxi_last [rule_format]: "xs \ [] \ maxi xs key < i \ i < length xs \ key (xs ! i) < key (xs ! maxi xs key)" by (induction xs arbitrary: i, simp_all add: Let_def, (rule impI)+, rule le_less_trans, rule maxi_ub, rule nth_mem, simp) lemma nths_range: "nths xs A = nths xs (A \ {..A. nths xs A = nths xs (A \ {.. A} = nths xs ({i. Suc i \ A} \ {.. A} \ {.. A \ i < length xs}" by blast ultimately show "nths xs {i. Suc i \ A} = nths xs {i. Suc i \ A \ i < length xs}" by simp qed lemma filter_nths_diff: assumes A: "i < length xs" and B: "\ P (xs ! i)" shows "[x\nths xs (A - {i}). P x] = [x\nths xs A. P x]" proof (cases "i \ A", simp_all) case True have C: "xs = take i xs @ xs ! i # drop (Suc i) xs" (is "_ = ?ts @ _ # ?ds") using A by (rule id_take_nth_drop) have "nths xs A = nths ?ts A @ nths (xs ! i # ?ds) {j. j + length ?ts \ A}" by (subst C, simp add: nths_append) moreover have D: "length ?ts = i" using A by simp ultimately have E: "nths xs A = nths ?ts (A \ {.. A}" (is "_ = ?vs @ _ # ?ws") using True by (simp add: nths_Cons, subst nths_range, simp) have "nths xs (A - {i}) = nths ?ts (A - {i}) @ nths (xs ! i # ?ds) {j. j + length ?ts \ A - {i}}" by (subst C, simp add: nths_append) moreover have "(A - {i}) \ {.. {.. []" and B: "mini xs key \ A" (is "?nmi \ _") shows "[x\[xs ! ?nmi] @ nths xs (A - {?nmi}). key x = k] = [x\nths xs A. key x = k]" (is "[x\[?xmi] @ _. _] = _") proof (simp, rule conjI, rule_tac [!] impI, rule_tac [2] filter_nths_diff, rule_tac [2] mini_less, simp_all add: A) assume C: "key ?xmi = k" moreover have "?nmi < length xs" using A by (rule_tac mini_less, simp) ultimately have "0 < length [x\xs. key x = k]" by (simp add: length_filter_conv_card card_gt_0_iff, rule_tac exI, rule_tac conjI) hence "\y ys. [x\xs. key x = k] = y # ys" by (rule_tac list.exhaust [of "[x\xs. key x = k]"], simp_all) then obtain y and ys where "[x\xs. key x = k] = y # ys" by blast hence "\us vs. xs = us @ y # vs \ (\u \ set us. key u \ k) \ key y = k \ ys = [x\vs. key x = k]" (is "\us vs. 
?P us vs") by (simp add: filter_eq_Cons_iff) then obtain us and vs where D: "?P us vs" by blast have E: "length us = ?nmi" proof (rule ccontr, erule neqE) assume "length us < ?nmi" with A have "key ?xmi < key (xs ! length us)" by (rule mini_first) also have "\ = k" using D by simp finally show False using C by simp next assume F: "?nmi < length us" hence "?xmi = us ! ?nmi" using D by (simp add: nth_append) hence "?xmi \ set us" using F by simp thus False using C and D by simp qed moreover have "xs ! length us = y" using D by simp ultimately have F: "?xmi = y" by simp have "nths xs A = nths us A @ nths (y # vs) {i. i + ?nmi \ A}" using D and E by (simp add: nths_append) also have "\ = nths us A @ y # nths vs {i. Suc i + ?nmi \ A}" (is "_ = _ @ _ # ?ws") using B by (simp add: nths_Cons) finally have G: "[x\nths xs A. key x = k] = y # [x\?ws. key x = k]" using D by (simp add: filter_empty_conv, rule_tac ballI, drule_tac in_set_nthsD, simp) have "nths xs (A - {?nmi}) = nths us (A - {?nmi}) @ nths (y # vs) {i. i + ?nmi \ A - {?nmi}}" using D and E by (simp add: nths_append) also have "\ = nths us (A - {?nmi}) @ ?ws" by (simp add: nths_Cons) finally have H: "[x\nths xs (A - {?nmi}). key x = k] = [x\?ws. key x = k]" using D by (simp add: filter_empty_conv, rule_tac ballI, drule_tac in_set_nthsD, simp) show "?xmi # [x\nths xs (A - {?nmi}). key x = k] = [x\nths xs A. key x = k]" using F and G and H by simp qed lemma maxi_stable: assumes A: "xs \ []" and B: "maxi xs key \ A" (is "?nma \ _") shows "[x\nths xs (A - {?nma}) @ [xs ! ?nma]. key x = k] = [x\nths xs A. key x = k]" (is "[x\_ @ [?xma]. _] = _") proof (simp, rule conjI, rule_tac [!] impI, rule_tac [2] filter_nths_diff, rule_tac [2] maxi_less, simp_all add: A) assume C: "key ?xma = k" moreover have D: "?nma < length xs" using A by (rule_tac maxi_less, simp) ultimately have "0 < length [x\rev xs. key x = k]" by (simp add: length_filter_conv_card card_gt_0_iff, rule_tac exI [where x = "length xs - Suc ?nma"], simp add: rev_nth) hence "\y ys. [x\rev xs. key x = k] = y # ys" by (rule_tac list.exhaust [of "[x\rev xs. key x = k]"], simp_all) then obtain y and ys where "[x\rev xs. key x = k] = y # ys" by blast hence "\us vs. rev xs = us @ y # vs \ (\u \ set us. key u \ k) \ key y = k \ ys = [x\vs. key x = k]" (is "\us vs. ?P us vs") by (simp add: filter_eq_Cons_iff) then obtain us and vs where E: "?P us vs" by blast hence F: "xs = rev vs @ y # rev us" by (simp add: rev_swap) have G: "length us = length xs - Suc ?nma" proof (rule ccontr, erule neqE) assume "length us < length xs - Suc ?nma" hence "?nma < length xs - Suc (length us)" by simp moreover have "length xs - Suc (length us) < length xs" using D by simp ultimately have "key (xs ! (length xs - Suc (length us))) < key ?xma" by (rule maxi_last [OF A]) moreover have "length us < length xs" using F by simp ultimately have "key (rev xs ! length us) < key ?xma" by (simp add: rev_nth) moreover have "key (rev xs ! length us) = k" using E by simp ultimately show False using C by simp next assume H: "length xs - Suc ?nma < length us" hence "rev xs ! (length xs - Suc ?nma) = us ! (length xs - Suc ?nma)" using E by (simp add: nth_append) hence "rev xs ! (length xs - Suc ?nma) \ set us" using H by simp hence "?xma \ set us" using D by (simp add: rev_nth) thus False using C and E by simp qed moreover have "rev xs ! 
length us = y" using E by simp ultimately have H: "?xma = y" using D by (simp add: rev_nth) have "length xs = length us + Suc ?nma" using D and G by simp hence I: "length vs = ?nma" using F by simp hence "nths xs A = nths (rev vs) A @ nths (y # rev us) {i. i + ?nma \ A}" using F by (simp add: nths_append) also have "\ = nths (rev vs) (A \ {.. A}" (is "_ = ?ws @ _ # _") using B and I by (simp add: nths_Cons, subst nths_range, simp) finally have J: "[x\nths xs A. key x = k] = [x\?ws. key x = k] @ [y]" using E by (simp add: filter_empty_conv, rule_tac ballI, drule_tac in_set_nthsD, simp) have "nths xs (A - {?nma}) = nths (rev vs) (A - {?nma}) @ nths (y # rev us) {i. i + ?nma \ A - {?nma}}" using F and I by (simp add: nths_append) hence "nths xs (A - {?nma}) = nths (rev vs) ((A - {?nma}) \ {.. A}" using I by (simp add: nths_Cons, subst nths_range, simp) moreover have "(A - {?nma}) \ {.. {..nths xs (A - {?nma}). key x = k] = [x\?ws. key x = k]" using E by (simp add: filter_empty_conv, rule_tac ballI, drule_tac in_set_nthsD, simp) show "[x\nths xs (A - {?nma}). key x = k] @ [?xma] = [x\nths xs A. key x = k]" using H and J and K by simp qed lemma round_stab_inv [rule_format]: "index_less index key \ index_same index key \ bn_inv p q t \ add_inv n t \ stab_inv f key t \ stab_inv f key (round index key p q r t)" proof (induction index key p q r t arbitrary: n f rule: round.induct, (rule_tac [!] impI)+, simp, simp, simp_all only: simp_thms) fix index p q r u ns xs n f and key :: "'a \ 'b" let ?t = "round index key p q r (u, ns, tl xs)" assume "\n f. bn_inv p q (u, ns, tl xs) \ add_inv n (u, ns, tl xs) \ stab_inv f key (u, ns, tl xs) \ stab_inv f key ?t" and "bn_inv p q (u, Suc 0 # ns, xs)" and "add_inv n (u, Suc 0 # ns, xs)" and "stab_inv f key (u, Suc 0 # ns, xs)" thus "stab_inv f key (round index key p q r (u, Suc 0 # ns, xs))" proof (cases ?t, cases xs, simp_all add: add_suc, arith, erule_tac conjE, rule_tac allI, simp) fix k y ys xs' let ?f' = "f(key y := tl (f (key y)))" assume "\n' f'. foldl (+) 0 ns = n' \ length ys = n' \ (\k'. [x\ys. key x = k'] = f' k') \ (\k'. [x\xs'. key x = k'] = f' k')" moreover assume "Suc (foldl (+) 0 ns) = n" and "Suc (length ys) = n" ultimately have "(\k'. [x\ys. key x = k'] = ?f' k') \ (\k'. [x\xs'. key x = k'] = ?f' k')" by blast moreover assume A: "\k'. (if key y = k' then y # [x\ys. key x = k'] else [x\ys. key x = k']) = f k'" hence B: "f (key y) = y # [x\ys. key x = key y]" by (drule_tac spec [where x = "key y"], simp) hence "\k'. [x\ys. key x = k'] = ?f' k'" proof (rule_tac allI, simp, rule_tac impI) fix k' assume "k' \ key y" thus "[x\ys. key x = k'] = f k'" using A by (drule_tac spec [where x = k'], simp) qed ultimately have C: "\k'. [x\xs'. key x = k'] = ?f' k'" .. show "(key y = k \ y # [x\xs'. key x = k] = f k) \ (key y \ k \ [x\xs'. key x = k] = f k)" proof (rule conjI, rule_tac [!] impI) assume "key y = k" moreover have "tl (f (key y)) = [x\xs'. key x = key y]" using C by simp hence "f (key y) = y # [x\xs'. key x = key y]" using B by (subst hd_Cons_tl [symmetric], simp_all) ultimately show "y # [x\xs'. key x = k] = f k" by simp next assume "key y \ k" thus "[x\xs'. key x = k] = f k" using C by simp qed qed next fix index p q r u m ns n f and key :: "'a \ 'b" and xs :: "'a list" let ?ws = "take (Suc (Suc m)) xs" let ?ys = "drop (Suc (Suc m)) xs" let ?r = "\m'. round_suc_suc index key ?ws m m' u" let ?t = "\r' v. round index key p q r' (v, ns, ?ys)" assume A: "index_less index key" and B: "index_same index key" assume "\ws a b c d e g h i n f. 
ws = ?ws \ a = bn_comp m p q r \ (b, c) = bn_comp m p q r \ d = ?r b \ (e, g) = ?r b \ (h, i) = g \ bn_inv p q (e, ns, ?ys) \ add_inv n (e, ns, ?ys) \ stab_inv f key (e, ns, ?ys) \ stab_inv f key (?t c e)" and "bn_inv p q (u, Suc (Suc m) # ns, xs)" and "add_inv n (u, Suc (Suc m) # ns, xs)" and "stab_inv f key (u, Suc (Suc m) # ns, xs)" thus "stab_inv f key (round index key p q r (u, Suc (Suc m) # ns, xs))" using [[simproc del: defined_all]] proof (simp split: prod.split, ((rule_tac allI)+, ((rule_tac impI)+)?)+, (erule_tac conjE)+, subst (asm) (2) add_base_zero, simp) fix m' r' v ms' ws' xs' k let ?f = "\k. [x\?ys. key x = k]" let ?P = "\f. \k. [x\?ys. key x = k] = f k" let ?Q = "\f. \k. [x\xs'. key x = k] = f k" assume C: "?r m' = (v, ms', ws')" and D: "bn_comp m p q r = (m', r')" and E: "bn_valid m p q" and F: "Suc (Suc (foldl (+) 0 ns + m)) = n" and G: "length xs = n" assume "\ws a b c d e g h i n' f. ws = ?ws \ a = (m', r') \ b = m' \ c = r' \ d = (v, ms', ws') \ e = v \ g = (ms', ws') \ h = ms' \ i = ws' \ foldl (+) 0 ns = n' \ n - Suc (Suc m) = n' \ ?P f \ ?Q f" hence "\f. foldl (+) 0 ns = n - Suc (Suc m) \ ?P f \ ?Q f" by simp hence "\f. ?P f \ ?Q f" using F by simp hence "?P ?f \ ?Q ?f" . hence "[x\xs'. key x = k] = [x\?ys. key x = k]" by simp moreover assume "\k. [x\xs. key x = k] = f k" hence "f k = [x\?ws @ ?ys. key x = k]" by simp ultimately have "f k = [x\?ws. key x = k] @ [x\xs'. key x = k]" by (subst (asm) filter_append, simp) with C [symmetric] show "[x\ws'. key x = k] @ [x\xs'. key x = k] = f k" proof (simp add: round_suc_suc_def Let_def del: filter.simps split: if_split_asm) let ?nmi = "mini ?ws key" let ?nma = "maxi ?ws key" let ?xmi = "?ws ! ?nmi" let ?xma = "?ws ! ?nma" let ?mi = "key ?xmi" let ?ma = "key ?xma" let ?k = "case m of 0 \ m | Suc 0 \ m | Suc (Suc i) \ u + m'" let ?zs = "nths ?ws (- {?nmi, ?nma})" let ?ms = "enum ?zs index key ?k ?mi ?ma" have H: "length ?ws = Suc (Suc m)" using F and G by simp hence I: "?nmi \ ?nma" by (rule_tac mini_maxi_neq, simp) have "[x\(([?xmi] @ map the (fill ?zs (offs ?ms 0) index key m ?mi ?ma)) @ [?xma]). key x = k] = [x\?ws. key x = k]" proof (cases "m = 0") case True have "?nmi < length ?ws" using H by (rule_tac mini_less, simp) hence J: "?nmi < Suc (Suc 0)" using True by simp moreover have "?nma < length ?ws" using H by (rule_tac maxi_less, simp) hence K: "?nma < Suc (Suc 0)" using True by simp ultimately have "card ({..[?xmi] @ nths ?ws ({..[?xma]. key x = k] = [x\?ws. key x = k]" proof (subst mini_stable, simp only: length_greater_0_conv [symmetric] H, simp add: I J, subst filter_append [symmetric]) show "[x\nths ?ws ({..?ws. key x = k]" by (subst maxi_stable, simp only: length_greater_0_conv [symmetric] H, simp add: K, simp add: True) qed qed next case False hence "0 < ?k" by (simp, drule_tac bn_comp_fst_nonzero [OF E], subst (asm) D, simp split: nat.split) hence "[x\map the (fill ?zs (offs ?ms 0) index key (length ?zs) ?mi ?ma). key x = k] = [x\?zs. k = key x]" by (rule_tac fill_offs_enum_stable [OF A B], simp, rule_tac conjI, ((rule_tac mini_lb | rule_tac maxi_ub), erule_tac in_set_nthsD)+) moreover have "[x\?zs. k = key x] = [x\?zs. key x = k]" by (rule filter_cong, simp, blast) ultimately have J: "[x\map the (fill ?zs (offs ?ms 0) index key m ?mi ?ma). key x = k] = [x\?zs. key x = k]" using H by (simp add: mini_maxi_nths) show ?thesis proof (simp only: filter_append J, subst Compl_eq_Diff_UNIV, subst Diff_insert, subst filter_append [symmetric]) show "[x\[?xmi] @ nths ?ws (UNIV - {?nma} - {?nmi}). 
key x = k] @ [x\[?xma]. key x = k] = [x\?ws. key x = k]" proof (subst mini_stable, simp only: length_greater_0_conv [symmetric] H, simp add: I, subst filter_append [symmetric]) show "[x\nths ?ws (UNIV - {?nma}) @ [?xma]. key x = k] = [x\?ws. key x = k]" by (subst maxi_stable, simp only: length_greater_0_conv [symmetric] H, simp, subst nths_range, subst H, simp) qed qed qed thus "[x\?xmi # map the (fill ?zs (offs ?ms 0) index key m ?mi ?ma) @ [?xma]. key x = k] = [x\?ws. key x = k]" by simp qed qed qed lemma gcsort_stab_inv: assumes A: "index_less index key" and B: "index_same index key" and C: "add_inv n t" and D: "n \ p" shows "\t' \ gcsort_set index key p t; stab_inv f key t\ \ stab_inv f key t'" by (erule gcsort_set.induct, simp, drule gcsort_add_inv [OF A _ C D], rule round_stab_inv [OF A B], simp_all del: bn_inv.simps, erule conjE, frule sym, erule subst, rule bn_inv_intro, insert D, simp) text \ \null The only remaining task is to address step 10 of the proof method, which is done by proving theorem @{text gcsort_stable}. It holds under the conditions that the objects' number is not larger than the counters' upper bound and function @{text index} satisfies both predicates @{const index_less} and @{const index_same}, and states that function @{const gcsort} leaves unchanged the sublist of the objects having a given key within the input objects' list. \null \ theorem gcsort_stable: assumes A: "index_less index key" and B: "index_same index key" and C: "length xs \ p" shows "[x\gcsort index key p xs. key x = k] = [x\xs. key x = k]" proof - let ?t = "(0, [length xs], xs)" have "stab_inv (\k. [x\xs. key x = k]) key (gcsort_aux index key p ?t)" by (rule gcsort_stab_inv [OF A B _ C], rule gcsort_add_input, rule gcsort_aux_set, rule gcsort_stab_input) hence "[x\gcsort_out (gcsort_aux index key p ?t). key x = k] = [x\xs. key x = k]" by (rule gcsort_stab_intro) moreover have "?t = gcsort_in xs" by (simp add: gcsort_in_def) ultimately show ?thesis by (simp add: gcsort_def) qed end diff --git a/thys/Matroids/Matroid.thy b/thys/Matroids/Matroid.thy --- a/thys/Matroids/Matroid.thy +++ b/thys/Matroids/Matroid.thy @@ -1,1337 +1,1338 @@ (* File: Matroid.thy Author: Jonas Keinholz *) section \Matroids\ theory Matroid imports Indep_System begin lemma card_subset_ex: assumes "finite A" "n \ card A" shows "\B \ A. card B = n" using assms proof (induction A arbitrary: n rule: finite_induct) case (insert x A) show ?case proof (cases n) case 0 then show ?thesis using card.empty by blast next case (Suc k) then have "\B \ A. card B = k" using insert by auto then obtain B where "B \ A" "card B = k" by auto moreover from this have "finite B" using insert.hyps finite_subset by auto ultimately have "card (insert x B) = n" using Suc insert.hyps card_insert_disjoint by fastforce then show ?thesis using \B \ A\ by blast qed qed auto locale matroid = indep_system + assumes augment_aux: "indep X \ indep Y \ card X = Suc (card Y) \ \x \ X - Y. indep (insert x Y)" begin lemma augment: assumes "indep X" "indep Y" "card Y < card X" shows "\x \ X - Y. indep (insert x Y)" proof - obtain X' where "X' \ X" "card X' = Suc (card Y)" using assms card_subset_ex[of X "Suc (card Y)"] indep_finite by auto then obtain x where "x \ X' - Y" "indep (insert x Y)" using assms augment_aux[of X' Y] indep_subset by auto then show ?thesis using \X' \ X\ by auto qed lemma augment_psubset: assumes "indep X" "indep Y" "Y \ X" shows "\x \ X - Y. 
indep (insert x Y)" using assms augment psubset_card_mono indep_finite by blast subsection \Minors\ text \ A subset of the ground set induces a matroid. \ lemma matroid_subset [simp, intro]: assumes "\ \ carrier" shows "matroid \ (indep_in \)" unfolding matroid_def matroid_axioms_def proof (safe, goal_cases indep_system augment) case indep_system then show ?case using indep_system_subset[OF assms] . next case (augment X Y) then show ?case using augment_aux[of X Y] unfolding indep_in_def by auto qed context fixes \ assumes "\ \ carrier" begin interpretation \: matroid \ "indep_in \" using \\ \ carrier\ by auto lemmas augment_aux_indep_in = \.augment_aux lemmas augment_indep_in = \.augment lemmas augment_psubset_indep_in = \.augment_psubset end subsection \Bases\ lemma basis_card: assumes "basis B\<^sub>1" assumes "basis B\<^sub>2" shows "card B\<^sub>1 = card B\<^sub>2" proof (rule ccontr, goal_cases False) case False then have "card B\<^sub>1 < card B\<^sub>2 \ card B\<^sub>2 < card B\<^sub>1" by auto moreover { fix B\<^sub>1 B\<^sub>2 assume "basis B\<^sub>1" "basis B\<^sub>2" "card B\<^sub>1 < card B\<^sub>2" then obtain x where "x \ B\<^sub>2 - B\<^sub>1" "indep (insert x B\<^sub>1)" using augment basisD by blast then have "x \ carrier - B\<^sub>1" using \basis B\<^sub>1\ basisD indep_subset_carrier by blast then have "\ indep (insert x B\<^sub>1)" using \basis B\<^sub>1\ basisD by auto then have "False" using \indep (insert x B\<^sub>1)\ by auto } ultimately show ?case using assms by auto qed lemma basis_indep_card: assumes "indep X" assumes "basis B" shows "card X \ card B" proof - obtain B' where "basis B'" "X \ B'" using assms indep_imp_subset_basis by auto then show ?thesis using assms basis_finite basis_card[of B B'] by (auto intro: card_mono) qed lemma basis_augment: assumes "basis B\<^sub>1" "basis B\<^sub>2" "x \ B\<^sub>1 - B\<^sub>2" shows "\y \ B\<^sub>2 - B\<^sub>1. 
basis (insert y (B\<^sub>1 - {x}))" proof - let ?B\<^sub>1 = "B\<^sub>1 - {x}" have "card ?B\<^sub>1 < card B\<^sub>2" using assms basis_card[of B\<^sub>1 B\<^sub>2] card_Diff1_less[OF basis_finite, of B\<^sub>1] by auto moreover have "indep ?B\<^sub>1" using assms basis_indep[of B\<^sub>1] indep_subset[of B\<^sub>1 ?B\<^sub>1] by auto ultimately obtain y where y: "y \ B\<^sub>2 - ?B\<^sub>1" "indep (insert y ?B\<^sub>1)" using assms augment[of B\<^sub>2 ?B\<^sub>1] basis_indep by auto let ?B\<^sub>1' = "insert y ?B\<^sub>1" have "basis ?B\<^sub>1'" using \indep ?B\<^sub>1'\ proof (rule basisI, goal_cases "insert") case (insert x) have "card (insert x ?B\<^sub>1') > card B\<^sub>1" proof - have "card (insert x ?B\<^sub>1') = Suc (card ?B\<^sub>1')" using insert card.insert_remove[OF indep_finite, of ?B\<^sub>1'] y by auto also have "\ = Suc (Suc (card ?B\<^sub>1))" using card.insert_remove[OF indep_finite, of ?B\<^sub>1] \indep ?B\<^sub>1\ y by auto also have "\ = Suc (card B\<^sub>1)" using assms basis_finite[of B\<^sub>1] card.remove[of B\<^sub>1] by auto finally show ?thesis by auto qed then have "\indep (insert x (insert y ?B\<^sub>1))" using assms basis_indep_card[of "insert x (insert y ?B\<^sub>1)" B\<^sub>1] by auto moreover have "insert x (insert y ?B\<^sub>1) \ carrier" using assms insert y basis_finite indep_subset_carrier by auto ultimately show ?case by auto qed then show ?thesis using assms y by auto qed context fixes \ assumes *: "\ \ carrier" begin interpretation \: matroid \ "indep_in \" using \\ \ carrier\ by auto lemmas basis_in_card = \.basis_card[OF basis_inD_aux[OF *] basis_inD_aux[OF *]] lemmas basis_in_indep_in_card = \.basis_indep_card[OF _ basis_inD_aux[OF *]] lemma basis_in_augment: assumes "basis_in \ B\<^sub>1" "basis_in \ B\<^sub>2" "x \ B\<^sub>1 - B\<^sub>2" shows "\y \ B\<^sub>2 - B\<^sub>1. basis_in \ (insert y (B\<^sub>1 - {x}))" using assms \.basis_augment unfolding basis_in_def by auto end subsection \Circuits\ lemma circuit_elim: assumes "circuit C\<^sub>1" "circuit C\<^sub>2" "C\<^sub>1 \ C\<^sub>2" "x \ C\<^sub>1 \ C\<^sub>2" shows "\C\<^sub>3 \ (C\<^sub>1 \ C\<^sub>2) - {x}. 
circuit C\<^sub>3" proof - let ?C = "(C\<^sub>1 \ C\<^sub>2) - {x}" let ?carrier = "C\<^sub>1 \ C\<^sub>2" have assms': "circuit_in carrier C\<^sub>1" "circuit_in carrier C\<^sub>2" using assms circuit_imp_circuit_in by auto have "?C \ carrier" using assms circuit_subset_carrier by auto show ?thesis proof (cases "indep ?C") case False then show ?thesis using dep_iff_supset_circuit \?C \ carrier\ by auto next case True then have "indep_in ?carrier ?C" using \?C \ carrier\ by (auto intro: indep_inI) have *: "?carrier \ carrier" using assms circuit_subset_carrier by auto obtain y where y: "y \ C\<^sub>2" "y \ C\<^sub>1" using assms circuit_subset_eq by blast then have "indep_in ?carrier (C\<^sub>2 - {y})" using assms' circuit_in_min_dep_in[OF * circuit_in_supI[OF *, of C\<^sub>2]] by auto then obtain B where B: "basis_in ?carrier B" "C\<^sub>2 - {y} \ B" using * assms indep_in_imp_subset_basis_in[of ?carrier "C\<^sub>2 - {y}"] by auto have "y \ B" proof (rule ccontr, goal_cases False) case False then have "C\<^sub>2 \ B" using B by auto moreover have "circuit_in ?carrier C\<^sub>2" using * assms' circuit_in_supI by auto ultimately have "\ indep_in ?carrier B" using B basis_in_subset_carrier[OF *] supset_circuit_in_imp_dep_in[OF *] by auto then show False using assms B basis_in_indep_in[OF *] by auto qed have "C\<^sub>1 - B \ {}" proof (rule ccontr, goal_cases False) case False then have "C\<^sub>1 - (C\<^sub>1 \ B) = {}" by auto then have "C\<^sub>1 = C\<^sub>1 \ B" using assms circuit_subset_eq by auto moreover have "indep (C\<^sub>1 \ B)" using assms B basis_in_indep_in[OF *] indep_in_subset[OF *, of B "C\<^sub>1 \ B"] indep_in_indep by auto ultimately show ?case using assms circuitD by auto qed then obtain z where z: "z \ C\<^sub>1" "z \ B" by auto have "y \ z" using y z by auto have "x \ C\<^sub>1" "x \ C\<^sub>2" using assms by auto have "finite ?carrier" using assms carrier_finite finite_subset by auto have "card B \ card (?carrier - {y, z})" proof (rule card_mono) show "finite (C\<^sub>1 \ C\<^sub>2 - {y, z})" using \finite ?carrier\ by auto next show "B \ C\<^sub>1 \ C\<^sub>2 - {y, z}" using B basis_in_subset_carrier[OF *, of B] \y \ B\ \z \ B\ by auto qed also have "\ = card ?carrier - 2" using \finite ?carrier\ \y \ C\<^sub>2\ \z \ C\<^sub>1\ \y \ z\ card_Diff_subset_Int by auto also have "\ < card ?carrier - 1" proof - have "card ?carrier = card C\<^sub>1 + card C\<^sub>2 - card (C\<^sub>1 \ C\<^sub>2)" using assms \finite ?carrier\ card_Un_Int[of C\<^sub>1 C\<^sub>2] by auto also have "\ = card C\<^sub>1 + (card C\<^sub>2 - card (C\<^sub>1 \ C\<^sub>2))" using assms \finite ?carrier\ card_mono[of C\<^sub>2] by auto also have "\ = card C\<^sub>1 + card (C\<^sub>2 - C\<^sub>1)" proof - have "card (C\<^sub>2 - C\<^sub>1) = card C\<^sub>2 - card (C\<^sub>2 \ C\<^sub>1)" using assms \finite ?carrier\ card_Diff_subset_Int[of C\<^sub>2 C\<^sub>1] by auto also have "\ = card C\<^sub>2 - card (C\<^sub>1 \ C\<^sub>2)" by (simp add: inf_commute) finally show ?thesis by auto qed finally have "card (C\<^sub>1 \ C\<^sub>2) = card C\<^sub>1 + card (C\<^sub>2 - C\<^sub>1)" . moreover have "card C\<^sub>1 > 0" using assms circuit_nonempty \finite ?carrier\ by auto moreover have "card (C\<^sub>2 - C\<^sub>1) > 0" using assms \finite ?carrier\ \y \ C\<^sub>2\ \y \ C\<^sub>1\ by auto ultimately show ?thesis by auto qed also have "\ = card ?C" using \finite ?carrier\ card_Diff_singleton \x \ C\<^sub>1\ \x \ C\<^sub>2\ by auto finally have "card B < card ?C" . 
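(* B is a basis (basis_in) of the union of C\<^sub>1 and C\<^sub>2 while ?C was assumed independent in that union, so basis_in_indep_in_card gives card ?C <= card B; together with the strict inequality just derived, this yields the contradiction needed to close the case where ?C is independent. *)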
then have False using basis_in_indep_in_card[OF *, of ?C B] B \indep_in ?carrier ?C\ by auto then show ?thesis by auto qed qed lemma min_dep_imp_supset_circuit: assumes "indep X" assumes "circuit C" assumes "C \ insert x X" shows "x \ C" proof (rule ccontr) assume "x \ C" then have "C \ X" using assms by auto then have "indep C" using assms indep_subset by auto then show False using assms circuitD by auto qed lemma min_dep_imp_ex1_supset_circuit: assumes "x \ carrier" assumes "indep X" assumes "\ indep (insert x X)" shows "\!C. circuit C \ C \ insert x X" proof - obtain C where C: "circuit C" "C \ insert x X" using assms indep_subset_carrier dep_iff_supset_circuit by auto show ?thesis proof (rule ex1I, goal_cases ex unique) show "circuit C \ C \ insert x X" using C by auto next { fix C' assume C': "circuit C'" "C' \ insert x X" have "C' = C" proof (rule ccontr) assume "C' \ C" moreover have "x \ C' \ C" using C C' assms min_dep_imp_supset_circuit by auto ultimately have "\ indep (C' \ C - {x})" using circuit_elim[OF C(1) C'(1), of x] supset_circuit_imp_dep[of _ "C' \ C - {x}"] by auto moreover have "C' \ C - {x} \ X" using C C' by auto ultimately show False using assms indep_subset by auto qed } then show "\C'. circuit C' \ C' \ insert x X \ C' = C" by auto qed qed lemma basis_ex1_supset_circuit: assumes "basis B" assumes "x \ carrier - B" shows "\!C. circuit C \ C \ insert x B" using assms min_dep_imp_ex1_supset_circuit basisD by auto definition fund_circuit :: "'a \ 'a set \ 'a set" where "fund_circuit x B \ (THE C. circuit C \ C \ insert x B)" lemma circuit_iff_fund_circuit: "circuit C \ (\x B. x \ carrier - B \ basis B \ C = fund_circuit x B)" proof (safe, goal_cases LTR RTL) case LTR then obtain x where "x \ C" using circuit_nonempty by auto then have "indep (C - {x})" using LTR unfolding circuit_def by auto then obtain B where B: "basis B" "C - {x} \ B" using indep_imp_subset_basis by auto then have "x \ carrier" using LTR circuit_subset_carrier \x \ C\ by auto moreover have "x \ B" proof (rule ccontr, goal_cases False) case False then have "C \ B" using \C - {x} \ B\ by auto then have "\ indep B" using LTR B basis_subset_carrier supset_circuit_imp_dep by auto then show ?case using B basis_indep by auto qed ultimately show ?case unfolding fund_circuit_def using LTR B theI_unique[OF basis_ex1_supset_circuit[of B x], of C] by auto next case (RTL x B) then have "\!C. circuit C \ C \ insert x B" using min_dep_imp_ex1_supset_circuit basisD[of B] by auto then show ?case unfolding fund_circuit_def using theI[of "\C. circuit C \ C \ insert x B"] by fastforce qed lemma fund_circuitI: assumes "basis B" assumes "x \ carrier - B" assumes "circuit C" assumes "C \ insert x B" shows "fund_circuit x B = C" unfolding fund_circuit_def using assms theI_unique[OF basis_ex1_supset_circuit, of B x C] by auto definition fund_circuit_in where "fund_circuit_in \ x B \ matroid.fund_circuit \ (indep_in \) x B" context fixes \ assumes *: "\ \ carrier" begin interpretation \: matroid \ "indep_in \" using \\ \ carrier\ by auto lemma fund_circuit_inI_aux: "\.fund_circuit x B = fund_circuit_in \ x B" unfolding fund_circuit_in_def by auto lemma circuit_in_elim: assumes "circuit_in \ C\<^sub>1" "circuit_in \ C\<^sub>2" "C\<^sub>1 \ C\<^sub>2" "x \ C\<^sub>1 \ C\<^sub>2" shows "\C\<^sub>3 \ (C\<^sub>1 \ C\<^sub>2) - {x}. 
circuit_in \ C\<^sub>3" using assms \.circuit_elim unfolding circuit_in_def by auto lemmas min_dep_in_imp_supset_circuit_in = \.min_dep_imp_supset_circuit[OF _ circuit_inD_aux[OF *]] lemma min_dep_in_imp_ex1_supset_circuit_in: assumes "x \ \" assumes "indep_in \ X" assumes "\ indep_in \ (insert x X)" shows "\!C. circuit_in \ C \ C \ insert x X" using assms \.min_dep_imp_ex1_supset_circuit unfolding circuit_in_def by auto lemma basis_in_ex1_supset_circuit_in: assumes "basis_in \ B" assumes "x \ \ - B" shows "\!C. circuit_in \ C \ C \ insert x B" using assms \.basis_ex1_supset_circuit unfolding circuit_in_def basis_in_def by auto lemma fund_circuit_inI: assumes "basis_in \ B" assumes "x \ \ - B" assumes "circuit_in \ C" assumes "C \ insert x B" shows "fund_circuit_in \ x B = C" using assms \.fund_circuitI unfolding basis_in_def circuit_in_def fund_circuit_in_def by auto end context fixes \ assumes *: "\ \ carrier" begin interpretation \: matroid \ "indep_in \" using \\ \ carrier\ by auto lemma fund_circuit_in_sub_cong: assumes "\' \ \" assumes "x \ \' - B" assumes "basis_in \' B" shows "\.fund_circuit_in \' x B = fund_circuit_in \' x B" proof - obtain C where C: "circuit_in \' C" "C \ insert x B" using * assms basis_in_ex1_supset_circuit_in[of \' B x] by auto then have "fund_circuit_in \' x B = C" using * assms fund_circuit_inI by auto also have "\ = \.fund_circuit_in \' x B" using * assms C \.fund_circuit_inI basis_in_sub_cong[of \] circuit_in_sub_cong[of \] by auto finally show ?thesis by auto qed end subsection \Ranks\ abbreviation rank_of where "rank_of \ lower_rank_of" lemmas rank_of_def = lower_rank_of_def lemmas rank_of_sub_cong = lower_rank_of_sub_cong lemmas rank_of_le = lower_rank_of_le context fixes \ assumes *: "\ \ carrier" begin interpretation \: matroid \ "indep_in \" using * by auto lemma lower_rank_of_eq_upper_rank_of: "lower_rank_of \ = upper_rank_of \" proof - obtain B where "basis_in \ B" using basis_in_ex[OF *] by auto then have "{card B | B. basis_in \ B} = {card B}" by safe (auto dest: basis_in_card[OF *]) then show ?thesis unfolding lower_rank_of_def upper_rank_of_def by auto qed lemma rank_of_eq_card_basis_in: assumes "basis_in \ B" shows "rank_of \ = card B" proof - have "{card B | B. basis_in \ B} = {card B}" using assms by safe (auto dest: basis_in_card[OF *]) then show ?thesis unfolding rank_of_def by auto qed lemma rank_of_indep_in_le: assumes "indep_in \ X" shows "card X \ rank_of \" proof - { fix B assume "basis_in \ B" moreover obtain B' where "basis_in \ B'" "X \ B'" using assms indep_in_imp_subset_basis_in[OF *] by auto ultimately have "card X \ card B" using card_mono[OF basis_in_finite[OF *]] basis_in_card[OF *, of B B'] by auto } moreover have "finite {card B | B. 
basis_in \ B}" using collect_basis_in_finite[OF *] by auto ultimately show ?thesis unfolding rank_of_def using basis_in_ex[OF *] by auto qed end lemma rank_of_mono: assumes "X \ Y" assumes "Y \ carrier" shows "rank_of X \ rank_of Y" proof - obtain B\<^sub>X where B\<^sub>X: "basis_in X B\<^sub>X" using assms basis_in_ex[of X] by auto moreover obtain B\<^sub>Y where B\<^sub>Y: "basis_in Y B\<^sub>Y" using assms basis_in_ex[of Y] by auto moreover have "card B\<^sub>X \ card B\<^sub>Y" using assms basis_in_indep_in_card[OF _ _ B\<^sub>Y] basis_in_indep_in[OF _ B\<^sub>X] indep_in_subI_subset by auto ultimately show ?thesis using assms rank_of_eq_card_basis_in by auto qed lemma rank_of_insert_le: assumes "X \ carrier" assumes "x \ carrier" shows "rank_of (insert x X) \ Suc (rank_of X)" proof - obtain B where B: "basis_in X B" using assms basis_in_ex[of X] by auto have "basis_in (insert x X) B \ basis_in (insert x X) (insert x B)" proof - obtain B' where B': "B' \ insert x X - X" "basis_in (insert x X) (B \ B')" using assms B basis_in_subI[of "insert x X" X B] by auto then have "B' = {} \ B' = {x}" by auto then show ?thesis proof assume "B' = {}" then have "basis_in (insert x X) B" using B' by auto then show ?thesis by auto next assume "B' = {x}" then have "basis_in (insert x X) (insert x B)" using B' by auto then show ?thesis by auto qed qed then show ?thesis proof assume "basis_in (insert x X) B" then show ?thesis using assms B rank_of_eq_card_basis_in by auto next assume "basis_in (insert x X) (insert x B)" then have "rank_of (insert x X) = card (insert x B)" using assms rank_of_eq_card_basis_in by auto also have "\ = Suc (card (B - {x}))" using assms card.insert_remove[of B x] using B basis_in_finite by auto also have "\ \ Suc (card B)" using assms B basis_in_finite card_Diff1_le[of B] by auto also have "\ = Suc (rank_of X)" using assms B rank_of_eq_card_basis_in by auto finally show ?thesis . qed qed lemma rank_of_Un_Int_le: assumes "X \ carrier" assumes "Y \ carrier" shows "rank_of (X \ Y) + rank_of (X \ Y) \ rank_of X + rank_of Y" proof - obtain B_Int where B_Int: "basis_in (X \ Y) B_Int" using assms basis_in_ex[of "X \ Y"] by auto then have "indep_in (X \ Y) B_Int" using assms indep_in_subI_subset[OF _ basis_in_indep_in[of "X \ Y" B_Int], of "X \ Y"] by auto then obtain B_Un where B_Un: "basis_in (X \ Y) B_Un" "B_Int \ B_Un" using assms indep_in_imp_subset_basis_in[of "X \ Y" B_Int] by auto have "card (B_Un \ (X \ Y)) + card (B_Un \ (X \ Y)) = card ((B_Un \ X) \ (B_Un \ Y)) + card ((B_Un \ X) \ (B_Un \ Y))" by (simp add: inf_assoc inf_left_commute inf_sup_distrib1) also have "\ = card (B_Un \ X) + card (B_Un \ Y)" proof - have "finite (B_Un \ X)" "finite (B_Un \ Y)" using assms finite_subset[OF _ carrier_finite] by auto then show ?thesis using card_Un_Int[of "B_Un \ X" "B_Un \ Y"] by auto qed also have "\ \ rank_of X + rank_of Y" proof - have "card (B_Un \ X) \ rank_of X" proof - have "indep_in X (B_Un \ X)" using assms basis_in_indep_in[OF _ B_Un(1)] indep_in_subI by auto then show ?thesis using assms rank_of_indep_in_le by auto qed moreover have "card (B_Un \ Y) \ rank_of Y" proof - have "indep_in Y (B_Un \ Y)" using assms basis_in_indep_in[OF _ B_Un(1)] indep_in_subI by auto then show ?thesis using assms rank_of_indep_in_le by auto qed ultimately show ?thesis by auto qed finally have "rank_of X + rank_of Y \ card (B_Un \ (X \ Y)) + card (B_Un \ (X \ Y))" . 
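(* The next steps identify the two intersections: B_Un restricted to the union of X and Y is B_Un itself, and its restriction to the intersection of X and Y collapses to B_Int (via card_seteq), so the bound above turns into the submodular rank inequality stated by the lemma. *)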
moreover have "B_Un \ (X \ Y) = B_Un" using assms basis_in_subset_carrier[OF _ B_Un(1)] by auto moreover have "B_Un \ (X \ Y) = B_Int" proof - have "card (B_Un \ (X \ Y)) \ card B_Int" proof - have "indep_in (X \ Y) (B_Un \ (X \ Y))" using assms basis_in_indep_in[OF _ B_Un(1)] indep_in_subI by auto then show ?thesis using assms basis_in_indep_in_card[of "X \ Y" _ B_Int] B_Int by auto qed moreover have "finite (B_Un \ (X \ Y))" using assms carrier_finite finite_subset[of "B_Un \ (X \ Y)"] by auto moreover have "B_Int \ B_Un \ (X \ Y)" using assms B_Un B_Int basis_in_subset_carrier[of "X \ Y" B_Int] by auto ultimately show ?thesis using card_seteq by blast qed ultimately have "rank_of X + rank_of Y \ card B_Un + card B_Int" by auto moreover have "card B_Un = rank_of (X \ Y)" using assms rank_of_eq_card_basis_in[OF _ B_Un(1)] by auto moreover have "card B_Int = rank_of (X \ Y)" using assms rank_of_eq_card_basis_in[OF _ B_Int] by fastforce ultimately show "rank_of X + rank_of Y \ rank_of (X \ Y) + rank_of (X \ Y)" by auto qed lemma rank_of_Un_absorbI: assumes "X \ carrier" "Y \ carrier" assumes "\y. y \ Y - X \ rank_of (insert y X) = rank_of X" shows "rank_of (X \ Y) = rank_of X" proof - have "finite (Y - X)" using finite_subset[OF \Y \ carrier\] carrier_finite by auto then show ?thesis using assms proof (induction "Y - X" arbitrary: Y rule: finite_induct ) case empty then have "X \ Y = X" by auto then show ?case by auto next case (insert y F) have "rank_of (X \ Y) + rank_of X \ rank_of X + rank_of X" proof - have "rank_of (X \ Y) + rank_of X = rank_of ((X \ (Y - {y})) \ (insert y X)) + rank_of ((X \ (Y - {y})) \ (insert y X))" proof - have "X \ Y = (X \ (Y - {y})) \ (insert y X)" "X = (X \ (Y - {y})) \ (insert y X)" using insert by auto then show ?thesis by auto qed also have "\ \ rank_of (X \ (Y - {y})) + rank_of (insert y X)" proof (rule rank_of_Un_Int_le) show "X \ (Y - {y}) \ carrier" using insert by auto next show "insert y X \ carrier" using insert by auto qed also have "\ = rank_of (X \ (Y - {y})) + rank_of X" proof - have "y \ Y - X" using insert by auto then show ?thesis using insert by auto qed also have "\ = rank_of X + rank_of X" proof - have "F = (Y - {y}) - X" "Y - {y} \ carrier" using insert by auto then show ?thesis using insert insert(3)[of "Y - {y}"] by auto qed finally show ?thesis . 
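(* The chain above bounds rank_of (X Un Y) + rank_of X from above by rank_of X + rank_of X; monotonicity (rank_of_mono) supplies the reverse bound, so rank_of (X Un Y) = rank_of X, which closes the induction step. *)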
qed moreover have "rank_of (X \ Y) + rank_of X \ rank_of X + rank_of X" using insert rank_of_mono by auto ultimately show ?case by auto qed qed lemma indep_iff_rank_of: assumes "X \ carrier" shows "indep X \ rank_of X = card X" proof (standard, goal_cases LTR RTL) case LTR then have "indep_in X X" by (auto intro: indep_inI) then have "basis_in X X" by (auto intro: basis_inI[OF assms]) then show ?case using rank_of_eq_card_basis_in[OF assms] by auto next case RTL obtain B where B: "basis_in X B" using basis_in_ex[OF assms] by auto then have "card B = card X" using RTL rank_of_eq_card_basis_in[OF assms] by auto then have "B = X" using basis_in_subset_carrier[OF assms B] card_seteq[OF finite_subset[OF assms carrier_finite]] by auto then show ?case using basis_in_indep_in[OF assms B] indep_in_indep by auto qed lemma basis_iff_rank_of: assumes "X \ carrier" shows "basis X \ rank_of X = card X \ rank_of X = rank_of carrier" proof (standard, goal_cases LTR RTL) case LTR then have "rank_of X = card X" using assms indep_iff_rank_of basis_indep by auto moreover have "\ = rank_of carrier" using LTR rank_of_eq_card_basis_in[of carrier X] basis_iff_basis_in by auto ultimately show ?case by auto next case RTL show ?case proof (rule basisI) show "indep X" using assms RTL indep_iff_rank_of by blast next fix x assume x: "x \ carrier - X" show "\ indep (insert x X)" proof (rule ccontr, goal_cases False) case False then have "card (insert x X) \ rank_of carrier" using assms x indep_inI rank_of_indep_in_le by auto also have "\ = card X" using RTL by auto finally show ?case using finite_subset[OF assms carrier_finite] x by auto qed qed qed lemma circuit_iff_rank_of: assumes "X \ carrier" shows "circuit X \ X \ {} \ (\x \ X. rank_of (X - {x}) = card (X - {x}) \ card (X - {x}) = rank_of X)" proof (standard, goal_cases LTR RTL) case LTR then have "X \ {}" using circuit_nonempty by auto moreover have indep_remove: "\x. x \ X \ rank_of (X - {x}) = card (X - {x})" proof - fix x assume "x \ X" then have "indep (X - {x})" using circuit_min_dep[OF LTR] by auto moreover have "X - {x} \ carrier" using assms by auto ultimately show "rank_of (X - {x}) = card (X - {x})" using indep_iff_rank_of by auto qed moreover have "\x. x \ X \ rank_of (X - {x}) = rank_of X" proof - fix x assume *: "x \ X" have "rank_of X \ card X" using assms rank_of_le by auto moreover have "rank_of X \ card X" using assms LTR circuitD indep_iff_rank_of[of X] by auto ultimately have "rank_of X < card X" by auto then have "rank_of X \ card (X - {x})" using * finite_subset[OF assms] carrier_finite by auto also have "\ = rank_of (X - {x})" using indep_remove \x \ X\ by auto finally show "rank_of (X - {x}) = rank_of X" using assms rank_of_mono[of "X - {x}" X] by auto qed ultimately show ?case by auto next case RTL then have "X \ {}" and indep_remove: "\x. x \ X \ rank_of (X - {x}) = card (X - {x})" and dep: "\x. x \ X \ rank_of (X - {x}) = rank_of X" by auto show ?case using assms proof (rule circuitI) obtain x where x: "x \ X" using \X \ {}\ by auto then have "rank_of X = card (X - {x})" using dep indep_remove by auto also have "\ < card X" using card_Diff1_less[OF finite_subset[OF assms carrier_finite] x] . 
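(* Hence rank_of X is strictly smaller than card X, so X cannot be independent by the characterisation indep_iff_rank_of. *)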
finally show "\ indep X" using indep_iff_rank_of[OF assms] by auto next fix x assume "x \ X" then show "indep (X - {x})" using assms indep_remove[of x] indep_iff_rank_of[of "X - {x}"] by auto qed qed context fixes \ assumes *: "\ \ carrier" begin interpretation \: matroid \ "indep_in \" using * by auto lemma indep_in_iff_rank_of: assumes "X \ \" shows "indep_in \ X \ rank_of X = card X" using assms \.indep_iff_rank_of rank_of_sub_cong[OF * assms] by auto lemma basis_in_iff_rank_of: assumes "X \ \" shows "basis_in \ X \ rank_of X = card X \ rank_of X = rank_of \" using \.basis_iff_rank_of[OF assms] rank_of_sub_cong[OF *] assms unfolding basis_in_def by auto lemma circuit_in_iff_rank_of: assumes "X \ \" shows "circuit_in \ X \ X \ {} \ (\x \ X. rank_of (X - {x}) = card (X - {x}) \ card (X - {x}) = rank_of X)" proof - have "circuit_in \ X \ \.circuit X" unfolding circuit_in_def .. also have "\ \ X \ {} \ (\x \ X. \.rank_of (X - {x}) = card (X - {x}) \ card (X - {x}) = \.rank_of X)" using \.circuit_iff_rank_of[OF assms] . also have "\ \ X \ {} \ (\x \ X. rank_of (X - {x}) = card (X - {x}) \ card (X - {x}) = rank_of X)" proof - { fix x have "\.rank_of (X - {x}) = rank_of (X - {x})" "\.rank_of X = rank_of X" using assms rank_of_sub_cong[OF *, of "X - {x}"] rank_of_sub_cong[OF *, of X] by auto then have "\.rank_of (X - {x}) = card (X - {x}) \ card (X - {x}) = \.rank_of X \ rank_of (X - {x}) = card (X - {x}) \ card (X - {x}) = rank_of X" by auto } - then show ?thesis by auto + then show ?thesis + by (auto simp: simp del: card_Diff_insert) qed finally show ?thesis . qed end subsection \Closure\ definition cl :: "'a set \ 'a set" where "cl X \ {x \ carrier. rank_of (insert x X) = rank_of X}" lemma clI: assumes "x \ carrier" assumes "rank_of (insert x X) = rank_of X" shows "x \ cl X" unfolding cl_def using assms by auto lemma cl_altdef: assumes "X \ carrier" shows "cl X = \{Y \ Pow carrier. X \ Y \ rank_of Y = rank_of X}" proof - { fix x assume *: "x \ cl X" have "x \ \{Y \ Pow carrier. X \ Y \ rank_of Y = rank_of X}" proof show "insert x X \ {Y \ Pow carrier. X \ Y \ rank_of Y = rank_of X}" using assms * unfolding cl_def by auto qed auto } moreover { fix x assume *: "x \ \{Y \ Pow carrier. 
X \ Y \ rank_of Y = rank_of X}" then obtain Y where Y: "x \ Y" "Y \ carrier" "X \ Y" "rank_of Y = rank_of X" by auto have "rank_of (insert x X) = rank_of X" proof - have "rank_of (insert x X) \ rank_of X" proof - have "insert x X \ Y" using Y by auto then show ?thesis using rank_of_mono[of "insert x X" Y] Y by auto qed moreover have "rank_of X \ rank_of (insert x X)" using Y by (auto intro: rank_of_mono) ultimately show ?thesis by auto qed then have "x \ cl X" using * unfolding cl_def by auto } ultimately show ?thesis by blast qed lemma cl_rank_of: "x \ cl X \ rank_of (insert x X) = rank_of X" unfolding cl_def by auto lemma cl_subset_carrier: "cl X \ carrier" unfolding cl_def by auto lemmas clD = cl_rank_of cl_subset_carrier lemma cl_subset: assumes "X \ carrier" shows "X \ cl X" using assms using insert_absorb[of _ X] by (auto intro!: clI) lemma cl_mono: assumes "X \ Y" assumes "Y \ carrier" shows "cl X \ cl Y" proof fix x assume "x \ cl X" then have "x \ carrier" using cl_subset_carrier by auto have "insert x X \ carrier" using assms \x \ cl X\ cl_subset_carrier[of X] by auto then interpret X_insert: matroid "insert x X" "indep_in (insert x X)" by auto have "insert x Y \ carrier" using assms \x \ cl X\ cl_subset_carrier[of X] by auto then interpret Y_insert: matroid "insert x Y" "indep_in (insert x Y)" by auto show "x \ cl Y" using \x \ carrier\ proof (rule clI, cases "x \ X") case True then show "rank_of (insert x Y) = rank_of Y" using assms insert_absorb[of x Y] by auto next case False obtain B\<^sub>X where B\<^sub>X: "basis_in X B\<^sub>X" using assms basis_in_ex[of X] by auto have "basis_in (insert x X) B\<^sub>X" proof - have "rank_of B\<^sub>X = card B\<^sub>X \ rank_of B\<^sub>X = rank_of (insert x X)" proof - have "rank_of B\<^sub>X = card B\<^sub>X \ rank_of B\<^sub>X = rank_of X" using assms B\<^sub>X basis_in_subset_carrier[where \ = X and X = B\<^sub>X] basis_in_iff_rank_of[where \ = X and X = B\<^sub>X] by blast then show ?thesis using cl_rank_of[OF \x \ cl X\] by auto qed then show ?thesis using assms basis_in_subset_carrier[OF _ B\<^sub>X] \x \ carrier\ basis_in_iff_rank_of[of "insert x X" B\<^sub>X] by auto qed have "indep_in (insert x Y) B\<^sub>X" using assms basis_in_indep_in[OF _ B\<^sub>X] indep_in_subI_subset[of X "insert x Y"] by auto then obtain B\<^sub>Y where B\<^sub>Y: "basis_in (insert x Y) B\<^sub>Y" "B\<^sub>X \ B\<^sub>Y" using assms \x \ carrier\ indep_in_iff_subset_basis_in[of "insert x Y" B\<^sub>X] by auto have "basis_in Y B\<^sub>Y" proof - have "x \ B\<^sub>Y" proof (rule ccontr, goal_cases False) case False then have "insert x B\<^sub>X \ B\<^sub>Y" using \B\<^sub>X \ B\<^sub>Y\ by auto then have "indep_in (insert x Y) (insert x B\<^sub>X)" using assms \x \ carrier\ B\<^sub>Y basis_in_indep_in[where \ = "insert x Y" and X = B\<^sub>Y] indep_in_subset[where \ = "insert x Y" and X = B\<^sub>Y and Y = "insert x B\<^sub>X"] by auto then have "indep_in (insert x X) (insert x B\<^sub>X)" using assms B\<^sub>X basis_in_subset_carrier[where \ = X and X = B\<^sub>X] indep_in_supI[where \ = "insert x Y" and \' = "insert x X" and X = "insert x B\<^sub>X"] by auto moreover have "x \ insert x X - B\<^sub>X" using assms \x \ X\ B\<^sub>X basis_in_subset_carrier[where \ = X and X = B\<^sub>X] by auto ultimately show ?case using assms \x \ carrier\ \basis_in (insert x X) B\<^sub>X\ basis_in_max_indep_in[where \ = "insert x X" and X = B\<^sub>X and x = x] by auto qed then show ?thesis using assms \x \ carrier\ B\<^sub>Y basis_in_subset_carrier[of "insert x Y" 
B\<^sub>Y] basis_in_supI[where \ = "insert x Y" and \' = Y and B = B\<^sub>Y] by auto qed show "rank_of (insert x Y) = rank_of Y" proof - have "rank_of (insert x Y) = card B\<^sub>Y" using assms \x \ carrier\ \basis_in (insert x Y) B\<^sub>Y\ basis_in_subset_carrier using basis_in_iff_rank_of[where \ = "insert x Y" and X = B\<^sub>Y] by auto also have "\ = rank_of Y" using assms \x \ carrier\ \basis_in Y B\<^sub>Y\ basis_in_subset_carrier using basis_in_iff_rank_of[where \ = Y and X = B\<^sub>Y] by auto finally show ?thesis . qed qed qed lemma cl_insert_absorb: assumes "X \ carrier" assumes "x \ cl X" shows "cl (insert x X) = cl X" proof show "cl (insert x X) \ cl X" proof (standard, goal_cases elem) case (elem y) then have *: "x \ carrier" "y \ carrier" using assms cl_subset_carrier by auto have "rank_of (insert y X) = rank_of (insert y (insert x X))" proof - have "rank_of (insert y X) \ rank_of (insert y (insert x X))" using assms * by (auto intro: rank_of_mono) moreover have "rank_of (insert y (insert x X)) = rank_of (insert y X)" proof - have "insert y (insert x X) = insert x (insert y X)" by auto then have "rank_of (insert y (insert x X)) = rank_of (insert x (insert y X))" by auto also have "\ = rank_of (insert y X)" proof - have "cl X \ cl (insert y X)" by (rule cl_mono) (auto simp add: assms \y \ carrier\) then have "x \ cl (insert y X)" using assms by auto then show ?thesis unfolding cl_def by auto qed finally show ?thesis . qed ultimately show ?thesis by auto qed also have "\ = rank_of (insert x X)" using elem using cl_rank_of by auto also have "\ = rank_of X" using assms cl_rank_of by auto finally show "y \ cl X" using * by (auto intro: clI) qed next have "insert x X \ carrier" using assms cl_subset_carrier by auto moreover have "X \ insert x X" using assms by auto ultimately show "cl X \ cl (insert x X)" using assms cl_subset_carrier cl_mono by auto qed lemma cl_cl_absorb: assumes "X \ carrier" shows "cl (cl X) = cl X" proof show "cl (cl X) \ cl X" proof (standard, goal_cases elem) case (elem x) then have "x \ carrier" using cl_subset_carrier by auto then show ?case proof (rule clI) have "rank_of (insert x X) \ rank_of X" using assms \x \ carrier\ rank_of_mono[of X "insert x X"] by auto moreover have "rank_of (insert x X) \ rank_of X" proof - have "rank_of (insert x X) \ rank_of (insert x (cl X))" using assms \x \ carrier\ cl_subset_carrier cl_subset[of X] rank_of_mono[of "insert x X" "insert x (cl X)"] by auto also have "\ = rank_of (cl X)" using elem cl_rank_of by auto also have "\ = rank_of (X \ (cl X - X))" using Diff_partition[OF cl_subset[OF assms]] by auto also have "\ = rank_of X" using \X \ carrier\ proof (rule rank_of_Un_absorbI) show "cl X - X \ carrier" using assms cl_subset_carrier by auto next fix y assume "y \ cl X - X - X" then show "rank_of (insert y X) = rank_of X" unfolding cl_def by auto qed finally show ?thesis . 
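(* This closes the bound rank_of (insert x X) <= rank_of X; combined with the monotonicity bound obtained first, the two ranks coincide, so x lies in cl X. This is the non-trivial inclusion of the idempotence law cl (cl X) = cl X. *)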
qed ultimately show "rank_of (insert x X) = rank_of X" by auto qed qed next show "cl X \ cl (cl X)" using cl_subset[OF cl_subset_carrier] by auto qed lemma cl_augment: assumes "X \ carrier" assumes "x \ carrier" assumes "y \ cl (insert x X) - cl X" shows "x \ cl (insert y X)" using \x \ carrier\ proof (rule clI) have "rank_of (insert y X) \ rank_of (insert x (insert y X))" using assms cl_subset_carrier by (auto intro: rank_of_mono) moreover have "rank_of (insert x (insert y X)) \ rank_of (insert y X)" proof - have "rank_of (insert x (insert y X)) = rank_of (insert y (insert x X))" proof - have "insert x (insert y X) = insert y (insert x X)" by auto then show ?thesis by auto qed also have "rank_of (insert y (insert x X)) = rank_of (insert x X)" using assms cl_def by auto also have "\ \ Suc (rank_of X)" using assms cl_subset_carrier by (auto intro: rank_of_insert_le) also have "\ = rank_of (insert y X)" proof - have "rank_of (insert y X) \ Suc (rank_of X)" using assms cl_subset_carrier by (auto intro: rank_of_insert_le) moreover have "rank_of (insert y X) \ rank_of X" using assms cl_def by auto moreover have "rank_of X \ rank_of (insert y X)" using assms cl_subset_carrier by (auto intro: rank_of_mono) ultimately show ?thesis by auto qed finally show ?thesis . qed ultimately show "rank_of (insert x (insert y X)) = rank_of (insert y X)" by auto qed lemma clI_insert: assumes "x \ carrier" assumes "indep X" assumes "\ indep (insert x X)" shows "x \ cl X" using \x \ carrier\ proof (rule clI) have *: "X \ carrier" using assms indep_subset_carrier by auto then have **: "insert x X \ carrier" using assms by auto have "indep_in (insert x X) X" using assms by (auto intro: indep_inI) then obtain B where B: "basis_in (insert x X) B" "X \ B" using assms indep_in_iff_subset_basis_in[OF **] by auto have "x \ B" proof (rule ccontr, goal_cases False) case False then have "indep_in (insert x X) (insert x X)" using B indep_in_subset[OF ** basis_in_indep_in[OF **]] by auto then show ?case using assms indep_in_indep by auto qed have "basis_in X B" using * proof (rule basis_inI, goal_cases indep max_indep) case indep show ?case proof (rule indep_in_supI[where \ = "insert x X"]) show "B \ X" using B basis_in_subset_carrier[OF **] \x \ B\ by auto next show "indep_in (insert x X) B" using basis_in_indep_in[OF ** B(1)] . qed auto next case (max_indep y) then have "\ indep_in (insert x X) (insert y B)" using B basis_in_max_indep_in[OF **] by auto then show ?case by (auto intro: indep_in_subI_subset) qed then show "rank_of (insert x X) = rank_of X" using B rank_of_eq_card_basis_in[OF *] rank_of_eq_card_basis_in[OF **] by auto qed lemma indep_in_carrier [simp]: "indep_in carrier = indep" using indep_subset_carrier by (auto simp: indep_in_def fun_eq_iff) context fixes I defines "I \ (\X. X \ carrier \ (\x\X. x \ cl (X - {x})))" begin lemma I_mono: "I Y" if "Y \ X" "I X" for X Y :: "'a set" proof - have "\x\Y. 
x \ cl (Y - {x})" proof (intro ballI) fix x assume x: "x \ Y" with that have "cl (Y - {x}) \ cl (X - {x})" by (intro cl_mono) (auto simp: I_def) with that and x show "x \ cl (Y - {x})" by (auto simp: I_def) qed with that show ?thesis by (auto simp: I_def) qed lemma clI': assumes "I X" "x \ carrier" "\I (insert x X)" shows "x \ cl X" proof - from assms have x: "x \ X" by (auto simp: insert_absorb) from assms obtain y where y: "y \ insert x X" "y \ cl (insert x X - {y})" by (force simp: I_def) show "x \ cl X" proof (cases "x = y") case True thus ?thesis using assms x y by (auto simp: I_def) next case False have "y \ cl (insert x X - {y})" by fact also from False have "insert x X - {y} = insert x (X - {y})" by auto finally have "y \ cl (insert x (X - {y})) - cl (X - {y})" using assms False y unfolding I_def by blast hence "x \ cl (insert y (X - {y}))" using cl_augment[of "X - {y}" x y] assms False y by (auto simp: I_def) also from y and False have "insert y (X - {y}) = X" by auto finally show ?thesis . qed qed lemma matroid_I: "matroid carrier I" proof (unfold_locales, goal_cases) show "finite carrier" by (rule carrier_finite) next case (4 X Y) have "\x\Y. x \ cl (Y - {x})" proof (intro ballI) fix x assume x: "x \ Y" with 4 have "cl (Y - {x}) \ cl (X - {x})" by (intro cl_mono) (auto simp: I_def) with 4 and x show "x \ cl (Y - {x})" by (auto simp: I_def) qed with 4 show ?case by (auto simp: I_def) next case (5 X Y) have "~(\X Y. I X \ I Y \ card X < card Y \ (\x\Y-X. \I (insert x X)))" proof assume *: "\X Y. I X \ I Y \ card X < card Y \ (\x\Y-X. \I (insert x X))" (is "\X Y. ?P X Y") define n where "n = Max ((\(X, Y). card (X \ Y)) ` {(X, Y). ?P X Y})" have "{(X, Y). ?P X Y} \ Pow carrier \ Pow carrier" by (auto simp: I_def) hence finite: "finite {(X, Y). ?P X Y}" by (rule finite_subset) (insert carrier_finite, auto) hence "n \ ((\(X, Y). card (X \ Y)) ` {(X, Y). ?P X Y})" unfolding n_def using * by (intro Max_in finite_imageI) auto then obtain X Y where XY: "?P X Y" "n = card (X \ Y)" by auto hence finite': "finite X" "finite Y" using finite_subset[OF _ carrier_finite] XY by (auto simp: I_def) from XY finite' have "~(Y \ X)" using card_mono[of X Y] by auto then obtain y where y: "y \ Y - X" by blast have False proof (cases "X \ cl (Y - {y})") case True from y XY have [simp]: "y \ carrier" by (auto simp: I_def) assume "X \ cl (Y - {y})" hence "cl X \ cl (cl (Y - {y}))" by (intro cl_mono cl_subset_carrier) also have "\ = cl (Y - {y})" using XY by (intro cl_cl_absorb) (auto simp: I_def) finally have "cl X \ cl (Y - {y})" . moreover have "y \ cl (Y - {y})" using y I_def XY(1) by blast ultimately have "y \ cl X" by blast thus False unfolding I_def using XY y clI' \y \ carrier\ by blast next case False with y XY have [simp]: "y \ carrier" by (auto simp: I_def) assume "\(X \ cl (Y - {y}))" then obtain t where t: "t \ X" "t \ cl (Y - {y})" by auto with XY have [simp]: "t \ carrier" by (auto simp: I_def) have "t \ X - Y" using t y clI[of t "Y - {y}"] by (cases "t = y") (auto simp: insert_absorb) moreover have "I (Y - {y})" using XY(1) I_mono[of "Y - {y}" Y] by blast ultimately have *: "I (insert t (Y - {y}))" using clI'[of "Y - {y}" t] t by auto from XY have "finite Y" by (intro finite_subset[OF _ carrier_finite]) (auto simp: I_def) moreover from y have "Y \ {}" by auto ultimately have [simp]: "card (insert t (Y - {y})) = card Y" using \t \ X - Y\ y by (simp add: Suc_diff_Suc card_gt_0_iff) have "\x\Y - X. 
I (insert x X)" proof (rule ccontr) assume "\?thesis" hence "?P X (insert t (Y - {y}))" using XY * \t \ X - Y\ by auto hence "card (X \ insert t (Y - {y})) \ n" unfolding n_def using finite by (intro Max_ge) auto also have "X \ insert t (Y - {y}) = insert t ((X \ Y) - {y})" using y \t \ X - Y\ by blast also have "card \ = Suc (card (X \ Y))" using y \t \ X - Y\ \finite Y\ by (simp add: card.insert_remove) finally show False using XY by simp qed with XY show False by blast qed thus False . qed with 5 show ?case by auto qed (auto simp: I_def) end definition cl_in where "cl_in \ X = matroid.cl \ (indep_in \) X" lemma cl_eq_cl_in: assumes "X \ carrier" shows "cl X = cl_in carrier X" proof - interpret \: matroid carrier "indep_in carrier" by (intro matroid_subset) auto have "cl X = {x \ carrier. rank_of (insert x X) = rank_of X}" unfolding cl_def by auto also have "\ = {x \ carrier. \.rank_of (insert x X) = \.rank_of X}" using rank_of_sub_cong[of carrier] assms by auto also have "\ = cl_in carrier X" unfolding cl_in_def \.cl_def by auto finally show ?thesis . qed context fixes \ assumes *: "\ \ carrier" begin interpretation \: matroid \ "indep_in \" using * by auto lemma cl_inI_aux: "x \ \.cl X \ x \ cl_in \ X" unfolding cl_in_def by auto lemma cl_inD_aux: "x \ cl_in \ X \ x \ \.cl X" unfolding cl_in_def by auto lemma cl_inI: assumes "X \ \" assumes "x \ \" assumes "rank_of (insert x X) = rank_of X" shows "x \ cl_in \ X" proof - have "\.rank_of (insert x X) = rank_of (insert x X)" "\.rank_of X = rank_of X" using assms rank_of_sub_cong[OF *] by auto then show ?thesis unfolding cl_in_def using assms by (auto intro: \.clI) qed lemma cl_in_altdef: assumes "X \ \" shows "cl_in \ X = \{Y \ Pow \. X \ Y \ rank_of Y = rank_of X}" unfolding cl_in_def proof (safe, goal_cases LTR RTL) case (LTR x) then have "x \ \{Y \ Pow \. X \ Y \ \.rank_of Y = \.rank_of X}" using \.cl_altdef[OF assms] by auto then obtain Y where Y: "x \ Y" "Y \ Pow \" "X \ Y" "\.rank_of Y = \.rank_of X" by auto then show ?case using rank_of_sub_cong[OF *] by auto next case (RTL x Y) then have "x \ \{Y \ Pow \. X \ Y \ \.rank_of Y = \.rank_of X}" using rank_of_sub_cong[OF *, of X] rank_of_sub_cong[OF *, of Y] by auto then show ?case using \.cl_altdef[OF assms] by auto qed lemma cl_in_subset_carrier: "cl_in \ X \ \" using \.cl_subset_carrier unfolding cl_in_def . lemma cl_in_rank_of: assumes "X \ \" assumes "x \ cl_in \ X" shows "rank_of (insert x X) = rank_of X" proof - have "\.rank_of (insert x X) = \.rank_of X" using assms \.cl_rank_of unfolding cl_in_def by auto moreover have "\.rank_of (insert x X) = rank_of (insert x X)" using assms rank_of_sub_cong[OF *, of "insert x X"] cl_in_subset_carrier by auto moreover have "\.rank_of X = rank_of X" using assms rank_of_sub_cong[OF *] by auto ultimately show ?thesis by auto qed lemmas cl_inD = cl_in_rank_of cl_in_subset_carrier lemma cl_in_subset: assumes "X \ \" shows "X \ cl_in \ X" using \.cl_subset[OF assms] unfolding cl_in_def . lemma cl_in_mono: assumes "X \ Y" assumes "Y \ \" shows "cl_in \ X \ cl_in \ Y" using \.cl_mono[OF assms] unfolding cl_in_def . 
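(* The remaining lemmas of this context transfer the insert-absorption and
   augmentation properties established above for cl to the relativised closure
   operator cl_in, via the interpretation of the matroid restricted to the
   given sub-carrier. *)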
lemma cl_in_insert_absorb: assumes "X \ \" assumes "x \ cl_in \ X" shows "cl_in \ (insert x X) = cl_in \ X" using assms \.cl_insert_absorb unfolding cl_in_def by auto lemma cl_in_augment: assumes "X \ \" assumes "x \ \" assumes "y \ cl_in \ (insert x X) - cl_in \ X" shows "x \ cl_in \ (insert y X)" using assms \.cl_augment unfolding cl_in_def by auto lemmas cl_inI_insert = cl_inI_aux[OF \.clI_insert] end lemma cl_in_subI: assumes "X \ \'" "\' \ \" "\ \ carrier" shows "cl_in \' X \ cl_in \ X" proof (safe, goal_cases elem) case (elem x) then have "x \ \'" "rank_of (insert x X) = rank_of X" using assms cl_inD[where \ = \' and X = X] by auto then show "x \ cl_in \ X" using assms by (auto intro: cl_inI) qed context fixes \ assumes *: "\ \ carrier" begin interpretation \: matroid \ "indep_in \" using * by auto lemma cl_in_sub_cong: assumes "X \ \'" "\' \ \" shows "\.cl_in \' X = cl_in \' X" proof (safe, goal_cases LTR RTL) case (LTR x) then have "x \ \'" "\.rank_of (insert x X) = \.rank_of X" using assms \.cl_in_rank_of[where \ = \' and X = X and x = x] \.cl_in_subset_carrier[where \ = \'] by auto moreover have "\.rank_of X = rank_of X" using assms rank_of_sub_cong[OF *] by auto moreover have "\.rank_of (insert x X) = rank_of (insert x X)" using assms rank_of_sub_cong[OF *, of "insert x X"] \x \ \'\ by auto ultimately show ?case using assms * by (auto intro: cl_inI) next case (RTL x) then have "x \ \'" "rank_of (insert x X) = rank_of X" using * assms cl_inD[where \ = \' and X = X] by auto moreover have "\.rank_of X = rank_of X" using assms rank_of_sub_cong[OF *] by auto moreover have "\.rank_of (insert x X) = rank_of (insert x X)" using assms rank_of_sub_cong[OF *, of "insert x X"] \x \ \'\ by auto ultimately show ?case using assms by (auto intro: \.cl_inI) qed end end end \ No newline at end of file diff --git a/thys/Ordinal_Partitions/Library_Additions.thy b/thys/Ordinal_Partitions/Library_Additions.thy --- a/thys/Ordinal_Partitions/Library_Additions.thy +++ b/thys/Ordinal_Partitions/Library_Additions.thy @@ -1,338 +1,338 @@ section \Library additions\ theory Library_Additions imports "ZFC_in_HOL.Ordinal_Exp" "HOL-Library.Ramsey" "Nash_Williams.Nash_Williams" begin lemma finite_enumerate_Diff_singleton: fixes S :: "'a::wellorder set" assumes "finite S" and i: "i < card S" "enumerate S i < x" shows "enumerate (S - {x}) i = enumerate S i" using i proof (induction i) case 0 have "(LEAST i. i \ S \ i\x) = (LEAST i. i \ S)" proof (rule Least_equality) have "\t. t \ S \ t\x" using 0 \finite S\ finite_enumerate_in_set by blast then show "(LEAST i. i \ S) \ S \ (LEAST i. i \ S) \ x" by (metis "0.prems"(2) LeastI enumerate_0 not_less_Least) qed (simp add: Least_le) then show ?case by (auto simp: enumerate_0) next case (Suc i) then have x: "enumerate S i < x" by (meson enumerate_step finite_enumerate_step less_trans) have cardSx: "Suc i < card (S - {x})" and "i < card S" - using Suc \finite S\ card_Diff_singleton_if finite_enumerate_Ex by fastforce+ + using Suc \finite S\ card_Diff_singleton_if[of S] finite_enumerate_Ex by fastforce+ have "(LEAST s. s \ S \ s\x \ enumerate (S - {x}) i < s) = (LEAST s. s \ S \ enumerate S i < s)" (is "_ = ?r") proof (intro Least_equality conjI) show "?r \ S" by (metis (lifting) LeastI Suc.prems(1) assms(1) finite_enumerate_in_set finite_enumerate_step) show "?r \ x" using Suc.prems not_less_Least [of _ "\t. 
t \ S \ enumerate S i < t"] \finite S\ finite_enumerate_in_set finite_enumerate_step by blast show "enumerate (S - {x}) i < ?r" by (metis (full_types) Suc.IH Suc.prems(1) \i < card S\ enumerate_Suc'' enumerate_step finite_enumerate_Suc'' finite_enumerate_step x) show "\y. y \ S \ y \ x \ enumerate (S - {x}) i < y \ ?r \ y" by (simp add: Least_le Suc.IH \i < card S\ x) qed then show ?case using Suc assms by (simp add: finite_enumerate_Suc'' cardSx) qed lemma hd_lex: "\hd ms < hd ns; length ms = length ns; ns \ []\ \ (ms, ns) \ lex less_than" by (metis hd_Cons_tl length_0_conv less_than_iff lexord_cons_cons lexord_lex) lemma sorted_hd_le: assumes "sorted xs" "x \ list.set xs" shows "hd xs \ x" using assms by (induction xs) (auto simp: less_imp_le) lemma sorted_le_last: assumes "sorted xs" "x \ list.set xs" shows "x \ last xs" using assms by (induction xs) (auto simp: less_imp_le) lemma hd_list_of: assumes "finite A" "A \ {}" shows "hd (sorted_list_of_set A) = Min A" proof (rule antisym) have "Min A \ A" by (simp add: assms) then show "hd (sorted_list_of_set A) \ Min A" by (simp add: sorted_hd_le \finite A\) next show "Min A \ hd (sorted_list_of_set A)" by (metis Min_le assms hd_in_set set_sorted_list_of_set sorted_list_of_set_eq_Nil_iff) qed lemma sorted_hd_le_last: assumes "sorted xs" "xs \ []" shows "hd xs \ last xs" using assms by (simp add: sorted_hd_le) lemma sorted_list_of_set_set_of [simp]: "strict_sorted l \ sorted_list_of_set (list.set l) = l" by (simp add: strict_sorted_equal) lemma range_strict_mono_ext: fixes f::"nat \ 'a::linorder" assumes eq: "range f = range g" and sm: "strict_mono f" "strict_mono g" shows "f = g" proof fix n show "f n = g n" proof (induction n rule: less_induct) case (less n) obtain x y where xy: "f n = g y" "f x = g n" by (metis eq imageE rangeI) then have "n = y" by (metis (no_types) less.IH neq_iff sm strict_mono_less xy) then show ?case using xy by auto qed qed subsection \Other material\ definition strict_mono_sets :: "['a::order set, 'a::order \ 'b::order set] \ bool" where "strict_mono_sets A f \ \x\A. \y\A. x < y \ less_sets (f x) (f y)" lemma strict_mono_setsD: assumes "strict_mono_sets A f" "x < y" "x \ A" "y \ A" shows "less_sets (f x) (f y)" using assms by (auto simp: strict_mono_sets_def) lemma strict_mono_on_o: "\strict_mono_on r A; strict_mono_on s B; s ` B \ A\ \ strict_mono_on (r \ s) B" by (auto simp: image_subset_iff strict_mono_on_def) lemma strict_mono_sets_imp_disjoint: fixes A :: "'a::linorder set" assumes "strict_mono_sets A f" shows "pairwise (\x y. disjnt (f x) (f y)) A" using assms unfolding strict_mono_sets_def pairwise_def by (meson antisym_conv3 disjnt_sym less_sets_imp_disjnt) lemma strict_mono_sets_subset: assumes "strict_mono_sets B f" "A \ B" shows "strict_mono_sets A f" using assms by (auto simp: strict_mono_sets_def) lemma strict_mono_less_sets_Min: assumes "strict_mono_sets I f" "finite I" "I \ {}" shows "less_sets (f (Min I)) (\ (f ` (I - {Min I})))" using assms by (simp add: strict_mono_sets_def less_sets_UN2 dual_order.strict_iff_order) lemma pair_less_iff1 [simp]: "((x,y), (x,z)) \ pair_less \ y" "\\{}" "\A. A \ \ \ infinite A" and "\A B. 
\A \ \; B \ \\ \ A \ B \ \" shows "infinite (\\)" by (simp add: assms finite_Inf_in) lemma atLeast_less_sets: "\less_sets A {x}; B \ {x..}\ \ less_sets A B" by (force simp: less_sets_def subset_iff) subsection \The list-of function\ lemma sorted_list_of_set_insert_remove_cons: assumes "finite A" "less_sets {a} A" shows "sorted_list_of_set (insert a A) = a # sorted_list_of_set A" proof - have "strict_sorted (a # sorted_list_of_set A)" using assms less_setsD by auto moreover have "list.set (a # sorted_list_of_set A) = insert a A" using assms by force moreover have "length (a # sorted_list_of_set A) = card (insert a A)" using assms card_insert_if less_setsD by fastforce ultimately show ?thesis by (metis \finite A\ finite_insert sorted_list_of_set_unique) qed lemma sorted_list_of_set_Un: assumes AB: "less_sets A B" and fin: "finite A" "finite B" shows "sorted_list_of_set (A \ B) = sorted_list_of_set A @ sorted_list_of_set B" proof - have "strict_sorted (sorted_list_of_set A @ sorted_list_of_set B)" using AB unfolding less_sets_def by (metis fin set_sorted_list_of_set sorted_wrt_append strict_sorted_list_of_set) moreover have "card A + card B = card (A \ B)" using less_sets_imp_disjnt [OF AB] by (simp add: assms card_Un_disjoint disjnt_def) ultimately show ?thesis by (simp add: assms strict_sorted_equal) qed lemma sorted_list_of_set_UN_lessThan: fixes k::nat assumes sm: "strict_mono_sets {..i. i < k \ finite (A i)" shows "sorted_list_of_set (\i A) (sorted_list_of_set {.. (A ` {.. (A ` {.. (A ` {.. A k)" by (simp add: Un_commute lessThan_Suc) also have "\ = sorted_list_of_set (\ (A ` {.. = concat (map (sorted_list_of_set \ A) (sorted_list_of_set {.. = concat (map (sorted_list_of_set \ A) (sorted_list_of_set {..i. i \ k \ finite (A i)" shows "sorted_list_of_set (\i\k. A i) = concat (map (sorted_list_of_set \ A) (sorted_list_of_set {..k}))" by (metis assms lessThan_Suc_atMost less_Suc_eq_le sorted_list_of_set_UN_lessThan) subsection \Monotonic enumeration of a countably infinite set\ abbreviation "enum \ enumerate" text \Could be generalised to infinite countable sets of any type\ lemma nat_infinite_iff: fixes N :: "nat set" shows "infinite N \ (\f::nat\nat. N = range f \ strict_mono f)" proof safe assume "infinite N" then show "\f. N = range (f::nat \ nat) \ strict_mono f" by (metis bij_betw_imp_surj_on bij_enumerate enumerate_mono strict_mono_def) next fix f :: "nat \ nat" assume "strict_mono f" and "N = range f" and "finite (range f)" then show False using range_inj_infinite strict_mono_imp_inj_on by blast qed lemma enum_works: fixes N :: "nat set" assumes "infinite N" shows "N = range (enum N) \ strict_mono (enum N)" by (metis assms bij_betw_imp_surj_on bij_enumerate enumerate_mono strict_monoI) lemma range_enum: "range (enum N) = N" and strict_mono_enum: "strict_mono (enum N)" if "infinite N" for N :: "nat set" using enum_works [OF that] by auto lemma enum_0_eq_Inf: fixes N :: "nat set" assumes "infinite N" shows "enum N 0 = Inf N" proof - have "enum N 0 \ N" using assms range_enum by auto moreover have "\x. x \ N \ enum N 0 \ x" by (metis (mono_tags, opaque_lifting) assms imageE le0 less_mono_imp_le_mono range_enum strict_monoD strict_mono_enum) ultimately show ?thesis by (metis cInf_eq_minimum) qed lemma enum_works_finite: fixes N :: "nat set" assumes "finite N" shows "N = enum N ` {.. strict_mono_on (enum N) {.. 
N" "finite N" obtains i where "i < card N" "x = enum N i" by (metis \x \ N\ \finite N\ enum_works_finite imageE lessThan_iff) lemma enum_0_eq_Inf_finite: fixes N :: "nat set" assumes "finite N" "N \ {}" shows "enum N 0 = Inf N" proof - have "enum N 0 \ N" by (metis Nat.neq0_conv assms empty_is_image enum_works_finite image_eqI lessThan_empty_iff lessThan_iff) moreover have "enum N 0 \ x" if "x \ N" for x proof - obtain i where "i < card N" "x = enum N i" by (metis \x \ N\ \finite N\ enum_obtain_index_finite) with assms show ?thesis by (metis Nat.neq0_conv finite_enumerate_mono less_or_eq_imp_le) qed ultimately show ?thesis by (metis cInf_eq_minimum) qed lemma greaterThan_less_enum: fixes N :: "nat set" assumes "N \ {x<..}" "infinite N" shows "x < enum N i" using assms range_enum by fastforce lemma atLeast_le_enum: fixes N :: "nat set" assumes "N \ {x..}" "infinite N" shows "x \ enum N i" using assms range_enum by fastforce lemma less_sets_empty1 [simp]: "less_sets {} A" and less_sets_empty2 [simp]: "less_sets A {}" by (simp_all add: less_sets_def) lemma less_sets_singleton1 [simp]: "less_sets {a} A \ (\x\A. a < x)" and less_sets_singleton2 [simp]: "less_sets A {a} \ (\x\A. x < a)" by (simp_all add: less_sets_def) lemma less_sets_atMost [simp]: "less_sets {..a} A \ (\x\A. a < x)" and less_sets_alLeast [simp]: "less_sets A {a..} \ (\x\A. x < a)" by (auto simp: less_sets_def) lemma less_sets_imp_strict_mono_sets: assumes "\i. less_sets (A i) (A (Suc i))" "\i. i>0 \ A i \ {}" shows "strict_mono_sets UNIV A" proof (clarsimp simp: strict_mono_sets_def) fix i j::nat assume "i < j" then show "less_sets (A i) (A j)" proof (induction "j-i" arbitrary: i j) case (Suc x) then show ?case by (metis Suc_diff_Suc Suc_inject Suc_mono assms less_Suc_eq less_sets_trans zero_less_Suc) qed auto qed lemma less_sets_Suc_Max: assumes "finite A" shows "less_sets A {Suc (Max A)..}" proof (cases "A = {}") case False then show ?thesis by (simp add: assms less_Suc_eq_le) qed auto lemma infinite_nat_greaterThan: fixes m::nat assumes "infinite N" shows "infinite (N \ {m<..})" proof - have "N \ -{m<..} \ (N \ {m<..})" by blast moreover have "finite (-{m<..})" by simp ultimately show ?thesis using assms finite_subset by blast qed end diff --git a/thys/Refine_Imperative_HOL/Sepref_Rules.thy b/thys/Refine_Imperative_HOL/Sepref_Rules.thy --- a/thys/Refine_Imperative_HOL/Sepref_Rules.thy +++ b/thys/Refine_Imperative_HOL/Sepref_Rules.thy @@ -1,1745 +1,1745 @@ section \Refinement Rule Management\ theory Sepref_Rules imports Sepref_Basic Sepref_Constraints begin text \This theory contains tools for managing the refinement rules used by Sepref\ text \The theories are based on uncurried functions, i.e., every function has type @{typ "'a\'b"}, where @{typ 'a} is the tuple of parameters, or unit if there are none. \ subsection \Assertion Interface Binding\ text \Binding of interface types to refinement assertions\ definition intf_of_assn :: "('a \ _ \ assn) \ 'b itself \ bool" where [simp]: "intf_of_assn a b = True" lemma intf_of_assnI: "intf_of_assn R TYPE('a)" by simp named_theorems_rev intf_of_assn \Links between refinement assertions and interface types\ lemma intf_of_assn_fallback: "intf_of_assn (R :: 'a \ _ \ assn) TYPE('a)" by simp subsection \Function Refinement with Precondition\ definition fref :: "('c \ bool) \ ('a \ 'c) set \ ('b \ 'd) set \ (('a \ 'b) \ ('c \ 'd)) set" ("[_]\<^sub>f _ \ _" [0,60,60] 60) where "[P]\<^sub>f R \ S \ {(f,g). \x y. 
P y \ (x,y)\R \ (f x, g y)\S}" abbreviation freft ("_ \\<^sub>f _" [60,60] 60) where "R \\<^sub>f S \ ([\_. True]\<^sub>f R \ S)" lemma rel2p_fref[rel2p]: "rel2p (fref P R S) = (\f g. (\x y. P y \ rel2p R x y \ rel2p S (f x) (g y)))" by (auto simp: fref_def rel2p_def[abs_def]) lemma fref_cons: assumes "(f,g) \ [P]\<^sub>f R \ S" assumes "\c a. (c,a)\R' \ Q a \ P a" assumes "R' \ R" assumes "S \ S'" shows "(f,g) \ [Q]\<^sub>f R' \ S'" using assms unfolding fref_def by fastforce lemmas fref_cons' = fref_cons[OF _ _ order_refl order_refl] lemma frefI[intro?]: assumes "\x y. \P y; (x,y)\R\ \ (f x, g y)\S" shows "(f,g)\fref P R S" using assms unfolding fref_def by auto lemma fref_ncI: "(f,g)\R\S \ (f,g)\R\\<^sub>fS" apply (rule frefI) apply parametricity done lemma frefD: assumes "(f,g)\fref P R S" shows "\P y; (x,y)\R\ \ (f x, g y)\S" using assms unfolding fref_def by auto lemma fref_ncD: "(f,g)\R\\<^sub>fS \ (f,g)\R\S" apply (rule fun_relI) apply (drule frefD) apply simp apply assumption+ done lemma fref_compI: "fref P R1 R2 O fref Q S1 S2 \ fref (\x. Q x \ (\y. (y,x)\S1 \ P y)) (R1 O S1) (R2 O S2)" unfolding fref_def apply (auto) apply blast done lemma fref_compI': "\ (f,g)\fref P R1 R2; (g,h)\fref Q S1 S2 \ \ (f,h) \ fref (\x. Q x \ (\y. (y,x)\S1 \ P y)) (R1 O S1) (R2 O S2)" using fref_compI[of P R1 R2 Q S1 S2] by auto lemma fref_unit_conv: "(\_. c, \_. a) \ fref P unit_rel S \ (P () \ (c,a)\S)" by (auto simp: fref_def) lemma fref_uncurry_conv: "(uncurry c, uncurry a) \ fref P (R1\\<^sub>rR2) S \ (\x1 y1 x2 y2. P (y1,y2) \ (x1,y1)\R1 \ (x2,y2)\R2 \ (c x1 x2, a y1 y2) \ S)" by (auto simp: fref_def) lemma fref_mono: "\ \x. P' x \ P x; R' \ R; S \ S' \ \ fref P R S \ fref P' R' S'" unfolding fref_def by auto blast lemma fref_composeI: assumes FR1: "(f,g)\fref P R1 R2" assumes FR2: "(g,h)\fref Q S1 S2" assumes C1: "\x. P' x \ Q x" assumes C2: "\x y. \P' x; (y,x)\S1\ \ P y" assumes R1: "R' \ R1 O S1" assumes R2: "R2 O S2 \ S'" assumes FH: "f'=f" "h'=h" shows "(f',h') \ fref P' R' S'" unfolding FH apply (rule subsetD[OF fref_mono fref_compI'[OF FR1 FR2]]) using C1 C2 apply blast using R1 apply blast using R2 apply blast done lemma fref_triv: "A\Id \ (f,f)\[P]\<^sub>f A \ Id" by (auto simp: fref_def) subsection \Heap-Function Refinement\ text \ The following relates a heap-function with a pure function. It contains a precondition, a refinement assertion for the arguments before and after execution, and a refinement relation for the result. \ (* TODO: We only use this with keep/destroy information, so we could model the parameter relations as such (('a\'ai \ assn) \ bool) *) definition hfref :: " ('a \ bool) \ (('a \ 'ai \ assn) \ ('a \ 'ai \ assn)) \ ('b \ 'bi \ assn) \ (('ai \ 'bi Heap) \ ('a\'b nres)) set" ("[_]\<^sub>a _ \ _" [0,60,60] 60) where "[P]\<^sub>a RS \ T \ { (f,g) . \c a. P a \ hn_refine (fst RS a c) (f c) (snd RS a c) T (g a)}" abbreviation hfreft ("_ \\<^sub>a _" [60,60] 60) where "RS \\<^sub>a T \ ([\_. True]\<^sub>a RS \ T)" lemma hfrefI[intro?]: assumes "\c a. P a \ hn_refine (fst RS a c) (f c) (snd RS a c) T (g a)" shows "(f,g)\hfref P RS T" using assms unfolding hfref_def by blast lemma hfrefD: assumes "(f,g)\hfref P RS T" shows "\c a. P a \ hn_refine (fst RS a c) (f c) (snd RS a c) T (g a)" using assms unfolding hfref_def by blast lemma hfref_to_ASSERT_conv: "NO_MATCH (\_. True) P \ (a,b)\[P]\<^sub>a R \ S \ (a,\x. ASSERT (P x) \ b x) \ R \\<^sub>a S" unfolding hfref_def apply (clarsimp; safe; clarsimp?) 
apply (rule hn_refine_nofailI) apply (simp add: refine_pw_simps) subgoal for xc xa apply (drule spec[of _ xc]) apply (drule spec[of _ xa]) by simp done text \ A pair of argument refinement assertions can be created by the input assertion and the information whether the parameter is kept or destroyed by the function. \ primrec hf_pres :: "('a \ 'b \ assn) \ bool \ ('a \ 'b \ assn)\('a \ 'b \ assn)" where "hf_pres R True = (R,R)" | "hf_pres R False = (R,invalid_assn R)" abbreviation hfkeep :: "('a \ 'b \ assn) \ ('a \ 'b \ assn)\('a \ 'b \ assn)" ("(_\<^sup>k)" [1000] 999) where "R\<^sup>k \ hf_pres R True" abbreviation hfdrop :: "('a \ 'b \ assn) \ ('a \ 'b \ assn)\('a \ 'b \ assn)" ("(_\<^sup>d)" [1000] 999) where "R\<^sup>d \ hf_pres R False" abbreviation "hn_kede R kd \ hn_ctxt (snd (hf_pres R kd))" abbreviation "hn_keep R \ hn_kede R True" abbreviation "hn_dest R \ hn_kede R False" lemma keep_drop_sels[simp]: "fst (R\<^sup>k) = R" "snd (R\<^sup>k) = R" "fst (R\<^sup>d) = R" "snd (R\<^sup>d) = invalid_assn R" by auto lemma hf_pres_fst[simp]: "fst (hf_pres R k) = R" by (cases k) auto text \ The following operator combines multiple argument assertion-pairs to argument assertion-pairs for the product. It is required to state argument assertion-pairs for uncurried functions. \ definition hfprod :: " (('a \ 'b \ assn)\('a \ 'b \ assn)) \ (('c \ 'd \ assn)\('c \ 'd \ assn)) \ ((('a\'c) \ ('b \ 'd) \ assn) \ (('a\'c) \ ('b \ 'd) \ assn))" (infixl "*\<^sub>a" 65) where "RR *\<^sub>a SS \ (prod_assn (fst RR) (fst SS), prod_assn (snd RR) (snd SS))" lemma hfprod_fst_snd[simp]: "fst (A *\<^sub>a B) = prod_assn (fst A) (fst B)" "snd (A *\<^sub>a B) = prod_assn (snd A) (snd B)" unfolding hfprod_def by auto subsubsection \Conversion from fref to hfref\ (* TODO: Variant of import-param! Automate this! *) lemma fref_to_pure_hfref': assumes "(f,g) \ [P]\<^sub>f R\\S\nres_rel" assumes "\x. x\Domain R \ R\``Collect P \ f x = RETURN (f' x)" shows "(return o f', g) \ [P]\<^sub>a (pure R)\<^sup>k\pure S" apply (rule hfrefI) apply (rule hn_refineI) using assms apply ((sep_auto simp: fref_def pure_def pw_le_iff pw_nres_rel_iff refine_pw_simps eintros del: exI)) apply force done subsubsection \Conversion from hfref to hnr\ text \This section contains the lemmas. The ML code is further down. \ lemma hf2hnr: assumes "(f,g) \ [P]\<^sub>a R \ S" shows "\x xi. P x \ hn_refine (emp * hn_ctxt (fst R) x xi) (f$xi) (emp * hn_ctxt (snd R) x xi) S (g$x)" using assms unfolding hfref_def by (auto simp: hn_ctxt_def) (*lemma hf2hnr_new: assumes "(f,g) \ [P]\<^sub>a R \ S" shows "\x xi. (\h. h\fst R x xi \ P x) \ hn_refine (emp * hn_ctxt (fst R) x xi) (f xi) (emp * hn_ctxt (snd R) x xi) S (g$x)" using assms unfolding hfref_def by (auto simp: hn_ctxt_def intro: hn_refine_preI) *) (* Products that stem from currying are tagged by a special refinement relation *) definition [simp]: "to_hnr_prod \ prod_assn" lemma to_hnr_prod_fst_snd: "fst (A *\<^sub>a B) = to_hnr_prod (fst A) (fst B)" "snd (A *\<^sub>a B) = to_hnr_prod (snd A) (snd B)" unfolding hfprod_def by auto (* Warning: This lemma is carefully set up to be applicable as an unfold rule, for more than one level of uncurrying*) lemma hnr_uncurry_unfold: " (\x xi. P x \ hn_refine (\ * hn_ctxt (to_hnr_prod A B) x xi) (fi xi) (\' * hn_ctxt (to_hnr_prod A' B') x xi) R (f x)) \ (\b bi a ai. 
P (a,b) \ hn_refine (\ * hn_ctxt B b bi * hn_ctxt A a ai) (fi (ai,bi)) (\' * hn_ctxt B' b bi * hn_ctxt A' a ai) R (f (a,b)) )" by (auto simp: hn_ctxt_def prod_assn_def star_aci) lemma hnr_intro_dummy: "\x xi. P x \ hn_refine (\ x xi) (c xi) (\' x xi) R (a x) \ \x xi. P x \ hn_refine (emp*\ x xi) (c xi) (emp*\' x xi) R (a x)" by simp lemma hn_ctxt_ctxt_fix_conv: "hn_ctxt (hn_ctxt R) = hn_ctxt R" by (simp add: hn_ctxt_def[abs_def]) lemma uncurry_APP: "uncurry f$(a,b) = f$a$b" by auto (* TODO: Replace by more general rule. *) lemma norm_RETURN_o: "\f. (RETURN o f)$x = (RETURN$(f$x))" "\f. (RETURN oo f)$x$y = (RETURN$(f$x$y))" "\f. (RETURN ooo f)$x$y$z = (RETURN$(f$x$y$z))" "\f. (\x. RETURN ooo f x)$x$y$z$a = (RETURN$(f$x$y$z$a))" "\f. (\x y. RETURN ooo f x y)$x$y$z$a$b = (RETURN$(f$x$y$z$a$b))" by auto lemma norm_return_o: "\f. (return o f)$x = (return$(f$x))" "\f. (return oo f)$x$y = (return$(f$x$y))" "\f. (return ooo f)$x$y$z = (return$(f$x$y$z))" "\f. (\x. return ooo f x)$x$y$z$a = (return$(f$x$y$z$a))" "\f. (\x y. return ooo f x y)$x$y$z$a$b = (return$(f$x$y$z$a$b))" by auto lemma hn_val_unit_conv_emp[simp]: "hn_val unit_rel x y = emp" by (auto simp: hn_ctxt_def pure_def) subsubsection \Conversion from hnr to hfref\ text \This section contains the lemmas. The ML code is further down. \ abbreviation "id_assn \ pure Id" abbreviation "unit_assn \ id_assn :: unit \ _" lemma pure_unit_rel_eq_empty: "unit_assn x y = emp" by (auto simp: pure_def) lemma uc_hfprod_sel: "fst (A *\<^sub>a B) a c = (case (a,c) of ((a1,a2),(c1,c2)) \ fst A a1 c1 * fst B a2 c2)" "snd (A *\<^sub>a B) a c = (case (a,c) of ((a1,a2),(c1,c2)) \ snd A a1 c1 * snd B a2 c2)" unfolding hfprod_def prod_assn_def[abs_def] by auto subsubsection \Conversion from relation to fref\ text \This section contains the lemmas. The ML code is further down. \ definition "CURRY R \ { (f,g). (uncurry f, uncurry g) \ R }" lemma fref_param1: "R\S = fref (\_. True) R S" by (auto simp: fref_def fun_relD) lemma fref_nest: "fref P1 R1 (fref P2 R2 S) \ CURRY (fref (\(a,b). P1 a \ P2 b) (R1\\<^sub>rR2) S)" apply (rule eq_reflection) by (auto simp: fref_def CURRY_def) lemma in_CURRY_conv: "(f,g) \ CURRY R \ (uncurry f, uncurry g) \ R" unfolding CURRY_def by auto lemma uncurry0_APP[simp]: "uncurry0 c $ x = c" by auto lemma fref_param0I: "(c,a)\R \ (uncurry0 c, uncurry0 a) \ fref (\_. True) unit_rel R" by (auto simp: fref_def) subsubsection \Composition\ definition hr_comp :: "('b \ 'c \ assn) \ ('b \ 'a) set \ 'a \ 'c \ assn" \ \Compose refinement assertion with refinement relation\ where "hr_comp R1 R2 a c \ \\<^sub>Ab. R1 b c * \((b,a)\R2)" definition hrp_comp :: "('d \ 'b \ assn) \ ('d \ 'c \ assn) \ ('d \ 'a) set \ ('a \ 'b \ assn) \ ('a \ 'c \ assn)" \ \Compose argument assertion-pair with refinement relation\ where "hrp_comp RR' S \ (hr_comp (fst RR') S, hr_comp (snd RR') S) " lemma hr_compI: "(b,a)\R2 \ R1 b c \\<^sub>A hr_comp R1 R2 a c" unfolding hr_comp_def by sep_auto lemma hr_comp_Id1[simp]: "hr_comp (pure Id) R = pure R" unfolding hr_comp_def[abs_def] pure_def apply (intro ext ent_iffI) by sep_auto+ lemma hr_comp_Id2[simp]: "hr_comp R Id = R" unfolding hr_comp_def[abs_def] apply (intro ext ent_iffI) by sep_auto+ (*lemma hr_comp_invalid[simp]: "hr_comp (\a c. true) R a c = true * \(\b. (b,a)\R)" unfolding hr_comp_def[abs_def] apply (intro ext ent_iffI) apply sep_auto+ done*) lemma hr_comp_emp[simp]: "hr_comp (\a c. emp) R a c = \(\b. 
(b,a)\R)" unfolding hr_comp_def[abs_def] apply (intro ext ent_iffI) apply sep_auto+ done lemma hr_comp_prod_conv[simp]: "hr_comp (prod_assn Ra Rb) (Ra' \\<^sub>r Rb') = prod_assn (hr_comp Ra Ra') (hr_comp Rb Rb')" unfolding hr_comp_def[abs_def] prod_assn_def[abs_def] apply (intro ext ent_iffI) apply solve_entails apply clarsimp apply sep_auto apply clarsimp apply (intro ent_ex_preI) apply (rule ent_ex_postI) apply (sep_auto split: prod.splits) done lemma hr_comp_pure: "hr_comp (pure R) S = pure (R O S)" apply (intro ext) apply (rule ent_iffI) unfolding hr_comp_def[abs_def] apply (sep_auto simp: pure_def)+ done lemma hr_comp_is_pure[safe_constraint_rules]: "is_pure A \ is_pure (hr_comp A B)" by (auto simp: hr_comp_pure is_pure_conv) lemma hr_comp_the_pure: "is_pure A \ the_pure (hr_comp A B) = the_pure A O B" unfolding is_pure_conv by (clarsimp simp: hr_comp_pure) lemma rdomp_hrcomp_conv: "rdomp (hr_comp A R) x \ (\y. rdomp A y \ (y,x)\R)" by (auto simp: rdomp_def hr_comp_def) lemma hn_rel_compI: "\nofail a; (b,a)\\R2\nres_rel\ \ hn_rel R1 b c \\<^sub>A hn_rel (hr_comp R1 R2) a c" unfolding hr_comp_def hn_rel_def nres_rel_def apply (clarsimp intro!: ent_ex_preI) apply (drule (1) order_trans) apply (simp add: ret_le_down_conv) by sep_auto lemma hr_comp_precise[constraint_rules]: assumes [safe_constraint_rules]: "precise R" assumes SV: "single_valued S" shows "precise (hr_comp R S)" apply (rule preciseI) unfolding hr_comp_def apply clarsimp by (metis SV assms(1) preciseD single_valuedD) lemma hr_comp_assoc: "hr_comp (hr_comp R S) T = hr_comp R (S O T)" apply (intro ext) unfolding hr_comp_def apply (rule ent_iffI; clarsimp) apply sep_auto apply (rule ent_ex_preI; clarsimp) (* TODO: sep_auto/solve_entails is too eager splitting the subgoal here! *) apply sep_auto done lemma hnr_comp: assumes R: "\b1 c1. P b1 \ hn_refine (R1 b1 c1 * \) (c c1) (R1p b1 c1 * \') R (b b1)" assumes S: "\a1 b1. \Q a1; (b1,a1)\R1'\ \ (b b1,a a1)\\R'\nres_rel" assumes PQ: "\a1 b1. \Q a1; (b1,a1)\R1'\ \ P b1" assumes Q: "Q a1" shows "hn_refine (hr_comp R1 R1' a1 c1 * \) (c c1) (hr_comp R1p R1' a1 c1 * \') (hr_comp R R') (a a1)" unfolding hn_refine_alt proof clarsimp assume NF: "nofail (a a1)" show " > c c1 <\r. hn_rel (hr_comp R R') (a a1) r * (hr_comp R1p R1' a1 c1 * \')>\<^sub>t" apply (subst hr_comp_def) apply (clarsimp intro!: norm_pre_ex_rule) proof - fix b1 assume R1: "(b1, a1) \ R1'" from S R1 Q have R': "(b b1, a a1) \ \R'\nres_rel" by blast with NF have NFB: "nofail (b b1)" by (simp add: nres_rel_def pw_le_iff refine_pw_simps) from PQ R1 Q have P: "P b1" by blast with NFB R have "> c c1 <\r. hn_rel R (b b1) r * (R1p b1 c1 * \')>\<^sub>t" unfolding hn_refine_alt by auto thus "> c c1 <\r. hn_rel (hr_comp R R') (a a1) r * (hr_comp R1p R1' a1 c1 * \')>\<^sub>t" apply (rule cons_post_rule) apply (solve_entails) by (intro ent_star_mono hn_rel_compI[OF NF R'] hr_compI[OF R1] ent_refl) qed qed lemma hnr_comp1_aux: assumes R: "\b1 c1. P b1 \ hn_refine (hn_ctxt R1 b1 c1) (c c1) (hn_ctxt R1p b1 c1) R (b$b1)" assumes S: "\a1 b1. \Q a1; (b1,a1)\R1'\ \ (b$b1,a$a1)\\R'\nres_rel" assumes PQ: "\a1 b1. \Q a1; (b1,a1)\R1'\ \ P b1" assumes Q: "Q a1" shows "hn_refine (hr_comp R1 R1' a1 c1) (c c1) (hr_comp R1p R1' a1 c1) (hr_comp R R') (a a1)" using assms hnr_comp[where \=emp and \'=emp and a=a and b=b and c=c and P=P and Q=Q] unfolding hn_ctxt_def by auto lemma hfcomp: assumes A: "(f,g) \ [P]\<^sub>a RR' \ S" assumes B: "(g,h) \ [Q]\<^sub>f T \ \U\nres_rel" shows "(f,h) \ [\a. Q a \ (\a'. 
(a',a)\T \ P a')]\<^sub>a hrp_comp RR' T \ hr_comp S U" using assms unfolding fref_def hfref_def hrp_comp_def apply clarsimp apply (rule hnr_comp1_aux[of P "fst RR'" f "snd RR'" S g "\a. Q a \ (\a'. (a',a)\T \ P a')" T h U]) apply (auto simp: hn_ctxt_def) done lemma hfref_weaken_pre_nofail: assumes "(f,g) \ [P]\<^sub>a R \ S" shows "(f,g) \ [\x. nofail (g x) \ P x]\<^sub>a R \ S" using assms unfolding hfref_def hn_refine_def by auto lemma hfref_cons: assumes "(f,g) \ [P]\<^sub>a R \ S" assumes "\x. P' x \ P x" assumes "\x y. fst R' x y \\<^sub>t fst R x y" assumes "\x y. snd R x y \\<^sub>t snd R' x y" assumes "\x y. S x y \\<^sub>t S' x y" shows "(f,g) \ [P']\<^sub>a R' \ S'" unfolding hfref_def apply clarsimp apply (rule hn_refine_cons) apply (rule assms(3)) defer apply (rule entt_trans[OF assms(4)]; sep_auto) apply (rule assms(5)) apply (frule assms(2)) using assms(1) unfolding hfref_def apply auto done subsubsection \Composition Automation\ text \This section contains the lemmas. The ML code is further down. \ lemma prod_hrp_comp: "hrp_comp (A *\<^sub>a B) (C \\<^sub>r D) = hrp_comp A C *\<^sub>a hrp_comp B D" unfolding hrp_comp_def hfprod_def by simp lemma hrp_comp_keep: "hrp_comp (A\<^sup>k) B = (hr_comp A B)\<^sup>k" by (auto simp: hrp_comp_def) lemma hr_comp_invalid: "hr_comp (invalid_assn R1) R2 = invalid_assn (hr_comp R1 R2)" apply (intro ent_iffI entailsI ext) unfolding invalid_assn_def hr_comp_def by auto lemma hrp_comp_dest: "hrp_comp (A\<^sup>d) B = (hr_comp A B)\<^sup>d" by (auto simp: hrp_comp_def hr_comp_invalid) definition "hrp_imp RR RR' \ \a b. (fst RR' a b \\<^sub>t fst RR a b) \ (snd RR a b \\<^sub>t snd RR' a b)" lemma hfref_imp: "hrp_imp RR RR' \ [P]\<^sub>a RR \ S \ [P]\<^sub>a RR' \ S" apply clarsimp apply (erule hfref_cons) apply (simp_all add: hrp_imp_def) done lemma hrp_imp_refl: "hrp_imp RR RR" unfolding hrp_imp_def by auto lemma hrp_imp_reflI: "RR = RR' \ hrp_imp RR RR'" unfolding hrp_imp_def by auto lemma hrp_comp_cong: "hrp_imp A A' \ B=B' \ hrp_imp (hrp_comp A B) (hrp_comp A' B')" by (sep_auto simp: hrp_imp_def hrp_comp_def hr_comp_def entailst_def) lemma hrp_prod_cong: "hrp_imp A A' \ hrp_imp B B' \ hrp_imp (A*\<^sub>aB) (A'*\<^sub>aB')" by (sep_auto simp: hrp_imp_def prod_assn_def intro: entt_star_mono) lemma hrp_imp_trans: "hrp_imp A B \ hrp_imp B C \ hrp_imp A C" unfolding hrp_imp_def by (fastforce intro: entt_trans) lemma fcomp_norm_dflt_init: "x\[P]\<^sub>a R \ T \ hrp_imp R S \ x\[P]\<^sub>a S \ T" apply (erule rev_subsetD) by (rule hfref_imp) definition "comp_PRE R P Q S \ \x. S x \ (P x \ (\y. (y,x)\R \ Q x y))" lemma comp_PRE_cong[cong]: assumes "R\R'" assumes "\x. P x \ P' x" assumes "\x. S x \ S' x" assumes "\x y. \P x; (y,x)\R; y\Domain R; S' x \ \ Q x y \ Q' x y" shows "comp_PRE R P Q S \ comp_PRE R' P' Q' S'" using assms by (fastforce simp: comp_PRE_def intro!: eq_reflection ext) lemma fref_compI_PRE: "\ (f,g)\fref P R1 R2; (g,h)\fref Q S1 S2 \ \ (f,h) \ fref (comp_PRE S1 Q (\_. P) (\_. True)) (R1 O S1) (R2 O S2)" using fref_compI[of P R1 R2 Q S1 S2] unfolding comp_PRE_def by auto lemma PRE_D1: "(Q x \ P x) \ comp_PRE S1 Q (\x _. P x) S x" by (auto simp: comp_PRE_def) lemma PRE_D2: "(Q x \ (\y. (y,x)\S1 \ S x \ P x y)) \ comp_PRE S1 Q P S x" by (auto simp: comp_PRE_def) lemma fref_weaken_pre: assumes "\x. P x \ P' x" assumes "(f,h) \ fref P' R S" shows "(f,h) \ fref P R S" apply (rule rev_subsetD[OF assms(2) fref_mono]) using assms(1) by auto lemma fref_PRE_D1: assumes "(f,h) \ fref (comp_PRE S1 Q (\x _. P x) X) R S" shows "(f,h) \ fref (\x. 
Q x \ P x) R S" by (rule fref_weaken_pre[OF PRE_D1 assms]) lemma fref_PRE_D2: assumes "(f,h) \ fref (comp_PRE S1 Q P X) R S" shows "(f,h) \ fref (\x. Q x \ (\y. (y,x)\S1 \ X x \ P x y)) R S" by (rule fref_weaken_pre[OF PRE_D2 assms]) lemmas fref_PRE_D = fref_PRE_D1 fref_PRE_D2 lemma hfref_weaken_pre: assumes "\x. P x \ P' x" assumes "(f,h) \ hfref P' R S" shows "(f,h) \ hfref P R S" using assms by (auto simp: hfref_def) lemma hfref_weaken_pre': assumes "\x. \P x; rdomp (fst R) x\ \ P' x" assumes "(f,h) \ hfref P' R S" shows "(f,h) \ hfref P R S" apply (rule hfrefI) apply (rule hn_refine_preI) using assms by (auto simp: hfref_def rdomp_def) lemma hfref_weaken_pre_nofail': assumes "(f,g) \ [P]\<^sub>a R \ S" assumes "\x. \nofail (g x); Q x\ \ P x" shows "(f,g) \ [Q]\<^sub>a R \ S" apply (rule hfref_weaken_pre[OF _ assms(1)[THEN hfref_weaken_pre_nofail]]) using assms(2) by blast lemma hfref_compI_PRE_aux: assumes A: "(f,g) \ [P]\<^sub>a RR' \ S" assumes B: "(g,h) \ [Q]\<^sub>f T \ \U\nres_rel" shows "(f,h) \ [comp_PRE T Q (\_. P) (\_. True)]\<^sub>a hrp_comp RR' T \ hr_comp S U" apply (rule hfref_weaken_pre[OF _ hfcomp[OF A B]]) by (auto simp: comp_PRE_def) lemma hfref_compI_PRE: assumes A: "(f,g) \ [P]\<^sub>a RR' \ S" assumes B: "(g,h) \ [Q]\<^sub>f T \ \U\nres_rel" shows "(f,h) \ [comp_PRE T Q (\x y. P y) (\x. nofail (h x))]\<^sub>a hrp_comp RR' T \ hr_comp S U" using hfref_compI_PRE_aux[OF A B, THEN hfref_weaken_pre_nofail] apply (rule hfref_weaken_pre[rotated]) apply (auto simp: comp_PRE_def) done lemma hfref_PRE_D1: assumes "(f,h) \ hfref (comp_PRE S1 Q (\x _. P x) X) R S" shows "(f,h) \ hfref (\x. Q x \ P x) R S" by (rule hfref_weaken_pre[OF PRE_D1 assms]) lemma hfref_PRE_D2: assumes "(f,h) \ hfref (comp_PRE S1 Q P X) R S" shows "(f,h) \ hfref (\x. Q x \ (\y. (y,x)\S1 \ X x \ P x y)) R S" by (rule hfref_weaken_pre[OF PRE_D2 assms]) lemma hfref_PRE_D3: assumes "(f,h) \ hfref (comp_PRE S1 Q P X) R S" shows "(f,h) \ hfref (comp_PRE S1 Q P X) R S" using assms . lemmas hfref_PRE_D = hfref_PRE_D1 hfref_PRE_D3 subsection \Automation\ text \Purity configuration for constraint solver\ lemmas [safe_constraint_rules] = pure_pure text \Configuration for hfref to hnr conversion\ named_theorems to_hnr_post \to_hnr converter: Postprocessing unfold rules\ lemma uncurry0_add_app_tag: "uncurry0 (RETURN c) = uncurry0 (RETURN$c)" by simp lemmas [to_hnr_post] = norm_RETURN_o norm_return_o uncurry0_add_app_tag uncurry0_apply uncurry0_APP hn_val_unit_conv_emp mult_1[of "x::assn" for x] mult_1_right[of "x::assn" for x] named_theorems to_hfref_post \to_hfref converter: Postprocessing unfold rules\ lemma prod_casesK[to_hfref_post]: "case_prod (\_ _. k) = (\_. k)" by auto lemma uncurry0_hfref_post[to_hfref_post]: "hfref (uncurry0 True) R S = hfref (\_. True) R S" apply (fo_rule arg_cong fun_cong)+ by auto (* Currently not used, we keep it in here anyway. 
*) text \Configuration for relation normalization after composition\ named_theorems fcomp_norm_unfold \fcomp-normalizer: Unfold theorems\ named_theorems fcomp_norm_simps \fcomp-normalizer: Simplification theorems\ named_theorems fcomp_norm_init "fcomp-normalizer: Initialization rules" named_theorems fcomp_norm_trans "fcomp-normalizer: Transitivity rules" named_theorems fcomp_norm_cong "fcomp-normalizer: Congruence rules" named_theorems fcomp_norm_norm "fcomp-normalizer: Normalization rules" named_theorems fcomp_norm_refl "fcomp-normalizer: Reflexivity rules" text \Default Setup\ lemmas [fcomp_norm_unfold] = prod_rel_comp nres_rel_comp Id_O_R R_O_Id lemmas [fcomp_norm_unfold] = hr_comp_Id1 hr_comp_Id2 lemmas [fcomp_norm_unfold] = hr_comp_prod_conv lemmas [fcomp_norm_unfold] = prod_hrp_comp hrp_comp_keep hrp_comp_dest hr_comp_pure (*lemmas [fcomp_norm_unfold] = prod_casesK uncurry0_hfref_post*) lemma [fcomp_norm_simps]: "CONSTRAINT is_pure P \ pure (the_pure P) = P" by simp lemmas [fcomp_norm_simps] = True_implies_equals lemmas [fcomp_norm_init] = fcomp_norm_dflt_init lemmas [fcomp_norm_trans] = hrp_imp_trans lemmas [fcomp_norm_cong] = hrp_comp_cong hrp_prod_cong (*lemmas [fcomp_norm_norm] = hrp_comp_dest*) lemmas [fcomp_norm_refl] = refl hrp_imp_refl lemma ensure_fref_nresI: "(f,g)\[P]\<^sub>f R\S \ (RETURN o f, RETURN o g)\[P]\<^sub>f R\\S\nres_rel" by (auto intro: nres_relI simp: fref_def) lemma ensure_fref_nres_unfold: "\f. RETURN o (uncurry0 f) = uncurry0 (RETURN f)" "\f. RETURN o (uncurry f) = uncurry (RETURN oo f)" "\f. (RETURN ooo uncurry) f = uncurry (RETURN ooo f)" by auto text \Composed precondition normalizer\ named_theorems fcomp_prenorm_simps \fcomp precondition-normalizer: Simplification theorems\ text \Support for preconditions of the form \_\Domain R\, where \R\ is the relation of the next more abstract level.\ declare DomainI[fcomp_prenorm_simps] lemma auto_weaken_pre_init_hf: assumes "\x. PROTECT P x \ P' x" assumes "(f,h) \ hfref P' R S" shows "(f,h) \ hfref P R S" using assms by (auto simp: hfref_def) lemma auto_weaken_pre_init_f: assumes "\x. PROTECT P x \ P' x" assumes "(f,h) \ fref P' R S" shows "(f,h) \ fref P R S" using assms by (auto simp: fref_def) lemmas auto_weaken_pre_init = auto_weaken_pre_init_hf auto_weaken_pre_init_f lemma auto_weaken_pre_uncurry_step: assumes "PROTECT f a \ f'" shows "PROTECT (\(x,y). f x y) (a,b) \ f' b" using assms by (auto simp: curry_def dest!: meta_eq_to_obj_eq intro!: eq_reflection) lemma auto_weaken_pre_uncurry_finish: "PROTECT f x \ f x" by (auto) lemma auto_weaken_pre_uncurry_start: assumes "P \ P'" assumes "P'\Q" shows "P\Q" using assms by (auto) lemma auto_weaken_pre_comp_PRE_I: assumes "S x \ P x" assumes "\y. \(y,x)\R; P x; S x\ \ Q x y" shows "comp_PRE R P Q S x" using assms by (auto simp: comp_PRE_def) lemma auto_weaken_pre_to_imp_nf: "(A\B\C) = (A\B \ C)" "((A\B)\C) = (A\B\C)" by auto lemma auto_weaken_pre_add_dummy_imp: "P \ True \ P" by simp text \Synthesis for hfref statements\ definition hfsynth_ID_R :: "('a \ _ \ assn) \ 'a \ bool" where [simp]: "hfsynth_ID_R _ _ \ True" lemma hfsynth_ID_R_D: fixes I :: "'a itself" assumes "hfsynth_ID_R R a" assumes "intf_of_assn R I" shows "a ::\<^sub>i I" by simp lemma hfsynth_hnr_from_hfI: assumes "\x xi. 
P x \ hfsynth_ID_R (fst R) x \ hn_refine (emp * hn_ctxt (fst R) x xi) (f$xi) (emp * hn_ctxt (snd R) x xi) S (g$x)" shows "(f,g) \ [P]\<^sub>a R \ S" using assms unfolding hfref_def by (auto simp: hn_ctxt_def) lemma hfsynth_ID_R_uncurry_unfold: "hfsynth_ID_R (to_hnr_prod R S) (a,b) \ hfsynth_ID_R R a \ hfsynth_ID_R S b" "hfsynth_ID_R (fst (hf_pres R k)) \ hfsynth_ID_R R" by (auto intro!: eq_reflection) ML \ signature SEPREF_RULES = sig (* Analysis of relations, both fref and fun_rel *) (* "R1\...\Rn\_" / "[_]\<^sub>f ((R1\\<^sub>rR2)...\\<^sub>rRn)" \ "[R1,...,Rn]" *) val binder_rels: term -> term list (* "_\...\_\S" / "[_]\<^sub>f _ \ S" \ "S" *) val body_rel: term -> term (* Map \/fref to (precond,args,res). NONE if no/trivial precond. *) val analyze_rel: term -> term option * term list * term (* Make trivial ("\_. True") precond *) val mk_triv_precond: term list -> term (* Make "[P]\<^sub>f ((R1\\<^sub>rR2)...\\<^sub>rRn) \ S". Insert trivial precond if NONE. *) val mk_rel: term option * term list * term -> term (* Map relation to (args,res) *) val strip_rel: term -> term list * term (* Make hfprod (op *\<^sub>a) *) val mk_hfprod : term * term -> term val mk_hfprods : term list -> term (* Determine interface type of refinement assertion, using default fallback if necessary. Use named_thms intf_of_assn for configuration. *) val intf_of_assn : Proof.context -> term -> typ (* Convert a parametricity theorem in higher-order form to uncurried fref-form. For functions without arguments, a unit-argument is added. TODO/FIXME: Currently this only works for higher-order theorems, i.e., theorems of the form (f,g)\R1\\\Rn. First-order theorems are silently treated as refinement theorems for functions with zero arguments, i.e., a unit-argument is added. *) val to_fref : Proof.context -> thm -> thm (* Convert a parametricity or fref theorem to first order form *) val to_foparam : Proof.context -> thm -> thm (* Convert schematic hfref goal to hnr-goal *) val prepare_hfref_synth_tac : Proof.context -> tactic' (* Convert theorem in hfref-form to hnr-form *) val to_hnr : Proof.context -> thm -> thm (* Convert theorem in hnr-form to hfref-form *) val to_hfref: Proof.context -> thm -> thm (* Convert theorem to given form, if not yet in this form *) val ensure_fref : Proof.context -> thm -> thm val ensure_fref_nres : Proof.context -> thm -> thm val ensure_hfref : Proof.context -> thm -> thm val ensure_hnr : Proof.context -> thm -> thm type hnr_analysis = { thm: thm, (* Original theorem, may be normalized *) precond: term, (* Precondition, abstracted over abs-arguments *) prems : term list, (* Premises not depending on arguments *) ahead: term * bool, (* Abstract function, has leading RETURN *) chead: term * bool, (* Concrete function, has leading return *) argrels: (term * bool) list, (* Argument relations, preserved (keep-flag) *) result_rel: term (* Result relation *) } val analyze_hnr: Proof.context -> thm -> hnr_analysis val pretty_hnr_analysis: Proof.context -> hnr_analysis -> Pretty.T val mk_hfref_thm: Proof.context -> hnr_analysis -> thm (* Simplify precondition of fref/hfref-theorem *) val simplify_precond: Proof.context -> thm -> thm (* Normalize hfref-theorem after composition *) val norm_fcomp_rule: Proof.context -> thm -> thm (* Replace "pure ?A" by "?A'" and is_pure constraint, then normalize *) val add_pure_constraints_rule: Proof.context -> thm -> thm (* Compose fref/hfref and fref theorem, to produce hfref theorem. 
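See hfcomp and hfref_compI_PRE above for the corresponding composition lemmas:
       argument and result assertions are composed via hrp_comp/hr_comp, and the
       preconditions are combined via comp_PRE.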
The input theorems may also be in ho-param or hnr form, and are converted accordingly. *) val gen_compose : Proof.context -> thm -> thm -> thm (* FCOMP-attribute *) val fcomp_attrib: attribute context_parser end structure Sepref_Rules: SEPREF_RULES = struct local open Refine_Util Relators in fun binder_rels @{mpat "?F \ ?G"} = F::binder_rels G | binder_rels @{mpat "fref _ ?F _"} = strip_prodrel_left F | binder_rels _ = [] local fun br_aux @{mpat "_ \ ?G"} = br_aux G | br_aux R = R in fun body_rel @{mpat "fref _ _ ?G"} = G | body_rel R = br_aux R end fun strip_rel R = (binder_rels R, body_rel R) fun analyze_rel @{mpat "fref (\_. True) ?R ?S"} = (NONE,strip_prodrel_left R,S) | analyze_rel @{mpat "fref ?P ?R ?S"} = (SOME P,strip_prodrel_left R,S) | analyze_rel R = let val (args,res) = strip_rel R in (NONE,args,res) end fun mk_triv_precond Rs = absdummy (map rel_absT Rs |> list_prodT_left) @{term True} fun mk_rel (P,Rs,S) = let val R = list_prodrel_left Rs val P = case P of SOME P => P | NONE => mk_triv_precond Rs in @{mk_term "fref ?P ?R ?S"} end end fun mk_hfprod (a, b) = @{mk_term "?a*\<^sub>a?b"} local fun mk_hfprods_rev [] = @{mk_term "unit_assn\<^sup>k"} | mk_hfprods_rev [Rk] = Rk | mk_hfprods_rev (Rkn::Rks) = mk_hfprod (mk_hfprods_rev Rks, Rkn) in val mk_hfprods = mk_hfprods_rev o rev end fun intf_of_assn ctxt t = let val orig_ctxt = ctxt val (t,ctxt) = yield_singleton (Variable.import_terms false) t ctxt val v = TVar (("T",0),Proof_Context.default_sort ctxt ("T",0)) |> Logic.mk_type val goal = @{mk_term "Trueprop (intf_of_assn ?t ?v)"} val i_of_assn_rls = Named_Theorems_Rev.get ctxt @{named_theorems_rev intf_of_assn} @ @{thms intf_of_assn_fallback} fun tac ctxt = REPEAT_ALL_NEW (resolve_tac ctxt i_of_assn_rls) val thm = Goal.prove ctxt [] [] goal (fn {context,...} => ALLGOALS (tac context)) val intf = case Thm.concl_of thm of @{mpat "Trueprop (intf_of_assn _ (?v AS\<^sub>p TYPE (_)))"} => v | _ => raise THM("Intf_of_assn: Proved a different theorem?",~1,[thm]) val intf = singleton (Variable.export_terms ctxt orig_ctxt) intf |> Logic.dest_type in intf end datatype rthm_type = RT_HOPARAM (* (_,_) \ _ \ \ \ _ *) | RT_FREF (* (_,_) \ [_]\<^sub>f _ \ _ *) | RT_HNR (* hn_refine _ _ _ _ _ *) | RT_HFREF (* (_,_) \ [_]\<^sub>a _ \ _ *) | RT_OTHER fun rthm_type thm = case Thm.concl_of thm |> HOLogic.dest_Trueprop of @{mpat "(_,_) \ fref _ _ _"} => RT_FREF | @{mpat "(_,_) \ hfref _ _ _"} => RT_HFREF | @{mpat "hn_refine _ _ _ _ _"} => RT_HNR | @{mpat "(_,_) \ _"} => RT_HOPARAM (* TODO: Distinction between ho-param and fo-param *) | _ => RT_OTHER fun to_fref ctxt thm = let open Conv in case Thm.concl_of thm |> HOLogic.dest_Trueprop of @{mpat "(_,_)\_\_"} => Local_Defs.unfold0 ctxt @{thms fref_param1} thm |> fconv_rule (repeat_conv (Refine_Util.ftop_conv (K (rewr_conv @{thm fref_nest})) ctxt)) |> Local_Defs.unfold0 ctxt @{thms in_CURRY_conv} | @{mpat "(_,_)\_"} => thm RS @{thm fref_param0I} | _ => raise THM ("to_fref: Expected theorem of form (_,_)\_",~1,[thm]) end fun to_foparam ctxt thm = let val unf_thms = @{thms split_tupled_all prod_rel_simp uncurry_apply cnv_conj_to_meta Product_Type.split} in case Thm.concl_of thm of @{mpat "Trueprop ((_,_) \ fref _ _ _)"} => (@{thm frefD} OF [thm]) |> forall_intr_vars |> Local_Defs.unfold0 ctxt unf_thms |> Variable.gen_all ctxt | @{mpat "Trueprop ((_,_) \ _)"} => Parametricity.fo_rule thm | _ => raise THM("Expected parametricity or fref theorem",~1,[thm]) end fun to_hnr ctxt thm = (thm RS @{thm hf2hnr}) |> Local_Defs.unfold0 ctxt @{thms to_hnr_prod_fst_snd 
keep_drop_sels} (* Resolve fst and snd over *\<^sub>a and R\<^sup>k, R\<^sup>d *) |> Local_Defs.unfold0 ctxt @{thms hnr_uncurry_unfold} (* Resolve products for uncurried parameters *) |> Local_Defs.unfold0 ctxt @{thms uncurry_apply uncurry_APP assn_one_left split} (* Remove the uncurry modifiers, the emp-dummy, and unfold product cases *) |> Local_Defs.unfold0 ctxt @{thms hn_ctxt_ctxt_fix_conv} (* Remove duplicate hn_ctxt tagging *) |> Local_Defs.unfold0 ctxt @{thms all_to_meta imp_to_meta HOL.True_implies_equals HOL.implies_True_equals Pure.triv_forall_equality cnv_conj_to_meta} (* Convert to meta-level, remove vacuous condition *) |> Local_Defs.unfold0 ctxt (Named_Theorems.get ctxt @{named_theorems to_hnr_post}) (* Post-Processing *) |> Goal.norm_result ctxt |> Conv.fconv_rule Thm.eta_conversion (* Convert schematic hfref-goal to hn_refine goal *) fun prepare_hfref_synth_tac ctxt = let val i_of_assn_rls = Named_Theorems_Rev.get ctxt @{named_theorems_rev intf_of_assn} @ @{thms intf_of_assn_fallback} val to_hnr_post_rls = Named_Theorems.get ctxt @{named_theorems to_hnr_post} val i_of_assn_tac = ( REPEAT' ( DETERM o dresolve_tac ctxt @{thms hfsynth_ID_R_D} THEN' DETERM o SOLVED' (REPEAT_ALL_NEW (resolve_tac ctxt i_of_assn_rls)) ) ) in (* Note: To re-use the to_hnr infrastructure, we first work with $-tags on the abstract function, which are finally removed. *) resolve_tac ctxt @{thms hfsynth_hnr_from_hfI} THEN_ELSE' ( SELECT_GOAL ( unfold_tac ctxt @{thms to_hnr_prod_fst_snd keep_drop_sels hf_pres_fst} (* Distribute fst,snd over product and hf_pres *) THEN unfold_tac ctxt @{thms hnr_uncurry_unfold hfsynth_ID_R_uncurry_unfold} (* Curry parameters *) THEN unfold_tac ctxt @{thms uncurry_apply uncurry_APP assn_one_left split} (* Curry parameters (II) and remove emp assertion *) (*THEN unfold_tac ctxt @{thms hn_ctxt_ctxt_fix_conv} (* Remove duplicate hn_ctxt (Should not be necessary) *)*) THEN unfold_tac ctxt @{thms all_to_meta imp_to_meta HOL.True_implies_equals HOL.implies_True_equals Pure.triv_forall_equality cnv_conj_to_meta} (* Convert precondition to meta-level *) THEN ALLGOALS i_of_assn_tac (* Generate _::\<^sub>i_ premises*) THEN unfold_tac ctxt to_hnr_post_rls (* Postprocessing *) THEN unfold_tac ctxt @{thms APP_def} (* Get rid of $ - tags *) ) , K all_tac ) end (************************************) (* Analyze hnr *) structure Termtab2 = Table( type key = term * term val ord = prod_ord Term_Ord.fast_term_ord Term_Ord.fast_term_ord); type hnr_analysis = { thm: thm, precond: term, prems : term list, ahead: term * bool, chead: term * bool, argrels: (term * bool) list, result_rel: term } fun analyze_hnr (ctxt:Proof.context) thm = let (* Debug information: Stores string*term pairs, which are pretty-printed on error *) val dbg = Unsynchronized.ref [] fun add_dbg msg ts = ( dbg := (msg,ts) :: !dbg; () ) fun pretty_dbg (msg,ts) = Pretty.block [ Pretty.str msg, Pretty.str ":", Pretty.brk 1, Pretty.list "[" "]" (map (Syntax.pretty_term ctxt) ts) ] fun pretty_dbgs l = map pretty_dbg l |> Pretty.fbreaks |> Pretty.block fun trace_dbg msg = Pretty.block [Pretty.str msg, Pretty.fbrk, pretty_dbgs (rev (!dbg))] |> Pretty.string_of |> tracing fun fail msg = (trace_dbg msg; raise THM(msg,~1,[thm])) fun assert cond msg = cond orelse fail msg; (* Heads may have a leading return/RETURN. 
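For example, for an abstract term RETURN (f x) the RETURN is stripped off and
       recorded in a flag; mk_hfref_thm below re-attaches it as RETURN o f when
       building the hfref statement.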
The following code strips off the leading return, unless it has the form "return x" for an argument x *) fun check_strip_leading args t f = (* Handle the case RETURN x, where x is an argument *) if Termtab.defined args f then (t,false) else (f,true) fun strip_leading_RETURN args (t as @{mpat "RETURN$(?f)"}) = check_strip_leading args t f | strip_leading_RETURN args (t as @{mpat "RETURN ?f"}) = check_strip_leading args t f | strip_leading_RETURN _ t = (t,false) fun strip_leading_return args (t as @{mpat "return$(?f)"}) = check_strip_leading args t f | strip_leading_return args (t as @{mpat "return ?f"}) = check_strip_leading args t f | strip_leading_return _ t = (t,false) (* The following code strips the arguments of the concrete or abstract function. It knows how to handle APP-tags ($), and stops at PR_CONST-tags. Moreover, it only strips actual arguments that occur in the precondition-section of the hn_refine-statement. This ensures that non-arguments, like maxsize, are treated correctly. *) fun strip_fun _ (t as @{mpat "PR_CONST _"}) = (t,[]) | strip_fun s (t as @{mpat "?f$?x"}) = check_arg s t f x | strip_fun s (t as @{mpat "?f ?x"}) = check_arg s t f x | strip_fun _ f = (f,[]) and check_arg s t f x = if Termtab.defined s x then strip_fun s f |> apsnd (curry op :: x) else (t,[]) (* Arguments in the pre/postcondition are wrapped into hn_ctxt tags. This function strips them off. *) fun dest_hn_ctxt @{mpat "hn_ctxt ?R ?a ?c"} = ((a,c),R) | dest_hn_ctxt _ = fail "Invalid hn_ctxt parameter in pre or postcondition" fun dest_hn_refine @{mpat "(hn_refine ?G ?c ?G' ?R ?a)"} = (G,c,G',R,a) | dest_hn_refine _ = fail "Conclusion is not a hn_refine statement" (* Strip separation conjunctions. Special case for "emp", which is ignored. *) fun is_emp @{mpat emp} = true | is_emp _ = false val strip_star' = Sepref_Basic.strip_star #> filter (not o is_emp) (* Compare Termtab2s for equality of keys *) fun pairs_eq pairs1 pairs2 = Termtab2.forall (Termtab2.defined pairs1 o fst) pairs2 andalso Termtab2.forall (Termtab2.defined pairs2 o fst) pairs1 fun atomize_prem @{mpat "Trueprop ?p"} = p | atomize_prem _ = fail "Non-atomic premises" (* Make HOL conjunction list *) fun mk_conjs [] = @{const True} | mk_conjs [p] = p | mk_conjs (p::ps) = HOLogic.mk_binop @{const_name "HOL.conj"} (p,mk_conjs ps) (***********************) (* Start actual analysis *) val _ = add_dbg "thm" [Thm.prop_of thm] val prems = Thm.prems_of thm val concl = Thm.concl_of thm |> HOLogic.dest_Trueprop val (G,c,G',R,a) = dest_hn_refine concl val pre_pairs = G |> strip_star' |> tap (add_dbg "precondition") |> map dest_hn_ctxt |> Termtab2.make val post_pairs = G' |> strip_star' |> tap (add_dbg "postcondition") |> map dest_hn_ctxt |> Termtab2.make val _ = assert (pairs_eq pre_pairs post_pairs) "Parameters in precondition do not match postcondition" val aa_set = pre_pairs |> Termtab2.keys |> map fst |> Termtab.make_set val ca_set = pre_pairs |> Termtab2.keys |> map snd |> Termtab.make_set val (a,leading_RETURN) = strip_leading_RETURN aa_set a val (c,leading_return) = strip_leading_return ca_set c val _ = add_dbg "stripped abstract term" [a] val _ = add_dbg "stripped concrete term" [c] val (ahead,aargs) = strip_fun aa_set a; val (chead,cargs) = strip_fun ca_set c; val _ = add_dbg "abstract head" [ahead] val _ = add_dbg "abstract args" aargs val _ = add_dbg "concrete head" [chead] val _ = add_dbg "concrete args" cargs val _ = assert (length cargs = length aargs) "Different number of abstract and concrete arguments"; val _ = assert (not 
(has_duplicates op aconv aargs)) "Duplicate abstract arguments" val _ = assert (not (has_duplicates op aconv cargs)) "Duplicate concrete arguments" val argpairs = aargs ~~ cargs val ap_set = Termtab2.make_set argpairs val _ = assert (pairs_eq pre_pairs ap_set) "Arguments from pre/postcondition do not match operation's arguments" val pre_rels = map (the o (Termtab2.lookup pre_pairs)) argpairs val post_rels = map (the o (Termtab2.lookup post_pairs)) argpairs val _ = add_dbg "pre-rels" pre_rels val _ = add_dbg "post-rels" post_rels fun adjust_hf_pres @{mpat "snd (?R\<^sup>k)"} = R | adjust_hf_pres t = t val post_rels = map adjust_hf_pres post_rels fun is_invalid R @{mpat "invalid_assn ?R'"} = R aconv R' | is_invalid _ @{mpat "snd (_\<^sup>d)"} = true | is_invalid _ _ = false fun is_keep (R,R') = if R aconv R' then true else if is_invalid R R' then false else fail "Mismatch between pre and post relation for argument" val keep = map is_keep (pre_rels ~~ post_rels) val argrels = pre_rels ~~ keep val aa_set = Termtab.make_set aargs val ca_set = Termtab.make_set cargs fun is_precond t = (exists_subterm (Termtab.defined ca_set) t andalso fail "Premise contains concrete argument") orelse exists_subterm (Termtab.defined aa_set) t val (preconds, prems) = split is_precond prems val precond = map atomize_prem preconds |> mk_conjs |> fold lambda aargs val _ = add_dbg "precond" [precond] val _ = add_dbg "prems" prems in { thm = thm, precond = precond, prems = prems, ahead = (ahead,leading_RETURN), chead = (chead,leading_return), argrels = argrels, result_rel = R } end fun pretty_hnr_analysis ctxt ({thm,precond,ahead,chead,argrels,result_rel,...}) : Pretty.T = let val _ = thm (* Suppress unused warning for thm *) fun pretty_argrel (R,k) = Pretty.block [ Syntax.pretty_term ctxt R, if k then Pretty.str "\<^sup>k" else Pretty.str "\<^sup>d" ] val pretty_chead = case chead of (t,false) => Syntax.pretty_term ctxt t | (t,true) => Pretty.block [Pretty.str "return ", Syntax.pretty_term ctxt t] val pretty_ahead = case ahead of (t,false) => Syntax.pretty_term ctxt t | (t,true) => Pretty.block [Pretty.str "RETURN ", Syntax.pretty_term ctxt t] in Pretty.fbreaks [ (*Display.pretty_thm ctxt thm,*) Pretty.block [ Pretty.enclose "[" "]" [pretty_chead, pretty_ahead], Pretty.enclose "[" "]" [Syntax.pretty_term ctxt precond], Pretty.brk 1, Pretty.block (Pretty.separate " \" (map pretty_argrel argrels @ [Syntax.pretty_term ctxt result_rel])) ] ] |> Pretty.block end fun mk_hfref_thm ctxt ({thm,precond,prems,ahead,chead,argrels,result_rel}) = let fun mk_keep (R,true) = @{mk_term "?R\<^sup>k"} | mk_keep (R,false) = @{mk_term "?R\<^sup>d"} (* TODO: Move, this is of general use! *) fun mk_uncurry f = @{mk_term "uncurry ?f"} (* Uncurry function for the given number of arguments. For zero arguments, add a unit-parameter. *) fun rpt_uncurry n t = if n=0 then @{mk_term "uncurry0 ?t"} else if n=1 then t else funpow (n-1) mk_uncurry t (* Rewrite uncurried lambda's to \(_,_). _ form. Use top-down rewriting to correctly handle nesting to the left. TODO: Combine with abstraction and uncurry-procedure, and mark the deviation about uncurry as redundant intermediate step to be eliminated. 
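For example, with three arguments the term uncurry (uncurry (\a b c. P)) is rewritten to the shape \((a,b),c). P, i.e. the tuple pattern nests to the left.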
*) fun rew_uncurry_lambda t = let val rr = map (Logic.dest_equals o Thm.prop_of) @{thms uncurry_def uncurry0_def} val thy = Proof_Context.theory_of ctxt in Pattern.rewrite_term_top thy rr [] t end (* Shortcuts for simplification tactics *) fun gsimp_only ctxt sec = let val ss = put_simpset HOL_basic_ss ctxt |> sec in asm_full_simp_tac ss end fun simp_only ctxt thms = gsimp_only ctxt (fn ctxt => ctxt addsimps thms) (********************************) (* Build theorem statement *) (* \prems\ \ (chead,ahead) \ [precond] rels \ R *) (* Uncurry precondition *) val num_args = length argrels val precond = precond |> rpt_uncurry num_args |> rew_uncurry_lambda (* Convert to nicer \((...,_),_) - form*) (* Re-attach leading RETURN/return *) fun mk_RETURN (t,r) = if r then let val T = funpow num_args range_type (fastype_of (fst ahead)) val tRETURN = Const (@{const_name RETURN}, T --> Type(@{type_name nres},[T])) in Refine_Util.mk_compN num_args tRETURN t end else t fun mk_return (t,r) = if r then let val T = funpow num_args range_type (fastype_of (fst chead)) val tRETURN = Const (@{const_name return}, T --> Type(@{type_name Heap},[T])) in Refine_Util.mk_compN num_args tRETURN t end else t (* Hrmpf!: Gone for good from 2015\2016. Inserting ctxt-based substitute here. *) fun certify_inst ctxt (instT, inst) = - (map (apsnd (Thm.ctyp_of ctxt)) instT, - map (apsnd (Thm.cterm_of ctxt)) inst); + (map (apsnd (Thm.ctyp_of ctxt)) (Term_Subst.TVars.dest instT), + map (apsnd (Thm.cterm_of ctxt)) (Term_Subst.Vars.dest inst)); (* fun mk_RETURN (t,r) = if r then @{mk_term "RETURN o ?t"} else t fun mk_return (t,r) = if r then @{mk_term "return o ?t"} else t *) (* Uncurry abstract and concrete function, append leading return *) val ahead = ahead |> mk_RETURN |> rpt_uncurry num_args val chead = chead |> mk_return |> rpt_uncurry num_args (* Add keep-flags and summarize argument relations to product *) val argrel = map mk_keep argrels |> rev (* TODO: Why this rev? *) |> mk_hfprods (* Produce final result statement *) val result = @{mk_term "Trueprop ((?chead,?ahead) \ [?precond]\<^sub>a ?argrel \ ?result_rel)"} val result = Logic.list_implies (prems,result) (********************************) (* Prove theorem *) (* Create context and import result statement and original theorem *) val orig_ctxt = ctxt (*val thy = Proof_Context.theory_of ctxt*) val (insts, ctxt) = Variable.import_inst true [result] ctxt val insts' = certify_inst ctxt insts val result = Term_Subst.instantiate insts result val thm = Thm.instantiate insts' thm (* Unfold APP tags. This is required as some APP-tags have also been unfolded by analysis *) val thm = Local_Defs.unfold0 ctxt @{thms APP_def} thm (* Tactic to prove the theorem. A first step uses hfrefI to get a hnr-goal. This is then normalized in several consecutive steps, which get rid of uncurrying. Finally, the original theorem is used for resolution, where the pre- and postcondition, and result relation are connected with a consequence rule, to handle unfolded hn_ctxt-tags, re-ordered relations, and introduced unit-parameters (TODO: Mark artificially introduced unit-parameter specially, it may get confused with intentional unit-parameter, e.g., functional empty_set ()!) 
*) fun tac ctxt = resolve_tac ctxt @{thms hfrefI} THEN' gsimp_only ctxt (fn c => c addsimps @{thms uncurry_def hn_ctxt_def uncurry0_def keep_drop_sels uc_hfprod_sel o_apply APP_def} |> Splitter.add_split @{thm prod.split} ) THEN' TRY o ( REPEAT_ALL_NEW (match_tac ctxt @{thms allI impI}) THEN' simp_only ctxt @{thms Product_Type.split prod.inject}) THEN' TRY o REPEAT_ALL_NEW (ematch_tac ctxt @{thms conjE}) THEN' TRY o hyp_subst_tac ctxt THEN' simp_only ctxt @{thms triv_forall_equality} THEN' ( resolve_tac ctxt @{thms hn_refine_cons[rotated]} THEN' (resolve_tac ctxt [thm] THEN_ALL_NEW assume_tac ctxt)) THEN_ALL_NEW simp_only ctxt @{thms hn_ctxt_def entt_refl pure_unit_rel_eq_empty mult_ac mult_1 mult_1_right keep_drop_sels} (* Prove theorem *) val result = Thm.cterm_of ctxt result val rthm = Goal.prove_internal ctxt [] result (fn _ => ALLGOALS (tac ctxt)) (* Export statement to original context *) val rthm = singleton (Variable.export ctxt orig_ctxt) rthm (* Post-processing *) val rthm = Local_Defs.unfold0 ctxt (Named_Theorems.get ctxt @{named_theorems to_hfref_post}) rthm in rthm end fun to_hfref ctxt = analyze_hnr ctxt #> mk_hfref_thm ctxt (***********************************) (* Composition *) local fun norm_set_of ctxt = { trans_rules = Named_Theorems.get ctxt @{named_theorems fcomp_norm_trans}, cong_rules = Named_Theorems.get ctxt @{named_theorems fcomp_norm_cong}, norm_rules = Named_Theorems.get ctxt @{named_theorems fcomp_norm_norm}, refl_rules = Named_Theorems.get ctxt @{named_theorems fcomp_norm_refl} } fun init_rules_of ctxt = Named_Theorems.get ctxt @{named_theorems fcomp_norm_init} fun unfold_rules_of ctxt = Named_Theorems.get ctxt @{named_theorems fcomp_norm_unfold} fun simp_rules_of ctxt = Named_Theorems.get ctxt @{named_theorems fcomp_norm_simps} in fun norm_fcomp_rule ctxt = let open PO_Normalizer Refine_Util val norm1 = gen_norm_rule (init_rules_of ctxt) (norm_set_of ctxt) ctxt val norm2 = Local_Defs.unfold0 ctxt (unfold_rules_of ctxt) val norm3 = Conv.fconv_rule ( Simplifier.asm_full_rewrite (put_simpset HOL_basic_ss ctxt addsimps simp_rules_of ctxt)) val norm = changed_rule (try_rule norm1 o try_rule norm2 o try_rule norm3) in repeat_rule norm end end fun add_pure_constraints_rule ctxt thm = let val orig_ctxt = ctxt val t = Thm.prop_of thm fun cnv (@{mpat (typs) "pure (mpaq_STRUCT (mpaq_Var ?x _) :: (?'v_c\?'v_a) set)"}) = let val T = a --> c --> @{typ assn} val t = Var (x,T) val t = @{mk_term "(the_pure ?t)"} in [(x,T,t)] end | cnv (t$u) = union op= (cnv t) (cnv u) | cnv (Abs (_,_,t)) = cnv t | cnv _ = [] val pvars = cnv t val _ = (pvars |> map #1 |> has_duplicates op=) andalso raise TERM ("Duplicate indexname with different type",[t]) (* This should not happen *) val substs = map (fn (x,_,t) => (x,t)) pvars val t' = subst_Vars substs t fun mk_asm (x,T,_) = let val t = Var (x,T) val t = @{mk_term "Trueprop (CONSTRAINT is_pure ?t)"} in t end val assms = map mk_asm pvars fun add_prems prems t = let val prems' = Logic.strip_imp_prems t val concl = Logic.strip_imp_concl t in Logic.list_implies (prems@prems', concl) end val t' = add_prems assms t' val (t',ctxt) = yield_singleton (Variable.import_terms true) t' ctxt val thm' = Goal.prove_internal ctxt [] (Thm.cterm_of ctxt t') (fn _ => ALLGOALS (resolve_tac ctxt [thm] THEN_ALL_NEW assume_tac ctxt)) val thm' = norm_fcomp_rule ctxt thm' val thm' = singleton (Variable.export ctxt orig_ctxt) thm' in thm' end val cfg_simp_precond = Attrib.setup_config_bool @{binding fcomp_simp_precond} (K true) local fun mk_simp_thm ctxt t = let val 
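(* st: the precondition t, stated as a Trueprop and set up as the initial goal state *)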
st = t |> HOLogic.mk_Trueprop |> Thm.cterm_of ctxt |> Goal.init val ctxt = Context_Position.set_visible false ctxt val ctxt = ctxt addsimps ( refine_pw_simps.get ctxt @ Named_Theorems.get ctxt @{named_theorems fcomp_prenorm_simps} @ @{thms split_tupled_all cnv_conj_to_meta} ) val trace_incomplete_transfer_tac = COND (Thm.prems_of #> exists (strip_all_body #> Logic.strip_imp_concl #> Term.is_open)) (print_tac ctxt "Failed transfer from intermediate level:") all_tac val tac = ALLGOALS (resolve_tac ctxt @{thms auto_weaken_pre_comp_PRE_I} ) THEN ALLGOALS (Simplifier.asm_full_simp_tac ctxt) THEN trace_incomplete_transfer_tac THEN ALLGOALS (TRY o filter_prems_tac ctxt (K false)) THEN Local_Defs.unfold0_tac ctxt [Drule.triv_forall_equality] val st' = tac st |> Seq.take 1 |> Seq.list_of val thm = case st' of [st'] => Goal.conclude st' | _ => raise THM("Simp_Precond: Simp-Tactic failed",~1,[st]) (* Check generated premises for leftover intermediate stuff *) val _ = exists (Logic.is_all) (Thm.prems_of thm) andalso raise THM("Simp_Precond: Transfer from intermediate level failed",~1,[thm]) val thm = thm (*|> map (Simplifier.asm_full_simplify ctxt)*) |> Conv.fconv_rule (Object_Logic.atomize ctxt) |> Local_Defs.unfold0 ctxt @{thms auto_weaken_pre_to_imp_nf} val thm = case Thm.concl_of thm of @{mpat "Trueprop (_ \ _)"} => thm | @{mpat "Trueprop _"} => thm RS @{thm auto_weaken_pre_add_dummy_imp} | _ => raise THM("Simp_Precond: Generated odd theorem, expected form 'P\Q'",~1,[thm]) in thm end in fun simplify_precond ctxt thm = let val orig_ctxt = ctxt val thm = Refine_Util.OF_fst @{thms auto_weaken_pre_init} [asm_rl,thm] val thm = Local_Defs.unfold0 ctxt @{thms split_tupled_all} thm OF @{thms auto_weaken_pre_uncurry_start} fun rec_uncurry thm = case try (fn () => thm OF @{thms auto_weaken_pre_uncurry_step}) () of NONE => thm OF @{thms auto_weaken_pre_uncurry_finish} | SOME thm => rec_uncurry thm val thm = rec_uncurry thm |> Conv.fconv_rule Thm.eta_conversion val t = case Thm.prems_of thm of t::_ => t | _ => raise THM("Simp-Precond: Expected at least one premise",~1,[thm]) val (t,ctxt) = yield_singleton (Variable.import_terms false) t ctxt val ((_,t),ctxt) = Variable.focus NONE t ctxt val t = case t of @{mpat "Trueprop (_ \ ?t)"} => t | _ => raise TERM("Simp_Precond: Expected implication",[t]) val simpthm = mk_simp_thm ctxt t |> singleton (Variable.export ctxt orig_ctxt) val thm = thm OF [simpthm] val thm = Local_Defs.unfold0 ctxt @{thms prod_casesK} thm in thm end fun simplify_precond_if_cfg ctxt = if Config.get ctxt cfg_simp_precond then simplify_precond ctxt else I end (* fref O fref *) fun compose_ff ctxt A B = (@{thm fref_compI_PRE} OF [A,B]) |> norm_fcomp_rule ctxt |> simplify_precond_if_cfg ctxt |> Conv.fconv_rule Thm.eta_conversion (* hfref O fref *) fun compose_hf ctxt A B = (@{thm hfref_compI_PRE} OF [A,B]) |> norm_fcomp_rule ctxt |> simplify_precond_if_cfg ctxt |> Conv.fconv_rule Thm.eta_conversion |> add_pure_constraints_rule ctxt |> Conv.fconv_rule Thm.eta_conversion fun ensure_fref ctxt thm = case rthm_type thm of RT_HOPARAM => to_fref ctxt thm | RT_FREF => thm | _ => raise THM("Expected parametricity or fref theorem",~1,[thm]) fun ensure_fref_nres ctxt thm = let val thm = ensure_fref ctxt thm in case Thm.concl_of thm of @{mpat (typs) "Trueprop (_\fref _ _ (_::(_ nres\_)set))"} => thm | @{mpat "Trueprop ((_,_)\fref _ _ _)"} => (thm RS @{thm ensure_fref_nresI}) |> Local_Defs.unfold0 ctxt @{thms ensure_fref_nres_unfold} | _ => raise THM("Expected fref-theorem",~1,[thm]) end fun ensure_hfref ctxt 
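(* Normalize a refinement theorem to hfref form: hnr theorems are converted via to_hfref, hfref theorems are passed through unchanged. *)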
thm = case rthm_type thm of RT_HNR => to_hfref ctxt thm | RT_HFREF => thm | _ => raise THM("Expected hnr or hfref theorem",~1,[thm]) fun ensure_hnr ctxt thm = case rthm_type thm of RT_HNR => thm | RT_HFREF => to_hnr ctxt thm | _ => raise THM("Expected hnr or hfref theorem",~1,[thm]) fun gen_compose ctxt A B = let val rtA = rthm_type A in if rtA = RT_HOPARAM orelse rtA = RT_FREF then compose_ff ctxt (ensure_fref ctxt A) (ensure_fref ctxt B) else compose_hf ctxt (ensure_hfref ctxt A) ((ensure_fref_nres ctxt B)) end val parse_fcomp_flags = Refine_Util.parse_paren_lists (Refine_Util.parse_bool_config "prenorm" cfg_simp_precond) val fcomp_attrib = parse_fcomp_flags |-- Attrib.thm >> (fn B => Thm.rule_attribute [] (fn context => fn A => let val ctxt = Context.proof_of context in gen_compose ctxt A B end)) end \ attribute_setup to_fref = \ Scan.succeed (Thm.rule_attribute [] (Sepref_Rules.to_fref o Context.proof_of)) \ "Convert parametricity theorem to uncurried fref-form" attribute_setup to_foparam = \ Scan.succeed (Thm.rule_attribute [] (Sepref_Rules.to_foparam o Context.proof_of)) \ \Convert param or fref rule to first order rule\ (* Overloading existing param_fo - attribute from Parametricity.thy *) attribute_setup param_fo = \ Scan.succeed (Thm.rule_attribute [] (Sepref_Rules.to_foparam o Context.proof_of)) \ \Convert param or fref rule to first order rule\ attribute_setup to_hnr = \ Scan.succeed (Thm.rule_attribute [] (Sepref_Rules.to_hnr o Context.proof_of)) \ "Convert hfref-rule to hnr-rule" attribute_setup to_hfref = \Scan.succeed ( Thm.rule_attribute [] (Context.proof_of #> Sepref_Rules.to_hfref) )\ \Convert hnr to hfref theorem\ attribute_setup ensure_fref_nres = \Scan.succeed ( Thm.rule_attribute [] (Context.proof_of #> Sepref_Rules.ensure_fref_nres) )\ attribute_setup sepref_dbg_norm_fcomp_rule = \Scan.succeed ( Thm.rule_attribute [] (Context.proof_of #> Sepref_Rules.norm_fcomp_rule) )\ attribute_setup sepref_simplify_precond = \Scan.succeed ( Thm.rule_attribute [] (Context.proof_of #> Sepref_Rules.simplify_precond) )\ \Simplify precondition of fref/hfref-theorem\ attribute_setup FCOMP = Sepref_Rules.fcomp_attrib "Composition of refinement rules" end diff --git a/thys/Regular-Sets/pEquivalence_Checking.thy b/thys/Regular-Sets/pEquivalence_Checking.thy --- a/thys/Regular-Sets/pEquivalence_Checking.thy +++ b/thys/Regular-Sets/pEquivalence_Checking.thy @@ -1,306 +1,307 @@ section \Deciding Regular Expression Equivalence (2)\ theory pEquivalence_Checking imports Equivalence_Checking Derivatives begin text\\noindent Similar to theory @{theory "Regular-Sets.Equivalence_Checking"}, but with partial derivatives instead of derivatives.\ text\Lifting many notions to sets:\ definition "Lang R == UN r:R. lang r" definition "Nullable R == EX r:R. nullable r" definition "Pderiv a R == UN r:R. pderiv a r" definition "Atoms R == UN r:R. 
atoms r" (* FIXME: rename pderiv_set in Derivatives to Pderiv and drop the one above *) lemma Atoms_pderiv: "Atoms(pderiv a r) \ atoms r" apply (induct r) apply (auto simp: Atoms_def UN_subset_iff) apply (fastforce)+ done lemma Atoms_Pderiv: "Atoms(Pderiv a R) \ Atoms R" using Atoms_pderiv by (fastforce simp: Atoms_def Pderiv_def) lemma pderiv_no_occurrence: "x \ atoms r \ pderiv x r = {}" by (induct r) auto lemma Pderiv_no_occurrence: "x \ Atoms R \ Pderiv x R = {}" by(auto simp:pderiv_no_occurrence Atoms_def Pderiv_def) lemma Deriv_Lang: "Deriv c (Lang R) = Lang (Pderiv c R)" by(auto simp: Deriv_pderiv Pderiv_def Lang_def) lemma Nullable_pderiv[simp]: "Nullable(pderivs w r) = (w : lang r)" apply(induction w arbitrary: r) apply (simp add: Nullable_def nullable_iff singleton_iff) using eqset_imp_iff[OF Deriv_pderiv[where 'a = 'a]] apply (simp add: Nullable_def Deriv_def) done type_synonym 'a Rexp_pair = "'a rexp set * 'a rexp set" type_synonym 'a Rexp_pairs = "'a Rexp_pair list" definition is_Bisimulation :: "'a list \ 'a Rexp_pairs \ bool" where "is_Bisimulation as ps = (\(R,S)\ set ps. Atoms R \ Atoms S \ set as \ (Nullable R \ Nullable S) \ (\a\set as. (Pderiv a R, Pderiv a S) \ set ps))" lemma Bisim_Lang_eq: assumes Bisim: "is_Bisimulation as ps" assumes "(R, S) \ set ps" shows "Lang R = Lang S" proof - define ps' where "ps' = ({}, {}) # ps" from Bisim have Bisim': "is_Bisimulation as ps'" by (fastforce simp: ps'_def is_Bisimulation_def UN_subset_iff Pderiv_def Atoms_def) let ?R = "\K L. (\(R,S)\set ps'. K = Lang R \ L = Lang S)" show ?thesis proof (rule language_coinduct[where R="?R"]) from \(R,S) \ set ps\ have "(R,S) \ set ps'" by (auto simp: ps'_def) thus "?R (Lang R) (Lang S)" by auto next fix K L assume "?R K L" then obtain R S where rs: "(R, S) \ set ps'" and KL: "K = Lang R" "L = Lang S" by auto with Bisim' have "Nullable R \ Nullable S" by (auto simp: is_Bisimulation_def) thus "[] \ K \ [] \ L" by (auto simp: nullable_iff KL Nullable_def Lang_def) fix a show "?R (Deriv a K) (Deriv a L)" proof cases assume "a \ set as" with rs Bisim' have "(Pderiv a R, Pderiv a S) \ set ps'" by (auto simp: is_Bisimulation_def) thus ?thesis by (fastforce simp: KL Deriv_Lang) next assume "a \ set as" with Bisim' rs have "a \ Atoms R \ Atoms S" by (fastforce simp: is_Bisimulation_def UN_subset_iff) then have "Pderiv a R = {}" "Pderiv a S = {}" by (metis Pderiv_no_occurrence Un_iff)+ then have "Deriv a K = Lang {}" "Deriv a L = Lang {}" unfolding KL Deriv_Lang by auto thus ?thesis by (auto simp: ps'_def) qed qed qed subsection \Closure computation\ fun test :: "'a Rexp_pairs * 'a Rexp_pairs \ bool" where "test (ws, ps) = (case ws of [] \ False | (R,S)#_ \ Nullable R = Nullable S)" fun step :: "'a list \ 'a Rexp_pairs * 'a Rexp_pairs \ 'a Rexp_pairs * 'a Rexp_pairs" where "step as (ws,ps) = (let (R,S) = hd ws; ps' = (R,S) # ps; succs = map (\a. (Pderiv a R, Pderiv a S)) as; new = filter (\p. p \ set ps \ set ws) succs in (remdups new @ tl ws, ps'))" definition closure :: "'a list \ 'a Rexp_pairs * 'a Rexp_pairs \ ('a Rexp_pairs * 'a Rexp_pairs) option" where "closure as = while_option test (step as)" definition pre_Bisim :: "'a list \ 'a rexp set \ 'a rexp set \ 'a Rexp_pairs * 'a Rexp_pairs \ bool" where "pre_Bisim as R S = (\(ws,ps). ((R,S) \ set ws \ set ps) \ (\(R,S)\ set ws \ set ps. Atoms R \ Atoms S \ set as) \ (\(R,S)\ set ps. (Nullable R \ Nullable S) \ (\a\set as. 
(Pderiv a R, Pderiv a S) \ set ps \ set ws)))" lemma step_set_eq: "\ test (ws,ps); step as (ws,ps) = (ws',ps') \ \ set ws' \ set ps' = set ws \ set ps \ (\a\set as. {(Pderiv a (fst(hd ws)), Pderiv a (snd(hd ws)))})" by(auto split: list.splits) theorem closure_sound: assumes result: "closure as ([(R,S)],[]) = Some([],ps)" and atoms: "Atoms R \ Atoms S \ set as" shows "Lang R = Lang S" proof- { fix st have "pre_Bisim as R S st \ test st \ pre_Bisim as R S (step as st)" unfolding pre_Bisim_def proof(split prod.splits, elim case_prodE conjE, intro allI impI conjI, goal_cases) case 1 thus ?case by(auto split: list.splits) next case prems: (2 ws ps ws' ps') note prems(2)[simp] from \test st\ obtain wstl R S where [simp]: "ws = (R,S)#wstl" by (auto split: list.splits) from \step as st = (ws',ps')\ obtain P where [simp]: "ps' = (R,S) # ps" and [simp]: "ws' = remdups(filter P (map (\a. (Pderiv a R, Pderiv a S)) as)) @ wstl" by auto have "\(R',S')\set wstl \ set ps'. Atoms R' \ Atoms S' \ set as" using prems(4) by auto moreover have "\a\set as. Atoms(Pderiv a R) \ Atoms(Pderiv a S) \ set as" using prems(4) by simp (metis (lifting) Atoms_Pderiv order_trans) ultimately show ?case by simp blast next case 3 thus ?case apply (clarsimp simp: image_iff split: prod.splits list.splits) by hypsubst_thin metis qed } moreover from atoms have "pre_Bisim as R S ([(R,S)],[])" by (simp add: pre_Bisim_def) ultimately have pre_Bisim_ps: "pre_Bisim as R S ([],ps)" by (rule while_option_rule[OF _ result[unfolded closure_def]]) then have "is_Bisimulation as ps" "(R,S) \ set ps" by (auto simp: pre_Bisim_def is_Bisimulation_def) thus "Lang R = Lang S" by (rule Bisim_Lang_eq) qed subsection \The overall procedure\ definition check_eqv :: "'a rexp \ 'a rexp \ bool" where "check_eqv r s = (case closure (add_atoms r (add_atoms s [])) ([({r}, {s})], []) of Some([],_) \ True | _ \ False)" lemma soundness: assumes "check_eqv r s" shows "lang r = lang s" proof - let ?as = "add_atoms r (add_atoms s [])" obtain ps where 1: "closure ?as ([({r},{s})],[]) = Some([],ps)" using assms by (auto simp: check_eqv_def split:option.splits list.splits) then have "lang r = lang s" by(rule closure_sound[of _ "{r}" "{s}", simplified Lang_def, simplified]) (auto simp: set_add_atoms Atoms_def) thus "lang r = lang s" by simp qed text\Test:\ lemma "check_eqv (Plus One (Times (Atom 0) (Star(Atom 0)))) (Star(Atom(0::nat)))" by eval subsection "Termination and Completeness" definition PDERIVS :: "'a rexp set => 'a rexp set" where "PDERIVS R = (UN r:R. pderivs_lang UNIV r)" lemma PDERIVS_incr[simp]: "R \ PDERIVS R" apply(auto simp add: PDERIVS_def pderivs_lang_def) by (metis pderivs.simps(1) insertI1) lemma Pderiv_PDERIVS: assumes "R' \ PDERIVS R" shows "Pderiv a R' \ PDERIVS R" proof fix r assume "r : Pderiv a R'" then obtain r' where "r' : R'" "r : pderiv a r'" by(auto simp: Pderiv_def) from \r' : R'\ \R' \ PDERIVS R\ obtain s w where "s : R" "r' : pderivs w s" by(auto simp: PDERIVS_def pderivs_lang_def) hence "r \ pderivs (w @ [a]) s" using \r : pderiv a r'\ by(auto simp add:pderivs_snoc) thus "r : PDERIVS R" using \s : R\ by(auto simp: PDERIVS_def pderivs_lang_def) qed lemma finite_PDERIVS: "finite R \ finite(PDERIVS R)" by(simp add: PDERIVS_def finite_pderivs_lang_UNIV) lemma closure_Some: assumes "finite R0" "finite S0" shows "\p. closure as ([(R0,S0)],[]) = Some p" proof(unfold closure_def) let ?Inv = "%(ws,bs). distinct ws \ (ALL (R,S) : set ws. R \ PDERIVS R0 \ S \ PDERIVS S0 \ (R,S) \ set bs)" let ?m1 = "%bs. 
Pow(PDERIVS R0) \ Pow(PDERIVS S0) - set bs" let ?m2 = "%(ws,bs). card(?m1 bs)" have Inv0: "?Inv ([(R0, S0)], [])" by simp { fix s assume "test s" "?Inv s" obtain ws bs where [simp]: "s = (ws,bs)" by fastforce from \test s\ obtain R S ws' where [simp]: "ws = (R,S)#ws'" by(auto split: prod.splits list.splits) let ?bs' = "(R,S) # bs" let ?succs = "map (\a. (Pderiv a R, Pderiv a S)) as" let ?new = "filter (\p. p \ set bs \ set ws) ?succs" let ?ws' = "remdups ?new @ ws'" have *: "?Inv (step as s)" proof - from \?Inv s\ have "distinct ?ws'" by auto have "ALL (R,S) : set ?ws'. R \ PDERIVS R0 \ S \ PDERIVS S0 \ (R,S) \ set ?bs'" using \?Inv s\ by(simp add: Ball_def image_iff) (metis Pderiv_PDERIVS) with \distinct ?ws'\ show ?thesis by(simp) qed have "?m2(step as s) < ?m2 s" proof - - have "finite(?m1 bs)" + have fin: "finite(?m1 bs)" by(metis assms finite_Diff finite_PDERIVS finite_cartesian_product finite_Pow_iff) - moreover have "?m2(step as s) < ?m2 s" using \?Inv s\ - by(auto intro: psubset_card_mono[OF \finite(?m1 bs)\]) + have "?m2(step as s) < ?m2 s" using \?Inv s\ psubset_card_mono[OF \finite(?m1 bs)\] + apply (simp split: prod.split_asm) + by (metis Diff_iff Pow_iff SigmaI fin card_gt_0_iff diff_Suc_less emptyE) then show ?thesis using \?Inv s\ by simp qed note * and this } note step = this show "\p. while_option test (step as) ([(R0, S0)], []) = Some p" by(rule measure_while_option_Some [where P = ?Inv and f = ?m2, OF _ Inv0])(simp add: step) qed theorem closure_Some_Inv: assumes "closure as ([({r},{s})],[]) = Some p" shows "\(R,S)\set(fst p). \w. R = pderivs w r \ S = pderivs w s" (is "?Inv p") proof- from assms have 1: "while_option test (step as) ([({r},{s})],[]) = Some p" by(simp add: closure_def) have Inv0: "?Inv ([({r},{s})],[])" by simp (metis pderivs.simps(1)) { fix p assume "?Inv p" "test p" obtain ws bs where [simp]: "p = (ws,bs)" by fastforce from \test p\ obtain R S ws' where [simp]: "ws = (R,S)#ws'" by(auto split: prod.splits list.splits) let ?succs = "map (\a. (Pderiv a R, Pderiv a S)) as" let ?new = "filter (\p. p \ set bs \ set ws) ?succs" let ?ws' = "remdups ?new @ ws'" from \?Inv p\ obtain w where [simp]: "R = pderivs w r" "S = pderivs w s" by auto { fix x assume "x : set as" have "EX w. Pderiv x R = pderivs w r \ Pderiv x S = pderivs w s" by(rule_tac x="w@[x]" in exI)(simp add: pderivs_append Pderiv_def) } with \?Inv p\ have "?Inv (step as p)" by auto } note Inv_step = this show ?thesis apply(rule while_option_rule[OF _ 1]) apply(erule (1) Inv_step) apply(rule Inv0) done qed lemma closure_complete: assumes "lang r = lang s" shows "EX bs. closure as ([({r},{s})],[]) = Some([],bs)" (is ?C) proof(rule ccontr) assume "\ ?C" then obtain R S ws bs where cl: "closure as ([({r},{s})],[]) = Some((R,S)#ws,bs)" using closure_Some[of "{r}" "{s}", simplified] by (metis (opaque_lifting, no_types) list.exhaust prod.exhaust) from assms closure_Some_Inv[OF this] while_option_stop[OF cl[unfolded closure_def]] show "False" by auto qed corollary completeness: "lang r = lang s \ check_eqv r s" by(auto simp add: check_eqv_def dest!: closure_complete split: option.split list.split) end diff --git a/thys/SenSocialChoice/Arrow.thy b/thys/SenSocialChoice/Arrow.thy --- a/thys/SenSocialChoice/Arrow.thy +++ b/thys/SenSocialChoice/Arrow.thy @@ -1,637 +1,637 @@ (* * Arrow's General Possibility Theorem, following Sen. * (C)opyright Peter Gammie, peteg42 at gmail.com, commenced July 2006. 
* License: BSD *) (*<*) theory Arrow imports SCFs begin (*>*) section\Arrow's General Possibility Theorem\ text\ The proof falls into two parts: showing that a semi-decisive individual is in fact a dictator, and that a semi-decisive individual exists. I take them in that order. It might be good to do some of this in a locale. The complication is untangling where various witnesses need to be quantified over. \ (* **************************************** *) subsection\Semi-decisiveness Implies Decisiveness\ text\ I follow \cite[Chapter~3*]{Sen:70a} quite closely here. Formalising his appeal to the @{term "iia"} assumption is the main complication here. \ text\The witness for the first lemma: in the profile $P'$, special agent $j$ strictly prefers $x$ to $y$ to $z$, and doesn't care about the other alternatives. Everyone else strictly prefers $y$ to each of $x$ to $z$, and inherits the relative preferences between $x$ and $z$ from profile $P$. The model has to be specific about ordering all the other alternatives, but these are immaterial in the proof that uses this witness. Note also that the following lemma is used with different instantiations of $x$, $y$ and $z$, so we need to quantify over them here. This happens implicitly, but in a locale we would have to be more explicit. This is just tedious.\ lemma decisive1_witness: assumes has3A: "hasw [x,y,z] A" and profileP: "profile A Is P" and jIs: "j \ Is" obtains P' where "profile A Is P'" and "x \<^bsub>(P' j)\<^esub>\ y \ y \<^bsub>(P' j)\<^esub>\ z" and "\i. i \ j \ y \<^bsub>(P' i)\<^esub>\ x \ y \<^bsub>(P' i)\<^esub>\ z \ ((x \<^bsub>(P' i)\<^esub>\ z) = (x \<^bsub>(P i)\<^esub>\ z)) \ ((z \<^bsub>(P' i)\<^esub>\ x) = (z \<^bsub>(P i)\<^esub>\ x))" proof let ?P' = "\i. (if i = j then ({ (x, u) | u. u \ A } \ { (y, u) | u. u \ A - {x} } \ { (z, u) | u. u \ A - {x,y} }) else ({ (y, u) | u. u \ A } \ { (x, u) | u. u \ A - {y,z} } \ { (z, u) | u. u \ A - {x,y} } \ (if x \<^bsub>(P i)\<^esub>\ z then {(x,z)} else {}) \ (if z \<^bsub>(P i)\<^esub>\ x then {(z,x)} else {}))) \ (A - {x,y,z}) \ (A - {x,y,z})" show "profile A Is ?P'" proof fix i assume iIs: "i \ Is" show "rpr A (?P' i)" proof(cases "i = j") case True with has3A show ?thesis by - (rule rprI, simp_all add: trans_def, blast+) next case False hence ij: "i \ j" . show ?thesis proof from iIs profileP have "complete A (P i)" by (blast dest: rpr_complete) with ij show "complete A (?P' i)" by (simp add: complete_def, blast) from iIs profileP have "refl_on A (P i)" by (auto simp add: rpr_def) with has3A ij show "refl_on A (?P' i)" by (simp, blast) from ij has3A show "trans (?P' i)" by (clarsimp simp add: trans_def) qed qed next from profileP show "Is \ {}" by (rule profile_non_empty) qed from has3A show "x \<^bsub>(?P' j)\<^esub>\ y \ y \<^bsub>(?P' j)\<^esub>\ z" and "\i. i \ j \ y \<^bsub>(?P' i)\<^esub>\ x \ y \<^bsub>(?P' i)\<^esub>\ z \ ((x \<^bsub>(?P' i)\<^esub>\ z) = (x \<^bsub>(P i)\<^esub>\ z)) \ ((z \<^bsub>(?P' i)\<^esub>\ x) = (z \<^bsub>(P i)\<^esub>\ x))" unfolding strict_pref_def by auto qed text \The key lemma: in the presence of Arrow's assumptions, an individual who is semi-decisive for $x$ and $y$ is actually decisive for $x$ over any other alternative $z$. (This is where the quantification becomes important.) 
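In outline: the proof builds the witness profile $P'$ above, obtains a social strict preference for $x$ over $y$ from semi-decisiveness and for $y$ over $z$ from the weak Pareto condition, combines the two by transitivity, and transfers the resulting preference for $x$ over $z$ back to the original profile via @{term "iia"}.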
\ lemma decisive1: assumes has3A: "hasw [x,y,z] A" and iia: "iia swf A Is" and swf: "SWF swf A Is universal_domain" and wp: "weak_pareto swf A Is universal_domain" and sd: "semidecisive swf A Is {j} x y" shows "decisive swf A Is {j} x z" proof from sd show jIs: "{j} \ Is" by blast fix P assume profileP: "profile A Is P" and jxzP: "\i. i \ {j} \ x \<^bsub>(P i)\<^esub>\ z" from has3A profileP jIs obtain P' where profileP': "profile A Is P'" and jxyzP': "x \<^bsub>(P' j)\<^esub>\ y" "y \<^bsub>(P' j)\<^esub>\ z" and ixyzP': "\i. i \ j \ y \<^bsub>(P' i)\<^esub>\ x \ y \<^bsub>(P' i)\<^esub>\ z \ ((x \<^bsub>(P' i)\<^esub>\ z) = (x \<^bsub>(P i)\<^esub>\ z)) \ ((z \<^bsub>(P' i)\<^esub>\ x) = (z \<^bsub>(P i)\<^esub>\ x))" by - (rule decisive1_witness, blast+) from iia have "\a b. \ a \ {x, z}; b \ {x, z} \ \ (a \<^bsub>(swf P)\<^esub>\ b) = (a \<^bsub>(swf P')\<^esub>\ b)" proof(rule iiaE) from has3A show "{x,z} \ A" by simp next fix i assume iIs: "i \ Is" fix a b assume ab: "a \ {x, z}" "b \ {x, z}" show "(a \<^bsub>(P' i)\<^esub>\ b) = (a \<^bsub>(P i)\<^esub>\ b)" proof(cases "i = j") case False with ab iIs ixyzP' profileP profileP' has3A show ?thesis unfolding profile_def by auto next case True from profileP' jIs jxyzP' have "x \<^bsub>(P' j)\<^esub>\ z" by (auto dest: rpr_less_trans) with True ab iIs jxzP profileP profileP' has3A show ?thesis unfolding profile_def strict_pref_def by auto qed qed (simp_all add: profileP profileP') moreover have "x \<^bsub>(swf P')\<^esub>\ z" proof - from profileP' sd jxyzP' ixyzP' have "x \<^bsub>(swf P')\<^esub>\ y" by (simp add: semidecisive_def) moreover from jxyzP' ixyzP' have "\i. i \ Is \ y \<^bsub>(P' i)\<^esub>\ z" by (case_tac "i=j", auto) with wp profileP' has3A have "y \<^bsub>(swf P')\<^esub>\ z" by (auto dest: weak_paretoD) moreover note SWF_rpr[OF swf] profileP' ultimately show "x \<^bsub>(swf P')\<^esub>\ z" unfolding universal_domain_def by (blast dest: rpr_less_trans) qed ultimately show "x \<^bsub>(swf P)\<^esub>\ z" unfolding strict_pref_def by blast qed text\The witness for the second lemma: special agent $j$ strictly prefers $z$ to $x$ to $y$, and everyone else strictly prefers $z$ to $x$ and $y$ to $x$. (In some sense the last part is upside-down with respect to the first witness.)\ lemma decisive2_witness: assumes has3A: "hasw [x,y,z] A" and profileP: "profile A Is P" and jIs: "j \ Is" obtains P' where "profile A Is P'" and "z \<^bsub>(P' j)\<^esub>\ x \ x \<^bsub>(P' j)\<^esub>\ y" and "\i. i \ j \ z \<^bsub>(P' i)\<^esub>\ x \ y \<^bsub>(P' i)\<^esub>\ x \ ((y \<^bsub>(P' i)\<^esub>\ z) = (y \<^bsub>(P i)\<^esub>\ z)) \ ((z \<^bsub>(P' i)\<^esub>\ y) = (z \<^bsub>(P i)\<^esub>\ y))" proof let ?P' = "\i. (if i = j then ({ (z, u) | u. u \ A } \ { (x, u) | u. u \ A - {z} } \ { (y, u) | u. u \ A - {x,z} }) else ({ (z, u) | u. u \ A - {y} } \ { (y, u) | u. u \ A - {z} } \ { (x, u) | u. u \ A - {y,z} } \ (if y \<^bsub>(P i)\<^esub>\ z then {(y,z)} else {}) \ (if z \<^bsub>(P i)\<^esub>\ y then {(z,y)} else {}))) \ (A - {x,y,z}) \ (A - {x,y,z})" show "profile A Is ?P'" proof fix i assume iIs: "i \ Is" show "rpr A (?P' i)" proof(cases "i = j") case True with has3A show ?thesis by - (rule rprI, simp_all add: trans_def, blast+) next case False hence ij: "i \ j" . 
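(* Same argument as in decisive1_witness: completeness, reflexivity and transitivity of the constructed relation. *)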
show ?thesis proof from iIs profileP have "complete A (P i)" by (auto simp add: rpr_def) with ij show "complete A (?P' i)" by (simp add: complete_def, blast) from iIs profileP have "refl_on A (P i)" by (auto simp add: rpr_def) with has3A ij show "refl_on A (?P' i)" by (simp, blast) from ij has3A show "trans (?P' i)" by (clarsimp simp add: trans_def) qed qed next show "Is \ {}" by (rule profile_non_empty[OF profileP]) qed from has3A show "z \<^bsub>(?P' j)\<^esub>\ x \ x \<^bsub>(?P' j)\<^esub>\ y" and "\i. i \ j \ z \<^bsub>(?P' i)\<^esub>\ x \ y \<^bsub>(?P' i)\<^esub>\ x \ ((y \<^bsub>(?P' i)\<^esub>\ z) = (y \<^bsub>(P i)\<^esub>\ z)) \ ((z \<^bsub>(?P' i)\<^esub>\ y) = (z \<^bsub>(P i)\<^esub>\ y))" unfolding strict_pref_def by auto qed lemma decisive2: assumes has3A: "hasw [x,y,z] A" and iia: "iia swf A Is" and swf: "SWF swf A Is universal_domain" and wp: "weak_pareto swf A Is universal_domain" and sd: "semidecisive swf A Is {j} x y" shows "decisive swf A Is {j} z y" proof from sd show jIs: "{j} \ Is" by blast fix P assume profileP: "profile A Is P" and jyzP: "\i. i \ {j} \ z \<^bsub>(P i)\<^esub>\ y" from has3A profileP jIs obtain P' where profileP': "profile A Is P'" and jxyzP': "z \<^bsub>(P' j)\<^esub>\ x" "x \<^bsub>(P' j)\<^esub>\ y" and ixyzP': "\i. i \ j \ z \<^bsub>(P' i)\<^esub>\ x \ y \<^bsub>(P' i)\<^esub>\ x \ ((y \<^bsub>(P' i)\<^esub>\ z) = (y \<^bsub>(P i)\<^esub>\ z)) \ ((z \<^bsub>(P' i)\<^esub>\ y) = (z \<^bsub>(P i)\<^esub>\ y))" by - (rule decisive2_witness, blast+) from iia have "\a b. \ a \ {y, z}; b \ {y, z} \ \ (a \<^bsub>(swf P)\<^esub>\ b) = (a \<^bsub>(swf P')\<^esub>\ b)" proof(rule iiaE) from has3A show "{y,z} \ A" by simp next fix i assume iIs: "i \ Is" fix a b assume ab: "a \ {y, z}" "b \ {y, z}" show "(a \<^bsub>(P' i)\<^esub>\ b) = (a \<^bsub>(P i)\<^esub>\ b)" proof(cases "i = j") case False with ab iIs ixyzP' profileP profileP' has3A show ?thesis unfolding profile_def by auto next case True from profileP' jIs jxyzP' have "z \<^bsub>(P' j)\<^esub>\ y" by (auto dest: rpr_less_trans) with True ab iIs jyzP profileP profileP' has3A show ?thesis unfolding profile_def strict_pref_def by auto qed qed (simp_all add: profileP profileP') moreover have "z \<^bsub>(swf P')\<^esub>\ y" proof - from profileP' sd jxyzP' ixyzP' have "x \<^bsub>(swf P')\<^esub>\ y" by (simp add: semidecisive_def) moreover from jxyzP' ixyzP' have "\i. i \ Is \ z \<^bsub>(P' i)\<^esub>\ x" by (case_tac "i=j", auto) with wp profileP' has3A have "z \<^bsub>(swf P')\<^esub>\ x" by (auto dest: weak_paretoD) moreover note SWF_rpr[OF swf] profileP' ultimately show "z \<^bsub>(swf P')\<^esub>\ y" unfolding universal_domain_def by (blast dest: rpr_less_trans) qed ultimately show "z \<^bsub>(swf P)\<^esub>\ y" unfolding strict_pref_def by blast qed text \The following results permute $x$, $y$ and $z$ to show how decisiveness can be obtained from semi-decisiveness in all cases. 
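Each variant instantiates the two base lemmas with a permutation of the alternatives, using d_imp_sd to turn the decisiveness just obtained back into semi-decisiveness whenever the next step needs it.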
Again, quite tedious.\ lemma decisive3: assumes has3A: "hasw [x,y,z] A" and iia: "iia swf A Is" and swf: "SWF swf A Is universal_domain" and wp: "weak_pareto swf A Is universal_domain" and sd: "semidecisive swf A Is {j} x z" shows "decisive swf A Is {j} y z" using has3A decisive2[OF _ iia swf wp sd] by (simp, blast) lemma decisive4: assumes has3A: "hasw [x,y,z] A" and iia: "iia swf A Is" and swf: "SWF swf A Is universal_domain" and wp: "weak_pareto swf A Is universal_domain" and sd: "semidecisive swf A Is {j} y z" shows "decisive swf A Is {j} y x" using has3A decisive1[OF _ iia swf wp sd] by (simp, blast) lemma decisive5: assumes has3A: "hasw [x,y,z] A" and iia: "iia swf A Is" and swf: "SWF swf A Is universal_domain" and wp: "weak_pareto swf A Is universal_domain" and sd: "semidecisive swf A Is {j} x y" shows "decisive swf A Is {j} y x" proof - from sd have "decisive swf A Is {j} x z" by (rule decisive1[OF has3A iia swf wp]) hence "semidecisive swf A Is {j} x z" by (rule d_imp_sd) hence "decisive swf A Is {j} y z" by (rule decisive3[OF has3A iia swf wp]) hence "semidecisive swf A Is {j} y z" by (rule d_imp_sd) thus "decisive swf A Is {j} y x" by (rule decisive4[OF has3A iia swf wp]) qed lemma decisive6: assumes has3A: "hasw [x,y,z] A" and iia: "iia swf A Is" and swf: "SWF swf A Is universal_domain" and wp: "weak_pareto swf A Is universal_domain" and sd: "semidecisive swf A Is {j} y x" shows "decisive swf A Is {j} y z" "decisive swf A Is {j} z x" "decisive swf A Is {j} x y" proof - from has3A have has3A': "hasw [y,x,z] A" by auto show "decisive swf A Is {j} y z" by (rule decisive1[OF has3A' iia swf wp sd]) show "decisive swf A Is {j} z x" by (rule decisive2[OF has3A' iia swf wp sd]) show "decisive swf A Is {j} x y" by (rule decisive5[OF has3A' iia swf wp sd]) qed lemma decisive7: assumes has3A: "hasw [x,y,z] A" and iia: "iia swf A Is" and swf: "SWF swf A Is universal_domain" and wp: "weak_pareto swf A Is universal_domain" and sd: "semidecisive swf A Is {j} x y" shows "decisive swf A Is {j} y z" "decisive swf A Is {j} z x" "decisive swf A Is {j} x y" proof - from sd have "decisive swf A Is {j} y x" by (rule decisive5[OF has3A iia swf wp]) hence "semidecisive swf A Is {j} y x" by (rule d_imp_sd) thus "decisive swf A Is {j} y z" "decisive swf A Is {j} z x" "decisive swf A Is {j} x y" by (rule decisive6[OF has3A iia swf wp])+ qed lemma j_decisive_xy: assumes has3A: "hasw [x,y,z] A" and iia: "iia swf A Is" and swf: "SWF swf A Is universal_domain" and wp: "weak_pareto swf A Is universal_domain" and sd: "semidecisive swf A Is {j} x y" and uv: "hasw [u,v] {x,y,z}" shows "decisive swf A Is {j} u v" using uv decisive1[OF has3A iia swf wp sd] decisive2[OF has3A iia swf wp sd] decisive5[OF has3A iia swf wp sd] decisive7[OF has3A iia swf wp sd] by (simp, blast) lemma j_decisive: assumes has3A: "has 3 A" and iia: "iia swf A Is" and swf: "SWF swf A Is universal_domain" and wp: "weak_pareto swf A Is universal_domain" and xyA: "hasw [x,y] A" and sd: "semidecisive swf A Is {j} x y" and uv: "hasw [u,v] A" shows "decisive swf A Is {j} u v" proof - from has_extend_witness'[OF has3A xyA] obtain z where xyzA: "hasw [x,y,z] A" by auto { assume ux: "u = x" and vy: "v = y" with xyzA iia swf wp sd have ?thesis by (auto intro: j_decisive_xy) } moreover { assume ux: "u = x" and vNEy: "v \ y" with uv xyA iia swf wp sd have ?thesis by(auto intro: j_decisive_xy[of x y]) } moreover { assume uy: "u = y" and vx: "v = x" with xyzA iia swf wp sd have ?thesis by (auto intro: j_decisive_xy) } moreover { assume uy: "u = y" and 
vNEx: "v \ x" with uv xyA iia swf wp sd have ?thesis by (auto intro: j_decisive_xy) } moreover { assume uNExy: "u \ {x,y}" and vx: "v = x" with uv xyA iia swf wp sd have ?thesis by (auto intro: j_decisive_xy[of x y]) } moreover { assume uNExy: "u \ {x,y}" and vy: "v = y" with uv xyA iia swf wp sd have ?thesis by (auto intro: j_decisive_xy[of x y]) } moreover { assume uNExy: "u \ {x,y}" and vNExy: "v \ {x,y}" with uv xyA iia swf wp sd have "decisive swf A Is {j} x u" by (auto intro: j_decisive_xy[where x=x and z=u]) hence sdxu: "semidecisive swf A Is {j} x u" by (rule d_imp_sd) with uNExy vNExy uv xyA iia swf wp have ?thesis by (auto intro: j_decisive_xy[of x]) } ultimately show ?thesis by blast qed text \The first result: if $j$ is semidecisive for some alternatives $u$ and $v$, then they are actually a dictator.\ lemma sd_imp_dictator: assumes has3A: "has 3 A" and iia: "iia swf A Is" and swf: "SWF swf A Is universal_domain" and wp: "weak_pareto swf A Is universal_domain" and uv: "hasw [u,v] A" and sd: "semidecisive swf A Is {j} u v" shows "dictator swf A Is j" proof fix x y assume x: "x \ A" and y: "y \ A" show "decisive swf A Is {j} x y" proof(cases "x = y") case True with sd show "decisive swf A Is {j} x y" by (blast intro: d_refl) next case False with x y iia swf wp has3A uv sd show "decisive swf A Is {j} x y" by (auto intro: j_decisive) qed next from sd show "j \ Is" by blast qed (* **************************************** *) subsection\The Existence of a Semi-decisive Individual\ text\The second half of the proof establishes the existence of a semi-decisive individual. The required witness is essentially an encoding of the Condorcet paradox (aka "the paradox of voting") that shows we get tied up in knots if a certain agent didn't have dictatorial powers.\ lemma sd_exists_witness: assumes has3A: "hasw [x,y,z] A" and Vs: "Is = V1 \ V2 \ V3 \ V1 \ V2 = {} \ V1 \ V3 = {} \ V2 \ V3 = {}" and Is: "Is \ {}" obtains P where "profile A Is P" and "\i \ V1. x \<^bsub>(P i)\<^esub>\ y \ y \<^bsub>(P i)\<^esub>\ z" and "\i \ V2. z \<^bsub>(P i)\<^esub>\ x \ x \<^bsub>(P i)\<^esub>\ y" and "\i \ V3. y \<^bsub>(P i)\<^esub>\ z \ z \<^bsub>(P i)\<^esub>\ x" proof let ?P = "\i. (if i \ V1 then ({ (x, u) | u. u \ A } \ { (y, u) | u. u \ A \ u \ x } \ { (z, u) | u. u \ A \ u \ x \ u \ y }) else if i \ V2 then ({ (z, u) | u. u \ A } \ { (x, u) | u. u \ A \ u \ z } \ { (y, u) | u. u \ A \ u \ x \ u \ z }) else ({ (y, u) | u. u \ A } \ { (z, u) | u. u \ A \ u \ y } \ { (x, u) | u. u \ A \ u \ y \ u \ z })) \ { (u, v) | u v. u \ A - {x,y,z} \ v \ A - {x,y,z}}" show "profile A Is ?P" proof fix i assume iIs: "i \ Is" show "rpr A (?P i)" proof show "complete A (?P i)" by (simp add: complete_def, blast) from has3A iIs show "refl_on A (?P i)" by - (simp, blast) from has3A iIs show "trans (?P i)" by (clarsimp simp add: trans_def) qed next from Is show "Is \ {}" . qed from has3A Vs show "\i \ V1. x \<^bsub>(?P i)\<^esub>\ y \ y \<^bsub>(?P i)\<^esub>\ z" and "\i \ V2. z \<^bsub>(?P i)\<^esub>\ x \ x \<^bsub>(?P i)\<^esub>\ y" and "\i \ V3. y \<^bsub>(?P i)\<^esub>\ z \ z \<^bsub>(?P i)\<^esub>\ x" unfolding strict_pref_def by auto qed text \This proof is unfortunately long. Many of the statements rely on a lot of context, making it difficult to split it up.\ lemma sd_exists: assumes has3A: "has 3 A" and finiteIs: "finite Is" and twoIs: "has 2 Is" and iia: "iia swf A Is" and swf: "SWF swf A Is universal_domain" and wp: "weak_pareto swf A Is universal_domain" shows "\j u v. 
hasw [u,v] A \ semidecisive swf A Is {j} u v" proof - let ?P = "\S. S \ Is \ S \ {} \ (\u v. hasw [u,v] A \ semidecisive swf A Is S u v)" obtain u v where uvA: "hasw [u,v] A" using has_witness_two[OF has3A] by auto \ \The weak pareto requirement implies that the set of all individuals is decisive between any given alternatives.\ hence "decisive swf A Is Is u v" by - (rule, auto intro: weak_paretoD[OF wp]) hence "semidecisive swf A Is Is u v" by (rule d_imp_sd) with uvA twoIs has_suc_notempty[where n=1] nat_2[symmetric] have "?P Is" by auto \ \Obtain a minimally-sized semi-decisive set.\ from ex_has_least_nat[where P="?P" and m="card", OF this] obtain V x y where VIs: "V \ Is" and Vnotempty: "V \ {}" and xyA: "hasw [x,y] A" and Vsd: "semidecisive swf A Is V x y" and Vmin: "\V'. ?P V' \ card V \ card V'" by blast from VIs finiteIs have Vfinite: "finite V" by (rule finite_subset) \ \Show that minimal set contains a single individual.\ from Vfinite Vnotempty have "\j. V = {j}" proof(rule finite_set_singleton_contra) assume Vcard: "1 < card V" then obtain j where jV: "{j} \ V" using has_extend_witness[where xs="[]", OF card_has[where n="card V"]] by auto \ \Split an individual from the "minimal" set.\ let ?V1 = "{j}" let ?V2 = "V - ?V1" let ?V3 = "Is - V" - from jV card_Diff_singleton[OF Vfinite] Vcard + from jV card_Diff_singleton Vcard have V2card: "card ?V2 > 0" "card ?V2 < card V" by auto hence V2notempty: "{} \ ?V2" by auto from jV VIs have jV2V3: "Is = ?V1 \ ?V2 \ ?V3 \ ?V1 \ ?V2 = {} \ ?V1 \ ?V3 = {} \ ?V2 \ ?V3 = {}" by auto \ \Show that that individual is semi-decisive for $x$ over $z$.\ from has_extend_witness'[OF has3A xyA] obtain z where threeDist: "hasw [x,y,z] A" by auto from sd_exists_witness[OF threeDist jV2V3] VIs Vnotempty obtain P where profileP: "profile A Is P" and V1xyzP: "x \<^bsub>(P j)\<^esub>\ y \ y \<^bsub>(P j)\<^esub>\ z" and V2xyzP: "\i \ ?V2. z \<^bsub>(P i)\<^esub>\ x \ x \<^bsub>(P i)\<^esub>\ y" and V3xyzP: "\i \ ?V3. y \<^bsub>(P i)\<^esub>\ z \ z \<^bsub>(P i)\<^esub>\ x" by (simp, blast) have xPz: "x \<^bsub>(swf P)\<^esub>\ z" proof(rule rpr_less_le_trans[where y="y"]) from profileP swf show "rpr A (swf P)" by auto next \ \V2 is semi-decisive, and everyone else opposes their choice. Ergo they prevail.\ show "x \<^bsub>(swf P)\<^esub>\ y" proof - from profileP V3xyzP have "\i \ ?V3. y \<^bsub>(P i)\<^esub>\ x" by (blast dest: rpr_less_trans) with profileP V1xyzP V2xyzP Vsd show ?thesis unfolding semidecisive_def by auto qed next \ \This result is unfortunately quite tortuous.\ from SWF_rpr[OF swf] show "y \<^bsub>(swf P)\<^esub>\ z" proof(rule rpr_less_not[OF _ _ notI]) from threeDist show "hasw [z, y] A" by auto next assume zPy: "z \<^bsub>(swf P)\<^esub>\ y" have "semidecisive swf A Is ?V2 z y" proof from VIs show "V - {j} \ Is" by blast next fix P' assume profileP': "profile A Is P'" and V2yz': "\i. i \ ?V2 \ z \<^bsub>(P' i)\<^esub>\ y" and nV2yz': "\i. i \ Is - ?V2 \ y \<^bsub>(P' i)\<^esub>\ z" from iia have "\a b. \ a \ {y, z}; b \ {y, z} \ \ (a \<^bsub>(swf P)\<^esub>\ b) = (a \<^bsub>(swf P')\<^esub>\ b)" proof(rule iiaE) from threeDist show yzA: "{y,z} \ A" by simp next fix i assume iIs: "i \ Is" fix a b assume ab: "a \ {y, z}" "b \ {y, z}" with VIs profileP V2xyzP have V2yzP: "\i \ ?V2. 
z \<^bsub>(P i)\<^esub>\ y" by (blast dest: rpr_less_trans) show "(a \<^bsub>(P' i)\<^esub>\ b) = (a \<^bsub>(P i)\<^esub>\ b)" proof(cases "i \ ?V2") case True with VIs profileP profileP' ab V2yz' V2yzP threeDist show ?thesis unfolding strict_pref_def profile_def by auto next case False from V1xyzP V3xyzP have "\i \ Is - ?V2. y \<^bsub>(P i)\<^esub>\ z" by auto with iIs False VIs jV profileP profileP' ab nV2yz' threeDist show ?thesis unfolding profile_def strict_pref_def by auto qed qed (simp_all add: profileP profileP') with zPy show "z \<^bsub>(swf P')\<^esub>\ y" unfolding strict_pref_def by blast qed with VIs Vsd Vmin[where V'="?V2"] V2card V2notempty threeDist show False by auto qed (simp add: profileP threeDist) qed have "semidecisive swf A Is ?V1 x z" proof from jV VIs show "{j} \ Is" by blast next \ \Use @{term "iia"} to show the SWF must allow the individual to prevail.\ fix P' assume profileP': "profile A Is P'" and V1yz': "\i. i \ ?V1 \ x \<^bsub>(P' i)\<^esub>\ z" and nV1yz': "\i. i \ Is - ?V1 \ z \<^bsub>(P' i)\<^esub>\ x" from iia have "\a b. \ a \ {x, z}; b \ {x, z} \ \ (a \<^bsub>(swf P)\<^esub>\ b) = (a \<^bsub>(swf P')\<^esub>\ b)" proof(rule iiaE) from threeDist show xzA: "{x,z} \ A" by simp next fix i assume iIs: "i \ Is" fix a b assume ab: "a \ {x, z}" "b \ {x, z}" show "(a \<^bsub>(P' i)\<^esub>\ b) = (a \<^bsub>(P i)\<^esub>\ b)" proof(cases "i \ ?V1") case True with jV VIs profileP V1xyzP have "\i \ ?V1. x \<^bsub>(P i)\<^esub>\ z" by (blast dest: rpr_less_trans) with True jV VIs profileP profileP' ab V1yz' threeDist show ?thesis unfolding strict_pref_def profile_def by auto next case False from V2xyzP V3xyzP have "\i \ Is - ?V1. z \<^bsub>(P i)\<^esub>\ x" by auto with iIs False VIs jV profileP profileP' ab nV1yz' threeDist show ?thesis unfolding strict_pref_def profile_def by auto qed qed (simp_all add: profileP profileP') with xPz show "x \<^bsub>(swf P')\<^esub>\ z" unfolding strict_pref_def by blast qed with jV VIs Vsd Vmin[where V'="?V1"] V2card threeDist show False by auto qed with xyA Vsd show ?thesis by blast qed (* **************************************** *) subsection\Arrow's General Possibility Theorem\ text\ Finally we conclude with the celebrated ``possibility'' result. Note that we assume the set of individuals is finite; \cite{Routley:79} relaxes this with some fancier set theory. Having an infinite set of alternatives doesn't matter, though the result is a bit more plausible if we assume finiteness \cite[p54]{Sen:70a}. \ theorem ArrowGeneralPossibility: assumes has3A: "has 3 A" and finiteIs: "finite Is" and has2Is: "has 2 Is" and iia: "iia swf A Is" and swf: "SWF swf A Is universal_domain" and wp: "weak_pareto swf A Is universal_domain" obtains j where "dictator swf A Is j" using sd_imp_dictator[OF has3A iia swf wp] sd_exists[OF has3A finiteIs has2Is iia swf wp] by blast (*<*) end (*>*)