diff --git a/thys/Category3/CartesianCategory.thy b/thys/Category3/CartesianCategory.thy --- a/thys/Category3/CartesianCategory.thy +++ b/thys/Category3/CartesianCategory.thy @@ -1,2286 +1,2444 @@ (* Title: CartesianCategory Author: Eugene W. Stark , 2020 Maintainer: Eugene W. Stark *) chapter "Cartesian Category" text\ In this chapter, we explore the notion of a ``cartesian category'', which we define to be a category having binary products and a terminal object. We show that every cartesian category extends to an ``elementary cartesian category'', whose definition assumes that specific choices have been made for projections and terminal object. Conversely, the underlying category of an elementary cartesian category is a cartesian category. We also show that cartesian categories are the same thing as categories with finite products. \ theory CartesianCategory -imports Limit SetCat +imports Limit SetCat CategoryWithPullbacks begin section "Category with Binary Products" subsection "Binary Product Diagrams" text \ The ``shape'' of a binary product diagram is a category having two distinct identity arrows and no non-identity arrows. \ locale binary_product_shape begin sublocale concrete_category \UNIV :: bool set\ \\a b. if a = b then {()} else {}\ \\_. ()\ \\_ _ _ _ _. ()\ apply (unfold_locales, auto) apply (meson empty_iff) by (meson empty_iff) abbreviation comp where "comp \ COMP" abbreviation FF where "FF \ MkIde False" abbreviation TT where "TT \ MkIde True" lemma arr_char: shows "arr f \ f = FF \ f = TT" using arr_char by (cases f, simp_all) lemma ide_char: shows "ide f \ f = FF \ f = TT" using ide_char ide_MkIde by (cases f, auto) lemma is_discrete: shows "ide f \ arr f" using arr_char ide_char by simp lemma dom_simp [simp]: assumes "arr f" shows "dom f = f" using assms is_discrete by simp lemma cod_simp [simp]: assumes "arr f" shows "cod f = f" using assms is_discrete by simp lemma seq_char: shows "seq f g \ arr f \ f = g" by auto lemma comp_simp [simp]: assumes "seq f g" shows "comp f g = f" using assms seq_char by fastforce end locale binary_product_diagram = J: binary_product_shape + C: category C for C :: "'c comp" (infixr "\" 55) and a0 :: 'c and a1 :: 'c + assumes is_discrete: "C.ide a0 \ C.ide a1" begin notation J.comp (infixr "\\<^sub>J" 55) fun map where "map J.FF = a0" | "map J.TT = a1" | "map _ = C.null" sublocale diagram J.comp C map proof show "\f. \ J.arr f \ map f = C.null" using J.arr_char map.elims by auto fix f assume f: "J.arr f" show "C.arr (map f)" using f J.arr_char is_discrete C.ideD(1) map.simps(1-2) by metis show "C.dom (map f) = map (J.dom f)" using f J.arr_char J.dom_char is_discrete by force show "C.cod (map f) = map (J.cod f)" using f J.arr_char J.cod_char is_discrete by force next fix f g assume fg: "J.seq g f" show "map (g \\<^sub>J f) = map g \ map f" using fg J.arr_char J.seq_char J.null_char J.not_arr_null is_discrete by (metis (no_types, lifting) C.comp_ide_self J.comp_simp map.simps(1-2)) qed end subsection "Category with Binary Products" text \ A \emph{binary product} in a category @{term C} is a limit of a binary product diagram in @{term C}. \ context binary_product_diagram begin definition mkCone where "mkCone p0 p1 \ \j. 
if j = J.FF then p0 else if j = J.TT then p1 else C.null" abbreviation is_rendered_commutative_by where "is_rendered_commutative_by p0 p1 \ C.seq a0 p0 \ C.seq a1 p1 \ C.dom p0 = C.dom p1" abbreviation has_as_binary_product where "has_as_binary_product p0 p1 \ limit_cone (C.dom p0) (mkCone p0 p1)" lemma cone_mkCone: assumes "is_rendered_commutative_by p0 p1" shows "cone (C.dom p0) (mkCone p0 p1)" proof - interpret E: constant_functor J.comp C \C.dom p0\ using assms by unfold_locales auto show "cone (C.dom p0) (mkCone p0 p1)" using assms mkCone_def J.arr_char E.map_simp is_discrete C.comp_ide_arr C.comp_arr_dom by unfold_locales auto qed lemma is_rendered_commutative_by_cone: assumes "cone a \" shows "is_rendered_commutative_by (\ J.FF) (\ J.TT)" proof - interpret \: cone J.comp C map a \ using assms by auto show ?thesis using is_discrete by simp qed lemma mkCone_cone: assumes "cone a \" shows "mkCone (\ J.FF) (\ J.TT) = \" proof - interpret \: cone J.comp C map a \ using assms by auto interpret mkCone_\: cone J.comp C map \C.dom (\ J.FF)\ \mkCone (\ J.FF) (\ J.TT)\ using assms is_rendered_commutative_by_cone cone_mkCone by blast show ?thesis using mkCone_def \.is_extensional J.ide_char mkCone_def NaturalTransformation.eqI [of J.comp C] \.natural_transformation_axioms mkCone_\.natural_transformation_axioms by fastforce qed + lemma cone_iff_span: + shows "cone (C.dom h) (mkCone h k) \ C.span h k \ C.cod h = a0 \ C.cod k = a1" + using cone_mkCone mkCone_def J.arr_char J.ide_char is_rendered_commutative_by_cone + apply (intro iffI) + apply (metis (no_types, lifting) C.cod_eqI C.comp_ide_arr J.arr.inject is_discrete) + by auto + + lemma cones_map_mkCone_eq_iff: + assumes "is_rendered_commutative_by p0 p1" and "is_rendered_commutative_by p0' p1'" + and "\h : C.dom p0' \ C.dom p0\" + shows "cones_map h (mkCone p0 p1) = mkCone p0' p1' \ p0 \ h = p0' \ p1 \ h = p1'" + proof - + interpret \: cone J.comp C map \C.dom p0\ \mkCone p0 p1\ + using assms(1) cone_mkCone [of p0 p1] by blast + interpret \': cone J.comp C map \C.dom p0'\ \mkCone p0' p1'\ + using assms(2) cone_mkCone [of p0' p1'] by blast + show ?thesis + proof + assume 3: "cones_map h (mkCone p0 p1) = mkCone p0' p1'" + show "p0 \ h = p0' \ p1 \ h = p1'" + proof + show "p0 \ h = p0'" + proof - + have "p0' = cones_map h (mkCone p0 p1) J.FF" + using 3 mkCone_def J.arr_char by simp + also have "... = p0 \ h" + using assms mkCone_def J.arr_char \.cone_axioms by auto + finally show ?thesis by auto + qed + show "p1 \ h = p1'" + proof - + have "p1' = cones_map h (mkCone p0 p1) J.TT" + using 3 mkCone_def J.arr_char by simp + also have "... 
= p1 \ h" + using assms mkCone_def J.arr_char \.cone_axioms by auto + finally show ?thesis by auto + qed + qed + next + assume "p0 \ h = p0' \ p1 \ h = p1'" + thus "cones_map h (mkCone p0 p1) = mkCone p0' p1'" + using assms \.cone_axioms mkCone_def J.arr_char by auto + qed + qed + end locale binary_product_cone = J: binary_product_shape + C: category C + D: binary_product_diagram C f0 f1 + limit_cone J.comp C D.map \C.dom p0\ \D.mkCone p0 p1\ for C :: "'c comp" (infixr "\" 55) and f0 :: 'c and f1 :: 'c and p0 :: 'c and p1 :: 'c begin lemma renders_commutative: shows "D.is_rendered_commutative_by p0 p1" - using cone_axioms D.is_rendered_commutative_by_cone D.mkCone_def \.Ya.Cop_S.arr.simps(1) - by (metis (no_types, lifting)) (* TODO: pretty opaque *) + using cone_axioms D.is_rendered_commutative_by_cone D.mkCone_def D.cone_iff_span + by force lemma is_universal': assumes "D.is_rendered_commutative_by p0' p1'" shows "\!h. \h : C.dom p0' \ C.dom p0\ \ p0 \ h = p0' \ p1 \ h = p1'" proof - have "D.cone (C.dom p0') (D.mkCone p0' p1')" using assms D.cone_mkCone by blast hence "\!h. \h : C.dom p0' \ C.dom p0\ \ D.cones_map h (D.mkCone p0 p1) = D.mkCone p0' p1'" using is_universal by simp moreover have "\h. \h : C.dom p0' \ C.dom p0\ \ D.cones_map h (D.mkCone p0 p1) = D.mkCone p0' p1' \ p0 \ h = p0' \ p1 \ h = p1'" - proof - - fix h - assume h: "\h : C.dom p0' \ C.dom p0\" - show "D.cones_map h (D.mkCone p0 p1) = D.mkCone p0' p1' \ - p0 \ h = p0' \ p1 \ h = p1'" - proof - assume 1: "D.cones_map h (D.mkCone p0 p1) = D.mkCone p0' p1'" - show "p0 \ h = p0' \ p1 \ h = p1'" - proof - show "p0 \ h = p0'" - proof - - have "p0' = D.mkCone p0' p1' J.FF" - using D.mkCone_def J.arr_char by simp - also have "... = D.cones_map h (D.mkCone p0 p1) J.FF" - using 1 by simp - also have "... = p0 \ h" - using h D.mkCone_def J.arr_char cone_\ by auto - finally show ?thesis by auto - qed - show "p1 \ h = p1'" - proof - - have "p1' = D.mkCone p0' p1' J.TT" - using D.mkCone_def J.arr_char by simp - also have "... = D.cones_map h (D.mkCone p0 p1) J.TT" - using 1 by simp - also have "... = p1 \ h" - using h D.mkCone_def J.arr_char cone_\ by auto - finally show ?thesis by auto - qed - qed - next - assume 1: "p0 \ h = p0' \ p1 \ h = p1'" - show "D.cones_map h (D.mkCone p0 p1) = D.mkCone p0' p1'" - using h 1 cone_\ D.mkCone_def by auto - qed - qed + using assms D.cones_map_mkCone_eq_iff [of p0 p1 p0' p1'] renders_commutative + by blast ultimately show ?thesis by blast qed lemma induced_arrowI': assumes "D.is_rendered_commutative_by p0' p1'" shows "\induced_arrow (C.dom p0') (D.mkCone p0' p1') : C.dom p0' \ C.dom p0\" and "p0 \ induced_arrow (C.dom p0') (D.mkCone p0' p1') = p0'" and "p1 \ induced_arrow (C.dom p1') (D.mkCone p0' p1') = p1'" proof - interpret A': constant_functor J.comp C \C.dom p0'\ using assms by (unfold_locales, auto) have cone: "D.cone (C.dom p0') (D.mkCone p0' p1')" using assms D.cone_mkCone [of p0' p1'] by blast show 0: "p0 \ induced_arrow (C.dom p0') (D.mkCone p0' p1') = p0'" proof - have "p0 \ induced_arrow (C.dom p0') (D.mkCone p0' p1') = D.cones_map (induced_arrow (C.dom p0') (D.mkCone p0' p1')) (D.mkCone p0 p1) J.FF" using cone induced_arrowI(1) D.mkCone_def J.arr_char cone_\ by force also have "... 
= p0'" proof - have "D.cones_map (induced_arrow (C.dom p0') (D.mkCone p0' p1')) (D.mkCone p0 p1) = D.mkCone p0' p1'" using cone induced_arrowI by blast thus ?thesis using J.arr_char D.mkCone_def by simp qed finally show ?thesis by auto qed show "p1 \ induced_arrow (C.dom p1') (D.mkCone p0' p1') = p1'" proof - have "p1 \ induced_arrow (C.dom p1') (D.mkCone p0' p1') = D.cones_map (induced_arrow (C.dom p0') (D.mkCone p0' p1')) (D.mkCone p0 p1) J.TT" using assms cone induced_arrowI(1) D.mkCone_def J.arr_char cone_\ by fastforce also have "... = p1'" proof - have "D.cones_map (induced_arrow (C.dom p0') (D.mkCone p0' p1')) (D.mkCone p0 p1) = D.mkCone p0' p1'" using cone induced_arrowI by blast thus ?thesis using J.arr_char D.mkCone_def by simp qed finally show ?thesis by auto qed show "\induced_arrow (C.dom p0') (D.mkCone p0' p1') : C.dom p0' \ C.dom p0\" using 0 cone induced_arrowI by simp qed end context category begin definition has_as_binary_product where "has_as_binary_product a0 a1 p0 p1 \ ide a0 \ ide a1 \ binary_product_diagram.has_as_binary_product C a0 a1 p0 p1" definition has_binary_products where "has_binary_products = (\a0 a1. ide a0 \ ide a1 \ (\p0 p1. has_as_binary_product a0 a1 p0 p1))" + lemma has_as_binary_productI [intro]: + assumes "ide a" and "ide b" + and "\p : c \ a\" and "\q : c \ b\" + and "\x f g. \\f : x \ a\; \g : x \ b\\ \ \!h. \h : x \ c\ \ p \ h = f \ q \ h = g" + shows "has_as_binary_product a b p q" + proof (unfold has_as_binary_product_def, intro conjI) + show "ide a" by fact + show "ide b" by fact + interpret J: binary_product_shape . + interpret D: binary_product_diagram C a b + using assms(1-2) by unfold_locales auto + show "D.has_as_binary_product p q" + proof - + have 2: "D.is_rendered_commutative_by p q" + using assms ide_in_hom by blast + let ?\ = "D.mkCone p q" + interpret \: cone J.comp C D.map c ?\ + using assms(4) D.cone_mkCone 2 by auto + interpret \: limit_cone J.comp C D.map c ?\ + proof + fix x \' + assume \': "D.cone x \'" + interpret \': cone J.comp C D.map x \' + using \' by simp + have 1: "\!h. \h : x \ c\ \ p \ h = \' J.FF \ q \ h = \' J.TT" + proof - + have "\\' J.FF : x \ a\ \ \\' J.TT : x \ b\" + by auto + thus ?thesis + using assms(5) [of "\' J.FF" x "\' J.TT"] by simp + qed + have 3: "D.is_rendered_commutative_by (\' J.FF) (\' J.TT)" + using assms(1-2) by force + obtain h where h: "\h : x \ c\ \ p \ h = \' J.FF \ q \ h = \' J.TT" + using 1 by blast + have 4: "\h : dom (\' (J.MkIde False)) \ dom p\" + using assms(3) h by auto + have "\h : x \ c\ \ D.cones_map h (D.mkCone p q) = \'" + proof (intro conjI) + show "\h : x \ c\" + using h by blast + show "D.cones_map h (D.mkCone p q) = \'" + proof + fix j + show "D.cones_map h (D.mkCone p q) j = \' j" + using h 2 3 4 D.cones_map_mkCone_eq_iff [of p q "\' J.FF" "\' J.TT"] + \.cone_axioms J.is_discrete \'.is_extensional + D.mkCone_def binary_product_shape.ide_char + apply (cases "J.ide j") + apply auto[1] + by auto + qed + qed + moreover have "\h'. \h' : x \ c\ \ D.cones_map h' (D.mkCone p q) = \' \ h' = h" + proof - + fix h' + assume 1: "\h' : x \ c\ \ D.cones_map h' (D.mkCone p q) = \'" + have "\!h. 
\h : x \ c\ \ p \ h = \' J.FF \ q \ h = \' J.TT" + proof - + have "\\' J.FF : x \ a\ \ \\' J.TT : x \ b\" + by auto + thus ?thesis + using h assms(5) [of "\' J.FF" x "\' J.TT"] J.ide_char by auto + qed + moreover have "\h : x \ c\ \ \' J.FF = p \ h \ q \ h = \' J.TT" + using h by simp + moreover have "\h' : x \ c\ \ \' J.FF = p \ h' \ q \ h' = \' J.TT" + using 1 \.cone_axioms D.mkCone_def [of p q] by auto + ultimately show "h' = h" by auto + qed + ultimately show "\!h. \h : x \ c\ \ D.cones_map h (D.mkCone p q) = \'" + by blast + qed + show "D.has_as_binary_product p q" + using assms \.limit_cone_axioms by blast + qed + qed + + lemma has_as_binary_productE [elim]: + assumes "has_as_binary_product a b p q" + and "\\p : dom p \ a\; \q : dom p \ b\; + \x f g. \\f : x \ a\; \g : x \ b\\ \ \!h. p \ h = f \ q \ h = g\ \ T" + shows T + proof - + interpret J: binary_product_shape . + interpret D: binary_product_diagram C a b + using assms(1) has_as_binary_product_def + by (simp add: binary_product_diagram.intro binary_product_diagram_axioms.intro + category_axioms) + have 1: "\h k. span h k \ cod h = a \ cod k = b \ D.cone (dom h) (D.mkCone h k)" + using D.cone_iff_span by presburger + let ?\ = "D.mkCone p q" + interpret \: limit_cone J.comp C D.map \dom p\ ?\ + using assms(1) has_as_binary_product_def D.cone_mkCone by blast + have span: "span p q" + using 1 \.cone_axioms by blast + moreover have "\p : dom p \ a\ \ \q : dom p \ b\" + using span \.preserves_hom \.cone_axioms binary_product_shape.arr_char + by (metis D.cone_iff_span arr_iff_in_hom) + moreover have "\x f g. \\f : x \ a\; \g : x \ b\\ \ \!l. p \ l = f \ q \ l = g" + proof - + fix x f g + assume f: "\f : x \ a\" and g: "\g : x \ b\" + let ?\' = "D.mkCone f g" + interpret \': cone J.comp C D.map x ?\' + using 1 f g by blast + have 3: "\!l. \l : x \ dom p\ \ D.cones_map l ?\ = ?\'" + using 1 f g \.is_universal [of x "D.mkCone f g"] \'.cone_axioms by fastforce + obtain l where l: "\l : x \ dom p\ \ D.cones_map l ?\ = ?\'" + using 3 by blast + have "p \ l = f \ q \ l = g" + proof + have "p \ l = ?\ J.FF \ l" + using D.mkCone_def by presburger + also have "... = D.cones_map l ?\ J.FF" + using \.cone_axioms + apply simp + using l by fastforce + also have "... = f" + using D.mkCone_def l by presburger + finally show "p \ l = f" by blast + have "q \ l = ?\ J.TT \ l" + using D.mkCone_def by simp + also have "... = D.cones_map l ?\ J.TT" + using \.cone_axioms + apply simp + using l by fastforce + also have "... = g" + using D.mkCone_def l by simp + finally show "q \ l = g" by blast + qed + moreover have "\l'. p \ l' = f \ q \ l' = g\ l' = l" + proof - + fix l' + assume 1: "p \ l' = f \ q \ l' = g" + have 2: "\l' : x \ dom p\" + using 1 f by blast + moreover have "D.cones_map l' ?\ = ?\'" + using 1 2 D.cones_map_mkCone_eq_iff [of p q f g l'] + by (metis (no_types, lifting) f g \\p : dom p \ a\ \ \q : dom p \ b\\ + comp_cod_arr in_homE) + ultimately show "l' = l" + using l \.is_universal \'.cone_axioms by blast + qed + ultimately show "\!l. p \ l = f \ q \ l = g" by blast + qed + ultimately show T + using assms(2) by simp + qed + end locale category_with_binary_products = category + assumes has_binary_products: has_binary_products subsection "Elementary Category with Binary Products" text \ An \emph{elementary category with binary products} is a category equipped with a specific way of mapping each pair of objects \a\ and \b\ to a pair of arrows \\\<^sub>1[a, b]\ and \\\<^sub>0[a, b]\ that comprise a universal span. 
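Concretely, writing $\pi_1$ and $\pi_0$ for \pr1\ and \pr0\ and $\cdot$ for composition, the universality packaged by the \universal\ axiom assumed below can be restated informally as follows: for every span of arrows \f\ and \g\ there is a unique arrow \l\ such that
$$\pi_1[\mathrm{cod}\,f,\, \mathrm{cod}\,g] \cdot l = f \qquad\text{and}\qquad \pi_0[\mathrm{cod}\,f,\, \mathrm{cod}\,g] \cdot l = g.$$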
It is useful to assume that the mappings that produce \\\<^sub>1[a, b]\ and \\\<^sub>0[a, b]\ from \a\ and \b\ are extensional; that is, if either \a\ or \b\ is not an identity, then \\\<^sub>1[a, b]\ and \\\<^sub>0[a, b]\ are \null\. \ locale elementary_category_with_binary_products = category C for C :: "'a comp" (infixr "\" 55) and pr0 :: "'a \ 'a \ 'a" ("\\<^sub>0[_, _]") and pr1 :: "'a \ 'a \ 'a" ("\\<^sub>1[_, _]") + assumes pr0_ext: "\ (ide a \ ide b) \ \\<^sub>0[a, b] = null" and pr1_ext: "\ (ide a \ ide b) \ \\<^sub>1[a, b] = null" and span_pr: "\ ide a; ide b \ \ span \\<^sub>1[a, b] \\<^sub>0[a, b]" and cod_pr0: "\ ide a; ide b \ \ cod \\<^sub>0[a, b] = b" and cod_pr1: "\ ide a; ide b \ \ cod \\<^sub>1[a, b] = a" and universal: "span f g \ \!l. \\<^sub>1[cod f, cod g] \ l = f \ \\<^sub>0[cod f, cod g] \ l = g" begin lemma pr0_in_hom': assumes "ide a" and "ide b" shows "\\\<^sub>0[a, b] : dom \\<^sub>0[a, b] \ b\" using assms span_pr cod_pr0 by auto lemma pr1_in_hom': assumes "ide a" and "ide b" shows "\\\<^sub>1[a, b] : dom \\<^sub>0[a, b] \ a\" using assms span_pr cod_pr1 by auto text \ We introduce a notation for tupling, which denotes the arrow into a product that is induced by a span. \ definition tuple ("\_, _\") where "\f, g\ \ if span f g then THE l. \\<^sub>1[cod f, cod g] \ l = f \ \\<^sub>0[cod f, cod g] \ l = g else null" text \ The following defines product of arrows (not just of objects). It will take a little while before we can prove that it is functorial, but for right now it is nice to have it as a notation for the apex of a product cone. We have to go through some slightly unnatural contortions in the development here, though, to avoid having to introduce a separate preliminary notation just for the product of objects. \ (* TODO: I want to use \ but it has already been commandeered for product types. *) definition prod (infixr "\" 51) where "f \ g \ \f \ \\<^sub>1[dom f, dom g], g \ \\<^sub>0[dom f, dom g]\" lemma seq_pr_tuple: assumes "span f g" shows "seq \\<^sub>0[cod f, cod g] \f, g\" proof - have "\\<^sub>0[cod f, cod g] \ \f, g\ = g" unfolding tuple_def using assms universal theI [of "\l. \\<^sub>1[cod f, cod g] \ l = f \ \\<^sub>0[cod f, cod g] \ l = g"] by simp meson thus ?thesis using assms by simp qed lemma tuple_pr_arr: assumes "ide a" and "ide b" and "seq \\<^sub>0[a, b] h" shows "\\\<^sub>1[a, b] \ h, \\<^sub>0[a, b] \ h\ = h" unfolding tuple_def using assms span_pr cod_pr0 cod_pr1 universal [of "\\<^sub>1[a, b] \ h" "\\<^sub>0[a, b] \ h"] theI_unique [of "\l. \\<^sub>1[a, b] \ l = \\<^sub>1[a, b] \ h \ \\<^sub>0[a, b] \ l = \\<^sub>0[a, b] \ h" h] by auto lemma pr_tuple [simp]: assumes "span f g" and "cod f = a" and "cod g = b" shows "\\<^sub>1[a, b] \ \f, g\ = f" and "\\<^sub>0[a, b] \ \f, g\ = g" proof - have 1: "\\<^sub>1[a, b] \ \f, g\ = f \ \\<^sub>0[a, b] \ \f, g\ = g" unfolding tuple_def using assms universal theI [of "\l. \\<^sub>1[a, b] \ l = f \ \\<^sub>0[a, b] \ l = g"] by simp meson show "\\<^sub>1[a, b] \ \f, g\ = f" using 1 by simp show "\\<^sub>0[a, b] \ \f, g\ = g" using 1 by simp qed lemma cod_tuple: assumes "span f g" shows "cod \f, g\ = cod f \ cod g" proof - have "cod f \ cod g = \\\<^sub>1[cod f, cod g], \\<^sub>0[cod f, cod g]\" unfolding prod_def using assms comp_cod_arr span_pr cod_pr0 cod_pr1 by simp also have "... = \\\<^sub>1[cod f, cod g] \ dom \\<^sub>0[cod f, cod g], \\<^sub>0[cod f, cod g] \ dom \\<^sub>0[cod f, cod g]\" using assms span_pr comp_arr_dom by simp also have "... 
= dom \\<^sub>0[cod f, cod g]" using assms tuple_pr_arr span_pr by simp also have "... = cod \f, g\" using assms seq_pr_tuple by blast finally show ?thesis by simp qed lemma tuple_in_hom [intro]: assumes "\f : a \ b\" and "\g : a \ c\" shows "\\f, g\ : a \ b \ c\" using assms pr_tuple dom_comp cod_tuple apply (elim in_homE, intro in_homI) apply (metis seqE) by metis+ lemma tuple_in_hom' [simp]: assumes "arr f" and "dom f = a" and "cod f = b" and "arr g" and "dom g = a" and "cod g = c" shows "\\f, g\ : a \ b \ c\" using assms by auto lemma tuple_ext: assumes "\ span f g" shows "\f, g\ = null" unfolding tuple_def by (simp add: assms) lemma tuple_simps [simp]: assumes "span f g" shows "arr \f, g\" and "dom \f, g\ = dom f" and "cod \f, g\ = cod f \ cod g" proof - show "arr \f, g\" using assms tuple_in_hom by blast show "dom \f, g\ = dom f" using assms tuple_in_hom by (metis dom_comp pr_tuple(1)) show "cod \f, g\ = cod f \ cod g" using assms cod_tuple by auto qed lemma tuple_pr [simp]: assumes "ide a" and "ide b" shows "\\\<^sub>1[a, b], \\<^sub>0[a, b]\ = a \ b" proof - have 1: "dom \\<^sub>0[a, b] = a \ b" using assms seq_pr_tuple cod_tuple [of "\\<^sub>1[a, b]" "\\<^sub>0[a, b]"] span_pr pr0_in_hom' pr1_in_hom' by (metis cod_pr0 cod_pr1 seqE) hence "\\\<^sub>1[a, b], \\<^sub>0[a, b]\ = \\\<^sub>1[a, b] \ (a \ b), \\<^sub>0[a, b] \ (a \ b)\" using assms pr0_in_hom' pr1_in_hom' comp_arr_dom span_pr by simp thus ?thesis using assms 1 tuple_pr_arr span_pr by (metis comp_arr_dom) qed lemma pr_in_hom [intro, simp]: assumes "ide a" and "ide b" shows "\\\<^sub>0[a, b] : a \ b \ b\" and "\\\<^sub>1[a, b] : a \ b \ a\" proof - show 0: "\\\<^sub>0[a, b] : a \ b \ b\" using assms pr0_in_hom' seq_pr_tuple [of "\\<^sub>1[a, b]" "\\<^sub>0[a, b]"] cod_tuple [of "\\<^sub>1[a, b]" "\\<^sub>0[a, b]"] span_pr cod_pr0 cod_pr1 by (intro in_homI, auto) show "\\\<^sub>1[a, b] : a \ b \ a\" using assms 0 span_pr pr1_in_hom' by fastforce qed lemma pr_simps [simp]: assumes "ide a" and "ide b" shows "arr \\<^sub>0[a, b]" and "dom \\<^sub>0[a, b] = a \ b" and "cod \\<^sub>0[a, b] = b" and "arr \\<^sub>1[a, b]" and "dom \\<^sub>1[a, b] = a \ b" and "cod \\<^sub>1[a, b] = a" using assms pr_in_hom by blast+ lemma arr_pr0_iff [iff]: shows "arr \\<^sub>0[a, b] \ ide a \ ide b" proof show "ide a \ ide b \ arr \\<^sub>0[a, b]" using pr_in_hom by auto show "arr \\<^sub>0[a, b] \ ide a \ ide b" using pr0_ext not_arr_null by metis qed lemma arr_pr1_iff [iff]: shows "arr \\<^sub>1[a, b] \ ide a \ ide b" proof show "ide a \ ide b \ arr \\<^sub>1[a, b]" using pr_in_hom by auto show "arr \\<^sub>1[a, b] \ ide a \ ide b" using pr1_ext not_arr_null by metis qed lemma pr_joint_monic: assumes "seq \\<^sub>0[a, b] h" and "\\<^sub>0[a, b] \ h = \\<^sub>0[a, b] \ h'" and "\\<^sub>1[a, b] \ h = \\<^sub>1[a, b] \ h'" shows "h = h'" using assms by (metis arr_pr0_iff seqE tuple_pr_arr) lemma comp_tuple_arr [simp]: assumes "span f g" and "arr h" and "dom f = cod h" shows "\f, g\ \ h = \f \ h, g \ h\" proof (intro pr_joint_monic [where h = "\f, g\ \ h"]) show "seq \\<^sub>0[cod f, cod g] (\f, g\ \ h)" using assms by fastforce show "\\<^sub>0[cod f, cod g] \ \f, g\ \ h = \\<^sub>0[cod f, cod g] \ \f \ h, g \ h\" proof - have "\\<^sub>0[cod f, cod g] \ \f, g\ \ h = (\\<^sub>0[cod f, cod g] \ \f, g\) \ h" using comp_assoc by simp thus ?thesis using assms by simp qed show "\\<^sub>1[cod f, cod g] \ \f, g\ \ h = \\<^sub>1[cod f, cod g] \ \f \ h, g \ h\" proof - have "\\<^sub>1[cod f, cod g] \ \f, g\ \ h = (\\<^sub>1[cod f, cod g] \ \f, g\) \ h" using 
comp_assoc by simp thus ?thesis using assms by simp qed qed lemma ide_prod [intro, simp]: assumes "ide a" and "ide b" shows "ide (a \ b)" using assms pr_simps ide_dom [of "\\<^sub>0[a, b]"] by simp lemma prod_in_hom [intro]: assumes "\f : a \ c\" and "\g : b \ d\" shows "\f \ g : a \ b \ c \ d\" using assms prod_def by fastforce lemma prod_in_hom' [simp]: assumes "arr f" and "dom f = a" and "cod f = c" and "arr g" and "dom g = b" and "cod g = d" shows "\f \ g : a \ b \ c \ d\" using assms by blast lemma prod_simps [simp]: assumes "arr f0" and "arr f1" shows "arr (f0 \ f1)" and "dom (f0 \ f1) = dom f0 \ dom f1" and "cod (f0 \ f1) = cod f0 \ cod f1" using assms prod_in_hom by blast+ end subsection "Agreement between the Definitions" text \ We now show that a category with binary products extends (by making a choice) to an elementary category with binary products, and that the underlying category of an elementary category with binary products is a category with binary products. \ context category_with_binary_products begin definition pr1 where "pr1 a b \ if ide a \ ide b then fst (SOME x. has_as_binary_product a b (fst x) (snd x)) else null" definition pr0 where "pr0 a b \ if ide a \ ide b then snd (SOME x. has_as_binary_product a b (fst x) (snd x)) else null" lemma pr_yields_binary_product: assumes "ide a" and "ide b" shows "has_as_binary_product a b (pr1 a b) (pr0 a b)" proof - have "\x. has_as_binary_product a b (fst x) (snd x)" using assms has_binary_products has_binary_products_def has_as_binary_product_def by simp thus ?thesis using assms has_binary_products has_binary_products_def pr0_def pr1_def someI_ex [of "\x. has_as_binary_product a b (fst x) (snd x)"] by simp qed interpretation elementary_category_with_binary_products C pr0 pr1 proof show "\a b. \ (ide a \ ide b) \ pr0 a b = null" using pr0_def by auto show "\a b. \ (ide a \ ide b) \ pr1 a b = null" using pr1_def by auto fix a b assume a: "ide a" and b: "ide b" interpret J: binary_product_shape . interpret D: binary_product_diagram C a b using a b by unfold_locales auto let ?\ = "D.mkCone (pr1 a b) (pr0 a b)" interpret \: limit_cone J.comp C D.map \dom (pr1 a b)\ ?\ using a b pr_yields_binary_product by (simp add: has_as_binary_product_def) have 1: "pr1 a b = ?\ J.FF \ pr0 a b = ?\ J.TT" using D.mkCone_def by simp show "span (pr1 a b) (pr0 a b)" using 1 \.preserves_reflects_arr J.seqE J.arr_char J.seq_char J.is_category D.is_rendered_commutative_by_cone \.cone_axioms by metis show "cod (pr1 a b) = a" using 1 \.preserves_cod [of J.FF] J.cod_char J.arr_char by auto show "cod (pr0 a b) = b" using 1 \.preserves_cod [of J.TT] J.cod_char J.arr_char by auto next fix f g assume fg: "span f g" show "\!l. pr1 (cod f) (cod g) \ l = f \ pr0 (cod f) (cod g) \ l = g" proof - interpret J: binary_product_shape . interpret D: binary_product_diagram C \cod f\ \cod g\ using fg by unfold_locales auto let ?\ = "D.mkCone (pr1 (cod f) (cod g)) (pr0 (cod f) (cod g))" interpret \: limit_cone J.comp C D.map \dom (pr1 (cod f) (cod g))\ ?\ using fg pr_yields_binary_product [of "cod f" "cod g"] has_as_binary_product_def by simp interpret \: binary_product_cone C \cod f\ \cod g\ \pr1 (cod f) (cod g)\ \pr0 (cod f) (cod g)\ .. have 1: "pr1 (cod f) (cod g) = ?\ J.FF \ pr0 (cod f) (cod g) = ?\ J.TT" using D.mkCone_def by simp show "\!l. pr1 (cod f) (cod g) \ l = f \ pr0 (cod f) (cod g) \ l = g" proof - have "\!l. \l : dom f \ dom (pr1 (cod f) (cod g))\ \ pr1 (cod f) (cod g) \ l = f \ pr0 (cod f) (cod g) \ l = g" using fg \.is_universal' by simp moreover have "\l. 
pr1 (cod f) (cod g) \ l = f \ \l : dom f \ dom (pr1 (cod f) (cod g))\" using fg dom_comp in_homI seqE seqI by metis ultimately show ?thesis by auto qed qed qed proposition extends_to_elementary_category_with_binary_products: shows "elementary_category_with_binary_products C pr0 pr1" .. end context elementary_category_with_binary_products begin interpretation category_with_binary_products C proof show "has_binary_products" - proof (unfold has_binary_products_def) - have "\a b. ide a \ ide b \ \p0 p1. has_as_binary_product a b p0 p1" + proof (unfold has_binary_products_def, intro allI impI) + show "\a b. ide a \ ide b \ \p0 p1. has_as_binary_product a b p0 p1" proof - fix a b assume ab: "ide a \ ide b" - interpret J: binary_product_shape . - interpret D: binary_product_diagram C a b - using ab by unfold_locales auto - have 2: "D.is_rendered_commutative_by \\<^sub>1[a, b] \\<^sub>0[a, b]" - using ab by simp - let ?\ = "D.mkCone \\<^sub>1[a, b] \\<^sub>0[a, b]" - interpret \: cone J.comp C D.map \dom \\<^sub>1[a, b]\ ?\ - using D.cone_mkCone 2 by auto - interpret \: limit_cone J.comp C D.map \dom \\<^sub>1[a, b]\ ?\ + have "has_as_binary_product a b \\<^sub>1[a, b] \\<^sub>0[a, b]" proof - fix a' \' - assume \': "D.cone a' \'" - interpret \': cone J.comp C D.map a' \' - using \' by simp - show "\!h. \h : a' \ dom \\<^sub>1[a, b]\ \ - D.cones_map h (D.mkCone \\<^sub>1[a, b] \\<^sub>0[a, b]) = \'" - proof - let ?h = "\\' J.FF, \' J.TT\" - show h': "\?h : a' \ dom \\<^sub>1[a, b]\ \ - D.cones_map ?h (D.mkCone \\<^sub>1[a, b] \\<^sub>0[a, b]) = \'" - proof - show h: "\?h : a' \ dom \\<^sub>1[a, b]\" - using ab tuple_in_hom J.ide_char by fastforce - show "D.cones_map ?h (D.mkCone \\<^sub>1[a, b] \\<^sub>0[a, b]) = \'" - proof - - interpret \'h: cone J.comp C D.map a' - \D.cones_map ?h (D.mkCone \\<^sub>1[a, b] \\<^sub>0[a, b])\ - proof - - have "D.mkCone \\<^sub>1[a, b] \\<^sub>0[a, b] \ D.cones (cod \\' J.FF, \' J.TT\)" - using ab h D.cone_mkCone D.is_rendered_commutative_by_cone - \.cone_axioms - by auto - hence "D.cones_map ?h (D.mkCone \\<^sub>1[a, b] \\<^sub>0[a, b]) \ D.cones a'" - using ab h D.cones_map_mapsto by blast - thus "D.cone a' (D.cones_map ?h (D.mkCone \\<^sub>1[a, b] \\<^sub>0[a, b]))" - by simp - qed - show ?thesis - proof - - have "\j. J.ide j \ D.cones_map ?h (D.mkCone \\<^sub>1[a, b] \\<^sub>0[a, b]) j = \' j" - using ab h J.ide_char D.mkCone_def \.cone_axioms by auto - thus ?thesis - using NaturalTransformation.eqI - \'.natural_transformation_axioms \'h.natural_transformation_axioms - by blast - qed - qed - qed - show "\h. \h : a' \ dom \\<^sub>1[a, b]\ \ - D.cones_map h (D.mkCone \\<^sub>1[a, b] \\<^sub>0[a, b]) = \' \ h = ?h" - proof - - fix h - assume 1: "\h : a' \ dom \\<^sub>1[a, b]\ \ - D.cones_map h (D.mkCone \\<^sub>1[a, b] \\<^sub>0[a, b]) = \'" - hence "cod h = dom \\<^sub>1[a, b]" by auto - show "h = ?h" - using 1 ab \.cone_axioms D.mkCone_def h' pr_joint_monic [of a b h ?h] - by auto - qed + show "ide a" and "ide b" and "\\\<^sub>1[a, b] : a \ b \ a\" and "\\\<^sub>0[a, b] : a \ b \ b\" + using ab by auto + show "\x f g. \\f : x \ a\; \g : x \ b\\ + \ \!h. \h : x \ a \ b\ \ \\<^sub>1[a, b] \ h = f \ \\<^sub>0[a, b] \ h = g" + proof - + fix x f g + assume f: "\f : x \ a\" and g: "\g : x \ b\" + show "\!h. 
\h : x \ a \ b\ \ \\<^sub>1[a, b] \ h = f \ \\<^sub>0[a, b] \ h = g" + using ab f g tuple_pr_arr pr_tuple [of f g a b] tuple_in_hom' + by (metis in_homE) qed qed - have "has_as_binary_product a b \\<^sub>1[a, b] \\<^sub>0[a, b]" - using ab has_as_binary_product_def \.limit_cone_axioms by blast - thus "\p0 p1. has_as_binary_product a b p0 p1" - by blast + thus "\p0 p1. has_as_binary_product a b p0 p1" by blast qed - thus "\a b. ide a \ ide b \ (\p0 p1. has_as_binary_product a b p0 p1)" - by simp qed qed proposition is_category_with_binary_products: shows "category_with_binary_products C" .. end subsection "Further Properties" context elementary_category_with_binary_products begin lemma interchange: assumes "seq h f" and "seq k g" shows "(h \ k) \ (f \ g) = h \ f \ k \ g" using assms prod_def comp_tuple_arr comp_assoc by fastforce lemma pr_naturality [simp]: assumes "arr g" and "dom g = b" and "cod g = d" and "arr f" and "dom f = a" and "cod f = c" shows "\\<^sub>0[c, d] \ (f \ g) = g \ \\<^sub>0[a, b]" and "\\<^sub>1[c, d] \ (f \ g) = f \ \\<^sub>1[a, b]" using assms prod_def by fastforce+ abbreviation dup ("\[_]") where "\[f] \ \f, f\" lemma dup_in_hom [intro, simp]: assumes "\f : a \ b\" shows "\\[f] : a \ b \ b\" using assms by fastforce lemma dup_simps [simp]: assumes "arr f" shows "arr \[f]" and "dom \[f] = dom f" and "cod \[f] = cod f \ cod f" using assms dup_in_hom by auto lemma dup_naturality: assumes "\f : a \ b\" shows "\[b] \ f = (f \ f) \ \[a]" using assms prod_def comp_arr_dom comp_cod_arr comp_tuple_arr comp_assoc by fastforce lemma pr_dup [simp]: assumes "ide a" shows "\\<^sub>0[a, a] \ \[a] = a" and "\\<^sub>1[a, a] \ \[a] = a" using assms by simp_all lemma prod_tuple: assumes "span f g" and "seq h f" and "seq k g" shows "(h \ k) \ \f, g\ = \h \ f, k \ g\" using assms prod_def comp_assoc comp_tuple_arr by fastforce lemma tuple_eqI: assumes "seq \\<^sub>0[b, c] f" and "seq \\<^sub>1[b, c] f" and "\\<^sub>0[b, c] \ f = f0" and "\\<^sub>1[b, c] \ f = f1" shows "f = \f1, f0\" using assms pr_joint_monic [of b c "\f1, f0\" f] pr_tuple by auto definition assoc ("\[_, _, _]") where "\[a, b, c] \ \\\<^sub>1[a, b] \ \\<^sub>1[a \ b, c], \\\<^sub>0[a, b] \ \\<^sub>1[a \ b, c], \\<^sub>0[a \ b, c]\\" definition assoc' ("\\<^sup>-\<^sup>1[_, _, _]") where "\\<^sup>-\<^sup>1[a, b, c] \ \\\\<^sub>1[a, b \ c], \\<^sub>1[b, c] \ \\<^sub>0[a, b \ c]\, \\<^sub>0[b, c] \ \\<^sub>0[a, b \ c]\" lemma assoc_in_hom [intro]: assumes "ide a" and "ide b" and "ide c" shows "\\[a, b, c] : (a \ b) \ c \ a \ (b \ c)\" using assms assoc_def by auto lemma assoc_simps [simp]: assumes "ide a" and "ide b" and "ide c" shows "arr \[a, b, c]" and "dom \[a, b, c] = (a \ b) \ c" and "cod \[a, b, c] = a \ (b \ c)" using assms assoc_in_hom by auto lemma assoc'_in_hom [intro]: assumes "ide a" and "ide b" and "ide c" shows "\\\<^sup>-\<^sup>1[a, b, c] : a \ (b \ c) \ (a \ b) \ c\" using assms assoc'_def by auto lemma assoc'_simps [simp]: assumes "ide a" and "ide b" and "ide c" shows "arr \\<^sup>-\<^sup>1[a, b, c]" and "dom \\<^sup>-\<^sup>1[a, b, c] = a \ (b \ c)" and "cod \\<^sup>-\<^sup>1[a, b, c] = (a \ b) \ c" using assms assoc'_in_hom by auto lemma assoc_naturality: assumes "\f0 : a0 \ b0\" and "\f1 : a1 \ b1\" and "\f2 : a2 \ b2\" shows "\[b0, b1, b2] \ ((f0 \ f1) \ f2) = (f0 \ (f1 \ f2)) \ \[a0, a1, a2]" proof - have "\\<^sub>0[b0, b1 \ b2] \ \[b0, b1, b2] \ ((f0 \ f1) \ f2) = \\<^sub>0[b0, b1 \ b2] \ (f0 \ (f1 \ f2)) \ \[a0, a1, a2]" proof - have "\\<^sub>0[b0, b1 \ b2] \ \[b0, b1, b2] \ ((f0 \ f1) \ f2) = 
(\\<^sub>0[b0, b1 \ b2] \ \[b0, b1, b2]) \ ((f0 \ f1) \ f2)" using comp_assoc by simp also have "... = \\\<^sub>0[b0, b1] \ \\<^sub>1[b0 \ b1, b2], \\<^sub>0[b0 \ b1, b2]\ \ ((f0 \ f1) \ f2)" using assms assoc_def by fastforce also have "... = \(\\<^sub>0[b0, b1] \ \\<^sub>1[b0 \ b1, b2]) \ ((f0 \ f1) \ f2), \\<^sub>0[b0 \ b1, b2] \ ((f0 \ f1) \ f2)\" using assms comp_tuple_arr by fastforce also have "... = \(\\<^sub>0[b0, b1] \ (f0 \ f1)) \ \\<^sub>1[a0 \ a1, a2], f2 \ \\<^sub>0[a0 \ a1, a2]\" using assms comp_assoc by fastforce also have "... = \f1 \ \\<^sub>0[a0, a1] \ \\<^sub>1[a0 \ a1, a2], f2 \ \\<^sub>0[a0 \ a1, a2]\" using assms comp_assoc by (metis in_homE pr_naturality(1)) also have "... = \\<^sub>0[b0, b1 \ b2] \ (f0 \ (f1 \ f2)) \ \[a0, a1, a2]" using assms comp_assoc assoc_def prod_tuple by fastforce finally show ?thesis by blast qed moreover have "\\<^sub>1[b0, b1 \ b2] \ \[b0, b1, b2] \ ((f0 \ f1) \ f2) = \\<^sub>1[b0, b1 \ b2] \ (f0 \ (f1 \ f2)) \ \[a0, a1, a2]" proof - have "\\<^sub>1[b0, b1 \ b2] \ \[b0, b1, b2] \ ((f0 \ f1) \ f2) = (\\<^sub>1[b0, b1 \ b2] \ \[b0, b1, b2]) \ ((f0 \ f1) \ f2)" using comp_assoc by simp also have "... = (\\<^sub>1[b0, b1] \ \\<^sub>1[b0 \ b1, b2]) \ ((f0 \ f1) \ f2)" using assms assoc_def by fastforce also have "... = (\\<^sub>1[b0, b1] \ (f0 \ f1)) \ \\<^sub>1[a0 \ a1, a2]" using assms comp_assoc by fastforce also have "... = f0 \ \\<^sub>1[a0, a1] \ \\<^sub>1[a0 \ a1, a2]" using assms comp_assoc by (metis in_homE pr_naturality(2)) also have "... = \\<^sub>1[b0, b1 \ b2] \ (f0 \ (f1 \ f2)) \ \[a0, a1, a2]" proof - have "\\<^sub>1[b0, b1 \ b2] \ (f0 \ (f1 \ f2)) \ \[a0, a1, a2] = (\\<^sub>1[b0, b1 \ b2] \ (f0 \ (f1 \ f2))) \ \[a0, a1, a2]" using comp_assoc by simp also have "... = f0 \ \\<^sub>1[a0, a1 \ a2] \ \[a0, a1, a2]" using assms comp_assoc by fastforce also have "... = f0 \ \\<^sub>1[a0, a1] \ \\<^sub>1[a0 \ a1, a2]" using assms assoc_def by fastforce finally show ?thesis by simp qed finally show ?thesis by blast qed ultimately show ?thesis using assms pr_joint_monic [of b0 "b1 \ b2" "\[b0, b1, b2] \ ((f0 \ f1) \ f2)" "(f0 \ (f1 \ f2)) \ \[a0, a1, a2]"] by fastforce qed lemma pentagon: assumes "ide a" and "ide b" and "ide c" and "ide d" shows "((a \ \[b, c, d]) \ \[a, b \ c, d]) \ (\[a, b, c] \ d) = \[a, b, c \ d] \ \[a \ b, c, d]" proof (intro pr_joint_monic [where h = "((a \ \[b, c, d]) \ \[a, b \ c, d]) \ (\[a, b, c] \ d)" and h' = "\[a, b, c \ d] \ \[a \ b, c, d]"]) show "seq \\<^sub>0[a, b \ (c \ d)] (((a \ \[b, c, d]) \ \[a, b \ c, d]) \ (\[a, b, c] \ d))" using assms by simp show "\\<^sub>1[a, b \ c \ d] \ ((a \ \[b, c, d]) \ \[a, b \ c, d]) \ (\[a, b, c] \ d) = \\<^sub>1[a, b \ c \ d] \ \[a, b, c \ d] \ \[a \ b, c, d]" proof - have "\\<^sub>1[a, b \ c \ d] \ ((a \ \[b, c, d]) \ \[a, b \ c, d]) \ (\[a, b, c] \ d) = ((\\<^sub>1[a, b \ c \ d] \ (a \ \[b, c, d])) \ \[a, b \ c, d]) \ (\[a, b, c] \ d)" using comp_assoc by simp also have "... = (\\<^sub>1[a, (b \ c) \ d] \ \[a, b \ c, d]) \ (\[a, b, c] \ d)" using assms pr_naturality(2) comp_cod_arr by force also have "... = \\<^sub>1[a, b \ c] \ \\<^sub>1[a \ b \ c, d] \ (\[a, b, c] \ d)" using assms assoc_def comp_assoc by simp also have "... = (\\<^sub>1[a, b \ c] \ \[a, b, c]) \ \\<^sub>1[(a \ b) \ c, d]" using assms pr_naturality(2) comp_assoc by fastforce also have "... 
= \\<^sub>1[a, b] \ \\<^sub>1[a \ b, c] \ \\<^sub>1[(a \ b) \ c, d]" using assms assoc_def comp_assoc by simp finally have "\\<^sub>1[a, b \ c \ d] \ ((a \ \[b, c, d]) \ \[a, b \ c, d]) \ (\[a, b, c] \ d) = \\<^sub>1[a, b] \ \\<^sub>1[a \ b, c] \ \\<^sub>1[(a \ b) \ c, d]" by blast also have "... = \\<^sub>1[a, b \ c \ d] \ \[a, b, c \ d] \ \[a \ b, c, d]" using assms assoc_def comp_assoc by auto finally show ?thesis by blast qed show "\\<^sub>0[a, b \ (c \ d)] \ ((a \ \[b, c, d]) \ \[a, b \ c, d]) \ (\[a, b, c] \ d) = \\<^sub>0[a, b \ (c \ d)] \ \[a, b, c \ d] \ \[a \ b, c, d]" proof - have "\\<^sub>0[a, b \ (c \ d)] \ ((a \ \[b, c, d]) \ \[a, b \ c, d]) \ (\[a, b, c] \ d) = \\<^sub>0[a, b \ c \ d] \ ((a \ \\\<^sub>1[b, c] \ \\<^sub>1[b \ c, d], \\\<^sub>0[b, c] \ \\<^sub>1[b \ c, d], \\<^sub>0[b \ c, d]\\) \ \\\<^sub>1[a, b \ c] \ \\<^sub>1[a \ b \ c, d], \\\<^sub>0[a, b \ c] \ \\<^sub>1[a \ b \ c, d], \\<^sub>0[a \ b \ c, d]\\) \ (\\\<^sub>1[a, b] \ \\<^sub>1[a \ b, c], \\\<^sub>0[a, b] \ \\<^sub>1[a \ b, c], \\<^sub>0[a \ b, c]\\ \ d)" using assms assoc_def by simp also have "... = \\\<^sub>1[b, c] \ \\<^sub>1[b \ c, d], \\\<^sub>0[b, c] \ \\<^sub>1[b \ c, d], \\<^sub>0[b \ c, d]\\ \ (\\<^sub>0[a, (b \ c) \ d] \ \\\<^sub>1[a, b \ c] \ \\<^sub>1[a \ b \ c, d], \\\<^sub>0[a, b \ c] \ \\<^sub>1[a \ b \ c, d], \\<^sub>0[a \ b \ c, d]\\) \ (\\\<^sub>1[a, b] \ \\<^sub>1[a \ b, c], \\\<^sub>0[a, b] \ \\<^sub>1[a \ b, c], \\<^sub>0[a \ b, c]\\ \ d)" proof - have "\\<^sub>0[a, b \ c \ d] \ (a \ \\\<^sub>1[b, c] \ \\<^sub>1[b \ c, d], \\\<^sub>0[b, c] \ \\<^sub>1[b \ c, d], \\<^sub>0[b \ c, d]\\) = \\\<^sub>1[b, c] \ \\<^sub>1[b \ c, d], \\\<^sub>0[b, c] \ \\<^sub>1[b \ c, d], \\<^sub>0[b \ c, d]\\ \ \\<^sub>0[a, (b \ c) \ d]" using assms assoc_def ide_in_hom pr_naturality(1) by auto thus ?thesis using comp_assoc by metis qed also have "... = \\\<^sub>0[a, b] \ \\<^sub>1[a \ b, c] \ \\<^sub>1[(a \ b) \ c, d], \\\<^sub>0[a \ b, c] \ \\<^sub>1[(a \ b) \ c, d], d \ \\<^sub>0[(a \ b) \ c, d]\\" using assms comp_assoc by simp also have "... = \\\<^sub>0[a, b] \ \\<^sub>1[a \ b, c] \ \\<^sub>1[(a \ b) \ c, d], \\\<^sub>0[a \ b, c] \ \\<^sub>1[(a \ b) \ c, d], \\<^sub>0[(a \ b) \ c, d]\\" using assms comp_cod_arr by simp also have "... = \\<^sub>0[a, b \ (c \ d)] \ \[a, b, c \ d] \ \[a \ b, c, d]" using assms assoc_def comp_assoc by simp finally show ?thesis by simp qed qed lemma inverse_arrows_assoc: assumes "ide a" and "ide b" and "ide c" shows "inverse_arrows \[a, b, c] \\<^sup>-\<^sup>1[a, b, c]" using assms assoc_def assoc'_def comp_assoc by (auto simp add: tuple_pr_arr) interpretation CC: product_category C C .. abbreviation Prod where "Prod fg \ fst fg \ snd fg" abbreviation Prod' where "Prod' fg \ snd fg \ fst fg" interpretation \: binary_functor C C C Prod using tuple_ext CC.comp_char interchange apply unfold_locales apply auto by (metis prod_def seqE)+ interpretation Prod': binary_functor C C C Prod' using tuple_ext CC.comp_char interchange apply unfold_locales apply auto by (metis prod_def seqE)+ lemma binary_functor_Prod: shows "binary_functor C C C Prod" and "binary_functor C C C Prod'" .. 
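text \
An informal gloss on the interpretations just given, writing $\times$ for the product of arrows and $\cdot$ for composition: that \Prod\ is a binary functor packages the interchange law and the fact \ide_prod\ proved earlier, namely that for composable pairs (\seq h f\ and \seq k g\) and for identities \a\ and \b\,
$$(h \times k) \cdot (f \times g) = (h \cdot f) \times (k \cdot g), \qquad a \times b \ \text{is an identity},$$
while \Prod'\ is the analogous functor with its two arguments exchanged.
\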
definition sym ("\[_, _]") where "\[a1, a0] \ if ide a0 \ ide a1 then \\\<^sub>0[a1, a0], \\<^sub>1[a1, a0]\ else null" lemma sym_in_hom [intro]: assumes "ide a" and "ide b" shows "\\[a, b] : a \ b \ b \ a\" using assms sym_def by auto lemma sym_simps [simp]: assumes "ide a" and "ide b" shows "arr \[a, b]" and "dom \[a, b] = a \ b" and "cod \[a, b] = b \ a" using assms sym_in_hom by auto lemma comp_sym_tuple [simp]: assumes "\f0 : a \ b0\" and "\f1 : a \ b1\" shows "\[b0, b1] \ \f0, f1\ = \f1, f0\" using assms sym_def comp_tuple_arr by fastforce lemma prj_sym [simp]: assumes "ide a0" and "ide a1" shows "\\<^sub>0[a1, a0] \ \[a0, a1] = \\<^sub>1[a0, a1]" and "\\<^sub>1[a1, a0] \ \[a0, a1] = \\<^sub>0[a0, a1]" using assms sym_def by auto lemma comp_sym_sym [simp]: assumes "ide a0" and "ide a1" shows "\[a1, a0] \ \[a0, a1] = (a0 \ a1)" using assms sym_def comp_tuple_arr by auto lemma sym_inverse_arrows: assumes "ide a0" and "ide a1" shows "inverse_arrows \[a0, a1] \[a1, a0]" using assms sym_in_hom comp_sym_sym by auto lemma sym_assoc_coherence: assumes "ide a" and "ide b" and "ide c" shows "\[b, c, a] \ \[a, b \ c] \ \[a, b, c] = (b \ \[a, c]) \ \[b, a, c] \ (\[a, b] \ c)" using assms sym_def assoc_def comp_assoc prod_tuple comp_cod_arr by simp lemma sym_naturality: assumes "\f0 : a0 \ b0\" and "\f1 : a1 \ b1\" shows "\[b0, b1] \ (f0 \ f1) = (f1 \ f0) \ \[a0, a1]" using assms sym_def comp_assoc prod_tuple by fastforce abbreviation \ where "\ fg \ \[cod (fst fg), cod (snd fg)] \ (fst fg \ snd fg)" interpretation \: natural_transformation CC.comp C Prod Prod' \ using sym_def CC.arr_char CC.null_char comp_arr_dom comp_cod_arr apply unfold_locales apply auto using arr_cod_iff_arr ideD(1) apply metis using arr_cod_iff_arr ideD(1) apply metis using prod_tuple by simp lemma \_is_natural_transformation: shows "natural_transformation CC.comp C Prod Prod' \" .. abbreviation Diag where "Diag f \ if arr f then (f, f) else CC.null" interpretation \: "functor" C CC.comp Diag by (unfold_locales, auto) lemma functor_Diag: shows "functor C CC.comp Diag" .. interpretation \o\: composite_functor CC.comp C CC.comp Prod Diag .. interpretation \o\: composite_functor C CC.comp C Diag Prod .. abbreviation \ where "\ \ \(f, g). (\\<^sub>1[cod f, cod g] \ (f \ g), \\<^sub>0[cod f, cod g] \ (f \ g))" interpretation \: transformation_by_components CC.comp CC.comp \o\.map CC.map \ using pr_naturality comp_arr_dom comp_cod_arr by unfold_locales auto lemma \_is_natural_transformation: shows "natural_transformation CC.comp CC.comp \o\.map CC.map \" proof - have "\.map = \" using \.map_def ext \.is_extensional comp_arr_dom comp_cod_arr by auto thus "natural_transformation CC.comp CC.comp \o\.map CC.map \" using \.natural_transformation_axioms by simp qed interpretation \: natural_transformation C C map \o\.map dup using dup_naturality comp_arr_dom comp_cod_arr prod_tuple tuple_ext by unfold_locales auto lemma dup_is_natural_transformation: shows "natural_transformation C C map \o\.map dup" .. interpretation \o\o\: composite_functor C CC.comp CC.comp Diag \o\.map .. interpretation \o\o\: composite_functor CC.comp C C Prod \o\.map .. 
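text \
A brief informal sketch of where this construction is headed (the formal statement is the unit-counit adjunction established below): the diagonal functor \Diag\ is left adjoint to the product functor \Prod\, with the duplication \dup\ as unit and the pair of projections as counit.  Writing $\pi_1$, $\pi_0$ for \pr1\, \pr0\, $\delta$ for \dup\ and $\times$ for the product, the two triangle identities reduce, at an identity \a\ and at a pair of identities \(a, b)\, to
$$\pi_1[a, a] \cdot \delta[a] = a = \pi_0[a, a] \cdot \delta[a], \qquad (\pi_1[a, b] \times \pi_0[a, b]) \cdot \delta[a \times b] = a \times b,$$
which follow from \pr_dup\, \prod_tuple\, \comp_arr_dom\, and \tuple_pr\ above.
\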
interpretation \o\: natural_transformation C CC.comp Diag \o\o\.map \Diag \ dup\ proof - have "Diag \ map = Diag" by auto thus "natural_transformation C CC.comp Diag \o\o\.map (Diag \ dup)" using \.as_nat_trans.natural_transformation_axioms \.natural_transformation_axioms o_assoc horizontal_composite [of C C map \o\.map dup CC.comp Diag Diag Diag] by metis qed interpretation \o\: natural_transformation CC.comp C Prod \o\o\.map \dup \ Prod\ using \.natural_transformation_axioms \.as_nat_trans.natural_transformation_axioms o_assoc horizontal_composite [of CC.comp C Prod Prod Prod C map \o\.map dup] by simp interpretation \o\: natural_transformation C CC.comp \o\o\.map Diag \\.map \ Diag\ using \.natural_transformation_axioms \.as_nat_trans.natural_transformation_axioms horizontal_composite [of C CC.comp Diag Diag Diag CC.comp \o\.map CC.map \.map] by simp interpretation \o\: natural_transformation CC.comp C \o\o\.map Prod \Prod \ \.map\ proof - have "Prod \ \o\.map = \o\o\.map" by auto thus "natural_transformation CC.comp C \o\o\.map Prod (Prod \ \.map)" using \.natural_transformation_axioms \.as_nat_trans.natural_transformation_axioms o_assoc horizontal_composite [of CC.comp CC.comp \o\.map CC.map \.map C Prod Prod Prod] by simp qed interpretation \o\_\o\: vertical_composite C CC.comp Diag \o\o\.map Diag \Diag \ dup\ \\.map \ Diag\ .. interpretation \o\_\o\: vertical_composite CC.comp C Prod \o\o\.map Prod \dup \ Prod\ \Prod \ \.map\ .. interpretation \\: unit_counit_adjunction CC.comp C Diag Prod dup \.map proof show "\o\_\o\.map = Diag" proof fix f have "\ arr f \ \o\_\o\.map f = Diag f" by (simp add: \o\_\o\.is_extensional) moreover have "arr f \ \o\_\o\.map f = Diag f" using comp_cod_arr comp_assoc \o\_\o\.map_def by auto ultimately show "\o\_\o\.map f = Diag f" by blast qed show "\o\_\o\.map = Prod" proof fix fg show "\o\_\o\.map fg = Prod fg" proof - have "\ CC.arr fg \ ?thesis" by (simp add: \.is_extensional \o\_\o\.is_extensional) moreover have "CC.arr fg \ ?thesis" proof - assume fg: "CC.arr fg" have 1: "dup (Prod fg) = \cod (fst fg) \ cod (snd fg), cod (fst fg) \ cod (snd fg)\ \ (fst fg \ snd fg)" using fg \.is_natural_2 apply simp by (metis (no_types, lifting) prod_simps(1) prod_simps(3)) have "\o\_\o\.map fg = (\\<^sub>1[cod (fst fg), cod (snd fg)] \ \\<^sub>0[cod (fst fg), cod (snd fg)]) \ \cod (fst fg) \ cod (snd fg), cod (fst fg) \ cod (snd fg)\ \ (fst fg \ snd fg)" using fg 1 \o\_\o\.map_def comp_cod_arr by simp also have "... = ((\\<^sub>1[cod (fst fg), cod (snd fg)] \ \\<^sub>0[cod (fst fg), cod (snd fg)]) \ \cod (fst fg) \ cod (snd fg), cod (fst fg) \ cod (snd fg)\) \ (fst fg \ snd fg)" using comp_assoc by simp also have "... = \\\<^sub>1[cod (fst fg), cod (snd fg)] \ (cod (fst fg) \ cod (snd fg)), \\<^sub>0[cod (fst fg), cod (snd fg)] \ (cod (fst fg) \ cod (snd fg))\ \ (fst fg \ snd fg)" using fg prod_tuple by simp also have "... = Prod fg" using fg comp_arr_dom \.as_nat_trans.is_natural_2 by auto finally show ?thesis by simp qed ultimately show ?thesis by blast qed qed qed proposition induces_unit_counit_adjunction: shows "unit_counit_adjunction CC.comp C Diag Prod dup \.map" using \\.unit_counit_adjunction_axioms by simp end section "Category with Terminal Object" locale category_with_terminal_object = category + assumes has_terminal: "\t. 
terminal t" locale elementary_category_with_terminal_object = category C for C :: "'a comp" (infixr "\" 55) and one :: "'a" ("\") and trm :: "'a \ 'a" ("\[_]") + assumes ide_one: "ide \" and trm_in_hom: "ide a \ \\[a] : a \ \\" and trm_eqI: "\ ide a; \f : a \ \\ \ \ f = \[a]" begin lemma trm_simps: assumes "ide a" shows "arr \[a]" and "dom \[a] = a" and "cod \[a] = \" using assms trm_in_hom by auto lemma trm_one: shows "\[\] = \" using ide_one trm_in_hom trm_eqI ide_in_hom by auto lemma terminal_one: shows "terminal \" using ide_one trm_in_hom trm_eqI terminal_def by metis lemma trm_naturality: assumes "arr f" shows "\[cod f] \ f = \[dom f]" using assms trm_eqI by (metis comp_in_homI' ide_cod ide_dom in_homE trm_in_hom) proposition is_category_with_terminal_object: shows "category_with_terminal_object C" apply unfold_locales using terminal_one by auto end context category_with_terminal_object begin definition some_terminal ("\") where "some_terminal \ SOME t. terminal t" definition "trm" ("\[_]") where "\[f] \ if arr f then THE t. \t : dom f \ \\ else null" lemma terminal_some_terminal [intro]: shows "terminal \" using some_terminal_def has_terminal someI_ex [of "\t. terminal t"] by presburger lemma ide_some_terminal: shows "ide \" using terminal_def by blast lemma trm_in_hom [intro]: assumes "arr f" shows "\\[f] : dom f \ \\" proof - have "ide (dom f)" using assms by fastforce hence "\!t. \t : dom f \ \\" using assms trm_def terminal_def terminal_some_terminal by simp thus ?thesis using assms trm_def [of f] theI' [of "\t. \t : dom f \ \\"] by auto qed lemma trm_simps [simp]: assumes "arr f" shows "arr \[f]" and "dom \[f] = dom f" and "cod \[f] = \" using assms trm_in_hom by auto lemma trm_eqI: assumes "\t : dom f \ \\" shows "t = \[f]" proof - have "ide (dom f)" using assms by (metis ide_dom in_homE) hence "\!t. \t : dom f \ \\" using terminal_def [of \] terminal_some_terminal by auto moreover have "\t : dom f \ \\" using assms by simp ultimately show ?thesis using assms trm_def the1_equality [of "\t. \t : dom f \ \\" t] \ide (dom f)\ arr_dom_iff_arr by fastforce qed sublocale elementary_category_with_terminal_object C \ trm using ide_some_terminal trm_eqI by unfold_locales auto proposition extends_to_elementary_category_with_terminal_object: shows "elementary_category_with_terminal_object C \ trm" .. end section "Cartesian Category" locale cartesian_category = category_with_binary_products + category_with_terminal_object + locale category_with_pullbacks_and_terminal_object = + category_with_pullbacks + + category_with_terminal_object + begin + + sublocale category_with_binary_products C + proof + show "has_binary_products" + proof - + have "\a0 a1. \ide a0; ide a1\ \ \p0 p1. has_as_binary_product a0 a1 p0 p1" + proof - + fix a0 a1 + assume a0: "ide a0" and a1: "ide a1" + obtain p0 p1 where p0p1: "has_as_pullback \[a0] \[a1] p0 p1" + using a0 a1 has_pullbacks has_pullbacks_def by force + have "has_as_binary_product a0 a1 p0 p1" + using a0 a1 p0p1 + apply (elim has_as_pullbackE, intro has_as_binary_productI) + apply blast + apply blast + apply fastforce + apply fastforce + proof - + fix x f g + assume f: "\f : x \ a0\" and g: "\g : x \ a1\" + assume "\h k. commutative_square \[a0] \[a1] h k \ \!l. 
p0 \ l = h \ p1 \ l = k" + moreover have "commutative_square \[a0] \[a1] f g" + using f g + by (metis a0 commutative_squareI in_homE + elementary_category_with_terminal_object.trm_simps(2) + extends_to_elementary_category_with_terminal_object + has_as_pullbackE p0p1 trm_naturality) + moreover have "\l. p0 \ l = f \ p1 \ l = g \ \l : x \ dom p1\" + using f g by blast + ultimately show "\!l. \l : x \ dom p1\ \ p0 \ l = f \ p1 \ l = g" + by metis + qed + thus "\p0 p1. has_as_binary_product a0 a1 p0 p1" + by auto + qed + thus ?thesis + using has_binary_products_def by force + qed + qed + + sublocale cartesian_category C .. + + end + locale elementary_cartesian_category = elementary_category_with_binary_products + elementary_category_with_terminal_object begin proposition is_cartesian_category: shows "cartesian_category C" using cartesian_category.intro is_category_with_binary_products is_category_with_terminal_object by auto end context cartesian_category begin proposition extends_to_elementary_cartesian_category: shows "elementary_cartesian_category C pr0 pr1 \ trm" by (simp add: elementary_cartesian_category_def elementary_category_with_terminal_object_axioms extends_to_elementary_category_with_binary_products) sublocale elementary_cartesian_category C pr0 pr1 \ trm using extends_to_elementary_cartesian_category by simp end text \ Here we prove some facts that will later allow us to show that an elementary cartesian category is a monoidal category. \ context elementary_cartesian_category begin abbreviation \ where "\ \ \\<^sub>0[\, \]" lemma pr_coincidence: shows "\ = \\<^sub>1[\, \]" using ide_one by (simp add: terminal_arr_unique terminal_one) lemma \_is_terminal_arr: shows "terminal_arr \" using ide_one by (simp add: terminal_one) lemma inverse_arrows_\: shows "inverse_arrows \ \\, \\" using ide_one by (metis (no_types, lifting) dup_is_natural_transformation \_is_terminal_arr cod_pr0 comp_cod_arr pr_dup(1) ide_dom inverse_arrows_def map_simp natural_transformation.is_natural_2 pr_simps(2) pr1_in_hom' trm_eqI trm_naturality trm_one tuple_pr) lemma \_is_iso: shows "iso \" using inverse_arrows_\ by auto lemma trm_tensor: assumes "ide a" and "ide b" shows "\[a \ b] = \ \ (\[a] \ \[b])" proof - have "\[a \ b] = \[a] \ \\<^sub>1[a, b]" by (metis assms(1-2) cod_pr1 pr_simps(4-6) trm_naturality) moreover have "\\[b] : b \ \\" using assms(2) trm_in_hom by blast ultimately show ?thesis using assms(1) pr_coincidence trm_in_hom by fastforce qed abbreviation runit ("\[_]") where "\[a] \ \\<^sub>1[a, \]" abbreviation runit' ("\\<^sup>-\<^sup>1[_]") where "\\<^sup>-\<^sup>1[a] \ \a, \[a]\" abbreviation lunit ("\[_]") where "\[a] \ \\<^sub>0[\, a]" abbreviation lunit' ("\\<^sup>-\<^sup>1[_]") where "\\<^sup>-\<^sup>1[a] \ \\[a], a\" lemma runit_in_hom: assumes "ide a" shows "\\[a] : a \ \ \ a\" using assms ide_one by simp lemma runit'_in_hom: assumes "ide a" shows "\\\<^sup>-\<^sup>1[a] : a \ a \ \\" using assms ide_in_hom trm_in_hom by blast lemma lunit_in_hom: assumes "ide a" shows "\\[a] : \ \ a \ a\" using assms ide_one by simp lemma lunit'_in_hom: assumes "ide a" shows "\\\<^sup>-\<^sup>1[a] : a \ \ \ a\" using assms ide_in_hom trm_in_hom by blast lemma runit_naturality: assumes "ide a" shows "\[cod a] \ (a \ \) = a \ \[dom a]" using assms pr_naturality(2) ide_char ide_one by blast lemma inverse_arrows_runit: assumes "ide a" shows "inverse_arrows \[a] \\<^sup>-\<^sup>1[a]" proof show "ide (\[a] \ \\<^sup>-\<^sup>1[a])" proof - have "\[a] \ \\<^sup>-\<^sup>1[a] = a" using assms by (metis in_homE 
ide_char pr_tuple(1) trm_in_hom) thus ?thesis using assms by presburger qed show "ide (\\<^sup>-\<^sup>1[a] \ \[a])" proof - have "ide (a \ \)" using assms ide_one by blast moreover have "\\<^sup>-\<^sup>1[a] \ \[a] = a \ \" proof (intro pr_joint_monic [of a \ "\\<^sup>-\<^sup>1[a] \ \[a]" "a \ \"]) show "seq \\<^sub>0[a, \] (\\<^sup>-\<^sup>1[a] \ \[a])" using assms ide_one runit'_in_hom [of a] by (intro seqI) auto show "\\<^sub>0[a, \] \ \\<^sup>-\<^sup>1[a] \ \[a] = \\<^sub>0[a, \] \ (a \ \)" proof - have "\\<^sub>0[a, \] \ \\<^sup>-\<^sup>1[a] \ \[a] = (\\<^sub>0[a, \] \ \\<^sup>-\<^sup>1[a]) \ \[a]" using comp_assoc by simp also have "... = \[a] \ \[a]" using assms ide_one by (metis in_homE pr_tuple(2) ide_char trm_in_hom) also have "... = \[a \ \]" using assms ide_one trm_naturality [of "\[a]"] by simp also have "... = \\<^sub>0[a, \] \ (a \ \)" using assms comp_arr_dom ide_one trm_naturality trm_one by fastforce finally show ?thesis by blast qed show "\\<^sub>1[a, \] \ \\<^sup>-\<^sup>1[a] \ \[a] = \\<^sub>1[a, \] \ (a \ \)" using assms by (metis \ide (\[a] \ \\<^sup>-\<^sup>1[a])\ cod_comp cod_pr1 dom_comp ide_compE ide_one comp_assoc runit_naturality) qed ultimately show ?thesis by simp qed qed lemma lunit_naturality: assumes "arr f" shows "C \[cod f] (\ \ f) = C f \[dom f]" using assms pr_naturality(1) ide_char ide_one by blast lemma inverse_arrows_lunit: assumes "ide a" shows "inverse_arrows \[a] \\<^sup>-\<^sup>1[a]" proof show "ide (C \[a] \\<^sup>-\<^sup>1[a])" proof - have "C \[a] \\<^sup>-\<^sup>1[a] = a" using assms by (metis ide_char in_homE pr_tuple(2) trm_in_hom) thus ?thesis using assms by simp qed show "ide (\\<^sup>-\<^sup>1[a] \ \[a])" proof - have "\\<^sup>-\<^sup>1[a] \ \[a] = \ \ a" proof (intro pr_joint_monic [of \ a "\\<^sup>-\<^sup>1[a] \ \[a]" "\ \ a"]) show "seq \[a] (\\<^sup>-\<^sup>1[a] \ \[a])" using assms \ide (\[a] \ \\<^sup>-\<^sup>1[a])\ by blast show "\[a] \ \\<^sup>-\<^sup>1[a] \ \[a] = \[a] \ (\ \ a)" using assms by (metis \ide (\[a] \ \\<^sup>-\<^sup>1[a])\ cod_comp cod_pr0 dom_cod ide_compE ide_one comp_assoc lunit_naturality) show "\\<^sub>1[\, a] \ \\<^sup>-\<^sup>1[a] \ \[a] = \\<^sub>1[\, a] \ (\ \ a)" proof - have "\\<^sub>1[\, a] \ \\<^sup>-\<^sup>1[a] \ \[a] = (\\<^sub>1[\, a] \ \\<^sup>-\<^sup>1[a]) \ \[a]" using comp_assoc by simp also have "... = \[a] \ \[a]" using assms ide_one by (metis pr_tuple(1) ide_char in_homE trm_in_hom) also have "... = \[\ \ a]" using assms ide_one trm_naturality [of "\[a]"] by simp also have "... 
= \\<^sub>1[\, a] \ (\ \ a)" using assms comp_arr_dom ide_one trm_naturality trm_one by fastforce finally show ?thesis by simp qed qed moreover have "ide (\ \ a)" using assms ide_one by simp finally show ?thesis by blast qed qed lemma comp_lunit_term_dup: assumes "ide a" shows "\[a] \ (\[a] \ a) \ \[a] = a" proof - have "\\[a] : a \ \\" using assms trm_in_hom by blast hence "\[a] \ (\[a] \ a) = a \ \\<^sub>0[a, a]" by (metis assms pr_naturality(1) ide_char in_homE) thus ?thesis by (metis (no_types) assms comp_assoc comp_ide_self pr_dup(1)) qed lemma comp_runit_term_dup: assumes "ide a" shows "\[a] \ (a \ \[a]) \ \[a] = a" proof - have "\\[a] : a \ \\" using assms trm_in_hom by blast hence "\[a] \ (a \ \[a]) = a \ \\<^sub>1[a, a]" using assms by auto thus ?thesis using assms by (metis comp_ide_arr pr_dup(2) ide_char comp_assoc seqI) qed lemma comp_proj_assoc: assumes "ide a0" and "ide a1" and "ide a2" shows "\\<^sub>1[a0, a1 \ a2] \ \[a0, a1, a2] = \\<^sub>1[a0, a1] \ \\<^sub>1[a0 \ a1, a2]" and "\\<^sub>0[a0, a1 \ a2] \ \[a0, a1, a2] = \\\<^sub>0[a0, a1] \ \\<^sub>1[a0 \ a1, a2], \\<^sub>0[a0 \ a1, a2]\" using assms assoc_def by auto lemma dup_coassoc: assumes "ide a" shows "\[a, a, a] \ (\[a] \ a) \ \[a] = (a \ \[a]) \ \[a]" proof (intro pr_joint_monic [of a "a \ a" "\[a, a, a] \ (\[a] \ a) \ \[a]" "(a \ \[a]) \ \[a]"]) show "seq \\<^sub>0[a, a \ a] (\[a, a, a] \ (\[a] \ a) \ \[a])" using assms by simp show "\\<^sub>0[a, a \ a] \ \[a, a, a] \ (\[a] \ a) \ \[a] = \\<^sub>0[a, a \ a] \ (a \ \[a]) \ \[a]" proof - have "\\<^sub>0[a, a \ a] \ \[a, a, a] \ (\[a] \ a) \ \[a] = ((\\<^sub>0[a, a \ a] \ \[a, a, a]) \ (\[a] \ a)) \ \[a]" using comp_assoc by simp also have "... = \((\\<^sub>0[a, a] \ \\<^sub>1[a \ a, a]) \ (\[a] \ a)) \ \[a], (a \ \\<^sub>0[a, a]) \ \[a]\" using assms assoc_def by simp also have "... = \[a]" using assms comp_assoc by simp also have "... = (\\<^sub>0[a, a \ a] \ (a \ \[a])) \ \[a]" using assms assoc_def comp_assoc by simp also have "... = \\<^sub>0[a, a \ a] \ (a \ \[a]) \ \[a]" using comp_assoc by simp finally show ?thesis by blast qed show "\\<^sub>1[a, a \ a] \ \[a, a, a] \ (\[a] \ a) \ \[a] = \\<^sub>1[a, a \ a] \ (a \ \[a]) \ \[a]" proof - have "\\<^sub>1[a, a \ a] \ \[a, a, a] \ (\[a] \ a) \ \[a] = ((\\<^sub>1[a, a \ a] \ \[a, a, a]) \ (\[a] \ a)) \ \[a]" using comp_assoc by simp also have "... = ((\\<^sub>1[a, a] \ \\<^sub>1[a \ a, a]) \ (\[a] \ a)) \ \[a]" using assms assoc_def by simp also have "... = a" using assms comp_assoc by simp also have "... = (a \ \\<^sub>1[a, a]) \ \[a]" using assms comp_assoc by simp also have "... = (\\<^sub>1[a, a \ a] \ (a \ \[a])) \ \[a]" using assms by simp also have "... 
= \\<^sub>1[a, a \ a] \ (a \ \[a]) \ \[a]" using comp_assoc by simp finally show ?thesis by blast qed qed lemma comp_assoc_tuple: assumes "\f0 : a \ b0\" and "\f1 : a \ b1\" and "\f2 : a \ b2\" shows "\[b0, b1, b2] \ \\f0, f1\, f2\ = \f0, \f1, f2\\" and "\\<^sup>-\<^sup>1[b0, b1, b2] \ \f0, \f1, f2\\ = \\f0, f1\, f2\" using assms assoc_def assoc'_def comp_assoc by fastforce+ lemma dup_tensor: assumes "ide a" and "ide b" shows "\[a \ b] = \\<^sup>-\<^sup>1[a, b, a \ b] \ (a \ \[b, a, b]) \ (a \ \ (a, b) \ b) \ (a \ \\<^sup>-\<^sup>1[a, b, b]) \ \[a, a, b \ b] \ (\[a] \ \[b])" proof (intro pr_joint_monic [of "a \ b" "a \ b" "\[a \ b]"]) show "seq \\<^sub>0[a \ b, a \ b] (\[a \ b])" using assms by simp have 1: "\\<^sup>-\<^sup>1[a, b, a \ b] \ (a \ \[b, a, b]) \ (a \ \ (a, b) \ b) \ (a \ \\<^sup>-\<^sup>1[a, b, b]) \ \[a, a, b \ b] \ (\[a] \ \[b]) = \a \ b, a \ b\" proof - have "\\<^sup>-\<^sup>1[a, b, a \ b] \ (a \ \[b, a, b]) \ (a \ \ (a, b) \ b) \ (a \ \\<^sup>-\<^sup>1[a, b, b]) \ \[a, a, b \ b] \ (\[a] \ \[b]) = \\<^sup>-\<^sup>1[a, b, a \ b] \ (a \ \[b, a, b]) \ (a \ \ (a, b) \ b) \ (a \ \\<^sup>-\<^sup>1[a, b, b]) \ \\\<^sub>1[a, b], \\\<^sub>1[a, b], \[b] \ \\<^sub>0[a, b]\\" proof - have "\[a, a, b \ b] \ (\[a] \ \[b]) = \\\<^sub>1[a, b], \\\<^sub>1[a, b], \[b] \ \\<^sub>0[a, b]\\" using assms assoc_def comp_assoc pr_naturality comp_cod_arr by simp thus ?thesis by presburger qed also have "... = \\<^sup>-\<^sup>1[a, b, a \ b] \ \a \ a \ a \ \\<^sub>1[a, b], \[b, a, b] \ (\[a, b] \ (a \ b) \ b) \ \\<^sup>-\<^sup>1[a, b, b] \ \\\<^sub>1[a, b], \[b \ \\<^sub>0[a, b]]\\" using assms prod_tuple by simp also have "... = \\<^sup>-\<^sup>1[a, b, a \ b] \ \\\<^sub>1[a, b], \[b, a, b] \ (\[a, b] \ b) \ \\<^sup>-\<^sup>1[a, b, b] \ \\\<^sub>1[a, b], \[\\<^sub>0[a, b]]\\" proof - have "a \ a \ a \ \\<^sub>1[a, b] = \\<^sub>1[a, b]" using assms comp_cod_arr by simp moreover have "b \ \\<^sub>0[a, b] = \\<^sub>0[a, b]" using assms comp_cod_arr by simp moreover have "\[a, b] \ (a \ b) \ b = \[a, b] \ b" using assms comp_arr_dom by simp ultimately show ?thesis by simp qed also have "... = \\<^sup>-\<^sup>1[a, b, a \ b] \ \\\<^sub>1[a, b], \[b, a, b] \ (\[a, b] \ b) \ \\\\<^sub>1[a, b], \\<^sub>0[a, b]\, \\<^sub>0[a, b]\\" proof - have "\\<^sup>-\<^sup>1[a, b, b] \ \\\<^sub>1[a, b], \[\\<^sub>0[a, b]]\ = \\\\<^sub>1[a, b], \\<^sub>0[a, b]\, \\<^sub>0[a, b]\" using assms comp_assoc_tuple(2) by blast thus ?thesis by simp qed also have "... = \\<^sup>-\<^sup>1[a, b, a \ b] \ \\\<^sub>1[a, b], \[b, a, b] \ \\[a, b], \\<^sub>0[a, b]\\" using assms prod_tuple comp_arr_dom comp_cod_arr by simp also have "... = \\<^sup>-\<^sup>1[a, b, a \ b] \ \\\<^sub>1[a, b], \\\<^sub>0[a, b], \\\<^sub>1[a, b], \\<^sub>0[a, b]\\\" using assms comp_assoc_tuple(1) by (metis sym_def pr_in_hom) also have "... = \\\\<^sub>1[a, b], \\<^sub>0[a, b]\, \\\<^sub>1[a, b], \\<^sub>0[a, b]\\" using assms comp_assoc_tuple(2) by force also have "... = \[a \ b]" using assms by simp finally show ?thesis by simp qed show "\\<^sub>0[a \ b, a \ b] \ \[a \ b] = \\<^sub>0[a \ b, a \ b] \ \\<^sup>-\<^sup>1[a, b, a \ b] \ (a \ \[b, a, b]) \ (a \ \ (a, b) \ b) \ (a \ \\<^sup>-\<^sup>1[a, b, b]) \ \[a, a, b \ b] \ (\[a] \ \[b])" using assms 1 by force show "\\<^sub>1[a \ b, a \ b] \ \[a \ b] = \\<^sub>1[a \ b, a \ b] \ \\<^sup>-\<^sup>1[a, b, a \ b] \ (a \ \[b, a, b]) \ (a \ \ (a, b) \ b) \ (a \ \\<^sup>-\<^sup>1[a, b, b]) \ \[a, a, b \ b] \ (\[a] \ \[b])" using assms 1 by force qed (* TODO: Not sure if the remaining facts are useful. 
*) lemma \_eq_trm: shows "\ = \[\ \ \]" proof (intro terminal_arr_unique) show "par \ \[\ \ \]" by (simp add: ide_one trm_one trm_tensor) show "terminal_arr \[\ \ \]" using ide_one \_is_terminal_arr \par \ \[\ \ \]\ by auto show "terminal_arr \" using \_is_terminal_arr by blast qed lemma terminal_tensor_one_one: shows "terminal (\ \ \)" proof show "ide (\ \ \)" using ide_one by simp show "\a. ide a \ \!f. \f : a \ \ \ \\" proof - fix a assume a: "ide a" show "\!f. \f : a \ \ \ \\" proof show "\inv \ \ \[a] : a \ \ \ \\" using a ide_one inverse_arrows_\ inverse_unique trm_in_hom by fastforce show "\f. \f : a \ \ \ \\ \ f = inv \ \ \[a]" proof - fix f assume f: "\f : a \ \ \ \\" have "\ \ f = \[a]" proof (intro terminal_arr_unique) show "par (\ \ f) \[a]" using a f by (metis \_is_iso \_is_terminal_arr \\inv \ \ \[a] : a \ \ \ \\\ cod_comp dom_comp dom_inv ide_one in_homE pr_simps(2-3) seqE seqI) show "terminal_arr (\ \ f)" using a f \_is_terminal_arr cod_comp by force show "terminal_arr \[a]" using a \par (\ \ f) \[a]\ \terminal_arr (\ \ f)\ by auto qed thus "f = inv \ \ \[a]" using a f \_is_iso invert_side_of_triangle(1) \\inv \ \ \[a] : a \ \ \ \\\ by blast qed qed qed qed end section "Category with Finite Products" text \ In this last section, we show that the notion ``cartesian category'', which we defined to be a category with binary products and terminal object, coincides with the notion ``category with finite products''. Due to the inability to quantify over types in HOL, we content ourselves with defining the latter notion as "has \I\-indexed products for every finite set \I\ of natural numbers." We can transfer this property to finite sets at other types using the fact that products are preserved under bijections of the index sets. \ locale category_with_finite_products = category C for C :: "'c comp" + assumes has_finite_products: "finite (I :: nat set) \ has_products I" begin lemma has_finite_products': assumes "I \ UNIV" shows "finite I \ has_products I" proof - assume I: "finite I" obtain n \ where \: "bij_betw \ {k. k < (n :: nat)} I" using I finite_imp_nat_seg_image_inj_on inj_on_imp_bij_betw by fastforce show "has_products I" using assms(1) \ has_finite_products has_products_preserved_by_bijection category_with_finite_products.has_finite_products by blast qed end lemma (in category) has_binary_products_if: assumes "has_products ({0, 1} :: nat set)" shows "has_binary_products" proof (unfold has_binary_products_def) show "\a0 a1. ide a0 \ ide a1 \ (\p0 p1. has_as_binary_product a0 a1 p0 p1)" proof (intro allI impI) fix a0 a1 assume 1: "ide a0 \ ide a1" show "\p0 p1. has_as_binary_product a0 a1 p0 p1" proof - interpret J: binary_product_shape by unfold_locales interpret D: binary_product_diagram C a0 a1 using 1 by unfold_locales auto interpret discrete_diagram J.comp C D.map using J.is_discrete by unfold_locales auto show "\p0 p1. has_as_binary_product a0 a1 p0 p1" proof (unfold has_as_binary_product_def) text \ Here we have to work around the fact that \has_finite_products\ is defined in terms of @{typ "nat set"}, whereas \has_as_binary_product\ is defined in terms of \J.arr set\. \ let ?\ = "(\x :: nat. if x = 0 then J.FF else J.TT)" let ?\ = "\j. if j = J.FF then 0 else 1" have "bij_betw ?\ ({0, 1} :: nat set) {J.FF, J.TT}" using bij_betwI [of ?\ "{0, 1} :: nat set" "{J.FF, J.TT}" ?\] by fastforce hence "has_products {J.FF, J.TT}" using assms has_products_def [of "{J.FF, J.TT}"] has_products_preserved_by_bijection [of "{0, 1} :: nat set" ?\ "{J.FF, J.TT}"] by blast hence "\a. 
has_as_product J.comp D.map a" using has_products_def [of "{J.FF, J.TT}"] discrete_diagram_axioms J.arr_char by blast hence "\a \. product_cone J.comp C D.map a \" using has_as_product_def by blast hence 2: "\a \. D.limit_cone a \" unfolding product_cone_def by simp obtain a \ where \: "D.limit_cone a \" using 2 by auto interpret \: limit_cone J.comp C D.map a \ using \ by auto have "\ = D.mkCone (\ J.FF) (\ J.TT)" proof - have "\a. J.ide a \ \ a = D.mkCone (\ J.FF) (\ J.TT) a" using D.mkCone_def J.ide_char by auto moreover have "a = dom (\ J.FF)" by simp moreover have "D.cone a (D.mkCone (\ (J.MkIde False)) (\ (J.MkIde True)))" using 1 D.cone_mkCone [of "\ J.FF" "\ J.TT"] by auto ultimately show ?thesis using D.mkCone_def \.natural_transformation_axioms D.cone_mkCone [of "\ J.FF" "\ J.TT"] NaturalTransformation.eqI [of "J.comp" C \.A.map "D.map" \ "D.mkCone (\ J.FF) (\ J.TT)"] cone_def [of J.comp C D.map a "D.mkCone (\ J.FF) (\ J.TT)"] J.ide_char by blast qed hence "D.limit_cone (dom (\ J.FF)) (D.mkCone (\ J.FF) (\ J.TT))" using \.limit_cone_axioms by simp thus "\p0 p1. ide a0 \ ide a1 \ D.has_as_binary_product p0 p1" using 1 by blast qed qed qed qed sublocale category_with_finite_products \ category_with_binary_products C using has_binary_products_if has_finite_products by (unfold_locales, unfold has_binary_products_def) simp proposition (in category_with_finite_products) is_category_with_binary_products: shows "category_with_binary_products C" .. sublocale category_with_finite_products \ category_with_terminal_object C proof interpret J: discrete_category "{} :: nat set" by unfold_locales auto interpret D: empty_diagram J.comp C "\j. null" by unfold_locales auto interpret D: discrete_diagram J.comp C "\j. null" using J.is_discrete by unfold_locales auto have "\a. D.has_as_limit a \ has_as_product J.comp (\j. null) a" using product_cone_def J.category_axioms category_axioms D.discrete_diagram_axioms has_as_product_def product_cone_def by metis moreover have "\a. has_as_product J.comp (\j. null) a" using has_finite_products [of "{} :: nat set"] has_products_def [of "{} :: nat set"] D.discrete_diagram_axioms by blast ultimately have "\a. D.has_as_limit a" by blast thus "\a. terminal a" using D.has_as_limit_iff_terminal by blast qed proposition (in category_with_finite_products) is_category_with_terminal_object: shows "category_with_terminal_object C" .. sublocale category_with_finite_products \ cartesian_category .. proposition (in category_with_finite_products) is_cartesian_category: shows "cartesian_category C" .. context category begin lemma binary_product_of_products_is_product: assumes "has_as_product J0 D0 a0" and "has_as_product J1 D1 a1" and "has_as_binary_product a0 a1 p0 p1" and "Collect (partial_magma.arr J0) \ Collect (partial_magma.arr J1) = {}" and "partial_magma.null J0 = partial_magma.null J1" shows "has_as_product (discrete_category.comp (Collect (partial_magma.arr J0) \ Collect (partial_magma.arr J1)) (partial_magma.null J0)) (\i. 
if i \ Collect (partial_magma.arr J0) then D0 i else if i \ Collect (partial_magma.arr J1) then D1 i else null) (dom p0)" proof - obtain \0 where \0: "product_cone J0 (\) D0 a0 \0" using assms(1) has_as_product_def by blast obtain \1 where \1: "product_cone J1 (\) D1 a1 \1" using assms(2) has_as_product_def by blast interpret J0: category J0 using \0 product_cone.axioms(1) by metis interpret J1: category J1 using \1 product_cone.axioms(1) by metis interpret D0: discrete_diagram J0 C D0 using \0 product_cone.axioms(3) by metis interpret D1: discrete_diagram J1 C D1 using \1 product_cone.axioms(3) by metis interpret \0: product_cone J0 C D0 a0 \0 using \0 by auto interpret \1: product_cone J1 C D1 a1 \1 using \1 by auto interpret J: discrete_category \Collect J0.arr \ Collect J1.arr\ J0.null using J0.not_arr_null assms(5) by unfold_locales auto interpret X: binary_product_shape . interpret a0xa1: binary_product_diagram C a0 a1 using assms(3) has_as_binary_product_def by (simp add: binary_product_diagram.intro binary_product_diagram_axioms.intro category_axioms) have p0p1: "a0xa1.has_as_binary_product p0 p1" using assms(3) has_as_binary_product_def [of a0 a1 p0 p1] by simp let ?D = "(\i. if i \ Collect J0.arr then D0 i else if i \ Collect J1.arr then D1 i else null)" let ?a = "dom p0" let ?\ = "\i. if i \ Collect J0.arr then \0 i \ p0 else if i \ Collect J1.arr then \1 i \ p1 else null" let ?p0p1 = "a0xa1.mkCone p0 p1" interpret p0p1: limit_cone X.comp C a0xa1.map ?a ?p0p1 using p0p1 by simp have a: "ide ?a" using p0p1.ide_apex by simp have p0: "\p0 : ?a \ a0\" using a0xa1.mkCone_def p0p1.preserves_hom [of X.FF X.FF X.FF] X.ide_char X.ide_in_hom by auto have p1: "\p1 : ?a \ a1\" using a0xa1.mkCone_def p0p1.preserves_hom [of X.TT X.TT X.TT] X.ide_char X.ide_in_hom by auto interpret D: discrete_diagram J.comp C ?D using assms J.arr_char J.dom_char J.cod_char J.is_discrete D0.is_discrete D1.is_discrete J.cod_comp J.seq_char by unfold_locales auto interpret A: constant_functor J.comp C ?a using p0p1.ide_apex by unfold_locales simp interpret \: natural_transformation J.comp C A.map ?D ?\ proof fix j show "\ J.arr j \ ?\ j = null" by simp assume j: "J.arr j" have \0j: "J0.arr j \ \\0 j : a0 \ D0 j\" using D0.is_discrete by auto have \1j: "J1.arr j \ \\1 j : a1 \ D1 j\" using D1.is_discrete by auto show "dom (?\ j) = A.map (J.dom j)" using j J.arr_char p0 p1 \0j \1j by fastforce show "cod (?\ j) = ?D (J.cod j)" using j J.arr_char p0 p1 \0j \1j by fastforce show "?D j \ ?\ (J.dom j) = ?\ j" proof - have 0: "J0.arr j \ D0 j \ \0 j \ p0 = \0 j \ p0" proof - have "J0.arr j \ (D0 j \ \0 j) \ p0 = \0 j \ p0" using p0 \0.is_natural_1 \0.is_natural_2 D0.is_discrete by simp thus "J0.arr j \ D0 j \ \0 j \ p0 = \0 j \ p0" using comp_assoc by simp qed have 1: "J1.arr j \ D1 j \ \1 j \ p1 = \1 j \ p1" proof - have "J1.arr j \ (D1 j \ \1 j) \ p1 = \1 j \ p1" using p1 \1.is_natural_1 \1.is_natural_2 D1.is_discrete by simp thus "J1.arr j \ D1 j \ \1 j \ p1 = \1 j \ p1" using comp_assoc by simp qed show ?thesis using 0 1 by auto qed show "?\ (J.cod j) \ A.map j = ?\ j" using j comp_arr_dom p0 p1 comp_assoc by auto qed interpret \: cone J.comp C ?D ?a ?\ .. interpret \: product_cone J.comp C ?D ?a ?\ proof show "\a' \'. D.cone a' \' \ \!f. \f : a' \ ?a\ \ D.cones_map f ?\ = \'" proof - fix a' \' assume \': "D.cone a' \'" interpret \': cone J.comp C ?D a' \' using \' by simp show "\!f. \f : a' \ ?a\ \ D.cones_map f ?\ = \'" proof let ?\0' = "\i. if i \ Collect J0.arr then \' i else null" let ?\1' = "\i. 
if i \ Collect J1.arr then \' i else null" have 0: "\i. i \ Collect J0.arr \ \' i \ hom a' (D0 i)" using J.arr_char by auto have 1: "\i. i \ Collect J1.arr \ \' i \ hom a' (D1 i)" using J.arr_char \Collect J0.arr \ Collect J1.arr = {}\ by force interpret A0': constant_functor J0 C a' apply unfold_locales using \'.ide_apex by auto interpret A1': constant_functor J1 C a' apply unfold_locales using \'.ide_apex by auto interpret \0': cone J0 C D0 a' ?\0' proof (unfold_locales) fix j show "\ J0.arr j \ (if j \ Collect J0.arr then \' j else null) = null" by simp assume j: "J0.arr j" show 0: "dom (?\0' j) = A0'.map (J0.dom j)" using j by simp show 1: "cod (?\0' j) = D0 (J0.cod j)" using j J.arr_char J.cod_char D0.is_discrete by simp show "D0 j \ (?\0' (J0.dom j)) = ?\0' j" using 1 j J.arr_char D0.is_discrete comp_cod_arr by simp show "?\0' (J0.cod j) \ A0'.map j = ?\0' j" using 0 j J.arr_char D0.is_discrete comp_arr_dom by simp qed interpret \1': cone J1 C D1 a' ?\1' proof (unfold_locales) fix j show "\ J1.arr j \ (if j \ Collect J1.arr then \' j else null) = null" by simp assume j: "J1.arr j" show 0: "dom (?\1' j) = A1'.map (J1.dom j)" using j by simp show 1: "cod (?\1' j) = D1 (J1.cod j)" using assms(4) j J.arr_char J.cod_char D1.is_discrete by auto show "D1 j \ (?\1' (J1.dom j)) = ?\1' j" using 1 j J.arr_char D1.is_discrete comp_cod_arr by simp show "?\1' (J1.cod j) \ A1'.map j = ?\1' j" using 0 j J.arr_char D1.is_discrete comp_arr_dom by simp qed define f0 where "f0 = \0.induced_arrow a' ?\0'" define f1 where "f1 = \1.induced_arrow a' ?\1'" have f0: "\f0 : a' \ a0\" using f0_def \0.induced_arrowI \0'.cone_axioms by simp have f1: "\f1 : a' \ a1\" using f1_def \1.induced_arrowI \1'.cone_axioms by simp have 2: "a0xa1.is_rendered_commutative_by f0 f1" using f0 f1 by auto interpret p0p1: binary_product_cone C a0 a1 p0 p1 .. interpret f0f1: cone X.comp C a0xa1.map a' \a0xa1.mkCone f0 f1\ using 2 f0 f1 a0xa1.cone_mkCone [of f0 f1] by auto define f where "f = p0p1.induced_arrow a' (a0xa1.mkCone f0 f1)" have f: "\f : a' \ ?a\" using f_def 2 f0 f1 p0p1.induced_arrowI'(1) by auto moreover have \': "D.cones_map f ?\ = \'" proof fix j show "D.cones_map f ?\ j = \' j" proof (cases "J0.arr j", cases "J1.arr j") show "\J0.arr j; J1.arr j\ \ D.cones_map f ?\ j = \' j" using assms(4) by auto show "\J0.arr j; \ J1.arr j\ \ D.cones_map f ?\ j = \' j" proof - assume J0: "J0.arr j" and J1: "\ J1.arr j" have "D.cones_map f ?\ j = (\0 j \ p0) \ f" using f J0 J1 \.cone_axioms by auto also have "... = \0 j \ p0 \ f" using comp_assoc by simp also have "... = \0 j \ f0" using 2 f0 f1 f_def p0p1.induced_arrowI' by auto also have "... = \' j" proof - have "\0 j \ f0 = \0 j \ \0.induced_arrow' a' \'" unfolding f0_def by simp also have "... = (\j. if J0.arr j then \0 j \ \0.induced_arrow a' (\i. if i \ Collect J0.arr then \' i else null) else null) j" using J0 by simp also have "... = D0.mkCone \' j" proof - have "(\j. if J0.arr j then \0 j \ \0.induced_arrow a' (\i. if i \ Collect J0.arr then \' i else null) else null) = D0.mkCone \'" using f0 f0_def \0.induced_arrowI(2) [of ?\0' a'] J0 D0.mkCone_cone \0'.cone_axioms \0.cone_axioms J0 by auto thus ?thesis by meson qed also have "... 
= \' j" using J0 by simp finally show ?thesis by blast qed finally show ?thesis by simp qed show "\ J0.arr j \ D.cones_map f ?\ j = \' j" proof (cases "J1.arr j") show "\\ J0.arr j; \ J1.arr j\ \ D.cones_map f ?\ j = \' j" using f \.cone_axioms \'.is_extensional by auto show "\\ J0.arr j; J1.arr j\ \ D.cones_map f ?\ j = \' j" proof - assume J0: "\ J0.arr j" and J1: "J1.arr j" have "D.cones_map f ?\ j = (\1 j \ p1) \ f" using J0 J1 f \.cone_axioms by auto also have "... = \1 j \ p1 \ f" using comp_assoc by simp also have "... = \1 j \ f1" using 2 f0 f1 f_def p0p1.induced_arrowI' by auto also have "... = \' j" proof - have "\1 j \ f1 = \1 j \ \1.induced_arrow' a' \'" unfolding f1_def by simp also have "... = (\j. if J1.arr j then \1 j \ \1.induced_arrow a' (\i. if i \ Collect J1.arr then \' i else null) else null) j" using J1 by simp also have "... = D1.mkCone \' j" proof - have "(\j. if J1.arr j then \1 j \ \1.induced_arrow a' (\i. if i \ Collect J1.arr then \' i else null) else null) = D1.mkCone \'" using f1 f1_def \1.induced_arrowI(2) [of ?\1' a'] J1 D1.mkCone_cone [of a' \'] \1'.cone_axioms \1.cone_axioms J1 by auto thus ?thesis by meson qed also have "... = \' j" using J1 by simp finally show ?thesis by blast qed finally show ?thesis by simp qed qed qed qed ultimately show "\f : a' \ ?a\ \ D.cones_map f ?\ = \'" by blast show "\f'. \f' : a' \ ?a\ \ D.cones_map f' ?\ = \' \ f' = f" proof - fix f' assume f': "\f' : a' \ ?a\ \ D.cones_map f' ?\ = \'" let ?f0' = "p0 \ f'" let ?f1' = "p1 \ f'" have 1: "a0xa1.is_rendered_commutative_by ?f0' ?f1'" using f' p0 p1 p0p1.renders_commutative seqI' by auto have f0': "\?f0' : a' \ a0\" using f' p0 by auto have f1': "\?f1' : a' \ a1\" using f' p1 by auto have "p0 \ f = p0 \ f'" proof - have "D0.cones_map (p0 \ f) \0 = ?\0'" using f p0 \0.cone_axioms \' \.cone_axioms comp_assoc assms(4) seqI' by fastforce moreover have "D0.cones_map (p0 \ f') \0 = ?\0'" using f' p0 \0.cone_axioms \.cone_axioms comp_assoc assms(4) seqI' by fastforce moreover have "p0 \ f = f0" using 2 f0 f_def p0p1.induced_arrowI'(2) by blast ultimately show ?thesis using f0 f0' \0'.cone_axioms \0.is_universal [of a'] by auto qed moreover have "p1 \ f = p1 \ f'" proof - have "D1.cones_map (p1 \ f) \1 = ?\1'" proof fix j show "D1.cones_map (p1 \ f) \1 j = ?\1' j" using f p1 \1.cone_axioms \' \.cone_axioms comp_assoc assms(4) seqI' apply auto by auto qed moreover have "D1.cones_map (p1 \ f') \1 = ?\1'" proof fix j show "D1.cones_map (p1 \ f') \1 j = ?\1' j" using f' p1 \1.cone_axioms \.cone_axioms comp_assoc assms(4) seqI' apply auto by auto qed moreover have "p1 \ f = f1" using 2 f1 f_def p0p1.induced_arrowI'(3) by blast ultimately show ?thesis using f1 f1' \1'.cone_axioms \1.is_universal [of a'] by auto qed ultimately show "f' = f" using f f' p0p1.is_universal' [of a'] by (metis (no_types, lifting) "1" dom_comp in_homE p0p1.is_universal' p1 seqI') qed qed qed qed show "has_as_product J.comp ?D ?a" unfolding has_as_product_def using \.product_cone_axioms by auto qed end sublocale cartesian_category \ category_with_finite_products proof obtain t where t: "terminal t" using has_terminal by blast { fix n :: nat have "\I :: nat set. finite I \ card I = n \ has_products I" proof (induct n) show "\I :: nat set. finite I \ card I = 0 \ has_products I" proof - fix I :: "nat set" assume "finite I \ card I = 0" hence I: "I = {}" by force thus "has_products I" proof - interpret J: discrete_category I 0 apply unfold_locales using I by auto have "\D. discrete_diagram J.comp C D \ \a. 
has_as_product J.comp D a" proof - fix D assume D: "discrete_diagram J.comp C D" interpret D: discrete_diagram J.comp C D using D by auto interpret D: empty_diagram J.comp C D using I J.arr_char by unfold_locales simp have "has_as_product J.comp D t" using t D.has_as_limit_iff_terminal has_as_product_def product_cone_def J.category_axioms category_axioms D.discrete_diagram_axioms by metis thus "\a. has_as_product J.comp D a" by blast qed moreover have "I \ UNIV" using I by blast ultimately show ?thesis using I has_products_def by (metis category_with_terminal_object.has_terminal discrete_diagram.product_coneI discrete_diagram_def empty_diagram.has_as_limit_iff_terminal empty_diagram.intro empty_diagram_axioms.intro empty_iff has_as_product_def is_category_with_terminal_object mem_Collect_eq) qed qed show "\n I :: nat set. \ (\I :: nat set. finite I \ card I = n \ has_products I); finite I \ card I = Suc n \ \ has_products I" proof - fix n :: nat fix I :: "nat set" assume IH: "\I :: nat set. finite I \ card I = n \ has_products I" assume I: "finite I \ card I = Suc n" show "has_products I" proof - have "card I = 1 \ has_products I" using I has_unary_products by blast moreover have "card I \ 1 \ has_products I" proof - assume "card I \ 1" hence cardI: "card I > 1" using I by simp obtain i where i: "i \ I" using cardI by fastforce let ?I0 = "{i}" and ?I1 = "I - {i}" have 1: "I = ?I0 \ ?I1 \ ?I0 \ ?I1 = {} \ card ?I0 = 1 \ card ?I1 = n" using i I cardI by auto show "has_products I" proof (unfold has_products_def, intro conjI allI impI) show "I \ UNIV" using I by auto fix J D assume D: "discrete_diagram J C D \ Collect (partial_magma.arr J) = I" interpret D: discrete_diagram J C D using D by simp have Null: "D.J.null \ ?I0 \ D.J.null \ ?I1" using D D.J.not_arr_null i by blast interpret J0: discrete_category ?I0 D.J.null using 1 Null D by unfold_locales auto interpret J1: discrete_category ?I1 D.J.null using Null by unfold_locales auto interpret J0uJ1: discrete_category \Collect J0.arr \ Collect J1.arr\ J0.null using Null 1 J0.null_char J1.null_char by unfold_locales auto interpret D0: discrete_diagram_from_map ?I0 C D D.J.null using 1 J0.ide_char D.preserves_ide D D.is_discrete i by unfold_locales auto interpret D1: discrete_diagram_from_map ?I1 C D D.J.null using 1 J1.ide_char D.preserves_ide D D.is_discrete i by unfold_locales auto obtain a0 where a0: "has_as_product J0.comp D0.map a0" using 1 has_unary_products [of ?I0] has_products_def [of ?I0] D0.discrete_diagram_axioms by fastforce obtain a1 where a1: "has_as_product J1.comp D1.map a1" using 1 I IH [of ?I1] has_products_def [of ?I1] D1.discrete_diagram_axioms by blast have 2: "\p0 p1. has_as_binary_product a0 a1 p0 p1" proof - have "ide a0 \ ide a1" using a0 a1 product_is_ide by auto thus ?thesis using a0 a1 has_binary_products has_binary_products_def by simp qed obtain p0 p1 where a: "has_as_binary_product a0 a1 p0 p1" using 2 by auto let ?a = "dom p0" have "has_as_product J D ?a" proof - have "D = (\j. if j \ Collect J0.arr then D0.map j else if j \ Collect J1.arr then D1.map j else null)" proof fix j show "D j = (if j \ Collect J0.arr then D0.map j else if j \ Collect J1.arr then D1.map j else null)" using 1 D0.map_def D1.map_def D.is_extensional D J0.arr_char J1.arr_char by auto qed moreover have "J = J0uJ1.comp" proof - have "\j j'. 
J j j' = J0uJ1.comp j j'" proof - fix j j' show "J j j' = J0uJ1.comp j j'" using D J0uJ1.arr_char J0.arr_char J1.arr_char D.is_discrete i apply (cases "j \ ?I0", cases "j' \ ?I0") apply simp_all apply auto[1] apply (metis D.J.comp_arr_ide D.J.comp_ide_arr D.J.ext D.J.seqE D.is_discrete J0.null_char J0uJ1.null_char) by (metis D.J.comp_arr_ide D.J.comp_ide_arr D.J.comp_ide_self D.J.ext D.J.seqE D.is_discrete J0.null_char J0uJ1.null_char mem_Collect_eq) qed thus ?thesis by blast qed moreover have "Collect J0.arr \ Collect J1.arr = {}" by auto moreover have "J0.null = J1.null" using J0.null_char J1.null_char by simp ultimately show "has_as_product J D ?a" using binary_product_of_products_is_product [of J0.comp D0.map a0 J1.comp D1.map a1 p0 p1] J0.arr_char J1.arr_char 1 a0 a1 a by simp qed thus "\a. has_as_product J D a" by blast qed qed ultimately show "has_products I" by blast qed qed qed } hence 1: "\n I :: nat set. finite I \ card I = n \ has_products I" by simp thus "\I :: nat set. finite I \ has_products I" by blast qed proposition (in cartesian_category) is_category_with_finite_products: shows "category_with_finite_products C" .. end diff --git a/thys/Category3/CategoryWithPullbacks.thy b/thys/Category3/CategoryWithPullbacks.thy --- a/thys/Category3/CategoryWithPullbacks.thy +++ b/thys/Category3/CategoryWithPullbacks.thy @@ -1,1114 +1,1177 @@ (* Title: CategoryWithPullbacks Author: Eugene W. Stark , 2019 Maintainer: Eugene W. Stark *) chapter "Category with Pullbacks" theory CategoryWithPullbacks imports Limit begin text \ \sloppypar In this chapter, we give a traditional definition of pullbacks in a category as limits of cospan diagrams and we define a locale \category_with_pullbacks\ that is satisfied by categories in which every cospan diagram has a limit. These definitions build on the general definition of limit that we gave in @{theory Category3.Limit}. We then define a locale \elementary_category_with_pullbacks\ that axiomatizes categories equipped with chosen functions that assign to each cospan a corresponding span of ``projections'', which enjoy the familiar universal property of a pullback. After developing consequences of the axioms, we prove that the two locales are in agreement, in the sense that every interpretation of \category_with_pullbacks\ extends to an interpretation of \elementary_category_with_pullbacks\, and conversely, the underlying category of an interpretation of \elementary_category_with_pullbacks\ always yields an interpretation of \category_with_pullbacks\. \ section "Commutative Squares" context category begin text \ The following provides some useful technology for working with commutative squares. 
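Informally, a commutative square over a cospan (f, g) is a span (h, k) with dom f = cod h such that f \<cdot> h = g \<cdot> k; the universal property of a pullback, formalized later in this chapter, asserts that every such square factors uniquely through the pullback projections.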
\ definition commutative_square where "commutative_square f g h k \ cospan f g \ span h k \ dom f = cod h \ f \ h = g \ k" lemma commutative_squareI [intro, simp]: assumes "cospan f g" and "span h k" and "dom f = cod h" and "f \ h = g \ k" shows "commutative_square f g h k" using assms commutative_square_def by auto lemma commutative_squareE [elim]: assumes "commutative_square f g h k" and "\ arr f; arr g; arr h; arr k; cod f = cod g; dom h = dom k; dom f = cod h; dom g = cod k; f \ h = g \ k \ \ T" shows T using assms commutative_square_def by (metis (mono_tags, lifting) seqE seqI) lemma commutative_square_comp_arr: assumes "commutative_square f g h k" and "seq h l" shows "commutative_square f g (h \ l) (k \ l)" using assms apply (elim commutative_squareE, intro commutative_squareI, auto) using comp_assoc by metis lemma arr_comp_commutative_square: assumes "commutative_square f g h k" and "seq l f" shows "commutative_square (l \ f) (l \ g) h k" using assms comp_assoc by (elim commutative_squareE, intro commutative_squareI, auto) end section "Cospan Diagrams" (* TODO: Rework the ugly development of equalizers into this form. *) text \ The ``shape'' of a cospan diagram is a category having two non-identity arrows with distinct domains and a common codomain. \ locale cospan_shape begin datatype Arr = Null | AA | BB | TT | AT | BT fun comp where "comp AA AA = AA" | "comp AT AA = AT" | "comp TT AT = AT" | "comp BB BB = BB" | "comp BT BB = BT" | "comp TT BT = BT" | "comp TT TT = TT" | "comp _ _ = Null" interpretation partial_magma comp proof show "\!n. \f. comp n f = n \ comp f n = n" proof show "\f. comp Null f = Null \ comp f Null = Null" by simp show "\n. \f. comp n f = n \ comp f n = n \ n = Null" by (metis comp.simps(8)) qed qed lemma null_char: shows "null = Null" proof - have "\f. comp Null f = Null \ comp f Null = Null" by simp thus ?thesis using null_def ex_un_null theI [of "\n. \f. comp n f = n \ comp f n = n"] by (metis partial_magma.comp_null(2) partial_magma_axioms) qed lemma ide_char: shows "ide f \ f = AA \ f = BB \ f = TT" proof show "ide f \ f = AA \ f = BB \ f = TT" using ide_def null_char by (cases f, simp_all) show "f = AA \ f = BB \ f = TT \ ide f" proof - have 1: "\f g. 
f = AA \ f = BB \ f = TT \ comp f f \ Null \ (comp g f \ Null \ comp g f = g) \ (comp f g \ Null \ comp f g = g)" proof - fix f g show "f = AA \ f = BB \ f = TT \ comp f f \ Null \ (comp g f \ Null \ comp g f = g) \ (comp f g \ Null \ comp f g = g)" by (cases f; cases g, auto) qed assume f: "f = AA \ f = BB \ f = TT" show "ide f" using f 1 ide_def null_char by simp qed qed fun Dom where "Dom AA = AA" | "Dom BB = BB" | "Dom TT = TT" | "Dom AT = AA" | "Dom BT = BB" | "Dom _ = Null" fun Cod where "Cod AA = AA" | "Cod BB = BB" | "Cod TT = TT" | "Cod AT = TT" | "Cod BT = TT" | "Cod _ = Null" lemma domains_char': shows "domains f = (if f = Null then {} else {Dom f})" using domains_def ide_char null_char by (cases f, auto) lemma codomains_char': shows "codomains f = (if f = Null then {} else {Cod f})" using codomains_def ide_char null_char by (cases f, auto) lemma arr_char: shows "arr f \ f \ Null" using arr_def domains_char' codomains_char' by simp lemma seq_char: shows "seq g f \ (f = AA \ (g = AA \ g = AT)) \ (f = BB \ (g = BB \ g = BT)) \ (f = AT \ g = TT) \ (f = BT \ g = TT) \ (f = TT \ g = TT)" using arr_char null_char by (cases f; cases g, simp_all) interpretation category comp proof fix f g h show "comp g f \ null \ seq g f" using null_char arr_char seq_char by simp show "domains f \ {} \ codomains f \ {}" using domains_char' codomains_char' by auto show "seq h g \ seq (comp h g) f \ seq g f" using seq_char arr_char by (cases g; cases h; simp_all) show "seq h (comp g f) \ seq g f \ seq h g" using seq_char arr_char by (cases f; cases g; simp_all) show "seq g f \ seq h g \ seq (comp h g) f" using seq_char arr_char by (cases f; simp_all; cases g; simp_all; cases h; auto) show "seq g f \ seq h g \ comp (comp h g) f = comp h (comp g f)" using seq_char by (cases f; simp_all; cases g; simp_all; cases h; auto) qed lemma is_category: shows "category comp" .. lemma dom_char: shows "dom = Dom" using dom_def domains_char domains_char' null_char by fastforce lemma cod_char: shows "cod = Cod" using cod_def codomains_char codomains_char' null_char by fastforce end sublocale cospan_shape \ category comp using is_category by auto locale cospan_diagram = J: cospan_shape + C: category C for C :: "'c comp" (infixr "\" 55) and f0 :: 'c and f1 :: 'c + assumes is_cospan: "C.cospan f0 f1" begin no_notation J.comp (infixr "\" 55) notation J.comp (infixr "\\<^sub>J" 55) fun map where "map J.AA = C.dom f0" | "map J.BB = C.dom f1" | "map J.TT = C.cod f0" | "map J.AT = f0" | "map J.BT = f1" | "map _ = C.null" end sublocale cospan_diagram \ diagram J.comp C map proof show "\f. \ J.arr f \ map f = C.null" using J.arr_char by simp fix f assume f: "J.arr f" show "C.arr (map f)" using f J.arr_char is_cospan by (cases f, simp_all) show "C.dom (map f) = map (J.dom f)" using f J.arr_char J.dom_char is_cospan by (cases f, simp_all) show "C.cod (map f) = map (J.cod f)" using f J.arr_char J.cod_char is_cospan by (cases f, simp_all) next fix f g assume fg: "J.seq g f" show "map (g \\<^sub>J f) = map g \ map f" using fg J.seq_char J.null_char J.not_arr_null is_cospan apply (cases f; cases g, simp_all) using C.comp_arr_dom C.comp_cod_arr by auto qed section "Category with Pullbacks" text \ A \emph{pullback} in a category @{term C} is a limit of a cospan diagram in @{term C}. \ context cospan_diagram begin definition mkCone where "mkCone p0 p1 \ \j. 
if j = J.AA then p0 else if j = J.BB then p1 else if j = J.AT then f0 \ p0 else if j = J.BT then f1 \ p1 else if j = J.TT then f0 \ p0 else C.null" abbreviation is_rendered_commutative_by where "is_rendered_commutative_by p0 p1 \ C.seq f0 p0 \ f0 \ p0 = f1 \ p1" abbreviation has_as_pullback where "has_as_pullback p0 p1 \ limit_cone (C.dom p0) (mkCone p0 p1)" lemma cone_mkCone: assumes "is_rendered_commutative_by p0 p1" shows "cone (C.dom p0) (mkCone p0 p1)" proof - interpret E: constant_functor J.comp C \C.dom p0\ apply unfold_locales using assms by auto show "cone (C.dom p0) (mkCone p0 p1)" proof fix f show "\ J.arr f \ mkCone p0 p1 f = C.null" using mkCone_def J.arr_char by simp assume f: "J.arr f" show "C.dom (mkCone p0 p1 f) = E.map (J.dom f)" using assms f mkCone_def J.arr_char J.dom_char apply (cases f, simp_all) by (metis C.dom_comp)+ show "C.cod (mkCone p0 p1 f) = map (J.cod f)" using assms f mkCone_def J.arr_char J.cod_char is_cospan by (cases f, auto) show "map f \ mkCone p0 p1 (J.dom f) = mkCone p0 p1 f" using assms f mkCone_def J.arr_char J.dom_char C.comp_ide_arr is_cospan by (cases f, auto) show "mkCone p0 p1 (J.cod f) \ E.map f = mkCone p0 p1 f" using assms f mkCone_def J.arr_char J.cod_char C.comp_arr_dom apply (cases f, auto) apply (metis C.dom_comp C.seqE) by (metis C.dom_comp)+ qed qed lemma is_rendered_commutative_by_cone: assumes "cone a \" shows "is_rendered_commutative_by (\ J.AA) (\ J.BB)" proof - interpret \: cone J.comp C map a \ using assms by auto show ?thesis proof show "C.seq f0 (\ J.AA)" by (metis C.seqI J.cod_char J.seq_char \.preserves_cod \.preserves_reflects_arr J.seqE is_cospan J.Cod.simps(1) map.simps(1)) show "f0 \ \ J.AA = f1 \ \ J.BB" by (metis J.cod_char J.dom_char \.A.map_simp \.naturality J.Cod.simps(4-5) J.Dom.simps(4-5) J.comp.simps(2,5) J.seq_char map.simps(4-5)) qed qed lemma mkCone_cone: assumes "cone a \" shows "mkCone (\ J.AA) (\ J.BB) = \" proof - interpret \: cone J.comp C map a \ using assms by auto have 1: "is_rendered_commutative_by (\ J.AA) (\ J.BB)" using assms is_rendered_commutative_by_cone by blast interpret mkCone_\: cone J.comp C map \C.dom (\ J.AA)\ \mkCone (\ J.AA) (\ J.BB)\ using assms cone_mkCone 1 by auto show ?thesis proof - have "\j. j = J.AA \ mkCone (\ J.AA) (\ J.BB) j = \ j" using mkCone_def \.is_extensional by simp moreover have "\j. j = J.BB \ mkCone (\ J.AA) (\ J.BB) j = \ j" using mkCone_def \.is_extensional by simp moreover have "\j. j = J.TT \ mkCone (\ J.AA) (\ J.BB) j = \ j" using 1 mkCone_def \.is_extensional \.A.map_simp \.preserves_comp_1 cospan_shape.seq_char \.is_natural_2 apply simp by (metis J.seqE J.comp.simps(5) map.simps(5)) ultimately have "\j. 
J.ide j \ mkCone (\ J.AA) (\ J.BB) j = \ j" using J.ide_char by auto thus "mkCone (\ J.AA) (\ J.BB) = \" using mkCone_def NaturalTransformation.eqI [of J.comp C] \.natural_transformation_axioms mkCone_\.natural_transformation_axioms J.ide_char by simp qed qed + lemma cone_iff_commutative_square: + shows "cone (C.dom h) (mkCone h k) \ C.commutative_square f0 f1 h k" + using cone_mkCone mkCone_def J.arr_char J.ide_char is_rendered_commutative_by_cone + is_cospan C.commutative_square_def cospan_shape.Arr.simps(11) + C.dom_comp C.seqE C.seqI + apply (intro iffI) + by (intro C.commutative_squareI) metis+ + + lemma cones_map_mkCone_eq_iff: + assumes "is_rendered_commutative_by p0 p1" and "is_rendered_commutative_by p0' p1'" + and "\h : C.dom p0' \ C.dom p0\" + shows "cones_map h (mkCone p0 p1) = mkCone p0' p1' \ p0 \ h = p0' \ p1 \ h = p1'" + proof - + interpret \: cone J.comp C map \C.dom p0\ \mkCone p0 p1\ + using assms(1) cone_mkCone [of p0 p1] by blast + interpret \': cone J.comp C map \C.dom p0'\ \mkCone p0' p1'\ + using assms(2) cone_mkCone [of p0' p1'] by blast + show ?thesis + proof + assume 3: "cones_map h (mkCone p0 p1) = mkCone p0' p1'" + show "p0 \ h = p0' \ p1 \ h = p1'" + proof + show "p0 \ h = p0'" + proof - + have "p0' = mkCone p0' p1' J.AA" + using mkCone_def J.arr_char by simp + also have "... = cones_map h (mkCone p0 p1) J.AA" + using 3 by simp + also have "... = p0 \ h" + using assms mkCone_def J.arr_char \.cone_axioms by auto + finally show ?thesis by auto + qed + show "p1 \ h = p1'" + proof - + have "p1' = mkCone p0' p1' J.BB" + using mkCone_def J.arr_char by simp + also have "... = cones_map h (mkCone p0 p1) J.BB" + using 3 by simp + also have "... = p1 \ h" + using assms mkCone_def J.arr_char \.cone_axioms by auto + finally show ?thesis by auto + qed + qed + next + assume 4: "p0 \ h = p0' \ p1 \ h = p1'" + show "cones_map h (mkCone p0 p1) = mkCone p0' p1'" + proof + fix j + have "\ J.arr j \ cones_map h (mkCone p0 p1) j = mkCone p0' p1' j" + using assms \.cone_axioms mkCone_def J.arr_char by auto + moreover have "J.arr j \ cones_map h (mkCone p0 p1) j = mkCone p0' p1' j" + using assms 4 \.cone_axioms mkCone_def J.arr_char C.comp_assoc + by fastforce + ultimately show "cones_map h (mkCone p0 p1) j = mkCone p0' p1' j" + using J.arr_char J.Dom.cases by blast + qed + qed + qed + end locale pullback_cone = J: cospan_shape + C: category C + D: cospan_diagram C f0 f1 + limit_cone J.comp C D.map \C.dom p0\ \D.mkCone p0 p1\ for C :: "'c comp" (infixr "\" 55) and f0 :: 'c and f1 :: 'c and p0 :: 'c and p1 :: 'c begin (* TODO: Equalizer should be simplifiable in the same way. *) lemma renders_commutative: shows "D.is_rendered_commutative_by p0 p1" using D.mkCone_def D.cospan_diagram_axioms cone_axioms cospan_diagram.is_rendered_commutative_by_cone by fastforce lemma is_universal': assumes "D.is_rendered_commutative_by p0' p1'" shows "\!h. \h : C.dom p0' \ C.dom p0\ \ p0 \ h = p0' \ p1 \ h = p1'" proof - have "D.cone (C.dom p0') (D.mkCone p0' p1')" using assms D.cone_mkCone by blast hence 1: "\!h. \h : C.dom p0' \ C.dom p0\ \ D.cones_map h (D.mkCone p0 p1) = D.mkCone p0' p1'" using is_universal by simp have 2: "\h. 
\h : C.dom p0' \ C.dom p0\ \ D.cones_map h (D.mkCone p0 p1) = D.mkCone p0' p1' \ p0 \ h = p0' \ p1 \ h = p1'" - proof - - fix h - assume h: "\h : C.dom p0' \ C.dom p0\" - show "D.cones_map h (D.mkCone p0 p1) = D.mkCone p0' p1' \ - p0 \ h = p0' \ p1 \ h = p1'" - proof - assume 3: "D.cones_map h (D.mkCone p0 p1) = D.mkCone p0' p1'" - show "p0 \ h = p0' \ p1 \ h = p1'" - proof - show "p0 \ h = p0'" - proof - - have "p0' = D.mkCone p0' p1' J.AA" - using D.mkCone_def J.arr_char by simp - also have "... = D.cones_map h (D.mkCone p0 p1) J.AA" - using 3 by simp - also have "... = p0 \ h" - using h D.mkCone_def J.arr_char cone_\ by auto - finally show ?thesis by auto - qed - show "p1 \ h = p1'" - proof - - have "p1' = D.mkCone p0' p1' J.BB" - using D.mkCone_def J.arr_char by simp - also have "... = D.cones_map h (D.mkCone p0 p1) J.BB" - using 3 by simp - also have "... = p1 \ h" - using h D.mkCone_def J.arr_char cone_\ by auto - finally show ?thesis by auto - qed - qed - next - assume 4: "p0 \ h = p0' \ p1 \ h = p1'" - show "D.cones_map h (D.mkCone p0 p1) = D.mkCone p0' p1'" - proof - fix j - have "\ J.arr j \ D.cones_map h (D.mkCone p0 p1) j = D.mkCone p0' p1' j" - using h cone_axioms D.mkCone_def J.arr_char by auto - moreover have "J.arr j \ - D.cones_map h (D.mkCone p0 p1) j = D.mkCone p0' p1' j" - using assms h 4 cone_\ D.mkCone_def J.arr_char renders_commutative - C.comp_assoc - by fastforce - ultimately show "D.cones_map h (D.mkCone p0 p1) j = D.mkCone p0' p1' j" - using J.arr_char J.Dom.cases by blast - qed - qed - qed + using assms D.cones_map_mkCone_eq_iff [of p0 p1 p0' p1'] renders_commutative + by fastforce thus ?thesis using 1 by blast qed lemma induced_arrowI': assumes "D.is_rendered_commutative_by p0' p1'" shows "\induced_arrow (C.dom p0') (D.mkCone p0' p1') : C.dom p0' \ C.dom p0\" and "p0 \ induced_arrow (C.dom p0') (D.mkCone p0' p1') = p0'" and "p1 \ induced_arrow (C.dom p1') (D.mkCone p0' p1') = p1'" proof - interpret A': constant_functor J.comp C \C.dom p0'\ using assms by (unfold_locales, auto) have cone: "D.cone (C.dom p0') (D.mkCone p0' p1')" using assms D.cone_mkCone [of p0' p1'] by blast show 1: "p0 \ induced_arrow (C.dom p0') (D.mkCone p0' p1') = p0'" proof - have "p0 \ induced_arrow (C.dom p0') (D.mkCone p0' p1') = D.cones_map (induced_arrow (C.dom p0') (D.mkCone p0' p1')) (D.mkCone p0 p1) J.AA" using cone induced_arrowI(1) D.mkCone_def J.arr_char cone_\ by force also have "... = p0'" proof - have "D.cones_map (induced_arrow (C.dom p0') (D.mkCone p0' p1')) (D.mkCone p0 p1) = D.mkCone p0' p1'" using cone induced_arrowI by blast thus ?thesis using J.arr_char D.mkCone_def by simp qed finally show ?thesis by auto qed show 2: "p1 \ induced_arrow (C.dom p1') (D.mkCone p0' p1') = p1'" proof - have "p1 \ induced_arrow (C.dom p1') (D.mkCone p0' p1') = D.cones_map (induced_arrow (C.dom p0') (D.mkCone p0' p1')) (D.mkCone p0 p1) J.BB" proof - have "C.dom p0' = C.dom p1'" using assms by (metis C.dom_comp) thus ?thesis using cone induced_arrowI(1) D.mkCone_def J.arr_char cone_\ by force qed also have "... 
= p1'" proof - have "D.cones_map (induced_arrow (C.dom p0') (D.mkCone p0' p1')) (D.mkCone p0 p1) = D.mkCone p0' p1'" using cone induced_arrowI by blast thus ?thesis using J.arr_char D.mkCone_def by simp qed finally show ?thesis by auto qed show "\induced_arrow (C.dom p0') (D.mkCone p0' p1') : C.dom p0' \ C.dom p0\" using 1 cone induced_arrowI by simp qed end context category begin definition has_as_pullback where "has_as_pullback f0 f1 p0 p1 \ cospan f0 f1 \ cospan_diagram.has_as_pullback C f0 f1 p0 p1" definition has_pullbacks where "has_pullbacks = (\f0 f1. cospan f0 f1 \ (\p0 p1. has_as_pullback f0 f1 p0 p1))" + lemma has_as_pullbackI [intro]: + assumes "cospan f g" and "commutative_square f g p q" + and "\h k. commutative_square f g h k \ \!l. p \ l = h \ q \ l = k" + shows "has_as_pullback f g p q" + proof (unfold has_as_pullback_def, intro conjI) + show "arr f" and "arr g" and "cod f = cod g" + using assms(1) by auto + interpret J: cospan_shape . + interpret D: cospan_diagram C f g + using assms(1-2) by unfold_locales auto + show "D.has_as_pullback p q" + proof - + have 1: "D.is_rendered_commutative_by p q" + using assms ide_in_hom by blast + let ?\ = "D.mkCone p q" + let ?a = "dom p" + interpret \: cone J.comp C D.map ?a ?\ + using assms(2) D.cone_mkCone 1 by auto + interpret \: limit_cone J.comp C D.map ?a ?\ + proof + fix x \' + assume \': "D.cone x \'" + interpret \': cone J.comp C D.map x \' + using \' by simp + have 2: "D.is_rendered_commutative_by (\' J.AA) (\' J.BB)" + using \' D.is_rendered_commutative_by_cone [of x \'] by blast + have 3: "\!l. p \ l = \' J.AA \ q \ l = \' J.BB" + using assms(1,3) 2 \'.preserves_hom J.arr_char J.ide_char by simp + obtain l where l: "p \ l = \' J.AA \ q \ l = \' J.BB" + using 3 by blast + have "\l : x \ ?a\" + using l 2 \'.preserves_hom J.arr_char J.ide_char \'.component_in_hom + \'.is_extensional \'.preserves_reflects_arr comp_in_homE comp_null(2) in_homE + by metis + moreover have "D.cones_map l (D.mkCone p q) = \'" + using l D.cones_map_mkCone_eq_iff [of p q "\' J.AA" "\' J.BB" l] + by (metis (no_types, lifting) 1 2 D.mkCone_cone \' calculation dom_comp in_homE seqE) + ultimately have "\l. \l : x \ ?a\ \ D.cones_map l (D.mkCone p q) = \'" + by blast + moreover have "\l'. \\l' : x \ ?a\; D.cones_map l' (D.mkCone p q) = \'\ \ l' = l" + proof - + fix l' + assume l': "\l' : x \ ?a\" + assume eq: "D.cones_map l' (D.mkCone p q) = \'" + have "p \ l' = \' J.AA \ q \ l' = \' J.BB" + using l' eq J.arr_char \.cone_axioms D.mkCone_def by auto + thus "l' = l" + using 3 l by blast + qed + ultimately show "\!l. \l : x \ ?a\ \ D.cones_map l (D.mkCone p q) = \'" + by blast + qed + show "D.has_as_pullback p q" + using assms \.limit_cone_axioms by blast + qed + qed + + lemma has_as_pullbackE [elim]: + assumes "has_as_pullback f g p q" + and "\cospan f g; commutative_square f g p q; + \h k. commutative_square f g h k \ \!l. p \ l = h \ q \ l = k\ \ T" + shows T + proof - + interpret J: cospan_shape . + interpret D: cospan_diagram C f g + using assms(1) has_as_pullback_def + by (meson category_axioms cospan_diagram.intro cospan_diagram_axioms.intro) + have 1: "\h k. 
commutative_square f g h k \ D.cone (dom h) (D.mkCone h k)" + using D.cone_iff_commutative_square by presburger + let ?\ = "D.mkCone p q" + interpret \: limit_cone J.comp C D.map \dom p\ ?\ + using assms(1) has_as_pullback_def D.cone_mkCone by blast + have "cospan f g" + using D.is_cospan by blast + moreover have csq: "commutative_square f g p q" + using 1 \.cone_axioms by blast + moreover have "\h k. commutative_square f g h k \ \!l. p \ l = h \ q \ l = k" + proof - + fix h k + assume 2: "commutative_square f g h k" + let ?\' = "D.mkCone h k" + interpret \': cone J.comp C D.map \dom h\ ?\' + using 1 2 by blast + have 3: "\!l. \l : dom h \ dom p\ \ D.cones_map l ?\ = ?\'" + using 1 2 \.is_universal [of "dom h" "D.mkCone h k"] by blast + obtain l where l: "\l : dom h \ dom p\ \ D.cones_map l ?\ = ?\'" + using 3 by blast + have "p \ l = h \ q \ l = k" + proof + have "p \ l = D.cones_map l ?\ J.AA" + using \.cone_axioms D.mkCone_def J.seq_char in_homE + apply simp + by (metis J.seqE l) + also have "... = h" + using l \'.cone_axioms D.mkCone_def J.seq_char in_homE by simp + finally show "p \ l = h" by blast + have "q \ l = D.cones_map l ?\ J.BB" + using \.cone_axioms D.mkCone_def J.seq_char in_homE + apply simp + by (metis J.seqE l) + also have "... = k" + using l \'.cone_axioms D.mkCone_def J.seq_char in_homE by simp + finally show "q \ l = k" by blast + qed + moreover have "\l'. p \ l' = h \ q \ l' = k \ l' = l" + proof - + fix l' + assume 1: "p \ l' = h \ q \ l' = k" + have 2: "\l' : dom h \ dom p\" + using 1 + by (metis \'.ide_apex arr_dom_iff_arr arr_iff_in_hom ideD(1) seqE dom_comp) + moreover have "D.cones_map l' ?\ = ?\'" + using D.cones_map_mkCone_eq_iff + by (meson 1 2 csq D.cone_iff_commutative_square \'.cone_axioms + commutative_squareE seqI) + ultimately show "l' = l" + using l \.is_universal \'.cone_axioms by blast + qed + ultimately show "\!l. p \ l = h \ q \ l = k" by blast + qed + ultimately show T + using assms(2) by blast + qed + end locale category_with_pullbacks = category + assumes has_pullbacks: has_pullbacks section "Elementary Category with Pullbacks" text \ An \emph{elementary category with pullbacks} is a category equipped with a specific way of mapping each cospan to a span such that the resulting square commutes and such that the span is universal for that property. It is useful to assume that the functions, mapping a cospan to the two projections of the pullback, are extensional; that is, they yield @{term null} when applied to arguments that do not form a cospan. \ locale elementary_category_with_pullbacks = category C for C :: "'a comp" (infixr "\" 55) and prj0 :: "'a \ 'a \ 'a" ("\
\<^sub>0[_, _]") and prj1 :: "'a \ 'a \ 'a" ("\\<^sub>1[_, _]") + assumes prj0_ext: "\ cospan f g \ \\<^sub>0[f, g] = null" and prj1_ext: "\ cospan f g \ \\<^sub>1[f, g] = null" and pullback_commutes [intro]: "cospan f g \ commutative_square f g \\<^sub>1[f, g] \\<^sub>0[f, g]" and universal: "commutative_square f g h k \ \!l. \\<^sub>1[f, g] \ l = h \ \\<^sub>0[f, g] \ l = k" begin
lemma pullback_commutes': assumes "cospan f g" shows "f \ \\<^sub>1[f, g] = g \ \\<^sub>0[f, g]" using assms commutative_square_def by blast
lemma prj0_in_hom': assumes "cospan f g" shows "\\\<^sub>0[f, g] : dom \\<^sub>0[f, g] \ dom g\" using assms pullback_commutes by (metis category.commutative_squareE category_axioms in_homI)
lemma prj1_in_hom': assumes "cospan f g" shows "\\\<^sub>1[f, g] : dom \\<^sub>0[f, g] \ dom f\" using assms pullback_commutes by (metis category.commutative_squareE category_axioms in_homI)
text \ The following gives us a notation for the common domain of the two projections of a pullback. \
definition pbdom (infix "\\" 51) where "f \\ g \ dom \\<^sub>0[f, g]"
lemma pbdom_in_hom [intro]: assumes "cospan f g" shows "\f \\ g : f \\ g \ f \\ g\" unfolding pbdom_def using assms prj0_in_hom' by (metis arr_dom_iff_arr arr_iff_in_hom cod_dom dom_dom in_homE)
lemma ide_pbdom [simp]: assumes "cospan f g" shows "ide (f \\ g)" using assms ide_in_hom by auto[1]
lemma prj0_in_hom [intro, simp]: assumes "cospan f g" and "a = f \\ g" and "b = dom g" shows "\\\<^sub>0[f, g] : a \ b\" unfolding pbdom_def using assms prj0_in_hom' by (simp add: pbdom_def)
lemma prj1_in_hom [intro, simp]: assumes "cospan f g" and "a = f \\ g" and "b = dom f" shows "\\\<^sub>1[f, g] : a \ b\" unfolding pbdom_def using assms prj1_in_hom' by (simp add: pbdom_def)
lemma prj0_simps [simp]: assumes "cospan f g" shows "arr \\<^sub>0[f, g]" and "dom \\<^sub>0[f, g] = f \\ g" and "cod \\<^sub>0[f, g] = dom g" using assms prj0_in_hom by (blast, blast, blast)
lemma prj0_simps_arr [iff]: shows "arr \\<^sub>0[f, g] \ cospan f g" proof show "cospan f g \ arr \\<^sub>0[f, g]" using prj0_in_hom by auto show "arr \\<^sub>0[f, g] \ cospan f g" using prj0_ext not_arr_null by metis qed
lemma prj1_simps [simp]: assumes "cospan f g" shows "arr \\<^sub>1[f, g]" and "dom \\<^sub>1[f, g] = f \\ g" and "cod \\<^sub>1[f, g] = dom f" using assms prj1_in_hom by (blast, blast, blast)
lemma prj1_simps_arr [iff]: shows "arr \\<^sub>1[f, g] \ cospan f g" proof show "cospan f g \ arr \\<^sub>1[f, g]" using prj1_in_hom by auto show "arr \\<^sub>1[f, g] \ cospan f g" using prj1_ext not_arr_null by metis qed
lemma span_prj: assumes "cospan f g" shows "span \\<^sub>0[f, g] \\<^sub>1[f, g]" using assms by simp
text \ We introduce a notation for tupling, which produces the induced arrow into a pullback. In our notation, the ``$0$-side'', which we regard as the input, occurs on the right, and the ``$1$-side'', which we regard as the output, occurs on the left. \
definition tuple ("\_ \_, _\ _\") where "\h \f, g\ k\ \ if commutative_square f g h k then THE l. \\<^sub>0[f, g] \ l = k \ \\<^sub>1[f, g] \ l = h else null"
lemma tuple_in_hom [intro]: assumes "commutative_square f g h k" shows "\\h \f, g\ k\ : dom h \ f \\ g\" proof have 1: "\\<^sub>0[f, g] \ \h \f, g\ k\ = k \ \\<^sub>1[f, g] \ \h \f, g\ k\ = h" unfolding tuple_def using assms universal theI [of "\l. \\<^sub>0[f, g] \ l = k \ \\<^sub>1[f, g] \ l = h"] apply simp by meson show "arr \h \f, g\ k\" using assms 1 apply (elim commutative_squareE) by (metis (no_types, lifting) seqE) show "dom \h \f, g\ k\ = dom h" using assms 1 apply (elim commutative_squareE) by (metis (no_types, lifting) dom_comp) show "cod \h \f, g\ k\ = f \\ g" unfolding pbdom_def using assms 1 apply (elim commutative_squareE) by (metis seqE) qed
lemma tuple_is_extensional: assumes "\ commutative_square f g h k" shows "\h \f, g\ k\ = null" unfolding tuple_def using assms by simp
lemma tuple_simps [simp]: assumes "commutative_square f g h k" shows "arr \h \f, g\ k\" and "dom \h \f, g\ k\ = dom h" and "cod \h \f, g\ k\ = f \\ g" using assms tuple_in_hom apply blast using assms tuple_in_hom apply blast using assms tuple_in_hom by blast
lemma prj_tuple [simp]: assumes "commutative_square f g h k" shows "\\<^sub>0[f, g] \ \h \f, g\ k\ = k" and "\\<^sub>1[f, g] \ \h \f, g\ k\ = h" proof - have 1: "\\<^sub>0[f, g] \ \h \f, g\ k\ = k \ \\<^sub>1[f, g] \ \h \f, g\ k\ = h" unfolding tuple_def using assms universal theI [of "\l. \\<^sub>0[f, g] \ l = k \ \\<^sub>1[f, g] \ l = h"] apply simp by meson show "\\<^sub>0[f, g] \ \h \f, g\ k\ = k" using 1 by simp show "\\<^sub>1[f, g] \ \h \f, g\ k\ = h" using 1 by simp qed
lemma tuple_prj: assumes "cospan f g" and "seq \\<^sub>1[f, g] h" shows "\\\<^sub>1[f, g] \ h \f, g\ \\<^sub>0[f, g] \ h\ = h" proof - have 1: "commutative_square f g (\\<^sub>1[f, g] \ h) (\\<^sub>0[f, g] \ h)" using assms pullback_commutes by (simp add: commutative_square_comp_arr) have "\\<^sub>0[f, g] \ \\\<^sub>1[f, g] \ h \f, g\ \\<^sub>0[f, g] \ h\ = \\<^sub>0[f, g] \ h" using assms 1 by simp moreover have "\\<^sub>1[f, g] \ \\\<^sub>1[f, g] \ h \f, g\ \\<^sub>0[f, g] \ h\ = \\<^sub>1[f, g] \ h" using assms 1 by simp ultimately show ?thesis unfolding tuple_def using assms 1 universal [of f g "\\<^sub>1[f, g] \ h" "\\<^sub>0[f, g] \ h"] theI_unique [of "\l. \\<^sub>0[f, g] \ l = \\<^sub>0[f, g] \ h \ \\<^sub>1[f, g] \ l = \\<^sub>1[f, g] \ h" h] by auto qed
lemma tuple_prj_spc [simp]: assumes "cospan f g" shows "\\\<^sub>1[f, g] \f, g\ \\<^sub>0[f, g]\ = f \\ g" proof - have "\\\<^sub>1[f, g] \f, g\ \\<^sub>0[f, g]\ = \\\<^sub>1[f, g] \ (f \\ g) \f, g\ \\<^sub>0[f, g] \ (f \\ g)\" using assms comp_arr_dom by simp thus ?thesis using assms tuple_prj by simp qed
lemma prj_joint_monic: assumes "cospan f g" and "seq \\<^sub>1[f, g] h" and "seq \\<^sub>1[f, g] h'" and "\\<^sub>0[f, g] \ h = \\<^sub>0[f, g] \ h'" and "\\<^sub>1[f, g] \ h = \\<^sub>1[f, g] \ h'" shows "h = h'" proof - have "h = \\\<^sub>1[f, g] \ h \f, g\ \\<^sub>0[f, g] \ h\" using assms tuple_prj [of f g h] by simp also have "... = \\\<^sub>1[f, g] \ h' \f, g\ \\<^sub>0[f, g] \ h'\" using assms by simp also have "... = h'" using assms tuple_prj [of f g h'] by simp finally show ?thesis by blast qed
text \ The pullback of an identity along an arbitrary arrow is an isomorphism. \
lemma iso_pullback_ide: assumes "cospan \ \" and "ide \" shows "iso \\<^sub>0[\, \]" proof - have "inverse_arrows \\<^sub>0[\, \] \\ \\, \\ dom \\" proof show 1: "ide (\\<^sub>0[\, \] \ \\ \\, \\ dom \\)" using assms comp_arr_dom comp_cod_arr prj_tuple(1) by simp show "ide (\\ \\, \\ dom \\ \ \\<^sub>0[\, \])" proof - have "\\ \\, \\ dom \\ \ \\<^sub>0[\, \] = (\ \\ \)" proof - have "\\<^sub>0[\, \] \ \\ \\, \\ dom \\ \ \\<^sub>0[\, \] = \\<^sub>0[\, \] \ (\ \\ \)" proof - have "\\<^sub>0[\, \] \ \\ \\, \\ dom \\ \ \\<^sub>0[\, \] = (\\<^sub>0[\, \] \ \\ \\, \\ dom \\) \ \\<^sub>0[\, \]" using assms 1 comp_reduce by blast also have "... = \\<^sub>0[\, \] \ (\ \\ \)" using assms prj_tuple(1) pullback_commutes comp_arr_dom comp_cod_arr by simp finally show ?thesis by blast qed moreover have "\\<^sub>1[\, \] \ \\ \\, \\ dom \\ \ \\<^sub>0[\, \] = \\<^sub>1[\, \] \ (\ \\ \)" proof - have "\\<^sub>1[\, \] \ \\ \\, \\ dom \\ \ \\<^sub>0[\, \] = (\\<^sub>1[\, \] \ \\ \\, \\ dom \\) \ \\<^sub>0[\, \]" using assms(2) comp_assoc by simp also have "... = \ \ \\<^sub>0[\, \]" using assms comp_arr_dom comp_cod_arr prj_tuple(2) by fastforce also have "... = \ \ \\<^sub>1[\, \]" using assms pullback_commutes commutative_square_def by simp also have "... = \\<^sub>1[\, \] \ (\ \\ \)" using assms comp_arr_dom comp_cod_arr pullback_commutes commutative_square_def by simp finally show ?thesis by simp qed ultimately show ?thesis using assms prj0_in_hom prj1_in_hom comp_arr_dom prj1_simps(1-2) prj_joint_monic by metis qed thus ?thesis using assms by auto qed qed thus ?thesis by auto qed
lemma comp_tuple_arr: assumes "commutative_square f g h k" and "seq h l" shows "\h \f, g\ k\ \ l = \h \ l \f, g\ k \ l\" proof - have "\\<^sub>0[f, g] \ \h \f, g\ k\ \ l = \\<^sub>0[f, g] \ \h \ l \f, g\ k \ l\" proof - have "\\<^sub>0[f, g] \ \h \f, g\ k\ \ l = (\\<^sub>0[f, g] \ \h \f, g\ k\) \ l" using comp_assoc by simp also have "... = \\<^sub>0[f, g] \ \h \ l \f, g\ k \ l\" using assms commutative_square_comp_arr by auto finally show ?thesis by blast qed moreover have "\\<^sub>1[f, g] \ \h \f, g\ k\ \ l = \\<^sub>1[f, g] \ \h \ l \f, g\ k \ l\" proof - have "\\<^sub>1[f, g] \ \h \f, g\ k\ \ l = (\\<^sub>1[f, g] \ \h \f, g\ k\) \ l" using comp_assoc by simp also have "... = \\<^sub>1[f, g] \ \h \ l \f, g\ k \ l\" using assms commutative_square_comp_arr by auto finally show ?thesis by blast qed moreover have "seq \\<^sub>1[f, g] (\h \f, g\ k\ \ l)" using assms tuple_in_hom prj1_in_hom by fastforce ultimately show ?thesis using assms prj_joint_monic [of f g "\h \f, g\ k\ \ l" "\h \ l \f, g\ k \ l\"] by auto qed
lemma pullback_arr_cod: assumes "arr f" shows "inverse_arrows \\<^sub>1[f, cod f] \dom f \f, cod f\ f\" and "inverse_arrows \\<^sub>0[cod f, f] \f \cod f, f\ dom f\" proof - show "inverse_arrows \\<^sub>1[f, cod f] \dom f \f, cod f\ f\" proof show "ide (\dom f \f, cod f\ f\ \ \\<^sub>1[f, cod f])" proof - have "\dom f \f, cod f\ f\ \ \\<^sub>1[f, cod f] = f \\ cod f" proof - have "\\<^sub>0[f, cod f] \ \dom f \f, cod f\ f\ \ \\<^sub>1[f, cod f] = \\<^sub>0[f, cod f] \ (f \\ cod f)" proof - have "\\<^sub>0[f, cod f] \ \dom f \f, cod f\ f\ \ \\<^sub>1[f, cod f] = (\\<^sub>0[f, cod f] \ \dom f \f, cod f\ f\) \ \\<^sub>1[f, cod f]" using comp_assoc by simp also have "... = \\<^sub>0[f, cod f] \ (f \\ cod f)" using assms pullback_commutes [of f "cod f"] comp_arr_dom comp_cod_arr by auto finally show ?thesis by blast qed moreover have "\\<^sub>1[f, cod f] \ \dom f \f, cod f\ f\ \ \\<^sub>1[f, cod f] = \\<^sub>1[f, cod f] \ (f \\ cod f)" proof - have "\\<^sub>1[f, cod f] \ \dom f \f, cod f\ f\ \ \\<^sub>1[f, cod f] = (\\<^sub>1[f, cod f] \ \dom f \f, cod f\ f\) \ \\<^sub>1[f, cod f]" using assms comp_assoc by presburger also have "... = \\<^sub>1[f, cod f] \ (f \\ cod f)" using assms comp_arr_dom comp_cod_arr by simp finally show ?thesis by blast qed ultimately show ?thesis using assms prj_joint_monic [of f "cod f" "\dom f \f, cod f\ f\ \ \\<^sub>1[f, cod f]" "f \\ cod f"] by simp qed thus ?thesis using assms arr_cod cod_cod prj1_simps_arr by simp qed show "ide (\\<^sub>1[f, cod f] \ \dom f \f, cod f\ f\)" using assms comp_arr_dom comp_cod_arr by fastforce qed show "inverse_arrows \\<^sub>0[cod f, f] \f \cod f, f\ dom f\" proof show "ide (\\<^sub>0[cod f, f] \ \f \cod f, f\ dom f\)" using assms comp_arr_dom comp_cod_arr by simp show "ide (\f \cod f, f\ dom f\ \ \\<^sub>0[cod f, f])" proof - have "\f \cod f, f\ dom f\ \ \\<^sub>0[cod f, f] = cod f \\ f" proof - have "\\<^sub>0[cod f, f] \ \f \cod f, f\ dom f\ \ \\<^sub>0[cod f, f] = \\<^sub>0[cod f, f] \ (cod f \\ f)" proof - have "\\<^sub>0[cod f, f] \ \f \cod f, f\ dom f\ \ \\<^sub>0[cod f, f] = (\\<^sub>0[cod f, f] \ \f \cod f, f\ dom f\) \ \\<^sub>0[cod f, f]" using comp_assoc by simp also have "... = dom f \ \\<^sub>0[cod f, f]" using assms comp_arr_dom comp_cod_arr by simp also have "... = \\<^sub>0[cod f, f] \ (cod f \\ f)" using assms comp_arr_dom comp_cod_arr by simp finally show ?thesis using prj0_in_hom by blast qed moreover have "\\<^sub>1[cod f, f] \ \f \cod f, f\ dom f\ \ \\<^sub>0[cod f, f] = \\<^sub>1[cod f, f] \ (cod f \\ f)" proof - have "\\<^sub>1[cod f, f] \ \f \cod f, f\ dom f\ \ \\<^sub>0[cod f, f] = (\\<^sub>1[cod f, f] \ \f \cod f, f\ dom f\) \ \\<^sub>0[cod f, f]" using comp_assoc by simp also have "... = \\<^sub>1[cod f, f] \ (cod f \\ f)" using assms pullback_commutes [of "cod f" f] comp_arr_dom comp_cod_arr by auto finally show ?thesis by blast qed ultimately show ?thesis using assms prj_joint_monic [of "cod f" f "\f \cod f, f\ dom f\ \ \\<^sub>0[cod f, f]"] by simp qed thus ?thesis using assms by simp qed qed qed
text \ The pullback of a monomorphism along itself is automatically symmetric: the left and right projections are equal. \
lemma pullback_mono_self: assumes "mono f" shows "\\<^sub>0[f, f] = \\<^sub>1[f, f]" proof - have "f \ \\<^sub>0[f, f] = f \ \\<^sub>1[f, f]" using assms pullback_commutes [of f f] by (metis commutative_squareE mono_implies_arr) thus ?thesis using assms monoE [of f "\\<^sub>1[f, f]" "\\<^sub>0[f, f]"] by (metis mono_implies_arr prj0_simps(1,3) seqI) qed
lemma pullback_iso_self: assumes "iso f" shows "\\<^sub>0[f, f] = \\<^sub>1[f, f]" using assms pullback_mono_self iso_is_section section_is_mono by simp
lemma pullback_ide_self [simp]: assumes "ide a" shows "\\<^sub>0[a, a] = \
\<^sub>1[a, a]" using assms pullback_iso_self ide_is_iso by blast end section "Agreement between the Definitions" text \ It is very easy to write locale assumptions that have unintended consequences or that are even inconsistent. So, to keep ourselves honest, we don't just accept the definition of ``elementary category with pullbacks'', but in fact we formally establish the sense in which it agrees with our standard definition of ``category with pullbacks'', which is given in terms of limit cones. This is extra work, but it ensures that we didn't make a mistake. \ context category_with_pullbacks begin definition prj1 where "prj1 f g \ if cospan f g then fst (SOME x. cospan_diagram.has_as_pullback C f g (fst x) (snd x)) else null" definition prj0 where "prj0 f g \ if cospan f g then snd (SOME x. cospan_diagram.has_as_pullback C f g (fst x) (snd x)) else null" lemma prj_yields_pullback: assumes "cospan f g" shows "cospan_diagram.has_as_pullback C f g (prj1 f g) (prj0 f g)" proof - have "\x. cospan_diagram.has_as_pullback C f g (fst x) (snd x)" using assms has_pullbacks has_pullbacks_def has_as_pullback_def by simp thus ?thesis using assms has_pullbacks has_pullbacks_def prj0_def prj1_def someI_ex [of "\x. cospan_diagram.has_as_pullback C f g (fst x) (snd x)"] by simp qed interpretation elementary_category_with_pullbacks C prj0 prj1 proof show "\f g. \ cospan f g \ prj0 f g = null" using prj0_def by auto show "\f g. \ cospan f g \ prj1 f g = null" using prj1_def by auto show "\f g. cospan f g \ commutative_square f g (prj1 f g) (prj0 f g)" proof fix f g assume fg: "cospan f g" show "cospan f g" by fact interpret J: cospan_shape . interpret D: cospan_diagram C f g using fg by (unfold_locales, auto) let ?\ = "D.mkCone (prj1 f g) (prj0 f g)" interpret \: limit_cone J.comp C D.map \dom (prj1 f g)\ ?\ using fg prj_yields_pullback by auto have 1: "prj1 f g = ?\ J.AA \ prj0 f g = ?\ J.BB" using D.mkCone_def by simp show "span (prj1 f g) (prj0 f g)" proof - have "arr (prj1 f g) \ arr (prj0 f g)" using 1 J.arr_char J.seq_char by (metis J.seqE \.preserves_reflects_arr) moreover have "dom (prj1 f g) = dom (prj0 f g)" using 1 D.is_rendered_commutative_by_cone \.cone_axioms J.seq_char by (metis J.cod_eqI J.seqE \.A.map_simp \.preserves_dom J.ide_char) ultimately show ?thesis by simp qed show "dom f = cod (prj1 f g)" using 1 fg \.preserves_cod [of J.BB] J.cod_char D.mkCone_def by (metis D.map.simps(1) D.preserves_cod J.seqE \.preserves_cod cod_dom J.seq_char) show "f \ prj1 f g = g \ prj0 f g" using 1 fg D.is_rendered_commutative_by_cone \.cone_axioms by force qed show "\f g h k. commutative_square f g h k \ \!l. prj1 f g \ l = h \ prj0 f g \ l = k" proof - fix f g h k assume fghk: "commutative_square f g h k" interpret J: cospan_shape . interpret D: cospan_diagram C f g using fghk by (unfold_locales, auto) let ?\ = "D.mkCone (prj1 f g) (prj0 f g)" interpret \: limit_cone J.comp C D.map \dom (prj1 f g)\ ?\ using fghk prj_yields_pullback by auto interpret \: pullback_cone C f g \prj1 f g\ \prj0 f g\ .. have 1: "prj1 f g = ?\ J.AA \ prj0 f g = ?\ J.BB" using D.mkCone_def by simp show "\!l. prj1 f g \ l = h \ prj0 f g \ l = k" proof let ?l = "SOME l. prj1 f g \ l = h \ prj0 f g \ l = k" show "prj1 f g \ ?l = h \ prj0 f g \ ?l = k" using fghk \.is_universal' \.renders_commutative someI_ex [of "\l. prj1 f g \ l = h \ prj0 f g \ l = k"] by blast thus "\l. 
prj1 f g \ l = h \ prj0 f g \ l = k \ l = ?l" using fghk \.is_universal' \.renders_commutative limit_cone_def by (metis (no_types, lifting) in_homI seqE commutative_squareE dom_comp seqI) qed qed qed proposition extends_to_elementary_category_with_pullbacks: shows "elementary_category_with_pullbacks C prj0 prj1" .. end context elementary_category_with_pullbacks begin interpretation category_with_pullbacks C proof show "has_pullbacks" proof (unfold has_pullbacks_def) have "\f g. cospan f g \ \p0 p1. has_as_pullback f g p0 p1" proof - fix f g assume fg: "cospan f g" - interpret J: cospan_shape . - interpret D: cospan_diagram C f g - using fg by (unfold_locales, auto) - let ?\ = "D.mkCone \

\<^sub>1[f, g] \

\<^sub>0[f, g]" - interpret \: cone J.comp C D.map \dom \

\<^sub>1[f, g]\ ?\ - proof - - have "D.is_rendered_commutative_by \

\<^sub>1[f, g] \

\<^sub>0[f, g]" - using fg pullback_commutes' by simp - thus "cone J.comp C D.map (dom \

\<^sub>1[f, g]) ?\" - using D.cone_mkCone by auto - qed - interpret \: limit_cone J.comp C D.map \dom \

\<^sub>1[f, g]\ ?\ - proof - fix a' \' - assume \': "D.cone a' \'" - interpret \': cone J.comp C D.map a' \' - using \' by simp - have 1: "commutative_square f g (\' J.AA) (\' J.BB)" - using fg J.ide_char J.cod_char D.is_rendered_commutative_by_cone \'.cone_axioms - by auto - show "\!h. \h : a' \ dom \

\<^sub>1[f, g]\ \ - D.cones_map h (D.mkCone \

\<^sub>1[f, g] \

\<^sub>0[f, g]) = \'" - proof - let ?h = "\\' J.AA \f, g\ \' J.BB\" - show h': "\?h : a' \ dom \

\<^sub>1[f, g]\ \ - D.cones_map ?h (D.mkCone \

\<^sub>1[f, g] \

\<^sub>0[f, g]) = \'" - proof - show h: "\?h : a' \ dom \

\<^sub>1[f, g]\" - using fg 1 by fastforce - show "D.cones_map ?h (D.mkCone \

\<^sub>1[f, g] \

\<^sub>0[f, g]) = \'" - proof - - have "D.mkCone \

\<^sub>1[f, g] \

\<^sub>0[f, g] \ D.cones (cod \\' J.AA \f, g\ \' J.BB\)" - using fg h D.cone_mkCone D.is_rendered_commutative_by_cone - \.cone_axioms - by auto - hence 2: "D.cones_map ?h (D.mkCone \

\<^sub>1[f, g] \

\<^sub>0[f, g]) \ D.cones a'" - using fg h D.cones_map_mapsto by blast - interpret \'h: cone J.comp C D.map a' - \D.cones_map ?h (D.mkCone \

\<^sub>1[f, g] \

\<^sub>0[f, g])\ - using 2 by simp - show ?thesis - proof - - have "\j. J.ide j \ D.cones_map ?h (D.mkCone \

\<^sub>1[f, g] \

\<^sub>0[f, g]) j = \' j" - proof - - fix j - show "J.ide j \ D.cones_map ?h (D.mkCone \

\<^sub>1[f, g] \

\<^sub>0[f, g]) j = \' j" - using fg h 1 J.ide_char D.mkCone_def \.cone_axioms - apply (cases j, simp_all) - by (metis D.map.simps(4) J.dom_eqI \'.is_natural_1 \'.naturality - J.seqE \'.A.map_simp J.comp.simps(3,7) J.seq_char - prj_tuple(2) comp_assoc) - qed - thus ?thesis - using NaturalTransformation.eqI - \'.natural_transformation_axioms \'h.natural_transformation_axioms - by blast - qed - qed - qed - show "\h. \h : a' \ dom \

\<^sub>1[f, g]\ \ - D.cones_map h (D.mkCone \

\<^sub>1[f, g] \

\<^sub>0[f, g]) = \' \ - h = ?h" - proof - - fix h - assume 2: "\h : a' \ dom \

\<^sub>1[f, g]\ \ - D.cones_map h (D.mkCone \

\<^sub>1[f, g] \

\<^sub>0[f, g]) = \'" - show "h = ?h" - proof - - have "\

\<^sub>0[f, g] \ h = \

\<^sub>0[f, g] \ ?h \ \

\<^sub>1[f, g] \ h = \

\<^sub>1[f, g] \ ?h" - using 1 2 fg J.arr_char \.cone_axioms D.mkCone_def by auto - thus ?thesis - using fg 2 h' prj_joint_monic by blast - qed - qed - qed - qed - show "\p0 p1. has_as_pullback f g p0 p1" - using fg has_as_pullback_def \.limit_cone_axioms by blast + have "has_as_pullback f g \

\<^sub>1[f, g] \

\<^sub>0[f, g]" + using fg has_as_pullbackI pullback_commutes universal by presburger + thus "\p0 p1. has_as_pullback f g p0 p1" by blast qed thus "\f g. cospan f g \ (\p0 p1. has_as_pullback f g p0 p1)" by simp qed qed proposition is_category_with_pullbacks: shows "category_with_pullbacks C" .. end sublocale elementary_category_with_pullbacks \ category_with_pullbacks using is_category_with_pullbacks by auto end diff --git a/thys/Category3/Limit.thy b/thys/Category3/Limit.thy --- a/thys/Category3/Limit.thy +++ b/thys/Category3/Limit.thy @@ -1,6271 +1,6352 @@ (* Title: Limit Author: Eugene W. Stark , 2016 Maintainer: Eugene W. Stark *) chapter Limit theory Limit imports FreeCategory DiscreteCategory Adjunction begin text\ This theory defines the notion of limit in terms of diagrams and cones and relates it to the concept of a representation of a functor. The diagonal functor associated with a diagram shape @{term J} is defined and it is shown that a right adjoint to the diagonal functor gives limits of shape @{term J} and that a category has limits of shape @{term J} if and only if the diagonal functor is a left adjoint functor. Products and equalizers are defined as special cases of limits, and it is shown that a category with equalizers has limits of shape @{term J} if it has products indexed by the sets of objects and arrows of @{term J}. The existence of limits in a set category is investigated, and it is shown that every set category has equalizers and that a set category @{term S} has @{term I}-indexed products if and only if the universe of @{term S} ``admits @{term I}-indexed tupling.'' The existence of limits in functor categories is also developed, showing that limits in functor categories are ``determined pointwise'' and that a functor category @{term "[A, B]"} has limits of shape @{term J} if @{term B} does. Finally, it is shown that the Yoneda functor preserves limits. This theory concerns itself only with limits; I have made no attempt to consider colimits. Although it would be possible to rework the entire development in dual form, it is possible that there is a more efficient way to dualize at least parts of it without repeating all the work. This is something that deserves further thought. \ section "Representations of Functors" text\ A representation of a contravariant functor \F: Cop \ S\, where @{term S} is a set category that is the target of a hom-functor for @{term C}, consists of an object @{term a} of @{term C} and a natural isomorphism @{term "\: Y a \ F"}, where \Y: C \ [Cop, S]\ is the Yoneda functor. \ locale representation_of_functor = C: category C + Cop: dual_category C + S: set_category S setp + F: "functor" Cop.comp S F + Hom: hom_functor C S setp \ + Ya: yoneda_functor_fixed_object C S setp \ a + natural_isomorphism Cop.comp S \Ya.Y a\ F \ for C :: "'c comp" (infixr "\" 55) and S :: "'s comp" (infixr "\\<^sub>S" 55) and setp :: "'s set \ bool" and \ :: "'c * 'c \ 'c \ 's" and F :: "'c \ 's" and a :: 'c and \ :: "'c \ 's" begin abbreviation Y where "Y \ Ya.Y" abbreviation \ where "\ \ Hom.\" end text\ Two representations of the same functor are uniquely isomorphic. 
\ locale two_representations_one_functor = C: category C + Cop: dual_category C + S: set_category S setp + F: set_valued_functor Cop.comp S setp F + yoneda_functor C S setp \ + Ya: yoneda_functor_fixed_object C S setp \ a + Ya': yoneda_functor_fixed_object C S setp \ a' + \: representation_of_functor C S setp \ F a \ + \': representation_of_functor C S setp \ F a' \' for C :: "'c comp" (infixr "\" 55) and S :: "'s comp" (infixr "\\<^sub>S" 55) and setp :: "'s set \ bool" and F :: "'c \ 's" and \ :: "'c * 'c \ 'c \ 's" and a :: 'c and \ :: "'c \ 's" and a' :: 'c and \' :: "'c \ 's" begin interpretation \: inverse_transformation Cop.comp S \Y a\ F \ .. interpretation \': inverse_transformation Cop.comp S \Y a'\ F \' .. interpretation \\': vertical_composite Cop.comp S \Y a\ F \Y a'\ \ \'.map .. interpretation \'\: vertical_composite Cop.comp S \Y a'\ F \Y a\ \' \.map .. lemma are_uniquely_isomorphic: shows "\!\. \\ : a \ a'\ \ C.iso \ \ map \ = Cop_S.MkArr (Y a) (Y a') \\'.map" proof - interpret \\': natural_isomorphism Cop.comp S \Y a\ \Y a'\ \\'.map using \.natural_isomorphism_axioms \'.natural_isomorphism_axioms natural_isomorphisms_compose by blast interpret \'\: natural_isomorphism Cop.comp S \Y a'\ \Y a\ \'\.map using \'.natural_isomorphism_axioms \.natural_isomorphism_axioms natural_isomorphisms_compose by blast interpret \\'_\'\: inverse_transformations Cop.comp S \Y a\ \Y a'\ \\'.map \'\.map proof fix x assume X: "Cop.ide x" show "S.inverse_arrows (\\'.map x) (\'\.map x)" using S.inverse_arrows_compose S.inverse_arrows_sym X \'\.map_simp_ide \\'.map_simp_ide \'.inverts_components \.inverts_components by force qed have "Cop_S.inverse_arrows (Cop_S.MkArr (Y a) (Y a') \\'.map) (Cop_S.MkArr (Y a') (Y a) \'\.map)" proof - have Ya: "functor Cop.comp S (Y a)" .. have Ya': "functor Cop.comp S (Y a')" .. have \\': "natural_transformation Cop.comp S (Y a) (Y a') \\'.map" .. have \'\: "natural_transformation Cop.comp S (Y a') (Y a) \'\.map" .. show ?thesis by (metis (no_types, lifting) Cop_S.arr_MkArr Cop_S.comp_MkArr Cop_S.ide_MkIde Cop_S.inverse_arrows_def Ya'.functor_axioms Ya.functor_axioms \'\.natural_transformation_axioms \\'.natural_transformation_axioms \\'_\'\.inverse_transformations_axioms inverse_transformations_inverse(1-2) mem_Collect_eq) qed hence 3: "Cop_S.iso (Cop_S.MkArr (Y a) (Y a') \\'.map)" using Cop_S.isoI by blast hence "\f. \f : a \ a'\ \ map f = Cop_S.MkArr (Y a) (Y a') \\'.map" using Ya.ide_a Ya'.ide_a is_full Y_def Cop_S.iso_is_arr full_functor.is_full Cop_S.MkArr_in_hom \\'.natural_transformation_axioms preserves_ide by force from this obtain \ where \: "\\ : a \ a'\ \ map \ = Cop_S.MkArr (Y a) (Y a') \\'.map" by blast show ?thesis by (metis 3 C.in_homE \ is_faithful reflects_iso) qed end section "Diagrams and Cones" text\ A \emph{diagram} in a category @{term C} is a functor \D: J \ C\. We refer to the category @{term J} as the diagram \emph{shape}. Note that in the usual expositions of category theory that use set theory as their foundations, the shape @{term J} of a diagram is required to be a ``small'' category, where smallness means that the collection of objects of @{term J}, as well as each of the ``homs,'' is a set. However, in HOL there is no class of all sets, so it is not meaningful to speak of @{term J} as ``small'' in any kind of absolute sense. 
There is likely a meaningful notion of smallness of @{term J} \emph{relative to} @{term C} (the result below that states that a set category has @{term I}-indexed products if and only if its universe ``admits @{term I}-indexed tuples'' is suggestive of how this might be defined), but I haven't fully explored this idea at present. \ locale diagram = C: category C + J: category J + "functor" J C D for J :: "'j comp" (infixr "\\<^sub>J" 55) and C :: "'c comp" (infixr "\" 55) and D :: "'j \ 'c" begin notation J.in_hom ("\_ : _ \\<^sub>J _\") end lemma comp_diagram_functor: assumes "diagram J C D" and "functor J' J F" shows "diagram J' C (D o F)" by (meson assms(1) assms(2) diagram_def functor.axioms(1) functor_comp) text\ A \emph{cone} over a diagram \D: J \ C\ is a natural transformation from a constant functor to @{term D}. The value of the constant functor is the \emph{apex} of the cone. \ locale cone = C: category C + J: category J + D: diagram J C D + A: constant_functor J C a + natural_transformation J C A.map D \ for J :: "'j comp" (infixr "\\<^sub>J" 55) and C :: "'c comp" (infixr "\" 55) and D :: "'j \ 'c" and a :: 'c and \ :: "'j \ 'c" begin lemma ide_apex: shows "C.ide a" using A.value_is_ide by auto lemma component_in_hom: assumes "J.arr j" shows "\\ j : a \ D (J.cod j)\" using assms by auto end text\ A cone over diagram @{term D} is transformed into a cone over diagram @{term "D o F"} by pre-composing with @{term F}. \ lemma comp_cone_functor: assumes "cone J C D a \" and "functor J' J F" shows "cone J' C (D o F) a (\ o F)" proof - interpret \: cone J C D a \ using assms(1) by auto interpret F: "functor" J' J F using assms(2) by auto interpret A': constant_functor J' C a using \.A.value_is_ide by unfold_locales auto have 1: "\.A.map o F = A'.map" using \.A.map_def A'.map_def \.J.not_arr_null by auto interpret \': natural_transformation J' C A'.map \D o F\ \\ o F\ using 1 horizontal_composite F.as_nat_trans.natural_transformation_axioms \.natural_transformation_axioms by fastforce show "cone J' C (D o F) a (\ o F)" .. qed text\ A cone over diagram @{term D} can be transformed into a cone over a diagram @{term D'} by post-composing with a natural transformation from @{term D} to @{term D'}. \ lemma vcomp_transformation_cone: assumes "cone J C D a \" and "natural_transformation J C D D' \" shows "cone J C D' a (vertical_composite.map J C \ \)" by (meson assms cone.axioms(4-5) cone.intro diagram.intro natural_transformation.axioms(1-4) vertical_composite.intro vertical_composite.is_natural_transformation) context "functor" begin lemma preserves_diagrams: fixes J :: "'j comp" assumes "diagram J A D" shows "diagram J B (F o D)" by (meson assms diagram_def functor_axioms functor_comp functor_def) lemma preserves_cones: fixes J :: "'j comp" assumes "cone J A D a \" shows "cone J B (F o D) (F a) (F o \)" proof - interpret \: cone J A D a \ using assms by auto interpret Fa: constant_functor J B \F a\ using \.ide_apex by unfold_locales auto have 1: "F o \.A.map = Fa.map" proof fix f show "(F \ \.A.map) f = Fa.map f" using is_extensional Fa.is_extensional \.A.is_extensional by (cases "\.J.arr f", simp_all) qed interpret \': natural_transformation J B Fa.map \F o D\ \F o \\ using 1 horizontal_composite \.natural_transformation_axioms as_nat_trans.natural_transformation_axioms by fastforce show "cone J B (F o D) (F a) (F o \)" .. qed end context diagram begin abbreviation cone where "cone a \ \ Limit.cone J C D a \" abbreviation cones :: "'c \ ('j \ 'c) set" where "cones a \ { \. 
cone a \ }" text\ An arrow @{term "f \ C.hom a' a"} induces by composition a transformation from cones with apex @{term a} to cones with apex @{term a'}. This transformation is functorial in @{term f}. \ abbreviation cones_map :: "'c \ ('j \ 'c) \ ('j \ 'c)" where "cones_map f \ (\\ \ cones (C.cod f). \j. if J.arr j then \ j \ f else C.null)" lemma cones_map_mapsto: assumes "C.arr f" shows "cones_map f \ extensional (cones (C.cod f)) \ (cones (C.cod f) \ cones (C.dom f))" proof show "cones_map f \ extensional (cones (C.cod f))" by blast show "cones_map f \ cones (C.cod f) \ cones (C.dom f)" proof fix \ assume "\ \ cones (C.cod f)" hence \: "cone (C.cod f) \" by auto interpret \: cone J C D \C.cod f\ \ using \ by auto interpret B: constant_functor J C \C.dom f\ using assms by unfold_locales auto have "cone (C.dom f) (\j. if J.arr j then \ j \ f else C.null)" using assms B.value_is_ide \.is_natural_1 \.is_natural_2 apply (unfold_locales, auto) using \.is_natural_1 apply (metis C.comp_assoc) using \.is_natural_2 C.comp_arr_dom by (metis J.arr_cod_iff_arr J.cod_cod C.comp_assoc) thus "(\j. if J.arr j then \ j \ f else C.null) \ cones (C.dom f)" by auto qed qed lemma cones_map_ide: assumes "\ \ cones a" shows "cones_map a \ = \" proof - interpret \: cone J C D a \ using assms by auto show ?thesis proof fix j show "cones_map a \ j = \ j" using assms \.A.value_is_ide \.preserves_hom C.comp_arr_dom \.is_extensional by (cases "J.arr j", auto) qed qed lemma cones_map_comp: assumes "C.seq f g" shows "cones_map (f \ g) = restrict (cones_map g o cones_map f) (cones (C.cod f))" proof (intro restr_eqI) show "cones (C.cod (f \ g)) = cones (C.cod f)" using assms by simp show "\\. \ \ cones (C.cod (f \ g)) \ (\j. if J.arr j then \ j \ f \ g else C.null) = (cones_map g o cones_map f) \" proof - fix \ assume \: "\ \ cones (C.cod (f \ g))" show "(\j. if J.arr j then \ j \ f \ g else C.null) = (cones_map g o cones_map f) \" proof - have "((cones_map g) o (cones_map f)) \ = cones_map g (cones_map f \)" by force also have "... = (\j. if J.arr j then (\j. if J.arr j then \ j \ f else C.null) j \ g else C.null)" proof fix j have "cone (C.dom f) (cones_map f \)" using assms \ cones_map_mapsto by (elim C.seqE, force) thus "cones_map g (cones_map f \) j = (if J.arr j then C (if J.arr j then \ j \ f else C.null) g else C.null)" using \ assms by auto qed also have "... = (\j. if J.arr j then \ j \ f \ g else C.null)" using C.comp_assoc by fastforce finally show ?thesis by auto qed qed qed end text\ Changing the apex of a cone by pre-composing with an arrow @{term f} commutes with changing the diagram of a cone by post-composing with a natural transformation. \ lemma cones_map_vcomp: assumes "diagram J C D" and "diagram J C D'" and "natural_transformation J C D D' \" and "cone J C D a \" and f: "partial_magma.in_hom C f a' a" shows "diagram.cones_map J C D' f (vertical_composite.map J C \ \) = vertical_composite.map J C (diagram.cones_map J C D f \) \" proof - interpret D: diagram J C D using assms(1) by auto interpret D': diagram J C D' using assms(2) by auto interpret \: natural_transformation J C D D' \ using assms(3) by auto interpret \: cone J C D a \ using assms(4) by auto interpret \o\: vertical_composite J C \.A.map D D' \ \ .. interpret \o\: cone J C D' a \o\.map .. interpret \f: cone J C D a' \D.cones_map f \\ using f \.cone_axioms D.cones_map_mapsto by blast interpret \o\f: vertical_composite J C \f.A.map D D' \D.cones_map f \\ \ .. 
interpret \o\_f: cone J C D' a' \D'.cones_map f \o\.map\ using f \o\.cone_axioms D'.cones_map_mapsto [of f] by blast write C (infixr "\" 55) show "D'.cones_map f \o\.map = \o\f.map" proof (intro NaturalTransformation.eqI) show "natural_transformation J C \f.A.map D' (D'.cones_map f \o\.map)" .. show "natural_transformation J C \f.A.map D' \o\f.map" .. show "\j. D.J.ide j \ D'.cones_map f \o\.map j = \o\f.map j" proof - fix j assume j: "D.J.ide j" have "D'.cones_map f \o\.map j = \o\.map j \ f" using f \o\.cone_axioms \o\.map_simp_2 \o\.is_extensional by auto also have "... = (\ j \ \ (D.J.dom j)) \ f" using j \o\.map_simp_2 by simp also have "... = \ j \ \ (D.J.dom j) \ f" using D.C.comp_assoc by simp also have "... = \o\f.map j" using j f \.cone_axioms \o\f.map_simp_2 by auto finally show "D'.cones_map f \o\.map j = \o\f.map j" by auto qed qed qed text\ Given a diagram @{term D}, we can construct a contravariant set-valued functor, which takes each object @{term a} of @{term C} to the set of cones over @{term D} with apex @{term a}, and takes each arrow @{term f} of @{term C} to the function on cones over @{term D} induced by pre-composition with @{term f}. For this, we need to introduce a set category @{term S} whose universe is large enough to contain all the cones over @{term D}, and we need to have an explicit correspondence between cones and elements of the universe of @{term S}. A replete set category @{term S} equipped with an injective mapping @{term_type "\ :: ('j => 'c) => 's"} serves this purpose. \ locale cones_functor = C: category C + Cop: dual_category C + J: category J + D: diagram J C D + S: replete_concrete_set_category S UNIV \ for J :: "'j comp" (infixr "\\<^sub>J" 55) and C :: "'c comp" (infixr "\" 55) and D :: "'j \ 'c" and S :: "'s comp" (infixr "\\<^sub>S" 55) and \ :: "('j \ 'c) \ 's" begin notation S.in_hom ("\_ : _ \\<^sub>S _\") abbreviation \ where "\ \ S.DN" definition map :: "'c \ 's" where "map = (\f. if C.arr f then S.mkArr (\ ` D.cones (C.cod f)) (\ ` D.cones (C.dom f)) (\ o D.cones_map f o \) else S.null)" lemma map_simp [simp]: assumes "C.arr f" shows "map f = S.mkArr (\ ` D.cones (C.cod f)) (\ ` D.cones (C.dom f)) (\ o D.cones_map f o \)" using assms map_def by auto lemma arr_map: assumes "C.arr f" shows "S.arr (map f)" proof - have "\ o D.cones_map f o \ \ \ ` D.cones (C.cod f) \ \ ` D.cones (C.dom f)" using assms D.cones_map_mapsto by force thus ?thesis using assms S.UP_mapsto by auto qed lemma map_ide: assumes "C.ide a" shows "map a = S.mkIde (\ ` D.cones a)" proof - have "map a = S.mkArr (\ ` D.cones a) (\ ` D.cones a) (\ o D.cones_map a o \)" using assms map_simp by force also have "... = S.mkArr (\ ` D.cones a) (\ ` D.cones a) (\x. x)" using S.UP_mapsto D.cones_map_ide by force also have "... 
= S.mkIde (\ ` D.cones a)" using assms S.mkIde_as_mkArr S.UP_mapsto by blast finally show ?thesis by auto qed lemma map_preserves_dom: assumes "Cop.arr f" shows "map (Cop.dom f) = S.dom (map f)" using assms arr_map map_ide by auto lemma map_preserves_cod: assumes "Cop.arr f" shows "map (Cop.cod f) = S.cod (map f)" using assms arr_map map_ide by auto lemma map_preserves_comp: assumes "Cop.seq g f" shows "map (g \\<^sup>o\<^sup>p f) = map g \\<^sub>S map f" proof - have "map (g \\<^sup>o\<^sup>p f) = S.mkArr (\ ` D.cones (C.cod f)) (\ ` D.cones (C.dom g)) ((\ o D.cones_map g o \) o (\ o D.cones_map f o \))" proof - have 1: "S.arr (map (g \\<^sup>o\<^sup>p f))" using assms arr_map [of "C f g"] by simp have "map (g \\<^sup>o\<^sup>p f) = S.mkArr (\ ` D.cones (C.cod f)) (\ ` D.cones (C.dom g)) (\ o D.cones_map (C f g) o \)" using assms map_simp [of "C f g"] by simp also have "... = S.mkArr (\ ` D.cones (C.cod f)) (\ ` D.cones (C.dom g)) ((\ o D.cones_map g o \) o (\ o D.cones_map f o \))" using assms 1 calculation D.cones_map_mapsto D.cones_map_comp by auto finally show ?thesis by blast qed also have "... = map g \\<^sub>S map f" using assms arr_map [of f] arr_map [of g] map_simp S.comp_mkArr by auto finally show ?thesis by auto qed lemma is_functor: shows "functor Cop.comp S map" apply (unfold_locales) using map_def arr_map map_preserves_dom map_preserves_cod map_preserves_comp by auto end sublocale cones_functor \ "functor" Cop.comp S map using is_functor by auto sublocale cones_functor \ set_valued_functor Cop.comp S \\A. A \ S.Univ\ map .. section Limits subsection "Limit Cones" text\ A \emph{limit cone} for a diagram @{term D} is a cone @{term \} over @{term D} with the universal property that any other cone @{term \'} over the diagram @{term D} factors uniquely through @{term \}. \ locale limit_cone = C: category C + J: category J + D: diagram J C D + cone J C D a \ for J :: "'j comp" (infixr "\\<^sub>J" 55) and C :: "'c comp" (infixr "\" 55) and D :: "'j \ 'c" and a :: 'c and \ :: "'j \ 'c" + assumes is_universal: "cone J C D a' \' \ \!f. \f : a' \ a\ \ D.cones_map f \ = \'" begin definition induced_arrow :: "'c \ ('j \ 'c) \ 'c" where "induced_arrow a' \' = (THE f. \f : a' \ a\ \ D.cones_map f \ = \')" lemma induced_arrowI: assumes \': "\' \ D.cones a'" shows "\induced_arrow a' \' : a' \ a\" and "D.cones_map (induced_arrow a' \') \ = \'" proof - have "\!f. \f : a' \ a\ \ D.cones_map f \ = \'" using assms \' is_universal by simp hence 1: "\induced_arrow a' \' : a' \ a\ \ D.cones_map (induced_arrow a' \') \ = \'" using theI' [of "\f. \f : a' \ a\ \ D.cones_map f \ = \'"] induced_arrow_def by presburger show "\induced_arrow a' \' : a' \ a\" using 1 by simp show "D.cones_map (induced_arrow a' \') \ = \'" using 1 by simp qed lemma cones_map_induced_arrow: shows "induced_arrow a' \ D.cones a' \ C.hom a' a" and "\\'. \' \ D.cones a' \ D.cones_map (induced_arrow a' \') \ = \'" using induced_arrowI by auto lemma induced_arrow_cones_map: assumes "C.ide a'" shows "(\f. D.cones_map f \) \ C.hom a' a \ D.cones a'" and "\f. \f : a' \ a\ \ induced_arrow a' (D.cones_map f \) = f" proof - have a': "C.ide a'" using assms by (simp add: cone.ide_apex) have cone_\: "cone J C D a \" .. show "(\f. D.cones_map f \) \ C.hom a' a \ D.cones a'" using cone_\ D.cones_map_mapsto by blast fix f assume f: "\f : a' \ a\" show "induced_arrow a' (D.cones_map f \) = f" proof - have "D.cones_map f \ \ D.cones a'" using f cone_\ D.cones_map_mapsto by blast hence "\!f'. 
\f' : a' \ a\ \ D.cones_map f' \ = D.cones_map f \" using assms is_universal by auto thus ?thesis using f induced_arrow_def the1_equality [of "\f'. \f' : a' \ a\ \ D.cones_map f' \ = D.cones_map f \"] by presburger qed qed text\ For a limit cone @{term \} with apex @{term a}, for each object @{term a'} the hom-set @{term "C.hom a' a"} is in bijective correspondence with the set of cones with apex @{term a'}. \ lemma bij_betw_hom_and_cones: assumes "C.ide a'" shows "bij_betw (\f. D.cones_map f \) (C.hom a' a) (D.cones a')" proof (intro bij_betwI) show "(\f. D.cones_map f \) \ C.hom a' a \ D.cones a'" using assms induced_arrow_cones_map by blast show "induced_arrow a' \ D.cones a' \ C.hom a' a" using assms cones_map_induced_arrow by blast show "\f. f \ C.hom a' a \ induced_arrow a' (D.cones_map f \) = f" using assms induced_arrow_cones_map by blast show "\\'. \' \ D.cones a' \ D.cones_map (induced_arrow a' \') \ = \'" using assms cones_map_induced_arrow by blast qed lemma induced_arrow_eqI: assumes "D.cone a' \'" and "\f : a' \ a\" and "D.cones_map f \ = \'" shows "induced_arrow a' \' = f" using assms is_universal induced_arrow_def the1_equality [of "\f. f \ C.hom a' a \ D.cones_map f \ = \'" f] by simp lemma induced_arrow_self: shows "induced_arrow a \ = a" proof - have "\a : a \ a\ \ D.cones_map a \ = \" using ide_apex cone_axioms D.cones_map_ide by force thus ?thesis using induced_arrow_eqI cone_axioms by auto qed end context diagram begin abbreviation limit_cone where "limit_cone a \ \ Limit.limit_cone J C D a \" text\ A diagram @{term D} has object @{term a} as a limit if @{term a} is the apex of some limit cone over @{term D}. \ abbreviation has_as_limit :: "'c \ bool" where "has_as_limit a \ (\\. limit_cone a \)" abbreviation has_limit where "has_limit \ (\a \. limit_cone a \)" definition some_limit :: 'c where "some_limit = (SOME a. \\. limit_cone a \)" definition some_limit_cone :: "'j \ 'c" where "some_limit_cone = (SOME \. limit_cone some_limit \)" lemma limit_cone_some_limit_cone: assumes has_limit shows "limit_cone some_limit some_limit_cone" proof - have "\a. has_as_limit a" using assms by simp hence "has_as_limit some_limit" using some_limit_def someI_ex [of "\a. \\. limit_cone a \"] by simp thus "limit_cone some_limit some_limit_cone" using assms some_limit_cone_def someI_ex [of "\\. limit_cone some_limit \"] by simp qed lemma ex_limitE: assumes "\a. has_as_limit a" obtains a \ where "limit_cone a \" using assms someI_ex by blast end subsection "Limits by Representation" text\ A limit for a diagram D can also be given by a representation \(a, \)\ of the cones functor. \ locale representation_of_cones_functor = C: category C + Cop: dual_category C + J: category J + D: diagram J C D + S: replete_concrete_set_category S UNIV \ + Cones: cones_functor J C D S \ + Hom: hom_functor C S \\A. A \ S.Univ\ \ + representation_of_functor C S S.setp \ Cones.map a \ for J :: "'j comp" (infixr "\\<^sub>J" 55) and C :: "'c comp" (infixr "\" 55) and D :: "'j \ 'c" and S :: "'s comp" (infixr "\\<^sub>S" 55) and \ :: "'c * 'c \ 'c \ 's" and \ :: "('j \ 'c) \ 's" and a :: 'c and \ :: "'c \ 's" subsection "Putting it all Together" text\ A ``limit situation'' combines and connects the ways of presenting a limit. 
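For instance (speaking informally), when the shape J is the empty category there is exactly one cone over the empty diagram with any given apex, so a limit situation amounts to the statement that for each object a' there is exactly one arrow from a' to a; that is, a is a terminal object.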
\ locale limit_situation = C: category C + Cop: dual_category C + J: category J + D: diagram J C D + S: replete_concrete_set_category S UNIV \ + Cones: cones_functor J C D S \ + Hom: hom_functor C S S.setp \ + \: representation_of_functor C S S.setp \ Cones.map a \ + \: limit_cone J C D a \ for J :: "'j comp" (infixr "\\<^sub>J" 55) and C :: "'c comp" (infixr "\" 55) and D :: "'j \ 'c" and S :: "'s comp" (infixr "\\<^sub>S" 55) and \ :: "'c * 'c \ 'c \ 's" and \ :: "('j \ 'c) \ 's" and a :: 'c and \ :: "'c \ 's" and \ :: "'j \ 'c" + assumes \_in_terms_of_\: "\ = S.DN (S.Fun (\ a) (\ (a, a) a))" and \_in_terms_of_\: "Cop.ide a' \ \ a' = S.mkArr (Hom.set (a', a)) (\ ` D.cones a') (\x. \ (D.cones_map (Hom.\ (a', a) x) \))" text (in limit_situation) \ The assumption @{prop \_in_terms_of_\} states that the universal cone @{term \} is obtained by applying the function @{term "S.Fun (\ a)"} to the identity @{term a} of @{term[source=true] C} (after taking into account the necessary coercions). \ text (in limit_situation) \ The assumption @{prop \_in_terms_of_\} states that the component of @{term \} at @{term a'} is the arrow of @{term[source=true] S} corresponding to the function that takes an arrow @{term "f \ C.hom a' a"} and produces the cone with vertex @{term a'} obtained by transforming the universal cone @{term \} by @{term f}. \ subsection "Limit Cones Induce Limit Situations" text\ To obtain a limit situation from a limit cone, we need to introduce a set category that is large enough to contain the hom-sets of @{term C} as well as the cones over @{term D}. We use the category of all @{typ "('c + ('j \ 'c))"}-sets for this. \ context limit_cone begin interpretation Cop: dual_category C .. interpretation CopxC: product_category Cop.comp C .. interpretation S: replete_setcat \undefined :: 'c + ('j \ 'c)\ . notation S.comp (infixr "\\<^sub>S" 55) interpretation Sr: replete_concrete_set_category S.comp UNIV \S.UP o Inr\ apply unfold_locales using S.UP_mapsto apply auto[1] using S.inj_UP inj_Inr inj_compose by metis interpretation Cones: cones_functor J C D S.comp \S.UP o Inr\ .. interpretation Hom: hom_functor C S.comp S.setp \\_. S.UP o Inl\ apply (unfold_locales) using S.UP_mapsto apply auto[1] using S.inj_UP injD inj_onI inj_Inl inj_compose apply (metis (no_types, lifting)) using S.UP_mapsto by auto interpretation Y: yoneda_functor C S.comp S.setp \\_. S.UP o Inl\ .. interpretation Ya: yoneda_functor_fixed_object C S.comp S.setp \\_. S.UP o Inl\ a apply (unfold_locales) using ide_apex by auto abbreviation inl :: "'c \ 'c + ('j \ 'c)" where "inl \ Inl" abbreviation inr :: "('j \ 'c) \ 'c + ('j \ 'c)" where "inr \ Inr" abbreviation \ where "\ \ S.UP o inr" abbreviation \ where "\ \ Cones.\" abbreviation \ where "\ \ \_. 
S.UP o inl" abbreviation \ where "\ \ Hom.\" abbreviation Y where "Y \ Y.Y" lemma Ya_ide: assumes a': "C.ide a'" shows "Y a a' = S.mkIde (Hom.set (a', a))" using assms ide_apex Y.Y_simp Hom.map_ide by simp lemma Ya_arr: assumes g: "C.arr g" shows "Y a g = S.mkArr (Hom.set (C.cod g, a)) (Hom.set (C.dom g, a)) (\ (C.dom g, a) o Cop.comp g o \ (C.cod g, a))" using ide_apex g Y.Y_ide_arr [of a g "C.dom g" "C.cod g"] by auto lemma cone_\ [simp]: shows "\ \ D.cones a" using cone_axioms by simp text\ For each object @{term a'} of @{term[source=true] C} we have a function mapping @{term "C.hom a' a"} to the set of cones over @{term D} with apex @{term a'}, which takes @{term "f \ C.hom a' a"} to \\f\, where \\f\ is the cone obtained by composing @{term \} with @{term f} (after accounting for coercions to and from the universe of @{term S}). The corresponding arrows of @{term S} are the components of a natural isomorphism from @{term "Y a"} to \Cones\. \ definition \o :: "'c \ ('c + ('j \ 'c)) setcat.arr" where "\o a' = S.mkArr (Hom.set (a', a)) (\ ` D.cones a') (\x. \ (D.cones_map (\ (a', a) x) \))" lemma \o_in_hom: assumes a': "C.ide a'" shows "\\o a' : S.mkIde (Hom.set (a', a)) \\<^sub>S S.mkIde (\ ` D.cones a')\" proof - have " \S.mkArr (Hom.set (a', a)) (\ ` D.cones a') (\x. \ (D.cones_map (\ (a', a) x) \)) : S.mkIde (Hom.set (a', a)) \\<^sub>S S.mkIde (\ ` D.cones a')\" proof - have "(\x. \ (D.cones_map (\ (a', a) x) \)) \ Hom.set (a', a) \ \ ` D.cones a'" proof fix x assume x: "x \ Hom.set (a', a)" hence "\\ (a', a) x : a' \ a\" using ide_apex a' Hom.\_mapsto by auto hence "D.cones_map (\ (a', a) x) \ \ D.cones a'" using ide_apex a' x D.cones_map_mapsto cone_\ by force thus "\ (D.cones_map (\ (a', a) x) \) \ \ ` D.cones a'" by simp qed moreover have "Hom.set (a', a) \ S.Univ" using ide_apex a' Hom.set_subset_Univ by auto moreover have "\ ` D.cones a' \ S.Univ" using S.UP_mapsto by auto ultimately show ?thesis using S.mkArr_in_hom by simp qed thus ?thesis using \o_def [of a'] by auto qed interpretation \: transformation_by_components Cop.comp S.comp \Y a\ Cones.map \o proof fix a' assume A': "Cop.ide a'" show "\\o a' : Y a a' \\<^sub>S Cones.map a'\" using A' Ya_ide \o_in_hom Cones.map_ide by auto next fix g assume g: "Cop.arr g" show "\o (Cop.cod g) \\<^sub>S Y a g = Cones.map g \\<^sub>S \o (Cop.dom g)" proof - let ?A = "Hom.set (C.cod g, a)" let ?B = "Hom.set (C.dom g, a)" let ?B' = "\ ` D.cones (C.cod g)" let ?C = "\ ` D.cones (C.dom g)" let ?F = "\ (C.dom g, a) o Cop.comp g o \ (C.cod g, a)" let ?F' = "\ o D.cones_map g o \" let ?G = "\x. \ (D.cones_map (\ (C.dom g, a) x) \)" let ?G' = "\x. 
\ (D.cones_map (\ (C.cod g, a) x) \)" have "S.arr (Y a g) \ Y a g = S.mkArr ?A ?B ?F" using ide_apex g Ya.preserves_arr Ya_arr by fastforce moreover have "S.arr (\o (Cop.cod g))" using g \o_in_hom [of "Cop.cod g"] by auto moreover have "\o (Cop.cod g) = S.mkArr ?B ?C ?G" using g \o_def [of "C.dom g"] by auto moreover have "S.seq (\o (Cop.cod g)) (Y a g)" using ide_apex g \o_in_hom [of "Cop.cod g"] by auto ultimately have 1: "S.seq (\o (Cop.cod g)) (Y a g) \ \o (Cop.cod g) \\<^sub>S Y a g = S.mkArr ?A ?C (?G o ?F)" using S.comp_mkArr [of ?A ?B ?F ?C ?G] by argo have "Cones.map g = S.mkArr (\ ` D.cones (C.cod g)) (\ ` D.cones (C.dom g)) ?F'" using g Cones.map_simp by fastforce moreover have "\o (Cop.dom g) = S.mkArr ?A ?B' ?G'" using g \o_def by fastforce moreover have "S.seq (Cones.map g) (\o (Cop.dom g))" using g Cones.preserves_hom [of g "C.cod g" "C.dom g"] \o_in_hom [of "Cop.dom g"] by force ultimately have 2: "S.seq (Cones.map g) (\o (Cop.dom g)) \ Cones.map g \\<^sub>S \o (Cop.dom g) = S.mkArr ?A ?C (?F' o ?G')" using S.seqI' [of "\o (Cop.dom g)" "Cones.map g"] S.comp_mkArr by auto have "\o (Cop.cod g) \\<^sub>S Y a g = S.mkArr ?A ?C (?G o ?F)" using 1 by auto also have "... = S.mkArr ?A ?C (?F' o ?G')" proof (intro S.mkArr_eqI') show "S.arr (S.mkArr ?A ?C (?G o ?F))" using 1 by force show "\x. x \ ?A \ (?G o ?F) x = (?F' o ?G') x" proof - fix x assume x: "x \ ?A" hence 1: "\\ (C.cod g, a) x : C.cod g \ a\" using ide_apex g Hom.\_mapsto [of "C.cod g" a] by auto have "(?G o ?F) x = \ (D.cones_map (\ (C.dom g, a) (\ (C.dom g, a) (\ (C.cod g, a) x \ g))) \)" proof - (* Why is it so balky with this proof? *) have "(?G o ?F) x = ?G (?F x)" by simp also have "... = \ (D.cones_map (\ (C.dom g, a) (\ (C.dom g, a) (\ (C.cod g, a) x \ g))) \)" by (metis Cop.comp_def comp_apply) finally show ?thesis by auto qed also have "... = \ (D.cones_map (\ (C.cod g, a) x \ g) \)" proof - have "\\ (C.cod g, a) x \ g : C.dom g \ a\" using g 1 by auto thus ?thesis using Hom.\_\ by presburger qed also have "... = \ (D.cones_map g (D.cones_map (\ (C.cod g, a) x) \))" using g x 1 cone_\ D.cones_map_comp [of "\ (C.cod g, a) x" g] by fastforce also have "... = \ (D.cones_map g (\ (\ (D.cones_map (\ (C.cod g, a) x) \))))" using 1 cone_\ D.cones_map_mapsto Sr.DN_UP by auto also have "... = (?F' o ?G') x" by simp finally show "(?G o ?F) x = (?F' o ?G') x" by auto qed qed also have "... = Cones.map g \\<^sub>S \o (Cop.dom g)" using 2 by auto finally show ?thesis by auto qed qed interpretation \: set_valued_transformation Cop.comp S.comp S.setp \Y a\ Cones.map \.map .. interpretation \: natural_isomorphism Cop.comp S.comp \Y a\ Cones.map \.map proof fix a' assume a': "Cop.ide a'" show "S.iso (\.map a')" proof - let ?F = "\x. \ (D.cones_map (\ (a', a) x) \)" have bij: "bij_betw ?F (Hom.set (a', a)) (\ ` D.cones a')" proof - have "\x x'. \ x \ Hom.set (a', a); x' \ Hom.set (a', a); \ (D.cones_map (\ (a', a) x) \) = \ (D.cones_map (\ (a', a) x') \) \ \ x = x'" proof - fix x x' assume x: "x \ Hom.set (a', a)" and x': "x' \ Hom.set (a', a)" and xx': "\ (D.cones_map (\ (a', a) x) \) = \ (D.cones_map (\ (a', a) x') \)" have \x: "\\ (a', a) x : a' \ a\" using x ide_apex a' Hom.\_mapsto by auto have \x': "\\ (a', a) x' : a' \ a\" using x' ide_apex a' Hom.\_mapsto by auto have 1: "\!f. \f : a' \ a\ \ \ (D.cones_map f \) = \ (D.cones_map (\ (a', a) x) \)" proof - have "D.cones_map (\ (a', a) x) \ \ D.cones a'" using \x a' cone_\ D.cones_map_mapsto by force hence 2: "\!f. 
\f : a' \ a\ \ D.cones_map f \ = D.cones_map (\ (a', a) x) \" using a' is_universal by simp show "\!f. \f : a' \ a\ \ \ (D.cones_map f \) = \ (D.cones_map (\ (a', a) x) \)" proof - have "\f. \ (D.cones_map f \) = \ (D.cones_map (\ (a', a) x) \) \ D.cones_map f \ = D.cones_map (\ (a', a) x) \" proof - fix f :: 'c have "D.cones_map f \ = D.cones_map (\ (a', a) x) \ \ \ (D.cones_map f \) = \ (D.cones_map (\ (a', a) x) \)" by simp thus "(\ (D.cones_map f \) = \ (D.cones_map (\ (a', a) x) \)) = (D.cones_map f \ = D.cones_map (\ (a', a) x) \)" by (meson Sr.inj_UP injD) qed thus ?thesis using 2 by auto qed qed have 2: "\!x''. x'' \ Hom.set (a', a) \ \ (D.cones_map (\ (a', a) x'') \) = \ (D.cones_map (\ (a', a) x) \)" proof - from 1 obtain f'' where f'': "\f'' : a' \ a\ \ \ (D.cones_map f'' \) = \ (D.cones_map (\ (a', a) x) \)" by blast have "\ (a', a) f'' \ Hom.set (a', a) \ \ (D.cones_map (\ (a', a) (\ (a', a) f'')) \) = \ (D.cones_map (\ (a', a) x) \)" proof show "\ (a', a) f'' \ Hom.set (a', a)" using f'' Hom.set_def by auto show "\ (D.cones_map (\ (a', a) (\ (a', a) f'')) \) = \ (D.cones_map (\ (a', a) x) \)" using f'' Hom.\_\ by presburger qed moreover have "\x''. x'' \ Hom.set (a', a) \ \ (D.cones_map (\ (a', a) x'') \) = \ (D.cones_map (\ (a', a) x) \) \ x'' = \ (a', a) f''" proof - fix x'' assume x'': "x'' \ Hom.set (a', a) \ \ (D.cones_map (\ (a', a) x'') \) = \ (D.cones_map (\ (a', a) x) \)" hence "\\ (a', a) x'' : a' \ a\ \ \ (D.cones_map (\ (a', a) x'') \) = \ (D.cones_map (\ (a', a) x) \)" using ide_apex a' Hom.set_def Hom.\_mapsto [of a' a] by auto hence "\ (a', a) (\ (a', a) x'') = \ (a', a) f''" using 1 f'' by auto thus "x'' = \ (a', a) f''" using ide_apex a' x'' Hom.\_\ by simp qed ultimately show ?thesis using ex1I [of "\x'. x' \ Hom.set (a', a) \ \ (D.cones_map (\ (a', a) x') \) = \ (D.cones_map (\ (a', a) x) \)" "\ (a', a) f''"] by simp qed thus "x = x'" using x x' xx' by auto qed hence "inj_on ?F (Hom.set (a', a))" using inj_onI [of "Hom.set (a', a)" ?F] by auto moreover have "?F ` Hom.set (a', a) = \ ` D.cones a'" proof show "?F ` Hom.set (a', a) \ \ ` D.cones a'" proof fix X' assume X': "X' \ ?F ` Hom.set (a', a)" from this obtain x' where x': "x' \ Hom.set (a', a) \ ?F x' = X'" by blast show "X' \ \ ` D.cones a'" proof - have "X' = \ (D.cones_map (\ (a', a) x') \)" using x' by blast hence "X' = \ (D.cones_map (\ (a', a) x') \)" using x' by force moreover have "\\ (a', a) x' : a' \ a\" using ide_apex a' x' Hom.set_def Hom.\_\ by auto ultimately show ?thesis using x' cone_\ D.cones_map_mapsto by force qed qed show "\ ` D.cones a' \ ?F ` Hom.set (a', a)" proof fix X' assume X': "X' \ \ ` D.cones a'" hence "\ X' \ \ ` \ ` D.cones a'" by simp with Sr.DN_UP have "\ X' \ D.cones a'" by auto hence "\!f. 
\f : a' \ a\ \ D.cones_map f \ = \ X'" using a' is_universal by simp from this obtain f where "\f : a' \ a\ \ D.cones_map f \ = \ X'" by auto hence f: "\f : a' \ a\ \ \ (D.cones_map f \) = X'" using X' Sr.UP_DN by auto have "X' = ?F (\ (a', a) f)" using f Hom.\_\ by presburger thus "X' \ ?F ` Hom.set (a', a)" using f Hom.set_def by force qed qed ultimately show ?thesis using bij_betw_def [of ?F "Hom.set (a', a)" "\ ` D.cones a'"] inj_on_def by auto qed let ?f = "S.mkArr (Hom.set (a', a)) (\ ` D.cones a') ?F" have iso: "S.iso ?f" proof - have "?F \ Hom.set (a', a) \ \ ` D.cones a'" using bij bij_betw_imp_funcset by fast hence 1: "S.arr ?f" using ide_apex a' Hom.set_subset_Univ S.UP_mapsto by auto thus ?thesis using bij S.iso_char S.set_mkIde by fastforce qed moreover have "?f = \.map a'" using a' \o_def by force finally show ?thesis by auto qed qed interpretation R: representation_of_functor C S.comp S.setp \ Cones.map a \.map .. lemma \_in_terms_of_\: shows "\ = \ (\.FUN a (\ (a, a) a))" proof - have "\.FUN a (\ (a, a) a) = (\x \ Hom.set (a, a). \ (D.cones_map (\ (a, a) x) \)) (\ (a, a) a)" using ide_apex S.Fun_mkArr \.map_simp_ide \o_def \.preserves_reflects_arr [of a] by simp also have "... = \ (D.cones_map a \)" proof - have "(\x \ Hom.set (a, a). \ (D.cones_map (\ (a, a) x) \)) (\ (a, a) a) = \ (D.cones_map (\ (a, a) (\ (a, a) a)) \)" proof - have "\ (a, a) a \ Hom.set (a, a)" using ide_apex Hom.\_mapsto by fastforce thus ?thesis using restrict_apply' [of "\ (a, a) a" "Hom.set (a, a)"] by blast qed also have "... = \ (D.cones_map a \)" proof - have "\ (a, a) (\ (a, a) a) = a" using ide_apex Hom.\_\ [of a a a] by fastforce thus ?thesis by metis qed finally show ?thesis by auto qed finally have "\.FUN a (\ (a, a) a) = \ (D.cones_map a \)" by auto also have "... = \ \" using ide_apex D.cones_map_ide [of \ a] cone_\ by simp finally have "\.FUN a (\ (a, a) a) = \ \" by blast hence "\ (\.FUN a (\ (a, a) a)) = \ (\ \)" by simp thus ?thesis using cone_\ Sr.DN_UP by simp qed abbreviation Hom where "Hom \ Hom.map" abbreviation \ where "\ \ \.map" lemma induces_limit_situation: shows "limit_situation J C D S.comp \ \ a \ \" using \_in_terms_of_\ \o_def by unfold_locales auto no_notation S.comp (infixr "\\<^sub>S" 55) end sublocale limit_cone \ limit_situation J C D replete_setcat.comp \ \ a \ \ using induces_limit_situation by auto subsection "Representations of the Cones Functor Induce Limit Situations" context representation_of_cones_functor begin interpretation \: set_valued_transformation Cop.comp S S.setp \Y a\ Cones.map \ .. interpretation \: inverse_transformation Cop.comp S \Y a\ Cones.map \ .. interpretation \: set_valued_transformation Cop.comp S S.setp Cones.map \Y a\ \.map .. 
abbreviation \ where "\ \ Cones.\" abbreviation \ where "\ \ \ (S.Fun (\ a) (\ (a, a) a))" lemma Cones_SET_eq_\_img_cones: assumes "C.ide a'" shows "Cones.SET a' = \ ` D.cones a'" proof - have "\ ` D.cones a' \ S.Univ" using S.UP_mapsto by auto thus ?thesis using assms Cones.map_ide S.set_mkIde by auto qed lemma \\: shows "\ \ = S.Fun (\ a) (\ (a, a) a)" proof - have "S.Fun (\ a) (\ (a, a) a) \ Cones.SET a" using Ya.ide_a Hom.\_mapsto S.Fun_mapsto [of "\ a"] Hom.set_map by fastforce thus ?thesis using Ya.ide_a Cones_SET_eq_\_img_cones by auto qed interpretation \: cone J C D a \ proof - have "\ \ \ \ ` D.cones a" using Ya.ide_a \\ S.Fun_mapsto [of "\ a"] Hom.\_mapsto Hom.set_map Cones_SET_eq_\_img_cones by fastforce thus "D.cone a \" by (metis (no_types, lifting) S.DN_UP UNIV_I f_inv_into_f inv_into_into mem_Collect_eq) qed lemma cone_\: shows "D.cone a \" .. lemma \_FUN_simp: assumes a': "C.ide a'" and x: "x \ Hom.set (a', a)" shows "\.FUN a' x = Cones.FUN (\ (a', a) x) (\ \)" proof - have \x: "\\ (a', a) x : a' \ a\" using Ya.ide_a a' x Hom.\_mapsto by blast have \a: "\ (a, a) a \ Hom.set (a, a)" using Ya.ide_a Hom.\_mapsto by fastforce have "\.FUN a' x = (\.FUN a' o Ya.FUN (\ (a', a) x)) (\ (a, a) a)" proof - have "\ (a', a) (a \ \ (a', a) x) = x" using Ya.ide_a a' x \x Hom.\_\ C.comp_cod_arr by fastforce moreover have "S.arr (S.mkArr (Hom.set (a, a)) (Hom.set (a', a)) (\ (a', a) \ Cop.comp (\ (a', a) x) \ \ (a, a)))" by (metis C.arrI Cop.arr_char Ya.Y_ide_arr(2) Ya.preserves_arr \.ide_apex \x) ultimately show ?thesis using Ya.ide_a a' x Ya.Y_ide_arr \x \a C.ide_in_hom by auto qed also have "... = (Cones.FUN (\ (a', a) x) o \.FUN a) (\ (a, a) a)" proof - have "(\.FUN a' o Ya.FUN (\ (a', a) x)) (\ (a, a) a) = S.Fun (\ a' \\<^sub>S Y a (\ (a', a) x)) (\ (a, a) a)" using \x a' \a Ya.ide_a Ya.map_simp Hom.set_map by (elim C.in_homE, auto) also have "... = S.Fun (S (Cones.map (\ (a', a) x)) (\ a)) (\ (a, a) a)" using \x is_natural_1 [of "\ (a', a) x"] is_natural_2 [of "\ (a', a) x"] by auto also have "... = (Cones.FUN (\ (a', a) x) o \.FUN a) (\ (a, a) a)" proof - have "S.seq (Cones.map (\ (a', a) x)) (\ a)" using Ya.ide_a \x Cones.map_preserves_dom [of "\ (a', a) x"] apply (intro S.seqI) apply auto[2] by fastforce thus ?thesis using Ya.ide_a \a Hom.set_map by auto qed finally show ?thesis by simp qed also have "... = Cones.FUN (\ (a', a) x) (\ \)" using \\ by simp finally show ?thesis by auto qed lemma \_is_universal: assumes "D.cone a' \'" shows "\\ (a', a) (\.FUN a' (\ \')) : a' \ a\" and "D.cones_map (\ (a', a) (\.FUN a' (\ \'))) \ = \'" and "\ \f' : a' \ a\; D.cones_map f' \ = \' \ \ f' = \ (a', a) (\.FUN a' (\ \'))" proof - interpret \': cone J C D a' \' using assms by auto have a': "C.ide a'" using \'.ide_apex by simp have \\': "\ \' \ Cones.SET a'" using assms a' Cones_SET_eq_\_img_cones by auto let ?f = "\ (a', a) (\.FUN a' (\ \'))" have A: "\.FUN a' (\ \') \ Hom.set (a', a)" proof - have "\.FUN a' \ Cones.SET a' \ Ya.SET a'" using a' \.preserves_hom [of a' a' a'] S.Fun_mapsto [of "\.map a'"] by fastforce thus ?thesis using a' \\' Ya.ide_a Hom.set_map by auto qed show f: "\?f : a' \ a\" using A a' Ya.ide_a Hom.\_mapsto [of a' a] by auto have E: "\f. 
\f : a' \ a\ \ Cones.FUN f (\ \) = \.FUN a' (\ (a', a) f)" proof - fix f assume f: "\f : a' \ a\" have "\ (a', a) f \ Hom.set (a', a)" using a' Ya.ide_a f Hom.\_mapsto by auto thus "Cones.FUN f (\ \) = \.FUN a' (\ (a', a) f)" using a' f \_FUN_simp by simp qed have I: "\.FUN a' (\.FUN a' (\ \')) = \ \'" proof - have "\.FUN a' (\.FUN a' (\ \')) = compose (\.DOM a') (\.FUN a') (\.FUN a') (\ \')" using a' \\' Cones.map_ide \.preserves_hom [of a' a' a'] by force also have "... = (\x \ \.DOM a'. x) (\ \')" using a' \.inverts_components S.inverse_arrows_char by force also have "... = \ \'" using a' \\' Cones.map_ide \.preserves_hom [of a' a' a'] by force finally show ?thesis by auto qed show f\: "D.cones_map ?f \ = \'" proof - have "D.cones_map ?f \ = (\ o Cones.FUN ?f o \) \" using f Cones.preserves_arr [of ?f] cone_\ by (cases "D.cone a \", auto) also have "... = \'" using f Ya.ide_a a' A E I by auto finally show ?thesis by auto qed show "\ \f' : a' \ a\; D.cones_map f' \ = \' \ \ f' = ?f" proof - assume f': "\f' : a' \ a\" and f'\: "D.cones_map f' \ = \'" show "f' = ?f" proof - have 1: "\ (a', a) f' \ Hom.set (a', a) \ \ (a', a) ?f \ Hom.set (a', a)" using Ya.ide_a a' f f' Hom.\_mapsto by auto have "S.iso (\ a')" using \'.ide_apex components_are_iso by auto hence 2: "S.arr (\ a') \ bij_betw (\.FUN a') (Hom.set (a', a)) (Cones.SET a')" using Ya.ide_a a' S.iso_char Hom.set_map by auto have "\.FUN a' (\ (a', a) f') = \.FUN a' (\ (a', a) ?f)" proof - have "\.FUN a' (\ (a', a) ?f) = \ \'" using A I Hom.\_\ Ya.ide_a a' by simp also have "... = Cones.FUN f' (\ \)" using f f' A E cone_\ Cones.preserves_arr f\ f'\ by (elim C.in_homE, auto) also have "... = \.FUN a' (\ (a', a) f')" using f' E by simp finally show ?thesis by argo qed moreover have "inj_on (\.FUN a') (Hom.set (a', a))" using 2 bij_betw_imp_inj_on by blast ultimately have 3: "\ (a', a) f' = \ (a', a) ?f" using 1 inj_on_def [of "\.FUN a'" "Hom.set (a', a)"] by blast show ?thesis proof - have "f' = \ (a', a) (\ (a', a) f')" using Ya.ide_a a' f' Hom.\_\ by simp also have "... = \ (a', a) (\.FUN a' (\ \'))" using Ya.ide_a a' Hom.\_\ A 3 by simp finally show ?thesis by blast qed qed qed qed interpretation \: limit_cone J C D a \ proof show "\a' \'. D.cone a' \' \ \!f. \f : a' \ a\ \ D.cones_map f \ = \'" proof - fix a' \' assume 1: "D.cone a' \'" show "\!f. \f : a' \ a\ \ D.cones_map f \ = \'" proof show "\\ (a', a) (\.FUN a' (\ \')) : a' \ a\ \ D.cones_map (\ (a', a) (\.FUN a' (\ \'))) \ = \'" using 1 \_is_universal by blast show "\f. \f : a' \ a\ \ D.cones_map f \ = \' \ f = \ (a', a) (\.FUN a' (\ \'))" using 1 \_is_universal by blast qed qed qed lemma \_is_limit_cone: shows "D.limit_cone a \" .. lemma induces_limit_situation: shows "limit_situation J C D S \ \ a \ \" proof show "\ = \" by simp fix a' assume a': "Cop.ide a'" let ?F = "\x. \ (D.cones_map (\ (a', a) x) \)" show "\ a' = S.mkArr (Hom.set (a', a)) (\ ` D.cones a') ?F" proof - have 1: "\\ a' : S.mkIde (Hom.set (a', a)) \\<^sub>S S.mkIde (\ ` D.cones a')\" using a' Cones.map_ide Ya.ide_a by auto moreover have "\.DOM a' = Hom.set (a', a)" using 1 Hom.set_subset_Univ a' Ya.ide_a Hom.set_map by simp moreover have "\.COD a' = \ ` D.cones a'" using a' Cones_SET_eq_\_img_cones by fastforce ultimately have 2: "\ a' = S.mkArr (Hom.set (a', a)) (\ ` D.cones a') (\.FUN a')" using S.mkArr_Fun [of "\ a'"] by fastforce also have "... = S.mkArr (Hom.set (a', a)) (\ ` D.cones a') ?F" proof show "S.arr (S.mkArr (Hom.set (a', a)) (\ ` D.cones a') (\.FUN a'))" using 1 2 by auto show "\x. 
x \ Hom.set (a', a) \ \.FUN a' x = ?F x" proof - fix x assume x: "x \ Hom.set (a', a)" hence \x: "\\ (a', a) x : a' \ a\" using a' Ya.ide_a Hom.\_mapsto by auto show "\.FUN a' x = ?F x" proof - have "\.FUN a' x = Cones.FUN (\ (a', a) x) (\ \)" using a' x \_FUN_simp by simp also have "... = restrict (\ o D.cones_map (\ (a', a) x) o \) (\ ` D.cones a) (\ \)" using \x Cones.map_simp Cones.preserves_arr [of "\ (a', a) x"] S.Fun_mkArr by (elim C.in_homE, auto) also have "... = ?F x" using cone_\ by simp ultimately show ?thesis by simp qed qed qed finally show "\ a' = S.mkArr (Hom.set (a', a)) (\ ` D.cones a') ?F" by auto qed qed end sublocale representation_of_cones_functor \ limit_situation J C D S \ \ a \ \ using induces_limit_situation by auto section "Categories with Limits" context category begin text\ A category @{term[source=true] C} has limits of shape @{term J} if every diagram of shape @{term J} admits a limit cone. \ definition has_limits_of_shape where "has_limits_of_shape J \ \D. diagram J C D \ (\a \. limit_cone J C D a \)" text\ A category has limits at a type @{typ 'j} if it has limits of shape @{term J} for every category @{term J} whose arrows are of type @{typ 'j}. \ definition has_limits where "has_limits (_ :: 'j) \ \J :: 'j comp. category J \ has_limits_of_shape J" text\ Whether a category has limits of shape \J\ truly depends only on the ``shape'' (\emph{i.e.}~isomorphism class) of \J\ and not on details of its construction. \ lemma has_limits_preserved_by_isomorphism: assumes "has_limits_of_shape J" and "isomorphic_categories J J'" shows "has_limits_of_shape J'" proof - interpret J: category J using assms(2) isomorphic_categories_def isomorphic_categories_axioms_def by auto interpret J': category J' using assms(2) isomorphic_categories_def isomorphic_categories_axioms_def by auto from assms(2) obtain \ \ where IF: "inverse_functors J' J \ \" using isomorphic_categories_def isomorphic_categories_axioms_def inverse_functors_sym by blast interpret IF: inverse_functors J' J \ \ using IF by auto have \\: "\ o \ = J.map" using IF.inv by metis have \\: "\ o \ = J'.map" using IF.inv' by metis have "\D'. diagram J' C D' \ \a \. limit_cone J' C D' a \" proof - fix D' assume D': "diagram J' C D'" interpret D': diagram J' C D' using D' by auto interpret D: composite_functor J J' C \ D' .. interpret D: diagram J C \D' o \\ .. have D: "diagram J C (D' o \)" .. from assms(1) obtain a \ where \: "D.limit_cone a \" using D has_limits_of_shape_def by blast interpret \: limit_cone J C \D' o \\ a \ using \ by auto interpret A': constant_functor J' C a using \.ide_apex by (unfold_locales, auto) have \o\: "cone J' C (D' o \ o \) a (\ o \)" using comp_cone_functor IF.G.functor_axioms \.cone_axioms by fastforce hence \o\: "cone J' C D' a (\ o \)" using \\ by (metis D'.functor_axioms Fun.comp_assoc comp_functor_identity) interpret \o\: cone J' C D' a \\ o \\ using \o\ by auto interpret \o\: limit_cone J' C D' a \\ o \\ proof fix a' \' assume \': "D'.cone a' \'" interpret \': cone J' C D' a' \' using \' by auto have \'o\: "cone J C (D' o \) a' (\' o \)" using \' comp_cone_functor IF.F.functor_axioms by fastforce interpret \'o\: cone J C \D' o \\ a' \\' o \\ using \'o\ by auto have "cone J C (D' o \) a' (\' o \)" .. hence 1: "\!f. \f : a' \ a\ \ D.cones_map f \ = \' o \" using \.is_universal by simp show "\!f. \f : a' \ a\ \ D'.cones_map f (\ o \) = \'" proof let ?f = "THE f. \f : a' \ a\ \ D.cones_map f \ = \' o \" have f: "\?f : a' \ a\ \ D.cones_map ?f \ = \' o \" using 1 theI' [of "\f. 
\f : a' \ a\ \ D.cones_map f \ = \' o \"] by blast have f_in_hom: "\?f : a' \ a\" using f by blast have "D'.cones_map ?f (\ o \) = \'" proof fix j' have "\J'.arr j' \ D'.cones_map ?f (\ o \) j' = \' j'" proof - assume j': "\J'.arr j'" have "D'.cones_map ?f (\ o \) j' = null" using j' f_in_hom \o\ by fastforce thus ?thesis using j' \'.is_extensional by simp qed moreover have "J'.arr j' \ D'.cones_map ?f (\ o \) j' = \' j'" proof - assume j': "J'.arr j'" have "D'.cones_map ?f (\ o \) j' = \ (\ j') \ ?f" using j' f \o\ by fastforce also have "... = D.cones_map ?f \ (\ j')" using j' f_in_hom \ \.cone_\ by fastforce also have "... = \' j'" using j' f \ \\ Fun.comp_def J'.map_simp by metis finally show "D'.cones_map ?f (\ o \) j' = \' j'" by auto qed ultimately show "D'.cones_map ?f (\ o \) j' = \' j'" by blast qed thus "\?f : a' \ a\ \ D'.cones_map ?f (\ o \) = \'" using f by auto fix f' assume f': "\f' : a' \ a\ \ D'.cones_map f' (\ o \) = \'" have "D.cones_map f' \ = \' o \" proof fix j have "\J.arr j \ D.cones_map f' \ j = (\' o \) j" using f' \ \'o\.is_extensional \.cone_\ mem_Collect_eq restrict_apply by auto moreover have "J.arr j \ D.cones_map f' \ j = (\' o \) j" proof - assume j: "J.arr j" have "D.cones_map f' \ j = C (\ j) f'" using j f' \.cone_\ by auto also have "... = C ((\ o \) (\ j)) f'" using j f' \\ by (metis comp_apply J.map_simp) also have "... = D'.cones_map f' (\ o \) (\ j)" using j f' \o\ by fastforce also have "... = (\' o \) j" using j f' by auto finally show "D.cones_map f' \ j = (\' o \) j" by auto qed ultimately show "D.cones_map f' \ j = (\' o \) j" by blast qed hence "\f' : a' \ a\ \ D.cones_map f' \ = \' o \" using f' by auto moreover have "\P x x'. (\!x. P x) \ P x \ P x' \ x = x'" by auto ultimately show "f' = ?f" using 1 f by blast qed qed have "limit_cone J' C D' a (\ o \)" .. thus "\a \. limit_cone J' C D' a \" by blast qed thus ?thesis using has_limits_of_shape_def by auto qed end subsection "Diagonal Functors" text\ The existence of limits can also be expressed in terms of adjunctions: a category @{term C} has limits of shape @{term J} if the diagonal functor taking each object @{term a} in @{term C} to the constant-@{term a} diagram and each arrow \f \ C.hom a a'\ to the constant-@{term f} natural transformation between diagrams is a left adjoint functor. 
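As an informal sketch of why such an adjunction captures limits (conventional notation, not part of the formal development: $\Delta$ for the diagonal functor, $\mathrm{Cone}(a, D)$ for the collection of cones over $D$ with apex $a$): an arrow $\Delta a \to D$ of $[J, C]$ is the same thing as a cone over $D$ with apex $a$, so a right adjoint $\lim$ to $\Delta$ would provide bijections
$$ [J, C](\Delta a, D) \;\cong\; \mathrm{Cone}(a, D) \;\cong\; C(a, \lim D), $$
natural in $a$, and the counit component at $D$ would single out a limit cone. The lemma \has_limits_iff_left_adjoint_diagonal\ and the locale \right_adjoint_to_diagonal_functor\ below make both directions of this correspondence precise.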
\ locale diagonal_functor = C: category C + J: category J + J_C: functor_category J C for J :: "'j comp" (infixr "\\<^sub>J" 55) and C :: "'c comp" (infixr "\" 55) begin notation J.in_hom ("\_ : _ \\<^sub>J _\") notation J_C.comp (infixr "\\<^sub>[\<^sub>J\<^sub>,\<^sub>C\<^sub>]" 55) notation J_C.in_hom ("\_ : _ \\<^sub>[\<^sub>J\<^sub>,\<^sub>C\<^sub>] _\") definition map :: "'c \ ('j, 'c) J_C.arr" where "map f = (if C.arr f then J_C.MkArr (constant_functor.map J C (C.dom f)) (constant_functor.map J C (C.cod f)) (constant_transformation.map J C f) else J_C.null)" lemma is_functor: shows "functor C J_C.comp map" proof fix f show "\ C.arr f \ local.map f = J_C.null" using map_def by simp assume f: "C.arr f" interpret Dom_f: constant_functor J C \C.dom f\ using f by (unfold_locales, auto) interpret Cod_f: constant_functor J C \C.cod f\ using f by (unfold_locales, auto) interpret Fun_f: constant_transformation J C f using f by (unfold_locales, auto) show 1: "J_C.arr (map f)" using f map_def by (simp add: Fun_f.natural_transformation_axioms) show "J_C.dom (map f) = map (C.dom f)" proof - have "constant_transformation J C (C.dom f)" using f by unfold_locales auto hence "constant_transformation.map J C (C.dom f) = Dom_f.map" using Dom_f.map_def constant_transformation.map_def [of J C "C.dom f"] by auto thus ?thesis using f 1 by (simp add: map_def J_C.dom_char) qed show "J_C.cod (map f) = map (C.cod f)" proof - have "constant_transformation J C (C.cod f)" using f by unfold_locales auto hence "constant_transformation.map J C (C.cod f) = Cod_f.map" using Cod_f.map_def constant_transformation.map_def [of J C "C.cod f"] by auto thus ?thesis using f 1 by (simp add: map_def J_C.cod_char) qed next fix f g assume g: "C.seq g f" have f: "C.arr f" using g by auto interpret Dom_f: constant_functor J C \C.dom f\ using f by unfold_locales auto interpret Cod_f: constant_functor J C \C.cod f\ using f by unfold_locales auto interpret Fun_f: constant_transformation J C f using f by unfold_locales auto interpret Cod_g: constant_functor J C \C.cod g\ using g by unfold_locales auto interpret Fun_g: constant_transformation J C g using g by unfold_locales auto interpret Fun_g: natural_transformation J C Cod_f.map Cod_g.map Fun_g.map apply unfold_locales using f g C.seqE [of g f] C.comp_arr_dom C.comp_cod_arr Fun_g.is_extensional by auto interpret Fun_fg: vertical_composite J C Dom_f.map Cod_f.map Cod_g.map Fun_f.map Fun_g.map .. have 1: "J_C.arr (map f)" using f map_def by (simp add: Fun_f.natural_transformation_axioms) show "map (g \ f) = map g \\<^sub>[\<^sub>J\<^sub>,\<^sub>C\<^sub>] map f" proof - have "map (C g f) = J_C.MkArr Dom_f.map Cod_g.map (constant_transformation.map J C (C g f))" using f g map_def by simp also have "... = J_C.MkArr Dom_f.map Cod_g.map (\j. if J.arr j then C g f else C.null)" proof - have "constant_transformation J C (g \ f)" using g by unfold_locales auto thus ?thesis using constant_transformation.map_def by metis qed also have "... = J_C.comp (J_C.MkArr Cod_f.map Cod_g.map Fun_g.map) (J_C.MkArr Dom_f.map Cod_f.map Fun_f.map)" proof - have "J_C.MkArr Cod_f.map Cod_g.map Fun_g.map \\<^sub>[\<^sub>J\<^sub>,\<^sub>C\<^sub>] J_C.MkArr Dom_f.map Cod_f.map Fun_f.map = J_C.MkArr Dom_f.map Cod_g.map Fun_fg.map" using J_C.comp_char J_C.comp_MkArr Fun_f.natural_transformation_axioms Fun_g.natural_transformation_axioms by blast also have "... = J_C.MkArr Dom_f.map Cod_g.map (\j. 
if J.arr j then g \ f else C.null)" using Fun_fg.is_extensional Fun_fg.map_simp_2 by auto finally show ?thesis by auto qed also have "... = map g \\<^sub>[\<^sub>J\<^sub>,\<^sub>C\<^sub>] map f" using f g map_def by fastforce finally show ?thesis by auto qed qed sublocale "functor" C J_C.comp map using is_functor by auto text\ The objects of \[J, C]\ correspond bijectively to diagrams of shape @{term J} in @{term C}. \ lemma ide_determines_diagram: assumes "J_C.ide d" shows "diagram J C (J_C.Map d)" and "J_C.MkIde (J_C.Map d) = d" proof - interpret \: natural_transformation J C \J_C.Map d\ \J_C.Map d\ \J_C.Map d\ using assms J_C.ide_char J_C.arr_MkArr by fastforce interpret D: "functor" J C \J_C.Map d\ .. show "diagram J C (J_C.Map d)" .. show "J_C.MkIde (J_C.Map d) = d" using assms J_C.ide_char by (metis J_C.ideD(1) J_C.MkArr_Map) qed lemma diagram_determines_ide: assumes "diagram J C D" shows "J_C.ide (J_C.MkIde D)" and "J_C.Map (J_C.MkIde D) = D" proof - interpret D: diagram J C D using assms by auto show "J_C.ide (J_C.MkIde D)" using J_C.ide_char using D.functor_axioms J_C.ide_MkIde by auto thus "J_C.Map (J_C.MkIde D) = D" using J_C.in_homE by simp qed lemma bij_betw_ide_diagram: shows "bij_betw J_C.Map (Collect J_C.ide) (Collect (diagram J C))" proof (intro bij_betwI) show "J_C.Map \ Collect J_C.ide \ Collect (diagram J C)" using ide_determines_diagram by blast show "J_C.MkIde \ Collect (diagram J C) \ Collect J_C.ide" using diagram_determines_ide by blast show "\d. d \ Collect J_C.ide \ J_C.MkIde (J_C.Map d) = d" using ide_determines_diagram by blast show "\D. D \ Collect (diagram J C) \ J_C.Map (J_C.MkIde D) = D" using diagram_determines_ide by blast qed text\ Arrows from the diagonal functor correspond bijectively to cones. \ lemma arrow_determines_cone: assumes "J_C.ide d" and "arrow_from_functor C J_C.comp map a d x" shows "cone J C (J_C.Map d) a (J_C.Map x)" and "J_C.MkArr (constant_functor.map J C a) (J_C.Map d) (J_C.Map x) = x" proof - interpret D: diagram J C \J_C.Map d\ using assms ide_determines_diagram by auto interpret x: arrow_from_functor C J_C.comp map a d x using assms by auto interpret A: constant_functor J C a using x.arrow by (unfold_locales, auto) interpret \: constant_transformation J C a using x.arrow by (unfold_locales, auto) have Dom_x: "J_C.Dom x = A.map" using J_C.in_hom_char map_def x.arrow by force have Cod_x: "J_C.Cod x = J_C.Map d" using x.arrow by auto interpret \: natural_transformation J C A.map \J_C.Map d\ \J_C.Map x\ using x.arrow J_C.arr_char [of x] Dom_x Cod_x by force show "D.cone a (J_C.Map x)" ..
show "J_C.MkArr A.map (J_C.Map d) (J_C.Map x) = x" using x.arrow Dom_x Cod_x \.natural_transformation_axioms by (intro J_C.arr_eqI, auto) qed lemma cone_determines_arrow: assumes "J_C.ide d" and "cone J C (J_C.Map d) a \" shows "arrow_from_functor C J_C.comp map a d (J_C.MkArr (constant_functor.map J C a) (J_C.Map d) \)" and "J_C.Map (J_C.MkArr (constant_functor.map J C a) (J_C.Map d) \) = \" proof - interpret \: cone J C \J_C.Map d\ a \ using assms(2) by auto let ?x = "J_C.MkArr \.A.map (J_C.Map d) \" interpret x: arrow_from_functor C J_C.comp map a d ?x proof have "\J_C.MkArr \.A.map (J_C.Map d) \ : J_C.MkIde \.A.map \\<^sub>[\<^sub>J\<^sub>,\<^sub>C\<^sub>] J_C.MkIde (J_C.Map d)\" using \.natural_transformation_axioms by auto moreover have "J_C.MkIde \.A.map = map a" using \.A.value_is_ide map_def \.A.map_def C.ide_char by (metis (no_types, lifting) J_C.dom_MkArr preserves_arr preserves_dom) moreover have "J_C.MkIde (J_C.Map d) = d" using assms ide_determines_diagram(2) by simp ultimately show "C.ide a \ \J_C.MkArr \.A.map (J_C.Map d) \ : map a \\<^sub>[\<^sub>J\<^sub>,\<^sub>C\<^sub>] d\" using \.A.value_is_ide by simp qed show "arrow_from_functor C J_C.comp map a d ?x" .. show "J_C.Map (J_C.MkArr (constant_functor.map J C a) (J_C.Map d) \) = \" by (simp add: \.natural_transformation_axioms) qed text\ Transforming a cone by composing at the apex with an arrow @{term g} corresponds, via the preceding bijections, to composition in \[J, C]\ with the image of @{term g} under the diagonal functor. \ lemma cones_map_is_composition: assumes "\g : a' \ a\" and "cone J C D a \" shows "J_C.MkArr (constant_functor.map J C a') D (diagram.cones_map J C D g \) = J_C.MkArr (constant_functor.map J C a) D \ \\<^sub>[\<^sub>J\<^sub>,\<^sub>C\<^sub>] map g" proof - interpret A: constant_transformation J C a using assms(1) by (unfold_locales, auto) interpret \: cone J C D a \ using assms(2) by auto have cone_\: "cone J C D a \" .. interpret A': constant_transformation J C a' using assms(1) by (unfold_locales, auto) let ?\' = "\.D.cones_map g \" interpret \': cone J C D a' ?\' using assms(1) cone_\ \.D.cones_map_mapsto by blast let ?x = "J_C.MkArr \.A.map D \" let ?x' = "J_C.MkArr \'.A.map D ?\'" show "?x' = J_C.comp ?x (map g)" proof (intro J_C.arr_eqI) have x: "J_C.arr ?x" using \.natural_transformation_axioms J_C.arr_char [of ?x] by simp show x': "J_C.arr ?x'" using \'.natural_transformation_axioms J_C.arr_char [of ?x'] by simp have 3: "\?x : map a \\<^sub>[\<^sub>J\<^sub>,\<^sub>C\<^sub>] J_C.MkIde D\" using \.D.diagram_axioms arrow_from_functor.arrow cone_\ cone_determines_arrow(1) diagram_determines_ide(1) by fastforce have 4: "\?x' : map a' \\<^sub>[\<^sub>J\<^sub>,\<^sub>C\<^sub>] J_C.MkIde D\" by (metis (no_types, lifting) J_C.Dom.simps(1) J_C.Dom_cod J_C.Map_cod J_C.cod_MkArr \'.cone_axioms arrow_from_functor.arrow category.ide_cod cone_determines_arrow(1) functor_def is_functor x) have seq_xg: "J_C.seq ?x (map g)" using assms(1) 3 preserves_hom [of g] by (intro J_C.seqI', auto) show 2: "J_C.seq ?x (map g)" using seq_xg J_C.seqI' by blast show "J_C.Dom ?x' = J_C.Dom (?x \\<^sub>[\<^sub>J\<^sub>,\<^sub>C\<^sub>] map g)" proof - have "J_C.Dom ?x' = J_C.Dom (J_C.dom ?x')" using x' J_C.Dom_dom by simp also have "... = J_C.Dom (map a')" using 4 by force also have "... = J_C.Dom (J_C.dom (?x \\<^sub>[\<^sub>J\<^sub>,\<^sub>C\<^sub>] map g))" using assms(1) 2 by auto also have "... 
= J_C.Dom (?x \\<^sub>[\<^sub>J\<^sub>,\<^sub>C\<^sub>] map g)" using seq_xg J_C.Dom_dom J_C.seqI' by blast finally show ?thesis by auto qed show "J_C.Cod ?x' = J_C.Cod (?x \\<^sub>[\<^sub>J\<^sub>,\<^sub>C\<^sub>] map g)" proof - have "J_C.Cod ?x' = J_C.Cod (J_C.cod ?x')" using x' J_C.Cod_cod by simp also have "... = J_C.Cod (J_C.MkIde D)" using 4 by force also have "... = J_C.Cod (J_C.cod (?x \\<^sub>[\<^sub>J\<^sub>,\<^sub>C\<^sub>] map g))" using 2 3 J_C.cod_comp J_C.in_homE by metis also have "... = J_C.Cod (?x \\<^sub>[\<^sub>J\<^sub>,\<^sub>C\<^sub>] map g)" using seq_xg J_C.Cod_cod J_C.seqI' by blast finally show ?thesis by auto qed show "J_C.Map ?x' = J_C.Map (?x \\<^sub>[\<^sub>J\<^sub>,\<^sub>C\<^sub>] map g)" proof - interpret g: constant_transformation J C g apply unfold_locales using assms(1) by auto interpret \og: vertical_composite J C A'.map \.A.map D g.map \ using assms(1) C.comp_arr_dom C.comp_cod_arr A'.is_extensional g.is_extensional apply (unfold_locales, auto) by (elim J.seqE, auto) have "J_C.Map (?x \\<^sub>[\<^sub>J\<^sub>,\<^sub>C\<^sub>] map g) = \og.map" using assms(1) 2 J_C.comp_char map_def by auto also have "... = J_C.Map ?x'" using x' \og.map_def J_C.arr_char [of ?x'] natural_transformation.is_extensional assms(1) cone_\ \og.map_simp_2 by fastforce finally show ?thesis by auto qed qed qed text\ Coextension along an arrow from a functor is equivalent to a transformation of cones. \ lemma coextension_iff_cones_map: assumes x: "arrow_from_functor C J_C.comp map a d x" and g: "\g : a' \ a\" and x': "\x' : map a' \\<^sub>[\<^sub>J\<^sub>,\<^sub>C\<^sub>] d\" shows "arrow_from_functor.is_coext C J_C.comp map a x a' x' g \ J_C.Map x' = diagram.cones_map J C (J_C.Map d) g (J_C.Map x)" proof - interpret x: arrow_from_functor C J_C.comp map a d x using assms by auto interpret A': constant_functor J C a' using assms(2) by (unfold_locales, auto) have x': "arrow_from_functor C J_C.comp map a' d x'" using A'.value_is_ide assms(3) by (unfold_locales, blast) have d: "J_C.ide d" using J_C.ide_cod x.arrow by blast let ?D = "J_C.Map d" let ?\ = "J_C.Map x" let ?\' = "J_C.Map x'" interpret D: diagram J C ?D using ide_determines_diagram J_C.ide_cod x.arrow by blast interpret \: cone J C ?D a ?\ using assms(1) d arrow_determines_cone by simp interpret \: constant_transformation J C g using g \.ide_apex by (unfold_locales, auto) interpret \og: vertical_composite J C A'.map \.A.map ?D \.map ?\ using g C.comp_arr_dom C.comp_cod_arr \.is_extensional by (unfold_locales, auto) show ?thesis proof assume 0: "x.is_coext a' x' g" show "?\' = D.cones_map g ?\" proof - have 1: "x' = x \\<^sub>[\<^sub>J\<^sub>,\<^sub>C\<^sub>] map g" using 0 x.is_coext_def by blast hence "?\' = J_C.Map x'" using 0 x.is_coext_def by fast moreover have "... 
= D.cones_map g ?\" proof - have "J_C.MkArr A'.map (J_C.Map d) (D.cones_map g (J_C.Map x)) = x'" proof - have "J_C.MkArr A'.map (J_C.Map d) (D.cones_map g (J_C.Map x)) = x \\<^sub>[\<^sub>J\<^sub>,\<^sub>C\<^sub>] map g" using d g cones_map_is_composition arrow_determines_cone(2) \.cone_axioms x.arrow_from_functor_axioms by auto thus ?thesis by (metis 1) qed moreover have "J_C.arr (J_C.MkArr A'.map (J_C.Map d) (D.cones_map g (J_C.Map x)))" using 1 d g cones_map_is_composition preserves_arr arrow_determines_cone(2) \.cone_axioms x.arrow_from_functor_axioms assms(3) by auto ultimately show ?thesis by auto qed ultimately show ?thesis by blast qed next assume X': "?\' = D.cones_map g ?\" show "x.is_coext a' x' g" proof - have 4: "J_C.seq x (map g)" using g x.arrow mem_Collect_eq preserves_arr preserves_cod by (elim C.in_homE, auto) hence 1: "x \\<^sub>[\<^sub>J\<^sub>,\<^sub>C\<^sub>] map g = J_C.MkArr (J_C.Dom (map g)) (J_C.Cod x) (vertical_composite.map J C (J_C.Map (map g)) ?\)" using J_C.comp_char [of x "map g"] by simp have 2: "vertical_composite.map J C (J_C.Map (map g)) ?\ = \og.map" by (simp add: map_def \.value_is_arr \.natural_transformation_axioms) have 3: "... = D.cones_map g ?\" using g \og.map_simp_2 \.cone_axioms \og.is_extensional by auto have "J_C.MkArr A'.map ?D ?\' = J_C.comp x (map g)" proof - have f1: "A'.map = J_C.Dom (map g)" using \.natural_transformation_axioms map_def g by auto have "J_C.Map d = J_C.Cod x" using x.arrow by auto thus ?thesis using f1 X' 1 2 3 by argo qed moreover have "J_C.MkArr A'.map ?D ?\' = x'" using d x' arrow_determines_cone by blast ultimately show ?thesis using g x.is_coext_def by simp qed qed qed end locale right_adjoint_to_diagonal_functor = C: category C + J: category J + J_C: functor_category J C + \: diagonal_functor J C + "functor" J_C.comp C G + Adj: meta_adjunction J_C.comp C \.map G \ \ for J :: "'j comp" (infixr "\\<^sub>J" 55) and C :: "'c comp" (infixr "\" 55) and G :: "('j, 'c) functor_category.arr \ 'c" and \ :: "'c \ ('j, 'c) functor_category.arr \ 'c" and \ :: "('j, 'c) functor_category.arr \ 'c \ ('j, 'c) functor_category.arr" + assumes adjoint: "adjoint_functors J_C.comp C \.map G" begin interpretation S: replete_setcat . interpretation Adj: adjunction J_C.comp C S.comp S.setp Adj.\C Adj.\D \.map G \ \ Adj.\ Adj.\ Adj.\ Adj.\ using Adj.induces_adjunction by simp text\ A right adjoint @{term G} to a diagonal functor maps each object @{term d} of \[J, C]\ (corresponding to a diagram @{term D} of shape @{term J} in @{term C} to an object of @{term C}. This object is the limit object, and the component at @{term d} of the counit of the adjunction determines the limit cone. \ lemma gives_limit_cones: assumes "diagram J C D" shows "limit_cone J C D (G (J_C.MkIde D)) (J_C.Map (Adj.\ (J_C.MkIde D)))" proof - interpret D: diagram J C D using assms by auto let ?d = "J_C.MkIde D" let ?a = "G ?d" let ?x = "Adj.\ ?d" let ?\ = "J_C.Map ?x" have "diagram J C D" .. hence 1: "J_C.ide ?d" using \.diagram_determines_ide by auto hence 2: "J_C.Map (J_C.MkIde D) = D" using assms 1 J_C.in_homE \.diagram_determines_ide(2) by simp interpret x: terminal_arrow_from_functor C J_C.comp \.map ?a ?d ?x apply unfold_locales apply (metis (no_types, lifting) "1" preserves_ide Adj.\_in_terms_of_\ Adj.\o_def Adj.\o_in_hom) by (metis 1 Adj.has_terminal_arrows_from_functor(1) terminal_arrow_from_functor.is_terminal) have 3: "arrow_from_functor C J_C.comp \.map ?a ?d ?x" .. 
interpret \: cone J C D ?a ?\ using 1 2 3 \.arrow_determines_cone [of ?d] by auto have cone_\: "D.cone ?a ?\" .. interpret \: limit_cone J C D ?a ?\ proof fix a' \' assume cone_\': "D.cone a' \'" interpret \': cone J C D a' \' using cone_\' by auto let ?x' = "J_C.MkArr \'.A.map D \'" interpret x': arrow_from_functor C J_C.comp \.map a' ?d ?x' using 1 2 by (metis \.cone_determines_arrow(1) cone_\') have "arrow_from_functor C J_C.comp \.map a' ?d ?x'" .. hence 4: "\!g. x.is_coext a' ?x' g" using x.is_terminal by simp have 5: "\g. \g : a' \\<^sub>C ?a\ \ x.is_coext a' ?x' g \ D.cones_map g ?\ = \'" using \.coextension_iff_cones_map x'.arrow x.arrow_from_functor_axioms by auto have 6: "\g. x.is_coext a' ?x' g \ \g : a' \\<^sub>C ?a\" using x.is_coext_def by simp show "\!g. \g : a' \\<^sub>C ?a\ \ D.cones_map g ?\ = \'" proof - have "\g. \g : a' \\<^sub>C ?a\ \ D.cones_map g ?\ = \'" using 4 5 6 by meson thus ?thesis using 4 5 6 by blast qed qed show "D.limit_cone ?a ?\" .. qed corollary gives_limits: assumes "diagram J C D" shows "diagram.has_as_limit J C D (G (J_C.MkIde D))" using assms gives_limit_cones by fastforce end lemma (in category) has_limits_iff_left_adjoint_diagonal: assumes "category J" shows "has_limits_of_shape J \ left_adjoint_functor C (functor_category.comp J C) (diagonal_functor.map J C)" proof - interpret J: category J using assms by auto interpret J_C: functor_category J C .. interpret \: diagonal_functor J C .. show ?thesis proof assume A: "left_adjoint_functor C J_C.comp \.map" interpret \: left_adjoint_functor C J_C.comp \.map using A by auto interpret Adj: meta_adjunction J_C.comp C \.map \.G \.\ \.\ using \.induces_meta_adjunction by auto have 1: "adjoint_functors J_C.comp C \.map \.G" using adjoint_functors_def \.induces_meta_adjunction by blast interpret G: right_adjoint_to_diagonal_functor J C \.G \.\ \.\ using 1 by unfold_locales auto show "has_limits_of_shape J" using A G.gives_limits has_limits_of_shape_def by blast next text\ If @{term "has_limits J"}, then every diagram @{term D} from @{term J} to @{term[source=true] C} has a limit cone. This means that, for every object @{term d} of the functor category \[J, C]\, there exists an object @{term a} of @{term C} and a terminal arrow from \\ a\ to @{term d} in \[J, C]\. The terminal arrow is given by the limit cone. \ assume A: "has_limits_of_shape J" show "left_adjoint_functor C J_C.comp \.map" proof fix d assume D: "J_C.ide d" interpret D: diagram J C \J_C.Map d\ using D \.ide_determines_diagram by auto let ?D = "J_C.Map d" have "diagram J C (J_C.Map d)" .. from this obtain a \ where limit: "limit_cone J C ?D a \" using A has_limits_of_shape_def by blast interpret A: constant_functor J C a using limit by (simp add: Limit.cone_def limit_cone_def) interpret \: limit_cone J C ?D a \ using limit by simp have cone_\: "cone J C ?D a \" .. let ?x = "J_C.MkArr A.map ?D \" interpret x: arrow_from_functor C J_C.comp \.map a d ?x using D cone_\ \.cone_determines_arrow by auto have "terminal_arrow_from_functor C J_C.comp \.map a d ?x" proof show "\a' x'. arrow_from_functor C J_C.comp \.map a' d x' \ \!g. x.is_coext a' x' g" proof - fix a' x' assume x': "arrow_from_functor C J_C.comp \.map a' d x'" interpret x': arrow_from_functor C J_C.comp \.map a' d x' using x' by auto interpret A': constant_functor J C a' by (unfold_locales, simp add: x'.arrow) let ?\' = "J_C.Map x'" interpret \': cone J C ?D a' ?\' using D x' \.arrow_determines_cone by auto have cone_\': "cone J C ?D a' ?\'" .. 
let ?g = "\.induced_arrow a' ?\'" show "\!g. x.is_coext a' x' g" proof show "x.is_coext a' x' ?g" proof (unfold x.is_coext_def) have 1: "\?g : a' \ a\ \ D.cones_map ?g \ = ?\'" using \.induced_arrow_def \.is_universal cone_\' theI' [of "\f. \f : a' \ a\ \ D.cones_map f \ = ?\'"] by presburger hence 2: "x' = ?x \\<^sub>[\<^sub>J\<^sub>,\<^sub>C\<^sub>] \.map ?g" proof - have "x' = J_C.MkArr A'.map ?D ?\'" using D \.arrow_determines_cone(2) x'.arrow_from_functor_axioms by auto thus ?thesis using 1 cone_\ \.cones_map_is_composition [of ?g a' a ?D \] by simp qed show "\?g : a' \ a\ \ x' = ?x \\<^sub>[\<^sub>J\<^sub>,\<^sub>C\<^sub>] \.map ?g" using 1 2 by auto qed next fix g assume X: "x.is_coext a' x' g" show "g = ?g" proof - have "\g : a' \ a\ \ D.cones_map g \ = ?\'" proof show G: "\g : a' \ a\" using X x.is_coext_def by blast show "D.cones_map g \ = ?\'" proof - have "?\' = J_C.Map (?x \\<^sub>[\<^sub>J\<^sub>,\<^sub>C\<^sub>] \.map g)" using X x.is_coext_def [of a' x' g] by fast also have "... = D.cones_map g \" proof - interpret map_g: constant_transformation J C g using G by (unfold_locales, auto) interpret \': vertical_composite J C map_g.F.map A.map \\.\.Ya.Cop_S.Map d\ map_g.map \ proof (intro_locales) have "map_g.G.map = A.map" using G by blast thus "natural_transformation_axioms J (\) map_g.F.map A.map map_g.map" using map_g.natural_transformation_axioms by (simp add: natural_transformation_def) qed have "J_C.Map (?x \\<^sub>[\<^sub>J\<^sub>,\<^sub>C\<^sub>] \.map g) = vertical_composite.map J C map_g.map \" proof - have "J_C.seq ?x (\.map g)" using G x.arrow by auto thus ?thesis using G \.map_def J_C.Map_comp' [of ?x "\.map g"] by auto qed also have "... = D.cones_map g \" using G cone_\ \'.map_def map_g.map_def \.is_natural_2 \'.map_simp_2 by auto finally show ?thesis by blast qed finally show ?thesis by auto qed qed thus ?thesis using cone_\' \.is_universal \.induced_arrow_def theI_unique [of "\g. \g : a' \ a\ \ D.cones_map g \ = ?\'" g] by presburger qed qed qed qed thus "\a x. terminal_arrow_from_functor C J_C.comp \.map a d x" by auto qed qed qed section "Right Adjoint Functors Preserve Limits" context right_adjoint_functor begin lemma preserves_limits: fixes J :: "'j comp" assumes "diagram J C E" and "diagram.has_as_limit J C E a" shows "diagram.has_as_limit J D (G o E) (G a)" proof - text\ From the assumption that @{term E} has a limit, obtain a limit cone @{term \}. \ interpret J: category J using assms(1) diagram_def by auto interpret E: diagram J C E using assms(1) by auto from assms(2) obtain \ where \: "limit_cone J C E a \" by auto interpret \: limit_cone J C E a \ using \ by auto have a: "C.ide a" using \.ide_apex by auto text\ Form the @{term E}-image \GE\ of the diagram @{term E}. \ interpret GE: composite_functor J C D E G .. interpret GE: diagram J D GE.map .. text\Let \G\\ be the @{term G}-image of the cone @{term \}, and note that it is a cone over \GE\.\ let ?G\ = "G o \" interpret G\: cone J D GE.map \G a\ ?G\ using \.cone_axioms preserves_cones by blast text\ Claim that \G\\ is a limit cone for diagram \GE\. \ interpret G\: limit_cone J D GE.map \G a\ ?G\ proof text \ Let @{term \} be an arbitrary cone over \GE\. \ fix b \ assume \: "GE.cone b \" interpret \: cone J D GE.map b \ using \ by auto interpret Fb: constant_functor J C \F b\ apply unfold_locales by (meson F_is_functor \.ide_apex functor.preserves_ide) interpret Adj: meta_adjunction C D F G \ \ using induces_meta_adjunction by auto interpret S: replete_setcat . 
interpret Adj: adjunction C D S.comp S.setp Adj.\C Adj.\D F G \ \ Adj.\ Adj.\ Adj.\ Adj.\ using Adj.induces_adjunction by simp text\ For each arrow @{term j} of @{term J}, let @{term "\' j"} be defined to be the adjunct of @{term "\ j"}. We claim that @{term \'} is a cone over @{term E}. \ let ?\' = "\j. if J.arr j then Adj.\ (C.cod (E j)) \\<^sub>C F (\ j) else C.null" have cone_\': "E.cone (F b) ?\'" proof show "\j. \J.arr j \ ?\' j = C.null" by simp fix j assume j: "J.arr j" show "C.dom (?\' j) = Fb.map (J.dom j)" using j \_in_hom by simp show "C.cod (?\' j) = E (J.cod j)" using j \_in_hom by simp show "E j \\<^sub>C ?\' (J.dom j) = ?\' j" proof - have "E j \\<^sub>C ?\' (J.dom j) = (E j \\<^sub>C Adj.\ (E (J.dom j))) \\<^sub>C F (\ (J.dom j))" using j C.comp_assoc by simp also have "... = Adj.\ (E (J.cod j)) \\<^sub>C F (\ j)" proof - have "(E j \\<^sub>C Adj.\ (E (J.dom j))) \\<^sub>C F (\ (J.dom j)) = (Adj.\ (C.cod (E j)) \\<^sub>C Adj.FG.map (E j)) \\<^sub>C F (\ (J.dom j))" using j Adj.\.naturality [of "E j"] by fastforce also have "... = Adj.\ (C.cod (E j)) \\<^sub>C Adj.FG.map (E j) \\<^sub>C F (\ (J.dom j))" using C.comp_assoc by simp also have "... = Adj.\ (E (J.cod j)) \\<^sub>C F (\ j)" proof - have "Adj.FG.map (E j) \\<^sub>C F (\ (J.dom j)) = F (GE.map j \\<^sub>D \ (J.dom j))" using j by simp hence "Adj.FG.map (E j) \\<^sub>C F (\ (J.dom j)) = F (\ j)" using j \.is_natural_1 by metis thus ?thesis using j by simp qed finally show ?thesis by auto qed also have "... = ?\' j" using j by simp finally show ?thesis by auto qed show "?\' (J.cod j) \\<^sub>C Fb.map j = ?\' j" proof - have "?\' (J.cod j) \\<^sub>C Fb.map j = Adj.\ (E (J.cod j)) \\<^sub>C F (\ (J.cod j))" using j Fb.value_is_ide Adj.\.preserves_hom C.comp_arr_dom [of "F (\ (J.cod j))"] C.comp_assoc by simp also have "... = Adj.\ (E (J.cod j)) \\<^sub>C F (\ j)" using j \.is_natural_1 \.is_natural_2 Adj.\.naturality J.arr_cod_iff_arr by (metis J.cod_cod \.A.map_simp) also have "... = ?\' j" using j by simp finally show ?thesis by auto qed qed text\ Using the universal property of the limit cone @{term \}, obtain the unique arrow @{term f} that transforms @{term \} into @{term \'}. \ from this \.is_universal [of "F b" ?\'] obtain f where f: "\f : F b \\<^sub>C a\ \ E.cones_map f \ = ?\'" by auto text\ Let @{term g} be the adjunct of @{term f}, and show that @{term g} transforms @{term G\} into @{term \}. \ let ?g = "G f \\<^sub>D Adj.\ b" have 1: "\?g : b \\<^sub>D G a\" using f \.ide_apex by fastforce moreover have "GE.cones_map ?g ?G\ = \" proof fix j have "\J.arr j \ GE.cones_map ?g ?G\ j = \ j" using 1 G\.cone_axioms \.is_extensional by auto moreover have "J.arr j \ GE.cones_map ?g ?G\ j = \ j" proof - fix j assume j: "J.arr j" have "GE.cones_map ?g ?G\ j = G (\ j) \\<^sub>D ?g" using j 1 G\.cone_axioms mem_Collect_eq restrict_apply by auto also have "... = G (\ j \\<^sub>C f) \\<^sub>D Adj.\ b" using j f \.preserves_hom [of j "J.dom j" "J.cod j"] D.comp_assoc by fastforce also have "... = G (E.cones_map f \ j) \\<^sub>D Adj.\ b" proof - have "\ j \\<^sub>C f = Adj.\ (C.cod (E j)) \\<^sub>C F (\ j)" proof - have "\ j \\<^sub>C f = E.cones_map f \ j" proof - have "E.cone (C.cod f) \" using f \.cone_axioms by blast thus ?thesis using \.is_extensional by simp qed also have "... = Adj.\ (C.cod (E j)) \\<^sub>C F (\ j)" using j f by simp finally show ?thesis by blast qed thus ?thesis using f mem_Collect_eq restrict_apply Adj.F.is_extensional by simp qed also have "... 
= (G (Adj.\ (C.cod (E j))) \\<^sub>D Adj.\ (D.cod (GE.map j))) \\<^sub>D \ j" using j f Adj.\.naturality [of "\ j"] D.comp_assoc by auto also have "... = D.cod (\ j) \\<^sub>D \ j" using j Adj.\\.triangle_G Adj.\_in_terms_of_\ Adj.\o_def Adj.\_in_terms_of_\ Adj.\o_def Adj.unit_counit_G by fastforce also have "... = \ j" using j D.comp_cod_arr by simp finally show "GE.cones_map ?g ?G\ j = \ j" by metis qed ultimately show "GE.cones_map ?g ?G\ j = \ j" by auto qed ultimately have "\?g : b \\<^sub>D G a\ \ GE.cones_map ?g ?G\ = \" by auto text\ It remains to be shown that @{term g} is the unique such arrow. Given any @{term g'} that transforms @{term G\} into @{term \}, its adjunct transforms @{term \} into @{term \'}. The adjunct of @{term g'} is therefore equal to @{term f}, which implies @{term g'} = @{term g}. \ moreover have "\g'. \g' : b \\<^sub>D G a\ \ GE.cones_map g' ?G\ = \ \ g' = ?g" proof - fix g' assume g': "\g' : b \\<^sub>D G a\ \ GE.cones_map g' ?G\ = \" have 1: "\\ a g' : F b \\<^sub>C a\" using g' a \_in_hom by simp have 2: "E.cones_map (\ a g') \ = ?\'" proof fix j have "\J.arr j \ E.cones_map (\ a g') \ j = ?\' j" using 1 \.cone_axioms by auto moreover have "J.arr j \ E.cones_map (\ a g') \ j = ?\' j" proof - fix j assume j: "J.arr j" have "E.cones_map (\ a g') \ j = \ j \\<^sub>C \ a g'" using 1 \.cone_axioms \.is_extensional by auto also have "... = (\ j \\<^sub>C Adj.\ a) \\<^sub>C F g'" using j a g' Adj.\_in_terms_of_\ C.comp_assoc Adj.\_def by auto also have "... = (Adj.\ (C.cod (E j)) \\<^sub>C F (G (\ j))) \\<^sub>C F g'" using j a g' Adj.\.naturality [of "\ j"] by simp also have "... = Adj.\ (C.cod (E j)) \\<^sub>C F (\ j)" using j a g' G\.cone_axioms C.comp_assoc by auto finally show "E.cones_map (\ a g') \ j = ?\' j" by (simp add: j) qed ultimately show "E.cones_map (\ a g') \ j = ?\' j" by auto qed have "\ a g' = f" proof - have "\!f. \f : F b \\<^sub>C a\ \ E.cones_map f \ = ?\'" using cone_\' \.is_universal by simp moreover have "\\ a g' : F b \\<^sub>C a\ \ E.cones_map (\ a g') \ = ?\'" using 1 2 by simp ultimately show ?thesis using ex1E [of "\f. \f : F b \\<^sub>C a\ \ E.cones_map f \ = ?\'" "\ a g' = f"] using 1 2 Adj.\.is_extensional C.comp_null(2) C.ex_un_null \.cone_axioms f mem_Collect_eq restrict_apply by blast qed hence "\ b (\ a g') = \ b f" by auto hence "g' = \ b f" using \.ide_apex g' by (simp add: \_\) moreover have "?g = \ b f" using f Adj.\_in_terms_of_\ \.ide_apex Adj.\_def by auto ultimately show "g' = ?g" by argo qed ultimately show "\!g. \g : b \\<^sub>D G a\ \ GE.cones_map g ?G\ = \" by blast qed have "GE.limit_cone (G a) ?G\" .. thus ?thesis by auto qed end section "Special Kinds of Limits" subsection "Terminal Objects" text\ An object of a category @{term C} is a terminal object if and only if it is a limit of the empty diagram in @{term C}. \ locale empty_diagram = diagram J C D for J :: "'j comp" (infixr "\\<^sub>J" 55) and C :: "'c comp" (infixr "\" 55) and D :: "'j \ 'c" + assumes is_empty: "\J.arr j" begin lemma has_as_limit_iff_terminal: shows "has_as_limit a \ C.terminal a" proof assume a: "has_as_limit a" show "C.terminal a" proof have "\\. limit_cone a \" using a by auto from this obtain \ where \: "limit_cone a \" by blast interpret \: limit_cone J C D a \ using \ by auto have cone_\: "cone a \" .. show "C.ide a" using \.ide_apex by auto have 1: "\ = (\j. C.null)" using is_empty \.is_extensional by auto show "\a'. C.ide a' \ \!f. 
\f : a' \ a\" proof - fix a' assume a': "C.ide a'" interpret A': constant_functor J C a' apply unfold_locales using a' by auto let ?\' = "\j. C.null" have cone_\': "cone a' ?\'" using a' is_empty apply unfold_locales by auto hence "\!f. \f : a' \ a\ \ cones_map f \ = ?\'" using \.is_universal by force moreover have "\f. \f : a' \ a\ \ cones_map f \ = ?\'" using 1 cone_\ by auto ultimately show "\!f. \f : a' \ a\" by blast qed qed next assume a: "C.terminal a" show "has_as_limit a" proof - let ?\ = "\j. C.null" have "C.ide a" using a C.terminal_def by simp interpret A: constant_functor J C a apply unfold_locales using \C.ide a\ by simp interpret \: cone J C D a ?\ using \C.ide a\ is_empty by (unfold_locales, auto) have cone_\: "cone a ?\" .. have 1: "\a' \'. cone a' \' \ \' = (\j. C.null)" proof - fix a' \' assume \': "cone a' \'" interpret \': cone J C D a' \' using \' by auto show "\' = (\j. C.null)" using is_empty \'.is_extensional by metis qed have "limit_cone a ?\" proof fix a' \' assume \': "cone a' \'" have 2: "\' = (\j. C.null)" using 1 \' by simp interpret \': cone J C D a' \' using \' by auto have "\!f. \f : a' \ a\" using a C.terminal_def \'.ide_apex by simp moreover have "\f. \f : a' \ a\ \ cones_map f ?\ = \'" using 1 2 cones_map_mapsto cone_\ \'.cone_axioms mem_Collect_eq by blast ultimately show "\!f. \f : a' \ a\ \ cones_map f (\j. C.null) = \'" by blast qed thus ?thesis by auto qed qed end subsection "Products" text\ A \emph{product} in a category @{term C} is a limit of a discrete diagram in @{term C}. \ locale discrete_diagram = J: category J + diagram J C D for J :: "'j comp" (infixr "\\<^sub>J" 55) and C :: "'c comp" (infixr "\" 55) and D :: "'j \ 'c" + assumes is_discrete: "J.arr = J.ide" begin abbreviation mkCone where "mkCone F \ (\j. if J.arr j then F j else C.null)" lemma cone_mkCone: assumes "C.ide a" and "\j. J.arr j \ \F j : a \ D j\" shows "cone a (mkCone F)" proof - interpret A: constant_functor J C a using assms(1) by unfold_locales auto show "cone a (mkCone F)" using assms(2) is_discrete apply unfold_locales apply auto apply (metis C.in_homE C.comp_cod_arr) using C.comp_arr_ide by fastforce qed lemma mkCone_cone: assumes "cone a \" shows "mkCone \ = \" proof - interpret \: cone J C D a \ using assms by auto show "mkCone \ = \" using \.is_extensional by auto qed end text\ The following locale defines a discrete diagram in a category @{term C}, given an index set @{term I} and a function @{term D} mapping @{term I} to objects of @{term C}. Here we obtain the diagram shape @{term J} using a discrete category construction that allows us to directly identify the objects of @{term J} with the elements of @{term I}, however this construction can only be applied in case the set @{term I} is not the universe of its element type. \ locale discrete_diagram_from_map = J: discrete_category I null + C: category C for I :: "'i set" and C :: "'c comp" (infixr "\" 55) and D :: "'i \ 'c" and null :: 'i + assumes maps_to_ide: "i \ I \ C.ide (D i)" begin definition map where "map j \ if J.arr j then D j else C.null" end sublocale discrete_diagram_from_map \ discrete_diagram J.comp C map using map_def maps_to_ide J.arr_char J.Null_not_in_Obj J.null_char by unfold_locales auto locale product_cone = J: category J + C: category C + D: discrete_diagram J C D + limit_cone J C D a \ for J :: "'j comp" (infixr "\\<^sub>J" 55) and C :: "'c comp" (infixr "\" 55) and D :: "'j \ 'c" and a :: 'c and \ :: "'j \ 'c" begin lemma is_cone: shows "D.cone a \" .. 
text\ The following versions of @{prop is_universal} and @{prop induced_arrowI} from the \limit_cone\ locale are specialized to the case in which the underlying diagram is a product diagram. \ lemma is_universal': assumes "C.ide b" and "\j. J.arr j \ \F j: b \ D j\" shows "\!f. \f : b \ a\ \ (\j. J.arr j \ \ j \ f = F j)" proof - let ?\ = "D.mkCone F" interpret B: constant_functor J C b using assms(1) by unfold_locales auto have cone_\: "D.cone b ?\" using assms D.is_discrete D.cone_mkCone by blast interpret \: cone J C D b ?\ using cone_\ by auto have "\!f. \f : b \ a\ \ D.cones_map f \ = ?\" using cone_\ is_universal by force moreover have "\f. \f : b \ a\ \ D.cones_map f \ = ?\ \ (\j. J.arr j \ \ j \ f = F j)" proof - fix f assume f: "\f : b \ a\" show "D.cones_map f \ = ?\ \ (\j. J.arr j \ \ j \ f = F j)" proof assume 1: "D.cones_map f \ = ?\" show "\j. J.arr j \ \ j \ f = F j" proof - have "\j. J.arr j \ \ j \ f = F j" proof - fix j assume j: "J.arr j" have "\ j \ f = D.cones_map f \ j" using j f cone_axioms by force also have "... = F j" using j 1 by simp finally show "\ j \ f = F j" by auto qed thus ?thesis by auto qed next assume 1: "\j. J.arr j \ \ j \ f = F j" show "D.cones_map f \ = ?\" using 1 f is_cone \.is_extensional D.is_discrete is_cone cone_\ by auto qed qed ultimately show ?thesis by blast qed abbreviation induced_arrow' :: "'c \ ('j \ 'c) \ 'c" where "induced_arrow' b F \ induced_arrow b (D.mkCone F)" lemma induced_arrowI': assumes "C.ide b" and "\j. J.arr j \ \F j : b \ D j\" shows "\j. J.arr j \ \ j \ induced_arrow' b F = F j" proof - interpret B: constant_functor J C b using assms(1) by unfold_locales auto interpret \: cone J C D b \D.mkCone F\ using assms D.cone_mkCone by blast have cone_\: "D.cone b (D.mkCone F)" .. hence 1: "D.cones_map (induced_arrow' b F) \ = D.mkCone F" using induced_arrowI by blast fix j assume j: "J.arr j" have "\ j \ induced_arrow' b F = D.cones_map (induced_arrow' b F) \ j" using induced_arrowI(1) cone_\ is_cone is_extensional by force also have "... = F j" using j 1 by auto finally show "\ j \ induced_arrow' b F = F j" by auto qed end context discrete_diagram begin lemma product_coneI: assumes "limit_cone a \" shows "product_cone J C D a \" by (meson assms discrete_diagram_axioms functor_axioms functor_def product_cone.intro) end context category begin definition has_as_product where "has_as_product J D a \ (\\. product_cone J C D a \)" lemma product_is_ide: assumes "has_as_product J D a" shows "ide a" proof - obtain \ where \: "product_cone J C D a \" using assms has_as_product_def by blast interpret \: product_cone J C D a \ using \ by auto show ?thesis using \.ide_apex by auto qed text\ A category has @{term I}-indexed products for an @{typ 'i}-set @{term I} if every @{term I}-indexed discrete diagram has a product. In order to reap the benefits of being able to directly identify the elements of a set I with the objects of discrete category it generates (thereby avoiding the use of coercion maps), it is necessary to assume that @{term "I \ UNIV"}. If we want to assert that a category has products indexed by the universe of some type @{typ 'i}, we have to pass to a larger type, such as @{typ "'i option"}. \ definition has_products where "has_products (I :: 'i set) \ I \ UNIV \ (\J D. discrete_diagram J C D \ Collect (partial_magma.arr J) = I \ (\a. has_as_product J D a))" lemma ex_productE: assumes "\a. has_as_product J D a" obtains a \ where "product_cone J C D a \" using assms has_as_product_def someI_ex [of "\a. 
has_as_product J D a"] by metis lemma has_products_if_has_limits: assumes "has_limits (undefined :: 'j)" and "I \ (UNIV :: 'j set)" shows "has_products I" proof (unfold has_products_def, intro conjI allI impI) show "I \ UNIV" by fact fix J D assume D: "discrete_diagram J C D \ Collect (partial_magma.arr J) = I" interpret D: discrete_diagram J C D using D by simp have 1: "\a. D.has_as_limit a" using assms D D.diagram_axioms D.J.category_axioms by (simp add: has_limits_of_shape_def has_limits_def) show "\a. has_as_product J D a" using 1 has_as_product_def D.product_coneI by blast qed lemma has_finite_products_if_has_finite_limits: assumes "\J :: 'j comp. (finite (Collect (partial_magma.arr J))) \ has_limits_of_shape J" and "finite (I :: 'j set)" and "I \ UNIV" shows "has_products I" proof (unfold has_products_def, intro conjI allI impI) show "I \ UNIV" by fact fix J D assume D: "discrete_diagram J C D \ Collect (partial_magma.arr J) = I" interpret D: discrete_diagram J C D using D by simp have 1: "\a. D.has_as_limit a" using assms D has_limits_of_shape_def D.diagram_axioms by auto show "\a. has_as_product J D a" using 1 has_as_product_def D.product_coneI by blast qed lemma has_products_preserved_by_bijection: assumes "has_products I" and "bij_betw \ I I'" and "I' \ UNIV" shows "has_products I'" proof (unfold has_products_def, intro conjI allI impI) show "I' \ UNIV" by fact show "\J' D'. discrete_diagram J' C D' \ Collect (partial_magma.arr J') = I' \ \a. has_as_product J' D' a" proof - fix J' D' assume 1: "discrete_diagram J' C D' \ Collect (partial_magma.arr J') = I'" interpret J': category J' using 1 by (simp add: discrete_diagram_def) interpret D': discrete_diagram J' C D' using 1 by simp interpret J: discrete_category I \SOME x. x \ I\ using assms has_products_def [of I] someI_ex [of "\x. x \ I"] by unfold_locales auto have 2: "Collect J.arr = I \ Collect J'.arr = I'" using 1 by auto have \: "bij_betw \ (Collect J.arr) (Collect J'.arr)" using 2 assms(2) by simp let ?\ = "\j. if J.arr j then \ j else J'.null" let ?\' = "\j'. if J'.arr j' then the_inv_into I \ j' else J.null" interpret \: "functor" J.comp J' ?\ proof - have "\ ` I = I'" using \ 2 bij_betw_def [of \ I I'] by simp hence "\j. J.arr j \ J'.arr (?\ j)" using 1 D'.is_discrete by auto thus "functor J.comp J' ?\" using D'.is_discrete J.is_discrete J.seqE by unfold_locales auto qed interpret \': "functor" J' J.comp ?\' proof - have "the_inv_into I \ ` I' = I" using assms(2) \ bij_betw_the_inv_into bij_betw_imp_surj_on by metis hence "\j'. J'.arr j' \ J.arr (?\' j')" using 2 D'.is_discrete J.is_discrete by auto thus "functor J' J.comp ?\'" using D'.is_discrete J.is_discrete J'.seqE by unfold_locales auto qed let ?D = "\i. D' (\ i)" interpret D: discrete_diagram_from_map I C ?D \SOME j. 
j \ I\ using assms 1 D'.is_discrete bij_betw_imp_surj_on \.preserves_ide by unfold_locales auto obtain a where a: "has_as_product J.comp D.map a" using assms D.discrete_diagram_axioms has_products_def [of I] by auto obtain \ where \: "product_cone J.comp C D.map a \" using a has_as_product_def by blast interpret \: product_cone J.comp C D.map a \ using \ by simp let ?\' = "\ o ?\'" interpret A: constant_functor J' C a using \.ide_apex by unfold_locales simp interpret \': natural_transformation J' C A.map D' ?\' proof - have "\.A.map \ ?\' = A.map" using \ A.map_def \'.preserves_arr \.A.is_extensional J.not_arr_null by auto moreover have "D.map \ ?\' = D'" proof fix j' have "J'.arr j' \ (D.map \ ?\') j' = D' j'" proof - assume 2: "J'.arr j'" have 3: "inj_on \ I" using assms(2) bij_betw_imp_inj_on by auto have "\ ` I = I'" by (metis (no_types) assms(2) bij_betw_imp_surj_on) hence "\ ` I = Collect J'.arr" using 1 by force thus ?thesis using 2 3 D.map_def \'.preserves_arr f_the_inv_into_f by fastforce qed moreover have "\ J'.arr j' \ (D.map \ ?\') j' = D' j'" using D.is_extensional D'.is_extensional by (simp add: J.Null_not_in_Obj J.null_char) ultimately show "(D.map \ ?\') j' = D' j'" by blast qed ultimately show "natural_transformation J' C A.map D' ?\'" using \.natural_transformation_axioms \'.as_nat_trans.natural_transformation_axioms horizontal_composite [of J' J.comp ?\' ?\' ?\' C \.A.map D.map \] by simp qed interpret \': cone J' C D' a ?\' .. interpret \': product_cone J' C D' a ?\' proof fix a' \' assume \': "D'.cone a' \'" interpret \': cone J' C D' a' \' using \' by simp show "\!f. \f : a' \ a\ \ D'.cones_map f (\ \ ?\') = \'" proof - let ?\ = "\' o ?\" interpret A': constant_functor J.comp C a' using \'.ide_apex by unfold_locales simp interpret \: natural_transformation J.comp C A'.map D.map ?\ proof - have "\'.A.map \ ?\ = A'.map" using \ \.preserves_arr A'.map_def \'.A.is_extensional by auto moreover have "D' \ ?\ = D.map" using \ D.map_def D'.is_extensional by auto ultimately show "natural_transformation J.comp C A'.map D.map ?\" using \'.natural_transformation_axioms \.as_nat_trans.natural_transformation_axioms horizontal_composite [of J.comp J' ?\ ?\ ?\ C \'.A.map D' \'] by simp qed interpret \: cone J.comp C D.map a' ?\ .. have *: "\!f. \f : a' \ a\ \ D.cones_map f \ = ?\" using \.is_universal \.cone_axioms by simp show "\!f. \f : a' \ a\ \ D'.cones_map f ?\' = \'" proof - have "\f. \f : a' \ a\ \ D'.cones_map f ?\' = \'" proof - obtain f where f: "\f : a' \ a\ \ D.cones_map f \ = ?\" using * by blast have "D'.cones_map f ?\' = \'" proof fix j' show "D'.cones_map f ?\' j' = \' j'" proof (cases "J'.arr j'") assume j': "\ J'.arr j'" show "D'.cones_map f ?\' j' = \' j'" using f j' \'.is_extensional \'.cone_axioms by auto next assume j': "J'.arr j'" show "D'.cones_map f ?\' j' = \' j'" proof - have "D'.cones_map f ?\' j' = \ (the_inv_into I \ j') \ f" using f j' \'.cone_axioms by auto also have "... = D.cones_map f \ (the_inv_into I \ j')" proof - have "arr f \ dom f = a' \ cod f = a" using f by blast thus ?thesis using \'.preserves_arr \.cone_\ j' by auto qed also have "... = (\' \ ?\) (the_inv_into I \ j')" using f by simp also have "... = \' j'" using assms(2) j' 2 bij_betw_def [of \ I I'] bij_betw_imp_inj_on \'.preserves_arr f_the_inv_into_f by fastforce finally show ?thesis by simp qed qed qed thus ?thesis using f by blast qed moreover have "\f f'. 
\ \f : a' \ a\; D'.cones_map f ?\' = \'; \f' : a' \ a\; D'.cones_map f' ?\' = \' \ \ f = f'" proof - fix f f' assume f: "\f : a' \ a\" and f': "\f' : a' \ a\" and f\': "D'.cones_map f ?\' = \'" and f'\': "D'.cones_map f' ?\' = \'" have "D.cones_map f \ = \' \ ?\ \ D.cones_map f' \ = \' o ?\" proof (intro conjI) show "D.cones_map f \ = \' \ ?\" proof fix j have "\ J.arr j \ D.cones_map f \ j = (\' \ ?\) j" using f f\' \.cone_axioms \.is_extensional by auto moreover have "J.arr j \ D.cones_map f \ j = (\' \ ?\) j" proof - assume j: "J.arr j" have 1: "j = the_inv_into I \ (\ j)" using assms(2) j \ the_inv_into_f_f bij_betw_imp_inj_on J.arr_char by metis have "D.cones_map f \ j = D.cones_map f \ (the_inv_into I \ (\ j))" using 1 by simp also have "... = (\' \ ?\) j" using f j f\' 1 \.cone_axioms \'.cone_axioms \.preserves_arr by auto finally show "D.cones_map f \ j = (\' \ ?\) j" by blast qed ultimately show "D.cones_map f \ j = (\' \ ?\) j" by blast qed show "D.cones_map f' \ = \' \ ?\" proof fix j have "\ J.arr j \ D.cones_map f' \ j = (\' \ ?\) j" using f' f\' \.cone_axioms \.is_extensional by auto moreover have "J.arr j \ D.cones_map f' \ j = (\' \ ?\) j" proof - assume j: "J.arr j" have 1: "j = the_inv_into I \ (\ j)" using assms(2) j \ the_inv_into_f_f bij_betw_imp_inj_on J.arr_char by metis have "D.cones_map f' \ j = D.cones_map f' \ (the_inv_into I \ (\ j))" using 1 by simp also have "... = (\' \ ?\) j" using f' j f'\' 1 \.cone_axioms \'.cone_axioms \.preserves_arr by auto finally show "D.cones_map f' \ j = (\' \ ?\) j" by blast qed ultimately show "D.cones_map f' \ j = (\' \ ?\) j" by blast qed qed thus "f = f'" using f f' * by auto qed ultimately show ?thesis by blast qed qed qed have "has_as_product J' D' a" using has_as_product_def \'.product_cone_axioms by auto thus "\a. has_as_product J' D' a" by blast qed qed lemma ide_is_unary_product: assumes "ide a" shows "\m n :: nat. m \ n \ has_as_product (discrete_category.comp {m :: nat} (n :: nat)) (\i. if i = m then a else null) a" proof - fix m n :: nat assume neq: "m \ n" have "{m :: nat} \ UNIV" proof - have "finite {m :: nat}" by simp moreover have "\finite (UNIV :: nat set)" by simp ultimately show ?thesis by fastforce qed interpret J: discrete_category "{m :: nat}" \n :: nat\ using neq \{m :: nat} \ UNIV\ by unfold_locales auto let ?D = "\i. if i = m then a else null" interpret D: discrete_diagram J.comp C ?D apply unfold_locales using assms J.null_char neq apply auto by metis interpret A: constant_functor J.comp C a using assms by unfold_locales auto show "has_as_product J.comp ?D a" proof (unfold has_as_product_def) let ?\ = "\i :: nat. if i = m then a else null" interpret \: natural_transformation J.comp C A.map ?D ?\ using assms J.arr_char J.dom_char J.cod_char by unfold_locales auto interpret \: cone J.comp C ?D a ?\ .. interpret \: product_cone J.comp C ?D a ?\ proof fix a' \' assume \': "D.cone a' \'" interpret \': cone J.comp C ?D a' \' using \' by auto show "\!f. \f : a' \ a\ \ D.cones_map f ?\ = \'" proof show "\\' m : a' \ a\ \ D.cones_map (\' m) ?\ = \'" proof show 1: "\\' m : a' \ a\" using \'.preserves_hom \'.A.map_def J.arr_char J.dom_char J.cod_char by auto show "D.cones_map (\' m) ?\ = \'" proof fix j show "D.cones_map (\' m) (\i. if i = m then a else null) j = \' j" using J.arr_char J.dom_char J.cod_char J.ide_char \.cone_axioms comp_cod_arr apply (cases "j = m") apply simp using \'.is_extensional by simp qed qed show "\f. 
\f : a' \ a\ \ D.cones_map f ?\ = \' \ f = \' m" proof - fix f assume f: "\f : a' \ a\ \ D.cones_map f ?\ = \'" show "f = \' m" using assms f \'.preserves_hom J.arr_char J.dom_char J.cod_char \.cone_axioms comp_cod_arr by auto qed qed qed show "\\. product_cone J.comp C (\i. if i = m then a else null) a \" using \.product_cone_axioms by blast qed qed lemma has_unary_products: assumes "card I = 1" and "I \ UNIV" shows "has_products I" proof - obtain i where i: "I = {i}" using assms card_1_singletonE by blast obtain n where n: "n \ I" using assms by auto have "has_products {1 :: nat}" proof (unfold has_products_def, intro conjI) show "{1 :: nat} \ UNIV" by auto show "\(J :: nat comp) D. discrete_diagram J (\) D \ Collect (partial_magma.arr J) = {1} \ (\a. has_as_product J D a)" proof fix J :: "nat comp" show "\D. discrete_diagram J (\) D \ Collect (partial_magma.arr J) = {1} \ (\a. has_as_product J D a)" proof (intro allI impI) fix D assume D: "discrete_diagram J (\) D \ Collect (partial_magma.arr J) = {1}" interpret D: discrete_diagram J C D using D by auto interpret J: discrete_category \{1 :: nat}\ D.J.null by (metis D D.J.not_arr_null discrete_category.intro mem_Collect_eq) have 1: "has_as_product J.comp D (D 1)" proof - have "has_as_product J.comp (\i. if i = 1 then D 1 else null) (D 1)" using ide_is_unary_product D.preserves_ide D.is_discrete D J.null_char by (metis J.Null_not_in_Obj insertCI mem_Collect_eq) moreover have "D = (\i. if i = 1 then D 1 else null)" proof fix j show "D j = (if j = 1 then D 1 else null)" using D D.is_extensional by auto qed ultimately show ?thesis by simp qed moreover have "J = J.comp" proof - have "\j j'. J j j' = J.comp j j'" proof - fix j j' show "J j j' = J.comp j j'" using J.comp_char D.is_discrete D by (metis D.J.category_axioms D.J.ext D.J.ide_def J.null_char category.seqE mem_Collect_eq) qed thus ?thesis by auto qed ultimately show "\a. has_as_product J D a" by auto qed qed qed moreover have "bij_betw (\k. if k = 1 then i else n) {1 :: nat} I" using i by (intro bij_betwI') auto ultimately show "has_products I" using assms has_products_preserved_by_bijection by blast qed end subsection "Equalizers" text\ An \emph{equalizer} in a category @{term C} is a limit of a parallel pair of arrows in @{term C}. \ locale parallel_pair_diagram = J: parallel_pair + C: category C for C :: "'c comp" (infixr "\" 55) and f0 :: 'c and f1 :: 'c + assumes is_parallel: "C.par f0 f1" begin no_notation J.comp (infixr "\" 55) notation J.comp (infixr "\\<^sub>J" 55) definition map where "map \ (\j. if j = J.Zero then C.dom f0 else if j = J.One then C.cod f0 else if j = J.j0 then f0 else if j = J.j1 then f1 else C.null)" lemma map_simp: shows "map J.Zero = C.dom f0" and "map J.One = C.cod f0" and "map J.j0 = f0" and "map J.j1 = f1" proof - show "map J.Zero = C.dom f0" using map_def by metis show "map J.One = C.cod f0" using map_def J.Zero_not_eq_One by metis show "map J.j0 = f0" using map_def J.Zero_not_eq_j0 J.One_not_eq_j0 by metis show "map J.j1 = f1" using map_def J.Zero_not_eq_j1 J.One_not_eq_j1 J.j0_not_eq_j1 by metis qed end sublocale parallel_pair_diagram \ diagram J.comp C map apply unfold_locales apply (simp add: J.arr_char map_def) using map_def is_parallel J.arr_char J.cod_simp J.dom_simp apply auto[2] proof - show 1: "\j. 
J.arr j \ C.cod (map j) = map (J.cod j)" using is_parallel map_simp(1-4) J.arr_char by auto next fix j j' assume jj': "J.seq j' j" show "map (j' \\<^sub>J j) = map j' \ map j" proof - have 1: "(j = J.Zero \ j' \ J.One) \ (j \ J.Zero \ j' = J.One)" using jj' J.seq_char by blast moreover have "j = J.Zero \ j' \ J.One \ ?thesis" by (metis (no_types, lifting) C.arr_dom_iff_arr C.comp_arr_dom C.dom_dom J.comp_simp(1) jj' map_simp(1,3-4) J.arr_char is_parallel) moreover have "j \ J.Zero \ j' = J.One \ ?thesis" by (metis (no_types, lifting) C.comp_arr_dom C.comp_cod_arr C.seqE J.comp_char jj' map_simp(2-4) J.arr_char is_parallel) ultimately show ?thesis by blast qed qed context parallel_pair_diagram begin definition mkCone where "mkCone e \ \j. if J.arr j then if j = J.Zero then e else f0 \ e else C.null" abbreviation is_equalized_by where "is_equalized_by e \ C.seq f0 e \ f0 \ e = f1 \ e" abbreviation has_as_equalizer where "has_as_equalizer e \ limit_cone (C.dom e) (mkCone e)" lemma cone_mkCone: assumes "is_equalized_by e" shows "cone (C.dom e) (mkCone e)" proof - interpret E: constant_functor J.comp C \C.dom e\ apply unfold_locales using assms by auto show "cone (C.dom e) (mkCone e)" using assms mkCone_def apply unfold_locales apply auto[2] using C.dom_comp C.seqE C.cod_comp J.Zero_not_eq_One J.arr_char' J.cod_char map_def apply (metis (no_types, lifting) C.not_arr_null parallel_pair.cod_simp(1) preserves_arr) proof - show "\j. J.arr j \ map j \ mkCone e (J.dom j) = mkCone e j" proof - fix j assume j: "J.arr j" have 1: "j = J.Zero \ map j \ mkCone e (J.dom j) = mkCone e j" using assms j mkCone_def C.cod_comp by (metis (no_types, lifting) C.comp_cod_arr J.arr_char J.dom_char map_def J.dom_simp(2)) show "map j \ mkCone e (J.dom j) = mkCone e j" by (metis (no_types, lifting) 1 C.comp_cod_arr C.seqE assms j map_simp(1) mkCone_def J.dom_simp(1)) qed next show "\j. 
J.arr j \ mkCone e (J.cod j) \ E.map j = mkCone e j" by (metis (no_types, lifting) C.comp_arr_dom C.comp_assoc C.seqE J.arr_cod_iff_arr J.seqI J.seq_char assms E.map_simp mkCone_def J.cod_simp(1) J.dom_simp(1)) qed qed lemma is_equalized_by_cone: assumes "cone a \" shows "is_equalized_by (\ (J.Zero))" proof - interpret \: cone J.comp C map a \ using assms by auto show ?thesis by (metis (no_types, lifting) J.arr_char J.cod_char cone_def \.component_in_hom \.is_natural_1 \.naturality assms C.in_homE constant_functor.map_simp J.dom_simp(3-4) map_simp(3-4)) qed lemma mkCone_cone: assumes "cone a \" shows "mkCone (\ J.Zero) = \" proof - interpret \: cone J.comp C map a \ using assms by auto have 1: "is_equalized_by (\ J.Zero)" using assms is_equalized_by_cone by blast show ?thesis proof fix j have "j = J.Zero \ mkCone (\ J.Zero) j = \ j" using mkCone_def \.is_extensional by simp moreover have "j = J.One \ j = J.j0 \ j = J.j1 \ mkCone (\ J.Zero) j = \ j" using J.arr_char J.cod_char J.dom_char J.seq_char mkCone_def \.is_natural_1 \.is_natural_2 \.A.map_simp map_def by (metis (no_types, lifting) J.Zero_not_eq_j0 J.dom_simp(2)) ultimately have "J.arr j \ mkCone (\ J.Zero) j = \ j" using J.arr_char by auto thus "mkCone (\ J.Zero) j = \ j" using mkCone_def \.is_extensional by fastforce qed qed end locale equalizer_cone = J: parallel_pair + C: category C + D: parallel_pair_diagram C f0 f1 + limit_cone J.comp C D.map "C.dom e" "D.mkCone e" for C :: "'c comp" (infixr "\" 55) and f0 :: 'c and f1 :: 'c and e :: 'c begin lemma equalizes: shows "D.is_equalized_by e" proof show "C.seq f0 e" proof (intro C.seqI) show "C.arr e" using ide_apex C.arr_dom_iff_arr by fastforce show "C.arr f0" using D.map_simp D.preserves_arr J.arr_char by metis show "C.dom f0 = C.cod e" using J.arr_char J.ide_char D.mkCone_def D.map_simp preserves_cod [of J.Zero] by auto qed show "f0 \ e = f1 \ e" using D.map_simp D.mkCone_def J.arr_char naturality [of J.j0] naturality [of J.j1] by force qed lemma is_universal': assumes "D.is_equalized_by e'" shows "\!h. \h : C.dom e' \ C.dom e\ \ e \ h = e'" proof - have "D.cone (C.dom e') (D.mkCone e')" using assms D.cone_mkCone by blast moreover have 0: "D.cone (C.dom e) (D.mkCone e)" .. ultimately have 1: "\!h. \h : C.dom e' \ C.dom e\ \ D.cones_map h (D.mkCone e) = D.mkCone e'" using is_universal [of "C.dom e'" "D.mkCone e'"] by auto have 2: "\h. \h : C.dom e' \ C.dom e\ \ D.cones_map h (D.mkCone e) = D.mkCone e' \ e \ h = e'" proof - fix h assume h: "\h : C.dom e' \ C.dom e\" show "D.cones_map h (D.mkCone e) = D.mkCone e' \ e \ h = e'" proof assume 3: "D.cones_map h (D.mkCone e) = D.mkCone e'" show "e \ h = e'" proof - have "e' = D.mkCone e' J.Zero" using D.mkCone_def J.arr_char by simp also have "... = D.cones_map h (D.mkCone e) J.Zero" using 3 by simp also have "... 
= e \ h" using 0 h D.mkCone_def J.arr_char by auto finally show ?thesis by auto qed next assume e': "e \ h = e'" show "D.cones_map h (D.mkCone e) = D.mkCone e'" proof fix j have "\J.arr j \ D.cones_map h (D.mkCone e) j = D.mkCone e' j" using h cone_axioms D.mkCone_def by auto moreover have "j = J.Zero \ D.cones_map h (D.mkCone e) j = D.mkCone e' j" using h e' cone_\ D.mkCone_def J.arr_char [of J.Zero] by force moreover have "J.arr j \ j \ J.Zero \ D.cones_map h (D.mkCone e) j = D.mkCone e' j" using C.comp_assoc D.mkCone_def cone_\ e' h by auto ultimately show "D.cones_map h (D.mkCone e) j = D.mkCone e' j" by blast qed qed qed thus ?thesis using 1 by blast qed lemma induced_arrowI': assumes "D.is_equalized_by e'" shows "\induced_arrow (C.dom e') (D.mkCone e') : C.dom e' \ C.dom e\" and "e \ induced_arrow (C.dom e') (D.mkCone e') = e'" proof - interpret A': constant_functor J.comp C \C.dom e'\ using assms by (unfold_locales, auto) have cone: "D.cone (C.dom e') (D.mkCone e')" using assms D.cone_mkCone [of e'] by blast have "e \ induced_arrow (C.dom e') (D.mkCone e') = D.cones_map (induced_arrow (C.dom e') (D.mkCone e')) (D.mkCone e) J.Zero" using cone induced_arrowI(1) D.mkCone_def J.arr_char cone_\ by force also have "... = e'" proof - have "D.cones_map (induced_arrow (C.dom e') (D.mkCone e')) (D.mkCone e) = D.mkCone e'" using cone induced_arrowI by blast thus ?thesis using J.arr_char D.mkCone_def by simp qed finally have 1: "e \ induced_arrow (C.dom e') (D.mkCone e') = e'" by auto show "\induced_arrow (C.dom e') (D.mkCone e') : C.dom e' \ C.dom e\" using 1 cone induced_arrowI by simp show "e \ induced_arrow (C.dom e') (D.mkCone e') = e'" using 1 cone induced_arrowI by simp qed end context category begin definition has_as_equalizer where "has_as_equalizer f0 f1 e \ par f0 f1 \ parallel_pair_diagram.has_as_equalizer C f0 f1 e" definition has_equalizers where "has_equalizers = (\f0 f1. par f0 f1 \ (\e. has_as_equalizer f0 f1 e))" + lemma has_as_equalizerI [intro]: + assumes "par f g" and "seq f e" and "f \ e = g \ e" + and "\e'. \seq f e'; f \ e' = g \ e'\ \ \!h. e \ h = e'" + shows "has_as_equalizer f g e" + proof (unfold has_as_equalizer_def, intro conjI) + show "arr f" and "arr g" and "dom f = dom g" and "cod f = cod g" + using assms(1) by auto + interpret J: parallel_pair . + interpret D: parallel_pair_diagram C f g + using assms(1) by unfold_locales + show "D.has_as_equalizer e" + proof - + let ?\ = "D.mkCone e" + let ?a = "dom e" + interpret \: cone J.comp C D.map ?a ?\ + using assms(2-3) D.cone_mkCone [of e] by simp + interpret \: limit_cone J.comp C D.map ?a ?\ + proof + fix a' \' + assume \': "D.cone a' \'" + interpret \': cone J.comp C D.map a' \' + using \' by blast + have "seq f (\' J.Zero)" + using J.ide_char J.arr_char \'.preserves_hom + by (metis (no_types, lifting) D.map_simp(3) \'.is_natural_1 + \'.natural_transformation_axioms natural_transformation.preserves_reflects_arr + parallel_pair.dom_simp(3)) + moreover have "f \ (\' J.Zero) = g \ (\' J.Zero)" + using \' D.is_equalized_by_cone by blast + ultimately have 1: "\!h. 
e \ h = \' J.Zero" + using assms by blast + obtain h where h: "e \ h = \' J.Zero" + using 1 by blast + have 2: "D.is_equalized_by e" + using assms(2-3) by blast + have "\h : a' \ dom e\ \ D.cones_map h (D.mkCone e) = \'" + proof + show 3: "\h : a' \ dom e\" + using h \'.preserves_dom + by (metis (no_types, lifting) \'.component_in_hom \seq f (\' J.Zero)\ + category.has_codomain_iff_arr category.seqE category_axioms cod_in_codomains + domains_comp in_hom_def parallel_pair.arr_char) + show "D.cones_map h (D.mkCone e) = \'" + proof + fix j + have "D.cone (cod h) (D.mkCone e)" + using 2 3 D.cone_mkCone by auto + thus "D.cones_map h (D.mkCone e) j = \' j" + using h 2 3 D.cone_mkCone [of e] J.arr_char D.mkCone_def comp_assoc + apply (cases "J.arr j") + apply simp_all + apply (metis (no_types, lifting) D.mkCone_cone \') + using \'.is_extensional + by presburger + qed + qed + hence "\h. \h : a' \ dom e\ \ D.cones_map h (D.mkCone e) = \'" + by blast + moreover have "\h'. \h' : a' \ dom e\ \ D.cones_map h' (D.mkCone e) = \' \ h' = h" + proof (elim conjE) + fix h' + assume h': "\h' : a' \ dom e\" + assume eq: "D.cones_map h' (D.mkCone e) = \'" + have "e \ h' = \' J.Zero" + using eq D.cone_mkCone D.mkCone_def \'.preserves_reflects_arr \.cone_axioms + \seq f (\' J.Zero)\ eq h' in_homE mem_Collect_eq restrict_apply seqE + apply simp + by fastforce + moreover have "\!h. e \ h = \' J.Zero" + using assms(2,4) 1 category.seqI by blast + ultimately show "h' = h" + using h by auto + qed + ultimately show "\!h. \h : a' \ dom e\ \ D.cones_map h (D.mkCone e) = \'" + by blast + qed + show "D.has_as_equalizer e" + using assms \.limit_cone_axioms by blast + qed + qed + end section "Limits by Products and Equalizers" text\ A category with equalizers has limits of shape @{term J} if it has products indexed by the set of arrows of @{term J} and the set of objects of @{term J}. The proof is patterned after \cite{MacLane}, Theorem 2, page 109: \begin{quotation} \noindent ``The limit of \F: J \ C\ is the equalizer \e\ of \f, g: \\<^sub>i F\<^sub>i \ \\<^sub>u F\<^sub>c\<^sub>o\<^sub>d \<^sub>u (u \ arr J, i \ J)\ where \p\<^sub>u f = p\<^sub>c\<^sub>o\<^sub>d \<^sub>u\, \p\<^sub>u g = F\<^sub>u o p\<^sub>d\<^sub>o\<^sub>m \<^sub>u\; the limiting cone \\\ is \\\<^sub>j = p\<^sub>j e\, for \j \ J\.'' \end{quotation} \ locale category_with_equalizers = category C for C :: "'c comp" (infixr "\" 55) + assumes has_equalizers: "has_equalizers" begin lemma has_limits_if_has_products: fixes J :: "'j comp" (infixr "\\<^sub>J" 55) assumes "category J" and "has_products (Collect (partial_magma.ide J))" and "has_products (Collect (partial_magma.arr J))" shows "has_limits_of_shape J" proof (unfold has_limits_of_shape_def) interpret J: category J using assms(1) by auto have "\D. diagram J C D \ (\a \. limit_cone J C D a \)" proof - fix D assume D: "diagram J C D" interpret D: diagram J C D using D by auto text\ First, construct the two required products and their cones. \ interpret Obj: discrete_category \Collect J.ide\ J.null using J.not_arr_null J.ideD(1) mem_Collect_eq by (unfold_locales, blast) interpret \o: discrete_diagram_from_map \Collect J.ide\ C D J.null using D.preserves_ide by (unfold_locales, auto) have "\p. 
has_as_product Obj.comp \o.map p" using assms(2) \o.diagram_axioms has_products_def Obj.arr_char by (metis (no_types, lifting) Collect_cong \o.discrete_diagram_axioms mem_Collect_eq) from this obtain \o \o where \o: "product_cone Obj.comp C \o.map \o \o" using ex_productE [of Obj.comp \o.map] by auto interpret \o: product_cone Obj.comp C \o.map \o \o using \o by auto have \o_in_hom: "\j. Obj.arr j \ \\o j : \o \ D j\" using \o.preserves_dom \o.preserves_cod \o.map_def by auto interpret Arr: discrete_category \Collect J.arr\ J.null using J.not_arr_null by (unfold_locales, blast) interpret \a: discrete_diagram_from_map \Collect J.arr\ C \D o J.cod\ J.null by (unfold_locales, auto) have "\p. has_as_product Arr.comp \a.map p" using assms(3) has_products_def [of "Collect J.arr"] \a.discrete_diagram_axioms by blast from this obtain \a \a where \a: "product_cone Arr.comp C \a.map \a \a" using ex_productE [of Arr.comp \a.map] by auto interpret \a: product_cone Arr.comp C \a.map \a \a using \a by auto have \a_in_hom: "\j. Arr.arr j \ \\a j : \a \ D (J.cod j)\" using \a.preserves_cod \a.preserves_dom \a.map_def by auto text\ Next, construct a parallel pair of arrows \f, g: \o \ \a\ that expresses the commutativity constraints imposed by the diagram. \ interpret \o: constant_functor Arr.comp C \o using \o.ide_apex by (unfold_locales, auto) let ?\ = "\j. if Arr.arr j then \o (J.cod j) else null" interpret \: cone Arr.comp C \a.map \o ?\ using \o.ide_apex \o_in_hom \a.map_def \o.map_def \o.is_discrete \o.is_natural_2 comp_cod_arr by (unfold_locales, auto) let ?f = "\a.induced_arrow \o ?\" have f_in_hom: "\?f : \o \ \a\" using \.cone_axioms \a.induced_arrowI by blast have f_map: "\a.cones_map ?f \a = ?\" using \.cone_axioms \a.induced_arrowI by blast have ff: "\j. J.arr j \ \a j \ ?f = \o (J.cod j)" using \.component_in_hom \a.induced_arrowI' \o.ide_apex by auto let ?\' = "\j. if Arr.arr j then D j \ \o (J.dom j) else null" interpret \': cone Arr.comp C \a.map \o ?\' using \o.ide_apex \o_in_hom \o.map_def \a.map_def comp_arr_dom comp_cod_arr by (unfold_locales, auto) let ?g = "\a.induced_arrow \o ?\'" have g_in_hom: "\?g : \o \ \a\" using \'.cone_axioms \a.induced_arrowI by blast have g_map: "\a.cones_map ?g \a = ?\'" using \'.cone_axioms \a.induced_arrowI by blast have gg: "\j. J.arr j \ \a j \ ?g = D j \ \o (J.dom j)" using \'.component_in_hom \a.induced_arrowI' \o.ide_apex by force interpret PP: parallel_pair_diagram C ?f ?g using f_in_hom g_in_hom by (elim in_homE, unfold_locales, auto) from PP.is_parallel obtain e where equ: "PP.has_as_equalizer e" using has_equalizers has_equalizers_def has_as_equalizer_def by blast interpret EQU: limit_cone PP.J.comp C PP.map \dom e\ \PP.mkCone e\ using equ by auto interpret EQU: equalizer_cone C ?f ?g e .. text\ An arrow @{term h} with @{term "cod h = \o"} equalizes @{term f} and @{term g} if and only if it satisfies the commutativity condition required for a cone over @{term D}. \ have E: "\h. \h : dom h \ \o\ \ ?f \ h = ?g \ h \ (\j. J.arr j \ ?\ j \ h = ?\' j \ h)" proof fix h assume h: "\h : dom h \ \o\" show "?f \ h = ?g \ h \ \j. J.arr j \ ?\ j \ h = ?\' j \ h" proof - assume E: "?f \ h = ?g \ h" have "\j. J.arr j \ ?\ j \ h = ?\' j \ h" proof - fix j assume j: "J.arr j" have "?\ j \ h = \a.cones_map ?f \a j \ h" using j f_map by fastforce also have "... = \a j \ ?f \ h" using j f_in_hom \a.map_def \a.cone_\ comp_assoc by auto also have "... = \a j \ ?g \ h" using j E by simp also have "... 
= \a.cones_map ?g \a j \ h" using j g_in_hom \a.map_def \a.cone_\ comp_assoc by auto also have "... = ?\' j \ h" using j g_map by force finally show "?\ j \ h = ?\' j \ h" by auto qed thus "\j. J.arr j \ ?\ j \ h = ?\' j \ h" by blast qed show "\j. J.arr j \ ?\ j \ h = ?\' j \ h \ ?f \ h = ?g \ h" proof - assume 1: "\j. J.arr j \ ?\ j \ h = ?\' j \ h" have 2: "\j. j \ Collect J.arr \ \a j \ ?f \ h = \a j \ ?g \ h" proof - fix j assume j: "j \ Collect J.arr" have "\a j \ ?f \ h = (\a j \ ?f) \ h" using comp_assoc by simp also have "... = ?\ j \ h" using ff j by force also have "... = ?\' j \ h" using 1 j by auto also have "... = (\a j \ ?g) \ h" using gg j by force also have "... = \a j \ ?g \ h" using comp_assoc by simp finally show "\a j \ ?f \ h = \a j \ ?g \ h" by auto qed show "C ?f h = C ?g h" proof - have "\j. Arr.arr j \ \\a j \ ?f \ h : dom h \ \a.map j\" using f_in_hom h \a_in_hom by (elim in_homE, auto) hence 3: "\!k. \k : dom h \ \a\ \ (\j. Arr.arr j \ \a j \ k = \a j \ ?f \ h)" using h \a \a.is_universal' [of "dom h" "\j. \a j \ ?f \ h"] \a.map_def ide_dom [of h] by blast have 4: "\P x x'. \!k. P k x \ P x x \ P x' x \ x' = x" by auto let ?P = "\ k x. \k : dom h \ \a\ \ (\j. j \ Collect J.arr \ \a j \ k = \a j \ x)" have "?P (?g \ h) (?g \ h)" using g_in_hom h by force moreover have "?P (?f \ h) (?g \ h)" using 2 f_in_hom g_in_hom h by force ultimately show ?thesis using 3 4 [of ?P "?f \ h" "?g \ h"] by auto qed qed qed have E': "\e. \e : dom e \ \o\ \ ?f \ e = ?g \ e \ (\j. J.arr j \ (D (J.cod j) \ \o (J.cod j) \ e) \ dom e = D j \ \o (J.dom j) \ e)" proof - have 1: "\e j. \e : dom e \ \o\ \ J.arr j \ ?\ j \ e = (D (J.cod j) \ \o (J.cod j) \ e) \ dom e" proof - fix e j assume e: "\e : dom e \ \o\" assume j: "J.arr j" have "\\o (J.cod j) \ e : dom e \ D (J.cod j)\" using e j \o_in_hom by auto thus "?\ j \ e = (D (J.cod j) \ \o (J.cod j) \ e) \ dom e" using j comp_arr_dom comp_cod_arr by (elim in_homE, auto) qed have 2: "\e j. \e : dom e \ \o\ \ J.arr j \ ?\' j \ e = D j \ \o (J.dom j) \ e" by (simp add: comp_assoc) show "\e. \e : dom e \ \o\ \ ?f \ e = ?g \ e \ (\j. J.arr j \ (D (J.cod j) \ \o (J.cod j) \ e) \ dom e = D j \ \o (J.dom j) \ e)" using 1 2 E by presburger qed text\ The composites of @{term e} with the projections from the product @{term \o} determine a limit cone @{term \} for @{term D}. The component of @{term \} at an object @{term j} of @{term[source=true] J} is the composite @{term "C (\o j) e"}. However, we need to extend @{term \} to all arrows @{term j} of @{term[source=true] J}, so the correct definition is @{term "\ j = C (D j) (C (\o (J.dom j)) e)"}. \ have e_in_hom: "\e : dom e \ \o\" using EQU.equalizes f_in_hom in_homI by (metis (no_types, lifting) seqE in_homE) have e_map: "C ?f e = C ?g e" using EQU.equalizes f_in_hom in_homI by fastforce interpret domE: constant_functor J C \dom e\ using e_in_hom by (unfold_locales, auto) let ?\ = "\j. if J.arr j then D j \ \o (J.dom j) \ e else null" have \: "\j. J.arr j \ \?\ j : dom e \ D (J.cod j)\" proof - fix j assume j: "J.arr j" show "\?\ j : dom e \ D (J.cod j)\" using j e_in_hom \o_in_hom [of "J.dom j"] by auto qed interpret \: cone J C D \dom e\ ?\ using \ comp_cod_arr e_in_hom e_map E' apply unfold_locales apply auto by (metis D.as_nat_trans.is_natural_1 comp_assoc) text\ If @{term \} is any cone over @{term D} then @{term \} restricts to a cone over @{term \o} for which the induced arrow to @{term \o} equalizes @{term f} and @{term g}. \ have R: "\a \. 
cone J C D a \ \ cone Obj.comp C \o.map a (\o.mkCone \) \ ?f \ \o.induced_arrow a (\o.mkCone \) = ?g \ \o.induced_arrow a (\o.mkCone \)" proof - fix a \ assume cone_\: "cone J C D a \" interpret \: cone J C D a \ using cone_\ by auto interpret A: constant_functor Obj.comp C a using \.ide_apex by (unfold_locales, auto) interpret \o: cone Obj.comp C \o.map a \\o.mkCone \\ using A.value_is_ide \o.map_def comp_cod_arr comp_arr_dom by (unfold_locales, auto) let ?e = "\o.induced_arrow a (\o.mkCone \)" have mkCone_\: "\o.mkCone \ \ \o.cones a" using \o.cone_axioms by auto have e: "\?e : a \ \o\" using mkCone_\ \o.induced_arrowI by simp have ee: "\j. J.ide j \ \o j \ ?e = \ j" proof - fix j assume j: "J.ide j" have "\o j \ ?e = \o.cones_map ?e \o j" using j e \o.cone_axioms by force also have "... = \o.mkCone \ j" using j mkCone_\ \o.induced_arrowI [of "\o.mkCone \" a] by fastforce also have "... = \ j" using j by simp finally show "\o j \ ?e = \ j" by auto qed have "\j. J.arr j \ (D (J.cod j) \ \o (J.cod j) \ ?e) \ dom ?e = D j \ \o (J.dom j) \ ?e" proof - fix j assume j: "J.arr j" have 1: "\\o (J.cod j) : \o \ D (J.cod j)\" using j \o_in_hom by simp have 2: "(D (J.cod j) \ \o (J.cod j) \ ?e) \ dom ?e = D (J.cod j) \ \o (J.cod j) \ ?e" proof - have "seq (D (J.cod j)) (\o (J.cod j))" using j 1 by auto moreover have "seq (\o (J.cod j)) ?e" using j e by fastforce ultimately show ?thesis using comp_arr_dom by auto qed also have 3: "... = \o (J.cod j) \ ?e" using j e 1 comp_cod_arr by (elim in_homE, auto) also have "... = D j \ \o (J.dom j) \ ?e" using j e ee 2 3 \.naturality \.A.map_simp \.ide_apex comp_cod_arr by auto finally show "(D (J.cod j) \ \o (J.cod j) \ ?e) \ dom ?e = D j \ \o (J.dom j) \ ?e" by auto qed hence "C ?f ?e = C ?g ?e" using E' \o.induced_arrowI \o.cone_axioms mem_Collect_eq by blast thus "cone Obj.comp C \o.map a (\o.mkCone \) \ C ?f ?e = C ?g ?e" using \o.cone_axioms by auto qed text\ Finally, show that @{term \} is a limit cone. \ interpret \: limit_cone J C D \dom e\ ?\ proof fix a \ assume cone_\: "cone J C D a \" interpret \: cone J C D a \ using cone_\ by auto interpret A: constant_functor Obj.comp C a using \.ide_apex by unfold_locales auto have cone_\o: "cone Obj.comp C \o.map a (\o.mkCone \)" using A.value_is_ide \o.map_def D.preserves_ide comp_cod_arr comp_arr_dom \.preserves_hom by unfold_locales auto show "\!h. \h : a \ dom e\ \ D.cones_map h ?\ = \" proof let ?e' = "\o.induced_arrow a (\o.mkCone \)" have e'_in_hom: "\?e' : a \ \o\" using cone_\ R \o.induced_arrowI by auto have e'_map: "?f \ ?e' = ?g \ ?e' \ \o.cones_map ?e' \o = \o.mkCone \" using cone_\ R \o.induced_arrowI [of "\o.mkCone \" a] by auto have equ: "PP.is_equalized_by ?e'" using e'_map e'_in_hom f_in_hom seqI' by blast let ?h = "EQU.induced_arrow a (PP.mkCone ?e')" have h_in_hom: "\?h : a \ dom e\" using EQU.induced_arrowI PP.cone_mkCone [of ?e'] e'_in_hom equ by fastforce have h_map: "PP.cones_map ?h (PP.mkCone e) = PP.mkCone ?e'" using EQU.induced_arrowI [of "PP.mkCone ?e'" a] PP.cone_mkCone [of ?e'] e'_in_hom equ by fastforce have 3: "D.cones_map ?h ?\ = \" proof fix j have "\J.arr j \ D.cones_map ?h ?\ j = \ j" using h_in_hom \.cone_axioms cone_\ \.is_extensional by force moreover have "J.arr j \ D.cones_map ?h ?\ j = \ j" proof - fix j assume j: "J.arr j" have 1: "\\o (J.dom j) \ e : dom e \ D (J.dom j)\" using j e_in_hom \o_in_hom [of "J.dom j"] by auto have "D.cones_map ?h ?\ j = ?\ j \ ?h" using h_in_hom j \.cone_axioms by auto also have "... 
= D j \ (\o (J.dom j) \ e) \ ?h" using j comp_assoc by simp also have "... = D j \ \ (J.dom j)" proof - have "(\o (J.dom j) \ e) \ ?h = \ (J.dom j)" proof - have "(\o (J.dom j) \ e) \ ?h = \o (J.dom j) \ e \ ?h" using j 1 e_in_hom h_in_hom \o arrI comp_assoc by auto also have "... = \o (J.dom j) \ ?e'" using equ e'_in_hom EQU.induced_arrowI' [of ?e'] by auto also have "... = \o.cones_map ?e' \o (J.dom j)" using j e'_in_hom \o.cone_axioms by auto also have "... = \ (J.dom j)" using j e'_map by simp finally show ?thesis by auto qed thus ?thesis by simp qed also have "... = \ j" using j \.is_natural_1 by simp finally show "D.cones_map ?h ?\ j = \ j" by auto qed ultimately show "D.cones_map ?h ?\ j = \ j" by auto qed show "\?h : a \ dom e\ \ D.cones_map ?h ?\ = \" using h_in_hom 3 by simp show "\h'. \h' : a \ dom e\ \ D.cones_map h' ?\ = \ \ h' = ?h" proof - fix h' assume h': "\h' : a \ dom e\ \ D.cones_map h' ?\ = \" have h'_in_hom: "\h' : a \ dom e\" using h' by simp have h'_map: "D.cones_map h' ?\ = \" using h' by simp show "h' = ?h" proof - have 1: "\e \ h' : a \ \o\ \ ?f \ e \ h' = ?g \ e \ h' \ \o.cones_map (C e h') \o = \o.mkCone \" proof - have 2: "\e \ h' : a \ \o\" using h'_in_hom e_in_hom by auto moreover have "?f \ e \ h' = ?g \ e \ h'" by (metis (no_types, lifting) EQU.equalizes comp_assoc) moreover have "\o.cones_map (e \ h') \o = \o.mkCone \" proof have "\o.cones_map (e \ h') \o = \o.cones_map h' (\o.cones_map e \o)" using \o.cone_axioms e_in_hom h'_in_hom \o.cones_map_comp [of e h'] by fastforce fix j have "\Obj.arr j \ \o.cones_map (e \ h') \o j = \o.mkCone \ j" using 2 e_in_hom h'_in_hom \o.cone_axioms by auto moreover have "Obj.arr j \ \o.cones_map (e \ h') \o j = \o.mkCone \ j" proof - assume j: "Obj.arr j" have "\o.cones_map (e \ h') \o j = \o j \ e \ h'" using 2 j \o.cone_axioms by auto also have "... = (\o j \ e) \ h'" using comp_assoc by auto also have "... = \o.mkCone ?\ j \ h'" using j e_in_hom \o_in_hom comp_ide_arr [of "D j" "\o j \ e"] by fastforce also have "... = \o.mkCone \ j" using j h' \.cone_axioms mem_Collect_eq by auto finally show "\o.cones_map (e \ h') \o j = \o.mkCone \ j" by auto qed ultimately show "\o.cones_map (e \ h') \o j = \o.mkCone \ j" by auto qed ultimately show ?thesis by auto qed have "\e \ h' : a \ \o\" using 1 by simp moreover have "e \ h' = ?e'" using 1 cone_\o e'_in_hom e'_map \o.is_universal \o by blast ultimately show "h' = ?h" using 1 h'_in_hom h'_map EQU.is_universal' [of "e \ h'"] EQU.induced_arrowI' [of ?e'] equ by (elim in_homE) auto qed qed qed qed have "limit_cone J C D (dom e) ?\" .. thus "\a \. limit_cone J C D a \" by auto qed thus "\D. diagram J C D \ (\a \. limit_cone J C D a \)" by blast qed end section "Limits in a Set Category" text\ In this section, we consider the special case of limits in a set category. \ locale diagram_in_set_category = J: category J + S: set_category S is_set + diagram J S D for J :: "'j comp" (infixr "\\<^sub>J" 55) and S :: "'s comp" (infixr "\" 55) and is_set :: "'s set \ bool" and D :: "'j \ 's" begin notation S.in_hom ("\_ : _ \ _\") text\ An object @{term a} of a set category @{term[source=true] S} is a limit of a diagram in @{term[source=true] S} if and only if there is a bijection between the set @{term "S.hom S.unity a"} of points of @{term a} and the set of cones over the diagram that have apex @{term S.unity}. \ lemma limits_are_sets_of_cones: shows "has_as_limit a \ S.ide a \ (\\. 
bij_betw \ (S.hom S.unity a) (cones S.unity))" proof text\ If \has_limit a\, then by the universal property of the limit cone, composition in @{term[source=true] S} yields a bijection between @{term "S.hom S.unity a"} and @{term "cones S.unity"}. \ assume a: "has_as_limit a" hence "S.ide a" using limit_cone_def cone.ide_apex by metis from a obtain \ where \: "limit_cone a \" by auto interpret \: limit_cone J S D a \ using \ by auto have "bij_betw (\f. cones_map f \) (S.hom S.unity a) (cones S.unity)" using \.bij_betw_hom_and_cones S.ide_unity by simp thus "S.ide a \ (\\. bij_betw \ (S.hom S.unity a) (cones S.unity))" using \S.ide a\ by blast next text\ Conversely, an arbitrary bijection @{term \} between @{term "S.hom S.unity a"} and cones unity extends pointwise to a natural bijection @{term "\ a'"} between @{term "S.hom a' a"} and @{term "cones a'"}, showing that @{term a} is a limit. In more detail, the hypotheses give us a correspondence between points of @{term a} and cones with apex @{term "S.unity"}. We extend this to a correspondence between functions to @{term a} and general cones, with each arrow from @{term a'} to @{term a} determining a cone with apex @{term a'}. If @{term "f \ hom a' a"} then composition with @{term f} takes each point @{term y} of @{term a'} to the point @{term "S f y"} of @{term a}. To this we may apply the given bijection @{term \} to obtain @{term "\ (S f y) \ cones S.unity"}. The component @{term "\ (S f y) j"} at @{term j} of this cone is a point of @{term "S.cod (D j)"}. Thus, @{term "f \ hom a' a"} determines a cone @{term \f} with apex @{term a'} whose component at @{term j} is the unique arrow @{term "\f j"} of @{term[source=true] S} such that @{term "\f j \ hom a' (cod (D j))"} and @{term "S (\f j) y = \ (S f y) j"} for all points @{term y} of @{term a'}. The cone @{term \a} corresponding to @{term "a \ S.hom a a"} is then a limit cone. \ assume a: "S.ide a \ (\\. bij_betw \ (S.hom S.unity a) (cones S.unity))" hence ide_a: "S.ide a" by auto show "has_as_limit a" proof - from a obtain \ where \: "bij_betw \ (S.hom S.unity a) (cones S.unity)" by blast have X: "\f j y. \ \f : S.dom f \ a\; J.arr j; \y : S.unity \ S.dom f\ \ \ \\ (f \ y) j : S.unity \ S.cod (D j)\" proof - fix f j y assume f: "\f : S.dom f \ a\" and j: "J.arr j" and y: "\y : S.unity \ S.dom f\" interpret \: cone J S D S.unity \\ (S f y)\ using f y \ bij_betw_imp_funcset funcset_mem by blast show "\\ (f \ y) j : S.unity \ S.cod (D j)\" using j by auto qed text\ We want to define the component @{term "\j \ S.hom (S.dom f) (S.cod (D j))"} at @{term j} of a cone by specifying how it acts by composition on points @{term "y \ S.hom S.unity (S.dom f)"}. We can do this because @{term[source=true] S} is a set category. \ let ?P = "\f j \j. \\j : S.dom f \ S.cod (D j)\ \ (\y. \y : S.unity \ S.dom f\ \ \j \ y = \ (f \ y) j)" let ?\ = "\f j. if J.arr j then (THE \j. ?P f j \j) else S.null" have \: "\f j. \ \f : S.dom f \ a\; J.arr j \ \ ?P f j (?\ f j)" proof - fix b f j assume f: "\f : S.dom f \ a\" and j: "J.arr j" interpret B: constant_functor J S \S.dom f\ using f by (unfold_locales) auto have "(\y. \ (f \ y) j) \ S.hom S.unity (S.dom f) \ S.hom S.unity (S.cod (D j))" using f j X Pi_I' by simp hence "\!\j. ?P f j \j" using f j S.fun_complete' by (elim S.in_homE) auto thus "?P f j (?\ f j)" using j theI' [of "?P f j"] by simp qed text\ The arrows @{term "\ f j"} are in fact the components of a cone with apex @{term "S.dom f"}. \ have cone: "\f. 
\f : S.dom f \ a\ \ cone (S.dom f) (?\ f)" proof - fix f assume f: "\f : S.dom f \ a\" interpret B: constant_functor J S \S.dom f\ using f by unfold_locales auto show "cone (S.dom f) (?\ f)" proof show "\j. \J.arr j \ ?\ f j = S.null" by simp fix j assume j: "J.arr j" have 0: "\?\ f j : S.dom f \ S.cod (D j)\" using f j \ by simp show "S.dom (?\ f j) = B.map (J.dom j)" using f j \ by auto show "S.cod (?\ f j) = D (J.cod j)" using f j \ by auto have par2: "S.par (?\ f (J.cod j) \ B.map j) (?\ f j)" using f j 0 \ [of f "J.cod j"] by (elim S.in_homE, auto) have nat: "\y. \y : S.unity \ S.dom f\ \ (D j \ ?\ f (J.dom j)) \ y = ?\ f j \ y \ (?\ f (J.cod j) \ B.map j) \ y = ?\ f j \ y" proof - fix y assume y: "\y : S.unity \ S.dom f\" show "(D j \ ?\ f (J.dom j)) \ y = ?\ f j \ y \ (?\ f (J.cod j) \ B.map j) \ y = ?\ f j \ y" proof have 1: "\ (f \ y) \ cones S.unity" using f y \ bij_betw_imp_funcset PiE S.seqI S.cod_comp S.dom_comp mem_Collect_eq by fastforce interpret \: cone J S D S.unity \\ (f \ y)\ using 1 by simp show "(D j \ ?\ f (J.dom j)) \ y = ?\ f j \ y" using J.arr_dom S.comp_assoc \ \.is_natural_1 f j y by presburger have "(?\ f (J.cod j) \ B.map j) \ y = ?\ f (J.cod j) \ y" using j B.map_simp par2 B.value_is_ide S.comp_arr_ide by (metis (no_types, lifting)) also have "... = \ (f \ y) (J.cod j)" using f y \ \.is_extensional by simp also have "... = \ (f \ y) j" using j \.is_natural_2 by (metis J.arr_cod \.A.map_simp J.cod_cod) also have "... = ?\ f j \ y" using f y \ \.is_extensional by simp finally show "(?\ f (J.cod j) \ B.map j) \ y = ?\ f j \ y" by auto qed qed show "D j \ ?\ f (J.dom j) = ?\ f j" proof - have "S.par (D j \ ?\ f (J.dom j)) (?\ f j)" using f j 0 \ [of f "J.dom j"] by (elim S.in_homE, auto) thus ?thesis using nat 0 apply (intro S.arr_eqI' [of "D j \ ?\ f (J.dom j)" "?\ f j"]) apply force by auto qed show "?\ f (J.cod j) \ B.map j = ?\ f j" using par2 nat 0 f j \ apply (intro S.arr_eqI' [of "?\ f (J.cod j) \ B.map j" "?\ f j"]) apply force by (metis (no_types, lifting) S.in_homE) qed qed interpret \a: cone J S D a \?\ a\ using a cone [of a] by fastforce text\ Finally, show that \\ a\ is a limit cone. \ interpret \a: limit_cone J S D a \?\ a\ proof fix a' \' assume cone_\': "cone a' \'" interpret \': cone J S D a' \' using cone_\' by auto show "\!f. \f : a' \ a\ \ cones_map f (?\ a) = \'" proof let ?\ = "inv_into (S.hom S.unity a) \" have \: "?\ \ cones S.unity \ S.hom S.unity a" using \ bij_betw_inv_into bij_betwE by blast let ?P = "\f. \f : a' \ a\ \ (\y. y \ S.hom S.unity a' \ f \ y = ?\ (cones_map y \'))" have 1: "\!f. ?P f" proof - have "(\y. ?\ (cones_map y \')) \ S.hom S.unity a' \ S.hom S.unity a" proof fix x assume "x \ S.hom S.unity a'" hence "\x : S.unity \ a'\" by simp hence "cones_map x \ cones a' \ cones S.unity" using cones_map_mapsto [of x] by (elim S.in_homE) auto hence "cones_map x \' \ cones S.unity" using cone_\' by blast thus "?\ (cones_map x \') \ S.hom S.unity a" using \ by auto qed thus ?thesis using S.fun_complete' a \'.ide_apex by simp qed let ?f = "THE f. 
?P f" have f: "?P ?f" using 1 theI' [of ?P] by simp have f_in_hom: "\?f : a' \ a\" using f by simp have f_map: "cones_map ?f (?\ a) = \'" proof - have 1: "cone a' (cones_map ?f (?\ a))" proof - have "cones_map ?f \ cones a \ cones a'" using f_in_hom cones_map_mapsto [of ?f] by (elim S.in_homE) auto hence "cones_map ?f (?\ a) \ cones a'" using \a.cone_axioms by blast thus ?thesis by simp qed interpret f\a: cone J S D a' \cones_map ?f (?\ a)\ using 1 by simp show ?thesis proof fix j have "\J.arr j \ cones_map ?f (?\ a) j = \' j" using 1 \'.is_extensional f\a.is_extensional by presburger moreover have "J.arr j \ cones_map ?f (?\ a) j = \' j" proof - assume j: "J.arr j" show "cones_map ?f (?\ a) j = \' j" proof (intro S.arr_eqI' [of "cones_map ?f (?\ a) j" "\' j"]) show par: "S.par (cones_map ?f (?\ a) j) (\' j)" using j \'.preserves_cod \'.preserves_dom \'.preserves_reflects_arr f\a.preserves_cod f\a.preserves_dom f\a.preserves_reflects_arr by presburger fix y assume "\y : S.unity \ S.dom (cones_map ?f (?\ a) j)\" hence y: "\y : S.unity \ a'\" using j f\a.preserves_dom by simp have 1: "\?\ a j : a \ D (J.cod j)\" using j \a.preserves_hom by force have 2: "\?f \ y : S.unity \ a\" using f_in_hom y by blast have "cones_map ?f (?\ a) j \ y = (?\ a j \ ?f) \ y" proof - have "S.cod ?f = a" using f_in_hom by blast thus ?thesis using j \a.cone_axioms by simp qed also have "... = ?\ a j \ ?f \ y" using 1 j y f_in_hom S.comp_assoc S.seqI' by blast also have "... = \ (a \ ?f \ y) j" using 1 2 ide_a f j y \ [of a] by (simp add: S.ide_in_hom) also have "... = \ (?f \ y) j" using a 2 y S.comp_cod_arr by (elim S.in_homE, auto) also have "... = \ (?\ (cones_map y \')) j" using j y f by simp also have "... = cones_map y \' j" proof - have "cones_map y \' \ cones S.unity" using cone_\' y cones_map_mapsto by force hence "\ (?\ (cones_map y \')) = cones_map y \'" using \ bij_betw_inv_into_right [of \] by simp thus ?thesis by auto qed also have "... = \' j \ y" using cone_\' j y by auto finally show "cones_map ?f (?\ a) j \ y = \' j \ y" by auto qed qed ultimately show "cones_map ?f (?\ a) j = \' j" by blast qed qed show "\?f : a' \ a\ \ cones_map ?f (?\ a) = \'" using f_in_hom f_map by simp show "\f'. \f' : a' \ a\ \ cones_map f' (?\ a) = \' \ f' = ?f" proof - fix f' assume f': "\f' : a' \ a\ \ cones_map f' (?\ a) = \'" have f'_in_hom: "\f' : a' \ a\" using f' by simp have f'_map: "cones_map f' (?\ a) = \'" using f' by simp show "f' = ?f" proof (intro S.arr_eqI' [of f' ?f]) show "S.par f' ?f" using f_in_hom f'_in_hom by (elim S.in_homE, auto) show "\y'. \y' : S.unity \ S.dom f'\ \ f' \ y' = ?f \ y'" proof - fix y' assume y': "\y' : S.unity \ S.dom f'\" have 0: "\ (f' \ y') = cones_map y' \'" proof fix j have 1: "\f' \ y' : S.unity \ a\" using f'_in_hom y' by auto hence 2: "\ (f' \ y') \ cones S.unity" using \ bij_betw_imp_funcset [of \ "S.hom S.unity a" "cones S.unity"] by auto interpret \'': cone J S D S.unity \\ (f' \ y')\ using 2 by auto have "\J.arr j \ \ (f' \ y') j = cones_map y' \' j" using f' y' cone_\' \''.is_extensional mem_Collect_eq restrict_apply by (elim S.in_homE, auto) moreover have "J.arr j \ \ (f' \ y') j = cones_map y' \' j" proof - assume j: "J.arr j" have 3: "\?\ a j : a \ D (J.cod j)\" using j \a.preserves_hom by force have "\ (f' \ y') j = \ (a \ f' \ y') j" using a f' y' j S.comp_cod_arr by (elim S.in_homE, auto) also have "... = ?\ a j \ f' \ y'" using 1 3 \ [of a] a f' y' j by fastforce also have "... = (?\ a j \ f') \ y'" using S.comp_assoc by simp also have "... 
= cones_map f' (?\ a) j \ y'" using f' y' j \a.cone_axioms by auto also have "... = \' j \ y'" using f' by blast also have "... = cones_map y' \' j" using y' j cone_\' f' mem_Collect_eq restrict_apply by force finally show "\ (f' \ y') j = cones_map y' \' j" by auto qed ultimately show "\ (f' \ y') j = cones_map y' \' j" by auto qed hence "f' \ y' = ?\ (cones_map y' \')" using \ f'_in_hom y' S.comp_in_homI bij_betw_inv_into_left [of \ "S.hom S.unity a" "cones S.unity" "f' \ y'"] by (elim S.in_homE, auto) moreover have "?f \ y' = ?\ (cones_map y' \')" using \ 0 1 f f_in_hom f'_in_hom y' S.comp_in_homI bij_betw_inv_into_left [of \ "S.hom S.unity a" "cones S.unity" "?f \ y'"] by (elim S.in_homE, auto) ultimately show "f' \ y' = ?f \ y'" by auto qed qed qed qed qed have "limit_cone a (?\ a)" .. thus ?thesis by auto qed qed end locale diagram_in_replete_set_category = J: category J + S: replete_set_category S + diagram J S D for J :: "'j comp" (infixr "\\<^sub>J" 55) and S :: "'s comp" (infixr "\" 55) and D :: "'j \ 's" begin sublocale diagram_in_set_category J S S.setp D .. end context set_category begin text\ A set category has an equalizer for any parallel pair of arrows. \ lemma has_equalizers: shows "has_equalizers" proof (unfold has_equalizers_def) have "\f0 f1. par f0 f1 \ \e. has_as_equalizer f0 f1 e" proof - fix f0 f1 assume par: "par f0 f1" interpret J: parallel_pair . interpret PP: parallel_pair_diagram S f0 f1 using par by unfold_locales auto interpret PP: diagram_in_set_category J.comp S setp PP.map .. text\ Let @{term a} be the object corresponding to the set of all images of equalizing points of @{term "dom f0"}, and let @{term e} be the inclusion of @{term a} in @{term "dom f0"}. \ let ?a = "mkIde (img ` {e. e \ hom unity (dom f0) \ f0 \ e = f1 \ e})" have 0: "{e. e \ hom unity (dom f0) \ f0 \ e = f1 \ e} \ hom unity (dom f0)" by auto hence 1: "img ` {e. e \ hom unity (dom f0) \ f0 \ e = f1 \ e} \ Univ" using img_point_in_Univ by auto have 2: "setp (img ` {e. e \ hom unity (dom f0) \ f0 \ e = f1 \ e})" proof - have "setp (img ` hom unity (dom f0))" using ide_dom par setp_img_points by blast moreover have "img ` {e. e \ hom unity (dom f0) \ f0 \ e = f1 \ e} \ img ` hom unity (dom f0)" by blast ultimately show ?thesis by (meson setp_respects_subset) qed have ide_a: "ide ?a" using 1 2 ide_mkIde by auto have set_a: "set ?a = img ` {e. e \ hom unity (dom f0) \ f0 \ e = f1 \ e}" using 1 2 set_mkIde by simp have incl_in_a: "incl_in ?a (dom f0)" proof - have "ide (dom f0)" using PP.is_parallel by simp moreover have "set ?a \ set (dom f0)" using img_point_elem_set set_a by fastforce ultimately show ?thesis using incl_in_def \ide ?a\ by simp qed text\ Then @{term "set a"} is in bijective correspondence with @{term "PP.cones unity"}. \ let ?\ = "\t. PP.mkCone (mkPoint (dom f0) t)" let ?\ = "\\. img (\ (J.Zero))" have bij: "bij_betw ?\ (set ?a) (PP.cones unity)" proof (intro bij_betwI) show "?\ \ set ?a \ PP.cones unity" proof fix t assume t: "t \ set ?a" hence 1: "t \ img ` {e. e \ hom unity (dom f0) \ f0 \ e = f1 \ e}" using set_a by blast then have 2: "mkPoint (dom f0) t \ hom unity (dom f0)" using mkPoint_in_hom imageE mem_Collect_eq mkPoint_img(2) by auto with 1 have 3: "mkPoint (dom f0) t \ {e. 
e \ hom unity (dom f0) \ f0 \ e = f1 \ e}" using mkPoint_img(2) by auto then have "PP.is_equalized_by (mkPoint (dom f0) t)" using CollectD par by fastforce thus "PP.mkCone (mkPoint (dom f0) t) \ PP.cones unity" using 2 PP.cone_mkCone [of "mkPoint (dom f0) t"] by auto qed show "?\ \ PP.cones unity \ set ?a" proof fix \ assume \: "\ \ PP.cones unity" interpret \: cone J.comp S PP.map unity \ using \ by auto have "\ (J.Zero) \ hom unity (dom f0) \ f0 \ \ (J.Zero) = f1 \ \ (J.Zero)" using \ PP.map_def PP.is_equalized_by_cone J.arr_char by auto hence "img (\ (J.Zero)) \ set ?a" using set_a by simp thus "?\ \ \ set ?a" by blast qed show "\t. t \ set ?a \ ?\ (?\ t) = t" using set_a J.arr_char PP.mkCone_def imageE mem_Collect_eq mkPoint_img(2) by auto show "\\. \ \ PP.cones unity \ ?\ (?\ \) = \" proof - fix \ assume \: "\ \ PP.cones unity" interpret \: cone J.comp S PP.map unity \ using \ by auto have 1: "\ (J.Zero) \ hom unity (dom f0) \ f0 \ \ (J.Zero) = f1 \ \ (J.Zero)" using \ PP.map_def PP.is_equalized_by_cone J.arr_char by auto hence "img (\ (J.Zero)) \ set ?a" using set_a by simp hence "img (\ (J.Zero)) \ set (dom f0)" using incl_in_a incl_in_def by auto hence "mkPoint (dom f0) (img (\ J.Zero)) = \ J.Zero" using 1 mkPoint_img(2) by blast hence "?\ (?\ \) = PP.mkCone (\ J.Zero)" by simp also have "... = \" using \ PP.mkCone_cone by simp finally show "?\ (?\ \) = \" by auto qed qed text\ It follows that @{term a} is a limit of \PP\, and that the limit cone gives an equalizer of @{term f0} and @{term f1}. \ have "PP.has_as_limit ?a" proof - have "\\. bij_betw \ (hom unity ?a) (set ?a)" using bij_betw_points_and_set ide_a by auto from this obtain \ where \: "bij_betw \ (hom unity ?a) (set ?a)" by blast have "bij_betw (?\ o \) (hom unity ?a) (PP.cones unity)" using bij \ bij_betw_comp_iff by blast hence "\\. bij_betw \ (hom unity ?a) (PP.cones unity)" by auto thus ?thesis using ide_a PP.limits_are_sets_of_cones by simp qed from this obtain \ where \: "limit_cone J.comp S PP.map ?a \" by auto have "PP.has_as_equalizer (\ J.Zero)" proof - interpret \: limit_cone J.comp S PP.map ?a \ using \ by auto have "PP.mkCone (\ (J.Zero)) = \" using \ PP.mkCone_cone \.cone_axioms by simp moreover have "dom (\ (J.Zero)) = ?a" using J.ide_char \.preserves_hom \.A.map_def by simp ultimately show ?thesis using \ by simp qed thus "\e. has_as_equalizer f0 f1 e" using par has_as_equalizer_def by auto qed thus "\f0 f1. par f0 f1 \ (\e. has_as_equalizer f0 f1 e)" by auto qed end sublocale set_category \ category_with_equalizers S apply unfold_locales using has_equalizers by auto context set_category begin text\ The aim of the next results is to characterize the conditions under which a set category has products. In a traditional development of category theory, one shows that the category \textbf{Set} of \emph{all} sets has all small (\emph{i.e.}~set-indexed) products. In the present context we do not have a category of \emph{all} sets, but rather only a category of all sets with elements at a particular type. Clearly, we cannot expect such a category to have products indexed by arbitrarily large sets. The existence of @{term I}-indexed products in a set category @{term[source=true] S} implies that the universe \S.Univ\ of @{term[source=true] S} must be large enough to admit the formation of @{term I}-tuples of its elements. Conversely, for a set category @{term[source=true] S} the ability to form @{term I}-tuples in @{term Univ} implies that @{term[source=true] S} has @{term I}-indexed products. 
Below we make this precise by defining the notion of when a set category @{term[source=true] S} ``admits @{term I}-indexed tupling'' and we show that @{term[source=true] S} has @{term I}-indexed products if and only if it admits @{term I}-indexed tupling. The definition of ``@{term[source=true] S} admits @{term I}-indexed tupling'' says that there is an injective map, from the space of extensional functions from @{term I} to @{term Univ}, to @{term Univ}. However for a convenient statement and proof of the desired result, the definition of extensional function from theory @{theory "HOL-Library.FuncSet"} needs to be modified. The theory @{theory "HOL-Library.FuncSet"} uses the definite, but arbitrarily chosen value @{term undefined} as the value to be assumed by an extensional function outside of its domain. In the context of the \set_category\, though, it is more natural to use \S.unity\, which is guaranteed to be an element of the universe of @{term[source=true] S}, for this purpose. Doing things that way makes it simpler to establish a bijective correspondence between cones over @{term D} with apex @{term unity} and the set of extensional functions @{term d} that map each arrow @{term j} of @{term J} to an element @{term "d j"} of @{term "set (D j)"}. Possibly it makes sense to go back and make this change in \set_category\, but that would mean completely abandoning @{theory "HOL-Library.FuncSet"} and essentially introducing a duplicate version for use with \set_category\. As a compromise, what I have done here is to locally redefine the few notions from @{theory "HOL-Library.FuncSet"} that I need in order to prove the next set of results. The redefined notions are primed to avoid confusion with the original versions. \ definition extensional' where "extensional' A \ {f. \x. x \ A \ f x = unity}" abbreviation PiE' where "PiE' A B \ Pi A B \ extensional' A" abbreviation restrict' where "restrict' f A \ \x. if x \ A then f x else unity" lemma extensional'I [intro]: assumes "\x. x \ A \ f x = unity" shows "f \ extensional' A" using assms extensional'_def by auto lemma extensional'_arb: assumes "f \ extensional' A" and "x \ A" shows "f x = unity" using assms extensional'_def by fast lemma extensional'_monotone: assumes "A \ B" shows "extensional' A \ extensional' B" using assms extensional'_arb by fastforce lemma PiE'_mono: "(\x. x \ A \ B x \ C x) \ PiE' A B \ PiE' A C" by auto end locale discrete_diagram_in_set_category = S: set_category S \ + discrete_diagram J S D + diagram_in_set_category J S \ D for J :: "'j comp" (infixr "\\<^sub>J" 55) and S :: "'s comp" (infixr "\" 55) and \ :: "'s set \ bool" and D :: "'j \ 's" begin text\ For @{term D} a discrete diagram in a set category, there is a bijective correspondence between cones over @{term D} with apex unity and the set of extensional functions @{term d} that map each arrow @{term j} of @{term[source=true] J} to an element of @{term "S.set (D j)"}. \ abbreviation I where "I \ Collect J.arr" definition funToCone where "funToCone F \ \j. if J.arr j then S.mkPoint (D j) (F j) else S.null" definition coneToFun where "coneToFun \ \ \j. if J.arr j then S.img (\ j) else S.unity" lemma funToCone_mapsto: shows "funToCone \ S.PiE' I (S.set o D) \ cones S.unity" proof fix F assume F: "F \ S.PiE' I (S.set o D)" interpret U: constant_functor J S S.unity apply unfold_locales using S.ide_unity by auto have "cone S.unity (funToCone F)" proof show "\j. 
\J.arr j \ funToCone F j = S.null" using funToCone_def by simp fix j assume j: "J.arr j" have "funToCone F j = S.mkPoint (D j) (F j)" using j funToCone_def by simp moreover have "... \ S.hom S.unity (D j)" using F j is_discrete S.img_mkPoint(1) [of "D j"] by force ultimately have 2: "funToCone F j \ S.hom S.unity (D j)" by auto show 3: "S.dom (funToCone F j) = U.map (J.dom j)" using 2 j U.map_simp by auto show 4: "S.cod (funToCone F j) = D (J.cod j)" using 2 j is_discrete by auto show "D j \ funToCone F (J.dom j) = funToCone F j" using 2 j is_discrete S.comp_cod_arr by auto show "funToCone F (J.cod j) \ (U.map j) = funToCone F j" using 3 j is_discrete U.map_simp S.arr_dom_iff_arr S.comp_arr_dom U.preserves_arr by (metis J.ide_char) qed thus "funToCone F \ cones S.unity" by auto qed lemma coneToFun_mapsto: shows "coneToFun \ cones S.unity \ S.PiE' I (S.set o D)" proof fix \ assume \: "\ \ cones S.unity" interpret \: cone J S D S.unity \ using \ by auto show "coneToFun \ \ S.PiE' I (S.set o D)" proof show "coneToFun \ \ Pi I (S.set o D)" using S.mkPoint_img(1) coneToFun_def is_discrete \.component_in_hom by (simp add: S.img_point_elem_set restrict_apply') show "coneToFun \ \ S.extensional' I" by (metis S.extensional'I coneToFun_def mem_Collect_eq) qed qed lemma funToCone_coneToFun: assumes "\ \ cones S.unity" shows "funToCone (coneToFun \) = \" proof interpret \: cone J S D S.unity \ using assms by auto fix j have "\J.arr j \ funToCone (coneToFun \) j = \ j" using funToCone_def \.is_extensional by simp moreover have "J.arr j \ funToCone (coneToFun \) j = \ j" using funToCone_def coneToFun_def S.mkPoint_img(2) is_discrete \.component_in_hom by auto ultimately show "funToCone (coneToFun \) j = \ j" by blast qed lemma coneToFun_funToCone: assumes "F \ S.PiE' I (S.set o D)" shows "coneToFun (funToCone F) = F" proof fix i have "i \ I \ coneToFun (funToCone F) i = F i" using assms coneToFun_def S.extensional'_arb [of F I i] by auto moreover have "i \ I \ coneToFun (funToCone F) i = F i" proof - assume i: "i \ I" have "coneToFun (funToCone F) i = S.img (funToCone F i)" using i coneToFun_def by simp also have "... = S.img (S.mkPoint (D i) (F i))" using i funToCone_def by auto also have "... = F i" using assms i is_discrete S.img_mkPoint(2) by force finally show "coneToFun (funToCone F) i = F i" by auto qed ultimately show "coneToFun (funToCone F) i = F i" by auto qed lemma bij_coneToFun: shows "bij_betw coneToFun (cones S.unity) (S.PiE' I (S.set o D))" using coneToFun_mapsto funToCone_mapsto funToCone_coneToFun coneToFun_funToCone bij_betwI by blast lemma bij_funToCone: shows "bij_betw funToCone (S.PiE' I (S.set o D)) (cones S.unity)" using coneToFun_mapsto funToCone_mapsto funToCone_coneToFun coneToFun_funToCone bij_betwI by blast end context set_category begin text\ A set category admits @{term I}-indexed tupling if there is an injective map that takes each extensional function from @{term I} to @{term Univ} to an element of @{term Univ}. \ definition admits_tupling where "admits_tupling I \ \\. \ \ PiE' I (\_. Univ) \ Univ \ inj_on \ (PiE' I (\_. Univ))" lemma admits_tupling_monotone: assumes "admits_tupling I" and "I' \ I" shows "admits_tupling I'" proof - from assms(1) obtain \ where \: "\ \ PiE' I (\_. Univ) \ Univ \ inj_on \ (PiE' I (\_. Univ))" using admits_tupling_def by metis have "\ \ PiE' I' (\_. Univ) \ Univ" proof fix f assume f: "f \ PiE' I' (\_. Univ)" have "f \ PiE' I (\_. 
Univ)" using assms(2) f extensional'_def [of I'] terminal_unity extensional'_monotone by auto thus "\ f \ Univ" using \ by auto qed moreover have "inj_on \ (PiE' I' (\_. Univ))" proof - have 1: "\F A A'. inj_on F A \ A' \ A \ inj_on F A'" using subset_inj_on by blast moreover have "PiE' I' (\_. Univ) \ PiE' I (\_. Univ)" using assms(2) extensional'_def [of I'] terminal_unity by auto ultimately show ?thesis using \ assms(2) by blast qed ultimately show ?thesis using admits_tupling_def by metis qed lemma admits_tupling_respects_bij: assumes "admits_tupling I" and "bij_betw \ I I'" shows "admits_tupling I'" proof - obtain \ where \: "\ \ (I \ Univ) \ extensional' I \ Univ \ inj_on \ ((I \ Univ) \ extensional' I)" using assms(1) admits_tupling_def by metis have inv: "bij_betw (inv_into I \) I' I" using assms(2) bij_betw_inv_into by blast let ?C = "\f x. if x \ I then f (\ x) else unity" let ?\' = "\f. \ (?C f)" have 1: "\f. f \ (I' \ Univ) \ extensional' I' \ ?C f \ (I \ Univ) \ extensional' I" using assms bij_betw_apply by fastforce have "?\' \ (I' \ Univ) \ extensional' I' \ Univ \ inj_on ?\' ((I' \ Univ) \ extensional' I')" proof show "(\f. \ (?C f)) \ (I' \ Univ) \ extensional' I' \ Univ" using 1 \ by blast show "inj_on ?\' ((I' \ Univ) \ extensional' I')" proof fix f g assume f: "f \ (I' \ Univ) \ extensional' I'" assume g: "g \ (I' \ Univ) \ extensional' I'" assume eq: "?\' f = ?\' g" have f': "?C f \ (I \ Univ) \ extensional' I" using f 1 by simp have g': "?C g \ (I \ Univ) \ extensional' I" using g 1 by simp have 2: "?C f = ?C g" using f' g' \ eq by (simp add: inj_on_def) show "f = g" proof fix x show "f x = g x" proof (cases "x \ I'") show "x \ I' \ ?thesis" using f g by (metis (no_types, opaque_lifting) "2" assms(2) bij_betw_apply bij_betw_inv_into_right inv) show "x \ I' \ ?thesis" using f g by (metis IntD2 extensional'_arb) qed qed qed qed thus ?thesis using admits_tupling_def by blast qed end context replete_set_category begin lemma has_products_iff_admits_tupling: fixes I :: "'i set" shows "has_products I \ I \ UNIV \ admits_tupling I" proof text\ If @{term[source=true] S} has @{term I}-indexed products, then for every @{term I}-indexed discrete diagram @{term D} in @{term[source=true] S} there is an object @{term \D} of @{term[source=true] S} whose points are in bijective correspondence with the set of cones over @{term D} with apex @{term unity}. In particular this is true for the diagram @{term D} that assigns to each element of @{term I} the ``universal object'' @{term "mkIde Univ"}. \ assume has_products: "has_products I" have I: "I \ UNIV" using has_products has_products_def by auto interpret J: discrete_category I \SOME x. x \ I\ using I someI_ex [of "\x. x \ I"] by (unfold_locales, auto) let ?D = "\i. mkIde Univ" interpret D: discrete_diagram_from_map I S ?D \SOME j. j \ I\ using J.not_arr_null J.arr_char ide_mkIde by (unfold_locales, auto) interpret D: discrete_diagram_in_set_category J.comp S \\A. A \ Univ\ D.map .. have "discrete_diagram J.comp S D.map" .. from this obtain \D \ where \: "product_cone J.comp S D.map \D \" using has_products has_products_def [of I] ex_productE [of "J.comp" D.map] D.diagram_axioms by blast interpret \: product_cone J.comp S D.map \D \ using \ by auto have "D.has_as_limit \D" using \.limit_cone_axioms by auto hence \D: "ide \D \ (\\. 
bij_betw \ (hom unity \D) (D.cones unity))" using D.limits_are_sets_of_cones by simp from this obtain \ where \: "bij_betw \ (hom unity \D) (D.cones unity)" by blast have \': "inv_into (hom unity \D) \ \ D.cones unity \ hom unity \D \ inj_on (inv_into (hom unity \D) \) (D.cones unity)" using \ bij_betw_inv_into bij_betw_imp_inj_on bij_betw_imp_funcset by blast let ?\ = "img o (inv_into (hom unity \D) \) o D.funToCone" have 1: "D.funToCone \ PiE' I (set o D.map) \ D.cones unity" using D.funToCone_mapsto extensional'_def [of I] by auto have 2: "inv_into (hom unity \D) \ \ D.cones unity \ hom unity \D" using \' by auto have 3: "img \ hom unity \D \ Univ" using img_point_in_Univ by blast have 4: "inj_on D.funToCone (PiE' I (set o D.map))" proof - have "D.I = I" by auto thus ?thesis using D.bij_funToCone bij_betw_imp_inj_on by auto qed have 5: "inj_on (inv_into (hom unity \D) \) (D.cones unity)" using \' by auto have 6: "inj_on img (hom unity \D)" using \D bij_betw_points_and_set bij_betw_imp_inj_on [of img "hom unity \D" "set \D"] by simp have "?\ \ PiE' I (set o D.map) \ Univ" using 1 2 3 by force moreover have "inj_on ?\ (PiE' I (set o D.map))" proof - have 7: "\A B C D F G H. F \ A \ B \ G \ B \ C \ H \ C \ D \ inj_on F A \ inj_on G B \ inj_on H C \ inj_on (H o G o F) A" by (simp add: Pi_iff inj_on_def) show ?thesis using 1 2 3 4 5 6 7 [of D.funToCone "PiE' I (set o D.map)" "D.cones unity" "inv_into (hom unity \D) \" "hom unity \D" img Univ] by fastforce qed moreover have "PiE' I (set o D.map) = PiE' I (\x. Univ)" proof - have "\i. i \ I \ (set o D.map) i = Univ" using J.arr_char D.map_def by simp thus ?thesis by blast qed ultimately have "?\ \ (PiE' I (\x. Univ)) \ Univ \ inj_on ?\ (PiE' I (\x. Univ))" by auto thus "I \ UNIV \ admits_tupling I" using I admits_tupling_def by auto next assume ex_\: "I \ UNIV \ admits_tupling I" show "has_products I" proof (unfold has_products_def) from ex_\ obtain \ where \: "\ \ (PiE' I (\x. Univ)) \ Univ \ inj_on \ (PiE' I (\x. Univ))" using admits_tupling_def by metis text\ Given an @{term I}-indexed discrete diagram @{term D}, obtain the object @{term \D} of @{term[source=true] S} corresponding to the set @{term "\ ` PiE I D"} of all @{term "\ d"} where \d \ d \ J \\<^sub>E Univ\ and @{term "d i \ D i"} for all @{term "i \ I"}. The elements of @{term \D} are in bijective correspondence with the set of cones over @{term D}, hence @{term \D} is a limit of @{term D}. \ have "\J D. discrete_diagram J S D \ Collect (partial_magma.arr J) = I \ \\D. has_as_product J D \D" proof fix J :: "'i comp" and D assume D: "discrete_diagram J S D \ Collect (partial_magma.arr J) = I" interpret J: category J using D discrete_diagram.axioms(1) by blast interpret D: discrete_diagram J S D using D by simp interpret D: discrete_diagram_in_set_category J S \\A. A \ Univ\ D .. let ?\D = "mkIde (\ ` PiE' I (set o D))" have 0: "ide ?\D" proof - have "set o D \ I \ Pow Univ" using Pow_iff incl_in_def o_apply elem_set_implies_incl_in subsetI Pi_I' setp_set_ide by (metis (mono_tags, lifting)) hence "\ ` PiE' I (set o D) \ Univ" using \ by blast thus ?thesis using \ ide_mkIde by simp qed hence set_\D: "\ ` PiE' I (set o D) = set ?\D" using 0 ide_in_hom arr_mkIde set_mkIde by auto text\ The elements of @{term \D} are all values of the form @{term "\ d"}, where @{term d} satisfies @{term "d i \ set (D i)"} for all @{term "i \ I"}. Such @{term d} correspond bijectively to cones. Since @{term \} is injective, the values @{term "\ d"} correspond bijectively to cones. 
\ let ?\ = "mkPoint ?\D o \ o D.coneToFun" let ?\' = "D.funToCone o inv_into (PiE' I (set o D)) \ o img" have 1: "\ \ PiE' I (set o D) \ set ?\D \ inj_on \ (PiE' I (set o D))" proof - have "PiE' I (set o D) \ PiE' I (\x. Univ)" using setp_set_ide elem_set_implies_incl_in elem_set_implies_set_eq_singleton incl_in_def PiE'_mono comp_apply subsetI by (metis (no_types, lifting)) thus ?thesis using \ subset_inj_on set_\D Pi_I' imageI by fastforce qed have 2: "inv_into (PiE' I (set o D)) \ \ set ?\D \ PiE' I (set o D)" proof fix y assume y: "y \ set ?\D" have "y \ \ ` (PiE' I (set o D))" using y set_\D by auto thus "inv_into (PiE' I (set o D)) \ y \ PiE' I (set o D)" using inv_into_into [of y \ "PiE' I (set o D)"] by simp qed have 3: "\x. x \ set ?\D \ \ (inv_into (PiE' I (set o D)) \ x) = x" using set_\D by (simp add: f_inv_into_f) have 4: "\d. d \ PiE' I (set o D) \ inv_into (PiE' I (set o D)) \ (\ d) = d" using 1 by auto have 5: "D.I = I" using D by auto have "bij_betw ?\ (D.cones unity) (hom unity ?\D)" proof (intro bij_betwI) show "?\ \ D.cones unity \ hom unity ?\D" proof fix \ assume \: "\ \ D.cones unity" show "?\ \ \ hom unity ?\D" using \ 0 1 5 D.coneToFun_mapsto mkPoint_in_hom [of ?\D] by (simp, blast) qed show "?\' \ hom unity ?\D \ D.cones unity" proof fix x assume x: "x \ hom unity ?\D" hence "img x \ set ?\D" using img_point_elem_set by blast hence "inv_into (PiE' I (set o D)) \ (img x) \ Pi I (set \ D) \ extensional' I" using 2 by blast thus "?\' x \ D.cones unity" using 5 D.funToCone_mapsto by auto qed show "\x. x \ hom unity ?\D \ ?\ (?\' x) = x" proof - fix x assume x: "x \ hom unity ?\D" show "?\ (?\' x) = x" proof - have "D.coneToFun (D.funToCone (inv_into (PiE' I (set o D)) \ (img x))) = inv_into (PiE' I (set o D)) \ (img x)" using x 1 5 img_point_elem_set set_\D D.coneToFun_funToCone by force hence "\ (D.coneToFun (D.funToCone (inv_into (PiE' I (set o D)) \ (img x)))) = img x" using x 3 img_point_elem_set set_\D by force thus ?thesis using x 0 mkPoint_img by auto qed qed show "\\. \ \ D.cones unity \ ?\' (?\ \) = \" proof - fix \ assume \: "\ \ D.cones unity" show "?\' (?\ \) = \" proof - have "img (mkPoint ?\D (\ (D.coneToFun \))) = \ (D.coneToFun \)" using \ 0 1 5 D.coneToFun_mapsto img_mkPoint(2) by blast hence "inv_into (PiE' I (set o D)) \ (img (mkPoint ?\D (\ (D.coneToFun \)))) = D.coneToFun \" using \ D.coneToFun_mapsto 4 5 by (metis (no_types, lifting) PiE) hence "D.funToCone (inv_into (PiE' I (set o D)) \ (img (mkPoint ?\D (\ (D.coneToFun \))))) = \" using \ D.funToCone_coneToFun by auto thus ?thesis by auto qed qed qed hence "bij_betw (inv_into (D.cones unity) ?\) (hom unity ?\D) (D.cones unity)" using bij_betw_inv_into by blast hence "\\. bij_betw \ (hom unity ?\D) (D.cones unity)" by blast hence "D.has_as_limit ?\D" using \ide ?\D\ D.limits_are_sets_of_cones by simp from this obtain \ where \: "limit_cone J S D ?\D \" by blast interpret \: limit_cone J S D ?\D \ using \ by auto interpret P: product_cone J S D ?\D \ using \ D.product_coneI by blast have "product_cone J S D ?\D \" .. thus "has_as_product J D ?\D" using has_as_product_def by auto qed thus "I \ UNIV \ (\J D. discrete_diagram J S D \ Collect (partial_magma.arr J) = I \ (\\D. 
has_as_product J D \D))" using ex_\ by blast qed qed end context replete_set_category begin text\ Characterization of the completeness properties enjoyed by a set category: A set category @{term[source=true] S} has all limits at a type @{typ 'j}, if and only if @{term[source=true] S} admits @{term I}-indexed tupling for all @{typ 'j}-sets @{term I} such that @{term "I \ UNIV"}. \ theorem has_limits_iff_admits_tupling: shows "has_limits (undefined :: 'j) \ (\I :: 'j set. I \ UNIV \ admits_tupling I)" proof assume has_limits: "has_limits (undefined :: 'j)" show "\I :: 'j set. I \ UNIV \ admits_tupling I" using has_limits has_products_if_has_limits has_products_iff_admits_tupling by blast next assume admits_tupling: "\I :: 'j set. I \ UNIV \ admits_tupling I" show "has_limits (undefined :: 'j)" using has_limits_def admits_tupling has_products_iff_admits_tupling by (metis category.axioms(1) category.ideD(1) has_limits_if_has_products iso_tuple_UNIV_I mem_Collect_eq partial_magma.not_arr_null) qed end section "Limits in Functor Categories" text\ In this section, we consider the special case of limits in functor categories, with the objective of showing that limits in a functor category \[A, B]\ are given pointwise, and that \[A, B]\ has all limits that @{term B} has. \ locale parametrized_diagram = J: category J + A: category A + B: category B + JxA: product_category J A + binary_functor J A B D for J :: "'j comp" (infixr "\\<^sub>J" 55) and A :: "'a comp" (infixr "\\<^sub>A" 55) and B :: "'b comp" (infixr "\\<^sub>B" 55) and D :: "'j * 'a \ 'b" begin (* Notation for A.in_hom and B.in_hom is being inherited, but from where? *) notation J.in_hom ("\_ : _ \\<^sub>J _\") notation JxA.comp (infixr "\\<^sub>J\<^sub>x\<^sub>A" 55) notation JxA.in_hom ("\_ : _ \\<^sub>J\<^sub>x\<^sub>A _\") text\ A choice of limit cone for each diagram \D (-, a)\, where @{term a} is an object of @{term[source=true] A}, extends to a functor \L: A \ B\, where the action of @{term L} on arrows of @{term[source=true] A} is determined by universality. \ abbreviation L where "L \ \l \. \a. if A.arr a then limit_cone.induced_arrow J B (\j. D (j, A.cod a)) (l (A.cod a)) (\ (A.cod a)) (l (A.dom a)) (vertical_composite.map J B (\ (A.dom a)) (\j. D (j, a))) else B.null" abbreviation P where "P \ \l \. \a f. \f : l (A.dom a) \\<^sub>B l (A.cod a)\ \ diagram.cones_map J B (\j. D (j, A.cod a)) f (\ (A.cod a)) = vertical_composite.map J B (\ (A.dom a)) (\j. D (j, a))" lemma L_arr: assumes "\a. A.ide a \ limit_cone J B (\j. D (j, a)) (l a) (\ a)" shows "\a. A.arr a \ (\!f. P l \ a f) \ P l \ a (L l \ a)" proof fix a assume a: "A.arr a" interpret \_dom_a: limit_cone J B \\j. D (j, A.dom a)\ \l (A.dom a)\ \\ (A.dom a)\ using a assms by auto interpret \_cod_a: limit_cone J B \\j. D (j, A.cod a)\ \l (A.cod a)\ \\ (A.cod a)\ using a assms by auto interpret Da: natural_transformation J B \\j. D (j, A.dom a)\ \\j. D (j, A.cod a)\ \\j. D (j, a)\ using a fixing_arr_gives_natural_transformation_2 by simp interpret Dao\_dom_a: vertical_composite J B \_dom_a.A.map \\j. D (j, A.dom a)\ \\j. D (j, A.cod a)\ \\ (A.dom a)\ \\j. D (j, a)\ .. interpret Dao\_dom_a: cone J B \\j. D (j, A.cod a)\ \l (A.dom a)\ Dao\_dom_a.map .. show "P l \ a (L l \ a)" using a Dao\_dom_a.cone_axioms \_cod_a.induced_arrowI [of Dao\_dom_a.map "l (A.dom a)"] by auto show "\!f. P l \ a f" using \_cod_a.is_universal Dao\_dom_a.cone_axioms by blast qed lemma L_ide: assumes "\a. A.ide a \ limit_cone J B (\j. D (j, a)) (l a) (\ a)" shows "\a. 
A.ide a \ L l \ a = l a" proof - let ?L = "L l \" let ?P = "P l \" fix a assume a: "A.ide a" interpret \a: limit_cone J B \\j. D (j, a)\ \l a\ \\ a\ using a assms by auto have Pa: "?P a = (\f. f \ B.hom (l a) (l a) \ diagram.cones_map J B (\j. D (j, a)) f (\ a) = \ a)" using a vcomp_ide_dom \a.natural_transformation_axioms by simp have "?P a (?L a)" using assms a L_arr [of l \ a] by fastforce moreover have "?P a (l a)" proof - have "?P a (l a) \ l a \ B.hom (l a) (l a) \ \a.D.cones_map (l a) (\ a) = \ a" using Pa by meson thus ?thesis using a \a.ide_apex \a.cone_axioms \a.D.cones_map_ide [of "\ a" "l a"] by force qed moreover have "\!f. ?P a f" using a Pa \a.is_universal \a.cone_axioms by force ultimately show "?L a = l a" by blast qed lemma chosen_limits_induce_functor: assumes "\a. A.ide a \ limit_cone J B (\j. D (j, a)) (l a) (\ a)" shows "functor A B (L l \)" proof - let ?L = "L l \" let ?P = "\a. \f. \f : l (A.dom a) \\<^sub>B l (A.cod a)\ \ diagram.cones_map J B (\j. D (j, A.cod a)) f (\ (A.cod a)) = vertical_composite.map J B (\ (A.dom a)) (\j. D (j, a))" interpret L: "functor" A B ?L apply unfold_locales using assms L_arr [of l] L_ide apply auto[4] proof - fix a' a assume 1: "A.arr (A a' a)" have a: "A.arr a" using 1 by auto have a': "\a' : A.cod a \\<^sub>A A.cod a'\" using 1 by auto have a'a: "A.seq a' a" using 1 by auto interpret \_dom_a: limit_cone J B \\j. D (j, A.dom a)\ \l (A.dom a)\ \\ (A.dom a)\ using a assms by auto interpret \_cod_a: limit_cone J B \\j. D (j, A.cod a)\ \l (A.cod a)\ \\ (A.cod a)\ using a'a assms by auto interpret \_dom_a'a: limit_cone J B \\j. D (j, A.dom (a' \\<^sub>A a))\ \l (A.dom (a' \\<^sub>A a))\ \\ (A.dom (a' \\<^sub>A a))\ using a'a assms by auto interpret \_cod_a'a: limit_cone J B \\j. D (j, A.cod (a' \\<^sub>A a))\ \l (A.cod (a' \\<^sub>A a))\ \\ (A.cod (a' \\<^sub>A a))\ using a'a assms by auto interpret Da: natural_transformation J B \\j. D (j, A.dom a)\ \\j. D (j, A.cod a)\ \\j. D (j, a)\ using a fixing_arr_gives_natural_transformation_2 by simp interpret Da': natural_transformation J B \\j. D (j, A.cod a)\ \\j. D (j, A.cod (a' \\<^sub>A a))\ \\j. D (j, a')\ using a a'a fixing_arr_gives_natural_transformation_2 by fastforce interpret Da'o\_cod_a: vertical_composite J B \_cod_a.A.map \\j. D (j, A.cod a)\ \\j. D (j, A.cod (a' \\<^sub>A a))\ \\ (A.cod a)\ \\j. D (j, a')\.. interpret Da'o\_cod_a: cone J B \\j. D (j, A.cod (a' \\<^sub>A a))\ \l (A.cod a)\ Da'o\_cod_a.map .. interpret Da'a: natural_transformation J B \\j. D (j, A.dom (a' \\<^sub>A a))\ \\j. D (j, A.cod (a' \\<^sub>A a))\ \\j. D (j, a' \\<^sub>A a)\ using a'a fixing_arr_gives_natural_transformation_2 [of "a' \\<^sub>A a"] by auto interpret Da'ao\_dom_a'a: vertical_composite J B \_dom_a'a.A.map \\j. D (j, A.dom (a' \\<^sub>A a))\ \\j. D (j, A.cod (a' \\<^sub>A a))\ \\ (A.dom (a' \\<^sub>A a))\ \\j. D (j, a' \\<^sub>A a)\ .. interpret Da'ao\_dom_a'a: cone J B \\j. D (j, A.cod (a' \\<^sub>A a))\ \l (A.dom (a' \\<^sub>A a))\ Da'ao\_dom_a'a.map .. 
show "?L (a' \\<^sub>A a) = ?L a' \\<^sub>B ?L a" proof - have "?P (a' \\<^sub>A a) (?L (a' \\<^sub>A a))" using assms a'a L_arr [of l \ "a' \\<^sub>A a"] by fastforce moreover have "?P (a' \\<^sub>A a) (?L a' \\<^sub>B ?L a)" proof have La: "\?L a : l (A.dom a) \\<^sub>B l (A.cod a)\" using assms a L_arr by fast moreover have La': "\?L a' : l (A.cod a) \\<^sub>B l (A.cod a')\" using assms a a' L_arr [of l \ a'] by auto ultimately have seq: "B.seq (?L a') (?L a)" by (elim B.in_homE, auto) thus La'_La: "\?L a' \\<^sub>B ?L a : l (A.dom (a' \\<^sub>A a)) \\<^sub>B l (A.cod (a' \\<^sub>A a))\" using a a' 1 La La' by (intro B.comp_in_homI, auto) show "\_cod_a'a.D.cones_map (?L a' \\<^sub>B ?L a) (\ (A.cod (a' \\<^sub>A a))) = Da'ao\_dom_a'a.map" proof - have "\_cod_a'a.D.cones_map (?L a' \\<^sub>B ?L a) (\ (A.cod (a' \\<^sub>A a))) = (\_cod_a'a.D.cones_map (?L a) o \_cod_a'a.D.cones_map (?L a')) (\ (A.cod a'))" proof - have "\_cod_a'a.D.cones_map (?L a' \\<^sub>B ?L a) (\ (A.cod (a' \\<^sub>A a))) = restrict (\_cod_a'a.D.cones_map (?L a) \ \_cod_a'a.D.cones_map (?L a')) (\_cod_a'a.D.cones (B.cod (?L a'))) (\ (A.cod (a' \\<^sub>A a)))" using seq \_cod_a'a.cone_axioms \_cod_a'a.D.cones_map_comp [of "?L a'" "?L a"] by argo also have "... = (\_cod_a'a.D.cones_map (?L a) o \_cod_a'a.D.cones_map (?L a')) (\ (A.cod a'))" proof - have "\ (A.cod a') \ \_cod_a'a.D.cones (l (A.cod a'))" using \_cod_a'a.cone_axioms a'a by simp moreover have "B.cod (?L a') = l (A.cod a')" using assms a' L_arr [of l] by auto ultimately show ?thesis using a' a'a by simp qed finally show ?thesis by blast qed also have "... = \_cod_a'a.D.cones_map (?L a) (\_cod_a'a.D.cones_map (?L a') (\ (A.cod a')))" by simp also have "... = \_cod_a'a.D.cones_map (?L a) Da'o\_cod_a.map" proof - have "?P a' (?L a')" using assms a' L_arr [of l \ a'] by fast moreover have "?P a' = (\f. f \ B.hom (l (A.cod a)) (l (A.cod a')) \ \_cod_a'a.D.cones_map f (\ (A.cod a')) = Da'o\_cod_a.map)" using a'a by force ultimately show ?thesis using a'a by force qed also have "... = vertical_composite.map J B (\_cod_a.D.cones_map (?L a) (\ (A.cod a))) (\j. D (j, a'))" using assms \_cod_a.D.diagram_axioms \_cod_a'a.D.diagram_axioms Da'.natural_transformation_axioms \_cod_a.cone_axioms La cones_map_vcomp [of J B "\j. D (j, A.cod a)" "\j. D (j, A.cod (a' \\<^sub>A a))" "\j. D (j, a')" "l (A.cod a)" "\ (A.cod a)" "?L a" "l (A.dom a)"] by blast also have "... = vertical_composite.map J B (vertical_composite.map J B (\ (A.dom a)) (\j. D (j, a))) (\j. D (j, a'))" using assms a L_arr by presburger also have "... = vertical_composite.map J B (\ (A.dom a)) (vertical_composite.map J B (\j. D (j, a)) (\j. D (j, a')))" using a'a Da.natural_transformation_axioms Da'.natural_transformation_axioms \_dom_a.natural_transformation_axioms vcomp_assoc by auto also have "... = vertical_composite.map J B (\ (A.dom (a' \\<^sub>A a))) (\j. D (j, a' \\<^sub>A a))" using a'a preserves_comp_2 by simp finally show ?thesis by auto qed qed moreover have "\!f. ?P (a' \\<^sub>A a) f" using \_cod_a'a.is_universal [of "l (A.dom (a' \\<^sub>A a))" "vertical_composite.map J B (\ (A.dom (a' \\<^sub>A a))) (\j. D (j, a' \\<^sub>A a))"] Da'ao\_dom_a'a.cone_axioms by fast ultimately show ?thesis by blast qed qed show ?thesis .. 
qed end locale diagram_in_functor_category = A: category A + B: category B + A_B: functor_category A B + diagram J A_B.comp D for A :: "'a comp" (infixr "\\<^sub>A" 55) and B :: "'b comp" (infixr "\\<^sub>B" 55) and J :: "'j comp" (infixr "\\<^sub>J" 55) and D :: "'j \ ('a, 'b) functor_category.arr" begin interpretation JxA: product_category J A .. interpretation A_BxA: product_category A_B.comp A .. interpretation E: evaluation_functor A B .. interpretation Curry: currying J A B .. notation JxA.comp (infixr "\\<^sub>J\<^sub>x\<^sub>A" 55) notation JxA.in_hom ("\_ : _ \\<^sub>J\<^sub>x\<^sub>A _\") text\ Evaluation of a functor or natural transformation from @{term[source=true] J} to \[A, B]\ at an arrow @{term a} of @{term[source=true] A}. \ abbreviation at where "at a \ \ \j. Curry.uncurry \ (j, a)" lemma at_simp: assumes "A.arr a" and "J.arr j" and "A_B.arr (\ j)" shows "at a \ j = A_B.Map (\ j) a" using assms Curry.uncurry_def E.map_simp by simp lemma functor_at_ide_is_functor: assumes "functor J A_B.comp F" and "A.ide a" shows "functor J B (at a F)" proof - interpret uncurry_F: "functor" JxA.comp B \Curry.uncurry F\ using assms(1) Curry.uncurry_preserves_functors by simp interpret uncurry_F: binary_functor J A B \Curry.uncurry F\ .. show ?thesis using assms(2) uncurry_F.fixing_ide_gives_functor_2 by simp qed lemma functor_at_arr_is_transformation: assumes "functor J A_B.comp F" and "A.arr a" shows "natural_transformation J B (at (A.dom a) F) (at (A.cod a) F) (at a F)" proof - interpret uncurry_F: "functor" JxA.comp B \Curry.uncurry F\ using assms(1) Curry.uncurry_preserves_functors by simp interpret uncurry_F: binary_functor J A B \Curry.uncurry F\ .. show ?thesis using assms(2) uncurry_F.fixing_arr_gives_natural_transformation_2 by simp qed lemma transformation_at_ide_is_transformation: assumes "natural_transformation J A_B.comp F G \" and "A.ide a" shows "natural_transformation J B (at a F) (at a G) (at a \)" proof - interpret \: natural_transformation J A_B.comp F G \ using assms(1) by auto interpret uncurry_F: "functor" JxA.comp B \Curry.uncurry F\ using Curry.uncurry_preserves_functors \.F.functor_axioms by simp interpret uncurry_f: binary_functor J A B \Curry.uncurry F\ .. interpret uncurry_G: "functor" JxA.comp B \Curry.uncurry G\ using Curry.uncurry_preserves_functors \.G.functor_axioms by simp interpret uncurry_G: binary_functor J A B \Curry.uncurry G\ .. interpret uncurry_\: natural_transformation JxA.comp B \Curry.uncurry F\ \Curry.uncurry G\ \Curry.uncurry \\ using Curry.uncurry_preserves_transformations \.natural_transformation_axioms by simp interpret uncurry_\: binary_functor_transformation J A B \Curry.uncurry F\ \Curry.uncurry G\ \Curry.uncurry \\ .. show ?thesis using assms(2) uncurry_\.fixing_ide_gives_natural_transformation_2 by simp qed lemma constant_at_ide_is_constant: assumes "cone x \" and a: "A.ide a" shows "at a (constant_functor.map J A_B.comp x) = constant_functor.map J B (A_B.Map x a)" proof - interpret \: cone J A_B.comp D x \ using assms(1) by auto have x: "A_B.ide x" using \.ide_apex by auto interpret Fun_x: "functor" A B \A_B.Map x\ using x A_B.ide_char by simp interpret Da: "functor" J B \at a D\ using a functor_at_ide_is_functor functor_axioms by blast interpret Da: diagram J B \at a D\ .. 
interpret Xa: constant_functor J B \A_B.Map x a\ using a Fun_x.preserves_ide by unfold_locales simp show "at a \.A.map = Xa.map" using a x Curry.uncurry_def E.map_def Xa.is_extensional by auto qed lemma at_ide_is_diagram: assumes a: "A.ide a" shows "diagram J B (at a D)" proof - interpret Da: "functor" J B "at a D" using a functor_at_ide_is_functor functor_axioms by simp show ?thesis .. qed lemma cone_at_ide_is_cone: assumes "cone x \" and a: "A.ide a" shows "diagram.cone J B (at a D) (A_B.Map x a) (at a \)" proof - interpret \: cone J A_B.comp D x \ using assms(1) by auto have x: "A_B.ide x" using \.ide_apex by auto interpret Fun_x: "functor" A B \A_B.Map x\ using x A_B.ide_char by simp interpret Da: diagram J B \at a D\ using a at_ide_is_diagram by auto interpret Xa: constant_functor J B \A_B.Map x a\ using a by (unfold_locales, simp) interpret \a: natural_transformation J B Xa.map \at a D\ \at a \\ using assms(1) x a transformation_at_ide_is_transformation \.natural_transformation_axioms constant_at_ide_is_constant by fastforce interpret \a: cone J B \at a D\ \A_B.Map x a\ \at a \\ .. show cone_\a: "Da.cone (A_B.Map x a) (at a \)" .. qed lemma at_preserves_comp: assumes "A.seq a' a" shows "at (A a' a) D = vertical_composite.map J B (at a D) (at a' D)" proof - interpret Da: natural_transformation J B \at (A.dom a) D\ \at (A.cod a) D\ \at a D\ using assms functor_at_arr_is_transformation functor_axioms by blast interpret Da': natural_transformation J B \at (A.cod a) D\ \at (A.cod a') D\ \at a' D\ using assms functor_at_arr_is_transformation [of D a'] functor_axioms by fastforce interpret Da'oDa: vertical_composite J B \at (A.dom a) D\ \at (A.cod a) D\ \at (A.cod a') D\ \at a D\ \at a' D\ .. interpret Da'a: natural_transformation J B \at (A.dom a) D\ \at (A.cod a') D\ \at (a' \\<^sub>A a) D\ using assms functor_at_arr_is_transformation [of D "a' \\<^sub>A a"] functor_axioms by simp show "at (a' \\<^sub>A a) D = Da'oDa.map" proof (intro NaturalTransformation.eqI) show "natural_transformation J B (at (A.dom a) D) (at (A.cod a') D) Da'oDa.map" .. show "natural_transformation J B (at (A.dom a) D) (at (A.cod a') D) (at (a' \\<^sub>A a) D)" .. show "\j. J.ide j \ at (a' \\<^sub>A a) D j = Da'oDa.map j" proof - fix j assume j: "J.ide j" interpret Dj: "functor" A B \A_B.Map (D j)\ using j preserves_ide A_B.ide_char by simp show "at (a' \\<^sub>A a) D j = Da'oDa.map j" using assms j Dj.preserves_comp at_simp Da'oDa.map_simp_ide by auto qed qed qed lemma cones_map_pointwise: assumes "cone x \" and "cone x' \'" and f: "f \ A_B.hom x' x" shows "cones_map f \ = \' \ (\a. A.ide a \ diagram.cones_map J B (at a D) (A_B.Map f a) (at a \) = at a \')" proof interpret \: cone J A_B.comp D x \ using assms(1) by auto interpret \': cone J A_B.comp D x' \' using assms(2) by auto have x: "A_B.ide x" using \.ide_apex by auto have x': "A_B.ide x'" using \'.ide_apex by auto interpret \f: cone J A_B.comp D x' \cones_map f \\ using x' f assms(1) cones_map_mapsto by blast interpret Fun_x: "functor" A B \A_B.Map x\ using x A_B.ide_char by simp interpret Fun_x': "functor" A B \A_B.Map x'\ using x' A_B.ide_char by simp show "cones_map f \ = \' \ (\a. A.ide a \ diagram.cones_map J B (at a D) (A_B.Map f a) (at a \) = at a \')" proof - assume \': "cones_map f \ = \'" have "\a. 
A.ide a \ diagram.cones_map J B (at a D) (A_B.Map f a) (at a \) = at a \'" proof - fix a assume a: "A.ide a" interpret Da: diagram J B \at a D\ using a at_ide_is_diagram by auto interpret \a: cone J B \at a D\ \A_B.Map x a\ \at a \\ using a assms(1) cone_at_ide_is_cone by simp interpret \'a: cone J B \at a D\ \A_B.Map x' a\ \at a \'\ using a assms(2) cone_at_ide_is_cone by simp have 1: "\A_B.Map f a : A_B.Map x' a \\<^sub>B A_B.Map x a\" using f a A_B.arr_char A_B.Map_cod A_B.Map_dom mem_Collect_eq natural_transformation.preserves_hom A.ide_in_hom by (metis (no_types, lifting) A_B.in_homE) interpret \fa: cone J B \at a D\ \A_B.Map x' a\ \Da.cones_map (A_B.Map f a) (at a \)\ using 1 \a.cone_axioms Da.cones_map_mapsto by force show "Da.cones_map (A_B.Map f a) (at a \) = at a \'" proof fix j have "\J.arr j \ Da.cones_map (A_B.Map f a) (at a \) j = at a \' j" using \'a.is_extensional \fa.is_extensional [of j] by simp moreover have "J.arr j \ Da.cones_map (A_B.Map f a) (at a \) j = at a \' j" using a f 1 \.cone_axioms \a.cone_axioms at_simp apply simp apply (elim A_B.in_homE B.in_homE, auto) using \' \.A.map_simp A_B.Map_comp [of "\ j" f a a] by auto ultimately show "Da.cones_map (A_B.Map f a) (at a \) j = at a \' j" by blast qed qed thus "\a. A.ide a \ diagram.cones_map J B (at a D) (A_B.Map f a) (at a \) = at a \'" by simp qed show "\a. A.ide a \ diagram.cones_map J B (at a D) (A_B.Map f a) (at a \) = at a \' \ cones_map f \ = \'" proof - assume A: "\a. A.ide a \ diagram.cones_map J B (at a D) (A_B.Map f a) (at a \) = at a \'" show "cones_map f \ = \'" proof (intro NaturalTransformation.eqI) show "natural_transformation J A_B.comp \'.A.map D (cones_map f \)" .. show "natural_transformation J A_B.comp \'.A.map D \'" .. show "\j. J.ide j \ cones_map f \ j = \' j" proof (intro A_B.arr_eqI) fix j assume j: "J.ide j" show 1: "A_B.arr (cones_map f \ j)" using j \f.preserves_reflects_arr by simp show "A_B.arr (\' j)" using j by auto have Dom_\f_j: "A_B.Dom (cones_map f \ j) = A_B.Map x'" using x' j 1 A_B.Map_dom \'.A.map_simp \f.preserves_dom J.ide_in_hom by (metis (no_types, lifting) J.ideD(2) \f.preserves_reflects_arr) also have Dom_\'_j: "... = A_B.Dom (\' j)" using x' j A_B.Map_dom [of "\' j"] \'.preserves_hom \'.A.map_simp by simp finally show "A_B.Dom (cones_map f \ j) = A_B.Dom (\' j)" by auto have Cod_\f_j: "A_B.Cod (cones_map f \ j) = A_B.Map (D (J.cod j))" using j A_B.Map_cod A_B.cod_char J.ide_in_hom \f.preserves_hom by (metis (no_types, lifting) "1" J.ideD(1) \f.preserves_cod) also have Cod_\'_j: "... = A_B.Cod (\' j)" using j A_B.Map_cod [of "\' j"] \'.preserves_hom by simp finally show "A_B.Cod (cones_map f \ j) = A_B.Cod (\' j)" by auto show "A_B.Map (cones_map f \ j) = A_B.Map (\' j)" proof (intro NaturalTransformation.eqI) interpret \fj: natural_transformation A B \A_B.Map x'\ \A_B.Map (D (J.cod j))\ \A_B.Map (cones_map f \ j)\ using j \f.preserves_reflects_arr A_B.arr_char [of "cones_map f \ j"] Dom_\f_j Cod_\f_j by simp show "natural_transformation A B (A_B.Map x') (A_B.Map (D (J.cod j))) (A_B.Map (cones_map f \ j))" .. interpret \'j: natural_transformation A B \A_B.Map x'\ \A_B.Map (D (J.cod j))\ \A_B.Map (\' j)\ using j A_B.arr_char [of "\' j"] Dom_\'_j Cod_\'_j by simp show "natural_transformation A B (A_B.Map x') (A_B.Map (D (J.cod j))) (A_B.Map (\' j))" .. show "\a. 
A.ide a \ A_B.Map (cones_map f \ j) a = A_B.Map (\' j) a" proof - fix a assume a: "A.ide a" interpret Da: diagram J B \at a D\ using a at_ide_is_diagram by auto have cone_\a: "Da.cone (A_B.Map x a) (at a \)" using a assms(1) cone_at_ide_is_cone by simp interpret \a: cone J B \at a D\ \A_B.Map x a\ \at a \\ using cone_\a by auto interpret Fun_f: natural_transformation A B \A_B.Dom f\ \A_B.Cod f\ \A_B.Map f\ using f A_B.arr_char by fast have fa: "A_B.Map f a \ B.hom (A_B.Map x' a) (A_B.Map x a)" using a f Fun_f.preserves_hom A.ide_in_hom by auto have "A_B.Map (cones_map f \ j) a = Da.cones_map (A_B.Map f a) (at a \) j" proof - have "A_B.Map (cones_map f \ j) a = A_B.Map (A_B.comp (\ j) f) a" using assms(1) f \.is_extensional by auto also have "... = B (A_B.Map (\ j) a) (A_B.Map f a)" using f j a \.preserves_hom A.ide_in_hom J.ide_in_hom A_B.Map_comp \.A.map_simp by (metis (no_types, lifting) A.comp_ide_self A.ideD(1) A_B.seqI' J.ideD(1) mem_Collect_eq) also have "... = Da.cones_map (A_B.Map f a) (at a \) j" using j a cone_\a fa Curry.uncurry_def E.map_simp by auto finally show ?thesis by auto qed also have "... = at a \' j" using j a A by simp also have "... = A_B.Map (\' j) a" using j Curry.uncurry_def E.map_simp \'j.is_extensional by simp finally show "A_B.Map (cones_map f \ j) a = A_B.Map (\' j) a" by auto qed qed qed qed qed qed text\ If @{term \} is a cone with apex @{term a} over @{term D}, then @{term \} is a limit cone if, for each object @{term x} of @{term X}, the cone obtained by evaluating @{term \} at @{term x} is a limit cone with apex @{term "A_B.Map a x"} for the diagram in @{term C} obtained by evaluating @{term D} at @{term x}. \ lemma cone_is_limit_if_pointwise_limit: assumes cone_\: "cone x \" and "\a. A.ide a \ diagram.limit_cone J B (at a D) (A_B.Map x a) (at a \)" shows "limit_cone x \" proof - interpret \: cone J A_B.comp D x \ using assms by auto have x: "A_B.ide x" using \.ide_apex by auto show "limit_cone x \" proof fix x' \' assume cone_\': "cone x' \'" interpret \': cone J A_B.comp D x' \' using cone_\' by auto have x': "A_B.ide x'" using \'.ide_apex by auto text\ The universality of the limit cone \at a \\ yields, for each object \a\ of \A\, a unique arrow \fa\ that transforms \at a \\ to \at a \'\. \ have EU: "\a. A.ide a \ \!fa. fa \ B.hom (A_B.Map x' a) (A_B.Map x a) \ diagram.cones_map J B (at a D) fa (at a \) = at a \'" proof - fix a assume a: "A.ide a" interpret Da: diagram J B \at a D\ using a at_ide_is_diagram by auto interpret \a: limit_cone J B \at a D\ \A_B.Map x a\ \at a \\ using assms(2) a by auto interpret \'a: cone J B \at a D\ \A_B.Map x' a\ \at a \'\ using a cone_\' cone_at_ide_is_cone by auto have "Da.cone (A_B.Map x' a) (at a \')" .. thus "\!fa. fa \ B.hom (A_B.Map x' a) (A_B.Map x a) \ Da.cones_map fa (at a \) = at a \'" using \a.is_universal by simp qed text\ Our objective is to show the existence of a unique arrow \f\ that transforms \\\ into \\'\. We obtain \f\ by bundling the arrows \fa\ of \C\ and proving that this yields a natural transformation from \X\ to \C\, hence an arrow of \[X, C]\. \ show "\!f. \f : x' \\<^sub>[\<^sub>A\<^sub>,\<^sub>B\<^sub>] x\ \ cones_map f \ = \'" proof let ?P = "\a fa. \fa : A_B.Map x' a \\<^sub>B A_B.Map x a\ \ diagram.cones_map J B (at a D) fa (at a \) = at a \'" have AaPa: "\a. A.ide a \ ?P a (THE fa. ?P a fa)" proof - fix a assume a: "A.ide a" have "\!fa. ?P a fa" using a EU by simp thus "?P a (THE fa. ?P a fa)" using a theI' [of "?P a"] by fastforce qed have AaPa_in_hom: "\a. A.ide a \ \THE fa. 
?P a fa : A_B.Map x' a \\<^sub>B A_B.Map x a\" using AaPa by blast have AaPa_map: "\a. A.ide a \ diagram.cones_map J B (at a D) (THE fa. ?P a fa) (at a \) = at a \'" using AaPa by blast let ?Fun_f = "\a. if A.ide a then (THE fa. ?P a fa) else B.null" interpret Fun_x: "functor" A B \\a. A_B.Map x a\ using x A_B.ide_char by simp interpret Fun_x': "functor" A B \\a. A_B.Map x' a\ using x' A_B.ide_char by simp text\ The arrows \Fun_f a\ are the components of a natural transformation. It is more work to verify the naturality than it seems like it ought to be. \ interpret \: transformation_by_components A B \\a. A_B.Map x' a\ \\a. A_B.Map x a\ ?Fun_f proof fix a assume a: "A.ide a" show "\?Fun_f a : A_B.Map x' a \\<^sub>B A_B.Map x a\" using a AaPa by simp next fix a assume a: "A.arr a" text\ \newcommand\xdom{\mathop{\rm dom}} \newcommand\xcod{\mathop{\rm cod}} $$\xymatrix{ {x_{\xdom a}} \drtwocell\omit{\omit(A)} \ar[d]_{\chi_{\xdom a}} \ar[r]^{x_a} & {x_{\xcod a}} \ar[d]^{\chi_{\xcod a}} \\ {D_{\xdom a}} \ar[r]^{D_a} & {D_{\xcod a}} \\ {x'_{\xdom a}} \urtwocell\omit{\omit(B)} \ar@/^5em/[uu]^{f_{\xdom a}}_{\hspace{1em}(C)} \ar[u]^{\chi'_{\xdom a}} \ar[r]_{x'_a} & {x'_{\xcod a}} \ar[u]_{x'_{\xcod a}} \ar@/_5em/[uu]_{f_{\xcod a}} }$$ \ let ?x_dom_a = "A_B.Map x (A.dom a)" let ?x_cod_a = "A_B.Map x (A.cod a)" let ?x_a = "A_B.Map x a" have x_a: "\?x_a : ?x_dom_a \\<^sub>B ?x_cod_a\" using a x A_B.ide_char by auto let ?x'_dom_a = "A_B.Map x' (A.dom a)" let ?x'_cod_a = "A_B.Map x' (A.cod a)" let ?x'_a = "A_B.Map x' a" have x'_a: "\?x'_a : ?x'_dom_a \\<^sub>B ?x'_cod_a\" using a x' A_B.ide_char by auto let ?f_dom_a = "?Fun_f (A.dom a)" let ?f_cod_a = "?Fun_f (A.cod a)" have f_dom_a: "\?f_dom_a : ?x'_dom_a \\<^sub>B ?x_dom_a\" using a AaPa by simp have f_cod_a: "\?f_cod_a : ?x'_cod_a \\<^sub>B ?x_cod_a\" using a AaPa by simp interpret D_dom_a: diagram J B \at (A.dom a) D\ using a at_ide_is_diagram by simp interpret D_cod_a: diagram J B \at (A.cod a) D\ using a at_ide_is_diagram by simp interpret Da: natural_transformation J B \at (A.dom a) D\ \at (A.cod a) D\ \at a D\ using a functor_axioms functor_at_arr_is_transformation by simp interpret \_dom_a: limit_cone J B \at (A.dom a) D\ \A_B.Map x (A.dom a)\ \at (A.dom a) \\ using assms(2) a by auto interpret \_cod_a: limit_cone J B \at (A.cod a) D\ \A_B.Map x (A.cod a)\ \at (A.cod a) \\ using assms(2) a by auto interpret \'_dom_a: cone J B \at (A.dom a) D\ \A_B.Map x' (A.dom a)\ \at (A.dom a) \'\ using a cone_\' cone_at_ide_is_cone by auto interpret \'_cod_a: cone J B \at (A.cod a) D\ \A_B.Map x' (A.cod a)\ \at (A.cod a) \'\ using a cone_\' cone_at_ide_is_cone by auto text\ Now construct cones with apexes \x_dom_a\ and \x'_dom_a\ over @{term "at (A.cod a) D"} by forming the vertical composites of @{term "at (A.dom a) \"} and @{term "at (A.cod a) \'"} with the natural transformation @{term "at a D"}. \ interpret Dao\_dom_a: vertical_composite J B \_dom_a.A.map \at (A.dom a) D\ \at (A.cod a) D\ \at (A.dom a) \\ \at a D\ .. interpret Dao\_dom_a: cone J B \at (A.cod a) D\ ?x_dom_a Dao\_dom_a.map using \_dom_a.cone_axioms Da.natural_transformation_axioms vcomp_transformation_cone by metis interpret Dao\'_dom_a: vertical_composite J B \'_dom_a.A.map \at (A.dom a) D\ \at (A.cod a) D\ \at (A.dom a) \'\ \at a D\ .. interpret Dao\'_dom_a: cone J B \at (A.cod a) D\ ?x'_dom_a Dao\'_dom_a.map using \'_dom_a.cone_axioms Da.natural_transformation_axioms vcomp_transformation_cone by metis have Dao\_dom_a: "D_cod_a.cone ?x_dom_a Dao\_dom_a.map" .. 
have Dao\'_dom_a: "D_cod_a.cone ?x'_dom_a Dao\'_dom_a.map" .. text\ These cones are also obtained by transforming the cones @{term "at (A.cod a) \"} and @{term "at (A.cod a) \'"} by \x_a\ and \x'_a\, respectively. \ have A: "Dao\_dom_a.map = D_cod_a.cones_map ?x_a (at (A.cod a) \)" proof fix j have "\J.arr j \ Dao\_dom_a.map j = D_cod_a.cones_map ?x_a (at (A.cod a) \) j" using Dao\_dom_a.is_extensional \_cod_a.cone_axioms x_a by force moreover have "J.arr j \ Dao\_dom_a.map j = D_cod_a.cones_map ?x_a (at (A.cod a) \) j" proof - assume j: "J.arr j" have "Dao\_dom_a.map j = at a D j \\<^sub>B at (A.dom a) \ (J.dom j)" using j Dao\_dom_a.map_simp_2 by simp also have "... = A_B.Map (D j) a \\<^sub>B A_B.Map (\ (J.dom j)) (A.dom a)" using a j at_simp by simp also have "... = A_B.Map (A_B.comp (D j) (\ (J.dom j))) a" using a j A_B.Map_comp by (metis (no_types, lifting) A.comp_arr_dom \.is_natural_1 \.preserves_reflects_arr) also have "... = A_B.Map (A_B.comp (\ (J.cod j)) (\.A.map j)) a" using a j \.naturality by simp also have "... = A_B.Map (\ (J.cod j)) (A.cod a) \\<^sub>B A_B.Map x a" using a j x A_B.Map_comp by (metis (no_types, lifting) A.comp_cod_arr \.A.map_simp \.is_natural_2 \.preserves_reflects_arr) also have "... = at (A.cod a) \ (J.cod j) \\<^sub>B A_B.Map x a" using a j at_simp by simp also have "... = at (A.cod a) \ j \\<^sub>B A_B.Map x a" using a j \_cod_a.is_natural_2 \_cod_a.A.map_simp by (metis J.arr_cod_iff_arr J.cod_cod) also have "... = D_cod_a.cones_map ?x_a (at (A.cod a) \) j" using a j x \_cod_a.cone_axioms preserves_cod by simp finally show ?thesis by blast qed ultimately show "Dao\_dom_a.map j = D_cod_a.cones_map ?x_a (at (A.cod a) \) j" by blast qed have B: "Dao\'_dom_a.map = D_cod_a.cones_map ?x'_a (at (A.cod a) \')" proof fix j have "\J.arr j \ Dao\'_dom_a.map j = D_cod_a.cones_map ?x'_a (at (A.cod a) \') j" using Dao\'_dom_a.is_extensional \'_cod_a.cone_axioms x'_a by force moreover have "J.arr j \ Dao\'_dom_a.map j = D_cod_a.cones_map ?x'_a (at (A.cod a) \') j" proof - assume j: "J.arr j" have "Dao\'_dom_a.map j = at a D j \\<^sub>B at (A.dom a) \' (J.dom j)" using j Dao\'_dom_a.map_simp_2 by simp also have "... = A_B.Map (D j) a \\<^sub>B A_B.Map (\' (J.dom j)) (A.dom a)" using a j at_simp by simp also have "... = A_B.Map (A_B.comp (D j) (\' (J.dom j))) a" using a j A_B.Map_comp by (metis (no_types, lifting) A.comp_arr_dom \'.is_natural_1 \'.preserves_reflects_arr) also have "... = A_B.Map (A_B.comp (\' (J.cod j)) (\'.A.map j)) a" using a j \'.naturality by simp also have "... = A_B.Map (\' (J.cod j)) (A.cod a) \\<^sub>B A_B.Map x' a" using a j x' A_B.Map_comp by (metis (no_types, lifting) A.comp_cod_arr \'.A.map_simp \'.is_natural_2 \'.preserves_reflects_arr) also have "... = at (A.cod a) \' (J.cod j) \\<^sub>B A_B.Map x' a" using a j at_simp by simp also have "... = at (A.cod a) \' j \\<^sub>B A_B.Map x' a" using a j \'_cod_a.is_natural_2 \'_cod_a.A.map_simp by (metis J.arr_cod_iff_arr J.cod_cod) also have "... = D_cod_a.cones_map ?x'_a (at (A.cod a) \') j" using a j x' \'_cod_a.cone_axioms preserves_cod by simp finally show ?thesis by blast qed ultimately show "Dao\'_dom_a.map j = D_cod_a.cones_map ?x'_a (at (A.cod a) \') j" by blast qed text\ Next, we show that \f_dom_a\, which is the unique arrow that transforms \\_dom_a\ into \\'_dom_a\, is also the unique arrow that transforms \Dao\_dom_a\ into \Dao\'_dom_a\. 
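The verification is componentwise: by associativity of composition in B, transforming
a component of the vertical composite by f_dom_a amounts to transforming the
corresponding component of the original cone, and f_dom_a already carries that cone
to its primed counterpart.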
\ have C: "D_cod_a.cones_map ?f_dom_a Dao\_dom_a.map = Dao\'_dom_a.map" proof (intro NaturalTransformation.eqI) show "natural_transformation J B \'_dom_a.A.map (at (A.cod a) D) Dao\'_dom_a.map" .. show "natural_transformation J B \'_dom_a.A.map (at (A.cod a) D) (D_cod_a.cones_map ?f_dom_a Dao\_dom_a.map)" proof - interpret \: cone J B \at (A.cod a) D\ ?x'_dom_a \D_cod_a.cones_map ?f_dom_a Dao\_dom_a.map\ proof - have "\b b' f. \ f \ B.hom b' b; D_cod_a.cone b Dao\_dom_a.map \ \ D_cod_a.cone b' (D_cod_a.cones_map f Dao\_dom_a.map)" using D_cod_a.cones_map_mapsto by blast moreover have "D_cod_a.cone ?x_dom_a Dao\_dom_a.map" .. ultimately show "D_cod_a.cone ?x'_dom_a (D_cod_a.cones_map ?f_dom_a Dao\_dom_a.map)" using f_dom_a by simp qed show ?thesis .. qed show "\j. J.ide j \ D_cod_a.cones_map ?f_dom_a Dao\_dom_a.map j = Dao\'_dom_a.map j" proof - fix j assume j: "J.ide j" have "D_cod_a.cones_map ?f_dom_a Dao\_dom_a.map j = Dao\_dom_a.map j \\<^sub>B ?f_dom_a" using j f_dom_a Dao\_dom_a.cone_axioms by (elim B.in_homE, auto) also have "... = (at a D j \\<^sub>B at (A.dom a) \ j) \\<^sub>B ?f_dom_a" using j Dao\_dom_a.map_simp_ide by simp also have "... = at a D j \\<^sub>B at (A.dom a) \ j \\<^sub>B ?f_dom_a" using B.comp_assoc by simp also have "... = at a D j \\<^sub>B D_dom_a.cones_map ?f_dom_a (at (A.dom a) \) j" using j \_dom_a.cone_axioms f_dom_a by (elim B.in_homE, auto) also have "... = at a D j \\<^sub>B at (A.dom a) \' j" using a AaPa A.ide_dom by presburger also have "... = Dao\'_dom_a.map j" using j Dao\'_dom_a.map_simp_ide by simp finally show "D_cod_a.cones_map ?f_dom_a Dao\_dom_a.map j = Dao\'_dom_a.map j" by auto qed qed text\ Naturality amounts to showing that \C f_cod_a x'_a = C x_a f_dom_a\. To do this, we show that both arrows transform @{term "at (A.cod a) \"} into \Dao\'_cod_a\, thus they are equal by the universality of @{term "at (A.cod a) \"}. \ have "\!fa. \fa : ?x'_dom_a \\<^sub>B ?x_cod_a\ \ D_cod_a.cones_map fa (at (A.cod a) \) = Dao\'_dom_a.map" using Dao\'_dom_a.cone_axioms a \_cod_a.is_universal [of ?x'_dom_a Dao\'_dom_a.map] by fast moreover have "?f_cod_a \\<^sub>B ?x'_a \ B.hom ?x'_dom_a ?x_cod_a \ D_cod_a.cones_map (?f_cod_a \\<^sub>B ?x'_a) (at (A.cod a) \) = Dao\'_dom_a.map" proof show "?f_cod_a \\<^sub>B ?x'_a \ B.hom ?x'_dom_a ?x_cod_a" using f_cod_a x'_a by blast show "D_cod_a.cones_map (?f_cod_a \\<^sub>B ?x'_a) (at (A.cod a) \) = Dao\'_dom_a.map" proof - have "D_cod_a.cones_map (?f_cod_a \\<^sub>B ?x'_a) (at (A.cod a) \) = restrict (D_cod_a.cones_map ?x'_a o D_cod_a.cones_map ?f_cod_a) (D_cod_a.cones (?x_cod_a)) (at (A.cod a) \)" using x'_a D_cod_a.cones_map_comp [of ?f_cod_a ?x'_a] f_cod_a by (elim B.in_homE, auto) also have "... = D_cod_a.cones_map ?x'_a (D_cod_a.cones_map ?f_cod_a (at (A.cod a) \))" using \_cod_a.cone_axioms by simp also have "... 
= Dao\'_dom_a.map" using a B AaPa_map A.ide_cod by presburger finally show ?thesis by auto qed qed moreover have "?x_a \\<^sub>B ?f_dom_a \ B.hom ?x'_dom_a ?x_cod_a \ D_cod_a.cones_map (?x_a \\<^sub>B ?f_dom_a) (at (A.cod a) \) = Dao\'_dom_a.map" proof show "?x_a \\<^sub>B ?f_dom_a \ B.hom ?x'_dom_a ?x_cod_a" using f_dom_a x_a by blast show "D_cod_a.cones_map (?x_a \\<^sub>B ?f_dom_a) (at (A.cod a) \) = Dao\'_dom_a.map" proof - have "D_cod_a.cones (B.cod (A_B.Map x a)) = D_cod_a.cones (A_B.Map x (A.cod a))" using a x by simp moreover have "B.seq ?x_a ?f_dom_a" using f_dom_a x_a by (elim B.in_homE, auto) ultimately have "D_cod_a.cones_map (?x_a \\<^sub>B ?f_dom_a) (at (A.cod a) \) = restrict (D_cod_a.cones_map ?f_dom_a o D_cod_a.cones_map ?x_a) (D_cod_a.cones (?x_cod_a)) (at (A.cod a) \)" using D_cod_a.cones_map_comp [of ?x_a ?f_dom_a] x_a by argo also have "... = D_cod_a.cones_map ?f_dom_a (D_cod_a.cones_map ?x_a (at (A.cod a) \))" using \_cod_a.cone_axioms by simp also have "... = Dao\'_dom_a.map" using A C a AaPa by argo finally show ?thesis by blast qed qed ultimately show "?f_cod_a \\<^sub>B ?x'_a = ?x_a \\<^sub>B ?f_dom_a" using a \_cod_a.is_universal by blast qed text\ The arrow from @{term x'} to @{term x} in \[A, B]\ determined by the natural transformation \\\ transforms @{term \} into @{term \'}. Moreover, it is the unique such arrow, since the components of \\\ are each determined by universality. \ let ?f = "A_B.MkArr (\a. A_B.Map x' a) (\a. A_B.Map x a) \.map" have f_in_hom: "?f \ A_B.hom x' x" proof - have arr_f: "A_B.arr ?f" using x' x A_B.arr_MkArr \.natural_transformation_axioms by simp moreover have "A_B.MkIde (\a. A_B.Map x a) = x" using x A_B.ide_char A_B.MkArr_Map A_B.in_homE A_B.ide_in_hom by metis moreover have "A_B.MkIde (\a. A_B.Map x' a) = x'" using x' A_B.ide_char A_B.MkArr_Map A_B.in_homE A_B.ide_in_hom by metis ultimately show ?thesis using A_B.dom_char A_B.cod_char by auto qed have Fun_f: "\a. A.ide a \ A_B.Map ?f a = (THE fa. ?P a fa)" using f_in_hom \.map_simp_ide by fastforce have cones_map_f: "cones_map ?f \ = \'" using AaPa Fun_f at_ide_is_diagram assms(2) x x' cone_\ cone_\' f_in_hom Fun_f cones_map_pointwise by presburger show "\?f : x' \\<^sub>[\<^sub>A\<^sub>,\<^sub>B\<^sub>] x\ \ cones_map ?f \ = \'" using f_in_hom cones_map_f by auto show "\f'. \f' : x' \\<^sub>[\<^sub>A\<^sub>,\<^sub>B\<^sub>] x\ \ cones_map f' \ = \' \ f' = ?f" proof - fix f' assume f': "\f' : x' \\<^sub>[\<^sub>A\<^sub>,\<^sub>B\<^sub>] x\ \ cones_map f' \ = \'" have 0: "\a. A.ide a \ diagram.cones_map J B (at a D) (A_B.Map f' a) (at a \) = at a \'" using f' cone_\ cone_\' cones_map_pointwise by blast have "f' = A_B.MkArr (A_B.Dom f') (A_B.Cod f') (A_B.Map f')" using f' A_B.MkArr_Map by auto also have "... = ?f" proof (intro A_B.MkArr_eqI) show "A_B.arr (A_B.MkArr (A_B.Dom f') (A_B.Cod f') (A_B.Map f'))" using f' calculation by blast show 1: "A_B.Dom f' = A_B.Map x'" using f' A_B.Map_dom by auto show 2: "A_B.Cod f' = A_B.Map x" using f' A_B.Map_cod by auto show "A_B.Map f' = \.map" proof (intro NaturalTransformation.eqI) show "natural_transformation A B (A_B.Map x') (A_B.Map x) \.map" .. show "natural_transformation A B (A_B.Map x') (A_B.Map x) (A_B.Map f')" using f' 1 2 A_B.arr_char [of f'] by auto show "\a. 
A.ide a \ A_B.Map f' a = \.map a" proof - fix a assume a: "A.ide a" interpret Da: diagram J B \at a D\ using a at_ide_is_diagram by auto interpret Fun_f': natural_transformation A B \A_B.Dom f'\ \A_B.Cod f'\ \A_B.Map f'\ using f' A_B.arr_char by fast have "A_B.Map f' a \ B.hom (A_B.Map x' a) (A_B.Map x a)" using a f' Fun_f'.preserves_hom A.ide_in_hom by auto hence "?P a (A_B.Map f' a)" using a 0 [of a] by simp moreover have "?P a (\.map a)" using a \.map_simp_ide Fun_f AaPa by presburger ultimately show "A_B.Map f' a = \.map a" using a EU by blast qed qed qed finally show "f' = ?f" by auto qed qed qed qed end context functor_category begin text\ A functor category \[A, B]\ has limits of shape @{term[source=true] J} whenever @{term B} has limits of shape @{term[source=true] J}. \ lemma has_limits_of_shape_if_target_does: assumes "category (J :: 'j comp)" and "B.has_limits_of_shape J" shows "has_limits_of_shape J" proof (unfold has_limits_of_shape_def) have "\D. diagram J comp D \ (\x \. limit_cone J comp D x \)" proof - fix D assume D: "diagram J comp D" interpret J: category J using assms(1) by auto interpret JxA: product_category J A .. interpret D: diagram J comp D using D by auto interpret D: diagram_in_functor_category A B J D .. interpret Curry: currying J A B .. text\ Given diagram @{term D} in \[A, B]\, choose for each object \a\ of \A\ a limit cone \(la, \a)\ for \at a D\ in \B\. \ let ?l = "\a. diagram.some_limit J B (D.at a D)" let ?\ = "\a. diagram.some_limit_cone J B (D.at a D)" have l\: "\a. A.ide a \ diagram.limit_cone J B (D.at a D) (?l a) (?\ a)" using B.has_limits_of_shape_def D.at_ide_is_diagram assms(2) diagram.limit_cone_some_limit_cone by blast text\ The choice of limit cones induces a limit functor from \A\ to \B\. \ interpret uncurry_D: diagram JxA.comp B "Curry.uncurry D" proof - interpret "functor" JxA.comp B \Curry.uncurry D\ using D.functor_axioms Curry.uncurry_preserves_functors by simp interpret binary_functor J A B \Curry.uncurry D\ .. show "diagram JxA.comp B (Curry.uncurry D)" .. qed interpret uncurry_D: parametrized_diagram J A B \Curry.uncurry D\ .. let ?L = "uncurry_D.L ?l ?\" let ?P = "uncurry_D.P ?l ?\" interpret L: "functor" A B ?L using l\ uncurry_D.chosen_limits_induce_functor [of ?l ?\] by simp have L_ide: "\a. A.ide a \ ?L a = ?l a" using uncurry_D.L_ide [of ?l ?\] l\ by blast have L_arr: "\a. A.arr a \ (\!f. ?P a f) \ ?P a (?L a)" using uncurry_D.L_arr [of ?l ?\] l\ by blast text\ The functor \L\ extends to a functor \L'\ from \JxA\ to \B\ that is constant on \J\. \ let ?L' = "\ja. if JxA.arr ja then ?L (snd ja) else B.null" let ?P' = "\ja. ?P (snd ja)" interpret L': "functor" JxA.comp B ?L' apply unfold_locales using L.preserves_arr L.preserves_dom L.preserves_cod apply auto[4] using L.preserves_comp JxA.comp_char by (elim JxA.seqE, auto) have "\ja. JxA.arr ja \ (\!f. ?P' ja f) \ ?P' ja (?L' ja)" proof - fix ja assume ja: "JxA.arr ja" have "A.arr (snd ja)" using ja by blast thus "(\!f. ?P' ja f) \ ?P' ja (?L' ja)" using ja L_arr by presburger qed hence L'_arr: "\ja. JxA.arr ja \ ?P' ja (?L' ja)" by blast have L'_ide: "\ja. \ J.arr (fst ja); A.ide (snd ja) \ \ ?L' ja = ?l (snd ja)" using L_ide l\ by force have L'_arr_map: "\ja. JxA.arr ja \ uncurry_D.P ?l ?\ (snd ja) (uncurry_D.L ?l ?\ (snd ja))" using L'_arr by presburger text\ The map that takes an object \(j, a)\ of \JxA\ to the component \\ a j\ of the limit cone \\ a\ is a natural transformation from \L\ to uncurry \D\. \ let ?\' = "\ja. 
?\ (snd ja) (fst ja)" interpret \': transformation_by_components JxA.comp B ?L' \Curry.uncurry D\ ?\' proof fix ja assume ja: "JxA.ide ja" let ?j = "fst ja" let ?a = "snd ja" interpret \a: limit_cone J B \D.at ?a D\ \?l ?a\ \?\ ?a\ using ja l\ by blast show "\?\' ja : ?L' ja \\<^sub>B Curry.uncurry D ja\" using ja L'_ide [of ja] by force next fix ja assume ja: "JxA.arr ja" let ?j = "fst ja" let ?a = "snd ja" have j: "J.arr ?j" using ja by simp have a: "A.arr ?a" using ja by simp interpret D_dom_a: diagram J B \D.at (A.dom ?a) D\ using a D.at_ide_is_diagram by auto interpret D_cod_a: diagram J B \D.at (A.cod ?a) D\ using a D.at_ide_is_diagram by auto interpret Da: natural_transformation J B \D.at (A.dom ?a) D\ \D.at (A.cod ?a) D\ \D.at ?a D\ using a D.functor_axioms D.functor_at_arr_is_transformation by simp interpret \_dom_a: limit_cone J B \D.at (A.dom ?a) D\ \?l (A.dom ?a)\ \?\ (A.dom ?a)\ using a l\ by simp interpret \_cod_a: limit_cone J B \D.at (A.cod ?a) D\ \?l (A.cod ?a)\ \?\ (A.cod ?a)\ using a l\ by simp interpret Dao\_dom_a: vertical_composite J B \_dom_a.A.map \D.at (A.dom ?a) D\ \D.at (A.cod ?a) D\ \?\ (A.dom ?a)\ \D.at ?a D\ .. interpret Dao\_dom_a: cone J B \D.at (A.cod ?a) D\ \?l (A.dom ?a)\ Dao\_dom_a.map .. show "?\' (JxA.cod ja) \\<^sub>B ?L' ja = B (Curry.uncurry D ja) (?\' (JxA.dom ja))" proof - have "?\' (JxA.cod ja) \\<^sub>B ?L' ja = ?\ (A.cod ?a) (J.cod ?j) \\<^sub>B ?L' ja" using ja by fastforce also have "... = D_cod_a.cones_map (?L' ja) (?\ (A.cod ?a)) (J.cod ?j)" using ja L'_arr_map [of ja] \_cod_a.cone_axioms by auto also have "... = Dao\_dom_a.map (J.cod ?j)" using ja \_cod_a.induced_arrowI Dao\_dom_a.cone_axioms L'_arr by presburger also have "... = D.at ?a D (J.cod ?j) \\<^sub>B D_dom_a.some_limit_cone (J.cod ?j)" using ja Dao\_dom_a.map_simp_ide by fastforce also have "... = D.at ?a D (J.cod ?j) \\<^sub>B D.at (A.dom ?a) D ?j \\<^sub>B ?\' (JxA.dom ja)" using ja \_dom_a.naturality \_dom_a.ide_apex apply simp by (metis B.comp_arr_ide \_dom_a.preserves_reflects_arr) also have "... = (D.at ?a D (J.cod ?j) \\<^sub>B D.at (A.dom ?a) D ?j) \\<^sub>B ?\' (JxA.dom ja)" using j ja B.comp_assoc by presburger also have "... = B (D.at ?a D ?j) (?\' (JxA.dom ja))" using a j ja Map_comp A.comp_arr_dom D.as_nat_trans.is_natural_2 by simp also have "... = Curry.uncurry D ja \\<^sub>B ?\' (JxA.dom ja)" using Curry.uncurry_def by simp finally show ?thesis by auto qed qed text\ Since \\'\ is constant on \J\, \curry \'\ is a cone over \D\. \ interpret constL: constant_functor J comp \MkIde ?L\ using L.as_nat_trans.natural_transformation_axioms MkArr_in_hom ide_in_hom L.functor_axioms by unfold_locales blast (* TODO: This seems a little too involved. 
*) have curry_L': "constL.map = Curry.curry ?L' ?L' ?L'" proof fix j have "\J.arr j \ constL.map j = Curry.curry ?L' ?L' ?L' j" using Curry.curry_def constL.is_extensional by simp moreover have "J.arr j \ constL.map j = Curry.curry ?L' ?L' ?L' j" using Curry.curry_def constL.value_is_ide in_homE ide_in_hom by auto ultimately show "constL.map j = Curry.curry ?L' ?L' ?L' j" by blast qed hence uncurry_constL: "Curry.uncurry constL.map = ?L'" using L'.as_nat_trans.natural_transformation_axioms Curry.uncurry_curry by simp interpret curry_\': natural_transformation J comp constL.map D \Curry.curry ?L' (Curry.uncurry D) \'.map\ proof - have "Curry.curry (Curry.uncurry D) (Curry.uncurry D) (Curry.uncurry D) = D" using Curry.curry_uncurry D.functor_axioms D.as_nat_trans.natural_transformation_axioms by blast thus "natural_transformation J comp constL.map D (Curry.curry ?L' (Curry.uncurry D) \'.map)" using Curry.curry_preserves_transformations curry_L' \'.natural_transformation_axioms by force qed interpret curry_\': cone J comp D \MkIde ?L\ \Curry.curry ?L' (Curry.uncurry D) \'.map\ .. text\ The value of \curry_\'\ at each object \a\ of \A\ is the limit cone \\ a\, hence \curry_\'\ is a limit cone. \ have 1: "\a. A.ide a \ D.at a (Curry.curry ?L' (Curry.uncurry D) \'.map) = ?\ a" proof - fix a assume a: "A.ide a" have "D.at a (Curry.curry ?L' (Curry.uncurry D) \'.map) = (\j. Curry.uncurry (Curry.curry ?L' (Curry.uncurry D) \'.map) (j, a))" using a by simp moreover have "... = (\j. \'.map (j, a))" using a Curry.uncurry_curry \'.natural_transformation_axioms by simp moreover have "... = ?\ a" proof (intro NaturalTransformation.eqI) interpret \a: limit_cone J B \D.at a D\ \?l a\ \?\ a\ using a l\ by simp interpret \': binary_functor_transformation J A B ?L' \Curry.uncurry D\ \'.map .. show "natural_transformation J B \a.A.map (D.at a D) (?\ a)" .. show "natural_transformation J B \a.A.map (D.at a D) (\j. \'.map (j, a))" proof - have "\a.A.map = (\j. ?L' (j, a))" using a \a.A.map_def L'_ide by auto thus ?thesis using a \'.fixing_ide_gives_natural_transformation_2 by simp qed fix j assume j: "J.ide j" show "\'.map (j, a) = ?\ a j" using a j \'.map_simp_ide by simp qed ultimately show "D.at a (Curry.curry ?L' (Curry.uncurry D) \'.map) = ?\ a" by simp qed hence 2: "\a. A.ide a \ diagram.limit_cone J B (D.at a D) (?l a) (D.at a (Curry.curry ?L' (Curry.uncurry D) \'.map))" using l\ by simp hence "limit_cone J comp D (MkIde ?L) (Curry.curry ?L' (Curry.uncurry D) \'.map)" using 1 2 L.functor_axioms L_ide curry_\'.cone_axioms curry_L' D.cone_is_limit_if_pointwise_limit by simp thus "\x \. limit_cone J comp D x \" by blast qed thus "\D. diagram J comp D \ (\x \. limit_cone J comp D x \)" by blast qed lemma has_limits_if_target_does: assumes "B.has_limits (undefined :: 'j)" shows "has_limits (undefined :: 'j)" using assms B.has_limits_def has_limits_def has_limits_of_shape_if_target_does by fast end section "The Yoneda Functor Preserves Limits" text\ In this section, we show that the Yoneda functor from \C\ to \[Cop, S]\ preserves limits. \ context yoneda_functor begin lemma preserves_limits: fixes J :: "'j comp" assumes "diagram J C D" and "diagram.has_as_limit J C D a" shows "diagram.has_as_limit J Cop_S.comp (map o D) (map a)" proof - text\ The basic idea of the proof is as follows: If \\\ is a limit cone in \C\, then for every object \a'\ of \Cop\ the evaluation of \Y o \\ at \a'\ is a limit cone in \S\. By the results on limits in functor categories, this implies that \Y o \\ is a limit cone in \[Cop, S]\. 
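To verify the pointwise limit property, a cone in S with a given apex is transported
back to a family of cones over D in C, one for each element of the set underlying the
apex; the universal property of the limit cone in C then yields, for each such element,
a unique mediating arrow, and these assemble into the required unique arrow of S.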
\ interpret J: category J using assms(1) diagram_def by auto interpret D: diagram J C D using assms(1) by auto from assms(2) obtain \ where \: "D.limit_cone a \" by blast interpret \: limit_cone J C D a \ using \ by auto have a: "C.ide a" using \.ide_apex by auto interpret YoD: diagram J Cop_S.comp \map o D\ using D.diagram_axioms functor_axioms preserves_diagrams [of J D] by simp interpret YoD: diagram_in_functor_category Cop.comp S J \map o D\ .. interpret Yo\: cone J Cop_S.comp \map o D\ \map a\ \map o \\ using \.cone_axioms preserves_cones by blast have "\a'. C.ide a' \ limit_cone J S (YoD.at a' (map o D)) (Cop_S.Map (map a) a') (YoD.at a' (map o \))" proof - fix a' assume a': "C.ide a'" interpret A': constant_functor J C a' using a' by (unfold_locales, auto) interpret YoD_a': diagram J S \YoD.at a' (map o D)\ using a' YoD.at_ide_is_diagram by simp interpret Yo\_a': cone J S \YoD.at a' (map o D)\ \Cop_S.Map (map a) a'\ \YoD.at a' (map o \)\ using a' YoD.cone_at_ide_is_cone Yo\.cone_axioms by fastforce have eval_at_ide: "\j. J.ide j \ YoD.at a' (map \ D) j = Hom.map (a', D j)" proof - fix j assume j: "J.ide j" have "YoD.at a' (map \ D) j = Cop_S.Map (map (D j)) a'" using a' j YoD.at_simp YoD.preserves_arr [of j] by auto also have "... = Y (D j) a'" using Y_def by simp also have "... = Hom.map (a', D j)" using a' j D.preserves_arr by simp finally show "YoD.at a' (map \ D) j = Hom.map (a', D j)" by auto qed have eval_at_arr: "\j. J.arr j \ YoD.at a' (map \ \) j = Hom.map (a', \ j)" proof - fix j assume j: "J.arr j" have "YoD.at a' (map \ \) j = Cop_S.Map ((map o \) j) a'" using a' j YoD.at_simp [of a' j "map o \"] preserves_arr by fastforce also have "... = Y (\ j) a'" using Y_def by simp also have "... = Hom.map (a', \ j)" using a' j by simp finally show "YoD.at a' (map \ \) j = Hom.map (a', \ j)" by auto qed have Fun_map_a_a': "Cop_S.Map (map a) a' = Hom.map (a', a)" using a a' map_simp preserves_arr [of a] by simp show "limit_cone J S (YoD.at a' (map o D)) (Cop_S.Map (map a) a') (YoD.at a' (map o \))" proof fix x \ assume \: "YoD_a'.cone x \" interpret \: cone J S \YoD.at a' (map o D)\ x \ using \ by auto have x: "S.ide x" using \.ide_apex by simp text\ For each object \j\ of \J\, the component \\ j\ is an arrow in \S.hom x (Hom.map (a', D j))\. Each element \e \ S.set x\ therefore determines an arrow \\ (a', D j) (S.Fun (\ j) e) \ C.hom a' (D j)\. These arrows are the components of a cone \\ e\ over @{term D} with apex @{term a'}. \ have \j: "\j. J.ide j \ \\ j : x \\<^sub>S Hom.map (a', D j)\" using eval_at_ide \.preserves_hom J.ide_in_hom by force have \: "\e. e \ S.set x \ transformation_by_components J C A'.map D (\j. \ (a', D j) (S.Fun (\ j) e))" proof - fix e assume e: "e \ S.set x" show "transformation_by_components J C A'.map D (\j. \ (a', D j) (S.Fun (\ j) e))" proof fix j assume j: "J.ide j" show "\\ (a', D j) (S.Fun (\ j) e) : A'.map j \ D j\" using e j S.Fun_mapsto [of "\ j"] A'.preserves_ide Hom.set_map eval_at_ide Hom.\_mapsto [of "A'.map j" "D j"] by force next fix j assume j: "J.arr j" show "\ (a', D (J.cod j)) (S.Fun (\ (J.cod j)) e) \ A'.map j = D j \ \ (a', D (J.dom j)) (S.Fun (\ (J.dom j)) e)" proof - have "\ (a', D (J.cod j)) (S.Fun (\ (J.cod j)) e) \ A'.map j = \ (a', D (J.cod j)) (S.Fun (\ (J.cod j)) e) \ a'" using A'.map_simp j by simp also have "... 
= \ (a', D (J.cod j)) (S.Fun (\ (J.cod j)) e)" proof - have "\ (a', D (J.cod j)) (S.Fun (\ (J.cod j)) e) \ C.hom a' (D (J.cod j))" using a' e j Hom.\_mapsto [of "A'.map j" "D (J.cod j)"] A'.map_simp S.Fun_mapsto [of "\ (J.cod j)"] Hom.set_map eval_at_ide by auto thus ?thesis using C.comp_arr_dom by fastforce qed also have "... = \ (a', D (J.cod j)) (S.Fun (Y (D j) a') (S.Fun (\ (J.dom j)) e))" proof - have "S.Fun (Y (D j) a') (S.Fun (\ (J.dom j)) e) = (S.Fun (Y (D j) a') o S.Fun (\ (J.dom j))) e" by simp also have "... = S.Fun (Y (D j) a' \\<^sub>S \ (J.dom j)) e" using a' e j Y_arr_ide(1) S.in_homE \j eval_at_ide S.Fun_comp by force also have "... = S.Fun (\ (J.cod j)) e" using a' j x \.is_natural_2 \.A.map_simp S.comp_arr_dom J.arr_cod_iff_arr J.cod_cod YoD.preserves_arr \.is_natural_1 YoD.at_simp by auto finally have "S.Fun (Y (D j) a') (S.Fun (\ (J.dom j)) e) = S.Fun (\ (J.cod j)) e" by auto thus ?thesis by simp qed also have "... = D j \ \ (a', D (J.dom j)) (S.Fun (\ (J.dom j)) e)" proof - have "S.Fun (Y (D j) a') (S.Fun (\ (J.dom j)) e) = \ (a', D (J.cod j)) (D j \ \ (a', D (J.dom j)) (S.Fun (\ (J.dom j)) e))" proof - have "S.Fun (\ (J.dom j)) e \ Hom.set (a', D (J.dom j))" using a' e j \j S.Fun_mapsto [of "\ (J.dom j)"] Hom.set_map YoD.at_simp eval_at_ide by auto moreover have "C.arr (\ (a', D (J.dom j)) (S.Fun (\ (J.dom j)) e)) \ C.dom (\ (a', D (J.dom j)) (S.Fun (\ (J.dom j)) e)) = a'" using a' e j \j S.Fun_mapsto [of "\ (J.dom j)"] Hom.set_map eval_at_ide Hom.\_mapsto [of a' "D (J.dom j)"] by auto ultimately show ?thesis using a' e j Hom.Fun_map C.comp_arr_dom by force qed moreover have "D j \ \ (a', D (J.dom j)) (S.Fun (\ (J.dom j)) e) \ C.hom a' (D (J.cod j))" proof - have "\ (a', D (J.dom j)) (S.Fun (\ (J.dom j)) e) \ C.hom a' (D (J.dom j))" using a' e j Hom.\_mapsto [of a' "D (J.dom j)"] eval_at_ide S.Fun_mapsto [of "\ (J.dom j)"] Hom.set_map by auto thus ?thesis using j D.preserves_hom by blast qed ultimately show ?thesis using a' j Hom.\_\ by simp qed finally show ?thesis by auto qed qed qed let ?\ = "\e. transformation_by_components.map J C A'.map (\j. \ (a', D j) (S.Fun (\ j) e))" have cone_\e: "\e. e \ S.set x \ D.cone a' (?\ e)" proof - fix e assume e: "e \ S.set x" interpret \e: transformation_by_components J C A'.map D \\j. \ (a', D j) (S.Fun (\ j) e)\ using e \ by blast show "D.cone a' (?\ e)" .. qed text\ Since \\ e\ is a cone for each element \e\ of \S.set x\, by the universal property of the limit cone \\\ there is a unique arrow \fe \ C.hom a' a\ that transforms \\\ to \\ e\. \ have ex_fe: "\e. e \ S.set x \ \!fe. \fe : a' \ a\ \ D.cones_map fe \ = ?\ e" using cone_\e \.is_universal by simp text\ The map taking \e \ S.set x\ to \fe \ C.hom a' a\ determines an arrow \f \ S.hom x (Hom (a', a))\ that transforms the cone obtained by evaluating \Y o \\ at \a'\ to the cone \\\. \ let ?f = "S.mkArr (S.set x) (Hom.set (a', a)) (\e. \ (a', a) (\.induced_arrow a' (?\ e)))" have 0: "(\e. \ (a', a) (\.induced_arrow a' (?\ e))) \ S.set x \ Hom.set (a', a)" proof fix e assume e: "e \ S.set x" interpret \e: cone J C D a' \?\ e\ using e cone_\e by simp have "\.induced_arrow a' (?\ e) \ C.hom a' a" using a a' e ex_fe \.induced_arrowI \e.cone_axioms by simp thus "\ (a', a) (\.induced_arrow a' (?\ e)) \ Hom.set (a', a)" using a a' Hom.\_mapsto by auto qed have f: "\?f : x \\<^sub>S Hom.map (a', a)\" proof - have "(\e. 
\ (a', a) (\.induced_arrow a' (?\ e))) \ S.set x \ Hom.set (a', a)" proof fix e assume e: "e \ S.set x" interpret \e: cone J C D a' \?\ e\ using e cone_\e by simp have "\.induced_arrow a' (?\ e) \ C.hom a' a" using a a' e ex_fe \.induced_arrowI \e.cone_axioms by simp thus "\ (a', a) (\.induced_arrow a' (?\ e)) \ Hom.set (a', a)" using a a' Hom.\_mapsto by auto qed moreover have "setp (Hom.set (a', a))" using a a' Hom.small_homs by (metis Fun_map_a_a' Hom.map_ide S.arr_mkIde S.ideD(1) Yo\_a'.ide_apex) ultimately show ?thesis using a a' x \.ide_apex S.mkArr_in_hom [of "S.set x" "Hom.set (a', a)"] Hom.set_subset_Univ S.mkIde_set by simp qed have "YoD_a'.cones_map ?f (YoD.at a' (map o \)) = \" proof (intro NaturalTransformation.eqI) show "natural_transformation J S \.A.map (YoD.at a' (map o D)) \" using \.natural_transformation_axioms by auto have 1: "S.cod ?f = Cop_S.Map (map a) a'" using f Fun_map_a_a' by force interpret YoD_a'of: cone J S \YoD.at a' (map o D)\ x \YoD_a'.cones_map ?f (YoD.at a' (map o \))\ proof - have "YoD_a'.cone (S.cod ?f) (YoD.at a' (map o \))" using a a' f Yo\_a'.cone_axioms preserves_arr [of a] by auto hence "YoD_a'.cone (S.dom ?f) (YoD_a'.cones_map ?f (YoD.at a' (map o \)))" using f YoD_a'.cones_map_mapsto S.arrI by blast thus "cone J S (YoD.at a' (map o D)) x (YoD_a'.cones_map ?f (YoD.at a' (map o \)))" using f by auto qed show "natural_transformation J S \.A.map (YoD.at a' (map o D)) (YoD_a'.cones_map ?f (YoD.at a' (map o \)))" .. fix j assume j: "J.ide j" have "YoD_a'.cones_map ?f (YoD.at a' (map o \)) j = YoD.at a' (map o \) j \\<^sub>S ?f" using f j Fun_map_a_a' Yo\_a'.cone_axioms by fastforce also have "... = \ j" proof (intro S.arr_eqI) show "S.par (YoD.at a' (map o \) j \\<^sub>S ?f) (\ j)" using 1 f j x YoD_a'.preserves_hom by fastforce show "S.Fun (YoD.at a' (map o \) j \\<^sub>S ?f) = S.Fun (\ j)" proof fix e have "e \ S.set x \ S.Fun (YoD.at a' (map o \) j \\<^sub>S ?f) e = S.Fun (\ j) e" using 1 f j x S.Fun_mapsto [of "\ j"] \.A.map_simp extensional_arb [of "S.Fun (\ j)"] by auto moreover have "e \ S.set x \ S.Fun (YoD.at a' (map o \) j \\<^sub>S ?f) e = S.Fun (\ j) e" proof - assume e: "e \ S.set x" interpret \e: transformation_by_components J C A'.map D \\j. \ (a', D j) (S.Fun (\ j) e)\ using e \ by blast interpret \e: cone J C D a' \?\ e\ using e cone_\e by simp have induced_arrow: "\.induced_arrow a' (?\ e) \ C.hom a' a" using a a' e ex_fe \.induced_arrowI \e.cone_axioms by simp have "S.Fun (YoD.at a' (map o \) j \\<^sub>S ?f) e = restrict (S.Fun (YoD.at a' (map o \) j) o S.Fun ?f) (S.set x) e" using 1 e f j S.Fun_comp YoD_a'.preserves_hom by force also have "... = (\ (a', D j) o C (\ j) o \ (a', a)) (S.Fun ?f e)" using j a' f e Hom.map_simp_2 S.Fun_mkArr Hom.preserves_arr [of "(a', \ j)"] eval_at_arr by (elim S.in_homE, auto) also have "... = (\ (a', D j) o C (\ j) o \ (a', a)) (\ (a', a) (\.induced_arrow a' (?\ e)))" using e f S.Fun_mkArr by fastforce also have "... = \ (a', D j) (D.cones_map (\.induced_arrow a' (?\ e)) \ j)" using a a' e j 0 Hom.\_\ induced_arrow \.cone_axioms by auto also have "... = \ (a', D j) (?\ e j)" using \.induced_arrowI \e.cone_axioms by fastforce also have "... = \ (a', D j) (\ (a', D j) (S.Fun (\ j) e))" using j \e.map_def [of j] by simp also have "... 
= S.Fun (\ j) e" proof - have "S.Fun (\ j) e \ Hom.set (a', D j)" using a' e j S.Fun_mapsto [of "\ j"] eval_at_ide Hom.set_map by auto thus ?thesis using a' j Hom.\_\ C.ide_in_hom J.ide_in_hom by blast qed finally show "S.Fun (YoD.at a' (map o \) j \\<^sub>S ?f) e = S.Fun (\ j) e" by auto qed ultimately show "S.Fun (YoD.at a' (map o \) j \\<^sub>S ?f) e = S.Fun (\ j) e" by auto qed qed finally show "YoD_a'.cones_map ?f (YoD.at a' (map o \)) j = \ j" by auto qed hence ff: "?f \ S.hom x (Hom.map (a', a)) \ YoD_a'.cones_map ?f (YoD.at a' (map o \)) = \" using f by auto text\ Any other arrow \f' \ S.hom x (Hom.map (a', a))\ that transforms the cone obtained by evaluating \Y o \\ at @{term a'} to the cone @{term \}, must equal \f\, showing that \f\ is unique. \ moreover have "\f'. \f' : x \\<^sub>S Hom.map (a', a)\ \ YoD_a'.cones_map f' (YoD.at a' (map o \)) = \ \ f' = ?f" proof - fix f' assume f': "\f' : x \\<^sub>S Hom.map (a', a)\ \ YoD_a'.cones_map f' (YoD.at a' (map o \)) = \" show "f' = ?f" proof (intro S.arr_eqI) show par: "S.par f' ?f" using f f' by (elim S.in_homE, auto) show "S.Fun f' = S.Fun ?f" proof fix e have "e \ S.set x \ S.Fun f' e = S.Fun ?f e" using f f' S.Fun_in_terms_of_comp by fastforce moreover have "e \ S.set x \ S.Fun f' e = S.Fun ?f e" proof - assume e: "e \ S.set x" have fe: "S.Fun ?f e \ Hom.set (a', a)" using e f par by auto have f'e: "S.Fun f' e \ Hom.set (a', a)" using a a' e f' S.Fun_mapsto Hom.set_map by fastforce have 1: "\\ (a', a) (S.Fun f' e) : a' \ a\" using a a' e f' f'e S.Fun_mapsto Hom.\_mapsto Hom.set_map by blast have 2: "\\ (a', a) (S.Fun ?f e) : a' \ a\" using a a' e f' fe S.Fun_mapsto Hom.\_mapsto Hom.set_map by blast interpret \ofe: cone J C D a' \D.cones_map (\ (a', a) (S.Fun ?f e)) \\ proof - have "D.cones_map (\ (a', a) (S.Fun ?f e)) \ D.cones a \ D.cones a'" using 2 D.cones_map_mapsto [of "\ (a', a) (S.Fun ?f e)"] by (elim C.in_homE, auto) thus "cone J C D a' (D.cones_map (\ (a', a) (S.Fun ?f e)) \)" using \.cone_axioms by blast qed have A: "\h j. h \ C.hom a' a \ J.arr j \ S.Fun (YoD.at a' (map o \) j) (\ (a', a) h) = \ (a', D (J.cod j)) (\ j \ h)" proof - fix h j assume j: "J.arr j" assume h: "h \ C.hom a' a" have "S.Fun (YoD.at a' (map o \) j) (\ (a', a) h) = (\ (a', D (J.cod j)) \ C (\ j) \ \ (a', a)) (\ (a', a) h)" proof - have "S.Fun (YoD.at a' (map o \) j) = restrict (\ (a', D (J.cod j)) \ C (\ j) \ \ (a', a)) (Hom.set (a', a))" proof - have "S.Fun (YoD.at a' (map o \) j) = S.Fun (Y (\ j) a')" using a' j YoD.at_simp Y_def Yo\.preserves_reflects_arr [of j] by simp also have "... = restrict (\ (a', D (J.cod j)) \ C (\ j) \ \ (a', a)) (Hom.set (a', a))" using a' j \.preserves_hom [of j "J.dom j" "J.cod j"] Y_arr_ide [of a' "\ j" a "D (J.cod j)"] \.A.map_simp S.Fun_mkArr by fastforce finally show ?thesis by blast qed thus ?thesis using a a' h Hom.\_mapsto by auto qed also have "... 
= \ (a', D (J.cod j)) (\ j \ h)" using a a' h Hom.\_\ by simp finally show "S.Fun (YoD.at a' (map o \) j) (\ (a', a) h) = \ (a', D (J.cod j)) (\ j \ h)" by auto qed have "D.cones_map (\ (a', a) (S.Fun f' e)) \ = D.cones_map (\ (a', a) (S.Fun ?f e)) \" proof fix j have "\J.arr j \ D.cones_map (\ (a', a) (S.Fun f' e)) \ j = D.cones_map (\ (a', a) (S.Fun ?f e)) \ j" using 1 2 \.cone_axioms by (elim C.in_homE, auto) moreover have "J.arr j \ D.cones_map (\ (a', a) (S.Fun f' e)) \ j = D.cones_map (\ (a', a) (S.Fun ?f e)) \ j" proof - assume j: "J.arr j" have "D.cones_map (\ (a', a) (S.Fun f' e)) \ j = \ j \ \ (a', a) (S.Fun f' e)" using j 1 \.cone_axioms by auto also have "... = \ (a', D (J.cod j)) (S.Fun (\ j) e)" proof - have "\ (a', D (J.cod j)) (S.Fun (YoD.at a' (map o \) j) (S.Fun f' e)) = \ (a', D (J.cod j)) (\ (a', D (J.cod j)) (\ j \ \ (a', a) (S.Fun f' e)))" using j a a' f'e A Hom.\_\ Hom.\_mapsto by force moreover have "\ j \ \ (a', a) (S.Fun f' e) \ C.hom a' (D (J.cod j))" using a a' j f'e Hom.\_mapsto \.preserves_hom [of j "J.dom j" "J.cod j"] \.A.map_simp by auto moreover have "S.Fun (YoD.at a' (map o \) j) (S.Fun f' e) = S.Fun (\ j) e" using Fun_map_a_a' a a' j f' e x Yo\_a'.A.map_simp eval_at_ide Yo\_a'.cone_axioms by auto ultimately show ?thesis using a a' Hom.\_\ by auto qed also have "... = \ j \ \ (a', a) (S.Fun ?f e)" proof - have "S.Fun (YoD.at a' (map o \) j) (S.Fun ?f e) = \ (a', D (J.cod j)) (\ j \ \ (a', a) (S.Fun ?f e))" using j a a' fe A [of "\ (a', a) (S.Fun ?f e)" j] Hom.\_\ Hom.\_mapsto by auto hence "\ (a', D (J.cod j)) (S.Fun (YoD.at a' (map o \) j) (S.Fun ?f e)) = \ (a', D (J.cod j)) (\ (a', D (J.cod j)) (\ j \ \ (a', a) (S.Fun ?f e)))" by simp moreover have "\ j \ \ (a', a) (S.Fun ?f e) \ C.hom a' (D (J.cod j))" using a a' j fe Hom.\_mapsto \.preserves_hom [of j "J.dom j" "J.cod j"] \.A.map_simp by auto moreover have "S.Fun (YoD.at a' (map o \) j) (S.Fun ?f e) = S.Fun (\ j) e" proof - have "S.Fun (YoD.at a' (map o \) j) (S.Fun ?f e) = (S.Fun (YoD.at a' (map o \) j) o S.Fun ?f) e" by simp also have "... = S.Fun (YoD.at a' (map o \) j \\<^sub>S ?f) e" using Fun_map_a_a' a a' j f e x Yo\_a'.A.map_simp eval_at_ide by auto also have "... = S.Fun (\ j) e" proof - have "YoD.at a' (map o \) j \\<^sub>S ?f = YoD_a'.cones_map ?f (YoD.at a' (map o \)) j" using j f Yo\_a'.cone_axioms Fun_map_a_a' by auto thus ?thesis using j ff by argo qed finally show ?thesis by auto qed ultimately show ?thesis using a a' Hom.\_\ by auto qed also have "... = D.cones_map (\ (a', a) (S.Fun ?f e)) \ j" using j 2 \.cone_axioms by force finally show "D.cones_map (\ (a', a) (S.Fun f' e)) \ j = D.cones_map (\ (a', a) (S.Fun ?f e)) \ j" by auto qed ultimately show "D.cones_map (\ (a', a) (S.Fun f' e)) \ j = D.cones_map (\ (a', a) (S.Fun ?f e)) \ j" by auto qed hence "\ (a', a) (S.Fun f' e) = \ (a', a) (S.Fun ?f e)" using 1 2 \ofe.cone_axioms \.cone_axioms \.is_universal by blast hence "\ (a', a) (\ (a', a) (S.Fun f' e)) = \ (a', a) (\ (a', a) (S.Fun ?f e))" by simp thus "S.Fun f' e = S.Fun ?f e" using a a' fe f'e Hom.\_\ by force qed ultimately show "S.Fun f' e = S.Fun ?f e" by auto qed qed qed ultimately have "\!f. \f : x \\<^sub>S Hom.map (a', a)\ \ YoD_a'.cones_map f (YoD.at a' (map o \)) = \" using ex1I [of "\f. S.in_hom x (Hom.map (a', a)) f \ YoD_a'.cones_map f (YoD.at a' (map o \)) = \"] by blast thus "\!f. 
\f : x \\<^sub>S Cop_S.Map (map a) a'\ \ YoD_a'.cones_map f (YoD.at a' (map o \)) = \" using a a' Y_def by simp qed qed thus "YoD.has_as_limit (map a)" using YoD.cone_is_limit_if_pointwise_limit Yo\.cone_axioms by auto qed end end diff --git a/thys/MonoidalCategory/CartesianMonoidalCategory.thy b/thys/MonoidalCategory/CartesianMonoidalCategory.thy --- a/thys/MonoidalCategory/CartesianMonoidalCategory.thy +++ b/thys/MonoidalCategory/CartesianMonoidalCategory.thy @@ -1,654 +1,657 @@ (* Title: CartesianMonoidalCategory Author: Eugene W. Stark , 2020 Maintainer: Eugene W. Stark *) chapter "Cartesian Monoidal Category" theory CartesianMonoidalCategory imports MonoidalCategory Category3.CartesianCategory begin locale symmetric_monoidal_category = monoidal_category C T \ \ + S: symmetry_functor C C + ToS: composite_functor CC.comp CC.comp C S.map T + \: natural_isomorphism CC.comp C T ToS.map \ for C :: "'a comp" (infixr "\" 55) and T :: "'a * 'a \ 'a" and \ :: "'a * 'a * 'a \ 'a" and \ :: 'a and \ :: "'a * 'a \ 'a" + assumes sym_inverse: "\ ide a; ide b \ \ inverse_arrows (\ (a, b)) (\ (b, a))" and unitor_coherence: "ide a \ \[a] \ \ (a, \) = \[a]" and assoc_coherence: "\ ide a; ide b; ide c \ \ \ (b, c, a) \ \ (a, b \ c) \ \ (a, b, c) = (b \ \ (a, c)) \ \ (b, a, c) \ (\ (a, b) \ c)" begin abbreviation sym ("\[_, _]") where "sym a b \ \ (a, b)" end locale elementary_symmetric_monoidal_category = elementary_monoidal_category C tensor unity lunit runit assoc for C :: "'a comp" (infixr "\" 55) and tensor :: "'a \ 'a \ 'a" (infixr "\" 53) and unity :: 'a ("\") and lunit :: "'a \ 'a" ("\[_]") and runit :: "'a \ 'a" ("\[_]") and assoc :: "'a \ 'a \ 'a \ 'a" ("\[_, _, _]") and sym :: "'a \ 'a \ 'a" ("\[_, _]") + assumes sym_in_hom: "\ ide a; ide b \ \ \\[a, b] : a \ b \ b \ a\" and sym_naturality: "\ arr f; arr g \ \ \[cod f, cod g] \ (f \ g) = (g \ f) \ \[dom f, dom g]" and sym_inverse: "\ ide a; ide b \ \ inverse_arrows \[a, b] \[b, a]" and unitor_coherence: "ide a \ \[a] \ \[a, \] = \[a]" and assoc_coherence: "\ ide a; ide b; ide c \ \ \[b, c, a] \ \[a, b \ c] \ \[a, b, c] = (b \ \[a, c]) \ \[b, a, c] \ (\[a, b] \ c)" begin lemma sym_simps [simp]: assumes "ide a" and "ide b" shows "arr \[a, b]" and "dom \[a, b] = a \ b" and "cod \[a, b] = b \ a" using assms sym_in_hom by auto interpretation monoidal_category C T \ \ using induces_monoidal_category by simp interpretation CC: product_category C C .. interpretation S: symmetry_functor C C .. interpretation ToS: composite_functor CC.comp CC.comp C S.map T .. definition \ :: "'a * 'a \ 'a" where "\ f \ if CC.arr f then \[cod (fst f), cod (snd f)] \ (fst f \ snd f) else null" interpretation \: natural_isomorphism CC.comp C T ToS.map \ proof - interpret \: transformation_by_components CC.comp C T ToS.map "\a. \[fst a, snd a]" apply unfold_locales using sym_in_hom sym_naturality by auto interpret \: natural_isomorphism CC.comp C T ToS.map \.map apply unfold_locales using sym_inverse \.map_simp_ide by auto have "\ = \.map" using \_def \.map_def sym_naturality by fastforce thus "natural_isomorphism CC.comp C T ToS.map \" using \.natural_isomorphism_axioms by presburger qed interpretation symmetric_monoidal_category C T \ \ \ proof show "\a b. 
\ ide a; ide b \ \ inverse_arrows (\ (a, b)) (\ (b, a))" proof - fix a b assume a: "ide a" and b: "ide b" show "inverse_arrows (\ (a, b)) (\ (b, a))" using a b sym_inverse comp_arr_dom \_def by auto qed (* * TODO: Here just using "lunit" refers to the locale parameter, rather than * the constant introduced by the interpretation above of monoidal_category. * This is slightly mysterious. *) show "\a. ide a \ local.lunit a \ \ (a, local.unity) = local.runit a" proof - fix a assume a: "ide a" show "local.lunit a \ \ (a, local.unity) = local.runit a" using a lunit_agreement \_agreement sym_in_hom comp_arr_dom [of "\[a, \]"] unitor_coherence runit_agreement \_def by simp qed show "\a b c. \ ide a; ide b; ide c \ \ local.assoc b c a \ \ (a, local.tensor b c) \ local.assoc a b c = local.tensor b (\ (a, c)) \ local.assoc b a c \ local.tensor (\ (a, b)) c" proof - fix a b c assume a: "ide a" and b: "ide b" and c: "ide c" show "local.assoc b c a \ \ (a, local.tensor b c) \ local.assoc a b c = local.tensor b (\ (a, c)) \ local.assoc b a c \ local.tensor (\ (a, b)) c" using a b c sym_in_hom tensor_preserves_ide \_def assoc_coherence comp_arr_dom comp_cod_arr by simp qed qed lemma induces_symmetric_monoidal_category: shows "symmetric_monoidal_category C T \ \ \" .. end context symmetric_monoidal_category begin interpretation elementary_monoidal_category C tensor unity lunit runit assoc using induces_elementary_monoidal_category by auto lemma induces_elementary_symmetric_monoidal_category: shows "elementary_symmetric_monoidal_category C tensor unity lunit runit assoc (\a b. \ (a, b))" using \.naturality unitor_coherence assoc_coherence sym_inverse by unfold_locales auto end (* TODO: This definition of "diagonal_functor" conflicts with the one in Category3.Limit. *) locale diagonal_functor = C: category C + CC: product_category C C for C :: "'a comp" begin abbreviation map where "map f \ if C.arr f then (f, f) else CC.null" lemma is_functor: shows "functor C CC.comp map" using map_def by unfold_locales auto sublocale "functor" C CC.comp map using is_functor by simp end locale cartesian_monoidal_category = monoidal_category C T \ \ + \: constant_functor C C \ + \: diagonal_functor C + \: natural_transformation C C map \.map \ + \: natural_transformation C C map \T o \.map\ \ for C :: "'a comp" (infixr "\" 55) and T :: "'a * 'a \ 'a" and \ :: "'a * 'a * 'a \ 'a" and \ :: 'a and \ :: "'a \ 'a" ("\[_]") and \ :: "'a \ 'a" ("\[_]") + assumes trm_unity: "\[\] = \" and pr0_dup: "ide a \ \[a] \ (a \ \[a]) \ \ a = a" and pr1_dup: "ide a \ \[a] \ (\[a] \ a) \ \ a = a" and tuple_pr: "\ ide a; ide b \ \ (\[a] \ (a \ \[b]) \ \[b] \ (\[a] \ b)) \ \[a \ b] = a \ b" locale elementary_cartesian_monoidal_category = elementary_monoidal_category C tensor unity lunit runit assoc for C :: "'a comp" (infixr "\" 55) and tensor :: "'a \ 'a \ 'a" (infixr "\" 53) and unity :: 'a ("\") and lunit :: "'a \ 'a" ("\[_]") and runit :: "'a \ 'a" ("\[_]") and assoc :: "'a \ 'a \ 'a \ 'a" ("\[_, _, _]") and trm :: "'a \ 'a" ("\[_]") and dup :: "'a \ 'a" ("\[_]") + assumes trm_in_hom [intro]: "ide a \ \\[a] : a \ \\" and trm_unity: "\[\] = \" and trm_naturality: "arr f \ \[cod f] \ f = \[dom f]" and dup_in_hom [intro]: "ide a \ \\[a] : a \ a \ a\" and dup_naturality: "arr f \ \[cod f] \ f = (f \ f) \ \[dom f]" and prj0_dup: "ide a \ \[a] \ (a \ \[a]) \ \[a] = a" and prj1_dup: "ide a \ \[a] \ (\[a] \ a) \ \[a] = a" and tuple_prj: "\ ide a; ide b \ \ (\[a] \ (a \ \[b]) \ \[b] \ (\[a] \ b)) \ \[a \ b] = a \ b" context 
cartesian_monoidal_category begin lemma terminal_unity: shows "terminal \" proof show "ide \" by simp show "\a. ide a \ \!f. \f : a \ \\" proof fix a assume a: "ide a" show "\\ a : a \ \\" using a by auto show "\f. \f : a \ \\ \ f = \ a" using trm_unity \.naturality comp_cod_arr by fastforce qed qed lemma trm_is_terminal_arr: assumes "ide a" shows "terminal_arr \[a]" using assms terminal_unity by simp interpretation elementary_monoidal_category C tensor unity lunit runit assoc using induces_elementary_monoidal_category by simp interpretation elementary_cartesian_monoidal_category C tensor unity lunit runit assoc \ \ proof show "\a. ide a \ \\[a] : a \ \\" using \_in_hom by force show "\[\] = \" using \.preserves_hom \_in_hom ide_unity trm_is_terminal_arr terminal_unity by (intro terminal_arr_unique) auto show "\f. arr f \ \[cod f] \ f = \[dom f]" using \.naturality comp_cod_arr by simp show "\a. ide a \ \\[a] : a \ a \ a\" by auto show "\f. arr f \ \[cod f] \ f = (f \ f) \ \[dom f]" using \.naturality by simp show "\a. ide a \ \[a] \ (\[a] \ a) \ \[a] = a" using pr1_dup lunit_in_hom by simp show "\a. ide a \ \[a] \ (a \ \[a]) \ \[a] = a" using pr0_dup runit_in_hom by simp show "\a0 a1. \ ide a0; ide a1 \ \ (\[a0] \ (a0 \ \[a1]) \ \[a1] \ (\[a0] \ a1)) \ \[a0 \ a1] = a0 \ a1" using tuple_pr by simp qed lemma induces_elementary_cartesian_monoidal_category: shows "elementary_cartesian_monoidal_category C tensor \ lunit runit assoc \ \" .. end context elementary_cartesian_monoidal_category begin lemma trm_simps [simp]: assumes "ide a" shows "arr \[a]" and "dom \[a] = a" and "cod \[a] = \" using assms trm_in_hom by auto lemma dup_simps [simp]: assumes "ide a" shows "arr \[a]" and "dom \[a] = a" and "cod \[a] = a \ a" using assms dup_in_hom by auto definition \ :: "'a \ 'a" where "\ f \ if arr f then \[dom f] else null" definition \ :: "'a \ 'a" where "\ f \ if arr f then \[cod f] \ f else null" interpretation CC: product_category C C .. interpretation MC: monoidal_category C T \ \ using induces_monoidal_category by auto interpretation I: constant_functor C C MC.unity by unfold_locales auto interpretation \: diagonal_functor C .. interpretation D: composite_functor C CC.comp C \.map T .. interpretation \: natural_transformation C C map I.map \ using trm_naturality I.map_def \_def \_agreement comp_cod_arr by unfold_locales auto interpretation \: natural_transformation C C map D.map \ using dup_naturality \_def comp_arr_dom by unfold_locales auto interpretation MC: cartesian_monoidal_category C T \ \ \ \ proof show "\ MC.unity = MC.unity" using \_agreement trm_unity \_def by simp show "\a. ide a \ MC.runit a \ MC.tensor a (\ a) \ \ a = a" using runit_agreement \_def \_def prj0_dup comp_arr_dom by auto show "\a. ide a \ MC.lunit a \ MC.tensor (\ a) a \ \ a = a" using lunit_agreement \_def \_def prj1_dup comp_arr_dom by auto show "\a b. 
\ ide a; ide b \ \ MC.tensor (MC.runit a \ MC.tensor a (\ b)) (MC.lunit b \ MC.tensor (\ a) b) \ \ (MC.tensor a b) = MC.tensor a b" proof - fix a b assume a: "ide a" and b: "ide b" have "seq \[a] (a \ \[b])" by (metis a b arr_tensor cod_tensor ide_char in_homE runit_in_hom seqI trm_simps(1,3)) moreover have "seq \[b] (\[a] \ b)" by (metis a b arr_tensor cod_tensor ide_char in_homE lunit_in_hom seqI trm_simps(1,3)) ultimately show "MC.tensor (MC.runit a \ MC.tensor a (\ b)) (MC.lunit b \ MC.tensor (\ a) b) \ \ (MC.tensor a b) = MC.tensor a b" using a b lunit_agreement runit_agreement unitor_coincidence \_def \_def comp_arr_dom tensor_preserves_ide tuple_prj T_def by auto qed qed lemma induces_cartesian_monoidal_category: shows "cartesian_monoidal_category C T \ \ \ \" .. end text \ A cartesian category extends to a a cartesian monoidal category by using the product structure to obtain the various canonical maps. \ context cartesian_category begin interpretation C: elementary_cartesian_category C pr0 pr1 \ trm using extends_to_elementary_cartesian_category by simp interpretation CC: product_category C C .. interpretation CCC: product_category C CC.comp .. interpretation T: binary_functor C C C Prod using binary_functor_Prod by simp interpretation T: binary_endofunctor C Prod .. interpretation ToTC: "functor" CCC.comp C T.ToTC using T.functor_ToTC by auto interpretation ToCT: "functor" CCC.comp C T.ToCT using T.functor_ToCT by auto interpretation \: transformation_by_components CCC.comp C T.ToTC T.ToCT \\abc. assoc (fst abc) (fst (snd abc)) (snd (snd abc))\ proof show "\abc. CCC.ide abc \ \assoc (fst abc) (fst (snd abc)) (snd (snd abc)) : T.ToTC abc \ T.ToCT abc\" using CCC.ide_char CC.ide_char CCC.arr_char CC.arr_char T.ToTC_def T.ToCT_def by auto show "\f. CCC.arr f \ assoc (fst (CCC.cod f)) (fst (snd (CCC.cod f))) (snd (snd (CCC.cod f))) \ T.ToTC f = T.ToCT f \ assoc (fst (CCC.dom f)) (fst (snd (CCC.dom f))) (snd (snd (CCC.dom f)))" using CCC.arr_char CC.arr_char CCC.dom_char CCC.cod_char T.ToTC_def T.ToCT_def assoc_naturality by simp blast qed + abbreviation \ + where "\ \ \.map" + interpretation \: natural_isomorphism CCC.comp C T.ToTC T.ToCT \.map proof show "\a. CCC.ide a \ iso (\.map a)" using CCC.ide_char CC.ide_char \.map_simp_ide inverse_arrows_assoc by auto qed interpretation L: "functor" C C \\f. Prod (cod \, f)\ using \_is_terminal_arr T.fixing_ide_gives_functor_1 by simp interpretation L: endofunctor C \\f. Prod (cod \, f)\ .. interpretation \: transformation_by_components C C \\f. Prod (cod \, f)\ map \\a. pr0 (cod \) a\ using \_is_terminal_arr by unfold_locales auto interpretation \: natural_isomorphism C C \\f. Prod (cod \, f)\ map \.map using \.map_simp_ide inverse_arrows_lunit ide_some_terminal by unfold_locales auto interpretation L: equivalence_functor C C \\f. Prod (cod \, f)\ using \.natural_isomorphism_axioms naturally_isomorphic_def L.isomorphic_to_identity_is_equivalence by blast interpretation R: "functor" C C \\f. Prod (f, cod \)\ using \_is_terminal_arr T.fixing_ide_gives_functor_2 by simp interpretation R: endofunctor C\\f. Prod (f, cod \)\ .. interpretation \: transformation_by_components C C \\f. Prod (f, cod \)\ map \\a. pr1 a (cod \)\ using \_is_terminal_arr by unfold_locales auto interpretation \: natural_isomorphism C C \\f. Prod (f, cod \)\ map \.map using \.map_simp_ide inverse_arrows_runit ide_some_terminal by unfold_locales auto interpretation R: equivalence_functor C C \\f. 
Prod (f, cod \)\ using \.natural_isomorphism_axioms naturally_isomorphic_def R.isomorphic_to_identity_is_equivalence by blast interpretation C: monoidal_category C Prod \.map \ using ide_some_terminal \_is_iso pentagon comp_assoc by unfold_locales auto interpretation \: constant_functor C C C.unity using C.ide_unity by unfold_locales auto interpretation \: natural_transformation C C map \.map trm using C.unity_def \.map_def ide_some_terminal trm_naturality comp_cod_arr trm_in_hom apply unfold_locales using trm_def apply auto[1] apply fastforce apply fastforce apply (metis in_homE trm_eqI trm_in_hom cod_pr0 dom_dom) by (metis trm_eqI trm_in_hom dom_dom map_simp) interpretation \: "functor" C CC.comp Diag using functor_Diag by simp interpretation \o\: composite_functor C CC.comp C Diag Prod .. interpretation natural_transformation C C map \Prod o Diag\ dup using dup_is_natural_transformation by simp lemma unity_agreement: shows "C.unity = \" using C.unity_def ide_some_terminal by simp lemma assoc_agreement: assumes "ide a" and "ide b" and "ide c" shows "C.assoc a b c = assoc a b c" using assms assoc_def \.map_simp_ide by simp lemma assoc'_agreement: assumes "ide a" and "ide b" and "ide c" shows "C.assoc' a b c = assoc' a b c" using assms inverse_arrows_assoc inverse_unique by auto lemma runit_char_eqn: assumes "ide a" shows "prod (runit a) \ = prod a \ \ assoc a \ \" using assms ide_one assoc_def comp_assoc prod_tuple comp_cod_arr by (intro pr_joint_monic [of a \ "prod (runit a) \" "prod a \ \ assoc a \ \"]) auto lemma runit_agreement: assumes "ide a" shows "runit a = C.runit a" using assms unity_agreement assoc_agreement C.runit_char(2) runit_char_eqn ide_some_terminal by (intro C.runit_eqI) auto lemma lunit_char_eqn: assumes "ide a" shows "prod \ (lunit a) = prod \ a \ assoc' \ \ a" proof (intro pr_joint_monic [of \ a "prod \ (lunit a)" "prod \ a \ assoc' \ \ a"]) show "seq (lunit a) (local.prod \ (lunit a))" using assms ide_one by simp show "lunit a \ prod \ (lunit a) = lunit a \ prod \ a \ assoc' \ \ a" using assms ide_one assoc'_def comp_assoc prod_tuple comp_cod_arr by simp show "pr1 \ a \ prod \ (lunit a) = pr1 \ a \ prod \ a \ assoc' \ \ a" using assms ide_one assoc'_def comp_cod_arr prod_tuple pr_naturality apply simp by (metis trm_eqI cod_pr0 cod_pr1 comp_in_homI' ide_prod pr_simps(1,3-6) pr1_in_hom') qed lemma lunit_agreement: assumes "ide a" shows "lunit a = C.lunit a" using assms unity_agreement assoc'_agreement C.lunit_char(2) lunit_char_eqn ide_some_terminal by (intro C.lunit_eqI) auto interpretation C: cartesian_monoidal_category C Prod \.map \ dup trm proof show "trm C.unity = C.unity" by (simp add: C.unity_def ide_some_terminal trm_one) show "\a. ide a \ C.runit a \ C.tensor a \[a] \ dup a = a" using comp_runit_term_dup runit_agreement by simp show "\a. ide a \ C.lunit a \ C.tensor \[a] a \ dup a = a" using comp_lunit_term_dup lunit_agreement by auto show "\a b. \ide a; ide b\ \ C.tensor (C.runit a \ C.tensor a \[b]) (C.lunit b \ C.tensor \[a] b) \ dup (C.tensor a b) = C.tensor a b" proof - fix a b assume a: "ide a" and b: "ide b" have "C.tensor (C.runit a \ C.tensor a \[b]) (C.lunit b \ C.tensor \[a] b) \ dup (C.tensor a b) = prod (C.runit a \ prod a \[b]) (C.lunit b \ prod \[a] b) \ dup (prod a b)" using a b by simp also have "... = tuple ((C.runit a \ prod a \[b]) \ prod a b) ((C.lunit b \ prod \[a] b) \ prod a b)" using a b ide_one trm_in_hom [of a] trm_in_hom [of b] unity_agreement prod_tuple by fastforce also have "... 
= tuple (C.runit a \ prod a \[b] \ prod a b) (C.lunit b \ prod \[a] b \ prod a b)" using comp_assoc by simp also have "... = tuple (C.runit a \ prod a \[b]) (C.lunit b \ prod \[a] b)" using a b comp_arr_dom by simp also have "... = tuple (runit a \ prod a \[b]) (lunit b \ prod \[a] b)" using a b lunit_agreement runit_agreement by simp also have "... = tuple (pr1 a b) (pr0 a b)" proof - have "runit a \ prod a \[b] = pr1 a b" using a b pr_naturality(2) trm_in_hom [of b] by (metis cod_pr1 comp_ide_arr ide_char in_homE pr_simps(4,6) seqI) moreover have "lunit b \ prod \[a] b = pr0 a b" using a b pr_naturality(1) [of b b b "\[a]" a \] trm_in_hom [of a] comp_cod_arr by (metis cod_pr0 ide_char in_homE pr_simps(1)) ultimately show ?thesis by simp qed also have "... = prod a b" using a b by simp finally show "C.tensor (C.runit a \ C.tensor a \[b]) (C.lunit b \ C.tensor \[a] b) \ dup (C.tensor a b) = C.tensor a b" by auto qed qed lemma extends_to_cartesian_monoidal_category: shows "cartesian_monoidal_category C Prod \.map \ dup trm" .. end text \ In a \cartesian_monoidal_category\, the monoidal structure is given by a categorical product and terminal object, so that the underlying category is cartesian. \ context cartesian_monoidal_category begin definition pr0 ("\
\<^sub>0[_, _]") where "\
\<^sub>0[a, b] \ if ide a \ ide b then \[a] \ (a \ \[b]) else null" definition pr1 ("\
\<^sub>1[_, _]") where "\
\<^sub>1[a, b] \ if ide a \ ide b then \[b] \ (\[a] \ b) else null" lemma pr_in_hom [intro]: assumes "ide a0" and "ide a1" shows "\\
\<^sub>0[a0, a1] : a0 \ a1 \ a0\" and "\\
\<^sub>1[a0, a1] : a0 \ a1 \ a1\" proof - show "\\
\<^sub>0[a0, a1] : a0 \ a1 \ a0\" unfolding pr0_def using assms runit_in_hom by fastforce show "\\
\<^sub>1[a0, a1] : a0 \ a1 \ a1\" unfolding pr1_def using assms lunit_in_hom by fastforce qed lemma pr_simps [simp]: assumes "ide a0" and "ide a1" shows "arr \
\<^sub>0[a0, a1]" and "dom \
\<^sub>0[a0, a1] = a0 \ a1" and "cod \
\<^sub>0[a0, a1] = a0" and "arr \
\<^sub>1[a0, a1]" and "dom \
\<^sub>1[a0, a1] = a0 \ a1" and "cod \
\<^sub>1[a0, a1] = a1" using assms pr_in_hom(1-2) by blast+ interpretation P: composite_functor CC.comp C CC.comp T \.map .. interpretation ECC: elementary_cartesian_category C pr1 pr0 \ \ proof show "\a b. \ide a; ide b\ \ span \
\<^sub>0[a, b] \
\<^sub>1[a, b]" by simp show "\a b. \ide a; ide b\ \ cod \
\<^sub>1[a, b] = b" by simp show "\a b. \ide a; ide b\ \ cod \
\<^sub>0[a, b] = a" by simp show "ide \" by simp show "\a. ide a \ \\ a : a \ \\" by auto show "\a f. \ide a; \f : a \ \\\ \ f = \[a]" using \\a. ide a \ \\[a] : a \ \\\ terminalE terminal_unity by blast show "\a b. \ (ide a \ ide b) \ \
\<^sub>1[a, b] = null" using pr1_def by auto show "\a b. \ (ide a \ ide b) \ \
\<^sub>0[a, b] = null" using pr0_def by auto show "\f g. span f g \ \!l. \
\<^sub>0[cod f, cod g] \ l = f \ \
\<^sub>1[cod f, cod g] \ l = g" proof - fix f g assume fg: "span f g" let ?l = "(f \ g) \ \[dom f]" have "\
\<^sub>0[cod f, cod g] \ ?l = f" proof - have "\
\<^sub>0[cod f, cod g] \ ?l = (\[cod f] \ (cod f \ \[cod g])) \ (f \ g) \ \[dom f]" using fg pr0_def by simp also have "... = \[cod f] \ ((cod f \ \[cod g]) \ (f \ g)) \ \[dom f]" using comp_assoc by simp also have "... = \[cod f] \ (f \ \[dom f]) \ \[dom f]" using fg interchange comp_cod_arr \.naturality by simp also have "... = \[cod f] \ ((cod f \ \[cod f]) \ (f \ f)) \ \[dom f]" using fg interchange comp_cod_arr \.naturality by simp also have "... = \[cod f] \ (cod f \ \[cod f]) \ (f \ f) \ \[dom f]" using comp_assoc by simp also have "... = (\[cod f] \ (cod f \ \[cod f]) \ \[cod f]) \ f" using fg \.naturality comp_assoc by simp also have "... = f" using fg pr0_dup comp_cod_arr by simp finally show ?thesis by blast qed moreover have "\
\<^sub>1[cod f, cod g] \ ?l = g" proof - have "\
\<^sub>1[cod f, cod g] \ ?l = \[cod g] \ ((\[cod f] \ cod g) \ (f \ g)) \ \[dom g]" using fg pr1_def comp_assoc by simp also have "... = \[cod g] \ (\[dom f] \ g) \ \[dom g]" using fg interchange comp_cod_arr \.naturality by simp also have "... = \[cod g] \ ((\[cod g] \ cod g) \ (g \ g)) \ \[dom g]" using fg interchange comp_cod_arr \.naturality by simp also have "... = \[cod g] \ (\[cod g] \ cod g) \ (g \ g) \ \[dom g]" using comp_assoc by simp also have "... = (\[cod g] \ (\[cod g] \ cod g) \ \[cod g]) \ g" using fg \.naturality comp_assoc by simp also have "... = g" using fg pr1_dup comp_cod_arr by simp finally show ?thesis by blast qed moreover have "\l. \ \
\<^sub>0[cod f, cod g] \ l = f; \
\<^sub>1[cod f, cod g] \ l = g \ \ l = ?l" proof - fix l assume f: "\
\<^sub>0[cod f, cod g] \ l = f" and g: "\
\<^sub>1[cod f, cod g] \ l = g" have l: "\l : dom f \ cod f \ cod g\" using f g fg by (metis arr_iff_in_hom dom_comp ide_cod pr_simps(5) seqE) have "?l = (\
\<^sub>0[cod f, cod g] \ l \ \
\<^sub>1[cod f, cod g] \ l) \ \[dom f]" using f g by simp also have "... = ((\
\<^sub>0[cod f, cod g] \ \
\<^sub>1[cod f, cod g]) \ (l \ l)) \ \[dom f]" using fg f g interchange by simp also have "... = (\
\<^sub>0[cod f, cod g] \ \
\<^sub>1[cod f, cod g]) \ (l \ l) \ \[dom f]" using comp_assoc by simp also have "... = ((\
\<^sub>0[cod f, cod g] \ \
\<^sub>1[cod f, cod g]) \ \[cod f \ cod g]) \ l" using l \.naturality [of l] comp_assoc by auto also have "... = (cod f \ cod g) \ l" using f g fg tuple_pr pr0_def pr1_def by auto also have "... = l" using l comp_cod_arr by auto finally show "l = ?l" by simp qed ultimately show "\!l. \
\<^sub>0[cod f, cod g] \ l = f \ \
\<^sub>1[cod f, cod g] \ l = g" by auto qed qed lemma extends_to_elementary_cartesian_category: shows "elementary_cartesian_category C pr1 pr0 \ \" .. lemma is_cartesian_category: shows "cartesian_category C" using ECC.is_cartesian_category by simp end (* * TODO: I would like to have coherence theorems for symmetric monoidal and cartesian * monoidal categories here, but I haven't yet figured out a suitably economical way * to extend the existing result. *) end diff --git a/thys/ResiduatedTransitionSystem/LambdaCalculus.thy b/thys/ResiduatedTransitionSystem/LambdaCalculus.thy --- a/thys/ResiduatedTransitionSystem/LambdaCalculus.thy +++ b/thys/ResiduatedTransitionSystem/LambdaCalculus.thy @@ -1,10903 +1,10924 @@ chapter "The Lambda Calculus" text \ In this second part of the article, we apply the residuated transition system framework developed in the first part to the theory of reductions in Church's \\\-calculus. The underlying idea is to exhibit \\\-terms as states (identities) of an RTS, with reduction steps as non-identity transitions. We represent both states and transitions in a unified, variable-free syntax based on de Bruijn indices. A difficulty one faces in regarding the \\\-calculus as an RTS is that ``elementary reductions'', in which just one redex is contracted, are not preserved by residuation: an elementary reduction can have zero or more residuals along another elementary reduction. However, ``parallel reductions'', which permit the contraction of multiple redexes existing in a term to be contracted in a single step, are preserved by residuation. For this reason, in our syntax each term represents a parallel reduction of zero or more redexes; a parallel reduction of zero redexes representing an identity. We have syntactic constructors for variables, \\\-abstractions, and applications. An additional constructor represents a \\\-redex that has been marked for contraction. This is a slightly different approach that that taken by other authors (\emph{e.g.}~\cite{barendregt} or \cite{huet-residual-theory}), in which it is the application constructor that is marked to indicate a redex to be contracted, but it seems more natural in the present setting in which a single syntax is used to represent both terms and reductions. Once the syntax has been defined, we define the residuation operation and prove that it satisfies the conditions for a weakly extensional RTS. In this RTS, the source of a term is obtained by ``erasing'' the markings on redexes, leaving an identity term. The target of a term is the contractum of the parallel reduction it represents. As the definition of residuation involves the use of substitution, a necessary prerequisite is to develop the theory of substitution using de Bruijn indices. In addition, various properties concerning the commutation of residuation and substitution have to be proved. This part of the work has benefited greatly from previous work of Huet \cite{huet-residual-theory}, in which the theory of residuation was formalized in the proof assistant Coq. In particular, it was very helpful to have already available known-correct statements of various lemmas regarding indices, substitution, and residuation. The development of the theory culminates in the proof of L\'{e}vy's ``Cube Lemma'' \cite{levy}, which is the key axiom in the definition of RTS. 
Once reductions in the \\\-calculus have been cast as transitions of an RTS, we are able to take advantage of generic results already proved for RTS's; in particular, the construction of the RTS of paths, which represent reduction sequences. Very little additional effort is required at this point to prove the Church-Rosser Theorem. Then, after proving a series of miscellaneous lemmas about reduction paths, we turn to the study of developments. A development of a term is a reduction path from that term in which the only redexes that are contracted are those that are residuals of redexes in the original term. We prove the Finite Developments Theorem: all developments are finite. The proof given here follows that given by de Vrijer \cite{deVrijer}, except that here we make the adaptations necessary for a syntax based on de Bruijn indices, rather than the classical named-variable syntax used by de Vrijer. Using the Finite Developments Theorem, we define a function that takes a term and constructs a ``complete development'' of that term, which is a development in which no residuals of original redexes remain to be contracted. We then turn our attention to ``standard reduction paths'', which are reduction paths in which redexes are contracted in a left-to-right order, perhaps with some skips. After giving a definition of standard reduction paths, we define a function that takes a term and constructs a complete development that is also standard. Using this function as a base case, we then define a function that takes an arbitrary parallel reduction path and transforms it into a standard reduction path that is congruent to the given path. The algorithm used is roughly analogous to insertion sort. We use this function to prove strong form of the Standardization Theorem: every reduction path is congruent to a standard reduction path. As a corollary of the Standardization Theorem, we prove the Leftmost Reduction Theorem: leftmost reduction is a normalizing reduction strategy. It should be noted that, in this article, we consider only the \\\\-calculus. In the early stages of this work, I made an exploratory attempt to incorporate \\\-reduction as well, but after encountering some unanticipated difficulties I decided not to attempt that extension until the \\\-only case had been well-developed. \ theory LambdaCalculus imports Main ResiduatedTransitionSystem begin section "Syntax" locale lambda_calculus begin text \ The syntax of terms has constructors \Var\ for variables, \Lam\ for \\\-abstraction, and \App\ for application. In addition, there is a constructor \Beta\ which is used to represent a \\\-redex that has been marked for contraction. The idea is that a term \Beta t u\ represents a marked version of the term \App (Lam t) u\. Finally, there is a constructor \Nil\ which is used to represent the null element required for the residuation operation. \ datatype (discs_sels) lambda = Nil | Var nat | Lam lambda | App lambda lambda | Beta lambda lambda text \ The following notation renders \Beta t u\ as a ``marked'' version of \App (Lam t) u\, even though the former is a single constructor, whereas the latter contains two constructors. \ notation Nil ("\<^bold>\") notation Var ("\<^bold>\_\<^bold>\") notation Lam ("\<^bold>\\<^bold>[_\<^bold>]") notation App (infixl "\<^bold>\" 55) notation Beta ("(\<^bold>\\<^bold>[_\<^bold>] \<^bold>\ _)" [55, 56] 55) text \ The following function computes the set of free variables of a term. 
Note that since variables are represented by numeric indices, this is a set of numbers. \ fun FV where "FV \<^bold>\ = {}" | "FV \<^bold>\i\<^bold>\ = {i}" | "FV \<^bold>\\<^bold>[t\<^bold>] = (\n. n - 1) ` (FV t - {0})" | "FV (t \<^bold>\ u) = FV t \ FV u" | "FV (\<^bold>\\<^bold>[t\<^bold>] \<^bold>\ u) = (\n. n - 1) ` (FV t - {0}) \ FV u" subsection "Some Orderings for Induction" text \ We will need to do some simultaneous inductions on pairs and triples of subterms of given terms. We prove the well-foundedness of the associated relations using the following size measure. \ fun size :: "lambda \ nat" where "size \<^bold>\ = 0" | "size \<^bold>\_\<^bold>\ = 1" | "size \<^bold>\\<^bold>[t\<^bold>] = size t + 1" | "size (t \<^bold>\ u) = size t + size u + 1" | "size (\<^bold>\\<^bold>[t\<^bold>] \<^bold>\ u) = (size t + 1) + size u + 1" lemma wf_if_img_lt: fixes r :: "('a * 'a) set" and f :: "'a \ nat" assumes "\x y. (x, y) \ r \ f x < f y" shows "wf r" using assms by (metis in_measure wf_iff_no_infinite_down_chain wf_measure) inductive subterm where "\t. subterm t \<^bold>\\<^bold>[t\<^bold>]" | "\t u. subterm t (t \<^bold>\ u)" | "\t u. subterm u (t \<^bold>\ u)" | "\t u. subterm t (\<^bold>\\<^bold>[t\<^bold>] \<^bold>\ u)" | "\t u. subterm u (\<^bold>\\<^bold>[t\<^bold>] \<^bold>\ u)" | "\t u v. \subterm t u; subterm u v\ \ subterm t v" lemma subterm_implies_smaller: shows "subterm t u \ size t < size u" by (induct rule: subterm.induct) auto abbreviation subterm_rel where "subterm_rel \ {(t, u). subterm t u}" lemma wf_subterm_rel: shows "wf subterm_rel" using subterm_implies_smaller wf_if_img_lt by (metis case_prod_conv mem_Collect_eq) abbreviation subterm_pair_rel where "subterm_pair_rel \ {((t1, t2), u1, u2). subterm t1 u1 \ subterm t2 u2}" lemma wf_subterm_pair_rel: shows "wf subterm_pair_rel" using subterm_implies_smaller wf_if_img_lt [of subterm_pair_rel "\(t1, t2). max (size t1) (size t2)"] by fastforce abbreviation subterm_triple_rel where "subterm_triple_rel \ {((t1, t2, t3), u1, u2, u3). subterm t1 u1 \ subterm t2 u2 \ subterm t3 u3}" lemma wf_subterm_triple_rel: shows "wf subterm_triple_rel" using subterm_implies_smaller wf_if_img_lt [of subterm_triple_rel "\(t1, t2, t3). max (max (size t1) (size t2)) (size t3)"] by fastforce lemma subterm_lemmas: shows "subterm t \<^bold>\\<^bold>[t\<^bold>]" and "subterm t (\<^bold>\\<^bold>[t\<^bold>] \<^bold>\ u) \ subterm u (\<^bold>\\<^bold>[t\<^bold>] \<^bold>\ u)" and "subterm t (t \<^bold>\ u) \ subterm u (t \<^bold>\ u)" and "subterm t (\<^bold>\\<^bold>[t\<^bold>] \<^bold>\ u) \ subterm u (\<^bold>\\<^bold>[t\<^bold>] \<^bold>\ u)" by (metis subterm.simps)+ subsection "Arrows and Identities" text \ Here we define some special classes of terms. An ``arrow'' is a term that contains no occurrences of \Nil\. An ``identity'' is an arrow that contains no occurrences of \Beta\. It will be important for the commutation of substitution and residuation later on that substitution not be used in a way that could create any marked redexes; for example, we don't want the substitution of \Lam (Var 0)\ for \Var 0\ in an application \App (Var 0) (Var 0)\ to create a new ``marked'' redex. The use of the separate constructor \Beta\ for marked redexes automatically avoids this. 
\ fun Arr where "Arr \<^bold>\ = False" | "Arr \<^bold>\_\<^bold>\ = True" | "Arr \<^bold>\\<^bold>[t\<^bold>] = Arr t" | "Arr (t \<^bold>\ u) = (Arr t \ Arr u)" | "Arr (\<^bold>\\<^bold>[t\<^bold>] \<^bold>\ u) = (Arr t \ Arr u)" lemma Arr_not_Nil: assumes "Arr t" shows "t \ \<^bold>\" using assms by auto fun Ide where "Ide \<^bold>\ = False" | "Ide \<^bold>\_\<^bold>\ = True" | "Ide \<^bold>\\<^bold>[t\<^bold>] = Ide t" | "Ide (t \<^bold>\ u) = (Ide t \ Ide u)" | "Ide (\<^bold>\\<^bold>[t\<^bold>] \<^bold>\ u) = False" lemma Ide_implies_Arr: shows "Ide t \ Arr t" by (induct t) auto lemma ArrE [elim]: assumes "Arr t" and "\i. t = \<^bold>\i\<^bold>\ \ T" and "\u. t = \<^bold>\\<^bold>[u\<^bold>] \ T" and "\u v. t = u \<^bold>\ v \ T" and "\u v. t = \<^bold>\\<^bold>[u\<^bold>] \<^bold>\ v \ T" shows T using assms by (cases t) auto subsection "Raising Indices" text \ For substitution, we need to be able to raise the indices of all free variables in a subterm by a specified amount. To do this recursively, we need to keep track of the depth of nesting of \\\'s and only raise the indices of variables that are already greater than or equal to that depth, as these are the variables that are free in the current context. This leads to defining a function \Raise\ that has two arguments: the depth threshold \d\ and the increment \n\ to be added to indices above that threshold. \ fun Raise where "Raise _ _ \<^bold>\ = \<^bold>\" | "Raise d n \<^bold>\i\<^bold>\ = (if i \ d then \<^bold>\i+n\<^bold>\ else \<^bold>\i\<^bold>\)" | "Raise d n \<^bold>\\<^bold>[t\<^bold>] = \<^bold>\\<^bold>[Raise (Suc d) n t\<^bold>]" | "Raise d n (t \<^bold>\ u) = Raise d n t \<^bold>\ Raise d n u" | "Raise d n (\<^bold>\\<^bold>[t\<^bold>] \<^bold>\ u) = \<^bold>\\<^bold>[Raise (Suc d) n t\<^bold>] \<^bold>\ Raise d n u" text \ Ultimately, the definition of substitution will only directly involve the function that raises all indices of variables that are free in the outermost context; in a term, so we introduce an abbreviation for this special case. \ abbreviation raise where "raise == Raise 0" lemma size_Raise: shows "\d. size (Raise d n t) = size t" by (induct t) auto lemma Raise_not_Nil: assumes "t \ \<^bold>\" shows "Raise d n t \ \<^bold>\" using assms by (cases t) auto lemma FV_Raise: shows "\d n. FV (Raise d n t) = (\x. if x \ d then x + n else x) ` FV t" apply (induct t) apply auto[3] apply force apply force apply force apply force apply fastforce proof - fix t u d n assume ind1: "\d n. FV (Raise d n t) = (\x. if d \ x then x + n else x) ` FV t" assume ind2: "\d n. FV (Raise d n u) = (\x. if d \ x then x + n else x) ` FV u" have "FV (Raise d n (\<^bold>\\<^bold>[t\<^bold>] \<^bold>\ u)) = (\x. x - Suc 0) ` ((\x. x + n) ` (FV t \ {x. Suc d \ x}) \ FV t \ {x. \ Suc d \ x} - {0}) \ ((\x. x + n) ` (FV u \ {x. d \ x}) \ FV u \ {x. \ d \ x})" using ind1 ind2 by simp also have "... = (\x. if d \ x then x + n else x) ` FV (\<^bold>\\<^bold>[t\<^bold>] \<^bold>\ u)" apply simp by force finally show "FV (Raise d n (\<^bold>\\<^bold>[t\<^bold>] \<^bold>\ u)) = (\x. if d \ x then x + n else x) ` FV (\<^bold>\\<^bold>[t\<^bold>] \<^bold>\ u)" by blast qed lemma Arr_Raise: shows "\d n. Arr t \ Arr (Raise d n t)" using FV_Raise by (induct t) auto lemma Ide_Raise: shows "\d n. Ide t \ Ide (Raise d n t)" by (induct t) auto lemma Raise_0: shows "\d n. Raise d 0 t = t" by (induct t) auto lemma Raise_Suc: shows "\d n. 
Raise d (Suc n) t = Raise d 1 (Raise d n t)" by (induct t) auto lemma Raise_Var: shows "Raise d n \<^bold>\i\<^bold>\ = \<^bold>\if i < d then i else i + n\<^bold>\" by auto text \ The following development of the properties of raising indices, substitution, and residuation has benefited greatly from the previous work by Huet \cite{huet-residual-theory}. In particular, it was very helpful to have correct statements of various lemmas available, rather than having to reconstruct them. \ lemma Raise_plus: shows "\d m n. Raise d (m + n) t = Raise (d + m) n (Raise d m t)" by (induct t) auto lemma Raise_plus': shows "\n m d d'. \d' \ d + n; d \ d'\ \ Raise d (m + n) t = Raise d' m (Raise d n t)" by (induct t) auto lemma Raise_Raise: shows "\i k n p. i \ n \ Raise i p (Raise n k t) = Raise (p + n) k (Raise i p t)" by (induct t) auto lemma raise_plus: shows "\n m d. d \ n \ raise (m + n) t = Raise d m (raise n t)" using Raise_plus' by auto lemma raise_Raise: shows "\k p n. raise p (Raise n k t) = Raise (p + n) k (raise p t)" by (simp add: Raise_Raise) lemma Raise_inj: shows "\d n u. Raise d n t = Raise d n u \ t = u" proof (induct t) show "\d n u. Raise d n \<^bold>\ = Raise d n u \ \<^bold>\ = u" by (metis Raise.simps(1) Raise_not_Nil) show "\x d n. Raise d n \<^bold>\x\<^bold>\ = Raise d n u \ \<^bold>\x\<^bold>\ = u" for u using Raise_Var apply (cases u, auto) by (metis add_lessD1 add_right_imp_eq) show "\t d n. \\d n u'. Raise d n t = Raise d n u' \ t = u'; Raise d n \<^bold>\\<^bold>[t\<^bold>] = Raise d n u\ \ \<^bold>\\<^bold>[t\<^bold>] = u" for u apply (cases u, auto) by (metis lambda.distinct(9)) show "\t1 t2 d n. \\d n u'. Raise d n t1 = Raise d n u' \ t1 = u'; \d n u'. Raise d n t2 = Raise d n u' \ t2 = u'; Raise d n (t1 \<^bold>\ t2) = Raise d n u\ \ t1 \<^bold>\ t2 = u" for u apply (cases u, auto) by (metis lambda.distinct(11)) show "\t1 t2 d n. \\d n u'. Raise d n t1 = Raise d n u' \ t1 = u'; \d n u'. Raise d n t2 = Raise d n u' \ t2 = u'; Raise d n (\<^bold>\\<^bold>[t1\<^bold>] \<^bold>\ t2) = Raise d n u\ \ \<^bold>\\<^bold>[t1\<^bold>] \<^bold>\ t2 = u" for u apply (cases u, auto) by (metis lambda.distinct(13)) qed subsection "Substitution" text \ Following \cite{huet-residual-theory}, we now define a generalized substitution operation with adjustment of indices. The ultimate goal is to define the result of contraction of a marked redex \Beta t u\ to be \subst u t\. However, to be able to give a proper recursive definition of \subst\, we need to introduce a parameter \n\ to keep track of the depth of nesting of \Lam\'s as we descend into the the term \t\. So, instead of \subst u t\ simply substituting \u\ for occurrences of \Var 0\, \Subst n u t\ will be substituting for occurrences of \Var n\, and the term \u\ will have the indices of its free variables raised by \n\ before replacing \Var n\. In addition, any variables in \t\ that have indices greater than \n\ will have these indices lowered by one, to account for the outermost \Lam\ that is being removed by the contraction. We can then define \subst u t\ to be \Subst 0 u t\. 
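(* The following is an informal sketch, not part of the formal development: a
   hypothetical Python transcription of the index-raising and substitution
   operations just described.  The tuple-and-None encoding of terms is an
   assumption made only for this illustration; the clause structure mirrors the
   definitions of Raise and Subst in this theory.

def Raise(d, n, t):
    # Raise by n every variable index that is at least the depth threshold d.
    if t is None:
        return None
    tag = t[0]
    if tag == "Var":
        return ("Var", t[1] + n) if t[1] >= d else t
    if tag == "Lam":
        return ("Lam", Raise(d + 1, n, t[1]))
    if tag == "App":
        return ("App", Raise(d, n, t[1]), Raise(d, n, t[2]))
    if tag == "Beta":
        return ("Beta", Raise(d + 1, n, t[1]), Raise(d, n, t[2]))

def Subst(n, v, t):
    # Substitute v for Var n in t: v's free indices are raised by n, and
    # indices above n are lowered by one for the Lam removed by contraction.
    if t is None:
        return None
    tag = t[0]
    if tag == "Var":
        if t[1] > n:
            return ("Var", t[1] - 1)
        return Raise(0, n, v) if t[1] == n else t
    if tag == "Lam":
        return ("Lam", Subst(n + 1, v, t[1]))
    if tag == "App":
        return ("App", Subst(n, v, t[1]), Subst(n, v, t[2]))
    if tag == "Beta":
        return ("Beta", Subst(n + 1, v, t[1]), Subst(n, v, t[2]))

def subst(v, t):
    return Subst(0, v, t)

# Contracting the redex (\. 0 0) applied to the free variable 3:
assert subst(("Var", 3), ("App", ("Var", 0), ("Var", 0))) == ("App", ("Var", 3), ("Var", 3))
*)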
\ fun Subst where "Subst _ _ \<^bold>\ = \<^bold>\" | "Subst n v \<^bold>\i\<^bold>\ = (if n < i then \<^bold>\i-1\<^bold>\ else if n = i then raise n v else \<^bold>\i\<^bold>\)" | "Subst n v \<^bold>\\<^bold>[t\<^bold>] = \<^bold>\\<^bold>[Subst (Suc n) v t\<^bold>]" | "Subst n v (t \<^bold>\ u) = Subst n v t \<^bold>\ Subst n v u" | "Subst n v (\<^bold>\\<^bold>[t\<^bold>] \<^bold>\ u) = \<^bold>\\<^bold>[Subst (Suc n) v t\<^bold>] \<^bold>\ Subst n v u" abbreviation subst where "subst \ Subst 0" lemma Subst_Nil: shows "Subst n v \<^bold>\ = \<^bold>\" by (cases "v = \<^bold>\") auto lemma Subst_not_Nil: assumes "v \ \<^bold>\" and "t \ \<^bold>\" shows "\n. t \ \<^bold>\ \ Subst n v t \ \<^bold>\" using assms Raise_not_Nil by (induct t) auto text \ The following expression summarizes how the set of free variables of a term \Subst d u t\, obtained by substituting \u\ into \t\ at depth \d\, relates to the sets of free variables of \t\ and \u\. This expression is not used in the subsequent formal development, but it has been left here as an aid to understanding. \ abbreviation FVS where "FVS d v t \ (FV t \ {x. x < d}) \ (\x. x - 1) ` {x. x > d \ x \ FV t} \ (\x. x + d) ` {x. d \ FV t \ x \ FV v}" lemma FV_Subst: shows "\d v. FV (Subst d v t) = FVS d v t" proof (induct t) have "\d t v. (\x. x - 1) ` (FVS (Suc d) v t - {0}) = FVS d v \<^bold>\\<^bold>[t\<^bold>]" by auto force+ (* 8 sec *) thus "\d t v. (\d v. FV (Subst d v t) = FVS d v t) \ FV (Subst d v \<^bold>\\<^bold>[t\<^bold>]) = FVS d v \<^bold>\\<^bold>[t\<^bold>]" by simp have "\t u v d. (\x. x - 1) ` (FVS (Suc d) v t - {0}) \ FVS d v u = FVS d v (\<^bold>\\<^bold>[t\<^bold>] \<^bold>\ u)" by auto force+ (* 25 sec *) thus "\t u v d. \\d v. FV (Subst d v t) = FVS d v t; \d v. FV (Subst d v u) = FVS d v u\ \ FV (Subst d v (\<^bold>\\<^bold>[t\<^bold>] \<^bold>\ u)) = FVS d v (\<^bold>\\<^bold>[t\<^bold>] \<^bold>\ u)" by simp qed (auto simp add: FV_Raise) lemma Arr_Subst: assumes "Arr v" shows "\n. Arr t \ Arr (Subst n v t)" using assms Arr_Raise FV_Subst by (induct t) auto lemma vacuous_Subst: shows "\i v. \Arr v; i \ FV t\ \ Raise i 1 (Subst i v t) = t" apply (induct t, auto) by force+ lemma Ide_Subst_iff: shows "\n. Ide (Subst n v t) \ Ide t \ (n \ FV t \ Ide v)" using Ide_Raise vacuous_Subst apply (induct t) apply auto[5] apply fastforce by (metis Diff_empty Diff_insert0 One_nat_def diff_Suc_1 image_iff insertE insert_Diff nat.distinct(1)) lemma Ide_Subst: shows "\n. \Ide t; Ide v\ \ Ide (Subst n v t)" using Ide_Raise by (induct t) auto lemma Raise_Subst: shows "\v k p n. Raise (p + n) k (Subst p v t) = Subst p (Raise n k v) (Raise (Suc (p + n)) k t)" using raise_Raise apply (induct t, auto) by (metis add_Suc)+ lemma Raise_Subst': assumes "t \ \<^bold>\" shows "\v n p k. \v \ \<^bold>\; k \ n\ \ Raise k p (Subst n v t) = Subst (p + n) v (Raise k p t)" using assms raise_plus apply (induct t, auto) apply (metis Raise.simps(1) Subst_Nil Suc_le_mono add_Suc_right) apply fastforce apply fastforce apply (metis Raise.simps(1) Subst_Nil Suc_le_mono add_Suc_right) by fastforce lemma Raise_subst: shows "\v k n. Raise n k (subst v t) = subst (Raise n k v) (Raise (Suc n) k t)" using Raise_0 apply (induct t, auto) by (metis One_nat_def Raise_Subst plus_1_eq_Suc)+ lemma raise_Subst: assumes "t \ \<^bold>\" shows "\v n p. v \ \<^bold>\ \ raise p (Subst n v t) = Subst (p + n) v (raise p t)" using assms Raise_plus raise_Raise Raise_Subst' apply (induct t) by (meson zero_le)+ lemma Subst_Raise: shows "\v m n d. 
\v \ \<^bold>\; d \ m; m \ n + d\ \ Subst m v (Raise d (Suc n) t) = Raise d n t" by (induct t) auto lemma Subst_raise: shows "\v m n. \v \ \<^bold>\; m \ n\ \ Subst m v (raise (Suc n) t) = raise n t" using Subst_Raise by (induct t) auto lemma Subst_Subst: shows "\v w m n. \v \ \<^bold>\; w \ \<^bold>\\ \ Subst (m + n) w (Subst m v t) = Subst m (Subst n w v) (Subst (Suc (m + n)) w t)" using Raise_0 raise_Subst Subst_not_Nil Subst_raise apply (induct t, auto) by (metis add_Suc)+ text \ The Substitution Lemma, as given by Huet \cite{huet-residual-theory}. \ lemma substitution_lemma: shows "\v w n. \v \ \<^bold>\; w \ \<^bold>\\ \ Subst n v (subst w t) = subst (Subst n v w) (Subst (Suc n) v t)" using Subst_not_Nil Raise_0 Subst_Subst Subst_raise apply (induct t, auto) apply (metis Suc_lessD Suc_pred less_imp_le zero_less_diff) by (metis One_nat_def plus_1_eq_Suc)+ section "Lambda-Calculus as an RTS" subsection "Residuation" text \ We now define residuation on terms. Residuation is an operation which, when defined for terms \t\ and \u\, produces terms \t \ u\ and \u \ t\ that represent, respectively, what remains of the reductions of \t\ after performing the reductions in \u\, and what remains of the reductions of \u\ after performing the reductions in \t\. The definition ensures that, if residuation is defined for two terms, then those terms in must be arrows that are \emph{coinitial} (\emph{i.e.}~they are the same after erasing marks on redexes). The residual \t \ u\ then has marked redexes at positions corresponding to redexes that were originally marked in \t\ and that were not contracted by any of the reductions of \u\. This definition has also benefited from the presentation in \cite{huet-residual-theory}. \ fun resid (infix "\\" 70) where "\<^bold>\i\<^bold>\ \\ \<^bold>\i'\<^bold>\ = (if i = i' then \<^bold>\i\<^bold>\ else \<^bold>\)" | "\<^bold>\\<^bold>[t\<^bold>] \\ \<^bold>\\<^bold>[t'\<^bold>] = (if t \\ t' = \<^bold>\ then \<^bold>\ else \<^bold>\\<^bold>[t \\ t'\<^bold>])" | "(t \<^bold>\ u) \\ (t'\<^bold>\ u') = (if t \\ t' = \<^bold>\ \ u \\ u' = \<^bold>\ then \<^bold>\ else (t \\ t') \<^bold>\ (u \\ u'))" | "(\<^bold>\\<^bold>[t\<^bold>] \<^bold>\ u) \\ (\<^bold>\\<^bold>[t'\<^bold>] \<^bold>\ u') = (if t \\ t' = \<^bold>\ \ u \\ u' = \<^bold>\ then \<^bold>\ else subst (u \\ u') (t \\ t'))" | "(\<^bold>\\<^bold>[t\<^bold>] \<^bold>\ u) \\ (\<^bold>\\<^bold>[t'\<^bold>] \<^bold>\ u') = (if t \\ t' = \<^bold>\ \ u \\ u' = \<^bold>\ then \<^bold>\ else subst (u \\ u') (t \\ t'))" | "(\<^bold>\\<^bold>[t\<^bold>] \<^bold>\ u) \\ (\<^bold>\\<^bold>[t'\<^bold>] \<^bold>\ u') = (if t \\ t' = \<^bold>\ \ u \\ u' = \<^bold>\ then \<^bold>\ else \<^bold>\\<^bold>[t \\ t'\<^bold>] \<^bold>\ (u \\ u'))" | "resid _ _ = \<^bold>\" text \ Terms t and u are \emph{consistent} if residuation is defined for them. \ abbreviation Con (infix "\" 50) where "Con t u \ resid t u \ \<^bold>\" lemma ConE [elim]: assumes "t \ t'" and "\i. \t = \<^bold>\i\<^bold>\; t' = \<^bold>\i\<^bold>\; resid t t' = \<^bold>\i\<^bold>\\ \ T" and "\u u'. \t = \<^bold>\\<^bold>[u\<^bold>]; t' = \<^bold>\\<^bold>[u'\<^bold>]; u \ u'; t \\ t' = \<^bold>\\<^bold>[u \\ u'\<^bold>]\ \ T" and "\u v u' v'. \t = u \<^bold>\ v; t' = u' \<^bold>\ v'; u \ u'; v \ v'; t \\ t' = (u \\ u') \<^bold>\ (v \\ v')\ \ T" and "\u v u' v'. \t = \<^bold>\\<^bold>[u\<^bold>] \<^bold>\ v; t' = \<^bold>\\<^bold>[u'\<^bold>] \<^bold>\ v'; u \ u'; v \ v'; t \\ t' = subst (v \\ v') (u \\ u')\ \ T" and "\u v u' v'. 
\t = \<^bold>\\<^bold>[u\<^bold>] \<^bold>\ v; t' = Beta u' v'; u \ u'; v \ v'; t \\ t' = subst (v \\ v') (u \\ u')\ \ T" and "\u v u' v'. \t = \<^bold>\\<^bold>[u\<^bold>] \<^bold>\ v; t' = \<^bold>\\<^bold>[u'\<^bold>] \<^bold>\ v'; u \ u'; v \ v'; t \\ t' = \<^bold>\\<^bold>[u \\ u'\<^bold>] \<^bold>\ (v \\ v')\ \ T" shows T using assms apply (cases t; cases t') apply simp_all apply metis apply metis apply metis apply (cases "un_App1 t", simp_all) apply metis apply (cases "un_App1 t'", simp_all) apply metis by metis text \ A term can only be consistent with another if both terms are ``arrows''. \ lemma Con_implies_Arr1: shows "\u. t \ u \ Arr t" apply (induct t) apply auto[3] proof - fix u v t' assume ind1: "\u'. u \ u' \ Arr u" assume ind2: "\v'. v \ v' \ Arr v" show "u \<^bold>\ v \ t' \ Arr (u \<^bold>\ v)" using ind1 ind2 apply (cases t', simp_all) apply metis apply (cases u, simp_all) by (metis lambda.distinct(3) resid.simps(2)) show "\<^bold>\\<^bold>[u\<^bold>] \<^bold>\ v \ t' \ Arr (\<^bold>\\<^bold>[u\<^bold>] \<^bold>\ v)" using ind1 ind2 apply (cases t', simp_all) apply (cases "un_App1 t'", simp_all) by metis+ qed lemma Con_implies_Arr2: shows "\t. t \ u \ Arr u" apply (induct u) apply auto[3] proof - fix u' v' t assume ind1: "\u. u \ u' \ Arr u'" assume ind2: "\v. v \ v' \ Arr v'" show "t \ u' \<^bold>\ v' \ Arr (u' \<^bold>\ v')" using ind1 ind2 apply (cases t, simp_all) apply metis apply (cases u', simp_all) by (metis lambda.distinct(3) resid.simps(2)) show "t \ (\<^bold>\\<^bold>[u'\<^bold>] \<^bold>\ v') \ Arr (\<^bold>\\<^bold>[u'\<^bold>] \<^bold>\ v')" using ind1 ind2 apply (cases t, simp_all) apply (cases "un_App1 t", simp_all) by metis+ qed lemma ConD: shows "t \<^bold>\ u \ t' \<^bold>\ u' \ t \ t' \ u \ u'" and "\<^bold>\\<^bold>[v\<^bold>] \<^bold>\ u \ \<^bold>\\<^bold>[v'\<^bold>] \<^bold>\ u' \ \<^bold>\\<^bold>[v\<^bold>] \ \<^bold>\\<^bold>[v'\<^bold>] \ u \ u'" and "\<^bold>\\<^bold>[v\<^bold>] \<^bold>\ u \ t' \<^bold>\ u' \ \<^bold>\\<^bold>[v\<^bold>] \ t' \ u \ u'" and "t \<^bold>\ u \ \<^bold>\\<^bold>[v'\<^bold>] \<^bold>\ u' \ t \ \<^bold>\\<^bold>[v'\<^bold>] \ u \ u'" by auto text \ Residuation on consistent terms preserves arrows. \ lemma Arr_resid_ind: shows "\u. t \ u \ Arr (t \\ u)" apply (induct t) apply auto proof - fix t1 t2 u assume ind1: "\u. t1 \ u \ Arr (t1 \\ u)" assume ind2: "\u. t2 \ u \ Arr (t2 \\ u)" show "t1 \<^bold>\ t2 \ u \ Arr ((t1 \<^bold>\ t2) \\ u)" using ind1 ind2 Arr_Subst apply (cases u, auto) apply (cases t1, auto) by (metis Arr.simps(3) ConD(2) resid.simps(2) resid.simps(4)) show "\<^bold>\\<^bold>[t1\<^bold>] \<^bold>\ t2 \ u \ Arr ((\<^bold>\\<^bold>[t1\<^bold>] \<^bold>\ t2) \\ u)" using ind1 ind2 Arr_Subst by (cases u) auto qed lemma Arr_resid: shows "\u. t \ u \ Arr (t \\ u)" using Arr_resid_ind by auto subsection "Source and Target" text \ Here we give syntactic versions of the \emph{source} and \emph{target} of a term. These will later be shown to agree (on arrows) with the versions derived from the residuation. The underlying idea here is that a term stands for a reduction sequence in which all marked redexes (corresponding to instances of the constructor \Beta\) are contracted in a bottom-up fashion. A term without any marked redexes stands for an empty reduction sequence; such terms will be shown to be the identities derived from the residuation. The source of term is the identity obtained by erasing all markings; that is, by replacing all subterms of the form \Beta t u\ by \App (Lam t) u\. 
The target of a term is the identity that is the result of contracting all the marked redexes. \ fun Src where "Src \<^bold>\ = \<^bold>\" | "Src \<^bold>\i\<^bold>\ = \<^bold>\i\<^bold>\" | "Src \<^bold>\\<^bold>[t\<^bold>] = \<^bold>\\<^bold>[Src t\<^bold>]" | "Src (t \<^bold>\ u) = Src t \<^bold>\ Src u" | "Src (\<^bold>\\<^bold>[t\<^bold>] \<^bold>\ u) = \<^bold>\\<^bold>[Src t\<^bold>] \<^bold>\ Src u" fun Trg where "Trg \<^bold>\i\<^bold>\ = \<^bold>\i\<^bold>\" | "Trg \<^bold>\\<^bold>[t\<^bold>] = \<^bold>\\<^bold>[Trg t\<^bold>]" | "Trg (t \<^bold>\ u) = Trg t \<^bold>\ Trg u" | "Trg (\<^bold>\\<^bold>[t\<^bold>] \<^bold>\ u) = subst (Trg u) (Trg t)" | "Trg _ = \<^bold>\" lemma Ide_Src: shows "Arr t \ Ide (Src t)" by (induct t) auto lemma Ide_iff_Src_self: assumes "Arr t" shows "Ide t \ Src t = t" using assms Ide_Src by (induct t) auto lemma Arr_Src [simp]: assumes "Arr t" shows "Arr (Src t)" using assms Ide_Src Ide_implies_Arr by blast lemma Con_Src: shows "\t u. \size t + size u \ n; t \ u\ \ Src t \ Src u" by (induct n) auto lemma Src_eq_iff: shows "Src \<^bold>\i\<^bold>\ = Src \<^bold>\i'\<^bold>\ \ i = i'" and "Src (t \<^bold>\ u) = Src (t' \<^bold>\ u') \ Src t = Src t' \ Src u = Src u'" and "Src (\<^bold>\\<^bold>[t\<^bold>] \<^bold>\ u) = Src (\<^bold>\\<^bold>[t'\<^bold>] \<^bold>\ u') \ Src t = Src t' \ Src u = Src u'" and "Src (\<^bold>\\<^bold>[t\<^bold>] \<^bold>\ u) = Src (\<^bold>\\<^bold>[t'\<^bold>] \<^bold>\ u') \ Src t = Src t' \ Src u = Src u'" by auto lemma Src_Raise: shows "\d. Src (Raise d n t) = Raise d n (Src t)" by (induct t) auto lemma Src_Subst [simp]: shows "\d X. \Arr t; Arr u\ \ Src (Subst d t u) = Subst d (Src t) (Src u)" using Src_Raise by (induct u) auto lemma Ide_Trg: shows "Arr t \ Ide (Trg t)" using Ide_Subst by (induct t) auto lemma Ide_iff_Trg_self: shows "Arr t \ Ide t \ Trg t = t" apply (induct t) apply auto by (metis Ide.simps(5) Ide_Subst Ide_Trg)+ lemma Arr_Trg [simp]: assumes "Arr X" shows "Arr (Trg X)" using assms Ide_Trg Ide_implies_Arr by blast lemma Src_Src [simp]: assumes "Arr t" shows "Src (Src t) = Src t" using assms Ide_Src Ide_iff_Src_self Ide_implies_Arr by blast lemma Trg_Src [simp]: assumes "Arr t" shows "Trg (Src t) = Src t" using assms Ide_Src Ide_iff_Trg_self Ide_implies_Arr by blast lemma Trg_Trg [simp]: assumes "Arr t" shows "Trg (Trg t) = Trg t" using assms Ide_Trg Ide_iff_Trg_self Ide_implies_Arr by blast lemma Src_Trg [simp]: assumes "Arr t" shows "Src (Trg t) = Trg t" using assms Ide_Trg Ide_iff_Src_self Ide_implies_Arr by blast text \ Two terms are syntactically \emph{coinitial} if they are arrows with the same source; that is, they represent two reductions from the same starting term. \ abbreviation Coinitial where "Coinitial t u \ Arr t \ Arr u \ Src t = Src u" text \ We now show that terms are consistent if and only if they are coinitial. \ lemma Coinitial_cases: assumes "Arr t" and "Arr t'" and "Src t = Src t'" shows "(t = \<^bold>\ \ t' = \<^bold>\) \ (\x. t = \<^bold>\x\<^bold>\ \ t' = \<^bold>\x\<^bold>\) \ (\u u'. t = \<^bold>\\<^bold>[u\<^bold>] \ t' = \<^bold>\\<^bold>[u'\<^bold>]) \ (\u v u' v'. t = u \<^bold>\ v \ t' = u' \<^bold>\ v') \ (\u v u' v'. t = \<^bold>\\<^bold>[u\<^bold>] \<^bold>\ v \ t' = \<^bold>\\<^bold>[u'\<^bold>] \<^bold>\ v') \ (\u v u' v'. t = \<^bold>\\<^bold>[u\<^bold>] \<^bold>\ v \ t' = \<^bold>\\<^bold>[u'\<^bold>] \<^bold>\ v') \ (\u v u' v'. 
t = \<^bold>\\<^bold>[u\<^bold>] \<^bold>\ v \ t' = \<^bold>\\<^bold>[u'\<^bold>] \<^bold>\ v')" using assms by (cases t; cases t') auto lemma Con_implies_Coinitial_ind: shows "\t u. \size t + size u \ n; t \ u\ \ Coinitial t u" using Con_implies_Arr1 Con_implies_Arr2 by (induct n) auto lemma Coinitial_implies_Con_ind: shows "\t u. \size (Src t) \ n; Coinitial t u\ \ t \ u" proof (induct n) show "\t u. \size (Src t) \ 0; Coinitial t u\ \ t \ u" by auto fix n t u assume Coinitial: "Coinitial t u" assume n: "size (Src t) \ Suc n" assume ind: "\t u. \size (Src t) \ n; Coinitial t u\ \ t \ u" show "t \ u" using n ind Coinitial Coinitial_cases [of t u] Subst_not_Nil by auto qed lemma Coinitial_iff_Con: shows "Coinitial t u \ t \ u" using Coinitial_implies_Con_ind Con_implies_Coinitial_ind by blast lemma Coinitial_Raise_Raise: shows "\d n u. Coinitial t u \ Coinitial (Raise d n t) (Raise d n u)" using Arr_Raise Src_Raise apply (induct t, auto) by (metis Raise.simps(3-4)) lemma Con_sym: shows "t \ u \ u \ t" by (metis Coinitial_iff_Con) lemma ConI [intro, simp]: assumes "Arr t" and "Arr u" and "Src t = Src u" shows "Con t u" using assms Coinitial_iff_Con by blast lemma Con_Arr_Src [simp]: assumes "Arr t" shows "t \ Src t" and "Src t \ t" using assms by (auto simp add: Ide_Src Ide_implies_Arr) lemma resid_Arr_self: shows "Arr t \ t \\ t = Trg t" by (induct t) auto text \ The following result is not used in the formal development that follows, but it requires some proof and might eventually be useful. \ lemma finite_branching: shows "Ide a \ finite {t. Arr t \ Src t = a}" proof (induct a) show "Ide \<^bold>\ \ finite {t. Arr t \ Src t = \<^bold>\}" by simp fix x have "\t. Src t = \<^bold>\x\<^bold>\ \ t = \<^bold>\x\<^bold>\" using Src.elims by blast thus "finite {t. Arr t \ Src t = \<^bold>\x\<^bold>\}" by simp next fix a assume a: "Ide \<^bold>\\<^bold>[a\<^bold>]" assume ind: "Ide a \ finite {t. Arr t \ Src t = a}" have "{t. Arr t \ Src t = \<^bold>\\<^bold>[a\<^bold>]} = Lam ` {t. Arr t \ Src t = a}" proof show "Lam ` {t. Arr t \ Src t = a} \ {t. Arr t \ Src t = \<^bold>\\<^bold>[a\<^bold>]}" by auto show "{t. Arr t \ Src t = \<^bold>\\<^bold>[a\<^bold>]} \ Lam ` {t. Arr t \ Src t = a}" proof fix t assume t: "t \ {t. Arr t \ Src t = \<^bold>\\<^bold>[a\<^bold>]}" show "t \ Lam ` {t. Arr t \ Src t = a}" using t by (cases t) auto qed qed thus "finite {t. Arr t \ Src t = \<^bold>\\<^bold>[a\<^bold>]}" using a ind by simp next fix a1 a2 assume ind1: "Ide a1 \ finite {t. Arr t \ Src t = a1}" assume ind2: "Ide a2 \ finite {t. Arr t \ Src t = a2}" assume a: "Ide (\<^bold>\\<^bold>[a1\<^bold>] \<^bold>\ a2)" show "finite {t. Arr t \ Src t = \<^bold>\\<^bold>[a1\<^bold>] \<^bold>\ a2}" using a ind1 ind2 by simp next fix a1 a2 assume ind1: "Ide a1 \ finite {t. Arr t \ Src t = a1}" assume ind2: "Ide a2 \ finite {t. Arr t \ Src t = a2}" assume a: "Ide (a1 \<^bold>\ a2)" have "{t. Arr t \ Src t = a1 \<^bold>\ a2} = ({t. is_App t} \ ({t. Arr t \ Src (un_App1 t) = a1 \ Src (un_App2 t) = a2})) \ ({t. is_Beta t \ is_Lam a1} \ ({t. Arr t \ is_Lam a1 \ Src (un_Beta1 t) = un_Lam a1 \ Src (un_Beta2 t) = a2}))" by fastforce have "{t. Arr t \ Src t = a1 \<^bold>\ a2} = (\(t1, t2). t1 \<^bold>\ t2) ` ({t1. Arr t1 \ Src t1 = a1} \ {t2. Arr t2 \ Src t2 = a2}) \ (\(t1, t2). \<^bold>\\<^bold>[t1\<^bold>] \<^bold>\ t2) ` ({t1t2. is_Lam a1} \ {t1. Arr t1 \ Src t1 = un_Lam a1} \ {t2. Arr t2 \ Src t2 = a2})" proof show "(\(t1, t2). t1 \<^bold>\ t2) ` ({t1. Arr t1 \ Src t1 = a1} \ {t2. Arr t2 \ Src t2 = a2}) \ (\(t1, t2). 
\<^bold>\\<^bold>[t1\<^bold>] \<^bold>\ t2) ` ({t1t2. is_Lam a1} \ {t1. Arr t1 \ Src t1 = un_Lam a1} \ {t2. Arr t2 \ Src t2 = a2}) \ {t. Arr t \ Src t = a1 \<^bold>\ a2}" by auto show "{t. Arr t \ Src t = a1 \<^bold>\ a2} \ (\(t1, t2). t1 \<^bold>\ t2) ` ({t1. Arr t1 \ Src t1 = a1} \ {t2. Arr t2 \ Src t2 = a2}) \ (\(t1, t2). \<^bold>\\<^bold>[t1\<^bold>] \<^bold>\ t2) ` ({t1t2. is_Lam a1} \ {t1. Arr t1 \ Src t1 = un_Lam a1} \ {t2. Arr t2 \ Src t2 = a2})" proof fix t assume t: "t \ {t. Arr t \ Src t = a1 \<^bold>\ a2}" have "is_App t \ is_Beta t" using t by auto moreover have "is_App t \ t \ (\(t1, t2). t1 \<^bold>\ t2) ` ({t1. Arr t1 \ Src t1 = a1} \ {t2. Arr t2 \ Src t2 = a2})" using t image_iff is_App_def by fastforce moreover have "is_Beta t \ t \ (\(t1, t2). \<^bold>\\<^bold>[t1\<^bold>] \<^bold>\ t2) ` ({t1t2. is_Lam a1} \ {t1. Arr t1 \ Src t1 = un_Lam a1} \ {t2. Arr t2 \ Src t2 = a2})" using t is_Beta_def by fastforce ultimately show "t \ (\(t1, t2). t1 \<^bold>\ t2) ` ({t1. Arr t1 \ Src t1 = a1} \ {t2. Arr t2 \ Src t2 = a2}) \ (\(t1, t2). \<^bold>\\<^bold>[t1\<^bold>] \<^bold>\ t2) ` ({t1t2. is_Lam a1} \ {t1. Arr t1 \ Src t1 = un_Lam a1} \ {t2. Arr t2 \ Src t2 = a2})" by blast qed qed moreover have "finite ({t1. Arr t1 \ Src t1 = a1} \ {t2. Arr t2 \ Src t2 = a2})" using a ind1 ind2 Ide.simps(4) by blast moreover have "is_Lam a1 \ finite ({t1. Arr t1 \ Src t1 = un_Lam a1} \ {t2. Arr t2 \ Src t2 = a2})" proof - assume a1: "is_Lam a1" have "Ide (un_Lam a1)" using a a1 is_Lam_def by force have "Lam ` {t1. Arr t1 \ Src t1 = un_Lam a1} = {t. Arr t \ Src t = a1}" proof show "Lam ` {t1. Arr t1 \ Src t1 = un_Lam a1} \ {t. Arr t \ Src t = a1}" using a1 by fastforce show "{t. Arr t \ Src t = a1} \ Lam ` {t1. Arr t1 \ Src t1 = un_Lam a1}" proof fix t assume t: "t \ {t. Arr t \ Src t = a1}" have "is_Lam t" using a1 t by auto hence "un_Lam t \ {t1. Arr t1 \ Src t1 = un_Lam a1}" using is_Lam_def t by force thus "t \ Lam ` {t1. Arr t1 \ Src t1 = un_Lam a1}" by (metis \is_Lam t\ lambda.collapse(2) rev_image_eqI) qed qed moreover have "inj Lam" using inj_on_def by blast ultimately have "finite {t1. Arr t1 \ Src t1 = un_Lam a1}" by (metis (mono_tags, lifting) Ide.simps(4) a finite_imageD ind1 injD inj_onI) moreover have "finite {t2. Arr t2 \ Src t2 = a2}" using Ide.simps(4) a ind2 by blast ultimately show "finite ({t1. Arr t1 \ Src t1 = un_Lam a1} \ {t2. Arr t2 \ Src t2 = a2})" by blast qed ultimately show "finite {t. Arr t \ Src t = a1 \<^bold>\ a2}" using a ind1 ind2 by simp qed subsection "Residuation and Substitution" text \ We now develop a series of lemmas that involve the interaction of residuation and substitution. \ lemma Raise_resid: shows "\t u k n. t \ u \ Raise k n (t \\ u) = Raise k n t \\ Raise k n u" proof - (* * Note: This proof uses subterm induction because the hypothesis Con t u yields * cases in which App and Beta terms are compared, so that the first argument to App * needs to be unfolded. *) fix t u k n let ?P = "\(t, u). \k n. t \ u \ Raise k n (t \\ u) = Raise k n t \\ Raise k n u" have "\t u. \t' u'. ((t', u'), (t, u)) \ subterm_pair_rel \ (\k n. t' \ u' \ Raise k n (t' \\ u') = Raise k n t' \\ Raise k n u') \ (\k n. t \ u \ Raise k n (t \\ u) = Raise k n t \\ Raise k n u)" using subterm_lemmas Coinitial_iff_Con Coinitial_Raise_Raise Raise_subst by auto thus "\t u k n. t \ u \ Raise k n (t \\ u) = Raise k n t \\ Raise k n u" using wf_subterm_pair_rel wf_induct [of subterm_pair_rel ?P] by blast qed lemma Con_Raise: shows "\d n u. 
t \ u \ Raise d n t \ Raise d n u" apply (induct t) apply auto[3] by (metis Raise_not_Nil Raise_resid)+ text \ The following is Huet's Commutation Theorem \cite{huet-residual-theory}: ``substitution commutes with residuation''. \ lemma resid_Subst: assumes "t \ t'" and "u \ u'" shows "Subst n t u \\ Subst n t' u' = Subst n (t \\ t') (u \\ u')" proof - let ?P = "\(u, u'). \n t t'. t \ t' \ u \ u' \ Subst n t u \\ Subst n t' u' = Subst n (t \\ t') (u \\ u')" have "\u u'. \w w'. ((w, w'), (u, u')) \ subterm_pair_rel \ (\n v v'. v \ v' \ w \ w' \ Subst n v w \\ Subst n v' w' = Subst n (v \\ v') (w \\ w')) \ \n t t'. t \ t' \ u \ u' \ Subst n t u \\ Subst n t' u' = Subst n (t \\ t') (u \\ u')" using subterm_lemmas Raise_resid Subst_not_Nil Con_Raise Raise_subst substitution_lemma by auto thus "Subst n t u \\ Subst n t' u' = Subst n (t \\ t') (u \\ u')" using assms wf_subterm_pair_rel wf_induct [of subterm_pair_rel ?P] by auto qed lemma Trg_Subst [simp]: shows "\d t. \Arr t; Arr u\ \ Trg (Subst d t u) = Subst d (Trg t) (Trg u)" by (metis Arr_Subst Arr_Trg Arr_not_Nil resid_Arr_self resid_Subst) lemma Src_resid: shows "\t. t \ u \ Src (t \\ u) = Trg u" proof (induct u, auto simp add: Arr_resid_ind) fix t t1' show "\t2'. \\t1. t1 \ t1' \ Src (t1 \\ t1') = Trg t1'; \t2. t2 \ t2' \ Src (t2 \\ t2') = Trg t2'; t \ t1' \<^bold>\ t2'\ \ Src (t \\ (t1' \<^bold>\ t2')) = Trg t1' \<^bold>\ Trg t2'" apply (cases t; cases t1') apply auto by (metis Src.simps(3) lambda.distinct(3) lambda.sel(2) resid.simps(2)) qed lemma Coinitial_resid_resid: assumes "t \ v" and "u \ v" shows "Coinitial (t \\ v) (u \\ v)" using assms Src_resid Arr_resid Coinitial_iff_Con by presburger lemma Con_implies_is_Lam_iff_is_Lam: assumes "t \ u" shows "is_Lam t \ is_Lam u" using assms by auto lemma Con_implies_Coinitial3: assumes "t \\ v \ u \\ v" shows "Coinitial v u" and "Coinitial v t" and "Coinitial u t" using assms by (metis Coinitial_iff_Con resid.simps(7))+ text \ We can now prove L\'{e}vy's ``Cube Lemma'' \cite{levy}, which is the key axiom for a residuated transition system. \ lemma Cube: shows "v \\ t \ u \\ t \ (v \\ t) \\ (u \\ t) = (v \\ u) \\ (t \\ u)" proof - let ?P = "\(t, u, v). v \\ t \ u \\ t \ (v \\ t) \\ (u \\ t) = (v \\ u) \\ (t \\ u)" have "\t u v. \t' u' v'. ((t', u', v'), (t, u, v)) \ subterm_triple_rel \ ?P (t', u', v') \ v \\ t \ u \\ t \ (v \\ t) \\ (u \\ t) = (v \\ u) \\ (t \\ u)" proof - fix t u v assume ind: "\t' u' v'. 
((t', u', v'), (t, u, v)) \ subterm_triple_rel \ ?P (t', u', v')" show "v \\ t \ u \\ t \ (v \\ t) \\ (u \\ t) = (v \\ u) \\ (t \\ u)" proof (intro impI) assume con: "v \\ t \ u \\ t" have "Con v t" using con by auto moreover have "Con u t" using con by auto ultimately show "(v \\ t) \\ (u \\ t) = (v \\ u) \\ (t \\ u)" using subterm_lemmas ind Coinitial_iff_Con Coinitial_resid_resid resid_Subst apply (elim ConE [of v t] ConE [of u t]) apply simp_all apply metis apply metis apply (cases "un_App1 t"; cases "un_App1 v", simp_all) apply metis apply metis apply metis apply metis apply metis apply (cases "un_App1 u", simp_all) apply metis by metis qed qed hence "?P (t, u, v)" using wf_subterm_triple_rel wf_induct [of subterm_triple_rel ?P] by blast thus "v \\ t \ u \\ t \ (v \\ t) \\ (u \\ t) = (v \\ u) \\ (t \\ u)" by simp qed subsection "Residuation Determines an RTS" text \ We are now in a position to verify that the residuation operation that we have defined satisfies the axioms for a residuated transition system, and that various notions which we have defined syntactically above (\emph{e.g.}~arrow, source, target) agree with the versions derived abstractly from residuation. \ sublocale partial_magma resid apply unfold_locales by (metis Arr.simps(1) Coinitial_iff_Con) lemma null_char [simp]: shows "null = \<^bold>\" using null_def by (metis null_is_zero(2) resid.simps(7)) sublocale residuation resid using null_char Arr_resid Coinitial_iff_Con Cube apply (unfold_locales, auto) by metis+ (* TODO: Try to understand when notation is and is not inherited. *) notation resid (infix "\\" 70) lemma resid_is_residuation: shows "residuation resid" .. lemma arr_char [iff]: shows "arr t \ Arr t" using Coinitial_iff_Con arr_def con_def null_char by auto lemma ide_char [iff]: shows "ide t \ Ide t" by (metis Ide_iff_Trg_self Ide_implies_Arr arr_char arr_resid_iff_con ide_def resid_Arr_self) lemma resid_Arr_Ide: shows "\a. \Ide a; Coinitial t a\ \ t \\ a = t" using Ide_iff_Src_self by (induct t, auto) lemma resid_Ide_Arr: shows "\t. \Ide a; Coinitial a t\ \ Ide (a \\ t)" apply (induct a) apply auto[2] by (metis ConI conI cube ideI ide_char null_char resid_Arr_Ide)+ lemma resid_Arr_Src [simp]: assumes "Arr t" shows "t \\ Src t = t" using assms Ide_Src by (simp add: Ide_implies_Arr resid_Arr_Ide) lemma resid_Src_Arr [simp]: assumes "Arr t" shows "Src t \\ t = Trg t" using assms by (metis (full_types) Con_Arr_Src(2) Con_implies_Arr1 Src_Src Src_resid cube resid_Arr_Src resid_Arr_self) sublocale rts resid proof show "\a t. \ide a; con t a\ \ t \\ a = t" using ide_char resid_Arr_Ide by (metis Coinitial_iff_Con con_def null_char) show "\t. arr t \ ide (trg t)" by (simp add: Ide_Trg resid_Arr_self trg_def) show "\a t. \ide a; con a t\ \ ide (resid a t)" using ide_char null_char resid_Ide_Arr Coinitial_iff_Con con_def by force show "\t u. con t u \ \a. ide a \ con a t \ con a u" by (metis Coinitial_iff_Con Ide_Src Ide_iff_Src_self Ide_implies_Arr con_def ide_char null_char) show "\t u v. \ide (resid t u); con u v\ \ con (resid t u) (resid v u)" by (metis Coinitial_resid_resid ide_char not_arr_null null_char resid_Ide_Arr con_def con_sym ide_implies_arr) qed lemma is_rts: shows "rts resid" .. 
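(*
 * Informal illustration (added here as a sketch; it is not part of the formal
 * development and uses named variables rather than de Bruijn indices):
 * for the source term (λx. x) ((λy. y) z), let t be the transition that contracts
 * the outer redex and u the transition that contracts the inner one.  Contracting t
 * yields (λy. y) z, in which the residual u \\ t marks the surviving inner redex;
 * contracting u yields (λx. x) z, in which t \\ u marks the surviving outer redex.
 * Both residuals contract to z, and the Cube Lemma proved above guarantees in general
 * that (v \\ t) \\ (u \\ t) = (v \\ u) \\ (t \\ u).
 *)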
lemma sources_char\<^sub>\: shows "sources t = (if Arr t then {Src t} else {})" proof (cases "Arr t") show "\ Arr t \ ?thesis" using arr_char arr_iff_has_source by auto assume t: "Arr t" have 1: "{Src t} \ sources t" using t Ide_Src by force moreover have "sources t \ {Src t}" by (metis Coinitial_iff_Con Ide_iff_Src_self ide_char in_sourcesE null_char con_def singleton_iff subsetI) ultimately show ?thesis using t by auto qed lemma sources_simp [simp]: assumes "Arr t" shows "sources t = {Src t}" using assms sources_char\<^sub>\ by auto lemma sources_simps [simp]: shows "sources \<^bold>\ = {}" and "sources \<^bold>\x\<^bold>\ = {\<^bold>\x\<^bold>\}" and "arr t \ sources \<^bold>\\<^bold>[t\<^bold>] = {\<^bold>\\<^bold>[Src t\<^bold>]}" and "\arr t; arr u\ \ sources (t \<^bold>\ u) = {Src t \<^bold>\ Src u}" and "\arr t; arr u\ \ sources (\<^bold>\\<^bold>[t\<^bold>] \<^bold>\ u) = {\<^bold>\\<^bold>[Src t\<^bold>] \<^bold>\ Src u}" using sources_char\<^sub>\ by auto lemma targets_char\<^sub>\: shows "targets t = (if Arr t then {Trg t} else {})" proof (cases "Arr t") show "\ Arr t \ ?thesis" by (meson arr_char arr_iff_has_target) assume t: "Arr t" have 1: "{Trg t} \ targets t" using t resid_Arr_self trg_def trg_in_targets by force moreover have "targets t \ {Trg t}" by (metis 1 Ide_iff_Src_self arr_char ide_char ide_implies_arr in_targetsE insert_subset prfx_implies_con resid_Arr_self sources_resid sources_simp t) ultimately show ?thesis using t by auto qed lemma targets_simp [simp]: assumes "Arr t" shows "targets t = {Trg t}" using assms targets_char\<^sub>\ by auto lemma targets_simps [simp]: shows "targets \<^bold>\ = {}" and "targets \<^bold>\x\<^bold>\ = {\<^bold>\x\<^bold>\}" and "arr t \ targets \<^bold>\\<^bold>[t\<^bold>] = {\<^bold>\\<^bold>[Trg t\<^bold>]}" and "\arr t; arr u\ \ targets (t \<^bold>\ u) = {Trg t \<^bold>\ Trg u}" and "\arr t; arr u\ \ targets (\<^bold>\\<^bold>[t\<^bold>] \<^bold>\ u) = {subst (Trg u) (Trg t)}" using targets_char\<^sub>\ by auto lemma seq_char: shows "seq t u \ Arr t \ Arr u \ Trg t = Src u" using seq_def arr_char sources_char\<^sub>\ targets_char\<^sub>\ by force lemma seqI\<^sub>\ [intro, simp]: assumes "Arr t" and "Arr u" and "Trg t = Src u" shows "seq t u" using assms seq_char by simp lemma seqE\<^sub>\ [elim]: assumes "seq t u" and "\Arr t; Arr u; Trg t = Src u\ \ T" shows T using assms seq_char by blast text \ The following classifies the ways that transitions can be sequential. It is useful for later proofs by case analysis. \ lemma seq_cases: assumes "seq t u" shows "(is_Var t \ is_Var u) \ (is_Lam t \ is_Lam u) \ (is_App t \ is_App u) \ (is_App t \ is_Beta u \ is_Lam (un_App1 t)) \ (is_App t \ is_Beta u \ is_Beta (un_App1 t)) \ is_Beta t" using assms seq_char by (cases t; cases u) auto sublocale confluent_rts resid by (unfold_locales) fastforce lemma is_confluent_rts: shows "confluent_rts resid" .. 
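(*
 * Informal illustration (added as a sketch, not part of the formal text, again using
 * named variables): if t is the transition contracting the redex in (λx. x) y, then
 * Src t = (λx. x) y and Trg t = y, so sources t = {(λx. x) y} and targets t = {y};
 * by seq_char, a transition u composes sequentially after t (seq t u) exactly when
 * u is an arrow with Src u = Trg t.
 *)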
lemma con_char [iff]: shows "con t u \ Con t u" by fastforce lemma coinitial_char [iff]: shows "coinitial t u \ Coinitial t u" by fastforce lemma sources_Raise: assumes "Arr t" shows "sources (Raise d n t) = {Raise d n (Src t)}" using assms by (simp add: Coinitial_Raise_Raise Src_Raise) lemma targets_Raise: assumes "Arr t" shows "targets (Raise d n t) = {Raise d n (Trg t)}" using assms by (metis Arr_Raise ConI Raise_resid resid_Arr_self targets_char\<^sub>\) lemma sources_subst [simp]: assumes "Arr t" and "Arr u" shows "sources (subst t u) = {subst (Src t) (Src u)}" using assms sources_char\<^sub>\ Arr_Subst arr_char by simp lemma targets_subst [simp]: assumes "Arr t" and "Arr u" shows "targets (subst t u) = {subst (Trg t) (Trg u)}" using assms targets_char\<^sub>\ Arr_Subst arr_char by simp notation prfx (infix "\" 50) notation cong (infix "\" 50) lemma prfx_char [iff]: shows "t \ u \ Ide (t \\ u)" using ide_char by simp lemma prfx_Var_iff: shows "u \ \<^bold>\i\<^bold>\ \ u = \<^bold>\i\<^bold>\" by (metis Arr.simps(2) Coinitial_iff_Con Ide.simps(1) Ide_iff_Src_self Src.simps(2) ide_char resid_Arr_Ide) lemma prfx_Lam_iff: shows "u \ Lam t \ is_Lam u \ un_Lam u \ t" using ide_char Arr_not_Nil Con_implies_is_Lam_iff_is_Lam Ide_implies_Arr is_Lam_def by fastforce lemma prfx_App_iff: shows "u \ t1 \<^bold>\ t2 \ is_App u \ un_App1 u \ t1 \ un_App2 u \ t2" using ide_char by (cases u; cases t1) auto lemma prfx_Beta_iff: shows "u \ \<^bold>\\<^bold>[t1\<^bold>] \<^bold>\ t2 \ (is_App u \ un_App1 u \ \<^bold>\\<^bold>[t1\<^bold>] \ un_App2 u \ t2 \ (0 \ FV (un_Lam (un_App1 u) \\ t1) \ un_App2 u \ t2)) \ (is_Beta u \ un_Beta1 u \ t1 \ un_Beta2 u \ t2 \ (0 \ FV (un_Beta1 u \\ t1) \ un_Beta2 u \ t2))" using ide_char Ide_Subst_iff by (cases u; cases "un_App1 u") auto lemma cong_Ide_are_eq: assumes "t \ u" and "Ide t" and "Ide u" shows "t = u" using assms by (metis Coinitial_iff_Con Ide_iff_Src_self con_char prfx_implies_con) lemma eq_Ide_are_cong: assumes "t = u" and "Ide t" shows "t \ u" using assms Ide_implies_Arr resid_Ide_Arr by blast sublocale weakly_extensional_rts resid apply unfold_locales by (metis Coinitial_iff_Con Ide_iff_Src_self Ide_implies_Arr ide_char ide_def) lemma is_weakly_extensional_rts: shows "weakly_extensional_rts resid" .. lemma src_char [simp]: shows "src t = (if Arr t then Src t else \<^bold>\)" using src_def by force lemma trg_char [simp]: shows "trg t = (if Arr t then Trg t else \<^bold>\)" by (metis Coinitial_iff_Con resid_Arr_self trg_def) text \ We ``almost'' have an extensional RTS. The case that fails is \\<^bold>\\<^bold>[t1\<^bold>] \<^bold>\ t2 \ u \ \<^bold>\\<^bold>[t1\<^bold>] \<^bold>\ t2 = u\. This is because \t1\ might ignore its argument, so that \subst t2 t1 = subst t2' t1\, with both sides being identities, even if \t2 \ t2'\. The following gives a concrete example of such a situation. 
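Informally (a reading added here, not part of the original text): in the example below the body of the \\\-abstraction does not mention its bound variable, so substituting the residual of either argument transition into it yields the same identity in both directions; the two transitions are therefore congruent, although their argument components differ and hence the transitions themselves are not equal.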
\ abbreviation non_extensional_ex1 where "non_extensional_ex1 \ \<^bold>\\<^bold>[\<^bold>\\<^bold>[\<^bold>\0\<^bold>\\<^bold>] \<^bold>\ \<^bold>\\<^bold>[\<^bold>\0\<^bold>\\<^bold>]\<^bold>] \<^bold>\ (\<^bold>\\<^bold>[\<^bold>\0\<^bold>\\<^bold>] \<^bold>\ \<^bold>\\<^bold>[\<^bold>\0\<^bold>\\<^bold>])" abbreviation non_extensional_ex2 where "non_extensional_ex2 \ \<^bold>\\<^bold>[\<^bold>\\<^bold>[\<^bold>\0\<^bold>\\<^bold>] \<^bold>\ \<^bold>\\<^bold>[\<^bold>\0\<^bold>\\<^bold>]\<^bold>] \<^bold>\ (\<^bold>\\<^bold>[\<^bold>\0\<^bold>\\<^bold>] \<^bold>\ \<^bold>\\<^bold>[\<^bold>\0\<^bold>\\<^bold>])" lemma non_extensional: shows "\<^bold>\\<^bold>[\<^bold>\1\<^bold>\\<^bold>] \<^bold>\ non_extensional_ex1 \ \<^bold>\\<^bold>[\<^bold>\1\<^bold>\\<^bold>] \<^bold>\ non_extensional_ex2" and "\<^bold>\\<^bold>[\<^bold>\1\<^bold>\\<^bold>] \<^bold>\ non_extensional_ex1 \ \<^bold>\\<^bold>[\<^bold>\1\<^bold>\\<^bold>] \<^bold>\ non_extensional_ex2" by auto text \ The following gives an example of two terms that are both coinitial and coterminal, but which are not congruent. \ abbreviation cong_nontrivial_ex1 where "cong_nontrivial_ex1 \ \<^bold>\\<^bold>[\<^bold>\0\<^bold>\ \<^bold>\ \<^bold>\0\<^bold>\\<^bold>] \<^bold>\ \<^bold>\\<^bold>[\<^bold>\0\<^bold>\ \<^bold>\ \<^bold>\0\<^bold>\\<^bold>] \<^bold>\ (\<^bold>\\<^bold>[\<^bold>\0\<^bold>\ \<^bold>\ \<^bold>\0\<^bold>\\<^bold>] \<^bold>\ \<^bold>\\<^bold>[\<^bold>\0\<^bold>\ \<^bold>\ \<^bold>\0\<^bold>\\<^bold>])" abbreviation cong_nontrivial_ex2 where "cong_nontrivial_ex2 \ \<^bold>\\<^bold>[\<^bold>\0\<^bold>\ \<^bold>\ \<^bold>\0\<^bold>\\<^bold>] \<^bold>\ \<^bold>\\<^bold>[\<^bold>\0\<^bold>\ \<^bold>\ \<^bold>\0\<^bold>\\<^bold>] \<^bold>\ (\<^bold>\\<^bold>[\<^bold>\0\<^bold>\ \<^bold>\ \<^bold>\0\<^bold>\\<^bold>] \<^bold>\ \<^bold>\\<^bold>[\<^bold>\0\<^bold>\ \<^bold>\ \<^bold>\0\<^bold>\\<^bold>])" lemma cong_nontrivial: shows "coinitial cong_nontrivial_ex1 cong_nontrivial_ex2" and "coterminal cong_nontrivial_ex1 cong_nontrivial_ex2" and "\ cong cong_nontrivial_ex1 cong_nontrivial_ex2" by auto text \ Every two coinitial transitions have a join, obtained structurally by unioning the sets of marked redexes. \ fun Join (infix "\" 52) where "\<^bold>\x\<^bold>\ \ \<^bold>\x'\<^bold>\ = (if x = x' then \<^bold>\x\<^bold>\ else \<^bold>\)" | "\<^bold>\\<^bold>[t\<^bold>] \ \<^bold>\\<^bold>[t'\<^bold>] = \<^bold>\\<^bold>[t \ t'\<^bold>]" | "\<^bold>\\<^bold>[t\<^bold>] \<^bold>\ u \ \<^bold>\\<^bold>[t'\<^bold>] \<^bold>\ u' = \<^bold>\\<^bold>[(t \ t')\<^bold>] \<^bold>\ (u \ u')" | "\<^bold>\\<^bold>[t\<^bold>] \<^bold>\ u \ \<^bold>\\<^bold>[t'\<^bold>] \<^bold>\ u' = \<^bold>\\<^bold>[(t \ t')\<^bold>] \<^bold>\ (u \ u')" | "t \<^bold>\ u \ t'\<^bold>\ u' = (t \ t') \<^bold>\ (u \ u')" | "\<^bold>\\<^bold>[t\<^bold>] \<^bold>\ u \ \<^bold>\\<^bold>[t'\<^bold>] \<^bold>\ u' = \<^bold>\\<^bold>[(t \ t')\<^bold>] \<^bold>\ (u \ u')" | "_ \ _ = \<^bold>\" lemma Join_sym: shows "t \ u = u \ t" using Join.induct [of "\t u. t \ u = u \ t"] by auto lemma Src_Join: shows "\u. Coinitial t u \ Src (t \ u) = Src t" proof (induct t) show "\u. Coinitial \<^bold>\ u \ Src (\<^bold>\ \ u) = Src \<^bold>\" by simp show "\x u. Coinitial \<^bold>\x\<^bold>\ u \ Src (\<^bold>\x\<^bold>\ \ u) = Src \<^bold>\x\<^bold>\" by auto fix t u assume ind: "\u. 
Coinitial t u \ Src (t \ u) = Src t" assume tu: "Coinitial \<^bold>\\<^bold>[t\<^bold>] u" show "Src (\<^bold>\\<^bold>[t\<^bold>] \ u) = Src \<^bold>\\<^bold>[t\<^bold>]" using tu ind by (cases u) auto next fix t1 t2 u assume ind1: "\u1. Coinitial t1 u1 \ Src (t1 \ u1) = Src t1" assume ind2: "\u2. Coinitial t2 u2 \ Src (t2 \ u2) = Src t2" assume tu: "Coinitial (t1 \<^bold>\ t2) u" show "Src (t1 \<^bold>\ t2 \ u) = Src (t1 \<^bold>\ t2)" using tu ind1 ind2 apply (cases u, simp_all) apply (cases t1, simp_all) by (metis Arr.simps(3) Join.simps(2) Src.simps(3) lambda.sel(2)) next fix t1 t2 u assume ind1: "\u1. Coinitial t1 u1 \ Src (t1 \ u1) = Src t1" assume ind2: "\u2. Coinitial t2 u2 \ Src (t2 \ u2) = Src t2" assume tu: "Coinitial (\<^bold>\\<^bold>[t1\<^bold>] \<^bold>\ t2) u" show "Src ((\<^bold>\\<^bold>[t1\<^bold>] \<^bold>\ t2) \ u) = Src (\<^bold>\\<^bold>[t1\<^bold>] \<^bold>\ t2)" using tu ind1 ind2 apply (cases u, simp_all) by (cases "un_App1 u") auto qed lemma resid_Join: shows "\u. Coinitial t u \ (t \ u) \\ u = t \\ u" proof (induct t) show "\u. Coinitial \<^bold>\ u \ (\<^bold>\ \ u) \\ u = \<^bold>\ \\ u" by auto show "\x u. Coinitial \<^bold>\x\<^bold>\ u \ (\<^bold>\x\<^bold>\ \ u) \\ u = \<^bold>\x\<^bold>\ \\ u" by auto fix t u assume ind: "\u. Coinitial t u \ (t \ u) \\ u = t \\ u" assume tu: "Coinitial \<^bold>\\<^bold>[t\<^bold>] u" show "(\<^bold>\\<^bold>[t\<^bold>] \ u) \\ u = \<^bold>\\<^bold>[t\<^bold>] \\ u" using tu ind by (cases u) auto next fix t1 t2 u assume ind1: "\u1. Coinitial t1 u1 \ (t1 \ u1) \\ u1 = t1 \\ u1" assume ind2: "\u2. Coinitial t2 u2 \ (t2 \ u2) \\ u2 = t2 \\ u2" assume tu: "Coinitial (t1 \<^bold>\ t2) u" show "(t1 \<^bold>\ t2 \ u) \\ u = (t1 \<^bold>\ t2) \\ u" using tu ind1 ind2 Coinitial_iff_Con apply (cases u, simp_all) proof - fix u1 u2 assume u: "u = \<^bold>\\<^bold>[u1\<^bold>] \<^bold>\ u2" have t2u2: "t2 \ u2" using Arr_not_Nil Arr_resid tu u by simp have t1u1: "Coinitial (un_Lam t1 \ u1) u1" proof - have "Arr (un_Lam t1 \ u1)" by (metis Arr.simps(3-5) Coinitial_iff_Con Con_implies_is_Lam_iff_is_Lam Join.simps(2) Src.simps(3-5) ind1 lambda.collapse(2) lambda.disc(8) lambda.sel(3) tu u) thus ?thesis using Src_Join by (metis Arr.simps(3-5) Coinitial_iff_Con Con_implies_is_Lam_iff_is_Lam Src.simps(3-5) lambda.collapse(2) lambda.disc(8) lambda.sel(2-3) tu u) qed have "un_Lam t1 \ u1" using t1u1 by (metis Coinitial_iff_Con Con_implies_is_Lam_iff_is_Lam ConD(4) lambda.collapse(2) lambda.disc(8) resid.simps(2) tu u) thus "(t1 \<^bold>\ t2 \ \<^bold>\\<^bold>[u1\<^bold>] \<^bold>\ u2) \\ (\<^bold>\\<^bold>[u1\<^bold>] \<^bold>\ u2) = (t1 \<^bold>\ t2) \\ (\<^bold>\\<^bold>[u1\<^bold>] \<^bold>\ u2)" using u tu t1u1 t2u2 ind1 ind2 apply (cases t1, auto) proof - fix v assume v: "t1 = \<^bold>\\<^bold>[v\<^bold>]" show "subst (t2 \\ u2) ((v \ u1) \\ u1) = subst (t2 \\ u2) (v \\ u1)" proof - have "subst (t2 \\ u2) ((v \ u1) \\ u1) = (t1 \<^bold>\ t2 \ \<^bold>\\<^bold>[u1\<^bold>] \<^bold>\ u2) \\ (\<^bold>\\<^bold>[u1\<^bold>] \<^bold>\ u2)" by (simp add: Coinitial_iff_Con ind2 t2u2 v) also have "... = (t1 \<^bold>\ t2) \\ (\<^bold>\\<^bold>[u1\<^bold>] \<^bold>\ u2)" proof - have "(t1 \<^bold>\ t2 \ \<^bold>\\<^bold>[u1\<^bold>] \<^bold>\ u2) \\ (\<^bold>\\<^bold>[u1\<^bold>] \<^bold>\ u2) = (\<^bold>\\<^bold>[(v \ u1)\<^bold>] \<^bold>\ (t2 \ u2)) \\ (\<^bold>\\<^bold>[u1\<^bold>] \<^bold>\ u2)" using v by simp also have "... = subst (t2 \\ u2) ((v \ u1) \\ u1)" by (simp add: Coinitial_iff_Con ind2 t2u2) also have "... 
= subst (t2 \\ u2) (v \\ u1)" proof - have "(t1 \ \<^bold>\\<^bold>[u1\<^bold>]) \\ \<^bold>\\<^bold>[u1\<^bold>] = t1 \\ \<^bold>\\<^bold>[u1\<^bold>]" using u tu ind1 by simp thus ?thesis using \un_Lam t1 \ u1 \ \<^bold>\\ t1u1 v by force qed also have "... = (t1 \<^bold>\ t2) \\ (\<^bold>\\<^bold>[u1\<^bold>] \<^bold>\ u2)" using tu u v by force finally show ?thesis by blast qed also have "... = subst (t2 \\ u2) (v \\ u1)" by (simp add: t2u2 v) finally show ?thesis by auto qed qed qed next fix t1 t2 u assume ind1: "\u1. Coinitial t1 u1 \ (t1 \ u1) \\ u1 = t1 \\ u1" assume ind2: "\u2. Coinitial t2 u2 \ (t2 \ u2) \\ u2 = t2 \\ u2" assume tu: "Coinitial (\<^bold>\\<^bold>[t1\<^bold>] \<^bold>\ t2) u" show "((\<^bold>\\<^bold>[t1\<^bold>] \<^bold>\ t2) \ u) \\ u = (\<^bold>\\<^bold>[t1\<^bold>] \<^bold>\ t2) \\ u" using tu ind1 ind2 Coinitial_iff_Con apply (cases u, simp_all) proof - fix u1 u2 assume u: "u = u1 \<^bold>\ u2" show "(\<^bold>\\<^bold>[t1\<^bold>] \<^bold>\ t2 \ u1 \<^bold>\ u2) \\ (u1 \<^bold>\ u2) = (\<^bold>\\<^bold>[t1\<^bold>] \<^bold>\ t2) \\ (u1 \<^bold>\ u2)" using ind1 ind2 tu u by (cases u1) auto qed qed lemma prfx_Join: shows "\u. Coinitial t u \ u \ t \ u" proof (induct t) show "\u. Coinitial \<^bold>\ u \ u \ \<^bold>\ \ u" by simp show "\x u. Coinitial \<^bold>\x\<^bold>\ u \ u \ \<^bold>\x\<^bold>\ \ u" by auto fix t u assume ind: "\u. Coinitial t u \ u \ t \ u" assume tu: "Coinitial \<^bold>\\<^bold>[t\<^bold>] u" show "u \ \<^bold>\\<^bold>[t\<^bold>] \ u" using tu ind apply (cases u, auto) by force next fix t1 t2 u assume ind1: "\u1. Coinitial t1 u1 \ u1 \ t1 \ u1" assume ind2: "\u2. Coinitial t2 u2 \ u2 \ t2 \ u2" assume tu: "Coinitial (t1 \<^bold>\ t2) u" show "u \ t1 \<^bold>\ t2 \ u" using tu ind1 ind2 Coinitial_iff_Con apply (cases u, simp_all) apply (metis Ide.simps(1)) proof - fix u1 u2 assume u: "u = \<^bold>\\<^bold>[u1\<^bold>] \<^bold>\ u2" assume 1: "Arr t1 \ Arr t2 \ Arr u1 \ Arr u2 \ Src t1 = \<^bold>\\<^bold>[Src u1\<^bold>] \ Src t2 = Src u2" have 2: "u1 \ un_Lam t1 \ u1" by (metis "1" Coinitial_iff_Con Con_implies_is_Lam_iff_is_Lam Con_Arr_Src(2) lambda.collapse(2) lambda.disc(8) resid.simps(2) resid_Join) have 3: "u2 \ t2 \ u2" by (metis "1" conE ind2 null_char prfx_implies_con) show "Ide ((\<^bold>\\<^bold>[u1\<^bold>] \<^bold>\ u2) \\ (t1 \<^bold>\ t2 \ \<^bold>\\<^bold>[u1\<^bold>] \<^bold>\ u2))" using u tu 1 2 3 ind1 ind2 apply (cases t1, simp_all) by (metis Arr.simps(3) Ide.simps(3) Ide_Subst Join.simps(2) Src.simps(3) resid.simps(2)) qed next fix t1 t2 u assume ind1: "\u1. Coinitial t1 u1 \ u1 \ t1 \ u1" assume ind2: "\u2. Coinitial t2 u2 \ u2 \ t2 \ u2" assume tu: "Coinitial (\<^bold>\\<^bold>[t1\<^bold>] \<^bold>\ t2) u" show "u \ (\<^bold>\\<^bold>[t1\<^bold>] \<^bold>\ t2) \ u" using tu ind1 ind2 Coinitial_iff_Con apply (cases u, simp_all) apply (cases "un_App1 u", simp_all) by (metis Ide.simps(1) Ide_Subst)+ qed lemma Ide_resid_Join: shows "\u. 
Coinitial t u \ Ide (u \\ (t \ u))" using ide_char prfx_Join by blast lemma join_of_Join: assumes "Coinitial t u" shows "join_of t u (t \ u)" proof (unfold join_of_def composite_of_def, intro conjI) show "t \ t \ u" using assms Join_sym prfx_Join [of u t] by simp show "u \ t \ u" using assms Ide_resid_Join ide_char by simp show "(t \ u) \\ t \ u \\ t" by (metis \prfx u (Join t u)\ arr_char assms cong_subst_right(2) prfx_implies_con prfx_reflexive resid_Join con_sym cube) show "u \\ t \ (t \ u) \\ t" by (metis Coinitial_resid_resid \prfx t (Join t u)\ \prfx u (Join t u)\ conE ide_char null_char prfx_implies_con resid_Ide_Arr cube) show "(t \ u) \\ u \ t \\ u" using \(t \ u) \ t \ u \ t\ cube by auto show "t \\ u \ (t \ u) \\ u" by (metis \(t \ u) \ t \ u \ t\ assms cube resid_Join) qed sublocale rts_with_joins resid using join_of_Join apply unfold_locales by (metis Coinitial_iff_Con conE joinable_def null_char) lemma is_rts_with_joins: shows "rts_with_joins resid" .. subsection "Simulations from Syntactic Constructors" text \ Here we show that the syntactic constructors \Lam\ and \App\, as well as the substitution operation \subst\, determine simulations. In addition, we show that \Beta\ determines a transformation from \App \ (Lam \ Id)\ to \subst\. \ abbreviation Lam\<^sub>e\<^sub>x\<^sub>t where "Lam\<^sub>e\<^sub>x\<^sub>t t \ if arr t then \<^bold>\\<^bold>[t\<^bold>] else \<^bold>\" lemma Lam_is_simulation: shows "simulation resid resid Lam\<^sub>e\<^sub>x\<^sub>t" using Arr_resid Coinitial_iff_Con by unfold_locales auto interpretation Lam: simulation resid resid Lam\<^sub>e\<^sub>x\<^sub>t using Lam_is_simulation by simp interpretation \x\: product_of_weakly_extensional_rts resid resid .. abbreviation App\<^sub>e\<^sub>x\<^sub>t where "App\<^sub>e\<^sub>x\<^sub>t t \ if \x\.arr t then fst t \<^bold>\ snd t else \<^bold>\" lemma App_is_binary_simulation: shows "binary_simulation resid resid resid App\<^sub>e\<^sub>x\<^sub>t" proof show "\t. \ \x\.arr t \ App\<^sub>e\<^sub>x\<^sub>t t = null" by auto show "\t u. \x\.con t u \ con (App\<^sub>e\<^sub>x\<^sub>t t) (App\<^sub>e\<^sub>x\<^sub>t u)" using \x\.con_char Coinitial_iff_Con by auto show "\t u. \x\.con t u \ App\<^sub>e\<^sub>x\<^sub>t (\x\.resid t u) = App\<^sub>e\<^sub>x\<^sub>t t \\ App\<^sub>e\<^sub>x\<^sub>t u" using \x\.arr_char \x\.resid_def apply simp by (metis Arr_resid_ind Con_implies_Arr1 Con_implies_Arr2) qed interpretation App: binary_simulation resid resid resid App\<^sub>e\<^sub>x\<^sub>t using App_is_binary_simulation by simp abbreviation subst\<^sub>e\<^sub>x\<^sub>t where "subst\<^sub>e\<^sub>x\<^sub>t \ \t. if \x\.arr t then subst (snd t) (fst t) else \<^bold>\" lemma subst_is_binary_simulation: shows "binary_simulation resid resid resid subst\<^sub>e\<^sub>x\<^sub>t" proof show "\t. \ \x\.arr t \ subst\<^sub>e\<^sub>x\<^sub>t t = null" by auto show "\t u. \x\.con t u \ con (subst\<^sub>e\<^sub>x\<^sub>t t) (subst\<^sub>e\<^sub>x\<^sub>t u)" using \x\.con_char con_char Subst_not_Nil resid_Subst \x\.coinitialE \x\.con_imp_coinitial apply simp by metis show "\t u. \x\.con t u \ subst\<^sub>e\<^sub>x\<^sub>t (\x\.resid t u) = subst\<^sub>e\<^sub>x\<^sub>t t \\ subst\<^sub>e\<^sub>x\<^sub>t u" using \x\.arr_char \x\.resid_def apply simp by (metis Arr_resid_ind Con_implies_Arr1 Con_implies_Arr2 resid_Subst) qed interpretation subst: binary_simulation resid resid resid subst\<^sub>e\<^sub>x\<^sub>t using subst_is_binary_simulation by simp interpretation Id: identity_simulation resid .. 
interpretation Lam_Id: product_simulation resid resid resid resid Lam\<^sub>e\<^sub>x\<^sub>t Id.map .. interpretation App_o_Lam_Id: composite_simulation \x\.resid \x\.resid resid Lam_Id.map App\<^sub>e\<^sub>x\<^sub>t .. abbreviation Beta\<^sub>e\<^sub>x\<^sub>t where "Beta\<^sub>e\<^sub>x\<^sub>t t \ if \x\.arr t then \<^bold>\\<^bold>[fst t\<^bold>] \<^bold>\ snd t else \<^bold>\" lemma Beta_is_transformation: shows "transformation \x\.resid resid App_o_Lam_Id.map subst\<^sub>e\<^sub>x\<^sub>t Beta\<^sub>e\<^sub>x\<^sub>t" proof show "\f. \ \x\.arr f \ Beta\<^sub>e\<^sub>x\<^sub>t f = null" by simp - show "\f. \x\.arr f \ src (Beta\<^sub>e\<^sub>x\<^sub>t f) = App_o_Lam_Id.map (\x\.src f)" + show "\f. \x\.ide f \ src (Beta\<^sub>e\<^sub>x\<^sub>t f) = App_o_Lam_Id.map (\x\.src f)" using \x\.src_char Lam_Id.map_def by simp - show "\f. \x\.arr f \ trg (Beta\<^sub>e\<^sub>x\<^sub>t f) = subst\<^sub>e\<^sub>x\<^sub>t (\x\.trg f)" + show "\f. \x\.ide f \ trg (Beta\<^sub>e\<^sub>x\<^sub>t f) = subst\<^sub>e\<^sub>x\<^sub>t (\x\.trg f)" using \x\.trg_char by simp show "\f. \x\.arr f \ Beta\<^sub>e\<^sub>x\<^sub>t (\x\.src f) \\ App_o_Lam_Id.map f = Beta\<^sub>e\<^sub>x\<^sub>t (\x\.trg f)" using \x\.src_char \x\.trg_char Arr_Trg Arr_not_Nil Lam_Id.map_def by simp show "\f. \x\.arr f \ App_o_Lam_Id.map f \\ Beta\<^sub>e\<^sub>x\<^sub>t (\x\.src f) = subst\<^sub>e\<^sub>x\<^sub>t f" using \x\.src_char \x\.trg_char Lam_Id.map_def by auto + show "\f. \x\.arr f \ join_of (Beta\<^sub>e\<^sub>x\<^sub>t (\x\.src f)) (App_o_Lam_Id.map f) (Beta\<^sub>e\<^sub>x\<^sub>t f)" + proof - + fix f + assume f: "\x\.arr f" + show "join_of (Beta\<^sub>e\<^sub>x\<^sub>t (\x\.src f)) (App_o_Lam_Id.map f) (Beta\<^sub>e\<^sub>x\<^sub>t f)" + proof (intro join_ofI composite_ofI) + show "App_o_Lam_Id.map f \ Beta\<^sub>e\<^sub>x\<^sub>t f" + using f Lam_Id.map_def Ide_Subst arr_char prfx_char prfx_reflexive by auto + show "Beta\<^sub>e\<^sub>x\<^sub>t f \\ Beta\<^sub>e\<^sub>x\<^sub>t (\x\.src f) \ App_o_Lam_Id.map f \\ Beta\<^sub>e\<^sub>x\<^sub>t (\x\.src f)" + using f Lam_Id.map_def \x\.src_char trg_char trg_def + apply auto + by (metis Arr_Subst Ide_Trg) + show 1: "Beta\<^sub>e\<^sub>x\<^sub>t f \\ App_o_Lam_Id.map f \ Beta\<^sub>e\<^sub>x\<^sub>t (\x\.src f) \\ App_o_Lam_Id.map f" + using f Lam_Id.map_def Ide_Subst \x\.src_char Ide_Trg Arr_resid_ind Coinitial_iff_Con + resid_Arr_self + apply simp + by metis + show "Beta\<^sub>e\<^sub>x\<^sub>t (\x\.src f) \ Beta\<^sub>e\<^sub>x\<^sub>t f" + using f 1 Lam_Id.map_def Ide_Subst \x\.src_char resid_Arr_self by auto + qed + qed qed text \ The next two results are used to show that mapping App over lists of transitions preserves paths. \ lemma App_is_simulation1: assumes "ide a" shows "simulation resid resid (\t. if arr t then t \<^bold>\ a else \<^bold>\)" proof - have "(\t. if \x\.arr (t, a) then fst (t, a) \<^bold>\ snd (t, a) else \<^bold>\) = (\t. if arr t then t \<^bold>\ a else \<^bold>\)" using assms ide_implies_arr by force thus ?thesis using assms App.fixing_ide_gives_simulation_2 [of a] by auto qed lemma App_is_simulation2: assumes "ide a" shows "simulation resid resid (\t. if arr t then a \<^bold>\ t else \<^bold>\)" proof - have "(\t. if \x\.arr (a, t) then fst (a, t) \<^bold>\ snd (a, t) else \<^bold>\) = (\t. 
if arr t then a \<^bold>\ t else \<^bold>\)" using assms ide_implies_arr by force thus ?thesis using assms App.fixing_ide_gives_simulation_1 [of a] by auto qed subsection "Reduction and Conversion" text \ Here we define the usual relations of reduction and conversion. Reduction is the least transitive relation that relates \a\ to \b\ if there exists an arrow \t\ having \a\ as its source and \b\ as its target. Conversion is the least transitive relation that relates \a\ to b if there exists an arrow \t\ in either direction between \a\ and \b\. \ inductive red where "Arr t \ red (Src t) (Trg t)" | "\red a b; red b c\ \ red a c" inductive cnv where "Arr t \ cnv (Src t) (Trg t)" | "Arr t \ cnv (Trg t) (Src t)" | "\cnv a b; cnv b c\ \ cnv a c" lemma cnv_refl: assumes "Ide a" shows "cnv a a" using assms by (metis Ide_iff_Src_self Ide_implies_Arr cnv.simps) lemma cnv_sym: shows "cnv a b \ cnv b a" apply (induct rule: cnv.induct) using cnv.intros(1-2) apply auto[2] using cnv.intros(3) by blast lemma red_imp_cnv: shows "red a b \ cnv a b" using cnv.intros(1,3) red.inducts by blast end text \ We now define a locale that extends the residuation operation defined above to paths, using general results that have already been shown for paths in an RTS. In particular, we are taking advantage of the general proof of the Cube Lemma for residuation on paths. Our immediate goal is to prove the Church-Rosser theorem, so we first prove a lemma that connects the reduction relation to paths. Later, we will prove many more facts in this locale, thereby developing a general framework for reasoning about reduction paths in the \\\-calculus. \ locale reduction_paths = \: lambda_calculus begin sublocale \: rts \.resid by (simp add: \.is_rts_with_joins rts_with_joins.axioms(1)) sublocale paths_in_weakly_extensional_rts \.resid .. sublocale paths_in_confluent_rts \.resid using confluent_rts.axioms(1) \.is_confluent_rts paths_in_rts_def paths_in_confluent_rts.intro by blast notation \.resid (infix "\\" 70) notation \.con (infix "\" 50) notation \.prfx (infix "\" 50) notation \.cong (infix "\" 50) notation Resid (infix "\<^sup>*\\\<^sup>*" 70) notation Resid1x (infix "\<^sup>1\\\<^sup>*" 70) notation Residx1 (infix "\<^sup>*\\\<^sup>1" 70) notation con (infix "\<^sup>*\\<^sup>*" 50) notation prfx (infix "\<^sup>*\\<^sup>*" 50) notation cong (infix "\<^sup>*\\<^sup>*" 50) lemma red_iff: shows "\.red a b \ (\T. Arr T \ Src T = a \ Trg T = b)" proof show "\.red a b \ \T. Arr T \ Src T = a \ Trg T = b" proof (induct rule: \.red.induct) show "\t. \.Arr t \ \T. Arr T \ Src T = \.Src t \ Trg T = \.Trg t" by (metis Arr.simps(2) Srcs.simps(2) Srcs_simp\<^sub>P\<^sub>W\<^sub>E Trg.simps(2) \.trg_def \.arr_char \.resid_Arr_self \.sources_char\<^sub>\ singleton_insert_inj_eq') show "\a b c. \\T. Arr T \ Src T = a \ Trg T = b; \T. Arr T \ Src T = b \ Trg T = c\ \ \T. Arr T \ Src T = a \ Trg T = c" by (metis Arr.simps(1) Arr_appendI\<^sub>P\<^sub>W\<^sub>E Srcs_append Srcs_simp\<^sub>P\<^sub>W\<^sub>E Trgs_append Trgs_simp\<^sub>P\<^sub>W\<^sub>E singleton_insert_inj_eq') qed show "\T. 
Arr T \ Src T = a \ Trg T = b \ \.red a b" proof - have "Arr T \ \.red (Src T) (Trg T)" for T proof (induct T) show "Arr [] \ \.red (Src []) (Trg [])" by auto fix t T assume ind: "Arr T \ \.red (Src T) (Trg T)" assume Arr: "Arr (t # T)" show "\.red (Src (t # T)) (Trg (t # T))" proof (cases "T = []") show "T = [] \ ?thesis" using Arr arr_char \.red.intros(1) by simp assume T: "T \ []" have "\.red (Src (t # T)) (\.Trg t)" apply simp by (meson Arr Arr.simps(2) Con_Arr_self Con_implies_Arr(1) Con_initial_left \.arr_char \.red.intros(1)) moreover have "\.Trg t = Src T" using Arr by (metis Arr.elims(2) Srcs_simp\<^sub>P\<^sub>W\<^sub>E T \.arr_iff_has_target insert_subset \.targets_char\<^sub>\ list.sel(1) list.sel(3) singleton_iff) ultimately show ?thesis using ind by (metis (no_types, opaque_lifting) Arr Con_Arr_self Con_implies_Arr(2) Resid_cons(2) T Trg.simps(3) \.red.intros(2) neq_Nil_conv) qed qed thus "\T. Arr T \ Src T = a \ Trg T = b \ \.red a b" by blast qed qed end subsection "The Church-Rosser Theorem" context lambda_calculus begin interpretation \x: reduction_paths . theorem church_rosser: shows "cnv a b \ \c. red a c \ red b c" proof (induct rule: cnv.induct) show "\t. Arr t \ \c. red (Src t) c \ red (Trg t) c" by (metis Ide_Trg Ide_iff_Src_self Ide_iff_Trg_self Ide_implies_Arr red.intros(1)) thus "\t. Arr t \ \c. red (Trg t) c \ red (Src t) c" by auto show "\a b c. \cnv a b; cnv b c; \x. red a x \ red b x; \y. red b y \ red c y\ \ \z. red a z \ red c z" proof - fix a b c assume ind1: "\x. red a x \ red b x" and ind2: "\y. red b y \ red c y" obtain x where x: "red a x \ red b x" using ind1 by blast obtain y where y: "red b y \ red c y" using ind2 by blast obtain T1 U1 where 1: "\x.Arr T1 \ \x.Arr U1 \ \x.Src T1 = a \ \x.Src U1 = b \ \x.Trgs T1 = \x.Trgs U1" using x \x.red_iff [of a x] \x.red_iff [of b x] by fastforce obtain T2 U2 where 2: "\x.Arr T2 \ \x.Arr U2 \ \x.Src T2 = b \ \x.Src U2 = c \ \x.Trgs T2 = \x.Trgs U2" using y \x.red_iff [of b y] \x.red_iff [of c y] by fastforce show "\e. red a e \ red c e" proof - let ?T = "T1 @ (\x.Resid T2 U1)" and ?U = "U2 @ (\x.Resid U1 T2)" have 3: "\x.Arr ?T \ \x.Arr ?U \ \x.Src ?T = a \ \x.Src ?U = c" using 1 2 by (metis \x.Arr_appendI\<^sub>P\<^sub>W\<^sub>E \x.Arr_has_Trg \x.Con_imp_Arr_Resid \x.Src_append \x.Src_resid \x.Srcs_simp\<^sub>P\<^sub>W\<^sub>E \x.Trgs.simps(1) \x.Trgs_simp\<^sub>P\<^sub>W\<^sub>E \x.arrI\<^sub>P \x.arr_append_imp_seq \x.confluence_ind singleton_insert_inj_eq') moreover have "\x.Trgs ?T = \x.Trgs ?U" using 1 2 3 \x.Srcs_simp\<^sub>P\<^sub>W\<^sub>E \x.Trgs_Resid_sym \x.Trgs_append \x.confluence_ind by presburger ultimately have "\T U. \x.Arr T \ \x.Arr U \ \x.Src T = a \ \x.Src U = c \ \x.Trgs T = \x.Trgs U" by blast thus ?thesis using \x.red_iff \x.Arr_has_Trg by fastforce qed qed qed corollary weak_diamond: assumes "red a b" and "red a b'" obtains c where "red b c" and "red b' c" proof - have "cnv b b'" using assms by (metis cnv.intros(1) cnv.intros(3) cnv_sym red.induct) thus ?thesis using that church_rosser by blast qed text \ As a consequence of the Church-Rosser Theorem, the collection of all reduction paths forms a coherent normal sub-RTS of the RTS of reduction paths, and on identities the congruence induced by this normal sub-RTS coincides with convertibility. The quotient of the \\\-calculus RTS by this congruence is then obviously discrete: the only transitions are identities. \ interpretation Red: normal_sub_rts \x.Resid \Collect \x.Arr\ proof show "\t. 
t \ Collect \x.Arr \ \x.arr t" by blast show "\a. \x.ide a \ a \ Collect \x.Arr" using \x.Ide_char \x.ide_char by blast show "\u t. \u \ Collect \x.Arr; \x.coinitial t u\ \ \x.Resid u t \ Collect \x.Arr" by (metis \x.Con_imp_Arr_Resid \x.Resid.simps(1) \x.con_sym \x.confluence\<^sub>P \x.ide_def \\a. \x.ide a \ a \ Collect \x.Arr\ mem_Collect_eq \x.arr_resid_iff_con) show "\u t. \u \ Collect \x.Arr; \x.Resid t u \ Collect \x.Arr\ \ t \ Collect \x.Arr" by (metis \x.Arr.simps(1) \x.Con_implies_Arr(1) mem_Collect_eq) show "\u t. \u \ Collect \x.Arr; \x.seq u t\ \ \v. \x.composite_of u t v" by (meson \x.obtains_composite_of) show "\u t. \u \ Collect \x.Arr; \x.seq t u\ \ \v. \x.composite_of t u v" by (meson \x.obtains_composite_of) qed interpretation Red: coherent_normal_sub_rts \x.Resid \Collect \x.Arr\ apply unfold_locales by (metis Red.Cong_closure_props(4) Red.Cong_imp_arr(2) \x.Con_imp_Arr_Resid \x.arr_resid_iff_con \x.con_char \x.sources_resid mem_Collect_eq) lemma cnv_iff_Cong: assumes "ide a" and "ide b" shows "cnv a b \ Red.Cong [a] [b]" proof assume 1: "Red.Cong [a] [b]" obtain U V where UV: "\x.Arr U \ \x.Arr V \ Red.Cong\<^sub>0 (\x.Resid [a] U) (\x.Resid [b] V)" using 1 Red.Cong_def [of "[a]" "[b]"] by blast have "red a (\x.Trg U) \ red b (\x.Trg V)" by (metis UV \x.Arr.simps(1) \x.Con_implies_Arr(1) \x.Resid_single_ide(2) \x.Src_resid \x.Trg.simps(2) assms(1-2) mem_Collect_eq reduction_paths.red_iff trg_ide) moreover have "\x.Trg U = \x.Trg V" using UV by (metis (no_types, lifting) Red.Cong\<^sub>0_imp_con \x.Arr.simps(1) \x.Con_Arr_self \x.Con_implies_Arr(1) \x.Resid_single_ide(2) \x.Src_resid \x.cube \x.ide_def \x.resid_arr_ide assms(1) mem_Collect_eq) ultimately show "cnv a b" by (metis cnv_sym cnv.intros(3) red_imp_cnv) next assume 1: "cnv a b" obtain c where c: "red a c \ red b c" using 1 church_rosser by blast obtain U where U: "\x.Arr U \ \x.Src U = a \ \x.Trg U = c" using c \x.red_iff by blast obtain V where V: "\x.Arr V \ \x.Src V = b \ \x.Trg V = c" using c \x.red_iff by blast have "\x.Resid1x a U = c \ \x.Resid1x b V = c" by (metis U V \x.Con_single_ide_ind \x.Ide.simps(2) \x.Resid1x_as_Resid \x.Resid_Ide_Arr_ind \x.Resid_single_ide(2) \x.Srcs_simp\<^sub>P\<^sub>W\<^sub>E \x.Trg.simps(2) \x.Trg_resid_sym \x.ex_un_Src assms(1-2) singletonD trg_ide) hence "Red.Cong\<^sub>0 (\x.Resid [a] U) (\x.Resid [b] V)" by (metis Red.Cong\<^sub>0_reflexive U V \x.Con_single_ideI(1) \x.Resid1x_as_Resid \x.Srcs_simp\<^sub>P\<^sub>W\<^sub>E \x.arr_resid \x.con_char assms(1-2) empty_set list.set_intros(1) list.simps(15)) thus "Red.Cong [a] [b]" using U V Red.Cong_def [of "[a]" "[b]"] by blast qed interpretation \q: quotient_by_coherent_normal \x.Resid \Collect \x.Arr\ .. lemma quotient_by_cnv_is_discrete: shows "\q.arr t \ \q.ide t" by (metis Red.Cong_class_memb_is_arr \q.arr_char \q.ide_char' \x.arr_char mem_Collect_eq subsetI) subsection "Normalization" text \ A \emph{normal form} is an identity that is not the source of any non-identity arrow. \ definition NF where "NF a \ Ide a \ (\t. 
Arr t \ Src t = a \ Ide t)" lemma (in reduction_paths) path_from_NF_is_Ide: assumes "\.NF a" shows "\Arr U; Src U = a\ \ Ide U" proof (induct U, simp) fix u U assume ind: "\Arr U; Src U = a\ \ Ide U" assume uU: "Arr (u # U)" and a: "Src (u # U) = a" have "\.Ide u" using assms a \.NF_def uU by force thus "Ide (u # U)" using uU ind apply (cases "U = []") apply simp by (metis Arr_consE Con_Arr_self Con_initial_right Ide.simps(2) Ide_consI Resid_Arr_Ide_ind Src_resid Trg.simps(2) a \.ide_char) qed lemma NF_reduct_is_trivial: assumes "NF a" and "red a b" shows "a = b" proof - interpret \x: reduction_paths . have "\U. \\x.Arr U; a \ \x.Srcs U\ \ \x.Ide U" using assms \x.path_from_NF_is_Ide by (simp add: \x.Srcs_simp\<^sub>P\<^sub>W\<^sub>E) thus ?thesis using assms \x.red_iff by (metis \x.Con_Arr_self \x.Resid_Arr_Ide_ind \x.Src_resid \x.path_from_NF_is_Ide) qed lemma NF_unique: assumes "red t u" and "red t u'" and "NF u" and "NF u'" shows "u = u'" using assms weak_diamond NF_reduct_is_trivial by metis text \ A term is \emph{normalizable} if it is an identity that is reducible to a normal form. \ definition normalizable where "normalizable a \ Ide a \ (\b. red a b \ NF b)" end section "Reduction Paths" text \ In this section we develop further facts about reduction paths for the \\\-calculus. \ context reduction_paths begin subsection "Sources and Targets" lemma Srcs_simp\<^sub>\\<^sub>P: shows "Arr t \ Srcs t = {\.Src (hd t)}" by (metis Arr_has_Src Srcs.elims list.sel(1) \.sources_char\<^sub>\) lemma Trgs_simp\<^sub>\\<^sub>P: shows "Arr t \ Trgs t = {\.Trg (last t)}" by (metis Arr.simps(1) Arr_has_Trg Trgs.simps(2) Trgs_append append_butlast_last_id not_Cons_self2 \.targets_char\<^sub>\) lemma sources_single_Src [simp]: assumes "\.Arr t" shows "sources [\.Src t] = sources [t]" using assms by (metis \.Con_Arr_Src(1) \.Ide_Src Ide.simps(2) Resid.simps(3) con_char ideE ide_char sources_resid \.con_char \.ide_char list.discI \.resid_Arr_Src) lemma targets_single_Trg [simp]: assumes "\.Arr t" shows "targets [\.Trg t] = targets [t]" using assms by (metis (full_types) Resid.simps(3) conI\<^sub>P \.Arr_Trg \.arr_char \.resid_Arr_Src \.resid_Src_Arr \.arr_resid_iff_con targets_resid_sym) lemma sources_single_Trg [simp]: assumes "\.Arr t" shows "sources [\.Trg t] = targets [t]" using assms by (metis \.Ide_Trg Ide.simps(2) ideE ide_char sources_resid \.ide_char targets_single_Trg) lemma targets_single_Src [simp]: assumes "\.Arr t" shows "targets [\.Src t] = sources [t]" using assms by (metis \.Arr_Src \.Trg_Src sources_single_Src sources_single_Trg) lemma single_Src_hd_in_sources: assumes "Arr T" shows "[\.Src (hd T)] \ sources T" using assms by (metis Arr.simps(1) Arr_has_Src Ide.simps(2) Resid_Arr_Src Srcs_simp\<^sub>P \.source_is_ide conI\<^sub>P empty_set ide_char in_sourcesI \.sources_char\<^sub>\ list.set_intros(1) list.simps(15)) lemma single_Trg_last_in_targets: assumes "Arr T" shows "[\.Trg (last T)] \ targets T" using assms targets_char\<^sub>P Arr_imp_arr_last Trgs_simp\<^sub>\\<^sub>P \.Ide_Trg by fastforce lemma in_sources_iff: assumes "Arr T" shows "A \ sources T \ A \<^sup>*\\<^sup>* [\.Src (hd T)]" using assms by (meson single_Src_hd_in_sources sources_are_cong sources_cong_closed) lemma in_targets_iff: assumes "Arr T" shows "B \ targets T \ B \<^sup>*\\<^sup>* [\.Trg (last T)]" using assms by (meson single_Trg_last_in_targets targets_are_cong targets_cong_closed) lemma seq_imp_cong_Trg_last_Src_hd: assumes "seq T U" shows "\.Trg (last T) \ \.Src (hd U)" using assms Arr_imp_arr_hd 
Arr_imp_arr_last Srcs_simp\<^sub>P\<^sub>W\<^sub>E Trgs_simp\<^sub>P\<^sub>W\<^sub>E \.cong_reflexive seq_char by (metis Srcs_simp\<^sub>\\<^sub>P Trgs_simp\<^sub>\\<^sub>P \.Arr_Trg \.arr_char singleton_inject) lemma sources_char\<^sub>\\<^sub>P: shows "sources T = {A. Arr T \ A \<^sup>*\\<^sup>* [\.Src (hd T)]}" using in_sources_iff arr_char sources_char\<^sub>P by auto lemma targets_char\<^sub>\\<^sub>P: shows "targets T = {B. Arr T \ B \<^sup>*\\<^sup>* [\.Trg (last T)]}" using in_targets_iff arr_char targets_char by auto lemma Src_hd_eqI: assumes "cong T U" shows "\.Src (hd T) = \.Src (hd U)" using assms by (metis Con_imp_eq_Srcs Con_implies_Arr(1) Ide.simps(1) Srcs_simp\<^sub>\\<^sub>P ide_char singleton_insert_inj_eq') lemma Trg_last_eqI: assumes "cong T U" shows "\.Trg (last T) = \.Trg (last U)" proof - have 1: "[\.Trg (last T)] \ targets T \ [\.Trg (last U)] \ targets U" using assms by (metis Con_implies_Arr(1) Ide.simps(1) ide_char single_Trg_last_in_targets) have "\.cong (\.Trg (last T)) (\.Trg (last U))" by (metis "1" Ide.simps(2) Resid.simps(3) assms con_char cong_implies_coterminal coterminal_iff ide_char prfx_implies_con targets_are_cong) moreover have "\.Ide (\.Trg (last T)) \ \.Ide (\.Trg (last U))" using "1" Ide.simps(2) ide_char by blast ultimately show ?thesis using \.weak_extensionality by blast qed lemma Trg_last_Src_hd_eqI: assumes "seq T U" shows "\.Trg (last T) = \.Src (hd U)" using assms Arr_imp_arr_hd Arr_imp_arr_last \.Ide_Src \.weak_extensionality \.Ide_Trg seq_char seq_imp_cong_Trg_last_Src_hd by force lemma seqI\<^sub>\\<^sub>P [intro]: assumes "Arr T" and "Arr U" and "\.Trg (last T) = \.Src (hd U)" shows "seq T U" by (metis assms Arr_imp_arr_last Srcs_simp\<^sub>\\<^sub>P \.arr_char \.targets_char\<^sub>\ Trgs_simp\<^sub>P seq_char) lemma conI\<^sub>\\<^sub>P [intro]: assumes "arr T" and "arr U" and "\.Src (hd T) = \.Src (hd U)" shows "T \<^sup>*\\<^sup>* U" using assms by (simp add: Srcs_simp\<^sub>\\<^sub>P arr_char con_char confluence_ind) subsection "Mapping Constructors over Paths" lemma Arr_map_Lam: assumes "Arr T" shows "Arr (map \.Lam T)" proof - interpret Lam: simulation \.resid \.resid \\t. if \.arr t then \<^bold>\\<^bold>[t\<^bold>] else \<^bold>\\ using \.Lam_is_simulation by simp interpret simulation Resid Resid \\T. if Arr T then map (\t. if \.arr t then \<^bold>\\<^bold>[t\<^bold>] else \<^bold>\) T else []\ using assms Lam.lifts_to_paths by blast have "map (\t. if \.Arr t then \<^bold>\\<^bold>[t\<^bold>] else \<^bold>\) T = map \.Lam T" using assms set_Arr_subset_arr by fastforce thus ?thesis using assms preserves_reflects_arr [of T] arr_char by (simp add: \map (\t. if \.Arr t then \<^bold>\\<^bold>[t\<^bold>] else \<^bold>\) T = map \.Lam T\) qed lemma Arr_map_App1: assumes "\.Ide b" and "Arr T" shows "Arr (map (\t. t \<^bold>\ b) T)" proof - interpret App1: simulation \.resid \.resid \\t. if \.arr t then t \<^bold>\ b else \<^bold>\\ using assms \.App_is_simulation1 [of b] by simp interpret simulation Resid Resid \\T. if Arr T then map (\t. if \.arr t then t \<^bold>\ b else \<^bold>\) T else []\ using assms App1.lifts_to_paths by blast have "map (\t. if \.arr t then t \<^bold>\ b else \<^bold>\) T = map (\t. t \<^bold>\ b) T" using assms set_Arr_subset_arr by auto thus ?thesis using assms preserves_reflects_arr arr_char by (metis (mono_tags, lifting)) qed lemma Arr_map_App2: assumes "\.Ide a" and "Arr T" shows "Arr (map (\.App a) T)" proof - interpret App2: simulation \.resid \.resid \\u. 
if \.arr u then a \<^bold>\ u else \<^bold>\\ using assms \.App_is_simulation2 by simp interpret simulation Resid Resid \\T. if Arr T then map (\u. if \.arr u then a \<^bold>\ u else \<^bold>\) T else []\ using assms App2.lifts_to_paths by blast have "map (\u. if \.arr u then a \<^bold>\ u else \<^bold>\) T = map (\u. a \<^bold>\ u) T" using assms set_Arr_subset_arr by auto thus ?thesis using assms preserves_reflects_arr arr_char by (metis (mono_tags, lifting)) qed interpretation \\<^sub>L\<^sub>a\<^sub>m: sub_rts \.resid \\t. \.Arr t \ \.is_Lam t\ proof show "\t. \.Arr t \ \.is_Lam t \ \.arr t" by blast show "\t. \.Arr t \ \.is_Lam t \ \.sources t \ {t. \.Arr t \ \.is_Lam t}" by auto show "\\.Arr t \ \.is_Lam t; \.Arr u \ \.is_Lam u; \.con t u\ \ \.Arr (t \\ u) \ \.is_Lam (t \\ u)" for t u apply (cases t; cases u) apply simp_all using \.Coinitial_resid_resid by presburger qed interpretation un_Lam: simulation \\<^sub>L\<^sub>a\<^sub>m.resid \.resid \\t. if \\<^sub>L\<^sub>a\<^sub>m.arr t then \.un_Lam t else \<^bold>\\ proof let ?un_Lam = "\t. if \\<^sub>L\<^sub>a\<^sub>m.arr t then \.un_Lam t else \<^bold>\" show "\t. \ \\<^sub>L\<^sub>a\<^sub>m.arr t \ ?un_Lam t = \.null" by auto show "\t u. \\<^sub>L\<^sub>a\<^sub>m.con t u \ \.con (?un_Lam t) (?un_Lam u)" by auto show "\t u. \\<^sub>L\<^sub>a\<^sub>m.con t u \ ?un_Lam (\\<^sub>L\<^sub>a\<^sub>m.resid t u) = ?un_Lam t \\ ?un_Lam u" using \\<^sub>L\<^sub>a\<^sub>m.resid_closed \\<^sub>L\<^sub>a\<^sub>m.resid_def by auto qed lemma Arr_map_un_Lam: assumes "Arr T" and "set T \ Collect \.is_Lam" shows "Arr (map \.un_Lam T)" proof - have "map (\t. if \\<^sub>L\<^sub>a\<^sub>m.arr t then \.un_Lam t else \<^bold>\) T = map \.un_Lam T" using assms set_Arr_subset_arr by auto thus ?thesis using assms by (metis (no_types, lifting) \\<^sub>L\<^sub>a\<^sub>m.path_reflection \.arr_char mem_Collect_eq set_Arr_subset_arr subset_code(1) un_Lam.preserves_paths) qed interpretation \\<^sub>A\<^sub>p\<^sub>p: sub_rts \.resid \\t. \.Arr t \ \.is_App t\ proof show "\t. \.Arr t \ \.is_App t \ \.arr t" by blast show "\t. \.Arr t \ \.is_App t \ \.sources t \ {t. \.Arr t \ \.is_App t}" by auto show "\\.Arr t \ \.is_App t; \.Arr u \ \.is_App u; \.con t u\ \ \.Arr (t \\ u) \ \.is_App (t \\ u)" for t u using \.Arr_resid_ind by (cases t; cases u) auto qed interpretation un_App1: simulation \\<^sub>A\<^sub>p\<^sub>p.resid \.resid \\t. if \\<^sub>A\<^sub>p\<^sub>p.arr t then \.un_App1 t else \<^bold>\\ proof let ?un_App1 = "\t. if \\<^sub>A\<^sub>p\<^sub>p.arr t then \.un_App1 t else \<^bold>\" show "\t. \ \\<^sub>A\<^sub>p\<^sub>p.arr t \ ?un_App1 t = \.null" by auto show "\t u. \\<^sub>A\<^sub>p\<^sub>p.con t u \ \.con (?un_App1 t) (?un_App1 u)" by auto show "\\<^sub>A\<^sub>p\<^sub>p.con t u \ ?un_App1 (\\<^sub>A\<^sub>p\<^sub>p.resid t u) = ?un_App1 t \\ ?un_App1 u" for t u using \\<^sub>A\<^sub>p\<^sub>p.resid_def \.Arr_resid_ind by (cases t; cases u) auto qed interpretation un_App2: simulation \\<^sub>A\<^sub>p\<^sub>p.resid \.resid \\t. if \\<^sub>A\<^sub>p\<^sub>p.arr t then \.un_App2 t else \<^bold>\\ proof let ?un_App2 = "\t. if \\<^sub>A\<^sub>p\<^sub>p.arr t then \.un_App2 t else \<^bold>\" show "\t. \ \\<^sub>A\<^sub>p\<^sub>p.arr t \ ?un_App2 t = \.null" by auto show "\t u. 
\\<^sub>A\<^sub>p\<^sub>p.con t u \ \.con (?un_App2 t) (?un_App2 u)" by auto show "\\<^sub>A\<^sub>p\<^sub>p.con t u \ ?un_App2 (\\<^sub>A\<^sub>p\<^sub>p.resid t u) = ?un_App2 t \\ ?un_App2 u" for t u using \\<^sub>A\<^sub>p\<^sub>p.resid_def \.Arr_resid_ind by (cases t; cases u) auto qed lemma Arr_map_un_App1: assumes "Arr T" and "set T \ Collect \.is_App" shows "Arr (map \.un_App1 T)" proof - interpret P\<^sub>A\<^sub>p\<^sub>p: paths_in_rts \\<^sub>A\<^sub>p\<^sub>p.resid .. interpret un_App1: simulation P\<^sub>A\<^sub>p\<^sub>p.Resid Resid \\T. if P\<^sub>A\<^sub>p\<^sub>p.Arr T then map (\t. if \\<^sub>A\<^sub>p\<^sub>p.arr t then \.un_App1 t else \<^bold>\) T else []\ using un_App1.lifts_to_paths by simp have 1: "map (\t. if \\<^sub>A\<^sub>p\<^sub>p.arr t then \.un_App1 t else \<^bold>\) T = map \.un_App1 T" using assms set_Arr_subset_arr by auto have 2: "P\<^sub>A\<^sub>p\<^sub>p.Arr T" using assms set_Arr_subset_arr \\<^sub>A\<^sub>p\<^sub>p.path_reflection [of T] by blast hence "arr (if P\<^sub>A\<^sub>p\<^sub>p.Arr T then map (\t. if \\<^sub>A\<^sub>p\<^sub>p.arr t then \.un_App1 t else \<^bold>\) T else [])" using un_App1.preserves_reflects_arr [of T] by blast hence "Arr (if P\<^sub>A\<^sub>p\<^sub>p.Arr T then map (\t. if \\<^sub>A\<^sub>p\<^sub>p.arr t then \.un_App1 t else \<^bold>\) T else [])" using arr_char by auto hence "Arr (if P\<^sub>A\<^sub>p\<^sub>p.Arr T then map \.un_App1 T else [])" using 1 by metis thus ?thesis using 2 by simp qed lemma Arr_map_un_App2: assumes "Arr T" and "set T \ Collect \.is_App" shows "Arr (map \.un_App2 T)" proof - interpret P\<^sub>A\<^sub>p\<^sub>p: paths_in_rts \\<^sub>A\<^sub>p\<^sub>p.resid .. interpret un_App2: simulation P\<^sub>A\<^sub>p\<^sub>p.Resid Resid \\T. if P\<^sub>A\<^sub>p\<^sub>p.Arr T then map (\t. if \\<^sub>A\<^sub>p\<^sub>p.arr t then \.un_App2 t else \<^bold>\) T else []\ using un_App2.lifts_to_paths by simp have 1: "map (\t. if \\<^sub>A\<^sub>p\<^sub>p.arr t then \.un_App2 t else \<^bold>\) T = map \.un_App2 T" using assms set_Arr_subset_arr by auto have 2: "P\<^sub>A\<^sub>p\<^sub>p.Arr T" using assms set_Arr_subset_arr \\<^sub>A\<^sub>p\<^sub>p.path_reflection [of T] by blast hence "arr (if P\<^sub>A\<^sub>p\<^sub>p.Arr T then map (\t. if \\<^sub>A\<^sub>p\<^sub>p.arr t then \.un_App2 t else \<^bold>\) T else [])" using un_App2.preserves_reflects_arr [of T] by blast hence "Arr (if P\<^sub>A\<^sub>p\<^sub>p.Arr T then map (\t. if \\<^sub>A\<^sub>p\<^sub>p.arr t then \.un_App2 t else \<^bold>\) T else [])" using arr_char by blast hence "Arr (if P\<^sub>A\<^sub>p\<^sub>p.Arr T then map \.un_App2 T else [])" using 1 by metis thus ?thesis using 2 by simp qed lemma map_App_map_un_App1: shows "\Arr U; set U \ Collect \.is_App; \.Ide b; \.un_App2 ` set U \ {b}\ \ map (\t. \.App t b) (map \.un_App1 U) = U" by (induct U) auto lemma map_App_map_un_App2: shows "\Arr U; set U \ Collect \.is_App; \.Ide a; \.un_App1 ` set U \ {a}\ \ map (\.App a) (map \.un_App2 U) = U" by (induct U) auto lemma map_Lam_Resid: assumes "coinitial T U" shows "map \.Lam (T \<^sup>*\\\<^sup>* U) = map \.Lam T \<^sup>*\\\<^sup>* map \.Lam U" proof - interpret Lam: simulation \.resid \.resid \\t. if \.arr t then \<^bold>\\<^bold>[t\<^bold>] else \<^bold>\\ using \.Lam_is_simulation by simp interpret Lamx: simulation Resid Resid \\T. if Arr T then map (\t. if \.arr t then \<^bold>\\<^bold>[t\<^bold>] else \<^bold>\) T else []\ using Lam.lifts_to_paths by simp have "\T. Arr T \ map (\t. 
if \.arr t then \<^bold>\\<^bold>[t\<^bold>] else \<^bold>\) T = map \.Lam T" using set_Arr_subset_arr by auto moreover have "Arr (T \<^sup>*\\\<^sup>* U)" using assms confluence\<^sub>P Con_imp_Arr_Resid con_char by force moreover have "T \<^sup>*\\<^sup>* U" using assms confluence by simp moreover have "Arr T \ Arr U" using assms arr_char by auto ultimately show ?thesis using assms Lamx.preserves_resid [of T U] by presburger qed lemma map_App1_Resid: assumes "\.Ide x" and "coinitial T U" shows "map (\.App x) (T \<^sup>*\\\<^sup>* U) = map (\.App x) T \<^sup>*\\\<^sup>* map (\.App x) U" proof - interpret App: simulation \.resid \.resid \\t. if \.arr t then x \<^bold>\ t else \<^bold>\\ using assms \.App_is_simulation2 by simp interpret Appx: simulation Resid Resid \\T. if Arr T then map (\t. if \.arr t then x \<^bold>\ t else \<^bold>\) T else []\ using App.lifts_to_paths by simp have "\T. Arr T \ map (\t. if \.arr t then x \<^bold>\ t else \<^bold>\) T = map (\.App x) T" using set_Arr_subset_arr by auto moreover have "Arr (T \<^sup>*\\\<^sup>* U)" using assms confluence\<^sub>P Con_imp_Arr_Resid con_char by force moreover have "T \<^sup>*\\<^sup>* U" using assms confluence by simp moreover have "Arr T \ Arr U" using assms arr_char by auto ultimately show ?thesis using assms Appx.preserves_resid [of T U] by presburger qed lemma map_App2_Resid: assumes "\.Ide x" and "coinitial T U" shows "map (\t. t \<^bold>\ x) (T \<^sup>*\\\<^sup>* U) = map (\t. t \<^bold>\ x) T \<^sup>*\\\<^sup>* map (\t. t \<^bold>\ x) U" proof - interpret App: simulation \.resid \.resid \\t. if \.arr t then t \<^bold>\ x else \<^bold>\\ using assms \.App_is_simulation1 by simp interpret Appx: simulation Resid Resid \\T. if Arr T then map (\t. if \.arr t then t \<^bold>\ x else \<^bold>\) T else []\ using App.lifts_to_paths by simp have "\T. Arr T \ map (\t. if \.arr t then t \<^bold>\ x else \<^bold>\) T = map (\t. t \<^bold>\ x) T" using set_Arr_subset_arr by auto moreover have "Arr (T \<^sup>*\\\<^sup>* U)" using assms confluence\<^sub>P Con_imp_Arr_Resid con_char by force moreover have "T \<^sup>*\\<^sup>* U" using assms confluence by simp moreover have "Arr T \ Arr U" using assms arr_char by auto ultimately show ?thesis using assms Appx.preserves_resid [of T U] by presburger qed lemma cong_map_Lam: shows "\T. T \<^sup>*\\<^sup>* U \ map \.Lam T \<^sup>*\\<^sup>* map \.Lam U" apply (induct U) apply (simp add: ide_char) by (metis map_Lam_Resid cong_implies_coinitial cong_reflexive ideE map_is_Nil_conv Con_imp_Arr_Resid arr_char) lemma cong_map_App1: shows "\x T. \\.Ide x; T \<^sup>*\\<^sup>* U\ \ map (\.App x) T \<^sup>*\\<^sup>* map (\.App x) U" apply (induct U) apply (simp add: ide_char) apply (intro conjI) by (metis Nil_is_map_conv arr_resid_iff_con con_char con_imp_coinitial cong_reflexive ideE map_App1_Resid)+ lemma cong_map_App2: shows "\x T. \\.Ide x; T \<^sup>*\\<^sup>* U\ \ map (\X. X \<^bold>\ x) T \<^sup>*\\<^sup>* map (\X. X \<^bold>\ x) U" apply (induct U) apply (simp add: ide_char) apply (intro conjI) by (metis Nil_is_map_conv arr_resid_iff_con con_char cong_implies_coinitial cong_reflexive ide_def arr_char ideE map_App2_Resid)+ subsection "Decomposition of `App Paths'" text \ The following series of results is aimed at showing that a reduction path, all of whose transitions have \App\ as their top-level constructor, can be factored up to congruence into a reduction path in which only the ``rator'' components are reduced, followed by a reduction path in which only the ``rand'' components are reduced. 
\ lemma orthogonal_App_single_single: assumes "\.Arr t" and "\.Arr u" shows "[\.Src t \<^bold>\ u] \<^sup>*\\\<^sup>* [t \<^bold>\ \.Src u] = [\.Trg t \<^bold>\ u]" and "[t \<^bold>\ \.Src u] \<^sup>*\\\<^sup>* [\.Src t \<^bold>\ u] = [t \<^bold>\ \.Trg u]" using assms arr_char \.Arr_not_Nil by auto lemma orthogonal_App_single_Arr: shows "\t. \Arr [t]; Arr U\ \ map (\.App (\.Src t)) U \<^sup>*\\\<^sup>* [t \<^bold>\ \.Src (hd U)] = map (\.App (\.Trg t)) U \ [t \<^bold>\ \.Src (hd U)] \<^sup>*\\\<^sup>* map (\.App (\.Src t)) U = [t \<^bold>\ \.Trg (last U)]" proof (induct U) show "\t. \Arr [t]; Arr []\ \ map (\.App (\.Src t)) [] \<^sup>*\\\<^sup>* [t \<^bold>\ \.Src (hd [])] = map (\.App (\.Trg t)) [] \ [t \<^bold>\ \.Src (hd [])] \<^sup>*\\\<^sup>* map (\.App (\.Src t)) [] = [t \<^bold>\ \.Trg (last [])]" by fastforce fix t u U assume ind: "\t. \Arr [t]; Arr U\ \ map (\.App (\.Src t)) U \<^sup>*\\\<^sup>* [t \<^bold>\ \.Src (hd U)] = map (\.App (\.Trg t)) U \ [t \<^bold>\ \.Src (hd U)] \<^sup>*\\\<^sup>* map (\.App (\.Src t)) U = [t \<^bold>\ \.Trg (last U)]" assume t: "Arr [t]" assume uU: "Arr (u # U)" show "map (\.App (\.Src t)) (u # U) \<^sup>*\\\<^sup>* [t \<^bold>\ \.Src (hd (u # U))] = map (\.App (\.Trg t)) (u # U) \ [t \<^bold>\ \.Src (hd (u # U))] \<^sup>*\\\<^sup>* map (\.App (\.Src t)) (u # U) = [t \<^bold>\ \.Trg (last (u # U))]" proof (cases "U = []") show "U = [] \ ?thesis" using t uU orthogonal_App_single_single by simp assume U: "U \ []" have 2: "coinitial ([\.Src t \<^bold>\ u] @ map (\.App (\.Src t)) U) [t \<^bold>\ \.Src u]" proof show 3: "arr ([\.Src t \<^bold>\ u] @ map (\.App (\.Src t)) U)" using t uU by (metis Arr_iff_Con_self Arr_map_App2 Con_rec(1) append_Cons append_Nil arr_char \.Con_implies_Arr2 \.Ide_Src \.con_char list.simps(9)) show "sources ([\.Src t \<^bold>\ u] @ map (\.App (\.Src t)) U) = sources [t \<^bold>\ \.Src u]" proof - have "seq [\.Src t \<^bold>\ u] (map (\.App (\.Src t)) U)" using U 3 arr_append_imp_seq by force thus ?thesis using sources_append [of "[\.Src t \<^bold>\ u]" "map (\.App (\.Src t)) U"] sources_single_Src [of "\.Src t \<^bold>\ u"] sources_single_Src [of "t \<^bold>\ \.Src u"] using arr_char t by (simp add: seq_char) qed qed show ?thesis proof show 4: "map (\.App (\.Src t)) (u # U) \<^sup>*\\\<^sup>* [t \<^bold>\ \.Src (hd (u # U))] = map (\.App (\.Trg t)) (u # U)" proof - have "map (\.App (\.Src t)) (u # U) \<^sup>*\\\<^sup>* [t \<^bold>\ \.Src (hd (u # U))] = ([\.Src t \<^bold>\ u] @ map (\.App (\.Src t)) U) \<^sup>*\\\<^sup>* [t \<^bold>\ \.Src u]" by simp also have "... = [\.Src t \<^bold>\ u] \<^sup>*\\\<^sup>* [t \<^bold>\ \.Src u] @ map (\.App (\.Src t)) U \<^sup>*\\\<^sup>* ([t \<^bold>\ \.Src u] \<^sup>*\\\<^sup>* [\.Src t \<^bold>\ u])" by (meson "2" Resid_append(1) con_char confluence not_Cons_self2) also have "... = [\.Trg t \<^bold>\ u] @ map (\.App (\.Src t)) U \<^sup>*\\\<^sup>* [t \<^bold>\ \.Trg u]" using t \.Arr_not_Nil by (metis Arr_imp_arr_hd \.arr_char list.sel(1) orthogonal_App_single_single(1) orthogonal_App_single_single(2) uU) also have "... = [\.Trg t \<^bold>\ u] @ map (\.App (\.Trg t)) U" proof - have "\.Src (hd U) = \.Trg u" using U uU Arr.elims(2) Srcs_simp\<^sub>\\<^sub>P by force thus ?thesis using t uU ind Arr.elims(2) by fastforce qed also have "... 
= map (\.App (\.Trg t)) (u # U)" by auto finally show ?thesis by blast qed show "[t \<^bold>\ \.Src (hd (u # U))] \<^sup>*\\\<^sup>* map (\.App (\.Src t)) (u # U) = [t \<^bold>\ \.Trg (last (u # U))]" proof - have "[t \<^bold>\ \.Src (hd (u # U))] \<^sup>*\\\<^sup>* map (\.App (\.Src t)) (u # U) = ([t \<^bold>\ \.Src (hd (u # U))] \<^sup>*\\\<^sup>* [\.Src t \<^bold>\ u]) \<^sup>*\\\<^sup>* map (\.App (\.Src t)) U" by (metis U 4 Con_sym Resid_cons(2) list.distinct(1) list.simps(9) map_is_Nil_conv) also have "... = [t \<^bold>\ \.Trg u] \<^sup>*\\\<^sup>* map (\.App (\.Src t)) U" by (metis Arr_imp_arr_hd lambda_calculus.arr_char list.sel(1) orthogonal_App_single_single(2) t uU) also have "... = [t \<^bold>\ \.Trg (last (u # U))]" by (metis 2 t U uU Con_Arr_self Con_cons(1) Con_implies_Arr(1) Trg_last_Src_hd_eqI arr_append_imp_seq coinitialE ind \.Src.simps(4) \.Trg.simps(3) \.lambda.inject(3) last.simps list.distinct(1) list.map_sel(1) map_is_Nil_conv) finally show ?thesis by blast qed qed qed qed lemma orthogonal_App_Arr_Arr: shows "\U. \Arr T; Arr U\ \ map (\.App (\.Src (hd T))) U \<^sup>*\\\<^sup>* map (\X. \.App X (\.Src (hd U))) T = map (\.App (\.Trg (last T))) U \ map (\X. X \<^bold>\ \.Src (hd U)) T \<^sup>*\\\<^sup>* map (\.App (\.Src (hd T))) U = map (\X. X \<^bold>\ \.Trg (last U)) T" proof (induct T) show "\U. \Arr []; Arr U\ \ map (\.App (\.Src (hd []))) U \<^sup>*\\\<^sup>* map (\X. X \<^bold>\ \.Src (hd U)) [] = map (\.App (\.Trg (last []))) U \ map (\X. X \<^bold>\ \.Src (hd U)) [] \<^sup>*\\\<^sup>* map (\.App (\.Src (hd []))) U = map (\X. X \<^bold>\ \.Trg (last U)) []" by simp fix t T U assume ind: "\U. \Arr T; Arr U\ \ map (\.App (\.Src (hd T))) U \<^sup>*\\\<^sup>* map (\X. \.App X (\.Src (hd U))) T = map (\.App (\.Trg (last T))) U \ map (\X. X \<^bold>\ \.Src (hd U)) T \<^sup>*\\\<^sup>* map (\.App (\.Src (hd T))) U = map (\X. X \<^bold>\ \.Trg (last U)) T" assume tT: "Arr (t # T)" assume U: "Arr U" show "map (\.App (\.Src (hd (t # T)))) U \<^sup>*\\\<^sup>* map (\X. X \<^bold>\ \.Src (hd U)) (t # T) = map (\.App (\.Trg (last (t # T)))) U \ map (\X. X \<^bold>\ \.Src (hd U)) (t # T) \<^sup>*\\\<^sup>* map (\.App (\.Src (hd (t # T)))) U = map (\X. X \<^bold>\ \.Trg (last U)) (t # T)" proof (cases "T = []") show "T = [] \ ?thesis" using tT U by (simp add: orthogonal_App_single_Arr) assume T: "T \ []" have 1: "Arr T" using T tT Arr_imp_Arr_tl by fastforce have 2: "\.Src (hd T) = \.Trg t" using tT T Arr.elims(2) Srcs_simp\<^sub>\\<^sub>P by force show ?thesis proof show 3: "map (\.App (\.Src (hd (t # T)))) U \<^sup>*\\\<^sup>* map (\X. X \<^bold>\ \.Src (hd U)) (t # T) = map (\.App (\.Trg (last (t # T)))) U" proof - have "map (\.App (\.Src (hd (t # T)))) U \<^sup>*\\\<^sup>* map (\X. X \<^bold>\ \.Src (hd U)) (t # T) = map (\.App (\.Src t)) U \<^sup>*\\\<^sup>* ([\.App t (\.Src (hd U))] @ map (\X. X \<^bold>\ \.Src (hd U)) T)" using tT U by simp also have "... = (map (\.App (\.Src t)) U \<^sup>*\\\<^sup>* [t \<^bold>\ \.Src (hd U)]) \<^sup>*\\\<^sup>* map (\X. X \<^bold>\ \.Src (hd U)) T" using tT U Resid_append(2) by (metis Con_appendI(2) Resid.simps(1) T map_is_Nil_conv not_Cons_self2) also have "... = map (\.App (\.Trg t)) U \<^sup>*\\\<^sup>* map (\X. X \<^bold>\ \.Src (hd U)) T" using tT U orthogonal_App_single_Arr Arr_imp_arr_hd by fastforce also have "... = map (\.App (\.Trg (last (t # T)))) U" using tT U 1 2 ind by auto finally show ?thesis by blast qed show "map (\X. X \<^bold>\ \.Src (hd U)) (t # T) \<^sup>*\\\<^sup>* map (\.App (\.Src (hd (t # T)))) U = map (\X. 
X \<^bold>\ \.Trg (last U)) (t # T)" proof - have "map (\X. X \<^bold>\ \.Src (hd U)) (t # T) \<^sup>*\\\<^sup>* map (\.App (\.Src (hd (t # T)))) U = ([t \<^bold>\ \.Src (hd U)] @ map (\X. X \<^bold>\ \.Src (hd U)) T) \<^sup>*\\\<^sup>* map (\.App (\.Src t)) U" using tT U by simp also have "... = ([t \<^bold>\ \.Src (hd U)] \<^sup>*\\\<^sup>* map (\.App (\.Src t)) U) @ (map (\X. X \<^bold>\ \.Src (hd U)) T \<^sup>*\\\<^sup>* (map (\.App (\.Src t)) U \<^sup>*\\\<^sup>* [t \<^bold>\ \.Src (hd U)]))" using tT U 3 Con_sym Resid_append(1) [of "[t \<^bold>\ \.Src (hd U)]" "map (\X. X \<^bold>\ \.Src (hd U)) T" "map (\.App (\.Src t)) U"] by fastforce also have "... = [t \<^bold>\ \.Trg (last U)] @ map (\X. X \<^bold>\ \.Src (hd U)) T \<^sup>*\\\<^sup>* map (\.App (\.Trg t)) U" using tT U Arr_imp_arr_hd orthogonal_App_single_Arr by fastforce also have "... = [t \<^bold>\ \.Trg (last U)] @ map (\X. X \<^bold>\ \.Trg (last U)) T" using tT U "1" "2" ind by presburger also have "... = map (\X. X \<^bold>\ \.Trg (last U)) (t # T)" by simp finally show ?thesis by blast qed qed qed qed lemma orthogonal_App_cong: assumes "Arr T" and "Arr U" shows "map (\X. X \<^bold>\ \.Src (hd U)) T @ map (\.App (\.Trg (last T))) U \<^sup>*\\<^sup>* map (\.App (\.Src (hd T))) U @ map (\X. X \<^bold>\ \.Trg (last U)) T" (* using assms orthogonal_App_Arr_Arr [of T U] by (smt (verit, best) Con_Arr_self Con_imp_Arr_Resid Con_implies_Arr(1) Con_sym Nil_is_append_conv Resid_append_ind arr_char cube map_is_Nil_conv prfx_reflexive) *) proof have 1: "Arr (map (\X. X \<^bold>\ \.Src (hd U)) T)" using assms Arr_imp_arr_hd Arr_map_App1 \.Ide_Src by force have 2: "Arr (map (\.App (\.Trg (last T))) U)" using assms Arr_imp_arr_last Arr_map_App2 \.Ide_Trg by force have 3: "Arr (map (\.App (\.Src (hd T))) U)" using assms Arr_imp_arr_hd Arr_map_App2 \.Ide_Src by force have 4: "Arr (map (\X. X \<^bold>\ \.Trg (last U)) T)" using assms Arr_imp_arr_last Arr_map_App1 \.Ide_Trg by force have 5: "Arr (map (\X. X \<^bold>\ \.Src (hd U)) T @ map (\.App (\.Trg (last T))) U)" using assms by (metis (no_types, lifting) 1 2 Arr.simps(2) Arr_has_Src Arr_imp_arr_last Srcs.simps(1) Srcs_Resid_Arr_single Trgs_simp\<^sub>P arr_append arr_char last_map orthogonal_App_single_Arr seq_char) have 6: "Arr (map (\.App (\.Src (hd T))) U @ map (\X. X \<^bold>\ \.Trg (last U)) T)" using assms by (metis (no_types, lifting) 3 4 Arr.simps(2) Arr_has_Src Arr_imp_arr_hd Srcs.simps(1) Srcs.simps(2) Srcs_Resid Srcs_simp\<^sub>P arr_append arr_char hd_map orthogonal_App_single_Arr seq_char) have 7: "Con (map (\X. X \<^bold>\ \.Src (hd U)) T @ map ((\<^bold>\) (\.Trg (last T))) U) (map ((\<^bold>\) (\.Src (hd T))) U @ map (\X. X \<^bold>\ \.Trg (last U)) T)" using assms orthogonal_App_Arr_Arr [of T U] by (metis 1 2 5 6 Con_imp_eq_Srcs Resid.simps(1) Srcs_append confluence_ind) have 8: "Con (map ((\<^bold>\) (\.Src (hd T))) U @ map (\X. X \<^bold>\ \.Trg (last U)) T) (map (\X. X \<^bold>\ \.Src (hd U)) T @ map ((\<^bold>\) (\.Trg (last T))) U)" using 7 Con_sym by simp show "map (\X. X \<^bold>\ \.Src (hd U)) T @ map ((\<^bold>\) (\.Trg (last T))) U \<^sup>*\\<^sup>* map ((\<^bold>\) (\.Src (hd T))) U @ map (\X. X \<^bold>\ \.Trg (last U)) T" proof - have "(map (\X. X \<^bold>\ \.Src (hd U)) T @ map ((\<^bold>\) (\.Trg (last T))) U) \<^sup>*\\\<^sup>* (map ((\<^bold>\) (\.Src (hd T))) U @ map (\X. X \<^bold>\ \.Trg (last U)) T) = map (\X. X \<^bold>\ \.Trg (last U)) T \<^sup>*\\\<^sup>* map (\X. 
X \<^bold>\ \.Trg (last U)) T @ (map ((\<^bold>\) (\.Trg (last T))) U \<^sup>*\\\<^sup>* map ((\<^bold>\) (\.Trg (last T))) U) \<^sup>*\\\<^sup>* (map (\X. X \<^bold>\ \.Trg (last U)) T \<^sup>*\\\<^sup>* map (\X. X \<^bold>\ \.Trg (last U)) T)" using assms 7 orthogonal_App_Arr_Arr Resid_append2 [of "map (\X. X \<^bold>\ \.Src (hd U)) T" "map (\.App (\.Trg (last T))) U" "map (\.App (\.Src (hd T))) U" "map (\X. X \<^bold>\ \.Trg (last U)) T"] by fastforce moreover have "Ide ..." using assms 1 2 3 4 5 6 7 Resid_Arr_self by (metis Arr_append_iff\<^sub>P Con_Arr_self Con_imp_Arr_Resid Ide_appendI\<^sub>P Resid_Ide_Arr_ind append_Nil2 calculation) ultimately show ?thesis using ide_char by presburger qed show "map ((\<^bold>\) (\.Src (hd T))) U @ map (\X. X \<^bold>\ \.Trg (last U)) T \<^sup>*\\<^sup>* map (\X. X \<^bold>\ \.Src (hd U)) T @ map ((\<^bold>\) (\.Trg (last T))) U" proof - have "map ((\<^bold>\) (\.Src (hd T))) U \<^sup>*\\\<^sup>* map (\X. X \<^bold>\ \.Src (hd U)) T = map ((\<^bold>\) (\.Trg (last T))) U" by (simp add: assms orthogonal_App_Arr_Arr) have "(map ((\<^bold>\) (\.Src (hd T))) U @ map (\X. X \<^bold>\ \.Trg (last U)) T) \<^sup>*\\\<^sup>* (map (\X. X \<^bold>\ \.Src (hd U)) T @ map ((\<^bold>\) (\.Trg (last T))) U) = (map ((\<^bold>\) (\.Trg (last T))) U) \<^sup>*\\\<^sup>* map ((\<^bold>\) (\.Trg (last T))) U @ (map (\X. X \<^bold>\ \.Trg (last U)) T \<^sup>*\\\<^sup>* map (\X. X \<^bold>\ \.Trg (last U)) T) \<^sup>*\\\<^sup>* (map ((\<^bold>\) (\.Trg (last T))) U \<^sup>*\\\<^sup>* map ((\<^bold>\) (\.Trg (last T))) U)" using assms 8 orthogonal_App_Arr_Arr [of T U] Resid_append2 [of "map (\.App (\.Src (hd T))) U" "map (\X. X \<^bold>\ \.Trg (last U)) T" "map (\X. X \<^bold>\ \.Src (hd U)) T" "map (\.App (\.Trg (last T))) U"] by fastforce moreover have "Ide ..." using assms 1 2 3 4 5 6 8 Resid_Arr_self Arr_append_iff\<^sub>P Con_sym by (metis Con_Arr_self Con_imp_Arr_Resid Ide_appendI\<^sub>P Resid_Ide_Arr_ind append_Nil2 calculation) ultimately show ?thesis using ide_char by presburger qed qed text \ We arrive at the final objective of this section: factorization, up to congruence, of a path whose transitions all have \App\ as the top-level constructor, into the composite of a path that reduces only the ``rators'' and a path that reduces only the ``rands''. \ lemma map_App_decomp: shows "\Arr U; set U \ Collect \.is_App\ \ map (\X. X \<^bold>\ \.Src (\.un_App2 (hd U))) (map \.un_App1 U) @ map (\X. \.Trg (\.un_App1 (last U)) \<^bold>\ X) (map \.un_App2 U) \<^sup>*\\<^sup>* U" proof (induct U) show "Arr [] \ map (\X. X \<^bold>\ \.Src (\.un_App2 (hd []))) (map \.un_App1 []) @ map (\.App (\.Trg (\.un_App1 (last [])))) (map \.un_App2 []) \<^sup>*\\<^sup>* []" by simp fix u U assume ind: "\Arr U; set U \ Collect \.is_App\ \ map (\X. \.App X (\.Src (\.un_App2 (hd U)))) (map \.un_App1 U) @ map (\X. \.Trg (\.un_App1 (last U)) \<^bold>\ X) (map \.un_App2 U) \<^sup>*\\<^sup>* U" assume uU: "Arr (u # U)" assume set: "set (u # U) \ Collect \.is_App" have u: "\.Arr u \ \.is_App u" using set set_Arr_subset_arr uU by fastforce show "map (\X. 
X \<^bold>\ \.Src (\.un_App2 (hd (u # U)))) (map \.un_App1 (u # U)) @ map (\.App (\.Trg (\.un_App1 (last (u # U))))) (map \.un_App2 (u # U)) \<^sup>*\\<^sup>* u # U" proof (cases "U = []") assume U: "U = []" show ?thesis using u U \.Con_sym \.Ide_iff_Src_self \.resid_Arr_self \.resid_Src_Arr \.resid_Arr_Src \.Src_resid \.Arr_resid ide_char \.Arr_not_Nil by (cases u, simp_all) next assume U: "U \ []" have 1: "Arr (map \.un_App1 U)" using U set Arr_map_un_App1 uU by (metis Arr_imp_Arr_tl list.distinct(1) list.map_disc_iff list.map_sel(2) list.sel(3)) have 2: "Arr [\.un_App2 u]" using U uU set by (metis Arr.simps(2) Arr_imp_arr_hd Arr_map_un_App2 hd_map list.discI list.sel(1)) have 3: "\.Arr (\.un_App1 u) \ \.Arr (\.un_App2 u)" using uU set by (metis Arr_imp_arr_hd Arr_map_un_App1 Arr_map_un_App2 \.arr_char list.distinct(1) list.map_sel(1) list.sel(1)) have 4: "map (\X. X \<^bold>\ \.Src (\.un_App2 u)) (map \.un_App1 U) @ [\.Trg (\.un_App1 (last U)) \<^bold>\ \.un_App2 u] \<^sup>*\\<^sup>* [\.Src (hd (map \.un_App1 U)) \<^bold>\ \.un_App2 u] @ map (\X. X \<^bold>\ \.Trg (last [\.un_App2 u])) (map \.un_App1 U)" proof - have "map (\X. X \<^bold>\ \.Src (hd [\.un_App2 u])) (map \.un_App1 U) = map (\X. X \<^bold>\ \.Src (\.un_App2 u)) (map \.un_App1 U)" using U uU set by simp moreover have "map (\.App (\.Trg (last (map \.un_App1 U)))) [\.un_App2 u] = [\.Trg (\.un_App1 (last U)) \<^bold>\ \.un_App2 u]" by (simp add: U last_map) moreover have "map (\.App (\.Src (hd (map \.un_App1 U)))) [\.un_App2 u] = [\.Src (hd (map \.un_App1 U)) \<^bold>\ \.un_App2 u]" by simp moreover have "map (\X. X \<^bold>\ \.Trg (last [\.un_App2 u])) (map \.un_App1 U) = map (\X. X \<^bold>\ \.Trg (last [\.un_App2 u])) (map \.un_App1 U)" using U uU set by blast ultimately show ?thesis using U uU set last_map hd_map 1 2 3 orthogonal_App_cong [of "map \.un_App1 U" "[\.un_App2 u]"] by presburger qed have 5: "\.Arr (\.un_App1 u \<^bold>\ \.Src (\.un_App2 u))" by (simp add: 3) have 6: "Arr (map (\X. \.Trg (\.un_App1 (last U)) \<^bold>\ X) (map \.un_App2 U))" by (metis 1 Arr_imp_arr_last Arr_map_App2 Arr_map_un_App2 Con_implies_Arr(2) Ide.simps(1) Resid_Arr_self Resid_cons(2) U insert_subset \.Ide_Trg \.arr_char last_map list.simps(15) set uU) have 7: "\.Arr (\.Trg (\.un_App1 (last U)))" by (metis 4 Arr.simps(2) Arr_append_iff\<^sub>P Con_implies_Arr(2) Ide.simps(1) U ide_char \.Arr.simps(4) \.arr_char list.map_disc_iff not_Cons_self2) have 8: "\.Src (hd (map \.un_App1 U)) = \.Trg (\.un_App1 u)" proof - have "\.Src (hd U) = \.Trg u" using u uU U by fastforce thus ?thesis using u uU U set apply (cases u; cases "hd U") apply (simp_all add: list.map_sel(1)) using list.set_sel(1) by fastforce qed have 9: "\.Src (\.un_App2 (hd U)) = \.Trg (\.un_App2 u)" proof - have "\.Src (hd U) = \.Trg u" using u uU U by fastforce thus ?thesis using u uU U set apply (cases u; cases "hd U") apply simp_all by (metis lambda_calculus.lambda.disc(15) list.set_sel(1) mem_Collect_eq subset_code(1)) qed have "map (\X. X \<^bold>\ \.Src (\.un_App2 (hd (u # U)))) (map \.un_App1 (u # U)) @ map ((\<^bold>\) (\.Trg (\.un_App1 (last (u # U))))) (map \.un_App2 (u # U)) = [\.un_App1 u \<^bold>\ \.Src (\.un_App2 u)] @ (map (\X. X \<^bold>\ \.Src (\.un_App2 u)) (map \.un_App1 U) @ [\.Trg (\.un_App1 (last U)) \<^bold>\ \.un_App2 u]) @ map ((\<^bold>\) (\.Trg (\.un_App1 (last U)))) (map \.un_App2 U)" using uU U by simp also have 12: "cong ... ([\.un_App1 u \<^bold>\ \.Src (\.un_App2 u)] @ ([\.Src (hd (map \.un_App1 U)) \<^bold>\ \.un_App2 u] @ map (\X. 
X \<^bold>\ \.Trg (last [\.un_App2 u])) (map \.un_App1 U)) @ map ((\<^bold>\) (\.Trg (\.un_App1 (last U)))) (map \.un_App2 U))" proof (intro cong_append [of "[\.un_App1 u \<^bold>\ \.Src (\.un_App2 u)]"] cong_append [where U = "map (\X. \.Trg (\.un_App1 (last U)) \<^bold>\ X) (map \.un_App2 U)"]) show "[\.un_App1 u \<^bold>\ \.Src (\.un_App2 u)] \<^sup>*\\<^sup>* [\.un_App1 u \<^bold>\ \.Src (\.un_App2 u)]" using 5 arr_char cong_reflexive Arr.simps(2) \.arr_char by presburger show "map (\X. \.Trg (\.un_App1 (last U)) \<^bold>\ X) (map \.un_App2 U) \<^sup>*\\<^sup>* map (\X. \.Trg (\.un_App1 (last U)) \<^bold>\ X) (map \.un_App2 U)" using 6 cong_reflexive by auto show "map (\X. X \<^bold>\ \.Src (\.un_App2 u)) (map \.un_App1 U) @ [\.Trg (\.un_App1 (last U)) \<^bold>\ \.un_App2 u] \<^sup>*\\<^sup>* [\.Src (hd (map \.un_App1 U)) \<^bold>\ \.un_App2 u] @ map (\X. X \<^bold>\ \.Trg (last [\.un_App2 u])) (map \.un_App1 U)" using 4 by simp show 10: "seq [\.un_App1 u \<^bold>\ \.Src (\.un_App2 u)] ((map (\X. X \<^bold>\ \.Src (\.un_App2 u)) (map \.un_App1 U) @ [\.Trg (\.un_App1 (last U)) \<^bold>\ \.un_App2 u]) @ map (\X. \.Trg (\.un_App1 (last U)) \<^bold>\ X) (map \.un_App2 U))" proof show "Arr [\.un_App1 u \<^bold>\ \.Src (\.un_App2 u)]" using 5 Arr.simps(2) by blast show "Arr ((map (\X. X \<^bold>\ \.Src (\.un_App2 u)) (map \.un_App1 U) @ [\.Trg (\.un_App1 (last U)) \<^bold>\ \.un_App2 u]) @ map (\X. \.Trg (\.un_App1 (last U)) \<^bold>\ X) (map \.un_App2 U))" proof (intro Arr_appendI\<^sub>P\<^sub>W\<^sub>E) show "Arr (map (\X. X \<^bold>\ \.Src (\.un_App2 u)) (map \.un_App1 U))" using 1 3 Arr_map_App1 lambda_calculus.Ide_Src by blast show "Arr [\.Trg (\.un_App1 (last U)) \<^bold>\ \.un_App2 u]" by (simp add: 3 7) show "Trg (map (\X. X \<^bold>\ \.Src (\.un_App2 u)) (map \.un_App1 U)) = Src [\.Trg (\.un_App1 (last U)) \<^bold>\ \.un_App2 u]" by (metis 4 Arr_appendE\<^sub>P\<^sub>W\<^sub>E Con_implies_Arr(2) Ide.simps(1) U ide_char list.map_disc_iff not_Cons_self2) show "Arr (map (\X. \.Trg (\.un_App1 (last U)) \<^bold>\ X) (map \.un_App2 U))" using 6 by simp show "Trg (map (\X. X \<^bold>\ \.Src (\.un_App2 u)) (map \.un_App1 U) @ [\.Trg (\.un_App1 (last U)) \<^bold>\ \.un_App2 u]) = Src (map (\X. \.Trg (\.un_App1 (last U)) \<^bold>\ X) (map \.un_App2 U))" using U uU set 1 3 6 7 9 Srcs_simp\<^sub>P\<^sub>W\<^sub>E Arr_imp_arr_hd Arr_imp_arr_last apply auto by (metis Nil_is_map_conv hd_map \.Src.simps(4) \.Src_Trg \.Trg_Trg last_map list.map_comp) qed show "\.Trg (last [\.un_App1 u \<^bold>\ \.Src (\.un_App2 u)]) = \.Src (hd ((map (\X. X \<^bold>\ \.Src (\.un_App2 u)) (map \.un_App1 U) @ [\.Trg (\.un_App1 (last U)) \<^bold>\ \.un_App2 u]) @ map (\X. \.Trg (\.un_App1 (last U)) \<^bold>\ X) (map \.un_App2 U)))" using 8 9 by (simp add: 3 U hd_map) qed show "seq (map (\X. X \<^bold>\ \.Src (\.un_App2 u)) (map \.un_App1 U) @ [\.Trg (\.un_App1 (last U)) \<^bold>\ \.un_App2 u]) (map (\X. \.Trg (\.un_App1 (last U)) \<^bold>\ X) (map \.un_App2 U))" by (metis Nil_is_map_conv U 10 append_is_Nil_conv arr_append_imp_seq seqE) qed also have 11: "[\.un_App1 u \<^bold>\ \.Src (\.un_App2 u)] @ ([\.Src (hd (map \.un_App1 U)) \<^bold>\ \.un_App2 u] @ map (\X. X \<^bold>\ \.Trg (last [\.un_App2 u])) (map \.un_App1 U)) @ map ((\<^bold>\) (\.Trg (\.un_App1 (last U)))) (map \.un_App2 U) = ([\.un_App1 u \<^bold>\ \.Src (\.un_App2 u)] @ [\.Src (hd (map \.un_App1 U)) \<^bold>\ \.un_App2 u]) @ map (\X. 
X \<^bold>\ \.Trg (last [\.un_App2 u])) (map \.un_App1 U) @ map ((\<^bold>\) (\.Trg (\.un_App1 (last U)))) (map \.un_App2 U)" by simp also have "cong ... ([u] @ U)" proof (intro cong_append) show "seq ([\.un_App1 u \<^bold>\ \.Src (\.un_App2 u)] @ [\.Src (hd (map \.un_App1 U)) \<^bold>\ \.un_App2 u]) (map (\X. X \<^bold>\ \.Trg (last [\.un_App2 u])) (map \.un_App1 U) @ map ((\<^bold>\) (\.Trg (\.un_App1 (last U)))) (map \.un_App2 U))" by (metis 5 11 12 U Arr.simps(1-2) Con_implies_Arr(2) Ide.simps(1) Nil_is_map_conv append_is_Nil_conv arr_append_imp_seq arr_char ide_char \.arr_char) show "[\.un_App1 u \<^bold>\ \.Src (\.un_App2 u)] @ [\.Src (hd (map \.un_App1 U)) \<^bold>\ \.un_App2 u] \<^sup>*\\<^sup>* [u]" proof - have "[\.un_App1 u \<^bold>\ \.Src (\.un_App2 u)] @ [\.Trg (\.un_App1 u) \<^bold>\ \.un_App2 u] \<^sup>*\\<^sup>* [u]" using u uU U \.Arr_Trg \.Arr_not_Nil \.resid_Arr_self apply (cases u) apply auto by force+ thus ?thesis using 8 by simp qed show "map (\X. X \<^bold>\ \.Trg (last [\.un_App2 u])) (map \.un_App1 U) @ map ((\<^bold>\) (\.Trg (\.un_App1 (last U)))) (map \.un_App2 U) \<^sup>*\\<^sup>* U" using ind set 9 apply simp using U uU by blast qed also have "[u] @ U = u # U" by simp finally show ?thesis by blast qed qed subsection "Miscellaneous" lemma Resid_parallel: assumes "cong t t'" and "coinitial t u" shows "u \<^sup>*\\\<^sup>* t = u \<^sup>*\\\<^sup>* t'" proof - have "u \<^sup>*\\\<^sup>* t = (u \<^sup>*\\\<^sup>* t) \<^sup>*\\\<^sup>* (t' \<^sup>*\\\<^sup>* t)" using assms by (metis con_target conI\<^sub>P con_sym resid_arr_ide) also have "... = (u \<^sup>*\\\<^sup>* t') \<^sup>*\\\<^sup>* (t \<^sup>*\\\<^sup>* t')" using cube by auto also have "... = u \<^sup>*\\\<^sup>* t'" using assms by (metis con_target conI\<^sub>P con_sym resid_arr_ide) finally show ?thesis by blast qed lemma set_Ide_subset_single_hd: shows "Ide T \ set T \ {hd T}" apply (induct T, auto) using \.coinitial_ide_are_cong by (metis Arr_imp_arr_hd Ide_consE Ide_imp_Ide_hd Ide_implies_Arr Srcs_simp\<^sub>P\<^sub>W\<^sub>E Srcs_simp\<^sub>\\<^sub>P \.trg_ide equals0D \.Ide_iff_Src_self \.arr_char \.ide_char set_empty singletonD subset_code(1)) text \ A single parallel reduction with \Beta\ as the top-level operator factors, up to congruence, either as a path in which the top-level redex is contracted first, or as a path in which the top-level redex is contracted last. \ lemma Beta_decomp: assumes "\.Arr t" and "\.Arr u" shows "[\<^bold>\\<^bold>[\.Src t\<^bold>] \<^bold>\ \.Src u] @ [\.subst u t] \<^sup>*\\<^sup>* [\<^bold>\\<^bold>[t\<^bold>] \<^bold>\ u]" and "[\<^bold>\\<^bold>[t\<^bold>] \<^bold>\ u] @ [\<^bold>\\<^bold>[\.Trg t\<^bold>] \<^bold>\ \.Trg u] \<^sup>*\\<^sup>* [\<^bold>\\<^bold>[t\<^bold>] \<^bold>\ u]" using assms \.Arr_not_Nil \.Subst_not_Nil ide_char \.Ide_Subst \.Ide_Trg \.Arr_Subst \.resid_Arr_self by auto text \ If a reduction path follows an initial reduction whose top-level constructor is \Lam\, then all the terms in the path have \Lam\ as their top-level constructor. \ lemma seq_Lam_Arr_implies: shows "\t. \seq [t] U; \.is_Lam t\ \ set U \ Collect \.is_Lam" proof (induct U) show "\t. \seq [t] []; \.is_Lam t\ \ set [] \ Collect \.is_Lam" by simp fix u U t assume ind: "\t. 
\seq [t] U; \.is_Lam t\ \ set U \ Collect \.is_Lam" assume uU: "seq [t] (u # U)" assume t: "\.is_Lam t" show "set (u # U) \ Collect \.is_Lam" proof - have "\.is_Lam u" proof - have "\.seq t u" by (metis Arr_imp_arr_hd Trg_last_Src_hd_eqI \.arr_char \.seq_char last_ConsL list.sel(1) seq_char uU) thus ?thesis using \.seq_cases t by blast qed moreover have "set U \ Collect \.is_Lam" proof (cases "U = []") show "U = [] \ ?thesis" by simp assume U: "U \ []" have "seq [u] U" by (metis U append_Cons arr_append_imp_seq not_Cons_self2 self_append_conv2 seqE uU) thus ?thesis using ind calculation by simp qed ultimately show ?thesis by auto qed qed lemma seq_map_un_Lam: assumes "seq [\<^bold>\\<^bold>[t\<^bold>]] U" shows "seq [t] (map \.un_Lam U)" proof - have "Arr (\<^bold>\\<^bold>[t\<^bold>] # U)" using assms by (simp add: seq_char) hence "Arr (map \.un_Lam (\<^bold>\\<^bold>[t\<^bold>] # U)) \ Arr U" using seq_Lam_Arr_implies by (metis Arr_map_un_Lam \seq [\<^bold>\\<^bold>[t\<^bold>]] U\ \.lambda.discI(2) mem_Collect_eq seq_char set_ConsD subset_code(1)) hence "Arr (\.un_Lam \<^bold>\\<^bold>[t\<^bold>] # map \.un_Lam U) \ Arr U" by simp thus ?thesis using seq_char by (metis (no_types, lifting) Arr.simps(1) Con_imp_eq_Srcs Con_implies_Arr(2) Con_initial_right Resid_rec(1) Resid_rec(3) Srcs_Resid \.lambda.sel(2) map_is_Nil_conv confluence_ind) qed end section "Developments" text \ A \emph{development} is a reduction path from a term in which at each step exactly one redex is contracted, and the only redexes that are contracted are those that are residuals of redexes present in the original term. That is, no redexes are contracted that were newly created as a result of the previous reductions. The main theorem about developments is the Finite Developments Theorem, which states that all developments are finite. A proof of this theorem was published by Hindley \cite{hindley}, who attributes the result to Schroer \cite{schroer}. Other proofs were published subsequently. Here we follow the paper by de Vrijer \cite{deVrijer}, which may in some sense be considered the definitive work because de Vrijer's proof gives an exact bound on the number of steps in a development. Since de Vrijer used a classical, named-variable representation of \\\-terms, for the formalization given in the present article it was necessary to find the correct way to adapt de Vrijer's proof to the de Bruijn index representation of terms. I found this to be a somewhat delicate matter and to my knowledge it has not been done previously. \ context lambda_calculus begin text \ We define an \emph{elementary reduction} to be a term with exactly one marked redex. These correspond to the most basic computational steps. \ fun elementary_reduction where "elementary_reduction \<^bold>\ \ False" | "elementary_reduction (\<^bold>\_\<^bold>\) \ False" | "elementary_reduction \<^bold>\\<^bold>[t\<^bold>] \ elementary_reduction t" | "elementary_reduction (t \<^bold>\ u) \ (elementary_reduction t \ Ide u) \ (Ide t \ elementary_reduction u)" | "elementary_reduction (\<^bold>\\<^bold>[t\<^bold>] \<^bold>\ u) \ Ide t \ Ide u" text \ It is tempting to imagine that elementary reductions would be atoms with respect to the preorder \\\, but this is not necessarily the case.
For example, suppose \t = \<^bold>\\<^bold>[\<^bold>\1\<^bold>\\<^bold>] \<^bold>\ (\<^bold>\\<^bold>[\<^bold>\0\<^bold>\\<^bold>] \<^bold>\ \<^bold>\0\<^bold>\)\ and \u = \<^bold>\\<^bold>[\<^bold>\1\<^bold>\\<^bold>] \<^bold>\ (\<^bold>\\<^bold>[\<^bold>\0\<^bold>\\<^bold>] \<^bold>\ \<^bold>\0\<^bold>\)\. Then \t\ is an elementary reduction, \u \ t\ (in fact \u \ t\) but \u\ is not an identity, nor is it elementary. \ lemma elementary_reduction_is_arr: shows "elementary_reduction t \ arr t" using Ide_implies_Arr arr_char by (induct t) auto lemma elementary_reduction_not_ide: shows "elementary_reduction t \ \ ide t" using ide_char by (induct t) auto lemma elementary_reduction_Raise_iff: shows "\d n. elementary_reduction (Raise d n t) \ elementary_reduction t" using Ide_Raise by (induct t) auto lemma elementary_reduction_Lam_iff: shows "is_Lam t \ elementary_reduction t \ elementary_reduction (un_Lam t)" by (metis elementary_reduction.simps(3) lambda.collapse(2)) lemma elementary_reduction_App_iff: shows "is_App t \ elementary_reduction t \ (elementary_reduction (un_App1 t) \ ide (un_App2 t)) \ (ide (un_App1 t) \ elementary_reduction (un_App2 t))" using ide_char by (metis elementary_reduction.simps(4) lambda.collapse(3)) lemma elementary_reduction_Beta_iff: shows "is_Beta t \ elementary_reduction t \ ide (un_Beta1 t) \ ide (un_Beta2 t)" using ide_char by (metis elementary_reduction.simps(5) lambda.collapse(4)) lemma cong_elementary_reductions_are_equal: shows "\u. \elementary_reduction t; elementary_reduction u; t \ u\ \ t = u" proof (induct t) show "\u. \elementary_reduction \<^bold>\; elementary_reduction u; \<^bold>\ \ u\ \ \<^bold>\ = u" by simp show "\x u. \elementary_reduction \<^bold>\x\<^bold>\; elementary_reduction u; \<^bold>\x\<^bold>\ \ u\ \ \<^bold>\x\<^bold>\ = u" by simp show "\t u. \\u. \elementary_reduction t; elementary_reduction u; t \ u\ \ t = u; elementary_reduction \<^bold>\\<^bold>[t\<^bold>]; elementary_reduction u; \<^bold>\\<^bold>[t\<^bold>] \ u\ \ \<^bold>\\<^bold>[t\<^bold>] = u" by (metis elementary_reduction_Lam_iff lambda.collapse(2) lambda.inject(2) prfx_Lam_iff) show "\t1 t2. \\u. \elementary_reduction t1; elementary_reduction u; t1 \ u\ \ t1 = u; \u. \elementary_reduction t2; elementary_reduction u; t2 \ u\ \ t2 = u; elementary_reduction (t1 \<^bold>\ t2); elementary_reduction u; t1 \<^bold>\ t2 \ u\ \ t1 \<^bold>\ t2 = u" for u using prfx_App_iff apply (cases u) apply auto[3] apply (metis elementary_reduction_App_iff ide_backward_stable lambda.sel(3-4) weak_extensionality) by auto show "\t1 t2. \\u. \elementary_reduction t1; elementary_reduction u; t1 \ u\ \ t1 = u; \u. \elementary_reduction t2; elementary_reduction u; t2 \ u\ \ t2 = u; elementary_reduction (\<^bold>\\<^bold>[t1\<^bold>] \<^bold>\ t2); elementary_reduction u; \<^bold>\\<^bold>[t1\<^bold>] \<^bold>\ t2 \ u\ \ \<^bold>\\<^bold>[t1\<^bold>] \<^bold>\ t2 = u" for u using prfx_App_iff apply (cases u, simp_all) by (metis (full_types) Coinitial_iff_Con Ide_iff_Src_self Ide.simps(1)) qed text \ An \emph{elementary reduction path} is a path in which each step is an elementary reduction. It will be convenient to regard the empty list as an elementary reduction path, even though it is not actually a path according to our previous definition of that notion. 
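As an informal illustration (not something the formal development below relies on): a single parallel reduction step that contracts two marked redexes at once, for instance an application whose rator and rand are each marked redexes with identity components, is not itself an elementary reduction; it does, however, factor, up to congruence, into an elementary reduction path of length two, whose first step contracts the rator redex and whose second step contracts the residual of the rand redex.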
\ definition (in reduction_paths) elementary_reduction_path where "elementary_reduction_path T \ (T = [] \ Arr T \ set T \ Collect \.elementary_reduction)" text \ In the formal definition of ``development'' given below, we represent a set of redexes simply by a term, in which the occurrences of \Beta\ correspond to the redexes in the set. To express the idea that an elementary reduction \u\ is a member of the set of redexes represented by term \t\, it is not adequate to say \u \ t\. To see this, consider the developments of a term of the form \\<^bold>\\<^bold>[t1\<^bold>] \<^bold>\ t2\. Intuitively, such developments should consist of a (possibly empty) initial segment containing only transitions of the form \t1 \<^bold>\ t2\, followed by a transition of the form \\<^bold>\\<^bold>[u1'\<^bold>] \<^bold>\ u2'\, followed by a development of the residual of the original \\<^bold>\\<^bold>[t1\<^bold>] \<^bold>\ t2\ after what has come so far. The requirement \u \ \<^bold>\\<^bold>[t1\<^bold>] \<^bold>\ t2\ is not a strong enough constraint on the transitions in the initial segment, because \\<^bold>\\<^bold>[u1\<^bold>] \<^bold>\ u2 \ \<^bold>\\<^bold>[t1\<^bold>] \<^bold>\ t2\ can hold for \t2\ and \u2\ coinitial, but otherwise without any particular relationship between their sets of marked redexes. In particular, this can occur when \u2\ and \t2\ occur as subterms that can be deleted by the contraction of an outer redex. So we need to introduce a notion of containment between terms that is stronger and more ``syntactic'' than \\\. The notion ``subsumed by'' defined below serves this purpose. Term \u\ is subsumed by term \t\ if both terms are arrows with exactly the same form except that \t\ may contain \\<^bold>\\<^bold>[t1\<^bold>] \<^bold>\ t2\ (a marked redex) in places where \u\ contains \\<^bold>\\<^bold>[t1\<^bold>] \<^bold>\ t2\. \ fun subs (infix "\" 50) where "\<^bold>\i\<^bold>\ \ \<^bold>\i'\<^bold>\ \ i = i'" | "\<^bold>\\<^bold>[t\<^bold>] \ \<^bold>\\<^bold>[t'\<^bold>] \ t \ t'" | "t \<^bold>\ u \ t' \<^bold>\ u' \ t \ t' \ u \ u'" | "\<^bold>\\<^bold>[t\<^bold>] \<^bold>\ u \ \<^bold>\\<^bold>[t'\<^bold>] \<^bold>\ u' \ t \ t' \ u \ u'" | "\<^bold>\\<^bold>[t\<^bold>] \<^bold>\ u \ \<^bold>\\<^bold>[t'\<^bold>] \<^bold>\ u' \ t \ t' \ u \ u'" | "_ \ _ \ False" lemma subs_implies_prfx: shows "\u. t \ u \ t \ u" apply (induct t) apply auto[1] using subs.elims(2) apply fastforce proof - show "\t. \\u. t \ u \ t \ u; \<^bold>\\<^bold>[t\<^bold>] \ u\ \ \<^bold>\\<^bold>[t\<^bold>] \ u" for u by (cases u, auto) fastforce show "\t2. \\u1. t1 \ u1 \ t1 \ u1; \u2. t2 \ u2 \ t2 \ u2; t1 \<^bold>\ t2 \ u\ \ t1 \<^bold>\ t2 \ u" for t1 u apply (cases t1; cases u) apply simp_all apply fastforce+ apply (metis Ide_Subst con_char lambda.sel(2) subs.simps(2) prfx_Lam_iff prfx_char prfx_implies_con) by fastforce+ show "\t1 t2. \\u1. t1 \ u1 \ t1 \ u1; \u2. t2 \ u2 \ t2 \ u2; \<^bold>\\<^bold>[t1\<^bold>] \<^bold>\ t2 \ u\ \ \<^bold>\\<^bold>[t1\<^bold>] \<^bold>\ t2 \ u" for u using Ide_Subst apply (cases u, simp_all) by (metis Ide.simps(1)) qed text \ The following is an example showing that two terms can be related by \\\ without being related by \\\. 
\ lemma subs_example: shows "\<^bold>\\<^bold>[\<^bold>\1\<^bold>\\<^bold>] \<^bold>\ (\<^bold>\\<^bold>[\<^bold>\0\<^bold>\\<^bold>] \<^bold>\ \<^bold>\0\<^bold>\) \ \<^bold>\\<^bold>[\<^bold>\1\<^bold>\\<^bold>] \<^bold>\ (\<^bold>\\<^bold>[\<^bold>\0\<^bold>\\<^bold>] \<^bold>\ \<^bold>\0\<^bold>\) = True" and "\<^bold>\\<^bold>[\<^bold>\1\<^bold>\\<^bold>] \<^bold>\ (\<^bold>\\<^bold>[\<^bold>\0\<^bold>\\<^bold>] \<^bold>\ \<^bold>\0\<^bold>\) \ \<^bold>\\<^bold>[\<^bold>\1\<^bold>\\<^bold>] \<^bold>\ (\<^bold>\\<^bold>[\<^bold>\0\<^bold>\\<^bold>] \<^bold>\ \<^bold>\0\<^bold>\) = False" by auto lemma subs_Ide: shows "\u. \ide u; Src t = Src u\ \ u \ t" using Ide_Src Ide_implies_Arr Ide_iff_Src_self by (induct t, simp_all) force+ lemma subs_App: shows "u \ t1 \<^bold>\ t2 \ is_App u \ un_App1 u \ t1 \ un_App2 u \ t2" by (metis lambda.collapse(3) prfx_App_iff subs.simps(3) subs_implies_prfx) end context reduction_paths begin text \ We now formally define a \emph{development of \t\} to be an elementary reduction path \U\ that is coinitial with \[t]\ and is such that each transition \u\ in \U\ is subsumed by the residual of \t\ along the prefix of \U\ coming before \u\. Stated another way, each transition in \U\ corresponds to the contraction of a single redex that is the residual of a redex originally marked in \t\. \ fun development where "development t [] \ \.Arr t" | "development t (u # U) \ \.elementary_reduction u \ u \ t \ development (t \\ u) U" lemma development_imp_Arr: assumes "development t U" shows "\.Arr t" using assms by (metis \.Con_implies_Arr2 \.Ide.simps(1) \.ide_char \.subs_implies_prfx development.elims(2)) lemma development_Ide: shows "\t. \.Ide t \ development t U \ U = []" using \.Ide_implies_Arr apply (induct U) apply auto by (meson \.elementary_reduction_not_ide \.ide_backward_stable \.ide_char \.subs_implies_prfx) lemma development_implies: shows "\t. development t U \ elementary_reduction_path U \ (U \ [] \ U \<^sup>*\\<^sup>* [t])" apply (induct U) using elementary_reduction_path_def apply simp proof - fix t u U assume ind: "\t. development t U \ elementary_reduction_path U \ (U \ [] \ U \<^sup>*\\<^sup>* [t])" show "development t (u # U) \ elementary_reduction_path (u # U) \ (u # U \ [] \ u # U \<^sup>*\\<^sup>* [t])" proof (cases "U = []") assume uU: "development t (u # U)" show "U = [] \ ?thesis" using uU \.subs_implies_prfx ide_char \.elementary_reduction_is_arr elementary_reduction_path_def prfx_implies_con by force assume U: "U \ []" have "\.elementary_reduction u \ u \ t \ development (t \\ u) U" using U uU development.elims(1) by blast hence 1: "\.elementary_reduction u \ elementary_reduction_path U \ u \ t \ (U \ [] \ U \<^sup>*\\<^sup>* [t \\ u])" using U uU ind by auto show ?thesis proof (unfold elementary_reduction_path_def, intro conjI) show "u # U = [] \ Arr (u # U) \ set (u # U) \ Collect \.elementary_reduction" using U 1 by (metis Con_implies_Arr(1) Con_rec(2) con_char prfx_implies_con elementary_reduction_path_def insert_subset list.simps(15) mem_Collect_eq \.prfx_implies_con \.subs_implies_prfx) show "u # U \ [] \ u # U \<^sup>*\\<^sup>* [t]" proof - have "u # U \<^sup>*\\<^sup>* [t] \ ide ([u \\ t] @ U \<^sup>*\\\<^sup>* [t \\ u])" using 1 U Con_rec(2) Resid_rec(2) con_char prfx_implies_con \.prfx_implies_con \.subs_implies_prfx by simp also have "... 
\ True" using U 1 ide_char Ide_append_iff\<^sub>P\<^sub>W\<^sub>E [of "[u \\ t]" "U \<^sup>*\\\<^sup>* [t \\ u]"] by (metis Ide.simps(2) Ide_appendI\<^sub>P\<^sub>W\<^sub>E Src_resid Trg.simps(2) \.prfx_implies_con \.trg_resid_sym con_char \.subs_implies_prfx prfx_implies_con) finally show ?thesis by blast qed qed qed qed text \ The converse of the previous result does not hold, because there could be a stage \i\ at which \u\<^sub>i \ t\<^sub>i\, but \t\<^sub>i\ deletes the redex contracted in \u\<^sub>i\, so there is nothing forcing that redex to have been originally marked in \t\. So \U\ being a development of \t\ is a stronger property than \U\ just being an elementary reduction path such that \U \<^sup>*\\<^sup>* [t]\. \ lemma development_append: shows "\t V. \development t U; development (t \<^sup>1\\\<^sup>* U) V\ \ development t (U @ V)" using development_imp_Arr null_char apply (induct U) apply auto by (metis Resid1x.simps(2-3) append_Nil neq_Nil_conv) lemma development_map_Lam: shows "\t. development t T \ development \<^bold>\\<^bold>[t\<^bold>] (map \.Lam T)" using \.Arr_not_Nil development_imp_Arr by (induct T) auto lemma development_map_App_1: shows "\t. \development t T; \.Arr u\ \ development (t \<^bold>\ u) (map (\x. x \<^bold>\ \.Src u) T)" apply (induct T) apply (simp add: \.Ide_implies_Arr) proof - fix t T t' assume ind: "\t. \development t T; \.Arr u\ \ development (t \<^bold>\ u) (map (\x. x \<^bold>\ \.Src u) T)" assume t'T: "development t (t' # T)" assume u: "\.Arr u" show "development (t \<^bold>\ u) (map (\x. x \<^bold>\ \.Src u) (t' # T))" using u t'T ind apply simp using \.Arr_not_Nil \.Ide_Src development_imp_Arr \.subs_Ide by force qed lemma development_map_App_2: shows "\u. \\.Arr t; development u U\ \ development (t \<^bold>\ u) (map (\x. \.App (\.Src t) x) U)" apply (induct U) apply (simp add: \.Ide_implies_Arr) proof - fix u U u' assume ind: "\u. \\.Arr t; development u U\ \ development (t \<^bold>\ u) (map (\.App (\.Src t)) U)" assume u'U: "development u (u' # U)" assume t: "\.Arr t" show "development (t \<^bold>\ u) (map (\.App (\.Src t)) (u' # U)) " using t u'U ind apply simp by (metis \.Coinitial_iff_Con \.Ide_Src \.Ide_iff_Src_self \.Ide_implies_Arr development_imp_Arr \.ide_char \.resid_Arr_Ide \.subs_Ide) qed subsection "Finiteness of Developments" text \ A term \t\ has the finite developments property if there exists a finite value that bounds the length of all developments of \t\. The goal of this section is to prove the Finite Developments Theorem: every term has the finite developments property. \ definition FD where "FD t \ \n. \U. development t U \ length U \ n" end text \ In \cite{hindley}, Hindley proceeds by using structural induction to establish a bound on the length of a development of a term. The only case that poses any difficulty is the case of a \\\-redex, which is \\<^bold>\\<^bold>[t\<^bold>] \<^bold>\ u\ in the notation used here. He notes that there is an easy bound on the length of a development of a special form in which all the contractions of residuals of \t\ occur before the contraction of the top-level redex. The development first takes \\<^bold>\\<^bold>[t\<^bold>] \<^bold>\ u\ to \\<^bold>\\<^bold>[t'\<^bold>] \<^bold>\ u'\, then to \subst u' t'\, then continues with independent developments of \u'\. The number of independent developments of \u'\ is given by the number of free occurrences of \Var 0\ in \t'\. 
As there can be only finitely many such \t'\, we can use the maximum number of free occurrences of \Var 0\ over all such \t'\ to bound the steps in the independent developments of \u'\. In the general case, the problem is that reductions of residuals of \t\ can increase the number of free occurrences of \Var 0\, so we can't readily count them at any particular stage. Hindley shows that developments in which there are reductions of residuals of \t\ that occur after the contraction of the top-level redex are equivalent to reductions of the special form, by a transformation with a bounded increase in length. This can be considered as a weak form of standardization for developments. A later paper by de Vrijer \cite{deVrijer} obtains an explicit function for the exact number of steps in a development of maximal length. His proof is very straightforward and amenable to formalization, and it is what we follow here. The main issue for us is that de Vrijer uses a classical representation of \\\-terms, with variable names and \\\-equivalence, whereas here we are using de Bruijn indices. This means that we have to discover the correct modification of de Vrijer's definitions to apply to the present situation. \ context lambda_calculus begin text \ Our first definition is that of the ``multiplicity'' of a free variable in a term. This is a count of the maximum number of times a variable could occur free in a term reachable in a development. The main issue in adjusting to de Bruijn indices is that the same variable will have different indices depending on the depth at which it occurs in the term. So, we need to keep track of how the indices of variables change as we move through the term. Our modified definitions adjust the parameter to the multiplicity function on each recursive call, to account for the contextual depth (\emph{i.e.}~the number of binders on a path from the root of the term). The definition of this function is readily understandable, except perhaps for the \Beta\ case. The multiplicity \mtp x (\<^bold>\\<^bold>[t\<^bold>] \<^bold>\ u)\ has to be at least as large as \mtp x (\<^bold>\\<^bold>[t\<^bold>] \<^bold>\ u)\, to account for developments in which the top-level redex is not contracted. However, if the top-level redex \\<^bold>\\<^bold>[t\<^bold>] \<^bold>\ u\ is contracted, then the contractum is \subst u t\, so the multiplicity has to be at least as large as \mtp x (subst u t)\. This leads to the relation: \begin{center} \mtp x (\<^bold>\\<^bold>[t\<^bold>] \<^bold>\ u) = max (mtp x (\<^bold>\\<^bold>[t\<^bold>] \<^bold>\ u)) (mtp x (subst u t))\ \end{center} This is not directly suitable for use in a definition of the function \mtp\, because proving the termination is problematic. Instead, we have to guess the correct expression for \mtp x (subst u t)\ and use that. Now, each variable \x\ in \subst u t\ other than the variable \0\ that is substituted for still has all the occurrences that it does in \\<^bold>\\<^bold>[t\<^bold>]\. In addition, the variable being substituted for (which has index \0\ in the outermost context of \t\) will in general have multiple free occurrences in \t\, with a total multiplicity given by \mtp 0 t\. The substitution operation replaces each free occurrence by \u\, which has the effect of multiplying the multiplicity of a variable \x\ in \u\ by a factor of \mtp 0 t\.
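(As an informal sanity check, not part of the formal development and glossing over the index adjustments performed by \Raise\: if the substituted variable has multiplicity \3\ in \t\ and a variable \x\ has multiplicity \2\ in \u\, then the copies of \u\ contribute \3 * 2 = 6\ occurrences of \x\ to \subst u t\, in addition to whatever occurrences \x\ already had in \t\.)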
These considerations lead to the following: \begin{center} \mtp x (\<^bold>\\<^bold>[t\<^bold>] \<^bold>\ u) = max (mtp x \<^bold>\\<^bold>[t\<^bold>] + mtp x u) (mtp x \<^bold>\\<^bold>[t\<^bold>] + mtp x u * mtp 0 t)\ \end{center} However, we can simplify this to: \begin{center} \mtp x (\<^bold>\\<^bold>[t\<^bold>] \<^bold>\ u) = mtp x \<^bold>\\<^bold>[t\<^bold>] + mtp x u * max 1 (mtp 0 t)\ \end{center} and replace the \mtp x \<^bold>\\<^bold>[t\<^bold>]\ by \mtp (Suc x) t\ to simplify the ordering necessary for the termination proof and allow it to be done automatically. The final result is perhaps about the first thing one would think to write down, but there are possible ways to go wrong and it is of course still necessary to discover the proper form required for the various induction proofs. I followed a long path of rather more complicated-looking definitions, until I eventually managed to find the proper inductive forms for all the lemmas and eventually arrive back at this definition. \ fun mtp :: "nat \ lambda \ nat" where "mtp x \<^bold>\ = 0" | "mtp x \<^bold>\z\<^bold>\ = (if z = x then 1 else 0)" | "mtp x \<^bold>\\<^bold>[t\<^bold>] = mtp (Suc x) t" | "mtp x (t \<^bold>\ u) = mtp x t + mtp x u" | "mtp x (\<^bold>\\<^bold>[t\<^bold>] \<^bold>\ u) = mtp (Suc x) t + mtp x u * max 1 (mtp 0 t)" text \ The multiplicity function generalizes the free variable predicate. This is not actually used, but is included for explanatory purposes. \ lemma mtp_gt_0_iff_in_FV: shows "\x. mtp x t > 0 \ x \ FV t" proof (induct t) show "\x. 0 < mtp x \<^bold>\ \ x \ FV \<^bold>\" by simp show "\x z. 0 < mtp x \<^bold>\z\<^bold>\ \ x \ FV \<^bold>\z\<^bold>\" by auto show Lam: "\t x. (\x. 0 < mtp x t \ x \ FV t) \ 0 < mtp x \<^bold>\\<^bold>[t\<^bold>] \ x \ FV \<^bold>\\<^bold>[t\<^bold>]" proof - fix t and x :: nat assume ind: "\x. 0 < mtp x t \ x \ FV t" show "0 < mtp x \<^bold>\\<^bold>[t\<^bold>] \ x \ FV \<^bold>\\<^bold>[t\<^bold>]" using ind apply auto apply (metis Diff_iff One_nat_def diff_Suc_1 empty_iff imageI insert_iff nat.distinct(1)) by (metis Suc_pred neq0_conv) qed show "\t u x. \\x. 0 < mtp x t \ x \ FV t; \x. 0 < mtp x u \ x \ FV u\ \ 0 < mtp x (t \<^bold>\ u) \ x \ FV (t \<^bold>\ u)" by simp show "\t u x. \\x. 0 < mtp x t \ x \ FV t; \x. 0 < mtp x u \ x \ FV u\ \ 0 < mtp x (\<^bold>\\<^bold>[t\<^bold>] \<^bold>\ u) \ x \ FV (\<^bold>\\<^bold>[t\<^bold>] \<^bold>\ u)" proof - fix t u and x :: nat assume ind1: "\x. 0 < mtp x t \ x \ FV t" assume ind2: "\x. 0 < mtp x u \ x \ FV u" show "0 < mtp x (\<^bold>\\<^bold>[t\<^bold>] \<^bold>\ u) \ x \ FV (\<^bold>\\<^bold>[t\<^bold>] \<^bold>\ u)" using ind1 ind2 apply simp by force qed qed text \ We now establish a fact about commutation of multiplicity and Raise that will be needed subsequently. \ lemma mtpE_eq_Raise: shows "\x k d. x < d \ mtp x (Raise d k t) = mtp x t" by (induct t) auto lemma mtp_Raise_ind: shows "\d x k l t. \l \ d; size t \ s\ \ mtp (x + d + k) (Raise l k t) = mtp (x + d) t" proof (induct s) show "\d x k l. \l \ d; size t \ 0\ \ mtp (x + d + k) (Raise l k t) = mtp (x + d) t" for t by (cases t) auto show "\s d x k l. \\d x k l t. \l \ d; size t \ s\ \ mtp (x + d + k) (Raise l k t) = mtp (x + d) t; l \ d; size t \ Suc s\ \ mtp (x + d + k) (Raise l k t) = mtp (x + d) t" for t proof (cases t) show "\d x k l s. t = \<^bold>\ \ mtp (x + d + k) (Raise l k t) = mtp (x + d) t" by simp show "\z d x k l s. \l \ d; t = \<^bold>\z\<^bold>\\ \ mtp (x + d + k) (Raise l k t) = mtp (x + d) t" by simp show "\u d x k l s. 
\l \ d; size t \ Suc s; t = \<^bold>\\<^bold>[u\<^bold>]; (\d x k l u. \l \ d; size u \ s\ \ mtp (x + d + k) (Raise l k u) = mtp (x + d) u)\ \ mtp (x + d + k) (Raise l k t) = mtp (x + d) t" proof - fix u d x s and k l :: nat assume l: "l \ d" and s: "size t \ Suc s" and t: "t = \<^bold>\\<^bold>[u\<^bold>]" assume ind: "\d x k l u. \l \ d; size u \ s\ \ mtp (x + d + k) (Raise l k u) = mtp (x + d) u" show "mtp (x + d + k) (Raise l k t) = mtp (x + d) t" proof - have "mtp (x + d + k) (Raise l k t) = mtp (Suc (x + d + k)) (Raise (Suc l) k u)" using t by simp also have "... = mtp (x + Suc d) u" proof - have "size u \ s" using t s by force thus ?thesis using l s ind [of "Suc l" "Suc d"] by simp qed also have "... = mtp (x + d) t" using t by auto finally show ?thesis by blast qed qed show "\t1 t2 d x k l s. \\d x k l t1. \l \ d; size t1 \ s\ \ mtp (x + d + k) (Raise l k t1) = mtp (x + d) t1; \d x k l t2. \l \ d; size t2 \ s\ \ mtp (x + d + k) (Raise l k t2) = mtp (x + d) t2; l \ d; size t \ Suc s; t = t1 \<^bold>\ t2\ \ mtp (x + d + k) (Raise l k t) = mtp (x + d) t" proof - fix t1 t2 s assume s: "size t \ Suc s" and t: "t = t1 \<^bold>\ t2" have "size t1 \ s \ size t2 \ s" using s t by auto thus "\d x k l. \\d x k l t1. \l \ d; size t1 \ s\ \ mtp (x + d + k) (Raise l k t1) = mtp (x + d) t1; \d x k l t2. \l \ d; size t2 \ s\ \ mtp (x + d + k) (Raise l k t2) = mtp (x + d) t2; l \ d; size t \ Suc s; t = t1 \<^bold>\ t2\ \ mtp (x + d + k) (Raise l k t) = mtp (x + d) t" by simp qed show "\t1 t2 d x k l s. \\d x k l t1. \l \ d; size t1 \ s\ \ mtp (x + d + k) (Raise l k t1) = mtp (x + d) t1; \d x k l t2. \l \ d; size t2 \ s\ \ mtp (x + d + k) (Raise l k t2) = mtp (x + d) t2; l \ d; size t \ Suc s; t = \<^bold>\\<^bold>[t1\<^bold>] \<^bold>\ t2\ \ mtp (x + d + k) (Raise l k t) = mtp (x + d) t" proof - fix t1 t2 d x s and k l :: nat assume l: "l \ d" and s: "size t \ Suc s" and t: "t = \<^bold>\\<^bold>[t1\<^bold>] \<^bold>\ t2" assume ind: "\d x k l N. \l \ d; size N \ s\ \ mtp (x + d + k) (Raise l k N) = mtp (x + d) N" show "mtp (x + d + k) (Raise l k t) = mtp (x + d) t" proof - have 1: "size t1 \ s \ size t2 \ s" using s t by auto have "mtp (x + d + k) (Raise l k t) = mtp (Suc (x + d + k)) (Raise (Suc l) k t1) + mtp (x + d + k) (Raise l k t2) * max 1 (mtp 0 (Raise (Suc l) k t1))" using t l by simp also have "... = mtp (Suc (x + d + k)) (Raise (Suc l) k t1) + mtp (x + d) t2 * max 1 (mtp 0 (Raise (Suc l) k t1))" using l 1 ind by auto also have "... = mtp (x + Suc d) t1 + mtp (x + d) t2 * max 1 (mtp 0 t1)" proof - have "mtp (x + Suc d + k) (Raise (Suc l) k t1) = mtp (x + Suc d) t1" using l 1 ind [of "Suc l" "Suc d" t1] by simp moreover have "mtp 0 (Raise (Suc l) k t1) = mtp 0 t1" (* Raising indices already > 0 does not affect mtp\<^sub>0. *) using l 1 ind [of "Suc l" "Suc d" t1 k] mtpE_eq_Raise by simp ultimately show ?thesis by simp qed also have "... = mtp (x + d) t" using t by auto finally show ?thesis by blast qed qed qed qed lemma mtp_Raise: assumes "l \ d" shows "mtp (x + d + k) (Raise l k t) = mtp (x + d) t" using assms mtp_Raise_ind by blast lemma mtp_Raise': shows "\k l. mtp l (Raise l (Suc k) t) = 0" by (induct t) auto lemma mtp_raise: shows "mtp (x + Suc d) (raise d t) = mtp (Suc x) t" by (metis Suc_eq_plus1 add.assoc le_add2 le_add_same_cancel2 mtp_Raise plus_1_eq_Suc) lemma mtp_Subst_cancel: shows "\k d n. mtp k (Subst (Suc d + k) u t) = mtp k t" proof (induct t) show "\k d n. mtp k (Subst (Suc d + k) u \<^bold>\) = mtp k \<^bold>\" by simp show "\k z d n. 
mtp k (Subst (Suc d + k) u \<^bold>\z\<^bold>\) = mtp k \<^bold>\z\<^bold>\" using mtp_Raise' apply auto by (metis add_Suc_right add_Suc_shift order_refl raise_plus) show "\t k d n. (\k d n. mtp k (Subst (Suc d + k) u t) = mtp k t) \ mtp k (Subst (Suc d + k) u \<^bold>\\<^bold>[t\<^bold>]) = mtp k \<^bold>\\<^bold>[t\<^bold>]" by (metis Subst.simps(3) add_Suc_right mtp.simps(3)) show "\t1 t2 k d n. \\k d n. mtp k (Subst (Suc d + k) u t1) = mtp k t1; \k d n. mtp k (Subst (Suc d + k) u t2) = mtp k t2\ \ mtp k (Subst (Suc d + k) u (t1 \<^bold>\ t2)) = mtp k (t1 \<^bold>\ t2)" by auto show "\t1 t2 k d n. \\k d n. mtp k (Subst (Suc d + k) u t1) = mtp k t1; \k d n. mtp k (Subst (Suc d + k) u t2) = mtp k t2\ \ mtp k (Subst (Suc d + k) u (\<^bold>\\<^bold>[t1\<^bold>] \<^bold>\ t2)) = mtp k (\<^bold>\\<^bold>[t1\<^bold>] \<^bold>\ t2)" using mtp_Raise' apply auto by (metis Nat.add_0_right add_Suc_right) qed lemma mtp\<^sub>0_Subst_cancel: shows "mtp 0 (Subst (Suc d) u t) = mtp 0 t" using mtp_Subst_cancel [of 0] by simp text \ We can now (!) prove the desired generalization of de Vrijer's formula for the commutation of multiplicity and substitution. This is the main lemma whose form is difficult to find. To get this right, the proper relationships have to exist between the various depth parameters to \Subst\ and the arguments to \mtp\. \ lemma mtp_Subst': shows "\d x u. mtp (x + Suc d) (Subst d u t) = mtp (x + Suc (Suc d)) t + mtp (Suc x) u * mtp d t" proof (induct t) show "\d x u. mtp (x + Suc d) (Subst d u \<^bold>\) = mtp (x + Suc (Suc d)) \<^bold>\ + mtp (Suc x) u * mtp d \<^bold>\" by simp show "\z d x u. mtp (x + Suc d) (Subst d u \<^bold>\z\<^bold>\) = mtp (x + Suc (Suc d)) \<^bold>\z\<^bold>\ + mtp (Suc x) u * mtp d \<^bold>\z\<^bold>\" using mtp_raise by auto show "\t d x u. (\d x u. mtp (x + Suc d) (Subst d u t) = mtp (x + Suc (Suc d)) t + mtp (Suc x) u * mtp d t) \ mtp (x + Suc d) (Subst d u \<^bold>\\<^bold>[t\<^bold>]) = mtp (x + Suc (Suc d)) \<^bold>\\<^bold>[t\<^bold>] + mtp (Suc x) u * mtp d \<^bold>\\<^bold>[t\<^bold>]" proof - fix t u d x assume ind: "\d x N. mtp (x + Suc d) (Subst d N t) = mtp (x + Suc (Suc d)) t + mtp (Suc x) N * mtp d t" have "mtp (x + Suc d) (Subst d u \<^bold>\\<^bold>[t\<^bold>]) = mtp (Suc x + Suc (Suc d)) t + mtp (x + Suc (Suc d)) (raise (Suc d) u) * mtp (Suc d) t" using ind mtp_raise add_Suc_shift by (metis Subst.simps(3) add_Suc_right mtp.simps(3)) also have "... = mtp (x + Suc (Suc d)) \<^bold>\\<^bold>[t\<^bold>] + mtp (Suc x) u * mtp d \<^bold>\\<^bold>[t\<^bold>]" using Raise_Suc by (metis add_Suc_right add_Suc_shift mtp.simps(3) mtp_raise) finally show "mtp (x + Suc d) (Subst d u \<^bold>\\<^bold>[t\<^bold>]) = mtp (x + Suc (Suc d)) \<^bold>\\<^bold>[t\<^bold>] + mtp (Suc x) u * mtp d \<^bold>\\<^bold>[t\<^bold>]" by blast qed show "\t1 t2 u d x. \\d x u. mtp (x + Suc d) (Subst d u t1) = mtp (x + Suc (Suc d)) t1 + mtp (Suc x) u * mtp d t1; \d x u. mtp (x + Suc d) (Subst d u t2) = mtp (x + Suc (Suc d)) t2 + mtp (Suc x) u * mtp d t2\ \ mtp (x + Suc d) (Subst d u (t1 \<^bold>\ t2)) = mtp (x + Suc (Suc d)) (t1 \<^bold>\ t2) + mtp (Suc x) u * mtp d (t1 \<^bold>\ t2)" by (simp add: add_mult_distrib2) show "\t1 t2 u d x. \\d x N. mtp (x + Suc d) (Subst d N t1) = mtp (x + Suc (Suc d)) t1 + mtp (Suc x) N * mtp d t1; \d x N. 
mtp (x + Suc d) (Subst d N t2) = mtp (x + Suc (Suc d)) t2 + mtp (Suc x) N * mtp d t2\ \ mtp (x + Suc d) (Subst d u (\<^bold>\\<^bold>[t1\<^bold>] \<^bold>\ t2)) = mtp (x + Suc (Suc d)) (\<^bold>\\<^bold>[t1\<^bold>] \<^bold>\ t2) + mtp (Suc x) u * mtp d (\<^bold>\\<^bold>[t1\<^bold>] \<^bold>\ t2)" proof - fix t1 t2 u d x assume ind1: "\d x N. mtp (x + Suc d) (Subst d N t1) = mtp (x + Suc (Suc d)) t1 + mtp (Suc x) N * mtp d t1" assume ind2: "\d x N. mtp (x + Suc d) (Subst d N t2) = mtp (x + Suc (Suc d)) t2 + mtp (Suc x) N * mtp d t2" show "mtp (x + Suc d) (Subst d u (\<^bold>\\<^bold>[t1\<^bold>] \<^bold>\ t2)) = mtp (x + Suc (Suc d)) (\<^bold>\\<^bold>[t1\<^bold>] \<^bold>\ t2) + mtp (Suc x) u * mtp d (\<^bold>\\<^bold>[t1\<^bold>] \<^bold>\ t2)" proof - let ?A = "mtp (Suc x + Suc (Suc d)) t1" let ?B = "mtp (Suc x + Suc d) t2" let ?M1 = "mtp (Suc d) t1" let ?M2 = "mtp d t2" let ?M1\<^sub>0 = "mtp 0 (Subst (Suc d) u t1)" let ?M1\<^sub>0' = "mtp 0 t1" let ?N = "mtp (Suc x) u" have "mtp (x + Suc d) (Subst d u (\<^bold>\\<^bold>[t1\<^bold>] \<^bold>\ t2)) = mtp (x + Suc d) (\<^bold>\\<^bold>[Subst (Suc d) u t1\<^bold>] \<^bold>\ Subst d u t2)" by simp also have "... = mtp (x + Suc (Suc d)) (Subst (Suc d) u t1) + mtp (x + Suc d) (Subst d u t2) * max 1 (mtp 0 (Subst (Suc d) u t1))" by simp also have "... = (?A + ?N * ?M1) + (?B + ?N * ?M2) * max 1 ?M1\<^sub>0" using ind1 ind2 add_Suc_shift by presburger also have "... = ?A + ?N * ?M1 + ?B * max 1 ?M1\<^sub>0 + ?N * ?M2 * max 1 ?M1\<^sub>0" by algebra also have "... = ?A + ?B * max 1 ?M1\<^sub>0' + ?N * ?M1 + ?N * ?M2 * max 1 ?M1\<^sub>0'" proof - have "?M1\<^sub>0 = ?M1\<^sub>0'" (* The u-dependence on the LHS is via raise (Suc d) u, which does not have any free occurrences of 0. So mtp 0 0 yields the same on both. *) using mtp\<^sub>0_Subst_cancel by blast thus ?thesis by auto qed also have "... = ?A + ?B * max 1 ?M1\<^sub>0' + ?N * (?M1 + ?M2 * max 1 ?M1\<^sub>0')" by algebra also have "... = mtp (Suc x + Suc d) (\<^bold>\\<^bold>[t1\<^bold>] \<^bold>\ t2) + mtp (Suc x) u * mtp d (\<^bold>\\<^bold>[t1\<^bold>] \<^bold>\ t2)" by simp finally show ?thesis by simp qed qed qed text \ The following lemma provides expansions that apply when the parameter to \mtp\ is \0\, as opposed to the previous lemma, which only applies for parameters greater than \0\. \ lemma mtp_Subst: shows "\u k. mtp k (Subst k u t) = mtp (Suc k) t + mtp k (raise k u) * mtp k t" proof (induct t) show "\u k. mtp k (Subst k u \<^bold>\) = mtp (Suc k) \<^bold>\ + mtp k (raise k u) * mtp k \<^bold>\" by simp show "\x u k. mtp k (Subst k u \<^bold>\x\<^bold>\) = mtp (Suc k) \<^bold>\x\<^bold>\ + mtp k (raise k u) * mtp k \<^bold>\x\<^bold>\" by auto show "\t u k. (\u k. mtp k (Subst k u t) = mtp (Suc k) t + mtp k (raise k u) * mtp k t) \ mtp k (Subst k u \<^bold>\\<^bold>[t\<^bold>]) = mtp (Suc k) \<^bold>\\<^bold>[t\<^bold>] + mtp k (Raise 0 k u) * mtp k \<^bold>\\<^bold>[t\<^bold>]" using mtp_Raise [of 0] apply auto by (metis add.left_neutral) show "\t1 t2 u k. \\u k. mtp k (Subst k u t1) = mtp (Suc k) t1 + mtp k (raise k u) * mtp k t1; \u k. mtp k (Subst k u t2) = mtp (Suc k) t2 + mtp k (raise k u) * mtp k t2\ \ mtp k (Subst k u (t1 \<^bold>\ t2)) = mtp (Suc k) (t1 \<^bold>\ t2) + mtp k (raise k u) * mtp k (t1 \<^bold>\ t2)" by (auto simp add: distrib_left) show "\t1 t2 u k. \\u k. mtp k (Subst k u t1) = mtp (Suc k) t1 + mtp k (raise k u) * mtp k t1; \u k. 
mtp k (Subst k u t2) = mtp (Suc k) t2 + mtp k (raise k u) * mtp k t2\ \ mtp k (Subst k u (\<^bold>\\<^bold>[t1\<^bold>] \<^bold>\ t2)) = mtp (Suc k) (\<^bold>\\<^bold>[t1\<^bold>] \<^bold>\ t2) + mtp k (raise k u) * mtp k (\<^bold>\\<^bold>[t1\<^bold>] \<^bold>\ t2)" proof - fix t1 t2 u k assume ind1: "\u k. mtp k (Subst k u t1) = mtp (Suc k) t1 + mtp k (raise k u) * mtp k t1" assume ind2: "\u k. mtp k (Subst k u t2) = mtp (Suc k) t2 + mtp k (raise k u) * mtp k t2" show "mtp k (Subst k u (\<^bold>\\<^bold>[t1\<^bold>] \<^bold>\ t2)) = mtp (Suc k) (\<^bold>\\<^bold>[t1\<^bold>] \<^bold>\ t2) + mtp k (raise k u) * mtp k (\<^bold>\\<^bold>[t1\<^bold>] \<^bold>\ t2)" proof - have "mtp (Suc k) (Raise 0 (Suc k) u) * mtp (Suc k) t1 + (mtp (Suc k) t2 + mtp k (Raise 0 k u) * mtp k t2) * max (Suc 0) (mtp 0 t1) = mtp (Suc k) t2 * max (Suc 0) (mtp 0 t1) + mtp k (Raise 0 k u) * (mtp (Suc k) t1 + mtp k t2 * max (Suc 0) (mtp 0 t1))" proof - have "mtp (Suc k) (Raise 0 (Suc k) u) * mtp (Suc k) t1 + (mtp (Suc k) t2 + mtp k (Raise 0 k u) * mtp k t2) * max (Suc 0) (mtp 0 t1) = mtp (Suc k) t2 * max (Suc 0) (mtp 0 t1) + mtp (Suc k) (Raise 0 (Suc k) u) * mtp (Suc k) t1 + mtp k (Raise 0 k u) * mtp k t2 * max (Suc 0) (mtp 0 t1)" by algebra also have "... = mtp (Suc k) t2 * max (Suc 0) (mtp 0 t1) + mtp (Suc k) (Raise 0 (Suc k) u) * mtp (Suc k) t1 + mtp 0 u * mtp k t2 * max (Suc 0) (mtp 0 t1)" using mtp_Raise [of 0 0 0 k u] by auto also have "... = mtp (Suc k) t2 * max (Suc 0) (mtp 0 t1) + mtp k (Raise 0 k u) * (mtp (Suc k) t1 + mtp k t2 * max (Suc 0) (mtp 0 t1))" by (metis (no_types, lifting) ab_semigroup_add_class.add_ac(1) ab_semigroup_mult_class.mult_ac(1) add_mult_distrib2 le_add1 mtp_Raise plus_nat.add_0) finally show ?thesis by blast qed thus ?thesis using ind1 ind2 mtp\<^sub>0_Subst_cancel by auto qed qed qed lemma mtp0_subst_le: shows "mtp 0 (subst u t) \ mtp 1 t + mtp 0 u * max 1 (mtp 0 t)" proof (cases t) show "t = \<^bold>\ \ mtp 0 (subst u t) \ mtp 1 t + mtp 0 u * max 1 (mtp 0 t)" by auto show "\z. t = \<^bold>\z\<^bold>\ \ mtp 0 (subst u t) \ mtp 1 t + mtp 0 u * max 1 (mtp 0 t)" using Raise_0 by force show "\P. t = \<^bold>\\<^bold>[P\<^bold>] \ mtp 0 (subst u t) \ mtp 1 t + mtp 0 u * max 1 (mtp 0 t)" using mtp_Subst [of 0 u t] Raise_0 by force show "\t1 t2. t = t1 \<^bold>\ t2 \ mtp 0 (subst u t) \ mtp 1 t + mtp 0 u * max 1 (mtp 0 t)" using mtp_Subst Raise_0 add_mult_distrib2 nat_mult_max_right by auto show "\t1 t2. t = \<^bold>\\<^bold>[t1\<^bold>] \<^bold>\ t2 \ mtp 0 (subst u t) \ mtp 1 t + mtp 0 u * max 1 (mtp 0 t)" using mtp_Subst Raise_0 by (metis Nat.add_0_right dual_order.eq_iff max_def mult.commute mult_zero_left not_less_eq_eq plus_1_eq_Suc trans_le_add1) qed lemma elementary_reduction_nonincreases_mtp: shows "\u x. \elementary_reduction u; u \ t\ \ mtp x (resid t u) \ mtp x t" proof (induct t) show "\u x. \elementary_reduction u; u \ \<^bold>\\ \ mtp x (resid \<^bold>\ u) \ mtp x \<^bold>\" by simp show "\x u i. \elementary_reduction u; u \ \<^bold>\i\<^bold>\\ \ mtp x (resid \<^bold>\i\<^bold>\ u) \ mtp x \<^bold>\i\<^bold>\" by (meson Ide.simps(2) elementary_reduction_not_ide ide_backward_stable ide_char subs_implies_prfx) fix u show "\t x. \\u x. \elementary_reduction u; u \ t\ \ mtp x (resid t u) \ mtp x t; elementary_reduction u; u \ \<^bold>\\<^bold>[t\<^bold>]\ \ mtp x (\<^bold>\\<^bold>[t\<^bold>] \\ u) \ mtp x \<^bold>\\<^bold>[t\<^bold>]" by (cases u) auto show "\t1 t2 x. \\u x. \elementary_reduction u; u \ t1\ \ mtp x (resid t1 u) \ mtp x t1; \u x. 
\elementary_reduction u; u \ t2\ \ mtp x (resid t2 u) \ mtp x t2; elementary_reduction u; u \ t1 \<^bold>\ t2\ \ mtp x (resid (t1 \<^bold>\ t2) u) \ mtp x (t1 \<^bold>\ t2)" apply (cases u) apply auto apply (metis Coinitial_iff_Con add_mono_thms_linordered_semiring(3) resid_Arr_Ide) by (metis Coinitial_iff_Con add_mono_thms_linordered_semiring(2) resid_Arr_Ide) (* * TODO: Isabelle is sensitive to the order of assumptions in the induction hypotheses * stated in the "show". Why? *) show "\t1 t2 x. \\u1 x. \elementary_reduction u1; u1 \ t1\ \ mtp x (resid t1 u1) \ mtp x t1; \u2 x. \elementary_reduction u2; u2 \ t2\ \ mtp x (resid t2 u2) \ mtp x t2; elementary_reduction u; u \ \<^bold>\\<^bold>[t1\<^bold>] \<^bold>\ t2\ \ mtp x ((\<^bold>\\<^bold>[t1\<^bold>] \<^bold>\ t2) \\ u) \ mtp x (\<^bold>\\<^bold>[t1\<^bold>] \<^bold>\ t2)" proof - fix t1 t2 x assume ind1: "\u1 x. \elementary_reduction u1; u1 \ t1\ \ mtp x (t1 \\ u1) \ mtp x t1" assume ind2: "\u2 x. \elementary_reduction u2; u2 \ t2\ \ mtp x (t2 \\ u2) \ mtp x t2" assume u: "elementary_reduction u" assume subs: "u \ \<^bold>\\<^bold>[t1\<^bold>] \<^bold>\ t2" have 1: "is_App u \ is_Beta u" using subs by (metis prfx_Beta_iff subs_implies_prfx) have "is_App u \ mtp x ((\<^bold>\\<^bold>[t1\<^bold>] \<^bold>\ t2) \\ u) \ mtp x (\<^bold>\\<^bold>[t1\<^bold>] \<^bold>\ t2)" proof - assume 2: "is_App u" obtain u1 u2 where u1u2: "u = \<^bold>\\<^bold>[u1\<^bold>] \<^bold>\ u2" using 2 u by (metis ConD(3) Con_implies_is_Lam_iff_is_Lam Con_sym con_def is_App_def is_Lam_def lambda.disc(8) null_char prfx_implies_con subs subs_implies_prfx) have "mtp x ((\<^bold>\\<^bold>[t1\<^bold>] \<^bold>\ t2) \\ u) = mtp x (\<^bold>\\<^bold>[t1 \\ u1\<^bold>] \<^bold>\ (t2 \\ u2))" using u1u2 subs by (metis Con_sym Ide.simps(1) ide_char resid.simps(6) subs_implies_prfx) also have "... = mtp (Suc x) (resid t1 u1) + mtp x (resid t2 u2) * max 1 (mtp 0 (resid t1 u1))" by simp also have "... \ mtp (Suc x) t1 + mtp x (resid t2 u2) * max 1 (mtp 0 (resid t1 u1))" using u1u2 ind1 [of u1 "Suc x"] con_sym ide_char resid_arr_ide prfx_implies_con subs subs_implies_prfx u by force also have "... \ mtp (Suc x) t1 + mtp x t2 * max 1 (mtp 0 (resid t1 u1))" using u1u2 ind2 [of u2 x] by (metis (no_types, lifting) Con_implies_Coinitial_ind add_left_mono dual_order.eq_iff elementary_reduction.simps(4) lambda.disc(11) mult_le_cancel2 prfx_App_iff resid.simps(31) resid_Arr_Ide subs subs.simps(4) subs_implies_prfx u) also have "... \ mtp (Suc x) t1 + mtp x t2 * max 1 (mtp 0 t1)" using ind1 [of u1 0] by (metis Con_implies_Coinitial_ind Ide.simps(3) elementary_reduction.simps(3) elementary_reduction.simps(4) lambda.disc(11) max.mono mult_le_mono nat_add_left_cancel_le nat_le_linear prfx_App_iff resid.simps(31) resid_Arr_Ide subs subs.simps(4) subs_implies_prfx u u1u2) also have "... = mtp x (\<^bold>\\<^bold>[t1\<^bold>] \<^bold>\ t2)" by auto finally show "mtp x ((\<^bold>\\<^bold>[t1\<^bold>] \<^bold>\ t2) \\ u) \ mtp x (\<^bold>\\<^bold>[t1\<^bold>] \<^bold>\ t2)" by blast qed moreover have "is_Beta u \ mtp x ((\<^bold>\\<^bold>[t1\<^bold>] \<^bold>\ t2) \\ u) \ mtp x (\<^bold>\\<^bold>[t1\<^bold>] \<^bold>\ t2)" proof - assume 2: "is_Beta u" obtain u1 u2 where u1u2: "u = \<^bold>\\<^bold>[u1\<^bold>] \<^bold>\ u2" using 2 u is_Beta_def by auto have "mtp x ((\<^bold>\\<^bold>[t1\<^bold>] \<^bold>\ t2) \\ u) = mtp x (subst (t2 \\ u2) (t1 \\ u1))" using u1u2 subs by (metis con_def con_sym null_char prfx_implies_con resid.simps(4) subs_implies_prfx) also have "... 
\ mtp (Suc x) (resid t1 u1) + mtp x (resid t2 u2) * max 1 (mtp 0 (resid t1 u1))" apply (cases "x = 0") using mtp0_subst_le Raise_0 mtp_Subst' [of "x - 1" 0 "resid t2 u2" "resid t1 u1"] by auto also have "... \ mtp (Suc x) t1 + mtp x t2 * max 1 (mtp 0 t1)" using ind1 ind2 apply simp by (metis Coinitial_iff_Con Ide.simps(1) dual_order.eq_iff elementary_reduction.simps(5) ide_char resid.simps(4) resid_Arr_Ide subs subs_implies_prfx u u1u2) also have "... = mtp x (\<^bold>\\<^bold>[t1\<^bold>] \<^bold>\ t2)" by simp finally show "mtp x ((\<^bold>\\<^bold>[t1\<^bold>] \<^bold>\ t2) \\ u) \ mtp x (\<^bold>\\<^bold>[t1\<^bold>] \<^bold>\ t2)" by blast qed ultimately show "mtp x ((\<^bold>\\<^bold>[t1\<^bold>] \<^bold>\ t2) \\ u) \ mtp x (\<^bold>\\<^bold>[t1\<^bold>] \<^bold>\ t2)" using 1 by blast qed qed text \ Next we define the ``height'' of a term. This counts the number of steps in a development of maximal length of the given term. \ fun hgt where "hgt \<^bold>\ = 0" | "hgt \<^bold>\_\<^bold>\ = 0" | "hgt \<^bold>\\<^bold>[t\<^bold>] = hgt t" | "hgt (t \<^bold>\ u) = hgt t + hgt u" | "hgt (\<^bold>\\<^bold>[t\<^bold>] \<^bold>\ u) = Suc (hgt t + hgt u * max 1 (mtp 0 t))" lemma hgt_resid_ide: shows "\ide u; u \ t\ \ hgt (resid t u) \ hgt t" by (metis con_sym eq_imp_le resid_arr_ide prfx_implies_con subs_implies_prfx) lemma hgt_Raise: shows "\l k. hgt (Raise l k t) = hgt t" using mtpE_eq_Raise by (induct t) auto lemma hgt_Subst: shows "\u k. Arr u \ hgt (Subst k u t) = hgt t + hgt u * mtp k t" proof (induct t) show "\u k. Arr u \ hgt (Subst k u \<^bold>\) = hgt \<^bold>\ + hgt u * mtp k \<^bold>\" by simp show "\x u k. Arr u \ hgt (Subst k u \<^bold>\x\<^bold>\) = hgt \<^bold>\x\<^bold>\ + hgt u * mtp k \<^bold>\x\<^bold>\" using hgt_Raise by auto show "\t u k. \\u k. Arr u \ hgt (Subst k u t) = hgt t + hgt u * mtp k t; Arr u\ \ hgt (Subst k u \<^bold>\\<^bold>[t\<^bold>]) = hgt \<^bold>\\<^bold>[t\<^bold>] + hgt u * mtp k \<^bold>\\<^bold>[t\<^bold>]" by auto show "\t1 t2 u k. \\u k. Arr u \ hgt (Subst k u t1) = hgt t1 + hgt u * mtp k t1; \u k. Arr u \ hgt (Subst k u t2) = hgt t2 + hgt u * mtp k t2; Arr u\ \ hgt (Subst k u (t1 \<^bold>\ t2)) = hgt (t1 \<^bold>\ t2) + hgt u * mtp k (t1 \<^bold>\ t2)" by (simp add: distrib_left) show "\t1 t2 u k. \\u k. Arr u \ hgt (Subst k u t1) = hgt t1 + hgt u * mtp k t1; \u k. Arr u \ hgt (Subst k u t2) = hgt t2 + hgt u * mtp k t2; Arr u\ \ hgt (Subst k u (\<^bold>\\<^bold>[t1\<^bold>] \<^bold>\ t2)) = hgt (\<^bold>\\<^bold>[t1\<^bold>] \<^bold>\ t2) + hgt u * mtp k (\<^bold>\\<^bold>[t1\<^bold>] \<^bold>\ t2)" proof - fix t1 t2 u k assume ind1: "\u k. Arr u \ hgt (Subst k u t1) = hgt t1 + hgt u * mtp k t1" assume ind2: "\u k. Arr u \ hgt (Subst k u t2) = hgt t2 + hgt u * mtp k t2" assume u: "Arr u" show "hgt (Subst k u (\<^bold>\\<^bold>[t1\<^bold>] \<^bold>\ t2)) = hgt (\<^bold>\\<^bold>[t1\<^bold>] \<^bold>\ t2) + hgt u * mtp k (\<^bold>\\<^bold>[t1\<^bold>] \<^bold>\ t2)" proof - have "hgt (Subst k u (\<^bold>\\<^bold>[t1\<^bold>] \<^bold>\ t2)) = Suc (hgt (Subst (Suc k) u t1) + hgt (Subst k u t2) * max 1 (mtp 0 (Subst (Suc k) u t1)))" by simp also have "... = Suc ((hgt t1 + hgt u * mtp (Suc k) t1) + (hgt t2 + hgt u * mtp k t2) * max 1 (mtp 0 (Subst (Suc k) u t1)))" using u ind1 [of u "Suc k"] ind2 [of u k] by simp also have "... = Suc (hgt t1 + hgt t2 * max 1 (mtp 0 (Subst (Suc k) u t1)) + hgt u * mtp (Suc k) t1) + hgt u * mtp k t2 * max 1 (mtp 0 (Subst (Suc k) u t1))" using comm_semiring_class.distrib by force also have "... 
= Suc (hgt t1 + hgt t2 * max 1 (mtp 0 (Subst (Suc k) u t1)) + hgt u * (mtp (Suc k) t1 + mtp k t2 * max 1 (mtp 0 (Subst (Suc k) u t1))))" by (simp add: distrib_left) also have "... = Suc (hgt t1 + hgt t2 * max 1 (mtp 0 t1) + hgt u * (mtp (Suc k) t1 + mtp k t2 * max 1 (mtp 0 t1)))" proof - have "mtp 0 (Subst (Suc k) u t1) = mtp 0 t1" using mtp\<^sub>0_Subst_cancel by auto thus ?thesis by simp qed also have "... = hgt (\<^bold>\\<^bold>[t1\<^bold>] \<^bold>\ t2) + hgt u * mtp k (\<^bold>\\<^bold>[t1\<^bold>] \<^bold>\ t2)" by simp finally show ?thesis by blast qed qed qed lemma elementary_reduction_decreases_hgt: shows "\u. \elementary_reduction u; u \ t\ \ hgt (t \\ u) < hgt t" proof (induct t) show "\u. \elementary_reduction u; u \ \<^bold>\\ \ hgt (\<^bold>\ \\ u) < hgt \<^bold>\" by simp show "\u x. \elementary_reduction u; u \ \<^bold>\x\<^bold>\\ \ hgt (\<^bold>\x\<^bold>\ \\ u) < hgt \<^bold>\x\<^bold>\" using Ide.simps(2) elementary_reduction_not_ide ide_backward_stable ide_char subs_implies_prfx by blast show "\t u. \\u. \elementary_reduction u; u \ t\ \ hgt (t \\ u) < hgt t; elementary_reduction u; u \ \<^bold>\\<^bold>[t\<^bold>]\ \ hgt (\<^bold>\\<^bold>[t\<^bold>] \\ u) < hgt \<^bold>\\<^bold>[t\<^bold>]" proof - fix t u assume ind: "\u. \elementary_reduction u; u \ t\ \ hgt (t \\ u) < hgt t" assume u: "elementary_reduction u" assume subs: "u \ \<^bold>\\<^bold>[t\<^bold>]" show "hgt (\<^bold>\\<^bold>[t\<^bold>] \\ u) < hgt \<^bold>\\<^bold>[t\<^bold>]" using u subs ind apply (cases u) apply simp_all by fastforce qed show "\t1 t2 u. \\u. \elementary_reduction u; u \ t1\ \ hgt (t1 \\ u) < hgt t1; \u. \elementary_reduction u; u \ t2\ \ hgt (t2 \\ u) < hgt t2; elementary_reduction u; u \ t1 \<^bold>\ t2\ \ hgt ((t1 \<^bold>\ t2) \\ u) < hgt (t1 \<^bold>\ t2)" proof - fix t1 t2 u assume ind1: "\u. \elementary_reduction u; u \ t1\ \ hgt (t1 \\ u) < hgt t1" assume ind2: "\u. \elementary_reduction u; u \ t2\ \ hgt (t2 \\ u) < hgt t2" assume u: "elementary_reduction u" assume subs: "u \ t1 \<^bold>\ t2" show "hgt ((t1 \<^bold>\ t2) \\ u) < hgt (t1 \<^bold>\ t2)" using u subs ind1 ind2 apply (cases u) apply simp_all by (metis add_le_less_mono add_less_le_mono hgt_resid_ide ide_char not_less0 zero_less_iff_neq_zero) qed show "\t1 t2 u. \\u. \elementary_reduction u; u \ t1\ \ hgt (t1 \\ u) < hgt t1; \u. \elementary_reduction u; u \ t2\ \ hgt (t2 \\ u) < hgt t2; elementary_reduction u; u \ \<^bold>\\<^bold>[t1\<^bold>] \<^bold>\ t2\ \ hgt ((\<^bold>\\<^bold>[t1\<^bold>] \<^bold>\ t2) \\ u) < hgt (\<^bold>\\<^bold>[t1\<^bold>] \<^bold>\ t2)" proof - fix t1 t2 u assume ind1: "\u. \elementary_reduction u; u \ t1\ \ hgt (t1 \\ u) < hgt t1" assume ind2: "\u. \elementary_reduction u; u \ t2\ \ hgt (t2 \\ u) < hgt t2" assume u: "elementary_reduction u" assume subs: "u \ \<^bold>\\<^bold>[t1\<^bold>] \<^bold>\ t2" have "is_App u \ is_Beta u" using subs by (metis prfx_Beta_iff subs_implies_prfx) moreover have "is_App u \ hgt ((\<^bold>\\<^bold>[t1\<^bold>] \<^bold>\ t2) \\ u) < hgt (\<^bold>\\<^bold>[t1\<^bold>] \<^bold>\ t2)" proof - fix u1 u2 assume 0: "is_App u" obtain u1 u1' u2 where 1: "u = u1 \<^bold>\ u2 \ u1 = \<^bold>\\<^bold>[u1'\<^bold>]" using u 0 by (metis ConD(3) Con_implies_is_Lam_iff_is_Lam Con_sym con_def is_App_def is_Lam_def null_char prfx_implies_con subs subs_implies_prfx) have "hgt ((\<^bold>\\<^bold>[t1\<^bold>] \<^bold>\ t2) \\ u) = hgt ((\<^bold>\\<^bold>[t1\<^bold>] \<^bold>\ t2) \\ (u1 \<^bold>\ u2))" using 1 by simp also have "... 
= hgt (\<^bold>\\<^bold>[t1 \\ u1'\<^bold>] \<^bold>\ t2 \\ u2)" by (metis "1" Con_sym Ide.simps(1) ide_char resid.simps(6) subs subs_implies_prfx) also have "... = Suc (hgt (t1 \\ u1') + hgt (t2 \\ u2) * max (Suc 0) (mtp 0 (t1 \\ u1')))" by auto also have "... < hgt (\<^bold>\\<^bold>[t1\<^bold>] \<^bold>\ t2)" proof - have "elementary_reduction (un_App1 u) \ ide (un_App2 u) \ ide (un_App1 u) \ elementary_reduction (un_App2 u)" using u 1 elementary_reduction_App_iff [of u] by simp moreover have "elementary_reduction (un_App1 u) \ ide (un_App2 u) \ ?thesis" proof - assume 2: "elementary_reduction (un_App1 u) \ ide (un_App2 u)" have "elementary_reduction u1' \ ide (un_App2 u)" using 1 2 u elementary_reduction_Lam_iff by force moreover have "mtp 0 (t1 \\ u1') \ mtp 0 t1" using 1 calculation elementary_reduction_nonincreases_mtp subs subs.simps(4) by blast moreover have "mtp 0 (t2 \\ u2) \ mtp 0 t2" using 1 hgt_resid_ide [of u2 t2] by (metis calculation(1) con_sym eq_refl resid_arr_ide lambda.sel(4) prfx_implies_con subs subs.simps(4) subs_implies_prfx) ultimately show ?thesis using 1 2 ind1 [of u1'] hgt_resid_ide apply simp by (metis "1" Suc_le_mono \mtp 0 (t1 \ u1') \ mtp 0 t1\ add_less_le_mono le_add1 le_add_same_cancel1 max.mono mult_le_mono subs subs.simps(4)) qed moreover have "ide (un_App1 u) \ elementary_reduction (un_App2 u) \ ?thesis" proof - assume 2: "ide (un_App1 u) \ elementary_reduction (un_App2 u)" have "ide (un_App1 u) \ elementary_reduction u2" using 1 2 u elementary_reduction_Lam_iff by force moreover have "mtp 0 (t1 \\ u1') \ mtp 0 t1" using 1 hgt_resid_ide [of u1' t1] by (metis Ide.simps(3) calculation con_sym eq_refl ide_char resid_arr_ide lambda.sel(3) prfx_implies_con subs subs.simps(4) subs_implies_prfx) moreover have "mtp 0 (t2 \\ u2) \ mtp 0 t2" using 1 elementary_reduction_nonincreases_mtp subs calculation(1) subs.simps(4) by blast ultimately show ?thesis using 1 2 ind2 [of u2] apply simp by (metis Coinitial_iff_Con Ide_iff_Src_self Nat.add_0_right add_le_less_mono ide_char Ide.simps(1) subs.simps(4) le_add1 max_nat.neutr_eq_iff mult_less_cancel2 nat.distinct(1) neq0_conv resid_Arr_Src subs subs_implies_prfx) qed ultimately show ?thesis by blast qed also have "... = Suc (hgt t1 + hgt t2 * max 1 (mtp 0 t1))" by simp also have "... = hgt (\<^bold>\\<^bold>[t1\<^bold>] \<^bold>\ t2)" by simp finally show "hgt ((\<^bold>\\<^bold>[t1\<^bold>] \<^bold>\ t2) \\ u) < hgt (\<^bold>\\<^bold>[t1\<^bold>] \<^bold>\ t2)" by blast qed moreover have "is_Beta u \ hgt ((\<^bold>\\<^bold>[t1\<^bold>] \<^bold>\ t2) \\ u) < hgt (\<^bold>\\<^bold>[t1\<^bold>] \<^bold>\ t2)" proof - fix u1 u2 assume 0: "is_Beta u" obtain u1 u2 where 1: "u = \<^bold>\\<^bold>[u1\<^bold>] \<^bold>\ u2" using u 0 by (metis lambda.collapse(4)) have "hgt ((\<^bold>\\<^bold>[t1\<^bold>] \<^bold>\ t2) \\ u) = hgt ((\<^bold>\\<^bold>[t1\<^bold>] \<^bold>\ t2) \\ (\<^bold>\\<^bold>[u1\<^bold>] \<^bold>\ u2))" using 1 by simp also have "... = hgt (subst (resid t2 u2) (resid t1 u1))" by (metis "1" con_def con_sym null_char prfx_implies_con resid.simps(4) subs subs_implies_prfx) also have "... = hgt (resid t1 u1) + hgt (resid t2 u2) * mtp 0 (resid t1 u1)" proof - have "Arr (resid t2 u2)" by (metis "1" Coinitial_resid_resid Con_sym Ide.simps(1) ide_char resid.simps(4) subs subs_implies_prfx) thus ?thesis using hgt_Subst [of "resid t2 u2" 0 "resid t1 u1"] by simp qed also have "... 
< hgt (\<^bold>\\<^bold>[t1\<^bold>] \<^bold>\ t2)" proof - have "ide u1 \ ide u2" using u 1 elementary_reduction_Beta_iff [of u] by auto thus ?thesis using 1 hgt_resid_ide by (metis add_le_mono con_sym hgt.simps(5) resid_arr_ide less_Suc_eq_le max.cobounded2 nat_mult_max_right prfx_implies_con subs subs.simps(5) subs_implies_prfx) qed finally show "hgt ((\<^bold>\\<^bold>[t1\<^bold>] \<^bold>\ t2) \\ u) < hgt (\<^bold>\\<^bold>[t1\<^bold>] \<^bold>\ t2)" by blast qed ultimately show "hgt ((\<^bold>\\<^bold>[t1\<^bold>] \<^bold>\ t2) \\ u) < hgt (\<^bold>\\<^bold>[t1\<^bold>] \<^bold>\ t2)" by blast qed qed end context reduction_paths begin lemma length_devel_le_hgt: shows "\t. development t U \ length U \ \.hgt t" using \.elementary_reduction_decreases_hgt by (induct U, auto, fastforce) text \ We finally arrive at the main result of this section: the Finite Developments Theorem. \ theorem finite_developments: shows "FD t" using length_devel_le_hgt [of t] FD_def by auto subsection "Complete Developments" text \ A \emph{complete development} is a development in which there are no residuals of originally marked redexes left to contract. \ definition complete_development where "complete_development t U \ development t U \ (\.Ide t \ [t] \<^sup>*\\<^sup>* U)" lemma complete_development_Ide_iff: shows "complete_development t U \ \.Ide t \ U = []" using complete_development_def development_Ide Ide.simps(1) ide_char by (induct t) auto lemma complete_development_cons: assumes "complete_development t (u # U)" shows "complete_development (t \\ u) U" using assms complete_development_def by (metis Ide.simps(1) Ide.simps(2) Resid_rec(1) Resid_rec(3) complete_development_Ide_iff ide_char development.simps(2) \.ide_char list.simps(3)) lemma complete_development_cong: shows "\t. \complete_development t U; \ \.Ide t\ \ [t] \<^sup>*\\<^sup>* U" using complete_development_def development_implies by (induct U) auto lemma complete_developments_cong: assumes "\ \.Ide t" and "complete_development t U" and "complete_development t V" shows "U \<^sup>*\\<^sup>* V" using assms complete_development_cong [of "t"] cong_symmetric cong_transitive by blast lemma Trgs_complete_development: shows "\t. \complete_development t U; \ \.Ide t\ \ Trgs U = {\.Trg t}" using complete_development_cong Ide.simps(1) Srcs_Resid Trgs.simps(2) Trgs_Resid_sym ide_char complete_development_def development_imp_Arr \.targets_char\<^sub>\ apply simp by (metis Srcs_Resid Trgs.simps(2) con_char ide_def) text \ Now that we know all developments are finite, it is easy to construct a complete development by an iterative process that at each stage contracts one of the remaining marked redexes. It is also possible to construct a complete development by structural induction without using the finite developments property, but it is more work to prove its correctness. 
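As an informal illustration (stated with named variables rather than the de Bruijn encoding used in the formal text), consider the term $(\lambda z.\,z)\,((\lambda z.\,z)\,x)$ with both redexes marked. The bottom-up construction defined below first selects and contracts the inner redex, leaving the single marked redex $(\lambda z.\,z)\,x$ as its residual; a second step contracts that residual, reaching $x$. The resulting two-step reduction path is a complete development: no residual of an originally marked redex remains, and the path is congruent to the one-step reduction determined by the original marked term. 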
\ fun (in lambda_calculus) bottom_up_redex where "bottom_up_redex \<^bold>\ = \<^bold>\" | "bottom_up_redex \<^bold>\x\<^bold>\ = \<^bold>\x\<^bold>\" | "bottom_up_redex \<^bold>\\<^bold>[M\<^bold>] = \<^bold>\\<^bold>[bottom_up_redex M\<^bold>]" | "bottom_up_redex (M \<^bold>\ N) = (if \ Ide M then bottom_up_redex M \<^bold>\ Src N else M \<^bold>\ bottom_up_redex N)" | "bottom_up_redex (\<^bold>\\<^bold>[M\<^bold>] \<^bold>\ N) = (if \ Ide M then \<^bold>\\<^bold>[bottom_up_redex M\<^bold>] \<^bold>\ Src N else if \ Ide N then \<^bold>\\<^bold>[M\<^bold>] \<^bold>\ bottom_up_redex N else \<^bold>\\<^bold>[M\<^bold>] \<^bold>\ N)" lemma (in lambda_calculus) elementary_reduction_bottom_up_redex: shows "\Arr t; \ Ide t\ \ elementary_reduction (bottom_up_redex t)" using Ide_Src by (induct t) auto lemma (in lambda_calculus) subs_bottom_up_redex: shows "Arr t \ bottom_up_redex t \ t" apply (induct t) apply auto[3] apply (metis Arr.simps(4) Ide.simps(4) Ide_Src Ide_iff_Src_self Ide_implies_Arr bottom_up_redex.simps(4) ide_char lambda.disc(14) lambda.sel(3) lambda.sel(4) subs_App subs_Ide) by (metis Arr.simps(5) Ide_Src Ide_iff_Src_self Ide_implies_Arr bottom_up_redex.simps(5) ide_char subs.simps(4) subs.simps(5) subs_Ide) function (sequential) bottom_up_development where "bottom_up_development t = (if \ \.Arr t \ \.Ide t then [] else \.bottom_up_redex t # (bottom_up_development (t \\ \.bottom_up_redex t)))" by pat_completeness auto termination bottom_up_development using \.elementary_reduction_decreases_hgt \.elementary_reduction_bottom_up_redex \.subs_bottom_up_redex by (relation "measure \.hgt") auto lemma complete_development_bottom_up_development_ind: shows "\t. \\.Arr t; length (bottom_up_development t) \ n\ \ complete_development t (bottom_up_development t)" proof (induct n) show "\t. \\.Arr t; length (bottom_up_development t) \ 0\ \ complete_development t (bottom_up_development t)" using complete_development_def development_Ide by auto show "\n t. \\t. \\.Arr t; length (bottom_up_development t) \ n\ \ complete_development t (bottom_up_development t); \.Arr t; length (bottom_up_development t) \ Suc n\ \ complete_development t (bottom_up_development t)" proof - fix n t assume t: "\.Arr t" assume n: "length (bottom_up_development t) \ Suc n" assume ind: "\t. 
\\.Arr t; length (bottom_up_development t) \ n\ \ complete_development t (bottom_up_development t)" show "complete_development t (bottom_up_development t)" proof (cases "bottom_up_development t") show "bottom_up_development t = [] \ ?thesis" using ind t by force fix u U assume uU: "bottom_up_development t = u # U" have 1: "\.elementary_reduction u \ u \ t" using t uU by (metis bottom_up_development.simps \.elementary_reduction_bottom_up_redex list.inject list.simps(3) \.subs_bottom_up_redex) moreover have "complete_development (\.resid t u) U" using 1 ind by (metis Suc_le_length_iff \.arr_char \.arr_resid_iff_con bottom_up_development.simps list.discI list.inject n not_less_eq_eq \.prfx_implies_con \.con_sym \.subs_implies_prfx uU) ultimately show ?thesis by (metis Con_sym Ide.simps(2) Resid_rec(1) Resid_rec(3) complete_development_Ide_iff complete_development_def ide_char development.simps(2) development_implies \.ide_char list.simps(3) uU) qed qed qed lemma complete_development_bottom_up_development: assumes "\.Arr t" shows "complete_development t (bottom_up_development t)" using assms complete_development_bottom_up_development_ind by blast end section "Reduction Strategies" context lambda_calculus begin text \ A \emph{reduction strategy} is a function taking an identity term to an arrow having that identity as its source. \ definition reduction_strategy where "reduction_strategy f \ (\t. Ide t \ Coinitial (f t) t)" text \ The following defines the iterated application of a reduction strategy to an identity term. \ fun reduce where "reduce f a 0 = a" | "reduce f a (Suc n) = reduce f (Trg (f a)) n" lemma red_reduce: assumes "reduction_strategy f" shows "\a. Ide a \ red a (reduce f a n)" apply (induct n, auto) apply (metis Ide_iff_Src_self Ide_iff_Trg_self Ide_implies_Arr red.simps) by (metis Ide_Trg Ide_iff_Src_self assms red.intros(1) red.intros(2) reduction_strategy_def) text \ A reduction strategy is \emph{normalizing} if iterated application of it to a normalizable term eventually yields a normal form. \ definition normalizing_strategy where "normalizing_strategy f \ (\a. normalizable a \ (\n. NF (reduce f a n)))" end context reduction_paths begin text \ The following function constructs the reduction path that results by iterating the application of a reduction strategy to a term. \ fun apply_strategy where "apply_strategy f a 0 = []" | "apply_strategy f a (Suc n) = f a # apply_strategy f (\.Trg (f a)) n" lemma apply_strategy_gives_path_ind: assumes "\.reduction_strategy f" shows "\a. \\.Ide a; n > 0\ \ Arr (apply_strategy f a n) \ length (apply_strategy f a n) = n \ Src (apply_strategy f a n) = a \ Trg (apply_strategy f a n) = \.reduce f a n" proof (induct n, simp) fix n a assume ind: "\a. 
\\.Ide a; 0 < n\ \ Arr (apply_strategy f a n) \ length (apply_strategy f a n) = n \ Src (apply_strategy f a n) = a \ Trg (apply_strategy f a n) = \.reduce f a n" assume a: "\.Ide a" show "Arr (apply_strategy f a (Suc n)) \ length (apply_strategy f a (Suc n)) = Suc n \ Src (apply_strategy f a (Suc n)) = a \ Trg (apply_strategy f a (Suc n)) = \.reduce f a (Suc n)" proof (intro conjI) have 1: "\.Arr (f a) \ \.Src (f a) = a" using assms a \.reduction_strategy_def by (metis \.Ide_iff_Src_self) show "Arr (apply_strategy f a (Suc n))" using "1" Arr.elims(3) ind \.targets_char\<^sub>\ \.Ide_Trg by fastforce show "Src (apply_strategy f a (Suc n)) = a" by (simp add: "1") show "length (apply_strategy f a (Suc n)) = Suc n" by (metis "1" \.Ide_Trg One_nat_def Suc_eq_plus1 ind list.size(3) list.size(4) neq0_conv apply_strategy.simps(1) apply_strategy.simps(2)) show "Trg (apply_strategy f a (Suc n)) = \.reduce f a (Suc n)" proof (cases "apply_strategy f (\.Trg (f a)) n = []") show "apply_strategy f (\.Trg (f a)) n = [] \ ?thesis" using a 1 ind [of "\.Trg (f a)"] \.Ide_Trg \.targets_char\<^sub>\ by force assume 2: "apply_strategy f (\.Trg (f a)) n \ []" have "Trg (apply_strategy f a (Suc n)) = Trg (apply_strategy f (\.Trg (f a)) n)" using a 1 ind [of "\.Trg (f a)"] by (simp add: "2") also have "... = \.reduce f a (Suc n)" using 1 2 \.Ide_Trg ind [of "\.Trg (f a)"] by fastforce finally show ?thesis by blast qed qed qed lemma apply_strategy_gives_path: assumes "\.reduction_strategy f" and "\.Ide a" and "n > 0" shows "Arr (apply_strategy f a n)" and "length (apply_strategy f a n) = n" and "Src (apply_strategy f a n) = a" and "Trg (apply_strategy f a n) = \.reduce f a n" using assms apply_strategy_gives_path_ind by auto lemma reduce_eq_Trg_apply_strategy: assumes "\.reduction_strategy S" and "\.Ide a" shows "n > 0 \ \.reduce S a n = Trg (apply_strategy S a n)" using assms apply (induct n) apply simp_all by (metis Arr.simps(1) Trg_simp apply_strategy_gives_path_ind \.Ide_Trg \.reduce.simps(1) \.reduction_strategy_def \.trg_char neq0_conv apply_strategy.simps(1)) end subsection "Parallel Reduction" context lambda_calculus begin text \ \emph{Parallel reduction} is the strategy that contracts all available redexes at each step. \ fun parallel_strategy where "parallel_strategy \<^bold>\i\<^bold>\ = \<^bold>\i\<^bold>\" | "parallel_strategy \<^bold>\\<^bold>[t\<^bold>] = \<^bold>\\<^bold>[parallel_strategy t\<^bold>]" | "parallel_strategy (\<^bold>\\<^bold>[t\<^bold>] \<^bold>\ u) = \<^bold>\\<^bold>[parallel_strategy t\<^bold>] \<^bold>\ parallel_strategy u" | "parallel_strategy (t \<^bold>\ u) = parallel_strategy t \<^bold>\ parallel_strategy u" | "parallel_strategy (\<^bold>\\<^bold>[t\<^bold>] \<^bold>\ u) = \<^bold>\\<^bold>[parallel_strategy t\<^bold>] \<^bold>\ parallel_strategy u" | "parallel_strategy \<^bold>\ = \<^bold>\" lemma parallel_strategy_is_reduction_strategy: shows "reduction_strategy parallel_strategy" proof (unfold reduction_strategy_def, intro allI impI) fix t show "Ide t \ Coinitial (parallel_strategy t) t" using Ide_implies_Arr apply (induct t, auto) by force+ qed lemma parallel_strategy_Src_eq: shows "Arr t \ parallel_strategy (Src t) = parallel_strategy t" by (induct t) auto lemma subs_parallel_strategy_Src: shows "Arr t \ t \ parallel_strategy (Src t)" by (induct t) auto end context reduction_paths begin text \ Parallel reduction is a universal strategy in the sense that every reduction path is \\<^sup>*\\<^sup>*\-below the path generated by the parallel reduction strategy. 
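The single-step case conveys the basic idea: by subs_parallel_strategy_Src and subs_implies_prfx, any reduction is a prefix of the parallel-reduction step from its source, because that step marks every redex of the source, including whatever redexes the given reduction marks. Informally, a step from $(\lambda z.\,z)\,((\lambda z.\,z)\,x)$ that contracts only the outer redex is a prefix of the parallel step that contracts both. The lemma below extends this observation, one step at a time, to reduction paths of arbitrary length. 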
\ lemma parallel_strategy_is_universal: shows "\U. \n > 0; n \ length U; Arr U\ \ take n U \<^sup>*\\<^sup>* apply_strategy \.parallel_strategy (Src U) n" proof (induct n, simp) fix n a and U :: "\.lambda list" assume n: "Suc n \ length U" assume U: "Arr U" assume ind: "\U. \0 < n; n \ length U; Arr U\ \ take n U \<^sup>*\\<^sup>* apply_strategy \.parallel_strategy (Src U) n" have 1: "take (Suc n) U = hd U # take n (tl U)" by (metis U Arr.simps(1) take_Suc) have 2: "hd U \ \.parallel_strategy (Src U)" by (metis Arr_imp_arr_hd Con_single_ideI(2) Resid_Arr_Src Src_resid Srcs_simp\<^sub>\\<^sub>P Trg.simps(2) U \.source_is_ide \.trg_ide empty_set \.arr_char \.sources_char\<^sub>\ \.subs_parallel_strategy_Src list.set_intros(1) list.simps(15)) show "take (Suc n) U \<^sup>*\\<^sup>* apply_strategy \.parallel_strategy (Src U) (Suc n)" proof (cases "apply_strategy \.parallel_strategy (Src U) (Suc n)") show "apply_strategy \.parallel_strategy (Src U) (Suc n) = [] \ take (Suc n) U \<^sup>*\\<^sup>* apply_strategy \.parallel_strategy (Src U) (Suc n)" by simp fix v V assume 3: "apply_strategy \.parallel_strategy (Src U) (Suc n) = v # V" show "take (Suc n) U \<^sup>*\\<^sup>* apply_strategy \.parallel_strategy (Src U) (Suc n)" proof (cases "V = []") show "V = [] \ ?thesis" using 1 2 3 ind ide_char by (metis Suc_inject Ide.simps(2) Resid.simps(3) list.discI list.inject \.prfx_implies_con apply_strategy.elims \.subs_implies_prfx take0) assume V: "V \ []" have 4: "Arr (v # V)" using 3 apply_strategy_gives_path(1) by (metis Arr_imp_arr_hd Srcs_simp\<^sub>P\<^sub>W\<^sub>E Srcs_simp\<^sub>\\<^sub>P U \.Ide_Src \.arr_iff_has_target \.parallel_strategy_is_reduction_strategy \.targets_char\<^sub>\ singleton_insert_inj_eq' zero_less_Suc) have 5: "Arr (hd U # take n (tl U))" by (metis 1 U Arr_append_iff\<^sub>P id_take_nth_drop list.discI not_less take_all_iff) have 6: "Srcs (hd U # take n (tl U)) = Srcs (v # V)" by (metis 2 3 \.Coinitial_iff_Con \.Ide.simps(1) Srcs.simps(2) Srcs.simps(3) \.ide_char list.exhaust_sel list.inject apply_strategy.simps(2) \.sources_char\<^sub>\ \.subs_implies_prfx) have "take (Suc n) U \<^sup>*\\\<^sup>* apply_strategy \.parallel_strategy (Src U) (Suc n) = [hd U \\ v] \<^sup>*\\\<^sup>* V @ (take n (tl U) \<^sup>*\\\<^sup>* [v \\ hd U]) \<^sup>*\\\<^sup>* (V \<^sup>*\\\<^sup>* [hd U \\ v])" using U V 1 3 4 5 6 by (metis Resid.simps(1) Resid_cons(1) Resid_rec(3-4) confluence_ind) moreover have "Ide ..." proof have 7: "v = \.parallel_strategy (Src U) \ V = apply_strategy \.parallel_strategy (Src U \\ v) n" using 3 \.subs_implies_prfx \.subs_parallel_strategy_Src apply simp by (metis (full_types) \.Coinitial_iff_Con \.Ide.simps(1) \.Trg.simps(5) \.parallel_strategy.simps(9) \.resid_Src_Arr) show 8: "Ide ([hd U \\ v] \<^sup>*\\\<^sup>* V)" by (metis 2 4 5 6 7 V Con_initial_left Ide.simps(2) confluence_ind Con_rec(3) Resid_Ide_Arr_ind \.subs_implies_prfx) show 9: "Ide ((take n (tl U) \<^sup>*\\\<^sup>* [v \\ hd U]) \<^sup>*\\\<^sup>* (V \<^sup>*\\\<^sup>* [hd U \\ v]))" proof - have 10: "\.Ide (hd U \\ v)" using 2 7 \.ide_char \.subs_implies_prfx by presburger have 11: "V = apply_strategy \.parallel_strategy (\.Trg v) n" using 3 by auto have "(take n (tl U) \<^sup>*\\\<^sup>* [v \\ hd U]) \<^sup>*\\\<^sup>* (V \<^sup>*\\\<^sup>* [hd U \\ v]) = (take n (tl U) \<^sup>*\\\<^sup>* [v \\ hd U]) \<^sup>*\\\<^sup>* apply_strategy \.parallel_strategy (\.Trg v) n" by (metis 8 10 11 Ide.simps(1) Resid_single_ide(2) \.prfx_char) moreover have "Ide ..." 
proof - have "Ide (take n (take n (tl U) \<^sup>*\\\<^sup>* [v \\ hd U]) \<^sup>*\\\<^sup>* apply_strategy \.parallel_strategy (\.Trg v) n)" proof - have "0 < n" proof - have "length V = n" using apply_strategy_gives_path by (metis 10 11 V \.Coinitial_iff_Con \.Ide_Trg \.Arr_not_Nil \.Ide_implies_Arr \.parallel_strategy_is_reduction_strategy neq0_conv apply_strategy.simps(1)) thus ?thesis using V by blast qed moreover have "n \ length (take n (tl U) \<^sup>*\\\<^sup>* [v \\ hd U])" proof - have "length (take n (tl U)) = n" using n by force thus ?thesis using n U length_Resid [of "take n (tl U)" "[v \\ hd U]"] by (metis 4 5 6 Arr.simps(1) Con_cons(2) Con_rec(2) confluence_ind dual_order.eq_iff) qed moreover have "\.Trg v = Src (take n (tl U) \<^sup>*\\\<^sup>* [v \\ hd U])" proof - have "Src (take n (tl U) \<^sup>*\\\<^sup>* [v \\ hd U]) = Trg [v \\ hd U]" by (metis Src_resid calculation(1-2) linorder_not_less list.size(3)) also have "... = \.Trg v" by (metis 10 Trg.simps(2) \.Arr_not_Nil \.apex_sym \.trg_ide \.Ide_iff_Src_self \.Ide_implies_Arr \.Src_resid \.prfx_char) finally show ?thesis by simp qed ultimately show ?thesis using ind [of "Resid (take n (tl U)) [\.resid v (hd U)]"] ide_char by (metis Con_imp_Arr_Resid le_zero_eq less_not_refl list.size(3)) qed moreover have "take n (take n (tl U) \<^sup>*\\\<^sup>* [v \\ hd U]) = take n (tl U) \<^sup>*\\\<^sup>* [v \\ hd U]" proof - have "Arr (take n (tl U) \<^sup>*\\\<^sup>* [v \\ hd U])" by (metis Con_imp_Arr_Resid Con_implies_Arr(1) Ide.simps(1) calculation take_Nil) thus ?thesis by (metis 1 Arr.simps(1) length_Resid dual_order.eq_iff length_Cons length_take min.absorb2 n old.nat.inject take_all) qed ultimately show ?thesis by simp qed ultimately show ?thesis by auto qed show "Trg ([hd U \\ v] \<^sup>*\\\<^sup>* V) = Src ((take n (tl U) \<^sup>*\\\<^sup>* [v \\ hd U]) \<^sup>*\\\<^sup>* (V \<^sup>*\\\<^sup>* [hd U \\ v]))" by (metis 9 Ide.simps(1) Src_resid Trg_resid_sym) qed ultimately show ?thesis using ide_char by presburger qed qed qed end context lambda_calculus begin text \ Parallel reduction is a normalizing strategy. \ lemma parallel_strategy_is_normalizing: shows "normalizing_strategy parallel_strategy" proof - interpret \x: reduction_paths . (* TODO: Notation is not inherited here. *) have "\a. normalizable a \ \n. NF (reduce parallel_strategy a n)" proof - fix a assume 1: "normalizable a" obtain U b where U: "\x.Arr U \ \x.Src U = a \ \x.Trg U = b \ NF b" using 1 normalizable_def \x.red_iff by blast have 2: "\n. \0 < n; n \ length U\ \ \x.Ide (\x.Resid (take n U) (\x.apply_strategy parallel_strategy a n))" using U \x.parallel_strategy_is_universal \x.ide_char by blast let ?PR = "\x.apply_strategy parallel_strategy a (length U)" have "\x.Trg ?PR = b" proof - have 3: "\x.Ide (\x.Resid U ?PR)" using U 2 [of "length U"] by force have "\x.Trg (\x.Resid ?PR U) = b" by (metis "3" NF_reduct_is_trivial U \x.Con_imp_Arr_Resid \x.Con_sym \x.Ide.simps(1) \x.Src_resid reduction_paths.red_iff) thus ?thesis by (metis 3 \x.Con_Arr_self \x.Ide_implies_Arr \x.Resid_Arr_Ide_ind \x.Src_resid \x.Trg_resid_sym) qed hence "reduce parallel_strategy a (length U) = b" using 1 U by (metis \x.Arr.simps(1) length_greater_0_conv normalizable_def \x.apply_strategy_gives_path(4) parallel_strategy_is_reduction_strategy) thus "\n. 
NF (reduce parallel_strategy a n)" using U by blast qed thus ?thesis using normalizing_strategy_def by blast qed text \ An alternative characterization of a normal form is a term on which the parallel reduction strategy yields an identity. \ abbreviation has_redex where "has_redex t \ Arr t \ \ Ide (parallel_strategy t)" lemma NF_iff_has_no_redex: shows "Arr t \ NF t \ \ has_redex t" proof (induct t) show "Arr \<^bold>\ \ NF \<^bold>\ \ \ has_redex \<^bold>\" using NF_def by simp show "\x. Arr \<^bold>\x\<^bold>\ \ NF \<^bold>\x\<^bold>\ \ \ has_redex \<^bold>\x\<^bold>\" using NF_def by force show "\t. \Arr t \ NF t \ \ has_redex t; Arr \<^bold>\\<^bold>[t\<^bold>]\ \ NF \<^bold>\\<^bold>[t\<^bold>] \ \ has_redex \<^bold>\\<^bold>[t\<^bold>]" proof - fix t assume ind: "Arr t \ NF t \ \ has_redex t" assume t: "Arr \<^bold>\\<^bold>[t\<^bold>]" show "NF \<^bold>\\<^bold>[t\<^bold>] \ \ has_redex \<^bold>\\<^bold>[t\<^bold>]" proof show "NF \<^bold>\\<^bold>[t\<^bold>] \ \ has_redex \<^bold>\\<^bold>[t\<^bold>]" using t ind by (metis NF_def Arr.simps(3) Ide.simps(3) Src.simps(3) parallel_strategy.simps(2)) show "\ has_redex \<^bold>\\<^bold>[t\<^bold>] \ NF \<^bold>\\<^bold>[t\<^bold>]" using t ind by (metis NF_def ide_backward_stable ide_char parallel_strategy_Src_eq subs_implies_prfx subs_parallel_strategy_Src) qed qed show "\t1 t2. \Arr t1 \ NF t1 \ \ has_redex t1; Arr t2 \ NF t2 \ \ has_redex t2; Arr (\<^bold>\\<^bold>[t1\<^bold>] \<^bold>\ t2)\ \ NF (\<^bold>\\<^bold>[t1\<^bold>] \<^bold>\ t2) \ \ has_redex (\<^bold>\\<^bold>[t1\<^bold>] \<^bold>\ t2)" using NF_def Ide.simps(5) parallel_strategy.simps(8) by presburger show "\t1 t2. \Arr t1 \ NF t1 \ \ has_redex t1; Arr t2 \ NF t2 \ \ has_redex t2; Arr (t1 \<^bold>\ t2)\ \ NF (t1 \<^bold>\ t2) \ \ has_redex (t1 \<^bold>\ t2)" proof - fix t1 t2 assume ind1: "Arr t1 \ NF t1 \ \ has_redex t1" assume ind2: "Arr t2 \ NF t2 \ \ has_redex t2" assume t: "Arr (t1 \<^bold>\ t2)" show "NF (t1 \<^bold>\ t2) \ \ has_redex (t1 \<^bold>\ t2)" using t ind1 ind2 NF_def apply (intro iffI) apply (metis Ide_iff_Src_self parallel_strategy_is_reduction_strategy reduction_strategy_def) apply (cases t1) apply simp_all apply (metis Ide_iff_Src_self ide_char parallel_strategy.simps(1,5) parallel_strategy_is_reduction_strategy reduction_strategy_def resid_Arr_Src subs_implies_prfx subs_parallel_strategy_Src) by (metis Ide_iff_Src_self ide_char ind1 Arr.simps(4) parallel_strategy.simps(6) parallel_strategy_is_reduction_strategy reduction_strategy_def resid_Arr_Src subs_implies_prfx subs_parallel_strategy_Src) qed qed lemma (in lambda_calculus) not_NF_elim: assumes "\ NF t" and "Ide t" obtains u where "coinitial t u \ \ Ide u" using assms NF_def by auto lemma (in lambda_calculus) NF_Lam_iff: shows "NF \<^bold>\\<^bold>[t\<^bold>] \ NF t" using NF_def by (metis Ide_implies_Arr NF_iff_has_no_redex Ide.simps(3) parallel_strategy.simps(2)) lemma (in lambda_calculus) NF_App_iff: shows "NF (t1 \<^bold>\ t2) \ \ is_Lam t1 \ NF t1 \ NF t2" proof - have "\ NF (t1 \<^bold>\ t2) \ is_Lam t1 \ \ NF t1 \ \ NF t2" apply (cases "is_Lam t1") apply simp_all apply (cases t1) apply simp_all using NF_def Ide.simps(1) apply presburger apply (metis Ide_implies_Arr NF_def NF_iff_has_no_redex Ide.simps(4) parallel_strategy.simps(5)) apply (metis Ide_implies_Arr NF_def NF_iff_has_no_redex Ide.simps(4) parallel_strategy.simps(6)) using NF_def Ide.simps(5) by presburger moreover have "is_Lam t1 \ \ NF t1 \ \ NF t2 \ \ NF (t1 \<^bold>\ t2)" proof - have "is_Lam t1 \ \NF (t1 \<^bold>\ t2)" by 
(metis Ide_implies_Arr NF_def NF_iff_has_no_redex Ide.simps(5) lambda.collapse(2) parallel_strategy.simps(3,8)) moreover have "\ NF t1 \ \NF (t1 \<^bold>\ t2)" using NF_def Ide_iff_Src_self Ide_implies_Arr apply auto by (metis (full_types) Arr.simps(4) Ide.simps(4) Src.simps(4)) moreover have "\ NF t2 \ \NF (t1 \<^bold>\ t2)" using NF_def Ide_iff_Src_self Ide_implies_Arr apply auto by (metis (full_types) Arr.simps(4) Ide.simps(4) Src.simps(4)) ultimately show "is_Lam t1 \ \ NF t1 \ \ NF t2 \ \ NF (t1 \<^bold>\ t2)" by auto qed ultimately show ?thesis by blast qed subsection "Head Reduction" text \ \emph{Head reduction} is the strategy that only contracts a redex at the ``head'' position, which is found at the end of the ``left spine'' of applications, and does nothing if there is no such redex. The following function applies to an arbitrary arrow \t\, and it marks the redex at the head position, if any, otherwise it yields \Src t\. \ fun head_strategy where "head_strategy \<^bold>\i\<^bold>\ = \<^bold>\i\<^bold>\" | "head_strategy \<^bold>\\<^bold>[t\<^bold>] = \<^bold>\\<^bold>[head_strategy t\<^bold>]" | "head_strategy (\<^bold>\\<^bold>[t\<^bold>] \<^bold>\ u) = \<^bold>\\<^bold>[Src t\<^bold>] \<^bold>\ Src u" | "head_strategy (t \<^bold>\ u) = head_strategy t \<^bold>\ Src u" | "head_strategy (\<^bold>\\<^bold>[t\<^bold>] \<^bold>\ u) = \<^bold>\\<^bold>[Src t\<^bold>] \<^bold>\ Src u" | "head_strategy \<^bold>\ = \<^bold>\" lemma Arr_head_strategy: shows "Arr t \ Arr (head_strategy t)" apply (induct t) apply auto proof - fix t u assume ind: "Arr (head_strategy t)" assume t: "Arr t" and u: "Arr u" show "Arr (head_strategy (t \<^bold>\ u))" using t u ind by (cases t) auto qed lemma Src_head_strategy: shows "Arr t \ Src (head_strategy t) = Src t" apply (induct t) apply auto proof - fix t u assume ind: "Src (head_strategy t) = Src t" assume t: "Arr t" and u: "Arr u" have "Src (head_strategy (t \<^bold>\ u)) = Src (head_strategy t \<^bold>\ Src u)" using t ind by (cases t) auto also have "... = Src t \<^bold>\ Src u" using t u ind by auto finally show "Src (head_strategy (t \<^bold>\ u)) = Src t \<^bold>\ Src u" by simp qed lemma Con_head_strategy: shows "Arr t \ Con t (head_strategy t)" apply (induct t) apply auto apply (simp add: Arr_head_strategy Src_head_strategy) using Arr_Subst Arr_not_Nil by auto lemma head_strategy_Src: shows "Arr t \ head_strategy (Src t) = head_strategy t" apply (induct t) apply auto using Arr.elims(2) by fastforce lemma head_strategy_is_elementary: shows "\Arr t; \ Ide (head_strategy t)\ \ elementary_reduction (head_strategy t)" using Ide_Src apply (induct t) apply auto proof - fix t1 t2 assume t1: "Arr t1" and t2: "Arr t2" assume t: "\ Ide (head_strategy (t1 \<^bold>\ t2))" assume 1: "\ Ide (head_strategy t1) \ elementary_reduction (head_strategy t1)" assume 2: "\ Ide (head_strategy t2) \ elementary_reduction (head_strategy t2)" show "elementary_reduction (head_strategy (t1 \<^bold>\ t2))" using t t1 t2 1 2 Ide_Src Ide_implies_Arr by (cases t1) auto qed lemma head_strategy_is_reduction_strategy: shows "reduction_strategy head_strategy" proof (unfold reduction_strategy_def, intro allI impI) fix t show "Ide t \ Coinitial (head_strategy t) t" proof (induct t) show "Ide \<^bold>\ \ Coinitial (head_strategy \<^bold>\) \<^bold>\" by simp show "\x. Ide \<^bold>\x\<^bold>\ \ Coinitial (head_strategy \<^bold>\x\<^bold>\) \<^bold>\x\<^bold>\" by simp show "\t. 
\Ide t \ Coinitial (head_strategy t) t; Ide \<^bold>\\<^bold>[t\<^bold>]\ \ Coinitial (head_strategy \<^bold>\\<^bold>[t\<^bold>]) \<^bold>\\<^bold>[t\<^bold>]" by simp fix t1 t2 assume ind1: "Ide t1 \ Coinitial (head_strategy t1) t1" assume ind2: "Ide t2 \ Coinitial (head_strategy t2) t2" assume t: "Ide (t1 \<^bold>\ t2)" show "Coinitial (head_strategy (t1 \<^bold>\ t2)) (t1 \<^bold>\ t2)" using t ind1 Ide_implies_Arr Ide_iff_Src_self by (cases t1) simp_all next fix t1 t2 assume ind1: "Ide t1 \ Coinitial (head_strategy t1) t1" assume ind2: "Ide t2 \ Coinitial (head_strategy t2) t2" assume t: "Ide (\<^bold>\\<^bold>[t1\<^bold>] \<^bold>\ t2)" show "Coinitial (head_strategy (\<^bold>\\<^bold>[t1\<^bold>] \<^bold>\ t2)) (\<^bold>\\<^bold>[t1\<^bold>] \<^bold>\ t2)" using t by auto qed qed text \ The following function tests whether a term is an elementary reduction of the head redex. \ fun is_head_reduction where "is_head_reduction \<^bold>\_\<^bold>\ \ False" | "is_head_reduction \<^bold>\\<^bold>[t\<^bold>] \ is_head_reduction t" | "is_head_reduction (\<^bold>\\<^bold>[_\<^bold>] \<^bold>\ _) \ False" | "is_head_reduction (t \<^bold>\ u) \ is_head_reduction t \ Ide u" | "is_head_reduction (\<^bold>\\<^bold>[t\<^bold>] \<^bold>\ u) \ Ide t \ Ide u" | "is_head_reduction \<^bold>\ \ False" lemma is_head_reduction_char: shows "is_head_reduction t \ elementary_reduction t \ head_strategy (Src t) = t" apply (induct t) apply simp_all proof - fix t1 t2 assume ind: "is_head_reduction t1 \ elementary_reduction t1 \ head_strategy (Src t1) = t1" show "is_head_reduction (t1 \<^bold>\ t2) \ (elementary_reduction t1 \ Ide t2 \ Ide t1 \ elementary_reduction t2) \ head_strategy (Src t1 \<^bold>\ Src t2) = t1 \<^bold>\ t2" using ind Ide_implies_Arr Ide_iff_Src_self Ide_Src elementary_reduction_not_ide ide_char apply (cases t1) apply simp_all apply (metis Ide_Src arr_char elementary_reduction_is_arr) apply (metis Ide_Src arr_char elementary_reduction_is_arr) by metis next fix t1 t2 show "Ide t1 \ Ide t2 \ Ide t1 \ Ide t2 \ Src (Src t1) = t1 \ Src (Src t2) = t2" by (metis Ide_iff_Src_self Ide_implies_Arr) qed lemma is_head_reductionI: assumes "Arr t" and "elementary_reduction t" and "head_strategy (Src t) = t" shows "is_head_reduction t" using assms is_head_reduction_char by blast text \ The following function tests whether a redex in the head position of a term is marked. \ fun contains_head_reduction where "contains_head_reduction \<^bold>\_\<^bold>\ \ False" | "contains_head_reduction \<^bold>\\<^bold>[t\<^bold>] \ contains_head_reduction t" | "contains_head_reduction (\<^bold>\\<^bold>[_\<^bold>] \<^bold>\ _) \ False" | "contains_head_reduction (t \<^bold>\ u) \ contains_head_reduction t \ Arr u" | "contains_head_reduction (\<^bold>\\<^bold>[t\<^bold>] \<^bold>\ u) \ Arr t \ Arr u" | "contains_head_reduction \<^bold>\ \ False" lemma is_head_reduction_imp_contains_head_reduction: shows "is_head_reduction t \ contains_head_reduction t" using Ide_implies_Arr apply (induct t) apply auto proof - fix t1 t2 assume ind1: "is_head_reduction t1 \ contains_head_reduction t1" assume ind2: "is_head_reduction t2 \ contains_head_reduction t2" assume t: "is_head_reduction (t1 \<^bold>\ t2)" show "contains_head_reduction (t1 \<^bold>\ t2)" using t ind1 ind2 Ide_implies_Arr by (cases t1) auto qed text \ An \emph{internal reduction} is one that does not contract any redex at the head position. 
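For example (informally, in named-variable notation), in a term of the form $(\lambda x.\,M)\,N\,P$ the head redex is $(\lambda x.\,M)\,N$, the redex found by following the left spine of applications. A reduction that contracts only redexes lying properly inside $M$, $N$, or $P$ is internal, whereas any reduction contracting the displayed redex itself contains a head reduction; the lemma is_internal_reduction_iff below makes this dichotomy precise for arbitrary arrows. 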
\ fun is_internal_reduction where "is_internal_reduction \<^bold>\_\<^bold>\ \ True" | "is_internal_reduction \<^bold>\\<^bold>[t\<^bold>] \ is_internal_reduction t" | "is_internal_reduction (\<^bold>\\<^bold>[t\<^bold>] \<^bold>\ u) \ Arr t \ Arr u" | "is_internal_reduction (t \<^bold>\ u) \ is_internal_reduction t \ Arr u" | "is_internal_reduction (\<^bold>\\<^bold>[_\<^bold>] \<^bold>\ _) \ False" | "is_internal_reduction \<^bold>\ \ False" lemma is_internal_reduction_iff: shows "is_internal_reduction t \ Arr t \ \ contains_head_reduction t" apply (induct t) apply simp_all proof - fix t1 t2 assume ind1: "is_internal_reduction t1 \ Arr t1 \ \ contains_head_reduction t1" assume ind2: "is_internal_reduction t2 \ Arr t2 \ \ contains_head_reduction t2" show "is_internal_reduction (t1 \<^bold>\ t2) \ Arr t1 \ Arr t2 \ \ contains_head_reduction (t1 \<^bold>\ t2)" using ind1 ind2 apply (cases t1) apply simp_all by blast qed text \ Head reduction steps are either \\\-prefixes of, or are preserved by, residuation along arbitrary reductions. \ lemma is_head_reduction_resid: shows "\u. \is_head_reduction t; Arr u; Src t = Src u\ \ t \ u \ is_head_reduction (t \\ u)" proof (induct t) show "\u. \is_head_reduction \<^bold>\; Arr u; Src \<^bold>\ = Src u\ \ \<^bold>\ \ u \ is_head_reduction (\<^bold>\ \\ u)" by auto show "\x u. \is_head_reduction \<^bold>\x\<^bold>\; Arr u; Src \<^bold>\x\<^bold>\ = Src u\ \ \<^bold>\x\<^bold>\ \ u \ is_head_reduction (\<^bold>\x\<^bold>\ \\ u)" by auto fix t u assume ind: "\u. \is_head_reduction t; Arr u; Src t = Src u\ \ t \ u \ is_head_reduction (t \\ u)" assume t: "is_head_reduction \<^bold>\\<^bold>[t\<^bold>]" assume u: "Arr u" assume tu: "Src \<^bold>\\<^bold>[t\<^bold>] = Src u" have 1: "Arr t" by (metis Arr_head_strategy head_strategy_Src is_head_reduction_char Arr.simps(3) t tu u) show " \<^bold>\\<^bold>[t\<^bold>] \ u \ is_head_reduction (\<^bold>\\<^bold>[t\<^bold>] \\ u)" using t u tu 1 ind by (cases u) auto next fix t1 t2 u assume ind1: "\u1. \is_head_reduction t1; Arr u1; Src t1 = Src u1\ \ t1 \ u1 \ is_head_reduction (t1 \\ u1)" assume ind2: "\u2. \is_head_reduction t2; Arr u2; Src t2 = Src u2\ \ t2 \ u2 \ is_head_reduction (t2 \\ u2)" assume t: "is_head_reduction (\<^bold>\\<^bold>[t1\<^bold>] \<^bold>\ t2)" assume u: "Arr u" assume tu: "Src (\<^bold>\\<^bold>[t1\<^bold>] \<^bold>\ t2) = Src u" show "\<^bold>\\<^bold>[t1\<^bold>] \<^bold>\ t2 \ u \ is_head_reduction ((\<^bold>\\<^bold>[t1\<^bold>] \<^bold>\ t2) \\ u)" using t u tu ind1 ind2 Coinitial_iff_Con Ide_implies_Arr ide_char resid_Ide_Arr Ide_Subst by (cases u; cases "un_App1 u") auto next fix t1 t2 u assume ind1: "\u1. \is_head_reduction t1; Arr u1; Src t1 = Src u1\ \ t1 \ u1 \ is_head_reduction (t1 \\ u1)" assume ind2: "\u2. 
\is_head_reduction t2; Arr u2; Src t2 = Src u2\ \ t2 \ u2 \ is_head_reduction (t2 \\ u2)" assume t: "is_head_reduction (t1 \<^bold>\ t2)" assume u: "Arr u" assume tu: "Src (t1 \<^bold>\ t2) = Src u" have "Arr (t1 \<^bold>\ t2)" using is_head_reduction_char elementary_reduction_is_arr t by blast hence t1: "Arr t1" and t2: "Arr t2" by auto have 0: "\ is_Lam t1" using t is_Lam_def by fastforce have 1: "is_head_reduction t1" using t t1 by force show "t1 \<^bold>\ t2 \ u \ is_head_reduction ((t1 \<^bold>\ t2) \\ u) " proof - have "\ Ide ((t1 \<^bold>\ t2) \\ u) \ is_head_reduction ((t1 \<^bold>\ t2) \\ u)" proof (intro is_head_reductionI) assume 2: "\ Ide ((t1 \<^bold>\ t2) \\ u)" have 3: "is_App u \ \ Ide (t1 \\ un_App1 u) \ \ Ide (t2 \\ un_App2 u)" by (metis "2" ide_char lambda.collapse(3) lambda.discI(3) lambda.sel(3-4) prfx_App_iff) have 4: "is_Beta u \ \ Ide (t1 \\ un_Beta1 u) \ \ Ide (t2 \\ un_Beta2 u)" using u tu 2 by (metis "0" ConI Con_implies_is_Lam_iff_is_Lam \Arr (t1 \<^bold>\ t2)\ ConD(4) lambda.collapse(4) lambda.disc(8)) show 5: "Arr ((t1 \<^bold>\ t2) \\ u)" using Arr_resid \Arr (t1 \<^bold>\ t2)\ tu u by auto show "head_strategy (Src ((t1 \<^bold>\ t2) \\ u)) = (t1 \<^bold>\ t2) \\ u" proof (cases u) show "u = \<^bold>\ \ head_strategy (Src ((t1 \<^bold>\ t2) \\ u)) = (t1 \<^bold>\ t2) \\ u" by simp show "\x. u = \<^bold>\x\<^bold>\ \ head_strategy (Src ((t1 \<^bold>\ t2) \\ u)) = (t1 \<^bold>\ t2) \\ u" by auto show "\v. u = \<^bold>\\<^bold>[v\<^bold>] \ head_strategy (Src ((t1 \<^bold>\ t2) \\ u)) = (t1 \<^bold>\ t2) \\ u" by simp show "\u1 u2. u = \<^bold>\\<^bold>[u1\<^bold>] \<^bold>\ u2 \ head_strategy (Src ((t1 \<^bold>\ t2) \\ u)) = (t1 \<^bold>\ t2) \\ u" by (metis "0" "5" Arr_not_Nil ConD(4) Con_implies_is_Lam_iff_is_Lam lambda.disc(8)) show "\u1 u2. u = App u1 u2 \ head_strategy (Src ((t1 \<^bold>\ t2) \\ u)) = (t1 \<^bold>\ t2) \\ u" proof - fix u1 u2 assume u1u2: "u = u1 \<^bold>\ u2" have "head_strategy (Src ((t1 \<^bold>\ t2) \\ u)) = head_strategy (Src (t1 \\ u1) \<^bold>\ Src (t2 \\ u2))" using u u1u2 tu t1 t2 Coinitial_iff_Con by auto also have "... = head_strategy (Trg u1 \<^bold>\ Trg u2)" using 5 u1u2 Src_resid by (metis Arr_not_Nil ConD(1)) also have "... = (t1 \<^bold>\ t2) \\ u" proof (cases "Trg u1") show "Trg u1 = \<^bold>\ \ head_strategy (Trg u1 \<^bold>\ Trg u2) = (t1 \<^bold>\ t2) \\ u" using Arr_not_Nil u u1u2 by force show "\x. Trg u1 = \<^bold>\x\<^bold>\ \ head_strategy (Trg u1 \<^bold>\ Trg u2) = (t1 \<^bold>\ t2) \\ u" using tu t u t1 t2 u1u2 Arr_not_Nil Ide_iff_Src_self by (cases u1; cases t1) auto show "\v. Trg u1 = \<^bold>\\<^bold>[v\<^bold>] \ head_strategy (Trg u1 \<^bold>\ Trg u2) = (t1 \<^bold>\ t2) \\ u" using tu t u t1 t2 u1u2 Arr_not_Nil Ide_iff_Src_self apply (cases u1; cases t1) apply auto by (metis 2 5 Src_resid Trg.simps(3-4) resid.simps(3-4) resid_Src_Arr) show "\u11 u12. Trg u1 = u11 \<^bold>\ u12 \ head_strategy (Trg u1 \<^bold>\ Trg u2) = (t1 \<^bold>\ t2) \\ u" proof - fix u11 u12 assume u1: "Trg u1 = u11 \<^bold>\ u12" show "head_strategy (Trg u1 \<^bold>\ Trg u2) = (t1 \<^bold>\ t2) \\ u" proof (cases "Trg u1") show "Trg u1 = \<^bold>\ \ ?thesis" using u1 by simp show "\x. Trg u1 = \<^bold>\x\<^bold>\ \ ?thesis" apply simp using u1 by force show "\v. Trg u1 = \<^bold>\\<^bold>[v\<^bold>] \ ?thesis" using u1 by simp show "\u11 u12. 
Trg u1 = u11 \<^bold>\ u12 \ ?thesis" using t u tu u1u2 1 2 ind1 elementary_reduction_not_ide is_head_reduction_char Src_resid Ide_iff_Src_self \Arr (t1 \<^bold>\ t2)\ Coinitial_iff_Con by fastforce show "\u11 u12. Trg u1 = \<^bold>\\<^bold>[u11\<^bold>] \<^bold>\ u12 \ ?thesis" using u1 by simp qed qed show "\u11 u12. Trg u1 = \<^bold>\\<^bold>[u11\<^bold>] \<^bold>\ u12 \ ?thesis" using u1u2 u Ide_Trg by fastforce qed finally show "head_strategy (Src ((t1 \<^bold>\ t2) \\ u)) = (t1 \<^bold>\ t2) \\ u" by simp qed qed thus "elementary_reduction ((t1 \<^bold>\ t2) \\ u)" by (metis 2 5 Ide_Src Ide_implies_Arr head_strategy_is_elementary) qed thus ?thesis by blast qed qed text \ Internal reductions are closed under residuation. \ lemma is_internal_reduction_resid: shows "\u. \is_internal_reduction t; is_internal_reduction u; Src t = Src u\ \ is_internal_reduction (t \\ u)" apply (induct t) apply auto apply (metis Con_implies_Arr2 con_char weak_extensionality Arr.simps(2) Src.simps(2) parallel_strategy.simps(1) prfx_implies_con resid_Arr_Src subs_Ide subs_implies_prfx subs_parallel_strategy_Src) proof - fix t u assume ind: "\u. \is_internal_reduction u; Src t = Src u\ \ is_internal_reduction (t \\ u)" assume t: "is_internal_reduction t" assume u: "is_internal_reduction u" assume tu: "\<^bold>\\<^bold>[Src t\<^bold>] = Src u" show "is_internal_reduction (\<^bold>\\<^bold>[t\<^bold>] \\ u)" using t u tu ind apply (cases u) by auto fastforce next fix t1 t2 u assume ind1: "\u. \is_internal_reduction t1; is_internal_reduction u; Src t1 = Src u\ \ is_internal_reduction (t1 \\ u)" assume t: "is_internal_reduction (t1 \<^bold>\ t2)" assume u: "is_internal_reduction u" assume tu: "Src t1 \<^bold>\ Src t2 = Src u" show "is_internal_reduction ((t1 \<^bold>\ t2) \\ u)" using t u tu ind1 Coinitial_resid_resid Coinitial_iff_Con Arr_Src is_internal_reduction_iff apply auto apply (metis Arr.simps(4) Src.simps(4)) proof - assume t1: "Arr t1" and t2: "Arr t2" and u: "Arr u" assume tu: "Src t1 \<^bold>\ Src t2 = Src u" assume 1: "\ contains_head_reduction u" assume 2: "\ contains_head_reduction (t1 \<^bold>\ t2)" assume 3: "contains_head_reduction ((t1 \<^bold>\ t2) \\ u)" show False using t1 t2 u tu 1 2 3 is_internal_reduction_iff apply (cases u) apply simp_all apply (cases t1; cases "un_App1 u") apply simp_all by (metis Coinitial_iff_Con ind1 Arr.simps(4) Src.simps(4) resid.simps(3)) qed qed text \ A head reduction is preserved by residuation along an internal reduction, so a head reduction can only be canceled by a transition that contains a head reduction. \ lemma is_head_reduction_resid': shows "\u. \is_head_reduction t; is_internal_reduction u; Src t = Src u\ \ is_head_reduction (t \\ u)" proof (induct t) show "\u. \is_head_reduction \<^bold>\; is_internal_reduction u; Src \<^bold>\ = Src u\ \ is_head_reduction (\<^bold>\ \\ u)" by simp show "\x u. \is_head_reduction \<^bold>\x\<^bold>\; is_internal_reduction u; Src \<^bold>\x\<^bold>\ = Src u\ \ is_head_reduction (\<^bold>\x\<^bold>\ \\ u)" by simp show "\t. \\u. \is_head_reduction t; is_internal_reduction u; Src t = Src u\ \ is_head_reduction (t \\ u); is_head_reduction \<^bold>\\<^bold>[t\<^bold>]; is_internal_reduction u; Src \<^bold>\\<^bold>[t\<^bold>] = Src u\ \ is_head_reduction (\<^bold>\\<^bold>[t\<^bold>] \\ u)" for u by (cases u, simp_all) fastforce fix t1 t2 u assume ind1: "\u. 
\is_head_reduction t1; is_internal_reduction u; Src t1 = Src u\ \ is_head_reduction (t1 \\ u)" assume t: "is_head_reduction (t1 \<^bold>\ t2)" assume u: "is_internal_reduction u" assume tu: "Src (t1 \<^bold>\ t2) = Src u" show "is_head_reduction ((t1 \<^bold>\ t2) \\ u)" using t u tu ind1 apply (cases u) apply simp_all proof (intro conjI impI) fix u1 u2 assume u1u2: "u = u1 \<^bold>\ u2" show 1: "Con t1 u1" using Coinitial_iff_Con tu u1u2 ide_char by (metis ConD(1) Ide.simps(1) is_head_reduction.simps(9) is_head_reduction_resid is_internal_reduction.simps(9) is_internal_reduction_resid t u) show "Con t2 u2" using Coinitial_iff_Con tu u1u2 ide_char by (metis ConD(1) Ide.simps(1) is_head_reduction.simps(9) is_head_reduction_resid is_internal_reduction.simps(9) is_internal_reduction_resid t u) show "is_head_reduction (t1 \\ u1 \<^bold>\ t2 \\ u2)" using t u u1u2 1 Coinitial_iff_Con \Con t2 u2\ ide_char ind1 resid_Ide_Arr apply (cases t1; simp_all; cases u1; simp_all; cases "un_App1 u1") apply auto by (metis 1 ind1 is_internal_reduction.simps(6) resid.simps(3)) qed next fix t1 t2 u assume ind1: "\u. \is_head_reduction t1; is_internal_reduction u; Src t1 = Src u\ \ is_head_reduction (t1 \\ u)" assume t: "is_head_reduction (\<^bold>\\<^bold>[t1\<^bold>] \<^bold>\ t2)" assume u: "is_internal_reduction u" assume tu: "Src (\<^bold>\\<^bold>[t1\<^bold>] \<^bold>\ t2) = Src u" show "is_head_reduction ((\<^bold>\\<^bold>[t1\<^bold>] \<^bold>\ t2) \\ u)" using t u tu ind1 apply (cases u) apply simp_all by (metis Con_implies_Arr1 is_head_reduction_resid is_internal_reduction.simps(9) is_internal_reduction_resid lambda.disc(15) prfx_App_iff t tu) qed text \ The following function differs from \head_strategy\ in that it only selects an already-marked redex, whereas \head_strategy\ marks the redex at the head position. \ fun head_redex where "head_redex \<^bold>\ = \<^bold>\" | "head_redex \<^bold>\x\<^bold>\ = \<^bold>\x\<^bold>\" | "head_redex \<^bold>\\<^bold>[t\<^bold>] = \<^bold>\\<^bold>[head_redex t\<^bold>]" | "head_redex (\<^bold>\\<^bold>[t\<^bold>] \<^bold>\ u) = \<^bold>\\<^bold>[Src t\<^bold>] \<^bold>\ Src u" | "head_redex (t \<^bold>\ u) = head_redex t \<^bold>\ Src u" | "head_redex (\<^bold>\\<^bold>[t\<^bold>] \<^bold>\ u) = (\<^bold>\\<^bold>[Src t\<^bold>] \<^bold>\ Src u)" lemma elementary_reduction_head_redex: shows "\Arr t; \ Ide (head_redex t)\ \ elementary_reduction (head_redex t)" using Ide_Src apply (induct t) apply auto proof - show "\t2. \\ Ide (head_redex t1) \ elementary_reduction (head_redex t1); \ Ide (head_redex (t1 \<^bold>\ t2)); \t. Arr t \ Ide (Src t); Arr t1; Arr t2\ \ elementary_reduction (head_redex (t1 \<^bold>\ t2))" for t1 using Ide_Src by (cases t1) auto qed lemma subs_head_redex: shows "Arr t \ head_redex t \ t" using Ide_Src subs_Ide apply (induct t) apply simp_all proof - show "\t2. \head_redex t1 \ t1; head_redex t2 \ t2; Arr t1 \ Arr t2; \t. Arr t \ Ide (Src t); \u t. \Ide u; Src t = Src u\ \ u \ t\ \ head_redex (t1 \<^bold>\ t2) \ t1 \<^bold>\ t2" for t1 using Ide_Src subs_Ide by (cases t1) auto qed lemma contains_head_reduction_iff: shows "contains_head_reduction t \ Arr t \ \ Ide (head_redex t)" apply (induct t) apply simp_all proof - show "\t2. 
contains_head_reduction t1 = (Arr t1 \ \ Ide (head_redex t1)) \ contains_head_reduction (t1 \<^bold>\ t2) = (Arr t1 \ Arr t2 \ \ Ide (head_redex (t1 \<^bold>\ t2)))" for t1 using Ide_Src by (cases t1) auto qed lemma head_redex_is_head_reduction: shows "\Arr t; contains_head_reduction t\ \ is_head_reduction (head_redex t)" using Ide_Src apply (induct t) apply simp_all proof - show "\t2. \contains_head_reduction t1 \ is_head_reduction (head_redex t1); Arr t1 \ Arr t2; contains_head_reduction (t1 \<^bold>\ t2); \t. Arr t \ Ide (Src t)\ \ is_head_reduction (head_redex (t1 \<^bold>\ t2))" for t1 using Ide_Src contains_head_reduction_iff subs_implies_prfx by (cases t1) auto qed lemma Arr_head_redex: assumes "Arr t" shows "Arr (head_redex t)" using assms Ide_implies_Arr elementary_reduction_head_redex elementary_reduction_is_arr by blast lemma Src_head_redex: assumes "Arr t" shows "Src (head_redex t) = Src t" using assms by (metis Coinitial_iff_Con Ide.simps(1) ide_char subs_head_redex subs_implies_prfx) lemma Con_Arr_head_redex: assumes "Arr t" shows "Con t (head_redex t)" using assms by (metis Con_sym Ide.simps(1) ide_char subs_head_redex subs_implies_prfx) lemma is_head_reduction_if: shows "\contains_head_reduction u; elementary_reduction u\ \ is_head_reduction u" apply (induct u) apply auto using contains_head_reduction.elims(2) apply fastforce proof - fix u1 u2 assume u1: "Ide u1" assume u2: "elementary_reduction u2" assume 1: "contains_head_reduction (u1 \<^bold>\ u2)" have False using u1 u2 1 apply (cases u1) apply auto by (metis Arr_head_redex Ide_iff_Src_self Src_head_redex contains_head_reduction_iff ide_char resid_Arr_Src subs_head_redex subs_implies_prfx u1) thus "is_head_reduction (u1 \<^bold>\ u2)" by blast qed lemma (in reduction_paths) head_redex_decomp: assumes "\.Arr t" shows "[\.head_redex t] @ [t \\ \.head_redex t] \<^sup>*\\<^sup>* [t]" using assms prfx_decomp \.subs_head_redex \.subs_implies_prfx by (metis Ide.simps(2) Resid.simps(3) \.prfx_implies_con ide_char) text \ An internal reduction cannot create a new head redex. \ lemma internal_reduction_preserves_no_head_redex: shows "\is_internal_reduction u; Ide (head_strategy (Src u))\ \ Ide (head_strategy (Trg u))" apply (induct u) apply simp_all proof - fix u1 u2 assume ind1: "\is_internal_reduction u1; Ide (head_strategy (Src u1))\ \ Ide (head_strategy (Trg u1))" assume ind2: "\is_internal_reduction u2; Ide (head_strategy (Src u2))\ \ Ide (head_strategy (Trg u2))" assume u: "is_internal_reduction (u1 \<^bold>\ u2)" assume 1: "Ide (head_strategy (Src u1 \<^bold>\ Src u2))" show "Ide (head_strategy (Trg u1 \<^bold>\ Trg u2))" using u 1 ind1 ind2 Ide_Src Ide_Trg Ide_implies_Arr by (cases u1) auto qed lemma head_reduction_unique: shows "\is_head_reduction t; is_head_reduction u; coinitial t u\ \ t = u" by (metis Coinitial_iff_Con con_def confluence is_head_reduction_char null_char) text \ Residuation along internal reductions preserves head reductions. 
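For example (informally), if $u$ is an internal reduction from $(\lambda x.\,M)\,N$ to $(\lambda x.\,M')\,N'$ that reduces only within $M$ and $N$, then the head redex of its source residuates along $u$ to the head redex of its target: the marked redex $(\lambda x.\,M)\,N$ becomes the marked redex $(\lambda x.\,M')\,N'$. The following lemma expresses this in general: the residual of head_strategy (Src u) along an internal reduction u is head_strategy (Trg u). 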
\ lemma resid_head_strategy_internal: shows "is_internal_reduction u \ head_strategy (Src u) \\ u = head_strategy (Trg u)" using internal_reduction_preserves_no_head_redex Arr_head_strategy Ide_iff_Src_self Src_head_strategy Src_resid head_strategy_is_elementary is_head_reduction_char is_head_reduction_resid' is_internal_reduction_iff apply (cases u) apply simp_all apply (metis head_strategy_Src resid_Src_Arr) apply (metis head_strategy_Src Arr.simps(4) Src.simps(4) Trg.simps(3) resid_Src_Arr) by blast text \ An internal reduction followed by a head reduction can be expressed as a join of the internal reduction with a head reduction. \ lemma resid_head_strategy_Src: assumes "is_internal_reduction t" and "is_head_reduction u" and "seq t u" shows "head_strategy (Src t) \\ t = u" and "composite_of t u (Join (head_strategy (Src t)) t)" proof - show 1: "head_strategy (Src t) \\ t = u" using assms internal_reduction_preserves_no_head_redex resid_head_strategy_internal elementary_reduction_not_ide ide_char is_head_reduction_char seq_char by force show "composite_of t u (Join (head_strategy (Src t)) t)" using assms(3) 1 Arr_head_strategy Src_head_strategy join_of_Join join_of_def seq_char by force qed lemma App_Var_contains_no_head_reduction: shows "\ contains_head_reduction (\<^bold>\x\<^bold>\ \<^bold>\ u)" by simp lemma hgt_resid_App_head_redex: assumes "Arr (t \<^bold>\ u)" and "\ Ide (head_redex (t \<^bold>\ u))" shows "hgt ((t \<^bold>\ u) \\ head_redex (t \<^bold>\ u)) < hgt (t \<^bold>\ u)" using assms contains_head_reduction_iff elementary_reduction_decreases_hgt elementary_reduction_head_redex subs_head_redex by blast subsection "Leftmost Reduction" text \ Leftmost (or normal-order) reduction is the strategy that produces an elementary reduction path by contracting the leftmost redex at each step. It agrees with head reduction as long as there is a head redex, otherwise it continues on with the next subterm to the right. \ fun leftmost_strategy where "leftmost_strategy \<^bold>\x\<^bold>\ = \<^bold>\x\<^bold>\" | "leftmost_strategy \<^bold>\\<^bold>[t\<^bold>] = \<^bold>\\<^bold>[leftmost_strategy t\<^bold>]" | "leftmost_strategy (\<^bold>\\<^bold>[t\<^bold>] \<^bold>\ u) = \<^bold>\\<^bold>[t\<^bold>] \<^bold>\ u" | "leftmost_strategy (t \<^bold>\ u) = (if \ Ide (leftmost_strategy t) then leftmost_strategy t \<^bold>\ u else t \<^bold>\ leftmost_strategy u)" | "leftmost_strategy (\<^bold>\\<^bold>[t\<^bold>] \<^bold>\ u) = \<^bold>\\<^bold>[t\<^bold>] \<^bold>\ u" | "leftmost_strategy \<^bold>\ = \<^bold>\" (* TODO: Consider if is_head_reduction should be done this way. *) definition is_leftmost_reduction where "is_leftmost_reduction t \ elementary_reduction t \ leftmost_strategy (Src t) = t" lemma leftmost_strategy_is_reduction_strategy: shows "reduction_strategy leftmost_strategy" proof (unfold reduction_strategy_def, intro allI impI) fix t show "Ide t \ Coinitial (leftmost_strategy t) t" proof (induct t, auto) show "\t2. 
\Arr (leftmost_strategy t1); Arr (leftmost_strategy t2); Ide t1; Ide t2; Arr t1; Src (leftmost_strategy t1) = Src t1; Arr t2; Src (leftmost_strategy t2) = Src t2\ \ Arr (leftmost_strategy (t1 \<^bold>\ t2))" for t1 by (cases t1) auto qed qed lemma elementary_reduction_leftmost_strategy: shows "Ide t \ elementary_reduction (leftmost_strategy t) \ Ide (leftmost_strategy t)" apply (induct t) apply simp_all proof - fix t1 t2 show "\elementary_reduction (leftmost_strategy t1) \ Ide (leftmost_strategy t1); elementary_reduction (leftmost_strategy t2) \ Ide (leftmost_strategy t2); Ide t1 \ Ide t2\ \ elementary_reduction (leftmost_strategy (t1 \<^bold>\ t2)) \ Ide (leftmost_strategy (t1 \<^bold>\ t2))" by (cases t1) auto qed lemma (in lambda_calculus) leftmost_strategy_selects_head_reduction: shows "is_head_reduction t \ t = leftmost_strategy (Src t)" proof (induct t) show "\t1 t2. \is_head_reduction t1 \ t1 = leftmost_strategy (Src t1); is_head_reduction (t1 \<^bold>\ t2)\ \ t1 \<^bold>\ t2 = leftmost_strategy (Src (t1 \<^bold>\ t2))" proof - fix t1 t2 assume ind1: "is_head_reduction t1 \ t1 = leftmost_strategy (Src t1)" assume t: "is_head_reduction (t1 \<^bold>\ t2)" show "t1 \<^bold>\ t2 = leftmost_strategy (Src (t1 \<^bold>\ t2))" using t ind1 apply (cases t1) apply simp_all apply (cases "Src t1") apply simp_all using ind1 apply force using ind1 apply force using ind1 apply force apply (metis Ide_iff_Src_self Ide_implies_Arr elementary_reduction_not_ide ide_char ind1 is_head_reduction_char) using ind1 apply force by (metis Ide_iff_Src_self Ide_implies_Arr) qed show "\t1 t2. \is_head_reduction t1 \ t1 = leftmost_strategy (Src t1); is_head_reduction (\<^bold>\\<^bold>[t1\<^bold>] \<^bold>\ t2)\ \ \<^bold>\\<^bold>[t1\<^bold>] \<^bold>\ t2 = leftmost_strategy (Src (\<^bold>\\<^bold>[t1\<^bold>] \<^bold>\ t2))" by (metis Ide_iff_Src_self Ide_implies_Arr Src.simps(5) is_head_reduction.simps(8) leftmost_strategy.simps(3)) qed auto lemma has_redex_iff_not_Ide_leftmost_strategy: shows "Arr t \ has_redex t \ \ Ide (leftmost_strategy (Src t))" apply (induct t) apply simp_all proof - fix t1 t2 assume ind1: "Ide (parallel_strategy t1) \ Ide (leftmost_strategy (Src t1))" assume ind2: "Ide (parallel_strategy t2) \ Ide (leftmost_strategy (Src t2))" assume t: "Arr t1 \ Arr t2" show "Ide (parallel_strategy (t1 \<^bold>\ t2)) \ Ide (leftmost_strategy (Src t1 \<^bold>\ Src t2))" using t ind1 ind2 Ide_Src Ide_iff_Src_self by (cases t1) auto qed lemma leftmost_reduction_preservation: shows "\u. \is_leftmost_reduction t; elementary_reduction u; \ is_leftmost_reduction u; coinitial t u\ \ is_leftmost_reduction (t \\ u)" proof (induct t) show "\u. coinitial \<^bold>\ u \ is_leftmost_reduction (\<^bold>\ \\ u)" by simp show "\x u. is_leftmost_reduction \<^bold>\x\<^bold>\ \ is_leftmost_reduction (\<^bold>\x\<^bold>\ \\ u)" by (simp add: is_leftmost_reduction_def) fix t u show "\\u. 
\is_leftmost_reduction t; elementary_reduction u; \ is_leftmost_reduction u; coinitial t u\ \ is_leftmost_reduction (t \\ u); is_leftmost_reduction (Lam t); elementary_reduction u; \ is_leftmost_reduction u; coinitial \<^bold>\\<^bold>[t\<^bold>] u\ \ is_leftmost_reduction (\<^bold>\\<^bold>[t\<^bold>] \\ u)" using is_leftmost_reduction_def by (cases u) auto next fix t1 t2 u show "\is_leftmost_reduction (\<^bold>\\<^bold>[t1\<^bold>] \<^bold>\ t2); elementary_reduction u; \ is_leftmost_reduction u; coinitial (\<^bold>\\<^bold>[t1\<^bold>] \<^bold>\ t2) u\ \ is_leftmost_reduction ((\<^bold>\\<^bold>[t1\<^bold>] \<^bold>\ t2) \\ u)" using is_leftmost_reduction_def Src_resid Ide_Trg Ide_iff_Src_self Arr_Trg Arr_not_Nil apply (cases u) apply simp_all by (cases "un_App1 u") auto assume ind1: "\u. \is_leftmost_reduction t1; elementary_reduction u; \ is_leftmost_reduction u; coinitial t1 u\ \ is_leftmost_reduction (t1 \\ u)" assume ind2: "\u. \is_leftmost_reduction t2; elementary_reduction u; \ is_leftmost_reduction u; coinitial t2 u\ \ is_leftmost_reduction (t2 \\ u)" assume 1: "is_leftmost_reduction (t1 \<^bold>\ t2)" assume 2: "elementary_reduction u" assume 3: "\ is_leftmost_reduction u" assume 4: "coinitial (t1 \<^bold>\ t2) u" show "is_leftmost_reduction ((t1 \<^bold>\ t2) \\ u)" using 1 2 3 4 ind1 ind2 is_leftmost_reduction_def Src_resid apply (cases u) apply auto[3] proof - show "\u1 u2. u = \<^bold>\\<^bold>[u1\<^bold>] \<^bold>\ u2 \ is_leftmost_reduction ((t1 \<^bold>\ t2) \\ u)" by (metis 2 3 is_leftmost_reduction_def elementary_reduction.simps(5) is_head_reduction.simps(8) leftmost_strategy_selects_head_reduction) fix u1 u2 assume u: "u = u1 \<^bold>\ u2" show "is_leftmost_reduction ((t1 \<^bold>\ t2) \\ u)" using u 1 2 3 4 ind1 ind2 is_leftmost_reduction_def Src_resid Ide_Trg elementary_reduction_not_ide apply (cases u) apply simp_all apply (cases u1) apply simp_all apply auto[1] using Ide_iff_Src_self apply simp_all proof - fix u11 u12 assume u: "u = u11 \<^bold>\ u12 \<^bold>\ u2" assume u1: "u1 = u11 \<^bold>\ u12" have A: "(elementary_reduction t1 \ Src u2 = t2 \ Src u11 \<^bold>\ Src u12 = t1 \ elementary_reduction t2) \ (if \ Ide (leftmost_strategy (Src u11 \<^bold>\ Src u12)) then leftmost_strategy (Src u11 \<^bold>\ Src u12) \<^bold>\ Src u2 else Src u11 \<^bold>\ Src u12 \<^bold>\ leftmost_strategy (Src u2)) = t1 \<^bold>\ t2" using 1 4 Ide_iff_Src_self is_leftmost_reduction_def u by auto have B: "(elementary_reduction u11 \ Src u12 = u12 \ Src u11 = u11 \ elementary_reduction u12) \ Src u2 = u2 \ Src u11 = u11 \ Src u12 = u12 \ elementary_reduction u2" using "2" "4" Ide_iff_Src_self u by force have C: "t1 = u11 \<^bold>\ u12 \ t2 \ u2" using 1 3 u by fastforce have D: "Arr t1 \ Arr t2 \ Arr u11 \ Arr u12 \ Arr u2 \ Src t1 = Src u11 \<^bold>\ Src u12 \ Src t2 = Src u2" using 4 u by force have E: "\u. \elementary_reduction t1 \ leftmost_strategy (Src u) = t1; elementary_reduction u; t1 \ u; Arr u \ Src u11 \<^bold>\ Src u12 = Src u\ \ elementary_reduction (t1 \\ u) \ leftmost_strategy (Trg u) = t1 \\ u" using D Src_resid ind1 is_leftmost_reduction_def by auto have F: "\u. \elementary_reduction t2 \ leftmost_strategy (Src u) = t2; elementary_reduction u; t2 \ u; Arr u \ Src u2 = Src u\ \ elementary_reduction (t2 \\ u) \ leftmost_strategy (Trg u) = t2 \\ u" using D Src_resid ind2 is_leftmost_reduction_def by auto have G: "\t. 
elementary_reduction t \ \ Ide t" using elementary_reduction_not_ide ide_char by blast have H: "elementary_reduction (t1 \\ (u11 \<^bold>\ u12)) \ Ide (t2 \\ u2) \ Ide (t1 \\ (u11 \<^bold>\ u12)) \ elementary_reduction (t2 \\ u2)" proof (cases "Ide (t2 \\ u2)") assume 1: "Ide (t2 \\ u2)" hence "elementary_reduction (t1 \\ (u11 \<^bold>\ u12))" by (metis A B C D E F G Ide_Src Arr.simps(4) Src.simps(4) elementary_reduction.simps(4) lambda.inject(3) resid_Arr_Src) thus ?thesis using 1 by auto next assume 1: "\ Ide (t2 \\ u2)" hence "Ide (t1 \\ (u11 \<^bold>\ u12)) \ elementary_reduction (t2 \\ u2)" apply (intro conjI) apply (metis 1 A D Ide_Src Arr.simps(4) Src.simps(4) resid_Ide_Arr) by (metis A B C D F Ide_iff_Src_self lambda.inject(3) resid_Arr_Src resid_Ide_Arr) thus ?thesis by simp qed show "(\ Ide (leftmost_strategy (Trg u11 \<^bold>\ Trg u12)) \ (elementary_reduction (t1 \\ (u11 \<^bold>\ u12)) \ Ide (t2 \\ u2) \ Ide (t1 \\ (u11 \<^bold>\ u12)) \ elementary_reduction (t2 \\ u2)) \ leftmost_strategy (Trg u11 \<^bold>\ Trg u12) = t1 \\ (u11 \<^bold>\ u12) \ Trg u2 = t2 \\ u2) \ (Ide (leftmost_strategy (Trg u11 \<^bold>\ Trg u12)) \ (elementary_reduction (t1 \\ (u11 \<^bold>\ u12)) \ Ide (t2 \\ u2) \ Ide (t1 \\ (u11 \<^bold>\ u12)) \ elementary_reduction (t2 \\ u2)) \ Trg u11 \<^bold>\ Trg u12 = t1 \\ (u11 \<^bold>\ u12) \ leftmost_strategy (Trg u2) = t2 \\ u2)" proof (intro conjI impI) show H: "elementary_reduction (t1 \\ (u11 \<^bold>\ u12)) \ Ide (t2 \\ u2) \ Ide (t1 \\ (u11 \<^bold>\ u12)) \ elementary_reduction (t2 \\ u2)" by fact show H: "elementary_reduction (t1 \\ (u11 \<^bold>\ u12)) \ Ide (t2 \\ u2) \ Ide (t1 \\ (u11 \<^bold>\ u12)) \ elementary_reduction (t2 \\ u2)" by fact assume K: "\ Ide (leftmost_strategy (Trg u11 \<^bold>\ Trg u12))" show J: "Trg u2 = t2 \\ u2" using A B D G K has_redex_iff_not_Ide_leftmost_strategy NF_def NF_iff_has_no_redex NF_App_iff resid_Arr_Src resid_Src_Arr by (metis lambda.inject(3)) show "leftmost_strategy (Trg u11 \<^bold>\ Trg u12) = t1 \\ (u11 \<^bold>\ u12)" using 2 A B C D E G H J u Ide_Trg Src_Src has_redex_iff_not_Ide_leftmost_strategy resid_Arr_Ide resid_Src_Arr by (metis Arr.simps(4) Ide.simps(4) Src.simps(4) Trg.simps(3) elementary_reduction.simps(4) lambda.inject(3)) next assume K: "Ide (leftmost_strategy (Trg u11 \<^bold>\ Trg u12))" show I: "Trg u11 \<^bold>\ Trg u12 = t1 \\ (u11 \<^bold>\ u12)" using 2 A D E K u Coinitial_resid_resid ConI resid_Arr_self resid_Ide_Arr resid_Arr_Ide Ide_iff_Src_self Src_resid apply (cases "Ide (leftmost_strategy (Src u11 \<^bold>\ Src u12))") apply simp using lambda_calculus.Con_Arr_Src(2) apply force apply simp using u1 G H Coinitial_iff_Con apply (cases "elementary_reduction u11"; cases "elementary_reduction u12") apply simp_all apply metis apply (metis Src.simps(4) Trg.simps(3) elementary_reduction.simps(1,4)) apply (metis Src.simps(4) Trg.simps(3) elementary_reduction.simps(1,4)) by (metis Trg_Src) show "leftmost_strategy (Trg u2) = t2 \\ u2" using 2 A C D F G H I u Ide_Trg Ide_iff_Src_self NF_def NF_iff_has_no_redex has_redex_iff_not_Ide_leftmost_strategy resid_Ide_Arr by (metis Arr.simps(4) Src.simps(4) Trg.simps(3) elementary_reduction.simps(4) lambda.inject(3)) qed qed qed qed end section "Standard Reductions" text \ In this section, we define the notion of a \emph{standard reduction}, which is an elementary reduction path that performs reductions from left to right, possibly skipping some redexes that could be contracted. 
Once a redex has been skipped, neither that redex nor any redex to its left will
subsequently be contracted. We then define and prove correct a function that
transforms an arbitrary elementary reduction path into a congruent standard
reduction path. Using this function, we prove the Standardization Theorem, which
says that every elementary reduction path is congruent to a standard reduction
path. We then show that a standard reduction path that reaches a normal form is
in fact a leftmost reduction path. From this fact and the Standardization Theorem
we prove the Leftmost Reduction Theorem: leftmost reduction is a normalizing
strategy.

The Standardization Theorem was first proved by Curry and Feys
\cite{curry-and-feys}, with subsequent proofs given by a number of authors.
Formalized proofs have also been given; a recent one (using Agda) is presented
in \cite{copes}, with references to earlier work. The version of the theorem
that we formalize here is a ``strong'' version, which asserts the existence of
a standard reduction path congruent to a given elementary reduction path. At
the core of the proof is a function that directly transforms a given reduction
path into a standard one, using an algorithm roughly analogous to insertion
sort. The Finite Development Theorem is used in the proof of termination. The
proof of correctness is long, due to the number of cases that have to be
considered, but the use of a proof assistant makes this manageable.
\

subsection "Standard Reduction Paths"

subsubsection "`Standardly Sequential' Reductions"

text \
We first need to define the notion of a ``standard reduction''. In contrast to
what is typically done by other authors, we define this notion by direct
comparison of adjacent terms in an elementary reduction path, rather than by
using devices such as a numbering of subterms from left to right.

The following function decides when two terms \t\ and \u\ are elementary
reductions that are ``standardly sequential''. This means that \t\ and \u\ are
sequential, but in addition no marked redex in \u\ is the residual of an
(unmarked) redex ``to the left of'' any marked redex in \t\. Some care is
required to make sure that the recursive definition captures what we intend.
Most of the clauses are readily understandable. One clause that perhaps could
use some explanation is the one for \sseq ((\<^bold>\\<^bold>[t\<^bold>] \<^bold>\ u) \<^bold>\ v) w\. Referring to
the previously proved fact \seq_cases\, which classifies the way in which two
terms \t\ and \u\ can be sequential, we see that one case that must be covered
is when \t\ has the form \(\<^bold>\\<^bold>[t\<^bold>] \<^bold>\ v) \<^bold>\ w\ and the top-level constructor of
\u\ is \Beta\. In this case, it is the reduction of \t\ that creates the
top-level redex contracted in \u\, so it is impossible for \u\ to be a residual
of a redex that already exists in \Src t\.
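
As a simple illustration of the left-to-right discipline that \sseq\ enforces,
write \I\ for an identity abstraction and consider a term of the form
\(I a) (I b)\, which contains two redexes. The elementary reduction contracting
\I a\, followed by the elementary reduction contracting the residual of \I b\,
is standardly sequential: the redex contracted by the second step lies to the
right of the one contracted by the first. Performing the two steps in the
opposite order is still sequential, but not standardly sequential, because the
second step contracts the residual of a redex (namely \I a\) lying to the left
of the redex contracted in the first step; this is exactly the situation that
the definition below excludes.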
\ context lambda_calculus begin fun sseq where "sseq _ \<^bold>\ = False" | "sseq \<^bold>\_\<^bold>\ \<^bold>\_\<^bold>\ = False" | "sseq \<^bold>\\<^bold>[t\<^bold>] \<^bold>\\<^bold>[t'\<^bold>] = sseq t t'" | "sseq (t \<^bold>\ u) (t' \<^bold>\ u') = ((sseq t t' \ Ide u \ u = u') \ (Ide t \ t = t' \ sseq u u') \ (elementary_reduction t \ Trg t = t' \ (u = Src u' \ elementary_reduction u')))" | "sseq (\<^bold>\\<^bold>[t\<^bold>] \<^bold>\ u) (\<^bold>\\<^bold>[t'\<^bold>] \<^bold>\ u') = False" | "sseq ((\<^bold>\\<^bold>[t\<^bold>] \<^bold>\ u) \<^bold>\ v) w = (Ide t \ Ide u \ Ide v \ elementary_reduction w \ seq ((\<^bold>\\<^bold>[t\<^bold>] \<^bold>\ u) \<^bold>\ v) w)" | "sseq (\<^bold>\\<^bold>[t\<^bold>] \<^bold>\ u) v = (Ide t \ Ide u \ elementary_reduction v \ seq (\<^bold>\\<^bold>[t\<^bold>] \<^bold>\ u) v)" | "sseq _ _ = False" lemma sseq_imp_seq: shows "\u. sseq t u \ seq t u" proof (induct t) show "\u. sseq \<^bold>\ u \ seq \<^bold>\ u" using sseq.elims(1) by blast fix u show "\x. sseq \<^bold>\x\<^bold>\ u \ seq \<^bold>\x\<^bold>\ u" using sseq.elims(1) by blast show "\t. \\u. sseq t u \ seq t u; sseq \<^bold>\\<^bold>[t\<^bold>] u\ \ seq \<^bold>\\<^bold>[t\<^bold>] u" using seq_char by (cases u) auto show "\t1 t2. \\u. sseq t1 u \ seq t1 u; \u. sseq t2 u \ seq t2 u; sseq (\<^bold>\\<^bold>[t1\<^bold>] \<^bold>\ t2) u\ \ seq (\<^bold>\\<^bold>[t1\<^bold>] \<^bold>\ t2) u" using seq_char Ide_implies_Arr by (cases u) auto fix t1 t2 show "\\u. sseq t1 u \ seq t1 u; \u. sseq t2 u \ seq t2 u; sseq (t1 \<^bold>\ t2) u\ \ seq (t1 \<^bold>\ t2) u" proof - assume ind1: "\u. sseq t1 u \ seq t1 u" assume ind2: "\u. sseq t2 u \ seq t2 u" assume 1: "sseq (t1 \<^bold>\ t2) u" show ?thesis using 1 ind1 ind2 seq_char arr_char elementary_reduction_is_arr Ide_Src Ide_Trg Ide_implies_Arr Coinitial_iff_Con resid_Arr_self apply (cases u, simp_all) apply (cases t1, simp_all) apply (cases t1, simp_all) apply (cases "Ide t1"; cases "Ide t2") apply simp_all apply (metis Ide_iff_Src_self Ide_iff_Trg_self) apply (metis Ide_iff_Src_self Ide_iff_Trg_self) apply (metis Ide_iff_Trg_self Src_Trg) by (cases t1) auto qed qed lemma sseq_imp_elementary_reduction1: shows "\t. sseq t u \ elementary_reduction t" proof (induct u) show "\t. sseq t \<^bold>\ \ elementary_reduction t" by simp show "\x t. sseq t \<^bold>\x\<^bold>\ \ elementary_reduction t" using elementary_reduction.simps(2) sseq.elims(1) by blast show "\u. \\t. sseq t u \ elementary_reduction t; sseq t \<^bold>\\<^bold>[u\<^bold>]\ \ elementary_reduction t" for t using seq_cases sseq_imp_seq apply (cases t, simp_all) by force show "\u1 u2. \\t. sseq t u1 \ elementary_reduction t; \t. sseq t u2 \ elementary_reduction t; sseq t (u1 \<^bold>\ u2)\ \ elementary_reduction t" for t using seq_cases sseq_imp_seq Ide_Src elementary_reduction_is_arr apply (cases t, simp_all) by blast show "\u1 u2. \\t. sseq t u1 \ elementary_reduction t; \t. sseq t u2 \ elementary_reduction t; sseq t (\<^bold>\\<^bold>[u1\<^bold>] \<^bold>\ u2)\ \ elementary_reduction t" for t using seq_cases sseq_imp_seq apply (cases t, simp_all) by fastforce qed lemma sseq_imp_elementary_reduction2: shows "\t. sseq t u \ elementary_reduction u" proof (induct u) show "\t. sseq t \<^bold>\ \ elementary_reduction \<^bold>\" by simp show "\x t. sseq t \<^bold>\x\<^bold>\ \ elementary_reduction \<^bold>\x\<^bold>\" using elementary_reduction.simps(2) sseq.elims(1) by blast show "\u. \\t. 
sseq t u \ elementary_reduction u; sseq t \<^bold>\\<^bold>[u\<^bold>]\ \ elementary_reduction \<^bold>\\<^bold>[u\<^bold>]" for t using seq_cases sseq_imp_seq apply (cases t, simp_all) by force show "\u1 u2. \\t. sseq t u1 \ elementary_reduction u1; \t. sseq t u2 \ elementary_reduction u2; sseq t (u1 \<^bold>\ u2)\ \ elementary_reduction (u1 \<^bold>\ u2)" for t using seq_cases sseq_imp_seq Ide_Trg elementary_reduction_is_arr by (cases t) auto show "\u1 u2. \\t. sseq t u1 \ elementary_reduction u1; \t. sseq t u2 \ elementary_reduction u2; sseq t (\<^bold>\\<^bold>[u1\<^bold>] \<^bold>\ u2)\ \ elementary_reduction (\<^bold>\\<^bold>[u1\<^bold>] \<^bold>\ u2)" for t using seq_cases sseq_imp_seq apply (cases t, simp_all) by fastforce qed lemma sseq_Beta: shows "sseq (\<^bold>\\<^bold>[t\<^bold>] \<^bold>\ u) v \ Ide t \ Ide u \ elementary_reduction v \ seq (\<^bold>\\<^bold>[t\<^bold>] \<^bold>\ u) v" by (cases v) auto lemma sseq_BetaI [intro]: assumes "Ide t" and "Ide u" and "elementary_reduction v" and "seq (\<^bold>\\<^bold>[t\<^bold>] \<^bold>\ u) v" shows "sseq (\<^bold>\\<^bold>[t\<^bold>] \<^bold>\ u) v" using assms sseq_Beta by simp text \ A head reduction is standardly sequential with any elementary reduction that can be performed after it. \ lemma sseq_head_reductionI: shows "\u. \is_head_reduction t; elementary_reduction u; seq t u\ \ sseq t u" proof (induct t) show "\u. \is_head_reduction \<^bold>\; elementary_reduction u; seq \<^bold>\ u\ \ sseq \<^bold>\ u" by simp show "\x u. \is_head_reduction \<^bold>\x\<^bold>\; elementary_reduction u; seq \<^bold>\x\<^bold>\ u\ \ sseq \<^bold>\x\<^bold>\ u" by auto show "\t. \\u. \is_head_reduction t; elementary_reduction u; seq t u\ \ sseq t u; is_head_reduction \<^bold>\\<^bold>[t\<^bold>]; elementary_reduction u; seq \<^bold>\\<^bold>[t\<^bold>] u\ \ sseq \<^bold>\\<^bold>[t\<^bold>] u" for u by (cases u) auto show "\t2. \\u. \is_head_reduction t1; elementary_reduction u; seq t1 u\ \ sseq t1 u; \u. \is_head_reduction t2; elementary_reduction u; seq t2 u\ \ sseq t2 u; is_head_reduction (t1 \<^bold>\ t2); elementary_reduction u; seq (t1 \<^bold>\ t2) u\ \ sseq (t1 \<^bold>\ t2) u" for t1 u using seq_char apply (cases u) apply simp_all apply (metis ArrE Ide_iff_Src_self Ide_iff_Trg_self App_Var_contains_no_head_reduction is_head_reduction_char is_head_reduction_imp_contains_head_reduction is_head_reduction.simps(3,6-7)) by (cases t1) auto show "\t1 t2 u. \\u. \is_head_reduction t1; elementary_reduction u; seq t1 u\ \ sseq t1 u; \u. \is_head_reduction t2; elementary_reduction u; seq t2 u\ \ sseq t2 u; is_head_reduction (\<^bold>\\<^bold>[t1\<^bold>] \<^bold>\ t2); elementary_reduction u; seq (\<^bold>\\<^bold>[t1\<^bold>] \<^bold>\ t2) u\ \ sseq (\<^bold>\\<^bold>[t1\<^bold>] \<^bold>\ t2) u" by auto qed text \ Once a head reduction is skipped in an application, then all terms that follow it in a standard reduction path are also applications that do not contain head reductions. \ lemma sseq_preserves_App_and_no_head_reduction: shows "\u. \sseq t u; is_App t \ \ contains_head_reduction t\ \ is_App u \ \ contains_head_reduction u" apply (induct t) apply simp_all proof - fix t1 t2 u assume ind1: "\u. \sseq t1 u; is_App t1 \ \ contains_head_reduction t1\ \ is_App u \ \ contains_head_reduction u" assume ind2: "\u. 
\sseq t2 u; is_App t2 \ \ contains_head_reduction t2\ \ is_App u \ \ contains_head_reduction u" assume sseq: "sseq (t1 \<^bold>\ t2) u" assume t: "\ contains_head_reduction (t1 \<^bold>\ t2)" have u: "\ is_Beta u" using sseq t sseq_imp_seq seq_cases by (cases t1; cases u) auto have 1: "is_App u" using u sseq sseq_imp_seq apply (cases u) apply simp_all by fastforce+ moreover have "\ contains_head_reduction u" proof (cases u) show "\v. u = \<^bold>\\<^bold>[v\<^bold>] \ \ contains_head_reduction u" using 1 by auto show "\v w. u = \<^bold>\\<^bold>[v\<^bold>] \<^bold>\ w \ \ contains_head_reduction u" using u by auto fix u1 u2 assume u: "u = u1 \<^bold>\ u2" have 1: "(sseq t1 u1 \ Ide t2 \ t2 = u2) \ (Ide t1 \ t1 = u1 \ sseq t2 u2) \ (elementary_reduction t1 \ u1 = Trg t1 \ t2 = Src u2 \ elementary_reduction u2)" using sseq u by force moreover have "Ide t1 \ t1 = u1 \ sseq t2 u2 \ ?thesis" using Ide_implies_Arr ide_char sseq_imp_seq t u by fastforce moreover have "elementary_reduction t1 \ u1 = Trg t1 \ t2 = Src u2 \ elementary_reduction u2 \ ?thesis" proof - assume 2: "elementary_reduction t1 \ u1 = Trg t1 \ t2 = Src u2 \ elementary_reduction u2" have "contains_head_reduction u \ contains_head_reduction u1" using u apply simp using contains_head_reduction.elims(2) by fastforce hence "contains_head_reduction u \ \ Ide u1" using contains_head_reduction_iff by (metis Coinitial_iff_Con Ide_iff_Src_self Ide_implies_Arr ide_char resid_Arr_Src subs_head_redex subs_implies_prfx) thus ?thesis using 2 by (metis Arr.simps(4) Ide_Trg seq_char sseq sseq_imp_seq) qed moreover have "sseq t1 u1 \ Ide t2 \ t2 = u2 \ ?thesis" using t u ind1 [of u1] Ide_implies_Arr sseq_imp_elementary_reduction1 apply (cases t1, simp_all) using elementary_reduction.simps(1) apply blast using elementary_reduction.simps(2) apply blast using contains_head_reduction.elims(2) apply fastforce apply (metis contains_head_reduction.simps(6) is_App_def) using sseq_Beta by blast ultimately show ?thesis by blast qed auto ultimately show "is_App u \ \ contains_head_reduction u" by blast qed end subsubsection "Standard Reduction Paths" context reduction_paths begin text \ A \emph{standard reduction path} is an elementary reduction path in which successive reductions are standardly sequential. \ fun Std where "Std [] = True" | "Std [t] = \.elementary_reduction t" | "Std (t # U) = (\.sseq t (hd U) \ Std U)" lemma Std_consE [elim]: assumes "Std (t # U)" and "\\.Arr t; U \ [] \ \.sseq t (hd U); Std U\ \ thesis" shows thesis using assms by (metis \.arr_char \.elementary_reduction_is_arr \.seq_char \.sseq_imp_seq list.exhaust_sel list.sel(1) Std.simps(1-3)) lemma Std_imp_Arr [simp]: shows "\Std T; T \ []\ \ Arr T" proof (induct T) show "[] \ [] \ Arr []" by simp fix t U assume ind: "\Std U; U \ []\ \ Arr U" assume tU: "Std (t # U)" show "Arr (t # U)" proof (cases "U = []") show "U = [] \ Arr (t # U)" using \.elementary_reduction_is_arr tU \.Ide_implies_Arr Std.simps(2) Arr.simps(2) by blast assume U: "U \ []" show "Arr (t # U)" proof - have "\.sseq t (hd U)" using tU U by (metis list.exhaust_sel reduction_paths.Std.simps(3)) thus ?thesis using U ind \.sseq_imp_seq apply auto using reduction_paths.Std.elims(3) tU by fastforce qed qed qed lemma Std_imp_sseq_last_hd: shows "\U. 
\Std (T @ U); T \ []; U \ []\ \ \.sseq (last T) (hd U)" apply (induct T) apply simp_all by (metis Std.elims(3) Std.simps(3) append_self_conv2 neq_Nil_conv) lemma Std_implies_set_subset_elementary_reduction: shows "Std U \ set U \ Collect \.elementary_reduction" apply (induct U) apply auto by (metis Std.simps(2) Std.simps(3) neq_Nil_conv \.sseq_imp_elementary_reduction1) lemma Std_map_Lam: shows "Std T \ Std (map \.Lam T)" proof (induct T) show "Std [] \ Std (map \.Lam [])" by simp fix t U assume ind: "Std U \ Std (map \.Lam U)" assume tU: "Std (t # U)" have "Std (map \.Lam (t # U)) \ Std (\<^bold>\\<^bold>[t\<^bold>] # map \.Lam U)" by auto also have "... = True" apply (cases "U = []") apply simp_all using Arr.simps(3) Std.simps(2) arr_char tU apply presburger proof - assume U: "U \ []" have "Std (\<^bold>\\<^bold>[t\<^bold>] # map \.Lam U) \ \.sseq \<^bold>\\<^bold>[t\<^bold>] \<^bold>\\<^bold>[hd U\<^bold>] \ Std (map \.Lam U)" using U by (metis Nil_is_map_conv Std.simps(3) hd_map list.exhaust_sel) also have "... \ \.sseq t (hd U) \ Std (map \.Lam U)" by auto also have "... = True" using ind tU U by (metis Std.simps(3) list.exhaust_sel) finally show "Std (\<^bold>\\<^bold>[t\<^bold>] # map \.Lam U)" by blast qed finally show "Std (map \.Lam (t # U))" by blast qed lemma Std_map_App1: shows "\\.Ide b; Std T\ \ Std (map (\X. X \<^bold>\ b) T)" proof (induct T) show "\\.Ide b; Std []\ \ Std (map (\X. X \<^bold>\ b) [])" by simp fix t U assume ind: "\\.Ide b; Std U\ \ Std (map (\X. X \<^bold>\ b) U)" assume b: "\.Ide b" assume tU: "Std (t # U)" show "Std (map (\v. v \<^bold>\ b) (t # U))" proof (cases "U = []") show "U = [] \ ?thesis" using Ide_implies_Arr b \.arr_char tU by force assume U: "U \ []" have "Std (map (\v. v \<^bold>\ b) (t # U)) = Std ((t \<^bold>\ b) # map (\X. X \<^bold>\ b) U)" by simp also have "... = (\.sseq (t \<^bold>\ b) (hd U \<^bold>\ b) \ Std (map (\X. X \<^bold>\ b) U))" using U reduction_paths.Std.simps(3) hd_map by (metis Nil_is_map_conv neq_Nil_conv) also have "... = True" using b tU U ind by (metis Std.simps(3) list.exhaust_sel \.sseq.simps(4)) finally show "Std (map (\v. v \<^bold>\ b) (t # U))" by blast qed qed lemma Std_map_App2: shows "\\.Ide a; Std T\ \ Std (map (\u. a \<^bold>\ u) T)" proof (induct T) show "\\.Ide a; Std []\ \ Std (map (\u. a \<^bold>\ u) [])" by simp fix t U assume ind: "\\.Ide a; Std U\ \ Std (map (\u. a \<^bold>\ u) U)" assume a: "\.Ide a" assume tU: "Std (t # U)" show "Std (map (\u. a \<^bold>\ u) (t # U))" proof (cases "U = []") show "U = [] \ ?thesis" using a tU by force assume U: "U \ []" have "Std (map (\u. a \<^bold>\ u) (t # U)) = Std ((a \<^bold>\ t) # map (\u. a \<^bold>\ u) U)" by simp also have "... = (\.sseq (a \<^bold>\ t) (a \<^bold>\ hd U) \ Std (map (\u. a \<^bold>\ u) U))" using U by (metis Nil_is_map_conv Std.simps(3) hd_map list.exhaust_sel) also have "... = True" using a tU U ind by (metis Std.simps(3) list.exhaust_sel \.sseq.simps(4)) finally show "Std (map (\u. 
a \<^bold>\ u) (t # U))" by blast qed qed lemma Std_map_un_Lam: shows "\Std T; set T \ Collect \.is_Lam\ \ Std (map \.un_Lam T)" proof (induct T) show "\Std []; set [] \ Collect \.is_Lam\ \ Std (map \.un_Lam [])" by simp fix t T assume ind: "\Std T; set T \ Collect \.is_Lam\ \ Std (map \.un_Lam T)" assume tT: "Std (t # T)" assume 1: "set (t # T) \ Collect \.is_Lam" show "Std (map \.un_Lam (t # T))" proof (cases "T = []") show "T = [] \ Std (map \.un_Lam (t # T))" by (metis "1" Std.simps(2) \.elementary_reduction.simps(3) \.lambda.collapse(2) list.set_intros(1) list.simps(8) list.simps(9) mem_Collect_eq subset_code(1) tT) assume T: "T \ []" show "Std (map \.un_Lam (t # T))" using T tT 1 ind Std.simps(3) [of "\.un_Lam t" "\.un_Lam (hd T)" "map \.un_Lam (tl T)"] by (metis \.lambda.collapse(2) \.sseq.simps(3) list.exhaust_sel list.sel(1) list.set_intros(1) map_eq_Cons_conv mem_Collect_eq reduction_paths.Std.simps(3) set_subset_Cons subset_code(1)) qed qed lemma Std_append_single: shows "\Std T; T \ []; \.sseq (last T) u\ \ Std (T @ [u])" proof (induct T) show "\Std []; [] \ []; \.sseq (last []) u\ \ Std ([] @ [u])" by blast fix t T assume ind: "\Std T; T \ []; \.sseq (last T) u\ \ Std (T @ [u])" assume tT: "Std (t # T)" assume sseq: "\.sseq (last (t # T)) u" have "Std (t # (T @ [u]))" using \.sseq_imp_elementary_reduction2 sseq ind tT apply (cases "T = []") apply simp by (metis append_Cons last_ConsR list.sel(1) neq_Nil_conv reduction_paths.Std.simps(3)) thus "Std ((t # T) @ [u])" by simp qed lemma Std_append: shows "\T. \Std T; Std U; T = [] \ U = [] \ \.sseq (last T) (hd U)\ \ Std (T @ U)" proof (induct U) show "\T. \Std T; Std []; T = [] \ [] = [] \ \.sseq (last T) (hd [])\ \ Std (T @ [])" by simp fix u T U assume ind: "\T. \Std T; Std U; T = [] \ U = [] \ \.sseq (last T) (hd U)\ \ Std (T @ U)" assume T: "Std T" assume uU: "Std (u # U)" have U: "Std U" using uU Std.elims(3) by fastforce assume seq: "T = [] \ u # U = [] \ \.sseq (last T) (hd (u # U))" show "Std (T @ (u # U))" by (metis Std_append_single T U append.assoc append.left_neutral append_Cons ind last_snoc list.distinct(1) list.exhaust_sel list.sel(1) Std.simps(3) seq uU) qed subsubsection "Projections of Standard `App Paths'" text \ Given a standard reduction path, all of whose transitions have App as their top-level constructor, we can apply \un_App1\ or \un_App2\ to each transition to project the path onto paths formed from the ``rator'' and the ``rand'' of each application. These projected paths are not standard, since the projection operation will introduce identities, in general. However, in this section we show that if we remove the identities, then in fact we do obtain standard reduction paths. \ abbreviation notIde where "notIde \ \u. \ \.Ide u" lemma filter_notIde_Ide: shows "Ide U \ filter notIde U = []" by (induct U) auto lemma cong_filter_notIde: shows "\Arr U; \ Ide U\ \ filter notIde U \<^sup>*\\<^sup>* U" proof (induct U) show "\Arr []; \ Ide []\ \ filter notIde [] \<^sup>*\\<^sup>* []" by simp fix u U assume ind: "\Arr U; \ Ide U\ \ filter notIde U \<^sup>*\\<^sup>* U" assume Arr: "Arr (u # U)" assume 1: "\ Ide (u # U)" show "filter notIde (u # U) \<^sup>*\\<^sup>* (u # U)" proof (cases "\.Ide u") assume u: "\.Ide u" have U: "Arr U \ \ Ide U" using Arr u 1 Ide.elims(3) by fastforce have "filter notIde (u # U) = filter notIde U" using u by simp also have "... 
\<^sup>*\\<^sup>* U" using U ind by blast also have "U \<^sup>*\\<^sup>* [u] @ U" using u by (metis (full_types) Arr Arr_has_Src Cons_eq_append_conv Ide.elims(3) Ide.simps(2) Srcs.simps(1) U arrI\<^sub>P arr_append_imp_seq cong_append_ideI(3) ide_char \.ide_char not_Cons_self2) also have "[u] @ U = u # U" by simp finally show ?thesis by blast next assume u: "\ \.Ide u" show ?thesis proof (cases "Ide U") assume U: "Ide U" have "filter notIde (u # U) = [u]" using u U filter_notIde_Ide by simp moreover have "[u] \<^sup>*\\<^sup>* [u] @ U" using u U cong_append_ideI(4) [of "[u]" U] by (metis Arr Con_Arr_self Cons_eq_appendI Resid_Ide(1) arr_append_imp_seq arr_char ide_char ide_implies_arr neq_Nil_conv self_append_conv2) moreover have "[u] @ U = u # U" by simp ultimately show ?thesis by auto next assume U: "\ Ide U" have "filter notIde (u # U) = [u] @ filter notIde U" using u U Arr by simp also have "... \<^sup>*\\<^sup>* [u] @ U" proof (cases "U = []") show "U = [] \ ?thesis" by (metis Arr arr_char cong_reflexive append_Nil2 filter.simps(1)) assume 1: "U \ []" have "seq [u] (filter notIde U)" by (metis (full_types) 1 Arr Arr.simps(2-3) Con_imp_eq_Srcs Con_implies_Arr(1) Ide.elims(3) Ide.simps(1) Trgs.simps(2) U ide_char ind seq_char seq_implies_Trgs_eq_Srcs) thus ?thesis using u U Arr ind cong_append [of "[u]" "filter notIde U" "[u]" U] by (meson 1 Arr_consE cong_reflexive seqE) qed also have "[u] @ U = u # U" by simp finally show ?thesis by argo qed qed qed lemma Std_filter_map_un_App1: shows "\Std U; set U \ Collect \.is_App\ \ Std (filter notIde (map \.un_App1 U))" proof (induct U) show "\Std []; set [] \ Collect \.is_App\ \ Std (filter notIde (map \.un_App1 []))" by simp fix u U assume ind: "\Std U; set U \ Collect \.is_App\ \ Std (filter notIde (map \.un_App1 U))" assume 1: "Std (u # U)" assume 2: "set (u # U) \ Collect \.is_App" show "Std (filter notIde (map \.un_App1 (u # U)))" using 1 2 ind apply (cases u) apply simp_all proof - fix u1 u2 assume uU: "Std ((u1 \<^bold>\ u2) # U)" assume set: "set U \ Collect \.is_App" assume ind: "Std U \ Std (filter notIde (map \.un_App1 U))" assume u: "u = u1 \<^bold>\ u2" show "(\ \.Ide u1 \ Std (u1 # filter notIde (map \.un_App1 U))) \ (\.Ide u1 \ Std (filter notIde (map \.un_App1 U)))" proof (intro conjI impI) assume u1: "\.Ide u1" show "Std (filter notIde (map \.un_App1 U))" by (metis 1 Std.simps(1) Std.simps(3) ind neq_Nil_conv) next assume u1: "\ \.Ide u1" show "Std (u1 # filter notIde (map \.un_App1 U))" proof (cases "Ide (map \.un_App1 U)") show "Ide (map \.un_App1 U) \ ?thesis" proof - assume U: "Ide (map \.un_App1 U)" have "filter notIde (map \.un_App1 U) = []" by (metis U Ide_char filter_False \.ide_char mem_Collect_eq subsetD) thus ?thesis by (metis Std.elims(1) Std.simps(2) \.elementary_reduction.simps(4) list.discI list.sel(1) \.sseq_imp_elementary_reduction1 u1 uU) qed assume U: "\ Ide (map \.un_App1 U)" show ?thesis proof (cases "U = []") show "U = [] \ ?thesis" using 1 u u1 by fastforce assume "U \ []" hence U: "U \ [] \ \ Ide (map \.un_App1 U)" using U by simp have "\.sseq u1 (hd (filter notIde (map \.un_App1 U)))" proof - have "\u1 u2. \set U \ Collect \.is_App; \ Ide (map \.un_App1 U); U \ []; Std ((u1 \<^bold>\ u2) # U); \ \.Ide u1\ \ \.sseq u1 (hd (filter notIde (map \.un_App1 U)))" for U apply (induct U) apply simp_all apply (intro conjI impI) proof - fix u U u1 u2 assume ind: "\u1 u2. 
\\ Ide (map \.un_App1 U); U \ []; Std ((u1 \<^bold>\ u2) # U); \ \.Ide u1\ \ \.sseq u1 (hd (filter notIde (map \.un_App1 U)))" assume 1: "\.is_App u \ set U \ Collect \.is_App" assume 2: "\ Ide (\.un_App1 u # map \.un_App1 U)" assume 3: "\.sseq (u1 \<^bold>\ u2) u \ Std (u # U)" show "\ \.Ide (\.un_App1 u) \ \.sseq u1 (\.un_App1 u)" by (metis 1 3 \.Arr.simps(4) \.Ide_Trg \.lambda.collapse(3) \.seq_char \.sseq.simps(4) \.sseq_imp_seq) assume 4: "\ \.Ide u1" assume 5: "\.Ide (\.un_App1 u)" have u1: "\.elementary_reduction u1" using 3 4 \.elementary_reduction.simps(4) \.sseq_imp_elementary_reduction1 by blast have 6: "Arr (\.un_App1 u # map \.un_App1 U)" using 1 3 Std_imp_Arr Arr_map_un_App1 [of "u # U"] by auto have 7: "Arr (map \.un_App1 U)" using 1 2 3 5 6 Arr_map_un_App1 Std_imp_Arr \.ide_char by fastforce have 8: "\ Ide (map \.un_App1 U)" using 2 5 6 set_Ide_subset_ide by fastforce have 9: "\.seq u (hd U)" by (metis 3 7 Std.simps(3) Arr.simps(1) list.collapse list.simps(8) \.sseq_imp_seq) show "\.sseq u1 (hd (filter notIde (map \.un_App1 U)))" proof - have "\.sseq (u1 \<^bold>\ \.Trg (\.un_App2 u)) (hd U)" proof (cases "\.Ide (\.un_App1 (hd U))") assume 10: "\.Ide (\.un_App1 (hd U))" hence "\.elementary_reduction (\.un_App2 (hd U))" by (metis (full_types) 1 3 7 Std.elims(2) Arr.simps(1) \.elementary_reduction_App_iff \.elementary_reduction_not_ide \.ide_char list.sel(2) list.sel(3) list.set_sel(1) list.simps(8) mem_Collect_eq \.sseq_imp_elementary_reduction2 subsetD) moreover have "\.Trg u1 = \.un_App1 (hd U)" proof - have "\.Trg u1 = \.Src (\.un_App1 u)" by (metis 1 3 5 \.Ide_iff_Src_self \.Ide_implies_Arr \.Trg_Src \.elementary_reduction_not_ide \.ide_char \.lambda.collapse(3) \.sseq.simps(4) \.sseq_imp_elementary_reduction2) also have "... = \.Trg (\.un_App1 u)" by (metis 5 \.Ide_iff_Src_self \.Ide_iff_Trg_self \.Ide_implies_Arr) also have "... 
= \.un_App1 (hd U)" using 1 3 5 7 \.Ide_iff_Trg_self by (metis 9 10 Arr.simps(1) lambda_calculus.Ide_iff_Src_self \.Ide_implies_Arr \.Src_Src \.Src_eq_iff(2) \.Trg.simps(3) \.lambda.collapse(3) \.seqE\<^sub>\ list.set_sel(1) list.simps(8) mem_Collect_eq subsetD) finally show ?thesis by argo qed moreover have "\.Trg (\.un_App2 u) = \.Src (\.un_App2 (hd U))" by (metis 1 7 9 Arr.simps(1) hd_in_set \.Src.simps(4) \.Src_Src \.Trg.simps(3) \.lambda.collapse(3) \.lambda.sel(4) \.seq_char list.simps(8) mem_Collect_eq subset_code(1)) ultimately show ?thesis using \.sseq.simps(4) by (metis 1 7 u1 Arr.simps(1) hd_in_set \.lambda.collapse(3) list.simps(8) mem_Collect_eq subsetD) next assume 10: "\ \.Ide (\.un_App1 (hd U))" have False proof - have "\.elementary_reduction (\.un_App2 u)" using 1 3 5 \.elementary_reduction_App_iff \.elementary_reduction_not_ide \.sseq_imp_elementary_reduction2 by blast moreover have "\.sseq u (hd U)" by (metis 3 7 Std.simps(3) Arr.simps(1) hd_Cons_tl list.simps(8)) moreover have "\.elementary_reduction (\.un_App1 (hd U))" by (metis 1 7 10 Nil_is_map_conv Arr.simps(1) calculation(2) \.elementary_reduction_App_iff hd_in_set \.ide_char mem_Collect_eq \.sseq_imp_elementary_reduction2 subset_iff) ultimately show ?thesis using \.sseq.simps(4) by (metis 1 5 7 Arr.simps(1) \.elementary_reduction_not_ide hd_in_set \.ide_char \.lambda.collapse(3) list.simps(8) mem_Collect_eq subset_iff) qed thus ?thesis by argo qed hence " Std ((u1 \<^bold>\ \.Trg (\.un_App2 u)) # U)" by (metis 3 7 Std.simps(3) Arr.simps(1) list.exhaust_sel list.simps(8)) thus ?thesis using ind by (metis 7 8 u1 Arr.simps(1) \.elementary_reduction_not_ide \.ide_char list.simps(8)) qed qed thus ?thesis using U set u1 uU by blast qed thus ?thesis by (metis 1 Std.simps(2-3) \U \ []\ ind list.exhaust_sel list.sel(1) \.sseq_imp_elementary_reduction1) qed qed qed qed qed lemma Std_filter_map_un_App2: shows "\Std U; set U \ Collect \.is_App\ \ Std (filter notIde (map \.un_App2 U))" proof (induct U) show "\Std []; set [] \ Collect \.is_App\ \ Std (filter notIde (map \.un_App2 []))" by simp fix u U assume ind: "\Std U; set U \ Collect \.is_App\ \ Std (filter notIde (map \.un_App2 U))" assume 1: "Std (u # U)" assume 2: "set (u # U) \ Collect \.is_App" show "Std (filter notIde (map \.un_App2 (u # U)))" using 1 2 ind apply (cases u) apply simp_all proof - fix u1 u2 assume uU: "Std ((u1 \<^bold>\ u2) # U)" assume set: "set U \ Collect \.is_App" assume ind: "Std U \ Std (filter notIde (map \.un_App2 U))" assume u: "u = u1 \<^bold>\ u2" show "(\ \.Ide u2 \ Std (u2 # filter notIde (map \.un_App2 U))) \ (\.Ide u2 \ Std (filter notIde (map \.un_App2 U)))" proof (intro conjI impI) assume u2: "\.Ide u2" show "Std (filter notIde (map \.un_App2 U))" by (metis 1 Std.simps(1) Std.simps(3) ind neq_Nil_conv) next assume u2: "\ \.Ide u2" show "Std (u2 # filter notIde (map \.un_App2 U))" proof (cases "Ide (map \.un_App2 U)") show "Ide (map \.un_App2 U) \ ?thesis" proof - assume U: "Ide (map \.un_App2 U)" have "filter notIde (map \.un_App2 U) = []" by (metis U Ide_char filter_False \.ide_char mem_Collect_eq subsetD) thus ?thesis by (metis Std.elims(1) Std.simps(2) \.elementary_reduction.simps(4) list.discI list.sel(1) \.sseq_imp_elementary_reduction1 u2 uU) qed assume U: "\ Ide (map \.un_App2 U)" show ?thesis proof (cases "U = []") show "U = [] \ ?thesis" using "1" u u2 by fastforce assume "U \ []" hence U: "U \ [] \ \ Ide (map \.un_App2 U)" using U by simp have "\.sseq u2 (hd (filter notIde (map \.un_App2 U)))" proof - have "\u1 u2. 
\set U \ Collect \.is_App; \ Ide (map \.un_App2 U); U \ []; Std ((u1 \<^bold>\ u2) # U); \ \.Ide u2\ \ \.sseq u2 (hd (filter notIde (map \.un_App2 U)))" for U apply (induct U) apply simp_all apply (intro conjI impI) proof - fix u U u1 u2 assume ind: "\u1 u2. \\ Ide (map \.un_App2 U); U \ []; Std ((u1 \<^bold>\ u2) # U); \ \.Ide u2\ \ \.sseq u2 (hd (filter notIde (map \.un_App2 U)))" assume 1: "\.is_App u \ set U \ Collect \.is_App" assume 2: "\ Ide (\.un_App2 u # map \.un_App2 U)" assume 3: "\.sseq (u1 \<^bold>\ u2) u \ Std (u # U)" assume 4: "\ \.Ide u2" show "\ \.Ide (\.un_App2 u) \ \.sseq u2 (\.un_App2 u)" by (metis 1 3 4 \.elementary_reduction.simps(4) \.elementary_reduction_not_ide \.ide_char \.lambda.collapse(3) \.sseq.simps(4) \.sseq_imp_elementary_reduction1) assume 5: "\.Ide (\.un_App2 u)" have False by (metis 1 3 4 5 \.elementary_reduction_not_ide \.ide_char \.lambda.collapse(3) \.sseq.simps(4) \.sseq_imp_elementary_reduction2) thus "\.sseq u2 (hd (filter notIde (map \.un_App2 U)))" by argo qed thus ?thesis using U set u2 uU by blast qed thus ?thesis by (metis "1" Std.simps(2) Std.simps(3) \U \ []\ ind list.exhaust_sel list.sel(1) \.sseq_imp_elementary_reduction1) qed qed qed qed qed text \ If the first step in a standard reduction path contracts a redex that is not at the head position, then all subsequent terms have \App\ as their top-level operator. \ lemma seq_App_Std_implies: shows "\t. \Std (t # U); \.is_App t \ \ \.contains_head_reduction t\ \ set U \ Collect \.is_App" proof (induct U) show "\t. \Std [t]; \.is_App t \ \ \.contains_head_reduction t\ \ set [] \ Collect \.is_App" by simp fix t u U assume ind: "\t. \Std (t # U); \.is_App t \ \ \.contains_head_reduction t\ \ set U \ Collect \.is_App" assume Std: "Std (t # u # U)" assume t: "\.is_App t \ \ \.contains_head_reduction t" have U: "set (u # U) \ Collect \.elementary_reduction" using Std Std_implies_set_subset_elementary_reduction by fastforce have u: "\.elementary_reduction u" using U by simp have "set U \ Collect \.elementary_reduction" using U by simp show "set (u # U) \ Collect \.is_App" proof (cases "U = []") show "U = [] \ ?thesis" by (metis Std empty_set empty_subsetI insert_subset \.sseq_preserves_App_and_no_head_reduction list.sel(1) list.simps(15) mem_Collect_eq reduction_paths.Std.simps(3) t) assume U: "U \ []" have "\.sseq t u" using Std by auto hence "\.is_App u \ \ \.Ide u \ \ \.contains_head_reduction u" using t u U \.sseq_preserves_App_and_no_head_reduction [of t u] \.elementary_reduction_not_ide by blast thus ?thesis using Std ind [of u] \set U \ Collect \.elementary_reduction\ by simp qed qed subsection "Standard Developments" text \ The following function takes a term \t\ (representing a parallel reduction) and produces a standard reduction path that is a complete development of \t\ and is thus congruent to \[t]\. The proof of termination makes use of the Finite Development Theorem. \ function (sequential) standard_development where "standard_development \<^bold>\ = []" | "standard_development \<^bold>\_\<^bold>\ = []" | "standard_development \<^bold>\\<^bold>[t\<^bold>] = map \.Lam (standard_development t)" | "standard_development (t \<^bold>\ u) = (if \.Arr t \ \.Arr u then map (\v. v \<^bold>\ \.Src u) (standard_development t) @ map (\v. 
\.Trg t \<^bold>\ v) (standard_development u) else [])" | "standard_development (\<^bold>\\<^bold>[t\<^bold>] \<^bold>\ u) = (if \.Arr t \ \.Arr u then (\<^bold>\\<^bold>[\.Src t\<^bold>] \<^bold>\ \.Src u) # standard_development (\.subst u t) else [])" by pat_completeness auto abbreviation (in lambda_calculus) stddev_term_rel where "stddev_term_rel \ mlex_prod hgt subterm_rel" lemma (in lambda_calculus) subst_lt_Beta: assumes "Arr t" and "Arr u" shows "(subst u t, \<^bold>\\<^bold>[t\<^bold>] \<^bold>\ u) \ stddev_term_rel" proof - have "(\<^bold>\\<^bold>[t\<^bold>] \<^bold>\ u) \\ (\<^bold>\\<^bold>[Src t\<^bold>] \<^bold>\ Src u) = subst u t" using assms by (metis Arr_not_Nil Ide_Src Ide_iff_Src_self Ide_implies_Arr resid.simps(4) resid_Arr_Ide) moreover have "elementary_reduction (\<^bold>\\<^bold>[Src t\<^bold>] \<^bold>\ Src u)" by (simp add: assms Ide_Src) moreover have "\<^bold>\\<^bold>[Src t\<^bold>] \<^bold>\ Src u \ \<^bold>\\<^bold>[t\<^bold>] \<^bold>\ u" by (metis assms Arr.simps(5) head_redex.simps(9) subs_head_redex) ultimately show ?thesis using assms elementary_reduction_decreases_hgt [of "\<^bold>\\<^bold>[Src t\<^bold>] \<^bold>\ Src u" "\<^bold>\\<^bold>[t\<^bold>] \<^bold>\ u"] by (metis mlex_less) qed termination standard_development proof (relation \.stddev_term_rel) show "wf \.stddev_term_rel" using \.wf_subterm_rel wf_mlex by blast show "\t. (t, \<^bold>\\<^bold>[t\<^bold>]) \ \.stddev_term_rel" by (simp add: \.subterm_lemmas(1) mlex_prod_def) show "\t u. (t, t \<^bold>\ u) \ \.stddev_term_rel" using \.subterm_lemmas(3) by (metis antisym_conv1 \.hgt.simps(4) le_add1 mem_Collect_eq mlex_iff old.prod.case) show "\t u. (u, t \<^bold>\ u) \ \.stddev_term_rel" using \.subterm_lemmas(3) by (simp add: mlex_leq) show "\t u. \.Arr t \ \.Arr u \ (\.subst u t, \<^bold>\\<^bold>[t\<^bold>] \<^bold>\ u) \ \.stddev_term_rel" using \.subst_lt_Beta by simp qed lemma Ide_iff_standard_development_empty: shows "\.Arr t \ \.Ide t \ standard_development t = []" by (induct t) auto lemma set_standard_development: shows "\.Arr t \ set (standard_development t) \ Collect \.elementary_reduction" apply (rule standard_development.induct) using \.Ide_Src \.Ide_Trg \.Arr_Subst by auto lemma cong_standard_development: shows "\.Arr t \ \ \.Ide t \ standard_development t \<^sup>*\\<^sup>* [t]" proof (rule standard_development.induct) show "\.Arr \<^bold>\ \ \ \.Ide \<^bold>\ \ standard_development \<^bold>\ \<^sup>*\\<^sup>* [\<^bold>\]" by simp show "\x. \.Arr \<^bold>\x\<^bold>\ \ \ \.Ide \<^bold>\x\<^bold>\ \ standard_development \<^bold>\x\<^bold>\ \<^sup>*\\<^sup>* [\<^bold>\x\<^bold>\]" by simp show "\t. \.Arr t \ \ \.Ide t \ standard_development t \<^sup>*\\<^sup>* [t] \ \.Arr \<^bold>\\<^bold>[t\<^bold>] \ \ \.Ide \<^bold>\\<^bold>[t\<^bold>] \ standard_development \<^bold>\\<^bold>[t\<^bold>] \<^sup>*\\<^sup>* [\<^bold>\\<^bold>[t\<^bold>]]" by (metis (mono_tags, lifting) cong_map_Lam \.Arr.simps(3) \.Ide.simps(3) list.simps(8,9) standard_development.simps(3)) show "\t u. 
\\.Arr t \ \.Arr u \ \.Arr t \ \ \.Ide t \ standard_development t \<^sup>*\\<^sup>* [t]; \.Arr t \ \.Arr u \ \.Arr u \ \ \.Ide u \ standard_development u \<^sup>*\\<^sup>* [u]\ \ \.Arr (t \<^bold>\ u) \ \ \.Ide (t \<^bold>\ u) \ standard_development (t \<^bold>\ u) \<^sup>*\\<^sup>* [t \<^bold>\ u]" proof fix t u assume ind1: "\.Arr t \ \.Arr u \ \.Arr t \ \ \.Ide t \ standard_development t \<^sup>*\\<^sup>* [t]" assume ind2: "\.Arr t \ \.Arr u \ \.Arr u \ \ \.Ide u \ standard_development u \<^sup>*\\<^sup>* [u]" assume 1: "\.Arr (t \<^bold>\ u) \ \ \.Ide (t \<^bold>\ u)" show "standard_development (t \<^bold>\ u) \<^sup>*\\<^sup>* [t \<^bold>\ u]" proof (cases "standard_development t = []") show "standard_development t = [] \ ?thesis" using 1 ind2 cong_map_App1 Ide_iff_standard_development_empty \.Ide_iff_Trg_self apply simp by (metis (no_types, opaque_lifting) list.simps(8,9)) assume t: "standard_development t \ []" show ?thesis proof (cases "standard_development u = []") assume u: "standard_development u = []" have "standard_development (t \<^bold>\ u) = map (\X. X \<^bold>\ u) (standard_development t)" using u 1 \.Ide_iff_Src_self ide_char ind2 by auto also have "... \<^sup>*\\<^sup>* map (\a. a \<^bold>\ u) [t]" using cong_map_App2 [of u] by (meson 1 \.Arr.simps(4) Ide_iff_standard_development_empty t u ind1) also have "map (\a. a \<^bold>\ u) [t] = [t \<^bold>\ u]" by simp finally show ?thesis by blast next assume u: "standard_development u \ []" have "standard_development (t \<^bold>\ u) = map (\a. a \<^bold>\ \.Src u) (standard_development t) @ map (\b. \.Trg t \<^bold>\ b) (standard_development u)" using 1 by force moreover have "map (\a. a \<^bold>\ \.Src u) (standard_development t) \<^sup>*\\<^sup>* [t \<^bold>\ \.Src u]" proof - have "map (\a. a \<^bold>\ \.Src u) (standard_development t) \<^sup>*\\<^sup>* map (\a. a \<^bold>\ \.Src u) [t]" using t u 1 ind1 \.Ide_Src Ide_iff_standard_development_empty cong_map_App2 by (metis \.Arr.simps(4)) also have "map (\a. a \<^bold>\ \.Src u) [t] = [t \<^bold>\ \.Src u]" by simp finally show ?thesis by blast qed moreover have "map (\b. \.Trg t \<^bold>\ b) (standard_development u) \<^sup>*\\<^sup>* [\.Trg t \<^bold>\ u]" using t u 1 ind2 \.Ide_Trg Ide_iff_standard_development_empty cong_map_App1 by (metis (mono_tags, opaque_lifting) \.Arr.simps(4) list.simps(8,9)) moreover have "seq (map (\a. a \<^bold>\ \.Src u) (standard_development t)) (map (\b. \.Trg t \<^bold>\ b) (standard_development u))" using 1 u seqI\<^sub>\\<^sub>P Con_implies_Arr(1) Ide.simps(1) calculation(2) ide_char Ide_iff_standard_development_empty Src_hd_eqI Trg_last_eqI calculation(2-3) hd_map ind2 \.Arr.simps(4) \.Src.simps(4) \.Src_Trg \.Trg.simps(3) \.Trg_Src last_ConsL list.sel(1) by (metis (no_types, lifting)) ultimately have "standard_development (t \<^bold>\ u) \<^sup>*\\<^sup>* [t \<^bold>\ \.Src u] @ [\.Trg t \<^bold>\ u]" using cong_append [of "map (\a. a \<^bold>\ \.Src u) (standard_development t)" "map (\b. \.Trg t \<^bold>\ b) (standard_development u)" "[t \<^bold>\ \.Src u]" "[\.Trg t \<^bold>\ u]"] by simp moreover have "[t \<^bold>\ \.Src u] @ [\.Trg t \<^bold>\ u] \<^sup>*\\<^sup>* [t \<^bold>\ u]" using 1 \.Ide_Trg \.resid_Arr_Src \.resid_Arr_self \.null_char ide_char \.Arr_not_Nil by simp ultimately show ?thesis using cong_transitive by blast qed qed qed show "\t u. 
(\.Arr t \ \.Arr u \ \.Arr (\.subst u t) \ \ \.Ide (\.subst u t) \ standard_development (\.subst u t) \<^sup>*\\<^sup>* [\.subst u t]) \ \.Arr (\<^bold>\\<^bold>[t\<^bold>] \<^bold>\ u) \ \ \.Ide (\<^bold>\\<^bold>[t\<^bold>] \<^bold>\ u) \ standard_development (\<^bold>\\<^bold>[t\<^bold>] \<^bold>\ u) \<^sup>*\\<^sup>* [\<^bold>\\<^bold>[t\<^bold>] \<^bold>\ u]" proof fix t u assume 1: "\.Arr (\<^bold>\\<^bold>[t\<^bold>] \<^bold>\ u) \ \ \.Ide (\<^bold>\\<^bold>[t\<^bold>] \<^bold>\ u)" assume ind: "\.Arr t \ \.Arr u \ \.Arr (\.subst u t) \ \ \.Ide (\.subst u t) \ standard_development (\.subst u t) \<^sup>*\\<^sup>* [\.subst u t]" show "standard_development (\<^bold>\\<^bold>[t\<^bold>] \<^bold>\ u) \<^sup>*\\<^sup>* [\<^bold>\\<^bold>[t\<^bold>] \<^bold>\ u]" proof (cases "\.Ide (\.subst u t)") assume 2: "\.Ide (\.subst u t)" have "standard_development (\<^bold>\\<^bold>[t\<^bold>] \<^bold>\ u) = [\<^bold>\\<^bold>[\.Src t\<^bold>] \<^bold>\ \.Src u]" using 1 2 Ide_iff_standard_development_empty [of "\.subst u t"] \.Arr_Subst by simp also have "[\<^bold>\\<^bold>[\.Src t\<^bold>] \<^bold>\ \.Src u] \<^sup>*\\<^sup>* [\<^bold>\\<^bold>[t\<^bold>] \<^bold>\ u]" using 1 2 \.Ide_Src \.Ide_implies_Arr ide_char \.resid_Arr_Ide apply (intro conjI) apply simp_all apply (metis \.Ide.simps(1) \.Ide_Subst_iff \.Ide_Trg) by fastforce finally show ?thesis by blast next assume 2: "\ \.Ide (\.subst u t)" have "standard_development (\<^bold>\\<^bold>[t\<^bold>] \<^bold>\ u) = [\<^bold>\\<^bold>[\.Src t\<^bold>] \<^bold>\ \.Src u] @ standard_development (\.subst u t)" using 1 by auto also have "[\<^bold>\\<^bold>[\.Src t\<^bold>] \<^bold>\ \.Src u] @ standard_development (\.subst u t) \<^sup>*\\<^sup>* [\<^bold>\\<^bold>[\.Src t\<^bold>] \<^bold>\ \.Src u] @ [\.subst u t]" proof (intro cong_append) show "seq [\.Beta (\.Src t) (\.Src u)] (standard_development (\.subst u t))" using 1 2 ind arr_char ide_implies_arr \.Arr_Subst Con_implies_Arr(1) Src_hd_eqI apply (intro seqI\<^sub>\\<^sub>P) apply simp_all by (metis Arr.simps(1)) show "[\<^bold>\\<^bold>[\.Src t\<^bold>] \<^bold>\ \.Src u] \<^sup>*\\<^sup>* [\<^bold>\\<^bold>[\.Src t\<^bold>] \<^bold>\ \.Src u]" using 1 by (metis \.Arr.simps(5) \.Ide_Src \.Ide_implies_Arr Arr.simps(2) Resid_Arr_self ide_char \.arr_char) show "standard_development (\.subst u t) \<^sup>*\\<^sup>* [\.subst u t]" using 1 2 \.Arr_Subst ind by simp qed also have "[\<^bold>\\<^bold>[\.Src t\<^bold>] \<^bold>\ \.Src u] @ [\.subst u t] \<^sup>*\\<^sup>* [\<^bold>\\<^bold>[t\<^bold>] \<^bold>\ u]" proof show "[\<^bold>\\<^bold>[\.Src t\<^bold>] \<^bold>\ \.Src u] @ [\.subst u t] \<^sup>*\\<^sup>* [\<^bold>\\<^bold>[t\<^bold>] \<^bold>\ u]" proof - have "t \\ \.Src t \ \<^bold>\ \ u \\ \.Src u \ \<^bold>\" by (metis "1" \.Arr.simps(5) \.Coinitial_iff_Con \.Ide_Src \.Ide_iff_Src_self \.Ide_implies_Arr) moreover have "\.con (\<^bold>\\<^bold>[\.Src t\<^bold>] \<^bold>\ \.Src u) (\<^bold>\\<^bold>[t\<^bold>] \<^bold>\ u)" by (metis "1" \.head_redex.simps(9) \.prfx_implies_con \.subs_head_redex \.subs_implies_prfx) ultimately have "([\<^bold>\\<^bold>[\.Src t\<^bold>] \<^bold>\ \.Src u] @ [\.subst u t]) \<^sup>*\\\<^sup>* [\<^bold>\\<^bold>[t\<^bold>] \<^bold>\ u] = [\<^bold>\\<^bold>[\.Src t\<^bold>] \<^bold>\ \.Src u] \<^sup>*\\\<^sup>* [\<^bold>\\<^bold>[t\<^bold>] \<^bold>\ u] @ [\.subst u t] \<^sup>*\\\<^sup>* ([\<^bold>\\<^bold>[t\<^bold>] \<^bold>\ u] \<^sup>*\\\<^sup>* [\<^bold>\\<^bold>[\.Src t\<^bold>] \<^bold>\ \.Src u])" using Resid_append(1) [of "[\<^bold>\\<^bold>[\.Src 
t\<^bold>] \<^bold>\ \.Src u]" "[\.subst u t]" "[\<^bold>\\<^bold>[t\<^bold>] \<^bold>\ u]"] apply simp by (metis \.Arr_Subst \.Coinitial_iff_Con \.Ide_Src \.resid_Arr_Ide) also have "... = [\.subst (\.Trg u) (\.Trg t)] @ ([\.subst u t] \<^sup>*\\\<^sup>* [\.subst u t])" proof - have "t \\ \.Src t \ \<^bold>\ \ u \\ \.Src u \ \<^bold>\" by (metis "1" \.Arr.simps(5) \.Coinitial_iff_Con \.Ide_Src \.Ide_iff_Src_self \.Ide_implies_Arr) moreover have "\.Src t \\ t \ \<^bold>\ \ \.Src u \\ u \ \<^bold>\" using \.Con_sym calculation(1) by presburger moreover have "\.con (\.subst u t) (\.subst u t)" by (meson \.Arr_Subst \.Con_implies_Arr2 \.arr_char \.arr_def calculation(2)) moreover have "\.con (\<^bold>\\<^bold>[t\<^bold>] \<^bold>\ u) (\<^bold>\\<^bold>[\.Src t\<^bold>] \<^bold>\ \.Src u)" using \\.con (\<^bold>\\<^bold>[\.Src t\<^bold>] \<^bold>\ \.Src u) (\<^bold>\\<^bold>[t\<^bold>] \<^bold>\ u)\ \.con_sym by blast moreover have "\.con (\<^bold>\\<^bold>[\.Src t\<^bold>] \<^bold>\ \.Src u) (\<^bold>\\<^bold>[t\<^bold>] \<^bold>\ u)" using \\.con (\<^bold>\\<^bold>[\.Src t\<^bold>] \<^bold>\ \.Src u) (\<^bold>\\<^bold>[t\<^bold>] \<^bold>\ u)\ by blast moreover have "\.con (\.subst u t) (\.subst (u \\ \.Src u) (t \\ \.Src t))" by (metis \.Coinitial_iff_Con \.Ide_Src calculation(1-3) \.resid_Arr_Ide) ultimately show ?thesis using "1" by auto qed finally have "([\<^bold>\\<^bold>[\.Src t\<^bold>] \<^bold>\ \.Src u] @ [\.subst u t]) \<^sup>*\\\<^sup>* [\<^bold>\\<^bold>[t\<^bold>] \<^bold>\ u] = [\.subst (\.Trg u) (\.Trg t)] @ [\.subst u t] \<^sup>*\\\<^sup>* [\.subst u t]" by blast moreover have "Ide ..." by (metis "1" "2" \.Arr.simps(5) \.Arr_Subst \.Ide_Subst \.Ide_Trg Nil_is_append_conv Arr_append_iff\<^sub>P\<^sub>W\<^sub>E Con_implies_Arr(2) Ide.simps(1-2) Ide_appendI\<^sub>P\<^sub>W\<^sub>E Resid_Arr_self ide_char calculation \.ide_char ind Con_imp_Arr_Resid) ultimately show ?thesis using ide_char by presburger qed show "[\<^bold>\\<^bold>[t\<^bold>] \<^bold>\ u] \<^sup>*\\<^sup>* [\<^bold>\\<^bold>[\.Src t\<^bold>] \<^bold>\ \.Src u] @ [\.subst u t]" proof - have "[\<^bold>\\<^bold>[t\<^bold>] \<^bold>\ u] \<^sup>*\\\<^sup>* ([\<^bold>\\<^bold>[\.Src t\<^bold>] \<^bold>\ \.Src u] @ [\.subst u t]) = ([\<^bold>\\<^bold>[t\<^bold>] \<^bold>\ u] \<^sup>*\\\<^sup>* [\<^bold>\\<^bold>[\.Src t\<^bold>] \<^bold>\ \.Src u]) \<^sup>*\\\<^sup>* [\.subst u t]" by fastforce also have "... = [\.subst u t] \<^sup>*\\\<^sup>* [\.subst u t]" proof - have "t \\ \.Src t \ \<^bold>\ \ u \\ \.Src u \ \<^bold>\" by (metis "1" \.Arr.simps(5) \.Coinitial_iff_Con \.Ide_Src \.Ide_iff_Src_self \.Ide_implies_Arr) moreover have "\.con (\.subst u t) (\.subst u t)" by (metis "1" \.Arr.simps(5) \.Arr_Subst \.Coinitial_iff_Con \.con_def \.null_char) moreover have "\.con (\<^bold>\\<^bold>[t\<^bold>] \<^bold>\ u) (\<^bold>\\<^bold>[\.Src t\<^bold>] \<^bold>\ \.Src u)" by (metis "1" \.Con_sym \.con_def \.head_redex.simps(9) \.null_char \.prfx_implies_con \.subs_head_redex \.subs_implies_prfx) moreover have "\.con (\.subst (u \\ \.Src u) (t \\ \.Src t)) (\.subst u t)" by (metis \.Coinitial_iff_Con \.Ide_Src calculation(1) calculation(2) \.resid_Arr_Ide) ultimately show ?thesis using \.resid_Arr_Ide apply simp by (metis \.Coinitial_iff_Con \.Ide_Src) qed finally have "[\<^bold>\\<^bold>[t\<^bold>] \<^bold>\ u] \<^sup>*\\\<^sup>* ([\<^bold>\\<^bold>[\.Src t\<^bold>] \<^bold>\ \.Src u] @ [\.subst u t]) = [\.subst u t] \<^sup>*\\\<^sup>* [\.subst u t]" by blast moreover have "Ide ..." 
by (metis "1" "2" \.Arr.simps(5) \.Arr_Subst Con_implies_Arr(2) Resid_Arr_self ind ide_char) ultimately show ?thesis using ide_char by presburger qed qed finally show ?thesis by blast qed qed qed lemma Src_hd_standard_development: assumes "\.Arr t" and "\ \.Ide t" shows "\.Src (hd (standard_development t)) = \.Src t" by (metis assms Src_hd_eqI cong_standard_development list.sel(1)) lemma Trg_last_standard_development: assumes "\.Arr t" and "\ \.Ide t" shows "\.Trg (last (standard_development t)) = \.Trg t" by (metis assms Trg_last_eqI cong_standard_development last_ConsL) lemma Srcs_standard_development: shows "\\.Arr t; standard_development t \ []\ \ Srcs (standard_development t) = {\.Src t}" by (metis Con_implies_Arr(1) Ide.simps(1) Ide_iff_standard_development_empty Src_hd_standard_development Srcs_simp\<^sub>\\<^sub>P cong_standard_development ide_char) lemma Trgs_standard_development: shows "\\.Arr t; standard_development t \ []\ \ Trgs (standard_development t) = {\.Trg t}" by (metis Con_implies_Arr(2) Ide.simps(1) Ide_iff_standard_development_empty Trg_last_standard_development Trgs_simp\<^sub>\\<^sub>P cong_standard_development ide_char) lemma development_standard_development: shows "\.Arr t \ development t (standard_development t)" apply (rule standard_development.induct) apply blast apply simp apply (simp add: development_map_Lam) proof fix t1 t2 assume ind1: "\.Arr t1 \ \.Arr t2 \ \.Arr t1 \ development t1 (standard_development t1)" assume ind2: "\.Arr t1 \ \.Arr t2 \ \.Arr t2 \ development t2 (standard_development t2)" assume t: "\.Arr (t1 \<^bold>\ t2)" show "development (t1 \<^bold>\ t2) (standard_development (t1 \<^bold>\ t2))" proof (cases "standard_development t1 = []") show "standard_development t1 = [] \ development (t1 \<^bold>\ t2) (standard_development (t1 \<^bold>\ t2))" using t ind2 \.Ide_Src \.Ide_Trg \.Ide_iff_Src_self \.Ide_iff_Trg_self Ide_iff_standard_development_empty development_map_App_2 [of "\.Src t1" t2 "standard_development t2"] by fastforce assume t1: "standard_development t1 \ []" show "development (t1 \<^bold>\ t2) (standard_development (t1 \<^bold>\ t2))" proof (cases "standard_development t2 = []") assume t2: "standard_development t2 = []" show ?thesis using t t2 ind1 Ide_iff_standard_development_empty development_map_App_1 by simp next assume t2: "standard_development t2 \ []" have "development (t1 \<^bold>\ t2) (map (\a. a \<^bold>\ \.Src t2) (standard_development t1))" using \.Arr.simps(4) development_map_App_1 ind1 t by presburger moreover have "development ((t1 \<^bold>\ t2) \<^sup>1\\\<^sup>* map (\a. a \<^bold>\ \.Src t2) (standard_development t1)) (map (\a. \.Trg t1 \<^bold>\ a) (standard_development t2))" proof - have "\.App t1 t2 \<^sup>1\\\<^sup>* map (\a. a \<^bold>\ \.Src t2) (standard_development t1) = \.Trg t1 \<^bold>\ t2" proof - have "map (\a. a \<^bold>\ \.Src t2) (standard_development t1) \<^sup>*\\<^sup>* [t1 \<^bold>\ \.Src t2]" proof - have "map (\a. a \<^bold>\ \.Src t2) (standard_development t1) = standard_development (t1 \<^bold>\ \.Src t2)" by (metis \.Arr.simps(4) \.Ide_Src \.Ide_iff_Src_self Ide_iff_standard_development_empty \.Ide_implies_Arr Nil_is_map_conv append_Nil2 standard_development.simps(4) t) also have "standard_development (t1 \<^bold>\ \.Src t2) \<^sup>*\\<^sup>* [t1 \<^bold>\ \.Src t2]" by (metis \.Arr.simps(4) \.Ide.simps(4) \.Ide_Src \.Ide_implies_Arr cong_standard_development development_Ide ind1 t t1) finally show ?thesis by blast qed hence "[t1 \<^bold>\ t2] \<^sup>*\\\<^sup>* map (\a. 
a \<^bold>\ \.Src t2) (standard_development t1) = [t1 \<^bold>\ t2] \<^sup>*\\\<^sup>* [t1 \<^bold>\ \.Src t2]" by (metis Resid_parallel con_imp_coinitial prfx_implies_con calculation development_implies map_is_Nil_conv t1) also have "[t1 \<^bold>\ t2] \<^sup>*\\\<^sup>* [t1 \<^bold>\ \.Src t2] = [\.Trg t1 \<^bold>\ t2]" using t \.arr_resid_iff_con \.resid_Arr_self by simp force finally have "[t1 \<^bold>\ t2] \<^sup>*\\\<^sup>* map (\a. a \<^bold>\ \.Src t2) (standard_development t1) = [\.Trg t1 \<^bold>\ t2]" by blast thus ?thesis by (simp add: Resid1x_as_Resid') qed thus ?thesis by (metis ind2 \.Arr.simps(4) \.Ide_Trg \.Ide_iff_Src_self development_map_App_2 \.reduction_strategy_def \.head_strategy_is_reduction_strategy t) qed ultimately show ?thesis using t development_append [of "t1 \<^bold>\ t2" "map (\a. a \<^bold>\ \.Src t2) (standard_development t1)" "map (\b. \.Trg t1 \<^bold>\ b) (standard_development t2)"] by auto qed qed next fix t1 t2 assume ind: "\.Arr t1 \ \.Arr t2 \ \.Arr (\.subst t2 t1) \ development (\.subst t2 t1) (standard_development (\.subst t2 t1))" show "\.Arr (\<^bold>\\<^bold>[t1\<^bold>] \<^bold>\ t2) \ development (\<^bold>\\<^bold>[t1\<^bold>] \<^bold>\ t2) (standard_development (\<^bold>\\<^bold>[t1\<^bold>] \<^bold>\ t2))" proof assume 1: "\.Arr (\<^bold>\\<^bold>[t1\<^bold>] \<^bold>\ t2)" have "development (\.subst t2 t1) (standard_development (\.subst t2 t1))" using 1 ind by (simp add: \.Arr_Subst) thus "development (\<^bold>\\<^bold>[t1\<^bold>] \<^bold>\ t2) (standard_development (\<^bold>\\<^bold>[t1\<^bold>] \<^bold>\ t2))" using 1 \.Ide_Src \.subs_Ide by auto qed qed lemma Std_standard_development: shows "Std (standard_development t)" apply (rule standard_development.induct) apply simp_all using Std_map_Lam apply blast proof fix t u assume t: "\.Arr t \ \.Arr u \ Std (standard_development t)" assume u: "\.Arr t \ \.Arr u \ Std (standard_development u)" assume 0: "\.Arr t \ \.Arr u" show "Std (map (\a. a \<^bold>\ \.Src u) (standard_development t) @ map (\b. \.Trg t \<^bold>\ b) (standard_development u))" proof (cases "\.Ide t") show "\.Ide t \ ?thesis" using 0 \.Ide_iff_Trg_self Ide_iff_standard_development_empty u Std_map_App2 by fastforce assume 1: "\ \.Ide t" show ?thesis proof (cases "\.Ide u") show "\.Ide u \ ?thesis" using t u 0 1 Std_map_App1 [of "\.Src u" "standard_development t"] \.Ide_Src by (metis Ide_iff_standard_development_empty append_Nil2 list.simps(8)) assume 2: "\ \.Ide u" show ?thesis proof (intro Std_append) show 3: "Std (map (\a. a \<^bold>\ \.Src u) (standard_development t))" using t 0 Std_map_App1 \.Ide_Src by blast show "Std (map (\b. \.Trg t \<^bold>\ b) (standard_development u))" using u 0 Std_map_App2 \.Ide_Trg by simp show "map (\a. a \<^bold>\ \.Src u) (standard_development t) = [] \ map (\b. \.Trg t \<^bold>\ b) (standard_development u) = [] \ \.sseq (last (map (\a. a \<^bold>\ \.Src u) (standard_development t))) (hd (map (\b. \.Trg t \<^bold>\ b) (standard_development u)))" proof - have "\.sseq (last (map (\a. a \<^bold>\ \.Src u) (standard_development t))) (hd (map (\b. \.Trg t \<^bold>\ b) (standard_development u)))" proof - obtain x where x: "last (map (\a. a \<^bold>\ \.Src u) (standard_development t)) = x \<^bold>\ \.Src u" using 0 1 Ide_iff_standard_development_empty last_map by auto obtain y where y: "hd (map (\b. 
\.Trg t \<^bold>\ b) (standard_development u)) = \.Trg t \<^bold>\ y" using 0 2 Ide_iff_standard_development_empty list.map_sel(1) by auto have "\.elementary_reduction x" proof - have "\.elementary_reduction (x \<^bold>\ \.Src u)" using x by (metis 0 1 3 Ide_iff_standard_development_empty Nil_is_map_conv Std.simps(2) Std_imp_sseq_last_hd append_butlast_last_id append_self_conv2 list.discI list.sel(1) \.sseq_imp_elementary_reduction2) thus ?thesis using 0 \.Ide_Src \.elementary_reduction_not_ide by auto qed moreover have "\.elementary_reduction y" proof - have "\.elementary_reduction (\.Trg t \<^bold>\ y)" using y by (metis 0 2 \.Ide_Trg Ide_iff_standard_development_empty u Std.elims(2) \.elementary_reduction.simps(4) list.map_sel(1) list.sel(1) \.sseq_imp_elementary_reduction1) thus ?thesis using 0 \.Ide_Trg \.elementary_reduction_not_ide by auto qed moreover have "\.Trg t = \.Trg x" by (metis 0 1 Ide_iff_standard_development_empty Trg_last_standard_development x \.lambda.inject(3) last_map) moreover have "\.Src u = \.Src y" using y by (metis 0 2 \.Arr_not_Nil \.Coinitial_iff_Con Ide_iff_standard_development_empty development.elims(2) development_imp_Arr development_standard_development \.lambda.inject(3) list.map_sel(1) list.sel(1)) ultimately show ?thesis using x y by simp qed thus ?thesis by blast qed qed qed qed next fix t u assume ind: "\.Arr t \ \.Arr u \ Std (standard_development (\.subst u t))" show "\.Arr t \ \.Arr u \ Std ((\<^bold>\\<^bold>[\.Src t\<^bold>] \<^bold>\ \.Src u) # standard_development (\.subst u t))" proof assume 1: "\.Arr t \ \.Arr u" show "Std ((\<^bold>\\<^bold>[\.Src t\<^bold>] \<^bold>\ \.Src u) # standard_development (\.subst u t))" proof (cases "\.Ide (\.subst u t)") show "\.Ide (\.subst u t) \ Std ((\<^bold>\\<^bold>[\.Src t\<^bold>] \<^bold>\ \.Src u) # standard_development (\.subst u t))" using 1 \.Arr_Subst \.Ide_Src Ide_iff_standard_development_empty by simp assume 2: "\ \.Ide (\.subst u t)" show "Std ((\<^bold>\\<^bold>[\.Src t\<^bold>] \<^bold>\ \.Src u) # standard_development (\.subst u t))" proof - have "\.sseq (\<^bold>\\<^bold>[\.Src t\<^bold>] \<^bold>\ \.Src u) (hd (standard_development (\.subst u t)))" proof - have "\.elementary_reduction (hd (standard_development (\.subst u t)))" using ind by (metis 1 2 \.Arr_Subst Ide_iff_standard_development_empty Std.elims(2) list.sel(1) \.sseq_imp_elementary_reduction1) moreover have "\.seq (\<^bold>\\<^bold>[\.Src t\<^bold>] \<^bold>\ \.Src u) (hd (standard_development (\.subst u t)))" using 1 2 Src_hd_standard_development calculation \.Arr.simps(5) \.Arr_Src \.Arr_Subst \.Src_Subst \.Trg.simps(4) \.Trg_Src \.arr_char \.elementary_reduction_is_arr \.seq_char by presburger ultimately show ?thesis using 1 \.Ide_Src \.sseq_Beta by auto qed moreover have "Std (standard_development (\.subst u t))" using 1 ind by blast ultimately show ?thesis by (metis 1 2 \.Arr_Subst Ide_iff_standard_development_empty Std.simps(3) list.collapse) qed qed qed qed subsection "Standardization" text \ In this section, we define and prove correct a function that takes an arbitrary reduction path and produces a standard reduction path congruent to it. The method is roughly analogous to insertion sort: given a path, recursively standardize the tail and then ``insert'' the head into to the result. 
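(For comparison only, the following is a minimal sketch of ordinary insertion sort on natural numbers, written in the same recursive style; the names \ins\ and \isort\ are ours and are not part of this development. The function \standardize\ defined below has exactly this shape, with \stdz_insert\ playing the role of \ins\.)

    (* Insert one element into an already-sorted list. *)
    fun ins :: "nat => nat list => nat list"
    where "ins x [] = [x]"
        | "ins x (y # ys) = (if x <= y then x # y # ys else y # ins x ys)"

    (* Sort by recursively sorting the tail, then inserting the head. *)
    fun isort :: "nat list => nat list"
    where "isort [] = []"
        | "isort (x # xs) = ins x (isort xs)"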
A complication is that in general the head may be a parallel reduction instead of an elementary reduction, and in any case elementary reductions are not preserved under residuation, so we need to be able to handle the parallel reductions that arise from permuting elementary reductions. In general, this means that parallel reduction steps have to be decomposed into factors, and then each factor has to be inserted at its proper position.

Another issue is that reductions don't all happen at the top level of a term, so we need to be able to descend recursively into terms during the insertion procedure. The key idea here is that in a standard reduction, once a step has occurred that is not a head reduction, all subsequent terms will have \App\ as their top-level constructor. So, once we have passed a step that is not a head reduction, we can recursively descend into the subsequent applications and treat the ``rator'' and the ``rand'' parts independently.

The following function performs the core insertion part of the standardization algorithm. It assumes that it is given an arbitrary parallel reduction \t\ and an already-standard reduction path \U\, and it inserts \t\ into \U\, producing a standard reduction path that is congruent to \t # U\. A somewhat elaborate case analysis is required to determine whether \t\ needs to be factored and whether part of it might need to be permuted with the head of \U\.

The recursion is complicated by the need to make sure that the second argument \U\ is always a standard reduction path. This is so that it is possible to decide when the rest of the steps will be applications and it is therefore possible to recurse into them. This constrains what recursive calls we can make, since we are not able to make a recursive call in which an identity has been prepended to \U\. Also, if \t # U\ consists completely of identities, then its standardization is the empty list \[]\, which is not a path and cannot be congruent to \t # U\. So, in order to be able to apply the induction hypotheses in the correctness proof, we need to make sure that we don't make recursive calls when \U\ itself would consist entirely of identities.

Finally, when we descend through an application, the step \t\ and the path \U\ are projected to their ``rator'' and ``rand'' components, which are treated separately and the results concatenated. However, the projection operations can introduce identities and therefore do not preserve elementary reductions. To handle this, we need to filter out identities after projection but before the recursive call.

Ensuring termination also involves some care: we make recursive calls in which the length of the second argument is increased, but the ``height'' of the first argument is decreased. So we use a lexicographic order that makes the height of the first argument more significant and the length of the second argument secondary. The base cases either discard paths that consist entirely of identities, or else they expand a single parallel reduction \t\ into a standard development. 
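(To illustrate the kind of lexicographic termination argument just described, where the primary component either decreases strictly or stays fixed while a secondary component decreases, here is a classic standalone example that Isabelle's \fun\ command accepts by finding such a lexicographic order automatically. It is included only as an illustration; the name \ack\ is not part of this development, and the actual measure used for \stdz_insert\ is the relation \stdzins_rel\ defined below.)

    (* Ackermann's function: the first argument is the dominant measure;
       when it stays the same, the second argument decreases. *)
    fun ack :: "nat => nat => nat"
    where "ack 0 n = Suc n"
        | "ack (Suc m) 0 = ack m 1"
        | "ack (Suc m) (Suc n) = ack m (ack (Suc m) n)"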
\ function (sequential) stdz_insert where "stdz_insert t [] = standard_development t" | "stdz_insert \<^bold>\_\<^bold>\ U = stdz_insert (hd U) (tl U)" | "stdz_insert \<^bold>\\<^bold>[t\<^bold>] U = (if \.Ide t then stdz_insert (hd U) (tl U) else map \.Lam (stdz_insert t (map \.un_Lam U)))" | "stdz_insert (\<^bold>\\<^bold>[t\<^bold>] \<^bold>\ u) ((\<^bold>\\<^bold>[_\<^bold>] \<^bold>\ _) # U) = stdz_insert (\<^bold>\\<^bold>[t\<^bold>] \<^bold>\ u) U" | "stdz_insert (t \<^bold>\ u) U = (if \.Ide (t \<^bold>\ u) then stdz_insert (hd U) (tl U) else if \.seq (t \<^bold>\ u) (hd U) then if \.contains_head_reduction (t \<^bold>\ u) then if \.Ide ((t \<^bold>\ u) \\ \.head_redex (t \<^bold>\ u)) then \.head_redex (t \<^bold>\ u) # stdz_insert (hd U) (tl U) else \.head_redex (t \<^bold>\ u) # stdz_insert ((t \<^bold>\ u) \\ \.head_redex (t \<^bold>\ u)) U else if \.contains_head_reduction (hd U) then if \.Ide ((t \<^bold>\ u) \\ \.head_strategy (t \<^bold>\ u)) then stdz_insert (\.head_strategy (t \<^bold>\ u)) (tl U) else \.head_strategy (t \<^bold>\ u) # stdz_insert ((t \<^bold>\ u) \\ \.head_strategy (t \<^bold>\ u)) (tl U) else map (\a. a \<^bold>\ \.Src u) (stdz_insert t (filter notIde (map \.un_App1 U))) @ map (\b. \.Trg (\.un_App1 (last U)) \<^bold>\ b) (stdz_insert u (filter notIde (map \.un_App2 U))) else [])" | "stdz_insert (\<^bold>\\<^bold>[t\<^bold>] \<^bold>\ u) U = (if \.Arr t \ \.Arr u then (\<^bold>\\<^bold>[\.Src t\<^bold>] \<^bold>\ \.Src u) # stdz_insert (\.subst u t) U else [])" | "stdz_insert _ _ = []" by pat_completeness auto (* * TODO: * In the case "stdz_insert (M \<^bold>\ N) U": * The "if \.seq (M \<^bold>\ N) (hd U)" is needed for the termination proof. * The first "if \.Ide (\.resid (M \<^bold>\ N) (\.head_redex (M \<^bold>\ N)))" * cannot be removed because the resulting induction rule does not contain * the induction hypotheses necessary to prove the correctness. * The second "if \.Ide (\.resid (M \<^bold>\ N) (\.head_redex (M \<^bold>\ N)))" * results in a similar, but different problem. * In the case "stdz_insert (\.Beta M N) U": * The "if \.Arr M \ \.Arr N" is needed for the termination proof. * It is possible that the function would still be correct if some of the tests * for whether the term being inserted is an identity were omitted, but if these * tests are removed, then the correctness proof fails ways that are not obviously * repairable, probably due to the induction rule not having all the needed * induction hypotheses. *) fun standardize where "standardize [] = []" | "standardize U = stdz_insert (hd U) (standardize (tl U))" abbreviation stdzins_rel where "stdzins_rel \ mlex_prod (length o snd) (inv_image (mlex_prod \.hgt \.subterm_rel) fst)" termination stdz_insert using \.subterm.intros(2-3) \.hgt_Subst less_Suc_eq_le \.elementary_reduction_decreases_hgt \.elementary_reduction_head_redex \.contains_head_reduction_iff \.elementary_reduction_is_arr \.Src_head_redex \.App_Var_contains_no_head_reduction \.hgt_resid_App_head_redex \.seq_char apply (relation stdzins_rel) apply (auto simp add: wf_mlex \.wf_subterm_rel mlex_iff mlex_less \.subterm_lemmas(1)) by (meson dual_order.eq_iff length_filter_le not_less_eq_eq)+ lemma stdz_insert_Ide: shows "\t. Ide (t # U) \ stdz_insert t U = []" proof (induct U) show "\t. Ide [t] \ stdz_insert t [] = []" by (metis Ide_iff_standard_development_empty \.Ide_implies_Arr Ide.simps(2) \.ide_char stdz_insert.simps(1)) show "\U. \\t. 
Ide (t # U) \ stdz_insert t U = []; Ide (t # u # U)\ \ stdz_insert t (u # U) = []" for t u using \.ide_char apply (cases t; cases u) apply simp_all by fastforce qed lemma stdz_insert_Ide_Std: shows "\u. \\.Ide u; seq [u] U; Std U\ \ stdz_insert u U = stdz_insert (hd U) (tl U)" proof (induct U) show "\u. \\.Ide u; seq [u] []; Std []\ \ stdz_insert u [] = stdz_insert (hd []) (tl [])" by (simp add: seq_char) fix u v U assume u: "\.Ide u" assume seq: "seq [u] (v # U)" assume Std: "Std (v # U)" assume ind: "\u. \\.Ide u; seq [u] U; Std U\ \ stdz_insert u U = stdz_insert (hd U) (tl U)" show "stdz_insert u (v # U) = stdz_insert (hd (v # U)) (tl (v # U))" using u ind stdz_insert_Ide Ide_implies_Arr apply (cases u; cases v) apply simp_all proof - fix x y a b assume xy: "\.Ide x \ \.Ide y" assume u': "u = x \<^bold>\ y" assume v': "v = \<^bold>\\<^bold>[a\<^bold>] \<^bold>\ b" have ab: "\.Ide a \ \.Ide b" using Std \v = \<^bold>\\<^bold>[a\<^bold>] \<^bold>\ b\ Std.elims(2) \.sseq_Beta by (metis Std_consE \.elementary_reduction.simps(5) Std.simps(2)) have "x = \<^bold>\\<^bold>[a\<^bold>] \ y = b" using xy ab u u' v' seq seq_char by (metis \.Ide_iff_Src_self \.Ide_iff_Trg_self \.Ide_implies_Arr \.Src.simps(5) Srcs_simp\<^sub>\\<^sub>P Trgs.simps(2) \.lambda.inject(3) list.sel(1) singleton_insert_inj_eq \.targets_char\<^sub>\) thus "stdz_insert (x \<^bold>\ y) ((\<^bold>\\<^bold>[a\<^bold>] \<^bold>\ b) # U) = stdz_insert (\<^bold>\\<^bold>[a\<^bold>] \<^bold>\ b) U" using u u' stdz_insert.simps(4) by presburger qed qed text \ Insertion of a term with \Beta\ as its top-level constructor always leaves such a term at the head of the result. Stated another way, \Beta\ at the top-level must always come first in a standard reduction path. \ lemma stdz_insert_Beta_ind: shows "\t U. \\.hgt t + length U \ n; \.is_Beta t; seq [t] U\ \ \.is_Beta (hd (stdz_insert t U))" proof (induct n) show "\t U. \\.hgt t + length U \ 0; \.is_Beta t; seq [t] U\ \ \.is_Beta (hd (stdz_insert t U))" using Arr.simps(1) seq_char by blast fix n t U assume ind: "\t U. \\.hgt t + length U \ n; \.is_Beta t; seq [t] U\ \ \.is_Beta (hd (stdz_insert t U))" assume seq: "seq [t] U" assume n: "\.hgt t + length U \ Suc n" assume t: "\.is_Beta t" show "\.is_Beta (hd (stdz_insert t U))" using t seq seq_char by (cases U; cases t; cases "hd U") auto qed lemma stdz_insert_Beta: assumes "\.is_Beta t" and "seq [t] U" shows "\.is_Beta (hd (stdz_insert t U))" using assms stdz_insert_Beta_ind by blast text \ This is the correctness lemma for insertion: Given a term \t\ and standard reduction path \U\ sequential with it, the result of insertion is a standard reduction path which is congruent to \t # U\ unless \t # U\ consists entirely of identities. The proof is very long. Its structure parallels that of the definition of the function \stdz_insert\. For really understanding the details, I strongly suggest viewing the proof in Isabelle/JEdit and using the code folding feature to unfold the proof a little bit at a time. \ lemma stdz_insert_correctness: shows "seq [t] U \ Std U \ Std (stdz_insert t U) \ (\ Ide (t # U) \ cong (stdz_insert t U) (t # U))" (is "?P t U") proof (rule stdz_insert.induct [of ?P]) show "\t. ?P t []" using seq_char by simp show "\u U. ?P \<^bold>\ (u # U)" using seq_char not_arr_null null_char by auto show "\x u U. 
?P (hd (u # U)) (tl (u # U)) \ ?P \<^bold>\x\<^bold>\ (u # U)" proof - fix x u U assume ind: "?P (hd (u # U)) (tl (u # U))" show "?P \<^bold>\x\<^bold>\ (u # U)" proof (intro impI, elim conjE, intro conjI) assume seq: "seq [\<^bold>\x\<^bold>\] (u # U)" assume Std: "Std (u # U)" have 1: "stdz_insert \<^bold>\x\<^bold>\ (u # U) = stdz_insert u U" by simp have 2: "U \ [] \ seq [u] U" using Std Std_imp_Arr by (simp add: arrI\<^sub>P arr_append_imp_seq) show "Std (stdz_insert \<^bold>\x\<^bold>\ (u # U))" using ind by (metis 1 2 Std Std_standard_development list.exhaust_sel list.sel(1) list.sel(3) reduction_paths.Std.simps(3) reduction_paths.stdz_insert.simps(1)) show "\ Ide (\<^bold>\x\<^bold>\ # u # U) \ stdz_insert \<^bold>\x\<^bold>\ (u # U) \<^sup>*\\<^sup>* \<^bold>\x\<^bold>\ # u # U" proof (cases "U = []") show "U = [] \ ?thesis" using cong_standard_development cong_cons_ideI(1) apply simp by (metis Arr.simps(1-2) Arr_iff_Con_self Con_rec(3) \.in_sourcesI con_char cong_transitive ideE \.Ide.simps(2) \.arr_char \.ide_char seq) assume U: "U \ []" show ?thesis using 1 2 ind seq seq_char cong_cons_ideI(1) apply simp by (metis Std Std_consE U \.Arr.simps(2) \.Ide.simps(2) \.targets_simps(2) prfx_transitive) qed qed qed show "\M u U. \\.Ide M \ ?P (hd (u # U)) (tl (u # U)); \ \.Ide M \ ?P M (map \.un_Lam (u # U))\ \ ?P \<^bold>\\<^bold>[M\<^bold>] (u # U)" proof - fix M u U assume ind1: "\.Ide M \ ?P (hd (u # U)) (tl (u # U))" assume ind2: "\ \.Ide M \ ?P M (map \.un_Lam (u # U))" show "?P \<^bold>\\<^bold>[M\<^bold>] (u # U)" proof (intro impI, elim conjE) assume seq: "seq [\<^bold>\\<^bold>[M\<^bold>]] (u # U)" assume Std: "Std (u # U)" have u: "\.is_Lam u" using seq by (metis insert_subset \.lambda.disc(8) list.simps(15) mem_Collect_eq seq_Lam_Arr_implies) have U: "set U \ Collect \.is_Lam" using u seq by (metis insert_subset \.lambda.disc(8) list.simps(15) seq_Lam_Arr_implies) show "Std (stdz_insert \<^bold>\\<^bold>[M\<^bold>] (u # U)) \ (\ Ide (\<^bold>\\<^bold>[M\<^bold>] # u # U) \ stdz_insert \<^bold>\\<^bold>[M\<^bold>] (u # U) \<^sup>*\\<^sup>* \<^bold>\\<^bold>[M\<^bold>] # u # U)" proof (cases "\.Ide M") assume M: "\.Ide M" have 1: "stdz_insert \<^bold>\\<^bold>[M\<^bold>] (u # U) = stdz_insert u U" using M by simp show ?thesis proof (cases "Ide (u # U)") show "Ide (u # U) \ ?thesis" using 1 Std_standard_development Ide_iff_standard_development_empty by (metis Ide_imp_Ide_hd Std Std_implies_set_subset_elementary_reduction \.elementary_reduction_not_ide list.sel(1) list.set_intros(1) mem_Collect_eq subset_code(1)) assume 2: "\ Ide (u # U)" show ?thesis proof (cases "U = []") assume 3: "U = []" have 4: "standard_development u \<^sup>*\\<^sup>* [\<^bold>\\<^bold>[M\<^bold>]] @ [u]" using M 2 3 seq ide_char cong_standard_development [of u] cong_append_ideI(1) [of "[\<^bold>\\<^bold>[M\<^bold>]]" "[u]"] by (metis Arr_imp_arr_hd Ide.simps(2) Std Std_imp_Arr cong_transitive \.Ide.simps(3) \.arr_char \.ide_char list.sel(1) not_Cons_self2) show ?thesis using 1 3 4 Std_standard_development by force next assume 3: "U \ []" have "stdz_insert \<^bold>\\<^bold>[M\<^bold>] (u # U) = stdz_insert u U" using M 3 by simp have 5: "\.Arr u \ \ \.Ide u" by (meson "3" Std Std_consE \.elementary_reduction_not_ide \.ide_char \.sseq_imp_elementary_reduction1) have 4: "standard_development u @ U \<^sup>*\\<^sup>* ([\<^bold>\\<^bold>[M\<^bold>]] @ [u]) @ U" proof (intro cong_append seqI\<^sub>\\<^sub>P) show "Arr (standard_development u)" using 5 Std_standard_development Std_imp_Arr 
Ide_iff_standard_development_empty by force show "Arr U" using Std 3 by auto show "\.Trg (last (standard_development u)) = \.Src (hd U)" by (metis "3" "5" Std Std_consE Trg_last_standard_development \.seq_char \.sseq_imp_seq) show "standard_development u \<^sup>*\\<^sup>* [\<^bold>\\<^bold>[M\<^bold>]] @ [u]" using M 5 Std Std_imp_Arr cong_standard_development [of u] cong_append_ideI(3) [of "[\<^bold>\\<^bold>[M\<^bold>]]" "[u]"] by (metis (no_types, lifting) Arr.simps(2) Ide.simps(2) arr_char ide_char \.Ide.simps(3) \.arr_char \.ide_char prfx_transitive seq seq_def sources_cons) show "U \<^sup>*\\<^sup>* U" by (simp add: \Arr U\ arr_char prfx_reflexive) qed show ?thesis proof (intro conjI) show "Std (stdz_insert \<^bold>\\<^bold>[M\<^bold>] (u # U))" by (metis (no_types, lifting) 1 3 M Std Std_consE append_Cons append_eq_append_conv2 append_self_conv arr_append_imp_seq ind1 list.sel(1) list.sel(3) not_Cons_self2 seq seq_def) show "\ Ide (\<^bold>\\<^bold>[M\<^bold>] # u # U) \ stdz_insert \<^bold>\\<^bold>[M\<^bold>] (u # U) \<^sup>*\\<^sup>* \<^bold>\\<^bold>[M\<^bold>] # u # U" proof have "seq [u] U \ Std U" using 2 3 Std by (metis Cons_eq_appendI Std_consE arr_append_imp_seq neq_Nil_conv self_append_conv2 seq seqE) thus "stdz_insert \<^bold>\\<^bold>[M\<^bold>] (u # U) \<^sup>*\\<^sup>* \<^bold>\\<^bold>[M\<^bold>] # u # U" using M 1 2 3 4 ind1 cong_cons_ideI(1) [of "\<^bold>\\<^bold>[M\<^bold>]" "u # U"] apply simp by (meson cong_transitive seq) qed qed qed qed next assume M: "\ \.Ide M" have 1: "stdz_insert \<^bold>\\<^bold>[M\<^bold>] (u # U) = map \.Lam (stdz_insert M (\.un_Lam u # map \.un_Lam U))" using M by simp show ?thesis proof (intro conjI) show "Std (stdz_insert \<^bold>\\<^bold>[M\<^bold>] (u # U))" by (metis "1" M Std Std_map_Lam Std_map_un_Lam ind2 \.lambda.disc(8) list.simps(9) seq seq_Lam_Arr_implies seq_map_un_Lam) show "\ Ide (\<^bold>\\<^bold>[M\<^bold>] # u # U) \ stdz_insert \<^bold>\\<^bold>[M\<^bold>] (u # U) \<^sup>*\\<^sup>* \<^bold>\\<^bold>[M\<^bold>] # u # U" proof have "map \.Lam (stdz_insert M (\.un_Lam u # map \.un_Lam U)) \<^sup>*\\<^sup>* \<^bold>\\<^bold>[M\<^bold>] # u # U" proof - have "map \.Lam (stdz_insert M (\.un_Lam u # map \.un_Lam U)) \<^sup>*\\<^sup>* map \.Lam (M # \.un_Lam u # map \.un_Lam U)" by (metis (mono_tags, opaque_lifting) Ide_imp_Ide_hd M Std Std_map_un_Lam cong_map_Lam ind2 \.ide_char \.lambda.discI(2) list.sel(1) list.simps(9) seq seq_Lam_Arr_implies seq_map_un_Lam) thus ?thesis using u U by (simp add: map_idI subset_code(1)) qed thus "stdz_insert \<^bold>\\<^bold>[M\<^bold>] (u # U) \<^sup>*\\<^sup>* \<^bold>\\<^bold>[M\<^bold>] # u # U" using "1" by presburger qed qed qed qed qed show "\M N A B U. 
?P (\<^bold>\\<^bold>[M\<^bold>] \<^bold>\ N) U \ ?P (\<^bold>\\<^bold>[M\<^bold>] \<^bold>\ N) ((\<^bold>\\<^bold>[A\<^bold>] \<^bold>\ B) # U)" proof - fix M N A B U assume ind: "?P (\<^bold>\\<^bold>[M\<^bold>] \<^bold>\ N) U" show "?P (\<^bold>\\<^bold>[M\<^bold>] \<^bold>\ N) ((\<^bold>\\<^bold>[A\<^bold>] \<^bold>\ B) # U)" proof (intro impI, elim conjE) assume seq: "seq [\<^bold>\\<^bold>[M\<^bold>] \<^bold>\ N] ((\<^bold>\\<^bold>[A\<^bold>] \<^bold>\ B) # U)" assume Std: "Std ((\<^bold>\\<^bold>[A\<^bold>] \<^bold>\ B) # U)" have MN: "\.Arr M \ \.Arr N" using seq by (simp add: seq_char) have AB: "\.Trg M = A \ \.Trg N = B" proof - have 1: "\.Ide A \ \.Ide B" using Std by (metis Std.simps(2) Std.simps(3) \.elementary_reduction.simps(5) list.exhaust_sel \.sseq_Beta) moreover have "Trgs [\<^bold>\\<^bold>[M\<^bold>] \<^bold>\ N] = Srcs [\<^bold>\\<^bold>[A\<^bold>] \<^bold>\ B]" using 1 seq seq_char by (simp add: \.Ide_implies_Arr Srcs_simp\<^sub>\\<^sub>P) ultimately show ?thesis by (metis \.Ide_iff_Src_self \.Ide_implies_Arr \.Src.simps(5) Srcs_simp\<^sub>\\<^sub>P \.Trg.simps(2-3) Trgs_simp\<^sub>\\<^sub>P \.lambda.inject(2) \.lambda.sel(3-4) last.simps list.sel(1) seq_char seq the_elem_eq) qed have 1: "stdz_insert (\<^bold>\\<^bold>[M\<^bold>] \<^bold>\ N) ((\<^bold>\\<^bold>[A\<^bold>] \<^bold>\ B) # U) = stdz_insert (\<^bold>\\<^bold>[M\<^bold>] \<^bold>\ N) U" by auto show "Std (stdz_insert (\<^bold>\\<^bold>[M\<^bold>] \<^bold>\ N) ((\<^bold>\\<^bold>[A\<^bold>] \<^bold>\ B) # U)) \ (\ Ide ((\<^bold>\\<^bold>[M\<^bold>] \<^bold>\ N) # (\<^bold>\\<^bold>[A\<^bold>] \<^bold>\ B) # U) \ stdz_insert (\<^bold>\\<^bold>[M\<^bold>] \<^bold>\ N) ((\<^bold>\\<^bold>[A\<^bold>] \<^bold>\ B) # U) \<^sup>*\\<^sup>* (\<^bold>\\<^bold>[M\<^bold>] \<^bold>\ N) # (\<^bold>\\<^bold>[A\<^bold>] \<^bold>\ B) # U)" proof (cases "U = []") assume U: "U = []" have 1: "stdz_insert (\<^bold>\\<^bold>[M\<^bold>] \<^bold>\ N) ((\<^bold>\\<^bold>[A\<^bold>] \<^bold>\ B) # U) = standard_development (\<^bold>\\<^bold>[M\<^bold>] \<^bold>\ N)" using U by simp show ?thesis proof (intro conjI) show "Std (stdz_insert (\<^bold>\\<^bold>[M\<^bold>] \<^bold>\ N) ((\<^bold>\\<^bold>[A\<^bold>] \<^bold>\ B) # U))" using 1 Std_standard_development by presburger show "\ Ide ((\<^bold>\\<^bold>[M\<^bold>] \<^bold>\ N) # (\<^bold>\\<^bold>[A\<^bold>] \<^bold>\ B) # U) \ stdz_insert (\<^bold>\\<^bold>[M\<^bold>] \<^bold>\ N) ((\<^bold>\\<^bold>[A\<^bold>] \<^bold>\ B) # U) \<^sup>*\\<^sup>* (\<^bold>\\<^bold>[M\<^bold>] \<^bold>\ N) # (\<^bold>\\<^bold>[A\<^bold>] \<^bold>\ B) # U" proof (intro impI) assume 2: "\ Ide ((\<^bold>\\<^bold>[M\<^bold>] \<^bold>\ N) # (\<^bold>\\<^bold>[A\<^bold>] \<^bold>\ B) # U)" have "stdz_insert (\<^bold>\\<^bold>[M\<^bold>] \<^bold>\ N) ((\<^bold>\\<^bold>[A\<^bold>] \<^bold>\ B) # U) = (\<^bold>\\<^bold>[\.Src M\<^bold>] \<^bold>\ \.Src N) # standard_development (\.subst N M)" using 1 MN by simp also have "... 
\<^sup>*\\<^sup>* [\<^bold>\\<^bold>[M\<^bold>] \<^bold>\ N]" using MN AB cong_standard_development by (metis 1 calculation \.Arr.simps(5) \.Ide.simps(5)) also have "[\<^bold>\\<^bold>[M\<^bold>] \<^bold>\ N] \<^sup>*\\<^sup>* (\<^bold>\\<^bold>[M\<^bold>] \<^bold>\ N) # (\<^bold>\\<^bold>[A\<^bold>] \<^bold>\ B) # U" using AB MN U Beta_decomp(2) [of M N] by simp finally show "stdz_insert (\<^bold>\\<^bold>[M\<^bold>] \<^bold>\ N) ((\<^bold>\\<^bold>[A\<^bold>] \<^bold>\ B) # U) \<^sup>*\\<^sup>* (\<^bold>\\<^bold>[M\<^bold>] \<^bold>\ N) # (\<^bold>\\<^bold>[A\<^bold>] \<^bold>\ B) # U" by blast qed qed next assume U: "U \ []" have 1: "stdz_insert (\<^bold>\\<^bold>[M\<^bold>] \<^bold>\ N) ((\<^bold>\\<^bold>[A\<^bold>] \<^bold>\ B) # U) = stdz_insert (\<^bold>\\<^bold>[M\<^bold>] \<^bold>\ N) U" using U by simp have 2: "seq [\<^bold>\\<^bold>[M\<^bold>] \<^bold>\ N] U" using MN AB U Std \.sseq_imp_seq apply (intro seqI\<^sub>\\<^sub>P) apply auto by fastforce have 3: "Std U" using Std by fastforce show ?thesis proof (intro conjI) show "Std (stdz_insert (\<^bold>\\<^bold>[M\<^bold>] \<^bold>\ N) ((\<^bold>\\<^bold>[A\<^bold>] \<^bold>\ B) # U))" using 2 3 ind by simp show "\ Ide ((\<^bold>\\<^bold>[M\<^bold>] \<^bold>\ N) # (\<^bold>\\<^bold>[A\<^bold>] \<^bold>\ B) # U) \ stdz_insert (\<^bold>\\<^bold>[M\<^bold>] \<^bold>\ N) ((\<^bold>\\<^bold>[A\<^bold>] \<^bold>\ B) # U) \<^sup>*\\<^sup>* (\<^bold>\\<^bold>[M\<^bold>] \<^bold>\ N) # (\<^bold>\\<^bold>[A\<^bold>] \<^bold>\ B) # U" proof have "stdz_insert (\<^bold>\\<^bold>[M\<^bold>] \<^bold>\ N) ((\<^bold>\\<^bold>[A\<^bold>] \<^bold>\ B) # U) \<^sup>*\\<^sup>* [\<^bold>\\<^bold>[M\<^bold>] \<^bold>\ N] @ U" by (metis "1" "2" "3" \.Ide.simps(5) U Ide.simps(3) append.left_neutral append_Cons \.ide_char ind list.exhaust) also have "[\<^bold>\\<^bold>[M\<^bold>] \<^bold>\ N] @ U \<^sup>*\\<^sup>* ([\<^bold>\\<^bold>[M\<^bold>] \<^bold>\ N] @ [\<^bold>\\<^bold>[A\<^bold>] \<^bold>\ B]) @ U" using MN AB Beta_decomp by (meson "2" cong_append cong_reflexive seqE) also have "([\<^bold>\\<^bold>[M\<^bold>] \<^bold>\ N] @ [\<^bold>\\<^bold>[A\<^bold>] \<^bold>\ B]) @ U = (\<^bold>\\<^bold>[M\<^bold>] \<^bold>\ N) # (\<^bold>\\<^bold>[A\<^bold>] \<^bold>\ B) # U" by simp finally show "stdz_insert (\<^bold>\\<^bold>[M\<^bold>] \<^bold>\ N) ((\<^bold>\\<^bold>[A\<^bold>] \<^bold>\ B) # U) \<^sup>*\\<^sup>* (\<^bold>\\<^bold>[M\<^bold>] \<^bold>\ N) # (\<^bold>\\<^bold>[A\<^bold>] \<^bold>\ B) # U" by argo qed qed qed qed qed show "\M N u U. 
(\.Arr M \ \.Arr N \ ?P (\.subst N M) (u # U)) \ ?P (\<^bold>\\<^bold>[M\<^bold>] \<^bold>\ N) (u # U)" proof - fix M N u U assume ind: "\.Arr M \ \.Arr N \ ?P (\.subst N M) (u # U)" show "?P (\<^bold>\\<^bold>[M\<^bold>] \<^bold>\ N) (u # U)" proof (intro impI, elim conjE) assume seq: "seq [\<^bold>\\<^bold>[M\<^bold>] \<^bold>\ N] (u # U)" assume Std: "Std (u # U)" have MN: "\.Arr M \ \.Arr N" using seq seq_char by simp show "Std (stdz_insert (\<^bold>\\<^bold>[M\<^bold>] \<^bold>\ N) (u # U)) \ (\ Ide (\.Beta M N # u # U) \ cong (stdz_insert (\<^bold>\\<^bold>[M\<^bold>] \<^bold>\ N) (u # U)) ((\<^bold>\\<^bold>[M\<^bold>] \<^bold>\ N) # u # U))" proof (cases "\.Ide (\.subst N M)") assume 1: "\.Ide (\.subst N M)" have 2: "\ Ide (u # U)" using Std Std_implies_set_subset_elementary_reduction \.elementary_reduction_not_ide by force have 3: "stdz_insert (\<^bold>\\<^bold>[M\<^bold>] \<^bold>\ N) (u # U) = (\<^bold>\\<^bold>[\.Src M\<^bold>] \<^bold>\ \.Src N) # stdz_insert u U" using MN 1 seq seq_char Std stdz_insert_Ide_Std [of "\.subst N M" "u # U"] \.Ide_implies_Arr by (cases "U = []") auto show ?thesis proof (cases "U = []") assume U: "U = []" have 3: "stdz_insert (\<^bold>\\<^bold>[M\<^bold>] \<^bold>\ N) (u # U) = (\<^bold>\\<^bold>[\.Src M\<^bold>] \<^bold>\ \.Src N) # standard_development u" using 2 3 U by force have 4: "\.seq (\<^bold>\\<^bold>[\.Src M\<^bold>] \<^bold>\ \.Src N) (hd (standard_development u))" proof show "\.Arr (\<^bold>\\<^bold>[\.Src M\<^bold>] \<^bold>\ \.Src N)" using MN by simp show "\.Arr (hd (standard_development u))" by (metis 2 Arr_imp_arr_hd Ide.simps(2) Ide_iff_standard_development_empty Std Std_consE Std_imp_Arr Std_standard_development U \.arr_char \.ide_char) show "\.Trg (\<^bold>\\<^bold>[\.Src M\<^bold>] \<^bold>\ \.Src N) = \.Src (hd (standard_development u))" by (metis 1 2 Ide.simps(2) MN Src_hd_standard_development Std Std_consE Trg_last_Src_hd_eqI U \.Ide_iff_Src_self \.Ide_implies_Arr \.Src_Subst \.Trg.simps(4) \.Trg_Src \.Trg_Subst \.ide_char last_ConsL list.sel(1) seq) qed show ?thesis proof (intro conjI) show "Std (stdz_insert (\<^bold>\\<^bold>[M\<^bold>] \<^bold>\ N) (u # U))" proof - have "\.sseq (\<^bold>\\<^bold>[\.Src M\<^bold>] \<^bold>\ \.Src N) (hd (standard_development u))" using MN 2 4 U \.Ide_Src apply (intro \.sseq_BetaI) apply auto by (metis Ide.simps(1) Resid.simps(2) Std Std_consE Std_standard_development cong_standard_development hd_Cons_tl ide_char \.sseq_imp_elementary_reduction1 Std.simps(2)) thus ?thesis by (metis 3 Std.simps(2-3) Std_standard_development hd_Cons_tl \.sseq_imp_elementary_reduction1) qed show "\ Ide ((\<^bold>\\<^bold>[M\<^bold>] \<^bold>\ N) # u # U) \ stdz_insert (\<^bold>\\<^bold>[M\<^bold>] \<^bold>\ N) (u # U) \<^sup>*\\<^sup>* (\<^bold>\\<^bold>[M\<^bold>] \<^bold>\ N) # u # U" proof have "stdz_insert (\<^bold>\\<^bold>[M\<^bold>] \<^bold>\ N) (u # U) = [\<^bold>\\<^bold>[\.Src M\<^bold>] \<^bold>\ \.Src N] @ standard_development u" using 3 by simp also have 5: "[\<^bold>\\<^bold>[\.Src M\<^bold>] \<^bold>\ \.Src N] @ standard_development u \<^sup>*\\<^sup>* [\<^bold>\\<^bold>[\.Src M\<^bold>] \<^bold>\ \.Src N] @ [u]" proof (intro cong_append) show "seq [\<^bold>\\<^bold>[\.Src M\<^bold>] \<^bold>\ \.Src N] (standard_development u)" by (metis 2 3 Ide.simps(2) Ide_iff_standard_development_empty Std Std_consE Std_imp_Arr U \Std (stdz_insert (\.Beta M N) (u # U))\ arr_append_imp_seq arr_char calculation \.ide_char neq_Nil_conv) thus "[\<^bold>\\<^bold>[\.Src M\<^bold>] \<^bold>\ \.Src N] 
\<^sup>*\\<^sup>* [\<^bold>\\<^bold>[\.Src M\<^bold>] \<^bold>\ \.Src N]" using cong_reflexive by blast show "standard_development u \<^sup>*\\<^sup>* [u]" by (metis 2 Arr.simps(2) Ide.simps(2) Std Std_imp_Arr U cong_standard_development \.arr_char \.ide_char not_Cons_self2) qed also have "[\<^bold>\\<^bold>[\.Src M\<^bold>] \<^bold>\ \.Src N] @ [u] \<^sup>*\\<^sup>* ([\<^bold>\\<^bold>[\.Src M\<^bold>] \<^bold>\ \.Src N] @ [\.subst N M]) @ [u]" proof (intro cong_append) show "seq [\<^bold>\\<^bold>[\.Src M\<^bold>] \<^bold>\ \.Src N] [u]" by (metis 5 Con_implies_Arr(1) Ide.simps(1) arr_append_imp_seq arr_char ide_char not_Cons_self2) show "[\<^bold>\\<^bold>[\.Src M\<^bold>] \<^bold>\ \.Src N] \<^sup>*\\<^sup>* [\<^bold>\\<^bold>[\.Src M\<^bold>] \<^bold>\ \.Src N] @ [\.subst N M]" by (metis (full_types) 1 MN Ide_iff_standard_development_empty cong_standard_development cong_transitive \.Arr.simps(5) \.Arr_Subst \.Ide.simps(5) Beta_decomp(1) standard_development.simps(5)) show "[u] \<^sup>*\\<^sup>* [u]" using Resid_Arr_self Std Std_imp_Arr U ide_char by blast qed also have "([\<^bold>\\<^bold>[\.Src M\<^bold>] \<^bold>\ \.Src N] @ [\.subst N M]) @ [u] \<^sup>*\\<^sup>* [\<^bold>\\<^bold>[M\<^bold>] \<^bold>\ N] @ [u]" by (metis Beta_decomp(1) MN U Resid_Arr_self cong_append ide_char seq_char seq) also have "[\<^bold>\\<^bold>[M\<^bold>] \<^bold>\ N] @ [u] = (\<^bold>\\<^bold>[M\<^bold>] \<^bold>\ N) # u # U" using U by simp finally show "stdz_insert (\<^bold>\\<^bold>[M\<^bold>] \<^bold>\ N) (u # U) \<^sup>*\\<^sup>* (\<^bold>\\<^bold>[M\<^bold>] \<^bold>\ N) # u # U" by blast qed qed next assume U: "U \ []" have 4: "seq [u] U" by (simp add: Std U arrI\<^sub>P arr_append_imp_seq) have 5: "Std U" using Std by auto have 6: "Std (stdz_insert u U) \ set (stdz_insert u U) \ {a. 
\.elementary_reduction a} \ (\ Ide (u # U) \ cong (stdz_insert u U) (u # U))" proof - have "seq [\.subst N M] (u # U) \ Std (u # U)" using MN Std Std_imp_Arr \.Arr_Subst apply (intro conjI seqI\<^sub>\\<^sub>P) apply simp_all by (metis Trg_last_Src_hd_eqI \.Trg.simps(4) last_ConsL list.sel(1) seq) thus ?thesis using MN 1 2 3 4 5 ind Std_implies_set_subset_elementary_reduction stdz_insert_Ide_Std apply simp by (meson cong_cons_ideI(1) cong_transitive lambda_calculus.ide_char) qed have 7: "\.seq (\<^bold>\\<^bold>[\.Src M\<^bold>] \<^bold>\ \.Src N) (hd (stdz_insert u U))" using MN 1 2 6 Arr_imp_arr_hd Con_implies_Arr(2) ide_char \.arr_char Ide_iff_standard_development_empty Src_hd_eqI Trg_last_Src_hd_eqI Trg_last_standard_development \.Ide_implies_Arr seq apply (intro \.seqI\<^sub>\) apply simp apply (metis Ide.simps(1)) by (metis \.Arr.simps(5) \.Ide.simps(5) last.simps standard_development.simps(5)) have 8: "seq [\<^bold>\\<^bold>[\.Src M\<^bold>] \<^bold>\ \.Src N] (stdz_insert u U)" by (metis 2 6 7 seqI\<^sub>\\<^sub>P Arr.simps(2) Con_implies_Arr(2) Ide.simps(1) ide_char last.simps \.seqE \.seq_char) show ?thesis proof (intro conjI) show "Std (stdz_insert (\<^bold>\\<^bold>[M\<^bold>] \<^bold>\ N) (u # U))" proof - have "\.sseq (\<^bold>\\<^bold>[\.Src M\<^bold>] \<^bold>\ \.Src N) (hd (stdz_insert u U))" by (metis MN 2 6 7 \.Ide_Src Std.elims(2) Ide.simps(1) Resid.simps(2) ide_char list.sel(1) \.sseq_BetaI \.sseq_imp_elementary_reduction1) thus ?thesis by (metis 2 3 6 Std.simps(3) Resid.simps(1) con_char prfx_implies_con list.exhaust_sel) qed show "\ Ide ((\<^bold>\\<^bold>[M\<^bold>] \<^bold>\ N) # u # U) \ stdz_insert (\<^bold>\\<^bold>[M\<^bold>] \<^bold>\ N) (u # U) \<^sup>*\\<^sup>* (\<^bold>\\<^bold>[M\<^bold>] \<^bold>\ N) # u # U" proof have "stdz_insert (\<^bold>\\<^bold>[M\<^bold>] \<^bold>\ N) (u # U) = [\<^bold>\\<^bold>[\.Src M\<^bold>] \<^bold>\ \.Src N] @ stdz_insert u U" using 3 by simp also have "... \<^sup>*\\<^sup>* [\<^bold>\\<^bold>[\.Src M\<^bold>] \<^bold>\ \.Src N] @ u # U" using MN 2 3 6 8 cong_append by (meson cong_reflexive seqE) also have "[\<^bold>\\<^bold>[\.Src M\<^bold>] \<^bold>\ \.Src N] @ u # U \<^sup>*\\<^sup>* ([\<^bold>\\<^bold>[\.Src M\<^bold>] \<^bold>\ \.Src N] @ [\.subst N M]) @ u # U" using MN 1 2 6 8 Beta_decomp(1) Std Src_hd_eqI Trg_last_Src_hd_eqI \.Arr_Subst \.ide_char ide_char apply (intro cong_append cong_append_ideI seqI\<^sub>\\<^sub>P) apply auto[2] apply metis apply auto[4] by (metis cong_transitive) also have "([\<^bold>\\<^bold>[\.Src M\<^bold>] \<^bold>\ \.Src N] @ [\.subst N M]) @ u # U \<^sup>*\\<^sup>* [\<^bold>\\<^bold>[M\<^bold>] \<^bold>\ N] @ u # U" by (meson MN 2 6 Beta_decomp(1) cong_append prfx_transitive seq) also have "[\<^bold>\\<^bold>[M\<^bold>] \<^bold>\ N] @ u # U = (\<^bold>\\<^bold>[M\<^bold>] \<^bold>\ N) # u # U" by simp finally show "stdz_insert (\<^bold>\\<^bold>[M\<^bold>] \<^bold>\ N) (u # U) \<^sup>*\\<^sup>* (\<^bold>\\<^bold>[M\<^bold>] \<^bold>\ N) # u # U" by simp qed qed qed next assume 1: "\ \.Ide (\.subst N M)" have 2: "stdz_insert (\<^bold>\\<^bold>[M\<^bold>] \<^bold>\ N) (u # U) = (\<^bold>\\<^bold>[\.Src M\<^bold>] \<^bold>\ \.Src N) # stdz_insert (\.subst N M) (u # U)" using 1 MN by simp have 3: "seq [\.subst N M] (u # U)" using \.Arr_Subst MN seq_char seq by force have 4: "Std (stdz_insert (\.subst N M) (u # U)) \ set (stdz_insert (\.subst N M) (u # U)) \ {a. 
\.elementary_reduction a} \ stdz_insert (\.Subst 0 N M) (u # U) \<^sup>*\\<^sup>* \.subst N M # u # U" using 1 3 Std ind MN Ide.simps(3) \.ide_char Std_implies_set_subset_elementary_reduction by presburger have 5: "\.seq (\<^bold>\\<^bold>[\.Src M\<^bold>] \<^bold>\ \.Src N) (hd (stdz_insert (\.subst N M) (u # U)))" using MN 4 apply (intro \.seqI\<^sub>\) apply simp apply (metis Arr_imp_arr_hd Con_implies_Arr(1) Ide.simps(1) ide_char \.arr_char) using Src_hd_eqI by force show ?thesis proof (intro conjI) show "Std (stdz_insert (\<^bold>\\<^bold>[M\<^bold>] \<^bold>\ N) (u # U))" proof - have "\.sseq (\<^bold>\\<^bold>[\.Src M\<^bold>] \<^bold>\ \.Src N) (hd (stdz_insert (\.subst N M) (u # U)))" using 5 by (metis 4 MN \.Ide_Src Std.elims(2) Ide.simps(1) Resid.simps(2) ide_char list.sel(1) \.sseq_BetaI \.sseq_imp_elementary_reduction1) thus ?thesis by (metis 2 4 Std.simps(3) Arr.simps(1) Con_implies_Arr(2) Ide.simps(1) ide_char list.exhaust_sel) qed show "\ Ide ((\<^bold>\\<^bold>[M\<^bold>] \<^bold>\ N) # u # U) \ stdz_insert (\<^bold>\\<^bold>[M\<^bold>] \<^bold>\ N) (u # U) \<^sup>*\\<^sup>* (\<^bold>\\<^bold>[M\<^bold>] \<^bold>\ N) # u # U" proof have "stdz_insert (\<^bold>\\<^bold>[M\<^bold>] \<^bold>\ N) (u # U) = [\<^bold>\\<^bold>[\.Src M\<^bold>] \<^bold>\ \.Src N] @ stdz_insert (\.subst N M) (u # U)" using 2 by simp also have "... \<^sup>*\\<^sup>* [\<^bold>\\<^bold>[\.Src M\<^bold>] \<^bold>\ \.Src N] @ \.subst N M # u # U" proof (intro cong_append) show "seq [\<^bold>\\<^bold>[\.Src M\<^bold>] \<^bold>\ \.Src N] (stdz_insert (\.subst N M) (u # U))" by (metis 4 5 Arr.simps(2) Con_implies_Arr(1) Ide.simps(1) ide_char \.arr_char \.seq_char last_ConsL seqI\<^sub>\\<^sub>P) show "[\<^bold>\\<^bold>[\.Src M\<^bold>] \<^bold>\ \.Src N] \<^sup>*\\<^sup>* [\<^bold>\\<^bold>[\.Src M\<^bold>] \<^bold>\ \.Src N]" by (meson MN cong_transitive \.Arr_Src Beta_decomp(1)) show "stdz_insert (\.subst N M) (u # U) \<^sup>*\\<^sup>* \.subst N M # u # U" using 4 by fastforce qed also have "[\<^bold>\\<^bold>[\.Src M\<^bold>] \<^bold>\ \.Src N] @ \.subst N M # u # U = ([\<^bold>\\<^bold>[\.Src M\<^bold>] \<^bold>\ \.Src N] @ [\.subst N M]) @ u # U" by simp also have "... \<^sup>*\\<^sup>* [\<^bold>\\<^bold>[M\<^bold>] \<^bold>\ N] @ u # U" by (meson Beta_decomp(1) MN cong_append cong_reflexive seqE seq) also have "[\<^bold>\\<^bold>[M\<^bold>] \<^bold>\ N] @ u # U = (\<^bold>\\<^bold>[M\<^bold>] \<^bold>\ N) # u # U" by simp finally show "stdz_insert (\<^bold>\\<^bold>[M\<^bold>] \<^bold>\ N) (u # U) \<^sup>*\\<^sup>* (\<^bold>\\<^bold>[M\<^bold>] \<^bold>\ N) # u # U" by blast qed qed qed qed qed text \ Because of the way the function package processes the pattern matching in the definition of \stdz_insert\, it produces eight separate subgoals for the remainder of the proof, even though these subgoals are all simple consequences of a single, more general fact. We first prove this fact, then use it to discharge the eight subgoals. \ have *: "\M N u U. 
\\ (\.is_Lam M \ \.is_Beta u); \.Ide (M \<^bold>\ N) \ ?P (hd (u # U)) (tl (u # U)); \\ \.Ide (M \<^bold>\ N); \.seq (M \<^bold>\ N) (hd (u # U)); \.contains_head_reduction (M \<^bold>\ N); \.Ide (\.resid (M \<^bold>\ N) (\.head_redex (M \<^bold>\ N)))\ \ ?P (hd (u # U)) (tl (u # U)); \\ \.Ide (M \<^bold>\ N); \.seq (M \<^bold>\ N) (hd (u # U)); \.contains_head_reduction (M \<^bold>\ N); \ \.Ide (\.resid (M \<^bold>\ N) (\.head_redex (M \<^bold>\ N)))\ \ ?P (\.resid (M \<^bold>\ N) (\.head_redex (M \<^bold>\ N))) (u # U); \\ \.Ide (M \<^bold>\ N); \.seq (M \<^bold>\ N) (hd (u # U)); \ \.contains_head_reduction (M \<^bold>\ N); \.contains_head_reduction (hd (u # U)); \.Ide (\.resid (M \<^bold>\ N) (\.head_strategy (M \<^bold>\ N)))\ \ ?P (\.head_strategy (M \<^bold>\ N)) (tl (u # U)); \\ \.Ide (M \<^bold>\ N); \.seq (M \<^bold>\ N) (hd (u # U)); \ \.contains_head_reduction (M \<^bold>\ N); \.contains_head_reduction (hd (u # U)); \ \.Ide (\.resid (M \<^bold>\ N) (\.head_strategy (M \<^bold>\ N)))\ \ ?P (\.resid (M \<^bold>\ N) (\.head_strategy (M \<^bold>\ N))) (tl (u # U)); \\ \.Ide (M \<^bold>\ N); \.seq (M \<^bold>\ N) (hd (u # U)); \ \.contains_head_reduction (M \<^bold>\ N); \ \.contains_head_reduction (hd (u # U))\ \ ?P M (filter notIde (map \.un_App1 (u # U))); \\ \.Ide (M \<^bold>\ N); \.seq (M \<^bold>\ N) (hd (u # U)); \ \.contains_head_reduction (M \<^bold>\ N); \ \.contains_head_reduction (hd (u # U))\ \ ?P N (filter notIde (map \.un_App2 (u # U)))\ \ ?P (M \<^bold>\ N) (u # U)" proof - fix M N u U assume ind1: "\.Ide (M \<^bold>\ N) \ ?P (hd (u # U)) (tl (u # U))" assume ind2: "\\ \.Ide (M \<^bold>\ N); \.seq (M \<^bold>\ N) (hd (u # U)); \.contains_head_reduction (M \<^bold>\ N); \.Ide (\.resid (M \<^bold>\ N) (\.head_redex (M \<^bold>\ N)))\ \ ?P (hd (u # U)) (tl (u # U))" assume ind3: "\\ \.Ide (M \<^bold>\ N); \.seq (M \<^bold>\ N) (hd (u # U)); \.contains_head_reduction (M \<^bold>\ N); \ \.Ide (\.resid (M \<^bold>\ N) (\.head_redex (M \<^bold>\ N)))\ \ ?P (\.resid (M \<^bold>\ N) (\.head_redex (M \<^bold>\ N))) (u # U)" assume ind4: "\\ \.Ide (M \<^bold>\ N); \.seq (M \<^bold>\ N) (hd (u # U)); \ \.contains_head_reduction (M \<^bold>\ N); \.contains_head_reduction (hd (u # U)); \.Ide (\.resid (M \<^bold>\ N) (\.head_strategy (M \<^bold>\ N)))\ \ ?P (\.head_strategy (M \<^bold>\ N)) (tl (u # U))" assume ind5: "\\ \.Ide (M \<^bold>\ N); \.seq (M \<^bold>\ N) (hd (u # U)); \ \.contains_head_reduction (M \<^bold>\ N); \.contains_head_reduction (hd (u # U)); \ \.Ide (\.resid (M \<^bold>\ N) (\.head_strategy (M \<^bold>\ N)))\ \ ?P (\.resid (M \<^bold>\ N) (\.head_strategy (M \<^bold>\ N))) (tl (u # U))" assume ind7: "\\ \.Ide (M \<^bold>\ N); \.seq (M \<^bold>\ N) (hd (u # U)); \ \.contains_head_reduction (M \<^bold>\ N); \ \.contains_head_reduction (hd (u # U))\ \ ?P M (filter notIde (map \.un_App1 (u # U)))" assume ind8: "\\ \.Ide (M \<^bold>\ N); \.seq (M \<^bold>\ N) (hd (u # U)); \ \.contains_head_reduction (M \<^bold>\ N); \ \.contains_head_reduction (hd (u # U))\ \ ?P N (filter notIde (map \.un_App2 (u # U)))" assume *: "\ (\.is_Lam M \ \.is_Beta u)" show "?P (M \<^bold>\ N) (u # U)" proof (intro impI, elim conjE) assume seq: "seq [M \<^bold>\ N] (u # U)" assume Std: "Std (u # U)" have MN: "\.Arr M \ \.Arr N" using seq_char seq by force have u: "\.Arr u" using Std by (meson Std_imp_Arr Arr.simps(2) Con_Arr_self Con_implies_Arr(1) Con_initial_left \.arr_char list.simps(3)) have "U \ [] \ Arr U" using Std Std_imp_Arr Arr.simps(3) by (metis Arr.elims(3) list.discI) have 
"\.is_App u \ \.is_Beta u" using * seq MN u seq_char \.arr_char Srcs_simp\<^sub>\\<^sub>P \.targets_char\<^sub>\ by (cases M; cases u) auto have **: "\.seq (M \<^bold>\ N) u" using Srcs_simp\<^sub>\\<^sub>P seq_char seq \.seq_def u by force show "Std (stdz_insert (M \<^bold>\ N) (u # U)) \ (\ Ide ((M \<^bold>\ N) # u # U) \ cong (stdz_insert (M \<^bold>\ N) (u # U)) ((M \<^bold>\ N) # u # U))" proof (cases "\.Ide (M \<^bold>\ N)") assume 1: "\.Ide (M \<^bold>\ N)" have MN: "\.Arr M \ \.Arr N \ \.Ide M \ \.Ide N" using MN 1 by simp have 2: "stdz_insert (M \<^bold>\ N) (u # U) = stdz_insert u U" using MN 1 by (simp add: Std seq stdz_insert_Ide_Std) show ?thesis proof (cases "U = []") assume U: "U = []" have 2: "stdz_insert (M \<^bold>\ N) (u # U) = standard_development u" using 1 2 U by simp show ?thesis proof (intro conjI) show "Std (stdz_insert (M \<^bold>\ N) (u # U))" using "2" Std_standard_development by presburger show "\ Ide ((M \<^bold>\ N) # u # U) \ stdz_insert (M \<^bold>\ N) (u # U) \<^sup>*\\<^sup>* (M \<^bold>\ N) # u # U" by (metis "1" "2" Ide.simps(2) U cong_cons_ideI(1) cong_standard_development ide_backward_stable ide_char \.ide_char prfx_transitive seq u) qed next assume U: "U \ []" have 2: "stdz_insert (M \<^bold>\ N) (u # U) = stdz_insert u U" using 1 2 U by simp have 3: "seq [u] U" by (simp add: Std U arrI\<^sub>P arr_append_imp_seq) have 4: "Std (stdz_insert u U) \ set (stdz_insert u U) \ {a. \.elementary_reduction a} \ (\ Ide (u # U) \ cong (stdz_insert u U) (u # U))" using MN 3 Std ind1 Std_implies_set_subset_elementary_reduction by (metis "1" Std.simps(3) U list.sel(1) list.sel(3) standardize.cases) show ?thesis proof (intro conjI) show "Std (stdz_insert (M \<^bold>\ N) (u # U))" by (metis "1" "2" "3" Std Std.simps(3) U ind1 list.exhaust_sel list.sel(1,3)) show "\ Ide ((M \<^bold>\ N) # u # U) \ stdz_insert (M \<^bold>\ N) (u # U) \<^sup>*\\<^sup>* (M \<^bold>\ N) # u # U" proof assume 5: "\ Ide ((M \<^bold>\ N) # u # U)" have "stdz_insert (M \<^bold>\ N) (u # U) \<^sup>*\\<^sup>* u # U" using "1" "2" "4" "5" seq_char seq by force also have "u # U \<^sup>*\\<^sup>* [M \<^bold>\ N] @ u # U" using "1" Ide.simps(2) cong_append_ideI(1) ide_char seq by blast also have "[M \<^bold>\ N] @ (u # U) = (M \<^bold>\ N) # u # U" by simp finally show "stdz_insert (M \<^bold>\ N) (u # U) \<^sup>*\\<^sup>* (M \<^bold>\ N) # u # U" by blast qed qed qed next assume 1: "\ \.Ide (M \<^bold>\ N)" show ?thesis proof (cases "\.contains_head_reduction (M \<^bold>\ N)") assume 2: "\.contains_head_reduction (M \<^bold>\ N)" show ?thesis proof (cases "\.Ide ((M \<^bold>\ N) \\ \.head_redex (M \<^bold>\ N))") assume 3: "\.Ide ((M \<^bold>\ N) \\ \.head_redex (M \<^bold>\ N))" have 4: "\ Ide (u # U)" by (metis Std Std_implies_set_subset_elementary_reduction in_mono \.elementary_reduction_not_ide list.set_intros(1) mem_Collect_eq set_Ide_subset_ide) have 5: "stdz_insert (M \<^bold>\ N) (u # U) = \.head_redex (M \<^bold>\ N) # stdz_insert u U" using MN 1 2 3 4 ** by auto show ?thesis proof (cases "U = []") assume U: "U = []" have u: "\.Arr u \ \ \.Ide u" using 4 U u by force have 5: "stdz_insert (M \<^bold>\ N) (u # U) = \.head_redex (M \<^bold>\ N) # standard_development u" using 5 U by simp show ?thesis proof (intro conjI) show "Std (stdz_insert (M \<^bold>\ N) (u # U))" proof - have "\.sseq (\.head_redex (M \<^bold>\ N)) (hd (standard_development u))" proof - have "\.seq (\.head_redex (M \<^bold>\ N)) (hd (standard_development u))" proof show "\.Arr (\.head_redex (M \<^bold>\ N))" using MN 
\.Arr.simps(4) \.Arr_head_redex by presburger show "\.Arr (hd (standard_development u))" using Arr_imp_arr_hd Ide_iff_standard_development_empty Std_standard_development u by force show "\.Trg (\.head_redex (M \<^bold>\ N)) = \.Src (hd (standard_development u))" proof - have "\.Trg (\.head_redex (M \<^bold>\ N)) = \.Trg ((M \<^bold>\ N) \\ \.head_redex (M \<^bold>\ N))" by (metis 3 MN \.Con_Arr_head_redex \.Src_resid \.Arr.simps(4) \.Ide_iff_Src_self \.Ide_iff_Trg_self \.Ide_implies_Arr) also have "... = \.Src u" using MN by (metis Trg_last_Src_hd_eqI Trg_last_eqI head_redex_decomp \.Arr.simps(4) last_ConsL last_appendR list.sel(1) not_Cons_self2 seq) also have "... = \.Src (hd (standard_development u))" using ** 2 3 u MN Src_hd_standard_development [of u] by metis finally show ?thesis by blast qed qed thus ?thesis by (metis 2 u MN \.Arr.simps(4) Ide_iff_standard_development_empty development.simps(2) development_standard_development \.head_redex_is_head_reduction list.exhaust_sel \.sseq_head_reductionI) qed thus ?thesis by (metis 5 Ide_iff_standard_development_empty Std.simps(3) Std_standard_development list.exhaust u) qed show "\ Ide ((M \<^bold>\ N) # u # U) \ stdz_insert (M \<^bold>\ N) (u # U) \<^sup>*\\<^sup>* (M \<^bold>\ N) # u # U" proof have "stdz_insert (M \<^bold>\ N) (u # U) = [\.head_redex (M \<^bold>\ N)] @ standard_development u" using 5 by simp also have "... \<^sup>*\\<^sup>* [\.head_redex (M \<^bold>\ N)] @ [u]" using u cong_standard_development [of u] cong_append by (metis 2 5 Ide_iff_standard_development_empty Std_imp_Arr \Std (stdz_insert (M \<^bold>\ N) (u # U))\ arr_append_imp_seq arr_char calculation cong_standard_development cong_transitive \.Arr_head_redex \.contains_head_reduction_iff list.distinct(1)) also have "[\.head_redex (M \<^bold>\ N)] @ [u] \<^sup>*\\<^sup>* ([\.head_redex (M \<^bold>\ N)] @ [(M \<^bold>\ N) \\ \.head_redex (M \<^bold>\ N)]) @ [u]" proof - have "[\.head_redex (M \<^bold>\ N)] \<^sup>*\\<^sup>* [\.head_redex (M \<^bold>\ N)] @ [(M \<^bold>\ N) \\ \.head_redex (M \<^bold>\ N)]" by (metis (no_types, lifting) 1 3 MN Arr_iff_Con_self Ide.simps(2) Resid.simps(2) arr_append_imp_seq arr_char cong_append_ideI(4) cong_transitive head_redex_decomp ide_backward_stable ide_char \.Arr.simps(4) \.ide_char not_Cons_self2) thus ?thesis using MN U u seq by (meson cong_append head_redex_decomp \.Arr.simps(4) prfx_transitive) qed also have "([\.head_redex (M \<^bold>\ N)] @ [(M \<^bold>\ N) \\ \.head_redex (M \<^bold>\ N)]) @ [u] \<^sup>*\\<^sup>* [M \<^bold>\ N] @ [u]" by (metis \.Arr.simps(4) MN U Resid_Arr_self cong_append ide_char seq_char head_redex_decomp seq) also have "[M \<^bold>\ N] @ [u] = (M \<^bold>\ N) # u # U" using U by simp finally show "stdz_insert (M \<^bold>\ N) (u # U) \<^sup>*\\<^sup>* (M \<^bold>\ N) # u # U" by blast qed qed next assume U: "U \ []" have 6: "Std (stdz_insert u U) \ set (stdz_insert u U) \ {a. 
\.elementary_reduction a} \ cong (stdz_insert u U) (u # U)" proof - have "seq [u] U" by (simp add: Std U arrI\<^sub>P arr_append_imp_seq) moreover have "Std U" using Std Std.elims(2) U by blast ultimately show ?thesis using ind2 ** 1 2 3 4 Std_implies_set_subset_elementary_reduction by force qed show ?thesis proof (intro conjI) show "Std (stdz_insert (M \<^bold>\ N) (u # U))" proof - have "\.sseq (\.head_redex (M \<^bold>\ N)) (hd (stdz_insert u U))" proof - have "\.seq (\.head_redex (M \<^bold>\ N)) (hd (stdz_insert u U))" proof show "\.Arr (\.head_redex (M \<^bold>\ N))" using MN \.Arr_head_redex by force show "\.Arr (hd (stdz_insert u U))" using 6 by (metis Arr_imp_arr_hd Con_implies_Arr(2) Ide.simps(1) ide_char \.arr_char) show "\.Trg (\.head_redex (M \<^bold>\ N)) = \.Src (hd (stdz_insert u U))" proof - have "\.Trg (\.head_redex (M \<^bold>\ N)) = \.Trg ((M \<^bold>\ N) \\ \.head_redex (M \<^bold>\ N))" by (metis 3 \.Arr_not_Nil \.Ide_iff_Src_self \.Ide_iff_Trg_self \.Ide_implies_Arr \.Src_resid) also have "... = \.Trg (M \<^bold>\ N)" by (metis 1 MN Trg_last_eqI Trg_last_standard_development cong_standard_development head_redex_decomp \.Arr.simps(4) last_snoc) also have "... = \.Src (hd (stdz_insert u U))" by (metis ** 6 Src_hd_eqI \.seqE\<^sub>\ list.sel(1)) finally show ?thesis by blast qed qed thus ?thesis by (metis 2 6 MN \.Arr.simps(4) Std.elims(1) Ide.simps(1) Resid.simps(2) ide_char \.head_redex_is_head_reduction list.sel(1) \.sseq_head_reductionI \.sseq_imp_elementary_reduction1) qed thus ?thesis by (metis 5 6 Std.simps(3) Arr.simps(1) Con_implies_Arr(1) con_char prfx_implies_con list.exhaust_sel) qed show "\ Ide ((M \<^bold>\ N) # u # U) \ stdz_insert (M \<^bold>\ N) (u # U) \<^sup>*\\<^sup>* (M \<^bold>\ N) # u # U" proof have "stdz_insert (M \<^bold>\ N) (u # U) = [\.head_redex (M \<^bold>\ N)] @ stdz_insert u U" using 5 by simp also have 7: "[\.head_redex (M \<^bold>\ N)] @ stdz_insert u U \<^sup>*\\<^sup>* [\.head_redex (M \<^bold>\ N)] @ u # U" using 6 cong_append [of "[\.head_redex (M \<^bold>\ N)]" "stdz_insert u U" "[\.head_redex (M \<^bold>\ N)]" "u # U"] by (metis 2 5 Arr.simps(1) Resid.simps(2) Std_imp_Arr \Std (stdz_insert (M \<^bold>\ N) (u # U))\ arr_append_imp_seq arr_char calculation cong_standard_development cong_transitive ide_implies_arr \.Arr_head_redex \.contains_head_reduction_iff list.distinct(1)) also have "[\.head_redex (M \<^bold>\ N)] @ u # U \<^sup>*\\<^sup>* ([\.head_redex (M \<^bold>\ N)] @ [(M \<^bold>\ N) \\ \.head_redex (M \<^bold>\ N)]) @ u # U" proof - have "[\.head_redex (M \<^bold>\ N)] \<^sup>*\\<^sup>* [\.head_redex (M \<^bold>\ N)] @ [(M \<^bold>\ N) \\ \.head_redex (M \<^bold>\ N)]" by (metis 2 3 head_redex_decomp \.Arr_head_redex \.Con_Arr_head_redex \.Ide_iff_Src_self \.Ide_implies_Arr \.Src_resid \.contains_head_reduction_iff \.resid_Arr_self prfx_decomp prfx_transitive) moreover have "seq [\.head_redex (M \<^bold>\ N)] (u # U)" by (metis 7 arr_append_imp_seq cong_implies_coterminal coterminalE list.distinct(1)) ultimately show ?thesis using 3 ide_char cong_symmetric cong_append by (meson 6 prfx_transitive) qed also have "([\.head_redex (M \<^bold>\ N)] @ [(M \<^bold>\ N) \\ \.head_redex (M \<^bold>\ N)]) @ u # U \<^sup>*\\<^sup>* [M \<^bold>\ N] @ u # U" by (meson 6 MN \.Arr.simps(4) cong_append prfx_transitive head_redex_decomp seq) also have "[M \<^bold>\ N] @ (u # U) = (M \<^bold>\ N) # u # U" by simp finally show "stdz_insert (M \<^bold>\ N) (u # U) \<^sup>*\\<^sup>* (M \<^bold>\ N) # u # U" by blast qed qed qed next assume 3: 
"\ \.Ide ((M \<^bold>\ N) \\ \.head_redex (M \<^bold>\ N))" have 4: "stdz_insert (M \<^bold>\ N) (u # U) = \.head_redex (M \<^bold>\ N) # stdz_insert ((M \<^bold>\ N) \\ \.head_redex (M \<^bold>\ N)) (u # U)" using MN 1 2 3 ** by auto have 5: "Std (stdz_insert ((M \<^bold>\ N) \\ \.head_redex (M \<^bold>\ N)) (u # U)) \ set (stdz_insert ((M \<^bold>\ N) \\ \.head_redex (M \<^bold>\ N)) (u # U)) \ {a. \.elementary_reduction a} \ stdz_insert ((M \<^bold>\ N) \\ \.head_redex (M \<^bold>\ N)) (u # U) \<^sup>*\\<^sup>* (M \<^bold>\ N) \\ \.head_redex (M \<^bold>\ N) # u # U" proof - have "seq [(M \<^bold>\ N) \\ \.head_redex (M \<^bold>\ N)] (u # U)" by (metis (full_types) MN arr_append_imp_seq cong_implies_coterminal coterminalE head_redex_decomp \.Arr.simps(4) not_Cons_self2 seq seq_def targets_append) thus ?thesis using ind3 1 2 3 ** Std Std_implies_set_subset_elementary_reduction by auto qed show ?thesis proof (intro conjI) show "Std (stdz_insert (M \<^bold>\ N) (u # U))" proof - have "\.sseq (\.head_redex (M \<^bold>\ N)) (hd (stdz_insert ((M \<^bold>\ N) \\ \.head_redex (M \<^bold>\ N)) (u # U)))" proof - have "\.seq (\.head_redex (M \<^bold>\ N)) (hd (stdz_insert ((M \<^bold>\ N) \\ \.head_redex (M \<^bold>\ N)) (u # U)))" using MN 5 \.Arr_head_redex by (metis (no_types, lifting) Arr_imp_arr_hd Con_implies_Arr(2) Ide.simps(1) Src_hd_eqI ide_char \.Arr.simps(4) \.Arr_head_redex \.Con_Arr_head_redex \.Src_resid \.arr_char \.seq_char list.sel(1)) moreover have "\.elementary_reduction (hd (stdz_insert ((M \<^bold>\ N) \\ \.head_redex (M \<^bold>\ N)) (u # U)))" using 5 by (metis Arr.simps(1) Con_implies_Arr(2) Ide.simps(1) hd_in_set ide_char mem_Collect_eq subset_code(1)) ultimately show ?thesis using MN 2 \.head_redex_is_head_reduction \.sseq_head_reductionI by simp qed thus ?thesis by (metis 4 5 Std.simps(3) Arr.simps(1) Con_implies_Arr(2) Ide.simps(1) ide_char list.exhaust_sel) qed show "\ Ide ((M \<^bold>\ N) # u # U) \ stdz_insert (M \<^bold>\ N) (u # U) \<^sup>*\\<^sup>* (M \<^bold>\ N) # u # U" proof have "stdz_insert (M \<^bold>\ N) (u # U) = [\.head_redex (M \<^bold>\ N)] @ stdz_insert ((M \<^bold>\ N) \\ \.head_redex (M \<^bold>\ N)) (u # U)" using 4 by simp also have "... 
\<^sup>*\\<^sup>* [\.head_redex (M \<^bold>\ N)] @ ((M \<^bold>\ N) \\ \.head_redex (M \<^bold>\ N) # u # U)" proof (intro cong_append) show "seq [\.head_redex (M \<^bold>\ N)] (stdz_insert ((M \<^bold>\ N) \\ \.head_redex (M \<^bold>\ N)) (u # U))" by (metis 4 5 Ide.simps(1) Resid.simps(1) Std_imp_Arr \Std (stdz_insert (M \<^bold>\ N) (u # U))\ arrI\<^sub>P arr_append_imp_seq calculation ide_char list.discI) show "[\.head_redex (M \<^bold>\ N)] \<^sup>*\\<^sup>* [\.head_redex (M \<^bold>\ N)]" using MN \.cong_reflexive ide_char \.Arr_head_redex by force show "stdz_insert ((M \<^bold>\ N) \\ \.head_redex (M \<^bold>\ N)) (u # U) \<^sup>*\\<^sup>* (M \<^bold>\ N) \\ \.head_redex (M \<^bold>\ N) # u # U" using 5 by fastforce qed also have "([\.head_redex (M \<^bold>\ N)] @ ((M \<^bold>\ N) \\ \.head_redex (M \<^bold>\ N) # u # U)) = ([\.head_redex (M \<^bold>\ N)] @ [(M \<^bold>\ N) \\ \.head_redex (M \<^bold>\ N)]) @ (u # U)" by simp also have "([\.head_redex (M \<^bold>\ N)] @ [(M \<^bold>\ N) \\ \.head_redex (M \<^bold>\ N)]) @ u # U \<^sup>*\\<^sup>* [M \<^bold>\ N] @ u # U" by (meson ** cong_append cong_reflexive seqE head_redex_decomp seq \.seq_char) also have "[M \<^bold>\ N] @ (u # U) = (M \<^bold>\ N) # u # U" by simp finally show "stdz_insert (M \<^bold>\ N) (u # U) \<^sup>*\\<^sup>* (M \<^bold>\ N) # u # U" by blast qed qed qed next assume 2: "\ \.contains_head_reduction (M \<^bold>\ N)" show ?thesis proof (cases "\.contains_head_reduction u") assume 3: "\.contains_head_reduction u" have B: "[\.head_strategy (M \<^bold>\ N)] @ [(M \<^bold>\ N) \\ \.head_strategy (M \<^bold>\ N)] \<^sup>*\\<^sup>* [M \<^bold>\ N] @ [u]" proof - have "[M \<^bold>\ N] @ [u] \<^sup>*\\<^sup>* [\.head_strategy (\.Src (M \<^bold>\ N)) \ M \<^bold>\ N]" proof - have "\.is_internal_reduction (M \<^bold>\ N)" using 2 ** \.is_internal_reduction_iff by blast moreover have "\.is_head_reduction u" proof - have "\.elementary_reduction u" by (metis Std lambda_calculus.sseq_imp_elementary_reduction1 list.discI list.sel(1) reduction_paths.Std.elims(2)) thus ?thesis using \.is_head_reduction_if 3 by force qed moreover have "\.head_strategy (\.Src (M \<^bold>\ N)) \\ (M \<^bold>\ N) = u" using \.resid_head_strategy_Src(1) ** calculation(1-2) by fastforce moreover have "[M \<^bold>\ N] \<^sup>*\\<^sup>* [\.head_strategy (\.Src (M \<^bold>\ N)) \ M \<^bold>\ N]" using MN \.prfx_implies_con ide_char \.Arr_head_strategy \.Src_head_strategy \.prfx_Join by force ultimately show ?thesis using u \.Coinitial_iff_Con \.Arr_not_Nil \.resid_Join prfx_decomp [of "M \<^bold>\ N" "\.head_strategy (\.Src (M \<^bold>\ N)) \ M \<^bold>\ N"] by simp qed also have "[\.head_strategy (\.Src (M \<^bold>\ N)) \ M \<^bold>\ N] \<^sup>*\\<^sup>* [\.head_strategy (\.Src (M \<^bold>\ N))] @ [(M \<^bold>\ N) \\ \.head_strategy (\.Src (M \<^bold>\ N))]" proof - have 3: "\.composite_of (\.head_strategy (\.Src (M \<^bold>\ N))) ((M \<^bold>\ N) \\ \.head_strategy (\.Src (M \<^bold>\ N))) (\.head_strategy (\.Src (M \<^bold>\ N)) \ M \<^bold>\ N)" using \.Arr_head_strategy MN \.Src_head_strategy \.join_of_Join \.join_of_def by force hence "composite_of [\.head_strategy (\.Src (M \<^bold>\ N))] [(M \<^bold>\ N) \\ \.head_strategy (\.Src (M \<^bold>\ N))] [\.head_strategy (\.Src (M \<^bold>\ N)) \ M \<^bold>\ N]" using composite_of_single_single by (metis (no_types, lifting) \.Con_sym Ide.simps(2) Resid.simps(3) composite_ofI \.composite_ofE \.con_char ide_char \.prfx_implies_con) hence "[\.head_strategy (\.Src (M \<^bold>\ N))] @ [(M \<^bold>\ N) \\ 
\.head_strategy (\.Src (M \<^bold>\ N))] \<^sup>*\\<^sup>* [\.head_strategy (\.Src (M \<^bold>\ N)) \ M \<^bold>\ N]" using \.resid_Join by (meson 3 composite_of_single_single composite_of_unq_upto_cong) thus ?thesis by blast qed also have "[\.head_strategy (\.Src (M \<^bold>\ N))] @ [(M \<^bold>\ N) \\ \.head_strategy (\.Src (M \<^bold>\ N))] \<^sup>*\\<^sup>* [\.head_strategy (M \<^bold>\ N)] @ [(M \<^bold>\ N) \\ \.head_strategy (M \<^bold>\ N)]" by (metis (full_types) \.Arr.simps(4) MN prfx_transitive calculation \.head_strategy_Src) finally show ?thesis by blast qed show ?thesis proof (cases "\.Ide ((M \<^bold>\ N) \\ \.head_strategy (M \<^bold>\ N))") assume 4: "\.Ide ((M \<^bold>\ N) \\ \.head_strategy (M \<^bold>\ N))" have A: "[\.head_strategy (M \<^bold>\ N)] \<^sup>*\\<^sup>* [\.head_strategy (M \<^bold>\ N)] @ [(M \<^bold>\ N) \\ \.head_strategy (M \<^bold>\ N)]" by (meson 4 B Con_implies_Arr(1) Ide.simps(2) arr_append_imp_seq arr_char con_char cong_append_ideI(2) ide_char \.ide_char not_Cons_self2 prfx_implies_con) have 5: "\ Ide (u # U)" by (meson 3 Ide_consE \.ide_backward_stable \.subs_head_redex \.subs_implies_prfx \.contains_head_reduction_iff \.elementary_reduction_head_redex \.elementary_reduction_not_ide) have 6: "stdz_insert (M \<^bold>\ N) (u # U) = stdz_insert (\.head_strategy (M \<^bold>\ N)) U" using 1 2 3 4 5 * ** \\.is_App u \ \.is_Beta u\ apply (cases u) apply simp_all apply blast by (cases M) auto show ?thesis proof (cases "U = []") assume U: "U = []" have u: "\ \.Ide u" using 5 U by simp have 6: "stdz_insert (M \<^bold>\ N) (u # U) = standard_development (\.head_strategy (M \<^bold>\ N))" using 6 U by simp show ?thesis proof (intro conjI) show "Std (stdz_insert (M \<^bold>\ N) (u # U))" using "6" Std_standard_development by presburger show "\ Ide ((M \<^bold>\ N) # u # U) \ stdz_insert (M \<^bold>\ N) (u # U) \<^sup>*\\<^sup>* (M \<^bold>\ N) # u # U" proof have "stdz_insert (M \<^bold>\ N) (u # U) \<^sup>*\\<^sup>* [\.head_strategy (M \<^bold>\ N)]" using 4 6 cong_standard_development ** 1 2 3 \.Arr.simps(4) \.Arr_head_strategy MN \.ide_backward_stable \.ide_char by metis also have "[\.head_strategy (M \<^bold>\ N)] \<^sup>*\\<^sup>* [M \<^bold>\ N] @ [u]" by (meson A B prfx_transitive) also have "[M \<^bold>\ N] @ [u] = (M \<^bold>\ N) # u # U" using U by auto finally show "stdz_insert (M \<^bold>\ N) (u # U) \<^sup>*\\<^sup>* (M \<^bold>\ N) # u # U" by blast qed qed next assume U: "U \ []" have 7: "seq [\.head_strategy (M \<^bold>\ N)] U" proof show "Arr [\.head_strategy (M \<^bold>\ N)]" by (meson A Con_implies_Arr(1) con_char prfx_implies_con) show "Arr U" using U \U \ [] \ Arr U\ by presburger show "\.Trg (last [\.head_strategy (M \<^bold>\ N)]) = \.Src (hd U)" by (metis A B Std Std_consE Trg_last_eqI U \.seqE\<^sub>\ \.sseq_imp_seq last_snoc) qed have 8: "Std (stdz_insert (\.head_strategy (M \<^bold>\ N)) U) \ set (stdz_insert (\.head_strategy (M \<^bold>\ N)) U) \ {a. 
\.elementary_reduction a} \ stdz_insert (\.head_strategy (M \<^bold>\ N)) U \<^sup>*\\<^sup>* \.head_strategy (M \<^bold>\ N) # U" proof - have "Std U" by (metis Std Std.simps(3) U list.exhaust_sel) moreover have "\ Ide (\.head_strategy (M \<^bold>\ N) # tl (u # U))" using 1 4 \.ide_backward_stable by blast ultimately show ?thesis using ind4 ** 1 2 3 4 7 Std_implies_set_subset_elementary_reduction by force qed show ?thesis proof (intro conjI) show "Std (stdz_insert (M \<^bold>\ N) (u # U))" using 6 8 by presburger show "\ Ide ((M \<^bold>\ N) # u # U) \ stdz_insert (M \<^bold>\ N) (u # U) \<^sup>*\\<^sup>* (M \<^bold>\ N) # u # U" proof have "stdz_insert (M \<^bold>\ N) (u # U) = stdz_insert (\.head_strategy (M \<^bold>\ N)) U" using 6 by simp also have "... \<^sup>*\\<^sup>* [\.head_strategy (M \<^bold>\ N)] @ U" using 8 by simp also have "[\.head_strategy (M \<^bold>\ N)] @ U \<^sup>*\\<^sup>* ([M \<^bold>\ N] @ [u]) @ U" by (meson A B U 7 Resid_Arr_self cong_append ide_char prfx_transitive \U \ [] \ Arr U\) also have "([M \<^bold>\ N] @ [u]) @ U = (M \<^bold>\ N) # u # U" by simp finally show "stdz_insert (M \<^bold>\ N) (u # U) \<^sup>*\\<^sup>* (M \<^bold>\ N) # u # U" by blast qed qed qed next assume 4: "\ \.Ide ((M \<^bold>\ N) \\ \.head_strategy (M \<^bold>\ N))" show ?thesis proof (cases "U = []") assume U: "U = []" have 5: "stdz_insert (M \<^bold>\ N) (u # U) = \.head_strategy (M \<^bold>\ N) # standard_development ((M \<^bold>\ N) \\ \.head_strategy (M \<^bold>\ N))" using 1 2 3 4 U * ** \\.is_App u \ \.is_Beta u\ apply (cases u) apply simp_all apply blast apply (cases M) apply simp_all by blast+ show ?thesis proof (intro conjI) show "Std (stdz_insert (M \<^bold>\ N) (u # U))" proof - have "\.sseq (\.head_strategy (M \<^bold>\ N)) (hd (standard_development ((M \<^bold>\ N) \\ \.head_strategy (M \<^bold>\ N))))" proof - have "\.seq (\.head_strategy (M \<^bold>\ N)) (hd (standard_development ((M \<^bold>\ N) \\ \.head_strategy (M \<^bold>\ N))))" using MN ** 4 \.Arr_head_strategy Arr_imp_arr_hd Ide_iff_standard_development_empty Src_hd_standard_development Std_imp_Arr Std_standard_development \.Arr_resid \.Src_head_strategy \.Src_resid by force moreover have "\.elementary_reduction (hd (standard_development ((M \<^bold>\ N) \\ \.head_strategy (M \<^bold>\ N))))" by (metis 4 Ide_iff_standard_development_empty MN Std_consE Std_standard_development hd_Cons_tl \.Arr.simps(4) \.Arr_resid_ind \.Con_head_strategy \.sseq_imp_elementary_reduction1 Std.simps(2)) ultimately show ?thesis using \.sseq_head_reductionI Std_standard_development by (metis ** 2 3 Std U \.internal_reduction_preserves_no_head_redex \.is_internal_reduction_iff \.Src_head_strategy \.elementary_reduction_not_ide \.head_strategy_Src \.head_strategy_is_elementary \.ide_char \.is_head_reduction_char \.is_head_reduction_if \.seqE\<^sub>\ Std.simps(2)) qed thus ?thesis by (metis 4 5 MN Ide_iff_standard_development_empty Std_standard_development \.Arr.simps(4) \.Arr_resid_ind \.Con_head_strategy list.exhaust_sel Std.simps(3)) qed show "\ Ide ((M \<^bold>\ N) # u # U) \ stdz_insert (M \<^bold>\ N) (u # U) \<^sup>*\\<^sup>* (M \<^bold>\ N) # u # U" proof have "stdz_insert (M \<^bold>\ N) (u # U) = [\.head_strategy (M \<^bold>\ N)] @ standard_development ((M \<^bold>\ N) \\ \.head_strategy (M \<^bold>\ N))" using 5 by simp also have "... 
\<^sup>*\\<^sup>* [\.head_strategy (M \<^bold>\ N)] @ [(M \<^bold>\ N) \\ \.head_strategy (M \<^bold>\ N)]" proof (intro cong_append) show 6: "seq [\.head_strategy (M \<^bold>\ N)] (standard_development ((M \<^bold>\ N) \\ \.head_strategy (M \<^bold>\ N)))" using 4 Ide_iff_standard_development_empty MN \Std (stdz_insert (M \<^bold>\ N) (u # U))\ arr_append_imp_seq arr_char calculation \.Arr_head_strategy \.Arr_resid lambda_calculus.Src_head_strategy by force show "[\.head_strategy (M \<^bold>\ N)] \<^sup>*\\<^sup>* [\.head_strategy (M \<^bold>\ N)]" by (meson MN 6 cong_reflexive seqE) show "standard_development ((M \<^bold>\ N) \\ \.head_strategy (M \<^bold>\ N)) \<^sup>*\\<^sup>* [(M \<^bold>\ N) \\ \.head_strategy (M \<^bold>\ N)]" using 4 MN cong_standard_development \.Arr.simps(4) \.Arr_resid_ind \.Con_head_strategy by presburger qed also have "[\.head_strategy (M \<^bold>\ N)] @ [(M \<^bold>\ N) \\ \.head_strategy (M \<^bold>\ N)] \<^sup>*\\<^sup>* [M \<^bold>\ N] @ [u]" using B by blast also have "[M \<^bold>\ N] @ [u] = (M \<^bold>\ N) # u # U" using U by simp finally show "stdz_insert (M \<^bold>\ N) (u # U) \<^sup>*\\<^sup>* (M \<^bold>\ N) # u # U" by blast qed qed next assume U: "U \ []" have 5: "stdz_insert (M \<^bold>\ N) (u # U) = \.head_strategy (M \<^bold>\ N) # stdz_insert (\.resid (M \<^bold>\ N) (\.head_strategy (M \<^bold>\ N))) U" using 1 2 3 4 U * ** \\.is_App u \ \.is_Beta u\ apply (cases u) apply simp_all apply blast apply (cases M) apply simp_all by blast+ have 6: "Std (stdz_insert ((M \<^bold>\ N) \\ \.head_strategy (M \<^bold>\ N)) U) \ set (stdz_insert ((M \<^bold>\ N) \\ \.head_strategy (M \<^bold>\ N)) U) \ {a. \.elementary_reduction a} \ stdz_insert ((M \<^bold>\ N) \\ \.head_strategy (M \<^bold>\ N)) U \<^sup>*\\<^sup>* (M \<^bold>\ N) \\ \.head_strategy (M \<^bold>\ N) # U" proof - have "seq [(M \<^bold>\ N) \\ \.head_strategy (M \<^bold>\ N)] U" proof show "Arr [(M \<^bold>\ N) \\ \.head_strategy (M \<^bold>\ N)]" by (simp add: MN lambda_calculus.Arr_resid_ind \.Con_head_strategy) show "Arr U" using U \U \ [] \ Arr U\ by blast show "\.Trg (last [(M \<^bold>\ N) \\ \.head_strategy (M \<^bold>\ N)]) = \.Src (hd U)" by (metis (mono_tags, lifting) B U Std Std_consE Trg_last_eqI \.seq_char \.sseq_imp_seq last_ConsL last_snoc) qed thus ?thesis using ind5 Std_implies_set_subset_elementary_reduction by (metis ** 1 2 3 4 Std Std.simps(3) Arr_iff_Con_self Ide.simps(3) Resid.simps(1) seq_char \.ide_char list.exhaust_sel list.sel(1,3)) qed show ?thesis proof (intro conjI) show "Std (stdz_insert (M \<^bold>\ N) (u # U))" proof - have "\.sseq (\.head_strategy (M \<^bold>\ N)) (hd (stdz_insert ((M \<^bold>\ N) \\ \.head_strategy (M \<^bold>\ N)) U))" proof - have "\.seq (\.head_strategy (M \<^bold>\ N)) (hd (stdz_insert ((M \<^bold>\ N) \\ \.head_strategy (M \<^bold>\ N)) U))" proof show "\.Arr (\.head_strategy (M \<^bold>\ N))" using MN \.Arr_head_strategy by force show "\.Arr (hd (stdz_insert ((M \<^bold>\ N) \\ \.head_strategy (M \<^bold>\ N)) U))" using 6 by (metis Ide.simps(1) Resid.simps(2) Std_consE hd_Cons_tl ide_char) show "\.Trg (\.head_strategy (M \<^bold>\ N)) = \.Src (hd (stdz_insert ((M \<^bold>\ N) \\ \.head_strategy (M \<^bold>\ N)) U))" using 6 by (metis MN Src_hd_eqI \.Arr.simps(4) \.Con_head_strategy \.Src_resid list.sel(1)) qed moreover have "\.is_head_reduction (\.head_strategy (M \<^bold>\ N))" using ** 1 2 3 \.Src_head_strategy \.head_strategy_is_elementary \.head_strategy_Src \.is_head_reduction_char \.seq_char by (metis \.Src_head_redex 
\.contains_head_reduction_iff \.head_redex_is_head_reduction \.internal_reduction_preserves_no_head_redex \.is_internal_reduction_iff) moreover have "\.elementary_reduction (hd (stdz_insert ((M \<^bold>\ N) \\ \.head_strategy (M \<^bold>\ N)) U))" by (metis 6 Ide.simps(1) Resid.simps(2) ide_char hd_in_set in_mono mem_Collect_eq) ultimately show ?thesis using \.sseq_head_reductionI by blast qed thus ?thesis by (metis 5 6 Std.simps(3) Arr.simps(1) Con_implies_Arr(1) con_char prfx_implies_con list.exhaust_sel) qed show "\ Ide ((M \<^bold>\ N) # u # U) \ stdz_insert (M \<^bold>\ N) (u # U) \<^sup>*\\<^sup>* (M \<^bold>\ N) # u # U" proof have "stdz_insert (M \<^bold>\ N) (u # U) = [\.head_strategy (M \<^bold>\ N)] @ stdz_insert ((M \<^bold>\ N) \\ \.head_strategy (M \<^bold>\ N)) U" using 5 by simp also have 10: "... \<^sup>*\\<^sup>* [\.head_strategy (M \<^bold>\ N)] @ ((M \<^bold>\ N) \\ \.head_strategy (M \<^bold>\ N) # U)" proof (intro cong_append) show 10: "seq [\.head_strategy (M \<^bold>\ N)] (stdz_insert ((M \<^bold>\ N) \\ \.head_strategy (M \<^bold>\ N)) U)" by (metis 5 6 Ide.simps(1) Resid.simps(1) Std_imp_Arr \Std (stdz_insert (M \<^bold>\ N) (u # U))\ arr_append_imp_seq arr_char calculation ide_char list.distinct(1)) show "[\.head_strategy (M \<^bold>\ N)] \<^sup>*\\<^sup>* [\.head_strategy (M \<^bold>\ N)]" using MN 10 cong_reflexive by blast show "stdz_insert ((M \<^bold>\ N) \\ \.head_strategy (M \<^bold>\ N)) U \<^sup>*\\<^sup>* (M \<^bold>\ N) \\ \.head_strategy (M \<^bold>\ N) # U" using 6 by auto qed also have 11: "[\.head_strategy (M \<^bold>\ N)] @ ((M \<^bold>\ N) \\ \.head_strategy (M \<^bold>\ N) # U) = ([\.head_strategy (M \<^bold>\ N)] @ [(M \<^bold>\ N) \\ \.head_strategy (M \<^bold>\ N)]) @ U" by simp also have "... \<^sup>*\\<^sup>* (([M \<^bold>\ N] @ [u]) @ U)" proof - have "seq ([\.head_strategy (M \<^bold>\ N)] @ [(M \<^bold>\ N) \\ \.head_strategy (M \<^bold>\ N)]) U" by (metis U 10 11 append_is_Nil_conv arr_append_imp_seq cong_implies_coterminal coterminalE not_Cons_self2) thus ?thesis using B cong_append cong_reflexive by blast qed also have "([M \<^bold>\ N] @ [u]) @ U = (M \<^bold>\ N) # u # U" by simp finally show "stdz_insert (M \<^bold>\ N) (u # U) \<^sup>*\\<^sup>* (M \<^bold>\ N) # u # U" by blast qed qed qed qed next assume 3: "\ \.contains_head_reduction u" have u: "\.Arr u \ \.is_App u \ \ \.contains_head_reduction u" using "3" \\.is_App u \ \.is_Beta u\ \.is_Beta_def u by force have 5: "\ \.Ide u" by (metis Std Std.simps(2) Std.simps(3) \.elementary_reduction_not_ide \.ide_char neq_Nil_conv \.sseq_imp_elementary_reduction1) show ?thesis proof - have 4: "stdz_insert (M \<^bold>\ N) (u # U) = map (\X. \.App X (\.Src N)) (stdz_insert M (filter notIde (map \.un_App1 (u # U)))) @ map (\.App (\.Trg (\.un_App1 (last (u # U))))) (stdz_insert N (filter notIde (map \.un_App2 (u # U))))" using MN 1 2 3 5 * ** \\.is_App u \ \.is_Beta u\ apply (cases "U = []"; cases M; cases u) apply simp_all by blast+ have ***: "set U \ Collect \.is_App" using u 5 Std seq_App_Std_implies by blast have X: "Std (filter notIde (map \.un_App1 (u # U)))" by (metis *** Std Std_filter_map_un_App1 insert_subset list.simps(15) mem_Collect_eq u) have Y: "Std (filter notIde (map \.un_App2 (u # U)))" by (metis *** u Std Std_filter_map_un_App2 insert_subset list.simps(15) mem_Collect_eq) have A: "\ \.un_App1 ` set (u # U) \ Collect \.Ide \ Std (stdz_insert M (filter notIde (map \.un_App1 (u # U)))) \ set (stdz_insert M (filter notIde (map \.un_App1 (u # U)))) \ {a. 
\.elementary_reduction a} \ stdz_insert M (filter notIde (map \.un_App1 (u # U))) \<^sup>*\\<^sup>* M # filter notIde (map \.un_App1 (u # U))" proof - assume *: "\ \.un_App1 ` set (u # U) \ Collect \.Ide" have "seq [M] (filter notIde (map \.un_App1 (u # U)))" proof show "Arr [M]" using MN by simp show "Arr (filter notIde (map \.un_App1 (u # U)))" by (metis (mono_tags, lifting) "*" Std_imp_Arr X empty_filter_conv list.set_map mem_Collect_eq subset_code(1)) show "\.Trg (last [M]) = \.Src (hd (filter notIde (map \.un_App1 (u # U))))" proof - have "\.Trg (last [M]) = \.Src (hd (map \.un_App1 (u # U)))" using ** u by fastforce also have "... = \.Src (hd (filter notIde (map \.un_App1 (u # U))))" proof - have "Arr (map \.un_App1 (u # U))" using u *** by (metis Arr_map_un_App1 Std Std_imp_Arr insert_subset list.simps(15) mem_Collect_eq neq_Nil_conv) moreover have "\ Ide (map \.un_App1 (u # U))" by (metis "*" Collect_cong \.ide_char list.set_map set_Ide_subset_ide) ultimately show ?thesis using Src_hd_eqI cong_filter_notIde by blast qed finally show ?thesis by blast qed qed moreover have "\ Ide (M # filter notIde (map \.un_App1 (u # U)))" using * by (metis (no_types, lifting) *** Arr_map_un_App1 Std Std_imp_Arr Arr.simps(1) Ide.elims(2) Resid_Arr_Ide_ind ide_char seq_char calculation(1) cong_filter_notIde filter_notIde_Ide insert_subset list.discI list.sel(3) list.simps(15) mem_Collect_eq u) ultimately show ?thesis by (metis X 1 2 3 ** ind7 Std_implies_set_subset_elementary_reduction list.sel(1)) qed have B: "\ \.un_App2 ` set (u # U) \ Collect \.Ide \ Std (stdz_insert N (filter notIde (map \.un_App2 (u # U)))) \ set (stdz_insert N (filter notIde (map \.un_App2 (u # U)))) \ {a. \.elementary_reduction a} \ stdz_insert N (filter notIde (map \.un_App2 (u # U))) \<^sup>*\\<^sup>* N # filter notIde (map \.un_App2 (u # U))" proof - assume **: "\ \.un_App2 ` set (u # U) \ Collect \.Ide" have "seq [N] (filter notIde (map \.un_App2 (u # U)))" proof show "Arr [N]" using MN by simp show "Arr (filter (\u. \ \.Ide u) (map \.un_App2 (u # U)))" by (metis (mono_tags, lifting) ** Std_imp_Arr Y empty_filter_conv list.set_map mem_Collect_eq subset_code(1)) show "\.Trg (last [N]) = \.Src (hd (filter notIde (map \.un_App2 (u # U))))" proof - have "\.Trg (last [N]) = \.Src (hd (map \.un_App2 (u # U)))" by (metis u seq Trg_last_Src_hd_eqI \.Src.simps(4) \.Trg.simps(3) \.is_App_def \.lambda.sel(4) last_ConsL list.discI list.map_sel(1) list.sel(1)) also have "... 
= \.Src (hd (filter notIde (map \.un_App2 (u # U))))" proof - have "Arr (map \.un_App2 (u # U))" using u *** by (metis Arr_map_un_App2 Std Std_imp_Arr list.distinct(1) mem_Collect_eq set_ConsD subset_code(1)) moreover have "\ Ide (map \.un_App2 (u # U))" by (metis ** Collect_cong \.ide_char list.set_map set_Ide_subset_ide) ultimately show ?thesis using Src_hd_eqI cong_filter_notIde by blast qed finally show ?thesis by blast qed qed moreover have "\.seq (M \<^bold>\ N) u" by (metis u Srcs_simp\<^sub>\\<^sub>P Arr.simps(2) Trgs.simps(2) seq_char \.arr_char list.sel(1) seq \.seqI \.sources_char\<^sub>\) moreover have "\ Ide (N # filter notIde (map \.un_App2 (u # U)))" using u * by (metis (no_types, lifting) *** Arr_map_un_App2 Std Std_imp_Arr Arr.simps(1) Ide.elims(2) Resid_Arr_Ide_ind ide_char seq_char calculation(1) cong_filter_notIde filter_notIde_Ide insert_subset list.discI list.sel(3) list.simps(15) mem_Collect_eq) ultimately show ?thesis using * 1 2 3 Y ind8 Std_implies_set_subset_elementary_reduction by simp qed show ?thesis proof (cases "\.un_App1 ` set (u # U) \ Collect \.Ide"; cases "\.un_App2 ` set (u # U) \ Collect \.Ide") show "\\.un_App1 ` set (u # U) \ Collect \.Ide; \.un_App2 ` set (u # U) \ Collect \.Ide\ \ ?thesis" proof - assume *: "\.un_App1 ` set (u # U) \ Collect \.Ide" assume **: "\.un_App2 ` set (u # U) \ Collect \.Ide" have False using u 5 * ** Ide_iff_standard_development_empty by (metis \.Ide.simps(4) image_subset_iff \.lambda.collapse(3) list.set_intros(1) mem_Collect_eq) thus ?thesis by blast qed show "\\.un_App1 ` set (u # U) \ Collect \.Ide; \ \.un_App2 ` set (u # U) \ Collect \.Ide\ \ ?thesis" proof - assume *: "\.un_App1 ` set (u # U) \ Collect \.Ide" assume **: "\ \.un_App2 ` set (u # U) \ Collect \.Ide" have 6: "\.Trg (\.un_App1 (last (u # U))) = \.Trg M" proof - have "\.Trg M = \.Src (hd (map \.un_App1 (u # U)))" by (metis u seq Trg_last_Src_hd_eqI hd_map \.Src.simps(4) \.Trg.simps(3) \.is_App_def \.lambda.sel(3) last_ConsL list.discI list.sel(1)) also have "... = \.Trg (last (map \.un_App1 (u # U)))" proof - have 6: "Ide (map \.un_App1 (u # U))" using * *** u Std Std_imp_Arr Ide_char ide_char Arr_map_un_App1 by (metis (mono_tags, lifting) Collect_cong insert_subset \.ide_char list.distinct(1) list.set_map list.simps(15) mem_Collect_eq) hence "Src (map \.un_App1 (u # U)) = Trg (map \.un_App1 (u # U))" using Ide_imp_Src_eq_Trg by blast thus ?thesis using 6 Ide_implies_Arr by force qed also have "... = \.Trg (\.un_App1 (last (u # U)))" by (simp add: last_map) finally show ?thesis by simp qed have "filter notIde (map \.un_App1 (u # U)) = []" using * by (simp add: subset_eq) hence 4: "stdz_insert (M \<^bold>\ N) (u # U) = map (\X. X \<^bold>\ \.Src N) (standard_development M) @ map (\.App (\.Trg (\.un_App1 (last (u # U))))) (stdz_insert N (filter notIde (map \.un_App2 (u # U))))" using u 4 5 * ** Ide_iff_standard_development_empty MN by simp show ?thesis proof (intro conjI) have "Std (map (\X. X \<^bold>\ \.Src N) (standard_development M) @ map (\.App (\.Trg (\.un_App1 (last (u # U))))) (stdz_insert N (filter notIde (map \.un_App2 (u # U)))))" proof (intro Std_append) show "Std (map (\X. X \<^bold>\ \.Src N) (standard_development M))" using Std_map_App1 Std_standard_development MN \.Ide_Src by force show "Std (map (\.App (\.Trg (\.un_App1 (last (u # U))))) (stdz_insert N (filter notIde (map \.un_App2 (u # U)))))" using "**" B MN 6 Std_map_App2 \.Ide_Trg by presburger show "map (\X. 
X \<^bold>\ \.Src N) (standard_development M) = [] \ map (\.App (\.Trg (\.un_App1 (last (u # U))))) (stdz_insert N (filter notIde (map \.un_App2 (u # U)))) = [] \ \.sseq (last (map (\X. X \<^bold>\ \.Src N) (standard_development M))) (hd (map (\.App (\.Trg (\.un_App1 (last (u # U))))) (stdz_insert N (filter notIde (map \.un_App2 (u # U))))))" proof (cases "\.Ide M") show "\.Ide M \ ?thesis" using Ide_iff_standard_development_empty MN by blast assume M: "\ \.Ide M" have "\.sseq (last (map (\X. X \<^bold>\ \.Src N) (standard_development M))) (hd (map (\.App (\.Trg (\.un_App1 (last (u # U))))) (stdz_insert N (filter notIde (map \.un_App2 (u # U))))))" proof - have "last (map (\X. X \<^bold>\ \.Src N) (standard_development M)) = \.App (last (standard_development M)) (\.Src N)" using M by (simp add: Ide_iff_standard_development_empty MN last_map) moreover have "hd (map (\.App (\.Trg (\.un_App1 (last (u # U))))) (stdz_insert N (filter notIde (map \.un_App2 (u # U))))) = \.App (\.Trg (\.un_App1 (last (u # U)))) (hd (stdz_insert N (filter notIde (map \.un_App2 (u # U)))))" by (metis ** B Ide.simps(1) Resid.simps(2) hd_map ide_char) moreover have "\.sseq (\.App (last (standard_development M)) (\.Src N)) ..." proof - have "\.elementary_reduction (last (standard_development M))" using M MN Std_standard_development Ide_iff_standard_development_empty last_in_set mem_Collect_eq set_standard_development subsetD by metis moreover have "\.elementary_reduction (hd (stdz_insert N (filter notIde (map \.un_App2 (u # U)))))" using ** B by (metis Arr.simps(1) Con_implies_Arr(2) Ide.simps(1) ide_char in_mono list.set_sel(1) mem_Collect_eq) moreover have "\.Trg (last (standard_development M)) = \.Trg (\.un_App1 (last (u # U)))" using M MN 6 Trg_last_standard_development by presburger moreover have "\.Src N = \.Src (hd (stdz_insert N (filter notIde (map \.un_App2 (u # U)))))" by (metis "**" B Src_hd_eqI list.sel(1)) ultimately show ?thesis by simp qed ultimately show ?thesis by simp qed thus ?thesis by blast qed qed thus "Std (stdz_insert (M \<^bold>\ N) (u # U))" using 4 by simp show "\ Ide ((M \<^bold>\ N) # u # U) \ stdz_insert (M \<^bold>\ N) (u # U) \<^sup>*\\<^sup>* (M \<^bold>\ N) # u # U" proof show "stdz_insert (M \<^bold>\ N) (u # U) \<^sup>*\\<^sup>* (M \<^bold>\ N) # u # U" proof (cases "\.Ide M") assume M: "\.Ide M" have "stdz_insert (M \<^bold>\ N) (u # U) = map (\.App (\.Trg (\.un_App1 (last (u # U))))) (stdz_insert N (filter notIde (map \.un_App2 (u # U))))" using 4 M MN Ide_iff_standard_development_empty by simp also have "... 
\<^sup>*\\<^sup>* (map (\.App (\.Trg (\.un_App1 (last (u # U))))) (N # filter notIde (map \.un_App2 (u # U))))" proof - have "\.Ide (\.Trg (\.un_App1 (last (u # U))))" using M 6 \.Ide_Trg \.Ide_implies_Arr by fastforce thus ?thesis using ** *** B u cong_map_App1 by blast qed also have "map (\.App (\.Trg (\.un_App1 (last (u # U))))) (N # filter notIde (map \.un_App2 (u # U))) = map (\.App (\.Trg (\.un_App1 (last (u # U))))) (filter notIde (N # map \.un_App2 (u # U)))" using 1 M by force also have "map (\.App (\.Trg (\.un_App1 (last (u # U))))) (filter notIde (N # map \.un_App2 (u # U))) \<^sup>*\\<^sup>* map (\.App (\.Trg (\.un_App1 (last (u # U))))) (N # map \.un_App2 (u # U))" proof - have "Arr (N # map \.un_App2 (u # U))" proof show "\.arr N" using MN by blast show "Arr (map \.un_App2 (u # U))" using *** u Std Arr_map_un_App2 by (metis Std_imp_Arr insert_subset list.distinct(1) list.simps(15) mem_Collect_eq) show "\.trg N = Src (map \.un_App2 (u # U))" using u \\.seq (M \<^bold>\ N) u\ \.seq_char \.is_App_def by auto qed moreover have "\ Ide (N # map \.un_App2 (u # U))" using 1 M by force moreover have "\.Ide (\.Trg (\.un_App1 (last (u # U))))" using M 6 \.Ide_Trg \.Ide_implies_Arr by presburger ultimately show ?thesis using cong_filter_notIde cong_map_App1 by blast qed also have "map (\.App (\.Trg (\.un_App1 (last (u # U))))) (N # map \.un_App2 (u # U)) = map (\.App M) (N # map \.un_App2 (u # U))" using M MN \\.Trg (\.un_App1 (last (u # U))) = \.Trg M\ \.Ide_iff_Trg_self by force also have "... = (M \<^bold>\ N) # map (\.App M) (map \.un_App2 (u # U))" by simp also have "... = (M \<^bold>\ N) # u # U" proof - have "Arr (u # U)" using Std Std_imp_Arr by blast moreover have "set (u # U) \ Collect \.is_App" using *** u by simp moreover have "\.un_App1 u = M" by (metis * u M seq Trg_last_Src_hd_eqI \.Ide_iff_Src_self \.Ide_iff_Trg_self \.Ide_implies_Arr \.Src.simps(4) \.Trg.simps(3) \.lambda.collapse(3) \.lambda.sel(3) last.simps list.distinct(1) list.sel(1) list.set_intros(1) list.set_map list.simps(9) mem_Collect_eq standardize.cases subset_iff) moreover have "\.un_App1 ` set (u # U) \ {M}" proof - have "Ide (map \.un_App1 (u # U))" using * *** Std Std_imp_Arr Arr_map_un_App1 by (metis Collect_cong Ide_char calculation(1-2) \.ide_char list.set_map) thus ?thesis by (metis calculation(3) hd_map list.discI list.sel(1) list.set_map set_Ide_subset_single_hd) qed ultimately show ?thesis using M map_App_map_un_App2 by blast qed finally show ?thesis by blast next assume M: "\ \.Ide M" have "stdz_insert (M \<^bold>\ N) (u # U) = map (\X. X \<^bold>\ \.Src N) (standard_development M) @ map (\X. \.Trg M \<^bold>\ X) (stdz_insert N (filter notIde (map \.un_App2 (u # U))))" using 4 6 by simp also have "... \<^sup>*\\<^sup>* [M \<^bold>\ \.Src N] @ [\.Trg M \<^bold>\ N] @ map (\X. \.Trg M \<^bold>\ X) (filter notIde (map \.un_App2 (u # U)))" proof (intro cong_append) show "map (\X. X \<^bold>\ \.Src N) (standard_development M) \<^sup>*\\<^sup>* [M \<^bold>\ \.Src N]" using MN M cong_standard_development \.Ide_Src cong_map_App2 [of "\.Src N" "standard_development M" "[M]"] by simp show "map (\X. \.Trg M \<^bold>\ X) (stdz_insert N (filter notIde (map \.un_App2 (u # U)))) \<^sup>*\\<^sup>* [\.Trg M \<^bold>\ N] @ map (\X. \.Trg M \<^bold>\ X) (filter notIde (map \.un_App2 (u # U)))" proof - have "map (\X. \.Trg M \<^bold>\ X) (stdz_insert N (filter notIde (map \.un_App2 (u # U)))) \<^sup>*\\<^sup>* map (\X. 
\.Trg M \<^bold>\ X) (N # filter notIde (map \.un_App2 (u # U)))" using ** B MN cong_map_App1 lambda_calculus.Ide_Trg by presburger also have "map (\X. \.Trg M \<^bold>\ X) (N # filter notIde (map \.un_App2 (u # U))) = [\.Trg M \<^bold>\ N] @ map (\X. \.Trg M \<^bold>\ X) (filter notIde (map \.un_App2 (u # U)))" by simp finally show ?thesis by blast qed show "seq (map (\X. X \<^bold>\ \.Src N) (standard_development M)) (map (\X. \.Trg M \<^bold>\ X) (stdz_insert N (filter notIde (map \.un_App2 (u # U)))))" using MN M ** B cong_standard_development [of M] by (metis Nil_is_append_conv Resid.simps(2) Std_imp_Arr \Std (stdz_insert (M \<^bold>\ N) (u # U))\ arr_append_imp_seq arr_char calculation complete_development_Ide_iff complete_development_def list.map_disc_iff development.simps(1)) qed also have "[M \<^bold>\ \.Src N] @ [\.Trg M \<^bold>\ N] @ map (\X. \.Trg M \<^bold>\ X) (filter notIde (map \.un_App2 (u # U))) = ([M \<^bold>\ \.Src N] @ [\.Trg M \<^bold>\ N]) @ map (\X. \.Trg M \<^bold>\ X) (filter notIde (map \.un_App2 (u # U)))" by simp also have "([M \<^bold>\ \.Src N] @ [\.Trg M \<^bold>\ N]) @ map (\X. \.Trg M \<^bold>\ X) (filter notIde (map \.un_App2 (u # U))) \<^sup>*\\<^sup>* ([M \<^bold>\ \.Src N] @ [\.Trg M \<^bold>\ N]) @ map (\X. \.Trg M \<^bold>\ X) (map \.un_App2 (u # U))" proof (intro cong_append) show "seq ([M \<^bold>\ \.Src N] @ [\.Trg M \<^bold>\ N]) (map (\X. \.Trg M \<^bold>\ X) (filter notIde (map \.un_App2 (u # U))))" proof show "Arr ([M \<^bold>\ \.Src N] @ [\.Trg M \<^bold>\ N])" by (simp add: MN) show 9: "Arr (map (\X. \.Trg M \<^bold>\ X) (filter notIde (map \.un_App2 (u # U))))" proof - have "Arr (map \.un_App2 (u # U))" using *** u Arr_map_un_App2 by (metis Std Std_imp_Arr list.distinct(1) mem_Collect_eq set_ConsD subset_code(1)) moreover have "\ Ide (map \.un_App2 (u # U))" using ** by (metis Collect_cong \.ide_char list.set_map set_Ide_subset_ide) ultimately show ?thesis using cong_filter_notIde by (metis Arr_map_App2 Con_implies_Arr(2) Ide.simps(1) MN ide_char \.Ide_Trg) qed show "\.Trg (last ([M \<^bold>\ \.Src N] @ [\.Trg M \<^bold>\ N])) = \.Src (hd (map (\X. \.Trg M \<^bold>\ X) (filter notIde (map \.un_App2 (u # U)))))" proof - have "\.Trg (last ([M \<^bold>\ \.Src N] @ [\.Trg M \<^bold>\ N])) = \.Trg M \<^bold>\ \.Trg N" using MN by auto also have "... = \.Src u" using Trg_last_Src_hd_eqI seq by force also have "... = \.Src (\.Trg M \<^bold>\ \.un_App2 u)" using MN \\.App (\.Trg M) (\.Trg N) = \.Src u\ u by auto also have 8: "... = \.Trg M \<^bold>\ \.Src (\.un_App2 u)" using MN by simp also have 7: "... = \.Trg M \<^bold>\ \.Src (hd (filter notIde (map \.un_App2 (u # U))))" using u 5 list.simps(9) cong_filter_notIde \filter notIde (map \.un_App1 (u # U)) = []\ by auto also have "... = \.Src (hd (map (\X. \.Trg M \<^bold>\ X) (filter notIde (map \.un_App2 (u # U)))))" (* TODO: Figure out what is going on with 7 8 9. *) by (metis 7 8 9 Arr.simps(1) hd_map \.Src.simps(4) \.lambda.sel(4) list.simps(8)) finally show "\.Trg (last ([M \<^bold>\ \.Src N] @ [\.Trg M \<^bold>\ N])) = \.Src (hd (map (\X. 
\.Trg M \<^bold>\ X) (filter notIde (map \.un_App2 (u # U)))))" by blast qed qed show "seq [M \<^bold>\ \.Src N] [\.Trg M \<^bold>\ N]" using MN by fastforce show "[M \<^bold>\ \.Src N] \<^sup>*\\<^sup>* [M \<^bold>\ \.Src N]" using MN by (meson head_redex_decomp \.Arr.simps(4) \.Arr_Src prfx_transitive) show "[\.Trg M \<^bold>\ N] \<^sup>*\\<^sup>* [\.Trg M \<^bold>\ N]" using MN by (meson \seq [M \<^bold>\ \.Src N] [\.Trg M \<^bold>\ N]\ cong_reflexive seqE) show "map (\X. \.Trg M \<^bold>\ X) (filter notIde (map \.un_App2 (u # U))) \<^sup>*\\<^sup>* map (\X. \.Trg M \<^bold>\ X) (map \.un_App2 (u # U))" proof - have "Arr (map \.un_App2 (u # U))" using *** u Arr_map_un_App2 by (metis Std Std_imp_Arr list.distinct(1) mem_Collect_eq set_ConsD subset_code(1)) moreover have "\ Ide (map \.un_App2 (u # U))" using ** by (metis Collect_cong \.ide_char list.set_map set_Ide_subset_ide) ultimately show ?thesis using M MN cong_filter_notIde cong_map_App1 \.Ide_Trg by presburger qed qed also have "([M \<^bold>\ \.Src N] @ [\.Trg M \<^bold>\ N]) @ map (\X. \.Trg M \<^bold>\ X) (map \.un_App2 (u # U)) \<^sup>*\\<^sup>* [M \<^bold>\ N] @ u # U" proof (intro cong_append) show "seq ([M \<^bold>\ \.Src N] @ [\.Trg M \<^bold>\ N]) (map (\X. \.Trg M \<^bold>\ X) (map \.un_App2 (u # U)))" by (metis Nil_is_append_conv Nil_is_map_conv arr_append_imp_seq calculation cong_implies_coterminal coterminalE list.distinct(1)) show "[M \<^bold>\ \.Src N] @ [\.Trg M \<^bold>\ N] \<^sup>*\\<^sup>* [M \<^bold>\ N]" using MN \.resid_Arr_self \.Arr_not_Nil \.Ide_Trg ide_char by simp show " map (\X. \.Trg M \<^bold>\ X) (map \.un_App2 (u # U)) \<^sup>*\\<^sup>* u # U" proof - have "map (\X. \.Trg M \<^bold>\ X) (map \.un_App2 (u # U)) = u # U" proof (intro map_App_map_un_App2) show "Arr (u # U)" using Std Std_imp_Arr by blast show "set (u # U) \ Collect \.is_App" using *** u by auto show "\.Ide (\.Trg M)" using MN \.Ide_Trg by blast show "\.un_App1 ` set (u # U) \ {\.Trg M}" proof - have "\.un_App1 u = \.Trg M" using * u seq seq_char apply (cases u) apply simp_all by (metis Trg_last_Src_hd_eqI \.Ide_iff_Src_self \.Src_Src \.Src_Trg \.Src_eq_iff(2) \.Trg.simps(3) last_ConsL list.sel(1) seq u) moreover have "Ide (map \.un_App1 (u # U))" using * Std Std_imp_Arr Arr_map_un_App1 by (metis Collect_cong Ide_char \Arr (u # U)\ \set (u # U) \ Collect \.is_App\ \.ide_char list.set_map) ultimately show ?thesis using set_Ide_subset_single_hd by force qed qed thus ?thesis by (simp add: Resid_Arr_self Std ide_char) qed qed also have "[M \<^bold>\ N] @ u # U = (M \<^bold>\ N) # u # U" by simp finally show ?thesis by blast qed qed qed qed show "\\ \.un_App1 ` set (u # U) \ Collect \.Ide; \.un_App2 ` set (u # U) \ Collect \.Ide\ \ ?thesis" proof - assume *: "\ \.un_App1 ` set (u # U) \ Collect \.Ide" assume **: "\.un_App2 ` set (u # U) \ Collect \.Ide" have 10: "filter notIde (map \.un_App2 (u # U)) = []" using ** by (simp add: subset_eq) hence 4: "stdz_insert (M \<^bold>\ N) (u # U) = map (\X. X \<^bold>\ \.Src N) (stdz_insert M (filter notIde (map \.un_App1 (u # U)))) @ map (\.App (\.Trg (\.un_App1 (last (u # U))))) (standard_development N)" using u 4 5 * ** Ide_iff_standard_development_empty MN by simp have 6: "\.Ide (\.Trg (\.un_App1 (last (u # U))))" using *** u Std Std_imp_Arr by (metis Arr_imp_arr_last in_mono \.Arr.simps(4) \.Ide_Trg \.arr_char \.lambda.collapse(3) last.simps last_in_set list.discI mem_Collect_eq) show ?thesis proof (intro conjI) show "Std (stdz_insert (M \<^bold>\ N) (u # U))" proof - have "Std (map (\X. 
X \<^bold>\ \.Src N) (stdz_insert M (filter notIde (map \.un_App1 (u # U)))) @ map (\.App (\.Trg (\.un_App1 (last (u # U))))) (standard_development N))" proof (intro Std_append) show "Std (map (\X. X \<^bold>\ \.Src N) (stdz_insert M (filter notIde (map \.un_App1 (u # U)))))" using * A MN Std_map_App1 \.Ide_Src by presburger show "Std (map (\.App (\.Trg (\.un_App1 (last (u # U))))) (standard_development N))" using MN 6 Std_map_App2 Std_standard_development by simp show "map (\X. X \<^bold>\ \.Src N) (stdz_insert M (filter notIde (map \.un_App1 (u # U)))) = [] \ map (\.App (\.Trg (\.un_App1 (last (u # U))))) (standard_development N) = [] \ \.sseq (last (map (\X. \.App X (\.Src N)) (stdz_insert M (filter notIde (map \.un_App1 (u # U)))))) (hd (map (\.App (\.Trg (\.un_App1 (last (u # U))))) (standard_development N)))" proof (cases "\.Ide N") show "\.Ide N \ ?thesis" using Ide_iff_standard_development_empty MN by blast assume N: "\ \.Ide N" have "\.sseq (last (map (\X. X \<^bold>\ \.Src N) (stdz_insert M (filter notIde (map \.un_App1 (u # U)))))) (hd (map (\.App (\.Trg (\.un_App1 (last (u # U))))) (standard_development N)))" proof - have "hd (map (\.App (\.Trg (\.un_App1 (last (u # U))))) (standard_development N)) = \.App (\.Trg (\.un_App1 (last (u # U)))) (hd (standard_development N))" by (meson Ide_iff_standard_development_empty MN N list.map_sel(1)) moreover have "last (map (\X. X \<^bold>\ \.Src N) (stdz_insert M (filter notIde (map \.un_App1 (u # U))))) = \.App (last (stdz_insert M (filter notIde (map \.un_App1 (u # U))))) (\.Src N)" by (metis * A Ide.simps(1) Resid.simps(1) ide_char last_map) moreover have "\.sseq ... (\.App (\.Trg (\.un_App1 (last (u # U)))) (hd (standard_development N)))" proof - have 7: "\.elementary_reduction (last (stdz_insert M (filter notIde (map \.un_App1 (u # U)))))" using * A by (metis Ide.simps(1) Resid.simps(2) ide_char last_in_set mem_Collect_eq subset_iff) moreover have "\.elementary_reduction (hd (standard_development N))" using MN N hd_in_set set_standard_development Ide_iff_standard_development_empty by blast moreover have "\.Src N = \.Src (hd (standard_development N))" using MN N Src_hd_standard_development by auto moreover have "\.Trg (last (stdz_insert M (filter notIde (map \.un_App1 (u # U))))) = \.Trg (\.un_App1 (last (u # U)))" proof - have "[\.Trg (last (stdz_insert M (filter notIde (map \.un_App1 (u # U)))))] = [\.Trg (\.un_App1 (last (u # U)))]" proof - have "\.Trg (last (stdz_insert M (filter notIde (map \.un_App1 (u # U))))) = \.Trg (last (map \.un_App1 (u # U)))" proof - have "\.Trg (last (stdz_insert M (filter notIde (map \.un_App1 (u # U))))) = \.Trg (last (M # filter notIde (map \.un_App1 (u # U))))" using * A Trg_last_eqI by blast also have "... = \.Trg (last ([M] @ filter notIde (map \.un_App1 (u # U))))" by simp also have "... = \.Trg (last (filter notIde (map \.un_App1 (u # U))))" proof - have "seq [M] (filter notIde (map \.un_App1 (u # U)))" proof show "Arr [M]" using MN by simp show "Arr (filter notIde (map \.un_App1 (u # U)))" using * Std_imp_Arr by (metis (no_types, lifting) X empty_filter_conv list.set_map mem_Collect_eq subsetI) show "\.Trg (last [M]) = \.Src (hd (filter notIde (map \.un_App1 (u # U))))" proof - have "\.Trg (last [M]) = \.Trg M" using MN by simp also have "... = \.Src (\.un_App1 u)" by (metis Trg_last_Src_hd_eqI \.Src.simps(4) \.Trg.simps(3) \.lambda.collapse(3) \.lambda.inject(3) last_ConsL list.sel(1) seq u) also have "... = \.Src (hd (map \.un_App1 (u # U)))" by auto also have "... 
= \.Src (hd (filter notIde (map \.un_App1 (u # U))))" using u 5 10 by force finally show ?thesis by blast qed qed thus ?thesis by fastforce qed also have "... = \.Trg (last (map \.un_App1 (u # U)))" proof - have "filter (\u. \ \.Ide u) (map \.un_App1 (u # U)) \<^sup>*\\<^sup>* map \.un_App1 (u # U)" using * *** u Std Std_imp_Arr Arr_map_un_App1 [of "u # U"] cong_filter_notIde by (metis (mono_tags, lifting) empty_filter_conv filter_notIde_Ide list.discI list.set_map mem_Collect_eq set_ConsD subset_code(1)) thus ?thesis using cong_implies_coterminal Trg_last_eqI by presburger qed finally show ?thesis by blast qed thus ?thesis by (simp add: last_map) qed moreover have "\.Ide (\.Trg (last (stdz_insert M (filter notIde (map \.un_App1 (u # U))))))" using 7 \.Ide_Trg \.elementary_reduction_is_arr by blast moreover have "\.Ide (\.Trg (\.un_App1 (last (u # U))))" using 6 by blast ultimately show ?thesis by simp qed ultimately show ?thesis using \.sseq.simps(4) by blast qed ultimately show ?thesis by argo qed thus ?thesis by blast qed qed thus ?thesis using 4 by simp qed show "\ Ide ((M \<^bold>\ N) # u # U) \ stdz_insert (M \<^bold>\ N) (u # U) \<^sup>*\\<^sup>* (M \<^bold>\ N) # u # U" proof show "stdz_insert (M \<^bold>\ N) (u # U) \<^sup>*\\<^sup>* (M \<^bold>\ N) # u # U" proof (cases "\.Ide N") assume N: "\.Ide N" have "stdz_insert (M \<^bold>\ N) (u # U) = map (\X. X \<^bold>\ N) (stdz_insert M (filter notIde (map \.un_App1 (u # U))))" using 4 N MN Ide_iff_standard_development_empty \.Ide_iff_Src_self by force also have "... \<^sup>*\\<^sup>* map (\X. X \<^bold>\ N) (M # filter notIde (map \.un_App1 (u # U)))" using * A MN N \.Ide_Src cong_map_App2 \.Ide_iff_Src_self by blast also have "map (\X. X \<^bold>\ N) (M # filter notIde (map \.un_App1 (u # U))) = [M \<^bold>\ N] @ map (\X. \.App X N) (filter notIde (map \.un_App1 (u # U)))" by auto also have "[M \<^bold>\ N] @ map (\X. X \<^bold>\ N) (filter notIde (map \.un_App1 (u # U))) \<^sup>*\\<^sup>* [M \<^bold>\ N] @ map (\X. X \<^bold>\ N) (map \.un_App1 (u # U))" proof (intro cong_append) show "seq [M \<^bold>\ N] (map (\X. X \<^bold>\ N) (filter notIde (map \.un_App1 (u # U))))" proof have 20: "Arr (map \.un_App1 (u # U))" using *** u Std Arr_map_un_App1 by (metis Std_imp_Arr insert_subset list.discI list.simps(15) mem_Collect_eq) show "Arr [M \<^bold>\ N]" using MN by auto show 21: "Arr (map (\X. X \<^bold>\ N) (filter notIde (map \.un_App1 (u # U))))" proof - have "Arr (filter notIde (map \.un_App1 (u # U)))" using u 20 cong_filter_notIde by (metis (no_types, lifting) * Std_imp_Arr \Std (filter notIde (map \.un_App1 (u # U)))\ empty_filter_conv list.set_map mem_Collect_eq subsetI) thus ?thesis using MN N Arr_map_App1 \.Ide_Src by presburger qed show "\.Trg (last [M \<^bold>\ N]) = \.Src (hd (map (\X. X \<^bold>\ N) (filter notIde (map \.un_App1 (u # U)))))" proof - have "\.Trg (last [M \<^bold>\ N]) = \.Trg M \<^bold>\ N" using MN N \.Ide_iff_Trg_self by simp also have "... = \.Src (\.un_App1 u) \<^bold>\ N" using MN u seq seq_char by (metis Trg_last_Src_hd_eqI calculation \.Src_Src \.Src_Trg \.Src_eq_iff(2) \.is_App_def \.lambda.sel(3) list.sel(1)) also have "... = \.Src (\.un_App1 u \<^bold>\ N)" using MN N \.Ide_iff_Src_self by simp also have "... = \.Src (hd (map (\X. X \<^bold>\ N) (map \.un_App1 (u # U))))" by simp also have "... = \.Src (hd (map (\X. 
X \<^bold>\ N) (filter notIde (map \.un_App1 (u # U)))))" proof - have "cong (map \.un_App1 (u # U)) (filter notIde (map \.un_App1 (u # U)))" using * 20 21 cong_filter_notIde by (metis Arr.simps(1) filter_notIde_Ide map_is_Nil_conv) thus ?thesis by (metis (no_types, lifting) Ide.simps(1) Resid.simps(2) Src_hd_eqI hd_map ide_char \.Src.simps(4) list.distinct(1) list.simps(9)) qed finally show ?thesis by blast qed qed show "cong [M \<^bold>\ N] [M \<^bold>\ N]" using MN by (meson head_redex_decomp \.Arr.simps(4) \.Arr_Src prfx_transitive) show "map (\X. X \<^bold>\ N) (filter notIde (map \.un_App1 (u # U))) \<^sup>*\\<^sup>* map (\X. X \<^bold>\ N) (map \.un_App1 (u # U))" proof - have "Arr (map \.un_App1 (u # U))" using *** u Std Arr_map_un_App1 by (metis Std_imp_Arr insert_subset list.discI list.simps(15) mem_Collect_eq) moreover have "\ Ide (map \.un_App1 (u # U))" using * by (metis Collect_cong \.ide_char list.set_map set_Ide_subset_ide) ultimately show ?thesis using *** u MN N cong_filter_notIde cong_map_App2 by (meson \.Ide_Src) qed qed also have "[M \<^bold>\ N] @ map (\X. X \<^bold>\ N) (map \.un_App1 (u # U)) \<^sup>*\\<^sup>* [M \<^bold>\ N] @ u # U" proof - have "map (\X. X \<^bold>\ N) (map \.un_App1 (u # U)) \<^sup>*\\<^sup>* u # U" proof - have "map (\X. X \<^bold>\ N) (map \.un_App1 (u # U)) = u # U" proof (intro map_App_map_un_App1) show "Arr (u # U)" using Std Std_imp_Arr by simp show "set (u # U) \ Collect \.is_App" using *** u by auto show "\.Ide N" using N by simp show "\.un_App2 ` set (u # U) \ {N}" proof - have "\.Src (\.un_App2 u) = \.Trg N" using ** seq u seq_char N apply (cases u) apply simp_all by (metis Trg_last_Src_hd_eqI \.Src.simps(4) \.Trg.simps(3) \.lambda.inject(3) last_ConsL list.sel(1) seq) moreover have "\.Ide (\.un_App2 u) \ \.Ide N" using ** N by simp moreover have "Ide (map \.un_App2 (u # U))" using ** Std Std_imp_Arr Arr_map_un_App2 by (metis Collect_cong Ide_char \Arr (u # U)\ \set (u # U) \ Collect \.is_App\ \.ide_char list.set_map) ultimately show ?thesis by (metis hd_map \.Ide_iff_Src_self \.Ide_iff_Trg_self \.Ide_implies_Arr list.discI list.sel(1) list.set_map set_Ide_subset_single_hd) qed qed thus ?thesis by (simp add: Resid_Arr_self Std ide_char) qed thus ?thesis using MN cong_append by (metis (no_types, lifting) 1 cong_standard_development cong_transitive \.Arr.simps(4) seq) qed also have "[M \<^bold>\ N] @ (u # U) = (M \<^bold>\ N) # u # U" by simp finally show ?thesis by blast next assume N: "\ \.Ide N" have "stdz_insert (M \<^bold>\ N) (u # U) = map (\X. X \<^bold>\ \.Src N) (stdz_insert M (filter notIde (map \.un_App1 (u # U)))) @ map (\.App (\.Trg (\.un_App1 (last (u # U))))) (standard_development N)" using 4 by simp also have "... \<^sup>*\\<^sup>* map (\X. X \<^bold>\ \.Src N) (M # filter notIde (map \.un_App1 (u # U))) @ map (\.App (\.Trg (\.un_App1 (last (u # U))))) [N]" proof (intro cong_append) show 23: "map (\X. X \<^bold>\ \.Src N) (stdz_insert M (filter notIde (map \.un_App1 (u # U)))) \<^sup>*\\<^sup>* map (\X. X \<^bold>\ \.Src N) (M # filter notIde (map \.un_App1 (u # U)))" using * A MN \.Ide_Src cong_map_App2 by blast show 22: "map (\.App (\.Trg (\.un_App1 (last (u # U))))) (standard_development N) \<^sup>*\\<^sup>* map (\.App (\.Trg (\.un_App1 (last (u # U))))) [N]" using 6 *** u Std Std_imp_Arr MN N cong_standard_development cong_map_App1 by presburger show "seq (map (\X. 
X \<^bold>\ \.Src N) (stdz_insert M (filter notIde (map \.un_App1 (u # U))))) (map (\.App (\.Trg (\.un_App1 (last (u # U))))) (standard_development N))" proof - have "seq (map (\X. X \<^bold>\ \.Src N) (M # filter notIde (map \.un_App1 (u # U)))) (map (\.App (\.Trg (\.un_App1 (last (u # U))))) [N])" proof show 26: "Arr (map (\X. X \<^bold>\ \.Src N) (M # filter notIde (map \.un_App1 (u # U))))" by (metis 23 Con_implies_Arr(2) Ide.simps(1) ide_char) show "Arr (map (\.App (\.Trg (\.un_App1 (last (u # U))))) [N])" by (meson 22 arr_char con_implies_arr(2) prfx_implies_con) show "\.Trg (last (map (\X. X \<^bold>\ \.Src N) (M # filter notIde (map \.un_App1 (u # U))))) = \.Src (hd (map (\.App (\.Trg (\.un_App1 (last (u # U))))) [N]))" proof - have "\.Trg (last (map (\X. X \<^bold>\ \.Src N) (M # map \.un_App1 (u # U)))) \ \.Trg (last (map (\X. X \<^bold>\ \.Src N) (M # filter notIde (map \.un_App1 (u # U)))))" proof - have "targets (map (\X. X \<^bold>\ \.Src N) (M # filter notIde (map \.un_App1 (u # U)))) = targets (map (\X. X \<^bold>\ \.Src N) (M # map \.un_App1 (u # U)))" proof - have "map (\X. X \<^bold>\ \.Src N) (M # filter notIde (map \.un_App1 (u # U))) \<^sup>*\\<^sup>* map (\X. X \<^bold>\ \.Src N) (M # map \.un_App1 (u # U))" proof - have "map (\X. X \<^bold>\ \.Src N) (M # map \.un_App1 (u # U)) = map (\X. X \<^bold>\ \.Src N) ([M] @ map \.un_App1 (u # U))" by simp also have "cong ... (map (\X. X \<^bold>\ \.Src N) ([M] @ filter notIde (map \.un_App1 (u # U))))" proof - have "[M] @ map \.un_App1 (u # U) \<^sup>*\\<^sup>* [M] @ filter notIde (map \.un_App1 (u # U))" proof (intro cong_append) show "cong [M] [M]" using MN by (meson head_redex_decomp prfx_transitive) show "seq [M] (map \.un_App1 (u # U))" proof show "Arr [M]" using MN by simp show "Arr (map \.un_App1 (u # U))" using *** u Std Arr_map_un_App1 by (metis Std_imp_Arr insert_subset list.discI list.simps(15) mem_Collect_eq) show "\.Trg (last [M]) = \.Src (hd (map \.un_App1 (u # U)))" using MN u seq seq_char Srcs_simp\<^sub>\\<^sub>P by auto qed show "cong (map \.un_App1 (u # U)) (filter notIde (map \.un_App1 (u # U)))" proof - have "Arr (map \.un_App1 (u # U))" by (metis *** Arr_map_un_App1 Std Std_imp_Arr insert_subset list.discI list.simps(15) mem_Collect_eq u) moreover have "\ Ide (map \.un_App1 (u # U))" using * set_Ide_subset_ide by fastforce ultimately show ?thesis using cong_filter_notIde by blast qed qed thus "map (\X. X \<^bold>\ \.Src N) ([M] @ map \.un_App1 (u # U)) \<^sup>*\\<^sup>* map (\X. X \<^bold>\ \.Src N) ([M] @ filter notIde (map \.un_App1 (u # U)))" using MN cong_map_App2 \.Ide_Src by presburger qed finally show ?thesis by simp qed thus ?thesis using cong_implies_coterminal by blast qed moreover have "[\.Trg (last (map (\X. X \<^bold>\ \.Src N) (M # map \.un_App1 (u # U))))] \ targets (map (\X. X \<^bold>\ \.Src N) (M # map \.un_App1 (u # U)))" by (metis (no_types, lifting) 26 calculation mem_Collect_eq single_Trg_last_in_targets targets_char\<^sub>\\<^sub>P) moreover have "[\.Trg (last (map (\X. X \<^bold>\ \.Src N) (M # filter notIde (map \.un_App1 (u # U)))))] \ targets (map (\X. X \<^bold>\ \.Src N) (M # filter notIde (map \.un_App1 (u # U))))" using 26 single_Trg_last_in_targets by blast ultimately show ?thesis by (metis (no_types, lifting) 26 Ide.simps(1-2) Resid_rec(1) in_targets_iff ide_char) qed moreover have "\.Ide (\.Trg (last (map (\X. 
X \<^bold>\ \.Src N) (M # map \.un_App1 (u # U)))))" by (metis 6 MN \.Ide.simps(4) \.Ide_Src \.Trg.simps(3) \.Trg_Src last_ConsR last_map list.distinct(1) list.simps(9)) moreover have "\.Ide (\.Trg (last (map (\X. X \<^bold>\ \.Src N) (M # filter notIde (map \.un_App1 (u # U))))))" using \.ide_backward_stable calculation(1-2) by fast ultimately show ?thesis by (metis (no_types, lifting) 6 MN hd_map \.Ide_iff_Src_self \.Ide_implies_Arr \.Src.simps(4) \.Trg.simps(3) \.Trg_Src \.cong_Ide_are_eq last.simps last_map list.distinct(1) list.map_disc_iff list.sel(1)) qed qed thus ?thesis using 22 23 cong_respects_seq\<^sub>P by presburger qed qed also have "map (\X. X \<^bold>\ \.Src N) (M # filter notIde (map \.un_App1 (u # U))) @ map (\.App (\.Trg (\.un_App1 (last (u # U))))) [N] = [M \<^bold>\ \.Src N] @ map (\X. X \<^bold>\ \.Src N) (filter notIde (map \.un_App1 (u # U))) @ [\.App (\.Trg (\.un_App1 (last (u # U)))) N]" by simp also have 1: "[M \<^bold>\ \.Src N] @ map (\X. X \<^bold>\ \.Src N) (filter notIde (map \.un_App1 (u # U))) @ [\.App (\.Trg (\.un_App1 (last (u # U)))) N] \<^sup>*\\<^sup>* [M \<^bold>\ \.Src N] @ map (\X. X \<^bold>\ \.Src N) (map \.un_App1 (u # U)) @ [\.App (\.Trg (\.un_App1 (last (u # U)))) N]" proof (intro cong_append) show "[M \<^bold>\ \.Src N] \<^sup>*\\<^sup>* [M \<^bold>\ \.Src N]" using MN by (meson head_redex_decomp lambda_calculus.Arr.simps(4) lambda_calculus.Arr_Src prfx_transitive) show 21: "map (\X. X \<^bold>\ \.Src N) (filter notIde (map \.un_App1 (u # U))) \<^sup>*\\<^sup>* map (\X. X \<^bold>\ \.Src N) (map \.un_App1 (u # U))" proof - have "filter notIde (map \.un_App1 (u # U)) \<^sup>*\\<^sup>* map \.un_App1 (u # U)" proof - have "\ Ide (map \.un_App1 (u # U))" using * by (metis Collect_cong \.ide_char list.set_map set_Ide_subset_ide) thus ?thesis using *** u Std Std_imp_Arr Arr_map_un_App1 cong_filter_notIde by (metis \\ Ide (map \.un_App1 (u # U))\ list.distinct(1) mem_Collect_eq set_ConsD subset_code(1)) qed thus ?thesis using MN cong_map_App2 [of "\.Src N"] \.Ide_Src by presburger qed show "[\.Trg (\.un_App1 (last (u # U))) \<^bold>\ N] \<^sup>*\\<^sup>* [\.Trg (\.un_App1 (last (u # U))) \<^bold>\ N]" by (metis "6" Con_implies_Arr(1) MN \.Ide_implies_Arr arr_char cong_reflexive \.Ide_iff_Src_self neq_Nil_conv orthogonal_App_single_single(1)) show "seq (map (\X. X \<^bold>\ \.Src N) (filter notIde (map \.un_App1 (u # U)))) [\.Trg (\.un_App1 (last (u # U))) \<^bold>\ N]" proof show "Arr (map (\X. X \<^bold>\ \.Src N) (filter notIde (map \.un_App1 (u # U))))" by (metis 21 Con_implies_Arr(2) Ide.simps(1) ide_char) show "Arr [\.Trg (\.un_App1 (last (u # U))) \<^bold>\ N]" by (metis Con_implies_Arr(2) Ide.simps(1) \[\.Trg (\.un_App1 (last (u # U))) \<^bold>\ N] \<^sup>*\\<^sup>* [\.Trg (\.un_App1 (last (u # U))) \<^bold>\ N]\ ide_char) show "\.Trg (last (map (\X. X \<^bold>\ \.Src N) (filter notIde (map \.un_App1 (u # U))))) = \.Src (hd [\.Trg (\.un_App1 (last (u # U))) \<^bold>\ N])" by (metis (no_types, lifting) 6 21 MN Trg_last_eqI \.Ide_iff_Src_self \.Ide_implies_Arr \.Src.simps(4) \.Trg.simps(3) \.Trg_Src last_map list.distinct(1) list.map_disc_iff list.sel(1)) qed show "seq [M \<^bold>\ \.Src N] (map (\X. X \<^bold>\ \.Src N) (filter notIde (map \.un_App1 (u # U))) @ [\.Trg (\.un_App1 (last (u # U))) \<^bold>\ N])" proof show "Arr [M \<^bold>\ \.Src N]" using MN by simp show "Arr (map (\X. 
X \<^bold>\ \.Src N) (filter notIde (map \.un_App1 (u # U))) @ [\.Trg (\.un_App1 (last (u # U))) \<^bold>\ N])" apply (intro Arr_appendI\<^sub>P) apply (metis 21 Con_implies_Arr(2) Ide.simps(1) ide_char) apply (metis Con_implies_Arr(1) Ide.simps(1) \[\.Trg (\.un_App1 (last (u # U))) \<^bold>\ N] \<^sup>*\\<^sup>* [\.Trg (\.un_App1 (last (u # U))) \<^bold>\ N]\ ide_char) by (metis (no_types, lifting) "21" Arr.simps(1) Arr_append_iff\<^sub>P Con_implies_Arr(2) Ide.simps(1) append_is_Nil_conv calculation ide_char not_Cons_self2) show "\.Trg (last [M \<^bold>\ \.Src N]) = \.Src (hd (map (\X. X \<^bold>\ \.Src N) (filter notIde (map \.un_App1 (u # U))) @ [\.Trg (\.un_App1 (last (u # U))) \<^bold>\ N]))" by (metis (no_types, lifting) Con_implies_Arr(2) Ide.simps(1) Trg_last_Src_hd_eqI append_is_Nil_conv arr_append_imp_seq arr_char calculation ide_char not_Cons_self2) qed qed also have "[M \<^bold>\ \.Src N] @ map (\X. X \<^bold>\ \.Src N)(map \.un_App1 (u # U)) @ [\.Trg (\.un_App1 (last (u # U))) \<^bold>\ N] \<^sup>*\\<^sup>* [M \<^bold>\ \.Src N] @ [\.Trg M \<^bold>\ N] @ map (\X. X \<^bold>\ \.Trg N) (map \.un_App1 (u # U))" proof (intro cong_append [of "[\.App M (\.Src N)]"]) show "seq [M \<^bold>\ \.Src N] (map (\X. X \<^bold>\ \.Src N) (map \.un_App1 (u # U)) @ [\.Trg (\.un_App1 (last (u # U))) \<^bold>\ N])" proof show "Arr [M \<^bold>\ \.Src N]" using MN by simp show "Arr (map (\X. X \<^bold>\ \.Src N) (map \.un_App1 (u # U)) @ [\.Trg (\.un_App1 (last (u # U))) \<^bold>\ N])" by (metis (no_types, lifting) 1 Con_append(2) Con_implies_Arr(2) Ide.simps(1) append_is_Nil_conv ide_char not_Cons_self2) show "\.Trg (last [M \<^bold>\ \.Src N]) = \.Src (hd (map (\X. X \<^bold>\ \.Src N) (map \.un_App1 (u # U)) @ [\.Trg (\.un_App1 (last (u # U))) \<^bold>\ N]))" proof - have "\.Trg M = \.Src (\.un_App1 u)" using u seq by (metis Trg_last_Src_hd_eqI \.Src.simps(4) \.Trg.simps(3) \.lambda.collapse(3) \.lambda.inject(3) last_ConsL list.sel(1)) thus ?thesis using MN by auto qed qed show "[M \<^bold>\ \.Src N] \<^sup>*\\<^sup>* [M \<^bold>\ \.Src N]" using MN by (metis head_redex_decomp \.Arr.simps(4) \.Arr_Src prfx_transitive) show "map (\X. X \<^bold>\ \.Src N) (map \.un_App1 (u # U)) @ [\.Trg (\.un_App1 (last (u # U))) \<^bold>\ N] \<^sup>*\\<^sup>* [\.Trg M \<^bold>\ N] @ map (\X. X \<^bold>\ \.Trg N) (map \.un_App1 (u # U))" proof - have "map (\X. X \<^bold>\ \.Src (hd [N])) (map \.un_App1 (u # U)) @ map (\.App (\.Trg (last (map \.un_App1 (u # U))))) [N] \<^sup>*\\<^sup>* map (\.App (\.Src (hd (map \.un_App1 (u # U))))) [N] @ map (\X. X \<^bold>\ \.Trg (last [N])) (map \.un_App1 (u # U))" proof - have "Arr (map \.un_App1 (u # U))" using Std *** u Arr_map_un_App1 by (metis Std_imp_Arr insert_subset list.discI list.simps(15) mem_Collect_eq) moreover have "Arr [N]" using MN by simp ultimately show ?thesis using orthogonal_App_cong by blast qed moreover have "map (\.App (\.Src (hd (map \.un_App1 (u # U))))) [N] = [\.Trg M \<^bold>\ N]" by (metis Trg_last_Src_hd_eqI lambda_calculus.Src.simps(4) \.Trg.simps(3) \.lambda.collapse(3) \.lambda.sel(3) last_ConsL list.sel(1) list.simps(8) list.simps(9) seq u) moreover have "[\.Trg (\.un_App1 (last (u # U))) \<^bold>\ N] = map (\.App (\.Trg (last (map \.un_App1 (u # U))))) [N]" by (simp add: last_map) ultimately show ?thesis using last_map by auto qed qed also have "[M \<^bold>\ \.Src N] @ [\.Trg M \<^bold>\ N] @ map (\X. X \<^bold>\ \.Trg N) (map \.un_App1 (u # U)) = ([M \<^bold>\ \.Src N] @ [\.Trg M \<^bold>\ N]) @ map (\X. 
X \<^bold>\ \.Trg N) (map \.un_App1 (u # U))" by simp also have "... \<^sup>*\\<^sup>* [M \<^bold>\ N] @ (u # U)" proof (intro cong_append) show "[M \<^bold>\ \.Src N] @ [\.Trg M \<^bold>\ N] \<^sup>*\\<^sup>* [M \<^bold>\ N]" using MN \.resid_Arr_self \.Arr_not_Nil \.Ide_Trg ide_char by auto show 1: "map (\X. X \<^bold>\ \.Trg N) (map \.un_App1 (u # U)) \<^sup>*\\<^sup>* u # U" proof - have "map (\X. X \<^bold>\ \.Trg N) (map \.un_App1 (u # U)) = u # U" proof (intro map_App_map_un_App1) show "Arr (u # U)" using Std Std_imp_Arr by simp show "set (u # U) \ Collect \.is_App" using "***" u by auto show "\.Ide (\.Trg N)" using MN \.Ide_Trg by simp show "\.un_App2 ` set (u # U) \ {\.Trg N}" proof - have "\.Src (\.un_App2 u) = \.Trg N" using u seq seq_char apply (cases u) apply simp_all by (metis Trg_last_Src_hd_eqI \.Src.simps(4) \.Trg.simps(3) \.lambda.inject(3) last_ConsL list.sel(1) seq) moreover have "\.Ide (\.un_App2 u)" using ** by simp moreover have "Ide (map \.un_App2 (u # U))" using ** Std Std_imp_Arr Arr_map_un_App2 by (metis Collect_cong Ide_char \Arr (u # U)\ \set (u # U) \ Collect \.is_App\ \.ide_char list.set_map) ultimately show ?thesis by (metis \.Ide_iff_Src_self \.Ide_implies_Arr list.sel(1) list.set_map list.simps(9) set_Ide_subset_single_hd singleton_insert_inj_eq) qed qed thus ?thesis by (simp add: Resid_Arr_self Std ide_char) qed show "seq ([M \<^bold>\ \.Src N] @ [\.Trg M \<^bold>\ N]) (map (\X. X \<^bold>\ \.Trg N) (map \.un_App1 (u # U)))" proof show "Arr ([M \<^bold>\ \.Src N] @ [\.Trg M \<^bold>\ N])" using MN by simp show "Arr (map (\X. X \<^bold>\ \.Trg N) (map \.un_App1 (u # U)))" using MN Std Std_imp_Arr Arr_map_un_App1 Arr_map_App1 by (metis 1 Con_implies_Arr(1) Ide.simps(1) ide_char) show "\.Trg (last ([M \<^bold>\ \.Src N] @ [\.Trg M \<^bold>\ N])) = \.Src (hd (map (\X. X \<^bold>\ \.Trg N) (map \.un_App1 (u # U))))" using MN Std Std_imp_Arr Arr_map_un_App1 Arr_map_App1 seq seq_char u Srcs_simp\<^sub>\\<^sub>P by auto qed qed also have "[M \<^bold>\ N] @ (u # U) = (M \<^bold>\ N) # u # U" by simp finally show ?thesis by blast qed qed qed qed show "\\ \.un_App1 ` set (u # U) \ Collect \.Ide; \ \.un_App2 ` set (u # U) \ Collect \.Ide\ \ ?thesis" proof - assume *: "\ \.un_App1 ` set (u # U) \ Collect \.Ide" assume **: "\ \.un_App2 ` set (u # U) \ Collect \.Ide" show ?thesis proof (intro conjI) show "Std (stdz_insert (M \<^bold>\ N) (u # U))" proof - have "Std (map (\X. X \<^bold>\ \.Src N) (stdz_insert M (filter notIde (map \.un_App1 (u # U)))) @ map (\.App (\.Trg (\.un_App1 (last (u # U))))) (stdz_insert N (filter notIde (map \.un_App2 (u # U)))))" proof (intro Std_append) show "Std (map (\X. X \<^bold>\ \.Src N) (stdz_insert M (filter notIde (map \.un_App1 (u # U)))))" using * A \.Ide_Src MN Std_map_App1 by presburger show "Std (map (\.App (\.Trg (\.un_App1 (last (u # U))))) (stdz_insert N (filter notIde (map \.un_App2 (u # U)))))" proof - have "\.Arr (\.un_App1 (last (u # U)))" by (metis *** \.Arr.simps(4) Std Std_imp_Arr Arr.simps(2) Arr_append_iff\<^sub>P append_butlast_last_id append_self_conv2 \.arr_char \.lambda.collapse(3) last.simps last_in_set list.discI mem_Collect_eq subset_code(1) u) thus ?thesis using ** B \.Ide_Trg MN Std_map_App2 by presburger qed show "map (\X. X \<^bold>\ \.Src N) (stdz_insert M (filter notIde (map \.un_App1 (u # U)))) = [] \ map (\.App (\.Trg (\.un_App1 (last (u # U))))) (stdz_insert N (filter notIde (map \.un_App2 (u # U)))) = [] \ \.sseq (last (map (\X. 
X \<^bold>\ \.Src N) (stdz_insert M (filter notIde (map \.un_App1 (u # U)))))) (hd (map (\.App (\.Trg (\.un_App1 (last (u # U))))) (stdz_insert N (filter notIde (map \.un_App2 (u # U))))))" proof - have "\.sseq (last (map (\X. X \<^bold>\ \.Src N) (stdz_insert M (filter notIde (map \.un_App1 (u # U)))))) (hd (map (\.App (\.Trg (\.un_App1 (last (u # U))))) (stdz_insert N (filter notIde (map \.un_App2 (u # U))))))" proof - let ?M = "\.un_App1 (last (map (\X. X \<^bold>\ \.Src N) (stdz_insert M (filter notIde (map \.un_App1 (u # U))))))" let ?M' = "\.Trg (\.un_App1 (last (u # U)))" let ?N = "\.Src N" let ?N' = "\.un_App2 (hd (map (\.App (\.Trg (\.un_App1 (last (u # U))))) (stdz_insert N (filter notIde (map \.un_App2 (u # U))))))" have M: "?M = last (stdz_insert M (filter notIde (map \.un_App1 (u # U))))" by (metis * A Ide.simps(1) Resid.simps(1) ide_char \.lambda.sel(3) last_map) have N': "?N' = hd (stdz_insert N (filter notIde (map \.un_App2 (u # U))))" by (metis ** B Ide.simps(1) Resid.simps(2) ide_char \.lambda.sel(4) hd_map) have AppMN: "last (map (\X. X \<^bold>\ \.Src N) (stdz_insert M (filter notIde (map \.un_App1 (u # U))))) = ?M \<^bold>\ ?N" by (metis * A Ide.simps(1) M Resid.simps(2) ide_char last_map) moreover have 4: "hd (map (\.App (\.Trg (\.un_App1 (last (u # U))))) (stdz_insert N (filter notIde (map \.un_App2 (u # U))))) = ?M' \<^bold>\ ?N'" by (metis (no_types, lifting) ** B Resid.simps(2) con_char prfx_implies_con \.lambda.collapse(3) \.lambda.discI(3) \.lambda.inject(3) list.map_sel(1)) moreover have MM: "\.elementary_reduction ?M" by (metis * A Arr.simps(1) Con_implies_Arr(2) Ide.simps(1) M ide_char in_mono last_in_set mem_Collect_eq) moreover have NN': "\.elementary_reduction ?N'" using ** B N' by (metis Arr.simps(1) Con_implies_Arr(2) Ide.simps(1) ide_char in_mono list.set_sel(1) mem_Collect_eq) moreover have "\.Trg ?M = ?M'" proof - have 1: "[\.Trg ?M] \<^sup>*\\<^sup>* [?M']" proof - have "[\.Trg ?M] \<^sup>*\\<^sup>* [\.Trg (last (M # filter notIde (map \.un_App1 (u # U))))]" proof - have "targets (stdz_insert M (filter notIde (map \.un_App1 (u # U)))) = targets (M # filter notIde (map \.un_App1 (u # U)))" using * A cong_implies_coterminal by blast moreover have "[\.Trg (last (M # filter notIde (map \.un_App1 (u # U))))] \ targets (M # filter notIde (map \.un_App1 (u # U)))" by (metis (no_types, lifting) * A \.Arr_Trg \.Ide_Trg Arr.simps(2) Arr_append_iff\<^sub>P Arr_iff_Con_self Con_implies_Arr(2) Ide.simps(1) Ide.simps(2) Resid_Arr_Ide_ind ide_char append_butlast_last_id append_self_conv2 \.arr_char in_targets_iff \.ide_char list.discI) ultimately show ?thesis using * A M in_targets_iff by (metis (no_types, lifting) Con_implies_Arr(1) con_char prfx_implies_con in_targets_iff) qed also have 2: "[\.Trg (last (M # filter notIde (map \.un_App1 (u # U))))] \<^sup>*\\<^sup>* [\.Trg (last (filter notIde (map \.un_App1 (u # U))))]" by (metis (no_types, lifting) * prfx_transitive calculation empty_filter_conv last_ConsR list.set_map mem_Collect_eq subsetI) also have "[\.Trg (last (filter notIde (map \.un_App1 (u # U))))] \<^sup>*\\<^sup>* [\.Trg (last (map \.un_App1 (u # U)))]" proof - have "map \.un_App1 (u # U) \<^sup>*\\<^sup>* filter notIde (map \.un_App1 (u # U))" by (metis (mono_tags, lifting) * *** Arr_map_un_App1 Std Std_imp_Arr cong_filter_notIde empty_filter_conv filter_notIde_Ide insert_subset list.discI list.set_map list.simps(15) mem_Collect_eq subsetI u) thus ?thesis by (metis 2 Trg_last_eqI prfx_transitive) qed also have "[\.Trg (last (map \.un_App1 (u # U)))] 
= [?M']" by (simp add: last_map) finally show ?thesis by blast qed have 3: "\.Trg ?M = \.Trg ?M \\ ?M'" by (metis (no_types, lifting) 1 * A M Con_implies_Arr(2) Ide.simps(1) Resid_Arr_Ide_ind Resid_rec(1) ide_char target_is_ide in_targets_iff list.inject) also have "... = ?M'" by (metis (no_types, lifting) 1 4 Arr.simps(2) Con_implies_Arr(2) Ide.simps(1) Ide.simps(2) MM NN' Resid_Arr_Ide_ind Resid_rec(1) Src_hd_eqI calculation ide_char \.Ide_iff_Src_self \.Src_Trg \.arr_char \.elementary_reduction.simps(4) \.elementary_reduction_App_iff \.elementary_reduction_is_arr \.elementary_reduction_not_ide \.lambda.discI(3) \.lambda.sel(3) list.sel(1)) finally show ?thesis by blast qed moreover have "?N = \.Src ?N'" proof - have 1: "[\.Src ?N'] \<^sup>*\\<^sup>* [?N]" proof - have "sources (stdz_insert N (filter notIde (map \.un_App2 (u # U)))) = sources [N]" using ** B by (metis Con_implies_Arr(2) Ide.simps(1) coinitialE cong_implies_coinitial ide_char sources_cons) thus ?thesis by (metis (no_types, lifting) AppMN ** B \.Ide_Src MM MN N' NN' \.Trg_Src Arr.simps(1) Arr.simps(2) Con_implies_Arr(1) Ide.simps(2) con_char ideE ide_char sources_cons \.arr_char in_targets_iff \.elementary_reduction.simps(4) \.elementary_reduction_App_iff \.elementary_reduction_is_arr \.elementary_reduction_not_ide \.lambda.disc(14) \.lambda.sel(4) last_ConsL list.exhaust_sel targets_single_Src) qed have "\.Src ?N' = \.Src ?N' \\ ?N" by (metis (no_types, lifting) 1 MN \.Coinitial_iff_Con \.Ide_Src Arr.simps(2) Ide.simps(1) Ide_implies_Arr Resid_rec(1) ide_char \.not_arr_null \.null_char \.resid_Arr_Ide) also have "... = ?N" by (metis 1 MN NN' Src_hd_eqI calculation \.Src_Src \.arr_char \.elementary_reduction_is_arr list.sel(1)) finally show ?thesis by simp qed ultimately show ?thesis using u \.sseq.simps(4) by (metis (mono_tags, lifting)) qed thus ?thesis by blast qed qed thus ?thesis using 4 by presburger qed show "\ Ide ((M \<^bold>\ N) # u # U) \ stdz_insert (M \<^bold>\ N) (u # U) \<^sup>*\\<^sup>* (M \<^bold>\ N) # u # U" proof have "stdz_insert (M \<^bold>\ N) (u # U) = map (\X. X \<^bold>\ \.Src N) (stdz_insert M (filter notIde (map \.un_App1 (u # U)))) @ map (\.App (\.Trg (\.un_App1 (last (u # U))))) (stdz_insert N (filter notIde (map \.un_App2 (u # U))))" using 4 by simp also have "... \<^sup>*\\<^sup>* map (\X. 
X \<^bold>\ \.Src N) (M # map \.un_App1 (u # U)) @ map (\.App (\.Trg (\.un_App1 (last (u # U))))) (N # map \.un_App2 (u # U))" proof (intro cong_append) have X: "stdz_insert M (filter notIde (map \.un_App1 (u # U))) \<^sup>*\\<^sup>* M # map \.un_App1 (u # U)" proof - have "stdz_insert M (filter notIde (map \.un_App1 (u # U))) \<^sup>*\\<^sup>* [M] @ filter notIde (map \.un_App1 (u # U))" using * A by simp also have "[M] @ filter notIde (map \.un_App1 (u # U)) \<^sup>*\\<^sup>* [M] @ map \.un_App1 (u # U)" proof - have "filter notIde (map \.un_App1 (u # U)) \<^sup>*\\<^sup>* map \.un_App1 (u # U)" using * cong_filter_notIde by (metis (mono_tags, lifting) *** Arr_map_un_App1 Std Std_imp_Arr empty_filter_conv filter_notIde_Ide insert_subset list.discI list.set_map list.simps(15) mem_Collect_eq subsetI u) moreover have "seq [M] (filter notIde (map \.un_App1 (u # U)))" by (metis * A Arr.simps(1) Con_implies_Arr(1) append_Cons append_Nil arr_append_imp_seq arr_char calculation ide_implies_arr list.discI) ultimately show ?thesis using cong_append cong_reflexive by blast qed also have "[M] @ map \.un_App1 (u # U) = M # map \.un_App1 (u # U)" by simp finally show ?thesis by blast qed have Y: "stdz_insert N (filter notIde (map \.un_App2 (u # U))) \<^sup>*\\<^sup>* N # map \.un_App2 (u # U)" proof - have 5: "stdz_insert N (filter notIde (map \.un_App2 (u # U))) \<^sup>*\\<^sup>* [N] @ filter notIde (map \.un_App2 (u # U))" using ** B by simp also have "[N] @ filter notIde (map \.un_App2 (u # U)) \<^sup>*\\<^sup>* [N] @ map \.un_App2 (u # U)" proof - have "filter notIde (map \.un_App2 (u # U)) \<^sup>*\\<^sup>* map \.un_App2 (u # U)" using ** cong_filter_notIde by (metis (mono_tags, lifting) *** Arr_map_un_App2 Std Std_imp_Arr empty_filter_conv filter_notIde_Ide insert_subset list.discI list.set_map list.simps(15) mem_Collect_eq subsetI u) moreover have "seq [N] (filter notIde (map \.un_App2 (u # U)))" by (metis 5 Arr.simps(1) Con_implies_Arr(2) Ide.simps(1) arr_append_imp_seq arr_char calculation ide_char not_Cons_self2) ultimately show ?thesis using cong_append cong_reflexive by blast qed also have "[N] @ map \.un_App2 (u # U) = N # map \.un_App2 (u # U)" by simp finally show ?thesis by blast qed show "seq (map (\X. X \<^bold>\ \.Src N) (stdz_insert M (filter notIde (map \.un_App1 (u # U))))) (map (\.App (\.Trg (\.un_App1 (last (u # U))))) (stdz_insert N (filter notIde (map \.un_App2 (u # U)))))" by (metis 4 * ** A B Ide.simps(1) Nil_is_append_conv Nil_is_map_conv Resid.simps(1) Std_imp_Arr \Std (stdz_insert (M \<^bold>\ N) (u # U))\ arr_append_imp_seq arr_char ide_char) show "map (\X. X \<^bold>\ \.Src N) (stdz_insert M (filter notIde (map \.un_App1 (u # U)))) \<^sup>*\\<^sup>* map (\X. X \<^bold>\ \.Src N) (M # map \.un_App1 (u # U))" using X cong_map_App2 MN lambda_calculus.Ide_Src by presburger show "map (\.App (\.Trg (\.un_App1 (last (u # U))))) (stdz_insert N (filter notIde (map \.un_App2 (u # U)))) \<^sup>*\\<^sup>* map (\.App (\.Trg (\.un_App1 (last (u # U))))) (N # map \.un_App2 (u # U))" proof - have "set U \ Collect \.Arr \ Collect \.is_App" using *** Std Std_implies_set_subset_elementary_reduction \.elementary_reduction_is_arr by blast hence "\.Ide (\.Trg (\.un_App1 (last (u # U))))" by (metis inf.boundedE \.Arr.simps(4) \.Ide_Trg \.lambda.collapse(3) last.simps last_in_set mem_Collect_eq subset_eq u) thus ?thesis using Y cong_map_App1 by blast qed qed also have "map (\X. 
X \<^bold>\ \.Src N) (M # map \.un_App1 (u # U)) @ map (\.App (\.Trg (\.un_App1 (last (u # U))))) (N # map \.un_App2 (u # U)) \<^sup>*\\<^sup>* [M \<^bold>\ N] @ [u] @ U" proof - have "(map (\X. X \<^bold>\ \.Src N) (M # map \.un_App1 (u # U)) @ map (\.App (\.Trg (\.un_App1 (last (u # U))))) (N # map \.un_App2 (u # U))) = ([M \<^bold>\ \.Src N] @ map (\X. X \<^bold>\ \.Src N) (map \.un_App1 (u # U))) @ ([\.Trg (\.un_App1 (last (u # U))) \<^bold>\ N] @ map (\.App (\.Trg (\.un_App1 (last (u # U))))) (map \.un_App2 (u # U)))" by simp also have "... = [M \<^bold>\ \.Src N] @ (map (\X. X \<^bold>\ \.Src N) (map \.un_App1 (u # U)) @ map (\.App (\.Trg (\.un_App1 (last (u # U))))) [N]) @ map (\.App (\.Trg (\.un_App1 (last (u # U))))) (map \.un_App2 (u # U))" by auto also have "... \<^sup>*\\<^sup>* [M \<^bold>\ \.Src N] @ (map (\.App (\.Src (\.un_App1 u))) [N] @ map (\X. X \<^bold>\ \.Trg N) (map \.un_App1 (u # U))) @ map (\.App (\.Trg (\.un_App1 (last (u # U))))) (map \.un_App2 (u # U))" proof - (* * TODO: (intro congI) does not work because it breaks the expression * down too far, resulting in a false subgoal. *) have "(map (\X. X \<^bold>\ \.Src N) (map \.un_App1 (u # U)) @ map (\.App (\.Trg (\.un_App1 (last (u # U))))) [N]) @ map (\.App (\.Trg (\.un_App1 (last (u # U))))) (map \.un_App2 (u # U)) \<^sup>*\\<^sup>* (map (\.App (\.Src (\.un_App1 u))) [N] @ map (\X. X \<^bold>\ \.Trg N) (map \.un_App1 (u # U))) @ map (\.App (\.Trg (\.un_App1 (last (u # U))))) (map \.un_App2 (u # U))" proof - have 1: "Arr (map \.un_App1 (u # U))" using u *** by (metis Arr_map_un_App1 Std Std_imp_Arr list.discI mem_Collect_eq set_ConsD subset_code(1)) have "map (\X. \.App X (\.Src N)) (map \.un_App1 (u # U)) @ map (\.App (\.Trg (\.un_App1 (last (u # U))))) [N] \<^sup>*\\<^sup>* map (\.App (\.Src (\.un_App1 u))) [N] @ map (\X. \.App X (\.Trg N)) (map \.un_App1 (u # U))" proof - have "Arr [N]" using MN by simp moreover have "\.Trg (last (map \.un_App1 (u # U))) = \.Trg (\.un_App1 (last (u # U)))" by (simp add: last_map) ultimately show ?thesis using 1 orthogonal_App_cong [of "map \.un_App1 (u # U)" "[N]"] by simp qed moreover have "seq (map (\X. X \<^bold>\ \.Src N) (map \.un_App1 (u # U)) @ map (\.App (\.Trg (\.un_App1 (last (u # U))))) [N]) (map (\.App (\.Trg (\.un_App1 (last (u # U))))) (map \.un_App2 (u # U)))" proof show "Arr (map (\X. X \<^bold>\ \.Src N) (map \.un_App1 (u # U)) @ map (\.App (\.Trg (\.un_App1 (last (u # U))))) [N])" by (metis Con_implies_Arr(1) Ide.simps(1) calculation ide_char) show "Arr (map (\.App (\.Trg (\.un_App1 (last (u # U))))) (map \.un_App2 (u # U)))" using u *** by (metis 1 Arr_imp_arr_last Arr_map_App2 Arr_map_un_App2 Std Std_imp_Arr \.Ide_Trg \.arr_char last_map list.discI mem_Collect_eq set_ConsD subset_code(1)) show "\.Trg (last (map (\X. 
X \<^bold>\ \.Src N) (map \.un_App1 (u # U)) @ map (\.App (\.Trg (\.un_App1 (last (u # U))))) [N])) = \.Src (hd (map (\.App (\.Trg (\.un_App1 (last (u # U))))) (map \.un_App2 (u # U))))" proof - have 1: "\.Arr (\.un_App1 u)" using u \.is_App_def by force have 2: "U \ [] \ \.Arr (\.un_App1 (last U))" by (metis *** Arr_imp_arr_last Arr_map_un_App1 \U \ [] \ Arr U\ \.arr_char last_map) have 3: "\.Trg N = \.Src (\.un_App2 u)" by (metis Trg_last_Src_hd_eqI \.Src.simps(4) \.Trg.simps(3) \.lambda.collapse(3) \.lambda.inject(3) last_ConsL list.sel(1) seq u) show ?thesis using u *** seq 1 2 3 by (cases "U = []") auto qed qed moreover have "map (\.App (\.Trg (\.un_App1 (last (u # U))))) (map \.un_App2 (u # U)) \<^sup>*\\<^sup>* map (\.App (\.Trg (\.un_App1 (last (u # U))))) (map \.un_App2 (u # U))" using calculation(2) cong_reflexive by blast ultimately show ?thesis using cong_append by blast qed moreover have "seq [M \<^bold>\ \.Src N] ((map (\X. X \<^bold>\ \.Src N) (map \.un_App1 (u # U)) @ map (\.App (\.Trg (\.un_App1 (last (u # U))))) [N]) @ map (\.App (\.Trg (\.un_App1 (last (u # U))))) (map \.un_App2 (u # U)))" proof show "Arr [M \<^bold>\ \.Src N]" using MN by simp show "Arr ((map (\X. X \<^bold>\ \.Src N) (map \.un_App1 (u # U)) @ map (\.App (\.Trg (\.un_App1 (last (u # U))))) [N]) @ map (\.App (\.Trg (\.un_App1 (last (u # U))))) (map \.un_App2 (u # U)))" using MN u seq by (metis Con_implies_Arr(1) Ide.simps(1) calculation ide_char) show "\.Trg (last [M \<^bold>\ \.Src N]) = \.Src (hd ((map (\X. X \<^bold>\ \.Src N) (map \.un_App1 (u # U)) @ map (\.App (\.Trg (\.un_App1 (last (u # U))))) [N]) @ map (\.App (\.Trg (\.un_App1 (last (u # U))))) (map \.un_App2 (u # U))))" using MN u seq seq_char Srcs_simp\<^sub>\\<^sub>P by (cases u) auto qed ultimately show ?thesis using cong_append by (meson Resid_Arr_self ide_char seq_char) qed also have "[M \<^bold>\ \.Src N] @ (map (\.App (\.Src (\.un_App1 u))) [N] @ map (\X. \.App X (\.Trg N)) (map \.un_App1 (u # U))) @ map (\.App (\.Trg (\.un_App1 (last (u # U))))) (map \.un_App2 (u # U)) = ([M \<^bold>\ \.Src N] @ [\.Src (\.un_App1 u) \<^bold>\ N]) @ (map (\X. X \<^bold>\ \.Trg N) (map \.un_App1 (u # U))) @ map (\.App (\.Trg (\.un_App1 (last (u # U))))) (map \.un_App2 (u # U))" by simp also have "... \<^sup>*\\<^sup>* ([M \<^bold>\ N] @ [u] @ U)" proof - have "[M \<^bold>\ \.Src N] @ [\.Src (\.un_App1 u) \<^bold>\ N] \<^sup>*\\<^sup>* [M \<^bold>\ N]" proof - have "\.Src (\.un_App1 u) = \.Trg M" by (metis Trg_last_Src_hd_eqI \.Src.simps(4) \.Trg.simps(3) \.lambda.collapse(3) \.lambda.inject(3) last.simps list.sel(1) seq u) thus ?thesis using MN u seq seq_char \.Arr_not_Nil \.resid_Arr_self ide_char \.Ide_Trg by simp qed moreover have "map (\X. 
X \<^bold>\ \.Trg N) (map \.un_App1 (u # U)) @ map (\.App (\.Trg (\.un_App1 (last (u # U))))) (map \.un_App2 (u # U)) \<^sup>*\\<^sup>* [u] @ U" proof - have "Arr ([u] @ U)" by (simp add: Std) moreover have "set ([u] @ U) \ Collect \.is_App" using *** u by auto moreover have "\.Src (\.un_App2 (hd ([u] @ U))) = \.Trg N" proof - have "\.Ide (\.Trg N)" using MN lambda_calculus.Ide_Trg by presburger moreover have "\.Ide (\.Src (\.un_App2 (hd ([u] @ U))))" by (metis Std Std_implies_set_subset_elementary_reduction \.Ide_Src \.arr_iff_has_source \.ide_implies_arr \set ([u] @ U) \ Collect \.is_App\ append_Cons \.elementary_reduction_App_iff \.elementary_reduction_is_arr \.sources_char\<^sub>\ list.sel(1) list.set_intros(1) mem_Collect_eq subset_code(1)) moreover have "\.Src (\.Trg N) = \.Src (\.Src (\.un_App2 (hd ([u] @ U))))" proof - have "\.Src (\.Trg N) = \.Trg N" using MN by simp also have "... = \.Src (\.un_App2 u)" using u seq seq_char Srcs_simp\<^sub>\\<^sub>P by (cases u) auto also have "... = \.Src (\.Src (\.un_App2 (hd ([u] @ U))))" by (metis \.Ide_iff_Src_self \.Ide_implies_Arr \\.Ide (\.Src (\.un_App2 (hd ([u] @ U))))\ append_Cons list.sel(1)) finally show ?thesis by blast qed ultimately show ?thesis by (metis \.Ide_iff_Src_self \.Ide_implies_Arr) qed ultimately show ?thesis using map_App_decomp by (metis append_Cons append_Nil) qed moreover have "seq ([M \<^bold>\ \.Src N] @ [\.Src (\.un_App1 u) \<^bold>\ N]) (map (\X. X \<^bold>\ \.Trg N) (map \.un_App1 (u # U)) @ map (\.App (\.Trg (\.un_App1 (last (u # U))))) (map \.un_App2 (u # U)))" using calculation(1-2) cong_respects_seq\<^sub>P seq by auto ultimately show ?thesis using cong_append by presburger qed finally show ?thesis by blast qed also have "[M \<^bold>\ N] @ [u] @ U = (M \<^bold>\ N) # u # U" by simp finally show "stdz_insert (M \<^bold>\ N) (u # U) \<^sup>*\\<^sup>* (M \<^bold>\ N) # u # U" by blast qed qed qed qed qed qed qed qed qed qed text \ The eight remaining subgoals are now trivial consequences of fact \*\. Unfortunately, I haven't found a way to discharge them without having to state each one of them explicitly. \ show "\N u U. 
\\.Ide (\<^bold>\ \<^bold>\ N) \ ?P (hd (u # U)) (tl (u # U)); \\ \.Ide (\<^bold>\ \<^bold>\ N); \.seq (\<^bold>\ \<^bold>\ N) (hd (u # U)); \.contains_head_reduction (\<^bold>\ \<^bold>\ N); \.Ide ((\<^bold>\ \<^bold>\ N) \\ \.head_redex (\<^bold>\ \<^bold>\ N))\ \ ?P (hd (u # U)) (tl (u # U)); \\ \.Ide (\<^bold>\ \<^bold>\ N); \.seq (\<^bold>\ \<^bold>\ N) (hd (u # U)); \.contains_head_reduction (\<^bold>\ \<^bold>\ N); \ \.Ide ((\<^bold>\ \<^bold>\ N) \\ \.head_redex (\<^bold>\ \<^bold>\ N))\ \ ?P ((\<^bold>\ \<^bold>\ N) \\ \.head_redex (\<^bold>\ \<^bold>\ N)) (u # U); \\ \.Ide (\<^bold>\ \<^bold>\ N); \.seq (\<^bold>\ \<^bold>\ N) (hd (u # U)); \ \.contains_head_reduction (\<^bold>\ \<^bold>\ N); \.contains_head_reduction (hd (u # U)); \.Ide ((\<^bold>\ \<^bold>\ N) \\ \.head_strategy (\<^bold>\ \<^bold>\ N))\ \ ?P (\.head_strategy (\<^bold>\ \<^bold>\ N)) (tl (u # U)); \\ \.Ide (\<^bold>\ \<^bold>\ N); \.seq (\<^bold>\ \<^bold>\ N) (hd (u # U)); \ \.contains_head_reduction (\<^bold>\ \<^bold>\ N); \.contains_head_reduction (hd (u # U)); \ \.Ide ((\<^bold>\ \<^bold>\ N) \\ \.head_strategy (\<^bold>\ \<^bold>\ N))\ \ ?P (\.resid (\<^bold>\ \<^bold>\ N) (\.head_strategy (\<^bold>\ \<^bold>\ N))) (tl (u # U)); \\ \.Ide (\<^bold>\ \<^bold>\ N); \.seq (\<^bold>\ \<^bold>\ N) (hd (u # U)); \ \.contains_head_reduction (\<^bold>\ \<^bold>\ N); \ \.contains_head_reduction (hd (u # U))\ \ ?P \<^bold>\ (filter notIde (map \.un_App1 (u # U))); \\ \.Ide (\<^bold>\ \<^bold>\ N); \.seq (\<^bold>\ \<^bold>\ N) (hd (u # U)); \ \.contains_head_reduction (\<^bold>\ \<^bold>\ N); \ \.contains_head_reduction (hd (u # U))\ \ ?P N (filter notIde (map \.un_App2 (u # U)))\ \ ?P (\<^bold>\ \<^bold>\ N) (u # U)" using * \.lambda.disc(6) by presburger show "\x N u U. \\.Ide (\<^bold>\x\<^bold>\ \<^bold>\ N) \ ?P (hd (u # U)) (tl (u # U)); \\ \.Ide (\<^bold>\x\<^bold>\ \<^bold>\ N); \.seq (\<^bold>\x\<^bold>\ \<^bold>\ N) (hd (u # U)); \.contains_head_reduction (\<^bold>\x\<^bold>\ \<^bold>\ N); \.Ide ((\<^bold>\x\<^bold>\ \<^bold>\ N) \\ \.head_redex (\<^bold>\x\<^bold>\ \<^bold>\ N))\ \ ?P (hd (u # U)) (tl (u # U)); \\ \.Ide (\<^bold>\x\<^bold>\ \<^bold>\ N); \.seq (\<^bold>\x\<^bold>\ \<^bold>\ N) (hd (u # U)); \.contains_head_reduction (\<^bold>\x\<^bold>\ \<^bold>\ N); \ \.Ide ((\<^bold>\x\<^bold>\ \<^bold>\ N) \\ \.head_redex (\<^bold>\x\<^bold>\ \<^bold>\ N))\ \ ?P ((\<^bold>\x\<^bold>\ \<^bold>\ N) \\ \.head_redex (\<^bold>\x\<^bold>\ \<^bold>\ N)) (u # U); \\ \.Ide (\<^bold>\x\<^bold>\ \<^bold>\ N); \.seq (\<^bold>\x\<^bold>\ \<^bold>\ N) (hd (u # U)); \ \.contains_head_reduction (\<^bold>\x\<^bold>\ \<^bold>\ N); \.contains_head_reduction (hd (u # U)); \.Ide ((\<^bold>\x\<^bold>\ \<^bold>\ N) \\ \.head_strategy (\<^bold>\x\<^bold>\ \<^bold>\ N))\ \ ?P (\.head_strategy (\<^bold>\x\<^bold>\ \<^bold>\ N)) (tl (u # U)); \\ \.Ide (\<^bold>\x\<^bold>\ \<^bold>\ N); \.seq (\<^bold>\x\<^bold>\ \<^bold>\ N) (hd (u # U)); \ \.contains_head_reduction (\<^bold>\x\<^bold>\ \<^bold>\ N); \.contains_head_reduction (hd (u # U)); \ \.Ide ((\<^bold>\x\<^bold>\ \<^bold>\ N) \\ \.head_strategy (\<^bold>\x\<^bold>\ \<^bold>\ N))\ \ ?P ((\<^bold>\x\<^bold>\ \<^bold>\ N) \\ \.head_strategy (\<^bold>\x\<^bold>\ \<^bold>\ N)) (tl (u # U)); \\ \.Ide (\<^bold>\x\<^bold>\ \<^bold>\ N); \.seq (\<^bold>\x\<^bold>\ \<^bold>\ N) (hd (u # U)); \ \.contains_head_reduction (\<^bold>\x\<^bold>\ \<^bold>\ N); \ \.contains_head_reduction (hd (u # U))\ \ ?P \<^bold>\x\<^bold>\ (filter notIde (map \.un_App1 (u # U))); \\ \.Ide 
(\<^bold>\x\<^bold>\ \<^bold>\ N); \.seq (\<^bold>\x\<^bold>\ \<^bold>\ N) (hd (u # U)); \ \.contains_head_reduction (\<^bold>\x\<^bold>\ \<^bold>\ N); \ \.contains_head_reduction (hd (u # U))\ \ ?P N (filter notIde (map \.un_App2 (u # U)))\ \ ?P (\<^bold>\x\<^bold>\ \<^bold>\ N) (u # U)" using * \.lambda.disc(7) by presburger show "\M1 M2 N u U. \\.Ide (M1 \<^bold>\ M2 \<^bold>\ N) \ ?P (hd (u # U)) (tl (u # U)); \\ \.Ide (M1 \<^bold>\ M2 \<^bold>\ N); \.seq (M1 \<^bold>\ M2 \<^bold>\ N) (hd (u # U)); \.contains_head_reduction (M1 \<^bold>\ M2 \<^bold>\ N); \.Ide ((M1 \<^bold>\ M2 \<^bold>\ N) \\ \.head_redex (M1 \<^bold>\ M2 \<^bold>\ N))\ \ ?P (hd (u # U)) (tl (u # U)); \\ \.Ide (M1 \<^bold>\ M2 \<^bold>\ N); \.seq (M1 \<^bold>\ M2 \<^bold>\ N) (hd (u # U)); \.contains_head_reduction (M1 \<^bold>\ M2 \<^bold>\ N); \ \.Ide ((M1 \<^bold>\ M2 \<^bold>\ N) \\ \.head_redex (M1 \<^bold>\ M2 \<^bold>\ N))\ \ ?P ((M1 \<^bold>\ M2 \<^bold>\ N) \\ \.head_redex (M1 \<^bold>\ M2 \<^bold>\ N)) (u # U); \\ \.Ide (M1 \<^bold>\ M2 \<^bold>\ N); \.seq (M1 \<^bold>\ M2 \<^bold>\ N) (hd (u # U)); \ \.contains_head_reduction (M1 \<^bold>\ M2 \<^bold>\ N); \.contains_head_reduction (hd (u # U)); \.Ide ((M1 \<^bold>\ M2 \<^bold>\ N) \\ \.head_strategy (M1 \<^bold>\ M2 \<^bold>\ N))\ \ ?P (\.head_strategy (M1 \<^bold>\ M2 \<^bold>\ N)) (tl (u # U)); \\ \.Ide (M1 \<^bold>\ M2 \<^bold>\ N); \.seq (M1 \<^bold>\ M2 \<^bold>\ N) (hd (u # U)); \ \.contains_head_reduction (M1 \<^bold>\ M2 \<^bold>\ N); \.contains_head_reduction (hd (u # U)); \ \.Ide ((M1 \<^bold>\ M2 \<^bold>\ N) \\ \.head_strategy (M1 \<^bold>\ M2 \<^bold>\ N))\ \ ?P ((M1 \<^bold>\ M2 \<^bold>\ N) \\ \.head_strategy (M1 \<^bold>\ M2 \<^bold>\ N)) (tl (u # U)); \\ \.Ide (M1 \<^bold>\ M2 \<^bold>\ N); \.seq (M1 \<^bold>\ M2 \<^bold>\ N) (hd (u # U)); \ \.contains_head_reduction (M1 \<^bold>\ M2 \<^bold>\ N); \ \.contains_head_reduction (hd (u # U))\ \ ?P (M1 \<^bold>\ M2) (filter notIde (map \.un_App1 (u # U))); \\ \.Ide (M1 \<^bold>\ M2 \<^bold>\ N); \.seq (M1 \<^bold>\ M2 \<^bold>\ N) (hd (u # U)); \ \.contains_head_reduction (M1 \<^bold>\ M2 \<^bold>\ N); \ \.contains_head_reduction (hd (u # U))\ \ ?P N (filter notIde (map \.un_App2 (u # U)))\ \ ?P (M1 \<^bold>\ M2 \<^bold>\ N) (u # U)" using * \.lambda.disc(9) by presburger show "\M1 M2 N u U. 
\\.Ide (\<^bold>\\<^bold>[M1\<^bold>] \<^bold>\ M2 \<^bold>\ N) \ ?P (hd (u # U)) (tl (u # U)); \\ \.Ide (\<^bold>\\<^bold>[M1\<^bold>] \<^bold>\ M2 \<^bold>\ N); \.seq (\<^bold>\\<^bold>[M1\<^bold>] \<^bold>\ M2 \<^bold>\ N) (hd (u # U)); \.contains_head_reduction (\<^bold>\\<^bold>[M1\<^bold>] \<^bold>\ M2 \<^bold>\ N); \.Ide ((\<^bold>\\<^bold>[M1\<^bold>] \<^bold>\ M2 \<^bold>\ N) \\ (\.head_redex (\<^bold>\\<^bold>[M1\<^bold>] \<^bold>\ M2 \<^bold>\ N)))\ \ ?P (hd (u # U)) (tl (u # U)); \\ \.Ide (\<^bold>\\<^bold>[M1\<^bold>] \<^bold>\ M2 \<^bold>\ N); \.seq (\<^bold>\\<^bold>[M1\<^bold>] \<^bold>\ M2 \<^bold>\ N) (hd (u # U)); \.contains_head_reduction (\<^bold>\\<^bold>[M1\<^bold>] \<^bold>\ M2 \<^bold>\ N); \ \.Ide ((\<^bold>\\<^bold>[M1\<^bold>] \<^bold>\ M2 \<^bold>\ N) \\ (\.head_redex (\<^bold>\\<^bold>[M1\<^bold>] \<^bold>\ M2 \<^bold>\ N)))\ \ ?P (\.resid (\<^bold>\\<^bold>[M1\<^bold>] \<^bold>\ M2 \<^bold>\ N) (\.head_redex (\<^bold>\\<^bold>[M1\<^bold>] \<^bold>\ M2 \<^bold>\ N))) (u # U); \\ \.Ide (\<^bold>\\<^bold>[M1\<^bold>] \<^bold>\ M2 \<^bold>\ N); \.seq (\<^bold>\\<^bold>[M1\<^bold>] \<^bold>\ M2 \<^bold>\ N) (hd (u # U)); \ \.contains_head_reduction (\<^bold>\\<^bold>[M1\<^bold>] \<^bold>\ M2 \<^bold>\ N); \.contains_head_reduction (hd (u # U)); \.Ide ((\<^bold>\\<^bold>[M1\<^bold>] \<^bold>\ M2 \<^bold>\ N) \\ \.head_strategy (\<^bold>\\<^bold>[M1\<^bold>] \<^bold>\ M2 \<^bold>\ N))\ \ ?P (\.head_strategy (\<^bold>\\<^bold>[M1\<^bold>] \<^bold>\ M2 \<^bold>\ N)) (tl (u # U)); \\ \.Ide (\<^bold>\\<^bold>[M1\<^bold>] \<^bold>\ M2 \<^bold>\ N); \.seq (\<^bold>\\<^bold>[M1\<^bold>] \<^bold>\ M2 \<^bold>\ N) (hd (u # U)); \ \.contains_head_reduction (\<^bold>\\<^bold>[M1\<^bold>] \<^bold>\ M2 \<^bold>\ N); \.contains_head_reduction (hd (u # U)); \ \.Ide ((\<^bold>\\<^bold>[M1\<^bold>] \<^bold>\ M2 \<^bold>\ N) \\ \.head_strategy (\<^bold>\\<^bold>[M1\<^bold>] \<^bold>\ M2 \<^bold>\ N))\ \ ?P ((\<^bold>\\<^bold>[M1\<^bold>] \<^bold>\ M2 \<^bold>\ N) \\ \.head_strategy (\<^bold>\\<^bold>[M1\<^bold>] \<^bold>\ M2 \<^bold>\ N)) (tl (u # U)); \\ \.Ide (\<^bold>\\<^bold>[M1\<^bold>] \<^bold>\ M2 \<^bold>\ N); \.seq (\<^bold>\\<^bold>[M1\<^bold>] \<^bold>\ M2 \<^bold>\ N) (hd (u # U)); \ \.contains_head_reduction (\<^bold>\\<^bold>[M1\<^bold>] \<^bold>\ M2 \<^bold>\ N); \ \.contains_head_reduction (hd (u # U))\ \ ?P (\<^bold>\\<^bold>[M1\<^bold>] \<^bold>\ M2) (filter notIde (map \.un_App1 (u # U))); \\ \.Ide (\<^bold>\\<^bold>[M1\<^bold>] \<^bold>\ M2 \<^bold>\ N); \.seq (\<^bold>\\<^bold>[M1\<^bold>] \<^bold>\ M2 \<^bold>\ N) (hd (u # U)); \ \.contains_head_reduction (\<^bold>\\<^bold>[M1\<^bold>] \<^bold>\ M2 \<^bold>\ N); \ \.contains_head_reduction (hd (u # U))\ \ ?P N (filter notIde (map \.un_App2 (u # U)))\ \ ?P (\<^bold>\\<^bold>[M1\<^bold>] \<^bold>\ M2 \<^bold>\ N) (u # U)" using * \.lambda.disc(10) by presburger show "\M N U. 
\\.Ide (M \<^bold>\ N) \ ?P (hd (\<^bold>\ # U)) (tl (\<^bold>\ # U)); \\ \.Ide (M \<^bold>\ N); \.seq (M \<^bold>\ N) (hd (\<^bold>\ # U)); \.contains_head_reduction (M \<^bold>\ N); \.Ide ((M \<^bold>\ N) \\ \.head_redex (M \<^bold>\ N))\ \ ?P (hd (\<^bold>\ # U)) (tl (\<^bold>\ # U)); \\ \.Ide (M \<^bold>\ N); \.seq (M \<^bold>\ N) (hd (\<^bold>\ # U)); \.contains_head_reduction (M \<^bold>\ N); \ \.Ide ((M \<^bold>\ N) \\ \.head_redex (M \<^bold>\ N))\ \ ?P ((M \<^bold>\ N) \\ \.head_redex (M \<^bold>\ N)) (\<^bold>\ # U); \\ \.Ide (M \<^bold>\ N); \.seq (M \<^bold>\ N) (hd (\<^bold>\ # U)); \ \.contains_head_reduction (M \<^bold>\ N); \.contains_head_reduction (hd (\<^bold>\ # U)); \.Ide (\.resid (M \<^bold>\ N) (\.head_strategy (M \<^bold>\ N)))\ \ ?P (\.head_strategy (M \<^bold>\ N)) (tl (\<^bold>\ # U)); \\ \.Ide (M \<^bold>\ N); \.seq (M \<^bold>\ N) (hd (\<^bold>\ # U)); \ \.contains_head_reduction (M \<^bold>\ N); \.contains_head_reduction (hd (\<^bold>\ # U)); \ \.Ide ((M \<^bold>\ N) \\ \.head_strategy (M \<^bold>\ N))\ \ ?P ((M \<^bold>\ N) \\ \.head_strategy (M \<^bold>\ N)) (tl (\<^bold>\ # U)); \\ \.Ide (M \<^bold>\ N); \.seq (M \<^bold>\ N) (hd (\<^bold>\ # U)); \ \.contains_head_reduction (M \<^bold>\ N); \ \.contains_head_reduction (hd (\<^bold>\ # U))\ \ ?P M (filter notIde (map \.un_App1 (\<^bold>\ # U))); \\ \.Ide (M \<^bold>\ N); \.seq (M \<^bold>\ N) (hd (\<^bold>\ # U)); \ \.contains_head_reduction (M \<^bold>\ N); \ \.contains_head_reduction (hd (\<^bold>\ # U))\ \ ?P N (filter notIde (map \.un_App2 (\<^bold>\ # U)))\ \ ?P (M \<^bold>\ N) (\<^bold>\ # U)" using * \.lambda.disc(16) by presburger show "\M N x U. \\.Ide (M \<^bold>\ N) \ ?P (hd (\<^bold>\x\<^bold>\ # U)) (tl (\<^bold>\x\<^bold>\ # U)); \\ \.Ide (M \<^bold>\ N); \.seq (M \<^bold>\ N) (hd (\<^bold>\x\<^bold>\ # U)); \.contains_head_reduction (M \<^bold>\ N); \.Ide ((M \<^bold>\ N) \\ \.head_redex (M \<^bold>\ N))\ \ ?P (hd (\<^bold>\x\<^bold>\ # U)) (tl (\<^bold>\x\<^bold>\ # U)); \\ \.Ide (M \<^bold>\ N); \.seq (M \<^bold>\ N) (hd (\<^bold>\x\<^bold>\ # U)); \.contains_head_reduction (M \<^bold>\ N); \ \.Ide ((M \<^bold>\ N) \\ \.head_redex (M \<^bold>\ N))\ \ ?P ((M \<^bold>\ N) \\ \.head_redex (M \<^bold>\ N)) (\<^bold>\x\<^bold>\ # U); \\ \.Ide (M \<^bold>\ N); \.seq (M \<^bold>\ N) (hd (\<^bold>\x\<^bold>\ # U)); \ \.contains_head_reduction (M \<^bold>\ N); \.contains_head_reduction (hd (\<^bold>\x\<^bold>\ # U)); \.Ide ((M \<^bold>\ N) \\ \.head_strategy (M \<^bold>\ N))\ \ ?P (\.head_strategy (M \<^bold>\ N)) (tl (\<^bold>\x\<^bold>\ # U)); \\ \.Ide (M \<^bold>\ N); \.seq (M \<^bold>\ N) (hd (\<^bold>\x\<^bold>\ # U)); \ \.contains_head_reduction (M \<^bold>\ N); \.contains_head_reduction (hd (\<^bold>\x\<^bold>\ # U)); \ \.Ide ((M \<^bold>\ N) \\ \.head_strategy (M \<^bold>\ N))\ \ ?P ((M \<^bold>\ N) \\ \.head_strategy (M \<^bold>\ N)) (tl (\<^bold>\x\<^bold>\ # U)); \\ \.Ide (M \<^bold>\ N); \.seq (M \<^bold>\ N) (hd (\<^bold>\x\<^bold>\ # U)); \ \.contains_head_reduction (M \<^bold>\ N); \ \.contains_head_reduction (hd (\<^bold>\x\<^bold>\ # U))\ \ ?P M (filter notIde (map \.un_App1 (\<^bold>\x\<^bold>\ # U))); \\ \.Ide (M \<^bold>\ N); \.seq (M \<^bold>\ N) (hd (\<^bold>\x\<^bold>\ # U)); \ \.contains_head_reduction (M \<^bold>\ N); \ \.contains_head_reduction (hd (\<^bold>\x\<^bold>\ # U))\ \ ?P N (filter notIde (map \.un_App2 (\<^bold>\x\<^bold>\ # U)))\ \ ?P (M \<^bold>\ N) (\<^bold>\x\<^bold>\ # U)" using * \.lambda.disc(17) by presburger show "\M N P U. 
\\.Ide (M \<^bold>\ N) \ ?P (hd (\<^bold>\\<^bold>[P\<^bold>] # U)) (tl (\<^bold>\\<^bold>[P\<^bold>] # U)); \\ \.Ide (M \<^bold>\ N); \.seq (M \<^bold>\ N) (hd (\<^bold>\\<^bold>[P\<^bold>] # U)); \.contains_head_reduction (M \<^bold>\ N); \.Ide ((M \<^bold>\ N) \\ \.head_redex (M \<^bold>\ N))\ \ ?P (hd (\<^bold>\\<^bold>[P\<^bold>] # U)) (tl (\<^bold>\\<^bold>[P\<^bold>] # U)); \\ \.Ide (M \<^bold>\ N); \.seq (M \<^bold>\ N) (hd (\<^bold>\\<^bold>[P\<^bold>] # U)); \.contains_head_reduction (M \<^bold>\ N); \ \.Ide ((M \<^bold>\ N) \\ \.head_redex (M \<^bold>\ N))\ \ ?P ((M \<^bold>\ N) \\ \.head_redex (M \<^bold>\ N)) (\<^bold>\\<^bold>[P\<^bold>] # U); \\ \.Ide (M \<^bold>\ N); \.seq (M \<^bold>\ N) (hd (\<^bold>\\<^bold>[P\<^bold>] # U)); \ \.contains_head_reduction (M \<^bold>\ N); \.contains_head_reduction (hd (\<^bold>\\<^bold>[P\<^bold>] # U)); \.Ide ((M \<^bold>\ N) \\ \.head_strategy (M \<^bold>\ N))\ \ ?P (\.head_strategy (M \<^bold>\ N)) (tl (\<^bold>\\<^bold>[P\<^bold>] # U)); \\ \.Ide (M \<^bold>\ N); \.seq (M \<^bold>\ N) (hd (\<^bold>\\<^bold>[P\<^bold>] # U)); \ \.contains_head_reduction (M \<^bold>\ N); \.contains_head_reduction (hd (\<^bold>\\<^bold>[P\<^bold>] # U)); \ \.Ide ((M \<^bold>\ N) \\ \.head_strategy (M \<^bold>\ N))\ \ ?P (\.resid (M \<^bold>\ N) (\.head_strategy (M \<^bold>\ N))) (tl (\<^bold>\\<^bold>[P\<^bold>] # U)); \\ \.Ide (M \<^bold>\ N); \.seq (M \<^bold>\ N) (hd (\<^bold>\\<^bold>[P\<^bold>] # U)); \ \.contains_head_reduction (M \<^bold>\ N); \ \.contains_head_reduction (hd (\<^bold>\\<^bold>[P\<^bold>] # U))\ \ ?P M (filter notIde (map \.un_App1 (\<^bold>\\<^bold>[P\<^bold>] # U))); \\ \.Ide (M \<^bold>\ N); \.seq (M \<^bold>\ N) (hd (\<^bold>\\<^bold>[P\<^bold>] # U)); \ \.contains_head_reduction (M \<^bold>\ N); \ \.contains_head_reduction (hd (\<^bold>\\<^bold>[P\<^bold>] # U))\ \ ?P N (filter notIde (map \.un_App2 (\<^bold>\\<^bold>[P\<^bold>] # U)))\ \ ?P (M \<^bold>\ N) (\<^bold>\\<^bold>[P\<^bold>] # U)" using * \.lambda.disc(18) by presburger show "\M N P1 P2 U. 
\\.Ide (M \<^bold>\ N) \ ?P (hd ((P1 \<^bold>\ P2) # U)) (tl ((P1 \<^bold>\ P2) # U)); \\ \.Ide (M \<^bold>\ N); \.seq (M \<^bold>\ N) (hd ((P1 \<^bold>\ P2) # U)); \.contains_head_reduction (M \<^bold>\ N); \.Ide ((M \<^bold>\ N) \\ \.head_redex (M \<^bold>\ N))\ \ ?P (hd ((P1 \<^bold>\ P2) # U)) (tl((P1 \<^bold>\ P2) # U)); \\ \.Ide (M \<^bold>\ N); \.seq (M \<^bold>\ N) (hd ((P1 \<^bold>\ P2) # U)); \.contains_head_reduction (M \<^bold>\ N); \ \.Ide ((M \<^bold>\ N) \\ \.head_redex (M \<^bold>\ N))\ \ ?P ((M \<^bold>\ N) \\ \.head_redex (M \<^bold>\ N)) ((P1 \<^bold>\ P2) # U); \\ \.Ide (M \<^bold>\ N); \.seq (M \<^bold>\ N) (hd ((P1 \<^bold>\ P2) # U)); \ \.contains_head_reduction (M \<^bold>\ N); \.contains_head_reduction (hd ((P1 \<^bold>\ P2) # U)); \.Ide ((M \<^bold>\ N) \\ \.head_strategy (M \<^bold>\ N))\ \ ?P (\.head_strategy (M \<^bold>\ N)) (tl ((P1 \<^bold>\ P2) # U)); \\ \.Ide (M \<^bold>\ N); \.seq (M \<^bold>\ N) (hd ((P1 \<^bold>\ P2) # U)); \ \.contains_head_reduction (M \<^bold>\ N); \.contains_head_reduction (hd ((P1 \<^bold>\ P2) # U)); \ \.Ide ((M \<^bold>\ N) \\ \.head_strategy (M \<^bold>\ N))\ \ ?P ((M \<^bold>\ N) \\ \.head_strategy (M \<^bold>\ N)) (tl ((P1 \<^bold>\ P2) # U)); \\ \.Ide (M \<^bold>\ N); \.seq (M \<^bold>\ N) (hd ((P1 \<^bold>\ P2) # U)); \ \.contains_head_reduction (M \<^bold>\ N); \ \.contains_head_reduction (hd ((P1 \<^bold>\ P2) # U))\ \ ?P M (filter notIde (map \.un_App1 ((P1 \<^bold>\ P2) # U))); \\ \.Ide (M \<^bold>\ N); \.seq (M \<^bold>\ N) (hd ((P1 \<^bold>\ P2) # U)); \ \.contains_head_reduction (M \<^bold>\ N); \ \.contains_head_reduction (hd ((P1 \<^bold>\ P2) # U))\ \ ?P N (filter notIde (map \.un_App2 ((P1 \<^bold>\ P2) # U)))\ \ ?P (M \<^bold>\ N) ((P1 \<^bold>\ P2) # U)" using * \.lambda.disc(19) by presburger qed subsubsection "The Standardization Theorem" text \ Using the function \standardize\, we can now prove the Standardization Theorem. There is still a little bit more work to do, because we have to deal with various cases in which the reduction path to be standardized is empty or consists entirely of identities. 
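Concretely, the theorem below states that whenever \Arr T\ holds, the path \standardize T\ is standard; moreover it is empty when \Ide T\, and congruent to \T\ otherwise. The proof is by induction on \T\; roughly, the inductive step splits on whether the head transition and the remaining path consist of identities, and otherwise appeals to the correctness property of \stdz_insert\ established above.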
\ theorem standardization_theorem: shows "Arr T \ Std (standardize T) \ (Ide T \ standardize T = []) \ (\ Ide T \ cong (standardize T) T)" proof (induct T) show "Arr [] \ Std (standardize []) \ (Ide [] \ standardize [] = []) \ (\ Ide [] \ cong (standardize []) [])" by simp fix t T assume ind: "Arr T \ Std (standardize T) \ (Ide T \ standardize T = []) \ (\ Ide T \ cong (standardize T) T)" assume tT: "Arr (t # T)" have t: "\.Arr t" using tT Arr_imp_arr_hd by force show "Std (standardize (t # T)) \ (Ide (t # T) \ standardize (t # T) = []) \ (\ Ide (t # T) \ cong (standardize (t # T)) (t # T))" proof (cases "T = []") show "T = [] \ ?thesis" using t tT Ide_iff_standard_development_empty Std_standard_development cong_standard_development by simp assume 0: "T \ []" hence T: "Arr T" using tT by (metis Arr_imp_Arr_tl list.sel(3)) show ?thesis proof (intro conjI) show "Std (standardize (t # T))" proof - have 1: "\ Ide T \ seq [t] (standardize T)" using t T ind 0 ide_char Con_implies_Arr(1) apply (intro seqI\<^sub>\\<^sub>P) apply simp apply (metis Con_implies_Arr(1) Ide.simps(1) ide_char) by (metis Src_hd_eqI Trg_last_Src_hd_eqI \T \ []\ append_Cons arrI\<^sub>P arr_append_imp_seq list.distinct(1) self_append_conv2 tT) show ?thesis using T 1 ind Std_standard_development stdz_insert_correctness by auto qed show "Ide (t # T) \ standardize (t # T) = []" using Ide_consE Ide_iff_standard_development_empty Ide_implies_Arr ind \.Ide_implies_Arr \.ide_char by (metis list.sel(1,3) standardize.simps(1-2) stdz_insert.simps(1)) show "\ Ide (t # T) \ standardize (t # T) \<^sup>*\\<^sup>* t # T" proof assume 1: "\ Ide (t # T)" show "standardize (t # T) \<^sup>*\\<^sup>* t # T" proof (cases "\.Ide t") assume t: "\.Ide t" have 2: "\ Ide T" using 1 t tT by fastforce have "standardize (t # T) = stdz_insert t (standardize T)" by simp also have "... \<^sup>*\\<^sup>* t # T" proof - have 3: "Std (standardize T) \ standardize T \<^sup>*\\<^sup>* T" using T 2 ind by blast have "stdz_insert t (standardize T) = stdz_insert (hd (standardize T)) (tl (standardize T))" proof - have "seq [t] (standardize T)" using 0 2 tT ind by (metis Arr.elims(2) Con_imp_eq_Srcs Con_implies_Arr(1) Ide.simps(1-2) Ide_implies_Arr Trgs.simps(2) ide_char \.ide_char list.inject seq_char seq_implies_Trgs_eq_Srcs t) thus ?thesis using t 3 stdz_insert_Ide_Std by blast qed also have "... \<^sup>*\\<^sup>* hd (standardize T) # tl (standardize T)" proof - have "\ Ide (standardize T)" using 2 3 ide_backward_stable ide_char by blast moreover have "tl (standardize T) \ [] \ seq [hd (standardize T)] (tl (standardize T)) \ Std (tl (standardize T))" by (metis 3 Std_consE Std_imp_Arr append.left_neutral append_Cons arr_append_imp_seq arr_char hd_Cons_tl list.discI tl_Nil) ultimately show ?thesis by (metis "2" Ide.simps(2) Resid.simps(1) Std_consE T cong_standard_development ide_char ind \.ide_char list.exhaust_sel stdz_insert.simps(1) stdz_insert_correctness) qed also have "hd (standardize T) # tl (standardize T) = standardize T" by (metis 3 Arr.simps(1) Con_implies_Arr(2) Ide.simps(1) ide_char list.exhaust_sel) also have "standardize T \<^sup>*\\<^sup>* T" using 3 by simp also have "T \<^sup>*\\<^sup>* t # T" using 0 t tT arr_append_imp_seq arr_char cong_cons_ideI(2) by simp finally show ?thesis by blast qed thus ?thesis by auto next assume t: "\ \.Ide t" show ?thesis proof (cases "Ide T") assume T: "Ide T" have "standardize (t # T) = standard_development t" using t T Ide_implies_Arr ind by simp also have "... 
\<^sup>*\\<^sup>* [t]" using t T tT cong_standard_development [of t] by blast also have "[t] \<^sup>*\\<^sup>* [t] @ T" using t T tT cong_append_ideI(4) [of "[t]" T] by (simp add: 0 arrI\<^sub>P arr_append_imp_seq ide_char) finally show ?thesis by auto next assume T: "\ Ide T" have 1: "Std (standardize T) \ standardize T \<^sup>*\\<^sup>* T" using T \Arr T\ ind by blast have 2: "seq [t] (standardize T)" by (metis 0 Arr.simps(2) Arr.simps(3) Con_imp_eq_Srcs Con_implies_Arr(2) Ide.elims(3) Ide.simps(1) T Trgs.simps(2) ide_char ind seq_char seq_implies_Trgs_eq_Srcs tT) have "stdz_insert t (standardize T) \<^sup>*\\<^sup>* t # standardize T" using t 1 2 stdz_insert_correctness [of t "standardize T"] by blast also have "t # standardize T \<^sup>*\\<^sup>* t # T" using 1 2 by (meson Arr.simps(2) \.prfx_reflexive cong_cons seq_char) finally show ?thesis by auto qed qed qed qed qed qed subsubsection "The Leftmost Reduction Theorem" text \ In this section we prove the Leftmost Reduction Theorem, which states that leftmost reduction is a normalizing strategy. We first show that if a standard reduction path reaches a normal form, then the path must be the one produced by following the leftmost reduction strategy. This is because, in a standard reduction path, once a leftmost redex is skipped, all subsequent reductions occur ``to the right of it'', hence they are all non-leftmost reductions that do not contract the skipped redex, which remains in the leftmost position. The Leftmost Reduction Theorem then follows from the Standardization Theorem. If a term is normalizable, there is a reduction path from that term to a normal form. By the Standardization Theorem we may as well assume that path is standard. But a standard reduction path to a normal form is the path generated by following the leftmost reduction strategy, hence leftmost reduction reaches a normal form after a finite number of steps. \ lemma sseq_reflects_leftmost_reduction: assumes "\.sseq t u" and "\.is_leftmost_reduction u" shows "\.is_leftmost_reduction t" proof - have *: "\u. u = \.leftmost_strategy (\.Src t) \\ t \ \ \.sseq t u" for t proof (induct t) show "\u. \ \.sseq \<^bold>\ u" using \.sseq_imp_seq by blast show "\x u. \ \.sseq \<^bold>\x\<^bold>\ u" using \.elementary_reduction.simps(2) \.sseq_imp_elementary_reduction1 by blast show "\t u. \\u. u = \.leftmost_strategy (\.Src t) \\ t \ \ \.sseq t u; u = \.leftmost_strategy (\.Src \<^bold>\\<^bold>[t\<^bold>]) \\ \<^bold>\\<^bold>[t\<^bold>]\ \ \ \.sseq \<^bold>\\<^bold>[t\<^bold>] u" by auto show "\t1 t2 u. \\u. u = \.leftmost_strategy (\.Src t1) \\ t1 \ \ \.sseq t1 u; \u. u = \.leftmost_strategy (\.Src t2) \\ t2 \ \ \.sseq t2 u; u = \.leftmost_strategy (\.Src (\<^bold>\\<^bold>[t1\<^bold>] \<^bold>\ t2)) \\ (\<^bold>\\<^bold>[t1\<^bold>] \<^bold>\ t2)\ \ \ \.sseq (\<^bold>\\<^bold>[t1\<^bold>] \<^bold>\ t2) u" apply simp by (metis \.sseq_imp_elementary_reduction2 \.Coinitial_iff_Con \.Ide_Src \.Ide_Subst \.elementary_reduction_not_ide \.ide_char \.resid_Ide_Arr) show "\t1 t2. \\u. u = \.leftmost_strategy (\.Src t1) \\ t1 \ \ \.sseq t1 u; \u. u = \.leftmost_strategy (\.Src t2) \\ t2 \ \ \.sseq t2 u; u = \.leftmost_strategy (\.Src (\.App t1 t2)) \\ \.App t1 t2\ \ \ \.sseq (\.App t1 t2) u" for u apply (cases u) apply simp_all apply (metis \.elementary_reduction.simps(2) \.sseq_imp_elementary_reduction2) apply (metis \.Src.simps(3) \.Src_resid \.Trg.simps(3) \.lambda.distinct(15) \.lambda.distinct(3)) proof - show "\t1 t2 u1 u2. 
\\ \.sseq t1 (\.leftmost_strategy (\.Src t1) \\ t1); \ \.sseq t2 (\.leftmost_strategy (\.Src t2) \\ t2); \<^bold>\\<^bold>[u1\<^bold>] \<^bold>\ u2 = \.leftmost_strategy (\.App (\.Src t1) (\.Src t2)) \\ \.App t1 t2; u = \.leftmost_strategy (\.App (\.Src t1) (\.Src t2)) \\ \.App t1 t2\ \ \ \.sseq (\.App t1 t2) (\.leftmost_strategy (\.App (\.Src t1) (\.Src t2)) \\ \.App t1 t2)" by (metis \.sseq_imp_elementary_reduction1 \.Arr.simps(5) \.Arr_resid_ind \.Coinitial_iff_Con \.Ide.simps(5) \.Ide_iff_Src_self \.Src.simps(4) \.Src_resid \.contains_head_reduction.simps(8) \.is_head_reduction_if \.lambda.discI(3) \.lambda.distinct(7) \.leftmost_strategy_selects_head_reduction \.resid_Arr_self \.sseq_preserves_App_and_no_head_reduction) show "\u1 u2. \\ \.sseq t1 (\.leftmost_strategy (\.Src t1) \\ t1); \ \.sseq t2 (\.leftmost_strategy (\.Src t2) \\ t2); \.App u1 u2 = \.leftmost_strategy (\.App (\.Src t1) (\.Src t2)) \\ \.App t1 t2; u = \.leftmost_strategy (\.App (\.Src t1) (\.Src t2)) \\ \.App t1 t2\ \ \ \.sseq (\.App t1 t2) (\.leftmost_strategy (\.App (\.Src t1) (\.Src t2)) \\ \.App t1 t2)" for t1 t2 apply (cases "\ \.Arr t1") apply simp_all apply (meson \.Arr.simps(4) \.seq_char \.sseq_imp_seq) apply (cases "\ \.Arr t2") apply simp_all apply (meson \.Arr.simps(4) \.seq_char \.sseq_imp_seq) using \.Arr_not_Nil apply (cases t1) apply simp_all using \.NF_iff_has_no_redex \.has_redex_iff_not_Ide_leftmost_strategy \.Ide_iff_Src_self \.Ide_iff_Trg_self \.NF_def \.elementary_reduction_not_ide \.eq_Ide_are_cong \.leftmost_strategy_is_reduction_strategy \.reduction_strategy_def \.resid_Arr_Src apply simp apply (metis \.Arr.simps(4) \.Ide.simps(4) \.Ide_Trg \.Src.simps(4) \.sseq_imp_elementary_reduction2) by (metis \.Ide_Trg \.elementary_reduction_not_ide \.ide_char) qed qed have "t \ \.leftmost_strategy (\.Src t) \ False" proof - assume 1: "t \ \.leftmost_strategy (\.Src t)" have 2: "\ \.Ide (\.leftmost_strategy (\.Src t))" by (meson assms(1) \.NF_def \.NF_iff_has_no_redex \.arr_char \.elementary_reduction_is_arr \.elementary_reduction_not_ide \.has_redex_iff_not_Ide_leftmost_strategy \.ide_char \.sseq_imp_elementary_reduction1) have "\.is_leftmost_reduction (\.leftmost_strategy (\.Src t) \\ t)" proof - have "\.is_leftmost_reduction (\.leftmost_strategy (\.Src t))" by (metis assms(1) 2 \.Ide_Src \.Ide_iff_Src_self \.arr_char \.elementary_reduction_is_arr \.elementary_reduction_leftmost_strategy \.is_leftmost_reduction_def \.leftmost_strategy_is_reduction_strategy \.reduction_strategy_def \.sseq_imp_elementary_reduction1) moreover have 3: "\.elementary_reduction t" using assms \.sseq_imp_elementary_reduction1 by simp moreover have "\ \.is_leftmost_reduction t" using 1 \.is_leftmost_reduction_def by auto moreover have "\.coinitial (\.leftmost_strategy (\.Src t)) t" using 3 \.leftmost_strategy_is_reduction_strategy \.reduction_strategy_def \.Ide_Src \.elementary_reduction_is_arr by force ultimately show ?thesis using 1 \.leftmost_reduction_preservation by blast qed moreover have "\.coinitial (\.leftmost_strategy (\.Src t) \\ t) u" using assms(1) calculation \.Arr_not_Nil \.Src_resid \.elementary_reduction_is_arr \.is_leftmost_reduction_def \.seq_char \.sseq_imp_seq by force moreover have "\v. 
\\.is_leftmost_reduction v; \.coinitial v u\ \ v = u" by (metis \.arr_iff_has_source \.arr_resid_iff_con \.confluence assms(2) \.Arr_not_Nil \.Coinitial_iff_Con \.is_leftmost_reduction_def \.sources_char\<^sub>\) ultimately have "\.leftmost_strategy (\.Src t) \\ t = u" by blast thus ?thesis using assms(1) * by blast qed thus ?thesis using assms(1) \.is_leftmost_reduction_def \.sseq_imp_elementary_reduction1 by force qed lemma elementary_reduction_to_NF_is_leftmost: shows "\\.elementary_reduction t; \.NF (Trg [t])\ \ \.leftmost_strategy (\.Src t) = t" proof (induct t) show "\.leftmost_strategy (\.Src \<^bold>\) = \<^bold>\" by simp show "\x. \\.elementary_reduction \<^bold>\x\<^bold>\; \.NF (Trg [\<^bold>\x\<^bold>\])\ \ \.leftmost_strategy (\.Src \<^bold>\x\<^bold>\) = \<^bold>\x\<^bold>\" by auto show "\t. \\\.elementary_reduction t; \.NF (Trg [t])\ \ \.leftmost_strategy (\.Src t) = t; \.elementary_reduction \<^bold>\\<^bold>[t\<^bold>]; \.NF (Trg [\<^bold>\\<^bold>[t\<^bold>]])\ \ \.leftmost_strategy (\.Src \<^bold>\\<^bold>[t\<^bold>]) = \<^bold>\\<^bold>[t\<^bold>]" using lambda_calculus.NF_Lam_iff lambda_calculus.elementary_reduction_is_arr by force show "\t1 t2. \\\.elementary_reduction t1; \.NF (Trg [t1])\ \ \.leftmost_strategy (\.Src t1) = t1; \\.elementary_reduction t2; \.NF (Trg [t2])\ \ \.leftmost_strategy (\.Src t2) = t2; \.elementary_reduction (\<^bold>\\<^bold>[t1\<^bold>] \<^bold>\ t2); \.NF (Trg [\<^bold>\\<^bold>[t1\<^bold>] \<^bold>\ t2])\ \ \.leftmost_strategy (\.Src (\<^bold>\\<^bold>[t1\<^bold>] \<^bold>\ t2)) = \<^bold>\\<^bold>[t1\<^bold>] \<^bold>\ t2" apply simp by (metis \.Ide_iff_Src_self \.Ide_implies_Arr) fix t1 t2 assume ind1: "\\.elementary_reduction t1; \.NF (Trg [t1])\ \ \.leftmost_strategy (\.Src t1) = t1" assume ind2: "\\.elementary_reduction t2; \.NF (Trg [t2])\ \ \.leftmost_strategy (\.Src t2) = t2" assume t: "\.elementary_reduction (\.App t1 t2)" have t1: "\.Arr t1" using t \.Arr.simps(4) \.elementary_reduction_is_arr by blast have t2: "\.Arr t2" using t \.Arr.simps(4) \.elementary_reduction_is_arr by blast assume NF: "\.NF (Trg [\.App t1 t2])" have 1: "\ \.is_Lam t1" using NF \.NF_def apply (cases t1) apply simp_all by (metis (mono_tags) \.Ide.simps(1) \.NF_App_iff \.Trg.simps(2-3) \.lambda.discI(2)) have 2: "\.NF (\.Trg t1) \ \.NF (\.Trg t2)" using NF t1 t2 1 \.NF_App_iff by simp show "\.leftmost_strategy (\.Src (\.App t1 t2)) = \.App t1 t2" using t t1 t2 1 2 ind1 ind2 apply (cases t1) apply simp_all apply (metis \.Ide.simps(4) \.Ide_iff_Src_self \.Ide_iff_Trg_self \.NF_iff_has_no_redex \.elementary_reduction_not_ide \.eq_Ide_are_cong \.has_redex_iff_not_Ide_leftmost_strategy \.resid_Arr_Src t1) using \.Ide_iff_Src_self by blast qed lemma Std_path_to_NF_is_leftmost: shows "\Std T; \.NF (Trg T)\ \ set T \ Collect \.is_leftmost_reduction" proof - have 1: "\t. \Std (t # T); \.NF (Trg (t # T))\ \ \.is_leftmost_reduction t" for T proof (induct T) show "\t. \Std [t]; \.NF (Trg [t])\ \ \.is_leftmost_reduction t" using elementary_reduction_to_NF_is_leftmost \.is_leftmost_reduction_def by simp fix t u T assume ind: "\t. 
\Std (t # T); \.NF (Trg (t # T))\ \ \.is_leftmost_reduction t" assume Std: "Std (t # u # T)" assume "\.NF (Trg (t # u # T))" show "\.is_leftmost_reduction t" using Std \\.NF (Trg (t # u # T))\ ind sseq_reflects_leftmost_reduction by auto qed show "\Std T; \.NF (Trg T)\ \ set T \ Collect \.is_leftmost_reduction" proof (induct T) show "set [] \ Collect \.is_leftmost_reduction" by simp fix t T assume ind: "\Std T; \.NF (Trg T)\ \ set T \ Collect \.is_leftmost_reduction" assume Std: "Std (t # T)" and NF: "\.NF (Trg (t # T))" show "set (t # T) \ Collect \.is_leftmost_reduction" proof (cases "T = []") show "T = [] \ ?thesis" by (metis 1 NF Std \set [] \ Collect \.is_leftmost_reduction\ mem_Collect_eq set_ConsD subset_code(1)) assume T: "T \ []" have "\.is_leftmost_reduction t" using 1 NF Std elementary_reduction_to_NF_is_leftmost by blast thus ?thesis using T NF Std ind by auto qed qed qed theorem leftmost_reduction_theorem: shows "\.normalizing_strategy \.leftmost_strategy" proof (unfold \.normalizing_strategy_def, intro allI impI) fix a assume a: "\.normalizable a" show "\n. \.NF (\.reduce \.leftmost_strategy a n)" proof (cases "\.NF a") show "\.NF a \ ?thesis" by (metis lambda_calculus.reduce.simps(1)) assume 1: "\ \.NF a" obtain T where T: "Arr T \ Src T = a \ \.NF (Trg T)" using a \.normalizable_def red_iff by auto have 2: "\ Ide T" using T 1 Ide_imp_Src_eq_Trg by fastforce obtain U where U: "Std U \ cong T U" using T 2 standardization_theorem by blast have 3: "set U \ Collect \.is_leftmost_reduction" using 1 U Std_path_to_NF_is_leftmost by (metis Con_Arr_self Resid_parallel Src_resid T cong_implies_coinitial) have "\U. \Arr U; length U = n; set U \ Collect \.is_leftmost_reduction\ \ U = apply_strategy \.leftmost_strategy (Src U) (length U)" for n proof (induct n) show "\U. \Arr U; length U = 0; set U \ Collect \.is_leftmost_reduction\ \ U = apply_strategy \.leftmost_strategy (Src U) (length U)" by simp fix n U assume ind: "\U. 
\Arr U; length U = n; set U \ Collect \.is_leftmost_reduction\ \ U = apply_strategy \.leftmost_strategy (Src U) (length U)" assume U: "Arr U" assume n: "length U = Suc n" assume set: "set U \ Collect \.is_leftmost_reduction" show "U = apply_strategy \.leftmost_strategy (Src U) (length U)" proof (cases "n = 0") show "n = 0 \ ?thesis" using U n 1 set \.is_leftmost_reduction_def by (cases U) auto assume 5: "n \ 0" have 4: "hd U = \.leftmost_strategy (Src U)" using n U set \.is_leftmost_reduction_def by (cases U) auto have 6: "tl U \ []" using 4 5 n U by (metis Suc_length_conv list.sel(3) list.size(3)) show ?thesis using 4 5 6 n U set ind [of "tl U"] apply (cases n) apply simp_all by (metis (no_types, lifting) Arr_consE Nil_tl Nitpick.size_list_simp(2) ind [of "tl U"] \.arr_char \.trg_char list.collapse list.set_sel(2) old.nat.inject reduction_paths.apply_strategy.simps(2) subset_code(1)) qed qed hence "U = apply_strategy \.leftmost_strategy (Src U) (length U)" by (metis 3 Con_implies_Arr(1) Ide.simps(1) U ide_char) moreover have "Src U = a" using T U cong_implies_coinitial by (metis Con_imp_eq_Srcs Con_implies_Arr(2) Ide.simps(1) Srcs_simp\<^sub>P\<^sub>W\<^sub>E empty_set ex_un_Src ide_char list.set_intros(1) list.simps(15)) ultimately have "Trg U = \.reduce \.leftmost_strategy a (length U)" using reduce_eq_Trg_apply_strategy by (metis Arr.simps(1) Con_implies_Arr(1) Ide.simps(1) U a ide_char \.leftmost_strategy_is_reduction_strategy \.normalizable_def length_greater_0_conv) thus ?thesis by (metis Ide.simps(1) Ide_imp_Src_eq_Trg Src_resid T Trg_resid_sym U ide_char) qed qed end end diff --git a/thys/ResiduatedTransitionSystem/ResiduatedTransitionSystem.thy b/thys/ResiduatedTransitionSystem/ResiduatedTransitionSystem.thy --- a/thys/ResiduatedTransitionSystem/ResiduatedTransitionSystem.thy +++ b/thys/ResiduatedTransitionSystem/ResiduatedTransitionSystem.thy @@ -1,8848 +1,8849 @@ chapter "Residuated Transition Systems" theory ResiduatedTransitionSystem imports Main begin section "Basic Definitions and Properties" subsection "Partial Magmas" text \ A \emph{partial magma} consists simply of a partial binary operation. We represent the partiality by assuming the existence of a unique value \null\ that behaves as a zero for the operation. \ (* TODO: Possibly unify with Category3.partial_magma? *) locale partial_magma = fixes OP :: "'a \ 'a \ 'a" assumes ex_un_null: "\!n. \t. OP n t = n \ OP t n = n" begin definition null :: 'a where "null = (THE n. \t. OP n t = n \ OP t n = n)" lemma null_eqI: assumes "\t. OP n t = n \ OP t n = n" shows "n = null" using assms null_def ex_un_null the1_equality [of "\n. \t. OP n t = n \ OP t n = n"] by auto lemma null_is_zero [simp]: shows "OP null t = null" and "OP t null = null" using null_def ex_un_null theI' [of "\n. \t. OP n t = n \ OP t n = n"] by auto end subsection "Residuation" text \ A \emph{residuation} is a partial binary operation subject to three axioms. The first, \con_sym_ax\, states that the domain of a residuation is symmetric. The second, \con_imp_arr_resid\, constrains the results of residuation either to be \null\, which indicates inconsistency, or something that is self-consistent, which we will define below to be an ``arrow''. The ``cube axiom'', \cube_ax\, states that if \v\ can be transported by residuation around one side of the ``commuting square'' formed by \t\ and \u \ t\, then it can also be transported around the other side, formed by \u\ and \t \ u\, with the same result. 
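Written as an equation, in the residuation notation introduced just below, the cube axiom requires \(v \\ t) \\ (u \\ t) = (v \\ u) \\ (t \\ u)\ whenever the left-hand side is not \null\; the left-hand side transports \v\ around one side of the square (first along \t\, then along \u \\ t\), and the right-hand side around the other (first along \u\, then along \t \\ u\).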
\ type_synonym 'a resid = "'a \ 'a \ 'a" locale residuation = partial_magma resid for resid :: "'a resid" (infix "\\" 70) + assumes con_sym_ax: "t \\ u \ null \ u \\ t \ null" and con_imp_arr_resid: "t \\ u \ null \ (t \\ u) \\ (t \\ u) \ null" and cube_ax: "(v \\ t) \\ (u \\ t) \ null \ (v \\ t) \\ (u \\ t) = (v \\ u) \\ (t \\ u)" begin text \ The axiom \cube_ax\ is equivalent to the following unconditional form. The locale assumptions use the weaker form to avoid having to treat the case \(v \ t) \ (u \ t) = null\ specially for every interpretation. \ lemma cube: shows "(v \\ t) \\ (u \\ t) = (v \\ u) \\ (t \\ u)" using cube_ax by metis text \ We regard \t\ and \u\ as \emph{consistent} if the residuation \t \ u\ is defined. It is convenient to make this a definition, with associated notation. \ definition con (infix "\" 50) where "t \ u \ t \\ u \ null" lemma conI [intro]: assumes "t \\ u \ null" shows "t \ u" using assms con_def by blast lemma conE [elim]: assumes "t \ u" and "t \\ u \ null \ T" shows T using assms con_def by simp lemma con_sym: assumes "t \ u" shows "u \ t" using assms con_def con_sym_ax by blast text \ We call \t\ an \emph{arrow} if it is self-consistent. \ definition arr where "arr t \ t \ t" lemma arrI [intro]: assumes "t \ t" shows "arr t" using assms arr_def by simp lemma arrE [elim]: assumes "arr t" and "t \ t \ T" shows T using assms arr_def by simp lemma not_arr_null [simp]: shows "\ arr null" by (auto simp add: con_def) lemma con_implies_arr: assumes "t \ u" shows "arr t" and "arr u" using assms by (metis arrI con_def con_imp_arr_resid cube null_is_zero(2))+ lemma arr_resid [simp]: assumes "t \ u" shows "arr (t \\ u)" using assms con_imp_arr_resid by blast lemma arr_resid_iff_con: shows "arr (t \\ u) \ t \ u" by auto text \ The residuation of an arrow along itself is the \emph{canonical target} of the arrow. \ definition trg where "trg t \ t \\ t" lemma resid_arr_self: shows "t \\ t = trg t" using trg_def by auto text \ An \emph{identity} is an arrow that is its own target. \ definition ide where "ide a \ a \ a \ a \\ a = a" lemma ideI [intro]: assumes "a \ a" and "a \\ a = a" shows "ide a" using assms ide_def by auto lemma ideE [elim]: assumes "ide a" and "\a \ a; a \\ a = a\ \ T" shows T using assms ide_def by blast lemma ide_implies_arr [simp]: assumes "ide a" shows "arr a" using assms by blast end subsection "Residuated Transition System" text \ A \emph{residuated transition system} consists of a residuation subject to additional axioms that concern the relationship between identities and residuation. These axioms make it possible to sensibly associate with each arrow certain nonempty sets of identities called the \emph{sources} and \emph{targets} of the arrow. Axiom \ide_trg\ states that the canonical target \trg t\ of an arrow \t\ is an identity. Axiom \resid_arr_ide\ states that identities are right units for residuation, when it is defined. Axiom \resid_ide_arr\ states that the residuation of an identity along an arrow is again an identity, assuming that the residuation is defined. Axiom \con_imp_coinitial_ax\ states that if arrows \t\ and \u\ are consistent, then there is an identity that is consistent with both of them (\emph{i.e.}~they have a common source). Axiom \con_target\ states that an identity of the form \t \ u\ (which may be regarded as a ``target'' of \u\) is consistent with any other arrow \v \ u\ obtained by residuation along \u\. 
We note that replacing the premise \ide (t \ u)\ in this axiom by either \arr (t \ u)\ or \t \ u\ would result in a strictly stronger statement. \ locale rts = residuation + assumes ide_trg [simp]: "arr t \ ide (trg t)" and resid_arr_ide: "\ide a; t \ a\ \ t \\ a = t" and resid_ide_arr [simp]: "\ide a; a \ t\ \ ide (a \\ t)" and con_imp_coinitial_ax: "t \ u \ \a. ide a \ a \ t \ a \ u" and con_target: "\ide (t \\ u); u \ v\ \ t \\ u \ v \\ u" begin text \ We define the \emph{sources} of an arrow \t\ to be the identities that are consistent with \t\. \ definition sources where "sources t = {a. ide a \ t \ a}" text \ We define the \emph{targets} of an arrow \t\ to be the identities that are consistent with the canonical target \trg t\. \ definition targets where "targets t = {b. ide b \ trg t \ b}" lemma in_sourcesI [intro, simp]: assumes "ide a" and "t \ a" shows "a \ sources t" using assms sources_def by simp lemma in_sourcesE [elim]: assumes "a \ sources t" and "\ide a; t \ a\ \ T" shows T using assms sources_def by auto lemma in_targetsI [intro, simp]: assumes "ide b" and "trg t \ b" shows "b \ targets t" using assms targets_def resid_arr_self by simp lemma in_targetsE [elim]: assumes "b \ targets t" and "\ide b; trg t \ b\ \ T" shows T using assms targets_def resid_arr_self by force lemma trg_in_targets: assumes "arr t" shows "trg t \ targets t" using assms by (meson ideE ide_trg in_targetsI) lemma source_is_ide: assumes "a \ sources t" shows "ide a" using assms by blast lemma target_is_ide: assumes "a \ targets t" shows "ide a" using assms by blast text \ Consistent arrows have a common source. \ lemma con_imp_common_source: assumes "t \ u" shows "sources t \ sources u \ {}" using assms by (meson disjoint_iff in_sourcesI con_imp_coinitial_ax con_sym) text \ Arrows are characterized by the property of having a nonempty set of sources, or equivalently, by that of having a nonempty set of targets. \ lemma arr_iff_has_source: shows "arr t \ sources t \ {}" using con_imp_common_source con_implies_arr(1) sources_def by blast lemma arr_iff_has_target: shows "arr t \ targets t \ {}" using trg_def trg_in_targets by fastforce text \ The residuation of a source of an arrow along that arrow gives a target of the same arrow. However, it is \emph{not} true that every target of an arrow \t\ is of the form \u \ t\ for some \u\ with \t \ u\. \ lemma resid_source_in_targets: assumes "a \ sources t" shows "a \\ t \ targets t" by (metis arr_resid assms con_target con_sym resid_arr_ide ide_trg in_sourcesE resid_ide_arr in_targetsI resid_arr_self) text \ Residuation along an identity reflects identities. 
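Informally: if \t \\ a\ is an identity, then in particular \t\ is consistent with \a\, so when \a\ is itself an identity, \resid_arr_ide\ yields \t \\ a = t\, and hence \t\ is an identity. This is the content of \ide_backward_stable\ below.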
\ lemma ide_backward_stable: assumes "ide a" and "ide (t \\ a)" shows "ide t" by (metis assms ideE resid_arr_ide arr_resid_iff_con) lemma resid_reflects_con: assumes "t \ v" and "u \ v" and "t \\ v \ u \\ v" shows "t \ u" using assms cube by (elim conE) auto lemma con_transitive_on_ide: assumes "ide a" and "ide b" and "ide c" shows "\a \ b; b \ c\ \ a \ c" using assms by (metis resid_arr_ide con_target con_sym) lemma sources_are_con: assumes "a \ sources t" and "a' \ sources t" shows "a \ a'" using assms by (metis (no_types, lifting) CollectD con_target con_sym resid_ide_arr sources_def resid_reflects_con) lemma sources_con_closed: assumes "a \ sources t" and "ide a'" and "a \ a'" shows "a' \ sources t" using assms by (metis (no_types, lifting) con_target con_sym resid_arr_ide mem_Collect_eq sources_def) lemma sources_eqI: assumes "sources t \ sources t' \ {}" shows "sources t = sources t'" using assms sources_def sources_are_con sources_con_closed by blast lemma targets_are_con: assumes "b \ targets t" and "b' \ targets t" shows "b \ b'" using assms sources_are_con sources_def targets_def by blast lemma targets_con_closed: assumes "b \ targets t" and "ide b'" and "b \ b'" shows "b' \ targets t" using assms sources_con_closed sources_def targets_def by blast lemma targets_eqI: assumes "targets t \ targets t' \ {}" shows "targets t = targets t'" using assms targets_def targets_are_con targets_con_closed by blast text \ Arrows are \emph{coinitial} if they have a common source, and \emph{coterminal} if they have a common target. \ definition coinitial where "coinitial t u \ sources t \ sources u \ {}" definition coterminal where "coterminal t u \ targets t \ targets u \ {}" lemma coinitialI [intro]: assumes "arr t" and "sources t = sources u" shows "coinitial t u" using assms coinitial_def arr_iff_has_source by simp lemma coinitialE [elim]: assumes "coinitial t u" and "\arr t; arr u; sources t = sources u\ \ T" shows T using assms coinitial_def sources_eqI arr_iff_has_source by auto lemma con_imp_coinitial: assumes "t \ u" shows "coinitial t u" using assms by (simp add: coinitial_def con_imp_common_source) lemma coinitial_iff: shows "coinitial t t' \ arr t \ arr t' \ sources t = sources t'" by (metis arr_iff_has_source coinitial_def inf_idem sources_eqI) lemma coterminal_iff: shows "coterminal t t' \ arr t \ arr t' \ targets t = targets t'" by (metis arr_iff_has_target coterminal_def inf_idem targets_eqI) lemma coterminal_iff_con_trg: shows "coterminal t u \ trg t \ trg u" by (metis coinitial_iff con_imp_coinitial coterminal_iff in_targetsE trg_in_targets resid_arr_self arr_resid_iff_con sources_def targets_def) lemma coterminalI [intro]: assumes "arr t" and "targets t = targets u" shows "coterminal t u" using assms coterminal_iff arr_iff_has_target by auto lemma coterminalE [elim]: assumes "coterminal t u" and "\arr t; arr u; targets t = targets u\ \ T" shows T using assms coterminal_iff by auto lemma sources_resid [simp]: assumes "t \ u" shows "sources (t \\ u) = targets u" unfolding targets_def trg_def using assms conI conE by (metis con_imp_arr_resid assms coinitial_iff con_imp_coinitial cube ex_un_null sources_def) lemma targets_resid_sym: assumes "t \ u" shows "targets (t \\ u) = targets (u \\ t)" using assms apply (intro targets_eqI) by (metis (no_types, opaque_lifting) assms cube inf_idem arr_iff_has_target arr_def arr_resid_iff_con sources_resid) text \ Arrows \t\ and \u\ are \emph{sequential} if the set of targets of \t\ equals the set of sources of \u\. 
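For example, if \t\ and \u\ are consistent, then \sources (t \\ u) = targets u\ by \sources_resid\ above, so \u\ followed by the residual \t \\ u\ forms a sequential pair in the sense just described.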
\ definition seq where "seq t u \ arr t \ arr u \ targets t = sources u" lemma seqI [intro]: assumes "arr t" and "arr u" and "targets t = sources u" shows "seq t u" using assms seq_def by auto lemma seqE [elim]: assumes "seq t u" and "\arr t; arr u; targets t = sources u\ \ T" shows T using assms seq_def by blast subsubsection "Congruence of Transitions" text \ Residuation induces a preorder \\\ on transitions, defined by \t \ u\ if and only if \t \ u\ is an identity. \ abbreviation prfx (infix "\" 50) where "t \ u \ ide (t \\ u)" lemma prfx_implies_con: assumes "t \ u" shows "t \ u" using assms arr_resid_iff_con by blast lemma prfx_reflexive: assumes "arr t" shows "t \ t" by (simp add: assms resid_arr_self) lemma prfx_transitive [trans]: assumes "t \ u" and "u \ v" shows "t \ v" using assms con_target resid_ide_arr ide_backward_stable cube conI by metis text \ The equivalence \\\ associated with \\\ is substitutive with respect to residuation. \ abbreviation cong (infix "\" 50) where "t \ u \ t \ u \ u \ t" lemma cong_reflexive: assumes "arr t" shows "t \ t" using assms prfx_reflexive by simp lemma cong_symmetric: assumes "t \ u" shows "u \ t" using assms by simp lemma cong_transitive [trans]: assumes "t \ u" and "u \ v" shows "t \ v" using assms prfx_transitive by auto lemma cong_subst_left: assumes "t \ t'" and "t \ u" shows "t' \ u" and "t \\ u \ t' \\ u" apply (meson assms con_sym con_target prfx_implies_con resid_reflects_con) by (metis assms con_sym con_target cube prfx_implies_con resid_ide_arr resid_reflects_con) lemma cong_subst_right: assumes "u \ u'" and "t \ u" shows "t \ u'" and "t \\ u \ t \\ u'" proof - have 1: "t \ u' \ t \\ u' \ u \\ u' \ (t \\ u) \\ (u' \\ u) = (t \\ u') \\ (u \\ u')" using assms cube con_sym con_target cong_subst_left(1) by meson show "t \ u'" using 1 by simp show "t \\ u \ t \\ u'" by (metis 1 arr_resid_iff_con assms(1) cong_reflexive resid_arr_ide) qed lemma cong_implies_coinitial: assumes "u \ u'" shows "coinitial u u'" using assms con_imp_coinitial prfx_implies_con by simp lemma cong_implies_coterminal: assumes "u \ u'" shows "coterminal u u'" using assms by (metis con_implies_arr(1) coterminalI ideE prfx_implies_con sources_resid targets_resid_sym) lemma ide_imp_con_iff_cong: assumes "ide t" and "ide u" shows "t \ u \ t \ u" using assms by (metis con_sym resid_ide_arr prfx_implies_con) lemma sources_are_cong: assumes "a \ sources t" and "a' \ sources t" shows "a \ a'" using assms sources_are_con by (metis CollectD ide_imp_con_iff_cong sources_def) lemma sources_cong_closed: assumes "a \ sources t" and "a \ a'" shows "a' \ sources t" using assms sources_def by (meson in_sourcesE in_sourcesI cong_subst_right(1) ide_backward_stable) lemma targets_are_cong: assumes "b \ targets t" and "b' \ targets t" shows "b \ b'" using assms(1-2) sources_are_cong sources_def targets_def by blast lemma targets_cong_closed: assumes "b \ targets t" and "b \ b'" shows "b' \ targets t" using assms targets_def sources_cong_closed sources_def by blast lemma targets_char: shows "targets t = {b. 
arr t \ t \\ t \ b}" unfolding targets_def by (metis (no_types, lifting) con_def con_implies_arr(2) con_sym cong_reflexive ide_def resid_arr_ide trg_def) lemma coinitial_ide_are_cong: assumes "ide a" and "ide a'" and "coinitial a a'" shows "a \ a'" using assms coinitial_def by (metis ideE in_sourcesI coinitialE sources_are_cong) lemma cong_respects_seq: assumes "seq t u" and "cong t t'" and "cong u u'" shows "seq t' u'" by (metis assms coterminalE rts.coinitialE rts.cong_implies_coinitial rts.cong_implies_coterminal rts_axioms seqE seqI) end subsection "Weakly Extensional RTS" text \ A \emph{weakly extensional} RTS is an RTS that satisfies the additional condition that identity arrows have trivial congruence classes. This axiom has a number of useful consequences, including that each arrow has a unique source and target. \ locale weakly_extensional_rts = rts + assumes weak_extensionality: "\t \ u; ide t; ide u\ \ t = u" begin lemma con_ide_are_eq: assumes "ide a" and "ide a'" and "a \ a'" shows "a = a'" using assms ide_imp_con_iff_cong weak_extensionality by blast lemma coinitial_ide_are_eq: assumes "ide a" and "ide a'" and "coinitial a a'" shows "a = a'" using assms coinitial_def con_ide_are_eq by blast lemma arr_has_un_source: assumes "arr t" shows "\!a. a \ sources t" using assms by (meson arr_iff_has_source con_ide_are_eq ex_in_conv in_sourcesE sources_are_con) lemma arr_has_un_target: assumes "arr t" shows "\!b. b \ targets t" using assms by (metis arrE arr_has_un_source arr_resid sources_resid) definition src where "src t \ if arr t then THE a. a \ sources t else null" lemma src_in_sources: assumes "arr t" shows "src t \ sources t" using assms src_def arr_has_un_source the1I2 [of "\a. a \ sources t" "\a. a \ sources t"] by simp lemma src_eqI: assumes "ide a" and "a \ t" shows "src t = a" using assms src_in_sources by (metis arr_has_un_source resid_arr_ide in_sourcesI arr_resid_iff_con con_sym) lemma sources_char: shows "sources t = {a. arr t \ src t = a}" using src_in_sources arr_has_un_source arr_iff_has_source by auto lemma targets_char\<^sub>W\<^sub>E: shows "targets t = {b. 
arr t \ trg t = b}" using trg_in_targets arr_has_un_target arr_iff_has_target by auto lemma arr_src_iff_arr [iff]: shows "arr (src t) \ arr t" by (metis arrI conE null_is_zero(2) sources_are_con arrE src_def src_in_sources) lemma arr_trg_iff_arr [iff]: shows "arr (trg t) \ arr t" by (metis arrI arrE arr_resid_iff_con resid_arr_self) lemma con_imp_eq_src: assumes "t \ u" shows "src t = src u" using assms by (metis con_imp_coinitial_ax src_eqI) lemma src_resid [simp]: assumes "t \ u" shows "src (t \\ u) = trg u" using assms by (metis arr_resid_iff_con con_implies_arr(2) arr_has_un_source trg_in_targets sources_resid src_in_sources) lemma trg_resid_sym: assumes "t \ u" shows "trg (t \\ u) = trg (u \\ t)" using assms by (metis arr_has_un_target arr_resid con_sym targets_resid_sym trg_in_targets) lemma apex_sym: shows "trg (t \\ u) = trg (u \\ t)" using trg_resid_sym con_def by metis lemma seqI\<^sub>W\<^sub>E [intro, simp]: assumes "arr u" and "arr t" and "trg t = src u" shows "seq t u" using assms by (metis (mono_tags, lifting) arrE in_sourcesE resid_arr_ide sources_resid resid_arr_self seqI sources_are_con src_in_sources) lemma seqE\<^sub>W\<^sub>E [elim]: assumes "seq t u" and "\arr u; arr t; trg t = src u\ \ T" shows T using assms by (metis arr_has_un_source seq_def src_in_sources trg_in_targets) lemma coinitial_iff\<^sub>W\<^sub>E: shows "coinitial t u \ arr t \ arr u \ src t = src u" by (metis arr_has_un_source coinitial_def coinitial_iff disjoint_iff_not_equal src_in_sources) lemma coterminal_iff\<^sub>W\<^sub>E: shows "coterminal t u \ arr t \ arr u \ trg t = trg u" by (metis arr_has_un_target coterminal_iff_con_trg coterminal_iff trg_in_targets) lemma coinitialI\<^sub>W\<^sub>E [intro]: assumes "arr t" and "src t = src u" shows "coinitial t u" using assms coinitial_iff\<^sub>W\<^sub>E by (metis arr_src_iff_arr) lemma coinitialE\<^sub>W\<^sub>E [elim]: assumes "coinitial t u" and "\arr t; arr u; src t = src u\ \ T" shows T using assms coinitial_iff\<^sub>W\<^sub>E by blast lemma coterminalI\<^sub>W\<^sub>E [intro]: assumes "arr t" and "trg t = trg u" shows "coterminal t u" using assms coterminal_iff\<^sub>W\<^sub>E by (metis arr_trg_iff_arr) lemma coterminalE\<^sub>W\<^sub>E [elim]: assumes "coterminal t u" and "\arr t; arr u; trg t = trg u\ \ T" shows T using assms coterminal_iff\<^sub>W\<^sub>E by blast lemma ide_src [simp]: assumes "arr t" shows "ide (src t)" using assms by (metis arrE con_imp_coinitial_ax src_eqI) lemma src_ide [simp]: assumes "ide a" shows "src a = a" using arrI assms src_eqI by blast lemma trg_ide [simp]: assumes "ide a" shows "trg a = a" using assms resid_arr_self by force lemma ide_iff_src_self: assumes "arr a" shows "ide a \ src a = a" using assms by (metis ide_src src_ide) lemma ide_iff_trg_self: assumes "arr a" shows "ide a \ trg a = a" using assms ide_def resid_arr_self by auto lemma src_src [simp]: shows "src (src t) = src t" using ide_src src_def src_ide by auto lemma trg_trg [simp]: shows "trg (trg t) = trg t" by (metis con_def cong_reflexive ide_def null_is_zero(2) resid_arr_self residuation.con_implies_arr(1) residuation_axioms) lemma src_trg [simp]: shows "src (trg t) = trg t" by (metis con_def not_arr_null src_def src_resid trg_def) lemma trg_src [simp]: shows "trg (src t) = src t" by (metis ide_src null_is_zero(2) resid_arr_self src_def trg_ide) lemma resid_ide: assumes "ide a" and "coinitial a t" shows (* [simp]: *) "t \\ a = t" and "a \\ t = trg t" using assms resid_arr_ide apply blast using assms by (metis con_def con_sym_ax ideE in_sourcesE 
in_sourcesI resid_ide_arr coinitialE src_ide src_resid) end subsection "Extensional RTS" text \ An \emph{extensional} RTS is an RTS in which all arrows have trivial congruence classes; that is, congruent arrows are equal. \ locale extensional_rts = rts + assumes extensional: "t \ u \ t = u" begin sublocale weakly_extensional_rts using extensional by unfold_locales auto lemma cong_char: shows "t \ u \ arr t \ t = u" by (metis arrI cong_reflexive prfx_implies_con extensional) end subsection "Composites of Transitions" text \ Residuation can be used to define a notion of composite of transitions. Composites are not unique, but they are unique up to congruence. \ context rts begin definition composite_of where "composite_of u t v \ u \ v \ v \\ u \ t" lemma composite_ofI [intro]: assumes "u \ v" and "v \\ u \ t" shows "composite_of u t v" using assms composite_of_def by blast lemma composite_ofE [elim]: assumes "composite_of u t v" and "\u \ v; v \\ u \ t\ \ T" shows T using assms composite_of_def by auto lemma arr_composite_of: assumes "composite_of u t v" shows "arr v" using assms by (meson composite_of_def con_implies_arr(2) prfx_implies_con) lemma composite_of_unq_upto_cong: assumes "composite_of u t v" and "composite_of u t v'" shows "v \ v'" using assms cube ide_backward_stable prfx_transitive by (elim composite_ofE) metis lemma composite_of_ide_arr: assumes "ide a" shows "composite_of a t t \ t \ a" using assms by (metis composite_of_def con_implies_arr(1) con_sym resid_arr_ide resid_ide_arr prfx_implies_con prfx_reflexive) lemma composite_of_arr_ide: assumes "ide b" shows "composite_of t b t \ t \\ t \ b" using assms by (metis arr_resid_iff_con composite_of_def ide_imp_con_iff_cong con_implies_arr(1) prfx_implies_con prfx_reflexive) lemma composite_of_source_arr: assumes "arr t" and "a \ sources t" shows "composite_of a t t" using assms composite_of_ide_arr sources_def by auto lemma composite_of_arr_target: assumes "arr t" and "b \ targets t" shows "composite_of t b t" by (metis arrE assms composite_of_arr_ide in_sourcesE sources_resid) lemma composite_of_ide_self: assumes "ide a" shows "composite_of a a a" using assms composite_of_ide_arr by blast lemma con_prfx_composite_of: assumes "composite_of t u w" shows "t \ w" and "w \ v \ t \ v" using assms apply force using assms composite_of_def con_target prfx_implies_con resid_reflects_con con_sym by meson lemma sources_composite_of: assumes "composite_of u t v" shows "sources v = sources u" using assms by (meson arr_resid_iff_con composite_of_def con_imp_coinitial cong_implies_coinitial coinitial_iff) lemma targets_composite_of: assumes "composite_of u t v" shows "targets v = targets t" proof - have "targets t = targets (v \\ u)" using assms composite_of_def by (meson cong_implies_coterminal coterminal_iff) also have "... = targets (u \\ v)" using assms targets_resid_sym con_prfx_composite_of by metis also have "... = targets v" using assms composite_of_def by (metis prfx_implies_con sources_resid ideE) finally show ?thesis by auto qed lemma resid_composite_of: assumes "composite_of t u w" and "w \ v" shows "v \\ t \ w \\ t" and "v \\ t \ u" and "v \\ w \ (v \\ t) \\ u" and "composite_of (t \\ v) (u \\ (v \\ t)) (w \\ v)" proof - show 0: "v \\ t \ w \\ t" using assms con_def by (metis con_target composite_ofE conE con_sym cube) show 1: "v \\ w \ (v \\ t) \\ u" proof - have "v \\ w = (v \\ w) \\ (t \\ w)" using assms composite_of_def by (metis (no_types, opaque_lifting) con_target con_sym resid_arr_ide) also have "... 
= (v \\ t) \\ (w \\ t)" using assms cube by metis also have "... \ (v \\ t) \\ u" using assms 0 cong_subst_right(2) [of "w \\ t" u "v \\ t"] by blast finally show ?thesis by blast qed show 2: "v \\ t \ u" using assms 1 by force show "composite_of (t \\ v) (u \\ (v \\ t)) (w \\ v)" proof (unfold composite_of_def, intro conjI) show "t \\ v \ w \\ v" using assms cube con_target composite_of_def resid_ide_arr by metis show "(w \\ v) \\ (t \\ v) \ u \\ (v \\ t)" by (metis assms(1) 2 composite_ofE con_sym cong_subst_left(2) cube) thus "u \\ (v \\ t) \ (w \\ v) \\ (t \\ v)" using assms by (metis composite_of_def con_implies_arr(2) cong_subst_left(2) prfx_implies_con arr_resid_iff_con cube) qed qed lemma con_composite_of_iff: assumes "composite_of t u v" shows "w \ v \ w \\ t \ u" by (meson arr_resid_iff_con assms composite_ofE con_def con_implies_arr(1) con_sym_ax cong_subst_right(1) resid_composite_of(2) resid_reflects_con) definition composable where "composable t u \ \v. composite_of t u v" lemma composableD [dest]: assumes "composable t u" shows "arr t" and "arr u" and "targets t = sources u" using assms arr_composite_of arr_iff_has_source composable_def sources_composite_of arr_composite_of arr_iff_has_target composable_def targets_composite_of apply auto[2] by (metis assms composable_def composite_ofE con_prfx_composite_of(1) con_sym cong_implies_coinitial coinitial_iff sources_resid) lemma composable_imp_seq: assumes "composable t u" shows "seq t u" using assms by blast lemma bounded_imp_con: assumes "composite_of t u v" and "composite_of t' u' v" shows "con t t'" by (meson assms composite_of_def con_prfx_composite_of prfx_implies_con arr_resid_iff_con con_implies_arr(2)) lemma composite_of_cancel_left: assumes "composite_of t u v" and "composite_of t u' v" shows "u \ u'" using assms composite_of_def cong_transitive by blast end subsubsection "RTS with Composites" locale rts_with_composites = rts + assumes has_composites: "seq t u \ composable t u" begin lemma composable_iff_seq: shows "composable g f \ seq g f" using composable_imp_seq has_composites by blast lemma obtains_composite_of: assumes "seq g f" obtains h where "composite_of g f h" using assms has_composites composable_def by blast lemma diamond_commutes_upto_cong: assumes "composite_of t (u \\ t) v" and "composite_of u (t \\ u) v'" shows "v \ v'" using assms cube ide_backward_stable prfx_transitive by (elim composite_ofE) metis end subsection "Joins of Transitions" context rts begin text \ Transition \v\ is a \emph{join} of \u\ and \v\ when \v\ is the diagonal of the square formed by \u\, \v\, and their residuals. As was the case for composites, joins in an RTS are not unique, but they are unique up to congruence. \ definition join_of where "join_of t u v \ composite_of t (u \\ t) v \ composite_of u (t \\ u) v" lemma join_ofI [intro]: assumes "composite_of t (u \\ t) v" and "composite_of u (t \\ u) v" shows "join_of t u v" using assms join_of_def by simp lemma join_ofE [elim]: assumes "join_of t u v" and "\composite_of t (u \\ t) v; composite_of u (t \\ u) v\ \ T" shows T using assms join_of_def by simp definition joinable where "joinable t u \ \v. 
join_of t u v" lemma joinable_implies_con: assumes "joinable t u" shows "t \ u" by (meson assms bounded_imp_con join_of_def joinable_def) lemma joinable_implies_coinitial: assumes "joinable t u" shows "coinitial t u" using assms by (simp add: con_imp_coinitial joinable_implies_con) lemma join_of_un_upto_cong: assumes "join_of t u v" and "join_of t u v'" shows "v \ v'" using assms join_of_def composite_of_unq_upto_cong by auto lemma join_of_symmetric: assumes "join_of t u v" shows "join_of u t v" using assms join_of_def by simp lemma join_of_arr_self: assumes "arr t" shows "join_of t t t" by (meson assms composite_of_arr_ide ideE join_of_def prfx_reflexive) lemma join_of_arr_src: assumes "arr t" and "a \ sources t" shows "join_of a t t" and "join_of t a t" proof - show "join_of a t t" by (meson assms composite_of_arr_target composite_of_def composite_of_source_arr join_of_def prfx_transitive resid_source_in_targets) thus "join_of t a t" using join_of_symmetric by blast qed lemma sources_join_of: assumes "join_of t u v" shows "sources t = sources v" and "sources u = sources v" using assms join_of_def sources_composite_of by blast+ lemma targets_join_of: assumes "join_of t u v" shows "targets (t \\ u) = targets v" and "targets (u \\ t) = targets v" using assms join_of_def targets_composite_of by blast+ lemma join_of_resid: assumes "join_of t u w" and "con v w" shows "join_of (t \\ v) (u \\ v) (w \\ v)" using assms con_sym cube join_of_def resid_composite_of(4) by fastforce lemma con_with_join_of_iff: assumes "join_of t u w" shows "u \ v \ v \\ u \ t \\ u \ w \ v" and "w \ v \ t \ v \ v \\ t \ u \\ t" proof - have *: "t \ v \ v \\ t \ u \\ t \ u \ v \ v \\ u \ t \\ u" by (metis arr_resid_iff_con con_implies_arr(1) con_sym cube) show "u \ v \ v \\ u \ t \\ u \ w \ v" by (meson assms con_composite_of_iff con_sym join_of_def) show "w \ v \ t \ v \ v \\ t \ u \\ t" by (meson assms con_prfx_composite_of join_of_def resid_composite_of(2)) qed end subsubsection "RTS with Joins" locale rts_with_joins = rts + assumes has_joins: "t \ u \ joinable t u" subsection "Joins and Composites in a Weakly Extensional RTS" context weakly_extensional_rts begin lemma src_composite_of: assumes "composite_of u t v" shows "src v = src u" using assms by (metis con_imp_eq_src con_prfx_composite_of(1)) lemma trg_composite_of: assumes "composite_of u t v" shows "trg v = trg t" by (metis arr_composite_of arr_has_un_target arr_iff_has_target assms targets_composite_of trg_in_targets) lemma src_join_of: assumes "join_of t u v" shows "src t = src v" and "src u = src v" by (metis assms join_ofE src_composite_of)+ lemma trg_join_of: assumes "join_of t u v" shows "trg (t \\ u) = trg v" and "trg (u \\ t) = trg v" by (metis assms join_of_def trg_composite_of)+ end subsection "Joins and Composites in an Extensional RTS" context extensional_rts begin lemma composite_of_unique: assumes "composite_of t u v" and "composite_of t u v'" shows "v = v'" using assms composite_of_unq_upto_cong extensional by fastforce text \ Here we define composition of transitions. Note that we compose transitions in diagram order, rather than in the order used for function composition. This may eventually lead to confusion, but here (unlike in the case of a category) transitions are typically not functions, so we don't have the constraint of having to conform to the order of function application and composition, and diagram order seems more natural. \ definition comp (infixl "\" 55) where "t \ u \ if composable t u then THE v. 
composite_of t u v else null" lemma comp_is_composite_of: assumes "composite_of t u v" shows "composite_of t u (t \ u)" and "t \ u = v" proof - show "composite_of t u (t \ u)" using assms comp_def composite_of_unique the1I2 [of "composite_of t u" "composite_of t u"] composable_def by metis thus "t \ u = v" using assms composite_of_unique by simp qed lemma comp_null [simp]: shows "null \ t = null" and "t \ null = null" by (meson composableD not_arr_null comp_def)+ lemma composable_iff_arr_comp: shows "composable t u \ arr (t \ u)" by (metis arr_composite_of comp_is_composite_of(2) composable_def comp_def not_arr_null) lemma composable_iff_comp_not_null: shows "composable t u \ t \ u \ null" by (metis composable_iff_arr_comp comp_def not_arr_null) lemma comp_src_arr [simp]: assumes "arr t" and "src t = a" shows "a \ t = t" using assms comp_is_composite_of(2) composite_of_source_arr src_in_sources by blast lemma comp_arr_trg [simp]: assumes "arr t" and "trg t = b" shows "t \ b = t" using assms comp_is_composite_of(2) composite_of_arr_target trg_in_targets by blast lemma comp_ide_self: assumes "ide a" shows "a \ a = a" using assms comp_is_composite_of(2) composite_of_ide_self by fastforce lemma arr_comp [intro, simp]: assumes "composable t u" shows "arr (t \ u)" using assms composable_iff_arr_comp by blast lemma trg_comp [simp]: assumes "composable t u" shows "trg (t \ u) = trg u" by (metis arr_has_un_target assms comp_is_composite_of(2) composable_def composable_imp_seq arr_iff_has_target seq_def targets_composite_of trg_in_targets) lemma src_comp [simp]: assumes "composable t u" shows "src (t \ u) = src t" using assms comp_is_composite_of arr_iff_has_source sources_composite_of src_def composable_def by auto lemma con_comp_iff: shows "w \ t \ u \ composable t u \ w \\ t \ u" by (meson comp_is_composite_of(1) con_composite_of_iff con_sym con_implies_arr(2) composable_def composable_iff_arr_comp) lemma con_compI [intro]: assumes "composable t u" and "w \\ t \ u" shows "w \ t \ u" and "t \ u \ w" using assms con_comp_iff con_sym by blast+ lemma resid_comp: assumes "t \ u \ w" shows "w \\ (t \ u) = (w \\ t) \\ u" and "(t \ u) \\ w = (t \\ w) \ (u \\ (w \\ t))" proof - have 1: "composable t u" using assms composable_iff_comp_not_null by force show "w \\ (t \ u) = (w \\ t) \\ u" using 1 by (meson assms cong_char composable_def resid_composite_of(3) comp_is_composite_of(1)) show "(t \ u) \\ w = (t \\ w) \ (u \\ (w \\ t))" using assms 1 composable_def comp_is_composite_of(2) resid_composite_of by metis qed lemma prfx_decomp: assumes "t \ u" shows "t \ (u \\ t) = u" by (meson assms arr_resid_iff_con comp_is_composite_of(2) composite_of_def con_sym cong_reflexive prfx_implies_con) lemma prfx_comp: assumes "arr u" and "t \ v = u" shows "t \ u" by (metis assms comp_is_composite_of(2) composable_def composable_iff_arr_comp composite_of_def) lemma comp_eqI: assumes "t \ v" and "u = v \\ t" shows "t \ u = v" by (metis assms prfx_decomp) lemma comp_assoc: assumes "composable (t \ u) v" shows "t \ (u \ v) = (t \ u) \ v" proof - have 1: "t \ (t \ u) \ v" by (meson assms composable_iff_arr_comp composableD prfx_comp prfx_transitive) moreover have "((t \ u) \ v) \\ t = u \ v" proof - have "((t \ u) \ v) \\ t = ((t \ u) \\ t) \ (v \\ (t \\ (t \ u)))" by (meson assms calculation con_sym prfx_implies_con resid_comp(2)) also have "... 
= u \ v" proof - have 2: "(t \ u) \\ t = u" by (metis assms comp_is_composite_of(2) composable_def composable_iff_arr_comp composable_imp_seq composite_of_def extensional seqE) moreover have "v \\ (t \\ (t \ u)) = v" using assms by (meson 1 con_comp_iff con_sym composable_imp_seq resid_arr_ide prfx_implies_con prfx_comp seqE) ultimately show ?thesis by simp qed finally show ?thesis by blast qed ultimately show "t \ (u \ v) = (t \ u) \ v" by (metis comp_eqI) qed text \ We note the following assymmetry: \composable (t \ u) v \ composable u v\ is true, but \composable t (u \ v) \ composable t u\ is not. \ lemma comp_cancel_left: assumes "arr (t \ u)" and "t \ u = t \ v" shows "u = v" using assms by (metis composable_def composable_iff_arr_comp composite_of_cancel_left extensional comp_is_composite_of(2)) lemma comp_resid_prfx [simp]: assumes "arr (t \ u)" shows "(t \ u) \\ t = u" using assms by (metis comp_cancel_left comp_eqI prfx_comp) lemma bounded_imp_con\<^sub>E: assumes "t \ u \ t' \ u'" shows "t \ t'" by (metis arr_resid_iff_con assms con_comp_iff con_implies_arr(2) prfx_implies_con con_sym) lemma join_of_unique: assumes "join_of t u v" and "join_of t u v'" shows "v = v'" using assms join_of_def composite_of_unique by blast definition join (infix "\" 52) where "t \ u \ if joinable t u then THE v. join_of t u v else null" lemma join_is_join_of: assumes "joinable t u" shows "join_of t u (t \ u)" using assms joinable_def join_def join_of_unique the1I2 [of "join_of t u" "join_of t u"] by force lemma joinable_iff_arr_join: shows "joinable t u \ arr (t \ u)" by (metis cong_char join_is_join_of join_of_un_upto_cong not_arr_null join_def) lemma joinable_iff_join_not_null: shows "joinable t u \ t \ u \ null" by (metis join_def joinable_iff_arr_join not_arr_null) lemma join_sym: assumes "t \ u \ null" shows "t \ u = u \ t" using assms by (meson join_def join_is_join_of join_of_symmetric join_of_unique joinable_def) lemma src_join: assumes "joinable t u" shows "src (t \ u) = src t" using assms by (metis con_imp_eq_src con_prfx_composite_of(1) join_is_join_of join_of_def) lemma trg_join: assumes "joinable t u" shows "trg (t \ u) = trg (t \\ u)" using assms by (metis arr_resid_iff_con join_is_join_of joinable_iff_arr_join joinable_implies_con in_targetsE src_eqI targets_join_of(1) trg_in_targets) lemma resid_join\<^sub>E [simp]: assumes "joinable t u" and "v \ t \ u" shows "v \\ (t \ u) = (v \\ u) \\ (t \\ u)" and "v \\ (t \ u) = (v \\ t) \\ (u \\ t)" and "(t \ u) \\ v = (t \\ v) \ (u \\ v)" proof - show 1: "v \\ (t \ u) = (v \\ u) \\ (t \\ u)" by (meson assms con_sym join_of_def resid_composite_of(3) extensional join_is_join_of) show "v \\ (t \ u) = (v \\ t) \\ (u \\ t)" by (metis "1" cube) show "(t \ u) \\ v = (t \\ v) \ (u \\ v)" using assms joinable_def join_of_resid join_is_join_of extensional by (meson join_of_unique) qed lemma join_eqI: assumes "t \ v" and "u \ v" and "v \\ u = t \\ u" and "v \\ t = u \\ t" shows "t \ u = v" using assms composite_of_def cube ideE join_of_def joinable_def join_of_unique join_is_join_of trg_def by metis lemma comp_join: assumes "joinable (t \ u) (t \ u')" shows "composable t (u \ u')" and "t \ (u \ u') = t \ u \ t \ u'" proof - have "t \ t \ u \ t \ u'" using assms by (metis composable_def composite_of_def join_of_def join_is_join_of joinable_implies_con prfx_transitive comp_is_composite_of(2) con_comp_iff) moreover have "(t \ u \ t \ u') \\ t = u \ u'" by (metis arr_resid_iff_con assms calculation comp_resid_prfx con_implies_arr(2) joinable_implies_con 
resid_join\<^sub>E(3) con_implies_arr(1) ide_implies_arr) ultimately show "t \ (u \ u') = t \ u \ t \ u'" by (metis comp_eqI) thus "composable t (u \ u')" by (metis assms joinable_iff_join_not_null comp_def) qed lemma join_src: assumes "arr t" shows "src t \ t = t" using assms joinable_def join_of_arr_src join_is_join_of join_of_unique src_in_sources by meson lemma join_self: assumes "arr t" shows "t \ t = t" using assms joinable_def join_of_arr_self join_is_join_of join_of_unique by blast lemma arr_prfx_join_self: assumes "joinable t u" shows "t \ t \ u" using assms by (meson composite_of_def join_is_join_of join_of_def) text \ We note that it is not the case that the existence of either of \t \ (u \ v)\ or \(t \ u) \ v\ implies that of the other. For example, if \(t \ u) \ v \ null\, then it is not necessarily the case that \u \ v \ null\. \ end subsubsection "Extensional RTS with Joins" locale extensional_rts_with_joins = rts_with_joins + extensional_rts begin lemma joinable_iff_con: shows "joinable t u \ t \ u" by (meson has_joins joinable_implies_con) lemma src_join\<^sub>E\<^sub>J [simp]: assumes "t \ u" shows "src (t \ u) = src t" using assms by (meson has_joins src_join) lemma trg_join\<^sub>E\<^sub>J: assumes "t \ u" shows "trg (t \ u) = trg (t \\ u)" using assms by (meson has_joins trg_join) lemma resid_join\<^sub>E\<^sub>J [simp]: assumes "t \ u" and "v \ t \ u" shows "v \\ (t \ u) = (v \\ t) \\ (u \\ t)" and "(t \ u) \\ v = (t \\ v) \ (u \\ v)" using assms has_joins resid_join\<^sub>E by blast+ lemma join_assoc: shows "t \ (u \ v) = (t \ u) \ v" proof - have *: "\t u v. con (t \ u) v \ t \ (u \ v) = (t \ u) \ v" proof - fix t u v assume 1: "con (t \ u) v" have vt_ut: "v \\ t \ u \\ t" using 1 by (metis con_implies_arr(1) con_with_join_of_iff(2) join_is_join_of not_arr_null join_def) have tv_uv: "t \\ v \ u \\ v" using vt_ut cube con_sym by (metis arr_resid_iff_con) have 2: "(t \ u) \ v = (t \ (u \\ t)) \ (v \\ (t \ (u \\ t)))" using 1 by (metis comp_is_composite_of(2) con_implies_arr(1) has_joins join_is_join_of join_of_def joinable_iff_arr_join) also have "... = t \ ((u \\ t) \ (v \\ (t \ (u \\ t))))" using 1 by (metis calculation has_joins joinable_iff_join_not_null comp_assoc comp_def) also have "... = t \ ((u \\ t) \ ((v \\ t) \\ (u \\ t)))" using 1 by (metis 2 comp_null(2) con_compI(2) con_comp_iff has_joins resid_comp(1) conI joinable_iff_join_not_null) also have "... = t \ ((v \\ t) \ (u \\ t))" by (metis vt_ut comp_is_composite_of(2) has_joins join_of_def join_is_join_of) also have "... = t \ ((u \\ t) \ (v \\ t))" using join_sym by metis also have "... = t \ ((u \ v) \\ t)" by (metis tv_uv vt_ut con_implies_arr(2) con_sym con_with_join_of_iff(1) has_joins join_is_join_of arr_resid_iff_con resid_join\<^sub>E(3)) also have "... = t \ (u \ v)" by (metis comp_is_composite_of(2) comp_null(2) conI has_joins join_is_join_of join_of_def joinable_iff_join_not_null) finally show "t \ (u \ v) = (t \ u) \ v" by simp qed thus ?thesis by (metis (full_types) has_joins joinable_iff_join_not_null joinable_implies_con con_sym) qed lemma join_is_lub: assumes "t \ v" and "u \ v" shows "t \ u \ v" proof - have "(t \ u) \\ v = (t \\ v) \ (u \\ v)" using assms resid_join\<^sub>E(3) [of t u v] by (metis arr_prfx_join_self con_target con_sym join_assoc joinable_iff_con joinable_iff_join_not_null prfx_implies_con resid_reflects_con) also have "... = trg v \ trg v" using assms by (metis ideE prfx_implies_con src_resid trg_ide) also have "... 
= trg v" by (metis assms(2) ide_iff_src_self ide_implies_arr join_self prfx_implies_con src_resid) finally have "(t \ u) \\ v = trg v" by blast moreover have "ide (trg v)" using assms by (metis con_implies_arr(2) prfx_implies_con cong_char trg_def) ultimately show ?thesis by simp qed end subsubsection "Extensional RTS with Composites" text \ If an extensional RTS is assumed to have composites for all composable pairs of transitions, then the ``semantic'' property of transitions being composable can be replaced by the ``syntactic'' property of transitions being sequential. This results in simpler statements of a number of properties. \ locale extensional_rts_with_composites = rts_with_composites + extensional_rts begin lemma seq_implies_arr_comp: assumes "seq t u" shows "arr (t \ u)" using assms by (meson composable_iff_arr_comp composable_iff_seq) lemma arr_comp\<^sub>E\<^sub>C [intro, simp]: assumes "arr t" and "arr u" and "trg t = src u" shows "arr (t \ u)" using assms by (simp add: seq_implies_arr_comp) lemma arr_compE\<^sub>E\<^sub>C [elim]: assumes "arr (t \ u)" and "\arr t; arr u; trg t = src u\ \ T" shows T using assms composable_iff_arr_comp composable_iff_seq by blast lemma trg_comp\<^sub>E\<^sub>C [simp]: assumes "seq t u" shows "trg (t \ u) = trg u" by (meson assms has_composites trg_comp) lemma src_comp\<^sub>E\<^sub>C [simp]: assumes "seq t u" shows "src (t \ u) = src t" using assms src_comp has_composites by simp lemma con_comp_iff\<^sub>E\<^sub>C [simp]: shows "w \ t \ u \ seq t u \ u \ w \\ t" and "t \ u \ w \ seq t u \ u \ w \\ t" using composable_iff_seq con_comp_iff con_sym by meson+ lemma comp_assoc\<^sub>E\<^sub>C: shows "t \ (u \ v) = (t \ u) \ v" apply (cases "seq t u") apply (metis arr_comp comp_assoc comp_def not_arr_null arr_compE\<^sub>E\<^sub>C arr_comp\<^sub>E\<^sub>C seq_implies_arr_comp trg_comp\<^sub>E\<^sub>C) by (metis comp_def composable_iff_arr_comp seqI\<^sub>W\<^sub>E src_comp arr_compE\<^sub>E\<^sub>C) lemma diamond_commutes: shows "t \ (u \\ t) = u \ (t \\ u)" proof (cases "t \ u") show "\ t \ u \ ?thesis" by (metis comp_null(2) conI con_sym) assume con: "t \ u" have "(t \ (u \\ t)) \\ u = (t \\ u) \ ((u \\ t) \\ (u \\ t))" using con by (metis (no_types, lifting) arr_resid_iff_con con_compI(2) con_implies_arr(1) resid_comp(2) con_imp_arr_resid con_sym comp_def arr_comp\<^sub>E\<^sub>C src_resid conI) moreover have "u \ t \ (u \\ t)" by (metis arr_resid_iff_con calculation con cong_reflexive comp_arr_trg resid_arr_self resid_comp(1) trg_resid_sym) ultimately show ?thesis by (metis comp_eqI con comp_arr_trg resid_arr_self arr_resid trg_resid_sym) qed lemma mediating_transition: assumes "t \ v = u \ w" shows "v \\ (u \\ t) = w \\ (t \\ u)" proof (cases "seq t v") assume 1: "seq t v" hence 2: "arr (u \ w)" using assms by (metis arr_comp\<^sub>E\<^sub>C seqE\<^sub>W\<^sub>E) have 3: "v \\ (u \\ t) = ((t \ v) \\ t) \\ (u \\ t)" by (metis "1" comp_is_composite_of(1) composite_of_def obtains_composite_of extensional) also have "... = (t \ v) \\ (t \ (u \\ t))" by (metis (no_types, lifting) "2" assms con_comp_iff\<^sub>E\<^sub>C(2) con_imp_eq_src con_implies_arr(2) con_sym comp_resid_prfx prfx_comp resid_comp(1) arr_compE\<^sub>E\<^sub>C arr_comp\<^sub>E\<^sub>C prfx_implies_con) also have "... = (u \ w) \\ (u \ (t \\ u))" using assms diamond_commutes by presburger also have "... = ((u \ w) \\ u) \\ (t \\ u)" by (metis 3 assms calculation cube) also have "... 
= w \\ (t \\ u)" using 2 by simp finally show ?thesis by blast next assume 1: "\ seq t v" have "v \\ (u \\ t) = null" using 1 by (metis (mono_tags, lifting) arr_resid_iff_con coinitial_iff\<^sub>W\<^sub>E con_imp_coinitial seqI\<^sub>W\<^sub>E src_resid conI) also have "... = w \\ (t \\ u)" by (metis (no_types, lifting) "1" arr_comp\<^sub>E\<^sub>C assms composable_imp_seq con_imp_eq_src con_implies_arr(1) con_implies_arr(2) comp_def not_arr_null conI src_resid) finally show ?thesis by blast qed lemma induced_arrow: assumes "seq t u" and "t \ u = t' \ u'" shows "(t' \\ t) \ (u \\ (t' \\ t)) = u" and "(t \\ t') \ (u \\ (t' \\ t)) = u'" and "(t' \\ t) \ v = u \ v = u \\ (t' \\ t)" apply (metis assms comp_eqI arr_compE\<^sub>E\<^sub>C prfx_comp resid_comp(1) arr_resid_iff_con seq_implies_arr_comp) apply (metis assms comp_resid_prfx arr_compE\<^sub>E\<^sub>C resid_comp(2) arr_resid_iff_con seq_implies_arr_comp) by (metis assms(1) comp_resid_prfx seq_def) text \ If an extensional RTS has composites, then it automatically has joins. \ sublocale extensional_rts_with_joins proof fix t u assume con: "t \ u" have 1: "con u (t \ (u \\ t))" using con_compI(1) [of t "u \\ t" u] by (metis con con_implies_arr(1) con_sym diamond_commutes prfx_implies_con arr_resid prfx_comp src_resid arr_comp\<^sub>E\<^sub>C) have "t \ u = t \ (u \\ t)" proof (intro join_eqI) show "t \ t \ (u \\ t)" by (metis 1 composable_def comp_is_composite_of(2) composite_of_def con_comp_iff) moreover show 2: "u \ t \ (u \\ t)" using 1 arr_resid con con_sym prfx_reflexive resid_comp(1) by metis moreover show "(t \ (u \\ t)) \\ u = t \\ u" using 1 diamond_commutes induced_arrow(2) resid_comp(2) by force ultimately show "(t \ (u \\ t)) \\ t = u \\ t" by (metis con_comp_iff\<^sub>E\<^sub>C(1) con_sym prfx_implies_con resid_comp(2) induced_arrow(1)) qed thus "joinable t u" by (metis "1" con_implies_arr(2) joinable_iff_join_not_null not_arr_null) qed lemma join_expansion: assumes "t \ u" shows "t \ u = t \ (u \\ t)" and "seq t (u \\ t)" proof - show "t \ u = t \ (u \\ t)" by (metis assms comp_is_composite_of(2) has_joins join_is_join_of join_of_def) thus "seq t (u \\ t)" by (meson assms composable_def composable_iff_seq has_joins join_is_join_of join_of_def) qed lemma join3_expansion: assumes "t \ u" and "t \ v" and "u \ v" shows "(t \ u) \ v = (t \ (u \\ t)) \ ((v \\ t) \\ (u \\ t))" proof (cases "v \\ t \ u \\ t") show "\ v \\ t \ u \\ t \ ?thesis" by (metis assms(1) comp_null(2) join_expansion(1) joinable_implies_con resid_comp(1) join_def conI) assume 1: "v \\ t \ u \\ t " have "(t \ u) \ v = (t \ u) \ (v \\ (t \ u))" by (metis comp_null(1) diamond_commutes ex_un_null join_expansion(1) joinable_implies_con null_is_zero(2) join_def conI) also have "... = (t \ (u \\ t)) \ (v \\ (t \ u))" using join_expansion [of t u] assms(1) by presburger also have "... = (t \ (u \\ t)) \ ((v \\ u) \\ (t \\ u))" using assms 1 join_of_resid(1) [of t u v] cube [of v t u] by (metis con_compI(2) con_implies_arr(2) join_expansion(1) not_arr_null resid_comp(1) con_sym comp_def src_resid arr_comp\<^sub>E\<^sub>C) also have "... 
= (t \ (u \\ t)) \ ((v \\ t) \\ (u \\ t))" by (metis cube) finally show ?thesis by blast qed lemma resid_common_prefix: assumes "t \ u \ t \ v" shows "(t \ u) \\ (t \ v) = u \\ v" using assms by (metis con_comp_iff con_sym con_comp_iff\<^sub>E\<^sub>C(2) con_implies_arr(2) induced_arrow(1) resid_comp(1) resid_comp(2) residuation.arr_resid_iff_con residuation_axioms) end subsection "Confluence" text \ An RTS is \emph{confluent} if every coinitial pair of transitions is consistent. \ locale confluent_rts = rts + assumes confluence: "coinitial t u \ con t u" section "Simulations" text \ \emph{Simulations} are morphism of residuated transition systems. They are assumed to preserve consistency and residuation. \ locale simulation = A: rts A + B: rts B for A :: "'a resid" (infixr "\\\<^sub>A" 70) and B :: "'b resid" (infixr "\\\<^sub>B" 70) and F :: "'a \ 'b" + assumes extensional: "\ A.arr t \ F t = B.null" and preserves_con [simp]: "A.con t u \ B.con (F t) (F u)" and preserves_resid [simp]: "A.con t u \ F (t \\\<^sub>A u) = F t \\\<^sub>B F u" begin lemma preserves_reflects_arr [iff]: shows "B.arr (F t) \ A.arr t" by (metis A.arr_def B.con_implies_arr(2) B.not_arr_null extensional preserves_con) lemma preserves_ide [simp]: assumes "A.ide a" shows "B.ide (F a)" by (metis A.ideE assms preserves_con preserves_resid B.ideI) lemma preserves_sources: shows "F ` A.sources t \ B.sources (F t)" using A.sources_def B.sources_def preserves_con preserves_ide by auto lemma preserves_targets: shows "F ` A.targets t \ B.targets (F t)" by (metis A.arrE B.arrE A.sources_resid B.sources_resid equals0D image_subset_iff A.arr_iff_has_target preserves_reflects_arr preserves_resid preserves_sources) lemma preserves_trg: assumes "A.arr t" shows "F (A.trg t) = B.trg (F t)" using assms A.trg_def B.trg_def by auto lemma preserves_composites: assumes "A.composite_of t u v" shows "B.composite_of (F t) (F u) (F v)" using assms by (metis A.composite_ofE A.prfx_implies_con B.composite_of_def preserves_ide preserves_resid A.con_sym) lemma preserves_joins: assumes "A.join_of t u v" shows "B.join_of (F t) (F u) (F v)" using assms A.join_of_def B.join_of_def A.joinable_def by (metis A.joinable_implies_con preserves_composites preserves_resid) lemma preserves_prfx: assumes "A.prfx t u" shows "B.prfx (F t) (F u)" using assms by (metis A.prfx_implies_con preserves_ide preserves_resid) lemma preserves_cong: assumes "A.cong t u" shows "B.cong (F t) (F u)" using assms preserves_prfx by simp end subsection "Identity Simulation" locale identity_simulation = rts begin abbreviation map where "map \ \t. if arr t then t else null" sublocale simulation resid resid map using con_implies_arr con_sym arr_resid_iff_con by unfold_locales auto end subsection "Composite of Simulations" lemma simulation_comp: assumes "simulation A B F" and "simulation B C G" shows "simulation A C (G o F)" proof - interpret F: simulation A B F using assms(1) by auto interpret G: simulation B C G using assms(2) by auto show "simulation A C (G o F)" using F.extensional G.extensional by unfold_locales auto qed locale composite_simulation = F: simulation A B F + G: simulation B C G for A :: "'a resid" and B :: "'b resid" and C :: "'c resid" and F :: "'a \ 'b" and G :: "'b \ 'c" begin abbreviation map where "map \ G o F" sublocale simulation A C map using simulation_comp F.simulation_axioms G.simulation_axioms by blast lemma is_simulation: shows "simulation A C map" .. 
end subsection "Simulations into a Weakly Extensional RTS" locale simulation_to_weakly_extensional_rts = simulation + B: weakly_extensional_rts B begin lemma preserves_src: shows "\a. a \ A.sources t \ B.src (F t) = F a" by (metis equals0D image_subset_iff B.arr_iff_has_source preserves_sources B.arr_has_un_source B.src_in_sources) lemma preserves_trg: shows "\b. b \ A.targets t \ B.trg (F t) = F b" by (metis equals0D image_subset_iff B.arr_iff_has_target preserves_targets B.arr_has_un_target B.trg_in_targets) end subsection "Simulations into an Extensional RTS" locale simulation_to_extensional_rts = simulation + B: extensional_rts B begin lemma preserves_comp: assumes "A.composite_of t u v" shows "F v = B.comp (F t) (F u)" using assms by (metis preserves_composites B.comp_is_composite_of(2)) lemma preserves_join: assumes "A.join_of t u v" shows "F v = B.join (F t) (F u)" using assms preserves_joins by (meson B.join_is_join_of B.join_of_unique B.joinable_def) end subsection "Simulations between Extensional RTS's" locale simulation_between_extensional_rts = simulation_to_extensional_rts + A: extensional_rts A begin lemma preserves_src: shows "B.src (F t) = F (A.src t)" by (metis A.arr_src_iff_arr A.src_in_sources extensional image_subset_iff preserves_reflects_arr preserves_sources B.arr_has_un_source B.src_def B.src_in_sources) lemma preserves_trg: shows "B.trg (F t) = F (A.trg t)" by (metis A.arr_trg_iff_arr A.residuation_axioms A.trg_def B.null_is_zero(2) B.trg_def extensional preserves_resid residuation.arrE) lemma preserves_comp: assumes "A.composable t u" shows "F (A.comp t u) = B.comp (F t) (F u)" using assms by (metis A.arr_comp A.comp_resid_prfx A.composableD(2) A.not_arr_null A.prfx_comp A.residuation_axioms B.comp_eqI preserves_prfx preserves_resid residuation.conI) lemma preserves_join: assumes "A.joinable t u" shows "F (A.join t u) = B.join (F t) (F u)" using assms by (meson A.join_is_join_of B.joinable_def preserves_joins B.join_is_join_of B.join_of_unique) end subsection "Transformations" text \ A \emph{transformation} is a morphism of simulations, analogously to how a natural transformation is a morphism of functors, except the normal commutativity condition for that ``naturality squares'' is replaced by the requirement that the arrows at the apex of such a square are given by residuation of the arrows at the base. If the codomain RTS is extensional, then this condition implies the commutativity of the square with respect to composition, as would be the case for a natural transformation between functors. The proper way to define a transformation when the domain and codomain are general RTS's is not yet clear to me. However, if the domain and codomain are weakly extensional, then we have unique sources and targets, so there is no problem. The definition below is limited to that case. I do not make any attempt here to develop facts about transformations. My main reason for including this definition here is so that in the subsequent application to the \\\-calculus, I can exhibit \\\-reduction as an example of a transformation. 
\ locale transformation = A: weakly_extensional_rts A + B: weakly_extensional_rts B + F: simulation A B F + G: simulation A B G for A :: "'a resid" (infixr "\\\<^sub>A" 70) and B :: "'b resid" (infixr "\\\<^sub>B" 70) and F :: "'a \ 'b" and G :: "'a \ 'b" and \ :: "'a \ 'b" + assumes extensional: "\ A.arr f \ \ f = B.null" - and preserves_src: "A.arr f \ B.src (\ f) = F (A.src f)" - and preserves_trg: "A.arr f \ B.trg (\ f) = G (A.trg f)" + and preserves_src: "A.ide f \ B.src (\ f) = F (A.src f)" + and preserves_trg: "A.ide f \ B.trg (\ f) = G (A.trg f)" and naturality1: "A.arr f \ \ (A.src f) \\\<^sub>B F f = \ (A.trg f)" and naturality2: "A.arr f \ F f \\\<^sub>B \ (A.src f) = G f" + and naturality3: "A.arr f \ B.join_of (\ (A.src f)) (F f) (\ f)" section "Normal Sub-RTS's and Congruence" text \ We now develop a general quotient construction on an RTS. We define a \emph{normal sub-RTS} of an RTS to be a collection of transitions \\\ having certain ``local'' closure properties. A normal sub-RTS induces an equivalence relation \\\<^sub>0\, which we call \emph{semi-congruence}, by defining \t \\<^sub>0 u\ to hold exactly when \t \ u\ and \u \ t\ are both in \\\. This relation generalizes the relation \\\ defined for an arbitrary RTS, in the sense that \\\ is obtained when \\\ consists of all and only the identity transitions. However, in general the relation \\\<^sub>0\ is fully substitutive only in the left argument position of residuation; for the right argument position, a somewhat weaker property is satisfied. We then coarsen \\\<^sub>0\ to a relation \\\, by defining \t \ u\ to hold exactly when \t\ and \u\ can be transported by residuation along transitions in \\\ to a common source, in such a way that the residuals are related by \\\<^sub>0\. To obtain full substitutivity of \\\ with respect to residuation, we need to impose an additional condition on \\\. This condition, which we call \emph{coherence}, states that transporting a transition \t\ along parallel transitions \u\ and \v\ in \\\ always yields residuals \t \ u\ and \u \ t\ that are related by \\\<^sub>0\. We show that, under the assumption of coherence, the relation \\\ is fully substitutive, and the quotient of the original RTS by this relation is an extensional RTS which has the \\\-connected components of the original RTS as identities. Although the coherence property has a somewhat \emph{ad hoc} feel to it, we show that, in the context of the other conditions assumed for \\\, coherence is in fact equivalent to substitutivity for \\\. \ subsection "Normal Sub-RTS's" locale normal_sub_rts = R: rts + fixes \ :: "'a set" assumes elements_are_arr: "t \ \ \ R.arr t" and ide_closed: "R.ide a \ a \ \" and forward_stable: "\ u \ \; R.coinitial t u \ \ u \\ t \ \" and backward_stable: "\ u \ \; t \\ u \ \ \ \ t \ \" and composite_closed_left: "\ u \ \; R.seq u t \ \ \v. R.composite_of u t v" and composite_closed_right: "\ u \ \; R.seq t u \ \ \v. 
R.composite_of t u v" begin lemma prfx_closed: assumes "u \ \" and "R.prfx t u" shows "t \ \" using assms backward_stable ide_closed by blast lemma composite_closed: assumes "t \ \" and "u \ \" and "R.composite_of t u v" shows "v \ \" using assms backward_stable R.composite_of_def prfx_closed by blast lemma factor_closed: assumes "R.composite_of t u v" and "v \ \" shows "t \ \" and "u \ \" apply (metis assms R.composite_of_def prfx_closed) by (meson assms R.composite_of_def R.con_imp_coinitial forward_stable prfx_closed R.prfx_implies_con) lemma resid_along_elem_preserves_con: assumes "t \ t'" and "R.coinitial t u" and "u \ \" shows "t \\ u \ t' \\ u" proof - have "R.coinitial (t \\ t') (u \\ t')" by (metis assms R.arr_resid_iff_con R.coinitialI R.con_imp_common_source forward_stable elements_are_arr R.con_implies_arr(2) R.sources_resid R.sources_eqI) hence "t \\ t' \ u \\ t'" by (metis assms(3) R.coinitial_iff R.con_imp_coinitial R.con_sym elements_are_arr forward_stable R.arr_resid_iff_con) thus ?thesis using assms R.cube forward_stable by fastforce qed end subsubsection "Normal Sub-RTS's of an Extensional RTS with Composites" locale normal_in_extensional_rts_with_composites = R: extensional_rts + R: rts_with_composites + normal_sub_rts begin lemma factor_closed\<^sub>E\<^sub>C: assumes "t \ u \ \" shows "t \ \" and "u \ \" using assms factor_closed by (metis R.arrE R.composable_def R.comp_is_composite_of(2) R.con_comp_iff elements_are_arr)+ lemma comp_in_normal_iff: shows "t \ u \ \ \ t \ \ \ u \ \ \ R.seq t u" by (metis R.comp_is_composite_of(2) composite_closed elements_are_arr factor_closed(1-2) R.composable_def R.has_composites R.rts_with_composites_axioms R.extensional_rts_axioms extensional_rts_with_composites.arr_compE\<^sub>E\<^sub>C extensional_rts_with_composites_def R.seqI\<^sub>W\<^sub>E) end subsection "Semi-Congruence" context normal_sub_rts begin text \ We will refer to the elements of \\\ as \emph{normal transitions}. Generalizing identity transitions to normal transitions in the definition of congruence, we obtain the notion of \emph{semi-congruence} of transitions with respect to a normal sub-RTS. \ abbreviation Cong\<^sub>0 (infix "\\<^sub>0" 50) where "t \\<^sub>0 t' \ t \\ t' \ \ \ t' \\ t \ \" lemma Cong\<^sub>0_reflexive: assumes "R.arr t" shows "t \\<^sub>0 t" using assms R.cong_reflexive ide_closed by simp lemma Cong\<^sub>0_symmetric: assumes "t \\<^sub>0 t'" shows "t' \\<^sub>0 t" using assms by simp lemma Cong\<^sub>0_transitive [trans]: assumes "t \\<^sub>0 t'" and "t' \\<^sub>0 t''" shows "t \\<^sub>0 t''" by (metis (full_types) R.arr_resid_iff_con assms backward_stable forward_stable elements_are_arr R.coinitialI R.cube R.sources_resid) lemma Cong\<^sub>0_imp_con: assumes "t \\<^sub>0 t'" shows "R.con t t'" using assms R.arr_resid_iff_con elements_are_arr by blast lemma Cong\<^sub>0_imp_coinitial: assumes "t \\<^sub>0 t'" shows "R.sources t = R.sources t'" using assms by (meson Cong\<^sub>0_imp_con R.coinitial_iff R.con_imp_coinitial) text \ Semi-congruence is preserved and reflected by residuation along normal transitions. 
\ lemma Resid_along_normal_preserves_Cong\<^sub>0: assumes "t \\<^sub>0 t'" and "u \ \" and "R.sources t = R.sources u" shows "t \\ u \\<^sub>0 t' \\ u" by (metis Cong\<^sub>0_imp_coinitial R.arr_resid_iff_con R.coinitialI R.coinitial_def R.cube R.sources_resid assms elements_are_arr forward_stable) lemma Resid_along_normal_reflects_Cong\<^sub>0: assumes "t \\ u \\<^sub>0 t' \\ u" and "u \ \" shows "t \\<^sub>0 t'" using assms by (metis backward_stable R.con_imp_coinitial R.cube R.null_is_zero(2) forward_stable R.conI) text \ Semi-congruence is substitutive for the left-hand argument of residuation. \ lemma Cong\<^sub>0_subst_left: assumes "t \\<^sub>0 t'" and "t \ u" shows "t' \ u" and "t \\ u \\<^sub>0 t' \\ u" proof - have 1: "t \ u \ t \ t' \ u \\ t \ t' \\ t" using assms by (metis Resid_along_normal_preserves_Cong\<^sub>0 Cong\<^sub>0_imp_con Cong\<^sub>0_reflexive R.con_sym R.null_is_zero(2) R.arr_resid_iff_con R.sources_resid R.conI) hence 2: "t' \ u \ u \\ t \ t' \\ t \ (t \\ u) \\ (t' \\ u) = (t \\ t') \\ (u \\ t') \ (t' \\ u) \\ (t \\ u) = (t' \\ t) \\ (u \\ t)" by (meson R.con_sym R.cube R.resid_reflects_con) show "t' \ u" using 2 by simp show "t \\ u \\<^sub>0 t' \\ u" using assms 1 2 by (metis R.arr_resid_iff_con R.con_imp_coinitial R.cube forward_stable) qed text \ Semi-congruence is not exactly substitutive for residuation on the right. Instead, the following weaker property is satisfied. Obtaining exact substitutivity on the right is the motivation for defining a coarser notion of congruence below. \ lemma Cong\<^sub>0_subst_right: assumes "u \\<^sub>0 u'" and "t \ u" shows "t \ u'" and "(t \\ u) \\ (u' \\ u) \\<^sub>0 (t \\ u') \\ (u \\ u')" using assms apply (meson Cong\<^sub>0_subst_left(1) R.con_sym) using assms by (metis R.sources_resid Cong\<^sub>0_imp_con Cong\<^sub>0_reflexive Resid_along_normal_preserves_Cong\<^sub>0 R.arr_resid_iff_con residuation.cube R.residuation_axioms) lemma Cong\<^sub>0_subst_Con: assumes "t \\<^sub>0 t'" and "u \\<^sub>0 u'" shows "t \ u \ t' \ u'" using assms by (meson Cong\<^sub>0_subst_left(1) Cong\<^sub>0_subst_right(1)) lemma Cong\<^sub>0_cancel_left: assumes "R.composite_of t u v" and "R.composite_of t u' v'" and "v \\<^sub>0 v'" shows "u \\<^sub>0 u'" proof - have "u \\<^sub>0 v \\ t" using assms(1) ide_closed by blast also have "v \\ t \\<^sub>0 v' \\ t" by (meson assms(1,3) Cong\<^sub>0_subst_left(2) R.composite_of_def R.con_sym R.prfx_implies_con) also have "v' \\ t \\<^sub>0 u'" using assms(2) ide_closed by blast finally show ?thesis by auto qed lemma Cong\<^sub>0_iff: shows "t \\<^sub>0 t' \ (\u u' v v'. u \ \ \ u' \ \ \ v \\<^sub>0 v' \ R.composite_of t u v \ R.composite_of t' u' v')" proof (intro iffI) show "\u u' v v'. u \ \ \ u' \ \ \ v \\<^sub>0 v' \ R.composite_of t u v \ R.composite_of t' u' v' \ t \\<^sub>0 t'" by (meson Cong\<^sub>0_transitive R.composite_of_def ide_closed prfx_closed) show "t \\<^sub>0 t' \ \u u' v v'. 
u \ \ \ u' \ \ \ v \\<^sub>0 v' \ R.composite_of t u v \ R.composite_of t' u' v'" by (metis Cong\<^sub>0_imp_con Cong\<^sub>0_transitive R.composite_of_def R.prfx_reflexive R.arrI R.ideE) qed lemma diamond_commutes_upto_Cong\<^sub>0: assumes "t \ u" and "R.composite_of t (u \\ t) v" and "R.composite_of u (t \\ u) v'" shows "v \\<^sub>0 v'" proof - have "v \\ v \\<^sub>0 v' \\ v \ v' \\ v' \\<^sub>0 v \\ v'" proof- have 1: "(v \\ t) \\ (u \\ t) \\<^sub>0 (v' \\ u) \\ (t \\ u)" using assms(2-3) R.cube [of v t u] by (metis R.con_target R.composite_ofE R.ide_imp_con_iff_cong ide_closed R.conI) have 2: "v \\ v \\<^sub>0 v' \\ v" proof - have "v \\ v \\<^sub>0 (v \\ t) \\ (u \\ t)" using assms R.composite_of_def ide_closed by (meson R.composite_of_unq_upto_cong R.prfx_implies_con R.resid_composite_of(3)) also have "(v \\ t) \\ (u \\ t) \\<^sub>0 (v' \\ u) \\ (t \\ u)" using 1 by simp also have "(v' \\ u) \\ (t \\ u) \\<^sub>0 (v' \\ t) \\ (u \\ t)" by (metis "1" Cong\<^sub>0_transitive R.cube) also have "(v' \\ t) \\ (u \\ t) \\<^sub>0 v' \\ v" using assms R.composite_of_def ide_closed by (metis "1" R.conI R.con_sym_ax R.cube R.null_is_zero(2) R.resid_composite_of(3)) finally show ?thesis by auto qed moreover have "v' \\ v' \\<^sub>0 v \\ v'" proof - have "v' \\ v' \\<^sub>0 (v' \\ u) \\ (t \\ u)" using assms R.composite_of_def ide_closed by (meson R.composite_of_unq_upto_cong R.prfx_implies_con R.resid_composite_of(3)) also have "(v' \\ u) \\ (t \\ u) \\<^sub>0 (v \\ t) \\ (u \\ t)" using 1 by simp also have "(v \\ t) \\ (u \\ t) \\<^sub>0 (v \\ u) \\ (t \\ u)" using R.cube [of v t u] ide_closed by (metis Cong\<^sub>0_reflexive R.arr_resid_iff_con assms(2) R.composite_of_def R.prfx_implies_con) also have "(v \\ u) \\ (t \\ u) \\<^sub>0 v \\ v'" using assms R.composite_of_def ide_closed by (metis 2 R.conI elements_are_arr R.not_arr_null R.null_is_zero(2) R.resid_composite_of(3)) finally show ?thesis by auto qed ultimately show ?thesis by blast qed thus ?thesis by (metis assms(2-3) R.composite_of_unq_upto_cong R.resid_arr_ide Cong\<^sub>0_imp_con) qed subsection "Congruence" text \ We use semi-congruence to define a coarser relation as follows. \ definition Cong (infix "\" 50) where "Cong t t' \ \u u'. u \ \ \ u' \ \ \ t \\ u \\<^sub>0 t' \\ u'" lemma CongI [intro]: assumes "u \ \" and "u' \ \" and "t \\ u \\<^sub>0 t' \\ u'" shows "Cong t t'" using assms Cong_def by auto lemma CongE [elim]: assumes "t \ t'" obtains u u' where "u \ \" and "u' \ \" and "t \\ u \\<^sub>0 t' \\ u'" using assms Cong_def by auto lemma Cong_imp_arr: assumes "t \ t'" shows "R.arr t" and "R.arr t'" using assms Cong_def by (meson R.arr_resid_iff_con R.con_implies_arr(2) R.con_sym elements_are_arr)+ lemma Cong_reflexive: assumes "R.arr t" shows "t \ t" by (metis CongI Cong\<^sub>0_reflexive assms R.con_imp_coinitial_ax ide_closed R.resid_arr_ide R.arrE R.con_sym) lemma Cong_symmetric: assumes "t \ t'" shows "t' \ t" using assms Cong_def by auto text \ The existence of composites of normal transitions is used in the following. 
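Specifically, the proof of transitivity below uses \composite_closed_left\ to obtain composites of the normal transitions \u\ and \v'\ with residuals of other normal transitions; the resulting composites are again normal, and residuating \t\ and \t'\ along them yields the required semi-congruent residuals.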
\ lemma Cong_transitive [trans]: assumes "t \ t''" and "t'' \ t'" shows "t \ t'" proof - obtain u u'' where uu'': "u \ \ \ u'' \ \ \ t \\ u \\<^sub>0 t'' \\ u''" using assms Cong_def by blast obtain v' v'' where v'v'': "v' \ \ \ v'' \ \ \ t'' \\ v'' \\<^sub>0 t' \\ v'" using assms Cong_def by blast let ?w = "(t \\ u) \\ (v'' \\ u'')" let ?w' = "(t' \\ v') \\ (u'' \\ v'')" let ?w'' = "(t'' \\ v'') \\ (u'' \\ v'')" have w'': "?w'' = (t'' \\ u'') \\ (v'' \\ u'')" by (metis R.cube) have u''v'': "R.coinitial u'' v''" by (metis (full_types) R.coinitial_iff elements_are_arr R.con_imp_coinitial R.arr_resid_iff_con uu'' v'v'') hence v''u'': "R.coinitial v'' u''" by (meson R.con_imp_coinitial elements_are_arr forward_stable R.arr_resid_iff_con v'v'') have 1: "?w \\ ?w'' \ \" proof - have "(v'' \\ u'') \\ (t'' \\ u'') \ \" by (metis Cong\<^sub>0_transitive R.con_imp_coinitial forward_stable Cong\<^sub>0_imp_con resid_along_elem_preserves_con R.arrI R.arr_resid_iff_con u''v'' uu'' v'v'') thus ?thesis by (metis Cong\<^sub>0_subst_left(2) R.con_sym R.null_is_zero(1) uu'' w'' R.conI) qed have 2: "?w'' \\ ?w \ \" by (metis 1 Cong\<^sub>0_subst_left(2) uu'' w'' R.conI) have 3: "R.seq u (v'' \\ u'')" by (metis (full_types) 2 Cong\<^sub>0_imp_coinitial R.sources_resid Cong\<^sub>0_imp_con R.arr_resid_iff_con R.con_implies_arr(2) R.seqI uu'' R.conI) have 4: "R.seq v' (u'' \\ v'')" by (metis 1 Cong\<^sub>0_imp_coinitial Cong\<^sub>0_imp_con R.arr_resid_iff_con R.con_implies_arr(2) R.seq_def R.sources_resid v'v'' R.conI) obtain x where x: "R.composite_of u (v'' \\ u'') x" using 3 composite_closed_left uu'' by blast obtain x' where x': "R.composite_of v' (u'' \\ v'') x'" using 4 composite_closed_left v'v'' by presburger have "?w \\<^sub>0 ?w'" proof - have "?w \\<^sub>0 ?w'' \ ?w' \\<^sub>0 ?w''" using 1 2 by (metis Cong\<^sub>0_subst_left(2) R.null_is_zero(2) v'v'' R.conI) thus ?thesis using Cong\<^sub>0_transitive by blast qed moreover have "x \ \ \ ?w \\<^sub>0 t \\ x" apply (intro conjI) apply (meson composite_closed forward_stable u''v'' uu'' v'v'' x) apply (metis (full_types) R.arr_resid_iff_con R.con_implies_arr(2) R.con_sym ide_closed forward_stable R.composite_of_def R.resid_composite_of(3) Cong\<^sub>0_subst_right(1) prfx_closed u''v'' uu'' v'v'' x R.conI) by (metis (no_types, lifting) 1 R.con_composite_of_iff ide_closed R.resid_composite_of(3) R.arr_resid_iff_con R.con_implies_arr(1) R.con_sym x R.conI) moreover have "x' \ \ \ ?w' \\<^sub>0 t' \\ x'" apply (intro conjI) apply (meson composite_closed forward_stable uu'' v''u'' v'v'' x') apply (metis (full_types) Cong\<^sub>0_subst_right(1) R.composite_ofE R.con_sym ide_closed forward_stable R.con_imp_coinitial prfx_closed R.resid_composite_of(3) R.arr_resid_iff_con R.con_implies_arr(1) uu'' v'v'' x' R.conI) by (metis (full_types) Cong\<^sub>0_subst_left(1) R.composite_ofE R.con_sym ide_closed forward_stable R.con_imp_coinitial prfx_closed R.resid_composite_of(3) R.arr_resid_iff_con R.con_implies_arr(1) uu'' v'v'' x' R.conI) ultimately show "t \ t'" using Cong_def Cong\<^sub>0_transitive by metis qed lemma Cong_closure_props: shows "t \ u \ u \ t" and "\t \ u; u \ v\ \ t \ v" and "t \\<^sub>0 u \ t \ u" and "\u \ \; R.sources t = R.sources u\ \ t \ t \\ u" proof - show "t \ u \ u \ t" using Cong_symmetric by blast show "\t \ u; u \ v\ \ t \ v" using Cong_transitive by blast show "t \\<^sub>0 u \ t \ u" by (metis Cong\<^sub>0_subst_left(2) Cong_def Cong_reflexive R.con_implies_arr(1) R.null_is_zero(2) R.conI) show "\u \ \; R.sources t = R.sources u\ \ t 
\ t \\ u" proof - assume u: "u \ \" and coinitial: "R.sources t = R.sources u" obtain a where a: "a \ R.targets u" by (meson elements_are_arr empty_subsetI R.arr_iff_has_target subsetI subset_antisym u) have "t \\ u \\<^sub>0 (t \\ u) \\ a" proof - have "R.arr t" using R.arr_iff_has_source coinitial elements_are_arr u by presburger thus ?thesis by (meson u a R.arr_resid_iff_con coinitial ide_closed forward_stable elements_are_arr R.coinitial_iff R.composite_of_arr_target R.resid_composite_of(3)) qed thus ?thesis using Cong_def by (metis a R.composite_of_arr_target elements_are_arr factor_closed(2) u) qed qed lemma Cong\<^sub>0_implies_Cong: assumes "t \\<^sub>0 t'" shows "t \ t'" using assms Cong_closure_props(3) by simp lemma in_sources_respects_Cong: assumes "t \ t'" and "a \ R.sources t" and "a' \ R.sources t'" shows "a \ a'" proof - obtain u u' where uu': "u \ \ \ u' \ \ \ t \\ u \\<^sub>0 t' \\ u'" using assms Cong_def by blast show "a \ a'" proof show "u \ \" using uu' by simp show "u' \ \" using uu' by simp show "a \\ u \\<^sub>0 a' \\ u'" proof - have "a \\ u \ R.targets u" by (metis Cong\<^sub>0_imp_con R.arr_resid_iff_con assms(2) R.con_imp_common_source R.con_implies_arr(1) R.resid_source_in_targets R.sources_eqI uu') moreover have "a' \\ u' \ R.targets u'" by (metis Cong\<^sub>0_imp_con R.arr_resid_iff_con assms(3) R.con_imp_common_source R.resid_source_in_targets R.con_implies_arr(1) R.sources_eqI uu') moreover have "R.targets u = R.targets u'" by (metis Cong\<^sub>0_imp_coinitial Cong\<^sub>0_imp_con R.arr_resid_iff_con R.con_implies_arr(1) R.sources_resid uu') ultimately show ?thesis using ide_closed R.targets_are_cong by presburger qed qed qed lemma in_targets_respects_Cong: assumes "t \ t'" and "b \ R.targets t" and "b' \ R.targets t'" shows "b \ b'" proof - obtain u u' where uu': "u \ \ \ u' \ \ \ t \\ u \\<^sub>0 t' \\ u'" using assms Cong_def by blast have seq: "R.seq (u \\ t) ((t' \\ u') \\ (t \\ u)) \ R.seq (u' \\ t') ((t \\ u) \\ (t' \\ u'))" by (metis R.arr_iff_has_source R.arr_iff_has_target R.conI elements_are_arr R.not_arr_null R.seqI R.sources_resid R.targets_resid_sym uu') obtain v where v: "R.composite_of (u \\ t) ((t' \\ u') \\ (t \\ u)) v" using seq composite_closed_right uu' by presburger obtain v' where v': "R.composite_of (u' \\ t') ((t \\ u) \\ (t' \\ u')) v'" using seq composite_closed_right uu' by presburger show "b \ b'" proof show v_in_\: "v \ \" by (metis composite_closed R.con_imp_coinitial R.con_implies_arr(1) forward_stable R.composite_of_def R.prfx_implies_con R.arr_resid_iff_con R.con_sym uu' v) show v'_in_\: "v' \ \" by (metis backward_stable R.composite_of_def R.con_imp_coinitial forward_stable R.null_is_zero(2) prfx_closed uu' v' R.conI) show "b \\ v \\<^sub>0 b' \\ v'" using assms uu' v v' by (metis R.arr_resid_iff_con ide_closed R.seq_def R.sources_resid R.targets_resid_sym R.resid_source_in_targets seq R.sources_composite_of R.targets_are_cong R.targets_composite_of) qed qed lemma sources_are_Cong: assumes "a \ R.sources t" and "a' \ R.sources t" shows "a \ a'" using assms by (simp add: ide_closed R.sources_are_cong Cong_closure_props(3)) lemma targets_are_Cong: assumes "b \ R.targets t" and "b' \ R.targets t" shows "b \ b'" using assms by (simp add: ide_closed R.targets_are_cong Cong_closure_props(3)) text \ It is \emph{not} the case that sources and targets are \\\-closed; \emph{i.e.} \t \ t' \ sources t = sources t'\ and \t \ t' \ targets t = targets t'\ do not hold, in general. 
\ lemma Resid_along_normal_preserves_reflects_con: assumes "u \ \" and "R.sources t = R.sources u" shows "t \\ u \ t' \\ u \ t \ t'" by (metis R.arr_resid_iff_con assms R.con_implies_arr(1-2) elements_are_arr R.coinitial_iff R.resid_reflects_con resid_along_elem_preserves_con) text \ We can alternatively characterize \\\ as the least symmetric and transitive relation on transitions that extends \\\<^sub>0\ and has the property of being preserved by residuation along transitions in \\\. \ inductive Cong' where "\t u. Cong' t u \ Cong' u t" | "\t u v. \Cong' t u; Cong' u v\ \ Cong' t v" | "\t u. t \\<^sub>0 u \ Cong' t u" | "\t u. \R.arr t; u \ \; R.sources t = R.sources u\ \ Cong' t (t \\ u)" lemma Cong'_if: shows "\u \ \; u' \ \; t \\ u \\<^sub>0 t' \\ u'\ \ Cong' t t'" proof - assume u: "u \ \" and u': "u' \ \" and 1: "t \\ u \\<^sub>0 t' \\ u'" show "Cong' t t'" using u u' 1 by (metis (no_types, lifting) Cong'.simps Cong\<^sub>0_imp_con R.arr_resid_iff_con R.coinitial_iff R.con_imp_coinitial) qed lemma Cong_char: shows "Cong t t' \ Cong' t t'" proof - have "Cong t t' \ Cong' t t'" using Cong_def Cong'_if by blast moreover have "Cong' t t' \ Cong t t'" apply (induction rule: Cong'.induct) using Cong_symmetric apply simp using Cong_transitive apply simp using Cong_closure_props(3) apply simp using Cong_closure_props(4) by simp ultimately show ?thesis using Cong_def by blast qed lemma normal_is_Cong_closed: assumes "t \ \" and "t \ t'" shows "t' \ \" using assms by (metis (full_types) CongE R.con_imp_coinitial forward_stable R.null_is_zero(2) backward_stable R.conI) subsection "Congruence Classes" text \ Here we develop some notions relating to the congruence classes of \\\. \ definition Cong_class ("\_\") where "Cong_class t \ {t'. t \ t'}" definition is_Cong_class where "is_Cong_class \ \ \t. t \ \ \ \ = \t\" definition Cong_class_rep where "Cong_class_rep \ \ SOME t. t \ \" lemma Cong_class_is_nonempty: assumes "is_Cong_class \" shows "\ \ {}" using assms is_Cong_class_def Cong_class_def by auto lemma rep_in_Cong_class: assumes "is_Cong_class \" shows "Cong_class_rep \ \ \" using assms is_Cong_class_def Cong_class_rep_def someI_ex [of "\t. t \ \"] by metis lemma arr_in_Cong_class: assumes "R.arr t" shows "t \ \t\" using assms Cong_class_def Cong_reflexive by simp lemma is_Cong_classI: assumes "R.arr t" shows "is_Cong_class \t\" using assms Cong_class_def is_Cong_class_def Cong_reflexive by blast lemma is_Cong_classI' [intro]: assumes "\ \ {}" and "\t t'. \t \ \; t' \ \\ \ t \ t'" and "\t t'. 
\t \ \; t' \ t\ \ t' \ \" shows "is_Cong_class \" proof - obtain t where t: "t \ \" using assms by auto have "\ = \t\" unfolding Cong_class_def using assms(2-3) t by blast thus ?thesis using is_Cong_class_def t by blast qed lemma Cong_class_memb_is_arr: assumes "is_Cong_class \" and "t \ \" shows "R.arr t" using assms Cong_class_def is_Cong_class_def Cong_imp_arr(2) by force lemma Cong_class_membs_are_Cong: assumes "is_Cong_class \" and "t \ \" and "t' \ \" shows "Cong t t'" using assms Cong_class_def is_Cong_class_def by (metis CollectD Cong_closure_props(2) Cong_symmetric) lemma Cong_class_eqI: assumes "t \ t'" shows "\t\ = \t'\" using assms Cong_class_def by (metis (full_types) Collect_cong Cong'.intros(1-2) Cong_char) lemma Cong_class_eqI': assumes "is_Cong_class \" and "is_Cong_class \" and "\ \ \ \ {}" shows "\ = \" using assms is_Cong_class_def Cong_class_eqI Cong_class_membs_are_Cong by (metis (no_types, lifting) Int_emptyI) lemma is_Cong_classE [elim]: assumes "is_Cong_class \" and "\\ \ {}; \t t'. \t \ \; t' \ \\ \ t \ t'; \t t'. \t \ \; t' \ t\ \ t' \ \\ \ T" shows T proof - have \: "\ \ {}" using assms Cong_class_is_nonempty by simp moreover have 1: "\t t'. \t \ \; t' \ \\ \ t \ t'" using assms Cong_class_membs_are_Cong by metis moreover have "\t t'. \t \ \; t' \ t\ \ t' \ \" using assms Cong_class_def by (metis 1 Cong_class_eqI Cong_imp_arr(1) is_Cong_class_def arr_in_Cong_class) ultimately show ?thesis using assms by blast qed lemma Cong_class_rep [simp]: assumes "is_Cong_class \" shows "\Cong_class_rep \\ = \" by (metis Cong_class_membs_are_Cong Cong_class_eqI assms is_Cong_class_def rep_in_Cong_class) lemma Cong_class_memb_Cong_rep: assumes "is_Cong_class \" and "t \ \" shows "Cong t (Cong_class_rep \)" using assms Cong_class_membs_are_Cong rep_in_Cong_class by simp lemma composite_of_normal_arr: shows "\ R.arr t; u \ \; R.composite_of u t t' \ \ t' \ t" by (meson Cong'.intros(3) Cong_char R.composite_of_def R.con_implies_arr(2) ide_closed R.prfx_implies_con Cong_closure_props(2,4) R.sources_composite_of) lemma composite_of_arr_normal: shows "\ arr t; u \ \; R.composite_of t u t' \ \ t' \\<^sub>0 t" by (meson Cong_closure_props(3) R.composite_of_def ide_closed prfx_closed) end subsection "Coherent Normal Sub-RTS's" text \ A \emph{coherent} normal sub-RTS is one that satisfies a parallel moves property with respect to arbitrary transitions. The congruence \\\ induced by a coherent normal sub-RTS is fully substitutive with respect to consistency and residuation, and in fact coherence is equivalent to substitutivity in this context. \ locale coherent_normal_sub_rts = normal_sub_rts + assumes coherent: "\ R.arr t; u \ \; u' \ \; R.sources u = R.sources u'; R.targets u = R.targets u'; R.sources t = R.sources u \ \ t \\ u \\<^sub>0 t \\ u'" (* * TODO: Should coherence be part of normality, or is it an additional property that guarantees * the existence of the quotient? * * e.g. see http://nlab-pages.s3.us-east-2.amazonaws.com/nlab/show/normal+subobject * Maybe also http://www.tac.mta.ca/tac/volumes/36/3/36-03.pdf for recent work. *) context normal_sub_rts begin text \ The above ``parallel moves'' formulation of coherence is equivalent to the following formulation, which involves ``opposing spans''. \ lemma coherent_iff: shows "(\t u u'. R.arr t \ u \ \ \ u' \ \ \ R.sources t = R.sources u \ R.sources u = R.sources u' \ R.targets u = R.targets u' \ t \\ u \\<^sub>0 t \\ u') \ (\t t' v v' w w'. 
v \ \ \ v' \ \ \ w \ \ \ w' \ \ \ R.sources v = R.sources w \ R.sources v' = R.sources w' \ R.targets w = R.targets w' \ t \\ v \\<^sub>0 t' \\ v' \ t \\ w \\<^sub>0 t' \\ w')" proof assume 1: "\t t' v v' w w'. v \ \ \ v' \ \ \ w \ \ \ w' \ \ \ R.sources v = R.sources w \ R.sources v' = R.sources w' \ R.targets w = R.targets w' \ t \\ v \\<^sub>0 t' \\ v' \ t \\ w \\<^sub>0 t' \\ w'" show "\t u u'. R.arr t \ u \ \ \ u' \ \ \ R.sources t = R.sources u \ R.sources u = R.sources u' \ R.targets u = R.targets u' \ t \\ u \\<^sub>0 t \\ u'" proof (intro allI impI, elim conjE) fix t u u' assume t: "R.arr t" and u: "u \ \" and u': "u' \ \" and tu: "R.sources t = R.sources u" and sources: "R.sources u = R.sources u'" and targets: "R.targets u = R.targets u'" show "t \\ u \\<^sub>0 t \\ u'" by (metis 1 Cong\<^sub>0_reflexive Resid_along_normal_preserves_Cong\<^sub>0 sources t targets tu u u') qed next assume 1: "\t u u'. R.arr t \ u \ \ \ u' \ \ \ R.sources t = R.sources u \ R.sources u = R.sources u' \ R.targets u = R.targets u' \ t \\ u \\<^sub>0 t \\ u'" show "\t t' v v' w w'. v \ \ \ v' \ \ \ w \ \ \ w' \ \ \ R.sources v = R.sources w \ R.sources v' = R.sources w' \ R.targets w = R.targets w' \ t \\ v \\<^sub>0 t' \\ v' \ t \\ w \\<^sub>0 t' \\ w'" proof (intro allI impI, elim conjE) fix t t' v v' w w' assume v: "v \ \" and v': "v' \ \" and w: "w \ \" and w': "w' \ \" and vw: "R.sources v = R.sources w" and v'w': "R.sources v' = R.sources w'" and ww': "R.targets w = R.targets w'" and tvt'v': "(t \\ v) \\ (t' \\ v') \ \" and t'v'tv: "(t' \\ v') \\ (t \\ v) \ \" show "t \\ w \\<^sub>0 t' \\ w'" proof - have 3: "R.sources t = R.sources v \ R.sources t' = R.sources v'" using R.con_imp_coinitial by (meson Cong\<^sub>0_imp_con tvt'v' t'v'tv R.coinitial_iff R.arr_resid_iff_con) have 2: "t \\ w \ t' \\ w'" using Cong_closure_props by (metis tvt'v' t'v'tv 3 vw v'w' v v' w w') obtain z z' where zz': "z \ \ \ z' \ \ \ (t \\ w) \\ z \\<^sub>0 (t' \\ w') \\ z'" using 2 by auto have "(t \\ w) \\ z \\<^sub>0 (t \\ w) \\ z'" proof - have "R.coinitial ((t \\ w) \\ z) ((t \\ w) \\ z')" by (metis Cong\<^sub>0_imp_coinitial Cong_imp_arr(1) Resid_along_normal_preserves_reflects_con R.arr_def R.coinitialI R.con_imp_common_source Cong_closure_props(3) R.arr_resid_iff_con R.sources_eqI R.sources_resid ww' zz') thus ?thesis apply (intro conjI) by (metis 1 R.coinitial_iff R.con_imp_coinitial R.arr_resid_iff_con R.sources_resid zz')+ qed hence "(t \\ w) \\ z' \\<^sub>0 (t' \\ w') \\ z'" using zz' Cong\<^sub>0_transitive Cong\<^sub>0_symmetric by blast thus ?thesis using zz' Resid_along_normal_reflects_Cong\<^sub>0 by metis qed qed qed end context coherent_normal_sub_rts begin text \ The proof of the substitutivity of \\\ with respect to residuation only uses coherence in the ``opposing spans'' form. \ lemma coherent': assumes "v \ \" and "v' \ \" and "w \ \" and "w' \ \" and "R.sources v = R.sources w" and "R.sources v' = R.sources w'" and "R.targets w = R.targets w'" and "t \\ v \\<^sub>0 t' \\ v'" shows "t \\ w \\<^sub>0 t' \\ w'" using assms coherent coherent_iff by metis (* 6 sec *) text \ The relation \\\ is substitutive with respect to both arguments of residuation. 
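Stated informally, as a reading aid for the lemma that follows (writing $\frown$ for
consistency and $\backslash$ for residuation):
\[
  t \approx t' \;\wedge\; u \approx u' \;\wedge\; t \frown u \;\wedge\;
  \mathrm{sources}\;t' = \mathrm{sources}\;u'
  \;\Longrightarrow\;
  t' \frown u' \;\wedge\; t \backslash u \approx t' \backslash u'.
\]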
\ lemma Cong_subst: assumes "t \ t'" and "u \ u'" and "t \ u" and "R.sources t' = R.sources u'" shows "t' \ u'" and "t \\ u \ t' \\ u'" proof - obtain v v' where vv': "v \ \ \ v' \ \ \ t \\ v \\<^sub>0 t' \\ v'" using assms by auto obtain w w' where ww': "w \ \ \ w' \ \ \ u \\ w \\<^sub>0 u' \\ w'" using assms by auto let ?x = "t \\ v" and ?x' = "t' \\ v'" let ?y = "u \\ w" and ?y' = "u' \\ w'" have xx': "?x \\<^sub>0 ?x'" using assms vv' by blast have yy': "?y \\<^sub>0 ?y'" using assms ww' by blast have 1: "t \\ w \\<^sub>0 t' \\ w'" proof - have "R.sources v = R.sources w" by (metis (no_types, lifting) Cong\<^sub>0_imp_con R.arr_resid_iff_con assms(3) R.con_imp_common_source R.con_implies_arr(2) R.sources_eqI ww' xx') moreover have "R.sources v' = R.sources w'" by (metis (no_types, lifting) assms(4) R.coinitial_iff R.con_imp_coinitial Cong\<^sub>0_imp_con R.arr_resid_iff_con ww' xx') moreover have "R.targets w = R.targets w'" by (metis Cong\<^sub>0_implies_Cong Cong\<^sub>0_imp_coinitial Cong_imp_arr(1) R.arr_resid_iff_con R.sources_resid ww') ultimately show ?thesis using assms vv' ww' by (intro coherent' [of v v' w w' t]) auto qed have 2: "t' \\ w' \ u' \\ w'" using assms 1 ww' by (metis Cong\<^sub>0_subst_left(1) Cong\<^sub>0_subst_right(1) Resid_along_normal_preserves_reflects_con R.arr_resid_iff_con R.coinitial_iff R.con_imp_coinitial elements_are_arr) thus 3: "t' \ u'" using ww' R.cube by force have "t \\ u \ ((t \\ u) \\ (w \\ u)) \\ (?y' \\ ?y)" proof - have "t \\ u \ (t \\ u) \\ (w \\ u)" by (metis Cong_closure_props(4) assms(3) R.con_imp_coinitial elements_are_arr forward_stable R.arr_resid_iff_con R.con_implies_arr(1) R.sources_resid ww') also have "... \ ((t \\ u) \\ (w \\ u)) \\ (?y' \\ ?y)" by (metis Cong\<^sub>0_imp_con Cong_closure_props(4) Cong_imp_arr(2) R.arr_resid_iff_con calculation R.con_implies_arr(2) R.targets_resid_sym R.sources_resid ww') finally show ?thesis by simp qed also have "... \ (((t \\ w) \\ ?y) \\ (?y' \\ ?y))" using ww' by (metis Cong_imp_arr(2) Cong_reflexive calculation R.cube) also have "... \ (((t' \\ w') \\ ?y) \\ (?y' \\ ?y))" using 1 Cong\<^sub>0_subst_left(2) [of "t \\ w" "(t' \\ w')" ?y] Cong\<^sub>0_subst_left(2) [of "(t \\ w) \\ ?y" "(t' \\ w') \\ ?y" "?y' \\ ?y"] by (meson 2 Cong\<^sub>0_implies_Cong Cong\<^sub>0_subst_Con Cong_imp_arr(2) R.arr_resid_iff_con calculation ww') also have "... \ ((t' \\ w') \\ ?y') \\ (?y \\ ?y')" using 2 Cong\<^sub>0_implies_Cong Cong\<^sub>0_subst_right(2) ww' by presburger also have 4: "... \ (t' \\ u') \\ (w' \\ u')" using 2 ww' by (metis Cong\<^sub>0_imp_con Cong_closure_props(4) Cong_symmetric R.cube R.sources_resid) also have "... 
\ t' \\ u'" using ww' 3 4 by (metis Cong_closure_props(4) Cong_imp_arr(2) Cong_symmetric R.con_imp_coinitial R.con_implies_arr(2) forward_stable R.sources_resid R.arr_resid_iff_con) finally show "t \\ u \ t' \\ u'" by simp qed lemma Cong_subst_con: assumes "R.sources t = R.sources u" and "R.sources t' = R.sources u'" and "t \ t'" and "u \ u'" shows "t \ u \ t' \ u'" using assms by (meson Cong_subst(1) Cong_symmetric) lemma Cong\<^sub>0_composite_of_arr_normal: assumes "R.composite_of t u t'" and "u \ \" shows "t' \\<^sub>0 t" using assms backward_stable R.composite_of_def ide_closed by blast lemma Cong_composite_of_normal_arr: assumes "R.composite_of u t t'" and "u \ \" shows "t' \ t" using assms by (meson Cong_closure_props(2-4) R.arr_composite_of ide_closed R.composite_of_def R.sources_composite_of) end context normal_sub_rts begin text \ Coherence is not an arbitrary property: here we show that substitutivity of congruence in residuation is equivalent to the ``opposing spans'' form of coherence. \ lemma Cong_subst_iff_coherent': shows "(\t t' u u'. t \ t' \ u \ u' \ t \ u \ R.sources t' = R.sources u' \ t' \ u' \ t \\ u \ t' \\ u') \ (\t t' v v' w w'. v \ \ \ v' \ \ \ w \ \ \ w' \ \ \ R.sources v = R.sources w \ R.sources v' = R.sources w' \ R.targets w = R.targets w' \ t \\ v \\<^sub>0 t' \\ v' \ t \\ w \\<^sub>0 t' \\ w')" proof assume 1: "\t t' u u'. t \ t' \ u \ u' \ t \ u \ R.sources t' = R.sources u' \ t' \ u' \ t \\ u \ t' \\ u'" show "\t t' v v' w w'. v \ \ \ v' \ \ \ w \ \ \ w' \ \ \ R.sources v = R.sources w \ R.sources v' = R.sources w' \ R.targets w = R.targets w' \ t \\ v \\<^sub>0 t' \\ v' \ t \\ w \\<^sub>0 t' \\ w'" proof (intro allI impI, elim conjE) fix t t' v v' w w' assume v: "v \ \" and v': "v' \ \" and w: "w \ \" and w': "w' \ \" and sources_vw: "R.sources v = R.sources w" and sources_v'w': "R.sources v' = R.sources w'" and targets_ww': "R.targets w = R.targets w'" and tt': "(t \\ v) \\ (t' \\ v') \ \" and t't: "(t' \\ v') \\ (t \\ v) \ \" show "t \\ w \\<^sub>0 t' \\ w'" proof - have 2: "\t t' u u'. \t \ t'; u \ u'; t \ u; R.sources t' = R.sources u'\ \ t' \ u' \ t \\ u \ t' \\ u'" using 1 by blast have 3: "t \\ w \ t \\ v \ t' \\ w' \ t' \\ v'" by (metis tt' t't sources_vw sources_v'w' Cong\<^sub>0_subst_right(2) Cong_closure_props(4) Cong_def R.arr_resid_iff_con Cong_closure_props(3) Cong_imp_arr(1) normal_is_Cong_closed v w v' w') have "(t \\ w) \\ (t' \\ w') \ (t \\ v) \\ (t' \\ v')" using 2 [of "t \\ w" "t \\ v" "t' \\ w'" "t' \\ v'"] 3 by (metis tt' t't targets_ww' 1 Cong\<^sub>0_imp_con Cong_imp_arr(1) Cong_symmetric R.arr_resid_iff_con R.sources_resid) moreover have "(t' \\ w') \\ (t \\ w) \ (t' \\ v') \\ (t \\ v)" using 2 3 by (metis tt' t't targets_ww' Cong\<^sub>0_imp_con Cong_symmetric Cong_imp_arr(1) R.arr_resid_iff_con R.sources_resid) ultimately show ?thesis by (meson tt' t't normal_is_Cong_closed Cong_symmetric) qed qed next assume 1: "\t t' v v' w w'. v \ \ \ v' \ \ \ w \ \ \ w' \ \ \ R.sources v = R.sources w \ R.sources v' = R.sources w' \ R.targets w = R.targets w' \ t \\ v \\<^sub>0 t' \\ v' \ t \\ w \\<^sub>0 t' \\ w'" show "\t t' u u'. t \ t' \ u \ u' \ t \ u \ R.sources t' = R.sources u' \ t' \ u' \ t \\ u \ t' \\ u'" proof (intro allI impI, elim conjE, intro conjI) have *: "\t t' v v' w w'. 
\v \ \; v' \ \; w \ \; w' \ \; R.sources v = R.sources w; R.sources v' = R.sources w'; R.targets v = R.targets v'; R.targets w = R.targets w'; t \\ v \\<^sub>0 t' \\ v'\ \ t \\ w \\<^sub>0 t' \\ w'" using 1 by metis fix t t' u u' assume tt': "t \ t'" and uu': "u \ u'" and con: "t \ u" and t'u': "R.sources t' = R.sources u'" obtain v v' where vv': "v \ \ \ v' \ \ \ t \\ v \\<^sub>0 t' \\ v'" using tt' by auto obtain w w' where ww': "w \ \ \ w' \ \ \ u \\ w \\<^sub>0 u' \\ w'" using uu' by auto let ?x = "t \\ v" and ?x' = "t' \\ v'" let ?y = "u \\ w" and ?y' = "u' \\ w'" have xx': "?x \\<^sub>0 ?x'" using tt' vv' by blast have yy': "?y \\<^sub>0 ?y'" using uu' ww' by blast have 1: "t \\ w \\<^sub>0 t' \\ w'" proof - have "R.sources v = R.sources w \ R.sources v' = R.sources w'" proof show "R.sources v' = R.sources w'" using Cong\<^sub>0_imp_con R.arr_resid_iff_con R.coinitial_iff R.con_imp_coinitial t'u' vv' ww' by metis show "R.sources v = R.sources w" by (metis con elements_are_arr R.not_arr_null R.null_is_zero(2) R.conI R.con_imp_common_source rts.sources_eqI R.rts_axioms vv' ww') qed moreover have "R.targets v = R.targets v' \ R.targets w = R.targets w'" by (metis Cong\<^sub>0_imp_coinitial Cong\<^sub>0_imp_con R.arr_resid_iff_con R.con_implies_arr(2) R.sources_resid vv' ww') ultimately show ?thesis using vv' ww' xx' by (intro * [of v v' w w' t t']) auto qed have 2: "t' \\ w' \ u' \\ w'" using 1 tt' ww' by (meson Cong\<^sub>0_imp_con Cong\<^sub>0_subst_Con R.arr_resid_iff_con con R.con_imp_coinitial R.con_implies_arr(2) resid_along_elem_preserves_con) thus 3: "t' \ u'" using ww' R.cube by force have "t \\ u \ (t \\ u) \\ (w \\ u)" by (metis Cong_closure_props(4) R.arr_resid_iff_con con R.con_imp_coinitial elements_are_arr forward_stable R.con_implies_arr(2) R.sources_resid ww') also have "(t \\ u) \\ (w \\ u) \ ((t \\ u) \\ (w \\ u)) \\ (?y' \\ ?y)" using yy' by (metis Cong\<^sub>0_imp_con Cong_closure_props(4) Cong_imp_arr(2) R.arr_resid_iff_con calculation R.con_implies_arr(2) R.sources_resid R.targets_resid_sym) also have "... \ (((t \\ w) \\ ?y) \\ (?y' \\ ?y))" using ww' by (metis Cong_imp_arr(2) Cong_reflexive calculation R.cube) also have "... \ (((t' \\ w') \\ ?y) \\ (?y' \\ ?y))" proof - have "((t \\ w) \\ ?y) \\ (?y' \\ ?y) \\<^sub>0 ((t' \\ w') \\ ?y) \\ (?y' \\ ?y)" using 1 2 Cong\<^sub>0_subst_left(2) by (meson Cong\<^sub>0_subst_Con calculation Cong_imp_arr(2) R.arr_resid_iff_con ww') thus ?thesis using Cong\<^sub>0_implies_Cong by presburger qed also have "... \ ((t' \\ w') \\ ?y') \\ (?y \\ ?y')" by (meson "2" Cong\<^sub>0_implies_Cong Cong\<^sub>0_subst_right(2) ww') also have 4: "... \ (t' \\ u') \\ (w' \\ u')" using 2 ww' by (metis Cong\<^sub>0_imp_con Cong_closure_props(4) Cong_symmetric R.cube R.sources_resid) also have "... \ t' \\ u'" using ww' 2 3 4 by (metis Cong'.intros(1) Cong'.intros(4) Cong_char Cong_imp_arr(2) R.arr_resid_iff_con forward_stable R.con_imp_coinitial R.sources_resid R.con_implies_arr(2)) finally show "t \\ u \ t' \\ u'" by simp qed qed end subsection "Quotient by Coherent Normal Sub-RTS" text \ We now define the quotient of an RTS by a coherent normal sub-RTS and show that it is an extensional RTS. \ locale quotient_by_coherent_normal = R: rts + N: coherent_normal_sub_rts begin definition Resid (infix "\\\\" 70) where "\ \\\\ \ \ if N.is_Cong_class \ \ N.is_Cong_class \ \ (\t u. t \ \ \ u \ \ \ t \ u) then N.Cong_class (fst (SOME tu. fst tu \ \ \ snd tu \ \ \ fst tu \ snd tu) \\ snd (SOME tu. 
fst tu \ \ \ snd tu \ \ \ fst tu \ snd tu)) else {}" sublocale partial_magma Resid using N.Cong_class_is_nonempty Resid_def by unfold_locales metis lemma is_partial_magma: shows "partial_magma Resid" .. lemma null_char: shows "null = {}" using N.Cong_class_is_nonempty Resid_def by (metis null_is_zero(2)) lemma Resid_by_members: assumes "N.is_Cong_class \" and "N.is_Cong_class \" and "t \ \" and "u \ \" and "t \ u" shows "\ \\\\ \ = \t \\ u\" using assms Resid_def someI_ex [of "\tu. fst tu \ \ \ snd tu \ \ \ fst tu \ snd tu"] apply simp by (meson N.Cong_class_membs_are_Cong N.Cong_class_eqI N.Cong_subst(2) R.coinitial_iff R.con_imp_coinitial) abbreviation Con (infix "\\\" 50) where "\ \\\ \ \ \ \\\\ \ \ {}" lemma Con_char: shows "\ \\\ \ \ N.is_Cong_class \ \ N.is_Cong_class \ \ (\t u. t \ \ \ u \ \ \ t \ u)" by (metis (no_types, opaque_lifting) N.Cong_class_is_nonempty N.is_Cong_classI Resid_def Resid_by_members R.arr_resid_iff_con) lemma Con_sym: assumes "Con \ \" shows "Con \ \" using assms Con_char R.con_sym by meson lemma is_Cong_class_Resid: assumes "\ \\\ \" shows "N.is_Cong_class (\ \\\\ \)" using assms Con_char Resid_by_members R.arr_resid_iff_con N.is_Cong_classI by auto lemma Con_witnesses: assumes "\ \\\ \" and "t \ \" and "u \ \" shows "\v w. v \ \ \ w \ \ \ t \\ v \ u \\ w" proof - have 1: "N.is_Cong_class \ \ N.is_Cong_class \ \ (\t u. t \ \ \ u \ \ \ t \ u)" using assms Con_char by simp obtain t' u' where t'u': "t' \ \ \ u' \ \ \ t' \ u'" using 1 by auto have 2: "t' \ t \ u' \ u" using assms 1 t'u' N.Cong_class_membs_are_Cong by auto obtain v v' where vv': "v \ \ \ v' \ \ \ t' \\ v \\<^sub>0 t \\ v'" using 2 by auto obtain w w' where ww': "w \ \ \ w' \ \ \ u' \\ w \\<^sub>0 u \\ w'" using 2 by auto have 3: "w \ v" by (metis R.arr_resid_iff_con R.con_def R.con_imp_coinitial R.ex_un_null N.elements_are_arr R.null_is_zero(2) N.resid_along_elem_preserves_con t'u' vv' ww') have "R.seq v (w \\ v)" by (simp add: N.elements_are_arr R.seq_def 3 vv') obtain x where x: "R.composite_of v (w \\ v) x" using N.composite_closed_left \R.seq v (w \ v)\ vv' by blast obtain x' where x': "R.composite_of v' (w \\ v) x'" using x vv' N.composite_closed_left by (metis N.Cong\<^sub>0_implies_Cong N.Cong\<^sub>0_imp_coinitial N.Cong_imp_arr(1) R.composable_def R.composable_imp_seq R.con_implies_arr(2) R.seq_def R.sources_resid R.arr_resid_iff_con) have *: "t' \\ x \\<^sub>0 t \\ x'" by (metis N.coherent' N.composite_closed N.forward_stable R.con_imp_coinitial R.targets_composite_of 3 R.con_sym R.sources_composite_of vv' ww' x x') obtain y where y: "R.composite_of w (v \\ w) y" using x vv' ww' by (metis R.arr_resid_iff_con R.composable_def R.composable_imp_seq R.con_imp_coinitial R.seq_def R.sources_resid N.elements_are_arr N.forward_stable N.composite_closed_left) obtain y' where y': "R.composite_of w' (v \\ w) y'" using y ww' by (metis N.Cong\<^sub>0_imp_coinitial N.Cong_closure_props(3) N.Cong_imp_arr(1) R.composable_def R.composable_imp_seq R.con_implies_arr(2) R.seq_def R.sources_resid N.composite_closed_left R.arr_resid_iff_con) have **: "u' \\ y \\<^sub>0 u \\ y'" by (metis N.composite_closed N.forward_stable R.con_imp_coinitial R.targets_composite_of \w \ v\ N.coherent' R.sources_composite_of vv' ww' y y') have 4: "x \ \ \ y \ \" using x y vv' ww' * ** by (metis 3 N.composite_closed N.forward_stable R.con_imp_coinitial R.con_sym) have "t \\ x' \ u \\ y'" proof - have "t \\ x' \\<^sub>0 t' \\ x" using * by simp moreover have "t' \\ x \ u' \\ y" proof - have "t' \\ x \ u' \\ x" using t'u' vv' ww' 
4 * by (metis N.Resid_along_normal_preserves_reflects_con N.elements_are_arr R.coinitial_iff R.con_imp_coinitial R.arr_resid_iff_con) moreover have "u' \\ x \\<^sub>0 u' \\ y" using ww' x y by (metis 4 N.Cong\<^sub>0_imp_coinitial N.Cong\<^sub>0_imp_con N.Cong\<^sub>0_transitive N.coherent' N.factor_closed(2) R.sources_composite_of R.targets_composite_of R.targets_resid_sym) ultimately show ?thesis using N.Cong\<^sub>0_subst_right by blast qed moreover have "u' \\ y \\<^sub>0 u \\ y'" using ** R.con_sym by simp ultimately show ?thesis using N.Cong\<^sub>0_subst_Con by auto qed moreover have "x' \ \ \ y' \ \" using x' y' vv' ww' by (metis N.Cong_composite_of_normal_arr N.Cong_imp_arr(2) N.composite_closed R.con_imp_coinitial N.forward_stable R.arr_resid_iff_con) ultimately show ?thesis by auto qed abbreviation Arr where "Arr \ \ Con \ \" lemma Arr_Resid: assumes "Con \ \" shows "Arr (\ \\\\ \)" by (metis Con_char N.Cong_class_memb_is_arr R.arrE N.rep_in_Cong_class assms is_Cong_class_Resid) lemma Cube: assumes "Con (\ \\\\ \) (\ \\\\ \)" shows "(\ \\\\ \) \\\\ (\ \\\\ \) = (\ \\\\ \) \\\\ (\ \\\\ \)" proof - obtain t u where tu: "t \ \ \ u \ \ \ t \ u \ \ \\\\ \ = \t \\ u\" using assms by (metis Con_char N.Cong_class_is_nonempty R.con_sym Resid_by_members) obtain t' v where t'v: "t' \ \ \ v \ \ \ t' \ v \ \ \\\\ \ = \t' \\ v\" using assms by (metis Con_char N.Cong_class_is_nonempty Resid_by_members Con_sym) have tt': "t \ t'" using assms by (metis N.Cong_class_membs_are_Cong N.Cong_class_is_nonempty Resid_def t'v tu) obtain w w' where ww': "w \ \ \ w' \ \ \ t \\ w \\<^sub>0 t' \\ w'" using tu t'v tt' by auto have 1: "\ \\\\ \ = \u \\ t\ \ \ \\\\ \ = \v \\ t'\" by (metis Con_char N.Cong_class_is_nonempty R.con_sym Resid_by_members assms t'v tu) obtain x x' where xx': "x \ \ \ x' \ \ \ (u \\ t) \\ x \ (v \\ t') \\ x'" using 1 Con_witnesses [of "\ \\\\ \" "\ \\\\ \" "u \\ t" "v \\ t'"] by (metis N.arr_in_Cong_class R.con_sym t'v tu assms Con_sym R.arr_resid_iff_con) have "R.seq t x" by (metis R.arr_resid_iff_con R.coinitial_iff R.con_imp_coinitial R.seqI R.sources_resid xx') have "R.seq t' x'" by (metis R.arr_resid_iff_con R.sources_resid R.coinitialE R.con_imp_coinitial R.seqI xx') obtain tx where tx: "R.composite_of t x tx" using xx' \R.seq t x\ N.composite_closed_right [of x t] R.composable_def by auto obtain t'x' where t'x': "R.composite_of t' x' t'x'" using xx' \R.seq t' x'\ N.composite_closed_right [of x' t'] R.composable_def by auto let ?tx_w = "tx \\ w" and ?t'x'_w' = "t'x' \\ w'" let ?w_tx = "(w \\ t) \\ x" and ?w'_t'x' = "(w' \\ t') \\ x'" let ?u_tx = "(u \\ t) \\ x" and ?v_t'x' = "(v \\ t') \\ x'" let ?u_w = "u \\ w" and ?v_w' = "v \\ w'" let ?w_u = "w \\ u" and ?w'_v = "w' \\ v" have w_tx_in_\: "?w_tx \ \" using tx ww' xx' R.con_composite_of_iff [of t x tx w] by (metis (full_types) N.Cong\<^sub>0_composite_of_arr_normal N.Cong\<^sub>0_subst_left(1) N.forward_stable R.null_is_zero(2) R.con_imp_coinitial R.conI R.con_sym) have w'_t'x'_in_\: "?w'_t'x' \ \" using t'x' ww' xx' R.con_composite_of_iff [of t' x' t'x' w'] by (metis (full_types) N.Cong\<^sub>0_composite_of_arr_normal N.Cong\<^sub>0_subst_left(1) R.con_sym N.forward_stable R.null_is_zero(2) R.con_imp_coinitial R.conI) have 2: "?tx_w \\<^sub>0 ?t'x'_w'" proof - have "?tx_w \\<^sub>0 t \\ w" using t'x' tx ww' xx' N.Cong\<^sub>0_composite_of_arr_normal [of t x tx] N.Cong\<^sub>0_subst_left(2) by (metis N.Cong\<^sub>0_transitive R.conI) also have "t \\ w \\<^sub>0 t' \\ w'" using ww' by blast also have "t' \\ w' \\<^sub>0 
?t'x'_w'" using t'x' tx ww' xx' N.Cong\<^sub>0_composite_of_arr_normal [of t' x' t'x'] N.Cong\<^sub>0_subst_left(2) by (metis N.Cong\<^sub>0_transitive R.conI) finally show ?thesis by blast qed obtain z where z: "R.composite_of ?tx_w (?t'x'_w' \\ ?tx_w) z" by (metis "2" R.arr_resid_iff_con R.con_implies_arr(2) N.elements_are_arr N.composite_closed_right R.seqI R.sources_resid) obtain z' where z': "R.composite_of ?t'x'_w' (?tx_w \\ ?t'x'_w') z'" by (metis "2" R.arr_resid_iff_con R.con_implies_arr(2) N.elements_are_arr N.composite_closed_right R.seqI R.sources_resid) have 3: "z \\<^sub>0 z'" using 2 N.diamond_commutes_upto_Cong\<^sub>0 N.Cong\<^sub>0_imp_con z z' by blast have "R.targets z = R.targets z'" by (metis R.targets_resid_sym z z' R.targets_composite_of R.conI) have Con_z_uw: "z \ ?u_w" proof - have "?tx_w \ ?u_w" by (meson 3 N.Cong\<^sub>0_composite_of_arr_normal N.Cong\<^sub>0_subst_left(1) R.bounded_imp_con R.con_implies_arr(1) R.con_imp_coinitial N.resid_along_elem_preserves_con tu tx ww' xx' z z' R.arr_resid_iff_con) thus ?thesis using 2 N.Cong\<^sub>0_composite_of_arr_normal N.Cong\<^sub>0_subst_left(1) z by blast qed moreover have Con_z'_vw': "z' \ ?v_w'" proof - have "?t'x'_w' \ ?v_w'" by (meson 3 N.Cong\<^sub>0_composite_of_arr_normal N.Cong\<^sub>0_subst_left(1) R.bounded_imp_con t'v t'x' ww' xx' z z' R.con_imp_coinitial N.resid_along_elem_preserves_con R.arr_resid_iff_con R.con_implies_arr(1)) thus ?thesis by (meson 2 N.Cong\<^sub>0_composite_of_arr_normal N.Cong\<^sub>0_subst_left(1) z') qed moreover have Con_z_vw': "z \ ?v_w'" using 3 Con_z'_vw' N.Cong\<^sub>0_subst_left(1) by blast moreover have *: "?u_w \\ z \ ?v_w' \\ z" proof - obtain y where y: "R.composite_of (w \\ tx) (?t'x'_w' \\ ?tx_w) y" by (metis 2 R.arr_resid_iff_con R.composable_def R.composable_imp_seq R.con_imp_coinitial N.elements_are_arr N.composite_closed_right R.seq_def R.targets_resid_sym ww' z N.forward_stable) obtain y' where y': "R.composite_of (w' \\ t'x') (?tx_w \\ ?t'x'_w') y'" by (metis 2 R.arr_resid_iff_con R.composable_def R.composable_imp_seq R.con_imp_coinitial N.elements_are_arr N.composite_closed_right R.targets_resid_sym ww' z' R.seq_def N.forward_stable) have y_comp: "R.composite_of (w \\ tx) ((t'x' \\ w') \\ (tx \\ w)) y" using y by simp have y_in_normal: "y \ \" by (metis 2 Con_z_uw R.arr_iff_has_source R.arr_resid_iff_con N.composite_closed R.con_imp_coinitial R.con_implies_arr(1) N.forward_stable R.sources_composite_of ww' y_comp z) have y_coinitial: "R.coinitial y (u \\ tx)" using y R.coinitial_def by (metis Con_z_uw R.con_def R.con_prfx_composite_of(2) R.con_sym R.cube R.sources_composite_of R.con_imp_common_source z) have y_con: "y \ u \\ tx" using y_in_normal y_coinitial by (metis R.coinitial_iff N.elements_are_arr N.forward_stable R.arr_resid_iff_con) have A: "?u_w \\ z \ (u \\ tx) \\ y" proof - have "(u \\ tx) \\ y \ ((u \\ tx) \\ (w \\ tx)) \\ (?t'x'_w' \\ ?tx_w)" using y_comp y_con R.resid_composite_of(3) [of "w \\ tx" "?t'x'_w' \\ ?tx_w" y "u \\ tx"] by simp also have "((u \\ tx) \\ (w \\ tx)) \\ (?t'x'_w' \\ ?tx_w) \ ?u_w \\ z" by (metis Con_z_uw R.resid_composite_of(3) z R.cube) finally show ?thesis by blast qed have y'_comp: "R.composite_of (w' \\ t'x') (?tx_w \\ ?t'x'_w') y'" using y' by simp have y'_in_normal: "y' \ \" by (metis 2 Con_z'_vw' R.arr_iff_has_source R.arr_resid_iff_con N.composite_closed R.con_imp_coinitial R.con_implies_arr(1) N.forward_stable R.sources_composite_of ww' y'_comp z') have y'_coinitial: "R.coinitial y' (v \\ t'x')" using y' R.coinitial_def 
by (metis Con_z'_vw' R.arr_resid_iff_con R.composite_ofE R.con_imp_coinitial R.con_implies_arr(1) R.cube R.prfx_implies_con R.resid_composite_of(1) R.sources_resid z') have y'_con: "y' \ v \\ t'x'" using y'_in_normal y'_coinitial by (metis R.coinitial_iff N.elements_are_arr N.forward_stable R.arr_resid_iff_con) have B: "?v_w' \\ z' \ (v \\ t'x') \\ y'" proof - have "(v \\ t'x') \\ y' \ ((v \\ t'x') \\ (w' \\ t'x')) \\ (?tx_w \\ ?t'x'_w')" using y'_comp y'_con R.resid_composite_of(3) [of "w' \\ t'x'" "?tx_w \\ ?t'x'_w'" y' "v \\ t'x'"] by blast also have "((v \\ t'x') \\ (w' \\ t'x')) \\ (?tx_w \\ ?t'x'_w') \ ?v_w' \\ z'" by (metis Con_z'_vw' R.cube R.resid_composite_of(3) z') finally show ?thesis by blast qed have C: "u \\ tx \ v \\ t'x'" using tx t'x' xx' R.con_sym R.cong_subst_right(1) R.resid_composite_of(3) by (meson R.coinitial_iff R.arr_resid_iff_con y'_coinitial y_coinitial) have D: "y \\<^sub>0 y'" proof - have "y \\<^sub>0 w \\ tx" using 2 N.Cong\<^sub>0_composite_of_arr_normal y_comp by blast also have "w \\ tx \\<^sub>0 w' \\ t'x'" proof - have "w \\ tx \ \ \ w' \\ t'x' \ \" using N.factor_closed(1) y_comp y_in_normal y'_comp y'_in_normal by blast moreover have "R.coinitial (w \\ tx) (w' \\ t'x')" by (metis C R.coinitial_def R.con_implies_arr(2) N.elements_are_arr R.sources_resid calculation R.con_imp_coinitial R.arr_resid_iff_con y_con) ultimately show ?thesis by (meson R.arr_resid_iff_con R.con_imp_coinitial N.forward_stable N.elements_are_arr) qed also have "w' \\ t'x' \\<^sub>0 y'" using 2 N.Cong\<^sub>0_composite_of_arr_normal y'_comp by blast finally show ?thesis by blast qed have par_y_y': "R.sources y = R.sources y' \ R.targets y = R.targets y'" using D N.Cong\<^sub>0_imp_coinitial R.targets_composite_of y'_comp y_comp z z' \R.targets z = R.targets z'\ by presburger have E: "(u \\ tx) \\ y \ (v \\ t'x') \\ y'" proof - have "(u \\ tx) \\ y \ (v \\ t'x') \\ y" using C N.Resid_along_normal_preserves_reflects_con R.coinitial_iff y_coinitial y_in_normal by presburger moreover have "(v \\ t'x') \\ y \\<^sub>0 (v \\ t'x') \\ y'" using par_y_y' N.coherent R.coinitial_iff y'_coinitial y'_in_normal y_in_normal by presburger ultimately show ?thesis using N.Cong\<^sub>0_subst_right(1) by blast qed hence "?u_w \\ z \ ?v_w' \\ z'" proof - have "(u \\ tx) \\ y \ ?u_w \\ z" using A by simp moreover have "(u \\ tx) \\ y \ (v \\ t'x') \\ y'" using E by blast moreover have "(v \\ t'x') \\ y' \ ?v_w' \\ z'" using B R.cong_symmetric by blast moreover have "R.sources ((u \\ w) \\ z) = R.sources ((v \\ w') \\ z')" by (simp add: Con_z'_vw' Con_z_uw R.con_sym \R.targets z = R.targets z'\) ultimately show ?thesis by (meson N.Cong\<^sub>0_subst_Con N.ide_closed) qed moreover have "?v_w' \\ z' \ ?v_w' \\ z" by (meson 3 Con_z_vw' N.CongI N.Cong\<^sub>0_subst_right(2) R.con_sym) moreover have "R.sources ((v \\ w') \\ z) = R.sources ((u \\ w) \\ z)" by (metis R.con_implies_arr(1) R.sources_resid calculation(1) calculation(2) N.Cong_imp_arr(2) R.arr_resid_iff_con) ultimately show ?thesis by (metis N.Cong_reflexive N.Cong_subst(1) R.con_implies_arr(1)) qed ultimately have **: "?v_w' \\ z \ ?u_w \\ z \ (?v_w' \\ z) \\ (?u_w \\ z) = (?v_w' \\ ?u_w) \\ (z \\ ?u_w)" by (meson R.con_sym R.cube) have Cong_t_z: "t \ z" by (metis 2 N.Cong\<^sub>0_composite_of_arr_normal N.Cong_closure_props(2-3) N.Cong_closure_props(4) N.Cong_imp_arr(2) R.coinitial_iff R.con_imp_coinitial tx ww' xx' z R.arr_resid_iff_con) have Cong_u_uw: "u \ ?u_w" by (meson Con_z_uw N.Cong_closure_props(4) R.coinitial_iff R.con_imp_coinitial 
ww' R.arr_resid_iff_con) have Cong_v_vw': "v \ ?v_w'" by (meson Con_z_vw' N.Cong_closure_props(4) R.coinitial_iff ww' R.con_imp_coinitial R.arr_resid_iff_con) have \: "N.is_Cong_class \ \ z \ \" by (metis (no_types, lifting) Cong_t_z N.Cong_class_eqI N.Cong_class_is_nonempty N.Cong_class_memb_Cong_rep N.Cong_class_rep N.Cong_imp_arr(2) N.arr_in_Cong_class tu assms Con_char) have \: "N.is_Cong_class \ \ ?u_w \ \" by (metis Con_char Con_z_uw Cong_u_uw Int_iff N.Cong_class_eqI' N.Cong_class_eqI N.arr_in_Cong_class R.con_implies_arr(2) N.is_Cong_classI tu assms empty_iff) have \: "N.is_Cong_class \ \ ?v_w' \ \" by (metis Con_char Con_z_vw' Cong_v_vw' Int_iff N.Cong_class_eqI' N.Cong_class_eqI N.arr_in_Cong_class R.con_implies_arr(2) N.is_Cong_classI t'v assms empty_iff) show "(\ \\\\ \) \\\\ (\ \\\\ \) = (\ \\\\ \) \\\\ (\ \\\\ \)" proof - have "(\ \\\\ \) \\\\ (\ \\\\ \) = \(?v_w' \\ z) \\ (?u_w \\ z)\" using \ \ \ * Resid_by_members by (metis ** Con_char N.arr_in_Cong_class R.arr_resid_iff_con assms R.con_implies_arr(2)) moreover have "(\ \\\\ \) \\\\ (\ \\\\ \) = \(?v_w' \\ ?u_w) \\ (z \\ ?u_w)\" using Resid_by_members [of \ \ ?v_w' ?u_w] Resid_by_members [of \ \ z ?u_w] Resid_by_members [of "\ \\\\ \" "\ \\\\ \" "?v_w' \\ ?u_w" "z \\ ?u_w"] by (metis \ \ \ * ** N.arr_in_Cong_class R.con_implies_arr(2) N.is_Cong_classI R.resid_reflects_con R.arr_resid_iff_con) ultimately show ?thesis using ** by simp qed qed sublocale residuation Resid using null_char Con_sym Arr_Resid Cube by unfold_locales metis+ lemma is_residuation: shows "residuation Resid" .. lemma arr_char: shows "arr \ \ N.is_Cong_class \" by (metis N.is_Cong_class_def arrI not_arr_null null_char N.Cong_class_memb_is_arr Con_char R.arrE arrE arr_resid conI) lemma ide_char: shows "ide \ \ arr \ \ \ \ \ \ {}" proof show "ide \ \ arr \ \ \ \ \ \ {}" apply (elim ideE) by (metis Con_char N.Cong\<^sub>0_reflexive Resid_by_members disjoint_iff null_char N.arr_in_Cong_class R.arrE R.arr_resid arr_resid conE) show "arr \ \ \ \ \ \ {} \ ide \" proof - assume \: "arr \ \ \ \ \ \ {}" obtain u where u: "R.arr u \ u \ \ \ \" using \ arr_char by (metis IntI N.Cong_class_memb_is_arr disjoint_iff) show ?thesis by (metis IntD1 IntD2 N.Cong_class_eqI N.Cong_closure_props(4) N.arr_in_Cong_class N.is_Cong_classI Resid_by_members \ arrE arr_char disjoint_iff ideI N.Cong_class_eqI' R.arrE u) qed qed lemma ide_char': shows "ide \ \ arr \ \ \ \ \" by (metis Int_absorb2 Int_emptyI N.Cong_class_memb_Cong_rep N.Cong_closure_props(1) ide_char not_arr_null null_char N.normal_is_Cong_closed arr_char subsetI) lemma con_char\<^sub>Q\<^sub>C\<^sub>N: shows "con \ \ \ N.is_Cong_class \ \ N.is_Cong_class \ \ (\t u. t \ \ \ u \ \ \ t \ u)" by (metis Con_char conE conI null_char) (* * TODO: Does the stronger form of con_char hold in this context? * I am currently only able to prove it for the more special context of paths, * but it doesn't seem like that should be required. * * The issue is that congruent paths have the same sets of sources, * but this does not necessarily hold in general. If we know that all representatives * of a congruence class have the same sets of sources, then we known that if any * pair of representatives is consistent, then the arbitrarily chosen representatives * of the congruence class are consistent. This is by substitutivity of congruence, * which has coinitiality as a hypothesis. 
* * In the general case, we have to reason as follows: if t and u are consistent * representatives of \ and \, and if t' and u' are arbitrary coinitial representatives * of \ and \, then we can obtain "opposing spans" connecting t u and t' u'. * The opposing span form of coherence then implies that t' and u' are consistent. * So we should be able to show that if congruence classes \ and \ are consistent, * then all pairs of coinitial representatives are consistent. *) lemma con_imp_coinitial_members_are_con: assumes "con \ \" and "t \ \" and "u \ \" and "R.sources t = R.sources u" shows "t \ u" by (meson assms N.Cong_subst(1) N.is_Cong_classE con_char\<^sub>Q\<^sub>C\<^sub>N) sublocale rts Resid proof show 1: "\\ \. \ide \; con \ \\ \ \ \\\\ \ = \" proof - fix \ \ assume \: "ide \" and con: "con \ \" obtain t a where ta: "t \ \ \ a \ \ \ R.con t a \ \ \\\\ \ = \t \\ a\" using con con_char\<^sub>Q\<^sub>C\<^sub>N Resid_by_members by auto have "a \ \" using \ ta ide_char' by auto hence "t \\ a \ t" by (meson N.Cong_closure_props(4) N.Cong_symmetric R.coinitialE R.con_imp_coinitial ta) thus "\ \\\\ \ = \" using ta by (metis N.Cong_class_eqI N.Cong_class_memb_Cong_rep N.Cong_class_rep con con_char\<^sub>Q\<^sub>C\<^sub>N) qed show "\\. arr \ \ ide (trg \)" by (metis N.Cong\<^sub>0_reflexive Resid_by_members disjoint_iff ide_char N.Cong_class_memb_is_arr N.arr_in_Cong_class N.is_Cong_class_def arr_char R.arrE R.arr_resid resid_arr_self) show "\\ \. \ide \; con \ \\ \ ide (\ \\\\ \)" by (metis 1 arrE arr_resid con_sym ideE ideI cube) show "\\ \. con \ \ \ \\. ide \ \ con \ \ \ con \ \" proof - fix \ \ assume \\: "con \ \" obtain t u where tu: "\ = \t\ \ \ = \u\ \ t \ u" using \\ con_char\<^sub>Q\<^sub>C\<^sub>N arr_char by (metis N.Cong_class_memb_Cong_rep N.Cong_class_eqI N.Cong_class_rep) obtain a where a: "a \ R.sources t" using \\ tu R.con_implies_arr(1) R.arr_iff_has_source by blast have "ide \a\ \ con \a\ \ \ con \a\ \" proof (intro conjI) have 2: "a \ \" using \\ tu a arr_char N.ide_closed R.sources_def by force show 3: "ide \a\" using \\ tu 2 a ide_char arr_char con_char\<^sub>Q\<^sub>C\<^sub>N by (metis IntI N.arr_in_Cong_class N.is_Cong_classI empty_iff N.elements_are_arr) show "con \a\ \" using \\ tu 2 3 a ide_char arr_char con_char\<^sub>Q\<^sub>C\<^sub>N by (metis N.arr_in_Cong_class R.composite_of_source_arr R.composite_of_def R.prfx_implies_con R.con_implies_arr(1)) show "con \a\ \" using \\ tu a ide_char arr_char con_char\<^sub>Q\<^sub>C\<^sub>N by (metis N.arr_in_Cong_class R.composite_of_source_arr R.con_prfx_composite_of N.is_Cong_classI R.con_implies_arr(1) R.con_implies_arr(2)) qed thus "\\. ide \ \ con \ \ \ con \ \" by auto qed show "\\ \ \. 
\ide (\ \\\\ \); con \ \\ \ con (\ \\\\ \) (\ \\\\ \)" proof - fix \ \ \ assume \\: "ide (\ \\\\ \)" assume \\: "con \ \" obtain t u where tu: "t \ \ \ u \ \ \ t \ u \ \ \\\\ \ = \t \\ u\" using \\ by (meson Resid_by_members ide_implies_arr quotient_by_coherent_normal.con_char\<^sub>Q\<^sub>C\<^sub>N quotient_by_coherent_normal_axioms arr_resid_iff_con) obtain v u' where vu': "v \ \ \ u' \ \ \ v \ u' \ \ \\\\ \ = \v \\ u'\" by (meson R.con_sym Resid_by_members \\ con_char\<^sub>Q\<^sub>C\<^sub>N) have 1: "u \ u'" using \\ tu vu' by (meson N.Cong_class_membs_are_Cong con_char\<^sub>Q\<^sub>C\<^sub>N) obtain w w' where ww': "w \ \ \ w' \ \ \ u \\ w \\<^sub>0 u' \\ w'" using 1 by auto have 2: "((t \\ u) \\ (w \\ u)) \\ ((u' \\ w') \\ (u \\ w)) \ ((v \\ u') \\ (w' \\ u')) \\ ((u \\ w) \\ (u' \\ w'))" proof - have "((t \\ u) \\ (w \\ u)) \\ ((u' \\ w') \\ (u \\ w)) \ \" proof - have "t \\ u \ \" using tu N.arr_in_Cong_class R.arr_resid_iff_con \\ ide_char' by blast hence "(t \\ u) \\ (w \\ u) \ \" by (metis N.Cong_closure_props(4) N.forward_stable R.null_is_zero(2) R.con_imp_coinitial R.sources_resid N.Cong_imp_arr(2) R.arr_resid_iff_con tu ww' R.conI) thus ?thesis by (metis N.Cong_closure_props(4) N.normal_is_Cong_closed R.sources_resid R.targets_resid_sym N.elements_are_arr R.arr_resid_iff_con ww' R.conI) qed moreover have "R.sources (((t \\ u) \\ (w \\ u)) \\ ((u' \\ w') \\ (u \\ w))) = R.sources (((v \\ u') \\ (w' \\ u')) \\ ((u \\ w) \\ (u' \\ w')))" proof - have "R.sources (((t \\ u) \\ (w \\ u)) \\ ((u' \\ w') \\ (u \\ w))) = R.targets ((u' \\ w') \\ (u \\ w))" using R.arr_resid_iff_con N.elements_are_arr R.sources_resid calculation by blast also have "... = R.targets ((u \\ w) \\ (u' \\ w'))" by (metis R.targets_resid_sym R.conI) also have "... = R.sources (((v \\ u') \\ (w' \\ u')) \\ ((u \\ w) \\ (u' \\ w')))" using R.arr_resid_iff_con N.elements_are_arr R.sources_resid by (metis N.Cong_closure_props(4) N.Cong_imp_arr(2) R.con_implies_arr(1) R.con_imp_coinitial N.forward_stable R.targets_resid_sym vu' ww') finally show ?thesis by simp qed ultimately show ?thesis by (metis (no_types, lifting) N.Cong\<^sub>0_imp_con N.Cong_closure_props(4) N.Cong_imp_arr(2) R.arr_resid_iff_con R.con_imp_coinitial N.forward_stable R.null_is_zero(2) R.conI) qed moreover have "t \\ u \ ((t \\ u) \\ (w \\ u)) \\ ((u' \\ w') \\ (u \\ w))" by (metis (no_types, opaque_lifting) N.Cong_closure_props(4) N.Cong_transitive N.forward_stable R.arr_resid_iff_con R.con_imp_coinitial R.rts_axioms calculation rts.coinitial_iff ww') moreover have "v \\ u' \ ((v \\ u') \\ (w' \\ u')) \\ ((u \\ w) \\ (u' \\ w'))" proof - have "w' \\ u' \ \" by (meson R.con_implies_arr(2) R.con_imp_coinitial N.forward_stable ww' N.Cong\<^sub>0_imp_con R.arr_resid_iff_con) moreover have "(u \\ w) \\ (u' \\ w') \ \" using ww' by blast ultimately show ?thesis by (meson 2 N.Cong_closure_props(2) N.Cong_closure_props(4) R.arr_resid_iff_con R.coinitial_iff R.con_imp_coinitial) qed ultimately show "con (\ \\\\ \) (\ \\\\ \)" using con_char\<^sub>Q\<^sub>C\<^sub>N N.Cong_class_def N.is_Cong_classI tu vu' R.arr_resid_iff_con by auto qed qed lemma is_rts: shows "rts Resid" .. 
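  (*
     An informal sketch, outside the formal development, of how residuation of
     congruence classes is computed "by members" (cf. Resid_by_members above):
     choose any consistent pair of representatives, residuate them in the base RTS,
     and form the congruence class of the result.  The Haskell names below
     (Rts, carrier, resid, cong, residClasses) are illustrative assumptions only;
     that the outcome does not depend on the chosen representatives is exactly
     what coherence is needed to establish.

       -- a base RTS with an explicit (finite, for illustration) carrier
       data Rts a = Rts
         { carrier :: [a]
         , resid   :: a -> a -> Maybe a  -- partial residuation; Nothing plays the role of null
         , cong    :: a -> a -> Bool     -- the congruence induced by the normal sub-RTS
         }

       -- residuation of congruence classes, each represented by a list of its members
       residClasses :: Rts a -> [a] -> [a] -> [a]
       residClasses r tc uc =
         case [v | t <- tc, u <- uc, Just v <- [resid r t u]] of
           []      -> []                                -- no consistent representatives: null class
           (v : _) -> [w | w <- carrier r, cong r v w]  -- the class of the chosen residual
  *)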
sublocale extensional_rts Resid proof fix \ \ assume \\: "cong \ \" show "\ = \" proof - obtain t u where tu: "\ = \t\ \ \ = \u\ \ t \ u" by (metis Con_char N.Cong_class_eqI N.Cong_class_memb_Cong_rep N.Cong_class_rep \\ ide_char not_arr_null null_char) have "t \\<^sub>0 u" proof show "t \\ u \ \" using tu \\ Resid_by_members [of \ \ t u] by (metis (full_types) N.arr_in_Cong_class R.con_implies_arr(1-2) N.is_Cong_classI ide_char' R.arr_resid_iff_con subset_iff) show "u \\ t \ \" using tu \\ Resid_by_members [of \ \ u t] R.con_sym by (metis (full_types) N.arr_in_Cong_class R.con_implies_arr(1-2) N.is_Cong_classI ide_char' R.arr_resid_iff_con subset_iff) qed hence "t \ u" using N.Cong\<^sub>0_implies_Cong by simp thus "\ = \" by (simp add: N.Cong_class_eqI tu) qed qed theorem is_extensional_rts: shows "extensional_rts Resid" .. lemma sources_char\<^sub>Q\<^sub>C\<^sub>N: shows "sources \ = {\. arr \ \ \ = {a. \t a'. t \ \ \ a' \ R.sources t \ a' \ a}}" proof - let ?\ = "{a. \t a'. t \ \ \ a' \ R.sources t \ a' \ a}" have 1: "arr \ \ ide ?\" proof (unfold ide_char', intro conjI) assume \: "arr \" show "?\ \ \" using N.ide_closed N.normal_is_Cong_closed by blast show "arr ?\" proof - have "N.is_Cong_class ?\" proof show "?\ \ {}" by (metis (mono_tags, lifting) Collect_empty_eq N.Cong_class_def N.Cong_imp_arr(1) N.is_Cong_class_def N.sources_are_Cong R.arr_iff_has_source R.sources_def \ arr_char mem_Collect_eq) show "\t t'. \t \ ?\; t' \ t\ \ t' \ ?\" using N.Cong_symmetric N.Cong_transitive by blast show "\a a'. \a \ ?\; a' \ ?\\ \ a \ a'" proof - fix a a' assume a: "a \ ?\" and a': "a' \ ?\" obtain t b where b: "t \ \ \ b \ R.sources t \ b \ a" using a by blast obtain t' b' where b': "t' \ \ \ b' \ R.sources t' \ b' \ a'" using a' by blast have "b \ b'" using \ arr_char b b' by (meson IntD1 N.Cong_class_membs_are_Cong N.in_sources_respects_Cong) thus "a \ a'" by (meson N.Cong_symmetric N.Cong_transitive b b') qed qed thus ?thesis using arr_char by auto qed qed moreover have "arr \ \ con \ ?\" proof - assume \: "arr \" obtain t a where a: "t \ \ \ a \ R.sources t" using \ arr_char by (metis N.Cong_class_is_nonempty R.arr_iff_has_source empty_subsetI N.Cong_class_memb_is_arr subsetI subset_antisym) have "t \ \ \ a \ {a. \t a'. t \ \ \ a' \ R.sources t \ a' \ a} \ t \ a" using a N.Cong_reflexive R.sources_def R.con_implies_arr(2) by fast thus ?thesis using \ 1 arr_char con_char\<^sub>Q\<^sub>C\<^sub>N [of \ ?\] by auto qed ultimately have "arr \ \ ?\ \ sources \" using sources_def by blast thus ?thesis using "1" ide_char sources_char by auto qed lemma targets_char\<^sub>Q\<^sub>C\<^sub>N: shows "targets \ = {\. arr \ \ \ = \ \\\\ \}" proof - have "targets \ = {\. ide \ \ con (\ \\\\ \) \}" by (simp add: targets_def trg_def) also have "... = {\. arr \ \ ide \ \ (\t u. t \ \ \\\\ \ \ u \ \ \ t \ u)}" using arr_resid_iff_con con_char\<^sub>Q\<^sub>C\<^sub>N arr_char arr_def by auto also have "... = {\. arr \ \ ide \ \ (\t t' b u. t \ \ \ t' \ \ \ t \ t' \ b \ \t \\ t'\ \ u \ \ \ b \ u)}" using arr_char ide_char Resid_by_members [of \ \] N.Cong_class_memb_is_arr N.is_Cong_class_def R.arr_def by auto metis+ also have "... = {\. arr \ \ ide \ \ (\t t' b. t \ \ \ t' \ \ \ t \ t' \ b \ \t \\ t'\ \ b \ \)}" proof - have "\\ t t' b. \arr \; ide \; t \ \; t' \ \; t \ t'; b \ \t \\ t'\\ \ (\u. 
u \ \ \ b \ u) \ b \ \" proof - fix \ t t' b assume \: "arr \" and \: "ide \" and t: "t \ \" and t': "t' \ \" and tt': "t \ t'" and b: "b \ \t \\ t'\" have 0: "b \ \" by (metis Resid_by_members \ b ide_char' ide_trg arr_char subsetD t t' trg_def tt') show "(\u. u \ \ \ b \ u) \ b \ \" using 0 by (meson N.Cong_closure_props(3) N.forward_stable N.elements_are_arr \ arr_char R.con_imp_coinitial N.is_Cong_classE ide_char' R.arrE R.con_sym subsetD) qed thus ?thesis using ide_char arr_char by (metis (no_types, lifting)) qed also have "... = {\. arr \ \ ide \ \ (\t t'. t \ \ \ t' \ \ \ t \ t' \ \t \\ t'\ \ \)}" proof - have "\\ t t' b. \arr \; ide \; t \ \; t' \ \; t \ t'\ \ (\b. b \ \t \\ t'\ \ b \ \) \ \t \\ t'\ \ \" using ide_char arr_char apply (intro iffI) apply (metis IntI N.Cong_class_eqI' R.arr_resid_iff_con N.is_Cong_classI empty_iff set_eq_subset) by (meson N.arr_in_Cong_class R.arr_resid_iff_con subsetD) thus ?thesis using ide_char arr_char by (metis (no_types, lifting)) qed also have "... = {\. arr \ \ ide \ \ \ \\\\ \ \ \}" using arr_char ide_char Resid_by_members [of \ \] by (metis (no_types, opaque_lifting) arrE con_char\<^sub>Q\<^sub>C\<^sub>N) also have "... = {\. arr \ \ \ = \ \\\\ \}" by (metis (no_types, lifting) arr_has_un_target calculation con_ide_are_eq cong_reflexive mem_Collect_eq targets_def trg_def) finally show ?thesis by blast qed lemma src_char\<^sub>Q\<^sub>C\<^sub>N: shows "src \ = {a. arr \ \ (\t a'. t \ \ \ a' \ R.sources t \ a' \ a)}" using sources_char\<^sub>Q\<^sub>C\<^sub>N [of \] by (simp add: null_char src_def) lemma trg_char\<^sub>Q\<^sub>C\<^sub>N: shows "trg \ = \ \\\\ \" unfolding trg_def by blast subsubsection "Quotient Map" abbreviation quot where "quot t \ \t\" sublocale quot: simulation resid Resid quot proof show "\t. \ R.arr t \ \t\ = null" using N.Cong_class_def N.Cong_imp_arr(1) null_char by force show "\t u. t \ u \ con \t\ \u\" by (meson N.arr_in_Cong_class N.is_Cong_classI R.con_implies_arr(1-2) con_char\<^sub>Q\<^sub>C\<^sub>N) show "\t u. t \ u \ \t \\ u\ = \t\ \\\\ \u\" by (metis N.arr_in_Cong_class N.is_Cong_classI R.con_implies_arr(1-2) Resid_by_members) qed lemma quotient_is_simulation: shows "simulation resid Resid quot" .. (* * TODO: Show couniversality. *) end subsection "Identities form a Coherent Normal Sub-RTS" text \ We now show that the collection of identities of an RTS form a coherent normal sub-RTS, and that the associated congruence \\\ coincides with \\\. Thus, every RTS can be factored by the relation \\\ to obtain an extensional RTS. Although we could have shown that fact much earlier, we have delayed proving it so that we could simply obtain it as a special case of our general quotient result without redundant work. \ context rts begin interpretation normal_sub_rts resid \Collect ide\ proof show "\t. t \ Collect ide \ arr t" by blast show 1: "\a. ide a \ a \ Collect ide" by blast show "\u t. \u \ Collect ide; coinitial t u\ \ u \\ t \ Collect ide" by (metis 1 CollectD arr_def coinitial_iff con_sym in_sourcesE in_sourcesI resid_ide_arr) show "\u t. \u \ Collect ide; t \\ u \ Collect ide\ \ t \ Collect ide" using ide_backward_stable by blast show "\u t. \u \ Collect ide; seq u t\ \ \v. composite_of u t v" by (metis composite_of_source_arr ide_def in_sourcesI mem_Collect_eq seq_def resid_source_in_targets) show "\u t. \u \ Collect ide; seq t u\ \ \v. 
composite_of t u v" by (metis arrE composite_of_arr_target in_sourcesI seqE mem_Collect_eq) qed lemma identities_form_normal_sub_rts: shows "normal_sub_rts resid (Collect ide)" .. interpretation coherent_normal_sub_rts resid \Collect ide\ apply unfold_locales by (metis CollectD Cong\<^sub>0_reflexive Cong_closure_props(4) Cong_imp_arr(2) arr_resid_iff_con resid_arr_ide) lemma identities_form_coherent_normal_sub_rts: shows "coherent_normal_sub_rts resid (Collect ide)" .. lemma Cong_iff_cong: shows "Cong t u \ t \ u" by (metis CollectD Cong_def ide_closed resid_arr_ide Cong_closure_props(3) Cong_imp_arr(2) arr_resid_iff_con) end section "Paths" text \ A \emph{path} in an RTS is a nonempty list of arrows such that the set of targets of each arrow suitably matches the set of sources of its successor. The residuation on the given RTS extends inductively to a residuation on paths, so that paths also form an RTS. The append operation on lists yields a composite for each pair of compatible paths. \ locale paths_in_rts = R: rts begin fun Srcs where "Srcs [] = {}" | "Srcs [t] = R.sources t" | "Srcs (t # T) = R.sources t" fun Trgs where "Trgs [] = {}" | "Trgs [t] = R.targets t" | "Trgs (t # T) = Trgs T" fun Arr where "Arr [] = False" | "Arr [t] = R.arr t" | "Arr (t # T) = (R.arr t \ Arr T \ R.targets t \ Srcs T)" fun Ide where "Ide [] = False" | "Ide [t] = R.ide t" | "Ide (t # T) = (R.ide t \ Ide T \ R.targets t \ Srcs T)" lemma set_Arr_subset_arr: shows "Arr T \ set T \ Collect R.arr" apply (induct T) apply auto using Arr.elims(2) apply blast by (metis Arr.simps(3) Ball_Collect list.set_cases) lemma Arr_imp_arr_hd [simp]: assumes "Arr T" shows "R.arr (hd T)" using assms by (metis Arr.simps(1) CollectD hd_in_set set_Arr_subset_arr subset_code(1)) lemma Arr_imp_arr_last [simp]: assumes "Arr T" shows "R.arr (last T)" using assms by (metis Arr.simps(1) CollectD in_mono last_in_set set_Arr_subset_arr) lemma Arr_imp_Arr_tl [simp]: assumes "Arr T" and "tl T \ []" shows "Arr (tl T)" using assms by (metis Arr.simps(3) list.exhaust_sel list.sel(2)) lemma set_Ide_subset_ide: shows "Ide T \ set T \ Collect R.ide" apply (induct T) apply auto using Ide.elims(2) apply blast by (metis Ide.simps(3) Ball_Collect list.set_cases) lemma Ide_imp_Ide_hd [simp]: assumes "Ide T" shows "R.ide (hd T)" using assms by (metis Ide.simps(1) CollectD hd_in_set set_Ide_subset_ide subset_code(1)) lemma Ide_imp_Ide_last [simp]: assumes "Ide T" shows "R.ide (last T)" using assms by (metis Ide.simps(1) CollectD in_mono last_in_set set_Ide_subset_ide) lemma Ide_imp_Ide_tl [simp]: assumes "Ide T" and "tl T \ []" shows "Ide (tl T)" using assms by (metis Ide.simps(3) list.exhaust_sel list.sel(2)) lemma Ide_implies_Arr: shows "Ide T \ Arr T" apply (induct T) apply simp using Ide.elims(2) by fastforce lemma const_ide_is_Ide: shows "\T \ []; R.ide (hd T); set T \ {hd T}\ \ Ide T" apply (induct T) apply auto by (metis Ide.simps(2-3) R.ideE R.sources_resid Srcs.simps(2-3) empty_iff insert_iff list.exhaust_sel list.set_sel(1) order_refl subset_singletonD) lemma Ide_char: shows "Ide T \ Arr T \ set T \ Collect R.ide" apply (induct T) apply auto[1] by (metis Arr.simps(3) Ide.simps(2-3) Ide_implies_Arr empty_subsetI insert_subset list.simps(15) mem_Collect_eq neq_Nil_conv set_empty) lemma IdeI [intro]: assumes "Arr T" and "set T \ Collect R.ide" shows "Ide T" using assms Ide_char by force lemma Arr_has_Src: shows "Arr T \ Srcs T \ {}" apply (cases T) apply simp by (metis R.arr_iff_has_source Srcs.elims Arr.elims(2) list.distinct(1) list.sel(1)) 
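  (*
     An informal sketch, outside the formal development, of the path predicate Arr
     defined above: a nonempty list of arrows is a path when the targets of each
     arrow are contained in the sources of its successor.  The Haskell helpers
     (arr, sources, targets) stand for the corresponding operations of the
     underlying RTS and are assumptions made only for this illustration.

       import qualified Data.Set as Set

       isPath :: Ord a
              => (a -> Bool)        -- arr
              -> (a -> Set.Set a)   -- sources
              -> (a -> Set.Set a)   -- targets
              -> [a] -> Bool
       isPath arr sources targets = go
         where
           go []           = False          -- the empty list is not a path
           go [t]          = arr t
           go (t : u : ts) = arr t
                             && targets t `Set.isSubsetOf` sources u
                             && go (u : ts)
  *)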
lemma Arr_has_Trg: shows "Arr T \ Trgs T \ {}" using R.arr_iff_has_target apply (induct T) apply simp by (metis Arr.simps(2) Arr.simps(3) Trgs.simps(2-3) list.exhaust_sel) lemma Srcs_are_ide: shows "Srcs T \ Collect R.ide" apply (cases T) apply simp by (metis (no_types, lifting) Srcs.elims list.distinct(1) mem_Collect_eq R.sources_def subsetI) lemma Trgs_are_ide: shows "Trgs T \ Collect R.ide" apply (induct T) apply simp by (metis R.arr_iff_has_target R.sources_resid Srcs.simps(2) Trgs.simps(2-3) Srcs_are_ide empty_subsetI list.exhaust R.arrE) lemma Srcs_are_con: assumes "a \ Srcs T" and "a' \ Srcs T" shows "a \ a'" using assms by (metis Srcs.elims empty_iff R.sources_are_con) lemma Srcs_con_closed: assumes "a \ Srcs T" and "R.ide a'" and "a \ a'" shows "a' \ Srcs T" using assms R.sources_con_closed apply (cases T, auto) by (metis Srcs.simps(2-3) neq_Nil_conv) lemma Srcs_eqI: assumes "Srcs T \ Srcs T' \ {}" shows "Srcs T = Srcs T'" using assms R.sources_eqI apply (cases T; cases T') apply auto apply (metis IntI Srcs.simps(2-3) empty_iff neq_Nil_conv) by (metis Srcs.simps(2-3) assms neq_Nil_conv) lemma Trgs_are_con: shows "\b b'. \b \ Trgs T; b' \ Trgs T\ \ b \ b'" apply (induct T) apply auto by (metis R.targets_are_con Trgs.simps(2-3) list.exhaust_sel) lemma Trgs_con_closed: shows "\b \ Trgs T; R.ide b'; b \ b'\ \ b' \ Trgs T" apply (induct T) apply auto by (metis R.targets_con_closed Trgs.simps(2-3) neq_Nil_conv) lemma Trgs_eqI: assumes "Trgs T \ Trgs T' \ {}" shows "Trgs T = Trgs T'" using assms Trgs_are_ide Trgs_are_con Trgs_con_closed by blast lemma Srcs_simp\<^sub>P: assumes "Arr T" shows "Srcs T = R.sources (hd T)" using assms by (metis Arr_has_Src Srcs.simps(1) Srcs.simps(2) Srcs.simps(3) list.exhaust_sel) lemma Trgs_simp\<^sub>P: shows "Arr T \ Trgs T = R.targets (last T)" apply (induct T) apply simp by (metis Arr.simps(3) Trgs.simps(2) Trgs.simps(3) last_ConsL last_ConsR neq_Nil_conv) subsection "Residuation on Paths" text \ It was more difficult than I thought to get a correct formal definition for residuation on paths and to prove things from it. Straightforward attempts to write a single recursive definition ran into problems with being able to prove termination, as well as getting the cases correct so that the domain of definition was symmetric. Eventually I found the definition below, which simplifies the termination proof to some extent through the use of two auxiliary functions, and which has a symmetric form that makes symmetry easier to prove. However, there was still some difficulty in proving the recursive expansions with respect to cons and append that I needed. \ text \ The following defines residuation of a single transition along a path, yielding a transition. \ fun Resid1x (infix "\<^sup>1\\\<^sup>*" 70) where "t \<^sup>1\\\<^sup>* [] = R.null" | "t \<^sup>1\\\<^sup>* [u] = t \\ u" | "t \<^sup>1\\\<^sup>* (u # U) = (t \\ u) \<^sup>1\\\<^sup>* U" text \ Next, we have residuation of a path along a single transition, yielding a path. \ fun Residx1 (infix "\<^sup>*\\\<^sup>1" 70) where "[] \<^sup>*\\\<^sup>1 u = []" | "[t] \<^sup>*\\\<^sup>1 u = (if t \ u then [t \\ u] else [])" | "(t # T) \<^sup>*\\\<^sup>1 u = (if t \ u \ T \<^sup>*\\\<^sup>1 (u \\ t) \ [] then (t \\ u) # T \<^sup>*\\\<^sup>1 (u \\ t) else [])" text \ Finally, residuation of a path along a path, yielding a path. 
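As a reading aid only, the general clause of the definition below says that, whenever
all of the indicated residuals are defined,
\[
  (t \,\#\, T) \;{}^{*}\backslash^{*}\; (u \,\#\, U) \;=\;
  \big((t \backslash u) \;{}^{1}\backslash^{*}\; U\big) \;\#\;
  \Big(\big(T \;{}^{*}\backslash^{1}\; (u \backslash t)\big)
       \;{}^{*}\backslash^{*}\;
       \big(U \;{}^{*}\backslash^{1}\; (t \backslash u)\big)\Big),
\]
and the result is the empty (null) path otherwise.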
\ function (sequential) Resid (infix "\<^sup>*\\\<^sup>*" 70) where "[] \<^sup>*\\\<^sup>* _ = []" | "_ \<^sup>*\\\<^sup>* [] = []" | "[t] \<^sup>*\\\<^sup>* [u] = (if t \ u then [t \\ u] else [])" | "[t] \<^sup>*\\\<^sup>* (u # U) = (if t \ u \ (t \\ u) \<^sup>1\\\<^sup>* U \ R.null then [(t \\ u) \<^sup>1\\\<^sup>* U] else [])" | "(t # T) \<^sup>*\\\<^sup>* [u] = (if t \ u \ T \<^sup>*\\\<^sup>1 (u \\ t) \ [] then (t \\ u) # (T \<^sup>*\\\<^sup>1 (u \\ t)) else [])" | "(t # T) \<^sup>*\\\<^sup>* (u # U) = (if t \ u \ (t \\ u) \<^sup>1\\\<^sup>* U \ R.null \ (T \<^sup>*\\\<^sup>1 (u \\ t)) \<^sup>*\\\<^sup>* (U \<^sup>*\\\<^sup>1 (t \\ u)) \ [] then (t \\ u) \<^sup>1\\\<^sup>* U # (T \<^sup>*\\\<^sup>1 (u \\ t)) \<^sup>*\\\<^sup>* (U \<^sup>*\\\<^sup>1 (t \\ u)) else [])" by pat_completeness auto text \ Residuation of a path along a single transition is length non-increasing. Actually, it is length-preserving, except in case the path and the transition are not consistent. We will show that later, but for now this is what we need to establish termination for (\\\). \ lemma length_Residx1: shows "\u. length (T \<^sup>*\\\<^sup>1 u) \ length T" proof (induct T) show "\u. length ([] \<^sup>*\\\<^sup>1 u) \ length []" by simp fix t T u assume ind: "\u. length (T \<^sup>*\\\<^sup>1 u) \ length T" show "length ((t # T) \<^sup>*\\\<^sup>1 u) \ length (t # T)" using ind by (cases T, cases "t \ u", cases "T \<^sup>*\\\<^sup>1 (u \\ t)") auto qed termination Resid proof (relation "measure (\(T, U). length T + length U)") show "wf (measure (\(T, U). length T + length U))" by simp fix t t' T u U have "length ((t' # T) \<^sup>*\\\<^sup>1 (u \\ t)) + length (U \<^sup>*\\\<^sup>1 (t \\ u)) < length (t # t' # T) + length (u # U)" using length_Residx1 by (metis add_less_le_mono impossible_Cons le_neq_implies_less list.size(4) trans_le_add1) thus 1: "(((t' # T) \<^sup>*\\\<^sup>1 (u \\ t), U \<^sup>*\\\<^sup>1 (t \\ u)), t # t' # T, u # U) \ measure (\(T, U). length T + length U)" by simp show "(((t' # T) \<^sup>*\\\<^sup>1 (u \\ t), U \<^sup>*\\\<^sup>1 (t \\ u)), t # t' # T, u # U) \ measure (\(T, U). length T + length U)" using 1 length_Residx1 by blast have "length (T \<^sup>*\\\<^sup>1 (u \\ t)) + length (U \<^sup>*\\\<^sup>1 (t \\ u)) \ length T + length U" using length_Residx1 by (simp add: add_mono) thus 2: "((T \<^sup>*\\\<^sup>1 (u \\ t), U \<^sup>*\\\<^sup>1 (t \\ u)), t # T, u # U) \ measure (\(T, U). length T + length U)" by simp show "((T \<^sup>*\\\<^sup>1 (u \\ t), U \<^sup>*\\\<^sup>1 (t \\ u)), t # T, u # U) \ measure (\(T, U). length T + length U)" using 2 length_Residx1 by blast qed lemma Resid1x_null: shows "R.null \<^sup>1\\\<^sup>* T = R.null" apply (induct T) apply auto by (metis R.null_is_zero(1) Resid1x.simps(2-3) list.exhaust) lemma Resid1x_ide: shows "\a. \R.ide a; a \<^sup>1\\\<^sup>* T \ R.null\ \ R.ide (a \<^sup>1\\\<^sup>* T)" proof (induct T) show "\a. a \<^sup>1\\\<^sup>* [] \ R.null \ R.ide (a \<^sup>1\\\<^sup>* [])" by simp fix a t T assume a: "R.ide a" assume ind: "\a. \R.ide a; a \<^sup>1\\\<^sup>* T \ R.null\ \ R.ide (a \<^sup>1\\\<^sup>* T)" assume con: "a \<^sup>1\\\<^sup>* (t # T) \ R.null" have 1: "a \ t" using con by (metis R.con_def Resid1x.simps(2-3) Resid1x_null list.exhaust) show "R.ide (a \<^sup>1\\\<^sup>* (t # T))" using a 1 con ind R.resid_ide_arr by (metis Resid1x.simps(2-3) list.exhaust) qed (* * TODO: Try to make this a definition, rather than an abbreviation. 
* * I made an attempt at this, but there are many, many places where the * definition needs to be unwound. It is not clear how valuable it might * end up being to have this as a definition. *) abbreviation Con (infix "\<^sup>*\\<^sup>*" 50) where "T \<^sup>*\\<^sup>* U \ T \<^sup>*\\\<^sup>* U \ []" lemma Con_sym1: shows "\u. T \<^sup>*\\\<^sup>1 u \ [] \ u \<^sup>1\\\<^sup>* T \ R.null" proof (induct T) show "\u. [] \<^sup>*\\\<^sup>1 u \ [] \ u \<^sup>1\\\<^sup>* [] \ R.null" by simp show "\t T u. (\u. T \<^sup>*\\\<^sup>1 u \ [] \ u \<^sup>1\\\<^sup>* T \ R.null) \ (t # T) \<^sup>*\\\<^sup>1 u \ [] \ u \<^sup>1\\\<^sup>* (t # T) \ R.null" proof - fix t T u assume ind: "\u. T \<^sup>*\\\<^sup>1 u \ [] \ u \<^sup>1\\\<^sup>* T \ R.null" show "(t # T) \<^sup>*\\\<^sup>1 u \ [] \ u \<^sup>1\\\<^sup>* (t # T) \ R.null" proof show "(t # T) \<^sup>*\\\<^sup>1 u \ [] \ u \<^sup>1\\\<^sup>* (t # T) \ R.null" by (metis R.con_sym Resid1x.simps(2-3) Residx1.simps(2-3) ind neq_Nil_conv R.conE) show "u \<^sup>1\\\<^sup>* (t # T) \ R.null \ (t # T) \<^sup>*\\\<^sup>1 u \ []" using ind R.con_sym apply (cases T) apply auto by (metis R.conI Resid1x_null) qed qed qed lemma Con_sym_ind: shows "\T U. length T + length U \ n \ T \<^sup>*\\<^sup>* U \ U \<^sup>*\\<^sup>* T" proof (induct n) show "\T U. length T + length U \ 0 \ T \<^sup>*\\<^sup>* U \ U \<^sup>*\\<^sup>* T" by simp fix n and T U :: "'a list" assume ind: "\T U. length T + length U \ n \ T \<^sup>*\\<^sup>* U \ U \<^sup>*\\<^sup>* T" assume 1: "length T + length U \ Suc n" show "T \<^sup>*\\<^sup>* U \ U \<^sup>*\\<^sup>* T" using R.con_sym Con_sym1 apply (cases T; cases U) apply auto[3] proof - fix t u T' U' assume T: "T = t # T'" and U: "U = u # U'" show "T \<^sup>*\\<^sup>* U \ U \<^sup>*\\<^sup>* T" proof (cases "T' = []") show "T' = [] \ T \<^sup>*\\<^sup>* U \ U \<^sup>*\\<^sup>* T" using T U Con_sym1 R.con_sym by (cases U') auto show "T' \ [] \ T \<^sup>*\\<^sup>* U \ U \<^sup>*\\<^sup>* T" proof (cases "U' = []") show "\T' \ []; U' = []\ \ T \<^sup>*\\<^sup>* U \ U \<^sup>*\\<^sup>* T" using T U R.con_sym Con_sym1 by (cases T') auto show "\T' \ []; U' \ []\ \ T \<^sup>*\\<^sup>* U \ U \<^sup>*\\<^sup>* T" proof - assume T': "T' \ []" and U': "U' \ []" have 2: "length (U' \<^sup>*\\\<^sup>1 (t \\ u)) + length (T' \<^sup>*\\\<^sup>1 (u \\ t)) \ n" proof - have "length (U' \<^sup>*\\\<^sup>1 (t \\ u)) + length (T' \<^sup>*\\\<^sup>1 (u \\ t)) \ length U' + length T'" by (simp add: add_le_mono length_Residx1) also have "... \ length T' + length U'" using T' add.commute not_less_eq_eq by auto also have "... 
\ n" using 1 T U by simp finally show ?thesis by blast qed show "T \<^sup>*\\<^sup>* U \ U \<^sup>*\\<^sup>* T" proof assume Con: "T \<^sup>*\\<^sup>* U" have 3: "t \ u \ T' \<^sup>*\\\<^sup>1 (u \\ t) \ [] \ (t \\ u) \<^sup>1\\\<^sup>* U' \ R.null \ (T' \<^sup>*\\\<^sup>1 (u \\ t)) \<^sup>*\\\<^sup>* (U' \<^sup>*\\\<^sup>1 (t \\ u)) \ []" using Con T U T' U' Con_sym1 apply (cases T', cases U') apply simp_all by (metis Resid.simps(1) Resid.simps(6) neq_Nil_conv) hence "u \ t \ U' \<^sup>*\\\<^sup>1 (t \\ u) \ [] \ (u \\ t) \<^sup>1\\\<^sup>* T' \ R.null" using T' U' R.con_sym Con_sym1 by simp moreover have "(U' \<^sup>*\\\<^sup>1 (t \\ u)) \<^sup>*\\\<^sup>* (T' \<^sup>*\\\<^sup>1 (u \\ t)) \ []" using 2 3 ind by simp ultimately show "U \<^sup>*\\<^sup>* T" using T U T' U' by (cases T'; cases U') auto next assume Con: "U \<^sup>*\\<^sup>* T" have 3: "u \ t \ U' \<^sup>*\\\<^sup>1 (t \\ u) \ [] \ (u \\ t) \<^sup>1\\\<^sup>* T' \ R.null \ (U' \<^sup>*\\\<^sup>1 (t \\ u)) \<^sup>*\\\<^sup>* (T' \<^sup>*\\\<^sup>1 (u \\ t)) \ []" using Con T U T' U' Con_sym1 apply (cases T'; cases U') apply auto apply argo by force hence "t \ u \ T' \<^sup>*\\\<^sup>1 (u \\ t) \ [] \ (t \\ u) \<^sup>1\\\<^sup>* U' \ R.null" using T' U' R.con_sym Con_sym1 by simp moreover have "(T' \<^sup>*\\\<^sup>1 (u \\ t)) \<^sup>*\\\<^sup>* (U' \<^sup>*\\\<^sup>1 (t \\ u)) \ []" using 2 3 ind by simp ultimately show "T \<^sup>*\\<^sup>* U" using T U T' U' by (cases T'; cases U') auto qed qed qed qed qed qed lemma Con_sym: shows "T \<^sup>*\\<^sup>* U \ U \<^sup>*\\<^sup>* T" using Con_sym_ind by blast lemma Residx1_as_Resid: shows "T \<^sup>*\\\<^sup>1 u = T \<^sup>*\\\<^sup>* [u]" proof (induct T) show "[] \<^sup>*\\\<^sup>1 u = [] \<^sup>*\\\<^sup>* [u]" by simp fix t T assume ind: "T \<^sup>*\\\<^sup>1 u = T \<^sup>*\\\<^sup>* [u]" show "(t # T) \<^sup>*\\\<^sup>1 u = (t # T) \<^sup>*\\\<^sup>* [u]" by (cases T) auto qed lemma Resid1x_as_Resid': shows "t \<^sup>1\\\<^sup>* U = (if [t] \<^sup>*\\\<^sup>* U \ [] then hd ([t] \<^sup>*\\\<^sup>* U) else R.null)" proof (induct U) show "t \<^sup>1\\\<^sup>* [] = (if [t] \<^sup>*\\\<^sup>* [] \ [] then hd ([t] \<^sup>*\\\<^sup>* []) else R.null)" by simp fix u U assume ind: "t \<^sup>1\\\<^sup>* U = (if [t] \<^sup>*\\\<^sup>* U \ [] then hd ([t] \<^sup>*\\\<^sup>* U) else R.null)" show "t \<^sup>1\\\<^sup>* (u # U) = (if [t] \<^sup>*\\\<^sup>* (u # U) \ [] then hd ([t] \<^sup>*\\\<^sup>* (u # U)) else R.null)" using Resid1x_null by (cases U) auto qed text \ The following recursive expansion for consistency of paths is an intermediate result that is not yet quite in the form we really want. 
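In particular, the clause below for consistency of \t # T\ with \u # U\ peels one transition off each argument simultaneously and is expressed in terms of residuation along the singleton paths \[u \\ t]\ and \[t \\ u]\. The final versions proved later (\Con_cons\ and \Resid_cons\) instead peel a transition off only one argument at a time; for example, consistency of \t # T\ with \U\ is characterized by consistency of \[t]\ with \U\ together with consistency of \T\ with \U \<^sup>*\\\<^sup>* [t]\.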
\ lemma Con_rec: shows "[t] \<^sup>*\\<^sup>* [u] \ t \ u" and "T \ [] \ t # T \<^sup>*\\<^sup>* [u] \ t \ u \ T \<^sup>*\\<^sup>* [u \\ t]" and "U \ [] \ [t] \<^sup>*\\<^sup>* (u # U) \ t \ u \ [t \\ u] \<^sup>*\\<^sup>* U" and "\T \ []; U \ []\ \ t # T \<^sup>*\\<^sup>* u # U \ t \ u \ T \<^sup>*\\<^sup>* [u \\ t] \ [t \\ u] \<^sup>*\\<^sup>* U \ T \<^sup>*\\\<^sup>* [u \\ t] \<^sup>*\\<^sup>* U \<^sup>*\\\<^sup>* [t \\ u]" proof - show "[t] \<^sup>*\\<^sup>* [u] \ t \ u" by simp show "T \ [] \ t # T \<^sup>*\\<^sup>* [u] \ t \ u \ T \<^sup>*\\<^sup>* [u \\ t]" using Residx1_as_Resid by (cases T) auto show "U \ [] \ [t] \<^sup>*\\<^sup>* (u # U) \ t \ u \ [t \\ u] \<^sup>*\\<^sup>* U" using Resid1x_as_Resid' Con_sym Con_sym1 Resid1x.simps(3) Residx1_as_Resid by (cases U) auto show "\T \ []; U \ []\ \ t # T \<^sup>*\\<^sup>* u # U \ t \ u \ T \<^sup>*\\<^sup>* [u \\ t] \ [t \\ u] \<^sup>*\\<^sup>* U \ T \<^sup>*\\\<^sup>* [u \\ t] \<^sup>*\\<^sup>* U \<^sup>*\\\<^sup>* [t \\ u]" using Residx1_as_Resid Resid1x_as_Resid' Con_sym1 Con_sym R.con_sym by (cases T; cases U) auto qed text \ This version is a more appealing form of the previously proved fact \Resid1x_as_Resid'\. \ lemma Resid1x_as_Resid: assumes "[t] \<^sup>*\\\<^sup>* U \ []" shows "[t] \<^sup>*\\\<^sup>* U = [t \<^sup>1\\\<^sup>* U]" using assms Con_rec(2,4) apply (cases U; cases "tl U") apply auto by argo+ (* TODO: Why can auto no longer complete this proof? *) text \ The following is an intermediate version of a recursive expansion for residuation, to be improved subsequently. \ lemma Resid_rec: shows [simp]: "[t] \<^sup>*\\<^sup>* [u] \ [t] \<^sup>*\\\<^sup>* [u] = [t \\ u]" and "\T \ []; t # T \<^sup>*\\<^sup>* [u]\ \ (t # T) \<^sup>*\\\<^sup>* [u] = (t \\ u) # (T \<^sup>*\\\<^sup>* [u \\ t])" and "\U \ []; Con [t] (u # U)\ \ [t] \<^sup>*\\\<^sup>* (u # U) = [t \\ u] \<^sup>*\\\<^sup>* U" and "\T \ []; U \ []; Con (t # T) (u # U)\ \ (t # T) \<^sup>*\\\<^sup>* (u # U) = ([t \\ u] \<^sup>*\\\<^sup>* U) @ ((T \<^sup>*\\\<^sup>* [u \\ t]) \<^sup>*\\\<^sup>* (U \<^sup>*\\\<^sup>* [t \\ u]))" proof - show "[t] \<^sup>*\\<^sup>* [u] \ Resid [t] [u] = [t \\ u]" by (meson Resid.simps(3)) show "\T \ []; t # T \<^sup>*\\<^sup>* [u]\ \ (t # T) \<^sup>*\\\<^sup>* [u] = (t \\ u) # (T \<^sup>*\\\<^sup>* [u \\ t])" using Residx1_as_Resid by (metis Residx1.simps(3) list.exhaust_sel) show 1: "\U \ []; [t] \<^sup>*\\<^sup>* u # U\ \ [t] \<^sup>*\\\<^sup>* (u # U) = [t \\ u] \<^sup>*\\\<^sup>* U" by (metis Con_rec(3) Resid1x.simps(3) Resid1x_as_Resid list.exhaust) show "\T \ []; U \ []; t # T \<^sup>*\\<^sup>* u # U\ \ (t # T) \<^sup>*\\\<^sup>* (u # U) = ([t \\ u] \<^sup>*\\\<^sup>* U) @ ((T \<^sup>*\\\<^sup>* [u \\ t]) \<^sup>*\\\<^sup>* (U \<^sup>*\\\<^sup>* [t \\ u]))" proof - assume T: "T \ []" and U: "U \ []" and Con: "Con (t # T) (u # U)" have tu: "t \ u" using Con Con_rec by metis have "(t # T) \<^sup>*\\\<^sup>* (u # U) = ((t \\ u) \<^sup>1\\\<^sup>* U) # ((T \<^sup>*\\\<^sup>1 (u \\ t)) \<^sup>*\\\<^sup>* (U \<^sup>*\\\<^sup>1 (t \\ u)))" using T U Con tu by (cases T; cases U) auto also have "... = ([t \\ u] \<^sup>*\\\<^sup>* U) @ ((T \<^sup>*\\\<^sup>* [u \\ t]) \<^sup>*\\\<^sup>* (U \<^sup>*\\\<^sup>* [t \\ u]))" using T U Con tu Con_rec(4) Resid1x_as_Resid Residx1_as_Resid by force finally show ?thesis by simp qed qed text \ For consistent paths, residuation is length-preserving. \ lemma length_Resid_ind: shows "\T U. 
\length T + length U \ n; T \<^sup>*\\<^sup>* U\ \ length (T \<^sup>*\\\<^sup>* U) = length T" apply (induct n) apply simp proof - fix n T U assume ind: "\T U. \length T + length U \ n; T \<^sup>*\\<^sup>* U\ \ length (T \<^sup>*\\\<^sup>* U) = length T" assume Con: "T \<^sup>*\\<^sup>* U" assume len: "length T + length U \ Suc n" show "length (T \<^sup>*\\\<^sup>* U) = length T" using Con len ind Resid1x_as_Resid length_Cons Con_rec(2) Resid_rec(2) apply (cases T; cases U) apply auto apply (cases "tl T = []"; cases "tl U = []") apply auto apply metis apply fastforce proof - fix t T' u U' assume T: "T = t # T'" and U: "U = u # U'" assume T': "T' \ []" and U': "U' \ []" show "length ((t # T') \<^sup>*\\\<^sup>* (u # U')) = Suc (length T')" using Con Con_rec(4) Con_sym Resid_rec(4) T T' U U' ind len by auto qed qed lemma length_Resid: assumes "T \<^sup>*\\<^sup>* U" shows "length (T \<^sup>*\\\<^sup>* U) = length T" using assms length_Resid_ind by auto lemma Con_initial_left: shows "\t T. t # T \<^sup>*\\<^sup>* U \ [t] \<^sup>*\\<^sup>* U" apply (induct U) apply simp by (metis Con_rec(1-4)) lemma Con_initial_right: shows "\u U. T \<^sup>*\\<^sup>* u # U \ T \<^sup>*\\<^sup>* [u]" apply (induct T) apply simp by (metis Con_rec(1-4)) lemma Resid_cons_ind: shows "\T U. \T \ []; U \ []; length T + length U \ n\ \ (\t. t # T \<^sup>*\\<^sup>* U \ [t] \<^sup>*\\<^sup>* U \ T \<^sup>*\\<^sup>* U \<^sup>*\\\<^sup>* [t]) \ (\u. T \<^sup>*\\<^sup>* u # U \ T \<^sup>*\\<^sup>* [u] \ T \<^sup>*\\\<^sup>* [u] \<^sup>*\\<^sup>* U) \ (\t. t # T \<^sup>*\\<^sup>* U \ (t # T) \<^sup>*\\\<^sup>* U = [t] \<^sup>*\\\<^sup>* U @ T \<^sup>*\\\<^sup>* (U \<^sup>*\\\<^sup>* [t])) \ (\u. T \<^sup>*\\<^sup>* u # U \ T \<^sup>*\\\<^sup>* (u # U) = (T \<^sup>*\\\<^sup>* [u]) \<^sup>*\\\<^sup>* U)" proof (induct n) show "\T U. \T \ []; U \ []; length T + length U \ 0\ \ (\t. t # T \<^sup>*\\<^sup>* U \ [t] \<^sup>*\\<^sup>* U \ T \<^sup>*\\<^sup>* U \<^sup>*\\\<^sup>* [t]) \ (\u. T \<^sup>*\\<^sup>* u # U \ T \<^sup>*\\<^sup>* [u] \ T \<^sup>*\\\<^sup>* [u] \<^sup>*\\<^sup>* U) \ (\t. t # T \<^sup>*\\<^sup>* U \ (t # T) \<^sup>*\\\<^sup>* U = [t] \<^sup>*\\\<^sup>* U @ T \<^sup>*\\\<^sup>* (U \<^sup>*\\\<^sup>* [t])) \ (\u. T \<^sup>*\\<^sup>* u # U \ T \<^sup>*\\\<^sup>* (u # U) = (T \<^sup>*\\\<^sup>* [u]) \<^sup>*\\\<^sup>* U)" by simp fix n and T U :: "'a list" assume ind: "\T U. \T \ []; U \ []; length T + length U \ n\ \ (\t. t # T \<^sup>*\\<^sup>* U \ [t] \<^sup>*\\<^sup>* U \ T \<^sup>*\\<^sup>* U \<^sup>*\\\<^sup>* [t]) \ (\u. T \<^sup>*\\<^sup>* u # U \ T \<^sup>*\\<^sup>* [u] \ T \<^sup>*\\\<^sup>* [u] \<^sup>*\\<^sup>* U) \ (\t. t # T \<^sup>*\\<^sup>* U \ (t # T) \<^sup>*\\\<^sup>* U = [t] \<^sup>*\\\<^sup>* U @ T \<^sup>*\\\<^sup>* (U \<^sup>*\\\<^sup>* [t])) \ (\u. T \<^sup>*\\<^sup>* u # U \ T \<^sup>*\\\<^sup>* (u # U) = (T \<^sup>*\\\<^sup>* [u]) \<^sup>*\\\<^sup>* U)" assume T: "T \ []" and U: "U \ []" assume len: "length T + length U \ Suc n" show "(\t. t # T \<^sup>*\\<^sup>* U \ [t] \<^sup>*\\<^sup>* U \ T \<^sup>*\\<^sup>* U \<^sup>*\\\<^sup>* [t]) \ (\u. T \<^sup>*\\<^sup>* u # U \ T \<^sup>*\\<^sup>* [u] \ T \<^sup>*\\\<^sup>* [u] \<^sup>*\\<^sup>* U) \ (\t. t # T \<^sup>*\\<^sup>* U \ (t # T) \<^sup>*\\\<^sup>* U = [t] \<^sup>*\\\<^sup>* U @ T \<^sup>*\\\<^sup>* (U \<^sup>*\\\<^sup>* [t])) \ (\u. 
T \<^sup>*\\<^sup>* u # U \ T \<^sup>*\\\<^sup>* (u # U) = (T \<^sup>*\\\<^sup>* [u]) \<^sup>*\\\<^sup>* U)" proof (intro allI conjI iffI impI) fix t show 1: "t # T \<^sup>*\\<^sup>* U \ (t # T) \<^sup>*\\\<^sup>* U = [t] \<^sup>*\\\<^sup>* U @ T \<^sup>*\\\<^sup>* (U \<^sup>*\\\<^sup>* [t])" proof (cases U) show "U = [] \ ?thesis" using U by simp fix u U' assume U: "U = u # U'" assume Con: "t # T \<^sup>*\\<^sup>* U" show ?thesis proof (cases "U' = []") show "U' = [] \ ?thesis" using T U Con R.con_sym Con_rec(2) Resid_rec(2) by auto assume U': "U' \ []" have "(t # T) \<^sup>*\\\<^sup>* U = [t \\ u] \<^sup>*\\\<^sup>* U' @ (T \<^sup>*\\\<^sup>* [u \\ t]) \<^sup>*\\\<^sup>* (U' \<^sup>*\\\<^sup>* [t \\ u])" using T U U' Con Resid_rec(4) by fastforce also have 1: "... = [t] \<^sup>*\\\<^sup>* U @ (T \<^sup>*\\\<^sup>* [u \\ t]) \<^sup>*\\\<^sup>* (U' \<^sup>*\\\<^sup>* [t \\ u])" using T U U' Con Con_rec(3-4) Resid_rec(3) by auto also have "... = [t] \<^sup>*\\\<^sup>* U @ T \<^sup>*\\\<^sup>* ((u \\ t) # (U' \<^sup>*\\\<^sup>* [t \\ u]))" proof - have "T \<^sup>*\\\<^sup>* ((u \\ t) # (U' \<^sup>*\\\<^sup>* [t \\ u])) = (T \<^sup>*\\\<^sup>* [u \\ t]) \<^sup>*\\\<^sup>* (U' \<^sup>*\\\<^sup>* [t \\ u])" using T U U' ind [of T "U' \<^sup>*\\\<^sup>* [t \\ u]"] Con Con_rec(4) Con_sym len length_Resid by fastforce thus ?thesis by auto qed also have "... = [t] \<^sup>*\\\<^sup>* U @ T \<^sup>*\\\<^sup>* (U \<^sup>*\\\<^sup>* [t])" using T U U' 1 Con Con_rec(4) Con_sym1 Residx1_as_Resid Resid1x_as_Resid Resid_rec(2) Con_sym Con_initial_left by auto finally show ?thesis by simp qed qed show "t # T \<^sup>*\\<^sup>* U \ [t] \<^sup>*\\<^sup>* U" by (simp add: Con_initial_left) show "t # T \<^sup>*\\<^sup>* U \ T \<^sup>*\\<^sup>* (U \<^sup>*\\\<^sup>* [t])" by (metis "1" Suc_inject T append_Nil2 length_0_conv length_Cons length_Resid) show "[t] \<^sup>*\\<^sup>* U \ T \<^sup>*\\<^sup>* U \<^sup>*\\\<^sup>* [t] \ t # T \<^sup>*\\<^sup>* U" proof (cases U) show "\[t] \<^sup>*\\<^sup>* U \ T \<^sup>*\\<^sup>* U \<^sup>*\\\<^sup>* [t]; U = []\ \ t # T \<^sup>*\\<^sup>* U" using U by simp fix u U' assume U: "U = u # U'" assume Con: "[t] \<^sup>*\\<^sup>* U \ T \<^sup>*\\<^sup>* U \<^sup>*\\\<^sup>* [t]" show "t # T \<^sup>*\\<^sup>* U" proof (cases "U' = []") show "U' = [] \ ?thesis" using T U Con by (metis Con_rec(2) Resid.simps(3) R.con_sym) assume U': "U' \ []" show ?thesis proof - have "t \ u" using T U U' Con Con_rec(3) by blast moreover have "T \<^sup>*\\<^sup>* [u \\ t]" using T U U' Con Con_initial_right Con_sym1 Residx1_as_Resid Resid1x_as_Resid Resid_rec(2) by (metis Con_sym) moreover have "[t \\ u] \<^sup>*\\<^sup>* U'" using T U U' Con Resid_rec(3) by force moreover have "T \<^sup>*\\\<^sup>* [u \\ t] \<^sup>*\\<^sup>* U' \<^sup>*\\\<^sup>* [t \\ u]" by (metis (no_types, opaque_lifting) Con Con_sym Resid_rec(2) Suc_le_mono T U U' add_Suc_right calculation(3) ind len length_Cons length_Resid) ultimately show ?thesis using T U U' Con_rec(4) by simp qed qed qed next fix u show 1: "T \<^sup>*\\<^sup>* u # U \ T \<^sup>*\\\<^sup>* (u # U) = (T \<^sup>*\\\<^sup>* [u]) \<^sup>*\\\<^sup>* U" proof (cases T) show 2: "\T \<^sup>*\\<^sup>* u # U; T = []\ \ T \<^sup>*\\\<^sup>* (u # U) = (T \<^sup>*\\\<^sup>* [u]) \<^sup>*\\\<^sup>* U" using T by simp fix t T' assume T: "T = t # T'" assume Con: "T \<^sup>*\\<^sup>* u # U" show ?thesis proof (cases "T' = []") show "T' = [] \ ?thesis" using T U Con Con_rec(3) Resid1x_as_Resid Resid_rec(3) by force assume T': "T' \ []" have "T \<^sup>*\\\<^sup>* (u # U) = 
[t \\ u] \<^sup>*\\\<^sup>* U @ (T' \<^sup>*\\\<^sup>* [u \\ t]) \<^sup>*\\\<^sup>* (U \<^sup>*\\\<^sup>* [t \\ u])" using T U T' Con Resid_rec(4) [of T' U t u] by simp also have "... = ((t \\ u) # (T' \<^sup>*\\\<^sup>* [u \\ t])) \<^sup>*\\\<^sup>* U" proof - have "length (T' \<^sup>*\\\<^sup>* [u \\ t]) + length U \ n" by (metis (no_types, lifting) Con Con_rec(4) One_nat_def Suc_eq_plus1 Suc_leI T T' U add_Suc le_less_trans len length_Resid lessI list.size(4) not_le) thus ?thesis using ind [of "T' \<^sup>*\\\<^sup>* [u \\ t]" U] Con Con_rec(4) T T' U by auto qed also have "... = (T \<^sup>*\\\<^sup>* [u]) \<^sup>*\\\<^sup>* U" using T U T' Con Con_rec(2,4) Resid_rec(2) by force finally show ?thesis by simp qed qed show "T \<^sup>*\\<^sup>* u # U \ T \<^sup>*\\<^sup>* [u]" using 1 by force show "T \<^sup>*\\<^sup>* u # U \ T \<^sup>*\\\<^sup>* [u] \<^sup>*\\<^sup>* U" using 1 by fastforce show "T \<^sup>*\\<^sup>* [u] \ T \<^sup>*\\\<^sup>* [u] \<^sup>*\\<^sup>* U \ T \<^sup>*\\<^sup>* u # U" proof (cases T) show "\T \<^sup>*\\<^sup>* [u] \ T \<^sup>*\\\<^sup>* [u] \<^sup>*\\<^sup>* U; T = []\ \ T \<^sup>*\\<^sup>* u # U" using T by simp fix t T' assume T: "T = t # T'" assume Con: "T \<^sup>*\\<^sup>* [u] \ T \<^sup>*\\\<^sup>* [u] \<^sup>*\\<^sup>* U" show "Con T (u # U)" proof (cases "T' = []") show "T' = [] \ ?thesis" using Con T U Con_rec(1,3) by auto assume T': "T' \ []" have "t \ u" using Con T U T' Con_rec(2) by blast moreover have 2: "T' \<^sup>*\\<^sup>* [u \\ t]" using Con T U T' Con_rec(2) by blast moreover have "[t \\ u] \<^sup>*\\<^sup>* U" using Con T U T' by (metis Con_initial_left Resid_rec(2)) moreover have "T' \<^sup>*\\\<^sup>* [u \\ t] \<^sup>*\\<^sup>* U \<^sup>*\\\<^sup>* [t \\ u]" proof - have 0: "length (U \<^sup>*\\\<^sup>* [t \\ u]) = length U" using Con T U T' length_Resid Con_sym calculation(3) by blast hence 1: "length T' + length (U \<^sup>*\\\<^sup>* [t \\ u]) \ n" using Con T U T' len length_Resid Con_sym by simp have "length ((T \<^sup>*\\\<^sup>* [u]) \<^sup>*\\\<^sup>* U) = length ([t \\ u] \<^sup>*\\\<^sup>* U) + length ((T' \<^sup>*\\\<^sup>* [u \\ t]) \<^sup>*\\\<^sup>* (U \<^sup>*\\\<^sup>* [t \\ u]))" proof - have "(T \<^sup>*\\\<^sup>* [u]) \<^sup>*\\\<^sup>* U = [t \\ u] \<^sup>*\\\<^sup>* U @ (T' \<^sup>*\\\<^sup>* [u \\ t]) \<^sup>*\\\<^sup>* (U \<^sup>*\\\<^sup>* [t \\ u])" by (metis 0 1 2 Con Resid_rec(2) T T' U ind length_Resid) thus ?thesis using Con T U T' length_Resid by simp qed moreover have "length ((T \<^sup>*\\\<^sup>* [u]) \<^sup>*\\\<^sup>* U) = length T" using Con T U T' length_Resid by metis moreover have "length ([t \\ u] \<^sup>*\\\<^sup>* U) \ 1" using Con T U T' Resid1x_as_Resid by (metis One_nat_def length_Cons list.size(3) order_refl zero_le) ultimately show ?thesis using Con T U T' length_Resid by auto qed ultimately show "T \<^sup>*\\<^sup>* u # U" using T Con_rec(4) [of T' U t u] by fastforce qed qed qed qed text \ The following are the final versions of recursive expansion for consistency and residuation on paths. These are what I really wanted the original definitions to look like, but if this is tried, then \Con\ and \Resid\ end up having to be mutually recursive, expressing the definitions so that they are single-valued becomes an issue, and proving termination is more problematic. 
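Concretely, for nonempty \U\ and assuming the paths involved are consistent, we obtain \(t # T) \<^sup>*\\\<^sup>* U = ([t] \<^sup>*\\\<^sup>* U) @ (T \<^sup>*\\\<^sup>* (U \<^sup>*\\\<^sup>* [t]))\ and \T \<^sup>*\\\<^sup>* (u # U) = (T \<^sup>*\\\<^sup>* [u]) \<^sup>*\\\<^sup>* U\, so that a residuation can be expanded one transition at a time on either side.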
\ lemma Con_cons: assumes "T \ []" and "U \ []" shows "t # T \<^sup>*\\<^sup>* U \ [t] \<^sup>*\\<^sup>* U \ T \<^sup>*\\<^sup>* U \<^sup>*\\\<^sup>* [t]" and "T \<^sup>*\\<^sup>* u # U \ T \<^sup>*\\<^sup>* [u] \ T \<^sup>*\\\<^sup>* [u] \<^sup>*\\<^sup>* U" using assms Resid_cons_ind [of T U] by blast+ lemma Con_consI [intro, simp]: shows "\T \ []; U \ []; [t] \<^sup>*\\<^sup>* U; T \<^sup>*\\<^sup>* U \<^sup>*\\\<^sup>* [t]\ \ t # T \<^sup>*\\<^sup>* U" and "\T \ []; U \ []; T \<^sup>*\\<^sup>* [u]; T \<^sup>*\\\<^sup>* [u] \<^sup>*\\<^sup>* U\ \ T \<^sup>*\\<^sup>* u # U" using Con_cons by auto (* TODO: Making this a simp currently seems to produce undesirable breakage. *) lemma Resid_cons: assumes "U \ []" shows "t # T \<^sup>*\\<^sup>* U \ (t # T) \<^sup>*\\\<^sup>* U = ([t] \<^sup>*\\\<^sup>* U) @ (T \<^sup>*\\\<^sup>* (U \<^sup>*\\\<^sup>* [t]))" and "T \<^sup>*\\<^sup>* u # U \ T \<^sup>*\\\<^sup>* (u # U) = (T \<^sup>*\\\<^sup>* [u]) \<^sup>*\\\<^sup>* U" using assms Resid_cons_ind [of T U] Resid.simps(1) by blast+ text \ The following expansion of residuation with respect to the first argument is stated in terms of the more primitive cons, rather than list append, but as a result \\<^sup>1\\<^sup>*\ has to be used. \ (* TODO: Making this a simp seems to produce similar breakage as above. *) lemma Resid_cons': assumes "T \ []" shows "t # T \<^sup>*\\<^sup>* U \ (t # T) \<^sup>*\\\<^sup>* U = (t \<^sup>1\\\<^sup>* U) # (T \<^sup>*\\\<^sup>* (U \<^sup>*\\\<^sup>* [t]))" using assms by (metis Con_sym Resid.simps(1) Resid1x_as_Resid Resid_cons(1) append_Cons append_Nil) lemma Srcs_Resid_Arr_single: assumes "T \<^sup>*\\<^sup>* [u]" shows "Srcs (T \<^sup>*\\\<^sup>* [u]) = R.targets u" proof (cases T) show "T = [] \ Srcs (T \<^sup>*\\\<^sup>* [u]) = R.targets u" using assms by simp fix t T' assume T: "T = t # T'" show "Srcs (T \<^sup>*\\\<^sup>* [u]) = R.targets u" proof (cases "T' = []") show "T' = [] \ ?thesis" using assms T R.sources_resid by auto assume T': "T' \ []" have "Srcs (T \<^sup>*\\\<^sup>* [u]) = Srcs ((t # T') \<^sup>*\\\<^sup>* [u])" using T by simp also have "... = Srcs ((t \\ u) # (T' \<^sup>*\\\<^sup>* ([u] \<^sup>*\\\<^sup>* T')))" using assms T by (metis Resid_rec(2) Srcs.elims T' list.distinct(1) list.sel(1)) also have "... = R.sources (t \\ u)" using Srcs.elims by blast also have "... = R.targets u" using assms Con_rec(2) T T' R.sources_resid by force finally show ?thesis by blast qed qed lemma Srcs_Resid_single_Arr: shows "\u. [u] \<^sup>*\\<^sup>* T \ Srcs ([u] \<^sup>*\\\<^sup>* T) = Trgs T" proof (induct T) show "\u. [u] \<^sup>*\\<^sup>* [] \ Srcs ([u] \<^sup>*\\\<^sup>* []) = Trgs []" by simp fix t u T assume ind: "\u. [u] \<^sup>*\\<^sup>* T \ Srcs ([u] \<^sup>*\\\<^sup>* T) = Trgs T" assume Con: "[u] \<^sup>*\\<^sup>* t # T" show "Srcs ([u] \<^sup>*\\\<^sup>* (t # T)) = Trgs (t # T)" proof (cases "T = []") show "T = [] \ ?thesis" using Con Srcs_Resid_Arr_single Trgs.simps(2) by presburger assume T: "T \ []" have "Srcs ([u] \<^sup>*\\\<^sup>* (t # T)) = Srcs ([u \\ t] \<^sup>*\\\<^sup>* T)" using Con Resid_rec(3) T by force also have "... = Trgs T" using Con ind Con_rec(3) T by auto also have "... = Trgs (t # T)" by (metis T Trgs.elims Trgs.simps(3)) finally show ?thesis by simp qed qed lemma Trgs_Resid_sym_Arr_single: shows "\u. T \<^sup>*\\<^sup>* [u] \ Trgs (T \<^sup>*\\\<^sup>* [u]) = Trgs ([u] \<^sup>*\\\<^sup>* T)" proof (induct T) show "\u. 
[] \<^sup>*\\<^sup>* [u] \ Trgs ([] \<^sup>*\\\<^sup>* [u]) = Trgs ([u] \<^sup>*\\\<^sup>* [])" by simp fix t u T assume ind: "\u. T \<^sup>*\\<^sup>* [u] \ Trgs (T \<^sup>*\\\<^sup>* [u]) = Trgs ([u] \<^sup>*\\\<^sup>* T)" assume Con: "t # T \<^sup>*\\<^sup>* [u]" show "Trgs ((t # T) \<^sup>*\\\<^sup>* [u]) = Trgs ([u] \<^sup>*\\\<^sup>* (t # T))" proof (cases "T = []") show "T = [] \ ?thesis" using R.targets_resid_sym by (simp add: R.con_sym) assume T: "T \ []" show ?thesis proof - have "Trgs ((t # T) \<^sup>*\\\<^sup>* [u]) = Trgs ((t \\ u) # (T \<^sup>*\\\<^sup>* [u \\ t]))" using Con Resid_rec(2) T by auto also have "... = Trgs (T \<^sup>*\\\<^sup>* [u \\ t])" using T Con Con_rec(2) [of T t u] by (metis Trgs.elims Trgs.simps(3)) also have "... = Trgs ([u \\ t] \<^sup>*\\\<^sup>* T)" using T Con ind Con_sym by metis also have "... = Trgs ([u] \<^sup>*\\\<^sup>* (t # T))" using T Con Con_sym Resid_rec(3) by presburger finally show ?thesis by blast qed qed qed lemma Srcs_Resid [simp]: shows "\T. T \<^sup>*\\<^sup>* U \ Srcs (T \<^sup>*\\\<^sup>* U) = Trgs U" proof (induct U) show "\T. T \<^sup>*\\<^sup>* [] \ Srcs (T \<^sup>*\\\<^sup>* []) = Trgs []" using Con_sym Resid.simps(1) by blast fix u U T assume ind: "\T. T \<^sup>*\\<^sup>* U \ Srcs (T \<^sup>*\\\<^sup>* U) = Trgs U" assume Con: "T \<^sup>*\\<^sup>* u # U" show "Srcs (T \<^sup>*\\\<^sup>* (u # U)) = Trgs (u # U)" by (metis Con Resid_cons(2) Srcs_Resid_Arr_single Trgs.simps(2-3) ind list.exhaust_sel) qed lemma Trgs_Resid_sym [simp]: shows "\T. T \<^sup>*\\<^sup>* U \ Trgs (T \<^sup>*\\\<^sup>* U) = Trgs (U \<^sup>*\\\<^sup>* T)" proof (induct U) show "\T. T \<^sup>*\\<^sup>* [] \ Trgs (T \<^sup>*\\\<^sup>* []) = Trgs ([] \<^sup>*\\\<^sup>* T)" by (meson Con_sym Resid.simps(1)) fix u U T assume ind: "\T. T \<^sup>*\\<^sup>* U \ Trgs (T \<^sup>*\\\<^sup>* U) = Trgs (U \<^sup>*\\\<^sup>* T)" assume Con: "T \<^sup>*\\<^sup>* u # U" show "Trgs (T \<^sup>*\\\<^sup>* (u # U)) = Trgs ((u # U) \<^sup>*\\\<^sup>* T)" proof (cases "U = []") show "U = [] \ ?thesis" using Con Trgs_Resid_sym_Arr_single by blast assume U: "U \ []" show ?thesis proof - have "Trgs (T \<^sup>*\\\<^sup>* (u # U)) = Trgs ((T \<^sup>*\\\<^sup>* [u]) \<^sup>*\\\<^sup>* U)" using U by (metis Con Resid_cons(2)) also have "... = Trgs (U \<^sup>*\\\<^sup>* (T \<^sup>*\\\<^sup>* [u]))" using U Con by (metis Con_sym ind) also have "... = Trgs ((u # U) \<^sup>*\\\<^sup>* T)" by (metis (no_types, opaque_lifting) Con_cons(1) Con_sym Resid.simps(1) Resid_cons' Trgs.simps(3) U neq_Nil_conv) finally show ?thesis by simp qed qed qed lemma img_Resid_Srcs: shows "Arr T \ (\a. [a] \<^sup>*\\\<^sup>* T) ` Srcs T \ (\b. [b]) ` Trgs T" proof (induct T) show "Arr [] \ (\a. [a] \<^sup>*\\\<^sup>* []) ` Srcs [] \ (\b. [b]) ` Trgs []" by simp fix t :: 'a and T :: "'a list" assume tT: "Arr (t # T)" assume ind: "Arr T \ (\a. [a] \<^sup>*\\\<^sup>* T) ` Srcs T \ (\b. [b]) ` Trgs T" show "(\a. [a] \<^sup>*\\\<^sup>* (t # T)) ` Srcs (t # T) \ (\b. [b]) ` Trgs (t # T)" proof fix B assume B: "B \ (\a. [a] \<^sup>*\\\<^sup>* (t # T)) ` Srcs (t # T)" show "B \ (\b. 
[b]) ` Trgs (t # T)" proof (cases "T = []") assume T: "T = []" obtain a where a: "a \ R.sources t \ [a \\ t] = B" by (metis (no_types, lifting) B R.composite_of_source_arr R.con_prfx_composite_of(1) Resid_rec(1) Srcs.simps(2) T Arr.simps(2) Con_rec(1) imageE tT) have "a \\ t \ Trgs (t # T)" using tT T a by (simp add: R.resid_source_in_targets) thus ?thesis using B a image_iff by fastforce next assume T: "T \ []" obtain a where a: "a \ R.sources t \ [a] \<^sup>*\\\<^sup>* (t # T) = B" using tT T B Srcs.elims by blast have "[a \\ t] \<^sup>*\\\<^sup>* T = B" using tT T B a by (metis Con_rec(3) R.arrI R.resid_source_in_targets R.targets_are_cong Resid_rec(3) R.arr_resid_iff_con R.ide_implies_arr) moreover have "a \\ t \ Srcs T" using a tT by (metis Arr.simps(3) R.resid_source_in_targets T neq_Nil_conv subsetD) ultimately show ?thesis using T tT ind by (metis Trgs.simps(3) Arr.simps(3) image_iff list.exhaust_sel subsetD) qed qed qed lemma Resid_Arr_Src: shows "\a. \Arr T; a \ Srcs T\ \ T \<^sup>*\\\<^sup>* [a] = T" proof (induct T) show "\a. \Arr []; a \ Srcs []\ \ [] \<^sup>*\\\<^sup>* [a] = []" by simp fix a t T assume ind: "\a. \Arr T; a \ Srcs T\ \ T \<^sup>*\\\<^sup>* [a] = T" assume Arr: "Arr (t # T)" assume a: "a \ Srcs (t # T)" show "(t # T) \<^sup>*\\\<^sup>* [a] = t # T" proof (cases "T = []") show "T = [] \ ?thesis" using a R.resid_arr_ide R.sources_def by auto assume T: "T \ []" show "(t # T) \<^sup>*\\\<^sup>* [a] = t # T" proof - have 1: "R.arr t \ Arr T \ R.targets t \ Srcs T" using Arr T by (metis Arr.elims(2) list.sel(1) list.sel(3)) have 2: "t # T \<^sup>*\\<^sup>* [a]" using T a Arr Con_rec(2) by (metis (no_types, lifting) img_Resid_Srcs Con_sym imageE image_subset_iff list.distinct(1)) have "(t # T) \<^sup>*\\\<^sup>* [a] = (t \\ a) # (T \<^sup>*\\\<^sup>* [a \\ t])" using 2 T Resid_rec(2) by simp moreover have "t \\ a = t" using Arr a R.sources_def by (metis "2" CollectD Con_rec(2) T Srcs_are_ide in_mono R.resid_arr_ide) moreover have "T \<^sup>*\\\<^sup>* [a \\ t] = T" by (metis "1" "2" R.in_sourcesI R.resid_source_in_targets Srcs_are_ide T a Con_rec(2) in_mono ind mem_Collect_eq) ultimately show ?thesis by simp qed qed qed lemma Con_single_ide_ind: shows "\a. R.ide a \ [a] \<^sup>*\\<^sup>* T \ Arr T \ a \ Srcs T" proof (induct T) show "\a. [a] \<^sup>*\\<^sup>* [] \ Arr [] \ a \ Srcs []" by simp fix a t T assume ind: "\a. R.ide a \ [a] \<^sup>*\\<^sup>* T \ Arr T \ a \ Srcs T" assume a: "R.ide a" show "[a] \<^sup>*\\<^sup>* (t # T) \ Arr (t # T) \ a \ Srcs (t # T)" proof (cases "T = []") show "T = [] \ ?thesis" using a Con_sym by (metis Arr.simps(2) Resid_Arr_Src Srcs.simps(2) R.arr_iff_has_source Con_rec(1) empty_iff R.in_sourcesI list.distinct(1)) assume T: "T \ []" have 1: "[a] \<^sup>*\\<^sup>* (t # T) \ a \ t \ [a \\ t] \<^sup>*\\<^sup>* T" using a T Con_cons(2) [of "[a]" T t] by simp also have 2: "... \ a \ t \ Arr T \ a \\ t \ Srcs T" using a T ind R.resid_ide_arr by blast also have "... 
\ Arr (t # T) \ a \ Srcs (t # T)" using a T Con_sym R.con_sym Resid_Arr_Src R.con_implies_arr Srcs_are_ide apply (cases T) apply simp by (metis Arr.simps(3) R.resid_arr_ide R.targets_resid_sym Srcs.simps(3) Srcs_Resid_Arr_single calculation dual_order.eq_iff list.distinct(1) R.in_sourcesI) finally show ?thesis by simp qed qed lemma Con_single_ide_iff: assumes "R.ide a" shows "[a] \<^sup>*\\<^sup>* T \ Arr T \ a \ Srcs T" using assms Con_single_ide_ind by simp lemma Con_single_ideI [intro]: assumes "R.ide a" and "Arr T" and "a \ Srcs T" shows "[a] \<^sup>*\\<^sup>* T" and "T \<^sup>*\\<^sup>* [a]" using assms Con_single_ide_iff Con_sym by auto lemma Resid_single_ide: assumes "R.ide a" and "[a] \<^sup>*\\<^sup>* T" shows "[a] \<^sup>*\\\<^sup>* T \ (\b. [b]) ` Trgs T" and [simp]: "T \<^sup>*\\\<^sup>* [a] = T" using assms Con_single_ide_ind img_Resid_Srcs Resid_Arr_Src Con_sym by blast+ lemma Resid_Arr_Ide_ind: shows "\Ide A; T \<^sup>*\\<^sup>* A\ \ T \<^sup>*\\\<^sup>* A = T" proof (induct A) show "\Ide []; T \<^sup>*\\<^sup>* []\ \ T \<^sup>*\\\<^sup>* [] = T" by simp fix a A assume ind: "\Ide A; T \<^sup>*\\<^sup>* A\ \ T \<^sup>*\\\<^sup>* A = T" assume Ide: "Ide (a # A)" assume Con: "T \<^sup>*\\<^sup>* a # A" show "T \<^sup>*\\\<^sup>* (a # A) = T" by (metis (no_types, lifting) Con Con_initial_left Con_sym Ide Ide.elims(2) Resid_cons(2) Resid_single_ide(2) ind list.inject) qed lemma Resid_Ide_Arr_ind: shows "\Ide A; A \<^sup>*\\<^sup>* T\ \ Ide (A \<^sup>*\\\<^sup>* T)" proof (induct A) show "\Ide []; [] \<^sup>*\\<^sup>* T\ \ Ide ([] \<^sup>*\\\<^sup>* T)" by simp fix a A assume ind: "\Ide A; A \<^sup>*\\<^sup>* T\ \ Ide (A \<^sup>*\\\<^sup>* T)" assume Ide: "Ide (a # A)" assume Con: "a # A \<^sup>*\\<^sup>* T" have T: "Arr T" using Con Ide Con_single_ide_ind Con_initial_left Ide.elims(2) by blast show "Ide ((a # A) \<^sup>*\\\<^sup>* T)" proof (cases "A = []") show "A = [] \ ?thesis" by (metis Con Con_sym1 Ide Ide.simps(2) Resid1x_as_Resid Resid1x_ide Residx1_as_Resid Con_sym) assume A: "A \ []" show ?thesis proof - have "Ide ([a] \<^sup>*\\\<^sup>* T)" by (metis Con Con_initial_left Con_sym Con_sym1 Ide Ide.simps(3) Resid1x_as_Resid Residx1_as_Resid Ide.simps(2) Resid1x_ide list.exhaust_sel) moreover have "Trgs ([a] \<^sup>*\\\<^sup>* T) \ Srcs (A \<^sup>*\\\<^sup>* T)" using A T Ide Con by (metis (no_types, lifting) Con_sym Ide.elims(2) Ide.simps(2) Resid_Arr_Ide_ind Srcs_Resid Trgs_Resid_sym Con_cons(2) dual_order.eq_iff list.inject) moreover have "Ide (A \<^sup>*\\\<^sup>* (T \<^sup>*\\\<^sup>* [a]))" by (metis A Con Con_cons(1) Con_sym Ide Ide.simps(3) Resid_Arr_Ide_ind Resid_single_ide(2) ind list.exhaust_sel) moreover have "Ide ((a # A) \<^sup>*\\\<^sup>* T) \ Ide ([a] \<^sup>*\\\<^sup>* T) \ Ide (A \<^sup>*\\\<^sup>* (T \<^sup>*\\\<^sup>* [a])) \ Trgs ([a] \<^sup>*\\\<^sup>* T) \ Srcs (A \<^sup>*\\\<^sup>* T)" using calculation(1-3) by (metis Arr.simps(1) Con Ide Ide.simps(3) Resid1x_as_Resid Resid_cons' Trgs.simps(2) Con_single_ide_iff Ide.simps(2) Ide_implies_Arr Resid_Arr_Src list.exhaust_sel) ultimately show ?thesis by blast qed qed qed lemma Resid_Ide: assumes "Ide A" and "A \<^sup>*\\<^sup>* T" shows "T \<^sup>*\\\<^sup>* A = T" and "Ide (A \<^sup>*\\\<^sup>* T)" using assms Resid_Ide_Arr_ind Resid_Arr_Ide_ind Con_sym by auto lemma Con_Ide_iff: shows "Ide A \ A \<^sup>*\\<^sup>* T \ Arr T \ Srcs T = Srcs A" proof (induct A) show "Ide [] \ [] \<^sup>*\\<^sup>* T \ Arr T \ Srcs T = Srcs []" by simp fix a A assume ind: "Ide A \ A \<^sup>*\\<^sup>* T \ Arr T \ Srcs T = 
Srcs A" assume Ide: "Ide (a # A)" show "a # A \<^sup>*\\<^sup>* T \ Arr T \ Srcs T = Srcs (a # A)" proof (cases "A = []") show "A = [] \ ?thesis" using Con_single_ide_ind Ide by (metis Arr.simps(2) Con_sym Ide.simps(2) Ide_implies_Arr R.arrE Resid_Arr_Src Srcs.simps(2) Srcs_Resid R.in_sourcesI) assume A: "A \ []" have "a # A \<^sup>*\\<^sup>* T \ [a] \<^sup>*\\<^sup>* T \ A \<^sup>*\\<^sup>* T \<^sup>*\\\<^sup>* [a]" using A Ide Con_cons(1) [of A T a] by fastforce also have 1: "... \ Arr T \ a \ Srcs T" by (metis A Arr_has_Src Con_single_ide_ind Ide Ide.elims(2) Resid_Arr_Src Srcs_Resid_Arr_single Con_sym Srcs_eqI ind inf.absorb_iff2 list.inject) also have "... \ Arr T \ Srcs T = Srcs (a # A)" by (metis A 1 Con_sym Ide Ide.simps(3) R.ideE R.sources_resid Resid_Arr_Src Srcs.simps(3) Srcs_Resid_Arr_single list.exhaust_sel R.in_sourcesI) finally show "a # A \<^sup>*\\<^sup>* T \ Arr T \ Srcs T = Srcs (a # A)" by blast qed qed lemma Con_IdeI: assumes "Ide A" and "Arr T" and "Srcs T = Srcs A" shows "A \<^sup>*\\<^sup>* T" and "T \<^sup>*\\<^sup>* A" using assms Con_Ide_iff Con_sym by auto lemma Con_Arr_self: shows "Arr T \ T \<^sup>*\\<^sup>* T" proof (induct T) show "Arr [] \ [] \<^sup>*\\<^sup>* []" by simp fix t T assume ind: "Arr T \ T \<^sup>*\\<^sup>* T" assume Arr: "Arr (t # T)" show "t # T \<^sup>*\\<^sup>* t # T" proof (cases "T = []") show "T = [] \ ?thesis" using Arr R.arrE by simp assume T: "T \ []" have "t \ t \ T \<^sup>*\\<^sup>* [t \\ t] \ [t \\ t] \<^sup>*\\<^sup>* T \ T \<^sup>*\\\<^sup>* [t \\ t] \<^sup>*\\<^sup>* T \<^sup>*\\\<^sup>* [t \\ t]" proof - have "t \ t" using Arr Arr.elims(1) by auto moreover have "T \<^sup>*\\<^sup>* [t \\ t]" proof - have "Ide [t \\ t]" by (simp add: R.arr_def R.prfx_reflexive calculation) moreover have "Srcs [t \\ t] = Srcs T" by (metis Arr Arr.simps(2) Arr_has_Trg R.arrE R.sources_resid Srcs.simps(2) Srcs_eqI T Trgs.simps(2) Arr.simps(3) inf.absorb_iff2 list.exhaust) ultimately show ?thesis by (metis Arr Con_sym T Arr.simps(3) Con_Ide_iff neq_Nil_conv) qed ultimately show ?thesis by (metis Con_single_ide_ind Con_sym R.prfx_reflexive Resid_single_ide(2) ind R.con_implies_arr(1)) qed thus ?thesis using Con_rec(4) [of T T t t] by force qed qed lemma Resid_Arr_self: shows "Arr T \ Ide (T \<^sup>*\\\<^sup>* T)" proof (induct T) show "Arr [] \ Ide ([] \<^sup>*\\\<^sup>* [])" by simp fix t T assume ind: "Arr T \ Ide (T \<^sup>*\\\<^sup>* T)" assume Arr: "Arr (t # T)" show "Ide ((t # T) \<^sup>*\\\<^sup>* (t # T))" proof (cases "T = []") show "T = [] \ ?thesis" using Arr R.prfx_reflexive by auto assume T: "T \ []" have 1: "(t # T) \<^sup>*\\\<^sup>* (t # T) = t \<^sup>1\\\<^sup>* (t # T) # T \<^sup>*\\\<^sup>* ((t # T) \<^sup>*\\\<^sup>* [t])" using Arr T Resid_cons' [of T t "t # T"] Con_Arr_self by presburger also have "... = (t \\ t) \<^sup>1\\\<^sup>* T # T \<^sup>*\\\<^sup>* (t \<^sup>1\\\<^sup>* [t] # T \<^sup>*\\\<^sup>* ([t] \<^sup>*\\\<^sup>* [t]))" using Arr T Resid_cons' [of T t "[t]"] by (metis Con_initial_right Resid1x.simps(3) calculation neq_Nil_conv) also have "... 
= (t \\ t) \<^sup>1\\\<^sup>* T # (T \<^sup>*\\\<^sup>* ([t] \<^sup>*\\\<^sup>* [t])) \<^sup>*\\\<^sup>* (T \<^sup>*\\\<^sup>* ([t] \<^sup>*\\\<^sup>* [t]))" by (metis 1 Resid1x.simps(2) Residx1.simps(2) Residx1_as_Resid T calculation Con_cons(1) Con_rec(4) Resid_cons(2) list.distinct(1) list.inject) finally have 2: "(t # T) \<^sup>*\\\<^sup>* (t # T) = (t \\ t) \<^sup>1\\\<^sup>* T # (T \<^sup>*\\\<^sup>* ([t] \<^sup>*\\\<^sup>* [t])) \<^sup>*\\\<^sup>* (T \<^sup>*\\\<^sup>* ([t] \<^sup>*\\\<^sup>* [t]))" by blast moreover have "Ide ..." proof - have "R.ide ((t \\ t) \<^sup>1\\\<^sup>* T)" using Arr T by (metis Con_initial_right Con_rec(2) Con_sym1 R.con_implies_arr(1) Resid1x_ide Con_Arr_self Residx1_as_Resid R.prfx_reflexive) moreover have "Ide ((T \<^sup>*\\\<^sup>* ([t] \<^sup>*\\\<^sup>* [t])) \<^sup>*\\\<^sup>* (T \<^sup>*\\\<^sup>* ([t] \<^sup>*\\\<^sup>* [t])))" using Arr T by (metis Con_Arr_self Con_rec(4) Resid_single_ide(2) Con_single_ide_ind Resid.simps(3) ind R.prfx_reflexive R.con_implies_arr(2)) moreover have "R.targets ((t \\ t) \<^sup>1\\\<^sup>* T) \ Srcs ((T \<^sup>*\\\<^sup>* ([t] \<^sup>*\\\<^sup>* [t])) \<^sup>*\\\<^sup>* (T \<^sup>*\\\<^sup>* ([t] \<^sup>*\\\<^sup>* [t])))" by (metis (no_types, lifting) 1 2 Con_cons(1) Resid1x_as_Resid T Trgs.simps(2) Trgs_Resid_sym Srcs_Resid dual_order.eq_iff list.discI list.inject) ultimately show ?thesis using Arr T by (metis Ide.simps(1,3) list.exhaust_sel) qed ultimately show ?thesis by auto qed qed lemma Con_imp_eq_Srcs: assumes "T \<^sup>*\\<^sup>* U" shows "Srcs T = Srcs U" proof (cases T) show "T = [] \ ?thesis" using assms by simp fix t T' assume T: "T = t # T'" show "Srcs T = Srcs U" proof (cases U) show "U = [] \ ?thesis" using assms T by simp fix u U' assume U: "U = u # U'" show "Srcs T = Srcs U" by (metis Con_initial_right Con_rec(1) Con_sym R.con_imp_common_source Srcs.simps(2-3) Srcs_eqI T Trgs.cases U assms) qed qed lemma Arr_iff_Con_self: shows "Arr T \ T \<^sup>*\\<^sup>* T" proof (induct T) show "Arr [] \ [] \<^sup>*\\<^sup>* []" by simp fix t T assume ind: "Arr T \ T \<^sup>*\\<^sup>* T" show "Arr (t # T) \ t # T \<^sup>*\\<^sup>* t # T" proof (cases "T = []") show "T = [] \ ?thesis" by auto assume T: "T \ []" show ?thesis proof show "Arr (t # T) \ t # T \<^sup>*\\<^sup>* t # T" using Con_Arr_self by simp show "t # T \<^sup>*\\<^sup>* t # T \ Arr (t # T)" proof - assume Con: "t # T \<^sup>*\\<^sup>* t # T" have "R.arr t" using T Con Con_rec(4) [of T T t t] by blast moreover have "Arr T" using T Con Con_rec(4) [of T T t t] ind R.arrI by (meson R.prfx_reflexive Con_single_ide_ind) moreover have "R.targets t \ Srcs T" using T Con by (metis Con_cons(2) Con_imp_eq_Srcs Trgs.simps(2) Srcs_Resid list.distinct(1) subsetI) ultimately show ?thesis by (cases T) auto qed qed qed qed lemma Arr_Resid_single: shows "\u. T \<^sup>*\\<^sup>* [u] \ Arr (T \<^sup>*\\\<^sup>* [u])" proof (induct T) show "\u. [] \<^sup>*\\<^sup>* [u] \ Arr ([] \<^sup>*\\\<^sup>* [u])" by simp fix t u T assume ind: "\u. T \<^sup>*\\<^sup>* [u] \ Arr (T \<^sup>*\\\<^sup>* [u])" assume Con: "t # T \<^sup>*\\<^sup>* [u]" show "Arr ((t # T) \<^sup>*\\\<^sup>* [u])" proof (cases "T = []") show "T = [] \ ?thesis" using Con Arr_iff_Con_self R.con_imp_arr_resid Con_rec(1) by fastforce assume T: "T \ []" have "Arr ((t # T) \<^sup>*\\\<^sup>* [u]) \ Arr ((t \\ u) # (T \<^sup>*\\\<^sup>* [u \\ t]))" using Con T Resid_rec(2) by auto also have "... 
\ R.arr (t \\ u) \ Arr (T \<^sup>*\\\<^sup>* [u \\ t]) \ R.targets (t \\ u) \ Srcs (T \<^sup>*\\\<^sup>* [u \\ t])" using Con T by (metis Arr.simps(3) Con_rec(2) neq_Nil_conv) also have "... \ R.con t u \ Arr (T \<^sup>*\\\<^sup>* [u \\ t])" using Con T by (metis Srcs_Resid_Arr_single Con_rec(2) R.arr_resid_iff_con subsetI R.targets_resid_sym) also have "... \ True" using Con ind T Con_rec(2) by blast finally show ?thesis by auto qed qed lemma Con_imp_Arr_Resid: shows "\T. T \<^sup>*\\<^sup>* U \ Arr (T \<^sup>*\\\<^sup>* U)" proof (induct U) show "\T. T \<^sup>*\\<^sup>* [] \ Arr (T \<^sup>*\\\<^sup>* [])" by (meson Con_sym Resid.simps(1)) fix u U T assume ind: "\T. T \<^sup>*\\<^sup>* U \ Arr (T \<^sup>*\\\<^sup>* U)" assume Con: "T \<^sup>*\\<^sup>* u # U" show "Arr (T \<^sup>*\\\<^sup>* (u # U))" by (metis Arr_Resid_single Con Resid_cons(2) ind) qed lemma Cube_ind: shows "\T U V. \T \<^sup>*\\<^sup>* U; V \<^sup>*\\<^sup>* T; length T + length U + length V \ n\ \ (V \<^sup>*\\\<^sup>* T \<^sup>*\\<^sup>* U \<^sup>*\\\<^sup>* T \ V \<^sup>*\\\<^sup>* U \<^sup>*\\<^sup>* T \<^sup>*\\\<^sup>* U) \ (V \<^sup>*\\\<^sup>* T \<^sup>*\\<^sup>* U \<^sup>*\\\<^sup>* T \ (V \<^sup>*\\\<^sup>* T) \<^sup>*\\\<^sup>* (U \<^sup>*\\\<^sup>* T) = (V \<^sup>*\\\<^sup>* U) \<^sup>*\\\<^sup>* (T \<^sup>*\\\<^sup>* U))" proof (induct n) show "\T U V. \T \<^sup>*\\<^sup>* U; V \<^sup>*\\<^sup>* T; length T + length U + length V \ 0\ \ (V \<^sup>*\\\<^sup>* T \<^sup>*\\<^sup>* U \<^sup>*\\\<^sup>* T \ V \<^sup>*\\\<^sup>* U \<^sup>*\\<^sup>* T \<^sup>*\\\<^sup>* U) \ (V \<^sup>*\\\<^sup>* T \<^sup>*\\<^sup>* U \<^sup>*\\\<^sup>* T \ (V \<^sup>*\\\<^sup>* T) \<^sup>*\\\<^sup>* (U \<^sup>*\\\<^sup>* T) = (V \<^sup>*\\\<^sup>* U) \<^sup>*\\\<^sup>* (T \<^sup>*\\\<^sup>* U))" by simp fix n and T U V :: "'a list" assume Con_TU: "T \<^sup>*\\<^sup>* U" and Con_VT: "V \<^sup>*\\<^sup>* T" have T: "T \ []" using Con_TU by auto have U: "U \ []" using Con_TU Con_sym Resid.simps(1) by blast have V: "V \ []" using Con_VT by auto assume len: "length T + length U + length V \ Suc n" assume ind: "\T U V. \T \<^sup>*\\<^sup>* U; V \<^sup>*\\<^sup>* T; length T + length U + length V \ n\ \ (V \<^sup>*\\\<^sup>* T \<^sup>*\\<^sup>* U \<^sup>*\\\<^sup>* T \ V \<^sup>*\\\<^sup>* U \<^sup>*\\<^sup>* T \<^sup>*\\\<^sup>* U) \ (V \<^sup>*\\\<^sup>* T \<^sup>*\\<^sup>* U \<^sup>*\\\<^sup>* T \ (V \<^sup>*\\\<^sup>* T) \<^sup>*\\\<^sup>* (U \<^sup>*\\\<^sup>* T) = (V \<^sup>*\\\<^sup>* U) \<^sup>*\\\<^sup>* (T \<^sup>*\\\<^sup>* U))" show "(V \<^sup>*\\\<^sup>* T \<^sup>*\\<^sup>* U \<^sup>*\\\<^sup>* T \ V \<^sup>*\\\<^sup>* U \<^sup>*\\<^sup>* T \<^sup>*\\\<^sup>* U) \ (V \<^sup>*\\\<^sup>* T \<^sup>*\\<^sup>* U \<^sup>*\\\<^sup>* T \ (V \<^sup>*\\\<^sup>* T) \<^sup>*\\\<^sup>* (U \<^sup>*\\\<^sup>* T) = (V \<^sup>*\\\<^sup>* U) \<^sup>*\\\<^sup>* (T \<^sup>*\\\<^sup>* U))" proof (cases V) show "V = [] \ ?thesis" using V by simp (* * TODO: I haven't found a better way to do this than just consider each combination * of T U V being a singleton. 
*) fix v V' assume V: "V = v # V'" show ?thesis proof (cases U) show "U = [] \ ?thesis" using U by simp fix u U' assume U: "U = u # U'" show ?thesis proof (cases T) show "T = [] \ ?thesis" using T by simp fix t T' assume T: "T = t # T'" show ?thesis proof (cases "V' = []", cases "U' = []", cases "T' = []") show "\V' = []; U' = []; T' = []\ \ ?thesis" using T U V R.cube Con_TU Resid.simps(2) Resid.simps(3) R.arr_resid_iff_con R.con_implies_arr Con_sym by metis assume T': "T' \ []" and V': "V' = []" and U': "U' = []" have 1: "U \<^sup>*\\<^sup>* [t]" using T Con_TU Con_cons(2) Con_sym Resid.simps(2) by metis have 2: "V \<^sup>*\\<^sup>* [t]" using V Con_VT Con_initial_right T by blast show ?thesis proof (intro conjI impI) have 3: "length [t] + length U + length V \ n" using T T' le_Suc_eq len by fastforce show *: "V \<^sup>*\\\<^sup>* T \<^sup>*\\<^sup>* U \<^sup>*\\\<^sup>* T \ V \<^sup>*\\\<^sup>* U \<^sup>*\\<^sup>* T \<^sup>*\\\<^sup>* U" proof - have "V \<^sup>*\\\<^sup>* T \<^sup>*\\<^sup>* U \<^sup>*\\\<^sup>* T \ (V \<^sup>*\\\<^sup>* [t]) \<^sup>*\\\<^sup>* T' \<^sup>*\\<^sup>* (U \<^sup>*\\\<^sup>* [t]) \<^sup>*\\\<^sup>* T'" using Con_TU Con_VT Con_sym Resid_cons(2) T T' by force also have "... \ V \<^sup>*\\\<^sup>* [t] \<^sup>*\\<^sup>* U \<^sup>*\\\<^sup>* [t] \ (V \<^sup>*\\\<^sup>* [t]) \<^sup>*\\\<^sup>* (U \<^sup>*\\\<^sup>* [t]) \<^sup>*\\<^sup>* T' \<^sup>*\\\<^sup>* (U \<^sup>*\\\<^sup>* [t])" proof (intro iffI conjI) show "(V \<^sup>*\\\<^sup>* [t]) \<^sup>*\\\<^sup>* T' \<^sup>*\\<^sup>* (U \<^sup>*\\\<^sup>* [t]) \<^sup>*\\\<^sup>* T' \ V \<^sup>*\\\<^sup>* [t] \<^sup>*\\<^sup>* U \<^sup>*\\\<^sup>* [t]" using T U V T' U' V' 1 ind len Con_TU Con_rec(2) Resid_rec(1) Resid.simps(1) length_Cons Suc_le_mono add_Suc by (metis (no_types)) show "(V \<^sup>*\\\<^sup>* [t]) \<^sup>*\\\<^sup>* T' \<^sup>*\\<^sup>* (U \<^sup>*\\\<^sup>* [t]) \<^sup>*\\\<^sup>* T' \ (V \<^sup>*\\\<^sup>* [t]) \<^sup>*\\\<^sup>* (U \<^sup>*\\\<^sup>* [t]) \<^sup>*\\<^sup>* T' \<^sup>*\\\<^sup>* (U \<^sup>*\\\<^sup>* [t])" using T U V T' U' V' by (metis Con_sym Resid.simps(1) Resid_rec(1) Suc_le_mono ind len length_Cons list.size(3-4)) show "V \<^sup>*\\\<^sup>* [t] \<^sup>*\\<^sup>* U \<^sup>*\\\<^sup>* [t] \ (V \<^sup>*\\\<^sup>* [t]) \<^sup>*\\\<^sup>* (U \<^sup>*\\\<^sup>* [t]) \<^sup>*\\<^sup>* T' \<^sup>*\\\<^sup>* (U \<^sup>*\\\<^sup>* [t]) \ (V \<^sup>*\\\<^sup>* [t]) \<^sup>*\\\<^sup>* T' \<^sup>*\\<^sup>* (U \<^sup>*\\\<^sup>* [t]) \<^sup>*\\\<^sup>* T'" using T U V T' U' V' 1 ind len Con_TU Con_VT Con_rec(1-3) by (metis (no_types, lifting) One_nat_def Resid_rec(1) Suc_le_mono add.commute list.size(3) list.size(4) plus_1_eq_Suc) qed also have "... \ (V \<^sup>*\\\<^sup>* U) \<^sup>*\\\<^sup>* ([t] \<^sup>*\\\<^sup>* U) \<^sup>*\\<^sup>* T' \<^sup>*\\\<^sup>* (U \<^sup>*\\\<^sup>* [t])" by (metis 2 3 Con_sym ind Resid.simps(1)) also have "... 
\ V \<^sup>*\\\<^sup>* U \<^sup>*\\<^sup>* T \<^sup>*\\\<^sup>* U" using Con_rec(2) [of T' t] by (metis (no_types, lifting) "1" Con_TU Con_cons(2) Resid.simps(1) Resid.simps(3) Resid_rec(2) T T' U U') finally show ?thesis by simp qed assume Con: "V \<^sup>*\\\<^sup>* T \<^sup>*\\<^sup>* U \<^sup>*\\\<^sup>* T" show "(V \<^sup>*\\\<^sup>* T) \<^sup>*\\\<^sup>* (U \<^sup>*\\\<^sup>* T) = (V \<^sup>*\\\<^sup>* U) \<^sup>*\\\<^sup>* (T \<^sup>*\\\<^sup>* U)" proof - have "(V \<^sup>*\\\<^sup>* T) \<^sup>*\\\<^sup>* (U \<^sup>*\\\<^sup>* T) = ((V \<^sup>*\\\<^sup>* [t]) \<^sup>*\\\<^sup>* T') \<^sup>*\\\<^sup>* ((U \<^sup>*\\\<^sup>* [t]) \<^sup>*\\\<^sup>* T')" using Con_TU Con_VT Con_sym Resid_cons(2) T T' by force also have "... = ((V \<^sup>*\\\<^sup>* [t]) \<^sup>*\\\<^sup>* (U \<^sup>*\\\<^sup>* [t])) \<^sup>*\\\<^sup>* (T' \<^sup>*\\\<^sup>* (U \<^sup>*\\\<^sup>* [t]))" using T U V T' U' V' 1 Con ind [of T' "Resid U [t]" "Resid V [t]"] by (metis One_nat_def add.commute calculation len length_0_conv length_Resid list.size(4) nat_add_left_cancel_le Con_sym plus_1_eq_Suc) also have "... = ((V \<^sup>*\\\<^sup>* U) \<^sup>*\\\<^sup>* ([t] \<^sup>*\\\<^sup>* U)) \<^sup>*\\\<^sup>* (T' \<^sup>*\\\<^sup>* (U \<^sup>*\\\<^sup>* [t]))" by (metis "1" "2" "3" Con_sym ind) also have "... = (V \<^sup>*\\\<^sup>* U) \<^sup>*\\\<^sup>* (T \<^sup>*\\\<^sup>* U)" using T U T' U' Con * by (metis Con_sym Resid_rec(1-2) Resid.simps(1) Resid_cons(2)) finally show ?thesis by simp qed qed next assume U': "U' \ []" and V': "V' = []" show ?thesis proof (intro conjI impI) show *: "V \<^sup>*\\\<^sup>* T \<^sup>*\\<^sup>* U \<^sup>*\\\<^sup>* T \ V \<^sup>*\\\<^sup>* U \<^sup>*\\<^sup>* T \<^sup>*\\\<^sup>* U" proof (cases "T' = []") assume T': "T' = []" show ?thesis proof - have "V \<^sup>*\\\<^sup>* T \<^sup>*\\<^sup>* U \<^sup>*\\\<^sup>* T \ V \<^sup>*\\\<^sup>* [t] \<^sup>*\\<^sup>* (u \\ t) # (U' \<^sup>*\\\<^sup>* [t \\ u])" using Con_TU Con_sym Resid_rec(2) T T' U U' by auto also have "... \ (V \<^sup>*\\\<^sup>* [t]) \<^sup>*\\\<^sup>* [u \\ t] \<^sup>*\\<^sup>* U' \<^sup>*\\\<^sup>* [t \\ u]" by (metis Con_TU Con_cons(2) Con_rec(3) Con_sym Resid.simps(1) T U U') also have "... \ (V \<^sup>*\\\<^sup>* [u]) \<^sup>*\\\<^sup>* [t \\ u] \<^sup>*\\<^sup>* U' \<^sup>*\\\<^sup>* [t \\ u]" using T U V V' R.cube_ax apply simp by (metis R.con_implies_arr(1) R.not_arr_null R.con_def) also have "... \ (V \<^sup>*\\\<^sup>* [u]) \<^sup>*\\\<^sup>* U' \<^sup>*\\<^sup>* [t \\ u] \<^sup>*\\\<^sup>* U'" proof - have "length [t \\ u] + length U' + length (V \<^sup>*\\\<^sup>* [u]) \ n" using T U V V' len by force thus ?thesis by (metis Con_sym Resid.simps(1) add.commute ind) qed also have "... \ V \<^sup>*\\\<^sup>* U \<^sup>*\\<^sup>* T \<^sup>*\\\<^sup>* U" by (metis Con_TU Resid_cons(2) Resid_rec(3) T T' U U' Con_cons(2) length_Resid length_0_conv) finally show ?thesis by simp qed next assume T': "T' \ []" show ?thesis proof - have "V \<^sup>*\\\<^sup>* T \<^sup>*\\<^sup>* U \<^sup>*\\\<^sup>* T \ (V \<^sup>*\\\<^sup>* [t]) \<^sup>*\\\<^sup>* T' \<^sup>*\\<^sup>* ((U \<^sup>*\\\<^sup>* [t]) \<^sup>*\\\<^sup>* T')" using Con_TU Con_VT Con_sym Resid_cons(2) T T' by force also have "... 
\ (V \<^sup>*\\\<^sup>* [t]) \<^sup>*\\\<^sup>* (U \<^sup>*\\\<^sup>* [t]) \<^sup>*\\<^sup>* T' \<^sup>*\\\<^sup>* (U \<^sup>*\\\<^sup>* [t])" proof - have "length T' + length (U \<^sup>*\\\<^sup>* [t]) + length (V \<^sup>*\\\<^sup>* [t]) \ n" by (metis (no_types, lifting) Con_TU Con_VT Con_initial_right Con_sym One_nat_def Suc_eq_plus1 T ab_semigroup_add_class.add_ac(1) add_le_imp_le_left len length_Resid list.size(4) plus_1_eq_Suc) thus ?thesis by (metis Con_TU Con_VT Con_cons(1) Con_cons(2) T T' U V ind list.discI) qed also have "... \ (V \<^sup>*\\\<^sup>* U) \<^sup>*\\\<^sup>* ([t] \<^sup>*\\\<^sup>* U) \<^sup>*\\<^sup>* T' \<^sup>*\\\<^sup>* (U \<^sup>*\\\<^sup>* [t])" proof - have "length [t] + length U + length V \ n" using T T' le_Suc_eq len by fastforce thus ?thesis by (metis Con_TU Con_VT Con_initial_left Con_initial_right T ind) qed also have "... \ V \<^sup>*\\\<^sup>* U \<^sup>*\\<^sup>* T \<^sup>*\\\<^sup>* U" by (metis Con_cons(2) Con_sym Resid.simps(1) Resid1x_as_Resid Residx1_as_Resid Resid_cons' T T') finally show ?thesis by blast qed qed show "V \<^sup>*\\\<^sup>* T \<^sup>*\\<^sup>* U \<^sup>*\\\<^sup>* T \ (V \<^sup>*\\\<^sup>* T) \<^sup>*\\\<^sup>* (U \<^sup>*\\\<^sup>* T) = (V \<^sup>*\\\<^sup>* U) \<^sup>*\\\<^sup>* (T \<^sup>*\\\<^sup>* U)" proof - assume Con: "V \<^sup>*\\\<^sup>* T \<^sup>*\\<^sup>* U \<^sup>*\\\<^sup>* T" show ?thesis proof (cases "T' = []") assume T': "T' = []" show ?thesis proof - have 1: "(V \<^sup>*\\\<^sup>* T) \<^sup>*\\\<^sup>* (U \<^sup>*\\\<^sup>* T) = (V \<^sup>*\\\<^sup>* T) \<^sup>*\\\<^sup>* ((u \\ t) # (U'\<^sup>*\\\<^sup>* [t \\ u]))" using Con_TU Con_sym Resid_rec(2) T T' U U' by force also have "... = ((V \<^sup>*\\\<^sup>* [t]) \<^sup>*\\\<^sup>* [u \\ t]) \<^sup>*\\\<^sup>* (U' \<^sup>*\\\<^sup>* [t \\ u])" by (metis Con Con_TU Con_rec(2) Con_sym Resid_cons(2) T T' U U' calculation) also have "... = ((V \<^sup>*\\\<^sup>* [u]) \<^sup>*\\\<^sup>* [t \\ u]) \<^sup>*\\\<^sup>* (U' \<^sup>*\\\<^sup>* [t \\ u])" by (metis "*" Con Con_rec(3) R.cube Resid.simps(1,3) T T' U V V' calculation R.conI R.conE) also have "... = ((V \<^sup>*\\\<^sup>* [u]) \<^sup>*\\\<^sup>* U') \<^sup>*\\\<^sup>* ([t \\ u] \<^sup>*\\\<^sup>* U')" proof - have "length [t \\ u] + length (U' \<^sup>*\\\<^sup>* [t \\ u]) + length (V \<^sup>*\\\<^sup>* [u]) \ n" by (metis (no_types, lifting) Nat.le_diff_conv2 One_nat_def T U V V' add.commute add_diff_cancel_left' add_leD2 len length_Cons length_Resid list.size(3) plus_1_eq_Suc) thus ?thesis by (metis Con_sym add.commute Resid.simps(1) ind length_Resid) qed also have "... = (V \<^sup>*\\\<^sup>* U) \<^sup>*\\\<^sup>* (T \<^sup>*\\\<^sup>* U)" by (metis Con_TU Con_cons(2) Resid_cons(2) T T' U U' Resid_rec(3) length_0_conv length_Resid) finally show ?thesis by blast qed next assume T': "T' \ []" show ?thesis proof - have "(V \<^sup>*\\\<^sup>* T) \<^sup>*\\\<^sup>* (U \<^sup>*\\\<^sup>* T) = ((V \<^sup>*\\\<^sup>* T) \<^sup>*\\\<^sup>* ([u] \<^sup>*\\\<^sup>* T)) \<^sup>*\\\<^sup>* (U' \<^sup>*\\\<^sup>* (T \<^sup>*\\\<^sup>* [u]))" by (metis Con Con_TU Resid.simps(2) Resid1x_as_Resid U U' Con_cons(2) Con_sym Resid_cons' Resid_cons(2)) also have "... = ((V \<^sup>*\\\<^sup>* [u]) \<^sup>*\\\<^sup>* (T \<^sup>*\\\<^sup>* [u])) \<^sup>*\\\<^sup>* (U' \<^sup>*\\\<^sup>* (T \<^sup>*\\\<^sup>* [u]))" proof - have "length T + length [u] + length V \ n" using U U' antisym_conv len not_less_eq_eq by fastforce thus ?thesis by (metis Con_TU Con_VT Con_initial_right U ind) qed also have "... 
= ((V \<^sup>*\\\<^sup>* [u]) \<^sup>*\\\<^sup>* U') \<^sup>*\\\<^sup>* ((T \<^sup>*\\\<^sup>* [u]) \<^sup>*\\\<^sup>* U')" proof - have "length (T \<^sup>*\\\<^sup>* [u]) + length U' + length (V \<^sup>*\\\<^sup>* [u]) \ n" using Con_TU Con_initial_right U V V' len length_Resid by force thus ?thesis by (metis Con Con_TU Con_cons(2) U U' calculation ind length_0_conv length_Resid) qed also have "... = (V \<^sup>*\\\<^sup>* U) \<^sup>*\\\<^sup>* (T \<^sup>*\\\<^sup>* U)" by (metis "*" Con Con_TU Resid_cons(2) U U' length_Resid length_0_conv) finally show ?thesis by blast qed qed qed qed next assume V': "V' \ []" show ?thesis proof (cases "U' = []") assume U': "U' = []" show ?thesis proof (cases "T' = []") assume T': "T' = []" show ?thesis proof (intro conjI impI) show *: "V \<^sup>*\\\<^sup>* T \<^sup>*\\<^sup>* U \<^sup>*\\\<^sup>* T \ V \<^sup>*\\\<^sup>* U \<^sup>*\\<^sup>* T \<^sup>*\\\<^sup>* U" proof - have "V \<^sup>*\\\<^sup>* T \<^sup>*\\<^sup>* U \<^sup>*\\\<^sup>* T \ (v \\ t) # (V' \<^sup>*\\\<^sup>* [t \\ v]) \<^sup>*\\<^sup>* [u \\ t]" using Con_TU Con_VT Con_sym Resid_rec(1-2) T T' U U' V V' by metis also have "... \ [v \\ t] \<^sup>*\\<^sup>* [u \\ t] \ V' \<^sup>*\\\<^sup>* [t \\ v] \<^sup>*\\<^sup>* [u \\ v] \<^sup>*\\\<^sup>* [t \\ v]" by (metis T T' V V' Con_VT Con_rec(1-2) Con_sym R.con_def R.cube Resid.simps(3)) also have "... \ [v \\ t] \<^sup>*\\<^sup>* [u \\ t] \ V' \<^sup>*\\\<^sup>* [u \\ v] \<^sup>*\\<^sup>* [t \\ v] \<^sup>*\\\<^sup>* [u \\ v]" proof - have "length [t \\ v] + length [u \\ v] + length V' \ n" using T U V len by fastforce thus ?thesis by (metis Con_imp_Arr_Resid Arr_has_Src Con_VT T T' Trgs.simps(1) Trgs_Resid_sym V V' Con_rec(2) Srcs_Resid ind) qed also have "... \ [v \\ t] \<^sup>*\\<^sup>* [u \\ t] \ V' \<^sup>*\\\<^sup>* [u \\ v] \<^sup>*\\<^sup>* [t \\ u] \<^sup>*\\\<^sup>* [v \\ u]" by (simp add: R.con_def R.cube) also have "... 
\ V \<^sup>*\\\<^sup>* U \<^sup>*\\<^sup>* T \<^sup>*\\\<^sup>* U" proof assume 1: "V \<^sup>*\\\<^sup>* U \<^sup>*\\<^sup>* T \<^sup>*\\\<^sup>* U" have tu_vu: "t \\ u \ v \\ u" by (metis (no_types, lifting) 1 T T' U U' V V' Con_rec(3) Resid_rec(1-2) Con_sym length_Resid length_0_conv) have vt_ut: "v \\ t \ u \\ t" using 1 by (metis R.con_def R.con_sym R.cube tu_vu) show "[v \\ t] \<^sup>*\\<^sup>* [u \\ t] \ V' \<^sup>*\\\<^sup>* [u \\ v] \<^sup>*\\<^sup>* [t \\ u] \<^sup>*\\\<^sup>* [v \\ u]" by (metis (no_types, lifting) "1" Con_TU Con_cons(1) Con_rec(1-2) Resid_rec(1) T T' U U' V V' Resid_rec(2) length_Resid length_0_conv vt_ut) next assume 1: "[v \\ t] \<^sup>*\\<^sup>* [u \\ t] \ V' \<^sup>*\\\<^sup>* [u \\ v] \<^sup>*\\<^sup>* [t \\ u] \<^sup>*\\\<^sup>* [v \\ u]" have tu_vu: "t \\ u \ v \\ u \ v \\ t \ u \\ t" by (metis 1 Con_sym Resid.simps(1) Residx1.simps(2) Residx1_as_Resid) have tu: "t \ u" using Con_TU Con_rec(1) T T' U U' by blast show "V \<^sup>*\\\<^sup>* U \<^sup>*\\<^sup>* T \<^sup>*\\\<^sup>* U" by (metis (no_types, opaque_lifting) 1 Con_rec(2) Con_sym R.con_implies_arr(2) Resid.simps(1,3) T T' U U' V V' Resid_rec(2) R.arr_resid_iff_con) qed finally show ?thesis by simp qed show "V \<^sup>*\\\<^sup>* T \<^sup>*\\<^sup>* U \<^sup>*\\\<^sup>* T \ (V \<^sup>*\\\<^sup>* T) \<^sup>*\\\<^sup>* (U \<^sup>*\\\<^sup>* T) = (V \<^sup>*\\\<^sup>* U) \<^sup>*\\\<^sup>* (T \<^sup>*\\\<^sup>* U)" proof - assume Con: "V \<^sup>*\\\<^sup>* T \<^sup>*\\<^sup>* U \<^sup>*\\\<^sup>* T" have "(V \<^sup>*\\\<^sup>* T) \<^sup>*\\\<^sup>* (U \<^sup>*\\\<^sup>* T) = ((v \\ t) # (V' \<^sup>*\\\<^sup>* [t \\ v])) \<^sup>*\\\<^sup>* [u \\ t]" using Con_TU Con_VT Con_sym Resid_rec(1-2) T T' U U' V V' by metis also have 1: "... = ((v \\ t) \\ (u \\ t)) # (V' \<^sup>*\\\<^sup>* [t \\ v]) \<^sup>*\\\<^sup>* ([u \\ v] \<^sup>*\\\<^sup>* [t \\ v])" apply simp by (metis Con Con_VT Con_rec(2) R.conE R.conI R.con_sym R.cube Resid_rec(2) T T' V V' calculation(1)) also have "... = ((v \\ t) \\ (u \\ t)) # (V' \<^sup>*\\\<^sup>* [u \\ v]) \<^sup>*\\\<^sup>* ([t \\ v] \<^sup>*\\\<^sup>* [u \\ v])" proof - have "length [t \\ v] + length [u \\ v] + length V' \ n" using T U V len by fastforce moreover have "u \\ v \ t \\ v" by (metis 1 Con_VT Con_rec(2) R.con_sym_ax T T' V V' list.discI R.conE R.conI R.cube) moreover have "t \\ v \ u \\ v" using R.con_sym calculation(2) by blast ultimately show ?thesis by (metis Con_VT Con_rec(2) T T' V V' Con_rec(1) ind) qed also have "... = ((v \\ t) \\ (u \\ t)) # ((V' \<^sup>*\\\<^sup>* [u \\ v]) \<^sup>*\\\<^sup>* ([t \\ u] \<^sup>*\\\<^sup>* [v \\ u]))" using R.cube by fastforce also have "... = ((v \\ u) \\ (t \\ u)) # ((V' \<^sup>*\\\<^sup>* [u \\ v]) \<^sup>*\\\<^sup>* ([t \\ u] \<^sup>*\\\<^sup>* [v \\ u]))" by (metis R.cube) also have "... = (V \<^sup>*\\\<^sup>* U) \<^sup>*\\\<^sup>* (T \<^sup>*\\\<^sup>* U)" proof - have "(V \<^sup>*\\\<^sup>* U) \<^sup>*\\\<^sup>* (T \<^sup>*\\\<^sup>* U) = ((v \\ u) # ((V' \<^sup>*\\\<^sup>* [u \\ v]))) \<^sup>*\\\<^sup>* [t \\ u]" using T T' U U' V Resid_cons(1) [of "[u]" v V'] by (metis "*" Con Con_TU Resid.simps(1) Resid_rec(1) Resid_rec(2)) also have "... 
= ((v \\ u) \\ (t \\ u)) # ((V' \<^sup>*\\\<^sup>* [u \\ v]) \<^sup>*\\\<^sup>* ([t \\ u] \<^sup>*\\\<^sup>* [v \\ u]))" by (metis "*" Con Con_initial_left calculation Con_sym Resid.simps(1) Resid_rec(1-2)) finally show ?thesis by simp qed finally show ?thesis by simp qed qed next assume T': "T' \ []" show ?thesis proof (intro conjI impI) show *: "V \<^sup>*\\\<^sup>* T \<^sup>*\\<^sup>* U \<^sup>*\\\<^sup>* T \ V \<^sup>*\\\<^sup>* U \<^sup>*\\<^sup>* T \<^sup>*\\\<^sup>* U" proof - have "V \<^sup>*\\\<^sup>* T \<^sup>*\\<^sup>* U \<^sup>*\\\<^sup>* T \ (V \<^sup>*\\\<^sup>* [t]) \<^sup>*\\\<^sup>* T' \<^sup>*\\<^sup>* [u \\ t] \<^sup>*\\\<^sup>* T'" using Con_TU Con_VT Con_sym Resid_cons(2) Resid_rec(3) T T' U U' by force also have "... \ (V \<^sup>*\\\<^sup>* [t]) \<^sup>*\\\<^sup>* [u \\ t] \<^sup>*\\<^sup>* T' \<^sup>*\\\<^sup>* [u \\ t]" proof - have "length [u \\ t] + length T' + length (V \<^sup>*\\\<^sup>* [t]) \ n" using Con_VT Con_initial_right T U length_Resid len by fastforce thus ?thesis by (metis Con_TU Con_VT Con_rec(2) T T' U V add.commute Con_cons(2) ind list.discI) qed also have "... \ (V \<^sup>*\\\<^sup>* [u]) \<^sup>*\\\<^sup>* [t \\ u] \<^sup>*\\<^sup>* T' \<^sup>*\\\<^sup>* [u \\ t]" proof - have "length [t] + length [u] + length V \ n" using T T' U le_Suc_eq len by fastforce hence "(V \<^sup>*\\\<^sup>* [t]) \<^sup>*\\\<^sup>* ([u] \<^sup>*\\\<^sup>* [t]) = (V \<^sup>*\\\<^sup>* [u]) \<^sup>*\\\<^sup>* ([t] \<^sup>*\\\<^sup>* [u])" using ind [of "[t]" "[u]" V] by (metis Con_TU Con_VT Con_initial_left Con_initial_right T U) thus ?thesis by (metis (full_types) Con_TU Con_initial_left Con_sym Resid_rec(1) T U) qed also have "... \ V \<^sup>*\\\<^sup>* U \<^sup>*\\<^sup>* T \<^sup>*\\\<^sup>* U" by (metis Con_TU Con_cons(2) Con_rec(2) Resid.simps(1) Resid_rec(2) T T' U U') finally show ?thesis by simp qed show "V \<^sup>*\\\<^sup>* T \<^sup>*\\<^sup>* U \<^sup>*\\\<^sup>* T \ (V \<^sup>*\\\<^sup>* T) \<^sup>*\\\<^sup>* (U \<^sup>*\\\<^sup>* T) = (V \<^sup>*\\\<^sup>* U) \<^sup>*\\\<^sup>* (T \<^sup>*\\\<^sup>* U)" proof - assume Con: "V \<^sup>*\\\<^sup>* T \<^sup>*\\<^sup>* U \<^sup>*\\\<^sup>* T" have "(V \<^sup>*\\\<^sup>* T) \<^sup>*\\\<^sup>* (U \<^sup>*\\\<^sup>* T) = ((V \<^sup>*\\\<^sup>* [t]) \<^sup>*\\\<^sup>* T') \<^sup>*\\\<^sup>* ([u \\ t] \<^sup>*\\\<^sup>* T')" using Con_TU Con_VT Con_sym Resid_cons(2) Resid_rec(3) T T' U U' by force also have "... = ((V \<^sup>*\\\<^sup>* [t]) \<^sup>*\\\<^sup>* [u \\ t]) \<^sup>*\\\<^sup>* (T' \<^sup>*\\\<^sup>* [u \\ t])" proof - have "length [u \\ t] + length T' + length (Resid V [t]) \ n" using Con_VT Con_initial_right T U length_Resid len by fastforce thus ?thesis by (metis Con_TU Con_VT Con_cons(2) Con_rec(2) T T' U V add.commute ind list.discI) qed also have "... = ((V \<^sup>*\\\<^sup>* [u]) \<^sup>*\\\<^sup>* [t \\ u]) \<^sup>*\\\<^sup>* (T' \<^sup>*\\\<^sup>* [u \\ t])" proof - have "length [t] + length [u] + length V \ n" using T T' U le_Suc_eq len by fastforce thus ?thesis using ind [of "[t]" "[u]" V] by (metis Con_TU Con_VT Con_initial_left Con_sym Resid_rec(1) T U) qed also have "... 
= (V \<^sup>*\\\<^sup>* U) \<^sup>*\\\<^sup>* (T \<^sup>*\\\<^sup>* U)" using * Con Con_TU Con_rec(2) Resid_cons(2) Resid_rec(2) T T' U U' by auto finally show ?thesis by simp qed qed qed next assume U': "U' \ []" show ?thesis proof (cases "T' = []") assume T': "T' = []" show ?thesis proof (intro conjI impI) show *: "V \<^sup>*\\\<^sup>* T \<^sup>*\\<^sup>* U \<^sup>*\\\<^sup>* T \ V \<^sup>*\\\<^sup>* U \<^sup>*\\<^sup>* T \<^sup>*\\\<^sup>* U" proof - have "V \<^sup>*\\\<^sup>* T \<^sup>*\\<^sup>* U \<^sup>*\\\<^sup>* T \ V \<^sup>*\\\<^sup>* [t] \<^sup>*\\<^sup>* (u \\ t) # (U' \<^sup>*\\\<^sup>* [t \\ u])" using T U V T' U' V' Con_TU Con_VT Con_sym Resid_rec(2) by auto also have "... \ V \<^sup>*\\\<^sup>* [t] \<^sup>*\\<^sup>* [u \\ t] \ (V \<^sup>*\\\<^sup>* [t]) \<^sup>*\\\<^sup>* [u \\ t] \<^sup>*\\<^sup>* U' \<^sup>*\\\<^sup>* [t \\ u]" by (metis Con_TU Con_VT Con_cons(2) Con_initial_right Con_rec(2) Con_sym T U U') also have "... \ V \<^sup>*\\\<^sup>* [t] \<^sup>*\\<^sup>* [u \\ t] \ (V \<^sup>*\\\<^sup>* [u]) \<^sup>*\\\<^sup>* [t \\ u] \<^sup>*\\<^sup>* U' \<^sup>*\\\<^sup>* [t \\ u]" proof - have "length [u] + length [t] + length V \ n" using T U V T' U' V' len not_less_eq_eq order_trans by fastforce thus ?thesis using ind [of "[t]" "[u]" V] by (metis Con_TU Con_VT Con_initial_right Resid_rec(1) T U Con_sym length_Cons) qed also have "... \ V \<^sup>*\\\<^sup>* [u] \<^sup>*\\<^sup>* [t \\ u] \ (V \<^sup>*\\\<^sup>* [u]) \<^sup>*\\\<^sup>* [t \\ u] \<^sup>*\\<^sup>* U' \<^sup>*\\\<^sup>* [t \\ u]" proof - have "length [t] + length [u] + length V \ n" using T U V T' U' V' len antisym_conv not_less_eq_eq by fastforce thus ?thesis by (metis (full_types) Con_TU Con_VT Con_initial_right Con_sym Resid_rec(1) T U ind) qed also have "... \ (V \<^sup>*\\\<^sup>* [u]) \<^sup>*\\\<^sup>* U' \<^sup>*\\<^sup>* [t \\ u] \<^sup>*\\\<^sup>* U'" proof - have "length [t \\ u] + length U' + length (V \<^sup>*\\\<^sup>* [u]) \ n" by (metis T T' U add.assoc add.right_neutral add_leD1 add_le_cancel_left length_Resid len length_Cons list.size(3) plus_1_eq_Suc) thus ?thesis by (metis (no_types, opaque_lifting) Con_sym Resid.simps(1) add.commute ind) qed also have "... \ V \<^sup>*\\\<^sup>* U \<^sup>*\\<^sup>* T \<^sup>*\\\<^sup>* U" by (metis Con_TU Resid_cons(2) Resid_rec(3) T T' U U' Con_cons(2) length_Resid length_0_conv) finally show ?thesis by blast qed show "V \<^sup>*\\\<^sup>* T \<^sup>*\\<^sup>* U \<^sup>*\\\<^sup>* T \ (V \<^sup>*\\\<^sup>* T) \<^sup>*\\\<^sup>* (U \<^sup>*\\\<^sup>* T) = (V \<^sup>*\\\<^sup>* U) \<^sup>*\\\<^sup>* (T \<^sup>*\\\<^sup>* U)" proof - assume Con: "V \<^sup>*\\\<^sup>* T \<^sup>*\\<^sup>* U \<^sup>*\\\<^sup>* T" have "(V \<^sup>*\\\<^sup>* T) \<^sup>*\\\<^sup>* (U \<^sup>*\\\<^sup>* T) = (V \<^sup>*\\\<^sup>* [t]) \<^sup>*\\\<^sup>* ((u \\ t) # (U' \<^sup>*\\\<^sup>* [t \\ u]))" using Con_TU Con_sym Resid_rec(2) T T' U U' by auto also have "... = ((V \<^sup>*\\\<^sup>* [t]) \<^sup>*\\\<^sup>* [u \\ t]) \<^sup>*\\\<^sup>* (U' \<^sup>*\\\<^sup>* [t \\ u])" by (metis Con Con_TU Con_rec(2) Con_sym T T' U U' calculation Resid_cons(2)) also have "... = ((V \<^sup>*\\\<^sup>* [u]) \<^sup>*\\\<^sup>* [t \\ u]) \<^sup>*\\\<^sup>* (U' \<^sup>*\\\<^sup>* [t \\ u])" proof - have "length [t] + length [u] + length V \ n" using T U U' le_Suc_eq len by fastforce thus ?thesis using T U Con_TU Con_VT Con_sym ind [of "[t]" "[u]" V] by (metis (no_types, opaque_lifting) Con_initial_right Resid.simps(3)) qed also have "... 
= ((V \<^sup>*\\\<^sup>* [u]) \<^sup>*\\\<^sup>* U') \<^sup>*\\\<^sup>* ([t \\ u] \<^sup>*\\\<^sup>* U')" proof - have "length [t \\ u] + length U' + length (V \<^sup>*\\\<^sup>* [u]) \ n" by (metis (no_types, opaque_lifting) T T' U add.left_commute add.right_neutral add_leD2 add_le_cancel_left len length_Cons length_Resid list.size(3) plus_1_eq_Suc) thus ?thesis by (metis Con Con_TU Con_rec(3) T T' U U' calculation ind length_0_conv length_Resid) qed also have "... = (V \<^sup>*\\\<^sup>* U) \<^sup>*\\\<^sup>* (T \<^sup>*\\\<^sup>* U)" by (metis "*" Con Con_TU Resid_rec(3) T T' U U' Resid_cons(2) length_Resid length_0_conv) finally show ?thesis by blast qed qed next assume T': "T' \ []" show ?thesis proof (intro conjI impI) have 1: "U \<^sup>*\\<^sup>* [t]" using T Con_TU by (metis Con_cons(2) Con_sym Resid.simps(2)) have 2: "V \<^sup>*\\<^sup>* [t]" using V Con_VT Con_initial_right T by blast have 3: "length T' + length (U \<^sup>*\\\<^sup>* [t]) + length (V \<^sup>*\\\<^sup>* [t]) \ n" using "1" "2" T len length_Resid by force have 4: "length [t] + length U + length V \ n" using T T' len antisym_conv not_less_eq_eq by fastforce show *: "V \<^sup>*\\\<^sup>* T \<^sup>*\\<^sup>* U \<^sup>*\\\<^sup>* T \ V \<^sup>*\\\<^sup>* U \<^sup>*\\<^sup>* T \<^sup>*\\\<^sup>* U" proof - have "V \<^sup>*\\\<^sup>* T \<^sup>*\\<^sup>* U \<^sup>*\\\<^sup>* T \ (V \<^sup>*\\\<^sup>* [t]) \<^sup>*\\\<^sup>* T' \<^sup>*\\<^sup>* (U \<^sup>*\\\<^sup>* [t]) \<^sup>*\\\<^sup>* T'" using Con_TU Con_VT Con_sym Resid_cons(2) T T' by force also have "... \ (V \<^sup>*\\\<^sup>* [t]) \<^sup>*\\\<^sup>* (U \<^sup>*\\\<^sup>* [t]) \<^sup>*\\<^sup>* T' \<^sup>*\\\<^sup>* (U \<^sup>*\\\<^sup>* [t])" by (metis 3 Con_TU Con_VT Con_cons(1) Con_cons(2) T T' U V ind list.discI) also have "... \ (V \<^sup>*\\\<^sup>* U) \<^sup>*\\\<^sup>* ([t] \<^sup>*\\\<^sup>* U) \<^sup>*\\<^sup>* T' \<^sup>*\\\<^sup>* (U \<^sup>*\\\<^sup>* [t])" by (metis 1 2 4 Con_sym ind) also have "... \ V \<^sup>*\\\<^sup>* U \<^sup>*\\<^sup>* hd ([t] \<^sup>*\\\<^sup>* U) # T' \<^sup>*\\\<^sup>* (U \<^sup>*\\\<^sup>* [t])" by (metis 1 Con_TU Con_cons(1) Con_cons(2) Resid.simps(1) Resid1x_as_Resid T T' list.sel(1)) also have "... \ V \<^sup>*\\\<^sup>* U \<^sup>*\\<^sup>* T \<^sup>*\\\<^sup>* U" using 1 Resid_cons' [of T' t U] Con_TU T T' Resid1x_as_Resid Con_sym by force finally show ?thesis by simp qed show "(V \<^sup>*\\\<^sup>* T) \<^sup>*\\\<^sup>* (U \<^sup>*\\\<^sup>* T) = (V \<^sup>*\\\<^sup>* U) \<^sup>*\\\<^sup>* (T \<^sup>*\\\<^sup>* U)" proof - have "(V \<^sup>*\\\<^sup>* T) \<^sup>*\\\<^sup>* (U \<^sup>*\\\<^sup>* T) = ((V \<^sup>*\\\<^sup>* [t]) \<^sup>*\\\<^sup>* T') \<^sup>*\\\<^sup>* ((U \<^sup>*\\\<^sup>* [t]) \<^sup>*\\\<^sup>* T')" using Con_TU Con_VT Con_sym Resid_cons(2) T T' by force also have "... = ((V \<^sup>*\\\<^sup>* [t]) \<^sup>*\\\<^sup>* (U \<^sup>*\\\<^sup>* [t])) \<^sup>*\\\<^sup>* (T' \<^sup>*\\\<^sup>* (U \<^sup>*\\\<^sup>* [t]))" by (metis (no_types, lifting) "3" Con_TU Con_VT T T' U V Con_cons(1) Con_cons(2) ind list.simps(3)) also have "... = ((V \<^sup>*\\\<^sup>* U) \<^sup>*\\\<^sup>* ([t] \<^sup>*\\\<^sup>* U)) \<^sup>*\\\<^sup>* (T' \<^sup>*\\\<^sup>* (U \<^sup>*\\\<^sup>* [t]))" by (metis 1 2 4 Con_sym ind) also have "... = (V \<^sup>*\\\<^sup>* U) \<^sup>*\\\<^sup>* ((t # T') \<^sup>*\\\<^sup>* U)" by (metis "*" Con_TU Con_cons(1) Resid1x_as_Resid Resid_cons' T T' U calculation Resid_cons(2) list.distinct(1)) also have "... 
= (V \<^sup>*\\\<^sup>* U) \<^sup>*\\\<^sup>* (T \<^sup>*\\\<^sup>* U)" using T by fastforce finally show ?thesis by simp qed qed qed qed qed qed qed qed qed lemma Cube: shows "T \<^sup>*\\\<^sup>* U \<^sup>*\\<^sup>* V \<^sup>*\\\<^sup>* U \ T \<^sup>*\\\<^sup>* V \<^sup>*\\<^sup>* U \<^sup>*\\\<^sup>* V" and "T \<^sup>*\\\<^sup>* U \<^sup>*\\<^sup>* V \<^sup>*\\\<^sup>* U \ (T \<^sup>*\\\<^sup>* U) \<^sup>*\\\<^sup>* (V \<^sup>*\\\<^sup>* U) = (T \<^sup>*\\\<^sup>* V) \<^sup>*\\\<^sup>* (U \<^sup>*\\\<^sup>* V)" proof - show "T \<^sup>*\\\<^sup>* U \<^sup>*\\<^sup>* V \<^sup>*\\\<^sup>* U \ T \<^sup>*\\\<^sup>* V \<^sup>*\\<^sup>* U \<^sup>*\\\<^sup>* V" using Cube_ind by (metis Con_sym Resid.simps(1) le_add2) show "T \<^sup>*\\\<^sup>* U \<^sup>*\\<^sup>* V \<^sup>*\\\<^sup>* U \ (T \<^sup>*\\\<^sup>* U) \<^sup>*\\\<^sup>* (V \<^sup>*\\\<^sup>* U) = (T \<^sup>*\\\<^sup>* V) \<^sup>*\\\<^sup>* (U \<^sup>*\\\<^sup>* V)" using Cube_ind by (metis Con_sym Resid.simps(1) order_refl) qed lemma Con_implies_Arr: assumes "T \<^sup>*\\<^sup>* U" shows "Arr T" and "Arr U" using assms Con_sym by (metis Con_imp_Arr_Resid Arr_iff_Con_self Cube(1) Resid.simps(1))+ sublocale partial_magma Resid by (unfold_locales, metis Resid.simps(1) Con_sym) lemma is_partial_magma: shows "partial_magma Resid" .. lemma null_char: shows "null = []" by (metis null_is_zero(2) Resid.simps(1)) sublocale residuation Resid using null_char Con_sym Arr_iff_Con_self Con_imp_Arr_Resid Cube null_is_zero(2) by unfold_locales auto lemma is_residuation: shows "residuation Resid" .. lemma arr_char: shows "arr T \ Arr T" using null_char Arr_iff_Con_self by fastforce lemma arrI\<^sub>P [intro]: assumes "Arr T" shows "arr T" using assms arr_char by auto lemma ide_char: shows "ide T \ Ide T" by (metis Con_Arr_self Ide_implies_Arr Resid_Arr_Ide_ind Resid_Arr_self arr_char ide_def arr_def) lemma con_char: shows "con T U \ Con T U" using null_char by auto lemma conI\<^sub>P [intro]: assumes "Con T U" shows "con T U" using assms con_char by auto sublocale rts Resid proof show "\A T. \ide A; con T A\ \ T \<^sup>*\\\<^sup>* A = T" using Resid_Arr_Ide_ind ide_char null_char by auto show "\T. arr T \ ide (trg T)" by (metis arr_char Resid_Arr_self ide_char resid_arr_self) show "\A T. \ide A; con A T\ \ ide (A \<^sup>*\\\<^sup>* T)" by (simp add: Resid_Ide_Arr_ind con_char ide_char) show "\T U. con T U \ \A. ide A \ con A T \ con A U" proof - fix T U assume TU: "con T U" have 1: "Srcs T = Srcs U" using TU Con_imp_eq_Srcs con_char by force obtain a where a: "a \ Srcs T \ Srcs U" using 1 by (metis Int_absorb Int_emptyI TU arr_char Arr_has_Src con_implies_arr(1)) show "\A. ide A \ con A T \ con A U" using a 1 by (metis (full_types) Ball_Collect Con_single_ide_ind Ide.simps(2) Int_absorb TU Srcs_are_ide arr_char con_char con_implies_arr(1-2) ide_char) qed show "\T U V. \ide (Resid T U); con U V\ \ con (T \<^sup>*\\\<^sup>* U) (V \<^sup>*\\\<^sup>* U)" using null_char ide_char by (metis Con_imp_Arr_Resid Con_Ide_iff Srcs_Resid con_char con_sym arr_resid_iff_con ide_implies_arr) qed theorem is_rts: shows "rts Resid" .. notation cong (infix "\<^sup>*\\<^sup>*" 50) notation prfx (infix "\<^sup>*\\<^sup>*" 50) lemma sources_char\<^sub>P: shows "sources T = {A. Ide A \ Arr T \ Srcs A = Srcs T}" using Con_Ide_iff Con_sym con_char ide_char sources_def by fastforce lemma sources_cons: shows "Arr (t # T) \ sources (t # T) = sources [t]" apply (induct T) apply simp using sources_char\<^sub>P by auto lemma targets_char\<^sub>P: shows "targets T = {B. 
Ide B \ Arr T \ Srcs B = Trgs T}" unfolding targets_def by (metis (no_types, lifting) trg_def Arr.simps(1) Ide_implies_Arr Resid_Arr_self arr_char Con_Ide_iff Srcs_Resid con_char ide_char con_implies_arr(1)) lemma seq_char': shows "seq T U \ Arr T \ Arr U \ Trgs T \ Srcs U \ {}" proof show "seq T U \ Arr T \ Arr U \ Trgs T \ Srcs U \ {}" unfolding seq_def using Arr_has_Trg arr_char Con_Arr_self sources_char\<^sub>P trg_def trg_in_targets by fastforce assume 1: "Arr T \ Arr U \ Trgs T \ Srcs U \ {}" have "targets T = sources U" proof - obtain a where a: "R.ide a \ a \ Trgs T \ a \ Srcs U" using 1 Trgs_are_ide by blast have "Trgs [a] = Trgs T" using a 1 by (metis Con_single_ide_ind Con_sym Resid_Arr_Src Srcs_Resid Trgs_eqI) moreover have "Srcs [a] = Srcs U" using a 1 Con_single_ide_ind Con_imp_eq_Srcs by blast moreover have "Trgs [a] = Srcs [a]" using a by (metis R.residuation_axioms R.sources_resid Srcs.simps(2) Trgs.simps(2) residuation.ideE) ultimately show ?thesis using 1 sources_char\<^sub>P targets_char\<^sub>P by auto qed thus "seq T U" using 1 by blast qed lemma seq_char: shows "seq T U \ Arr T \ Arr U \ Trgs T = Srcs U" by (metis Int_absorb Srcs_Resid Arr_has_Src Arr_iff_Con_self Srcs_eqI seq_char') lemma seqI\<^sub>P [intro]: assumes "Arr T" and "Arr U" and "Trgs T \ Srcs U \ {}" shows "seq T U" using assms seq_char' by auto lemma Ide_imp_sources_eq_targets: assumes "Ide T" shows "sources T = targets T" using assms by (metis Resid_Arr_Ide_ind arr_iff_has_source arr_iff_has_target con_char arr_def sources_resid) subsection "Inclusion Map" text \ Inclusion of an RTS to the RTS of its paths. \ abbreviation incl where "incl \ \t. if R.arr t then [t] else null" lemma incl_is_simulation: shows "simulation resid Resid incl" using R.con_implies_arr(1-2) con_char R.arr_resid_iff_con null_char by unfold_locales auto lemma incl_is_injective: shows "inj_on incl (Collect R.arr)" by (intro inj_onI) simp lemma reflects_con: assumes "incl t \<^sup>*\\<^sup>* incl u" shows "t \ u" using assms by (metis (full_types) Arr.simps(1) Con_implies_Arr(1-2) Con_rec(1) null_char) end subsection "Composites of Paths" text \ The RTS of paths has composites, given by the append operation on lists. \ context paths_in_rts begin lemma Srcs_append [simp]: assumes "T \ []" shows "Srcs (T @ U) = Srcs T" by (metis Nil_is_append_conv Srcs.simps(2) Srcs.simps(3) assms hd_append list.exhaust_sel) lemma Trgs_append [simp]: shows "U \ [] \ Trgs (T @ U) = Trgs U" proof (induct T) show "U \ [] \ Trgs ([] @ U) = Trgs U" by auto show "\t T. \U \ [] \ Trgs (T @ U) = Trgs U; U \ []\ \ Trgs ((t # T) @ U) = Trgs U" by (metis Nil_is_append_conv Trgs.simps(3) append_Cons list.exhaust) qed lemma seq_implies_Trgs_eq_Srcs: shows "\Arr T; Arr U; Trgs T \ Srcs U\ \ Trgs T = Srcs U" by (metis inf.orderE Arr_has_Trg seqI\<^sub>P seq_char) lemma Arr_append_iff\<^sub>P: shows "\U. \T \ []; U \ []\ \ Arr (T @ U) \ Arr T \ Arr U \ Trgs T \ Srcs U" proof (induct T) show "\U. \[] \ []; U \ []\ \ Arr ([] @ U) = (Arr [] \ Arr U \ Trgs [] \ Srcs U)" by simp fix t T and U :: "'a list" assume ind: "\U. \T \ []; U \ []\ \ Arr (T @ U) = (Arr T \ Arr U \ Trgs T \ Srcs U)" assume U: "U \ []" show "Arr ((t # T) @ U) \ Arr (t # T) \ Arr U \ Trgs (t # T) \ Srcs U" proof (cases "T = []") show "T = [] \ ?thesis" using Arr.elims(1) U by auto assume T: "T \ []" have "Arr ((t # T) @ U) \ Arr (t # (T @ U))" by simp also have "... \ R.arr t \ Arr (T @ U) \ R.targets t \ Srcs (T @ U)" using T U by (metis Arr.simps(3) Nil_is_append_conv neq_Nil_conv) also have "... 
\ R.arr t \ Arr T \ Arr U \ Trgs T \ Srcs U \ R.targets t \ Srcs T" using T U ind by auto also have "... \ Arr (t # T) \ Arr U \ Trgs (t # T) \ Srcs U" using T U by (metis Arr.simps(3) Trgs.simps(3) neq_Nil_conv) finally show ?thesis by auto qed qed lemma Arr_consI\<^sub>P [intro, simp]: assumes "R.arr t" and "Arr U" and "R.targets t \ Srcs U" shows "Arr (t # U)" using assms Arr.elims(3) by blast lemma Arr_appendI\<^sub>P [intro, simp]: assumes "Arr T" and "Arr U" and "Trgs T \ Srcs U" shows "Arr (T @ U)" using assms by (metis Arr.simps(1) Arr_append_iff\<^sub>P) lemma Arr_appendE\<^sub>P [elim]: assumes "Arr (T @ U)" and "T \ []" and "U \ []" and "\Arr T; Arr U; Trgs T = Srcs U\ \ thesis" shows thesis using assms Arr_append_iff\<^sub>P seq_implies_Trgs_eq_Srcs by force lemma Ide_append_iff\<^sub>P: shows "\U. \T \ []; U \ []\ \ Ide (T @ U) \ Ide T \ Ide U \ Trgs T \ Srcs U" using Ide_char by auto lemma Ide_appendI\<^sub>P [intro, simp]: assumes "Ide T" and "Ide U" and "Trgs T \ Srcs U" shows "Ide (T @ U)" using assms by (metis Ide.simps(1) Ide_append_iff\<^sub>P) lemma Resid_append_ind: shows "\T U. \T \ []; U \ []; V \ []\ \ (V @ T \<^sup>*\\<^sup>* U \ V \<^sup>*\\<^sup>* U \ T \<^sup>*\\<^sup>* U \<^sup>*\\\<^sup>* V) \ (T \<^sup>*\\<^sup>* V @ U \ T \<^sup>*\\<^sup>* V \ T \<^sup>*\\\<^sup>* V \<^sup>*\\<^sup>* U) \ (V @ T \<^sup>*\\<^sup>* U \ (V @ T) \<^sup>*\\\<^sup>* U = V \<^sup>*\\\<^sup>* U @ T \<^sup>*\\\<^sup>* (U \<^sup>*\\\<^sup>* V)) \ (T \<^sup>*\\<^sup>* V @ U \ T \<^sup>*\\\<^sup>* (V @ U) = (T \<^sup>*\\\<^sup>* V) \<^sup>*\\\<^sup>* U)" proof (induct V) show "\T U. \T \ []; U \ []; [] \ []\ \ ([] @ T \<^sup>*\\<^sup>* U \ [] \<^sup>*\\<^sup>* U \ T \<^sup>*\\<^sup>* U \<^sup>*\\\<^sup>* []) \ (T \<^sup>*\\<^sup>* [] @ U \ T \<^sup>*\\<^sup>* [] \ T \<^sup>*\\\<^sup>* [] \<^sup>*\\<^sup>* U) \ ([] @ T \<^sup>*\\<^sup>* U \ ([] @ T) \<^sup>*\\\<^sup>* U = [] \<^sup>*\\\<^sup>* U @ T \<^sup>*\\\<^sup>* (U \<^sup>*\\\<^sup>* [])) \ (T \<^sup>*\\<^sup>* [] @ U \ T \<^sup>*\\\<^sup>* ([] @ U) = (T \<^sup>*\\\<^sup>* []) \<^sup>*\\\<^sup>* U)" by simp fix v :: 'a and T U V :: "'a list" assume ind: "\T U. 
\T \ []; U \ []; V \ []\ \ (V @ T \<^sup>*\\<^sup>* U \ V \<^sup>*\\<^sup>* U \ T \<^sup>*\\<^sup>* U \<^sup>*\\\<^sup>* V) \ (T \<^sup>*\\<^sup>* V @ U \ T \<^sup>*\\<^sup>* V \ T \<^sup>*\\\<^sup>* V \<^sup>*\\<^sup>* U) \ (V @ T \<^sup>*\\<^sup>* U \ (V @ T) \<^sup>*\\\<^sup>* U = V \<^sup>*\\\<^sup>* U @ T \<^sup>*\\\<^sup>* (U \<^sup>*\\\<^sup>* V)) \ (T \<^sup>*\\<^sup>* V @ U \ T \<^sup>*\\\<^sup>* (V @ U) = (T \<^sup>*\\\<^sup>* V) \<^sup>*\\\<^sup>* U)" assume T: "T \ []" and U: "U \ []" show "((v # V) @ T \<^sup>*\\<^sup>* U \ (v # V) \<^sup>*\\<^sup>* U \ T \<^sup>*\\<^sup>* U \<^sup>*\\\<^sup>* (v # V)) \ (T \<^sup>*\\<^sup>* (v # V) @ U \ T \<^sup>*\\<^sup>* (v # V) \ T \<^sup>*\\\<^sup>* (v # V) \<^sup>*\\<^sup>* U) \ ((v # V) @ T \<^sup>*\\<^sup>* U \ ((v # V) @ T) \<^sup>*\\\<^sup>* U = (v # V) \<^sup>*\\\<^sup>* U @ T \<^sup>*\\\<^sup>* (U \<^sup>*\\\<^sup>* (v # V))) \ (T \<^sup>*\\<^sup>* (v # V) @ U \ T \<^sup>*\\\<^sup>* ((v # V) @ U) = (T \<^sup>*\\\<^sup>* (v # V)) \<^sup>*\\\<^sup>* U)" proof (intro conjI iffI impI) show 1: "(v # V) @ T \<^sup>*\\<^sup>* U \ ((v # V) @ T) \<^sup>*\\\<^sup>* U = (v # V) \<^sup>*\\\<^sup>* U @ T \<^sup>*\\\<^sup>* (U \<^sup>*\\\<^sup>* (v # V))" proof (cases "V = []") show "V = [] \ (v # V) @ T \<^sup>*\\<^sup>* U \ ?thesis" using T U Resid_cons(1) U by auto assume V: "V \ []" assume Con: "(v # V) @ T \<^sup>*\\<^sup>* U" have "((v # V) @ T) \<^sup>*\\\<^sup>* U = (v # (V @ T)) \<^sup>*\\\<^sup>* U" by simp also have "... = [v] \<^sup>*\\\<^sup>* U @ (V @ T) \<^sup>*\\\<^sup>* (U \<^sup>*\\\<^sup>* [v])" using T U Con Resid_cons by simp also have "... = [v] \<^sup>*\\\<^sup>* U @ V \<^sup>*\\\<^sup>* (U \<^sup>*\\\<^sup>* [v]) @ T \<^sup>*\\\<^sup>* ((U \<^sup>*\\\<^sup>* [v]) \<^sup>*\\\<^sup>* V)" using T U V Con ind Resid_cons by (metis Con_sym Cons_eq_appendI append_is_Nil_conv Con_cons(1)) also have "... = (v # V) \<^sup>*\\\<^sup>* U @ T \<^sup>*\\\<^sup>* (U \<^sup>*\\\<^sup>* (v # V))" by (metis Con Con_cons(2) Cons_eq_appendI Resid_cons(1) Resid_cons(2) T U V append.assoc append_is_Nil_conv Con_sym ind) finally show ?thesis by simp qed show 2: "T \<^sup>*\\<^sup>* (v # V) @ U \ T \<^sup>*\\\<^sup>* ((v # V) @ U) = (T \<^sup>*\\\<^sup>* (v # V)) \<^sup>*\\\<^sup>* U" proof (cases "V = []") show "V = [] \ T \<^sup>*\\<^sup>* (v # V) @ U \ ?thesis" using Resid_cons(2) T U by auto assume V: "V \ []" assume Con: "T \<^sup>*\\<^sup>* (v # V) @ U" have "T \<^sup>*\\\<^sup>* ((v # V) @ U) = T \<^sup>*\\\<^sup>* (v # (V @ U))" by simp also have 1: "... = (T \<^sup>*\\\<^sup>* [v]) \<^sup>*\\\<^sup>* (V @ U)" using V Con Resid_cons(2) T by force also have "... = ((T \<^sup>*\\\<^sup>* [v]) \<^sup>*\\\<^sup>* V) \<^sup>*\\\<^sup>* U" using T U V 1 Con ind by (metis Con_initial_right Cons_eq_appendI) also have "... 
= (T \<^sup>*\\\<^sup>* (v # V)) \<^sup>*\\\<^sup>* U" using T V Con by (metis Con_cons(2) Con_initial_right Cons_eq_appendI Resid_cons(2)) finally show ?thesis by blast qed show "(v # V) @ T \<^sup>*\\<^sup>* U \ v # V \<^sup>*\\<^sup>* U" by (metis 1 Con_sym Resid.simps(1) append_Nil) show "(v # V) @ T \<^sup>*\\<^sup>* U \ T \<^sup>*\\<^sup>* U \<^sup>*\\\<^sup>* (v # V)" using T U Con_sym by (metis 1 Con_initial_right Resid_cons(1-2) append.simps(2) ind self_append_conv) show "T \<^sup>*\\<^sup>* (v # V) @ U \ T \<^sup>*\\<^sup>* v # V" using 2 by fastforce show "T \<^sup>*\\<^sup>* (v # V) @ U \ T \<^sup>*\\\<^sup>* (v # V) \<^sup>*\\<^sup>* U" using 2 by fastforce show "T \<^sup>*\\<^sup>* v # V \ T \<^sup>*\\\<^sup>* (v # V) \<^sup>*\\<^sup>* U \ T \<^sup>*\\<^sup>* (v # V) @ U" proof - assume Con: "T \<^sup>*\\<^sup>* v # V \ T \<^sup>*\\\<^sup>* (v # V) \<^sup>*\\<^sup>* U" have "T \<^sup>*\\<^sup>* (v # V) @ U \ T \<^sup>*\\<^sup>* v # (V @ U)" by simp also have "... \ T \<^sup>*\\<^sup>* [v] \ T \<^sup>*\\\<^sup>* [v] \<^sup>*\\<^sup>* V @ U" using T U Con_cons(2) by simp also have "... \ T \<^sup>*\\\<^sup>* [v] \<^sup>*\\<^sup>* V @ U" by fastforce also have "... \ True" using Con ind by (metis Con_cons(2) Resid_cons(2) T U self_append_conv2) finally show ?thesis by blast qed show "v # V \<^sup>*\\<^sup>* U \ T \<^sup>*\\<^sup>* U \<^sup>*\\\<^sup>* (v # V) \ (v # V) @ T \<^sup>*\\<^sup>* U" proof - assume Con: "v # V \<^sup>*\\<^sup>* U \ T \<^sup>*\\<^sup>* U \<^sup>*\\\<^sup>* (v # V)" have "(v # V) @ T \<^sup>*\\<^sup>* U \v # (V @ T) \<^sup>*\\<^sup>* U" by simp also have "... \ [v] \<^sup>*\\<^sup>* U \ V @ T \<^sup>*\\<^sup>* U \<^sup>*\\\<^sup>* [v]" using T U Con_cons(1) by simp also have "... \ V @ T \<^sup>*\\<^sup>* U \<^sup>*\\\<^sup>* [v]" by (metis Con Con_cons(1) U) also have "... 
\ True" using Con ind by (metis Con_cons(1) Con_sym Resid_cons(2) T U append_self_conv2) finally show ?thesis by blast qed qed qed lemma Con_append: assumes "T \ []" and "U \ []" and "V \ []" shows "T @ U \<^sup>*\\<^sup>* V \ T \<^sup>*\\<^sup>* V \ U \<^sup>*\\<^sup>* V \<^sup>*\\\<^sup>* T" and "T \<^sup>*\\<^sup>* U @ V \ T \<^sup>*\\<^sup>* U \ T \<^sup>*\\\<^sup>* U \<^sup>*\\<^sup>* V" using assms Resid_append_ind by blast+ lemma Con_appendI [intro]: shows "\T \<^sup>*\\<^sup>* V; U \<^sup>*\\<^sup>* V \<^sup>*\\\<^sup>* T\ \ T @ U \<^sup>*\\<^sup>* V" and "\T \<^sup>*\\<^sup>* U; T \<^sup>*\\\<^sup>* U \<^sup>*\\<^sup>* V\ \ T \<^sup>*\\<^sup>* U @ V" by (metis Con_append(1) Con_sym Resid.simps(1))+ lemma Resid_append [intro, simp]: shows "\T \ []; T @ U \<^sup>*\\<^sup>* V\ \ (T @ U) \<^sup>*\\\<^sup>* V = (T \<^sup>*\\\<^sup>* V) @ (U \<^sup>*\\\<^sup>* (V \<^sup>*\\\<^sup>* T))" and "\U \ []; V \ []; T \<^sup>*\\<^sup>* U @ V\ \ T \<^sup>*\\\<^sup>* (U @ V) = (T \<^sup>*\\\<^sup>* U) \<^sup>*\\\<^sup>* V" using Resid_append_ind apply (metis Con_sym Resid.simps(1) append_self_conv) using Resid_append_ind by (metis Resid.simps(1)) lemma Resid_append2 [simp]: assumes "T \ []" and "U \ []" and "V \ []" and "W \ []" and "T @ U \<^sup>*\\<^sup>* V @ W" shows "(T @ U) \<^sup>*\\\<^sup>* (V @ W) = (T \<^sup>*\\\<^sup>* V) \<^sup>*\\\<^sup>* W @ (U \<^sup>*\\\<^sup>* (V \<^sup>*\\\<^sup>* T)) \<^sup>*\\\<^sup>* (W \<^sup>*\\\<^sup>* (T \<^sup>*\\\<^sup>* V))" using assms Resid_append by (metis Con_append(1-2) append_is_Nil_conv) lemma append_is_composite_of: assumes "seq T U" shows "composite_of T U (T @ U)" unfolding composite_of_def using assms apply (intro conjI) apply (metis Arr.simps(1) Resid_Arr_self Resid_Ide_Arr_ind Arr_appendI\<^sub>P Resid_append_ind ide_char order_refl seq_char) apply (metis Arr.simps(1) Arr_appendI\<^sub>P Con_Arr_self Resid_Arr_self Resid_append_ind ide_char seq_char order_refl) by (metis Arr.simps(1) Con_Arr_self Con_append(1) Resid_Arr_self Arr_appendI\<^sub>P Ide_append_iff\<^sub>P Resid_append(1) ide_char seq_char order_refl) sublocale rts_with_composites Resid using append_is_composite_of composable_def by unfold_locales blast theorem is_rts_with_composites: shows "rts_with_composites Resid" .. (* TODO: This stuff might be redundant. *) lemma arr_append [intro, simp]: assumes "seq T U" shows "arr (T @ U)" using assms arrI\<^sub>P seq_char by simp lemma arr_append_imp_seq: assumes "T \ []" and "U \ []" and "arr (T @ U)" shows "seq T U" using assms arr_char seq_char Arr_append_iff\<^sub>P seq_implies_Trgs_eq_Srcs by simp lemma sources_append [simp]: assumes "seq T U" shows "sources (T @ U) = sources T" using assms by (meson append_is_composite_of sources_composite_of) lemma targets_append [simp]: assumes "seq T U" shows "targets (T @ U) = targets U" using assms by (meson append_is_composite_of targets_composite_of) lemma cong_respects_seq\<^sub>P: assumes "seq T U" and "T \<^sup>*\\<^sup>* T'" and "U \<^sup>*\\<^sup>* U'" shows "seq T' U'" by (meson assms cong_respects_seq) lemma cong_append [intro]: assumes "seq T U" and "T \<^sup>*\\<^sup>* T'" and "U \<^sup>*\\<^sup>* U'" shows "T @ U \<^sup>*\\<^sup>* T' @ U'" proof have 1: "\T U T' U'. \seq T U; T \<^sup>*\\<^sup>* T'; U \<^sup>*\\<^sup>* U'\ \ seq T' U'" using assms cong_respects_seq\<^sub>P by simp have 2: "\T U T' U'. 
\seq T U; T \<^sup>*\\<^sup>* T'; U \<^sup>*\\<^sup>* U'\ \ T @ U \<^sup>*\\<^sup>* T' @ U'" proof - fix T U T' U' assume TU: "seq T U" and TT': "T \<^sup>*\\<^sup>* T'" and UU': "U \<^sup>*\\<^sup>* U'" have T'U': "seq T' U'" using TU TT' UU' cong_respects_seq\<^sub>P by simp have 3: "Ide (T \<^sup>*\\\<^sup>* T') \ Ide (T' \<^sup>*\\\<^sup>* T) \ Ide (U \<^sup>*\\\<^sup>* U') \ Ide (U' \<^sup>*\\\<^sup>* U)" using TU TT' UU' ide_char by blast have "(T @ U) \<^sup>*\\\<^sup>* (T' @ U') = ((T \<^sup>*\\\<^sup>* T') \<^sup>*\\\<^sup>* U') @ U \<^sup>*\\\<^sup>* ((T' \<^sup>*\\\<^sup>* T) @ U' \<^sup>*\\\<^sup>* (T \<^sup>*\\\<^sup>* T'))" proof - have 4: "T \ [] \ U \ [] \ T' \ [] \ U' \ []" using TU TT' UU' Arr.simps(1) seq_char ide_char by auto moreover have "(T @ U) \<^sup>*\\\<^sup>* (T' @ U') \ []" proof (intro Con_appendI) show "T \<^sup>*\\\<^sup>* T' \ []" using "3" by force show "(T \<^sup>*\\\<^sup>* T') \<^sup>*\\\<^sup>* U' \ []" using "3" T'U' \T \<^sup>*\\<^sup>* T' \ []\ Con_Ide_iff seq_char by fastforce show "U \<^sup>*\\\<^sup>* ((T' @ U') \<^sup>*\\\<^sup>* T) \ []" proof - have "U \<^sup>*\\\<^sup>* ((T' @ U') \<^sup>*\\\<^sup>* T) = U \<^sup>*\\\<^sup>* ((T' \<^sup>*\\\<^sup>* T) @ U' \<^sup>*\\\<^sup>* (T \<^sup>*\\\<^sup>* T'))" by (metis Con_appendI(1) Resid_append(1) \(T \<^sup>*\\<^sup>* T') \<^sup>*\\<^sup>* U' \ []\ \T \<^sup>*\\<^sup>* T' \ []\ calculation Con_sym) also have "... = (U \<^sup>*\\\<^sup>* (T' \<^sup>*\\\<^sup>* T)) \<^sup>*\\\<^sup>* (U' \<^sup>*\\\<^sup>* (T \<^sup>*\\\<^sup>* T'))" by (metis Arr.simps(1) Con_append(2) Resid_append(2) \(T \<^sup>*\\<^sup>* T') \<^sup>*\\<^sup>* U' \ []\ Con_implies_Arr(1) Con_sym) also have "... = U \<^sup>*\\\<^sup>* U'" by (metis (mono_tags, lifting) "3" Ide.simps(1) Resid_Ide(1) Srcs_Resid TU \(T \<^sup>*\\<^sup>* T') \<^sup>*\\<^sup>* U' \ []\ Con_Ide_iff seq_char) finally show ?thesis using 3 UU' by force qed qed ultimately show ?thesis using Resid_append2 [of T U T' U'] seq_char by (metis Con_append(2) Con_sym Resid_append(2) Resid.simps(1)) qed moreover have "Ide ..." 
proof have 3: "Ide (T \<^sup>*\\\<^sup>* T') \ Ide (T' \<^sup>*\\\<^sup>* T) \ Ide (U \<^sup>*\\\<^sup>* U') \ Ide (U' \<^sup>*\\\<^sup>* U)" using TU TT' UU' ide_char by blast show 4: "Ide ((T \<^sup>*\\\<^sup>* T') \<^sup>*\\\<^sup>* U')" using TU T'U' TT' UU' 1 3 by (metis (full_types) Srcs_Resid Con_Ide_iff Resid_Ide_Arr_ind seq_char) show 5: "Ide (U \<^sup>*\\\<^sup>* ((T' \<^sup>*\\\<^sup>* T) @ U' \<^sup>*\\\<^sup>* (T \<^sup>*\\\<^sup>* T')))" proof - have "U \<^sup>*\\\<^sup>* (T' \<^sup>*\\\<^sup>* T) = U" by (metis (full_types) "3" TT' TU Con_Ide_iff Resid_Ide(1) Srcs_Resid con_char seq_char prfx_implies_con) moreover have "U' \<^sup>*\\\<^sup>* (T \<^sup>*\\\<^sup>* T') = U'" by (metis "3" "4" Ide.simps(1) Resid_Ide(1)) ultimately show ?thesis by (metis "3" "4" Arr.simps(1) Con_append(2) Ide.simps(1) Resid_append(2) TU Con_sym seq_char) qed show "Trgs ((T \<^sup>*\\\<^sup>* T') \<^sup>*\\\<^sup>* U') \ Srcs (U \<^sup>*\\\<^sup>* (T' \<^sup>*\\\<^sup>* T @ U' \<^sup>*\\\<^sup>* (T \<^sup>*\\\<^sup>* T')))" by (metis 4 5 Arr_append_iff\<^sub>P Ide.simps(1) Nil_is_append_conv calculation Con_imp_Arr_Resid) qed ultimately show "T @ U \<^sup>*\\<^sup>* T' @ U'" using ide_char by presburger qed show "T @ U \<^sup>*\\<^sup>* T' @ U'" using assms 2 by simp show "T' @ U' \<^sup>*\\<^sup>* T @ U" using assms 1 2 cong_symmetric by blast qed lemma cong_cons [intro]: assumes "seq [t] U" and "t \ t'" and "U \<^sup>*\\<^sup>* U'" shows "t # U \<^sup>*\\<^sup>* t' # U'" using assms cong_append [of "[t]" U "[t']" U'] by (simp add: R.prfx_implies_con ide_char) lemma cong_append_ideI [intro]: assumes "seq T U" shows "ide T \ T @ U \<^sup>*\\<^sup>* U" and "ide U \ T @ U \<^sup>*\\<^sup>* T" and "ide T \ U \<^sup>*\\<^sup>* T @ U" and "ide U \ T \<^sup>*\\<^sup>* T @ U" proof - show 1: "ide T \ T @ U \<^sup>*\\<^sup>* U" using assms by (metis append_is_composite_of composite_ofE resid_arr_ide prfx_implies_con con_sym) show 2: "ide U \ T @ U \<^sup>*\\<^sup>* T" by (meson assms append_is_composite_of composite_ofE ide_backward_stable) show "ide T \ U \<^sup>*\\<^sup>* T @ U" using 1 cong_symmetric by auto show "ide U \ T \<^sup>*\\<^sup>* T @ U" using 2 cong_symmetric by auto qed lemma cong_cons_ideI [intro]: assumes "seq [t] U" and "R.ide t" shows "t # U \<^sup>*\\<^sup>* U" and "U \<^sup>*\\<^sup>* t # U" using assms cong_append_ideI [of "[t]" U] by (auto simp add: ide_char) lemma prfx_decomp: assumes "[t] \<^sup>*\\<^sup>* [u]" shows "[t] @ [u \\ t] \<^sup>*\\<^sup>* [u]" proof (* TODO: I really want these to be doable by auto. *) show 1: "[u] \<^sup>*\\<^sup>* [t] @ [u \\ t]" using assms by (metis Con_imp_Arr_Resid Con_rec(3) Resid.simps(3) Resid_rec(3) R.con_sym append.left_neutral append_Cons arr_char cong_reflexive list.distinct(1)) show "[t] @ [u \\ t] \<^sup>*\\<^sup>* [u]" proof - have "([t] @ [u \\ t]) \<^sup>*\\\<^sup>* [u] = ([t] \<^sup>*\\\<^sup>* [u]) @ ([u \\ t] \<^sup>*\\\<^sup>* [u \\ t])" using assms by (metis Arr_Resid_single Con_Arr_self Con_appendI(1) Con_sym Resid_append(1) Resid_rec(1) con_char list.discI prfx_implies_con) moreover have "Ide ..." 
using assms by (metis 1 Con_sym append_Nil2 arr_append_imp_seq calculation cong_append_ideI(4) ide_backward_stable Con_implies_Arr(2) Resid_Arr_self con_char ide_char prfx_implies_con arr_resid_iff_con) ultimately show ?thesis using ide_char by presburger qed qed lemma composite_of_single_single: assumes "R.composite_of t u v" shows "composite_of [t] [u] ([t] @ [u])" proof show "[t] \<^sup>*\\<^sup>* [t] @ [u]" proof - have "[t] \<^sup>*\\\<^sup>* ([t] @ [u]) = ([t] \<^sup>*\\\<^sup>* [t]) \<^sup>*\\\<^sup>* [u]" using assms by auto moreover have "Ide ..." by (metis (no_types, lifting) Con_implies_Arr(2) R.bounded_imp_con R.con_composite_of_iff R.con_prfx_composite_of(1) assms resid_ide_arr Con_rec(1) Resid.simps(3) Resid_Arr_self con_char ide_char) ultimately show ?thesis using ide_char by presburger qed show "([t] @ [u]) \<^sup>*\\\<^sup>* [t] \<^sup>*\\<^sup>* [u]" using assms by (metis \prfx [t] ([t] @ [u])\ append_is_composite_of arr_append_imp_seq composite_ofE con_def not_Cons_self2 Con_implies_Arr(2) arr_char null_char prfx_implies_con) qed end subsection "Paths in a Weakly Extensional RTS" locale paths_in_weakly_extensional_rts = R: weakly_extensional_rts + paths_in_rts begin lemma ex_un_Src: assumes "Arr T" shows "\!a. a \ Srcs T" using assms by (simp add: R.weakly_extensional_rts_axioms Srcs_simp\<^sub>P R.arr_has_un_source) fun Src where "Src T = R.src (hd T)" lemma Srcs_simp\<^sub>P\<^sub>W\<^sub>E: assumes "Arr T" shows "Srcs T = {Src T}" proof - have "[R.src (hd T)] \ sources T" by (metis Arr_imp_arr_hd Con_single_ide_ind Ide.simps(2) Srcs_simp\<^sub>P assms con_char ide_char in_sourcesI con_sym R.ide_src R.src_in_sources) hence "R.src (hd T) \ Srcs T" using assms by (metis Srcs.elims Arr_has_Src list.sel(1) R.arr_iff_has_source R.src_in_sources) thus ?thesis using assms ex_un_Src by auto qed lemma ex_un_Trg: assumes "Arr T" shows "\!b. 
b \ Trgs T" using assms apply (induct T) apply auto[1] by (metis Con_Arr_self Ide_implies_Arr Resid_Arr_self Srcs_Resid ex_un_Src) fun Trg where "Trg [] = R.null" | "Trg [t] = R.trg t" | "Trg (t # T) = Trg T" lemma Trg_simp [simp]: shows "T \ [] \ Trg T = R.trg (last T)" apply (induct T) apply auto by (metis Trg.simps(3) list.exhaust_sel) lemma Trgs_simp\<^sub>P\<^sub>W\<^sub>E [simp]: assumes "Arr T" shows "Trgs T = {Trg T}" using assms by (metis Arr_imp_arr_last Con_Arr_self Con_imp_Arr_Resid R.trg_in_targets Srcs.simps(1) Srcs_Resid Srcs_simp\<^sub>P\<^sub>W\<^sub>E Trg_simp insertE insert_absorb insert_not_empty Trgs_simp\<^sub>P) lemma Src_resid [simp]: assumes "T \<^sup>*\\<^sup>* U" shows "Src (T \<^sup>*\\\<^sup>* U) = Trg U" using assms Con_imp_Arr_Resid Con_implies_Arr(2) Srcs_Resid Srcs_simp\<^sub>P\<^sub>W\<^sub>E by force lemma Trg_resid_sym: assumes "T \<^sup>*\\<^sup>* U" shows "Trg (T \<^sup>*\\\<^sup>* U) = Trg (U \<^sup>*\\\<^sup>* T)" using assms Con_imp_Arr_Resid Con_sym Trgs_Resid_sym by auto lemma Src_append [simp]: assumes "seq T U" shows "Src (T @ U) = Src T" using assms by (metis Arr.simps(1) Src.simps hd_append seq_char) lemma Trg_append [simp]: assumes "seq T U" shows "Trg (T @ U) = Trg U" using assms by (metis Ide.simps(1) Resid.simps(1) Trg_simp append_is_Nil_conv ide_char ide_trg last_appendR seqE trg_def) lemma Arr_append_iff\<^sub>P\<^sub>W\<^sub>E: assumes "T \ []" and "U \ []" shows "Arr (T @ U) \ Arr T \ Arr U \ Trg T = Src U" using assms Arr_appendE\<^sub>P Srcs_simp\<^sub>P\<^sub>W\<^sub>E by auto lemma Arr_consI\<^sub>P\<^sub>W\<^sub>E [intro, simp]: assumes "R.arr t" and "Arr U" and "R.trg t = Src U" shows "Arr (t # U)" using assms by (metis Arr.simps(2) Srcs_simp\<^sub>P\<^sub>W\<^sub>E Trg.simps(2) Trgs.simps(2) Trgs_simp\<^sub>P\<^sub>W\<^sub>E dual_order.eq_iff Arr_consI\<^sub>P) lemma Arr_consE [elim]: assumes "Arr (t # U)" and "\R.arr t; U \ [] \ Arr U; U \ [] \ R.trg t = Src U\ \ thesis" shows thesis using assms by (metis Arr_append_iff\<^sub>P\<^sub>W\<^sub>E Trg.simps(2) append_Cons append_Nil list.distinct(1) Arr.simps(2)) lemma Arr_appendI\<^sub>P\<^sub>W\<^sub>E [intro, simp]: assumes "Arr T" and "Arr U" and "Trg T = Src U" shows "Arr (T @ U)" using assms by (metis Arr.simps(1) Arr_append_iff\<^sub>P\<^sub>W\<^sub>E) lemma Arr_appendE\<^sub>P\<^sub>W\<^sub>E [elim]: assumes "Arr (T @ U)" and "T \ []" and "U \ []" and "\Arr T; Arr U; Trg T = Src U\ \ thesis" shows thesis using assms Arr_append_iff\<^sub>P\<^sub>W\<^sub>E seq_implies_Trgs_eq_Srcs by force lemma Ide_append_iff\<^sub>P\<^sub>W\<^sub>E: assumes "T \ []" and "U \ []" shows "Ide (T @ U) \ Ide T \ Ide U \ Trg T = Src U" using assms Ide_char by auto lemma Ide_appendI\<^sub>P\<^sub>W\<^sub>E [intro, simp]: assumes "Ide T" and "Ide U" and "Trg T = Src U" shows "Ide (T @ U)" using assms by (metis Ide.simps(1) Ide_append_iff\<^sub>P\<^sub>W\<^sub>E) lemma Ide_appendE [elim]: assumes "Ide (T @ U)" and "T \ []" and "U \ []" and "\Ide T; Ide U; Trg T = Src U\ \ thesis" shows thesis using assms Ide_append_iff\<^sub>P\<^sub>W\<^sub>E by metis lemma Ide_consI [intro, simp]: assumes "R.ide t" and "Ide U" and "R.trg t = Src U" shows "Ide (t # U)" using assms by (simp add: Ide_char) lemma Ide_consE [elim]: assumes "Ide (t # U)" and "\R.ide t; U \ [] \ Ide U; U \ [] \ R.trg t = Src U\ \ thesis" shows thesis using assms by (metis Con_rec(4) Ide.simps(2) Ide_imp_Ide_hd Ide_imp_Ide_tl R.trg_def R.trg_ide Resid_Arr_Ide_ind Trg.simps(2) ide_char list.sel(1) list.sel(3) list.simps(3) Src_resid 
ide_def) lemma Ide_imp_Src_eq_Trg: assumes "Ide T" shows "Src T = Trg T" using assms by (metis Ide.simps(1) Src_resid ide_char ide_def) end subsection "Paths in a Confluent RTS" text \ Here we show that confluence of an RTS extends to confluence of the RTS of its paths. \ locale paths_in_confluent_rts = paths_in_rts + R: confluent_rts begin lemma confluence_single: assumes "\t u. R.coinitial t u \ t \ u" shows "\t. \R.arr t; Arr U; R.sources t = Srcs U\ \ [t] \<^sup>*\\<^sup>* U" proof (induct U) show "\t. \R.arr t; Arr []; R.sources t = Srcs []\ \ [t] \<^sup>*\\<^sup>* []" by simp fix t u U assume ind: "\t. \R.arr t; Arr U; R.sources t = Srcs U\ \ [t] \<^sup>*\\<^sup>* U" assume t: "R.arr t" assume uU: "Arr (u # U)" assume coinitial: "R.sources t = Srcs (u # U)" hence 1: "R.coinitial t u" using t uU by (metis Arr.simps(2) Con_implies_Arr(1) Con_imp_eq_Srcs Con_initial_left Srcs.simps(2) Con_Arr_self R.coinitial_iff) show "[t] \<^sup>*\\<^sup>* u # U" proof (cases "U = []") show "U = [] \ ?thesis" using assms t uU coinitial R.coinitial_iff by fastforce assume U: "U \ []" show ?thesis proof - have 2: "Arr [t \\ u] \ Arr U \ Srcs [t \\ u] = Srcs U" using assms 1 t uU U R.arr_resid_iff_con apply (intro conjI) apply simp apply (metis Con_Arr_self Con_implies_Arr(2) Resid_cons(2)) by (metis (full_types) Con_cons(2) Srcs.simps(2) Srcs_Resid Trgs.simps(2) Con_Arr_self Con_imp_eq_Srcs list.simps(3) R.sources_resid) have "[t] \<^sup>*\\<^sup>* u # U \ t \ u \ [t \\ u] \<^sup>*\\<^sup>* U" using U Con_rec(3) [of U t u] by simp also have "... \ True" using assms t uU U 1 2 ind by force finally show ?thesis by blast qed qed qed lemma confluence_ind: shows "\U. \Arr T; Arr U; Srcs T = Srcs U\ \ T \<^sup>*\\<^sup>* U" proof (induct T) show "\U. \Arr []; Arr U; Srcs [] = Srcs U\ \ [] \<^sup>*\\<^sup>* U" by simp fix t T U assume ind: "\U. \Arr T; Arr U; Srcs T = Srcs U\ \ T \<^sup>*\\<^sup>* U" assume tT: "Arr (t # T)" assume U: "Arr U" assume coinitial: "Srcs (t # T) = Srcs U" show "t # T \<^sup>*\\<^sup>* U" proof (cases "T = []") show "T = [] \ ?thesis" using U tT coinitial confluence_single [of t U] R.confluence by simp assume T: "T \ []" show ?thesis proof - have 1: "[t] \<^sup>*\\<^sup>* U" using tT U coinitial R.confluence by (metis R.arr_def Srcs.simps(2) T Con_Arr_self Con_imp_eq_Srcs Con_initial_right Con_rec(4) confluence_single) moreover have "T \<^sup>*\\<^sup>* U \<^sup>*\\\<^sup>* [t]" using 1 tT U T coinitial ind [of "U \<^sup>*\\\<^sup>* [t]"] by (metis (full_types) Con_imp_Arr_Resid Arr_iff_Con_self Con_implies_Arr(2) Con_imp_eq_Srcs Con_sym R.sources_resid Srcs.simps(2) Srcs_Resid Trgs.simps(2) Con_rec(4)) ultimately show ?thesis using Con_cons(1) [of T U t] by fastforce qed qed qed lemma confluence\<^sub>P: assumes "coinitial T U" shows "con T U" using assms confluence_ind sources_char\<^sub>P coinitial_def con_char by auto sublocale confluent_rts Resid apply (unfold_locales) using confluence\<^sub>P by simp lemma is_confluent_rts: shows "confluent_rts Resid" .. end subsection "Simulations Lift to Paths" text \ In this section we show that a simulation from RTS \A\ to RTS \B\ determines a simulation from the RTS of paths in \A\ to the RTS of paths in \B\. In other words, the path-RTS construction is functorial with respect to simulation. \ context simulation begin interpretation P\<^sub>A: paths_in_rts A .. interpretation P\<^sub>B: paths_in_rts B .. lemma map_Resid_single: shows "\u. 
P\<^sub>A.con T [u] \ map F (P\<^sub>A.Resid T [u]) = P\<^sub>B.Resid (map F T) [F u]" apply (induct T) apply simp proof - fix t u T assume ind: "\u. P\<^sub>A.con T [u] \ map F (P\<^sub>A.Resid T [u]) = P\<^sub>B.Resid (map F T) [F u]" assume 1: "P\<^sub>A.con (t # T) [u]" show "map F (P\<^sub>A.Resid (t # T) [u]) = P\<^sub>B.Resid (map F (t # T)) [F u]" proof (cases "T = []") show "T = [] \ ?thesis" using "1" P\<^sub>A.null_char by fastforce assume T: "T \ []" show ?thesis using T 1 ind P\<^sub>A.con_def P\<^sub>A.null_char P\<^sub>A.Con_rec(2) P\<^sub>A.Resid_rec(2) P\<^sub>B.Con_rec(2) P\<^sub>B.Resid_rec(2) apply simp by (metis A.con_sym Nil_is_map_conv preserves_con preserves_resid) qed qed lemma map_Resid: shows "\T. P\<^sub>A.con T U \ map F (P\<^sub>A.Resid T U) = P\<^sub>B.Resid (map F T) (map F U)" apply (induct U) using P\<^sub>A.Resid.simps(1) P\<^sub>A.con_char P\<^sub>A.con_sym apply blast proof - fix u U T assume ind: "\T. P\<^sub>A.con T U \ map F (P\<^sub>A.Resid T U) = P\<^sub>B.Resid (map F T) (map F U)" assume 1: "P\<^sub>A.con T (u # U)" show "map F (P\<^sub>A.Resid T (u # U)) = P\<^sub>B.Resid (map F T) (map F (u # U))" proof (cases "U = []") show "U = [] \ ?thesis" using "1" map_Resid_single by force assume U: "U \ []" have "P\<^sub>B.Resid (map F T) (map F (u # U)) = P\<^sub>B.Resid (P\<^sub>B.Resid (map F T) [F u]) (map F U)" using U 1 P\<^sub>B.Resid_cons(2) apply simp by (metis P\<^sub>B.Arr.simps(1) P\<^sub>B.Con_consI(2) P\<^sub>B.Con_implies_Arr(1) list.map_disc_iff) also have "... = map F (P\<^sub>A.Resid (P\<^sub>A.Resid T [u]) U)" using U 1 ind by (metis P\<^sub>A.Con_initial_right P\<^sub>A.Resid_cons(2) P\<^sub>A.con_char map_Resid_single) also have "... = map F (P\<^sub>A.Resid T (u # U))" using "1" P\<^sub>A.Resid_cons(2) P\<^sub>A.con_char U by auto finally show ?thesis by simp qed qed lemma preserves_paths: shows "P\<^sub>A.Arr T \ P\<^sub>B.Arr (map F T)" by (metis P\<^sub>A.Con_Arr_self P\<^sub>A.conI\<^sub>P P\<^sub>B.Arr_iff_Con_self map_Resid map_is_Nil_conv) interpretation Fx: simulation P\<^sub>A.Resid P\<^sub>B.Resid \\T. if P\<^sub>A.Arr T then map F T else []\ proof let ?Fx = "\T. if P\<^sub>A.Arr T then map F T else []" show "\T. \ P\<^sub>A.arr T \ ?Fx T = P\<^sub>B.null" by (simp add: P\<^sub>A.arr_char P\<^sub>B.null_char) show "\T U. P\<^sub>A.con T U \ P\<^sub>B.con (?Fx T) (?Fx U)" using P\<^sub>A.Con_implies_Arr(1) P\<^sub>A.Con_implies_Arr(2) P\<^sub>A.con_char map_Resid by fastforce show "\T U. P\<^sub>A.con T U \ ?Fx (P\<^sub>A.Resid T U) = P\<^sub>B.Resid (?Fx T) (?Fx U)" by (simp add: P\<^sub>A.Con_imp_Arr_Resid P\<^sub>A.Con_implies_Arr(1) P\<^sub>A.Con_implies_Arr(2) P\<^sub>A.con_char map_Resid) qed lemma lifts_to_paths: shows "simulation P\<^sub>A.Resid P\<^sub>B.Resid (\T. if P\<^sub>A.Arr T then map F T else [])" .. end subsection "Normal Sub-RTS's Lift to Paths" text \ Here we show that a normal sub-RTS \N\ of an RTS \R\ lifts to a normal sub-RTS of the RTS of paths in \N\, and that it is coherent if \N\ is. \ locale paths_in_rts_with_normal = R: rts + N: normal_sub_rts + paths_in_rts begin text \ We define a ``normal path'' to be a path that consists entirely of normal transitions. We show that the collection of all normal paths is a normal sub-RTS of the RTS of paths. 
\ definition NPath where "NPath T \ (Arr T \ set T \ \)" lemma Ide_implies_NPath: assumes "Ide T" shows "NPath T" using assms by (metis Ball_Collect NPath_def Ide_implies_Arr N.ide_closed set_Ide_subset_ide subsetI) lemma NPath_implies_Arr: assumes "NPath T" shows "Arr T" using assms NPath_def by simp lemma NPath_append: assumes "T \ []" and "U \ []" shows "NPath (T @ U) \ NPath T \ NPath U \ Trgs T \ Srcs U" using assms NPath_def by auto lemma NPath_appendI [intro, simp]: assumes "NPath T" and "NPath U" and "Trgs T \ Srcs U" shows "NPath (T @ U)" using assms NPath_def by simp lemma NPath_Resid_single_Arr: shows "\t. \t \ \; Arr U; R.sources t = Srcs U\ \ NPath (Resid [t] U)" proof (induct U) show "\t. \t \ \; Arr []; R.sources t = Srcs []\ \ NPath (Resid [t] [])" by simp fix t u U assume ind: "\t. \t \ \; Arr U; R.sources t = Srcs U\ \ NPath (Resid [t] U)" assume t: "t \ \" assume uU: "Arr (u # U)" assume src: "R.sources t = Srcs (u # U)" show "NPath (Resid [t] (u # U))" proof (cases "U = []") show "U = [] \ ?thesis" using NPath_def t src apply simp by (metis Arr.simps(2) R.arr_resid_iff_con R.coinitialI N.forward_stable N.elements_are_arr uU) assume U: "U \ []" show ?thesis proof - have "NPath (Resid [t] (u # U)) \ NPath (Resid [t \\ u] U)" using t U uU src by (metis Arr.simps(2) Con_implies_Arr(1) Resid_rec(3) Con_rec(3) R.arr_resid_iff_con) also have "... \ True" proof - have "t \\ u \ \" using t U uU src N.forward_stable [of t u] by (metis Con_Arr_self Con_imp_eq_Srcs Con_initial_left Srcs.simps(2) inf.idem Arr_has_Src R.coinitial_def) moreover have "Arr U" using U uU by (metis Arr.simps(3) neq_Nil_conv) moreover have "R.sources (t \\ u) = Srcs U" using t uU src by (metis Con_Arr_self Srcs.simps(2) U calculation(1) Con_imp_eq_Srcs Con_rec(4) N.elements_are_arr R.sources_resid R.arr_resid_iff_con) ultimately show ?thesis using ind [of "t \\ u"] by simp qed finally show ?thesis by blast qed qed qed lemma NPath_Resid_Arr_single: shows "\u. \ NPath T; R.arr u; Srcs T = R.sources u \ \ NPath (Resid T [u])" proof (induct T) show "\u. \NPath []; R.arr u; Srcs [] = R.sources u\ \ NPath (Resid [] [u])" by simp fix t u T assume ind: "\u. 
\NPath T; R.arr u; Srcs T = R.sources u\ \ NPath (Resid T [u])" assume tT: "NPath (t # T)" assume u: "R.arr u" assume src: "Srcs (t # T) = R.sources u" show "NPath (Resid (t # T) [u])" proof (cases "T = []") show "T = [] \ ?thesis" using tT u src NPath_def by (metis Arr.simps(2) NPath_Resid_single_Arr Srcs.simps(2) list.set_intros(1) subsetD) assume T: "T \ []" have "R.coinitial u t" by (metis R.coinitialI Srcs.simps(3) T list.exhaust_sel src u) hence con: "t \ u" using tT T u src R.con_sym NPath_def by (metis N.forward_stable N.elements_are_arr R.not_arr_null list.set_intros(1) R.conI subsetD) have 1: "NPath (Resid (t # T) [u]) \ NPath ((t \\ u) # Resid T [u \\ t])" proof - have "t # T \<^sup>*\\<^sup>* [u]" proof - have 2: "[t] \<^sup>*\\<^sup>* [u]" by (simp add: Con_rec(1) con) moreover have "T \<^sup>*\\<^sup>* Resid [u] [t]" proof - have "NPath T" using tT T NPath_def by (metis NPath_append append_Cons append_Nil) moreover have 3: "R.arr (u \\ t)" using con by (meson R.arr_resid_iff_con R.con_sym) moreover have "Srcs T = R.sources (u \\ t)" using tT T u src con by (metis "3" Arr_iff_Con_self Con_cons(2) Con_imp_eq_Srcs R.sources_resid Srcs_Resid Trgs.simps(2) NPath_implies_Arr list.discI R.arr_resid_iff_con) ultimately show ?thesis using 2 ind [of "u \\ t"] NPath_def by auto qed ultimately show ?thesis using tT T u src Con_cons(1) [of T "[u]" t] by simp qed thus ?thesis using tT T u src Resid_cons(1) [of T t "[u]"] Resid_rec(2) by presburger qed also have "... \ True" proof - have 2: "t \\ u \ \ \ R.arr (u \\ t)" using tT u src con NPath_def by (meson R.arr_resid_iff_con R.con_sym N.forward_stable \R.coinitial u t\ list.set_intros(1) subsetD) moreover have 3: "NPath (T \<^sup>*\\\<^sup>* [u \\ t])" using tT ind [of "u \\ t"] NPath_def by (metis Con_Arr_self Con_imp_eq_Srcs Con_rec(4) R.arr_resid_iff_con R.sources_resid Srcs.simps(2) T calculation insert_subset list.exhaust list.simps(15) Arr.simps(3)) moreover have "R.targets (t \\ u) \ Srcs (Resid T [u \\ t])" using tT T u src NPath_def by (metis "3" Arr.simps(1) R.targets_resid_sym Srcs_Resid_Arr_single con subset_refl) ultimately show ?thesis using NPath_def by (metis Arr_consI\<^sub>P N.elements_are_arr insert_subset list.simps(15)) qed finally show ?thesis by blast qed qed lemma NPath_Resid [simp]: shows "\U. \NPath T; Arr U; Srcs T = Srcs U\ \ NPath (T \<^sup>*\\\<^sup>* U)" proof (induct T) show "\U. \NPath []; Arr U; Srcs [] = Srcs U\ \ NPath ([] \<^sup>*\\\<^sup>* U)" by simp fix t T U assume ind: "\U. 
\NPath T; Arr U; Srcs T = Srcs U\ \ NPath (T \<^sup>*\\\<^sup>* U)" assume tT: "NPath (t # T)" assume U: "Arr U" assume Coinitial: "Srcs (t # T) = Srcs U" show "NPath ((t # T) \<^sup>*\\\<^sup>* U)" proof (cases "T = []") show "T = [] \ ?thesis" using tT U Coinitial NPath_Resid_single_Arr [of t U] NPath_def by force assume T: "T \ []" have 0: "NPath ((t # T) \<^sup>*\\\<^sup>* U) \ NPath ([t] \<^sup>*\\\<^sup>* U @ T \<^sup>*\\\<^sup>* (U \<^sup>*\\\<^sup>* [t]))" proof - have "U \ []" using U by auto moreover have "(t # T) \<^sup>*\\<^sup>* U" proof - have "t \ \" using tT NPath_def by auto moreover have "R.sources t = Srcs U" using Coinitial by (metis Srcs.elims U list.sel(1) Arr_has_Src) ultimately have 1: "[t] \<^sup>*\\<^sup>* U" using U NPath_Resid_single_Arr [of t U] NPath_def by auto moreover have "T \<^sup>*\\<^sup>* (U \<^sup>*\\\<^sup>* [t])" proof - have "Srcs T = Srcs (U \<^sup>*\\\<^sup>* [t])" using tT U Coinitial 1 by (metis Con_Arr_self Con_cons(2) Con_imp_eq_Srcs Con_sym Srcs_Resid_Arr_single T list.discI NPath_implies_Arr) hence "NPath (T \<^sup>*\\\<^sup>* (U \<^sup>*\\\<^sup>* [t]))" using tT U Coinitial 1 Con_sym ind [of "Resid U [t]"] NPath_def by (metis Con_imp_Arr_Resid Srcs.elims T insert_subset list.simps(15) Arr.simps(3)) thus ?thesis using NPath_def by auto qed ultimately show ?thesis using Con_cons(1) [of T U t] by fastforce qed ultimately show ?thesis using tT U T Coinitial Resid_cons(1) by auto qed also have "... \ True" proof (intro iffI, simp_all) have 1: "NPath ([t] \<^sup>*\\\<^sup>* U)" by (metis Coinitial NPath_Resid_single_Arr Srcs_simp\<^sub>P U insert_subset list.sel(1) list.simps(15) NPath_def tT) moreover have 2: "NPath (T \<^sup>*\\\<^sup>* (U \<^sup>*\\\<^sup>* [t]))" by (metis "0" Arr.simps(1) Con_cons(1) Con_imp_eq_Srcs Con_implies_Arr(1-2) NPath_def T append_Nil2 calculation ind insert_subset list.simps(15) tT) moreover have "Trgs ([t] \<^sup>*\\\<^sup>* U) \ Srcs (T \<^sup>*\\\<^sup>* (U \<^sup>*\\\<^sup>* [t]))" by (metis Arr.simps(1) NPath_def Srcs_Resid Trgs_Resid_sym calculation(2) dual_order.refl) ultimately show "NPath ([t] \<^sup>*\\\<^sup>* U @ T \<^sup>*\\\<^sup>* (U \<^sup>*\\\<^sup>* [t]))" using NPath_append [of "T \<^sup>*\\\<^sup>* (U \<^sup>*\\\<^sup>* [t])" "[t] \<^sup>*\\\<^sup>* U"] by fastforce qed finally show ?thesis by blast qed qed lemma Backward_stable_single: shows "\t. \NPath U; NPath ([t] \<^sup>*\\\<^sup>* U)\ \ NPath [t]" proof (induct U) show "\t. \NPath []; NPath ([t] \<^sup>*\\\<^sup>* [])\ \ NPath [t]" using NPath_def by simp fix t u U assume ind: "\t. \NPath U; NPath ([t] \<^sup>*\\\<^sup>* U)\ \ NPath [t]" assume uU: "NPath (u # U)" assume resid: "NPath ([t] \<^sup>*\\\<^sup>* (u # U))" show "NPath [t]" using uU ind NPath_def by (metis Arr.simps(1) Arr.simps(2) Con_implies_Arr(2) N.backward_stable N.elements_are_arr Resid_rec(1) Resid_rec(3) insert_subset list.simps(15) resid) qed lemma Backward_stable: shows "\U. \NPath U; NPath (T \<^sup>*\\\<^sup>* U)\ \ NPath T" proof (induct T) show "\U. \NPath U; NPath ([] \<^sup>*\\\<^sup>* U)\ \ NPath []" by simp fix t T U assume ind: "\U. 
\NPath U; NPath (T \<^sup>*\\\<^sup>* U)\ \ NPath T" assume U: "NPath U" assume resid: "NPath ((t # T) \<^sup>*\\\<^sup>* U)" show "NPath (t # T)" proof (cases "T = []") show "T = [] \ ?thesis" using U resid Backward_stable_single by blast assume T: "T \ []" have 1: "NPath ([t] \<^sup>*\\\<^sup>* U) \ NPath (T \<^sup>*\\\<^sup>* (U \<^sup>*\\\<^sup>* [t]))" using T U NPath_append resid NPath_def by (metis Arr.simps(1) Con_cons(1) Resid_cons(1)) have 2: "t \ \" using 1 U Backward_stable_single NPath_def by simp moreover have "NPath T" using 1 U resid ind by (metis 2 Arr.simps(2) Con_imp_eq_Srcs NPath_Resid N.elements_are_arr) moreover have "R.targets t \ Srcs T" using resid 1 Con_imp_eq_Srcs Con_sym Srcs_Resid_Arr_single NPath_def by (metis Arr.simps(1) dual_order.eq_iff) ultimately show ?thesis using NPath_def by (simp add: N.elements_are_arr) qed qed sublocale normal_sub_rts Resid \Collect NPath\ using Ide_implies_NPath NPath_implies_Arr arr_char ide_char coinitial_def sources_char\<^sub>P append_is_composite_of apply unfold_locales apply auto using Backward_stable by metis+ theorem normal_extends_to_paths: shows "normal_sub_rts Resid (Collect NPath)" .. lemma Resid_NPath_preserves_reflects_Con: assumes "NPath U" and "Srcs T = Srcs U" shows "T \<^sup>*\\\<^sup>* U \<^sup>*\\<^sup>* T' \<^sup>*\\\<^sup>* U \ T \<^sup>*\\<^sup>* T'" using assms NPath_def NPath_Resid con_char con_imp_coinitial resid_along_elem_preserves_con Con_implies_Arr(2) Con_sym Cube(1) by (metis Arr.simps(1) mem_Collect_eq) notation Cong\<^sub>0 (infix "\\<^sup>*\<^sub>0" 50) notation Cong (infix "\\<^sup>*" 50) (* * TODO: Leave these for now -- they still seem a little difficult to prove * in this context, but are probably useful. *) lemma Cong\<^sub>0_cancel_left\<^sub>C\<^sub>S: assumes "T @ U \\<^sup>*\<^sub>0 T @ U'" and "T \ []" and "U \ []" and "U' \ []" shows "U \\<^sup>*\<^sub>0 U'" using assms Cong\<^sub>0_cancel_left [of T U "T @ U" U' "T @ U'"] Cong\<^sub>0_reflexive append_is_composite_of by (metis Cong\<^sub>0_implies_Cong Cong_imp_arr(1) arr_append_imp_seq) lemma Srcs_respects_Cong: assumes "T \\<^sup>* T'" and "a \ Srcs T" and "a' \ Srcs T'" shows "[a] \\<^sup>* [a']" proof - obtain U U' where UU': "NPath U \ NPath U' \ T \<^sup>*\\\<^sup>* U \\<^sup>*\<^sub>0 T' \<^sup>*\\\<^sup>* U'" using assms(1) by blast show ?thesis proof show "U \ Collect NPath" using UU' by simp show "U' \ Collect NPath" using UU' by simp show "[a] \<^sup>*\\\<^sup>* U \\<^sup>*\<^sub>0 [a'] \<^sup>*\\\<^sup>* U'" proof - have "NPath ([a] \<^sup>*\\\<^sup>* U) \ NPath ([a'] \<^sup>*\\\<^sup>* U')" by (metis Arr.simps(1) Con_imp_eq_Srcs Con_implies_Arr(1) Con_single_ide_ind NPath_implies_Arr N.ide_closed R.in_sourcesE Srcs.simps(2) Srcs_simp\<^sub>P UU' assms(2-3) elements_are_arr not_arr_null null_char NPath_Resid_single_Arr) thus ?thesis using UU' by (metis Con_imp_eq_Srcs Cong\<^sub>0_imp_con NPath_Resid Srcs_Resid con_char NPath_implies_Arr mem_Collect_eq arr_resid_iff_con con_implies_arr(2)) qed qed qed lemma Trgs_respects_Cong: assumes "T \\<^sup>* T'" and "b \ Trgs T" and "b' \ Trgs T'" shows "[b] \\<^sup>* [b']" proof - have "[b] \ targets T \ [b'] \ targets T'" proof - have 1: "Ide [b] \ Ide [b']" using assms by (metis Ball_Collect Trgs_are_ide Ide.simps(2)) moreover have "Srcs [b] = Trgs T" using assms by (metis 1 Con_imp_Arr_Resid Con_imp_eq_Srcs Cong_imp_arr(1) Ide.simps(2) Srcs_Resid Con_single_ide_ind con_char arrE) moreover have "Srcs [b'] = Trgs T'" using assms by (metis Con_imp_Arr_Resid Con_imp_eq_Srcs 
Cong_imp_arr(2) Ide.simps(2) Srcs_Resid 1 Con_single_ide_ind con_char arrE) ultimately show ?thesis unfolding targets_char\<^sub>P using assms Cong_imp_arr(2) arr_char by blast qed thus ?thesis using assms targets_char in_targets_respects_Cong [of T T' "[b]" "[b']"] by simp qed lemma Cong\<^sub>0_append_resid_NPath: assumes "NPath (T \<^sup>*\\\<^sup>* U)" shows "Cong\<^sub>0 (T @ (U \<^sup>*\\\<^sup>* T)) U" proof (intro conjI) show 0: "(T @ U \<^sup>*\\\<^sup>* T) \<^sup>*\\\<^sup>* U \ Collect NPath" proof - have 1: "(T @ U \<^sup>*\\\<^sup>* T) \<^sup>*\\\<^sup>* U = T \<^sup>*\\\<^sup>* U @ (U \<^sup>*\\\<^sup>* T) \<^sup>*\\\<^sup>* (U \<^sup>*\\\<^sup>* T)" by (metis Arr.simps(1) NPath_implies_Arr assms Con_append(1) Con_implies_Arr(2) Con_sym Resid_append(1) con_imp_arr_resid null_char) moreover have "NPath ..." using assms by (metis 1 Arr_append_iff\<^sub>P NPath_append NPath_implies_Arr Ide_implies_NPath Nil_is_append_conv Resid_Arr_self arr_char con_char arr_resid_iff_con self_append_conv) ultimately show ?thesis by simp qed show "U \<^sup>*\\\<^sup>* (T @ U \<^sup>*\\\<^sup>* T) \ Collect NPath" using assms 0 by (metis Arr.simps(1) Con_implies_Arr(2) Cong\<^sub>0_reflexive Resid_append(2) append.right_neutral arr_char Con_sym) qed end locale paths_in_rts_with_coherent_normal = R: rts + N: coherent_normal_sub_rts + paths_in_rts begin sublocale paths_in_rts_with_normal resid \ .. notation Cong\<^sub>0 (infix "\\<^sup>*\<^sub>0" 50) notation Cong (infix "\\<^sup>*" 50) text \ Since composites of normal transitions are assumed to exist, normal paths can be ``folded'' by composition down to single transitions. \ lemma NPath_folding: shows "NPath U \ \u. u \ \ \ R.sources u = Srcs U \ R.targets u = Trgs U \ (\t. con [t] U \ [t] \<^sup>*\\\<^sup>* U \\<^sup>*\<^sub>0 [t \\ u])" proof (induct U) show "NPath [] \ \u. u \ \ \ R.sources u = Srcs [] \ R.targets u = Trgs [] \ (\t. con [t] [] \ [t] \<^sup>*\\\<^sup>* [] \\<^sup>*\<^sub>0 [t \\ u])" using NPath_def by auto fix v U assume ind: "NPath U \ \u. u \ \ \ R.sources u = Srcs U \ R.targets u = Trgs U \ (\t. con [t] U \ [t] \<^sup>*\\\<^sup>* U \\<^sup>*\<^sub>0 [t \\ u])" assume vU: "NPath (v # U)" show "\vU. vU \ \ \ R.sources vU = Srcs (v # U) \ R.targets vU = Trgs (v # U) \ (\t. con [t] (v # U) \ [t] \<^sup>*\\\<^sup>* (v # U) \\<^sup>*\<^sub>0 [t \\ vU])" proof (cases "U = []") show "U = [] \ \vU. vU \ \ \ R.sources vU = Srcs (v # U) \ R.targets vU = Trgs (v # U) \ (\t. con [t] (v # U) \ [t] \<^sup>*\\\<^sup>* (v # U) \\<^sup>*\<^sub>0 [t \\ vU])" using vU Resid_rec(1) con_char by (metis Cong\<^sub>0_reflexive NPath_def Srcs.simps(2) Trgs.simps(2) arr_resid_iff_con insert_subset list.simps(15)) assume "U \ []" hence U: "NPath U" using vU by (metis NPath_append append_Cons append_Nil) obtain u where u: "u \ \ \ R.sources u = Srcs U \ R.targets u = Trgs U \ (\t. con [t] U \ [t] \<^sup>*\\\<^sup>* U \\<^sup>*\<^sub>0 [t \\ u])" using U ind by blast have seq: "R.seq v u" proof show "R.arr v" using vU by (metis Con_Arr_self Con_rec(4) NPath_implies_Arr \U \ []\ R.arrI) show "R.arr u" by (simp add: N.elements_are_arr u) show "R.targets v = R.sources u" by (metis (full_types) NPath_implies_Arr R.sources_resid Srcs.simps(2) \U \ []\ Con_Arr_self Con_imp_eq_Srcs Con_initial_right Con_rec(2) u vU) qed obtain vu where vu: "R.composite_of v u vu" using N.composite_closed_right seq u by presburger have "vu \ \ \ R.sources vu = Srcs (v # U) \ R.targets vu = Trgs (v # U) \ (\t. 
con [t] (v # U) \ [t] \<^sup>*\\\<^sup>* (v # U) \\<^sup>*\<^sub>0 [t \\ vu])" proof (intro conjI allI) show "vu \ \" by (meson NPath_def N.composite_closed list.set_intros(1) subsetD u vU vu) show "R.sources vu = Srcs (v # U)" by (metis Con_imp_eq_Srcs Con_initial_right NPath_implies_Arr R.sources_composite_of Srcs.simps(2) Arr_iff_Con_self vU vu) show "R.targets vu = Trgs (v # U)" by (metis R.targets_composite_of Trgs.simps(3) \U \ []\ list.exhaust_sel u vu) fix t show "con [t] (v # U) \ [t] \<^sup>*\\\<^sup>* (v # U) \\<^sup>*\<^sub>0 [t \\ vu]" proof (intro impI) assume t: "con [t] (v # U)" have 1: "[t] \<^sup>*\\\<^sup>* (v # U) = [t \\ v] \<^sup>*\\\<^sup>* U" using t Resid_rec(3) \U \ []\ con_char by force also have "... \\<^sup>*\<^sub>0 [(t \\ v) \\ u]" using 1 t u by force also have "[(t \\ v) \\ u] \\<^sup>*\<^sub>0 [t \\ vu]" proof - have "(t \\ v) \\ u \ t \\ vu" using vu R.resid_composite_of by (metis (no_types, lifting) N.Cong\<^sub>0_composite_of_arr_normal N.Cong\<^sub>0_subst_right(1) \U \ []\ Con_rec(3) con_char R.con_sym t u) thus ?thesis using Ide.simps(2) R.prfx_implies_con Resid.simps(3) ide_char ide_closed by presburger qed finally show "[t] \<^sup>*\\\<^sup>* (v # U) \\<^sup>*\<^sub>0 [t \\ vu]" by blast qed qed thus ?thesis by blast qed qed text \ Coherence for single transitions extends inductively to paths. \ lemma Coherent_single: assumes "R.arr t" and "NPath U" and "NPath U'" and "R.sources t = Srcs U" and "Srcs U = Srcs U'" and "Trgs U = Trgs U'" shows "[t] \<^sup>*\\\<^sup>* U \\<^sup>*\<^sub>0 [t] \<^sup>*\\\<^sup>* U'" proof - have 1: "con [t] U \ con [t] U'" using assms by (metis Arr.simps(1-2) Arr_iff_Con_self Resid_NPath_preserves_reflects_Con Srcs.simps(2) con_char) obtain u where u: "u \ \ \ R.sources u = Srcs U \ R.targets u = Trgs U \ (\t. con [t] U \ [t] \<^sup>*\\\<^sup>* U \\<^sup>*\<^sub>0 [t \\ u])" using assms NPath_folding by metis obtain u' where u': "u' \ \ \ R.sources u' = Srcs U' \ R.targets u' = Trgs U' \ (\t. con [t] U' \ [t] \<^sup>*\\\<^sup>* U' \\<^sup>*\<^sub>0 [t \\ u'])" using assms NPath_folding by metis have "[t] \<^sup>*\\\<^sup>* U \\<^sup>*\<^sub>0 [t \\ u]" using u 1 by blast also have "[t \\ u] \\<^sup>*\<^sub>0 [t \\ u']" using assms(1,4-6) N.Cong\<^sub>0_imp_con N.coherent u u' NPath_def by simp also have "[t \\ u'] \\<^sup>*\<^sub>0 [t] \<^sup>*\\\<^sup>* U'" using u' 1 by simp finally show ?thesis by simp qed lemma Coherent: shows "\U U'. \ Arr T; NPath U; NPath U'; Srcs T = Srcs U; Srcs U = Srcs U'; Trgs U = Trgs U' \ \ T \<^sup>*\\\<^sup>* U \\<^sup>*\<^sub>0 T \<^sup>*\\\<^sup>* U'" proof (induct T) show "\U U'. \ Arr []; NPath U; NPath U'; Srcs [] = Srcs U; Srcs U = Srcs U'; Trgs U = Trgs U' \ \ [] \<^sup>*\\\<^sup>* U \\<^sup>*\<^sub>0 [] \<^sup>*\\\<^sup>* U'" by (simp add: arr_char) fix t T U U' assume tT: "Arr (t # T)" and U: "NPath U" and U': "NPath U'" and Srcs1: "Srcs (t # T) = Srcs U" and Srcs2: "Srcs U = Srcs U'" and Trgs: "Trgs U = Trgs U'" and ind: "\U U'. 
\ Arr T; NPath U; NPath U'; Srcs T = Srcs U; Srcs U = Srcs U'; Trgs U = Trgs U' \ \ T \<^sup>*\\\<^sup>* U \\<^sup>*\<^sub>0 T \<^sup>*\\\<^sup>* U'" have t: "R.arr t" using tT by (metis Arr.simps(2) Con_Arr_self Con_rec(4) R.arrI) show "(t # T) \<^sup>*\\\<^sup>* U \\<^sup>*\<^sub>0 (t # T) \<^sup>*\\\<^sup>* U'" proof (cases "T = []") show "T = [] \ ?thesis" by (metis Srcs.simps(2) Srcs1 Srcs2 Trgs U U' Coherent_single Arr.simps(2) tT) assume T: "T \ []" let ?t = "[t] \<^sup>*\\\<^sup>* U" and ?t' = "[t] \<^sup>*\\\<^sup>* U'" let ?T = "T \<^sup>*\\\<^sup>* (U \<^sup>*\\\<^sup>* [t])" let ?T' = "T \<^sup>*\\\<^sup>* (U' \<^sup>*\\\<^sup>* [t])" have 0: "(t # T) \<^sup>*\\\<^sup>* U = ?t @ ?T \ (t # T) \<^sup>*\\\<^sup>* U' = ?t' @ ?T'" using tT U U' Srcs1 Srcs2 by (metis Arr_has_Src Arr_iff_Con_self Resid_cons(1) Srcs.simps(1) Resid_NPath_preserves_reflects_Con) have 1: "?t \\<^sup>*\<^sub>0 ?t'" by (metis Srcs1 Srcs2 Srcs_simp\<^sub>P Trgs U U' list.sel(1) Coherent_single t tT) have A: "?T \<^sup>*\\\<^sup>* (?t' \<^sup>*\\\<^sup>* ?t) = T \<^sup>*\\\<^sup>* ((U \<^sup>*\\\<^sup>* [t]) @ (?t' \<^sup>*\\\<^sup>* ?t))" using 1 Arr.simps(1) Con_append(2) Con_sym Resid_append(2) Con_implies_Arr(1) NPath_def by (metis arr_char elements_are_arr) have B: "?T' \<^sup>*\\\<^sup>* (?t \<^sup>*\\\<^sup>* ?t') = T \<^sup>*\\\<^sup>* ((U' \<^sup>*\\\<^sup>* [t]) @ (?t \<^sup>*\\\<^sup>* ?t'))" by (metis "1" Con_appendI(2) Con_sym Resid.simps(1) Resid_append(2) elements_are_arr not_arr_null null_char) have E: "?T \<^sup>*\\\<^sup>* (?t' \<^sup>*\\\<^sup>* ?t) \\<^sup>*\<^sub>0 ?T' \<^sup>*\\\<^sup>* (?t \<^sup>*\\\<^sup>* ?t')" proof - have "Arr T" using Arr.elims(1) T tT by blast moreover have "NPath (U \<^sup>*\\\<^sup>* [t] @ ([t] \<^sup>*\\\<^sup>* U') \<^sup>*\\\<^sup>* ([t] \<^sup>*\\\<^sup>* U))" using 1 U t tT Srcs1 Srcs_simp\<^sub>P apply (intro NPath_appendI) apply auto by (metis Arr.simps(1) NPath_def Srcs_Resid Trgs_Resid_sym) moreover have "NPath (U' \<^sup>*\\\<^sup>* [t] @ ([t] \<^sup>*\\\<^sup>* U) \<^sup>*\\\<^sup>* ([t] \<^sup>*\\\<^sup>* U'))" using t U' 1 Con_imp_eq_Srcs Trgs_Resid_sym apply (intro NPath_appendI) apply auto apply (metis Arr.simps(2) NPath_Resid Resid.simps(1)) by (metis Arr.simps(1) NPath_def Srcs_Resid) moreover have "Srcs T = Srcs (U \<^sup>*\\\<^sup>* [t] @ ([t] \<^sup>*\\\<^sup>* U') \<^sup>*\\\<^sup>* ([t] \<^sup>*\\\<^sup>* U))" using A B by (metis (full_types) "0" "1" Arr_has_Src Con_cons(1) Con_implies_Arr(1) Srcs.simps(1) Srcs_append T elements_are_arr not_arr_null null_char Con_imp_eq_Srcs) moreover have "Srcs (U \<^sup>*\\\<^sup>* [t] @ ([t] \<^sup>*\\\<^sup>* U') \<^sup>*\\\<^sup>* ([t] \<^sup>*\\\<^sup>* U)) = Srcs (U' \<^sup>*\\\<^sup>* [t] @ ([t] \<^sup>*\\\<^sup>* U) \<^sup>*\\\<^sup>* ([t] \<^sup>*\\\<^sup>* U'))" by (metis "1" Con_implies_Arr(2) Con_sym Cong\<^sub>0_imp_con Srcs_Resid Srcs_append arr_char con_char arr_resid_iff_con) moreover have "Trgs (U \<^sup>*\\\<^sup>* [t] @ ([t] \<^sup>*\\\<^sup>* U') \<^sup>*\\\<^sup>* ([t] \<^sup>*\\\<^sup>* U)) = Trgs (U' \<^sup>*\\\<^sup>* [t] @ ([t] \<^sup>*\\\<^sup>* U) \<^sup>*\\\<^sup>* ([t] \<^sup>*\\\<^sup>* U'))" using "1" Cong\<^sub>0_imp_con con_char by force ultimately show ?thesis using A B ind [of "(U \<^sup>*\\\<^sup>* [t]) @ (?t' \<^sup>*\\\<^sup>* ?t)" "(U' \<^sup>*\\\<^sup>* [t]) @ (?t \<^sup>*\\\<^sup>* ?t')"] by simp qed have C: "NPath ((?T \<^sup>*\\\<^sup>* (?t' \<^sup>*\\\<^sup>* ?t)) \<^sup>*\\\<^sup>* (?T' \<^sup>*\\\<^sup>* (?t \<^sup>*\\\<^sup>* ?t')))" using E by blast have D: 
"NPath ((?T' \<^sup>*\\\<^sup>* (?t \<^sup>*\\\<^sup>* ?t')) \<^sup>*\\\<^sup>* (?T \<^sup>*\\\<^sup>* (?t' \<^sup>*\\\<^sup>* ?t)))" using E by blast show ?thesis proof have 2: "((t # T) \<^sup>*\\\<^sup>* U) \<^sup>*\\\<^sup>* ((t # T) \<^sup>*\\\<^sup>* U') = ((?t \<^sup>*\\\<^sup>* ?t') \<^sup>*\\\<^sup>* ?T') @ ((?T \<^sup>*\\\<^sup>* (?t' \<^sup>*\\\<^sup>* ?t)) \<^sup>*\\\<^sup>* (?T' \<^sup>*\\\<^sup>* (?t \<^sup>*\\\<^sup>* ?t')))" proof - have "((t # T) \<^sup>*\\\<^sup>* U) \<^sup>*\\\<^sup>* ((t # T) \<^sup>*\\\<^sup>* U') = (?t @ ?T) \<^sup>*\\\<^sup>* (?t' @ ?T')" using 0 by fastforce also have "... = ((?t @ ?T) \<^sup>*\\\<^sup>* ?t') \<^sup>*\\\<^sup>* ?T'" using tT T U U' Srcs1 Srcs2 0 by (metis Con_appendI(2) Con_cons(1) Con_sym Resid.simps(1) Resid_append(2)) also have "... = ((?t \<^sup>*\\\<^sup>* ?t') @ (?T \<^sup>*\\\<^sup>* (?t' \<^sup>*\\\<^sup>* ?t))) \<^sup>*\\\<^sup>* ?T'" by (metis (no_types, lifting) Arr.simps(1) Con_appendI(1) Con_implies_Arr(1) D NPath_def Resid_append(1) null_is_zero(2)) also have "... = ((?t \<^sup>*\\\<^sup>* ?t') \<^sup>*\\\<^sup>* ?T') @ ((?T \<^sup>*\\\<^sup>* (?t' \<^sup>*\\\<^sup>* ?t)) \<^sup>*\\\<^sup>* (?T' \<^sup>*\\\<^sup>* (?t \<^sup>*\\\<^sup>* ?t')))" proof - have "?t \<^sup>*\\\<^sup>* ?t' @ ?T \<^sup>*\\\<^sup>* (?t' \<^sup>*\\\<^sup>* ?t) \<^sup>*\\<^sup>* ?T'" using C D E Con_sym by (metis Con_append(2) Cong\<^sub>0_imp_con con_char arr_resid_iff_con con_implies_arr(2)) thus ?thesis using Resid_append(1) by (metis Con_sym append.right_neutral Resid.simps(1)) qed finally show ?thesis by simp qed moreover have 3: "NPath ..." proof - have "NPath ((?t \<^sup>*\\\<^sup>* ?t') \<^sup>*\\\<^sup>* ?T')" using 0 1 E by (metis Con_imp_Arr_Resid Con_imp_eq_Srcs NPath_Resid Resid.simps(1) ex_un_null mem_Collect_eq) moreover have "Trgs ((?t \<^sup>*\\\<^sup>* ?t') \<^sup>*\\\<^sup>* ?T') = Srcs ((?T \<^sup>*\\\<^sup>* (?t' \<^sup>*\\\<^sup>* ?t)) \<^sup>*\\\<^sup>* (?T' \<^sup>*\\\<^sup>* (?t \<^sup>*\\\<^sup>* ?t')))" using C by (metis NPath_implies_Arr Srcs.simps(1) Srcs_Resid Trgs_Resid_sym Arr_has_Src) ultimately show ?thesis using C by blast qed ultimately show "((t # T) \<^sup>*\\\<^sup>* U) \<^sup>*\\\<^sup>* ((t # T) \<^sup>*\\\<^sup>* U') \ Collect NPath" by simp have 4: "((t # T) \<^sup>*\\\<^sup>* U') \<^sup>*\\\<^sup>* ((t # T) \<^sup>*\\\<^sup>* U) = ((?t' \<^sup>*\\\<^sup>* ?t) \<^sup>*\\\<^sup>* ?T) @ ((?T' \<^sup>*\\\<^sup>* (?t \<^sup>*\\\<^sup>* ?t')) \<^sup>*\\\<^sup>* (?T \<^sup>*\\\<^sup>* (?t' \<^sup>*\\\<^sup>* ?t)))" by (metis "0" "2" "3" Arr.simps(1) Con_implies_Arr(1) Con_sym D NPath_def Resid_append2) moreover have "NPath ..." 
proof - have "NPath ((?t' \<^sup>*\\\<^sup>* ?t) \<^sup>*\\\<^sup>* ?T)" by (metis "1" CollectD Cong\<^sub>0_imp_con E con_imp_coinitial forward_stable arr_resid_iff_con con_implies_arr(2)) moreover have "NPath ((?T' \<^sup>*\\\<^sup>* (?t \<^sup>*\\\<^sup>* ?t')) \<^sup>*\\\<^sup>* (?T \<^sup>*\\\<^sup>* (?t' \<^sup>*\\\<^sup>* ?t)))" using U U' 1 D ind Coherent_single [of t U' U] by blast moreover have "Trgs ((?t' \<^sup>*\\\<^sup>* ?t) \<^sup>*\\\<^sup>* ?T) = Srcs ((?T' \<^sup>*\\\<^sup>* (?t \<^sup>*\\\<^sup>* ?t')) \<^sup>*\\\<^sup>* (?T \<^sup>*\\\<^sup>* (?t' \<^sup>*\\\<^sup>* ?t)))" by (metis Arr.simps(1) NPath_def Srcs_Resid Trgs_Resid_sym calculation(2)) ultimately show ?thesis by blast qed ultimately show "((t # T) \<^sup>*\\\<^sup>* U') \<^sup>*\\\<^sup>* ((t # T) \<^sup>*\\\<^sup>* U) \ Collect NPath" by simp qed qed qed sublocale rts_with_composites Resid using is_rts_with_composites by simp sublocale coherent_normal_sub_rts Resid \Collect NPath\ proof fix T U U' assume T: "arr T" and U: "U \ Collect NPath" and U': "U' \ Collect NPath" assume sources_UU': "sources U = sources U'" and targets_UU': "targets U = targets U'" and TU: "sources T = sources U" have "Srcs T = Srcs U" using TU sources_char\<^sub>P T arr_iff_has_source by auto moreover have "Srcs U = Srcs U'" by (metis Con_imp_eq_Srcs T TU con_char con_imp_coinitial_ax con_sym in_sourcesE in_sourcesI arr_def sources_UU') moreover have "Trgs U = Trgs U'" using U U' targets_UU' targets_char by (metis (full_types) arr_iff_has_target composable_def composable_iff_seq composite_of_arr_target elements_are_arr equals0I seq_char) ultimately show "T \<^sup>*\\\<^sup>* U \\<^sup>*\<^sub>0 T \<^sup>*\\\<^sup>* U'" using T U U' Coherent [of T U U'] arr_char by blast qed theorem coherent_normal_extends_to_paths: shows "coherent_normal_sub_rts Resid (Collect NPath)" .. lemma Cong\<^sub>0_append_Arr_NPath: assumes "T \ []" and "Arr (T @ U)" and "NPath U" shows "Cong\<^sub>0 (T @ U) T" using assms by (metis Arr.simps(1) Arr_appendE\<^sub>P NPath_implies_Arr append_is_composite_of arrI\<^sub>P arr_append_imp_seq composite_of_arr_normal mem_Collect_eq) lemma Cong_append_NPath_Arr: assumes "T \ []" and "Arr (U @ T)" and "NPath U" shows "U @ T \\<^sup>* T" using assms by (metis (full_types) Arr.simps(1) Con_Arr_self Con_append(2) Con_implies_Arr(2) Con_imp_eq_Srcs composite_of_normal_arr Srcs_Resid append_is_composite_of arr_char NPath_implies_Arr mem_Collect_eq seq_char) subsubsection "Permutation Congruence" text \ Here we show that \\<^sup>*\\<^sup>*\ coincides with ``permutation congruence'': the least congruence respecting composition that relates \[t, u \ t]\ and \[u, t \ u]\ whenever \t \ u\ and that relates \T @ [b]\ and \T\ whenever \b\ is an identity such that \seq T [b]\. \ inductive PCong where "Arr T \ PCong T T" | "PCong T U \ PCong U T" | "\PCong T U; PCong U V\ \ PCong T V" | "\seq T U; PCong T T'; PCong U U'\ \ PCong (T @ U) (T' @ U')" | "\seq T [b]; R.ide b\ \ PCong (T @ [b]) T" | "t \ u \ PCong [t, u \\ t] [u, t \\ u]" lemmas PCong.intros(3) [trans] lemma PCong_append_Ide: shows "\seq T B; Ide B\ \ PCong (T @ B) T" proof (induct B) show "\seq T []; Ide []\ \ PCong (T @ []) T" by auto fix b B T assume ind: "\seq T B; Ide B\ \ PCong (T @ B) T" assume seq: "seq T (b # B)" assume Ide: "Ide (b # B)" have "T @ (b # B) = (T @ [b]) @ B" by simp also have "PCong ... 
(T @ B)" apply (cases "B = []") using Ide PCong.intros(5) seq apply force using seq Ide PCong.intros(4) [of "T @ [b]" B T B] by (metis Arr.simps(1) Ide_imp_Ide_hd PCong.intros(1) PCong.intros(5) append_is_Nil_conv arr_append arr_append_imp_seq arr_char calculation list.distinct(1) list.sel(1) seq_char) also have "PCong (T @ B) T" proof (cases "B = []") show "B = [] \ ?thesis" using PCong.intros(1) seq seq_char by force assume B: "B \ []" have "seq T B" using B seq Ide by (metis Con_imp_eq_Srcs Ide_imp_Ide_hd Trgs_append \T @ b # B = (T @ [b]) @ B\ append_is_Nil_conv arr_append arr_append_imp_seq arr_char cong_cons_ideI(2) list.distinct(1) list.sel(1) not_arr_null null_char seq_char ide_implies_arr) thus ?thesis using seq Ide ind by (metis Arr.simps(1) Ide.elims(3) Ide.simps(3) seq_char) qed finally show "PCong (T @ (b # B)) T" by blast qed lemma PCong_imp_Cong: shows "PCong T U \ T \<^sup>*\\<^sup>* U" proof (induct rule: PCong.induct) show "\T. Arr T \ T \<^sup>*\\<^sup>* T" using cong_reflexive by blast show "\T U. \PCong T U; T \<^sup>*\\<^sup>* U\ \ U \<^sup>*\\<^sup>* T" by blast show "\T U V. \PCong T U; T \<^sup>*\\<^sup>* U; PCong U V; U \<^sup>*\\<^sup>* V\ \ T \<^sup>*\\<^sup>* V" using cong_transitive by blast show "\T U U' T'. \seq T U; PCong T T'; T \<^sup>*\\<^sup>* T'; PCong U U'; U \<^sup>*\\<^sup>* U'\ \ T @ U \<^sup>*\\<^sup>* T' @ U'" using cong_append by simp show "\T b. \seq T [b]; R.ide b\ \ T @ [b] \<^sup>*\\<^sup>* T" using cong_append_ideI(4) ide_char by force show "\t u. t \ u \ [t, u \\ t] \<^sup>*\\<^sup>* [u, t \\ u]" proof - have "\t u. t \ u \ [t, u \\ t] \<^sup>*\\<^sup>* [u, t \\ u]" proof - fix t u assume con: "t \ u" have "([t] @ [u \\ t]) \<^sup>*\\\<^sup>* ([u] @ [t \\ u]) = [(t \\ u) \\ (t \\ u), ((u \\ t) \\ (u \\ t)) \\ ((t \\ u) \\ (t \\ u))]" using con Resid_append2 [of "[t]" "[u \\ t]" "[u]" "[t \\ u]"] apply simp by (metis R.arr_resid_iff_con R.con_target R.conE R.con_sym R.prfx_implies_con R.prfx_reflexive R.cube) moreover have "Ide ..." using con by (metis Arr.simps(2) Arr.simps(3) Ide.simps(2) Ide.simps(3) R.arr_resid_iff_con R.con_sym R.resid_ide_arr R.prfx_reflexive calculation Con_imp_Arr_Resid) ultimately show"[t, u \\ t] \<^sup>*\\<^sup>* [u, t \\ u]" using ide_char by auto qed thus "\t u. t \ u \ [t, u \\ t] \<^sup>*\\<^sup>* [u, t \\ u]" using R.con_sym by blast qed qed lemma PCong_permute_single: shows "\t. [t] \<^sup>*\\<^sup>* U \ PCong ([t] @ (U \<^sup>*\\\<^sup>* [t])) (U @ ([t] \<^sup>*\\\<^sup>* U))" proof (induct U) show "\t. [t] \<^sup>*\\<^sup>* [] \ PCong ([t] @ [] \<^sup>*\\\<^sup>* [t]) ([] @ [t] \<^sup>*\\\<^sup>* [])" by auto fix t u U assume ind: "\t. [t] \<^sup>*\\\<^sup>* U \ [] \ PCong ([t] @( U \<^sup>*\\\<^sup>* [t])) (U @ ([t] \<^sup>*\\\<^sup>* U))" assume con: "[t] \<^sup>*\\<^sup>* u # U" show "PCong ([t] @ (u # U) \<^sup>*\\\<^sup>* [t]) ((u # U) @ [t] \<^sup>*\\\<^sup>* (u # U))" proof (cases "U = []") show "U = [] \ ?thesis" by (metis PCong.intros(6) Resid.simps(3) append_Cons append_eq_append_conv2 append_self_conv con_char con_def con con_sym_ax) assume U: "U \ []" show "PCong ([t] @ ((u # U) \<^sup>*\\\<^sup>* [t])) ((u # U) @ ([t] \<^sup>*\\\<^sup>* (u # U)))" proof - have "[t] @ ((u # U) \<^sup>*\\\<^sup>* [t]) = [t] @ ([u \\ t] @ (U \<^sup>*\\\<^sup>* [t \\ u]))" using Con_sym Resid_rec(2) U con by auto also have "... = ([t] @ [u \\ t]) @ (U \<^sup>*\\\<^sup>* [t \\ u])" by auto also have "PCong ... 
(([u] @ [t \\ u]) @ (U \<^sup>*\\\<^sup>* [t \\ u]))" proof - have "PCong ([t] @ [u \\ t]) ([u] @ [t \\ u])" using con by (simp add: Con_rec(3) PCong.intros(6) U) thus ?thesis by (metis Arr_Resid_single Con_implies_Arr(1) Con_rec(2) Con_sym PCong.intros(1,4) Srcs_Resid U append_is_Nil_conv append_is_composite_of arr_append_imp_seq arr_char calculation composite_of_unq_upto_cong con not_arr_null null_char ide_implies_arr seq_char) qed also have "([u] @ [t \\ u]) @ (U \<^sup>*\\\<^sup>* [t \\ u]) = [u] @ ([t \\ u] @ (U \<^sup>*\\\<^sup>* [t \\ u]))" by simp also have "PCong ... ([u] @ (U @ ([t \\ u] \<^sup>*\\\<^sup>* U)))" proof - have "PCong ([t \\ u] @ (U \<^sup>*\\\<^sup>* [t \\ u])) (U @ ([t \\ u] \<^sup>*\\\<^sup>* U))" using ind by (metis Resid_rec(3) U con) moreover have "seq [u] ([t \\ u] @ U \<^sup>*\\\<^sup>* [t \\ u])" proof show "Arr [u]" using Con_implies_Arr(2) Con_initial_right con by blast show "Arr ([t \\ u] @ U \<^sup>*\\\<^sup>* [t \\ u])" using Con_implies_Arr(1) U con Con_imp_Arr_Resid Con_rec(3) Con_sym by fastforce show "Trgs [u] \ Srcs ([t \\ u] @ U \<^sup>*\\\<^sup>* [t \\ u]) \ {}" by (metis Arr.simps(1) Arr.simps(2) Arr_has_Trg Con_implies_Arr(1) Int_absorb R.arr_resid_iff_con R.sources_resid Resid_rec(3) Srcs.simps(2) Srcs_append Trgs.simps(2) U \Arr [u]\ con) qed moreover have "PCong [u] [u]" using PCong.intros(1) calculation(2) seq_char by force ultimately show ?thesis using U arr_append arr_char con seq_char PCong.intros(4) [of "[u]" "[t \\ u] @ (U \<^sup>*\\\<^sup>* [t \\ u])" "[u]" "U @ ([t \\ u] \<^sup>*\\\<^sup>* U)"] by blast qed also have "([u] @ (U @ ([t \\ u] \<^sup>*\\\<^sup>* U))) = ((u # U) @ [t] \<^sup>*\\\<^sup>* (u # U))" by (metis Resid_rec(3) U append_Cons append_Nil con) finally show ?thesis by blast qed qed qed lemma PCong_permute: shows "\U. T \<^sup>*\\<^sup>* U \ PCong (T @ (U \<^sup>*\\\<^sup>* T)) (U @ (T \<^sup>*\\\<^sup>* U))" proof (induct T) show "\U. [] \<^sup>*\\\<^sup>* U \ [] \ PCong ([] @ U \<^sup>*\\\<^sup>* []) (U @ [] \<^sup>*\\\<^sup>* U)" by simp fix t T U assume ind: "\U. T \<^sup>*\\<^sup>* U \ PCong (T @ (U \<^sup>*\\\<^sup>* T)) (U @ (T \<^sup>*\\\<^sup>* U))" assume con: "t # T \<^sup>*\\<^sup>* U" show "PCong ((t # T) @ (U \<^sup>*\\\<^sup>* (t # T))) (U @ ((t # T) \<^sup>*\\\<^sup>* U))" proof (cases "T = []") assume T: "T = []" have "(t # T) @ (U \<^sup>*\\\<^sup>* (t # T)) = [t] @ (U \<^sup>*\\\<^sup>* [t])" using con T by simp also have "PCong ... (U @ ([t] \<^sup>*\\\<^sup>* U))" using PCong_permute_single T con by blast finally show ?thesis using T by fastforce next assume T: "T \ []" have "(t # T) @ (U \<^sup>*\\\<^sup>* (t # T)) = [t] @ (T @ (U \<^sup>*\\\<^sup>* (t # T)))" by simp also have "PCong ... 
([t] @ (U \<^sup>*\\\<^sup>* [t]) @ (T \<^sup>*\\\<^sup>* (U \<^sup>*\\\<^sup>* [t])))" using ind [of "U \<^sup>*\\\<^sup>* [t]"] by (metis Arr.simps(1) Con_imp_Arr_Resid Con_implies_Arr(2) Con_sym PCong.intros(1,4) Resid_cons(2) Srcs_Resid T arr_append arr_append_imp_seq calculation con not_arr_null null_char seq_char) also have "[t] @ (U \<^sup>*\\\<^sup>* [t]) @ (T \<^sup>*\\\<^sup>* (U \<^sup>*\\\<^sup>* [t])) = ([t] @ (U \<^sup>*\\\<^sup>* [t])) @ (T \<^sup>*\\\<^sup>* (U \<^sup>*\\\<^sup>* [t]))" by simp also have "PCong (([t] @ (U \<^sup>*\\\<^sup>* [t])) @ (T \<^sup>*\\\<^sup>* (U \<^sup>*\\\<^sup>* [t]))) ((U @ ([t] \<^sup>*\\\<^sup>* U)) @ (T \<^sup>*\\\<^sup>* (U \<^sup>*\\\<^sup>* [t])))" by (metis Arr.simps(1) Con_cons(1) Con_imp_Arr_Resid Con_implies_Arr(2) PCong.intros(1,4) PCong_permute_single Srcs_Resid T Trgs_append arr_append arr_char con seq_char) also have "(U @ ([t] \<^sup>*\\\<^sup>* U)) @ (T \<^sup>*\\\<^sup>* (U \<^sup>*\\\<^sup>* [t])) = U @ ((t # T) \<^sup>*\\\<^sup>* U)" by (metis Resid.simps(2) Resid_cons(1) append.assoc con) finally show ?thesis by blast qed qed lemma Cong_imp_PCong: assumes "T \<^sup>*\\<^sup>* U" shows "PCong T U" proof - have "PCong T (T @ (U \<^sup>*\\\<^sup>* T))" using assms PCong.intros(2) PCong_append_Ide by (metis Con_implies_Arr(1) Ide.simps(1) Srcs_Resid ide_char Con_imp_Arr_Resid seq_char) also have "PCong (T @ (U \<^sup>*\\\<^sup>* T)) (U @ (T \<^sup>*\\\<^sup>* U))" using PCong_permute assms con_char prfx_implies_con by presburger also have "PCong (U @ (T \<^sup>*\\\<^sup>* U)) U" using assms PCong_append_Ide by (metis Con_imp_Arr_Resid Con_implies_Arr(1) Srcs_Resid arr_resid_iff_con ide_implies_arr con_char ide_char seq_char) finally show ?thesis by blast qed lemma Cong_iff_PCong: shows "T \<^sup>*\\<^sup>* U \ PCong T U" using PCong_imp_Cong Cong_imp_PCong by blast end section "Composite Completion" text \ The RTS of paths in an RTS factors via the coherent normal sub-RTS of identity paths into an extensional RTS with composites, which can be regarded as a ``composite completion'' of the original RTS. \ locale composite_completion = R: rts begin interpretation N: coherent_normal_sub_rts resid \Collect R.ide\ using R.rts_axioms R.identities_form_coherent_normal_sub_rts by auto sublocale P: paths_in_rts_with_coherent_normal resid \Collect R.ide\ .. sublocale quotient_by_coherent_normal P.Resid \Collect P.NPath\ .. 
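(*
 * Informal sketch (a gloss on the construction, not part of the checked proofs):
 * arrows of the composite completion are Cong-classes of paths, and residuation
 * of classes is computed via representatives: if T and U are consistent paths,
 * then the residuation of the class of T along the class of U is the class of
 * the path residual of T along U.  This is well-defined because Cong-related
 * paths have Cong-related residuals (cf. P.Cong_subst).  In particular, a single
 * transition t is included as the class of the one-element path [t], and
 * incl_preserves_resid below shows that residuation of such classes agrees with
 * residuation in the original RTS.  The lemmas con_char\<^sub>C\<^sub>C and resid_char below
 * make the representative-based computation precise.
 *)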
notation P.Resid (infix "\<^sup>*\\\<^sup>*" 70) notation P.Con (infix "\<^sup>*\\<^sup>*" 50) notation P.Cong (infix "\<^sup>*\\<^sup>*" 50) notation P.Cong\<^sub>0 (infix "\<^sup>*\\<^sub>0\<^sup>*" 50) notation P.Cong_class ("\_\") notation Resid (infix "\\<^sup>*\\\<^sup>*\" 70) notation con (infix "\\<^sup>*\\<^sup>*\" 50) notation prfx (infix "\\<^sup>*\\<^sup>*\" 50) lemma NPath_char: shows "P.NPath T \ P.Ide T" using P.NPath_def P.Ide_implies_NPath by blast lemma Cong_eq_Cong\<^sub>0: shows "T \<^sup>*\\<^sup>* T' \ T \<^sup>*\\<^sub>0\<^sup>* T'" by (metis P.Cong_iff_cong P.ide_char P.ide_closed CollectD Collect_cong NPath_char) lemma Srcs_respects_Cong: assumes "T \<^sup>*\\<^sup>* T'" shows "P.Srcs T = P.Srcs T'" using assms by (meson P.Con_imp_eq_Srcs P.Cong\<^sub>0_imp_con P.con_char Cong_eq_Cong\<^sub>0) lemma sources_respects_Cong: assumes "T \<^sup>*\\<^sup>* T'" shows "P.sources T = P.sources T'" using assms by (meson P.Cong\<^sub>0_imp_coinitial Cong_eq_Cong\<^sub>0) lemma Trgs_respects_Cong: assumes "T \<^sup>*\\<^sup>* T'" shows "P.Trgs T = P.Trgs T'" proof - have "P.Trgs T = P.Trgs (T @ (T' \<^sup>*\\\<^sup>* T))" using assms NPath_char P.Arr.simps(1) P.Con_imp_Arr_Resid P.Con_sym P.Cong_def P.Con_Arr_self P.Con_implies_Arr(2) P.Resid_Ide(1) P.Srcs_Resid P.Trgs_append by (metis P.Cong\<^sub>0_imp_con P.con_char CollectD) also have "... = P.Trgs (T' @ (T \<^sup>*\\\<^sup>* T'))" using P.Cong\<^sub>0_imp_con P.con_char Cong_eq_Cong\<^sub>0 assms by force also have "... = P.Trgs T'" using assms NPath_char P.Arr.simps(1) P.Con_imp_Arr_Resid P.Con_sym P.Cong_def P.Con_Arr_self P.Con_implies_Arr(2) P.Resid_Ide(1) P.Srcs_Resid P.Trgs_append by (metis P.Cong\<^sub>0_imp_con P.con_char CollectD) finally show ?thesis by blast qed lemma targets_respects_Cong: assumes "T \<^sup>*\\<^sup>* T'" shows "P.targets T = P.targets T'" using assms P.Cong_imp_arr(1) P.Cong_imp_arr(2) P.arr_iff_has_target P.targets_char\<^sub>P Trgs_respects_Cong by force lemma ide_char\<^sub>C\<^sub>C: shows "ide \ \ arr \ \ (\T. T \ \ \ P.Ide T)" using NPath_char ide_char' by force lemma con_char\<^sub>C\<^sub>C: shows "\ \\<^sup>*\\<^sup>*\ \ \ arr \ \ arr \ \ P.Cong_class_rep \ \<^sup>*\\<^sup>* P.Cong_class_rep \" proof show "arr \ \ arr \ \ P.Cong_class_rep \ \<^sup>*\\<^sup>* P.Cong_class_rep \ \ \ \\<^sup>*\\<^sup>*\ \" using arr_char P.con_char by (meson P.rep_in_Cong_class con_char\<^sub>Q\<^sub>C\<^sub>N) show "\ \\<^sup>*\\<^sup>*\ \ \ arr \ \ arr \ \ P.Cong_class_rep \ \<^sup>*\\<^sup>* P.Cong_class_rep \" proof - assume con: "\ \\<^sup>*\\<^sup>*\ \" have 1: "arr \ \ arr \" using con coinitial_iff con_imp_coinitial by blast moreover have "P.Cong_class_rep \ \<^sup>*\\<^sup>* P.Cong_class_rep \" proof - obtain T U where TU: "T \ \ \ U \ \ \ P.Con T U" using con Resid_def by (meson P.con_char con_char\<^sub>Q\<^sub>C\<^sub>N) have "T \<^sup>*\\<^sup>* P.Cong_class_rep \ \ U \<^sup>*\\<^sup>* P.Cong_class_rep \" using TU 1 by (meson P.Cong_class_memb_Cong_rep arr_char) thus ?thesis using TU P.Cong_subst(1) [of T "P.Cong_class_rep \" U "P.Cong_class_rep \"] by (metis P.coinitial_iff P.con_char P.con_imp_coinitial sources_respects_Cong) qed ultimately show ?thesis by simp qed qed lemma con_char\<^sub>C\<^sub>C': shows "\ \\<^sup>*\\<^sup>*\ \ \ arr \ \ arr \ \ (\T U. T \ \ \ U \ \ \ T \<^sup>*\\<^sup>* U)" proof show "arr \ \ arr \ \ (\T U. 
T \ \ \ U \ \ \ T \<^sup>*\\<^sup>* U) \ \ \\<^sup>*\\<^sup>*\ \" using con_char\<^sub>C\<^sub>C by (simp add: P.rep_in_Cong_class arr_char) show "\ \\<^sup>*\\<^sup>*\ \ \ arr \ \ arr \ \ (\T U. T \ \ \ U \ \ \ T \<^sup>*\\<^sup>* U)" proof (intro conjI allI impI) assume 1: "\ \\<^sup>*\\<^sup>*\ \" show "arr \" using 1 con_implies_arr by simp show "arr \" using 1 con_implies_arr by simp fix T U assume 2: "T \ \ \ U \ \" show "T \<^sup>*\\<^sup>* U" using 1 2 P.Cong_class_memb_Cong_rep by (meson P.Cong\<^sub>0_subst_Con P.con_char Cong_eq_Cong\<^sub>0 arr_char con_char\<^sub>C\<^sub>C) qed qed lemma resid_char: shows "\ \\<^sup>*\\\<^sup>*\ \ = (if \ \\<^sup>*\\<^sup>*\ \ then \P.Cong_class_rep \ \<^sup>*\\\<^sup>* P.Cong_class_rep \\ else {})" by (metis P.con_char P.rep_in_Cong_class Resid_by_members arr_char arr_resid_iff_con con_char\<^sub>C\<^sub>C is_Cong_class_Resid) lemma src_char': shows "src \ = {A. arr \ \ P.Ide A \ P.Srcs (P.Cong_class_rep \) = P.Srcs A}" proof (cases "arr \") show "\ arr \ \ ?thesis" by (simp add: null_char src_def) assume \: "arr \" have 1: "\A. P.Ide A \ P.Srcs (P.Cong_class_rep \) = P.Srcs A" by (metis P.Arr.simps(1) P.Con_imp_eq_Srcs P.Cong\<^sub>0_imp_con P.Cong_class_memb_Cong_rep P.Cong_def P.con_char P.rep_in_Cong_class CollectD \ NPath_char P.Con_implies_Arr(1) arr_char) let ?A = "SOME A. P.Ide A \ P.Srcs (P.Cong_class_rep \) = P.Srcs A" have A: "P.Ide ?A \ P.Srcs (P.Cong_class_rep \) = P.Srcs ?A" using 1 someI_ex [of "\A. P.Ide A \ P.Srcs (P.Cong_class_rep \) = P.Srcs A"] by simp have a: "arr \?A\" using A P.ide_char P.is_Cong_classI arr_char by blast have ide_a: "ide \?A\" using a A P.Cong_class_def P.normal_is_Cong_closed NPath_char ide_char\<^sub>C\<^sub>C by auto have "sources \ = {\?A\}" proof - have "\ \\<^sup>*\\<^sup>*\ \?A\" by (metis (no_types, lifting) A P.Con_Ide_iff P.Cong_class_memb_Cong_rep P.Cong_imp_arr(1) P.arr_char P.arr_in_Cong_class P.ide_char P.ide_implies_arr P.rep_in_Cong_class Con_char a \ P.con_char null_char arr_char P.con_sym conI) hence "\?A\ \ sources \" using ide_a in_sourcesI by simp thus ?thesis using sources_char by auto qed moreover have "\?A\ = {A. P.Ide A \ P.Srcs (P.Cong_class_rep \) = P.Srcs A}" proof show "{A. P.Ide A \ P.Srcs (P.Cong_class_rep \) = P.Srcs A} \ \?A\" using A P.Cong_class_def P.Cong_closure_props(3) P.Ide_implies_Arr P.ide_closed P.ide_char by fastforce show "\?A\ \ {A. P.Ide A \ P.Srcs (P.Cong_class_rep \) = P.Srcs A}" using a A P.Cong_class_def Srcs_respects_Cong ide_a ide_char\<^sub>C\<^sub>C by blast qed ultimately show ?thesis using \ src_in_sources by force qed lemma src_char: shows "src \ = {A. arr \ \ P.Ide A \ (\T. T \ \ \ P.Srcs T = P.Srcs A)}" proof (cases "arr \") show "\ arr \ \ ?thesis" by (simp add: null_char src_def) assume \: "arr \" have "\T. T \ \ \ P.Srcs T = P.Srcs (P.Cong_class_rep \)" using \ P.Cong_class_memb_Cong_rep Srcs_respects_Cong arr_char by auto thus ?thesis using \ src_char' P.is_Cong_class_def arr_char by force qed lemma trg_char': shows "trg \ = {B. arr \ \ P.Ide B \ P.Trgs (P.Cong_class_rep \) = P.Srcs B}" proof (cases "arr \") show "\ arr \ \ ?thesis" by (metis (no_types, lifting) Collect_empty_eq arrI resid_arr_self resid_char) assume \: "arr \" have 1: "\B. P.Ide B \ P.Trgs (P.Cong_class_rep \) = P.Srcs B" by (metis P.Con_implies_Arr(2) P.Resid_Arr_self P.Srcs_Resid \ con_char\<^sub>C\<^sub>C arrE) define B where "B = (SOME B. 
P.Ide B \ P.Trgs (P.Cong_class_rep \) = P.Srcs B)" have B: "P.Ide B \ P.Trgs (P.Cong_class_rep \) = P.Srcs B" unfolding B_def using 1 someI_ex [of "\B. P.Ide B \ P.Trgs (P.Cong_class_rep \) = P.Srcs B"] by simp hence 2: "P.Ide B \ P.Con (P.Resid (P.Cong_class_rep \) (P.Cong_class_rep \)) B" using \ by (metis (no_types, lifting) P.Con_Ide_iff P.Ide_implies_Arr P.Resid_Arr_self P.Srcs_Resid arrE P.Con_implies_Arr(2) con_char\<^sub>C\<^sub>C) have b: "arr \B\" by (simp add: "2" P.ide_char P.is_Cong_classI arr_char) have ide_b: "ide \B\" by (meson "2" P.arr_in_Cong_class P.ide_char P.ide_closed b disjoint_iff ide_char P.ide_implies_arr) have "targets \ = {\B\}" proof - have "cong (\ \\<^sup>*\\\<^sup>*\ \) \B\" proof - have "\ \\<^sup>*\\\<^sup>*\ \ = \B\" by (metis (no_types, lifting) "2" P.Cong_class_eqI P.Cong_closure_props(3) P.Resid_Arr_Ide_ind P.Resid_Ide(1) NPath_char \ con_char\<^sub>C\<^sub>C resid_char P.Con_implies_Arr(2) P.Resid_Arr_self mem_Collect_eq) thus ?thesis using b cong_reflexive by presburger qed thus ?thesis using \ targets_char\<^sub>Q\<^sub>C\<^sub>N [of \] cong_char by auto qed moreover have "\B\ = {B. P.Ide B \ P.Trgs (P.Cong_class_rep \) = P.Srcs B}" proof show "{B. P.Ide B \ P.Trgs (P.Cong_class_rep \) = P.Srcs B} \ \B\" using B P.Cong_class_def P.Cong_closure_props(3) P.Ide_implies_Arr P.ide_closed P.ide_char by force show "\B\ \ {B. P.Ide B \ P.Trgs (P.Cong_class_rep \) = P.Srcs B}" proof - have "\B'. P.Cong B' B \ P.Ide B' \ P.Trgs (P.Cong_class_rep \) = P.Srcs B'" using B NPath_char P.normal_is_Cong_closed Srcs_respects_Cong by (metis P.Cong_closure_props(1) mem_Collect_eq) thus ?thesis using P.Cong_class_def by blast qed qed ultimately show ?thesis using \ trg_in_targets by force qed lemma trg_char: shows "trg \ = {B. arr \ \ P.Ide B \ (\T. T \ \ \ P.Trgs T = P.Srcs B)}" proof (cases "arr \") show "\ arr \ \ ?thesis" using trg_char' by presburger assume \: "arr \" have "\T. T \ \ \ P.Trgs T = P.Trgs (P.Cong_class_rep \)" using \ by (metis P.Cong_class_memb_Cong_rep Trgs_respects_Cong arr_char) thus ?thesis using \ trg_char' P.is_Cong_class_def arr_char by force qed lemma is_extensional_rts_with_composites: shows "extensional_rts_with_composites Resid" proof fix \ \ assume seq: "seq \ \" obtain T where T: "\ = \T\" using seq P.Cong_class_rep arr_char seq_def by blast obtain U where U: "\ = \U\" using seq P.Cong_class_rep arr_char seq_def by blast have 1: "P.Arr T \ P.Arr U" using seq T U P.Con_implies_Arr(2) P.Cong\<^sub>0_subst_right(1) P.Cong_class_def P.con_char seq_def by (metis Collect_empty_eq P.Cong_imp_arr(1) P.arr_char P.rep_in_Cong_class empty_iff arr_char) have 2: "P.Trgs T = P.Srcs U" proof - have "targets \ = sources \" using seq seq_def sources_char targets_char\<^sub>W\<^sub>E by force hence 3: "trg \ = src \" using seq arr_has_un_source arr_has_un_target by (metis seq_def src_in_sources trg_in_targets) hence "{B. P.Ide B \ P.Trgs (P.Cong_class_rep \) = P.Srcs B} = {A. 
P.Ide A \ P.Srcs (P.Cong_class_rep \) = P.Srcs A}" using seq seq_def src_char' [of \] trg_char' [of \] by force hence "P.Trgs (P.Cong_class_rep \) = P.Srcs (P.Cong_class_rep \)" using seq seq_def arr_char by (metis (mono_tags, lifting) "3" P.Cong_class_is_nonempty Collect_empty_eq arr_src_iff_arr mem_Collect_eq trg_char') thus ?thesis using seq seq_def arr_char T U P.Srcs_respects_Cong P.Trgs_respects_Cong P.Cong_class_memb_Cong_rep P.Cong_symmetric by (metis "1" P.arr_char P.arr_in_Cong_class Srcs_respects_Cong Trgs_respects_Cong) qed have "P.Arr (T @ U)" using 1 2 by simp moreover have "P.Ide (T \<^sup>*\\\<^sup>* (T @ U))" by (metis "1" P.Con_append(2) P.Con_sym P.Resid_Arr_self P.Resid_Ide_Arr_ind P.Resid_append(2) P.Trgs.simps(1) calculation P.Arr_has_Trg) moreover have "(T @ U) \<^sup>*\\\<^sup>* T \<^sup>*\\<^sup>* U" by (metis "1" P.Arr.simps(1) P.Con_sym P.Cong\<^sub>0_append_resid_NPath P.Cong\<^sub>0_cancel_left\<^sub>C\<^sub>S P.Ide.simps(1) calculation(2) Cong_eq_Cong\<^sub>0 NPath_char) ultimately have "composite_of \ \ \T @ U\" proof (unfold composite_of_def, intro conjI) show "prfx \ (P.Cong_class (T @ U))" proof - have "ide (\ \\<^sup>*\\\<^sup>*\ \T @ U\)" proof (unfold ide_char, intro conjI) have 3: "T \<^sup>*\\\<^sup>* (T @ U) \ \ \\<^sup>*\\\<^sup>*\ \T @ U\" proof - have "\ \\<^sup>*\\\<^sup>*\ \T @ U\ = \T \<^sup>*\\\<^sup>* (T @ U)\" by (metis "1" P.Ide.simps(1) P.arr_char P.arr_in_Cong_class P.con_char P.is_Cong_classI Resid_by_members T \P.Arr (T @ U)\ \P.Ide (T \<^sup>*\\<^sup>* (T @ U))\) thus ?thesis by (simp add: P.arr_in_Cong_class P.elements_are_arr NPath_char \P.Ide (T \<^sup>*\\<^sup>* (T @ U))\) qed show "arr (\ \\<^sup>*\\\<^sup>*\ \T @ U\)" using 3 arr_char is_Cong_class_Resid by blast show "\ \\<^sup>*\\\<^sup>*\ \T @ U\ \ Collect P.NPath \ {}" using 3 P.ide_closed P.ide_char \P.Ide (T \<^sup>*\\<^sup>* (T @ U))\ by blast qed thus ?thesis by blast qed show "\T @ U\ \\<^sup>*\\\<^sup>*\ \ \\<^sup>*\\<^sup>*\ \" proof - have 3: "((T @ U) \<^sup>*\\\<^sup>* T) \<^sup>*\\\<^sup>* U \ (\T @ U\ \\<^sup>*\\\<^sup>*\ \) \\<^sup>*\\\<^sup>*\ \" proof - have "(\T @ U\ \\<^sup>*\\\<^sup>*\ \) \\<^sup>*\\\<^sup>*\ \ = \((T @ U) \<^sup>*\\\<^sup>* T) \<^sup>*\\\<^sup>* U\" proof - have "\T @ U\ \\<^sup>*\\\<^sup>*\ \ = \(T @ U) \<^sup>*\\\<^sup>* T\" by (metis "1" P.Cong_imp_arr(1) P.arr_char P.arr_in_Cong_class P.is_Cong_classI T \P.Arr (T @ U)\ \(T @ U) \<^sup>*\\<^sup>* T \<^sup>*\\<^sup>* U\ Resid_by_members P.arr_resid_iff_con) moreover have "\(T @ U) \<^sup>*\\\<^sup>* T\ \\<^sup>*\\\<^sup>*\ \ = \((T @ U) \<^sup>*\\\<^sup>* T) \<^sup>*\\\<^sup>* U\" by (metis "1" P.Cong_class_eqI P.Cong_imp_arr(1) P.arr_char P.arr_in_Cong_class P.con_char P.is_Cong_classI arr_char arrE U \(T @ U) \<^sup>*\\<^sup>* T \<^sup>*\\<^sup>* U\ con_char\<^sub>C\<^sub>C' Resid_by_members) ultimately show ?thesis by auto qed thus ?thesis by (metis "1" P.Arr.simps(1) P.Cong\<^sub>0_reflexive P.Resid_append(2) P.arr_char P.arr_in_Cong_class P.elements_are_arr \P.Arr (T @ U)\) qed have "\T @ U\ \\<^sup>*\\\<^sup>*\ \ \\<^sup>*\\<^sup>*\ \" proof (unfold ide_char, intro conjI) show "arr ((\T @ U\ \\<^sup>*\\\<^sup>*\ \) \\<^sup>*\\\<^sup>*\ \)" using 3 arr_char is_Cong_class_Resid by blast show "(\T @ U\ \\<^sup>*\\\<^sup>*\ \) \\<^sup>*\\\<^sup>*\ \ \ Collect P.NPath \ {}" by (metis 1 3 P.Arr.simps(1) P.Resid_append(2) P.con_char IntI \P.Arr (T @ U)\ NPath_char P.Resid_Arr_self P.arr_char empty_iff mem_Collect_eq P.arrE) qed thus ?thesis by blast qed show "\ \\<^sup>*\\<^sup>*\ \T @ U\ 
\\<^sup>*\\\<^sup>*\ \" proof (unfold ide_char, intro conjI) have 3: "U \<^sup>*\\\<^sup>* ((T @ U) \<^sup>*\\\<^sup>* T) \ \ \\<^sup>*\\\<^sup>*\ (\T @ U\ \\<^sup>*\\\<^sup>*\ \)" proof - have "\ \\<^sup>*\\\<^sup>*\ (\T @ U\ \\<^sup>*\\\<^sup>*\ \) = \U \<^sup>*\\\<^sup>* ((T @ U) \<^sup>*\\\<^sup>* T)\" proof - have "\T @ U\ \\<^sup>*\\\<^sup>*\ \ = \(T @ U) \<^sup>*\\\<^sup>* T\" by (metis "1" P.Con_sym P.Ide.simps(1) P.arr_char P.arr_in_Cong_class P.con_char P.is_Cong_classI Resid_by_members T \P.Arr (T @ U)\ \P.Ide (T \<^sup>*\\<^sup>* (T @ U))\) moreover have "\ \\<^sup>*\\\<^sup>*\ (\T @ U\ \\<^sup>*\\\<^sup>*\ \) = \U \<^sup>*\\\<^sup>* ((T @ U) \<^sup>*\\\<^sup>* T)\" by (metis "1" P.Cong_class_eqI P.Cong_imp_arr(1) P.arr_char P.arr_in_Cong_class P.con_char P.is_Cong_classI prfx_implies_con U \(T @ U) \<^sup>*\\<^sup>* T \<^sup>*\\<^sup>* U\ \\T @ U\ \\<^sup>*\\<^sup>*\ \ \\<^sup>*\\<^sup>*\ \\ calculation con_char\<^sub>C\<^sub>C' Resid_by_members) ultimately show ?thesis by blast qed thus ?thesis by (metis "1" P.Arr.simps(1) P.Resid_append_ind P.arr_in_Cong_class P.con_char \P.Arr (T @ U)\ P.Con_Arr_self P.arr_resid_iff_con) qed show "arr (\ \\<^sup>*\\\<^sup>*\ (\T @ U\ \\<^sup>*\\\<^sup>*\ \))" by (metis "3" arr_resid_iff_con empty_iff resid_char) show "\ \\<^sup>*\\\<^sup>*\ (\T @ U\ \\<^sup>*\\\<^sup>*\ \) \ Collect P.NPath \ {}" by (metis "1" "3" P.Arr.simps(1) P.Cong\<^sub>0_append_resid_NPath P.Cong\<^sub>0_cancel_left\<^sub>C\<^sub>S P.Cong_imp_arr(1) P.arr_char NPath_char IntI \(T @ U) \<^sup>*\\<^sup>* T \<^sup>*\\<^sup>* U\ \P.Ide (T \<^sup>*\\<^sup>* (T @ U))\ empty_iff) qed qed thus "composable \ \" using composable_def by auto qed sublocale extensional_rts_with_composites Resid using is_extensional_rts_with_composites by simp subsection "Inclusion Map" abbreviation incl where "incl t \ \[t]\" text \ The inclusion into the composite completion preserves consistency and residuation. \ lemma incl_preserves_con: assumes "t \ u" shows "\[t]\ \\<^sup>*\\<^sup>*\ \[u]\" using assms by (meson P.Con_rec(1) P.arr_in_Cong_class P.con_char P.is_Cong_classI con_char\<^sub>Q\<^sub>C\<^sub>N P.con_implies_arr(1-2)) lemma incl_preserves_resid: shows "\[t \\ u]\ = \[t]\ \\<^sup>*\\\<^sup>*\ \[u]\" proof (cases "t \ u") show "t \ u \ ?thesis" proof - assume 1: "t \ u" have "P.is_Cong_class \[t]\ \ P.is_Cong_class \[u]\" using 1 con_char\<^sub>Q\<^sub>C\<^sub>N incl_preserves_con by presburger moreover have "[t] \ \[t]\ \ [u] \ \[u]\" using 1 by (meson P.Con_rec(1) P.arr_in_Cong_class P.con_char P.Con_implies_Arr(2) P.arr_char P.con_implies_arr(1)) moreover have "P.con [t] [u]" using 1 by (simp add: P.con_char) ultimately show ?thesis using Resid_by_members [of "\[t]\" "\[u]\" "[t]" "[u]"] by (simp add: "1") qed assume 1: "\ t \ u" have "\[t \\ u]\ = {}" using 1 R.arrI by (metis Collect_empty_eq P.Con_Arr_self P.Con_rec(1) P.Cong_class_def P.Cong_imp_arr(1) P.arr_char R.arr_resid_iff_con) also have "... = \[t]\ \\<^sup>*\\\<^sup>*\ \[u]\" by (metis (full_types) "1" Con_char CollectD P.Con_rec(1) P.Cong_class_def P.Cong_imp_arr(1) P.arr_in_Cong_class con_char\<^sub>C\<^sub>C' null_char conI) finally show ?thesis by simp qed lemma incl_reflects_con: assumes "\[t]\ \\<^sup>*\\<^sup>*\ \[u]\" shows "t \ u" by (metis P.Con_rec(1) P.Cong_class_def P.Cong_imp_arr(1) P.arr_in_Cong_class CollectD assms con_char\<^sub>C\<^sub>C' con_char\<^sub>Q\<^sub>C\<^sub>N) text \ The inclusion map is a simulation. \ sublocale incl: simulation resid Resid incl proof show "\t. 
\ R.arr t \ incl t = null" by (metis Collect_empty_eq P.Cong_class_def P.Cong_imp_arr(1) P.Ide.simps(2) P.Resid_rec(1) P.cong_reflexive P.elements_are_arr P.ide_char P.ide_closed P.not_arr_null P.null_char R.prfx_implies_con null_char R.con_implies_arr(1)) show "\t u. t \ u \ incl t \\<^sup>*\\<^sup>*\ incl u" using incl_preserves_con by blast show "\t u. t \ u \ incl (t \\ u) = incl t \\<^sup>*\\\<^sup>*\ incl u" using incl_preserves_resid by blast qed lemma inclusion_is_simulation: shows "simulation resid Resid incl" .. lemma incl_preserves_arr: assumes "R.arr a" shows "arr \[a]\" using assms incl_preserves_con by auto lemma incl_preserves_ide: assumes "R.ide a" shows "ide \[a]\" by (metis assms incl_preserves_con incl_preserves_resid R.ide_def ide_def) lemma cong_iff_eq_incl: assumes "R.arr t" and "R.arr u" shows "\[t]\ = \[u]\ \ t \ u" proof show "\[t]\ = \[u]\ \ t \ u" by (metis P.Con_rec(1) P.Ide.simps(2) P.Resid.simps(3) P.arr_in_Cong_class P.con_char R.arr_def R.cong_reflexive assms(1) ide_char\<^sub>C\<^sub>C incl_preserves_con incl_preserves_ide incl_preserves_resid incl_reflects_con P.arr_resid_iff_con) show "t \ u \ \[t]\ = \[u]\" using assms by (metis incl_preserves_resid extensional incl_preserves_ide) qed text \ The inclusion is surjective on identities. \ lemma img_incl_ide: shows "incl ` (Collect R.ide) = Collect ide" proof show "incl ` Collect R.ide \ Collect ide" by (simp add: image_subset_iff) show "Collect ide \ incl ` Collect R.ide" proof fix \ assume \: "\ \ Collect ide" obtain A where A: "A \ \" using \ ide_char by blast have "P.NPath A" by (metis A Ball_Collect \ ide_char' mem_Collect_eq) obtain a where a: "a \ P.Srcs A" using \P.NPath A\ by (meson P.NPath_implies_Arr equals0I P.Arr_has_Src) have "P.Cong\<^sub>0 A [a]" proof - have "P.Ide [a]" by (metis NPath_char P.Con_Arr_self P.Ide.simps(2) P.NPath_implies_Arr P.Resid_Ide(1) P.Srcs.elims R.in_sourcesE \P.NPath A\ a) thus ?thesis using a A by (metis P.Ide.simps(2) P.ide_char P.ide_closed \P.NPath A\ NPath_char P.Con_single_ide_iff P.Ide_implies_Arr P.Resid_Arr_Ide_ind P.Resid_Arr_Src) qed have "\ = \[a]\" by (metis A P.Cong\<^sub>0_imp_con P.Cong\<^sub>0_implies_Cong P.Cong\<^sub>0_transitive P.Cong_class_eqI P.ide_char P.resid_arr_ide Resid_by_members \ \A \<^sup>*\\<^sub>0\<^sup>* [a]\ \P.NPath A\ arr_char NPath_char ideE ide_implies_arr mem_Collect_eq) thus "\ \ incl ` Collect R.ide" using NPath_char P.Ide.simps(2) P.backward_stable \A \<^sup>*\\<^sub>0\<^sup>* [a]\ \P.NPath A\ by blast qed qed end subsection "Composite Completion of an Extensional RTS" locale composite_completion_of_extensional_rts = R: extensional_rts + composite_completion begin sublocale P: paths_in_weakly_extensional_rts resid .. notation comp (infixl "\\<^sup>*\\<^sup>*\" 55) text \ When applied to an extensional RTS, the composite completion construction does not identify any states that are distinct in the original RTS. \ lemma incl_injective_on_ide: shows "inj_on incl (Collect R.ide)" using R.extensional cong_iff_eq_incl by (intro inj_onI) auto text \ When applied to an extensional RTS, the composite completion construction is a bijection between the states of the original RTS and the states of its completion. 
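(Put another way -- an informal gloss, not needed by the proofs -- injectivity on
identities is the content of \incl_injective_on_ide\ above, while surjectivity on
identities was already established by \img_incl_ide\; the next lemma simply
combines the two via \bij_betw_def\.)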
\ lemma incl_bijective_on_ide: shows "bij_betw incl (Collect R.ide) (Collect ide)" using incl_injective_on_ide img_incl_ide bij_betw_def by blast end subsection "Freeness of Composite Completion" text \ In this section we show that the composite completion construction is free: any simulation from RTS \A\ to an extensional RTS with composites \B\ extends uniquely to a simulation on the composite completion of \A\. \ locale extension_of_simulation = A: paths_in_rts resid\<^sub>A + B: extensional_rts_with_composites resid\<^sub>B + F: simulation resid\<^sub>A resid\<^sub>B F for resid\<^sub>A :: "'a resid" (infix "\\\<^sub>A" 70) and resid\<^sub>B :: "'b resid" (infix "\\\<^sub>B" 70) and F :: "'a \ 'b" begin notation A.Resid (infix "\<^sup>*\\\<^sub>A\<^sup>*" 70) notation A.Resid1x (infix "\<^sup>1\\\<^sub>A\<^sup>*" 70) notation A.Residx1 (infix "\<^sup>*\\\<^sub>A\<^sup>1" 70) notation A.Con (infix "\<^sup>*\\<^sub>A\<^sup>*" 70) notation B.comp (infixl "\\<^sub>B" 55) notation B.con (infix "\\<^sub>B" 50) fun map where "map [] = B.null" | "map [t] = F t" | "map (t # T) = (if A.arr (t # T) then F t \\<^sub>B map T else B.null)" lemma map_o_incl_eq: shows "map (A.incl t) = F t" by (simp add: A.null_char F.extensional) lemma extensional: shows "\ A.arr T \ map T = B.null" using F.extensional A.arr_char by (metis A.Arr.simps(2) map.elims) lemma preserves_comp: shows "\U. \T \ []; U \ []; A.Arr (T @ U)\ \ map (T @ U) = map T \\<^sub>B map U" proof (induct T) show "\U. [] \ [] \ map ([] @ U) = map [] \\<^sub>B map U" by simp fix t and T U :: "'a list" assume ind: "\U. \T \ []; U \ []; A.Arr (T @ U)\ \ map (T @ U) = map T \\<^sub>B map U" assume U: "U \ []" assume Arr: "A.Arr ((t # T) @ U)" hence 1: "A.Arr (t # (T @ U))" by simp have 2: "A.Arr (t # T)" by (metis A.Con_Arr_self A.Con_append(1) A.Con_implies_Arr(1) Arr U append_is_Nil_conv list.distinct(1)) show "map ((t # T) @ U) = B.comp (map (t # T)) (map U)" proof (cases "T = []") show "T = [] \ ?thesis" by (metis (full_types) "1" A.arr_char U append_Cons append_Nil list.exhaust map.simps(2) map.simps(3)) assume T: "T \ []" have "map ((t # T) @ U) = map (t # (T @ U))" by simp also have "... = F t \\<^sub>B map (T @ U)" using T 1 by (metis A.arr_char Nil_is_append_conv list.exhaust map.simps(3)) also have "... = F t \\<^sub>B (map T \\<^sub>B map U)" using ind by (metis "1" A.Con_Arr_self A.Con_implies_Arr(1) A.Con_rec(4) T U append_is_Nil_conv) also have "... = F t \\<^sub>B map T \\<^sub>B map U" using B.comp_assoc\<^sub>E\<^sub>C by blast also have "... = map (t # T) \\<^sub>B map U" using T 2 by (metis A.arr_char list.exhaust map.simps(3)) finally show "map ((t # T) @ U) = map (t # T) \\<^sub>B map U" by simp qed qed lemma preserves_arr_ind: shows "\a. \A.arr T; a \ A.Srcs T\ \ B.arr (map T) \ B.src (map T) = F a" proof (induct T) show "\a. \A.arr []; a \ A.Srcs []\ \ B.arr (map []) \ B.src (map []) = F a" using A.arr_char by simp fix a t T assume a: "a \ A.Srcs (t # T)" assume tT: "A.arr (t # T)" assume ind: "\a. 
\A.arr T; a \ A.Srcs T\ \ B.arr (map T) \ B.src (map T) = F a" have 1: "a \ A.R.sources t" using a tT A.Con_imp_eq_Srcs A.Con_initial_right A.Srcs.simps(2) A.con_char by blast show "B.arr (map (t # T)) \ B.src (map (t # T)) = F a" proof (cases "T = []") show "T = [] \ ?thesis" by (metis "1" A.Arr.simps(2) A.arr_char B.arr_has_un_source B.src_in_sources F.preserves_reflects_arr F.preserves_sources image_subset_iff map.simps(2) tT) assume T: "T \ []" obtain a' where a': "a' \ A.R.targets t" using tT "1" A.R.resid_source_in_targets by auto have 2: "a' \ A.Srcs T" using a' tT by (metis A.Con_Arr_self A.R.sources_resid A.Srcs.simps(2) A.arr_char T A.Con_imp_eq_Srcs A.Con_rec(4)) have "B.arr (map (t # T)) \ B.arr (F t \\<^sub>B map T)" using tT T by (metis map.simps(3) neq_Nil_conv) also have 2: "... \ True" by (metis (no_types, lifting) "2" A.arr_char B.arr_comp\<^sub>E\<^sub>C B.arr_has_un_target B.trg_in_targets F.preserves_reflects_arr F.preserves_targets T a' A.Arr.elims(2) image_subset_iff ind list.sel(1) list.sel(3) tT) finally have "B.arr (map (t # T))" by simp moreover have "B.src (map (t # T)) = F a" proof - have "B.src (map (t # T)) = B.src (F t \\<^sub>B map T)" using tT T by (metis map.simps(3) neq_Nil_conv) also have "... = B.src (F t)" using "2" B.con_comp_iff by force also have "... = F a" by (meson "1" B.weakly_extensional_rts_axioms F.simulation_axioms simulation_to_weakly_extensional_rts.preserves_src simulation_to_weakly_extensional_rts_def) finally show ?thesis by simp qed ultimately show ?thesis by simp qed qed lemma preserves_arr: shows "A.arr T \ B.arr (map T)" using preserves_arr_ind A.arr_char A.Arr_has_Src by blast lemma preserves_src: assumes "A.arr T" and "a \ A.Srcs T" shows "B.src (map T) = F a" using assms preserves_arr_ind by simp lemma preserves_trg: shows "\A.arr T; b \ A.Trgs T\ \ B.trg (map T) = F b" proof (induct T) show "\A.arr []; b \ A.Trgs []\ \ B.trg (map []) = F b" by simp fix t T assume tT: "A.arr (t # T)" assume b: "b \ A.Trgs (t # T)" assume ind: "\A.arr T; b \ A.Trgs T\ \ B.trg (map T) = F b" show "B.trg (map (t # T)) = F b" proof (cases "T = []") show "T = [] \ ?thesis" using tT b by (metis A.Trgs.simps(2) B.arr_has_un_target B.trg_in_targets F.preserves_targets preserves_arr image_subset_iff map.simps(2)) assume T: "T \ []" have 1: "B.trg (map (t # T)) = B.trg (F t \\<^sub>B map T)" using tT T b by (metis map.simps(3) neq_Nil_conv) also have "... = B.trg (map T)" by (metis B.arr_trg_iff_arr B.composable_iff_arr_comp B.trg_comp calculation preserves_arr tT) also have "... = F b" using tT b ind by (metis A.Trgs.simps(3) T A.Arr.simps(3) A.arr_char list.exhaust) finally show ?thesis by simp qed qed lemma preserves_Resid1x_ind: shows "\t. t \<^sup>1\\\<^sub>A\<^sup>* U \ A.R.null \ F t \\<^sub>B map U \ F (t \<^sup>1\\\<^sub>A\<^sup>* U) = F t \\\<^sub>B map U" proof (induct U) show "\t. t \<^sup>1\\\<^sub>A\<^sup>* [] \ A.R.null \ F t \\<^sub>B map [] \ F (t \<^sup>1\\\<^sub>A\<^sup>* []) = F t \\\<^sub>B map []" by simp fix t u U assume uU: "t \<^sup>1\\\<^sub>A\<^sup>* (u # U) \ A.R.null" assume ind: "\t. 
t \<^sup>1\\\<^sub>A\<^sup>* U \ A.R.null \ F t \\<^sub>B map U \ F (t \<^sup>1\\\<^sub>A\<^sup>* U) = F t \\\<^sub>B map U" show "F t \\<^sub>B map (u # U) \ F (t \<^sup>1\\\<^sub>A\<^sup>* (u # U)) = F t \\\<^sub>B map (u # U)" proof show 1: "F t \\<^sub>B map (u # U)" proof (cases "U = []") show "U = [] \ ?thesis" using A.Resid1x.simps(2) map.simps(2) F.preserves_con uU by fastforce assume U: "U \ []" have 3: "[t] \<^sup>*\\\<^sub>A\<^sup>* [u] \ [] \ ([t] \<^sup>*\\\<^sub>A\<^sup>* [u]) \<^sup>*\\\<^sub>A\<^sup>* U \ []" using A.Con_cons(2) [of "[t]" U u] by (meson A.Resid1x_as_Resid' U not_Cons_self2 uU) hence 2: "F t \\<^sub>B F u \ F t \\\<^sub>B F u \\<^sub>B map U" by (metis A.Con_rec(1) A.Con_sym A.Con_sym1 A.Residx1_as_Resid A.Resid_rec(1) F.preserves_con F.preserves_resid ind) moreover have "B.seq (F u) (map U)" by (metis B.coinitial_iff\<^sub>W\<^sub>E B.con_imp_coinitial B.seqI\<^sub>W\<^sub>E B.src_resid calculation) ultimately have "F t \\<^sub>B map ([u] @ U)" using B.con_comp_iff\<^sub>E\<^sub>C(1) [of "F t" "F u" "map U"] B.con_sym preserves_comp by (metis 3 A.Con_cons(2) A.Con_implies_Arr(2) append.left_neutral append_Cons map.simps(2) not_Cons_self2) thus ?thesis by simp qed show "F (t \<^sup>1\\\<^sub>A\<^sup>* (u # U)) = F t \\\<^sub>B map (u # U)" proof (cases "U = []") show "U = [] \ ?thesis" using A.Resid1x.simps(2) F.preserves_resid map.simps(2) uU by fastforce assume U: "U \ []" have "F (t \<^sup>1\\\<^sub>A\<^sup>* (u # U)) = F ((t \\\<^sub>A u) \<^sup>1\\\<^sub>A\<^sup>* U)" using A.Resid1x_as_Resid' A.Resid_rec(3) U uU by metis also have "... = F (t \\\<^sub>A u) \\\<^sub>B map U" using uU U ind A.Con_rec(3) A.Resid1x_as_Resid [of "t \\\<^sub>A u" U] by (metis A.Resid1x.simps(3) list.exhaust) also have "... = (F t \\\<^sub>B F u) \\\<^sub>B map U" using uU U by (metis A.Resid1x_as_Resid' F.preserves_resid A.Con_rec(3)) also have "... = F t \\\<^sub>B (F u \\<^sub>B map U)" by (metis B.comp_null(2) B.composable_iff_comp_not_null B.con_compI(2) B.conI B.con_sym_ax B.mediating_transition B.null_is_zero(2) B.resid_comp(1)) also have "... = F t \\\<^sub>B map (u # U)" by (metis A.Resid1x_as_Resid' A.con_char U map.simps(3) neq_Nil_conv A.con_implies_arr(2) uU) finally show ?thesis by simp qed qed qed lemma preserves_Residx1_ind: shows "\t. U \<^sup>*\\\<^sub>A\<^sup>1 t \ [] \ map U \\<^sub>B F t \ map (U \<^sup>*\\\<^sub>A\<^sup>1 t) = map U \\\<^sub>B F t" proof (induct U) show "\t. [] \<^sup>*\\\<^sub>A\<^sup>1 t \ [] \ map [] \\<^sub>B F t \ map ([] \<^sup>*\\\<^sub>A\<^sup>1 t) = map [] \\\<^sub>B F t" by simp fix t u U assume ind: "\t. U \<^sup>*\\\<^sub>A\<^sup>1 t \ [] \ map U \\<^sub>B F t \ map (U \<^sup>*\\\<^sub>A\<^sup>1 t) = map U \\\<^sub>B F t" assume uU: "(u # U) \<^sup>*\\\<^sub>A\<^sup>1 t \ []" show "map (u # U) \\<^sub>B F t \ map ((u # U) \<^sup>*\\\<^sub>A\<^sup>1 t) = map (u # U) \\\<^sub>B F t" proof (cases "U = []") show "U = [] \ ?thesis" using A.Residx1.simps(2) F.preserves_con F.preserves_resid map.simps(2) uU by presburger assume U: "U \ []" show ?thesis proof show "map (u # U) \\<^sub>B F t" using uU U A.Con_sym1 B.con_sym preserves_Resid1x_ind by blast show "map ((u # U) \<^sup>*\\\<^sub>A\<^sup>1 t) = map (u # U) \\\<^sub>B F t" proof - have "map ((u # U) \<^sup>*\\\<^sub>A\<^sup>1 t) = map ((u \\\<^sub>A t) # U \<^sup>*\\\<^sub>A\<^sup>1 (t \\\<^sub>A u))" using uU U A.Residx1_as_Resid A.Resid_rec(2) by fastforce also have "... 
= F (u \\\<^sub>A t) \\<^sub>B map (U \<^sup>*\\\<^sub>A\<^sup>1 (t \\\<^sub>A u))" by (metis A.Residx1_as_Resid A.arr_char U A.Con_imp_Arr_Resid A.Con_rec(2) A.Resid_rec(2) list.exhaust map.simps(3) uU) also have "... = F (u \\\<^sub>A t) \\<^sub>B map U \\\<^sub>B F (t \\\<^sub>A u)" using uU U ind A.Con_rec(2) A.Residx1_as_Resid by force also have "... = (F u \\\<^sub>B F t) \\<^sub>B map U \\\<^sub>B (F t \\\<^sub>B F u)" using uU U by (metis A.Con_initial_right A.Con_rec(1) A.Con_sym1 A.Resid1x_as_Resid' A.Residx1_as_Resid F.preserves_resid) also have "... = (F u \\<^sub>B map U) \\\<^sub>B F t" by (metis B.comp_null(2) B.composable_iff_comp_not_null B.con_compI(2) B.con_sym B.mediating_transition B.null_is_zero(2) B.resid_comp(2) B.con_def) also have "... = map (u # U) \\\<^sub>B F t" by (metis A.Con_implies_Arr(2) A.Con_sym A.Residx1_as_Resid U A.arr_char map.simps(3) neq_Nil_conv uU) finally show ?thesis by simp qed qed qed qed lemma preserves_resid_ind: shows "\U. A.con T U \ map T \\<^sub>B map U \ map (T \<^sup>*\\\<^sub>A\<^sup>* U) = map T \\\<^sub>B map U" proof (induct T) show "\U. A.con [] U \ map [] \\<^sub>B map U \ map ([] \<^sup>*\\\<^sub>A\<^sup>* U) = map [] \\\<^sub>B map U" using A.con_char A.Resid.simps(1) by blast fix t T U assume tT: "A.con (t # T) U" assume ind: "\U. A.con T U \ map T \\<^sub>B map U \ map (T \<^sup>*\\\<^sub>A\<^sup>* U) = map T \\\<^sub>B map U" show "map (t # T) \\<^sub>B map U \ map ((t # T) \<^sup>*\\\<^sub>A\<^sup>* U) = map (t # T) \\\<^sub>B map U" proof (cases "T = []") assume T: "T = []" show ?thesis using T tT apply simp by (metis A.Resid1x_as_Resid A.Residx1_as_Resid A.con_char A.Con_sym A.Con_sym1 map.simps(2) preserves_Resid1x_ind) next assume T: "T \ []" have 1: "map (t # T) = F t \\<^sub>B map T" using tT T by (metis A.con_implies_arr(1) list.exhaust map.simps(3)) show ?thesis proof show 2: "B.con (map (t # T)) (map U)" using T tT by (metis "1" A.Con_cons(1) A.Residx1_as_Resid A.con_char A.not_arr_null A.null_char B.composable_iff_comp_not_null B.con_compI(2) B.con_sym B.not_arr_null preserves_arr ind preserves_Residx1_ind A.con_implies_arr(1-2)) show "map ((t # T) \<^sup>*\\\<^sub>A\<^sup>* U) = map (t # T) \\\<^sub>B map U" proof - have "map ((t # T) \<^sup>*\\\<^sub>A\<^sup>* U) = map (([t] \<^sup>*\\\<^sub>A\<^sup>* U) @ (T \<^sup>*\\\<^sub>A\<^sup>* (U \<^sup>*\\\<^sub>A\<^sup>* [t])))" by (metis A.Resid.simps(1) A.Resid_cons(1) A.con_char A.ex_un_null tT) also have "... = map ([t] \<^sup>*\\\<^sub>A\<^sup>* U) \\<^sub>B map (T \<^sup>*\\\<^sub>A\<^sup>* (U \<^sup>*\\\<^sub>A\<^sup>* [t]))" by (metis A.Arr.simps(1) A.Con_imp_Arr_Resid A.Con_implies_Arr(2) A.Con_sym A.Resid_cons(1-2) A.con_char T preserves_comp tT) also have "... = (map [t] \\\<^sub>B map U) \\<^sub>B map (T \<^sup>*\\\<^sub>A\<^sup>* (U \<^sup>*\\\<^sub>A\<^sup>* [t]))" by (metis A.Con_initial_right A.Con_sym A.Resid1x_as_Resid A.Residx1_as_Resid A.con_char A.Con_sym1 map.simps(2) preserves_Resid1x_ind tT) also have "... = (map [t] \\\<^sub>B map U) \\<^sub>B (map T \\\<^sub>B map (U \<^sup>*\\\<^sub>A\<^sup>* [t]))" using tT T ind by (metis A.Con_cons(1) A.Con_sym A.Resid.simps(1) A.con_char) also have "... = (map [t] \\\<^sub>B map U) \\<^sub>B (map T \\\<^sub>B (map U \\\<^sub>B map [t]))" using tT T by (metis A.Con_cons(1) A.Con_sym A.Resid.simps(2) A.Residx1_as_Resid A.con_char map.simps(2) preserves_Residx1_ind) also have "... = (F t \\\<^sub>B map U) \\<^sub>B (map T \\\<^sub>B (map U \\\<^sub>B F t))" using tT T by simp also have "... 
= map (t # T) \\\<^sub>B map U" using 1 2 B.resid_comp(2) by presburger finally show ?thesis by simp qed qed qed qed lemma preserves_con: assumes "A.con T U" shows "map T \\<^sub>B map U" using assms preserves_resid_ind by simp lemma preserves_resid: assumes "A.con T U" shows "map (T \<^sup>*\\\<^sub>A\<^sup>* U) = map T \\\<^sub>B map U" using assms preserves_resid_ind by simp sublocale simulation A.Resid resid\<^sub>B map using A.con_char preserves_con preserves_resid extensional by unfold_locales auto sublocale simulation_to_extensional_rts A.Resid resid\<^sub>B map .. lemma is_universal: assumes "rts_with_composites resid\<^sub>B" and "simulation resid\<^sub>A resid\<^sub>B F" shows "\!F'. simulation A.Resid resid\<^sub>B F' \ F' o A.incl = F" proof interpret B: rts_with_composites resid\<^sub>B using assms by auto interpret F: simulation resid\<^sub>A resid\<^sub>B F using assms by auto show "simulation A.Resid resid\<^sub>B map \ map \ A.incl = F" using map_o_incl_eq simulation_axioms by auto show "\F'. simulation A.Resid resid\<^sub>B F' \ F' o A.incl = F \ F' = map" proof fix F' T assume F': "simulation A.Resid resid\<^sub>B F' \ F' o A.incl = F" interpret F': simulation A.Resid resid\<^sub>B F' using F' by simp show "F' T = map T" proof (induct T) show "F' [] = map []" by (simp add: A.arr_char F'.extensional) fix t T assume ind: "F' T = map T" show "F' (t # T) = map (t # T)" proof (cases "A.Arr (t # T)") show "\ A.Arr (t # T) \ ?thesis" by (simp add: A.arr_char F'.extensional extensional) assume tT: "A.Arr (t # T)" show ?thesis proof (cases "T = []") show 2: "T = [] \ ?thesis" using F' tT by auto assume T: "T \ []" have "F' (t # T) = F' [t] \\<^sub>B map T" proof - have "F' (t # T) = F' ([t] @ T)" by simp also have "... = F' [t] \\<^sub>B F' T" proof - have "A.composite_of [t] T ([t] @ T)" using T tT by (metis (full_types) A.Arr.simps(2) A.Con_Arr_self A.append_is_composite_of A.Con_implies_Arr(1) A.Con_imp_eq_Srcs A.Con_rec(4) A.Resid_rec(1) A.Srcs_Resid A.seq_char A.R.arrI) thus ?thesis using F'.preserves_composites [of "[t]" T "[t] @ T"] B.comp_is_composite_of by auto qed also have "... = F' [t] \\<^sub>B map T" using T ind by simp finally show ?thesis by simp qed also have "... = (F' \ A.incl) t \\<^sub>B map T" using tT by (simp add: A.arr_char A.null_char F'.extensional) also have "... = F t \\<^sub>B map T" using F' by simp also have "... = map (t # T)" using T tT by (metis A.arr_char list.exhaust map.simps(3)) finally show ?thesis by simp qed qed qed qed qed end (* * TODO: Localize to context rts? *) lemma composite_completion_of_rts: assumes "rts A" shows "\(C :: 'a list resid) I. rts_with_composites C \ simulation A C I \ (\B (J :: 'a \ 'c). extensional_rts_with_composites B \ simulation A B J \ (\!J'. simulation C B J' \ J' o I = J))" proof (intro exI conjI) interpret A: rts A using assms by auto interpret P\<^sub>A: paths_in_rts A .. show "rts_with_composites P\<^sub>A.Resid" using P\<^sub>A.rts_with_composites_axioms by simp show "simulation A P\<^sub>A.Resid P\<^sub>A.incl" using P\<^sub>A.incl_is_simulation by simp show "\B (J :: 'a \ 'c). extensional_rts_with_composites B \ simulation A B J \ (\!J'. simulation P\<^sub>A.Resid B J' \ J' o P\<^sub>A.incl = J)" proof (intro allI impI) fix B :: "'c resid" and J assume 1: "extensional_rts_with_composites B \ simulation A B J" interpret B: extensional_rts_with_composites B using 1 by simp interpret J: simulation A B J using 1 by simp interpret J: extension_of_simulation A B J .. 
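(*
 * Informal note (a gloss, not used by the proof): the extension J.map defined in
 * locale extension_of_simulation sends a nonempty arrow path [t1, ..., tn] of A
 * to the composite in B of the images J t1, ..., J tn, and it agrees with J on
 * the inclusion of single transitions (map_o_incl_eq).  Uniqueness of such an
 * extension is the content of J.is_universal, which is applied below.
 *)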
have "simulation P\<^sub>A.Resid B J.map" using J.simulation_axioms by simp moreover have "J.map o P\<^sub>A.incl = J" using J.map_o_incl_eq by auto moreover have "\J'. simulation P\<^sub>A.Resid B J' \ J' o P\<^sub>A.incl = J \ J' = J.map" using "1" B.rts_with_composites_axioms J.is_universal J.simulation_axioms calculation(2) by blast ultimately show "\!J'. simulation P\<^sub>A.Resid B J' \ J' \ P\<^sub>A.incl = J" by auto qed qed section "Constructions on RTS's" subsection "Products of RTS's" locale product_rts = R1: rts R1 + R2: rts R2 for R1 :: "'a1 resid" (infix "\\\<^sub>1" 70) and R2 :: "'a2 resid" (infix "\\\<^sub>2" 70) begin type_synonym ('aa1, 'aa2) arr = "'aa1 * 'aa2" abbreviation (input) Null :: "('a1, 'a2) arr" where "Null \ (R1.null, R2.null)" definition resid :: "('a1, 'a2) arr \ ('a1, 'a2) arr \ ('a1, 'a2) arr" where "resid t u = (if R1.con (fst t) (fst u) \ R2.con (snd t) (snd u) then (fst t \\\<^sub>1 fst u, snd t \\\<^sub>2 snd u) else Null)" notation resid (infix "\\" 70) sublocale partial_magma resid by unfold_locales (metis R1.con_implies_arr(1-2) R1.not_arr_null fst_conv resid_def) lemma is_partial_magma: shows "partial_magma resid" .. lemma null_char [simp]: shows "null = Null" by (metis R2.null_is_zero(1) R2.residuation_axioms ex_un_null null_is_zero(1) resid_def residuation.conE snd_conv) sublocale residuation resid proof show "\t u. t \\ u \ null \ u \\ t \ null" by (metis R1.con_def R1.con_sym null_char prod.inject resid_def R2.con_sym) show "\t u. t \\ u \ null \ (t \\ u) \\ (t \\ u) \ null" by (metis (no_types, lifting) R1.arrE R2.con_def R2.con_imp_arr_resid fst_conv null_char resid_def R1.arr_resid snd_conv) show "\v t u. (v \\ t) \\ (u \\ t) \ null \ (v \\ t) \\ (u \\ t) = (v \\ u) \\ (t \\ u)" proof - fix t u v assume 1: "(v \\ t) \\ (u \\ t) \ null" have "(fst v \\\<^sub>1 fst t) \\\<^sub>1 (fst u \\\<^sub>1 fst t) \ R1.null" by (metis 1 R1.not_arr_null fst_conv null_char null_is_zero(1-2) resid_def R1.arr_resid) moreover have "(snd v \\\<^sub>2 snd t) \\\<^sub>2 (snd u \\\<^sub>2 snd t) \ R2.null" by (metis 1 R2.not_arr_null snd_conv null_char null_is_zero(1-2) resid_def R2.arr_resid) ultimately show "(v \\ t) \\ (u \\ t) = (v \\ u) \\ (t \\ u)" using resid_def null_char R1.con_def R2.con_def R1.cube R2.cube apply simp by (metis (no_types, lifting) R1.conI R1.con_sym_ax R1.resid_reflects_con R2.con_sym_ax R2.null_is_zero(1)) qed qed lemma is_residuation: shows "residuation resid" .. lemma arr_char [iff]: shows "arr t \ R1.arr (fst t) \ R2.arr (snd t)" by (metis (no_types, lifting) R1.arr_def R2.arr_def R2.conE null_char resid_def residuation.arr_def residuation.con_def residuation_axioms snd_eqD) lemma ide_char [iff]: shows "ide t \ R1.ide (fst t) \ R2.ide (snd t)" by (metis (no_types, lifting) R1.residuation_axioms R2.residuation_axioms arr_char arr_def fst_conv null_char prod.collapse resid_def residuation.conE residuation.ide_def residuation.ide_implies_arr residuation_axioms snd_conv) lemma con_char [iff]: shows "con t u \ R1.con (fst t) (fst u) \ R2.con (snd t) (snd u)" by (simp add: R2.residuation_axioms con_def resid_def residuation.con_def) lemma trg_char: shows "trg t = (if arr t then (R1.trg (fst t), R2.trg (snd t)) else Null)" using R1.trg_def R2.trg_def resid_def trg_def by auto sublocale rts resid proof show "\t. arr t \ ide (trg t)" by (simp add: trg_char) show "\a t. \ide a; con t a\ \ t \\ a = t" by (simp add: R1.resid_arr_ide R2.resid_arr_ide resid_def) show "\a t. \ide a; con a t\ \ ide (a \\ t)" by (metis \\t a. 
\ide a; con t a\ \ t \ a = t\ con_sym cube ideE ideI residuation.con_def residuation_axioms) show "\t u. con t u \ \a. ide a \ con a t \ con a u" proof - fix t u assume tu: "con t u" obtain a1 where a1: "a1 \ R1.sources (fst t) \ R1.sources (fst u)" by (meson R1.con_imp_common_source all_not_in_conv con_char tu) obtain a2 where a2: "a2 \ R2.sources (snd t) \ R2.sources (snd u)" by (meson R2.con_imp_common_source all_not_in_conv con_char tu) have "ide (a1, a2) \ con (a1, a2) t \ con (a1, a2) u" using a1 a2 ide_char con_char by (metis R1.con_imp_common_source R1.in_sourcesE R1.sources_eqI R2.con_imp_common_source R2.in_sourcesE R2.sources_eqI con_sym fst_conv inf_idem snd_conv tu) thus "\a. ide a \ con a t \ con a u" by blast qed show "\t u v. \ide (t \\ u); con u v\ \ con (t \\ u) (v \\ u)" proof - fix t u v assume tu: "ide (t \\ u)" assume uv: "con u v" have "R1.ide (fst t \\\<^sub>1 fst u) \ R2.ide (snd t \\\<^sub>2 snd u)" using tu ide_char by (metis conI con_char fst_eqD ide_implies_arr not_arr_null resid_def snd_conv) moreover have "R1.con (fst u) (fst v) \ R2.con (snd u) (snd v)" using uv con_char by blast ultimately show "con (t \\ u) (v \\ u)" by (simp add: R1.con_target R1.con_sym R1.prfx_implies_con R2.con_target R2.con_sym R2.prfx_implies_con resid_def) qed qed lemma is_rts: shows "rts resid" .. lemma sources_char: shows "sources t = R1.sources (fst t) \ R2.sources (snd t)" by force lemma targets_char: shows "targets t = R1.targets (fst t) \ R2.targets (snd t)" proof show "targets t \ R1.targets (fst t) \ R2.targets (snd t)" using targets_def ide_char con_char resid_def trg_char trg_def by auto show "R1.targets (fst t) \ R2.targets (snd t) \ targets t" proof fix a assume a: "a \ R1.targets (fst t) \ R2.targets (snd t)" show "a \ targets t" proof show "ide a" using a ide_char by auto show "con (trg t) a" using a trg_char con_char [of "trg t" a] by (metis (no_types, lifting) SigmaE arr_char con_char con_implies_arr(1) fst_conv R1.in_targetsE R2.in_targetsE R1.arr_resid_iff_con R2.arr_resid_iff_con R1.trg_def R2.trg_def snd_conv) qed qed qed lemma prfx_char: shows "prfx t u \ R1.prfx (fst t) (fst u) \ R2.prfx (snd t) (snd u)" using R1.prfx_implies_con R2.prfx_implies_con resid_def by auto lemma cong_char: shows "cong t u \ R1.cong (fst t) (fst u) \ R2.cong (snd t) (snd u)" using prfx_char by auto end locale product_of_weakly_extensional_rts = R1: weakly_extensional_rts R1 + R2: weakly_extensional_rts R2 + product_rts begin sublocale weakly_extensional_rts resid proof show "\t u. \cong t u; ide t; ide u\ \ t = u" by (metis cong_char ide_char prod.exhaust_sel R1.weak_extensionality R2.weak_extensionality) qed lemma src_char: shows "src t = (if arr t then (R1.src (fst t), R2.src (snd t)) else null)" proof (cases "arr t") show "\ arr t \ ?thesis" using src_def by presburger assume t: "arr t" show ?thesis proof (intro src_eqI) show "ide (if arr t then (R1.src (fst t), R2.src (snd t)) else null)" using t by simp show "con (if arr t then (R1.src (fst t), R2.src (snd t)) else null) t" using t con_char arr_char apply (cases t) apply simp_all by (metis R1.con_imp_coinitial_ax R1.residuation_axioms R1.src_eqI R2.con_sym R2.in_sourcesE R2.src_in_sources residuation.arr_def) qed qed end locale product_of_extensional_rts = R1: extensional_rts R1 + R2: extensional_rts R2 + product_of_weakly_extensional_rts begin sublocale extensional_rts resid proof show "\t u. 
cong t u \ t = u" by (metis R1.extensional R2.extensional cong_char prod.collapse) qed end subsubsection "Product Simulations" locale product_simulation = A1: rts A1 + A2: rts A2 + B1: rts B1 + B2: rts B2 + A1xA2: product_rts A1 A2 + B1xB2: product_rts B1 B2 + F1: simulation A1 B1 F1 + F2: simulation A2 B2 F2 for A1 :: "'a1 resid" (infix "\\\<^sub>A\<^sub>1" 70) and A2 :: "'a2 resid" (infix "\\\<^sub>A\<^sub>2" 70) and B1 :: "'b1 resid" (infix "\\\<^sub>B\<^sub>1" 70) and B2 :: "'b2 resid" (infix "\\\<^sub>B\<^sub>2" 70) and F1 :: "'a1 \ 'b1" and F2 :: "'a2 \ 'b2" begin definition map where "map = (\a. if A1xA2.arr a then (F1 (fst a), F2 (snd a)) else B1xB2.null)" lemma map_simp [simp]: assumes "A1.arr a1" and "A2.arr a2" shows "map (a1, a2) = (F1 a1, F2 a2)" using assms map_def by auto sublocale simulation A1xA2.resid B1xB2.resid map proof show "\t. \ A1xA2.arr t \ map t = B1xB2.null" using map_def by auto show "\t u. A1xA2.con t u \ B1xB2.con (map t) (map u)" using A1xA2.con_char B1xB2.con_char A1.con_implies_arr A2.con_implies_arr by auto show "\t u. A1xA2.con t u \ map (A1xA2.resid t u) = B1xB2.resid (map t) (map u)" using A1xA2.resid_def B1xB2.resid_def A1.con_implies_arr A2.con_implies_arr by auto qed lemma is_simulation: shows "simulation A1xA2.resid B1xB2.resid map" .. end subsubsection "Binary Simulations" locale binary_simulation = A1: rts A1 + A2: rts A2 + A: product_rts A1 A2 + B: rts B + simulation A.resid B F for A1 :: "'a1 resid" (infixr "\\\<^sub>A\<^sub>1" 70) and A2 :: "'a2 resid" (infixr "\\\<^sub>A\<^sub>2" 70) and B :: "'b resid" (infixr "\\\<^sub>B" 70) and F :: "'a1 * 'a2 \ 'b" begin lemma fixing_ide_gives_simulation_1: assumes "A1.ide a1" shows "simulation A2 B (\t2. F (a1, t2))" proof show "\t2. \ A2.arr t2 \ F (a1, t2) = B.null" using assms extensional A.arr_char by simp show "\t2 u2. A2.con t2 u2 \ B.con (F (a1, t2)) (F (a1, u2))" using assms A.con_char preserves_con by auto show "\t2 u2. A2.con t2 u2 \ F (a1, t2 \\\<^sub>A\<^sub>2 u2) = F (a1, t2) \\\<^sub>B F (a1, u2)" using assms A.con_char A.resid_def preserves_resid by (metis A1.ideE fst_conv snd_conv) qed lemma fixing_ide_gives_simulation_2: assumes "A2.ide a2" shows "simulation A1 B (\t1. F (t1, a2))" proof show "\t1. \ A1.arr t1 \ F (t1, a2) = B.null" using assms extensional A.arr_char by simp show "\t1 u1. A1.con t1 u1 \ B.con (F (t1, a2)) (F (u1, a2))" using assms A.con_char preserves_con by auto show "\t1 u1. A1.con t1 u1 \ F (t1 \\\<^sub>A\<^sub>1 u1, a2) = F (t1, a2) \\\<^sub>B F (u1, a2)" using assms A.con_char A.resid_def preserves_resid by (metis A2.ideE fst_conv snd_conv) qed end subsection "Sub-RTS's" locale sub_rts = R: rts R for R :: "'a resid" (infix "\\\<^sub>R" 70) and Arr :: "'a \ bool" + assumes inclusion: "Arr t \ R.arr t" and sources_closed: "Arr t \ R.sources t \ Collect Arr" and resid_closed: "\Arr t; Arr u; R.con t u\ \ Arr (t \\\<^sub>R u)" begin definition resid (infix "\\" 70) where "t \\ u \ (if Arr t \ Arr u \ R.con t u then t \\\<^sub>R u else R.null)" sublocale partial_magma resid by unfold_locales (metis R.ex_un_null R.null_is_zero(2) resid_def) lemma is_partial_magma: shows "partial_magma resid" .. lemma null_char [simp]: shows "null = R.null" by (metis R.null_is_zero(1) ex_un_null null_is_zero(1) resid_def) sublocale residuation resid proof show "\t u. t \\ u \ null \ u \\ t \ null" by (metis R.con_sym R.con_sym_ax null_char resid_def) show "\t u. 
t \\ u \ null \ (t \\ u) \\ (t \\ u) \ null" by (metis R.arrE R.arr_resid R.not_arr_null null_char resid_closed resid_def) show "\v t u. (v \\ t) \\ (u \\ t) \ null \ (v \\ t) \\ (u \\ t) = (v \\ u) \\ (t \\ u)" by (metis R.cube R.ex_un_null R.null_is_zero(1) R.residuation_axioms null_is_zero(2) resid_closed resid_def residuation.conE residuation.conI) qed lemma is_residuation: shows "residuation resid" .. lemma arr_char [iff]: shows "arr t \ Arr t" proof show "arr t \ Arr t" by (metis arrE conE null_char resid_def) show "Arr t \ arr t" by (metis R.arrE R.conE conI con_implies_arr(2) inclusion null_char resid_def) qed lemma ide_char [iff]: shows "ide t \ Arr t \ R.ide t" by (metis R.ide_def arrE arr_char conE ide_def null_char resid_def) lemma con_char [iff]: shows "con t u \ Arr t \ Arr u \ R.con t u" using con_def resid_def by auto lemma trg_char: shows "trg t = (if arr t then R.trg t else null)" using R.trg_def arr_def resid_def trg_def by force sublocale rts resid proof show "\t. arr t \ ide (trg t)" by (metis R.ide_trg arrE arr_char arr_resid ide_char inclusion trg_char trg_def) show "\a t. \ide a; con t a\ \ t \\ a = t" by (simp add: R.resid_arr_ide resid_def) show "\a t. \ide a; con a t\ \ ide (a \\ t)" by (metis R.resid_ide_arr arr_resid_iff_con arr_char con_char ide_char resid_def) show "\t u. con t u \ \a. ide a \ con a t \ con a u" by (metis (full_types) R.con_imp_coinitial_ax R.con_sym R.in_sourcesI con_char ide_char in_mono mem_Collect_eq sources_closed) show "\t u v. \ide (t \\ u); con u v\ \ con (t \\ u) (v \\ u)" by (metis R.con_target arr_resid_iff_con con_char con_sym ide_char ide_implies_arr resid_closed resid_def) qed lemma is_rts: shows "rts resid" .. lemma sources_char\<^sub>S\<^sub>R\<^sub>T\<^sub>S: shows "sources t = {a. Arr t \ a \ R.sources t}" using sources_closed by auto lemma targets_char\<^sub>S\<^sub>R\<^sub>T\<^sub>S: shows "targets t = {b. Arr t \ b \ R.targets t}" proof show "targets t \ {b. Arr t \ b \ R.targets t}" proof fix b assume b: "b \ targets t" show "b \ {b. Arr t \ b \ R.targets t}" proof have "Arr t" using arr_iff_has_target b by force moreover have "Arr b" using b by blast moreover have "b \ R.targets t" by (metis R.in_targetsI b calculation(1) con_char in_targetsE arr_char ide_char trg_char) ultimately show "Arr t \ b \ R.targets t" by blast qed qed show "{b. Arr t \ b \ R.targets t} \ targets t" proof fix b assume b: "b \ {b. Arr t \ b \ R.targets t}" show "b \ targets t" proof (intro in_targetsI) show "ide b" using b by (metis R.arrE ide_char inclusion mem_Collect_eq R.sources_resid R.target_is_ide resid_closed sources_closed subset_eq) show "con (trg t) b" using b using \ide b\ ide_trg trg_char by auto qed qed qed lemma prfx_char\<^sub>S\<^sub>R\<^sub>T\<^sub>S: shows "prfx t u \ Arr t \ Arr u \ R.prfx t u" by (metis R.prfx_implies_con con_char ide_char prfx_implies_con resid_closed resid_def) lemma cong_char\<^sub>S\<^sub>R\<^sub>T\<^sub>S: shows "cong t u \ Arr t \ Arr u \ R.cong t u" using prfx_char\<^sub>S\<^sub>R\<^sub>T\<^sub>S by force lemma inclusion_is_simulation: shows "simulation resid R (\t. if arr t then t else null)" using resid_closed resid_def by unfold_locales auto interpretation P\<^sub>R: paths_in_rts R .. interpretation P: paths_in_rts resid .. 
lemma path_reflection: shows "\P\<^sub>R.Arr T; set T \ Collect Arr\ \ P.Arr T" apply (induct T) apply simp proof - fix t T assume ind: "\P\<^sub>R.Arr T; set T \ Collect Arr\ \ P.Arr T" assume tT: "P\<^sub>R.Arr (t # T)" assume set: "set (t # T) \ Collect Arr" have 1: "R.arr t" using tT by (metis P\<^sub>R.Arr_imp_arr_hd list.sel(1)) show "P.Arr (t # T)" proof (cases "T = []") show "T = [] \ ?thesis" using 1 set by simp assume T: "T \ []" show ?thesis proof show "arr t" using 1 arr_char set by simp show "P.Arr T" using T tT P\<^sub>R.Arr_imp_Arr_tl by (metis ind insert_subset list.sel(3) list.simps(15) set) show "targets t \ P.Srcs T" proof - have "targets t \ R.targets t" using targets_char\<^sub>S\<^sub>R\<^sub>T\<^sub>S by blast also have "... \ R.sources (hd T)" using T tT by (metis P\<^sub>R.Arr.simps(3) P\<^sub>R.Srcs_simp\<^sub>P list.collapse) also have "... \ P.Srcs T" using P.Arr_imp_arr_hd P.Srcs_simp\<^sub>P \P.Arr T\ sources_char\<^sub>S\<^sub>R\<^sub>T\<^sub>S by force finally show ?thesis by blast qed qed qed qed end locale sub_weakly_extensional_rts = sub_rts + R: weakly_extensional_rts R begin sublocale weakly_extensional_rts resid apply unfold_locales using R.weak_extensionality cong_char\<^sub>S\<^sub>R\<^sub>T\<^sub>S by blast lemma is_weakly_extensional_rts: shows "weakly_extensional_rts resid" .. lemma src_char: shows "src t = (if arr t then R.src t else null)" proof (cases "arr t") show "\ arr t \ ?thesis" by (simp add: src_def) assume t: "arr t" show ?thesis proof (intro src_eqI) show "ide (if arr t then R.src t else null)" using t sources_closed inclusion R.src_in_sources by auto show "con (if arr t then R.src t else null) t" using t con_char by (metis (full_types) R.con_sym R.in_sourcesE R.src_in_sources \ide (if arr t then R.src t else null)\ arr_char ide_char inclusion) qed qed end text \ Here we justify the terminology ``normal sub-RTS'', which was introduced earlier, by showing that a normal sub-RTS really is a sub-RTS. \ lemma (in normal_sub_rts) is_sub_rts: shows "sub_rts resid (\t. t \ \)" using elements_are_arr ide_closed apply unfold_locales apply auto[2] by (meson R.con_imp_coinitial R.con_sym forward_stable) end