diff --git a/Admin/components/components.sha1 b/Admin/components/components.sha1
--- a/Admin/components/components.sha1
+++ b/Admin/components/components.sha1
@@ -1,539 +1,540 @@
59a71e08c34ff01f3f5c4af00db5e16369527eb7 Haskabelle-2013.tar.gz
23a96ff4951d72f4024b6e8843262eda988bc151 Haskabelle-2014.tar.gz
eccff31931fb128c1dd522cfc85495c9b66e67af Haskabelle-2015.tar.gz
ed740867925dcf58692c8d3e350c28e3b4d4a60f Isabelle_app-20210126.tar.gz
8ee375cfc38972f080dbc78f07b68dac03efe968 ProofGeneral-3.7.1.1.tar.gz
847b52c0676b5eb0fbf0476f64fc08c2d72afd0c ProofGeneral-4.1.tar.gz
8e0b2b432755ef11d964e20637d1bc567d1c0477 ProofGeneral-4.2-1.tar.gz
51e1e0f399e934020565b2301358452c0bcc8a5e ProofGeneral-4.2-2.tar.gz
8472221c876a430cde325841ce52893328302712 ProofGeneral-4.2.tar.gz
ce750fb7f26f6f51c03c6e78096a57b8eaf11d21 apache-commons-20211211.tar.gz
fbe83b522cb37748ac1b3c943ad71704fdde2f82 bash_process-1.1.1.tar.gz
bb9ef498cd594b4289221b96146d529c899da209 bash_process-1.1.tar.gz
81250148f8b89ac3587908fb20645081d7f53207 bash_process-1.2.1.tar.gz
97b2491382130a841b3bbaebdcf8720c4d4fb227 bash_process-1.2.2.tar.gz
5c5b7c18cc1dc2a4d22b997dac196da09eaca868 bash_process-1.2.3-1.tar.gz
48b01bd9436e243ffcb7297f08b498d0c0875ed9 bash_process-1.2.3.tar.gz
11815d5f3af0de9022e903ed8702c136591f06fe bash_process-1.2.4-1.tar.gz
729486311833e4eff0fbf2d8041dddad520ca88c bash_process-1.2.4-2.tar.gz
7ae9ec8aab2d8a811842d9dc67d8bf6c179e11ee bash_process-1.2.4.tar.gz
9e21f447bfa0431ae5097301d553dd6df3c58218 bash_process-1.2.tar.gz
84246b9b6460296a6f8e8d661643b36719f7834a bash_process-1.3.tar.gz
a65ce644b6094d41e9f991ef851cf05eff5dd0a9 bib2xhtml-20171221.tar.gz
4085dd6060a32d7e0d2e3f874c463a9964fd409b bib2xhtml-20190409.tar.gz
f92cff635dfba5d4d77f469307369226c868542c cakeml-2.0.tar.gz
e7ffe4238b61a3c1ee87aca4421e7a612e09b836 ci-extras-1.tar.gz
81ff56cd379744d1965425c7624feefffdf381eb ci-extras-2.tar.gz
e880f31f59bd403fb72fcd3b5afb413c3831a21c csdp-6.1-1.tar.gz
2659100ba8e28e7cb0ecb554178ee5315d4a87f5 csdp-6.1.1.tar.gz
a2bd94f4f9281dc70dfda66cf28016c2ffef7ed7 csdp-6.1.tar.gz
ec17080269737e4a97b4424a379924c09b338ca2 csdp-6.2.0.tar.gz
70105fd6fbfd1a868383fc510772b95234325d31 csdp-6.x.tar.gz
2f6417b8e96a0e4e8354fe0f1a253c18fb55d9a7 cvc3-2.4.1.tar.gz
d70bfbe63590153c07709dea7084fbc39c669841 cvc4-1.5-1.tar.gz
541eac340464c5d34b70bb163ae277cc8829c40f cvc4-1.5-2.tar.gz
1a44895d2a440091a15cc92d7f77a06a2e432507 cvc4-1.5-3.tar.gz
c0d8d5929b00e113752d8bf5d11241cd3bccafce cvc4-1.5-4.tar.gz
ffb0d4739c10eb098eb092baef13eccf94a79bad cvc4-1.5-5.tar.gz
3682476dc5e915cf260764fa5b86f1ebdab57507 cvc4-1.5.tar.gz
a5e02b5e990da4275dc5d4480c3b72fc73160c28 cvc4-1.5pre-1.tar.gz
4d9658fd2688ae8ac78da8fdfcbf85960f871b71 cvc4-1.5pre-2.tar.gz
b01fdb93f2dc2b8bcfd41c6091d91b37d6e240f9 cvc4-1.5pre-3.tar.gz
76ff6103b8560f0e2778bbfbdb05f5fa18f850b7 cvc4-1.5pre-4.tar.gz
03aec2ec5757301c9df149f115d1f4f1d2cafd9e cvc4-1.5pre.tar.gz
e99560d0b7cb9bafde2b0ec1a3a95af315918a25 cvc4-1.8.tar.gz
9e0d91f9f3bc0b69e60e50ca683cfcdcbfee6d62 cvc5-1.0.2.tar.gz
842d9526f37b928cf9e22f141884365129990d63 cygwin-20130110.tar.gz
cb3b0706d208f104b800267697204f6d82f7b48a cygwin-20130114.tar.gz
3b44cca04855016d5f8cfb5101b2e0579ab80197 cygwin-20130117.tar.gz
1fde9ddf0fa4f398965113d0c0c4f0e97c78d008 cygwin-20130716.tar.gz
a03735a53c2963eb0b453f6a7282d3419f28bf38 cygwin-20130916.tar.gz
7470125fc46e24ee188bdaacc6d560e01b6fa839 cygwin-20140520.tar.gz
db4dedae026981c5f001be283180abc1962b79ad cygwin-20140521.tar.gz
acbc4bf161ad21e96ecfe506266ccdbd288f8a6f cygwin-20140530.tar.gz
3dc680d9eb85276e8c3e9f6057dad0efe2d5aa41 cygwin-20140626.tar.gz
8e562dfe57a2f894f9461f4addedb88afa108152 cygwin-20140725.tar.gz
238d8e30e8e22495b7ea3f5ec36e852e97fe8bbf cygwin-20140813.tar.gz
629b8fbe35952d1551cd2a7ff08db697f6dff870 cygwin-20141024.tar.gz
ce93d0b3b2743c4f4e5bba30c2889b3b7bc22f2c cygwin-20150410.tar.gz
fa712dd5ec66ad16add1779d68aa171ff5694064 cygwin-20151210.tar.gz
056b843d5a3b69ecf8a52c06f2ce6e696dd275f9 cygwin-20151221.tar.gz
44f3a530f727e43a9413226c2423c9ca3e4c0cf5 cygwin-20161002.tar.gz
dd56dd16d861fc6e1a008bf5e9da6f33ed6eb820 cygwin-20161022.tar.gz
d9ad7aae99d54e3b9813151712eb88a441613f04 cygwin-20161024.tar.gz
f8eb6a0f722e3cfe3775d1204c5c7063ee1f008e cygwin-20170828.tar.gz
c22048912b010a5a0b4f2a3eb4d318d6953761e4 cygwin-20170930.tar.gz
5a3919e665947b820fd7f57787280c7512be3782 cygwin-20180604.tar.gz
2aa049170e8088de59bd70eed8220f552093932d cygwin-20190320.tar.gz
fb898e263fcf6f847d97f564fe49ea0760bb453f cygwin-20190322.tar.gz
cd01fac0ab4fdb50a2bbb6416da3f15a4d540da1 cygwin-20190524.tar.gz
caa616fbab14c1fce790a87db5c4758c1322cf28 cygwin-20200116.tar.gz
f053a9ab01f0be9cb456560f7eff66a8e7ba2fd2 cygwin-20200323.tar.gz
0107343cd2562618629f73b2581168f0045c3234 cygwin-20201002.tar.gz
a3d481401b633c0ee6abf1da07d75da94076574c cygwin-20201130.tar.gz
5b1820b87b25d8f2d237515d9854e3ce54ee331b cygwin-20211002.tar.gz
5dff30be394d88dd83ea584fa6f8063bdcdc21fd cygwin-20211004.tar.gz
fffaae24da4d274d34b8dc79a76b478b87ec31dd cygwin-20211007.tar.gz
66e16dccd7b177c086ab53013c1b74d09c1893ad cygwin-20220831.tar.gz
6cd34e30e2e650f239d19725c3d15c206fb3a7cf cygwin-20221002.tar.gz
0fe549949a025d65d52d6deca30554de8fca3b6e e-1.5.tar.gz
2e293256a134eb8e5b1a283361b15eb812fbfbf1 e-1.6-1.tar.gz
e1919e72416cbd7ac8de5455caba8901acc7b44d e-1.6-2.tar.gz
b98a98025d1f7e560ca6864a53296137dae736b4 e-1.6.tar.gz
c11b25c919e2ec44fe2b6ac2086337b456344e97 e-1.8.tar.gz
a895a96ec7e6fcc275114bb9b4c92b20fac73dba e-2.0-1.tar.gz
2ebd7e3067a2cdae3cb8b073345827013978d74b e-2.0-2.tar.gz
fac44556dd16f666a2c186be30aa6d8c67228bb9 e-2.0-3.tar.gz
5d36fb62912cfcff7f3b99a6266c578aafc288b7 e-2.0-4.tar.gz
3223c51c0b16fe00ced4ae903041fff858e61742 e-2.0-5.tar.gz
6b962a6b4539b7ca4199977973c61a8c98a492e8 e-2.0.tar.gz
66449a7b68b7d85a7189e10735a81069356123b6 e-2.5-1.tar.gz
813b66ca151d7a39b5cacb39ab52acabc2a54845 e-2.5.tar.gz
6e63f9f354b8c06035952845b987080699a12d55 e-2.6-1.tar.gz
a3bebab5df4294dac2dd7fd2065a94df00e0b3ff e-2.6.tar.gz
239e7b8bebbfc29a1c5151e8fb261ffad44877f1 easychair-3.5.tar.gz
4a3b4b4e0441c4498a0c71dc348f3538be589a15 eptcs-1.7.0.tar.gz
6d34b18ca0aa1e10bab6413045d079188c0e2dfb exec_process-1.0.1.tar.gz
8b9bffd10e396d965e815418295f2ee2849bea75 exec_process-1.0.2.tar.gz
e6aada354da11e533af2dee3dcdd96c06479b053 exec_process-1.0.3.tar.gz
ae7ee5becb26512f18c609e83b34612918bae5f0 exec_process-1.0.tar.gz
7a4b46752aa60c1ee6c53a2c128dedc8255a4568 flatlaf-0.46-1.tar.gz
ed5cbc216389b655dac21a19e770a02a96867b85 flatlaf-0.46.tar.gz
d37b38b9a27a6541c644e22eeebe9a339282173d flatlaf-1.0-rc1.tar.gz
dac46ce81cee10fb36a9d39b414dec7b7b671545 flatlaf-1.0-rc2.tar.gz
d94e6da7299004890c04a7b395a3f2d381a3281e flatlaf-1.0-rc3.tar.gz
7ca3e6a8c9bd837990e64d89e7fa07a7e7cf78ff flatlaf-1.0.tar.gz
9908e5ab721f1c0035c0ab04dc7ad0bd00a8db27 flatlaf-1.2.tar.gz
9534b721b7b78344f3225067ee4df28a5440b87e flatlaf-1.6.4.tar.gz
212a0f1f867511722024cc60156fd71872a16f92 flatlaf-1.6.tar.gz
6d4dbb6f2bde5804298d9008e3edceb0b9ee20ae flatlaf-2.4.tar.gz
b1c40ce6c087da7e70e221ddd3fcadfa569acb2f foiltex-2.1.4b.tar.gz
f339234ec18369679be0095264e0c0af7762f351 gnu-utils-20210414.tar.gz
71259aa46134e6cf2c6473b4fc408051b3336490 gnu-utils-20211030.tar.gz
683acd94761ef460cca1a628f650355370de5afb hol-light-bundle-0.5-126.tar.gz
511fa8df8be88eb0500032bbd17742d33bdd4636 hugo-0.88.1.tar.gz
989234b3799fe8750f3c24825d1f717c24fb0214 idea-icons-20210508.tar.gz
20b53cfc3ffc5b15c1eabc91846915b49b4c0367 isabelle_fonts-20151021.tar.gz
736844204b2ef83974cd9f0a215738b767958c41 isabelle_fonts-20151104.tar.gz
9502c1aea938021f154adadff254c5c55da344bd isabelle_fonts-20151106.tar.gz
f5c63689a394b974ac0d365debda577c6fa31c07 isabelle_fonts-20151107.tar.gz
812101680b75f7fa9ee8e138ea6314fa4824ea2d isabelle_fonts-20151229.tar.gz
2730e1475c7d655655882e75743e0b451725a274 isabelle_fonts-20151231.tar.gz
1f004a6bf20088a7e8f1b3d4153aa85de6fc1091 isabelle_fonts-20160101.tar.gz
379d51ef3b71452dac34ba905def3daa8b590f2e isabelle_fonts-20160102.tar.gz
878536aab1eaf1a52da560c20bb41ab942971fa3 isabelle_fonts-20160227.tar.gz
8ff0eedf0191d808ecc58c6b3149a4697f29ab21 isabelle_fonts-20160812-1.tar.gz
9283e3b0b4c7239f57b18e076ec8bb21021832cb isabelle_fonts-20160812.tar.gz
620cffeb125e198b91a716da116f754d6cc8174b isabelle_fonts-20160830.tar.gz
b70690c85c05d0ca5bc29287abd20142f6ddcfb0 isabelle_fonts-20171222.tar.gz
c17c482e411bbaf992498041a3e1dea80336aaa6 isabelle_fonts-20171230.tar.gz
3affbb306baff37c360319b21cbaa2cc96ebb282 isabelle_fonts-20180113.tar.gz
bee32019e5d7cf096ef2ea1d836c732e9a7628cc isabelle_fonts-20181124.tar.gz
f249bc2c85bd2af9eee509de17187a766b74ab86 isabelle_fonts-20181129.tar.gz
928b5320073d04d93bcc5bc4347b6d01632b9d45 isabelle_fonts-20190210.tar.gz
dfcdf9a757b9dc36cee87f82533b43c58ba84abe isabelle_fonts-20190309.tar.gz
95e3acf038df7fdeeacd8b4769930e6f57bf3692 isabelle_fonts-20190406.tar.gz
dabcf5085d67c99159007007ff0e9bf775e423d1 isabelle_fonts-20190409.tar.gz
76827987c70051719e117138858930d42041f57d isabelle_fonts-20190717.tar.gz
abc8aea3ae471f9313917008ac90e5c1c99e17da isabelle_fonts-20210317.tar.gz
3ff9195aab574fc75ca3b77af0adb33f9b6d7b74 isabelle_fonts-20210318.tar.gz
b166b4bd583b6442a5d75eab06f7adbb66919d6d isabelle_fonts-20210319.tar.gz
9467ad54a9ac10a6e7e8db5458d8d2a5516eba96 isabelle_fonts-20210321.tar.gz
1f7a0b9829ecac6552b21e995ad0f0ac168634f3 isabelle_fonts-20210322.tar.gz
667000ce6dd6ea3c2d11601a41c206060468807d isabelle_fonts-20211004.tar.gz
916adccd2f40c55116b68b92ce1eccb24d4dd9a2 isabelle_setup-20210630.tar.gz
c611e363287fcc9bdd93c33bef85fa4e66cd3f37 isabelle_setup-20210701.tar.gz
a0e7527448ef0f7ce164a38a50dc26e98de3cad6 isabelle_setup-20210709.tar.gz
e413706694b0968245ee15183af2d464814ce0a4 isabelle_setup-20210711.tar.gz
d2c9fd7b73457a460111edd6eb93a133272935fb isabelle_setup-20210715.tar.gz
a5f478ba1088f67c2c86dc2fa7764b6d884e5ae5 isabelle_setup-20210716-1.tar.gz
79fad009cb22aa5e7cb4aed3c810ad5f61790293 isabelle_setup-20210716.tar.gz
692a39f716998e556ec9559c9ca362fc8fc9d5b6 isabelle_setup-20210717-1.tar.gz
7322d6d84d75c486a58ed36630431db4499e3232 isabelle_setup-20210717-2.tar.gz
14f8508bcae9140815bb23e430e26d2cbc504b81 isabelle_setup-20210717.tar.gz
ca801d5c380ea896ee32b309ff19ae5f34538963 isabelle_setup-20210718.tar.gz
ac9739e38e4fbbfce1a71a0987a57b22f83922d3 isabelle_setup-20210724-1.tar.gz
4554679cc8ea31e539655810a14d14216b383d0e isabelle_setup-20210724-2.tar.gz
127a75ae33e97480d352087fcb9b47a632d77169 isabelle_setup-20210724.tar.gz
309909ec6d43ae460338e9af54c1b2a48adcb1ec isabelle_setup-20210726.tar.gz
a14ce46c62c64c3413f3cc9239242e33570d0f3d isabelle_setup-20210922.tar.gz
b22066a9dcde6f813352dcf6404ac184440a22df isabelle_setup-20211109.tar.gz
91c5d29e9fa40aee015e8e65ffea043e218c2fc5 isabelle_setup-20220323.tar.gz
056979bd1c08eb9d0d12cc1118b4ff70bfe2d594 isabelle_setup-20220701.tar.gz
be91402b3e5ef5bc6d4802a45175ee238cd9653e isabelle_setup-20220808.tar.gz
171df3eb58bdac4cc495f773b797fa578f7d4be6 isabelle_setup-20220817.tar.gz
7b1ce9bd85e33076fa7022eeb66ce15915d078d9 isabelle_setup-20221020.tar.gz
cb9f061ccd7c6f90d00c8aa115aeea8679f3f996 isabelle_setup-20221028.tar.gz
f582c621471583d06e00007c6acc01376c7395af isabelle_setup-20230206.tar.gz
0b2206f914336dec4923dd0479d8cee4b904f544 jdk-11+28.tar.gz
e12574d838ed55ef2845acf1152329572ab0cc56 jdk-11.0.10+9.tar.gz
3e05213cad47dbef52804fe329395db9b4e57f39 jdk-11.0.2+9.tar.gz
06ac8993b5bebd02c70f1bd18ce13075f01115f3 jdk-11.0.3+7.tar.gz
e7e3cc9b0550c1e5d71197ad8c30f92b622d7183 jdk-11.0.4+11.tar.gz
49007a84a2643a204ce4406770dfd574b97880d9 jdk-11.0.5+10.tar.gz
3c250e98eb82f98afc6744ddc9170d293f0677e1 jdk-11.0.6+10.tar.gz
76cf7a141e15db30bd975089c65c833b58092aa7 jdk-11.0.9+11.tar.gz
71d19df63816e9be1c4c5eb44aea7a44cfadb319 jdk-11.tar.gz
72455a2fdb6cced9cd563f4d5d6134f7a6c34913 jdk-15.0.1+9.tar.gz
e8ae300e61b0b121018456d50010b555bc96ce10 jdk-15.0.2+7.tar.gz
a426a32ad34014953c0f7d4cc6f44199572e1c38 jdk-17+35.tar.gz
85707cfe369d0d32accbe3d96a0730c87e8639b5 jdk-17.0.1+12.tar.gz
699ab2d723b2f1df151a7dbcbdf33ddad36c7978 jdk-17.0.2+8.tar.gz
260f5e03e8fc7185f7987a6d2961a23abdce6a0b jdk-17.0.4.1+1.tar.gz
8f417fcbe5d0fef3a958aeb9740499230aa00046 jdk-17.0.5.tar.gz
e904e85d0b5f6552344aa385c90f3ca528dc3514 jdk-17.0.6.tar.gz
ee31c8ac65d5828d8c426fa3eedeb467cfa497ab jdk-17.0.7.tar.gz
8d83e433c1419e0c0cc5fd1762903d11b4a5752c jdk-6u31.tar.gz
38d2d2a91c66714c18430e136e7e5191af3996e6 jdk-7u11.tar.gz
d765bc4ad2f34d494429b2a8c1563c49db224944 jdk-7u13.tar.gz
13a265e4b706ece26fdfa6fc9f4a3dd1366016d2 jdk-7u21.tar.gz
5080274f8721a18111a7f614793afe6c88726739 jdk-7u25.tar.gz
dd24d63afd6d17b29ec9cb2b2464d4ff2e02de2c jdk-7u40.tar.gz
ec740ee9ffd43551ddf1e5b91641405116af6291 jdk-7u6.tar.gz
71b629b2ce83dbb69967c4785530afce1bec3809 jdk-7u60.tar.gz
e119f4cbfa2a39a53b9578d165d0dc44b59527b7 jdk-7u65.tar.gz
d6d1c42989433839fe64f34eb77298ef6627aed4 jdk-7u67.tar.gz
b66039bc6dc2bdb2992133743005e1e4fc58ae24 jdk-7u72.tar.gz
d980055694ddfae430ee001c7ee877d535e97252 jdk-7u76.tar.gz
baa6de37bb6f7a104ce5fe6506bca3d2572d601a jdk-7u80.tar.gz
7d5b152ac70f720bb9e783fa45ecadcf95069584 jdk-7u9.tar.gz
baf275a68d3f799a841932e4e9a95a1a604058ae jdk-8u102.tar.gz
5442f1015a0657259be0590b04572cd933431df7 jdk-8u11.tar.gz
741de6a4a805a0f9fb917d1845409e99346c2747 jdk-8u112.tar.gz
ae7df8bd0c18eb40237cf54cc28933f4893b9c92 jdk-8u121.tar.gz
51531a3a0c16e180ed95cb7d2bd680c2ec0aa553 jdk-8u131.tar.gz
e45edcf184f608d6f4a7b966d65a5d3289462693 jdk-8u144.tar.gz
264e806b9300a4fb3b6e15ba0e2c664d4ea698c8 jdk-8u152.tar.gz
84b04d877a2ea3a4e2082297b540e14f76722bc5 jdk-8u162.tar.gz
87303a0de3fd595aa3857c8f7cececa036d6ed18 jdk-8u172.tar.gz
9ae0338a5277d8749b4b4c7e65fc627319d98b27 jdk-8u181.tar.gz
cfecb1383faaf027ffbabfcd77a0b6a6521e0969 jdk-8u20.tar.gz
44ffeeae219782d40ce6822b580e608e72fd4c76 jdk-8u31.tar.gz
c95ebf7777beb3e7ef10c0cf3f734cb78f9828e4 jdk-8u5.tar.gz
4132cf52d5025bf330d53b96a5c6466fef432377 jdk-8u51.tar.gz
74df343671deba03be7caa49de217d78b693f817 jdk-8u60.tar.gz
dfb087bd64c3e5da79430e0ba706b9abc559c090 jdk-8u66.tar.gz
2ac389babd15aa5ddd1a424c1509e1c459e6fbb1 jdk-8u72.tar.gz
caa0cf65481b6207f66437576643f41dabae3c83 jdk-8u92.tar.gz
778fd85c827ec49d2d658a832d20e63916186b0d jedit-20210715.tar.gz
beb99f2cb0bd4e595c5c597d3970c46aa21616e4 jedit-20210717.tar.gz
33dd96cd83f2c6a26c035b7a0ee57624655224c5 jedit-20210724.tar.gz
0e4fd4d66388ddc760fa5fbd8d4a9a3b77cf59c7 jedit-20210802.tar.gz
258d527819583d740a3aa52dfef630eed389f8c6 jedit-20211019.tar.gz
f4f3fcbd54488297a5d2fcd23a2595912d5ba80b jedit-20211103.tar.gz
44775a22f42a9d665696bfb49e53c79371c394b0 jedit_build-20111217.tar.gz
a242a688810f2bccf24587b0062ce8027bf77fa2 jedit_build-20120304.tar.gz
4c948dee53f74361c097c08f49a1a5ff9b17bd1d jedit_build-20120307.tar.gz
9c221fe71af8a063fcffcce21672a97aea0a8d5b jedit_build-20120313.tar.gz
ed72630f307729df08fdedb095f0af8725f81b9c jedit_build-20120327.tar.gz
6425f622625024c1de27f3730d6811f6370a19cd jedit_build-20120414.tar.gz
7b012f725ec1cc102dc259df178d511cc7890bba jedit_build-20120813.tar.gz
8e1d36f5071e3def2cb281f7fefe9f52352cb88f jedit_build-20120903.tar.gz
8fa0c67f59beba369ab836562eed4e56382f672a jedit_build-20121201.tar.gz
06e9be2627ebb95c45a9bcfa025d2eeef086b408 jedit_build-20130104.tar.gz
c85c0829b8170f25aa65ec6852f505ce2a50639b jedit_build-20130628.tar.gz
5de3e399be2507f684b49dfd13da45228214bbe4 jedit_build-20130905.tar.gz
87136818fd5528d97288f5b06bd30c787229eb0d jedit_build-20130910.tar.gz
c63189cbe39eb8104235a0928f579d9523de78a9 jedit_build-20130925.tar.gz
65cc13054be20d3a60474d406797c32a976d7db7 jedit_build-20130926.tar.gz
30ca171f745adf12b65c798c660ac77f9c0f9b4b jedit_build-20131106.tar.gz
054c1300128f8abd0f46a3e92c756ccdb96ff2af jedit_build-20140405.tar.gz
4a963665537ea66c69de4d761846541ebdbf69f2 jedit_build-20140511.tar.gz
a9d637a30f6a87a3583f265da51e63e3619cff52 jedit_build-20140722.tar.gz
f29391c53d85715f8454e1aaa304fbccc352928f jedit_build-20141018.tar.gz
d7206d4c9d14d3f4c8115422b7391ffbcc6e80b4 jedit_build-20141026.tar.gz
f15d36abc1780875a46b6dbd4568e43b776d5db6 jedit_build-20141104.tar.gz
14ce124c897abfa23713928dc034d6ef0e1c5031 jedit_build-20150228.tar.gz
b5f7115384c167559211768eb5fe98138864473b jedit_build-20151023.tar.gz
8ba7b6791be788f316427cdcd805daeaa6935190 jedit_build-20151124.tar.gz
c70c5a6c565d435a09a8639f8afd3de360708e1c jedit_build-20160330.tar.gz
d4e1496c257659cf15458d718f4663cdd95a404e jedit_build-20161024.tar.gz
d806c1c26b571b5b4ef05ea11e8b9cf936518e06 jedit_build-20170319.tar.gz
7bcb202e13358dd750e964b2f747664428b5d8b3 jedit_build-20180417.tar.gz
23c8a05687d05a6937f7d600ac3aa19e3ce59c9c jedit_build-20180504.tar.gz
9c64ee0705e5284b507ca527196081979d689519 jedit_build-20181025.tar.gz
cfa65bf8720b9b798ffa0986bafbc8437f44f758 jedit_build-20181026.tar.gz
847492b75b38468268f9ea424d27d53f2d95cef4 jedit_build-20181203.tar.gz
536a38ed527115b4bf2545a2137ec57b6ffad718 jedit_build-20190120.tar.gz
58b9f03e5ec0b85f8123c31f5d8092dae5803773 jedit_build-20190130.tar.gz
ec0aded5f2655e2de8bc4427106729e797584f2f jedit_build-20190224.tar.gz
1e53598a02ec8d8736b15f480cbe2c84767a7827 jedit_build-20190508.tar.gz
b9c6f49d3f6ebe2e85a50595ce7412d01a4314ac jedit_build-20190717.tar.gz
1c753beb93e92e95e99e8ead23a68346bd1af44a jedit_build-20200610.tar.gz
533b1ee6459f59bcbe4f09e214ad2cb990fb6952 jedit_build-20200908.tar.gz
f9966b5ed26740bb5b8bddbfe947fcefaea43d4d jedit_build-20201223.tar.gz
0bdbd36eda5992396e9c6b66aa24259d4dd7559c jedit_build-20210201.tar.gz
a0744f1948abdde4bfb51dd4769b619e7444baf1 jedit_build-20210510-1.tar.gz
837d6c8f72ecb21ad59a2544c69aadc9f05684c6 jedit_build-20210510.tar.gz
7bdae3d24b10261f6cb277446cf9ecab6062bd6f jedit_build-20210708.tar.gz
0bd2bc2d9a491ba5fc8dd99df27c04f11a72e8fa jfreechart-1.0.14-1.tar.gz
8122526f1fc362ddae1a328bdbc2152853186fee jfreechart-1.0.14.tar.gz
d911f63a5c9b4c7335bb73f805cb1711ce017a84 jfreechart-1.5.0.tar.gz
d84b7d8ef273afec55284327fca7dd20f5ecb77a jfreechart-1.5.1.tar.gz
6fa0c221ef55919b684449f0111a8112358e94ff jfreechart-1.5.3.tar.gz
c8a19a36adf6cefa779d85f22ded2f4654e68ea5 jortho-1.0-1.tar.gz
2155e0bdbd29cd3d2905454de2e7203b9661d239 jortho-1.0-2.tar.gz
ffe179867cf5ffaabbb6bb096db9bdc0d7110065 jortho-1.0.tar.gz
df8bb213d39a7eecae97e6af3b11752d6c704c90 jsoup-1.15.4.tar.gz
6c737137cc597fc920943783382e928ea79e3feb kodkodi-1.2.16.tar.gz
afb04f4048a87bb888fe7b05b0139cb060c7925b kodkodi-1.5.2-1.tar.gz
5f95c96bb99927f3a026050f85bd056f37a9189e kodkodi-1.5.2.tar.gz
0634a946b216f7f07f1a0f7e28cf345daa28828f kodkodi-1.5.3.tar.gz
52e95b3493d71902f9df89d0bb59d0046a5f0c63 kodkodi-1.5.4-1.tar.gz
267189c637de26cf304d699cfa95389da002b250 kodkodi-1.5.4.tar.gz
3ecdade953bb455ed2907952be287d7e5cf6533b kodkodi-1.5.5.tar.gz
8aa939f5127290eb9a99952d375be9ffbf90c43b kodkodi-1.5.6-1.tar.gz
6b12bf3f40b16fae8ff22aa39171fa018d107cb3 kodkodi-1.5.6.tar.gz
c8b2e632f3ab959a4e037833a45e6360c8b72a99 kodkodi-1.5.7.tar.gz
377e36efb8608e6c828c7718d890e97fde2006a4 linux_app-20131007.tar.gz
759848095e2ad506083d92b5646947e3c32f27a0 linux_app-20191223.tar.gz
1a449ce69ac874e21804595d16aaaf5a0d0d0c10 linux_app-20200110.tar.gz
5557b396f5a9aa22388d3e2171f9bc58e4bd6cd7 lipics-3.1.2.tar.gz
71b6a272d10c53bb54cba23102e15334ec39bfce llncs-2.22.tar.gz
0aab4f73ff7f5e36f33276547e10897e1e56fb1d macos_app-20130716.tar.gz
ad5d0e640ce3609a885cecab645389a2204e03bb macos_app-20150916.tar.gz
400af57ec5cd51f96928d9de00d077524a6fe316 macos_app-20181205.tar.gz
3bc42b8e22f0be5ec5614f1914066164c83498f8 macos_app-20181208.tar.gz
5fb1a2d21b220d0e588790c0203ac87c10ed0870 minisat-2.2.1-1.tar.gz
ae76bfaade3bf72ff6b2d3aafcd52fa45609fcd1 minisat-2.2.1.tar.gz
59aa13f48685326995714cc6028aebb789e445e3 mlton-20210117-1.tar.gz
5d48b7163a68c18b691bedc1511364b0b103baeb mlton-20210117.tar.gz
eda10c62da927a842c0a8881f726eac85e1cb4f7 naproche-20210122.tar.gz
edcb517b7578db4eec1b6573b624f291776e11f6 naproche-20210124.tar.gz
d858eb0ede6aea6b8cc40de63bd3a17f8f9f5300 naproche-20210129.tar.gz
810ee0f35adada9bf970c33fd80b986ab2255bf3 naproche-20210201.tar.gz
37bb6d934cfaf157efcadb349a0244d145ce15b0 naproche-20211211.tar.gz
5a8a59132476ae75cfddefc98781db55f18ed82c naproche-20220808.tar.gz
516b3714e56b708bc291bb0a592ea89de39ac894 naproche-20220910.tar.gz
c695a038197477b69180917ee17ec2d92142e8f2 naproche-20220917.tar.gz
0b5a3161a18045540ab618249ba85a464c1fce66 naproche-20221002.tar.gz
48e9d4cbf95626c8e3013bee86ff82e67df6cefd naproche-20221018.tar.gz
c66f5ce13d429ea9c8dcc0d33d34b7abf178da5d naproche-20221024.tar.gz
d098dd0873b1720a77dc4e060267f9a6c93f341a naproche-2d99afe5c349.tar.gz
4a4e56fd03b7ba4edd38046f853873a90cf55d1a naproche-4ad61140062f.tar.gz
77252e0b40f89825b9b5935f9f0c4cd5d4e7012a naproche-6d0d76ce2f2a.tar.gz
9c02ecf93863c3289002c5e5ac45a83e2505984c naproche-755224402e36.tar.gz
e1b34e8f54e7e5844873612635444fed434718a1 naproche-7d0947a91dd5.tar.gz
26df569cee9c2fd91b9ac06714afd43f3b37a1dd nunchaku-0.3.tar.gz
e573f2cbb57eb7b813ed5908753cfe2cb41033ca nunchaku-0.5.tar.gz
3d7b7690dfd09e25ad56e64b519f61f06e3ab706 old_vampire-4.2.2.tar.gz
fe57793aca175336deea4f5e9c0d949a197850ac opam-1.2.2.tar.gz
eb499a18e7040ca0fe1ca824c9dcb2087c47c9ba opam-2.0.3-1.tar.gz
002f74c9e65e650de2638bf54d7b012b8de76c28 opam-2.0.3.tar.gz
ddb3b438430d9565adbf5e3d913bd52af8337511 opam-2.0.6.tar.gz
fc66802c169f44511d3be30435eb89a11e635742 opam-2.0.7.tar.gz
108e947d17e9aa6170872614492d8f647802f483 opam-2.1.0.tar.gz
f8d0218371457eabe2b4214427d9570de92ed861 pdfjs-2.12.313.tar.gz
aa7fc4a3d2cbd6c8744ddfeefd863828ea602bcd pdfjs-2.14.305.tar.gz
1c8cb6a8f4cbeaedce2d6d1ba8fc7e2ab3663aeb polyml-5.4.1.tar.gz
a3f9c159a0ee9a63b7a5d0c835ed9c2c908f8b56 polyml-5.5.0-1.tar.gz
7d604a99355efbfc1459d80db3279ffa7ade3e39 polyml-5.5.0-2.tar.gz
b3d776e6744f0cd2773d467bc2cfe1de3d1ca2fd polyml-5.5.0-3.tar.gz
1812e9fa6d163f63edb93e37d1217640a166cf3e polyml-5.5.0.tar.gz
36f5b8224f484721749682a3655c796a55a2718d polyml-5.5.1-1.tar.gz
36f78f27291a9ceb13bf1120b62a45625afd44a6 polyml-5.5.1.tar.gz
a588640dbf5da9ae15455b02ef709764a48637dc polyml-5.5.2-1.tar.gz
4b690390946f7bfb777b89eb16d6f08987cca12f polyml-5.5.2-2.tar.gz
5b31ad8556e41dfd6d5e85f407818be399aa3d2a polyml-5.5.2-3.tar.gz
532f6e8814752aeb406c62fabcfd2cc05f8a7ca8 polyml-5.5.2.tar.gz
1c53f699d35c0db6c7cf4ea51f2310adbd1d0dc5 polyml-5.5.3-20150820.tar.gz
b4b624fb5f34d1dc814fb4fb469fafd7d7ea018a polyml-5.5.3-20150908.tar.gz
b668e1f43a41608a8eb365c5e19db6c54c72748a polyml-5.5.3-20150911.tar.gz
1f5cd9b1390dab13861f90dfc06d4180cc107587 polyml-5.5.3-20150916.tar.gz
f78896e588e8ebb4da57bf0c95210b0f0fa9e551 polyml-5.6-1.tar.gz
21fa0592b7dfd23269063f42604438165630c0f0 polyml-5.6-2.tar.gz
03ba81e595fa6d6df069532d67ad3195c37d9046 polyml-5.6-20151123.tar.gz
822f489c18e38ce5ef979ec21dccce4473e09be6 polyml-5.6-20151206.tar.gz
bd6a448f0e0d5787747f4f30ca661f9c1868e4a7 polyml-5.6-20151223.tar.gz
5b70c12c95a90d858f90c1945011289944ea8e17 polyml-5.6-20160118.tar.gz
5b19dc93082803b82aa553a5cfb3e914606c0ffd polyml-5.6.tar.gz
80b923fca3533bf291ff9da991f2262a98b68cc4 polyml-5.7-20170217.tar.gz
381a70cecf0fdee47f6842e2bdb5107ed52adab6 polyml-5.7.1-1.tar.gz
39dac33b569ac66f76126b8f4edc6d9227bd8a63 polyml-5.7.1-2.tar.gz
0b896ccc35bd3f2541cd55e6f0ed14637ed9fc68 polyml-5.7.1-4.tar.gz
262450ac9966abebae2e1d4f9ae703cfe0f5d8d9 polyml-5.7.1-5.tar.gz
1aeb57877d694db7fe4d4395287cddf3bc77710b polyml-5.7.1-6.tar.gz
e3e7e20b1e0e5d5d68df4cd4caa1e1a7410d46b6 polyml-5.7.1-7.tar.gz
1430533c09b17f8be73798a47a5f409d43a04cf4 polyml-5.7.1-8.tar.gz
171b5783b88522a35e4822b19ef8ba838c04f494 polyml-5.7.1.tar.gz
5fbcab1da2b5eb97f24da2590ece189d55b3a105 polyml-5.7.tar.gz
51e024225b460900da5279f0b91b217085f98cf9 polyml-5.8-20190220.tar.gz
20a83fa58d497b533150defe39bcd4540529b25f polyml-5.8-20190306.tar.gz
9f0e9cd10df4c3383b063eb076e8b698ca50c3d0 polyml-5.8.1-20191101.tar.gz
f46deb909d645ac8c140968e4d32b5763beb9add polyml-5.8.1-20191113.tar.gz
36a40a981b57daae0463d14940a8edf6fa1af179 polyml-5.8.1-20191114.tar.gz
525b05536b08c11a1eae943fe6818a8622326084 polyml-5.8.1-20191124.tar.gz
9043828803483ca14df64488dff014ad050a6d34 polyml-5.8.1-20200228.tar.gz
1186607e2c43b77db86731f12fbedb531ca50a21 polyml-5.8.1-20200708.tar.gz
22ae16bf7850e73b903d2ca8eb506da05b441cf3 polyml-5.8.1.tar.gz
cb8e85387315f62dcfc6b21ec378186e58068f76 polyml-5.8.2.tar.gz
d1fd6eced69dc1df7226432fcb824568e0994ff2 polyml-5.8.tar.gz
fb40145228f84513a9b083b54678a7d61b9c34c4 polyml-5.9-5d4caa8f7148.tar.gz
0f1c903b043acf7b221821d8b6374b3f943a122b polyml-5.9-610a153b941d.tar.gz
5f00a47b8f5180b33e68fcc6c343b061957a0a98 polyml-5.9-960de0cd0795.tar.gz
7056b285af67902b32f5049349a064f073f05860 polyml-5.9-cc80e2b43c38.tar.gz
0c396bd6b46ff11a2432b91aab2be0248bd9b0a4 polyml-5.9.tar.gz
f399ab9ee4a586fddeb6e73ca3605af65a89f969 polyml-5e9c8155ea96.tar.gz
+2cea4dd48bb8b171bc04c9793a55c7fa4c2d96f1 polyml-a5d5fba90286.tar.gz
49f1adfacdd6d29fa9f72035d94a31eaac411a97 polyml-test-0a6ebca445fc.tar.gz
2a8c4421e0a03c0d6ad556b3c36c34eb11568adb polyml-test-1236652ebd55.tar.gz
8e83fb5088cf265902b8da753a8eac5fe3f6a14b polyml-test-159dc81efc3b.tar.gz
b80c17398293d0c8f8d9923427176efb33cf2d89 polyml-test-15c840d48c9a.tar.gz
a0064c157a59e2706e18512a49a6dca914fa17fc polyml-test-1b2dcf8f5202.tar.gz
4e6543dbbb2b2aa402fd61428e1c045c48f18b47 polyml-test-79534495ee94.tar.gz
853ab0e9ff2b73790cc80a2d36cbff8b03e50a8e polyml-test-7a7b742897e9.tar.gz
85bfda83d138e936fdafd68ed3627b1058e5c2c3 polyml-test-7e49fce62e3d.tar.gz
c629cd499a724bbe37b962f727e4ff340c50299d polyml-test-8529546198aa.tar.gz
7df4857d73dbc9edda25a6ad329e47639e70fadf polyml-test-8fda4fd22441.tar.gz
2b7c02b67feb2f44dda6938a7244f4257e7c580c polyml-test-905dae2ebfda.tar.gz
3dfdc58e5d9b28f038a725e05c9c2f2ce0bb2632 polyml-test-a3cfdf648da-1.tar.gz
e2f075b0cc709f4f7f6492b725362f9010b2c6d1 polyml-test-a3cfdf648da-2.tar.gz
33568f69ce813b7405386ddbefa14ad0342bb8f0 polyml-test-a3cfdf648da.tar.gz
4bedaac4f1fb9a9199aa63695735063c47059003 polyml-test-a444f281ccec.tar.gz
f3031692edcc5d8028a42861e4e40779f0f9d3e1 polyml-test-b68438d33c69.tar.gz
b2901b604124cfe46a6c28041f47c5a3bd3673f0 polyml-test-bafe319bc3a6-1.tar.gz
3ac7e916832c07accebeada9a81b301c299e1930 polyml-test-bafe319bc3a6.tar.gz
cb2318cff6ea9293cd16a4435a4fe28ad9dbe0b8 polyml-test-cf46747fee61.tar.gz
67ffed2f98864721bdb1e87f0ef250e4c69e6160 polyml-test-d68c6736402e.tar.gz
b4ceeaac47f3baae41c2491a8368b03217946166 polyml-test-e7a662f8f9c4.tar.gz
609c7d09d3ed01156ff91261e801e2403ff93729 polyml-test-e8d82343b692.tar.gz
b6d87466e9b44e8ef4a2fac74c96b139080a506a polyml-test-f54aa41240d0.tar.gz
d365f3fc11c2427cafc62b3c79951880a1476ebb polyml-test-f86ae3dc1686.tar.gz
a619177143fea42a464f49bb864665407c07a16c polyml-test-fb4f42af00fa.tar.gz
53123dc011b2d4b4e8fe307f3c9fa355718ad01a postgresql-42.1.1.tar.gz
3a5d31377ec07a5069957f5477a4848cfc89a594 postgresql-42.1.4.tar.gz
7d6ef4320d5163ceb052eb83c1cb3968f099a422 postgresql-42.2.18.tar.gz
e7cd5c7955e9eb5ce8cd07feb97230b23d2eec40 postgresql-42.2.2.tar.gz
1aaa38429dc9aa7b1095394d9a7ba3465f8d6e04 postgresql-42.2.24.tar.gz
231b33c9c3c27d47e3ba01b399103d70509e0731 postgresql-42.2.5.tar.gz
6335fbc0658e447b5b9bc48c9ad36e33a05bb72b postgresql-42.2.9.tar.gz
f84c7ecafb07a0d763f1d70edc54f7c43c2e8c63 postgresql-42.4.0.tar.gz
143d0d32a13d7d8e15b9bab866e14ad4308a6246 postgresql-42.5.0.tar.gz
f132329ca1045858ef456cc08b197c9eeea6881b postgresql-9.4.1212.tar.gz
3fc5e7f759e7220b9e3fc5bac296e312e34a60ad prismjs-1.29.0.tar.gz
f042bba5fb82c7eb8aee99f92eb6ec38c8a067f7 python-3.10.4.tar.gz
d144120b7cf2d2b3106632af0b98c78278c467d7 rsync-3.2.7.tar.gz
0885e1f1d8feaca78d2f204b6487e6eec6dfab4b scala-2.10.0.tar.gz
f7dc7a4e1aea46408fd6e44b8cfacb33af61afbc scala-2.10.1.tar.gz
207e4916336335386589c918c5e3f3dcc14698f2 scala-2.10.2.tar.gz
21c8ee274ffa471ab54d4196ecd827bf3d43e591 scala-2.10.3.tar.gz
d4688ddaf83037ca43b5bf271325fc53ae70e3aa scala-2.10.4.tar.gz
44d12297a78988ffd34363535e6a8e0d94c1d8b5 scala-2.11.0.tar.gz
14f20de82b25215a5e055631fb147356400625e6 scala-2.11.1.tar.gz
4fe9590d08e55760b86755d3fab750e90ac6c380 scala-2.11.2.tar.gz
27a296495b2167148de06314ed9a942f2dbe23fe scala-2.11.4.tar.gz
4b24326541161ce65424293ca9da3e7c2c6ab452 scala-2.11.5.tar.gz
e7cf20e3b27c894c6127c7a37042c1667f57385e scala-2.11.6.tar.gz
4810c1b00719115df235be1c5991aa6ea7186134 scala-2.11.7.tar.gz
3eca4b80710996fff87ed1340dcea2c5f6ebf4f7 scala-2.11.8.tar.gz
0004e53f885fb165b50c95686dec40d99ab0bdbd scala-2.12.0.tar.gz
059cbdc58d36e3ac1fffcccd9139ecd34f271882 scala-2.12.10.tar.gz
82056106aa6fd37c159ea76d16096c20a749cccd scala-2.12.11.tar.gz
fe7ff585acffaad7f0dd4a1d079134d15c26ed0d scala-2.12.12.tar.gz
74a8c3dab3a25a87357996ab3e95d825dc820fd0 scala-2.12.2.tar.gz
d66796a68ec3254b46b17b1f8ee5bcc56a93aacf scala-2.12.3.tar.gz
1636556167dff2c191baf502c23f12e09181ef78 scala-2.12.4.tar.gz
8171f494bba54fb0d01c887f889ab8fde7171c2a scala-2.12.5.tar.gz
54c1b06fa2c5f6c2ab3d391ef342c0532cd7f392 scala-2.12.6.tar.gz
02358f00acc138371324b6248fdb62eed791c6bd scala-2.12.7.tar.gz
201c05ae9cc382ee6c08af49430e426f6bbe0d5a scala-2.12.8.tar.gz
a0622fe75c3482ba7dc3ce74d58583b648a1ff0d scala-2.13.4-1.tar.gz
ec53cce3c5edda1145ec5d13924a5f9418995c15 scala-2.13.4.tar.gz
caedd48ae65db9d116a0e1712eec3a66fe95c712 scala-2.13.5-1.tar.gz
f51981baf34c020ad103b262f81796c37abcaa4a scala-2.13.5.tar.gz
0a7cab09dec357dab7819273f2542ff1c3ea0968 scala-2.13.6.tar.gz
1f8532dba290c6b2ef364632f3f92e71da93baba scala-2.13.7.tar.gz
b447017e81600cc5e30dd61b5d4962f6da01aa80 scala-2.8.1.final.tar.gz
5659440f6b86db29f0c9c0de7249b7e24a647126 scala-2.9.2.tar.gz
97c5b73011f4d6438b616e5940e6d759034f5414 scala-3.1.3.tar.gz
87c8e53100df4bc85cd8d4f55028088646d70fb4 scala-3.2.0-1.tar.gz
c58db22b9e1e90f5b7a3f5edd8bdb4ddab4947fd scala-3.2.0-2.tar.gz
7677b02fe06c992ca6cf82bf68adb16287294256 scala-3.2.0.tar.gz
bee1c9416a086e553057171e5cb571271ed02c60 scala-3.2.1.tar.gz
abe7a3b50da529d557a478e9f631a22429418a67 smbc-0.4.1.tar.gz
cbd491c0feba1d21019d05564e76dd04f592ccb4 spass-3.8ds-1.tar.gz
edaa1268d82203067657aabcf0371ce7d4b579b9 spass-3.8ds-2.tar.gz
43b5afbcad575ab6817d2289756ca22fd2ef43a9 spass-3.8ds.tar.gz
b016a785f1f78855c00d351ff598355c3b87450f sqlite-jdbc-3.18.0-1.tar.gz
b85b5bc071a59ef2a8326ceb1617d5a9a5be41cf sqlite-jdbc-3.18.0.tar.gz
e56117a67ab01fb24c7fc054ede3160cefdac5f8 sqlite-jdbc-3.20.0.tar.gz
27aeac6a91353d69f0438837798ac4ae6f9ff8c5 sqlite-jdbc-3.23.1.tar.gz
4d17611857fa3a93944c1f159c0fd2a161967aaf sqlite-jdbc-3.27.2.1.tar.gz
806be457eb79408fcc5a72aeca3f64b2d89a6b63 sqlite-jdbc-3.30.1.tar.gz
cba2b194114216b226d75d49a70d1bd12b141ac8 sqlite-jdbc-3.32.3.2.tar.gz
29306acd6ce9f4c87032b2c271c6df035fe7d4d3 sqlite-jdbc-3.34.0.tar.gz
8a2ca4d02cfedbfe4dad4490f1ed3ddba33a009a sqlite-jdbc-3.36.0.3.tar.gz
d2c707638b08ad56469b92dc2941d403efbb3394 sqlite-jdbc-3.39.4.1.tar.gz
12cb90b265bc2308858c63f00d5ecbfb80603dbd sqlite-jdbc-3.41.0.0.tar.gz
8d20968603f45a2c640081df1ace6a8b0527452a sqlite-jdbc-3.8.11.2.tar.gz
2369f06e8d095f9ba26df938b1a96000e535afff ssh-java-20161009.tar.gz
a2335d28b5b95d8d26500a53f1a9303fc5beaf36 ssh-java-20190323.tar.gz
fdc415284e031ee3eb2f65828cbc6945736fe995 stack-1.9.1.tar.gz
6e19948ff4a821e2052fc9b3ddd9ae343f4fcdbb stack-1.9.3.tar.gz
f969443705aa8619e93af5b34ea98d15cd7efaf1 stack-2.1.3.tar.gz
ebd0221d038966aa8bde075f1b0189ff867b02ca stack-2.5.1.tar.gz
fa2d882ec45cbc8c7d2f3838b705a8316696dc66 stack-2.7.3.tar.gz
1f4a2053cc1f34fa36c4d9d2ac906ad4ebc863fd sumatra_pdf-2.1.1.tar.gz
601e08d048d8e50b0729429c8928b667d9b6bde9 sumatra_pdf-2.3.2.tar.gz
14d46c2eb1a34821703da59d543433f581e91df3 sumatra_pdf-2.4.tar.gz
44d67b6742919ce59a42368fc60e2afa210a3e42 sumatra_pdf-2.5.2.tar.gz
89719a13bc92810730a430973684629426ed1b2a sumatra_pdf-3.0.tar.gz
f5afcc82f8e734665d38867e99475d3ad0d5ed15 sumatra_pdf-3.1.1.tar.gz
a45eca5c1277f42f87bb8dc12a3074ccf5488221 sumatra_pdf-3.1.2-1.tar.gz
3b3239b2e6f8062b90d819f3703e30a50f4fa1e7 sumatra_pdf-3.1.2-2.tar.gz
8486387f61557147ec06b1f637117c017c8f0528 sumatra_pdf-3.1.2.tar.gz
e8648878f908e93d64a393231ab21fdac976a9c2 sumatra_pdf-3.3.3.tar.gz
25d2004325585fceb0a951181716f77fc4d9d0d4 sumatra_pdf-3.4.6.tar.gz
869ea6d8ea35c8ba68d7fcb028f16b2b7064c5fd vampire-1.0.tar.gz
399f687b56575b93e730f68c91c989cb48aa34d8 vampire-4.2.2.tar.gz
0402978ca952f08eea73e483b694928ac402a304 vampire-4.5.1-1.tar.gz
26d9d171e169c6420a08aa99eda03ef5abb9c545 vampire-4.5.1.tar.gz
4571c042efd6fc3097e105a528826959acd888a3 vampire-4.6.tar.gz
98c5c79fef7256db9f64c8feea2edef0a789ce46 verit-2016post.tar.gz
52ba18a6c96b53c5ae9b179d5a805a0c08f1da6d verit-2020.10-rmx-1.tar.gz
b6706e74e20e14038e9b38f0acdb5639a134246a verit-2020.10-rmx.tar.gz
d33e1e36139e86b9e9a48d8b46a6f90d7863a51c verit-2021.06-rmx-1.tar.gz
c11d1120fcefaec79f099fe2be05b03cd2aed8b9 verit-2021.06-rmx.tar.gz
b576fd5d89767c1067541d4839fb749c6a68d22c verit-2021.06.1-rmx.tar.gz
19c6e5677b0a26cbc5805da79d00d06a66b7a671 verit-2021.06.2-rmx.tar.gz
c4666a6d8080b5e376b50471fd2d9edeb1f9c988 vscode_extension-20220324.tar.gz
86c952d739d1eb868be88898982d4870a3d8c2dc vscode_extension-20220325.tar.gz
5293b9e77e5c887d449b671828b133fad4f18632 vscode_extension-20220829.tar.gz
0d9551ffeb968813b6017278fa7ab9bd6062883f vscode_extension-20230206.tar.gz
67b271186631f84efd97246bf85f6d8cfaa5edfd vscodium-1.65.2.tar.gz
c439ab741e0cc49354cc03aa9af501202a5a38e3 vscodium-1.70.1.tar.gz
81d21dfd0ea5c58f375301f5166be9dbf8921a7a windows_app-20130716.tar.gz
fe15e1079cf5ad86f3cbab4553722a0d20002d11 windows_app-20130905.tar.gz
e6a43b7b3b21295853bd2a63b27ea20bd6102f5f windows_app-20130906.tar.gz
8fe004aead867d4c82425afac481142bd3f01fb0 windows_app-20130908.tar.gz
d273abdc7387462f77a127fa43095eed78332b5c windows_app-20130909.tar.gz
c368908584e2bca38b3bcb20431d0c69399fc2f0 windows_app-20131130.tar.gz
c3f5285481a95fde3c1961595b4dd0311ee7ac1f windows_app-20131201.tar.gz
14807afcf69e50d49663d5b48f4b103f30ae842b windows_app-20150821.tar.gz
ed106181510e825bf959025d8e0a2fc3f78e7a3f windows_app-20180417.tar.gz
e809e4ab0d33cb413a7c47dd947e7dbdfcca1c24 windows_app-20181002.tar.gz
9e96ba128a0617a9020a178781df49d48c997e19 windows_app-20181006.tar.gz
1c36a840320dfa9bac8af25fc289a4df5ea3eccb xz-java-1.2-1.tar.gz
2ae13aa17d0dc95ce254a52f1dba10929763a10d xz-java-1.2.tar.gz
c22196148fcace5443a933238216cff5112948df xz-java-1.5.tar.gz
4368ee09154dff42666a8c87e072261745619e51 xz-java-1.6.tar.gz
63f5fa09e92a895cb9aea27d7142abc86c487d25 xz-java-1.8.tar.gz
a06875bdadd653627a68d2083c5178c1264d8fc6 xz-java-1.9.tar.gz
4530a1aa6f4498ee3d78d6000fa71a3f63bd077f yices-1.0.28.tar.gz
3a8f77822278fe9250890e357248bc678d8fac95 z3-3.2-1.tar.gz
12ae71acde43bd7bed1e005c43034b208c0cba4c z3-3.2.tar.gz
d94a716502c8503d63952bcb4d4176fac8b28704 z3-4.0.tar.gz
86e721296c400ada440e4a9ce11b9e845eec9e25 z3-4.3.0.tar.gz
a8917c31b31c182edeec0aaa48870844960c8a61 z3-4.3.2pre-1.tar.gz
06b30757ff23aefbc30479785c212685ffd39f4d z3-4.3.2pre.tar.gz
ed37c451b9b748901295898bf713b24d22cc8c17 z3-4.4.0_4.4.1.tar.gz
93e7e4bddc6afcf87fe2b6656cfcb1b1acd0a4f8 z3-4.4.0pre-1.tar.gz
b1bc411c2083fc01577070b56b94514676f53854 z3-4.4.0pre-2.tar.gz
4c366ab255d2e9343fb635d44d4d55ddd24c76d0 z3-4.4.0pre-3.tar.gz
517ba7b94c1985416c5b411c8ae84456367eb231 z3-4.4.0pre.tar.gz
6e5d7a65757cac970eb5ad28cd62130c99f42c23 z3-4.4.1.tar.gz
aa20745f0b03e606b1a4149598e0c7572b63c657 z3-4.8.3.tar.gz
9dfeb39c87393af7b6a34118507637aa53aca05e zipperposition-2.0-1.tar.gz
b884c60653002a7811e3b652ae0515e825d98667 zipperposition-2.0.tar.gz
b129ec4f8a4474953ec107536298ee08a01fbebc zipperposition-2.1-1.tar.gz
5f53a77efb5cbe9d0c95d74a1588cc923bd711a7 zipperposition-2.1.tar.gz
c101182780aeafbc2e0ea7e8b10b91c6f7483af2 zstd-jni-1.5.2-5.tar.gz
diff --git a/Admin/components/main b/Admin/components/main
--- a/Admin/components/main
+++ b/Admin/components/main
@@ -1,44 +1,44 @@
#main components for repository clones or release bundles
gnu-utils-20211030
bash_process-1.3
bib2xhtml-20190409
csdp-6.1.1
cvc4-1.8
e-2.6-1
easychair-3.5
eptcs-1.7.0
flatlaf-2.4
foiltex-2.1.4b
idea-icons-20210508
isabelle_fonts-20211004
isabelle_setup-20230206
jdk-17.0.7
jedit-20211103
jfreechart-1.5.3
jortho-1.0-2
jsoup-1.15.4
kodkodi-1.5.7
lipics-3.1.2
llncs-2.22
minisat-2.2.1-1
mlton-20210117-1
nunchaku-0.5
opam-2.0.7
pdfjs-2.14.305
-polyml-5e9c8155ea96
+polyml-a5d5fba90286
postgresql-42.5.0
prismjs-1.29.0
rsync-3.2.7
scala-3.2.0-2
smbc-0.4.1
spass-3.8ds-2
sqlite-jdbc-3.41.0.0
stack-2.7.3
vampire-4.6
verit-2021.06.2-rmx
vscode_extension-20230206
vscodium-1.70.1
xz-java-1.9
z3-4.4.0_4.4.1
zipperposition-2.1-1
zstd-jni-1.5.2-5
diff --git a/src/Doc/Implementation/Proof.thy b/src/Doc/Implementation/Proof.thy
--- a/src/Doc/Implementation/Proof.thy
+++ b/src/Doc/Implementation/Proof.thy
@@ -1,483 +1,485 @@
(*:maxLineLen=78:*)
theory Proof
imports Base
begin
chapter \<open>Structured proofs\<close>
section \<open>Variables \label{sec:variables}\<close>
text \<open>
Any variable that is not explicitly bound by \<open>\<lambda>\<close>-abstraction is considered
as ``free''. Logically, free variables act like outermost universal
quantification at the sequent level: \<open>A\<^sub>1(x), \<dots>, A\<^sub>n(x) \<turnstile> B(x)\<close> means that
the result holds \<^emph>\<open>for all\<close> values of \<open>x\<close>. Free variables for terms (not
types) can be fully internalized into the logic: \<open>\<turnstile> B(x)\<close> and \<open>\<turnstile> \<And>x. B(x)\<close>
are interchangeable, provided that \<open>x\<close> does not occur elsewhere in the
context. Inspecting \<open>\<turnstile> \<And>x. B(x)\<close> more closely, we see that inside the
quantifier, \<open>x\<close> is essentially ``arbitrary, but fixed'', while from outside
it appears as a place-holder for instantiation (thanks to \<open>\<And>\<close> elimination).
The Pure logic represents the idea of variables being either inside or
outside the current scope by providing separate syntactic categories for
\<^emph>\<open>fixed variables\<close> (e.g.\ \<open>x\<close>) vs.\ \<^emph>\<open>schematic variables\<close> (e.g.\ \<open>?x\<close>).
Incidentally, a universal result \<open>\<turnstile> \<And>x. B(x)\<close> has the HHF normal form \<open>\<turnstile>
B(?x)\<close>, which represents its generality without requiring an explicit
quantifier. The same principle works for type variables: \<open>\<turnstile> B(?\<alpha>)\<close>
represents the idea of ``\<open>\<turnstile> \<forall>\<alpha>. B(\<alpha>)\<close>'' without demanding a truly
polymorphic framework.
\<^medskip>
Additional care is required to treat type variables in a way that
facilitates type-inference. In principle, term variables depend on type
variables, which means that type variables would have to be declared first.
For example, a raw type-theoretic framework would demand the context to be
constructed in stages as follows: \<open>\<Gamma> = \<alpha>: type, x: \<alpha>, a: A(x\<^sub>\<alpha>)\<close>.
We allow a slightly less formalistic mode of operation: term variables \<open>x\<close>
are fixed without specifying a type yet (essentially \<^emph>\<open>all\<close> potential
occurrences of some instance \<open>x\<^sub>\<tau>\<close> are fixed); the first occurrence of \<open>x\<close>
within a specific term assigns its most general type, which is then
maintained consistently in the context. The above example becomes \<open>\<Gamma> = x:
term, \<alpha>: type, A(x\<^sub>\<alpha>)\<close>, where type \<open>\<alpha>\<close> is fixed \<^emph>\<open>after\<close> term \<open>x\<close>, and the
constraint \<open>x :: \<alpha>\<close> is an implicit consequence of the occurrence of \<open>x\<^sub>\<alpha>\<close> in
the subsequent proposition.
This twist of dependencies is also accommodated by the reverse operation of
exporting results from a context: a type variable \<open>\<alpha>\<close> is considered fixed as
long as it occurs in some fixed term variable of the context. For example,
exporting \<open>x: term, \<alpha>: type \<turnstile> x\<^sub>\<alpha> \<equiv> x\<^sub>\<alpha>\<close> produces in the first step \<open>x: term
\<turnstile> x\<^sub>\<alpha> \<equiv> x\<^sub>\<alpha>\<close> for fixed \<open>\<alpha>\<close>, and only in the second step \<open>\<turnstile> ?x\<^sub>?\<^sub>\<alpha> \<equiv> ?x\<^sub>?\<^sub>\<alpha>\<close>
for schematic \<open>?x\<close> and \<open>?\<alpha>\<close>. The following Isar source text illustrates this
scenario.
\<close>
notepad
begin
{
fix x \<comment> \<open>all potential occurrences of some \<open>x::\<tau>\<close> are fixed\<close>
{
have "x::'a \<equiv> x" \<comment> \<open>implicit type assignment by concrete occurrence\<close>
by (rule reflexive)
}
thm this \<comment> \<open>result still with fixed type \<open>'a\<close>\<close>
}
thm this \<comment> \<open>fully general result for arbitrary \<open>?x::?'a\<close>\<close>
end
text \<open>
The Isabelle/Isar proof context manages the details of term vs.\ type
variables, with high-level principles for moving the frontier between fixed
and schematic variables.
The \<open>add_fixes\<close> operation explicitly declares fixed variables; the
\<open>declare_term\<close> operation absorbs a term into a context by fixing new type
variables and adding syntactic constraints.
The \<open>export\<close> operation is able to perform the main work of generalizing term
and type variables as sketched above, assuming that fixing variables and
terms have been declared properly.
The \<open>import\<close> operation makes a generalized fact a genuine part of the
context, by inventing fixed variables for the schematic ones. The effect can
be reversed by using \<open>export\<close> later, potentially with an extended context;
the result is equivalent to the original modulo renaming of schematic
variables.
The \<open>focus\<close> operation provides a variant of \<open>import\<close> for nested propositions
(with explicit quantification): \<open>\<And>x\<^sub>1 \<dots> x\<^sub>n. B(x\<^sub>1, \<dots>, x\<^sub>n)\<close> is decomposed
by inventing fixed variables \<open>x\<^sub>1, \<dots>, x\<^sub>n\<close> for the body.
\<close>
text %mlref \<open>
\begin{mldecls}
@{define_ML Variable.add_fixes: "
string list -> Proof.context -> string list * Proof.context"} \\
@{define_ML Variable.variant_fixes: "
string list -> Proof.context -> string list * Proof.context"} \\
@{define_ML Variable.declare_term: "term -> Proof.context -> Proof.context"} \\
@{define_ML Variable.declare_constraints: "term -> Proof.context -> Proof.context"} \\
@{define_ML Variable.export: "Proof.context -> Proof.context -> thm list -> thm list"} \\
@{define_ML Variable.polymorphic: "Proof.context -> term list -> term list"} \\
@{define_ML Variable.import: "bool -> thm list -> Proof.context ->
((ctyp TVars.table * cterm Vars.table) * thm list)
* Proof.context"} \\
@{define_ML Variable.focus: "binding list option -> term -> Proof.context ->
((string * (string * typ)) list * term) * Proof.context"} \\
\end{mldecls}
\<^descr> \<^ML>\<open>Variable.add_fixes\<close>~\<open>xs ctxt\<close> fixes term variables \<open>xs\<close>, returning
the resulting internal names. By default, the internal representation
coincides with the external one, which also means that the given variables
must not be fixed already. There is a different policy within a local proof
body: the given names are just hints for newly invented Skolem variables.
\<^descr> \<^ML>\<open>Variable.variant_fixes\<close> is similar to \<^ML>\<open>Variable.add_fixes\<close>, but
always produces fresh variants of the given names.
\<^descr> \<^ML>\<open>Variable.declare_term\<close>~\<open>t ctxt\<close> declares term \<open>t\<close> to belong to the
context. This automatically fixes new type variables, but not term
variables. Syntactic constraints for type and term variables are declared
uniformly, though.
\<^descr> \<^ML>\<open>Variable.declare_constraints\<close>~\<open>t ctxt\<close> declares syntactic constraints
from term \<open>t\<close>, without making it part of the context yet.
\<^descr> \<^ML>\<open>Variable.export\<close>~\<open>inner outer thms\<close> generalizes fixed type and term
variables in \<open>thms\<close> according to the difference of the \<open>inner\<close> and \<open>outer\<close>
context, following the principles sketched above.
\<^descr> \<^ML>\<open>Variable.polymorphic\<close>~\<open>ctxt ts\<close> generalizes type variables in \<open>ts\<close> as
far as possible, even those occurring in fixed term variables. The default
policy of type-inference is to fix newly introduced type variables, which is
essentially reversed with \<^ML>\<open>Variable.polymorphic\<close>: here the given terms
are detached from the context as far as possible.
\<^descr> \<^ML>\<open>Variable.import\<close>~\<open>open thms ctxt\<close> invents fixed type and term
variables for the schematic ones occurring in \<open>thms\<close>. The \<open>open\<close> flag
indicates whether the fixed names should be accessible to the user,
otherwise newly introduced names are marked as ``internal''
(\secref{sec:names}).
\<^descr> \<^ML>\<open>Variable.focus\<close>~\<open>bindings B\<close> decomposes the outermost \<open>\<And>\<close> prefix of
proposition \<open>B\<close>, using the given name bindings.
\<close>
text %mlex \<open>
The following example shows how to work with fixed term and type parameters
and with type-inference.
\<close>
ML_val \<open>
(*static compile-time context -- for testing only*)
val ctxt0 = \<^context>;
(*locally fixed parameters -- no type assignment yet*)
val ([x, y], ctxt1) = ctxt0 |> Variable.add_fixes ["x", "y"];
(*t1: most general fixed type; t1': most general arbitrary type*)
val t1 = Syntax.read_term ctxt1 "x";
val t1' = singleton (Variable.polymorphic ctxt1) t1;
(*term u enforces specific type assignment*)
val u = Syntax.read_term ctxt1 "(x::nat) \<equiv> y";
(*official declaration of u -- propagates constraints etc.*)
val ctxt2 = ctxt1 |> Variable.declare_term u;
val t2 = Syntax.read_term ctxt2 "x"; (*x::nat is enforced*)
\<close>
text \<open>
In the above example, the starting context is derived from the toplevel
theory, which means that fixed variables are internalized literally: \<open>x\<close> is
mapped again to \<open>x\<close>, and attempting to fix it again in the subsequent
context is an error. Alternatively, fixed parameters can be renamed
explicitly as follows:
\<close>
ML_val \<open>
val ctxt0 = \<^context>;
val ([x1, x2, x3], ctxt1) =
ctxt0 |> Variable.variant_fixes ["x", "x", "x"];
\<close>
text \<open>
The following ML code can now work with the invented names of \<open>x1\<close>, \<open>x2\<close>,
\<open>x3\<close>, without depending on the details of the system policy for introducing
these variants. Recall that within a proof body the system always invents
fresh ``Skolem constants'', e.g.\ as follows:
\<close>
notepad
begin
ML_prf %"ML"
\<open>val ctxt0 = \<^context>;
val ([x1], ctxt1) = ctxt0 |> Variable.add_fixes ["x"];
val ([x2], ctxt2) = ctxt1 |> Variable.add_fixes ["x"];
val ([x3], ctxt3) = ctxt2 |> Variable.add_fixes ["x"];
val ([y1, y2], ctxt4) =
ctxt3 |> Variable.variant_fixes ["y", "y"];\<close>
end
text \<open>
In this situation \<^ML>\<open>Variable.add_fixes\<close> and \<^ML>\<open>Variable.variant_fixes\<close>
are very similar, but identical name proposals given in a row are only
accepted by the second version.
\<close>
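text \<open>
For illustration, here is a small ad-hoc sketch of the \<open>focus\<close> operation
described above; the quantified proposition is invented for this purpose.
\<close>
ML_val \<open>
val ctxt0 = \<^context>;
(*a nested proposition with explicit quantification*)
val B = Syntax.read_prop ctxt0 "\<And>x y. x \<equiv> y";
(*decompose the outermost \<And> prefix, inventing fixed variables for the body*)
val ((params, B'), ctxt1) = Variable.focus NONE B ctxt0;
\<close>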
section \<open>Assumptions \label{sec:assumptions}\<close>
text \<open>
An \<^emph>\<open>assumption\<close> is a proposition that is postulated in the current
context. Local conclusions may use assumptions as additional facts, but this
imposes implicit hypotheses that weaken the overall statement.
Assumptions are restricted to fixed non-schematic statements, i.e.\ all
generality needs to be expressed by explicit quantifiers. Nevertheless, the
result will be in HHF normal form with outermost quantifiers stripped. For
example, by assuming \<open>\<And>x :: \<alpha>. P x\<close> we get \<open>\<And>x :: \<alpha>. P x \<turnstile> P ?x\<close> for
schematic \<open>?x\<close> of fixed type \<open>\<alpha>\<close>. Local derivations accumulate more and more
explicit references to hypotheses: \<open>A\<^sub>1, \<dots>, A\<^sub>n \<turnstile> B\<close> where \<open>A\<^sub>1, \<dots>, A\<^sub>n\<close>
needs to be covered by the assumptions of the current context.
\<^medskip>
The \<open>add_assms\<close> operation augments the context by local assumptions, which
are parameterized by an arbitrary \<open>export\<close> rule (see below).
The \<open>export\<close> operation moves facts from a (larger) inner context into a
(smaller) outer context, by discharging the difference of the assumptions as
specified by the associated export rules. Note that the discharged portion
is determined by the difference of contexts, not the facts being exported!
There is a separate flag to indicate a goal context, where the result is
meant to refine an enclosing sub-goal of a structured proof state.
\<^medskip>
The most basic export rule discharges assumptions directly by means of the
\<open>\<Longrightarrow>\<close> introduction rule:
\[
\infer[(\<open>\<Longrightarrow>\<hyphen>intro\<close>)]{\<open>\<Gamma> - A \<turnstile> A \<Longrightarrow> B\<close>}{\<open>\<Gamma> \<turnstile> B\<close>}
\]
The variant for goal refinements marks the newly introduced premises, which
causes the canonical Isar goal refinement scheme to enforce unification with
local premises within the goal:
\[
\infer[(\<open>#\<Longrightarrow>\<hyphen>intro\<close>)]{\<open>\<Gamma> - A \<turnstile> #A \<Longrightarrow> B\<close>}{\<open>\<Gamma> \<turnstile> B\<close>}
\]
\<^medskip>
Alternative versions of assumptions may perform arbitrary transformations on
export, as long as the corresponding portion of hypotheses is removed from
the given facts. For example, a local definition works by fixing \<open>x\<close> and
assuming \<open>x \<equiv> t\<close>, with the following export rule to reverse the effect:
\[
\infer[(\<open>\<equiv>\<hyphen>expand\<close>)]{\<open>\<Gamma> - (x \<equiv> t) \<turnstile> B t\<close>}{\<open>\<Gamma> \<turnstile> B x\<close>}
\]
This works, because the assumption \<open>x \<equiv> t\<close> was introduced in a context with
\<open>x\<close> being fresh, so \<open>x\<close> does not occur in \<open>\<Gamma>\<close> here.
\<close>
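text \<open>
As a minimal sketch (with an invented predicate \<open>P\<close> and the usual static
compile-time context), the quantified assumption above looks as follows in
ML; the conclusion is in HHF normal form with the outermost quantifier
stripped.
\<close>
ML_val \<open>
val ctxt0 = \<^context>;
(*assuming \<And>x. P x yields the hypothetical theorem \<And>x. P x \<turnstile> P ?x*)
val asm = Assumption.assume ctxt0 \<^cprop>\<open>\<And>x::'a. P x\<close>;
\<close>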
text %mlref \<open>
\begin{mldecls}
@{define_ML_type Assumption.export} \\
@{define_ML Assumption.assume: "Proof.context -> cterm -> thm"} \\
@{define_ML Assumption.add_assms:
"Assumption.export ->
cterm list -> Proof.context -> thm list * Proof.context"} \\
@{define_ML Assumption.add_assumes: "
cterm list -> Proof.context -> thm list * Proof.context"} \\
- @{define_ML Assumption.export: "bool -> Proof.context -> Proof.context -> thm -> thm"} \\
+ @{define_ML Assumption.export: "Proof.context -> Proof.context -> thm -> thm"} \\
+ @{define_ML Assumption.export_goal: "Proof.context -> Proof.context -> thm -> thm"} \\
@{define_ML Assumption.export_term: "Proof.context -> Proof.context -> term -> term"} \\
\end{mldecls}
\<^descr> Type \<^ML_type>\<open>Assumption.export\<close> represents export rules, as a pair of
functions \<^ML_type>\<open>bool -> cterm list -> (thm -> thm) * (term -> term)\<close>.
The \<^ML_type>\<open>bool\<close> argument indicates goal mode, and the \<^ML_type>\<open>cterm list\<close>
the collection of assumptions to be discharged simultaneously.
\<^descr> \<^ML>\<open>Assumption.assume\<close>~\<open>ctxt A\<close> turns proposition \<open>A\<close> into a primitive
assumption \<open>A \<turnstile> A'\<close>, where the conclusion \<open>A'\<close> is in HHF normal form.
\<^descr> \<^ML>\<open>Assumption.add_assms\<close>~\<open>r As\<close> augments the context by assumptions \<open>As\<close>
with export rule \<open>r\<close>. The resulting facts are hypothetical theorems as
produced by the raw \<^ML>\<open>Assumption.assume\<close>.
\<^descr> \<^ML>\<open>Assumption.add_assumes\<close>~\<open>As\<close> is a special case of
\<^ML>\<open>Assumption.add_assms\<close> where the export rule performs \<open>\<Longrightarrow>\<hyphen>intro\<close> or
\<open>#\<Longrightarrow>\<hyphen>intro\<close>, depending on goal mode.
- \<^descr> \<^ML>\<open>Assumption.export\<close>~\<open>is_goal inner outer thm\<close> exports result \<open>thm\<close>
- from the \<open>inner\<close> context back into the \<open>outer\<close> one; \<open>is_goal = true\<close> means
- this is a goal context. The result is in HHF normal form. Note that
- \<^ML>\<open>Proof_Context.export\<close> combines \<^ML>\<open>Variable.export\<close> and
- \<^ML>\<open>Assumption.export\<close> in the canonical way.
+ \<^descr> \<^ML>\<open>Assumption.export\<close>~\<open>inner outer thm\<close> exports result \<open>thm\<close> from the
+ \<open>inner\<close> context back into the \<open>outer\<close> one; \<^ML>\<open>Assumption.export_goal\<close>
+ does the same in a goal context (\<^theory_text>\<open>fix/assume/show\<close> in Isabelle/Isar). The
+ result is always in HHF normal form. Note that \<^ML>\<open>Proof_Context.export\<close>
+ combines \<^ML>\<open>Variable.export\<close> and \<^ML>\<open>Assumption.export\<close> in the
+ canonical way.
\<^descr> \<^ML>\<open>Assumption.export_term\<close>~\<open>inner outer t\<close> exports term \<open>t\<close> from the
\<open>inner\<close> context back into the \<open>outer\<close> one. This is analogous to
\<^ML>\<open>Assumption.export\<close>, but only takes syntactical aspects of the
context into account (such as locally specified variables as seen in
@{command define} or @{command obtain}).
\<close>
text %mlex \<open>
The following example demonstrates how rules can be derived by building up a
context of assumptions first, and exporting some local fact afterwards. We
refer to \<^theory>\<open>Pure\<close> equality here for testing purposes.
\<close>
ML_val \<open>
(*static compile-time context -- for testing only*)
val ctxt0 = \<^context>;
val ([eq], ctxt1) =
ctxt0 |> Assumption.add_assumes [\<^cprop>\<open>x \<equiv> y\<close>];
val eq' = Thm.symmetric eq;
(*back to original context -- discharges assumption*)
- val r = Assumption.export false ctxt1 ctxt0 eq';
+ val r = Assumption.export ctxt1 ctxt0 eq';
\<close>
text \<open>
Note that the variables of the resulting rule are not generalized. This
would have required fixing them properly in the context beforehand, and
export wrt.\ variables afterwards (cf.\ \<^ML>\<open>Variable.export\<close> or the
combined \<^ML>\<open>Proof_Context.export\<close>).
\<close>
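text \<open>
A possible sketch of that combined approach, reusing the toplevel context and
the same equational assumption as above (the variable names are arbitrary):
\<close>
ML_val \<open>
val ctxt0 = \<^context>;
(*fix the variables first, so that they can be generalized on export*)
val ([x, y], ctxt1) = ctxt0 |> Variable.add_fixes ["x", "y"];
val ([eq], ctxt2) =
  ctxt1 |> Assumption.add_assumes [\<^cprop>\<open>x \<equiv> y\<close>];
val eq' = Thm.symmetric eq;
(*discharge the assumption and generalize the fixed variables*)
val [r] = Proof_Context.export ctxt2 ctxt0 [eq'];
\<close>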
section \<open>Structured goals and results \label{sec:struct-goals}\<close>
text \<open>
Local results are established by monotonic reasoning from facts within a
context. This allows common combinations of theorems, e.g.\ via \<open>\<And>/\<Longrightarrow>\<close>
elimination, resolution rules, or equational reasoning, see
\secref{sec:thms}. Unaccounted context manipulations should be avoided,
notably raw \<open>\<And>/\<Longrightarrow>\<close> introduction or ad-hoc references to free variables or
assumptions not present in the proof context.
\<^medskip>
The \<open>SUBPROOF\<close> combinator allows structuring a tactical proof recursively
by decomposing a selected sub-goal: \<open>(\<And>x. A(x) \<Longrightarrow> B(x)) \<Longrightarrow> \<dots>\<close> is turned into
\<open>B(x) \<Longrightarrow> \<dots>\<close> after fixing \<open>x\<close> and assuming \<open>A(x)\<close>. This means the tactic needs
to solve the conclusion, but may use the premise as a local fact, for
locally fixed variables.
The family of \<open>FOCUS\<close> combinators is similar to \<open>SUBPROOF\<close>, but allows
retaining schematic variables and pending subgoals in the resulting goal state.
The \<open>prove\<close> operation provides an interface for structured backwards
reasoning under program control, with some explicit sanity checks of the
result. The goal context can be augmented by additional fixed variables
(cf.\ \secref{sec:variables}) and assumptions (cf.\
\secref{sec:assumptions}), which will be available as local facts during the
proof and discharged into implications in the result. Type and term
variables are generalized as usual, according to the context.
The \<open>obtain\<close> operation produces results by eliminating existing facts by
means of a given tactic. This acts like a dual conclusion: the proof
demonstrates that the context may be augmented by parameters and
assumptions, without affecting any conclusions that do not mention these
parameters. See also \<^cite>\<open>"isabelle-isar-ref"\<close> for the corresponding Isar
proof command @{command obtain}. Final results, which may not refer to the
parameters in the conclusion, need to be exported explicitly into the original
context.
\<close>
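text \<open>
For a first impression (an ad-hoc sketch with an invented goal statement),
the plain \<open>focus\<close> operation can be applied to a raw goal state as follows;
the precise ML interface is listed below.
\<close>
ML_val \<open>
val ctxt0 = \<^context>;
(*an initial goal state with a single quantified subgoal*)
val st = Goal.init \<^cprop>\<open>\<And>x::'a. x \<equiv> x\<close>;
(*focus on subgoal 1: the parameter x becomes fixed in the focus context*)
val ({context = ctxt1, params, ...}, st') = Subgoal.focus ctxt0 1 NONE st;
\<close>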
text %mlref \<open>
\begin{mldecls}
@{define_ML SUBPROOF: "(Subgoal.focus -> tactic) ->
Proof.context -> int -> tactic"} \\
@{define_ML Subgoal.FOCUS: "(Subgoal.focus -> tactic) ->
Proof.context -> int -> tactic"} \\
@{define_ML Subgoal.FOCUS_PREMS: "(Subgoal.focus -> tactic) ->
Proof.context -> int -> tactic"} \\
@{define_ML Subgoal.FOCUS_PARAMS: "(Subgoal.focus -> tactic) ->
Proof.context -> int -> tactic"} \\
@{define_ML Subgoal.focus: "Proof.context -> int -> binding list option ->
thm -> Subgoal.focus * thm"} \\
@{define_ML Subgoal.focus_prems: "Proof.context -> int -> binding list option ->
thm -> Subgoal.focus * thm"} \\
@{define_ML Subgoal.focus_params: "Proof.context -> int -> binding list option ->
thm -> Subgoal.focus * thm"} \\
\end{mldecls}
\begin{mldecls}
@{define_ML Goal.prove: "Proof.context -> string list -> term list -> term ->
({prems: thm list, context: Proof.context} -> tactic) -> thm"} \\
@{define_ML Goal.prove_common: "Proof.context -> int option ->
string list -> term list -> term list ->
({prems: thm list, context: Proof.context} -> tactic) -> thm list"} \\
\end{mldecls}
\begin{mldecls}
@{define_ML Obtain.result: "(Proof.context -> tactic) -> thm list ->
Proof.context -> ((string * cterm) list * thm list) * Proof.context"} \\
\end{mldecls}
\<^descr> \<^ML>\<open>SUBPROOF\<close>~\<open>tac ctxt i\<close> decomposes the structure of the specified
sub-goal, producing an extended context and a reduced goal, which needs to
be solved by the given tactic. All schematic parameters of the goal are
imported into the context as fixed ones, which may not be instantiated in
the sub-proof.
\<^descr> \<^ML>\<open>Subgoal.FOCUS\<close>, \<^ML>\<open>Subgoal.FOCUS_PREMS\<close>, and \<^ML>\<open>Subgoal.FOCUS_PARAMS\<close> are similar to \<^ML>\<open>SUBPROOF\<close>, but are slightly more
flexible: only the specified parts of the subgoal are imported into the
context, and the body tactic may introduce new subgoals and schematic
variables.
\<^descr> \<^ML>\<open>Subgoal.focus\<close>, \<^ML>\<open>Subgoal.focus_prems\<close>, \<^ML>\<open>Subgoal.focus_params\<close>
extract the focus information from a goal state in the same way as the
corresponding tacticals above. This is occasionally useful to experiment
without writing actual tactics yet.
\<^descr> \<^ML>\<open>Goal.prove\<close>~\<open>ctxt xs As C tac\<close> states goal \<open>C\<close> in the context
augmented by fixed variables \<open>xs\<close> and assumptions \<open>As\<close>, and applies tactic
\<open>tac\<close> to solve it. The latter may depend on the local assumptions being
presented as facts. The result is in HHF normal form.
\<^descr> \<^ML>\<open>Goal.prove_common\<close>~\<open>ctxt fork_pri\<close> is the common form to state and
prove a simultaneous goal statement, where \<^ML>\<open>Goal.prove\<close> is a convenient
shorthand that is most frequently used in applications.
The given list of simultaneous conclusions is encoded in the goal state by
means of Pure conjunction: \<^ML>\<open>Goal.conjunction_tac\<close> will turn this into a
collection of individual subgoals, but note that the original multi-goal
state is usually required for advanced induction.
It is possible to provide an optional priority for a forked proof, typically
\<^ML>\<open>SOME ~1\<close>, while \<^ML>\<open>NONE\<close> means the proof is immediate (sequential)
as for \<^ML>\<open>Goal.prove\<close>. Note that a forked proof does not exhibit any
failures in the usual way via exceptions in ML, but accumulates error
situations under the execution id of the running transaction. Thus the
system is able to expose error messages ultimately to the end-user, even
though the subsequent ML code misses them.
\<^descr> \<^ML>\<open>Obtain.result\<close>~\<open>tac thms ctxt\<close> eliminates the given facts using a
tactic, which results in additional fixed variables and assumptions in the
context. Final results need to be exported explicitly.
\<close>
text %mlex \<open>
The following minimal example illustrates how to access the focus
information of a structured goal state.
\<close>
notepad
begin
fix A B C :: "'a \<Rightarrow> bool"
have "\<And>x. A x \<Longrightarrow> B x \<Longrightarrow> C x"
ML_val
\<open>val {goal, context = goal_ctxt, ...} = @{Isar.goal};
val (focus as {params, asms, concl, ...}, goal') =
Subgoal.focus goal_ctxt 1 (SOME [\<^binding>\<open>x\<close>]) goal;
val [A, B] = #prems focus;
val [(_, x)] = #params focus;\<close>
sorry
end
text \<open>
\<^medskip>
The next example demonstrates forward-elimination in a local context, using
\<^ML>\<open>Obtain.result\<close>.
\<close>
notepad
begin
assume ex: "\<exists>x. B x"
ML_prf %"ML"
\<open>val ctxt0 = \<^context>;
val (([(_, x)], [B]), ctxt1) = ctxt0
|> Obtain.result (fn _ => eresolve_tac ctxt0 @{thms exE} 1) [@{thm ex}];\<close>
ML_prf %"ML"
\<open>singleton (Proof_Context.export ctxt1 ctxt0) @{thm refl};\<close>
ML_prf %"ML"
\<open>Proof_Context.export ctxt1 ctxt0 [Thm.reflexive x]
handle ERROR msg => (warning msg; []);\<close>
end
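text \<open>
\<^medskip>
A further sketch illustrates \<^ML>\<open>Goal.prove\<close> under program control. The
example is hypothetical and for testing only: it states the trivial goal
\<open>x \<equiv> x\<close> over a fixed variable \<open>x\<close> and solves it by resolution with
\<^ML>\<open>Drule.reflexive_thm\<close>; in the result, \<open>x\<close> is generalized to a schematic
variable.
\<close>
ML_val \<open>
val ctxt = \<^context>;
val goal = \<^prop>\<open>x \<equiv> x\<close>;
val th =
  Goal.prove ctxt ["x"] [] goal
    (fn {context = goal_ctxt, ...} =>
      resolve_tac goal_ctxt [Drule.reflexive_thm] 1);
\<close>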
end
diff --git a/src/Doc/Isar_Ref/Generic.thy b/src/Doc/Isar_Ref/Generic.thy
--- a/src/Doc/Isar_Ref/Generic.thy
+++ b/src/Doc/Isar_Ref/Generic.thy
@@ -1,1826 +1,1826 @@
(*:maxLineLen=78:*)
theory Generic
imports Main Base
begin
chapter \<open>Generic tools and packages \label{ch:gen-tools}\<close>
section \<open>Configuration options \label{sec:config}\<close>
text \<open>
Isabelle/Pure maintains a record of named configuration options within the
theory or proof context, with values of type \<^ML_type>\<open>bool\<close>, \<^ML_type>\<open>int\<close>, \<^ML_type>\<open>real\<close>, or \<^ML_type>\<open>string\<close>. Tools may declare options in
ML, and then refer to these values (relative to the context). Thus global
reference variables are easily avoided. The user may change the value of a
configuration option by means of an associated attribute of the same name.
This form of context declaration works particularly well with commands such
as @{command "declare"} or @{command "using"} like this:
\<close>
(*<*)experiment begin(*>*)
declare [[show_main_goal = false]]
notepad
begin
note [[show_main_goal = true]]
end
(*<*)end(*>*)
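text \<open>
The @{command "using"} form mentioned above changes an option for the
duration of a single proof refinement only. A minimal sketch:
\<close>
(*<*)experiment begin(*>*)
lemma "x = x"
  using [[show_main_goal = true]]
  by (rule refl)
(*<*)end(*>*)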
text \<open>
\begin{matharray}{rcll}
@{command_def "print_options"} & : & \<open>context \<rightarrow>\<close> \\
\end{matharray}
\<^rail>\<open>
@@{command print_options} ('!'?)
;
@{syntax name} ('=' ('true' | 'false' | @{syntax int} | @{syntax float} | @{syntax name}))?
\<close>
\<^descr> @{command "print_options"} prints the available configuration options,
with names, types, and current values; the ``\<open>!\<close>'' option indicates extra
verbosity.
\<^descr> \<open>name = value\<close> as an attribute expression modifies the named option, with
the syntax of the value depending on the option's type. For \<^ML_type>\<open>bool\<close>
the default value is \<open>true\<close>. Any attempt to change a global option in a
local context is ignored.
\<close>
section \<open>Basic proof tools\<close>
subsection \<open>Miscellaneous methods and attributes \label{sec:misc-meth-att}\<close>
text \<open>
\begin{matharray}{rcl}
@{method_def unfold} & : & \<open>method\<close> \\
@{method_def fold} & : & \<open>method\<close> \\
@{method_def insert} & : & \<open>method\<close> \\[0.5ex]
@{method_def erule}\<open>\<^sup>*\<close> & : & \<open>method\<close> \\
@{method_def drule}\<open>\<^sup>*\<close> & : & \<open>method\<close> \\
@{method_def frule}\<open>\<^sup>*\<close> & : & \<open>method\<close> \\
@{method_def intro} & : & \<open>method\<close> \\
@{method_def elim} & : & \<open>method\<close> \\
@{method_def fail} & : & \<open>method\<close> \\
@{method_def succeed} & : & \<open>method\<close> \\
@{method_def sleep} & : & \<open>method\<close> \\
\end{matharray}
\<^rail>\<open>
(@@{method fold} | @@{method unfold} | @@{method insert}) @{syntax thms}
;
(@@{method erule} | @@{method drule} | @@{method frule})
('(' @{syntax nat} ')')? @{syntax thms}
;
(@@{method intro} | @@{method elim}) @{syntax thms}?
;
@@{method sleep} @{syntax real}
\<close>
\<^descr> @{method unfold}~\<open>a\<^sub>1 \<dots> a\<^sub>n\<close> and @{method fold}~\<open>a\<^sub>1 \<dots> a\<^sub>n\<close> expand (or
fold back) the given definitions throughout all goals; any chained facts
provided are inserted into the goal and subject to rewriting as well.
Unfolding works in two stages: first, the given equations are used directly
for rewriting; second, the equations are passed through the attribute
@{attribute_ref abs_def} before rewriting --- to ensure that definitions are
fully expanded, regardless of the actual parameters that are provided.
\<^descr> @{method insert}~\<open>a\<^sub>1 \<dots> a\<^sub>n\<close> inserts theorems as facts into all goals of
the proof state. Note that current facts indicated for forward chaining are
ignored.
\<^descr> @{method erule}~\<open>a\<^sub>1 \<dots> a\<^sub>n\<close>, @{method drule}~\<open>a\<^sub>1 \<dots> a\<^sub>n\<close>, and @{method
frule}~\<open>a\<^sub>1 \<dots> a\<^sub>n\<close> are similar to the basic @{method rule} method (see
\secref{sec:pure-meth-att}), but apply rules by elim-resolution,
destruct-resolution, and forward-resolution, respectively \<^cite>\<open>"isabelle-implementation"\<close>. The optional natural number argument (default 0)
specifies additional assumption steps to be performed here.
Note that these methods are improper ones, mainly serving for
experimentation and tactic script emulation. Different modes of basic rule
application are usually expressed in Isar at the proof language level,
rather than via implicit proof state manipulations. For example, a proper
single-step elimination would be done using the plain @{method rule} method,
with forward chaining of current facts.
\<^descr> @{method intro} and @{method elim} repeatedly refine some goal by intro-
or elim-resolution, after having inserted any chained facts. Exactly the
rules given as arguments are taken into account; this allows fine-tuned
decomposition of a proof problem, in contrast to common automated tools.
\<^descr> @{method fail} yields an empty result sequence; it is the identity of the
``\<open>|\<close>'' method combinator (cf.\ \secref{sec:proof-meth}).
\<^descr> @{method succeed} yields a single (unchanged) result; it is the identity
of the ``\<open>,\<close>'' method combinator (cf.\ \secref{sec:proof-meth}).
\<^descr> @{method sleep}~\<open>s\<close> succeeds after a real-time delay of \<open>s\<close> seconds. This
is occasionally useful for demonstration and testing purposes.
\begin{matharray}{rcl}
@{attribute_def tagged} & : & \<open>attribute\<close> \\
@{attribute_def untagged} & : & \<open>attribute\<close> \\[0.5ex]
@{attribute_def THEN} & : & \<open>attribute\<close> \\
@{attribute_def unfolded} & : & \<open>attribute\<close> \\
@{attribute_def folded} & : & \<open>attribute\<close> \\
@{attribute_def abs_def} & : & \<open>attribute\<close> \\[0.5ex]
@{attribute_def rotated} & : & \<open>attribute\<close> \\
@{attribute_def (Pure) elim_format} & : & \<open>attribute\<close> \\
@{attribute_def no_vars}\<open>\<^sup>*\<close> & : & \<open>attribute\<close> \\
\end{matharray}
\<^rail>\<open>
@@{attribute tagged} @{syntax name} @{syntax name}
;
@@{attribute untagged} @{syntax name}
;
@@{attribute THEN} ('[' @{syntax nat} ']')? @{syntax thm}
;
(@@{attribute unfolded} | @@{attribute folded}) @{syntax thms}
;
@@{attribute rotated} @{syntax int}?
\<close>
\<^descr> @{attribute tagged}~\<open>name value\<close> and @{attribute untagged}~\<open>name\<close> add and
remove \<^emph>\<open>tags\<close> of some theorem. Tags may be any list of string pairs that
serve as formal comment. The first string is considered the tag name, the
second its value. Note that @{attribute untagged} removes any tags of the
same name.
\<^descr> @{attribute THEN}~\<open>a\<close> composes rules by resolution; it resolves with the
first premise of \<open>a\<close> (an alternative position may be also specified). See
also \<^ML_infix>\<open>RS\<close> in \<^cite>\<open>"isabelle-implementation"\<close>.
\<^descr> @{attribute unfolded}~\<open>a\<^sub>1 \<dots> a\<^sub>n\<close> and @{attribute folded}~\<open>a\<^sub>1 \<dots> a\<^sub>n\<close>
expand and fold back again the given definitions throughout a rule.
\<^descr> @{attribute abs_def} turns an equation of the form \<^prop>\<open>f x y \<equiv> t\<close>
into \<^prop>\<open>f \<equiv> \<lambda>x y. t\<close>, which ensures that @{method simp} steps always
expand it. This also works for object-logic equality.
\<^descr> @{attribute rotated}~\<open>n\<close> rotates the premises of a theorem by \<open>n\<close> (default
1).
\<^descr> @{attribute (Pure) elim_format} turns a destruction rule into elimination
rule format, by resolving with the rule \<^prop>\<open>PROP A \<Longrightarrow> (PROP A \<Longrightarrow> PROP B) \<Longrightarrow>
PROP B\<close>.
Note that the Classical Reasoner (\secref{sec:classical}) provides its own
version of this operation.
\<^descr> @{attribute no_vars} replaces schematic variables by free ones; this is
mainly for tuning output of pretty printed theorems.
\<close>
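text \<open>
For example, @{method unfold} may be used to expand a local definition before
finishing off with the Simplifier. The following snippet is a minimal sketch
with a hypothetical constant \<open>double\<close>:
\<close>
(*<*)experiment begin(*>*)
definition "double x = x + (x::nat)"
lemma "double 2 = 4"
  by (unfold double_def) simp
(*<*)end(*>*)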
subsection \<open>Low-level equational reasoning\<close>
text \<open>
\begin{matharray}{rcl}
@{method_def subst} & : & \<open>method\<close> \\
@{method_def hypsubst} & : & \<open>method\<close> \\
@{method_def split} & : & \<open>method\<close> \\
\end{matharray}
\<^rail>\<open>
@@{method subst} ('(' 'asm' ')')? \<newline> ('(' (@{syntax nat}+) ')')? @{syntax thm}
;
@@{method split} @{syntax thms}
\<close>
These methods provide low-level facilities for equational reasoning that are
intended for specialized applications only. Normally, single step
calculations would be performed in a structured text (see also
\secref{sec:calculation}), while the Simplifier methods provide the
canonical way for automated normalization (see \secref{sec:simplifier}).
\<^descr> @{method subst}~\<open>eq\<close> performs a single substitution step using rule \<open>eq\<close>,
which may be either a meta or object equality.
\<^descr> @{method subst}~\<open>(asm) eq\<close> substitutes in an assumption.
\<^descr> @{method subst}~\<open>(i \<dots> j) eq\<close> performs several substitutions in the
conclusion. The numbers \<open>i\<close> to \<open>j\<close> indicate the positions to substitute at.
Positions are ordered from the top of the term tree moving down from left to
right. For example, in \<open>(a + b) + (c + d)\<close> there are three positions where
commutativity of \<open>+\<close> is applicable: 1 refers to \<open>a + b\<close>, 2 to the whole
term, and 3 to \<open>c + d\<close>.
If the positions in the list \<open>(i \<dots> j)\<close> are non-overlapping (e.g.\ \<open>(2 3)\<close> in
\<open>(a + b) + (c + d)\<close>) you may assume all substitutions are performed
simultaneously. Otherwise the behaviour of \<open>subst\<close> is not specified.
\<^descr> @{method subst}~\<open>(asm) (i \<dots> j) eq\<close> performs the substitutions in the
assumptions. The positions refer to the assumptions in order from left to
right. For example, given in a goal of the form \<open>P (a + b) \<Longrightarrow> P (c + d) \<Longrightarrow> \<dots>\<close>,
position 1 of commutativity of \<open>+\<close> is the subterm \<open>a + b\<close> and position 2 is
the subterm \<open>c + d\<close>.
\<^descr> @{method hypsubst} performs substitution using some assumption; this only
works for equations of the form \<open>x = t\<close> where \<open>x\<close> is a free or bound
variable.
\<^descr> @{method split}~\<open>a\<^sub>1 \<dots> a\<^sub>n\<close> performs single-step case splitting using the
given rules. Splitting is performed in the conclusion or some assumption of
the subgoal, depending on the structure of the rule.
Note that the @{method simp} method already involves repeated application of
split rules as declared in the current context, using @{attribute split},
for example.
\<close>
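text \<open>
For example, a single @{method subst} step with commutativity of addition (a
minimal sketch, using the standard rule @{thm [source] add.commute} of
Isabelle/HOL):
\<close>
lemma fixes a b :: nat shows "a + b = b + a"
  apply (subst add.commute)
  apply (rule refl)
  done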
section \<open>The Simplifier \label{sec:simplifier}\<close>
text \<open>
The Simplifier performs conditional and unconditional rewriting and uses
contextual information: rule declarations in the background theory or local
proof context are taken into account, as well as chained facts and subgoal
premises (``local assumptions''). There are several general hooks for
modifying the simplification strategy, or for incorporating other proof tools
that solve sub-problems, produce rewrite rules on demand, etc.
The rewriting strategy is always strictly bottom up, except for congruence
rules, which are applied while descending into a term. Conditions in
conditional rewrite rules are solved recursively before the rewrite rule is
applied.
The default Simplifier setup of major object logics (HOL, HOLCF, FOL, ZF)
makes the Simplifier ready for immediate use, without engaging with its
internal structures. Thus it serves as a general-purpose proof tool with the
main focus on equational reasoning, and a bit more than that.
\<close>
subsection \<open>Simplification methods \label{sec:simp-meth}\<close>
text \<open>
\begin{tabular}{rcll}
@{method_def simp} & : & \<open>method\<close> \\
@{method_def simp_all} & : & \<open>method\<close> \\
\<open>Pure.\<close>@{method_def (Pure) simp} & : & \<open>method\<close> \\
\<open>Pure.\<close>@{method_def (Pure) simp_all} & : & \<open>method\<close> \\
@{attribute_def simp_depth_limit} & : & \<open>attribute\<close> & default \<open>100\<close> \\
\end{tabular}
\<^medskip>
\<^rail>\<open>
(@@{method simp} | @@{method simp_all}) opt? (@{syntax simpmod} * )
;
opt: '(' ('no_asm' | 'no_asm_simp' | 'no_asm_use' | 'asm_lr' ) ')'
;
@{syntax_def simpmod}: ('add' | 'del' | 'flip' | 'only' |
'split' (() | '!' | 'del') | 'cong' (() | 'add' | 'del'))
':' @{syntax thms}
\<close>
\<^descr> @{method simp} invokes the Simplifier on the first subgoal, after
inserting chained facts as additional goal premises; further rule
declarations may be included via \<open>(simp add: facts)\<close>. The proof method fails
if the subgoal remains unchanged after simplification.
Note that the original goal premises and chained facts are subject to
simplification themselves, while declarations via \<open>add\<close>/\<open>del\<close> merely follow
the policies of the object-logic to extract rewrite rules from theorems,
without further simplification. This may lead to slightly different behavior
in either case, which might be required precisely like that in some boundary
situations to perform the intended simplification step!
\<^medskip>
Modifier \<open>flip\<close> deletes the following theorems from the simpset and adds
their symmetric version (i.e.\ lhs and rhs exchanged). No warning is shown
if the original theorem was not present.
\<^medskip>
The \<open>only\<close> modifier first removes all other rewrite rules, looper tactics
(including split rules), congruence rules, and then behaves like \<open>add\<close>.
Implicit solvers remain, which means that trivial rules like reflexivity or
introduction of \<open>True\<close> are available to solve the simplified subgoals, but
also non-trivial tools like linear arithmetic in HOL. The latter may lead to
some surprise about the meaning of ``only'' in Isabelle/HOL compared to
English!
\<^medskip>
The \<open>split\<close> modifiers add or delete rules for the Splitter (see also
\secref{sec:simp-strategies} on the looper). This works only if the
Simplifier method has been properly set up to include the Splitter (all major
object logics such as HOL, HOLCF, FOL, ZF do this already).
The \<open>!\<close> option causes the split rules to be used aggressively:
after each application of a split rule in the conclusion, the \<open>safe\<close>
tactic of the classical reasoner (see \secref{sec:classical:partial})
is applied to the new goal. The net effect is that the goal is split into
the different cases. This option can speed up simplification of goals
with many nested conditional or case expressions significantly.
There is also a separate @{method_ref split} method available for
single-step case splitting. The effect of repeatedly applying \<open>(split thms)\<close>
can be imitated by ``\<open>(simp only: split: thms)\<close>''.
\<^medskip>
The \<open>cong\<close> modifiers add or delete Simplifier congruence rules (see also
\secref{sec:simp-rules}); the default is to add.
\<^descr> @{method simp_all} is similar to @{method simp}, but acts on all goals,
working backwards from the last to the first one as usual in Isabelle.\<^footnote>\<open>The
order is irrelevant for goals without schematic variables, so simplification
might actually be performed in parallel here.\<close>
Chained facts are inserted into all subgoals, before the simplification
process starts. Further rule declarations are the same as for @{method
simp}.
The proof method fails if all subgoals remain unchanged after
simplification.
\<^descr> @{attribute simp_depth_limit} limits the number of recursive invocations
of the Simplifier during conditional rewriting.
By default the Simplifier methods above take local assumptions fully into
account, using equational assumptions in the subsequent normalization
process, or simplifying assumptions themselves. Further options allow
fine-tuning the behavior of the Simplifier in this respect, corresponding to a
variety of ML tactics as follows.\<^footnote>\<open>Unlike the corresponding Isar proof
methods, the ML tactics do not insist on changing the goal state.\<close>
\begin{center}
\small
\begin{tabular}{|l|l|p{0.3\textwidth}|}
\hline
Isar method & ML tactic & behavior \\\hline
\<open>(simp (no_asm))\<close> & \<^ML>\<open>simp_tac\<close> & assumptions are ignored completely
\\\hline
\<open>(simp (no_asm_simp))\<close> & \<^ML>\<open>asm_simp_tac\<close> & assumptions are used in the
simplification of the conclusion but are not themselves simplified \\\hline
\<open>(simp (no_asm_use))\<close> & \<^ML>\<open>full_simp_tac\<close> & assumptions are simplified but
are not used in the simplification of each other or the conclusion \\\hline
\<open>(simp)\<close> & \<^ML>\<open>asm_full_simp_tac\<close> & assumptions are used in the
simplification of the conclusion and to simplify other assumptions \\\hline
\<open>(simp (asm_lr))\<close> & \<^ML>\<open>asm_lr_simp_tac\<close> & compatibility mode: an
assumption is only used for simplifying assumptions which are to the right
of it \\\hline
\end{tabular}
\end{center}
\<^medskip>
In Isabelle/Pure, proof methods @{method (Pure) simp} and @{method (Pure)
simp_all} only know about meta-equality \<open>\<equiv>\<close>. Any new object-logic needs to
re-define these methods via \<^ML>\<open>Simplifier.method_setup\<close> in ML:
Isabelle/FOL or Isabelle/HOL may serve as blue-prints.
\<close>
subsubsection \<open>Examples\<close>
text \<open>
We consider basic algebraic simplifications in Isabelle/HOL. The rather
trivial goal \<^prop>\<open>0 + (x + 0) = x + 0 + 0\<close> looks like a good candidate
to be solved by a single call of @{method simp}:
\<close>
lemma "0 + (x + 0) = x + 0 + 0" apply simp? oops
text \<open>
The above attempt \<^emph>\<open>fails\<close>, because \<^term>\<open>0\<close> and \<^term>\<open>(+)\<close> in the
HOL library are declared as generic type class operations, without stating
any algebraic laws yet. More specific types are required to get access to
certain standard simplifications of the theory context, e.g.\ like this:\<close>
lemma fixes x :: nat shows "0 + (x + 0) = x + 0 + 0" by simp
lemma fixes x :: int shows "0 + (x + 0) = x + 0 + 0" by simp
lemma fixes x :: "'a :: monoid_add" shows "0 + (x + 0) = x + 0 + 0" by simp
text \<open>
\<^medskip>
In many cases, assumptions of a subgoal are also needed in the
simplification process. For example:
\<close>
lemma fixes x :: nat shows "x = 0 \<Longrightarrow> x + x = 0" by simp
lemma fixes x :: nat assumes "x = 0" shows "x + x = 0" apply simp oops
lemma fixes x :: nat assumes "x = 0" shows "x + x = 0" using assms by simp
text \<open>
As seen above, local assumptions that shall contribute to simplification
need to be part of the subgoal already, or indicated explicitly for use by
the subsequent method invocation. Either too little or too much information
can make simplification fail, for different reasons.
In the next example the malicious assumption \<^prop>\<open>\<And>x::nat. f x = g (f (g
x))\<close> does not contribute to solve the problem, but makes the default
@{method simp} method loop: the rewrite rule \<open>f ?x \<equiv> g (f (g ?x))\<close> extracted
from the assumption does not terminate. The Simplifier notices certain
simple forms of nontermination, but not this one. The problem can be solved
nonetheless, by ignoring assumptions via special options as explained
before:
\<close>
lemma "(\<And>x::nat. f x = g (f (g x))) \<Longrightarrow> f 0 = f 0 + 0"
by (simp (no_asm))
text \<open>
The latter form is typical for long unstructured proof scripts, where the
control over the goal content is limited. In structured proofs it is usually
better to avoid pushing too many facts into the goal state in the first
place. Assumptions in the Isar proof context do not intrude on the reasoning if
not used explicitly. This is illustrated for a toplevel statement and a
local proof body as follows:
\<close>
lemma
assumes "\<And>x::nat. f x = g (f (g x))"
shows "f 0 = f 0 + 0" by simp
notepad
begin
assume "\<And>x::nat. f x = g (f (g x))"
have "f 0 = f 0 + 0" by simp
end
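text \<open>
\<^medskip>
The \<open>only\<close> modifier described above restricts rewriting to exactly the given
rules, while implicit solvers (e.g.\ reflexivity) remain available. A small
sketch, using standard rules of Isabelle/HOL:
\<close>
lemma fixes x :: nat shows "x + 0 = x"
  by (simp only: add.right_neutral)
lemma fixes x y :: nat shows "x + y = y + x"
  by (simp only: add.commute)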
text \<open>
\<^medskip>
Because assumptions may simplify each other, there can be very subtle cases
of nontermination. For example, the regular @{method simp} method applied to
\<^prop>\<open>P (f x) \<Longrightarrow> y = x \<Longrightarrow> f x = f y \<Longrightarrow> Q\<close> gives rise to the infinite
reduction sequence
\[
\<open>P (f x)\<close> \stackrel{\<open>f x \<equiv> f y\<close>}{\longmapsto}
\<open>P (f y)\<close> \stackrel{\<open>y \<equiv> x\<close>}{\longmapsto}
\<open>P (f x)\<close> \stackrel{\<open>f x \<equiv> f y\<close>}{\longmapsto} \cdots
\]
whereas applying the same to \<^prop>\<open>y = x \<Longrightarrow> f x = f y \<Longrightarrow> P (f x) \<Longrightarrow> Q\<close>
terminates (without solving the goal):
\<close>
lemma "y = x \<Longrightarrow> f x = f y \<Longrightarrow> P (f x) \<Longrightarrow> Q"
apply simp
oops
text \<open>
See also \secref{sec:simp-trace} for options to enable Simplifier trace
mode, which often helps to diagnose problems with rewrite systems.
\<close>
subsection \<open>Declaring rules \label{sec:simp-rules}\<close>
text \<open>
\begin{matharray}{rcl}
@{attribute_def simp} & : & \<open>attribute\<close> \\
@{attribute_def split} & : & \<open>attribute\<close> \\
@{attribute_def cong} & : & \<open>attribute\<close> \\
@{command_def "print_simpset"}\<open>\<^sup>*\<close> & : & \<open>context \<rightarrow>\<close> \\
\end{matharray}
\<^rail>\<open>
(@@{attribute simp} | @@{attribute cong}) (() | 'add' | 'del') |
@@{attribute split} (() | '!' | 'del')
;
@@{command print_simpset} ('!'?)
\<close>
\<^descr> @{attribute simp} declares rewrite rules, by adding or deleting them from
the simpset within the theory or proof context. Rewrite rules are theorems
expressing some form of equality, for example:
\<open>Suc ?m + ?n = ?m + Suc ?n\<close> \\
\<open>?P \<and> ?P \<longleftrightarrow> ?P\<close> \\
\<open>?A \<union> ?B \<equiv> {x. x \<in> ?A \<or> x \<in> ?B}\<close>
\<^medskip>
Conditional rewrites such as \<open>?m < ?n \<Longrightarrow> ?m div ?n = 0\<close> are also permitted;
the conditions can be arbitrary formulas.
\<^medskip>
Internally, all rewrite rules are translated into Pure equalities, theorems
with conclusion \<open>lhs \<equiv> rhs\<close>. The simpset contains a function for extracting
equalities from arbitrary theorems, which is usually installed when the
object-logic is configured initially. For example, \<open>\<not> ?x \<in> {}\<close> could be
turned into \<open>?x \<in> {} \<equiv> False\<close>. Theorems that are declared as @{attribute
simp} and local assumptions within a goal are treated uniformly in this
respect.
The Simplifier accepts the following formats for the \<open>lhs\<close> term:
\<^enum> First-order patterns, considering the sublanguage of application of
constant operators to variable operands, without \<open>\<lambda>\<close>-abstractions or
functional variables. For example:
\<open>(?x + ?y) + ?z \<equiv> ?x + (?y + ?z)\<close> \\
\<open>f (f ?x ?y) ?z \<equiv> f ?x (f ?y ?z)\<close>
\<^enum> Higher-order patterns in the sense of \<^cite>\<open>"nipkow-patterns"\<close>. These
are terms in \<open>\<beta>\<close>-normal form (this will always be the case unless you have
done something strange) where each occurrence of an unknown is of the form
\<open>?F x\<^sub>1 \<dots> x\<^sub>n\<close>, where the \<open>x\<^sub>i\<close> are distinct bound variables.
For example, \<open>(\<forall>x. ?P x \<and> ?Q x) \<equiv> (\<forall>x. ?P x) \<and> (\<forall>x. ?Q x)\<close> or its
symmetric form, since the \<open>rhs\<close> is also a higher-order pattern.
\<^enum> Physical first-order patterns over raw \<open>\<lambda>\<close>-term structure without
\<open>\<alpha>\<beta>\<eta>\<close>-equality; abstractions and bound variables are treated like
quasi-constant term material.
For example, the rule \<open>?f ?x \<in> range ?f = True\<close> rewrites the term \<open>g a \<in>
range g\<close> to \<open>True\<close>, but will fail to match \<open>g (h b) \<in> range (\<lambda>x. g (h
x))\<close>. However, offending subterms (in our case \<open>?f ?x\<close>, which is not a
pattern) can be replaced by adding new variables and conditions like this:
\<open>?y = ?f ?x \<Longrightarrow> ?y \<in> range ?f = True\<close> is acceptable as a conditional rewrite
rule of the second category since conditions can be arbitrary terms.
\<^descr> @{attribute split} declares case split rules.
\<^descr> @{attribute cong} declares congruence rules to the Simplifier context.
Congruence rules are equalities of the form @{text [display]
"\<dots> \<Longrightarrow> f ?x\<^sub>1 \<dots> ?x\<^sub>n = f ?y\<^sub>1 \<dots> ?y\<^sub>n"}
This controls the simplification of the arguments of \<open>f\<close>. For example, some
arguments can be simplified under additional assumptions:
@{text [display]
"?P\<^sub>1 \<longleftrightarrow> ?Q\<^sub>1 \<Longrightarrow>
(?Q\<^sub>1 \<Longrightarrow> ?P\<^sub>2 \<longleftrightarrow> ?Q\<^sub>2) \<Longrightarrow>
(?P\<^sub>1 \<longrightarrow> ?P\<^sub>2) \<longleftrightarrow> (?Q\<^sub>1 \<longrightarrow> ?Q\<^sub>2)"}
Given this rule, the Simplifier assumes \<open>?Q\<^sub>1\<close> and extracts rewrite rules
from it when simplifying \<open>?P\<^sub>2\<close>. Such local assumptions are effective for
rewriting formulae such as \<open>x = 0 \<longrightarrow> y + x = y\<close>.
%FIXME
%The local assumptions are also provided as theorems to the solver;
%see \secref{sec:simp-solver} below.
\<^medskip>
The following congruence rule for bounded quantifiers also supplies
contextual information --- about the bound variable: @{text [display]
"(?A = ?B) \<Longrightarrow>
(\<And>x. x \<in> ?B \<Longrightarrow> ?P x \<longleftrightarrow> ?Q x) \<Longrightarrow>
(\<forall>x \<in> ?A. ?P x) \<longleftrightarrow> (\<forall>x \<in> ?B. ?Q x)"}
\<^medskip>
This congruence rule for conditional expressions can supply contextual
information for simplifying the arms: @{text [display]
"?p = ?q \<Longrightarrow>
(?q \<Longrightarrow> ?a = ?c) \<Longrightarrow>
(\<not> ?q \<Longrightarrow> ?b = ?d) \<Longrightarrow>
(if ?p then ?a else ?b) = (if ?q then ?c else ?d)"}
A congruence rule can also \<^emph>\<open>prevent\<close> simplification of some arguments. Here
is an alternative congruence rule for conditional expressions that conforms
to non-strict functional evaluation: @{text [display]
"?p = ?q \<Longrightarrow>
(if ?p then ?a else ?b) = (if ?q then ?a else ?b)"}
Only the first argument is simplified; the others remain unchanged. This can
make simplification much faster, but may require an extra case split over
the condition \<open>?q\<close> to prove the goal.
\<^descr> @{command "print_simpset"} prints the collection of rules declared to the
Simplifier, which is also known as ``simpset'' internally; the ``\<open>!\<close>''
option indicates extra verbosity.
The implicit simpset of the theory context is propagated monotonically
through the theory hierarchy: when forming a new theory, the union of the
simpsets of its imports is taken as the starting point. Also note that
definitional packages like @{command "datatype"}, @{command "primrec"},
@{command "fun"} routinely declare Simplifier rules to the target context,
while plain @{command "definition"} is an exception in \<^emph>\<open>not\<close> declaring
anything.
\<^medskip>
It is up to the user to manipulate the current simpset further by explicitly
adding or deleting theorems as simplification rules, or installing other
tools via simplification procedures (\secref{sec:simproc}). Good simpsets
are hard to design. Rules that obviously simplify, like \<open>?n + 0 \<equiv> ?n\<close>, are
good candidates for the implicit simpset, unless a special non-normalizing
behavior of certain operations is intended. More specific rules (such as
distributive laws, which duplicate subterms) should be added only for
specific proof steps. Conversely, sometimes a rule needs to be deleted just
for some part of a proof. The need of frequent additions or deletions may
indicate a poorly designed simpset.
\begin{warn}
The union of simpsets from theory imports (as described above) is not always
a good starting point for the new theory. If some ancestors have deleted
simplification rules because they are no longer wanted, while others have
left those rules in, then the union will contain the unwanted rules, which
thus have to be deleted again in the theory body.
\end{warn}
\<close>
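text \<open>
\<^medskip>
The following sketch (with a hypothetical constant \<open>sq\<close>) illustrates that a
plain @{command "definition"} declares nothing by itself: its defining
equation has to be added explicitly if it is intended as a rewrite rule.
\<close>
(*<*)experiment begin(*>*)
definition "sq x = x * (x::nat)"
lemma "sq 3 = 9" apply simp? oops
declare sq_def [simp]
lemma "sq 3 = 9" by simp
(*<*)end(*>*)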
subsection \<open>Ordered rewriting with permutative rules\<close>
text \<open>
A rewrite rule is \<^emph>\<open>permutative\<close> if the left-hand side and right-hand side
are equal up to renaming of variables. The most common permutative rule
is commutativity: \<open>?x + ?y = ?y + ?x\<close>. Other examples include \<open>(?x - ?y) -
?z = (?x - ?z) - ?y\<close> in arithmetic and \<open>insert ?x (insert ?y ?A) = insert ?y
(insert ?x ?A)\<close> for sets. Such rules are common enough to merit special
attention.
Because ordinary rewriting loops given such rules, the Simplifier employs a
special strategy, called \<^emph>\<open>ordered rewriting\<close>. Permutative rules are
detected and only applied if the rewriting step decreases the redex wrt.\ a
given term ordering. For example, commutativity rewrites \<open>b + a\<close> to \<open>a + b\<close>,
but then stops, because the redex cannot be decreased further in the sense
of the term ordering.
The default is lexicographic ordering of term structure, but this can also be
changed locally for special applications via @{define_ML
Simplifier.set_term_ord} in Isabelle/ML.
\<^medskip>
Permutative rewrite rules are declared to the Simplifier just like other
rewrite rules. Their special status is recognized automatically, and their
application is guarded by the term ordering accordingly.
\<close>
subsubsection \<open>Rewriting with AC operators\<close>
text \<open>
Ordered rewriting is particularly effective in the case of
associative-commutative operators. (Associativity by itself is not
permutative.) When dealing with an AC-operator \<open>f\<close>, keep the following
points in mind:
\<^item> The associative law must always be oriented from left to right, namely
\<open>f (f x y) z = f x (f y z)\<close>. The opposite orientation, if used with
commutativity, leads to looping in conjunction with the standard term
order.
\<^item> To complete your set of rewrite rules, you must add not just
associativity (A) and commutativity (C) but also a derived rule
\<^emph>\<open>left-commutativity\<close> (LC): \<open>f x (f y z) = f y (f x z)\<close>.
Ordered rewriting with the combination of A, C, and LC sorts a term
lexicographically --- the rewriting engine imitates bubble-sort.
\<close>
experiment
fixes f :: "'a \<Rightarrow> 'a \<Rightarrow> 'a" (infix "\<bullet>" 60)
assumes assoc: "(x \<bullet> y) \<bullet> z = x \<bullet> (y \<bullet> z)"
assumes commute: "x \<bullet> y = y \<bullet> x"
begin
lemma left_commute: "x \<bullet> (y \<bullet> z) = y \<bullet> (x \<bullet> z)"
proof -
have "(x \<bullet> y) \<bullet> z = (y \<bullet> x) \<bullet> z" by (simp only: commute)
then show ?thesis by (simp only: assoc)
qed
lemmas AC_rules = assoc commute left_commute
text \<open>
Thus the Simplifier is able to establish equalities with arbitrary
permutations of subterms, by normalizing to a common standard form. For
example:
\<close>
lemma "(b \<bullet> c) \<bullet> a = xxx"
apply (simp only: AC_rules)
txt \<open>\<^subgoals>\<close>
oops
lemma "(b \<bullet> c) \<bullet> a = a \<bullet> (b \<bullet> c)" by (simp only: AC_rules)
lemma "(b \<bullet> c) \<bullet> a = c \<bullet> (b \<bullet> a)" by (simp only: AC_rules)
lemma "(b \<bullet> c) \<bullet> a = (c \<bullet> b) \<bullet> a" by (simp only: AC_rules)
end
text \<open>
Martin and Nipkow \<^cite>\<open>"martin-nipkow"\<close> discuss the theory and give many
examples; other algebraic structures are amenable to ordered rewriting, such
as Boolean rings. The Boyer-Moore theorem prover \<^cite>\<open>bm88book\<close> also
employs ordered rewriting.
\<close>
subsubsection \<open>Re-orienting equalities\<close>
text \<open>Another application of ordered rewriting uses the derived rule
@{thm [source] eq_commute}: @{thm [source = false] eq_commute} to
reverse equations.
This is occasionally useful to re-orient local assumptions according
to the term ordering, when other built-in mechanisms of
reorientation and mutual simplification fail to apply.\<close>
subsection \<open>Simplifier tracing and debugging \label{sec:simp-trace}\<close>
text \<open>
\begin{tabular}{rcll}
@{attribute_def simp_trace} & : & \<open>attribute\<close> & default \<open>false\<close> \\
@{attribute_def simp_trace_depth_limit} & : & \<open>attribute\<close> & default \<open>1\<close> \\
@{attribute_def simp_debug} & : & \<open>attribute\<close> & default \<open>false\<close> \\
@{attribute_def simp_trace_new} & : & \<open>attribute\<close> \\
@{attribute_def simp_break} & : & \<open>attribute\<close> \\
\end{tabular}
\<^medskip>
\<^rail>\<open>
@@{attribute simp_trace_new} ('interactive')? \<newline>
('mode' '=' ('full' | 'normal'))? \<newline>
('depth' '=' @{syntax nat})?
;
@@{attribute simp_break} (@{syntax term}*)
\<close>
These attributes and configuration options control various aspects of
Simplifier tracing and debugging.
\<^descr> @{attribute simp_trace} makes the Simplifier output internal operations.
This includes rewrite steps, but also bookkeeping like modifications of the
simpset.
\<^descr> @{attribute simp_trace_depth_limit} limits the effect of @{attribute
simp_trace} to the given depth of recursive Simplifier invocations (when
solving conditions of rewrite rules).
\<^descr> @{attribute simp_debug} makes the Simplifier output some extra information
about internal operations. This includes any attempted invocation of
simplification procedures.
\<^descr> @{attribute simp_trace_new} controls Simplifier tracing within
Isabelle/PIDE applications, notably Isabelle/jEdit \<^cite>\<open>"isabelle-jedit"\<close>.
This provides a hierarchical representation of the rewriting steps performed
by the Simplifier.
Users can configure the behaviour by specifying breakpoints and verbosity,
and by enabling or disabling the interactive mode. In normal verbosity (the
default), only rule applications matching a breakpoint will be shown to the
user. In full verbosity, all rule applications will be logged. Interactive
mode interrupts the normal flow of the Simplifier and defers the decision
how to continue to the user via some GUI dialog.
\<^descr> @{attribute simp_break} declares term or theorem breakpoints for
@{attribute simp_trace_new} as described above. Term breakpoints are
patterns which are checked for matches on the redex of a rule application.
Theorem breakpoints trigger when the corresponding theorem is applied in a
rewrite step. For example:
\<close>
(*<*)experiment begin(*>*)
declare conjI [simp_break]
declare [[simp_break "?x \<and> ?y"]]
(*<*)end(*>*)
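text \<open>
The classic trace (@{attribute simp_trace}) can also be enabled locally for a
single proof, e.g.\ like this (a minimal sketch):
\<close>
lemma "0 + x = (x::nat)"
  using [[simp_trace]] by simp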
subsection \<open>Simplification procedures \label{sec:simproc}\<close>
text \<open>
Simplification procedures are ML functions that produce proven rewrite rules
on demand. They are associated with higher-order patterns that approximate
the left-hand sides of equations. The Simplifier first matches the current
redex against one of the LHS patterns; if this succeeds, the corresponding
ML function is invoked, passing the Simplifier context and redex term. Thus
rules may be specifically fashioned for particular situations, resulting in
a more powerful mechanism than term rewriting by a fixed set of rules.
Any successful result needs to be a (possibly conditional) rewrite rule \<open>t \<equiv>
u\<close> that is applicable to the current redex. The rule will be applied just as
any ordinary rewrite rule. It is expected to be already in \<^emph>\<open>internal form\<close>,
bypassing the automatic preprocessing of object-level equivalences.
\begin{matharray}{rcl}
@{command_def "simproc_setup"} & : & \<open>local_theory \<rightarrow> local_theory\<close> \\
simproc & : & \<open>attribute\<close> \\
\end{matharray}
\<^rail>\<open>
@@{command simproc_setup} @{syntax name} '(' (@{syntax term} + '|') ')' '='
@{syntax text};
@@{attribute simproc} (('add' ':')? | 'del' ':') (@{syntax name}+)
\<close>
\<^descr> @{command "simproc_setup"} defines a named simplification procedure that
is invoked by the Simplifier whenever any of the given term patterns match
the current redex. The implementation, which is provided as ML source text,
needs to be of type
\<^ML_type>\<open>morphism -> Proof.context -> cterm -> thm option\<close>, where the
\<^ML_type>\<open>cterm\<close> represents the current redex \<open>r\<close> and the result is supposed
to be some proven rewrite rule \<open>r \<equiv> r'\<close> (or a generalized version), or \<^ML>\<open>NONE\<close> to indicate failure. The \<^ML_type>\<open>Proof.context\<close> argument holds the
full context of the current Simplifier invocation. The \<^ML_type>\<open>morphism\<close>
accounts for the difference between the original compilation context and the
one of the actual application later on.
Morphisms are only relevant for simprocs that are defined within a local
target context, e.g.\ in a locale.
\<^descr> \<open>simproc add: name\<close> and \<open>simproc del: name\<close> add or delete named simprocs
to the current Simplifier context. The default is to add a simproc. Note
that @{command "simproc_setup"} already adds the new simproc to the
subsequent context.
\<close>
subsubsection \<open>Example\<close>
text \<open>
The following simplification procedure for @{thm [source = false,
show_types] unit_eq} in HOL performs fine-grained control over rule
application, beyond higher-order pattern matching. Declaring @{thm unit_eq}
as @{attribute simp} directly would make the Simplifier loop! Note that a
version of this simplification procedure is already active in Isabelle/HOL.
\<close>
(*<*)experiment begin(*>*)
simproc_setup unit ("x::unit") =
- \<open>fn _ => fn _ => fn ct =>
+ \<open>K (K (fn ct =>
if HOLogic.is_unit (Thm.term_of ct) then NONE
- else SOME (mk_meta_eq @{thm unit_eq})\<close>
+ else SOME (mk_meta_eq @{thm unit_eq})))\<close>
(*<*)end(*>*)
text \<open>
Since the Simplifier applies simplification procedures frequently, it is
important to make the failure check in ML reasonably fast.\<close>
subsection \<open>Configurable Simplifier strategies \label{sec:simp-strategies}\<close>
text \<open>
The core term-rewriting engine of the Simplifier is normally used in
combination with some add-on components that modify the strategy and allow
other non-Simplifier proof tools to be integrated. These may be reconfigured in
ML as explained below. Even if the default strategies of object-logics like
Isabelle/HOL are used unchanged, it helps to understand how the standard
Simplifier strategies work.\<close>
subsubsection \<open>The subgoaler\<close>
text \<open>
\begin{mldecls}
@{define_ML Simplifier.set_subgoaler: "(Proof.context -> int -> tactic) ->
Proof.context -> Proof.context"} \\
@{define_ML Simplifier.prems_of: "Proof.context -> thm list"} \\
\end{mldecls}
The subgoaler is the tactic used to solve subgoals arising out of
conditional rewrite rules or congruence rules. The default should be
simplification itself. In rare situations, this strategy may need to be
changed. For example, if the premise of a conditional rule is an instance of
its conclusion, as in \<open>Suc ?m < ?n \<Longrightarrow> ?m < ?n\<close>, the default strategy could
loop. % FIXME !??
\<^descr> \<^ML>\<open>Simplifier.set_subgoaler\<close>~\<open>tac ctxt\<close> sets the subgoaler of the
context to \<open>tac\<close>. The tactic will be applied to the context of the running
Simplifier instance.
\<^descr> \<^ML>\<open>Simplifier.prems_of\<close>~\<open>ctxt\<close> retrieves the current set of premises
from the context. This may be non-empty only if the Simplifier has been
told to utilize local assumptions in the first place (cf.\ the options in
\secref{sec:simp-meth}).
As an example, consider the following alternative subgoaler:
\<close>
ML_val \<open>
fun subgoaler_tac ctxt =
assume_tac ctxt ORELSE'
resolve_tac ctxt (Simplifier.prems_of ctxt) ORELSE'
asm_simp_tac ctxt
\<close>
text \<open>
This tactic first tries to solve the subgoal by assumption or by resolving
with one of the premises, calling simplification only if that fails.\<close>
subsubsection \<open>The solver\<close>
text \<open>
\begin{mldecls}
@{define_ML_type solver} \\
@{define_ML Simplifier.mk_solver: "string ->
(Proof.context -> int -> tactic) -> solver"} \\
@{define_ML_infix setSolver: "Proof.context * solver -> Proof.context"} \\
@{define_ML_infix addSolver: "Proof.context * solver -> Proof.context"} \\
@{define_ML_infix setSSolver: "Proof.context * solver -> Proof.context"} \\
@{define_ML_infix addSSolver: "Proof.context * solver -> Proof.context"} \\
\end{mldecls}
A solver is a tactic that attempts to solve a subgoal after simplification.
Its core functionality is to prove trivial subgoals such as \<^prop>\<open>True\<close>
and \<open>t = t\<close>, but object-logics might be more ambitious. For example,
Isabelle/HOL performs a restricted version of linear arithmetic here.
Solvers are packaged up in abstract type \<^ML_type>\<open>solver\<close>, with \<^ML>\<open>Simplifier.mk_solver\<close> as the only operation to create a solver.
\<^medskip>
Rewriting does not instantiate unknowns. For example, rewriting alone cannot
prove \<open>a \<in> ?A\<close> since this requires instantiating \<open>?A\<close>. The solver, however,
is an arbitrary tactic and may instantiate unknowns as it pleases. This is
the only way the Simplifier can handle a conditional rewrite rule whose
condition contains extra variables. When a simplification tactic is to be
combined with other provers, especially with the Classical Reasoner, it is
important whether it can be considered safe or not. For this reason a
simpset contains two solvers: safe and unsafe.
The standard simplification strategy solely uses the unsafe solver, which is
appropriate in most cases. For special applications where the simplification
process is not allowed to instantiate unknowns within the goal,
simplification starts with the safe solver, but may still apply the ordinary
unsafe one in nested simplifications for conditional rules or congruences.
Note that in this way the overall tactic is not totally safe: it may
instantiate unknowns that appear also in other subgoals.
\<^descr> \<^ML>\<open>Simplifier.mk_solver\<close>~\<open>name tac\<close> turns \<open>tac\<close> into a solver; the
\<open>name\<close> is only attached as a comment and has no further significance.
\<^descr> \<open>ctxt setSSolver solver\<close> installs \<open>solver\<close> as the safe solver of \<open>ctxt\<close>.
\<^descr> \<open>ctxt addSSolver solver\<close> adds \<open>solver\<close> as an additional safe solver; it
will be tried after the solvers which had already been present in \<open>ctxt\<close>.
\<^descr> \<open>ctxt setSolver solver\<close> installs \<open>solver\<close> as the unsafe solver of \<open>ctxt\<close>.
\<^descr> \<open>ctxt addSolver solver\<close> adds \<open>solver\<close> as an additional unsafe solver; it
will be tried after the solvers which had already been present in \<open>ctxt\<close>.
\<^medskip>
The solver tactic is invoked with the context of the running Simplifier.
Further operations may be used to retrieve relevant information, such as the
list of local Simplifier premises via \<^ML>\<open>Simplifier.prems_of\<close> --- this
list may be non-empty only if the Simplifier runs in a mode that utilizes
local assumptions (see also \secref{sec:simp-meth}). The solver is also
presented the full goal including its assumptions in any case. Thus it can
use these (e.g.\ by calling \<^ML>\<open>assume_tac\<close>), even if the Simplifier proper
happens to ignore local premises at the moment.
\<^medskip>
As explained before, the subgoaler is also used to solve the premises of
congruence rules. These are usually of the form \<open>s = ?x\<close>, where \<open>s\<close> needs to
be simplified and \<open>?x\<close> needs to be instantiated with the result. Typically,
the subgoaler will invoke the Simplifier at some point, which will
eventually call the solver. For this reason, solver tactics must be prepared
to solve goals of the form \<open>t = ?x\<close>, usually by reflexivity. In particular,
reflexivity should be tried before any of the fancy automated proof tools.
It may even happen that due to simplification the subgoal is no longer an
equality. For example, \<open>False \<longleftrightarrow> ?Q\<close> could be rewritten to \<open>\<not> ?Q\<close>. To cover
this case, the solver could try resolving with the theorem \<open>\<not> False\<close> of the
object-logic.
\<^medskip>
\begin{warn}
If a premise of a congruence rule cannot be proved, then the congruence is
ignored. This should only happen if the rule is \<^emph>\<open>conditional\<close> --- that is,
contains premises not of the form \<open>t = ?x\<close>. Otherwise it indicates that some
congruence rule, or possibly the subgoaler or solver, is faulty.
\end{warn}
\<close>
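text \<open>
As a minimal sketch (for illustration only), the following ML snippet builds a
solver that merely tries reflexivity and assumption, and installs it as the
unsafe solver of the static compile-time context:
\<close>
ML_val \<open>
val trivial_solver =
  Simplifier.mk_solver "trivial"
    (fn ctxt => fn i =>
      resolve_tac ctxt @{thms refl} i ORELSE assume_tac ctxt i);
val ctxt' = \<^context> setSolver trivial_solver;
\<close>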
subsubsection \<open>The looper\<close>
text \<open>
\begin{mldecls}
@{define_ML_infix setloop: "Proof.context *
(Proof.context -> int -> tactic) -> Proof.context"} \\
@{define_ML_infix addloop: "Proof.context *
(string * (Proof.context -> int -> tactic))
-> Proof.context"} \\
@{define_ML_infix delloop: "Proof.context * string -> Proof.context"} \\
@{define_ML Splitter.add_split: "thm -> Proof.context -> Proof.context"} \\
@{define_ML Splitter.add_split_bang: "
thm -> Proof.context -> Proof.context"} \\
@{define_ML Splitter.del_split: "thm -> Proof.context -> Proof.context"} \\
\end{mldecls}
The looper is a list of tactics that are applied after simplification, in
case the solver failed to solve the simplified goal. If the looper succeeds,
the simplification process is started all over again. Each of the subgoals
generated by the looper is attacked in turn, in reverse order.
A typical looper is \<^emph>\<open>case splitting\<close>: the expansion of a conditional.
Another possibility is to apply an elimination rule on the assumptions. More
adventurous loopers could start an induction.
\<^descr> \<open>ctxt setloop tac\<close> installs \<open>tac\<close> as the only looper tactic of \<open>ctxt\<close>.
\<^descr> \<open>ctxt addloop (name, tac)\<close> adds \<open>tac\<close> as an additional looper tactic
with name \<open>name\<close>, which is significant for managing the collection of
loopers. The tactic will be tried after the looper tactics that had
already been present in \<open>ctxt\<close>.
\<^descr> \<open>ctxt delloop name\<close> deletes the looper tactic that was associated with
\<open>name\<close> from \<open>ctxt\<close>.
\<^descr> \<^ML>\<open>Splitter.add_split\<close>~\<open>thm ctxt\<close> adds a split tactic
for \<open>thm\<close> as an additional looper tactic of \<open>ctxt\<close>
(overwriting any previous split tactic for the same constant).
\<^descr> \<^ML>\<open>Splitter.add_split_bang\<close>~\<open>thm ctxt\<close> adds an aggressive
(see \S\ref{sec:simp-meth})
split tactic for \<open>thm\<close> as an additional looper tactic of \<open>ctxt\<close>
(overwriting any previous split tactic for the same constant).
\<^descr> \<^ML>\<open>Splitter.del_split\<close>~\<open>thm ctxt\<close> deletes the split tactic
corresponding to \<open>thm\<close> from the looper tactics of \<open>ctxt\<close>.
The splitter replaces applications of a given function; the right-hand side
of the replacement can be anything. For example, here is a splitting rule
for conditional expressions:
@{text [display] "?P (if ?Q ?x ?y) \<longleftrightarrow> (?Q \<longrightarrow> ?P ?x) \<and> (\<not> ?Q \<longrightarrow> ?P ?y)"}
Another example is the elimination operator for Cartesian products (which
happens to be called \<^const>\<open>case_prod\<close> in Isabelle/HOL):
@{text [display] "?P (case_prod ?f ?p) \<longleftrightarrow> (\<forall>a b. ?p = (a, b) \<longrightarrow> ?P (?f a b))"}
For technical reasons, there is a distinction between case splitting in the
conclusion and in the premises of a subgoal. The former is done by \<^ML>\<open>Splitter.split_tac\<close> with rules like @{thm [source] if_split} or @{thm
[source] option.split}, which do not split the subgoal, while the latter is
done by \<^ML>\<open>Splitter.split_asm_tac\<close> with rules like @{thm [source]
if_split_asm} or @{thm [source] option.split_asm}, which split the subgoal.
The function \<^ML>\<open>Splitter.add_split\<close> automatically takes care of which
tactic to call, analyzing the form of the rules given as argument; it is the
same operation behind the \<open>split\<close> attribute or method modifier syntax in the
Isar source language.
Case splits should be allowed only when necessary; they are expensive and
hard to control. Case-splitting on if-expressions in the conclusion is
usually beneficial, so it is enabled by default in Isabelle/HOL and
Isabelle/FOL/ZF.
\begin{warn}
With \<^ML>\<open>Splitter.split_asm_tac\<close> as looper component, the Simplifier may
split subgoals! This might cause unexpected problems in tactic expressions
that silently assume 0 or 1 subgoals after simplification.
\end{warn}
\<close>
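text \<open>
For example, with @{thm [source] if_split_asm} as additional split rule, the
Simplifier performs the case split within the premises (a small sketch; by
default, if-expressions are split in the conclusion only):
\<close>
lemma "(if b then x else y) = z \<Longrightarrow> (b \<and> x = z) \<or> (\<not> b \<and> y = (z::nat))"
  by (simp split: if_split_asm)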
subsection \<open>Forward simplification \label{sec:simp-forward}\<close>
text \<open>
\begin{matharray}{rcl}
@{attribute_def simplified} & : & \<open>attribute\<close> \\
\end{matharray}
\<^rail>\<open>
@@{attribute simplified} opt? @{syntax thms}?
;
opt: '(' ('no_asm' | 'no_asm_simp' | 'no_asm_use') ')'
\<close>
\<^descr> @{attribute simplified}~\<open>a\<^sub>1 \<dots> a\<^sub>n\<close> causes a theorem to be simplified,
either by exactly the specified rules \<open>a\<^sub>1, \<dots>, a\<^sub>n\<close>, or the implicit
Simplifier context if no arguments are given. The result is fully simplified
by default, including assumptions and conclusion; the options \<open>no_asm\<close> etc.\
tune the Simplifier in the same way as for the \<open>simp\<close> method.
Note that forward simplification restricts the Simplifier to its most basic
operation of term rewriting; solver and looper tactics
(\secref{sec:simp-strategies}) are \<^emph>\<open>not\<close> involved here. The @{attribute
simplified} attribute should be only rarely required under normal
circumstances.
\<close>
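text \<open>
A minimal sketch of @{attribute simplified} in a structured proof body: the
assumption \<open>P (0 + (x + 0))\<close> below is reduced to \<open>P x\<close> by the implicit
Simplifier context.
\<close>
notepad
begin
  fix P :: "nat \<Rightarrow> bool" and x :: nat
  assume a: "P (0 + (x + 0))"
  note a [simplified]
end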
section \<open>The Classical Reasoner \label{sec:classical}\<close>
subsection \<open>Basic concepts\<close>
text \<open>Although Isabelle is generic, many users will be working in
some extension of classical first-order logic. Isabelle/ZF is built
upon theory FOL, while Isabelle/HOL conceptually contains
first-order logic as a fragment. Theorem-proving in predicate logic
is undecidable, but many automated strategies have been developed to
assist in this task.
Isabelle's classical reasoner is a generic package that accepts
certain information about a logic and delivers a suite of automatic
proof tools, based on rules that are classified and declared in the
context. These proof procedures are slow and simplistic compared
with high-end automated theorem provers, but they can save
considerable time and effort in practice. They can prove theorems
such as Pelletier's \<^cite>\<open>pelletier86\<close> problems 40 and 41 in a few
milliseconds (including full proof reconstruction):\<close>
lemma "(\<exists>y. \<forall>x. F x y \<longleftrightarrow> F x x) \<longrightarrow> \<not> (\<forall>x. \<exists>y. \<forall>z. F z y \<longleftrightarrow> \<not> F z x)"
by blast
lemma "(\<forall>z. \<exists>y. \<forall>x. f x y \<longleftrightarrow> f x z \<and> \<not> f x x) \<longrightarrow> \<not> (\<exists>z. \<forall>x. f x z)"
by blast
text \<open>The proof tools are generic. They are not restricted to
first-order logic, and have been heavily used in the development of
the Isabelle/HOL library and applications. The tactics can be
traced, and their components can be called directly; in this manner,
any proof can be viewed interactively.\<close>
subsubsection \<open>The sequent calculus\<close>
text \<open>Isabelle supports natural deduction, which is easy to use for
interactive proof. But natural deduction does not easily lend
itself to automation, and has a bias towards intuitionism. For
certain proofs in classical logic, it cannot be called natural.
The \<^emph>\<open>sequent calculus\<close>, a generalization of natural deduction,
is easier to automate.
A \<^bold>\<open>sequent\<close> has the form \<open>\<Gamma> \<turnstile> \<Delta>\<close>, where \<open>\<Gamma>\<close>
and \<open>\<Delta>\<close> are sets of formulae.\<^footnote>\<open>For first-order
logic, sequents can equivalently be made from lists or multisets of
formulae.\<close> The sequent \<open>P\<^sub>1, \<dots>, P\<^sub>m \<turnstile> Q\<^sub>1, \<dots>, Q\<^sub>n\<close> is
\<^bold>\<open>valid\<close> if \<open>P\<^sub>1 \<and> \<dots> \<and> P\<^sub>m\<close> implies \<open>Q\<^sub>1 \<or> \<dots> \<or>
Q\<^sub>n\<close>. Thus \<open>P\<^sub>1, \<dots>, P\<^sub>m\<close> represent assumptions, each of which
is true, while \<open>Q\<^sub>1, \<dots>, Q\<^sub>n\<close> represent alternative goals. A
sequent is \<^bold>\<open>basic\<close> if its left and right sides have a common
formula, as in \<open>P, Q \<turnstile> Q, R\<close>; basic sequents are trivially
valid.
Sequent rules are classified as \<^bold>\<open>right\<close> or \<^bold>\<open>left\<close>,
indicating which side of the \<open>\<turnstile>\<close> symbol they operate on.
Rules that operate on the right side are analogous to natural
deduction's introduction rules, and left rules are analogous to
elimination rules. The sequent calculus analogue of \<open>(\<longrightarrow>I)\<close>
is the rule
\[
\infer[\<open>(\<longrightarrow>R)\<close>]{\<open>\<Gamma> \<turnstile> \<Delta>, P \<longrightarrow> Q\<close>}{\<open>P, \<Gamma> \<turnstile> \<Delta>, Q\<close>}
\]
Applying the rule backwards, this breaks down some implication on
the right side of a sequent; \<open>\<Gamma>\<close> and \<open>\<Delta>\<close> stand for
the sets of formulae that are unaffected by the inference. The
analogue of the pair \<open>(\<or>I1)\<close> and \<open>(\<or>I2)\<close> is the
single rule
\[
\infer[\<open>(\<or>R)\<close>]{\<open>\<Gamma> \<turnstile> \<Delta>, P \<or> Q\<close>}{\<open>\<Gamma> \<turnstile> \<Delta>, P, Q\<close>}
\]
This breaks down some disjunction on the right side, replacing it by
both disjuncts. Thus, the sequent calculus is a kind of
multiple-conclusion logic.
To illustrate the use of multiple formulae on the right, let us
prove the classical theorem \<open>(P \<longrightarrow> Q) \<or> (Q \<longrightarrow> P)\<close>. Working
backwards, we reduce this formula to a basic sequent:
\[
\infer[\<open>(\<or>R)\<close>]{\<open>\<turnstile> (P \<longrightarrow> Q) \<or> (Q \<longrightarrow> P)\<close>}
{\infer[\<open>(\<longrightarrow>R)\<close>]{\<open>\<turnstile> (P \<longrightarrow> Q), (Q \<longrightarrow> P)\<close>}
{\infer[\<open>(\<longrightarrow>R)\<close>]{\<open>P \<turnstile> Q, (Q \<longrightarrow> P)\<close>}
{\<open>P, Q \<turnstile> Q, P\<close>}}}
\]
This example is typical of the sequent calculus: start with the
desired theorem and apply rules backwards in a fairly arbitrary
manner. This yields a surprisingly effective proof procedure.
Quantifiers add only a few complications, since Isabelle handles
parameters and schematic variables. See \<^cite>\<open>\<open>Chapter 10\<close> in
"paulson-ml2"\<close> for further discussion.\<close>
subsubsection \<open>Simulating sequents by natural deduction\<close>
text \<open>Isabelle can represent sequents directly, as in the
object-logic LK. But natural deduction is easier to work with, and
most object-logics employ it. Fortunately, we can simulate the
sequent \<open>P\<^sub>1, \<dots>, P\<^sub>m \<turnstile> Q\<^sub>1, \<dots>, Q\<^sub>n\<close> by the Isabelle formula
\<open>P\<^sub>1 \<Longrightarrow> \<dots> \<Longrightarrow> P\<^sub>m \<Longrightarrow> \<not> Q\<^sub>2 \<Longrightarrow> ... \<Longrightarrow> \<not> Q\<^sub>n \<Longrightarrow> Q\<^sub>1\<close> where the order of
the assumptions and the choice of \<open>Q\<^sub>1\<close> are arbitrary.
Elim-resolution plays a key role in simulating sequent proofs.
We can easily handle reasoning on the left. Elim-resolution with
the rules \<open>(\<or>E)\<close>, \<open>(\<bottom>E)\<close> and \<open>(\<exists>E)\<close> achieves
a similar effect as the corresponding sequent rules. For the other
connectives, we use sequent-style elimination rules instead of
destruction rules such as \<open>(\<and>E1, 2)\<close> and \<open>(\<forall>E)\<close>.
But note that the rule \<open>(\<not>L)\<close> has no effect under our
representation of sequents!
\[
\infer[\<open>(\<not>L)\<close>]{\<open>\<not> P, \<Gamma> \<turnstile> \<Delta>\<close>}{\<open>\<Gamma> \<turnstile> \<Delta>, P\<close>}
\]
What about reasoning on the right? Introduction rules can only
affect the formula in the conclusion, namely \<open>Q\<^sub>1\<close>. The
other right-side formulae are represented as negated assumptions,
\<open>\<not> Q\<^sub>2, \<dots>, \<not> Q\<^sub>n\<close>. In order to operate on one of these, it
must first be exchanged with \<open>Q\<^sub>1\<close>. Elim-resolution with the
\<open>swap\<close> rule has this effect: \<open>\<not> P \<Longrightarrow> (\<not> R \<Longrightarrow> P) \<Longrightarrow> R\<close>
To ensure that swaps occur only when necessary, each introduction
rule is converted into a swapped form: it is resolved with the
second premise of \<open>(swap)\<close>. The swapped form of \<open>(\<and>I)\<close>, which might be called \<open>(\<not>\<and>E)\<close>, is
@{text [display] "\<not> (P \<and> Q) \<Longrightarrow> (\<not> R \<Longrightarrow> P) \<Longrightarrow> (\<not> R \<Longrightarrow> Q) \<Longrightarrow> R"}
Similarly, the swapped form of \<open>(\<longrightarrow>I)\<close> is
@{text [display] "\<not> (P \<longrightarrow> Q) \<Longrightarrow> (\<not> R \<Longrightarrow> P \<Longrightarrow> Q) \<Longrightarrow> R"}
Swapped introduction rules are applied using elim-resolution, which
deletes the negated formula. Our representation of sequents also
requires the use of ordinary introduction rules. If we had no
regard for readability of intermediate goal states, we could treat
the right side more uniformly by representing sequents as @{text
[display] "P\<^sub>1 \<Longrightarrow> \<dots> \<Longrightarrow> P\<^sub>m \<Longrightarrow> \<not> Q\<^sub>1 \<Longrightarrow> \<dots> \<Longrightarrow> \<not> Q\<^sub>n \<Longrightarrow> \<bottom>"}
\<close>
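text \<open>
As a sanity check, both the \<open>swap\<close> rule and the swapped form of \<open>(\<and>I)\<close> shown
above are easily confirmed as theorems of Isabelle/HOL:
\<close>

(*<*)experiment begin(*>*)
lemma "\<not> P \<Longrightarrow> (\<not> R \<Longrightarrow> P) \<Longrightarrow> R"
  by blast

lemma "\<not> (P \<and> Q) \<Longrightarrow> (\<not> R \<Longrightarrow> P) \<Longrightarrow> (\<not> R \<Longrightarrow> Q) \<Longrightarrow> R"
  by blast
(*<*)end(*>*)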
subsubsection \<open>Extra rules for the sequent calculus\<close>
text \<open>As mentioned, destruction rules such as \<open>(\<and>E1, 2)\<close> and
\<open>(\<forall>E)\<close> must be replaced by sequent-style elimination rules.
In addition, we need rules to embody the classical equivalence
between \<open>P \<longrightarrow> Q\<close> and \<open>\<not> P \<or> Q\<close>. The introduction
rules \<open>(\<or>I1, 2)\<close> are replaced by a rule that simulates
\<open>(\<or>R)\<close>: @{text [display] "(\<not> Q \<Longrightarrow> P) \<Longrightarrow> P \<or> Q"}
The destruction rule \<open>(\<longrightarrow>E)\<close> is replaced by @{text [display]
"(P \<longrightarrow> Q) \<Longrightarrow> (\<not> P \<Longrightarrow> R) \<Longrightarrow> (Q \<Longrightarrow> R) \<Longrightarrow> R"}
Quantifier replication also requires special rules. In classical
logic, \<open>\<exists>x. P x\<close> is equivalent to \<open>\<not> (\<forall>x. \<not> P x)\<close>;
the rules \<open>(\<exists>R)\<close> and \<open>(\<forall>L)\<close> are dual:
\[
\infer[\<open>(\<exists>R)\<close>]{\<open>\<Gamma> \<turnstile> \<Delta>, \<exists>x. P x\<close>}{\<open>\<Gamma> \<turnstile> \<Delta>, \<exists>x. P x, P t\<close>}
\qquad
\infer[\<open>(\<forall>L)\<close>]{\<open>\<forall>x. P x, \<Gamma> \<turnstile> \<Delta>\<close>}{\<open>P t, \<forall>x. P x, \<Gamma> \<turnstile> \<Delta>\<close>}
\]
Thus both kinds of quantifier may be replicated. Theorems requiring
multiple uses of a universal formula are easy to invent; consider
@{text [display] "(\<forall>x. P x \<longrightarrow> P (f x)) \<and> P a \<longrightarrow> P (f\<^sup>n a)"} for any
\<open>n > 1\<close>. Natural examples of the multiple use of an
existential formula are rare; a standard one is \<open>\<exists>x. \<forall>y. P x
\<longrightarrow> P y\<close>.
Forgoing quantifier replication loses completeness, but gains
decidability, since the search space becomes finite. Many useful
theorems can be proved without replication, and the search generally
delivers its verdict in a reasonable time. To adopt this approach,
represent the sequent rules \<open>(\<exists>R)\<close>, \<open>(\<exists>L)\<close> and
\<open>(\<forall>R)\<close> by \<open>(\<exists>I)\<close>, \<open>(\<exists>E)\<close> and \<open>(\<forall>I)\<close>,
respectively, and put \<open>(\<forall>E)\<close> into elimination form: @{text
[display] "\<forall>x. P x \<Longrightarrow> (P t \<Longrightarrow> Q) \<Longrightarrow> Q"}
Elim-resolution with this rule will delete the universal formula
after a single use. To replicate universal quantifiers, replace the
rule by @{text [display] "\<forall>x. P x \<Longrightarrow> (P t \<Longrightarrow> \<forall>x. P x \<Longrightarrow> Q) \<Longrightarrow> Q"}
To replicate existential quantifiers, replace \<open>(\<exists>I)\<close> by
@{text [display] "(\<not> (\<exists>x. P x) \<Longrightarrow> P t) \<Longrightarrow> \<exists>x. P x"}
All introduction rules mentioned above are also useful in swapped
form.
Replication makes the search space infinite; we must apply the rules
with care. The classical reasoner distinguishes between safe and
unsafe rules, applying the latter only when there is no alternative.
Depth-first search may well go down a blind alley; best-first search
is better behaved in an infinite search space. However, quantifier
replication is too expensive to prove any but the simplest theorems.
\<close>
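text \<open>
For illustration, the rule simulating \<open>(\<or>R)\<close> and the existential replication
rule mentioned above are both provable in Isabelle/HOL:
\<close>

(*<*)experiment begin(*>*)
lemma "(\<not> Q \<Longrightarrow> P) \<Longrightarrow> P \<or> Q"
  by blast

lemma "(\<not> (\<exists>x. P x) \<Longrightarrow> P t) \<Longrightarrow> \<exists>x. P x"
  by blast
(*<*)end(*>*)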
subsection \<open>Rule declarations\<close>
text \<open>The proof tools of the Classical Reasoner depend on
collections of rules declared in the context, which are classified
as introduction, elimination or destruction and as \<^emph>\<open>safe\<close> or
\<^emph>\<open>unsafe\<close>. In general, safe rules can be attempted blindly,
while unsafe rules must be used with care. A safe rule must never
reduce a provable goal to an unprovable set of subgoals.
The rule \<open>P \<Longrightarrow> P \<or> Q\<close> is unsafe because it reduces \<open>P
\<or> Q\<close> to \<open>P\<close>, which might turn out to be a premature choice of an
unprovable subgoal. Any rule whose premises contain new unknowns is
unsafe. The elimination rule \<open>\<forall>x. P x \<Longrightarrow> (P t \<Longrightarrow> Q) \<Longrightarrow> Q\<close> is
unsafe, since it is applied via elim-resolution, which discards the
assumption \<open>\<forall>x. P x\<close> and replaces it by the weaker
assumption \<open>P t\<close>. The rule \<open>P t \<Longrightarrow> \<exists>x. P x\<close> is
unsafe for similar reasons. The quantifier duplication rule \<open>\<forall>x. P x \<Longrightarrow> (P t \<Longrightarrow> \<forall>x. P x \<Longrightarrow> Q) \<Longrightarrow> Q\<close> is unsafe in a different sense:
since it keeps the assumption \<open>\<forall>x. P x\<close>, it is prone to
looping. In classical first-order logic, all rules are safe except
those mentioned above.
The safe~/ unsafe distinction is vague, and may be regarded merely
as a way of giving some rules priority over others. One could argue
that \<open>(\<or>E)\<close> is unsafe, because repeated application of it
could generate exponentially many subgoals. Induction rules are
unsafe because inductive proofs are difficult to set up
automatically. Any inference that instantiates an unknown
in the proof state is unsafe --- thus matching must be used, rather than
unification. Even proof by assumption is unsafe if it instantiates
unknowns shared with other subgoals.
\begin{matharray}{rcl}
@{command_def "print_claset"}\<open>\<^sup>*\<close> & : & \<open>context \<rightarrow>\<close> \\
@{attribute_def intro} & : & \<open>attribute\<close> \\
@{attribute_def elim} & : & \<open>attribute\<close> \\
@{attribute_def dest} & : & \<open>attribute\<close> \\
@{attribute_def rule} & : & \<open>attribute\<close> \\
@{attribute_def iff} & : & \<open>attribute\<close> \\
@{attribute_def swapped} & : & \<open>attribute\<close> \\
\end{matharray}
\<^rail>\<open>
(@@{attribute intro} | @@{attribute elim} | @@{attribute dest}) ('!' | () | '?') @{syntax nat}?
;
@@{attribute rule} 'del'
;
@@{attribute iff} (((() | 'add') '?'?) | 'del')
\<close>
\<^descr> @{command "print_claset"} prints the collection of rules
declared to the Classical Reasoner, i.e.\ the \<^ML_type>\<open>claset\<close>
within the context.
\<^descr> @{attribute intro}, @{attribute elim}, and @{attribute dest}
declare introduction, elimination, and destruction rules,
respectively. By default, rules are considered as \<^emph>\<open>unsafe\<close>
(i.e.\ not applied blindly without backtracking), while ``\<open>!\<close>'' classifies as \<^emph>\<open>safe\<close>. Rule declarations marked by
``\<open>?\<close>'' coincide with those of Isabelle/Pure, cf.\
\secref{sec:pure-meth-att} (i.e.\ are only applied in single steps
of the @{method rule} method). The optional natural number
specifies an explicit weight argument, which is ignored by the
automated reasoning tools, but determines the search order of single
rule steps.
Introduction rules are those that can be applied using ordinary
resolution. Their swapped forms are generated internally, which
will be applied using elim-resolution. Elimination rules are
applied using elim-resolution. Rules are sorted by the number of
new subgoals they will yield; rules that generate the fewest
subgoals will be tried first. Otherwise, later declarations take
precedence over earlier ones.
Rules already present in the context with the same classification
are ignored. A warning is printed if the rule has already been
added with some other classification, but the rule is added anyway
as requested.
\<^descr> @{attribute rule}~\<open>del\<close> deletes all occurrences of a
rule from the classical context, regardless of its classification as
introduction~/ elimination~/ destruction and safe~/ unsafe.
\<^descr> @{attribute iff} declares logical equivalences to the
Simplifier and the Classical Reasoner at the same time.
Non-conditional rules result in a safe introduction and elimination
pair; conditional ones are considered unsafe. Rules with negative
conclusion are automatically inverted (using \<open>\<not>\<close>-elimination
internally).
The ``\<open>?\<close>'' version of @{attribute iff} declares rules to
the Isabelle/Pure context only, and omits the Simplifier
declaration.
\<^descr> @{attribute swapped} turns an introduction rule into an
elimination rule, by resolving with the classical swap principle \<open>\<not> P \<Longrightarrow> (\<not> R \<Longrightarrow> P) \<Longrightarrow> R\<close> in the second position. This is mainly for
illustrative purposes: the Classical Reasoner already swaps rules
internally as explained above.
\<close>
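text \<open>
Here is an artificial example of such declarations; the predicate \<open>tiny\<close>
below is purely illustrative.
\<close>

(*<*)experiment begin(*>*)
definition tiny :: "nat \<Rightarrow> bool" where "tiny n \<longleftrightarrow> n < 2"

lemma tiny_zero [intro]: "tiny 0"
  by (simp add: tiny_def)

lemma "\<exists>n. tiny n"
  by blast

lemma tiny_conv [iff]: "tiny n \<longleftrightarrow> n < 2"
  by (simp add: tiny_def)

lemma "\<not> tiny 5"
  by simp
(*<*)end(*>*)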
subsection \<open>Structured methods\<close>
text \<open>
\begin{matharray}{rcl}
@{method_def rule} & : & \<open>method\<close> \\
@{method_def contradiction} & : & \<open>method\<close> \\
\end{matharray}
\<^rail>\<open>
@@{method rule} @{syntax thms}?
\<close>
\<^descr> @{method rule} as offered by the Classical Reasoner is a
refinement over the Pure one (see \secref{sec:pure-meth-att}). Both
versions work the same, but the classical version observes the
classical rule context in addition to that of Isabelle/Pure.
Common object logics (HOL, ZF, etc.) declare a rich collection of
classical rules (even if these would qualify as intuitionistic
ones), but only a few declarations to the rule context of
Isabelle/Pure (\secref{sec:pure-meth-att}).
\<^descr> @{method contradiction} solves some goal by contradiction,
deriving any result from both \<open>\<not> A\<close> and \<open>A\<close>. Chained
facts, which are guaranteed to participate, may appear in either
order.
\<close>
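text \<open>
Two elementary examples: the first resolves with an explicitly given rule,
the second derives an arbitrary result from contradictory chained facts.
\<close>

(*<*)experiment begin(*>*)
lemma "A \<Longrightarrow> A \<or> B"
  by (rule disjI1)

lemma
  assumes "\<not> A" and "A"
  shows "B"
  using assms by contradiction
(*<*)end(*>*)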
subsection \<open>Fully automated methods\<close>
text \<open>
\begin{matharray}{rcl}
@{method_def blast} & : & \<open>method\<close> \\
@{method_def auto} & : & \<open>method\<close> \\
@{method_def force} & : & \<open>method\<close> \\
@{method_def fast} & : & \<open>method\<close> \\
@{method_def slow} & : & \<open>method\<close> \\
@{method_def best} & : & \<open>method\<close> \\
@{method_def fastforce} & : & \<open>method\<close> \\
@{method_def slowsimp} & : & \<open>method\<close> \\
@{method_def bestsimp} & : & \<open>method\<close> \\
@{method_def deepen} & : & \<open>method\<close> \\
\end{matharray}
\<^rail>\<open>
@@{method blast} @{syntax nat}? (@{syntax clamod} * )
;
@@{method auto} (@{syntax nat} @{syntax nat})? (@{syntax clasimpmod} * )
;
@@{method force} (@{syntax clasimpmod} * )
;
(@@{method fast} | @@{method slow} | @@{method best}) (@{syntax clamod} * )
;
(@@{method fastforce} | @@{method slowsimp} | @@{method bestsimp})
(@{syntax clasimpmod} * )
;
@@{method deepen} (@{syntax nat} ?) (@{syntax clamod} * )
;
@{syntax_def clamod}:
(('intro' | 'elim' | 'dest') ('!' | () | '?') | 'del') ':' @{syntax thms}
;
@{syntax_def clasimpmod}: ('simp' (() | 'add' | 'del' | 'only') |
'cong' (() | 'add' | 'del') |
'split' (() | '!' | 'del') |
'iff' (((() | 'add') '?'?) | 'del') |
(('intro' | 'elim' | 'dest') ('!' | () | '?') | 'del')) ':' @{syntax thms}
\<close>
\<^descr> @{method blast} is a separate classical tableau prover that
uses the same classical rule declarations as explained before.
Proof search is coded directly in ML using special data structures.
A successful proof is then reconstructed using regular Isabelle
inferences. It is faster and more powerful than the other classical
reasoning tools, but has major limitations too.
\<^item> It does not use the classical wrapper tacticals, such as the
Simplifier integration of @{method fastforce}.
\<^item> It does not perform higher-order unification, as needed by the
rule @{thm [source=false] rangeI} in HOL. There are often
alternatives to such rules, for example @{thm [source=false]
range_eqI}.
\<^item> Function variables may only be applied to parameters of the
subgoal. (This restriction arises because the prover does not use
higher-order unification.) If other function variables are present
then the prover will fail with the message
@{verbatim [display] \<open>Function unknown's argument not a bound variable\<close>}
\<^item> Its proof strategy is more general than that of @{method fast} but can
be slower. If @{method blast} fails or seems to be running forever,
try @{method fast} and the other proof tools described below.
The optional integer argument specifies a bound for the number of
unsafe steps used in a proof. By default, @{method blast} starts
with a bound of 0 and increases it successively to 20. In contrast,
\<open>(blast lim)\<close> tries to prove the goal using a search bound
of \<open>lim\<close>. Sometimes a slow proof using @{method blast} can
be made much faster by supplying the successful search bound to this
proof method instead.
\<^descr> @{method auto} combines classical reasoning with
simplification. It is intended for situations where there are a lot
of mostly trivial subgoals; it proves all the easy ones, leaving the
ones it cannot prove. Occasionally, attempting to prove the hard
ones may take a long time.
The optional depth arguments in \<open>(auto m n)\<close> refer to its
builtin classical reasoning procedures: \<open>m\<close> (default 4) is for
@{method blast}, which is tried first, and \<open>n\<close> (default 2) is
for a slower but more general alternative that also takes wrappers
into account.
\<^descr> @{method force} is intended to prove the first subgoal
completely, using many fancy proof tools and performing a rather
exhaustive search. As a result, proof attempts may take rather long
or diverge easily.
\<^descr> @{method fast}, @{method best}, @{method slow} attempt to
prove the first subgoal using sequent-style reasoning as explained
before. Unlike @{method blast}, they construct proofs directly in
Isabelle.
There is a difference in search strategy and back-tracking: @{method
fast} uses depth-first search and @{method best} uses best-first
search (guided by a heuristic function: normally the total size of
the proof state).
Method @{method slow} is like @{method fast}, but conducts a broader
search: it may, when backtracking from a failed proof attempt, undo
even the step of proving a subgoal by assumption.
\<^descr> @{method fastforce}, @{method slowsimp}, @{method bestsimp}
are like @{method fast}, @{method slow}, @{method best},
respectively, but use the Simplifier as additional wrapper. The name
@{method fastforce} reflects the behaviour of this popular method
better without requiring an understanding of its implementation.
\<^descr> @{method deepen} works by exhaustive search up to a certain
depth. The start depth is 4 (unless specified explicitly), and the
depth is increased iteratively up to 10. Unsafe rules are modified
to preserve the formula they act on, so that it can be used repeatedly.
This method can prove more goals than @{method fast}, but is much
slower, for example if the assumptions have many universal
quantifiers.
All of the above methods support additional modifiers of the context
of classical (and simplifier) rules, but the ones related to the
Simplifier are explicitly prefixed by \<open>simp\<close> here. The
semantics of these ad-hoc rule declarations is analogous to the
attributes given before. Facts provided by forward chaining are
inserted into the goal before commencing proof search.
\<close>
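text \<open>
Here are some artificial examples of these methods, including an explicit
search bound for @{method blast} and explicit depth arguments for
@{method auto}:
\<close>

(*<*)experiment begin(*>*)
lemma "(\<forall>x. P x \<longrightarrow> Q x) \<and> P a \<longrightarrow> Q a"
  by blast

lemma "(\<forall>x. P x \<longrightarrow> Q x) \<and> P a \<longrightarrow> Q a"
  by (blast 5)

lemma "rev (rev xs) = xs \<and> length (xs @ ys) = length xs + length ys"
  by (auto 4 3)

lemma "\<lbrakk>\<forall>x. P x \<longrightarrow> Q x \<longrightarrow> R x; P a; Q a\<rbrakk> \<Longrightarrow> R a"
  by fastforce
(*<*)end(*>*)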
subsection \<open>Partially automated methods\label{sec:classical:partial}\<close>
text \<open>These proof methods may help in situations when the
fully-automated tools fail. The result is a simpler subgoal that
can be tackled by other means, such as by manual instantiation of
quantifiers.
\begin{matharray}{rcl}
@{method_def safe} & : & \<open>method\<close> \\
@{method_def clarify} & : & \<open>method\<close> \\
@{method_def clarsimp} & : & \<open>method\<close> \\
\end{matharray}
\<^rail>\<open>
(@@{method safe} | @@{method clarify}) (@{syntax clamod} * )
;
@@{method clarsimp} (@{syntax clasimpmod} * )
\<close>
\<^descr> @{method safe} repeatedly performs safe steps on all subgoals.
It is deterministic, with at most one outcome.
\<^descr> @{method clarify} performs a series of safe steps without
splitting subgoals; see also @{method clarify_step}.
\<^descr> @{method clarsimp} acts like @{method clarify}, but also does
simplification. Note that if the Simplifier context includes a
splitter for the premises, the subgoal may still be split.
\<close>
subsection \<open>Single-step tactics\<close>
text \<open>
\begin{matharray}{rcl}
@{method_def safe_step} & : & \<open>method\<close> \\
@{method_def inst_step} & : & \<open>method\<close> \\
@{method_def step} & : & \<open>method\<close> \\
@{method_def slow_step} & : & \<open>method\<close> \\
@{method_def clarify_step} & : & \<open>method\<close> \\
\end{matharray}
These are the primitive tactics behind the automated proof methods
of the Classical Reasoner. By calling them yourself, you can
execute these procedures one step at a time.
\<^descr> @{method safe_step} performs a safe step on the first subgoal.
The safe wrapper tacticals are applied to a tactic that may include
proof by assumption or Modus Ponens (taking care not to instantiate
unknowns), or substitution.
\<^descr> @{method inst_step} is like @{method safe_step}, but allows
unknowns to be instantiated.
\<^descr> @{method step} is the basic step of the proof procedure; it
operates on the first subgoal. The unsafe wrapper tacticals are
applied to a tactic that tries @{method safe}, @{method inst_step},
or applies an unsafe rule from the context.
\<^descr> @{method slow_step} resembles @{method step}, but allows
backtracking between using safe rules with instantiation (@{method
inst_step}) and using unsafe rules. The resulting search space is
larger.
\<^descr> @{method clarify_step} performs a safe step on the first
subgoal; no splitting step is applied. For example, the subgoal
\<open>A \<and> B\<close> is left as a conjunction. Proof by assumption,
Modus Ponens, etc., may be performed provided they do not
instantiate unknowns. Assumptions of the form \<open>x = t\<close> may
be eliminated. The safe wrapper tactical is applied.
\<close>
subsection \<open>Modifying the search step\<close>
text \<open>
\begin{mldecls}
@{define_ML_type wrapper = "(int -> tactic) -> (int -> tactic)"} \\[0.5ex]
@{define_ML_infix addSWrapper: "Proof.context *
(string * (Proof.context -> wrapper)) -> Proof.context"} \\
@{define_ML_infix addSbefore: "Proof.context *
(string * (Proof.context -> int -> tactic)) -> Proof.context"} \\
@{define_ML_infix addSafter: "Proof.context *
(string * (Proof.context -> int -> tactic)) -> Proof.context"} \\
@{define_ML_infix delSWrapper: "Proof.context * string -> Proof.context"} \\[0.5ex]
@{define_ML_infix addWrapper: "Proof.context *
(string * (Proof.context -> wrapper)) -> Proof.context"} \\
@{define_ML_infix addbefore: "Proof.context *
(string * (Proof.context -> int -> tactic)) -> Proof.context"} \\
@{define_ML_infix addafter: "Proof.context *
(string * (Proof.context -> int -> tactic)) -> Proof.context"} \\
@{define_ML_infix delWrapper: "Proof.context * string -> Proof.context"} \\[0.5ex]
@{define_ML addSss: "Proof.context -> Proof.context"} \\
@{define_ML addss: "Proof.context -> Proof.context"} \\
\end{mldecls}
The proof strategy of the Classical Reasoner is simple. Perform as
many safe inferences as possible; or else, apply certain safe rules,
allowing instantiation of unknowns; or else, apply an unsafe rule.
The tactics also eliminate assumptions of the form \<open>x = t\<close>
by substitution if they have been set up to do so. They may perform
a form of Modus Ponens: if there are assumptions \<open>P \<longrightarrow> Q\<close> and
\<open>P\<close>, then replace \<open>P \<longrightarrow> Q\<close> by \<open>Q\<close>.
The classical reasoning tools --- except @{method blast} --- allow
this basic proof strategy to be modified by applying two lists of
arbitrary \<^emph>\<open>wrapper tacticals\<close> to it. The first wrapper list,
which is considered to contain safe wrappers only, affects @{method
safe_step} and all the tactics that call it. The second one, which
may contain unsafe wrappers, affects the unsafe parts of @{method
step}, @{method slow_step}, and the tactics that call them. A
wrapper transforms each step of the search, for example by
attempting other tactics before or after the original step tactic.
All members of a wrapper list are applied in turn to the respective
step tactic.
Initially the two wrapper lists are empty, which means no
modification of the step tactics. Safe and unsafe wrappers are added
to the context with the functions given below, supplying them with
wrapper names. These names may be used to selectively delete
wrappers.
\<^descr> \<open>ctxt addSWrapper (name, wrapper)\<close> adds a new wrapper,
which should yield a safe tactic, to modify the existing safe step
tactic.
\<^descr> \<open>ctxt addSbefore (name, tac)\<close> adds the given tactic as a
safe wrapper, such that it is tried \<^emph>\<open>before\<close> each safe step of
the search.
\<^descr> \<open>ctxt addSafter (name, tac)\<close> adds the given tactic as a
safe wrapper, such that it is tried when a safe step of the search
would fail.
\<^descr> \<open>ctxt delSWrapper name\<close> deletes the safe wrapper with
the given name.
\<^descr> \<open>ctxt addWrapper (name, wrapper)\<close> adds a new wrapper to
modify the existing (unsafe) step tactic.
\<^descr> \<open>ctxt addbefore (name, tac)\<close> adds the given tactic as an
unsafe wrapper, such that its result is concatenated
\<^emph>\<open>before\<close> the result of each unsafe step.
\<^descr> \<open>ctxt addafter (name, tac)\<close> adds the given tactic as an
unsafe wrapper, such that its result is concatenated \<^emph>\<open>after\<close>
the result of each unsafe step.
\<^descr> \<open>ctxt delWrapper name\<close> deletes the unsafe wrapper with
the given name.
\<^descr> \<open>addSss\<close> adds the simpset of the context to its
classical set. The assumptions and goal will be simplified, in a
rather safe way, after each safe step of the search.
\<^descr> \<open>addss\<close> adds the simpset of the context to its
classical set. The assumptions and goal will be simplified before
each unsafe step of the search.
\<close>
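text \<open>
A minimal ML sketch of these operations, using the ad-hoc wrapper name
\<open>my_simp\<close> purely for illustration:
\<close>

ML \<open>
  (*add the Simplifier as a safe wrapper of the claset*)
  val ctxt1 = addSss \<^context>;

  (*try full simplification before each safe step, then delete that wrapper again*)
  val ctxt2 = \<^context> addSbefore ("my_simp", asm_full_simp_tac);
  val ctxt3 = ctxt2 delSWrapper "my_simp";
\<close>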
section \<open>Object-logic setup \label{sec:object-logic}\<close>
text \<open>
\begin{matharray}{rcl}
@{command_def "judgment"} & : & \<open>theory \<rightarrow> theory\<close> \\
@{method_def atomize} & : & \<open>method\<close> \\
@{attribute_def atomize} & : & \<open>attribute\<close> \\
@{attribute_def rule_format} & : & \<open>attribute\<close> \\
@{attribute_def rulify} & : & \<open>attribute\<close> \\
\end{matharray}
The very starting point for any Isabelle object-logic is a ``truth
judgment'' that links object-level statements to the meta-logic
(with its minimal language of \<open>prop\<close> that covers universal
quantification \<open>\<And>\<close> and implication \<open>\<Longrightarrow>\<close>).
Common object-logics are sufficiently expressive to internalize rule
statements over \<open>\<And>\<close> and \<open>\<Longrightarrow>\<close> within their own
language. This is useful in certain situations where a rule needs
to be viewed as an atomic statement from the meta-level perspective,
e.g.\ \<open>\<And>x. x \<in> A \<Longrightarrow> P x\<close> versus \<open>\<forall>x \<in> A. P x\<close>.
From the following language elements, only the @{method atomize}
method and @{attribute rule_format} attribute are occasionally
required by end-users; the rest is for those who need to set up their
own object-logic. In the latter case existing formulations of
Isabelle/FOL or Isabelle/HOL may be taken as realistic examples.
Generic tools may refer to the information provided by object-logic
declarations internally.
\<^rail>\<open>
@@{command judgment} @{syntax name} '::' @{syntax type} @{syntax mixfix}?
;
@@{attribute atomize} ('(' 'full' ')')?
;
@@{attribute rule_format} ('(' 'noasm' ')')?
\<close>
\<^descr> @{command "judgment"}~\<open>c :: \<sigma> (mx)\<close> declares constant
\<open>c\<close> as the truth judgment of the current object-logic. Its
type \<open>\<sigma>\<close> should specify a coercion of the category of
object-level propositions to \<open>prop\<close> of the Pure meta-logic;
the mixfix annotation \<open>(mx)\<close> would typically just link the
object language (internally of syntactic category \<open>logic\<close>)
with that of \<open>prop\<close>. Only one @{command "judgment"}
declaration may be given in any theory development.
\<^descr> @{method atomize} (as a method) rewrites any non-atomic
premises of a sub-goal, using the meta-level equations declared via
@{attribute atomize} (as an attribute) beforehand. As a result,
heavily nested goals become amenable to fundamental operations such
as resolution (cf.\ the @{method (Pure) rule} method). Giving the ``\<open>(full)\<close>'' option here means to turn the whole subgoal into an
object-statement (if possible), including the outermost parameters
and assumptions as well.
A typical collection of @{attribute atomize} rules for a particular
object-logic would provide an internalization for each of the
connectives of \<open>\<And>\<close>, \<open>\<Longrightarrow>\<close>, and \<open>\<equiv>\<close>.
Meta-level conjunction should be covered as well (this is
particularly important for locales, see \secref{sec:locale}).
\<^descr> @{attribute rule_format} rewrites a theorem by the equalities
declared as @{attribute rulify} rules in the current object-logic.
By default, the result is fully normalized, including assumptions
and conclusions at any depth. The \<open>(no_asm)\<close> option
restricts the transformation to the conclusion of a rule.
In common object-logics (HOL, FOL, ZF), the effect of @{attribute
rule_format} is to replace (bounded) universal quantification
(\<open>\<forall>\<close>) and implication (\<open>\<longrightarrow>\<close>) by the corresponding
rule statements over \<open>\<And>\<close> and \<open>\<Longrightarrow>\<close>.
\<close>
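text \<open>
Here is a small example in Isabelle/HOL: @{attribute rule_format} turns an
object-level statement into a rule, while @{method atomize} goes the other
way round on a subgoal.
\<close>

(*<*)experiment begin(*>*)
lemma all_imp: "\<forall>x. P x \<longrightarrow> P x"
  by blast

thm all_imp [rule_format]  \<comment> \<open>object-level \<open>\<forall>\<close>/\<open>\<longrightarrow>\<close> becomes a rule in \<open>\<Longrightarrow>\<close> form\<close>

lemma "\<And>x. P x \<Longrightarrow> P x"
  apply (atomize (full))
  apply blast
  done
(*<*)end(*>*)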
section \<open>Tracing higher-order unification\<close>
text \<open>
\begin{tabular}{rcll}
@{attribute_def unify_trace_simp} & : & \<open>attribute\<close> & default \<open>false\<close> \\
@{attribute_def unify_trace_types} & : & \<open>attribute\<close> & default \<open>false\<close> \\
@{attribute_def unify_trace_bound} & : & \<open>attribute\<close> & default \<open>50\<close> \\
@{attribute_def unify_search_bound} & : & \<open>attribute\<close> & default \<open>60\<close> \\
\end{tabular}
\<^medskip>
Higher-order unification works well in most practical situations,
but sometimes needs extra care to identify problems. These tracing
options may help.
\<^descr> @{attribute unify_trace_simp} controls tracing of the
simplification phase of higher-order unification.
\<^descr> @{attribute unify_trace_types} controls warnings of
incompleteness, when unification is not considering all possible
instantiations of schematic type variables.
\<^descr> @{attribute unify_trace_bound} determines the search depth at
which unification starts to print tracing information; 0 means full
tracing. At the default value, tracing
information is almost never printed in practice.
\<^descr> @{attribute unify_search_bound} prevents unification from
searching past the given depth. Because of this bound, higher-order
unification cannot return an infinite sequence, though it can return
an exponentially long one. The search rarely approaches the default
value in practice. If the search is cut off, unification prints a
warning ``Unification bound exceeded''.
\begin{warn}
Options for unification cannot be modified in a local context. Only
the global theory content is taken into account.
\end{warn}
\<close>
end
diff --git a/src/Doc/Isar_Ref/Spec.thy b/src/Doc/Isar_Ref/Spec.thy
--- a/src/Doc/Isar_Ref/Spec.thy
+++ b/src/Doc/Isar_Ref/Spec.thy
@@ -1,1548 +1,1548 @@
(*:maxLineLen=78:*)
theory Spec
imports Main Base
begin
chapter \<open>Specifications\<close>
text \<open>
The Isabelle/Isar theory format integrates specifications and proofs, with
support for interactive development by continuous document editing. There is
a separate document preparation system (see \chref{ch:document-prep}), for
typesetting formal developments together with informal text. The resulting
hyper-linked PDF documents can be used both for WWW presentation and printed
copies.
The Isar proof language (see \chref{ch:proofs}) is embedded into the theory
language as a proper sub-language. Proof mode is entered by stating some
\<^theory_text>\<open>theorem\<close> or \<^theory_text>\<open>lemma\<close> at the theory level, and left again with the final
conclusion (e.g.\ via \<^theory_text>\<open>qed\<close>).
\<close>
section \<open>Defining theories \label{sec:begin-thy}\<close>
text \<open>
\begin{matharray}{rcl}
@{command_def "theory"} & : & \<open>toplevel \<rightarrow> theory\<close> \\
@{command_def (global) "end"} & : & \<open>theory \<rightarrow> toplevel\<close> \\
@{command_def "thy_deps"}\<open>\<^sup>*\<close> & : & \<open>theory \<rightarrow>\<close> \\
\end{matharray}
Isabelle/Isar theories are defined via theory files, which consist of an
outermost sequence of definition--statement--proof elements. Some
definitions are self-sufficient (e.g.\ \<^theory_text>\<open>fun\<close> in Isabelle/HOL), with
foundational proofs performed internally. Other definitions require an
explicit proof as justification (e.g.\ \<^theory_text>\<open>function\<close> and \<^theory_text>\<open>termination\<close> in
Isabelle/HOL). Plain statements like \<^theory_text>\<open>theorem\<close> or \<^theory_text>\<open>lemma\<close> are merely a
special case of that, defining a theorem from a given proposition and its
proof.
The theory body may be sub-structured by means of \<^emph>\<open>local theory targets\<close>,
such as \<^theory_text>\<open>locale\<close> and \<^theory_text>\<open>class\<close>. It is also possible to use \<^theory_text>\<open>context begin \<dots>
end\<close> blocks to delimit a local theory context: a \<^emph>\<open>named context\<close> to
augment a locale or class specification, or an \<^emph>\<open>unnamed context\<close> to refer
to local parameters and assumptions that are discharged later. See
\secref{sec:target} for more details.
\<^medskip>
A theory is commenced by the \<^theory_text>\<open>theory\<close> command, which indicates imports of
previous theories, according to an acyclic foundational order. Before the
initial \<^theory_text>\<open>theory\<close> command, there may be optional document header material
(like \<^theory_text>\<open>section\<close> or \<^theory_text>\<open>text\<close>, see \secref{sec:markup}). The document header
is outside of the formal theory context, though.
A theory is concluded by a final @{command (global) "end"} command, one that
does not belong to a local theory target. No further commands may follow
such a global @{command (global) "end"}.
\<^rail>\<open>
@@{command theory} @{syntax system_name}
@'imports' (@{syntax system_name} +) \<newline>
keywords? abbrevs? @'begin'
;
keywords: @'keywords' (keyword_decls + @'and')
;
keyword_decls: (@{syntax string} +) ('::' @{syntax name} @{syntax tags})?
;
abbrevs: @'abbrevs' (((text+) '=' (text+)) + @'and')
;
@@{command thy_deps} (thy_bounds thy_bounds?)?
;
thy_bounds: @{syntax name} | '(' (@{syntax name} + @'|') ')'
\<close>
\<^descr> \<^theory_text>\<open>theory A imports B\<^sub>1 \<dots> B\<^sub>n begin\<close> starts a new theory \<open>A\<close> based on the
merge of existing theories \<open>B\<^sub>1 \<dots> B\<^sub>n\<close>. Due to the possibility of importing
more than one ancestor, the resulting theory structure of an Isabelle
session forms a directed acyclic graph (DAG). Isabelle takes care that
sources contributing to the development graph are always up-to-date: changed
files are automatically rechecked whenever a theory header specification is
processed.
Empty imports are only allowed in the bootstrap process of the special
theory \<^theory>\<open>Pure\<close>, which is the start of any other formal development
based on Isabelle. Regular user theories usually refer to some more complex
entry point, such as theory \<^theory>\<open>Main\<close> in Isabelle/HOL.
The @{keyword_def "keywords"} specification declares outer syntax
(\chref{ch:outer-syntax}) that is introduced in this theory later on (rare
in end-user applications). Both minor keywords and major keywords of the
Isar command language need to be specified, in order to make parsing of
proof documents work properly. Command keywords need to be classified
according to their structural role in the formal text. Examples may be seen
in the Isabelle/HOL sources themselves, such as @{keyword "keywords"}~\<^verbatim>\<open>"typedef"\<close>
\<open>:: thy_goal_defn\<close> or @{keyword "keywords"}~\<^verbatim>\<open>"datatype"\<close> \<open>:: thy_defn\<close> for
theory-level definitions with and without proof, respectively. Additional
@{syntax tags} provide defaults for document preparation
(\secref{sec:document-markers}).
The @{keyword_def "abbrevs"} specification declares additional abbreviations
for syntactic completion. The default for a new keyword is just its name,
but completion may be avoided by defining @{keyword "abbrevs"} with empty
text.
\<^descr> @{command (global) "end"} concludes the current theory definition. Note
that some other commands, e.g.\ local theory targets \<^theory_text>\<open>locale\<close> or \<^theory_text>\<open>class\<close>
may involve a \<^theory_text>\<open>begin\<close> that needs to be matched by @{command (local) "end"},
according to the usual rules for nested blocks.
\<^descr> \<^theory_text>\<open>thy_deps\<close> visualizes the theory hierarchy as a directed acyclic graph.
By default, all imported theories are shown. This may be restricted by
specifying bounds wrt. the theory inclusion relation.
\<close>
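text \<open>
An illustrative theory header (with artificial names) that declares an
additional command keyword and a completion abbreviation might look like
this:
@{verbatim [display]
\<open>theory My_Theory
  imports Main
  keywords "my_command" :: thy_decl
  abbrevs "|=" = "\<Turnstile>"
begin

end\<close>}
\<close>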
section \<open>Local theory targets \label{sec:target}\<close>
text \<open>
\begin{matharray}{rcll}
@{command_def "context"} & : & \<open>theory \<rightarrow> local_theory\<close> \\
@{command_def (local) "end"} & : & \<open>local_theory \<rightarrow> theory\<close> \\
@{keyword_def "private"} \\
@{keyword_def "qualified"} \\
\end{matharray}
A local theory target is a specification context that is managed separately
within the enclosing theory. Contexts may introduce parameters (fixed
variables) and assumptions (hypotheses). Definitions and theorems depending
on the context may be added incrementally later on.
\<^emph>\<open>Named contexts\<close> refer to locales (cf.\ \secref{sec:locale}) or type
classes (cf.\ \secref{sec:class}); the name ``\<open>-\<close>'' signifies the global
theory context.
\<^emph>\<open>Unnamed contexts\<close> may introduce additional parameters and assumptions, and
results produced in the context are generalized accordingly. Such auxiliary
contexts may be nested within other targets, like \<^theory_text>\<open>locale\<close>, \<^theory_text>\<open>class\<close>,
\<^theory_text>\<open>instantiation\<close>, \<^theory_text>\<open>overloading\<close>.
\<^rail>\<open>
@@{command context} @{syntax name} @{syntax_ref "opening"}? @'begin'
;
@@{command context} @{syntax_ref "includes"}? (@{syntax context_elem} * ) @'begin'
;
@{syntax_def target}: '(' @'in' @{syntax name} ')'
\<close>
\<^descr> \<^theory_text>\<open>context c bundles begin\<close> opens a named context, by recommencing an existing
locale or class \<open>c\<close>. Note that locale and class definitions allow the
\<^theory_text>\<open>begin\<close> keyword to be included as well, in order to continue the local theory
immediately after the initial specification. Optionally given
\<open>bundles\<close> only take effect in the surface context within the \<^theory_text>\<open>begin\<close> /
\<^theory_text>\<open>end\<close> block.
\<^descr> \<^theory_text>\<open>context bundles elements begin\<close> opens an unnamed context, by extending
the enclosing global or local theory target by the given declaration bundles
(\secref{sec:bundle}) and context elements (\<^theory_text>\<open>fixes\<close>, \<^theory_text>\<open>assumes\<close> etc.). This
means any results stemming from definitions and proofs in the extended
context will be exported into the enclosing target by lifting over extra
parameters and premises.
\<^descr> @{command (local) "end"} concludes the current local theory, according to
the nesting of contexts. Note that a global @{command (global) "end"} has a
different meaning: it concludes the theory itself (\secref{sec:begin-thy}).
\<^descr> \<^theory_text>\<open>private\<close> or \<^theory_text>\<open>qualified\<close> may be given as modifiers before any local
theory command. This restricts name space accesses to the local scope, as
determined by the enclosing \<^theory_text>\<open>context begin \<dots> end\<close> block. Outside its scope,
a \<^theory_text>\<open>private\<close> name is inaccessible, and a \<^theory_text>\<open>qualified\<close> name is only
accessible with some qualification.
Neither a global \<^theory_text>\<open>theory\<close> nor a \<^theory_text>\<open>locale\<close> target provides a local scope by
itself: an extra unnamed context is required to use \<^theory_text>\<open>private\<close> or
\<^theory_text>\<open>qualified\<close> here.
\<^descr> \<open>(\<close>@{keyword_def "in"}~\<open>c)\<close> given after any local theory command specifies
an immediate target, e.g.\ ``\<^theory_text>\<open>definition (in c)\<close>'' or
``\<^theory_text>\<open>theorem (in c)\<close>''. This works both in a local or global theory context;
the current target context will be suspended for this command only. Note
that ``\<^theory_text>\<open>(in -)\<close>'' will always produce a global result independently of the
current target context.
Any specification element that operates on \<open>local_theory\<close> according to this
manual implicitly allows the above target syntax \<^theory_text>\<open>(in c)\<close>, but individual
syntax diagrams omit that aspect for clarity.
\<^medskip>
The exact meaning of results produced within a local theory context depends
on the underlying target infrastructure (locale, type class etc.). The
general idea is as follows, considering a context named \<open>c\<close> with parameter
\<open>x\<close> and assumption \<open>A[x]\<close>.
Definitions are exported by introducing a global version with additional
arguments; a syntactic abbreviation links the long form with the abstract
version of the target context. For example, \<open>a \<equiv> t[x]\<close> becomes \<open>c.a ?x \<equiv>
t[?x]\<close> at the theory level (for arbitrary \<open>?x\<close>), together with a local
abbreviation \<open>a \<equiv> c.a x\<close> in the target context (for the fixed parameter
\<open>x\<close>).
Theorems are exported by discharging the assumptions and generalizing the
parameters of the context. For example, \<open>a: B[x]\<close> becomes \<open>c.a: A[?x] \<Longrightarrow>
B[?x]\<close>, again for arbitrary \<open>?x\<close>.
\<close>
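text \<open>
Here is an artificial example of an unnamed context with name space
modifiers; the constants \<open>aux\<close> and \<open>pub\<close> are purely illustrative.
\<close>

(*<*)experiment begin(*>*)
context
begin

private definition aux :: nat where "aux = 0"

qualified definition pub :: nat where "pub = aux + 1"

lemma "pub = 1"
  by (simp add: pub_def aux_def)

end
(*<*)end(*>*)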
section \<open>Bundled declarations \label{sec:bundle}\<close>
text \<open>
\begin{matharray}{rcl}
@{command_def "bundle"} & : & \<open>local_theory \<rightarrow> local_theory\<close> \\
@{command "bundle"} & : & \<open>theory \<rightarrow> local_theory\<close> \\
@{command_def "print_bundles"}\<open>\<^sup>*\<close> & : & \<open>context \<rightarrow>\<close> \\
@{command_def "include"} & : & \<open>proof(state) \<rightarrow> proof(state)\<close> \\
@{command_def "including"} & : & \<open>proof(prove) \<rightarrow> proof(prove)\<close> \\
@{keyword_def "includes"} & : & syntax \\
\end{matharray}
The outer syntax of fact expressions (\secref{sec:syn-att}) involves
theorems and attributes, which are evaluated in the context and applied to
it. Attributes may declare theorems to the context, as in \<open>this_rule [intro]
that_rule [elim]\<close> for example. Configuration options (\secref{sec:config})
are special declaration attributes that operate on the context without a
theorem, as in \<open>[[show_types = false]]\<close> for example.
Expressions of this form may be defined as \<^emph>\<open>bundled declarations\<close> in the
context, and included in other situations later on. Including declaration
bundles augments a local context casually without logical dependencies,
which is in contrast to locales and locale interpretation
(\secref{sec:locale}).
\<^rail>\<open>
@@{command bundle} @{syntax name}
( '=' @{syntax thms} @{syntax for_fixes} | @'begin')
;
@@{command print_bundles} ('!'?)
;
(@@{command include} | @@{command including}) (@{syntax name}+)
;
@{syntax_def "includes"}: @'includes' (@{syntax name}+)
;
@{syntax_def "opening"}: @'opening' (@{syntax name}+)
;
@@{command unbundle} (@{syntax name}+)
\<close>
\<^descr> \<^theory_text>\<open>bundle b = decls\<close> defines a bundle of declarations in the current
context. The RHS is similar to the one of the \<^theory_text>\<open>declare\<close> command. Bundles
defined in local theory targets are subject to transformations via
morphisms, when moved into different application contexts; this works
analogously to any other local theory specification.
\<^descr> \<^theory_text>\<open>bundle b begin body end\<close> defines a bundle of declarations from the
\<open>body\<close> of local theory specifications. It may consist of commands that are
technically equivalent to \<^theory_text>\<open>declare\<close> or \<^theory_text>\<open>declaration\<close>, which also includes
\<^theory_text>\<open>notation\<close>, for example. Named fact declarations like ``\<^theory_text>\<open>lemmas a [simp] =
b\<close>'' or ``\<^theory_text>\<open>lemma a [simp]: B \<proof>\<close>'' are also admitted, but the name
bindings are not recorded in the bundle.
\<^descr> \<^theory_text>\<open>print_bundles\<close> prints the named bundles that are available in the
current context; the ``\<open>!\<close>'' option indicates extra verbosity.
\<^descr> \<^theory_text>\<open>include b\<^sub>1 \<dots> b\<^sub>n\<close> activates the declarations from the given bundles
in a proof body (forward mode). This is analogous to \<^theory_text>\<open>note\<close>
(\secref{sec:proof-facts}) with the expanded bundles.
\<^descr> \<^theory_text>\<open>including b\<^sub>1 \<dots> b\<^sub>n\<close> is similar to \<^theory_text>\<open>include\<close>, but works in proof refinement
(backward mode). This is analogous to \<^theory_text>\<open>using\<close> (\secref{sec:proof-facts})
with the expanded bundles.
\<^descr> \<^theory_text>\<open>includes b\<^sub>1 \<dots> b\<^sub>n\<close> is similar to \<^theory_text>\<open>include\<close>, but applies to a
confined specification context: unnamed \<^theory_text>\<open>context\<close>s and
long statements of \<^theory_text>\<open>theorem\<close>.
\<^descr> \<^theory_text>\<open>opening b\<^sub>1 \<dots> b\<^sub>n\<close> is similar to \<^theory_text>\<open>includes\<close>, but applies to
a named specification context: \<^theory_text>\<open>locale\<close>s, \<^theory_text>\<open>class\<close>es and
named \<^theory_text>\<open>context\<close>s. The effect is confined to the surface context within the
specification block itself and the corresponding \<^theory_text>\<open>begin\<close> / \<^theory_text>\<open>end\<close> block.
\<^descr> \<^theory_text>\<open>unbundle b\<^sub>1 \<dots> b\<^sub>n\<close> activates the declarations from the given bundles in
the current local theory context. This is analogous to \<^theory_text>\<open>lemmas\<close>
(\secref{sec:theorems}) with the expanded bundles.
Here is an artificial example of bundling various configuration options:
\<close>
(*<*)experiment begin(*>*)
bundle trace = [[simp_trace, linarith_trace, metis_trace, smt_trace]]
lemma "x = x"
including trace by metis
(*<*)end(*>*)
section \<open>Term definitions \label{sec:term-definitions}\<close>
text \<open>
\begin{matharray}{rcll}
@{command_def "definition"} & : & \<open>local_theory \<rightarrow> local_theory\<close> \\
@{attribute_def "defn"} & : & \<open>attribute\<close> \\
@{command_def "print_defn_rules"}\<open>\<^sup>*\<close> & : & \<open>context \<rightarrow>\<close> \\
@{command_def "abbreviation"} & : & \<open>local_theory \<rightarrow> local_theory\<close> \\
@{command_def "print_abbrevs"}\<open>\<^sup>*\<close> & : & \<open>context \<rightarrow>\<close> \\
\end{matharray}
Term definitions may either happen within the logic, as equational axioms of
a certain form (see also \secref{sec:overloading}), or outside of it, as a
rewrite system on abstract syntax. The second form is called
``abbreviation''.
\<^rail>\<open>
@@{command definition} decl? definition
;
@@{command abbreviation} @{syntax mode}? decl? abbreviation
;
@@{command print_abbrevs} ('!'?)
;
decl: @{syntax name} ('::' @{syntax type})? @{syntax mixfix}? @'where'
;
definition: @{syntax thmdecl}? @{syntax prop}
@{syntax spec_prems} @{syntax for_fixes}
;
abbreviation: @{syntax prop} @{syntax for_fixes}
\<close>
\<^descr> \<^theory_text>\<open>definition c where eq\<close> produces an internal definition \<open>c \<equiv> t\<close> according
to the specification given as \<open>eq\<close>, which is then turned into a proven fact.
The given proposition may deviate from internal meta-level equality
according to the rewrite rules declared as @{attribute defn} by the
object-logic. This usually covers object-level equality \<open>x = y\<close> and
equivalence \<open>A \<longleftrightarrow> B\<close>. End-users normally need not change the @{attribute
defn} setup.
Definitions may be presented with explicit arguments on the LHS, as well as
additional conditions, e.g.\ \<open>f x y = t\<close> instead of \<open>f \<equiv> \<lambda>x y. t\<close> and \<open>y \<noteq> 0
\<Longrightarrow> g x y = u\<close> instead of an unrestricted \<open>g \<equiv> \<lambda>x y. u\<close>.
\<^descr> \<^theory_text>\<open>print_defn_rules\<close> prints the definitional rewrite rules declared via
@{attribute defn} in the current context.
\<^descr> \<^theory_text>\<open>abbreviation c where eq\<close> introduces a syntactic constant which is
associated with a certain term according to the meta-level equality \<open>eq\<close>.
Abbreviations participate in the usual type-inference process, but are
expanded before the logic ever sees them. Pretty printing of terms involves
higher-order rewriting with rules stemming from reverted abbreviations. This
needs some care to avoid overlapping or looping syntactic replacements!
The optional \<open>mode\<close> specification restricts output to a particular print
mode; using ``\<open>input\<close>'' here achieves the effect of one-way abbreviations.
The mode may also include an ``\<^theory_text>\<open>output\<close>'' qualifier that affects the
concrete syntax declared for abbreviations, cf.\ \<^theory_text>\<open>syntax\<close> in
\secref{sec:syn-trans}.
\<^descr> \<^theory_text>\<open>print_abbrevs\<close> prints all constant abbreviations of the current context;
the ``\<open>!\<close>'' option indicates extra verbosity.
\<close>
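text \<open>
Here is an artificial example of a definition together with an abbreviation;
the names \<open>double\<close> and \<open>quad\<close> are purely illustrative.
\<close>

(*<*)experiment begin(*>*)
definition double :: "nat \<Rightarrow> nat"
  where "double n = n + n"

abbreviation quad :: "nat \<Rightarrow> nat"
  where "quad n \<equiv> double (double n)"

lemma "quad 1 = 4"
  by (simp add: double_def)
(*<*)end(*>*)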
section \<open>Axiomatizations \label{sec:axiomatizations}\<close>
text \<open>
\begin{matharray}{rcll}
@{command_def "axiomatization"} & : & \<open>theory \<rightarrow> theory\<close> & (axiomatic!) \\
\end{matharray}
\<^rail>\<open>
@@{command axiomatization} @{syntax vars}? (@'where' axiomatization)?
;
axiomatization: (@{syntax thmdecl} @{syntax prop} + @'and')
@{syntax spec_prems} @{syntax for_fixes}
\<close>
\<^descr> \<^theory_text>\<open>axiomatization c\<^sub>1 \<dots> c\<^sub>m where \<phi>\<^sub>1 \<dots> \<phi>\<^sub>n\<close> introduces several constants
simultaneously and states axiomatic properties for these. The constants are
marked as being specified once and for all, which prevents additional
specifications for the same constants later on, but it is always possible to
emit axiomatizations without referring to particular constants. Note that
lack of precise dependency tracking of axiomatizations may disrupt the
well-formedness of an otherwise definitional theory.
Axiomatization is restricted to a global theory context: support for local
theory targets \secref{sec:target} would introduce an extra dimension of
uncertainty about what the written specifications really are, and make it
infeasible to argue why they are correct.
Axiomatic specifications are required when declaring a new logical system
within Isabelle/Pure, but in an application environment like Isabelle/HOL
the user normally stays within definitional mechanisms provided by the logic
and its libraries.
\<close>
section \<open>Generic declarations\<close>
text \<open>
\begin{matharray}{rcl}
@{command_def "declaration"} & : & \<open>local_theory \<rightarrow> local_theory\<close> \\
@{command_def "syntax_declaration"} & : & \<open>local_theory \<rightarrow> local_theory\<close> \\
@{command_def "declare"} & : & \<open>local_theory \<rightarrow> local_theory\<close> \\
\end{matharray}
Arbitrary operations on the background context may be wrapped up as generic
declaration elements. Since the underlying concept of local theories may be
subject to later re-interpretation, there is an additional dependency on a
morphism that tells the difference of the original declaration context wrt.\
the application context encountered later on. A fact declaration is an
important special case: it consists of a theorem which is applied to the
context by means of an attribute.
\<^rail>\<open>
(@@{command declaration} | @@{command syntax_declaration})
('(' @'pervasive' ')')? \<newline> @{syntax text}
;
@@{command declare} (@{syntax thms} + @'and')
\<close>
- \<^descr> \<^theory_text>\<open>declaration d\<close> adds the declaration function \<open>d\<close> of ML type \<^ML_type>\<open>declaration\<close>, to the current local theory under construction. In later
+ \<^descr> \<^theory_text>\<open>declaration d\<close> adds the declaration function \<open>d\<close> of ML type \<^ML_type>\<open>Morphism.declaration\<close>, to the current local theory under construction. In later
application contexts, the function is transformed according to the morphisms
being involved in the interpretation hierarchy.
If the \<^theory_text>\<open>(pervasive)\<close> option is given, the corresponding declaration is
applied to all possible contexts involved, including the global background
theory.
\<^descr> \<^theory_text>\<open>syntax_declaration\<close> is similar to \<^theory_text>\<open>declaration\<close>, but is meant to affect
only ``syntactic'' tools by convention (such as notation and type-checking
information).
\<^descr> \<^theory_text>\<open>declare thms\<close> declares theorems to the current local theory context. No
theorem binding is involved here, unlike \<^theory_text>\<open>lemmas\<close> (cf.\
\secref{sec:theorems}), so \<^theory_text>\<open>declare\<close> only has the effect of applying
attributes as included in the theorem specification.
\<close>
section \<open>Locales \label{sec:locale}\<close>
text \<open>
A locale is a functor that maps parameters (including implicit type
parameters) and a specification to a list of declarations. The syntax of
locales is modeled after the Isar proof context commands (cf.\
\secref{sec:proof-context}).
Locale hierarchies are supported by maintaining a graph of dependencies
between locale instances in the global theory. Dependencies may be
introduced through import (where a locale is defined as sublocale of the
imported instances) or by proving that an existing locale is a sublocale of
one or several locale instances.
A locale may be opened with the purpose of appending to its list of
declarations (cf.\ \secref{sec:target}). When opening a locale, declarations
from all dependencies are collected and presented as a local theory. In
this process, which is called \<^emph>\<open>roundup\<close>, redundant locale instances are
omitted. A locale instance is redundant if it is subsumed by an instance
encountered earlier. A more detailed description of this process is
available elsewhere \<^cite>\<open>Ballarin2014\<close>.
\<close>
subsection \<open>Locale expressions \label{sec:locale-expr}\<close>
text \<open>
A \<^emph>\<open>locale expression\<close> denotes a context composed of instances of existing
locales. The context consists of the declaration elements from the locale
instances. Redundant locale instances are omitted according to roundup.
\<^rail>\<open>
@{syntax_def locale_expr}: (instance + '+') @{syntax for_fixes}
;
instance: (qualifier ':')? @{syntax name} (pos_insts | named_insts) \<newline>
rewrites?
;
qualifier: @{syntax name} ('?')?
;
pos_insts: ('_' | @{syntax term})*
;
named_insts: @'where' (@{syntax name} '=' @{syntax term} + @'and')
;
rewrites: @'rewrites' (@{syntax thmdecl}? @{syntax prop} + @'and')
\<close>
A locale instance consists of a reference to a locale and either positional
or named parameter instantiations optionally followed by rewrites clauses.
Identical instantiations (that is, those
that instantiate a parameter by itself) may be omitted. The notation ``\<open>_\<close>''
allows omitting the instantiation for a parameter inside a positional
instantiation.
Terms in instantiations are from the context the locale expression is
declared in. Local names may be added to this context with the optional
\<^theory_text>\<open>for\<close> clause. This is useful for shadowing names bound in outer contexts,
and for declaring syntax. In addition, syntax declarations from one instance
are effective when parsing subsequent instances of the same expression.
Instances have an optional qualifier which applies to names in declarations.
Names include local definitions and theorem names. If present, the qualifier
itself is either mandatory (default) or non-mandatory (when followed by
``\<^verbatim>\<open>?\<close>''). Non-mandatory means that the qualifier may be omitted on input.
Qualifiers only affect name spaces; they play no role in determining whether
one locale instance subsumes another.
Rewrite clauses amend instances with equations that act as rewrite rules.
This is particularly useful for changing concepts introduced through
definitions. Rewrite clauses are available only in interpretation commands
(see \secref{sec:locale-interpretation} below) and must be proved by the user.
\<close>
subsection \<open>Locale declarations\<close>
text \<open>
\begin{tabular}{rcl}
@{command_def "locale"} & : & \<open>theory \<rightarrow> local_theory\<close> \\
@{command_def "experiment"} & : & \<open>theory \<rightarrow> local_theory\<close> \\
@{command_def "print_locale"}\<open>\<^sup>*\<close> & : & \<open>context \<rightarrow>\<close> \\
@{command_def "print_locales"}\<open>\<^sup>*\<close> & : & \<open>context \<rightarrow>\<close> \\
@{command_def "locale_deps"}\<open>\<^sup>*\<close> & : & \<open>context \<rightarrow>\<close> \\
\end{tabular}
@{index_ref \<open>\<^theory_text>\<open>fixes\<close> (element)\<close>}
@{index_ref \<open>\<^theory_text>\<open>constrains\<close> (element)\<close>}
@{index_ref \<open>\<^theory_text>\<open>assumes\<close> (element)\<close>}
@{index_ref \<open>\<^theory_text>\<open>defines\<close> (element)\<close>}
@{index_ref \<open>\<^theory_text>\<open>notes\<close> (element)\<close>}
\<^rail>\<open>
@@{command locale} @{syntax name} ('=' @{syntax locale})? @'begin'?
;
@@{command experiment} (@{syntax context_elem}*) @'begin'
;
@@{command print_locale} '!'? @{syntax name}
;
@@{command print_locales} ('!'?)
;
@{syntax_def locale}: @{syntax context_elem}+ |
@{syntax_ref "opening"} ('+' (@{syntax context_elem}+))? |
@{syntax locale_expr} @{syntax_ref "opening"}? ('+' (@{syntax context_elem}+))?
;
@{syntax_def context_elem}:
@'fixes' @{syntax vars} |
@'constrains' (@{syntax name} '::' @{syntax type} + @'and') |
@'assumes' (@{syntax props} + @'and') |
@'defines' (@{syntax thmdecl}? @{syntax prop} @{syntax prop_pat}? + @'and') |
@'notes' (@{syntax thmdef}? @{syntax thms} + @'and')
\<close>
\<^descr> \<^theory_text>\<open>locale loc = import opening bundles + body\<close> defines a new locale \<open>loc\<close>
as a context consisting of a certain view of existing locales (\<open>import\<close>) plus some
additional elements (\<open>body\<close>) with declaration \<open>bundles\<close> enriching the context
of the command itself. All \<open>import\<close>, \<open>bundles\<close> and \<open>body\<close> are optional; the
degenerate form \<^theory_text>\<open>locale loc\<close> defines an empty locale, which may still be
useful to collect declarations of facts later on. Type-inference on locale
expressions automatically takes care of the most general typing that the
combined context elements may acquire.
The \<open>import\<close> consists of a locale expression; see \secref{sec:locale-expr}
above. Its \<^theory_text>\<open>for\<close> clause defines the parameters of \<open>import\<close>. These are
parameters of the defined locale. Locale parameters whose instantiation is
omitted automatically extend the (possibly empty) \<^theory_text>\<open>for\<close> clause: they are
inserted at its beginning. This means that these parameters may be referred
to from within the expression and also in the subsequent context elements
and provides a notational convenience for the inheritance of parameters in
locale declarations.
Declarations from \<open>bundles\<close>, see \secref{sec:bundle}, are effective in the
entire command including a subsequent \<^theory_text>\<open>begin\<close> / \<^theory_text>\<open>end\<close> block, but they do
not contribute to the declarations stored in the locale.
The \<open>body\<close> consists of context elements:
\<^descr> @{element "fixes"}~\<open>x :: \<tau> (mx)\<close> declares a local parameter of type \<open>\<tau>\<close>
and mixfix annotation \<open>mx\<close> (both are optional). The special syntax
declaration ``\<open>(\<close>@{keyword_ref "structure"}\<open>)\<close>'' means that \<open>x\<close> may be
referenced implicitly in this context.
\<^descr> @{element "constrains"}~\<open>x :: \<tau>\<close> introduces a type constraint \<open>\<tau>\<close> on the
local parameter \<open>x\<close>. This element is deprecated. The type constraint
should be introduced in the \<^theory_text>\<open>for\<close> clause or the relevant @{element
"fixes"} element.
\<^descr> @{element "assumes"}~\<open>a: \<phi>\<^sub>1 \<dots> \<phi>\<^sub>n\<close> introduces local premises, similar
to \<^theory_text>\<open>assume\<close> within a proof (cf.\ \secref{sec:proof-context}).
\<^descr> @{element "defines"}~\<open>a: x \<equiv> t\<close> defines a previously declared parameter.
This is similar to \<^theory_text>\<open>define\<close> within a proof (cf.\
\secref{sec:proof-context}), but @{element "defines"} is restricted to
Pure equalities and the defined variable needs to be declared beforehand
via @{element "fixes"}. The left-hand side of the equation may have
additional arguments, e.g.\ ``@{element "defines"}~\<open>f x\<^sub>1 \<dots> x\<^sub>n \<equiv> t\<close>'',
which need to be free in the context.
\<^descr> @{element "notes"}~\<open>a = b\<^sub>1 \<dots> b\<^sub>n\<close> reconsiders facts within a local
context. Most notably, this may include arbitrary declarations in any
attribute specifications included here, e.g.\ a local @{attribute simp}
rule.
Both @{element "assumes"} and @{element "defines"} elements contribute to
the locale specification. When defining an operation derived from the
parameters, \<^theory_text>\<open>definition\<close> (\secref{sec:term-definitions}) is usually more
appropriate.
Note that ``\<^theory_text>\<open>(is p\<^sub>1 \<dots> p\<^sub>n)\<close>'' patterns given in the syntax of @{element
"assumes"} and @{element "defines"} above are illegal in locale definitions.
In the long goal format of \secref{sec:goals}, term bindings may be included
as expected, though.
\<^medskip>
Locale specifications are ``closed up'' by turning the given text into a
predicate definition \<open>loc_axioms\<close> and deriving the original assumptions as
local lemmas (modulo local definitions). The predicate statement covers only
the newly specified assumptions, omitting the content of included locale
expressions. The full cumulative view is only provided on export, involving
another predicate \<open>loc\<close> that refers to the complete specification text.
In any case, the predicate arguments are those locale parameters that
actually occur in the respective piece of text. These predicates
operate at the meta-level in theory, but the locale package attempts to
internalize statements according to the object-logic setup (e.g.\ replacing
\<open>\<And>\<close> by \<open>\<forall>\<close>, and \<open>\<Longrightarrow>\<close> by \<open>\<longrightarrow>\<close> in HOL; see also \secref{sec:object-logic}).
Separate introduction rules \<open>loc_axioms.intro\<close> and \<open>loc.intro\<close> are provided
as well.
\<^descr> \<^theory_text>\<open>experiment body begin\<close> opens an anonymous locale context with private
naming policy. Specifications in its body are inaccessible from outside.
This is useful to perform experiments, without polluting the name space.
\<^descr> \<^theory_text>\<open>print_locale "locale"\<close> prints the contents of the named locale. The
command omits @{element "notes"} elements by default. Use \<^theory_text>\<open>print_locale!\<close>
to have them included.
\<^descr> \<^theory_text>\<open>print_locales\<close> prints the names of all locales of the current theory;
the ``\<open>!\<close>'' option indicates extra verbosity.
\<^descr> \<^theory_text>\<open>locale_deps\<close> visualizes all locales and their relations as a Hasse
diagram. This includes locales defined as type classes (\secref{sec:class}).
\<close>
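text \<open>
  A small sketch of a locale declaration with a subsequent \<^theory_text>\<open>begin\<close> /
  \<^theory_text>\<open>end\<close> block (all names are hypothetical): the parameter is introduced via
  @{element "fixes"}, the specification via @{element "assumes"}, and a
  derived operation is added with \<^theory_text>\<open>definition\<close> rather than @{element
  "defines"}.
\<close>

(*hypothetical example locale, for illustration only*)
locale demo_order =
  fixes le :: "'a \<Rightarrow> 'a \<Rightarrow> bool"
  assumes refl: "le x x"
    and trans: "le x y \<Longrightarrow> le y z \<Longrightarrow> le x z"
begin

definition strict :: "'a \<Rightarrow> 'a \<Rightarrow> bool"
  where "strict x y \<longleftrightarrow> le x y \<and> x \<noteq> y"

end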
subsection \<open>Locale interpretation \label{sec:locale-interpretation}\<close>
text \<open>
\begin{matharray}{rcl}
@{command "interpretation"} & : & \<open>local_theory \<rightarrow> proof(prove)\<close> \\
@{command_def "interpret"} & : & \<open>proof(state) | proof(chain) \<rightarrow> proof(prove)\<close> \\
@{command_def "global_interpretation"} & : & \<open>theory | local_theory \<rightarrow> proof(prove)\<close> \\
@{command_def "sublocale"} & : & \<open>theory | local_theory \<rightarrow> proof(prove)\<close> \\
@{command_def "print_interps"}\<open>\<^sup>*\<close> & : & \<open>context \<rightarrow>\<close> \\
@{method_def intro_locales} & : & \<open>method\<close> \\
@{method_def unfold_locales} & : & \<open>method\<close> \\
@{attribute_def trace_locales} & : & \mbox{\<open>attribute\<close> \quad default \<open>false\<close>} \\
\end{matharray}
Locales may be instantiated, and the resulting instantiated declarations
added to the current context. This requires proof (of the instantiated
specification) and is called \<^emph>\<open>locale interpretation\<close>. Interpretation is
possible within arbitrary local theories (\<^theory_text>\<open>interpretation\<close>), within proof
bodies (\<^theory_text>\<open>interpret\<close>), into global theories (\<^theory_text>\<open>global_interpretation\<close>) and
into locales (\<^theory_text>\<open>sublocale\<close>).
\<^rail>\<open>
@@{command interpretation} @{syntax locale_expr}
;
@@{command interpret} @{syntax locale_expr}
;
@@{command global_interpretation} @{syntax locale_expr} definitions?
;
@@{command sublocale} (@{syntax name} ('<' | '\<subseteq>'))? @{syntax locale_expr} \<newline>
definitions?
;
@@{command print_interps} @{syntax name}
;
definitions: @'defines' (@{syntax thmdecl}? @{syntax name} \<newline>
@{syntax mixfix}? '=' @{syntax term} + @'and');
\<close>
The core of each interpretation command is a locale expression \<open>expr\<close>; the
command generates proof obligations for the instantiated specifications.
Once these are discharged by the user, instantiated declarations (in
particular, facts) are added to the context in a post-processing phase, in a
manner specific to each command.
Interpretation commands are aware of interpretations that are already
active: post-processing is achieved through a variant of roundup that takes
interpretations of the current global or local theory into account. In order
to simplify the proof obligations according to existing interpretations, use
the methods @{method intro_locales} or @{method unfold_locales}.
Rewrites clauses \<^theory_text>\<open>rewrites eqns\<close> occur within expressions. They amend the
morphism through which a locale instance is interpreted with rewrite rules,
also called rewrite morphisms. This is particularly useful for interpreting
concepts introduced through definitions. The equations must be proved by the
user. To enable syntax of the instantiated locale within the equations, while
reading a locale expression, equations of a locale instance are read in a
temporary context where the instance is already activated. If activation
fails, typically due to duplicate constant declarations, processing falls
back to reading the equation first.
Given definitions \<open>defs\<close> produce corresponding definitions in the local
theory's underlying target \<^emph>\<open>and\<close> amend the morphism with rewrite rules
stemming from the symmetric form of those definitions. Hence these need not be
proved explicitly by the user. Such rewrite definitions are an even more useful
device for interpreting concepts introduced through definitions, but they
are only supported for interpretation commands operating in a local theory
whose implementing target actually supports this. Note that despite
the suggestive \<^theory_text>\<open>and\<close> connective, \<open>defs\<close>
are processed sequentially without mutual recursion.
\<^descr> \<^theory_text>\<open>interpretation expr\<close> interprets \<open>expr\<close> into a local theory
such that its lifetime is limited to the current context block (e.g. a
locale or unnamed context). At the closing @{command end} of the block the
interpretation and its declarations disappear. Hence facts based on
interpretation can be established without creating permanent links to the
interpreted locale instances, as would be the case with @{command
sublocale}.
When used on the level of a global theory, there is no end of a current
context block; hence \<^theory_text>\<open>interpretation\<close> then behaves identically to
\<^theory_text>\<open>global_interpretation\<close>.
\<^descr> \<^theory_text>\<open>interpret expr\<close> interprets \<open>expr\<close> into a proof context:
the interpretation and its declarations disappear when closing the current
proof block. Note that for \<^theory_text>\<open>interpret\<close> the \<open>eqns\<close> should be explicitly
universally quantified.
\<^descr> \<^theory_text>\<open>global_interpretation expr defines defs\<close> interprets \<open>expr\<close>
into a global theory.
When adding declarations to locales, interpreted versions of these
declarations are added to the global theory for all interpretations in the
global theory as well. That is, interpretations into global theories
dynamically participate in any declarations added to locales.
Free variables in the interpreted expression are allowed. They are turned
into schematic variables in the generated declarations. In order to use a
free variable whose name is already bound in the context --- for example,
because a constant of that name exists --- add it to the \<^theory_text>\<open>for\<close> clause.
When used in a nested target, resulting declarations are propagated
through the whole target stack.
\<^descr> \<^theory_text>\<open>sublocale name \<subseteq> expr defines defs\<close> interprets \<open>expr\<close>
into the locale \<open>name\<close>. A proof that the specification of \<open>name\<close> implies the
specification of \<open>expr\<close> is required. As in the localized version of the
theorem command, the proof is in the context of \<open>name\<close>. After the proof
obligation has been discharged, the locale hierarchy is changed as if \<open>name\<close>
imported \<open>expr\<close> (hence the name \<^theory_text>\<open>sublocale\<close>). When the context of \<open>name\<close> is
subsequently entered, traversing the locale hierarchy will involve the
locale instances of \<open>expr\<close>, and their declarations will be added to the
context. This makes \<^theory_text>\<open>sublocale\<close> dynamic: extensions of a locale that is
instantiated in \<open>expr\<close> may take place after the \<^theory_text>\<open>sublocale\<close> declaration and
still become available in the context. Circular \<^theory_text>\<open>sublocale\<close> declarations
are allowed as long as they do not lead to infinite chains.
If interpretations of \<open>name\<close> exist in the current global theory, the command
adds interpretations for \<open>expr\<close> as well, with the same qualifier, although
only for fragments of \<open>expr\<close> that are not interpreted in the theory already.
Rewrites clauses in the expression or rewrite definitions \<open>defs\<close> can help break
infinite chains induced by circular \<^theory_text>\<open>sublocale\<close> declarations.
In a named context block the \<^theory_text>\<open>sublocale\<close> command may also be used, but the
locale argument must be omitted. The command then refers to the locale (or
class) target of the context block.
\<^descr> \<^theory_text>\<open>print_interps name\<close> lists all interpretations of locale \<open>name\<close> in the
current theory or proof context, including those due to a combination of an
\<^theory_text>\<open>interpretation\<close> or \<^theory_text>\<open>interpret\<close> and one or several \<^theory_text>\<open>sublocale\<close>
declarations.
\<^descr> @{method intro_locales} and @{method unfold_locales} repeatedly expand all
introduction rules of locale predicates of the theory. While @{method
intro_locales} only applies the \<open>loc.intro\<close> introduction rules and therefore
does not descend to assumptions, @{method unfold_locales} is more aggressive
and applies \<open>loc_axioms.intro\<close> as well. Both methods are aware of locale
specifications entailed by the context, both from target statements, and
from interpretations (see below). New goals that are entailed by the current
context are discharged automatically.
While @{method unfold_locales} is part of the default method for \<^theory_text>\<open>proof\<close>
and often invoked ``behind the scenes'', @{method intro_locales} helps
understand which proof obligations originated from which locale instances.
The latter method is useful while developing proofs but is rarely used in
finished developments.
\<^descr> @{attribute trace_locales}, when set to \<open>true\<close>, prints the locale
instances activated during roundup. Use this when locale commands yield
obscure errors or for understanding local theories created by complex locale
hierarchies.
\begin{warn}
If a global theory inherits declarations (body elements) for a locale from
one parent and an interpretation of that locale from another parent, the
interpretation will not be applied to the declarations.
\end{warn}
\begin{warn}
Since attributes are applied to interpreted theorems, interpretation may
modify the context of common proof tools, e.g.\ the Simplifier or
Classical Reasoner. As the behaviour of such tools is \<^emph>\<open>not\<close> stable under
interpretation morphisms, manual declarations might have to be added to
the target context of the interpretation to revert such declarations.
\end{warn}
\begin{warn}
An interpretation in a local theory or proof context may subsume previous
interpretations. This happens if the same specification fragment is
interpreted twice and the instantiation of the second interpretation is
more general than the interpretation of the first. The locale package does
not attempt to remove subsumed interpretations.
\end{warn}
\begin{warn}
While \<^theory_text>\<open>interpretation (in c) \<dots>\<close> is admissible, it is not useful since
its result is discarded immediately.
\end{warn}
\<close>
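text \<open>
  A self-contained sketch of locale interpretation (the locale
  \<open>demo_semigroup\<close> and the qualifier \<open>nat_plus\<close> are hypothetical): the proof
  obligation generated by \<^theory_text>\<open>interpretation\<close> is reduced by @{method
  unfold_locales} and discharged by simplification.
\<close>

(*hypothetical example, for illustration only*)
locale demo_semigroup =
  fixes prod :: "'a \<Rightarrow> 'a \<Rightarrow> 'a"
  assumes assoc: "prod (prod x y) z = prod x (prod y z)"

interpretation nat_plus: demo_semigroup "(+) :: nat \<Rightarrow> nat \<Rightarrow> nat"
  by unfold_locales (simp add: add.assoc)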
section \<open>Classes \label{sec:class}\<close>
text \<open>
\begin{matharray}{rcl}
@{command_def "class"} & : & \<open>theory \<rightarrow> local_theory\<close> \\
@{command_def "instantiation"} & : & \<open>theory \<rightarrow> local_theory\<close> \\
@{command_def "instance"} & : & \<open>local_theory \<rightarrow> local_theory\<close> \\
@{command "instance"} & : & \<open>theory \<rightarrow> proof(prove)\<close> \\
@{command_def "subclass"} & : & \<open>local_theory \<rightarrow> local_theory\<close> \\
@{command_def "print_classes"}\<open>\<^sup>*\<close> & : & \<open>context \<rightarrow>\<close> \\
@{command_def "class_deps"}\<open>\<^sup>*\<close> & : & \<open>context \<rightarrow>\<close> \\
@{method_def intro_classes} & : & \<open>method\<close> \\
\end{matharray}
A class is a particular locale with \<^emph>\<open>exactly one\<close> type variable \<open>\<alpha>\<close>. Beyond
the underlying locale, a corresponding type class is established which is
interpreted logically as axiomatic type class \<^cite>\<open>"Wenzel:1997:TPHOL"\<close>
whose logical content is given by the assumptions of the locale. Thus, classes
provide the full generality of locales combined with the convenience of type
classes (notably type-inference). See \<^cite>\<open>"isabelle-classes"\<close> for a short
tutorial.
\<^rail>\<open>
@@{command class} class_spec @'begin'?
;
class_spec: @{syntax name} '='
((@{syntax name} @{syntax_ref "opening"}? '+' (@{syntax context_elem}+)) |
@{syntax name} @{syntax_ref "opening"}? |
@{syntax_ref "opening"}? '+' (@{syntax context_elem}+))
;
@@{command instantiation} (@{syntax name} + @'and') '::' @{syntax arity} @'begin'
;
@@{command instance} (() | (@{syntax name} + @'and') '::' @{syntax arity} |
@{syntax name} ('<' | '\<subseteq>') @{syntax name} )
;
@@{command subclass} @{syntax name}
;
@@{command class_deps} (class_bounds class_bounds?)?
;
class_bounds: @{syntax sort} | '(' (@{syntax sort} + @'|') ')'
\<close>
\<^descr> \<^theory_text>\<open>class c = superclasses bundles + body\<close> defines a new class \<open>c\<close>, inheriting from
\<open>superclasses\<close>. This introduces a locale \<open>c\<close> with import of all locales
\<open>superclasses\<close>.
Any @{element "fixes"} in \<open>body\<close> are lifted to the global theory level
(\<^emph>\<open>class operations\<close> \<open>f\<^sub>1, \<dots>, f\<^sub>n\<close> of class \<open>c\<close>), mapping the local type
parameter \<open>\<alpha>\<close> to a schematic type variable \<open>?\<alpha> :: c\<close>.
Likewise, @{element "assumes"} in \<open>body\<close> are also lifted, mapping each local
parameter \<open>f :: \<tau>[\<alpha>]\<close> to its corresponding global constant \<open>f :: \<tau>[?\<alpha> ::
c]\<close>. The corresponding introduction rule is provided as
\<open>c_class_axioms.intro\<close>. This rule should be rarely needed directly --- the
@{method intro_classes} method takes care of the details of class membership
proofs.
Optionally given \<open>bundles\<close> take effect in the surface context within the
\<open>body\<close> and the potentially following \<^theory_text>\<open>begin\<close> / \<^theory_text>\<open>end\<close> block.
\<^descr> \<^theory_text>\<open>instantiation t :: (s\<^sub>1, \<dots>, s\<^sub>n)s begin\<close> opens a target (cf.\
\secref{sec:target}) which allows specifying class operations \<open>f\<^sub>1, \<dots>, f\<^sub>n\<close>
corresponding to sort \<open>s\<close> at the particular type instance \<open>(\<alpha>\<^sub>1 :: s\<^sub>1, \<dots>,
\<alpha>\<^sub>n :: s\<^sub>n) t\<close>. A plain \<^theory_text>\<open>instance\<close> command in the target body poses a goal
stating these type arities. The target is concluded by an @{command_ref
(local) "end"} command.
Note that a list of simultaneous type constructors may be given; this
corresponds nicely to mutually recursive type definitions, e.g.\ in
Isabelle/HOL.
\<^descr> \<^theory_text>\<open>instance\<close> in an instantiation target body sets up a goal stating the
type arities claimed at the opening \<^theory_text>\<open>instantiation\<close>. The proof would
usually proceed by @{method intro_classes}, and then establish the
characteristic theorems of the type classes involved. After finishing the
proof, the background theory will be augmented by the proven type arities.
On the theory level, \<^theory_text>\<open>instance t :: (s\<^sub>1, \<dots>, s\<^sub>n)s\<close> provides a convenient
way to instantiate a type class with no need to specify operations: one can
continue with the instantiation proof immediately.
\<^descr> \<^theory_text>\<open>subclass c\<close> in a class context for class \<open>d\<close> sets up a goal stating that
class \<open>c\<close> is logically contained in class \<open>d\<close>. After finishing the proof,
class \<open>d\<close> is proven to be a subclass of \<open>c\<close> and the locale \<open>c\<close> is interpreted
into \<open>d\<close> simultaneously.
A weakened form of this is available through a further variant of @{command
instance}: \<^theory_text>\<open>instance c\<^sub>1 \<subseteq> c\<^sub>2\<close> opens a proof that class \<open>c\<^sub>2\<close> implies
\<open>c\<^sub>1\<close> without reference to the underlying locales; this is useful if the
properties needed to prove the logical connection are not sufficient on the
locale level but only on the theory level.
\<^descr> \<^theory_text>\<open>print_classes\<close> prints all classes in the current theory.
\<^descr> \<^theory_text>\<open>class_deps\<close> visualizes classes and their subclass relations as a
directed acyclic graph. By default, all classes from the current theory
context are shown. This may be restricted by optional bounds as follows:
\<^theory_text>\<open>class_deps upper\<close> or \<^theory_text>\<open>class_deps upper lower\<close>. A class is visualized iff
it is a subclass of some sort from \<open>upper\<close> and a superclass of some sort
from \<open>lower\<close>.
\<^descr> @{method intro_classes} repeatedly expands all class introduction rules of
this theory. Note that this method usually need not be named explicitly, as
it is already included in the default proof step (e.g.\ of \<^theory_text>\<open>proof\<close>). In
particular, instantiation of trivial (syntactic) classes may be performed by
a single ``\<^theory_text>\<open>..\<close>'' proof step.
\<close>
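text \<open>
  A compact sketch combining \<^theory_text>\<open>class\<close>, \<^theory_text>\<open>instantiation\<close> and \<^theory_text>\<open>instance\<close>
  (the class \<open>demo_idem\<close> and its operation \<open>stamp\<close> are hypothetical): the
  class fixes one operation with one assumption, and the instantiation for
  type \<^typ>\<open>nat\<close> is finished via @{method intro_classes}.
\<close>

(*hypothetical example class, for illustration only*)
class demo_idem =
  fixes stamp :: "'a \<Rightarrow> 'a"
  assumes stamp_idem: "stamp (stamp x) = stamp x"

instantiation nat :: demo_idem
begin

definition stamp_nat :: "nat \<Rightarrow> nat"
  where "stamp_nat n = 0"

instance
  by intro_classes (simp add: stamp_nat_def)

end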
subsection \<open>The class target\<close>
text \<open>
%FIXME check
A named context may refer to a locale (cf.\ \secref{sec:target}). If this
locale is also a class \<open>c\<close>, apart from the common locale target behaviour
the following happens.
\<^item> Local constant declarations \<open>g[\<alpha>]\<close> referring to the local type parameter
\<open>\<alpha>\<close> and local parameters \<open>f[\<alpha>]\<close> are accompanied by theory-level constants
\<open>g[?\<alpha> :: c]\<close> referring to theory-level class operations \<open>f[?\<alpha> :: c]\<close>.
\<^item> Local theorem bindings are lifted as are assumptions.
\<^item> Local syntax refers to local operations \<open>g[\<alpha>]\<close> and global operations
\<open>g[?\<alpha> :: c]\<close> uniformly. Type inference resolves ambiguities. In rare
cases, manual type annotations are needed.
\<close>
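text \<open>
  A small sketch of the class target, reusing the existing HOL class
  \<^class>\<open>semigroup_add\<close> (the constant name \<open>add_twice\<close> is hypothetical): the
  local definition mentions the local type parameter and is accompanied by a
  theory-level constant at type \<open>?\<alpha> :: semigroup_add\<close>.
\<close>

context semigroup_add
begin

(*hypothetical example constant, for illustration only*)
definition add_twice :: "'a \<Rightarrow> 'a"
  where "add_twice x = x + x"

end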
subsection \<open>Co-regularity of type classes and arities\<close>
text \<open>
The class relation together with the collection of type-constructor arities
must obey the principle of \<^emph>\<open>co-regularity\<close> as defined below.
\<^medskip>
For the subsequent formulation of co-regularity we assume that the class
relation is closed by transitivity and reflexivity. Moreover the collection
of arities \<open>t :: (\<^vec>s)c\<close> is completed such that \<open>t :: (\<^vec>s)c\<close> and
\<open>c \<subseteq> c'\<close> implies \<open>t :: (\<^vec>s)c'\<close> for all such declarations.
Treating sorts as finite sets of classes (meaning the intersection), the
class relation \<open>c\<^sub>1 \<subseteq> c\<^sub>2\<close> is extended to sorts as follows:
\[
\<open>s\<^sub>1 \<subseteq> s\<^sub>2 \<equiv> \<forall>c\<^sub>2 \<in> s\<^sub>2. \<exists>c\<^sub>1 \<in> s\<^sub>1. c\<^sub>1 \<subseteq> c\<^sub>2\<close>
\]
This relation on sorts is further extended to tuples of sorts (of the same
length) in the component-wise way.
\<^medskip>
Co-regularity of the class relation together with the arities relation
means:
\[
\<open>t :: (\<^vec>s\<^sub>1)c\<^sub>1 \<Longrightarrow> t :: (\<^vec>s\<^sub>2)c\<^sub>2 \<Longrightarrow> c\<^sub>1 \<subseteq> c\<^sub>2 \<Longrightarrow> \<^vec>s\<^sub>1 \<subseteq> \<^vec>s\<^sub>2\<close>
\]
for all such arities. In other words, whenever the result classes of some
type-constructor arities are related, then the argument sorts need to be
related in the same way.
\<^medskip>
Co-regularity is a very fundamental property of the order-sorted algebra of
types. For example, it entails principal types and most general unifiers,
e.g.\ see \<^cite>\<open>"nipkow-prehofer"\<close>.
\<close>
section \<open>Overloaded constant definitions \label{sec:overloading}\<close>
text \<open>
Definitions essentially express abbreviations within the logic. The simplest
form of a definition is \<open>c :: \<sigma> \<equiv> t\<close>, where \<open>c\<close> is a new constant and \<open>t\<close> is
a closed term that does not mention \<open>c\<close>. Moreover, so-called \<^emph>\<open>hidden
polymorphism\<close> is excluded: all type variables in \<open>t\<close> need to occur in its
type \<open>\<sigma>\<close>.
\<^emph>\<open>Overloading\<close> means that a constant being declared as \<open>c :: \<alpha> decl\<close> may be
defined separately on type instances \<open>c :: (\<beta>\<^sub>1, \<dots>, \<beta>\<^sub>n)\<kappa> decl\<close> for each
type constructor \<open>\<kappa>\<close>. On most occasions, overloading will be used in a
Haskell-like fashion together with type classes by means of \<^theory_text>\<open>instantiation\<close>
(see \secref{sec:class}). Sometimes low-level overloading is desirable; this
is supported by \<^theory_text>\<open>consts\<close> and \<^theory_text>\<open>overloading\<close> explained below.
The right-hand side of overloaded definitions may mention overloaded
constants recursively at type instances corresponding to the immediate
argument types \<open>\<beta>\<^sub>1, \<dots>, \<beta>\<^sub>n\<close>. Incomplete specification patterns impose
global constraints on all occurrences. E.g.\ \<open>d :: \<alpha> \<times> \<alpha>\<close> on the left-hand
side means that all corresponding occurrences on some right-hand side need
to be an instance of this, and general \<open>d :: \<alpha> \<times> \<beta>\<close> will be disallowed. Full
details are given by Kun\v{c}ar \<^cite>\<open>"Kuncar:2015"\<close>.
\<^medskip>
The \<^theory_text>\<open>consts\<close> command and the \<^theory_text>\<open>overloading\<close> target provide a convenient
interface for end-users. Regular specification elements such as @{command
definition}, @{command inductive}, @{command function} may be used in the
body. It is also possible to use \<^theory_text>\<open>consts c :: \<sigma>\<close> with later \<^theory_text>\<open>overloading c
\<equiv> c :: \<sigma>\<close> to keep the declaration and definition of a constant separate.
\begin{matharray}{rcl}
@{command_def "consts"} & : & \<open>theory \<rightarrow> theory\<close> \\
@{command_def "overloading"} & : & \<open>theory \<rightarrow> local_theory\<close> \\
\end{matharray}
\<^rail>\<open>
@@{command consts} ((@{syntax name} '::' @{syntax type} @{syntax mixfix}?) +)
;
@@{command overloading} ( spec + ) @'begin'
;
spec: @{syntax name} ( '\<equiv>' | '==' ) @{syntax term} ( '(' @'unchecked' ')' )?
\<close>
\<^descr> \<^theory_text>\<open>consts c :: \<sigma>\<close> declares constant \<open>c\<close> to have any instance of type scheme
\<open>\<sigma>\<close>. The optional mixfix annotations may attach concrete syntax to the
constants declared.
\<^descr> \<^theory_text>\<open>overloading x\<^sub>1 \<equiv> c\<^sub>1 :: \<tau>\<^sub>1 \<dots> x\<^sub>n \<equiv> c\<^sub>n :: \<tau>\<^sub>n begin \<dots> end\<close> defines
a theory target (cf.\ \secref{sec:target}) which allows specifying already
declared constants via definitions in the body. These are identified by an
explicitly given mapping from variable names \<open>x\<^sub>i\<close> to constants \<open>c\<^sub>i\<close> at
particular type instances. The definitions themselves are established using
common specification tools, using the names \<open>x\<^sub>i\<close> as reference to the
corresponding constants.
Option \<^theory_text>\<open>(unchecked)\<close> disables global dependency checks for the
corresponding definition, which is occasionally useful for exotic
overloading; this is a form of axiomatic specification. It is at the
discretion of the user to avoid malformed theory specifications!
\<close>
subsubsection \<open>Example\<close>
consts Length :: "'a \<Rightarrow> nat"
overloading
Length\<^sub>0 \<equiv> "Length :: unit \<Rightarrow> nat"
Length\<^sub>1 \<equiv> "Length :: 'a \<times> unit \<Rightarrow> nat"
Length\<^sub>2 \<equiv> "Length :: 'a \<times> 'b \<times> unit \<Rightarrow> nat"
Length\<^sub>3 \<equiv> "Length :: 'a \<times> 'b \<times> 'c \<times> unit \<Rightarrow> nat"
begin
fun Length\<^sub>0 :: "unit \<Rightarrow> nat" where "Length\<^sub>0 () = 0"
fun Length\<^sub>1 :: "'a \<times> unit \<Rightarrow> nat" where "Length\<^sub>1 (a, ()) = 1"
fun Length\<^sub>2 :: "'a \<times> 'b \<times> unit \<Rightarrow> nat" where "Length\<^sub>2 (a, b, ()) = 2"
fun Length\<^sub>3 :: "'a \<times> 'b \<times> 'c \<times> unit \<Rightarrow> nat" where "Length\<^sub>3 (a, b, c, ()) = 3"
end
lemma "Length (a, b, c, ()) = 3" by simp
lemma "Length ((a, b), (c, d), ()) = 2" by simp
lemma "Length ((a, b, c, d, e), ()) = 1" by simp
section \<open>Incorporating ML code \label{sec:ML}\<close>
text \<open>
\begin{matharray}{rcl}
@{command_def "SML_file"} & : & \<open>local_theory \<rightarrow> local_theory\<close> \\
@{command_def "SML_file_debug"} & : & \<open>local_theory \<rightarrow> local_theory\<close> \\
@{command_def "SML_file_no_debug"} & : & \<open>local_theory \<rightarrow> local_theory\<close> \\
@{command_def "ML_file"} & : & \<open>local_theory \<rightarrow> local_theory\<close> \\
@{command_def "ML_file_debug"} & : & \<open>local_theory \<rightarrow> local_theory\<close> \\
@{command_def "ML_file_no_debug"} & : & \<open>local_theory \<rightarrow> local_theory\<close> \\
@{command_def "ML"} & : & \<open>local_theory \<rightarrow> local_theory\<close> \\
@{command_def "ML_export"} & : & \<open>local_theory \<rightarrow> local_theory\<close> \\
@{command_def "ML_prf"} & : & \<open>proof \<rightarrow> proof\<close> \\
@{command_def "ML_val"} & : & \<open>any \<rightarrow>\<close> \\
@{command_def "ML_command"} & : & \<open>any \<rightarrow>\<close> \\
@{command_def "setup"} & : & \<open>theory \<rightarrow> theory\<close> \\
@{command_def "local_setup"} & : & \<open>local_theory \<rightarrow> local_theory\<close> \\
@{command_def "attribute_setup"} & : & \<open>local_theory \<rightarrow> local_theory\<close> \\
\end{matharray}
\begin{tabular}{rcll}
@{attribute_def ML_print_depth} & : & \<open>attribute\<close> & default 10 \\
@{attribute_def ML_source_trace} & : & \<open>attribute\<close> & default \<open>false\<close> \\
@{attribute_def ML_debugger} & : & \<open>attribute\<close> & default \<open>false\<close> \\
@{attribute_def ML_exception_trace} & : & \<open>attribute\<close> & default \<open>false\<close> \\
@{attribute_def ML_exception_debugger} & : & \<open>attribute\<close> & default \<open>false\<close> \\
@{attribute_def ML_environment} & : & \<open>attribute\<close> & default \<open>Isabelle\<close> \\
\end{tabular}
\<^rail>\<open>
(@@{command SML_file} |
@@{command SML_file_debug} |
@@{command SML_file_no_debug} |
@@{command ML_file} |
@@{command ML_file_debug} |
@@{command ML_file_no_debug}) @{syntax name} ';'?
;
(@@{command ML} | @@{command ML_export} | @@{command ML_prf} |
@@{command ML_val} | @@{command ML_command} | @@{command setup} |
@@{command local_setup}) @{syntax text}
;
@@{command attribute_setup} @{syntax name} '=' @{syntax text} @{syntax text}?
\<close>
\<^descr> \<^theory_text>\<open>SML_file name\<close> reads and evaluates the given Standard ML file. Top-level
SML bindings are stored within the (global or local) theory context; the
initial environment is restricted to the Standard ML implementation of
Poly/ML, without the many add-ons of Isabelle/ML. Multiple \<^theory_text>\<open>SML_file\<close>
commands may be used to build larger Standard ML projects, independently of
the regular Isabelle/ML environment.
\<^descr> \<^theory_text>\<open>ML_file name\<close> reads and evaluates the given ML file. The current theory
context is passed down to the ML toplevel and may be modified, using \<^ML>\<open>Context.>>\<close> or derived ML commands. Top-level ML bindings are stored
within the (global or local) theory context.
\<^descr> \<^theory_text>\<open>SML_file_debug\<close>, \<^theory_text>\<open>SML_file_no_debug\<close>, \<^theory_text>\<open>ML_file_debug\<close>, and
\<^theory_text>\<open>ML_file_no_debug\<close> change the @{attribute ML_debugger} option locally while
the given file is compiled.
\<^descr> \<^theory_text>\<open>ML\<close> is similar to \<^theory_text>\<open>ML_file\<close>, but evaluates directly the given \<open>text\<close>.
Top-level ML bindings are stored within the (global or local) theory
context.
\<^descr> \<^theory_text>\<open>ML_export\<close> is similar to \<^theory_text>\<open>ML\<close>, but the resulting toplevel bindings are
exported to the global bootstrap environment of the ML process --- it has
a lasting effect that cannot be retracted. This allows ML evaluation
without a formal theory context, e.g. for command-line tools via @{tool
process} \<^cite>\<open>"isabelle-system"\<close>.
\<^descr> \<^theory_text>\<open>ML_prf\<close> is analogous to \<^theory_text>\<open>ML\<close> but works within a proof context.
Top-level ML bindings are stored within the proof context in a purely
sequential fashion, disregarding the nested proof structure. ML bindings
introduced by \<^theory_text>\<open>ML_prf\<close> are discarded at the end of the proof.
\<^descr> \<^theory_text>\<open>ML_val\<close> and \<^theory_text>\<open>ML_command\<close> are diagnostic versions of \<^theory_text>\<open>ML\<close>, which means
that the context may not be updated. \<^theory_text>\<open>ML_val\<close> echoes the bindings produced
at the ML toplevel, but \<^theory_text>\<open>ML_command\<close> is silent.
\<^descr> \<^theory_text>\<open>setup "text"\<close> changes the current theory context by applying \<open>text\<close>,
which refers to an ML expression of type \<^ML_type>\<open>theory -> theory\<close>. This
makes it possible to initialize object-logic specific tools and packages
written in ML, for example.
\<^descr> \<^theory_text>\<open>local_setup\<close> is similar to \<^theory_text>\<open>setup\<close> for a local theory context, and an
ML expression of type \<^ML_type>\<open>local_theory -> local_theory\<close>. This allows
invoking local theory specification packages without going through concrete
outer syntax, for example.
\<^descr> \<^theory_text>\<open>attribute_setup name = "text" description\<close> defines an attribute in the
current context. The given \<open>text\<close> has to be an ML expression of type
\<^ML_type>\<open>attribute context_parser\<close>, cf.\ basic parsers defined in
structure \<^ML_structure>\<open>Args\<close> and \<^ML_structure>\<open>Attrib\<close>.
In principle, attributes can operate both on a given theorem and the
implicit context, although in practice only one is modified and the other
serves as parameter. Here are examples for these two cases:
\<close>
(*<*)experiment begin(*>*)
attribute_setup my_rule =
\<open>Attrib.thms >> (fn ths =>
Thm.rule_attribute ths
(fn context: Context.generic => fn th: thm =>
let val th' = th OF ths
in th' end))\<close>
attribute_setup my_declaration =
\<open>Attrib.thms >> (fn ths =>
Thm.declaration_attribute
(fn th: thm => fn context: Context.generic =>
let val context' = context
in context' end))\<close>
(*<*)end(*>*)
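text \<open>
  A hypothetical sketch of \<^theory_text>\<open>local_setup\<close> (the fact name \<open>demo_refl\<close> is made
  up): an existing theorem is stored under a new name in the current local
  theory via \<^ML>\<open>Local_Theory.note\<close>.
\<close>

(*<*)experiment begin(*>*)

local_setup \<open>
  (*note the existing theorem refl under the hypothetical name demo_refl*)
  Local_Theory.note ((\<^binding>\<open>demo_refl\<close>, []), @{thms refl}) #> snd
\<close>

(*<*)end(*>*)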
text \<open>
\<^descr> @{attribute ML_print_depth} controls the printing depth of the ML toplevel
pretty printer. Typically the limit should be less than 10. Bigger values
such as 100--1000 are occasionally useful for debugging.
\<^descr> @{attribute ML_source_trace} indicates whether the source text that is
given to the ML compiler should be output: it shows the raw Standard ML
after expansion of Isabelle/ML antiquotations.
\<^descr> @{attribute ML_debugger} controls compilation of sources with or without
debugging information. The global system option @{system_option_ref
ML_debugger} does the same when building a session image. It is also
possible to use commands like \<^theory_text>\<open>ML_file_debug\<close> etc. The ML debugger is
explained further in \<^cite>\<open>"isabelle-jedit"\<close>.
\<^descr> @{attribute ML_exception_trace} indicates whether the ML run-time system
should print a detailed stack trace on exceptions. The result is dependent
on various ML compiler optimizations. The boundary for the exception trace
is the current Isar command transaction: it is occasionally better to
insert the combinator \<^ML>\<open>Runtime.exn_trace\<close> into ML code for debugging
\<^cite>\<open>"isabelle-implementation"\<close>, closer to the point where it actually
happens.
\<^descr> @{attribute ML_exception_debugger} controls detailed exception trace via
the Poly/ML debugger, at the cost of extra compile-time and run-time
overhead. Relevant ML modules need to be compiled beforehand with debugging
enabled, see @{attribute ML_debugger} above.
\<^descr> @{attribute ML_environment} determines the named ML environment for
toplevel declarations, e.g.\ in command \<^theory_text>\<open>ML\<close> or \<^theory_text>\<open>ML_file\<close>. The following
ML environments are predefined in Isabelle/Pure:
\<^item> \<open>Isabelle\<close> for Isabelle/ML. It contains all modules of Isabelle/Pure and
further add-ons, e.g. material from Isabelle/HOL.
\<^item> \<open>SML\<close> for official Standard ML. It contains only the initial basis
according to \<^url>\<open>http://sml-family.org/Basis/overview.html\<close>.
The Isabelle/ML function \<^ML>\<open>ML_Env.setup\<close> defines a new ML environment.
This is useful for incorporating big SML projects in an isolated name space,
possibly with variations on ML syntax; the existing setup of
\<^ML>\<open>ML_Env.SML_operations\<close> follows the official standard.
It is also possible to move toplevel bindings between ML environments, using
a notation with ``\<open>>\<close>'' as separator. For example:
\<close>
(*<*)experiment begin(*>*)
declare [[ML_environment = "Isabelle>SML"]]
ML \<open>val println = writeln\<close>
declare [[ML_environment = "SML"]]
ML \<open>println "test"\<close>
declare [[ML_environment = "Isabelle"]]
ML \<open>ML \<open>println\<close> (*bad*) handle ERROR msg => warning msg\<close>
(*<*)end(*>*)
section \<open>Generated files and exported files\<close>
text \<open>
Write access to the physical file-system is incompatible with the stateless
model of processing Isabelle documents. To avoid bad effects, the following
concepts for abstract file-management are provided by Isabelle:
\<^descr>[Generated files] are stored within the theory context in Isabelle/ML.
This allows operating on the content in Isabelle/ML, e.g. via the command
@{command compile_generated_files}.
\<^descr>[Exported files] are stored within the session database in
Isabelle/Scala. This allows delivering artefacts to external tools; see
also \<^cite>\<open>"isabelle-system"\<close> for session \<^verbatim>\<open>ROOT\<close> declaration
\<^theory_text>\<open>export_files\<close>, and @{tool build} option \<^verbatim>\<open>-e\<close>.
A notable example is the command @{command_ref export_code}
(\chref{ch:export-code}): it uses both concepts simultaneously.
File names are hierarchically structured, using a slash as separator. The
(long) theory name is used as a prefix: the resulting name needs to be
globally unique.
\begin{matharray}{rcll}
@{command_def "generate_file"} & : & \<open>local_theory \<rightarrow> local_theory\<close> \\
@{command_def "export_generated_files"} & : & \<open>context \<rightarrow>\<close> \\
@{command_def "compile_generated_files"} & : & \<open>context \<rightarrow>\<close> \\
@{command_def "external_file"} & : & \<open>any \<rightarrow> any\<close> \\
\end{matharray}
\<^rail>\<open>
@@{command generate_file} path '=' content
;
path: @{syntax embedded}
;
content: @{syntax embedded}
;
@@{command export_generated_files} (files_in_theory + @'and')
;
files_in_theory: (@'_' | (path+)) (('(' @'in' @{syntax name} ')')?)
;
@@{command compile_generated_files} (files_in_theory + @'and') \<newline>
(@'external_files' (external_files + @'and'))? \<newline>
(@'export_files' (export_files + @'and'))? \<newline>
(@'export_prefix' path)?
;
external_files: (path+) (('(' @'in' path ')')?)
;
export_files: (path+) (executable?)
;
executable: '(' ('exe' | 'executable') ')'
;
@@{command external_file} @{syntax name} ';'?
\<close>
\<^descr> \<^theory_text>\<open>generate_file path = content\<close> augments the table of generated files
within the current theory by a new entry: duplicates are not allowed. The
name extension determines a pre-existent file-type; the \<open>content\<close> is a
string that is preprocessed according to rules of this file-type.
For example, Isabelle/Pure supports \<^verbatim>\<open>.hs\<close> as file-type for Haskell:
embedded cartouches are evaluated as Isabelle/ML expressions of type
\<^ML_type>\<open>string\<close>, the result is inlined in Haskell string syntax.
\<^descr> \<^theory_text>\<open>export_generated_files paths (in thy)\<close> retrieves named generated files
from the given theory (which needs to be reachable via imports of the current
one). By default, the current theory node is used. Using ``\<^verbatim>\<open>_\<close>''
(underscore) instead of explicit path names refers to \emph{all} files of a
theory node.
The overall list of files is prefixed with the respective (long) theory name
and exported to the session database. In Isabelle/jEdit the result can be
browsed via the virtual file-system with prefix ``\<^verbatim>\<open>isabelle-export:\<close>''
(using the regular file-browser).
\<^descr> \<^theory_text>\<open>scala_build_generated_files paths (in thy)\<close> retrieves named generated
files as for \<^theory_text>\<open>export_generated_files\<close> and writes them into a temporary
directory, which is taken as the starting point for the build process of
Isabelle/Scala/Java modules (see \<^cite>\<open>"isabelle-system"\<close>). The
corresponding @{path \<open>build.props\<close>} file is expected directly in the toplevel
directory, instead of @{path \<open>etc/build.props\<close>} for Isabelle system
components. These properties need to specify sources, resources, services
etc. as usual. The resulting JAR module becomes an export artefact of the
session database, with a name of the form
``\<open>theory\<close>\<^verbatim>\<open>:classpath/\<close>\<open>module\<close>\<^verbatim>\<open>.jar\<close>''.
\<^descr> \<^theory_text>\<open>compile_generated_files paths (in thy) where compile_body\<close> retrieves
named generated files as for \<^theory_text>\<open>export_generated_files\<close> and writes them into
a temporary directory, such that the \<open>compile_body\<close> may operate on them as
an ML function of type \<^ML_type>\<open>Path.T -> unit\<close>. This may create further
files, e.g.\ executables produced by a compiler that is invoked as external
process (e.g.\ via \<^ML>\<open>Isabelle_System.bash\<close>), or any other files.
The option ``\<^theory_text>\<open>external_files paths (in base_dir)\<close>'' copies files from the
physical file-system into the temporary directory, \emph{before} invoking
\<open>compile_body\<close>. The \<open>base_dir\<close> prefix is removed from each of the \<open>paths\<close>,
but the remaining sub-directory structure is reconstructed in the target
directory.
The option ``\<^theory_text>\<open>export_files paths\<close>'' exports the specified files from the
temporary directory to the session database, \emph{after} invoking
\<open>compile_body\<close>. Entries may be decorated with ``\<^theory_text>\<open>(exe)\<close>'' to indicate
a platform-specific executable program: the executable file-attribute will
be set, and on Windows the \<^verbatim>\<open>.exe\<close> file-extension will be included;
``\<^theory_text>\<open>(executable)\<close>'' only refers to the file-attribute, without special
treatment of the \<^verbatim>\<open>.exe\<close> extension.
The option ``\<^theory_text>\<open>export_prefix path\<close>'' specifies an extra path prefix for all
exports of \<^theory_text>\<open>export_files\<close> above.
\<^descr> \<^theory_text>\<open>external_file name\<close> declares the formal dependency on the given file
name, such that the Isabelle build process knows about it (see also \<^cite>\<open>"isabelle-system"\<close>). This is required for any files mentioned in
\<^theory_text>\<open>compile_generated_files / external_files\<close> above, in order to document
source dependencies properly. It is also possible to use \<^theory_text>\<open>external_file\<close>
alone, e.g.\ when other Isabelle/ML tools use \<^ML>\<open>File.read\<close>, without
specific management of content by the Prover IDE.
\<close>
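text \<open>
  A small sketch of \<^theory_text>\<open>generate_file\<close> and \<^theory_text>\<open>export_generated_files\<close> (the
  path \<^verbatim>\<open>Demo/Hello.hs\<close> and the Haskell content are made up): the \<^verbatim>\<open>.hs\<close>
  extension selects the Haskell file-type mentioned above.
\<close>

(*<*)experiment begin(*>*)

generate_file "Demo/Hello.hs" = \<open>
module Demo.Hello where

hello :: String
hello = "Hello from a generated file"
\<close>

export_generated_files "Demo/Hello.hs"

(*<*)end(*>*)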
section \<open>Primitive specification elements\<close>
subsection \<open>Sorts\<close>
text \<open>
\begin{matharray}{rcll}
@{command_def "default_sort"} & : & \<open>local_theory \<rightarrow> local_theory\<close>
\end{matharray}
\<^rail>\<open>
@@{command default_sort} @{syntax sort}
\<close>
\<^descr> \<^theory_text>\<open>default_sort s\<close> makes sort \<open>s\<close> the new default sort for any type
variable that is given explicitly in the text, but lacks a sort constraint
(wrt.\ the current context). Type variables generated by type inference are
not affected.
Usually the default sort is only changed when defining a new object-logic.
For example, the default sort in Isabelle/HOL is \<^class>\<open>type\<close>, the class of
all HOL types.
When merging theories, the default sorts of the parents are logically
intersected, i.e.\ the representations as lists of classes are joined.
\<close>
subsection \<open>Types \label{sec:types-pure}\<close>
text \<open>
\begin{matharray}{rcll}
@{command_def "type_synonym"} & : & \<open>local_theory \<rightarrow> local_theory\<close> \\
@{command_def "typedecl"} & : & \<open>local_theory \<rightarrow> local_theory\<close> \\
\end{matharray}
\<^rail>\<open>
@@{command type_synonym} (@{syntax typespec} '=' @{syntax type} @{syntax mixfix}?)
;
@@{command typedecl} @{syntax typespec} @{syntax mixfix}?
\<close>
\<^descr> \<^theory_text>\<open>type_synonym (\<alpha>\<^sub>1, \<dots>, \<alpha>\<^sub>n) t = \<tau>\<close> introduces a \<^emph>\<open>type synonym\<close> \<open>(\<alpha>\<^sub>1, \<dots>,
\<alpha>\<^sub>n) t\<close> for the existing type \<open>\<tau>\<close>. Unlike the semantic type definitions in
Isabelle/HOL, type synonyms are merely syntactic abbreviations without any
logical significance. Internally, type synonyms are fully expanded.
\<^descr> \<^theory_text>\<open>typedecl (\<alpha>\<^sub>1, \<dots>, \<alpha>\<^sub>n) t\<close> declares a new type constructor \<open>t\<close>. If the
object-logic defines a base sort \<open>s\<close>, then the constructor is declared to
operate on that, via the axiomatic type-class instance \<open>t :: (s, \<dots>, s)s\<close>.
\begin{warn}
If you introduce a new type axiomatically, i.e.\ via @{command_ref
typedecl} and @{command_ref axiomatization}
(\secref{sec:axiomatizations}), the minimum requirement is that it has a
non-empty model, to avoid immediate collapse of the logical environment.
Moreover, one needs to demonstrate that the interpretation of such
free-form axiomatizations can coexist with other axiomatization schemes
for types, notably @{command_def typedef} in Isabelle/HOL
(\secref{sec:hol-typedef}), or any other extension that people might have
introduced elsewhere.
\end{warn}
\<close>
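text \<open>
  A brief sketch (the names \<open>pred2\<close> and \<open>blob\<close> are hypothetical): a type
  synonym for binary predicates and a fresh type constructor declared without
  any accompanying definition.
\<close>

(*<*)experiment begin(*>*)

type_synonym 'a pred2 = "'a \<Rightarrow> 'a \<Rightarrow> bool"

typedecl blob  (*hypothetical opaque type*)

(*<*)end(*>*)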
section \<open>Naming existing theorems \label{sec:theorems}\<close>
text \<open>
\begin{matharray}{rcll}
@{command_def "lemmas"} & : & \<open>local_theory \<rightarrow> local_theory\<close> \\
@{command_def "named_theorems"} & : & \<open>local_theory \<rightarrow> local_theory\<close> \\
\end{matharray}
\<^rail>\<open>
@@{command lemmas} (@{syntax thmdef}? @{syntax thms} + @'and')
@{syntax for_fixes}
;
@@{command named_theorems} (@{syntax name} @{syntax text}? + @'and')
\<close>
\<^descr> \<^theory_text>\<open>lemmas a = b\<^sub>1 \<dots> b\<^sub>n\<close>~@{keyword_def "for"}~\<open>x\<^sub>1 \<dots> x\<^sub>m\<close> evaluates given
facts (with attributes) in the current context, which may be augmented by
local variables. Results are standardized before being stored, i.e.\
schematic variables are renamed to enforce index \<open>0\<close> uniformly.
\<^descr> \<^theory_text>\<open>named_theorems name description\<close> declares a dynamic fact within the
context. The same \<open>name\<close> is used to define an attribute with the usual
\<open>add\<close>/\<open>del\<close> syntax (e.g.\ see \secref{sec:simp-rules}) to maintain the
content incrementally, in canonical declaration order of the text structure.
\<close>
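text \<open>
  A minimal sketch (the names \<open>demo_intros\<close> and \<open>demo_conjI\<close> are made up): a
  dynamic fact collection is declared and then populated via the generated
  attribute, here through \<^theory_text>\<open>lemmas\<close>.
\<close>

(*<*)experiment begin(*>*)

named_theorems demo_intros \<open>hypothetical collection of introduction rules\<close>

lemmas demo_conjI [demo_intros] = conjI

(*<*)end(*>*)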
section \<open>Oracles \label{sec:oracles}\<close>
text \<open>
\begin{matharray}{rcll}
@{command_def "oracle"} & : & \<open>theory \<rightarrow> theory\<close> & (axiomatic!) \\
@{command_def "thm_oracles"}\<open>\<^sup>*\<close> & : & \<open>context \<rightarrow>\<close> \\
\end{matharray}
Oracles allow Isabelle to take advantage of external reasoners such as
arithmetic decision procedures, model checkers, fast tautology checkers or
computer algebra systems. Invoked as an oracle, an external reasoner can
create arbitrary Isabelle theorems.
It is the responsibility of the user to ensure that the external reasoner is
as trustworthy as the application requires. Another typical source of errors
is the linkup between Isabelle and the external tool, not just its concrete
implementation, but also the required translation between two different
logical environments.
Isabelle merely guarantees well-formedness of the propositions being
asserted, and records within the internal derivation object how presumed
theorems depend on unproven suppositions. This also includes implicit
type-class reasoning via the order-sorted algebra of class relations and
type arities (see also @{command_ref instantiation} and @{command_ref
instance}).
\<^rail>\<open>
@@{command oracle} @{syntax name} '=' @{syntax text}
;
@@{command thm_oracles} @{syntax thms}
\<close>
\<^descr> \<^theory_text>\<open>oracle name = "text"\<close> turns the given ML expression \<open>text\<close> of type
\<^ML_text>\<open>'a -> cterm\<close> into an ML function of type \<^ML_text>\<open>'a -> thm\<close>,
which is bound to the global identifier \<^ML_text>\<open>name\<close>. This acts like an
infinitary specification of axioms! Invoking the oracle only works within
the scope of the resulting theory.
See \<^file>\<open>~~/src/HOL/Examples/Iff_Oracle.thy\<close> for a worked example of defining
a new primitive rule as oracle, and turning it into a proof method.
\<^descr> \<^theory_text>\<open>thm_oracles thms\<close> displays all oracles used in the internal derivation
of the given theorems; this covers the full graph of transitive
dependencies.
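
As a minimal sketch (the name \<open>trust_me\<close> is hypothetical), \<^theory_text>\<open>oracle trust_me =
\<open>fn ct => ct\<close>\<close> yields an ML function \<^ML_text>\<open>trust_me: cterm -> thm\<close> that turns any
given proposition into a theorem without proof; this illustrates why oracle
invocations are recorded in derivations and should be kept to a minimum.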
\<close>
section \<open>Name spaces\<close>
text \<open>
\begin{matharray}{rcl}
@{command_def "alias"} & : & \<open>local_theory \<rightarrow> local_theory\<close> \\
@{command_def "type_alias"} & : & \<open>local_theory \<rightarrow> local_theory\<close> \\
@{command_def "hide_class"} & : & \<open>theory \<rightarrow> theory\<close> \\
@{command_def "hide_type"} & : & \<open>theory \<rightarrow> theory\<close> \\
@{command_def "hide_const"} & : & \<open>theory \<rightarrow> theory\<close> \\
@{command_def "hide_fact"} & : & \<open>theory \<rightarrow> theory\<close> \\
\end{matharray}
\<^rail>\<open>
(@{command alias} | @{command type_alias}) @{syntax name} '=' @{syntax name}
;
(@{command hide_class} | @{command hide_type} |
@{command hide_const} | @{command hide_fact}) ('(' @'open' ')')? (@{syntax name} + )
\<close>
Isabelle organizes all kinds of name declarations (of types, constants,
theorems etc.) by separate hierarchically structured name spaces. Normally
the user does not have to control the behaviour of name spaces by hand, yet
the following commands provide some way to do so.
\<^descr> \<^theory_text>\<open>alias\<close> and \<^theory_text>\<open>type_alias\<close> introduce aliases for constants and type
constructors, respectively. This allows ad-hoc changes to name-space
accesses.
\<^descr> \<^theory_text>\<open>type_alias b = c\<close> introduces an alias for an existing type constructor.
\<^descr> \<^theory_text>\<open>hide_class names\<close> fully removes class declarations from a given name
space; with the \<open>(open)\<close> option, only the unqualified base name is hidden.
Note that hiding name space accesses has no impact on logical declarations
--- they remain valid internally. Entities that are no longer accessible to
the user are printed with the special qualifier ``\<open>??\<close>'' prefixed to the
full internal name.
\<^descr> \<^theory_text>\<open>hide_type\<close>, \<^theory_text>\<open>hide_const\<close>, and \<^theory_text>\<open>hide_fact\<close> are similar to
\<^theory_text>\<open>hide_class\<close>, but hide types, constants, and facts, respectively.
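
For example (with purely illustrative names), \<^theory_text>\<open>hide_const (open) c\<close> frees the
unqualified name \<open>c\<close> for re-use while the constant remains accessible under its
fully qualified internal name, and \<^theory_text>\<open>alias d = A.B.c\<close> adds a further access
path to an existing constant.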
\<close>
end
diff --git a/src/HOL/Analysis/Abstract_Metric_Spaces.thy b/src/HOL/Analysis/Abstract_Metric_Spaces.thy
new file mode 100644
--- /dev/null
+++ b/src/HOL/Analysis/Abstract_Metric_Spaces.thy
@@ -0,0 +1,2634 @@
+section \<open>Abstract Metric Spaces\<close>
+
+theory Abstract_Metric_Spaces
+ imports Elementary_Metric_Spaces Abstract_Limits Abstract_Topological_Spaces
+begin
+
+(*Avoid a clash with the existing metric_space locale (from the type class)*)
+locale Metric_space =
+ fixes M :: "'a set" and d :: "'a \<Rightarrow> 'a \<Rightarrow> real"
+ assumes nonneg [simp]: "\<And>x y. 0 \<le> d x y"
+ assumes commute: "\<And>x y. d x y = d y x"
+ assumes zero [simp]: "\<And>x y. \<lbrakk>x \<in> M; y \<in> M\<rbrakk> \<Longrightarrow> d x y = 0 \<longleftrightarrow> x=y"
+ assumes triangle: "\<And>x y z. \<lbrakk>x \<in> M; y \<in> M; z \<in> M\<rbrakk> \<Longrightarrow> d x z \<le> d x y + d y z"
+
+text \<open>Link with the type class version\<close>
+interpretation Met_TC: Metric_space UNIV dist
+ by (simp add: dist_commute dist_triangle Metric_space.intro)
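+
+(*Under this interpretation the locale notions specialise to the familiar type-class
+  ones: e.g. Met_TC.mball coincides with ball and, over the reals, Met_TC.mtopology
+  with euclideanreal; see mball_eq_ball and mtopology_is_euclideanreal below.*)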
+
+context Metric_space
+begin
+
+lemma subspace: "M' \<subseteq> M \<Longrightarrow> Metric_space M' d"
+ by (simp add: commute in_mono Metric_space.intro triangle)
+
+lemma abs_mdist [simp]: "\<bar>d x y\<bar> = d x y"
+ by simp
+
+lemma mdist_pos_less: "\<lbrakk>x \<noteq> y; x \<in> M; y \<in> M\<rbrakk> \<Longrightarrow> 0 < d x y"
+ by (metis less_eq_real_def nonneg zero)
+
+lemma mdist_zero [simp]: "x \<in> M \<Longrightarrow> d x x = 0"
+ by simp
+
+lemma mdist_pos_eq [simp]: "\<lbrakk>x \<in> M; y \<in> M\<rbrakk> \<Longrightarrow> 0 < d x y \<longleftrightarrow> x \<noteq> y"
+ using mdist_pos_less zero by fastforce
+
+lemma triangle': "\<lbrakk>x \<in> M; y \<in> M; z \<in> M\<rbrakk> \<Longrightarrow> d x z \<le> d x y + d z y"
+ by (simp add: commute triangle)
+
+lemma triangle'': "\<lbrakk>x \<in> M; y \<in> M; z \<in> M\<rbrakk> \<Longrightarrow> d x z \<le> d y x + d y z"
+ by (simp add: commute triangle)
+
+lemma mdist_reverse_triangle: "\<lbrakk>x \<in> M; y \<in> M; z \<in> M\<rbrakk> \<Longrightarrow> \<bar>d x y - d y z\<bar> \<le> d x z"
+ by (smt (verit) commute triangle)
+
+text\<open> Open and closed balls \<close>
+
+definition mball where "mball x r \<equiv> {y. x \<in> M \<and> y \<in> M \<and> d x y < r}"
+definition mcball where "mcball x r \<equiv> {y. x \<in> M \<and> y \<in> M \<and> d x y \<le> r}"
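+
+text \<open>Informally: both balls are empty unless the centre \<open>x\<close> lies in the carrier
+  set \<open>M\<close>; in the instance \<open>Met_TC\<close> at type \<open>real\<close>, \<open>mball 0 1\<close> is the open
+  interval \<open>{-1<..<1}\<close>.\<close>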
+
+lemma in_mball [simp]: "y \<in> mball x r \<longleftrightarrow> x \<in> M \<and> y \<in> M \<and> d x y < r"
+ by (simp add: local.Metric_space_axioms Metric_space.mball_def)
+
+lemma centre_in_mball_iff [iff]: "x \<in> mball x r \<longleftrightarrow> x \<in> M \<and> 0 < r"
+ using in_mball mdist_zero by force
+
+lemma mball_subset_mspace: "mball x r \<subseteq> M"
+ by auto
+
+lemma mball_eq_empty: "mball x r = {} \<longleftrightarrow> (x \<notin> M) \<or> r \<le> 0"
+ by (smt (verit, best) Collect_empty_eq centre_in_mball_iff mball_def nonneg)
+
+lemma mball_subset: "\<lbrakk>d x y + a \<le> b; y \<in> M\<rbrakk> \<Longrightarrow> mball x a \<subseteq> mball y b"
+ by (smt (verit) commute in_mball subsetI triangle)
+
+lemma disjoint_mball: "r + r' \<le> d x x' \<Longrightarrow> disjnt (mball x r) (mball x' r')"
+ by (smt (verit) commute disjnt_iff in_mball triangle)
+
+lemma mball_subset_concentric: "r \<le> s \<Longrightarrow> mball x r \<subseteq> mball x s"
+ by auto
+
+lemma in_mcball [simp]: "y \<in> mcball x r \<longleftrightarrow> x \<in> M \<and> y \<in> M \<and> d x y \<le> r"
+ by (simp add: local.Metric_space_axioms Metric_space.mcball_def)
+
+lemma centre_in_mcball_iff [iff]: "x \<in> mcball x r \<longleftrightarrow> x \<in> M \<and> 0 \<le> r"
+ using mdist_zero by force
+
+lemma mcball_eq_empty: "mcball x r = {} \<longleftrightarrow> (x \<notin> M) \<or> r < 0"
+ by (smt (verit, best) Collect_empty_eq centre_in_mcball_iff empty_iff mcball_def nonneg)
+
+lemma mcball_subset_mspace: "mcball x r \<subseteq> M"
+ by auto
+
+lemma mball_subset_mcball: "mball x r \<subseteq> mcball x r"
+ by auto
+
+lemma mcball_subset: "\<lbrakk>d x y + a \<le> b; y \<in> M\<rbrakk> \<Longrightarrow> mcball x a \<subseteq> mcball y b"
+ by (smt (verit) in_mcball mdist_reverse_triangle subsetI)
+
+lemma mcball_subset_concentric: "r \<le> s \<Longrightarrow> mcball x r \<subseteq> mcball x s"
+ by force
+
+lemma mcball_subset_mball: "\<lbrakk>d x y + a < b; y \<in> M\<rbrakk> \<Longrightarrow> mcball x a \<subseteq> mball y b"
+ by (smt (verit) commute in_mball in_mcball subsetI triangle)
+
+lemma mcball_subset_mball_concentric: "a < b \<Longrightarrow> mcball x a \<subseteq> mball x b"
+ by force
+
+end
+
+
+
+subsection \<open>Metric topology\<close>
+
+context Metric_space
+begin
+
+definition mopen where
+ "mopen U \<equiv> U \<subseteq> M \<and> (\<forall>x. x \<in> U \<longrightarrow> (\<exists>r>0. mball x r \<subseteq> U))"
+
+definition mtopology :: "'a topology" where
+ "mtopology \<equiv> topology mopen"
+
+lemma is_topology_metric_topology [iff]: "istopology mopen"
+proof -
+ have "\<And>S T. \<lbrakk>mopen S; mopen T\<rbrakk> \<Longrightarrow> mopen (S \<inter> T)"
+ by (smt (verit, del_insts) Int_iff in_mball mopen_def subset_eq)
+ moreover have "\<And>\<K>. (\<forall>K\<in>\<K>. mopen K) \<longrightarrow> mopen (\<Union>\<K>)"
+ using mopen_def by fastforce
+ ultimately show ?thesis
+ by (simp add: istopology_def)
+qed
+
+lemma openin_mtopology: "openin mtopology U \<longleftrightarrow> U \<subseteq> M \<and> (\<forall>x. x \<in> U \<longrightarrow> (\<exists>r>0. mball x r \<subseteq> U))"
+ by (simp add: mopen_def mtopology_def)
+
+lemma topspace_mtopology [simp]: "topspace mtopology = M"
+ by (meson order.refl mball_subset_mspace openin_mtopology openin_subset openin_topspace subset_antisym zero_less_one)
+
+lemma subtopology_mspace [simp]: "subtopology mtopology M = mtopology"
+ by (metis subtopology_topspace topspace_mtopology)
+
+lemma open_in_mspace [iff]: "openin mtopology M"
+ by (metis openin_topspace topspace_mtopology)
+
+lemma closedin_mspace [iff]: "closedin mtopology M"
+ by (metis closedin_topspace topspace_mtopology)
+
+lemma openin_mball [iff]: "openin mtopology (mball x r)"
+proof -
+ have "\<And>y. \<lbrakk>x \<in> M; d x y < r\<rbrakk> \<Longrightarrow> \<exists>s>0. mball y s \<subseteq> mball x r"
+ by (metis add_diff_cancel_left' add_diff_eq commute less_add_same_cancel1 mball_subset order_refl)
+ then show ?thesis
+ by (auto simp: openin_mtopology)
+qed
+
+lemma mcball_eq_cball [simp]: "Met_TC.mcball = cball"
+ by force
+
+lemma mball_eq_ball [simp]: "Met_TC.mball = ball"
+ by force
+
+lemma mopen_eq_open [simp]: "Met_TC.mopen = open"
+ by (force simp: open_contains_ball Met_TC.mopen_def)
+
+lemma limitin_iff_tendsto [iff]: "limitin Met_TC.mtopology \<sigma> x F = tendsto \<sigma> x F"
+ by (simp add: Met_TC.mtopology_def)
+
+lemma mtopology_is_euclideanreal [simp]: "Met_TC.mtopology = euclideanreal"
+ by (simp add: Met_TC.mtopology_def)
+
+(*
+lemma metric_injective_image:
+ "\<And>f m s.
+ f ` s \<subseteq> M \<and>
+ (\<forall>x y. x \<in> s \<and> y \<in> s \<and> f x = f y \<Longrightarrow> x = y)
+ \<Longrightarrow> (mspace(metric(s,\<lambda>(x,y). d (f x) (f y))) = s) \<and>
+ (d(metric(s,\<lambda>(x,y). d (f x) (f y))) =
+ \<lambda>(x,y). d (f x) (f y))"
+oops
+ REWRITE_TAC[\<subseteq>; FORALL_IN_IMAGE; INJECTIVE_ON_ALT] THEN
+ REPEAT GEN_TAC THEN STRIP_TAC THEN
+ REWRITE_TAC[mspace; d; GSYM PAIR_EQ] THEN
+ REWRITE_TAC[GSYM(CONJUNCT2 metric_tybij); is_metric_space] THEN
+ REWRITE_TAC[GSYM mspace; GSYM d] THEN
+ ASM_SIMP_TAC[MDIST_POS_LE; MDIST_TRIANGLE; MDIST_0] THEN
+ ASM_MESON_TAC[MDIST_SYM]);;
+*)
+
+lemma mtopology_base:
+ "mtopology = topology(arbitrary union_of (\<lambda>U. \<exists>x \<in> M. \<exists>r>0. U = mball x r))"
+proof -
+ have "\<And>S. \<exists>x r. x \<in> M \<and> 0 < r \<and> S = mball x r \<Longrightarrow> openin mtopology S"
+ using openin_mball by blast
+ moreover have "\<And>U x. \<lbrakk>openin mtopology U; x \<in> U\<rbrakk> \<Longrightarrow> \<exists>B. (\<exists>x r. x \<in> M \<and> 0 < r \<and> B = mball x r) \<and> x \<in> B \<and> B \<subseteq> U"
+ by (metis centre_in_mball_iff in_mono openin_mtopology)
+ ultimately show ?thesis
+ by (smt (verit) topology_base_unique)
+qed
+
+lemma closedin_metric:
+ "closedin mtopology C \<longleftrightarrow> C \<subseteq> M \<and> (\<forall>x. x \<in> M - C \<longrightarrow> (\<exists>r>0. disjnt C (mball x r)))" (is "?lhs = ?rhs")
+proof
+ show "?lhs \<Longrightarrow> ?rhs"
+ unfolding closedin_def openin_mtopology
+ by (metis Diff_disjoint disjnt_def disjnt_subset2 topspace_mtopology)
+ show "?rhs \<Longrightarrow> ?lhs"
+ unfolding closedin_def openin_mtopology disjnt_def
+ by (metis Diff_subset Diff_triv Int_Diff Int_commute inf.absorb_iff2 mball_subset_mspace topspace_mtopology)
+qed
+
+lemma closedin_mcball [iff]: "closedin mtopology (mcball x r)"
+proof -
+ have "\<exists>ra>0. disjnt (mcball x r) (mball y ra)" if "x \<notin> M" for y
+ by (metis disjnt_empty1 gt_ex mcball_eq_empty that)
+ moreover have "disjnt (mcball x r) (mball y (d x y - r))" if "y \<in> M" "d x y > r" for y
+ using that disjnt_iff in_mball in_mcball mdist_reverse_triangle by force
+ ultimately show ?thesis
+ using closedin_metric mcball_subset_mspace by fastforce
+qed
+
+lemma mball_iff_mcball: "(\<exists>r>0. mball x r \<subseteq> U) = (\<exists>r>0. mcball x r \<subseteq> U)"
+ by (meson dense mball_subset_mcball mcball_subset_mball_concentric order_trans)
+
+lemma openin_mtopology_mcball:
+ "openin mtopology U \<longleftrightarrow> U \<subseteq> M \<and> (\<forall>x. x \<in> U \<longrightarrow> (\<exists>r. 0 < r \<and> mcball x r \<subseteq> U))"
+ using mball_iff_mcball openin_mtopology by presburger
+
+lemma metric_derived_set_of:
+ "mtopology derived_set_of S = {x \<in> M. \<forall>r>0. \<exists>y\<in>S. y\<noteq>x \<and> y \<in> mball x r}" (is "?lhs=?rhs")
+proof
+ show "?lhs \<subseteq> ?rhs"
+ unfolding openin_mtopology derived_set_of_def
+ by clarsimp (metis in_mball openin_mball openin_mtopology zero)
+ show "?rhs \<subseteq> ?lhs"
+ unfolding openin_mtopology derived_set_of_def
+ by clarify (metis subsetD topspace_mtopology)
+qed
+
+lemma metric_closure_of:
+ "mtopology closure_of S = {x \<in> M. \<forall>r>0. \<exists>y \<in> S. y \<in> mball x r}"
+proof -
+ have "\<And>x r. \<lbrakk>0 < r; x \<in> mtopology closure_of S\<rbrakk> \<Longrightarrow> \<exists>y\<in>S. y \<in> mball x r"
+ by (metis centre_in_mball_iff in_closure_of openin_mball topspace_mtopology)
+ moreover have "\<And>x T. \<lbrakk>x \<in> M; \<forall>r>0. \<exists>y\<in>S. y \<in> mball x r\<rbrakk> \<Longrightarrow> x \<in> mtopology closure_of S"
+ by (smt (verit) in_closure_of in_mball openin_mtopology subsetD topspace_mtopology)
+ ultimately show ?thesis
+ by (auto simp: in_closure_of)
+qed
+
+lemma metric_closure_of_alt:
+ "mtopology closure_of S = {x \<in> M. \<forall>r>0. \<exists>y \<in> S. y \<in> mcball x r}"
+proof -
+ have "\<And>x r. \<lbrakk>\<forall>r>0. x \<in> M \<and> (\<exists>y\<in>S. y \<in> mcball x r); 0 < r\<rbrakk> \<Longrightarrow> \<exists>y\<in>S. y \<in> M \<and> d x y < r"
+ by (meson dense in_mcball le_less_trans)
+ then show ?thesis
+ by (fastforce simp: metric_closure_of in_closure_of)
+qed
+
+lemma metric_interior_of:
+ "mtopology interior_of S = {x \<in> M. \<exists>\<epsilon>>0. mball x \<epsilon> \<subseteq> S}" (is "?lhs=?rhs")
+proof
+ show "?lhs \<subseteq> ?rhs"
+ using interior_of_maximal_eq openin_mtopology by fastforce
+ show "?rhs \<subseteq> ?lhs"
+ using interior_of_def openin_mball by fastforce
+qed
+
+lemma metric_interior_of_alt:
+ "mtopology interior_of S = {x \<in> M. \<exists>\<epsilon>>0. mcball x \<epsilon> \<subseteq> S}"
+ by (fastforce simp: mball_iff_mcball metric_interior_of)
+
+lemma in_interior_of_mball:
+ "x \<in> mtopology interior_of S \<longleftrightarrow> x \<in> M \<and> (\<exists>\<epsilon>>0. mball x \<epsilon> \<subseteq> S)"
+ using metric_interior_of by force
+
+lemma in_interior_of_mcball:
+ "x \<in> mtopology interior_of S \<longleftrightarrow> x \<in> M \<and> (\<exists>\<epsilon>>0. mcball x \<epsilon> \<subseteq> S)"
+ using metric_interior_of_alt by force
+
+lemma Hausdorff_space_mtopology: "Hausdorff_space mtopology"
+ unfolding Hausdorff_space_def
+proof clarify
+ fix x y
+ assume x: "x \<in> topspace mtopology" and y: "y \<in> topspace mtopology" and "x \<noteq> y"
+ then have gt0: "d x y / 2 > 0"
+ by auto
+ have "disjnt (mball x (d x y / 2)) (mball y (d x y / 2))"
+ by (simp add: disjoint_mball)
+ then show "\<exists>U V. openin mtopology U \<and> openin mtopology V \<and> x \<in> U \<and> y \<in> V \<and> disjnt U V"
+ by (metis centre_in_mball_iff gt0 openin_mball topspace_mtopology x y)
+qed
+
+
+
+subsection\<open>Bounded sets\<close>
+
+definition mbounded where "mbounded S \<longleftrightarrow> (\<exists>x B. S \<subseteq> mcball x B)"
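+
+text \<open>Equivalently, a set is bounded iff it is contained in \<open>M\<close> and has bounded
+  diameter; see \<open>mbounded_alt\<close> below.\<close>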
+
+lemma mbounded_pos: "mbounded S \<longleftrightarrow> (\<exists>x B. 0 < B \<and> S \<subseteq> mcball x B)"
+proof -
+ have "\<exists>x' r'. 0 < r' \<and> S \<subseteq> mcball x' r'" if "S \<subseteq> mcball x r" for x r
+ by (metis gt_ex less_eq_real_def linorder_not_le mcball_subset_concentric order_trans that)
+ then show ?thesis
+ by (auto simp: mbounded_def)
+qed
+
+lemma mbounded_alt:
+ "mbounded S \<longleftrightarrow> S \<subseteq> M \<and> (\<exists>B. \<forall>x \<in> S. \<forall>y \<in> S. d x y \<le> B)"
+proof -
+ have "\<And>x B. S \<subseteq> mcball x B \<Longrightarrow> \<forall>x\<in>S. \<forall>y\<in>S. d x y \<le> 2 * B"
+ by (smt (verit, best) commute in_mcball subsetD triangle)
+ then show ?thesis
+ apply (auto simp: mbounded_def subset_iff)
+ apply blast+
+ done
+qed
+
+
+lemma mbounded_alt_pos:
+ "mbounded S \<longleftrightarrow> S \<subseteq> M \<and> (\<exists>B>0. \<forall>x \<in> S. \<forall>y \<in> S. d x y \<le> B)"
+ by (smt (verit, del_insts) gt_ex mbounded_alt)
+
+lemma mbounded_subset: "\<lbrakk>mbounded T; S \<subseteq> T\<rbrakk> \<Longrightarrow> mbounded S"
+ by (meson mbounded_def order_trans)
+
+lemma mbounded_subset_mspace: "mbounded S \<Longrightarrow> S \<subseteq> M"
+ by (simp add: mbounded_alt)
+
+lemma mbounded:
+ "mbounded S \<longleftrightarrow> S = {} \<or> (\<forall>x \<in> S. x \<in> M) \<and> (\<exists>y B. y \<in> M \<and> (\<forall>x \<in> S. d y x \<le> B))"
+ by (meson all_not_in_conv in_mcball mbounded_def subset_iff)
+
+lemma mbounded_empty [iff]: "mbounded {}"
+ by (simp add: mbounded)
+
+lemma mbounded_mcball: "mbounded (mcball x r)"
+ using mbounded_def by auto
+
+lemma mbounded_mball [iff]: "mbounded (mball x r)"
+ by (meson mball_subset_mcball mbounded_def)
+
+lemma mbounded_insert: "mbounded (insert a S) \<longleftrightarrow> a \<in> M \<and> mbounded S"
+proof -
+ have "\<And>y B. \<lbrakk>y \<in> M; \<forall>x\<in>S. d y x \<le> B\<rbrakk>
+ \<Longrightarrow> \<exists>y. y \<in> M \<and> (\<exists>B \<ge> d y a. \<forall>x\<in>S. d y x \<le> B)"
+ by (metis order.trans nle_le)
+ then show ?thesis
+ by (auto simp: mbounded)
+qed
+
+lemma mbounded_Int: "mbounded S \<Longrightarrow> mbounded (S \<inter> T)"
+ by (meson inf_le1 mbounded_subset)
+
+lemma mbounded_Un: "mbounded (S \<union> T) \<longleftrightarrow> mbounded S \<and> mbounded T" (is "?lhs=?rhs")
+proof
+ assume R: ?rhs
+ show ?lhs
+ proof (cases "S={} \<or> T={}")
+ case True then show ?thesis
+ using R by auto
+ next
+ case False
+ obtain x y B C where "S \<subseteq> mcball x B" "T \<subseteq> mcball y C" "B > 0" "C > 0" "x \<in> M" "y \<in> M"
+ using R mbounded_pos
+ by (metis False mcball_eq_empty subset_empty)
+ then have "S \<union> T \<subseteq> mcball x (B + C + d x y)"
+ by (smt (verit) commute dual_order.trans le_supI mcball_subset mdist_pos_eq)
+ then show ?thesis
+ using mbounded_def by blast
+ qed
+next
+ show "?lhs \<Longrightarrow> ?rhs"
+ using mbounded_def by auto
+qed
+
+lemma mbounded_Union:
+ "\<lbrakk>finite \<F>; \<And>X. X \<in> \<F> \<Longrightarrow> mbounded X\<rbrakk> \<Longrightarrow> mbounded (\<Union>\<F>)"
+ by (induction \<F> rule: finite_induct) (auto simp: mbounded_Un)
+
+lemma mbounded_closure_of:
+ "mbounded S \<Longrightarrow> mbounded (mtopology closure_of S)"
+ by (meson closedin_mcball closure_of_minimal mbounded_def)
+
+lemma mbounded_closure_of_eq:
+ "S \<subseteq> M \<Longrightarrow> (mbounded (mtopology closure_of S) \<longleftrightarrow> mbounded S)"
+ by (metis closure_of_subset mbounded_closure_of mbounded_subset topspace_mtopology)
+
+
+lemma maxdist_thm:
+ assumes "mbounded S"
+ and "x \<in> S"
+ and "y \<in> S"
+ shows "d x y = (SUP z\<in>S. \<bar>d x z - d z y\<bar>)"
+proof -
+ have "\<bar>d x z - d z y\<bar> \<le> d x y" if "z \<in> S" for z
+ by (metis all_not_in_conv assms mbounded mdist_reverse_triangle that)
+ moreover have "d x y \<le> r"
+ if "\<And>z. z \<in> S \<Longrightarrow> \<bar>d x z - d z y\<bar> \<le> r" for r :: real
+ using that assms mbounded_subset_mspace mdist_zero by fastforce
+ ultimately show ?thesis
+ by (intro cSup_eq [symmetric]) auto
+qed
+
+
+lemma metric_eq_thm: "\<lbrakk>S \<subseteq> M; x \<in> S; y \<in> S\<rbrakk> \<Longrightarrow> (x = y) = (\<forall>z\<in>S. d x z = d y z)"
+ by (metis commute subset_iff zero)
+
+lemma compactin_imp_mbounded:
+ assumes "compactin mtopology S"
+ shows "mbounded S"
+proof -
+ have "S \<subseteq> M"
+ and com: "\<And>\<U>. \<lbrakk>\<forall>U\<in>\<U>. openin mtopology U; S \<subseteq> \<Union>\<U>\<rbrakk> \<Longrightarrow> \<exists>\<F>. finite \<F> \<and> \<F> \<subseteq> \<U> \<and> S \<subseteq> \<Union>\<F>"
+ using assms by (auto simp: compactin_def mbounded_def)
+ show ?thesis
+ proof (cases "S = {}")
+ case False
+ with \<open>S \<subseteq> M\<close> obtain a where "a \<in> S" "a \<in> M"
+ by blast
+ with \<open>S \<subseteq> M\<close> gt_ex have "S \<subseteq> \<Union>(range (mball a))"
+ by force
+ moreover have "\<forall>U \<in> range (mball a). openin mtopology U"
+ by (simp add: openin_mball)
+ ultimately obtain \<F> where "finite \<F>" "\<F> \<subseteq> range (mball a)" "S \<subseteq> \<Union>\<F>"
+ by (meson com)
+ then show ?thesis
+ using mbounded_Union mbounded_subset by fastforce
+ qed auto
+qed
+
+end
+
+
+subsection\<open>Subspace of a metric space\<close>
+
+locale submetric = Metric_space +
+ fixes A
+ assumes subset: "A \<subseteq> M"
+
+sublocale submetric \<subseteq> sub: Metric_space A d
+ by (simp add: subset subspace)
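+
+(*Thus every subset A of M is itself a metric space under the restricted distance,
+  and its metric topology is the corresponding subspace topology; see
+  mtopology_submetric below.*)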
+
+context submetric
+begin
+
+lemma mball_submetric_eq: "sub.mball a r = (if a \<in> A then A \<inter> mball a r else {})"
+and mcball_submetric_eq: "sub.mcball a r = (if a \<in> A then A \<inter> mcball a r else {})"
+ using subset by force+
+
+lemma mtopology_submetric: "sub.mtopology = subtopology mtopology A"
+ unfolding topology_eq
+proof (intro allI iffI)
+ fix S
+ assume "openin sub.mtopology S"
+ then have "\<exists>T. openin (subtopology mtopology A) T \<and> x \<in> T \<and> T \<subseteq> S" if "x \<in> S" for x
+ by (metis mball_submetric_eq openin_mball openin_subtopology_Int2 sub.centre_in_mball_iff sub.openin_mtopology subsetD that)
+ then show "openin (subtopology mtopology A) S"
+ by (meson openin_subopen)
+next
+ fix S
+ assume "openin (subtopology mtopology A) S"
+ then obtain T where "openin mtopology T" "S = T \<inter> A"
+ by (meson openin_subtopology)
+ then have "mopen T"
+ by (simp add: mopen_def openin_mtopology)
+ then have "sub.mopen (T \<inter> A)"
+ unfolding sub.mopen_def mopen_def
+ by (metis inf.coboundedI2 mball_submetric_eq Int_iff \<open>S = T \<inter> A\<close> inf.bounded_iff subsetI)
+ then show "openin sub.mtopology S"
+ using \<open>S = T \<inter> A\<close> sub.mopen_def sub.openin_mtopology by force
+qed
+
+lemma mbounded_submetric: "sub.mbounded T \<longleftrightarrow> mbounded T \<and> T \<subseteq> A"
+ by (meson mbounded_alt sub.mbounded_alt subset subset_trans)
+
+end
+
+lemma (in Metric_space) submetric_empty [iff]: "submetric M d {}"
+ by (simp add: Metric_space_axioms submetric.intro submetric_axioms_def)
+
+
+subsection\<open>The discrete metric\<close>
+
+locale discrete_metric =
+ fixes M :: "'a set"
+
+definition (in discrete_metric) dd :: "'a \<Rightarrow> 'a \<Rightarrow> real"
+ where "dd \<equiv> \<lambda>x y::'a. if x=y then 0 else 1"
+
+lemma metric_M_dd: "Metric_space M discrete_metric.dd"
+ by (simp add: discrete_metric.dd_def Metric_space.intro)
+
+sublocale discrete_metric \<subseteq> disc: Metric_space M dd
+ by (simp add: metric_M_dd)
+
+
+lemma (in discrete_metric) mopen_singleton:
+ assumes "x \<in> M" shows "disc.mopen {x}"
+proof -
+ have "disc.mball x (1/2) \<subseteq> {x}"
+ by (smt (verit) dd_def disc.in_mball less_divide_eq_1_pos singleton_iff subsetI)
+ with assms show ?thesis
+ using disc.mopen_def half_gt_zero_iff zero_less_one by blast
+qed
+
+lemma (in discrete_metric) mtopology_discrete_metric:
+ "disc.mtopology = discrete_topology M"
+proof -
+ have "\<And>x. x \<in> M \<Longrightarrow> openin disc.mtopology {x}"
+ by (simp add: disc.mtopology_def mopen_singleton)
+ then show ?thesis
+ by (metis disc.topspace_mtopology discrete_topology_unique)
+qed
+
+lemma (in discrete_metric) discrete_ultrametric:
+ "dd x z \<le> max (dd x y) (dd y z)"
+ by (simp add: dd_def)
+
+
+lemma (in discrete_metric) dd_le1: "dd x y \<le> 1"
+ by (simp add: dd_def)
+
+lemma (in discrete_metric) mbounded_discrete_metric: "disc.mbounded S \<longleftrightarrow> S \<subseteq> M"
+ by (meson dd_le1 disc.mbounded_alt)
+
+
+
+subsection\<open>Metrizable spaces\<close>
+
+definition metrizable_space where
+ "metrizable_space X \<equiv> \<exists>M d. Metric_space M d \<and> X = Metric_space.mtopology M d"
+
+lemma (in Metric_space) metrizable_space_mtopology: "metrizable_space mtopology"
+ using local.Metric_space_axioms metrizable_space_def by blast
+
+lemma openin_mtopology_eq_open [simp]: "openin Met_TC.mtopology = open"
+ by (simp add: Met_TC.mtopology_def)
+
+lemma closedin_mtopology_eq_closed [simp]: "closedin Met_TC.mtopology = closed"
+proof -
+ have "(euclidean::'a topology) = Met_TC.mtopology"
+ by (simp add: Met_TC.mtopology_def)
+ then show ?thesis
+ using closed_closedin by fastforce
+qed
+
+lemma compactin_mtopology_eq_compact [simp]: "compactin Met_TC.mtopology = compact"
+ by (simp add: compactin_def compact_eq_Heine_Borel fun_eq_iff) meson
+
+lemma metrizable_space_discrete_topology:
+ "metrizable_space(discrete_topology U)"
+ by (metis discrete_metric.mtopology_discrete_metric metric_M_dd metrizable_space_def)
+
+lemma metrizable_space_subtopology:
+ assumes "metrizable_space X"
+ shows "metrizable_space(subtopology X S)"
+proof -
+ obtain M d where "Metric_space M d" and X: "X = Metric_space.mtopology M d"
+ using assms metrizable_space_def by blast
+ then interpret submetric M d "M \<inter> S"
+ by (simp add: submetric.intro submetric_axioms_def)
+ show ?thesis
+ unfolding metrizable_space_def
+ by (metis X mtopology_submetric sub.Metric_space_axioms subtopology_restrict topspace_mtopology)
+qed
+
+lemma homeomorphic_metrizable_space_aux:
+ assumes "X homeomorphic_space Y" "metrizable_space X"
+ shows "metrizable_space Y"
+proof -
+ obtain M d where "Metric_space M d" and X: "X = Metric_space.mtopology M d"
+ using assms by (auto simp: metrizable_space_def)
+ then interpret m: Metric_space M d
+ by simp
+ obtain f g where hmf: "homeomorphic_map X Y f" and hmg: "homeomorphic_map Y X g"
+ and fg: "(\<forall>x \<in> M. g(f x) = x) \<and> (\<forall>y \<in> topspace Y. f(g y) = y)"
+ using assms X homeomorphic_maps_map homeomorphic_space_def by fastforce
+ define d' where "d' x y \<equiv> d (g x) (g y)" for x y
+ interpret m': Metric_space "topspace Y" "d'"
+ unfolding d'_def
+ proof
+ show "(d (g x) (g y) = 0) = (x = y)" if "x \<in> topspace Y" "y \<in> topspace Y" for x y
+ by (metis fg X hmg homeomorphic_imp_surjective_map imageI m.topspace_mtopology m.zero that)
+ show "d (g x) (g z) \<le> d (g x) (g y) + d (g y) (g z)"
+ if "x \<in> topspace Y" and "y \<in> topspace Y" and "z \<in> topspace Y" for x y z
+ by (metis X that hmg homeomorphic_eq_everything_map imageI m.topspace_mtopology m.triangle)
+ qed (auto simp: m.nonneg m.commute)
+ have "Y = Metric_space.mtopology (topspace Y) d'"
+ unfolding topology_eq
+ proof (intro allI)
+ fix S
+ have "openin m'.mtopology S" if S: "S \<subseteq> topspace Y" and "openin X (g ` S)"
+ unfolding m'.openin_mtopology
+ proof (intro conjI that strip)
+ fix y
+ assume "y \<in> S"
+ then obtain r where "r>0" and r: "m.mball (g y) r \<subseteq> g ` S"
+ using X \<open>openin X (g ` S)\<close> m.openin_mtopology using \<open>y \<in> S\<close> by auto
+ then have "g ` m'.mball y r \<subseteq> m.mball (g y) r"
+ using X d'_def hmg homeomorphic_imp_surjective_map by fastforce
+ with S fg have "m'.mball y r \<subseteq> S"
+ by (smt (verit, del_insts) image_iff m'.in_mball r subset_iff)
+ then show "\<exists>r>0. m'.mball y r \<subseteq> S"
+ using \<open>0 < r\<close> by blast
+ qed
+ moreover have "openin X (g ` S)" if ope': "openin m'.mtopology S"
+ proof -
+ have "\<exists>r>0. m.mball (g y) r \<subseteq> g ` S" if "y \<in> S" for y
+ proof -
+ have y: "y \<in> topspace Y"
+ using m'.openin_mtopology ope' that by blast
+ obtain r where "r > 0" and r: "m'.mball y r \<subseteq> S"
+ using ope' by (meson \<open>y \<in> S\<close> m'.openin_mtopology)
+ moreover have "\<And>x. \<lbrakk>x \<in> M; d (g y) x < r\<rbrakk> \<Longrightarrow> \<exists>u. u \<in> topspace Y \<and> d' y u < r \<and> x = g u"
+ using fg X d'_def hmf homeomorphic_imp_surjective_map by fastforce
+ ultimately have "m.mball (g y) r \<subseteq> g ` m'.mball y r"
+ using y by (force simp: m'.openin_mtopology)
+ then show ?thesis
+ using \<open>0 < r\<close> r by blast
+ qed
+ then show ?thesis
+ using X hmg homeomorphic_imp_surjective_map m.openin_mtopology ope' openin_subset by fastforce
+ qed
+ ultimately have "(S \<subseteq> topspace Y \<and> openin X (g ` S)) = openin m'.mtopology S"
+ using m'.topspace_mtopology openin_subset by blast
+ then show "openin Y S = openin m'.mtopology S"
+ by (simp add: m'.mopen_def homeomorphic_map_openness_eq [OF hmg])
+ qed
+ then show ?thesis
+ using m'.metrizable_space_mtopology by force
+qed
+
+lemma homeomorphic_metrizable_space:
+ assumes "X homeomorphic_space Y"
+ shows "metrizable_space X \<longleftrightarrow> metrizable_space Y"
+ using assms homeomorphic_metrizable_space_aux homeomorphic_space_sym by metis
+
+lemma metrizable_space_retraction_map_image:
+ "retraction_map X Y r \<and> metrizable_space X
+ \<Longrightarrow> metrizable_space Y"
+ using hereditary_imp_retractive_property metrizable_space_subtopology homeomorphic_metrizable_space
+ by blast
+
+
+lemma metrizable_imp_Hausdorff_space:
+ "metrizable_space X \<Longrightarrow> Hausdorff_space X"
+ by (metis Metric_space.Hausdorff_space_mtopology metrizable_space_def)
+
+(**
+lemma metrizable_imp_kc_space:
+ "metrizable_space X \<Longrightarrow> kc_space X"
+oops
+ MESON_TAC[METRIZABLE_IMP_HAUSDORFF_SPACE; HAUSDORFF_IMP_KC_SPACE]);;
+
+lemma kc_space_mtopology:
+ "kc_space mtopology"
+oops
+ REWRITE_TAC[GSYM FORALL_METRIZABLE_SPACE; METRIZABLE_IMP_KC_SPACE]);;
+**)
+
+lemma metrizable_imp_t1_space:
+ "metrizable_space X \<Longrightarrow> t1_space X"
+ by (simp add: Hausdorff_imp_t1_space metrizable_imp_Hausdorff_space)
+
+lemma closed_imp_gdelta_in:
+ assumes X: "metrizable_space X" and S: "closedin X S"
+ shows "gdelta_in X S"
+proof -
+ obtain M d where "Metric_space M d" and Xeq: "X = Metric_space.mtopology M d"
+ using X metrizable_space_def by blast
+ then interpret M: Metric_space M d
+ by blast
+ have "S \<subseteq> M"
+ using M.closedin_metric \<open>X = M.mtopology\<close> S by blast
+ show ?thesis
+ proof (cases "S = {}")
+ case True
+ then show ?thesis
+ by simp
+ next
+ case False
+ have "\<exists>y\<in>S. d x y < inverse (1 + real n)" if "x \<in> S" for x n
+ using \<open>S \<subseteq> M\<close> M.mdist_zero [of x] that by force
+ moreover
+ have "x \<in> S" if "x \<in> M" and \<section>: "\<And>n. \<exists>y\<in>S. d x y < inverse(Suc n)" for x
+ proof -
+ have *: "\<exists>y\<in>S. d x y < \<epsilon>" if "\<epsilon> > 0" for \<epsilon>
+ by (metis \<section> that not0_implies_Suc order_less_le order_less_le_trans real_arch_inverse)
+ have "closedin M.mtopology S"
+ using S by (simp add: Xeq)
+ then show ?thesis
+ apply (simp add: M.closedin_metric)
+ by (metis * \<open>x \<in> M\<close> M.in_mball disjnt_insert1 insert_absorb subsetD)
+ qed
+ ultimately have Seq: "S = \<Inter>(range (\<lambda>n. {x\<in>M. \<exists>y\<in>S. d x y < inverse(Suc n)}))"
+ using \<open>S \<subseteq> M\<close> by force
+ have "openin M.mtopology {xa \<in> M. \<exists>y\<in>S. d xa y < inverse (1 + real n)}" for n
+ proof (clarsimp simp: M.openin_mtopology)
+ fix x y
+ assume "x \<in> M" "y \<in> S" and dxy: "d x y < inverse (1 + real n)"
+ then have "\<And>z. \<lbrakk>z \<in> M; d x z < inverse (1 + real n) - d x y\<rbrakk> \<Longrightarrow> \<exists>y\<in>S. d z y < inverse (1 + real n)"
+ by (smt (verit) M.commute M.triangle \<open>S \<subseteq> M\<close> in_mono)
+ with dxy show "\<exists>r>0. M.mball x r \<subseteq> {z \<in> M. \<exists>y\<in>S. d z y < inverse (1 + real n)}"
+ by (rule_tac x="inverse(Suc n) - d x y" in exI) auto
+ qed
+ then show ?thesis
+ apply (subst Seq)
+ apply (force simp: Xeq intro: gdelta_in_Inter open_imp_gdelta_in)
+ done
+ qed
+qed
+
+lemma open_imp_fsigma_in:
+ "\<lbrakk>metrizable_space X; openin X S\<rbrakk> \<Longrightarrow> fsigma_in X S"
+ by (meson closed_imp_gdelta_in fsigma_in_gdelta_in openin_closedin openin_subset)
+
+(*NEEDS first_countable
+lemma first_countable_mtopology:
+ "first_countable mtopology"
+oops
+ GEN_TAC THEN REWRITE_TAC[first_countable; TOPSPACE_MTOPOLOGY] THEN
+ X_GEN_TAC `x::A` THEN DISCH_TAC THEN
+ EXISTS_TAC `{ mball m (x::A,r) | rational r \<and> 0 < r}` THEN
+ REWRITE_TAC[FORALL_IN_GSPEC; OPEN_IN_MBALL; EXISTS_IN_GSPEC] THEN
+ ONCE_REWRITE_TAC[SET_RULE
+ `{f x | S x \<and> Q x} = f ` {x \<in> S. Q x}`] THEN
+ SIMP_TAC[COUNTABLE_IMAGE; COUNTABLE_RATIONAL; COUNTABLE_RESTRICT] THEN
+ REWRITE_TAC[OPEN_IN_MTOPOLOGY] THEN
+ X_GEN_TAC `U::A=>bool` THEN STRIP_TAC THEN
+ FIRST_X_ASSUM(MP_TAC \<circ> SPEC `x::A`) THEN
+ ASM_REWRITE_TAC[LEFT_IMP_EXISTS_THM] THEN
+ X_GEN_TAC `r::real` THEN STRIP_TAC THEN FIRST_ASSUM
+ (MP_TAC \<circ> SPEC `r::real` \<circ> MATCH_MP RATIONAL_APPROXIMATION_BELOW) THEN
+ MATCH_MP_TAC MONO_EXISTS THEN X_GEN_TAC `q::real` THEN
+ REWRITE_TAC[REAL_SUB_REFL] THEN STRIP_TAC THEN
+ ASM_SIMP_TAC[CENTRE_IN_MBALL] THEN
+ TRANS_TAC SUBSET_TRANS `mball m (x::A,r)` THEN
+ ASM_SIMP_TAC[MBALL_SUBSET_CONCENTRIC; REAL_LT_IMP_LE]);;
+
+lemma metrizable_imp_first_countable:
+ "metrizable_space X \<Longrightarrow> first_countable X"
+oops
+ REWRITE_TAC[FORALL_METRIZABLE_SPACE; FIRST_COUNTABLE_MTOPOLOGY]);;
+*)
+
+lemma mball_eq_ball [simp]: "Met_TC.mball = ball"
+ by force
+
+lemma mopen_eq_open [simp]: "Met_TC.mopen = open"
+ by (force simp: open_contains_ball Met_TC.mopen_def)
+
+lemma metrizable_space_euclidean:
+ "metrizable_space (euclidean :: 'a::metric_space topology)"
+ unfolding metrizable_space_def
+ by (metis Met_TC.Metric_space_axioms Met_TC.mtopology_def mopen_eq_open)
+
+lemma (in Metric_space) regular_space_mtopology:
+ "regular_space mtopology"
+unfolding regular_space_def
+proof clarify
+ fix C a
+ assume C: "closedin mtopology C" and a: "a \<in> topspace mtopology" and "a \<notin> C"
+ have "openin mtopology (topspace mtopology - C)"
+ by (simp add: C openin_diff)
+ then obtain r where "r>0" and r: "mball a r \<subseteq> topspace mtopology - C"
+ unfolding openin_mtopology using \<open>a \<notin> C\<close> a by auto
+ show "\<exists>U V. openin mtopology U \<and> openin mtopology V \<and> a \<in> U \<and> C \<subseteq> V \<and> disjnt U V"
+ proof (intro exI conjI)
+ show "a \<in> mball a (r/2)"
+ using \<open>0 < r\<close> a by force
+ show "C \<subseteq> topspace mtopology - mcball a (r/2)"
+ using C \<open>0 < r\<close> r by (fastforce simp: closedin_metric)
+ qed (auto simp: openin_mball closedin_mcball openin_diff disjnt_iff)
+qed
+
+lemma metrizable_imp_regular_space:
+ "metrizable_space X \<Longrightarrow> regular_space X"
+ by (metis Metric_space.regular_space_mtopology metrizable_space_def)
+
+lemma regular_space_euclidean:
+ "regular_space (euclidean :: 'a::metric_space topology)"
+ by (simp add: metrizable_imp_regular_space metrizable_space_euclidean)
+
+
+subsection\<open>Limits at a point in a topological space\<close>
+
+lemma (in Metric_space) eventually_atin_metric:
+ "eventually P (atin mtopology a) \<longleftrightarrow>
+ (a \<in> M \<longrightarrow> (\<exists>\<delta>>0. \<forall>x. x \<in> M \<and> 0 < d x a \<and> d x a < \<delta> \<longrightarrow> P x))" (is "?lhs=?rhs")
+proof (cases "a \<in> M")
+ case True
+ show ?thesis
+ proof
+ assume L: ?lhs
+ with True obtain U where "openin mtopology U" "a \<in> U" and U: "\<forall>x\<in>U - {a}. P x"
+ by (auto simp: eventually_atin)
+ then obtain r where "r>0" and "mball a r \<subseteq> U"
+ by (meson openin_mtopology)
+ with U show ?rhs
+ by (smt (verit, ccfv_SIG) commute in_mball insert_Diff_single insert_iff subset_iff)
+ next
+ assume ?rhs
+ then obtain \<delta> where "\<delta>>0" and \<delta>: "\<forall>x. x \<in> M \<and> 0 < d x a \<and> d x a < \<delta> \<longrightarrow> P x"
+ using True by blast
+ then have "\<forall>x \<in> mball a \<delta> - {a}. P x"
+ by (simp add: commute)
+ then show ?lhs
+ unfolding eventually_atin openin_mtopology
+ by (metis True \<open>0 < \<delta>\<close> centre_in_mball_iff openin_mball openin_mtopology)
+ qed
+qed auto
+
+subsection \<open>Normal spaces and metric spaces\<close>
+
+lemma (in Metric_space) normal_space_mtopology:
+ "normal_space mtopology"
+ unfolding normal_space_def
+proof clarify
+ fix S T
+ assume "closedin mtopology S"
+ then have "\<And>x. x \<in> M - S \<Longrightarrow> (\<exists>r>0. mball x r \<subseteq> M - S)"
+ by (simp add: closedin_def openin_mtopology)
+ then obtain \<delta> where d0: "\<And>x. x \<in> M - S \<Longrightarrow> \<delta> x > 0 \<and> mball x (\<delta> x) \<subseteq> M - S"
+ by metis
+ assume "closedin mtopology T"
+ then have "\<And>x. x \<in> M - T \<Longrightarrow> (\<exists>r>0. mball x r \<subseteq> M - T)"
+ by (simp add: closedin_def openin_mtopology)
+ then obtain \<epsilon> where e: "\<And>x. x \<in> M - T \<Longrightarrow> \<epsilon> x > 0 \<and> mball x (\<epsilon> x) \<subseteq> M - T"
+ by metis
+ assume "disjnt S T"
+ have "S \<subseteq> M" "T \<subseteq> M"
+ using \<open>closedin mtopology S\<close> \<open>closedin mtopology T\<close> closedin_metric by blast+
+ have \<delta>: "\<And>x. x \<in> T \<Longrightarrow> \<delta> x > 0 \<and> mball x (\<delta> x) \<subseteq> M - S"
+ by (meson DiffI \<open>T \<subseteq> M\<close> \<open>disjnt S T\<close> d0 disjnt_iff subsetD)
+ have \<epsilon>: "\<And>x. x \<in> S \<Longrightarrow> \<epsilon> x > 0 \<and> mball x (\<epsilon> x) \<subseteq> M - T"
+ by (meson Diff_iff \<open>S \<subseteq> M\<close> \<open>disjnt S T\<close> disjnt_iff e subsetD)
+ show "\<exists>U V. openin mtopology U \<and> openin mtopology V \<and> S \<subseteq> U \<and> T \<subseteq> V \<and> disjnt U V"
+ proof (intro exI conjI)
+ show "openin mtopology (\<Union>x\<in>S. mball x (\<epsilon> x / 2))" "openin mtopology (\<Union>x\<in>T. mball x (\<delta> x / 2))"
+ by force+
+ show "S \<subseteq> (\<Union>x\<in>S. mball x (\<epsilon> x / 2))"
+ using \<epsilon> \<open>S \<subseteq> M\<close> by force
+ show "T \<subseteq> (\<Union>x\<in>T. mball x (\<delta> x / 2))"
+ using \<delta> \<open>T \<subseteq> M\<close> by force
+ show "disjnt (\<Union>x\<in>S. mball x (\<epsilon> x / 2)) (\<Union>x\<in>T. mball x (\<delta> x / 2))"
+ using \<epsilon> \<delta>
+ apply (clarsimp simp: disjnt_iff subset_iff)
+ by (smt (verit, ccfv_SIG) field_sum_of_halves triangle')
+ qed
+qed
+
+lemma metrizable_imp_normal_space:
+ "metrizable_space X \<Longrightarrow> normal_space X"
+ by (metis Metric_space.normal_space_mtopology metrizable_space_def)
+
+subsection\<open>Topological limits (\<open>limitin\<close>) in metric spaces\<close>
+
+
+lemma (in Metric_space) limitin_mspace:
+ "limitin mtopology f l F \<Longrightarrow> l \<in> M"
+ using limitin_topspace by fastforce
+
+lemma (in Metric_space) limitin_metric_unique:
+ "\<lbrakk>limitin mtopology f l1 F; limitin mtopology f l2 F; F \<noteq> bot\<rbrakk> \<Longrightarrow> l1 = l2"
+ by (meson Hausdorff_space_mtopology limitin_Hausdorff_unique)
+
+lemma (in Metric_space) limitin_metric:
+ "limitin mtopology f l F \<longleftrightarrow> l \<in> M \<and> (\<forall>\<epsilon>>0. eventually (\<lambda>x. f x \<in> M \<and> d (f x) l < \<epsilon>) F)"
+ (is "?lhs=?rhs")
+proof
+ assume L: ?lhs
+ show ?rhs
+ unfolding limitin_def
+ proof (intro conjI strip)
+ show "l \<in> M"
+ using L limitin_mspace by blast
+ fix \<epsilon>::real
+ assume "\<epsilon>>0"
+ then have "\<forall>\<^sub>F x in F. f x \<in> mball l \<epsilon>"
+ using L openin_mball by (fastforce simp: limitin_def)
+ then show "\<forall>\<^sub>F x in F. f x \<in> M \<and> d (f x) l < \<epsilon>"
+ using commute eventually_mono by fastforce
+ qed
+next
+ assume R: ?rhs
+ then show ?lhs
+ by (force simp: limitin_def commute openin_mtopology subset_eq elim: eventually_mono)
+qed
+
+lemma (in Metric_space) limit_metric_sequentially:
+ "limitin mtopology f l sequentially \<longleftrightarrow>
+ l \<in> M \<and> (\<forall>\<epsilon>>0. \<exists>N. \<forall>n\<ge>N. f n \<in> M \<and> d (f n) l < \<epsilon>)"
+ by (auto simp: limitin_metric eventually_sequentially)
+
+lemma (in submetric) limitin_submetric_iff:
+ "limitin sub.mtopology f l F \<longleftrightarrow>
+ l \<in> A \<and> eventually (\<lambda>x. f x \<in> A) F \<and> limitin mtopology f l F" (is "?lhs=?rhs")
+ by (simp add: limitin_subtopology mtopology_submetric)
+
+lemma (in Metric_space) metric_closedin_iff_sequentially_closed:
+ "closedin mtopology S \<longleftrightarrow>
+ S \<subseteq> M \<and> (\<forall>\<sigma> l. range \<sigma> \<subseteq> S \<and> limitin mtopology \<sigma> l sequentially \<longrightarrow> l \<in> S)" (is "?lhs=?rhs")
+proof
+ assume ?lhs then show ?rhs
+ by (force simp: closedin_metric limitin_closedin range_subsetD)
+next
+ assume R: ?rhs
+ show ?lhs
+ unfolding closedin_metric
+ proof (intro conjI strip)
+ show "S \<subseteq> M"
+ using R by blast
+ fix x
+ assume "x \<in> M - S"
+ have False if "\<forall>r>0. \<exists>y. y \<in> M \<and> y \<in> S \<and> d x y < r"
+ proof -
+ have "\<forall>n. \<exists>y. y \<in> M \<and> y \<in> S \<and> d x y < inverse(Suc n)"
+ using that by auto
+ then obtain \<sigma> where \<sigma>: "\<And>n. \<sigma> n \<in> M \<and> \<sigma> n \<in> S \<and> d x (\<sigma> n) < inverse(Suc n)"
+ by metis
+ then have "range \<sigma> \<subseteq> M"
+ by blast
+ have "\<exists>N. \<forall>n\<ge>N. d x (\<sigma> n) < \<epsilon>" if "\<epsilon>>0" for \<epsilon>
+ proof -
+ have "real (Suc (nat \<lceil>inverse \<epsilon>\<rceil>)) \<ge> inverse \<epsilon>"
+ by linarith
+ then have "\<forall>n \<ge> nat \<lceil>inverse \<epsilon>\<rceil>. d x (\<sigma> n) < \<epsilon>"
+ by (metis \<sigma> inverse_inverse_eq inverse_le_imp_le nat_ceiling_le_eq nle_le not_less_eq_eq order.strict_trans2 that)
+ then show ?thesis ..
+ qed
+ with \<sigma> have "limitin mtopology \<sigma> x sequentially"
+ using \<open>x \<in> M - S\<close> commute limit_metric_sequentially by auto
+ then show ?thesis
+ by (metis R DiffD2 \<sigma> image_subset_iff \<open>x \<in> M - S\<close>)
+ qed
+ then show "\<exists>r>0. disjnt S (mball x r)"
+ by (meson disjnt_iff in_mball)
+ qed
+qed
+
+lemma (in Metric_space) limit_atin_metric:
+ "limitin X f y (atin mtopology x) \<longleftrightarrow>
+ y \<in> topspace X \<and>
+ (x \<in> M
+ \<longrightarrow> (\<forall>V. openin X V \<and> y \<in> V
+ \<longrightarrow> (\<exists>\<delta>>0. \<forall>x'. x' \<in> M \<and> 0 < d x' x \<and> d x' x < \<delta> \<longrightarrow> f x' \<in> V)))"
+ by (force simp: limitin_def eventually_atin_metric)
+
+lemma (in Metric_space) limitin_metric_dist_null:
+ "limitin mtopology f l F \<longleftrightarrow> l \<in> M \<and> eventually (\<lambda>x. f x \<in> M) F \<and> ((\<lambda>x. d (f x) l) \<longlongrightarrow> 0) F"
+ by (simp add: limitin_metric tendsto_iff eventually_conj_iff all_conj_distrib imp_conjR gt_ex)
+
+
+subsection\<open>Cauchy sequences and complete metric spaces\<close>
+
+context Metric_space
+begin
+
+definition MCauchy :: "(nat \<Rightarrow> 'a) \<Rightarrow> bool"
+ where "MCauchy \<sigma> \<equiv> range \<sigma> \<subseteq> M \<and> (\<forall>\<epsilon>>0. \<exists>N. \<forall>n n'. N \<le> n \<longrightarrow> N \<le> n' \<longrightarrow> d (\<sigma> n) (\<sigma> n') < \<epsilon>)"
+
+definition mcomplete
+ where "mcomplete \<equiv> (\<forall>\<sigma>. MCauchy \<sigma> \<longrightarrow> (\<exists>x. limitin mtopology \<sigma> x sequentially))"
+
+lemma mcomplete_empty [iff]: "Metric_space.mcomplete {} d"
+ by (simp add: Metric_space.MCauchy_def Metric_space.mcomplete_def subspace)
+
+lemma MCauchy_imp_MCauchy_suffix: "MCauchy \<sigma> \<Longrightarrow> MCauchy (\<sigma> \<circ> (+)n)"
+ unfolding MCauchy_def image_subset_iff comp_apply
+ by (metis UNIV_I add.commute trans_le_add1)
+
+lemma mcomplete:
+ "mcomplete \<longleftrightarrow>
+ (\<forall>\<sigma>. (\<forall>\<^sub>F n in sequentially. \<sigma> n \<in> M) \<and>
+ (\<forall>\<epsilon>>0. \<exists>N. \<forall>n n'. N \<le> n \<longrightarrow> N \<le> n' \<longrightarrow> d (\<sigma> n) (\<sigma> n') < \<epsilon>) \<longrightarrow>
+ (\<exists>x. limitin mtopology \<sigma> x sequentially))" (is "?lhs=?rhs")
+proof
+ assume L: ?lhs
+ show ?rhs
+ proof clarify
+ fix \<sigma>
+ assume "\<forall>\<^sub>F n in sequentially. \<sigma> n \<in> M"
+ and \<sigma>: "\<forall>\<epsilon>>0. \<exists>N. \<forall>n n'. N \<le> n \<longrightarrow> N \<le> n' \<longrightarrow> d (\<sigma> n) (\<sigma> n') < \<epsilon>"
+ then obtain N where "\<And>n. n\<ge>N \<Longrightarrow> \<sigma> n \<in> M"
+ by (auto simp: eventually_sequentially)
+ with \<sigma> have "MCauchy (\<sigma> \<circ> (+)N)"
+ unfolding MCauchy_def image_subset_iff comp_apply by (meson le_add1 trans_le_add2)
+ then obtain x where "limitin mtopology (\<sigma> \<circ> (+)N) x sequentially"
+ using L MCauchy_imp_MCauchy_suffix mcomplete_def by blast
+ then have "limitin mtopology \<sigma> x sequentially"
+ unfolding o_def by (auto simp: add.commute limitin_sequentially_offset_rev)
+ then show "\<exists>x. limitin mtopology \<sigma> x sequentially" ..
+ qed
+qed (simp add: mcomplete_def MCauchy_def image_subset_iff)
+
+lemma mcomplete_empty_mspace: "M = {} \<Longrightarrow> mcomplete"
+ using MCauchy_def mcomplete_def by blast
+
+lemma MCauchy_const [simp]: "MCauchy (\<lambda>n. a) \<longleftrightarrow> a \<in> M"
+ using MCauchy_def mdist_zero by auto
+
+lemma convergent_imp_MCauchy:
+ assumes "range \<sigma> \<subseteq> M" and lim: "limitin mtopology \<sigma> l sequentially"
+ shows "MCauchy \<sigma>"
+ unfolding MCauchy_def image_subset_iff
+proof (intro conjI strip)
+ fix \<epsilon>::real
+ assume "\<epsilon> > 0"
+ then have "\<forall>\<^sub>F n in sequentially. \<sigma> n \<in> M \<and> d (\<sigma> n) l < \<epsilon>/2"
+ using half_gt_zero lim limitin_metric by blast
+ then obtain N where "\<And>n. n\<ge>N \<Longrightarrow> \<sigma> n \<in> M \<and> d (\<sigma> n) l < \<epsilon>/2"
+ by (force simp: eventually_sequentially)
+ then show "\<exists>N. \<forall>n n'. N \<le> n \<longrightarrow> N \<le> n' \<longrightarrow> d (\<sigma> n) (\<sigma> n') < \<epsilon>"
+ by (smt (verit) Metric_space.limitin_mspace Metric_space.mdist_reverse_triangle Metric_space_axioms field_sum_of_halves lim)
+qed (use assms in blast)
+
+
+lemma mcomplete_alt:
+ "mcomplete \<longleftrightarrow> (\<forall>\<sigma>. MCauchy \<sigma> \<longleftrightarrow> range \<sigma> \<subseteq> M \<and> (\<exists>x. limitin mtopology \<sigma> x sequentially))"
+ using MCauchy_def convergent_imp_MCauchy mcomplete_def by blast
+
+lemma MCauchy_subsequence:
+ assumes "strict_mono r" "MCauchy \<sigma>"
+ shows "MCauchy (\<sigma> \<circ> r)"
+proof -
+ have "d (\<sigma> (r n)) (\<sigma> (r n')) < \<epsilon>"
+ if "N \<le> n" "N \<le> n'" "strict_mono r" "\<forall>n n'. N \<le> n \<longrightarrow> N \<le> n' \<longrightarrow> d (\<sigma> n) (\<sigma> n') < \<epsilon>"
+ for \<epsilon> N n n'
+ using that by (meson le_trans strict_mono_imp_increasing)
+ moreover have "range (\<lambda>x. \<sigma> (r x)) \<subseteq> M"
+ using MCauchy_def assms by blast
+ ultimately show ?thesis
+ using assms by (simp add: MCauchy_def) metis
+qed
+
+lemma MCauchy_offset:
+ assumes cau: "MCauchy (\<sigma> \<circ> (+)k)" and \<sigma>: "\<And>n. n < k \<Longrightarrow> \<sigma> n \<in> M"
+ shows "MCauchy \<sigma>"
+ unfolding MCauchy_def image_subset_iff
+proof (intro conjI strip)
+ fix n
+ show "\<sigma> n \<in> M"
+ using assms
+ unfolding MCauchy_def image_subset_iff
+ by (metis UNIV_I comp_apply le_iff_add linorder_not_le)
+next
+ fix \<epsilon> :: real
+ assume "\<epsilon> > 0"
+ obtain N where "\<forall>n n'. N \<le> n \<longrightarrow> N \<le> n' \<longrightarrow> d ((\<sigma> \<circ> (+)k) n) ((\<sigma> \<circ> (+)k) n') < \<epsilon>"
+ using cau \<open>\<epsilon> > 0\<close> by (fastforce simp: MCauchy_def)
+ then show "\<exists>N. \<forall>n n'. N \<le> n \<longrightarrow> N \<le> n' \<longrightarrow> d (\<sigma> n) (\<sigma> n') < \<epsilon>"
+ unfolding o_def
+ apply (rule_tac x="k+N" in exI)
+ by (smt (verit, del_insts) add.assoc le_add1 less_eqE)
+qed
+
+lemma MCauchy_convergent_subsequence:
+ assumes cau: "MCauchy \<sigma>" and "strict_mono r"
+ and lim: "limitin mtopology (\<sigma> \<circ> r) a sequentially"
+ shows "limitin mtopology \<sigma> a sequentially"
+ unfolding limitin_metric
+proof (intro conjI strip)
+ show "a \<in> M"
+ by (meson assms limitin_mspace)
+ fix \<epsilon> :: real
+ assume "\<epsilon> > 0"
+ then obtain N1 where N1: "\<And>n n'. \<lbrakk>n\<ge>N1; n'\<ge>N1\<rbrakk> \<Longrightarrow> d (\<sigma> n) (\<sigma> n') < \<epsilon>/2"
+ using cau unfolding MCauchy_def by (meson half_gt_zero)
+ obtain N2 where N2: "\<And>n. n \<ge> N2 \<Longrightarrow> (\<sigma> \<circ> r) n \<in> M \<and> d ((\<sigma> \<circ> r) n) a < \<epsilon>/2"
+ by (metis (no_types, lifting) lim \<open>\<epsilon> > 0\<close> half_gt_zero limit_metric_sequentially)
+ have "\<sigma> n \<in> M \<and> d (\<sigma> n) a < \<epsilon>" if "n \<ge> max N1 N2" for n
+ proof (intro conjI)
+ show "\<sigma> n \<in> M"
+ using MCauchy_def cau by blast
+ have "N1 \<le> r n"
+ by (meson \<open>strict_mono r\<close> le_trans max.cobounded1 strict_mono_imp_increasing that)
+ then show "d (\<sigma> n) a < \<epsilon>"
+ using N1[of n "r n"] N2[of n] \<open>\<sigma> n \<in> M\<close> \<open>a \<in> M\<close> triangle that by fastforce
+ qed
+ then show "\<forall>\<^sub>F n in sequentially. \<sigma> n \<in> M \<and> d (\<sigma> n) a < \<epsilon>"
+ using eventually_sequentially by blast
+qed
+
+lemma MCauchy_interleaving_gen:
+ "MCauchy (\<lambda>n. if even n then x(n div 2) else y(n div 2)) \<longleftrightarrow>
+ (MCauchy x \<and> MCauchy y \<and> (\<lambda>n. d (x n) (y n)) \<longlonglongrightarrow> 0)" (is "?lhs=?rhs")
+proof
+ assume L: ?lhs
+ have evens: "strict_mono (\<lambda>n::nat. 2 * n)" and odds: "strict_mono (\<lambda>n::nat. Suc (2 * n))"
+ by (auto simp: strict_mono_def)
+ show ?rhs
+ proof (intro conjI)
+ show "MCauchy x" "MCauchy y"
+ using MCauchy_subsequence [OF evens L] MCauchy_subsequence [OF odds L] by (auto simp: o_def)
+ show "(\<lambda>n. d (x n) (y n)) \<longlonglongrightarrow> 0"
+ unfolding LIMSEQ_iff
+ proof (intro strip)
+ fix \<epsilon> :: real
+ assume "\<epsilon> > 0"
+ then obtain N where N:
+ "\<And>n n'. \<lbrakk>n\<ge>N; n'\<ge>N\<rbrakk> \<Longrightarrow> d (if even n then x (n div 2) else y (n div 2))
+ (if even n' then x (n' div 2) else y (n' div 2)) < \<epsilon>"
+ using L MCauchy_def by fastforce
+ have "d (x n) (y n) < \<epsilon>" if "n\<ge>N" for n
+ using N [of "2*n" "Suc(2*n)"] that by auto
+ then show "\<exists>N. \<forall>n\<ge>N. norm (d (x n) (y n) - 0) < \<epsilon>"
+ by auto
+ qed
+ qed
+next
+ assume R: ?rhs
+ show ?lhs
+ unfolding MCauchy_def
+ proof (intro conjI strip)
+ show "range (\<lambda>n. if even n then x (n div 2) else y (n div 2)) \<subseteq> M"
+ using R by (auto simp: MCauchy_def)
+ fix \<epsilon> :: real
+ assume "\<epsilon> > 0"
+ obtain Nx where Nx: "\<And>n n'. \<lbrakk>n\<ge>Nx; n'\<ge>Nx\<rbrakk> \<Longrightarrow> d (x n) (x n') < \<epsilon>/2"
+ by (meson half_gt_zero MCauchy_def R \<open>\<epsilon> > 0\<close>)
+ obtain Ny where Ny: "\<And>n n'. \<lbrakk>n\<ge>Ny; n'\<ge>Ny\<rbrakk> \<Longrightarrow> d (y n) (y n') < \<epsilon>/2"
+ by (meson half_gt_zero MCauchy_def R \<open>\<epsilon> > 0\<close>)
+ obtain Nxy where Nxy: "\<And>n. n\<ge>Nxy \<Longrightarrow> d (x n) (y n) < \<epsilon>/2"
+ using R \<open>\<epsilon> > 0\<close> half_gt_zero unfolding LIMSEQ_iff
+ by (metis abs_mdist diff_zero real_norm_def)
+ define N where "N \<equiv> 2 * Max{Nx,Ny,Nxy}"
+ show "\<exists>N. \<forall>n n'. N \<le> n \<longrightarrow> N \<le> n' \<longrightarrow> d (if even n then x (n div 2) else y (n div 2)) (if even n' then x (n' div 2) else y (n' div 2)) < \<epsilon>"
+ proof (intro exI strip)
+ fix n n'
+ assume "N \<le> n" and "N \<le> n'"
+ then have "n div 2 \<ge> Nx" "n div 2 \<ge> Ny" "n div 2 \<ge> Nxy" "n' div 2 \<ge> Nx" "n' div 2 \<ge> Ny"
+ by (auto simp: N_def)
+ then have dxyn: "d (x (n div 2)) (y (n div 2)) < \<epsilon>/2"
+ and dxnn': "d (x (n div 2)) (x (n' div 2)) < \<epsilon>/2"
+ and dynn': "d (y (n div 2)) (y (n' div 2)) < \<epsilon>/2"
+ using Nx Ny Nxy by blast+
+ have inM: "x (n div 2) \<in> M" "x (n' div 2) \<in> M""y (n div 2) \<in> M" "y (n' div 2) \<in> M"
+ using Metric_space.MCauchy_def Metric_space_axioms R by blast+
+ show "d (if even n then x (n div 2) else y (n div 2)) (if even n' then x (n' div 2) else y (n' div 2)) < \<epsilon>"
+ proof (cases "even n")
+ case nt: True
+ show ?thesis
+ proof (cases "even n'")
+ case True
+ with \<open>\<epsilon> > 0\<close> nt dxnn' show ?thesis by auto
+ next
+ case False
+ with nt dxyn dynn' inM triangle show ?thesis
+ by fastforce
+ qed
+ next
+ case nf: False
+ show ?thesis
+ proof (cases "even n'")
+ case True
+ then show ?thesis
+ by (smt (verit) \<open>\<epsilon> > 0\<close> dxyn dxnn' triangle commute inM field_sum_of_halves)
+ next
+ case False
+ with \<open>\<epsilon> > 0\<close> nf dynn' show ?thesis by auto
+ qed
+ qed
+ qed
+ qed
+qed
+
+lemma MCauchy_interleaving:
+ "MCauchy (\<lambda>n. if even n then \<sigma>(n div 2) else a) \<longleftrightarrow>
+ range \<sigma> \<subseteq> M \<and> limitin mtopology \<sigma> a sequentially" (is "?lhs=?rhs")
+proof -
+ have "?lhs \<longleftrightarrow> (MCauchy \<sigma> \<and> a \<in> M \<and> (\<lambda>n. d (\<sigma> n) a) \<longlonglongrightarrow> 0)"
+ by (simp add: MCauchy_interleaving_gen [where y = "\<lambda>n. a"])
+ also have "... = ?rhs"
+ by (metis MCauchy_def always_eventually convergent_imp_MCauchy limitin_metric_dist_null range_subsetD)
+ finally show ?thesis .
+qed
+
+lemma mcomplete_nest:
+ "mcomplete \<longleftrightarrow>
+ (\<forall>C::nat \<Rightarrow>'a set. (\<forall>n. closedin mtopology (C n)) \<and>
+ (\<forall>n. C n \<noteq> {}) \<and> decseq C \<and> (\<forall>\<epsilon>>0. \<exists>n a. C n \<subseteq> mcball a \<epsilon>)
+ \<longrightarrow> \<Inter> (range C) \<noteq> {})" (is "?lhs=?rhs")
+proof
+ assume L: ?lhs
+ show ?rhs
+ unfolding imp_conjL
+ proof (intro strip)
+ fix C :: "nat \<Rightarrow> 'a set"
+ assume clo: "\<forall>n. closedin mtopology (C n)"
+ and ne: "\<forall>n. C n \<noteq> ({}::'a set)"
+ and dec: "decseq C"
+ and cover [rule_format]: "\<forall>\<epsilon>>0. \<exists>n a. C n \<subseteq> mcball a \<epsilon>"
+ obtain \<sigma> where \<sigma>: "\<And>n. \<sigma> n \<in> C n"
+ by (meson ne empty_iff set_eq_iff)
+ have "MCauchy \<sigma>"
+ unfolding MCauchy_def
+ proof (intro conjI strip)
+ show "range \<sigma> \<subseteq> M"
+ using \<sigma> clo metric_closedin_iff_sequentially_closed by auto
+ fix \<epsilon> :: real
+ assume "\<epsilon> > 0"
+ then obtain N a where N: "C N \<subseteq> mcball a (\<epsilon>/3)"
+ using cover by fastforce
+ have "d (\<sigma> m) (\<sigma> n) < \<epsilon>" if "N \<le> m" "N \<le> n" for m n
+ proof -
+ have "d a (\<sigma> m) \<le> \<epsilon>/3" "d a (\<sigma> n) \<le> \<epsilon>/3"
+ using dec N \<sigma> that by (fastforce simp: decseq_def)+
+ then have "d (\<sigma> m) (\<sigma> n) \<le> \<epsilon>/3 + \<epsilon>/3"
+ using triangle \<sigma> commute dec decseq_def subsetD that N
+ by (smt (verit, ccfv_threshold) in_mcball)
+ also have "... < \<epsilon>"
+ using \<open>\<epsilon> > 0\<close> by auto
+ finally show ?thesis .
+ qed
+ then show "\<exists>N. \<forall>m n. N \<le> m \<longrightarrow> N \<le> n \<longrightarrow> d (\<sigma> m) (\<sigma> n) < \<epsilon>"
+ by blast
+ qed
+ then obtain x where x: "limitin mtopology \<sigma> x sequentially"
+ using L mcomplete_def by blast
+ have "x \<in> C n" for n
+ proof (rule limitin_closedin [OF x])
+ show "closedin mtopology (C n)"
+ by (simp add: clo)
+ show "\<forall>\<^sub>F x in sequentially. \<sigma> x \<in> C n"
+ by (metis \<sigma> dec decseq_def eventually_sequentiallyI subsetD)
+ qed auto
+ then show "\<Inter> (range C) \<noteq> {}"
+ by blast
+qed
+next
+ assume R: ?rhs
+ show ?lhs
+ unfolding mcomplete_def
+ proof (intro strip)
+ fix \<sigma>
+ assume "MCauchy \<sigma>"
+ then have "range \<sigma> \<subseteq> M"
+ using MCauchy_def by blast
+ define C where "C \<equiv> \<lambda>n. mtopology closure_of (\<sigma> ` {n..})"
+ have "\<forall>n. closedin mtopology (C n)"
+ by (auto simp: C_def)
+ moreover
+ have ne: "\<And>n. C n \<noteq> {}"
+ using \<open>MCauchy \<sigma>\<close> by (auto simp: C_def MCauchy_def disjnt_iff closure_of_eq_empty_gen)
+ moreover
+ have dec: "decseq C"
+ unfolding monotone_on_def
+ proof (intro strip)
+ fix m n::nat
+ assume "m \<le> n"
+ then have "{n..} \<subseteq> {m..}"
+ by auto
+ then show "C n \<subseteq> C m"
+ unfolding C_def by (meson closure_of_mono image_mono)
+ qed
+ moreover
+ have C: "\<exists>N u. C N \<subseteq> mcball u \<epsilon>" if "\<epsilon>>0" for \<epsilon>
+ proof -
+ obtain N where "\<And>m n. N \<le> m \<and> N \<le> n \<Longrightarrow> d (\<sigma> m) (\<sigma> n) < \<epsilon>"
+ by (meson MCauchy_def \<open>0 < \<epsilon>\<close> \<open>MCauchy \<sigma>\<close>)
+ then have "\<sigma> ` {N..} \<subseteq> mcball (\<sigma> N) \<epsilon>"
+ using MCauchy_def \<open>MCauchy \<sigma>\<close> by (force simp: less_eq_real_def)
+ then have "C N \<subseteq> mcball (\<sigma> N) \<epsilon>"
+ by (simp add: C_def closure_of_minimal)
+ then show ?thesis
+ by blast
+ qed
+ ultimately obtain l where x: "l \<in> \<Inter> (range C)"
+ by (metis R ex_in_conv)
+ then have *: "\<And>\<epsilon> N. 0 < \<epsilon> \<Longrightarrow> \<exists>n'. N \<le> n' \<and> l \<in> M \<and> \<sigma> n' \<in> M \<and> d l (\<sigma> n') < \<epsilon>"
+ by (force simp: C_def metric_closure_of)
+ then have "l \<in> M"
+ using gt_ex by blast
+ show "\<exists>l. limitin mtopology \<sigma> l sequentially"
+ unfolding limitin_metric
+ proof (intro conjI strip exI)
+ show "l \<in> M"
+ using \<open>\<forall>n. closedin mtopology (C n)\<close> closedin_subset x by fastforce
+ fix \<epsilon>::real
+ assume "\<epsilon> > 0"
+ obtain N where N: "\<And>m n. N \<le> m \<and> N \<le> n \<Longrightarrow> d (\<sigma> m) (\<sigma> n) < \<epsilon>/2"
+ by (meson MCauchy_def \<open>0 < \<epsilon>\<close> \<open>MCauchy \<sigma>\<close> half_gt_zero)
+ with * [of "\<epsilon>/2" N]
+ have "\<forall>n\<ge>N. \<sigma> n \<in> M \<and> d (\<sigma> n) l < \<epsilon>"
+ by (smt (verit) \<open>range \<sigma> \<subseteq> M\<close> commute field_sum_of_halves range_subsetD triangle)
+ then show "\<forall>\<^sub>F n in sequentially. \<sigma> n \<in> M \<and> d (\<sigma> n) l < \<epsilon>"
+ using eventually_sequentially by blast
+ qed
+ qed
+qed
+
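+text \<open>Informally, \<open>mcomplete_nest\<close> characterises completeness by the nested-set property:
+ every decreasing sequence of nonempty closed sets that eventually fits inside a closed ball of
+ arbitrarily small radius has a common point. The forward direction picks a point from each set
+ and shows the resulting sequence is Cauchy via the \<open>\<epsilon>/3\<close> estimate
+ \<open>d (\<sigma> m) (\<sigma> n) \<le> d a (\<sigma> m) + d a (\<sigma> n) \<le> \<epsilon>/3 + \<epsilon>/3 < \<epsilon>\<close>.\<close>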
+
+lemma mcomplete_nest_sing:
+ "mcomplete \<longleftrightarrow>
+ (\<forall>C. (\<forall>n. closedin mtopology (C n)) \<and>
+ (\<forall>n. C n \<noteq> {}) \<and> decseq C \<and> (\<forall>e>0. \<exists>n a. C n \<subseteq> mcball a e)
+ \<longrightarrow> (\<exists>l. l \<in> M \<and> \<Inter> (range C) = {l}))"
+proof -
+ have *: False
+ if clo: "\<forall>n. closedin mtopology (C n)"
+ and cover: "\<forall>\<epsilon>>0. \<exists>n a. C n \<subseteq> mcball a \<epsilon>"
+ and no_sing: "\<And>y. y \<in> M \<Longrightarrow> \<Inter> (range C) \<noteq> {y}"
+ and l: "\<forall>n. l \<in> C n"
+ for C :: "nat \<Rightarrow> 'a set" and l
+ proof -
+ have inM: "\<And>x. x \<in> \<Inter> (range C) \<Longrightarrow> x \<in> M"
+ using closedin_metric clo by fastforce
+ then have "l \<in> M"
+ by (simp add: l)
+ have False if l': "l' \<in> \<Inter> (range C)" and "l' \<noteq> l" for l'
+ proof -
+ have "l' \<in> M"
+ using inM l' by blast
+ obtain n a where na: "C n \<subseteq> mcball a (d l l' / 3)"
+ using inM \<open>l \<in> M\<close> l' \<open>l' \<noteq> l\<close> cover by force
+ then have "d a l \<le> (d l l' / 3)" "d a l' \<le> (d l l' / 3)" "a \<in> M"
+ using l l' na in_mcball by auto
+ then have "d l l' \<le> (d l l' / 3) + (d l l' / 3)"
+ using \<open>l \<in> M\<close> \<open>l' \<in> M\<close> mdist_reverse_triangle by fastforce
+ then show False
+ using nonneg [of l l'] \<open>l' \<noteq> l\<close> \<open>l \<in> M\<close> \<open>l' \<in> M\<close> zero by force
+ qed
+ then show False
+ by (metis l \<open>l \<in> M\<close> no_sing INT_I empty_iff insertI1 is_singletonE is_singletonI')
+ qed
+ show ?thesis
+ unfolding mcomplete_nest imp_conjL
+ apply (intro all_cong1 imp_cong refl)
+ using *
+ by (smt (verit) Inter_iff ex_in_conv range_constant range_eqI)
+qed
+
+lemma mcomplete_fip:
+ "mcomplete \<longleftrightarrow>
+ (\<forall>\<C>. (\<forall>C \<in> \<C>. closedin mtopology C) \<and>
+ (\<forall>e>0. \<exists>C a. C \<in> \<C> \<and> C \<subseteq> mcball a e) \<and> (\<forall>\<F>. finite \<F> \<and> \<F> \<subseteq> \<C> \<longrightarrow> \<Inter> \<F> \<noteq> {})
+ \<longrightarrow> \<Inter> \<C> \<noteq> {})"
+ (is "?lhs = ?rhs")
+proof
+ assume L: ?lhs
+ show ?rhs
+ unfolding mcomplete_nest_sing imp_conjL
+ proof (intro strip)
+ fix \<C> :: "'a set set"
+ assume clo: "\<forall>C\<in>\<C>. closedin mtopology C"
+ and cover: "\<forall>e>0. \<exists>C a. C \<in> \<C> \<and> C \<subseteq> mcball a e"
+ and fip: "\<forall>\<F>. finite \<F> \<longrightarrow> \<F> \<subseteq> \<C> \<longrightarrow> \<Inter> \<F> \<noteq> {}"
+ then have "\<forall>n. \<exists>C. C \<in> \<C> \<and> (\<exists>a. C \<subseteq> mcball a (inverse (Suc n)))"
+ by simp
+ then obtain C where C: "\<And>n. C n \<in> \<C>"
+ and coverC: "\<And>n. \<exists>a. C n \<subseteq> mcball a (inverse (Suc n))"
+ by metis
+ define D where "D \<equiv> \<lambda>n. \<Inter> (C ` {..n})"
+ have cloD: "closedin mtopology (D n)" for n
+ unfolding D_def using clo C by blast
+ have neD: "D n \<noteq> {}" for n
+ using fip C by (simp add: D_def image_subset_iff)
+ have decD: "decseq D"
+ by (force simp: D_def decseq_def)
+ have coverD: "\<exists>n a. D n \<subseteq> mcball a \<epsilon>" if "\<epsilon> > 0" for \<epsilon>
+ proof -
+ obtain n where "inverse (Suc n) < \<epsilon>"
+ using \<open>0 < \<epsilon>\<close> reals_Archimedean by blast
+ then obtain a where "C n \<subseteq> mcball a \<epsilon>"
+ by (meson coverC less_eq_real_def mcball_subset_concentric order_trans)
+ then show ?thesis
+ unfolding D_def by blast
+ qed
+ have *: "a \<in> \<Inter>\<C>" if a: "\<Inter> (range D) = {a}" and "a \<in> M" for a
+ proof -
+ have aC: "a \<in> C n" for n
+ using that by (auto simp: D_def)
+ have eqa: "\<And>u. (\<forall>n. u \<in> C n) \<Longrightarrow> a = u"
+ using that by (auto simp: D_def)
+ have "a \<in> T" if "T \<in> \<C>" for T
+ proof -
+ have cloT: "closedin mtopology (T \<inter> D n)" for n
+ using clo cloD that by blast
+ have "\<Inter> (insert T (C ` {..n})) \<noteq> {}" for n
+ using that C by (intro fip [rule_format]) auto
+ then have neT: "T \<inter> D n \<noteq> {}" for n
+ by (simp add: D_def)
+ have decT: "decseq (\<lambda>n. T \<inter> D n)"
+ by (force simp: D_def decseq_def)
+ have coverT: "\<exists>n a. T \<inter> D n \<subseteq> mcball a \<epsilon>" if "\<epsilon> > 0" for \<epsilon>
+ by (meson coverD le_infI2 that)
+ show ?thesis
+ using L [unfolded mcomplete_nest_sing, rule_format, of "\<lambda>n. T \<inter> D n"] a
+ by (force simp: cloT neT decT coverT)
+ qed
+ then show ?thesis by auto
+ qed
+ show "\<Inter> \<C> \<noteq> {}"
+ by (metis L cloD neD decD coverD * empty_iff mcomplete_nest_sing)
+ qed
+next
+ assume R [rule_format]: ?rhs
+ show ?lhs
+ unfolding mcomplete_nest imp_conjL
+ proof (intro strip)
+ fix C :: "nat \<Rightarrow> 'a set"
+ assume clo: "\<forall>n. closedin mtopology (C n)"
+ and ne: "\<forall>n. C n \<noteq> {}"
+ and dec: "decseq C"
+ and cover: "\<forall>\<epsilon>>0. \<exists>n a. C n \<subseteq> mcball a \<epsilon>"
+ have "\<Inter>(C ` N) \<noteq> {}" if "finite N" for N
+ proof -
+ obtain k where "N \<subseteq> {..k}"
+ using \<open>finite N\<close> finite_nat_iff_bounded_le by auto
+ with dec have "C k \<subseteq> \<Inter>(C ` N)" by (auto simp: decseq_def)
+ then show ?thesis
+ using ne by force
+ qed
+ with clo cover R [of "range C"] show "\<Inter> (range C) \<noteq> {}"
+ by (metis (no_types, opaque_lifting) finite_subset_image image_iff UNIV_I)
+ qed
+qed
+
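+text \<open>Lemma \<open>mcomplete_fip\<close> recasts the nested-set criterion as a finite-intersection-property
+ statement: a family of closed sets, some member of which fits inside a closed ball of radius
+ \<open>e\<close> for every \<open>e > 0\<close>, has nonempty intersection as soon as all of its finite subfamilies do.
+ The variant below identifies that intersection as a singleton.\<close>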
+
+lemma mcomplete_fip_sing:
+ "mcomplete \<longleftrightarrow>
+ (\<forall>\<C>. (\<forall>C\<in>\<C>. closedin mtopology C) \<and>
+ (\<forall>e>0. \<exists>c a. c \<in> \<C> \<and> c \<subseteq> mcball a e) \<and>
+ (\<forall>\<F>. finite \<F> \<and> \<F> \<subseteq> \<C> \<longrightarrow> \<Inter> \<F> \<noteq> {}) \<longrightarrow>
+ (\<exists>l. l \<in> M \<and> \<Inter> \<C> = {l}))"
+ (is "?lhs = ?rhs")
+proof
+ have *: "l \<in> M" "\<Inter> \<C> = {l}"
+ if clo: "Ball \<C> (closedin mtopology)"
+ and cover: "\<forall>e>0. \<exists>C a. C \<in> \<C> \<and> C \<subseteq> mcball a e"
+ and fin: "\<forall>\<F>. finite \<F> \<longrightarrow> \<F> \<subseteq> \<C> \<longrightarrow> \<Inter> \<F> \<noteq> {}"
+ and l: "l \<in> \<Inter> \<C>"
+ for \<C> :: "'a set set" and l
+ proof -
+ show "l \<in> M"
+ by (meson Inf_lower2 clo cover gt_ex metric_closedin_iff_sequentially_closed subsetD that(4))
+ show "\<Inter> \<C> = {l}"
+ proof (cases "\<C> = {}")
+ case True
+ then show ?thesis
+ using cover mbounded_pos by auto
+ next
+ case False
+ have CM: "\<And>a. a \<in> \<Inter>\<C> \<Longrightarrow> a \<in> M"
+ using False clo closedin_subset by fastforce
+ have "l' \<notin> \<Inter> \<C>" if "l' \<noteq> l" for l'
+ proof
+ assume l': "l' \<in> \<Inter> \<C>"
+ with CM have "l' \<in> M" by blast
+ with that \<open>l \<in> M\<close> have gt0: "0 < d l l'"
+ by simp
+ then obtain C a where "C \<in> \<C>" and C: "C \<subseteq> mcball a (d l l' / 3)"
+ using cover [rule_format, of "d l l' / 3"] by auto
+ then have "d a l \<le> (d l l' / 3)" "d a l' \<le> (d l l' / 3)" "a \<in> M"
+ using l l' in_mcball by auto
+ then have "d l l' \<le> (d l l' / 3) + (d l l' / 3)"
+ using \<open>l \<in> M\<close> \<open>l' \<in> M\<close> mdist_reverse_triangle by fastforce
+ with gt0 show False by auto
+ qed
+ then show ?thesis
+ using l by fastforce
+ qed
+ qed
+ assume L: ?lhs
+ with * show ?rhs
+ unfolding mcomplete_fip imp_conjL ex_in_conv [symmetric]
+ by (elim all_forward imp_forward2 asm_rl) (blast intro: elim: )
+next
+ assume ?rhs then show ?lhs
+ unfolding mcomplete_fip by (force elim!: all_forward)
+qed
+
+end
+
+lemma MCauchy_iff_Cauchy [iff]: "Met_TC.MCauchy = Cauchy"
+ by (force simp: Cauchy_def Met_TC.MCauchy_def)
+
+lemma mcomplete_iff_complete [iff]:
+ "Met_TC.mcomplete (Pure.type ::'a::metric_space itself) \<longleftrightarrow> complete (UNIV::'a set)"
+ by (auto simp: Met_TC.mcomplete_def complete_def)
+
+lemma euclidean_metric: "Met_TC.mcomplete (Pure.type ::'a::euclidean_space itself)"
+ using complete_UNIV mcomplete_iff_complete by blast
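+
+text \<open>A minimal sanity check, assuming the HOL-Analysis class instance \<open>real :: euclidean_space\<close>:
+ the real line is complete in this sense, simply by instantiating the type variable of
+ \<open>euclidean_metric\<close>.\<close>
+
+lemma "Met_TC.mcomplete (Pure.type :: real itself)"
+ by (rule euclidean_metric)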
+
+context submetric
+begin
+
+lemma MCauchy_submetric:
+ "sub.MCauchy \<sigma> \<longleftrightarrow> range \<sigma> \<subseteq> A \<and> MCauchy \<sigma>"
+ using MCauchy_def sub.MCauchy_def subset by force
+
+lemma closedin_mcomplete_imp_mcomplete:
+ assumes clo: "closedin mtopology A" and "mcomplete"
+ shows "sub.mcomplete"
+ unfolding sub.mcomplete_def
+proof (intro strip)
+ fix \<sigma>
+ assume "sub.MCauchy \<sigma>"
+ then have \<sigma>: "MCauchy \<sigma>" "range \<sigma> \<subseteq> A"
+ using MCauchy_submetric by blast+
+ then obtain x where x: "limitin mtopology \<sigma> x sequentially"
+ using \<open>mcomplete\<close> unfolding mcomplete_def by blast
+ then have "x \<in> A"
+ using \<sigma> clo metric_closedin_iff_sequentially_closed by force
+ with \<sigma> x show "\<exists>x. limitin sub.mtopology \<sigma> x sequentially"
+ using limitin_submetric_iff range_subsetD by fastforce
+qed
+
+
+lemma sequentially_closedin_mcomplete_imp_mcomplete:
+ assumes "mcomplete" and "\<And>\<sigma> l. range \<sigma> \<subseteq> A \<and> limitin mtopology \<sigma> l sequentially \<Longrightarrow> l \<in> A"
+ shows "sub.mcomplete"
+ using assms closedin_mcomplete_imp_mcomplete metric_closedin_iff_sequentially_closed subset by blast
+
+end
+
+
+context Metric_space
+begin
+
+lemma mcomplete_Un:
+ assumes A: "submetric M d A" "Metric_space.mcomplete A d"
+ and B: "submetric M d B" "Metric_space.mcomplete B d"
+ shows "submetric M d (A \<union> B)" "Metric_space.mcomplete (A \<union> B) d"
+proof -
+ show "submetric M d (A \<union> B)"
+ by (meson assms le_sup_iff submetric_axioms_def submetric_def)
+ then interpret MAB: Metric_space "A \<union> B" d
+ by (meson submetric.subset subspace)
+ interpret MA: Metric_space A d
+ by (meson A submetric.subset subspace)
+ interpret MB: Metric_space B d
+ by (meson B submetric.subset subspace)
+ show "Metric_space.mcomplete (A \<union> B) d"
+ unfolding MAB.mcomplete_def
+ proof (intro strip)
+ fix \<sigma>
+ assume "MAB.MCauchy \<sigma>"
+ then have "range \<sigma> \<subseteq> A \<union> B"
+ using MAB.MCauchy_def by blast
+ then have "UNIV \<subseteq> \<sigma> -` A \<union> \<sigma> -` B"
+ by blast
+ then consider "infinite (\<sigma> -` A)" | "infinite (\<sigma> -` B)"
+ using finite_subset by auto
+ then show "\<exists>x. limitin MAB.mtopology \<sigma> x sequentially"
+ proof cases
+ case 1
+ then obtain r where "strict_mono r" and r: "\<And>n::nat. r n \<in> \<sigma> -` A"
+ using infinite_enumerate by blast
+ then have "MA.MCauchy (\<sigma> \<circ> r)"
+ using MA.MCauchy_def MAB.MCauchy_def MAB.MCauchy_subsequence \<open>MAB.MCauchy \<sigma>\<close> by auto
+ with A obtain x where "limitin MA.mtopology (\<sigma> \<circ> r) x sequentially"
+ using MA.mcomplete_def by blast
+ then have "limitin MAB.mtopology (\<sigma> \<circ> r) x sequentially"
+ by (metis MA.limit_metric_sequentially MAB.limit_metric_sequentially UnCI)
+ then show ?thesis
+ using MAB.MCauchy_convergent_subsequence \<open>MAB.MCauchy \<sigma>\<close> \<open>strict_mono r\<close> by blast
+ next
+ case 2
+ then obtain r where "strict_mono r" and r: "\<And>n::nat. r n \<in> \<sigma> -` B"
+ using infinite_enumerate by blast
+ then have "MB.MCauchy (\<sigma> \<circ> r)"
+ using MB.MCauchy_def MAB.MCauchy_def MAB.MCauchy_subsequence \<open>MAB.MCauchy \<sigma>\<close> by auto
+ with B obtain x where "limitin MB.mtopology (\<sigma> \<circ> r) x sequentially"
+ using MB.mcomplete_def by blast
+ then have "limitin MAB.mtopology (\<sigma> \<circ> r) x sequentially"
+ by (metis MB.limit_metric_sequentially MAB.limit_metric_sequentially UnCI)
+ then show ?thesis
+ using MAB.MCauchy_convergent_subsequence \<open>MAB.MCauchy \<sigma>\<close> \<open>strict_mono r\<close> by blast
+ qed
+ qed
+qed
+
+lemma mcomplete_Union:
+ assumes "finite \<S>"
+ and "\<And>A. A \<in> \<S> \<Longrightarrow> submetric M d A" "\<And>A. A \<in> \<S> \<Longrightarrow> Metric_space.mcomplete A d"
+ shows "submetric M d (\<Union>\<S>)" "Metric_space.mcomplete (\<Union>\<S>) d"
+ using assms
+ by (induction rule: finite_induct) (auto simp: mcomplete_Un)
+
+lemma mcomplete_Inter:
+ assumes "finite \<S>" "\<S> \<noteq> {}"
+ and sub: "\<And>A. A \<in> \<S> \<Longrightarrow> submetric M d A"
+ and comp: "\<And>A. A \<in> \<S> \<Longrightarrow> Metric_space.mcomplete A d"
+ shows "submetric M d (\<Inter>\<S>)" "Metric_space.mcomplete (\<Inter>\<S>) d"
+proof -
+ show "submetric M d (\<Inter>\<S>)"
+ using assms unfolding submetric_def submetric_axioms_def
+ by (metis Inter_lower equals0I inf.orderE le_inf_iff)
+ then interpret MS: submetric M d "\<Inter>\<S>"
+ by (meson submetric.subset subspace)
+ show "Metric_space.mcomplete (\<Inter>\<S>) d"
+ unfolding MS.sub.mcomplete_def
+ proof (intro strip)
+ fix \<sigma>
+ assume "MS.sub.MCauchy \<sigma>"
+ then have "range \<sigma> \<subseteq> \<Inter>\<S>"
+ using MS.MCauchy_submetric by blast
+ obtain A where "A \<in> \<S>" and A: "Metric_space.mcomplete A d"
+ using assms by blast
+ then have "range \<sigma> \<subseteq> A"
+ using \<open>range \<sigma> \<subseteq> \<Inter>\<S>\<close> by blast
+ interpret SA: submetric M d A
+ by (meson \<open>A \<in> \<S>\<close> sub submetric.subset subspace)
+ have "MCauchy \<sigma>"
+ using MS.MCauchy_submetric \<open>MS.sub.MCauchy \<sigma>\<close> by blast
+ then obtain x where x: "limitin SA.sub.mtopology \<sigma> x sequentially"
+ by (metis A SA.sub.MCauchy_def SA.sub.mcomplete_alt MCauchy_def \<open>range \<sigma> \<subseteq> A\<close>)
+ show "\<exists>x. limitin MS.sub.mtopology \<sigma> x sequentially"
+ apply (rule_tac x="x" in exI)
+ unfolding MS.limitin_submetric_iff
+ proof (intro conjI)
+ show "x \<in> \<Inter> \<S>"
+ proof clarsimp
+ fix U
+ assume "U \<in> \<S>"
+ interpret SU: submetric M d U
+ by (meson \<open>U \<in> \<S>\<close> sub submetric.subset subspace)
+ have "range \<sigma> \<subseteq> U"
+ using \<open>U \<in> \<S>\<close> \<open>range \<sigma> \<subseteq> \<Inter> \<S>\<close> by blast
+ moreover have "Metric_space.mcomplete U d"
+ by (simp add: \<open>U \<in> \<S>\<close> comp)
+ ultimately obtain x' where x': "limitin SU.sub.mtopology \<sigma> x' sequentially"
+ using MCauchy_def SU.sub.MCauchy_def SU.sub.mcomplete_alt \<open>MCauchy \<sigma>\<close> by meson
+ have "x' = x"
+ proof (intro limitin_metric_unique)
+ show "limitin mtopology \<sigma> x' sequentially"
+ by (meson SU.submetric_axioms submetric.limitin_submetric_iff x')
+ show "limitin mtopology \<sigma> x sequentially"
+ by (meson SA.submetric_axioms submetric.limitin_submetric_iff x)
+ qed auto
+ then show "x \<in> U"
+ using SU.sub.limitin_mspace x' by blast
+ qed
+ show "\<forall>\<^sub>F n in sequentially. \<sigma> n \<in> \<Inter>\<S>"
+ by (meson \<open>range \<sigma> \<subseteq> \<Inter> \<S>\<close> always_eventually range_subsetD)
+ show "limitin mtopology \<sigma> x sequentially"
+ by (meson SA.submetric_axioms submetric.limitin_submetric_iff x)
+ qed
+ qed
+qed
+
+
+lemma mcomplete_Int:
+ assumes A: "submetric M d A" "Metric_space.mcomplete A d"
+ and B: "submetric M d B" "Metric_space.mcomplete B d"
+ shows "submetric M d (A \<inter> B)" "Metric_space.mcomplete (A \<inter> B) d"
+ using mcomplete_Inter [of "{A,B}"] assms by force+
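+
+text \<open>Thus completeness of submetric spaces is preserved by finite unions and by nonempty finite
+ intersections; \<open>mcomplete_Int\<close> is just \<open>mcomplete_Inter\<close> applied to the two-element family.\<close>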
+
+subsection\<open>Totally bounded subsets of metric spaces\<close>
+
+definition mtotally_bounded
+ where "mtotally_bounded S \<equiv> \<forall>\<epsilon>>0. \<exists>K. finite K \<and> K \<subseteq> S \<and> S \<subseteq> (\<Union>x\<in>K. mball x \<epsilon>)"
+
+lemma mtotally_bounded_empty [iff]: "mtotally_bounded {}"
+by (simp add: mtotally_bounded_def)
+
+lemma finite_imp_mtotally_bounded:
+ "\<lbrakk>finite S; S \<subseteq> M\<rbrakk> \<Longrightarrow> mtotally_bounded S"
+ by (auto simp: mtotally_bounded_def)
+
+lemma mtotally_bounded_imp_subset: "mtotally_bounded S \<Longrightarrow> S \<subseteq> M"
+ by (force simp: mtotally_bounded_def intro!: zero_less_one)
+
+lemma mtotally_bounded_sing [simp]:
+ "mtotally_bounded {x} \<longleftrightarrow> x \<in> M"
+ by (meson empty_subsetI finite.simps finite_imp_mtotally_bounded insert_subset mtotally_bounded_imp_subset)
+
+lemma mtotally_bounded_Un:
+ assumes "mtotally_bounded S" "mtotally_bounded T"
+ shows "mtotally_bounded (S \<union> T)"
+proof -
+ have "\<exists>K. finite K \<and> K \<subseteq> S \<union> T \<and> S \<union> T \<subseteq> (\<Union>x\<in>K. mball x e)"
+ if "e>0" and K: "finite K \<and> K \<subseteq> S \<and> S \<subseteq> (\<Union>x\<in>K. mball x e)"
+ and L: "finite L \<and> L \<subseteq> T \<and> T \<subseteq> (\<Union>x\<in>L. mball x e)" for K L e
+ using that by (rule_tac x="K \<union> L" in exI) auto
+ with assms show ?thesis
+ unfolding mtotally_bounded_def by presburger
+qed
+
+lemma mtotally_bounded_Union:
+ assumes "finite f" "\<And>S. S \<in> f \<Longrightarrow> mtotally_bounded S"
+ shows "mtotally_bounded (\<Union>f)"
+ using assms by (induction f) (auto simp: mtotally_bounded_Un)
+
+lemma mtotally_bounded_imp_mbounded:
+ assumes "mtotally_bounded S"
+ shows "mbounded S"
+proof -
+ obtain K where "finite K \<and> K \<subseteq> S \<and> S \<subseteq> (\<Union>x\<in>K. mball x 1)"
+ using assms by (force simp: mtotally_bounded_def)
+ then show ?thesis
+ by (smt (verit) finite_imageI image_iff mbounded_Union mbounded_mball mbounded_subset)
+qed
+
+
+lemma mtotally_bounded_sequentially:
+ "mtotally_bounded S \<longleftrightarrow>
+ S \<subseteq> M \<and> (\<forall>\<sigma>::nat \<Rightarrow> 'a. range \<sigma> \<subseteq> S \<longrightarrow> (\<exists>r. strict_mono r \<and> MCauchy (\<sigma> \<circ> r)))"
+ (is "_ \<longleftrightarrow> _ \<and> ?rhs")
+proof (cases "S \<subseteq> M")
+ case True
+ show ?thesis
+ proof -
+ { fix \<sigma> :: "nat \<Rightarrow> 'a"
+ assume L: "mtotally_bounded S" and \<sigma>: "range \<sigma> \<subseteq> S"
+ have "\<exists>j > i. d (\<sigma> i) (\<sigma> j) < 3*\<epsilon>/2 \<and> infinite (\<sigma> -` mball (\<sigma> j) (\<epsilon>/2))"
+ if inf: "infinite (\<sigma> -` mball (\<sigma> i) \<epsilon>)" and "\<epsilon> > 0" for i \<epsilon>
+ proof -
+ obtain K where "finite K" "K \<subseteq> S" and K: "S \<subseteq> (\<Union>x\<in>K. mball x (\<epsilon>/4))"
+ by (metis L mtotally_bounded_def \<open>\<epsilon> > 0\<close> zero_less_divide_iff zero_less_numeral)
+ then have K_imp_ex: "\<And>y. y \<in> S \<Longrightarrow> \<exists>x\<in>K. d x y < \<epsilon>/4"
+ by fastforce
+ have False if "\<forall>x\<in>K. d x (\<sigma> i) < \<epsilon> + \<epsilon>/4 \<longrightarrow> finite (\<sigma> -` mball x (\<epsilon>/4))"
+ proof -
+ have "\<exists>w. w \<in> K \<and> d w (\<sigma> i) < 5 * \<epsilon>/4 \<and> d w (\<sigma> j) < \<epsilon>/4"
+ if "d (\<sigma> i) (\<sigma> j) < \<epsilon>" for j
+ proof -
+ obtain w where w: "d w (\<sigma> j) < \<epsilon>/4" "w \<in> K"
+ using K_imp_ex \<sigma> by blast
+ then have "d w (\<sigma> i) < \<epsilon> + \<epsilon>/4"
+ by (smt (verit, ccfv_SIG) True \<open>K \<subseteq> S\<close> \<sigma> rangeI subset_eq that triangle')
+ with w show ?thesis
+ using in_mball by auto
+ qed
+ then have "(\<sigma> -` mball (\<sigma> i) \<epsilon>) \<subseteq> (\<Union>x\<in>K. if d x (\<sigma> i) < \<epsilon> + \<epsilon>/4 then \<sigma> -` mball x (\<epsilon>/4) else {})"
+ using True \<open>K \<subseteq> S\<close> by force
+ then show False
+ using finite_subset inf \<open>finite K\<close> that by fastforce
+ qed
+ then obtain x where "x \<in> K" and dxi: "d x (\<sigma> i) < \<epsilon> + \<epsilon>/4" and infx: "infinite (\<sigma> -` mball x (\<epsilon>/4))"
+ by blast
+ then obtain j where "j \<in> (\<sigma> -` mball x (\<epsilon>/4)) - {..i}"
+ using bounded_nat_set_is_finite by (meson Diff_infinite_finite finite_atMost)
+ then have "j > i" and dxj: "d x (\<sigma> j) < \<epsilon>/4"
+ by auto
+ have "(\<sigma> -` mball x (\<epsilon>/4)) \<subseteq> (\<sigma> -` mball y (\<epsilon>/2))" if "d x y < \<epsilon>/4" "y \<in> M" for y
+ using that by (simp add: mball_subset Metric_space_axioms vimage_mono)
+ then have infj: "infinite (\<sigma> -` mball (\<sigma> j) (\<epsilon>/2))"
+ by (meson True \<open>d x (\<sigma> j) < \<epsilon>/4\<close> \<sigma> in_mono infx rangeI finite_subset)
+ have "\<sigma> i \<in> M" "\<sigma> j \<in> M" "x \<in> M"
+ using True \<open>K \<subseteq> S\<close> \<open>x \<in> K\<close> \<sigma> by force+
+ then have "d (\<sigma> i) (\<sigma> j) \<le> d x (\<sigma> i) + d x (\<sigma> j)"
+ using triangle'' by blast
+ also have "\<dots> < 3*\<epsilon>/2"
+ using dxi dxj by auto
+ finally have "d (\<sigma> i) (\<sigma> j) < 3*\<epsilon>/2" .
+ with \<open>i < j\<close> infj show ?thesis by blast
+ qed
+ then obtain nxt where nxt: "\<And>i \<epsilon>. \<lbrakk>\<epsilon> > 0; infinite (\<sigma> -` mball (\<sigma> i) \<epsilon>)\<rbrakk> \<Longrightarrow>
+ nxt i \<epsilon> > i \<and> d (\<sigma> i) (\<sigma> (nxt i \<epsilon>)) < 3*\<epsilon>/2 \<and> infinite (\<sigma> -` mball (\<sigma> (nxt i \<epsilon>)) (\<epsilon>/2))"
+ by metis
+ have "mbounded S"
+ using L by (simp add: mtotally_bounded_imp_mbounded)
+ then obtain B where B: "\<forall>y \<in> S. d (\<sigma> 0) y \<le> B" and "B > 0"
+ by (meson \<sigma> mbounded_alt_pos range_subsetD)
+ define eps where "eps \<equiv> \<lambda>n. (B+1) / 2^n"
+ have [simp]: "eps (Suc n) = eps n / 2" "eps n > 0" for n
+ using \<open>B > 0\<close> by (auto simp: eps_def)
+ have "UNIV \<subseteq> \<sigma> -` mball (\<sigma> 0) (B+1)"
+ using B True \<sigma> unfolding image_iff subset_iff
+ by (smt (verit, best) UNIV_I in_mball vimageI)
+ then have inf0: "infinite (\<sigma> -` mball (\<sigma> 0) (eps 0))"
+ using finite_subset by (auto simp: eps_def)
+ define r where "r \<equiv> rec_nat 0 (\<lambda>n rec. nxt rec (eps n))"
+ have [simp]: "r 0 = 0" "r (Suc n) = nxt (r n) (eps n)" for n
+ by (auto simp: r_def)
+ have \<sigma>rM[simp]: "\<sigma> (r n) \<in> M" for n
+ using True \<sigma> by blast
+ have inf: "infinite (\<sigma> -` mball (\<sigma> (r n)) (eps n))" for n
+ proof (induction n)
+ case 0 then show ?case
+ by (simp add: inf0)
+ next
+ case (Suc n) then show ?case
+ using nxt [of "eps n" "r n"] by simp
+ qed
+ then have "r (Suc n) > r n" for n
+ by (simp add: nxt)
+ then have "strict_mono r"
+ by (simp add: strict_mono_Suc_iff)
+ have d_less: "d (\<sigma> (r n)) (\<sigma> (r (Suc n))) < 3 * eps n / 2" for n
+ using nxt [OF _ inf] by simp
+ have eps_plus: "eps (k + n) = eps n * (1/2)^k" for k n
+ by (simp add: eps_def power_add field_simps)
+ have *: "d (\<sigma> (r n)) (\<sigma> (r (k + n))) < 3 * eps n" for n k
+ proof -
+ have "d (\<sigma> (r n)) (\<sigma> (r (k+n))) \<le> 3/2 * eps n * (\<Sum>i<k. (1/2)^i)"
+ proof (induction k)
+ case 0 then show ?case
+ by simp
+ next
+ case (Suc k)
+ have "d (\<sigma> (r n)) (\<sigma> (r (Suc k + n))) \<le> d (\<sigma> (r n)) (\<sigma> (r (k + n))) + d (\<sigma> (r (k + n))) (\<sigma> (r (Suc (k + n))))"
+ by (metis \<sigma>rM add.commute add_Suc_right triangle)
+ with d_less[of "k+n"] Suc show ?case
+ by (simp add: algebra_simps eps_plus)
+ qed
+ also have "\<dots> < 3/2 * eps n * 2"
+ using geometric_sum [of "1/2::real" k] by simp
+ finally show ?thesis by simp
+ qed
+ have "\<exists>N. \<forall>n\<ge>N. \<forall>n'\<ge>N. d (\<sigma> (r n)) (\<sigma> (r n')) < \<epsilon>" if "\<epsilon> > 0" for \<epsilon>
+ proof -
+ define N where "N \<equiv> nat \<lceil>(log 2 (6*(B+1) / \<epsilon>))\<rceil>"
+ have \<section>: "b \<le> 2 ^ nat \<lceil>log 2 b\<rceil>" for b
+ by (smt (verit) less_log_of_power real_nat_ceiling_ge)
+ have N: "6 * eps N \<le> \<epsilon>"
+ using \<section> [of "(6*(B+1) / \<epsilon>)"] that by (auto simp: N_def eps_def field_simps)
+ have "d (\<sigma> (r N)) (\<sigma> (r n)) < 3 * eps N" if "n \<ge> N" for n
+ by (metis * add.commute nat_le_iff_add that)
+ then have "\<forall>n\<ge>N. \<forall>n'\<ge>N. d (\<sigma> (r n)) (\<sigma> (r n')) < 3 * eps N + 3 * eps N"
+ by (smt (verit, best) \<sigma>rM triangle'')
+ with N show ?thesis
+ by fastforce
+ qed
+ then have "MCauchy (\<sigma> \<circ> r)"
+ unfolding MCauchy_def using True \<sigma> by auto
+ then have "\<exists>r. strict_mono r \<and> MCauchy (\<sigma> \<circ> r)"
+ using \<open>strict_mono r\<close> by blast
+ }
+ moreover
+ { assume R: ?rhs
+ have "mtotally_bounded S"
+ unfolding mtotally_bounded_def
+ proof (intro strip)
+ fix \<epsilon> :: real
+ assume "\<epsilon> > 0"
+ have False if \<section>: "\<And>K. \<lbrakk>finite K; K \<subseteq> S\<rbrakk> \<Longrightarrow> \<exists>s\<in>S. s \<notin> (\<Union>x\<in>K. mball x \<epsilon>)"
+ proof -
+ obtain f where f: "\<And>K. \<lbrakk>finite K; K \<subseteq> S\<rbrakk> \<Longrightarrow> f K \<in> S \<and> f K \<notin> (\<Union>x\<in>K. mball x \<epsilon>)"
+ using \<section> by metis
+ define \<sigma> where "\<sigma> \<equiv> wfrec less_than (\<lambda>seq n. f (seq ` {..<n}))"
+ have \<sigma>_eq: "\<sigma> n = f (\<sigma> ` {..<n})" for n
+ by (simp add: cut_apply def_wfrec [OF \<sigma>_def])
+ have [simp]: "\<sigma> n \<in> S" for n
+ using wf_less_than
+ proof (induction n rule: wf_induct_rule)
+ case (less n) with f show ?case
+ by (auto simp: \<sigma>_eq [of n])
+ qed
+ then have "range \<sigma> \<subseteq> S" by blast
+ have \<sigma>: "p < n \<Longrightarrow> \<epsilon> \<le> d (\<sigma> p) (\<sigma> n)" for n p
+ using f[of "\<sigma> ` {..<n}"] True by (fastforce simp: \<sigma>_eq [of n] Ball_def)
+ then obtain r where "strict_mono r" "MCauchy (\<sigma> \<circ> r)"
+ by (meson R \<open>range \<sigma> \<subseteq> S\<close>)
+ with \<open>0 < \<epsilon>\<close> obtain N
+ where N: "\<And>n n'. \<lbrakk>n\<ge>N; n'\<ge>N\<rbrakk> \<Longrightarrow> d (\<sigma> (r n)) (\<sigma> (r n')) < \<epsilon>"
+ by (force simp: MCauchy_def)
+ show ?thesis
+ using N [of N "Suc (r N)"] \<open>strict_mono r\<close>
+ by (smt (verit) Suc_le_eq \<sigma> le_SucI order_refl strict_mono_imp_increasing)
+ qed
+ then show "\<exists>K. finite K \<and> K \<subseteq> S \<and> S \<subseteq> (\<Union>x\<in>K. mball x \<epsilon>)"
+ by blast
+ qed
+ }
+ ultimately show ?thesis
+ using True by blast
+ qed
+qed (use mtotally_bounded_imp_subset in auto)
+
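+text \<open>So total boundedness is also a sequential notion: \<open>S\<close> is totally bounded iff \<open>S \<subseteq> M\<close> and
+ every sequence in \<open>S\<close> has a Cauchy subsequence. In the forward direction the subsequence is
+ obtained by repeatedly passing to a ball of half the previous radius that still contains
+ infinitely many terms, so that consecutive distances form a geometric series and
+ \<open>d (\<sigma> (r n)) (\<sigma> (r (k + n)))\<close> stays below \<open>3 * eps n\<close>.\<close>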
+
+lemma mtotally_bounded_subset:
+ "\<lbrakk>mtotally_bounded S; T \<subseteq> S\<rbrakk> \<Longrightarrow> mtotally_bounded T"
+ by (meson mtotally_bounded_sequentially order_trans)
+
+lemma mtotally_bounded_submetric:
+ assumes "mtotally_bounded S" "S \<subseteq> T" "T \<subseteq> M"
+ shows "Metric_space.mtotally_bounded T d S"
+proof -
+ interpret submetric M d T
+ by (simp add: Metric_space_axioms assms submetric.intro submetric_axioms.intro)
+ show ?thesis
+ using assms
+ unfolding sub.mtotally_bounded_def mtotally_bounded_def
+ by (force simp: subset_iff elim!: all_forward ex_forward)
+qed
+
+lemma mtotally_bounded_absolute:
+ "mtotally_bounded S \<longleftrightarrow> S \<subseteq> M \<and> Metric_space.mtotally_bounded S d S "
+proof -
+ have "mtotally_bounded S" if "S \<subseteq> M" "Metric_space.mtotally_bounded S d S"
+ proof -
+ interpret submetric M d S
+ by (simp add: Metric_space_axioms submetric_axioms.intro submetric_def \<open>S \<subseteq> M\<close>)
+ show ?thesis
+ using that
+ by (metis MCauchy_submetric Metric_space.mtotally_bounded_sequentially Metric_space_axioms subspace)
+ qed
+ moreover have "mtotally_bounded S \<Longrightarrow> Metric_space.mtotally_bounded S d S"
+ by (simp add: mtotally_bounded_imp_subset mtotally_bounded_submetric)
+ ultimately show ?thesis
+ using mtotally_bounded_imp_subset by blast
+qed
+
+lemma mtotally_bounded_closure_of:
+ assumes "mtotally_bounded S"
+ shows "mtotally_bounded (mtopology closure_of S)"
+proof -
+ have "S \<subseteq> M"
+ by (simp add: assms mtotally_bounded_imp_subset)
+ have "mtotally_bounded(mtopology closure_of S)"
+ unfolding mtotally_bounded_def
+ proof (intro strip)
+ fix \<epsilon>::real
+ assume "\<epsilon> > 0"
+ then obtain K where "finite K" "K \<subseteq> S" and K: "S \<subseteq> (\<Union>x\<in>K. mball x (\<epsilon>/2))"
+ by (metis assms mtotally_bounded_def half_gt_zero)
+ have "mtopology closure_of S \<subseteq> (\<Union>x\<in>K. mball x \<epsilon>)"
+ unfolding metric_closure_of
+ proof clarsimp
+ fix x
+ assume "x \<in> M" and x: "\<forall>r>0. \<exists>y\<in>S. y \<in> M \<and> d x y < r"
+ then obtain y where "y \<in> S" and y: "d x y < \<epsilon>/2"
+ using \<open>0 < \<epsilon>\<close> half_gt_zero by blast
+ then obtain x' where "x' \<in> K" "y \<in> mball x' (\<epsilon>/2)"
+ using K by auto
+ then have "d x' x < \<epsilon>/2 + \<epsilon>/2"
+ using triangle y \<open>x \<in> M\<close> commute by fastforce
+ then show "\<exists>x'\<in>K. x' \<in> M \<and> d x' x < \<epsilon>"
+ using \<open>K \<subseteq> S\<close> \<open>S \<subseteq> M\<close> \<open>x' \<in> K\<close> by force
+ qed
+ then show "\<exists>K. finite K \<and> K \<subseteq> mtopology closure_of S \<and> mtopology closure_of S \<subseteq> (\<Union>x\<in>K. mball x \<epsilon>)"
+ using closure_of_subset_Int \<open>K \<subseteq> S\<close> \<open>finite K\<close> K by fastforce
+ qed
+ then show ?thesis
+ by (simp add: assms inf.absorb2 mtotally_bounded_imp_subset)
+qed
+
+lemma mtotally_bounded_closure_of_eq:
+ "S \<subseteq> M \<Longrightarrow> mtotally_bounded (mtopology closure_of S) \<longleftrightarrow> mtotally_bounded S"
+ by (metis closure_of_subset mtotally_bounded_closure_of mtotally_bounded_subset topspace_mtopology)
+
+lemma mtotally_bounded_cauchy_sequence:
+ assumes "MCauchy \<sigma>"
+ shows "mtotally_bounded (range \<sigma>)"
+ unfolding MCauchy_def mtotally_bounded_def
+proof (intro strip)
+ fix \<epsilon>::real
+ assume "\<epsilon> > 0"
+ then obtain N where "\<And>n. N \<le> n \<Longrightarrow> d (\<sigma> N) (\<sigma> n) < \<epsilon>"
+ using assms by (force simp: MCauchy_def)
+ then have "\<And>m. \<exists>n\<le>N. \<sigma> n \<in> M \<and> \<sigma> m \<in> M \<and> d (\<sigma> n) (\<sigma> m) < \<epsilon>"
+ by (metis MCauchy_def assms mdist_zero nle_le range_subsetD)
+ then
+ show "\<exists>K. finite K \<and> K \<subseteq> range \<sigma> \<and> range \<sigma> \<subseteq> (\<Union>x\<in>K. mball x \<epsilon>)"
+ by (rule_tac x="\<sigma> ` {0..N}" in exI) force
+qed
+
+lemma MCauchy_imp_mbounded:
+ "MCauchy \<sigma> \<Longrightarrow> mbounded (range \<sigma>)"
+ by (simp add: mtotally_bounded_cauchy_sequence mtotally_bounded_imp_mbounded)
+
+subsection\<open>Compactness in metric spaces\<close>
+
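+text \<open>The next lemma relates subsequential limits to limit points (the Bolzano-Weierstrass
+ property). Combined with the auxiliary lemmas \<open>Bolzano_Weierstrass_A\<close> to
+ \<open>Bolzano_Weierstrass_E\<close>, it yields the characterisations \<open>compactin_eq_Bolzano_Weierstrass\<close>,
+ \<open>compactin_sequentially\<close> and \<open>compact_space_eq_mcomplete_mtotally_bounded\<close> below.\<close>
+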
+lemma Bolzano_Weierstrass_property:
+ assumes "S \<subseteq> U" "S \<subseteq> M"
+ shows
+ "(\<forall>\<sigma>::nat\<Rightarrow>'a. range \<sigma> \<subseteq> S
+ \<longrightarrow> (\<exists>l r. l \<in> U \<and> strict_mono r \<and> limitin mtopology (\<sigma> \<circ> r) l sequentially)) \<longleftrightarrow>
+ (\<forall>T. T \<subseteq> S \<and> infinite T \<longrightarrow> U \<inter> mtopology derived_set_of T \<noteq> {})" (is "?lhs=?rhs")
+proof
+ assume L: ?lhs
+ show ?rhs
+ proof clarify
+ fix T
+ assume "T \<subseteq> S" and "infinite T"
+ and T: "U \<inter> mtopology derived_set_of T = {}"
+ then obtain \<sigma> :: "nat\<Rightarrow>'a" where "inj \<sigma>" "range \<sigma> \<subseteq> T"
+ by (meson infinite_countable_subset)
+ with L obtain l r where "l \<in> U" "strict_mono r"
+ and lr: "limitin mtopology (\<sigma> \<circ> r) l sequentially"
+ by (meson \<open>T \<subseteq> S\<close> subset_trans)
+ then obtain \<epsilon> where "\<epsilon> > 0" and \<epsilon>: "\<And>y. y \<in> T \<Longrightarrow> y = l \<or> \<not> d l y < \<epsilon>"
+ using T \<open>T \<subseteq> S\<close> \<open>S \<subseteq> M\<close>
+ by (force simp: metric_derived_set_of limitin_metric disjoint_iff)
+ with lr have "\<forall>\<^sub>F n in sequentially. \<sigma> (r n) \<in> M \<and> d (\<sigma> (r n)) l < \<epsilon>"
+ by (auto simp: limitin_metric)
+ then obtain N where N: "d (\<sigma> (r N)) l < \<epsilon>" "d (\<sigma> (r (Suc N))) l < \<epsilon>"
+ using less_le_not_le by (auto simp: eventually_sequentially)
+ moreover have "\<sigma> (r N) \<noteq> l \<or> \<sigma> (r (Suc N)) \<noteq> l"
+ by (meson \<open>inj \<sigma>\<close> \<open>strict_mono r\<close> injD n_not_Suc_n strict_mono_eq)
+ ultimately
+ show False
+ using \<epsilon> \<open>range \<sigma> \<subseteq> T\<close> commute by fastforce
+ qed
+next
+ assume R: ?rhs
+ show ?lhs
+ proof (intro strip)
+ fix \<sigma> :: "nat \<Rightarrow> 'a"
+ assume "range \<sigma> \<subseteq> S"
+ show "\<exists>l r. l \<in> U \<and> strict_mono r \<and> limitin mtopology (\<sigma> \<circ> r) l sequentially"
+ proof (cases "finite (range \<sigma>)")
+ case True
+ then obtain m where "infinite (\<sigma> -` {\<sigma> m})"
+ by (metis image_iff inf_img_fin_dom nat_not_finite)
+ then obtain r where [iff]: "strict_mono r" and r: "\<And>n::nat. r n \<in> \<sigma> -` {\<sigma> m}"
+ using infinite_enumerate by blast
+ have [iff]: "\<sigma> m \<in> U" "\<sigma> m \<in> M"
+ using \<open>range \<sigma> \<subseteq> S\<close> assms by blast+
+ show ?thesis
+ proof (intro conjI exI)
+ show "limitin mtopology (\<sigma> \<circ> r) (\<sigma> m) sequentially"
+ using r by (simp add: limitin_metric)
+ qed auto
+ next
+ case False
+ then obtain l where "l \<in> U" and l: "l \<in> mtopology derived_set_of (range \<sigma>)"
+ by (meson R \<open>range \<sigma> \<subseteq> S\<close> disjoint_iff)
+ then obtain g where g: "\<And>\<epsilon>. \<epsilon>>0 \<Longrightarrow> \<sigma> (g \<epsilon>) \<noteq> l \<and> d l (\<sigma> (g \<epsilon>)) < \<epsilon>"
+ by (simp add: metric_derived_set_of) metis
+ have "range \<sigma> \<subseteq> M"
+ using \<open>range \<sigma> \<subseteq> S\<close> assms by auto
+ have "l \<in> M"
+ using l metric_derived_set_of by auto
+ define E where \<comment>\<open>a construction to ensure monotonicity\<close>
+ "E \<equiv> \<lambda>rec n. insert (inverse (Suc n)) ((\<lambda>i. d l (\<sigma> i)) ` (\<Union>k<n. {0..rec k})) - {0}"
+ define r where "r \<equiv> wfrec less_than (\<lambda>rec n. g (Min (E rec n)))"
+ have "(\<Union>k<n. {0..cut r less_than n k}) = (\<Union>k<n. {0..r k})" for n
+ by (auto simp: cut_apply)
+ then have r_eq: "r n = g (Min (E r n))" for n
+ by (metis E_def def_wfrec [OF r_def] wf_less_than)
+ have dl_pos[simp]: "d l (\<sigma> (r n)) > 0" for n
+ using wf_less_than
+ proof (induction n rule: wf_induct_rule)
+ case (less n)
+ then have *: "Min (E r n) > 0"
+ using \<open>l \<in> M\<close> \<open>range \<sigma> \<subseteq> M\<close> by (auto simp: E_def image_subset_iff)
+ show ?case
+ using g [OF *] r_eq [of n]
+ by (metis \<open>l \<in> M\<close> \<open>range \<sigma> \<subseteq> M\<close> mdist_pos_less range_subsetD)
+ qed
+ then have non_l: "\<sigma> (r n) \<noteq> l" for n
+ using \<open>range \<sigma> \<subseteq> M\<close> mdist_pos_eq by blast
+ have Min_pos: "Min (E r n) > 0" for n
+ using dl_pos \<open>l \<in> M\<close> \<open>range \<sigma> \<subseteq> M\<close> by (auto simp: E_def image_subset_iff)
+ have d_small: "d (\<sigma>(r n)) l < inverse(Suc n)" for n
+ proof -
+ have "d (\<sigma>(r n)) l < Min (E r n)"
+ by (simp add: \<open>0 < Min (E r n)\<close> commute g r_eq)
+ also have "... \<le> inverse(Suc n)"
+ by (simp add: E_def)
+ finally show ?thesis .
+ qed
+ have d_lt_d: "d l (\<sigma> (r n)) < d l (\<sigma> i)" if \<section>: "p < n" "i \<le> r p" "\<sigma> i \<noteq> l" for i p n
+ proof -
+ have 1: "d l (\<sigma> i) \<in> E r n"
+ using \<section> \<open>l \<in> M\<close> \<open>range \<sigma> \<subseteq> M\<close>
+ by (force simp: E_def image_subset_iff image_iff)
+ have "d l (\<sigma> (g (Min (E r n)))) < Min (E r n)"
+ by (rule conjunct2 [OF g [OF Min_pos]])
+ also have "Min (E r n) \<le> d l (\<sigma> i)"
+ using 1 unfolding E_def by (force intro!: Min.coboundedI)
+ finally show ?thesis
+ by (simp add: r_eq)
+ qed
+ have r: "r p < r n" if "p < n" for p n
+ using d_lt_d [OF that] non_l by (meson linorder_not_le order_less_irrefl)
+ show ?thesis
+ proof (intro exI conjI)
+ show "strict_mono r"
+ by (simp add: r strict_monoI)
+ show "limitin mtopology (\<sigma> \<circ> r) l sequentially"
+ unfolding limitin_metric
+ proof (intro conjI strip \<open>l \<in> M\<close>)
+ fix \<epsilon> :: real
+ assume "\<epsilon> > 0"
+ then have "\<forall>\<^sub>F n in sequentially. inverse(Suc n) < \<epsilon>"
+ using Archimedean_eventually_inverse by auto
+ then show "\<forall>\<^sub>F n in sequentially. (\<sigma> \<circ> r) n \<in> M \<and> d ((\<sigma> \<circ> r) n) l < \<epsilon>"
+ by (smt (verit) \<open>range \<sigma> \<subseteq> M\<close> commute comp_apply d_small eventually_mono range_subsetD)
+ qed
+ qed (use \<open>l \<in> U\<close> in auto)
+ qed
+ qed
+qed
+
+subsubsection \<open>More on Bolzano-Weierstrass\<close>
+
+lemma Bolzano_Weierstrass_A:
+ assumes "compactin mtopology S" "T \<subseteq> S" "infinite T"
+ shows "S \<inter> mtopology derived_set_of T \<noteq> {}"
+ by (simp add: assms compactin_imp_Bolzano_Weierstrass)
+
+lemma Bolzano_Weierstrass_B:
+ fixes \<sigma> :: "nat \<Rightarrow> 'a"
+ assumes "S \<subseteq> M" "range \<sigma> \<subseteq> S"
+ and "\<And>T. \<lbrakk>T \<subseteq> S \<and> infinite T\<rbrakk> \<Longrightarrow> S \<inter> mtopology derived_set_of T \<noteq> {}"
+ shows "\<exists>l r. l \<in> S \<and> strict_mono r \<and> limitin mtopology (\<sigma> \<circ> r) l sequentially"
+ using Bolzano_Weierstrass_property assms by blast
+
+lemma Bolzano_Weierstrass_C:
+ assumes "S \<subseteq> M"
+ assumes "\<And>\<sigma>:: nat \<Rightarrow> 'a. range \<sigma> \<subseteq> S \<Longrightarrow>
+ (\<exists>l r. l \<in> S \<and> strict_mono r \<and> limitin mtopology (\<sigma> \<circ> r) l sequentially)"
+ shows "mtotally_bounded S"
+ unfolding mtotally_bounded_sequentially
+ by (metis convergent_imp_MCauchy assms image_comp image_mono subset_UNIV subset_trans)
+
+lemma Bolzano_Weierstrass_D:
+ assumes "S \<subseteq> M" "S \<subseteq> \<Union>\<C>" and opeU: "\<And>U. U \<in> \<C> \<Longrightarrow> openin mtopology U"
+ assumes \<section>: "(\<forall>\<sigma>::nat\<Rightarrow>'a. range \<sigma> \<subseteq> S
+ \<longrightarrow> (\<exists>l r. l \<in> S \<and> strict_mono r \<and> limitin mtopology (\<sigma> \<circ> r) l sequentially))"
+ shows "\<exists>\<epsilon>>0. \<forall>x \<in> S. \<exists>U \<in> \<C>. mball x \<epsilon> \<subseteq> U"
+proof (rule ccontr)
+ assume "\<not> (\<exists>\<epsilon>>0. \<forall>x \<in> S. \<exists>U \<in> \<C>. mball x \<epsilon> \<subseteq> U)"
+ then have "\<forall>n. \<exists>x\<in>S. \<forall>U\<in>\<C>. \<not> mball x (inverse (Suc n)) \<subseteq> U"
+ by simp
+ then obtain \<sigma> where "\<And>n. \<sigma> n \<in> S"
+ and \<sigma>: "\<And>n U. U \<in> \<C> \<Longrightarrow> \<not> mball (\<sigma> n) (inverse (Suc n)) \<subseteq> U"
+ by metis
+ then obtain l r where "l \<in> S" "strict_mono r"
+ and lr: "limitin mtopology (\<sigma> \<circ> r) l sequentially"
+ by (meson \<section> image_subsetI)
+ with \<open>S \<subseteq> \<Union>\<C>\<close> obtain B where "l \<in> B" "B \<in> \<C>"
+ by auto
+ then obtain \<epsilon> where "\<epsilon> > 0" and \<epsilon>: "\<And>z. \<lbrakk>z \<in> M; d z l < \<epsilon>\<rbrakk> \<Longrightarrow> z \<in> B"
+ by (metis opeU [OF \<open>B \<in> \<C>\<close>] commute in_mball openin_mtopology subset_iff)
+ then have "\<forall>\<^sub>F n in sequentially. \<sigma> (r n) \<in> M \<and> d (\<sigma> (r n)) l < \<epsilon>/2"
+ using lr half_gt_zero unfolding limitin_metric o_def by blast
+ moreover have "\<forall>\<^sub>F n in sequentially. inverse (real (Suc n)) < \<epsilon>/2"
+ using Archimedean_eventually_inverse \<open>0 < \<epsilon>\<close> half_gt_zero by blast
+ ultimately obtain n where n: "d (\<sigma> (r n)) l < \<epsilon>/2" "inverse (real (Suc n)) < \<epsilon>/2"
+ by (smt (verit, del_insts) eventually_sequentially le_add1 le_add2)
+ have "x \<in> B" if "d (\<sigma> (r n)) x < inverse (Suc(r n))" "x \<in> M" for x
+ proof -
+ have rle: "inverse (real (Suc (r n))) \<le> inverse (real (Suc n))"
+ using \<open>strict_mono r\<close> strict_mono_imp_increasing by auto
+ have "d x l \<le> d (\<sigma> (r n)) x + d (\<sigma> (r n)) l"
+ using that by (metis triangle \<open>\<And>n. \<sigma> n \<in> S\<close> \<open>l \<in> S\<close> \<open>S \<subseteq> M\<close> commute subsetD)
+ also have "... < \<epsilon>"
+ using that n rle by linarith
+ finally show ?thesis
+ by (simp add: \<epsilon> that)
+ qed
+ then show False
+ using \<sigma> [of B "r n"] by (simp add: \<open>B \<in> \<C>\<close> subset_iff)
+qed
+
+
+lemma Bolzano_Weierstrass_E:
+ assumes "mtotally_bounded S" "S \<subseteq> M"
+ and S: "\<And>\<C>. \<lbrakk>\<And>U. U \<in> \<C> \<Longrightarrow> openin mtopology U; S \<subseteq> \<Union>\<C>\<rbrakk> \<Longrightarrow> \<exists>\<epsilon>>0. \<forall>x \<in> S. \<exists>U \<in> \<C>. mball x \<epsilon> \<subseteq> U"
+ shows "compactin mtopology S"
+proof (clarsimp simp: compactin_def assms)
+ fix \<U> :: "'a set set"
+ assume \<U>: "\<forall>x\<in>\<U>. openin mtopology x" and "S \<subseteq> \<Union>\<U>"
+ then obtain \<epsilon> where "\<epsilon>>0" and \<epsilon>: "\<And>x. x \<in> S \<Longrightarrow> \<exists>U \<in> \<U>. mball x \<epsilon> \<subseteq> U"
+ by (metis S)
+ then obtain f where f: "\<And>x. x \<in> S \<Longrightarrow> f x \<in> \<U> \<and> mball x \<epsilon> \<subseteq> f x"
+ by metis
+ then obtain K where "finite K" "K \<subseteq> S" and K: "S \<subseteq> (\<Union>x\<in>K. mball x \<epsilon>)"
+ by (metis \<open>0 < \<epsilon>\<close> \<open>mtotally_bounded S\<close> mtotally_bounded_def)
+ show "\<exists>\<F>. finite \<F> \<and> \<F> \<subseteq> \<U> \<and> S \<subseteq> \<Union>\<F>"
+ proof (intro conjI exI)
+ show "finite (f ` K)"
+ by (simp add: \<open>finite K\<close>)
+ show "f ` K \<subseteq> \<U>"
+ using \<open>K \<subseteq> S\<close> f by blast
+ show "S \<subseteq> \<Union>(f ` K)"
+ using K \<open>K \<subseteq> S\<close> by (force dest: f)
+ qed
+qed
+
+
+lemma compactin_eq_Bolzano_Weierstrass:
+ "compactin mtopology S \<longleftrightarrow>
+ S \<subseteq> M \<and> (\<forall>T. T \<subseteq> S \<and> infinite T \<longrightarrow> S \<inter> mtopology derived_set_of T \<noteq> {})"
+ using Bolzano_Weierstrass_C Bolzano_Weierstrass_D Bolzano_Weierstrass_E
+ by (smt (verit, del_insts) Bolzano_Weierstrass_property compactin_imp_Bolzano_Weierstrass compactin_subspace subset_refl topspace_mtopology)
+
+lemma compactin_sequentially:
+ shows "compactin mtopology S \<longleftrightarrow>
+ S \<subseteq> M \<and>
+ ((\<forall>\<sigma>::nat\<Rightarrow>'a. range \<sigma> \<subseteq> S
+ \<longrightarrow> (\<exists>l r. l \<in> S \<and> strict_mono r \<and> limitin mtopology (\<sigma> \<circ> r) l sequentially)))"
+ by (metis Bolzano_Weierstrass_property compactin_eq_Bolzano_Weierstrass subset_refl)
+
+lemma compactin_imp_mtotally_bounded:
+ "compactin mtopology S \<Longrightarrow> mtotally_bounded S"
+ by (simp add: Bolzano_Weierstrass_C compactin_sequentially)
+
+lemma lebesgue_number:
+ "\<lbrakk>compactin mtopology S; S \<subseteq> \<Union>\<C>; \<And>U. U \<in> \<C> \<Longrightarrow> openin mtopology U\<rbrakk>
+ \<Longrightarrow> \<exists>\<epsilon>>0. \<forall>x \<in> S. \<exists>U \<in> \<C>. mball x \<epsilon> \<subseteq> U"
+ by (simp add: Bolzano_Weierstrass_D compactin_sequentially)
+
+lemma compact_space_sequentially:
+ "compact_space mtopology \<longleftrightarrow>
+ (\<forall>\<sigma>::nat\<Rightarrow>'a. range \<sigma> \<subseteq> M
+ \<longrightarrow> (\<exists>l r. l \<in> M \<and> strict_mono r \<and> limitin mtopology (\<sigma> \<circ> r) l sequentially))"
+ by (simp add: compact_space_def compactin_sequentially)
+
+lemma compact_space_eq_Bolzano_Weierstrass:
+ "compact_space mtopology \<longleftrightarrow>
+ (\<forall>S. S \<subseteq> M \<and> infinite S \<longrightarrow> mtopology derived_set_of S \<noteq> {})"
+ using Int_absorb1 [OF derived_set_of_subset_topspace [of mtopology]]
+ by (force simp: compact_space_def compactin_eq_Bolzano_Weierstrass)
+
+lemma compact_space_nest:
+ "compact_space mtopology \<longleftrightarrow>
+ (\<forall>C. (\<forall>n::nat. closedin mtopology (C n)) \<and> (\<forall>n. C n \<noteq> {}) \<and> decseq C \<longrightarrow> \<Inter>(range C) \<noteq> {})"
+ (is "?lhs=?rhs")
+proof
+ assume L: ?lhs
+ show ?rhs
+ proof clarify
+ fix C :: "nat \<Rightarrow> 'a set"
+ assume "\<forall>n. closedin mtopology (C n)"
+ and "\<forall>n. C n \<noteq> {}"
+ and "decseq C"
+ and "\<Inter> (range C) = {}"
+ then obtain K where K: "finite K" "\<Inter>(C ` K) = {}"
+ by (metis L compact_space_imp_nest)
+ then obtain k where "K \<subseteq> {..k}"
+ using finite_nat_iff_bounded_le by auto
+ then have "C k \<subseteq> \<Inter>(C ` K)"
+ using \<open>decseq C\<close> by (auto simp:decseq_def)
+ then show False
+ by (simp add: K \<open>\<forall>n. C n \<noteq> {}\<close>)
+ qed
+next
+ assume R [rule_format]: ?rhs
+ show ?lhs
+ unfolding compact_space_sequentially
+ proof (intro strip)
+ fix \<sigma> :: "nat \<Rightarrow> 'a"
+ assume \<sigma>: "range \<sigma> \<subseteq> M"
+ have "mtopology closure_of \<sigma> ` {n..} \<noteq> {}" for n
+ using \<open>range \<sigma> \<subseteq> M\<close> by (auto simp: closure_of_eq_empty image_subset_iff)
+ moreover have "decseq (\<lambda>n. mtopology closure_of \<sigma> ` {n..})"
+ using closure_of_mono image_mono by (smt (verit) atLeast_subset_iff decseq_def)
+ ultimately obtain l where l: "\<And>n. l \<in> mtopology closure_of \<sigma> ` {n..}"
+ using R [of "\<lambda>n. mtopology closure_of (\<sigma> ` {n..})"] by auto
+ then have "l \<in> M" and "\<And>n. \<forall>r>0. \<exists>k\<ge>n. \<sigma> k \<in> M \<and> d l (\<sigma> k) < r"
+ using metric_closure_of by fastforce+
+ then obtain f where f: "\<And>n r. r>0 \<Longrightarrow> f n r \<ge> n \<and> \<sigma> (f n r) \<in> M \<and> d l (\<sigma> (f n r)) < r"
+ by metis
+ define r where "r = rec_nat (f 0 1) (\<lambda>n rec. (f (Suc rec) (inverse (Suc (Suc n)))))"
+ have r: "d l (\<sigma>(r n)) < inverse(Suc n)" for n
+ by (induction n) (auto simp: rec_nat_0_imp [OF r_def] rec_nat_Suc_imp [OF r_def] f)
+ have "r n < r(Suc n)" for n
+ by (simp add: Suc_le_lessD f r_def)
+ then have "strict_mono r"
+ by (simp add: strict_mono_Suc_iff)
+ moreover have "limitin mtopology (\<sigma> \<circ> r) l sequentially"
+ proof (clarsimp simp: limitin_metric \<open>l \<in> M\<close>)
+ fix \<epsilon> :: real
+ assume "\<epsilon> > 0"
+ then have "(\<forall>\<^sub>F n in sequentially. inverse (real (Suc n)) < \<epsilon>)"
+ using Archimedean_eventually_inverse by blast
+ then show "\<forall>\<^sub>F n in sequentially. \<sigma> (r n) \<in> M \<and> d (\<sigma> (r n)) l < \<epsilon>"
+ by eventually_elim (metis commute \<open>range \<sigma> \<subseteq> M\<close> order_less_trans r range_subsetD)
+ qed
+ ultimately show "\<exists>l r. l \<in> M \<and> strict_mono r \<and> limitin mtopology (\<sigma> \<circ> r) l sequentially"
+ using \<open>l \<in> M\<close> by blast
+ qed
+qed
+
+
+lemma (in discrete_metric) mcomplete_discrete_metric: "disc.mcomplete"
+proof (clarsimp simp: disc.mcomplete_def)
+ fix \<sigma> :: "nat \<Rightarrow> 'a"
+ assume "disc.MCauchy \<sigma>"
+ then obtain N where "\<And>n. N \<le> n \<Longrightarrow> \<sigma> N = \<sigma> n"
+ unfolding disc.MCauchy_def by (metis dd_def dual_order.refl order_less_irrefl zero_less_one)
+ moreover have "range \<sigma> \<subseteq> M"
+ using \<open>disc.MCauchy \<sigma>\<close> disc.MCauchy_def by blast
+ ultimately have "limitin disc.mtopology \<sigma> (\<sigma> N) sequentially"
+ by (metis disc.limit_metric_sequentially disc.zero range_subsetD)
+ then show "\<exists>x. limitin disc.mtopology \<sigma> x sequentially" ..
+qed
+
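+text \<open>\<open>mcomplete_discrete_metric\<close> holds because in the discrete metric a Cauchy sequence is
+ eventually constant (take \<open>\<epsilon> = 1\<close> in the Cauchy condition), hence convergent.\<close>
+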
+lemma compact_space_imp_mcomplete: "compact_space mtopology \<Longrightarrow> mcomplete"
+ by (simp add: compact_space_nest mcomplete_nest)
+
+lemma (in submetric) compactin_imp_mcomplete:
+ "compactin mtopology A \<Longrightarrow> sub.mcomplete"
+ by (simp add: compactin_subspace mtopology_submetric sub.compact_space_imp_mcomplete)
+
+lemma (in submetric) mcomplete_imp_closedin:
+ assumes "sub.mcomplete"
+ shows "closedin mtopology A"
+proof -
+ have "l \<in> A"
+ if "range \<sigma> \<subseteq> A" and l: "limitin mtopology \<sigma> l sequentially"
+ for \<sigma> :: "nat \<Rightarrow> 'a" and l
+ proof -
+ have "sub.MCauchy \<sigma>"
+ using convergent_imp_MCauchy subset that by (force simp: MCauchy_submetric)
+ then have "limitin sub.mtopology \<sigma> l sequentially"
+ using assms unfolding sub.mcomplete_def
+ using l limitin_metric_unique limitin_submetric_iff trivial_limit_sequentially by blast
+ then show ?thesis
+ using limitin_submetric_iff by blast
+ qed
+ then show ?thesis
+ using metric_closedin_iff_sequentially_closed subset by auto
+qed
+
+lemma (in submetric) closedin_eq_mcomplete:
+ "mcomplete \<Longrightarrow> (closedin mtopology A \<longleftrightarrow> sub.mcomplete)"
+ using closedin_mcomplete_imp_mcomplete mcomplete_imp_closedin by blast
+
+lemma compact_space_eq_mcomplete_mtotally_bounded:
+ "compact_space mtopology \<longleftrightarrow> mcomplete \<and> mtotally_bounded M"
+ by (meson Bolzano_Weierstrass_C compact_space_imp_mcomplete compact_space_sequentially limitin_mspace
+ mcomplete_alt mtotally_bounded_sequentially subset_refl)
+
+
+lemma compact_closure_of_imp_mtotally_bounded:
+ "\<lbrakk>compactin mtopology (mtopology closure_of S); S \<subseteq> M\<rbrakk>
+ \<Longrightarrow> mtotally_bounded S"
+ using compactin_imp_mtotally_bounded mtotally_bounded_closure_of_eq by blast
+
+lemma mtotally_bounded_eq_compact_closure_of:
+ assumes "mcomplete"
+ shows "mtotally_bounded S \<longleftrightarrow> S \<subseteq> M \<and> compactin mtopology (mtopology closure_of S)"
+ (is "?lhs=?rhs")
+proof
+ assume L: ?lhs
+ show ?rhs
+ unfolding compactin_subspace
+ proof (intro conjI)
+ show "S \<subseteq> M"
+ using L by (simp add: mtotally_bounded_imp_subset)
+ show "mtopology closure_of S \<subseteq> topspace mtopology"
+ by (simp add: \<open>S \<subseteq> M\<close> closure_of_minimal)
+ then have MSM: "mtopology closure_of S \<subseteq> M"
+ by auto
+ interpret S: submetric M d "mtopology closure_of S"
+ proof qed (use MSM in auto)
+ have "S.sub.mtotally_bounded (mtopology closure_of S)"
+ using L mtotally_bounded_absolute mtotally_bounded_closure_of by blast
+ then
+ show "compact_space (subtopology mtopology (mtopology closure_of S))"
+ using S.closedin_mcomplete_imp_mcomplete S.mtopology_submetric S.sub.compact_space_eq_mcomplete_mtotally_bounded assms by force
+ qed
+qed (auto simp: compact_closure_of_imp_mtotally_bounded)
+
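+text \<open>Hence in a complete metric space the totally bounded sets are exactly the relatively
+ compact ones, that is, the subsets of \<open>M\<close> whose closure is compact.\<close>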
+
+
+lemma compact_closure_of_eq_Bolzano_Weierstrass:
+ "compactin mtopology (mtopology closure_of S) \<longleftrightarrow>
+ (\<forall>T. infinite T \<and> T \<subseteq> S \<and> T \<subseteq> M \<longrightarrow> mtopology derived_set_of T \<noteq> {})" (is "?lhs=?rhs")
+proof
+ assume L: ?lhs
+ show ?rhs
+ proof (intro strip)
+ fix T
+ assume T: "infinite T \<and> T \<subseteq> S \<and> T \<subseteq> M"
+ show "mtopology derived_set_of T \<noteq> {}"
+ proof (intro compact_closure_of_imp_Bolzano_Weierstrass)
+ show "compactin mtopology (mtopology closure_of S)"
+ by (simp add: L)
+ qed (use T in auto)
+ qed
+next
+ have "compactin mtopology (mtopology closure_of S)"
+ if \<section>: "\<And>T. \<lbrakk>infinite T; T \<subseteq> S\<rbrakk> \<Longrightarrow> mtopology derived_set_of T \<noteq> {}" and "S \<subseteq> M" for S
+ unfolding compactin_sequentially
+ proof (intro conjI strip)
+ show MSM: "mtopology closure_of S \<subseteq> M"
+ using closure_of_subset_topspace by fastforce
+ fix \<sigma> :: "nat \<Rightarrow> 'a"
+ assume \<sigma>: "range \<sigma> \<subseteq> mtopology closure_of S"
+ then have "\<exists>y \<in> S. d (\<sigma> n) y < inverse(Suc n)" for n
+ by (simp add: metric_closure_of image_subset_iff) (metis inverse_Suc of_nat_Suc)
+ then obtain \<tau> where \<tau>: "\<And>n. \<tau> n \<in> S \<and> d (\<sigma> n) (\<tau> n) < inverse(Suc n)"
+ by metis
+ then have "range \<tau> \<subseteq> S"
+ by blast
+ moreover
+ have *: "\<forall>T. T \<subseteq> S \<and> infinite T \<longrightarrow> mtopology closure_of S \<inter> mtopology derived_set_of T \<noteq> {}"
+ using "\<section>"(1) derived_set_of_mono derived_set_of_subset_closure_of by fastforce
+ moreover have "S \<subseteq> mtopology closure_of S"
+ by (simp add: \<open>S \<subseteq> M\<close> closure_of_subset)
+ ultimately obtain l r where lr:
+ "l \<in> mtopology closure_of S" "strict_mono r" "limitin mtopology (\<tau> \<circ> r) l sequentially"
+ using Bolzano_Weierstrass_property \<open>S \<subseteq> M\<close> by metis
+ then have "l \<in> M"
+ using limitin_mspace by blast
+ have dr_less: "d ((\<sigma> \<circ> r) n) ((\<tau> \<circ> r) n) < inverse(Suc n)" for n
+ proof -
+ have "d ((\<sigma> \<circ> r) n) ((\<tau> \<circ> r) n) < inverse(Suc (r n))"
+ using \<tau> by auto
+ also have "... \<le> inverse(Suc n)"
+ using lr strict_mono_imp_increasing by auto
+ finally show ?thesis .
+ qed
+ have "limitin mtopology (\<sigma> \<circ> r) l sequentially"
+ unfolding limitin_metric
+ proof (intro conjI strip)
+ show "l \<in> M"
+ using limitin_mspace lr by blast
+ fix \<epsilon> :: real
+ assume "\<epsilon> > 0"
+ then have "\<forall>\<^sub>F n in sequentially. (\<tau> \<circ> r) n \<in> M \<and> d ((\<tau> \<circ> r) n) l < \<epsilon>/2"
+ using lr half_gt_zero limitin_metric by blast
+ moreover have "\<forall>\<^sub>F n in sequentially. inverse (real (Suc n)) < \<epsilon>/2"
+ using Archimedean_eventually_inverse \<open>0 < \<epsilon>\<close> half_gt_zero by blast
+ then have "\<forall>\<^sub>F n in sequentially. d ((\<sigma> \<circ> r) n) ((\<tau> \<circ> r) n) < \<epsilon>/2"
+ by eventually_elim (smt (verit, del_insts) dr_less)
+ ultimately have "\<forall>\<^sub>F n in sequentially. d ((\<sigma> \<circ> r) n) l < \<epsilon>/2 + \<epsilon>/2"
+ by eventually_elim (smt (verit) triangle \<open>l \<in> M\<close> MSM \<sigma> comp_apply order_trans range_subsetD)
+ then show "\<forall>\<^sub>F n in sequentially. (\<sigma> \<circ> r) n \<in> M \<and> d ((\<sigma> \<circ> r) n) l < \<epsilon>"
+ apply eventually_elim
+ using \<open>mtopology closure_of S \<subseteq> M\<close> \<sigma> by auto
+ qed
+ with lr show "\<exists>l r. l \<in> mtopology closure_of S \<and> strict_mono r \<and> limitin mtopology (\<sigma> \<circ> r) l sequentially"
+ by blast
+ qed
+ then show "?rhs \<Longrightarrow> ?lhs"
+ by (metis Int_subset_iff closure_of_restrict inf_le1 topspace_mtopology)
+qed
+
+end
+
+lemma (in discrete_metric) mtotally_bounded_discrete_metric:
+ "disc.mtotally_bounded S \<longleftrightarrow> finite S \<and> S \<subseteq> M" (is "?lhs=?rhs")
+proof
+ assume L: ?lhs
+ show ?rhs
+ proof
+ show "finite S"
+ by (metis (no_types) L closure_of_subset_Int compactin_discrete_topology disc.mtotally_bounded_eq_compact_closure_of
+ disc.topspace_mtopology discrete_metric.mcomplete_discrete_metric inf.absorb_iff2 mtopology_discrete_metric finite_subset)
+ show "S \<subseteq> M"
+ by (simp add: L disc.mtotally_bounded_imp_subset)
+ qed
+qed (simp add: disc.finite_imp_mtotally_bounded)
+
+
+context Metric_space
+begin
+
+lemma derived_set_of_infinite_openin_metric:
+ "mtopology derived_set_of S =
+ {x \<in> M. \<forall>U. x \<in> U \<and> openin mtopology U \<longrightarrow> infinite(S \<inter> U)}"
+ by (simp add: derived_set_of_infinite_openin Hausdorff_space_mtopology)
+
+lemma derived_set_of_infinite_1:
+ assumes "infinite (S \<inter> mball x \<epsilon>)"
+ shows "infinite (S \<inter> mcball x \<epsilon>)"
+ by (meson Int_mono assms finite_subset mball_subset_mcball subset_refl)
+
+lemma derived_set_of_infinite_2:
+ assumes "openin mtopology U" "\<And>\<epsilon>. 0 < \<epsilon> \<Longrightarrow> infinite (S \<inter> mcball x \<epsilon>)" and "x \<in> U"
+ shows "infinite (S \<inter> U)"
+ by (metis assms openin_mtopology_mcball finite_Int inf.absorb_iff2 inf_assoc)
+
+lemma derived_set_of_infinite_mball:
+ "mtopology derived_set_of S = {x \<in> M. \<forall>e>0. infinite(S \<inter> mball x e)}"
+ unfolding derived_set_of_infinite_openin_metric
+ by (meson centre_in_mball_iff openin_mball derived_set_of_infinite_1 derived_set_of_infinite_2)
+
+lemma derived_set_of_infinite_mcball:
+ "mtopology derived_set_of S = {x \<in> M. \<forall>e>0. infinite(S \<inter> mcball x e)}"
+ unfolding derived_set_of_infinite_openin_metric
+ by (meson centre_in_mball_iff openin_mball derived_set_of_infinite_1 derived_set_of_infinite_2)
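+
+text \<open>For example, in the real line every ball around 0, however small, contains
+  infinitely many reciprocals of positive integers, so 0 lies in the derived set of
+  the set of those reciprocals even though it is not itself a member of that set.\<close>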
+
+end
+
+subsection\<open>Continuous functions on metric spaces\<close>
+
+context Metric_space
+begin
+
+lemma continuous_map_to_metric:
+ "continuous_map X mtopology f \<longleftrightarrow>
+ (\<forall>x \<in> topspace X. \<forall>\<epsilon>>0. \<exists>U. openin X U \<and> x \<in> U \<and> (\<forall>y\<in>U. f y \<in> mball (f x) \<epsilon>))"
+ (is "?lhs=?rhs")
+proof
+ show "?lhs \<Longrightarrow> ?rhs"
+ unfolding continuous_map_eq_topcontinuous_at topcontinuous_at_def
+ by (metis centre_in_mball_iff openin_mball topspace_mtopology)
+next
+ assume R: ?rhs
+ then have "\<forall>x\<in>topspace X. f x \<in> M"
+ by (meson gt_ex in_mball)
+ moreover
+ have "\<And>x V. \<lbrakk>x \<in> topspace X; openin mtopology V; f x \<in> V\<rbrakk> \<Longrightarrow> \<exists>U. openin X U \<and> x \<in> U \<and> (\<forall>y\<in>U. f y \<in> V)"
+ unfolding openin_mtopology by (metis Int_iff R inf.orderE)
+ ultimately
+ show ?lhs
+ by (simp add: continuous_map_eq_topcontinuous_at topcontinuous_at_def)
+qed
+
+lemma continuous_map_from_metric:
+ "continuous_map mtopology X f \<longleftrightarrow>
+ f ` M \<subseteq> topspace X \<and>
+ (\<forall>a \<in> M. \<forall>U. openin X U \<and> f a \<in> U \<longrightarrow> (\<exists>r>0. \<forall>x. x \<in> M \<and> d a x < r \<longrightarrow> f x \<in> U))"
+proof (cases "f ` M \<subseteq> topspace X")
+ case True
+ then show ?thesis
+ by (fastforce simp: continuous_map openin_mtopology subset_eq)
+next
+ case False
+ then show ?thesis
+ by (simp add: continuous_map_eq_image_closure_subset_gen)
+qed
+
+text \<open>An abstract formulation of uniform convergence: the limit is taken along an arbitrary filter, so it does not have to be sequential\<close>
+lemma continuous_map_uniform_limit:
+ assumes contf: "\<forall>\<^sub>F \<xi> in F. continuous_map X mtopology (f \<xi>)"
+ and dfg: "\<And>\<epsilon>. 0 < \<epsilon> \<Longrightarrow> \<forall>\<^sub>F \<xi> in F. \<forall>x \<in> topspace X. g x \<in> M \<and> d (f \<xi> x) (g x) < \<epsilon>"
+ and nontriv: "\<not> trivial_limit F"
+ shows "continuous_map X mtopology g"
+ unfolding continuous_map_to_metric
+proof (intro strip)
+ fix x and \<epsilon>::real
+ assume "x \<in> topspace X" and "\<epsilon> > 0"
+ then obtain \<xi> where k: "continuous_map X mtopology (f \<xi>)"
+ and gM: "\<forall>x \<in> topspace X. g x \<in> M"
+ and third: "\<forall>x \<in> topspace X. d (f \<xi> x) (g x) < \<epsilon>/3"
+ using eventually_conj [OF contf] contf dfg [of "\<epsilon>/3"] eventually_happens' [OF nontriv]
+ by (smt (verit, ccfv_SIG) zero_less_divide_iff)
+ then obtain U where U: "openin X U" "x \<in> U" and Uthird: "\<forall>y\<in>U. d (f \<xi> y) (f \<xi> x) < \<epsilon>/3"
+ unfolding continuous_map_to_metric
+ by (metis \<open>0 < \<epsilon>\<close> \<open>x \<in> topspace X\<close> commute divide_pos_pos in_mball zero_less_numeral)
+ have f_inM: "f \<xi> y \<in> M" if "y\<in>U" for y
+ using U k openin_subset that by (fastforce simp: continuous_map_def)
+ have "d (g y) (g x) < \<epsilon>" if "y\<in>U" for y
+ proof -
+ have "g y \<in> M"
+ using U gM openin_subset that by blast
+ have "d (g y) (g x) \<le> d (g y) (f \<xi> x) + d (f \<xi> x) (g x)"
+ by (simp add: U \<open>g y \<in> M\<close> \<open>x \<in> topspace X\<close> f_inM gM triangle)
+ also have "\<dots> \<le> d (g y) (f \<xi> y) + d (f \<xi> y) (f \<xi> x) + d (f \<xi> x) (g x)"
+ by (simp add: U \<open>g y \<in> M\<close> commute f_inM that triangle')
+ also have "\<dots> < \<epsilon>/3 + \<epsilon>/3 + \<epsilon>/3"
+ by (smt (verit) U(1) Uthird \<open>x \<in> topspace X\<close> commute openin_subset subsetD that third)
+ finally show ?thesis by simp
+ qed
+ with U gM show "\<exists>U. openin X U \<and> x \<in> U \<and> (\<forall>y\<in>U. g y \<in> mball (g x) \<epsilon>)"
+ by (metis commute in_mball in_mono openin_subset)
+qed
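+
+text \<open>Taking the filter to be the sequential filter recovers the classical fact that a
+  uniform limit of a sequence of continuous functions into a metric space is continuous;
+  a version for uniformly Cauchy sequences follows below.\<close>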
+
+
+lemma continuous_map_uniform_limit_alt:
+ assumes contf: "\<forall>\<^sub>F \<xi> in F. continuous_map X mtopology (f \<xi>)"
+ and gim: "g ` (topspace X) \<subseteq> M"
+ and dfg: "\<And>\<epsilon>. 0 < \<epsilon> \<Longrightarrow> \<forall>\<^sub>F \<xi> in F. \<forall>x \<in> topspace X. d (f \<xi> x) (g x) < \<epsilon>"
+ and nontriv: "\<not> trivial_limit F"
+ shows "continuous_map X mtopology g"
+proof (rule continuous_map_uniform_limit [OF contf])
+ fix \<epsilon> :: real
+ assume "\<epsilon> > 0"
+ with gim dfg show "\<forall>\<^sub>F \<xi> in F. \<forall>x\<in>topspace X. g x \<in> M \<and> d (f \<xi> x) (g x) < \<epsilon>"
+ by (simp add: image_subset_iff)
+qed (use nontriv in auto)
+
+
+lemma continuous_map_uniformly_Cauchy_limit:
+ assumes "mcomplete"
+ assumes contf: "\<forall>\<^sub>F n in sequentially. continuous_map X mtopology (f n)"
+ and Cauchy': "\<And>\<epsilon>. \<epsilon> > 0 \<Longrightarrow> \<exists>N. \<forall>m n x. N \<le> m \<longrightarrow> N \<le> n \<longrightarrow> x \<in> topspace X \<longrightarrow> d (f m x) (f n x) < \<epsilon>"
+ obtains g where
+ "continuous_map X mtopology g"
+ "\<And>\<epsilon>. 0 < \<epsilon> \<Longrightarrow> \<forall>\<^sub>F n in sequentially. \<forall>x\<in>topspace X. d (f n x) (g x) < \<epsilon>"
+proof -
+ have "\<And>x. x \<in> topspace X \<Longrightarrow> \<exists>l. limitin mtopology (\<lambda>n. f n x) l sequentially"
+ using \<open>mcomplete\<close> [unfolded mcomplete, rule_format] assms
+ by (smt (verit) contf continuous_map_def eventually_mono topspace_mtopology)
+ then obtain g where g: "\<And>x. x \<in> topspace X \<Longrightarrow> limitin mtopology (\<lambda>n. f n x) (g x) sequentially"
+ by metis
+ show thesis
+ proof
+ show "\<forall>\<^sub>F n in sequentially. \<forall>x\<in>topspace X. d (f n x) (g x) < \<epsilon>"
+ if "\<epsilon> > 0" for \<epsilon> :: real
+ proof -
+ obtain N where N: "\<And>m n x. \<lbrakk>N \<le> m; N \<le> n; x \<in> topspace X\<rbrakk> \<Longrightarrow> d (f m x) (f n x) < \<epsilon>/2"
+ by (meson Cauchy' \<open>0 < \<epsilon>\<close> half_gt_zero)
+ obtain P where P: "\<And>n x. \<lbrakk>n \<ge> P; x \<in> topspace X\<rbrakk> \<Longrightarrow> f n x \<in> M"
+ using contf by (auto simp: eventually_sequentially continuous_map_def)
+ show ?thesis
+ proof (intro eventually_sequentiallyI strip)
+ fix n x
+ assume "max N P \<le> n" and x: "x \<in> topspace X"
+ obtain L where "g x \<in> M" and L: "\<forall>n\<ge>L. f n x \<in> M \<and> d (f n x) (g x) < \<epsilon>/2"
+ using g [OF x] \<open>\<epsilon> > 0\<close> unfolding limitin_metric
+ by (metis (no_types, lifting) eventually_sequentially half_gt_zero)
+ define n' where "n' \<equiv> Max{L,N,P}"
+ have L': "\<forall>m \<ge> n'. f m x \<in> M \<and> d (f m x) (g x) < \<epsilon>/2"
+ using L by (simp add: n'_def)
+ moreover
+ have "d (f n x) (f n' x) < \<epsilon>/2"
+ using N [of n n' x] \<open>max N P \<le> n\<close> n'_def x by fastforce
+ ultimately have "d (f n x) (g x) < \<epsilon>/2 + \<epsilon>/2"
+ by (smt (verit, ccfv_SIG) P \<open>g x \<in> M\<close> \<open>max N P \<le> n\<close> le_refl max.bounded_iff mdist_zero triangle' x)
+ then show "d (f n x) (g x) < \<epsilon>" by simp
+ qed
+ qed
+ then show "continuous_map X mtopology g"
+ by (smt (verit, del_insts) eventually_mono g limitin_mspace trivial_limit_sequentially continuous_map_uniform_limit [OF contf])
+ qed
+qed
+
+lemma metric_continuous_map:
+ assumes "Metric_space M' d'"
+ shows
+ "continuous_map mtopology (Metric_space.mtopology M' d') f \<longleftrightarrow>
+ f ` M \<subseteq> M' \<and> (\<forall>a \<in> M. \<forall>\<epsilon>>0. \<exists>\<delta>>0. (\<forall>x. x \<in> M \<and> d a x < \<delta> \<longrightarrow> d' (f a) (f x) < \<epsilon>))"
+ (is "?lhs = ?rhs")
+proof -
+ interpret M': Metric_space M' d'
+ by (simp add: assms)
+ show ?thesis
+ proof
+ assume L: ?lhs
+ show ?rhs
+ proof (intro conjI strip)
+ show "f ` M \<subseteq> M'"
+ using L by (auto simp: continuous_map_def)
+ fix a and \<epsilon> :: real
+ assume "a \<in> M" and "\<epsilon> > 0"
+ then have "openin mtopology {x \<in> M. f x \<in> M'.mball (f a) \<epsilon>}" "f a \<in> M'"
+ using L unfolding continuous_map_def by fastforce+
+ then obtain \<delta> where "\<delta> > 0" "mball a \<delta> \<subseteq> {x \<in> M. f x \<in> M' \<and> d' (f a) (f x) < \<epsilon>}"
+ using \<open>0 < \<epsilon>\<close> \<open>a \<in> M\<close> openin_mtopology by auto
+ then show "\<exists>\<delta>>0. \<forall>x. x \<in> M \<and> d a x < \<delta> \<longrightarrow> d' (f a) (f x) < \<epsilon>"
+ using \<open>a \<in> M\<close> in_mball by blast
+ qed
+ next
+ assume R: ?rhs
+ show ?lhs
+ unfolding continuous_map_def
+ proof (intro conjI strip)
+ fix U
+ assume "openin M'.mtopology U"
+ then show "openin mtopology {x \<in> topspace mtopology. f x \<in> U}"
+ apply (simp add: continuous_map_def openin_mtopology M'.openin_mtopology subset_iff)
+ by (metis R image_subset_iff)
+ qed (use R in auto)
+ qed
+qed
+
+end (*Metric_space*)
+
+end
+
diff --git a/src/HOL/Analysis/Abstract_Topological_Spaces.thy b/src/HOL/Analysis/Abstract_Topological_Spaces.thy
--- a/src/HOL/Analysis/Abstract_Topological_Spaces.thy
+++ b/src/HOL/Analysis/Abstract_Topological_Spaces.thy
@@ -1,3468 +1,5309 @@
(* Author: L C Paulson, University of Cambridge [ported from HOL Light] *)
section \<open>Various Forms of Topological Spaces\<close>
theory Abstract_Topological_Spaces
imports Lindelof_Spaces Locally Sum_Topology FSigma
begin
subsection\<open>Connected topological spaces\<close>
lemma connected_space_eq_frontier_eq_empty:
"connected_space X \<longleftrightarrow> (\<forall>S. S \<subseteq> topspace X \<and> X frontier_of S = {} \<longrightarrow> S = {} \<or> S = topspace X)"
by (meson clopenin_eq_frontier_of connected_space_clopen_in)
lemma connected_space_frontier_eq_empty:
"connected_space X \<and> S \<subseteq> topspace X
\<Longrightarrow> (X frontier_of S = {} \<longleftrightarrow> S = {} \<or> S = topspace X)"
by (meson connected_space_eq_frontier_eq_empty frontier_of_empty frontier_of_topspace)
lemma connectedin_eq_subset_separated_union:
"connectedin X C \<longleftrightarrow>
C \<subseteq> topspace X \<and> (\<forall>S T. separatedin X S T \<and> C \<subseteq> S \<union> T \<longrightarrow> C \<subseteq> S \<or> C \<subseteq> T)" (is "?lhs=?rhs")
proof
assume ?lhs then show ?rhs
using connectedin_subset_topspace connectedin_subset_separated_union by blast
next
assume ?rhs
then show ?lhs
by (metis closure_of_subset connectedin_separation dual_order.eq_iff inf.orderE separatedin_def sup.boundedE)
qed
lemma connectedin_clopen_cases:
"\<lbrakk>connectedin X C; closedin X T; openin X T\<rbrakk> \<Longrightarrow> C \<subseteq> T \<or> disjnt C T"
by (metis Diff_eq_empty_iff Int_empty_right clopenin_eq_frontier_of connectedin_Int_frontier_of disjnt_def)
lemma connected_space_quotient_map_image:
"\<lbrakk>quotient_map X X' q; connected_space X\<rbrakk> \<Longrightarrow> connected_space X'"
by (metis connectedin_continuous_map_image connectedin_topspace quotient_imp_continuous_map quotient_imp_surjective_map)
lemma connected_space_retraction_map_image:
"\<lbrakk>retraction_map X X' r; connected_space X\<rbrakk> \<Longrightarrow> connected_space X'"
using connected_space_quotient_map_image retraction_imp_quotient_map by blast
lemma connectedin_imp_perfect_gen:
assumes X: "t1_space X" and S: "connectedin X S" and nontriv: "\<nexists>a. S = {a}"
shows "S \<subseteq> X derived_set_of S"
unfolding derived_set_of_def
proof (intro subsetI CollectI conjI strip)
show XS: "x \<in> topspace X" if "x \<in> S" for x
using that S connectedin by fastforce
show "\<exists>y. y \<noteq> x \<and> y \<in> S \<and> y \<in> T"
if "x \<in> S" and "x \<in> T \<and> openin X T" for x T
proof -
have opeXx: "openin X (topspace X - {x})"
by (meson X openin_topspace t1_space_openin_delete_alt)
moreover
have "S \<subseteq> T \<union> (topspace X - {x})"
using XS that(2) by auto
moreover have "(topspace X - {x}) \<inter> S \<noteq> {}"
by (metis Diff_triv S connectedin double_diff empty_subsetI inf_commute insert_subsetI nontriv that(1))
ultimately show ?thesis
using that connectedinD [OF S, of T "topspace X - {x}"]
by blast
qed
qed
lemma connectedin_imp_perfect:
"\<lbrakk>Hausdorff_space X; connectedin X S; \<nexists>a. S = {a}\<rbrakk> \<Longrightarrow> S \<subseteq> X derived_set_of S"
by (simp add: Hausdorff_imp_t1_space connectedin_imp_perfect_gen)
subsection\<open>The notion of "separated between" (complement of "connected between")\<close>
definition separated_between
where "separated_between X S T \<equiv>
\<exists>U V. openin X U \<and> openin X V \<and> U \<union> V = topspace X \<and> disjnt U V \<and> S \<subseteq> U \<and> T \<subseteq> V"
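text \<open>As a simple illustration, in the discrete topology on a set U any two disjoint
  subsets S and T of U are separated between: S and its complement within U provide the
  required disjoint open cover. In a connected space, by contrast, only the empty set can
  be separated from anything, as shown towards the end of this subsection.\<close>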
lemma separated_between_alt:
"separated_between X S T \<longleftrightarrow>
(\<exists>U V. closedin X U \<and> closedin X V \<and> U \<union> V = topspace X \<and> disjnt U V \<and> S \<subseteq> U \<and> T \<subseteq> V)"
unfolding separated_between_def
by (metis separatedin_open_sets separation_closedin_Un_gen subtopology_topspace
separatedin_closed_sets separation_openin_Un_gen)
lemma separated_between:
"separated_between X S T \<longleftrightarrow>
(\<exists>U. closedin X U \<and> openin X U \<and> S \<subseteq> U \<and> T \<subseteq> topspace X - U)"
unfolding separated_between_def closedin_def disjnt_def
by (smt (verit, del_insts) Diff_cancel Diff_disjoint Diff_partition Un_Diff Un_Diff_Int openin_subset)
lemma separated_between_mono:
"\<lbrakk>separated_between X S T; S' \<subseteq> S; T' \<subseteq> T\<rbrakk> \<Longrightarrow> separated_between X S' T'"
by (meson order.trans separated_between)
lemma separated_between_refl:
"separated_between X S S \<longleftrightarrow> S = {}"
unfolding separated_between_def
by (metis Un_empty_right disjnt_def disjnt_empty2 disjnt_subset2 disjnt_sym le_iff_inf openin_empty openin_topspace)
lemma separated_between_sym:
"separated_between X S T \<longleftrightarrow> separated_between X T S"
by (metis disjnt_sym separated_between_alt sup_commute)
lemma separated_between_imp_subset:
"separated_between X S T \<Longrightarrow> S \<subseteq> topspace X \<and> T \<subseteq> topspace X"
by (metis le_supI1 le_supI2 separated_between_def)
lemma separated_between_empty:
"(separated_between X {} S \<longleftrightarrow> S \<subseteq> topspace X) \<and> (separated_between X S {} \<longleftrightarrow> S \<subseteq> topspace X)"
by (metis Diff_empty bot.extremum closedin_empty openin_empty separated_between separated_between_imp_subset separated_between_sym)
lemma separated_between_Un:
"separated_between X S (T \<union> U) \<longleftrightarrow> separated_between X S T \<and> separated_between X S U"
by (auto simp: separated_between)
lemma separated_between_Un':
"separated_between X (S \<union> T) U \<longleftrightarrow> separated_between X S U \<and> separated_between X T U"
by (simp add: separated_between_Un separated_between_sym)
lemma separated_between_imp_disjoint:
"separated_between X S T \<Longrightarrow> disjnt S T"
by (meson disjnt_iff separated_between_def subsetD)
lemma separated_between_imp_separatedin:
"separated_between X S T \<Longrightarrow> separatedin X S T"
by (meson separated_between_def separatedin_mono separatedin_open_sets)
lemma separated_between_full:
assumes "S \<union> T = topspace X"
shows "separated_between X S T \<longleftrightarrow> disjnt S T \<and> closedin X S \<and> openin X S \<and> closedin X T \<and> openin X T"
proof -
have "separated_between X S T \<longrightarrow> separatedin X S T"
by (simp add: separated_between_imp_separatedin)
then show ?thesis
unfolding separated_between_def
by (metis assms separation_closedin_Un_gen separation_openin_Un_gen subset_refl subtopology_topspace)
qed
lemma separated_between_eq_separatedin:
"S \<union> T = topspace X \<Longrightarrow> (separated_between X S T \<longleftrightarrow> separatedin X S T)"
by (simp add: separated_between_full separatedin_full)
lemma separated_between_pointwise_left:
assumes "compactin X S"
shows "separated_between X S T \<longleftrightarrow>
(S = {} \<longrightarrow> T \<subseteq> topspace X) \<and> (\<forall>x \<in> S. separated_between X {x} T)" (is "?lhs=?rhs")
proof
assume ?lhs then show ?rhs
using separated_between_imp_subset separated_between_mono by fastforce
next
assume R: ?rhs
then have "T \<subseteq> topspace X"
by (meson equals0I separated_between_imp_subset)
show ?lhs
proof -
obtain U where U: "\<forall>x \<in> S. openin X (U x)"
"\<forall>x \<in> S. \<exists>V. openin X V \<and> U x \<union> V = topspace X \<and> disjnt (U x) V \<and> {x} \<subseteq> U x \<and> T \<subseteq> V"
using R unfolding separated_between_def by metis
then have "S \<subseteq> \<Union>(U ` S)"
by blast
then obtain K where "finite K" "K \<subseteq> S" and K: "S \<subseteq> (\<Union>i\<in>K. U i)"
using assms U unfolding compactin_def by (smt (verit) finite_subset_image imageE)
show ?thesis
unfolding separated_between
proof (intro conjI exI)
have "\<And>x. x \<in> K \<Longrightarrow> closedin X (U x)"
by (smt (verit) \<open>K \<subseteq> S\<close> Diff_cancel U(2) Un_Diff Un_Diff_Int disjnt_def openin_closedin_eq subsetD)
then show "closedin X (\<Union> (U ` K))"
by (metis (mono_tags, lifting) \<open>finite K\<close> closedin_Union finite_imageI image_iff)
show "openin X (\<Union> (U ` K))"
using U(1) \<open>K \<subseteq> S\<close> by blast
show "S \<subseteq> \<Union> (U ` K)"
by (simp add: K)
have "\<And>x i. \<lbrakk>x \<in> T; i \<in> K; x \<in> U i\<rbrakk> \<Longrightarrow> False"
by (meson U(2) \<open>K \<subseteq> S\<close> disjnt_iff subsetD)
then show "T \<subseteq> topspace X - \<Union> (U ` K)"
using \<open>T \<subseteq> topspace X\<close> by auto
qed
qed
qed
lemma separated_between_pointwise_right:
"compactin X T
\<Longrightarrow> separated_between X S T \<longleftrightarrow> (T = {} \<longrightarrow> S \<subseteq> topspace X) \<and> (\<forall>y \<in> T. separated_between X S {y})"
by (meson separated_between_pointwise_left separated_between_sym)
lemma separated_between_closure_of:
"S \<subseteq> topspace X \<Longrightarrow> separated_between X (X closure_of S) T \<longleftrightarrow> separated_between X S T"
by (meson closure_of_minimal_eq separated_between_alt)
lemma separated_between_closure_of':
"T \<subseteq> topspace X \<Longrightarrow> separated_between X S (X closure_of T) \<longleftrightarrow> separated_between X S T"
by (meson separated_between_closure_of separated_between_sym)
lemma separated_between_closure_of_eq:
"separated_between X S T \<longleftrightarrow> S \<subseteq> topspace X \<and> separated_between X (X closure_of S) T"
by (metis separated_between_closure_of separated_between_imp_subset)
lemma separated_between_closure_of_eq':
"separated_between X S T \<longleftrightarrow> T \<subseteq> topspace X \<and> separated_between X S (X closure_of T)"
by (metis separated_between_closure_of' separated_between_imp_subset)
lemma separated_between_frontier_of_eq':
"separated_between X S T \<longleftrightarrow>
T \<subseteq> topspace X \<and> disjnt S T \<and> separated_between X S (X frontier_of T)" (is "?lhs=?rhs")
proof
assume ?lhs then show ?rhs
by (metis interior_of_union_frontier_of separated_between_Un separated_between_closure_of_eq'
separated_between_imp_disjoint)
next
assume R: ?rhs
then obtain U where U: "closedin X U" "openin X U" "S \<subseteq> U" "X closure_of T - X interior_of T \<subseteq> topspace X - U"
by (metis frontier_of_def separated_between)
show ?lhs
proof (rule separated_between_mono [of _ S "X closure_of T"])
have "separated_between X S T"
unfolding separated_between
proof (intro conjI exI)
show "S \<subseteq> U - T" "T \<subseteq> topspace X - (U - T)"
using R U(3) by (force simp: disjnt_iff)+
have "T \<subseteq> X closure_of T"
by (simp add: R closure_of_subset)
then have *: "U - T = U - X interior_of T"
using U(4) interior_of_subset by fastforce
then show "closedin X (U - T)"
by (simp add: U(1) closedin_diff)
have "U \<inter> X frontier_of T = {}"
using U(4) frontier_of_def by fastforce
then show "openin X (U - T)"
by (metis * Diff_Un U(2) Un_Diff_Int Un_Int_eq(1) closedin_closure_of interior_of_union_frontier_of openin_diff sup_bot_right)
qed
then show "separated_between X S (X closure_of T)"
by (simp add: R separated_between_closure_of')
qed (auto simp: R closure_of_subset)
qed
lemma separated_between_frontier_of_eq:
"separated_between X S T \<longleftrightarrow> S \<subseteq> topspace X \<and> disjnt S T \<and> separated_between X (X frontier_of S) T"
by (metis disjnt_sym separated_between_frontier_of_eq' separated_between_sym)
lemma separated_between_frontier_of:
"\<lbrakk>S \<subseteq> topspace X; disjnt S T\<rbrakk>
\<Longrightarrow> (separated_between X (X frontier_of S) T \<longleftrightarrow> separated_between X S T)"
using separated_between_frontier_of_eq by blast
lemma separated_between_frontier_of':
"\<lbrakk>T \<subseteq> topspace X; disjnt S T\<rbrakk>
\<Longrightarrow> (separated_between X S (X frontier_of T) \<longleftrightarrow> separated_between X S T)"
using separated_between_frontier_of_eq' by auto
lemma connected_space_separated_between:
"connected_space X \<longleftrightarrow> (\<forall>S T. separated_between X S T \<longrightarrow> S = {} \<or> T = {})" (is "?lhs=?rhs")
proof
assume ?lhs then show ?rhs
by (metis Diff_cancel connected_space_clopen_in separated_between subset_empty)
next
assume ?rhs then show ?lhs
by (meson connected_space_eq_not_separated separated_between_eq_separatedin)
qed
lemma connected_space_imp_separated_between_trivial:
"connected_space X
\<Longrightarrow> (separated_between X S T \<longleftrightarrow> S = {} \<and> T \<subseteq> topspace X \<or> S \<subseteq> topspace X \<and> T = {})"
by (metis connected_space_separated_between separated_between_empty)
subsection\<open>Connected components\<close>
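text \<open>Recall that the connected components of a space are its maximal connected subsets;
  they are pairwise disjoint and cover the topspace. For a discrete topology the
  components are exactly the singletons, as proved below.\<close>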
lemma connected_component_of_subtopology_eq:
"connected_component_of (subtopology X U) a = connected_component_of X a \<longleftrightarrow>
connected_component_of_set X a \<subseteq> U"
by (force simp: connected_component_of_set connectedin_subtopology connected_component_of_def fun_eq_iff subset_iff)
lemma connected_components_of_subtopology:
assumes "C \<in> connected_components_of X" "C \<subseteq> U"
shows "C \<in> connected_components_of (subtopology X U)"
proof -
obtain a where a: "connected_component_of_set X a \<subseteq> U" and "a \<in> topspace X"
and Ceq: "C = connected_component_of_set X a"
using assms by (force simp: connected_components_of_def)
then have "a \<in> U"
by (simp add: connected_component_of_refl in_mono)
then have "connected_component_of_set X a = connected_component_of_set (subtopology X U) a"
by (metis a connected_component_of_subtopology_eq)
then show ?thesis
by (simp add: Ceq \<open>a \<in> U\<close> \<open>a \<in> topspace X\<close> connected_component_in_connected_components_of)
qed
thm connected_space_iff_components_eq
lemma open_in_finite_connected_components:
assumes "finite(connected_components_of X)" "C \<in> connected_components_of X"
shows "openin X C"
proof -
have "closedin X (topspace X - C)"
by (metis DiffD1 assms closedin_Union closedin_connected_components_of complement_connected_components_of_Union finite_Diff)
then show ?thesis
by (simp add: assms connected_components_of_subset openin_closedin)
qed
thm connected_component_of_eq_overlap
lemma connected_components_of_disjoint:
assumes "C \<in> connected_components_of X" "C' \<in> connected_components_of X"
shows "(disjnt C C' \<longleftrightarrow> (C \<noteq> C'))"
proof -
have "C \<noteq> {}"
using nonempty_connected_components_of assms by blast
with assms show ?thesis
by (metis disjnt_self_iff_empty pairwiseD pairwise_disjoint_connected_components_of)
qed
lemma connected_components_of_overlap:
"\<lbrakk>C \<in> connected_components_of X; C' \<in> connected_components_of X\<rbrakk> \<Longrightarrow> C \<inter> C' \<noteq> {} \<longleftrightarrow> C = C'"
by (meson connected_components_of_disjoint disjnt_def)
lemma pairwise_separated_connected_components_of:
"pairwise (separatedin X) (connected_components_of X)"
by (simp add: closedin_connected_components_of connected_components_of_disjoint pairwiseI separatedin_closed_sets)
lemma finite_connected_components_of_finite:
"finite(topspace X) \<Longrightarrow> finite(connected_components_of X)"
by (simp add: Union_connected_components_of finite_UnionD)
lemma connected_component_of_unique:
"\<lbrakk>x \<in> C; connectedin X C; \<And>C'. x \<in> C' \<and> connectedin X C' \<Longrightarrow> C' \<subseteq> C\<rbrakk>
\<Longrightarrow> connected_component_of_set X x = C"
by (meson connected_component_of_maximal connectedin_connected_component_of subsetD subset_antisym)
lemma closedin_connected_component_of_subtopology:
"\<lbrakk>C \<in> connected_components_of (subtopology X s); X closure_of C \<subseteq> s\<rbrakk> \<Longrightarrow> closedin X C"
by (metis closedin_Int_closure_of closedin_connected_components_of closure_of_eq inf.absorb_iff2)
lemma connected_component_of_discrete_topology:
"connected_component_of_set (discrete_topology U) x = (if x \<in> U then {x} else {})"
by (simp add: locally_path_connected_space_discrete_topology flip: path_component_eq_connected_component_of)
lemma connected_components_of_discrete_topology:
"connected_components_of (discrete_topology U) = (\<lambda>x. {x}) ` U"
by (simp add: connected_component_of_discrete_topology connected_components_of_def)
lemma connected_component_of_continuous_image:
"\<lbrakk>continuous_map X Y f; connected_component_of X x y\<rbrakk>
\<Longrightarrow> connected_component_of Y (f x) (f y)"
by (meson connected_component_of_def connectedin_continuous_map_image image_eqI)
lemma homeomorphic_map_connected_component_of:
assumes "homeomorphic_map X Y f" and x: "x \<in> topspace X"
shows "connected_component_of_set Y (f x) = f ` (connected_component_of_set X x)"
proof -
obtain g where g: "continuous_map X Y f"
"continuous_map Y X g " "\<And>x. x \<in> topspace X \<Longrightarrow> g (f x) = x"
"\<And>y. y \<in> topspace Y \<Longrightarrow> f (g y) = y"
using assms(1) homeomorphic_map_maps homeomorphic_maps_def by fastforce
show ?thesis
using connected_component_in_topspace [of Y] x g
connected_component_of_continuous_image [of X Y f]
connected_component_of_continuous_image [of Y X g]
by force
qed
lemma homeomorphic_map_connected_components_of:
assumes "homeomorphic_map X Y f"
shows "connected_components_of Y = (image f) ` (connected_components_of X)"
proof -
have "topspace Y = f ` topspace X"
by (metis assms homeomorphic_imp_surjective_map)
with homeomorphic_map_connected_component_of [OF assms] show ?thesis
by (auto simp: connected_components_of_def image_iff)
qed
lemma connected_component_of_pair:
"connected_component_of_set (prod_topology X Y) (x,y) =
connected_component_of_set X x \<times> connected_component_of_set Y y"
proof (cases "x \<in> topspace X \<and> y \<in> topspace Y")
case True
show ?thesis
proof (rule connected_component_of_unique)
show "(x, y) \<in> connected_component_of_set X x \<times> connected_component_of_set Y y"
using True by (simp add: connected_component_of_refl)
show "connectedin (prod_topology X Y) (connected_component_of_set X x \<times> connected_component_of_set Y y)"
by (metis connectedin_Times connectedin_connected_component_of)
show "C \<subseteq> connected_component_of_set X x \<times> connected_component_of_set Y y"
if "(x, y) \<in> C \<and> connectedin (prod_topology X Y) C" for C
using that unfolding connected_component_of_def
apply clarsimp
by (metis (no_types) connectedin_continuous_map_image continuous_map_fst continuous_map_snd fst_conv imageI snd_conv)
qed
next
case False then show ?thesis
by (metis Sigma_empty1 Sigma_empty2 connected_component_of_eq_empty mem_Sigma_iff topspace_prod_topology)
qed
lemma connected_components_of_prod_topology:
"connected_components_of (prod_topology X Y) =
{C \<times> D |C D. C \<in> connected_components_of X \<and> D \<in> connected_components_of Y}" (is "?lhs=?rhs")
proof
show "?lhs \<subseteq> ?rhs"
apply (clarsimp simp: connected_components_of_def)
by (metis (no_types) connected_component_of_pair imageI)
next
show "?rhs \<subseteq> ?lhs"
using connected_component_of_pair
by (fastforce simp: connected_components_of_def)
qed
lemma connected_component_of_product_topology:
"connected_component_of_set (product_topology X I) x =
(if x \<in> extensional I then PiE I (\<lambda>i. connected_component_of_set (X i) (x i)) else {})"
(is "?lhs = If _ ?R _")
proof (cases "x \<in> topspace(product_topology X I)")
case True
have "?lhs = (\<Pi>\<^sub>E i\<in>I. connected_component_of_set (X i) (x i))"
if xX: "\<And>i. i\<in>I \<Longrightarrow> x i \<in> topspace (X i)" and ext: "x \<in> extensional I"
proof (rule connected_component_of_unique)
show "x \<in> ?R"
by (simp add: PiE_iff connected_component_of_refl local.ext xX)
show "connectedin (product_topology X I) ?R"
by (simp add: connectedin_PiE connectedin_connected_component_of)
show "C \<subseteq> ?R"
if "x \<in> C \<and> connectedin (product_topology X I) C" for C
proof -
have "C \<subseteq> extensional I"
using PiE_def connectedin_subset_topspace that by fastforce
have "\<And>y. y \<in> C \<Longrightarrow> y \<in> (\<Pi> i\<in>I. connected_component_of_set (X i) (x i))"
apply (simp add: connected_component_of_def Pi_def)
by (metis connectedin_continuous_map_image continuous_map_product_projection imageI that)
then show ?thesis
using PiE_def \<open>C \<subseteq> extensional I\<close> by fastforce
qed
qed
with True show ?thesis
by (simp add: PiE_iff)
next
case False
then show ?thesis
apply (simp add: PiE_iff)
by (smt (verit) Collect_empty_eq False PiE_eq_empty_iff PiE_iff connected_component_of_eq_empty)
qed
lemma connected_components_of_product_topology:
"connected_components_of (product_topology X I) =
{PiE I B |B. \<forall>i \<in> I. B i \<in> connected_components_of(X i)}" (is "?lhs=?rhs")
proof
show "?lhs \<subseteq> ?rhs"
by (auto simp: connected_components_of_def connected_component_of_product_topology PiE_iff)
show "?rhs \<subseteq> ?lhs"
proof
fix F
assume "F \<in> ?rhs"
then obtain B where Feq: "F = Pi\<^sub>E I B" and
"\<forall>i\<in>I. \<exists>x\<in>topspace (X i). B i = connected_component_of_set (X i) x"
by (force simp: connected_components_of_def connected_component_of_product_topology image_iff)
then obtain f where
f: "\<And>i. i \<in> I \<Longrightarrow> f i \<in> topspace (X i) \<and> B i = connected_component_of_set (X i) (f i)"
by metis
then have "(\<lambda>i\<in>I. f i) \<in> ((\<Pi>\<^sub>E i\<in>I. topspace (X i)) \<inter> extensional I)"
by simp
with f show "F \<in> ?lhs"
unfolding Feq connected_components_of_def connected_component_of_product_topology image_iff
by (smt (verit, del_insts) PiE_cong restrict_PiE_iff restrict_apply' restrict_extensional topspace_product_topology)
qed
qed
subsection \<open>Monotone maps (in the general topological sense)\<close>
definition monotone_map
where "monotone_map X Y f ==
f ` (topspace X) \<subseteq> topspace Y \<and>
(\<forall>y \<in> topspace Y. connectedin X {x \<in> topspace X. f x = y})"
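text \<open>Informally, a map is monotone when every fibre over a point of the codomain is a
  connected subset of the domain. Any injective map into the topspace is therefore
  monotone, its fibres being empty or singletons (a fact proved below), whereas the
  squaring map on the real line is not monotone: its fibre over 1 consists of the two
  points 1 and minus 1.\<close>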
lemma monotone_map:
"monotone_map X Y f \<longleftrightarrow>
f ` (topspace X) \<subseteq> topspace Y \<and> (\<forall>y. connectedin X {x \<in> topspace X. f x = y})"
apply (simp add: monotone_map_def)
by (metis (mono_tags, lifting) connectedin_empty [of X] Collect_empty_eq image_subset_iff)
lemma monotone_map_in_subtopology:
"monotone_map X (subtopology Y S) f \<longleftrightarrow> monotone_map X Y f \<and> f ` (topspace X) \<subseteq> S"
by (smt (verit, del_insts) le_inf_iff monotone_map topspace_subtopology)
lemma monotone_map_from_subtopology:
assumes "monotone_map X Y f"
"\<And>x y. x \<in> topspace X \<and> y \<in> topspace X \<and> x \<in> S \<and> f x = f y \<Longrightarrow> y \<in> S"
shows "monotone_map (subtopology X S) Y f"
using assms
unfolding monotone_map_def connectedin_subtopology
by (smt (verit, del_insts) Collect_cong Collect_empty_eq IntE IntI connectedin_empty image_subset_iff mem_Collect_eq subsetI topspace_subtopology)
lemma monotone_map_restriction:
"monotone_map X Y f \<and> {x \<in> topspace X. f x \<in> v} = u
\<Longrightarrow> monotone_map (subtopology X u) (subtopology Y v) f"
by (smt (verit, best) IntI Int_Collect image_subset_iff mem_Collect_eq monotone_map monotone_map_from_subtopology topspace_subtopology)
lemma injective_imp_monotone_map:
assumes "f ` topspace X \<subseteq> topspace Y" "inj_on f (topspace X)"
shows "monotone_map X Y f"
unfolding monotone_map_def
proof (intro conjI assms strip)
fix y
assume "y \<in> topspace Y"
then have "{x \<in> topspace X. f x = y} = {} \<or> (\<exists>a \<in> topspace X. {x \<in> topspace X. f x = y} = {a})"
using assms(2) unfolding inj_on_def by blast
then show "connectedin X {x \<in> topspace X. f x = y}"
by (metis (no_types, lifting) connectedin_empty connectedin_sing)
qed
lemma embedding_imp_monotone_map:
"embedding_map X Y f \<Longrightarrow> monotone_map X Y f"
by (metis (no_types) embedding_map_def homeomorphic_eq_everything_map inf.absorb_iff2 injective_imp_monotone_map topspace_subtopology)
lemma section_imp_monotone_map:
"section_map X Y f \<Longrightarrow> monotone_map X Y f"
by (simp add: embedding_imp_monotone_map section_imp_embedding_map)
lemma homeomorphic_imp_monotone_map:
"homeomorphic_map X Y f \<Longrightarrow> monotone_map X Y f"
by (meson section_and_retraction_eq_homeomorphic_map section_imp_monotone_map)
proposition connected_space_monotone_quotient_map_preimage:
assumes f: "monotone_map X Y f" "quotient_map X Y f" and "connected_space Y"
shows "connected_space X"
proof (rule ccontr)
assume "\<not> connected_space X"
then obtain U V where "openin X U" "openin X V" "U \<inter> V = {}"
"U \<noteq> {}" "V \<noteq> {}" and topUV: "topspace X \<subseteq> U \<union> V"
by (auto simp: connected_space_def)
then have UVsub: "U \<subseteq> topspace X" "V \<subseteq> topspace X"
by (auto simp: openin_subset)
have "\<not> connected_space Y"
unfolding connected_space_def not_not
proof (intro exI conjI)
show "topspace Y \<subseteq> f`U \<union> f`V"
by (metis f(2) image_Un quotient_imp_surjective_map subset_Un_eq topUV)
show "f`U \<noteq> {}"
by (simp add: \<open>U \<noteq> {}\<close>)
show "(f`V) \<noteq> {}"
by (simp add: \<open>V \<noteq> {}\<close>)
have *: "y \<notin> f ` V" if "y \<in> f ` U" for y
proof -
have \<section>: "connectedin X {x \<in> topspace X. f x = y}"
using f(1) monotone_map by fastforce
show ?thesis
using connectedinD [OF \<section> \<open>openin X U\<close> \<open>openin X V\<close>] UVsub topUV \<open>U \<inter> V = {}\<close> that
by (force simp: disjoint_iff)
qed
then show "f`U \<inter> f`V = {}"
by blast
show "openin Y (f`U)"
using f \<open>openin X U\<close> topUV * unfolding quotient_map_saturated_open by force
show "openin Y (f`V)"
using f \<open>openin X V\<close> topUV * unfolding quotient_map_saturated_open by force
qed
then show False
by (simp add: assms)
qed
lemma connectedin_monotone_quotient_map_preimage:
assumes "monotone_map X Y f" "quotient_map X Y f" "connectedin Y C" "openin Y C \<or> closedin Y C"
shows "connectedin X {x \<in> topspace X. f x \<in> C}"
proof -
have "connected_space (subtopology X {x \<in> topspace X. f x \<in> C})"
proof -
have "connected_space (subtopology Y C)"
using \<open>connectedin Y C\<close> connectedin_def by blast
moreover have "quotient_map (subtopology X {a \<in> topspace X. f a \<in> C}) (subtopology Y C) f"
by (simp add: assms quotient_map_restriction)
ultimately show ?thesis
using \<open>monotone_map X Y f\<close> connected_space_monotone_quotient_map_preimage monotone_map_restriction by blast
qed
then show ?thesis
by (simp add: connectedin_def)
qed
lemma monotone_open_map:
assumes "continuous_map X Y f" "open_map X Y f" and fim: "f ` (topspace X) = topspace Y"
shows "monotone_map X Y f \<longleftrightarrow> (\<forall>C. connectedin Y C \<longrightarrow> connectedin X {x \<in> topspace X. f x \<in> C})"
(is "?lhs=?rhs")
proof
assume L: ?lhs
show ?rhs
unfolding connectedin_def
proof (intro strip conjI)
fix C
assume C: "C \<subseteq> topspace Y \<and> connected_space (subtopology Y C)"
show "connected_space (subtopology X {x \<in> topspace X. f x \<in> C})"
proof (rule connected_space_monotone_quotient_map_preimage)
show "monotone_map (subtopology X {x \<in> topspace X. f x \<in> C}) (subtopology Y C) f"
by (simp add: L monotone_map_restriction)
show "quotient_map (subtopology X {x \<in> topspace X. f x \<in> C}) (subtopology Y C) f"
proof (rule continuous_open_imp_quotient_map)
show "continuous_map (subtopology X {x \<in> topspace X. f x \<in> C}) (subtopology Y C) f"
using assms continuous_map_from_subtopology continuous_map_in_subtopology by fastforce
qed (use open_map_restriction assms in fastforce)+
qed (simp add: C)
qed auto
next
assume ?rhs
then have "\<forall>y. connectedin Y {y} \<longrightarrow> connectedin X {x \<in> topspace X. f x = y}"
by (smt (verit) Collect_cong singletonD singletonI)
then show ?lhs
by (simp add: fim monotone_map_def)
qed
lemma monotone_closed_map:
assumes "continuous_map X Y f" "closed_map X Y f" and fim: "f ` (topspace X) = topspace Y"
shows "monotone_map X Y f \<longleftrightarrow> (\<forall>C. connectedin Y C \<longrightarrow> connectedin X {x \<in> topspace X. f x \<in> C})"
(is "?lhs=?rhs")
proof
assume L: ?lhs
show ?rhs
unfolding connectedin_def
proof (intro strip conjI)
fix C
assume C: "C \<subseteq> topspace Y \<and> connected_space (subtopology Y C)"
show "connected_space (subtopology X {x \<in> topspace X. f x \<in> C})"
proof (rule connected_space_monotone_quotient_map_preimage)
show "monotone_map (subtopology X {x \<in> topspace X. f x \<in> C}) (subtopology Y C) f"
by (simp add: L monotone_map_restriction)
show "quotient_map (subtopology X {x \<in> topspace X. f x \<in> C}) (subtopology Y C) f"
proof (rule continuous_closed_imp_quotient_map)
show "continuous_map (subtopology X {x \<in> topspace X. f x \<in> C}) (subtopology Y C) f"
using assms continuous_map_from_subtopology continuous_map_in_subtopology by fastforce
qed (use closed_map_restriction assms in fastforce)+
qed (simp add: C)
qed auto
next
assume ?rhs
then have "\<forall>y. connectedin Y {y} \<longrightarrow> connectedin X {x \<in> topspace X. f x = y}"
by (smt (verit) Collect_cong singletonD singletonI)
then show ?lhs
by (simp add: fim monotone_map_def)
qed
subsection\<open>Other countability properties\<close>
definition second_countable
where "second_countable X \<equiv>
\<exists>\<B>. countable \<B> \<and> (\<forall>V \<in> \<B>. openin X V) \<and>
(\<forall>U x. openin X U \<and> x \<in> U \<longrightarrow> (\<exists>V \<in> \<B>. x \<in> V \<and> V \<subseteq> U))"
definition first_countable
where "first_countable X \<equiv>
\<forall>x \<in> topspace X.
\<exists>\<B>. countable \<B> \<and> (\<forall>V \<in> \<B>. openin X V) \<and>
(\<forall>U. openin X U \<and> x \<in> U \<longrightarrow> (\<exists>V \<in> \<B>. x \<in> V \<and> V \<subseteq> U))"
definition separable_space
where "separable_space X \<equiv>
\<exists>C. countable C \<and> C \<subseteq> topspace X \<and> X closure_of C = topspace X"
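text \<open>The real line with its euclidean topology illustrates all three notions: the open
  intervals with rational endpoints form a countable base, so it is second countable and
  hence first countable, and the rationals form a countable dense subset, so it is also
  separable. An uncountable discrete space, in contrast, is first countable but neither
  second countable nor separable, as the lemmas below for discrete topologies show.\<close>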
lemma second_countable:
"second_countable X \<longleftrightarrow>
(\<exists>\<B>. countable \<B> \<and> openin X = arbitrary union_of (\<lambda>x. x \<in> \<B>))"
by (smt (verit) openin_topology_base_unique second_countable_def)
lemma second_countable_subtopology:
assumes "second_countable X"
shows "second_countable (subtopology X S)"
proof -
obtain \<B> where \<B>: "countable \<B>" "\<And>V. V \<in> \<B> \<Longrightarrow> openin X V"
"\<And>U x. openin X U \<and> x \<in> U \<longrightarrow> (\<exists>V \<in> \<B>. x \<in> V \<and> V \<subseteq> U)"
using assms by (auto simp: second_countable_def)
show ?thesis
unfolding second_countable_def
proof (intro exI conjI)
show "\<forall>V\<in>((\<inter>)S) ` \<B>. openin (subtopology X S) V"
using openin_subtopology_Int2 \<B> by blast
show "\<forall>U x. openin (subtopology X S) U \<and> x \<in> U \<longrightarrow> (\<exists>V\<in>((\<inter>)S) ` \<B>. x \<in> V \<and> V \<subseteq> U)"
using \<B> unfolding openin_subtopology
by (smt (verit, del_insts) IntI image_iff inf_commute inf_le1 subset_iff)
qed (use \<B> in auto)
qed
lemma second_countable_discrete_topology:
"second_countable(discrete_topology U) \<longleftrightarrow> countable U" (is "?lhs=?rhs")
proof
assume L: ?lhs
then
obtain \<B> where \<B>: "countable \<B>" "\<And>V. V \<in> \<B> \<Longrightarrow> V \<subseteq> U"
"\<And>W x. W \<subseteq> U \<and> x \<in> W \<longrightarrow> (\<exists>V \<in> \<B>. x \<in> V \<and> V \<subseteq> W)"
by (auto simp: second_countable_def)
then have "{x} \<in> \<B>" if "x \<in> U" for x
by (metis empty_subsetI insertCI insert_subset subset_antisym that)
then show ?rhs
by (smt (verit) countable_subset image_subsetI \<open>countable \<B>\<close> countable_image_inj_on [OF _ inj_singleton])
next
assume ?rhs
then show ?lhs
unfolding second_countable_def
by (rule_tac x="(\<lambda>x. {x}) ` U" in exI) auto
qed
lemma second_countable_open_map_image:
assumes "continuous_map X Y f" "open_map X Y f"
and fim: "f ` (topspace X) = topspace Y" and "second_countable X"
shows "second_countable Y"
proof -
have openXYf: "\<And>U. openin X U \<longrightarrow> openin Y (f ` U)"
using assms by (auto simp: open_map_def)
obtain \<B> where \<B>: "countable \<B>" "\<And>V. V \<in> \<B> \<Longrightarrow> openin X V"
and *: "\<And>U x. openin X U \<and> x \<in> U \<longrightarrow> (\<exists>V \<in> \<B>. x \<in> V \<and> V \<subseteq> U)"
using assms by (auto simp: second_countable_def)
show ?thesis
unfolding second_countable_def
proof (intro exI conjI strip)
fix V y
assume V: "openin Y V \<and> y \<in> V"
then obtain x where "x \<in> topspace X" and x: "f x = y"
by (metis fim image_iff openin_subset subsetD)
then obtain W where "W\<in>\<B>" "x \<in> W" "W \<subseteq> {x \<in> topspace X. f x \<in> V}"
using * [of "{x \<in> topspace X. f x \<in> V}" x] V assms openin_continuous_map_preimage
by force
then show "\<exists>W \<in> (image f) ` \<B>. y \<in> W \<and> W \<subseteq> V"
using x by auto
qed (use \<B> openXYf in auto)
qed
lemma homeomorphic_space_second_countability:
"X homeomorphic_space Y \<Longrightarrow> (second_countable X \<longleftrightarrow> second_countable Y)"
by (meson homeomorphic_eq_everything_map homeomorphic_space homeomorphic_space_sym second_countable_open_map_image)
lemma second_countable_retraction_map_image:
"\<lbrakk>retraction_map X Y r; second_countable X\<rbrakk> \<Longrightarrow> second_countable Y"
using hereditary_imp_retractive_property homeomorphic_space_second_countability second_countable_subtopology by blast
lemma second_countable_imp_first_countable:
"second_countable X \<Longrightarrow> first_countable X"
by (metis first_countable_def second_countable_def)
lemma first_countable_subtopology:
assumes "first_countable X"
shows "first_countable (subtopology X S)"
unfolding first_countable_def
proof
fix x
assume "x \<in> topspace (subtopology X S)"
then obtain \<B> where "countable \<B>" and \<B>: "\<And>V. V \<in> \<B> \<Longrightarrow> openin X V"
"\<And>U. openin X U \<and> x \<in> U \<longrightarrow> (\<exists>V \<in> \<B>. x \<in> V \<and> V \<subseteq> U)"
using assms first_countable_def by force
show "\<exists>\<B>. countable \<B> \<and> (\<forall>V\<in>\<B>. openin (subtopology X S) V) \<and> (\<forall>U. openin (subtopology X S) U \<and> x \<in> U \<longrightarrow> (\<exists>V\<in>\<B>. x \<in> V \<and> V \<subseteq> U))"
proof (intro exI conjI strip)
show "countable (((\<inter>)S) ` \<B>)"
using \<open>countable \<B>\<close> by blast
show "openin (subtopology X S) V" if "V \<in> ((\<inter>)S) ` \<B>" for V
using \<B> openin_subtopology_Int2 that by fastforce
show "\<exists>V\<in>((\<inter>)S) ` \<B>. x \<in> V \<and> V \<subseteq> U"
if "openin (subtopology X S) U \<and> x \<in> U" for U
using that \<B>(2) by (clarsimp simp: openin_subtopology) (meson le_infI2)
qed
qed
lemma first_countable_discrete_topology:
"first_countable (discrete_topology U)"
unfolding first_countable_def topspace_discrete_topology openin_discrete_topology
proof
fix x assume "x \<in> U"
show "\<exists>\<B>. countable \<B> \<and> (\<forall>V\<in>\<B>. V \<subseteq> U) \<and> (\<forall>Ua. Ua \<subseteq> U \<and> x \<in> Ua \<longrightarrow> (\<exists>V\<in>\<B>. x \<in> V \<and> V \<subseteq> Ua))"
using \<open>x \<in> U\<close> by (rule_tac x="{{x}}" in exI) auto
qed
lemma first_countable_open_map_image:
assumes "continuous_map X Y f" "open_map X Y f"
and fim: "f ` (topspace X) = topspace Y" and "first_countable X"
shows "first_countable Y"
unfolding first_countable_def
proof
fix y
assume "y \<in> topspace Y"
have openXYf: "\<And>U. openin X U \<longrightarrow> openin Y (f ` U)"
using assms by (auto simp: open_map_def)
then obtain x where x: "x \<in> topspace X" "f x = y"
by (metis \<open>y \<in> topspace Y\<close> fim imageE)
obtain \<B> where \<B>: "countable \<B>" "\<And>V. V \<in> \<B> \<Longrightarrow> openin X V"
and *: "\<And>U. openin X U \<and> x \<in> U \<longrightarrow> (\<exists>V \<in> \<B>. x \<in> V \<and> V \<subseteq> U)"
using assms x first_countable_def by force
show "\<exists>\<B>. countable \<B> \<and>
(\<forall>V\<in>\<B>. openin Y V) \<and> (\<forall>U. openin Y U \<and> y \<in> U \<longrightarrow> (\<exists>V\<in>\<B>. y \<in> V \<and> V \<subseteq> U))"
proof (intro exI conjI strip)
fix V assume "openin Y V \<and> y \<in> V"
then have "\<exists>W\<in>\<B>. x \<in> W \<and> W \<subseteq> {x \<in> topspace X. f x \<in> V}"
using * [of "{x \<in> topspace X. f x \<in> V}"] assms openin_continuous_map_preimage x
by fastforce
then show "\<exists>V' \<in> (image f) ` \<B>. y \<in> V' \<and> V' \<subseteq> V"
using image_mono x by auto
qed (use \<B> openXYf in force)+
qed
lemma homeomorphic_space_first_countability:
"X homeomorphic_space Y \<Longrightarrow> first_countable X \<longleftrightarrow> first_countable Y"
by (meson first_countable_open_map_image homeomorphic_eq_everything_map homeomorphic_space homeomorphic_space_sym)
lemma first_countable_retraction_map_image:
"\<lbrakk>retraction_map X Y r; first_countable X\<rbrakk> \<Longrightarrow> first_countable Y"
using first_countable_subtopology hereditary_imp_retractive_property homeomorphic_space_first_countability by blast
lemma separable_space_open_subset:
assumes "separable_space X" "openin X S"
shows "separable_space (subtopology X S)"
proof -
obtain C where C: "countable C" "C \<subseteq> topspace X" "X closure_of C = topspace X"
by (meson assms separable_space_def)
then have "\<And>x T. \<lbrakk>x \<in> topspace X; x \<in> T; openin (subtopology X S) T\<rbrakk>
\<Longrightarrow> \<exists>y. y \<in> S \<and> y \<in> C \<and> y \<in> T"
by (smt (verit) \<open>openin X S\<close> in_closure_of openin_open_subtopology subsetD)
with C \<open>openin X S\<close> show ?thesis
unfolding separable_space_def
by (rule_tac x="S \<inter> C" in exI) (force simp: in_closure_of)
qed
lemma separable_space_continuous_map_image:
assumes "separable_space X" "continuous_map X Y f"
and fim: "f ` (topspace X) = topspace Y"
shows "separable_space Y"
proof -
have cont: "\<And>S. f ` (X closure_of S) \<subseteq> Y closure_of f ` S"
by (simp add: assms continuous_map_image_closure_subset)
obtain C where C: "countable C" "C \<subseteq> topspace X" "X closure_of C = topspace X"
by (meson assms separable_space_def)
then show ?thesis
unfolding separable_space_def
by (metis cont fim closure_of_subset_topspace countable_image image_mono subset_antisym)
qed
lemma separable_space_quotient_map_image:
"\<lbrakk>quotient_map X Y q; separable_space X\<rbrakk> \<Longrightarrow> separable_space Y"
by (meson quotient_imp_continuous_map quotient_imp_surjective_map separable_space_continuous_map_image)
lemma separable_space_retraction_map_image:
"\<lbrakk>retraction_map X Y r; separable_space X\<rbrakk> \<Longrightarrow> separable_space Y"
using retraction_imp_quotient_map separable_space_quotient_map_image by blast
lemma homeomorphic_separable_space:
"X homeomorphic_space Y \<Longrightarrow> (separable_space X \<longleftrightarrow> separable_space Y)"
by (meson homeomorphic_eq_everything_map homeomorphic_maps_map homeomorphic_space_def separable_space_continuous_map_image)
lemma separable_space_discrete_topology:
"separable_space(discrete_topology U) \<longleftrightarrow> countable U"
by (metis countable_Int2 discrete_topology_closure_of dual_order.refl inf.orderE separable_space_def topspace_discrete_topology)
lemma second_countable_imp_separable_space:
assumes "second_countable X"
shows "separable_space X"
proof -
obtain \<B> where \<B>: "countable \<B>" "\<And>V. V \<in> \<B> \<Longrightarrow> openin X V"
and *: "\<And>U x. openin X U \<and> x \<in> U \<longrightarrow> (\<exists>V \<in> \<B>. x \<in> V \<and> V \<subseteq> U)"
using assms by (auto simp: second_countable_def)
obtain c where c: "\<And>V. \<lbrakk>V \<in> \<B>; V \<noteq> {}\<rbrakk> \<Longrightarrow> c V \<in> V"
by (metis all_not_in_conv)
then have **: "\<And>x. x \<in> topspace X \<Longrightarrow> x \<in> X closure_of c ` (\<B> - {{}})"
using * by (force simp: closure_of_def)
show ?thesis
unfolding separable_space_def
proof (intro exI conjI)
show "countable (c ` (\<B>-{{}}))"
using \<B>(1) by blast
show "(c ` (\<B>-{{}})) \<subseteq> topspace X"
using \<B>(2) c openin_subset by fastforce
show "X closure_of (c ` (\<B>-{{}})) = topspace X"
by (meson ** closure_of_subset_topspace subsetI subset_antisym)
qed
qed
lemma second_countable_imp_Lindelof_space:
assumes "second_countable X"
shows "Lindelof_space X"
unfolding Lindelof_space_def
proof clarify
fix \<U>
assume "\<forall>U \<in> \<U>. openin X U" and UU: "\<Union>\<U> = topspace X"
obtain \<B> where \<B>: "countable \<B>" "\<And>V. V \<in> \<B> \<Longrightarrow> openin X V"
and *: "\<And>U x. openin X U \<and> x \<in> U \<longrightarrow> (\<exists>V \<in> \<B>. x \<in> V \<and> V \<subseteq> U)"
using assms by (auto simp: second_countable_def)
define \<B>' where "\<B>' = {B \<in> \<B>. \<exists>U. U \<in> \<U> \<and> B \<subseteq> U}"
have \<B>': "countable \<B>'" "\<Union>\<B>' = \<Union>\<U>"
using \<B> using "*" \<open>\<forall>U\<in>\<U>. openin X U\<close> by (fastforce simp: \<B>'_def)+
have "\<And>b. \<exists>U. b \<in> \<B>' \<longrightarrow> U \<in> \<U> \<and> b \<subseteq> U"
by (simp add: \<B>'_def)
then obtain G where G: "\<And>b. b \<in> \<B>' \<longrightarrow> G b \<in> \<U> \<and> b \<subseteq> G b"
by metis
with \<B>' UU show "\<exists>\<V>. countable \<V> \<and> \<V> \<subseteq> \<U> \<and> \<Union>\<V> = topspace X"
by (rule_tac x="G ` \<B>'" in exI) fastforce
qed
subsection \<open>Neighbourhood bases EXTRAS\<close>
(* Neighbourhood bases (useful for "local" properties of various kinds). *)
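text \<open>For example, in a metric space the open balls of radius 1/n around a point, for
  positive integers n, form a countable neighbourhood base at that point; this is the
  usual way of seeing that metric topologies are first countable.\<close>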
lemma openin_topology_neighbourhood_base_unique:
"openin X = arbitrary union_of P \<longleftrightarrow>
(\<forall>u. P u \<longrightarrow> openin X u) \<and> neighbourhood_base_of P X"
by (smt (verit, best) open_neighbourhood_base_of openin_topology_base_unique)
lemma neighbourhood_base_at_topology_base:
" openin X = arbitrary union_of b
\<Longrightarrow> (neighbourhood_base_at x P X \<longleftrightarrow>
(\<forall>w. b w \<and> x \<in> w \<longrightarrow> (\<exists>u v. openin X u \<and> P v \<and> x \<in> u \<and> u \<subseteq> v \<and> v \<subseteq> w)))"
apply (simp add: neighbourhood_base_at_def)
by (smt (verit, del_insts) openin_topology_base_unique subset_trans)
lemma neighbourhood_base_of_unlocalized:
assumes "\<And>S t. P S \<and> openin X t \<and> (t \<noteq> {}) \<and> t \<subseteq> S \<Longrightarrow> P t"
shows "neighbourhood_base_of P X \<longleftrightarrow>
(\<forall>x \<in> topspace X. \<exists>u v. openin X u \<and> P v \<and> x \<in> u \<and> u \<subseteq> v \<and> v \<subseteq> topspace X)"
apply (simp add: neighbourhood_base_of_def)
by (smt (verit, ccfv_SIG) assms empty_iff neighbourhood_base_at_unlocalized)
lemma neighbourhood_base_at_discrete_topology:
"neighbourhood_base_at x P (discrete_topology u) \<longleftrightarrow> x \<in> u \<Longrightarrow> P {x}"
apply (simp add: neighbourhood_base_at_def)
by (smt (verit) empty_iff empty_subsetI insert_subset singletonI subsetD subset_singletonD)
lemma neighbourhood_base_of_discrete_topology:
"neighbourhood_base_of P (discrete_topology u) \<longleftrightarrow> (\<forall>x \<in> u. P {x})"
apply (simp add: neighbourhood_base_of_def)
using neighbourhood_base_at_discrete_topology[of _ P u]
by (metis empty_subsetI insert_subset neighbourhood_base_at_def openin_discrete_topology singletonI)
lemma second_countable_neighbourhood_base_alt:
"second_countable X \<longleftrightarrow>
(\<exists>\<B>. countable \<B> \<and> (\<forall>V \<in> \<B>. openin X V) \<and> neighbourhood_base_of (\<lambda>A. A\<in>\<B>) X)"
by (metis (full_types) openin_topology_neighbourhood_base_unique second_countable)
lemma first_countable_neighbourhood_base_alt:
"first_countable X \<longleftrightarrow>
(\<forall>x \<in> topspace X. \<exists>\<B>. countable \<B> \<and> (\<forall>V \<in> \<B>. openin X V) \<and> neighbourhood_base_at x (\<lambda>V. V \<in> \<B>) X)"
unfolding first_countable_def
apply (intro ball_cong refl ex_cong conj_cong)
by (metis (mono_tags, lifting) open_neighbourhood_base_at)
lemma second_countable_neighbourhood_base:
"second_countable X \<longleftrightarrow>
(\<exists>\<B>. countable \<B> \<and> neighbourhood_base_of (\<lambda>V. V \<in> \<B>) X)" (is "?lhs=?rhs")
proof
assume ?lhs
then show ?rhs
using second_countable_neighbourhood_base_alt by blast
next
assume ?rhs
then obtain \<B> where "countable \<B>"
and \<B>: "\<And>W x. openin X W \<and> x \<in> W \<longrightarrow> (\<exists>U. openin X U \<and> (\<exists>V. V \<in> \<B> \<and> x \<in> U \<and> U \<subseteq> V \<and> V \<subseteq> W))"
by (metis neighbourhood_base_of)
then show ?lhs
unfolding second_countable_neighbourhood_base_alt neighbourhood_base_of
apply (rule_tac x="(\<lambda>u. X interior_of u) ` \<B>" in exI)
by (smt (verit, best) interior_of_eq interior_of_mono countable_image image_iff openin_interior_of)
qed
lemma first_countable_neighbourhood_base:
"first_countable X \<longleftrightarrow>
(\<forall>x \<in> topspace X. \<exists>\<B>. countable \<B> \<and> neighbourhood_base_at x (\<lambda>V. V \<in> \<B>) X)" (is "?lhs=?rhs")
proof
assume ?lhs
then show ?rhs
by (metis first_countable_neighbourhood_base_alt)
next
assume R: ?rhs
show ?lhs
unfolding first_countable_neighbourhood_base_alt
proof
fix x
assume "x \<in> topspace X"
with R obtain \<B> where "countable \<B>" and \<B>: "neighbourhood_base_at x (\<lambda>V. V \<in> \<B>) X"
by blast
then
show "\<exists>\<B>. countable \<B> \<and> Ball \<B> (openin X) \<and> neighbourhood_base_at x (\<lambda>V. V \<in> \<B>) X"
unfolding neighbourhood_base_at_def
apply (rule_tac x="(\<lambda>u. X interior_of u) ` \<B>" in exI)
by (smt (verit, best) countable_image image_iff interior_of_eq interior_of_mono openin_interior_of)
qed
qed
subsection\<open>$T_0$ spaces and the Kolmogorov quotient\<close>
definition t0_space where
"t0_space X \<equiv>
\<forall>x \<in> topspace X. \<forall>y \<in> topspace X. x \<noteq> y \<longrightarrow> (\<exists>U. openin X U \<and> (x \<notin> U \<longleftrightarrow> y \<in> U))"
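text \<open>A standard example is the Sierpinski space: the two-point set containing 0 and 1
  whose only open sets are the empty set, the singleton of 1 and the whole set. It
  satisfies this separation property, since the singleton of 1 is an open set containing
  exactly one of the two points, but it is not a t1 space, because every open set
  containing 0 also contains 1.\<close>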
lemma t0_space_expansive:
"\<lbrakk>topspace Y = topspace X; \<And>U. openin X U \<Longrightarrow> openin Y U\<rbrakk> \<Longrightarrow> t0_space X \<Longrightarrow> t0_space Y"
by (metis t0_space_def)
lemma t1_imp_t0_space: "t1_space X \<Longrightarrow> t0_space X"
by (metis t0_space_def t1_space_def)
lemma t1_eq_symmetric_t0_space_alt:
"t1_space X \<longleftrightarrow>
t0_space X \<and>
(\<forall>x \<in> topspace X. \<forall>y \<in> topspace X. x \<in> X closure_of {y} \<longleftrightarrow> y \<in> X closure_of {x})"
apply (simp add: t0_space_def t1_space_def closure_of_def)
by (smt (verit, best) openin_topspace)
lemma t1_eq_symmetric_t0_space:
"t1_space X \<longleftrightarrow> t0_space X \<and> (\<forall>x y. x \<in> X closure_of {y} \<longleftrightarrow> y \<in> X closure_of {x})"
by (auto simp: t1_eq_symmetric_t0_space_alt in_closure_of)
lemma Hausdorff_imp_t0_space:
"Hausdorff_space X \<Longrightarrow> t0_space X"
by (simp add: Hausdorff_imp_t1_space t1_imp_t0_space)
lemma t0_space:
"t0_space X \<longleftrightarrow>
(\<forall>x \<in> topspace X. \<forall>y \<in> topspace X. x \<noteq> y \<longrightarrow> (\<exists>C. closedin X C \<and> (x \<notin> C \<longleftrightarrow> y \<in> C)))"
unfolding t0_space_def by (metis Diff_iff closedin_def openin_closedin_eq)
lemma homeomorphic_t0_space:
assumes "X homeomorphic_space Y"
shows "t0_space X \<longleftrightarrow> t0_space Y"
proof -
obtain f where f: "homeomorphic_map X Y f" and F: "inj_on f (topspace X)" and "topspace Y = f ` topspace X"
by (metis assms homeomorphic_imp_injective_map homeomorphic_imp_surjective_map homeomorphic_space)
with inj_on_image_mem_iff [OF F]
show ?thesis
apply (simp add: t0_space_def homeomorphic_eq_everything_map continuous_map_def open_map_def inj_on_def)
by (smt (verit) mem_Collect_eq openin_subset)
qed
lemma t0_space_closure_of_sing:
"t0_space X \<longleftrightarrow>
(\<forall>x \<in> topspace X. \<forall>y \<in> topspace X. X closure_of {x} = X closure_of {y} \<longrightarrow> x = y)"
by (simp add: t0_space_def closure_of_def set_eq_iff) (smt (verit))
lemma t0_space_discrete_topology: "t0_space (discrete_topology S)"
by (simp add: Hausdorff_imp_t0_space)
lemma t0_space_subtopology: "t0_space X \<Longrightarrow> t0_space (subtopology X U)"
by (simp add: t0_space_def openin_subtopology) (metis Int_iff)
lemma t0_space_retraction_map_image:
"\<lbrakk>retraction_map X Y r; t0_space X\<rbrakk> \<Longrightarrow> t0_space Y"
using hereditary_imp_retractive_property homeomorphic_t0_space t0_space_subtopology by blast
lemma XY: "{x}\<times>{y} = {(x,y)}"
by simp
lemma t0_space_prod_topologyI: "\<lbrakk>t0_space X; t0_space Y\<rbrakk> \<Longrightarrow> t0_space (prod_topology X Y)"
by (simp add: t0_space_closure_of_sing closure_of_Times closure_of_eq_empty_gen times_eq_iff flip: XY insert_Times_insert)
lemma t0_space_prod_topology_iff:
"t0_space (prod_topology X Y) \<longleftrightarrow> topspace (prod_topology X Y) = {} \<or> t0_space X \<and> t0_space Y" (is "?lhs=?rhs")
proof
assume ?lhs
then show ?rhs
by (metis Sigma_empty1 Sigma_empty2 retraction_map_fst retraction_map_snd t0_space_retraction_map_image topspace_prod_topology)
qed (metis empty_iff t0_space_def t0_space_prod_topologyI)
proposition t0_space_product_topology:
"t0_space (product_topology X I) \<longleftrightarrow>
topspace(product_topology X I) = {} \<or> (\<forall>i \<in> I. t0_space (X i))" (is "?lhs=?rhs")
proof
assume ?lhs
then show ?rhs
by (meson retraction_map_product_projection t0_space_retraction_map_image)
next
assume R: ?rhs
show ?lhs
proof (cases "topspace(product_topology X I) = {}")
case True
then show ?thesis
by (simp add: t0_space_def)
next
case False
show ?thesis
unfolding t0_space
proof (intro strip)
fix x y
assume x: "x \<in> topspace (product_topology X I)"
and y: "y \<in> topspace (product_topology X I)"
and "x \<noteq> y"
then obtain i where "i \<in> I" "x i \<noteq> y i"
by (metis PiE_ext topspace_product_topology)
then have "t0_space (X i)"
using False R by blast
then obtain U where "closedin (X i) U" "(x i \<notin> U \<longleftrightarrow> y i \<in> U)"
by (metis t0_space PiE_mem \<open>i \<in> I\<close> \<open>x i \<noteq> y i\<close> topspace_product_topology x y)
with \<open>i \<in> I\<close> x y show "\<exists>U. closedin (product_topology X I) U \<and> (x \<notin> U) = (y \<in> U)"
by (rule_tac x="PiE I (\<lambda>j. if j = i then U else topspace(X j))" in exI)
(simp add: closedin_product_topology PiE_iff)
qed
qed
qed
subsection \<open>Kolmogorov quotients\<close>
definition Kolmogorov_quotient
where "Kolmogorov_quotient X \<equiv> \<lambda>x. @y. \<forall>U. openin X U \<longrightarrow> (y \<in> U \<longleftrightarrow> x \<in> U)"
lemma Kolmogorov_quotient_in_open:
"openin X U \<Longrightarrow> (Kolmogorov_quotient X x \<in> U \<longleftrightarrow> x \<in> U)"
by (smt (verit, ccfv_SIG) Kolmogorov_quotient_def someI_ex)
lemma Kolmogorov_quotient_in_topspace:
"Kolmogorov_quotient X x \<in> topspace X \<longleftrightarrow> x \<in> topspace X"
by (simp add: Kolmogorov_quotient_in_open)
lemma Kolmogorov_quotient_in_closed:
"closedin X C \<Longrightarrow> (Kolmogorov_quotient X x \<in> C \<longleftrightarrow> x \<in> C)"
unfolding closedin_def
by (meson DiffD2 DiffI Kolmogorov_quotient_in_open Kolmogorov_quotient_in_topspace in_mono)
lemma continuous_map_Kolmogorov_quotient:
"continuous_map X X (Kolmogorov_quotient X)"
using Kolmogorov_quotient_in_open openin_subopen openin_subset
by (fastforce simp: continuous_map_def Kolmogorov_quotient_in_topspace)
lemma open_map_Kolmogorov_quotient_explicit:
"openin X U \<Longrightarrow> Kolmogorov_quotient X ` U = Kolmogorov_quotient X ` topspace X \<inter> U"
using Kolmogorov_quotient_in_open openin_subset by fastforce
lemma open_map_Kolmogorov_quotient_gen:
"open_map (subtopology X S) (subtopology X (image (Kolmogorov_quotient X) S)) (Kolmogorov_quotient X)"
proof (clarsimp simp: open_map_def openin_subtopology_alt image_iff)
fix U
assume "openin X U"
then have "Kolmogorov_quotient X ` (S \<inter> U) = Kolmogorov_quotient X ` S \<inter> U"
using Kolmogorov_quotient_in_open [of X U] by auto
then show "\<exists>V. openin X V \<and> Kolmogorov_quotient X ` (S \<inter> U) = Kolmogorov_quotient X ` S \<inter> V"
using \<open>openin X U\<close> by blast
qed
lemma open_map_Kolmogorov_quotient:
"open_map X (subtopology X (Kolmogorov_quotient X ` topspace X))
(Kolmogorov_quotient X)"
by (metis open_map_Kolmogorov_quotient_gen subtopology_topspace)
lemma closed_map_Kolmogorov_quotient_explicit:
"closedin X U \<Longrightarrow> Kolmogorov_quotient X ` U = Kolmogorov_quotient X ` topspace X \<inter> U"
using closedin_subset by (fastforce simp: Kolmogorov_quotient_in_closed)
lemma closed_map_Kolmogorov_quotient_gen:
"closed_map (subtopology X S) (subtopology X (Kolmogorov_quotient X ` S))
(Kolmogorov_quotient X)"
using Kolmogorov_quotient_in_closed by (force simp: closed_map_def closedin_subtopology_alt image_iff)
lemma closed_map_Kolmogorov_quotient:
"closed_map X (subtopology X (Kolmogorov_quotient X ` topspace X))
(Kolmogorov_quotient X)"
by (metis closed_map_Kolmogorov_quotient_gen subtopology_topspace)
lemma quotient_map_Kolmogorov_quotient_gen:
"quotient_map (subtopology X S) (subtopology X (Kolmogorov_quotient X ` S)) (Kolmogorov_quotient X)"
proof (intro continuous_open_imp_quotient_map)
show "continuous_map (subtopology X S) (subtopology X (Kolmogorov_quotient X ` S)) (Kolmogorov_quotient X)"
by (simp add: continuous_map_Kolmogorov_quotient continuous_map_from_subtopology continuous_map_in_subtopology image_mono)
show "open_map (subtopology X S) (subtopology X (Kolmogorov_quotient X ` S)) (Kolmogorov_quotient X)"
using open_map_Kolmogorov_quotient_gen by blast
show "Kolmogorov_quotient X ` topspace (subtopology X S) = topspace (subtopology X (Kolmogorov_quotient X ` S))"
by (force simp: Kolmogorov_quotient_in_open)
qed
lemma quotient_map_Kolmogorov_quotient:
"quotient_map X (subtopology X (Kolmogorov_quotient X ` topspace X)) (Kolmogorov_quotient X)"
by (metis quotient_map_Kolmogorov_quotient_gen subtopology_topspace)
lemma Kolmogorov_quotient_eq:
"Kolmogorov_quotient X x = Kolmogorov_quotient X y \<longleftrightarrow>
(\<forall>U. openin X U \<longrightarrow> (x \<in> U \<longleftrightarrow> y \<in> U))" (is "?lhs=?rhs")
proof
assume ?lhs then show ?rhs
by (metis Kolmogorov_quotient_in_open)
next
assume ?rhs then show ?lhs
by (simp add: Kolmogorov_quotient_def)
qed
lemma Kolmogorov_quotient_eq_alt:
"Kolmogorov_quotient X x = Kolmogorov_quotient X y \<longleftrightarrow>
(\<forall>U. closedin X U \<longrightarrow> (x \<in> U \<longleftrightarrow> y \<in> U))" (is "?lhs=?rhs")
proof
assume ?lhs then show ?rhs
by (metis Kolmogorov_quotient_in_closed)
next
assume ?rhs then show ?lhs
by (smt (verit) Diff_iff Kolmogorov_quotient_eq closedin_topspace in_mono openin_closedin_eq)
qed
lemma Kolmogorov_quotient_continuous_map:
assumes "continuous_map X Y f" "t0_space Y" and x: "x \<in> topspace X"
shows "f (Kolmogorov_quotient X x) = f x"
using assms unfolding continuous_map_def t0_space_def
by (smt (verit, ccfv_SIG) Kolmogorov_quotient_in_open Kolmogorov_quotient_in_topspace x mem_Collect_eq)
lemma t0_space_Kolmogorov_quotient:
"t0_space (subtopology X (Kolmogorov_quotient X ` topspace X))"
apply (clarsimp simp: t0_space_def)
by (smt (verit, best) Kolmogorov_quotient_eq imageE image_eqI open_map_Kolmogorov_quotient open_map_def)
lemma Kolmogorov_quotient_id:
"t0_space X \<Longrightarrow> x \<in> topspace X \<Longrightarrow> Kolmogorov_quotient X x = x"
by (metis Kolmogorov_quotient_in_open Kolmogorov_quotient_in_topspace t0_space_def)
lemma Kolmogorov_quotient_idemp:
"Kolmogorov_quotient X (Kolmogorov_quotient X x) = Kolmogorov_quotient X x"
by (simp add: Kolmogorov_quotient_eq Kolmogorov_quotient_in_open)
lemma retraction_maps_Kolmogorov_quotient:
"retraction_maps X
(subtopology X (Kolmogorov_quotient X ` topspace X))
(Kolmogorov_quotient X) id"
unfolding retraction_maps_def continuous_map_in_subtopology
using Kolmogorov_quotient_idemp continuous_map_Kolmogorov_quotient by force
lemma retraction_map_Kolmogorov_quotient:
"retraction_map X
(subtopology X (Kolmogorov_quotient X ` topspace X))
(Kolmogorov_quotient X)"
using retraction_map_def retraction_maps_Kolmogorov_quotient by blast
lemma retract_of_space_Kolmogorov_quotient_image:
"Kolmogorov_quotient X ` topspace X retract_of_space X"
proof -
have "continuous_map X X (Kolmogorov_quotient X)"
by (simp add: continuous_map_Kolmogorov_quotient)
then have "Kolmogorov_quotient X ` topspace X \<subseteq> topspace X"
by (simp add: continuous_map_image_subset_topspace)
then show ?thesis
by (meson retract_of_space_retraction_maps retraction_maps_Kolmogorov_quotient)
qed
lemma Kolmogorov_quotient_lift_exists:
assumes "S \<subseteq> topspace X" "t0_space Y" and f: "continuous_map (subtopology X S) Y f"
obtains g where "continuous_map (subtopology X (image (Kolmogorov_quotient X) S)) Y g"
"\<And>x. x \<in> S \<Longrightarrow> g(Kolmogorov_quotient X x) = f x"
proof -
have "\<And>x y. \<lbrakk>x \<in> S; y \<in> S; Kolmogorov_quotient X x = Kolmogorov_quotient X y\<rbrakk>
\<Longrightarrow> f x = f y"
using assms
apply (simp add: Kolmogorov_quotient_eq t0_space_def continuous_map_def Int_absorb1 openin_subtopology)
by (smt (verit, del_insts) Int_iff mem_Collect_eq)
then obtain g where g: "continuous_map (subtopology X (Kolmogorov_quotient X ` S)) Y g"
"g ` (topspace X \<inter> Kolmogorov_quotient X ` S) = f ` S"
"\<And>x. x \<in> S \<Longrightarrow> g (Kolmogorov_quotient X x) = f x"
using quotient_map_lift_exists [OF quotient_map_Kolmogorov_quotient_gen [of X S] f]
by (metis assms(1) topspace_subtopology topspace_subtopology_subset)
show ?thesis
proof qed (use g in auto)
qed
subsection\<open>Closed diagonals and graphs\<close>
lemma Hausdorff_space_closedin_diagonal:
"Hausdorff_space X \<longleftrightarrow>
closedin (prod_topology X X) ((\<lambda>x. (x,x)) ` topspace X)"
proof -
have \<section>: "((\<lambda>x. (x, x)) ` topspace X) \<subseteq> topspace X \<times> topspace X"
by auto
show ?thesis
apply (simp add: closedin_def openin_prod_topology_alt Hausdorff_space_def disjnt_iff \<section>)
apply (intro all_cong1 imp_cong ex_cong1 conj_cong refl)
by (force dest!: openin_subset)+
qed
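text \<open>This is the classical characterisation of Hausdorffness: $X$ is Hausdorff iff the
  diagonal $\Delta = \{(x,x) \mid x \in X\}$ is closed in $X \times X$.  Indeed, an open box
  $U \times V$ containing $(x,y)$ and disjoint from $\Delta$ is the same thing as a pair of
  disjoint open neighbourhoods of $x$ and $y$.\<close>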
lemma closed_map_diag_eq:
"closed_map X (prod_topology X X) (\<lambda>x. (x,x)) \<longleftrightarrow> Hausdorff_space X"
proof -
have "section_map X (prod_topology X X) (\<lambda>x. (x, x))"
unfolding section_map_def retraction_maps_def
by (smt (verit) continuous_map_fst continuous_map_of_fst continuous_map_on_empty continuous_map_pairwise fst_conv fst_diag_fst snd_diag_fst)
then have "embedding_map X (prod_topology X X) (\<lambda>x. (x, x))"
by (rule section_imp_embedding_map)
then show ?thesis
using Hausdorff_space_closedin_diagonal embedding_imp_closed_map_eq by blast
qed
+lemma proper_map_diag_eq [simp]:
+ "proper_map X (prod_topology X X) (\<lambda>x. (x,x)) \<longleftrightarrow> Hausdorff_space X"
+ by (simp add: closed_map_diag_eq inj_on_convol_ident injective_imp_proper_eq_closed_map)
+
lemma closedin_continuous_maps_eq:
assumes "Hausdorff_space Y" and f: "continuous_map X Y f" and g: "continuous_map X Y g"
shows "closedin X {x \<in> topspace X. f x = g x}"
proof -
have \<section>:"{x \<in> topspace X. f x = g x} = {x \<in> topspace X. (f x,g x) \<in> ((\<lambda>y.(y,y)) ` topspace Y)}"
using f continuous_map_image_subset_topspace by fastforce
show ?thesis
unfolding \<section>
proof (intro closedin_continuous_map_preimage)
show "continuous_map X (prod_topology Y Y) (\<lambda>x. (f x, g x))"
by (simp add: continuous_map_pairedI f g)
show "closedin (prod_topology Y Y) ((\<lambda>y. (y, y)) ` topspace Y)"
using Hausdorff_space_closedin_diagonal assms by blast
qed
qed
lemma retract_of_space_imp_closedin:
assumes "Hausdorff_space X" and S: "S retract_of_space X"
shows "closedin X S"
proof -
obtain r where r: "continuous_map X (subtopology X S) r" "\<forall>x\<in>S. r x = x"
using assms by (meson retract_of_space_def)
then have \<section>: "S = {x \<in> topspace X. r x = x}"
using S retract_of_space_imp_subset by (force simp: continuous_map_def)
show ?thesis
unfolding \<section>
using r continuous_map_into_fulltopology assms
by (force intro: closedin_continuous_maps_eq)
qed
lemma homeomorphic_maps_graph:
"homeomorphic_maps X (subtopology (prod_topology X Y) ((\<lambda>x. (x, f x)) ` (topspace X)))
(\<lambda>x. (x, f x)) fst \<longleftrightarrow> continuous_map X Y f"
(is "?lhs=?rhs")
proof
assume ?lhs
then
have h: "homeomorphic_map X (subtopology (prod_topology X Y) ((\<lambda>x. (x, f x)) ` topspace X)) (\<lambda>x. (x, f x))"
by (auto simp: homeomorphic_maps_map)
have "f = snd \<circ> (\<lambda>x. (x, f x))"
by force
then show ?rhs
by (metis (no_types, lifting) h continuous_map_in_subtopology continuous_map_snd_of homeomorphic_eq_everything_map)
next
assume ?rhs
then show ?lhs
unfolding homeomorphic_maps_def
by (smt (verit, ccfv_threshold) continuous_map_eq continuous_map_subtopology_fst embedding_map_def embedding_map_graph homeomorphic_eq_everything_map image_cong image_iff prod.collapse prod.inject)
qed
subsection \<open>KC spaces: those in which all compact sets are closed\<close>
definition kc_space
where "kc_space X \<equiv> \<forall>S. compactin X S \<longrightarrow> closedin X S"
lemma kc_space_euclidean: "kc_space (euclidean :: 'a::metric_space topology)"
by (simp add: compact_imp_closed kc_space_def)
lemma kc_space_expansive:
"\<lbrakk>kc_space X; topspace Y = topspace X; \<And>U. openin X U \<Longrightarrow> openin Y U\<rbrakk>
\<Longrightarrow> kc_space Y"
by (meson compactin_contractive kc_space_def topology_finer_closedin)
lemma compactin_imp_closedin_gen:
"\<lbrakk>kc_space X; compactin X S\<rbrakk> \<Longrightarrow> closedin X S"
using kc_space_def by blast
lemma Hausdorff_imp_kc_space: "Hausdorff_space X \<Longrightarrow> kc_space X"
by (simp add: compactin_imp_closedin kc_space_def)
lemma kc_imp_t1_space: "kc_space X \<Longrightarrow> t1_space X"
by (simp add: finite_imp_compactin kc_space_def t1_space_closedin_finite)
lemma kc_space_subtopology:
"kc_space X \<Longrightarrow> kc_space(subtopology X S)"
by (metis closedin_Int_closure_of closure_of_eq compactin_subtopology inf.absorb2 kc_space_def)
lemma kc_space_discrete_topology:
"kc_space(discrete_topology U)"
using Hausdorff_space_discrete_topology compactin_imp_closedin kc_space_def by blast
lemma kc_space_continuous_injective_map_preimage:
assumes "kc_space Y" "continuous_map X Y f" and injf: "inj_on f (topspace X)"
shows "kc_space X"
unfolding kc_space_def
proof (intro strip)
fix S
assume S: "compactin X S"
have "S = {x \<in> topspace X. f x \<in> f ` S}"
using S compactin_subset_topspace inj_onD [OF injf] by blast
with assms S show "closedin X S"
by (metis (no_types, lifting) Collect_cong closedin_continuous_map_preimage compactin_imp_closedin_gen image_compactin)
qed
lemma kc_space_retraction_map_image:
assumes "retraction_map X Y r" "kc_space X"
shows "kc_space Y"
proof -
obtain s where s: "continuous_map X Y r" "continuous_map Y X s" "\<And>x. x \<in> topspace Y \<Longrightarrow> r (s x) = x"
using assms by (force simp: retraction_map_def retraction_maps_def)
then have inj: "inj_on s (topspace Y)"
by (metis inj_on_def)
show ?thesis
unfolding kc_space_def
proof (intro strip)
fix S
assume S: "compactin Y S"
have "S = {x \<in> topspace Y. s x \<in> s ` S}"
using S compactin_subset_topspace inj_onD [OF inj] by blast
with assms S show "closedin Y S"
by (meson compactin_imp_closedin_gen inj kc_space_continuous_injective_map_preimage s(2))
qed
qed
lemma homeomorphic_kc_space:
"X homeomorphic_space Y \<Longrightarrow> kc_space X \<longleftrightarrow> kc_space Y"
by (meson homeomorphic_eq_everything_map homeomorphic_space homeomorphic_space_sym kc_space_continuous_injective_map_preimage)
lemma compact_kc_eq_maximal_compact_space:
assumes "compact_space X"
shows "kc_space X \<longleftrightarrow>
(\<forall>Y. topspace Y = topspace X \<and> (\<forall>S. openin X S \<longrightarrow> openin Y S) \<and> compact_space Y \<longrightarrow> Y = X)" (is "?lhs=?rhs")
proof
assume ?lhs
then show ?rhs
by (metis closedin_compact_space compactin_contractive kc_space_def topology_eq topology_finer_closedin)
next
assume R: ?rhs
show ?lhs
unfolding kc_space_def
proof (intro strip)
fix S
assume S: "compactin X S"
define Y where
"Y \<equiv> topology (arbitrary union_of (finite intersection_of (\<lambda>A. A = topspace X - S \<or> openin X A)
relative_to (topspace X)))"
have "topspace Y = topspace X"
by (auto simp: Y_def)
have "openin X T \<longrightarrow> openin Y T" for T
by (simp add: Y_def arbitrary_union_of_inc finite_intersection_of_inc openin_subbase openin_subset relative_to_subset)
have "compact_space Y"
proof (rule Alexander_subbase_alt)
show "\<exists>\<F>'. finite \<F>' \<and> \<F>' \<subseteq> \<C> \<and> topspace X \<subseteq> \<Union> \<F>'"
if \<C>: "\<C> \<subseteq> insert (topspace X - S) (Collect (openin X))" and sub: "topspace X \<subseteq> \<Union>\<C>" for \<C>
proof -
consider "\<C> \<subseteq> Collect (openin X)" | \<V> where "\<C> = insert (topspace X - S) \<V>" "\<V> \<subseteq> Collect (openin X)"
using \<C> by (metis insert_Diff subset_insert_iff)
then show ?thesis
proof cases
case 1
then show ?thesis
by (metis assms compact_space_alt mem_Collect_eq subsetD that(2))
next
case 2
then have "S \<subseteq> \<Union>\<V>"
using S sub compactin_subset_topspace by blast
with 2 obtain \<F> where "finite \<F> \<and> \<F> \<subseteq> \<V> \<and> S \<subseteq> \<Union>\<F>"
using S unfolding compactin_def by (metis Ball_Collect)
with 2 show ?thesis
by (rule_tac x="insert (topspace X - S) \<F>" in exI) blast
qed
qed
qed (auto simp: Y_def)
have "Y = X"
using R \<open>\<And>S. openin X S \<longrightarrow> openin Y S\<close> \<open>compact_space Y\<close> \<open>topspace Y = topspace X\<close> by blast
moreover have "openin Y (topspace X - S)"
by (simp add: Y_def arbitrary_union_of_inc finite_intersection_of_inc openin_subbase relative_to_subset)
ultimately show "closedin X S"
unfolding closedin_def using S compactin_subset_topspace by blast
qed
qed
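text \<open>In other words, a compact space is a KC space precisely when its topology is maximal
  among compact topologies: any compact topology on the same underlying set that is at
  least as fine must coincide with it.\<close>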
lemma continuous_imp_closed_map_gen:
"\<lbrakk>compact_space X; kc_space Y; continuous_map X Y f\<rbrakk> \<Longrightarrow> closed_map X Y f"
by (meson closed_map_def closedin_compact_space compactin_imp_closedin_gen image_compactin)
lemma kc_space_compact_subtopologies:
"kc_space X \<longleftrightarrow> (\<forall>K. compactin X K \<longrightarrow> kc_space(subtopology X K))" (is "?lhs=?rhs")
proof
assume ?lhs
then show ?rhs
by (auto simp: kc_space_def closedin_closed_subtopology compactin_subtopology)
next
assume R: ?rhs
show ?lhs
unfolding kc_space_def
proof (intro strip)
fix K
assume K: "compactin X K"
then have "K \<subseteq> topspace X"
by (simp add: compactin_subset_topspace)
moreover have "X closure_of K \<subseteq> K"
proof
fix x
assume x: "x \<in> X closure_of K"
have "kc_space (subtopology X K)"
by (simp add: R \<open>compactin X K\<close>)
have "compactin X (insert x K)"
by (metis K x compactin_Un compactin_sing in_closure_of insert_is_Un)
then show "x \<in> K"
by (metis R x K Int_insert_left_if1 closedin_Int_closure_of compact_imp_compactin_subtopology
insertCI kc_space_def subset_insertI)
qed
ultimately show "closedin X K"
using closure_of_subset_eq by blast
qed
qed
lemma kc_space_compact_prod_topology:
assumes "compact_space X"
shows "kc_space(prod_topology X X) \<longleftrightarrow> Hausdorff_space X" (is "?lhs=?rhs")
proof
assume L: ?lhs
show ?rhs
unfolding closed_map_diag_eq [symmetric]
proof (intro continuous_imp_closed_map_gen)
show "continuous_map X (prod_topology X X) (\<lambda>x. (x, x))"
by (intro continuous_intros)
qed (use L assms in auto)
next
assume ?rhs then show ?lhs
by (simp add: Hausdorff_imp_kc_space Hausdorff_space_prod_topology)
qed
lemma kc_space_prod_topology:
"kc_space(prod_topology X X) \<longleftrightarrow> (\<forall>K. compactin X K \<longrightarrow> Hausdorff_space(subtopology X K))" (is "?lhs=?rhs")
proof
assume ?lhs
then show ?rhs
by (metis compactin_subspace kc_space_compact_prod_topology kc_space_subtopology subtopology_Times)
next
assume R: ?rhs
have "kc_space (subtopology (prod_topology X X) L)" if "compactin (prod_topology X X) L" for L
proof -
define K where "K \<equiv> fst ` L \<union> snd ` L"
have "L \<subseteq> K \<times> K"
by (force simp: K_def)
have "compactin X K"
by (metis K_def compactin_Un continuous_map_fst continuous_map_snd image_compactin that)
then have "Hausdorff_space (subtopology X K)"
by (simp add: R)
then have "kc_space (prod_topology (subtopology X K) (subtopology X K))"
by (simp add: \<open>compactin X K\<close> compact_space_subtopology kc_space_compact_prod_topology)
then have "kc_space (subtopology (prod_topology (subtopology X K) (subtopology X K)) L)"
using kc_space_subtopology by blast
then show ?thesis
using \<open>L \<subseteq> K \<times> K\<close> subtopology_Times subtopology_subtopology
by (metis (no_types, lifting) Sigma_cong inf.absorb_iff2)
qed
then show ?lhs
using kc_space_compact_subtopologies by blast
qed
lemma kc_space_prod_topology_alt:
"kc_space(prod_topology X X) \<longleftrightarrow>
kc_space X \<and>
(\<forall>K. compactin X K \<longrightarrow> Hausdorff_space(subtopology X K))"
using Hausdorff_imp_kc_space kc_space_compact_subtopologies kc_space_prod_topology by blast
proposition kc_space_prod_topology_left:
assumes X: "kc_space X" and Y: "Hausdorff_space Y"
shows "kc_space (prod_topology X Y)"
unfolding kc_space_def
proof (intro strip)
fix K
assume K: "compactin (prod_topology X Y) K"
then have "K \<subseteq> topspace X \<times> topspace Y"
using compactin_subset_topspace topspace_prod_topology by blast
moreover have "\<exists>T. openin (prod_topology X Y) T \<and> (a,b) \<in> T \<and> T \<subseteq> (topspace X \<times> topspace Y) - K"
if ab: "(a,b) \<in> (topspace X \<times> topspace Y) - K" for a b
proof -
have "compactin Y {b}"
using that by force
moreover
have "compactin Y {y \<in> topspace Y. (a,y) \<in> K}"
proof -
have "compactin (prod_topology X Y) (K \<inter> {a} \<times> topspace Y)"
using that compact_Int_closedin [OF K]
by (simp add: X closedin_prod_Times_iff compactin_imp_closedin_gen)
moreover have "subtopology (prod_topology X Y) (K \<inter> {a} \<times> topspace Y) homeomorphic_space
subtopology Y {y \<in> topspace Y. (a, y) \<in> K}"
unfolding homeomorphic_space_def homeomorphic_maps_def
using that
apply (rule_tac x="snd" in exI)
apply (rule_tac x="Pair a" in exI)
by (force simp: continuous_map_in_subtopology continuous_map_from_subtopology continuous_map_subtopology_snd continuous_map_paired)
ultimately show ?thesis
by (simp add: compactin_subspace homeomorphic_compact_space)
qed
moreover have "disjnt {b} {y \<in> topspace Y. (a,y) \<in> K}"
using ab by force
ultimately obtain V U where VU: "openin Y V" "openin Y U" "{b} \<subseteq> V" "{y \<in> topspace Y. (a,y) \<in> K} \<subseteq> U" "disjnt V U"
using Hausdorff_space_compact_separation [OF Y] by blast
define V' where "V' \<equiv> topspace Y - U"
have W: "closedin Y V'" "{y \<in> topspace Y. (a,y) \<in> K} \<subseteq> topspace Y - V'" "disjnt V (topspace Y - V')"
using VU by (auto simp: V'_def disjnt_iff)
with VU obtain "V \<subseteq> topspace Y" "V' \<subseteq> topspace Y"
by (meson closedin_subset openin_closedin_eq)
then obtain "b \<in> V" "disjnt {y \<in> topspace Y. (a,y) \<in> K} V'" "V \<subseteq> V'"
using VU unfolding disjnt_iff V'_def by force
define C where "C \<equiv> image fst (K \<inter> {z \<in> topspace(prod_topology X Y). snd z \<in> V'})"
have "closedin (prod_topology X Y) {z \<in> topspace (prod_topology X Y). snd z \<in> V'}"
using closedin_continuous_map_preimage \<open>closedin Y V'\<close> continuous_map_snd by blast
then have "compactin X C"
unfolding C_def by (meson K compact_Int_closedin continuous_map_fst image_compactin)
then have "closedin X C"
using assms by (auto simp: kc_space_def)
show ?thesis
proof (intro exI conjI)
show "openin (prod_topology X Y) ((topspace X - C) \<times> V)"
by (simp add: VU \<open>closedin X C\<close> openin_diff openin_prod_Times_iff)
have "a \<notin> C"
using VU by (auto simp: C_def V'_def)
then show "(a, b) \<in> (topspace X - C) \<times> V"
using \<open>a \<notin> C\<close> \<open>b \<in> V\<close> that by blast
show "(topspace X - C) \<times> V \<subseteq> topspace X \<times> topspace Y - K"
using \<open>V \<subseteq> V'\<close> \<open>V \<subseteq> topspace Y\<close>
apply (simp add: C_def)
by (smt (verit, ccfv_threshold) DiffE DiffI IntI SigmaE SigmaI image_eqI mem_Collect_eq prod.sel(1) snd_conv subset_iff)
qed
qed
ultimately show "closedin (prod_topology X Y) K"
by (metis surj_pair closedin_def openin_subopen topspace_prod_topology)
qed
lemma kc_space_prod_topology_right:
"\<lbrakk>Hausdorff_space X; kc_space Y\<rbrakk> \<Longrightarrow> kc_space (prod_topology X Y)"
using kc_space_prod_topology_left homeomorphic_kc_space homeomorphic_space_prod_topology_swap by blast
+subsection \<open>Technical results about proper maps, perfect maps, etc.\<close>
+
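+text \<open>Roughly speaking, a proper map here is a closed map all of whose fibres
+  $\{x \in \mathrm{topspace}\, X \mid f(x) = y\}$ are compact; this is how \<open>proper_map_def\<close>
+  is unfolded in the proofs below.  The first lemma gives a sufficient condition for
+  properness in terms of preimages of compact sets.\<close>
+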
+lemma compact_imp_proper_map_gen:
+ assumes Y: "\<And>S. \<lbrakk>S \<subseteq> topspace Y; \<And>K. compactin Y K \<Longrightarrow> compactin Y (S \<inter> K)\<rbrakk>
+ \<Longrightarrow> closedin Y S"
+ and fim: "f ` (topspace X) \<subseteq> topspace Y"
+ and f: "continuous_map X Y f \<or> kc_space X"
+ and YX: "\<And>K. compactin Y K \<Longrightarrow> compactin X {x \<in> topspace X. f x \<in> K}"
+ shows "proper_map X Y f"
+ unfolding proper_map_alt closed_map_def
+proof (intro conjI strip)
+ fix C
+ assume C: "closedin X C"
+ show "closedin Y (f ` C)"
+ proof (intro Y)
+ show "f ` C \<subseteq> topspace Y"
+ using C closedin_subset fim by blast
+ fix K
+ assume K: "compactin Y K"
+ define A where "A \<equiv> {x \<in> topspace X. f x \<in> K}"
+ have eq: "f ` C \<inter> K = f ` ({x \<in> topspace X. f x \<in> K} \<inter> C)"
+ using C closedin_subset by auto
+ show "compactin Y (f ` C \<inter> K)"
+ unfolding eq
+ proof (rule image_compactin)
+ show "compactin (subtopology X A) ({x \<in> topspace X. f x \<in> K} \<inter> C)"
+ proof (rule closedin_compact_space)
+ show "compact_space (subtopology X A)"
+ by (simp add: A_def K YX compact_space_subtopology)
+ show "closedin (subtopology X A) ({x \<in> topspace X. f x \<in> K} \<inter> C)"
+ using A_def C closedin_subtopology by blast
+ qed
+ have "continuous_map (subtopology X A) (subtopology Y K) f" if "kc_space X"
+ unfolding continuous_map_closedin
+ proof (intro conjI strip)
+ show "f x \<in> topspace (subtopology Y K)"
+ if "x \<in> topspace (subtopology X A)" for x
+ using that A_def K compactin_subset_topspace by auto
+ next
+ fix C
+ assume C: "closedin (subtopology Y K) C"
+ show "closedin (subtopology X A) {x \<in> topspace (subtopology X A). f x \<in> C}"
+ proof (rule compactin_imp_closedin_gen)
+ show "kc_space (subtopology X A)"
+ by (simp add: kc_space_subtopology that)
+ have [simp]: "{x \<in> topspace X. f x \<in> K \<and> f x \<in> C} = {x \<in> topspace X. f x \<in> C}"
+ using C closedin_imp_subset by auto
+ have "compactin (subtopology Y K) C"
+ by (simp add: C K closedin_compact_space compact_space_subtopology)
+ then have "compactin X {x \<in> topspace X. x \<in> A \<and> f x \<in> C}"
+ by (auto simp: A_def compactin_subtopology dest: YX)
+ then show "compactin (subtopology X A) {x \<in> topspace (subtopology X A). f x \<in> C}"
+ by (auto simp add: compactin_subtopology)
+ qed
+ qed
+ with f show "continuous_map (subtopology X A) Y f"
+ using continuous_map_from_subtopology continuous_map_in_subtopology by blast
+ qed
+ qed
+qed (simp add: YX)
+
+lemma tube_lemma_left:
+ assumes W: "openin (prod_topology X Y) W" and C: "compactin X C"
+ and y: "y \<in> topspace Y" and subW: "C \<times> {y} \<subseteq> W"
+ shows "\<exists>U V. openin X U \<and> openin Y V \<and> C \<subseteq> U \<and> y \<in> V \<and> U \<times> V \<subseteq> W"
+proof (cases "C = {}")
+ case True
+ with y show ?thesis by auto
+next
+ case False
+ have "\<exists>U V. openin X U \<and> openin Y V \<and> x \<in> U \<and> y \<in> V \<and> U \<times> V \<subseteq> W"
+ if "x \<in> C" for x
+ using W openin_prod_topology_alt subW subsetD that by fastforce
+ then obtain U V where UV: "\<And>x. x \<in> C \<Longrightarrow> openin X (U x) \<and> openin Y (V x) \<and> x \<in> U x \<and> y \<in> V x \<and> U x \<times> V x \<subseteq> W"
+ by metis
+ then obtain D where D: "finite D" "D \<subseteq> C" "C \<subseteq> \<Union> (U ` D)"
+ using compactinD [OF C, of "U`C"]
+ by (smt (verit) UN_I finite_subset_image imageE subsetI)
+ show ?thesis
+ proof (intro exI conjI)
+ show "openin X (\<Union> (U ` D))" "openin Y (\<Inter> (V ` D))"
+ using D False UV by blast+
+ show "y \<in> \<Inter> (V ` D)" "C \<subseteq> \<Union> (U ` D)" "\<Union>(U ` D) \<times> \<Inter>(V ` D) \<subseteq> W"
+ using D UV by force+
+ qed
+qed
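+
+text \<open>This is the familiar ``tube lemma'': if the slice $C \times \{y\}$ of a compact set
+  $C$ lies inside an open subset $W$ of the product, then an entire open tube $U \times V$
+  with $C \subseteq U$ and $y \in V$ already fits inside $W$.\<close>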
+
+lemma Wallace_theorem_prod_topology:
+ assumes "compactin X K" "compactin Y L"
+ and W: "openin (prod_topology X Y) W" and subW: "K \<times> L \<subseteq> W"
+ obtains U V where "openin X U" "openin Y V" "K \<subseteq> U" "L \<subseteq> V" "U \<times> V \<subseteq> W"
+proof -
+ have "\<And>y. y \<in> L \<Longrightarrow> \<exists>U V. openin X U \<and> openin Y V \<and> K \<subseteq> U \<and> y \<in> V \<and> U \<times> V \<subseteq> W"
+ proof (intro tube_lemma_left assms)
+ fix y assume "y \<in> L"
+ show "y \<in> topspace Y"
+ using assms \<open>y \<in> L\<close> compactin_subset_topspace by blast
+ show "K \<times> {y} \<subseteq> W"
+ using \<open>y \<in> L\<close> subW by force
+ qed
+ then obtain U V where UV:
+ "\<And>y. y \<in> L \<Longrightarrow> openin X (U y) \<and> openin Y (V y) \<and> K \<subseteq> U y \<and> y \<in> V y \<and> U y \<times> V y \<subseteq> W"
+ by metis
+ then obtain M where "finite M" "M \<subseteq> L" and M: "L \<subseteq> \<Union> (V ` M)"
+ using \<open>compactin Y L\<close> unfolding compactin_def
+ by (smt (verit) UN_iff finite_subset_image imageE subset_iff)
+ show thesis
+ proof (cases "M={}")
+ case True
+ with M have "L={}"
+ by blast
+ then show ?thesis
+ using \<open>compactin X K\<close> compactin_subset_topspace that by fastforce
+ next
+ case False
+ show ?thesis
+ proof
+ show "openin X (\<Inter>(U`M))"
+ using False UV \<open>M \<subseteq> L\<close> \<open>finite M\<close> by blast
+ show "openin Y (\<Union>(V`M))"
+ using UV \<open>M \<subseteq> L\<close> by blast
+ show "K \<subseteq> \<Inter>(U`M)"
+ by (meson INF_greatest UV \<open>M \<subseteq> L\<close> subsetD)
+ show "L \<subseteq> \<Union>(V`M)"
+ by (simp add: M)
+ show "\<Inter>(U`M) \<times> \<Union>(V`M) \<subseteq> W"
+ using UV \<open>M \<subseteq> L\<close> by fastforce
+ qed
+ qed
+qed
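+
+text \<open>Wallace's theorem strengthens the tube lemma to two compact factors: any open set
+  containing the compact box $K \times L$ already contains an open box $U \times V$ with
+  $K \subseteq U$ and $L \subseteq V$.\<close>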
+
+lemma proper_map_prod:
+ "proper_map (prod_topology X Y) (prod_topology X' Y') (\<lambda>(x,y). (f x, g y)) \<longleftrightarrow>
+ topspace(prod_topology X Y) = {} \<or> proper_map X X' f \<and> proper_map Y Y' g"
+ (is "?lhs \<longleftrightarrow> _ \<or> ?rhs")
+proof (cases "topspace(prod_topology X Y) = {}")
+ case True
+ then show ?thesis
+ by (simp add: proper_map_on_empty)
+next
+ case False
+ then have ne: "topspace X \<noteq> {}" "topspace Y \<noteq> {}"
+ by auto
+ define h where "h \<equiv> \<lambda>(x,y). (f x, g y)"
+ have "proper_map X X' f" "proper_map Y Y' g" if ?lhs
+ proof -
+ have cm: "closed_map X X' f" "closed_map Y Y' g"
+ using that False closed_map_prod proper_imp_closed_map by blast+
+ show "proper_map X X' f"
+ proof (clarsimp simp add: proper_map_def cm)
+ fix y
+ assume y: "y \<in> topspace X'"
+ obtain z where z: "z \<in> topspace Y"
+ using ne by blast
+ then have eq: "{x \<in> topspace X. f x = y} =
+ fst ` {u \<in> topspace X \<times> topspace Y. h u = (y,g z)}"
+ by (force simp: h_def)
+ show "compactin X {x \<in> topspace X. f x = y}"
+ unfolding eq
+ proof (intro image_compactin)
+ have "g z \<in> topspace Y'"
+ by (meson closed_map_def closedin_subset closedin_topspace cm image_subset_iff z)
+ with y show "compactin (prod_topology X Y) {u \<in> topspace X \<times> topspace Y. (h u) = (y, g z)}"
+ using that by (simp add: h_def proper_map_def)
+ show "continuous_map (prod_topology X Y) X fst"
+ by (simp add: continuous_map_fst)
+ qed
+ qed
+ show "proper_map Y Y' g"
+ proof (clarsimp simp add: proper_map_def cm)
+ fix y
+ assume y: "y \<in> topspace Y'"
+ obtain z where z: "z \<in> topspace X"
+ using ne by blast
+ then have eq: "{x \<in> topspace Y. g x = y} =
+ snd ` {u \<in> topspace X \<times> topspace Y. h u = (f z,y)}"
+ by (force simp: h_def)
+ show "compactin Y {x \<in> topspace Y. g x = y}"
+ unfolding eq
+ proof (intro image_compactin)
+ have "f z \<in> topspace X'"
+ by (meson closed_map_def closedin_subset closedin_topspace cm image_subset_iff z)
+ with y show "compactin (prod_topology X Y) {u \<in> topspace X \<times> topspace Y. (h u) = (f z, y)}"
+ using that by (simp add: proper_map_def h_def)
+ show "continuous_map (prod_topology X Y) Y snd"
+ by (simp add: continuous_map_snd)
+ qed
+ qed
+ qed
+ moreover
+ { assume R: ?rhs
+ then have fgim: "f ` topspace X \<subseteq> topspace X'" "g ` topspace Y \<subseteq> topspace Y'"
+ and cm: "closed_map X X' f" "closed_map Y Y' g"
+ by (auto simp: proper_map_def closed_map_imp_subset_topspace)
+ have "closed_map (prod_topology X Y) (prod_topology X' Y') h"
+ unfolding closed_map_fibre_neighbourhood imp_conjL
+ proof (intro conjI strip)
+ show "h ` topspace (prod_topology X Y) \<subseteq> topspace (prod_topology X' Y')"
+ unfolding h_def using fgim by auto
+ fix W w
+ assume W: "openin (prod_topology X Y) W"
+ and w: "w \<in> topspace (prod_topology X' Y')"
+ and subW: "{x \<in> topspace (prod_topology X Y). h x = w} \<subseteq> W"
+ then obtain x' y' where weq: "w = (x',y')" "x' \<in> topspace X'" "y' \<in> topspace Y'"
+ by auto
+ have eq: "{u \<in> topspace X \<times> topspace Y. h u = (x',y')} = {x \<in> topspace X. f x = x'} \<times> {y \<in> topspace Y. g y = y'}"
+ by (auto simp: h_def)
+ obtain U V where "openin X U" "openin Y V" "U \<times> V \<subseteq> W"
+ and U: "{x \<in> topspace X. f x = x'} \<subseteq> U"
+ and V: "{x \<in> topspace Y. g x = y'} \<subseteq> V"
+ proof (rule Wallace_theorem_prod_topology)
+ show "compactin X {x \<in> topspace X. f x = x'}" "compactin Y {x \<in> topspace Y. g x = y'}"
+ using R weq unfolding proper_map_def closed_map_fibre_neighbourhood by fastforce+
+ show "{x \<in> topspace X. f x = x'} \<times> {x \<in> topspace Y. g x = y'} \<subseteq> W"
+ using weq subW by (auto simp: h_def)
+ qed (use W in auto)
+ obtain U' where "openin X' U'" "x' \<in> U'" and U': "{x \<in> topspace X. f x \<in> U'} \<subseteq> U"
+ using cm U \<open>openin X U\<close> weq unfolding closed_map_fibre_neighbourhood by meson
+ obtain V' where "openin Y' V'" "y' \<in> V'" and V': "{x \<in> topspace Y. g x \<in> V'} \<subseteq> V"
+ using cm V \<open>openin Y V\<close> weq unfolding closed_map_fibre_neighbourhood by meson
+ show "\<exists>V. openin (prod_topology X' Y') V \<and> w \<in> V \<and> {x \<in> topspace (prod_topology X Y). h x \<in> V} \<subseteq> W"
+ proof (intro conjI exI)
+ show "openin (prod_topology X' Y') (U' \<times> V')"
+ by (simp add: \<open>openin X' U'\<close> \<open>openin Y' V'\<close> openin_prod_Times_iff)
+ show "w \<in> U' \<times> V'"
+ using \<open>x' \<in> U'\<close> \<open>y' \<in> V'\<close> weq by blast
+ show "{x \<in> topspace (prod_topology X Y). h x \<in> U' \<times> V'} \<subseteq> W"
+ using \<open>U \<times> V \<subseteq> W\<close> U' V' h_def by auto
+ qed
+ qed
+ moreover
+ have "compactin (prod_topology X Y) {u \<in> topspace X \<times> topspace Y. h u = (w, z)}"
+ if "w \<in> topspace X'" and "z \<in> topspace Y'" for w z
+ proof -
+ have eq: "{u \<in> topspace X \<times> topspace Y. h u = (w,z)} =
+ {u \<in> topspace X. f u = w} \<times> {y. y \<in> topspace Y \<and> g y = z}"
+ by (auto simp: h_def)
+ show ?thesis
+ using R that by (simp add: eq compactin_Times proper_map_def)
+ qed
+ ultimately have ?lhs
+ by (auto simp: h_def proper_map_def)
+ }
+ ultimately show ?thesis using False by metis
+qed
+
+lemma proper_map_paired:
+ assumes "Hausdorff_space X \<and> proper_map X Y f \<and> proper_map X Z g \<or>
+ Hausdorff_space Y \<and> continuous_map X Y f \<and> proper_map X Z g \<or>
+ Hausdorff_space Z \<and> proper_map X Y f \<and> continuous_map X Z g"
+ shows "proper_map X (prod_topology Y Z) (\<lambda>x. (f x,g x))"
+ using assms
+proof (elim disjE conjE)
+ assume \<section>: "Hausdorff_space X" "proper_map X Y f" "proper_map X Z g"
+ have eq: "(\<lambda>x. (f x, g x)) = (\<lambda>(x, y). (f x, g y)) \<circ> (\<lambda>x. (x, x))"
+ by auto
+ show "proper_map X (prod_topology Y Z) (\<lambda>x. (f x, g x))"
+ unfolding eq
+ proof (rule proper_map_compose)
+ show "proper_map X (prod_topology X X) (\<lambda>x. (x,x))"
+ by (simp add: \<section>)
+ show "proper_map (prod_topology X X) (prod_topology Y Z) (\<lambda>(x,y). (f x, g y))"
+ by (simp add: \<section> proper_map_prod)
+ qed
+next
+ assume \<section>: "Hausdorff_space Y" "continuous_map X Y f" "proper_map X Z g"
+ have eq: "(\<lambda>x. (f x, g x)) = (\<lambda>(x,y). (x,g y)) \<circ> (\<lambda>x. (f x,x))"
+ by auto
+ show "proper_map X (prod_topology Y Z) (\<lambda>x. (f x, g x))"
+ unfolding eq
+ proof (rule proper_map_compose)
+ show "proper_map X (prod_topology Y X) (\<lambda>x. (f x,x))"
+ by (simp add: \<section> proper_map_paired_continuous_map_left)
+ show "proper_map (prod_topology Y X) (prod_topology Y Z) (\<lambda>(x,y). (x,g y))"
+ by (simp add: \<section> proper_map_prod proper_map_id [unfolded id_def])
+ qed
+next
+ assume \<section>: "Hausdorff_space Z" "proper_map X Y f" "continuous_map X Z g"
+ have eq: "(\<lambda>x. (f x, g x)) = (\<lambda>(x,y). (f x,y)) \<circ> (\<lambda>x. (x,g x))"
+ by auto
+ show "proper_map X (prod_topology Y Z) (\<lambda>x. (f x, g x))"
+ unfolding eq
+ proof (rule proper_map_compose)
+ show "proper_map X (prod_topology X Z) (\<lambda>x. (x, g x))"
+ using \<section> proper_map_paired_continuous_map_right by auto
+ show "proper_map (prod_topology X Z) (prod_topology Y Z) (\<lambda>(x,y). (f x,y))"
+ by (simp add: \<section> proper_map_prod proper_map_id [unfolded id_def])
+ qed
+qed
+
+lemma proper_map_pairwise:
+ assumes
+ "Hausdorff_space X \<and> proper_map X Y (fst \<circ> f) \<and> proper_map X Z (snd \<circ> f) \<or>
+ Hausdorff_space Y \<and> continuous_map X Y (fst \<circ> f) \<and> proper_map X Z (snd \<circ> f) \<or>
+ Hausdorff_space Z \<and> proper_map X Y (fst \<circ> f) \<and> continuous_map X Z (snd \<circ> f)"
+ shows "proper_map X (prod_topology Y Z) f"
+ using proper_map_paired [OF assms] by (simp add: o_def)
+
+lemma proper_map_from_composition_right:
+ assumes "Hausdorff_space Y" "proper_map X Z (g \<circ> f)" and "continuous_map X Y f"
+ and contg: "continuous_map Y Z g"
+ shows "proper_map X Y f"
+proof -
+ define YZ where "YZ \<equiv> subtopology (prod_topology Y Z) ((\<lambda>x. (x, g x)) ` topspace Y)"
+ have "proper_map X Y (fst \<circ> (\<lambda>x. (f x, (g \<circ> f) x)))"
+ proof (rule proper_map_compose)
+ have [simp]: "x \<in> topspace X \<Longrightarrow> f x \<in> topspace Y" for x
+ by (meson assms(3) continuous_map_def)
+ show "proper_map X YZ (\<lambda>x. (f x, (g \<circ> f) x))"
+ unfolding YZ_def
+ using assms
+ by (force intro!: proper_map_into_subtopology proper_map_paired simp: o_def image_iff)+
+ show "proper_map YZ Y fst"
+ using contg
+ by (simp flip: homeomorphic_maps_graph add: YZ_def homeomorphic_maps_map homeomorphic_imp_proper_map)
+ qed
+ moreover have "fst \<circ> (\<lambda>x. (f x, (g \<circ> f) x)) = f"
+ by auto
+ ultimately show ?thesis
+ by auto
+qed
+
+lemma perfect_map_from_composition_right:
+ "\<lbrakk>Hausdorff_space Y; perfect_map X Z (g \<circ> f);
+ continuous_map X Y f; continuous_map Y Z g; f ` topspace X = topspace Y\<rbrakk>
+ \<Longrightarrow> perfect_map X Y f"
+ by (meson perfect_map_def proper_map_from_composition_right)
+
+lemma perfect_map_from_composition_right_inj:
+ "\<lbrakk>perfect_map X Z (g \<circ> f); f ` topspace X = topspace Y;
+ continuous_map X Y f; continuous_map Y Z g; inj_on g (topspace Y)\<rbrakk>
+ \<Longrightarrow> perfect_map X Y f"
+ by (meson continuous_map_image_subset_topspace perfect_map_def proper_map_from_composition_right_inj)
+
subsection \<open>Regular spaces\<close>
text \<open>Regular spaces are *not* a priori assumed to be Hausdorff or $T_1$\<close>
definition regular_space
where "regular_space X \<equiv>
\<forall>C a. closedin X C \<and> a \<in> topspace X - C
\<longrightarrow> (\<exists>U V. openin X U \<and> openin X V \<and> a \<in> U \<and> C \<subseteq> V \<and> disjnt U V)"
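text \<open>Informally: in a regular space every point can be separated from any closed set not
  containing it by disjoint open sets, i.e.\ for closed $C$ and $a \notin C$ there are
  disjoint open sets $U \ni a$ and $V \supseteq C$.  Two equivalent formulations are proved
  below: \<open>regular_space\<close> (separation of $C$ from the closure of a neighbourhood of $a$)
  and \<open>neighbourhood_base_of_closedin\<close> (the closed neighbourhoods form a neighbourhood
  base).\<close>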
lemma homeomorphic_regular_space_aux:
assumes hom: "X homeomorphic_space Y" and X: "regular_space X"
shows "regular_space Y"
proof -
obtain f g where hmf: "homeomorphic_map X Y f" and hmg: "homeomorphic_map Y X g"
and fg: "(\<forall>x \<in> topspace X. g(f x) = x) \<and> (\<forall>y \<in> topspace Y. f(g y) = y)"
using assms X homeomorphic_maps_map homeomorphic_space_def by fastforce
show ?thesis
unfolding regular_space_def
proof clarify
fix C a
assume Y: "closedin Y C" "a \<in> topspace Y" and "a \<notin> C"
then obtain "closedin X (g ` C)" "g a \<in> topspace X" "g a \<notin> g ` C"
using \<open>closedin Y C\<close> hmg homeomorphic_map_closedness_eq
by (smt (verit, ccfv_SIG) fg homeomorphic_imp_surjective_map image_iff in_mono)
then obtain S T where ST: "openin X S" "openin X T" "g a \<in> S" "g`C \<subseteq> T" "disjnt S T"
using X unfolding regular_space_def by (metis DiffI)
then have "openin Y (f`S)" "openin Y (f`T)"
by (meson hmf homeomorphic_map_openness_eq)+
moreover have "a \<in> f`S \<and> C \<subseteq> f`T"
using ST by (smt (verit, best) Y closedin_subset fg image_eqI subset_iff)
moreover have "disjnt (f`S) (f`T)"
using ST by (smt (verit, ccfv_SIG) disjnt_iff fg image_iff openin_subset subsetD)
ultimately show "\<exists>U V. openin Y U \<and> openin Y V \<and> a \<in> U \<and> C \<subseteq> V \<and> disjnt U V"
by metis
qed
qed
lemma homeomorphic_regular_space:
"X homeomorphic_space Y
\<Longrightarrow> (regular_space X \<longleftrightarrow> regular_space Y)"
by (meson homeomorphic_regular_space_aux homeomorphic_space_sym)
lemma regular_space:
"regular_space X \<longleftrightarrow>
(\<forall>C a. closedin X C \<and> a \<in> topspace X - C
\<longrightarrow> (\<exists>U. openin X U \<and> a \<in> U \<and> disjnt C (X closure_of U)))"
unfolding regular_space_def
proof (intro all_cong1 imp_cong refl ex_cong1)
fix C a U
assume C: "closedin X C \<and> a \<in> topspace X - C"
show "(\<exists>V. openin X U \<and> openin X V \<and> a \<in> U \<and> C \<subseteq> V \<and> disjnt U V)
\<longleftrightarrow> (openin X U \<and> a \<in> U \<and> disjnt C (X closure_of U))" (is "?lhs=?rhs")
proof
assume ?lhs
then show ?rhs
by (smt (verit, best) disjnt_iff in_closure_of subsetD)
next
assume R: ?rhs
then have "disjnt U (topspace X - X closure_of U)"
by (meson DiffD2 closure_of_subset disjnt_iff openin_subset subsetD)
moreover have "C \<subseteq> topspace X - X closure_of U"
by (meson C DiffI R closedin_subset disjnt_iff subset_eq)
ultimately show ?lhs
using R by (rule_tac x="topspace X - X closure_of U" in exI) auto
qed
qed
lemma neighbourhood_base_of_closedin:
"neighbourhood_base_of (closedin X) X \<longleftrightarrow> regular_space X" (is "?lhs=?rhs")
proof -
have "?lhs \<longleftrightarrow> (\<forall>W x. openin X W \<and> x \<in> W \<longrightarrow>
(\<exists>U. openin X U \<and> (\<exists>V. closedin X V \<and> x \<in> U \<and> U \<subseteq> V \<and> V \<subseteq> W)))"
by (simp add: neighbourhood_base_of)
also have "\<dots> \<longleftrightarrow> (\<forall>W x. closedin X W \<and> x \<in> topspace X - W \<longrightarrow>
(\<exists>U. openin X U \<and> (\<exists>V. closedin X V \<and> x \<in> U \<and> U \<subseteq> V \<and> V \<subseteq> topspace X - W)))"
by (smt (verit) Diff_Diff_Int closedin_def inf.absorb_iff2 openin_closedin_eq)
also have "\<dots> \<longleftrightarrow> ?rhs"
proof -
have \<section>: "(\<exists>V. closedin X V \<and> x \<in> U \<and> U \<subseteq> V \<and> V \<subseteq> topspace X - W)
\<longleftrightarrow> (\<exists>V. openin X V \<and> x \<in> U \<and> W \<subseteq> V \<and> disjnt U V)" (is "?lhs=?rhs")
if "openin X U" "closedin X W" "x \<in> topspace X" "x \<notin> W" for W U x
proof
assume ?lhs with \<open>closedin X W\<close> show ?rhs
unfolding closedin_def by (smt (verit) Diff_mono disjnt_Diff1 double_diff subset_eq)
next
assume ?rhs with \<open>openin X U\<close> show ?lhs
unfolding openin_closedin_eq disjnt_def
by (smt (verit) Diff_Diff_Int Diff_disjoint Diff_eq_empty_iff Int_Diff inf.orderE)
qed
show ?thesis
unfolding regular_space_def
by (intro all_cong1 ex_cong1 imp_cong refl) (metis \<section> DiffE)
qed
finally show ?thesis .
qed
lemma regular_space_discrete_topology:
"regular_space (discrete_topology S)"
using neighbourhood_base_of_closedin neighbourhood_base_of_discrete_topology by fastforce
lemma regular_space_subtopology:
"regular_space X \<Longrightarrow> regular_space (subtopology X S)"
unfolding regular_space_def openin_subtopology_alt closedin_subtopology_alt disjnt_iff
by clarsimp (smt (verit, best) inf.orderE inf_le1 le_inf_iff)
lemma regular_space_retraction_map_image:
"\<lbrakk>retraction_map X Y r; regular_space X\<rbrakk> \<Longrightarrow> regular_space Y"
using hereditary_imp_retractive_property homeomorphic_regular_space regular_space_subtopology by blast
lemma regular_t0_imp_Hausdorff_space:
"\<lbrakk>regular_space X; t0_space X\<rbrakk> \<Longrightarrow> Hausdorff_space X"
apply (clarsimp simp: regular_space_def t0_space Hausdorff_space_def)
by (metis disjnt_sym subsetD)
lemma regular_t0_eq_Hausdorff_space:
"regular_space X \<Longrightarrow> (t0_space X \<longleftrightarrow> Hausdorff_space X)"
using Hausdorff_imp_t0_space regular_t0_imp_Hausdorff_space by blast
lemma regular_t1_imp_Hausdorff_space:
"\<lbrakk>regular_space X; t1_space X\<rbrakk> \<Longrightarrow> Hausdorff_space X"
by (simp add: regular_t0_imp_Hausdorff_space t1_imp_t0_space)
lemma regular_t1_eq_Hausdorff_space:
"regular_space X \<Longrightarrow> t1_space X \<longleftrightarrow> Hausdorff_space X"
using regular_t0_imp_Hausdorff_space t1_imp_t0_space t1_or_Hausdorff_space by blast
lemma compact_Hausdorff_imp_regular_space:
assumes "compact_space X" "Hausdorff_space X"
shows "regular_space X"
unfolding regular_space_def
proof clarify
fix S a
assume "closedin X S" and "a \<in> topspace X" and "a \<notin> S"
then show "\<exists>U V. openin X U \<and> openin X V \<and> a \<in> U \<and> S \<subseteq> V \<and> disjnt U V"
using assms unfolding Hausdorff_space_compact_sets
by (metis closedin_compact_space compactin_sing disjnt_empty1 insert_subset disjnt_insert1)
qed
lemma regular_space_topspace_empty: "topspace X = {} \<Longrightarrow> regular_space X"
by (simp add: Hausdorff_space_topspace_empty compact_Hausdorff_imp_regular_space compact_space_topspace_empty)
lemma neighbourhood_base_of_closed_Hausdorff_space:
"regular_space X \<and> Hausdorff_space X \<longleftrightarrow>
neighbourhood_base_of (\<lambda>C. closedin X C \<and> Hausdorff_space(subtopology X C)) X" (is "?lhs=?rhs")
proof
assume ?lhs then show ?rhs
by (simp add: Hausdorff_space_subtopology neighbourhood_base_of_closedin)
next
assume ?rhs then show ?lhs
by (metis (mono_tags, lifting) Hausdorff_space_closed_neighbourhood neighbourhood_base_of neighbourhood_base_of_closedin openin_topspace)
qed
lemma locally_compact_imp_kc_eq_Hausdorff_space:
"neighbourhood_base_of (compactin X) X \<Longrightarrow> kc_space X \<longleftrightarrow> Hausdorff_space X"
by (metis Hausdorff_imp_kc_space kc_imp_t1_space kc_space_def neighbourhood_base_of_closedin neighbourhood_base_of_mono regular_t1_imp_Hausdorff_space)
lemma regular_space_compact_closed_separation:
assumes X: "regular_space X"
and S: "compactin X S"
and T: "closedin X T"
and "disjnt S T"
shows "\<exists>U V. openin X U \<and> openin X V \<and> S \<subseteq> U \<and> T \<subseteq> V \<and> disjnt U V"
proof (cases "S={}")
case True
then show ?thesis
by (meson T closedin_def disjnt_empty1 empty_subsetI openin_empty openin_topspace)
next
case False
then have "\<exists>U V. x \<in> S \<longrightarrow> openin X U \<and> openin X V \<and> x \<in> U \<and> T \<subseteq> V \<and> disjnt U V" for x
using assms unfolding regular_space_def
by (smt (verit) Diff_iff compactin_subset_topspace disjnt_iff subsetD)
then obtain U V where UV: "\<And>x. x \<in> S \<Longrightarrow> openin X (U x) \<and> openin X (V x) \<and> x \<in> (U x) \<and> T \<subseteq> (V x) \<and> disjnt (U x) (V x)"
by metis
then obtain \<F> where "finite \<F>" "\<F> \<subseteq> U ` S" "S \<subseteq> \<Union> \<F>"
using S unfolding compactin_def by (smt (verit) UN_iff image_iff subsetI)
then obtain K where "finite K" "K \<subseteq> S" and K: "S \<subseteq> \<Union>(U ` K)"
by (metis finite_subset_image)
show ?thesis
proof (intro exI conjI)
show "openin X (\<Union>(U ` K))"
using \<open>K \<subseteq> S\<close> UV by blast
show "openin X (\<Inter>(V ` K))"
using False K UV \<open>K \<subseteq> S\<close> \<open>finite K\<close> by blast
show "S \<subseteq> \<Union>(U ` K)"
by (simp add: K)
show "T \<subseteq> \<Inter>(V ` K)"
using UV \<open>K \<subseteq> S\<close> by blast
show "disjnt (\<Union>(U ` K)) (\<Inter>(V ` K))"
by (smt (verit) Inter_iff UN_E UV \<open>K \<subseteq> S\<close> disjnt_iff image_eqI subset_iff)
qed
qed
lemma regular_space_compact_closed_sets:
"regular_space X \<longleftrightarrow>
(\<forall>S T. compactin X S \<and> closedin X T \<and> disjnt S T
\<longrightarrow> (\<exists>U V. openin X U \<and> openin X V \<and> S \<subseteq> U \<and> T \<subseteq> V \<and> disjnt U V))" (is "?lhs=?rhs")
proof
assume ?lhs then show ?rhs
using regular_space_compact_closed_separation by fastforce
next
assume R: ?rhs
show ?lhs
unfolding regular_space
proof clarify
fix S x
assume "closedin X S" and "x \<in> topspace X" and "x \<notin> S"
then obtain U V where "openin X U \<and> openin X V \<and> {x} \<subseteq> U \<and> S \<subseteq> V \<and> disjnt U V"
by (metis R compactin_sing disjnt_empty1 disjnt_insert1)
then show "\<exists>U. openin X U \<and> x \<in> U \<and> disjnt S (X closure_of U)"
by (smt (verit, best) disjnt_iff in_closure_of insert_subset subsetD)
qed
qed
lemma regular_space_prod_topology:
"regular_space (prod_topology X Y) \<longleftrightarrow>
topspace X = {} \<or> topspace Y = {} \<or> regular_space X \<and> regular_space Y" (is "?lhs=?rhs")
proof
assume ?lhs
then show ?rhs
by (metis regular_space_retraction_map_image retraction_map_fst retraction_map_snd)
next
assume R: ?rhs
show ?lhs
proof (cases "topspace X = {} \<or> topspace Y = {}")
case True
then show ?thesis
by (simp add: regular_space_topspace_empty)
next
case False
then have "regular_space X" "regular_space Y"
using R by auto
show ?thesis
unfolding neighbourhood_base_of_closedin [symmetric] neighbourhood_base_of
proof clarify
fix W x y
assume W: "openin (prod_topology X Y) W" and "(x,y) \<in> W"
then obtain U V where U: "openin X U" "x \<in> U" and V: "openin Y V" "y \<in> V"
and "U \<times> V \<subseteq> W"
by (metis openin_prod_topology_alt)
obtain D1 C1 where 1: "openin X D1" "closedin X C1" "x \<in> D1" "D1 \<subseteq> C1" "C1 \<subseteq> U"
by (metis \<open>regular_space X\<close> U neighbourhood_base_of neighbourhood_base_of_closedin)
obtain D2 C2 where 2: "openin Y D2" "closedin Y C2" "y \<in> D2" "D2 \<subseteq> C2" "C2 \<subseteq> V"
by (metis \<open>regular_space Y\<close> V neighbourhood_base_of neighbourhood_base_of_closedin)
show "\<exists>U V. openin (prod_topology X Y) U \<and> closedin (prod_topology X Y) V \<and>
(x,y) \<in> U \<and> U \<subseteq> V \<and> V \<subseteq> W"
proof (intro conjI exI)
show "openin (prod_topology X Y) (D1 \<times> D2)"
by (simp add: 1 2 openin_prod_Times_iff)
show "closedin (prod_topology X Y) (C1 \<times> C2)"
by (simp add: 1 2 closedin_prod_Times_iff)
qed (use 1 2 \<open>U \<times> V \<subseteq> W\<close> in auto)
qed
qed
qed
lemma regular_space_product_topology:
"regular_space (product_topology X I) \<longleftrightarrow>
topspace (product_topology X I) = {} \<or> (\<forall>i \<in> I. regular_space (X i))" (is "?lhs=?rhs")
proof
assume ?lhs
then show ?rhs
by (meson regular_space_retraction_map_image retraction_map_product_projection)
next
assume R: ?rhs
show ?lhs
proof (cases "topspace(product_topology X I) = {}")
case True
then show ?thesis
by (simp add: regular_space_topspace_empty)
next
case False
then obtain x where x: "x \<in> topspace (product_topology X I)"
by blast
define \<F> where "\<F> \<equiv> {Pi\<^sub>E I U |U. finite {i \<in> I. U i \<noteq> topspace (X i)}
\<and> (\<forall>i\<in>I. openin (X i) (U i))}"
have oo: "openin (product_topology X I) = arbitrary union_of (\<lambda>W. W \<in> \<F>)"
by (simp add: \<F>_def openin_product_topology product_topology_base_alt)
have "\<exists>U V. openin (product_topology X I) U \<and> closedin (product_topology X I) V \<and> x \<in> U \<and> U \<subseteq> V \<and> V \<subseteq> Pi\<^sub>E I W"
if fin: "finite {i \<in> I. W i \<noteq> topspace (X i)}"
and opeW: "\<And>k. k \<in> I \<Longrightarrow> openin (X k) (W k)" and x: "x \<in> PiE I W" for W x
proof -
have "\<And>i. i \<in> I \<Longrightarrow> \<exists>U V. openin (X i) U \<and> closedin (X i) V \<and> x i \<in> U \<and> U \<subseteq> V \<and> V \<subseteq> W i"
by (metis False PiE_iff R neighbourhood_base_of neighbourhood_base_of_closedin opeW x)
then obtain U C where UC:
"\<And>i. i \<in> I \<Longrightarrow> openin (X i) (U i) \<and> closedin (X i) (C i) \<and> x i \<in> U i \<and> U i \<subseteq> C i \<and> C i \<subseteq> W i"
by metis
define PI where "PI \<equiv> \<lambda>V. PiE I (\<lambda>i. if W i = topspace(X i) then topspace(X i) else V i)"
show ?thesis
proof (intro conjI exI)
have "\<forall>i\<in>I. W i \<noteq> topspace (X i) \<longrightarrow> openin (X i) (U i)"
using UC by force
with fin show "openin (product_topology X I) (PI U)"
by (simp add: Collect_mono_iff PI_def openin_PiE_gen rev_finite_subset)
show "closedin (product_topology X I) (PI C)"
by (simp add: UC closedin_product_topology PI_def)
show "x \<in> PI U"
using UC x by (fastforce simp: PI_def)
show "PI U \<subseteq> PI C"
by (smt (verit) UC Orderings.order_eq_iff PiE_mono PI_def)
show "PI C \<subseteq> Pi\<^sub>E I W"
by (simp add: UC PI_def subset_PiE)
qed
qed
then have "neighbourhood_base_of (closedin (product_topology X I)) (product_topology X I)"
unfolding neighbourhood_base_of_topology_base [OF oo] by (force simp: \<F>_def)
then show ?thesis
by (simp add: neighbourhood_base_of_closedin)
qed
qed
lemma closed_map_paired_gen_aux:
assumes "regular_space Y" and f: "closed_map Z X f" and g: "closed_map Z Y g"
and clo: "\<And>y. y \<in> topspace X \<Longrightarrow> closedin Z {x \<in> topspace Z. f x = y}"
and contg: "continuous_map Z Y g"
shows "closed_map Z (prod_topology X Y) (\<lambda>x. (f x, g x))"
unfolding closed_map_def
proof (intro strip)
fix C assume "closedin Z C"
then have "C \<subseteq> topspace Z"
by (simp add: closedin_subset)
have "f ` topspace Z \<subseteq> topspace X" "g ` topspace Z \<subseteq> topspace Y"
by (simp_all add: assms closed_map_imp_subset_topspace)
show "closedin (prod_topology X Y) ((\<lambda>x. (f x, g x)) ` C)"
unfolding closedin_def topspace_prod_topology
proof (intro conjI)
have "closedin Y (g ` C)"
using \<open>closedin Z C\<close> assms(3) closed_map_def by blast
with assms show "(\<lambda>x. (f x, g x)) ` C \<subseteq> topspace X \<times> topspace Y"
using \<open>C \<subseteq> topspace Z\<close> \<open>f ` topspace Z \<subseteq> topspace X\<close> continuous_map_closedin subsetD by fastforce
have *: "\<exists>T. openin (prod_topology X Y) T \<and> (y1,y2) \<in> T \<and> T \<subseteq> topspace X \<times> topspace Y - (\<lambda>x. (f x, g x)) ` C"
if "(y1,y2) \<notin> (\<lambda>x. (f x, g x)) ` C" and y1: "y1 \<in> topspace X" and y2: "y2 \<in> topspace Y"
for y1 y2
proof -
define A where "A \<equiv> topspace Z - (C \<inter> {x \<in> topspace Z. f x = y1})"
have A: "openin Z A" "{x \<in> topspace Z. g x = y2} \<subseteq> A"
using that \<open>closedin Z C\<close> clo that(2) by (auto simp: A_def)
obtain V0 where "openin Y V0 \<and> y2 \<in> V0" and UA: "{x \<in> topspace Z. g x \<in> V0} \<subseteq> A"
using g A y2 unfolding closed_map_fibre_neighbourhood by blast
then obtain V V' where VV: "openin Y V \<and> closedin Y V' \<and> y2 \<in> V \<and> V \<subseteq> V'" and "V' \<subseteq> V0"
by (metis (no_types, lifting) \<open>regular_space Y\<close> neighbourhood_base_of neighbourhood_base_of_closedin)
with UA have subA: "{x \<in> topspace Z. g x \<in> V'} \<subseteq> A"
by blast
show ?thesis
proof -
define B where "B \<equiv> topspace Z - (C \<inter> {x \<in> topspace Z. g x \<in> V'})"
have "openin Z B"
using VV \<open>closedin Z C\<close> contg by (fastforce simp: B_def continuous_map_closedin)
have "{x \<in> topspace Z. f x = y1} \<subseteq> B"
using A_def subA by (auto simp: A_def B_def)
then obtain U where "openin X U" "y1 \<in> U" and subB: "{x \<in> topspace Z. f x \<in> U} \<subseteq> B"
using \<open>openin Z B\<close> y1 f unfolding closed_map_fibre_neighbourhood by meson
show ?thesis
proof (intro conjI exI)
show "openin (prod_topology X Y) (U \<times> V)"
by (metis VV \<open>openin X U\<close> openin_prod_Times_iff)
show "(y1, y2) \<in> U \<times> V"
by (simp add: VV \<open>y1 \<in> U\<close>)
show "U \<times> V \<subseteq> topspace X \<times> topspace Y - (\<lambda>x. (f x, g x)) ` C"
using VV \<open>C \<subseteq> topspace Z\<close> \<open>openin X U\<close> subB
by (force simp: image_iff B_def subset_iff dest: openin_subset)
qed
qed
qed
then show "openin (prod_topology X Y) (topspace X \<times> topspace Y - (\<lambda>x. (f x, g x)) ` C)"
by (smt (verit, ccfv_threshold) Diff_iff SigmaE openin_subopen)
qed
qed
lemma closed_map_paired_gen:
assumes f: "closed_map Z X f" and g: "closed_map Z Y g"
and D: "(regular_space X \<and> continuous_map Z X f \<and> (\<forall>z \<in> topspace Y. closedin Z {x \<in> topspace Z. g x = z})
\<or> regular_space Y \<and> continuous_map Z Y g \<and> (\<forall>y \<in> topspace X. closedin Z {x \<in> topspace Z. f x = y}))"
(is "?RSX \<or> ?RSY")
shows "closed_map Z (prod_topology X Y) (\<lambda>x. (f x, g x))"
using D
proof
assume RSX: ?RSX
have eq: "(\<lambda>x. (f x, g x)) = (\<lambda>(x,y). (y,x)) \<circ> (\<lambda>x. (g x, f x))"
by auto
show ?thesis
unfolding eq
proof (rule closed_map_compose)
show "closed_map Z (prod_topology Y X) (\<lambda>x. (g x, f x))"
using RSX closed_map_paired_gen_aux f g by fastforce
show "closed_map (prod_topology Y X) (prod_topology X Y) (\<lambda>(x, y). (y, x))"
using homeomorphic_imp_closed_map homeomorphic_map_swap by blast
qed
qed (blast intro: assms closed_map_paired_gen_aux)
lemma closed_map_paired:
assumes "closed_map Z X f" and contf: "continuous_map Z X f"
"closed_map Z Y g" and contg: "continuous_map Z Y g"
and D: "t1_space X \<and> regular_space Y \<or> regular_space X \<and> t1_space Y"
shows "closed_map Z (prod_topology X Y) (\<lambda>x. (f x, g x))"
proof (rule closed_map_paired_gen)
show "regular_space X \<and> continuous_map Z X f \<and> (\<forall>z\<in>topspace Y. closedin Z {x \<in> topspace Z. g x = z}) \<or> regular_space Y \<and> continuous_map Z Y g \<and> (\<forall>y\<in>topspace X. closedin Z {x \<in> topspace Z. f x = y})"
using D contf contg
by (smt (verit, del_insts) Collect_cong closedin_continuous_map_preimage t1_space_closedin_singleton singleton_iff)
qed (use assms in auto)
lemma closed_map_pairwise:
assumes "closed_map Z X (fst \<circ> f)" "continuous_map Z X (fst \<circ> f)"
"closed_map Z Y (snd \<circ> f)" "continuous_map Z Y (snd \<circ> f)"
"t1_space X \<and> regular_space Y \<or> regular_space X \<and> t1_space Y"
shows "closed_map Z (prod_topology X Y) f"
proof -
have "closed_map Z (prod_topology X Y) (\<lambda>a. ((fst \<circ> f) a, (snd \<circ> f) a))"
using assms closed_map_paired by blast
then show ?thesis
by auto
qed
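text \<open>Taken together, the pairing lemmas show that \<open>x \<mapsto> (f x, g x)\<close> is a closed map into a
  product whenever both components are closed and continuous and one of the two target spaces
  is t1 while the other is regular.\<close>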
+lemma continuous_imp_proper_map:
+ "\<lbrakk>compact_space X; kc_space Y; continuous_map X Y f\<rbrakk> \<Longrightarrow> proper_map X Y f"
+ by (simp add: continuous_closed_imp_proper_map continuous_imp_closed_map_gen kc_imp_t1_space)
+
lemma tube_lemma_right:
assumes W: "openin (prod_topology X Y) W" and C: "compactin Y C"
and x: "x \<in> topspace X" and subW: "{x} \<times> C \<subseteq> W"
shows "\<exists>U V. openin X U \<and> openin Y V \<and> x \<in> U \<and> C \<subseteq> V \<and> U \<times> V \<subseteq> W"
proof (cases "C = {}")
case True
with x show ?thesis by auto
next
case False
have "\<exists>U V. openin X U \<and> openin Y V \<and> x \<in> U \<and> y \<in> V \<and> U \<times> V \<subseteq> W"
if "y \<in> C" for y
using W openin_prod_topology_alt subW subsetD that by fastforce
then obtain U V where UV: "\<And>y. y \<in> C \<Longrightarrow> openin X (U y) \<and> openin Y (V y) \<and> x \<in> U y \<and> y \<in> V y \<and> U y \<times> V y \<subseteq> W"
by metis
then obtain D where D: "finite D" "D \<subseteq> C" "C \<subseteq> \<Union> (V ` D)"
using compactinD [OF C, of "V`C"]
by (smt (verit) UN_I finite_subset_image imageE subsetI)
show ?thesis
proof (intro exI conjI)
show "openin X (\<Inter> (U ` D))" "openin Y (\<Union> (V ` D))"
using D False UV by blast+
show "x \<in> \<Inter> (U ` D)" "C \<subseteq> \<Union> (V ` D)" "\<Inter> (U ` D) \<times> \<Union> (V ` D) \<subseteq> W"
using D UV by force+
qed
qed
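text \<open>A one-sided tube lemma: an open subset of a product that contains a slice \<open>{x} \<times> C\<close>
  with \<open>C\<close> compact also contains an open box \<open>U \<times> V\<close> around that slice. It is used next to
  show that projecting out of a product with a compact factor is a closed map.\<close>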
lemma closed_map_fst:
assumes "compact_space Y"
shows "closed_map (prod_topology X Y) X fst"
proof -
have *: "{x \<in> topspace X \<times> topspace Y. fst x \<in> U} = U \<times> topspace Y"
if "U \<subseteq> topspace X" for U
using that by force
have **: "\<And>U y. \<lbrakk>openin (prod_topology X Y) U; y \<in> topspace X;
{x \<in> topspace X \<times> topspace Y. fst x = y} \<subseteq> U\<rbrakk>
\<Longrightarrow> \<exists>V. openin X V \<and> y \<in> V \<and> V \<times> topspace Y \<subseteq> U"
- using tube_lemma_right[of X Y _ "topspace Y"] assms compact_space_def
- by force
+ using tube_lemma_right[of X Y _ "topspace Y"] assms by (fastforce simp: compact_space_def)
show ?thesis
unfolding closed_map_fibre_neighbourhood
by (force simp: * openin_subset cong: conj_cong intro: **)
qed
lemma closed_map_snd:
assumes "compact_space X"
shows "closed_map (prod_topology X Y) Y snd"
proof -
have "snd = fst o prod.swap"
by force
moreover have "closed_map (prod_topology X Y) Y (fst o prod.swap)"
proof (rule closed_map_compose)
show "closed_map (prod_topology X Y) (prod_topology Y X) prod.swap"
by (metis (no_types, lifting) homeomorphic_imp_closed_map homeomorphic_map_eq homeomorphic_map_swap prod.swap_def split_beta)
show "closed_map (prod_topology Y X) Y fst"
by (simp add: closed_map_fst assms)
qed
ultimately show ?thesis
by metis
qed
lemma closed_map_paired_closed_map_right:
"\<lbrakk>closed_map X Y f; regular_space X;
\<And>y. y \<in> topspace Y \<Longrightarrow> closedin X {x \<in> topspace X. f x = y}\<rbrakk>
\<Longrightarrow> closed_map X (prod_topology X Y) (\<lambda>x. (x, f x))"
by (rule closed_map_paired_gen [OF closed_map_id, unfolded id_def]) auto
lemma closed_map_paired_closed_map_left:
assumes "closed_map X Y f" "regular_space X"
"\<And>y. y \<in> topspace Y \<Longrightarrow> closedin X {x \<in> topspace X. f x = y}"
shows "closed_map X (prod_topology Y X) (\<lambda>x. (f x, x))"
proof -
have eq: "(\<lambda>x. (f x, x)) = (\<lambda>(x,y). (y,x)) \<circ> (\<lambda>x. (x, f x))"
by auto
show ?thesis
unfolding eq
proof (rule closed_map_compose)
show "closed_map X (prod_topology X Y) (\<lambda>x. (x, f x))"
by (simp add: assms closed_map_paired_closed_map_right)
show "closed_map (prod_topology X Y) (prod_topology Y X) (\<lambda>(x, y). (y, x))"
using homeomorphic_imp_closed_map homeomorphic_map_swap by blast
qed
qed
lemma closed_map_imp_closed_graph:
assumes "closed_map X Y f" "regular_space X"
"\<And>y. y \<in> topspace Y \<Longrightarrow> closedin X {x \<in> topspace X. f x = y}"
shows "closedin (prod_topology X Y) ((\<lambda>x. (x, f x)) ` topspace X)"
using assms closed_map_def closed_map_paired_closed_map_right by blast
lemma proper_map_paired_closed_map_right:
assumes "closed_map X Y f" "regular_space X"
"\<And>y. y \<in> topspace Y \<Longrightarrow> closedin X {x \<in> topspace X. f x = y}"
shows "proper_map X (prod_topology X Y) (\<lambda>x. (x, f x))"
by (simp add: assms closed_injective_imp_proper_map inj_on_def closed_map_paired_closed_map_right)
lemma proper_map_paired_closed_map_left:
assumes "closed_map X Y f" "regular_space X"
"\<And>y. y \<in> topspace Y \<Longrightarrow> closedin X {x \<in> topspace X. f x = y}"
shows "proper_map X (prod_topology Y X) (\<lambda>x. (f x, x))"
by (simp add: assms closed_injective_imp_proper_map inj_on_def closed_map_paired_closed_map_left)
proposition regular_space_continuous_proper_map_image:
assumes "regular_space X" and contf: "continuous_map X Y f" and pmapf: "proper_map X Y f"
and fim: "f ` (topspace X) = topspace Y"
shows "regular_space Y"
unfolding regular_space_def
proof clarify
fix C y
assume "closedin Y C" and "y \<in> topspace Y" and "y \<notin> C"
have "closed_map X Y f" "(\<forall>y \<in> topspace Y. compactin X {x \<in> topspace X. f x = y})"
using pmapf proper_map_def by force+
moreover have "closedin X {z \<in> topspace X. f z \<in> C}"
using \<open>closedin Y C\<close> contf continuous_map_closedin by fastforce
moreover have "disjnt {z \<in> topspace X. f z = y} {z \<in> topspace X. f z \<in> C}"
using \<open>y \<notin> C\<close> disjnt_iff by blast
ultimately
obtain U V where UV: "openin X U" "openin X V" "{z \<in> topspace X. f z = y} \<subseteq> U" "{z \<in> topspace X. f z \<in> C} \<subseteq> V"
and dUV: "disjnt U V"
using \<open>y \<in> topspace Y\<close> \<open>regular_space X\<close> unfolding regular_space_compact_closed_sets
by meson
have *: "\<And>U T. openin X U \<and> T \<subseteq> topspace Y \<and> {x \<in> topspace X. f x \<in> T} \<subseteq> U \<longrightarrow>
(\<exists>V. openin Y V \<and> T \<subseteq> V \<and> {x \<in> topspace X. f x \<in> V} \<subseteq> U)"
using \<open>closed_map X Y f\<close> unfolding closed_map_preimage_neighbourhood by blast
obtain V1 where V1: "openin Y V1 \<and> y \<in> V1" and sub1: "{x \<in> topspace X. f x \<in> V1} \<subseteq> U"
using * [of U "{y}"] UV \<open>y \<in> topspace Y\<close> by auto
moreover
obtain V2 where "openin Y V2 \<and> C \<subseteq> V2" and sub2: "{x \<in> topspace X. f x \<in> V2} \<subseteq> V"
by (smt (verit, ccfv_SIG) * UV \<open>closedin Y C\<close> closedin_subset mem_Collect_eq subset_iff)
moreover have "disjnt V1 V2"
proof -
have "\<And>x. \<lbrakk>\<forall>x. x \<in> U \<longrightarrow> x \<notin> V; x \<in> V1; x \<in> V2\<rbrakk> \<Longrightarrow> False"
by (smt (verit) V1 fim image_iff mem_Collect_eq openin_subset sub1 sub2 subsetD)
with dUV show ?thesis by (auto simp: disjnt_iff)
qed
ultimately show "\<exists>U V. openin Y U \<and> openin Y V \<and> y \<in> U \<and> C \<subseteq> V \<and> disjnt U V"
by meson
qed
lemma regular_space_perfect_map_image:
"\<lbrakk>regular_space X; perfect_map X Y f\<rbrakk> \<Longrightarrow> regular_space Y"
by (meson perfect_map_def regular_space_continuous_proper_map_image)
proposition regular_space_perfect_map_image_eq:
assumes "Hausdorff_space X" and perf: "perfect_map X Y f"
shows "regular_space X \<longleftrightarrow> regular_space Y" (is "?lhs=?rhs")
proof
assume ?lhs
then show ?rhs
using perf regular_space_perfect_map_image by blast
next
assume R: ?rhs
have "continuous_map X Y f" "proper_map X Y f" and fim: "f ` (topspace X) = topspace Y"
using perf by (auto simp: perfect_map_def)
then have "closed_map X Y f" and preYf: "(\<forall>y \<in> topspace Y. compactin X {x \<in> topspace X. f x = y})"
by (simp_all add: proper_map_def)
show ?lhs
unfolding regular_space_def
proof clarify
fix C x
assume "closedin X C" and "x \<in> topspace X" and "x \<notin> C"
obtain U1 U2 where "openin X U1" "openin X U2" "{x} \<subseteq> U1" and "disjnt U1 U2"
and subV: "C \<inter> {z \<in> topspace X. f z = f x} \<subseteq> U2"
proof (rule Hausdorff_space_compact_separation [of X "{x}" "C \<inter> {z \<in> topspace X. f z = f x}", OF \<open>Hausdorff_space X\<close>])
show "compactin X {x}"
by (simp add: \<open>x \<in> topspace X\<close>)
show "compactin X (C \<inter> {z \<in> topspace X. f z = f x})"
using \<open>closedin X C\<close> fim \<open>x \<in> topspace X\<close> closed_Int_compactin preYf by fastforce
show "disjnt {x} (C \<inter> {z \<in> topspace X. f z = f x})"
using \<open>x \<notin> C\<close> by force
qed
have "closedin Y (f ` (C - U2))"
using \<open>closed_map X Y f\<close> \<open>closedin X C\<close> \<open>openin X U2\<close> closed_map_def by blast
moreover
have "f x \<in> topspace Y - f ` (C - U2)"
using \<open>closedin X C\<close> \<open>continuous_map X Y f\<close> \<open>x \<in> topspace X\<close> closedin_subset continuous_map_def subV by fastforce
ultimately
obtain V1 V2 where VV: "openin Y V1" "openin Y V2" "f x \<in> V1"
and subV2: "f ` (C - U2) \<subseteq> V2" and "disjnt V1 V2"
by (meson R regular_space_def)
show "\<exists>U U'. openin X U \<and> openin X U' \<and> x \<in> U \<and> C \<subseteq> U' \<and> disjnt U U'"
proof (intro exI conjI)
show "openin X (U1 \<inter> {x \<in> topspace X. f x \<in> V1})"
using VV(1) \<open>continuous_map X Y f\<close> \<open>openin X U1\<close> continuous_map by fastforce
show "openin X (U2 \<union> {x \<in> topspace X. f x \<in> V2})"
using VV(2) \<open>continuous_map X Y f\<close> \<open>openin X U2\<close> continuous_map by fastforce
show "x \<in> U1 \<inter> {x \<in> topspace X. f x \<in> V1}"
using VV(3) \<open>x \<in> topspace X\<close> \<open>{x} \<subseteq> U1\<close> by auto
show "C \<subseteq> U2 \<union> {x \<in> topspace X. f x \<in> V2}"
using \<open>closedin X C\<close> closedin_subset subV2 by auto
show "disjnt (U1 \<inter> {x \<in> topspace X. f x \<in> V1}) (U2 \<union> {x \<in> topspace X. f x \<in> V2})"
using \<open>disjnt U1 U2\<close> \<open>disjnt V1 V2\<close> by (auto simp: disjnt_iff)
qed
qed
qed
subsection\<open>Locally compact spaces\<close>
definition locally_compact_space
where "locally_compact_space X \<equiv>
\<forall>x \<in> topspace X. \<exists>U K. openin X U \<and> compactin X K \<and> x \<in> U \<and> U \<subseteq> K"
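text \<open>Informally: every point has an open neighbourhood contained in some compact set.
  Compact spaces and discrete topologies are immediate examples, see
  \<open>compact_imp_locally_compact_space\<close> and \<open>locally_compact_space_discrete_topology\<close> below.\<close>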
lemma homeomorphic_locally_compact_spaceD:
assumes X: "locally_compact_space X" and "X homeomorphic_space Y"
shows "locally_compact_space Y"
proof -
obtain f where hmf: "homeomorphic_map X Y f"
using assms homeomorphic_space by blast
then have eq: "topspace Y = f ` (topspace X)"
by (simp add: homeomorphic_imp_surjective_map)
have "\<exists>V K. openin Y V \<and> compactin Y K \<and> f x \<in> V \<and> V \<subseteq> K"
if "x \<in> topspace X" "openin X U" "compactin X K" "x \<in> U" "U \<subseteq> K" for x U K
using that
by (meson hmf homeomorphic_map_compactness_eq homeomorphic_map_openness_eq image_mono image_eqI)
with X show ?thesis
by (smt (verit) eq image_iff locally_compact_space_def)
qed
lemma homeomorphic_locally_compact_space:
assumes "X homeomorphic_space Y"
shows "locally_compact_space X \<longleftrightarrow> locally_compact_space Y"
by (meson assms homeomorphic_locally_compact_spaceD homeomorphic_space_sym)
lemma locally_compact_space_retraction_map_image:
assumes "retraction_map X Y r" and X: "locally_compact_space X"
shows "locally_compact_space Y"
proof -
obtain s where s: "retraction_maps X Y r s"
using assms retraction_map_def by blast
obtain T where "T retract_of_space X" and Teq: "T = s ` topspace Y"
using retraction_maps_section_image1 s by blast
then obtain r where r: "continuous_map X (subtopology X T) r" "\<forall>x\<in>T. r x = x"
by (meson retract_of_space_def)
have "locally_compact_space (subtopology X T)"
unfolding locally_compact_space_def openin_subtopology_alt
proof clarsimp
fix x
assume "x \<in> topspace X" "x \<in> T"
obtain U K where UK: "openin X U \<and> compactin X K \<and> x \<in> U \<and> U \<subseteq> K"
by (meson X \<open>x \<in> topspace X\<close> locally_compact_space_def)
then have "compactin (subtopology X T) (r ` K) \<and> T \<inter> U \<subseteq> r ` K"
by (smt (verit) IntD1 image_compactin image_iff inf_le2 r subset_iff)
then show "\<exists>U. openin X U \<and> (\<exists>K. compactin (subtopology X T) K \<and> x \<in> U \<and> T \<inter> U \<subseteq> K)"
using UK by auto
qed
with Teq show ?thesis
using homeomorphic_locally_compact_space retraction_maps_section_image2 s by blast
qed
lemma compact_imp_locally_compact_space:
"compact_space X \<Longrightarrow> locally_compact_space X"
using compact_space_def locally_compact_space_def by blast
lemma neighbourhood_base_imp_locally_compact_space:
"neighbourhood_base_of (compactin X) X \<Longrightarrow> locally_compact_space X"
by (metis locally_compact_space_def neighbourhood_base_of openin_topspace)
lemma locally_compact_imp_neighbourhood_base:
assumes loc: "locally_compact_space X" and reg: "regular_space X"
shows "neighbourhood_base_of (compactin X) X"
unfolding neighbourhood_base_of
proof clarify
fix W x
assume "openin X W" and "x \<in> W"
then obtain U K where "openin X U" "compactin X K" "x \<in> U" "U \<subseteq> K"
by (metis loc locally_compact_space_def openin_subset subsetD)
moreover have "openin X (U \<inter> W) \<and> x \<in> U \<inter> W"
using \<open>openin X W\<close> \<open>x \<in> W\<close> \<open>openin X U\<close> \<open>x \<in> U\<close> by blast
then have "\<exists>u' v. openin X u' \<and> closedin X v \<and> x \<in> u' \<and> u' \<subseteq> v \<and> v \<subseteq> U \<and> v \<subseteq> W"
using reg
by (metis le_infE neighbourhood_base_of neighbourhood_base_of_closedin)
then show "\<exists>U V. openin X U \<and> compactin X V \<and> x \<in> U \<and> U \<subseteq> V \<and> V \<subseteq> W"
by (meson \<open>U \<subseteq> K\<close> \<open>compactin X K\<close> closed_compactin subset_trans)
qed
lemma Hausdorff_regular: "\<lbrakk>Hausdorff_space X; neighbourhood_base_of (compactin X) X\<rbrakk> \<Longrightarrow> regular_space X"
by (metis compactin_imp_closedin neighbourhood_base_of_closedin neighbourhood_base_of_mono)
lemma locally_compact_Hausdorff_imp_regular_space:
assumes loc: "locally_compact_space X" and "Hausdorff_space X"
shows "regular_space X"
unfolding neighbourhood_base_of_closedin [symmetric] neighbourhood_base_of
proof clarify
fix W x
assume "openin X W" and "x \<in> W"
then have "x \<in> topspace X"
using openin_subset by blast
then obtain U K where "openin X U" "compactin X K" and UK: "x \<in> U" "U \<subseteq> K"
by (meson loc locally_compact_space_def)
with \<open>Hausdorff_space X\<close> have "regular_space (subtopology X K)"
using Hausdorff_space_subtopology compact_Hausdorff_imp_regular_space compact_space_subtopology by blast
then have "\<exists>U' V'. openin (subtopology X K) U' \<and> closedin (subtopology X K) V' \<and> x \<in> U' \<and> U' \<subseteq> V' \<and> V' \<subseteq> K \<inter> W"
unfolding neighbourhood_base_of_closedin [symmetric] neighbourhood_base_of
by (meson IntI \<open>U \<subseteq> K\<close> \<open>openin X W\<close> \<open>x \<in> U\<close> \<open>x \<in> W\<close> openin_subtopology_Int2 subsetD)
then obtain V C where "openin X V" "closedin X C" and VC: "x \<in> K \<inter> V" "K \<inter> V \<subseteq> K \<inter> C" "K \<inter> C \<subseteq> K \<inter> W"
by (metis Int_commute closedin_subtopology openin_subtopology)
show "\<exists>U V. openin X U \<and> closedin X V \<and> x \<in> U \<and> U \<subseteq> V \<and> V \<subseteq> W"
proof (intro conjI exI)
show "openin X (U \<inter> V)"
using \<open>openin X U\<close> \<open>openin X V\<close> by blast
show "closedin X (K \<inter> C)"
using \<open>closedin X C\<close> \<open>compactin X K\<close> compactin_imp_closedin \<open>Hausdorff_space X\<close> by blast
qed (use UK VC in auto)
qed
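text \<open>Hence, in the presence of local compactness, the Hausdorff property implies regularity,
  so the disjunctive hypothesis \<open>Hausdorff_space X \<or> regular_space X\<close> used below effectively
  reduces to regularity.\<close>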
lemma locally_compact_space_neighbourhood_base:
"Hausdorff_space X \<or> regular_space X
\<Longrightarrow> locally_compact_space X \<longleftrightarrow> neighbourhood_base_of (compactin X) X"
by (metis locally_compact_imp_neighbourhood_base locally_compact_Hausdorff_imp_regular_space
neighbourhood_base_imp_locally_compact_space)
lemma locally_compact_Hausdorff_or_regular:
"locally_compact_space X \<and> (Hausdorff_space X \<or> regular_space X) \<longleftrightarrow> locally_compact_space X \<and> regular_space X"
using locally_compact_Hausdorff_imp_regular_space by blast
lemma locally_compact_space_compact_closedin:
assumes "Hausdorff_space X \<or> regular_space X"
shows "locally_compact_space X \<longleftrightarrow>
(\<forall>x \<in> topspace X. \<exists>U K. openin X U \<and> compactin X K \<and> closedin X K \<and> x \<in> U \<and> U \<subseteq> K)"
using locally_compact_Hausdorff_or_regular unfolding locally_compact_space_def
by (metis assms closed_compactin inf.absorb_iff2 le_infE neighbourhood_base_of neighbourhood_base_of_closedin)
lemma locally_compact_space_compact_closure_of:
assumes "Hausdorff_space X \<or> regular_space X"
shows "locally_compact_space X \<longleftrightarrow>
(\<forall>x \<in> topspace X. \<exists>U. openin X U \<and> compactin X (X closure_of U) \<and> x \<in> U)" (is "?lhs=?rhs")
proof
assume ?lhs then show ?rhs
by (metis assms closed_compactin closedin_closure_of closure_of_eq closure_of_mono locally_compact_space_compact_closedin)
next
assume ?rhs then show ?lhs
by (meson closure_of_subset locally_compact_space_def openin_subset)
qed
lemma locally_compact_space_neighbourhood_base_closedin:
assumes "Hausdorff_space X \<or> regular_space X"
shows "locally_compact_space X \<longleftrightarrow> neighbourhood_base_of (\<lambda>C. compactin X C \<and> closedin X C) X" (is "?lhs=?rhs")
proof
assume L: ?lhs
then have "regular_space X"
using assms locally_compact_Hausdorff_imp_regular_space by blast
with L have "neighbourhood_base_of (compactin X) X"
by (simp add: locally_compact_imp_neighbourhood_base)
with \<open>regular_space X\<close> show ?rhs
by (smt (verit, ccfv_threshold) closed_compactin neighbourhood_base_of subset_trans neighbourhood_base_of_closedin)
next
assume ?rhs then show ?lhs
using neighbourhood_base_imp_locally_compact_space neighbourhood_base_of_mono by blast
qed
lemma locally_compact_space_neighbourhood_base_closure_of:
assumes "Hausdorff_space X \<or> regular_space X"
shows "locally_compact_space X \<longleftrightarrow> neighbourhood_base_of (\<lambda>T. compactin X (X closure_of T)) X"
(is "?lhs=?rhs")
proof
assume L: ?lhs
then have "regular_space X"
using assms locally_compact_Hausdorff_imp_regular_space by blast
with L have "neighbourhood_base_of (\<lambda>A. compactin X A \<and> closedin X A) X"
using locally_compact_space_neighbourhood_base_closedin by blast
then show ?rhs
by (simp add: closure_of_closedin neighbourhood_base_of_mono)
next
assume ?rhs then show ?lhs
unfolding locally_compact_space_def neighbourhood_base_of
by (meson closure_of_subset openin_topspace subset_trans)
qed
lemma locally_compact_space_neighbourhood_base_open_closure_of:
assumes "Hausdorff_space X \<or> regular_space X"
shows "locally_compact_space X \<longleftrightarrow>
neighbourhood_base_of (\<lambda>U. openin X U \<and> compactin X (X closure_of U)) X"
(is "?lhs=?rhs")
proof
assume L: ?lhs
then have "regular_space X"
using assms locally_compact_Hausdorff_imp_regular_space by blast
then have "neighbourhood_base_of (\<lambda>T. compactin X (X closure_of T)) X"
using L locally_compact_space_neighbourhood_base_closure_of by auto
with L show ?rhs
unfolding neighbourhood_base_of
by (meson closed_compactin closure_of_closure_of closure_of_eq closure_of_mono subset_trans)
next
assume ?rhs then show ?lhs
unfolding locally_compact_space_def neighbourhood_base_of
by (meson closure_of_subset openin_topspace subset_trans)
qed
lemma locally_compact_space_compact_closed_compact:
assumes "Hausdorff_space X \<or> regular_space X"
shows "locally_compact_space X \<longleftrightarrow>
(\<forall>K. compactin X K
\<longrightarrow> (\<exists>U L. openin X U \<and> compactin X L \<and> closedin X L \<and> K \<subseteq> U \<and> U \<subseteq> L))"
(is "?lhs=?rhs")
proof
assume L: ?lhs
then obtain U L where UL: "\<forall>x \<in> topspace X. openin X (U x) \<and> compactin X (L x) \<and> closedin X (L x) \<and> x \<in> U x \<and> U x \<subseteq> L x"
unfolding locally_compact_space_compact_closedin [OF assms]
by metis
show ?rhs
proof clarify
fix K
assume "compactin X K"
then have "K \<subseteq> topspace X"
by (simp add: compactin_subset_topspace)
then have *: "(\<forall>U\<in>U ` K. openin X U) \<and> K \<subseteq> \<Union> (U ` K)"
using UL by blast
with \<open>compactin X K\<close> obtain KF where KF: "finite KF" "KF \<subseteq> K" "K \<subseteq> \<Union>(U ` KF)"
by (metis compactinD finite_subset_image)
show "\<exists>U L. openin X U \<and> compactin X L \<and> closedin X L \<and> K \<subseteq> U \<and> U \<subseteq> L"
proof (intro conjI exI)
show "openin X (\<Union> (U ` KF))"
using "*" \<open>KF \<subseteq> K\<close> by fastforce
show "compactin X (\<Union> (L ` KF))"
by (smt (verit) UL \<open>K \<subseteq> topspace X\<close> KF compactin_Union finite_imageI imageE subset_iff)
show "closedin X (\<Union> (L ` KF))"
by (smt (verit) UL \<open>K \<subseteq> topspace X\<close> KF closedin_Union finite_imageI imageE subsetD)
qed (use UL \<open>K \<subseteq> topspace X\<close> KF in auto)
qed
next
assume ?rhs then show ?lhs
by (metis compactin_sing insert_subset locally_compact_space_def)
qed
lemma locally_compact_regular_space_neighbourhood_base:
"locally_compact_space X \<and> regular_space X \<longleftrightarrow>
neighbourhood_base_of (\<lambda>C. compactin X C \<and> closedin X C) X"
using locally_compact_space_neighbourhood_base_closedin neighbourhood_base_of_closedin neighbourhood_base_of_mono by blast
lemma locally_compact_kc_space:
"neighbourhood_base_of (compactin X) X \<and> kc_space X \<longleftrightarrow>
locally_compact_space X \<and> Hausdorff_space X"
using Hausdorff_imp_kc_space locally_compact_imp_kc_eq_Hausdorff_space locally_compact_space_neighbourhood_base by blast
lemma locally_compact_kc_space_alt:
"neighbourhood_base_of (compactin X) X \<and> kc_space X \<longleftrightarrow>
locally_compact_space X \<and> Hausdorff_space X \<and> regular_space X"
using Hausdorff_regular locally_compact_kc_space by blast
lemma locally_compact_kc_imp_regular_space:
"\<lbrakk>neighbourhood_base_of (compactin X) X; kc_space X\<rbrakk> \<Longrightarrow> regular_space X"
using Hausdorff_regular locally_compact_imp_kc_eq_Hausdorff_space by blast
lemma kc_locally_compact_space:
"kc_space X
\<Longrightarrow> neighbourhood_base_of (compactin X) X \<longleftrightarrow> locally_compact_space X \<and> Hausdorff_space X \<and> regular_space X"
using Hausdorff_regular locally_compact_kc_space by blast
lemma locally_compact_space_closed_subset:
assumes loc: "locally_compact_space X" and "closedin X S"
shows "locally_compact_space (subtopology X S)"
proof (clarsimp simp: locally_compact_space_def)
fix x assume x: "x \<in> topspace X" "x \<in> S"
then obtain U K where UK: "openin X U \<and> compactin X K \<and> x \<in> U \<and> U \<subseteq> K"
by (meson loc locally_compact_space_def)
show "\<exists>U. openin (subtopology X S) U \<and>
(\<exists>K. compactin (subtopology X S) K \<and> x \<in> U \<and> U \<subseteq> K)"
proof (intro conjI exI)
show "openin (subtopology X S) (S \<inter> U)"
by (simp add: UK openin_subtopology_Int2)
show "compactin (subtopology X S) (S \<inter> K)"
by (simp add: UK assms(2) closed_Int_compactin compactin_subtopology)
qed (use UK x in auto)
qed
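text \<open>Closed subspaces inherit local compactness with no further assumptions; for open
  subspaces the next lemma additionally assumes that the ambient space is Hausdorff or regular.\<close>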
lemma locally_compact_space_open_subset:
- assumes reg: "regular_space X" and loc: "locally_compact_space X" and "openin X S"
+ assumes X: "Hausdorff_space X \<or> regular_space X" and loc: "locally_compact_space X" and "openin X S"
shows "locally_compact_space (subtopology X S)"
proof (clarsimp simp: locally_compact_space_def)
fix x assume x: "x \<in> topspace X" "x \<in> S"
then obtain U K where UK: "openin X U" "compactin X K" "x \<in> U" "U \<subseteq> K"
by (meson loc locally_compact_space_def)
- have "openin X (U \<inter> S)"
+ moreover have reg: "regular_space X"
+ using X loc locally_compact_Hausdorff_imp_regular_space by blast
+ moreover have "openin X (U \<inter> S)"
by (simp add: UK \<open>openin X S\<close> openin_Int)
- with UK reg x obtain V C
+ ultimately obtain V C
where VC: "openin X V" "closedin X C" "x \<in> V" "V \<subseteq> C" "C \<subseteq> U" "C \<subseteq> S"
- by (metis IntI le_inf_iff neighbourhood_base_of neighbourhood_base_of_closedin)
+ by (metis \<open>x \<in> S\<close> IntI le_inf_iff neighbourhood_base_of neighbourhood_base_of_closedin)
show "\<exists>U. openin (subtopology X S) U \<and>
(\<exists>K. compactin (subtopology X S) K \<and> x \<in> U \<and> U \<subseteq> K)"
proof (intro conjI exI)
show "openin (subtopology X S) V"
using VC by (meson \<open>openin X S\<close> openin_open_subtopology order_trans)
show "compactin (subtopology X S) (C \<inter> K)"
using UK VC closed_Int_compactin compactin_subtopology by fastforce
qed (use UK VC x in auto)
qed
lemma locally_compact_space_discrete_topology:
"locally_compact_space (discrete_topology U)"
by (simp add: neighbourhood_base_imp_locally_compact_space neighbourhood_base_of_discrete_topology)
lemma locally_compact_space_continuous_open_map_image:
"\<lbrakk>continuous_map X X' f; open_map X X' f;
f ` topspace X = topspace X'; locally_compact_space X\<rbrakk> \<Longrightarrow> locally_compact_space X'"
unfolding locally_compact_space_def open_map_def
by (smt (verit, ccfv_SIG) image_compactin image_iff image_mono)
lemma locally_compact_subspace_openin_closure_of:
assumes "Hausdorff_space X" and S: "S \<subseteq> topspace X"
and loc: "locally_compact_space (subtopology X S)"
shows "openin (subtopology X (X closure_of S)) S"
unfolding openin_subopen [where S=S]
proof clarify
fix a assume "a \<in> S"
then obtain T K where *: "openin X T" "compactin X K" "K \<subseteq> S" "a \<in> S" "a \<in> T" "S \<inter> T \<subseteq> K"
using loc unfolding locally_compact_space_def
by (metis IntE S compactin_subtopology inf_commute openin_subtopology topspace_subtopology_subset)
have "T \<inter> X closure_of S \<subseteq> X closure_of (T \<inter> S)"
by (simp add: "*"(1) openin_Int_closure_of_subset)
also have "... \<subseteq> S"
using * \<open>Hausdorff_space X\<close> by (metis closure_of_minimal compactin_imp_closedin order.trans inf_commute)
finally have "T \<inter> X closure_of S \<subseteq> T \<inter> S" by simp
then have "openin (subtopology X (X closure_of S)) (T \<inter> S)"
unfolding openin_subtopology using \<open>openin X T\<close> S closure_of_subset by fastforce
with * show "\<exists>T. openin (subtopology X (X closure_of S)) T \<and> a \<in> T \<and> T \<subseteq> S"
by blast
qed
lemma locally_compact_subspace_closed_Int_openin:
"\<lbrakk>Hausdorff_space X \<and> S \<subseteq> topspace X \<and> locally_compact_space(subtopology X S)\<rbrakk>
\<Longrightarrow> \<exists>C U. closedin X C \<and> openin X U \<and> C \<inter> U = S"
by (metis closedin_closure_of inf_commute locally_compact_subspace_openin_closure_of openin_subtopology)
lemma locally_compact_subspace_open_in_closure_of_eq:
assumes "Hausdorff_space X" and loc: "locally_compact_space X"
shows "openin (subtopology X (X closure_of S)) S \<longleftrightarrow> S \<subseteq> topspace X \<and> locally_compact_space(subtopology X S)" (is "?lhs=?rhs")
proof
assume L: ?lhs
then obtain "S \<subseteq> topspace X" "regular_space X"
using assms locally_compact_Hausdorff_imp_regular_space openin_subset by fastforce
then have "locally_compact_space (subtopology (subtopology X (X closure_of S)) S)"
by (simp add: L loc locally_compact_space_closed_subset locally_compact_space_open_subset regular_space_subtopology)
then show ?rhs
by (metis L inf.orderE inf_commute le_inf_iff openin_subset subtopology_subtopology topspace_subtopology)
next
assume ?rhs then show ?lhs
using assms locally_compact_subspace_openin_closure_of by blast
qed
lemma locally_compact_subspace_closed_Int_openin_eq:
assumes "Hausdorff_space X" and loc: "locally_compact_space X"
shows "(\<exists>C U. closedin X C \<and> openin X U \<and> C \<inter> U = S) \<longleftrightarrow> S \<subseteq> topspace X \<and> locally_compact_space(subtopology X S)" (is "?lhs=?rhs")
proof
assume L: ?lhs
then obtain C U where "closedin X C" "openin X U" and Seq: "S = C \<inter> U"
by blast
then have "C \<subseteq> topspace X"
by (simp add: closedin_subset)
have "locally_compact_space (subtopology (subtopology X C) (topspace (subtopology X C) \<inter> U))"
- proof (rule locally_compact_space_open_subset)
- show "regular_space (subtopology X C)"
- by (simp add: \<open>Hausdorff_space X\<close> loc locally_compact_Hausdorff_imp_regular_space regular_space_subtopology)
- show "locally_compact_space (subtopology X C)"
- by (simp add: \<open>closedin X C\<close> loc locally_compact_space_closed_subset)
- show "openin (subtopology X C) (topspace (subtopology X C) \<inter> U)"
- by (simp add: \<open>openin X U\<close> Int_left_commute inf_commute openin_Int openin_subtopology_Int2)
-qed
- then show ?rhs
- by (metis Seq \<open>C \<subseteq> topspace X\<close> inf.coboundedI1 subtopology_subtopology subtopology_topspace)
+ proof (rule locally_compact_space_open_subset)
+ show "locally_compact_space (subtopology X C)"
+ by (simp add: \<open>closedin X C\<close> loc locally_compact_space_closed_subset)
+ show "openin (subtopology X C) (topspace (subtopology X C) \<inter> U)"
+ by (simp add: \<open>openin X U\<close> Int_left_commute inf_commute openin_Int openin_subtopology_Int2)
+ qed (simp add: Hausdorff_space_subtopology \<open>Hausdorff_space X\<close>)
+ then show ?rhs
+ by (metis Seq \<open>C \<subseteq> topspace X\<close> inf.coboundedI1 subtopology_subtopology subtopology_topspace)
next
assume ?rhs then show ?lhs
- using assms locally_compact_subspace_closed_Int_openin by blast
+ using assms locally_compact_subspace_closed_Int_openin by blast
qed
lemma dense_locally_compact_openin_Hausdorff_space:
"\<lbrakk>Hausdorff_space X; S \<subseteq> topspace X; X closure_of S = topspace X;
locally_compact_space (subtopology X S)\<rbrakk> \<Longrightarrow> openin X S"
by (metis locally_compact_subspace_openin_closure_of subtopology_topspace)
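text \<open>A nonempty binary product is locally compact iff both factors are; for an arbitrary
  product one needs, in addition, that all but finitely many factors are compact.\<close>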
lemma locally_compact_space_prod_topology:
"locally_compact_space (prod_topology X Y) \<longleftrightarrow>
topspace (prod_topology X Y) = {} \<or>
locally_compact_space X \<and> locally_compact_space Y" (is "?lhs=?rhs")
proof (cases "topspace (prod_topology X Y) = {}")
case True
then show ?thesis
unfolding locally_compact_space_def by blast
next
case False
then obtain w z where wz: "w \<in> topspace X" "z \<in> topspace Y"
by auto
show ?thesis
proof
assume L: ?lhs then show ?rhs
by (metis wz empty_iff locally_compact_space_retraction_map_image retraction_map_fst retraction_map_snd)
next
assume R: ?rhs
show ?lhs
unfolding locally_compact_space_def
proof clarsimp
fix x y
assume "x \<in> topspace X" and "y \<in> topspace Y"
obtain U C where "openin X U" "compactin X C" "x \<in> U" "U \<subseteq> C"
by (meson False R \<open>x \<in> topspace X\<close> locally_compact_space_def)
obtain V D where "openin Y V" "compactin Y D" "y \<in> V" "V \<subseteq> D"
by (meson False R \<open>y \<in> topspace Y\<close> locally_compact_space_def)
show "\<exists>U. openin (prod_topology X Y) U \<and> (\<exists>K. compactin (prod_topology X Y) K \<and> (x, y) \<in> U \<and> U \<subseteq> K)"
proof (intro exI conjI)
show "openin (prod_topology X Y) (U \<times> V)"
by (simp add: \<open>openin X U\<close> \<open>openin Y V\<close> openin_prod_Times_iff)
show "compactin (prod_topology X Y) (C \<times> D)"
by (simp add: \<open>compactin X C\<close> \<open>compactin Y D\<close> compactin_Times)
show "(x, y) \<in> U \<times> V"
by (simp add: \<open>x \<in> U\<close> \<open>y \<in> V\<close>)
show "U \<times> V \<subseteq> C \<times> D"
by (simp add: Sigma_mono \<open>U \<subseteq> C\<close> \<open>V \<subseteq> D\<close>)
qed
qed
qed
qed
lemma locally_compact_space_product_topology:
"locally_compact_space(product_topology X I) \<longleftrightarrow>
topspace(product_topology X I) = {} \<or>
finite {i \<in> I. \<not> compact_space(X i)} \<and> (\<forall>i \<in> I. locally_compact_space(X i))" (is "?lhs=?rhs")
proof (cases "topspace (product_topology X I) = {}")
case True
then show ?thesis
by (simp add: locally_compact_space_def)
next
case False
show ?thesis
proof
assume L: ?lhs
obtain z where z: "z \<in> topspace (product_topology X I)"
using False by auto
with L z obtain U C where "openin (product_topology X I) U" "compactin (product_topology X I) C" "z \<in> U" "U \<subseteq> C"
by (meson locally_compact_space_def)
then obtain V where finV: "finite {i \<in> I. V i \<noteq> topspace (X i)}" and "\<forall>i \<in> I. openin (X i) (V i)"
and "z \<in> PiE I V" "PiE I V \<subseteq> U"
by (auto simp: openin_product_topology_alt)
have "compact_space (X i)" if "i \<in> I" "V i = topspace (X i)" for i
proof -
have "compactin (X i) ((\<lambda>x. x i) ` C)"
using \<open>compactin (product_topology X I) C\<close> image_compactin
by (metis continuous_map_product_projection \<open>i \<in> I\<close>)
moreover have "V i \<subseteq> (\<lambda>x. x i) ` C"
proof -
have "V i \<subseteq> (\<lambda>x. x i) ` Pi\<^sub>E I V"
by (metis \<open>z \<in> Pi\<^sub>E I V\<close> empty_iff image_projection_PiE order_refl \<open>i \<in> I\<close>)
also have "\<dots> \<subseteq> (\<lambda>x. x i) ` C"
using \<open>U \<subseteq> C\<close> \<open>Pi\<^sub>E I V \<subseteq> U\<close> by blast
finally show ?thesis .
qed
ultimately show ?thesis
by (metis closed_compactin closedin_topspace compact_space_def that(2))
qed
with finV have "finite {i \<in> I. \<not> compact_space (X i)}"
by (metis (mono_tags, lifting) mem_Collect_eq finite_subset subsetI)
moreover have "locally_compact_space (X i)" if "i \<in> I" for i
by (meson False L locally_compact_space_retraction_map_image retraction_map_product_projection that)
ultimately show ?rhs by metis
next
assume R: ?rhs
show ?lhs
unfolding locally_compact_space_def
proof clarsimp
fix z
assume z: "z \<in> (\<Pi>\<^sub>E i\<in>I. topspace (X i))"
have "\<exists>U C. openin (X i) U \<and> compactin (X i) C \<and> z i \<in> U \<and> U \<subseteq> C \<and>
(compact_space(X i) \<longrightarrow> U = topspace(X i) \<and> C = topspace(X i))"
if "i \<in> I" for i
using that R z unfolding locally_compact_space_def compact_space_def
by (metis (no_types, lifting) False PiE_mem openin_topspace set_eq_subset)
then obtain U C where UC: "\<And>i. i \<in> I \<Longrightarrow>
openin (X i) (U i) \<and> compactin (X i) (C i) \<and> z i \<in> U i \<and> U i \<subseteq> C i \<and>
(compact_space(X i) \<longrightarrow> U i = topspace(X i) \<and> C i = topspace(X i))"
by metis
show "\<exists>U. openin (product_topology X I) U \<and> (\<exists>K. compactin (product_topology X I) K \<and> z \<in> U \<and> U \<subseteq> K)"
proof (intro exI conjI)
show "openin (product_topology X I) (Pi\<^sub>E I U)"
by (smt (verit) Collect_cong False R UC compactin_subspace openin_PiE_gen subset_antisym subtopology_topspace)
show "compactin (product_topology X I) (Pi\<^sub>E I C)"
by (simp add: UC compactin_PiE)
qed (use UC z in blast)+
qed
qed
qed
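text \<open>Sums are simpler: a disjoint sum is locally compact iff each summand is.\<close>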
lemma locally_compact_space_sum_topology:
"locally_compact_space (sum_topology X I) \<longleftrightarrow> (\<forall>i \<in> I. locally_compact_space (X i))" (is "?lhs=?rhs")
proof
assume ?lhs then show ?rhs
by (metis closed_map_component_injection embedding_map_imp_homeomorphic_space embedding_map_component_injection
embedding_imp_closed_map_eq homeomorphic_locally_compact_space locally_compact_space_closed_subset)
next
assume R: ?rhs
show ?lhs
unfolding locally_compact_space_def
proof clarsimp
fix i y
assume "i \<in> I" and y: "y \<in> topspace (X i)"
then obtain U K where UK: "openin (X i) U" "compactin (X i) K" "y \<in> U" "U \<subseteq> K"
using R by (fastforce simp: locally_compact_space_def)
then show "\<exists>U. openin (sum_topology X I) U \<and> (\<exists>K. compactin (sum_topology X I) K \<and> (i, y) \<in> U \<and> U \<subseteq> K)"
by (metis \<open>i \<in> I\<close> continuous_map_component_injection image_compactin image_mono
imageI open_map_component_injection open_map_def)
qed
qed
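text \<open>Next, the product of a quotient map with the identity on a locally compact regular
  (or Hausdorff) space is again a quotient map, on either side of the product.\<close>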
proposition quotient_map_prod_right:
assumes loc: "locally_compact_space Z"
and reg: "Hausdorff_space Z \<or> regular_space Z"
and f: "quotient_map X Y f"
shows "quotient_map (prod_topology Z X) (prod_topology Z Y) (\<lambda>(x,y). (x,f y))"
proof -
define h where "h \<equiv> (\<lambda>(x::'a,y). (x,f y))"
have "continuous_map (prod_topology Z X) Y (f o snd)"
by (simp add: continuous_map_of_snd f quotient_imp_continuous_map)
then have cmh: "continuous_map (prod_topology Z X) (prod_topology Z Y) h"
by (simp add: h_def continuous_map_paired split_def continuous_map_fst o_def)
have fim: "f ` topspace X = topspace Y"
by (simp add: f quotient_imp_surjective_map)
moreover
have "openin (prod_topology Z X) {u \<in> topspace Z \<times> topspace X. h u \<in> W}
\<longleftrightarrow> openin (prod_topology Z Y) W" (is "?lhs=?rhs")
if W: "W \<subseteq> topspace Z \<times> topspace Y" for W
proof
define S where "S \<equiv> {u \<in> topspace Z \<times> topspace X. h u \<in> W}"
assume ?lhs
then have L: "openin (prod_topology Z X) S"
using S_def by blast
have "\<exists>T. openin (prod_topology Z Y) T \<and> (x0, z0) \<in> T \<and> T \<subseteq> W"
if \<section>: "(x0,z0) \<in> W" for x0 z0
proof -
have x0: "x0 \<in> topspace Z"
using W that by blast
obtain y0 where y0: "y0 \<in> topspace X" "f y0 = z0"
by (metis W fim imageE insert_absorb insert_subset mem_Sigma_iff \<section>)
then have "(x0, y0) \<in> S"
by (simp add: S_def h_def that x0)
have "continuous_map Z (prod_topology Z X) (\<lambda>x. (x, y0))"
by (simp add: continuous_map_paired y0)
with openin_continuous_map_preimage [OF _ L]
have ope_ZS: "openin Z {x \<in> topspace Z. (x,y0) \<in> S}"
by blast
obtain U U' where "openin Z U" "compactin Z U'" "closedin Z U'"
"x0 \<in> U" "U \<subseteq> U'" "U' \<subseteq> {x \<in> topspace Z. (x,y0) \<in> S}"
using loc ope_ZS x0 \<open>(x0, y0) \<in> S\<close>
by (force simp: locally_compact_space_neighbourhood_base_closedin [OF reg]
neighbourhood_base_of)
then have D: "U' \<times> {y0} \<subseteq> S"
by auto
define V where "V \<equiv> {z \<in> topspace Y. U' \<times> {y \<in> topspace X. f y = z} \<subseteq> S}"
have "z0 \<in> V"
using D y0 Int_Collect fim by (fastforce simp: h_def V_def S_def)
have "openin X {x \<in> topspace X. f x \<in> V} \<Longrightarrow> openin Y V"
using f unfolding V_def quotient_map_def subset_iff
by (smt (verit, del_insts) Collect_cong mem_Collect_eq)
moreover have "openin X {x \<in> topspace X. f x \<in> V}"
proof -
let ?Z = "subtopology Z U'"
have *: "{x \<in> topspace X. f x \<in> V} = topspace X - snd ` (U' \<times> topspace X - S)"
by (force simp: V_def S_def h_def simp flip: fim)
have "compact_space ?Z"
using \<open>compactin Z U'\<close> compactin_subspace by auto
moreover have "closedin (prod_topology ?Z X) (U' \<times> topspace X - S)"
by (simp add: L \<open>closedin Z U'\<close> closedin_closed_subtopology closedin_diff closedin_prod_Times_iff
prod_topology_subtopology(1))
ultimately show ?thesis
using "*" closed_map_snd closed_map_def by fastforce
qed
ultimately have "openin Y V"
by metis
show ?thesis
proof (intro conjI exI)
show "openin (prod_topology Z Y) (U \<times> V)"
by (simp add: openin_prod_Times_iff \<open>openin Z U\<close> \<open>openin Y V\<close>)
show "(x0, z0) \<in> U \<times> V"
by (simp add: \<open>x0 \<in> U\<close> \<open>z0 \<in> V\<close>)
show "U \<times> V \<subseteq> W"
using \<open>U \<subseteq> U'\<close> by (force simp: V_def S_def h_def simp flip: fim)
qed
qed
with openin_subopen show ?rhs by force
next
assume ?rhs then show ?lhs
using openin_continuous_map_preimage cmh by fastforce
qed
ultimately show ?thesis
by (fastforce simp: image_iff quotient_map_def h_def)
qed
lemma quotient_map_prod_left:
assumes loc: "locally_compact_space Z"
and reg: "Hausdorff_space Z \<or> regular_space Z"
and f: "quotient_map X Y f"
shows "quotient_map (prod_topology X Z) (prod_topology Y Z) (\<lambda>(x,y). (f x,y))"
proof -
have "(\<lambda>(x,y). (f x,y)) = prod.swap \<circ> (\<lambda>(x,y). (x,f y)) \<circ> prod.swap"
by force
then
show ?thesis
apply (rule ssubst)
proof (intro quotient_map_compose)
show "quotient_map (prod_topology X Z) (prod_topology Z X) prod.swap"
"quotient_map (prod_topology Z Y) (prod_topology Y Z) prod.swap"
using homeomorphic_map_def homeomorphic_map_swap quotient_map_eq by fastforce+
show "quotient_map (prod_topology Z X) (prod_topology Z Y) (\<lambda>(x, y). (x, f y))"
by (simp add: f loc quotient_map_prod_right reg)
qed
qed
lemma locally_compact_space_perfect_map_preimage:
assumes "locally_compact_space X'" and f: "perfect_map X X' f"
shows "locally_compact_space X"
unfolding locally_compact_space_def
proof (intro strip)
fix x
assume x: "x \<in> topspace X"
then obtain U K where "openin X' U" "compactin X' K" "f x \<in> U" "U \<subseteq> K"
using assms unfolding locally_compact_space_def perfect_map_def
by (metis (no_types, lifting) continuous_map_closedin)
show "\<exists>U K. openin X U \<and> compactin X K \<and> x \<in> U \<and> U \<subseteq> K"
proof (intro exI conjI)
have "continuous_map X X' f"
using f perfect_map_def by blast
then show "openin X {x \<in> topspace X. f x \<in> U}"
by (simp add: \<open>openin X' U\<close> continuous_map)
show "compactin X {x \<in> topspace X. f x \<in> K}"
using \<open>compactin X' K\<close> f perfect_imp_proper_map proper_map_alt by blast
qed (use x \<open>f x \<in> U\<close> \<open>U \<subseteq> K\<close> in auto)
qed
subsection\<open>Special characterizations of classes of functions into and out of R\<close>
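text \<open>The following lemmas relate the fibre-based notion \<open>monotone_map\<close>, for maps into the
  real line, to the order-theoretic notions \<open>mono_on\<close>, \<open>antimono_on\<close> and strict monotonicity.\<close>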
lemma monotone_map_into_euclideanreal_alt:
assumes "continuous_map X euclideanreal f"
shows "(\<forall>k. is_interval k \<longrightarrow> connectedin X {x \<in> topspace X. f x \<in> k}) \<longleftrightarrow>
connected_space X \<and> monotone_map X euclideanreal f" (is "?lhs=?rhs")
proof
assume L: ?lhs
show ?rhs
proof
show "connected_space X"
using L connected_space_subconnected by blast
have "connectedin X {x \<in> topspace X. f x \<in> {y}}" for y
by (metis L is_interval_1 nle_le singletonD)
then show "monotone_map X euclideanreal f"
by (simp add: monotone_map)
qed
next
assume R: ?rhs
then
have *: False
if "a < b" "closedin X U" "closedin X V" "U \<noteq> {}" "V \<noteq> {}" "disjnt U V"
and UV: "{x \<in> topspace X. f x \<in> {a..b}} = U \<union> V"
and dis: "disjnt U {x \<in> topspace X. f x = b}" "disjnt V {x \<in> topspace X. f x = a}"
for a b U V
proof -
define E1 where "E1 \<equiv> U \<union> {x \<in> topspace X. f x \<in> {c. c \<le> a}}"
define E2 where "E2 \<equiv> V \<union> {x \<in> topspace X. f x \<in> {c. b \<le> c}}"
have "closedin X {x \<in> topspace X. f x \<le> a}" "closedin X {x \<in> topspace X. b \<le> f x}"
using assms continuous_map_upper_lower_semicontinuous_le by blast+
then have "closedin X E1" "closedin X E2"
unfolding E1_def E2_def using that by auto
moreover
have "E1 \<inter> E2 = {}"
unfolding E1_def E2_def using \<open>a<b\<close> \<open>disjnt U V\<close> dis UV
by (simp add: disjnt_def set_eq_iff) (smt (verit))
have "topspace X \<subseteq> E1 \<union> E2"
unfolding E1_def E2_def using UV by fastforce
have "E1 = {} \<or> E2 = {}"
using R connected_space_closedin
using \<open>E1 \<inter> E2 = {}\<close> \<open>closedin X E1\<close> \<open>closedin X E2\<close> \<open>topspace X \<subseteq> E1 \<union> E2\<close> by blast
then show False
using E1_def E2_def \<open>U \<noteq> {}\<close> \<open>V \<noteq> {}\<close> by fastforce
qed
show ?lhs
proof (intro strip)
fix K :: "real set"
assume "is_interval K"
have False
if "a \<in> K" "b \<in> K" and clo: "closedin X U" "closedin X V"
and UV: "{x. x \<in> topspace X \<and> f x \<in> K} \<subseteq> U \<union> V"
"U \<inter> V \<inter> {x. x \<in> topspace X \<and> f x \<in> K} = {}"
and nondis: "\<not> disjnt U {x. x \<in> topspace X \<and> f x = a}"
"\<not> disjnt V {x. x \<in> topspace X \<and> f x = b}"
for a b U V
proof -
have "\<forall>y. connectedin X {x. x \<in> topspace X \<and> f x = y}"
using R monotone_map by fastforce
then have **: False if "p \<in> U \<and> q \<in> V \<and> f p = f q \<and> f q \<in> K" for p q
unfolding connectedin_closedin
using \<open>a \<in> K\<close> \<open>b \<in> K\<close> UV clo that
by (smt (verit, ccfv_threshold) closedin_subset disjoint_iff mem_Collect_eq subset_iff)
consider "a < b" | "a = b" | "b < a"
by linarith
then show ?thesis
proof cases
case 1
define W where "W \<equiv> {x \<in> topspace X. f x \<in> {a..b}}"
have "closedin X W"
unfolding W_def
by (metis (no_types) assms closed_real_atLeastAtMost closed_closedin continuous_map_closedin)
show ?thesis
proof (rule * [OF 1 , of "U \<inter> W" "V \<inter> W"])
show "closedin X (U \<inter> W)" "closedin X (V \<inter> W)"
using \<open>closedin X W\<close> clo by auto
show "U \<inter> W \<noteq> {}" "V \<inter> W \<noteq> {}"
using nondis 1 by (auto simp: disjnt_iff W_def)
show "disjnt (U \<inter> W) (V \<inter> W)"
using \<open>is_interval K\<close> unfolding is_interval_1 disjnt_iff W_def
by (metis (mono_tags, lifting) \<open>a \<in> K\<close> \<open>b \<in> K\<close> ** Int_Collect atLeastAtMost_iff)
have "\<And>x. \<lbrakk>x \<in> topspace X; a \<le> f x; f x \<le> b\<rbrakk> \<Longrightarrow> x \<in> U \<or> x \<in> V"
using \<open>a \<in> K\<close> \<open>b \<in> K\<close> \<open>is_interval K\<close> UV unfolding is_interval_1 disjnt_iff
by blast
then show "{x \<in> topspace X. f x \<in> {a..b}} = U \<inter> W \<union> V \<inter> W"
by (auto simp: W_def)
show "disjnt (U \<inter> W) {x \<in> topspace X. f x = b}" "disjnt (V \<inter> W) {x \<in> topspace X. f x = a}"
using ** \<open>a \<in> K\<close> \<open>b \<in> K\<close> nondis by (force simp: disjnt_iff)+
qed
next
case 2
then show ?thesis
using ** nondis \<open>b \<in> K\<close> by (force simp add: disjnt_iff)
next
case 3
define W where "W \<equiv> {x \<in> topspace X. f x \<in> {b..a}}"
have "closedin X W"
unfolding W_def
by (metis (no_types) assms closed_real_atLeastAtMost closed_closedin continuous_map_closedin)
show ?thesis
proof (rule * [OF 3, of "V \<inter> W" "U \<inter> W"])
show "closedin X (U \<inter> W)" "closedin X (V \<inter> W)"
using \<open>closedin X W\<close> clo by auto
show "U \<inter> W \<noteq> {}" "V \<inter> W \<noteq> {}"
using nondis 3 by (auto simp: disjnt_iff W_def)
show "disjnt (V \<inter> W) (U \<inter> W)"
using \<open>is_interval K\<close> unfolding is_interval_1 disjnt_iff W_def
by (metis (mono_tags, lifting) \<open>a \<in> K\<close> \<open>b \<in> K\<close> ** Int_Collect atLeastAtMost_iff)
have "\<And>x. \<lbrakk>x \<in> topspace X; b \<le> f x; f x \<le> a\<rbrakk> \<Longrightarrow> x \<in> U \<or> x \<in> V"
using \<open>a \<in> K\<close> \<open>b \<in> K\<close> \<open>is_interval K\<close> UV unfolding is_interval_1 disjnt_iff
by blast
then show "{x \<in> topspace X. f x \<in> {b..a}} = V \<inter> W \<union> U \<inter> W"
by (auto simp: W_def)
show "disjnt (V \<inter> W) {x \<in> topspace X. f x = a}" "disjnt (U \<inter> W) {x \<in> topspace X. f x = b}"
using ** \<open>a \<in> K\<close> \<open>b \<in> K\<close> nondis by (force simp: disjnt_iff)+
qed
qed
qed
then show "connectedin X {x \<in> topspace X. f x \<in> K}"
unfolding connectedin_closedin disjnt_iff by blast
qed
qed
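text \<open>Specialised to a connected domain, this yields the more directly usable characterisation
  below: a continuous real-valued map is monotone iff preimages of intervals are connected.\<close>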
lemma monotone_map_into_euclideanreal:
"\<lbrakk>connected_space X; continuous_map X euclideanreal f\<rbrakk>
\<Longrightarrow> monotone_map X euclideanreal f \<longleftrightarrow>
(\<forall>k. is_interval k \<longrightarrow> connectedin X {x \<in> topspace X. f x \<in> k})"
by (simp add: monotone_map_into_euclideanreal_alt)
lemma monotone_map_euclideanreal_alt:
"(\<forall>I::real set. is_interval I \<longrightarrow> is_interval {x::real. x \<in> S \<and> f x \<in> I}) \<longleftrightarrow>
is_interval S \<and> (mono_on S f \<or> antimono_on S f)" (is "?lhs=?rhs")
proof
assume L [rule_format]: ?lhs
show ?rhs
proof
show "is_interval S"
using L is_interval_1 by auto
have False if "a \<in> S" "b \<in> S" "c \<in> S" "a<b" "b<c" and d: "f a < f b \<and> f c < f b \<or> f a > f b \<and> f c > f b" for a b c
using d
proof
assume "f a < f b \<and> f c < f b"
then show False
using L [of "{y. y < f b}"] unfolding is_interval_1
by (smt (verit, best) mem_Collect_eq that)
next
assume "f b < f a \<and> f b < f c"
then show False
using L [of "{y. y > f b}"] unfolding is_interval_1
by (smt (verit, best) mem_Collect_eq that)
qed
then show "mono_on S f \<or> monotone_on S (\<le>) (\<ge>) f"
unfolding monotone_on_def by (smt (verit))
qed
next
assume ?rhs then show ?lhs
unfolding is_interval_1 monotone_on_def by simp meson
qed
lemma monotone_map_euclideanreal:
fixes S :: "real set"
shows
"\<lbrakk>is_interval S; continuous_on S f\<rbrakk> \<Longrightarrow>
monotone_map (top_of_set S) euclideanreal f \<longleftrightarrow> (mono_on S f \<or> monotone_on S (\<le>) (\<ge>) f)"
using monotone_map_euclideanreal_alt
by (simp add: monotone_map_into_euclideanreal connectedin_subtopology is_interval_connected_1)
lemma injective_eq_monotone_map:
fixes f :: "real \<Rightarrow> real"
assumes "is_interval S" "continuous_on S f"
shows "inj_on f S \<longleftrightarrow> strict_mono_on S f \<or> strict_antimono_on S f"
by (metis assms injective_imp_monotone_map monotone_map_euclideanreal strict_antimono_iff_antimono
strict_mono_iff_mono top_greatest topspace_euclidean topspace_euclidean_subtopology)
subsection\<open>Normal spaces including Urysohn's lemma and the Tietze extension theorem\<close>
definition normal_space
where "normal_space X \<equiv>
\<forall>S T. closedin X S \<and> closedin X T \<and> disjnt S T
\<longrightarrow> (\<exists>U V. openin X U \<and> openin X V \<and> S \<subseteq> U \<and> T \<subseteq> V \<and> disjnt U V)"
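text \<open>Informally: any two disjoint closed sets can be separated by disjoint open sets. The
  reformulations \<open>normal_space\<close> and \<open>normal_space_alt\<close> below express this via closures and
  via shrinking an open neighbourhood of a closed set.\<close>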
lemma normal_space_retraction_map_image:
assumes r: "retraction_map X Y r" and X: "normal_space X"
shows "normal_space Y"
unfolding normal_space_def
proof clarify
fix S T
assume "closedin Y S" and "closedin Y T" and "disjnt S T"
obtain r' where r': "retraction_maps X Y r r'"
using r retraction_map_def by blast
have "closedin X {x \<in> topspace X. r x \<in> S}" "closedin X {x \<in> topspace X. r x \<in> T}"
using closedin_continuous_map_preimage \<open>closedin Y S\<close> \<open>closedin Y T\<close> r'
by (auto simp: retraction_maps_def)
moreover
have "disjnt {x \<in> topspace X. r x \<in> S} {x \<in> topspace X. r x \<in> T}"
using \<open>disjnt S T\<close> by (auto simp: disjnt_def)
ultimately
obtain U V where UV: "openin X U \<and> openin X V \<and> {x \<in> topspace X. r x \<in> S} \<subseteq> U \<and> {x \<in> topspace X. r x \<in> T} \<subseteq> V" "disjnt U V"
by (meson X normal_space_def)
show "\<exists>U V. openin Y U \<and> openin Y V \<and> S \<subseteq> U \<and> T \<subseteq> V \<and> disjnt U V"
proof (intro exI conjI)
show "openin Y {x \<in> topspace Y. r' x \<in> U}" "openin Y {x \<in> topspace Y. r' x \<in> V}"
using openin_continuous_map_preimage UV r'
by (auto simp: retraction_maps_def)
show "S \<subseteq> {x \<in> topspace Y. r' x \<in> U}" "T \<subseteq> {x \<in> topspace Y. r' x \<in> V}"
using openin_continuous_map_preimage UV r' \<open>closedin Y S\<close> \<open>closedin Y T\<close>
by (auto simp add: closedin_def continuous_map_closedin retraction_maps_def subset_iff)
show "disjnt {x \<in> topspace Y. r' x \<in> U} {x \<in> topspace Y. r' x \<in> V}"
using \<open>disjnt U V\<close> by (auto simp: disjnt_def)
qed
qed
lemma homeomorphic_normal_space:
"X homeomorphic_space Y \<Longrightarrow> normal_space X \<longleftrightarrow> normal_space Y"
unfolding homeomorphic_space_def
by (meson homeomorphic_imp_retraction_maps homeomorphic_maps_sym normal_space_retraction_map_image retraction_map_def)
lemma normal_space:
"normal_space X \<longleftrightarrow>
(\<forall>S T. closedin X S \<and> closedin X T \<and> disjnt S T
\<longrightarrow> (\<exists>U. openin X U \<and> S \<subseteq> U \<and> disjnt T (X closure_of U)))"
proof -
have "(\<exists>V. openin X U \<and> openin X V \<and> S \<subseteq> U \<and> T \<subseteq> V \<and> disjnt U V) \<longleftrightarrow> openin X U \<and> S \<subseteq> U \<and> disjnt T (X closure_of U)"
(is "?lhs=?rhs")
if "closedin X S" "closedin X T" "disjnt S T" for S T U
proof
show "?lhs \<Longrightarrow> ?rhs"
by (smt (verit, best) disjnt_iff in_closure_of subsetD)
assume R: ?rhs
then have "(U \<union> S) \<inter> (topspace X - X closure_of U) = {}"
by (metis Diff_eq_empty_iff Int_Diff Int_Un_eq(4) closure_of_subset inf.orderE openin_subset)
moreover have "T \<subseteq> topspace X - X closure_of U"
by (meson DiffI R closedin_subset disjnt_iff subsetD subsetI that(2))
ultimately show ?lhs
by (metis R closedin_closure_of closedin_def disjnt_def sup.orderE)
qed
then show ?thesis
unfolding normal_space_def by meson
qed
lemma normal_space_alt:
"normal_space X \<longleftrightarrow>
(\<forall>S U. closedin X S \<and> openin X U \<and> S \<subseteq> U \<longrightarrow> (\<exists>V. openin X V \<and> S \<subseteq> V \<and> X closure_of V \<subseteq> U))"
proof -
have "\<exists>V. openin X V \<and> S \<subseteq> V \<and> X closure_of V \<subseteq> U"
if "\<And>T. closedin X T \<longrightarrow> disjnt S T \<longrightarrow> (\<exists>U. openin X U \<and> S \<subseteq> U \<and> disjnt T (X closure_of U))"
"closedin X S" "openin X U" "S \<subseteq> U"
for S U
using that
by (smt (verit) Diff_eq_empty_iff Int_Diff closure_of_subset_topspace disjnt_def inf.orderE inf_commute openin_closedin_eq)
moreover have "\<exists>U. openin X U \<and> S \<subseteq> U \<and> disjnt T (X closure_of U)"
if "\<And>U. openin X U \<and> S \<subseteq> U \<longrightarrow> (\<exists>V. openin X V \<and> S \<subseteq> V \<and> X closure_of V \<subseteq> U)"
and "closedin X S" "closedin X T" "disjnt S T"
for S T
using that
by (smt (verit) Diff_Diff_Int Diff_eq_empty_iff Int_Diff closedin_def disjnt_def inf.absorb_iff2 inf.orderE)
ultimately show ?thesis
by (fastforce simp: normal_space)
qed
lemma normal_space_closures:
"normal_space X \<longleftrightarrow>
(\<forall>S T. S \<subseteq> topspace X \<and> T \<subseteq> topspace X \<and>
disjnt (X closure_of S) (X closure_of T)
\<longrightarrow> (\<exists>U V. openin X U \<and> openin X V \<and> S \<subseteq> U \<and> T \<subseteq> V \<and> disjnt U V))"
(is "?lhs=?rhs")
proof
show "?lhs \<Longrightarrow> ?rhs"
by (meson closedin_closure_of closure_of_subset normal_space_def order.trans)
show "?rhs \<Longrightarrow> ?lhs"
by (metis closedin_subset closure_of_eq normal_space_def)
qed
lemma normal_space_disjoint_closures:
"normal_space X \<longleftrightarrow>
(\<forall>S T. closedin X S \<and> closedin X T \<and> disjnt S T
\<longrightarrow> (\<exists>U V. openin X U \<and> openin X V \<and> S \<subseteq> U \<and> T \<subseteq> V \<and>
disjnt (X closure_of U) (X closure_of V)))"
(is "?lhs=?rhs")
proof
show "?lhs \<Longrightarrow> ?rhs"
by (metis closedin_closure_of normal_space)
show "?rhs \<Longrightarrow> ?lhs"
by (smt (verit) closure_of_subset disjnt_iff normal_space openin_subset subset_eq)
qed
lemma normal_space_dual:
"normal_space X \<longleftrightarrow>
(\<forall>U V. openin X U \<longrightarrow> openin X V \<and> U \<union> V = topspace X
\<longrightarrow> (\<exists>S T. closedin X S \<and> closedin X T \<and> S \<subseteq> U \<and> T \<subseteq> V \<and> S \<union> T = topspace X))"
(is "_ = ?rhs")
proof -
have "normal_space X \<longleftrightarrow>
(\<forall>U V. closedin X U \<longrightarrow> closedin X V \<longrightarrow> disjnt U V \<longrightarrow>
(\<exists>S T. \<not> (openin X S \<and> openin X T \<longrightarrow>
\<not> (U \<subseteq> S \<and> V \<subseteq> T \<and> disjnt S T))))"
unfolding normal_space_def by meson
also have "... \<longleftrightarrow> (\<forall>U V. openin X U \<longrightarrow> openin X V \<and> disjnt (topspace X - U) (topspace X - V) \<longrightarrow>
(\<exists>S T. \<not> (openin X S \<and> openin X T \<longrightarrow>
\<not> (topspace X - U \<subseteq> S \<and> topspace X - V \<subseteq> T \<and> disjnt S T))))"
by (auto simp: all_closedin)
also have "... \<longleftrightarrow> ?rhs"
proof -
have *: "disjnt (topspace X - U) (topspace X - V) \<longleftrightarrow> U \<union> V = topspace X"
if "U \<subseteq> topspace X" "V \<subseteq> topspace X" for U V
using that by (auto simp: disjnt_iff)
show ?thesis
using ex_closedin *
apply (simp add: ex_closedin * [OF openin_subset openin_subset] cong: conj_cong)
apply (intro all_cong1 ex_cong1 imp_cong refl)
by (smt (verit, best) "*" Diff_Diff_Int Diff_subset Diff_subset_conv inf.orderE inf_commute openin_subset sup_commute)
qed
finally show ?thesis .
qed
lemma normal_t1_imp_Hausdorff_space:
assumes "normal_space X" "t1_space X"
shows "Hausdorff_space X"
unfolding Hausdorff_space_def
proof clarify
fix x y
assume xy: "x \<in> topspace X" "y \<in> topspace X" "x \<noteq> y"
then have "disjnt {x} {y}"
by (auto simp: disjnt_iff)
then show "\<exists>U V. openin X U \<and> openin X V \<and> x \<in> U \<and> y \<in> V \<and> disjnt U V"
using assms xy closedin_t1_singleton normal_space_def
by (metis singletonI subsetD)
qed
lemma normal_t1_eq_Hausdorff_space:
"normal_space X \<Longrightarrow> t1_space X \<longleftrightarrow> Hausdorff_space X"
using normal_t1_imp_Hausdorff_space t1_or_Hausdorff_space by blast
lemma normal_t1_imp_regular_space:
"\<lbrakk>normal_space X; t1_space X\<rbrakk> \<Longrightarrow> regular_space X"
by (metis compactin_imp_closedin normal_space_def normal_t1_eq_Hausdorff_space regular_space_compact_closed_sets)
lemma compact_Hausdorff_or_regular_imp_normal_space:
"\<lbrakk>compact_space X; Hausdorff_space X \<or> regular_space X\<rbrakk>
\<Longrightarrow> normal_space X"
by (metis Hausdorff_space_compact_sets closedin_compact_space normal_space_def regular_space_compact_closed_sets)
lemma normal_space_discrete_topology:
"normal_space(discrete_topology U)"
by (metis discrete_topology_closure_of inf_le2 normal_space_alt)
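text\<open>Thus every compact Hausdorff space is normal, and so is every discrete space.  A
minimal illustrative instance, discharged directly by the preceding lemma (the choice of
underlying set is arbitrary):\<close>
lemma "normal_space (discrete_topology (UNIV :: nat set))"
by (rule normal_space_discrete_topology)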
lemma normal_space_fsigmas:
"normal_space X \<longleftrightarrow>
(\<forall>S T. fsigma_in X S \<and> fsigma_in X T \<and> separatedin X S T
\<longrightarrow> (\<exists>U B. openin X U \<and> openin X B \<and> S \<subseteq> U \<and> T \<subseteq> B \<and> disjnt U B))" (is "?lhs=?rhs")
proof
assume L: ?lhs
show ?rhs
proof clarify
fix S T
assume "fsigma_in X S"
then obtain C where C: "\<And>n. closedin X (C n)" "\<And>n. C n \<subseteq> C (Suc n)" "\<Union> (range C) = S"
by (meson fsigma_in_ascending)
assume "fsigma_in X T"
then obtain D where D: "\<And>n. closedin X (D n)" "\<And>n. D n \<subseteq> D (Suc n)" "\<Union> (range D) = T"
by (meson fsigma_in_ascending)
assume "separatedin X S T"
have "\<And>n. disjnt (D n) (X closure_of S)"
by (metis D(3) \<open>separatedin X S T\<close> disjnt_Union1 disjnt_def rangeI separatedin_def)
then have "\<And>n. \<exists>V V'. openin X V \<and> openin X V' \<and> D n \<subseteq> V \<and> X closure_of S \<subseteq> V' \<and> disjnt V V'"
by (metis D(1) L closedin_closure_of normal_space_def)
then obtain V V' where V: "\<And>n. openin X (V n)" and "\<And>n. openin X (V' n)" "\<And>n. disjnt (V n) (V' n)"
and DV: "\<And>n. D n \<subseteq> V n"
and subV': "\<And>n. X closure_of S \<subseteq> V' n"
by metis
then have VV: "V' n \<inter> X closure_of V n = {}" for n
using openin_Int_closure_of_eq_empty [of X "V' n" "V n"] by (simp add: Int_commute disjnt_def)
have "\<And>n. disjnt (C n) (X closure_of T)"
by (metis C(3) \<open>separatedin X S T\<close> disjnt_Union1 disjnt_def rangeI separatedin_def)
then have "\<And>n. \<exists>U U'. openin X U \<and> openin X U' \<and> C n \<subseteq> U \<and> X closure_of T \<subseteq> U' \<and> disjnt U U'"
by (metis C(1) L closedin_closure_of normal_space_def)
then obtain U U' where U: "\<And>n. openin X (U n)" and "\<And>n. openin X (U' n)" "\<And>n. disjnt (U n) (U' n)"
and CU: "\<And>n. C n \<subseteq> U n"
and subU': "\<And>n. X closure_of T \<subseteq> U' n"
by metis
then have UU: "U' n \<inter> X closure_of U n = {}" for n
using openin_Int_closure_of_eq_empty [of X "U' n" "U n"] by (simp add: Int_commute disjnt_def)
show "\<exists>U B. openin X U \<and> openin X B \<and> S \<subseteq> U \<and> T \<subseteq> B \<and> disjnt U B"
proof (intro conjI exI)
have "\<And>S n. closedin X (\<Union>m\<le>n. X closure_of V m)"
by (force intro: closedin_Union)
then show "openin X (\<Union>n. U n - (\<Union>m\<le>n. X closure_of V m))"
using U by blast
have "\<And>S n. closedin X (\<Union>m\<le>n. X closure_of U m)"
by (force intro: closedin_Union)
then show "openin X (\<Union>n. V n - (\<Union>m\<le>n. X closure_of U m))"
using V by blast
have "S \<subseteq> topspace X"
by (simp add: \<open>fsigma_in X S\<close> fsigma_in_subset)
then show "S \<subseteq> (\<Union>n. U n - (\<Union>m\<le>n. X closure_of V m))"
apply (clarsimp simp: Ball_def)
by (metis VV C(3) CU IntI UN_E closure_of_subset empty_iff subV' subsetD)
have "T \<subseteq> topspace X"
by (simp add: \<open>fsigma_in X T\<close> fsigma_in_subset)
then show "T \<subseteq> (\<Union>n. V n - (\<Union>m\<le>n. X closure_of U m))"
apply (clarsimp simp: Ball_def)
by (metis UU D(3) DV IntI UN_E closure_of_subset empty_iff subU' subsetD)
have "\<And>x m n. \<lbrakk>x \<in> U n; x \<in> V m; \<forall>k\<le>m. x \<notin> X closure_of U k\<rbrakk> \<Longrightarrow> \<exists>k\<le>n. x \<in> X closure_of V k"
by (meson U V closure_of_subset nat_le_linear openin_subset subsetD)
then show "disjnt (\<Union>n. U n - (\<Union>m\<le>n. X closure_of V m)) (\<Union>n. V n - (\<Union>m\<le>n. X closure_of U m))"
by (force simp: disjnt_iff)
qed
qed
next
show "?rhs \<Longrightarrow> ?lhs"
by (simp add: closed_imp_fsigma_in normal_space_def separatedin_closed_sets)
qed
lemma normal_space_fsigma_subtopology:
assumes "normal_space X" "fsigma_in X S"
shows "normal_space (subtopology X S)"
unfolding normal_space_fsigmas
proof clarify
fix T U
assume "fsigma_in (subtopology X S) T"
and "fsigma_in (subtopology X S) U"
and TU: "separatedin (subtopology X S) T U"
then obtain A B where "openin X A \<and> openin X B \<and> T \<subseteq> A \<and> U \<subseteq> B \<and> disjnt A B"
by (metis assms fsigma_in_fsigma_subtopology normal_space_fsigmas separatedin_subtopology)
then
show "\<exists>A B. openin (subtopology X S) A \<and> openin (subtopology X S) B \<and> T \<subseteq> A \<and>
U \<subseteq> B \<and> disjnt A B"
using TU
by (force simp add: separatedin_subtopology openin_subtopology_alt disjnt_iff)
qed
lemma normal_space_closed_subtopology:
assumes "normal_space X" "closedin X S"
shows "normal_space (subtopology X S)"
by (simp add: assms closed_imp_fsigma_in normal_space_fsigma_subtopology)
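text\<open>So normality passes to F-sigma subspaces and in particular to closed subspaces.  It
does not pass to arbitrary subspaces in general, which is the point of the hereditary
variant introduced in the next section.\<close>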
lemma normal_space_continuous_closed_map_image:
assumes "normal_space X" and contf: "continuous_map X Y f"
and clof: "closed_map X Y f" and fim: "f ` topspace X = topspace Y"
shows "normal_space Y"
unfolding normal_space_def
proof clarify
fix S T
assume "closedin Y S" and "closedin Y T" and "disjnt S T"
have "closedin X {x \<in> topspace X. f x \<in> S}" "closedin X {x \<in> topspace X. f x \<in> T}"
using \<open>closedin Y S\<close> \<open>closedin Y T\<close> closedin_continuous_map_preimage contf by auto
moreover
have "disjnt {x \<in> topspace X. f x \<in> S} {x \<in> topspace X. f x \<in> T}"
using \<open>disjnt S T\<close> by (auto simp: disjnt_iff)
ultimately
obtain U V where "closedin X U" "closedin X V"
and subXU: "{x \<in> topspace X. f x \<in> S} \<subseteq> topspace X - U"
and subXV: "{x \<in> topspace X. f x \<in> T} \<subseteq> topspace X - V"
and dis: "disjnt (topspace X - U) (topspace X -V)"
using \<open>normal_space X\<close> by (force simp add: normal_space_def ex_openin)
have "closedin Y (f ` U)" "closedin Y (f ` V)"
using \<open>closedin X U\<close> \<open>closedin X V\<close> clof closed_map_def by blast+
moreover have "S \<subseteq> topspace Y - f ` U"
using \<open>closedin Y S\<close> \<open>closedin X U\<close> subXU by (force dest: closedin_subset)
moreover have "T \<subseteq> topspace Y - f ` V"
using \<open>closedin Y T\<close> \<open>closedin X V\<close> subXV by (force dest: closedin_subset)
moreover have "disjnt (topspace Y - f ` U) (topspace Y - f ` V)"
using fim dis by (force simp add: disjnt_iff)
ultimately show "\<exists>U V. openin Y U \<and> openin Y V \<and> S \<subseteq> U \<and> T \<subseteq> V \<and> disjnt U V"
by (force simp add: ex_openin)
qed

subsection \<open>Hereditary topological properties\<close>
definition hereditarily
where "hereditarily P X \<equiv>
\<forall>S. S \<subseteq> topspace X \<longrightarrow> P(subtopology X S)"
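text\<open>A property holds hereditarily when it is inherited by every subspace.  The next
lemma shows that restricting attention to subsets of the topspace loses nothing, since
forming a subtopology implicitly intersects with the topspace anyway.\<close>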
lemma hereditarily:
"hereditarily P X \<longleftrightarrow> (\<forall>S. P(subtopology X S))"
by (metis Int_lower1 hereditarily_def subtopology_restrict)
lemma hereditarily_mono:
"\<lbrakk>hereditarily P X; \<And>x. P x \<Longrightarrow> Q x\<rbrakk> \<Longrightarrow> hereditarily Q X"
by (simp add: hereditarily)
lemma hereditarily_inc:
"hereditarily P X \<Longrightarrow> P X"
by (metis hereditarily subtopology_topspace)
lemma hereditarily_subtopology:
"hereditarily P X \<Longrightarrow> hereditarily P (subtopology X S)"
by (simp add: hereditarily subtopology_subtopology)
lemma hereditarily_normal_space_continuous_closed_map_image:
assumes X: "hereditarily normal_space X" and contf: "continuous_map X Y f"
and clof: "closed_map X Y f" and fim: "f ` (topspace X) = topspace Y"
shows "hereditarily normal_space Y"
unfolding hereditarily_def
proof (intro strip)
fix T
assume "T \<subseteq> topspace Y"
then have nx: "normal_space (subtopology X {x \<in> topspace X. f x \<in> T})"
by (meson X hereditarily)
moreover have "continuous_map (subtopology X {x \<in> topspace X. f x \<in> T}) (subtopology Y T) f"
by (simp add: contf continuous_map_from_subtopology continuous_map_in_subtopology image_subset_iff)
moreover have "closed_map (subtopology X {x \<in> topspace X. f x \<in> T}) (subtopology Y T) f"
by (simp add: clof closed_map_restriction)
ultimately show "normal_space (subtopology Y T)"
using fim normal_space_continuous_closed_map_image by fastforce
qed
lemma homeomorphic_hereditarily_normal_space:
"X homeomorphic_space Y
\<Longrightarrow> (hereditarily normal_space X \<longleftrightarrow> hereditarily normal_space Y)"
by (meson hereditarily_normal_space_continuous_closed_map_image homeomorphic_eq_everything_map
homeomorphic_space homeomorphic_space_sym)
lemma hereditarily_normal_space_retraction_map_image:
"\<lbrakk>retraction_map X Y r; hereditarily normal_space X\<rbrakk> \<Longrightarrow> hereditarily normal_space Y"
by (smt (verit) hereditarily_subtopology hereditary_imp_retractive_property homeomorphic_hereditarily_normal_space)

subsection\<open>Limits in a topological space\<close>
lemma limitin_const_iff:
assumes "t1_space X" "\<not> trivial_limit F"
shows "limitin X (\<lambda>k. a) l F \<longleftrightarrow> l \<in> topspace X \<and> a = l" (is "?lhs=?rhs")
proof
assume ?lhs then show ?rhs
using assms unfolding limitin_def t1_space_def by (metis eventually_const openin_topspace)
next
assume ?rhs then show ?lhs
using assms by (auto simp: limitin_def t1_space_def)
qed
lemma compactin_sequence_with_limit:
assumes lim: "limitin X \<sigma> l sequentially" and "S \<subseteq> range \<sigma>" and SX: "S \<subseteq> topspace X"
shows "compactin X (insert l S)"
unfolding compactin_def
proof (intro conjI strip)
show "insert l S \<subseteq> topspace X"
by (meson SX insert_subset lim limitin_topspace)
fix \<U>
assume \<section>: "Ball \<U> (openin X) \<and> insert l S \<subseteq> \<Union> \<U>"
have "\<exists>V. finite V \<and> V \<subseteq> \<U> \<and> (\<exists>t \<in> V. l \<in> t) \<and> S \<subseteq> \<Union> V"
if *: "\<forall>x \<in> S. \<exists>T \<in> \<U>. x \<in> T" and "T \<in> \<U>" "l \<in> T" for T
proof -
obtain V where V: "\<And>x. x \<in> S \<Longrightarrow> V x \<in> \<U> \<and> x \<in> V x"
using * by metis
obtain N where N: "\<And>n. N \<le> n \<Longrightarrow> \<sigma> n \<in> T"
by (meson "\<section>" \<open>T \<in> \<U>\<close> \<open>l \<in> T\<close> lim limitin_sequentially)
show ?thesis
proof (intro conjI exI)
have "x \<in> T"
if "x \<in> S" and "\<forall>A. (\<forall>x \<in> S. (\<forall>n\<le>N. x \<noteq> \<sigma> n) \<or> A \<noteq> V x) \<or> x \<notin> A" for x
by (metis (no_types) N V that assms(2) imageE nle_le subsetD)
then show "S \<subseteq> \<Union> (insert T (V ` (S \<inter> \<sigma> ` {0..N})))"
by force
qed (use V that in auto)
qed
then show "\<exists>\<F>. finite \<F> \<and> \<F> \<subseteq> \<U> \<and> insert l S \<subseteq> \<Union> \<F>"
by (smt (verit, best) Union_iff \<section> insert_subset subsetD)
qed
lemma limitin_Hausdorff_unique:
assumes "limitin X f l1 F" "limitin X f l2 F" "\<not> trivial_limit F" "Hausdorff_space X"
shows "l1 = l2"
proof (rule ccontr)
assume "l1 \<noteq> l2"
with assms obtain U V where "openin X U" "openin X V" "l1 \<in> U" "l2 \<in> V" "disjnt U V"
by (metis Hausdorff_space_def limitin_topspace)
then have "eventually (\<lambda>x. f x \<in> U) F" "eventually (\<lambda>x. f x \<in> V) F"
using assms by (fastforce simp: limitin_def)+
then have "\<exists>x. f x \<in> U \<and> f x \<in> V"
using assms eventually_elim2 filter_eq_iff by fastforce
with assms \<open>disjnt U V\<close> show False
by (meson disjnt_iff)
qed
lemma limitin_kc_unique:
assumes "kc_space X" and lim1: "limitin X f l1 sequentially" and lim2: "limitin X f l2 sequentially"
shows "l1 = l2"
proof (rule ccontr)
assume "l1 \<noteq> l2"
define A where "A \<equiv> insert l1 (range f - {l2})"
have "l1 \<in> topspace X"
using lim1 limitin_def by fastforce
moreover have "compactin X (insert l1 (topspace X \<inter> (range f - {l2})))"
by (meson Diff_subset compactin_sequence_with_limit inf_le1 inf_le2 lim1 subset_trans)
ultimately have "compactin X (topspace X \<inter> A)"
by (simp add: A_def)
then have OXA: "openin X (topspace X - A)"
by (metis Diff_Diff_Int Diff_subset \<open>kc_space X\<close> kc_space_def openin_closedin_eq)
have "l2 \<in> topspace X - A"
using \<open>l1 \<noteq> l2\<close> A_def lim2 limitin_topspace by fastforce
then have "\<forall>\<^sub>F x in sequentially. f x = l2"
using limitinD [OF lim2 OXA] by (auto simp: A_def eventually_conj_iff)
then show False
using limitin_transform_eventually [OF _ lim1]
limitin_const_iff [OF kc_imp_t1_space trivial_limit_sequentially]
using \<open>l1 \<noteq> l2\<close> \<open>kc_space X\<close> by fastforce
qed
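text\<open>So limits are unique with respect to any nontrivial filter in a Hausdorff space,
and sequential limits are already unique in the weaker KC spaces, i.e. those in which
every compact set is closed.\<close>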
lemma limitin_closedin:
assumes lim: "limitin X f l F"
and "closedin X S" and ev: "eventually (\<lambda>x. f x \<in> S) F" "\<not> trivial_limit F"
shows "l \<in> S"
proof (rule ccontr)
assume "l \<notin> S"
have "\<forall>\<^sub>F x in F. f x \<in> topspace X - S"
by (metis Diff_iff \<open>l \<notin> S\<close> \<open>closedin X S\<close> closedin_def lim limitin_def)
with ev eventually_elim2 trivial_limit_def show False
by force
qed
+subsection\<open>Quasi-components\<close>
+
+definition quasi_component_of :: "'a topology \<Rightarrow> 'a \<Rightarrow> 'a \<Rightarrow> bool"
+ where
+ "quasi_component_of X x y \<equiv>
+ x \<in> topspace X \<and> y \<in> topspace X \<and>
+ (\<forall>T. closedin X T \<and> openin X T \<longrightarrow> (x \<in> T \<longleftrightarrow> y \<in> T))"
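+
+text\<open>Informally, two points of a space are quasi-related precisely when no clopen set
+contains one of them but not the other; equivalently (see the alternative
+characterisations below) no splitting of the space into two disjoint open sets separates
+them.  The quasi-components of the space are the equivalence classes of this relation.\<close>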
+
+abbreviation "quasi_component_of_set S x \<equiv> Collect (quasi_component_of S x)"
+
+definition quasi_components_of :: "'a topology \<Rightarrow> ('a set) set"
+ where
+ "quasi_components_of X = quasi_component_of_set X ` topspace X"
+
+lemma quasi_component_in_topspace:
+ "quasi_component_of X x y \<Longrightarrow> x \<in> topspace X \<and> y \<in> topspace X"
+ by (simp add: quasi_component_of_def)
+
+lemma quasi_component_of_refl [simp]:
+ "quasi_component_of X x x \<longleftrightarrow> x \<in> topspace X"
+ by (simp add: quasi_component_of_def)
+
+lemma quasi_component_of_sym:
+ "quasi_component_of X x y \<longleftrightarrow> quasi_component_of X y x"
+ by (meson quasi_component_of_def)
+
+lemma quasi_component_of_trans:
+ "\<lbrakk>quasi_component_of X x y; quasi_component_of X y z\<rbrakk> \<Longrightarrow> quasi_component_of X x z"
+ by (simp add: quasi_component_of_def)
+
+lemma quasi_component_of_subset_topspace:
+ "quasi_component_of_set X x \<subseteq> topspace X"
+ using quasi_component_of_def by fastforce
+
+lemma quasi_component_of_eq_empty:
+ "quasi_component_of_set X x = {} \<longleftrightarrow> (x \<notin> topspace X)"
+ using quasi_component_of_def by fastforce
+
+lemma quasi_component_of:
+ "quasi_component_of X x y \<longleftrightarrow>
+ x \<in> topspace X \<and> y \<in> topspace X \<and> (\<forall>T. x \<in> T \<and> closedin X T \<and> openin X T \<longrightarrow> y \<in> T)"
+ unfolding quasi_component_of_def by (metis Diff_iff closedin_def openin_closedin_eq)
+
+lemma quasi_component_of_alt:
+ "quasi_component_of X x y \<longleftrightarrow>
+ x \<in> topspace X \<and> y \<in> topspace X \<and>
+ \<not> (\<exists>U V. openin X U \<and> openin X V \<and> U \<union> V = topspace X \<and> disjnt U V \<and> x \<in> U \<and> y \<in> V)"
+ (is "?lhs = ?rhs")
+proof
+ show "?lhs \<Longrightarrow> ?rhs"
+ unfolding quasi_component_of_def
+ by (metis disjnt_iff separatedin_full separatedin_open_sets)
+ show "?rhs \<Longrightarrow> ?lhs"
+ unfolding quasi_component_of_def
+ by (metis Diff_disjoint Diff_iff Un_Diff_cancel closedin_def disjnt_def inf_commute sup.orderE sup_commute)
+qed
+
+lemma quasi_component_of_separated:
+ "quasi_component_of X x y \<longleftrightarrow>
+ x \<in> topspace X \<and> y \<in> topspace X \<and>
+ \<not> (\<exists>U V. separatedin X U V \<and> U \<union> V = topspace X \<and> x \<in> U \<and> y \<in> V)"
+ by (meson quasi_component_of_alt separatedin_full separatedin_open_sets)
+
+lemma quasi_component_of_subtopology:
+ "quasi_component_of (subtopology X s) x y \<Longrightarrow> quasi_component_of X x y"
+ unfolding quasi_component_of_def
+ by (simp add: closedin_subtopology) (metis Int_iff inf_commute openin_subtopology_Int2)
+
+lemma quasi_component_of_mono:
+ "quasi_component_of (subtopology X S) x y \<and> S \<subseteq> T
+ \<Longrightarrow> quasi_component_of (subtopology X T) x y"
+ by (metis inf.absorb_iff2 quasi_component_of_subtopology subtopology_subtopology)
+
+lemma quasi_component_of_equiv:
+ "quasi_component_of X x y \<longleftrightarrow>
+ x \<in> topspace X \<and> y \<in> topspace X \<and> quasi_component_of X x = quasi_component_of X y"
+ using quasi_component_of_def by fastforce
+
+lemma quasi_component_of_disjoint [simp]:
+ "disjnt (quasi_component_of_set X x) (quasi_component_of_set X y) \<longleftrightarrow> \<not> (quasi_component_of X x y)"
+ by (metis disjnt_iff quasi_component_of_equiv mem_Collect_eq)
+
+lemma quasi_component_of_eq:
+ "quasi_component_of X x = quasi_component_of X y \<longleftrightarrow>
+ (x \<notin> topspace X \<and> y \<notin> topspace X)
+ \<or> x \<in> topspace X \<and> y \<in> topspace X \<and> quasi_component_of X x y"
+ by (metis Collect_empty_eq_bot quasi_component_of_eq_empty quasi_component_of_equiv)
+
+lemma topspace_imp_quasi_components_of:
+ assumes "x \<in> topspace X"
+ obtains C where "C \<in> quasi_components_of X" "x \<in> C"
+ by (metis assms imageI mem_Collect_eq quasi_component_of_refl quasi_components_of_def)
+
+lemma Union_quasi_components_of: "\<Union> (quasi_components_of X) = topspace X"
+ by (auto simp: quasi_components_of_def quasi_component_of_def)
+
+lemma pairwise_disjoint_quasi_components_of:
+ "pairwise disjnt (quasi_components_of X)"
+ by (auto simp: quasi_components_of_def quasi_component_of_def disjoint_def)
+
+lemma complement_quasi_components_of_Union:
+ assumes "C \<in> quasi_components_of X"
+ shows "topspace X - C = \<Union> (quasi_components_of X - {C})" (is "?lhs = ?rhs")
+proof
+ show "?lhs \<subseteq> ?rhs"
+ using Union_quasi_components_of by fastforce
+ show "?rhs \<subseteq> ?lhs"
+ using assms
+ using quasi_component_of_equiv by (fastforce simp add: quasi_components_of_def image_iff subset_iff)
+qed
+
+lemma nonempty_quasi_components_of:
+ "C \<in> quasi_components_of X \<Longrightarrow> C \<noteq> {}"
+ by (metis imageE quasi_component_of_eq_empty quasi_components_of_def)
+
+lemma quasi_components_of_subset:
+ "C \<in> quasi_components_of X \<Longrightarrow> C \<subseteq> topspace X"
+ using Union_quasi_components_of by force
+
+lemma quasi_component_in_quasi_components_of:
+ "quasi_component_of_set X a \<in> quasi_components_of X \<longleftrightarrow> a \<in> topspace X"
+ by (metis (no_types, lifting) image_iff quasi_component_of_eq_empty quasi_components_of_def)
+
+lemma quasi_components_of_eq_empty [simp]:
+ "quasi_components_of X = {} \<longleftrightarrow> topspace X = {}"
+ by (simp add: quasi_components_of_def)
+
+lemma quasi_components_of_empty_space:
+ "topspace X = {} \<Longrightarrow> quasi_components_of X = {}"
+ by simp
+
+lemma quasi_component_of_set:
+ "quasi_component_of_set X x =
+ (if x \<in> topspace X
+ then \<Inter> {t. closedin X t \<and> openin X t \<and> x \<in> t}
+ else {})"
+ by (auto simp: quasi_component_of)
+
+lemma closedin_quasi_component_of: "closedin X (quasi_component_of_set X x)"
+ by (auto simp: quasi_component_of_set)
+
+lemma closedin_quasi_components_of:
+ "C \<in> quasi_components_of X \<Longrightarrow> closedin X C"
+ by (auto simp: quasi_components_of_def closedin_quasi_component_of)
+
+lemma openin_finite_quasi_components:
+ "\<lbrakk>finite(quasi_components_of X); C \<in> quasi_components_of X\<rbrakk> \<Longrightarrow> openin X C"
+ apply (simp add: openin_closedin_eq quasi_components_of_subset complement_quasi_components_of_Union)
+ by (meson DiffD1 closedin_Union closedin_quasi_components_of finite_Diff)
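+
+text\<open>Quasi-components are always closed, being intersections of clopen sets, but they
+need not be open; the preceding lemma gives openness when there are only finitely many
+quasi-components, and local connectedness suffices as well (see below).\<close>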
+
+lemma quasi_component_of_eq_overlap:
+ "quasi_component_of X x = quasi_component_of X y \<longleftrightarrow>
+ (x \<notin> topspace X \<and> y \<notin> topspace X) \<or>
+ \<not> (quasi_component_of_set X x \<inter> quasi_component_of_set X y = {})"
+ using quasi_component_of_equiv by fastforce
+
+lemma quasi_component_of_nonoverlap:
+ "quasi_component_of_set X x \<inter> quasi_component_of_set X y = {} \<longleftrightarrow>
+ (x \<notin> topspace X) \<or> (y \<notin> topspace X) \<or>
+ \<not> (quasi_component_of X x = quasi_component_of X y)"
+ by (metis inf.idem quasi_component_of_eq_empty quasi_component_of_eq_overlap)
+
+lemma quasi_component_of_overlap:
+ "\<not> (quasi_component_of_set X x \<inter> quasi_component_of_set X y = {}) \<longleftrightarrow>
+ x \<in> topspace X \<and> y \<in> topspace X \<and> quasi_component_of X x = quasi_component_of X y"
+ by (meson quasi_component_of_nonoverlap)
+
+lemma quasi_components_of_disjoint:
+ "\<lbrakk>C \<in> quasi_components_of X; D \<in> quasi_components_of X\<rbrakk> \<Longrightarrow> disjnt C D \<longleftrightarrow> C \<noteq> D"
+ by (metis disjnt_self_iff_empty nonempty_quasi_components_of pairwiseD pairwise_disjoint_quasi_components_of)
+
+lemma quasi_components_of_overlap:
+ "\<lbrakk>C \<in> quasi_components_of X; D \<in> quasi_components_of X\<rbrakk> \<Longrightarrow> \<not> (C \<inter> D = {}) \<longleftrightarrow> C = D"
+ by (metis disjnt_def quasi_components_of_disjoint)
+
+lemma pairwise_separated_quasi_components_of:
+ "pairwise (separatedin X) (quasi_components_of X)"
+ by (metis closedin_quasi_components_of pairwise_def pairwise_disjoint_quasi_components_of separatedin_closed_sets)
+
+lemma finite_quasi_components_of_finite:
+ "finite(topspace X) \<Longrightarrow> finite(quasi_components_of X)"
+ by (simp add: Union_quasi_components_of finite_UnionD)
+
+lemma connected_imp_quasi_component_of:
+ assumes "connected_component_of X x y"
+ shows "quasi_component_of X x y"
+proof -
+ have "x \<in> topspace X" "y \<in> topspace X"
+ by (meson assms connected_component_of_equiv)+
+ with assms show ?thesis
+ apply (clarsimp simp add: quasi_component_of connected_component_of_def)
+ by (meson connectedin_clopen_cases disjnt_iff subsetD)
+qed
+
+lemma connected_component_subset_quasi_component_of:
+ "connected_component_of_set X x \<subseteq> quasi_component_of_set X x"
+ using connected_imp_quasi_component_of by force
+
+lemma quasi_component_as_connected_component_Union:
+ "quasi_component_of_set X x =
+ \<Union> (connected_component_of_set X ` quasi_component_of_set X x)"
+ (is "?lhs = ?rhs")
+proof
+ show "?lhs \<subseteq> ?rhs"
+ using connected_component_of_refl quasi_component_of by fastforce
+ show "?rhs \<subseteq> ?lhs"
+ apply (rule SUP_least)
+ by (simp add: connected_component_subset_quasi_component_of quasi_component_of_equiv)
+qed
+
+lemma quasi_components_as_connected_components_Union:
+ assumes "C \<in> quasi_components_of X"
+ obtains \<T> where "\<T> \<subseteq> connected_components_of X" "\<Union>\<T> = C"
+proof -
+ obtain x where "x \<in> topspace X" and Ceq: "C = quasi_component_of_set X x"
+ by (metis assms imageE quasi_components_of_def)
+ define \<T> where "\<T> \<equiv> connected_component_of_set X ` quasi_component_of_set X x"
+ show thesis
+ proof
+ show "\<T> \<subseteq> connected_components_of X"
+ by (simp add: \<T>_def connected_components_of_def image_mono quasi_component_of_subset_topspace)
+ show "\<Union>\<T> = C"
+ by (metis \<T>_def Ceq quasi_component_as_connected_component_Union)
+ qed
+qed
+
+lemma path_imp_quasi_component_of:
+ "path_component_of X x y \<Longrightarrow> quasi_component_of X x y"
+ by (simp add: connected_imp_quasi_component_of path_imp_connected_component_of)
+
+lemma path_component_subset_quasi_component_of:
+ "path_component_of_set X x \<subseteq> quasi_component_of_set X x"
+ by (simp add: Collect_mono path_imp_quasi_component_of)
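+
+text\<open>Hence the inclusions: the path component of a point is contained in its connected
+component, which is contained in its quasi-component, and all three are contained in the
+topspace.\<close>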
+
+lemma connected_space_iff_quasi_component:
+ "connected_space X \<longleftrightarrow> (\<forall>x \<in> topspace X. \<forall>y \<in> topspace X. quasi_component_of X x y)"
+ unfolding connected_space_clopen_in closedin_def quasi_component_of
+ by blast
+
+lemma connected_space_imp_quasi_component_of:
+ " \<lbrakk>connected_space X; a \<in> topspace X; b \<in> topspace X\<rbrakk> \<Longrightarrow> quasi_component_of X a b"
+ by (simp add: connected_space_iff_quasi_component)
+
+lemma connected_space_quasi_component_set:
+ "connected_space X \<longleftrightarrow> (\<forall>x \<in> topspace X. quasi_component_of_set X x = topspace X)"
+ by (metis Ball_Collect connected_space_iff_quasi_component quasi_component_of_subset_topspace subset_antisym)
+
+lemma connected_space_iff_quasi_components_eq:
+ "connected_space X \<longleftrightarrow>
+ (\<forall>C \<in> quasi_components_of X. \<forall>D \<in> quasi_components_of X. C = D)"
+ apply (simp add: quasi_components_of_def)
+ by (metis connected_space_iff_quasi_component mem_Collect_eq quasi_component_of_equiv)
+
+lemma quasi_components_of_subset_sing:
+ "quasi_components_of X \<subseteq> {S} \<longleftrightarrow> connected_space X \<and> (topspace X = {} \<or> topspace X = S)"
+proof (cases "quasi_components_of X = {}")
+ case True
+ then show ?thesis
+ by (simp add: connected_space_topspace_empty subset_singleton_iff)
+next
+ case False
+ then show ?thesis
+ apply (simp add: connected_space_iff_quasi_components_eq subset_iff Ball_def)
+ by (metis quasi_components_of_subset subsetI subset_antisym subset_empty topspace_imp_quasi_components_of)
+qed
+
+lemma connected_space_iff_quasi_components_subset_sing:
+ "connected_space X \<longleftrightarrow> (\<exists>a. quasi_components_of X \<subseteq> {a})"
+ by (simp add: quasi_components_of_subset_sing)
+
+lemma quasi_components_of_eq_singleton:
+ "quasi_components_of X = {S} \<longleftrightarrow>
+ connected_space X \<and> \<not> (topspace X = {}) \<and> S = topspace X"
+ by (metis ccpo_Sup_singleton insert_not_empty quasi_components_of_subset_sing subset_singleton_iff)
+
+lemma quasi_components_of_connected_space:
+ "connected_space X
+ \<Longrightarrow> quasi_components_of X = (if topspace X = {} then {} else {topspace X})"
+ by (simp add: quasi_components_of_eq_singleton)
+
+lemma separated_between_singletons:
+ "separated_between X {x} {y} \<longleftrightarrow>
+ x \<in> topspace X \<and> y \<in> topspace X \<and> \<not> (quasi_component_of X x y)"
+proof (cases "x \<in> topspace X \<and> y \<in> topspace X")
+ case True
+ then show ?thesis
+ by (auto simp add: separated_between_def quasi_component_of_alt)
+qed (use separated_between_imp_subset in blast)
+
+lemma quasi_component_nonseparated:
+ "quasi_component_of X x y \<longleftrightarrow> x \<in> topspace X \<and> y \<in> topspace X \<and> \<not> (separated_between X {x} {y})"
+ by (metis quasi_component_of_equiv separated_between_singletons)
+
+lemma separated_between_quasi_component_pointwise_left:
+ assumes "C \<in> quasi_components_of X"
+ shows "separated_between X C S \<longleftrightarrow> (\<exists>x \<in> C. separated_between X {x} S)" (is "?lhs = ?rhs")
+proof
+ show "?lhs \<Longrightarrow> ?rhs"
+ using assms quasi_components_of_disjoint separated_between_mono by fastforce
+next
+ assume ?rhs
+ then obtain y where "separated_between X {y} S" and "y \<in> C"
+ by metis
+ with assms show ?lhs
+ by (force simp add: separated_between quasi_components_of_def quasi_component_of_def)
+qed
+
+lemma separated_between_quasi_component_pointwise_right:
+ "C \<in> quasi_components_of X \<Longrightarrow> separated_between X S C \<longleftrightarrow> (\<exists>x \<in> C. separated_between X S {x})"
+ by (simp add: separated_between_quasi_component_pointwise_left separated_between_sym)
+
+lemma separated_between_quasi_component_point:
+ assumes "C \<in> quasi_components_of X"
+ shows "separated_between X C {x} \<longleftrightarrow> x \<in> topspace X - C" (is "?lhs = ?rhs")
+proof
+ show "?lhs \<Longrightarrow> ?rhs"
+ by (meson DiffI disjnt_insert2 insert_subset separated_between_imp_disjoint separated_between_imp_subset)
+next
+ assume ?rhs
+ with assms show ?lhs
+ unfolding quasi_components_of_def image_iff Diff_iff separated_between_quasi_component_pointwise_left [OF assms]
+ by (metis mem_Collect_eq quasi_component_of_refl separated_between_singletons)
+qed
+
+lemma separated_between_point_quasi_component:
+ "C \<in> quasi_components_of X \<Longrightarrow> separated_between X {x} C \<longleftrightarrow> x \<in> topspace X - C"
+ by (simp add: separated_between_quasi_component_point separated_between_sym)
+
+lemma separated_between_quasi_component_compact:
+ "\<lbrakk>C \<in> quasi_components_of X; compactin X K\<rbrakk> \<Longrightarrow> (separated_between X C K \<longleftrightarrow> disjnt C K)"
+ unfolding disjnt_iff
+ using compactin_subset_topspace quasi_components_of_subset separated_between_pointwise_right separated_between_quasi_component_point by fastforce
+
+lemma separated_between_compact_quasi_component:
+ "\<lbrakk>compactin X K; C \<in> quasi_components_of X\<rbrakk> \<Longrightarrow> separated_between X K C \<longleftrightarrow> disjnt K C"
+ using disjnt_sym separated_between_quasi_component_compact separated_between_sym by blast
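+
+text\<open>So a quasi-component can be separated, by a clopen partition of the space, from
+any point outside it and from any compact set disjoint from it.\<close>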
+
+lemma separated_between_quasi_components:
+ assumes C: "C \<in> quasi_components_of X" and D: "D \<in> quasi_components_of X"
+ shows "separated_between X C D \<longleftrightarrow> disjnt C D" (is "?lhs = ?rhs")
+proof
+ show "?lhs \<Longrightarrow> ?rhs"
+ by (simp add: separated_between_imp_disjoint)
+next
+ assume ?rhs
+ obtain x y where x: "C = quasi_component_of_set X x" and "x \<in> C"
+ and y: "D = quasi_component_of_set X y" and "y \<in> D"
+ using assms by (auto simp: quasi_components_of_def)
+ then have "separated_between X {x} {y}"
+ using \<open>disjnt C D\<close> separated_between_singletons by fastforce
+ with \<open>x \<in> C\<close> \<open>y \<in> D\<close> show ?lhs
+ by (auto simp: assms separated_between_quasi_component_pointwise_left separated_between_quasi_component_pointwise_right)
+qed
+
+lemma quasi_eq_connected_component_of_eq:
+ "quasi_component_of X x = connected_component_of X x \<longleftrightarrow>
+ connectedin X (quasi_component_of_set X x)" (is "?lhs = ?rhs")
+proof (cases "x \<in> topspace X")
+ case True
+ show ?thesis
+ proof
+ show "?lhs \<Longrightarrow> ?rhs"
+ by (simp add: connectedin_connected_component_of)
+ next
+ assume ?rhs
+ then have "\<And>y. quasi_component_of X x y = connected_component_of X x y"
+ by (metis connected_component_of_def connected_imp_quasi_component_of mem_Collect_eq quasi_component_of_equiv)
+ then show ?lhs
+ by force
+ qed
+next
+ case False
+ then show ?thesis
+ by (metis Collect_empty_eq_bot connected_component_of_eq_empty connectedin_empty quasi_component_of_eq_empty)
+qed
+
+lemma connected_quasi_component_of:
+ assumes "C \<in> quasi_components_of X"
+ shows "C \<in> connected_components_of X \<longleftrightarrow> connectedin X C" (is "?lhs = ?rhs")
+proof
+ show "?lhs \<Longrightarrow> ?rhs"
+ using assms
+ by (simp add: connectedin_connected_components_of)
+next
+ assume ?rhs
+ with assms show ?lhs
+ unfolding quasi_components_of_def connected_components_of_def image_iff
+ by (metis quasi_eq_connected_component_of_eq)
+qed
+
+lemma quasi_component_of_clopen_cases:
+ "\<lbrakk>C \<in> quasi_components_of X; closedin X T; openin X T\<rbrakk> \<Longrightarrow> C \<subseteq> T \<or> disjnt C T"
+ by (smt (verit) disjnt_iff image_iff mem_Collect_eq quasi_component_of_def quasi_components_of_def subset_iff)
+
+lemma quasi_components_of_set:
+ assumes "C \<in> quasi_components_of X"
+ shows "\<Inter> {T. closedin X T \<and> openin X T \<and> C \<subseteq> T} = C" (is "?lhs = ?rhs")
+proof
+ have "x \<in> C" if "x \<in> \<Inter> {T. closedin X T \<and> openin X T \<and> C \<subseteq> T}" for x
+ proof (rule ccontr)
+ assume "x \<notin> C"
+ have "x \<in> topspace X"
+ using assms quasi_components_of_subset that by force
+ then have "separated_between X C {x}"
+ by (simp add: \<open>x \<notin> C\<close> assms separated_between_quasi_component_point)
+ with that show False
+ by (auto simp: separated_between)
+ qed
+ then show "?lhs \<subseteq> ?rhs"
+ by auto
+qed blast
+
+lemma open_quasi_eq_connected_components_of:
+ assumes "openin X C"
+ shows "C \<in> quasi_components_of X \<longleftrightarrow> C \<in> connected_components_of X" (is "?lhs = ?rhs")
+proof (cases "closedin X C")
+ case True
+ show ?thesis
+ proof
+ assume L: ?lhs
+ have "T = {} \<or> T = topspace X \<inter> C"
+ if "openin (subtopology X C) T" "closedin (subtopology X C) T" for T
+ proof -
+ have "C \<subseteq> T \<or> disjnt C T"
+ by (meson L True assms closedin_trans_full openin_trans_full quasi_component_of_clopen_cases that)
+ with that show ?thesis
+ by (metis Int_absorb2 True closedin_imp_subset closure_of_subset_eq disjnt_def inf_absorb2)
+ qed
+ with L assms show "?rhs"
+ by (simp add: connected_quasi_component_of connected_space_clopen_in connectedin_def openin_subset)
+ next
+ assume ?rhs
+ then obtain x where "x \<in> topspace X" and x: "C = connected_component_of_set X x"
+ by (metis connected_components_of_def imageE)
+ have "C = quasi_component_of_set X x"
+ using True assms connected_component_of_refl connected_imp_quasi_component_of quasi_component_of_def x by fastforce
+ then show ?lhs
+ using \<open>x \<in> topspace X\<close> quasi_components_of_def by fastforce
+ qed
+next
+ case False
+ then show ?thesis
+ using closedin_connected_components_of closedin_quasi_components_of by blast
+qed
+
+lemma quasi_component_of_continuous_image:
+ assumes f: "continuous_map X Y f" and qc: "quasi_component_of X x y"
+ shows "quasi_component_of Y (f x) (f y)"
+ unfolding quasi_component_of_def
+proof (intro strip conjI)
+ show "f x \<in> topspace Y" "f y \<in> topspace Y"
+ using assms by (simp_all add: continuous_map_def quasi_component_of_def)
+ fix T
+ assume "closedin Y T \<and> openin Y T"
+ with assms show "(f x \<in> T) = (f y \<in> T)"
+ by (smt (verit) continuous_map_closedin continuous_map_def mem_Collect_eq quasi_component_of_def)
+qed
+
+lemma quasi_component_of_discrete_topology:
+ "quasi_component_of_set (discrete_topology U) x = (if x \<in> U then {x} else {})"
+proof -
+ have "quasi_component_of_set (discrete_topology U) y = {y}" if "y \<in> U" for y
+ using that
+ apply (simp add: set_eq_iff quasi_component_of_def)
+ by (metis Set.set_insert insertE subset_insertI)
+ then show ?thesis
+ by (simp add: quasi_component_of)
+qed
+
+lemma quasi_components_of_discrete_topology:
+ "quasi_components_of (discrete_topology U) = (\<lambda>x. {x}) ` U"
+ by (auto simp add: quasi_components_of_def quasi_component_of_discrete_topology)
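+
+text\<open>For instance, in a discrete space the quasi-components are exactly the singletons.
+A small illustrative check, which should follow by rewriting with the lemma above and
+evaluating the image of a finite set:\<close>
+
+lemma "quasi_components_of (discrete_topology {0::nat, 1}) = {{0}, {1}}"
+ by (simp add: quasi_components_of_discrete_topology)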
+
+lemma homeomorphic_map_quasi_component_of:
+ assumes hmf: "homeomorphic_map X Y f" and "x \<in> topspace X"
+ shows "quasi_component_of_set Y (f x) = f ` (quasi_component_of_set X x)"
+proof -
+ obtain g where hmg: "homeomorphic_map Y X g"
+ and contf: "continuous_map X Y f" and contg: "continuous_map Y X g"
+ and fg: "(\<forall>x \<in> topspace X. g(f x) = x) \<and> (\<forall>y \<in> topspace Y. f(g y) = y)"
+ by (smt (verit, best) hmf homeomorphic_map_maps homeomorphic_maps_def)
+ show ?thesis
+ proof
+ show "quasi_component_of_set Y (f x) \<subseteq> f ` quasi_component_of_set X x"
+ using quasi_component_of_continuous_image [OF contg]
+ \<open>x \<in> topspace X\<close> fg image_iff quasi_component_of_subset_topspace by fastforce
+ show "f ` quasi_component_of_set X x \<subseteq> quasi_component_of_set Y (f x)"
+ using quasi_component_of_continuous_image [OF contf] by blast
+ qed
+qed
+
+
+lemma homeomorphic_map_quasi_components_of:
+ assumes "homeomorphic_map X Y f"
+ shows "quasi_components_of Y = image (image f) (quasi_components_of X)"
+ using assms
+proof -
+ have "\<exists>x\<in>topspace X. quasi_component_of_set Y y = f ` quasi_component_of_set X x"
+ if "y \<in> topspace Y" for y
+ by (metis that assms homeomorphic_imp_surjective_map homeomorphic_map_quasi_component_of image_iff)
+ moreover have "\<exists>x\<in>topspace Y. f ` quasi_component_of_set X u = quasi_component_of_set Y x"
+ if "u \<in> topspace X" for u
+ by (metis that assms homeomorphic_imp_surjective_map homeomorphic_map_quasi_component_of imageI)
+ ultimately show ?thesis
+ by (auto simp: quasi_components_of_def image_iff)
+qed
+
+lemma openin_quasi_component_of_locally_connected_space:
+ assumes "locally_connected_space X"
+ shows "openin X (quasi_component_of_set X x)"
+proof -
+ have *: "openin X (connected_component_of_set X x)"
+ by (simp add: assms openin_connected_component_of_locally_connected_space)
+ moreover have "connected_component_of_set X x = quasi_component_of_set X x"
+ using * closedin_connected_component_of connected_component_of_refl connected_imp_quasi_component_of
+ quasi_component_of_def by fastforce
+ ultimately show ?thesis
+ by simp
+qed
+
+lemma openin_quasi_components_of_locally_connected_space:
+ "locally_connected_space X \<and> c \<in> quasi_components_of X
+ \<Longrightarrow> openin X c"
+ by (smt (verit, best) image_iff openin_quasi_component_of_locally_connected_space quasi_components_of_def)
+
+lemma quasi_eq_connected_components_of_alt:
+ "quasi_components_of X = connected_components_of X \<longleftrightarrow> (\<forall>C \<in> quasi_components_of X. connectedin X C)"
+ (is "?lhs = ?rhs")
+proof
+ assume R: ?rhs
+ moreover have "connected_components_of X \<subseteq> quasi_components_of X"
+ using R unfolding quasi_components_of_def connected_components_of_def
+ by (force simp flip: quasi_eq_connected_component_of_eq)
+ ultimately show ?lhs
+ using connected_quasi_component_of by blast
+qed (use connected_quasi_component_of in blast)
+
+lemma connected_subset_quasi_components_of_pointwise:
+ "connected_components_of X \<subseteq> quasi_components_of X \<longleftrightarrow>
+ (\<forall>x \<in> topspace X. quasi_component_of X x = connected_component_of X x)"
+ (is "?lhs = ?rhs")
+proof
+ assume L: ?lhs
+ have "connectedin X (quasi_component_of_set X x)" if "x \<in> topspace X" for x
+ proof -
+ have "\<exists>y\<in>topspace X. connected_component_of_set X x = quasi_component_of_set X y"
+ using L that by (force simp: quasi_components_of_def connected_components_of_def image_subset_iff)
+ then show ?thesis
+ by (metis connected_component_of_equiv connectedin_connected_component_of mem_Collect_eq quasi_component_of_eq)
+ qed
+ then show ?rhs
+ by (simp add: quasi_eq_connected_component_of_eq)
+qed (simp add: connected_components_of_def quasi_components_of_def)
+
+lemma quasi_subset_connected_components_of_pointwise:
+ "quasi_components_of X \<subseteq> connected_components_of X \<longleftrightarrow>
+ (\<forall>x \<in> topspace X. quasi_component_of X x = connected_component_of X x)"
+ by (simp add: connected_quasi_component_of image_subset_iff quasi_components_of_def quasi_eq_connected_component_of_eq)
+
+lemma quasi_eq_connected_components_of_pointwise:
+ "quasi_components_of X = connected_components_of X \<longleftrightarrow>
+ (\<forall>x \<in> topspace X. quasi_component_of X x = connected_component_of X x)"
+ using connected_subset_quasi_components_of_pointwise quasi_subset_connected_components_of_pointwise by fastforce
+
+lemma quasi_eq_connected_components_of_pointwise_alt:
+ "quasi_components_of X = connected_components_of X \<longleftrightarrow>
+ (\<forall>x. quasi_component_of X x = connected_component_of X x)"
+ unfolding quasi_eq_connected_components_of_pointwise
+ by (metis connectedin_empty quasi_component_of_eq_empty quasi_eq_connected_component_of_eq)
+
+lemma quasi_eq_connected_components_of_inclusion:
+ "quasi_components_of X = connected_components_of X \<longleftrightarrow>
+ connected_components_of X \<subseteq> quasi_components_of X \<or>
+ quasi_components_of X \<subseteq> connected_components_of X"
+ by (simp add: connected_subset_quasi_components_of_pointwise dual_order.eq_iff quasi_subset_connected_components_of_pointwise)
+
+
+lemma quasi_eq_connected_components_of:
+ "finite(connected_components_of X) \<or>
+ finite(quasi_components_of X) \<or>
+ locally_connected_space X \<or>
+ compact_space X \<and> (Hausdorff_space X \<or> regular_space X \<or> normal_space X)
+ \<Longrightarrow> quasi_components_of X = connected_components_of X"
+proof (elim disjE)
+ show "quasi_components_of X = connected_components_of X"
+ if "finite (connected_components_of X)"
+ unfolding quasi_eq_connected_components_of_inclusion
+ using that open_in_finite_connected_components open_quasi_eq_connected_components_of by blast
+ show "quasi_components_of X = connected_components_of X"
+ if "finite (quasi_components_of X)"
+ unfolding quasi_eq_connected_components_of_inclusion
+ using that open_quasi_eq_connected_components_of openin_finite_quasi_components by blast
+ show "quasi_components_of X = connected_components_of X"
+ if "locally_connected_space X"
+ unfolding quasi_eq_connected_components_of_inclusion
+ using that open_quasi_eq_connected_components_of openin_quasi_components_of_locally_connected_space by auto
+ show "quasi_components_of X = connected_components_of X"
+ if "compact_space X \<and> (Hausdorff_space X \<or> regular_space X \<or> normal_space X)"
+ proof -
+ show ?thesis
+ unfolding quasi_eq_connected_components_of_alt
+ proof (intro strip)
+ fix C
+ assume C: "C \<in> quasi_components_of X"
+ then have cloC: "closedin X C"
+ by (simp add: closedin_quasi_components_of)
+ have "normal_space X"
+ using that compact_Hausdorff_or_regular_imp_normal_space by blast
+ show "connectedin X C"
+ proof (clarsimp simp add: connectedin_def connected_space_closedin_eq closedin_closed_subtopology cloC closedin_subset [OF cloC])
+ fix S T
+ assume "S \<subseteq> C" and "closedin X S" and "S \<inter> T = {}" and SUT: "S \<union> T = topspace X \<inter> C"
+ and T: "T \<subseteq> C" "T \<noteq> {}" and "closedin X T"
+ with \<open>normal_space X\<close> obtain U V where UV: "openin X U" "openin X V" "S \<subseteq> U" "T \<subseteq> V" "disjnt U V"
+ by (meson disjnt_def normal_space_def)
+ moreover have "compactin X (topspace X - (U \<union> V))"
+ using UV that by (intro closedin_compact_space closedin_diff openin_Un) auto
+ ultimately have "separated_between X C (topspace X - (U \<union> V)) \<longleftrightarrow> disjnt C (topspace X - (U \<union> V))"
+ by (simp add: \<open>C \<in> quasi_components_of X\<close> separated_between_quasi_component_compact)
+ moreover have "disjnt C (topspace X - (U \<union> V))"
+ using UV SUT disjnt_def by fastforce
+ ultimately have "separated_between X C (topspace X - (U \<union> V))"
+ by simp
+ then obtain A B where "openin X A" "openin X B" "A \<union> B = topspace X" "disjnt A B" "C \<subseteq> A"
+ and subB: "topspace X - (U \<union> V) \<subseteq> B"
+ by (meson separated_between_def)
+ have "B \<union> U = topspace X - (A \<inter> V)"
+ proof
+ show "B \<union> U \<subseteq> topspace X - A \<inter> V"
+ using \<open>openin X U\<close> \<open>disjnt U V\<close> \<open>disjnt A B\<close> \<open>openin X B\<close> disjnt_iff openin_closedin_eq by fastforce
+ show "topspace X - A \<inter> V \<subseteq> B \<union> U"
+ using \<open>A \<union> B = topspace X\<close> subB by fastforce
+ qed
+ then have "closedin X (B \<union> U)"
+ using \<open>openin X V\<close> \<open>openin X A\<close> by auto
+ then have "C \<subseteq> B \<union> U \<or> disjnt C (B \<union> U)"
+ using quasi_component_of_clopen_cases [OF C] \<open>openin X U\<close> \<open>openin X B\<close> by blast
+ with UV show "S = {}"
+ by (metis UnE \<open>C \<subseteq> A\<close> \<open>S \<subseteq> C\<close> T \<open>disjnt A B\<close> all_not_in_conv disjnt_Un2 disjnt_iff subset_eq)
+ qed
+ qed
+ qed
+qed
+
+
+lemma quasi_eq_connected_component_of:
+ "finite(connected_components_of X) \<or>
+ finite(quasi_components_of X) \<or>
+ locally_connected_space X \<or>
+ compact_space X \<and> (Hausdorff_space X \<or> regular_space X \<or> normal_space X)
+ \<Longrightarrow> quasi_component_of X x = connected_component_of X x"
+ by (metis quasi_eq_connected_components_of quasi_eq_connected_components_of_pointwise_alt)
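+
+text\<open>Summarising: quasi-components coincide with connected components whenever either
+family is finite, or the space is locally connected, or the space is compact and at
+least one of Hausdorff, regular or normal.\<close>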
+
+
+subsection\<open>Additional quasi-component and continuum properties, such as the Boundary Bumping Theorem\<close>
+
+lemma cut_wire_fence_theorem_gen:
+ assumes "compact_space X" and X: "Hausdorff_space X \<or> regular_space X \<or> normal_space X"
+ and S: "compactin X S" and T: "closedin X T"
+ and dis: "\<And>C. connectedin X C \<Longrightarrow> disjnt C S \<or> disjnt C T"
+ shows "separated_between X S T"
+ proof -
+ have "x \<in> topspace X" if "x \<in> S" and "T = {}" for x
+ using that S compactin_subset_topspace by auto
+ moreover have "separated_between X {x} {y}" if "x \<in> S" and "y \<in> T" for x y
+ proof (cases "x \<in> topspace X \<and> y \<in> topspace X")
+ case True
+ then have "\<not> connected_component_of X x y"
+ by (meson dis connected_component_of_def disjnt_iff that)
+ with True X \<open>compact_space X\<close> show ?thesis
+ by (metis quasi_component_nonseparated quasi_eq_connected_component_of)
+ next
+ case False
+ then show ?thesis
+ using S T compactin_subset_topspace closedin_subset that by blast
+ qed
+ ultimately show ?thesis
+ using assms
+ by (simp add: separated_between_pointwise_left separated_between_pointwise_right
+ closedin_compact_space closedin_subset)
+qed
+
+lemma cut_wire_fence_theorem:
+ "\<lbrakk>compact_space X; Hausdorff_space X; closedin X S; closedin X T;
+ \<And>C. connectedin X C \<Longrightarrow> disjnt C S \<or> disjnt C T\<rbrakk>
+ \<Longrightarrow> separated_between X S T"
+ by (simp add: closedin_compact_space cut_wire_fence_theorem_gen)
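+
+text\<open>The "cut wire fence" theorem: in a compact space that is Hausdorff, regular or
+normal, if no connected set meets both a compact set S and a closed set T, then S and T
+lie in the two halves of a clopen partition of the space.\<close>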
+
+lemma separated_between_from_closed_subtopology:
+ assumes XC: "separated_between (subtopology X C) S (X frontier_of C)"
+ and ST: "separated_between (subtopology X C) S T"
+ shows "separated_between X S T"
+proof -
+ obtain U where clo: "closedin (subtopology X C) U" and ope: "openin (subtopology X C) U"
+ and "S \<subseteq> U" and sub: "X frontier_of C \<union> T \<subseteq> topspace (subtopology X C) - U"
+ by (meson assms separated_between separated_between_Un)
+ then have "X frontier_of C \<union> T \<subseteq> topspace X \<inter> C - U"
+ by auto
+ have "closedin X (topspace X \<inter> C)"
+ by (metis XC frontier_of_restrict frontier_of_subset_eq inf_le1 separated_between_imp_subset topspace_subtopology)
+ then have "closedin X U"
+ by (metis clo closedin_closed_subtopology subtopology_restrict)
+ moreover have "openin (subtopology X C) U \<longleftrightarrow> openin X U \<and> U \<subseteq> C"
+ using disjnt_iff sub by (force intro!: openin_subset_topspace_eq)
+ with ope have "openin X U"
+ by blast
+ moreover have "T \<subseteq> topspace X - U"
+ using ope openin_closedin_eq sub by auto
+ ultimately show ?thesis
+ using \<open>S \<subseteq> U\<close> separated_between by blast
+qed
+
+lemma separated_between_from_closed_subtopology_frontier:
+ "separated_between (subtopology X T) S (X frontier_of T)
+ \<Longrightarrow> separated_between X S (X frontier_of T)"
+ using separated_between_from_closed_subtopology by blast
+
+lemma separated_between_from_frontier_of_closed_subtopology:
+ assumes "separated_between (subtopology X T) S (X frontier_of T)"
+ shows "separated_between X S (topspace X - T)"
+proof -
+ have "disjnt S (topspace X - T)"
+ using assms disjnt_iff separated_between_imp_subset by fastforce
+ then show ?thesis
+ by (metis Diff_subset assms frontier_of_complement separated_between_from_closed_subtopology separated_between_frontier_of_eq')
+qed
+
+lemma separated_between_compact_connected_component:
+ assumes "locally_compact_space X" "Hausdorff_space X"
+ and C: "C \<in> connected_components_of X"
+ and "compactin X C" "closedin X T" "disjnt C T"
+ shows "separated_between X C T"
+proof -
+ have Csub: "C \<subseteq> topspace X"
+ by (simp add: assms(4) compactin_subset_topspace)
+ have "Hausdorff_space (subtopology X (topspace X - T))"
+ using Hausdorff_space_subtopology assms(2) by blast
+ moreover have "compactin (subtopology X (topspace X - T)) C"
+ using assms Csub by (metis Diff_Int_distrib Diff_empty compact_imp_compactin_subtopology disjnt_def le_iff_inf)
+ moreover have "locally_compact_space (subtopology X (topspace X - T))"
+ by (meson assms closedin_def locally_compact_Hausdorff_imp_regular_space locally_compact_space_open_subset)
+ ultimately
+ obtain N L where "openin X N" "compactin X L" "closedin X L" "C \<subseteq> N" "N \<subseteq> L"
+ and Lsub: "L \<subseteq> topspace X - T"
+ using \<open>Hausdorff_space X\<close> \<open>closedin X T\<close>
+ apply (simp add: locally_compact_space_compact_closed_compact compactin_subtopology)
+ by (meson closedin_def compactin_imp_closedin openin_trans_full)
+ then have disC: "disjnt C (topspace X - L)"
+ by (meson DiffD2 disjnt_iff subset_iff)
+ have "separated_between (subtopology X L) C (X frontier_of L)"
+ proof (rule cut_wire_fence_theorem)
+ show "compact_space (subtopology X L)"
+ by (simp add: \<open>compactin X L\<close> compact_space_subtopology)
+ show "Hausdorff_space (subtopology X L)"
+ by (simp add: Hausdorff_space_subtopology \<open>Hausdorff_space X\<close>)
+ show "closedin (subtopology X L) C"
+ by (meson \<open>C \<subseteq> N\<close> \<open>N \<subseteq> L\<close> \<open>Hausdorff_space X\<close> \<open>compactin X C\<close> closedin_subset_topspace compactin_imp_closedin subset_trans)
+ show "closedin (subtopology X L) (X frontier_of L)"
+ by (simp add: \<open>closedin X L\<close> closedin_frontier_of closedin_subset_topspace frontier_of_subset_closedin)
+ show "disjnt D C \<or> disjnt D (X frontier_of L)"
+ if "connectedin (subtopology X L) D" for D
+ proof (rule ccontr)
+ assume "\<not> (disjnt D C \<or> disjnt D (X frontier_of L))"
+ moreover have "connectedin X D"
+ using connectedin_subtopology that by blast
+ ultimately show False
+ using that connected_components_of_maximal [of C X D] C
+ apply (simp add: disjnt_iff)
+ by (metis Diff_eq_empty_iff \<open>C \<subseteq> N\<close> \<open>N \<subseteq> L\<close> \<open>openin X N\<close> disjoint_iff frontier_of_openin_straddle_Int(2) subsetD)
+ qed
+ qed
+ then have "separated_between X (X frontier_of C) (topspace X - L)"
+ using separated_between_from_frontier_of_closed_subtopology separated_between_frontier_of_eq by blast
+ with \<open>closedin X T\<close>
+ separated_between_frontier_of [OF Csub disC]
+ show ?thesis
+ unfolding separated_between by (smt (verit) Diff_iff Lsub closedin_subset subset_iff)
+qed
+
+lemma wilder_locally_compact_component_thm:
+ assumes "locally_compact_space X" "Hausdorff_space X"
+ and "C \<in> connected_components_of X" "compactin X C" "openin X W" "C \<subseteq> W"
+ obtains U V where "openin X U" "openin X V" "disjnt U V" "U \<union> V = topspace X" "C \<subseteq> U" "U \<subseteq> W"
+proof -
+ have "closedin X (topspace X - W)"
+ using \<open>openin X W\<close> by blast
+ moreover have "disjnt C (topspace X - W)"
+ using \<open>C \<subseteq> W\<close> disjnt_def by fastforce
+ ultimately have "separated_between X C (topspace X - W)"
+ using separated_between_compact_connected_component assms by blast
+ then show thesis
+ by (smt (verit, del_insts) DiffI disjnt_iff openin_subset separated_between_def subset_iff that)
+qed
+
+lemma compact_quasi_eq_connected_components_of:
+ assumes "locally_compact_space X" "Hausdorff_space X" "compactin X C"
+ shows "C \<in> quasi_components_of X \<longleftrightarrow> C \<in> connected_components_of X"
+proof -
+ have "compactin X (connected_component_of_set X x)"
+ if "x \<in> topspace X" "compactin X (quasi_component_of_set X x)" for x
+ proof (rule closed_compactin)
+ show "compactin X (quasi_component_of_set X x)"
+ by (simp add: that)
+ show "connected_component_of_set X x \<subseteq> quasi_component_of_set X x"
+ by (simp add: connected_component_subset_quasi_component_of)
+ show "closedin X (connected_component_of_set X x)"
+ by (simp add: closedin_connected_component_of)
+ qed
+ moreover have "connected_component_of X x = quasi_component_of X x"
+ if \<section>: "x \<in> topspace X" "compactin X (connected_component_of_set X x)" for x
+ proof -
+ have "\<And>y. connected_component_of X x y \<Longrightarrow> quasi_component_of X x y"
+ by (simp add: connected_imp_quasi_component_of)
+ moreover have False if non: "\<not> connected_component_of X x y" and quasi: "quasi_component_of X x y" for y
+ proof -
+ have "y \<in> topspace X"
+ by (meson quasi_component_of_equiv that)
+ then have "closedin X {y}"
+ by (simp add: \<open>Hausdorff_space X\<close> compactin_imp_closedin)
+ moreover have "disjnt (connected_component_of_set X x) {y}"
+ by (simp add: non)
+ moreover have "\<not> separated_between X (connected_component_of_set X x) {y}"
+ using \<section> quasi separated_between_pointwise_left
+ by (fastforce simp: quasi_component_nonseparated connected_component_of_refl)
+ ultimately show False
+ using assms by (metis \<section> connected_component_in_connected_components_of separated_between_compact_connected_component)
+ qed
+ ultimately show ?thesis
+ by blast
+ qed
+ ultimately show ?thesis
+ using \<open>compactin X C\<close> unfolding connected_components_of_def image_iff quasi_components_of_def by metis
+qed
+
+
+lemma boundary_bumping_theorem_closed_gen:
+ assumes "connected_space X" "locally_compact_space X" "Hausdorff_space X" "closedin X S"
+ "S \<noteq> topspace X" and C: "compactin X C" "C \<in> connected_components_of (subtopology X S)"
+ shows "C \<inter> X frontier_of S \<noteq> {}"
+proof
+ assume \<section>: "C \<inter> X frontier_of S = {}"
+ consider "C \<noteq> {}" "X frontier_of S \<subseteq> topspace X" | "C \<subseteq> topspace X" "S = {}"
+ using C by (metis frontier_of_subset_topspace nonempty_connected_components_of)
+ then show False
+ proof cases
+ case 1
+ have "separated_between (subtopology X S) C (X frontier_of S)"
+ proof (rule separated_between_compact_connected_component)
+ show "compactin (subtopology X S) C"
+ using C compact_imp_compactin_subtopology connected_components_of_subset by fastforce
+ show "closedin (subtopology X S) (X frontier_of S)"
+ by (simp add: \<open>closedin X S\<close> closedin_frontier_of closedin_subset_topspace frontier_of_subset_closedin)
+ show "disjnt C (X frontier_of S)"
+ using \<section> by (simp add: disjnt_def)
+ qed (use assms Hausdorff_space_subtopology locally_compact_space_closed_subset in auto)
+ then have "separated_between X C (X frontier_of S)"
+ using separated_between_from_closed_subtopology by auto
+ then have "X frontier_of S = {}"
+ using \<open>C \<noteq> {}\<close> \<open>connected_space X\<close> connected_space_separated_between by blast
+ moreover have "C \<subseteq> S"
+ using C connected_components_of_subset by fastforce
+ ultimately show False
+ using 1 assms by (metis closedin_subset connected_space_eq_frontier_eq_empty subset_empty)
+ next
+ case 2
+ then show False
+ using C connected_components_of_eq_empty by fastforce
+ qed
+qed
+
+lemma boundary_bumping_theorem_closed:
+ assumes "connected_space X" "compact_space X" "Hausdorff_space X" "closedin X S"
+ "S \<noteq> topspace X" "C \<in> connected_components_of(subtopology X S)"
+ shows "C \<inter> X frontier_of S \<noteq> {}"
+ by (meson assms boundary_bumping_theorem_closed_gen closedin_compact_space closedin_connected_components_of
+ closedin_trans_full compact_imp_locally_compact_space)
+
+
+lemma intermediate_continuum_exists:
+ assumes "connected_space X" "locally_compact_space X" "Hausdorff_space X"
+ and C: "compactin X C" "connectedin X C" "C \<noteq> {}" "C \<noteq> topspace X"
+ and U: "openin X U" "C \<subseteq> U"
+ obtains D where "compactin X D" "connectedin X D" "C \<subset> D" "D \<subset> U"
+proof -
+ have "C \<subseteq> topspace X"
+ by (simp add: C compactin_subset_topspace)
+ with C obtain a where a: "a \<in> topspace X" "a \<notin> C"
+ by blast
+ moreover have "compactin (subtopology X (U - {a})) C"
+ by (simp add: C U a compact_imp_compactin_subtopology subset_Diff_insert)
+ moreover have "Hausdorff_space (subtopology X (U - {a}))"
+ using Hausdorff_space_subtopology assms(3) by blast
+ moreover
+ have "locally_compact_space (subtopology X (U - {a}))"
+ by (rule locally_compact_space_open_subset)
+ (auto simp: locally_compact_Hausdorff_imp_regular_space open_in_Hausdorff_delete assms)
+ ultimately obtain V K where V: "openin X V" "a \<notin> V" "V \<subseteq> U" and K: "compactin X K" "a \<notin> K" "K \<subseteq> U"
+ and cloK: "closedin (subtopology X (U - {a})) K" and "C \<subseteq> V" "V \<subseteq> K"
+ using locally_compact_space_compact_closed_compact [of "subtopology X (U - {a})"] assms
+ by (smt (verit, del_insts) Diff_empty compactin_subtopology open_in_Hausdorff_delete openin_open_subtopology subset_Diff_insert)
+ then obtain D where D: "D \<in> connected_components_of (subtopology X K)" and "C \<subseteq> D"
+ using C by (metis bot.extremum_unique connectedin_subtopology order.trans exists_connected_component_of_superset subtopology_topspace)
+ show thesis
+ proof
+ have cloD: "closedin (subtopology X K) D"
+ by (simp add: D closedin_connected_components_of)
+ then have XKD: "compactin (subtopology X K) D"
+ by (simp add: K closedin_compact_space compact_space_subtopology)
+ then show "compactin X D"
+ using compactin_subtopology_imp_compact by blast
+ show "connectedin X D"
+ using D connectedin_connected_components_of connectedin_subtopology by blast
+ have "K \<noteq> topspace X"
+ using K a by blast
+ moreover have "V \<subseteq> X interior_of K"
+ by (simp add: \<open>openin X V\<close> \<open>V \<subseteq> K\<close> interior_of_maximal)
+ ultimately have "C \<noteq> D"
+ using boundary_bumping_theorem_closed_gen [of X K C] D \<open>C \<subseteq> V\<close>
+ by (auto simp add: assms K compactin_imp_closedin frontier_of_def)
+ then show "C \<subset> D"
+ using \<open>C \<subseteq> D\<close> by blast
+ have "D \<subseteq> U"
+ using K(3) \<open>closedin (subtopology X K) D\<close> closedin_imp_subset by blast
+ moreover have "D \<noteq> U"
+ using K XKD \<open>C \<subset> D\<close> assms
+ by (metis \<open>K \<noteq> topspace X\<close> cloD closedin_imp_subset compactin_imp_closedin connected_space_clopen_in
+ inf_bot_left inf_le2 subset_antisym)
+ ultimately
+ show "D \<subset> U" by blast
+ qed
+qed
+
+lemma boundary_bumping_theorem_gen:
+ assumes X: "connected_space X" "locally_compact_space X" "Hausdorff_space X"
+ and "S \<subset> topspace X" and C: "C \<in> connected_components_of(subtopology X S)"
+ and compC: "compactin X (X closure_of C)"
+ shows "X frontier_of C \<inter> X frontier_of S \<noteq> {}"
+proof -
+ have Csub: "C \<subseteq> topspace X" "C \<subseteq> S" and "connectedin X C"
+ using C connectedin_connected_components_of connectedin_subset_topspace connectedin_subtopology
+ by fastforce+
+ have "C \<noteq> {}"
+ using C nonempty_connected_components_of by blast
+ obtain "X interior_of C \<subseteq> X interior_of S" "X closure_of C \<subseteq> X closure_of S"
+ by (simp add: Csub closure_of_mono interior_of_mono)
+ moreover have False if "X closure_of C \<subseteq> X interior_of S"
+ proof -
+ have "X closure_of C = C"
+ by (meson C closedin_connected_component_of_subtopology closure_of_eq interior_of_subset order_trans that)
+ with that have "C \<subseteq> X interior_of S"
+ by simp
+ then obtain D where "compactin X D" and "connectedin X D" and "C \<subset> D" and "D \<subset> X interior_of S"
+ using intermediate_continuum_exists assms \<open>X closure_of C = C\<close> compC Csub
+ by (metis \<open>C \<noteq> {}\<close> \<open>connectedin X C\<close> openin_interior_of psubsetE)
+ then have "D \<subseteq> C"
+ by (metis C \<open>C \<noteq> {}\<close> connected_components_of_maximal connectedin_subtopology disjnt_def inf.orderE interior_of_subset order_trans psubsetE)
+ then show False
+ using \<open>C \<subset> D\<close> by blast
+ qed
+ ultimately show ?thesis
+ by (smt (verit, ccfv_SIG) DiffI disjoint_iff_not_equal frontier_of_def subset_eq)
+qed
+
+lemma boundary_bumping_theorem:
+ "\<lbrakk>connected_space X; compact_space X; Hausdorff_space X; S \<subset> topspace X;
+ C \<in> connected_components_of(subtopology X S)\<rbrakk>
+ \<Longrightarrow> X frontier_of C \<inter> X frontier_of S \<noteq> {}"
+ by (simp add: boundary_bumping_theorem_gen closedin_compact_space compact_imp_locally_compact_space)
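
Editorial annotation (not part of this changeset): in conventional notation the theorem reads: if \(X\) is a compact connected Hausdorff space, \(S \subsetneq \operatorname{topspace} X\), and \(C\) is a connected component of the subspace \(S\), then
\[
  X\ \operatorname{frontier\_of}\ C \;\cap\; X\ \operatorname{frontier\_of}\ S \;\neq\; \varnothing,
\]
i.e. every component of a proper subset reaches the boundary of that subset.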
+
+subsection \<open>Compactly generated spaces (k-spaces)\<close>
+
+text \<open>These don't have to be Hausdorff\<close>
+
+definition k_space where
+ "k_space X \<equiv>
+ \<forall>S. S \<subseteq> topspace X \<longrightarrow>
+ (closedin X S \<longleftrightarrow> (\<forall>K. compactin X K \<longrightarrow> closedin (subtopology X K) (K \<inter> S)))"
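
Editorial annotation (not part of this changeset): the definition says that closedness in a k-space can be tested on compact subsets, i.e. for \(S \subseteq \operatorname{topspace} X\),
\[
  S \text{ closed in } X \;\Longleftrightarrow\; K \cap S \text{ closed in the subspace } K \text{ for every compact } K \subseteq X.
\]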
+
+lemma k_space:
+ "k_space X \<longleftrightarrow>
+ (\<forall>S. S \<subseteq> topspace X \<and>
+ (\<forall>K. compactin X K \<longrightarrow> closedin (subtopology X K) (K \<inter> S)) \<longrightarrow> closedin X S)"
+ by (metis closedin_subtopology inf_commute k_space_def)
+
+lemma k_space_open:
+ "k_space X \<longleftrightarrow>
+ (\<forall>S. S \<subseteq> topspace X \<and>
+ (\<forall>K. compactin X K \<longrightarrow> openin (subtopology X K) (K \<inter> S)) \<longrightarrow> openin X S)"
+proof -
+ have "openin X S"
+ if "k_space X" "S \<subseteq> topspace X"
+ and "\<forall>K. compactin X K \<longrightarrow> openin (subtopology X K) (K \<inter> S)" for S
+ using that unfolding k_space openin_closedin_eq
+ by (metis Diff_Int_distrib2 Diff_subset inf_commute topspace_subtopology)
+ moreover have "k_space X"
+ if "\<forall>S. S \<subseteq> topspace X \<and> (\<forall>K. compactin X K \<longrightarrow> openin (subtopology X K) (K \<inter> S)) \<longrightarrow> openin X S"
+ unfolding k_space openin_closedin_eq
+ by (simp add: Diff_Int_distrib closedin_def inf_commute that)
+ ultimately show ?thesis
+ by blast
+qed
+
+lemma k_space_alt:
+ "k_space X \<longleftrightarrow>
+ (\<forall>S. S \<subseteq> topspace X
+ \<longrightarrow> (openin X S \<longleftrightarrow> (\<forall>K. compactin X K \<longrightarrow> openin (subtopology X K) (K \<inter> S))))"
+ by (meson k_space_open openin_subtopology_Int2)
+
+lemma k_space_quotient_map_image:
+ assumes q: "quotient_map X Y q" and X: "k_space X"
+ shows "k_space Y"
+ unfolding k_space
+proof clarify
+ fix S
+ assume "S \<subseteq> topspace Y" and S: "\<forall>K. compactin Y K \<longrightarrow> closedin (subtopology Y K) (K \<inter> S)"
+ then have iff: "closedin X {x \<in> topspace X. q x \<in> S} \<longleftrightarrow> closedin Y S"
+ using q quotient_map_closedin by fastforce
+ have "closedin (subtopology X K) (K \<inter> {x \<in> topspace X. q x \<in> S})" if "compactin X K" for K
+ proof -
+ have "{x \<in> topspace X. q x \<in> q ` K} \<inter> K = K"
+ using compactin_subset_topspace that by blast
+ then have *: "subtopology X K = subtopology (subtopology X {x \<in> topspace X. q x \<in> q ` K}) K"
+ by (simp add: subtopology_subtopology)
+ have **: "K \<inter> {x \<in> topspace X. q x \<in> S} =
+ K \<inter> {x \<in> topspace (subtopology X {x \<in> topspace X. q x \<in> q ` K}). q x \<in> q ` K \<inter> S}"
+ by auto
+ have "K \<subseteq> topspace X"
+ by (simp add: compactin_subset_topspace that)
+ show ?thesis
+ unfolding * **
+ proof (intro closedin_continuous_map_preimage closedin_subtopology_Int_closed)
+ show "continuous_map (subtopology X {x \<in> topspace X. q x \<in> q ` K}) (subtopology Y (q ` K)) q"
+ by (auto simp add: continuous_map_in_subtopology continuous_map_from_subtopology q quotient_imp_continuous_map)
+ show "closedin (subtopology Y (q ` K)) (q ` K \<inter> S)"
+ by (meson S image_compactin q quotient_imp_continuous_map that)
+ qed
+ qed
+ then have "closedin X {x \<in> topspace X. q x \<in> S}"
+ by (metis (no_types, lifting) X k_space mem_Collect_eq subsetI)
+ with iff show "closedin Y S" by simp
+qed
+
+lemma k_space_retraction_map_image:
+ "\<lbrakk>retraction_map X Y r; k_space X\<rbrakk> \<Longrightarrow> k_space Y"
+ using k_space_quotient_map_image retraction_imp_quotient_map by blast
+
+lemma homeomorphic_k_space:
+ "X homeomorphic_space Y \<Longrightarrow> k_space X \<longleftrightarrow> k_space Y"
+ by (meson homeomorphic_map_def homeomorphic_space homeomorphic_space_sym k_space_quotient_map_image)
+
+lemma k_space_perfect_map_image:
+ "\<lbrakk>k_space X; perfect_map X Y f\<rbrakk> \<Longrightarrow> k_space Y"
+ using k_space_quotient_map_image perfect_imp_quotient_map by blast
+
+lemma locally_compact_imp_k_space:
+ assumes "locally_compact_space X"
+ shows "k_space X"
+ unfolding k_space
+proof clarify
+ fix S
+ assume "S \<subseteq> topspace X" and S: "\<forall>K. compactin X K \<longrightarrow> closedin (subtopology X K) (K \<inter> S)"
+ have False if non: "\<not> (X closure_of S \<subseteq> S)"
+ proof -
+ obtain x where "x \<in> X closure_of S" "x \<notin> S"
+ using non by blast
+ then have "x \<in> topspace X"
+ by (simp add: in_closure_of)
+ then obtain K U where "openin X U" "compactin X K" "x \<in> U" "U \<subseteq> K"
+ by (meson assms locally_compact_space_def)
+ then show False
+ using \<open>x \<in> X closure_of S\<close> openin_Int_closure_of_eq [OF \<open>openin X U\<close>]
+ by (smt (verit, ccfv_threshold) Int_iff S \<open>x \<notin> S\<close> closedin_Int_closure_of inf.orderE inf_assoc)
+ qed
+ then show "closedin X S"
+ using S \<open>S \<subseteq> topspace X\<close> closure_of_subset_eq by blast
+qed
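
Editorial sketch of the argument above (an annotation, not part of this changeset): if \(S\) were not closed, pick \(x \in \overline{S} \setminus S\) and, by local compactness, an open \(U\) and a compact \(K\) with \(x \in U \subseteq K\). Since \(U\) is open,
\[
  x \in U \cap \overline{S} \subseteq \overline{U \cap S} \subseteq \overline{K \cap S},
\]
and because \(K \cap S\) is closed in the subspace \(K\) we get \(x \in K \cap \overline{K \cap S} = K \cap S \subseteq S\), a contradiction.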
+
+lemma compact_imp_k_space:
+ "compact_space X \<Longrightarrow> k_space X"
+ by (simp add: compact_imp_locally_compact_space locally_compact_imp_k_space)
+
+lemma k_space_discrete_topology: "k_space(discrete_topology U)"
+ by (simp add: k_space_open)
+
+lemma k_space_closed_subtopology:
+ assumes "k_space X" "closedin X C"
+ shows "k_space (subtopology X C)"
+unfolding k_space compactin_subtopology
+proof clarsimp
+ fix S
+ assume Ssub: "S \<subseteq> topspace X" "S \<subseteq> C"
+ and S: "\<forall>K. compactin X K \<and> K \<subseteq> C \<longrightarrow> closedin (subtopology (subtopology X C) K) (K \<inter> S)"
+ have "closedin (subtopology X K) (K \<inter> S)" if "compactin X K" for K
+ proof -
+ have "closedin (subtopology (subtopology X C) (K \<inter> C)) ((K \<inter> C) \<inter> S)"
+ by (simp add: S \<open>closedin X C\<close> compact_Int_closedin that)
+ then show ?thesis
+ using \<open>closedin X C\<close> Ssub by (auto simp add: closedin_subtopology)
+ qed
+ then show "closedin (subtopology X C) S"
+ by (metis Ssub \<open>k_space X\<close> closedin_subset_topspace k_space_def)
+qed
+
+lemma k_space_subtopology:
+ assumes 1: "\<And>T. \<lbrakk>T \<subseteq> topspace X; T \<subseteq> S;
+ \<And>K. compactin X K \<Longrightarrow> closedin (subtopology X (K \<inter> S)) (K \<inter> T)\<rbrakk> \<Longrightarrow> closedin (subtopology X S) T"
+ assumes 2: "\<And>K. compactin X K \<Longrightarrow> k_space(subtopology X (K \<inter> S))"
+ shows "k_space (subtopology X S)"
+ unfolding k_space
+proof (intro conjI strip)
+ fix U
+ assume \<section>: "U \<subseteq> topspace (subtopology X S) \<and> (\<forall>K. compactin (subtopology X S) K \<longrightarrow> closedin (subtopology (subtopology X S) K) (K \<inter> U))"
+ have "closedin (subtopology X (K \<inter> S)) (K \<inter> U)" if "compactin X K" for K
+ proof -
+ have "K \<inter> U \<subseteq> topspace (subtopology X (K \<inter> S))"
+ using "\<section>" by auto
+ moreover
+ have "\<And>K'. compactin (subtopology X (K \<inter> S)) K' \<Longrightarrow> closedin (subtopology (subtopology X (K \<inter> S)) K') (K' \<inter> K \<inter> U)"
+ by (metis "\<section>" compactin_subtopology inf.orderE inf_commute subtopology_subtopology)
+ ultimately show ?thesis
+ by (metis (no_types, opaque_lifting) "2" inf.assoc k_space_def that)
+ qed
+ then show "closedin (subtopology X S) U"
+ using "1" \<section> by auto
+qed
+
+lemma k_space_subtopology_open:
+ assumes 1: "\<And>T. \<lbrakk>T \<subseteq> topspace X; T \<subseteq> S;
+ \<And>K. compactin X K \<Longrightarrow> openin (subtopology X (K \<inter> S)) (K \<inter> T)\<rbrakk> \<Longrightarrow> openin (subtopology X S) T"
+ assumes 2: "\<And>K. compactin X K \<Longrightarrow> k_space(subtopology X (K \<inter> S))"
+ shows "k_space (subtopology X S)"
+ unfolding k_space_open
+proof (intro conjI strip)
+ fix U
+ assume \<section>: "U \<subseteq> topspace (subtopology X S) \<and> (\<forall>K. compactin (subtopology X S) K \<longrightarrow> openin (subtopology (subtopology X S) K) (K \<inter> U))"
+ have "openin (subtopology X (K \<inter> S)) (K \<inter> U)" if "compactin X K" for K
+ proof -
+ have "K \<inter> U \<subseteq> topspace (subtopology X (K \<inter> S))"
+ using "\<section>" by auto
+ moreover
+ have "\<And>K'. compactin (subtopology X (K \<inter> S)) K' \<Longrightarrow> openin (subtopology (subtopology X (K \<inter> S)) K') (K' \<inter> K \<inter> U)"
+ by (metis "\<section>" compactin_subtopology inf.orderE inf_commute subtopology_subtopology)
+ ultimately show ?thesis
+ by (metis (no_types, opaque_lifting) "2" inf.assoc k_space_open that)
+ qed
+ then show "openin (subtopology X S) U"
+ using "1" \<section> by auto
+qed
+
+
+lemma k_space_open_subtopology_aux:
+ assumes "kc_space X" "compact_space X" "openin X V"
+ shows "k_space (subtopology X V)"
+proof (clarsimp simp: k_space subtopology_subtopology compactin_subtopology Int_absorb1)
+ fix S
+ assume "S \<subseteq> topspace X"
+ and "S \<subseteq> V"
+ and S: "\<forall>K. compactin X K \<and> K \<subseteq> V \<longrightarrow> closedin (subtopology X K) (K \<inter> S)"
+ then have "V \<subseteq> topspace X"
+ using assms openin_subset by blast
+ have "S = V \<inter> ((topspace X - V) \<union> S)"
+ using \<open>S \<subseteq> V\<close> by auto
+ moreover have "closedin (subtopology X V) (V \<inter> ((topspace X - V) \<union> S))"
+ proof (intro closedin_subtopology_Int_closed compactin_imp_closedin_gen \<open>kc_space X\<close>)
+ show "compactin X (topspace X - V \<union> S)"
+ unfolding compactin_def
+ proof (intro conjI strip)
+ show "topspace X - V \<union> S \<subseteq> topspace X"
+ by (simp add: \<open>S \<subseteq> topspace X\<close>)
+ fix \<U>
+ assume \<U>: "Ball \<U> (openin X) \<and> topspace X - V \<union> S \<subseteq> \<Union>\<U>"
+ moreover
+ have "compactin X (topspace X - V)"
+ using assms closedin_compact_space by blast
+ ultimately obtain \<G> where "finite \<G>" "\<G> \<subseteq> \<U>" and \<G>: "topspace X - V \<subseteq> \<Union>\<G>"
+ unfolding compactin_def using \<open>V \<subseteq> topspace X\<close> by (metis le_sup_iff)
+ then have "topspace X - \<Union>\<G> \<subseteq> V"
+ by blast
+ then have "closedin (subtopology X (topspace X - \<Union>\<G>)) ((topspace X - \<Union>\<G>) \<inter> S)"
+ by (meson S \<U> \<open>\<G> \<subseteq> \<U>\<close> \<open>compact_space X\<close> closedin_compact_space openin_Union openin_closedin_eq subset_iff)
+ then have "compactin X ((topspace X - \<Union>\<G>) \<inter> S)"
+ by (meson \<U> \<open>\<G> \<subseteq> \<U>\<close> \<open>compact_space X\<close> closedin_compact_space closedin_trans_full openin_Union openin_closedin_eq subset_iff)
+ then obtain \<H> where "finite \<H>" "\<H> \<subseteq> \<U>" "(topspace X - \<Union>\<G>) \<inter> S \<subseteq> \<Union>\<H>"
+ unfolding compactin_def by (smt (verit, best) \<U> inf_le2 subset_trans sup.boundedE)
+ with \<G> have "topspace X - V \<union> S \<subseteq> \<Union>(\<G> \<union> \<H>)"
+ using \<open>S \<subseteq> topspace X\<close> by auto
+ then show "\<exists>\<F>. finite \<F> \<and> \<F> \<subseteq> \<U> \<and> topspace X - V \<union> S \<subseteq> \<Union>\<F>"
+ by (metis \<open>\<G> \<subseteq> \<U>\<close> \<open>\<H> \<subseteq> \<U>\<close> \<open>finite \<G>\<close> \<open>finite \<H>\<close> finite_Un le_sup_iff)
+ qed
+ qed
+ ultimately show "closedin (subtopology X V) S"
+ by metis
+qed
+
+
+lemma k_space_open_subtopology:
+ assumes X: "kc_space X \<or> Hausdorff_space X \<or> regular_space X" and "k_space X" "openin X S"
+ shows "k_space(subtopology X S)"
+proof (rule k_space_subtopology_open)
+ fix T
+ assume "T \<subseteq> topspace X"
+ and "T \<subseteq> S"
+ and T: "\<And>K. compactin X K \<Longrightarrow> openin (subtopology X (K \<inter> S)) (K \<inter> T)"
+ have "openin (subtopology X K) (K \<inter> T)" if "compactin X K" for K
+ by (smt (verit, ccfv_threshold) T assms(3) inf_assoc inf_commute openin_Int openin_subtopology that)
+ then show "openin (subtopology X S) T"
+ by (metis \<open>T \<subseteq> S\<close> \<open>T \<subseteq> topspace X\<close> assms(2) k_space_alt subset_openin_subtopology)
+next
+ fix K
+ assume "compactin X K"
+ then have KS: "openin (subtopology X K) (K \<inter> S)"
+ by (simp add: \<open>openin X S\<close> openin_subtopology_Int2)
+ have XK: "compact_space (subtopology X K)"
+ by (simp add: \<open>compactin X K\<close> compact_space_subtopology)
+ show "k_space (subtopology X (K \<inter> S))"
+ using X
+ proof (rule disjE)
+ assume "kc_space X"
+ then show "k_space (subtopology X (K \<inter> S))"
+ using k_space_open_subtopology_aux [of "subtopology X K" "K \<inter> S"]
+ by (simp add: KS XK kc_space_subtopology subtopology_subtopology)
+ next
+ assume "Hausdorff_space X \<or> regular_space X"
+ then have "locally_compact_space (subtopology (subtopology X K) (K \<inter> S))"
+ using locally_compact_space_open_subset Hausdorff_space_subtopology KS XK
+ compact_imp_locally_compact_space regular_space_subtopology by blast
+ then show "k_space (subtopology X (K \<inter> S))"
+ by (simp add: locally_compact_imp_k_space subtopology_subtopology)
+ qed
+qed
+
+lemma k_kc_space_subtopology:
+ "\<lbrakk>k_space X; kc_space X; openin X S \<or> closedin X S\<rbrakk> \<Longrightarrow> k_space(subtopology X S) \<and> kc_space(subtopology X S)"
+ by (metis k_space_closed_subtopology k_space_open_subtopology kc_space_subtopology)
+
+
+lemma k_space_as_quotient_explicit:
+ "k_space X \<longleftrightarrow> quotient_map (sum_topology (subtopology X) {K. compactin X K}) X snd"
+proof -
+ have [simp]: "{x \<in> topspace X. x \<in> K \<and> x \<in> U} = K \<inter> U" if "U \<subseteq> topspace X" for K U
+ using that by blast
+ show "?thesis"
+ apply (simp add: quotient_map_def openin_sum_topology snd_image_Sigma k_space_alt)
+ by (smt (verit, del_insts) Union_iff compactin_sing inf.orderE mem_Collect_eq singletonI subsetI)
+qed
+
+lemma k_space_as_quotient:
+ fixes X :: "'a topology"
+ shows "k_space X \<longleftrightarrow> (\<exists>q. \<exists>Y:: ('a set * 'a) topology. locally_compact_space Y \<and> quotient_map Y X q)"
+ (is "?lhs=?rhs")
+proof
+ show "k_space X" if ?rhs
+ using that k_space_quotient_map_image locally_compact_imp_k_space by blast
+next
+ assume "k_space X"
+ show ?rhs
+ proof (intro exI conjI)
+ show "locally_compact_space (sum_topology (subtopology X) {K. compactin X K})"
+ by (simp add: compact_imp_locally_compact_space compact_space_subtopology locally_compact_space_sum_topology)
+ show "quotient_map (sum_topology (subtopology X) {K. compactin X K}) X snd"
+ using \<open>k_space X\<close> k_space_as_quotient_explicit by blast
+ qed
+qed
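
Editorial annotation (not part of this changeset): the explicit quotient presentation above uses the disjoint sum of all compact subspaces of \(X\) with the projection \(\operatorname{snd}\); schematically
\[
  \operatorname{snd} \;:\; \sum_{K \text{ compact in } X} X|_K \;\longrightarrow\; X,
\]
and \(X\) is a k-space exactly when this map is a quotient map. The sum is locally compact because each summand is a compact space.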
+
+lemma k_space_prod_topology_left:
+ assumes X: "locally_compact_space X" "Hausdorff_space X \<or> regular_space X" and "k_space Y"
+ shows "k_space (prod_topology X Y)"
+proof -
+ obtain q and Z :: "('b set * 'b) topology" where "locally_compact_space Z" and q: "quotient_map Z Y q"
+ using \<open>k_space Y\<close> k_space_as_quotient by blast
+ then show ?thesis
+ using quotient_map_prod_right [OF X q] X k_space_quotient_map_image locally_compact_imp_k_space
+ locally_compact_space_prod_topology by blast
+qed
+
+text \<open>Essentially the same proof\<close>
+lemma k_space_prod_topology_right:
+ assumes "k_space X" and Y: "locally_compact_space Y" "Hausdorff_space Y \<or> regular_space Y"
+ shows "k_space (prod_topology X Y)"
+proof -
+ obtain q and Z :: "('a set * 'a) topology" where "locally_compact_space Z" and q: "quotient_map Z X q"
+ using \<open>k_space X\<close> k_space_as_quotient by blast
+ then show ?thesis
+ using quotient_map_prod_left [OF Y q] using Y k_space_quotient_map_image locally_compact_imp_k_space
+ locally_compact_space_prod_topology by blast
+qed
+
+
+lemma continuous_map_from_k_space:
+ assumes "k_space X" and f: "\<And>K. compactin X K \<Longrightarrow> continuous_map(subtopology X K) Y f"
+ shows "continuous_map X Y f"
+proof -
+ have "\<And>x. x \<in> topspace X \<Longrightarrow> f x \<in> topspace Y"
+ by (metis compactin_absolute compactin_sing f image_compactin image_empty image_insert)
+ moreover have "closedin X {x \<in> topspace X. f x \<in> C}" if "closedin Y C" for C
+ proof -
+ have "{x \<in> topspace X. f x \<in> C} \<subseteq> topspace X"
+ by fastforce
+ moreover
+ have eq: "K \<inter> {x \<in> topspace X. f x \<in> C} = {x. x \<in> topspace(subtopology X K) \<and> f x \<in> (f ` K \<inter> C)}" for K
+ by auto
+ have "closedin (subtopology X K) (K \<inter> {x \<in> topspace X. f x \<in> C})" if "compactin X K" for K
+ unfolding eq
+ proof (rule closedin_continuous_map_preimage)
+ show "continuous_map (subtopology X K) (subtopology Y (f`K)) f"
+ by (simp add: continuous_map_in_subtopology f image_mono that)
+ show "closedin (subtopology Y (f`K)) (f ` K \<inter> C)"
+ using \<open>closedin Y C\<close> closedin_subtopology by blast
+ qed
+ ultimately show ?thesis
+ using \<open>k_space X\<close> k_space by blast
+ qed
+ ultimately show ?thesis
+ by (simp add: continuous_map_closedin)
+qed
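
Editorial annotation (not part of this changeset): since restrictions of continuous maps are always continuous, the lemma shows that on a k-space continuity can be checked compactwise:
\[
  f : X \to Y \text{ continuous} \;\Longleftrightarrow\; f \text{ restricted to } K \text{ is continuous for every compact } K \subseteq X.
\]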
+
+lemma closed_map_into_k_space:
+ assumes "k_space Y" and fim: "f ` (topspace X) \<subseteq> topspace Y"
+ and f: "\<And>K. compactin Y K
+ \<Longrightarrow> closed_map(subtopology X {x \<in> topspace X. f x \<in> K}) (subtopology Y K) f"
+ shows "closed_map X Y f"
+ unfolding closed_map_def
+proof (intro strip)
+ fix C
+ assume "closedin X C"
+ have "closedin (subtopology Y K) (K \<inter> f ` C)"
+ if "compactin Y K" for K
+ proof -
+ have eq: "K \<inter> f ` C = f ` ({x \<in> topspace X. f x \<in> K} \<inter> C)"
+ using \<open>closedin X C\<close> closedin_subset by auto
+ show ?thesis
+ unfolding eq
+ by (metis (no_types, lifting) \<open>closedin X C\<close> closed_map_def closedin_subtopology f inf_commute that)
+ qed
+ then show "closedin Y (f ` C)"
+ using \<open>k_space Y\<close> unfolding k_space
+ by (meson \<open>closedin X C\<close> closedin_subset dual_order.trans fim image_mono)
+qed
+
+
+text \<open>Essentially the same proof\<close>
+lemma open_map_into_k_space:
+ assumes "k_space Y" and fim: "f ` (topspace X) \<subseteq> topspace Y"
+ and f: "\<And>K. compactin Y K
+ \<Longrightarrow> open_map (subtopology X {x \<in> topspace X. f x \<in> K}) (subtopology Y K) f"
+ shows "open_map X Y f"
+ unfolding open_map_def
+proof (intro strip)
+ fix C
+ assume "openin X C"
+ have "openin (subtopology Y K) (K \<inter> f ` C)"
+ if "compactin Y K" for K
+ proof -
+ have eq: "K \<inter> f ` C = f ` ({x \<in> topspace X. f x \<in> K} \<inter> C)"
+ using \<open>openin X C\<close> openin_subset by auto
+ show ?thesis
+ unfolding eq
+ by (metis (no_types, lifting) \<open>openin X C\<close> open_map_def openin_subtopology f inf_commute that)
+ qed
+ then show "openin Y (f ` C)"
+ using \<open>k_space Y\<close> unfolding k_space_open
+ by (meson \<open>openin X C\<close> openin_subset dual_order.trans fim image_mono)
+qed
+
+lemma quotient_map_into_k_space:
+ fixes f :: "'a \<Rightarrow> 'b"
+ assumes "k_space Y" and cmf: "continuous_map X Y f"
+ and fim: "f ` (topspace X) = topspace Y"
+ and f: "\<And>k. compactin Y k
+ \<Longrightarrow> quotient_map (subtopology X {x \<in> topspace X. f x \<in> k})
+ (subtopology Y k) f"
+ shows "quotient_map X Y f"
+proof -
+ have "closedin Y C"
+ if "C \<subseteq> topspace Y" and K: "closedin X {x \<in> topspace X. f x \<in> C}" for C
+ proof -
+ have "closedin (subtopology Y K) (K \<inter> C)" if "compactin Y K" for K
+ proof -
+ define Kf where "Kf \<equiv> {x \<in> topspace X. f x \<in> K}"
+ have *: "K \<inter> C \<subseteq> topspace Y \<and> K \<inter> C \<subseteq> K"
+ using \<open>C \<subseteq> topspace Y\<close> by blast
+ then have eq: "closedin (subtopology X Kf) (Kf \<inter> {x \<in> topspace X. f x \<in> C}) =
+ closedin (subtopology Y K) (K \<inter> C)"
+ using f [OF that] * unfolding quotient_map_closedin Kf_def
+ by (smt (verit, ccfv_SIG) Collect_cong Int_def compactin_subset_topspace mem_Collect_eq that topspace_subtopology topspace_subtopology_subset)
+ have dd: "{x \<in> topspace X \<inter> Kf. f x \<in> K \<inter> C} = Kf \<inter> {x \<in> topspace X. f x \<in> C}"
+ by (auto simp add: Kf_def)
+ have "closedin (subtopology X Kf) {x \<in> topspace X. x \<in> Kf \<and> f x \<in> K \<and> f x \<in> C}"
+ using K closedin_subtopology by (fastforce simp add: Kf_def)
+ with K closedin_subtopology_Int_closed eq show ?thesis
+ by blast
+ qed
+ then show ?thesis
+ using \<open>k_space Y\<close> that unfolding k_space by blast
+ qed
+ moreover have "closedin X {x \<in> topspace X. f x \<in> K}"
+ if "K \<subseteq> topspace Y" "closedin Y K" for K
+ using that cmf continuous_map_closedin by fastforce
+ ultimately show ?thesis
+ unfolding quotient_map_closedin using fim by blast
+qed
+
+lemma quotient_map_into_k_space_eq:
+ assumes "k_space Y" "kc_space Y"
+ shows "quotient_map X Y f \<longleftrightarrow>
+ continuous_map X Y f \<and> f ` (topspace X) = topspace Y \<and>
+ (\<forall>K. compactin Y K
+ \<longrightarrow> quotient_map (subtopology X {x \<in> topspace X. f x \<in> K}) (subtopology Y K) f)"
+ using assms
+ by (auto simp: kc_space_def intro: quotient_map_into_k_space quotient_map_restriction
+ dest: quotient_imp_continuous_map quotient_imp_surjective_map)
+
+lemma open_map_into_k_space_eq:
+ assumes "k_space Y"
+ shows "open_map X Y f \<longleftrightarrow>
+ f ` (topspace X) \<subseteq> topspace Y \<and>
+ (\<forall>k. compactin Y k
+ \<longrightarrow> open_map (subtopology X {x \<in> topspace X. f x \<in> k}) (subtopology Y k) f)"
+ (is "?lhs=?rhs")
+proof
+ show "?lhs \<Longrightarrow> ?rhs"
+ by (simp add: open_map_imp_subset_topspace open_map_restriction)
+ show "?rhs \<Longrightarrow> ?lhs"
+ by (simp add: assms open_map_into_k_space)
+qed
+
+lemma closed_map_into_k_space_eq:
+ assumes "k_space Y"
+ shows "closed_map X Y f \<longleftrightarrow>
+ f ` (topspace X) \<subseteq> topspace Y \<and>
+ (\<forall>k. compactin Y k
+ \<longrightarrow> closed_map (subtopology X {x \<in> topspace X. f x \<in> k}) (subtopology Y k) f)"
+ (is "?lhs \<longleftrightarrow> ?rhs")
+proof
+ show "?lhs \<Longrightarrow> ?rhs"
+ by (simp add: closed_map_imp_subset_topspace closed_map_restriction)
+ show "?rhs \<Longrightarrow> ?lhs"
+ by (simp add: assms closed_map_into_k_space)
+qed
+
+lemma proper_map_into_k_space:
+ assumes "k_space Y" and fim: "f ` (topspace X) \<subseteq> topspace Y"
+ and f: "\<And>K. compactin Y K
+ \<Longrightarrow> proper_map (subtopology X {x \<in> topspace X. f x \<in> K})
+ (subtopology Y K) f"
+ shows "proper_map X Y f"
+proof -
+ have "closed_map X Y f"
+ by (meson assms closed_map_into_k_space fim proper_map_def)
+ with f topspace_subtopology_subset show ?thesis
+ apply (simp add: proper_map_alt)
+ by (smt (verit, best) Collect_cong compactin_absolute)
+qed
+
+lemma proper_map_into_k_space_eq:
+ assumes "k_space Y"
+ shows "proper_map X Y f \<longleftrightarrow>
+ f ` (topspace X) \<subseteq> topspace Y \<and>
+ (\<forall>K. compactin Y K
+ \<longrightarrow> proper_map (subtopology X {x \<in> topspace X. f x \<in> K}) (subtopology Y K) f)"
+ (is "?lhs \<longleftrightarrow> ?rhs")
+proof
+ show "?lhs \<Longrightarrow> ?rhs"
+ by (simp add: proper_map_imp_subset_topspace proper_map_restriction)
+ show "?rhs \<Longrightarrow> ?lhs"
+ by (simp add: assms proper_map_into_k_space)
+qed
+
+lemma compact_imp_proper_map:
+ assumes "k_space Y" "kc_space Y" and fim: "f ` (topspace X) \<subseteq> topspace Y"
+ and f: "continuous_map X Y f \<or> kc_space X"
+ and comp: "\<And>K. compactin Y K \<Longrightarrow> compactin X {x \<in> topspace X. f x \<in> K}"
+ shows "proper_map X Y f"
+proof (rule compact_imp_proper_map_gen)
+ fix S
+ assume "S \<subseteq> topspace Y"
+ and "\<And>K. compactin Y K \<Longrightarrow> compactin Y (S \<inter> K)"
+ with assms show "closedin Y S"
+ by (simp add: closedin_subset_topspace inf_commute k_space kc_space_def)
+qed (use assms in auto)
+
+lemma proper_eq_compact_map:
+ assumes "k_space Y" "kc_space Y"
+ and f: "continuous_map X Y f \<or> kc_space X"
+ shows "proper_map X Y f \<longleftrightarrow>
+ f ` (topspace X) \<subseteq> topspace Y \<and>
+ (\<forall>K. compactin Y K \<longrightarrow> compactin X {x \<in> topspace X. f x \<in> K})"
+ (is "?lhs \<longleftrightarrow> ?rhs")
+proof
+ show "?lhs \<Longrightarrow> ?rhs"
+ by (simp add: proper_map_alt proper_map_imp_subset_topspace)
+qed (use assms compact_imp_proper_map in auto)
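
Editorial annotation (not part of this changeset): writing \(f^{-1}(K)\) for \(\{x \in \operatorname{topspace} X \mid f\,x \in K\}\), the two preceding lemmas say that when \(Y\) is a kc k-space (and \(f\) is continuous, or \(X\) is a kc-space),
\[
  f \text{ proper} \;\Longleftrightarrow\; f(\operatorname{topspace} X) \subseteq \operatorname{topspace} Y \;\wedge\; f^{-1}(K) \text{ compact in } X \text{ for every compact } K \subseteq Y.
\]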
+
+lemma compact_imp_perfect_map:
+ assumes "k_space Y" "kc_space Y" and "f ` (topspace X) = topspace Y"
+ and "continuous_map X Y f"
+ and "\<And>K. compactin Y K \<Longrightarrow> compactin X {x \<in> topspace X. f x \<in> K}"
+ shows "perfect_map X Y f"
+ by (simp add: assms compact_imp_proper_map perfect_map_def)
+
end
diff --git a/src/HOL/Analysis/Abstract_Topology.thy b/src/HOL/Analysis/Abstract_Topology.thy
--- a/src/HOL/Analysis/Abstract_Topology.thy
+++ b/src/HOL/Analysis/Abstract_Topology.thy
@@ -1,4994 +1,5090 @@
(* Author: L C Paulson, University of Cambridge [ported from HOL Light] *)
section \<open>Operators involving abstract topology\<close>
theory Abstract_Topology
imports
Complex_Main
"HOL-Library.Set_Idioms"
"HOL-Library.FuncSet"
begin
subsection \<open>General notion of a topology as a value\<close>
definition\<^marker>\<open>tag important\<close> istopology :: "('a set \<Rightarrow> bool) \<Rightarrow> bool" where
"istopology L \<equiv> (\<forall>S T. L S \<longrightarrow> L T \<longrightarrow> L (S \<inter> T)) \<and> (\<forall>\<K>. (\<forall>K\<in>\<K>. L K) \<longrightarrow> L (\<Union>\<K>))"
typedef\<^marker>\<open>tag important\<close> 'a topology = "{L::('a set) \<Rightarrow> bool. istopology L}"
morphisms "openin" "topology"
unfolding istopology_def by blast
lemma istopology_openin[iff]: "istopology(openin U)"
using openin[of U] by blast
lemma istopology_open[iff]: "istopology open"
by (auto simp: istopology_def)
lemma topology_inverse' [simp]: "istopology U \<Longrightarrow> openin (topology U) = U"
using topology_inverse[unfolded mem_Collect_eq] .
lemma topology_inverse_iff: "istopology U \<longleftrightarrow> openin (topology U) = U"
by (metis istopology_openin topology_inverse')
lemma topology_eq: "T1 = T2 \<longleftrightarrow> (\<forall>S. openin T1 S \<longleftrightarrow> openin T2 S)"
proof
assume "T1 = T2"
then show "\<forall>S. openin T1 S \<longleftrightarrow> openin T2 S" by simp
next
assume H: "\<forall>S. openin T1 S \<longleftrightarrow> openin T2 S"
then have "openin T1 = openin T2" by (simp add: fun_eq_iff)
then have "topology (openin T1) = topology (openin T2)" by simp
then show "T1 = T2" unfolding openin_inverse .
qed
text\<open>The "universe": the union of all sets in the topology.\<close>
definition "topspace T = \<Union>{S. openin T S}"
subsubsection \<open>Main properties of open sets\<close>
proposition openin_clauses:
fixes U :: "'a topology"
shows
"openin U {}"
"\<And>S T. openin U S \<Longrightarrow> openin U T \<Longrightarrow> openin U (S\<inter>T)"
"\<And>K. (\<forall>S \<in> K. openin U S) \<Longrightarrow> openin U (\<Union>K)"
using openin[of U] unfolding istopology_def by auto
lemma openin_subset: "openin U S \<Longrightarrow> S \<subseteq> topspace U"
unfolding topspace_def by blast
lemma openin_empty[simp]: "openin U {}"
by (rule openin_clauses)
lemma openin_Int[intro]: "openin U S \<Longrightarrow> openin U T \<Longrightarrow> openin U (S \<inter> T)"
by (rule openin_clauses)
lemma openin_Union[intro]: "(\<And>S. S \<in> K \<Longrightarrow> openin U S) \<Longrightarrow> openin U (\<Union>K)"
using openin_clauses by blast
lemma openin_Un[intro]: "openin U S \<Longrightarrow> openin U T \<Longrightarrow> openin U (S \<union> T)"
using openin_Union[of "{S,T}" U] by auto
lemma openin_topspace[intro, simp]: "openin U (topspace U)"
by (force simp: openin_Union topspace_def)
lemma openin_subopen: "openin U S \<longleftrightarrow> (\<forall>x \<in> S. \<exists>T. openin U T \<and> x \<in> T \<and> T \<subseteq> S)"
(is "?lhs \<longleftrightarrow> ?rhs")
proof
assume ?lhs
then show ?rhs by auto
next
assume H: ?rhs
let ?t = "\<Union>{T. openin U T \<and> T \<subseteq> S}"
have "openin U ?t" by (force simp: openin_Union)
also have "?t = S" using H by auto
finally show "openin U S" .
qed
lemma openin_INT [intro]:
assumes "finite I"
"\<And>i. i \<in> I \<Longrightarrow> openin T (U i)"
shows "openin T ((\<Inter>i \<in> I. U i) \<inter> topspace T)"
using assms by (induct, auto simp: inf_sup_aci(2) openin_Int)
lemma openin_INT2 [intro]:
assumes "finite I" "I \<noteq> {}"
"\<And>i. i \<in> I \<Longrightarrow> openin T (U i)"
shows "openin T (\<Inter>i \<in> I. U i)"
proof -
have "(\<Inter>i \<in> I. U i) \<subseteq> topspace T"
using \<open>I \<noteq> {}\<close> openin_subset[OF assms(3)] by auto
then show ?thesis
using openin_INT[of _ _ U, OF assms(1) assms(3)] by (simp add: inf.absorb2 inf_commute)
qed
lemma openin_Inter [intro]:
assumes "finite \<F>" "\<F> \<noteq> {}" "\<And>X. X \<in> \<F> \<Longrightarrow> openin T X" shows "openin T (\<Inter>\<F>)"
by (metis (full_types) assms openin_INT2 image_ident)
lemma openin_Int_Inter:
assumes "finite \<F>" "openin T U" "\<And>X. X \<in> \<F> \<Longrightarrow> openin T X" shows "openin T (U \<inter> \<Inter>\<F>)"
using openin_Inter [of "insert U \<F>"] assms by auto
subsubsection \<open>Closed sets\<close>
definition\<^marker>\<open>tag important\<close> closedin :: "'a topology \<Rightarrow> 'a set \<Rightarrow> bool" where
"closedin U S \<longleftrightarrow> S \<subseteq> topspace U \<and> openin U (topspace U - S)"
lemma closedin_subset: "closedin U S \<Longrightarrow> S \<subseteq> topspace U"
by (metis closedin_def)
lemma closedin_empty[simp]: "closedin U {}"
by (simp add: closedin_def)
lemma closedin_topspace[intro, simp]: "closedin U (topspace U)"
by (simp add: closedin_def)
lemma closedin_Un[intro]: "closedin U S \<Longrightarrow> closedin U T \<Longrightarrow> closedin U (S \<union> T)"
by (auto simp: Diff_Un closedin_def)
lemma Diff_Inter[intro]: "A - \<Inter>S = \<Union>{A - s|s. s\<in>S}"
by auto
lemma closedin_Union:
assumes "finite S" "\<And>T. T \<in> S \<Longrightarrow> closedin U T"
shows "closedin U (\<Union>S)"
using assms by induction auto
lemma closedin_Inter[intro]:
assumes Ke: "K \<noteq> {}"
and Kc: "\<And>S. S \<in>K \<Longrightarrow> closedin U S"
shows "closedin U (\<Inter>K)"
using Ke Kc unfolding closedin_def Diff_Inter by auto
lemma closedin_INT[intro]:
assumes "A \<noteq> {}" "\<And>x. x \<in> A \<Longrightarrow> closedin U (B x)"
shows "closedin U (\<Inter>x\<in>A. B x)"
using assms by blast
lemma closedin_Int[intro]: "closedin U S \<Longrightarrow> closedin U T \<Longrightarrow> closedin U (S \<inter> T)"
using closedin_Inter[of "{S,T}" U] by auto
lemma openin_closedin_eq: "openin U S \<longleftrightarrow> S \<subseteq> topspace U \<and> closedin U (topspace U - S)"
by (metis Diff_subset closedin_def double_diff equalityD1 openin_subset)
lemma topology_finer_closedin:
"topspace X = topspace Y \<Longrightarrow> (\<forall>S. openin Y S \<longrightarrow> openin X S) \<longleftrightarrow> (\<forall>S. closedin Y S \<longrightarrow> closedin X S)"
by (metis closedin_def openin_closedin_eq)
lemma openin_closedin: "S \<subseteq> topspace U \<Longrightarrow> (openin U S \<longleftrightarrow> closedin U (topspace U - S))"
by (simp add: openin_closedin_eq)
lemma openin_diff[intro]:
assumes oS: "openin U S"
and cT: "closedin U T"
shows "openin U (S - T)"
by (metis Int_Diff cT closedin_def inf.orderE oS openin_Int openin_subset)
lemma closedin_diff[intro]:
assumes oS: "closedin U S"
and cT: "openin U T"
shows "closedin U (S - T)"
by (metis Int_Diff cT closedin_Int closedin_subset inf.orderE oS openin_closedin_eq)
lemma all_openin: "(\<forall>U. openin X U \<longrightarrow> P U) \<longleftrightarrow> (\<forall>U. closedin X U \<longrightarrow> P (topspace X - U))"
by (metis Diff_Diff_Int closedin_def inf.absorb_iff2 openin_closedin_eq)
lemma all_closedin: "(\<forall>U. closedin X U \<longrightarrow> P U) \<longleftrightarrow> (\<forall>U. openin X U \<longrightarrow> P (topspace X - U))"
by (metis Diff_Diff_Int closedin_subset inf.absorb_iff2 openin_closedin_eq)
lemma ex_openin: "(\<exists>U. openin X U \<and> P U) \<longleftrightarrow> (\<exists>U. closedin X U \<and> P (topspace X - U))"
by (metis Diff_Diff_Int closedin_def inf.absorb_iff2 openin_closedin_eq)
lemma ex_closedin: "(\<exists>U. closedin X U \<and> P U) \<longleftrightarrow> (\<exists>U. openin X U \<and> P (topspace X - U))"
by (metis Diff_Diff_Int closedin_subset inf.absorb_iff2 openin_closedin_eq)
subsection\<open>The discrete topology\<close>
definition discrete_topology where "discrete_topology U \<equiv> topology (\<lambda>S. S \<subseteq> U)"
lemma openin_discrete_topology [simp]: "openin (discrete_topology U) S \<longleftrightarrow> S \<subseteq> U"
proof -
have "istopology (\<lambda>S. S \<subseteq> U)"
by (auto simp: istopology_def)
then show ?thesis
by (simp add: discrete_topology_def topology_inverse')
qed
lemma topspace_discrete_topology [simp]: "topspace(discrete_topology U) = U"
by (meson openin_discrete_topology openin_subset openin_topspace order_refl subset_antisym)
lemma closedin_discrete_topology [simp]: "closedin (discrete_topology U) S \<longleftrightarrow> S \<subseteq> U"
by (simp add: closedin_def)
lemma discrete_topology_unique:
"discrete_topology U = X \<longleftrightarrow> topspace X = U \<and> (\<forall>x \<in> U. openin X {x})" (is "?lhs = ?rhs")
proof
assume R: ?rhs
then have "openin X S" if "S \<subseteq> U" for S
using openin_subopen subsetD that by fastforce
then show ?lhs
by (metis R openin_discrete_topology openin_subset topology_eq)
qed auto
lemma discrete_topology_unique_alt:
"discrete_topology U = X \<longleftrightarrow> topspace X \<subseteq> U \<and> (\<forall>x \<in> U. openin X {x})"
using openin_subset
by (auto simp: discrete_topology_unique)
lemma subtopology_eq_discrete_topology_empty:
"X = discrete_topology {} \<longleftrightarrow> topspace X = {}"
using discrete_topology_unique [of "{}" X] by auto
lemma subtopology_eq_discrete_topology_sing:
"X = discrete_topology {a} \<longleftrightarrow> topspace X = {a}"
by (metis discrete_topology_unique openin_topspace singletonD)
subsection \<open>Subspace topology\<close>
definition\<^marker>\<open>tag important\<close> subtopology :: "'a topology \<Rightarrow> 'a set \<Rightarrow> 'a topology"
where "subtopology U V = topology (\<lambda>T. \<exists>S. T = S \<inter> V \<and> openin U S)"
lemma istopology_subtopology: "istopology (\<lambda>T. \<exists>S. T = S \<inter> V \<and> openin U S)"
(is "istopology ?L")
proof -
have "?L {}" by blast
{
fix A B
assume A: "?L A" and B: "?L B"
from A B obtain Sa and Sb where Sa: "openin U Sa" "A = Sa \<inter> V" and Sb: "openin U Sb" "B = Sb \<inter> V"
by blast
have "A \<inter> B = (Sa \<inter> Sb) \<inter> V" "openin U (Sa \<inter> Sb)"
using Sa Sb by blast+
then have "?L (A \<inter> B)" by blast
}
moreover
{
fix K
assume K: "K \<subseteq> Collect ?L"
have th0: "Collect ?L = (\<lambda>S. S \<inter> V) ` Collect (openin U)"
by blast
from K[unfolded th0 subset_image_iff]
obtain Sk where Sk: "Sk \<subseteq> Collect (openin U)" "K = (\<lambda>S. S \<inter> V) ` Sk"
by blast
have "\<Union>K = (\<Union>Sk) \<inter> V"
using Sk by auto
moreover have "openin U (\<Union>Sk)"
using Sk by (auto simp: subset_eq)
ultimately have "?L (\<Union>K)" by blast
}
ultimately show ?thesis
unfolding subset_eq mem_Collect_eq istopology_def by auto
qed
lemma openin_subtopology: "openin (subtopology U V) S \<longleftrightarrow> (\<exists>T. openin U T \<and> S = T \<inter> V)"
unfolding subtopology_def topology_inverse'[OF istopology_subtopology]
by auto
+lemma subset_openin_subtopology:
+ "\<lbrakk>openin X S; S \<subseteq> T\<rbrakk> \<Longrightarrow> openin (subtopology X T) S"
+ by (metis inf.orderE openin_subtopology)
+
lemma openin_subtopology_Int:
"openin X S \<Longrightarrow> openin (subtopology X T) (S \<inter> T)"
using openin_subtopology by auto
lemma openin_subtopology_Int2:
"openin X T \<Longrightarrow> openin (subtopology X S) (S \<inter> T)"
using openin_subtopology by auto
lemma openin_subtopology_diff_closed:
"\<lbrakk>S \<subseteq> topspace X; closedin X T\<rbrakk> \<Longrightarrow> openin (subtopology X S) (S - T)"
unfolding closedin_def openin_subtopology
by (rule_tac x="topspace X - T" in exI) auto
lemma openin_relative_to: "(openin X relative_to S) = openin (subtopology X S)"
by (force simp: relative_to_def openin_subtopology)
lemma topspace_subtopology [simp]: "topspace (subtopology U V) = topspace U \<inter> V"
by (auto simp: topspace_def openin_subtopology)
lemma topspace_subtopology_subset:
"S \<subseteq> topspace X \<Longrightarrow> topspace(subtopology X S) = S"
by (simp add: inf.absorb_iff2)
lemma closedin_subtopology: "closedin (subtopology U V) S \<longleftrightarrow> (\<exists>T. closedin U T \<and> S = T \<inter> V)"
unfolding closedin_def topspace_subtopology
by (auto simp: openin_subtopology)
+lemma closedin_subtopology_Int_closed:
+ "closedin X T \<Longrightarrow> closedin (subtopology X S) (S \<inter> T)"
+ using closedin_subtopology inf_commute by blast
+
lemma closedin_subset_topspace:
"\<lbrakk>closedin X S; S \<subseteq> T\<rbrakk> \<Longrightarrow> closedin (subtopology X T) S"
using closedin_subtopology by fastforce
lemma closedin_relative_to:
"(closedin X relative_to S) = closedin (subtopology X S)"
by (force simp: relative_to_def closedin_subtopology)
lemma openin_subtopology_refl: "openin (subtopology U V) V \<longleftrightarrow> V \<subseteq> topspace U"
unfolding openin_subtopology
by auto (metis IntD1 in_mono openin_subset)
lemma subtopology_subtopology:
"subtopology (subtopology X S) T = subtopology X (S \<inter> T)"
proof -
have eq: "\<And>T'. (\<exists>S'. T' = S' \<inter> T \<and> (\<exists>T. openin X T \<and> S' = T \<inter> S)) = (\<exists>Sa. T' = Sa \<inter> (S \<inter> T) \<and> openin X Sa)"
by (metis inf_assoc)
have "subtopology (subtopology X S) T = topology (\<lambda>Ta. \<exists>Sa. Ta = Sa \<inter> T \<and> openin (subtopology X S) Sa)"
by (simp add: subtopology_def)
also have "\<dots> = subtopology X (S \<inter> T)"
by (simp add: openin_subtopology eq) (simp add: subtopology_def)
finally show ?thesis .
qed
lemma openin_subtopology_alt:
"openin (subtopology X U) S \<longleftrightarrow> S \<in> (\<lambda>T. U \<inter> T) ` Collect (openin X)"
by (simp add: image_iff inf_commute openin_subtopology)
lemma closedin_subtopology_alt:
"closedin (subtopology X U) S \<longleftrightarrow> S \<in> (\<lambda>T. U \<inter> T) ` Collect (closedin X)"
by (simp add: image_iff inf_commute closedin_subtopology)
lemma subtopology_superset:
assumes UV: "topspace U \<subseteq> V"
shows "subtopology U V = U"
proof -
{ fix S
have "openin U S" if "openin U T" "S = T \<inter> V" for T
by (metis Int_subset_iff assms inf.orderE openin_subset that)
then have "(\<exists>T. openin U T \<and> S = T \<inter> V) \<longleftrightarrow> openin U S"
by (metis assms inf.orderE inf_assoc openin_subset)
}
then show ?thesis
unfolding topology_eq openin_subtopology by blast
qed
lemma subtopology_topspace[simp]: "subtopology U (topspace U) = U"
by (simp add: subtopology_superset)
lemma subtopology_UNIV[simp]: "subtopology U UNIV = U"
by (simp add: subtopology_superset)
lemma subtopology_restrict:
"subtopology X (topspace X \<inter> S) = subtopology X S"
by (metis subtopology_subtopology subtopology_topspace)
lemma openin_subtopology_empty:
"openin (subtopology U {}) S \<longleftrightarrow> S = {}"
by (metis Int_empty_right openin_empty openin_subtopology)
lemma closedin_subtopology_empty:
"closedin (subtopology U {}) S \<longleftrightarrow> S = {}"
by (metis Int_empty_right closedin_empty closedin_subtopology)
lemma closedin_subtopology_refl [simp]:
"closedin (subtopology U X) X \<longleftrightarrow> X \<subseteq> topspace U"
by (metis closedin_def closedin_topspace inf.absorb_iff2 le_inf_iff topspace_subtopology)
lemma closedin_topspace_empty: "topspace T = {} \<Longrightarrow> (closedin T S \<longleftrightarrow> S = {})"
by (simp add: closedin_def)
lemma open_in_topspace_empty:
"topspace X = {} \<Longrightarrow> openin X S \<longleftrightarrow> S = {}"
by (simp add: openin_closedin_eq)
lemma openin_imp_subset:
"openin (subtopology U S) T \<Longrightarrow> T \<subseteq> S"
by (metis Int_iff openin_subtopology subsetI)
lemma closedin_imp_subset:
"closedin (subtopology U S) T \<Longrightarrow> T \<subseteq> S"
by (simp add: closedin_def)
lemma openin_open_subtopology:
"openin X S \<Longrightarrow> openin (subtopology X S) T \<longleftrightarrow> openin X T \<and> T \<subseteq> S"
by (metis inf.orderE openin_Int openin_imp_subset openin_subtopology)
lemma closedin_closed_subtopology:
"closedin X S \<Longrightarrow> (closedin (subtopology X S) T \<longleftrightarrow> closedin X T \<and> T \<subseteq> S)"
by (metis closedin_Int closedin_imp_subset closedin_subtopology inf.orderE)
lemma closedin_trans_full:
"\<lbrakk>closedin (subtopology X U) S; closedin X U\<rbrakk> \<Longrightarrow> closedin X S"
using closedin_closed_subtopology by blast
lemma openin_subtopology_Un:
"\<lbrakk>openin (subtopology X T) S; openin (subtopology X U) S\<rbrakk>
\<Longrightarrow> openin (subtopology X (T \<union> U)) S"
by (simp add: openin_subtopology) blast
lemma closedin_subtopology_Un:
"\<lbrakk>closedin (subtopology X T) S; closedin (subtopology X U) S\<rbrakk>
\<Longrightarrow> closedin (subtopology X (T \<union> U)) S"
by (simp add: closedin_subtopology) blast
lemma openin_trans_full:
"\<lbrakk>openin (subtopology X U) S; openin X U\<rbrakk> \<Longrightarrow> openin X S"
by (simp add: openin_open_subtopology)
subsection \<open>The canonical topology from the underlying type class\<close>
abbreviation\<^marker>\<open>tag important\<close> euclidean :: "'a::topological_space topology"
where "euclidean \<equiv> topology open"
abbreviation top_of_set :: "'a::topological_space set \<Rightarrow> 'a topology"
where "top_of_set \<equiv> subtopology (topology open)"
lemma open_openin: "open S \<longleftrightarrow> openin euclidean S"
by simp
declare open_openin [symmetric, simp]
lemma topspace_euclidean [simp]: "topspace euclidean = UNIV"
by (force simp: topspace_def)
lemma topspace_euclidean_subtopology[simp]: "topspace (top_of_set S) = S"
by (simp)
lemma closed_closedin: "closed S \<longleftrightarrow> closedin euclidean S"
by (simp add: closed_def closedin_def Compl_eq_Diff_UNIV)
declare closed_closedin [symmetric, simp]
lemma openin_subtopology_self [simp]: "openin (top_of_set S) S"
by (metis openin_topspace topspace_euclidean_subtopology)
subsubsection\<open>The most basic facts about the usual topology and metric on R\<close>
abbreviation euclideanreal :: "real topology"
where "euclideanreal \<equiv> topology open"
subsection \<open>Basic "localization" results are handy for connectedness.\<close>
lemma openin_open: "openin (top_of_set U) S \<longleftrightarrow> (\<exists>T. open T \<and> (S = U \<inter> T))"
by (auto simp: openin_subtopology)
lemma openin_Int_open:
"\<lbrakk>openin (top_of_set U) S; open T\<rbrakk>
\<Longrightarrow> openin (top_of_set U) (S \<inter> T)"
by (metis open_Int Int_assoc openin_open)
lemma openin_open_Int[intro]: "open S \<Longrightarrow> openin (top_of_set U) (U \<inter> S)"
by (auto simp: openin_open)
lemma open_openin_trans[trans]:
"open S \<Longrightarrow> open T \<Longrightarrow> T \<subseteq> S \<Longrightarrow> openin (top_of_set S) T"
by (metis Int_absorb1 openin_open_Int)
lemma open_subset: "S \<subseteq> T \<Longrightarrow> open S \<Longrightarrow> openin (top_of_set T) S"
by (auto simp: openin_open)
lemma closedin_closed: "closedin (top_of_set U) S \<longleftrightarrow> (\<exists>T. closed T \<and> S = U \<inter> T)"
by (simp add: closedin_subtopology Int_ac)
lemma closedin_closed_Int: "closed S \<Longrightarrow> closedin (top_of_set U) (U \<inter> S)"
by (metis closedin_closed)
lemma closed_subset: "S \<subseteq> T \<Longrightarrow> closed S \<Longrightarrow> closedin (top_of_set T) S"
by (auto simp: closedin_closed)
lemma closedin_closed_subset:
"\<lbrakk>closedin (top_of_set U) V; T \<subseteq> U; S = V \<inter> T\<rbrakk>
\<Longrightarrow> closedin (top_of_set T) S"
by (metis (no_types, lifting) Int_assoc Int_commute closedin_closed inf.orderE)
lemma finite_imp_closedin:
fixes S :: "'a::t1_space set"
shows "\<lbrakk>finite S; S \<subseteq> T\<rbrakk> \<Longrightarrow> closedin (top_of_set T) S"
by (simp add: finite_imp_closed closed_subset)
lemma closedin_singleton [simp]:
fixes a :: "'a::t1_space"
shows "closedin (top_of_set U) {a} \<longleftrightarrow> a \<in> U"
using closedin_subset by (force intro: closed_subset)
lemma openin_euclidean_subtopology_iff:
fixes S U :: "'a::metric_space set"
shows "openin (top_of_set U) S \<longleftrightarrow>
S \<subseteq> U \<and> (\<forall>x\<in>S. \<exists>e>0. \<forall>x'\<in>U. dist x' x < e \<longrightarrow> x'\<in> S)"
(is "?lhs \<longleftrightarrow> ?rhs")
proof
assume ?lhs
then show ?rhs
unfolding openin_open open_dist by blast
next
define T where "T = {x. \<exists>a\<in>S. \<exists>d>0. (\<forall>y\<in>U. dist y a < d \<longrightarrow> y \<in> S) \<and> dist x a < d}"
have 1: "\<forall>x\<in>T. \<exists>e>0. \<forall>y. dist y x < e \<longrightarrow> y \<in> T"
unfolding T_def
apply clarsimp
apply (rule_tac x="d - dist x a" in exI)
by (metis add_0_left dist_commute dist_triangle_lt less_diff_eq)
assume ?rhs then have 2: "S = U \<inter> T"
unfolding T_def
by auto (metis dist_self)
from 1 2 show ?lhs
unfolding openin_open open_dist by fast
qed
lemma connected_openin:
"connected S \<longleftrightarrow>
\<not>(\<exists>E1 E2. openin (top_of_set S) E1 \<and>
openin (top_of_set S) E2 \<and>
S \<subseteq> E1 \<union> E2 \<and> E1 \<inter> E2 = {} \<and> E1 \<noteq> {} \<and> E2 \<noteq> {})"
unfolding connected_def openin_open disjoint_iff_not_equal by blast
lemma connected_openin_eq:
"connected S \<longleftrightarrow>
\<not>(\<exists>E1 E2. openin (top_of_set S) E1 \<and>
openin (top_of_set S) E2 \<and>
E1 \<union> E2 = S \<and> E1 \<inter> E2 = {} \<and>
E1 \<noteq> {} \<and> E2 \<noteq> {})"
unfolding connected_openin
by (metis (no_types, lifting) Un_subset_iff openin_imp_subset subset_antisym)
lemma connected_closedin:
"connected S \<longleftrightarrow>
(\<nexists>E1 E2.
closedin (top_of_set S) E1 \<and>
closedin (top_of_set S) E2 \<and>
S \<subseteq> E1 \<union> E2 \<and> E1 \<inter> E2 = {} \<and> E1 \<noteq> {} \<and> E2 \<noteq> {})"
(is "?lhs = ?rhs")
proof
assume ?lhs
then show ?rhs
by (auto simp add: connected_closed closedin_closed)
next
assume R: ?rhs
then show ?lhs
proof (clarsimp simp add: connected_closed closedin_closed)
fix A B
assume s_sub: "S \<subseteq> A \<union> B" "B \<inter> S \<noteq> {}"
and disj: "A \<inter> B \<inter> S = {}"
and cl: "closed A" "closed B"
have "S - A = B \<inter> S"
using Diff_subset_conv Un_Diff_Int disj s_sub(1) by auto
then show "A \<inter> S = {}"
by (metis Int_Diff_Un Int_Diff_disjoint R cl closedin_closed_Int dual_order.refl inf_commute s_sub(2))
qed
qed
lemma connected_closedin_eq:
"connected S \<longleftrightarrow>
\<not>(\<exists>E1 E2.
closedin (top_of_set S) E1 \<and>
closedin (top_of_set S) E2 \<and>
E1 \<union> E2 = S \<and> E1 \<inter> E2 = {} \<and>
E1 \<noteq> {} \<and> E2 \<noteq> {})"
unfolding connected_closedin
by (metis Un_subset_iff closedin_imp_subset subset_antisym)
text \<open>These "transitivity" results are handy too\<close>
lemma openin_trans[trans]:
"openin (top_of_set T) S \<Longrightarrow> openin (top_of_set U) T \<Longrightarrow>
openin (top_of_set U) S"
by (metis openin_Int_open openin_open)
lemma openin_open_trans: "openin (top_of_set T) S \<Longrightarrow> open T \<Longrightarrow> open S"
by (auto simp: openin_open intro: openin_trans)
lemma closedin_trans[trans]:
"closedin (top_of_set T) S \<Longrightarrow> closedin (top_of_set U) T \<Longrightarrow>
closedin (top_of_set U) S"
by (auto simp: closedin_closed closed_Inter Int_assoc)
lemma closedin_closed_trans: "closedin (top_of_set T) S \<Longrightarrow> closed T \<Longrightarrow> closed S"
by (auto simp: closedin_closed intro: closedin_trans)
lemma openin_subtopology_Int_subset:
"\<lbrakk>openin (top_of_set u) (u \<inter> S); v \<subseteq> u\<rbrakk> \<Longrightarrow> openin (top_of_set v) (v \<inter> S)"
by (auto simp: openin_subtopology)
lemma openin_open_eq: "open s \<Longrightarrow> (openin (top_of_set s) t \<longleftrightarrow> open t \<and> t \<subseteq> s)"
using open_subset openin_open_trans openin_subset by fastforce
subsection\<open>Derived set (set of limit points)\<close>
definition derived_set_of :: "'a topology \<Rightarrow> 'a set \<Rightarrow> 'a set" (infixl "derived'_set'_of" 80)
where "X derived_set_of S \<equiv>
{x \<in> topspace X.
(\<forall>T. x \<in> T \<and> openin X T \<longrightarrow> (\<exists>y\<noteq>x. y \<in> S \<and> y \<in> T))}"
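
Editorial annotation (not part of this file): X derived_set_of S is the set of limit points of S within the topspace, i.e.
\[
  X\ \operatorname{derived\_set\_of}\ S = \{\, x \in \operatorname{topspace} X \mid \forall T \text{ open with } x \in T.\ \exists y \in S.\ y \neq x \wedge y \in T \,\}.
\]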
lemma derived_set_of_restrict [simp]:
"X derived_set_of (topspace X \<inter> S) = X derived_set_of S"
by (simp add: derived_set_of_def) (metis openin_subset subset_iff)
lemma in_derived_set_of:
"x \<in> X derived_set_of S \<longleftrightarrow> x \<in> topspace X \<and> (\<forall>T. x \<in> T \<and> openin X T \<longrightarrow> (\<exists>y\<noteq>x. y \<in> S \<and> y \<in> T))"
by (simp add: derived_set_of_def)
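text \<open>An illustrative consequence, stated here only as a sketch: a point whose singleton is
  open is never a limit point of any set, because that singleton witnesses the definition.\<close>
(* illustrative sketch; not part of the original development *)
lemma
  assumes "openin X {x}"
  shows "x \<notin> X derived_set_of S"
proof
  assume "x \<in> X derived_set_of S"
  then have "x \<in> topspace X \<and> (\<forall>T. x \<in> T \<and> openin X T \<longrightarrow> (\<exists>y\<noteq>x. y \<in> S \<and> y \<in> T))"
    using in_derived_set_of by blast
  then have "\<exists>y\<noteq>x. y \<in> S \<and> y \<in> {x}"
    using assms by blast
  then show False
    by blast
qed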
lemma derived_set_of_subset_topspace:
"X derived_set_of S \<subseteq> topspace X"
by (auto simp add: derived_set_of_def)
lemma derived_set_of_subtopology:
"(subtopology X U) derived_set_of S = U \<inter> (X derived_set_of (U \<inter> S))"
by (simp add: derived_set_of_def openin_subtopology) blast
lemma derived_set_of_subset_subtopology:
"(subtopology X S) derived_set_of T \<subseteq> S"
by (simp add: derived_set_of_subtopology)
lemma derived_set_of_empty [simp]: "X derived_set_of {} = {}"
by (auto simp: derived_set_of_def)
lemma derived_set_of_mono:
"S \<subseteq> T \<Longrightarrow> X derived_set_of S \<subseteq> X derived_set_of T"
unfolding derived_set_of_def by blast
lemma derived_set_of_Un:
"X derived_set_of (S \<union> T) = X derived_set_of S \<union> X derived_set_of T" (is "?lhs = ?rhs")
proof
show "?lhs \<subseteq> ?rhs"
by (clarsimp simp: in_derived_set_of) (metis IntE IntI openin_Int)
show "?rhs \<subseteq> ?lhs"
by (simp add: derived_set_of_mono)
qed
lemma derived_set_of_Union:
"finite \<F> \<Longrightarrow> X derived_set_of (\<Union>\<F>) = (\<Union>S \<in> \<F>. X derived_set_of S)"
proof (induction \<F> rule: finite_induct)
case (insert S \<F>)
then show ?case
by (simp add: derived_set_of_Un)
qed auto
lemma derived_set_of_topspace:
"X derived_set_of (topspace X) = {x \<in> topspace X. \<not> openin X {x}}" (is "?lhs = ?rhs")
proof
show "?lhs \<subseteq> ?rhs"
by (auto simp: in_derived_set_of)
show "?rhs \<subseteq> ?lhs"
by (clarsimp simp: in_derived_set_of) (metis openin_closedin_eq openin_subopen singletonD subset_eq)
qed
lemma discrete_topology_unique_derived_set:
"discrete_topology U = X \<longleftrightarrow> topspace X = U \<and> X derived_set_of U = {}"
by (auto simp: discrete_topology_unique derived_set_of_topspace)
lemma subtopology_eq_discrete_topology_eq:
"subtopology X U = discrete_topology U \<longleftrightarrow> U \<subseteq> topspace X \<and> U \<inter> X derived_set_of U = {}"
using discrete_topology_unique_derived_set [of U "subtopology X U"]
by (auto simp: eq_commute derived_set_of_subtopology)
lemma subtopology_eq_discrete_topology:
"S \<subseteq> topspace X \<and> S \<inter> X derived_set_of S = {}
\<Longrightarrow> subtopology X S = discrete_topology S"
by (simp add: subtopology_eq_discrete_topology_eq)
lemma subtopology_eq_discrete_topology_gen:
assumes "S \<inter> X derived_set_of S = {}"
shows "subtopology X S = discrete_topology(topspace X \<inter> S)"
proof -
have "subtopology X S = subtopology X (topspace X \<inter> S)"
by (simp add: subtopology_restrict)
then show ?thesis
using assms by (simp add: inf.assoc subtopology_eq_discrete_topology_eq)
qed
lemma subtopology_discrete_topology [simp]:
"subtopology (discrete_topology U) S = discrete_topology(U \<inter> S)"
proof -
have "(\<lambda>T. \<exists>Sa. T = Sa \<inter> S \<and> Sa \<subseteq> U) = (\<lambda>Sa. Sa \<subseteq> U \<and> Sa \<subseteq> S)"
by force
then show ?thesis
by (simp add: subtopology_def) (simp add: discrete_topology_def)
qed
lemma openin_Int_derived_set_of_subset:
"openin X S \<Longrightarrow> S \<inter> X derived_set_of T \<subseteq> X derived_set_of (S \<inter> T)"
by (auto simp: derived_set_of_def)
lemma openin_Int_derived_set_of_eq:
assumes "openin X S"
shows "S \<inter> X derived_set_of T = S \<inter> X derived_set_of (S \<inter> T)" (is "?lhs = ?rhs")
proof
show "?lhs \<subseteq> ?rhs"
by (simp add: assms openin_Int_derived_set_of_subset)
show "?rhs \<subseteq> ?lhs"
by (metis derived_set_of_mono inf_commute inf_le1 inf_mono order_refl)
qed
subsection \<open>Closure with respect to a topological space\<close>
definition closure_of :: "'a topology \<Rightarrow> 'a set \<Rightarrow> 'a set" (infixr "closure'_of" 80)
where "X closure_of S \<equiv> {x \<in> topspace X. \<forall>T. x \<in> T \<and> openin X T \<longrightarrow> (\<exists>y \<in> S. y \<in> T)}"
lemma closure_of_restrict: "X closure_of S = X closure_of (topspace X \<inter> S)"
unfolding closure_of_def
using openin_subset by blast
lemma in_closure_of:
"x \<in> X closure_of S \<longleftrightarrow>
x \<in> topspace X \<and> (\<forall>T. x \<in> T \<and> openin X T \<longrightarrow> (\<exists>y. y \<in> S \<and> y \<in> T))"
by (auto simp: closure_of_def)
lemma closure_of: "X closure_of S = topspace X \<inter> (S \<union> X derived_set_of S)"
by (fastforce simp: in_closure_of in_derived_set_of)
lemma closure_of_alt: "X closure_of S = topspace X \<inter> S \<union> X derived_set_of S"
using derived_set_of_subset_topspace [of X S]
unfolding closure_of_def in_derived_set_of
by safe (auto simp: in_derived_set_of)
lemma derived_set_of_subset_closure_of:
"X derived_set_of S \<subseteq> X closure_of S"
by (fastforce simp: closure_of_def in_derived_set_of)
lemma closure_of_subtopology:
"(subtopology X U) closure_of S = U \<inter> (X closure_of (U \<inter> S))"
unfolding closure_of_def topspace_subtopology openin_subtopology
by safe (metis (full_types) IntI Int_iff inf.commute)+
lemma closure_of_empty [simp]: "X closure_of {} = {}"
by (simp add: closure_of_alt)
lemma closure_of_topspace [simp]: "X closure_of topspace X = topspace X"
by (simp add: closure_of)
lemma closure_of_UNIV [simp]: "X closure_of UNIV = topspace X"
by (simp add: closure_of)
lemma closure_of_subset_topspace: "X closure_of S \<subseteq> topspace X"
by (simp add: closure_of)
lemma closure_of_subset_subtopology: "(subtopology X S) closure_of T \<subseteq> S"
by (simp add: closure_of_subtopology)
lemma closure_of_mono: "S \<subseteq> T \<Longrightarrow> X closure_of S \<subseteq> X closure_of T"
by (fastforce simp add: closure_of_def)
lemma closure_of_subtopology_subset:
"(subtopology X U) closure_of S \<subseteq> (X closure_of S)"
unfolding closure_of_subtopology
by clarsimp (meson closure_of_mono contra_subsetD inf.cobounded2)
lemma closure_of_subtopology_mono:
"T \<subseteq> U \<Longrightarrow> (subtopology X T) closure_of S \<subseteq> (subtopology X U) closure_of S"
unfolding closure_of_subtopology
by auto (meson closure_of_mono inf_mono subset_iff)
lemma closure_of_Un [simp]: "X closure_of (S \<union> T) = X closure_of S \<union> X closure_of T"
by (simp add: Un_assoc Un_left_commute closure_of_alt derived_set_of_Un inf_sup_distrib1)
lemma closure_of_Union:
"finite \<F> \<Longrightarrow> X closure_of (\<Union>\<F>) = (\<Union>S \<in> \<F>. X closure_of S)"
by (induction \<F> rule: finite_induct) auto
lemma closure_of_subset: "S \<subseteq> topspace X \<Longrightarrow> S \<subseteq> X closure_of S"
by (auto simp: closure_of_def)
lemma closure_of_subset_Int: "topspace X \<inter> S \<subseteq> X closure_of S"
by (auto simp: closure_of_def)
lemma closure_of_subset_eq: "S \<subseteq> topspace X \<and> X closure_of S \<subseteq> S \<longleftrightarrow> closedin X S"
proof -
have "openin X (topspace X - S)"
if "\<And>x. \<lbrakk>x \<in> topspace X; \<forall>T. x \<in> T \<and> openin X T \<longrightarrow> S \<inter> T \<noteq> {}\<rbrakk> \<Longrightarrow> x \<in> S"
apply (subst openin_subopen)
by (metis Diff_iff Diff_mono Diff_triv inf.commute openin_subset order_refl that)
then show ?thesis
by (auto simp: closedin_def closure_of_def disjoint_iff_not_equal)
qed
lemma closure_of_eq: "X closure_of S = S \<longleftrightarrow> closedin X S"
by (metis closure_of_subset closure_of_subset_eq closure_of_subset_topspace subset_antisym)
lemma closedin_contains_derived_set:
"closedin X S \<longleftrightarrow> X derived_set_of S \<subseteq> S \<and> S \<subseteq> topspace X"
proof (intro iffI conjI)
show "closedin X S \<Longrightarrow> X derived_set_of S \<subseteq> S"
using closure_of_eq derived_set_of_subset_closure_of by fastforce
show "closedin X S \<Longrightarrow> S \<subseteq> topspace X"
using closedin_subset by blast
show "X derived_set_of S \<subseteq> S \<and> S \<subseteq> topspace X \<Longrightarrow> closedin X S"
by (metis closure_of closure_of_eq inf.absorb_iff2 sup.orderE)
qed
lemma derived_set_subset_gen:
"X derived_set_of S \<subseteq> S \<longleftrightarrow> closedin X (topspace X \<inter> S)"
by (simp add: closedin_contains_derived_set derived_set_of_subset_topspace)
lemma derived_set_subset: "S \<subseteq> topspace X \<Longrightarrow> (X derived_set_of S \<subseteq> S \<longleftrightarrow> closedin X S)"
by (simp add: closedin_contains_derived_set)
lemma closedin_derived_set:
"closedin (subtopology X T) S \<longleftrightarrow>
S \<subseteq> topspace X \<and> S \<subseteq> T \<and> (\<forall>x. x \<in> X derived_set_of S \<and> x \<in> T \<longrightarrow> x \<in> S)"
by (auto simp: closedin_contains_derived_set derived_set_of_subtopology Int_absorb1)
lemma closedin_Int_closure_of:
"closedin (subtopology X S) T \<longleftrightarrow> S \<inter> X closure_of T = T"
by (metis Int_left_absorb closure_of_eq closure_of_subtopology)
lemma closure_of_closedin: "closedin X S \<Longrightarrow> X closure_of S = S"
by (simp add: closure_of_eq)
lemma closure_of_eq_diff: "X closure_of S = topspace X - \<Union>{T. openin X T \<and> disjnt S T}"
by (auto simp: closure_of_def disjnt_iff)
lemma closedin_closure_of [simp]: "closedin X (X closure_of S)"
unfolding closure_of_eq_diff by blast
lemma closure_of_closure_of [simp]: "X closure_of (X closure_of S) = X closure_of S"
by (simp add: closure_of_eq)
lemma closure_of_hull:
assumes "S \<subseteq> topspace X" shows "X closure_of S = (closedin X) hull S"
by (metis assms closedin_closure_of closure_of_eq closure_of_mono closure_of_subset hull_unique)
lemma closure_of_minimal:
"\<lbrakk>S \<subseteq> T; closedin X T\<rbrakk> \<Longrightarrow> (X closure_of S) \<subseteq> T"
by (metis closure_of_eq closure_of_mono)
lemma closure_of_minimal_eq:
"\<lbrakk>S \<subseteq> topspace X; closedin X T\<rbrakk> \<Longrightarrow> (X closure_of S) \<subseteq> T \<longleftrightarrow> S \<subseteq> T"
by (meson closure_of_minimal closure_of_subset subset_trans)
lemma closure_of_unique:
"\<lbrakk>S \<subseteq> T; closedin X T;
\<And>T'. \<lbrakk>S \<subseteq> T'; closedin X T'\<rbrakk> \<Longrightarrow> T \<subseteq> T'\<rbrakk>
\<Longrightarrow> X closure_of S = T"
by (meson closedin_closure_of closedin_subset closure_of_minimal closure_of_subset eq_iff order.trans)
lemma closure_of_eq_empty_gen: "X closure_of S = {} \<longleftrightarrow> disjnt (topspace X) S"
unfolding disjnt_def closure_of_restrict [where S=S]
using closure_of by fastforce
lemma closure_of_eq_empty: "S \<subseteq> topspace X \<Longrightarrow> X closure_of S = {} \<longleftrightarrow> S = {}"
using closure_of_subset by fastforce
lemma openin_Int_closure_of_subset:
assumes "openin X S"
shows "S \<inter> X closure_of T \<subseteq> X closure_of (S \<inter> T)"
proof -
have "S \<inter> X derived_set_of T = S \<inter> X derived_set_of (S \<inter> T)"
by (meson assms openin_Int_derived_set_of_eq)
moreover have "S \<inter> (S \<inter> T) = S \<inter> T"
by fastforce
ultimately show ?thesis
by (metis closure_of_alt inf.cobounded2 inf_left_commute inf_sup_distrib1)
qed
lemma closure_of_openin_Int_closure_of:
assumes "openin X S"
shows "X closure_of (S \<inter> X closure_of T) = X closure_of (S \<inter> T)"
proof
show "X closure_of (S \<inter> X closure_of T) \<subseteq> X closure_of (S \<inter> T)"
by (simp add: assms closure_of_minimal openin_Int_closure_of_subset)
next
show "X closure_of (S \<inter> T) \<subseteq> X closure_of (S \<inter> X closure_of T)"
by (metis Int_subset_iff assms closure_of_alt closure_of_mono inf_mono openin_subset subset_refl sup.coboundedI1)
qed
lemma openin_Int_closure_of_eq:
assumes "openin X S" shows "S \<inter> X closure_of T = S \<inter> X closure_of (S \<inter> T)" (is "?lhs = ?rhs")
proof
show "?lhs \<subseteq> ?rhs"
by (simp add: assms openin_Int_closure_of_subset)
show "?rhs \<subseteq> ?lhs"
by (metis closure_of_mono inf_commute inf_le1 inf_mono order_refl)
qed
lemma openin_Int_closure_of_eq_empty:
assumes "openin X S" shows "S \<inter> X closure_of T = {} \<longleftrightarrow> S \<inter> T = {}" (is "?lhs = ?rhs")
proof
show "?lhs \<Longrightarrow> ?rhs"
unfolding disjoint_iff
by (meson assms in_closure_of in_mono openin_subset)
show "?rhs \<Longrightarrow> ?lhs"
by (simp add: assms openin_Int_closure_of_eq)
qed
lemma closure_of_openin_Int_superset:
"openin X S \<and> S \<subseteq> X closure_of T
\<Longrightarrow> X closure_of (S \<inter> T) = X closure_of S"
by (metis closure_of_openin_Int_closure_of inf.orderE)
lemma closure_of_openin_subtopology_Int_closure_of:
assumes S: "openin (subtopology X U) S" and "T \<subseteq> U"
shows "X closure_of (S \<inter> X closure_of T) = X closure_of (S \<inter> T)" (is "?lhs = ?rhs")
proof
obtain S0 where S0: "openin X S0" "S = S0 \<inter> U"
using assms by (auto simp: openin_subtopology)
then show "?lhs \<subseteq> ?rhs"
proof -
have "S0 \<inter> X closure_of T = S0 \<inter> X closure_of (S0 \<inter> T)"
by (meson S0(1) openin_Int_closure_of_eq)
moreover have "S0 \<inter> T = S0 \<inter> U \<inter> T"
using \<open>T \<subseteq> U\<close> by fastforce
ultimately have "S \<inter> X closure_of T \<subseteq> X closure_of (S \<inter> T)"
using S0(2) by auto
then show ?thesis
by (meson closedin_closure_of closure_of_minimal)
qed
next
show "?rhs \<subseteq> ?lhs"
proof -
have "T \<inter> S \<subseteq> T \<union> X derived_set_of T"
by force
then show ?thesis
by (smt (verit, del_insts) Int_iff in_closure_of inf.orderE openin_subset subsetI)
qed
qed
lemma closure_of_subtopology_open:
"openin X U \<or> S \<subseteq> U \<Longrightarrow> (subtopology X U) closure_of S = U \<inter> X closure_of S"
by (metis closure_of_subtopology inf_absorb2 openin_Int_closure_of_eq)
lemma discrete_topology_closure_of:
"(discrete_topology U) closure_of S = U \<inter> S"
by (metis closedin_discrete_topology closure_of_restrict closure_of_unique discrete_topology_unique inf_sup_ord(1) order_refl)
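text \<open>A concrete instance, given only as an illustrative sketch: in the discrete topology
  every set is closed, so taking the closure merely intersects with the carrier set.\<close>
(* illustrative sketch; not part of the original development *)
lemma "(discrete_topology (UNIV::nat set)) closure_of {1, 2, 3} = {1, 2, 3}"
  by (auto simp: discrete_topology_closure_of)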
text \<open>Interior with respect to a topological space.\<close>
definition interior_of :: "'a topology \<Rightarrow> 'a set \<Rightarrow> 'a set" (infixr "interior'_of" 80)
where "X interior_of S \<equiv> {x. \<exists>T. openin X T \<and> x \<in> T \<and> T \<subseteq> S}"
lemma interior_of_restrict:
"X interior_of S = X interior_of (topspace X \<inter> S)"
using openin_subset by (auto simp: interior_of_def)
lemma interior_of_eq: "(X interior_of S = S) \<longleftrightarrow> openin X S"
unfolding interior_of_def using openin_subopen by blast
lemma interior_of_openin: "openin X S \<Longrightarrow> X interior_of S = S"
by (simp add: interior_of_eq)
lemma interior_of_empty [simp]: "X interior_of {} = {}"
by (simp add: interior_of_eq)
lemma interior_of_topspace [simp]: "X interior_of (topspace X) = topspace X"
by (simp add: interior_of_eq)
lemma openin_interior_of [simp]: "openin X (X interior_of S)"
unfolding interior_of_def
using openin_subopen by fastforce
lemma interior_of_interior_of [simp]:
"X interior_of X interior_of S = X interior_of S"
by (simp add: interior_of_eq)
lemma interior_of_subset: "X interior_of S \<subseteq> S"
by (auto simp: interior_of_def)
lemma interior_of_subset_closure_of: "X interior_of S \<subseteq> X closure_of S"
by (metis closure_of_subset_Int dual_order.trans interior_of_restrict interior_of_subset)
lemma subset_interior_of_eq: "S \<subseteq> X interior_of S \<longleftrightarrow> openin X S"
by (metis interior_of_eq interior_of_subset subset_antisym)
lemma interior_of_mono: "S \<subseteq> T \<Longrightarrow> X interior_of S \<subseteq> X interior_of T"
by (auto simp: interior_of_def)
lemma interior_of_maximal: "\<lbrakk>T \<subseteq> S; openin X T\<rbrakk> \<Longrightarrow> T \<subseteq> X interior_of S"
by (auto simp: interior_of_def)
lemma interior_of_maximal_eq: "openin X T \<Longrightarrow> T \<subseteq> X interior_of S \<longleftrightarrow> T \<subseteq> S"
by (meson interior_of_maximal interior_of_subset order_trans)
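text \<open>A small worked consequence, recorded as a sketch: the interior is an open subset of the
  closure, so maximality places it inside the interior of the closure.\<close>
(* illustrative sketch; not part of the original development *)
lemma "X interior_of S \<subseteq> X interior_of (X closure_of S)"
  by (meson interior_of_maximal interior_of_subset_closure_of openin_interior_of)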
lemma interior_of_unique:
"\<lbrakk>T \<subseteq> S; openin X T; \<And>T'. \<lbrakk>T' \<subseteq> S; openin X T'\<rbrakk> \<Longrightarrow> T' \<subseteq> T\<rbrakk> \<Longrightarrow> X interior_of S = T"
by (simp add: interior_of_maximal_eq interior_of_subset subset_antisym)
lemma interior_of_subset_topspace: "X interior_of S \<subseteq> topspace X"
by (simp add: openin_subset)
lemma interior_of_subset_subtopology: "(subtopology X S) interior_of T \<subseteq> S"
by (meson openin_imp_subset openin_interior_of)
lemma interior_of_Int: "X interior_of (S \<inter> T) = X interior_of S \<inter> X interior_of T" (is "?lhs = ?rhs")
proof
show "?lhs \<subseteq> ?rhs"
by (simp add: interior_of_mono)
show "?rhs \<subseteq> ?lhs"
by (meson inf_mono interior_of_maximal interior_of_subset openin_Int openin_interior_of)
qed
lemma interior_of_Inter_subset: "X interior_of (\<Inter>\<F>) \<subseteq> (\<Inter>S \<in> \<F>. X interior_of S)"
by (simp add: INT_greatest Inf_lower interior_of_mono)
lemma union_interior_of_subset:
"X interior_of S \<union> X interior_of T \<subseteq> X interior_of (S \<union> T)"
by (simp add: interior_of_mono)
lemma interior_of_eq_empty:
"X interior_of S = {} \<longleftrightarrow> (\<forall>T. openin X T \<and> T \<subseteq> S \<longrightarrow> T = {})"
by (metis bot.extremum_uniqueI interior_of_maximal interior_of_subset openin_interior_of)
lemma interior_of_eq_empty_alt:
"X interior_of S = {} \<longleftrightarrow> (\<forall>T. openin X T \<and> T \<noteq> {} \<longrightarrow> T - S \<noteq> {})"
by (auto simp: interior_of_eq_empty)
lemma interior_of_Union_openin_subsets:
"\<Union>{T. openin X T \<and> T \<subseteq> S} = X interior_of S"
by (rule interior_of_unique [symmetric]) auto
lemma interior_of_complement:
"X interior_of (topspace X - S) = topspace X - X closure_of S"
by (auto simp: interior_of_def closure_of_def)
lemma interior_of_closure_of:
"X interior_of S = topspace X - X closure_of (topspace X - S)"
unfolding interior_of_complement [symmetric]
by (metis Diff_Diff_Int interior_of_restrict)
lemma closure_of_interior_of:
"X closure_of S = topspace X - X interior_of (topspace X - S)"
by (simp add: interior_of_complement Diff_Diff_Int closure_of)
lemma closure_of_complement: "X closure_of (topspace X - S) = topspace X - X interior_of S"
unfolding interior_of_def closure_of_def
by (blast dest: openin_subset)
lemma interior_of_eq_empty_complement:
"X interior_of S = {} \<longleftrightarrow> X closure_of (topspace X - S) = topspace X"
using interior_of_subset_topspace [of X S] closure_of_complement by fastforce
lemma closure_of_eq_topspace:
"X closure_of S = topspace X \<longleftrightarrow> X interior_of (topspace X - S) = {}"
using closure_of_subset_topspace [of X S] interior_of_complement by fastforce
lemma interior_of_subtopology_subset:
"U \<inter> X interior_of S \<subseteq> (subtopology X U) interior_of S"
by (auto simp: interior_of_def openin_subtopology)
lemma interior_of_subtopology_subsets:
"T \<subseteq> U \<Longrightarrow> T \<inter> (subtopology X U) interior_of S \<subseteq> (subtopology X T) interior_of S"
by (metis inf.absorb_iff2 interior_of_subtopology_subset subtopology_subtopology)
lemma interior_of_subtopology_mono:
"\<lbrakk>S \<subseteq> T; T \<subseteq> U\<rbrakk> \<Longrightarrow> (subtopology X U) interior_of S \<subseteq> (subtopology X T) interior_of S"
by (metis dual_order.trans inf.orderE inf_commute interior_of_subset interior_of_subtopology_subsets)
lemma interior_of_subtopology_open:
assumes "openin X U"
shows "(subtopology X U) interior_of S = U \<inter> X interior_of S" (is "?lhs = ?rhs")
proof
show "?lhs \<subseteq> ?rhs"
by (meson assms interior_of_maximal interior_of_subset le_infI openin_interior_of openin_open_subtopology)
show "?rhs \<subseteq> ?lhs"
by (simp add: interior_of_subtopology_subset)
qed
lemma dense_intersects_open:
"X closure_of S = topspace X \<longleftrightarrow> (\<forall>T. openin X T \<and> T \<noteq> {} \<longrightarrow> S \<inter> T \<noteq> {})"
proof -
have "X closure_of S = topspace X \<longleftrightarrow> (topspace X - X interior_of (topspace X - S) = topspace X)"
by (simp add: closure_of_interior_of)
also have "\<dots> \<longleftrightarrow> X interior_of (topspace X - S) = {}"
by (simp add: closure_of_complement interior_of_eq_empty_complement)
also have "\<dots> \<longleftrightarrow> (\<forall>T. openin X T \<and> T \<noteq> {} \<longrightarrow> S \<inter> T \<noteq> {})"
unfolding interior_of_eq_empty_alt
using openin_subset by fastforce
finally show ?thesis .
qed
lemma interior_of_closedin_union_empty_interior_of:
assumes "closedin X S" and disj: "X interior_of T = {}"
shows "X interior_of (S \<union> T) = X interior_of S"
proof -
have "X closure_of (topspace X - T) = topspace X"
by (metis Diff_Diff_Int disj closure_of_eq_topspace closure_of_restrict interior_of_closure_of)
then show ?thesis
unfolding interior_of_closure_of
by (metis Diff_Un Diff_subset assms(1) closedin_def closure_of_openin_Int_superset)
qed
lemma interior_of_union_eq_empty:
"closedin X S
\<Longrightarrow> (X interior_of (S \<union> T) = {} \<longleftrightarrow>
X interior_of S = {} \<and> X interior_of T = {})"
by (metis interior_of_closedin_union_empty_interior_of le_sup_iff subset_empty union_interior_of_subset)
lemma discrete_topology_interior_of [simp]:
"(discrete_topology U) interior_of S = U \<inter> S"
by (simp add: interior_of_restrict [of _ S] interior_of_eq)
subsection \<open>Frontier with respect to a topological space\<close>
definition frontier_of :: "'a topology \<Rightarrow> 'a set \<Rightarrow> 'a set" (infixr "frontier'_of" 80)
where "X frontier_of S \<equiv> X closure_of S - X interior_of S"
lemma frontier_of_closures:
"X frontier_of S = X closure_of S \<inter> X closure_of (topspace X - S)"
by (metis Diff_Diff_Int closure_of_complement closure_of_subset_topspace double_diff frontier_of_def interior_of_subset_closure_of)
-
lemma interior_of_union_frontier_of [simp]:
"X interior_of S \<union> X frontier_of S = X closure_of S"
by (simp add: frontier_of_def interior_of_subset_closure_of subset_antisym)
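text \<open>Together with the previous lemma this gives a small sanity check, stated as a sketch:
  the interior and the frontier are disjoint pieces of the closure.\<close>
(* illustrative sketch; not part of the original development *)
lemma "X frontier_of S \<inter> X interior_of S = {}"
  by (auto simp: frontier_of_def)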
lemma frontier_of_restrict: "X frontier_of S = X frontier_of (topspace X \<inter> S)"
by (metis closure_of_restrict frontier_of_def interior_of_restrict)
lemma closedin_frontier_of: "closedin X (X frontier_of S)"
by (simp add: closedin_Int frontier_of_closures)
lemma frontier_of_subset_topspace: "X frontier_of S \<subseteq> topspace X"
by (simp add: closedin_frontier_of closedin_subset)
lemma frontier_of_subset_subtopology: "(subtopology X S) frontier_of T \<subseteq> S"
by (metis (no_types) closedin_derived_set closedin_frontier_of)
lemma frontier_of_subtopology_subset:
"U \<inter> (subtopology X U) frontier_of S \<subseteq> (X frontier_of S)"
proof -
have "U \<inter> X interior_of S - subtopology X U interior_of S = {}"
by (simp add: interior_of_subtopology_subset)
moreover have "X closure_of S \<inter> subtopology X U closure_of S = subtopology X U closure_of S"
by (meson closure_of_subtopology_subset inf.absorb_iff2)
ultimately show ?thesis
unfolding frontier_of_def
by blast
qed
lemma frontier_of_subtopology_mono:
"\<lbrakk>S \<subseteq> T; T \<subseteq> U\<rbrakk> \<Longrightarrow> (subtopology X T) frontier_of S \<subseteq> (subtopology X U) frontier_of S"
by (simp add: frontier_of_def Diff_mono closure_of_subtopology_mono interior_of_subtopology_mono)
lemma clopenin_eq_frontier_of:
"closedin X S \<and> openin X S \<longleftrightarrow> S \<subseteq> topspace X \<and> X frontier_of S = {}"
proof (cases "S \<subseteq> topspace X")
case True
then show ?thesis
by (metis Diff_eq_empty_iff closure_of_eq closure_of_subset_eq frontier_of_def interior_of_eq interior_of_subset interior_of_union_frontier_of sup_bot_right)
next
case False
then show ?thesis
by (simp add: frontier_of_closures openin_closedin_eq)
qed
lemma frontier_of_eq_empty:
"S \<subseteq> topspace X \<Longrightarrow> (X frontier_of S = {} \<longleftrightarrow> closedin X S \<and> openin X S)"
by (simp add: clopenin_eq_frontier_of)
lemma frontier_of_openin:
"openin X S \<Longrightarrow> X frontier_of S = X closure_of S - S"
by (metis (no_types) frontier_of_def interior_of_eq)
lemma frontier_of_openin_straddle_Int:
assumes "openin X U" "U \<inter> X frontier_of S \<noteq> {}"
shows "U \<inter> S \<noteq> {}" "U - S \<noteq> {}"
proof -
have "U \<inter> (X closure_of S \<inter> X closure_of (topspace X - S)) \<noteq> {}"
using assms by (simp add: frontier_of_closures)
then show "U \<inter> S \<noteq> {}"
using assms openin_Int_closure_of_eq_empty by fastforce
show "U - S \<noteq> {}"
proof -
have "\<exists>A. X closure_of (A - S) \<inter> U \<noteq> {}"
using \<open>U \<inter> (X closure_of S \<inter> X closure_of (topspace X - S)) \<noteq> {}\<close> by blast
then have "\<not> U \<subseteq> S"
by (metis Diff_disjoint Diff_eq_empty_iff Int_Diff assms(1) inf_commute openin_Int_closure_of_eq_empty)
then show ?thesis
by blast
qed
qed
lemma frontier_of_subset_closedin: "closedin X S \<Longrightarrow> (X frontier_of S) \<subseteq> S"
using closure_of_eq frontier_of_def by fastforce
lemma frontier_of_empty [simp]: "X frontier_of {} = {}"
by (simp add: frontier_of_def)
lemma frontier_of_topspace [simp]: "X frontier_of topspace X = {}"
by (simp add: frontier_of_def)
lemma frontier_of_subset_eq:
assumes "S \<subseteq> topspace X"
shows "(X frontier_of S) \<subseteq> S \<longleftrightarrow> closedin X S"
proof
show "X frontier_of S \<subseteq> S \<Longrightarrow> closedin X S"
by (metis assms closure_of_subset_eq interior_of_subset interior_of_union_frontier_of le_sup_iff)
show "closedin X S \<Longrightarrow> X frontier_of S \<subseteq> S"
by (simp add: frontier_of_subset_closedin)
qed
lemma frontier_of_complement: "X frontier_of (topspace X - S) = X frontier_of S"
by (metis Diff_Diff_Int closure_of_restrict frontier_of_closures inf_commute)
lemma frontier_of_disjoint_eq:
assumes "S \<subseteq> topspace X"
shows "((X frontier_of S) \<inter> S = {} \<longleftrightarrow> openin X S)"
proof
assume "X frontier_of S \<inter> S = {}"
then have "closedin X (topspace X - S)"
using assms closure_of_subset frontier_of_def interior_of_eq interior_of_subset by fastforce
then show "openin X S"
using assms by (simp add: openin_closedin)
next
show "openin X S \<Longrightarrow> X frontier_of S \<inter> S = {}"
by (simp add: Diff_Diff_Int closedin_def frontier_of_openin inf.absorb_iff2 inf_commute)
qed
lemma frontier_of_disjoint_eq_alt:
"S \<subseteq> (topspace X - X frontier_of S) \<longleftrightarrow> openin X S"
proof (cases "S \<subseteq> topspace X")
case True
show ?thesis
using True frontier_of_disjoint_eq by auto
next
case False
then show ?thesis
by (meson Diff_subset openin_subset subset_trans)
qed
lemma frontier_of_Int:
"X frontier_of (S \<inter> T) =
X closure_of (S \<inter> T) \<inter> (X frontier_of S \<union> X frontier_of T)"
proof -
have *: "U \<subseteq> S \<and> U \<subseteq> T \<Longrightarrow> U \<inter> (S \<inter> A \<union> T \<inter> B) = U \<inter> (A \<union> B)" for U S T A B :: "'a set"
by blast
show ?thesis
by (simp add: frontier_of_closures closure_of_mono Diff_Int * flip: closure_of_Un)
qed
lemma frontier_of_Int_subset: "X frontier_of (S \<inter> T) \<subseteq> X frontier_of S \<union> X frontier_of T"
by (simp add: frontier_of_Int)
lemma frontier_of_Int_closedin:
assumes "closedin X S" "closedin X T"
shows "X frontier_of(S \<inter> T) = X frontier_of S \<inter> T \<union> S \<inter> X frontier_of T" (is "?lhs = ?rhs")
proof
show "?lhs \<subseteq> ?rhs"
using assms by (force simp add: frontier_of_Int closedin_Int closure_of_closedin)
show "?rhs \<subseteq> ?lhs"
using assms frontier_of_subset_closedin
by (auto simp add: frontier_of_Int closedin_Int closure_of_closedin)
qed
lemma frontier_of_Un_subset: "X frontier_of(S \<union> T) \<subseteq> X frontier_of S \<union> X frontier_of T"
by (metis Diff_Un frontier_of_Int_subset frontier_of_complement)
lemma frontier_of_Union_subset:
"finite \<F> \<Longrightarrow> X frontier_of (\<Union>\<F>) \<subseteq> (\<Union>T \<in> \<F>. X frontier_of T)"
proof (induction \<F> rule: finite_induct)
case (insert A \<F>)
then show ?case
using frontier_of_Un_subset by fastforce
qed simp
lemma frontier_of_frontier_of_subset:
"X frontier_of (X frontier_of S) \<subseteq> X frontier_of S"
by (simp add: closedin_frontier_of frontier_of_subset_closedin)
lemma frontier_of_subtopology_open:
"openin X U \<Longrightarrow> (subtopology X U) frontier_of S = U \<inter> X frontier_of S"
by (simp add: Diff_Int_distrib closure_of_subtopology_open frontier_of_def interior_of_subtopology_open)
lemma discrete_topology_frontier_of [simp]:
"(discrete_topology U) frontier_of S = {}"
by (simp add: Diff_eq discrete_topology_closure_of frontier_of_closures)
+lemma openin_subset_topspace_eq:
+ assumes "disjnt S (X frontier_of U)"
+ shows "openin (subtopology X U) S \<longleftrightarrow> openin X S \<and> S \<subseteq> U"
+proof
+ assume S: "openin (subtopology X U) S"
+ show "openin X S \<and> S \<subseteq> U"
+ proof
+ show "S \<subseteq> U"
+ using S openin_imp_subset by blast
+ have "disjnt S (X frontier_of (topspace X \<inter> U))"
+ by (metis assms frontier_of_restrict)
+ moreover
+ have "openin (subtopology X (topspace X \<inter> U)) S"
+ by (simp add: S subtopology_restrict)
+ moreover
+ have "openin X S"
+ if dis: "disjnt S (X frontier_of U)" and ope: "openin (subtopology X U) S" and "U \<subseteq> topspace X"
+ for S U
+ proof -
+ obtain T where T: "openin X T" "S = T \<inter> U"
+ using ope by (auto simp: openin_subtopology)
+ have "T \<inter> U = T \<inter> X interior_of U"
+ using that T interior_of_subset in_closure_of by (fastforce simp: disjnt_iff frontier_of_def)
+ then show ?thesis
+ using \<open>S = T \<inter> U\<close> \<open>openin X T\<close> by auto
+ qed
+ ultimately show "openin X S"
+ by blast
+ qed
+qed (metis inf.absorb_iff1 openin_subtopology_Int)
+
subsection\<open>Locally finite collections\<close>
definition locally_finite_in
where
"locally_finite_in X \<A> \<longleftrightarrow>
(\<Union>\<A> \<subseteq> topspace X) \<and>
(\<forall>x \<in> topspace X. \<exists>V. openin X V \<and> x \<in> V \<and> finite {U \<in> \<A>. U \<inter> V \<noteq> {}})"
lemma finite_imp_locally_finite_in:
"\<lbrakk>finite \<A>; \<Union>\<A> \<subseteq> topspace X\<rbrakk> \<Longrightarrow> locally_finite_in X \<A>"
by (auto simp: locally_finite_in_def)
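text \<open>A minimal sketch of how the definition is used: any two open sets form a locally finite
  collection, since finiteness is immediate and openness gives containment in the topspace.\<close>
(* illustrative sketch; not part of the original development *)
lemma
  assumes "openin X S" and "openin X T"
  shows "locally_finite_in X {S, T}"
proof (rule finite_imp_locally_finite_in)
  show "finite {S, T}"
    by simp
  have "S \<subseteq> topspace X" "T \<subseteq> topspace X"
    using assms by (simp_all add: openin_subset)
  then show "\<Union> {S, T} \<subseteq> topspace X"
    by auto
qed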
lemma locally_finite_in_subset:
assumes "locally_finite_in X \<A>" "\<B> \<subseteq> \<A>"
shows "locally_finite_in X \<B>"
proof -
have "finite (\<A> \<inter> {U. U \<inter> V \<noteq> {}}) \<Longrightarrow> finite (\<B> \<inter> {U. U \<inter> V \<noteq> {}})" for V
by (meson \<open>\<B> \<subseteq> \<A>\<close> finite_subset inf_le1 inf_le2 le_inf_iff subset_trans)
then show ?thesis
using assms unfolding locally_finite_in_def Int_def by fastforce
qed
lemma locally_finite_in_refinement:
assumes \<A>: "locally_finite_in X \<A>" and f: "\<And>S. S \<in> \<A> \<Longrightarrow> f S \<subseteq> S"
shows "locally_finite_in X (f ` \<A>)"
proof -
show ?thesis
unfolding locally_finite_in_def
proof safe
fix x
assume "x \<in> topspace X"
then obtain V where "openin X V" "x \<in> V" "finite {U \<in> \<A>. U \<inter> V \<noteq> {}}"
using \<A> unfolding locally_finite_in_def by blast
moreover have "{U \<in> \<A>. f U \<inter> V \<noteq> {}} \<subseteq> {U \<in> \<A>. U \<inter> V \<noteq> {}}" for V
using f by blast
ultimately have "finite {U \<in> \<A>. f U \<inter> V \<noteq> {}}"
using finite_subset by blast
moreover have "f ` {U \<in> \<A>. f U \<inter> V \<noteq> {}} = {U \<in> f ` \<A>. U \<inter> V \<noteq> {}}"
by blast
ultimately have "finite {U \<in> f ` \<A>. U \<inter> V \<noteq> {}}"
by (metis (no_types, lifting) finite_imageI)
then show "\<exists>V. openin X V \<and> x \<in> V \<and> finite {U \<in> f ` \<A>. U \<inter> V \<noteq> {}}"
using \<open>openin X V\<close> \<open>x \<in> V\<close> by blast
next
show "\<And>x xa. \<lbrakk>xa \<in> \<A>; x \<in> f xa\<rbrakk> \<Longrightarrow> x \<in> topspace X"
by (meson Sup_upper \<A> f locally_finite_in_def subset_iff)
qed
qed
lemma locally_finite_in_subtopology:
assumes \<A>: "locally_finite_in X \<A>" "\<Union>\<A> \<subseteq> S"
shows "locally_finite_in (subtopology X S) \<A>"
unfolding locally_finite_in_def
proof safe
fix x
assume x: "x \<in> topspace (subtopology X S)"
then obtain V where "openin X V" "x \<in> V" and fin: "finite {U \<in> \<A>. U \<inter> V \<noteq> {}}"
using \<A> unfolding locally_finite_in_def topspace_subtopology by blast
show "\<exists>V. openin (subtopology X S) V \<and> x \<in> V \<and> finite {U \<in> \<A>. U \<inter> V \<noteq> {}}"
proof (intro exI conjI)
show "openin (subtopology X S) (S \<inter> V)"
by (simp add: \<open>openin X V\<close> openin_subtopology_Int2)
have "{U \<in> \<A>. U \<inter> (S \<inter> V) \<noteq> {}} \<subseteq> {U \<in> \<A>. U \<inter> V \<noteq> {}}"
by auto
with fin show "finite {U \<in> \<A>. U \<inter> (S \<inter> V) \<noteq> {}}"
using finite_subset by auto
show "x \<in> S \<inter> V"
using x \<open>x \<in> V\<close> by (simp)
qed
next
show "\<And>x A. \<lbrakk>x \<in> A; A \<in> \<A>\<rbrakk> \<Longrightarrow> x \<in> topspace (subtopology X S)"
using assms unfolding locally_finite_in_def topspace_subtopology by blast
qed
lemma closedin_locally_finite_Union:
assumes clo: "\<And>S. S \<in> \<A> \<Longrightarrow> closedin X S" and \<A>: "locally_finite_in X \<A>"
shows "closedin X (\<Union>\<A>)"
using \<A> unfolding locally_finite_in_def closedin_def
proof clarify
show "openin X (topspace X - \<Union>\<A>)"
proof (subst openin_subopen, clarify)
fix x
assume "x \<in> topspace X" and "x \<notin> \<Union>\<A>"
then obtain V where "openin X V" "x \<in> V" and fin: "finite {U \<in> \<A>. U \<inter> V \<noteq> {}}"
using \<A> unfolding locally_finite_in_def by blast
let ?T = "V - \<Union>{S \<in> \<A>. S \<inter> V \<noteq> {}}"
show "\<exists>T. openin X T \<and> x \<in> T \<and> T \<subseteq> topspace X - \<Union>\<A>"
proof (intro exI conjI)
show "openin X ?T"
by (metis (no_types, lifting) fin \<open>openin X V\<close> clo closedin_Union mem_Collect_eq openin_diff)
show "x \<in> ?T"
using \<open>x \<notin> \<Union>\<A>\<close> \<open>x \<in> V\<close> by auto
show "?T \<subseteq> topspace X - \<Union>\<A>"
using \<open>openin X V\<close> openin_subset by auto
qed
qed
qed
lemma locally_finite_in_closure:
assumes \<A>: "locally_finite_in X \<A>"
shows "locally_finite_in X ((\<lambda>S. X closure_of S) ` \<A>)"
using \<A> unfolding locally_finite_in_def
proof (intro conjI; clarsimp)
fix x A
assume "x \<in> X closure_of A"
then show "x \<in> topspace X"
by (meson in_closure_of)
next
fix x
assume "x \<in> topspace X" and "\<Union>\<A> \<subseteq> topspace X"
then obtain V where V: "openin X V" "x \<in> V" and fin: "finite {U \<in> \<A>. U \<inter> V \<noteq> {}}"
using \<A> unfolding locally_finite_in_def by blast
have eq: "{y \<in> f ` \<A>. Q y} = f ` {x. x \<in> \<A> \<and> Q(f x)}" for f and Q :: "'a set \<Rightarrow> bool"
by blast
have eq2: "{A \<in> \<A>. X closure_of A \<inter> V \<noteq> {}} = {A \<in> \<A>. A \<inter> V \<noteq> {}}"
using openin_Int_closure_of_eq_empty V by blast
have "finite {U \<in> (closure_of) X ` \<A>. U \<inter> V \<noteq> {}}"
by (simp add: eq eq2 fin)
with V show "\<exists>V. openin X V \<and> x \<in> V \<and> finite {U \<in> (closure_of) X ` \<A>. U \<inter> V \<noteq> {}}"
by blast
qed
lemma closedin_Union_locally_finite_closure:
"locally_finite_in X \<A> \<Longrightarrow> closedin X (\<Union>((\<lambda>S. X closure_of S) ` \<A>))"
by (metis (mono_tags) closedin_closure_of closedin_locally_finite_Union imageE locally_finite_in_closure)
lemma closure_of_Union_subset: "\<Union>((\<lambda>S. X closure_of S) ` \<A>) \<subseteq> X closure_of (\<Union>\<A>)"
by (simp add: SUP_le_iff Sup_upper closure_of_mono)
lemma closure_of_locally_finite_Union:
assumes "locally_finite_in X \<A>"
shows "X closure_of (\<Union>\<A>) = \<Union>((\<lambda>S. X closure_of S) ` \<A>)"
proof (rule closure_of_unique)
show "\<Union> \<A> \<subseteq> \<Union> ((closure_of) X ` \<A>)"
using assms by (simp add: SUP_upper2 Sup_le_iff closure_of_subset locally_finite_in_def)
show "closedin X (\<Union> ((closure_of) X ` \<A>))"
using assms by (simp add: closedin_Union_locally_finite_closure)
show "\<And>T'. \<lbrakk>\<Union> \<A> \<subseteq> T'; closedin X T'\<rbrakk> \<Longrightarrow> \<Union> ((closure_of) X ` \<A>) \<subseteq> T'"
by (simp add: Sup_le_iff closure_of_minimal)
qed
subsection\<^marker>\<open>tag important\<close> \<open>Continuous maps\<close>
text \<open>We will need to deal with continuous maps in terms of topologies, as defined below,
rather than in terms of type classes.\<close>
definition continuous_map where
"continuous_map X Y f \<equiv>
(\<forall>x \<in> topspace X. f x \<in> topspace Y) \<and>
(\<forall>U. openin Y U \<longrightarrow> openin X {x \<in> topspace X. f x \<in> U})"
lemma continuous_map:
"continuous_map X Y f \<longleftrightarrow>
f ` (topspace X) \<subseteq> topspace Y \<and> (\<forall>U. openin Y U \<longrightarrow> openin X {x \<in> topspace X. f x \<in> U})"
by (auto simp: continuous_map_def)
lemma continuous_map_image_subset_topspace:
"continuous_map X Y f \<Longrightarrow> f ` (topspace X) \<subseteq> topspace Y"
by (auto simp: continuous_map_def)
lemma continuous_map_on_empty: "topspace X = {} \<Longrightarrow> continuous_map X Y f"
by (auto simp: continuous_map_def)
lemma continuous_map_closedin:
"continuous_map X Y f \<longleftrightarrow>
(\<forall>x \<in> topspace X. f x \<in> topspace Y) \<and>
(\<forall>C. closedin Y C \<longrightarrow> closedin X {x \<in> topspace X. f x \<in> C})"
proof -
have "(\<forall>U. openin Y U \<longrightarrow> openin X {x \<in> topspace X. f x \<in> U}) =
(\<forall>C. closedin Y C \<longrightarrow> closedin X {x \<in> topspace X. f x \<in> C})"
if "\<And>x. x \<in> topspace X \<Longrightarrow> f x \<in> topspace Y"
proof -
have eq: "{x \<in> topspace X. f x \<in> topspace Y \<and> f x \<notin> C} = (topspace X - {x \<in> topspace X. f x \<in> C})" for C
using that by blast
show ?thesis
proof (intro iffI allI impI)
fix C
assume "\<forall>U. openin Y U \<longrightarrow> openin X {x \<in> topspace X. f x \<in> U}" and "closedin Y C"
then show "closedin X {x \<in> topspace X. f x \<in> C}"
by (auto simp add: closedin_def eq)
next
fix U
assume "\<forall>C. closedin Y C \<longrightarrow> closedin X {x \<in> topspace X. f x \<in> C}" and "openin Y U"
then show "openin X {x \<in> topspace X. f x \<in> U}"
by (auto simp add: openin_closedin_eq eq)
qed
qed
then show ?thesis
by (auto simp: continuous_map_def)
qed
lemma openin_continuous_map_preimage:
"\<lbrakk>continuous_map X Y f; openin Y U\<rbrakk> \<Longrightarrow> openin X {x \<in> topspace X. f x \<in> U}"
by (simp add: continuous_map_def)
lemma closedin_continuous_map_preimage:
"\<lbrakk>continuous_map X Y f; closedin Y C\<rbrakk> \<Longrightarrow> closedin X {x \<in> topspace X. f x \<in> C}"
by (simp add: continuous_map_closedin)
lemma openin_continuous_map_preimage_gen:
assumes "continuous_map X Y f" "openin X U" "openin Y V"
shows "openin X {x \<in> U. f x \<in> V}"
proof -
have eq: "{x \<in> U. f x \<in> V} = U \<inter> {x \<in> topspace X. f x \<in> V}"
using assms(2) openin_closedin_eq by fastforce
show ?thesis
unfolding eq
using assms openin_continuous_map_preimage by fastforce
qed
lemma closedin_continuous_map_preimage_gen:
assumes "continuous_map X Y f" "closedin X U" "closedin Y V"
shows "closedin X {x \<in> U. f x \<in> V}"
proof -
have eq: "{x \<in> U. f x \<in> V} = U \<inter> {x \<in> topspace X. f x \<in> V}"
using assms(2) closedin_def by fastforce
show ?thesis
unfolding eq
using assms closedin_continuous_map_preimage by fastforce
qed
lemma continuous_map_image_closure_subset:
assumes "continuous_map X Y f"
shows "f ` (X closure_of S) \<subseteq> Y closure_of f ` S"
proof -
have *: "f ` (topspace X) \<subseteq> topspace Y"
by (meson assms continuous_map)
have "X closure_of T \<subseteq> {x \<in> X closure_of T. f x \<in> Y closure_of (f ` T)}"
if "T \<subseteq> topspace X" for T
proof (rule closure_of_minimal)
show "T \<subseteq> {x \<in> X closure_of T. f x \<in> Y closure_of f ` T}"
using closure_of_subset * that by (fastforce simp: in_closure_of)
next
show "closedin X {x \<in> X closure_of T. f x \<in> Y closure_of f ` T}"
using assms closedin_continuous_map_preimage_gen by fastforce
qed
then show ?thesis
by (smt (verit, ccfv_threshold) assms continuous_map image_eqI image_subset_iff in_closure_of mem_Collect_eq)
qed
lemma continuous_map_subset_aux1: "continuous_map X Y f \<Longrightarrow>
(\<forall>S. f ` (X closure_of S) \<subseteq> Y closure_of f ` S)"
using continuous_map_image_closure_subset by blast
lemma continuous_map_subset_aux2:
assumes "\<forall>S. S \<subseteq> topspace X \<longrightarrow> f ` (X closure_of S) \<subseteq> Y closure_of f ` S"
shows "continuous_map X Y f"
unfolding continuous_map_closedin
proof (intro conjI ballI allI impI)
fix x
assume "x \<in> topspace X"
then show "f x \<in> topspace Y"
using assms closure_of_subset_topspace by fastforce
next
fix C
assume "closedin Y C"
then show "closedin X {x \<in> topspace X. f x \<in> C}"
proof (clarsimp simp flip: closure_of_subset_eq, intro conjI)
fix x
assume x: "x \<in> X closure_of {x \<in> topspace X. f x \<in> C}"
and "C \<subseteq> topspace Y" and "Y closure_of C \<subseteq> C"
show "x \<in> topspace X"
by (meson x in_closure_of)
have "{a \<in> topspace X. f a \<in> C} \<subseteq> topspace X"
by simp
moreover have "Y closure_of f ` {a \<in> topspace X. f a \<in> C} \<subseteq> C"
by (simp add: \<open>closedin Y C\<close> closure_of_minimal image_subset_iff)
ultimately show "f x \<in> C"
using x assms by blast
qed
qed
lemma continuous_map_eq_image_closure_subset:
"continuous_map X Y f \<longleftrightarrow> (\<forall>S. f ` (X closure_of S) \<subseteq> Y closure_of f ` S)"
using continuous_map_subset_aux1 continuous_map_subset_aux2 by metis
lemma continuous_map_eq_image_closure_subset_alt:
"continuous_map X Y f \<longleftrightarrow> (\<forall>S. S \<subseteq> topspace X \<longrightarrow> f ` (X closure_of S) \<subseteq> Y closure_of f ` S)"
using continuous_map_subset_aux1 continuous_map_subset_aux2 by metis
lemma continuous_map_eq_image_closure_subset_gen:
"continuous_map X Y f \<longleftrightarrow>
f ` (topspace X) \<subseteq> topspace Y \<and>
(\<forall>S. f ` (X closure_of S) \<subseteq> Y closure_of f ` S)"
using continuous_map_subset_aux1 continuous_map_subset_aux2 continuous_map_image_subset_topspace by metis
lemma continuous_map_closure_preimage_subset:
"continuous_map X Y f
\<Longrightarrow> X closure_of {x \<in> topspace X. f x \<in> T}
\<subseteq> {x \<in> topspace X. f x \<in> Y closure_of T}"
unfolding continuous_map_closedin
by (rule closure_of_minimal) (use in_closure_of in \<open>fastforce+\<close>)
lemma continuous_map_frontier_frontier_preimage_subset:
assumes "continuous_map X Y f"
shows "X frontier_of {x \<in> topspace X. f x \<in> T} \<subseteq> {x \<in> topspace X. f x \<in> Y frontier_of T}"
proof -
have eq: "topspace X - {x \<in> topspace X. f x \<in> T} = {x \<in> topspace X. f x \<in> topspace Y - T}"
using assms unfolding continuous_map_def by blast
have "X closure_of {x \<in> topspace X. f x \<in> T} \<subseteq> {x \<in> topspace X. f x \<in> Y closure_of T}"
by (simp add: assms continuous_map_closure_preimage_subset)
moreover
have "X closure_of (topspace X - {x \<in> topspace X. f x \<in> T}) \<subseteq> {x \<in> topspace X. f x \<in> Y closure_of (topspace Y - T)}"
using continuous_map_closure_preimage_subset [OF assms] eq by presburger
ultimately show ?thesis
by (auto simp: frontier_of_closures)
qed
lemma topology_finer_continuous_id:
assumes "topspace X = topspace Y"
shows "(\<forall>S. openin X S \<longrightarrow> openin Y S) \<longleftrightarrow> continuous_map Y X id" (is "?lhs = ?rhs")
proof
show "?lhs \<Longrightarrow> ?rhs"
unfolding continuous_map_def
using assms openin_subopen openin_subset by fastforce
show "?rhs \<Longrightarrow> ?lhs"
unfolding continuous_map_def
using assms openin_subopen topspace_def by fastforce
qed
lemma continuous_map_const [simp]:
"continuous_map X Y (\<lambda>x. C) \<longleftrightarrow> topspace X = {} \<or> C \<in> topspace Y"
proof (cases "topspace X = {}")
case False
show ?thesis
proof (cases "C \<in> topspace Y")
case True
with openin_subopen show ?thesis
by (auto simp: continuous_map_def)
next
case False
then show ?thesis
unfolding continuous_map_def by fastforce
qed
qed (auto simp: continuous_map_on_empty)
declare continuous_map_const [THEN iffD2, continuous_intros]
lemma continuous_map_compose [continuous_intros]:
assumes f: "continuous_map X X' f" and g: "continuous_map X' X'' g"
shows "continuous_map X X'' (g \<circ> f)"
unfolding continuous_map_def
proof (intro conjI ballI allI impI)
fix x
assume "x \<in> topspace X"
then show "(g \<circ> f) x \<in> topspace X''"
using assms unfolding continuous_map_def by force
next
fix U
assume "openin X'' U"
have eq: "{x \<in> topspace X. (g \<circ> f) x \<in> U} = {x \<in> topspace X. f x \<in> {y. y \<in> topspace X' \<and> g y \<in> U}}"
by auto (meson f continuous_map_def)
show "openin X {x \<in> topspace X. (g \<circ> f) x \<in> U}"
unfolding eq
using assms unfolding continuous_map_def
using \<open>openin X'' U\<close> by blast
qed
lemma continuous_map_eq:
assumes "continuous_map X X' f" and "\<And>x. x \<in> topspace X \<Longrightarrow> f x = g x"
shows "continuous_map X X' g"
proof -
have eq: "{x \<in> topspace X. f x \<in> U} = {x \<in> topspace X. g x \<in> U}" for U
using assms by auto
show ?thesis
using assms by (simp add: continuous_map_def eq)
qed
lemma restrict_continuous_map [simp]:
"topspace X \<subseteq> S \<Longrightarrow> continuous_map X X' (restrict f S) \<longleftrightarrow> continuous_map X X' f"
by (auto simp: elim!: continuous_map_eq)
lemma continuous_map_in_subtopology:
"continuous_map X (subtopology X' S) f \<longleftrightarrow> continuous_map X X' f \<and> f ` (topspace X) \<subseteq> S"
(is "?lhs = ?rhs")
proof
assume L: ?lhs
show ?rhs
proof -
have "\<And>A. f ` (X closure_of A) \<subseteq> subtopology X' S closure_of f ` A"
by (meson L continuous_map_image_closure_subset)
then show ?thesis
by (metis (no_types) closure_of_subset_subtopology closure_of_subtopology_subset closure_of_topspace continuous_map_eq_image_closure_subset order.trans)
qed
next
assume R: ?rhs
then have eq: "{x \<in> topspace X. f x \<in> U} = {x \<in> topspace X. f x \<in> U \<and> f x \<in> S}" for U
by auto
show ?lhs
using R
unfolding continuous_map
by (auto simp: openin_subtopology eq)
qed
lemma continuous_map_from_subtopology:
"continuous_map X X' f \<Longrightarrow> continuous_map (subtopology X S) X' f"
by (auto simp: continuous_map openin_subtopology)
lemma continuous_map_into_fulltopology:
"continuous_map X (subtopology X' T) f \<Longrightarrow> continuous_map X X' f"
by (auto simp: continuous_map_in_subtopology)
lemma continuous_map_into_subtopology:
"\<lbrakk>continuous_map X X' f; f ` topspace X \<subseteq> T\<rbrakk> \<Longrightarrow> continuous_map X (subtopology X' T) f"
by (auto simp: continuous_map_in_subtopology)
lemma continuous_map_from_subtopology_mono:
"\<lbrakk>continuous_map (subtopology X T) X' f; S \<subseteq> T\<rbrakk>
\<Longrightarrow> continuous_map (subtopology X S) X' f"
by (metis inf.absorb_iff2 continuous_map_from_subtopology subtopology_subtopology)
lemma continuous_map_from_discrete_topology [simp]:
"continuous_map (discrete_topology U) X f \<longleftrightarrow> f ` U \<subseteq> topspace X"
by (auto simp: continuous_map_def)
lemma continuous_map_iff_continuous [simp]: "continuous_map (top_of_set S) euclidean g = continuous_on S g"
by (fastforce simp add: continuous_map openin_subtopology continuous_on_open_invariant)
lemma continuous_map_iff_continuous2 [simp]: "continuous_map euclidean euclidean g = continuous_on UNIV g"
by (metis continuous_map_iff_continuous subtopology_UNIV)
lemma continuous_map_openin_preimage_eq:
"continuous_map X Y f \<longleftrightarrow>
f ` (topspace X) \<subseteq> topspace Y \<and> (\<forall>U. openin Y U \<longrightarrow> openin X (topspace X \<inter> f -` U))"
by (auto simp: continuous_map_def vimage_def Int_def)
lemma continuous_map_closedin_preimage_eq:
"continuous_map X Y f \<longleftrightarrow>
f ` (topspace X) \<subseteq> topspace Y \<and> (\<forall>U. closedin Y U \<longrightarrow> closedin X (topspace X \<inter> f -` U))"
by (auto simp: continuous_map_closedin vimage_def Int_def)
lemma continuous_map_square_root: "continuous_map euclideanreal euclideanreal sqrt"
by (simp add: continuous_at_imp_continuous_on isCont_real_sqrt)
lemma continuous_map_sqrt [continuous_intros]:
"continuous_map X euclideanreal f \<Longrightarrow> continuous_map X euclideanreal (\<lambda>x. sqrt(f x))"
by (meson continuous_map_compose continuous_map_eq continuous_map_square_root o_apply)
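text \<open>A small composition example, given as a sketch: chaining the square-root lemma with the
  composition lemma yields continuity of the iterated square root, written as a composition.\<close>
(* illustrative sketch; not part of the original development *)
lemma "continuous_map euclideanreal euclideanreal (sqrt \<circ> sqrt)"
  by (meson continuous_map_compose continuous_map_square_root)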
lemma continuous_map_id [simp, continuous_intros]: "continuous_map X X id"
unfolding continuous_map_def using openin_subopen topspace_def by fastforce
declare continuous_map_id [unfolded id_def, simp, continuous_intros]
lemma continuous_map_id_subt [simp]: "continuous_map (subtopology X S) X id"
by (simp add: continuous_map_from_subtopology)
declare continuous_map_id_subt [unfolded id_def, simp]
lemma\<^marker>\<open>tag important\<close> continuous_map_alt:
"continuous_map T1 T2 f
= ((\<forall>U. openin T2 U \<longrightarrow> openin T1 (f -` U \<inter> topspace T1)) \<and> f ` topspace T1 \<subseteq> topspace T2)"
by (auto simp: continuous_map_def vimage_def image_def Collect_conj_eq inf_commute)
lemma continuous_map_open [intro]:
"continuous_map T1 T2 f \<Longrightarrow> openin T2 U \<Longrightarrow> openin T1 (f-`U \<inter> topspace(T1))"
unfolding continuous_map_alt by auto
lemma continuous_map_preimage_topspace [intro]:
assumes "continuous_map T1 T2 f"
shows "f-`(topspace T2) \<inter> topspace T1 = topspace T1"
using assms unfolding continuous_map_def by auto
subsection\<open>Open and closed maps (not a priori assumed continuous)\<close>
definition open_map :: "'a topology \<Rightarrow> 'b topology \<Rightarrow> ('a \<Rightarrow> 'b) \<Rightarrow> bool"
where "open_map X1 X2 f \<equiv> \<forall>U. openin X1 U \<longrightarrow> openin X2 (f ` U)"
definition closed_map :: "'a topology \<Rightarrow> 'b topology \<Rightarrow> ('a \<Rightarrow> 'b) \<Rightarrow> bool"
where "closed_map X1 X2 f \<equiv> \<forall>U. closedin X1 U \<longrightarrow> closedin X2 (f ` U)"
lemma open_map_imp_subset_topspace:
"open_map X1 X2 f \<Longrightarrow> f ` (topspace X1) \<subseteq> topspace X2"
unfolding open_map_def by (simp add: openin_subset)
lemma open_map_on_empty:
"topspace X = {} \<Longrightarrow> open_map X Y f"
by (metis empty_iff imageE in_mono open_map_def openin_subopen openin_subset)
lemma closed_map_on_empty:
"topspace X = {} \<Longrightarrow> closed_map X Y f"
by (simp add: closed_map_def closedin_topspace_empty)
lemma closed_map_const:
"closed_map X Y (\<lambda>x. c) \<longleftrightarrow> topspace X = {} \<or> closedin Y {c}"
by (metis closed_map_def closed_map_on_empty closedin_empty closedin_topspace image_constant_conv)
lemma open_map_imp_subset:
"\<lbrakk>open_map X1 X2 f; S \<subseteq> topspace X1\<rbrakk> \<Longrightarrow> f ` S \<subseteq> topspace X2"
by (meson order_trans open_map_imp_subset_topspace subset_image_iff)
lemma topology_finer_open_id:
"(\<forall>S. openin X S \<longrightarrow> openin X' S) \<longleftrightarrow> open_map X X' id"
unfolding open_map_def by auto
lemma open_map_id: "open_map X X id"
unfolding open_map_def by auto
lemma open_map_eq:
"\<lbrakk>open_map X X' f; \<And>x. x \<in> topspace X \<Longrightarrow> f x = g x\<rbrakk> \<Longrightarrow> open_map X X' g"
unfolding open_map_def
by (metis image_cong openin_subset subset_iff)
lemma open_map_inclusion_eq:
"open_map (subtopology X S) X id \<longleftrightarrow> openin X (topspace X \<inter> S)"
by (metis openin_topspace openin_trans_full subtopology_restrict topology_finer_open_id topspace_subtopology)
lemma open_map_inclusion:
"openin X S \<Longrightarrow> open_map (subtopology X S) X id"
by (simp add: open_map_inclusion_eq openin_Int)
lemma open_map_compose:
"\<lbrakk>open_map X X' f; open_map X' X'' g\<rbrakk> \<Longrightarrow> open_map X X'' (g \<circ> f)"
by (metis (no_types, lifting) image_comp open_map_def)
lemma closed_map_imp_subset_topspace:
"closed_map X1 X2 f \<Longrightarrow> f ` (topspace X1) \<subseteq> topspace X2"
by (simp add: closed_map_def closedin_subset)
lemma closed_map_imp_subset:
"\<lbrakk>closed_map X1 X2 f; S \<subseteq> topspace X1\<rbrakk> \<Longrightarrow> f ` S \<subseteq> topspace X2"
using closed_map_imp_subset_topspace by blast
lemma topology_finer_closed_id:
"(\<forall>S. closedin X S \<longrightarrow> closedin X' S) \<longleftrightarrow> closed_map X X' id"
by (simp add: closed_map_def)
lemma closed_map_id: "closed_map X X id"
by (simp add: closed_map_def)
lemma closed_map_eq:
"\<lbrakk>closed_map X X' f; \<And>x. x \<in> topspace X \<Longrightarrow> f x = g x\<rbrakk> \<Longrightarrow> closed_map X X' g"
unfolding closed_map_def
by (metis image_cong closedin_subset subset_iff)
lemma closed_map_compose:
"\<lbrakk>closed_map X X' f; closed_map X' X'' g\<rbrakk> \<Longrightarrow> closed_map X X'' (g \<circ> f)"
by (metis (no_types, lifting) closed_map_def image_comp)
lemma closed_map_inclusion_eq:
"closed_map (subtopology X S) X id \<longleftrightarrow>
closedin X (topspace X \<inter> S)"
proof -
have *: "closedin X (T \<inter> S)" if "closedin X (S \<inter> topspace X)" "closedin X T" for T
by (smt (verit, best) closedin_Int closure_of_subset_eq inf_sup_aci le_iff_inf that)
then show ?thesis
by (fastforce simp add: closed_map_def Int_commute closedin_subtopology_alt intro: *)
qed
lemma closed_map_inclusion: "closedin X S \<Longrightarrow> closed_map (subtopology X S) X id"
by (simp add: closed_map_inclusion_eq closedin_Int)
lemma open_map_into_subtopology:
"\<lbrakk>open_map X X' f; f ` topspace X \<subseteq> S\<rbrakk> \<Longrightarrow> open_map X (subtopology X' S) f"
unfolding open_map_def openin_subtopology
using openin_subset by fastforce
lemma closed_map_into_subtopology:
"\<lbrakk>closed_map X X' f; f ` topspace X \<subseteq> S\<rbrakk> \<Longrightarrow> closed_map X (subtopology X' S) f"
unfolding closed_map_def closedin_subtopology
using closedin_subset by fastforce
lemma open_map_into_discrete_topology:
"open_map X (discrete_topology U) f \<longleftrightarrow> f ` (topspace X) \<subseteq> U"
unfolding open_map_def openin_discrete_topology using openin_subset by blast
lemma closed_map_into_discrete_topology:
"closed_map X (discrete_topology U) f \<longleftrightarrow> f ` (topspace X) \<subseteq> U"
unfolding closed_map_def closedin_discrete_topology using closedin_subset by blast
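text \<open>A sketch of a consequence of the two characterizations above: a map into a discrete
  topology is simultaneously an open map and a closed map as soon as its image lies in the
  carrier set.\<close>
(* illustrative sketch; not part of the original development *)
lemma
  assumes "f ` topspace X \<subseteq> U"
  shows "open_map X (discrete_topology U) f" and "closed_map X (discrete_topology U) f"
  by (simp_all add: assms open_map_into_discrete_topology closed_map_into_discrete_topology)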
lemma bijective_open_imp_closed_map:
"\<lbrakk>open_map X X' f; f ` (topspace X) = topspace X'; inj_on f (topspace X)\<rbrakk> \<Longrightarrow> closed_map X X' f"
unfolding open_map_def closed_map_def closedin_def
by auto (metis Diff_subset inj_on_image_set_diff)
lemma bijective_closed_imp_open_map:
"\<lbrakk>closed_map X X' f; f ` (topspace X) = topspace X'; inj_on f (topspace X)\<rbrakk> \<Longrightarrow> open_map X X' f"
unfolding closed_map_def open_map_def openin_closedin_eq
by auto (metis Diff_subset inj_on_image_set_diff)
lemma open_map_from_subtopology:
"\<lbrakk>open_map X X' f; openin X U\<rbrakk> \<Longrightarrow> open_map (subtopology X U) X' f"
unfolding open_map_def openin_subtopology_alt by blast
lemma closed_map_from_subtopology:
"\<lbrakk>closed_map X X' f; closedin X U\<rbrakk> \<Longrightarrow> closed_map (subtopology X U) X' f"
unfolding closed_map_def closedin_subtopology_alt by blast
lemma open_map_restriction:
assumes f: "open_map X X' f" and U: "{x \<in> topspace X. f x \<in> V} = U"
shows "open_map (subtopology X U) (subtopology X' V) f"
unfolding open_map_def
proof clarsimp
fix W
assume "openin (subtopology X U) W"
then obtain T where "openin X T" "W = T \<inter> U"
by (meson openin_subtopology)
with f U have "f ` W = (f ` T) \<inter> V"
unfolding open_map_def openin_closedin_eq by auto
then show "openin (subtopology X' V) (f ` W)"
by (metis \<open>openin X T\<close> f open_map_def openin_subtopology_Int)
qed
lemma closed_map_restriction:
assumes f: "closed_map X X' f" and U: "{x \<in> topspace X. f x \<in> V} = U"
shows "closed_map (subtopology X U) (subtopology X' V) f"
unfolding closed_map_def
proof clarsimp
fix W
assume "closedin (subtopology X U) W"
then obtain T where "closedin X T" "W = T \<inter> U"
by (meson closedin_subtopology)
with f U have "f ` W = (f ` T) \<inter> V"
unfolding closed_map_def closedin_def by auto
then show "closedin (subtopology X' V) (f ` W)"
by (metis \<open>closedin X T\<close> closed_map_def closedin_subtopology f)
qed
lemma closed_map_closure_of_image:
"closed_map X Y f \<longleftrightarrow>
f ` topspace X \<subseteq> topspace Y \<and>
(\<forall>S. S \<subseteq> topspace X \<longrightarrow> Y closure_of (f ` S) \<subseteq> image f (X closure_of S))" (is "?lhs=?rhs")
proof
assume ?lhs
then show ?rhs
by (simp add: closed_map_def closed_map_imp_subset_topspace closure_of_minimal closure_of_subset image_mono)
next
assume ?rhs
then show ?lhs
by (metis closed_map_def closed_map_into_discrete_topology closure_of_eq
closure_of_subset_eq topspace_discrete_topology)
qed
lemma open_map_interior_of_image_subset:
"open_map X Y f \<longleftrightarrow> (\<forall>S. image f (X interior_of S) \<subseteq> Y interior_of (f ` S))"
by (metis image_mono interior_of_eq interior_of_maximal interior_of_subset open_map_def openin_interior_of subset_antisym)
lemma open_map_interior_of_image_subset_alt:
"open_map X Y f \<longleftrightarrow> (\<forall>S\<subseteq>topspace X. f ` (X interior_of S) \<subseteq> Y interior_of f ` S)"
by (metis interior_of_eq open_map_def open_map_interior_of_image_subset openin_subset subset_interior_of_eq)
lemma open_map_interior_of_image_subset_gen:
"open_map X Y f \<longleftrightarrow>
(f ` topspace X \<subseteq> topspace Y \<and> (\<forall>S. f ` (X interior_of S) \<subseteq> Y interior_of f ` S))"
by (meson open_map_imp_subset_topspace open_map_interior_of_image_subset)
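text \<open>So open maps are exactly the maps satisfying
f ` (X interior_of S) \<subseteq> Y interior_of (f ` S) for every S, dually to the closure-of
characterisation of closed maps above.\<close>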
lemma open_map_preimage_neighbourhood:
"open_map X Y f \<longleftrightarrow>
(f ` topspace X \<subseteq> topspace Y \<and>
(\<forall>U T. closedin X U \<and> T \<subseteq> topspace Y \<and>
{x \<in> topspace X. f x \<in> T} \<subseteq> U \<longrightarrow>
(\<exists>V. closedin Y V \<and> T \<subseteq> V \<and> {x \<in> topspace X. f x \<in> V} \<subseteq> U)))" (is "?lhs=?rhs")
proof
assume L: ?lhs
show ?rhs
proof (intro conjI strip)
show "f ` topspace X \<subseteq> topspace Y"
by (simp add: \<open>open_map X Y f\<close> open_map_imp_subset_topspace)
next
fix U T
assume UT: "closedin X U \<and> T \<subseteq> topspace Y \<and> {x \<in> topspace X. f x \<in> T} \<subseteq> U"
have "closedin Y (topspace Y - f ` (topspace X - U))"
by (meson UT L open_map_def openin_closedin_eq openin_diff openin_topspace)
with UT
show "\<exists>V. closedin Y V \<and> T \<subseteq> V \<and> {x \<in> topspace X. f x \<in> V} \<subseteq> U"
using image_iff by auto
qed
next
assume R: ?rhs
show ?lhs
unfolding open_map_def
proof (intro strip)
fix U assume "openin X U"
have "{x \<in> topspace X. f x \<in> topspace Y - f ` U} \<subseteq> topspace X - U"
by blast
then obtain V where V: "closedin Y V"
and sub: "topspace Y - f ` U \<subseteq> V" "{x \<in> topspace X. f x \<in> V} \<subseteq> topspace X - U"
using R \<open>openin X U\<close> by (meson Diff_subset openin_closedin_eq)
then have "f ` U \<subseteq> topspace Y - V"
using R \<open>openin X U\<close> openin_subset by fastforce
with sub have "f ` U = topspace Y - V"
by auto
then show "openin Y (f ` U)"
using V(1) by auto
qed
qed
lemma closed_map_preimage_neighbourhood:
"closed_map X Y f \<longleftrightarrow>
image f (topspace X) \<subseteq> topspace Y \<and>
(\<forall>U T. openin X U \<and> T \<subseteq> topspace Y \<and>
{x \<in> topspace X. f x \<in> T} \<subseteq> U
\<longrightarrow> (\<exists>V. openin Y V \<and> T \<subseteq> V \<and>
{x \<in> topspace X. f x \<in> V} \<subseteq> U))" (is "?lhs=?rhs")
proof
assume L: ?lhs
show ?rhs
proof (intro conjI strip)
show "f ` topspace X \<subseteq> topspace Y"
by (simp add: L closed_map_imp_subset_topspace)
next
fix U T
assume UT: "openin X U \<and> T \<subseteq> topspace Y \<and> {x \<in> topspace X. f x \<in> T} \<subseteq> U"
then have "openin Y (topspace Y - f ` (topspace X - U))"
by (meson L closed_map_def closedin_def closedin_diff closedin_topspace)
then show "\<exists>V. openin Y V \<and> T \<subseteq> V \<and> {x \<in> topspace X. f x \<in> V} \<subseteq> U"
using UT image_iff by auto
qed
next
assume R: ?rhs
show ?lhs
unfolding closed_map_def
proof (intro strip)
fix U assume "closedin X U"
have "{x \<in> topspace X. f x \<in> topspace Y - f ` U} \<subseteq> topspace X - U"
by blast
then obtain V where V: "openin Y V"
and sub: "topspace Y - f ` U \<subseteq> V" "{x \<in> topspace X. f x \<in> V} \<subseteq> topspace X - U"
using R Diff_subset \<open>closedin X U\<close> unfolding closedin_def
by (smt (verit, ccfv_threshold) Collect_mem_eq Collect_mono_iff)
then have "f ` U \<subseteq> topspace Y - V"
using R \<open>closedin X U\<close> closedin_subset by fastforce
with sub have "f ` U = topspace Y - V"
by auto
with V show "closedin Y (f ` U)"
by auto
qed
qed
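text \<open>The two preimage-neighbourhood characterisations above say that f is an open (resp.
closed) map into Y precisely when every closed (resp. open) set containing the preimage of a
subset T of the topspace of Y already contains the preimage of some closed (resp. open)
superset of T. The next lemma specialises the closed case to singleton fibres.\<close>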
lemma closed_map_fibre_neighbourhood:
"closed_map X Y f \<longleftrightarrow>
f ` (topspace X) \<subseteq> topspace Y \<and>
(\<forall>U y. openin X U \<and> y \<in> topspace Y \<and> {x \<in> topspace X. f x = y} \<subseteq> U
\<longrightarrow> (\<exists>V. openin Y V \<and> y \<in> V \<and> {x \<in> topspace X. f x \<in> V} \<subseteq> U))"
unfolding closed_map_preimage_neighbourhood
proof (intro conj_cong refl all_cong1)
fix U
assume "f ` topspace X \<subseteq> topspace Y"
show "(\<forall>T. openin X U \<and> T \<subseteq> topspace Y \<and> {x \<in> topspace X. f x \<in> T} \<subseteq> U
\<longrightarrow> (\<exists>V. openin Y V \<and> T \<subseteq> V \<and> {x \<in> topspace X. f x \<in> V} \<subseteq> U))
= (\<forall>y. openin X U \<and> y \<in> topspace Y \<and> {x \<in> topspace X. f x = y} \<subseteq> U
\<longrightarrow> (\<exists>V. openin Y V \<and> y \<in> V \<and> {x \<in> topspace X. f x \<in> V} \<subseteq> U))"
(is "(\<forall>T. ?P T) \<longleftrightarrow> (\<forall>y. ?Q y)")
proof
assume L [rule_format]: "\<forall>T. ?P T"
show "\<forall>y. ?Q y"
proof
fix y show "?Q y"
using L [of "{y}"] by blast
qed
next
assume R: "\<forall>y. ?Q y"
show "\<forall>T. ?P T"
proof (cases "openin X U")
case True
note [[unify_search_bound=3]]
obtain V where V: "\<And>y. \<lbrakk>y \<in> topspace Y; {x \<in> topspace X. f x = y} \<subseteq> U\<rbrakk> \<Longrightarrow>
openin Y (V y) \<and> y \<in> V y \<and> {x \<in> topspace X. f x \<in> V y} \<subseteq> U"
using R by (simp add: True) meson
show ?thesis
proof clarify
fix T
assume "openin X U" and "T \<subseteq> topspace Y" and "{x \<in> topspace X. f x \<in> T} \<subseteq> U"
with V show "\<exists>V. openin Y V \<and> T \<subseteq> V \<and> {x \<in> topspace X. f x \<in> V} \<subseteq> U"
by (rule_tac x="\<Union>y\<in>T. V y" in exI) fastforce
qed
qed auto
qed
qed
lemma open_map_in_subtopology:
"openin Y S
\<Longrightarrow> (open_map X (subtopology Y S) f \<longleftrightarrow> open_map X Y f \<and> f ` (topspace X) \<subseteq> S)"
by (metis le_inf_iff open_map_def open_map_imp_subset_topspace open_map_into_subtopology openin_trans_full topspace_subtopology)
lemma open_map_from_open_subtopology:
"\<lbrakk>openin Y S; open_map X (subtopology Y S) f\<rbrakk> \<Longrightarrow> open_map X Y f"
using open_map_in_subtopology by blast
lemma closed_map_in_subtopology:
"closedin Y S
\<Longrightarrow> closed_map X (subtopology Y S) f \<longleftrightarrow> (closed_map X Y f \<and> f ` topspace X \<subseteq> S)"
by (metis closed_map_def closed_map_imp_subset_topspace closed_map_into_subtopology
closedin_closed_subtopology closedin_subset topspace_subtopology_subset)
lemma closed_map_from_closed_subtopology:
"\<lbrakk>closedin Y S; closed_map X (subtopology Y S) f\<rbrakk> \<Longrightarrow> closed_map X Y f"
using closed_map_in_subtopology by blast
lemma closed_map_from_composition_left:
assumes cmf: "closed_map X Z (g \<circ> f)" and contf: "continuous_map X Y f" and fim: "f ` topspace X = topspace Y"
shows "closed_map Y Z g"
unfolding closed_map_def
proof (intro strip)
fix U assume "closedin Y U"
then have "closedin X {x \<in> topspace X. f x \<in> U}"
using contf closedin_continuous_map_preimage by blast
then have "closedin Z ((g \<circ> f) ` {x \<in> topspace X. f x \<in> U})"
using cmf closed_map_def by blast
moreover
have "\<And>y. y \<in> U \<Longrightarrow> \<exists>x \<in> topspace X. f x \<in> U \<and> g y = g (f x)"
by (smt (verit, ccfv_SIG) \<open>closedin Y U\<close> closedin_subset fim image_iff subsetD)
then have "(g \<circ> f) ` {x \<in> topspace X. f x \<in> U} = g`U" by auto
ultimately show "closedin Z (g ` U)"
by metis
qed
text \<open>The following proof is identical to the one above.\<close>
lemma open_map_from_composition_left:
assumes cmf: "open_map X Z (g \<circ> f)" and contf: "continuous_map X Y f" and fim: "f ` topspace X = topspace Y"
shows "open_map Y Z g"
unfolding open_map_def
proof (intro strip)
fix U assume "openin Y U"
then have "openin X {x \<in> topspace X. f x \<in> U}"
using contf openin_continuous_map_preimage by blast
then have "openin Z ((g \<circ> f) ` {x \<in> topspace X. f x \<in> U})"
using cmf open_map_def by blast
moreover
have "\<And>y. y \<in> U \<Longrightarrow> \<exists>x \<in> topspace X. f x \<in> U \<and> g y = g (f x)"
by (smt (verit, ccfv_SIG) \<open>openin Y U\<close> openin_subset fim image_iff subsetD)
then have "(g \<circ> f) ` {x \<in> topspace X. f x \<in> U} = g`U" by auto
ultimately show "openin Z (g ` U)"
by metis
qed
lemma closed_map_from_composition_right:
assumes cmf: "closed_map X Z (g \<circ> f)" "f ` topspace X \<subseteq> topspace Y" "continuous_map Y Z g" "inj_on g (topspace Y)"
shows "closed_map X Y f"
unfolding closed_map_def
proof (intro strip)
fix C assume "closedin X C"
have "\<And>y c. \<lbrakk>y \<in> topspace Y; g y = g (f c); c \<in> C\<rbrakk> \<Longrightarrow> y \<in> f ` C"
using \<open>closedin X C\<close> assms closedin_subset inj_onD by fastforce
then
have "f ` C = {x \<in> topspace Y. g x \<in> (g \<circ> f) ` C}"
using \<open>closedin X C\<close> assms(2) closedin_subset by fastforce
moreover have "closedin Z ((g \<circ> f) ` C)"
using \<open>closedin X C\<close> cmf closed_map_def by blast
ultimately show "closedin Y (f ` C)"
using assms(3) closedin_continuous_map_preimage by fastforce
qed
text \<open>The following proof is identical to the one above.\<close>
lemma open_map_from_composition_right:
assumes "open_map X Z (g \<circ> f)" "f ` topspace X \<subseteq> topspace Y" "continuous_map Y Z g" "inj_on g (topspace Y)"
shows "open_map X Y f"
unfolding open_map_def
proof (intro strip)
fix C assume "openin X C"
have "\<And>y c. \<lbrakk>y \<in> topspace Y; g y = g (f c); c \<in> C\<rbrakk> \<Longrightarrow> y \<in> f ` C"
using \<open>openin X C\<close> assms openin_subset inj_onD by fastforce
then
have "f ` C = {x \<in> topspace Y. g x \<in> (g \<circ> f) ` C}"
using \<open>openin X C\<close> assms(2) openin_subset by fastforce
moreover have "openin Z ((g \<circ> f) ` C)"
using \<open>openin X C\<close> assms(1) open_map_def by blast
ultimately show "openin Y (f ` C)"
using assms(3) openin_continuous_map_preimage by fastforce
qed
subsection\<open>Quotient maps\<close>
definition quotient_map where
"quotient_map X X' f \<longleftrightarrow>
f ` (topspace X) = topspace X' \<and>
(\<forall>U. U \<subseteq> topspace X' \<longrightarrow> (openin X {x. x \<in> topspace X \<and> f x \<in> U} \<longleftrightarrow> openin X' U))"
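text \<open>Thus a quotient map is a surjection of topspace X onto topspace X' under which a subset
of topspace X' is open exactly when its preimage is open; equivalently, X' carries the finest
topology making f continuous.\<close>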
lemma quotient_map_eq:
assumes "quotient_map X X' f" "\<And>x. x \<in> topspace X \<Longrightarrow> f x = g x"
shows "quotient_map X X' g"
by (smt (verit) Collect_cong assms image_cong quotient_map_def)
lemma quotient_map_compose:
assumes f: "quotient_map X X' f" and g: "quotient_map X' X'' g"
shows "quotient_map X X'' (g \<circ> f)"
unfolding quotient_map_def
proof (intro conjI allI impI)
show "(g \<circ> f) ` topspace X = topspace X''"
using assms by (simp only: image_comp [symmetric]) (simp add: quotient_map_def)
next
fix U''
assume U'': "U'' \<subseteq> topspace X''"
define U' where "U' \<equiv> {y \<in> topspace X'. g y \<in> U''}"
have "U' \<subseteq> topspace X'"
by (auto simp add: U'_def)
then have U': "openin X {x \<in> topspace X. f x \<in> U'} = openin X' U'"
using assms unfolding quotient_map_def by simp
have "{x \<in> topspace X. f x \<in> topspace X' \<and> g (f x) \<in> U''} = {x \<in> topspace X. (g \<circ> f) x \<in> U''}"
using f quotient_map_def by fastforce
then show "openin X {x \<in> topspace X. (g \<circ> f) x \<in> U''} = openin X'' U''"
by (smt (verit, best) Collect_cong U' U'_def U'' g mem_Collect_eq quotient_map_def)
qed
lemma quotient_map_from_composition:
assumes f: "continuous_map X X' f" and g: "continuous_map X' X'' g" and gf: "quotient_map X X'' (g \<circ> f)"
shows "quotient_map X' X'' g"
unfolding quotient_map_def
proof (intro conjI allI impI)
show "g ` topspace X' = topspace X''"
using assms unfolding continuous_map_def quotient_map_def by fastforce
next
fix U'' :: "'c set"
assume U'': "U'' \<subseteq> topspace X''"
have eq: "{x \<in> topspace X. g (f x) \<in> U''} = {x \<in> topspace X. f x \<in> {y. y \<in> topspace X' \<and> g y \<in> U''}}"
using continuous_map_def f by fastforce
show "openin X' {x \<in> topspace X'. g x \<in> U''} = openin X'' U''"
using assms unfolding continuous_map_def quotient_map_def
by (metis (mono_tags, lifting) Collect_cong U'' comp_apply eq)
qed
lemma quotient_imp_continuous_map:
"quotient_map X X' f \<Longrightarrow> continuous_map X X' f"
by (simp add: continuous_map openin_subset quotient_map_def)
lemma quotient_imp_surjective_map:
"quotient_map X X' f \<Longrightarrow> f ` (topspace X) = topspace X'"
by (simp add: quotient_map_def)
lemma quotient_map_closedin:
"quotient_map X X' f \<longleftrightarrow>
f ` (topspace X) = topspace X' \<and>
(\<forall>U. U \<subseteq> topspace X' \<longrightarrow> (closedin X {x. x \<in> topspace X \<and> f x \<in> U} \<longleftrightarrow> closedin X' U))"
proof -
have eq: "(topspace X - {x \<in> topspace X. f x \<in> U'}) = {x \<in> topspace X. f x \<in> topspace X' \<and> f x \<notin> U'}"
if "f ` topspace X = topspace X'" "U' \<subseteq> topspace X'" for U'
using that by auto
have "(\<forall>U\<subseteq>topspace X'. openin X {x \<in> topspace X. f x \<in> U} = openin X' U) =
(\<forall>U\<subseteq>topspace X'. closedin X {x \<in> topspace X. f x \<in> U} = closedin X' U)"
if "f ` topspace X = topspace X'"
proof (rule iffI; intro allI impI subsetI)
fix U'
assume *[rule_format]: "\<forall>U\<subseteq>topspace X'. openin X {x \<in> topspace X. f x \<in> U} = openin X' U"
and U': "U' \<subseteq> topspace X'"
show "closedin X {x \<in> topspace X. f x \<in> U'} = closedin X' U'"
using U' by (auto simp add: closedin_def simp flip: * [of "topspace X' - U'"] eq [OF that])
next
fix U' :: "'b set"
assume *[rule_format]: "\<forall>U\<subseteq>topspace X'. closedin X {x \<in> topspace X. f x \<in> U} = closedin X' U"
and U': "U' \<subseteq> topspace X'"
show "openin X {x \<in> topspace X. f x \<in> U'} = openin X' U'"
using U' by (auto simp add: openin_closedin_eq simp flip: * [of "topspace X' - U'"] eq [OF that])
qed
then show ?thesis
unfolding quotient_map_def by force
qed
lemma continuous_open_imp_quotient_map:
assumes "continuous_map X X' f" and om: "open_map X X' f" and feq: "f ` (topspace X) = topspace X'"
shows "quotient_map X X' f"
proof -
{ fix U
assume U: "U \<subseteq> topspace X'" and "openin X {x \<in> topspace X. f x \<in> U}"
then have ope: "openin X' (f ` {x \<in> topspace X. f x \<in> U})"
using om unfolding open_map_def by blast
then have "openin X' U"
using U feq by (subst openin_subopen) force
}
moreover have "openin X {x \<in> topspace X. f x \<in> U}" if "U \<subseteq> topspace X'" and "openin X' U" for U
using that assms unfolding continuous_map_def by blast
ultimately show ?thesis
unfolding quotient_map_def using assms by blast
qed
lemma continuous_closed_imp_quotient_map:
assumes "continuous_map X X' f" and om: "closed_map X X' f" and feq: "f ` (topspace X) = topspace X'"
shows "quotient_map X X' f"
proof -
have "f ` {x \<in> topspace X. f x \<in> U} = U" if "U \<subseteq> topspace X'" for U
using that feq by auto
with assms show ?thesis
unfolding quotient_map_closedin closed_map_def continuous_map_closedin by auto
qed
lemma continuous_open_quotient_map:
"\<lbrakk>continuous_map X X' f; open_map X X' f\<rbrakk> \<Longrightarrow> quotient_map X X' f \<longleftrightarrow> f ` (topspace X) = topspace X'"
by (meson continuous_open_imp_quotient_map quotient_map_def)
lemma continuous_closed_quotient_map:
"\<lbrakk>continuous_map X X' f; closed_map X X' f\<rbrakk> \<Longrightarrow> quotient_map X X' f \<longleftrightarrow> f ` (topspace X) = topspace X'"
by (meson continuous_closed_imp_quotient_map quotient_map_def)
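text \<open>A quick sanity check, not part of the original development: the identity is a quotient
map of any space onto itself. The one-line proof below mirrors the argument used for the
identity homeomorphism later in this section, relying on closed_map_id together with
continuous_closed_imp_quotient_map above.\<close>
lemma "quotient_map X X id"
  by (simp add: closed_map_id continuous_closed_imp_quotient_map)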
lemma injective_quotient_map:
assumes "inj_on f (topspace X)"
shows "quotient_map X X' f \<longleftrightarrow>
continuous_map X X' f \<and> open_map X X' f \<and> closed_map X X' f \<and> f ` (topspace X) = topspace X'"
(is "?lhs = ?rhs")
proof
assume L: ?lhs
have om: "open_map X X' f"
proof (clarsimp simp add: open_map_def)
fix U
assume "openin X U"
then have "U \<subseteq> topspace X"
by (simp add: openin_subset)
moreover have "{x \<in> topspace X. f x \<in> f ` U} = U"
using \<open>U \<subseteq> topspace X\<close> assms inj_onD by fastforce
ultimately show "openin X' (f ` U)"
using L unfolding quotient_map_def
by (metis (no_types, lifting) Collect_cong \<open>openin X U\<close> image_mono)
qed
then have "closed_map X X' f"
by (simp add: L assms bijective_open_imp_closed_map quotient_imp_surjective_map)
then show ?rhs
using L om by (simp add: quotient_imp_continuous_map quotient_imp_surjective_map)
next
assume ?rhs
then show ?lhs
by (simp add: continuous_closed_imp_quotient_map)
qed
lemma continuous_compose_quotient_map:
assumes f: "quotient_map X X' f" and g: "continuous_map X X'' (g \<circ> f)"
shows "continuous_map X' X'' g"
unfolding quotient_map_def continuous_map_def
proof (intro conjI ballI allI impI)
show "\<And>x'. x' \<in> topspace X' \<Longrightarrow> g x' \<in> topspace X''"
using assms unfolding quotient_map_def
by (metis (no_types, opaque_lifting) continuous_map_image_subset_topspace image_comp image_subset_iff)
next
fix U'' :: "'c set"
assume U'': "openin X'' U''"
have "f ` topspace X = topspace X'"
by (simp add: f quotient_imp_surjective_map)
then have eq: "{x \<in> topspace X. f x \<in> topspace X' \<and> g (f x) \<in> U} = {x \<in> topspace X. g (f x) \<in> U}" for U
by auto
have "openin X {x \<in> topspace X. f x \<in> topspace X' \<and> g (f x) \<in> U''}"
unfolding eq using U'' g openin_continuous_map_preimage by fastforce
then have *: "openin X {x \<in> topspace X. f x \<in> {x \<in> topspace X'. g x \<in> U''}}"
by auto
show "openin X' {x \<in> topspace X'. g x \<in> U''}"
using f unfolding quotient_map_def
by (metis (no_types) Collect_subset *)
qed
lemma continuous_compose_quotient_map_eq:
"quotient_map X X' f \<Longrightarrow> continuous_map X X'' (g \<circ> f) \<longleftrightarrow> continuous_map X' X'' g"
using continuous_compose_quotient_map continuous_map_compose quotient_imp_continuous_map by blast
lemma quotient_map_compose_eq:
"quotient_map X X' f \<Longrightarrow> quotient_map X X'' (g \<circ> f) \<longleftrightarrow> quotient_map X' X'' g"
by (meson continuous_compose_quotient_map_eq quotient_imp_continuous_map quotient_map_compose quotient_map_from_composition)
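text \<open>So quotient maps compose, and when f is itself a quotient map, g \<circ> f is a quotient map
exactly when g is.\<close>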
lemma quotient_map_restriction:
assumes quo: "quotient_map X Y f" and U: "{x \<in> topspace X. f x \<in> V} = U" and disj: "openin Y V \<or> closedin Y V"
shows "quotient_map (subtopology X U) (subtopology Y V) f"
using disj
proof
assume V: "openin Y V"
with U have sub: "U \<subseteq> topspace X" "V \<subseteq> topspace Y"
by (auto simp: openin_subset)
have fim: "f ` topspace X = topspace Y"
and Y: "\<And>U. U \<subseteq> topspace Y \<Longrightarrow> openin X {x \<in> topspace X. f x \<in> U} = openin Y U"
using quo unfolding quotient_map_def by auto
have "openin X U"
using U V Y sub(2) by blast
show ?thesis
unfolding quotient_map_def
proof (intro conjI allI impI)
show "f ` topspace (subtopology X U) = topspace (subtopology Y V)"
using sub U fim by (auto)
next
fix Y' :: "'b set"
assume "Y' \<subseteq> topspace (subtopology Y V)"
then have "Y' \<subseteq> topspace Y" "Y' \<subseteq> V"
by (simp_all)
then have eq: "{x \<in> topspace X. x \<in> U \<and> f x \<in> Y'} = {x \<in> topspace X. f x \<in> Y'}"
using U by blast
then show "openin (subtopology X U) {x \<in> topspace (subtopology X U). f x \<in> Y'} = openin (subtopology Y V) Y'"
using U V Y \<open>openin X U\<close> \<open>Y' \<subseteq> topspace Y\<close> \<open>Y' \<subseteq> V\<close>
by (simp add: openin_open_subtopology eq) (auto simp: openin_closedin_eq)
qed
next
assume V: "closedin Y V"
with U have sub: "U \<subseteq> topspace X" "V \<subseteq> topspace Y"
by (auto simp: closedin_subset)
have fim: "f ` topspace X = topspace Y"
and Y: "\<And>U. U \<subseteq> topspace Y \<Longrightarrow> closedin X {x \<in> topspace X. f x \<in> U} = closedin Y U"
using quo unfolding quotient_map_closedin by auto
have "closedin X U"
using U V Y sub(2) by blast
show ?thesis
unfolding quotient_map_closedin
proof (intro conjI allI impI)
show "f ` topspace (subtopology X U) = topspace (subtopology Y V)"
using sub U fim by (auto)
next
fix Y' :: "'b set"
assume "Y' \<subseteq> topspace (subtopology Y V)"
then have "Y' \<subseteq> topspace Y" "Y' \<subseteq> V"
by (simp_all)
then have eq: "{x \<in> topspace X. x \<in> U \<and> f x \<in> Y'} = {x \<in> topspace X. f x \<in> Y'}"
using U by blast
then show "closedin (subtopology X U) {x \<in> topspace (subtopology X U). f x \<in> Y'} = closedin (subtopology Y V) Y'"
using U V Y \<open>closedin X U\<close> \<open>Y' \<subseteq> topspace Y\<close> \<open>Y' \<subseteq> V\<close>
by (simp add: closedin_closed_subtopology eq) (auto simp: closedin_def)
qed
qed
lemma quotient_map_saturated_open:
"quotient_map X Y f \<longleftrightarrow>
continuous_map X Y f \<and> f ` (topspace X) = topspace Y \<and>
(\<forall>U. openin X U \<and> {x \<in> topspace X. f x \<in> f ` U} \<subseteq> U \<longrightarrow> openin Y (f ` U))"
(is "?lhs = ?rhs")
proof
assume L: ?lhs
then have fim: "f ` topspace X = topspace Y"
and Y: "\<And>U. U \<subseteq> topspace Y \<Longrightarrow> openin Y U = openin X {x \<in> topspace X. f x \<in> U}"
unfolding quotient_map_def by auto
show ?rhs
proof (intro conjI allI impI)
show "continuous_map X Y f"
by (simp add: L quotient_imp_continuous_map)
show "f ` topspace X = topspace Y"
by (simp add: fim)
next
fix U :: "'a set"
assume U: "openin X U \<and> {x \<in> topspace X. f x \<in> f ` U} \<subseteq> U"
then have sub: "f ` U \<subseteq> topspace Y" and eq: "{x \<in> topspace X. f x \<in> f ` U} = U"
using fim openin_subset by fastforce+
show "openin Y (f ` U)"
by (simp add: sub Y eq U)
qed
next
assume ?rhs
then have YX: "\<And>U. openin Y U \<Longrightarrow> openin X {x \<in> topspace X. f x \<in> U}"
and fim: "f ` topspace X = topspace Y"
and XY: "\<And>U. \<lbrakk>openin X U; {x \<in> topspace X. f x \<in> f ` U} \<subseteq> U\<rbrakk> \<Longrightarrow> openin Y (f ` U)"
by (auto simp: quotient_map_def continuous_map_def)
show ?lhs
proof (simp add: quotient_map_def fim, intro allI impI iffI)
fix U :: "'b set"
assume "U \<subseteq> topspace Y" and X: "openin X {x \<in> topspace X. f x \<in> U}"
have feq: "f ` {x \<in> topspace X. f x \<in> U} = U"
using \<open>U \<subseteq> topspace Y\<close> fim by auto
show "openin Y U"
using XY [OF X] by (simp add: feq)
next
fix U :: "'b set"
assume "U \<subseteq> topspace Y" and Y: "openin Y U"
show "openin X {x \<in> topspace X. f x \<in> U}"
by (metis YX [OF Y])
qed
qed
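text \<open>Call a set U saturated for f when it contains the whole preimage of its image. The lemma
above, and its closed analogue below, characterise quotient maps as the continuous surjections
that take saturated open (resp. closed) sets to open (resp. closed) sets.\<close>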
lemma quotient_map_saturated_closed:
"quotient_map X Y f \<longleftrightarrow>
continuous_map X Y f \<and> f ` (topspace X) = topspace Y \<and>
(\<forall>U. closedin X U \<and> {x \<in> topspace X. f x \<in> f ` U} \<subseteq> U \<longrightarrow> closedin Y (f ` U))"
(is "?lhs = ?rhs")
proof
assume L: ?lhs
then obtain fim: "f ` topspace X = topspace Y"
and Y: "\<And>U. U \<subseteq> topspace Y \<Longrightarrow> closedin Y U = closedin X {x \<in> topspace X. f x \<in> U}"
by (simp add: quotient_map_closedin)
show ?rhs
proof (intro conjI allI impI)
show "continuous_map X Y f"
by (simp add: L quotient_imp_continuous_map)
show "f ` topspace X = topspace Y"
by (simp add: fim)
next
fix U :: "'a set"
assume U: "closedin X U \<and> {x \<in> topspace X. f x \<in> f ` U} \<subseteq> U"
then have sub: "f ` U \<subseteq> topspace Y" and eq: "{x \<in> topspace X. f x \<in> f ` U} = U"
using fim closedin_subset by fastforce+
show "closedin Y (f ` U)"
by (simp add: sub Y eq U)
qed
next
assume ?rhs
then obtain YX: "\<And>U. closedin Y U \<Longrightarrow> closedin X {x \<in> topspace X. f x \<in> U}"
and fim: "f ` topspace X = topspace Y"
and XY: "\<And>U. \<lbrakk>closedin X U; {x \<in> topspace X. f x \<in> f ` U} \<subseteq> U\<rbrakk> \<Longrightarrow> closedin Y (f ` U)"
by (simp add: continuous_map_closedin)
show ?lhs
proof (simp add: quotient_map_closedin fim, intro allI impI iffI)
fix U :: "'b set"
assume "U \<subseteq> topspace Y" and X: "closedin X {x \<in> topspace X. f x \<in> U}"
have feq: "f ` {x \<in> topspace X. f x \<in> U} = U"
using \<open>U \<subseteq> topspace Y\<close> fim by auto
show "closedin Y U"
using XY [OF X] by (simp add: feq)
next
fix U :: "'b set"
assume "U \<subseteq> topspace Y" and Y: "closedin Y U"
show "closedin X {x \<in> topspace X. f x \<in> U}"
by (metis YX [OF Y])
qed
qed
lemma quotient_map_onto_image:
assumes "f ` topspace X \<subseteq> topspace Y" and U: "\<And>U. U \<subseteq> topspace Y \<Longrightarrow> openin X {x \<in> topspace X. f x \<in> U} = openin Y U"
shows "quotient_map X (subtopology Y (f ` topspace X)) f"
unfolding quotient_map_def topspace_subtopology
proof (intro conjI strip)
fix U
assume "U \<subseteq> topspace Y \<inter> f ` topspace X"
with U have "openin X {x \<in> topspace X. f x \<in> U} \<Longrightarrow> \<exists>x. openin Y x \<and> U = f ` topspace X \<inter> x"
by fastforce
moreover have "\<exists>x. openin Y x \<and> U = f ` topspace X \<inter> x \<Longrightarrow> openin X {x \<in> topspace X. f x \<in> U}"
by (metis (mono_tags, lifting) Collect_cong IntE IntI U image_eqI openin_subset)
ultimately show "openin X {x \<in> topspace X. f x \<in> U} = openin (subtopology Y (f ` topspace X)) U"
by (force simp: openin_subtopology_alt image_iff)
qed (use assms in auto)
lemma quotient_map_lift_exists:
assumes f: "quotient_map X Y f" and h: "continuous_map X Z h"
and "\<And>x y. \<lbrakk>x \<in> topspace X; y \<in> topspace X; f x = f y\<rbrakk> \<Longrightarrow> h x = h y"
obtains g where "continuous_map Y Z g" "g ` topspace Y = h ` topspace X"
"\<And>x. x \<in> topspace X \<Longrightarrow> g(f x) = h x"
proof -
obtain g where g: "\<And>x. x \<in> topspace X \<Longrightarrow> h x = g(f x)"
using function_factors_left_gen[of "\<lambda>x. x \<in> topspace X" f h] assms by blast
show ?thesis
proof
show "g ` topspace Y = h ` topspace X"
using f g by (force dest!: quotient_imp_surjective_map)
show "continuous_map Y Z g"
by (smt (verit) f g h continuous_compose_quotient_map_eq continuous_map_eq o_def)
qed (simp add: g)
qed
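text \<open>This is the universal property of quotient maps: every continuous map out of X that is
constant on the fibres of f factors through f, and the factorisation is determined on
topspace Y because f maps topspace X onto it.\<close>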
subsection\<open>Separated Sets\<close>
definition separatedin :: "'a topology \<Rightarrow> 'a set \<Rightarrow> 'a set \<Rightarrow> bool"
where "separatedin X S T \<equiv>
S \<subseteq> topspace X \<and> T \<subseteq> topspace X \<and>
S \<inter> X closure_of T = {} \<and> T \<inter> X closure_of S = {}"
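text \<open>Two subsets of the topspace are separated when neither meets the closure of the other.
For instance, in the Euclidean topology on the reals, {..<0} and {0<..} are separated, whereas
{..<0} and {0..} are disjoint but not separated, since 0 lies in the closure of {..<0}.\<close>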
lemma separatedin_empty [simp]:
"separatedin X S {} \<longleftrightarrow> S \<subseteq> topspace X"
"separatedin X {} S \<longleftrightarrow> S \<subseteq> topspace X"
by (simp_all add: separatedin_def)
lemma separatedin_refl [simp]:
"separatedin X S S \<longleftrightarrow> S = {}"
by (metis closure_of_subset empty_subsetI inf.orderE separatedin_def)
lemma separatedin_sym:
"separatedin X S T \<longleftrightarrow> separatedin X T S"
by (auto simp: separatedin_def)
lemma separatedin_imp_disjoint:
"separatedin X S T \<Longrightarrow> disjnt S T"
by (meson closure_of_subset disjnt_def disjnt_subset2 separatedin_def)
lemma separatedin_mono:
"\<lbrakk>separatedin X S T; S' \<subseteq> S; T' \<subseteq> T\<rbrakk> \<Longrightarrow> separatedin X S' T'"
unfolding separatedin_def
using closure_of_mono by blast
lemma separatedin_open_sets:
"\<lbrakk>openin X S; openin X T\<rbrakk> \<Longrightarrow> separatedin X S T \<longleftrightarrow> disjnt S T"
unfolding disjnt_def separatedin_def
by (auto simp: openin_Int_closure_of_eq_empty openin_subset)
lemma separatedin_closed_sets:
"\<lbrakk>closedin X S; closedin X T\<rbrakk> \<Longrightarrow> separatedin X S T \<longleftrightarrow> disjnt S T"
unfolding closure_of_eq disjnt_def separatedin_def
by (metis closedin_def closure_of_eq inf_commute)
lemma separatedin_subtopology:
"separatedin (subtopology X U) S T \<longleftrightarrow> S \<subseteq> U \<and> T \<subseteq> U \<and> separatedin X S T"
by (auto simp: separatedin_def closure_of_subtopology Int_ac disjoint_iff elim!: inf.orderE)
lemma separatedin_discrete_topology:
"separatedin (discrete_topology U) S T \<longleftrightarrow> S \<subseteq> U \<and> T \<subseteq> U \<and> disjnt S T"
by (metis openin_discrete_topology separatedin_def separatedin_open_sets topspace_discrete_topology)
lemma separated_eq_distinguishable:
"separatedin X {x} {y} \<longleftrightarrow>
x \<in> topspace X \<and> y \<in> topspace X \<and>
(\<exists>U. openin X U \<and> x \<in> U \<and> (y \<notin> U)) \<and>
(\<exists>v. openin X v \<and> y \<in> v \<and> (x \<notin> v))"
by (force simp: separatedin_def closure_of_def)
lemma separatedin_Un [simp]:
"separatedin X S (T \<union> U) \<longleftrightarrow> separatedin X S T \<and> separatedin X S U"
"separatedin X (S \<union> T) U \<longleftrightarrow> separatedin X S U \<and> separatedin X T U"
by (auto simp: separatedin_def)
lemma separatedin_Union:
"finite \<F> \<Longrightarrow> separatedin X S (\<Union>\<F>) \<longleftrightarrow> S \<subseteq> topspace X \<and> (\<forall>T \<in> \<F>. separatedin X S T)"
"finite \<F> \<Longrightarrow> separatedin X (\<Union>\<F>) S \<longleftrightarrow> (\<forall>T \<in> \<F>. separatedin X S T) \<and> S \<subseteq> topspace X"
by (auto simp: separatedin_def closure_of_Union)
lemma separatedin_openin_diff:
"\<lbrakk>openin X S; openin X T\<rbrakk> \<Longrightarrow> separatedin X (S - T) (T - S)"
unfolding separatedin_def
by (metis Diff_Int_distrib2 Diff_disjoint Diff_empty Diff_mono empty_Diff empty_subsetI openin_Int_closure_of_eq_empty openin_subset)
lemma separatedin_closedin_diff:
assumes "closedin X S" "closedin X T"
shows "separatedin X (S - T) (T - S)"
proof -
have "S - T \<subseteq> topspace X" "T - S \<subseteq> topspace X"
using assms closedin_subset by auto
with assms show ?thesis
by (simp add: separatedin_def Diff_Int_distrib2 closure_of_minimal inf_absorb2)
qed
lemma separation_closedin_Un_gen:
"separatedin X S T \<longleftrightarrow>
S \<subseteq> topspace X \<and> T \<subseteq> topspace X \<and> disjnt S T \<and>
closedin (subtopology X (S \<union> T)) S \<and>
closedin (subtopology X (S \<union> T)) T"
by (auto simp add: separatedin_def closedin_Int_closure_of disjnt_iff dest: closure_of_subset)
lemma separation_openin_Un_gen:
"separatedin X S T \<longleftrightarrow>
S \<subseteq> topspace X \<and> T \<subseteq> topspace X \<and> disjnt S T \<and>
openin (subtopology X (S \<union> T)) S \<and>
openin (subtopology X (S \<union> T)) T"
unfolding openin_closedin_eq topspace_subtopology separation_closedin_Un_gen disjnt_def
by (auto simp: Diff_triv Int_commute Un_Diff inf_absorb1 topspace_def)
lemma separatedin_full:
"S \<union> T = topspace X
\<Longrightarrow> separatedin X S T \<longleftrightarrow> disjnt S T \<and> closedin X S \<and> openin X S \<and> closedin X T \<and> openin X T"
by (metis separatedin_open_sets separation_closedin_Un_gen separation_openin_Un_gen subtopology_topspace)
subsection\<open>Homeomorphisms\<close>
text\<open>(1-way and 2-way versions may be useful in places)\<close>
definition homeomorphic_map :: "'a topology \<Rightarrow> 'b topology \<Rightarrow> ('a \<Rightarrow> 'b) \<Rightarrow> bool"
where
"homeomorphic_map X Y f \<equiv> quotient_map X Y f \<and> inj_on f (topspace X)"
definition homeomorphic_maps :: "'a topology \<Rightarrow> 'b topology \<Rightarrow> ('a \<Rightarrow> 'b) \<Rightarrow> ('b \<Rightarrow> 'a) \<Rightarrow> bool"
where
"homeomorphic_maps X Y f g \<equiv>
continuous_map X Y f \<and> continuous_map Y X g \<and>
(\<forall>x \<in> topspace X. g(f x) = x) \<and> (\<forall>y \<in> topspace Y. f(g y) = y)"
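text \<open>homeomorphic_map is the one-way notion, an injective quotient map; homeomorphic_maps is
the two-way notion, packaging a map with an explicit continuous two-sided inverse. The lemmas
homeomorphic_maps_map and homeomorphic_map_maps below show that the two notions match up.\<close>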
lemma homeomorphic_map_eq:
"\<lbrakk>homeomorphic_map X Y f; \<And>x. x \<in> topspace X \<Longrightarrow> f x = g x\<rbrakk> \<Longrightarrow> homeomorphic_map X Y g"
by (meson homeomorphic_map_def inj_on_cong quotient_map_eq)
lemma homeomorphic_maps_eq:
"\<lbrakk>homeomorphic_maps X Y f g;
\<And>x. x \<in> topspace X \<Longrightarrow> f x = f' x; \<And>y. y \<in> topspace Y \<Longrightarrow> g y = g' y\<rbrakk>
\<Longrightarrow> homeomorphic_maps X Y f' g'"
unfolding homeomorphic_maps_def
by (metis continuous_map_eq continuous_map_eq_image_closure_subset_gen image_subset_iff)
lemma homeomorphic_maps_sym:
"homeomorphic_maps X Y f g \<longleftrightarrow> homeomorphic_maps Y X g f"
by (auto simp: homeomorphic_maps_def)
lemma homeomorphic_maps_id:
"homeomorphic_maps X Y id id \<longleftrightarrow> Y = X" (is "?lhs = ?rhs")
proof
assume L: ?lhs
then have "topspace X = topspace Y"
by (auto simp: homeomorphic_maps_def continuous_map_def)
with L show ?rhs
unfolding homeomorphic_maps_def
by (metis topology_finer_continuous_id topology_eq)
next
assume ?rhs
then show ?lhs
unfolding homeomorphic_maps_def by auto
qed
lemma homeomorphic_map_id [simp]: "homeomorphic_map X Y id \<longleftrightarrow> Y = X"
(is "?lhs = ?rhs")
proof
assume L: ?lhs
then have eq: "topspace X = topspace Y"
by (auto simp: homeomorphic_map_def continuous_map_def quotient_map_def)
then have "\<And>S. openin X S \<longrightarrow> openin Y S"
by (meson L homeomorphic_map_def injective_quotient_map topology_finer_open_id)
then show ?rhs
using L unfolding homeomorphic_map_def
by (metis eq quotient_imp_continuous_map topology_eq topology_finer_continuous_id)
next
assume ?rhs
then show ?lhs
unfolding homeomorphic_map_def
by (simp add: closed_map_id continuous_closed_imp_quotient_map)
qed
lemma homeomorphic_map_compose:
assumes "homeomorphic_map X Y f" "homeomorphic_map Y X'' g"
shows "homeomorphic_map X X'' (g \<circ> f)"
proof -
have "inj_on g (f ` topspace X)"
by (metis (no_types) assms homeomorphic_map_def quotient_imp_surjective_map)
then show ?thesis
using assms by (meson comp_inj_on homeomorphic_map_def quotient_map_compose_eq)
qed
lemma homeomorphic_maps_compose:
"homeomorphic_maps X Y f h \<and>
homeomorphic_maps Y X'' g k
\<Longrightarrow> homeomorphic_maps X X'' (g \<circ> f) (h \<circ> k)"
unfolding homeomorphic_maps_def
by (auto simp: continuous_map_compose; simp add: continuous_map_def)
lemma homeomorphic_eq_everything_map:
"homeomorphic_map X Y f \<longleftrightarrow>
continuous_map X Y f \<and> open_map X Y f \<and> closed_map X Y f \<and>
f ` (topspace X) = topspace Y \<and> inj_on f (topspace X)"
unfolding homeomorphic_map_def
by (force simp: injective_quotient_map intro: injective_quotient_map)
lemma homeomorphic_imp_continuous_map:
"homeomorphic_map X Y f \<Longrightarrow> continuous_map X Y f"
by (simp add: homeomorphic_eq_everything_map)
lemma homeomorphic_imp_open_map:
"homeomorphic_map X Y f \<Longrightarrow> open_map X Y f"
by (simp add: homeomorphic_eq_everything_map)
lemma homeomorphic_imp_closed_map:
"homeomorphic_map X Y f \<Longrightarrow> closed_map X Y f"
by (simp add: homeomorphic_eq_everything_map)
lemma homeomorphic_imp_surjective_map:
"homeomorphic_map X Y f \<Longrightarrow> f ` (topspace X) = topspace Y"
by (simp add: homeomorphic_eq_everything_map)
lemma homeomorphic_imp_injective_map:
"homeomorphic_map X Y f \<Longrightarrow> inj_on f (topspace X)"
by (simp add: homeomorphic_eq_everything_map)
lemma bijective_open_imp_homeomorphic_map:
"\<lbrakk>continuous_map X Y f; open_map X Y f; f ` (topspace X) = topspace Y; inj_on f (topspace X)\<rbrakk>
\<Longrightarrow> homeomorphic_map X Y f"
by (simp add: homeomorphic_map_def continuous_open_imp_quotient_map)
lemma bijective_closed_imp_homeomorphic_map:
"\<lbrakk>continuous_map X Y f; closed_map X Y f; f ` (topspace X) = topspace Y; inj_on f (topspace X)\<rbrakk>
\<Longrightarrow> homeomorphic_map X Y f"
by (simp add: continuous_closed_quotient_map homeomorphic_map_def)
lemma open_eq_continuous_inverse_map:
assumes X: "\<And>x. x \<in> topspace X \<Longrightarrow> f x \<in> topspace Y \<and> g(f x) = x"
and Y: "\<And>y. y \<in> topspace Y \<Longrightarrow> g y \<in> topspace X \<and> f(g y) = y"
shows "open_map X Y f \<longleftrightarrow> continuous_map Y X g"
proof -
have eq: "{x \<in> topspace Y. g x \<in> U} = f ` U" if "openin X U" for U
using openin_subset [OF that] by (force simp: X Y image_iff)
show ?thesis
by (auto simp: Y open_map_def continuous_map_def eq)
qed
lemma closed_eq_continuous_inverse_map:
assumes X: "\<And>x. x \<in> topspace X \<Longrightarrow> f x \<in> topspace Y \<and> g(f x) = x"
and Y: "\<And>y. y \<in> topspace Y \<Longrightarrow> g y \<in> topspace X \<and> f(g y) = y"
shows "closed_map X Y f \<longleftrightarrow> continuous_map Y X g"
proof -
have eq: "{x \<in> topspace Y. g x \<in> U} = f ` U" if "closedin X U" for U
using closedin_subset [OF that] by (force simp: X Y image_iff)
show ?thesis
by (auto simp: Y closed_map_def continuous_map_closedin eq)
qed
lemma homeomorphic_maps_map:
"homeomorphic_maps X Y f g \<longleftrightarrow>
homeomorphic_map X Y f \<and> homeomorphic_map Y X g \<and>
(\<forall>x \<in> topspace X. g(f x) = x) \<and> (\<forall>y \<in> topspace Y. f(g y) = y)"
(is "?lhs = ?rhs")
proof
assume ?lhs
then have L: "continuous_map X Y f" "continuous_map Y X g" "\<forall>x\<in>topspace X. g (f x) = x" "\<forall>x'\<in>topspace Y. f (g x') = x'"
by (auto simp: homeomorphic_maps_def)
show ?rhs
proof (intro conjI bijective_open_imp_homeomorphic_map L)
show "open_map X Y f"
using L using open_eq_continuous_inverse_map [of concl: X Y f g] by (simp add: continuous_map_def)
show "open_map Y X g"
using L using open_eq_continuous_inverse_map [of concl: Y X g f] by (simp add: continuous_map_def)
show "f ` topspace X = topspace Y" "g ` topspace Y = topspace X"
using L by (force simp: continuous_map_closedin)+
show "inj_on f (topspace X)" "inj_on g (topspace Y)"
using L unfolding inj_on_def by metis+
qed
next
assume ?rhs
then show ?lhs
by (auto simp: homeomorphic_maps_def homeomorphic_imp_continuous_map)
qed
lemma homeomorphic_maps_imp_map:
"homeomorphic_maps X Y f g \<Longrightarrow> homeomorphic_map X Y f"
using homeomorphic_maps_map by blast
lemma homeomorphic_map_maps:
"homeomorphic_map X Y f \<longleftrightarrow> (\<exists>g. homeomorphic_maps X Y f g)"
(is "?lhs = ?rhs")
proof
assume ?lhs
then have L: "continuous_map X Y f" "open_map X Y f" "closed_map X Y f"
"f ` (topspace X) = topspace Y" "inj_on f (topspace X)"
by (auto simp: homeomorphic_eq_everything_map)
have X: "\<And>x. x \<in> topspace X \<Longrightarrow> f x \<in> topspace Y \<and> inv_into (topspace X) f (f x) = x"
using L by auto
have Y: "\<And>y. y \<in> topspace Y \<Longrightarrow> inv_into (topspace X) f y \<in> topspace X \<and> f (inv_into (topspace X) f y) = y"
by (simp add: L f_inv_into_f inv_into_into)
have "homeomorphic_maps X Y f (inv_into (topspace X) f)"
unfolding homeomorphic_maps_def
proof (intro conjI L)
show "continuous_map Y X (inv_into (topspace X) f)"
by (simp add: L X Y flip: open_eq_continuous_inverse_map [where f=f])
next
show "\<forall>x\<in>topspace X. inv_into (topspace X) f (f x) = x"
"\<forall>y\<in>topspace Y. f (inv_into (topspace X) f y) = y"
using X Y by auto
qed
then show ?rhs
by metis
next
assume ?rhs
then show ?lhs
using homeomorphic_maps_map by blast
qed
lemma homeomorphic_maps_involution:
"\<lbrakk>continuous_map X X f; \<And>x. x \<in> topspace X \<Longrightarrow> f(f x) = x\<rbrakk> \<Longrightarrow> homeomorphic_maps X X f f"
by (auto simp: homeomorphic_maps_def)
lemma homeomorphic_map_involution:
"\<lbrakk>continuous_map X X f; \<And>x. x \<in> topspace X \<Longrightarrow> f(f x) = x\<rbrakk> \<Longrightarrow> homeomorphic_map X X f"
using homeomorphic_maps_involution homeomorphic_maps_map by blast
lemma homeomorphic_map_openness:
assumes hom: "homeomorphic_map X Y f" and U: "U \<subseteq> topspace X"
shows "openin Y (f ` U) \<longleftrightarrow> openin X U"
proof -
obtain g where "homeomorphic_maps X Y f g"
using assms by (auto simp: homeomorphic_map_maps)
then have g: "homeomorphic_map Y X g" and gf: "\<And>x. x \<in> topspace X \<Longrightarrow> g(f x) = x"
by (auto simp: homeomorphic_maps_map)
then have "openin X U \<Longrightarrow> openin Y (f ` U)"
using hom homeomorphic_imp_open_map open_map_def by blast
show "openin Y (f ` U) = openin X U"
proof
assume L: "openin Y (f ` U)"
have "U = g ` (f ` U)"
using U gf by force
then show "openin X U"
by (metis L homeomorphic_imp_open_map open_map_def g)
next
assume "openin X U"
then show "openin Y (f ` U)"
using hom homeomorphic_imp_open_map open_map_def by blast
qed
qed
lemma homeomorphic_map_closedness:
assumes hom: "homeomorphic_map X Y f" and U: "U \<subseteq> topspace X"
shows "closedin Y (f ` U) \<longleftrightarrow> closedin X U"
proof -
obtain g where "homeomorphic_maps X Y f g"
using assms by (auto simp: homeomorphic_map_maps)
then have g: "homeomorphic_map Y X g" and gf: "\<And>x. x \<in> topspace X \<Longrightarrow> g(f x) = x"
by (auto simp: homeomorphic_maps_map)
then have "closedin X U \<Longrightarrow> closedin Y (f ` U)"
using hom homeomorphic_imp_closed_map closed_map_def by blast
show "closedin Y (f ` U) = closedin X U"
proof
assume L: "closedin Y (f ` U)"
have "U = g ` (f ` U)"
using U gf by force
then show "closedin X U"
by (metis L homeomorphic_imp_closed_map closed_map_def g)
next
assume "closedin X U"
then show "closedin Y (f ` U)"
using hom homeomorphic_imp_closed_map closed_map_def by blast
qed
qed
lemma homeomorphic_map_openness_eq:
"homeomorphic_map X Y f \<Longrightarrow> openin X U \<longleftrightarrow> U \<subseteq> topspace X \<and> openin Y (f ` U)"
by (meson homeomorphic_map_openness openin_closedin_eq)
lemma homeomorphic_map_closedness_eq:
"homeomorphic_map X Y f \<Longrightarrow> closedin X U \<longleftrightarrow> U \<subseteq> topspace X \<and> closedin Y (f ` U)"
by (meson closedin_subset homeomorphic_map_closedness)
lemma all_openin_homeomorphic_image:
assumes "homeomorphic_map X Y f"
shows "(\<forall>V. openin Y V \<longrightarrow> P V) \<longleftrightarrow> (\<forall>U. openin X U \<longrightarrow> P(f ` U))"
by (metis (no_types, lifting) assms homeomorphic_imp_surjective_map homeomorphic_map_openness openin_subset subset_image_iff)
lemma all_closedin_homeomorphic_image:
assumes "homeomorphic_map X Y f"
shows "(\<forall>V. closedin Y V \<longrightarrow> P V) \<longleftrightarrow> (\<forall>U. closedin X U \<longrightarrow> P(f ` U))" (is "?lhs = ?rhs")
by (metis (no_types, lifting) assms homeomorphic_imp_surjective_map homeomorphic_map_closedness closedin_subset subset_image_iff)
lemma homeomorphic_map_derived_set_of:
assumes hom: "homeomorphic_map X Y f" and S: "S \<subseteq> topspace X"
shows "Y derived_set_of (f ` S) = f ` (X derived_set_of S)"
proof -
have fim: "f ` (topspace X) = topspace Y" and inj: "inj_on f (topspace X)"
using hom by (auto simp: homeomorphic_eq_everything_map)
have iff: "(\<forall>T. x \<in> T \<and> openin X T \<longrightarrow> (\<exists>y. y \<noteq> x \<and> y \<in> S \<and> y \<in> T)) =
(\<forall>T. T \<subseteq> topspace Y \<longrightarrow> f x \<in> T \<longrightarrow> openin Y T \<longrightarrow> (\<exists>y. y \<noteq> f x \<and> y \<in> f ` S \<and> y \<in> T))"
if "x \<in> topspace X" for x
proof -
have \<section>: "(x \<in> T \<and> openin X T) = (T \<subseteq> topspace X \<and> f x \<in> f ` T \<and> openin Y (f ` T))" for T
by (meson hom homeomorphic_map_openness_eq inj inj_on_image_mem_iff that)
moreover have "(\<exists>y. y \<noteq> x \<and> y \<in> S \<and> y \<in> T) = (\<exists>y. y \<noteq> f x \<and> y \<in> f ` S \<and> y \<in> f ` T)" (is "?lhs = ?rhs")
if "T \<subseteq> topspace X \<and> f x \<in> f ` T \<and> openin Y (f ` T)" for T
by (smt (verit, del_insts) S \<open>x \<in> topspace X\<close> image_iff inj inj_on_def subsetD that)
ultimately show ?thesis
by (auto simp flip: fim simp: all_subset_image)
qed
have *: "\<lbrakk>T = f ` S; \<And>x. x \<in> S \<Longrightarrow> P x \<longleftrightarrow> Q(f x)\<rbrakk> \<Longrightarrow> {y. y \<in> T \<and> Q y} = f ` {x \<in> S. P x}" for T S P Q
by auto
show ?thesis
unfolding derived_set_of_def
by (rule *) (use fim iff openin_subset in force)+
qed
lemma homeomorphic_map_closure_of:
assumes hom: "homeomorphic_map X Y f" and S: "S \<subseteq> topspace X"
shows "Y closure_of (f ` S) = f ` (X closure_of S)"
unfolding closure_of
using homeomorphic_imp_surjective_map [OF hom] S
by (auto simp: in_derived_set_of homeomorphic_map_derived_set_of [OF assms])
lemma homeomorphic_map_interior_of:
assumes hom: "homeomorphic_map X Y f" and S: "S \<subseteq> topspace X"
shows "Y interior_of (f ` S) = f ` (X interior_of S)"
proof -
{ fix y
assume "y \<in> topspace Y" and "y \<notin> Y closure_of (topspace Y - f ` S)"
then have "y \<in> f ` (topspace X - X closure_of (topspace X - S))"
using homeomorphic_eq_everything_map [THEN iffD1, OF hom] homeomorphic_map_closure_of [OF hom]
by (metis DiffI Diff_subset S closure_of_subset_topspace inj_on_image_set_diff) }
moreover
{ fix x
assume "x \<in> topspace X"
then have "f x \<in> topspace Y"
using hom homeomorphic_imp_surjective_map by blast }
moreover
{ fix x
assume "x \<in> topspace X" and "x \<notin> X closure_of (topspace X - S)" and "f x \<in> Y closure_of (topspace Y - f ` S)"
then have "False"
using homeomorphic_map_closure_of [OF hom] hom
unfolding homeomorphic_eq_everything_map
by (metis Diff_subset S closure_of_subset_topspace inj_on_image_mem_iff inj_on_image_set_diff)
}
ultimately show ?thesis
by (auto simp: interior_of_closure_of)
qed
lemma homeomorphic_map_frontier_of:
assumes hom: "homeomorphic_map X Y f" and S: "S \<subseteq> topspace X"
shows "Y frontier_of (f ` S) = f ` (X frontier_of S)"
unfolding frontier_of_def
proof (intro equalityI subsetI DiffI)
fix y
assume "y \<in> Y closure_of f ` S - Y interior_of f ` S"
then show "y \<in> f ` (X closure_of S - X interior_of S)"
using S hom homeomorphic_map_closure_of homeomorphic_map_interior_of by fastforce
next
fix y
assume "y \<in> f ` (X closure_of S - X interior_of S)"
then show "y \<in> Y closure_of f ` S"
using S hom homeomorphic_map_closure_of by fastforce
next
fix x
assume "x \<in> f ` (X closure_of S - X interior_of S)"
then obtain y where y: "x = f y" "y \<in> X closure_of S" "y \<notin> X interior_of S"
by blast
then show "x \<notin> Y interior_of f ` S"
using S hom homeomorphic_map_interior_of y(1)
unfolding homeomorphic_map_def
by (smt (verit, ccfv_SIG) in_closure_of inj_on_image_mem_iff interior_of_subset_topspace)
qed
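text \<open>Thus homeomorphisms commute with the derived-set, closure, interior and frontier
operators on subsets of the topspace.\<close>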
lemma homeomorphic_maps_subtopologies:
"\<lbrakk>homeomorphic_maps X Y f g; f ` (topspace X \<inter> S) = topspace Y \<inter> T\<rbrakk>
\<Longrightarrow> homeomorphic_maps (subtopology X S) (subtopology Y T) f g"
unfolding homeomorphic_maps_def
by (force simp: continuous_map_from_subtopology continuous_map_in_subtopology)
lemma homeomorphic_maps_subtopologies_alt:
"\<lbrakk>homeomorphic_maps X Y f g; f ` (topspace X \<inter> S) \<subseteq> T; g ` (topspace Y \<inter> T) \<subseteq> S\<rbrakk>
\<Longrightarrow> homeomorphic_maps (subtopology X S) (subtopology Y T) f g"
unfolding homeomorphic_maps_def
by (force simp: continuous_map_from_subtopology continuous_map_in_subtopology)
lemma homeomorphic_map_subtopologies:
"\<lbrakk>homeomorphic_map X Y f; f ` (topspace X \<inter> S) = topspace Y \<inter> T\<rbrakk>
\<Longrightarrow> homeomorphic_map (subtopology X S) (subtopology Y T) f"
by (meson homeomorphic_map_maps homeomorphic_maps_subtopologies)
lemma homeomorphic_map_subtopologies_alt:
assumes hom: "homeomorphic_map X Y f"
and S: "\<And>x. \<lbrakk>x \<in> topspace X; f x \<in> topspace Y\<rbrakk> \<Longrightarrow> f x \<in> T \<longleftrightarrow> x \<in> S"
shows "homeomorphic_map (subtopology X S) (subtopology Y T) f"
proof -
have "homeomorphic_maps (subtopology X S) (subtopology Y T) f g"
if "homeomorphic_maps X Y f g" for g
proof (rule homeomorphic_maps_subtopologies [OF that])
have "f ` (topspace X \<inter> S) \<subseteq> topspace Y \<inter> T"
using S hom homeomorphic_imp_surjective_map by fastforce
then show "f ` (topspace X \<inter> S) = topspace Y \<inter> T"
using that unfolding homeomorphic_maps_def continuous_map_def
by (smt (verit, del_insts) Int_iff S image_iff subsetI subset_antisym)
qed
then show ?thesis
using hom by (meson homeomorphic_map_maps)
qed
subsection\<open>Relation of homeomorphism between topological spaces\<close>
definition homeomorphic_space (infixr "homeomorphic'_space" 50)
where "X homeomorphic_space Y \<equiv> \<exists>f g. homeomorphic_maps X Y f g"
lemma homeomorphic_space_refl: "X homeomorphic_space X"
by (meson homeomorphic_maps_id homeomorphic_space_def)
lemma homeomorphic_space_sym:
"X homeomorphic_space Y \<longleftrightarrow> Y homeomorphic_space X"
unfolding homeomorphic_space_def by (metis homeomorphic_maps_sym)
lemma homeomorphic_space_trans [trans]:
"\<lbrakk>X1 homeomorphic_space X2; X2 homeomorphic_space X3\<rbrakk> \<Longrightarrow> X1 homeomorphic_space X3"
unfolding homeomorphic_space_def by (metis homeomorphic_maps_compose)
lemma homeomorphic_space:
"X homeomorphic_space Y \<longleftrightarrow> (\<exists>f. homeomorphic_map X Y f)"
by (simp add: homeomorphic_map_maps homeomorphic_space_def)
lemma homeomorphic_maps_imp_homeomorphic_space:
"homeomorphic_maps X Y f g \<Longrightarrow> X homeomorphic_space Y"
unfolding homeomorphic_space_def by metis
lemma homeomorphic_map_imp_homeomorphic_space:
"homeomorphic_map X Y f \<Longrightarrow> X homeomorphic_space Y"
unfolding homeomorphic_map_maps
using homeomorphic_space_def by blast
lemma homeomorphic_empty_space:
"X homeomorphic_space Y \<Longrightarrow> topspace X = {} \<longleftrightarrow> topspace Y = {}"
by (metis homeomorphic_imp_surjective_map homeomorphic_space image_is_empty)
lemma homeomorphic_empty_space_eq:
assumes "topspace X = {}"
shows "X homeomorphic_space Y \<longleftrightarrow> topspace Y = {}"
unfolding homeomorphic_maps_def homeomorphic_space_def
by (metis assms continuous_map_on_empty continuous_map_closedin ex_in_conv)
subsection\<open>Connected topological spaces\<close>
definition connected_space :: "'a topology \<Rightarrow> bool" where
"connected_space X \<equiv>
\<not>(\<exists>E1 E2. openin X E1 \<and> openin X E2 \<and>
topspace X \<subseteq> E1 \<union> E2 \<and> E1 \<inter> E2 = {} \<and> E1 \<noteq> {} \<and> E2 \<noteq> {})"
definition connectedin :: "'a topology \<Rightarrow> 'a set \<Rightarrow> bool" where
"connectedin X S \<equiv> S \<subseteq> topspace X \<and> connected_space (subtopology X S)"
lemma connected_spaceD:
"\<lbrakk>connected_space X;
openin X U; openin X V; topspace X \<subseteq> U \<union> V; U \<inter> V = {}; U \<noteq> {}; V \<noteq> {}\<rbrakk> \<Longrightarrow> False"
by (auto simp: connected_space_def)
lemma connectedin_subset_topspace: "connectedin X S \<Longrightarrow> S \<subseteq> topspace X"
by (simp add: connectedin_def)
lemma connectedin_topspace:
"connectedin X (topspace X) \<longleftrightarrow> connected_space X"
by (simp add: connectedin_def)
lemma connected_space_subtopology:
"connectedin X S \<Longrightarrow> connected_space (subtopology X S)"
by (simp add: connectedin_def)
lemma connectedin_subtopology:
"connectedin (subtopology X S) T \<longleftrightarrow> connectedin X T \<and> T \<subseteq> S"
by (force simp: connectedin_def subtopology_subtopology inf_absorb2)
lemma connected_space_eq:
"connected_space X \<longleftrightarrow>
(\<nexists>E1 E2. openin X E1 \<and> openin X E2 \<and> E1 \<union> E2 = topspace X \<and> E1 \<inter> E2 = {} \<and> E1 \<noteq> {} \<and> E2 \<noteq> {})"
unfolding connected_space_def
by (metis openin_Un openin_subset subset_antisym)
lemma connected_space_closedin:
"connected_space X \<longleftrightarrow>
(\<nexists>E1 E2. closedin X E1 \<and> closedin X E2 \<and> topspace X \<subseteq> E1 \<union> E2 \<and>
E1 \<inter> E2 = {} \<and> E1 \<noteq> {} \<and> E2 \<noteq> {})" (is "?lhs = ?rhs")
proof
assume ?lhs
then have "\<And>E1 E2. \<lbrakk>openin X E1; E1 \<inter> E2 = {}; topspace X \<subseteq> E1 \<union> E2; openin X E2\<rbrakk> \<Longrightarrow> E1 = {} \<or> E2 = {}"
by (simp add: connected_space_def)
then show ?rhs
unfolding connected_space_def
by (metis disjnt_def separatedin_closed_sets separation_openin_Un_gen subtopology_superset)
next
assume R: ?rhs
then show ?lhs
unfolding connected_space_def
by (metis Diff_triv Int_commute separatedin_openin_diff separation_closedin_Un_gen subtopology_superset)
qed
lemma connected_space_closedin_eq:
"connected_space X \<longleftrightarrow>
(\<nexists>E1 E2. closedin X E1 \<and> closedin X E2 \<and>
E1 \<union> E2 = topspace X \<and> E1 \<inter> E2 = {} \<and> E1 \<noteq> {} \<and> E2 \<noteq> {})"
by (metis closedin_Un closedin_def connected_space_closedin subset_antisym)
lemma connected_space_clopen_in:
"connected_space X \<longleftrightarrow>
(\<forall>T. openin X T \<and> closedin X T \<longrightarrow> T = {} \<or> T = topspace X)"
proof -
have eq: "openin X E1 \<and> openin X E2 \<and> E1 \<union> E2 = topspace X \<and> E1 \<inter> E2 = {} \<and> P
\<longleftrightarrow> E2 = topspace X - E1 \<and> openin X E1 \<and> openin X E2 \<and> P" for E1 E2 P
using openin_subset by blast
show ?thesis
unfolding connected_space_eq eq closedin_def
by (auto simp: openin_closedin_eq)
qed
lemma connectedin:
"connectedin X S \<longleftrightarrow>
S \<subseteq> topspace X \<and>
(\<nexists>E1 E2.
openin X E1 \<and> openin X E2 \<and>
S \<subseteq> E1 \<union> E2 \<and> E1 \<inter> E2 \<inter> S = {} \<and> E1 \<inter> S \<noteq> {} \<and> E2 \<inter> S \<noteq> {})" (is "?lhs = ?rhs")
proof -
have *: "(\<exists>E1:: 'a set. \<exists>E2:: 'a set. (\<exists>T1:: 'a set. P1 T1 \<and> E1 = f1 T1) \<and> (\<exists>T2:: 'a set. P2 T2 \<and> E2 = f2 T2) \<and>
R E1 E2) \<longleftrightarrow> (\<exists>T1 T2. P1 T1 \<and> P2 T2 \<and> R(f1 T1) (f2 T2))" for P1 f1 P2 f2 R
by auto
show ?thesis
unfolding connectedin_def connected_space_def openin_subtopology topspace_subtopology *
by (intro conj_cong arg_cong [where f=Not] ex_cong1; blast dest!: openin_subset)
qed
lemma connectedinD:
"\<lbrakk>connectedin X S; openin X E1; openin X E2; S \<subseteq> E1 \<union> E2; E1 \<inter> E2 \<inter> S = {}; E1 \<inter> S \<noteq> {}; E2 \<inter> S \<noteq> {}\<rbrakk> \<Longrightarrow> False"
by (meson connectedin)
lemma connectedin_iff_connected [simp]: "connectedin euclidean S \<longleftrightarrow> connected S"
by (simp add: connected_def connectedin)
lemma connectedin_closedin:
"connectedin X S \<longleftrightarrow>
S \<subseteq> topspace X \<and>
\<not>(\<exists>E1 E2. closedin X E1 \<and> closedin X E2 \<and>
S \<subseteq> (E1 \<union> E2) \<and>
(E1 \<inter> E2 \<inter> S = {}) \<and>
\<not>(E1 \<inter> S = {}) \<and> \<not>(E2 \<inter> S = {}))"
proof -
have *: "(\<exists>E1:: 'a set. \<exists>E2:: 'a set. (\<exists>T1:: 'a set. P1 T1 \<and> E1 = f1 T1) \<and> (\<exists>T2:: 'a set. P2 T2 \<and> E2 = f2 T2) \<and>
R E1 E2) \<longleftrightarrow> (\<exists>T1 T2. P1 T1 \<and> P2 T2 \<and> R(f1 T1) (f2 T2))" for P1 f1 P2 f2 R
by auto
show ?thesis
unfolding connectedin_def connected_space_closedin closedin_subtopology topspace_subtopology *
by (intro conj_cong arg_cong [where f=Not] ex_cong1; blast dest!: openin_subset)
qed
lemma connectedin_empty [simp]: "connectedin X {}"
by (simp add: connectedin)
lemma connected_space_topspace_empty:
"topspace X = {} \<Longrightarrow> connected_space X"
using connectedin_topspace by fastforce
lemma connectedin_sing [simp]: "connectedin X {a} \<longleftrightarrow> a \<in> topspace X"
by (simp add: connectedin)
lemma connectedin_absolute [simp]:
"connectedin (subtopology X S) S \<longleftrightarrow> connectedin X S"
by (simp add: connectedin_subtopology)
lemma connectedin_Union:
assumes \<U>: "\<And>S. S \<in> \<U> \<Longrightarrow> connectedin X S" and ne: "\<Inter>\<U> \<noteq> {}"
shows "connectedin X (\<Union>\<U>)"
proof -
have "\<Union>\<U> \<subseteq> topspace X"
using \<U> by (simp add: Union_least connectedin_def)
moreover have False
if "openin X E1" "openin X E2" and cover: "\<Union>\<U> \<subseteq> E1 \<union> E2" and disj: "E1 \<inter> E2 \<inter> \<Union>\<U> = {}"
and overlap1: "E1 \<inter> \<Union>\<U> \<noteq> {}" and overlap2: "E2 \<inter> \<Union>\<U> \<noteq> {}"
for E1 E2
proof -
have disjS: "E1 \<inter> E2 \<inter> S = {}" if "S \<in> \<U>" for S
using Diff_triv that disj by auto
have coverS: "S \<subseteq> E1 \<union> E2" if "S \<in> \<U>" for S
using that cover by blast
have "\<U> \<noteq> {}"
using overlap1 by blast
obtain a where a: "\<And>U. U \<in> \<U> \<Longrightarrow> a \<in> U"
using ne by force
with \<open>\<U> \<noteq> {}\<close> have "a \<in> \<Union>\<U>"
by blast
then consider "a \<in> E1" | "a \<in> E2"
using \<open>\<Union>\<U> \<subseteq> E1 \<union> E2\<close> by auto
then show False
proof cases
case 1
then obtain b S where "b \<in> E2" "b \<in> S" "S \<in> \<U>"
using overlap2 by blast
then show ?thesis
using "1" \<open>openin X E1\<close> \<open>openin X E2\<close> disjS coverS a [OF \<open>S \<in> \<U>\<close>] \<U>[OF \<open>S \<in> \<U>\<close>]
unfolding connectedin
by (meson disjoint_iff_not_equal)
next
case 2
then obtain b S where "b \<in> E1" "b \<in> S" "S \<in> \<U>"
using overlap1 by blast
then show ?thesis
using "2" \<open>openin X E1\<close> \<open>openin X E2\<close> disjS coverS a [OF \<open>S \<in> \<U>\<close>] \<U>[OF \<open>S \<in> \<U>\<close>]
unfolding connectedin
by (meson disjoint_iff_not_equal)
qed
qed
ultimately show ?thesis
unfolding connectedin by blast
qed
lemma connectedin_Un:
"\<lbrakk>connectedin X S; connectedin X T; S \<inter> T \<noteq> {}\<rbrakk> \<Longrightarrow> connectedin X (S \<union> T)"
using connectedin_Union [of "{S,T}"] by auto
lemma connected_space_subconnected:
"connected_space X \<longleftrightarrow> (\<forall>x \<in> topspace X. \<forall>y \<in> topspace X. \<exists>S. connectedin X S \<and> x \<in> S \<and> y \<in> S)" (is "?lhs = ?rhs")
proof
assume ?lhs
then show ?rhs
using connectedin_topspace by blast
next
assume R [rule_format]: ?rhs
have False if "openin X U" "openin X V" and disj: "U \<inter> V = {}" and cover: "topspace X \<subseteq> U \<union> V"
and "U \<noteq> {}" "V \<noteq> {}" for U V
proof -
obtain u v where "u \<in> U" "v \<in> V"
using \<open>U \<noteq> {}\<close> \<open>V \<noteq> {}\<close> by auto
then obtain T where "u \<in> T" "v \<in> T" and T: "connectedin X T"
using R [of u v] that
by (meson \<open>openin X U\<close> \<open>openin X V\<close> subsetD openin_subset)
then show False
using that unfolding connectedin
by (metis IntI \<open>u \<in> U\<close> \<open>v \<in> V\<close> empty_iff inf_bot_left subset_trans)
qed
then show ?lhs
by (auto simp: connected_space_def)
qed
lemma connectedin_intermediate_closure_of:
assumes "connectedin X S" "S \<subseteq> T" "T \<subseteq> X closure_of S"
shows "connectedin X T"
proof -
have S: "S \<subseteq> topspace X" and T: "T \<subseteq> topspace X"
using assms by (meson closure_of_subset_topspace dual_order.trans)+
have \<section>: "\<And>E1 E2. \<lbrakk>openin X E1; openin X E2; E1 \<inter> S = {} \<or> E2 \<inter> S = {}\<rbrakk> \<Longrightarrow> E1 \<inter> T = {} \<or> E2 \<inter> T = {}"
using assms unfolding disjoint_iff by (meson in_closure_of subsetD)
then show ?thesis
using assms
unfolding connectedin closure_of_subset_topspace S T
by (metis Int_empty_right T dual_order.trans inf.orderE inf_left_commute)
qed
lemma connectedin_closure_of:
"connectedin X S \<Longrightarrow> connectedin X (X closure_of S)"
by (meson closure_of_subset connectedin_def connectedin_intermediate_closure_of subset_refl)
lemma connectedin_separation:
"connectedin X S \<longleftrightarrow>
S \<subseteq> topspace X \<and>
(\<nexists>C1 C2. C1 \<union> C2 = S \<and> C1 \<noteq> {} \<and> C2 \<noteq> {} \<and> C1 \<inter> X closure_of C2 = {} \<and> C2 \<inter> X closure_of C1 = {})"
unfolding connectedin_def connected_space_closedin_eq closedin_Int_closure_of topspace_subtopology
apply (intro conj_cong refl arg_cong [where f=Not])
apply (intro ex_cong1 iffI, blast)
using closure_of_subset_Int by force
lemma connectedin_eq_not_separated:
"connectedin X S \<longleftrightarrow>
S \<subseteq> topspace X \<and>
(\<nexists>C1 C2. C1 \<union> C2 = S \<and> C1 \<noteq> {} \<and> C2 \<noteq> {} \<and> separatedin X C1 C2)"
unfolding separatedin_def by (metis connectedin_separation sup.boundedE)
lemma connectedin_eq_not_separated_subset:
"connectedin X S \<longleftrightarrow>
S \<subseteq> topspace X \<and> (\<nexists>C1 C2. S \<subseteq> C1 \<union> C2 \<and> S \<inter> C1 \<noteq> {} \<and> S \<inter> C2 \<noteq> {} \<and> separatedin X C1 C2)"
proof -
have "\<forall>C1 C2. S \<subseteq> C1 \<union> C2 \<longrightarrow> S \<inter> C1 = {} \<or> S \<inter> C2 = {} \<or> \<not> separatedin X C1 C2"
if "\<And>C1 C2. C1 \<union> C2 = S \<longrightarrow> C1 = {} \<or> C2 = {} \<or> \<not> separatedin X C1 C2"
proof (intro allI)
fix C1 C2
show "S \<subseteq> C1 \<union> C2 \<longrightarrow> S \<inter> C1 = {} \<or> S \<inter> C2 = {} \<or> \<not> separatedin X C1 C2"
using that [of "S \<inter> C1" "S \<inter> C2"]
by (auto simp: separatedin_mono)
qed
then show ?thesis
by (metis Un_Int_eq(1) Un_Int_eq(2) connectedin_eq_not_separated order_refl)
qed
lemma connected_space_eq_not_separated:
"connected_space X \<longleftrightarrow>
(\<nexists>C1 C2. C1 \<union> C2 = topspace X \<and> C1 \<noteq> {} \<and> C2 \<noteq> {} \<and> separatedin X C1 C2)"
by (simp add: connectedin_eq_not_separated flip: connectedin_topspace)
lemma connected_space_eq_not_separated_subset:
"connected_space X \<longleftrightarrow>
(\<nexists>C1 C2. topspace X \<subseteq> C1 \<union> C2 \<and> C1 \<noteq> {} \<and> C2 \<noteq> {} \<and> separatedin X C1 C2)"
by (metis connected_space_eq_not_separated le_sup_iff separatedin_def subset_antisym)
lemma connectedin_subset_separated_union:
"\<lbrakk>connectedin X C; separatedin X S T; C \<subseteq> S \<union> T\<rbrakk> \<Longrightarrow> C \<subseteq> S \<or> C \<subseteq> T"
unfolding connectedin_eq_not_separated_subset by blast
lemma connectedin_nonseparated_union:
assumes "connectedin X S" "connectedin X T" "\<not>separatedin X S T"
shows "connectedin X (S \<union> T)"
proof -
have "\<And>C1 C2. \<lbrakk>T \<subseteq> C1 \<union> C2; S \<subseteq> C1 \<union> C2\<rbrakk> \<Longrightarrow>
S \<inter> C1 = {} \<and> T \<inter> C1 = {} \<or> S \<inter> C2 = {} \<and> T \<inter> C2 = {} \<or> \<not> separatedin X C1 C2"
using assms
unfolding connectedin_eq_not_separated_subset
by (metis (no_types, lifting) assms connectedin_subset_separated_union inf.orderE separatedin_empty(1) separatedin_mono separatedin_sym)
then show ?thesis
unfolding connectedin_eq_not_separated_subset
by (simp add: assms connectedin_subset_topspace Int_Un_distrib2)
qed
lemma connected_space_closures:
"connected_space X \<longleftrightarrow>
(\<nexists>e1 e2. e1 \<union> e2 = topspace X \<and> X closure_of e1 \<inter> X closure_of e2 = {} \<and> e1 \<noteq> {} \<and> e2 \<noteq> {})"
(is "?lhs = ?rhs")
proof
assume ?lhs
then show ?rhs
unfolding connected_space_closedin_eq
by (metis Un_upper1 Un_upper2 closedin_closure_of closure_of_Un closure_of_eq_empty closure_of_topspace)
next
assume ?rhs
then show ?lhs
unfolding connected_space_closedin_eq
by (metis closure_of_eq)
qed
lemma connectedin_Int_frontier_of:
assumes "connectedin X S" "S \<inter> T \<noteq> {}" "S - T \<noteq> {}"
shows "S \<inter> X frontier_of T \<noteq> {}"
proof -
have "S \<subseteq> topspace X" and *:
"\<And>E1 E2. openin X E1 \<longrightarrow> openin X E2 \<longrightarrow> E1 \<inter> E2 \<inter> S = {} \<longrightarrow> S \<subseteq> E1 \<union> E2 \<longrightarrow> E1 \<inter> S = {} \<or> E2 \<inter> S = {}"
using \<open>connectedin X S\<close> by (auto simp: connectedin)
moreover
have "S - (topspace X \<inter> T) \<noteq> {}"
using assms(3) by blast
moreover
have "S \<inter> topspace X \<inter> T \<noteq> {}"
using assms connectedin by fastforce
moreover
have False if "S \<inter> T \<noteq> {}" "S - T \<noteq> {}" "T \<subseteq> topspace X" "S \<inter> X frontier_of T = {}" for T
proof -
have null: "S \<inter> (X closure_of T - X interior_of T) = {}"
using that unfolding frontier_of_def by blast
have "X interior_of T \<inter> (topspace X - X closure_of T) \<inter> S = {}"
by (metis Diff_disjoint inf_bot_left interior_of_Int interior_of_complement interior_of_empty)
moreover have "S \<subseteq> X interior_of T \<union> (topspace X - X closure_of T)"
using that \<open>S \<subseteq> topspace X\<close> null by auto
moreover have "S \<inter> X interior_of T \<noteq> {}"
using closure_of_subset that(1) that(3) null by fastforce
ultimately have "S \<inter> X interior_of (topspace X - T) = {}"
by (metis "*" inf_commute interior_of_complement openin_interior_of)
then have "topspace (subtopology X S) \<inter> X interior_of T = S"
using \<open>S \<subseteq> topspace X\<close> interior_of_complement null by fastforce
then show ?thesis
using that by (metis Diff_eq_empty_iff inf_le2 interior_of_subset subset_trans)
qed
ultimately show ?thesis
by (metis Int_lower1 frontier_of_restrict inf_assoc)
qed
lemma connectedin_continuous_map_image:
assumes f: "continuous_map X Y f" and "connectedin X S"
shows "connectedin Y (f ` S)"
proof -
have "S \<subseteq> topspace X" and *:
"\<And>E1 E2. openin X E1 \<longrightarrow> openin X E2 \<longrightarrow> E1 \<inter> E2 \<inter> S = {} \<longrightarrow> S \<subseteq> E1 \<union> E2 \<longrightarrow> E1 \<inter> S = {} \<or> E2 \<inter> S = {}"
using \<open>connectedin X S\<close> by (auto simp: connectedin)
show ?thesis
unfolding connectedin connected_space_def
proof (intro conjI notI; clarify)
show "f x \<in> topspace Y" if "x \<in> S" for x
using \<open>S \<subseteq> topspace X\<close> continuous_map_image_subset_topspace f that by blast
next
fix U V
let ?U = "{x \<in> topspace X. f x \<in> U}"
let ?V = "{x \<in> topspace X. f x \<in> V}"
assume UV: "openin Y U" "openin Y V" "f ` S \<subseteq> U \<union> V" "U \<inter> V \<inter> f ` S = {}" "U \<inter> f ` S \<noteq> {}" "V \<inter> f ` S \<noteq> {}"
then have 1: "?U \<inter> ?V \<inter> S = {}"
by auto
have 2: "openin X ?U" "openin X ?V"
using \<open>openin Y U\<close> \<open>openin Y V\<close> continuous_map f by fastforce+
show "False"
using * [of ?U ?V] UV \<open>S \<subseteq> topspace X\<close>
by (auto simp: 1 2)
qed
qed
lemma connected_space_quotient_map_image:
"\<lbrakk>quotient_map X X' q; connected_space X\<rbrakk> \<Longrightarrow> connected_space X'"
by (metis connectedin_continuous_map_image connectedin_topspace quotient_imp_continuous_map quotient_imp_surjective_map)
lemma homeomorphic_connected_space:
"X homeomorphic_space Y \<Longrightarrow> connected_space X \<longleftrightarrow> connected_space Y"
unfolding homeomorphic_space_def homeomorphic_maps_def
by (metis connected_space_subconnected connectedin_continuous_map_image connectedin_topspace continuous_map_image_subset_topspace image_eqI image_subset_iff)
lemma homeomorphic_map_connectedness:
assumes f: "homeomorphic_map X Y f" and U: "U \<subseteq> topspace X"
shows "connectedin Y (f ` U) \<longleftrightarrow> connectedin X U"
proof -
have 1: "f ` U \<subseteq> topspace Y \<longleftrightarrow> U \<subseteq> topspace X"
using U f homeomorphic_imp_surjective_map by blast
moreover have "connected_space (subtopology Y (f ` U)) \<longleftrightarrow> connected_space (subtopology X U)"
proof (rule homeomorphic_connected_space)
have "f ` U \<subseteq> topspace Y"
by (simp add: U 1)
then have "topspace Y \<inter> f ` U = f ` U"
by (simp add: subset_antisym)
then show "subtopology Y (f ` U) homeomorphic_space subtopology X U"
by (metis U f homeomorphic_map_imp_homeomorphic_space homeomorphic_map_subtopologies homeomorphic_space_sym inf.absorb_iff2)
qed
ultimately show ?thesis
by (auto simp: connectedin_def)
qed
lemma homeomorphic_map_connectedness_eq:
"homeomorphic_map X Y f
\<Longrightarrow> connectedin X U \<longleftrightarrow>
U \<subseteq> topspace X \<and> connectedin Y (f ` U)"
using homeomorphic_map_connectedness connectedin_subset_topspace by metis
lemma connectedin_discrete_topology:
"connectedin (discrete_topology U) S \<longleftrightarrow> S \<subseteq> U \<and> (\<exists>a. S \<subseteq> {a})"
proof (cases "S \<subseteq> U")
case True
show ?thesis
proof (cases "S = {}")
case False
moreover have "connectedin (discrete_topology U) S \<longleftrightarrow> (\<exists>a. S = {a})"
proof
show "connectedin (discrete_topology U) S \<Longrightarrow> \<exists>a. S = {a}"
using False connectedin_Int_frontier_of insert_Diff by fastforce
qed (use True in auto)
ultimately show ?thesis
by auto
qed simp
next
case False
then show ?thesis
by (simp add: connectedin_def)
qed
lemma connected_space_discrete_topology:
"connected_space (discrete_topology U) \<longleftrightarrow> (\<exists>a. U \<subseteq> {a})"
by (metis connectedin_discrete_topology connectedin_topspace order_refl topspace_discrete_topology)
subsection\<open>Compact sets\<close>
definition compactin where
"compactin X S \<longleftrightarrow>
S \<subseteq> topspace X \<and>
(\<forall>\<U>. (\<forall>U \<in> \<U>. openin X U) \<and> S \<subseteq> \<Union>\<U>
\<longrightarrow> (\<exists>\<F>. finite \<F> \<and> \<F> \<subseteq> \<U> \<and> S \<subseteq> \<Union>\<F>))"
definition compact_space where
"compact_space X \<equiv> compactin X (topspace X)"
lemma compact_space_alt:
"compact_space X \<longleftrightarrow>
(\<forall>\<U>. (\<forall>U \<in> \<U>. openin X U) \<and> topspace X \<subseteq> \<Union>\<U>
\<longrightarrow> (\<exists>\<F>. finite \<F> \<and> \<F> \<subseteq> \<U> \<and> topspace X \<subseteq> \<Union>\<F>))"
by (simp add: compact_space_def compactin_def)
lemma compact_space:
"compact_space X \<longleftrightarrow>
(\<forall>\<U>. (\<forall>U \<in> \<U>. openin X U) \<and> \<Union>\<U> = topspace X
\<longrightarrow> (\<exists>\<F>. finite \<F> \<and> \<F> \<subseteq> \<U> \<and> \<Union>\<F> = topspace X))"
unfolding compact_space_alt
using openin_subset by fastforce
lemma compactinD:
"\<lbrakk>compactin X S; \<And>U. U \<in> \<U> \<Longrightarrow> openin X U; S \<subseteq> \<Union>\<U>\<rbrakk> \<Longrightarrow> \<exists>\<F>. finite \<F> \<and> \<F> \<subseteq> \<U> \<and> S \<subseteq> \<Union>\<F>"
by (auto simp: compactin_def)
lemma compactin_euclidean_iff [simp]: "compactin euclidean S \<longleftrightarrow> compact S"
by (simp add: compact_eq_Heine_Borel compactin_def) meson
lemma compactin_absolute [simp]:
"compactin (subtopology X S) S \<longleftrightarrow> compactin X S"
proof -
have eq: "(\<forall>U \<in> \<U>. \<exists>Y. openin X Y \<and> U = Y \<inter> S) \<longleftrightarrow> \<U> \<subseteq> (\<lambda>Y. Y \<inter> S) ` {y. openin X y}" for \<U>
by auto
show ?thesis
by (auto simp: compactin_def openin_subtopology eq imp_conjL all_subset_image ex_finite_subset_image)
qed
lemma compactin_subspace: "compactin X S \<longleftrightarrow> S \<subseteq> topspace X \<and> compact_space (subtopology X S)"
unfolding compact_space_def topspace_subtopology
by (metis compactin_absolute compactin_def inf.absorb2)
lemma compact_space_subtopology: "compactin X S \<Longrightarrow> compact_space (subtopology X S)"
by (simp add: compactin_subspace)
lemma compactin_subtopology: "compactin (subtopology X S) T \<longleftrightarrow> compactin X T \<and> T \<subseteq> S"
by (metis compactin_subspace inf.absorb_iff2 le_inf_iff subtopology_subtopology topspace_subtopology)
lemma compactin_subset_topspace: "compactin X S \<Longrightarrow> S \<subseteq> topspace X"
by (simp add: compactin_subspace)
lemma compactin_contractive:
"\<lbrakk>compactin X' S; topspace X' = topspace X;
\<And>U. openin X U \<Longrightarrow> openin X' U\<rbrakk> \<Longrightarrow> compactin X S"
by (simp add: compactin_def)
lemma finite_imp_compactin:
"\<lbrakk>S \<subseteq> topspace X; finite S\<rbrakk> \<Longrightarrow> compactin X S"
by (metis compactin_subspace compact_space finite_UnionD inf.absorb_iff2 order_refl topspace_subtopology)
lemma compactin_empty [iff]: "compactin X {}"
by (simp add: finite_imp_compactin)
lemma compact_space_topspace_empty:
"topspace X = {} \<Longrightarrow> compact_space X"
by (simp add: compact_space_def)
lemma finite_imp_compactin_eq:
"finite S \<Longrightarrow> (compactin X S \<longleftrightarrow> S \<subseteq> topspace X)"
using compactin_subset_topspace finite_imp_compactin by blast
lemma compactin_sing [simp]: "compactin X {a} \<longleftrightarrow> a \<in> topspace X"
by (simp add: finite_imp_compactin_eq)
lemma closed_compactin:
assumes XK: "compactin X K" and "C \<subseteq> K" and XC: "closedin X C"
shows "compactin X C"
unfolding compactin_def
proof (intro conjI allI impI)
show "C \<subseteq> topspace X"
by (simp add: XC closedin_subset)
next
fix \<U> :: "'a set set"
assume \<U>: "Ball \<U> (openin X) \<and> C \<subseteq> \<Union>\<U>"
have "(\<forall>U\<in>insert (topspace X - C) \<U>. openin X U)"
using XC \<U> by blast
moreover have "K \<subseteq> \<Union>(insert (topspace X - C) \<U>)"
using \<U> XK compactin_subset_topspace by fastforce
ultimately obtain \<F> where "finite \<F>" "\<F> \<subseteq> insert (topspace X - C) \<U>" "K \<subseteq> \<Union>\<F>"
using assms unfolding compactin_def by metis
moreover have "openin X (topspace X - C)"
using XC by auto
ultimately show "\<exists>\<F>. finite \<F> \<and> \<F> \<subseteq> \<U> \<and> C \<subseteq> \<Union>\<F>"
using \<open>C \<subseteq> K\<close>
by (rule_tac x="\<F> - {topspace X - C}" in exI) auto
qed
lemma closedin_compact_space:
"\<lbrakk>compact_space X; closedin X S\<rbrakk> \<Longrightarrow> compactin X S"
by (simp add: closed_compactin closedin_subset compact_space_def)
lemma compact_Int_closedin:
assumes "compactin X S" "closedin X T" shows "compactin X (S \<inter> T)"
proof -
have "compactin (subtopology X S) (S \<inter> T)"
by (metis assms closedin_compact_space closedin_subtopology compactin_subspace inf_commute)
then show ?thesis
by (simp add: compactin_subtopology)
qed
lemma closed_Int_compactin: "\<lbrakk>closedin X S; compactin X T\<rbrakk> \<Longrightarrow> compactin X (S \<inter> T)"
by (metis compact_Int_closedin inf_commute)
lemma compactin_Un:
assumes S: "compactin X S" and T: "compactin X T" shows "compactin X (S \<union> T)"
unfolding compactin_def
proof (intro conjI allI impI)
show "S \<union> T \<subseteq> topspace X"
using assms by (auto simp: compactin_def)
next
fix \<U> :: "'a set set"
assume \<U>: "Ball \<U> (openin X) \<and> S \<union> T \<subseteq> \<Union>\<U>"
with S obtain \<F> where \<V>: "finite \<F>" "\<F> \<subseteq> \<U>" "S \<subseteq> \<Union>\<F>"
unfolding compactin_def by (meson sup.bounded_iff)
obtain \<W> where "finite \<W>" "\<W> \<subseteq> \<U>" "T \<subseteq> \<Union>\<W>"
using \<U> T
unfolding compactin_def by (meson sup.bounded_iff)
with \<V> show "\<exists>\<V>. finite \<V> \<and> \<V> \<subseteq> \<U> \<and> S \<union> T \<subseteq> \<Union>\<V>"
by (rule_tac x="\<F> \<union> \<W>" in exI) auto
qed
lemma compactin_Union:
"\<lbrakk>finite \<F>; \<And>S. S \<in> \<F> \<Longrightarrow> compactin X S\<rbrakk> \<Longrightarrow> compactin X (\<Union>\<F>)"
by (induction rule: finite_induct) (simp_all add: compactin_Un)
lemma compactin_subtopology_imp_compact:
assumes "compactin (subtopology X S) K" shows "compactin X K"
using assms
proof (clarsimp simp add: compactin_def)
fix \<U>
define \<V> where "\<V> \<equiv> (\<lambda>U. U \<inter> S) ` \<U>"
assume "K \<subseteq> topspace X" and "K \<subseteq> S" and "\<forall>x\<in>\<U>. openin X x" and "K \<subseteq> \<Union>\<U>"
then have "\<forall>V \<in> \<V>. openin (subtopology X S) V" "K \<subseteq> \<Union>\<V>"
unfolding \<V>_def by (auto simp: openin_subtopology)
moreover
assume "\<forall>\<U>. (\<forall>x\<in>\<U>. openin (subtopology X S) x) \<and> K \<subseteq> \<Union>\<U> \<longrightarrow> (\<exists>\<F>. finite \<F> \<and> \<F> \<subseteq> \<U> \<and> K \<subseteq> \<Union>\<F>)"
ultimately obtain \<F> where "finite \<F>" "\<F> \<subseteq> \<V>" "K \<subseteq> \<Union>\<F>"
by meson
then have \<F>: "\<exists>U. U \<in> \<U> \<and> V = U \<inter> S" if "V \<in> \<F>" for V
unfolding \<V>_def using that by blast
let ?\<F> = "(\<lambda>F. @U. U \<in> \<U> \<and> F = U \<inter> S) ` \<F>"
show "\<exists>\<F>. finite \<F> \<and> \<F> \<subseteq> \<U> \<and> K \<subseteq> \<Union>\<F>"
proof (intro exI conjI)
show "finite ?\<F>"
using \<open>finite \<F>\<close> by blast
show "?\<F> \<subseteq> \<U>"
using someI_ex [OF \<F>] by blast
show "K \<subseteq> \<Union>?\<F>"
proof clarsimp
fix x
assume "x \<in> K"
then show "\<exists>V \<in> \<F>. x \<in> (SOME U. U \<in> \<U> \<and> V = U \<inter> S)"
using \<open>K \<subseteq> \<Union>\<F>\<close> someI_ex [OF \<F>]
by (metis (no_types, lifting) IntD1 Union_iff subsetCE)
qed
qed
qed
lemma compact_imp_compactin_subtopology:
assumes "compactin X K" "K \<subseteq> S" shows "compactin (subtopology X S) K"
using assms
proof (clarsimp simp add: compactin_def)
fix \<U> :: "'a set set"
define \<V> where "\<V> \<equiv> {V. openin X V \<and> (\<exists>U \<in> \<U>. U = V \<inter> S)}"
assume "K \<subseteq> S" and "K \<subseteq> topspace X" and "\<forall>U\<in>\<U>. openin (subtopology X S) U" and "K \<subseteq> \<Union>\<U>"
then have "\<forall>V \<in> \<V>. openin X V" "K \<subseteq> \<Union>\<V>"
unfolding \<V>_def by (fastforce simp: subset_eq openin_subtopology)+
moreover
assume "\<forall>\<U>. (\<forall>U\<in>\<U>. openin X U) \<and> K \<subseteq> \<Union>\<U> \<longrightarrow> (\<exists>\<F>. finite \<F> \<and> \<F> \<subseteq> \<U> \<and> K \<subseteq> \<Union>\<F>)"
ultimately obtain \<F> where "finite \<F>" "\<F> \<subseteq> \<V>" "K \<subseteq> \<Union>\<F>"
by meson
let ?\<F> = "(\<lambda>F. F \<inter> S) ` \<F>"
show "\<exists>\<F>. finite \<F> \<and> \<F> \<subseteq> \<U> \<and> K \<subseteq> \<Union>\<F>"
proof (intro exI conjI)
show "finite ?\<F>"
using \<open>finite \<F>\<close> by blast
show "?\<F> \<subseteq> \<U>"
using \<V>_def \<open>\<F> \<subseteq> \<V>\<close> by blast
show "K \<subseteq> \<Union>?\<F>"
using \<open>K \<subseteq> \<Union>\<F>\<close> assms(2) by auto
qed
qed
proposition compact_space_fip:
"compact_space X \<longleftrightarrow>
(\<forall>\<U>. (\<forall>C\<in>\<U>. closedin X C) \<and> (\<forall>\<F>. finite \<F> \<and> \<F> \<subseteq> \<U> \<longrightarrow> \<Inter>\<F> \<noteq> {}) \<longrightarrow> \<Inter>\<U> \<noteq> {})"
(is "_ = ?rhs")
proof (cases "topspace X = {}")
case True
then show ?thesis
unfolding compact_space_def
by (metis Sup_bot_conv(1) closedin_topspace_empty compactin_empty finite.emptyI finite_UnionD order_refl)
next
case False
show ?thesis
proof safe
fix \<U> :: "'a set set"
assume * [rule_format]: "\<forall>\<F>. finite \<F> \<and> \<F> \<subseteq> \<U> \<longrightarrow> \<Inter>\<F> \<noteq> {}"
define \<V> where "\<V> \<equiv> (\<lambda>S. topspace X - S) ` \<U>"
assume clo: "\<forall>C\<in>\<U>. closedin X C" and [simp]: "\<Inter>\<U> = {}"
then have "\<forall>V \<in> \<V>. openin X V" "topspace X \<subseteq> \<Union>\<V>"
by (auto simp: \<V>_def)
moreover assume [unfolded compact_space_alt, rule_format, of \<V>]: "compact_space X"
ultimately obtain \<F> where \<F>: "finite \<F>" "\<F> \<subseteq> \<U>" "topspace X \<subseteq> topspace X - \<Inter>\<F>"
by (auto simp: ex_finite_subset_image \<V>_def)
moreover have "\<F> \<noteq> {}"
using \<F> \<open>topspace X \<noteq> {}\<close> by blast
ultimately show "False"
using * [of \<F>]
by auto (metis Diff_iff Inter_iff clo closedin_def subsetD)
next
assume R [rule_format]: ?rhs
show "compact_space X"
unfolding compact_space_alt
proof clarify
fix \<U> :: "'a set set"
define \<V> where "\<V> \<equiv> (\<lambda>S. topspace X - S) ` \<U>"
assume "\<forall>C\<in>\<U>. openin X C" and "topspace X \<subseteq> \<Union>\<U>"
with \<open>topspace X \<noteq> {}\<close> have *: "\<forall>V \<in> \<V>. closedin X V" "\<U> \<noteq> {}"
by (auto simp: \<V>_def)
show "\<exists>\<F>. finite \<F> \<and> \<F> \<subseteq> \<U> \<and> topspace X \<subseteq> \<Union>\<F>"
proof (rule ccontr; simp)
assume "\<forall>\<F>\<subseteq>\<U>. finite \<F> \<longrightarrow> \<not> topspace X \<subseteq> \<Union>\<F>"
then have "\<forall>\<F>. finite \<F> \<and> \<F> \<subseteq> \<V> \<longrightarrow> \<Inter>\<F> \<noteq> {}"
by (simp add: \<V>_def all_finite_subset_image)
with \<open>topspace X \<subseteq> \<Union>\<U>\<close> show False
using R [of \<V>] * by (simp add: \<V>_def)
qed
qed
qed
qed
corollary compactin_fip:
"compactin X S \<longleftrightarrow>
S \<subseteq> topspace X \<and>
(\<forall>\<U>. (\<forall>C\<in>\<U>. closedin X C) \<and> (\<forall>\<F>. finite \<F> \<and> \<F> \<subseteq> \<U> \<longrightarrow> S \<inter> \<Inter>\<F> \<noteq> {}) \<longrightarrow> S \<inter> \<Inter>\<U> \<noteq> {})"
proof (cases "S = {}")
case False
show ?thesis
proof (cases "S \<subseteq> topspace X")
case True
then have "compactin X S \<longleftrightarrow>
(\<forall>\<U>. \<U> \<subseteq> (\<lambda>T. S \<inter> T) ` {T. closedin X T} \<longrightarrow>
(\<forall>\<F>. finite \<F> \<longrightarrow> \<F> \<subseteq> \<U> \<longrightarrow> \<Inter>\<F> \<noteq> {}) \<longrightarrow> \<Inter>\<U> \<noteq> {})"
by (simp add: compact_space_fip compactin_subspace closedin_subtopology image_def subset_eq Int_commute imp_conjL)
also have "\<dots> = (\<forall>\<U>\<subseteq>Collect (closedin X). (\<forall>\<F>. finite \<F> \<longrightarrow> \<F> \<subseteq> (\<inter>) S ` \<U> \<longrightarrow> \<Inter>\<F> \<noteq> {}) \<longrightarrow> \<Inter> ((\<inter>) S ` \<U>) \<noteq> {})"
by (simp add: all_subset_image)
also have "\<dots> = (\<forall>\<U>. (\<forall>C\<in>\<U>. closedin X C) \<and> (\<forall>\<F>. finite \<F> \<and> \<F> \<subseteq> \<U> \<longrightarrow> S \<inter> \<Inter>\<F> \<noteq> {}) \<longrightarrow> S \<inter> \<Inter>\<U> \<noteq> {})"
proof -
have eq: "((\<forall>\<F>. finite \<F> \<and> \<F> \<subseteq> \<U> \<longrightarrow> \<Inter> ((\<inter>) S ` \<F>) \<noteq> {}) \<longrightarrow> \<Inter> ((\<inter>) S ` \<U>) \<noteq> {}) \<longleftrightarrow>
((\<forall>\<F>. finite \<F> \<and> \<F> \<subseteq> \<U> \<longrightarrow> S \<inter> \<Inter>\<F> \<noteq> {}) \<longrightarrow> S \<inter> \<Inter>\<U> \<noteq> {})" for \<U>
by simp (use \<open>S \<noteq> {}\<close> in blast)
show ?thesis
unfolding imp_conjL [symmetric] all_finite_subset_image eq by blast
qed
finally show ?thesis
using True by simp
qed (simp add: compactin_subspace)
qed force
corollary compact_space_imp_nest:
fixes C :: "nat \<Rightarrow> 'a set"
assumes "compact_space X" and clo: "\<And>n. closedin X (C n)"
and ne: "\<And>n. C n \<noteq> {}" and dec: "decseq C"
shows "(\<Inter>n. C n) \<noteq> {}"
proof -
let ?\<U> = "range (\<lambda>n. \<Inter>m \<le> n. C m)"
have "closedin X A" if "A \<in> ?\<U>" for A
using that clo by auto
moreover have "(\<Inter>n\<in>K. \<Inter>m \<le> n. C m) \<noteq> {}" if "finite K" for K
proof -
obtain n where "\<And>k. k \<in> K \<Longrightarrow> k \<le> n"
using Max.coboundedI \<open>finite K\<close> by blast
with dec have "C n \<subseteq> (\<Inter>n\<in>K. \<Inter>m \<le> n. C m)"
unfolding decseq_def by blast
with ne [of n] show ?thesis
by blast
qed
ultimately show ?thesis
using \<open>compact_space X\<close> [unfolded compact_space_fip, rule_format, of ?\<U>]
by (simp add: all_finite_subset_image INT_extend_simps UN_atMost_UNIV del: INT_simps)
qed
lemma compactin_discrete_topology:
"compactin (discrete_topology X) S \<longleftrightarrow> S \<subseteq> X \<and> finite S" (is "?lhs = ?rhs")
proof (intro iffI conjI)
assume L: ?lhs
then show "S \<subseteq> X"
by (auto simp: compactin_def)
have *: "\<And>\<U>. Ball \<U> (openin (discrete_topology X)) \<and> S \<subseteq> \<Union>\<U> \<Longrightarrow>
(\<exists>\<F>. finite \<F> \<and> \<F> \<subseteq> \<U> \<and> S \<subseteq> \<Union>\<F>)"
using L by (auto simp: compactin_def)
show "finite S"
using * [of "(\<lambda>x. {x}) ` X"] \<open>S \<subseteq> X\<close>
by clarsimp (metis UN_singleton finite_subset_image infinite_super)
next
assume ?rhs
then show ?lhs
by (simp add: finite_imp_compactin)
qed
lemma compact_space_discrete_topology: "compact_space(discrete_topology X) \<longleftrightarrow> finite X"
by (simp add: compactin_discrete_topology compact_space_def)
lemma compact_space_imp_Bolzano_Weierstrass:
assumes "compact_space X" "infinite S" "S \<subseteq> topspace X"
shows "X derived_set_of S \<noteq> {}"
proof
assume X: "X derived_set_of S = {}"
then have "closedin X S"
by (simp add: closedin_contains_derived_set assms)
then have "compactin X S"
by (rule closedin_compact_space [OF \<open>compact_space X\<close>])
with X show False
by (metis \<open>infinite S\<close> compactin_subspace compact_space_discrete_topology inf_bot_right subtopology_eq_discrete_topology_eq)
qed
lemma compactin_imp_Bolzano_Weierstrass:
"\<lbrakk>compactin X S; infinite T \<and> T \<subseteq> S\<rbrakk> \<Longrightarrow> S \<inter> X derived_set_of T \<noteq> {}"
using compact_space_imp_Bolzano_Weierstrass [of "subtopology X S"]
by (simp add: compactin_subspace derived_set_of_subtopology inf_absorb2)
lemma compact_closure_of_imp_Bolzano_Weierstrass:
"\<lbrakk>compactin X (X closure_of S); infinite T; T \<subseteq> S; T \<subseteq> topspace X\<rbrakk> \<Longrightarrow> X derived_set_of T \<noteq> {}"
using closure_of_mono closure_of_subset compactin_imp_Bolzano_Weierstrass by fastforce
lemma discrete_compactin_eq_finite:
"S \<inter> X derived_set_of S = {} \<Longrightarrow> compactin X S \<longleftrightarrow> S \<subseteq> topspace X \<and> finite S"
by (meson compactin_imp_Bolzano_Weierstrass finite_imp_compactin_eq order_refl)
lemma discrete_compact_space_eq_finite:
"X derived_set_of (topspace X) = {} \<Longrightarrow> (compact_space X \<longleftrightarrow> finite(topspace X))"
by (metis compact_space_discrete_topology discrete_topology_unique_derived_set)
lemma image_compactin:
assumes cpt: "compactin X S" and cont: "continuous_map X Y f"
shows "compactin Y (f ` S)"
unfolding compactin_def
proof (intro conjI allI impI)
show "f ` S \<subseteq> topspace Y"
using compactin_subset_topspace cont continuous_map_image_subset_topspace cpt by blast
next
fix \<U> :: "'b set set"
assume \<U>: "Ball \<U> (openin Y) \<and> f ` S \<subseteq> \<Union>\<U>"
define \<V> where "\<V> \<equiv> (\<lambda>U. {x \<in> topspace X. f x \<in> U}) ` \<U>"
have "S \<subseteq> topspace X"
and *: "\<And>\<U>. \<lbrakk>\<forall>U\<in>\<U>. openin X U; S \<subseteq> \<Union>\<U>\<rbrakk> \<Longrightarrow> \<exists>\<F>. finite \<F> \<and> \<F> \<subseteq> \<U> \<and> S \<subseteq> \<Union>\<F>"
using cpt by (auto simp: compactin_def)
obtain \<F> where \<F>: "finite \<F>" "\<F> \<subseteq> \<V>" "S \<subseteq> \<Union>\<F>"
proof -
have 1: "\<forall>U\<in>\<V>. openin X U"
unfolding \<V>_def using \<U> cont[unfolded continuous_map] by blast
have 2: "S \<subseteq> \<Union>\<V>"
unfolding \<V>_def using compactin_subset_topspace cpt \<U> by fastforce
show thesis
using * [OF 1 2] that by metis
qed
have "\<forall>v \<in> \<V>. \<exists>U. U \<in> \<U> \<and> v = {x \<in> topspace X. f x \<in> U}"
using \<V>_def by blast
then obtain U where U: "\<forall>v \<in> \<V>. U v \<in> \<U> \<and> v = {x \<in> topspace X. f x \<in> U v}"
by metis
show "\<exists>\<F>. finite \<F> \<and> \<F> \<subseteq> \<U> \<and> f ` S \<subseteq> \<Union>\<F>"
proof (intro conjI exI)
show "finite (U ` \<F>)"
by (simp add: \<open>finite \<F>\<close>)
next
show "U ` \<F> \<subseteq> \<U>"
using \<open>\<F> \<subseteq> \<V>\<close> U by auto
next
show "f ` S \<subseteq> \<Union> (U ` \<F>)"
using \<F>(2-3) U UnionE subset_eq U by fastforce
qed
qed
lemma homeomorphic_compact_space:
assumes "X homeomorphic_space Y"
shows "compact_space X \<longleftrightarrow> compact_space Y"
using homeomorphic_space_sym
by (metis assms compact_space_def homeomorphic_eq_everything_map homeomorphic_space image_compactin)
lemma homeomorphic_map_compactness:
assumes hom: "homeomorphic_map X Y f" and U: "U \<subseteq> topspace X"
shows "compactin Y (f ` U) \<longleftrightarrow> compactin X U"
proof -
have "f ` U \<subseteq> topspace Y"
using hom U homeomorphic_imp_surjective_map by blast
moreover have "homeomorphic_map (subtopology X U) (subtopology Y (f ` U)) f"
using U hom homeomorphic_imp_surjective_map by (blast intro: homeomorphic_map_subtopologies)
then have "compact_space (subtopology Y (f ` U)) = compact_space (subtopology X U)"
using homeomorphic_compact_space homeomorphic_map_imp_homeomorphic_space by blast
ultimately show ?thesis
by (simp add: compactin_subspace U)
qed
lemma homeomorphic_map_compactness_eq:
"homeomorphic_map X Y f
\<Longrightarrow> compactin X U \<longleftrightarrow> U \<subseteq> topspace X \<and> compactin Y (f ` U)"
by (meson compactin_subset_topspace homeomorphic_map_compactness)
subsection\<open>Embedding maps\<close>
definition embedding_map
where "embedding_map X Y f \<equiv> homeomorphic_map X (subtopology Y (f ` (topspace X))) f"
lemma embedding_map_eq:
"\<lbrakk>embedding_map X Y f; \<And>x. x \<in> topspace X \<Longrightarrow> f x = g x\<rbrakk> \<Longrightarrow> embedding_map X Y g"
unfolding embedding_map_def
by (metis homeomorphic_map_eq image_cong)
lemma embedding_map_compose:
assumes "embedding_map X X' f" "embedding_map X' X'' g"
shows "embedding_map X X'' (g \<circ> f)"
proof -
have hm: "homeomorphic_map X (subtopology X' (f ` topspace X)) f" "homeomorphic_map X' (subtopology X'' (g ` topspace X')) g"
using assms by (auto simp: embedding_map_def)
then obtain C where "g ` topspace X' \<inter> C = (g \<circ> f) ` topspace X"
by (metis homeomorphic_imp_surjective_map image_comp image_mono inf.absorb_iff2 topspace_subtopology)
then have "homeomorphic_map (subtopology X' (f ` topspace X)) (subtopology X'' ((g \<circ> f) ` topspace X)) g"
by (metis hm homeomorphic_imp_surjective_map homeomorphic_map_subtopologies image_comp subtopology_subtopology topspace_subtopology)
then show ?thesis
unfolding embedding_map_def
using hm(1) homeomorphic_map_compose by blast
qed
lemma surjective_embedding_map:
"embedding_map X Y f \<and> f ` (topspace X) = topspace Y \<longleftrightarrow> homeomorphic_map X Y f"
by (force simp: embedding_map_def homeomorphic_eq_everything_map)
lemma embedding_map_in_subtopology:
"embedding_map X (subtopology Y S) f \<longleftrightarrow> embedding_map X Y f \<and> f ` (topspace X) \<subseteq> S" (is "?lhs = ?rhs")
proof
show "?lhs \<Longrightarrow> ?rhs"
unfolding embedding_map_def
by (metis continuous_map_in_subtopology homeomorphic_imp_continuous_map inf_absorb2 subtopology_subtopology)
qed (simp add: embedding_map_def inf.absorb_iff2 subtopology_subtopology)
lemma injective_open_imp_embedding_map:
"\<lbrakk>continuous_map X Y f; open_map X Y f; inj_on f (topspace X)\<rbrakk> \<Longrightarrow> embedding_map X Y f"
unfolding embedding_map_def
by (simp add: continuous_map_in_subtopology continuous_open_quotient_map eq_iff homeomorphic_map_def open_map_imp_subset open_map_into_subtopology)
lemma injective_closed_imp_embedding_map:
"\<lbrakk>continuous_map X Y f; closed_map X Y f; inj_on f (topspace X)\<rbrakk> \<Longrightarrow> embedding_map X Y f"
unfolding embedding_map_def
by (simp add: closed_map_imp_subset closed_map_into_subtopology continuous_closed_quotient_map
continuous_map_in_subtopology dual_order.eq_iff homeomorphic_map_def)
lemma embedding_map_imp_homeomorphic_space:
"embedding_map X Y f \<Longrightarrow> X homeomorphic_space (subtopology Y (f ` (topspace X)))"
unfolding embedding_map_def
using homeomorphic_space by blast
lemma embedding_imp_closed_map:
"\<lbrakk>embedding_map X Y f; closedin Y (f ` topspace X)\<rbrakk> \<Longrightarrow> closed_map X Y f"
unfolding closed_map_def
by (auto simp: closedin_closed_subtopology embedding_map_def homeomorphic_map_closedness_eq)
lemma embedding_imp_closed_map_eq:
"embedding_map X Y f \<Longrightarrow> (closed_map X Y f \<longleftrightarrow> closedin Y (f ` topspace X))"
using closed_map_def embedding_imp_closed_map by blast
subsection\<open>Retraction and section maps\<close>
definition retraction_maps :: "'a topology \<Rightarrow> 'b topology \<Rightarrow> ('a \<Rightarrow> 'b) \<Rightarrow> ('b \<Rightarrow> 'a) \<Rightarrow> bool"
where "retraction_maps X Y f g \<equiv>
continuous_map X Y f \<and> continuous_map Y X g \<and> (\<forall>x \<in> topspace Y. f(g x) = x)"
definition section_map :: "'a topology \<Rightarrow> 'b topology \<Rightarrow> ('a \<Rightarrow> 'b) \<Rightarrow> bool"
where "section_map X Y f \<equiv> \<exists>g. retraction_maps Y X g f"
definition retraction_map :: "'a topology \<Rightarrow> 'b topology \<Rightarrow> ('a \<Rightarrow> 'b) \<Rightarrow> bool"
where "retraction_map X Y f \<equiv> \<exists>g. retraction_maps X Y f g"
lemma retraction_maps_eq:
"\<lbrakk>retraction_maps X Y f g; \<And>x. x \<in> topspace X \<Longrightarrow> f x = f' x; \<And>x. x \<in> topspace Y \<Longrightarrow> g x = g' x\<rbrakk>
\<Longrightarrow> retraction_maps X Y f' g'"
unfolding retraction_maps_def by (metis (no_types, lifting) continuous_map_def continuous_map_eq)
lemma section_map_eq:
"\<lbrakk>section_map X Y f; \<And>x. x \<in> topspace X \<Longrightarrow> f x = g x\<rbrakk> \<Longrightarrow> section_map X Y g"
unfolding section_map_def using retraction_maps_eq by blast
lemma retraction_map_eq:
"\<lbrakk>retraction_map X Y f; \<And>x. x \<in> topspace X \<Longrightarrow> f x = g x\<rbrakk> \<Longrightarrow> retraction_map X Y g"
unfolding retraction_map_def using retraction_maps_eq by blast
lemma homeomorphic_imp_retraction_maps:
"homeomorphic_maps X Y f g \<Longrightarrow> retraction_maps X Y f g"
by (simp add: homeomorphic_maps_def retraction_maps_def)
lemma section_and_retraction_eq_homeomorphic_map:
"section_map X Y f \<and> retraction_map X Y f \<longleftrightarrow> homeomorphic_map X Y f" (is "?lhs = ?rhs")
proof
assume ?lhs
then obtain g where "homeomorphic_maps X Y f g"
unfolding homeomorphic_maps_def retraction_map_def section_map_def
by (smt (verit, best) continuous_map_def retraction_maps_def)
then show ?rhs
using homeomorphic_map_maps by blast
next
assume ?rhs
then show ?lhs
unfolding retraction_map_def section_map_def
by (meson homeomorphic_imp_retraction_maps homeomorphic_map_maps homeomorphic_maps_sym)
qed
lemma section_imp_embedding_map:
"section_map X Y f \<Longrightarrow> embedding_map X Y f"
unfolding section_map_def embedding_map_def homeomorphic_map_maps retraction_maps_def homeomorphic_maps_def
by (force simp: continuous_map_in_subtopology continuous_map_from_subtopology)
lemma retraction_imp_quotient_map:
assumes "retraction_map X Y f"
shows "quotient_map X Y f"
unfolding quotient_map_def
proof (intro conjI subsetI allI impI)
show "f ` topspace X = topspace Y"
using assms by (force simp: retraction_map_def retraction_maps_def continuous_map_def)
next
fix U
assume U: "U \<subseteq> topspace Y"
have "openin Y U"
if "\<forall>x\<in>topspace Y. g x \<in> topspace X" "\<forall>x\<in>topspace Y. f (g x) = x"
"openin Y {x \<in> topspace Y. g x \<in> {x \<in> topspace X. f x \<in> U}}" for g
using openin_subopen U that by fastforce
then show "openin X {x \<in> topspace X. f x \<in> U} = openin Y U"
using assms by (auto simp: retraction_map_def retraction_maps_def continuous_map_def)
qed
lemma retraction_maps_compose:
"\<lbrakk>retraction_maps X Y f f'; retraction_maps Y Z g g'\<rbrakk> \<Longrightarrow> retraction_maps X Z (g \<circ> f) (f' \<circ> g')"
by (clarsimp simp: retraction_maps_def continuous_map_compose) (simp add: continuous_map_def)
lemma retraction_map_compose:
"\<lbrakk>retraction_map X Y f; retraction_map Y Z g\<rbrakk> \<Longrightarrow> retraction_map X Z (g \<circ> f)"
by (meson retraction_map_def retraction_maps_compose)
lemma section_map_compose:
"\<lbrakk>section_map X Y f; section_map Y Z g\<rbrakk> \<Longrightarrow> section_map X Z (g \<circ> f)"
by (meson retraction_maps_compose section_map_def)
lemma surjective_section_eq_homeomorphic_map:
"section_map X Y f \<and> f ` (topspace X) = topspace Y \<longleftrightarrow> homeomorphic_map X Y f"
by (meson section_and_retraction_eq_homeomorphic_map section_imp_embedding_map surjective_embedding_map)
lemma surjective_retraction_or_section_map:
"f ` (topspace X) = topspace Y \<Longrightarrow> retraction_map X Y f \<or> section_map X Y f \<longleftrightarrow> retraction_map X Y f"
using section_and_retraction_eq_homeomorphic_map surjective_section_eq_homeomorphic_map by fastforce
lemma retraction_imp_surjective_map:
"retraction_map X Y f \<Longrightarrow> f ` (topspace X) = topspace Y"
by (simp add: retraction_imp_quotient_map quotient_imp_surjective_map)
lemma section_imp_injective_map:
"\<lbrakk>section_map X Y f; x \<in> topspace X; y \<in> topspace X\<rbrakk> \<Longrightarrow> f x = f y \<longleftrightarrow> x = y"
by (metis (mono_tags, opaque_lifting) retraction_maps_def section_map_def)
lemma retraction_maps_to_retract_maps:
"retraction_maps X Y r s
\<Longrightarrow> retraction_maps X (subtopology X (s ` (topspace Y))) (s \<circ> r) id"
unfolding retraction_maps_def
by (auto simp: continuous_map_compose continuous_map_into_subtopology continuous_map_from_subtopology)
subsection \<open>Continuity\<close>
lemma continuous_on_open:
"continuous_on S f \<longleftrightarrow>
(\<forall>T. openin (top_of_set (f ` S)) T \<longrightarrow>
openin (top_of_set S) (S \<inter> f -` T))"
unfolding continuous_on_open_invariant openin_open Int_def vimage_def Int_commute
by (simp add: imp_ex imageI conj_commute eq_commute cong: conj_cong)
lemma continuous_on_closed:
"continuous_on S f \<longleftrightarrow>
(\<forall>T. closedin (top_of_set (f ` S)) T \<longrightarrow>
closedin (top_of_set S) (S \<inter> f -` T))"
unfolding continuous_on_closed_invariant closedin_closed Int_def vimage_def Int_commute
by (simp add: imp_ex imageI conj_commute eq_commute cong: conj_cong)
lemma continuous_on_imp_closedin:
assumes "continuous_on S f" "closedin (top_of_set (f ` S)) T"
shows "closedin (top_of_set S) (S \<inter> f -` T)"
using assms continuous_on_closed by blast
lemma continuous_map_subtopology_eu [simp]:
"continuous_map (top_of_set S) (subtopology euclidean T) h \<longleftrightarrow> continuous_on S h \<and> h ` S \<subseteq> T"
by (simp add: continuous_map_in_subtopology)
lemma continuous_map_euclidean_top_of_set:
assumes eq: "f -` S = UNIV" and cont: "continuous_on UNIV f"
shows "continuous_map euclidean (top_of_set S) f"
by (simp add: cont continuous_map_into_subtopology eq image_subset_iff_subset_vimage)
subsection\<^marker>\<open>tag unimportant\<close> \<open>Half-global and completely global cases\<close>
lemma continuous_openin_preimage_gen:
assumes "continuous_on S f" "open T"
shows "openin (top_of_set S) (S \<inter> f -` T)"
proof -
have *: "(S \<inter> f -` T) = (S \<inter> f -` (T \<inter> f ` S))"
by auto
have "openin (top_of_set (f ` S)) (T \<inter> f ` S)"
using openin_open_Int[of T "f ` S", OF assms(2)] unfolding openin_open by auto
then show ?thesis
using assms(1)[unfolded continuous_on_open, THEN spec[where x="T \<inter> f ` S"]]
using * by auto
qed
lemma continuous_closedin_preimage:
assumes "continuous_on S f" and "closed T"
shows "closedin (top_of_set S) (S \<inter> f -` T)"
proof -
have *: "(S \<inter> f -` T) = (S \<inter> f -` (T \<inter> f ` S))"
by auto
have "closedin (top_of_set (f ` S)) (T \<inter> f ` S)"
using closedin_closed_Int[of T "f ` S", OF assms(2)]
by (simp add: Int_commute)
then show ?thesis
using assms(1)[unfolded continuous_on_closed, THEN spec[where x="T \<inter> f ` S"]]
using * by auto
qed
lemma continuous_openin_preimage_eq:
"continuous_on S f \<longleftrightarrow> (\<forall>T. open T \<longrightarrow> openin (top_of_set S) (S \<inter> f -` T))"
by (metis Int_commute continuous_on_open_invariant open_openin openin_subtopology)
lemma continuous_closedin_preimage_eq:
"continuous_on S f \<longleftrightarrow>
(\<forall>T. closed T \<longrightarrow> closedin (top_of_set S) (S \<inter> f -` T))"
by (metis Int_commute closedin_closed continuous_on_closed_invariant)
lemma continuous_open_preimage:
assumes contf: "continuous_on S f" and "open S" "open T"
shows "open (S \<inter> f -` T)"
proof-
obtain U where "open U" "(S \<inter> f -` T) = S \<inter> U"
using continuous_openin_preimage_gen[OF contf \<open>open T\<close>]
unfolding openin_open by auto
then show ?thesis
using open_Int[of S U, OF \<open>open S\<close>] by auto
qed
lemma continuous_closed_preimage:
assumes contf: "continuous_on S f" and "closed S" "closed T"
shows "closed (S \<inter> f -` T)"
proof-
obtain U where "closed U" "(S \<inter> f -` T) = S \<inter> U"
using continuous_closedin_preimage[OF contf \<open>closed T\<close>]
unfolding closedin_closed by auto
then show ?thesis using closed_Int[of S U, OF \<open>closed S\<close>] by auto
qed
lemma continuous_open_vimage: "open S \<Longrightarrow> (\<And>x. continuous (at x) f) \<Longrightarrow> open (f -` S)"
by (metis continuous_on_eq_continuous_within open_vimage)
lemma continuous_closed_vimage: "closed S \<Longrightarrow> (\<And>x. continuous (at x) f) \<Longrightarrow> closed (f -` S)"
by (simp add: closed_vimage continuous_on_eq_continuous_within)
lemma Times_in_interior_subtopology:
assumes "(x, y) \<in> U" "openin (top_of_set (S \<times> T)) U"
obtains V W where "openin (top_of_set S) V" "x \<in> V"
"openin (top_of_set T) W" "y \<in> W" "(V \<times> W) \<subseteq> U"
proof -
from assms obtain E where "open E" "U = S \<times> T \<inter> E" "(x, y) \<in> E" "x \<in> S" "y \<in> T"
by (auto simp: openin_open)
from open_prod_elim[OF \<open>open E\<close> \<open>(x, y) \<in> E\<close>]
obtain E1 E2 where "open E1" "open E2" "(x, y) \<in> E1 \<times> E2" "E1 \<times> E2 \<subseteq> E"
by blast
show ?thesis
proof
show "openin (top_of_set S) (E1 \<inter> S)" "openin (top_of_set T) (E2 \<inter> T)"
using \<open>open E1\<close> \<open>open E2\<close> by (auto simp: openin_open)
show "x \<in> E1 \<inter> S" "y \<in> E2 \<inter> T"
using \<open>(x, y) \<in> E1 \<times> E2\<close> \<open>x \<in> S\<close> \<open>y \<in> T\<close> by auto
show "(E1 \<inter> S) \<times> (E2 \<inter> T) \<subseteq> U"
using \<open>E1 \<times> E2 \<subseteq> E\<close> \<open>U = _\<close> by auto
qed
qed
lemma closedin_Times:
"closedin (top_of_set S) S' \<Longrightarrow> closedin (top_of_set T) T' \<Longrightarrow>
closedin (top_of_set (S \<times> T)) (S' \<times> T')"
unfolding closedin_closed using closed_Times by blast
lemma openin_Times:
"openin (top_of_set S) S' \<Longrightarrow> openin (top_of_set T) T' \<Longrightarrow>
openin (top_of_set (S \<times> T)) (S' \<times> T')"
unfolding openin_open using open_Times by blast
lemma openin_Times_eq:
fixes S :: "'a::topological_space set" and T :: "'b::topological_space set"
shows
"openin (top_of_set (S \<times> T)) (S' \<times> T') \<longleftrightarrow>
S' = {} \<or> T' = {} \<or> openin (top_of_set S) S' \<and> openin (top_of_set T) T'"
(is "?lhs = ?rhs")
proof (cases "S' = {} \<or> T' = {}")
case True
then show ?thesis by auto
next
case False
then obtain x y where "x \<in> S'" "y \<in> T'"
by blast
show ?thesis
proof
assume ?lhs
have "openin (top_of_set S) S'"
proof (subst openin_subopen, clarify)
show "\<exists>U. openin (top_of_set S) U \<and> x \<in> U \<and> U \<subseteq> S'" if "x \<in> S'" for x
using that \<open>y \<in> T'\<close> Times_in_interior_subtopology [OF _ \<open>?lhs\<close>, of x y]
by simp (metis mem_Sigma_iff subsetD subsetI)
qed
moreover have "openin (top_of_set T) T'"
proof (subst openin_subopen, clarify)
show "\<exists>U. openin (top_of_set T) U \<and> y \<in> U \<and> U \<subseteq> T'" if "y \<in> T'" for y
using that \<open>x \<in> S'\<close> Times_in_interior_subtopology [OF _ \<open>?lhs\<close>, of x y]
by simp (metis mem_Sigma_iff subsetD subsetI)
qed
ultimately show ?rhs
by simp
next
assume ?rhs
with False show ?lhs
by (simp add: openin_Times)
qed
qed
lemma Lim_transform_within_openin:
assumes f: "(f \<longlongrightarrow> l) (at a within T)"
and "openin (top_of_set T) S" "a \<in> S"
and eq: "\<And>x. \<lbrakk>x \<in> S; x \<noteq> a\<rbrakk> \<Longrightarrow> f x = g x"
shows "(g \<longlongrightarrow> l) (at a within T)"
proof -
have "\<forall>\<^sub>F x in at a within T. x \<in> T \<and> x \<noteq> a"
by (simp add: eventually_at_filter)
moreover
from \<open>openin _ _\<close> obtain U where "open U" "S = T \<inter> U"
by (auto simp: openin_open)
then have "a \<in> U" using \<open>a \<in> S\<close> by auto
from topological_tendstoD[OF tendsto_ident_at \<open>open U\<close> \<open>a \<in> U\<close>]
have "\<forall>\<^sub>F x in at a within T. x \<in> U" by auto
ultimately
have "\<forall>\<^sub>F x in at a within T. f x = g x"
by eventually_elim (auto simp: \<open>S = _\<close> eq)
with f show ?thesis
by (rule Lim_transform_eventually)
qed
lemma continuous_on_open_gen:
assumes "f ` S \<subseteq> T"
shows "continuous_on S f \<longleftrightarrow>
(\<forall>U. openin (top_of_set T) U
\<longrightarrow> openin (top_of_set S) (S \<inter> f -` U))"
(is "?lhs = ?rhs")
proof
assume ?lhs
then show ?rhs
by (clarsimp simp add: continuous_openin_preimage_eq openin_open)
(metis Int_assoc assms image_subset_iff_subset_vimage inf.absorb_iff1)
next
assume R [rule_format]: ?rhs
show ?lhs
proof (clarsimp simp add: continuous_openin_preimage_eq)
fix U::"'a set"
assume "open U"
then have "openin (top_of_set S) (S \<inter> f -` (U \<inter> T))"
by (metis R inf_commute openin_open)
then show "openin (top_of_set S) (S \<inter> f -` U)"
by (metis Int_assoc Int_commute assms image_subset_iff_subset_vimage inf.absorb_iff2 vimage_Int)
qed
qed
lemma continuous_openin_preimage:
"\<lbrakk>continuous_on S f; f ` S \<subseteq> T; openin (top_of_set T) U\<rbrakk>
\<Longrightarrow> openin (top_of_set S) (S \<inter> f -` U)"
by (simp add: continuous_on_open_gen)
lemma continuous_on_closed_gen:
assumes "f ` S \<subseteq> T"
shows "continuous_on S f \<longleftrightarrow>
(\<forall>U. closedin (top_of_set T) U
\<longrightarrow> closedin (top_of_set S) (S \<inter> f -` U))"
proof -
have *: "U \<subseteq> T \<Longrightarrow> S \<inter> f -` (T - U) = S - (S \<inter> f -` U)" for U
using assms by blast
then show ?thesis
unfolding continuous_on_open_gen [OF assms]
by (metis closedin_def inf.cobounded1 openin_closedin_eq topspace_euclidean_subtopology)
qed
lemma continuous_closedin_preimage_gen:
assumes "continuous_on S f" "f ` S \<subseteq> T" "closedin (top_of_set T) U"
shows "closedin (top_of_set S) (S \<inter> f -` U)"
using assms continuous_on_closed_gen by blast
lemma continuous_transform_within_openin:
assumes "continuous (at a within T) f"
and "openin (top_of_set T) S" "a \<in> S"
and eq: "\<And>x. x \<in> S \<Longrightarrow> f x = g x"
shows "continuous (at a within T) g"
using assms by (simp add: Lim_transform_within_openin continuous_within)
subsection\<^marker>\<open>tag important\<close> \<open>The topology generated by some (open) subsets\<close>
text \<open>In the definition of a generated topology below, the \<open>Empty\<close> case is not strictly necessary,
as it follows from \<open>UN\<close> by taking the empty set for \<open>K\<close>. However, it is convenient to have it,
and it never causes a problem in proofs, so I prefer to write it down explicitly (a small worked
example follows the definition). We do not require \<open>UNIV\<close> to be an open set, as this will not be
the case in applications. (We are thinking of a topology on a subset of \<open>UNIV\<close>, the remaining part
of \<open>UNIV\<close> being irrelevant.)\<close>
inductive generate_topology_on for S where
Empty: "generate_topology_on S {}"
| Int: "generate_topology_on S a \<Longrightarrow> generate_topology_on S b \<Longrightarrow> generate_topology_on S (a \<inter> b)"
| UN: "(\<And>k. k \<in> K \<Longrightarrow> generate_topology_on S k) \<Longrightarrow> generate_topology_on S (\<Union>K)"
| Basis: "s \<in> S \<Longrightarrow> generate_topology_on S s"
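text \<open>A small worked example, only meant as an illustration: for \<open>S = {{1::nat}, {2}}\<close>, the
generated sets are exactly \<open>{}\<close> (from \<open>Empty\<close>, or from \<open>UN\<close> with \<open>K = {}\<close>), the basic sets
\<open>{1}\<close> and \<open>{2}\<close>, and their union \<open>{1, 2}\<close>; intersections produce nothing new. In particular
\<open>\<Union>S\<close> is always generated, which is the content of \<open>topology_generated_by_topspace\<close> below.\<close>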
lemma istopology_generate_topology_on:
"istopology (generate_topology_on S)"
unfolding istopology_def by (auto intro: generate_topology_on.intros)
text \<open>The basic property of the topology generated by a set \<open>S\<close> is that it is the
smallest topology containing all the elements of \<open>S\<close>:\<close>
lemma generate_topology_on_coarsest:
assumes T: "istopology T" "\<And>s. s \<in> S \<Longrightarrow> T s"
and gen: "generate_topology_on S s0"
shows "T s0"
using gen
by (induct rule: generate_topology_on.induct) (use T in \<open>auto simp: istopology_def\<close>)
abbreviation\<^marker>\<open>tag unimportant\<close> topology_generated_by::"('a set set) \<Rightarrow> ('a topology)"
where "topology_generated_by S \<equiv> topology (generate_topology_on S)"
lemma openin_topology_generated_by_iff:
"openin (topology_generated_by S) s \<longleftrightarrow> generate_topology_on S s"
using topology_inverse'[OF istopology_generate_topology_on[of S]] by simp
lemma openin_topology_generated_by:
"openin (topology_generated_by S) s \<Longrightarrow> generate_topology_on S s"
using openin_topology_generated_by_iff by auto
lemma topology_generated_by_topspace [simp]:
"topspace (topology_generated_by S) = (\<Union>S)"
proof
{
fix s assume "openin (topology_generated_by S) s"
then have "generate_topology_on S s" by (rule openin_topology_generated_by)
then have "s \<subseteq> (\<Union>S)" by (induct, auto)
}
then show "topspace (topology_generated_by S) \<subseteq> (\<Union>S)"
unfolding topspace_def by auto
next
have "generate_topology_on S (\<Union>S)"
using generate_topology_on.UN[OF generate_topology_on.Basis, of S S] by simp
then show "(\<Union>S) \<subseteq> topspace (topology_generated_by S)"
unfolding topspace_def using openin_topology_generated_by_iff by auto
qed
lemma topology_generated_by_Basis:
"s \<in> S \<Longrightarrow> openin (topology_generated_by S) s"
by (simp add: Basis openin_topology_generated_by_iff)
lemma generate_topology_on_Inter:
"\<lbrakk>finite \<F>; \<And>K. K \<in> \<F> \<Longrightarrow> generate_topology_on \<S> K; \<F> \<noteq> {}\<rbrakk> \<Longrightarrow> generate_topology_on \<S> (\<Inter>\<F>)"
by (induction \<F> rule: finite_induct; force intro: generate_topology_on.intros)
subsection\<open>Topology bases and sub-bases\<close>
lemma istopology_base_alt:
"istopology (arbitrary union_of P) \<longleftrightarrow>
(\<forall>S T. (arbitrary union_of P) S \<and> (arbitrary union_of P) T
\<longrightarrow> (arbitrary union_of P) (S \<inter> T))"
by (simp add: istopology_def) (blast intro: arbitrary_union_of_Union)
lemma istopology_base_eq:
"istopology (arbitrary union_of P) \<longleftrightarrow>
(\<forall>S T. P S \<and> P T \<longrightarrow> (arbitrary union_of P) (S \<inter> T))"
by (simp add: istopology_base_alt arbitrary_union_of_Int_eq)
lemma istopology_base:
"(\<And>S T. \<lbrakk>P S; P T\<rbrakk> \<Longrightarrow> P(S \<inter> T)) \<Longrightarrow> istopology (arbitrary union_of P)"
by (simp add: arbitrary_def istopology_base_eq union_of_inc)
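text \<open>For instance (an informal illustration, not part of the formal development): the open
intervals of the real line are closed under binary intersection (the intersection of two open
intervals is again an open interval, possibly empty), so by \<open>istopology_base\<close> their arbitrary
unions form a topology; this is the familiar description of the Euclidean topology of the reals
by a base of open intervals.\<close>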
lemma openin_topology_base_unique:
"openin X = arbitrary union_of P \<longleftrightarrow>
(\<forall>V. P V \<longrightarrow> openin X V) \<and> (\<forall>U x. openin X U \<and> x \<in> U \<longrightarrow> (\<exists>V. P V \<and> x \<in> V \<and> V \<subseteq> U))"
(is "?lhs = ?rhs")
proof
assume ?lhs
then show ?rhs
by (auto simp: union_of_def arbitrary_def)
next
assume R: ?rhs
then have *: "\<exists>\<U>\<subseteq>Collect P. \<Union>\<U> = S" if "openin X S" for S
using that by (rule_tac x="{V. P V \<and> V \<subseteq> S}" in exI) fastforce
from R show ?lhs
by (fastforce simp add: union_of_def arbitrary_def intro: *)
qed
lemma topology_base_unique:
assumes "\<And>S. P S \<Longrightarrow> openin X S"
"\<And>U x. \<lbrakk>openin X U; x \<in> U\<rbrakk> \<Longrightarrow> \<exists>B. P B \<and> x \<in> B \<and> B \<subseteq> U"
shows "topology (arbitrary union_of P) = X"
proof -
have "X = topology (openin X)"
by (simp add: openin_inverse)
also from assms have "openin X = arbitrary union_of P"
by (subst openin_topology_base_unique) auto
finally show ?thesis ..
qed
lemma topology_bases_eq_aux:
"\<lbrakk>(arbitrary union_of P) S;
\<And>U x. \<lbrakk>P U; x \<in> U\<rbrakk> \<Longrightarrow> \<exists>V. Q V \<and> x \<in> V \<and> V \<subseteq> U\<rbrakk>
\<Longrightarrow> (arbitrary union_of Q) S"
by (metis arbitrary_union_of_alt arbitrary_union_of_idempot)
lemma topology_bases_eq:
"\<lbrakk>\<And>U x. \<lbrakk>P U; x \<in> U\<rbrakk> \<Longrightarrow> \<exists>V. Q V \<and> x \<in> V \<and> V \<subseteq> U;
\<And>V x. \<lbrakk>Q V; x \<in> V\<rbrakk> \<Longrightarrow> \<exists>U. P U \<and> x \<in> U \<and> U \<subseteq> V\<rbrakk>
\<Longrightarrow> topology (arbitrary union_of P) =
topology (arbitrary union_of Q)"
by (fastforce intro: arg_cong [where f=topology] elim: topology_bases_eq_aux)
lemma istopology_subbase:
"istopology (arbitrary union_of (finite intersection_of P relative_to S))"
by (simp add: finite_intersection_of_Int istopology_base relative_to_Int)
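text \<open>In contrast with \<open>istopology_base\<close>, no closure condition on \<open>P\<close> is needed here: finite
intersections are added explicitly, and the \<open>relative_to U\<close> step makes \<open>U\<close> itself one of the
generating sets (via the empty intersection), so the resulting topology has topspace \<open>U\<close>; this
is \<open>topspace_subbase\<close> below.\<close>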
lemma openin_subbase:
"openin (topology (arbitrary union_of (finite intersection_of B relative_to U))) S
\<longleftrightarrow> (arbitrary union_of (finite intersection_of B relative_to U)) S"
by (simp add: istopology_subbase topology_inverse')
lemma topspace_subbase [simp]:
"topspace(topology (arbitrary union_of (finite intersection_of B relative_to U))) = U" (is "?lhs = _")
proof
show "?lhs \<subseteq> U"
by (metis arbitrary_union_of_relative_to openin_subbase openin_topspace relative_to_imp_subset)
show "U \<subseteq> ?lhs"
by (metis arbitrary_union_of_inc finite_intersection_of_empty inf.orderE istopology_subbase
openin_subset relative_to_inc subset_UNIV topology_inverse')
qed
lemma minimal_topology_subbase:
assumes X: "\<And>S. P S \<Longrightarrow> openin X S" and "openin X U"
and S: "openin(topology(arbitrary union_of (finite intersection_of P relative_to U))) S"
shows "openin X S"
proof -
have "(arbitrary union_of (finite intersection_of P relative_to U)) S"
using S openin_subbase by blast
with X \<open>openin X U\<close> show ?thesis
by (force simp add: union_of_def intersection_of_def relative_to_def intro: openin_Int_Inter)
qed
lemma istopology_subbase_UNIV:
"istopology (arbitrary union_of (finite intersection_of P))"
by (simp add: istopology_base finite_intersection_of_Int)
lemma generate_topology_on_eq:
"generate_topology_on S = arbitrary union_of finite' intersection_of (\<lambda>x. x \<in> S)" (is "?lhs = ?rhs")
proof (intro ext iffI)
fix A
assume "?lhs A"
then show "?rhs A"
proof induction
case (Int a b)
then show ?case
by (metis (mono_tags, lifting) istopology_base_alt finite'_intersection_of_Int istopology_base)
next
case (UN K)
then show ?case
by (simp add: arbitrary_union_of_Union)
next
case (Basis s)
then show ?case
by (simp add: Sup_upper arbitrary_union_of_inc finite'_intersection_of_inc relative_to_subset)
qed auto
next
fix A
assume "?rhs A"
then obtain \<U> where \<U>: "\<And>T. T \<in> \<U> \<Longrightarrow> \<exists>\<F>. finite' \<F> \<and> \<F> \<subseteq> S \<and> \<Inter>\<F> = T" and eq: "A = \<Union>\<U>"
unfolding union_of_def intersection_of_def by auto
show "?lhs A"
unfolding eq
proof (rule generate_topology_on.UN)
fix T
assume "T \<in> \<U>"
with \<U> obtain \<F> where "finite' \<F>" "\<F> \<subseteq> S" "\<Inter>\<F> = T"
by blast
have "generate_topology_on S (\<Inter>\<F>)"
proof (rule generate_topology_on_Inter)
show "finite \<F>" "\<F> \<noteq> {}"
by (auto simp: \<open>finite' \<F>\<close>)
show "\<And>K. K \<in> \<F> \<Longrightarrow> generate_topology_on S K"
by (metis \<open>\<F> \<subseteq> S\<close> generate_topology_on.simps subset_iff)
qed
then show "generate_topology_on S T"
using \<open>\<Inter>\<F> = T\<close> by blast
qed
qed
lemma continuous_on_generated_topo_iff:
"continuous_map T1 (topology_generated_by S) f \<longleftrightarrow>
((\<forall>U. U \<in> S \<longrightarrow> openin T1 (f-`U \<inter> topspace(T1))) \<and> (f`(topspace T1) \<subseteq> (\<Union> S)))"
unfolding continuous_map_alt topology_generated_by_topspace
proof (auto simp add: topology_generated_by_Basis)
assume H: "\<forall>U. U \<in> S \<longrightarrow> openin T1 (f -` U \<inter> topspace T1)"
fix U assume "openin (topology_generated_by S) U"
then have "generate_topology_on S U" by (rule openin_topology_generated_by)
then show "openin T1 (f -` U \<inter> topspace T1)"
proof (induct)
fix a b
assume H: "openin T1 (f -` a \<inter> topspace T1)" "openin T1 (f -` b \<inter> topspace T1)"
have "f -` (a \<inter> b) \<inter> topspace T1 = (f-`a \<inter> topspace T1) \<inter> (f-`b \<inter> topspace T1)"
by auto
then show "openin T1 (f -` (a \<inter> b) \<inter> topspace T1)" using H by auto
next
fix K
assume H: "openin T1 (f -` k \<inter> topspace T1)" if "k\<in> K" for k
define L where "L = {f -` k \<inter> topspace T1|k. k \<in> K}"
have *: "openin T1 l" if "l \<in>L" for l using that H unfolding L_def by auto
have "openin T1 (\<Union>L)" using openin_Union[OF *] by simp
moreover have "(\<Union>L) = (f -` \<Union>K \<inter> topspace T1)" unfolding L_def by auto
ultimately show "openin T1 (f -` \<Union>K \<inter> topspace T1)" by simp
qed (auto simp add: H)
qed
lemma continuous_on_generated_topo:
assumes "\<And>U. U \<in>S \<Longrightarrow> openin T1 (f-`U \<inter> topspace(T1))"
"f`(topspace T1) \<subseteq> (\<Union> S)"
shows "continuous_map T1 (topology_generated_by S) f"
using assms continuous_on_generated_topo_iff by blast
subsection\<open>Continuity via bases/subbases, hence upper and lower semicontinuity\<close>
lemma continuous_map_into_topology_base:
assumes P: "openin Y = arbitrary union_of P"
and f: "\<And>x. x \<in> topspace X \<Longrightarrow> f x \<in> topspace Y"
and ope: "\<And>U. P U \<Longrightarrow> openin X {x \<in> topspace X. f x \<in> U}"
shows "continuous_map X Y f"
proof -
have *: "\<And>\<U>. (\<And>t. t \<in> \<U> \<Longrightarrow> P t) \<Longrightarrow> openin X {x \<in> topspace X. \<exists>U\<in>\<U>. f x \<in> U}"
by (smt (verit) Ball_Collect ope mem_Collect_eq openin_subopen)
show ?thesis
using P by (auto simp: continuous_map_def arbitrary_def union_of_def intro!: f *)
qed
lemma continuous_map_into_topology_base_eq:
assumes P: "openin Y = arbitrary union_of P"
shows
"continuous_map X Y f \<longleftrightarrow>
(\<forall>x \<in> topspace X. f x \<in> topspace Y) \<and> (\<forall>U. P U \<longrightarrow> openin X {x \<in> topspace X. f x \<in> U})"
(is "?lhs=?rhs")
proof
assume L: ?lhs
then have "\<And>x. x \<in> topspace X \<Longrightarrow> f x \<in> topspace Y"
by (meson continuous_map_def)
moreover have "\<And>U. P U \<Longrightarrow> openin X {x \<in> topspace X. f x \<in> U}"
using L assms continuous_map openin_topology_base_unique by fastforce
ultimately show ?rhs by auto
qed (simp add: assms continuous_map_into_topology_base)
lemma continuous_map_into_topology_subbase:
fixes U P
defines "Y \<equiv> topology(arbitrary union_of (finite intersection_of P relative_to U))"
assumes f: "\<And>x. x \<in> topspace X \<Longrightarrow> f x \<in> topspace Y"
and ope: "\<And>U. P U \<Longrightarrow> openin X {x \<in> topspace X. f x \<in> U}"
shows "continuous_map X Y f"
proof (intro continuous_map_into_topology_base)
show "openin Y = arbitrary union_of (finite intersection_of P relative_to U)"
unfolding Y_def using istopology_subbase topology_inverse' by blast
show "openin X {x \<in> topspace X. f x \<in> V}"
if \<section>: "(finite intersection_of P relative_to U) V" for V
proof -
define finv where "finv \<equiv> \<lambda>V. {x \<in> topspace X. f x \<in> V}"
obtain \<U> where \<U>: "finite \<U>" "\<And>V. V \<in> \<U> \<Longrightarrow> P V"
"{x \<in> topspace X. f x \<in> V} = (\<Inter>V \<in> insert U \<U>. finv V)"
using \<section> by (fastforce simp: finv_def intersection_of_def relative_to_def)
show ?thesis
unfolding \<U>
proof (intro openin_Inter ope)
have U: "U = topspace Y"
unfolding Y_def using topspace_subbase by fastforce
fix V
assume V: "V \<in> finv ` insert U \<U>"
with U f have "openin X {x \<in> topspace X. f x \<in> U}"
by (auto simp: openin_subopen [of X "Collect _"])
then show "openin X V"
using V \<U>(2) ope by (fastforce simp: finv_def)
qed (use \<open>finite \<U>\<close> in auto)
qed
qed (use f in auto)
lemma continuous_map_into_topology_subbase_eq:
assumes "Y = topology(arbitrary union_of (finite intersection_of P relative_to U))"
shows
"continuous_map X Y f \<longleftrightarrow>
(\<forall>x \<in> topspace X. f x \<in> topspace Y) \<and> (\<forall>U. P U \<longrightarrow> openin X {x \<in> topspace X. f x \<in> U})"
(is "?lhs=?rhs")
proof
assume L: ?lhs
show ?rhs
proof (intro conjI strip)
show "\<And>x. x \<in> topspace X \<Longrightarrow> f x \<in> topspace Y"
using L continuous_map_def by fastforce
fix V
assume "P V"
have "U = topspace Y"
unfolding assms using topspace_subbase by fastforce
then have eq: "{x \<in> topspace X. f x \<in> V} = {x \<in> topspace X. f x \<in> U \<inter> V}"
using L by (auto simp: continuous_map)
have "openin Y (U \<inter> V)"
unfolding assms openin_subbase
by (meson \<open>P V\<close> arbitrary_union_of_inc finite_intersection_of_inc relative_to_inc)
show "openin X {x \<in> topspace X. f x \<in> V}"
using L \<open>openin Y (U \<inter> V)\<close> continuous_map eq by fastforce
qed
next
show "?rhs \<Longrightarrow> ?lhs"
unfolding assms
by (intro continuous_map_into_topology_subbase) auto
qed
lemma subbase_subtopology_euclidean:
fixes U :: "'a::order_topology set"
shows
"topology
(arbitrary union_of
(finite intersection_of (\<lambda>x. x \<in> range greaterThan \<union> range lessThan) relative_to U))
= subtopology euclidean U"
proof -
have "\<exists>V. (finite intersection_of (\<lambda>x. x \<in> range greaterThan \<or> x \<in> range lessThan)) V \<and> x \<in> V \<and> V \<subseteq> W"
if "open W" "x \<in> W" for W and x::'a
using \<open>open W\<close> [unfolded open_generated_order] \<open>x \<in> W\<close>
proof (induct rule: generate_topology.induct)
case UNIV
then show ?case
using finite_intersection_of_empty by blast
next
case (Int a b)
then show ?case
by (meson Int_iff finite_intersection_of_Int inf_mono)
next
case (UN K)
then show ?case
by (meson Union_iff subset_iff)
next
case (Basis s)
then show ?case
by (metis (no_types, lifting) Un_iff finite_intersection_of_inc order_refl)
qed
moreover
have "\<And>V::'a set. (finite intersection_of (\<lambda>x. x \<in> range greaterThan \<or> x \<in> range lessThan)) V \<Longrightarrow> open V"
by (force simp: intersection_of_def subset_iff)
ultimately have *: "openin (euclidean::'a topology) =
(arbitrary union_of (finite intersection_of (\<lambda>x. x \<in> range greaterThan \<or> x \<in> range lessThan)))"
by (smt (verit, best) openin_topology_base_unique open_openin)
show ?thesis
unfolding subtopology_def arbitrary_union_of_relative_to [symmetric]
apply (simp add: relative_to_def flip: *)
by (metis Int_commute)
qed
lemma continuous_map_upper_lower_semicontinuous_lt_gen:
fixes U :: "'a::order_topology set"
shows "continuous_map X (subtopology euclidean U) f \<longleftrightarrow>
(\<forall>x \<in> topspace X. f x \<in> U) \<and>
(\<forall>a. openin X {x \<in> topspace X. f x > a}) \<and>
(\<forall>a. openin X {x \<in> topspace X. f x < a})"
by (auto simp: continuous_map_into_topology_subbase_eq [OF subbase_subtopology_euclidean [symmetric, of U]]
greaterThan_def lessThan_def image_iff simp flip: all_simps)
lemma continuous_map_upper_lower_semicontinuous_lt:
fixes f :: "'a \<Rightarrow> 'b::order_topology"
shows "continuous_map X euclidean f \<longleftrightarrow>
(\<forall>a. openin X {x \<in> topspace X. f x > a}) \<and>
(\<forall>a. openin X {x \<in> topspace X. f x < a})"
using continuous_map_upper_lower_semicontinuous_lt_gen [where U=UNIV]
by auto
lemma Int_Collect_imp_eq: "A \<inter> {x. x\<in>A \<longrightarrow> P x} = {x\<in>A. P x}"
by blast
lemma continuous_map_upper_lower_semicontinuous_le_gen:
shows
"continuous_map X (subtopology euclideanreal U) f \<longleftrightarrow>
(\<forall>x \<in> topspace X. f x \<in> U) \<and>
(\<forall>a. closedin X {x \<in> topspace X. f x \<ge> a}) \<and>
(\<forall>a. closedin X {x \<in> topspace X. f x \<le> a})"
unfolding continuous_map_upper_lower_semicontinuous_lt_gen
by (auto simp: closedin_def Diff_eq Compl_eq not_le Int_Collect_imp_eq)
lemma continuous_map_upper_lower_semicontinuous_le:
"continuous_map X euclideanreal f \<longleftrightarrow>
(\<forall>a. closedin X {x \<in> topspace X. f x \<ge> a}) \<and>
(\<forall>a. closedin X {x \<in> topspace X. f x \<le> a})"
using continuous_map_upper_lower_semicontinuous_le_gen [where U=UNIV]
by auto
lemma continuous_map_upper_lower_semicontinuous_lte_gen:
"continuous_map X (subtopology euclideanreal U) f \<longleftrightarrow>
(\<forall>x \<in> topspace X. f x \<in> U) \<and>
(\<forall>a. openin X {x \<in> topspace X. f x < a}) \<and>
(\<forall>a. closedin X {x \<in> topspace X. f x \<le> a})"
unfolding continuous_map_upper_lower_semicontinuous_lt_gen
by (auto simp: closedin_def Diff_eq Compl_eq not_le Int_Collect_imp_eq)
lemma continuous_map_upper_lower_semicontinuous_lte:
"continuous_map X euclideanreal f \<longleftrightarrow>
(\<forall>a. openin X {x \<in> topspace X. f x < a}) \<and>
(\<forall>a. closedin X {x \<in> topspace X. f x \<le> a})"
using continuous_map_upper_lower_semicontinuous_lte_gen [where U=UNIV]
by auto
subsection\<^marker>\<open>tag important\<close> \<open>Pullback topology\<close>
text \<open>Pulling back a topology by a map again gives a topology. \<open>subtopology\<close> is
a special case of this notion, pulling back by the identity. We introduce the general notion as
we will need it to define the strong operator topology on the space of continuous linear operators,
by pulling back the product topology on the space of all functions.\<close>
text \<open>\<open>pullback_topology A f T\<close> is the pullback of the topology \<open>T\<close> by the map \<open>f\<close> on
the set \<open>A\<close>.\<close>
definition\<^marker>\<open>tag important\<close> pullback_topology::"('a set) \<Rightarrow> ('a \<Rightarrow> 'b) \<Rightarrow> ('b topology) \<Rightarrow> ('a topology)"
where "pullback_topology A f T = topology (\<lambda>S. \<exists>U. openin T U \<and> S = f-`U \<inter> A)"
lemma istopology_pullback_topology:
"istopology (\<lambda>S. \<exists>U. openin T U \<and> S = f-`U \<inter> A)"
unfolding istopology_def proof (auto)
fix K assume "\<forall>S\<in>K. \<exists>U. openin T U \<and> S = f -` U \<inter> A"
then have "\<exists>U. \<forall>S\<in>K. openin T (U S) \<and> S = f-`(U S) \<inter> A"
by (rule bchoice)
then obtain U where U: "\<forall>S\<in>K. openin T (U S) \<and> S = f-`(U S) \<inter> A"
by blast
define V where "V = (\<Union>S\<in>K. U S)"
have "openin T V" "\<Union>K = f -` V \<inter> A" unfolding V_def using U by auto
then show "\<exists>V. openin T V \<and> \<Union>K = f -` V \<inter> A" by auto
qed
lemma openin_pullback_topology:
"openin (pullback_topology A f T) S \<longleftrightarrow> (\<exists>U. openin T U \<and> S = f-`U \<inter> A)"
unfolding pullback_topology_def topology_inverse'[OF istopology_pullback_topology] by auto
lemma topspace_pullback_topology:
"topspace (pullback_topology A f T) = f-`(topspace T) \<inter> A"
by (auto simp add: topspace_def openin_pullback_topology)
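(* A small usage sketch (assumed, not verified): specialising \<open>openin_pullback_topology\<close> to
   the euclidean topology on the reals, the open sets of a pullback are exactly the preimages
   of open sets, intersected with the chosen carrier \<open>A\<close>. *)
lemma "openin (pullback_topology A g euclideanreal) S \<longleftrightarrow> (\<exists>U. open U \<and> S = g -` U \<inter> A)"
  by (simp add: openin_pullback_topology open_openin)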
proposition continuous_map_pullback [intro]:
assumes "continuous_map T1 T2 g"
shows "continuous_map (pullback_topology A f T1) T2 (g o f)"
unfolding continuous_map_alt
proof (auto)
fix U::"'b set" assume "openin T2 U"
then have "openin T1 (g-`U \<inter> topspace T1)"
using assms unfolding continuous_map_alt by auto
have "(g o f)-`U \<inter> topspace (pullback_topology A f T1) = (g o f)-`U \<inter> A \<inter> f-`(topspace T1)"
unfolding topspace_pullback_topology by auto
also have "... = f-`(g-`U \<inter> topspace T1) \<inter> A "
by auto
also have "openin (pullback_topology A f T1) (...)"
unfolding openin_pullback_topology using \<open>openin T1 (g-`U \<inter> topspace T1)\<close> by auto
finally show "openin (pullback_topology A f T1) ((g \<circ> f) -` U \<inter> topspace (pullback_topology A f T1))"
by auto
next
fix x assume "x \<in> topspace (pullback_topology A f T1)"
then have "f x \<in> topspace T1"
unfolding topspace_pullback_topology by auto
then show "g (f x) \<in> topspace T2"
using assms unfolding continuous_map_def by auto
qed
proposition continuous_map_pullback' [intro]:
assumes "continuous_map T1 T2 (f o g)" "topspace T1 \<subseteq> g-`A"
shows "continuous_map T1 (pullback_topology A f T2) g"
unfolding continuous_map_alt
proof (auto)
fix U assume "openin (pullback_topology A f T2) U"
then have "\<exists>V. openin T2 V \<and> U = f-`V \<inter> A"
unfolding openin_pullback_topology by auto
then obtain V where "openin T2 V" "U = f-`V \<inter> A"
by blast
then have "g -` U \<inter> topspace T1 = g-`(f-`V \<inter> A) \<inter> topspace T1"
by blast
also have "... = (f o g)-`V \<inter> (g-`A \<inter> topspace T1)"
by auto
also have "... = (f o g)-`V \<inter> topspace T1"
using assms(2) by auto
also have "openin T1 (...)"
using assms(1) \<open>openin T2 V\<close> by auto
finally show "openin T1 (g -` U \<inter> topspace T1)" by simp
next
fix x assume "x \<in> topspace T1"
have "(f o g) x \<in> topspace T2"
using assms(1) \<open>x \<in> topspace T1\<close> unfolding continuous_map_def by auto
then have "g x \<in> f-`(topspace T2)"
unfolding comp_def by blast
moreover have "g x \<in> A" using assms(2) \<open>x \<in> topspace T1\<close> by blast
ultimately show "g x \<in> topspace (pullback_topology A f T2)"
unfolding topspace_pullback_topology by blast
qed
subsection\<open>Proper maps (not a priori assumed continuous)\<close>
definition proper_map
where
"proper_map X Y f \<equiv>
closed_map X Y f \<and> (\<forall>y \<in> topspace Y. compactin X {x \<in> topspace X. f x = y})"
lemma proper_imp_closed_map:
"proper_map X Y f \<Longrightarrow> closed_map X Y f"
by (simp add: proper_map_def)
lemma proper_map_imp_subset_topspace:
"proper_map X Y f \<Longrightarrow> f ` (topspace X) \<subseteq> topspace Y"
by (simp add: closed_map_imp_subset_topspace proper_map_def)
+lemma proper_map_restriction:
+ assumes "proper_map X Y f" "{x \<in> topspace X. f x \<in> V} = U"
+ shows "proper_map (subtopology X U) (subtopology Y V) f"
+proof -
+ have [simp]: "{x \<in> topspace X. f x \<in> V \<and> f x = y} = {x \<in> topspace X. f x = y}"
+ if "y \<in> V" for y
+ using that by auto
+ show ?thesis
+ using assms unfolding proper_map_def using closed_map_restriction
+ by (force simp: compactin_subtopology)
+qed
+
lemma closed_injective_imp_proper_map:
assumes f: "closed_map X Y f" and inj: "inj_on f (topspace X)"
shows "proper_map X Y f"
unfolding proper_map_def
proof (clarsimp simp: f)
show "compactin X {x \<in> topspace X. f x = y}"
if "y \<in> topspace Y" for y
using inj_on_eq_iff [OF inj] that
proof -
have "{x \<in> topspace X. f x = y} = {} \<or> (\<exists>a \<in> topspace X. {x \<in> topspace X. f x = y} = {a})"
using inj_on_eq_iff [OF inj] by auto
then show ?thesis
using that by (metis (no_types, lifting) compactin_empty compactin_sing)
qed
qed
lemma injective_imp_proper_eq_closed_map:
"inj_on f (topspace X) \<Longrightarrow> (proper_map X Y f \<longleftrightarrow> closed_map X Y f)"
using closed_injective_imp_proper_map proper_imp_closed_map by blast
lemma homeomorphic_imp_proper_map:
"homeomorphic_map X Y f \<Longrightarrow> proper_map X Y f"
by (simp add: closed_injective_imp_proper_map homeomorphic_eq_everything_map)
lemma compactin_proper_map_preimage:
assumes f: "proper_map X Y f" and "compactin Y K"
shows "compactin X {x. x \<in> topspace X \<and> f x \<in> K}"
proof -
have "f ` (topspace X) \<subseteq> topspace Y"
by (simp add: f proper_map_imp_subset_topspace)
have *: "\<And>y. y \<in> topspace Y \<Longrightarrow> compactin X {x \<in> topspace X. f x = y}"
using f by (auto simp: proper_map_def)
show ?thesis
unfolding compactin_def
proof clarsimp
show "\<exists>\<F>. finite \<F> \<and> \<F> \<subseteq> \<U> \<and> {x \<in> topspace X. f x \<in> K} \<subseteq> \<Union>\<F>"
if \<U>: "\<forall>U\<in>\<U>. openin X U" and sub: "{x \<in> topspace X. f x \<in> K} \<subseteq> \<Union>\<U>"
for \<U>
proof -
have "\<forall>y \<in> K. \<exists>\<V>. finite \<V> \<and> \<V> \<subseteq> \<U> \<and> {x \<in> topspace X. f x = y} \<subseteq> \<Union>\<V>"
proof
fix y
assume "y \<in> K"
then have "compactin X {x \<in> topspace X. f x = y}"
by (metis "*" \<open>compactin Y K\<close> compactin_subspace subsetD)
with \<open>y \<in> K\<close> show "\<exists>\<V>. finite \<V> \<and> \<V> \<subseteq> \<U> \<and> {x \<in> topspace X. f x = y} \<subseteq> \<Union>\<V>"
unfolding compactin_def using \<U> sub by fastforce
qed
then obtain \<V> where \<V>: "\<And>y. y \<in> K \<Longrightarrow> finite (\<V> y) \<and> \<V> y \<subseteq> \<U> \<and> {x \<in> topspace X. f x = y} \<subseteq> \<Union>(\<V> y)"
by (metis (full_types))
define F where "F \<equiv> \<lambda>y. topspace Y - f ` (topspace X - \<Union>(\<V> y))"
have "\<exists>\<F>. finite \<F> \<and> \<F> \<subseteq> F ` K \<and> K \<subseteq> \<Union>\<F>"
proof (rule compactinD [OF \<open>compactin Y K\<close>])
have "\<And>x. x \<in> K \<Longrightarrow> closedin Y (f ` (topspace X - \<Union>(\<V> x)))"
using f unfolding proper_map_def closed_map_def
by (meson \<U> \<V> openin_Union openin_closedin_eq subsetD)
then show "openin Y U" if "U \<in> F ` K" for U
using that by (auto simp: F_def)
show "K \<subseteq> \<Union>(F ` K)"
using \<V> \<open>compactin Y K\<close> unfolding F_def compactin_def by fastforce
qed
then obtain J where "finite J" "J \<subseteq> K" and J: "K \<subseteq> \<Union>(F ` J)"
by (auto simp: ex_finite_subset_image)
show ?thesis
unfolding F_def
proof (intro exI conjI)
show "finite (\<Union>(\<V> ` J))"
using \<V> \<open>J \<subseteq> K\<close> \<open>finite J\<close> by blast
show "\<Union>(\<V> ` J) \<subseteq> \<U>"
using \<V> \<open>J \<subseteq> K\<close> by blast
show "{x \<in> topspace X. f x \<in> K} \<subseteq> \<Union>(\<Union>(\<V> ` J))"
using J \<open>J \<subseteq> K\<close> unfolding F_def by auto
qed
qed
qed
qed
lemma compact_space_proper_map_preimage:
assumes f: "proper_map X Y f" and fim: "f ` (topspace X) = topspace Y" and "compact_space Y"
shows "compact_space X"
proof -
have eq: "topspace X = {x \<in> topspace X. f x \<in> topspace Y}"
using fim by blast
moreover have "compactin Y (topspace Y)"
using \<open>compact_space Y\<close> compact_space_def by auto
ultimately show ?thesis
unfolding compact_space_def
using eq f compactin_proper_map_preimage by fastforce
qed
lemma proper_map_alt:
"proper_map X Y f \<longleftrightarrow>
closed_map X Y f \<and> (\<forall>K. compactin Y K \<longrightarrow> compactin X {x. x \<in> topspace X \<and> f x \<in> K})"
proof (intro iffI conjI allI impI)
show "compactin X {x \<in> topspace X. f x \<in> K}"
if "proper_map X Y f" and "compactin Y K" for K
using that by (simp add: compactin_proper_map_preimage)
show "proper_map X Y f"
if f: "closed_map X Y f \<and> (\<forall>K. compactin Y K \<longrightarrow> compactin X {x \<in> topspace X. f x \<in> K})"
proof -
have "compactin X {x \<in> topspace X. f x = y}" if "y \<in> topspace Y" for y
proof -
have "compactin X {x \<in> topspace X. f x \<in> {y}}"
using f compactin_sing that by fastforce
then show ?thesis
by auto
qed
with f show ?thesis
by (auto simp: proper_map_def)
qed
qed (simp add: proper_imp_closed_map)
lemma proper_map_on_empty:
"topspace X = {} \<Longrightarrow> proper_map X Y f"
by (auto simp: proper_map_def closed_map_on_empty)
lemma proper_map_id [simp]:
"proper_map X X id"
proof (clarsimp simp: proper_map_alt closed_map_id)
fix K
assume K: "compactin X K"
then have "{a \<in> topspace X. a \<in> K} = K"
by (simp add: compactin_subspace subset_antisym subset_iff)
then show "compactin X {a \<in> topspace X. a \<in> K}"
using K by auto
qed
lemma proper_map_compose:
assumes "proper_map X Y f" "proper_map Y Z g"
shows "proper_map X Z (g \<circ> f)"
proof -
have "closed_map X Y f" and f: "\<And>K. compactin Y K \<Longrightarrow> compactin X {x \<in> topspace X. f x \<in> K}"
and "closed_map Y Z g" and g: "\<And>K. compactin Z K \<Longrightarrow> compactin Y {x \<in> topspace Y. g x \<in> K}"
using assms by (auto simp: proper_map_alt)
show ?thesis
unfolding proper_map_alt
proof (intro conjI allI impI)
show "closed_map X Z (g \<circ> f)"
using \<open>closed_map X Y f\<close> \<open>closed_map Y Z g\<close> closed_map_compose by blast
have "{x \<in> topspace X. g (f x) \<in> K} = {x \<in> topspace X. f x \<in> {b \<in> topspace Y. g b \<in> K}}" for K
using \<open>closed_map X Y f\<close> closed_map_imp_subset_topspace by blast
then show "compactin X {x \<in> topspace X. (g \<circ> f) x \<in> K}"
if "compactin Z K" for K
using f [OF g [OF that]] by auto
qed
qed
lemma proper_map_const:
"proper_map X Y (\<lambda>x. c) \<longleftrightarrow> compact_space X \<and> (topspace X = {} \<or> closedin Y {c})"
proof (cases "topspace X = {}")
case True
then show ?thesis
by (simp add: compact_space_topspace_empty proper_map_on_empty)
next
case False
have *: "compactin X {x \<in> topspace X. c = y}" if "compact_space X" for y
using that unfolding compact_space_def
by (metis (mono_tags, lifting) compactin_empty empty_subsetI mem_Collect_eq subsetI subset_antisym)
then show ?thesis
using closed_compactin closedin_subset
by (force simp: False proper_map_def closed_map_const compact_space_def)
qed
lemma proper_map_inclusion:
"S \<subseteq> topspace X \<Longrightarrow> proper_map (subtopology X S) X id \<longleftrightarrow> closedin X S \<and> (\<forall>k. compactin X k \<longrightarrow> compactin X (S \<inter> k))"
by (metis closed_Int_compactin closed_map_inclusion_eq inf.absorb_iff2 inj_on_id injective_imp_proper_eq_closed_map)
+lemma proper_map_into_subtopology:
+ "\<lbrakk>proper_map X Y f; f ` topspace X \<subseteq> C\<rbrakk> \<Longrightarrow> proper_map X (subtopology Y C) f"
+ by (simp add: closed_map_into_subtopology compactin_subtopology proper_map_alt)
+
+lemma proper_map_from_composition_left:
+ assumes gf: "proper_map X Z (g \<circ> f)" and contf: "continuous_map X Y f" and fim: "f ` topspace X = topspace Y"
+ shows "proper_map Y Z g"
+ unfolding proper_map_def
+proof (intro strip conjI)
+ show "closed_map Y Z g"
+ by (meson closed_map_from_composition_left gf contf fim proper_imp_closed_map)
+ fix z assume "z \<in> topspace Z"
+ have eq: "{y \<in> topspace Y. g y = z} = f ` {x \<in> topspace X. (g \<circ> f) x = z}"
+ using fim by force
+ show "compactin Y {x \<in> topspace Y. g x = z}"
+ unfolding eq
+ proof (rule image_compactin [OF _ contf])
+ show "compactin X {x \<in> topspace X. (g \<circ> f) x = z}"
+ using \<open>z \<in> topspace Z\<close> gf proper_map_def by fastforce
+ qed
+qed
+
+lemma proper_map_from_composition_right_inj:
+ assumes gf: "proper_map X Z (g \<circ> f)" and fim: "f ` topspace X \<subseteq> topspace Y"
+ and contf: "continuous_map Y Z g" and inj: "inj_on g (topspace Y)"
+ shows "proper_map X Y f"
+ unfolding proper_map_def
+proof (intro strip conjI)
+ show "closed_map X Y f"
+ by (meson closed_map_from_composition_right assms proper_imp_closed_map)
+ fix y
+ assume "y \<in> topspace Y"
+ with fim inj have eq: "{x \<in> topspace X. f x = y} = {x \<in> topspace X. (g \<circ> f) x = g y}"
+ by (auto simp: image_subset_iff inj_onD)
+ show "compactin X {x \<in> topspace X. f x = y}"
+ unfolding eq
+ by (smt (verit) Collect_cong \<open>y \<in> topspace Y\<close> contf continuous_map_closedin gf proper_map_def)
+qed
+
subsection\<open>Perfect maps (proper, continuous and surjective)\<close>
definition perfect_map
where "perfect_map X Y f \<equiv> continuous_map X Y f \<and> proper_map X Y f \<and> f ` (topspace X) = topspace Y"
lemma homeomorphic_imp_perfect_map:
"homeomorphic_map X Y f \<Longrightarrow> perfect_map X Y f"
by (simp add: homeomorphic_eq_everything_map homeomorphic_imp_proper_map perfect_map_def)
lemma perfect_imp_quotient_map:
"perfect_map X Y f \<Longrightarrow> quotient_map X Y f"
by (simp add: continuous_closed_imp_quotient_map perfect_map_def proper_map_def)
lemma homeomorphic_eq_injective_perfect_map:
"homeomorphic_map X Y f \<longleftrightarrow> perfect_map X Y f \<and> inj_on f (topspace X)"
using homeomorphic_imp_perfect_map homeomorphic_map_def perfect_imp_quotient_map by blast
lemma perfect_injective_eq_homeomorphic_map:
"perfect_map X Y f \<and> inj_on f (topspace X) \<longleftrightarrow> homeomorphic_map X Y f"
by (simp add: homeomorphic_eq_injective_perfect_map)
lemma perfect_map_id [simp]: "perfect_map X X id"
by (simp add: homeomorphic_imp_perfect_map)
lemma perfect_map_compose:
"\<lbrakk>perfect_map X Y f; perfect_map Y Z g\<rbrakk> \<Longrightarrow> perfect_map X Z (g \<circ> f)"
by (meson continuous_map_compose perfect_imp_quotient_map perfect_map_def proper_map_compose quotient_map_compose_eq quotient_map_def)
lemma perfect_imp_continuous_map:
"perfect_map X Y f \<Longrightarrow> continuous_map X Y f"
using perfect_map_def by blast
lemma perfect_imp_closed_map:
"perfect_map X Y f \<Longrightarrow> closed_map X Y f"
by (simp add: perfect_map_def proper_map_def)
lemma perfect_imp_proper_map:
"perfect_map X Y f \<Longrightarrow> proper_map X Y f"
by (simp add: perfect_map_def)
lemma perfect_imp_surjective_map:
"perfect_map X Y f \<Longrightarrow> f ` (topspace X) = topspace Y"
by (simp add: perfect_map_def)
+lemma perfect_map_from_composition_left:
+ assumes "perfect_map X Z (g \<circ> f)" and "continuous_map X Y f"
+ and "continuous_map Y Z g" and "f ` topspace X = topspace Y"
+ shows "perfect_map Y Z g"
+ using assms unfolding perfect_map_def
+ by (metis image_comp proper_map_from_composition_left)
+
end
diff --git a/src/HOL/Analysis/Analysis.thy b/src/HOL/Analysis/Analysis.thy
--- a/src/HOL/Analysis/Analysis.thy
+++ b/src/HOL/Analysis/Analysis.thy
@@ -1,57 +1,58 @@
theory Analysis
imports
(* Linear Algebra *)
Convex
Determinants
(* Topology *)
FSigma
Sum_Topology
Abstract_Topological_Spaces
+ Abstract_Metric_Spaces
Connected
Abstract_Limits
Isolated
(* Functional Analysis *)
Elementary_Normed_Spaces
Norm_Arith
(* Vector Analysis *)
Convex_Euclidean_Space
Operator_Norm
(* Unsorted *)
Line_Segment
Derivative
Cartesian_Euclidean_Space
Weierstrass_Theorems
(* Measure and Integration Theory *)
Ball_Volume
Integral_Test
Improper_Integral
Equivalence_Measurable_On_Borel
Lebesgue_Integral_Substitution
Embed_Measure
Complete_Measure
Radon_Nikodym
Fashoda_Theorem
Cross3
Homeomorphism
Bounded_Continuous_Function
Abstract_Topology
Product_Topology
Lindelof_Spaces
Infinite_Products
Infinite_Sum
Infinite_Set_Sum
Polytope
Jordan_Curve
Poly_Roots
Generalised_Binomial_Theorem
Gamma_Function
Change_Of_Vars
Multivariate_Analysis
Simplex_Content
FPS_Convergence
Smooth_Paths
Abstract_Euclidean_Space
Function_Metric
begin
end
diff --git a/src/HOL/Analysis/Measure_Space.thy b/src/HOL/Analysis/Measure_Space.thy
--- a/src/HOL/Analysis/Measure_Space.thy
+++ b/src/HOL/Analysis/Measure_Space.thy
@@ -1,3767 +1,3763 @@
(* Title: HOL/Analysis/Measure_Space.thy
Author: Lawrence C Paulson
Author: Johannes Hölzl, TU München
Author: Armin Heller, TU München
*)
section \<open>Measure Spaces\<close>
theory Measure_Space
imports
Measurable "HOL-Library.Extended_Nonnegative_Real"
begin
subsection\<^marker>\<open>tag unimportant\<close> "Relate extended reals and the indicator function"
lemma suminf_cmult_indicator:
fixes f :: "nat \<Rightarrow> ennreal"
assumes "disjoint_family A" "x \<in> A i"
shows "(\<Sum>n. f n * indicator (A n) x) = f i"
proof -
have **: "\<And>n. f n * indicator (A n) x = (if n = i then f n else 0 :: ennreal)"
using \<open>x \<in> A i\<close> assms unfolding disjoint_family_on_def indicator_def by auto
then have "\<And>n. (\<Sum>j<n. f j * indicator (A j) x) = (if i < n then f i else 0 :: ennreal)"
by (auto simp: sum.If_cases)
moreover have "(SUP n. if i < n then f i else 0) = (f i :: ennreal)"
proof (rule SUP_eqI)
fix y :: ennreal assume "\<And>n. n \<in> UNIV \<Longrightarrow> (if i < n then f i else 0) \<le> y"
from this[of "Suc i"] show "f i \<le> y" by auto
qed (use assms in simp)
ultimately show ?thesis using assms
by (simp add: suminf_eq_SUP)
qed
lemma suminf_indicator:
assumes "disjoint_family A"
shows "(\<Sum>n. indicator (A n) x :: ennreal) = indicator (\<Union>i. A i) x"
proof cases
assume *: "x \<in> (\<Union>i. A i)"
then obtain i where "x \<in> A i" by auto
from suminf_cmult_indicator[OF assms(1), OF \<open>x \<in> A i\<close>, of "\<lambda>k. 1"]
show ?thesis using * by simp
qed simp
lemma sum_indicator_disjoint_family:
fixes f :: "'d \<Rightarrow> 'e::semiring_1"
assumes d: "disjoint_family_on A P" and "x \<in> A j" and "finite P" and "j \<in> P"
shows "(\<Sum>i\<in>P. f i * indicator (A i) x) = f j"
proof -
have "P \<inter> {i. x \<in> A i} = {j}"
using d \<open>x \<in> A j\<close> \<open>j \<in> P\<close> unfolding disjoint_family_on_def
by auto
with \<open>finite P\<close> show ?thesis
by (simp add: indicator_def)
qed
text \<open>
The type for emeasure spaces is already defined in \<^theory>\<open>HOL-Analysis.Sigma_Algebra\<close>, as it
is also used to represent sigma algebras (with an arbitrary emeasure).
\<close>
subsection\<^marker>\<open>tag unimportant\<close> "Extend binary sets"
lemma LIMSEQ_binaryset:
assumes f: "f {} = 0"
shows "(\<lambda>n. \<Sum>i<n. f (binaryset A B i)) \<longlonglongrightarrow> f A + f B"
proof -
have "(\<lambda>n. \<Sum>i < Suc (Suc n). f (binaryset A B i)) = (\<lambda>n. f A + f B)"
proof
fix n
show "(\<Sum>i < Suc (Suc n). f (binaryset A B i)) = f A + f B"
by (induct n) (auto simp add: binaryset_def f)
qed
moreover
have "\<dots> \<longlonglongrightarrow> f A + f B" by (rule tendsto_const)
ultimately have "(\<lambda>n. \<Sum>i< n+2. f (binaryset A B i)) \<longlonglongrightarrow> f A + f B"
by simp
thus ?thesis by (rule LIMSEQ_offset [where k=2])
qed
lemma binaryset_sums:
assumes f: "f {} = 0"
shows "(\<lambda>n. f (binaryset A B n)) sums (f A + f B)"
using LIMSEQ_binaryset f sums_def by blast
lemma suminf_binaryset_eq:
fixes f :: "'a set \<Rightarrow> 'b::{comm_monoid_add, t2_space}"
shows "f {} = 0 \<Longrightarrow> (\<Sum>n. f (binaryset A B n)) = f A + f B"
by (metis binaryset_sums sums_unique)
subsection\<^marker>\<open>tag unimportant\<close> \<open>Properties of a premeasure \<^term>\<open>\<mu>\<close>\<close>
text \<open>
The definitions for \<^const>\<open>positive\<close> and \<^const>\<open>countably_additive\<close> should be here, but they are
necessary to define \<^typ>\<open>'a measure\<close> in \<^theory>\<open>HOL-Analysis.Sigma_Algebra\<close>.
\<close>
definition subadditive where
"subadditive M f \<longleftrightarrow> (\<forall>x\<in>M. \<forall>y\<in>M. x \<inter> y = {} \<longrightarrow> f (x \<union> y) \<le> f x + f y)"
lemma subadditiveD: "subadditive M f \<Longrightarrow> x \<inter> y = {} \<Longrightarrow> x \<in> M \<Longrightarrow> y \<in> M \<Longrightarrow> f (x \<union> y) \<le> f x + f y"
by (auto simp add: subadditive_def)
definition countably_subadditive where
"countably_subadditive M f \<longleftrightarrow>
(\<forall>A. range A \<subseteq> M \<longrightarrow> disjoint_family A \<longrightarrow> (\<Union>i. A i) \<in> M \<longrightarrow> (f (\<Union>i. A i) \<le> (\<Sum>i. f (A i))))"
lemma (in ring_of_sets) countably_subadditive_subadditive:
fixes f :: "'a set \<Rightarrow> ennreal"
assumes f: "positive M f" and cs: "countably_subadditive M f"
shows "subadditive M f"
proof (auto simp add: subadditive_def)
fix x y
assume x: "x \<in> M" and y: "y \<in> M" and "x \<inter> y = {}"
hence "disjoint_family (binaryset x y)"
by (auto simp add: disjoint_family_on_def binaryset_def)
hence "range (binaryset x y) \<subseteq> M \<longrightarrow>
(\<Union>i. binaryset x y i) \<in> M \<longrightarrow>
f (\<Union>i. binaryset x y i) \<le> (\<Sum> n. f (binaryset x y n))"
using cs by (auto simp add: countably_subadditive_def)
hence "{x,y,{}} \<subseteq> M \<longrightarrow> x \<union> y \<in> M \<longrightarrow>
f (x \<union> y) \<le> (\<Sum> n. f (binaryset x y n))"
by (simp add: range_binaryset_eq UN_binaryset_eq)
thus "f (x \<union> y) \<le> f x + f y" using f x y
by (auto simp add: Un o_def suminf_binaryset_eq positive_def)
qed
definition additive where
"additive M \<mu> \<longleftrightarrow> (\<forall>x\<in>M. \<forall>y\<in>M. x \<inter> y = {} \<longrightarrow> \<mu> (x \<union> y) = \<mu> x + \<mu> y)"
definition increasing where
"increasing M \<mu> \<longleftrightarrow> (\<forall>x\<in>M. \<forall>y\<in>M. x \<subseteq> y \<longrightarrow> \<mu> x \<le> \<mu> y)"
lemma positiveD1: "positive M f \<Longrightarrow> f {} = 0" by (auto simp: positive_def)
lemma positiveD_empty:
"positive M f \<Longrightarrow> f {} = 0"
by (auto simp add: positive_def)
lemma additiveD:
"additive M f \<Longrightarrow> x \<inter> y = {} \<Longrightarrow> x \<in> M \<Longrightarrow> y \<in> M \<Longrightarrow> f (x \<union> y) = f x + f y"
by (auto simp add: additive_def)
lemma increasingD:
"increasing M f \<Longrightarrow> x \<subseteq> y \<Longrightarrow> x\<in>M \<Longrightarrow> y\<in>M \<Longrightarrow> f x \<le> f y"
by (auto simp add: increasing_def)
lemma countably_additiveI[case_names countably]:
"(\<And>A. \<lbrakk>range A \<subseteq> M; disjoint_family A; (\<Union>i. A i) \<in> M\<rbrakk> \<Longrightarrow> (\<Sum>i. f(A i)) = f(\<Union>i. A i))
\<Longrightarrow> countably_additive M f"
by (simp add: countably_additive_def)
lemma (in ring_of_sets) disjointed_additive:
assumes f: "positive M f" "additive M f" and A: "range A \<subseteq> M" "incseq A"
shows "(\<Sum>i\<le>n. f (disjointed A i)) = f (A n)"
proof (induct n)
case (Suc n)
then have "(\<Sum>i\<le>Suc n. f (disjointed A i)) = f (A n) + f (disjointed A (Suc n))"
by simp
also have "\<dots> = f (A n \<union> disjointed A (Suc n))"
using A by (subst f(2)[THEN additiveD]) (auto simp: disjointed_mono)
also have "A n \<union> disjointed A (Suc n) = A (Suc n)"
using \<open>incseq A\<close> by (auto dest: incseq_SucD simp: disjointed_mono)
finally show ?case .
qed simp
lemma (in ring_of_sets) additive_sum:
fixes A:: "'i \<Rightarrow> 'a set"
assumes f: "positive M f" and ad: "additive M f" and "finite S"
and A: "A`S \<subseteq> M"
and disj: "disjoint_family_on A S"
shows "(\<Sum>i\<in>S. f (A i)) = f (\<Union>i\<in>S. A i)"
using \<open>finite S\<close> disj A
proof induct
case empty show ?case using f by (simp add: positive_def)
next
case (insert s S)
then have "A s \<inter> (\<Union>i\<in>S. A i) = {}"
by (auto simp add: disjoint_family_on_def neq_iff)
moreover
have "A s \<in> M" using insert by blast
moreover have "(\<Union>i\<in>S. A i) \<in> M"
using insert \<open>finite S\<close> by auto
ultimately have "f (A s \<union> (\<Union>i\<in>S. A i)) = f (A s) + f(\<Union>i\<in>S. A i)"
using ad UNION_in_sets A by (auto simp add: additive_def)
with insert show ?case using ad disjoint_family_on_mono[of S "insert s S" A]
by (auto simp add: additive_def subset_insertI)
qed
lemma (in ring_of_sets) additive_increasing:
fixes f :: "'a set \<Rightarrow> ennreal"
assumes posf: "positive M f" and addf: "additive M f"
shows "increasing M f"
proof (auto simp add: increasing_def)
fix x y
assume xy: "x \<in> M" "y \<in> M" "x \<subseteq> y"
then have "y - x \<in> M" by auto
then have "f x + 0 \<le> f x + f (y-x)" by (intro add_left_mono zero_le)
also have "\<dots> = f (x \<union> (y-x))"
by (metis addf Diff_disjoint \<open>y - x \<in> M\<close> additiveD xy(1))
also have "\<dots> = f y"
by (metis Un_Diff_cancel Un_absorb1 xy(3))
finally show "f x \<le> f y" by simp
qed
lemma (in ring_of_sets) subadditive:
fixes f :: "'a set \<Rightarrow> ennreal"
assumes f: "positive M f" "additive M f" and A: "A`S \<subseteq> M" and S: "finite S"
shows "f (\<Union>i\<in>S. A i) \<le> (\<Sum>i\<in>S. f (A i))"
using S A
proof (induct S)
case empty thus ?case using f by (auto simp: positive_def)
next
case (insert x F)
hence in_M: "A x \<in> M" "(\<Union>i\<in>F. A i) \<in> M" "(\<Union>i\<in>F. A i) - A x \<in> M" using A by force+
have subs: "(\<Union>i\<in>F. A i) - A x \<subseteq> (\<Union>i\<in>F. A i)" by auto
have "(\<Union>i\<in>(insert x F). A i) = A x \<union> ((\<Union>i\<in>F. A i) - A x)" by auto
hence "f (\<Union>i\<in>(insert x F). A i) = f (A x \<union> ((\<Union>i\<in>F. A i) - A x))"
by simp
also have "\<dots> = f (A x) + f ((\<Union>i\<in>F. A i) - A x)"
using f(2) by (rule additiveD) (insert in_M, auto)
also have "\<dots> \<le> f (A x) + f (\<Union>i\<in>F. A i)"
using additive_increasing[OF f] in_M subs
by (simp add: increasingD)
also have "\<dots> \<le> f (A x) + (\<Sum>i\<in>F. f (A i))"
using insert by (auto intro: add_left_mono)
finally show "f (\<Union>i\<in>(insert x F). A i) \<le> (\<Sum>i\<in>(insert x F). f (A i))"
by (simp add: insert)
qed
lemma (in ring_of_sets) countably_additive_additive:
fixes f :: "'a set \<Rightarrow> ennreal"
assumes posf: "positive M f" and ca: "countably_additive M f"
shows "additive M f"
proof (auto simp add: additive_def)
fix x y
assume x: "x \<in> M" and y: "y \<in> M" and "x \<inter> y = {}"
hence "disjoint_family (binaryset x y)"
by (auto simp add: disjoint_family_on_def binaryset_def)
hence "range (binaryset x y) \<subseteq> M \<longrightarrow>
(\<Union>i. binaryset x y i) \<in> M \<longrightarrow>
f (\<Union>i. binaryset x y i) = (\<Sum> n. f (binaryset x y n))"
using ca by (simp add: countably_additive_def)
hence "{x,y,{}} \<subseteq> M \<longrightarrow> x \<union> y \<in> M \<longrightarrow> f (x \<union> y) = (\<Sum>n. f (binaryset x y n))"
by (simp add: range_binaryset_eq UN_binaryset_eq)
thus "f (x \<union> y) = f x + f y" using posf x y
by (auto simp add: Un suminf_binaryset_eq positive_def)
qed
lemma (in algebra) increasing_additive_bound:
fixes A:: "nat \<Rightarrow> 'a set" and f :: "'a set \<Rightarrow> ennreal"
assumes f: "positive M f" and ad: "additive M f"
and inc: "increasing M f"
and A: "range A \<subseteq> M"
and disj: "disjoint_family A"
shows "(\<Sum>i. f (A i)) \<le> f \<Omega>"
proof (safe intro!: suminf_le_const)
fix N
note disj_N = disjoint_family_on_mono[OF _ disj, of "{..<N}"]
have "(\<Sum>i<N. f (A i)) = f (\<Union>i\<in>{..<N}. A i)"
using A by (intro additive_sum [OF f ad]) (auto simp: disj_N)
also have "\<dots> \<le> f \<Omega>" using space_closed A
by (intro increasingD[OF inc] finite_UN) auto
finally show "(\<Sum>i<N. f (A i)) \<le> f \<Omega>" by simp
qed (insert f A, auto simp: positive_def)
lemma (in ring_of_sets) countably_additiveI_finite:
fixes \<mu> :: "'a set \<Rightarrow> ennreal"
assumes "finite \<Omega>" "positive M \<mu>" "additive M \<mu>"
shows "countably_additive M \<mu>"
proof (rule countably_additiveI)
fix F :: "nat \<Rightarrow> 'a set" assume F: "range F \<subseteq> M" "(\<Union>i. F i) \<in> M" and disj: "disjoint_family F"
have "\<forall>i. F i \<noteq> {} \<longrightarrow> (\<exists>x. x \<in> F i)" by auto
then obtain f where f: "\<And>i. F i \<noteq> {} \<Longrightarrow> f i \<in> F i" by metis
have finU: "finite (\<Union>i. F i)"
by (metis F(2) assms(1) infinite_super sets_into_space)
have F_subset: "{i. \<mu> (F i) \<noteq> 0} \<subseteq> {i. F i \<noteq> {}}"
by (auto simp: positiveD_empty[OF \<open>positive M \<mu>\<close>])
moreover have fin_not_empty: "finite {i. F i \<noteq> {}}"
proof (rule finite_imageD)
from f have "f`{i. F i \<noteq> {}} \<subseteq> (\<Union>i. F i)" by auto
then show "finite (f`{i. F i \<noteq> {}})"
by (simp add: finU finite_subset)
show inj_f: "inj_on f {i. F i \<noteq> {}}"
using f disj
by (simp add: inj_on_def disjoint_family_on_def disjoint_iff) metis
qed
ultimately have fin_not_0: "finite {i. \<mu> (F i) \<noteq> 0}"
by (rule finite_subset)
have disj_not_empty: "disjoint_family_on F {i. F i \<noteq> {}}"
using disj by (auto simp: disjoint_family_on_def)
from fin_not_0 have "(\<Sum>i. \<mu> (F i)) = (\<Sum>i | \<mu> (F i) \<noteq> 0. \<mu> (F i))"
by (rule suminf_finite) auto
also have "\<dots> = (\<Sum>i | F i \<noteq> {}. \<mu> (F i))"
using fin_not_empty F_subset by (rule sum.mono_neutral_left) auto
also have "\<dots> = \<mu> (\<Union>i\<in>{i. F i \<noteq> {}}. F i)"
using \<open>positive M \<mu>\<close> \<open>additive M \<mu>\<close> fin_not_empty disj_not_empty F by (intro additive_sum) auto
also have "\<dots> = \<mu> (\<Union>i. F i)"
by (rule arg_cong[where f=\<mu>]) auto
finally show "(\<Sum>i. \<mu> (F i)) = \<mu> (\<Union>i. F i)" .
qed
lemma (in ring_of_sets) countably_additive_iff_continuous_from_below:
fixes f :: "'a set \<Rightarrow> ennreal"
assumes f: "positive M f" "additive M f"
shows "countably_additive M f \<longleftrightarrow>
(\<forall>A. range A \<subseteq> M \<longrightarrow> incseq A \<longrightarrow> (\<Union>i. A i) \<in> M \<longrightarrow> (\<lambda>i. f (A i)) \<longlonglongrightarrow> f (\<Union>i. A i))"
unfolding countably_additive_def
proof safe
assume count_sum: "\<forall>A. range A \<subseteq> M \<longrightarrow> disjoint_family A \<longrightarrow> \<Union>(A ` UNIV) \<in> M \<longrightarrow> (\<Sum>i. f (A i)) = f (\<Union>(A ` UNIV))"
fix A :: "nat \<Rightarrow> 'a set" assume A: "range A \<subseteq> M" "incseq A" "(\<Union>i. A i) \<in> M"
then have dA: "range (disjointed A) \<subseteq> M" by (auto simp: range_disjointed_sets)
with count_sum[THEN spec, of "disjointed A"] A(3)
have f_UN: "(\<Sum>i. f (disjointed A i)) = f (\<Union>i. A i)"
by (auto simp: UN_disjointed_eq disjoint_family_disjointed)
moreover have "(\<lambda>n. (\<Sum>i<n. f (disjointed A i))) \<longlonglongrightarrow> (\<Sum>i. f (disjointed A i))"
by (simp add: summable_LIMSEQ)
from LIMSEQ_Suc[OF this]
have "(\<lambda>n. (\<Sum>i\<le>n. f (disjointed A i))) \<longlonglongrightarrow> (\<Sum>i. f (disjointed A i))"
unfolding lessThan_Suc_atMost .
moreover have "\<And>n. (\<Sum>i\<le>n. f (disjointed A i)) = f (A n)"
using disjointed_additive[OF f A(1,2)] .
ultimately show "(\<lambda>i. f (A i)) \<longlonglongrightarrow> f (\<Union>i. A i)" by simp
next
assume cont[rule_format]: "\<forall>A. range A \<subseteq> M \<longrightarrow> incseq A \<longrightarrow> (\<Union>i. A i) \<in> M \<longrightarrow> (\<lambda>i. f (A i)) \<longlonglongrightarrow> f (\<Union>i. A i)"
fix A :: "nat \<Rightarrow> 'a set" assume A: "range A \<subseteq> M" "disjoint_family A" "(\<Union>i. A i) \<in> M"
have *: "(\<Union>n. (\<Union>i<n. A i)) = (\<Union>i. A i)" by auto
have "range (\<lambda>i. \<Union>i<i. A i) \<subseteq> M" "(\<Union>i. \<Union>i<i. A i) \<in> M"
using A * by auto
then have "(\<lambda>n. f (\<Union>i<n. A i)) \<longlonglongrightarrow> f (\<Union>i. A i)"
unfolding *[symmetric] by (force intro!: cont incseq_SucI)+
moreover have "\<And>n. f (\<Union>i<n. A i) = (\<Sum>i<n. f (A i))"
using A
by (intro additive_sum[OF f, symmetric]) (auto intro: disjoint_family_on_mono)
ultimately
have "(\<lambda>i. f (A i)) sums f (\<Union>i. A i)"
unfolding sums_def by simp
then show "(\<Sum>i. f (A i)) = f (\<Union>i. A i)"
by (metis sums_unique)
qed
lemma (in ring_of_sets) continuous_from_above_iff_empty_continuous:
fixes f :: "'a set \<Rightarrow> ennreal"
assumes f: "positive M f" "additive M f"
shows "(\<forall>A. range A \<subseteq> M \<longrightarrow> decseq A \<longrightarrow> (\<Inter>i. A i) \<in> M \<longrightarrow> (\<forall>i. f (A i) \<noteq> \<infinity>) \<longrightarrow> (\<lambda>i. f (A i)) \<longlonglongrightarrow> f (\<Inter>i. A i))
\<longleftrightarrow> (\<forall>A. range A \<subseteq> M \<longrightarrow> decseq A \<longrightarrow> (\<Inter>i. A i) = {} \<longrightarrow> (\<forall>i. f (A i) \<noteq> \<infinity>) \<longrightarrow> (\<lambda>i. f (A i)) \<longlonglongrightarrow> 0)"
proof safe
assume cont[rule_format]: "(\<forall>A. range A \<subseteq> M \<longrightarrow> decseq A \<longrightarrow> (\<Inter>i. A i) \<in> M \<longrightarrow> (\<forall>i. f (A i) \<noteq> \<infinity>) \<longrightarrow> (\<lambda>i. f (A i)) \<longlonglongrightarrow> f (\<Inter>i. A i))"
fix A :: "nat \<Rightarrow> 'a set"
assume A: "range A \<subseteq> M" "decseq A" "(\<Inter>i. A i) = {}" "\<forall>i. f (A i) \<noteq> \<infinity>"
with cont[of A] show "(\<lambda>i. f (A i)) \<longlonglongrightarrow> 0"
using \<open>positive M f\<close>[unfolded positive_def] by auto
next
assume cont[rule_format]: "\<forall>A. range A \<subseteq> M \<longrightarrow> decseq A \<longrightarrow> (\<Inter>i. A i) = {} \<longrightarrow> (\<forall>i. f (A i) \<noteq> \<infinity>) \<longrightarrow> (\<lambda>i. f (A i)) \<longlonglongrightarrow> 0"
fix A :: "nat \<Rightarrow> 'a set"
assume A: "range A \<subseteq> M" "decseq A" "(\<Inter>i. A i) \<in> M" "\<forall>i. f (A i) \<noteq> \<infinity>"
have f_mono: "\<And>a b. a \<in> M \<Longrightarrow> b \<in> M \<Longrightarrow> a \<subseteq> b \<Longrightarrow> f a \<le> f b"
using additive_increasing[OF f] unfolding increasing_def by simp
have decseq_fA: "decseq (\<lambda>i. f (A i))"
using A by (auto simp: decseq_def intro!: f_mono)
have decseq: "decseq (\<lambda>i. A i - (\<Inter>i. A i))"
using A by (auto simp: decseq_def)
then have decseq_f: "decseq (\<lambda>i. f (A i - (\<Inter>i. A i)))"
using A unfolding decseq_def by (auto intro!: f_mono Diff)
have "f (\<Inter>x. A x) \<le> f (A 0)"
using A by (auto intro!: f_mono)
then have f_Int_fin: "f (\<Inter>x. A x) \<noteq> \<infinity>"
using A by (auto simp: top_unique)
have f_fin: "f (A i - (\<Inter>i. A i)) \<noteq> \<infinity>" for i
using A by (metis Diff Diff_subset f_mono infinity_ennreal_def range_subsetD top_unique)
have "(\<lambda>i. f (A i - (\<Inter>i. A i))) \<longlonglongrightarrow> 0"
proof (intro cont[ OF _ decseq _ f_fin])
show "range (\<lambda>i. A i - (\<Inter>i. A i)) \<subseteq> M" "(\<Inter>i. A i - (\<Inter>i. A i)) = {}"
using A by auto
qed
with INF_Lim decseq_f have "(INF n. f (A n - (\<Inter>i. A i))) = 0" by metis
moreover have "(INF n. f (\<Inter>i. A i)) = f (\<Inter>i. A i)"
by auto
ultimately have "(INF n. f (A n - (\<Inter>i. A i)) + f (\<Inter>i. A i)) = 0 + f (\<Inter>i. A i)"
using A(4) f_fin f_Int_fin
using INF_ennreal_add_const by presburger
moreover {
fix n
have "f (A n - (\<Inter>i. A i)) + f (\<Inter>i. A i) = f ((A n - (\<Inter>i. A i)) \<union> (\<Inter>i. A i))"
using A by (subst f(2)[THEN additiveD]) auto
also have "(A n - (\<Inter>i. A i)) \<union> (\<Inter>i. A i) = A n"
by auto
finally have "f (A n - (\<Inter>i. A i)) + f (\<Inter>i. A i) = f (A n)" . }
ultimately have "(INF n. f (A n)) = f (\<Inter>i. A i)"
by simp
with LIMSEQ_INF[OF decseq_fA]
show "(\<lambda>i. f (A i)) \<longlonglongrightarrow> f (\<Inter>i. A i)" by simp
qed
lemma (in ring_of_sets) empty_continuous_imp_continuous_from_below:
fixes f :: "'a set \<Rightarrow> ennreal"
assumes f: "positive M f" "additive M f" "\<forall>A\<in>M. f A \<noteq> \<infinity>"
assumes cont: "\<forall>A. range A \<subseteq> M \<longrightarrow> decseq A \<longrightarrow> (\<Inter>i. A i) = {} \<longrightarrow> (\<lambda>i. f (A i)) \<longlonglongrightarrow> 0"
assumes A: "range A \<subseteq> M" "incseq A" "(\<Union>i. A i) \<in> M"
shows "(\<lambda>i. f (A i)) \<longlonglongrightarrow> f (\<Union>i. A i)"
proof -
from A have "(\<lambda>i. f ((\<Union>i. A i) - A i)) \<longlonglongrightarrow> 0"
by (intro cont[rule_format]) (auto simp: decseq_def incseq_def)
moreover
{ fix i
have "f ((\<Union>i. A i) - A i \<union> A i) = f ((\<Union>i. A i) - A i) + f (A i)"
using A by (intro f(2)[THEN additiveD]) auto
also have "((\<Union>i. A i) - A i) \<union> A i = (\<Union>i. A i)"
by auto
finally have "f ((\<Union>i. A i) - A i) = f (\<Union>i. A i) - f (A i)"
using assms f by fastforce
}
moreover have "\<forall>\<^sub>F i in sequentially. f (A i) \<le> f (\<Union>i. A i)"
using increasingD[OF additive_increasing[OF f(1, 2)], of "A _" "\<Union>i. A i"] A
by (auto intro!: always_eventually simp: subset_eq)
ultimately show "(\<lambda>i. f (A i)) \<longlonglongrightarrow> f (\<Union>i. A i)"
by (auto intro: ennreal_tendsto_const_minus)
qed
lemma (in ring_of_sets) empty_continuous_imp_countably_additive:
fixes f :: "'a set \<Rightarrow> ennreal"
assumes f: "positive M f" "additive M f" and fin: "\<forall>A\<in>M. f A \<noteq> \<infinity>"
assumes cont: "\<And>A. range A \<subseteq> M \<Longrightarrow> decseq A \<Longrightarrow> (\<Inter>i. A i) = {} \<Longrightarrow> (\<lambda>i. f (A i)) \<longlonglongrightarrow> 0"
shows "countably_additive M f"
using countably_additive_iff_continuous_from_below[OF f]
using empty_continuous_imp_continuous_from_below[OF f fin] cont
by blast
subsection\<^marker>\<open>tag unimportant\<close> \<open>Properties of \<^const>\<open>emeasure\<close>\<close>
lemma emeasure_positive: "positive (sets M) (emeasure M)"
by (cases M) (auto simp: sets_def emeasure_def Abs_measure_inverse measure_space_def)
lemma emeasure_empty[simp, intro]: "emeasure M {} = 0"
using emeasure_positive[of M] by (simp add: positive_def)
lemma emeasure_single_in_space: "emeasure M {x} \<noteq> 0 \<Longrightarrow> x \<in> space M"
using emeasure_notin_sets[of "{x}" M] by (auto dest: sets.sets_into_space zero_less_iff_neq_zero[THEN iffD2])
lemma emeasure_countably_additive: "countably_additive (sets M) (emeasure M)"
by (cases M) (auto simp: sets_def emeasure_def Abs_measure_inverse measure_space_def)
lemma suminf_emeasure:
"range A \<subseteq> sets M \<Longrightarrow> disjoint_family A \<Longrightarrow> (\<Sum>i. emeasure M (A i)) = emeasure M (\<Union>i. A i)"
using sets.countable_UN[of A UNIV M] emeasure_countably_additive[of M]
by (simp add: countably_additive_def)
lemma sums_emeasure:
"disjoint_family F \<Longrightarrow> (\<And>i. F i \<in> sets M) \<Longrightarrow> (\<lambda>i. emeasure M (F i)) sums emeasure M (\<Union>i. F i)"
unfolding sums_iff by (intro conjI suminf_emeasure) auto
lemma emeasure_additive: "additive (sets M) (emeasure M)"
by (metis sets.countably_additive_additive emeasure_positive emeasure_countably_additive)
lemma plus_emeasure:
"a \<in> sets M \<Longrightarrow> b \<in> sets M \<Longrightarrow> a \<inter> b = {} \<Longrightarrow> emeasure M a + emeasure M b = emeasure M (a \<union> b)"
using additiveD[OF emeasure_additive] ..
lemma emeasure_Un:
"A \<in> sets M \<Longrightarrow> B \<in> sets M \<Longrightarrow> emeasure M (A \<union> B) = emeasure M A + emeasure M (B - A)"
using plus_emeasure[of A M "B - A"] by auto
lemma emeasure_Un_Int:
assumes "A \<in> sets M" "B \<in> sets M"
shows "emeasure M A + emeasure M B = emeasure M (A \<union> B) + emeasure M (A \<inter> B)"
proof -
have "A = (A-B) \<union> (A \<inter> B)" by auto
then have "emeasure M A = emeasure M (A-B) + emeasure M (A \<inter> B)"
by (metis Diff_Diff_Int Diff_disjoint assms plus_emeasure sets.Diff)
moreover have "A \<union> B = (A-B) \<union> B" by auto
then have "emeasure M (A \<union> B) = emeasure M (A-B) + emeasure M B"
by (metis Diff_disjoint Int_commute assms plus_emeasure sets.Diff)
ultimately show ?thesis by (metis add.assoc add.commute)
qed
lemma sum_emeasure:
"F`I \<subseteq> sets M \<Longrightarrow> disjoint_family_on F I \<Longrightarrow> finite I \<Longrightarrow>
(\<Sum>i\<in>I. emeasure M (F i)) = emeasure M (\<Union>i\<in>I. F i)"
by (metis sets.additive_sum emeasure_positive emeasure_additive)
lemma emeasure_mono:
"a \<subseteq> b \<Longrightarrow> b \<in> sets M \<Longrightarrow> emeasure M a \<le> emeasure M b"
by (metis zero_le sets.additive_increasing emeasure_additive emeasure_notin_sets emeasure_positive increasingD)
lemma emeasure_space:
"emeasure M A \<le> emeasure M (space M)"
by (metis emeasure_mono emeasure_notin_sets sets.sets_into_space sets.top zero_le)
lemma emeasure_Diff:
assumes "emeasure M B \<noteq> \<infinity>"
and "A \<in> sets M" "B \<in> sets M" and "B \<subseteq> A"
shows "emeasure M (A - B) = emeasure M A - emeasure M B"
by (smt (verit, best) add_diff_self_ennreal assms emeasure_Un emeasure_mono
ennreal_add_left_cancel le_iff_sup)
lemma emeasure_compl:
"s \<in> sets M \<Longrightarrow> emeasure M s \<noteq> \<infinity> \<Longrightarrow> emeasure M (space M - s) = emeasure M (space M) - emeasure M s"
by (rule emeasure_Diff) (auto dest: sets.sets_into_space)
lemma Lim_emeasure_incseq:
"range A \<subseteq> sets M \<Longrightarrow> incseq A \<Longrightarrow> (\<lambda>i. (emeasure M (A i))) \<longlonglongrightarrow> emeasure M (\<Union>i. A i)"
using emeasure_countably_additive
by (auto simp add: sets.countably_additive_iff_continuous_from_below emeasure_positive
emeasure_additive)
lemma incseq_emeasure:
assumes "range B \<subseteq> sets M" "incseq B"
shows "incseq (\<lambda>i. emeasure M (B i))"
using assms by (auto simp: incseq_def intro!: emeasure_mono)
lemma SUP_emeasure_incseq:
assumes A: "range A \<subseteq> sets M" "incseq A"
shows "(SUP n. emeasure M (A n)) = emeasure M (\<Union>i. A i)"
using LIMSEQ_SUP[OF incseq_emeasure, OF A] Lim_emeasure_incseq[OF A]
by (simp add: LIMSEQ_unique)
lemma decseq_emeasure:
assumes "range B \<subseteq> sets M" "decseq B"
shows "decseq (\<lambda>i. emeasure M (B i))"
using assms by (auto simp: decseq_def intro!: emeasure_mono)
lemma INF_emeasure_decseq:
assumes A: "range A \<subseteq> sets M" and "decseq A"
and finite: "\<And>i. emeasure M (A i) \<noteq> \<infinity>"
shows "(INF n. emeasure M (A n)) = emeasure M (\<Inter>i. A i)"
proof -
have le_MI: "emeasure M (\<Inter>i. A i) \<le> emeasure M (A 0)"
using A by (auto intro!: emeasure_mono)
hence *: "emeasure M (\<Inter>i. A i) \<noteq> \<infinity>" using finite[of 0] by (auto simp: top_unique)
have "emeasure M (A 0) - (INF n. emeasure M (A n)) = (SUP n. emeasure M (A 0) - emeasure M (A n))"
by (simp add: ennreal_INF_const_minus)
also have "\<dots> = (SUP n. emeasure M (A 0 - A n))"
using A finite \<open>decseq A\<close>[unfolded decseq_def] by (subst emeasure_Diff) auto
also have "\<dots> = emeasure M (\<Union>i. A 0 - A i)"
proof (rule SUP_emeasure_incseq)
show "range (\<lambda>n. A 0 - A n) \<subseteq> sets M"
using A by auto
show "incseq (\<lambda>n. A 0 - A n)"
using \<open>decseq A\<close> by (auto simp add: incseq_def decseq_def)
qed
also have "\<dots> = emeasure M (A 0) - emeasure M (\<Inter>i. A i)"
using A finite * by (simp, subst emeasure_Diff) auto
finally show ?thesis
by (smt (verit, best) Inf_lower diff_diff_ennreal le_MI finite range_eqI)
qed
lemma INF_emeasure_decseq':
assumes A: "\<And>i. A i \<in> sets M" and "decseq A"
and finite: "\<exists>i. emeasure M (A i) \<noteq> \<infinity>"
shows "(INF n. emeasure M (A n)) = emeasure M (\<Inter>i. A i)"
proof -
from finite obtain i where i: "emeasure M (A i) < \<infinity>"
by (auto simp: less_top)
have fin: "i \<le> j \<Longrightarrow> emeasure M (A j) < \<infinity>" for j
by (rule le_less_trans[OF emeasure_mono i]) (auto intro!: decseqD[OF \<open>decseq A\<close>] A)
have "(INF n. emeasure M (A n)) = (INF n. emeasure M (A (n + i)))"
proof (rule INF_eq)
show "\<exists>j\<in>UNIV. emeasure M (A (j + i)) \<le> emeasure M (A i')" for i'
by (meson A \<open>decseq A\<close> decseq_def emeasure_mono iso_tuple_UNIV_I nat_le_iff_add)
qed auto
also have "\<dots> = emeasure M (INF n. (A (n + i)))"
using A \<open>decseq A\<close> fin by (intro INF_emeasure_decseq) (auto simp: decseq_def less_top)
also have "(INF n. (A (n + i))) = (INF n. A n)"
by (meson INF_eq UNIV_I assms(2) decseqD le_add1)
finally show ?thesis .
qed
lemma emeasure_INT_decseq_subset:
fixes F :: "nat \<Rightarrow> 'a set"
assumes I: "I \<noteq> {}" and F: "\<And>i j. i \<in> I \<Longrightarrow> j \<in> I \<Longrightarrow> i \<le> j \<Longrightarrow> F j \<subseteq> F i"
assumes F_sets[measurable]: "\<And>i. i \<in> I \<Longrightarrow> F i \<in> sets M"
and fin: "\<And>i. i \<in> I \<Longrightarrow> emeasure M (F i) \<noteq> \<infinity>"
shows "emeasure M (\<Inter>i\<in>I. F i) = (INF i\<in>I. emeasure M (F i))"
proof cases
assume "finite I"
have "(\<Inter>i\<in>I. F i) = F (Max I)"
using I \<open>finite I\<close> by (intro antisym INF_lower INF_greatest F) auto
moreover have "(INF i\<in>I. emeasure M (F i)) = emeasure M (F (Max I))"
using I \<open>finite I\<close> by (intro antisym INF_lower INF_greatest F emeasure_mono) auto
ultimately show ?thesis
by simp
next
assume "infinite I"
define L where "L n = (LEAST i. i \<in> I \<and> i \<ge> n)" for n
have L: "L n \<in> I \<and> n \<le> L n" for n
unfolding L_def
proof (rule LeastI_ex)
show "\<exists>x. x \<in> I \<and> n \<le> x"
using \<open>infinite I\<close> finite_subset[of I "{..< n}"]
by (rule_tac ccontr) (auto simp: not_le)
qed
have L_eq[simp]: "i \<in> I \<Longrightarrow> L i = i" for i
unfolding L_def by (intro Least_equality) auto
have L_mono: "i \<le> j \<Longrightarrow> L i \<le> L j" for i j
using L[of j] unfolding L_def by (intro Least_le) (auto simp: L_def)
have "emeasure M (\<Inter>i. F (L i)) = (INF i. emeasure M (F (L i)))"
proof (intro INF_emeasure_decseq[symmetric])
show "decseq (\<lambda>i. F (L i))"
using L by (intro antimonoI F L_mono) auto
qed (insert L fin, auto)
also have "\<dots> = (INF i\<in>I. emeasure M (F i))"
proof (intro antisym INF_greatest)
show "i \<in> I \<Longrightarrow> (INF i. emeasure M (F (L i))) \<le> emeasure M (F i)" for i
by (intro INF_lower2[of i]) auto
qed (insert L, auto intro: INF_lower)
also have "(\<Inter>i. F (L i)) = (\<Inter>i\<in>I. F i)"
proof (intro antisym INF_greatest)
show "i \<in> I \<Longrightarrow> (\<Inter>i. F (L i)) \<subseteq> F i" for i
by (intro INF_lower2[of i]) auto
qed (insert L, auto)
finally show ?thesis .
qed
lemma Lim_emeasure_decseq:
assumes A: "range A \<subseteq> sets M" "decseq A" and fin: "\<And>i. emeasure M (A i) \<noteq> \<infinity>"
shows "(\<lambda>i. emeasure M (A i)) \<longlonglongrightarrow> emeasure M (\<Inter>i. A i)"
using LIMSEQ_INF[OF decseq_emeasure, OF A]
using INF_emeasure_decseq[OF A fin] by simp
lemma emeasure_lfp'[consumes 1, case_names cont measurable]:
assumes "P M"
assumes cont: "sup_continuous F"
assumes *: "\<And>M A. P M \<Longrightarrow> (\<And>N. P N \<Longrightarrow> Measurable.pred N A) \<Longrightarrow> Measurable.pred M (F A)"
shows "emeasure M {x\<in>space M. lfp F x} = (SUP i. emeasure M {x\<in>space M. (F ^^ i) (\<lambda>x. False) x})"
proof -
have "emeasure M {x\<in>space M. lfp F x} = emeasure M (\<Union>i. {x\<in>space M. (F ^^ i) (\<lambda>x. False) x})"
using sup_continuous_lfp[OF cont] by (auto simp add: bot_fun_def intro!: arg_cong2[where f=emeasure])
moreover { fix i from \<open>P M\<close> have "{x\<in>space M. (F ^^ i) (\<lambda>x. False) x} \<in> sets M"
by (induct i arbitrary: M) (auto simp add: pred_def[symmetric] intro: *) }
moreover have "incseq (\<lambda>i. {x\<in>space M. (F ^^ i) (\<lambda>x. False) x})"
proof (rule incseq_SucI)
fix i
have "(F ^^ i) (\<lambda>x. False) \<le> (F ^^ (Suc i)) (\<lambda>x. False)"
proof (induct i)
case 0 show ?case by (simp add: le_fun_def)
next
case Suc thus ?case using monoD[OF sup_continuous_mono[OF cont] Suc] by auto
qed
then show "{x \<in> space M. (F ^^ i) (\<lambda>x. False) x} \<subseteq> {x \<in> space M. (F ^^ Suc i) (\<lambda>x. False) x}"
by auto
qed
ultimately show ?thesis
by (subst SUP_emeasure_incseq) auto
qed
lemma emeasure_lfp:
assumes [simp]: "\<And>s. sets (M s) = sets N"
assumes cont: "sup_continuous F" "sup_continuous f"
assumes meas: "\<And>P. Measurable.pred N P \<Longrightarrow> Measurable.pred N (F P)"
assumes iter: "\<And>P s. Measurable.pred N P \<Longrightarrow> P \<le> lfp F \<Longrightarrow> emeasure (M s) {x\<in>space N. F P x} = f (\<lambda>s. emeasure (M s) {x\<in>space N. P x}) s"
shows "emeasure (M s) {x\<in>space N. lfp F x} = lfp f s"
proof (subst lfp_transfer_bounded[where \<alpha>="\<lambda>F s. emeasure (M s) {x\<in>space N. F x}" and f=F , symmetric])
fix C assume "incseq C" "\<And>i. Measurable.pred N (C i)"
then show "(\<lambda>s. emeasure (M s) {x \<in> space N. (SUP i. C i) x}) = (SUP i. (\<lambda>s. emeasure (M s) {x \<in> space N. C i x}))"
unfolding SUP_apply
by (subst SUP_emeasure_incseq) (auto simp: mono_def fun_eq_iff intro!: arg_cong2[where f=emeasure])
qed (auto simp add: iter le_fun_def SUP_apply intro!: meas cont)
lemma emeasure_subadditive_finite:
"finite I \<Longrightarrow> A ` I \<subseteq> sets M \<Longrightarrow> emeasure M (\<Union>i\<in>I. A i) \<le> (\<Sum>i\<in>I. emeasure M (A i))"
by (rule sets.subadditive[OF emeasure_positive emeasure_additive]) auto
lemma emeasure_subadditive:
"A \<in> sets M \<Longrightarrow> B \<in> sets M \<Longrightarrow> emeasure M (A \<union> B) \<le> emeasure M A + emeasure M B"
using emeasure_subadditive_finite[of "{True, False}" "\<lambda>True \<Rightarrow> A | False \<Rightarrow> B" M] by simp
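text \<open>Editorial aside: the instantiation above encodes the two sets as a function on the
two-element index set \<open>{True, False}\<close>, so binary subadditivity is obtained as the special case
of \<open>emeasure_subadditive_finite\<close> for that index set.\<close>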
lemma emeasure_subadditive_countably:
assumes "range f \<subseteq> sets M"
shows "emeasure M (\<Union>i. f i) \<le> (\<Sum>i. emeasure M (f i))"
proof -
have "emeasure M (\<Union>i. f i) = emeasure M (\<Union>i. disjointed f i)"
unfolding UN_disjointed_eq ..
also have "\<dots> = (\<Sum>i. emeasure M (disjointed f i))"
using sets.range_disjointed_sets[OF assms] suminf_emeasure[of "disjointed f"]
by (simp add: disjoint_family_disjointed comp_def)
also have "\<dots> \<le> (\<Sum>i. emeasure M (f i))"
using sets.range_disjointed_sets[OF assms] assms
by (auto intro!: suminf_le emeasure_mono disjointed_subset)
finally show ?thesis .
qed
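text \<open>Editorial note: the proof uses the usual disjointification trick: \<open>disjointed f i\<close>
removes from \<open>f i\<close> everything already covered by \<open>f 0, \<dots>, f (i - 1)\<close>, giving a disjoint family
with the same union, to which countable additivity (\<open>suminf_emeasure\<close>) applies; monotonicity
then bounds each summand by \<open>emeasure M (f i)\<close>.\<close>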
lemma emeasure_insert:
assumes sets: "{x} \<in> sets M" "A \<in> sets M" and "x \<notin> A"
shows "emeasure M (insert x A) = emeasure M {x} + emeasure M A"
proof -
have "{x} \<inter> A = {}" using \<open>x \<notin> A\<close> by auto
from plus_emeasure[OF sets this] show ?thesis by simp
qed
lemma emeasure_insert_ne:
"A \<noteq> {} \<Longrightarrow> {x} \<in> sets M \<Longrightarrow> A \<in> sets M \<Longrightarrow> x \<notin> A \<Longrightarrow> emeasure M (insert x A) = emeasure M {x} + emeasure M A"
by (rule emeasure_insert)
lemma emeasure_eq_sum_singleton:
assumes "finite S" "\<And>x. x \<in> S \<Longrightarrow> {x} \<in> sets M"
shows "emeasure M S = (\<Sum>x\<in>S. emeasure M {x})"
using sum_emeasure[of "\<lambda>x. {x}" S M] assms
by (auto simp: disjoint_family_on_def subset_eq)
lemma sum_emeasure_cover:
assumes "finite S" and "A \<in> sets M" and br_in_M: "B ` S \<subseteq> sets M"
assumes A: "A \<subseteq> (\<Union>i\<in>S. B i)"
assumes disj: "disjoint_family_on B S"
shows "emeasure M A = (\<Sum>i\<in>S. emeasure M (A \<inter> (B i)))"
proof -
have "(\<Sum>i\<in>S. emeasure M (A \<inter> (B i))) = emeasure M (\<Union>i\<in>S. A \<inter> (B i))"
proof (rule sum_emeasure)
show "disjoint_family_on (\<lambda>i. A \<inter> B i) S"
using \<open>disjoint_family_on B S\<close>
unfolding disjoint_family_on_def by auto
qed (insert assms, auto)
also have "(\<Union>i\<in>S. A \<inter> (B i)) = A"
using A by auto
finally show ?thesis by simp
qed
lemma emeasure_eq_0:
"N \<in> sets M \<Longrightarrow> emeasure M N = 0 \<Longrightarrow> K \<subseteq> N \<Longrightarrow> emeasure M K = 0"
by (metis emeasure_mono order_eq_iff zero_le)
lemma emeasure_UN_eq_0:
assumes "\<And>i::nat. emeasure M (N i) = 0" and "range N \<subseteq> sets M"
shows "emeasure M (\<Union>i. N i) = 0"
proof -
have "emeasure M (\<Union>i. N i) \<le> 0"
using emeasure_subadditive_countably[OF assms(2)] assms(1) by simp
then show ?thesis
by (auto intro: antisym zero_le)
qed
lemma measure_eqI_finite:
assumes [simp]: "sets M = Pow A" "sets N = Pow A" and "finite A"
assumes eq: "\<And>a. a \<in> A \<Longrightarrow> emeasure M {a} = emeasure N {a}"
shows "M = N"
proof (rule measure_eqI)
fix X assume "X \<in> sets M"
then have X: "X \<subseteq> A" by auto
then have "emeasure M X = (\<Sum>a\<in>X. emeasure M {a})"
using \<open>finite A\<close> by (subst emeasure_eq_sum_singleton) (auto dest: finite_subset)
also have "\<dots> = (\<Sum>a\<in>X. emeasure N {a})"
using X eq by (auto intro!: sum.cong)
also have "\<dots> = emeasure N X"
using X \<open>finite A\<close> by (subst emeasure_eq_sum_singleton) (auto dest: finite_subset)
finally show "emeasure M X = emeasure N X" .
qed simp
lemma measure_eqI_generator_eq:
fixes M N :: "'a measure" and E :: "'a set set" and A :: "nat \<Rightarrow> 'a set"
assumes "Int_stable E" "E \<subseteq> Pow \<Omega>"
and eq: "\<And>X. X \<in> E \<Longrightarrow> emeasure M X = emeasure N X"
and M: "sets M = sigma_sets \<Omega> E"
and N: "sets N = sigma_sets \<Omega> E"
and A: "range A \<subseteq> E" "(\<Union>i. A i) = \<Omega>" "\<And>i. emeasure M (A i) \<noteq> \<infinity>"
shows "M = N"
proof -
let ?\<mu> = "emeasure M" and ?\<nu> = "emeasure N"
interpret S: sigma_algebra \<Omega> "sigma_sets \<Omega> E" by (rule sigma_algebra_sigma_sets) fact
have "space M = \<Omega>"
using sets.top[of M] sets.space_closed[of M] S.top S.space_closed \<open>sets M = sigma_sets \<Omega> E\<close>
by blast
{ fix F D assume "F \<in> E" and "?\<mu> F \<noteq> \<infinity>"
then have [intro]: "F \<in> sigma_sets \<Omega> E" by auto
have "?\<nu> F \<noteq> \<infinity>" using \<open>?\<mu> F \<noteq> \<infinity>\<close> \<open>F \<in> E\<close> eq by simp
assume "D \<in> sets M"
with \<open>Int_stable E\<close> \<open>E \<subseteq> Pow \<Omega>\<close> have "emeasure M (F \<inter> D) = emeasure N (F \<inter> D)"
unfolding M
proof (induct rule: sigma_sets_induct_disjoint)
case (basic A)
then have "F \<inter> A \<in> E" using \<open>Int_stable E\<close> \<open>F \<in> E\<close> by (auto simp: Int_stable_def)
then show ?case using eq by auto
next
case empty then show ?case by simp
next
case (compl A)
then have **: "F \<inter> (\<Omega> - A) = F - (F \<inter> A)"
and [intro]: "F \<inter> A \<in> sigma_sets \<Omega> E"
using \<open>F \<in> E\<close> S.sets_into_space by (auto simp: M)
have "?\<nu> (F \<inter> A) \<le> ?\<nu> F" by (auto intro!: emeasure_mono simp: M N)
then have "?\<nu> (F \<inter> A) \<noteq> \<infinity>" using \<open>?\<nu> F \<noteq> \<infinity>\<close> by (auto simp: top_unique)
have "?\<mu> (F \<inter> A) \<le> ?\<mu> F" by (auto intro!: emeasure_mono simp: M N)
then have "?\<mu> (F \<inter> A) \<noteq> \<infinity>" using \<open>?\<mu> F \<noteq> \<infinity>\<close> by (auto simp: top_unique)
then have "?\<mu> (F \<inter> (\<Omega> - A)) = ?\<mu> F - ?\<mu> (F \<inter> A)" unfolding **
using \<open>F \<inter> A \<in> sigma_sets \<Omega> E\<close> by (auto intro!: emeasure_Diff simp: M N)
also have "\<dots> = ?\<nu> F - ?\<nu> (F \<inter> A)" using eq \<open>F \<in> E\<close> compl by simp
also have "\<dots> = ?\<nu> (F \<inter> (\<Omega> - A))" unfolding **
using \<open>F \<inter> A \<in> sigma_sets \<Omega> E\<close> \<open>?\<nu> (F \<inter> A) \<noteq> \<infinity>\<close>
by (auto intro!: emeasure_Diff[symmetric] simp: M N)
finally show ?case
using \<open>space M = \<Omega>\<close> by auto
next
case (union A)
then have "?\<mu> (\<Union>x. F \<inter> A x) = ?\<nu> (\<Union>x. F \<inter> A x)"
by (subst (1 2) suminf_emeasure[symmetric]) (auto simp: disjoint_family_on_def subset_eq M N)
with A show ?case
by auto
qed }
note * = this
show "M = N"
proof (rule measure_eqI)
show "sets M = sets N"
using M N by simp
have [simp, intro]: "\<And>i. A i \<in> sets M"
using A(1) by (auto simp: subset_eq M)
fix F assume "F \<in> sets M"
let ?D = "disjointed (\<lambda>i. F \<inter> A i)"
from \<open>space M = \<Omega>\<close> have F_eq: "F = (\<Union>i. ?D i)"
using \<open>F \<in> sets M\<close>[THEN sets.sets_into_space] A(2)[symmetric] by (auto simp: UN_disjointed_eq)
have [simp, intro]: "\<And>i. ?D i \<in> sets M"
using sets.range_disjointed_sets[of "\<lambda>i. F \<inter> A i" M] \<open>F \<in> sets M\<close>
by (auto simp: subset_eq)
have "disjoint_family ?D"
by (auto simp: disjoint_family_disjointed)
moreover
have "(\<Sum>i. emeasure M (?D i)) = (\<Sum>i. emeasure N (?D i))"
proof (intro arg_cong[where f=suminf] ext)
fix i
have "A i \<inter> ?D i = ?D i"
by (auto simp: disjointed_def)
then show "emeasure M (?D i) = emeasure N (?D i)"
using *[of "A i" "?D i", OF _ A(3)] A(1) by auto
qed
ultimately show "emeasure M F = emeasure N F"
by (simp add: image_subset_iff \<open>sets M = sets N\<close>[symmetric] F_eq[symmetric] suminf_emeasure)
qed
qed
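text \<open>Editorial remark: \<open>measure_eqI_generator_eq\<close> is the standard uniqueness theorem for
measures: two measures on \<open>sigma_sets \<Omega> E\<close> that agree on an intersection-stable generator \<open>E\<close>
and are \<open>\<sigma>\<close>-finite along a cover taken from \<open>E\<close> are equal.  A typical application shows that a
Borel measure on the reals is determined by its (finite) values on a covering sequence of
intervals.\<close>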
lemma space_empty: "space M = {} \<Longrightarrow> M = count_space {}"
by (rule measure_eqI) (simp_all add: space_empty_iff)
lemma measure_eqI_generator_eq_countable:
fixes M N :: "'a measure" and E :: "'a set set" and A :: "'a set set"
assumes E: "Int_stable E" "E \<subseteq> Pow \<Omega>" "\<And>X. X \<in> E \<Longrightarrow> emeasure M X = emeasure N X"
and sets: "sets M = sigma_sets \<Omega> E" "sets N = sigma_sets \<Omega> E"
and A: "A \<subseteq> E" "(\<Union>A) = \<Omega>" "countable A" "\<And>a. a \<in> A \<Longrightarrow> emeasure M a \<noteq> \<infinity>"
shows "M = N"
proof cases
assume "\<Omega> = {}"
have *: "sigma_sets \<Omega> E = sets (sigma \<Omega> E)"
using E(2) by simp
have "space M = \<Omega>" "space N = \<Omega>"
using sets E(2) unfolding * by (auto dest: sets_eq_imp_space_eq simp del: sets_measure_of)
then show "M = N"
unfolding \<open>\<Omega> = {}\<close> by (auto dest: space_empty)
next
assume "\<Omega> \<noteq> {}" with \<open>\<Union>A = \<Omega>\<close> have "A \<noteq> {}" by auto
from this \<open>countable A\<close> have rng: "range (from_nat_into A) = A"
by (rule range_from_nat_into)
show "M = N"
proof (rule measure_eqI_generator_eq[OF E sets])
show "range (from_nat_into A) \<subseteq> E"
unfolding rng using \<open>A \<subseteq> E\<close> .
show "(\<Union>i. from_nat_into A i) = \<Omega>"
unfolding rng using \<open>\<Union>A = \<Omega>\<close> .
show "emeasure M (from_nat_into A i) \<noteq> \<infinity>" for i
using rng by (intro A) auto
qed
qed
lemma measure_of_of_measure: "measure_of (space M) (sets M) (emeasure M) = M"
proof (intro measure_eqI emeasure_measure_of_sigma)
show "sigma_algebra (space M) (sets M)" ..
show "positive (sets M) (emeasure M)"
by (simp add: positive_def)
show "countably_additive (sets M) (emeasure M)"
by (simp add: emeasure_countably_additive)
qed simp_all
subsection \<open>\<open>\<mu>\<close>-null sets\<close>
definition\<^marker>\<open>tag important\<close> null_sets :: "'a measure \<Rightarrow> 'a set set" where
"null_sets M = {N\<in>sets M. emeasure M N = 0}"
lemma null_setsD1[dest]: "A \<in> null_sets M \<Longrightarrow> emeasure M A = 0"
by (simp add: null_sets_def)
lemma null_setsD2[dest]: "A \<in> null_sets M \<Longrightarrow> A \<in> sets M"
unfolding null_sets_def by simp
lemma null_setsI[intro]: "emeasure M A = 0 \<Longrightarrow> A \<in> sets M \<Longrightarrow> A \<in> null_sets M"
unfolding null_sets_def by simp
interpretation null_sets: ring_of_sets "space M" "null_sets M" for M
proof (rule ring_of_setsI)
show "null_sets M \<subseteq> Pow (space M)"
using sets.sets_into_space by auto
show "{} \<in> null_sets M"
by auto
fix A B assume null_sets: "A \<in> null_sets M" "B \<in> null_sets M"
then have sets: "A \<in> sets M" "B \<in> sets M"
by auto
then have *: "emeasure M (A \<union> B) \<le> emeasure M A + emeasure M B"
"emeasure M (A - B) \<le> emeasure M A"
by (auto intro!: emeasure_subadditive emeasure_mono)
then have "emeasure M B = 0" "emeasure M A = 0"
using null_sets by auto
with sets * show "A - B \<in> null_sets M" "A \<union> B \<in> null_sets M"
by (auto intro!: antisym zero_le)
qed
lemma UN_from_nat_into:
assumes I: "countable I" "I \<noteq> {}"
shows "(\<Union>i\<in>I. N i) = (\<Union>i. N (from_nat_into I i))"
proof -
have "(\<Union>i\<in>I. N i) = \<Union>(N ` range (from_nat_into I))"
using I by simp
also have "\<dots> = (\<Union>i. (N \<circ> from_nat_into I) i)"
by simp
finally show ?thesis by simp
qed
lemma null_sets_UN':
assumes "countable I"
assumes "\<And>i. i \<in> I \<Longrightarrow> N i \<in> null_sets M"
shows "(\<Union>i\<in>I. N i) \<in> null_sets M"
proof cases
assume "I = {}" then show ?thesis by simp
next
assume "I \<noteq> {}"
show ?thesis
proof (intro conjI CollectI null_setsI)
show "(\<Union>i\<in>I. N i) \<in> sets M"
using assms by (intro sets.countable_UN') auto
have "emeasure M (\<Union>i\<in>I. N i) \<le> (\<Sum>n. emeasure M (N (from_nat_into I n)))"
unfolding UN_from_nat_into[OF \<open>countable I\<close> \<open>I \<noteq> {}\<close>]
using assms \<open>I \<noteq> {}\<close> by (intro emeasure_subadditive_countably) (auto intro: from_nat_into)
also have "(\<lambda>n. emeasure M (N (from_nat_into I n))) = (\<lambda>_. 0)"
using assms \<open>I \<noteq> {}\<close> by (auto intro: from_nat_into)
finally show "emeasure M (\<Union>i\<in>I. N i) = 0"
by (intro antisym zero_le) simp
qed
qed
lemma null_sets_UN[intro]:
"(\<And>i::'i::countable. N i \<in> null_sets M) \<Longrightarrow> (\<Union>i. N i) \<in> null_sets M"
by (rule null_sets_UN') auto
lemma null_set_Int1:
assumes "B \<in> null_sets M" "A \<in> sets M" shows "A \<inter> B \<in> null_sets M"
proof (intro CollectI conjI null_setsI)
show "emeasure M (A \<inter> B) = 0" using assms
by (intro emeasure_eq_0[of B _ "A \<inter> B"]) auto
qed (insert assms, auto)
lemma null_set_Int2:
assumes "B \<in> null_sets M" "A \<in> sets M" shows "B \<inter> A \<in> null_sets M"
using assms by (subst Int_commute) (rule null_set_Int1)
lemma emeasure_Diff_null_set:
assumes "B \<in> null_sets M" "A \<in> sets M"
shows "emeasure M (A - B) = emeasure M A"
proof -
have *: "A - B = (A - (A \<inter> B))" by auto
have "A \<inter> B \<in> null_sets M" using assms by (rule null_set_Int1)
then show ?thesis
unfolding * using assms
by (subst emeasure_Diff) auto
qed
lemma null_set_Diff:
assumes "B \<in> null_sets M" "A \<in> sets M" shows "B - A \<in> null_sets M"
proof (intro CollectI conjI null_setsI)
show "emeasure M (B - A) = 0" using assms by (intro emeasure_eq_0[of B _ "B - A"]) auto
qed (insert assms, auto)
lemma emeasure_Un_null_set:
assumes "A \<in> sets M" "B \<in> null_sets M"
shows "emeasure M (A \<union> B) = emeasure M A"
proof -
have *: "A \<union> B = A \<union> (B - A)" by auto
have "B - A \<in> null_sets M" using assms(2,1) by (rule null_set_Diff)
then show ?thesis
unfolding * using assms
by (subst plus_emeasure[symmetric]) auto
qed
lemma emeasure_Un':
assumes "A \<in> sets M" "B \<in> sets M" "A \<inter> B \<in> null_sets M"
shows "emeasure M (A \<union> B) = emeasure M A + emeasure M B"
proof -
have "A \<union> B = A \<union> (B - A \<inter> B)" by blast
also have "emeasure M \<dots> = emeasure M A + emeasure M (B - A \<inter> B)"
using assms by (subst plus_emeasure) auto
also have "emeasure M (B - A \<inter> B) = emeasure M B"
using assms by (intro emeasure_Diff_null_set) auto
finally show ?thesis .
qed
subsection \<open>The almost everywhere filter (i.e.\ quantifier)\<close>
definition\<^marker>\<open>tag important\<close> ae_filter :: "'a measure \<Rightarrow> 'a filter" where
"ae_filter M = (INF N\<in>null_sets M. principal (space M - N))"
abbreviation almost_everywhere :: "'a measure \<Rightarrow> ('a \<Rightarrow> bool) \<Rightarrow> bool" where
"almost_everywhere M P \<equiv> eventually P (ae_filter M)"
syntax
"_almost_everywhere" :: "pttrn \<Rightarrow> 'a \<Rightarrow> bool \<Rightarrow> bool" ("AE _ in _. _" [0,0,10] 10)
translations
"AE x in M. P" \<rightleftharpoons> "CONST almost_everywhere M (\<lambda>x. P)"
abbreviation
"set_almost_everywhere A M P \<equiv> AE x in M. x \<in> A \<longrightarrow> P x"
syntax
"_set_almost_everywhere" :: "pttrn \<Rightarrow> 'a set \<Rightarrow> 'a \<Rightarrow> bool \<Rightarrow> bool"
("AE _\<in>_ in _./ _" [0,0,0,10] 10)
translations
"AE x\<in>A in M. P" \<rightleftharpoons> "CONST set_almost_everywhere A M (\<lambda>x. P)"
lemma eventually_ae_filter: "eventually P (ae_filter M) \<longleftrightarrow> (\<exists>N\<in>null_sets M. {x \<in> space M. \<not> P x} \<subseteq> N)"
unfolding ae_filter_def by (subst eventually_INF_base) (auto simp: eventually_principal subset_eq)
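text \<open>Editorial note: unfolding the definition, \<open>AE x in M. P x\<close> states that the exceptional
set \<open>{x \<in> space M. \<not> P x}\<close> is contained in some null set; the exceptional set itself need not
be measurable.  For Lebesgue measure, for instance, almost every real number is irrational,
because the countable set of rationals is a null set.\<close>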
lemma AE_I':
"N \<in> null_sets M \<Longrightarrow> {x\<in>space M. \<not> P x} \<subseteq> N \<Longrightarrow> (AE x in M. P x)"
unfolding eventually_ae_filter by auto
lemma AE_iff_null:
assumes "{x\<in>space M. \<not> P x} \<in> sets M" (is "?P \<in> sets M")
shows "(AE x in M. P x) \<longleftrightarrow> {x\<in>space M. \<not> P x} \<in> null_sets M"
proof
assume "AE x in M. P x" then obtain N where N: "N \<in> sets M" "?P \<subseteq> N" "emeasure M N = 0"
unfolding eventually_ae_filter by auto
have "emeasure M ?P \<le> emeasure M N"
using assms N(1,2) by (auto intro: emeasure_mono)
then have "emeasure M ?P = 0"
unfolding \<open>emeasure M N = 0\<close> by auto
then show "?P \<in> null_sets M" using assms by auto
next
assume "?P \<in> null_sets M" with assms show "AE x in M. P x" by (auto intro: AE_I')
qed
lemma AE_iff_null_sets:
"N \<in> sets M \<Longrightarrow> N \<in> null_sets M \<longleftrightarrow> (AE x in M. x \<notin> N)"
using Int_absorb1[OF sets.sets_into_space, of N M]
by (subst AE_iff_null) (auto simp: Int_def[symmetric])
lemma ae_filter_eq_bot_iff: "ae_filter M = bot \<longleftrightarrow> emeasure M (space M) = 0"
proof -
have "ae_filter M = bot \<longleftrightarrow> (AE x in M. False)"
using trivial_limit_def by blast
also have "\<dots> \<longleftrightarrow> space M \<in> null_sets M"
by (simp add: AE_iff_null_sets eventually_ae_filter)
also have "\<dots> \<longleftrightarrow> emeasure M (space M) = 0"
by auto
finally show ?thesis .
qed
lemma AE_not_in:
"N \<in> null_sets M \<Longrightarrow> AE x in M. x \<notin> N"
by (metis AE_iff_null_sets null_setsD2)
lemma AE_iff_measurable:
"N \<in> sets M \<Longrightarrow> {x\<in>space M. \<not> P x} = N \<Longrightarrow> (AE x in M. P x) \<longleftrightarrow> emeasure M N = 0"
using AE_iff_null[of _ P] by auto
lemma AE_E[consumes 1]:
assumes "AE x in M. P x"
obtains N where "{x \<in> space M. \<not> P x} \<subseteq> N" "emeasure M N = 0" "N \<in> sets M"
using assms unfolding eventually_ae_filter by auto
lemma AE_E2:
assumes "AE x in M. P x"
shows "emeasure M {x\<in>space M. \<not> P x} = 0"
by (metis (mono_tags, lifting) AE_iff_null assms emeasure_notin_sets null_setsD1)
lemma AE_E3:
assumes "AE x in M. P x"
obtains N where "\<And>x. x \<in> space M - N \<Longrightarrow> P x" "N \<in> null_sets M"
using assms unfolding eventually_ae_filter by auto
lemma AE_I:
assumes "{x \<in> space M. \<not> P x} \<subseteq> N" "emeasure M N = 0" "N \<in> sets M"
shows "AE x in M. P x"
using assms unfolding eventually_ae_filter by auto
lemma AE_mp[elim!]:
assumes AE_P: "AE x in M. P x" and AE_imp: "AE x in M. P x \<longrightarrow> Q x"
shows "AE x in M. Q x"
using assms by (fact eventually_rev_mp)
text \<open>The next lemma is convenient in combination with a lemma whose conclusion has the
form \<open>AE x in M. P x = Q x\<close>: for such a lemma there is no \<open>[symmetric]\<close> variant,
but \<open>AE_symmetric[OF\<dots>]\<close> serves as a replacement.\<close>
(* deprecated: replace by laws about eventually *)
lemma
shows AE_iffI: "AE x in M. P x \<Longrightarrow> AE x in M. P x \<longleftrightarrow> Q x \<Longrightarrow> AE x in M. Q x"
and AE_disjI1: "AE x in M. P x \<Longrightarrow> AE x in M. P x \<or> Q x"
and AE_disjI2: "AE x in M. Q x \<Longrightarrow> AE x in M. P x \<or> Q x"
and AE_conjI: "AE x in M. P x \<Longrightarrow> AE x in M. Q x \<Longrightarrow> AE x in M. P x \<and> Q x"
and AE_conj_iff[simp]: "(AE x in M. P x \<and> Q x) \<longleftrightarrow> (AE x in M. P x) \<and> (AE x in M. Q x)"
by auto
lemma AE_symmetric:
assumes "AE x in M. P x = Q x"
shows "AE x in M. Q x = P x"
using assms by auto
lemma AE_impI:
"(P \<Longrightarrow> AE x in M. Q x) \<Longrightarrow> AE x in M. P \<longrightarrow> Q x"
by fastforce
lemma AE_measure:
assumes AE: "AE x in M. P x" and sets: "{x\<in>space M. P x} \<in> sets M" (is "?P \<in> sets M")
shows "emeasure M {x\<in>space M. P x} = emeasure M (space M)"
proof -
from AE_E[OF AE] obtain N
where N: "{x \<in> space M. \<not> P x} \<subseteq> N" "emeasure M N = 0" "N \<in> sets M"
by auto
with sets have "emeasure M (space M) \<le> emeasure M (?P \<union> N)"
by (intro emeasure_mono) auto
also have "\<dots> \<le> emeasure M ?P + emeasure M N"
using sets N by (intro emeasure_subadditive) auto
also have "\<dots> = emeasure M ?P" using N by simp
finally show "emeasure M ?P = emeasure M (space M)"
using emeasure_space[of M "?P"] by auto
qed
lemma AE_space: "AE x in M. x \<in> space M"
by (rule AE_I[where N="{}"]) auto
lemma AE_I2[simp, intro]:
"(\<And>x. x \<in> space M \<Longrightarrow> P x) \<Longrightarrow> AE x in M. P x"
using AE_space by force
lemma AE_Ball_mp:
"\<forall>x\<in>space M. P x \<Longrightarrow> AE x in M. P x \<longrightarrow> Q x \<Longrightarrow> AE x in M. Q x"
by auto
lemma AE_cong[cong]:
"(\<And>x. x \<in> space M \<Longrightarrow> P x \<longleftrightarrow> Q x) \<Longrightarrow> (AE x in M. P x) \<longleftrightarrow> (AE x in M. Q x)"
by auto
lemma AE_cong_simp: "M = N \<Longrightarrow> (\<And>x. x \<in> space N =simp=> P x = Q x) \<Longrightarrow> (AE x in M. P x) \<longleftrightarrow> (AE x in N. Q x)"
by (auto simp: simp_implies_def)
lemma AE_all_countable:
"(AE x in M. \<forall>i. P i x) \<longleftrightarrow> (\<forall>i::'i::countable. AE x in M. P i x)"
proof
assume "\<forall>i. AE x in M. P i x"
from this[unfolded eventually_ae_filter Bex_def, THEN choice]
obtain N where N: "\<And>i. N i \<in> null_sets M" "\<And>i. {x\<in>space M. \<not> P i x} \<subseteq> N i" by auto
have "{x\<in>space M. \<not> (\<forall>i. P i x)} \<subseteq> (\<Union>i. {x\<in>space M. \<not> P i x})" by auto
also have "\<dots> \<subseteq> (\<Union>i. N i)" using N by auto
finally have "{x\<in>space M. \<not> (\<forall>i. P i x)} \<subseteq> (\<Union>i. N i)" .
moreover from N have "(\<Union>i. N i) \<in> null_sets M"
by (intro null_sets_UN) auto
ultimately show "AE x in M. \<forall>i. P i x"
unfolding eventually_ae_filter by auto
qed auto
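text \<open>Editorial remark: countability is essential in \<open>AE_all_countable\<close>.  For Lebesgue measure
one has \<open>AE x in M. x \<noteq> c\<close> for every fixed real \<open>c\<close>, since singletons are null sets, yet
\<open>AE x in M. \<forall>c. x \<noteq> c\<close> fails because no point satisfies \<open>\<forall>c. x \<noteq> c\<close>.\<close>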
lemma AE_ball_countable:
assumes [intro]: "countable X"
shows "(AE x in M. \<forall>y\<in>X. P x y) \<longleftrightarrow> (\<forall>y\<in>X. AE x in M. P x y)"
proof
assume "\<forall>y\<in>X. AE x in M. P x y"
from this[unfolded eventually_ae_filter Bex_def, THEN bchoice]
obtain N where N: "\<And>y. y \<in> X \<Longrightarrow> N y \<in> null_sets M" "\<And>y. y \<in> X \<Longrightarrow> {x\<in>space M. \<not> P x y} \<subseteq> N y"
by auto
have "{x\<in>space M. \<not> (\<forall>y\<in>X. P x y)} \<subseteq> (\<Union>y\<in>X. {x\<in>space M. \<not> P x y})"
by auto
also have "\<dots> \<subseteq> (\<Union>y\<in>X. N y)"
using N by auto
finally have "{x\<in>space M. \<not> (\<forall>y\<in>X. P x y)} \<subseteq> (\<Union>y\<in>X. N y)" .
moreover from N have "(\<Union>y\<in>X. N y) \<in> null_sets M"
by (intro null_sets_UN') auto
ultimately show "AE x in M. \<forall>y\<in>X. P x y"
unfolding eventually_ae_filter by auto
qed auto
lemma AE_ball_countable':
"(\<And>N. N \<in> I \<Longrightarrow> AE x in M. P N x) \<Longrightarrow> countable I \<Longrightarrow> AE x in M. \<forall>N \<in> I. P N x"
unfolding AE_ball_countable by simp
lemma AE_pairwise: "countable F \<Longrightarrow> pairwise (\<lambda>A B. AE x in M. R x A B) F \<longleftrightarrow> (AE x in M. pairwise (R x) F)"
unfolding pairwise_alt by (simp add: AE_ball_countable)
lemma AE_discrete_difference:
assumes X: "countable X"
assumes null: "\<And>x. x \<in> X \<Longrightarrow> emeasure M {x} = 0"
assumes sets: "\<And>x. x \<in> X \<Longrightarrow> {x} \<in> sets M"
shows "AE x in M. x \<notin> X"
proof -
have "(\<Union>x\<in>X. {x}) \<in> null_sets M"
using assms by (intro null_sets_UN') auto
from AE_not_in[OF this] show "AE x in M. x \<notin> X"
by auto
qed
lemma AE_finite_all:
assumes f: "finite S" shows "(AE x in M. \<forall>i\<in>S. P i x) \<longleftrightarrow> (\<forall>i\<in>S. AE x in M. P i x)"
using f by induct auto
lemma AE_finite_allI:
assumes "finite S"
shows "(\<And>s. s \<in> S \<Longrightarrow> AE x in M. Q s x) \<Longrightarrow> AE x in M. \<forall>s\<in>S. Q s x"
using AE_finite_all[OF \<open>finite S\<close>] by auto
lemma emeasure_mono_AE:
assumes imp: "AE x in M. x \<in> A \<longrightarrow> x \<in> B"
and B: "B \<in> sets M"
shows "emeasure M A \<le> emeasure M B"
proof cases
assume A: "A \<in> sets M"
from imp obtain N where N: "{x\<in>space M. \<not> (x \<in> A \<longrightarrow> x \<in> B)} \<subseteq> N" "N \<in> null_sets M"
by (auto simp: eventually_ae_filter)
have "emeasure M A = emeasure M (A - N)"
using N A by (subst emeasure_Diff_null_set) auto
also have "emeasure M (A - N) \<le> emeasure M (B - N)"
using N A B sets.sets_into_space by (auto intro!: emeasure_mono)
also have "emeasure M (B - N) = emeasure M B"
using N B by (subst emeasure_Diff_null_set) auto
finally show ?thesis .
qed (simp add: emeasure_notin_sets)
lemma emeasure_eq_AE:
assumes iff: "AE x in M. x \<in> A \<longleftrightarrow> x \<in> B"
assumes A: "A \<in> sets M" and B: "B \<in> sets M"
shows "emeasure M A = emeasure M B"
using assms by (safe intro!: antisym emeasure_mono_AE) auto
lemma emeasure_Collect_eq_AE:
"AE x in M. P x \<longleftrightarrow> Q x \<Longrightarrow> Measurable.pred M Q \<Longrightarrow> Measurable.pred M P \<Longrightarrow>
emeasure M {x\<in>space M. P x} = emeasure M {x\<in>space M. Q x}"
by (intro emeasure_eq_AE) auto
lemma emeasure_eq_0_AE: "AE x in M. \<not> P x \<Longrightarrow> emeasure M {x\<in>space M. P x} = 0"
using AE_iff_measurable[OF _ refl, of M "\<lambda>x. \<not> P x"]
by (cases "{x\<in>space M. P x} \<in> sets M") (simp_all add: emeasure_notin_sets)
lemma emeasure_0_AE:
assumes "emeasure M (space M) = 0"
shows "AE x in M. P x"
using eventually_ae_filter assms by blast
lemma emeasure_add_AE:
assumes [measurable]: "A \<in> sets M" "B \<in> sets M" "C \<in> sets M"
assumes 1: "AE x in M. x \<in> C \<longleftrightarrow> x \<in> A \<or> x \<in> B"
assumes 2: "AE x in M. \<not> (x \<in> A \<and> x \<in> B)"
shows "emeasure M C = emeasure M A + emeasure M B"
proof -
have "emeasure M C = emeasure M (A \<union> B)"
by (rule emeasure_eq_AE) (insert 1, auto)
also have "\<dots> = emeasure M A + emeasure M (B - A)"
by (subst plus_emeasure) auto
also have "emeasure M (B - A) = emeasure M B"
by (rule emeasure_eq_AE) (insert 2, auto)
finally show ?thesis .
qed
subsection \<open>\<open>\<sigma>\<close>-finite Measures\<close>
locale\<^marker>\<open>tag important\<close> sigma_finite_measure =
fixes M :: "'a measure"
assumes sigma_finite_countable:
"\<exists>A::'a set set. countable A \<and> A \<subseteq> sets M \<and> (\<Union>A) = space M \<and> (\<forall>a\<in>A. emeasure M a \<noteq> \<infinity>)"
lemma (in sigma_finite_measure) sigma_finite:
obtains A :: "nat \<Rightarrow> 'a set"
where "range A \<subseteq> sets M" "(\<Union>i. A i) = space M" "\<And>i. emeasure M (A i) \<noteq> \<infinity>"
proof -
obtain A :: "'a set set" where
[simp]: "countable A" and
A: "A \<subseteq> sets M" "(\<Union>A) = space M" "\<And>a. a \<in> A \<Longrightarrow> emeasure M a \<noteq> \<infinity>"
using sigma_finite_countable by metis
show thesis
proof cases
assume "A = {}" with \<open>(\<Union>A) = space M\<close> show thesis
by (intro that[of "\<lambda>_. {}"]) auto
next
assume "A \<noteq> {}"
show thesis
proof
show "range (from_nat_into A) \<subseteq> sets M"
using \<open>A \<noteq> {}\<close> A by auto
have "(\<Union>i. from_nat_into A i) = \<Union>A"
using range_from_nat_into[OF \<open>A \<noteq> {}\<close> \<open>countable A\<close>] by auto
with A show "(\<Union>i. from_nat_into A i) = space M"
by auto
qed (intro A from_nat_into \<open>A \<noteq> {}\<close>)
qed
qed
lemma (in sigma_finite_measure) sigma_finite_disjoint:
obtains A :: "nat \<Rightarrow> 'a set"
where "range A \<subseteq> sets M" "(\<Union>i. A i) = space M" "\<And>i. emeasure M (A i) \<noteq> \<infinity>" "disjoint_family A"
proof -
obtain A :: "nat \<Rightarrow> 'a set" where
range: "range A \<subseteq> sets M" and
space: "(\<Union>i. A i) = space M" and
measure: "\<And>i. emeasure M (A i) \<noteq> \<infinity>"
using sigma_finite by blast
show thesis
proof (rule that[of "disjointed A"])
show "range (disjointed A) \<subseteq> sets M"
by (rule sets.range_disjointed_sets[OF range])
show "(\<Union>i. disjointed A i) = space M"
and "disjoint_family (disjointed A)"
using disjoint_family_disjointed UN_disjointed_eq[of A] space range
by auto
show "emeasure M (disjointed A i) \<noteq> \<infinity>" for i
proof -
have "emeasure M (disjointed A i) \<le> emeasure M (A i)"
using range disjointed_subset[of A i] by (auto intro!: emeasure_mono)
then show ?thesis using measure[of i] by (auto simp: top_unique)
qed
qed
qed
lemma (in sigma_finite_measure) sigma_finite_incseq:
obtains A :: "nat \<Rightarrow> 'a set"
where "range A \<subseteq> sets M" "(\<Union>i. A i) = space M" "\<And>i. emeasure M (A i) \<noteq> \<infinity>" "incseq A"
proof -
obtain F :: "nat \<Rightarrow> 'a set" where
F: "range F \<subseteq> sets M" "(\<Union>i. F i) = space M" "\<And>i. emeasure M (F i) \<noteq> \<infinity>"
using sigma_finite by blast
show thesis
proof (rule that[of "\<lambda>n. \<Union>i\<le>n. F i"])
show "range (\<lambda>n. \<Union>i\<le>n. F i) \<subseteq> sets M"
using F by (force simp: incseq_def)
show "(\<Union>n. \<Union>i\<le>n. F i) = space M"
proof -
from F have "\<And>x. x \<in> space M \<Longrightarrow> \<exists>i. x \<in> F i" by auto
with F show ?thesis by fastforce
qed
show "emeasure M (\<Union>i\<le>n. F i) \<noteq> \<infinity>" for n
proof -
have "emeasure M (\<Union>i\<le>n. F i) \<le> (\<Sum>i\<le>n. emeasure M (F i))"
using F by (auto intro!: emeasure_subadditive_finite)
also have "\<dots> < \<infinity>"
using F by (auto simp: sum_Pinfty less_top)
finally show ?thesis by simp
qed
show "incseq (\<lambda>n. \<Union>i\<le>n. F i)"
by (force simp: incseq_def)
qed
qed
lemma (in sigma_finite_measure) approx_PInf_emeasure_with_finite:
fixes C::real
assumes W_meas: "W \<in> sets M"
and W_inf: "emeasure M W = \<infinity>"
obtains Z where "Z \<in> sets M" "Z \<subseteq> W" "emeasure M Z < \<infinity>" "emeasure M Z > C"
proof -
obtain A :: "nat \<Rightarrow> 'a set"
where A: "range A \<subseteq> sets M" "(\<Union>i. A i) = space M" "\<And>i. emeasure M (A i) \<noteq> \<infinity>" "incseq A"
using sigma_finite_incseq by blast
define B where "B = (\<lambda>i. W \<inter> A i)"
have B_meas: "\<And>i. B i \<in> sets M" using W_meas \<open>range A \<subseteq> sets M\<close> B_def by blast
have b: "\<And>i. B i \<subseteq> W" using B_def by blast
{ fix i
have "emeasure M (B i) \<le> emeasure M (A i)"
using A by (intro emeasure_mono) (auto simp: B_def)
also have "emeasure M (A i) < \<infinity>"
using \<open>\<And>i. emeasure M (A i) \<noteq> \<infinity>\<close> by (simp add: less_top)
finally have "emeasure M (B i) < \<infinity>" . }
note c = this
have "W = (\<Union>i. B i)" using B_def \<open>(\<Union>i. A i) = space M\<close> W_meas by auto
moreover have "incseq B" using B_def \<open>incseq A\<close> by (simp add: incseq_def subset_eq)
ultimately have "(\<lambda>i. emeasure M (B i)) \<longlonglongrightarrow> emeasure M W" using W_meas B_meas
by (simp add: B_meas Lim_emeasure_incseq image_subset_iff)
then have "(\<lambda>i. emeasure M (B i)) \<longlonglongrightarrow> \<infinity>" using W_inf by simp
from order_tendstoD(1)[OF this, of C]
obtain i where d: "emeasure M (B i) > C"
by (auto simp: eventually_sequentially)
have "B i \<in> sets M" "B i \<subseteq> W" "emeasure M (B i) < \<infinity>" "emeasure M (B i) > C"
using B_meas b c d by auto
then show ?thesis using that by blast
qed
subsection \<open>Measure space induced by distribution of \<^const>\<open>measurable\<close>-functions\<close>
definition\<^marker>\<open>tag important\<close> distr :: "'a measure \<Rightarrow> 'b measure \<Rightarrow> ('a \<Rightarrow> 'b) \<Rightarrow> 'b measure" where
"distr M N f =
measure_of (space N) (sets N) (\<lambda>A. emeasure M (f -` A \<inter> space M))"
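text \<open>Editorial remark: \<open>distr M N f\<close> is the push-forward (image) measure of \<open>M\<close> under \<open>f\<close>,
assigning to a set \<open>A \<in> sets N\<close> the mass \<open>emeasure M (f -` A \<inter> space M)\<close> of its preimage, as
made precise by \<open>emeasure_distr\<close> below.  In probabilistic terms it is the distribution of the
random variable \<open>f\<close> under \<open>M\<close>.\<close>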
lemma
shows sets_distr[simp, measurable_cong]: "sets (distr M N f) = sets N"
and space_distr[simp]: "space (distr M N f) = space N"
by (auto simp: distr_def)
lemma
shows measurable_distr_eq1[simp]: "measurable (distr Mf Nf f) Mf' = measurable Nf Mf'"
and measurable_distr_eq2[simp]: "measurable Mg' (distr Mg Ng g) = measurable Mg' Ng"
by (auto simp: measurable_def)
lemma distr_cong:
"M = K \<Longrightarrow> sets N = sets L \<Longrightarrow> (\<And>x. x \<in> space M \<Longrightarrow> f x = g x) \<Longrightarrow> distr M N f = distr K L g"
using sets_eq_imp_space_eq[of N L] by (simp add: distr_def Int_def cong: rev_conj_cong)
lemma emeasure_distr:
fixes f :: "'a \<Rightarrow> 'b"
assumes f: "f \<in> measurable M N" and A: "A \<in> sets N"
shows "emeasure (distr M N f) A = emeasure M (f -` A \<inter> space M)" (is "_ = ?\<mu> A")
unfolding distr_def
proof (rule emeasure_measure_of_sigma)
show "positive (sets N) ?\<mu>"
by (auto simp: positive_def)
show "countably_additive (sets N) ?\<mu>"
proof (intro countably_additiveI)
fix A :: "nat \<Rightarrow> 'b set" assume "range A \<subseteq> sets N" "disjoint_family A"
then have A: "\<And>i. A i \<in> sets N" "(\<Union>i. A i) \<in> sets N" by auto
then have *: "range (\<lambda>i. f -` (A i) \<inter> space M) \<subseteq> sets M"
using f by (auto simp: measurable_def)
moreover have "(\<Union>i. f -` A i \<inter> space M) \<in> sets M"
using * by blast
moreover have **: "disjoint_family (\<lambda>i. f -` A i \<inter> space M)"
using \<open>disjoint_family A\<close> by (auto simp: disjoint_family_on_def)
ultimately show "(\<Sum>i. ?\<mu> (A i)) = ?\<mu> (\<Union>i. A i)"
using suminf_emeasure[OF _ **] A f
by (auto simp: comp_def vimage_UN)
qed
show "sigma_algebra (space N) (sets N)" ..
qed fact
lemma emeasure_Collect_distr:
assumes X[measurable]: "X \<in> measurable M N" "Measurable.pred N P"
shows "emeasure (distr M N X) {x\<in>space N. P x} = emeasure M {x\<in>space M. P (X x)}"
by (subst emeasure_distr)
(auto intro!: arg_cong2[where f=emeasure] X(1)[THEN measurable_space])
lemma emeasure_lfp2[consumes 1, case_names cont f measurable]:
assumes "P M"
assumes cont: "sup_continuous F"
assumes f: "\<And>M. P M \<Longrightarrow> f \<in> measurable M' M"
assumes *: "\<And>M A. P M \<Longrightarrow> (\<And>N. P N \<Longrightarrow> Measurable.pred N A) \<Longrightarrow> Measurable.pred M (F A)"
shows "emeasure M' {x\<in>space M'. lfp F (f x)} = (SUP i. emeasure M' {x\<in>space M'. (F ^^ i) (\<lambda>x. False) (f x)})"
proof (subst (1 2) emeasure_Collect_distr[symmetric, where X=f])
show "f \<in> measurable M' M" "f \<in> measurable M' M"
using f[OF \<open>P M\<close>] by auto
{ fix i show "Measurable.pred M ((F ^^ i) (\<lambda>x. False))"
using \<open>P M\<close> by (induction i arbitrary: M) (auto intro!: *) }
show "Measurable.pred M (lfp F)"
using \<open>P M\<close> cont * by (rule measurable_lfp_coinduct[of P])
have "emeasure (distr M' M f) {x \<in> space (distr M' M f). lfp F x} =
(SUP i. emeasure (distr M' M f) {x \<in> space (distr M' M f). (F ^^ i) (\<lambda>x. False) x})"
using \<open>P M\<close>
proof (coinduction arbitrary: M rule: emeasure_lfp')
case (measurable A N) then have "\<And>N. P N \<Longrightarrow> Measurable.pred (distr M' N f) A"
by metis
then have "\<And>N. P N \<Longrightarrow> Measurable.pred N A"
by simp
with \<open>P N\<close>[THEN *] show ?case
by auto
qed fact
then show "emeasure (distr M' M f) {x \<in> space M. lfp F x} =
(SUP i. emeasure (distr M' M f) {x \<in> space M. (F ^^ i) (\<lambda>x. False) x})"
by simp
qed
lemma distr_id[simp]: "distr N N (\<lambda>x. x) = N"
by (rule measure_eqI) (auto simp: emeasure_distr)
lemma distr_id2: "sets M = sets N \<Longrightarrow> distr N M (\<lambda>x. x) = N"
by (rule measure_eqI) (auto simp: emeasure_distr)
lemma measure_distr:
"f \<in> measurable M N \<Longrightarrow> S \<in> sets N \<Longrightarrow> measure (distr M N f) S = measure M (f -` S \<inter> space M)"
by (simp add: emeasure_distr measure_def)
lemma distr_cong_AE:
assumes 1: "M = K" "sets N = sets L" and
2: "(AE x in M. f x = g x)" and "f \<in> measurable M N" and "g \<in> measurable K L"
shows "distr M N f = distr K L g"
proof (rule measure_eqI)
fix A assume "A \<in> sets (distr M N f)"
with assms show "emeasure (distr M N f) A = emeasure (distr K L g) A"
by (auto simp add: emeasure_distr intro!: emeasure_eq_AE measurable_sets)
qed (insert 1, simp)
lemma AE_distrD:
assumes f: "f \<in> measurable M M'"
and AE: "AE x in distr M M' f. P x"
shows "AE x in M. P (f x)"
proof -
from AE[THEN AE_E] obtain N
where "{x \<in> space (distr M M' f). \<not> P x} \<subseteq> N"
"emeasure (distr M M' f) N = 0"
"N \<in> sets (distr M M' f)"
by auto
with f show ?thesis
by (simp add: eventually_ae_filter, intro bexI[of _ "f -` N \<inter> space M"])
(auto simp: emeasure_distr measurable_def)
qed
lemma AE_distr_iff:
assumes f[measurable]: "f \<in> measurable M N" and P[measurable]: "{x \<in> space N. P x} \<in> sets N"
shows "(AE x in distr M N f. P x) \<longleftrightarrow> (AE x in M. P (f x))"
proof (subst (1 2) AE_iff_measurable[OF _ refl])
have "f -` {x\<in>space N. \<not> P x} \<inter> space M = {x \<in> space M. \<not> P (f x)}"
using f[THEN measurable_space] by auto
then show "(emeasure (distr M N f) {x \<in> space (distr M N f). \<not> P x} = 0) =
(emeasure M {x \<in> space M. \<not> P (f x)} = 0)"
by (simp add: emeasure_distr)
qed auto
lemma null_sets_distr_iff:
"f \<in> measurable M N \<Longrightarrow> A \<in> null_sets (distr M N f) \<longleftrightarrow> f -` A \<inter> space M \<in> null_sets M \<and> A \<in> sets N"
by (auto simp add: null_sets_def emeasure_distr)
proposition distr_distr:
"g \<in> measurable N L \<Longrightarrow> f \<in> measurable M N \<Longrightarrow> distr (distr M N f) L g = distr M L (g \<circ> f)"
by (auto simp add: emeasure_distr measurable_space
intro!: arg_cong[where f="emeasure M"] measure_eqI)
subsection\<^marker>\<open>tag unimportant\<close> \<open>Real measure values\<close>
lemma ring_of_finite_sets: "ring_of_sets (space M) {A\<in>sets M. emeasure M A \<noteq> top}"
proof (rule ring_of_setsI)
show "a \<in> {A \<in> sets M. emeasure M A \<noteq> top} \<Longrightarrow> b \<in> {A \<in> sets M. emeasure M A \<noteq> top} \<Longrightarrow>
a \<union> b \<in> {A \<in> sets M. emeasure M A \<noteq> top}" for a b
using emeasure_subadditive[of a M b] by (auto simp: top_unique)
show "a \<in> {A \<in> sets M. emeasure M A \<noteq> top} \<Longrightarrow> b \<in> {A \<in> sets M. emeasure M A \<noteq> top} \<Longrightarrow>
a - b \<in> {A \<in> sets M. emeasure M A \<noteq> top}" for a b
using emeasure_mono[of "a - b" a M] by (auto simp: top_unique)
qed (auto dest: sets.sets_into_space)
lemma measure_nonneg[simp]: "0 \<le> measure M A"
unfolding measure_def by auto
lemma measure_nonneg' [simp]: "\<not> measure M A < 0"
using measure_nonneg not_le by blast
lemma zero_less_measure_iff: "0 < measure M A \<longleftrightarrow> measure M A \<noteq> 0"
using measure_nonneg[of M A] by (auto simp add: le_less)
lemma measure_le_0_iff: "measure M X \<le> 0 \<longleftrightarrow> measure M X = 0"
using measure_nonneg[of M X] by linarith
lemma measure_empty[simp]: "measure M {} = 0"
unfolding measure_def by (simp add: zero_ennreal.rep_eq)
lemma emeasure_eq_ennreal_measure:
"emeasure M A \<noteq> top \<Longrightarrow> emeasure M A = ennreal (measure M A)"
by (cases "emeasure M A" rule: ennreal_cases) (auto simp: measure_def)
lemma measure_zero_top: "emeasure M A = top \<Longrightarrow> measure M A = 0"
by (simp add: measure_def)
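text \<open>Editorial note: \<open>measure M A\<close> is the real-valued projection \<open>enn2real (emeasure M A)\<close>
of the extended-real measure, so sets of infinite measure are sent to \<open>0\<close> (\<open>measure_zero_top\<close>).
This explains the explicit finiteness hypotheses such as \<open>emeasure M A \<noteq> \<infinity>\<close> in the lemmas
about \<open>measure\<close> below.\<close>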
lemma measure_eq_emeasure_eq_ennreal: "0 \<le> x \<Longrightarrow> emeasure M A = ennreal x \<Longrightarrow> measure M A = x"
using emeasure_eq_ennreal_measure[of M A]
by (cases "A \<in> M") (auto simp: measure_notin_sets emeasure_notin_sets)
lemma enn2real_plus:"a < top \<Longrightarrow> b < top \<Longrightarrow> enn2real (a + b) = enn2real a + enn2real b"
by (simp add: enn2real_def plus_ennreal.rep_eq real_of_ereal_add less_top
del: real_of_ereal_enn2ereal)
lemma enn2real_sum:"(\<And>i. i \<in> I \<Longrightarrow> f i < top) \<Longrightarrow> enn2real (sum f I) = sum (enn2real \<circ> f) I"
by (induction I rule: infinite_finite_induct) (auto simp: enn2real_plus)
lemma measure_eq_AE:
assumes iff: "AE x in M. x \<in> A \<longleftrightarrow> x \<in> B"
assumes A: "A \<in> sets M" and B: "B \<in> sets M"
shows "measure M A = measure M B"
using assms emeasure_eq_AE[OF assms] by (simp add: measure_def)
lemma measure_Union:
"emeasure M A \<noteq> \<infinity> \<Longrightarrow> emeasure M B \<noteq> \<infinity> \<Longrightarrow> A \<in> sets M \<Longrightarrow> B \<in> sets M \<Longrightarrow> A \<inter> B = {} \<Longrightarrow>
measure M (A \<union> B) = measure M A + measure M B"
by (simp add: measure_def plus_emeasure[symmetric] enn2real_plus less_top)
lemma measure_finite_Union:
"finite S \<Longrightarrow> A`S \<subseteq> sets M \<Longrightarrow> disjoint_family_on A S \<Longrightarrow> (\<And>i. i \<in> S \<Longrightarrow> emeasure M (A i) \<noteq> \<infinity>) \<Longrightarrow>
measure M (\<Union>i\<in>S. A i) = (\<Sum>i\<in>S. measure M (A i))"
by (induction S rule: finite_induct)
(auto simp: disjoint_family_on_insert measure_Union sum_emeasure[symmetric] sets.countable_UN'[OF countable_finite])
lemma measure_Diff:
assumes finite: "emeasure M A \<noteq> \<infinity>"
and measurable: "A \<in> sets M" "B \<in> sets M" "B \<subseteq> A"
shows "measure M (A - B) = measure M A - measure M B"
proof -
have "emeasure M (A - B) \<le> emeasure M A" "emeasure M B \<le> emeasure M A"
using measurable by (auto intro!: emeasure_mono)
hence "measure M ((A - B) \<union> B) = measure M (A - B) + measure M B"
using measurable finite by (rule_tac measure_Union) (auto simp: top_unique)
thus ?thesis using \<open>B \<subseteq> A\<close> by (auto simp: Un_absorb2)
qed
lemma measure_UNION:
assumes measurable: "range A \<subseteq> sets M" "disjoint_family A"
assumes finite: "emeasure M (\<Union>i. A i) \<noteq> \<infinity>"
shows "(\<lambda>i. measure M (A i)) sums (measure M (\<Union>i. A i))"
proof -
have "(\<lambda>i. emeasure M (A i)) sums (emeasure M (\<Union>i. A i))"
unfolding suminf_emeasure[OF measurable, symmetric] by (simp add: summable_sums)
moreover
{ fix i
have "emeasure M (A i) \<le> emeasure M (\<Union>i. A i)"
using measurable by (auto intro!: emeasure_mono)
then have "emeasure M (A i) = ennreal ((measure M (A i)))"
using finite by (intro emeasure_eq_ennreal_measure) (auto simp: top_unique) }
ultimately show ?thesis using finite
by (subst (asm) (2) emeasure_eq_ennreal_measure) simp_all
qed
lemma measure_subadditive:
assumes measurable: "A \<in> sets M" "B \<in> sets M"
and fin: "emeasure M A \<noteq> \<infinity>" "emeasure M B \<noteq> \<infinity>"
shows "measure M (A \<union> B) \<le> measure M A + measure M B"
proof -
have "emeasure M (A \<union> B) \<noteq> \<infinity>"
using emeasure_subadditive[OF measurable] fin by (auto simp: top_unique)
then show "(measure M (A \<union> B)) \<le> (measure M A) + (measure M B)"
unfolding measure_def
by (metis emeasure_subadditive[OF measurable] fin enn2real_mono enn2real_plus
ennreal_add_less_top infinity_ennreal_def less_top)
qed
lemma measure_subadditive_finite:
assumes A: "finite I" "A`I \<subseteq> sets M" and fin: "\<And>i. i \<in> I \<Longrightarrow> emeasure M (A i) \<noteq> \<infinity>"
shows "measure M (\<Union>i\<in>I. A i) \<le> (\<Sum>i\<in>I. measure M (A i))"
proof -
{ have "emeasure M (\<Union>i\<in>I. A i) \<le> (\<Sum>i\<in>I. emeasure M (A i))"
using emeasure_subadditive_finite[OF A] .
also have "\<dots> < \<infinity>"
using fin by (simp add: less_top A)
finally have "emeasure M (\<Union>i\<in>I. A i) \<noteq> top" by simp }
note * = this
show ?thesis
using emeasure_subadditive_finite[OF A] fin
unfolding emeasure_eq_ennreal_measure[OF *]
by (simp_all add: sum_nonneg emeasure_eq_ennreal_measure)
qed
lemma measure_subadditive_countably:
assumes A: "range A \<subseteq> sets M" and fin: "(\<Sum>i. emeasure M (A i)) \<noteq> \<infinity>"
shows "measure M (\<Union>i. A i) \<le> (\<Sum>i. measure M (A i))"
proof -
have **: "\<And>i. emeasure M (A i) \<noteq> top"
using fin ennreal_suminf_lessD[of "\<lambda>i. emeasure M (A i)"] by (simp add: less_top)
have ge0: "(\<Sum>i. Sigma_Algebra.measure M (A i)) \<ge> 0"
using fin emeasure_eq_ennreal_measure[OF **]
by (metis infinity_ennreal_def measure_nonneg suminf_cong suminf_nonneg summable_suminf_not_top)
have "emeasure M (\<Union>i. A i) \<noteq> top"
by (metis A emeasure_subadditive_countably fin infinity_ennreal_def neq_top_trans)
then have "ennreal (measure M (\<Union>i. A i)) = emeasure M (\<Union>i. A i)"
by (rule emeasure_eq_ennreal_measure[symmetric])
also have "\<dots> \<le> (\<Sum>i. emeasure M (A i))"
using emeasure_subadditive_countably[OF A] .
also have "\<dots> = ennreal (\<Sum>i. measure M (A i))"
using fin unfolding emeasure_eq_ennreal_measure[OF **]
by (subst suminf_ennreal) (auto simp: **)
finally show ?thesis
using ge0 ennreal_le_iff by blast
qed
lemma measure_Un_null_set: "A \<in> sets M \<Longrightarrow> B \<in> null_sets M \<Longrightarrow> measure M (A \<union> B) = measure M A"
by (simp add: measure_def emeasure_Un_null_set)
lemma measure_Diff_null_set: "A \<in> sets M \<Longrightarrow> B \<in> null_sets M \<Longrightarrow> measure M (A - B) = measure M A"
by (simp add: measure_def emeasure_Diff_null_set)
lemma measure_eq_sum_singleton:
"finite S \<Longrightarrow> (\<And>x. x \<in> S \<Longrightarrow> {x} \<in> sets M) \<Longrightarrow> (\<And>x. x \<in> S \<Longrightarrow> emeasure M {x} \<noteq> \<infinity>) \<Longrightarrow>
measure M S = (\<Sum>x\<in>S. measure M {x})"
using emeasure_eq_sum_singleton[of S M]
by (intro measure_eq_emeasure_eq_ennreal) (auto simp: sum_nonneg emeasure_eq_ennreal_measure)
lemma Lim_measure_incseq:
assumes A: "range A \<subseteq> sets M" "incseq A" and fin: "emeasure M (\<Union>i. A i) \<noteq> \<infinity>"
shows "(\<lambda>i. measure M (A i)) \<longlonglongrightarrow> measure M (\<Union>i. A i)"
proof (rule tendsto_ennrealD)
have "ennreal (measure M (\<Union>i. A i)) = emeasure M (\<Union>i. A i)"
using fin by (auto simp: emeasure_eq_ennreal_measure)
moreover have "ennreal (measure M (A i)) = emeasure M (A i)" for i
using assms emeasure_mono[of "A _" "\<Union>i. A i" M]
by (intro emeasure_eq_ennreal_measure[symmetric]) (auto simp: less_top UN_upper intro: le_less_trans)
ultimately show "(\<lambda>x. ennreal (measure M (A x))) \<longlonglongrightarrow> ennreal (measure M (\<Union>i. A i))"
using A by (auto intro!: Lim_emeasure_incseq)
qed auto
lemma Lim_measure_decseq:
assumes A: "range A \<subseteq> sets M" "decseq A" and fin: "\<And>i. emeasure M (A i) \<noteq> \<infinity>"
shows "(\<lambda>n. measure M (A n)) \<longlonglongrightarrow> measure M (\<Inter>i. A i)"
proof (rule tendsto_ennrealD)
have "ennreal (measure M (\<Inter>i. A i)) = emeasure M (\<Inter>i. A i)"
using fin[of 0] A emeasure_mono[of "\<Inter>i. A i" "A 0" M]
by (auto intro!: emeasure_eq_ennreal_measure[symmetric] simp: INT_lower less_top intro: le_less_trans)
moreover have "ennreal (measure M (A i)) = emeasure M (A i)" for i
using A fin[of i] by (intro emeasure_eq_ennreal_measure[symmetric]) auto
ultimately show "(\<lambda>x. ennreal (measure M (A x))) \<longlonglongrightarrow> ennreal (measure M (\<Inter>i. A i))"
using fin A by (auto intro!: Lim_emeasure_decseq)
qed auto
subsection \<open>Set of measurable sets with finite measure\<close>
definition\<^marker>\<open>tag important\<close> fmeasurable :: "'a measure \<Rightarrow> 'a set set" where
"fmeasurable M = {A\<in>sets M. emeasure M A < \<infinity>}"
lemma fmeasurableD[dest, measurable_dest]: "A \<in> fmeasurable M \<Longrightarrow> A \<in> sets M"
by (auto simp: fmeasurable_def)
lemma fmeasurableD2: "A \<in> fmeasurable M \<Longrightarrow> emeasure M A \<noteq> top"
by (auto simp: fmeasurable_def)
lemma fmeasurableI: "A \<in> sets M \<Longrightarrow> emeasure M A < \<infinity> \<Longrightarrow> A \<in> fmeasurable M"
by (auto simp: fmeasurable_def)
lemma fmeasurableI_null_sets: "A \<in> null_sets M \<Longrightarrow> A \<in> fmeasurable M"
by (auto simp: fmeasurable_def)
lemma fmeasurableI2: "A \<in> fmeasurable M \<Longrightarrow> B \<subseteq> A \<Longrightarrow> B \<in> sets M \<Longrightarrow> B \<in> fmeasurable M"
using emeasure_mono[of B A M] by (auto simp: fmeasurable_def)
lemma measure_mono_fmeasurable:
"A \<subseteq> B \<Longrightarrow> A \<in> sets M \<Longrightarrow> B \<in> fmeasurable M \<Longrightarrow> measure M A \<le> measure M B"
by (auto simp: measure_def fmeasurable_def intro!: emeasure_mono enn2real_mono)
lemma emeasure_eq_measure2: "A \<in> fmeasurable M \<Longrightarrow> emeasure M A = measure M A"
by (simp add: emeasure_eq_ennreal_measure fmeasurable_def less_top)
interpretation fmeasurable: ring_of_sets "space M" "fmeasurable M"
proof (rule ring_of_setsI)
show "fmeasurable M \<subseteq> Pow (space M)" "{} \<in> fmeasurable M"
by (auto simp: fmeasurable_def dest: sets.sets_into_space)
fix a b assume *: "a \<in> fmeasurable M" "b \<in> fmeasurable M"
then have "emeasure M (a \<union> b) \<le> emeasure M a + emeasure M b"
by (intro emeasure_subadditive) auto
also have "\<dots> < top"
using * by (auto simp: fmeasurable_def)
finally show "a \<union> b \<in> fmeasurable M"
using * by (auto intro: fmeasurableI)
show "a - b \<in> fmeasurable M"
using emeasure_mono[of "a - b" a M] * by (auto simp: fmeasurable_def)
qed
subsection\<^marker>\<open>tag unimportant\<close>\<open>Measurable sets formed by unions and intersections\<close>
lemma fmeasurable_Diff: "A \<in> fmeasurable M \<Longrightarrow> B \<in> sets M \<Longrightarrow> A - B \<in> fmeasurable M"
using fmeasurableI2[of A M "A - B"] by auto
lemma fmeasurable_Int_fmeasurable:
"\<lbrakk>S \<in> fmeasurable M; T \<in> sets M\<rbrakk> \<Longrightarrow> (S \<inter> T) \<in> fmeasurable M"
by (meson fmeasurableD fmeasurableI2 inf_le1 sets.Int)
lemma fmeasurable_UN:
assumes "countable I" "\<And>i. i \<in> I \<Longrightarrow> F i \<subseteq> A" "\<And>i. i \<in> I \<Longrightarrow> F i \<in> sets M" "A \<in> fmeasurable M"
shows "(\<Union>i\<in>I. F i) \<in> fmeasurable M"
proof (rule fmeasurableI2)
show "A \<in> fmeasurable M" "(\<Union>i\<in>I. F i) \<subseteq> A" using assms by auto
show "(\<Union>i\<in>I. F i) \<in> sets M"
using assms by (intro sets.countable_UN') auto
qed
lemma fmeasurable_INT:
assumes "countable I" "i \<in> I" "\<And>i. i \<in> I \<Longrightarrow> F i \<in> sets M" "F i \<in> fmeasurable M"
shows "(\<Inter>i\<in>I. F i) \<in> fmeasurable M"
proof (rule fmeasurableI2)
show "F i \<in> fmeasurable M" "(\<Inter>i\<in>I. F i) \<subseteq> F i"
using assms by auto
show "(\<Inter>i\<in>I. F i) \<in> sets M"
using assms by (intro sets.countable_INT') auto
qed
lemma measurable_measure_Diff:
assumes "A \<in> fmeasurable M" "B \<in> sets M" "B \<subseteq> A"
shows "measure M (A - B) = measure M A - measure M B"
by (simp add: assms fmeasurableD fmeasurableD2 measure_Diff)
lemma measurable_Un_null_set:
assumes "B \<in> null_sets M"
shows "(A \<union> B \<in> fmeasurable M \<and> A \<in> sets M) \<longleftrightarrow> A \<in> fmeasurable M"
using assms by (fastforce simp add: fmeasurable.Un fmeasurableI_null_sets intro: fmeasurableI2)
lemma measurable_Diff_null_set:
assumes "B \<in> null_sets M"
shows "(A - B) \<in> fmeasurable M \<and> A \<in> sets M \<longleftrightarrow> A \<in> fmeasurable M"
using assms
by (metis Un_Diff_cancel2 fmeasurable.Diff fmeasurableD fmeasurableI_null_sets measurable_Un_null_set)
lemma fmeasurable_Diff_D:
assumes m: "T - S \<in> fmeasurable M" "S \<in> fmeasurable M" and sub: "S \<subseteq> T"
shows "T \<in> fmeasurable M"
proof -
have "T = S \<union> (T - S)"
using assms by blast
then show ?thesis
by (metis m fmeasurable.Un)
qed
lemma measure_Un2:
"A \<in> fmeasurable M \<Longrightarrow> B \<in> fmeasurable M \<Longrightarrow> measure M (A \<union> B) = measure M A + measure M (B - A)"
using measure_Union[of M A "B - A"] by (auto simp: fmeasurableD2 fmeasurable.Diff)
lemma measure_Un3:
assumes "A \<in> fmeasurable M" "B \<in> fmeasurable M"
shows "measure M (A \<union> B) = measure M A + measure M B - measure M (A \<inter> B)"
proof -
have "measure M (A \<union> B) = measure M A + measure M (B - A)"
using assms by (rule measure_Un2)
also have "B - A = B - (A \<inter> B)"
by auto
also have "measure M (B - (A \<inter> B)) = measure M B - measure M (A \<inter> B)"
using assms by (intro measure_Diff) (auto simp: fmeasurable_def)
finally show ?thesis
by simp
qed
lemma measure_Un_AE:
"AE x in M. x \<notin> A \<or> x \<notin> B \<Longrightarrow> A \<in> fmeasurable M \<Longrightarrow> B \<in> fmeasurable M \<Longrightarrow>
measure M (A \<union> B) = measure M A + measure M B"
by (subst measure_Un2) (auto intro!: measure_eq_AE)
lemma measure_UNION_AE:
assumes I: "finite I"
shows "(\<And>i. i \<in> I \<Longrightarrow> F i \<in> fmeasurable M) \<Longrightarrow> pairwise (\<lambda>i j. AE x in M. x \<notin> F i \<or> x \<notin> F j) I \<Longrightarrow>
measure M (\<Union>i\<in>I. F i) = (\<Sum>i\<in>I. measure M (F i))"
unfolding AE_pairwise[OF countable_finite, OF I]
using I
proof (induction I rule: finite_induct)
case (insert x I)
have "measure M (F x \<union> \<Union>(F ` I)) = measure M (F x) + measure M (\<Union>(F ` I))"
by (rule measure_Un_AE) (use insert in \<open>auto simp: pairwise_insert\<close>)
with insert show ?case
by (simp add: pairwise_insert)
qed simp
lemma measure_UNION':
"finite I \<Longrightarrow> (\<And>i. i \<in> I \<Longrightarrow> F i \<in> fmeasurable M) \<Longrightarrow> pairwise (\<lambda>i j. disjnt (F i) (F j)) I \<Longrightarrow>
measure M (\<Union>i\<in>I. F i) = (\<Sum>i\<in>I. measure M (F i))"
by (intro measure_UNION_AE) (auto simp: disjnt_def elim!: pairwise_mono intro!: always_eventually)
lemma measure_Union_AE:
"finite F \<Longrightarrow> (\<And>S. S \<in> F \<Longrightarrow> S \<in> fmeasurable M) \<Longrightarrow> pairwise (\<lambda>S T. AE x in M. x \<notin> S \<or> x \<notin> T) F \<Longrightarrow>
measure M (\<Union>F) = (\<Sum>S\<in>F. measure M S)"
using measure_UNION_AE[of F "\<lambda>x. x" M] by simp
lemma measure_Union':
"finite F \<Longrightarrow> (\<And>S. S \<in> F \<Longrightarrow> S \<in> fmeasurable M) \<Longrightarrow> pairwise disjnt F \<Longrightarrow> measure M (\<Union>F) = (\<Sum>S\<in>F. measure M S)"
using measure_UNION'[of F "\<lambda>x. x" M] by simp
lemma measure_Un_le:
assumes "A \<in> sets M" "B \<in> sets M" shows "measure M (A \<union> B) \<le> measure M A + measure M B"
proof cases
assume "A \<in> fmeasurable M \<and> B \<in> fmeasurable M"
with measure_subadditive[of A M B] assms show ?thesis
by (auto simp: fmeasurableD2)
next
assume "\<not> (A \<in> fmeasurable M \<and> B \<in> fmeasurable M)"
then have "A \<union> B \<notin> fmeasurable M"
using fmeasurableI2[of "A \<union> B" M A] fmeasurableI2[of "A \<union> B" M B] assms by auto
with assms show ?thesis
by (auto simp: fmeasurable_def measure_def less_top[symmetric])
qed
lemma measure_UNION_le:
"finite I \<Longrightarrow> (\<And>i. i \<in> I \<Longrightarrow> F i \<in> sets M) \<Longrightarrow> measure M (\<Union>i\<in>I. F i) \<le> (\<Sum>i\<in>I. measure M (F i))"
proof (induction I rule: finite_induct)
case (insert i I)
then have "measure M (\<Union>i\<in>insert i I. F i) = measure M (F i \<union> \<Union> (F ` I))"
by simp
also from insert have "measure M (F i \<union> \<Union> (F ` I)) \<le> measure M (F i) + measure M (\<Union> (F ` I))"
by (intro measure_Un_le sets.finite_Union) auto
also have "measure M (\<Union>i\<in>I. F i) \<le> (\<Sum>i\<in>I. measure M (F i))"
using insert by auto
finally show ?case
using insert by simp
qed simp
lemma measure_Union_le:
"finite F \<Longrightarrow> (\<And>S. S \<in> F \<Longrightarrow> S \<in> sets M) \<Longrightarrow> measure M (\<Union>F) \<le> (\<Sum>S\<in>F. measure M S)"
using measure_UNION_le[of F "\<lambda>x. x" M] by simp
text\<open>Version for indexed union over a countable set\<close>
lemma
assumes "countable I" and I: "\<And>i. i \<in> I \<Longrightarrow> A i \<in> fmeasurable M"
and bound: "\<And>I'. I' \<subseteq> I \<Longrightarrow> finite I' \<Longrightarrow> measure M (\<Union>i\<in>I'. A i) \<le> B"
shows fmeasurable_UN_bound: "(\<Union>i\<in>I. A i) \<in> fmeasurable M" (is ?fm)
and measure_UN_bound: "measure M (\<Union>i\<in>I. A i) \<le> B" (is ?m)
proof -
have "B \<ge> 0"
using bound by force
have "?fm \<and> ?m"
proof cases
assume "I = {}"
with \<open>B \<ge> 0\<close> show ?thesis
by simp
next
assume "I \<noteq> {}"
have "(\<Union>i\<in>I. A i) = (\<Union>i. (\<Union>n\<le>i. A (from_nat_into I n)))"
by (subst range_from_nat_into[symmetric, OF \<open>I \<noteq> {}\<close> \<open>countable I\<close>]) auto
then have "emeasure M (\<Union>i\<in>I. A i) = emeasure M (\<Union>i. (\<Union>n\<le>i. A (from_nat_into I n)))" by simp
also have "\<dots> = (SUP i. emeasure M (\<Union>n\<le>i. A (from_nat_into I n)))"
using I \<open>I \<noteq> {}\<close>[THEN from_nat_into] by (intro SUP_emeasure_incseq[symmetric]) (fastforce simp: incseq_Suc_iff)+
also have "\<dots> \<le> B"
proof (intro SUP_least)
fix i :: nat
have "emeasure M (\<Union>n\<le>i. A (from_nat_into I n)) = measure M (\<Union>n\<le>i. A (from_nat_into I n))"
using I \<open>I \<noteq> {}\<close>[THEN from_nat_into] by (intro emeasure_eq_measure2 fmeasurable.finite_UN) auto
also have "\<dots> = measure M (\<Union>n\<in>from_nat_into I ` {..i}. A n)"
by simp
also have "\<dots> \<le> B"
by (intro ennreal_leI bound) (auto intro: from_nat_into[OF \<open>I \<noteq> {}\<close>])
finally show "emeasure M (\<Union>n\<le>i. A (from_nat_into I n)) \<le> ennreal B" .
qed
finally have *: "emeasure M (\<Union>i\<in>I. A i) \<le> B" .
then have ?fm
using I \<open>countable I\<close> by (intro fmeasurableI conjI) (auto simp: less_top[symmetric] top_unique)
with * \<open>0\<le>B\<close> show ?thesis
by (simp add: emeasure_eq_measure2)
qed
then show ?fm ?m by auto
qed
text\<open>Version for big union of a countable set\<close>
lemma
assumes "countable \<D>"
and meas: "\<And>D. D \<in> \<D> \<Longrightarrow> D \<in> fmeasurable M"
and bound: "\<And>\<E>. \<lbrakk>\<E> \<subseteq> \<D>; finite \<E>\<rbrakk> \<Longrightarrow> measure M (\<Union>\<E>) \<le> B"
shows fmeasurable_Union_bound: "\<Union>\<D> \<in> fmeasurable M" (is ?fm)
and measure_Union_bound: "measure M (\<Union>\<D>) \<le> B" (is ?m)
proof -
have "B \<ge> 0"
using bound by force
have "?fm \<and> ?m"
proof (cases "\<D> = {}")
case True
with \<open>B \<ge> 0\<close> show ?thesis
by auto
next
case False
then obtain D :: "nat \<Rightarrow> 'a set" where D: "\<D> = range D"
using \<open>countable \<D>\<close> uncountable_def by force
have 1: "\<And>i. D i \<in> fmeasurable M"
by (simp add: D meas)
have 2: "\<And>I'. finite I' \<Longrightarrow> measure M (\<Union>x\<in>I'. D x) \<le> B"
by (simp add: D bound image_subset_iff)
show ?thesis
unfolding D
by (intro conjI fmeasurable_UN_bound [OF _ 1 2] measure_UN_bound [OF _ 1 2]) auto
qed
then show ?fm ?m by auto
qed
text\<open>Version for indexed union over the type of naturals\<close>
lemma
fixes S :: "nat \<Rightarrow> 'a set"
assumes S: "\<And>i. S i \<in> fmeasurable M" and B: "\<And>n. measure M (\<Union>i\<le>n. S i) \<le> B"
shows fmeasurable_countable_Union: "(\<Union>i. S i) \<in> fmeasurable M"
and measure_countable_Union_le: "measure M (\<Union>i. S i) \<le> B"
proof -
have mB: "measure M (\<Union>i\<in>I. S i) \<le> B" if "finite I" for I
proof -
have "(\<Union>i\<in>I. S i) \<subseteq> (\<Union>i\<le>Max I. S i)"
using Max_ge that by force
then have "measure M (\<Union>i\<in>I. S i) \<le> measure M (\<Union>i \<le> Max I. S i)"
by (rule measure_mono_fmeasurable) (use S in \<open>blast+\<close>)
then show ?thesis
using B order_trans by blast
qed
show "(\<Union>i. S i) \<in> fmeasurable M"
by (auto intro: fmeasurable_UN_bound [OF _ S mB])
show "measure M (\<Union>n. S n) \<le> B"
by (auto intro: measure_UN_bound [OF _ S mB])
qed
lemma measure_diff_le_measure_setdiff:
assumes "S \<in> fmeasurable M" "T \<in> fmeasurable M"
shows "measure M S - measure M T \<le> measure M (S - T)"
proof -
have "measure M S \<le> measure M ((S - T) \<union> T)"
by (simp add: assms fmeasurable.Un fmeasurableD measure_mono_fmeasurable)
also have "\<dots> \<le> measure M (S - T) + measure M T"
using assms by (blast intro: measure_Un_le)
finally show ?thesis
by (simp add: algebra_simps)
qed
lemma suminf_exist_split2:
fixes f :: "nat \<Rightarrow> 'a::real_normed_vector"
assumes "summable f"
shows "(\<lambda>n. (\<Sum>k. f(k+n))) \<longlonglongrightarrow> 0"
by (subst lim_sequentially, auto simp add: dist_norm suminf_exist_split[OF _ assms])
lemma emeasure_union_summable:
assumes [measurable]: "\<And>n. A n \<in> sets M"
and "\<And>n. emeasure M (A n) < \<infinity>" "summable (\<lambda>n. measure M (A n))"
shows "emeasure M (\<Union>n. A n) < \<infinity>" "emeasure M (\<Union>n. A n) \<le> (\<Sum>n. measure M (A n))"
proof -
define B where "B = (\<lambda>N. (\<Union>n\<in>{..<N}. A n))"
have [measurable]: "B N \<in> sets M" for N unfolding B_def by auto
have "(\<lambda>N. emeasure M (B N)) \<longlonglongrightarrow> emeasure M (\<Union>N. B N)"
apply (rule Lim_emeasure_incseq) unfolding B_def by (auto simp add: SUP_subset_mono incseq_def)
moreover have "emeasure M (B N) \<le> ennreal (\<Sum>n. measure M (A n))" for N
proof -
have *: "(\<Sum>n<N. measure M (A n)) \<le> (\<Sum>n. measure M (A n))"
using assms(3) measure_nonneg sum_le_suminf by blast
have "emeasure M (B N) \<le> (\<Sum>n<N. emeasure M (A n))"
unfolding B_def by (rule emeasure_subadditive_finite, auto)
also have "\<dots> = (\<Sum>n<N. ennreal(measure M (A n)))"
using assms(2) by (simp add: emeasure_eq_ennreal_measure less_top)
also have "\<dots> = ennreal (\<Sum>n<N. measure M (A n))"
by auto
also have "\<dots> \<le> ennreal (\<Sum>n. measure M (A n))"
using * by (auto simp: ennreal_leI)
finally show ?thesis by simp
qed
ultimately have "emeasure M (\<Union>N. B N) \<le> ennreal (\<Sum>n. measure M (A n))"
by (simp add: Lim_bounded)
then show "emeasure M (\<Union>n. A n) \<le> (\<Sum>n. measure M (A n))"
unfolding B_def by (metis UN_UN_flatten UN_lessThan_UNIV)
then show "emeasure M (\<Union>n. A n) < \<infinity>"
by (auto simp: less_top[symmetric] top_unique)
qed
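text\<open>The first Borel-Cantelli lemma: if the measures of the sets \<open>A n\<close> are summable,
  then almost every point belongs to at most finitely many of them\<close>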
lemma borel_cantelli_limsup1:
assumes [measurable]: "\<And>n. A n \<in> sets M"
and "\<And>n. emeasure M (A n) < \<infinity>" "summable (\<lambda>n. measure M (A n))"
shows "limsup A \<in> null_sets M"
proof -
have "emeasure M (limsup A) \<le> 0"
proof (rule LIMSEQ_le_const)
have "(\<lambda>n. (\<Sum>k. measure M (A (k+n)))) \<longlonglongrightarrow> 0" by (rule suminf_exist_split2[OF assms(3)])
then show "(\<lambda>n. ennreal (\<Sum>k. measure M (A (k+n)))) \<longlonglongrightarrow> 0"
unfolding ennreal_0[symmetric] by (intro tendsto_ennrealI)
have "emeasure M (limsup A) \<le> (\<Sum>k. measure M (A (k+n)))" for n
proof -
have I: "(\<Union>k\<in>{n..}. A k) = (\<Union>k. A (k+n))" by (auto, metis le_add_diff_inverse2, fastforce)
have "emeasure M (limsup A) \<le> emeasure M (\<Union>k\<in>{n..}. A k)"
by (rule emeasure_mono, auto simp add: limsup_INF_SUP)
also have "\<dots> = emeasure M (\<Union>k. A (k+n))"
using I by auto
also have "\<dots> \<le> (\<Sum>k. measure M (A (k+n)))"
apply (rule emeasure_union_summable)
using assms summable_ignore_initial_segment[OF assms(3), of n] by auto
finally show ?thesis by simp
qed
then show "\<exists>N. \<forall>n\<ge>N. emeasure M (limsup A) \<le> (\<Sum>k. measure M (A (k+n)))"
by auto
qed
then show ?thesis using assms(1) measurable_limsup by auto
qed
lemma borel_cantelli_AE1:
assumes [measurable]: "\<And>n. A n \<in> sets M"
and "\<And>n. emeasure M (A n) < \<infinity>" "summable (\<lambda>n. measure M (A n))"
shows "AE x in M. eventually (\<lambda>n. x \<in> space M - A n) sequentially"
proof -
have "AE x in M. x \<notin> limsup A"
using borel_cantelli_limsup1[OF assms] unfolding eventually_ae_filter by auto
moreover have "\<forall>\<^sub>F n in sequentially. x \<notin> A n" if "x \<notin> limsup A" for x
using that by (auto simp: limsup_INF_SUP eventually_sequentially)
ultimately show ?thesis by auto
qed
subsection \<open>Measure spaces with \<^term>\<open>emeasure M (space M) < \<infinity>\<close>\<close>
locale\<^marker>\<open>tag important\<close> finite_measure = sigma_finite_measure M for M +
assumes finite_emeasure_space: "emeasure M (space M) \<noteq> top"
lemma finite_measureI[Pure.intro!]:
"emeasure M (space M) \<noteq> \<infinity> \<Longrightarrow> finite_measure M"
proof qed (auto intro!: exI[of _ "{space M}"])
lemma (in finite_measure) emeasure_finite[simp, intro]: "emeasure M A \<noteq> top"
using finite_emeasure_space emeasure_space[of M A] by (auto simp: top_unique)
lemma (in finite_measure) fmeasurable_eq_sets: "fmeasurable M = sets M"
by (auto simp: fmeasurable_def less_top[symmetric])
lemma (in finite_measure) emeasure_eq_measure: "emeasure M A = ennreal (measure M A)"
by (intro emeasure_eq_ennreal_measure) simp
lemma (in finite_measure) emeasure_real: "\<exists>r. 0 \<le> r \<and> emeasure M A = ennreal r"
using emeasure_finite[of A] by (cases "emeasure M A" rule: ennreal_cases) auto
lemma (in finite_measure) bounded_measure: "measure M A \<le> measure M (space M)"
using emeasure_space[of M A] emeasure_real[of A] emeasure_real[of "space M"] by (auto simp: measure_def)
lemma (in finite_measure) finite_measure_Diff:
assumes sets: "A \<in> sets M" "B \<in> sets M" and "B \<subseteq> A"
shows "measure M (A - B) = measure M A - measure M B"
using measure_Diff[OF _ assms] by simp
lemma (in finite_measure) finite_measure_Union:
assumes sets: "A \<in> sets M" "B \<in> sets M" and "A \<inter> B = {}"
shows "measure M (A \<union> B) = measure M A + measure M B"
using measure_Union[OF _ _ assms] by simp
lemma (in finite_measure) finite_measure_finite_Union:
assumes measurable: "finite S" "A`S \<subseteq> sets M" "disjoint_family_on A S"
shows "measure M (\<Union>i\<in>S. A i) = (\<Sum>i\<in>S. measure M (A i))"
using measure_finite_Union[OF assms] by simp
lemma (in finite_measure) finite_measure_UNION:
assumes A: "range A \<subseteq> sets M" "disjoint_family A"
shows "(\<lambda>i. measure M (A i)) sums (measure M (\<Union>i. A i))"
using measure_UNION[OF A] by simp
lemma (in finite_measure) finite_measure_mono:
assumes "A \<subseteq> B" "B \<in> sets M" shows "measure M A \<le> measure M B"
using emeasure_mono[OF assms] emeasure_real[of A] emeasure_real[of B] by (auto simp: measure_def)
lemma (in finite_measure) finite_measure_subadditive:
assumes m: "A \<in> sets M" "B \<in> sets M"
shows "measure M (A \<union> B) \<le> measure M A + measure M B"
using measure_subadditive[OF m] by simp
lemma (in finite_measure) finite_measure_subadditive_finite:
assumes "finite I" "A`I \<subseteq> sets M" shows "measure M (\<Union>i\<in>I. A i) \<le> (\<Sum>i\<in>I. measure M (A i))"
using measure_subadditive_finite[OF assms] by simp
lemma (in finite_measure) finite_measure_subadditive_countably:
"range A \<subseteq> sets M \<Longrightarrow> summable (\<lambda>i. measure M (A i)) \<Longrightarrow> measure M (\<Union>i. A i) \<le> (\<Sum>i. measure M (A i))"
by (rule measure_subadditive_countably)
(simp_all add: ennreal_suminf_neq_top emeasure_eq_measure)
lemma (in finite_measure) finite_measure_eq_sum_singleton:
assumes "finite S" and *: "\<And>x. x \<in> S \<Longrightarrow> {x} \<in> sets M"
shows "measure M S = (\<Sum>x\<in>S. measure M {x})"
using measure_eq_sum_singleton[OF assms] by simp
lemma (in finite_measure) finite_Lim_measure_incseq:
assumes A: "range A \<subseteq> sets M" "incseq A"
shows "(\<lambda>i. measure M (A i)) \<longlonglongrightarrow> measure M (\<Union>i. A i)"
using Lim_measure_incseq[OF A] by simp
lemma (in finite_measure) finite_Lim_measure_decseq:
assumes A: "range A \<subseteq> sets M" "decseq A"
shows "(\<lambda>n. measure M (A n)) \<longlonglongrightarrow> measure M (\<Inter>i. A i)"
using Lim_measure_decseq[OF A] by simp
lemma (in finite_measure) finite_measure_compl:
assumes S: "S \<in> sets M"
shows "measure M (space M - S) = measure M (space M) - measure M S"
using measure_Diff[OF _ sets.top S sets.sets_into_space] S by simp
lemma (in finite_measure) finite_measure_mono_AE:
assumes imp: "AE x in M. x \<in> A \<longrightarrow> x \<in> B" and B: "B \<in> sets M"
shows "measure M A \<le> measure M B"
using assms emeasure_mono_AE[OF imp B]
by (simp add: emeasure_eq_measure)
lemma (in finite_measure) finite_measure_eq_AE:
assumes iff: "AE x in M. x \<in> A \<longleftrightarrow> x \<in> B"
assumes A: "A \<in> sets M" and B: "B \<in> sets M"
shows "measure M A = measure M B"
using assms emeasure_eq_AE[OF assms] by (simp add: emeasure_eq_measure)
lemma (in finite_measure) measure_increasing: "increasing M (measure M)"
by (auto intro!: finite_measure_mono simp: increasing_def)
lemma (in finite_measure) measure_zero_union:
assumes "s \<in> sets M" "t \<in> sets M" "measure M t = 0"
shows "measure M (s \<union> t) = measure M s"
using assms
proof -
have "measure M (s \<union> t) \<le> measure M s"
using finite_measure_subadditive[of s t] assms by auto
moreover have "measure M (s \<union> t) \<ge> measure M s"
using assms by (blast intro: finite_measure_mono)
ultimately show ?thesis by simp
qed
lemma (in finite_measure) measure_eq_compl:
assumes "s \<in> sets M" "t \<in> sets M"
assumes "measure M (space M - s) = measure M (space M - t)"
shows "measure M s = measure M t"
using assms finite_measure_compl by auto
lemma (in finite_measure) measure_eq_bigunion_image:
assumes "range f \<subseteq> sets M" "range g \<subseteq> sets M"
assumes "disjoint_family f" "disjoint_family g"
assumes "\<And> n :: nat. measure M (f n) = measure M (g n)"
shows "measure M (\<Union>i. f i) = measure M (\<Union>i. g i)"
using assms
proof -
have a: "(\<lambda> i. measure M (f i)) sums (measure M (\<Union>i. f i))"
by (rule finite_measure_UNION[OF assms(1,3)])
have b: "(\<lambda> i. measure M (g i)) sums (measure M (\<Union>i. g i))"
by (rule finite_measure_UNION[OF assms(2,4)])
show ?thesis using sums_unique[OF b] sums_unique[OF a] assms by simp
qed
lemma (in finite_measure) measure_countably_zero:
assumes "range c \<subseteq> sets M"
assumes "\<And> i. measure M (c i) = 0"
shows "measure M (\<Union>i :: nat. c i) = 0"
proof (rule antisym)
show "measure M (\<Union>i :: nat. c i) \<le> 0"
using finite_measure_subadditive_countably[OF assms(1)] by (simp add: assms(2))
qed simp
lemma (in finite_measure) measure_space_inter:
assumes events:"s \<in> sets M" "t \<in> sets M"
assumes "measure M t = measure M (space M)"
shows "measure M (s \<inter> t) = measure M s"
proof -
have "measure M ((space M - s) \<union> (space M - t)) = measure M (space M - s)"
using events assms finite_measure_compl[of "t"] by (auto intro!: measure_zero_union)
also have "(space M - s) \<union> (space M - t) = space M - (s \<inter> t)"
by blast
finally show "measure M (s \<inter> t) = measure M s"
using events by (auto intro!: measure_eq_compl[of "s \<inter> t" s])
qed
lemma (in finite_measure) measure_equiprobable_finite_unions:
assumes s: "finite s" "\<And>x. x \<in> s \<Longrightarrow> {x} \<in> sets M"
assumes "\<And> x y. \<lbrakk>x \<in> s; y \<in> s\<rbrakk> \<Longrightarrow> measure M {x} = measure M {y}"
shows "measure M s = real (card s) * measure M {SOME x. x \<in> s}"
proof cases
assume "s \<noteq> {}"
then have "\<exists> x. x \<in> s" by blast
from someI_ex[OF this] assms
have prob_some: "\<And> x. x \<in> s \<Longrightarrow> measure M {x} = measure M {SOME y. y \<in> s}" by blast
have "measure M s = (\<Sum> x \<in> s. measure M {x})"
using finite_measure_eq_sum_singleton[OF s] by simp
also have "\<dots> = (\<Sum> x \<in> s. measure M {SOME y. y \<in> s})" using prob_some by auto
also have "\<dots> = real (card s) * measure M {(SOME x. x \<in> s)}"
using sum_constant assms by simp
finally show ?thesis by simp
qed simp
lemma (in finite_measure) measure_real_sum_image_fn:
assumes "e \<in> sets M"
assumes "\<And> x. x \<in> s \<Longrightarrow> e \<inter> f x \<in> sets M"
assumes "finite s"
assumes disjoint: "\<And> x y. \<lbrakk>x \<in> s ; y \<in> s ; x \<noteq> y\<rbrakk> \<Longrightarrow> f x \<inter> f y = {}"
assumes upper: "space M \<subseteq> (\<Union>i \<in> s. f i)"
shows "measure M e = (\<Sum> x \<in> s. measure M (e \<inter> f x))"
proof -
have "e \<subseteq> (\<Union>i\<in>s. f i)"
using \<open>e \<in> sets M\<close> sets.sets_into_space upper by blast
then have e: "e = (\<Union>i \<in> s. e \<inter> f i)"
by auto
hence "measure M e = measure M (\<Union>i \<in> s. e \<inter> f i)" by simp
also have "\<dots> = (\<Sum> x \<in> s. measure M (e \<inter> f x))"
proof (rule finite_measure_finite_Union)
show "finite s" by fact
show "(\<lambda>i. e \<inter> f i)`s \<subseteq> sets M" using assms(2) by auto
show "disjoint_family_on (\<lambda>i. e \<inter> f i) s"
using disjoint by (auto simp: disjoint_family_on_def)
qed
finally show ?thesis .
qed
lemma (in finite_measure) measure_exclude:
assumes "A \<in> sets M" "B \<in> sets M"
assumes "measure M A = measure M (space M)" "A \<inter> B = {}"
shows "measure M B = 0"
using measure_space_inter[of B A] assms by (auto simp: ac_simps)
lemma (in finite_measure) finite_measure_distr:
assumes f: "f \<in> measurable M M'"
shows "finite_measure (distr M M' f)"
proof (rule finite_measureI)
have "f -` space M' \<inter> space M = space M" using f by (auto dest: measurable_space)
with f show "emeasure (distr M M' f) (space (distr M M' f)) \<noteq> \<infinity>" by (auto simp: emeasure_distr)
qed
lemma emeasure_gfp[consumes 1, case_names cont measurable]:
assumes sets[simp]: "\<And>s. sets (M s) = sets N"
assumes "\<And>s. finite_measure (M s)"
assumes cont: "inf_continuous F" "inf_continuous f"
assumes meas: "\<And>P. Measurable.pred N P \<Longrightarrow> Measurable.pred N (F P)"
assumes iter: "\<And>P s. Measurable.pred N P \<Longrightarrow> emeasure (M s) {x\<in>space N. F P x} = f (\<lambda>s. emeasure (M s) {x\<in>space N. P x}) s"
assumes bound: "\<And>P. f P \<le> f (\<lambda>s. emeasure (M s) (space (M s)))"
shows "emeasure (M s) {x\<in>space N. gfp F x} = gfp f s"
proof (subst gfp_transfer_bounded[where \<alpha>="\<lambda>F s. emeasure (M s) {x\<in>space N. F x}" and P="Measurable.pred N", symmetric])
interpret finite_measure "M s" for s by fact
fix C assume "decseq C" "\<And>i. Measurable.pred N (C i)"
then show "(\<lambda>s. emeasure (M s) {x \<in> space N. (INF i. C i) x}) = (INF i. (\<lambda>s. emeasure (M s) {x \<in> space N. C i x}))"
unfolding INF_apply
by (subst INF_emeasure_decseq) (auto simp: antimono_def fun_eq_iff intro!: arg_cong2[where f=emeasure])
next
show "f x \<le> (\<lambda>s. emeasure (M s) {x \<in> space N. F top x})" for x
using bound[of x] sets_eq_imp_space_eq[OF sets] by (simp add: iter)
qed (auto simp add: iter le_fun_def INF_apply[abs_def] intro!: meas cont)
subsection\<^marker>\<open>tag unimportant\<close> \<open>Counting space\<close>
lemma strict_monoI_Suc:
- assumes ord [simp]: "(\<And>n. f n < f (Suc n))" shows "strict_mono f"
- unfolding strict_mono_def
-proof safe
- fix n m :: nat assume "n < m" then show "f n < f m"
- by (induct m) (auto simp: less_Suc_eq intro: less_trans ord)
-qed
+ assumes "(\<And>n. f n < f (Suc n))" shows "strict_mono f"
+ by (simp add: assms strict_mono_Suc_iff)
lemma emeasure_count_space:
assumes "X \<subseteq> A" shows "emeasure (count_space A) X = (if finite X then of_nat (card X) else \<infinity>)"
(is "_ = ?M X")
unfolding count_space_def
proof (rule emeasure_measure_of_sigma)
show "X \<in> Pow A" using \<open>X \<subseteq> A\<close> by auto
show "sigma_algebra A (Pow A)" by (rule sigma_algebra_Pow)
show positive: "positive (Pow A) ?M"
by (auto simp: positive_def)
have additive: "additive (Pow A) ?M"
by (auto simp: card_Un_disjoint additive_def)
interpret ring_of_sets A "Pow A"
by (rule ring_of_setsI) auto
show "countably_additive (Pow A) ?M"
unfolding countably_additive_iff_continuous_from_below[OF positive additive]
proof safe
fix F :: "nat \<Rightarrow> 'a set" assume "incseq F"
show "(\<lambda>i. ?M (F i)) \<longlonglongrightarrow> ?M (\<Union>i. F i)"
proof cases
assume "\<exists>i. \<forall>j\<ge>i. F i = F j"
then obtain i where i: "\<forall>j\<ge>i. F i = F j" ..
with \<open>incseq F\<close> have "F j \<subseteq> F i" for j
by (cases "i \<le> j") (auto simp: incseq_def)
then have eq: "(\<Union>i. F i) = F i"
by auto
with i show ?thesis
by (auto intro!: Lim_transform_eventually[OF tendsto_const] eventually_sequentiallyI[where c=i])
next
assume "\<not> (\<exists>i. \<forall>j\<ge>i. F i = F j)"
then obtain f where f: "\<And>i. i \<le> f i" "\<And>i. F i \<noteq> F (f i)" by metis
then have "\<And>i. F i \<subseteq> F (f i)" using \<open>incseq F\<close> by (auto simp: incseq_def)
with f have *: "\<And>i. F i \<subset> F (f i)" by auto
have "incseq (\<lambda>i. ?M (F i))"
using \<open>incseq F\<close> unfolding incseq_def by (auto simp: card_mono dest: finite_subset)
then have "(\<lambda>i. ?M (F i)) \<longlonglongrightarrow> (SUP n. ?M (F n))"
by (rule LIMSEQ_SUP)
moreover have "(SUP n. ?M (F n)) = top"
proof (rule ennreal_SUP_eq_top)
fix n :: nat show "\<exists>k::nat\<in>UNIV. of_nat n \<le> ?M (F k)"
proof (induct n)
case (Suc n)
then obtain k where "of_nat n \<le> ?M (F k)" ..
moreover have "finite (F k) \<Longrightarrow> finite (F (f k)) \<Longrightarrow> card (F k) < card (F (f k))"
using \<open>F k \<subset> F (f k)\<close> by (simp add: psubset_card_mono)
moreover have "finite (F (f k)) \<Longrightarrow> finite (F k)"
using \<open>k \<le> f k\<close> \<open>incseq F\<close> by (auto simp: incseq_def dest: finite_subset)
ultimately show ?case
by (auto intro!: exI[of _ "f k"] simp del: of_nat_Suc)
qed auto
qed
moreover
have "inj (\<lambda>n. F ((f ^^ n) 0))"
by (intro strict_mono_imp_inj_on strict_monoI_Suc) (simp add: *)
then have 1: "infinite (range (\<lambda>i. F ((f ^^ i) 0)))"
by (rule range_inj_infinite)
have "infinite (Pow (\<Union>i. F i))"
by (rule infinite_super[OF _ 1]) auto
then have "infinite (\<Union>i. F i)"
by auto
ultimately show ?thesis by (simp only:) simp
qed
qed
qed
lemma distr_bij_count_space:
assumes f: "bij_betw f A B"
shows "distr (count_space A) (count_space B) f = count_space B"
proof (rule measure_eqI)
have f': "f \<in> measurable (count_space A) (count_space B)"
using f unfolding Pi_def bij_betw_def by auto
fix X assume "X \<in> sets (distr (count_space A) (count_space B) f)"
then have X: "X \<in> sets (count_space B)" by auto
moreover from X have "f -` X \<inter> A = the_inv_into A f ` X"
using f by (auto simp: bij_betw_def subset_image_iff image_iff the_inv_into_f_f intro: the_inv_into_f_f[symmetric])
moreover have "inj_on (the_inv_into A f) B"
using X f by (auto simp: bij_betw_def inj_on_the_inv_into)
with X have "inj_on (the_inv_into A f) X"
by (auto intro: subset_inj_on)
ultimately show "emeasure (distr (count_space A) (count_space B) f) X = emeasure (count_space B) X"
using f unfolding emeasure_distr[OF f' X]
by (subst (1 2) emeasure_count_space) (auto simp: card_image dest: finite_imageD)
qed simp
lemma emeasure_count_space_finite[simp]:
"X \<subseteq> A \<Longrightarrow> finite X \<Longrightarrow> emeasure (count_space A) X = of_nat (card X)"
using emeasure_count_space[of X A] by simp
lemma emeasure_count_space_infinite[simp]:
"X \<subseteq> A \<Longrightarrow> infinite X \<Longrightarrow> emeasure (count_space A) X = \<infinity>"
using emeasure_count_space[of X A] by simp
lemma measure_count_space: "measure (count_space A) X = (if X \<subseteq> A then of_nat (card X) else 0)"
by (cases "finite X") (auto simp: measure_notin_sets ennreal_of_nat_eq_real_of_nat
measure_zero_top measure_eq_emeasure_eq_ennreal)
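text\<open>A small sanity check of the preceding facts (purely illustrative, nothing below
  depends on it): a singleton drawn from the carrier has counting measure one\<close>
lemma "a \<in> A \<Longrightarrow> emeasure (count_space A) {a} = 1"
  by simp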
lemma emeasure_count_space_eq_0:
"emeasure (count_space A) X = 0 \<longleftrightarrow> (X \<subseteq> A \<longrightarrow> X = {})"
proof cases
assume X: "X \<subseteq> A"
then show ?thesis
proof (intro iffI impI)
assume "emeasure (count_space A) X = 0"
with X show "X = {}"
by (subst (asm) emeasure_count_space) (auto split: if_split_asm)
qed simp
qed (simp add: emeasure_notin_sets)
lemma null_sets_count_space: "null_sets (count_space A) = { {} }"
unfolding null_sets_def by (auto simp add: emeasure_count_space_eq_0)
lemma AE_count_space: "(AE x in count_space A. P x) \<longleftrightarrow> (\<forall>x\<in>A. P x)"
unfolding eventually_ae_filter by (auto simp add: null_sets_count_space)
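text\<open>Hence, on a counting space, an \<open>AE\<close> statement is just a universally quantified
  statement over the carrier; for instance (again purely illustrative):\<close>
lemma "AE x in count_space A. x \<in> A"
  by (auto simp: AE_count_space)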
lemma sigma_finite_measure_count_space_countable:
assumes A: "countable A"
shows "sigma_finite_measure (count_space A)"
proof qed (insert A, auto intro!: exI[of _ "(\<lambda>a. {a}) ` A"])
lemma sigma_finite_measure_count_space:
fixes A :: "'a::countable set" shows "sigma_finite_measure (count_space A)"
by (rule sigma_finite_measure_count_space_countable) auto
lemma finite_measure_count_space:
assumes [simp]: "finite A"
shows "finite_measure (count_space A)"
by rule simp
lemma sigma_finite_measure_count_space_finite:
assumes A: "finite A" shows "sigma_finite_measure (count_space A)"
proof -
interpret finite_measure "count_space A" using A by (rule finite_measure_count_space)
show "sigma_finite_measure (count_space A)" ..
qed
subsection\<^marker>\<open>tag unimportant\<close> \<open>Measure restricted to space\<close>
lemma emeasure_restrict_space:
assumes "\<Omega> \<inter> space M \<in> sets M" "A \<subseteq> \<Omega>"
shows "emeasure (restrict_space M \<Omega>) A = emeasure M A"
proof (cases "A \<in> sets M")
case True
show ?thesis
proof (rule emeasure_measure_of[OF restrict_space_def])
show "(\<inter>) \<Omega> ` sets M \<subseteq> Pow (\<Omega> \<inter> space M)" "A \<in> sets (restrict_space M \<Omega>)"
using \<open>A \<subseteq> \<Omega>\<close> \<open>A \<in> sets M\<close> sets.space_closed by (auto simp: sets_restrict_space)
show "positive (sets (restrict_space M \<Omega>)) (emeasure M)"
by (auto simp: positive_def)
show "countably_additive (sets (restrict_space M \<Omega>)) (emeasure M)"
proof (rule countably_additiveI)
fix A :: "nat \<Rightarrow> _" assume "range A \<subseteq> sets (restrict_space M \<Omega>)" "disjoint_family A"
with assms have "\<And>i. A i \<in> sets M" "\<And>i. A i \<subseteq> space M" "disjoint_family A"
by (fastforce simp: sets_restrict_space_iff[OF assms(1)] image_subset_iff
dest: sets.sets_into_space)+
then show "(\<Sum>i. emeasure M (A i)) = emeasure M (\<Union>i. A i)"
by (subst suminf_emeasure) (auto simp: disjoint_family_subset)
qed
qed
next
case False
with assms have "A \<notin> sets (restrict_space M \<Omega>)"
by (simp add: sets_restrict_space_iff)
with False show ?thesis
by (simp add: emeasure_notin_sets)
qed
lemma measure_restrict_space:
assumes "\<Omega> \<inter> space M \<in> sets M" "A \<subseteq> \<Omega>"
shows "measure (restrict_space M \<Omega>) A = measure M A"
using emeasure_restrict_space[OF assms] by (simp add: measure_def)
lemma AE_restrict_space_iff:
assumes "\<Omega> \<inter> space M \<in> sets M"
shows "(AE x in restrict_space M \<Omega>. P x) \<longleftrightarrow> (AE x in M. x \<in> \<Omega> \<longrightarrow> P x)"
proof -
have ex_cong: "\<And>P Q f. (\<And>x. P x \<Longrightarrow> Q x) \<Longrightarrow> (\<And>x. Q x \<Longrightarrow> P (f x)) \<Longrightarrow> (\<exists>x. P x) \<longleftrightarrow> (\<exists>x. Q x)"
by auto
{ fix X assume X: "X \<in> sets M" "emeasure M X = 0"
then have "emeasure M (\<Omega> \<inter> space M \<inter> X) \<le> emeasure M X"
by (intro emeasure_mono) auto
then have "emeasure M (\<Omega> \<inter> space M \<inter> X) = 0"
using X by (auto intro!: antisym) }
with assms show ?thesis
unfolding eventually_ae_filter
by (auto simp add: space_restrict_space null_sets_def sets_restrict_space_iff
emeasure_restrict_space cong: conj_cong
intro!: ex_cong[where f="\<lambda>X. (\<Omega> \<inter> space M) \<inter> X"])
qed
lemma restrict_restrict_space:
assumes "A \<inter> space M \<in> sets M" "B \<inter> space M \<in> sets M"
shows "restrict_space (restrict_space M A) B = restrict_space M (A \<inter> B)" (is "?l = ?r")
proof (rule measure_eqI[symmetric])
show "sets ?r = sets ?l"
unfolding sets_restrict_space image_comp by (intro image_cong) auto
next
fix X assume "X \<in> sets (restrict_space M (A \<inter> B))"
then obtain Y where "Y \<in> sets M" "X = Y \<inter> A \<inter> B"
by (auto simp: sets_restrict_space)
with assms sets.Int[OF assms] show "emeasure ?r X = emeasure ?l X"
by (subst (1 2) emeasure_restrict_space)
(auto simp: space_restrict_space sets_restrict_space_iff emeasure_restrict_space ac_simps)
qed
lemma restrict_count_space: "restrict_space (count_space B) A = count_space (A \<inter> B)"
proof (rule measure_eqI)
show "sets (restrict_space (count_space B) A) = sets (count_space (A \<inter> B))"
by (subst sets_restrict_space) auto
moreover fix X assume "X \<in> sets (restrict_space (count_space B) A)"
ultimately have "X \<subseteq> A \<inter> B" by auto
then show "emeasure (restrict_space (count_space B) A) X = emeasure (count_space (A \<inter> B)) X"
by (cases "finite X") (auto simp add: emeasure_restrict_space)
qed
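text\<open>In particular (an illustrative corollary, stated only as a sketch): restricting the
  counting space over the whole type to \<open>A\<close> gives the counting space over \<open>A\<close>\<close>
lemma "restrict_space (count_space UNIV) A = count_space A"
  by (simp add: restrict_count_space)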
lemma sigma_finite_measure_restrict_space:
assumes "sigma_finite_measure M"
and A: "A \<in> sets M"
shows "sigma_finite_measure (restrict_space M A)"
proof -
interpret sigma_finite_measure M by fact
from sigma_finite_countable obtain C
where C: "countable C" "C \<subseteq> sets M" "(\<Union>C) = space M" "\<forall>a\<in>C. emeasure M a \<noteq> \<infinity>"
by blast
let ?C = "(\<inter>) A ` C"
from C have "countable ?C" "?C \<subseteq> sets (restrict_space M A)" "(\<Union>?C) = space (restrict_space M A)"
by(auto simp add: sets_restrict_space space_restrict_space)
moreover {
fix a
assume "a \<in> ?C"
then obtain a' where "a = A \<inter> a'" "a' \<in> C" ..
then have "emeasure (restrict_space M A) a \<le> emeasure M a'"
using A C by(auto simp add: emeasure_restrict_space intro: emeasure_mono)
also have "\<dots> < \<infinity>" using C(4)[rule_format, of a'] \<open>a' \<in> C\<close> by (simp add: less_top)
finally have "emeasure (restrict_space M A) a \<noteq> \<infinity>" by simp }
ultimately show ?thesis
by unfold_locales (rule exI conjI|assumption|blast)+
qed
lemma finite_measure_restrict_space:
assumes "finite_measure M"
and A: "A \<in> sets M"
shows "finite_measure (restrict_space M A)"
proof -
interpret finite_measure M by fact
show ?thesis
by(rule finite_measureI)(simp add: emeasure_restrict_space A space_restrict_space)
qed
lemma restrict_distr:
assumes [measurable]: "f \<in> measurable M N"
assumes [simp]: "\<Omega> \<inter> space N \<in> sets N" and restrict: "f \<in> space M \<rightarrow> \<Omega>"
shows "restrict_space (distr M N f) \<Omega> = distr M (restrict_space N \<Omega>) f"
(is "?l = ?r")
proof (rule measure_eqI)
fix A assume "A \<in> sets (restrict_space (distr M N f) \<Omega>)"
with restrict show "emeasure ?l A = emeasure ?r A"
by (subst emeasure_distr)
(auto simp: sets_restrict_space_iff emeasure_restrict_space emeasure_distr
intro!: measurable_restrict_space2)
qed (simp add: sets_restrict_space)
lemma measure_eqI_restrict_generator:
assumes E: "Int_stable E" "E \<subseteq> Pow \<Omega>" "\<And>X. X \<in> E \<Longrightarrow> emeasure M X = emeasure N X"
assumes sets_eq: "sets M = sets N" and \<Omega>: "\<Omega> \<in> sets M"
assumes "sets (restrict_space M \<Omega>) = sigma_sets \<Omega> E"
assumes "sets (restrict_space N \<Omega>) = sigma_sets \<Omega> E"
assumes ae: "AE x in M. x \<in> \<Omega>" "AE x in N. x \<in> \<Omega>"
assumes A: "countable A" "A \<noteq> {}" "A \<subseteq> E" "\<Union>A = \<Omega>" "\<And>a. a \<in> A \<Longrightarrow> emeasure M a \<noteq> \<infinity>"
shows "M = N"
proof (rule measure_eqI)
fix X assume X: "X \<in> sets M"
then have "emeasure M X = emeasure (restrict_space M \<Omega>) (X \<inter> \<Omega>)"
using ae \<Omega> by (auto simp add: emeasure_restrict_space intro!: emeasure_eq_AE)
also have "restrict_space M \<Omega> = restrict_space N \<Omega>"
proof (rule measure_eqI_generator_eq)
fix X assume "X \<in> E"
then show "emeasure (restrict_space M \<Omega>) X = emeasure (restrict_space N \<Omega>) X"
using E \<Omega> by (subst (1 2) emeasure_restrict_space) (auto simp: sets_eq sets_eq[THEN sets_eq_imp_space_eq])
next
show "range (from_nat_into A) \<subseteq> E" "(\<Union>i. from_nat_into A i) = \<Omega>"
using A by (auto cong del: SUP_cong_simp)
next
fix i
have "emeasure (restrict_space M \<Omega>) (from_nat_into A i) = emeasure M (from_nat_into A i)"
using A \<Omega> by (subst emeasure_restrict_space)
(auto simp: sets_eq sets_eq[THEN sets_eq_imp_space_eq] intro: from_nat_into)
with A show "emeasure (restrict_space M \<Omega>) (from_nat_into A i) \<noteq> \<infinity>"
by (auto intro: from_nat_into)
qed fact+
also have "emeasure (restrict_space N \<Omega>) (X \<inter> \<Omega>) = emeasure N X"
using X ae \<Omega> by (auto simp add: emeasure_restrict_space sets_eq intro!: emeasure_eq_AE)
finally show "emeasure M X = emeasure N X" .
qed fact
subsection\<^marker>\<open>tag unimportant\<close> \<open>Null measure\<close>
definition null_measure :: "'a measure \<Rightarrow> 'a measure" where
"null_measure M = sigma (space M) (sets M)"
lemma space_null_measure[simp]: "space (null_measure M) = space M"
by (simp add: null_measure_def)
lemma sets_null_measure[simp, measurable_cong]: "sets (null_measure M) = sets M"
by (simp add: null_measure_def)
lemma emeasure_null_measure[simp]: "emeasure (null_measure M) X = 0"
by (cases "X \<in> sets M", rule emeasure_measure_of)
(auto simp: positive_def countably_additive_def emeasure_notin_sets null_measure_def
dest: sets.sets_into_space)
lemma measure_null_measure[simp]: "measure (null_measure M) X = 0"
by (intro measure_eq_emeasure_eq_ennreal) auto
lemma null_measure_idem [simp]: "null_measure (null_measure M) = null_measure M"
by(rule measure_eqI) simp_all
subsection \<open>Scaling a measure\<close>
definition\<^marker>\<open>tag important\<close> scale_measure :: "ennreal \<Rightarrow> 'a measure \<Rightarrow> 'a measure" where
"scale_measure r M = measure_of (space M) (sets M) (\<lambda>A. r * emeasure M A)"
lemma space_scale_measure: "space (scale_measure r M) = space M"
by (simp add: scale_measure_def)
lemma sets_scale_measure [simp, measurable_cong]: "sets (scale_measure r M) = sets M"
by (simp add: scale_measure_def)
lemma emeasure_scale_measure [simp]:
"emeasure (scale_measure r M) A = r * emeasure M A"
(is "_ = ?\<mu> A")
proof(cases "A \<in> sets M")
case True
show ?thesis unfolding scale_measure_def
proof(rule emeasure_measure_of_sigma)
show "sigma_algebra (space M) (sets M)" ..
show "positive (sets M) ?\<mu>" by (simp add: positive_def)
show "countably_additive (sets M) ?\<mu>"
proof (rule countably_additiveI)
fix A :: "nat \<Rightarrow> _" assume *: "range A \<subseteq> sets M" "disjoint_family A"
have "(\<Sum>i. ?\<mu> (A i)) = r * (\<Sum>i. emeasure M (A i))"
by simp
also have "\<dots> = ?\<mu> (\<Union>i. A i)" using * by(simp add: suminf_emeasure)
finally show "(\<Sum>i. ?\<mu> (A i)) = ?\<mu> (\<Union>i. A i)" .
qed
qed(fact True)
qed(simp add: emeasure_notin_sets)
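text\<open>For example (illustrative only), scaling by \<open>2\<close> doubles every emeasure value;
  this is a direct instance of the simp rule above\<close>
lemma "emeasure (scale_measure 2 M) A = 2 * emeasure M A"
  by simp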
lemma scale_measure_1 [simp]: "scale_measure 1 M = M"
by(rule measure_eqI) simp_all
lemma scale_measure_0[simp]: "scale_measure 0 M = null_measure M"
by(rule measure_eqI) simp_all
lemma measure_scale_measure [simp]: "0 \<le> r \<Longrightarrow> measure (scale_measure r M) A = r * measure M A"
using emeasure_scale_measure[of r M A]
emeasure_eq_ennreal_measure[of M A]
measure_eq_emeasure_eq_ennreal[of _ "scale_measure r M" A]
by (cases "emeasure (scale_measure r M) A = top")
(auto simp del: emeasure_scale_measure
simp: ennreal_top_eq_mult_iff ennreal_mult_eq_top_iff measure_zero_top ennreal_mult[symmetric])
lemma scale_scale_measure [simp]:
"scale_measure r (scale_measure r' M) = scale_measure (r * r') M"
by (rule measure_eqI) (simp_all add: max_def mult.assoc)
lemma scale_null_measure [simp]: "scale_measure r (null_measure M) = null_measure M"
by (rule measure_eqI) simp_all
subsection \<open>Complete lattice structure on measures\<close>
lemma (in finite_measure) finite_measure_Diff':
"A \<in> sets M \<Longrightarrow> B \<in> sets M \<Longrightarrow> measure M (A - B) = measure M A - measure M (A \<inter> B)"
using finite_measure_Diff[of A "A \<inter> B"] by (auto simp: Diff_Int)
lemma (in finite_measure) finite_measure_Union':
"A \<in> sets M \<Longrightarrow> B \<in> sets M \<Longrightarrow> measure M (A \<union> B) = measure M A + measure M (B - A)"
using finite_measure_Union[of A "B - A"] by auto
lemma finite_unsigned_Hahn_decomposition:
assumes "finite_measure M" "finite_measure N" and [simp]: "sets N = sets M"
shows "\<exists>Y\<in>sets M. (\<forall>X\<in>sets M. X \<subseteq> Y \<longrightarrow> N X \<le> M X) \<and> (\<forall>X\<in>sets M. X \<inter> Y = {} \<longrightarrow> M X \<le> N X)"
proof -
interpret M: finite_measure M by fact
interpret N: finite_measure N by fact
define d where "d X = measure M X - measure N X" for X
have [intro]: "bdd_above (d`sets M)"
using sets.sets_into_space[of _ M]
by (intro bdd_aboveI[where M="measure M (space M)"])
(auto simp: d_def field_simps subset_eq intro!: add_increasing M.finite_measure_mono)
define \<gamma> where "\<gamma> = (SUP X\<in>sets M. d X)"
have le_\<gamma>[intro]: "X \<in> sets M \<Longrightarrow> d X \<le> \<gamma>" for X
by (auto simp: \<gamma>_def intro!: cSUP_upper)
have "\<exists>f. \<forall>n. f n \<in> sets M \<and> d (f n) > \<gamma> - 1 / 2^n"
proof (intro choice_iff[THEN iffD1] allI)
fix n
have "\<exists>X\<in>sets M. \<gamma> - 1 / 2^n < d X"
unfolding \<gamma>_def by (intro less_cSUP_iff[THEN iffD1]) auto
then show "\<exists>y. y \<in> sets M \<and> \<gamma> - 1 / 2 ^ n < d y"
by auto
qed
then obtain E where [measurable]: "E n \<in> sets M" and E: "d (E n) > \<gamma> - 1 / 2^n" for n
by auto
define F where "F m n = (if m \<le> n then \<Inter>i\<in>{m..n}. E i else space M)" for m n
have [measurable]: "m \<le> n \<Longrightarrow> F m n \<in> sets M" for m n
by (auto simp: F_def)
have 1: "\<gamma> - 2 / 2 ^ m + 1 / 2 ^ n \<le> d (F m n)" if "m \<le> n" for m n
using that
proof (induct rule: dec_induct)
case base with E[of m] show ?case
by (simp add: F_def field_simps)
next
case (step i)
have F_Suc: "F m (Suc i) = F m i \<inter> E (Suc i)"
using \<open>m \<le> i\<close> by (auto simp: F_def le_Suc_eq)
have "\<gamma> + (\<gamma> - 2 / 2^m + 1 / 2 ^ Suc i) \<le> (\<gamma> - 1 / 2^Suc i) + (\<gamma> - 2 / 2^m + 1 / 2^i)"
by (simp add: field_simps)
also have "\<dots> \<le> d (E (Suc i)) + d (F m i)"
using E[of "Suc i"] by (intro add_mono step) auto
also have "\<dots> = d (E (Suc i)) + d (F m i - E (Suc i)) + d (F m (Suc i))"
using \<open>m \<le> i\<close> by (simp add: d_def field_simps F_Suc M.finite_measure_Diff' N.finite_measure_Diff')
also have "\<dots> = d (E (Suc i) \<union> F m i) + d (F m (Suc i))"
using \<open>m \<le> i\<close> by (simp add: d_def field_simps M.finite_measure_Union' N.finite_measure_Union')
also have "\<dots> \<le> \<gamma> + d (F m (Suc i))"
using \<open>m \<le> i\<close> by auto
finally show ?case
by auto
qed
define F' where "F' m = (\<Inter>i\<in>{m..}. E i)" for m
have F'_eq: "F' m = (\<Inter>i. F m (i + m))" for m
by (fastforce simp: le_iff_add[of m] F'_def F_def)
have [measurable]: "F' m \<in> sets M" for m
by (auto simp: F'_def)
have \<gamma>_le: "\<gamma> - 0 \<le> d (\<Union>m. F' m)"
proof (rule LIMSEQ_le)
show "(\<lambda>n. \<gamma> - 2 / 2 ^ n) \<longlonglongrightarrow> \<gamma> - 0"
by (intro tendsto_intros LIMSEQ_divide_realpow_zero) auto
have "incseq F'"
by (auto simp: incseq_def F'_def)
then show "(\<lambda>m. d (F' m)) \<longlonglongrightarrow> d (\<Union>m. F' m)"
unfolding d_def
by (intro tendsto_diff M.finite_Lim_measure_incseq N.finite_Lim_measure_incseq) auto
have "\<gamma> - 2 / 2 ^ m + 0 \<le> d (F' m)" for m
proof (rule LIMSEQ_le)
have *: "decseq (\<lambda>n. F m (n + m))"
by (auto simp: decseq_def F_def)
show "(\<lambda>n. d (F m n)) \<longlonglongrightarrow> d (F' m)"
unfolding d_def F'_eq
by (rule LIMSEQ_offset[where k=m])
(auto intro!: tendsto_diff M.finite_Lim_measure_decseq N.finite_Lim_measure_decseq *)
show "(\<lambda>n. \<gamma> - 2 / 2 ^ m + 1 / 2 ^ n) \<longlonglongrightarrow> \<gamma> - 2 / 2 ^ m + 0"
by (intro tendsto_add LIMSEQ_divide_realpow_zero tendsto_const) auto
show "\<exists>N. \<forall>n\<ge>N. \<gamma> - 2 / 2 ^ m + 1 / 2 ^ n \<le> d (F m n)"
using 1[of m] by (intro exI[of _ m]) auto
qed
then show "\<exists>N. \<forall>n\<ge>N. \<gamma> - 2 / 2 ^ n \<le> d (F' n)"
by auto
qed
show ?thesis
proof (safe intro!: bexI[of _ "\<Union>m. F' m"])
fix X assume [measurable]: "X \<in> sets M" and X: "X \<subseteq> (\<Union>m. F' m)"
have "d (\<Union>m. F' m) - d X = d ((\<Union>m. F' m) - X)"
using X by (auto simp: d_def M.finite_measure_Diff N.finite_measure_Diff)
also have "\<dots> \<le> \<gamma>"
by auto
finally have "0 \<le> d X"
using \<gamma>_le by auto
then show "emeasure N X \<le> emeasure M X"
by (auto simp: d_def M.emeasure_eq_measure N.emeasure_eq_measure)
next
fix X assume [measurable]: "X \<in> sets M" and X: "X \<inter> (\<Union>m. F' m) = {}"
then have "d (\<Union>m. F' m) + d X = d (X \<union> (\<Union>m. F' m))"
by (auto simp: d_def M.finite_measure_Union N.finite_measure_Union)
also have "\<dots> \<le> \<gamma>"
by auto
finally have "d X \<le> 0"
using \<gamma>_le by auto
then show "emeasure M X \<le> emeasure N X"
by (auto simp: d_def M.emeasure_eq_measure N.emeasure_eq_measure)
qed auto
qed
proposition unsigned_Hahn_decomposition:
assumes [simp]: "sets N = sets M" and [measurable]: "A \<in> sets M"
and [simp]: "emeasure M A \<noteq> top" "emeasure N A \<noteq> top"
shows "\<exists>Y\<in>sets M. Y \<subseteq> A \<and> (\<forall>X\<in>sets M. X \<subseteq> Y \<longrightarrow> N X \<le> M X) \<and> (\<forall>X\<in>sets M. X \<subseteq> A \<longrightarrow> X \<inter> Y = {} \<longrightarrow> M X \<le> N X)"
proof -
have "\<exists>Y\<in>sets (restrict_space M A).
(\<forall>X\<in>sets (restrict_space M A). X \<subseteq> Y \<longrightarrow> (restrict_space N A) X \<le> (restrict_space M A) X) \<and>
(\<forall>X\<in>sets (restrict_space M A). X \<inter> Y = {} \<longrightarrow> (restrict_space M A) X \<le> (restrict_space N A) X)"
proof (rule finite_unsigned_Hahn_decomposition)
show "finite_measure (restrict_space M A)" "finite_measure (restrict_space N A)"
by (auto simp: space_restrict_space emeasure_restrict_space less_top intro!: finite_measureI)
qed (simp add: sets_restrict_space)
with assms show ?thesis
by (metis Int_subset_iff emeasure_restrict_space sets.Int_space_eq2 sets_restrict_space_iff space_restrict_space)
qed
text\<^marker>\<open>tag important\<close> \<open>
Define a lexicographic order on \<^type>\<open>measure\<close>: measures are compared first by their
space, then by their sets, and finally by their emeasure. The space and sets components
are ordered by inclusion, the emeasure component point-wise.
\<close>
instantiation measure :: (type) order_bot
begin
inductive less_eq_measure :: "'a measure \<Rightarrow> 'a measure \<Rightarrow> bool" where
"space M \<subset> space N \<Longrightarrow> less_eq_measure M N"
| "space M = space N \<Longrightarrow> sets M \<subset> sets N \<Longrightarrow> less_eq_measure M N"
| "space M = space N \<Longrightarrow> sets M = sets N \<Longrightarrow> emeasure M \<le> emeasure N \<Longrightarrow> less_eq_measure M N"
lemma le_measure_iff:
"M \<le> N \<longleftrightarrow> (if space M = space N then
if sets M = sets N then emeasure M \<le> emeasure N else sets M \<subseteq> sets N else space M \<subseteq> space N)"
by (auto elim: less_eq_measure.cases intro: less_eq_measure.intros)
definition\<^marker>\<open>tag important\<close> less_measure :: "'a measure \<Rightarrow> 'a measure \<Rightarrow> bool" where
"less_measure M N \<longleftrightarrow> (M \<le> N \<and> \<not> N \<le> M)"
definition\<^marker>\<open>tag important\<close> bot_measure :: "'a measure" where
"bot_measure = sigma {} {}"
lemma
shows space_bot[simp]: "space bot = {}"
and sets_bot[simp]: "sets bot = {{}}"
and emeasure_bot[simp]: "emeasure bot X = 0"
by (auto simp: bot_measure_def sigma_sets_empty_eq emeasure_sigma)
instance
proof standard
show "bot \<le> a" for a :: "'a measure"
by (simp add: le_measure_iff bot_measure_def sigma_sets_empty_eq emeasure_sigma le_fun_def)
qed (auto simp: le_measure_iff less_measure_def split: if_split_asm intro: measure_eqI)
end
proposition le_measure: "sets M = sets N \<Longrightarrow> M \<le> N \<longleftrightarrow> (\<forall>A\<in>sets M. emeasure M A \<le> emeasure N A)"
by (metis emeasure_neq_0_sets le_fun_def le_measure_iff order_class.order_eq_iff sets_eq_imp_space_eq)
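text\<open>As a sanity check for this order (an illustrative sketch, not used below):
  the null measure lies below every measure with the same space and sets\<close>
lemma "null_measure M \<le> M"
  by (simp add: le_measure_iff le_fun_def)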
definition\<^marker>\<open>tag important\<close> sup_measure' :: "'a measure \<Rightarrow> 'a measure \<Rightarrow> 'a measure" where
"sup_measure' A B =
measure_of (space A) (sets A)
(\<lambda>X. SUP Y\<in>sets A. emeasure A (X \<inter> Y) + emeasure B (X \<inter> - Y))"
lemma assumes [simp]: "sets B = sets A"
shows space_sup_measure'[simp]: "space (sup_measure' A B) = space A"
and sets_sup_measure'[simp]: "sets (sup_measure' A B) = sets A"
using sets_eq_imp_space_eq[OF assms] by (simp_all add: sup_measure'_def)
lemma emeasure_sup_measure':
assumes sets_eq[simp]: "sets B = sets A" and [simp, intro]: "X \<in> sets A"
shows "emeasure (sup_measure' A B) X = (SUP Y\<in>sets A. emeasure A (X \<inter> Y) + emeasure B (X \<inter> - Y))"
(is "_ = ?S X")
proof -
note sets_eq_imp_space_eq[OF sets_eq, simp]
show ?thesis
using sup_measure'_def
proof (rule emeasure_measure_of)
let ?d = "\<lambda>X Y. emeasure A (X \<inter> Y) + emeasure B (X \<inter> - Y)"
show "countably_additive (sets (sup_measure' A B)) (\<lambda>X. SUP Y \<in> sets A. emeasure A (X \<inter> Y) + emeasure B (X \<inter> - Y))"
proof (rule countably_additiveI, goal_cases)
case (1 X)
then have [measurable]: "\<And>i. X i \<in> sets A" and "disjoint_family X"
by auto
have disjoint: "disjoint_family (\<lambda>i. X i \<inter> Y)" "disjoint_family (\<lambda>i. X i - Y)" for Y
using "1"(2) disjoint_family_subset by fastforce+
have "(\<Sum>i. ?S (X i)) = (SUP Y\<in>sets A. \<Sum>i. ?d (X i) Y)"
proof (rule ennreal_suminf_SUP_eq_directed)
fix J :: "nat set" and a b assume "finite J" and [measurable]: "a \<in> sets A" "b \<in> sets A"
have "\<exists>c\<in>sets A. c \<subseteq> X i \<and> (\<forall>a\<in>sets A. ?d (X i) a \<le> ?d (X i) c)" for i
proof cases
assume "emeasure A (X i) = top \<or> emeasure B (X i) = top"
then show ?thesis
by force
next
assume finite: "\<not> (emeasure A (X i) = top \<or> emeasure B (X i) = top)"
then have "\<exists>Y\<in>sets A. Y \<subseteq> X i \<and> (\<forall>C\<in>sets A. C \<subseteq> Y \<longrightarrow> B C \<le> A C) \<and> (\<forall>C\<in>sets A. C \<subseteq> X i \<longrightarrow> C \<inter> Y = {} \<longrightarrow> A C \<le> B C)"
using unsigned_Hahn_decomposition[of B A "X i"] by simp
then obtain Y where [measurable]: "Y \<in> sets A" and [simp]: "Y \<subseteq> X i"
and B_le_A: "\<And>C. C \<in> sets A \<Longrightarrow> C \<subseteq> Y \<Longrightarrow> B C \<le> A C"
and A_le_B: "\<And>C. C \<in> sets A \<Longrightarrow> C \<subseteq> X i \<Longrightarrow> C \<inter> Y = {} \<Longrightarrow> A C \<le> B C"
by auto
show ?thesis
proof (intro bexI ballI conjI)
fix a assume [measurable]: "a \<in> sets A"
have *: "(X i \<inter> a \<inter> Y \<union> (X i \<inter> a - Y)) = X i \<inter> a" "(X i - a) \<inter> Y \<union> (X i - a - Y) = X i \<inter> - a"
for a Y by auto
then have "?d (X i) a =
(A (X i \<inter> a \<inter> Y) + A (X i \<inter> a \<inter> - Y)) + (B (X i \<inter> - a \<inter> Y) + B (X i \<inter> - a \<inter> - Y))"
by (subst (1 2) plus_emeasure) (auto simp: Diff_eq[symmetric])
also have "\<dots> \<le> (A (X i \<inter> a \<inter> Y) + B (X i \<inter> a \<inter> - Y)) + (A (X i \<inter> - a \<inter> Y) + B (X i \<inter> - a \<inter> - Y))"
by (intro add_mono order_refl B_le_A A_le_B) (auto simp: Diff_eq[symmetric])
also have "\<dots> \<le> (A (X i \<inter> Y \<inter> a) + A (X i \<inter> Y \<inter> - a)) + (B (X i \<inter> - Y \<inter> a) + B (X i \<inter> - Y \<inter> - a))"
by (simp add: ac_simps)
also have "\<dots> \<le> A (X i \<inter> Y) + B (X i \<inter> - Y)"
by (subst (1 2) plus_emeasure) (auto simp: Diff_eq[symmetric] *)
finally show "?d (X i) a \<le> ?d (X i) Y" .
qed auto
qed
then obtain C where [measurable]: "C i \<in> sets A" and "C i \<subseteq> X i"
and C: "\<And>a. a \<in> sets A \<Longrightarrow> ?d (X i) a \<le> ?d (X i) (C i)" for i
by metis
have *: "X i \<inter> (\<Union>i. C i) = X i \<inter> C i" for i
using \<open>disjoint_family X\<close> \<open>\<And>i. C i \<subseteq> X i\<close>
by (simp add: disjoint_family_on_def disjoint_iff_not_equal set_eq_iff) (metis subsetD)
then have **: "X i \<inter> - (\<Union>i. C i) = X i \<inter> - C i" for i by blast
moreover have "(\<Union>i. C i) \<in> sets A"
by fastforce
ultimately show "\<exists>c\<in>sets A. \<forall>i\<in>J. ?d (X i) a \<le> ?d (X i) c \<and> ?d (X i) b \<le> ?d (X i) c"
by (metis "*" C \<open>a \<in> sets A\<close> \<open>b \<in> sets A\<close>)
qed
also have "\<dots> = ?S (\<Union>i. X i)"
proof -
have "\<And>Y. Y \<in> sets A \<Longrightarrow> (\<Sum>i. emeasure A (X i \<inter> Y) + emeasure B (X i \<inter> -Y))
= emeasure A (\<Union>i. X i \<inter> Y) + emeasure B (\<Union>i. X i \<inter> -Y)"
using disjoint
by (auto simp flip: suminf_add Diff_eq simp add: image_subset_iff suminf_emeasure)
then show ?thesis by force
qed
finally show "(\<Sum>i. ?S (X i)) = ?S (\<Union>i. X i)" .
qed
qed (auto dest: sets.sets_into_space simp: positive_def intro!: SUP_const)
qed
lemma le_emeasure_sup_measure'1:
assumes "sets B = sets A" "X \<in> sets A" shows "emeasure A X \<le> emeasure (sup_measure' A B) X"
by (subst emeasure_sup_measure'[OF assms]) (auto intro!: SUP_upper2[of "X"] assms)
lemma le_emeasure_sup_measure'2:
assumes "sets B = sets A" "X \<in> sets A" shows "emeasure B X \<le> emeasure (sup_measure' A B) X"
by (subst emeasure_sup_measure'[OF assms]) (auto intro!: SUP_upper2[of "{}"] assms)
lemma emeasure_sup_measure'_le2:
assumes [simp]: "sets B = sets C" "sets A = sets C" and [measurable]: "X \<in> sets C"
assumes A: "\<And>Y. Y \<subseteq> X \<Longrightarrow> Y \<in> sets A \<Longrightarrow> emeasure A Y \<le> emeasure C Y"
assumes B: "\<And>Y. Y \<subseteq> X \<Longrightarrow> Y \<in> sets A \<Longrightarrow> emeasure B Y \<le> emeasure C Y"
shows "emeasure (sup_measure' A B) X \<le> emeasure C X"
proof (subst emeasure_sup_measure')
show "(SUP Y\<in>sets A. emeasure A (X \<inter> Y) + emeasure B (X \<inter> - Y)) \<le> emeasure C X"
unfolding \<open>sets A = sets C\<close>
proof (intro SUP_least)
fix Y assume [measurable]: "Y \<in> sets C"
have [simp]: "X \<inter> Y \<union> (X - Y) = X"
by auto
have "emeasure A (X \<inter> Y) + emeasure B (X \<inter> - Y) \<le> emeasure C (X \<inter> Y) + emeasure C (X \<inter> - Y)"
by (intro add_mono A B) (auto simp: Diff_eq[symmetric])
also have "\<dots> = emeasure C X"
by (subst plus_emeasure) (auto simp: Diff_eq[symmetric])
finally show "emeasure A (X \<inter> Y) + emeasure B (X \<inter> - Y) \<le> emeasure C X" .
qed
qed simp_all
definition\<^marker>\<open>tag important\<close> sup_lexord :: "'a \<Rightarrow> 'a \<Rightarrow> ('a \<Rightarrow> 'b::order) \<Rightarrow> 'a \<Rightarrow> 'a \<Rightarrow> 'a" where
"sup_lexord A B k s c =
(if k A = k B then c else
if \<not> k A \<le> k B \<and> \<not> k B \<le> k A then s else
if k B \<le> k A then A else B)"
lemma sup_lexord:
"(k A < k B \<Longrightarrow> P B) \<Longrightarrow> (k B < k A \<Longrightarrow> P A) \<Longrightarrow> (k A = k B \<Longrightarrow> P c) \<Longrightarrow>
(\<not> k B \<le> k A \<Longrightarrow> \<not> k A \<le> k B \<Longrightarrow> P s) \<Longrightarrow> P (sup_lexord A B k s c)"
by (auto simp: sup_lexord_def)
lemmas le_sup_lexord = sup_lexord[where P="\<lambda>a. c \<le> a" for c]
lemma sup_lexord1: "k A = k B \<Longrightarrow> sup_lexord A B k s c = c"
by (simp add: sup_lexord_def)
lemma sup_lexord_commute: "sup_lexord A B k s c = sup_lexord B A k s c"
by (auto simp: sup_lexord_def)
lemma sigma_sets_le_sets_iff: "(sigma_sets (space x) \<A> \<subseteq> sets x) = (\<A> \<subseteq> sets x)"
using sets.sigma_sets_subset[of \<A> x] by auto
lemma sigma_le_iff: "\<A> \<subseteq> Pow \<Omega> \<Longrightarrow> sigma \<Omega> \<A> \<le> x \<longleftrightarrow> (\<Omega> \<subseteq> space x \<and> (space x = \<Omega> \<longrightarrow> \<A> \<subseteq> sets x))"
by (cases "\<Omega> = space x")
(simp_all add: eq_commute[of _ "sets x"] le_measure_iff emeasure_sigma le_fun_def
sigma_sets_superset_generator sigma_sets_le_sets_iff)
instantiation measure :: (type) semilattice_sup
begin
definition\<^marker>\<open>tag important\<close> sup_measure :: "'a measure \<Rightarrow> 'a measure \<Rightarrow> 'a measure" where
"sup_measure A B =
sup_lexord A B space (sigma (space A \<union> space B) {})
(sup_lexord A B sets (sigma (space A) (sets A \<union> sets B)) (sup_measure' A B))"
instance
proof
fix x y z :: "'a measure"
show "x \<le> sup x y"
unfolding sup_measure_def
proof (intro le_sup_lexord)
assume "space x = space y"
then have *: "sets x \<union> sets y \<subseteq> Pow (space x)"
using sets.space_closed by auto
assume "\<not> sets y \<subseteq> sets x" "\<not> sets x \<subseteq> sets y"
then have "sets x \<subset> sets x \<union> sets y"
by auto
also have "\<dots> \<le> sigma (space x) (sets x \<union> sets y)"
by (subst sets_measure_of[OF *]) (rule sigma_sets_superset_generator)
finally show "x \<le> sigma (space x) (sets x \<union> sets y)"
by (simp add: space_measure_of[OF *] less_eq_measure.intros(2))
next
assume "\<not> space y \<subseteq> space x" "\<not> space x \<subseteq> space y"
then show "x \<le> sigma (space x \<union> space y) {}"
by (intro less_eq_measure.intros) auto
next
assume "sets x = sets y" then show "x \<le> sup_measure' x y"
by (simp add: le_measure le_emeasure_sup_measure'1)
qed (auto intro: less_eq_measure.intros)
show "y \<le> sup x y"
unfolding sup_measure_def
proof (intro le_sup_lexord)
assume **: "space x = space y"
then have *: "sets x \<union> sets y \<subseteq> Pow (space y)"
using sets.space_closed by auto
assume "\<not> sets y \<subseteq> sets x" "\<not> sets x \<subseteq> sets y"
then have "sets y \<subset> sets x \<union> sets y"
by auto
also have "\<dots> \<le> sigma (space y) (sets x \<union> sets y)"
by (subst sets_measure_of[OF *]) (rule sigma_sets_superset_generator)
finally show "y \<le> sigma (space x) (sets x \<union> sets y)"
by (simp add: ** space_measure_of[OF *] less_eq_measure.intros(2))
next
assume "\<not> space y \<subseteq> space x" "\<not> space x \<subseteq> space y"
then show "y \<le> sigma (space x \<union> space y) {}"
by (intro less_eq_measure.intros) auto
next
assume "sets x = sets y" then show "y \<le> sup_measure' x y"
by (simp add: le_measure le_emeasure_sup_measure'2)
qed (auto intro: less_eq_measure.intros)
show "x \<le> y \<Longrightarrow> z \<le> y \<Longrightarrow> sup x z \<le> y"
unfolding sup_measure_def
proof (intro sup_lexord[where P="\<lambda>x. x \<le> y"])
assume "x \<le> y" "z \<le> y" and [simp]: "space x = space z" "sets x = sets z"
from \<open>x \<le> y\<close> show "sup_measure' x z \<le> y"
proof cases
case 1 then show ?thesis
by (intro less_eq_measure.intros(1)) simp
next
case 2 then show ?thesis
by (intro less_eq_measure.intros(2)) simp_all
next
case 3 with \<open>z \<le> y\<close> \<open>x \<le> y\<close> show ?thesis
by (auto simp add: le_measure intro!: emeasure_sup_measure'_le2)
qed
next
assume **: "x \<le> y" "z \<le> y" "space x = space z" "\<not> sets z \<subseteq> sets x" "\<not> sets x \<subseteq> sets z"
then have *: "sets x \<union> sets z \<subseteq> Pow (space x)"
using sets.space_closed by auto
show "sigma (space x) (sets x \<union> sets z) \<le> y"
unfolding sigma_le_iff[OF *] using ** by (auto simp: le_measure_iff split: if_split_asm)
next
assume "x \<le> y" "z \<le> y" "\<not> space z \<subseteq> space x" "\<not> space x \<subseteq> space z"
then have "space x \<subseteq> space y" "space z \<subseteq> space y"
by (auto simp: le_measure_iff split: if_split_asm)
then show "sigma (space x \<union> space z) {} \<le> y"
by (simp add: sigma_le_iff)
qed
qed
end
lemma space_empty_eq_bot: "space a = {} \<longleftrightarrow> a = bot"
using space_empty[of a] by (auto intro!: measure_eqI)
lemma sets_eq_iff_bounded: "A \<le> B \<Longrightarrow> B \<le> C \<Longrightarrow> sets A = sets C \<Longrightarrow> sets B = sets A"
by (auto dest: sets_eq_imp_space_eq simp add: le_measure_iff split: if_split_asm)
lemma sets_sup: "sets A = sets M \<Longrightarrow> sets B = sets M \<Longrightarrow> sets (sup A B) = sets M"
by (auto simp add: sup_measure_def sup_lexord_def dest: sets_eq_imp_space_eq)
lemma le_measureD1: "A \<le> B \<Longrightarrow> space A \<le> space B"
by (auto simp: le_measure_iff split: if_split_asm)
lemma le_measureD2: "A \<le> B \<Longrightarrow> space A = space B \<Longrightarrow> sets A \<le> sets B"
by (auto simp: le_measure_iff split: if_split_asm)
lemma le_measureD3: "A \<le> B \<Longrightarrow> sets A = sets B \<Longrightarrow> emeasure A X \<le> emeasure B X"
by (auto simp: le_measure_iff le_fun_def dest: sets_eq_imp_space_eq split: if_split_asm)
lemma UN_space_closed: "\<Union>(sets ` S) \<subseteq> Pow (\<Union>(space ` S))"
using sets.space_closed by auto
definition\<^marker>\<open>tag important\<close>
Sup_lexord :: "('a \<Rightarrow> 'b::complete_lattice) \<Rightarrow> ('a set \<Rightarrow> 'a) \<Rightarrow> ('a set \<Rightarrow> 'a) \<Rightarrow> 'a set \<Rightarrow> 'a"
where
"Sup_lexord k c s A =
(let U = (SUP a\<in>A. k a)
in if \<exists>a\<in>A. k a = U then c {a\<in>A. k a = U} else s A)"
lemma Sup_lexord:
"(\<And>a S. a \<in> A \<Longrightarrow> k a = (SUP a\<in>A. k a) \<Longrightarrow> S = {a'\<in>A. k a' = k a} \<Longrightarrow> P (c S)) \<Longrightarrow> ((\<And>a. a \<in> A \<Longrightarrow> k a \<noteq> (SUP a\<in>A. k a)) \<Longrightarrow> P (s A)) \<Longrightarrow>
P (Sup_lexord k c s A)"
by (auto simp: Sup_lexord_def Let_def)
lemma Sup_lexord1:
assumes A: "A \<noteq> {}" "(\<And>a. a \<in> A \<Longrightarrow> k a = (\<Union>a\<in>A. k a))" "P (c A)"
shows "P (Sup_lexord k c s A)"
unfolding Sup_lexord_def Let_def
proof (clarsimp, safe)
show "\<forall>a\<in>A. k a \<noteq> (\<Union>x\<in>A. k x) \<Longrightarrow> P (s A)"
by (metis assms(1,2) ex_in_conv)
next
fix a assume "a \<in> A" "k a = (\<Union>x\<in>A. k x)"
then have "{a \<in> A. k a = (\<Union>x\<in>A. k x)} = {a \<in> A. k a = k a}"
by (metis A(2)[symmetric])
then show "P (c {a \<in> A. k a = (\<Union>x\<in>A. k x)})"
by (simp add: A(3))
qed
instantiation measure :: (type) complete_lattice
begin
interpretation sup_measure: comm_monoid_set sup "bot :: 'a measure"
by standard (auto intro!: antisym)
lemma sup_measure_F_mono':
"finite J \<Longrightarrow> finite I \<Longrightarrow> sup_measure.F id I \<le> sup_measure.F id (I \<union> J)"
proof (induction J rule: finite_induct)
case empty then show ?case
by simp
next
case (insert i J)
show ?case
proof cases
assume "i \<in> I" with insert show ?thesis
by (auto simp: insert_absorb)
next
assume "i \<notin> I"
have "sup_measure.F id I \<le> sup_measure.F id (I \<union> J)"
by (intro insert)
also have "\<dots> \<le> sup_measure.F id (insert i (I \<union> J))"
using insert \<open>i \<notin> I\<close> by (subst sup_measure.insert) auto
finally show ?thesis
by auto
qed
qed
lemma sup_measure_F_mono: "finite I \<Longrightarrow> J \<subseteq> I \<Longrightarrow> sup_measure.F id J \<le> sup_measure.F id I"
using sup_measure_F_mono'[of I J] by (auto simp: finite_subset Un_absorb1)
lemma sets_sup_measure_F:
"finite I \<Longrightarrow> I \<noteq> {} \<Longrightarrow> (\<And>i. i \<in> I \<Longrightarrow> sets i = sets M) \<Longrightarrow> sets (sup_measure.F id I) = sets M"
by (induction I rule: finite_ne_induct) (simp_all add: sets_sup)
definition\<^marker>\<open>tag important\<close> Sup_measure' :: "'a measure set \<Rightarrow> 'a measure" where
"Sup_measure' M =
measure_of (\<Union>a\<in>M. space a) (\<Union>a\<in>M. sets a)
(\<lambda>X. (SUP P\<in>{P. finite P \<and> P \<subseteq> M}. sup_measure.F id P X))"
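Informally, restating the definition just given in conventional notation (commentary only, not theory source): Sup_measure' M is the measure on the \<sigma>-algebra generated by \(\bigcup_{m\in M}\mathrm{sets}\,m\) whose value on a set X is the supremum over finite subfamilies,
\[
\mathrm{Sup\_measure'}\,M\;(X) \;=\; \sup\Big\{\big(\textstyle\bigsqcup_{\mu\in P}\mu\big)(X) \;\Big|\; P \subseteq M,\ P \text{ finite}\Big\},
\]
where \(\bigsqcup\) abbreviates the iterated binary supremum sup_measure.F id. For measurable X this reading is confirmed by emeasure_Sup_measure' below.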
lemma space_Sup_measure'2: "space (Sup_measure' M) = (\<Union>m\<in>M. space m)"
unfolding Sup_measure'_def by (intro space_measure_of[OF UN_space_closed])
lemma sets_Sup_measure'2: "sets (Sup_measure' M) = sigma_sets (\<Union>m\<in>M. space m) (\<Union>m\<in>M. sets m)"
unfolding Sup_measure'_def by (intro sets_measure_of[OF UN_space_closed])
lemma sets_Sup_measure':
assumes sets_eq[simp]: "\<And>m. m \<in> M \<Longrightarrow> sets m = sets A" and "M \<noteq> {}"
shows "sets (Sup_measure' M) = sets A"
using sets_eq[THEN sets_eq_imp_space_eq, simp] \<open>M \<noteq> {}\<close> by (simp add: Sup_measure'_def)
lemma space_Sup_measure':
assumes sets_eq[simp]: "\<And>m. m \<in> M \<Longrightarrow> sets m = sets A" and "M \<noteq> {}"
shows "space (Sup_measure' M) = space A"
using sets_eq[THEN sets_eq_imp_space_eq, simp] \<open>M \<noteq> {}\<close>
by (simp add: Sup_measure'_def)
lemma emeasure_Sup_measure':
assumes sets_eq[simp]: "\<And>m. m \<in> M \<Longrightarrow> sets m = sets A" and "X \<in> sets A" "M \<noteq> {}"
shows "emeasure (Sup_measure' M) X = (SUP P\<in>{P. finite P \<and> P \<subseteq> M}. sup_measure.F id P X)"
(is "_ = ?S X")
using Sup_measure'_def
proof (rule emeasure_measure_of)
note sets_eq[THEN sets_eq_imp_space_eq, simp]
have *: "sets (Sup_measure' M) = sets A" "space (Sup_measure' M) = space A"
using \<open>M \<noteq> {}\<close> by (simp_all add: Sup_measure'_def)
let ?\<mu> = "sup_measure.F id"
show "countably_additive (sets (Sup_measure' M)) ?S"
proof (rule countably_additiveI, goal_cases)
case (1 F)
then have **: "range F \<subseteq> sets A"
by (auto simp: *)
show "(\<Sum>i. ?S (F i)) = ?S (\<Union>i. F i)"
proof (subst ennreal_suminf_SUP_eq_directed)
fix i j and N :: "nat set" assume ij: "i \<in> {P. finite P \<and> P \<subseteq> M}" "j \<in> {P. finite P \<and> P \<subseteq> M}"
have "(i \<noteq> {} \<longrightarrow> sets (?\<mu> i) = sets A) \<and> (j \<noteq> {} \<longrightarrow> sets (?\<mu> j) = sets A) \<and>
(i \<noteq> {} \<or> j \<noteq> {} \<longrightarrow> sets (?\<mu> (i \<union> j)) = sets A)"
using ij by (intro impI sets_sup_measure_F conjI) auto
then have "?\<mu> j (F n) \<le> ?\<mu> (i \<union> j) (F n) \<and> ?\<mu> i (F n) \<le> ?\<mu> (i \<union> j) (F n)" for n
using ij
by (cases "i = {}"; cases "j = {}")
(auto intro!: le_measureD3 sup_measure_F_mono simp: sets_sup_measure_F
simp del: id_apply)
with ij show "\<exists>k\<in>{P. finite P \<and> P \<subseteq> M}. \<forall>n\<in>N. ?\<mu> i (F n) \<le> ?\<mu> k (F n) \<and> ?\<mu> j (F n) \<le> ?\<mu> k (F n)"
by (safe intro!: bexI[of _ "i \<union> j"]) auto
next
show "(SUP P \<in> {P. finite P \<and> P \<subseteq> M}. \<Sum>n. ?\<mu> P (F n)) = (SUP P \<in> {P. finite P \<and> P \<subseteq> M}. ?\<mu> P (\<Union>(F ` UNIV)))"
proof (intro arg_cong [of _ _ Sup] image_cong refl)
fix i assume i: "i \<in> {P. finite P \<and> P \<subseteq> M}"
show "(\<Sum>n. ?\<mu> i (F n)) = ?\<mu> i (\<Union>(F ` UNIV))"
proof cases
assume "i \<noteq> {}" with i ** show ?thesis
by (smt (verit, best) "1"(2) Measure_Space.sets_sup_measure_F assms(1) mem_Collect_eq subset_eq suminf_cong suminf_emeasure)
qed simp
qed
qed
qed
show "positive (sets (Sup_measure' M)) ?S"
by (auto simp: positive_def bot_ennreal[symmetric])
show "X \<in> sets (Sup_measure' M)"
using assms * by auto
qed (rule UN_space_closed)
definition\<^marker>\<open>tag important\<close> Sup_measure :: "'a measure set \<Rightarrow> 'a measure" where
"Sup_measure =
Sup_lexord space
(Sup_lexord sets Sup_measure'
(\<lambda>U. sigma (\<Union>u\<in>U. space u) (\<Union>u\<in>U. sets u)))
(\<lambda>U. sigma (\<Union>u\<in>U. space u) {})"
definition\<^marker>\<open>tag important\<close> Inf_measure :: "'a measure set \<Rightarrow> 'a measure" where
"Inf_measure A = Sup {x. \<forall>a\<in>A. x \<le> a}"
definition\<^marker>\<open>tag important\<close> inf_measure :: "'a measure \<Rightarrow> 'a measure \<Rightarrow> 'a measure" where
"inf_measure a b = Inf {a, b}"
definition\<^marker>\<open>tag important\<close> top_measure :: "'a measure" where
"top_measure = Inf {}"
instance
proof
note UN_space_closed [simp]
show upper: "x \<le> Sup A" if x: "x \<in> A" for x :: "'a measure" and A
unfolding Sup_measure_def
proof (intro Sup_lexord[where P="\<lambda>y. x \<le> y"])
assume "\<And>a. a \<in> A \<Longrightarrow> space a \<noteq> (\<Union>a\<in>A. space a)"
from this[OF \<open>x \<in> A\<close>] \<open>x \<in> A\<close> show "x \<le> sigma (\<Union>a\<in>A. space a) {}"
by (intro less_eq_measure.intros) auto
next
fix a S assume "a \<in> A" and a: "space a = (\<Union>a\<in>A. space a)" and S: "S = {a' \<in> A. space a' = space a}"
and neq: "\<And>aa. aa \<in> S \<Longrightarrow> sets aa \<noteq> (\<Union>a\<in>S. sets a)"
have sp_a: "space a = (\<Union>(space ` S))"
using \<open>a\<in>A\<close> by (auto simp: S)
show "x \<le> sigma (\<Union>(space ` S)) (\<Union>(sets ` S))"
proof cases
assume [simp]: "space x = space a"
have "sets x \<subset> (\<Union>a\<in>S. sets a)"
using \<open>x\<in>A\<close> neq[of x] by (auto simp: S)
also have "\<dots> \<subseteq> sigma_sets (\<Union>x\<in>S. space x) (\<Union>x\<in>S. sets x)"
by (rule sigma_sets_superset_generator)
finally show ?thesis
by (intro less_eq_measure.intros(2)) (simp_all add: sp_a)
next
assume "space x \<noteq> space a"
moreover have "space x \<le> space a"
unfolding a using \<open>x\<in>A\<close> by auto
ultimately show ?thesis
by (intro less_eq_measure.intros) (simp add: less_le sp_a)
qed
next
fix a b S S' assume "a \<in> A" and a: "space a = (\<Union>a\<in>A. space a)" and S: "S = {a' \<in> A. space a' = space a}"
and "b \<in> S" and b: "sets b = (\<Union>a\<in>S. sets a)" and S': "S' = {a' \<in> S. sets a' = sets b}"
then have "S' \<noteq> {}" "space b = space a"
by auto
have sets_eq: "\<And>x. x \<in> S' \<Longrightarrow> sets x = sets b"
by (auto simp: S')
note sets_eq[THEN sets_eq_imp_space_eq, simp]
have *: "sets (Sup_measure' S') = sets b" "space (Sup_measure' S') = space b"
using \<open>S' \<noteq> {}\<close> by (simp_all add: Sup_measure'_def sets_eq)
show "x \<le> Sup_measure' S'"
proof cases
assume "x \<in> S"
with \<open>b \<in> S\<close> have "space x = space b"
by (simp add: S)
show ?thesis
proof cases
assume "x \<in> S'"
show "x \<le> Sup_measure' S'"
proof (intro le_measure[THEN iffD2] ballI)
show "sets x = sets (Sup_measure' S')"
using \<open>x\<in>S'\<close> * by (simp add: S')
fix X assume "X \<in> sets x"
show "emeasure x X \<le> emeasure (Sup_measure' S') X"
proof (subst emeasure_Sup_measure'[OF _ \<open>X \<in> sets x\<close>])
show "emeasure x X \<le> (SUP P \<in> {P. finite P \<and> P \<subseteq> S'}. emeasure (sup_measure.F id P) X)"
using \<open>x\<in>S'\<close> by (intro SUP_upper2[where i="{x}"]) auto
qed (insert \<open>x\<in>S'\<close> S', auto)
qed
next
assume "x \<notin> S'"
then have "sets x \<noteq> sets b"
using \<open>x\<in>S\<close> by (auto simp: S')
moreover have "sets x \<le> sets b"
using \<open>x\<in>S\<close> unfolding b by auto
ultimately show ?thesis
using * \<open>x \<in> S\<close>
by (intro less_eq_measure.intros(2))
(simp_all add: * \<open>space x = space b\<close> less_le)
qed
next
assume "x \<notin> S"
with \<open>x\<in>A\<close> \<open>x \<notin> S\<close> \<open>space b = space a\<close> show ?thesis
by (intro less_eq_measure.intros)
(simp_all add: * less_le a SUP_upper S)
qed
qed
show least: "Sup A \<le> x" if x: "\<And>z. z \<in> A \<Longrightarrow> z \<le> x" for x :: "'a measure" and A
unfolding Sup_measure_def
proof (intro Sup_lexord[where P="\<lambda>y. y \<le> x"])
assume "\<And>a. a \<in> A \<Longrightarrow> space a \<noteq> (\<Union>a\<in>A. space a)"
show "sigma (\<Union>(space ` A)) {} \<le> x"
using x[THEN le_measureD1] by (subst sigma_le_iff) auto
next
fix a S assume "a \<in> A" "space a = (\<Union>a\<in>A. space a)" and S: "S = {a' \<in> A. space a' = space a}"
"\<And>a. a \<in> S \<Longrightarrow> sets a \<noteq> (\<Union>a\<in>S. sets a)"
have "\<Union>(space ` S) \<subseteq> space x"
using S le_measureD1[OF x] by auto
moreover
have "\<Union>(space ` S) = space a"
using \<open>a\<in>A\<close> S by auto
then have "space x = \<Union>(space ` S) \<Longrightarrow> \<Union>(sets ` S) \<subseteq> sets x"
using \<open>a \<in> A\<close> le_measureD2[OF x] by (auto simp: S)
ultimately show "sigma (\<Union>(space ` S)) (\<Union>(sets ` S)) \<le> x"
by (subst sigma_le_iff) simp_all
next
fix a b S S' assume "a \<in> A" and a: "space a = (\<Union>a\<in>A. space a)" and S: "S = {a' \<in> A. space a' = space a}"
and "b \<in> S" and b: "sets b = (\<Union>a\<in>S. sets a)" and S': "S' = {a' \<in> S. sets a' = sets b}"
then have "S' \<noteq> {}" "space b = space a"
by auto
have sets_eq: "\<And>x. x \<in> S' \<Longrightarrow> sets x = sets b"
by (auto simp: S')
note sets_eq[THEN sets_eq_imp_space_eq, simp]
have *: "sets (Sup_measure' S') = sets b" "space (Sup_measure' S') = space b"
using \<open>S' \<noteq> {}\<close> by (simp_all add: Sup_measure'_def sets_eq)
show "Sup_measure' S' \<le> x"
proof cases
assume "space x = space a"
show ?thesis
proof cases
assume **: "sets x = sets b"
show ?thesis
proof (intro le_measure[THEN iffD2] ballI)
show ***: "sets (Sup_measure' S') = sets x"
by (simp add: * **)
fix X assume "X \<in> sets (Sup_measure' S')"
show "emeasure (Sup_measure' S') X \<le> emeasure x X"
unfolding ***
proof (subst emeasure_Sup_measure'[OF _ \<open>X \<in> sets (Sup_measure' S')\<close>])
show "(SUP P \<in> {P. finite P \<and> P \<subseteq> S'}. emeasure (sup_measure.F id P) X) \<le> emeasure x X"
proof (safe intro!: SUP_least)
fix P assume P: "finite P" "P \<subseteq> S'"
show "emeasure (sup_measure.F id P) X \<le> emeasure x X"
proof cases
assume "P = {}" then show ?thesis
by auto
next
assume "P \<noteq> {}"
from P have "finite P" "P \<subseteq> A"
unfolding S' S by (simp_all add: subset_eq)
then have "sup_measure.F id P \<le> x"
by (induction P) (auto simp: x)
moreover have "sets (sup_measure.F id P) = sets x"
using \<open>finite P\<close> \<open>P \<noteq> {}\<close> \<open>P \<subseteq> S'\<close> \<open>sets x = sets b\<close>
by (intro sets_sup_measure_F) (auto simp: S')
ultimately show "emeasure (sup_measure.F id P) X \<le> emeasure x X"
by (rule le_measureD3)
qed
qed
show "m \<in> S' \<Longrightarrow> sets m = sets (Sup_measure' S')" for m
unfolding * by (simp add: S')
qed fact
qed
next
assume "sets x \<noteq> sets b"
moreover have "sets b \<le> sets x"
unfolding b S using x[THEN le_measureD2] \<open>space x = space a\<close> by auto
ultimately show "Sup_measure' S' \<le> x"
using \<open>space x = space a\<close> \<open>b \<in> S\<close>
by (intro less_eq_measure.intros(2)) (simp_all add: * S)
qed
next
assume "space x \<noteq> space a"
then have "space a < space x"
using le_measureD1[OF x[OF \<open>a\<in>A\<close>]] by auto
then show "Sup_measure' S' \<le> x"
by (intro less_eq_measure.intros) (simp add: * \<open>space b = space a\<close>)
qed
qed
show "Sup {} = (bot::'a measure)" "Inf {} = (top::'a measure)"
by (auto intro!: antisym least simp: top_measure_def)
show lower: "x \<in> A \<Longrightarrow> Inf A \<le> x" for x :: "'a measure" and A
unfolding Inf_measure_def by (intro least) auto
show greatest: "(\<And>z. z \<in> A \<Longrightarrow> x \<le> z) \<Longrightarrow> x \<le> Inf A" for x :: "'a measure" and A
unfolding Inf_measure_def by (intro upper) auto
show "inf x y \<le> x" "inf x y \<le> y" "x \<le> y \<Longrightarrow> x \<le> z \<Longrightarrow> x \<le> inf y z" for x y z :: "'a measure"
by (auto simp: inf_measure_def intro!: lower greatest)
qed
end
lemma sets_SUP:
assumes "\<And>x. x \<in> I \<Longrightarrow> sets (M x) = sets N"
shows "I \<noteq> {} \<Longrightarrow> sets (SUP i\<in>I. M i) = sets N"
unfolding Sup_measure_def
using assms assms[THEN sets_eq_imp_space_eq]
sets_Sup_measure'[where A=N and M="M`I"]
by (intro Sup_lexord1[where P="\<lambda>x. sets x = sets N"]) auto
lemma emeasure_SUP:
assumes sets: "\<And>i. i \<in> I \<Longrightarrow> sets (M i) = sets N" "X \<in> sets N" "I \<noteq> {}"
shows "emeasure (SUP i\<in>I. M i) X = (SUP J\<in>{J. J \<noteq> {} \<and> finite J \<and> J \<subseteq> I}. emeasure (SUP i\<in>J. M i) X)"
proof -
interpret sup_measure: comm_monoid_set sup "bot :: 'b measure"
by standard (auto intro!: antisym)
have eq: "finite J \<Longrightarrow> sup_measure.F id J = (SUP i\<in>J. i)" for J :: "'b measure set"
by (induction J rule: finite_induct) auto
have 1: "J \<noteq> {} \<Longrightarrow> J \<subseteq> I \<Longrightarrow> sets (SUP x\<in>J. M x) = sets N" for J
by (intro sets_SUP sets) auto
from \<open>I \<noteq> {}\<close> obtain i where "i\<in>I" by auto
have "Sup_measure' (M`I) X = (SUP P\<in>{P. finite P \<and> P \<subseteq> M`I}. sup_measure.F id P X)"
using sets by (intro emeasure_Sup_measure') auto
also have "Sup_measure' (M`I) = (SUP i\<in>I. M i)"
unfolding Sup_measure_def using \<open>I \<noteq> {}\<close> sets sets(1)[THEN sets_eq_imp_space_eq]
by (intro Sup_lexord1[where P="\<lambda>x. _ = x"]) auto
also have "(SUP P\<in>{P. finite P \<and> P \<subseteq> M`I}. sup_measure.F id P X) =
(SUP J\<in>{J. J \<noteq> {} \<and> finite J \<and> J \<subseteq> I}. (SUP i\<in>J. M i) X)"
proof (intro SUP_eq)
fix J assume "J \<in> {P. finite P \<and> P \<subseteq> M`I}"
then obtain J' where J': "J' \<subseteq> I" "finite J'" and J: "J = M`J'" and "finite J"
using finite_subset_image[of J M I] by auto
show "\<exists>j\<in>{J. J \<noteq> {} \<and> finite J \<and> J \<subseteq> I}. sup_measure.F id J X \<le> (SUP i\<in>j. M i) X"
proof cases
assume "J' = {}" with \<open>i \<in> I\<close> show ?thesis
by (auto simp add: J)
next
assume "J' \<noteq> {}" with J J' show ?thesis
by (intro bexI[of _ "J'"]) (auto simp add: eq simp del: id_apply)
qed
next
fix J assume J: "J \<in> {P. P \<noteq> {} \<and> finite P \<and> P \<subseteq> I}"
show "\<exists>J'\<in>{J. finite J \<and> J \<subseteq> M`I}. (SUP i\<in>J. M i) X \<le> sup_measure.F id J' X"
using J by (intro bexI[of _ "M`J"]) (auto simp add: eq simp del: id_apply)
qed
finally show ?thesis .
qed
lemma emeasure_SUP_chain:
assumes sets: "\<And>i. i \<in> A \<Longrightarrow> sets (M i) = sets N" "X \<in> sets N"
assumes ch: "Complete_Partial_Order.chain (\<le>) (M ` A)" and "A \<noteq> {}"
shows "emeasure (SUP i\<in>A. M i) X = (SUP i\<in>A. emeasure (M i) X)"
proof (subst emeasure_SUP[OF sets \<open>A \<noteq> {}\<close>])
show "(SUP J\<in>{J. J \<noteq> {} \<and> finite J \<and> J \<subseteq> A}. emeasure (Sup (M ` J)) X) = (SUP i\<in>A. emeasure (M i) X)"
proof (rule SUP_eq)
fix J assume "J \<in> {J. J \<noteq> {} \<and> finite J \<and> J \<subseteq> A}"
then have J: "Complete_Partial_Order.chain (\<le>) (M ` J)" "finite J" "J \<noteq> {}" and "J \<subseteq> A"
using ch[THEN chain_subset, of "M`J"] by auto
with in_chain_finite[OF J(1)] obtain j where "j \<in> J" "(SUP j\<in>J. M j) = M j"
by auto
with \<open>J \<subseteq> A\<close> show "\<exists>j\<in>A. emeasure (Sup (M ` J)) X \<le> emeasure (M j) X"
by auto
next
fix j assume "j\<in>A" then show "\<exists>i\<in>{J. J \<noteq> {} \<and> finite J \<and> J \<subseteq> A}. emeasure (M j) X \<le> emeasure (Sup (M ` i)) X"
by (intro bexI[of _ "{j}"]) auto
qed
qed
subsubsection\<^marker>\<open>tag unimportant\<close> \<open>Supremum of a set of \<open>\<sigma>\<close>-algebras\<close>
lemma space_Sup_eq_UN: "space (Sup M) = (\<Union>x\<in>M. space x)" (is "?L=?R")
proof
show "?L \<subseteq> ?R"
using Sup_lexord[where P="\<lambda>x. space x = _"]
apply (clarsimp simp: Sup_measure_def)
by (smt (verit) Sup_lexord_def UN_E mem_Collect_eq space_Sup_measure'2 space_measure_of_conv)
qed (use Sup_upper le_measureD1 in fastforce)
lemma sets_Sup_eq:
assumes *: "\<And>m. m \<in> M \<Longrightarrow> space m = X" and "M \<noteq> {}"
shows "sets (Sup M) = sigma_sets X (\<Union>x\<in>M. sets x)"
unfolding Sup_measure_def
proof (rule Sup_lexord1 [OF \<open>M \<noteq> {}\<close>])
show "sets (Sup_lexord sets Sup_measure' (\<lambda>U. sigma (\<Union> (space ` U)) (\<Union> (sets ` U))) M)
= sigma_sets X (\<Union> (sets ` M))"
apply (rule Sup_lexord)
apply (metis (mono_tags, lifting) "*" empty_iff mem_Collect_eq sets.sigma_sets_eq sets_Sup_measure')
by (metis "*" SUP_eq_const UN_space_closed assms(2) sets_measure_of)
qed (use * in blast)
lemma in_sets_Sup: "(\<And>m. m \<in> M \<Longrightarrow> space m = X) \<Longrightarrow> m \<in> M \<Longrightarrow> A \<in> sets m \<Longrightarrow> A \<in> sets (Sup M)"
by (subst sets_Sup_eq[where X=X]) auto
lemma Sup_lexord_rel:
assumes "\<And>i. i \<in> I \<Longrightarrow> k (A i) = k (B i)"
"R (c (A ` {a \<in> I. k (B a) = (SUP x\<in>I. k (B x))})) (c (B ` {a \<in> I. k (B a) = (SUP x\<in>I. k (B x))}))"
"R (s (A`I)) (s (B`I))"
shows "R (Sup_lexord k c s (A`I)) (Sup_lexord k c s (B`I))"
proof -
have "A ` {a \<in> I. k (B a) = (SUP x\<in>I. k (B x))} = {a \<in> A ` I. k a = (SUP x\<in>I. k (B x))}"
using assms(1) by auto
moreover have "B ` {a \<in> I. k (B a) = (SUP x\<in>I. k (B x))} = {a \<in> B ` I. k a = (SUP x\<in>I. k (B x))}"
by auto
ultimately show ?thesis
using assms by (auto simp: Sup_lexord_def Let_def image_comp)
qed
lemma sets_SUP_cong:
assumes eq: "\<And>i. i \<in> I \<Longrightarrow> sets (M i) = sets (N i)"
shows "sets (SUP i\<in>I. M i) = sets (SUP i\<in>I. N i)"
unfolding Sup_measure_def
using eq eq[THEN sets_eq_imp_space_eq]
by (intro Sup_lexord_rel[where R="\<lambda>x y. sets x = sets y"], simp_all add: sets_Sup_measure'2)
lemma sets_Sup_in_sets:
assumes "M \<noteq> {}"
assumes "\<And>m. m \<in> M \<Longrightarrow> space m = space N"
assumes "\<And>m. m \<in> M \<Longrightarrow> sets m \<subseteq> sets N"
shows "sets (Sup M) \<subseteq> sets N"
proof -
have *: "\<Union>(space ` M) = space N"
using assms by auto
show ?thesis
unfolding * using assms by (subst sets_Sup_eq[of M "space N"]) (auto intro!: sets.sigma_sets_subset)
qed
lemma measurable_Sup1:
assumes m: "m \<in> M" and f: "f \<in> measurable m N"
and const_space: "\<And>m n. m \<in> M \<Longrightarrow> n \<in> M \<Longrightarrow> space m = space n"
shows "f \<in> measurable (Sup M) N"
proof -
have "space (Sup M) = space m"
using m by (auto simp add: space_Sup_eq_UN dest: const_space)
then show ?thesis
using m f unfolding measurable_def by (auto intro: in_sets_Sup[OF const_space])
qed
lemma measurable_Sup2:
assumes M: "M \<noteq> {}"
assumes f: "\<And>m. m \<in> M \<Longrightarrow> f \<in> measurable N m"
and const_space: "\<And>m n. m \<in> M \<Longrightarrow> n \<in> M \<Longrightarrow> space m = space n"
shows "f \<in> measurable N (Sup M)"
proof -
from M obtain m where "m \<in> M" by auto
have space_eq: "\<And>n. n \<in> M \<Longrightarrow> space n = space m"
by (intro const_space \<open>m \<in> M\<close>)
have eq: "sets (sigma (\<Union> (space ` M)) (\<Union> (sets ` M))) = sets (Sup M)"
by (metis M SUP_eq_const UN_space_closed sets_Sup_eq sets_measure_of space_eq)
have "f \<in> measurable N (sigma (\<Union>m\<in>M. space m) (\<Union>m\<in>M. sets m))"
proof (rule measurable_measure_of)
show "f \<in> space N \<rightarrow> \<Union>(space ` M)"
using measurable_space[OF f] M by auto
qed (auto intro: measurable_sets f dest: sets.sets_into_space)
also have "measurable N (sigma (\<Union>m\<in>M. space m) (\<Union>m\<in>M. sets m)) = measurable N (Sup M)"
using eq measurable_cong_sets by blast
finally show ?thesis .
qed
lemma measurable_SUP2:
"I \<noteq> {} \<Longrightarrow> (\<And>i. i \<in> I \<Longrightarrow> f \<in> measurable N (M i)) \<Longrightarrow>
(\<And>i j. i \<in> I \<Longrightarrow> j \<in> I \<Longrightarrow> space (M i) = space (M j)) \<Longrightarrow> f \<in> measurable N (SUP i\<in>I. M i)"
by (auto intro!: measurable_Sup2)
lemma sets_Sup_sigma:
assumes [simp]: "M \<noteq> {}" and M: "\<And>m. m \<in> M \<Longrightarrow> m \<subseteq> Pow \<Omega>"
shows "sets (SUP m\<in>M. sigma \<Omega> m) = sets (sigma \<Omega> (\<Union>M))"
proof -
{ fix a m assume "a \<in> sigma_sets \<Omega> m" "m \<in> M"
then have "a \<in> sigma_sets \<Omega> (\<Union>M)"
by induction (auto intro: sigma_sets.intros(2-)) }
then have "sigma_sets \<Omega> (\<Union> (sigma_sets \<Omega> ` M)) = sigma_sets \<Omega> (\<Union> M)"
by (smt (verit, best) UN_iff Union_iff sigma_sets.Basic sigma_sets_eqI)
then show "sets (SUP m\<in>M. sigma \<Omega> m) = sets (sigma \<Omega> (\<Union>M))"
by (subst sets_Sup_eq) (fastforce simp add: M Union_least)+
qed
lemma Sup_sigma:
assumes [simp]: "M \<noteq> {}" and M: "\<And>m. m \<in> M \<Longrightarrow> m \<subseteq> Pow \<Omega>"
shows "(SUP m\<in>M. sigma \<Omega> m) = (sigma \<Omega> (\<Union>M))"
proof (intro antisym SUP_least)
have *: "\<Union>M \<subseteq> Pow \<Omega>"
using M by auto
show "sigma \<Omega> (\<Union>M) \<le> (SUP m\<in>M. sigma \<Omega> m)"
proof (intro less_eq_measure.intros(3))
show "space (sigma \<Omega> (\<Union>M)) = space (SUP m\<in>M. sigma \<Omega> m)"
"sets (sigma \<Omega> (\<Union>M)) = sets (SUP m\<in>M. sigma \<Omega> m)"
by (auto simp add: M sets_Sup_sigma sets_eq_imp_space_eq space_measure_of_conv)
qed (simp add: emeasure_sigma le_fun_def)
fix m assume "m \<in> M" then show "sigma \<Omega> m \<le> sigma \<Omega> (\<Union>M)"
by (subst sigma_le_iff) (auto simp add: M *)
qed
lemma SUP_sigma_sigma:
"M \<noteq> {} \<Longrightarrow> (\<And>m. m \<in> M \<Longrightarrow> f m \<subseteq> Pow \<Omega>) \<Longrightarrow> (SUP m\<in>M. sigma \<Omega> (f m)) = sigma \<Omega> (\<Union>m\<in>M. f m)"
using Sup_sigma[of "f`M" \<Omega>] by (auto simp: image_comp)
lemma sets_vimage_Sup_eq:
assumes *: "M \<noteq> {}" "f \<in> X \<rightarrow> Y" "\<And>m. m \<in> M \<Longrightarrow> space m = Y"
shows "sets (vimage_algebra X f (Sup M)) = sets (SUP m \<in> M. vimage_algebra X f m)"
(is "?L = ?R")
proof
have "\<And>m. m \<in> M \<Longrightarrow> f \<in> Sup (vimage_algebra X f ` M) \<rightarrow>\<^sub>M m"
using assms
by (smt (verit, del_insts) Pi_iff imageE image_eqI measurable_Sup1
measurable_vimage_algebra1 space_vimage_algebra)
then show "?L \<subseteq> ?R"
by (intro sets_image_in_sets measurable_Sup2) (simp_all add: space_Sup_eq_UN *)
show "?R \<subseteq> ?L"
apply (intro sets_Sup_in_sets)
apply (force simp add: * space_Sup_eq_UN sets_vimage_algebra2 intro: in_sets_Sup)+
done
qed
lemma restrict_space_eq_vimage_algebra':
"sets (restrict_space M \<Omega>) = sets (vimage_algebra (\<Omega> \<inter> space M) (\<lambda>x. x) M)"
proof -
have *: "{A \<inter> (\<Omega> \<inter> space M) |A. A \<in> sets M} = {A \<inter> \<Omega> |A. A \<in> sets M}"
using sets.sets_into_space[of _ M] by blast
show ?thesis
unfolding restrict_space_def
by (subst sets_measure_of)
(auto simp add: image_subset_iff sets_vimage_algebra * dest: sets.sets_into_space intro!: arg_cong2[where f=sigma_sets])
qed
lemma sigma_le_sets:
assumes [simp]: "A \<subseteq> Pow X" shows "sets (sigma X A) \<subseteq> sets N \<longleftrightarrow> X \<in> sets N \<and> A \<subseteq> sets N"
proof
have "X \<in> sigma_sets X A" "A \<subseteq> sigma_sets X A"
by (auto intro: sigma_sets_top)
moreover assume "sets (sigma X A) \<subseteq> sets N"
ultimately show "X \<in> sets N \<and> A \<subseteq> sets N"
by auto
next
assume *: "X \<in> sets N \<and> A \<subseteq> sets N"
{ fix Y assume "Y \<in> sigma_sets X A" from this * have "Y \<in> sets N"
by induction auto }
then show "sets (sigma X A) \<subseteq> sets N"
by auto
qed
lemma measurable_iff_sets:
"f \<in> measurable M N \<longleftrightarrow> (f \<in> space M \<rightarrow> space N \<and> sets (vimage_algebra (space M) f N) \<subseteq> sets M)"
unfolding measurable_def
by (smt (verit, ccfv_threshold) mem_Collect_eq sets_vimage_algebra sigma_sets_le_sets_iff subset_eq)
lemma sets_vimage_algebra_space: "X \<in> sets (vimage_algebra X f M)"
using sets.top[of "vimage_algebra X f M"] by simp
lemma measurable_mono:
assumes N: "sets N' \<le> sets N" "space N = space N'"
assumes M: "sets M \<le> sets M'" "space M = space M'"
shows "measurable M N \<subseteq> measurable M' N'"
unfolding measurable_def
proof safe
fix f A assume "f \<in> space M \<rightarrow> space N" "A \<in> sets N'"
moreover assume "\<forall>y\<in>sets N. f -` y \<inter> space M \<in> sets M" note this[THEN bspec, of A]
ultimately show "f -` A \<inter> space M' \<in> sets M'"
using assms by auto
qed (use N M in auto)
lemma measurable_Sup_measurable:
assumes f: "f \<in> space N \<rightarrow> A"
shows "f \<in> measurable N (Sup {M. space M = A \<and> f \<in> measurable N M})"
proof (rule measurable_Sup2)
show "{M. space M = A \<and> f \<in> measurable N M} \<noteq> {}"
using f unfolding ex_in_conv[symmetric]
by (intro exI[of _ "sigma A {}"]) (auto intro!: measurable_measure_of)
qed auto
lemma (in sigma_algebra) sigma_sets_subset':
assumes a: "a \<subseteq> M" "\<Omega>' \<in> M"
shows "sigma_sets \<Omega>' a \<subseteq> M"
proof
show "x \<in> M" if x: "x \<in> sigma_sets \<Omega>' a" for x
using x by (induct rule: sigma_sets.induct) (use a in auto)
qed
lemma in_sets_SUP: "i \<in> I \<Longrightarrow> (\<And>i. i \<in> I \<Longrightarrow> space (M i) = Y) \<Longrightarrow> X \<in> sets (M i) \<Longrightarrow> X \<in> sets (SUP i\<in>I. M i)"
by (intro in_sets_Sup[where X=Y]) auto
lemma measurable_SUP1:
"i \<in> I \<Longrightarrow> f \<in> measurable (M i) N \<Longrightarrow> (\<And>m n. m \<in> I \<Longrightarrow> n \<in> I \<Longrightarrow> space (M m) = space (M n)) \<Longrightarrow>
f \<in> measurable (SUP i\<in>I. M i) N"
by (auto intro: measurable_Sup1)
lemma sets_image_in_sets':
assumes X: "X \<in> sets N"
assumes f: "\<And>A. A \<in> sets M \<Longrightarrow> f -` A \<inter> X \<in> sets N"
shows "sets (vimage_algebra X f M) \<subseteq> sets N"
unfolding sets_vimage_algebra
by (rule sets.sigma_sets_subset') (auto intro!: measurable_sets X f)
lemma mono_vimage_algebra:
"sets M \<le> sets N \<Longrightarrow> sets (vimage_algebra X f M) \<subseteq> sets (vimage_algebra X f N)"
using sets.top[of "sigma X {f -` A \<inter> X |A. A \<in> sets N}"]
unfolding vimage_algebra_def
by (smt (verit, del_insts) space_measure_of sigma_le_sets Pow_iff inf_le2 mem_Collect_eq subset_eq)
lemma mono_restrict_space: "sets M \<le> sets N \<Longrightarrow> sets (restrict_space M X) \<subseteq> sets (restrict_space N X)"
unfolding sets_restrict_space by (rule image_mono)
lemma sets_eq_bot: "sets M = {{}} \<longleftrightarrow> M = bot"
by (metis measure_eqI emeasure_empty sets_bot singletonD)
lemma sets_eq_bot2: "{{}} = sets M \<longleftrightarrow> M = bot"
using sets_eq_bot[of M] by blast
lemma (in finite_measure) countable_support:
"countable {x. measure M {x} \<noteq> 0}"
proof cases
assume "measure M (space M) = 0"
then show ?thesis
by (metis (mono_tags, lifting) bounded_measure measure_le_0_iff Collect_empty_eq countable_empty)
next
let ?M = "measure M (space M)" and ?m = "\<lambda>x. measure M {x}"
assume "?M \<noteq> 0"
then have *: "{x. ?m x \<noteq> 0} = (\<Union>n. {x. ?M / Suc n < ?m x})"
using reals_Archimedean[of "?m x / ?M" for x]
by (auto simp: field_simps not_le[symmetric] divide_le_0_iff measure_le_0_iff)
have **: "\<And>n. finite {x. ?M / Suc n < ?m x}"
proof (rule ccontr)
fix n assume "infinite {x. ?M / Suc n < ?m x}" (is "infinite ?X")
then obtain X where "finite X" "card X = Suc (Suc n)" "X \<subseteq> ?X"
by (metis infinite_arbitrarily_large)
then have *: "\<And>x. x \<in> X \<Longrightarrow> ?M / Suc n \<le> ?m x"
by auto
{ fix x assume "x \<in> X"
from \<open>?M \<noteq> 0\<close> *[OF this] have "?m x \<noteq> 0" by (auto simp: field_simps measure_le_0_iff)
then have "{x} \<in> sets M" by (auto dest: measure_notin_sets) }
note singleton_sets = this
have "?M < (\<Sum>x\<in>X. ?M / Suc n)"
using \<open>?M \<noteq> 0\<close>
by (simp add: \<open>card X = Suc (Suc n)\<close> field_simps less_le)
also have "\<dots> \<le> (\<Sum>x\<in>X. ?m x)"
by (rule sum_mono) fact
also have "\<dots> = measure M (\<Union>x\<in>X. {x})"
using singleton_sets \<open>finite X\<close>
by (intro finite_measure_finite_Union[symmetric]) (auto simp: disjoint_family_on_def)
finally have "?M < measure M (\<Union>x\<in>X. {x})" .
moreover have "measure M (\<Union>x\<in>X. {x}) \<le> ?M"
using singleton_sets[THEN sets.sets_into_space] by (intro finite_measure_mono) auto
ultimately show False by simp
qed
show ?thesis
unfolding * by (intro countable_UN countableI_type countable_finite[OF **])
qed
end
diff --git a/src/HOL/Analysis/Product_Topology.thy b/src/HOL/Analysis/Product_Topology.thy
--- a/src/HOL/Analysis/Product_Topology.thy
+++ b/src/HOL/Analysis/Product_Topology.thy
@@ -1,643 +1,684 @@
section\<open>The binary product topology\<close>
theory Product_Topology
imports Function_Topology
begin
section \<open>Product Topology\<close>
subsection\<open>Definition\<close>
definition prod_topology :: "'a topology \<Rightarrow> 'b topology \<Rightarrow> ('a \<times> 'b) topology" where
"prod_topology X Y \<equiv> topology (arbitrary union_of (\<lambda>U. U \<in> {S \<times> T |S T. openin X S \<and> openin Y T}))"
lemma open_product_open:
assumes "open A"
shows "\<exists>\<U>. \<U> \<subseteq> {S \<times> T |S T. open S \<and> open T} \<and> \<Union> \<U> = A"
proof -
obtain f g where *: "\<And>u. u \<in> A \<Longrightarrow> open (f u) \<and> open (g u) \<and> u \<in> (f u) \<times> (g u) \<and> (f u) \<times> (g u) \<subseteq> A"
using open_prod_def [of A] assms by metis
let ?\<U> = "(\<lambda>u. f u \<times> g u) ` A"
show ?thesis
by (rule_tac x="?\<U>" in exI) (auto simp: dest: *)
qed
lemma open_product_open_eq: "(arbitrary union_of (\<lambda>U. \<exists>S T. U = S \<times> T \<and> open S \<and> open T)) = open"
by (force simp: union_of_def arbitrary_def intro: open_product_open open_Times)
lemma openin_prod_topology:
"openin (prod_topology X Y) = arbitrary union_of (\<lambda>U. U \<in> {S \<times> T |S T. openin X S \<and> openin Y T})"
unfolding prod_topology_def
proof (rule topology_inverse')
show "istopology (arbitrary union_of (\<lambda>U. U \<in> {S \<times> T |S T. openin X S \<and> openin Y T}))"
apply (rule istopology_base, simp)
by (metis openin_Int Times_Int_Times)
qed
lemma topspace_prod_topology [simp]:
"topspace (prod_topology X Y) = topspace X \<times> topspace Y"
proof -
have "topspace(prod_topology X Y) = \<Union> (Collect (openin (prod_topology X Y)))" (is "_ = ?Z")
unfolding topspace_def ..
also have "\<dots> = topspace X \<times> topspace Y"
proof
show "?Z \<subseteq> topspace X \<times> topspace Y"
apply (auto simp: openin_prod_topology union_of_def arbitrary_def)
using openin_subset by force+
next
have *: "\<exists>A B. topspace X \<times> topspace Y = A \<times> B \<and> openin X A \<and> openin Y B"
by blast
show "topspace X \<times> topspace Y \<subseteq> ?Z"
apply (rule Union_upper)
using * by (simp add: openin_prod_topology arbitrary_union_of_inc)
qed
finally show ?thesis .
qed
lemma subtopology_Times:
shows "subtopology (prod_topology X Y) (S \<times> T) = prod_topology (subtopology X S) (subtopology Y T)"
proof -
have "((\<lambda>U. \<exists>S T. U = S \<times> T \<and> openin X S \<and> openin Y T) relative_to S \<times> T) =
(\<lambda>U. \<exists>S' T'. U = S' \<times> T' \<and> (openin X relative_to S) S' \<and> (openin Y relative_to T) T')"
by (auto simp: relative_to_def Times_Int_Times fun_eq_iff) metis
then show ?thesis
by (simp add: topology_eq openin_prod_topology arbitrary_union_of_relative_to flip: openin_relative_to)
qed
lemma prod_topology_subtopology:
"prod_topology (subtopology X S) Y = subtopology (prod_topology X Y) (S \<times> topspace Y)"
"prod_topology X (subtopology Y T) = subtopology (prod_topology X Y) (topspace X \<times> T)"
by (auto simp: subtopology_Times)
lemma prod_topology_discrete_topology:
"discrete_topology (S \<times> T) = prod_topology (discrete_topology S) (discrete_topology T)"
by (auto simp: discrete_topology_unique openin_prod_topology intro: arbitrary_union_of_inc)
lemma prod_topology_euclidean [simp]: "prod_topology euclidean euclidean = euclidean"
by (simp add: prod_topology_def open_product_open_eq)
lemma prod_topology_subtopology_eu [simp]:
"prod_topology (subtopology euclidean S) (subtopology euclidean T) = subtopology euclidean (S \<times> T)"
by (simp add: prod_topology_subtopology subtopology_subtopology Times_Int_Times)
lemma openin_prod_topology_alt:
"openin (prod_topology X Y) S \<longleftrightarrow>
(\<forall>x y. (x,y) \<in> S \<longrightarrow> (\<exists>U V. openin X U \<and> openin Y V \<and> x \<in> U \<and> y \<in> V \<and> U \<times> V \<subseteq> S))"
apply (auto simp: openin_prod_topology arbitrary_union_of_alt, fastforce)
by (metis mem_Sigma_iff)
lemma open_map_fst: "open_map (prod_topology X Y) X fst"
unfolding open_map_def openin_prod_topology_alt
by (force simp: openin_subopen [of X "fst ` _"] intro: subset_fst_imageI)
lemma open_map_snd: "open_map (prod_topology X Y) Y snd"
unfolding open_map_def openin_prod_topology_alt
by (force simp: openin_subopen [of Y "snd ` _"] intro: subset_snd_imageI)
lemma openin_prod_Times_iff:
"openin (prod_topology X Y) (S \<times> T) \<longleftrightarrow> S = {} \<or> T = {} \<or> openin X S \<and> openin Y T"
proof (cases "S = {} \<or> T = {}")
case False
then show ?thesis
apply (simp add: openin_prod_topology_alt openin_subopen [of X S] openin_subopen [of Y T] times_subset_iff, safe)
apply (meson|force)+
done
qed force
lemma closure_of_Times:
"(prod_topology X Y) closure_of (S \<times> T) = (X closure_of S) \<times> (Y closure_of T)" (is "?lhs = ?rhs")
proof
show "?lhs \<subseteq> ?rhs"
by (clarsimp simp: closure_of_def openin_prod_topology_alt) blast
show "?rhs \<subseteq> ?lhs"
by (clarsimp simp: closure_of_def openin_prod_topology_alt) (meson SigmaI subsetD)
qed
lemma closedin_prod_Times_iff:
"closedin (prod_topology X Y) (S \<times> T) \<longleftrightarrow> S = {} \<or> T = {} \<or> closedin X S \<and> closedin Y T"
by (auto simp: closure_of_Times times_eq_iff simp flip: closure_of_eq)
lemma interior_of_Times: "(prod_topology X Y) interior_of (S \<times> T) = (X interior_of S) \<times> (Y interior_of T)"
proof (rule interior_of_unique)
show "(X interior_of S) \<times> Y interior_of T \<subseteq> S \<times> T"
by (simp add: Sigma_mono interior_of_subset)
show "openin (prod_topology X Y) ((X interior_of S) \<times> Y interior_of T)"
by (simp add: openin_prod_Times_iff)
next
show "T' \<subseteq> (X interior_of S) \<times> Y interior_of T" if "T' \<subseteq> S \<times> T" "openin (prod_topology X Y) T'" for T'
proof (clarsimp; intro conjI)
fix a :: "'a" and b :: "'b"
assume "(a, b) \<in> T'"
with that obtain U V where UV: "openin X U" "openin Y V" "a \<in> U" "b \<in> V" "U \<times> V \<subseteq> T'"
by (metis openin_prod_topology_alt)
then show "a \<in> X interior_of S"
using interior_of_maximal_eq that(1) by fastforce
show "b \<in> Y interior_of T"
using UV interior_of_maximal_eq that(1)
by (metis SigmaI mem_Sigma_iff subset_eq)
qed
qed
+text \<open>The opposite direction is missing here. Does it hold? A converse is proved for proper maps, which satisfy a stronger condition\<close>
+lemma closed_map_prod:
+ assumes "closed_map (prod_topology X Y) (prod_topology X' Y') (\<lambda>(x,y). (f x, g y))"
+ shows "topspace(prod_topology X Y) = {} \<or> closed_map X X' f \<and> closed_map Y Y' g"
+proof (cases "topspace(prod_topology X Y) = {}")
+ case False
+ then have ne: "topspace X \<noteq> {}" "topspace Y \<noteq> {}"
+ by auto
+ have "closed_map X X' f"
+ unfolding closed_map_def
+ proof (intro strip)
+ fix C
+ assume "closedin X C"
+ show "closedin X' (f ` C)"
+ proof (cases "C={}")
+ case False
+ with assms have "closedin (prod_topology X' Y') ((\<lambda>(x,y). (f x, g y)) ` (C \<times> topspace Y))"
+ by (simp add: \<open>closedin X C\<close> closed_map_def closedin_prod_Times_iff)
+ with False ne show ?thesis
+ by (simp add: image_paired_Times closedin_Times closedin_prod_Times_iff)
+ qed auto
+ qed
+ moreover
+ have "closed_map Y Y' g"
+ unfolding closed_map_def
+ proof (intro strip)
+ fix C
+ assume "closedin Y C"
+ show "closedin Y' (g ` C)"
+ proof (cases "C={}")
+ case False
+ with assms have "closedin (prod_topology X' Y') ((\<lambda>(x,y). (f x, g y)) ` (topspace X \<times> C))"
+ by (simp add: \<open>closedin Y C\<close> closed_map_def closedin_prod_Times_iff)
+ with False ne show ?thesis
+ by (simp add: image_paired_Times closedin_Times closedin_prod_Times_iff)
+ qed auto
+ qed
+ ultimately show ?thesis
+ by (auto simp: False)
+qed auto
+
subsection \<open>Continuity\<close>
lemma continuous_map_pairwise:
"continuous_map Z (prod_topology X Y) f \<longleftrightarrow> continuous_map Z X (fst \<circ> f) \<and> continuous_map Z Y (snd \<circ> f)"
(is "?lhs = ?rhs")
proof -
let ?g = "fst \<circ> f" and ?h = "snd \<circ> f"
have f: "f x = (?g x, ?h x)" for x
by auto
show ?thesis
proof (cases "(\<forall>x \<in> topspace Z. ?g x \<in> topspace X) \<and> (\<forall>x \<in> topspace Z. ?h x \<in> topspace Y)")
case True
show ?thesis
proof safe
assume "continuous_map Z (prod_topology X Y) f"
then have "openin Z {x \<in> topspace Z. fst (f x) \<in> U}" if "openin X U" for U
unfolding continuous_map_def using True that
apply clarify
apply (drule_tac x="U \<times> topspace Y" in spec)
by (simp add: openin_prod_Times_iff mem_Times_iff cong: conj_cong)
with True show "continuous_map Z X (fst \<circ> f)"
by (auto simp: continuous_map_def)
next
assume "continuous_map Z (prod_topology X Y) f"
then have "openin Z {x \<in> topspace Z. snd (f x) \<in> V}" if "openin Y V" for V
unfolding continuous_map_def using True that
apply clarify
apply (drule_tac x="topspace X \<times> V" in spec)
by (simp add: openin_prod_Times_iff mem_Times_iff cong: conj_cong)
with True show "continuous_map Z Y (snd \<circ> f)"
by (auto simp: continuous_map_def)
next
assume Z: "continuous_map Z X (fst \<circ> f)" "continuous_map Z Y (snd \<circ> f)"
have *: "openin Z {x \<in> topspace Z. f x \<in> W}"
if "\<And>w. w \<in> W \<Longrightarrow> \<exists>U V. openin X U \<and> openin Y V \<and> w \<in> U \<times> V \<and> U \<times> V \<subseteq> W" for W
proof (subst openin_subopen, clarify)
fix x :: "'a"
assume "x \<in> topspace Z" and "f x \<in> W"
with that [OF \<open>f x \<in> W\<close>]
obtain U V where UV: "openin X U" "openin Y V" "f x \<in> U \<times> V" "U \<times> V \<subseteq> W"
by auto
with Z UV show "\<exists>T. openin Z T \<and> x \<in> T \<and> T \<subseteq> {x \<in> topspace Z. f x \<in> W}"
apply (rule_tac x="{x \<in> topspace Z. ?g x \<in> U} \<inter> {x \<in> topspace Z. ?h x \<in> V}" in exI)
apply (auto simp: \<open>x \<in> topspace Z\<close> continuous_map_def)
done
qed
show "continuous_map Z (prod_topology X Y) f"
using True by (simp add: continuous_map_def openin_prod_topology_alt mem_Times_iff *)
qed
qed (auto simp: continuous_map_def)
qed
lemma continuous_map_paired:
"continuous_map Z (prod_topology X Y) (\<lambda>x. (f x,g x)) \<longleftrightarrow> continuous_map Z X f \<and> continuous_map Z Y g"
by (simp add: continuous_map_pairwise o_def)
lemma continuous_map_pairedI [continuous_intros]:
"\<lbrakk>continuous_map Z X f; continuous_map Z Y g\<rbrakk> \<Longrightarrow> continuous_map Z (prod_topology X Y) (\<lambda>x. (f x,g x))"
by (simp add: continuous_map_pairwise o_def)
lemma continuous_map_fst [continuous_intros]: "continuous_map (prod_topology X Y) X fst"
using continuous_map_pairwise [of "prod_topology X Y" X Y id]
by (simp add: continuous_map_pairwise)
lemma continuous_map_snd [continuous_intros]: "continuous_map (prod_topology X Y) Y snd"
using continuous_map_pairwise [of "prod_topology X Y" X Y id]
by (simp add: continuous_map_pairwise)
lemma continuous_map_fst_of [continuous_intros]:
"continuous_map Z (prod_topology X Y) f \<Longrightarrow> continuous_map Z X (fst \<circ> f)"
by (simp add: continuous_map_pairwise)
lemma continuous_map_snd_of [continuous_intros]:
"continuous_map Z (prod_topology X Y) f \<Longrightarrow> continuous_map Z Y (snd \<circ> f)"
by (simp add: continuous_map_pairwise)
lemma continuous_map_prod_fst:
"i \<in> I \<Longrightarrow> continuous_map (prod_topology (product_topology (\<lambda>i. Y) I) X) Y (\<lambda>x. fst x i)"
using continuous_map_componentwise_UNIV continuous_map_fst by fastforce
lemma continuous_map_prod_snd:
"i \<in> I \<Longrightarrow> continuous_map (prod_topology X (product_topology (\<lambda>i. Y) I)) Y (\<lambda>x. snd x i)"
using continuous_map_componentwise_UNIV continuous_map_snd by fastforce
lemma continuous_map_if_iff [simp]: "continuous_map X Y (\<lambda>x. if P then f x else g x) \<longleftrightarrow> continuous_map X Y (if P then f else g)"
by simp
lemma continuous_map_if [continuous_intros]: "\<lbrakk>P \<Longrightarrow> continuous_map X Y f; ~P \<Longrightarrow> continuous_map X Y g\<rbrakk>
\<Longrightarrow> continuous_map X Y (\<lambda>x. if P then f x else g x)"
by simp
lemma continuous_map_subtopology_fst [continuous_intros]: "continuous_map (subtopology (prod_topology X Y) Z) X fst"
using continuous_map_from_subtopology continuous_map_fst by force
lemma continuous_map_subtopology_snd [continuous_intros]: "continuous_map (subtopology (prod_topology X Y) Z) Y snd"
using continuous_map_from_subtopology continuous_map_snd by force
lemma quotient_map_fst [simp]:
"quotient_map(prod_topology X Y) X fst \<longleftrightarrow> (topspace Y = {} \<longrightarrow> topspace X = {})"
by (auto simp: continuous_open_quotient_map open_map_fst continuous_map_fst)
lemma quotient_map_snd [simp]:
"quotient_map(prod_topology X Y) Y snd \<longleftrightarrow> (topspace X = {} \<longrightarrow> topspace Y = {})"
by (auto simp: continuous_open_quotient_map open_map_snd continuous_map_snd)
lemma retraction_map_fst:
"retraction_map (prod_topology X Y) X fst \<longleftrightarrow> (topspace Y = {} \<longrightarrow> topspace X = {})"
proof (cases "topspace Y = {}")
case True
then show ?thesis
using continuous_map_image_subset_topspace
by (fastforce simp: retraction_map_def retraction_maps_def continuous_map_fst continuous_map_on_empty)
next
case False
have "\<exists>g. continuous_map X (prod_topology X Y) g \<and> (\<forall>x\<in>topspace X. fst (g x) = x)"
if y: "y \<in> topspace Y" for y
by (rule_tac x="\<lambda>x. (x,y)" in exI) (auto simp: y continuous_map_paired)
with False have "retraction_map (prod_topology X Y) X fst"
by (fastforce simp: retraction_map_def retraction_maps_def continuous_map_fst)
with False show ?thesis
by simp
qed
lemma retraction_map_snd:
"retraction_map (prod_topology X Y) Y snd \<longleftrightarrow> (topspace X = {} \<longrightarrow> topspace Y = {})"
proof (cases "topspace X = {}")
case True
then show ?thesis
using continuous_map_image_subset_topspace
by (fastforce simp: retraction_map_def retraction_maps_def continuous_map_fst continuous_map_on_empty)
next
case False
have "\<exists>g. continuous_map Y (prod_topology X Y) g \<and> (\<forall>y\<in>topspace Y. snd (g y) = y)"
if x: "x \<in> topspace X" for x
by (rule_tac x="\<lambda>y. (x,y)" in exI) (auto simp: x continuous_map_paired)
with False have "retraction_map (prod_topology X Y) Y snd"
by (fastforce simp: retraction_map_def retraction_maps_def continuous_map_snd)
with False show ?thesis
by simp
qed
lemma continuous_map_of_fst:
"continuous_map (prod_topology X Y) Z (f \<circ> fst) \<longleftrightarrow> topspace Y = {} \<or> continuous_map X Z f"
proof (cases "topspace Y = {}")
case True
then show ?thesis
by (simp add: continuous_map_on_empty)
next
case False
then show ?thesis
by (simp add: continuous_compose_quotient_map_eq)
qed
lemma continuous_map_of_snd:
"continuous_map (prod_topology X Y) Z (f \<circ> snd) \<longleftrightarrow> topspace X = {} \<or> continuous_map Y Z f"
proof (cases "topspace X = {}")
case True
then show ?thesis
by (simp add: continuous_map_on_empty)
next
case False
then show ?thesis
by (simp add: continuous_compose_quotient_map_eq)
qed
lemma continuous_map_prod_top:
"continuous_map (prod_topology X Y) (prod_topology X' Y') (\<lambda>(x,y). (f x, g y)) \<longleftrightarrow>
topspace (prod_topology X Y) = {} \<or> continuous_map X X' f \<and> continuous_map Y Y' g"
proof (cases "topspace (prod_topology X Y) = {}")
case True
then show ?thesis
by (simp add: continuous_map_on_empty)
next
case False
then show ?thesis
by (simp add: continuous_map_paired case_prod_unfold continuous_map_of_fst [unfolded o_def] continuous_map_of_snd [unfolded o_def])
qed
lemma in_prod_topology_closure_of:
assumes "z \<in> (prod_topology X Y) closure_of S"
shows "fst z \<in> X closure_of (fst ` S)" "snd z \<in> Y closure_of (snd ` S)"
using assms continuous_map_eq_image_closure_subset continuous_map_fst apply fastforce
using assms continuous_map_eq_image_closure_subset continuous_map_snd apply fastforce
done
proposition compact_space_prod_topology:
"compact_space(prod_topology X Y) \<longleftrightarrow> topspace(prod_topology X Y) = {} \<or> compact_space X \<and> compact_space Y"
proof (cases "topspace(prod_topology X Y) = {}")
case True
then show ?thesis
using compact_space_topspace_empty by blast
next
case False
then have non_mt: "topspace X \<noteq> {}" "topspace Y \<noteq> {}"
by auto
have "compact_space X" "compact_space Y" if "compact_space(prod_topology X Y)"
proof -
have "compactin X (fst ` (topspace X \<times> topspace Y))"
by (metis compact_space_def continuous_map_fst image_compactin that topspace_prod_topology)
moreover
have "compactin Y (snd ` (topspace X \<times> topspace Y))"
by (metis compact_space_def continuous_map_snd image_compactin that topspace_prod_topology)
ultimately show "compact_space X" "compact_space Y"
by (simp_all add: non_mt compact_space_def)
qed
moreover
define \<X> where "\<X> \<equiv> (\<lambda>V. topspace X \<times> V) ` Collect (openin Y)"
define \<Y> where "\<Y> \<equiv> (\<lambda>U. U \<times> topspace Y) ` Collect (openin X)"
have "compact_space(prod_topology X Y)" if "compact_space X" "compact_space Y"
proof (rule Alexander_subbase_alt)
show toptop: "topspace X \<times> topspace Y \<subseteq> \<Union>(\<X> \<union> \<Y>)"
unfolding \<X>_def \<Y>_def by auto
fix \<C> :: "('a \<times> 'b) set set"
assume \<C>: "\<C> \<subseteq> \<X> \<union> \<Y>" "topspace X \<times> topspace Y \<subseteq> \<Union>\<C>"
then obtain \<X>' \<Y>' where XY: "\<X>' \<subseteq> \<X>" "\<Y>' \<subseteq> \<Y>" and \<C>eq: "\<C> = \<X>' \<union> \<Y>'"
using subset_UnE by metis
then have sub: "topspace X \<times> topspace Y \<subseteq> \<Union>(\<X>' \<union> \<Y>')"
using \<C> by simp
obtain \<U> \<V> where \<U>: "\<And>U. U \<in> \<U> \<Longrightarrow> openin X U" "\<Y>' = (\<lambda>U. U \<times> topspace Y) ` \<U>"
and \<V>: "\<And>V. V \<in> \<V> \<Longrightarrow> openin Y V" "\<X>' = (\<lambda>V. topspace X \<times> V) ` \<V>"
using XY by (clarsimp simp add: \<X>_def \<Y>_def subset_image_iff) (force simp add: subset_iff)
have "\<exists>\<D>. finite \<D> \<and> \<D> \<subseteq> \<X>' \<union> \<Y>' \<and> topspace X \<times> topspace Y \<subseteq> \<Union> \<D>"
proof -
have "topspace X \<subseteq> \<Union>\<U> \<or> topspace Y \<subseteq> \<Union>\<V>"
using \<U> \<V> \<C> \<C>eq by auto
then have *: "\<exists>\<D>. finite \<D> \<and>
(\<forall>x \<in> \<D>. x \<in> (\<lambda>V. topspace X \<times> V) ` \<V> \<or> x \<in> (\<lambda>U. U \<times> topspace Y) ` \<U>) \<and>
(topspace X \<times> topspace Y \<subseteq> \<Union>\<D>)"
proof
assume "topspace X \<subseteq> \<Union>\<U>"
with \<open>compact_space X\<close> \<U> obtain \<F> where "finite \<F>" "\<F> \<subseteq> \<U>" "topspace X \<subseteq> \<Union>\<F>"
by (meson compact_space_alt)
with that show ?thesis
by (rule_tac x="(\<lambda>D. D \<times> topspace Y) ` \<F>" in exI) auto
next
assume "topspace Y \<subseteq> \<Union>\<V>"
with \<open>compact_space Y\<close> \<V> obtain \<F> where "finite \<F>" "\<F> \<subseteq> \<V>" "topspace Y \<subseteq> \<Union>\<F>"
by (meson compact_space_alt)
with that show ?thesis
by (rule_tac x="(\<lambda>C. topspace X \<times> C) ` \<F>" in exI) auto
qed
then show ?thesis
using that \<U> \<V> by blast
qed
then show "\<exists>\<D>. finite \<D> \<and> \<D> \<subseteq> \<C> \<and> topspace X \<times> topspace Y \<subseteq> \<Union> \<D>"
using \<C> \<C>eq by blast
next
have "(finite intersection_of (\<lambda>x. x \<in> \<X> \<or> x \<in> \<Y>) relative_to topspace X \<times> topspace Y)
= (\<lambda>U. \<exists>S T. U = S \<times> T \<and> openin X S \<and> openin Y T)"
(is "?lhs = ?rhs")
proof -
have "?rhs U" if "?lhs U" for U
proof -
have "topspace X \<times> topspace Y \<inter> \<Inter> T \<in> {A \<times> B |A B. A \<in> Collect (openin X) \<and> B \<in> Collect (openin Y)}"
if "finite T" "T \<subseteq> \<X> \<union> \<Y>" for T
using that
proof induction
case (insert B \<B>)
then show ?case
unfolding \<X>_def \<Y>_def
apply (simp add: Int_ac subset_eq image_def)
apply (metis (no_types) openin_Int openin_topspace Times_Int_Times)
done
qed auto
then show ?thesis
using that
by (auto simp: subset_eq elim!: relative_toE intersection_ofE)
qed
moreover
have "?lhs Z" if Z: "?rhs Z" for Z
proof -
obtain U V where "Z = U \<times> V" "openin X U" "openin Y V"
using Z by blast
then have UV: "U \<times> V = (topspace X \<times> topspace Y) \<inter> (U \<times> V)"
by (simp add: Sigma_mono inf_absorb2 openin_subset)
moreover
have "?lhs ((topspace X \<times> topspace Y) \<inter> (U \<times> V))"
proof (rule relative_to_inc)
show "(finite intersection_of (\<lambda>x. x \<in> \<X> \<or> x \<in> \<Y>)) (U \<times> V)"
apply (simp add: intersection_of_def \<X>_def \<Y>_def)
apply (rule_tac x="{(U \<times> topspace Y),(topspace X \<times> V)}" in exI)
using \<open>openin X U\<close> \<open>openin Y V\<close> openin_subset UV apply (fastforce simp add:)
done
qed
ultimately show ?thesis
using \<open>Z = U \<times> V\<close> by auto
qed
ultimately show ?thesis
by meson
qed
then show "topology (arbitrary union_of (finite intersection_of (\<lambda>x. x \<in> \<X> \<union> \<Y>)
relative_to (topspace X \<times> topspace Y))) =
prod_topology X Y"
by (simp add: prod_topology_def)
qed
ultimately show ?thesis
using False by blast
qed
lemma compactin_Times:
"compactin (prod_topology X Y) (S \<times> T) \<longleftrightarrow> S = {} \<or> T = {} \<or> compactin X S \<and> compactin Y T"
by (auto simp: compactin_subspace subtopology_Times compact_space_prod_topology)
subsection\<open>Homeomorphic maps\<close>
lemma homeomorphic_maps_prod:
"homeomorphic_maps (prod_topology X Y) (prod_topology X' Y') (\<lambda>(x,y). (f x, g y)) (\<lambda>(x,y). (f' x, g' y)) \<longleftrightarrow>
topspace(prod_topology X Y) = {} \<and>
topspace(prod_topology X' Y') = {} \<or>
homeomorphic_maps X X' f f' \<and>
homeomorphic_maps Y Y' g g'"
unfolding homeomorphic_maps_def continuous_map_prod_top
by (auto simp: continuous_map_def homeomorphic_maps_def continuous_map_prod_top)
lemma homeomorphic_maps_swap:
"homeomorphic_maps (prod_topology X Y) (prod_topology Y X)
(\<lambda>(x,y). (y,x)) (\<lambda>(y,x). (x,y))"
by (auto simp: homeomorphic_maps_def case_prod_unfold continuous_map_fst continuous_map_pairedI continuous_map_snd)
lemma homeomorphic_map_swap:
"homeomorphic_map (prod_topology X Y) (prod_topology Y X) (\<lambda>(x,y). (y,x))"
using homeomorphic_map_maps homeomorphic_maps_swap by metis
lemma homeomorphic_space_prod_topology_swap:
"(prod_topology X Y) homeomorphic_space (prod_topology Y X)"
using homeomorphic_map_swap homeomorphic_space by blast
lemma embedding_map_graph:
"embedding_map X (prod_topology X Y) (\<lambda>x. (x, f x)) \<longleftrightarrow> continuous_map X Y f"
(is "?lhs = ?rhs")
proof
assume L: ?lhs
have "snd \<circ> (\<lambda>x. (x, f x)) = f"
by force
moreover have "continuous_map X Y (snd \<circ> (\<lambda>x. (x, f x)))"
using L unfolding embedding_map_def
by (meson continuous_map_in_subtopology continuous_map_snd_of homeomorphic_imp_continuous_map)
ultimately show ?rhs
by simp
next
assume R: ?rhs
then show ?lhs
unfolding homeomorphic_map_maps embedding_map_def homeomorphic_maps_def
by (rule_tac x=fst in exI)
(auto simp: continuous_map_in_subtopology continuous_map_paired continuous_map_from_subtopology
continuous_map_fst)
qed
lemma homeomorphic_space_prod_topology:
"\<lbrakk>X homeomorphic_space X''; Y homeomorphic_space Y'\<rbrakk>
\<Longrightarrow> prod_topology X Y homeomorphic_space prod_topology X'' Y'"
using homeomorphic_maps_prod unfolding homeomorphic_space_def by blast
lemma prod_topology_homeomorphic_space_left:
"topspace Y = {b} \<Longrightarrow> prod_topology X Y homeomorphic_space X"
unfolding homeomorphic_space_def
by (rule_tac x=fst in exI) (simp add: homeomorphic_map_def inj_on_def flip: homeomorphic_map_maps)
lemma prod_topology_homeomorphic_space_right:
"topspace X = {a} \<Longrightarrow> prod_topology X Y homeomorphic_space Y"
unfolding homeomorphic_space_def
by (rule_tac x=snd in exI) (simp add: homeomorphic_map_def inj_on_def flip: homeomorphic_map_maps)
lemma homeomorphic_space_prod_topology_sing1:
"b \<in> topspace Y \<Longrightarrow> X homeomorphic_space (prod_topology X (subtopology Y {b}))"
by (metis empty_subsetI homeomorphic_space_sym inf.absorb_iff2 insert_subset prod_topology_homeomorphic_space_left topspace_subtopology)
lemma homeomorphic_space_prod_topology_sing2:
"a \<in> topspace X \<Longrightarrow> Y homeomorphic_space (prod_topology (subtopology X {a}) Y)"
by (metis empty_subsetI homeomorphic_space_sym inf.absorb_iff2 insert_subset prod_topology_homeomorphic_space_right topspace_subtopology)
lemma topological_property_of_prod_component:
assumes major: "P(prod_topology X Y)"
and X: "\<And>x. \<lbrakk>x \<in> topspace X; P(prod_topology X Y)\<rbrakk> \<Longrightarrow> P(subtopology (prod_topology X Y) ({x} \<times> topspace Y))"
and Y: "\<And>y. \<lbrakk>y \<in> topspace Y; P(prod_topology X Y)\<rbrakk> \<Longrightarrow> P(subtopology (prod_topology X Y) (topspace X \<times> {y}))"
and PQ: "\<And>X X'. X homeomorphic_space X' \<Longrightarrow> (P X \<longleftrightarrow> Q X')"
and PR: "\<And>X X'. X homeomorphic_space X' \<Longrightarrow> (P X \<longleftrightarrow> R X')"
shows "topspace(prod_topology X Y) = {} \<or> Q X \<and> R Y"
proof -
have "Q X \<and> R Y" if "topspace(prod_topology X Y) \<noteq> {}"
proof -
from that obtain a b where a: "a \<in> topspace X" and b: "b \<in> topspace Y"
by force
show ?thesis
using X [OF a major] and Y [OF b major] homeomorphic_space_prod_topology_sing1 [OF b, of X] homeomorphic_space_prod_topology_sing2 [OF a, of Y]
by (simp add: subtopology_Times) (meson PQ PR homeomorphic_space_prod_topology_sing2 homeomorphic_space_sym)
qed
then show ?thesis by metis
qed
lemma limitin_pairwise:
"limitin (prod_topology X Y) f l F \<longleftrightarrow> limitin X (fst \<circ> f) (fst l) F \<and> limitin Y (snd \<circ> f) (snd l) F"
(is "?lhs = ?rhs")
proof
assume ?lhs
then obtain a b where ev: "\<And>U. \<lbrakk>(a,b) \<in> U; openin (prod_topology X Y) U\<rbrakk> \<Longrightarrow> \<forall>\<^sub>F x in F. f x \<in> U"
and a: "a \<in> topspace X" and b: "b \<in> topspace Y" and l: "l = (a,b)"
by (auto simp: limitin_def)
moreover have "\<forall>\<^sub>F x in F. fst (f x) \<in> U" if "openin X U" "a \<in> U" for U
proof -
have "\<forall>\<^sub>F c in F. f c \<in> U \<times> topspace Y"
using b that ev [of "U \<times> topspace Y"] by (auto simp: openin_prod_topology_alt)
then show ?thesis
by (rule eventually_mono) (metis (mono_tags, lifting) SigmaE2 prod.collapse)
qed
moreover have "\<forall>\<^sub>F x in F. snd (f x) \<in> U" if "openin Y U" "b \<in> U" for U
proof -
have "\<forall>\<^sub>F c in F. f c \<in> topspace X \<times> U"
using a that ev [of "topspace X \<times> U"] by (auto simp: openin_prod_topology_alt)
then show ?thesis
by (rule eventually_mono) (metis (mono_tags, lifting) SigmaE2 prod.collapse)
qed
ultimately show ?rhs
by (simp add: limitin_def)
next
have "limitin (prod_topology X Y) f (a,b) F"
if "limitin X (fst \<circ> f) a F" "limitin Y (snd \<circ> f) b F" for a b
using that
proof (clarsimp simp: limitin_def)
fix Z :: "('a \<times> 'b) set"
assume a: "a \<in> topspace X" "\<forall>U. openin X U \<and> a \<in> U \<longrightarrow> (\<forall>\<^sub>F x in F. fst (f x) \<in> U)"
and b: "b \<in> topspace Y" "\<forall>U. openin Y U \<and> b \<in> U \<longrightarrow> (\<forall>\<^sub>F x in F. snd (f x) \<in> U)"
and Z: "openin (prod_topology X Y) Z" "(a, b) \<in> Z"
then obtain U V where "openin X U" "openin Y V" "a \<in> U" "b \<in> V" "U \<times> V \<subseteq> Z"
using Z by (force simp: openin_prod_topology_alt)
then have "\<forall>\<^sub>F x in F. fst (f x) \<in> U" "\<forall>\<^sub>F x in F. snd (f x) \<in> V"
by (simp_all add: a b)
then show "\<forall>\<^sub>F x in F. f x \<in> Z"
by (rule eventually_elim2) (use \<open>U \<times> V \<subseteq> Z\<close> subsetD in auto)
qed
then show "?rhs \<Longrightarrow> ?lhs"
by (metis prod.collapse)
qed
proposition connected_space_prod_topology:
"connected_space(prod_topology X Y) \<longleftrightarrow>
topspace(prod_topology X Y) = {} \<or> connected_space X \<and> connected_space Y" (is "?lhs=?rhs")
proof (cases "topspace(prod_topology X Y) = {}")
case True
then show ?thesis
using connected_space_topspace_empty by blast
next
case False
then have nonempty: "topspace X \<noteq> {}" "topspace Y \<noteq> {}"
by force+
show ?thesis
proof
assume ?lhs
then show ?rhs
by (meson connected_space_quotient_map_image nonempty quotient_map_fst quotient_map_snd)
next
assume ?rhs
then have conX: "connected_space X" and conY: "connected_space Y"
using False by blast+
have False
if "openin (prod_topology X Y) U" and "openin (prod_topology X Y) V"
and UV: "topspace X \<times> topspace Y \<subseteq> U \<union> V" "U \<inter> V = {}"
and "U \<noteq> {}" and "V \<noteq> {}"
for U V
proof -
have Usub: "U \<subseteq> topspace X \<times> topspace Y" and Vsub: "V \<subseteq> topspace X \<times> topspace Y"
using that by (metis openin_subset topspace_prod_topology)+
obtain a b where ab: "(a,b) \<in> U" and a: "a \<in> topspace X" and b: "b \<in> topspace Y"
using \<open>U \<noteq> {}\<close> Usub by auto
have "\<not> topspace X \<times> topspace Y \<subseteq> U"
using Usub Vsub \<open>U \<inter> V = {}\<close> \<open>V \<noteq> {}\<close> by auto
then obtain x y where x: "x \<in> topspace X" and y: "y \<in> topspace Y" and "(x,y) \<notin> U"
by blast
have oX: "openin X {x \<in> topspace X. (x,y) \<in> U}" "openin X {x \<in> topspace X. (x,y) \<in> V}"
and oY: "openin Y {y \<in> topspace Y. (a,y) \<in> U}" "openin Y {y \<in> topspace Y. (a,y) \<in> V}"
by (force intro: openin_continuous_map_preimage [where Y = "prod_topology X Y"]
simp: that continuous_map_pairwise o_def x y a)+
have 1: "topspace Y \<subseteq> {y \<in> topspace Y. (a,y) \<in> U} \<union> {y \<in> topspace Y. (a,y) \<in> V}"
using a that(3) by auto
have 2: "{y \<in> topspace Y. (a,y) \<in> U} \<inter> {y \<in> topspace Y. (a,y) \<in> V} = {}"
using that(4) by auto
have 3: "{y \<in> topspace Y. (a,y) \<in> U} \<noteq> {}"
using ab b by auto
have 4: "{y \<in> topspace Y. (a,y) \<in> V} \<noteq> {}"
proof -
show ?thesis
using connected_spaceD [OF conX oX] UV \<open>(x,y) \<notin> U\<close> a x y
disjoint_iff_not_equal by blast
qed
show ?thesis
using connected_spaceD [OF conY oY 1 2 3 4] by auto
qed
then show ?lhs
unfolding connected_space_def topspace_prod_topology by blast
qed
qed
lemma connectedin_Times:
"connectedin (prod_topology X Y) (S \<times> T) \<longleftrightarrow>
S = {} \<or> T = {} \<or> connectedin X S \<and> connectedin Y T"
by (force simp: connectedin_def subtopology_Times connected_space_prod_topology)
end
diff --git a/src/HOL/Analysis/T1_Spaces.thy b/src/HOL/Analysis/T1_Spaces.thy
--- a/src/HOL/Analysis/T1_Spaces.thy
+++ b/src/HOL/Analysis/T1_Spaces.thy
@@ -1,750 +1,756 @@
section\<open>T1 and Hausdorff spaces\<close>
theory T1_Spaces
imports Product_Topology
begin
section\<open>T1 spaces with equivalences to many naturally "nice" properties. \<close>
definition t1_space where
"t1_space X \<equiv> \<forall>x \<in> topspace X. \<forall>y \<in> topspace X. x\<noteq>y \<longrightarrow> (\<exists>U. openin X U \<and> x \<in> U \<and> y \<notin> U)"
lemma t1_space_expansive:
"\<lbrakk>topspace Y = topspace X; \<And>U. openin X U \<Longrightarrow> openin Y U\<rbrakk> \<Longrightarrow> t1_space X \<Longrightarrow> t1_space Y"
by (metis t1_space_def)
lemma t1_space_alt:
"t1_space X \<longleftrightarrow> (\<forall>x \<in> topspace X. \<forall>y \<in> topspace X. x\<noteq>y \<longrightarrow> (\<exists>U. closedin X U \<and> x \<in> U \<and> y \<notin> U))"
by (metis DiffE DiffI closedin_def openin_closedin_eq t1_space_def)
lemma t1_space_empty: "topspace X = {} \<Longrightarrow> t1_space X"
by (simp add: t1_space_def)
lemma t1_space_derived_set_of_singleton:
"t1_space X \<longleftrightarrow> (\<forall>x \<in> topspace X. X derived_set_of {x} = {})"
apply (simp add: t1_space_def derived_set_of_def, safe)
apply (metis openin_topspace)
by force
lemma t1_space_derived_set_of_finite:
"t1_space X \<longleftrightarrow> (\<forall>S. finite S \<longrightarrow> X derived_set_of S = {})"
proof (intro iffI allI impI)
fix S :: "'a set"
assume "finite S"
then have fin: "finite ((\<lambda>x. {x}) ` (topspace X \<inter> S))"
by blast
assume "t1_space X"
then have "X derived_set_of (\<Union>x \<in> topspace X \<inter> S. {x}) = {}"
unfolding derived_set_of_Union [OF fin]
by (auto simp: t1_space_derived_set_of_singleton)
then have "X derived_set_of (topspace X \<inter> S) = {}"
by simp
then show "X derived_set_of S = {}"
by simp
qed (auto simp: t1_space_derived_set_of_singleton)
lemma t1_space_closedin_singleton:
"t1_space X \<longleftrightarrow> (\<forall>x \<in> topspace X. closedin X {x})"
apply (rule iffI)
apply (simp add: closedin_contains_derived_set t1_space_derived_set_of_singleton)
using t1_space_alt by auto
+lemma continuous_closed_imp_proper_map:
+ "\<lbrakk>compact_space X; t1_space Y; continuous_map X Y f; closed_map X Y f\<rbrakk> \<Longrightarrow> proper_map X Y f"
+ unfolding proper_map_def
+ by (smt (verit) closedin_compact_space closedin_continuous_map_preimage
+ Collect_cong singleton_iff t1_space_closedin_singleton)
+
lemma t1_space_euclidean: "t1_space (euclidean :: 'a::metric_space topology)"
by (simp add: t1_space_closedin_singleton)
lemma closedin_t1_singleton:
"\<lbrakk>t1_space X; a \<in> topspace X\<rbrakk> \<Longrightarrow> closedin X {a}"
by (simp add: t1_space_closedin_singleton)
lemma t1_space_closedin_finite:
"t1_space X \<longleftrightarrow> (\<forall>S. finite S \<and> S \<subseteq> topspace X \<longrightarrow> closedin X S)"
apply (rule iffI)
apply (simp add: closedin_contains_derived_set t1_space_derived_set_of_finite)
by (simp add: t1_space_closedin_singleton)
lemma closure_of_singleton:
"t1_space X \<Longrightarrow> X closure_of {a} = (if a \<in> topspace X then {a} else {})"
by (simp add: closure_of_eq t1_space_closedin_singleton closure_of_eq_empty_gen)
lemma separated_in_singleton:
assumes "t1_space X"
shows "separatedin X {a} S \<longleftrightarrow> a \<in> topspace X \<and> S \<subseteq> topspace X \<and> (a \<notin> X closure_of S)"
"separatedin X S {a} \<longleftrightarrow> a \<in> topspace X \<and> S \<subseteq> topspace X \<and> (a \<notin> X closure_of S)"
unfolding separatedin_def
using assms closure_of closure_of_singleton by fastforce+
lemma t1_space_openin_delete:
"t1_space X \<longleftrightarrow> (\<forall>U x. openin X U \<and> x \<in> U \<longrightarrow> openin X (U - {x}))"
apply (rule iffI)
apply (meson closedin_t1_singleton in_mono openin_diff openin_subset)
by (simp add: closedin_def t1_space_closedin_singleton)
lemma t1_space_openin_delete_alt:
"t1_space X \<longleftrightarrow> (\<forall>U x. openin X U \<longrightarrow> openin X (U - {x}))"
by (metis Diff_empty Diff_insert0 t1_space_openin_delete)
lemma t1_space_singleton_Inter_open:
"t1_space X \<longleftrightarrow> (\<forall>x \<in> topspace X. \<Inter>{U. openin X U \<and> x \<in> U} = {x})" (is "?P=?Q")
and t1_space_Inter_open_supersets:
"t1_space X \<longleftrightarrow> (\<forall>S. S \<subseteq> topspace X \<longrightarrow> \<Inter>{U. openin X U \<and> S \<subseteq> U} = S)" (is "?P=?R")
proof -
have "?R \<Longrightarrow> ?Q"
apply clarify
apply (drule_tac x="{x}" in spec, simp)
done
moreover have "?Q \<Longrightarrow> ?P"
apply (clarsimp simp add: t1_space_def)
apply (drule_tac x=x in bspec)
apply (simp_all add: set_eq_iff)
by (metis (no_types, lifting))
moreover have "?P \<Longrightarrow> ?R"
proof (clarsimp simp add: t1_space_closedin_singleton, rule subset_antisym)
fix S
assume S: "\<forall>x\<in>topspace X. closedin X {x}" "S \<subseteq> topspace X"
then show "\<Inter> {U. openin X U \<and> S \<subseteq> U} \<subseteq> S"
apply clarsimp
by (metis Diff_insert_absorb Set.set_insert closedin_def openin_topspace subset_insert)
qed force
ultimately show "?P=?Q" "?P=?R"
by auto
qed
lemma t1_space_derived_set_of_infinite_openin:
"t1_space X \<longleftrightarrow>
(\<forall>S. X derived_set_of S =
{x \<in> topspace X. \<forall>U. x \<in> U \<and> openin X U \<longrightarrow> infinite(S \<inter> U)})"
(is "_ = ?rhs")
proof
assume "t1_space X"
show ?rhs
proof safe
fix S x U
assume "x \<in> X derived_set_of S" "x \<in> U" "openin X U" "finite (S \<inter> U)"
with \<open>t1_space X\<close> show "False"
apply (simp add: t1_space_derived_set_of_finite)
by (metis IntI empty_iff empty_subsetI inf_commute openin_Int_derived_set_of_subset subset_antisym)
next
fix S x
have eq: "(\<exists>y. (y \<noteq> x) \<and> y \<in> S \<and> y \<in> T) \<longleftrightarrow> ~((S \<inter> T) \<subseteq> {x})" for x S T
by blast
assume "x \<in> topspace X" "\<forall>U. x \<in> U \<and> openin X U \<longrightarrow> infinite (S \<inter> U)"
then show "x \<in> X derived_set_of S"
apply (clarsimp simp add: derived_set_of_def eq)
by (meson finite.emptyI finite.insertI finite_subset)
qed (auto simp: in_derived_set_of)
qed (auto simp: t1_space_derived_set_of_singleton)
lemma finite_t1_space_imp_discrete_topology:
"\<lbrakk>topspace X = U; finite U; t1_space X\<rbrakk> \<Longrightarrow> X = discrete_topology U"
by (metis discrete_topology_unique_derived_set t1_space_derived_set_of_finite)
lemma t1_space_subtopology: "t1_space X \<Longrightarrow> t1_space(subtopology X U)"
by (simp add: derived_set_of_subtopology t1_space_derived_set_of_finite)
lemma closedin_derived_set_of_gen:
"t1_space X \<Longrightarrow> closedin X (X derived_set_of S)"
apply (clarsimp simp add: in_derived_set_of closedin_contains_derived_set derived_set_of_subset_topspace)
by (metis DiffD2 insert_Diff insert_iff t1_space_openin_delete)
lemma derived_set_of_derived_set_subset_gen:
"t1_space X \<Longrightarrow> X derived_set_of (X derived_set_of S) \<subseteq> X derived_set_of S"
by (meson closedin_contains_derived_set closedin_derived_set_of_gen)
lemma subtopology_eq_discrete_topology_gen_finite:
"\<lbrakk>t1_space X; finite S\<rbrakk> \<Longrightarrow> subtopology X S = discrete_topology(topspace X \<inter> S)"
by (simp add: subtopology_eq_discrete_topology_gen t1_space_derived_set_of_finite)
lemma subtopology_eq_discrete_topology_finite:
"\<lbrakk>t1_space X; S \<subseteq> topspace X; finite S\<rbrakk>
\<Longrightarrow> subtopology X S = discrete_topology S"
by (simp add: subtopology_eq_discrete_topology_eq t1_space_derived_set_of_finite)
lemma t1_space_closed_map_image:
"\<lbrakk>closed_map X Y f; f ` (topspace X) = topspace Y; t1_space X\<rbrakk> \<Longrightarrow> t1_space Y"
by (metis closed_map_def finite_subset_image t1_space_closedin_finite)
lemma homeomorphic_t1_space: "X homeomorphic_space Y \<Longrightarrow> (t1_space X \<longleftrightarrow> t1_space Y)"
apply (clarsimp simp add: homeomorphic_space_def)
by (meson homeomorphic_eq_everything_map homeomorphic_maps_map t1_space_closed_map_image)
proposition t1_space_product_topology:
"t1_space (product_topology X I)
\<longleftrightarrow> topspace(product_topology X I) = {} \<or> (\<forall>i \<in> I. t1_space (X i))"
proof (cases "topspace(product_topology X I) = {}")
case True
then show ?thesis
using True t1_space_empty by blast
next
case False
then obtain f where f: "f \<in> (\<Pi>\<^sub>E i\<in>I. topspace(X i))"
by fastforce
have "t1_space (product_topology X I) \<longleftrightarrow> (\<forall>i\<in>I. t1_space (X i))"
proof (intro iffI ballI)
show "t1_space (X i)" if "t1_space (product_topology X I)" and "i \<in> I" for i
proof -
have clo: "\<And>h. h \<in> (\<Pi>\<^sub>E i\<in>I. topspace (X i)) \<Longrightarrow> closedin (product_topology X I) {h}"
using that by (simp add: t1_space_closedin_singleton)
show ?thesis
unfolding t1_space_closedin_singleton
proof clarify
show "closedin (X i) {xi}" if "xi \<in> topspace (X i)" for xi
using clo [of "\<lambda>j \<in> I. if i=j then xi else f j"] f that \<open>i \<in> I\<close>
by (fastforce simp add: closedin_product_topology_singleton)
qed
qed
next
show "t1_space (product_topology X I)" if "\<forall>i\<in>I. t1_space (X i)"
using that
by (simp add: t1_space_closedin_singleton Ball_def PiE_iff closedin_product_topology_singleton)
qed
then show ?thesis
using False by blast
qed
lemma t1_space_prod_topology:
"t1_space(prod_topology X Y) \<longleftrightarrow> topspace(prod_topology X Y) = {} \<or> t1_space X \<and> t1_space Y"
proof (cases "topspace (prod_topology X Y) = {}")
case True then show ?thesis
by (auto simp: t1_space_empty)
next
case False
have eq: "{(x,y)} = {x} \<times> {y}" for x y
by simp
have "t1_space (prod_topology X Y) \<longleftrightarrow> (t1_space X \<and> t1_space Y)"
using False
by (force simp: t1_space_closedin_singleton closedin_prod_Times_iff eq simp del: insert_Times_insert)
with False show ?thesis
by simp
qed
subsection\<open>Hausdorff Spaces\<close>
definition Hausdorff_space
where
"Hausdorff_space X \<equiv>
\<forall>x y. x \<in> topspace X \<and> y \<in> topspace X \<and> (x \<noteq> y)
\<longrightarrow> (\<exists>U V. openin X U \<and> openin X V \<and> x \<in> U \<and> y \<in> V \<and> disjnt U V)"
lemma Hausdorff_space_expansive:
"\<lbrakk>Hausdorff_space X; topspace X = topspace Y; \<And>U. openin X U \<Longrightarrow> openin Y U\<rbrakk> \<Longrightarrow> Hausdorff_space Y"
by (metis Hausdorff_space_def)
lemma Hausdorff_space_topspace_empty:
"topspace X = {} \<Longrightarrow> Hausdorff_space X"
by (simp add: Hausdorff_space_def)
lemma Hausdorff_imp_t1_space:
"Hausdorff_space X \<Longrightarrow> t1_space X"
by (metis Hausdorff_space_def disjnt_iff t1_space_def)
lemma closedin_derived_set_of:
"Hausdorff_space X \<Longrightarrow> closedin X (X derived_set_of S)"
by (simp add: Hausdorff_imp_t1_space closedin_derived_set_of_gen)
lemma t1_or_Hausdorff_space:
"t1_space X \<or> Hausdorff_space X \<longleftrightarrow> t1_space X"
using Hausdorff_imp_t1_space by blast
lemma Hausdorff_space_sing_Inter_opens:
"\<lbrakk>Hausdorff_space X; a \<in> topspace X\<rbrakk> \<Longrightarrow> \<Inter>{u. openin X u \<and> a \<in> u} = {a}"
using Hausdorff_imp_t1_space t1_space_singleton_Inter_open by force
lemma Hausdorff_space_subtopology:
assumes "Hausdorff_space X" shows "Hausdorff_space(subtopology X S)"
proof -
have *: "disjnt U V \<Longrightarrow> disjnt (S \<inter> U) (S \<inter> V)" for U V
by (simp add: disjnt_iff)
from assms show ?thesis
apply (simp add: Hausdorff_space_def openin_subtopology_alt)
apply (fast intro: * elim!: all_forward)
done
qed
lemma Hausdorff_space_compact_separation:
assumes X: "Hausdorff_space X" and S: "compactin X S" and T: "compactin X T" and "disjnt S T"
obtains U V where "openin X U" "openin X V" "S \<subseteq> U" "T \<subseteq> V" "disjnt U V"
proof (cases "S = {}")
case True
then show thesis
by (metis \<open>compactin X T\<close> compactin_subset_topspace disjnt_empty1 empty_subsetI openin_empty openin_topspace that)
next
case False
have "\<forall>x \<in> S. \<exists>U V. openin X U \<and> openin X V \<and> x \<in> U \<and> T \<subseteq> V \<and> disjnt U V"
proof
fix a
assume "a \<in> S"
then have "a \<notin> T"
by (meson assms(4) disjnt_iff)
have a: "a \<in> topspace X"
using S \<open>a \<in> S\<close> compactin_subset_topspace by blast
show "\<exists>U V. openin X U \<and> openin X V \<and> a \<in> U \<and> T \<subseteq> V \<and> disjnt U V"
proof (cases "T = {}")
case True
then show ?thesis
using a disjnt_empty2 openin_empty by blast
next
case False
have "\<forall>x \<in> topspace X - {a}. \<exists>U V. openin X U \<and> openin X V \<and> x \<in> U \<and> a \<in> V \<and> disjnt U V"
using X a by (simp add: Hausdorff_space_def)
then obtain U V where UV: "\<forall>x \<in> topspace X - {a}. openin X (U x) \<and> openin X (V x) \<and> x \<in> U x \<and> a \<in> V x \<and> disjnt (U x) (V x)"
by metis
with \<open>a \<notin> T\<close> compactin_subset_topspace [OF T]
have Topen: "\<forall>W \<in> U ` T. openin X W" and Tsub: "T \<subseteq> \<Union> (U ` T)"
by auto
then obtain \<F> where \<F>: "finite \<F>" "\<F> \<subseteq> U ` T" and "T \<subseteq> \<Union>\<F>"
using T unfolding compactin_def by meson
then obtain F where F: "finite F" "F \<subseteq> T" "\<F> = U ` F" and SUF: "T \<subseteq> \<Union>(U ` F)" and "a \<notin> F"
using finite_subset_image [OF \<F>] \<open>a \<notin> T\<close> by (metis subsetD)
have U: "\<And>x. \<lbrakk>x \<in> topspace X; x \<noteq> a\<rbrakk> \<Longrightarrow> openin X (U x)"
and V: "\<And>x. \<lbrakk>x \<in> topspace X; x \<noteq> a\<rbrakk> \<Longrightarrow> openin X (V x)"
and disj: "\<And>x. \<lbrakk>x \<in> topspace X; x \<noteq> a\<rbrakk> \<Longrightarrow> disjnt (U x) (V x)"
using UV by blast+
show ?thesis
proof (intro exI conjI)
have "F \<noteq> {}"
using False SUF by blast
with \<open>a \<notin> F\<close> show "openin X (\<Inter>(V ` F))"
using F compactin_subset_topspace [OF T] by (force intro: V)
show "openin X (\<Union>(U ` F))"
using F Topen Tsub by (force intro: U)
show "disjnt (\<Inter>(V ` F)) (\<Union>(U ` F))"
using disj
apply (auto simp: disjnt_def)
using \<open>F \<subseteq> T\<close> \<open>a \<notin> F\<close> compactin_subset_topspace [OF T] by blast
show "a \<in> (\<Inter>(V ` F))"
using \<open>F \<subseteq> T\<close> T UV \<open>a \<notin> T\<close> compactin_subset_topspace by blast
qed (auto simp: SUF)
qed
qed
then obtain U V where UV: "\<forall>x \<in> S. openin X (U x) \<and> openin X (V x) \<and> x \<in> U x \<and> T \<subseteq> V x \<and> disjnt (U x) (V x)"
by metis
then have "S \<subseteq> \<Union> (U ` S)"
by auto
moreover have "\<forall>W \<in> U ` S. openin X W"
using UV by blast
ultimately obtain I where I: "S \<subseteq> \<Union> (U ` I)" "I \<subseteq> S" "finite I"
by (metis S compactin_def finite_subset_image)
show thesis
proof
show "openin X (\<Union>(U ` I))"
using \<open>I \<subseteq> S\<close> UV by blast
show "openin X (\<Inter> (V ` I))"
using False UV \<open>I \<subseteq> S\<close> \<open>S \<subseteq> \<Union> (U ` I)\<close> \<open>finite I\<close> by blast
show "disjnt (\<Union>(U ` I)) (\<Inter> (V ` I))"
by simp (meson UV \<open>I \<subseteq> S\<close> disjnt_subset2 in_mono le_INF_iff order_refl)
qed (use UV I in auto)
qed
lemma Hausdorff_space_compact_sets:
"Hausdorff_space X \<longleftrightarrow>
(\<forall>S T. compactin X S \<and> compactin X T \<and> disjnt S T
\<longrightarrow> (\<exists>U V. openin X U \<and> openin X V \<and> S \<subseteq> U \<and> T \<subseteq> V \<and> disjnt U V))"
(is "?lhs = ?rhs")
proof
assume ?lhs
then show ?rhs
by (meson Hausdorff_space_compact_separation)
next
assume R [rule_format]: ?rhs
show ?lhs
proof (clarsimp simp add: Hausdorff_space_def)
fix x y
assume "x \<in> topspace X" "y \<in> topspace X" "x \<noteq> y"
then show "\<exists>U. openin X U \<and> (\<exists>V. openin X V \<and> x \<in> U \<and> y \<in> V \<and> disjnt U V)"
using R [of "{x}" "{y}"] by auto
qed
qed
lemma compactin_imp_closedin:
assumes X: "Hausdorff_space X" and S: "compactin X S" shows "closedin X S"
proof -
have "S \<subseteq> topspace X"
by (simp add: assms compactin_subset_topspace)
moreover
have "\<exists>T. openin X T \<and> x \<in> T \<and> T \<subseteq> topspace X - S" if "x \<in> topspace X" "x \<notin> S" for x
using Hausdorff_space_compact_separation [OF X _ S, of "{x}"] that
apply (simp add: disjnt_def)
by (metis Diff_mono Diff_triv openin_subset)
ultimately show ?thesis
using closedin_def openin_subopen by force
qed
lemma closedin_Hausdorff_singleton:
"\<lbrakk>Hausdorff_space X; x \<in> topspace X\<rbrakk> \<Longrightarrow> closedin X {x}"
by (simp add: Hausdorff_imp_t1_space closedin_t1_singleton)
lemma closedin_Hausdorff_sing_eq:
"Hausdorff_space X \<Longrightarrow> closedin X {x} \<longleftrightarrow> x \<in> topspace X"
by (meson closedin_Hausdorff_singleton closedin_subset insert_subset)
lemma Hausdorff_space_discrete_topology [simp]:
"Hausdorff_space (discrete_topology U)"
unfolding Hausdorff_space_def
by (metis Hausdorff_space_compact_sets Hausdorff_space_def compactin_discrete_topology equalityE openin_discrete_topology)
lemma compactin_Int:
"\<lbrakk>Hausdorff_space X; compactin X S; compactin X T\<rbrakk> \<Longrightarrow> compactin X (S \<inter> T)"
by (simp add: closed_Int_compactin compactin_imp_closedin)
lemma finite_topspace_imp_discrete_topology:
"\<lbrakk>topspace X = U; finite U; Hausdorff_space X\<rbrakk> \<Longrightarrow> X = discrete_topology U"
using Hausdorff_imp_t1_space finite_t1_space_imp_discrete_topology by blast
lemma derived_set_of_finite:
"\<lbrakk>Hausdorff_space X; finite S\<rbrakk> \<Longrightarrow> X derived_set_of S = {}"
using Hausdorff_imp_t1_space t1_space_derived_set_of_finite by auto
lemma infinite_perfect_set:
"\<lbrakk>Hausdorff_space X; S \<subseteq> X derived_set_of S; S \<noteq> {}\<rbrakk> \<Longrightarrow> infinite S"
using derived_set_of_finite by blast
lemma derived_set_of_singleton:
"Hausdorff_space X \<Longrightarrow> X derived_set_of {x} = {}"
by (simp add: derived_set_of_finite)
lemma closedin_Hausdorff_finite:
"\<lbrakk>Hausdorff_space X; S \<subseteq> topspace X; finite S\<rbrakk> \<Longrightarrow> closedin X S"
by (simp add: compactin_imp_closedin finite_imp_compactin_eq)
lemma open_in_Hausdorff_delete:
"\<lbrakk>Hausdorff_space X; openin X S\<rbrakk> \<Longrightarrow> openin X (S - {x})"
using Hausdorff_imp_t1_space t1_space_openin_delete_alt by auto
lemma closedin_Hausdorff_finite_eq:
"\<lbrakk>Hausdorff_space X; finite S\<rbrakk> \<Longrightarrow> closedin X S \<longleftrightarrow> S \<subseteq> topspace X"
by (meson closedin_Hausdorff_finite closedin_def)
lemma derived_set_of_infinite_openin:
"Hausdorff_space X
\<Longrightarrow> X derived_set_of S =
{x \<in> topspace X. \<forall>U. x \<in> U \<and> openin X U \<longrightarrow> infinite(S \<inter> U)}"
using Hausdorff_imp_t1_space t1_space_derived_set_of_infinite_openin by fastforce
lemma Hausdorff_space_discrete_compactin:
"Hausdorff_space X
\<Longrightarrow> S \<inter> X derived_set_of S = {} \<and> compactin X S \<longleftrightarrow> S \<subseteq> topspace X \<and> finite S"
using derived_set_of_finite discrete_compactin_eq_finite by fastforce
lemma Hausdorff_space_finite_topspace:
"Hausdorff_space X \<Longrightarrow> X derived_set_of (topspace X) = {} \<and> compact_space X \<longleftrightarrow> finite(topspace X)"
using derived_set_of_finite discrete_compact_space_eq_finite by auto
lemma derived_set_of_derived_set_subset:
"Hausdorff_space X \<Longrightarrow> X derived_set_of (X derived_set_of S) \<subseteq> X derived_set_of S"
by (simp add: Hausdorff_imp_t1_space derived_set_of_derived_set_subset_gen)
lemma Hausdorff_space_injective_preimage:
assumes "Hausdorff_space Y" and cmf: "continuous_map X Y f" and "inj_on f (topspace X)"
shows "Hausdorff_space X"
unfolding Hausdorff_space_def
proof clarify
fix x y
assume x: "x \<in> topspace X" and y: "y \<in> topspace X" and "x \<noteq> y"
then obtain U V where "openin Y U" "openin Y V" "f x \<in> U" "f y \<in> V" "disjnt U V"
using assms unfolding Hausdorff_space_def continuous_map_def by (meson inj_onD)
show "\<exists>U V. openin X U \<and> openin X V \<and> x \<in> U \<and> y \<in> V \<and> disjnt U V"
proof (intro exI conjI)
show "openin X {x \<in> topspace X. f x \<in> U}"
using \<open>openin Y U\<close> cmf continuous_map by fastforce
show "openin X {x \<in> topspace X. f x \<in> V}"
using \<open>openin Y V\<close> cmf openin_continuous_map_preimage by blast
show "disjnt {x \<in> topspace X. f x \<in> U} {x \<in> topspace X. f x \<in> V}"
using \<open>disjnt U V\<close> by (auto simp add: disjnt_def)
qed (use x \<open>f x \<in> U\<close> y \<open>f y \<in> V\<close> in auto)
qed
lemma homeomorphic_Hausdorff_space:
"X homeomorphic_space Y \<Longrightarrow> Hausdorff_space X \<longleftrightarrow> Hausdorff_space Y"
unfolding homeomorphic_space_def homeomorphic_maps_map
by (auto simp: homeomorphic_eq_everything_map Hausdorff_space_injective_preimage)
lemma Hausdorff_space_retraction_map_image:
"\<lbrakk>retraction_map X Y r; Hausdorff_space X\<rbrakk> \<Longrightarrow> Hausdorff_space Y"
unfolding retraction_map_def
using Hausdorff_space_subtopology homeomorphic_Hausdorff_space retraction_maps_section_image2 by blast
lemma compact_Hausdorff_space_optimal:
assumes eq: "topspace Y = topspace X" and XY: "\<And>U. openin X U \<Longrightarrow> openin Y U"
and "Hausdorff_space X" "compact_space Y"
shows "Y = X"
proof -
have "\<And>U. closedin X U \<Longrightarrow> closedin Y U"
using XY using topology_finer_closedin [OF eq]
by metis
have "openin Y S = openin X S" for S
by (metis XY assms(3) assms(4) closedin_compact_space compactin_contractive compactin_imp_closedin eq openin_closedin_eq)
then show ?thesis
by (simp add: topology_eq)
qed
lemma continuous_map_imp_closed_graph:
assumes f: "continuous_map X Y f" and Y: "Hausdorff_space Y"
shows "closedin (prod_topology X Y) ((\<lambda>x. (x,f x)) ` topspace X)"
unfolding closedin_def
proof
show "(\<lambda>x. (x, f x)) ` topspace X \<subseteq> topspace (prod_topology X Y)"
using continuous_map_def f by fastforce
show "openin (prod_topology X Y) (topspace (prod_topology X Y) - (\<lambda>x. (x, f x)) ` topspace X)"
unfolding openin_prod_topology_alt
proof (intro allI impI)
show "\<exists>U V. openin X U \<and> openin Y V \<and> x \<in> U \<and> y \<in> V \<and> U \<times> V \<subseteq> topspace (prod_topology X Y) - (\<lambda>x. (x, f x)) ` topspace X"
if "(x,y) \<in> topspace (prod_topology X Y) - (\<lambda>x. (x, f x)) ` topspace X"
for x y
proof -
have "x \<in> topspace X" "y \<in> topspace Y" "y \<noteq> f x"
using that by auto
moreover have "f x \<in> topspace Y"
by (meson \<open>x \<in> topspace X\<close> continuous_map_def f)
ultimately obtain U V where UV: "openin Y U" "openin Y V" "f x \<in> U" "y \<in> V" "disjnt U V"
using Y Hausdorff_space_def by metis
show ?thesis
proof (intro exI conjI)
show "openin X {x \<in> topspace X. f x \<in> U}"
using \<open>openin Y U\<close> f openin_continuous_map_preimage by blast
show "{x \<in> topspace X. f x \<in> U} \<times> V \<subseteq> topspace (prod_topology X Y) - (\<lambda>x. (x, f x)) ` topspace X"
using UV by (auto simp: disjnt_iff dest: openin_subset)
qed (use UV \<open>x \<in> topspace X\<close> in auto)
qed
qed
qed
lemma continuous_imp_closed_map:
"\<lbrakk>continuous_map X Y f; compact_space X; Hausdorff_space Y\<rbrakk> \<Longrightarrow> closed_map X Y f"
by (meson closed_map_def closedin_compact_space compactin_imp_closedin image_compactin)
lemma continuous_imp_quotient_map:
"\<lbrakk>continuous_map X Y f; compact_space X; Hausdorff_space Y; f ` (topspace X) = topspace Y\<rbrakk>
\<Longrightarrow> quotient_map X Y f"
by (simp add: continuous_imp_closed_map continuous_closed_imp_quotient_map)
lemma continuous_imp_homeomorphic_map:
"\<lbrakk>continuous_map X Y f; compact_space X; Hausdorff_space Y;
f ` (topspace X) = topspace Y; inj_on f (topspace X)\<rbrakk>
\<Longrightarrow> homeomorphic_map X Y f"
by (simp add: continuous_imp_closed_map bijective_closed_imp_homeomorphic_map)
lemma continuous_imp_embedding_map:
"\<lbrakk>continuous_map X Y f; compact_space X; Hausdorff_space Y; inj_on f (topspace X)\<rbrakk>
\<Longrightarrow> embedding_map X Y f"
by (simp add: continuous_imp_closed_map injective_closed_imp_embedding_map)
lemma continuous_inverse_map:
assumes "compact_space X" "Hausdorff_space Y"
and cmf: "continuous_map X Y f" and gf: "\<And>x. x \<in> topspace X \<Longrightarrow> g(f x) = x"
and Sf: "S \<subseteq> f ` (topspace X)"
shows "continuous_map (subtopology Y S) X g"
proof (rule continuous_map_from_subtopology_mono [OF _ \<open>S \<subseteq> f ` (topspace X)\<close>])
show "continuous_map (subtopology Y (f ` (topspace X))) X g"
unfolding continuous_map_closedin
proof (intro conjI ballI allI impI)
fix x
assume "x \<in> topspace (subtopology Y (f ` topspace X))"
then show "g x \<in> topspace X"
by (auto simp: gf)
next
fix C
assume C: "closedin X C"
show "closedin (subtopology Y (f ` topspace X))
{x \<in> topspace (subtopology Y (f ` topspace X)). g x \<in> C}"
proof (rule compactin_imp_closedin)
show "Hausdorff_space (subtopology Y (f ` topspace X))"
using Hausdorff_space_subtopology [OF \<open>Hausdorff_space Y\<close>] by blast
have "compactin Y (f ` C)"
using C cmf image_compactin closedin_compact_space [OF \<open>compact_space X\<close>] by blast
moreover have "{x \<in> topspace Y. x \<in> f ` topspace X \<and> g x \<in> C} = f ` C"
using closedin_subset [OF C] cmf by (auto simp: gf continuous_map_def)
ultimately have "compactin Y {x \<in> topspace Y. x \<in> f ` topspace X \<and> g x \<in> C}"
by simp
then show "compactin (subtopology Y (f ` topspace X))
{x \<in> topspace (subtopology Y (f ` topspace X)). g x \<in> C}"
by (auto simp add: compactin_subtopology)
qed
qed
qed
lemma closed_map_paired_continuous_map_right:
"\<lbrakk>continuous_map X Y f; Hausdorff_space Y\<rbrakk> \<Longrightarrow> closed_map X (prod_topology X Y) (\<lambda>x. (x,f x))"
by (simp add: continuous_map_imp_closed_graph embedding_map_graph embedding_imp_closed_map)
lemma closed_map_paired_continuous_map_left:
assumes f: "continuous_map X Y f" and Y: "Hausdorff_space Y"
shows "closed_map X (prod_topology Y X) (\<lambda>x. (f x,x))"
proof -
have eq: "(\<lambda>x. (f x,x)) = (\<lambda>(a,b). (b,a)) \<circ> (\<lambda>x. (x,f x))"
by auto
show ?thesis
unfolding eq
proof (rule closed_map_compose)
show "closed_map X (prod_topology X Y) (\<lambda>x. (x, f x))"
using Y closed_map_paired_continuous_map_right f by blast
show "closed_map (prod_topology X Y) (prod_topology Y X) (\<lambda>(a, b). (b, a))"
by (metis homeomorphic_map_swap homeomorphic_imp_closed_map)
qed
qed
lemma proper_map_paired_continuous_map_right:
"\<lbrakk>continuous_map X Y f; Hausdorff_space Y\<rbrakk>
\<Longrightarrow> proper_map X (prod_topology X Y) (\<lambda>x. (x,f x))"
using closed_injective_imp_proper_map closed_map_paired_continuous_map_right
by (metis (mono_tags, lifting) Pair_inject inj_onI)
lemma proper_map_paired_continuous_map_left:
"\<lbrakk>continuous_map X Y f; Hausdorff_space Y\<rbrakk>
\<Longrightarrow> proper_map X (prod_topology Y X) (\<lambda>x. (f x,x))"
using closed_injective_imp_proper_map closed_map_paired_continuous_map_left
by (metis (mono_tags, lifting) Pair_inject inj_onI)
lemma Hausdorff_space_prod_topology:
"Hausdorff_space(prod_topology X Y) \<longleftrightarrow> topspace(prod_topology X Y) = {} \<or> Hausdorff_space X \<and> Hausdorff_space Y"
(is "?lhs = ?rhs")
proof
assume ?lhs
then show ?rhs
by (rule topological_property_of_prod_component) (auto simp: Hausdorff_space_subtopology homeomorphic_Hausdorff_space)
next
assume R: ?rhs
show ?lhs
proof (cases "(topspace X \<times> topspace Y) = {}")
case False
with R have ne: "topspace X \<noteq> {}" "topspace Y \<noteq> {}" and X: "Hausdorff_space X" and Y: "Hausdorff_space Y"
by auto
show ?thesis
unfolding Hausdorff_space_def
proof clarify
fix x y x' y'
assume xy: "(x, y) \<in> topspace (prod_topology X Y)"
and xy': "(x',y') \<in> topspace (prod_topology X Y)"
and *: "\<nexists>U V. openin (prod_topology X Y) U \<and> openin (prod_topology X Y) V
\<and> (x, y) \<in> U \<and> (x', y') \<in> V \<and> disjnt U V"
have False if "x \<noteq> x' \<or> y \<noteq> y'"
using that
proof
assume "x \<noteq> x'"
then obtain U V where "openin X U" "openin X V" "x \<in> U" "x' \<in> V" "disjnt U V"
by (metis Hausdorff_space_def X mem_Sigma_iff topspace_prod_topology xy xy')
let ?U = "U \<times> topspace Y"
let ?V = "V \<times> topspace Y"
have "openin (prod_topology X Y) ?U" "openin (prod_topology X Y) ?V"
by (simp_all add: openin_prod_Times_iff \<open>openin X U\<close> \<open>openin X V\<close>)
moreover have "disjnt ?U ?V"
by (simp add: \<open>disjnt U V\<close>)
ultimately show False
using * \<open>x \<in> U\<close> \<open>x' \<in> V\<close> xy xy' by (metis SigmaD2 SigmaI topspace_prod_topology)
next
assume "y \<noteq> y'"
then obtain U V where "openin Y U" "openin Y V" "y \<in> U" "y' \<in> V" "disjnt U V"
by (metis Hausdorff_space_def Y mem_Sigma_iff topspace_prod_topology xy xy')
let ?U = "topspace X \<times> U"
let ?V = "topspace X \<times> V"
have "openin (prod_topology X Y) ?U" "openin (prod_topology X Y) ?V"
by (simp_all add: openin_prod_Times_iff \<open>openin Y U\<close> \<open>openin Y V\<close>)
moreover have "disjnt ?U ?V"
by (simp add: \<open>disjnt U V\<close>)
ultimately show False
using "*" \<open>y \<in> U\<close> \<open>y' \<in> V\<close> xy xy' by (metis SigmaD1 SigmaI topspace_prod_topology)
qed
then show "x = x' \<and> y = y'"
by blast
qed
qed (simp add: Hausdorff_space_topspace_empty)
qed
lemma Hausdorff_space_product_topology:
"Hausdorff_space (product_topology X I) \<longleftrightarrow> (\<Pi>\<^sub>E i\<in>I. topspace(X i)) = {} \<or> (\<forall>i \<in> I. Hausdorff_space (X i))"
(is "?lhs = ?rhs")
proof
assume ?lhs
then show ?rhs
apply (rule topological_property_of_product_component)
apply (blast dest: Hausdorff_space_subtopology homeomorphic_Hausdorff_space)+
done
next
assume R: ?rhs
show ?lhs
proof (cases "(\<Pi>\<^sub>E i\<in>I. topspace(X i)) = {}")
case True
then show ?thesis
by (simp add: Hausdorff_space_topspace_empty)
next
case False
have "\<exists>U V. openin (product_topology X I) U \<and> openin (product_topology X I) V \<and> f \<in> U \<and> g \<in> V \<and> disjnt U V"
if f: "f \<in> (\<Pi>\<^sub>E i\<in>I. topspace (X i))" and g: "g \<in> (\<Pi>\<^sub>E i\<in>I. topspace (X i))" and "f \<noteq> g"
for f g :: "'a \<Rightarrow> 'b"
proof -
obtain m where "f m \<noteq> g m"
using \<open>f \<noteq> g\<close> by blast
then have "m \<in> I"
using f g by fastforce
then have "Hausdorff_space (X m)"
using False that R by blast
then obtain U V where U: "openin (X m) U" and V: "openin (X m) V" and "f m \<in> U" "g m \<in> V" "disjnt U V"
by (metis Hausdorff_space_def PiE_mem \<open>f m \<noteq> g m\<close> \<open>m \<in> I\<close> f g)
show ?thesis
proof (intro exI conjI)
let ?U = "(\<Pi>\<^sub>E i\<in>I. topspace(X i)) \<inter> {x. x m \<in> U}"
let ?V = "(\<Pi>\<^sub>E i\<in>I. topspace(X i)) \<inter> {x. x m \<in> V}"
show "openin (product_topology X I) ?U" "openin (product_topology X I) ?V"
using \<open>m \<in> I\<close> U V
by (force simp add: openin_product_topology intro: arbitrary_union_of_inc relative_to_inc finite_intersection_of_inc)+
show "f \<in> ?U"
using \<open>f m \<in> U\<close> f by blast
show "g \<in> ?V"
using \<open>g m \<in> V\<close> g by blast
show "disjnt ?U ?V"
using \<open>disjnt U V\<close> by (auto simp: PiE_def Pi_def disjnt_def)
qed
qed
then show ?thesis
by (simp add: Hausdorff_space_def)
qed
qed
lemma Hausdorff_space_closed_neighbourhood:
"Hausdorff_space X \<longleftrightarrow>
(\<forall>x \<in> topspace X. \<exists>U C. openin X U \<and> closedin X C \<and>
Hausdorff_space(subtopology X C) \<and> x \<in> U \<and> U \<subseteq> C)" (is "_ = ?rhs")
proof
assume R: ?rhs
show "Hausdorff_space X"
unfolding Hausdorff_space_def
proof clarify
fix x y
assume x: "x \<in> topspace X" and y: "y \<in> topspace X" and "x \<noteq> y"
obtain T C where *: "openin X T" "closedin X C" "x \<in> T" "T \<subseteq> C"
and C: "Hausdorff_space (subtopology X C)"
by (meson R \<open>x \<in> topspace X\<close>)
show "\<exists>U V. openin X U \<and> openin X V \<and> x \<in> U \<and> y \<in> V \<and> disjnt U V"
proof (cases "y \<in> C")
case True
with * C obtain U V where U: "openin (subtopology X C) U"
and V: "openin (subtopology X C) V"
and "x \<in> U" "y \<in> V" "disjnt U V"
unfolding Hausdorff_space_def
by (smt (verit, best) \<open>x \<noteq> y\<close> closedin_subset subsetD topspace_subtopology_subset)
then obtain U' V' where UV': "U = U' \<inter> C" "openin X U'" "V = V' \<inter> C" "openin X V'"
by (meson openin_subtopology)
have "disjnt (T \<inter> U') V'"
using \<open>disjnt U V\<close> UV' \<open>T \<subseteq> C\<close> by (force simp: disjnt_iff)
with \<open>T \<subseteq> C\<close> have "disjnt (T \<inter> U') (V' \<union> (topspace X - C))"
unfolding disjnt_def by blast
moreover
have "openin X (T \<inter> U')"
by (simp add: \<open>openin X T\<close> \<open>openin X U'\<close> openin_Int)
moreover have "openin X (V' \<union> (topspace X - C))"
using \<open>closedin X C\<close> \<open>openin X V'\<close> by auto
ultimately show ?thesis
using UV' \<open>x \<in> T\<close> \<open>x \<in> U\<close> \<open>y \<in> V\<close> by blast
next
case False
with * y show ?thesis
by (force simp: closedin_def disjnt_def)
qed
qed
qed fastforce
end
diff --git a/src/HOL/Bali/Eval.thy b/src/HOL/Bali/Eval.thy
--- a/src/HOL/Bali/Eval.thy
+++ b/src/HOL/Bali/Eval.thy
@@ -1,1179 +1,1179 @@
(* Title: HOL/Bali/Eval.thy
Author: David von Oheimb
*)
subsection \<open>Operational evaluation (big-step) semantics of Java expressions and
statements
\<close>
theory Eval imports State DeclConcepts begin
text \<open>
improvements over Java Specification 1.0:
\begin{itemize}
\item dynamic method lookup does not need to consider the return type
(cf.15.11.4.4)
\item throw raises a NullPointer exception if a null reference is given, and
each throw of a standard exception yields a fresh exception object
(was not specified)
\item if there is not enough memory even to allocate an OutOfMemory exception,
evaluation/execution fails, i.e. simply stops (was not specified)
\item array assignment checks lhs (and may throw exceptions) before evaluating
rhs
\item fixed exact positions of class initializations
(immediate at first active use)
\end{itemize}
design issues:
\begin{itemize}
\item evaluation vs. (single-step) transition semantics
evaluation semantics chosen, because:
\begin{itemize}
\item[++] less verbose and therefore easier to read (and to handle in proofs)
\item[+] more abstract
\item[+] intermediate values (appearing in recursive rules) need not be
stored explicitly, e.g. no call body construct or stack of invocation
frames containing local variables and return addresses for method calls
needed
\item[+] convenient rule induction for subject reduction theorem
\item[-] no interleaving (for parallelism) can be described
\item[-] stating a property of infinite executions requires the meta-level
argument that this property holds for any finite prefixes of it
(e.g. prefixes obtained by stopping the execution with a counter that is
decremented to zero and then throwing an exception)
\end{itemize}
\item unified evaluation for variables, expressions, expression lists,
statements
\item the value entry in statement rules is redundant
\item the value entry in rules is irrelevant in case of exceptions, but its full
inclusion helps to make the rule structure independent of exception occurrence.
\item as irrelevant value entries are ignored, it does not matter if they are
unique.
For simplicity, (fixed) arbitrary values are preferred over "free" values.
\item the rule format is such that the start state may contain an exception.
\begin{itemize}
\item[++] facilitates exception handling
\item[+] symmetry
\end{itemize}
\item the rules are defined carefully in order to be applicable even in not
type-correct situations (yielding undefined values),
e.g. \<open>the_Addr (Val (Bool b)) = undefined\<close>.
\begin{itemize}
\item[++] fewer rules
\item[-] less readable because of auxiliary functions like \<open>the_Addr\<close>
\end{itemize}
Alternative: "defensive" evaluation throwing some InternalError exception
in case of (impossible, for correct programs) type mismatches
\item there is exactly one rule per syntactic construct
\begin{itemize}
\item[+] no redundancy in case distinctions
\end{itemize}
\item halloc fails iff there is no free heap address. When there is
only one free heap address left, it returns an OutOfMemory exception.
In this way it is guaranteed that when an OutOfMemory exception is thrown for
the first time, there is a free location on the heap to allocate it (as
illustrated right after this text).
\item the allocation of objects that represent standard exceptions is deferred
until execution of any enclosing catch clause, which is transparent to
the program.
\begin{itemize}
\item[-] requires an auxiliary execution relation
\item[++] avoids copies of allocation code and awkward case distinctions
(whether there is enough memory to allocate the exception) in
evaluation rules
\end{itemize}
\item unfortunately \<open>new_Addr\<close> is not directly executable because of
the Hilbert operator.
\end{itemize}
simplifications:
\begin{itemize}
\item local variables are initialized with default values
(no definite assignment)
\item garbage collection not considered, therefore also no finalizers
\item stack overflow and memory overflow during class initialization not
modelled
\item exceptions in initializations not replaced by ExceptionInInitializerError
\end{itemize}
\<close>
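text \<open>As an illustration of the \<open>halloc\<close> policy above (this merely restates the
premises of rule \<open>New\<close>, which is given verbatim in the "evaluation judgments"
subsection below): allocation proceeds normally only while at least two heap addresses
are free; if \<open>new_Addr\<close> succeeds but only one address \<open>a\<close> is left, that
very address is used for the \<open>OutOfMemory\<close> exception object and the state
becomes abrupt:

  new_Addr (heap s) = Some a;
  (x,oi') = (if atleast_free (heap s) (Suc (Suc 0)) then (None,oi)
             else (Some (Xcpt (Loc a)), CInst (SXcpt OutOfMemory)))
\<close>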
type_synonym vvar = "val \<times> (val \<Rightarrow> state \<Rightarrow> state)"
type_synonym vals = "(val, vvar, val list) sum3"
translations
(type) "vvar" <= (type) "val \<times> (val \<Rightarrow> state \<Rightarrow> state)"
(type) "vals" <= (type) "(val, vvar, val list) sum3"
text \<open>To avoid redundancy and to reduce the number of rules, there is only
one evaluation rule for each syntactic term. This is also true for variables
(e.g. see the rules below for \<open>LVar\<close>, \<open>FVar\<close> and \<open>AVar\<close>).
So evaluation of a variable must capture both possible further uses:
read (rule \<open>Acc\<close>) or write (rule \<open>Ass\<close>) to the variable.
Therefore a variable evaluates to a special value \<^term>\<open>vvar\<close>, which is
a pair consisting of the current value (for later read access) and an update
function (for later write access). Because
during assignment to an array variable an exception may occur if the types
don't match, the update function is very generic: it transforms the
full state. This generic update function causes some technical trouble during
some proofs (e.g. type safety, correctness of definite assignment). There we
need to prove some additional invariant on this update function to prove the
assignment correct, since the update function could potentially alter the whole
state in an arbitrary manner. This invariant must be carried around through
the whole induction.
So for future approaches it may be better not to take
such a generic update function, but only to store the address and the kind
of variable (array (+ element type), local variable or field) for later
assignment.
\<close>
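text \<open>For a concrete picture of this pair representation (only an illustration; the
definitions appear verbatim in the "variables" subsection below), a local variable
\<open>vn\<close> evaluates to its current value together with an update function that
writes back into the local-variable map:

  lvar vn s = (the (locals s vn), \<lambda>v. supd (lupd(vn\<mapsto>v)))

For an array variable (\<open>avar\<close>), the update component additionally checks
whether the assigned value fits the element type and raises \<open>ArrStore\<close>
otherwise, which is the source of the generic, state-transforming behaviour
discussed above.\<close>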
abbreviation
dummy_res :: "vals" ("\<diamondsuit>")
where "\<diamondsuit> == In1 Unit"
abbreviation (input)
val_inj_vals ("\<lfloor>_\<rfloor>\<^sub>e" 1000)
where "\<lfloor>e\<rfloor>\<^sub>e == In1 e"
abbreviation (input)
var_inj_vals ("\<lfloor>_\<rfloor>\<^sub>v" 1000)
where "\<lfloor>v\<rfloor>\<^sub>v == In2 v"
abbreviation (input)
lst_inj_vals ("\<lfloor>_\<rfloor>\<^sub>l" 1000)
where "\<lfloor>es\<rfloor>\<^sub>l == In3 es"
definition undefined3 :: "('al + 'ar, 'b, 'c) sum3 \<Rightarrow> vals" where
"undefined3 = case_sum3 (In1 \<circ> case_sum (\<lambda>x. undefined) (\<lambda>x. Unit))
(\<lambda>x. In2 undefined) (\<lambda>x. In3 undefined)"
lemma [simp]: "undefined3 (In1l x) = In1 undefined"
by (simp add: undefined3_def)
lemma [simp]: "undefined3 (In1r x) = \<diamondsuit>"
by (simp add: undefined3_def)
lemma [simp]: "undefined3 (In2 x) = In2 undefined"
by (simp add: undefined3_def)
lemma [simp]: "undefined3 (In3 x) = In3 undefined"
by (simp add: undefined3_def)
subsubsection "exception throwing and catching"
definition
throw :: "val \<Rightarrow> abopt \<Rightarrow> abopt" where
"throw a' x = abrupt_if True (Some (Xcpt (Loc (the_Addr a')))) (np a' x)"
lemma throw_def2:
"throw a' x = abrupt_if True (Some (Xcpt (Loc (the_Addr a')))) (np a' x)"
apply (unfold throw_def)
apply (simp (no_asm))
done
definition
fits :: "prog \<Rightarrow> st \<Rightarrow> val \<Rightarrow> ty \<Rightarrow> bool" ("_,_\<turnstile>_ fits _"[61,61,61,61]60)
where "G,s\<turnstile>a' fits T = ((\<exists>rt. T=RefT rt) \<longrightarrow> a'=Null \<or> G\<turnstile>obj_ty(lookup_obj s a')\<preceq>T)"
lemma fits_Null [simp]: "G,s\<turnstile>Null fits T"
by (simp add: fits_def)
lemma fits_Addr_RefT [simp]:
"G,s\<turnstile>Addr a fits RefT t = G\<turnstile>obj_ty (the (heap s a))\<preceq>RefT t"
by (simp add: fits_def)
lemma fitsD: "\<And>X. G,s\<turnstile>a' fits T \<Longrightarrow> (\<exists>pt. T = PrimT pt) \<or>
(\<exists>t. T = RefT t) \<and> a' = Null \<or>
(\<exists>t. T = RefT t) \<and> a' \<noteq> Null \<and> G\<turnstile>obj_ty (lookup_obj s a')\<preceq>T"
apply (unfold fits_def)
apply (case_tac "\<exists>pt. T = PrimT pt")
apply simp_all
apply (case_tac "T")
defer
apply (case_tac "a' = Null")
apply simp_all
done
definition
catch :: "prog \<Rightarrow> state \<Rightarrow> qtname \<Rightarrow> bool" ("_,_\<turnstile>catch _"[61,61,61]60) where
"G,s\<turnstile>catch C = (\<exists>xc. abrupt s=Some (Xcpt xc) \<and>
G,store s\<turnstile>Addr (the_Loc xc) fits Class C)"
lemma catch_Norm [simp]: "\<not>G,Norm s\<turnstile>catch tn"
apply (unfold catch_def)
apply (simp (no_asm))
done
lemma catch_XcptLoc [simp]:
"G,(Some (Xcpt (Loc a)),s)\<turnstile>catch C = G,s\<turnstile>Addr a fits Class C"
apply (unfold catch_def)
apply (simp (no_asm))
done
lemma catch_Jump [simp]: "\<not>G,(Some (Jump j),s)\<turnstile>catch tn"
apply (unfold catch_def)
apply (simp (no_asm))
done
lemma catch_Error [simp]: "\<not>G,(Some (Error e),s)\<turnstile>catch tn"
apply (unfold catch_def)
apply (simp (no_asm))
done
definition
new_xcpt_var :: "vname \<Rightarrow> state \<Rightarrow> state" where
"new_xcpt_var vn = (\<lambda>(x,s). Norm (lupd(VName vn\<mapsto>Addr (the_Loc (the_Xcpt (the x)))) s))"
lemma new_xcpt_var_def2 [simp]:
"new_xcpt_var vn (x,s) =
Norm (lupd(VName vn\<mapsto>Addr (the_Loc (the_Xcpt (the x)))) s)"
apply (unfold new_xcpt_var_def)
apply (simp (no_asm))
done
subsubsection "misc"
definition
assign :: "('a \<Rightarrow> state \<Rightarrow> state) \<Rightarrow> 'a \<Rightarrow> state \<Rightarrow> state" where
"assign f v = (\<lambda>(x,s). let (x',s') = (if x = None then f v else id) (x,s)
in (x',if x' = None then s' else s))"
(*
lemma assign_Norm_Norm [simp]:
"f v \<lparr>abrupt=None,store=s\<rparr> = \<lparr>abrupt=None,store=s'\<rparr>
\<Longrightarrow> assign f v \<lparr>abrupt=None,store=s\<rparr> = \<lparr>abrupt=None,store=s'\<rparr>"
by (simp add: assign_def Let_def)
*)
lemma assign_Norm_Norm [simp]:
"f v (Norm s) = Norm s' \<Longrightarrow> assign f v (Norm s) = Norm s'"
by (simp add: assign_def Let_def)
(*
lemma assign_Norm_Some [simp]:
"\<lbrakk>abrupt (f v \<lparr>abrupt=None,store=s\<rparr>) = Some y\<rbrakk>
\<Longrightarrow> assign f v \<lparr>abrupt=None,store=s\<rparr> = \<lparr>abrupt=Some y,store =s\<rparr>"
by (simp add: assign_def Let_def split_beta)
*)
lemma assign_Norm_Some [simp]:
"\<lbrakk>abrupt (f v (Norm s)) = Some y\<rbrakk>
\<Longrightarrow> assign f v (Norm s) = (Some y,s)"
by (simp add: assign_def Let_def split_beta)
lemma assign_Some [simp]:
"assign f v (Some x,s) = (Some x,s)"
by (simp add: assign_def Let_def split_beta)
lemma assign_Some1 [simp]: "\<not> normal s \<Longrightarrow> assign f v s = s"
by (auto simp add: assign_def Let_def split_beta)
lemma assign_supd [simp]:
"assign (\<lambda>v. supd (f v)) v (x,s)
= (x, if x = None then f v s else s)"
apply auto
done
lemma assign_raise_if [simp]:
"assign (\<lambda>v (x,s). ((raise_if (b s v) xcpt) x, f v s)) v (x, s) =
(raise_if (b s v) xcpt x, if x=None \<and> \<not>b s v then f v s else s)"
apply (case_tac "x = None")
apply auto
done
(*
lemma assign_raise_if [simp]:
"assign (\<lambda>v s. \<lparr>abrupt=(raise_if (b (store s) v) xcpt) (abrupt s),
store = f v (store s)\<rparr>) v s =
\<lparr>abrupt=raise_if (b (store s) v) xcpt (abrupt s),
store= if (abrupt s)=None \<and> \<not>b (store s) v
then f v (store s) else (store s)\<rparr>"
apply (case_tac "abrupt s = None")
apply auto
done
*)
definition
init_comp_ty :: "ty \<Rightarrow> stmt"
where "init_comp_ty T = (if (\<exists>C. T = Class C) then Init (the_Class T) else Skip)"
lemma init_comp_ty_PrimT [simp]: "init_comp_ty (PrimT pt) = Skip"
apply (unfold init_comp_ty_def)
apply (simp (no_asm))
done
definition
invocation_class :: "inv_mode \<Rightarrow> st \<Rightarrow> val \<Rightarrow> ref_ty \<Rightarrow> qtname" where
"invocation_class m s a' statT =
(case m of
Static \<Rightarrow> if (\<exists> statC. statT = ClassT statC)
then the_Class (RefT statT)
else Object
| SuperM \<Rightarrow> the_Class (RefT statT)
| IntVir \<Rightarrow> obj_class (lookup_obj s a'))"
definition
invocation_declclass :: "prog \<Rightarrow> inv_mode \<Rightarrow> st \<Rightarrow> val \<Rightarrow> ref_ty \<Rightarrow> sig \<Rightarrow> qtname" where
"invocation_declclass G m s a' statT sig =
declclass (the (dynlookup G statT
(invocation_class m s a' statT)
sig))"
lemma invocation_class_IntVir [simp]:
"invocation_class IntVir s a' statT = obj_class (lookup_obj s a')"
by (simp add: invocation_class_def)
lemma dynclass_SuperM [simp]:
"invocation_class SuperM s a' statT = the_Class (RefT statT)"
by (simp add: invocation_class_def)
lemma invocation_class_Static [simp]:
"invocation_class Static s a' statT = (if (\<exists> statC. statT = ClassT statC)
then the_Class (RefT statT)
else Object)"
by (simp add: invocation_class_def)
definition
init_lvars :: "prog \<Rightarrow> qtname \<Rightarrow> sig \<Rightarrow> inv_mode \<Rightarrow> val \<Rightarrow> val list \<Rightarrow> state \<Rightarrow> state"
where
"init_lvars G C sig mode a' pvs =
(\<lambda>(x,s).
let m = mthd (the (methd G C sig));
l = \<lambda> k.
(case k of
EName e
\<Rightarrow> (case e of
VNam v \<Rightarrow> (Map.empty ((pars m)[\<mapsto>]pvs)) v
| Res \<Rightarrow> None)
| This
\<Rightarrow> (if mode=Static then None else Some a'))
in set_lvars l (if mode = Static then x else np a' x,s))"
lemma init_lvars_def2: \<comment> \<open>better suited for simplification\<close>
"init_lvars G C sig mode a' pvs (x,s) =
set_lvars
(\<lambda> k.
(case k of
EName e
\<Rightarrow> (case e of
VNam v
\<Rightarrow> (Map.empty ((pars (mthd (the (methd G C sig))))[\<mapsto>]pvs)) v
| Res \<Rightarrow> None)
| This
\<Rightarrow> (if mode=Static then None else Some a')))
(if mode = Static then x else np a' x,s)"
apply (unfold init_lvars_def)
apply (simp (no_asm) add: Let_def)
done
definition
body :: "prog \<Rightarrow> qtname \<Rightarrow> sig \<Rightarrow> expr" where
"body G C sig =
(let m = the (methd G C sig)
in Body (declclass m) (stmt (mbody (mthd m))))"
lemma body_def2: \<comment> \<open>better suited for simplification\<close>
"body G C sig = Body (declclass (the (methd G C sig)))
(stmt (mbody (mthd (the (methd G C sig)))))"
apply (unfold body_def Let_def)
apply auto
done
subsubsection "variables"
definition
lvar :: "lname \<Rightarrow> st \<Rightarrow> vvar"
where "lvar vn s = (the (locals s vn), \<lambda>v. supd (lupd(vn\<mapsto>v)))"
definition
fvar :: "qtname \<Rightarrow> bool \<Rightarrow> vname \<Rightarrow> val \<Rightarrow> state \<Rightarrow> vvar \<times> state" where
"fvar C stat fn a' s =
(let (oref,xf) = if stat then (Stat C,id)
else (Heap (the_Addr a'),np a');
n = Inl (fn,C);
f = (\<lambda>v. supd (upd_gobj oref n v))
in ((the (values (the (globs (store s) oref)) n),f),abupd xf s))"
definition
avar :: "prog \<Rightarrow> val \<Rightarrow> val \<Rightarrow> state \<Rightarrow> vvar \<times> state" where
"avar G i' a' s =
(let oref = Heap (the_Addr a');
i = the_Intg i';
n = Inr i;
(T,k,cs) = the_Arr (globs (store s) oref);
f = (\<lambda>v (x,s). (raise_if (\<not>G,s\<turnstile>v fits T)
ArrStore x
,upd_gobj oref n v s))
in ((the (cs n),f),abupd (raise_if (\<not>i in_bounds k) IndOutBound \<circ> np a') s))"
lemma fvar_def2: \<comment> \<open>better suited for simplification\<close>
"fvar C stat fn a' s =
((the
(values
(the (globs (store s) (if stat then Stat C else Heap (the_Addr a'))))
(Inl (fn,C)))
,(\<lambda>v. supd (upd_gobj (if stat then Stat C else Heap (the_Addr a'))
(Inl (fn,C))
v)))
,abupd (if stat then id else np a') s)
"
apply (unfold fvar_def)
apply (simp (no_asm) add: Let_def split_beta)
done
lemma avar_def2: \<comment> \<open>better suited for simplification\<close>
"avar G i' a' s =
((the ((snd(snd(the_Arr (globs (store s) (Heap (the_Addr a'))))))
(Inr (the_Intg i')))
,(\<lambda>v (x,s'). (raise_if (\<not>G,s'\<turnstile>v fits (fst(the_Arr (globs (store s)
(Heap (the_Addr a'))))))
ArrStore x
,upd_gobj (Heap (the_Addr a'))
(Inr (the_Intg i')) v s')))
,abupd (raise_if (\<not>(the_Intg i') in_bounds (fst(snd(the_Arr (globs (store s)
(Heap (the_Addr a'))))))) IndOutBound \<circ> np a')
s)"
apply (unfold avar_def)
apply (simp (no_asm) add: Let_def split_beta)
done
definition
check_field_access :: "prog \<Rightarrow> qtname \<Rightarrow> qtname \<Rightarrow> vname \<Rightarrow> bool \<Rightarrow> val \<Rightarrow> state \<Rightarrow> state" where
"check_field_access G accC statDeclC fn stat a' s =
(let oref = if stat then Stat statDeclC
else Heap (the_Addr a');
dynC = case oref of
Heap a \<Rightarrow> obj_class (the (globs (store s) oref))
| Stat C \<Rightarrow> C;
f = (the (table_of (DeclConcepts.fields G dynC) (fn,statDeclC)))
in abupd
(error_if (\<not> G\<turnstile>Field fn (statDeclC,f) in dynC dyn_accessible_from accC)
AccessViolation)
s)"
definition
check_method_access :: "prog \<Rightarrow> qtname \<Rightarrow> ref_ty \<Rightarrow> inv_mode \<Rightarrow> sig \<Rightarrow> val \<Rightarrow> state \<Rightarrow> state" where
"check_method_access G accC statT mode sig a' s =
(let invC = invocation_class mode (store s) a' statT;
dynM = the (dynlookup G statT invC sig)
in abupd
(error_if (\<not> G\<turnstile>Methd sig dynM in invC dyn_accessible_from accC)
AccessViolation)
s)"
subsubsection "evaluation judgments"
inductive
halloc :: "[prog,state,obj_tag,loc,state]\<Rightarrow>bool" ("_\<turnstile>_ \<midarrow>halloc _\<succ>_\<rightarrow> _"[61,61,61,61,61]60) for G::prog
where \<comment> \<open>allocating objects on the heap, cf. 12.5\<close>
Abrupt:
"G\<turnstile>(Some x,s) \<midarrow>halloc oi\<succ>undefined\<rightarrow> (Some x,s)"
| New: "\<lbrakk>new_Addr (heap s) = Some a;
(x,oi') = (if atleast_free (heap s) (Suc (Suc 0)) then (None,oi)
else (Some (Xcpt (Loc a)),CInst (SXcpt OutOfMemory)))\<rbrakk>
\<Longrightarrow>
G\<turnstile>Norm s \<midarrow>halloc oi\<succ>a\<rightarrow> (x,init_obj G oi' (Heap a) s)"
inductive sxalloc :: "[prog,state,state]\<Rightarrow>bool" ("_\<turnstile>_ \<midarrow>sxalloc\<rightarrow> _"[61,61,61]60) for G::prog
where \<comment> \<open>allocating exception objects for
standard exceptions (other than OutOfMemory)\<close>
Norm: "G\<turnstile> Norm s \<midarrow>sxalloc\<rightarrow> Norm s"
| Jmp: "G\<turnstile>(Some (Jump j), s) \<midarrow>sxalloc\<rightarrow> (Some (Jump j), s)"
| Error: "G\<turnstile>(Some (Error e), s) \<midarrow>sxalloc\<rightarrow> (Some (Error e), s)"
| XcptL: "G\<turnstile>(Some (Xcpt (Loc a) ),s) \<midarrow>sxalloc\<rightarrow> (Some (Xcpt (Loc a)),s)"
| SXcpt: "\<lbrakk>G\<turnstile>Norm s0 \<midarrow>halloc (CInst (SXcpt xn))\<succ>a\<rightarrow> (x,s1)\<rbrakk> \<Longrightarrow>
G\<turnstile>(Some (Xcpt (Std xn)),s0) \<midarrow>sxalloc\<rightarrow> (Some (Xcpt (Loc a)),s1)"
inductive
eval :: "[prog,state,term,vals,state]\<Rightarrow>bool" ("_\<turnstile>_ \<midarrow>_\<succ>\<rightarrow> '(_, _')" [61,61,80,0,0]60)
and exec ::"[prog,state,stmt ,state]\<Rightarrow>bool"("_\<turnstile>_ \<midarrow>_\<rightarrow> _" [61,61,65, 61]60)
and evar ::"[prog,state,var ,vvar,state]\<Rightarrow>bool"("_\<turnstile>_ \<midarrow>_=\<succ>_\<rightarrow> _"[61,61,90,61,61]60)
and eval'::"[prog,state,expr ,val ,state]\<Rightarrow>bool"("_\<turnstile>_ \<midarrow>_-\<succ>_\<rightarrow> _"[61,61,80,61,61]60)
and evals::"[prog,state,expr list ,
val list ,state]\<Rightarrow>bool"("_\<turnstile>_ \<midarrow>_\<doteq>\<succ>_\<rightarrow> _"[61,61,61,61,61]60)
for G::prog
where
"G\<turnstile>s \<midarrow>c \<rightarrow> s' \<equiv> G\<turnstile>s \<midarrow>In1r c\<succ>\<rightarrow> (\<diamondsuit>, s')"
| "G\<turnstile>s \<midarrow>e-\<succ>v \<rightarrow> s' \<equiv> G\<turnstile>s \<midarrow>In1l e\<succ>\<rightarrow> (In1 v, s')"
| "G\<turnstile>s \<midarrow>e=\<succ>vf\<rightarrow> s' \<equiv> G\<turnstile>s \<midarrow>In2 e\<succ>\<rightarrow> (In2 vf, s')"
| "G\<turnstile>s \<midarrow>e\<doteq>\<succ>v \<rightarrow> s' \<equiv> G\<turnstile>s \<midarrow>In3 e\<succ>\<rightarrow> (In3 v, s')"
\<comment> \<open>propagation of abrupt completion\<close>
\<comment> \<open>cf. 14.1, 15.5\<close>
| Abrupt:
"G\<turnstile>(Some xc,s) \<midarrow>t\<succ>\<rightarrow> (undefined3 t, (Some xc, s))"
\<comment> \<open>execution of statements\<close>
\<comment> \<open>cf. 14.5\<close>
| Skip: "G\<turnstile>Norm s \<midarrow>Skip\<rightarrow> Norm s"
\<comment> \<open>cf. 14.7\<close>
| Expr: "\<lbrakk>G\<turnstile>Norm s0 \<midarrow>e-\<succ>v\<rightarrow> s1\<rbrakk> \<Longrightarrow>
G\<turnstile>Norm s0 \<midarrow>Expr e\<rightarrow> s1"
| Lab: "\<lbrakk>G\<turnstile>Norm s0 \<midarrow>c \<rightarrow> s1\<rbrakk> \<Longrightarrow>
G\<turnstile>Norm s0 \<midarrow>l\<bullet> c\<rightarrow> abupd (absorb l) s1"
\<comment> \<open>cf. 14.2\<close>
| Comp: "\<lbrakk>G\<turnstile>Norm s0 \<midarrow>c1 \<rightarrow> s1;
G\<turnstile> s1 \<midarrow>c2 \<rightarrow> s2\<rbrakk> \<Longrightarrow>
G\<turnstile>Norm s0 \<midarrow>c1;; c2\<rightarrow> s2"
\<comment> \<open>cf. 14.8.2\<close>
| If: "\<lbrakk>G\<turnstile>Norm s0 \<midarrow>e-\<succ>b\<rightarrow> s1;
G\<turnstile> s1\<midarrow>(if the_Bool b then c1 else c2)\<rightarrow> s2\<rbrakk> \<Longrightarrow>
G\<turnstile>Norm s0 \<midarrow>If(e) c1 Else c2 \<rightarrow> s2"
\<comment> \<open>cf. 14.10, 14.10.1\<close>
\<comment> \<open>A continue jump from the while body \<^term>\<open>c\<close> is handled by
this rule. If a continue jump with the proper label was invoked inside
\<^term>\<open>c\<close>, this label (Cont l) is removed from the abrupt component of
the state before the while statement is evaluated again.
A break jump is handled by the Lab statement \<open>Lab l (while\<dots>)\<close>.\<close>
| Loop: "\<lbrakk>G\<turnstile>Norm s0 \<midarrow>e-\<succ>b\<rightarrow> s1;
if the_Bool b
then (G\<turnstile>s1 \<midarrow>c\<rightarrow> s2 \<and>
G\<turnstile>(abupd (absorb (Cont l)) s2) \<midarrow>l\<bullet> While(e) c\<rightarrow> s3)
else s3 = s1\<rbrakk> \<Longrightarrow>
G\<turnstile>Norm s0 \<midarrow>l\<bullet> While(e) c\<rightarrow> s3"
| Jmp: "G\<turnstile>Norm s \<midarrow>Jmp j\<rightarrow> (Some (Jump j), s)"
\<comment> \<open>cf. 14.16\<close>
| Throw: "\<lbrakk>G\<turnstile>Norm s0 \<midarrow>e-\<succ>a'\<rightarrow> s1\<rbrakk> \<Longrightarrow>
G\<turnstile>Norm s0 \<midarrow>Throw e\<rightarrow> abupd (throw a') s1"
\<comment> \<open>cf. 14.18.1\<close>
| Try: "\<lbrakk>G\<turnstile>Norm s0 \<midarrow>c1\<rightarrow> s1; G\<turnstile>s1 \<midarrow>sxalloc\<rightarrow> s2;
if G,s2\<turnstile>catch C then G\<turnstile>new_xcpt_var vn s2 \<midarrow>c2\<rightarrow> s3 else s3 = s2\<rbrakk> \<Longrightarrow>
G\<turnstile>Norm s0 \<midarrow>Try c1 Catch(C vn) c2\<rightarrow> s3"
\<comment> \<open>cf. 14.18.2\<close>
| Fin: "\<lbrakk>G\<turnstile>Norm s0 \<midarrow>c1\<rightarrow> (x1,s1);
G\<turnstile>Norm s1 \<midarrow>c2\<rightarrow> s2;
s3=(if (\<exists> err. x1=Some (Error err))
then (x1,s1)
else abupd (abrupt_if (x1\<noteq>None) x1) s2) \<rbrakk>
\<Longrightarrow>
G\<turnstile>Norm s0 \<midarrow>c1 Finally c2\<rightarrow> s3"
\<comment> \<open>cf. 12.4.2, 8.5\<close>
| Init: "\<lbrakk>the (class G C) = c;
if inited C (globs s0) then s3 = Norm s0
else (G\<turnstile>Norm (init_class_obj G C s0)
\<midarrow>(if C = Object then Skip else Init (super c))\<rightarrow> s1 \<and>
G\<turnstile>set_lvars Map.empty s1 \<midarrow>init c\<rightarrow> s2 \<and> s3 = restore_lvars s1 s2)\<rbrakk>
\<Longrightarrow>
G\<turnstile>Norm s0 \<midarrow>Init C\<rightarrow> s3"
\<comment> \<open>This class initialisation rule is a little bit inaccurate. Look at the
exact sequence:
(1) The current class object (the static fields) is initialised
(\<open>init_class_obj\<close>),
(2) the superclasses are initialised,
(3) the static initialiser of the current class is invoked.
More precisely we should expect another ordering, namely 2, 1, 3.
But we can't just naively swap 1 and 2. By calling
\<open>init_class_obj\<close>
before initialising the superclasses, we also implicitly record that
we have started to initialise the current class (by setting a
value for the class object). This becomes
crucial for the completeness proof of the axiomatic semantics
\<open>AxCompl.thy\<close>. Static initialisation requires an induction on
the number of classes not yet initialised (or, to be more precise,
classes where the initialisation has not yet begun).
So we could first assign a dummy value to the class before
superclass initialisation and afterwards set the correct values.
But as long as we don't take memory overflow into account
when allocating class objects, we can leave things as they are for
convenience.\<close>
\<comment> \<open>evaluation of expressions\<close>
\<comment> \<open>cf. 15.8.1, 12.4.1\<close>
| NewC: "\<lbrakk>G\<turnstile>Norm s0 \<midarrow>Init C\<rightarrow> s1;
G\<turnstile> s1 \<midarrow>halloc (CInst C)\<succ>a\<rightarrow> s2\<rbrakk> \<Longrightarrow>
G\<turnstile>Norm s0 \<midarrow>NewC C-\<succ>Addr a\<rightarrow> s2"
\<comment> \<open>cf. 15.9.1, 12.4.1\<close>
| NewA: "\<lbrakk>G\<turnstile>Norm s0 \<midarrow>init_comp_ty T\<rightarrow> s1; G\<turnstile>s1 \<midarrow>e-\<succ>i'\<rightarrow> s2;
G\<turnstile>abupd (check_neg i') s2 \<midarrow>halloc (Arr T (the_Intg i'))\<succ>a\<rightarrow> s3\<rbrakk> \<Longrightarrow>
G\<turnstile>Norm s0 \<midarrow>New T[e]-\<succ>Addr a\<rightarrow> s3"
\<comment> \<open>cf. 15.15\<close>
| Cast: "\<lbrakk>G\<turnstile>Norm s0 \<midarrow>e-\<succ>v\<rightarrow> s1;
s2 = abupd (raise_if (\<not>G,store s1\<turnstile>v fits T) ClassCast) s1\<rbrakk> \<Longrightarrow>
G\<turnstile>Norm s0 \<midarrow>Cast T e-\<succ>v\<rightarrow> s2"
\<comment> \<open>cf. 15.19.2\<close>
| Inst: "\<lbrakk>G\<turnstile>Norm s0 \<midarrow>e-\<succ>v\<rightarrow> s1;
b = (v\<noteq>Null \<and> G,store s1\<turnstile>v fits RefT T)\<rbrakk> \<Longrightarrow>
G\<turnstile>Norm s0 \<midarrow>e InstOf T-\<succ>Bool b\<rightarrow> s1"
\<comment> \<open>cf. 15.7.1\<close>
| Lit: "G\<turnstile>Norm s \<midarrow>Lit v-\<succ>v\<rightarrow> Norm s"
| UnOp: "\<lbrakk>G\<turnstile>Norm s0 \<midarrow>e-\<succ>v\<rightarrow> s1\<rbrakk>
\<Longrightarrow> G\<turnstile>Norm s0 \<midarrow>UnOp unop e-\<succ>(eval_unop unop v)\<rightarrow> s1"
| BinOp: "\<lbrakk>G\<turnstile>Norm s0 \<midarrow>e1-\<succ>v1\<rightarrow> s1;
G\<turnstile>s1 \<midarrow>(if need_second_arg binop v1 then (In1l e2) else (In1r Skip))
\<succ>\<rightarrow> (In1 v2, s2)
\<rbrakk>
\<Longrightarrow> G\<turnstile>Norm s0 \<midarrow>BinOp binop e1 e2-\<succ>(eval_binop binop v1 v2)\<rightarrow> s2"
\<comment> \<open>cf. 15.10.2\<close>
| Super: "G\<turnstile>Norm s \<midarrow>Super-\<succ>val_this s\<rightarrow> Norm s"
\<comment> \<open>cf. 15.2\<close>
| Acc: "\<lbrakk>G\<turnstile>Norm s0 \<midarrow>va=\<succ>(v,f)\<rightarrow> s1\<rbrakk> \<Longrightarrow>
G\<turnstile>Norm s0 \<midarrow>Acc va-\<succ>v\<rightarrow> s1"
\<comment> \<open>cf. 15.25.1\<close>
| Ass: "\<lbrakk>G\<turnstile>Norm s0 \<midarrow>va=\<succ>(w,f)\<rightarrow> s1;
G\<turnstile> s1 \<midarrow>e-\<succ>v \<rightarrow> s2\<rbrakk> \<Longrightarrow>
G\<turnstile>Norm s0 \<midarrow>va:=e-\<succ>v\<rightarrow> assign f v s2"
\<comment> \<open>cf. 15.24\<close>
| Cond: "\<lbrakk>G\<turnstile>Norm s0 \<midarrow>e0-\<succ>b\<rightarrow> s1;
G\<turnstile> s1 \<midarrow>(if the_Bool b then e1 else e2)-\<succ>v\<rightarrow> s2\<rbrakk> \<Longrightarrow>
G\<turnstile>Norm s0 \<midarrow>e0 ? e1 : e2-\<succ>v\<rightarrow> s2"
\<comment> \<open>The interplay of \<^term>\<open>Call\<close>, \<^term>\<open>Methd\<close> and \<^term>\<open>Body\<close>:
Method invocation is split up into these three rules:
\begin{itemize}
\item [\<^term>\<open>Call\<close>] Calculates the target address and evaluates the
arguments of the method, and then performs dynamic
or static lookup of the method, corresponding to the
call mode. Then the \<^term>\<open>Methd\<close> rule is evaluated
on the calculated declaration class of the method
invocation.
\item [\<^term>\<open>Methd\<close>] A syntactic bridge for the folded method body.
It is used by the axiomatic semantics to add the
proper hypothesis for recursive calls of the method.
\item [\<^term>\<open>Body\<close>] An extra syntactic entity for the unfolded method
body was introduced to properly trigger class
initialisation. Without class initialisation we
could just evaluate the body statement.
\end{itemize}\<close>
\<comment> \<open>cf. 15.11.4.1, 15.11.4.2, 15.11.4.4, 15.11.4.5\<close>
| Call:
"\<lbrakk>G\<turnstile>Norm s0 \<midarrow>e-\<succ>a'\<rightarrow> s1; G\<turnstile>s1 \<midarrow>args\<doteq>\<succ>vs\<rightarrow> s2;
D = invocation_declclass G mode (store s2) a' statT \<lparr>name=mn,parTs=pTs\<rparr>;
s3=init_lvars G D \<lparr>name=mn,parTs=pTs\<rparr> mode a' vs s2;
s3' = check_method_access G accC statT mode \<lparr>name=mn,parTs=pTs\<rparr> a' s3;
G\<turnstile>s3' \<midarrow>Methd D \<lparr>name=mn,parTs=pTs\<rparr>-\<succ>v\<rightarrow> s4\<rbrakk>
\<Longrightarrow>
G\<turnstile>Norm s0 \<midarrow>{accC,statT,mode}e\<cdot>mn({pTs}args)-\<succ>v\<rightarrow> (restore_lvars s2 s4)"
\<comment> \<open>The accessibility check is after \<^term>\<open>init_lvars\<close>, to keep it simple.
\<^term>\<open>init_lvars\<close> already tests for the absence of a null-pointer
reference in case of an instance method invocation.\<close>
| Methd: "\<lbrakk>G\<turnstile>Norm s0 \<midarrow>body G D sig-\<succ>v\<rightarrow> s1\<rbrakk> \<Longrightarrow>
G\<turnstile>Norm s0 \<midarrow>Methd D sig-\<succ>v\<rightarrow> s1"
| Body: "\<lbrakk>G\<turnstile>Norm s0 \<midarrow>Init D\<rightarrow> s1; G\<turnstile>s1 \<midarrow>c\<rightarrow> s2;
s3 = (if (\<exists> l. abrupt s2 = Some (Jump (Break l)) \<or>
abrupt s2 = Some (Jump (Cont l)))
then abupd (\<lambda> x. Some (Error CrossMethodJump)) s2
else s2)\<rbrakk> \<Longrightarrow>
G\<turnstile>Norm s0 \<midarrow>Body D c-\<succ>the (locals (store s2) Result)
\<rightarrow>abupd (absorb Ret) s3"
\<comment> \<open>cf. 14.15, 12.4.1\<close>
\<comment> \<open>We filter out a break/continue in \<^term>\<open>s2\<close>, so that we can prove
definite assignment correct without needing conformance of the state. This way
the different parts of the type safety proof can be disentangled a little.\<close>
\<comment> \<open>evaluation of variables\<close>
\<comment> \<open>cf. 15.13.1, 15.7.2\<close>
| LVar: "G\<turnstile>Norm s \<midarrow>LVar vn=\<succ>lvar vn s\<rightarrow> Norm s"
\<comment> \<open>cf. 15.10.1, 12.4.1\<close>
| FVar: "\<lbrakk>G\<turnstile>Norm s0 \<midarrow>Init statDeclC\<rightarrow> s1; G\<turnstile>s1 \<midarrow>e-\<succ>a\<rightarrow> s2;
(v,s2') = fvar statDeclC stat fn a s2;
s3 = check_field_access G accC statDeclC fn stat a s2' \<rbrakk> \<Longrightarrow>
G\<turnstile>Norm s0 \<midarrow>{accC,statDeclC,stat}e..fn=\<succ>v\<rightarrow> s3"
\<comment> \<open>The accessibility check is after \<^term>\<open>fvar\<close>, to keep it simple.
\<^term>\<open>fvar\<close> already tests for the absence of a null-pointer reference
in case of an instance field.\<close>
\<comment> \<open>cf. 15.12.1, 15.25.1\<close>
| AVar: "\<lbrakk>G\<turnstile> Norm s0 \<midarrow>e1-\<succ>a\<rightarrow> s1; G\<turnstile>s1 \<midarrow>e2-\<succ>i\<rightarrow> s2;
(v,s2') = avar G i a s2\<rbrakk> \<Longrightarrow>
G\<turnstile>Norm s0 \<midarrow>e1.[e2]=\<succ>v\<rightarrow> s2'"
\<comment> \<open>evaluation of expression lists\<close>
\<comment> \<open>cf. 15.11.4.2\<close>
| Nil:
"G\<turnstile>Norm s0 \<midarrow>[]\<doteq>\<succ>[]\<rightarrow> Norm s0"
\<comment> \<open>cf. 15.6.4\<close>
| Cons: "\<lbrakk>G\<turnstile>Norm s0 \<midarrow>e -\<succ> v \<rightarrow> s1;
G\<turnstile> s1 \<midarrow>es\<doteq>\<succ>vs\<rightarrow> s2\<rbrakk> \<Longrightarrow>
G\<turnstile>Norm s0 \<midarrow>e#es\<doteq>\<succ>v#vs\<rightarrow> s2"
(* Rearrangement of premisses:
[0,1(Abrupt),2(Skip),8(Jmp),4(Lab),30(Nil),31(Cons),27(LVar),17(Cast),18(Inst),
17(Lit),18(UnOp),19(BinOp),20(Super),21(Acc),3(Expr),5(Comp),25(Methd),26(Body),23(Cond),6(If),
7(Loop),11(Fin),9(Throw),13(NewC),14(NewA),12(Init),22(Ass),10(Try),28(FVar),
29(AVar),24(Call)]
*)
ML \<open>
ML_Thms.bind_thm ("eval_induct", rearrange_prems
[0,1,2,8,4,30,31,27,15,16,
17,18,19,20,21,3,5,25,26,23,6,
7,11,9,13,14,12,22,10,28,
29,24] @{thm eval.induct})
\<close>
declare if_split [split del] if_split_asm [split del]
option.split [split del] option.split_asm [split del]
inductive_cases halloc_elim_cases:
"G\<turnstile>(Some xc,s) \<midarrow>halloc oi\<succ>a\<rightarrow> s'"
"G\<turnstile>(Norm s) \<midarrow>halloc oi\<succ>a\<rightarrow> s'"
inductive_cases sxalloc_elim_cases:
"G\<turnstile> Norm s \<midarrow>sxalloc\<rightarrow> s'"
"G\<turnstile>(Some (Jump j),s) \<midarrow>sxalloc\<rightarrow> s'"
"G\<turnstile>(Some (Error e),s) \<midarrow>sxalloc\<rightarrow> s'"
"G\<turnstile>(Some (Xcpt (Loc a )),s) \<midarrow>sxalloc\<rightarrow> s'"
"G\<turnstile>(Some (Xcpt (Std xn)),s) \<midarrow>sxalloc\<rightarrow> s'"
inductive_cases sxalloc_cases: "G\<turnstile>s \<midarrow>sxalloc\<rightarrow> s'"
lemma sxalloc_elim_cases2: "\<lbrakk>G\<turnstile>s \<midarrow>sxalloc\<rightarrow> s';
\<And>s. \<lbrakk>s' = Norm s\<rbrakk> \<Longrightarrow> P;
\<And>j s. \<lbrakk>s' = (Some (Jump j),s)\<rbrakk> \<Longrightarrow> P;
\<And>e s. \<lbrakk>s' = (Some (Error e),s)\<rbrakk> \<Longrightarrow> P;
\<And>a s. \<lbrakk>s' = (Some (Xcpt (Loc a)),s)\<rbrakk> \<Longrightarrow> P
\<rbrakk> \<Longrightarrow> P"
apply cut_tac
apply (erule sxalloc_cases)
apply blast+
done
declare not_None_eq [simp del] (* IntDef.Zero_def [simp del] *)
declare split_paired_All [simp del] split_paired_Ex [simp del]
setup \<open>map_theory_simpset (fn ctxt => ctxt delloop "split_all_tac")\<close>
inductive_cases eval_cases: "G\<turnstile>s \<midarrow>t\<succ>\<rightarrow> (v, s')"
inductive_cases eval_elim_cases [cases set]:
"G\<turnstile>(Some xc,s) \<midarrow>t \<succ>\<rightarrow> (v, s')"
"G\<turnstile>Norm s \<midarrow>In1r Skip \<succ>\<rightarrow> (x, s')"
"G\<turnstile>Norm s \<midarrow>In1r (Jmp j) \<succ>\<rightarrow> (x, s')"
"G\<turnstile>Norm s \<midarrow>In1r (l\<bullet> c) \<succ>\<rightarrow> (x, s')"
"G\<turnstile>Norm s \<midarrow>In3 ([]) \<succ>\<rightarrow> (v, s')"
"G\<turnstile>Norm s \<midarrow>In3 (e#es) \<succ>\<rightarrow> (v, s')"
"G\<turnstile>Norm s \<midarrow>In1l (Lit w) \<succ>\<rightarrow> (v, s')"
"G\<turnstile>Norm s \<midarrow>In1l (UnOp unop e) \<succ>\<rightarrow> (v, s')"
"G\<turnstile>Norm s \<midarrow>In1l (BinOp binop e1 e2) \<succ>\<rightarrow> (v, s')"
"G\<turnstile>Norm s \<midarrow>In2 (LVar vn) \<succ>\<rightarrow> (v, s')"
"G\<turnstile>Norm s \<midarrow>In1l (Cast T e) \<succ>\<rightarrow> (v, s')"
"G\<turnstile>Norm s \<midarrow>In1l (e InstOf T) \<succ>\<rightarrow> (v, s')"
"G\<turnstile>Norm s \<midarrow>In1l (Super) \<succ>\<rightarrow> (v, s')"
"G\<turnstile>Norm s \<midarrow>In1l (Acc va) \<succ>\<rightarrow> (v, s')"
"G\<turnstile>Norm s \<midarrow>In1r (Expr e) \<succ>\<rightarrow> (x, s')"
"G\<turnstile>Norm s \<midarrow>In1r (c1;; c2) \<succ>\<rightarrow> (x, s')"
"G\<turnstile>Norm s \<midarrow>In1l (Methd C sig) \<succ>\<rightarrow> (x, s')"
"G\<turnstile>Norm s \<midarrow>In1l (Body D c) \<succ>\<rightarrow> (x, s')"
"G\<turnstile>Norm s \<midarrow>In1l (e0 ? e1 : e2) \<succ>\<rightarrow> (v, s')"
"G\<turnstile>Norm s \<midarrow>In1r (If(e) c1 Else c2) \<succ>\<rightarrow> (x, s')"
"G\<turnstile>Norm s \<midarrow>In1r (l\<bullet> While(e) c) \<succ>\<rightarrow> (x, s')"
"G\<turnstile>Norm s \<midarrow>In1r (c1 Finally c2) \<succ>\<rightarrow> (x, s')"
"G\<turnstile>Norm s \<midarrow>In1r (Throw e) \<succ>\<rightarrow> (x, s')"
"G\<turnstile>Norm s \<midarrow>In1l (NewC C) \<succ>\<rightarrow> (v, s')"
"G\<turnstile>Norm s \<midarrow>In1l (New T[e]) \<succ>\<rightarrow> (v, s')"
"G\<turnstile>Norm s \<midarrow>In1l (Ass va e) \<succ>\<rightarrow> (v, s')"
"G\<turnstile>Norm s \<midarrow>In1r (Try c1 Catch(tn vn) c2) \<succ>\<rightarrow> (x, s')"
"G\<turnstile>Norm s \<midarrow>In2 ({accC,statDeclC,stat}e..fn) \<succ>\<rightarrow> (v, s')"
"G\<turnstile>Norm s \<midarrow>In2 (e1.[e2]) \<succ>\<rightarrow> (v, s')"
"G\<turnstile>Norm s \<midarrow>In1l ({accC,statT,mode}e\<cdot>mn({pT}p)) \<succ>\<rightarrow> (v, s')"
"G\<turnstile>Norm s \<midarrow>In1r (Init C) \<succ>\<rightarrow> (x, s')"
declare not_None_eq [simp] (* IntDef.Zero_def [simp] *)
declare split_paired_All [simp] split_paired_Ex [simp]
declaration \<open>K (Simplifier.map_ss (fn ss => ss addloop ("split_all_tac", split_all_tac)))\<close>
declare if_split [split] if_split_asm [split]
option.split [split] option.split_asm [split]
lemma eval_Inj_elim:
"G\<turnstile>s \<midarrow>t\<succ>\<rightarrow> (w,s')
\<Longrightarrow> case t of
In1 ec \<Rightarrow> (case ec of
Inl e \<Rightarrow> (\<exists>v. w = In1 v)
| Inr c \<Rightarrow> w = \<diamondsuit>)
| In2 e \<Rightarrow> (\<exists>v. w = In2 v)
| In3 e \<Rightarrow> (\<exists>v. w = In3 v)"
apply (erule eval_cases)
apply auto
apply (induct_tac "t")
apply (rename_tac a, induct_tac "a")
apply auto
done
text \<open>The following simplification procedures set up the proper injections of
terms and their corresponding values in the evaluation relation:
E.g. an expression
(injection \<^term>\<open>In1l\<close> into terms) always evaluates to ordinary values
(injection \<^term>\<open>In1\<close> into generalised values \<^term>\<open>vals\<close>).
\<close>
lemma eval_expr_eq: "G\<turnstile>s \<midarrow>In1l t\<succ>\<rightarrow> (w, s') = (\<exists>v. w=In1 v \<and> G\<turnstile>s \<midarrow>t-\<succ>v \<rightarrow> s')"
by (auto, frule eval_Inj_elim, auto)
lemma eval_var_eq: "G\<turnstile>s \<midarrow>In2 t\<succ>\<rightarrow> (w, s') = (\<exists>vf. w=In2 vf \<and> G\<turnstile>s \<midarrow>t=\<succ>vf\<rightarrow> s')"
by (auto, frule eval_Inj_elim, auto)
lemma eval_exprs_eq: "G\<turnstile>s \<midarrow>In3 t\<succ>\<rightarrow> (w, s') = (\<exists>vs. w=In3 vs \<and> G\<turnstile>s \<midarrow>t\<doteq>\<succ>vs\<rightarrow> s')"
by (auto, frule eval_Inj_elim, auto)
lemma eval_stmt_eq: "G\<turnstile>s \<midarrow>In1r t\<succ>\<rightarrow> (w, s') = (w=\<diamondsuit> \<and> G\<turnstile>s \<midarrow>t \<rightarrow> s')"
by (auto, frule eval_Inj_elim, auto, frule eval_Inj_elim, auto)
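text \<open>A small illustrative consequence, as a sketch only: by \<open>eval_expr_eq\<close> an
expression, i.e. an \<open>In1l\<close> term, can never evaluate to a variable value \<open>In2 vf\<close>.
The instance below merely spells out the injection argument and is not used later.\<close>
lemma "\<not> (G\<turnstile>s \<midarrow>In1l e\<succ>\<rightarrow> (In2 vf, s'))"
  by (simp add: eval_expr_eq)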
simproc_setup eval_expr ("G\<turnstile>s \<midarrow>In1l t\<succ>\<rightarrow> (w, s')") = \<open>
- fn _ => fn _ => fn ct =>
- (case Thm.term_of ct of
- (_ $ _ $ _ $ _ $ (Const _ $ _) $ _) => NONE
- | _ => SOME (mk_meta_eq @{thm eval_expr_eq}))\<close>
-
-simproc_setup eval_var ("G\<turnstile>s \<midarrow>In2 t\<succ>\<rightarrow> (w, s')") = \<open>
- fn _ => fn _ => fn ct =>
+ K (K (fn ct =>
(case Thm.term_of ct of
(_ $ _ $ _ $ _ $ (Const _ $ _) $ _) => NONE
- | _ => SOME (mk_meta_eq @{thm eval_var_eq}))\<close>
+ | _ => SOME (mk_meta_eq @{thm eval_expr_eq}))))\<close>
-simproc_setup eval_exprs ("G\<turnstile>s \<midarrow>In3 t\<succ>\<rightarrow> (w, s')") = \<open>
- fn _ => fn _ => fn ct =>
+simproc_setup eval_var ("G\<turnstile>s \<midarrow>In2 t\<succ>\<rightarrow> (w, s')") = \<open>
+ K (K (fn ct =>
(case Thm.term_of ct of
(_ $ _ $ _ $ _ $ (Const _ $ _) $ _) => NONE
- | _ => SOME (mk_meta_eq @{thm eval_exprs_eq}))\<close>
+ | _ => SOME (mk_meta_eq @{thm eval_var_eq}))))\<close>
+
+simproc_setup eval_exprs ("G\<turnstile>s \<midarrow>In3 t\<succ>\<rightarrow> (w, s')") = \<open>
+ K (K (fn ct =>
+ (case Thm.term_of ct of
+ (_ $ _ $ _ $ _ $ (Const _ $ _) $ _) => NONE
+ | _ => SOME (mk_meta_eq @{thm eval_exprs_eq}))))\<close>
simproc_setup eval_stmt ("G\<turnstile>s \<midarrow>In1r t\<succ>\<rightarrow> (w, s')") = \<open>
- fn _ => fn _ => fn ct =>
+ K (K (fn ct =>
(case Thm.term_of ct of
(_ $ _ $ _ $ _ $ (Const _ $ _) $ _) => NONE
- | _ => SOME (mk_meta_eq @{thm eval_stmt_eq}))\<close>
+ | _ => SOME (mk_meta_eq @{thm eval_stmt_eq}))))\<close>
ML \<open>
ML_Thms.bind_thms ("AbruptIs", sum3_instantiate \<^context> @{thm eval.Abrupt})
\<close>
declare halloc.Abrupt [intro!] eval.Abrupt [intro!] AbruptIs [intro!]
text\<open>\<open>Callee\<close>, \<open>InsInitE\<close>, \<open>InsInitV\<close>, \<open>FinA\<close> are only
used in the small-step semantics, not in the big-step semantics, so there is no
valid evaluation of these terms.
\<close>
lemma eval_Callee: "G\<turnstile>Norm s\<midarrow>Callee l e-\<succ>v\<rightarrow> s' = False"
proof -
{ fix s t v s'
assume eval: "G\<turnstile>s \<midarrow>t\<succ>\<rightarrow> (v,s')" and
normal: "normal s" and
callee: "t=In1l (Callee l e)"
then have "False" by induct auto
}
then show ?thesis
by (cases s') fastforce
qed
lemma eval_InsInitE: "G\<turnstile>Norm s\<midarrow>InsInitE c e-\<succ>v\<rightarrow> s' = False"
proof -
{ fix s t v s'
assume eval: "G\<turnstile>s \<midarrow>t\<succ>\<rightarrow> (v,s')" and
normal: "normal s" and
callee: "t=In1l (InsInitE c e)"
then have "False" by induct auto
}
then show ?thesis
by (cases s') fastforce
qed
lemma eval_InsInitV: "G\<turnstile>Norm s\<midarrow>InsInitV c w=\<succ>v\<rightarrow> s' = False"
proof -
{ fix s t v s'
assume eval: "G\<turnstile>s \<midarrow>t\<succ>\<rightarrow> (v,s')" and
normal: "normal s" and
callee: "t=In2 (InsInitV c w)"
then have "False" by induct auto
}
then show ?thesis
by (cases s') fastforce
qed
lemma eval_FinA: "G\<turnstile>Norm s\<midarrow>FinA a c\<rightarrow> s' = False"
proof -
{ fix s t v s'
assume eval: "G\<turnstile>s \<midarrow>t\<succ>\<rightarrow> (v,s')" and
normal: "normal s" and
callee: "t=In1r (FinA a c)"
then have "False" by induct auto
}
then show ?thesis
by (cases s') fastforce
qed
lemma eval_no_abrupt_lemma:
"\<And>s s'. G\<turnstile>s \<midarrow>t\<succ>\<rightarrow> (w,s') \<Longrightarrow> normal s' \<longrightarrow> normal s"
by (erule eval_cases, auto)
lemma eval_no_abrupt:
"G\<turnstile>(x,s) \<midarrow>t\<succ>\<rightarrow> (w,Norm s') =
(x = None \<and> G\<turnstile>Norm s \<midarrow>t\<succ>\<rightarrow> (w,Norm s'))"
apply auto
apply (frule eval_no_abrupt_lemma, auto)+
done
simproc_setup eval_no_abrupt ("G\<turnstile>(x,s) \<midarrow>e\<succ>\<rightarrow> (w,Norm s')") = \<open>
- fn _ => fn _ => fn ct =>
+ K (K (fn ct =>
(case Thm.term_of ct of
(_ $ _ $ (Const (\<^const_name>\<open>Pair\<close>, _) $ (Const (\<^const_name>\<open>None\<close>, _)) $ _) $ _ $ _ $ _) => NONE
- | _ => SOME (mk_meta_eq @{thm eval_no_abrupt}))
+ | _ => SOME (mk_meta_eq @{thm eval_no_abrupt}))))
\<close>
lemma eval_abrupt_lemma:
"G\<turnstile>s \<midarrow>t\<succ>\<rightarrow> (v,s') \<Longrightarrow> abrupt s=Some xc \<longrightarrow> s'= s \<and> v = undefined3 t"
by (erule eval_cases, auto)
lemma eval_abrupt:
" G\<turnstile>(Some xc,s) \<midarrow>t\<succ>\<rightarrow> (w,s') =
(s'=(Some xc,s) \<and> w=undefined3 t \<and>
G\<turnstile>(Some xc,s) \<midarrow>t\<succ>\<rightarrow> (undefined3 t,(Some xc,s)))"
apply auto
apply (frule eval_abrupt_lemma, auto)+
done
simproc_setup eval_abrupt ("G\<turnstile>(Some xc,s) \<midarrow>e\<succ>\<rightarrow> (w,s')") = \<open>
- fn _ => fn _ => fn ct =>
+ K (K (fn ct =>
(case Thm.term_of ct of
(_ $ _ $ _ $ _ $ _ $ (Const (\<^const_name>\<open>Pair\<close>, _) $ (Const (\<^const_name>\<open>Some\<close>, _) $ _)$ _)) => NONE
- | _ => SOME (mk_meta_eq @{thm eval_abrupt}))
+ | _ => SOME (mk_meta_eq @{thm eval_abrupt}))))
\<close>
lemma LitI: "G\<turnstile>s \<midarrow>Lit v-\<succ>(if normal s then v else undefined)\<rightarrow> s"
apply (case_tac "s", case_tac "a = None")
by (auto intro!: eval.Lit)
lemma SkipI [intro!]: "G\<turnstile>s \<midarrow>Skip\<rightarrow> s"
apply (case_tac "s", case_tac "a = None")
by (auto intro!: eval.Skip)
lemma ExprI: "G\<turnstile>s \<midarrow>e-\<succ>v\<rightarrow> s' \<Longrightarrow> G\<turnstile>s \<midarrow>Expr e\<rightarrow> s'"
apply (case_tac "s", case_tac "a = None")
by (auto intro!: eval.Expr)
lemma CompI: "\<lbrakk>G\<turnstile>s \<midarrow>c1\<rightarrow> s1; G\<turnstile>s1 \<midarrow>c2\<rightarrow> s2\<rbrakk> \<Longrightarrow> G\<turnstile>s \<midarrow>c1;; c2\<rightarrow> s2"
apply (case_tac "s", case_tac "a = None")
by (auto intro!: eval.Comp)
lemma CondI:
"\<And>s1. \<lbrakk>G\<turnstile>s \<midarrow>e-\<succ>b\<rightarrow> s1; G\<turnstile>s1 \<midarrow>(if the_Bool b then e1 else e2)-\<succ>v\<rightarrow> s2\<rbrakk> \<Longrightarrow>
G\<turnstile>s \<midarrow>e ? e1 : e2-\<succ>(if normal s1 then v else undefined)\<rightarrow> s2"
apply (case_tac "s", case_tac "a = None")
by (auto intro!: eval.Cond)
lemma IfI: "\<lbrakk>G\<turnstile>s \<midarrow>e-\<succ>v\<rightarrow> s1; G\<turnstile>s1 \<midarrow>(if the_Bool v then c1 else c2)\<rightarrow> s2\<rbrakk>
\<Longrightarrow> G\<turnstile>s \<midarrow>If(e) c1 Else c2\<rightarrow> s2"
apply (case_tac "s", case_tac "a = None")
by (auto intro!: eval.If)
lemma MethdI: "G\<turnstile>s \<midarrow>body G C sig-\<succ>v\<rightarrow> s'
\<Longrightarrow> G\<turnstile>s \<midarrow>Methd C sig-\<succ>v\<rightarrow> s'"
apply (case_tac "s", case_tac "a = None")
by (auto intro!: eval.Methd)
lemma eval_Call:
"\<lbrakk>G\<turnstile>Norm s0 \<midarrow>e-\<succ>a'\<rightarrow> s1; G\<turnstile>s1 \<midarrow>ps\<doteq>\<succ>pvs\<rightarrow> s2;
D = invocation_declclass G mode (store s2) a' statT \<lparr>name=mn,parTs=pTs\<rparr>;
s3 = init_lvars G D \<lparr>name=mn,parTs=pTs\<rparr> mode a' pvs s2;
s3' = check_method_access G accC statT mode \<lparr>name=mn,parTs=pTs\<rparr> a' s3;
G\<turnstile>s3'\<midarrow>Methd D \<lparr>name=mn,parTs=pTs\<rparr>-\<succ> v\<rightarrow> s4;
s4' = restore_lvars s2 s4\<rbrakk> \<Longrightarrow>
G\<turnstile>Norm s0 \<midarrow>{accC,statT,mode}e\<cdot>mn({pTs}ps)-\<succ>v\<rightarrow> s4'"
apply (drule eval.Call, assumption)
apply (rule HOL.refl)
apply simp+
done
lemma eval_Init:
"\<lbrakk>if inited C (globs s0) then s3 = Norm s0
else G\<turnstile>Norm (init_class_obj G C s0)
\<midarrow>(if C = Object then Skip else Init (super (the (class G C))))\<rightarrow> s1 \<and>
G\<turnstile>set_lvars Map.empty s1 \<midarrow>(init (the (class G C)))\<rightarrow> s2 \<and>
s3 = restore_lvars s1 s2\<rbrakk> \<Longrightarrow>
G\<turnstile>Norm s0 \<midarrow>Init C\<rightarrow> s3"
apply (rule eval.Init)
apply auto
done
lemma init_done: "initd C s \<Longrightarrow> G\<turnstile>s \<midarrow>Init C\<rightarrow> s"
apply (case_tac "s", simp)
apply (case_tac "a")
apply safe
apply (rule eval_Init)
apply auto
done
lemma eval_StatRef:
"G\<turnstile>s \<midarrow>StatRef rt-\<succ>(if abrupt s=None then Null else undefined)\<rightarrow> s"
apply (case_tac "s", simp)
apply (case_tac "a = None")
apply (auto del: eval.Abrupt intro!: eval.intros)
done
lemma SkipD [dest!]: "G\<turnstile>s \<midarrow>Skip\<rightarrow> s' \<Longrightarrow> s' = s"
apply (erule eval_cases)
by auto
lemma Skip_eq [simp]: "G\<turnstile>s \<midarrow>Skip\<rightarrow> s' = (s = s')"
by auto
(*unused*)
lemma init_retains_locals [rule_format (no_asm)]: "G\<turnstile>s \<midarrow>t\<succ>\<rightarrow> (w,s') \<Longrightarrow>
(\<forall>C. t=In1r (Init C) \<longrightarrow> locals (store s) = locals (store s'))"
apply (erule eval.induct)
apply (simp (no_asm_use) split del: if_split_asm option.split_asm)+
apply auto
done
lemma halloc_xcpt [dest!]:
"\<And>s'. G\<turnstile>(Some xc,s) \<midarrow>halloc oi\<succ>a\<rightarrow> s' \<Longrightarrow> s'=(Some xc,s)"
apply (erule_tac halloc_elim_cases)
by auto
(*
G\<turnstile>(x,(h,l)) \<midarrow>e\<succ>v\<rightarrow> (x',(h',l'))) \<Longrightarrow> l This = l' This"
G\<turnstile>(x,(h,l)) \<midarrow>s \<rightarrow> (x',(h',l'))) \<Longrightarrow> l This = l' This"
*)
lemma eval_Methd:
"G\<turnstile>s \<midarrow>In1l(body G C sig)\<succ>\<rightarrow> (w,s')
\<Longrightarrow> G\<turnstile>s \<midarrow>In1l(Methd C sig)\<succ>\<rightarrow> (w,s')"
apply (case_tac "s")
apply (case_tac "a")
apply clarsimp+
apply (erule eval.Methd)
apply (drule eval_abrupt_lemma)
apply force
done
lemma eval_Body: "\<lbrakk>G\<turnstile>Norm s0 \<midarrow>Init D\<rightarrow> s1; G\<turnstile>s1 \<midarrow>c\<rightarrow> s2;
res=the (locals (store s2) Result);
s3 = (if (\<exists> l. abrupt s2 = Some (Jump (Break l)) \<or>
abrupt s2 = Some (Jump (Cont l)))
then abupd (\<lambda> x. Some (Error CrossMethodJump)) s2
else s2);
s4=abupd (absorb Ret) s3\<rbrakk> \<Longrightarrow>
G\<turnstile>Norm s0 \<midarrow>Body D c-\<succ>res\<rightarrow>s4"
by (auto elim: eval.Body)
lemma eval_binop_arg2_indep:
"\<not> need_second_arg binop v1 \<Longrightarrow> eval_binop binop v1 x = eval_binop binop v1 y"
by (cases binop)
(simp_all add: need_second_arg_def)
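text \<open>A concrete instance, as a sketch. It assumes the short-circuit operator
\<open>CondAnd\<close>, the value constructor \<open>Bool\<close> and the usual Bali definition of
\<open>need_second_arg\<close> from the term and value language (not shown in this excerpt):
if the first argument of \<open>CondAnd\<close> is \<open>Bool False\<close>, the second argument
cannot influence the result.\<close>
lemma "eval_binop CondAnd (Bool False) x = eval_binop CondAnd (Bool False) y"
  by (rule eval_binop_arg2_indep) (simp add: need_second_arg_def)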
lemma eval_BinOp_arg2_indepI:
assumes eval_e1: "G\<turnstile>Norm s0 \<midarrow>e1-\<succ>v1\<rightarrow> s1" and
no_need: "\<not> need_second_arg binop v1"
shows "G\<turnstile>Norm s0 \<midarrow>BinOp binop e1 e2-\<succ>(eval_binop binop v1 v2)\<rightarrow> s1"
(is "?EvalBinOp v2")
proof -
from eval_e1
have "?EvalBinOp Unit"
by (rule eval.BinOp)
(simp add: no_need)
moreover
from no_need
have "eval_binop binop v1 Unit = eval_binop binop v1 v2"
by (simp add: eval_binop_arg2_indep)
ultimately
show ?thesis
by simp
qed
subsubsection "single valued"
lemma unique_halloc [rule_format (no_asm)]:
"G\<turnstile>s \<midarrow>halloc oi\<succ>a \<rightarrow> s' \<Longrightarrow> G\<turnstile>s \<midarrow>halloc oi\<succ>a' \<rightarrow> s'' \<longrightarrow> a' = a \<and> s'' = s'"
apply (erule halloc.induct)
apply (auto elim!: halloc_elim_cases split del: if_split if_split_asm)
apply (drule trans [THEN sym], erule sym)
defer
apply (drule trans [THEN sym], erule sym)
apply auto
done
lemma single_valued_halloc:
"single_valued {((s,oi),(a,s')). G\<turnstile>s \<midarrow>halloc oi\<succ>a \<rightarrow> s'}"
apply (unfold single_valued_def)
by (clarsimp, drule (1) unique_halloc, auto)
lemma unique_sxalloc [rule_format (no_asm)]:
"G\<turnstile>s \<midarrow>sxalloc\<rightarrow> s' \<Longrightarrow> G\<turnstile>s \<midarrow>sxalloc\<rightarrow> s'' \<longrightarrow> s'' = s'"
apply (erule sxalloc.induct)
apply (auto dest: unique_halloc elim!: sxalloc_elim_cases
split del: if_split if_split_asm)
done
lemma single_valued_sxalloc: "single_valued {(s,s'). G\<turnstile>s \<midarrow>sxalloc\<rightarrow> s'}"
apply (unfold single_valued_def)
apply (blast dest: unique_sxalloc)
done
lemma split_pairD: "(x,y) = p \<Longrightarrow> x = fst p & y = snd p"
by auto
lemma unique_eval [rule_format (no_asm)]:
"G\<turnstile>s \<midarrow>t\<succ>\<rightarrow> (w, s') \<Longrightarrow> (\<forall>w' s''. G\<turnstile>s \<midarrow>t\<succ>\<rightarrow> (w', s'') \<longrightarrow> w' = w \<and> s'' = s')"
apply (erule eval_induct)
apply (tactic \<open>ALLGOALS (EVERY'
[strip_tac \<^context>, rotate_tac ~1, eresolve_tac \<^context> @{thms eval_elim_cases}])\<close>)
(* 31 subgoals *)
prefer 28 (* Try *)
apply (simp (no_asm_use) only: split: if_split_asm)
(* 34 subgoals *)
prefer 30 (* Init *)
apply (case_tac "inited C (globs s0)", (simp only: if_True if_False simp_thms)+)
prefer 26 (* While *)
apply (simp (no_asm_use) only: split: if_split_asm, blast)
(* 36 subgoals *)
apply (blast dest: unique_sxalloc unique_halloc split_pairD)+
done
(* unused *)
lemma single_valued_eval:
"single_valued {((s, t), (v, s')). G\<turnstile>s \<midarrow>t\<succ>\<rightarrow> (v, s')}"
apply (unfold single_valued_def)
by (clarify, drule (1) unique_eval, auto)
end
diff --git a/src/HOL/Bali/Evaln.thy b/src/HOL/Bali/Evaln.thy
--- a/src/HOL/Bali/Evaln.thy
+++ b/src/HOL/Bali/Evaln.thy
@@ -1,832 +1,832 @@
(* Title: HOL/Bali/Evaln.thy
Author: David von Oheimb and Norbert Schirmer
*)
subsection \<open>Operational evaluation (big-step) semantics of Java expressions and
statements
\<close>
theory Evaln imports TypeSafe begin
text \<open>
A variant of the \<^term>\<open>eval\<close> relation with a counter for bounded recursion depth.
In principle \<^term>\<open>evaln\<close> could replace \<^term>\<open>eval\<close>.
Validity of the axiomatic semantics builds on \<^term>\<open>evaln\<close>.
For recursive method calls the axiomatic semantics rule assumes the method to be
ok in order to derive a proof for the body. To prove the method rule sound we need to
perform induction on the recursion depth.
For the completeness proof of the axiomatic semantics the notion of the most
general formula is used. The most general formula currently builds on the
ordinary evaluation relation \<^term>\<open>eval\<close>.
So we sometimes have to switch between \<^term>\<open>evaln\<close> and \<^term>\<open>eval\<close> and vice
versa. To make this switch easy, \<^term>\<open>evaln\<close> also performs all the technical
accessibility tests \<^term>\<open>check_field_access\<close> and \<^term>\<open>check_method_access\<close>,
just like \<^term>\<open>eval\<close>.
If it omitted them, \<^term>\<open>evaln\<close> and \<^term>\<open>eval\<close> would only be equivalent
for well-typed and definitely assigned terms.
\<close>
inductive
evaln :: "[prog, state, term, nat, vals, state] \<Rightarrow> bool"
("_\<turnstile>_ \<midarrow>_\<succ>\<midarrow>_\<rightarrow> '(_, _')" [61,61,80,61,0,0] 60)
and evarn :: "[prog, state, var, vvar, nat, state] \<Rightarrow> bool"
("_\<turnstile>_ \<midarrow>_=\<succ>_\<midarrow>_\<rightarrow> _" [61,61,90,61,61,61] 60)
and eval_n:: "[prog, state, expr, val, nat, state] \<Rightarrow> bool"
("_\<turnstile>_ \<midarrow>_-\<succ>_\<midarrow>_\<rightarrow> _" [61,61,80,61,61,61] 60)
and evalsn :: "[prog, state, expr list, val list, nat, state] \<Rightarrow> bool"
("_\<turnstile>_ \<midarrow>_\<doteq>\<succ>_\<midarrow>_\<rightarrow> _" [61,61,61,61,61,61] 60)
and execn :: "[prog, state, stmt, nat, state] \<Rightarrow> bool"
("_\<turnstile>_ \<midarrow>_\<midarrow>_\<rightarrow> _" [61,61,65, 61,61] 60)
for G :: prog
where
"G\<turnstile>s \<midarrow>c \<midarrow>n\<rightarrow> s' \<equiv> G\<turnstile>s \<midarrow>In1r c\<succ>\<midarrow>n\<rightarrow> (\<diamondsuit> , s')"
| "G\<turnstile>s \<midarrow>e-\<succ>v \<midarrow>n\<rightarrow> s' \<equiv> G\<turnstile>s \<midarrow>In1l e\<succ>\<midarrow>n\<rightarrow> (In1 v , s')"
| "G\<turnstile>s \<midarrow>e=\<succ>vf \<midarrow>n\<rightarrow> s' \<equiv> G\<turnstile>s \<midarrow>In2 e\<succ>\<midarrow>n\<rightarrow> (In2 vf, s')"
| "G\<turnstile>s \<midarrow>e\<doteq>\<succ>v \<midarrow>n\<rightarrow> s' \<equiv> G\<turnstile>s \<midarrow>In3 e\<succ>\<midarrow>n\<rightarrow> (In3 v , s')"
\<comment> \<open>propagation of abrupt completion\<close>
| Abrupt: "G\<turnstile>(Some xc,s) \<midarrow>t\<succ>\<midarrow>n\<rightarrow> (undefined3 t,(Some xc,s))"
\<comment> \<open>evaluation of variables\<close>
| LVar: "G\<turnstile>Norm s \<midarrow>LVar vn=\<succ>lvar vn s\<midarrow>n\<rightarrow> Norm s"
| FVar: "\<lbrakk>G\<turnstile>Norm s0 \<midarrow>Init statDeclC\<midarrow>n\<rightarrow> s1; G\<turnstile>s1 \<midarrow>e-\<succ>a\<midarrow>n\<rightarrow> s2;
(v,s2') = fvar statDeclC stat fn a s2;
s3 = check_field_access G accC statDeclC fn stat a s2'\<rbrakk> \<Longrightarrow>
G\<turnstile>Norm s0 \<midarrow>{accC,statDeclC,stat}e..fn=\<succ>v\<midarrow>n\<rightarrow> s3"
| AVar: "\<lbrakk>G\<turnstile> Norm s0 \<midarrow>e1-\<succ>a\<midarrow>n\<rightarrow> s1 ; G\<turnstile>s1 \<midarrow>e2-\<succ>i\<midarrow>n\<rightarrow> s2;
(v,s2') = avar G i a s2\<rbrakk> \<Longrightarrow>
G\<turnstile>Norm s0 \<midarrow>e1.[e2]=\<succ>v\<midarrow>n\<rightarrow> s2'"
\<comment> \<open>evaluation of expressions\<close>
| NewC: "\<lbrakk>G\<turnstile>Norm s0 \<midarrow>Init C\<midarrow>n\<rightarrow> s1;
G\<turnstile> s1 \<midarrow>halloc (CInst C)\<succ>a\<rightarrow> s2\<rbrakk> \<Longrightarrow>
G\<turnstile>Norm s0 \<midarrow>NewC C-\<succ>Addr a\<midarrow>n\<rightarrow> s2"
| NewA: "\<lbrakk>G\<turnstile>Norm s0 \<midarrow>init_comp_ty T\<midarrow>n\<rightarrow> s1; G\<turnstile>s1 \<midarrow>e-\<succ>i'\<midarrow>n\<rightarrow> s2;
G\<turnstile>abupd (check_neg i') s2 \<midarrow>halloc (Arr T (the_Intg i'))\<succ>a\<rightarrow> s3\<rbrakk> \<Longrightarrow>
G\<turnstile>Norm s0 \<midarrow>New T[e]-\<succ>Addr a\<midarrow>n\<rightarrow> s3"
| Cast: "\<lbrakk>G\<turnstile>Norm s0 \<midarrow>e-\<succ>v\<midarrow>n\<rightarrow> s1;
s2 = abupd (raise_if (\<not>G,snd s1\<turnstile>v fits T) ClassCast) s1\<rbrakk> \<Longrightarrow>
G\<turnstile>Norm s0 \<midarrow>Cast T e-\<succ>v\<midarrow>n\<rightarrow> s2"
| Inst: "\<lbrakk>G\<turnstile>Norm s0 \<midarrow>e-\<succ>v\<midarrow>n\<rightarrow> s1;
b = (v\<noteq>Null \<and> G,store s1\<turnstile>v fits RefT T)\<rbrakk> \<Longrightarrow>
G\<turnstile>Norm s0 \<midarrow>e InstOf T-\<succ>Bool b\<midarrow>n\<rightarrow> s1"
| Lit: "G\<turnstile>Norm s \<midarrow>Lit v-\<succ>v\<midarrow>n\<rightarrow> Norm s"
| UnOp: "\<lbrakk>G\<turnstile>Norm s0 \<midarrow>e-\<succ>v\<midarrow>n\<rightarrow> s1\<rbrakk>
\<Longrightarrow> G\<turnstile>Norm s0 \<midarrow>UnOp unop e-\<succ>(eval_unop unop v)\<midarrow>n\<rightarrow> s1"
| BinOp: "\<lbrakk>G\<turnstile>Norm s0 \<midarrow>e1-\<succ>v1\<midarrow>n\<rightarrow> s1;
G\<turnstile>s1 \<midarrow>(if need_second_arg binop v1 then (In1l e2) else (In1r Skip))
\<succ>\<midarrow>n\<rightarrow> (In1 v2,s2)\<rbrakk>
\<Longrightarrow> G\<turnstile>Norm s0 \<midarrow>BinOp binop e1 e2-\<succ>(eval_binop binop v1 v2)\<midarrow>n\<rightarrow> s2"
| Super: "G\<turnstile>Norm s \<midarrow>Super-\<succ>val_this s\<midarrow>n\<rightarrow> Norm s"
| Acc: "\<lbrakk>G\<turnstile>Norm s0 \<midarrow>va=\<succ>(v,f)\<midarrow>n\<rightarrow> s1\<rbrakk> \<Longrightarrow>
G\<turnstile>Norm s0 \<midarrow>Acc va-\<succ>v\<midarrow>n\<rightarrow> s1"
| Ass: "\<lbrakk>G\<turnstile>Norm s0 \<midarrow>va=\<succ>(w,f)\<midarrow>n\<rightarrow> s1;
G\<turnstile> s1 \<midarrow>e-\<succ>v \<midarrow>n\<rightarrow> s2\<rbrakk> \<Longrightarrow>
G\<turnstile>Norm s0 \<midarrow>va:=e-\<succ>v\<midarrow>n\<rightarrow> assign f v s2"
| Cond: "\<lbrakk>G\<turnstile>Norm s0 \<midarrow>e0-\<succ>b\<midarrow>n\<rightarrow> s1;
G\<turnstile> s1 \<midarrow>(if the_Bool b then e1 else e2)-\<succ>v\<midarrow>n\<rightarrow> s2\<rbrakk> \<Longrightarrow>
G\<turnstile>Norm s0 \<midarrow>e0 ? e1 : e2-\<succ>v\<midarrow>n\<rightarrow> s2"
| Call:
"\<lbrakk>G\<turnstile>Norm s0 \<midarrow>e-\<succ>a'\<midarrow>n\<rightarrow> s1; G\<turnstile>s1 \<midarrow>args\<doteq>\<succ>vs\<midarrow>n\<rightarrow> s2;
D = invocation_declclass G mode (store s2) a' statT \<lparr>name=mn,parTs=pTs\<rparr>;
s3=init_lvars G D \<lparr>name=mn,parTs=pTs\<rparr> mode a' vs s2;
s3' = check_method_access G accC statT mode \<lparr>name=mn,parTs=pTs\<rparr> a' s3;
G\<turnstile>s3'\<midarrow>Methd D \<lparr>name=mn,parTs=pTs\<rparr>-\<succ>v\<midarrow>n\<rightarrow> s4
\<rbrakk>
\<Longrightarrow>
G\<turnstile>Norm s0 \<midarrow>{accC,statT,mode}e\<cdot>mn({pTs}args)-\<succ>v\<midarrow>n\<rightarrow> (restore_lvars s2 s4)"
| Methd:"\<lbrakk>G\<turnstile>Norm s0 \<midarrow>body G D sig-\<succ>v\<midarrow>n\<rightarrow> s1\<rbrakk> \<Longrightarrow>
G\<turnstile>Norm s0 \<midarrow>Methd D sig-\<succ>v\<midarrow>Suc n\<rightarrow> s1"
| Body: "\<lbrakk>G\<turnstile>Norm s0\<midarrow>Init D\<midarrow>n\<rightarrow> s1; G\<turnstile>s1 \<midarrow>c\<midarrow>n\<rightarrow> s2;
s3 = (if (\<exists> l. abrupt s2 = Some (Jump (Break l)) \<or>
abrupt s2 = Some (Jump (Cont l)))
then abupd (\<lambda> x. Some (Error CrossMethodJump)) s2
else s2)\<rbrakk>\<Longrightarrow>
G\<turnstile>Norm s0 \<midarrow>Body D c
-\<succ>the (locals (store s2) Result)\<midarrow>n\<rightarrow>abupd (absorb Ret) s3"
\<comment> \<open>evaluation of expression lists\<close>
| Nil:
"G\<turnstile>Norm s0 \<midarrow>[]\<doteq>\<succ>[]\<midarrow>n\<rightarrow> Norm s0"
| Cons: "\<lbrakk>G\<turnstile>Norm s0 \<midarrow>e -\<succ> v \<midarrow>n\<rightarrow> s1;
G\<turnstile> s1 \<midarrow>es\<doteq>\<succ>vs\<midarrow>n\<rightarrow> s2\<rbrakk> \<Longrightarrow>
G\<turnstile>Norm s0 \<midarrow>e#es\<doteq>\<succ>v#vs\<midarrow>n\<rightarrow> s2"
\<comment> \<open>execution of statements\<close>
| Skip: "G\<turnstile>Norm s \<midarrow>Skip\<midarrow>n\<rightarrow> Norm s"
| Expr: "\<lbrakk>G\<turnstile>Norm s0 \<midarrow>e-\<succ>v\<midarrow>n\<rightarrow> s1\<rbrakk> \<Longrightarrow>
G\<turnstile>Norm s0 \<midarrow>Expr e\<midarrow>n\<rightarrow> s1"
| Lab: "\<lbrakk>G\<turnstile>Norm s0 \<midarrow>c \<midarrow>n\<rightarrow> s1\<rbrakk> \<Longrightarrow>
G\<turnstile>Norm s0 \<midarrow>l\<bullet> c\<midarrow>n\<rightarrow> abupd (absorb l) s1"
| Comp: "\<lbrakk>G\<turnstile>Norm s0 \<midarrow>c1 \<midarrow>n\<rightarrow> s1;
G\<turnstile> s1 \<midarrow>c2 \<midarrow>n\<rightarrow> s2\<rbrakk> \<Longrightarrow>
G\<turnstile>Norm s0 \<midarrow>c1;; c2\<midarrow>n\<rightarrow> s2"
| If: "\<lbrakk>G\<turnstile>Norm s0 \<midarrow>e-\<succ>b\<midarrow>n\<rightarrow> s1;
G\<turnstile> s1\<midarrow>(if the_Bool b then c1 else c2)\<midarrow>n\<rightarrow> s2\<rbrakk> \<Longrightarrow>
G\<turnstile>Norm s0 \<midarrow>If(e) c1 Else c2 \<midarrow>n\<rightarrow> s2"
| Loop: "\<lbrakk>G\<turnstile>Norm s0 \<midarrow>e-\<succ>b\<midarrow>n\<rightarrow> s1;
if the_Bool b
then (G\<turnstile>s1 \<midarrow>c\<midarrow>n\<rightarrow> s2 \<and>
G\<turnstile>(abupd (absorb (Cont l)) s2) \<midarrow>l\<bullet> While(e) c\<midarrow>n\<rightarrow> s3)
else s3 = s1\<rbrakk> \<Longrightarrow>
G\<turnstile>Norm s0 \<midarrow>l\<bullet> While(e) c\<midarrow>n\<rightarrow> s3"
| Jmp: "G\<turnstile>Norm s \<midarrow>Jmp j\<midarrow>n\<rightarrow> (Some (Jump j), s)"
| Throw:"\<lbrakk>G\<turnstile>Norm s0 \<midarrow>e-\<succ>a'\<midarrow>n\<rightarrow> s1\<rbrakk> \<Longrightarrow>
G\<turnstile>Norm s0 \<midarrow>Throw e\<midarrow>n\<rightarrow> abupd (throw a') s1"
| Try: "\<lbrakk>G\<turnstile>Norm s0 \<midarrow>c1\<midarrow>n\<rightarrow> s1; G\<turnstile>s1 \<midarrow>sxalloc\<rightarrow> s2;
if G,s2\<turnstile>catch tn then G\<turnstile>new_xcpt_var vn s2 \<midarrow>c2\<midarrow>n\<rightarrow> s3 else s3 = s2\<rbrakk>
\<Longrightarrow>
G\<turnstile>Norm s0 \<midarrow>Try c1 Catch(tn vn) c2\<midarrow>n\<rightarrow> s3"
| Fin: "\<lbrakk>G\<turnstile>Norm s0 \<midarrow>c1\<midarrow>n\<rightarrow> (x1,s1);
G\<turnstile>Norm s1 \<midarrow>c2\<midarrow>n\<rightarrow> s2;
s3=(if (\<exists> err. x1=Some (Error err))
then (x1,s1)
else abupd (abrupt_if (x1\<noteq>None) x1) s2)\<rbrakk> \<Longrightarrow>
G\<turnstile>Norm s0 \<midarrow>c1 Finally c2\<midarrow>n\<rightarrow> s3"
| Init: "\<lbrakk>the (class G C) = c;
if inited C (globs s0) then s3 = Norm s0
else (G\<turnstile>Norm (init_class_obj G C s0)
\<midarrow>(if C = Object then Skip else Init (super c))\<midarrow>n\<rightarrow> s1 \<and>
G\<turnstile>set_lvars Map.empty s1 \<midarrow>init c\<midarrow>n\<rightarrow> s2 \<and>
s3 = restore_lvars s1 s2)\<rbrakk>
\<Longrightarrow>
G\<turnstile>Norm s0 \<midarrow>Init C\<midarrow>n\<rightarrow> s3"
monos
if_bool_eq_conj
declare if_split [split del] if_split_asm [split del]
option.split [split del] option.split_asm [split del]
not_None_eq [simp del]
split_paired_All [simp del] split_paired_Ex [simp del]
setup \<open>map_theory_simpset (fn ctxt => ctxt delloop "split_all_tac")\<close>
inductive_cases evaln_cases: "G\<turnstile>s \<midarrow>t\<succ>\<midarrow>n\<rightarrow> (v, s')"
inductive_cases evaln_elim_cases:
"G\<turnstile>(Some xc, s) \<midarrow>t \<succ>\<midarrow>n\<rightarrow> (v, s')"
"G\<turnstile>Norm s \<midarrow>In1r Skip \<succ>\<midarrow>n\<rightarrow> (x, s')"
"G\<turnstile>Norm s \<midarrow>In1r (Jmp j) \<succ>\<midarrow>n\<rightarrow> (x, s')"
"G\<turnstile>Norm s \<midarrow>In1r (l\<bullet> c) \<succ>\<midarrow>n\<rightarrow> (x, s')"
"G\<turnstile>Norm s \<midarrow>In3 ([]) \<succ>\<midarrow>n\<rightarrow> (v, s')"
"G\<turnstile>Norm s \<midarrow>In3 (e#es) \<succ>\<midarrow>n\<rightarrow> (v, s')"
"G\<turnstile>Norm s \<midarrow>In1l (Lit w) \<succ>\<midarrow>n\<rightarrow> (v, s')"
"G\<turnstile>Norm s \<midarrow>In1l (UnOp unop e) \<succ>\<midarrow>n\<rightarrow> (v, s')"
"G\<turnstile>Norm s \<midarrow>In1l (BinOp binop e1 e2) \<succ>\<midarrow>n\<rightarrow> (v, s')"
"G\<turnstile>Norm s \<midarrow>In2 (LVar vn) \<succ>\<midarrow>n\<rightarrow> (v, s')"
"G\<turnstile>Norm s \<midarrow>In1l (Cast T e) \<succ>\<midarrow>n\<rightarrow> (v, s')"
"G\<turnstile>Norm s \<midarrow>In1l (e InstOf T) \<succ>\<midarrow>n\<rightarrow> (v, s')"
"G\<turnstile>Norm s \<midarrow>In1l (Super) \<succ>\<midarrow>n\<rightarrow> (v, s')"
"G\<turnstile>Norm s \<midarrow>In1l (Acc va) \<succ>\<midarrow>n\<rightarrow> (v, s')"
"G\<turnstile>Norm s \<midarrow>In1r (Expr e) \<succ>\<midarrow>n\<rightarrow> (x, s')"
"G\<turnstile>Norm s \<midarrow>In1r (c1;; c2) \<succ>\<midarrow>n\<rightarrow> (x, s')"
"G\<turnstile>Norm s \<midarrow>In1l (Methd C sig) \<succ>\<midarrow>n\<rightarrow> (x, s')"
"G\<turnstile>Norm s \<midarrow>In1l (Body D c) \<succ>\<midarrow>n\<rightarrow> (x, s')"
"G\<turnstile>Norm s \<midarrow>In1l (e0 ? e1 : e2) \<succ>\<midarrow>n\<rightarrow> (v, s')"
"G\<turnstile>Norm s \<midarrow>In1r (If(e) c1 Else c2) \<succ>\<midarrow>n\<rightarrow> (x, s')"
"G\<turnstile>Norm s \<midarrow>In1r (l\<bullet> While(e) c) \<succ>\<midarrow>n\<rightarrow> (x, s')"
"G\<turnstile>Norm s \<midarrow>In1r (c1 Finally c2) \<succ>\<midarrow>n\<rightarrow> (x, s')"
"G\<turnstile>Norm s \<midarrow>In1r (Throw e) \<succ>\<midarrow>n\<rightarrow> (x, s')"
"G\<turnstile>Norm s \<midarrow>In1l (NewC C) \<succ>\<midarrow>n\<rightarrow> (v, s')"
"G\<turnstile>Norm s \<midarrow>In1l (New T[e]) \<succ>\<midarrow>n\<rightarrow> (v, s')"
"G\<turnstile>Norm s \<midarrow>In1l (Ass va e) \<succ>\<midarrow>n\<rightarrow> (v, s')"
"G\<turnstile>Norm s \<midarrow>In1r (Try c1 Catch(tn vn) c2) \<succ>\<midarrow>n\<rightarrow> (x, s')"
"G\<turnstile>Norm s \<midarrow>In2 ({accC,statDeclC,stat}e..fn) \<succ>\<midarrow>n\<rightarrow> (v, s')"
"G\<turnstile>Norm s \<midarrow>In2 (e1.[e2]) \<succ>\<midarrow>n\<rightarrow> (v, s')"
"G\<turnstile>Norm s \<midarrow>In1l ({accC,statT,mode}e\<cdot>mn({pT}p)) \<succ>\<midarrow>n\<rightarrow> (v, s')"
"G\<turnstile>Norm s \<midarrow>In1r (Init C) \<succ>\<midarrow>n\<rightarrow> (x, s')"
declare if_split [split] if_split_asm [split]
option.split [split] option.split_asm [split]
not_None_eq [simp]
split_paired_All [simp] split_paired_Ex [simp]
declaration \<open>K (Simplifier.map_ss (fn ss => ss addloop ("split_all_tac", split_all_tac)))\<close>
lemma evaln_Inj_elim: "G\<turnstile>s \<midarrow>t\<succ>\<midarrow>n\<rightarrow> (w,s') \<Longrightarrow> case t of In1 ec \<Rightarrow>
(case ec of Inl e \<Rightarrow> (\<exists>v. w = In1 v) | Inr c \<Rightarrow> w = \<diamondsuit>)
| In2 e \<Rightarrow> (\<exists>v. w = In2 v) | In3 e \<Rightarrow> (\<exists>v. w = In3 v)"
apply (erule evaln_cases , auto)
apply (induct_tac "t")
apply (rename_tac a, induct_tac "a")
apply auto
done
text \<open>The following simplification procedures set up the proper injections of
terms and their corresponding values in the evaluation relation:
E.g. an expression
(injection \<^term>\<open>In1l\<close> into terms) always evaluates to ordinary values
(injection \<^term>\<open>In1\<close> into generalised values \<^term>\<open>vals\<close>).
\<close>
lemma evaln_expr_eq: "G\<turnstile>s \<midarrow>In1l t\<succ>\<midarrow>n\<rightarrow> (w, s') = (\<exists>v. w=In1 v \<and> G\<turnstile>s \<midarrow>t-\<succ>v \<midarrow>n\<rightarrow> s')"
by (auto, frule evaln_Inj_elim, auto)
lemma evaln_var_eq: "G\<turnstile>s \<midarrow>In2 t\<succ>\<midarrow>n\<rightarrow> (w, s') = (\<exists>vf. w=In2 vf \<and> G\<turnstile>s \<midarrow>t=\<succ>vf\<midarrow>n\<rightarrow> s')"
by (auto, frule evaln_Inj_elim, auto)
lemma evaln_exprs_eq: "G\<turnstile>s \<midarrow>In3 t\<succ>\<midarrow>n\<rightarrow> (w, s') = (\<exists>vs. w=In3 vs \<and> G\<turnstile>s \<midarrow>t\<doteq>\<succ>vs\<midarrow>n\<rightarrow> s')"
by (auto, frule evaln_Inj_elim, auto)
lemma evaln_stmt_eq: "G\<turnstile>s \<midarrow>In1r t\<succ>\<midarrow>n\<rightarrow> (w, s') = (w=\<diamondsuit> \<and> G\<turnstile>s \<midarrow>t \<midarrow>n\<rightarrow> s')"
by (auto, frule evaln_Inj_elim, auto, frule evaln_Inj_elim, auto)
simproc_setup evaln_expr ("G\<turnstile>s \<midarrow>In1l t\<succ>\<midarrow>n\<rightarrow> (w, s')") = \<open>
- fn _ => fn _ => fn ct =>
- (case Thm.term_of ct of
- (_ $ _ $ _ $ _ $ _ $ (Const _ $ _) $ _) => NONE
- | _ => SOME (mk_meta_eq @{thm evaln_expr_eq}))\<close>
-
-simproc_setup evaln_var ("G\<turnstile>s \<midarrow>In2 t\<succ>\<midarrow>n\<rightarrow> (w, s')") = \<open>
- fn _ => fn _ => fn ct =>
+ K (K (fn ct =>
(case Thm.term_of ct of
(_ $ _ $ _ $ _ $ _ $ (Const _ $ _) $ _) => NONE
- | _ => SOME (mk_meta_eq @{thm evaln_var_eq}))\<close>
+ | _ => SOME (mk_meta_eq @{thm evaln_expr_eq}))))\<close>
-simproc_setup evaln_exprs ("G\<turnstile>s \<midarrow>In3 t\<succ>\<midarrow>n\<rightarrow> (w, s')") = \<open>
- fn _ => fn _ => fn ct =>
+simproc_setup evaln_var ("G\<turnstile>s \<midarrow>In2 t\<succ>\<midarrow>n\<rightarrow> (w, s')") = \<open>
+ K (K (fn ct =>
(case Thm.term_of ct of
(_ $ _ $ _ $ _ $ _ $ (Const _ $ _) $ _) => NONE
- | _ => SOME (mk_meta_eq @{thm evaln_exprs_eq}))\<close>
+ | _ => SOME (mk_meta_eq @{thm evaln_var_eq}))))\<close>
+
+simproc_setup evaln_exprs ("G\<turnstile>s \<midarrow>In3 t\<succ>\<midarrow>n\<rightarrow> (w, s')") = \<open>
+ K (K (fn ct =>
+ (case Thm.term_of ct of
+ (_ $ _ $ _ $ _ $ _ $ (Const _ $ _) $ _) => NONE
+ | _ => SOME (mk_meta_eq @{thm evaln_exprs_eq}))))\<close>
simproc_setup evaln_stmt ("G\<turnstile>s \<midarrow>In1r t\<succ>\<midarrow>n\<rightarrow> (w, s')") = \<open>
- fn _ => fn _ => fn ct =>
+ K (K (fn ct =>
(case Thm.term_of ct of
(_ $ _ $ _ $ _ $ _ $ (Const _ $ _) $ _) => NONE
- | _ => SOME (mk_meta_eq @{thm evaln_stmt_eq}))\<close>
+ | _ => SOME (mk_meta_eq @{thm evaln_stmt_eq}))))\<close>
ML \<open>ML_Thms.bind_thms ("evaln_AbruptIs", sum3_instantiate \<^context> @{thm evaln.Abrupt})\<close>
declare evaln_AbruptIs [intro!]
lemma evaln_Callee: "G\<turnstile>Norm s\<midarrow>In1l (Callee l e)\<succ>\<midarrow>n\<rightarrow> (v,s') = False"
proof -
{ fix s t v s'
assume eval: "G\<turnstile>s \<midarrow>t\<succ>\<midarrow>n\<rightarrow> (v,s')" and
normal: "normal s" and
callee: "t=In1l (Callee l e)"
then have "False" by induct auto
}
then show ?thesis
by (cases s') fastforce
qed
lemma evaln_InsInitE: "G\<turnstile>Norm s\<midarrow>In1l (InsInitE c e)\<succ>\<midarrow>n\<rightarrow> (v,s') = False"
proof -
{ fix s t v s'
assume eval: "G\<turnstile>s \<midarrow>t\<succ>\<midarrow>n\<rightarrow> (v,s')" and
normal: "normal s" and
callee: "t=In1l (InsInitE c e)"
then have "False" by induct auto
}
then show ?thesis
by (cases s') fastforce
qed
lemma evaln_InsInitV: "G\<turnstile>Norm s\<midarrow>In2 (InsInitV c w)\<succ>\<midarrow>n\<rightarrow> (v,s') = False"
proof -
{ fix s t v s'
assume eval: "G\<turnstile>s \<midarrow>t\<succ>\<midarrow>n\<rightarrow> (v,s')" and
normal: "normal s" and
callee: "t=In2 (InsInitV c w)"
then have "False" by induct auto
}
then show ?thesis
by (cases s') fastforce
qed
lemma evaln_FinA: "G\<turnstile>Norm s\<midarrow>In1r (FinA a c)\<succ>\<midarrow>n\<rightarrow> (v,s') = False"
proof -
{ fix s t v s'
assume eval: "G\<turnstile>s \<midarrow>t\<succ>\<midarrow>n\<rightarrow> (v,s')" and
normal: "normal s" and
callee: "t=In1r (FinA a c)"
then have "False" by induct auto
}
then show ?thesis
by (cases s') fastforce
qed
lemma evaln_abrupt_lemma: "G\<turnstile>s \<midarrow>e\<succ>\<midarrow>n\<rightarrow> (v,s') \<Longrightarrow>
fst s = Some xc \<longrightarrow> s' = s \<and> v = undefined3 e"
apply (erule evaln_cases , auto)
done
lemma evaln_abrupt:
"\<And>s'. G\<turnstile>(Some xc,s) \<midarrow>e\<succ>\<midarrow>n\<rightarrow> (w,s') = (s' = (Some xc,s) \<and>
w=undefined3 e \<and> G\<turnstile>(Some xc,s) \<midarrow>e\<succ>\<midarrow>n\<rightarrow> (undefined3 e,(Some xc,s)))"
apply auto
apply (frule evaln_abrupt_lemma, auto)+
done
simproc_setup evaln_abrupt ("G\<turnstile>(Some xc,s) \<midarrow>e\<succ>\<midarrow>n\<rightarrow> (w,s')") = \<open>
- fn _ => fn _ => fn ct =>
+ K (K (fn ct =>
(case Thm.term_of ct of
(_ $ _ $ _ $ _ $ _ $ _ $ (Const (\<^const_name>\<open>Pair\<close>, _) $ (Const (\<^const_name>\<open>Some\<close>,_) $ _)$ _))
=> NONE
- | _ => SOME (mk_meta_eq @{thm evaln_abrupt}))
+ | _ => SOME (mk_meta_eq @{thm evaln_abrupt}))))
\<close>
lemma evaln_LitI: "G\<turnstile>s \<midarrow>Lit v-\<succ>(if normal s then v else undefined)\<midarrow>n\<rightarrow> s"
apply (case_tac "s", case_tac "a = None")
by (auto intro!: evaln.Lit)
lemma CondI:
"\<And>s1. \<lbrakk>G\<turnstile>s \<midarrow>e-\<succ>b\<midarrow>n\<rightarrow> s1; G\<turnstile>s1 \<midarrow>(if the_Bool b then e1 else e2)-\<succ>v\<midarrow>n\<rightarrow> s2\<rbrakk> \<Longrightarrow>
G\<turnstile>s \<midarrow>e ? e1 : e2-\<succ>(if normal s1 then v else undefined)\<midarrow>n\<rightarrow> s2"
apply (case_tac "s", case_tac "a = None")
by (auto intro!: evaln.Cond)
lemma evaln_SkipI [intro!]: "G\<turnstile>s \<midarrow>Skip\<midarrow>n\<rightarrow> s"
apply (case_tac "s", case_tac "a = None")
by (auto intro!: evaln.Skip)
lemma evaln_ExprI: "G\<turnstile>s \<midarrow>e-\<succ>v\<midarrow>n\<rightarrow> s' \<Longrightarrow> G\<turnstile>s \<midarrow>Expr e\<midarrow>n\<rightarrow> s'"
apply (case_tac "s", case_tac "a = None")
by (auto intro!: evaln.Expr)
lemma evaln_CompI: "\<lbrakk>G\<turnstile>s \<midarrow>c1\<midarrow>n\<rightarrow> s1; G\<turnstile>s1 \<midarrow>c2\<midarrow>n\<rightarrow> s2\<rbrakk> \<Longrightarrow> G\<turnstile>s \<midarrow>c1;; c2\<midarrow>n\<rightarrow> s2"
apply (case_tac "s", case_tac "a = None")
by (auto intro!: evaln.Comp)
lemma evaln_IfI:
"\<lbrakk>G\<turnstile>s \<midarrow>e-\<succ>v\<midarrow>n\<rightarrow> s1; G\<turnstile>s1 \<midarrow>(if the_Bool v then c1 else c2)\<midarrow>n\<rightarrow> s2\<rbrakk> \<Longrightarrow>
G\<turnstile>s \<midarrow>If(e) c1 Else c2\<midarrow>n\<rightarrow> s2"
apply (case_tac "s", case_tac "a = None")
by (auto intro!: evaln.If)
lemma evaln_SkipD [dest!]: "G\<turnstile>s \<midarrow>Skip\<midarrow>n\<rightarrow> s' \<Longrightarrow> s' = s"
by (erule evaln_cases, auto)
lemma evaln_Skip_eq [simp]: "G\<turnstile>s \<midarrow>Skip\<midarrow>n\<rightarrow> s' = (s = s')"
apply auto
done
subsubsection \<open>evaln implies eval\<close>
lemma evaln_eval:
assumes evaln: "G\<turnstile>s0 \<midarrow>t\<succ>\<midarrow>n\<rightarrow> (v,s1)"
shows "G\<turnstile>s0 \<midarrow>t\<succ>\<rightarrow> (v,s1)"
using evaln
proof (induct)
case (Loop s0 e b n s1 c s2 l s3)
note \<open>G\<turnstile>Norm s0 \<midarrow>e-\<succ>b\<rightarrow> s1\<close>
moreover
have "if the_Bool b
then (G\<turnstile>s1 \<midarrow>c\<rightarrow> s2) \<and>
G\<turnstile>abupd (absorb (Cont l)) s2 \<midarrow>l\<bullet> While(e) c\<rightarrow> s3
else s3 = s1"
using Loop.hyps by simp
ultimately show ?case by (rule eval.Loop)
next
case (Try s0 c1 n s1 s2 C vn c2 s3)
note \<open>G\<turnstile>Norm s0 \<midarrow>c1\<rightarrow> s1\<close>
moreover
note \<open>G\<turnstile>s1 \<midarrow>sxalloc\<rightarrow> s2\<close>
moreover
have "if G,s2\<turnstile>catch C then G\<turnstile>new_xcpt_var vn s2 \<midarrow>c2\<rightarrow> s3 else s3 = s2"
using Try.hyps by simp
ultimately show ?case by (rule eval.Try)
next
case (Init C c s0 s3 n s1 s2)
note \<open>the (class G C) = c\<close>
moreover
have "if inited C (globs s0)
then s3 = Norm s0
else G\<turnstile>Norm ((init_class_obj G C) s0)
\<midarrow>(if C = Object then Skip else Init (super c))\<rightarrow> s1 \<and>
G\<turnstile>(set_lvars Map.empty) s1 \<midarrow>init c\<rightarrow> s2 \<and>
s3 = (set_lvars (locals (store s1))) s2"
using Init.hyps by simp
ultimately show ?case by (rule eval.Init)
qed (rule eval.intros,(assumption+ | assumption?))+
lemma Suc_le_D_lemma: "\<lbrakk>Suc n <= m'; (\<And>m. n <= m \<Longrightarrow> P (Suc m)) \<rbrakk> \<Longrightarrow> P m'"
apply (frule Suc_le_D)
apply fast
done
lemma evaln_nonstrict [rule_format (no_asm), elim]:
"G\<turnstile>s \<midarrow>t\<succ>\<midarrow>n\<rightarrow> (w, s') \<Longrightarrow> \<forall>m. n\<le>m \<longrightarrow> G\<turnstile>s \<midarrow>t\<succ>\<midarrow>m\<rightarrow> (w, s')"
apply (erule evaln.induct)
apply (tactic \<open>ALLGOALS (EVERY' [strip_tac \<^context>,
TRY o eresolve_tac \<^context> @{thms Suc_le_D_lemma},
REPEAT o smp_tac \<^context> 1,
resolve_tac \<^context> @{thms evaln.intros} THEN_ALL_NEW TRY o assume_tac \<^context>])\<close>)
(* 3 subgoals *)
apply (auto split del: if_split)
done
lemmas evaln_nonstrict_Suc = evaln_nonstrict [OF _ le_refl [THEN le_SucI]]
lemma evaln_max2: "\<lbrakk>G\<turnstile>s1 \<midarrow>t1\<succ>\<midarrow>n1\<rightarrow> (w1, s1'); G\<turnstile>s2 \<midarrow>t2\<succ>\<midarrow>n2\<rightarrow> (w2, s2')\<rbrakk> \<Longrightarrow>
G\<turnstile>s1 \<midarrow>t1\<succ>\<midarrow>max n1 n2\<rightarrow> (w1, s1') \<and> G\<turnstile>s2 \<midarrow>t2\<succ>\<midarrow>max n1 n2\<rightarrow> (w2, s2')"
by (fast intro: max.cobounded1 max.cobounded2)
corollary evaln_max2E [consumes 2]:
"\<lbrakk>G\<turnstile>s1 \<midarrow>t1\<succ>\<midarrow>n1\<rightarrow> (w1, s1'); G\<turnstile>s2 \<midarrow>t2\<succ>\<midarrow>n2\<rightarrow> (w2, s2');
\<lbrakk>G\<turnstile>s1 \<midarrow>t1\<succ>\<midarrow>max n1 n2\<rightarrow> (w1, s1');G\<turnstile>s2 \<midarrow>t2\<succ>\<midarrow>max n1 n2\<rightarrow> (w2, s2') \<rbrakk> \<Longrightarrow> P \<rbrakk> \<Longrightarrow> P"
by (drule (1) evaln_max2) simp
lemma evaln_max3:
"\<lbrakk>G\<turnstile>s1 \<midarrow>t1\<succ>\<midarrow>n1\<rightarrow> (w1, s1'); G\<turnstile>s2 \<midarrow>t2\<succ>\<midarrow>n2\<rightarrow> (w2, s2'); G\<turnstile>s3 \<midarrow>t3\<succ>\<midarrow>n3\<rightarrow> (w3, s3')\<rbrakk> \<Longrightarrow>
G\<turnstile>s1 \<midarrow>t1\<succ>\<midarrow>max (max n1 n2) n3\<rightarrow> (w1, s1') \<and>
G\<turnstile>s2 \<midarrow>t2\<succ>\<midarrow>max (max n1 n2) n3\<rightarrow> (w2, s2') \<and>
G\<turnstile>s3 \<midarrow>t3\<succ>\<midarrow>max (max n1 n2) n3\<rightarrow> (w3, s3')"
apply (drule (1) evaln_max2, erule thin_rl)
apply (fast intro!: max.cobounded1 max.cobounded2)
done
corollary evaln_max3E:
"\<lbrakk>G\<turnstile>s1 \<midarrow>t1\<succ>\<midarrow>n1\<rightarrow> (w1, s1'); G\<turnstile>s2 \<midarrow>t2\<succ>\<midarrow>n2\<rightarrow> (w2, s2'); G\<turnstile>s3 \<midarrow>t3\<succ>\<midarrow>n3\<rightarrow> (w3, s3');
\<lbrakk>G\<turnstile>s1 \<midarrow>t1\<succ>\<midarrow>max (max n1 n2) n3\<rightarrow> (w1, s1');
G\<turnstile>s2 \<midarrow>t2\<succ>\<midarrow>max (max n1 n2) n3\<rightarrow> (w2, s2');
G\<turnstile>s3 \<midarrow>t3\<succ>\<midarrow>max (max n1 n2) n3\<rightarrow> (w3, s3')
\<rbrakk> \<Longrightarrow> P
\<rbrakk> \<Longrightarrow> P"
by (drule (2) evaln_max3) simp
lemma le_max3I1: "(n2::nat) \<le> max n1 (max n2 n3)"
proof -
have "n2 \<le> max n2 n3"
by (rule max.cobounded1)
also
have "max n2 n3 \<le> max n1 (max n2 n3)"
by (rule max.cobounded2)
finally
show ?thesis .
qed
lemma le_max3I2: "(n3::nat) \<le> max n1 (max n2 n3)"
proof -
have "n3 \<le> max n2 n3"
by (rule max.cobounded2)
also
have "max n2 n3 \<le> max n1 (max n2 n3)"
by (rule max.cobounded2)
finally
show ?thesis .
qed
declare [[simproc del: wt_expr wt_var wt_exprs wt_stmt]]
subsubsection \<open>eval implies evaln\<close>
lemma eval_evaln:
assumes eval: "G\<turnstile>s0 \<midarrow>t\<succ>\<rightarrow> (v,s1)"
shows "\<exists>n. G\<turnstile>s0 \<midarrow>t\<succ>\<midarrow>n\<rightarrow> (v,s1)"
using eval
proof (induct)
case (Abrupt xc s t)
obtain n where
"G\<turnstile>(Some xc, s) \<midarrow>t\<succ>\<midarrow>n\<rightarrow> (undefined3 t, (Some xc, s))"
by (iprover intro: evaln.Abrupt)
then show ?case ..
next
case Skip
show ?case by (blast intro: evaln.Skip)
next
case (Expr s0 e v s1)
then obtain n where
"G\<turnstile>Norm s0 \<midarrow>e-\<succ>v\<midarrow>n\<rightarrow> s1"
by (iprover)
then have "G\<turnstile>Norm s0 \<midarrow>Expr e\<midarrow>n\<rightarrow> s1"
by (rule evaln.Expr)
then show ?case ..
next
case (Lab s0 c s1 l)
then obtain n where
"G\<turnstile>Norm s0 \<midarrow>c\<midarrow>n\<rightarrow> s1"
by (iprover)
then have "G\<turnstile>Norm s0 \<midarrow>l\<bullet> c\<midarrow>n\<rightarrow> abupd (absorb l) s1"
by (rule evaln.Lab)
then show ?case ..
next
case (Comp s0 c1 s1 c2 s2)
then obtain n1 n2 where
"G\<turnstile>Norm s0 \<midarrow>c1\<midarrow>n1\<rightarrow> s1"
"G\<turnstile>s1 \<midarrow>c2\<midarrow>n2\<rightarrow> s2"
by (iprover)
then have "G\<turnstile>Norm s0 \<midarrow>c1;; c2\<midarrow>max n1 n2\<rightarrow> s2"
by (blast intro: evaln.Comp dest: evaln_max2 )
then show ?case ..
next
case (If s0 e b s1 c1 c2 s2)
then obtain n1 n2 where
"G\<turnstile>Norm s0 \<midarrow>e-\<succ>b\<midarrow>n1\<rightarrow> s1"
"G\<turnstile>s1 \<midarrow>(if the_Bool b then c1 else c2)\<midarrow>n2\<rightarrow> s2"
by (iprover)
then have "G\<turnstile>Norm s0 \<midarrow>If(e) c1 Else c2\<midarrow>max n1 n2\<rightarrow> s2"
by (blast intro: evaln.If dest: evaln_max2)
then show ?case ..
next
case (Loop s0 e b s1 c s2 l s3)
from Loop.hyps obtain n1 where
"G\<turnstile>Norm s0 \<midarrow>e-\<succ>b\<midarrow>n1\<rightarrow> s1"
by (iprover)
moreover from Loop.hyps obtain n2 where
"if the_Bool b
then (G\<turnstile>s1 \<midarrow>c\<midarrow>n2\<rightarrow> s2 \<and>
G\<turnstile>(abupd (absorb (Cont l)) s2)\<midarrow>l\<bullet> While(e) c\<midarrow>n2\<rightarrow> s3)
else s3 = s1"
by simp (iprover intro: evaln_nonstrict max.cobounded1 max.cobounded2)
ultimately
have "G\<turnstile>Norm s0 \<midarrow>l\<bullet> While(e) c\<midarrow>max n1 n2\<rightarrow> s3"
apply -
apply (rule evaln.Loop)
apply (iprover intro: evaln_nonstrict intro: max.cobounded1)
apply (auto intro: evaln_nonstrict intro: max.cobounded2)
done
then show ?case ..
next
case (Jmp s j)
fix n have "G\<turnstile>Norm s \<midarrow>Jmp j\<midarrow>n\<rightarrow> (Some (Jump j), s)"
by (rule evaln.Jmp)
then show ?case ..
next
case (Throw s0 e a s1)
then obtain n where
"G\<turnstile>Norm s0 \<midarrow>e-\<succ>a\<midarrow>n\<rightarrow> s1"
by (iprover)
then have "G\<turnstile>Norm s0 \<midarrow>Throw e\<midarrow>n\<rightarrow> abupd (throw a) s1"
by (rule evaln.Throw)
then show ?case ..
next
case (Try s0 c1 s1 s2 catchC vn c2 s3)
from Try.hyps obtain n1 where
"G\<turnstile>Norm s0 \<midarrow>c1\<midarrow>n1\<rightarrow> s1"
by (iprover)
moreover
note sxalloc = \<open>G\<turnstile>s1 \<midarrow>sxalloc\<rightarrow> s2\<close>
moreover
from Try.hyps obtain n2 where
"if G,s2\<turnstile>catch catchC then G\<turnstile>new_xcpt_var vn s2 \<midarrow>c2\<midarrow>n2\<rightarrow> s3 else s3 = s2"
by fastforce
ultimately
have "G\<turnstile>Norm s0 \<midarrow>Try c1 Catch(catchC vn) c2\<midarrow>max n1 n2\<rightarrow> s3"
by (auto intro!: evaln.Try max.cobounded1 max.cobounded2)
then show ?case ..
next
case (Fin s0 c1 x1 s1 c2 s2 s3)
from Fin obtain n1 n2 where
"G\<turnstile>Norm s0 \<midarrow>c1\<midarrow>n1\<rightarrow> (x1, s1)"
"G\<turnstile>Norm s1 \<midarrow>c2\<midarrow>n2\<rightarrow> s2"
by iprover
moreover
note s3 = \<open>s3 = (if \<exists>err. x1 = Some (Error err)
then (x1, s1)
else abupd (abrupt_if (x1 \<noteq> None) x1) s2)\<close>
ultimately
have
"G\<turnstile>Norm s0 \<midarrow>c1 Finally c2\<midarrow>max n1 n2\<rightarrow> s3"
by (blast intro: evaln.Fin dest: evaln_max2)
then show ?case ..
next
case (Init C c s0 s3 s1 s2)
note cls = \<open>the (class G C) = c\<close>
moreover from Init.hyps obtain n where
"if inited C (globs s0) then s3 = Norm s0
else (G\<turnstile>Norm (init_class_obj G C s0)
\<midarrow>(if C = Object then Skip else Init (super c))\<midarrow>n\<rightarrow> s1 \<and>
G\<turnstile>set_lvars Map.empty s1 \<midarrow>init c\<midarrow>n\<rightarrow> s2 \<and>
s3 = restore_lvars s1 s2)"
by (auto intro: evaln_nonstrict max.cobounded1 max.cobounded2)
ultimately have "G\<turnstile>Norm s0 \<midarrow>Init C\<midarrow>n\<rightarrow> s3"
by (rule evaln.Init)
then show ?case ..
next
case (NewC s0 C s1 a s2)
then obtain n where
"G\<turnstile>Norm s0 \<midarrow>Init C\<midarrow>n\<rightarrow> s1"
by (iprover)
with NewC
have "G\<turnstile>Norm s0 \<midarrow>NewC C-\<succ>Addr a\<midarrow>n\<rightarrow> s2"
by (iprover intro: evaln.NewC)
then show ?case ..
next
case (NewA s0 T s1 e i s2 a s3)
then obtain n1 n2 where
"G\<turnstile>Norm s0 \<midarrow>init_comp_ty T\<midarrow>n1\<rightarrow> s1"
"G\<turnstile>s1 \<midarrow>e-\<succ>i\<midarrow>n2\<rightarrow> s2"
by (iprover)
moreover
note \<open>G\<turnstile>abupd (check_neg i) s2 \<midarrow>halloc Arr T (the_Intg i)\<succ>a\<rightarrow> s3\<close>
ultimately
have "G\<turnstile>Norm s0 \<midarrow>New T[e]-\<succ>Addr a\<midarrow>max n1 n2\<rightarrow> s3"
by (blast intro: evaln.NewA dest: evaln_max2)
then show ?case ..
next
case (Cast s0 e v s1 s2 castT)
then obtain n where
"G\<turnstile>Norm s0 \<midarrow>e-\<succ>v\<midarrow>n\<rightarrow> s1"
by (iprover)
moreover
note \<open>s2 = abupd (raise_if (\<not> G,snd s1\<turnstile>v fits castT) ClassCast) s1\<close>
ultimately
have "G\<turnstile>Norm s0 \<midarrow>Cast castT e-\<succ>v\<midarrow>n\<rightarrow> s2"
by (rule evaln.Cast)
then show ?case ..
next
case (Inst s0 e v s1 b T)
then obtain n where
"G\<turnstile>Norm s0 \<midarrow>e-\<succ>v\<midarrow>n\<rightarrow> s1"
by (iprover)
moreover
note \<open>b = (v \<noteq> Null \<and> G,snd s1\<turnstile>v fits RefT T)\<close>
ultimately
have "G\<turnstile>Norm s0 \<midarrow>e InstOf T-\<succ>Bool b\<midarrow>n\<rightarrow> s1"
by (rule evaln.Inst)
then show ?case ..
next
case (Lit s v)
fix n have "G\<turnstile>Norm s \<midarrow>Lit v-\<succ>v\<midarrow>n\<rightarrow> Norm s"
by (rule evaln.Lit)
then show ?case ..
next
case (UnOp s0 e v s1 unop)
then obtain n where
"G\<turnstile>Norm s0 \<midarrow>e-\<succ>v\<midarrow>n\<rightarrow> s1"
by (iprover)
hence "G\<turnstile>Norm s0 \<midarrow>UnOp unop e-\<succ>eval_unop unop v\<midarrow>n\<rightarrow> s1"
by (rule evaln.UnOp)
then show ?case ..
next
case (BinOp s0 e1 v1 s1 binop e2 v2 s2)
then obtain n1 n2 where
"G\<turnstile>Norm s0 \<midarrow>e1-\<succ>v1\<midarrow>n1\<rightarrow> s1"
"G\<turnstile>s1 \<midarrow>(if need_second_arg binop v1 then In1l e2
else In1r Skip)\<succ>\<midarrow>n2\<rightarrow> (In1 v2, s2)"
by (iprover)
hence "G\<turnstile>Norm s0 \<midarrow>BinOp binop e1 e2-\<succ>(eval_binop binop v1 v2)\<midarrow>max n1 n2
\<rightarrow> s2"
by (blast intro!: evaln.BinOp dest: evaln_max2)
then show ?case ..
next
case (Super s )
fix n have "G\<turnstile>Norm s \<midarrow>Super-\<succ>val_this s\<midarrow>n\<rightarrow> Norm s"
by (rule evaln.Super)
then show ?case ..
next
case (Acc s0 va v f s1)
then obtain n where
"G\<turnstile>Norm s0 \<midarrow>va=\<succ>(v, f)\<midarrow>n\<rightarrow> s1"
by (iprover)
then
have "G\<turnstile>Norm s0 \<midarrow>Acc va-\<succ>v\<midarrow>n\<rightarrow> s1"
by (rule evaln.Acc)
then show ?case ..
next
case (Ass s0 var w f s1 e v s2)
then obtain n1 n2 where
"G\<turnstile>Norm s0 \<midarrow>var=\<succ>(w, f)\<midarrow>n1\<rightarrow> s1"
"G\<turnstile>s1 \<midarrow>e-\<succ>v\<midarrow>n2\<rightarrow> s2"
by (iprover)
then
have "G\<turnstile>Norm s0 \<midarrow>var:=e-\<succ>v\<midarrow>max n1 n2\<rightarrow> assign f v s2"
by (blast intro: evaln.Ass dest: evaln_max2)
then show ?case ..
next
case (Cond s0 e0 b s1 e1 e2 v s2)
then obtain n1 n2 where
"G\<turnstile>Norm s0 \<midarrow>e0-\<succ>b\<midarrow>n1\<rightarrow> s1"
"G\<turnstile>s1 \<midarrow>(if the_Bool b then e1 else e2)-\<succ>v\<midarrow>n2\<rightarrow> s2"
by (iprover)
then
have "G\<turnstile>Norm s0 \<midarrow>e0 ? e1 : e2-\<succ>v\<midarrow>max n1 n2\<rightarrow> s2"
by (blast intro: evaln.Cond dest: evaln_max2)
then show ?case ..
next
case (Call s0 e a' s1 args vs s2 invDeclC mode statT mn pTs' s3 s3' accC' v s4)
then obtain n1 n2 where
"G\<turnstile>Norm s0 \<midarrow>e-\<succ>a'\<midarrow>n1\<rightarrow> s1"
"G\<turnstile>s1 \<midarrow>args\<doteq>\<succ>vs\<midarrow>n2\<rightarrow> s2"
by iprover
moreover
note \<open>invDeclC = invocation_declclass G mode (store s2) a' statT
\<lparr>name=mn,parTs=pTs'\<rparr>\<close>
moreover
note \<open>s3 = init_lvars G invDeclC \<lparr>name=mn,parTs=pTs'\<rparr> mode a' vs s2\<close>
moreover
note \<open>s3'=check_method_access G accC' statT mode \<lparr>name=mn,parTs=pTs'\<rparr> a' s3\<close>
moreover
from Call.hyps
obtain m where
"G\<turnstile>s3' \<midarrow>Methd invDeclC \<lparr>name=mn, parTs=pTs'\<rparr>-\<succ>v\<midarrow>m\<rightarrow> s4"
by iprover
ultimately
have "G\<turnstile>Norm s0 \<midarrow>{accC',statT,mode}e\<cdot>mn( {pTs'}args)-\<succ>v\<midarrow>max n1 (max n2 m)\<rightarrow>
(set_lvars (locals (store s2))) s4"
by (auto intro!: evaln.Call max.cobounded1 le_max3I1 le_max3I2)
thus ?case ..
next
case (Methd s0 D sig v s1)
then obtain n where
"G\<turnstile>Norm s0 \<midarrow>body G D sig-\<succ>v\<midarrow>n\<rightarrow> s1"
by iprover
then have "G\<turnstile>Norm s0 \<midarrow>Methd D sig-\<succ>v\<midarrow>Suc n\<rightarrow> s1"
by (rule evaln.Methd)
then show ?case ..
next
case (Body s0 D s1 c s2 s3)
from Body.hyps obtain n1 n2 where
evaln_init: "G\<turnstile>Norm s0 \<midarrow>Init D\<midarrow>n1\<rightarrow> s1" and
evaln_c: "G\<turnstile>s1 \<midarrow>c\<midarrow>n2\<rightarrow> s2"
by (iprover)
moreover
note \<open>s3 = (if \<exists>l. fst s2 = Some (Jump (Break l)) \<or>
fst s2 = Some (Jump (Cont l))
then abupd (\<lambda>x. Some (Error CrossMethodJump)) s2
else s2)\<close>
ultimately
have
"G\<turnstile>Norm s0 \<midarrow>Body D c-\<succ>the (locals (store s2) Result)\<midarrow>max n1 n2
\<rightarrow> abupd (absorb Ret) s3"
by (iprover intro: evaln.Body dest: evaln_max2)
then show ?case ..
next
case (LVar s vn )
obtain n where
"G\<turnstile>Norm s \<midarrow>LVar vn=\<succ>lvar vn s\<midarrow>n\<rightarrow> Norm s"
by (iprover intro: evaln.LVar)
then show ?case ..
next
case (FVar s0 statDeclC s1 e a s2 v s2' stat fn s3 accC)
then obtain n1 n2 where
"G\<turnstile>Norm s0 \<midarrow>Init statDeclC\<midarrow>n1\<rightarrow> s1"
"G\<turnstile>s1 \<midarrow>e-\<succ>a\<midarrow>n2\<rightarrow> s2"
by iprover
moreover
note \<open>s3 = check_field_access G accC statDeclC fn stat a s2'\<close>
and \<open>(v, s2') = fvar statDeclC stat fn a s2\<close>
ultimately
have "G\<turnstile>Norm s0 \<midarrow>{accC,statDeclC,stat}e..fn=\<succ>v\<midarrow>max n1 n2\<rightarrow> s3"
by (iprover intro: evaln.FVar dest: evaln_max2)
then show ?case ..
next
case (AVar s0 e1 a s1 e2 i s2 v s2')
then obtain n1 n2 where
"G\<turnstile>Norm s0 \<midarrow>e1-\<succ>a\<midarrow>n1\<rightarrow> s1"
"G\<turnstile>s1 \<midarrow>e2-\<succ>i\<midarrow>n2\<rightarrow> s2"
by iprover
moreover
note \<open>(v, s2') = avar G i a s2\<close>
ultimately
have "G\<turnstile>Norm s0 \<midarrow>e1.[e2]=\<succ>v\<midarrow>max n1 n2\<rightarrow> s2'"
by (blast intro!: evaln.AVar dest: evaln_max2)
then show ?case ..
next
case (Nil s0)
show ?case by (iprover intro: evaln.Nil)
next
case (Cons s0 e v s1 es vs s2)
then obtain n1 n2 where
"G\<turnstile>Norm s0 \<midarrow>e-\<succ>v\<midarrow>n1\<rightarrow> s1"
"G\<turnstile>s1 \<midarrow>es\<doteq>\<succ>vs\<midarrow>n2\<rightarrow> s2"
by iprover
then
have "G\<turnstile>Norm s0 \<midarrow>e # es\<doteq>\<succ>v # vs\<midarrow>max n1 n2\<rightarrow> s2"
by (blast intro!: evaln.Cons dest: evaln_max2)
then show ?case ..
qed
end
diff --git a/src/HOL/Bali/WellType.thy b/src/HOL/Bali/WellType.thy
--- a/src/HOL/Bali/WellType.thy
+++ b/src/HOL/Bali/WellType.thy
@@ -1,686 +1,686 @@
(* Title: HOL/Bali/WellType.thy
Author: David von Oheimb
*)
subsection \<open>Well-typedness of Java programs\<close>
theory WellType
imports DeclConcepts
begin
text \<open>
improvements over Java Specification 1.0:
\begin{itemize}
\item methods of Object can be called upon references of interface or array type
\end{itemize}
simplifications:
\begin{itemize}
\item the type rules include all static checks on statements and expressions,
e.g. definedness of names (of parameters, locals, fields, methods)
\end{itemize}
design issues:
\begin{itemize}
\item unified type judgment for statements, variables, expressions,
expression lists
\item statements are typed like expressions with dummy type Void
\item the typing rules take an extra argument that is capable of determining
the dynamic type of objects. Therefore, they can be used for both
checking static types and determining runtime types in transition semantics.
\end{itemize}
\<close>
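text \<open>Editorial sketch (not part of this changeset; the lemma name is purely
  illustrative): with the unified judgement defined further below, a statement is
  just a term of kind \<open>In1r\<close> whose type is the dummy \<open>Inl (PrimT Void)\<close>, so the
  statement judgement \<open>E,dt\<Turnstile>Skip\<Colon>\<surd>\<close> is literally the term judgement proved here:\<close>
lemma unified_judgement_sketch: "E,dt\<Turnstile>In1r Skip\<Colon>Inl (PrimT Void)"
  by (rule wt.Skip)  \<comment> \<open>\<open>wt.Skip\<close> is stated in the sugared form \<open>E,dt\<Turnstile>Skip\<Colon>\<surd>\<close>\<close>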
type_synonym lenv
= "(lname, ty) table" \<comment> \<open>local variables, including This and Result\<close>
record env =
prg:: "prog" \<comment> \<open>program\<close>
cls:: "qtname" \<comment> \<open>current package and class name\<close>
lcl:: "lenv" \<comment> \<open>local environment\<close>
translations
(type) "lenv" <= (type) "(lname, ty) table"
(type) "lenv" <= (type) "lname \<Rightarrow> ty option"
(type) "env" <= (type) "\<lparr>prg::prog,cls::qtname,lcl::lenv\<rparr>"
(type) "env" <= (type) "\<lparr>prg::prog,cls::qtname,lcl::lenv,\<dots>::'a\<rparr>"
abbreviation
pkg :: "env \<Rightarrow> pname" \<comment> \<open>select the current package from an environment\<close>
where "pkg e == pid (cls e)"
subsubsection "Static overloading: maximally specific methods "
type_synonym
emhead = "ref_ty \<times> mhead"
\<comment> \<open>Some mnemonic selectors for emhead\<close>
definition
"declrefT" :: "emhead \<Rightarrow> ref_ty"
where "declrefT = fst"
definition
"mhd" :: "emhead \<Rightarrow> mhead"
where "mhd \<equiv> snd"
lemma declrefT_simp[simp]:"declrefT (r,m) = r"
by (simp add: declrefT_def)
lemma mhd_simp[simp]:"mhd (r,m) = m"
by (simp add: mhd_def)
lemma static_mhd_simp[simp]: "static (mhd m) = is_static m"
by (cases m) (simp add: member_is_static_simp mhd_def)
lemma mhd_resTy_simp [simp]: "resTy (mhd m) = resTy m"
by (cases m) simp
lemma mhd_is_static_simp [simp]: "is_static (mhd m) = is_static m"
by (cases m) simp
lemma mhd_accmodi_simp [simp]: "accmodi (mhd m) = accmodi m"
by (cases m) simp
definition
cmheads :: "prog \<Rightarrow> qtname \<Rightarrow> qtname \<Rightarrow> sig \<Rightarrow> emhead set"
where "cmheads G S C = (\<lambda>sig. (\<lambda>(Cls,mthd). (ClassT Cls,(mhead mthd))) ` set_option (accmethd G S C sig))"
definition
Objectmheads :: "prog \<Rightarrow> qtname \<Rightarrow> sig \<Rightarrow> emhead set" where
"Objectmheads G S =
(\<lambda>sig. (\<lambda>(Cls,mthd). (ClassT Cls,(mhead mthd)))
` set_option (filter_tab (\<lambda>sig m. accmodi m \<noteq> Private) (accmethd G S Object) sig))"
definition
accObjectmheads :: "prog \<Rightarrow> qtname \<Rightarrow> ref_ty \<Rightarrow> sig \<Rightarrow> emhead set"
where
"accObjectmheads G S T =
(if G\<turnstile>RefT T accessible_in (pid S)
then Objectmheads G S
else (\<lambda>sig. {}))"
primrec mheads :: "prog \<Rightarrow> qtname \<Rightarrow> ref_ty \<Rightarrow> sig \<Rightarrow> emhead set"
where
"mheads G S NullT = (\<lambda>sig. {})"
| "mheads G S (IfaceT I) = (\<lambda>sig. (\<lambda>(I,h).(IfaceT I,h))
` accimethds G (pid S) I sig \<union>
accObjectmheads G S (IfaceT I) sig)"
| "mheads G S (ClassT C) = cmheads G S C"
| "mheads G S (ArrayT T) = accObjectmheads G S (ArrayT T)"
definition
\<comment> \<open>applicable methods, cf. 15.11.2.1\<close>
appl_methds :: "prog \<Rightarrow> qtname \<Rightarrow> ref_ty \<Rightarrow> sig \<Rightarrow> (emhead \<times> ty list) set" where
"appl_methds G S rt = (\<lambda> sig.
{(mh,pTs') |mh pTs'. mh \<in> mheads G S rt \<lparr>name=name sig,parTs=pTs'\<rparr> \<and>
G\<turnstile>(parTs sig)[\<preceq>]pTs'})"
definition
\<comment> \<open>more specific methods, cf. 15.11.2.2\<close>
more_spec :: "prog \<Rightarrow> emhead \<times> ty list \<Rightarrow> emhead \<times> ty list \<Rightarrow> bool" where
"more_spec G = (\<lambda>(mh,pTs). \<lambda>(mh',pTs'). G\<turnstile>pTs[\<preceq>]pTs')"
(*more_spec G \<equiv>\<lambda>((d,h),pTs). \<lambda>((d',h'),pTs'). G\<turnstile>RefT d\<preceq>RefT d'\<and>G\<turnstile>pTs[\<preceq>]pTs'*)
definition
\<comment> \<open>maximally specific methods, cf. 15.11.2.2\<close>
max_spec :: "prog \<Rightarrow> qtname \<Rightarrow> ref_ty \<Rightarrow> sig \<Rightarrow> (emhead \<times> ty list) set" where
"max_spec G S rt sig = {m. m \<in>appl_methds G S rt sig \<and>
(\<forall>m'\<in>appl_methds G S rt sig. more_spec G m' m \<longrightarrow> m'=m)}"
lemma max_spec2appl_meths:
"x \<in> max_spec G S T sig \<Longrightarrow> x \<in> appl_methds G S T sig"
by (auto simp: max_spec_def)
lemma appl_methsD: "(mh,pTs')\<in>appl_methds G S T \<lparr>name=mn,parTs=pTs\<rparr> \<Longrightarrow>
mh \<in> mheads G S T \<lparr>name=mn,parTs=pTs'\<rparr> \<and> G\<turnstile>pTs[\<preceq>]pTs'"
by (auto simp: appl_methds_def)
lemma max_spec2mheads:
"max_spec G S rt \<lparr>name=mn,parTs=pTs\<rparr> = insert (mh, pTs') A
\<Longrightarrow> mh \<in> mheads G S rt \<lparr>name=mn,parTs=pTs'\<rparr> \<and> G\<turnstile>pTs[\<preceq>]pTs'"
apply (auto dest: equalityD2 subsetD max_spec2appl_meths appl_methsD)
done
definition
empty_dt :: "dyn_ty"
where "empty_dt = (\<lambda>a. None)"
definition
invmode :: "('a::type)member_scheme \<Rightarrow> expr \<Rightarrow> inv_mode" where
"invmode m e = (if is_static m
then Static
else if e=Super then SuperM else IntVir)"
lemma invmode_nonstatic [simp]:
"invmode \<lparr>access=a,static=False,\<dots>=x\<rparr> (Acc (LVar e)) = IntVir"
apply (unfold invmode_def)
apply (simp (no_asm) add: member_is_static_simp)
done
lemma invmode_Static_eq [simp]: "(invmode m e = Static) = is_static m"
apply (unfold invmode_def)
apply (simp (no_asm))
done
lemma invmode_IntVir_eq: "(invmode m e = IntVir) = (\<not>(is_static m) \<and> e\<noteq>Super)"
apply (unfold invmode_def)
apply (simp (no_asm))
done
lemma Null_staticD:
"a'=Null \<longrightarrow> (is_static m) \<Longrightarrow> invmode m e = IntVir \<longrightarrow> a' \<noteq> Null"
apply (clarsimp simp add: invmode_IntVir_eq)
done
subsubsection "Typing for unary operations"
primrec unop_type :: "unop \<Rightarrow> prim_ty"
where
"unop_type UPlus = Integer"
| "unop_type UMinus = Integer"
| "unop_type UBitNot = Integer"
| "unop_type UNot = Boolean"
primrec wt_unop :: "unop \<Rightarrow> ty \<Rightarrow> bool"
where
"wt_unop UPlus t = (t = PrimT Integer)"
| "wt_unop UMinus t = (t = PrimT Integer)"
| "wt_unop UBitNot t = (t = PrimT Integer)"
| "wt_unop UNot t = (t = PrimT Boolean)"
subsubsection "Typing for binary operations"
primrec binop_type :: "binop \<Rightarrow> prim_ty"
where
"binop_type Mul = Integer"
| "binop_type Div = Integer"
| "binop_type Mod = Integer"
| "binop_type Plus = Integer"
| "binop_type Minus = Integer"
| "binop_type LShift = Integer"
| "binop_type RShift = Integer"
| "binop_type RShiftU = Integer"
| "binop_type Less = Boolean"
| "binop_type Le = Boolean"
| "binop_type Greater = Boolean"
| "binop_type Ge = Boolean"
| "binop_type Eq = Boolean"
| "binop_type Neq = Boolean"
| "binop_type BitAnd = Integer"
| "binop_type And = Boolean"
| "binop_type BitXor = Integer"
| "binop_type Xor = Boolean"
| "binop_type BitOr = Integer"
| "binop_type Or = Boolean"
| "binop_type CondAnd = Boolean"
| "binop_type CondOr = Boolean"
primrec wt_binop :: "prog \<Rightarrow> binop \<Rightarrow> ty \<Rightarrow> ty \<Rightarrow> bool"
where
"wt_binop G Mul t1 t2 = ((t1 = PrimT Integer) \<and> (t2 = PrimT Integer))"
| "wt_binop G Div t1 t2 = ((t1 = PrimT Integer) \<and> (t2 = PrimT Integer))"
| "wt_binop G Mod t1 t2 = ((t1 = PrimT Integer) \<and> (t2 = PrimT Integer))"
| "wt_binop G Plus t1 t2 = ((t1 = PrimT Integer) \<and> (t2 = PrimT Integer))"
| "wt_binop G Minus t1 t2 = ((t1 = PrimT Integer) \<and> (t2 = PrimT Integer))"
| "wt_binop G LShift t1 t2 = ((t1 = PrimT Integer) \<and> (t2 = PrimT Integer))"
| "wt_binop G RShift t1 t2 = ((t1 = PrimT Integer) \<and> (t2 = PrimT Integer))"
| "wt_binop G RShiftU t1 t2 = ((t1 = PrimT Integer) \<and> (t2 = PrimT Integer))"
| "wt_binop G Less t1 t2 = ((t1 = PrimT Integer) \<and> (t2 = PrimT Integer))"
| "wt_binop G Le t1 t2 = ((t1 = PrimT Integer) \<and> (t2 = PrimT Integer))"
| "wt_binop G Greater t1 t2 = ((t1 = PrimT Integer) \<and> (t2 = PrimT Integer))"
| "wt_binop G Ge t1 t2 = ((t1 = PrimT Integer) \<and> (t2 = PrimT Integer))"
| "wt_binop G Eq t1 t2 = (G\<turnstile>t1\<preceq>t2 \<or> G\<turnstile>t2\<preceq>t1)"
| "wt_binop G Neq t1 t2 = (G\<turnstile>t1\<preceq>t2 \<or> G\<turnstile>t2\<preceq>t1)"
| "wt_binop G BitAnd t1 t2 = ((t1 = PrimT Integer) \<and> (t2 = PrimT Integer))"
| "wt_binop G And t1 t2 = ((t1 = PrimT Boolean) \<and> (t2 = PrimT Boolean))"
| "wt_binop G BitXor t1 t2 = ((t1 = PrimT Integer) \<and> (t2 = PrimT Integer))"
| "wt_binop G Xor t1 t2 = ((t1 = PrimT Boolean) \<and> (t2 = PrimT Boolean))"
| "wt_binop G BitOr t1 t2 = ((t1 = PrimT Integer) \<and> (t2 = PrimT Integer))"
| "wt_binop G Or t1 t2 = ((t1 = PrimT Boolean) \<and> (t2 = PrimT Boolean))"
| "wt_binop G CondAnd t1 t2 = ((t1 = PrimT Boolean) \<and> (t2 = PrimT Boolean))"
| "wt_binop G CondOr t1 t2 = ((t1 = PrimT Boolean) \<and> (t2 = PrimT Boolean))"
subsubsection "Typing for terms"
type_synonym tys = "ty + ty list"
translations
(type) "tys" <= (type) "ty + ty list"
inductive wt :: "env \<Rightarrow> dyn_ty \<Rightarrow> [term,tys] \<Rightarrow> bool" ("_,_\<Turnstile>_\<Colon>_" [51,51,51,51] 50)
and wt_stmt :: "env \<Rightarrow> dyn_ty \<Rightarrow> stmt \<Rightarrow> bool" ("_,_\<Turnstile>_\<Colon>\<surd>" [51,51,51] 50)
and ty_expr :: "env \<Rightarrow> dyn_ty \<Rightarrow> [expr ,ty ] \<Rightarrow> bool" ("_,_\<Turnstile>_\<Colon>-_" [51,51,51,51] 50)
and ty_var :: "env \<Rightarrow> dyn_ty \<Rightarrow> [var ,ty ] \<Rightarrow> bool" ("_,_\<Turnstile>_\<Colon>=_" [51,51,51,51] 50)
and ty_exprs :: "env \<Rightarrow> dyn_ty \<Rightarrow> [expr list, ty list] \<Rightarrow> bool"
("_,_\<Turnstile>_\<Colon>\<doteq>_" [51,51,51,51] 50)
where
"E,dt\<Turnstile>s\<Colon>\<surd> \<equiv> E,dt\<Turnstile>In1r s\<Colon>Inl (PrimT Void)"
| "E,dt\<Turnstile>e\<Colon>-T \<equiv> E,dt\<Turnstile>In1l e\<Colon>Inl T"
| "E,dt\<Turnstile>e\<Colon>=T \<equiv> E,dt\<Turnstile>In2 e\<Colon>Inl T"
| "E,dt\<Turnstile>e\<Colon>\<doteq>T \<equiv> E,dt\<Turnstile>In3 e\<Colon>Inr T"
\<comment> \<open>well-typed statements\<close>
| Skip: "E,dt\<Turnstile>Skip\<Colon>\<surd>"
| Expr: "\<lbrakk>E,dt\<Turnstile>e\<Colon>-T\<rbrakk> \<Longrightarrow>
E,dt\<Turnstile>Expr e\<Colon>\<surd>"
\<comment> \<open>cf. 14.6\<close>
| Lab: "E,dt\<Turnstile>c\<Colon>\<surd> \<Longrightarrow>
E,dt\<Turnstile>l\<bullet> c\<Colon>\<surd>"
| Comp: "\<lbrakk>E,dt\<Turnstile>c1\<Colon>\<surd>;
E,dt\<Turnstile>c2\<Colon>\<surd>\<rbrakk> \<Longrightarrow>
E,dt\<Turnstile>c1;; c2\<Colon>\<surd>"
\<comment> \<open>cf. 14.8\<close>
| If: "\<lbrakk>E,dt\<Turnstile>e\<Colon>-PrimT Boolean;
E,dt\<Turnstile>c1\<Colon>\<surd>;
E,dt\<Turnstile>c2\<Colon>\<surd>\<rbrakk> \<Longrightarrow>
E,dt\<Turnstile>If(e) c1 Else c2\<Colon>\<surd>"
\<comment> \<open>cf. 14.10\<close>
| Loop: "\<lbrakk>E,dt\<Turnstile>e\<Colon>-PrimT Boolean;
E,dt\<Turnstile>c\<Colon>\<surd>\<rbrakk> \<Longrightarrow>
E,dt\<Turnstile>l\<bullet> While(e) c\<Colon>\<surd>"
\<comment> \<open>cf. 14.13, 14.15, 14.16\<close>
| Jmp: "E,dt\<Turnstile>Jmp jump\<Colon>\<surd>"
\<comment> \<open>cf. 14.16\<close>
| Throw: "\<lbrakk>E,dt\<Turnstile>e\<Colon>-Class tn;
prg E\<turnstile>tn\<preceq>\<^sub>C SXcpt Throwable\<rbrakk> \<Longrightarrow>
E,dt\<Turnstile>Throw e\<Colon>\<surd>"
\<comment> \<open>cf. 14.18\<close>
| Try: "\<lbrakk>E,dt\<Turnstile>c1\<Colon>\<surd>; prg E\<turnstile>tn\<preceq>\<^sub>C SXcpt Throwable;
lcl E (VName vn)=None; E \<lparr>lcl := (lcl E)(VName vn\<mapsto>Class tn)\<rparr>,dt\<Turnstile>c2\<Colon>\<surd>\<rbrakk>
\<Longrightarrow>
E,dt\<Turnstile>Try c1 Catch(tn vn) c2\<Colon>\<surd>"
\<comment> \<open>cf. 14.18\<close>
| Fin: "\<lbrakk>E,dt\<Turnstile>c1\<Colon>\<surd>; E,dt\<Turnstile>c2\<Colon>\<surd>\<rbrakk> \<Longrightarrow>
E,dt\<Turnstile>c1 Finally c2\<Colon>\<surd>"
| Init: "\<lbrakk>is_class (prg E) C\<rbrakk> \<Longrightarrow>
E,dt\<Turnstile>Init C\<Colon>\<surd>"
\<comment> \<open>\<^term>\<open>Init\<close> is created on the fly during evaluation (see Eval.thy).
The class isn't necessarily accessible from the points where \<^term>\<open>Init\<close>
is called. Therefore we only demand \<^term>\<open>is_class\<close> and not
\<^term>\<open>is_acc_class\<close> here.\<close>
\<comment> \<open>well-typed expressions\<close>
\<comment> \<open>cf. 15.8\<close>
| NewC: "\<lbrakk>is_acc_class (prg E) (pkg E) C\<rbrakk> \<Longrightarrow>
E,dt\<Turnstile>NewC C\<Colon>-Class C"
\<comment> \<open>cf. 15.9\<close>
| NewA: "\<lbrakk>is_acc_type (prg E) (pkg E) T;
E,dt\<Turnstile>i\<Colon>-PrimT Integer\<rbrakk> \<Longrightarrow>
E,dt\<Turnstile>New T[i]\<Colon>-T.[]"
\<comment> \<open>cf. 15.15\<close>
| Cast: "\<lbrakk>E,dt\<Turnstile>e\<Colon>-T; is_acc_type (prg E) (pkg E) T';
prg E\<turnstile>T\<preceq>? T'\<rbrakk> \<Longrightarrow>
E,dt\<Turnstile>Cast T' e\<Colon>-T'"
\<comment> \<open>cf. 15.19.2\<close>
| Inst: "\<lbrakk>E,dt\<Turnstile>e\<Colon>-RefT T; is_acc_type (prg E) (pkg E) (RefT T');
prg E\<turnstile>RefT T\<preceq>? RefT T'\<rbrakk> \<Longrightarrow>
E,dt\<Turnstile>e InstOf T'\<Colon>-PrimT Boolean"
\<comment> \<open>cf. 15.7.1\<close>
| Lit: "\<lbrakk>typeof dt x = Some T\<rbrakk> \<Longrightarrow>
E,dt\<Turnstile>Lit x\<Colon>-T"
| UnOp: "\<lbrakk>E,dt\<Turnstile>e\<Colon>-Te; wt_unop unop Te; T=PrimT (unop_type unop)\<rbrakk>
\<Longrightarrow>
E,dt\<Turnstile>UnOp unop e\<Colon>-T"
| BinOp: "\<lbrakk>E,dt\<Turnstile>e1\<Colon>-T1; E,dt\<Turnstile>e2\<Colon>-T2; wt_binop (prg E) binop T1 T2;
T=PrimT (binop_type binop)\<rbrakk>
\<Longrightarrow>
E,dt\<Turnstile>BinOp binop e1 e2\<Colon>-T"
\<comment> \<open>cf. 15.10.2, 15.11.1\<close>
| Super: "\<lbrakk>lcl E This = Some (Class C); C \<noteq> Object;
class (prg E) C = Some c\<rbrakk> \<Longrightarrow>
E,dt\<Turnstile>Super\<Colon>-Class (super c)"
\<comment> \<open>cf. 15.13.1, 15.10.1, 15.12\<close>
| Acc: "\<lbrakk>E,dt\<Turnstile>va\<Colon>=T\<rbrakk> \<Longrightarrow>
E,dt\<Turnstile>Acc va\<Colon>-T"
\<comment> \<open>cf. 15.25, 15.25.1\<close>
| Ass: "\<lbrakk>E,dt\<Turnstile>va\<Colon>=T; va \<noteq> LVar This;
E,dt\<Turnstile>v \<Colon>-T';
prg E\<turnstile>T'\<preceq>T\<rbrakk> \<Longrightarrow>
E,dt\<Turnstile>va:=v\<Colon>-T'"
\<comment> \<open>cf. 15.24\<close>
| Cond: "\<lbrakk>E,dt\<Turnstile>e0\<Colon>-PrimT Boolean;
E,dt\<Turnstile>e1\<Colon>-T1; E,dt\<Turnstile>e2\<Colon>-T2;
prg E\<turnstile>T1\<preceq>T2 \<and> T = T2 \<or> prg E\<turnstile>T2\<preceq>T1 \<and> T = T1\<rbrakk> \<Longrightarrow>
E,dt\<Turnstile>e0 ? e1 : e2\<Colon>-T"
\<comment> \<open>cf. 15.11.1, 15.11.2, 15.11.3\<close>
| Call: "\<lbrakk>E,dt\<Turnstile>e\<Colon>-RefT statT;
E,dt\<Turnstile>ps\<Colon>\<doteq>pTs;
max_spec (prg E) (cls E) statT \<lparr>name=mn,parTs=pTs\<rparr>
= {((statDeclT,m),pTs')}
\<rbrakk> \<Longrightarrow>
E,dt\<Turnstile>{cls E,statT,invmode m e}e\<cdot>mn({pTs'}ps)\<Colon>-(resTy m)"
| Methd: "\<lbrakk>is_class (prg E) C;
methd (prg E) C sig = Some m;
E,dt\<Turnstile>Body (declclass m) (stmt (mbody (mthd m)))\<Colon>-T\<rbrakk> \<Longrightarrow>
E,dt\<Turnstile>Methd C sig\<Colon>-T"
\<comment> \<open>The class \<^term>\<open>C\<close> is the dynamic class of the method call
(cf. Eval.thy).
It need not be directly accessible from the current package
\<^term>\<open>(pkg E)\<close>.
Only the static class must be accessible (ensured indirectly by
\<^term>\<open>Call\<close>).
Note that l is just a dummy value. It is only used in the small-step
semantics. To prove type safety directly for the small-step semantics
we would have to assume conformance of l here!\<close>
| Body: "\<lbrakk>is_class (prg E) D;
E,dt\<Turnstile>blk\<Colon>\<surd>;
(lcl E) Result = Some T;
is_type (prg E) T\<rbrakk> \<Longrightarrow>
E,dt\<Turnstile>Body D blk\<Colon>-T"
\<comment> \<open>The class \<^term>\<open>D\<close> implementing the method need not be directly
accessible from the current package \<^term>\<open>(pkg E)\<close>; it may also be only
indirectly accessible due to inheritance (ensured in \<^term>\<open>Call\<close>).
The result type need not be accessible in Java! (If it is not
accessible you can only assign it to Object).
For the dummy value l see rule \<^term>\<open>Methd\<close>.\<close>
\<comment> \<open>well-typed variables\<close>
\<comment> \<open>cf. 15.13.1\<close>
| LVar: "\<lbrakk>lcl E vn = Some T; is_acc_type (prg E) (pkg E) T\<rbrakk> \<Longrightarrow>
E,dt\<Turnstile>LVar vn\<Colon>=T"
\<comment> \<open>cf. 15.10.1\<close>
| FVar: "\<lbrakk>E,dt\<Turnstile>e\<Colon>-Class C;
accfield (prg E) (cls E) C fn = Some (statDeclC,f)\<rbrakk> \<Longrightarrow>
E,dt\<Turnstile>{cls E,statDeclC,is_static f}e..fn\<Colon>=(type f)"
\<comment> \<open>cf. 15.12\<close>
| AVar: "\<lbrakk>E,dt\<Turnstile>e\<Colon>-T.[];
E,dt\<Turnstile>i\<Colon>-PrimT Integer\<rbrakk> \<Longrightarrow>
E,dt\<Turnstile>e.[i]\<Colon>=T"
\<comment> \<open>well-typed expression lists\<close>
\<comment> \<open>cf. 15.11.???\<close>
| Nil: "E,dt\<Turnstile>[]\<Colon>\<doteq>[]"
\<comment> \<open>cf. 15.11.???\<close>
| Cons: "\<lbrakk>E,dt\<Turnstile>e \<Colon>-T;
E,dt\<Turnstile>es\<Colon>\<doteq>Ts\<rbrakk> \<Longrightarrow>
E,dt\<Turnstile>e#es\<Colon>\<doteq>T#Ts"
(* for purely static typing *)
abbreviation
wt_syntax :: "env \<Rightarrow> [term,tys] \<Rightarrow> bool" ("_\<turnstile>_\<Colon>_" [51,51,51] 50)
where "E\<turnstile>t\<Colon>T == E,empty_dt\<Turnstile>t\<Colon> T"
abbreviation
wt_stmt_syntax :: "env \<Rightarrow> stmt \<Rightarrow> bool" ("_\<turnstile>_\<Colon>\<surd>" [51,51 ] 50)
where "E\<turnstile>s\<Colon>\<surd> == E\<turnstile>In1r s \<Colon> Inl (PrimT Void)"
abbreviation
ty_expr_syntax :: "env \<Rightarrow> [expr, ty] \<Rightarrow> bool" ("_\<turnstile>_\<Colon>-_" [51,51,51] 50)
where "E\<turnstile>e\<Colon>-T == E\<turnstile>In1l e \<Colon> Inl T"
abbreviation
ty_var_syntax :: "env \<Rightarrow> [var, ty] \<Rightarrow> bool" ("_\<turnstile>_\<Colon>=_" [51,51,51] 50)
where "E\<turnstile>e\<Colon>=T == E\<turnstile>In2 e \<Colon> Inl T"
abbreviation
ty_exprs_syntax :: "env \<Rightarrow> [expr list, ty list] \<Rightarrow> bool" ("_\<turnstile>_\<Colon>\<doteq>_" [51,51,51] 50)
where "E\<turnstile>e\<Colon>\<doteq>T == E\<turnstile>In3 e \<Colon> Inr T"
notation (ASCII)
wt_syntax ("_|-_::_" [51,51,51] 50) and
wt_stmt_syntax ("_|-_:<>" [51,51 ] 50) and
ty_expr_syntax ("_|-_:-_" [51,51,51] 50) and
ty_var_syntax ("_|-_:=_" [51,51,51] 50) and
ty_exprs_syntax ("_|-_:#_" [51,51,51] 50)
declare not_None_eq [simp del]
declare if_split [split del] if_split_asm [split del]
declare split_paired_All [simp del] split_paired_Ex [simp del]
setup \<open>map_theory_simpset (fn ctxt => ctxt delloop "split_all_tac")\<close>
inductive_cases wt_elim_cases [cases set]:
"E,dt\<Turnstile>In2 (LVar vn) \<Colon>T"
"E,dt\<Turnstile>In2 ({accC,statDeclC,s}e..fn)\<Colon>T"
"E,dt\<Turnstile>In2 (e.[i]) \<Colon>T"
"E,dt\<Turnstile>In1l (NewC C) \<Colon>T"
"E,dt\<Turnstile>In1l (New T'[i]) \<Colon>T"
"E,dt\<Turnstile>In1l (Cast T' e) \<Colon>T"
"E,dt\<Turnstile>In1l (e InstOf T') \<Colon>T"
"E,dt\<Turnstile>In1l (Lit x) \<Colon>T"
"E,dt\<Turnstile>In1l (UnOp unop e) \<Colon>T"
"E,dt\<Turnstile>In1l (BinOp binop e1 e2) \<Colon>T"
"E,dt\<Turnstile>In1l (Super) \<Colon>T"
"E,dt\<Turnstile>In1l (Acc va) \<Colon>T"
"E,dt\<Turnstile>In1l (Ass va v) \<Colon>T"
"E,dt\<Turnstile>In1l (e0 ? e1 : e2) \<Colon>T"
"E,dt\<Turnstile>In1l ({accC,statT,mode}e\<cdot>mn({pT'}p))\<Colon>T"
"E,dt\<Turnstile>In1l (Methd C sig) \<Colon>T"
"E,dt\<Turnstile>In1l (Body D blk) \<Colon>T"
"E,dt\<Turnstile>In3 ([]) \<Colon>Ts"
"E,dt\<Turnstile>In3 (e#es) \<Colon>Ts"
"E,dt\<Turnstile>In1r Skip \<Colon>x"
"E,dt\<Turnstile>In1r (Expr e) \<Colon>x"
"E,dt\<Turnstile>In1r (c1;; c2) \<Colon>x"
"E,dt\<Turnstile>In1r (l\<bullet> c) \<Colon>x"
"E,dt\<Turnstile>In1r (If(e) c1 Else c2) \<Colon>x"
"E,dt\<Turnstile>In1r (l\<bullet> While(e) c) \<Colon>x"
"E,dt\<Turnstile>In1r (Jmp jump) \<Colon>x"
"E,dt\<Turnstile>In1r (Throw e) \<Colon>x"
"E,dt\<Turnstile>In1r (Try c1 Catch(tn vn) c2)\<Colon>x"
"E,dt\<Turnstile>In1r (c1 Finally c2) \<Colon>x"
"E,dt\<Turnstile>In1r (Init C) \<Colon>x"
declare not_None_eq [simp]
declare if_split [split] if_split_asm [split]
declare split_paired_All [simp] split_paired_Ex [simp]
setup \<open>map_theory_simpset (fn ctxt => ctxt addloop ("split_all_tac", split_all_tac))\<close>
lemma is_acc_class_is_accessible:
"is_acc_class G P C \<Longrightarrow> G\<turnstile>(Class C) accessible_in P"
by (auto simp add: is_acc_class_def)
lemma is_acc_iface_is_iface: "is_acc_iface G P I \<Longrightarrow> is_iface G I"
by (auto simp add: is_acc_iface_def)
lemma is_acc_iface_Iface_is_accessible:
"is_acc_iface G P I \<Longrightarrow> G\<turnstile>(Iface I) accessible_in P"
by (auto simp add: is_acc_iface_def)
lemma is_acc_type_is_type: "is_acc_type G P T \<Longrightarrow> is_type G T"
by (auto simp add: is_acc_type_def)
lemma is_acc_iface_is_accessible:
"is_acc_type G P T \<Longrightarrow> G\<turnstile>T accessible_in P"
by (auto simp add: is_acc_type_def)
lemma wt_Methd_is_methd:
"E\<turnstile>In1l (Methd C sig)\<Colon>T \<Longrightarrow> is_methd (prg E) C sig"
apply (erule_tac wt_elim_cases)
apply clarsimp
apply (erule is_methdI, assumption)
done
text \<open>Special versions of some typing rules, better suited for pattern
matching against the conclusion (no selectors in the conclusion).
\<close>
lemma wt_Call:
"\<lbrakk>E,dt\<Turnstile>e\<Colon>-RefT statT; E,dt\<Turnstile>ps\<Colon>\<doteq>pTs;
max_spec (prg E) (cls E) statT \<lparr>name=mn,parTs=pTs\<rparr>
= {((statDeclC,m),pTs')};rT=(resTy m);accC=cls E;
mode = invmode m e\<rbrakk> \<Longrightarrow> E,dt\<Turnstile>{accC,statT,mode}e\<cdot>mn({pTs'}ps)\<Colon>-rT"
by (auto elim: wt.Call)
lemma invocationTypeExpr_noClassD:
"\<lbrakk> E\<turnstile>e\<Colon>-RefT statT\<rbrakk>
\<Longrightarrow> (\<forall> statC. statT \<noteq> ClassT statC) \<longrightarrow> invmode m e \<noteq> SuperM"
proof -
assume wt: "E\<turnstile>e\<Colon>-RefT statT"
show ?thesis
proof (cases "e=Super")
case True
with wt obtain "C" where "statT = ClassT C" by (blast elim: wt_elim_cases)
then show ?thesis by blast
next
case False then show ?thesis
by (auto simp add: invmode_def)
qed
qed
lemma wt_Super:
"\<lbrakk>lcl E This = Some (Class C); C \<noteq> Object; class (prg E) C = Some c; D=super c\<rbrakk>
\<Longrightarrow> E,dt\<Turnstile>Super\<Colon>-Class D"
by (auto elim: wt.Super)
lemma wt_FVar:
"\<lbrakk>E,dt\<Turnstile>e\<Colon>-Class C; accfield (prg E) (cls E) C fn = Some (statDeclC,f);
sf=is_static f; fT=(type f); accC=cls E\<rbrakk>
\<Longrightarrow> E,dt\<Turnstile>{accC,statDeclC,sf}e..fn\<Colon>=fT"
by (auto dest: wt.FVar)
lemma wt_init [iff]: "E,dt\<Turnstile>Init C\<Colon>\<surd> = is_class (prg E) C"
by (auto elim: wt_elim_cases intro: "wt.Init")
declare wt.Skip [iff]
lemma wt_StatRef:
"is_acc_type (prg E) (pkg E) (RefT rt) \<Longrightarrow> E\<turnstile>StatRef rt\<Colon>-RefT rt"
apply (rule wt.Cast)
apply (rule wt.Lit)
apply (simp (no_asm))
apply (simp (no_asm_simp))
apply (rule cast.widen)
apply (simp (no_asm))
done
lemma wt_Inj_elim:
"\<And>E. E,dt\<Turnstile>t\<Colon>U \<Longrightarrow> case t of
In1 ec \<Rightarrow> (case ec of
Inl e \<Rightarrow> \<exists>T. U=Inl T
| Inr s \<Rightarrow> U=Inl (PrimT Void))
| In2 e \<Rightarrow> (\<exists>T. U=Inl T)
| In3 e \<Rightarrow> (\<exists>T. U=Inr T)"
apply (erule wt.induct)
apply auto
done
\<comment> \<open>In the special syntax that distinguishes the typing judgements for expressions,
statements, variables and expression lists, the kind of term determines
the kind of type: e.g. a statement (injected into terms via \<open>In1r\<close>)
always has type void (injection \<^term>\<open>Inl\<close> into the generalised
types). The following simplification procedures establish these
correlations; a small usage sketch follows the lemmas below.\<close>
lemma wt_expr_eq: "E,dt\<Turnstile>In1l t\<Colon>U = (\<exists>T. U=Inl T \<and> E,dt\<Turnstile>t\<Colon>-T)"
by (auto, frule wt_Inj_elim, auto)
lemma wt_var_eq: "E,dt\<Turnstile>In2 t\<Colon>U = (\<exists>T. U=Inl T \<and> E,dt\<Turnstile>t\<Colon>=T)"
by (auto, frule wt_Inj_elim, auto)
lemma wt_exprs_eq: "E,dt\<Turnstile>In3 t\<Colon>U = (\<exists>Ts. U=Inr Ts \<and> E,dt\<Turnstile>t\<Colon>\<doteq>Ts)"
by (auto, frule wt_Inj_elim, auto)
lemma wt_stmt_eq: "E,dt\<Turnstile>In1r t\<Colon>U = (U=Inl(PrimT Void)\<and>E,dt\<Turnstile>t\<Colon>\<surd>)"
by (auto, frule wt_Inj_elim, auto, frule wt_Inj_elim, auto)
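text \<open>Editorial sketch (not part of this changeset; the lemma name is purely
  illustrative): a manual use of the correlation lemma \<open>wt_stmt_eq\<close> above. The
  simprocs set up below perform this kind of rewriting automatically during
  simplification.\<close>
lemma stmt_kind_sketch: "E,dt\<Turnstile>In1r c\<Colon>U \<Longrightarrow> U = Inl (PrimT Void)"
  using wt_stmt_eq by blast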
simproc_setup wt_expr ("E,dt\<Turnstile>In1l t\<Colon>U") = \<open>
- fn _ => fn _ => fn ct =>
- (case Thm.term_of ct of
- (_ $ _ $ _ $ _ $ (Const _ $ _)) => NONE
- | _ => SOME (mk_meta_eq @{thm wt_expr_eq}))\<close>
-
-simproc_setup wt_var ("E,dt\<Turnstile>In2 t\<Colon>U") = \<open>
- fn _ => fn _ => fn ct =>
+ K (K (fn ct =>
(case Thm.term_of ct of
(_ $ _ $ _ $ _ $ (Const _ $ _)) => NONE
- | _ => SOME (mk_meta_eq @{thm wt_var_eq}))\<close>
+ | _ => SOME (mk_meta_eq @{thm wt_expr_eq}))))\<close>
-simproc_setup wt_exprs ("E,dt\<Turnstile>In3 t\<Colon>U") = \<open>
- fn _ => fn _ => fn ct =>
+simproc_setup wt_var ("E,dt\<Turnstile>In2 t\<Colon>U") = \<open>
+ K (K (fn ct =>
(case Thm.term_of ct of
(_ $ _ $ _ $ _ $ (Const _ $ _)) => NONE
- | _ => SOME (mk_meta_eq @{thm wt_exprs_eq}))\<close>
+ | _ => SOME (mk_meta_eq @{thm wt_var_eq}))))\<close>
+
+simproc_setup wt_exprs ("E,dt\<Turnstile>In3 t\<Colon>U") = \<open>
+ K (K (fn ct =>
+ (case Thm.term_of ct of
+ (_ $ _ $ _ $ _ $ (Const _ $ _)) => NONE
+ | _ => SOME (mk_meta_eq @{thm wt_exprs_eq}))))\<close>
simproc_setup wt_stmt ("E,dt\<Turnstile>In1r t\<Colon>U") = \<open>
- fn _ => fn _ => fn ct =>
+ K (K (fn ct =>
(case Thm.term_of ct of
(_ $ _ $ _ $ _ $ (Const _ $ _)) => NONE
- | _ => SOME (mk_meta_eq @{thm wt_stmt_eq}))\<close>
+ | _ => SOME (mk_meta_eq @{thm wt_stmt_eq}))))\<close>
lemma wt_elim_BinOp:
"\<lbrakk>E,dt\<Turnstile>In1l (BinOp binop e1 e2)\<Colon>T;
\<And>T1 T2 T3.
\<lbrakk>E,dt\<Turnstile>e1\<Colon>-T1; E,dt\<Turnstile>e2\<Colon>-T2; wt_binop (prg E) binop T1 T2;
E,dt\<Turnstile>(if b then In1l e2 else In1r Skip)\<Colon>T3;
T = Inl (PrimT (binop_type binop))\<rbrakk>
\<Longrightarrow> P\<rbrakk>
\<Longrightarrow> P"
apply (erule wt_elim_cases)
apply (cases b)
apply auto
done
lemma Inj_eq_lemma [simp]:
"(\<forall>T. (\<exists>T'. T = Inj T' \<and> P T') \<longrightarrow> Q T) = (\<forall>T'. P T' \<longrightarrow> Q (Inj T'))"
by auto
(* unused *)
lemma single_valued_tys_lemma [rule_format (no_asm)]:
"\<forall>S T. G\<turnstile>S\<preceq>T \<longrightarrow> G\<turnstile>T\<preceq>S \<longrightarrow> S = T \<Longrightarrow> E,dt\<Turnstile>t\<Colon>T \<Longrightarrow>
G = prg E \<longrightarrow> (\<forall>T'. E,dt\<Turnstile>t\<Colon>T' \<longrightarrow> T = T')"
apply (cases "E", erule wt.induct)
apply (safe del: disjE)
apply (simp_all (no_asm_use) split del: if_split_asm)
apply (safe del: disjE)
(* 17 subgoals *)
apply (tactic \<open>ALLGOALS (fn i =>
if i = 11 then EVERY'
[Rule_Insts.thin_tac \<^context> "E,dt\<Turnstile>e0\<Colon>-PrimT Boolean" [(\<^binding>\<open>E\<close>, NONE, NoSyn)],
Rule_Insts.thin_tac \<^context> "E,dt\<Turnstile>e1\<Colon>-T1" [(\<^binding>\<open>E\<close>, NONE, NoSyn), (\<^binding>\<open>T1\<close>, NONE, NoSyn)],
Rule_Insts.thin_tac \<^context> "E,dt\<Turnstile>e2\<Colon>-T2" [(\<^binding>\<open>E\<close>, NONE, NoSyn), (\<^binding>\<open>T2\<close>, NONE, NoSyn)]] i
else Rule_Insts.thin_tac \<^context> "All P" [(\<^binding>\<open>P\<close>, NONE, NoSyn)] i)\<close>)
(*apply (safe del: disjE elim!: wt_elim_cases)*)
apply (tactic \<open>ALLGOALS (eresolve_tac \<^context> @{thms wt_elim_cases})\<close>)
apply (simp_all (no_asm_use) split del: if_split_asm)
apply (erule_tac [12] V = "All P" for P in thin_rl) (* Call *)
apply (blast del: equalityCE dest: sym [THEN trans])+
done
(* unused *)
lemma single_valued_tys:
"ws_prog (prg E) \<Longrightarrow> single_valued {(t,T). E,dt\<Turnstile>t\<Colon>T}"
apply (unfold single_valued_def)
apply clarsimp
apply (rule single_valued_tys_lemma)
apply (auto intro!: widen_antisym)
done
lemma typeof_empty_is_type: "typeof (\<lambda>a. None) v = Some T \<Longrightarrow> is_type G T"
by (induct v) auto
(* unused *)
lemma typeof_is_type: "(\<forall>a. v \<noteq> Addr a) \<Longrightarrow> \<exists>T. typeof dt v = Some T \<and> is_type G T"
by (induct v) auto
end
diff --git a/src/HOL/Boolean_Algebras.thy b/src/HOL/Boolean_Algebras.thy
--- a/src/HOL/Boolean_Algebras.thy
+++ b/src/HOL/Boolean_Algebras.thy
@@ -1,573 +1,573 @@
(* Title: HOL/Boolean_Algebras.thy
Author: Brian Huffman
Author: Florian Haftmann
*)
section \<open>Boolean Algebras\<close>
theory Boolean_Algebras
imports Lattices
begin
subsection \<open>Abstract boolean algebra\<close>
locale abstract_boolean_algebra = conj: abel_semigroup \<open>(\<^bold>\<sqinter>)\<close> + disj: abel_semigroup \<open>(\<^bold>\<squnion>)\<close>
for conj :: \<open>'a \<Rightarrow> 'a \<Rightarrow> 'a\<close> (infixr \<open>\<^bold>\<sqinter>\<close> 70)
and disj :: \<open>'a \<Rightarrow> 'a \<Rightarrow> 'a\<close> (infixr \<open>\<^bold>\<squnion>\<close> 65) +
fixes compl :: \<open>'a \<Rightarrow> 'a\<close> (\<open>\<^bold>- _\<close> [81] 80)
and zero :: \<open>'a\<close> (\<open>\<^bold>0\<close>)
and one :: \<open>'a\<close> (\<open>\<^bold>1\<close>)
assumes conj_disj_distrib: \<open>x \<^bold>\<sqinter> (y \<^bold>\<squnion> z) = (x \<^bold>\<sqinter> y) \<^bold>\<squnion> (x \<^bold>\<sqinter> z)\<close>
and disj_conj_distrib: \<open>x \<^bold>\<squnion> (y \<^bold>\<sqinter> z) = (x \<^bold>\<squnion> y) \<^bold>\<sqinter> (x \<^bold>\<squnion> z)\<close>
and conj_one_right: \<open>x \<^bold>\<sqinter> \<^bold>1 = x\<close>
and disj_zero_right: \<open>x \<^bold>\<squnion> \<^bold>0 = x\<close>
and conj_cancel_right [simp]: \<open>x \<^bold>\<sqinter> \<^bold>- x = \<^bold>0\<close>
and disj_cancel_right [simp]: \<open>x \<^bold>\<squnion> \<^bold>- x = \<^bold>1\<close>
begin
sublocale conj: semilattice_neutr \<open>(\<^bold>\<sqinter>)\<close> \<open>\<^bold>1\<close>
proof
show "x \<^bold>\<sqinter> \<^bold>1 = x" for x
by (fact conj_one_right)
show "x \<^bold>\<sqinter> x = x" for x
proof -
have "x \<^bold>\<sqinter> x = (x \<^bold>\<sqinter> x) \<^bold>\<squnion> \<^bold>0"
by (simp add: disj_zero_right)
also have "\<dots> = (x \<^bold>\<sqinter> x) \<^bold>\<squnion> (x \<^bold>\<sqinter> \<^bold>- x)"
by simp
also have "\<dots> = x \<^bold>\<sqinter> (x \<^bold>\<squnion> \<^bold>- x)"
by (simp only: conj_disj_distrib)
also have "\<dots> = x \<^bold>\<sqinter> \<^bold>1"
by simp
also have "\<dots> = x"
by (simp add: conj_one_right)
finally show ?thesis .
qed
qed
sublocale disj: semilattice_neutr \<open>(\<^bold>\<squnion>)\<close> \<open>\<^bold>0\<close>
proof
show "x \<^bold>\<squnion> \<^bold>0 = x" for x
by (fact disj_zero_right)
show "x \<^bold>\<squnion> x = x" for x
proof -
have "x \<^bold>\<squnion> x = (x \<^bold>\<squnion> x) \<^bold>\<sqinter> \<^bold>1"
by simp
also have "\<dots> = (x \<^bold>\<squnion> x) \<^bold>\<sqinter> (x \<^bold>\<squnion> \<^bold>- x)"
by simp
also have "\<dots> = x \<^bold>\<squnion> (x \<^bold>\<sqinter> \<^bold>- x)"
by (simp only: disj_conj_distrib)
also have "\<dots> = x \<^bold>\<squnion> \<^bold>0"
by simp
also have "\<dots> = x"
by (simp add: disj_zero_right)
finally show ?thesis .
qed
qed
subsubsection \<open>Complement\<close>
lemma complement_unique:
assumes 1: "a \<^bold>\<sqinter> x = \<^bold>0"
assumes 2: "a \<^bold>\<squnion> x = \<^bold>1"
assumes 3: "a \<^bold>\<sqinter> y = \<^bold>0"
assumes 4: "a \<^bold>\<squnion> y = \<^bold>1"
shows "x = y"
proof -
from 1 3 have "(a \<^bold>\<sqinter> x) \<^bold>\<squnion> (x \<^bold>\<sqinter> y) = (a \<^bold>\<sqinter> y) \<^bold>\<squnion> (x \<^bold>\<sqinter> y)"
by simp
then have "(x \<^bold>\<sqinter> a) \<^bold>\<squnion> (x \<^bold>\<sqinter> y) = (y \<^bold>\<sqinter> a) \<^bold>\<squnion> (y \<^bold>\<sqinter> x)"
by (simp add: ac_simps)
then have "x \<^bold>\<sqinter> (a \<^bold>\<squnion> y) = y \<^bold>\<sqinter> (a \<^bold>\<squnion> x)"
by (simp add: conj_disj_distrib)
with 2 4 have "x \<^bold>\<sqinter> \<^bold>1 = y \<^bold>\<sqinter> \<^bold>1"
by simp
then show "x = y"
by simp
qed
lemma compl_unique: "x \<^bold>\<sqinter> y = \<^bold>0 \<Longrightarrow> x \<^bold>\<squnion> y = \<^bold>1 \<Longrightarrow> \<^bold>- x = y"
by (rule complement_unique [OF conj_cancel_right disj_cancel_right])
lemma double_compl [simp]: "\<^bold>- (\<^bold>- x) = x"
proof (rule compl_unique)
show "\<^bold>- x \<^bold>\<sqinter> x = \<^bold>0"
by (simp only: conj_cancel_right conj.commute)
show "\<^bold>- x \<^bold>\<squnion> x = \<^bold>1"
by (simp only: disj_cancel_right disj.commute)
qed
lemma compl_eq_compl_iff [simp]:
\<open>\<^bold>- x = \<^bold>- y \<longleftrightarrow> x = y\<close> (is \<open>?P \<longleftrightarrow> ?Q\<close>)
proof
assume \<open>?Q\<close>
then show ?P by simp
next
assume \<open>?P\<close>
then have \<open>\<^bold>- (\<^bold>- x) = \<^bold>- (\<^bold>- y)\<close>
by simp
then show ?Q
by simp
qed
subsubsection \<open>Conjunction\<close>
lemma conj_zero_right [simp]: "x \<^bold>\<sqinter> \<^bold>0 = \<^bold>0"
using conj.left_idem conj_cancel_right by fastforce
lemma compl_one [simp]: "\<^bold>- \<^bold>1 = \<^bold>0"
by (rule compl_unique [OF conj_zero_right disj_zero_right])
lemma conj_zero_left [simp]: "\<^bold>0 \<^bold>\<sqinter> x = \<^bold>0"
by (subst conj.commute) (rule conj_zero_right)
lemma conj_cancel_left [simp]: "\<^bold>- x \<^bold>\<sqinter> x = \<^bold>0"
by (subst conj.commute) (rule conj_cancel_right)
lemma conj_disj_distrib2: "(y \<^bold>\<squnion> z) \<^bold>\<sqinter> x = (y \<^bold>\<sqinter> x) \<^bold>\<squnion> (z \<^bold>\<sqinter> x)"
by (simp only: conj.commute conj_disj_distrib)
lemmas conj_disj_distribs = conj_disj_distrib conj_disj_distrib2
subsubsection \<open>Disjunction\<close>
context
begin
interpretation dual: abstract_boolean_algebra \<open>(\<^bold>\<squnion>)\<close> \<open>(\<^bold>\<sqinter>)\<close> compl \<open>\<^bold>1\<close> \<open>\<^bold>0\<close>
apply standard
apply (rule disj_conj_distrib)
apply (rule conj_disj_distrib)
apply simp_all
done
lemma disj_one_right [simp]: "x \<^bold>\<squnion> \<^bold>1 = \<^bold>1"
by (fact dual.conj_zero_right)
lemma compl_zero [simp]: "\<^bold>- \<^bold>0 = \<^bold>1"
by (fact dual.compl_one)
lemma disj_one_left [simp]: "\<^bold>1 \<^bold>\<squnion> x = \<^bold>1"
by (fact dual.conj_zero_left)
lemma disj_cancel_left [simp]: "\<^bold>- x \<^bold>\<squnion> x = \<^bold>1"
by (fact dual.conj_cancel_left)
lemma disj_conj_distrib2: "(y \<^bold>\<sqinter> z) \<^bold>\<squnion> x = (y \<^bold>\<squnion> x) \<^bold>\<sqinter> (z \<^bold>\<squnion> x)"
by (fact dual.conj_disj_distrib2)
lemmas disj_conj_distribs = disj_conj_distrib disj_conj_distrib2
end
subsubsection \<open>De Morgan's Laws\<close>
lemma de_Morgan_conj [simp]: "\<^bold>- (x \<^bold>\<sqinter> y) = \<^bold>- x \<^bold>\<squnion> \<^bold>- y"
proof (rule compl_unique)
have "(x \<^bold>\<sqinter> y) \<^bold>\<sqinter> (\<^bold>- x \<^bold>\<squnion> \<^bold>- y) = ((x \<^bold>\<sqinter> y) \<^bold>\<sqinter> \<^bold>- x) \<^bold>\<squnion> ((x \<^bold>\<sqinter> y) \<^bold>\<sqinter> \<^bold>- y)"
by (rule conj_disj_distrib)
also have "\<dots> = (y \<^bold>\<sqinter> (x \<^bold>\<sqinter> \<^bold>- x)) \<^bold>\<squnion> (x \<^bold>\<sqinter> (y \<^bold>\<sqinter> \<^bold>- y))"
by (simp only: ac_simps)
finally show "(x \<^bold>\<sqinter> y) \<^bold>\<sqinter> (\<^bold>- x \<^bold>\<squnion> \<^bold>- y) = \<^bold>0"
by (simp only: conj_cancel_right conj_zero_right disj_zero_right)
next
have "(x \<^bold>\<sqinter> y) \<^bold>\<squnion> (\<^bold>- x \<^bold>\<squnion> \<^bold>- y) = (x \<^bold>\<squnion> (\<^bold>- x \<^bold>\<squnion> \<^bold>- y)) \<^bold>\<sqinter> (y \<^bold>\<squnion> (\<^bold>- x \<^bold>\<squnion> \<^bold>- y))"
by (rule disj_conj_distrib2)
also have "\<dots> = (\<^bold>- y \<^bold>\<squnion> (x \<^bold>\<squnion> \<^bold>- x)) \<^bold>\<sqinter> (\<^bold>- x \<^bold>\<squnion> (y \<^bold>\<squnion> \<^bold>- y))"
by (simp only: ac_simps)
finally show "(x \<^bold>\<sqinter> y) \<^bold>\<squnion> (\<^bold>- x \<^bold>\<squnion> \<^bold>- y) = \<^bold>1"
by (simp only: disj_cancel_right disj_one_right conj_one_right)
qed
context
begin
interpretation dual: abstract_boolean_algebra \<open>(\<^bold>\<squnion>)\<close> \<open>(\<^bold>\<sqinter>)\<close> compl \<open>\<^bold>1\<close> \<open>\<^bold>0\<close>
apply standard
apply (rule disj_conj_distrib)
apply (rule conj_disj_distrib)
apply simp_all
done
lemma de_Morgan_disj [simp]: "\<^bold>- (x \<^bold>\<squnion> y) = \<^bold>- x \<^bold>\<sqinter> \<^bold>- y"
by (fact dual.de_Morgan_conj)
end
end
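text \<open>Editorial sketch (not part of this changeset; the interpretation name
  \<open>bool_ba\<close> is purely illustrative): the two-element boolean algebra of
  \<^typ>\<open>bool\<close> satisfies the axioms of \<open>abstract_boolean_algebra\<close>, which makes
  all of the lemmas above available for \<open>(\<and>)\<close>, \<open>(\<or>)\<close>, \<open>Not\<close>, \<open>False\<close> and \<open>True\<close>.\<close>
interpretation bool_ba: abstract_boolean_algebra \<open>(\<and>)\<close> \<open>(\<or>)\<close> Not False True
  by unfold_locales auto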
subsection \<open>Symmetric Difference\<close>
locale abstract_boolean_algebra_sym_diff = abstract_boolean_algebra +
fixes xor :: \<open>'a \<Rightarrow> 'a \<Rightarrow> 'a\<close> (infixr \<open>\<^bold>\<ominus>\<close> 65)
assumes xor_def : \<open>x \<^bold>\<ominus> y = (x \<^bold>\<sqinter> \<^bold>- y) \<^bold>\<squnion> (\<^bold>- x \<^bold>\<sqinter> y)\<close>
begin
sublocale xor: comm_monoid xor \<open>\<^bold>0\<close>
proof
fix x y z :: 'a
let ?t = "(x \<^bold>\<sqinter> y \<^bold>\<sqinter> z) \<^bold>\<squnion> (x \<^bold>\<sqinter> \<^bold>- y \<^bold>\<sqinter> \<^bold>- z) \<^bold>\<squnion> (\<^bold>- x \<^bold>\<sqinter> y \<^bold>\<sqinter> \<^bold>- z) \<^bold>\<squnion> (\<^bold>- x \<^bold>\<sqinter> \<^bold>- y \<^bold>\<sqinter> z)"
have "?t \<^bold>\<squnion> (z \<^bold>\<sqinter> x \<^bold>\<sqinter> \<^bold>- x) \<^bold>\<squnion> (z \<^bold>\<sqinter> y \<^bold>\<sqinter> \<^bold>- y) = ?t \<^bold>\<squnion> (x \<^bold>\<sqinter> y \<^bold>\<sqinter> \<^bold>- y) \<^bold>\<squnion> (x \<^bold>\<sqinter> z \<^bold>\<sqinter> \<^bold>- z)"
by (simp only: conj_cancel_right conj_zero_right)
then show "(x \<^bold>\<ominus> y) \<^bold>\<ominus> z = x \<^bold>\<ominus> (y \<^bold>\<ominus> z)"
by (simp only: xor_def de_Morgan_disj de_Morgan_conj double_compl)
(simp only: conj_disj_distribs conj_ac ac_simps)
show "x \<^bold>\<ominus> y = y \<^bold>\<ominus> x"
by (simp only: xor_def ac_simps)
show "x \<^bold>\<ominus> \<^bold>0 = x"
by (simp add: xor_def)
qed
lemma xor_def2:
\<open>x \<^bold>\<ominus> y = (x \<^bold>\<squnion> y) \<^bold>\<sqinter> (\<^bold>- x \<^bold>\<squnion> \<^bold>- y)\<close>
proof -
note xor_def [of x y]
also have \<open>x \<^bold>\<sqinter> \<^bold>- y \<^bold>\<squnion> \<^bold>- x \<^bold>\<sqinter> y = ((x \<^bold>\<squnion> \<^bold>- x) \<^bold>\<sqinter> (\<^bold>- y \<^bold>\<squnion> \<^bold>- x)) \<^bold>\<sqinter> (x \<^bold>\<squnion> y) \<^bold>\<sqinter> (\<^bold>- y \<^bold>\<squnion> y)\<close>
by (simp add: ac_simps disj_conj_distribs)
also have \<open>\<dots> = (x \<^bold>\<squnion> y) \<^bold>\<sqinter> (\<^bold>- x \<^bold>\<squnion> \<^bold>- y)\<close>
by (simp add: ac_simps)
finally show ?thesis .
qed
lemma xor_one_right [simp]: "x \<^bold>\<ominus> \<^bold>1 = \<^bold>- x"
by (simp only: xor_def compl_one conj_zero_right conj_one_right disj.left_neutral)
lemma xor_one_left [simp]: "\<^bold>1 \<^bold>\<ominus> x = \<^bold>- x"
using xor_one_right [of x] by (simp add: ac_simps)
lemma xor_self [simp]: "x \<^bold>\<ominus> x = \<^bold>0"
by (simp only: xor_def conj_cancel_right conj_cancel_left disj_zero_right)
lemma xor_left_self [simp]: "x \<^bold>\<ominus> (x \<^bold>\<ominus> y) = y"
by (simp only: xor.assoc [symmetric] xor_self xor.left_neutral)
lemma xor_compl_left [simp]: "\<^bold>- x \<^bold>\<ominus> y = \<^bold>- (x \<^bold>\<ominus> y)"
by (simp add: ac_simps flip: xor_one_left)
lemma xor_compl_right [simp]: "x \<^bold>\<ominus> \<^bold>- y = \<^bold>- (x \<^bold>\<ominus> y)"
using xor.commute xor_compl_left by auto
lemma xor_cancel_right [simp]: "x \<^bold>\<ominus> \<^bold>- x = \<^bold>1"
by (simp only: xor_compl_right xor_self compl_zero)
lemma xor_cancel_left [simp]: "\<^bold>- x \<^bold>\<ominus> x = \<^bold>1"
by (simp only: xor_compl_left xor_self compl_zero)
lemma conj_xor_distrib: "x \<^bold>\<sqinter> (y \<^bold>\<ominus> z) = (x \<^bold>\<sqinter> y) \<^bold>\<ominus> (x \<^bold>\<sqinter> z)"
proof -
have *: "(x \<^bold>\<sqinter> y \<^bold>\<sqinter> \<^bold>- z) \<^bold>\<squnion> (x \<^bold>\<sqinter> \<^bold>- y \<^bold>\<sqinter> z) =
(y \<^bold>\<sqinter> x \<^bold>\<sqinter> \<^bold>- x) \<^bold>\<squnion> (z \<^bold>\<sqinter> x \<^bold>\<sqinter> \<^bold>- x) \<^bold>\<squnion> (x \<^bold>\<sqinter> y \<^bold>\<sqinter> \<^bold>- z) \<^bold>\<squnion> (x \<^bold>\<sqinter> \<^bold>- y \<^bold>\<sqinter> z)"
by (simp only: conj_cancel_right conj_zero_right disj.left_neutral)
then show "x \<^bold>\<sqinter> (y \<^bold>\<ominus> z) = (x \<^bold>\<sqinter> y) \<^bold>\<ominus> (x \<^bold>\<sqinter> z)"
by (simp (no_asm_use) only:
xor_def de_Morgan_disj de_Morgan_conj double_compl
conj_disj_distribs ac_simps)
qed
lemma conj_xor_distrib2: "(y \<^bold>\<ominus> z) \<^bold>\<sqinter> x = (y \<^bold>\<sqinter> x) \<^bold>\<ominus> (z \<^bold>\<sqinter> x)"
by (simp add: conj.commute conj_xor_distrib)
lemmas conj_xor_distribs = conj_xor_distrib conj_xor_distrib2
end
subsection \<open>Type classes\<close>
class boolean_algebra = distrib_lattice + bounded_lattice + minus + uminus +
assumes inf_compl_bot: \<open>x \<sqinter> - x = \<bottom>\<close>
and sup_compl_top: \<open>x \<squnion> - x = \<top>\<close>
assumes diff_eq: \<open>x - y = x \<sqinter> - y\<close>
begin
sublocale boolean_algebra: abstract_boolean_algebra \<open>(\<sqinter>)\<close> \<open>(\<squnion>)\<close> uminus \<bottom> \<top>
apply standard
apply (rule inf_sup_distrib1)
apply (rule sup_inf_distrib1)
apply (simp_all add: ac_simps inf_compl_bot sup_compl_top)
done
lemma compl_inf_bot: "- x \<sqinter> x = \<bottom>"
by (fact boolean_algebra.conj_cancel_left)
lemma compl_sup_top: "- x \<squnion> x = \<top>"
by (fact boolean_algebra.disj_cancel_left)
lemma compl_unique:
assumes "x \<sqinter> y = \<bottom>"
and "x \<squnion> y = \<top>"
shows "- x = y"
using assms by (rule boolean_algebra.compl_unique)
lemma double_compl: "- (- x) = x"
by (fact boolean_algebra.double_compl)
lemma compl_eq_compl_iff: "- x = - y \<longleftrightarrow> x = y"
by (fact boolean_algebra.compl_eq_compl_iff)
lemma compl_bot_eq: "- \<bottom> = \<top>"
by (fact boolean_algebra.compl_zero)
lemma compl_top_eq: "- \<top> = \<bottom>"
by (fact boolean_algebra.compl_one)
lemma compl_inf: "- (x \<sqinter> y) = - x \<squnion> - y"
by (fact boolean_algebra.de_Morgan_conj)
lemma compl_sup: "- (x \<squnion> y) = - x \<sqinter> - y"
by (fact boolean_algebra.de_Morgan_disj)
lemma compl_mono:
assumes "x \<le> y"
shows "- y \<le> - x"
proof -
from assms have "x \<squnion> y = y" by (simp only: le_iff_sup)
then have "- (x \<squnion> y) = - y" by simp
then have "- x \<sqinter> - y = - y" by simp
then have "- y \<sqinter> - x = - y" by (simp only: inf_commute)
then show ?thesis by (simp only: le_iff_inf)
qed
lemma compl_le_compl_iff [simp]: "- x \<le> - y \<longleftrightarrow> y \<le> x"
by (auto dest: compl_mono)
lemma compl_le_swap1:
assumes "y \<le> - x"
shows "x \<le> -y"
proof -
from assms have "- (- x) \<le> - y" by (simp only: compl_le_compl_iff)
then show ?thesis by simp
qed
lemma compl_le_swap2:
assumes "- y \<le> x"
shows "- x \<le> y"
proof -
from assms have "- x \<le> - (- y)" by (simp only: compl_le_compl_iff)
then show ?thesis by simp
qed
lemma compl_less_compl_iff [simp]: "- x < - y \<longleftrightarrow> y < x"
by (auto simp add: less_le)
lemma compl_less_swap1:
assumes "y < - x"
shows "x < - y"
proof -
from assms have "- (- x) < - y" by (simp only: compl_less_compl_iff)
then show ?thesis by simp
qed
lemma compl_less_swap2:
assumes "- y < x"
shows "- x < y"
proof -
from assms have "- x < - (- y)"
by (simp only: compl_less_compl_iff)
then show ?thesis by simp
qed
lemma sup_cancel_left1: \<open>x \<squnion> a \<squnion> (- x \<squnion> b) = \<top>\<close>
by (simp add: ac_simps)
lemma sup_cancel_left2: \<open>- x \<squnion> a \<squnion> (x \<squnion> b) = \<top>\<close>
by (simp add: ac_simps)
lemma inf_cancel_left1: \<open>x \<sqinter> a \<sqinter> (- x \<sqinter> b) = \<bottom>\<close>
by (simp add: ac_simps)
lemma inf_cancel_left2: \<open>- x \<sqinter> a \<sqinter> (x \<sqinter> b) = \<bottom>\<close>
by (simp add: ac_simps)
lemma sup_compl_top_left1 [simp]: \<open>- x \<squnion> (x \<squnion> y) = \<top>\<close>
by (simp add: sup_assoc [symmetric])
lemma sup_compl_top_left2 [simp]: \<open>x \<squnion> (- x \<squnion> y) = \<top>\<close>
using sup_compl_top_left1 [of "- x" y] by simp
lemma inf_compl_bot_left1 [simp]: \<open>- x \<sqinter> (x \<sqinter> y) = \<bottom>\<close>
by (simp add: inf_assoc [symmetric])
lemma inf_compl_bot_left2 [simp]: \<open>x \<sqinter> (- x \<sqinter> y) = \<bottom>\<close>
using inf_compl_bot_left1 [of "- x" y] by simp
lemma inf_compl_bot_right [simp]: \<open>x \<sqinter> (y \<sqinter> - x) = \<bottom>\<close>
by (subst inf_left_commute) simp
end
subsection \<open>Lattice on \<^typ>\<open>bool\<close>\<close>
instantiation bool :: boolean_algebra
begin
definition bool_Compl_def [simp]: "uminus = Not"
definition bool_diff_def [simp]: "A - B \<longleftrightarrow> A \<and> \<not> B"
definition [simp]: "P \<sqinter> Q \<longleftrightarrow> P \<and> Q"
definition [simp]: "P \<squnion> Q \<longleftrightarrow> P \<or> Q"
instance by standard auto
end
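(* Quick sanity check: on bool the operations just defined collapse to the
   usual propositional connectives. *)
lemma "(True \<sqinter> False) = False" and "- True = False"
  by simp_all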
lemma sup_boolI1: "P \<Longrightarrow> P \<squnion> Q"
by simp
lemma sup_boolI2: "Q \<Longrightarrow> P \<squnion> Q"
by simp
lemma sup_boolE: "P \<squnion> Q \<Longrightarrow> (P \<Longrightarrow> R) \<Longrightarrow> (Q \<Longrightarrow> R) \<Longrightarrow> R"
by auto
instance "fun" :: (type, boolean_algebra) boolean_algebra
by standard (rule ext, simp_all add: inf_compl_bot sup_compl_top diff_eq)+
subsection \<open>Lattice on unary and binary predicates\<close>
lemma inf1I: "A x \<Longrightarrow> B x \<Longrightarrow> (A \<sqinter> B) x"
by (simp add: inf_fun_def)
lemma inf2I: "A x y \<Longrightarrow> B x y \<Longrightarrow> (A \<sqinter> B) x y"
by (simp add: inf_fun_def)
lemma inf1E: "(A \<sqinter> B) x \<Longrightarrow> (A x \<Longrightarrow> B x \<Longrightarrow> P) \<Longrightarrow> P"
by (simp add: inf_fun_def)
lemma inf2E: "(A \<sqinter> B) x y \<Longrightarrow> (A x y \<Longrightarrow> B x y \<Longrightarrow> P) \<Longrightarrow> P"
by (simp add: inf_fun_def)
lemma inf1D1: "(A \<sqinter> B) x \<Longrightarrow> A x"
by (rule inf1E)
lemma inf2D1: "(A \<sqinter> B) x y \<Longrightarrow> A x y"
by (rule inf2E)
lemma inf1D2: "(A \<sqinter> B) x \<Longrightarrow> B x"
by (rule inf1E)
lemma inf2D2: "(A \<sqinter> B) x y \<Longrightarrow> B x y"
by (rule inf2E)
lemma sup1I1: "A x \<Longrightarrow> (A \<squnion> B) x"
by (simp add: sup_fun_def)
lemma sup2I1: "A x y \<Longrightarrow> (A \<squnion> B) x y"
by (simp add: sup_fun_def)
lemma sup1I2: "B x \<Longrightarrow> (A \<squnion> B) x"
by (simp add: sup_fun_def)
lemma sup2I2: "B x y \<Longrightarrow> (A \<squnion> B) x y"
by (simp add: sup_fun_def)
lemma sup1E: "(A \<squnion> B) x \<Longrightarrow> (A x \<Longrightarrow> P) \<Longrightarrow> (B x \<Longrightarrow> P) \<Longrightarrow> P"
by (simp add: sup_fun_def) iprover
lemma sup2E: "(A \<squnion> B) x y \<Longrightarrow> (A x y \<Longrightarrow> P) \<Longrightarrow> (B x y \<Longrightarrow> P) \<Longrightarrow> P"
by (simp add: sup_fun_def) iprover
text \<open> \<^medskip> Classical introduction rule: no commitment to \<open>A\<close> vs \<open>B\<close>.\<close>
lemma sup1CI: "(\<not> B x \<Longrightarrow> A x) \<Longrightarrow> (A \<squnion> B) x"
by (auto simp add: sup_fun_def)
lemma sup2CI: "(\<not> B x y \<Longrightarrow> A x y) \<Longrightarrow> (A \<squnion> B) x y"
by (auto simp add: sup_fun_def)
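(* Small example: the lifted lattice operations act pointwise on predicates. *)
lemma "((\<lambda>n::nat. 0 < n) \<sqinter> (\<lambda>n. n < 2)) 1"
  by (simp add: inf_fun_def)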
subsection \<open>Simproc setup\<close>
locale boolean_algebra_cancel
begin
lemma sup1: "(A::'a::semilattice_sup) \<equiv> sup k a \<Longrightarrow> sup A b \<equiv> sup k (sup a b)"
by (simp only: ac_simps)
lemma sup2: "(B::'a::semilattice_sup) \<equiv> sup k b \<Longrightarrow> sup a B \<equiv> sup k (sup a b)"
by (simp only: ac_simps)
lemma sup0: "(a::'a::bounded_semilattice_sup_bot) \<equiv> sup a bot"
by simp
lemma inf1: "(A::'a::semilattice_inf) \<equiv> inf k a \<Longrightarrow> inf A b \<equiv> inf k (inf a b)"
by (simp only: ac_simps)
lemma inf2: "(B::'a::semilattice_inf) \<equiv> inf k b \<Longrightarrow> inf a B \<equiv> inf k (inf a b)"
by (simp only: ac_simps)
lemma inf0: "(a::'a::bounded_semilattice_inf_top) \<equiv> inf a top"
by simp
end
ML_file \<open>Tools/boolean_algebra_cancel.ML\<close>
simproc_setup boolean_algebra_cancel_sup ("sup a b::'a::boolean_algebra") =
- \<open>fn phi => fn ss => try Boolean_Algebra_Cancel.cancel_sup_conv\<close>
+ \<open>K (K (try Boolean_Algebra_Cancel.cancel_sup_conv))\<close>
simproc_setup boolean_algebra_cancel_inf ("inf a b::'a::boolean_algebra") =
- \<open>fn phi => fn ss => try Boolean_Algebra_Cancel.cancel_inf_conv\<close>
+ \<open>K (K (try Boolean_Algebra_Cancel.cancel_inf_conv))\<close>
context boolean_algebra
begin
lemma shunt1: "(x \<sqinter> y \<le> z) \<longleftrightarrow> (x \<le> -y \<squnion> z)"
proof
assume "x \<sqinter> y \<le> z"
hence "-y \<squnion> (x \<sqinter> y) \<le> -y \<squnion> z"
using sup.mono by blast
hence "-y \<squnion> x \<le> -y \<squnion> z"
by (simp add: sup_inf_distrib1)
thus "x \<le> -y \<squnion> z"
by simp
next
assume "x \<le> -y \<squnion> z"
hence "x \<sqinter> y \<le> (-y \<squnion> z) \<sqinter> y"
using inf_mono by auto
thus "x \<sqinter> y \<le> z"
using inf.boundedE inf_sup_distrib2 by auto
qed
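(* Instantiated to bool, shunt1 is the familiar equivalence
   (x \<and> y \<longrightarrow> z) \<longleftrightarrow> (x \<longrightarrow> \<not> y \<or> z). *)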
lemma shunt2: "(x \<sqinter> -y \<le> z) \<longleftrightarrow> (x \<le> y \<squnion> z)"
by (simp add: shunt1)
lemma inf_shunt: "(x \<sqinter> y = \<bottom>) \<longleftrightarrow> (x \<le> - y)"
by (simp add: order.eq_iff shunt1)
lemma sup_shunt: "(x \<squnion> y = \<top>) \<longleftrightarrow> (- x \<le> y)"
using inf_shunt [of \<open>- x\<close> \<open>- y\<close>, symmetric]
by (simp flip: compl_sup compl_top_eq)
lemma diff_shunt_var: "(x - y = \<bottom>) \<longleftrightarrow> (x \<le> y)"
by (simp add: diff_eq inf_shunt)
lemma sup_neg_inf:
\<open>p \<le> q \<squnion> r \<longleftrightarrow> p \<sqinter> -q \<le> r\<close> (is \<open>?P \<longleftrightarrow> ?Q\<close>)
proof
assume ?P
then have \<open>p \<sqinter> - q \<le> (q \<squnion> r) \<sqinter> - q\<close>
by (rule inf_mono) simp
then show ?Q
by (simp add: inf_sup_distrib2)
next
assume ?Q
then have \<open>p \<sqinter> - q \<squnion> q \<le> r \<squnion> q\<close>
by (rule sup_mono) simp
then show ?P
by (simp add: sup_inf_distrib ac_simps)
qed
end
end
diff --git a/src/HOL/Data_Structures/Less_False.thy b/src/HOL/Data_Structures/Less_False.thy
--- a/src/HOL/Data_Structures/Less_False.thy
+++ b/src/HOL/Data_Structures/Less_False.thy
@@ -1,31 +1,31 @@
(* Author: Tobias Nipkow *)
section \<open>Improved Simproc for $<$\<close>
theory Less_False
imports Main
begin
-simproc_setup less_False ("(x::'a::order) < y") = \<open>fn _ => fn ctxt => fn ct =>
+simproc_setup less_False ("(x::'a::order) < y") = \<open>K (fn ctxt => fn ct =>
let
fun prp t thm = Thm.full_prop_of thm aconv t;
val eq_False_if_not = @{thm eq_False} RS iffD2
fun prove_less_False ((less as Const(_,T)) $ r $ s) =
let val prems = Simplifier.prems_of ctxt;
val le = Const (\<^const_name>\<open>less_eq\<close>, T);
val t = HOLogic.mk_Trueprop(le $ s $ r);
in case find_first (prp t) prems of
NONE =>
let val t = HOLogic.mk_Trueprop(less $ s $ r)
in case find_first (prp t) prems of
NONE => NONE
| SOME thm => SOME(mk_meta_eq((thm RS @{thm less_not_sym}) RS eq_False_if_not))
end
| SOME thm => NONE
end;
- in prove_less_False (Thm.term_of ct) end
+ in prove_less_False (Thm.term_of ct) end)
\<close>
end
diff --git a/src/HOL/Decision_Procs/Commutative_Ring.thy b/src/HOL/Decision_Procs/Commutative_Ring.thy
--- a/src/HOL/Decision_Procs/Commutative_Ring.thy
+++ b/src/HOL/Decision_Procs/Commutative_Ring.thy
@@ -1,963 +1,962 @@
(* Title: HOL/Decision_Procs/Commutative_Ring.thy
Author: Bernhard Haeupler, Stefan Berghofer, and Amine Chaieb
Proving equalities in commutative rings done "right" in Isabelle/HOL.
*)
section \<open>Proving equalities in commutative rings\<close>
theory Commutative_Ring
imports
Conversions
Algebra_Aux
"HOL-Library.Code_Target_Numeral"
begin
text \<open>Syntax of multivariate polynomials (pol) and polynomial expressions.\<close>
datatype pol =
Pc int
| Pinj nat pol
| PX pol nat pol
datatype polex =
Var nat
| Const int
| Add polex polex
| Sub polex polex
| Mul polex polex
| Pow polex nat
| Neg polex
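(* For illustration: the polynomial expression x0 * x0 + 1 in the variables
   x0, x1, ... is written in the shadow syntax as
   Add (Mul (Var 0) (Var 0)) (Const 1). *)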
text \<open>Interpretation functions for the shadow syntax.\<close>
context cring
begin
definition in_carrier :: "'a list \<Rightarrow> bool"
where "in_carrier xs \<longleftrightarrow> (\<forall>x\<in>set xs. x \<in> carrier R)"
lemma in_carrier_Nil: "in_carrier []"
by (simp add: in_carrier_def)
lemma in_carrier_Cons: "x \<in> carrier R \<Longrightarrow> in_carrier xs \<Longrightarrow> in_carrier (x # xs)"
by (simp add: in_carrier_def)
lemma drop_in_carrier [simp]: "in_carrier xs \<Longrightarrow> in_carrier (drop n xs)"
using set_drop_subset [of n xs] by (auto simp add: in_carrier_def)
primrec head :: "'a list \<Rightarrow> 'a"
where
"head [] = \<zero>"
| "head (x # xs) = x"
lemma head_closed [simp]: "in_carrier xs \<Longrightarrow> head xs \<in> carrier R"
by (cases xs) (simp_all add: in_carrier_def)
primrec Ipol :: "'a list \<Rightarrow> pol \<Rightarrow> 'a"
where
"Ipol l (Pc c) = \<guillemotleft>c\<guillemotright>"
| "Ipol l (Pinj i P) = Ipol (drop i l) P"
| "Ipol l (PX P x Q) = Ipol l P \<otimes> head l [^] x \<oplus> Ipol (drop 1 l) Q"
lemma Ipol_Pc:
"Ipol l (Pc 0) = \<zero>"
"Ipol l (Pc 1) = \<one>"
"Ipol l (Pc (numeral n)) = \<guillemotleft>numeral n\<guillemotright>"
"Ipol l (Pc (- numeral n)) = \<ominus> \<guillemotleft>numeral n\<guillemotright>"
by simp_all
lemma Ipol_closed [simp]: "in_carrier l \<Longrightarrow> Ipol l p \<in> carrier R"
by (induct p arbitrary: l) simp_all
primrec Ipolex :: "'a list \<Rightarrow> polex \<Rightarrow> 'a"
where
"Ipolex l (Var n) = head (drop n l)"
| "Ipolex l (Const i) = \<guillemotleft>i\<guillemotright>"
| "Ipolex l (Add P Q) = Ipolex l P \<oplus> Ipolex l Q"
| "Ipolex l (Sub P Q) = Ipolex l P \<ominus> Ipolex l Q"
| "Ipolex l (Mul P Q) = Ipolex l P \<otimes> Ipolex l Q"
| "Ipolex l (Pow p n) = Ipolex l p [^] n"
| "Ipolex l (Neg P) = \<ominus> Ipolex l P"
lemma Ipolex_Const:
"Ipolex l (Const 0) = \<zero>"
"Ipolex l (Const 1) = \<one>"
"Ipolex l (Const (numeral n)) = \<guillemotleft>numeral n\<guillemotright>"
by simp_all
end
text \<open>Create normalized polynomials, given normalized inputs.\<close>
definition mkPinj :: "nat \<Rightarrow> pol \<Rightarrow> pol"
where "mkPinj x P =
(case P of
Pc c \<Rightarrow> Pc c
| Pinj y P \<Rightarrow> Pinj (x + y) P
| PX p1 y p2 \<Rightarrow> Pinj x P)"
definition mkPX :: "pol \<Rightarrow> nat \<Rightarrow> pol \<Rightarrow> pol"
where "mkPX P i Q =
(case P of
Pc c \<Rightarrow> if c = 0 then mkPinj 1 Q else PX P i Q
| Pinj j R \<Rightarrow> PX P i Q
| PX P2 i2 Q2 \<Rightarrow> if Q2 = Pc 0 then PX P2 (i + i2) Q else PX P i Q)"
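(* The smart constructors keep results normalized, e.g. mkPinj 2 (Pinj 3 P)
   collapses to Pinj 5 P, and mkPX (Pc 0) i Q avoids a zero leading
   coefficient by returning mkPinj 1 Q. *)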
text \<open>Defining the basic ring operations on normalized polynomials\<close>
function add :: "pol \<Rightarrow> pol \<Rightarrow> pol" (infixl "\<langle>+\<rangle>" 65)
where
"Pc a \<langle>+\<rangle> Pc b = Pc (a + b)"
| "Pc c \<langle>+\<rangle> Pinj i P = Pinj i (P \<langle>+\<rangle> Pc c)"
| "Pinj i P \<langle>+\<rangle> Pc c = Pinj i (P \<langle>+\<rangle> Pc c)"
| "Pc c \<langle>+\<rangle> PX P i Q = PX P i (Q \<langle>+\<rangle> Pc c)"
| "PX P i Q \<langle>+\<rangle> Pc c = PX P i (Q \<langle>+\<rangle> Pc c)"
| "Pinj x P \<langle>+\<rangle> Pinj y Q =
(if x = y then mkPinj x (P \<langle>+\<rangle> Q)
else (if x > y then mkPinj y (Pinj (x - y) P \<langle>+\<rangle> Q)
else mkPinj x (Pinj (y - x) Q \<langle>+\<rangle> P)))"
| "Pinj x P \<langle>+\<rangle> PX Q y R =
(if x = 0 then P \<langle>+\<rangle> PX Q y R
else (if x = 1 then PX Q y (R \<langle>+\<rangle> P)
else PX Q y (R \<langle>+\<rangle> Pinj (x - 1) P)))"
| "PX P x R \<langle>+\<rangle> Pinj y Q =
(if y = 0 then PX P x R \<langle>+\<rangle> Q
else (if y = 1 then PX P x (R \<langle>+\<rangle> Q)
else PX P x (R \<langle>+\<rangle> Pinj (y - 1) Q)))"
| "PX P1 x P2 \<langle>+\<rangle> PX Q1 y Q2 =
(if x = y then mkPX (P1 \<langle>+\<rangle> Q1) x (P2 \<langle>+\<rangle> Q2)
else (if x > y then mkPX (PX P1 (x - y) (Pc 0) \<langle>+\<rangle> Q1) y (P2 \<langle>+\<rangle> Q2)
else mkPX (PX Q1 (y - x) (Pc 0) \<langle>+\<rangle> P1) x (P2 \<langle>+\<rangle> Q2)))"
by pat_completeness auto
termination by (relation "measure (\<lambda>(x, y). size x + size y)") auto
function mul :: "pol \<Rightarrow> pol \<Rightarrow> pol" (infixl "\<langle>*\<rangle>" 70)
where
"Pc a \<langle>*\<rangle> Pc b = Pc (a * b)"
| "Pc c \<langle>*\<rangle> Pinj i P =
(if c = 0 then Pc 0 else mkPinj i (P \<langle>*\<rangle> Pc c))"
| "Pinj i P \<langle>*\<rangle> Pc c =
(if c = 0 then Pc 0 else mkPinj i (P \<langle>*\<rangle> Pc c))"
| "Pc c \<langle>*\<rangle> PX P i Q =
(if c = 0 then Pc 0 else mkPX (P \<langle>*\<rangle> Pc c) i (Q \<langle>*\<rangle> Pc c))"
| "PX P i Q \<langle>*\<rangle> Pc c =
(if c = 0 then Pc 0 else mkPX (P \<langle>*\<rangle> Pc c) i (Q \<langle>*\<rangle> Pc c))"
| "Pinj x P \<langle>*\<rangle> Pinj y Q =
(if x = y then mkPinj x (P \<langle>*\<rangle> Q)
else
(if x > y then mkPinj y (Pinj (x - y) P \<langle>*\<rangle> Q)
else mkPinj x (Pinj (y - x) Q \<langle>*\<rangle> P)))"
| "Pinj x P \<langle>*\<rangle> PX Q y R =
(if x = 0 then P \<langle>*\<rangle> PX Q y R
else
(if x = 1 then mkPX (Pinj x P \<langle>*\<rangle> Q) y (R \<langle>*\<rangle> P)
else mkPX (Pinj x P \<langle>*\<rangle> Q) y (R \<langle>*\<rangle> Pinj (x - 1) P)))"
| "PX P x R \<langle>*\<rangle> Pinj y Q =
(if y = 0 then PX P x R \<langle>*\<rangle> Q
else
(if y = 1 then mkPX (Pinj y Q \<langle>*\<rangle> P) x (R \<langle>*\<rangle> Q)
else mkPX (Pinj y Q \<langle>*\<rangle> P) x (R \<langle>*\<rangle> Pinj (y - 1) Q)))"
| "PX P1 x P2 \<langle>*\<rangle> PX Q1 y Q2 =
mkPX (P1 \<langle>*\<rangle> Q1) (x + y) (P2 \<langle>*\<rangle> Q2) \<langle>+\<rangle>
(mkPX (P1 \<langle>*\<rangle> mkPinj 1 Q2) x (Pc 0) \<langle>+\<rangle>
(mkPX (Q1 \<langle>*\<rangle> mkPinj 1 P2) y (Pc 0)))"
by pat_completeness auto
termination by (relation "measure (\<lambda>(x, y). size x + size y)")
(auto simp add: mkPinj_def split: pol.split)
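(* On constants both operations act coefficient-wise: by their first defining
   equations, Pc 2 \<langle>+\<rangle> Pc 3 reduces to Pc 5 and Pc 2 \<langle>*\<rangle> Pc 3 to Pc 6. *)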
text \<open>Negation\<close>
primrec neg :: "pol \<Rightarrow> pol"
where
"neg (Pc c) = Pc (- c)"
| "neg (Pinj i P) = Pinj i (neg P)"
| "neg (PX P x Q) = PX (neg P) x (neg Q)"
text \<open>Subtraction\<close>
definition sub :: "pol \<Rightarrow> pol \<Rightarrow> pol" (infixl "\<langle>-\<rangle>" 65)
where "sub P Q = P \<langle>+\<rangle> neg Q"
text \<open>Square for Fast Exponentiation\<close>
primrec sqr :: "pol \<Rightarrow> pol"
where
"sqr (Pc c) = Pc (c * c)"
| "sqr (Pinj i P) = mkPinj i (sqr P)"
| "sqr (PX A x B) = mkPX (sqr A) (x + x) (sqr B) \<langle>+\<rangle> mkPX (Pc 2 \<langle>*\<rangle> A \<langle>*\<rangle> mkPinj 1 B) x (Pc 0)"
text \<open>Fast Exponentiation\<close>
fun pow :: "nat \<Rightarrow> pol \<Rightarrow> pol"
where pow_if [simp del]: "pow n P =
(if n = 0 then Pc 1
else if even n then pow (n div 2) (sqr P)
else P \<langle>*\<rangle> pow (n div 2) (sqr P))"
lemma pow_simps [simp]:
"pow 0 P = Pc 1"
"pow (2 * n) P = pow n (sqr P)"
"pow (Suc (2 * n)) P = P \<langle>*\<rangle> pow n (sqr P)"
by (simp_all add: pow_if)
lemma even_pow: "even n \<Longrightarrow> pow n P = pow (n div 2) (sqr P)"
by (erule evenE) simp
lemma odd_pow: "odd n \<Longrightarrow> pow n P = P \<langle>*\<rangle> pow (n div 2) (sqr P)"
by (erule oddE) simp
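(* Example of the squaring scheme: pow 5 P unfolds to
   P \<langle>*\<rangle> (sqr (sqr P) \<langle>*\<rangle> Pc 1), so the fifth power needs only two squarings
   and two multiplications instead of four successive multiplications. *)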
text \<open>Normalization of polynomial expressions\<close>
primrec norm :: "polex \<Rightarrow> pol"
where
"norm (Var n) =
(if n = 0 then PX (Pc 1) 1 (Pc 0)
else Pinj n (PX (Pc 1) 1 (Pc 0)))"
| "norm (Const i) = Pc i"
| "norm (Add P Q) = norm P \<langle>+\<rangle> norm Q"
| "norm (Sub P Q) = norm P \<langle>-\<rangle> norm Q"
| "norm (Mul P Q) = norm P \<langle>*\<rangle> norm Q"
| "norm (Pow P n) = pow n (norm P)"
| "norm (Neg P) = neg (norm P)"
context cring
begin
text \<open>mkPinj preserves semantics\<close>
lemma mkPinj_ci: "Ipol l (mkPinj a B) = Ipol l (Pinj a B)"
by (induct B) (auto simp add: mkPinj_def algebra_simps)
text \<open>mkPX preserves semantics\<close>
lemma mkPX_ci: "in_carrier l \<Longrightarrow> Ipol l (mkPX A b C) = Ipol l (PX A b C)"
by (cases A) (auto simp add: mkPX_def mkPinj_ci nat_pow_mult [symmetric] m_ac)
text \<open>Correctness theorems for the implemented operations\<close>
text \<open>Negation\<close>
lemma neg_ci: "in_carrier l \<Longrightarrow> Ipol l (neg P) = \<ominus> (Ipol l P)"
by (induct P arbitrary: l) (auto simp add: minus_add l_minus)
text \<open>Addition\<close>
lemma add_ci: "in_carrier l \<Longrightarrow> Ipol l (P \<langle>+\<rangle> Q) = Ipol l P \<oplus> Ipol l Q"
proof (induct P Q arbitrary: l rule: add.induct)
case (6 x P y Q)
consider "x < y" | "x = y" | "x > y" by arith
then show ?case
proof cases
case 1
with 6 show ?thesis by (simp add: mkPinj_ci a_ac)
next
case 2
with 6 show ?thesis by (simp add: mkPinj_ci)
next
case 3
with 6 show ?thesis by (simp add: mkPinj_ci)
qed
next
case (7 x P Q y R)
consider "x = 0" | "x = 1" | "x > 1" by arith
then show ?case
proof cases
case 1
with 7 show ?thesis by simp
next
case 2
with 7 show ?thesis by (simp add: a_ac)
next
case 3
with 7 show ?thesis by (cases x) (simp_all add: a_ac)
qed
next
case (8 P x R y Q)
then show ?case by (simp add: a_ac)
next
case (9 P1 x P2 Q1 y Q2)
consider "x = y" | d where "d + x = y" | d where "d + y = x"
by atomize_elim arith
then show ?case
proof cases
case 1
with 9 show ?thesis by (simp add: mkPX_ci r_distr a_ac m_ac)
next
case 2
with 9 show ?thesis by (auto simp add: mkPX_ci nat_pow_mult [symmetric] r_distr a_ac m_ac)
next
case 3
with 9 show ?thesis by (auto simp add: nat_pow_mult [symmetric] mkPX_ci r_distr a_ac m_ac)
qed
qed (auto simp add: a_ac m_ac)
text \<open>Multiplication\<close>
lemma mul_ci: "in_carrier l \<Longrightarrow> Ipol l (P \<langle>*\<rangle> Q) = Ipol l P \<otimes> Ipol l Q"
by (induct P Q arbitrary: l rule: mul.induct)
(simp_all add: mkPX_ci mkPinj_ci a_ac m_ac r_distr add_ci nat_pow_mult [symmetric])
text \<open>Subtraction\<close>
lemma sub_ci: "in_carrier l \<Longrightarrow> Ipol l (P \<langle>-\<rangle> Q) = Ipol l P \<ominus> Ipol l Q"
by (simp add: add_ci neg_ci sub_def minus_eq)
text \<open>Square\<close>
lemma sqr_ci: "in_carrier ls \<Longrightarrow> Ipol ls (sqr P) = Ipol ls P \<otimes> Ipol ls P"
by (induct P arbitrary: ls)
(simp_all add: add_ci mkPinj_ci mkPX_ci mul_ci l_distr r_distr
a_ac m_ac nat_pow_mult [symmetric] of_int_2)
text \<open>Power\<close>
lemma pow_ci: "in_carrier ls \<Longrightarrow> Ipol ls (pow n P) = Ipol ls P [^] n"
proof (induct n arbitrary: P rule: less_induct)
case (less k)
consider "k = 0" | "k > 0" by arith
then show ?case
proof cases
case 1
then show ?thesis by simp
next
case 2
then have "k div 2 < k" by arith
with less have *: "Ipol ls (pow (k div 2) (sqr P)) = Ipol ls (sqr P) [^] (k div 2)"
by simp
show ?thesis
proof (cases "even k")
case True
with * \<open>in_carrier ls\<close> show ?thesis
by (simp add: even_pow sqr_ci nat_pow_distrib nat_pow_mult
mult_2 [symmetric] even_two_times_div_two)
next
case False
with * \<open>in_carrier ls\<close> show ?thesis
by (simp add: odd_pow mul_ci sqr_ci nat_pow_distrib nat_pow_mult
mult_2 [symmetric] trans [OF nat_pow_Suc m_comm, symmetric])
qed
qed
qed
text \<open>Normalization preserves semantics\<close>
lemma norm_ci: "in_carrier l \<Longrightarrow> Ipolex l Pe = Ipol l (norm Pe)"
by (induct Pe) (simp_all add: add_ci sub_ci mul_ci neg_ci pow_ci)
text \<open>Reflection lemma: Key to the (incomplete) decision procedure\<close>
lemma norm_eq:
assumes "in_carrier l"
and "norm P1 = norm P2"
shows "Ipolex l P1 = Ipolex l P2"
proof -
from assms have "Ipol l (norm P1) = Ipol l (norm P2)" by simp
with assms show ?thesis by (simp only: norm_ci)
qed
end
text \<open>Monomials\<close>
datatype mon =
Mc int
| Minj nat mon
| MX nat mon
primrec (in cring) Imon :: "'a list \<Rightarrow> mon \<Rightarrow> 'a"
where
"Imon l (Mc c) = \<guillemotleft>c\<guillemotright>"
| "Imon l (Minj i M) = Imon (drop i l) M"
| "Imon l (MX x M) = Imon (drop 1 l) M \<otimes> head l [^] x"
lemma (in cring) Imon_closed [simp]: "in_carrier l \<Longrightarrow> Imon l m \<in> carrier R"
by (induct m arbitrary: l) simp_all
definition mkMinj :: "nat \<Rightarrow> mon \<Rightarrow> mon"
where "mkMinj i M =
(case M of
Mc c \<Rightarrow> Mc c
| Minj j M \<Rightarrow> Minj (i + j) M
| _ \<Rightarrow> Minj i M)"
definition Minj_pred :: "nat \<Rightarrow> mon \<Rightarrow> mon"
where "Minj_pred i M = (if i = 1 then M else mkMinj (i - 1) M)"
primrec mkMX :: "nat \<Rightarrow> mon \<Rightarrow> mon"
where
"mkMX i (Mc c) = MX i (Mc c)"
| "mkMX i (Minj j M) = (if j = 0 then mkMX i M else MX i (Minj_pred j M))"
| "mkMX i (MX j M) = MX (i + j) M"
lemma (in cring) mkMinj_correct: "Imon l (mkMinj i M) = Imon l (Minj i M)"
by (simp add: mkMinj_def add.commute split: mon.split)
lemma (in cring) Minj_pred_correct: "0 < i \<Longrightarrow> Imon (drop 1 l) (Minj_pred i M) = Imon l (Minj i M)"
by (simp add: Minj_pred_def mkMinj_correct)
lemma (in cring) mkMX_correct: "in_carrier l \<Longrightarrow> Imon l (mkMX i M) = Imon l M \<otimes> head l [^] i"
by (induct M)
(simp_all add: Minj_pred_correct [simplified] nat_pow_mult [symmetric] m_ac split: mon.split)
fun cfactor :: "pol \<Rightarrow> int \<Rightarrow> pol \<times> pol"
where
"cfactor (Pc c') c = (Pc (c' mod c), Pc (c' div c))"
| "cfactor (Pinj i P) c =
(let (R, S) = cfactor P c
in (mkPinj i R, mkPinj i S))"
| "cfactor (PX P i Q) c =
(let
(R1, S1) = cfactor P c;
(R2, S2) = cfactor Q c
in (mkPX R1 i R2, mkPX S1 i S2))"
lemma (in cring) cfactor_correct:
"in_carrier l \<Longrightarrow> Ipol l P = Ipol l (fst (cfactor P c)) \<oplus> \<guillemotleft>c\<guillemotright> \<otimes> Ipol l (snd (cfactor P c))"
proof (induct P c arbitrary: l rule: cfactor.induct)
case (1 c' c)
show ?case
by (simp add: of_int_mult [symmetric] of_int_add [symmetric] del: of_int_mult)
next
case (2 i P c)
then show ?case
by (simp add: mkPinj_ci split_beta)
next
case (3 P i Q c)
from 3(1) 3(2) [OF refl prod.collapse] 3(3)
show ?case
by (simp add: mkPX_ci r_distr a_ac m_ac split_beta)
qed
fun mfactor :: "pol \<Rightarrow> mon \<Rightarrow> pol \<times> pol"
where
"mfactor P (Mc c) = (if c = 1 then (Pc 0, P) else cfactor P c)"
| "mfactor (Pc d) M = (Pc d, Pc 0)"
| "mfactor (Pinj i P) (Minj j M) =
(if i = j then
let (R, S) = mfactor P M
in (mkPinj i R, mkPinj i S)
else if i < j then
let (R, S) = mfactor P (Minj (j - i) M)
in (mkPinj i R, mkPinj i S)
else (Pinj i P, Pc 0))"
| "mfactor (Pinj i P) (MX j M) = (Pinj i P, Pc 0)"
| "mfactor (PX P i Q) (Minj j M) =
(if j = 0 then mfactor (PX P i Q) M
else
let
(R1, S1) = mfactor P (Minj j M);
(R2, S2) = mfactor Q (Minj_pred j M)
in (mkPX R1 i R2, mkPX S1 i S2))"
| "mfactor (PX P i Q) (MX j M) =
(if i = j then
let (R, S) = mfactor P (mkMinj 1 M)
in (mkPX R i Q, S)
else if i < j then
let (R, S) = mfactor P (MX (j - i) M)
in (mkPX R i Q, S)
else
let (R, S) = mfactor P (mkMinj 1 M)
in (mkPX R i Q, mkPX S (i - j) (Pc 0)))"
lemmas mfactor_induct = mfactor.induct
[case_names Mc Pc_Minj Pc_MX Pinj_Minj Pinj_MX PX_Minj PX_MX]
lemma (in cring) mfactor_correct:
"in_carrier l \<Longrightarrow>
Ipol l P =
Ipol l (fst (mfactor P M)) \<oplus>
Imon l M \<otimes> Ipol l (snd (mfactor P M))"
proof (induct P M arbitrary: l rule: mfactor_induct)
case (Mc P c)
then show ?case
by (simp add: cfactor_correct)
next
case (Pc_Minj d i M)
then show ?case
by simp
next
case (Pc_MX d i M)
then show ?case
by simp
next
case (Pinj_Minj i P j M)
then show ?case
by (simp add: mkPinj_ci split_beta)
next
case (Pinj_MX i P j M)
then show ?case
by simp
next
case (PX_Minj P i Q j M)
from PX_Minj(1,2) PX_Minj(3) [OF _ refl prod.collapse] PX_Minj(4)
show ?case
by (simp add: mkPX_ci Minj_pred_correct [simplified] r_distr a_ac m_ac split_beta)
next
case (PX_MX P i Q j M)
from \<open>in_carrier l\<close>
have eq1: "(Imon (drop (Suc 0) l) M \<otimes> head l [^] (j - i)) \<otimes>
Ipol l (snd (mfactor P (MX (j - i) M))) \<otimes> head l [^] i =
Imon (drop (Suc 0) l) M \<otimes>
Ipol l (snd (mfactor P (MX (j - i) M))) \<otimes>
(head l [^] (j - i) \<otimes> head l [^] i)"
by (simp add: m_ac)
from \<open>in_carrier l\<close>
have eq2: "(Imon (drop (Suc 0) l) M \<otimes> head l [^] j) \<otimes>
(Ipol l (snd (mfactor P (mkMinj (Suc 0) M))) \<otimes> head l [^] (i - j)) =
Imon (drop (Suc 0) l) M \<otimes>
Ipol l (snd (mfactor P (mkMinj (Suc 0) M))) \<otimes>
(head l [^] (i - j) \<otimes> head l [^] j)"
by (simp add: m_ac)
from PX_MX \<open>in_carrier l\<close> show ?case
by (simp add: mkPX_ci mkMinj_correct l_distr eq1 eq2 split_beta nat_pow_mult)
(simp add: a_ac m_ac)
qed
primrec mon_of_pol :: "pol \<Rightarrow> mon option"
where
"mon_of_pol (Pc c) = Some (Mc c)"
| "mon_of_pol (Pinj i P) = (case mon_of_pol P of
None \<Rightarrow> None
| Some M \<Rightarrow> Some (mkMinj i M))"
| "mon_of_pol (PX P i Q) =
(if Q = Pc 0 then (case mon_of_pol P of
None \<Rightarrow> None
| Some M \<Rightarrow> Some (mkMX i M))
else None)"
lemma (in cring) mon_of_pol_correct:
assumes "in_carrier l"
and "mon_of_pol P = Some M"
shows "Ipol l P = Imon l M"
using assms
proof (induct P arbitrary: M l)
case (PX P1 i P2)
from PX(1,3,4)
show ?case
by (auto simp add: mkMinj_correct mkMX_correct split: if_split_asm option.split_asm)
qed (auto simp add: mkMinj_correct split: option.split_asm)
fun (in cring) Ipolex_polex_list :: "'a list \<Rightarrow> (polex \<times> polex) list \<Rightarrow> bool"
where
"Ipolex_polex_list l [] = True"
| "Ipolex_polex_list l ((P, Q) # pps) = ((Ipolex l P = Ipolex l Q) \<and> Ipolex_polex_list l pps)"
fun (in cring) Imon_pol_list :: "'a list \<Rightarrow> (mon \<times> pol) list \<Rightarrow> bool"
where
"Imon_pol_list l [] = True"
| "Imon_pol_list l ((M, P) # mps) = ((Imon l M = Ipol l P) \<and> Imon_pol_list l mps)"
fun mk_monpol_list :: "(polex \<times> polex) list \<Rightarrow> (mon \<times> pol) list"
where
"mk_monpol_list [] = []"
| "mk_monpol_list ((P, Q) # pps) =
(case mon_of_pol (norm P) of
None \<Rightarrow> mk_monpol_list pps
| Some M \<Rightarrow> (M, norm Q) # mk_monpol_list pps)"
lemma (in cring) mk_monpol_list_correct:
"in_carrier l \<Longrightarrow> Ipolex_polex_list l pps \<Longrightarrow> Imon_pol_list l (mk_monpol_list pps)"
by (induct pps rule: mk_monpol_list.induct)
(auto split: option.split simp add: norm_ci [symmetric] mon_of_pol_correct [symmetric])
definition ponesubst :: "pol \<Rightarrow> mon \<Rightarrow> pol \<Rightarrow> pol option"
where "ponesubst P1 M P2 =
(let (Q, R) = mfactor P1 M in
(case R of
Pc c \<Rightarrow> if c = 0 then None else Some (add Q (mul P2 R))
| _ \<Rightarrow> Some (add Q (mul P2 R))))"
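(* ponesubst P1 M P2 factors P1 as Q \<oplus> M \<otimes> R via mfactor and, unless R is the
   zero constant (roughly: M does not occur in P1), returns
   Q \<langle>+\<rangle> (P2 \<langle>*\<rangle> R), thereby rewriting the monomial M to P2 once. *)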
fun pnsubst1 :: "pol \<Rightarrow> mon \<Rightarrow> pol \<Rightarrow> nat \<Rightarrow> pol"
where "pnsubst1 P1 M P2 n =
(case ponesubst P1 M P2 of
None \<Rightarrow> P1
| Some P3 \<Rightarrow> if n = 0 then P3 else pnsubst1 P3 M P2 (n - 1))"
lemma pnsubst1_0 [simp]: "pnsubst1 P1 M P2 0 = (case ponesubst P1 M P2 of
None \<Rightarrow> P1 | Some P3 \<Rightarrow> P3)"
by (simp split: option.split)
lemma pnsubst1_Suc [simp]:
"pnsubst1 P1 M P2 (Suc n) =
(case ponesubst P1 M P2 of
None \<Rightarrow> P1
| Some P3 \<Rightarrow> pnsubst1 P3 M P2 n)"
by (simp split: option.split)
declare pnsubst1.simps [simp del]
definition pnsubst :: "pol \<Rightarrow> mon \<Rightarrow> pol \<Rightarrow> nat \<Rightarrow> pol option"
where "pnsubst P1 M P2 n =
(case ponesubst P1 M P2 of
None \<Rightarrow> None
| Some P3 \<Rightarrow> Some (pnsubst1 P3 M P2 n))"
fun psubstl1 :: "pol \<Rightarrow> (mon \<times> pol) list \<Rightarrow> nat \<Rightarrow> pol"
where
"psubstl1 P1 [] n = P1"
| "psubstl1 P1 ((M, P2) # mps) n = psubstl1 (pnsubst1 P1 M P2 n) mps n"
fun psubstl :: "pol \<Rightarrow> (mon \<times> pol) list \<Rightarrow> nat \<Rightarrow> pol option"
where
"psubstl P1 [] n = None"
| "psubstl P1 ((M, P2) # mps) n =
(case pnsubst P1 M P2 n of
None \<Rightarrow> psubstl P1 mps n
| Some P3 \<Rightarrow> Some (psubstl1 P3 mps n))"
fun pnsubstl :: "pol \<Rightarrow> (mon \<times> pol) list \<Rightarrow> nat \<Rightarrow> nat \<Rightarrow> pol"
where "pnsubstl P1 mps m n =
(case psubstl P1 mps n of
None \<Rightarrow> P1
| Some P3 \<Rightarrow> if m = 0 then P3 else pnsubstl P3 mps (m - 1) n)"
lemma pnsubstl_0 [simp]:
"pnsubstl P1 mps 0 n = (case psubstl P1 mps n of None \<Rightarrow> P1 | Some P3 \<Rightarrow> P3)"
by (simp split: option.split)
lemma pnsubstl_Suc [simp]:
"pnsubstl P1 mps (Suc m) n = (case psubstl P1 mps n of None \<Rightarrow> P1 | Some P3 \<Rightarrow> pnsubstl P3 mps m n)"
by (simp split: option.split)
declare pnsubstl.simps [simp del]
lemma (in cring) ponesubst_correct:
"in_carrier l \<Longrightarrow> ponesubst P1 M P2 = Some P3 \<Longrightarrow> Imon l M = Ipol l P2 \<Longrightarrow> Ipol l P1 = Ipol l P3"
by (auto simp add: ponesubst_def split_beta mfactor_correct [of l P1 M]
add_ci mul_ci split: pol.split_asm if_split_asm)
lemma (in cring) pnsubst1_correct:
"in_carrier l \<Longrightarrow> Imon l M = Ipol l P2 \<Longrightarrow> Ipol l (pnsubst1 P1 M P2 n) = Ipol l P1"
by (induct n arbitrary: P1)
(simp_all add: ponesubst_correct split: option.split)
lemma (in cring) pnsubst_correct:
"in_carrier l \<Longrightarrow> pnsubst P1 M P2 n = Some P3 \<Longrightarrow> Imon l M = Ipol l P2 \<Longrightarrow> Ipol l P1 = Ipol l P3"
by (auto simp add: pnsubst_def
pnsubst1_correct ponesubst_correct split: option.split_asm)
lemma (in cring) psubstl1_correct:
"in_carrier l \<Longrightarrow> Imon_pol_list l mps \<Longrightarrow> Ipol l (psubstl1 P1 mps n) = Ipol l P1"
by (induct P1 mps n rule: psubstl1.induct) (simp_all add: pnsubst1_correct)
lemma (in cring) psubstl_correct:
"in_carrier l \<Longrightarrow> psubstl P1 mps n = Some P2 \<Longrightarrow> Imon_pol_list l mps \<Longrightarrow> Ipol l P1 = Ipol l P2"
by (induct P1 mps n rule: psubstl.induct)
(auto simp add: psubstl1_correct pnsubst_correct split: option.split_asm)
lemma (in cring) pnsubstl_correct:
"in_carrier l \<Longrightarrow> Imon_pol_list l mps \<Longrightarrow> Ipol l (pnsubstl P1 mps m n) = Ipol l P1"
by (induct m arbitrary: P1)
(simp_all add: psubstl_correct split: option.split)
lemma (in cring) norm_subst_correct:
"in_carrier l \<Longrightarrow> Ipolex_polex_list l pps \<Longrightarrow>
Ipolex l P = Ipol l (pnsubstl (norm P) (mk_monpol_list pps) m n)"
by (simp add: pnsubstl_correct mk_monpol_list_correct norm_ci)
lemma in_carrier_trivial: "cring_class.in_carrier l"
by (induct l) (simp_all add: cring_class.in_carrier_def carrier_class)
ML \<open>
val term_of_nat = HOLogic.mk_number \<^Type>\<open>nat\<close> o @{code integer_of_nat};
val term_of_int = HOLogic.mk_number \<^Type>\<open>int\<close> o @{code integer_of_int};
fun term_of_pol (@{code Pc} k) = \<^Const>\<open>Pc\<close> $ term_of_int k
| term_of_pol (@{code Pinj} (n, p)) = \<^Const>\<open>Pinj\<close> $ term_of_nat n $ term_of_pol p
| term_of_pol (@{code PX} (p, n, q)) = \<^Const>\<open>PX\<close> $ term_of_pol p $ term_of_nat n $ term_of_pol q;
local
fun pol (ctxt, ct, t) =
\<^instantiate>\<open>x = ct and y = \<open>Thm.cterm_of ctxt t\<close>
in cterm \<open>x \<equiv> y\<close> for x y :: pol\<close>;
val (_, raw_pol_oracle) = Context.>>> (Context.map_theory_result
(Thm.add_oracle (\<^binding>\<open>pnsubstl\<close>, pol)));
fun pol_oracle ctxt ct t = raw_pol_oracle (ctxt, ct, t);
in
val cv = @{computation_conv pol
terms: pnsubstl mk_monpol_list norm
nat_of_integer Code_Target_Nat.natural
"0::nat" "1::nat" "2::nat" "3::nat"
"0::int" "1::int" "2::int" "3::int" "-1::int"
datatypes: polex "(polex * polex) list" int integer num}
(fn ctxt => fn p => fn ct => pol_oracle ctxt ct (term_of_pol p))
end
\<close>
ML \<open>
signature RING_TAC =
sig
structure Ring_Simps:
sig
type T
val get: Context.generic -> T
val put: T -> Context.generic -> Context.generic
val map: (T -> T) -> Context.generic -> Context.generic
end
val insert_rules: ((term * 'a) * (term * 'a) -> bool) -> (term * 'a) ->
(term * 'a) Net.net -> (term * 'a) Net.net
val eq_ring_simps:
(term * (thm list * thm list * thm list * thm list * thm * thm)) *
(term * (thm list * thm list * thm list * thm list * thm * thm)) -> bool
val ring_tac: bool -> thm list -> Proof.context -> int -> tactic
val get_matching_rules: Proof.context -> (term * 'a) Net.net -> term -> 'a option
val norm: thm -> thm
val mk_in_carrier: Proof.context -> term -> thm list -> (string * typ) list -> thm
val mk_ring: typ -> term
end
structure Ring_Tac : RING_TAC =
struct
fun eq_ring_simps
((t, (ths1, ths2, ths3, ths4, th5, th)),
(t', (ths1', ths2', ths3', ths4', th5', th'))) =
t aconv t' andalso
eq_list Thm.eq_thm (ths1, ths1') andalso
eq_list Thm.eq_thm (ths2, ths2') andalso
eq_list Thm.eq_thm (ths3, ths3') andalso
eq_list Thm.eq_thm (ths4, ths4') andalso
Thm.eq_thm (th5, th5') andalso
Thm.eq_thm (th, th');
structure Ring_Simps = Generic_Data
(struct
type T = (term * (thm list * thm list * thm list * thm list * thm * thm)) Net.net
val empty = Net.empty
val merge = Net.merge eq_ring_simps
end);
fun get_matching_rules ctxt net t = get_first
(fn (p, x) =>
if Pattern.matches (Proof_Context.theory_of ctxt) (p, t) then SOME x else NONE)
(Net.match_term net t);
fun insert_rules eq (t, x) = Net.insert_term eq (t, (t, x));
fun norm thm = thm COMP_INCR asm_rl;
fun get_ring_simps ctxt optcT t =
(case get_matching_rules ctxt (Ring_Simps.get (Context.Proof ctxt)) t of
SOME (ths1, ths2, ths3, ths4, th5, th) =>
let val tr =
Thm.transfer' ctxt #>
(case optcT of NONE => I | SOME cT => inst [cT] [] #> norm)
in (map tr ths1, map tr ths2, map tr ths3, map tr ths4, tr th5, tr th) end
| NONE => error "get_ring_simps: lookup failed");
fun ring_struct \<^Const_>\<open>Ring.ring.add _ _ for R _ _\<close> = SOME R
| ring_struct \<^Const_>\<open>Ring.a_minus _ _ for R _ _\<close> = SOME R
| ring_struct \<^Const_>\<open>Group.monoid.mult _ _ for R _ _\<close> = SOME R
| ring_struct \<^Const_>\<open>Ring.a_inv _ _ for R _\<close> = SOME R
| ring_struct \<^Const_>\<open>Group.pow _ _ _ for R _ _\<close> = SOME R
| ring_struct \<^Const_>\<open>Ring.ring.zero _ _ for R\<close> = SOME R
| ring_struct \<^Const_>\<open>Group.monoid.one _ _ for R\<close> = SOME R
| ring_struct \<^Const_>\<open>Algebra_Aux.of_integer _ _ for R _\<close> = SOME R
| ring_struct _ = NONE;
fun reif_polex vs \<^Const_>\<open>Ring.ring.add _ _ for _ a b\<close> =
\<^Const>\<open>Add for \<open>reif_polex vs a\<close> \<open>reif_polex vs b\<close>\<close>
| reif_polex vs \<^Const_>\<open>Ring.a_minus _ _ for _ a b\<close> =
\<^Const>\<open>Sub for \<open>reif_polex vs a\<close> \<open>reif_polex vs b\<close>\<close>
| reif_polex vs \<^Const_>\<open>Group.monoid.mult _ _ for _ a b\<close> =
\<^Const>\<open>Mul for \<open>reif_polex vs a\<close> \<open>reif_polex vs b\<close>\<close>
| reif_polex vs \<^Const_>\<open>Ring.a_inv _ _ for _ a\<close> =
\<^Const>\<open>Neg for \<open>reif_polex vs a\<close>\<close>
| reif_polex vs \<^Const_>\<open>Group.pow _ _ _ for _ a n\<close> =
\<^Const>\<open>Pow for \<open>reif_polex vs a\<close> n\<close>
| reif_polex vs (Free x) =
\<^Const>\<open>Var for \<open>HOLogic.mk_number HOLogic.natT (find_index (equal x) vs)\<close>\<close>
| reif_polex _ \<^Const_>\<open>Ring.ring.zero _ _ for _\<close> = \<^term>\<open>Const 0\<close>
| reif_polex _ \<^Const_>\<open>Group.monoid.one _ _ for _\<close> = \<^term>\<open>Const 1\<close>
| reif_polex _ \<^Const_>\<open>Algebra_Aux.of_integer _ _ for _ n\<close> = \<^Const>\<open>Const for n\<close>
| reif_polex _ _ = error "reif_polex: bad expression";
fun reif_polex' vs \<^Const_>\<open>plus _ for a b\<close> = \<^Const>\<open>Add for \<open>reif_polex' vs a\<close> \<open>reif_polex' vs b\<close>\<close>
| reif_polex' vs \<^Const_>\<open>minus _ for a b\<close> = \<^Const>\<open>Sub for \<open>reif_polex' vs a\<close> \<open>reif_polex' vs b\<close>\<close>
| reif_polex' vs \<^Const_>\<open>times _ for a b\<close> = \<^Const>\<open>Mul for \<open>reif_polex' vs a\<close> \<open>reif_polex' vs b\<close>\<close>
| reif_polex' vs \<^Const_>\<open>uminus _ for a\<close> = \<^Const>\<open>Neg for \<open>reif_polex' vs a\<close>\<close>
| reif_polex' vs \<^Const_>\<open>power _ for a n\<close> = \<^Const>\<open>Pow for \<open>reif_polex' vs a\<close> n\<close>
| reif_polex' vs (Free x) = \<^Const>\<open>Var for \<open>HOLogic.mk_number \<^Type>\<open>nat\<close> (find_index (equal x) vs)\<close>\<close>
| reif_polex' _ \<^Const_>\<open>numeral _ for b\<close> = \<^Const>\<open>Const for \<^Const>\<open>numeral \<^Type>\<open>int\<close> for b\<close>\<close>
| reif_polex' _ \<^Const_>\<open>zero_class.zero _\<close> = \<^term>\<open>Const 0\<close>
| reif_polex' _ \<^Const_>\<open>one_class.one _\<close> = \<^term>\<open>Const 1\<close>
| reif_polex' _ _ = error "reif_polex': bad expression";
fun head_conv (_, _, _, _, head_simp, _) ys =
(case strip_app ys of
(\<^const_name>\<open>Cons\<close>, [y, xs]) => inst [] [y, xs] head_simp);
fun Ipol_conv (rls as
([Ipol_simps_1, Ipol_simps_2, Ipol_simps_3,
Ipol_simps_4, Ipol_simps_5, Ipol_simps_6,
Ipol_simps_7], _, _, _, _, _)) =
let
val a = type_of_eqn Ipol_simps_1;
val drop_conv_a = drop_conv a;
fun conv l p = (case strip_app p of
(\<^const_name>\<open>Pc\<close>, [c]) => (case strip_numeral c of
(\<^const_name>\<open>zero_class.zero\<close>, _) => inst [] [l] Ipol_simps_4
| (\<^const_name>\<open>one_class.one\<close>, _) => inst [] [l] Ipol_simps_5
| (\<^const_name>\<open>numeral\<close>, [m]) => inst [] [l, m] Ipol_simps_6
| (\<^const_name>\<open>uminus\<close>, [m]) => inst [] [l, m] Ipol_simps_7
| _ => inst [] [l, c] Ipol_simps_1)
| (\<^const_name>\<open>Pinj\<close>, [i, P]) =>
transitive'
(inst [] [l, i, P] Ipol_simps_2)
(cong2' conv (args2 drop_conv_a) Thm.reflexive)
| (\<^const_name>\<open>PX\<close>, [P, x, Q]) =>
transitive'
(inst [] [l, P, x, Q] Ipol_simps_3)
(cong2
(cong2
(args2 conv) (cong2 (args1 (head_conv rls)) Thm.reflexive))
(cong2' conv (args2 drop_conv_a) Thm.reflexive)))
in conv end;
fun Ipolex_conv (rls as
(_,
[Ipolex_Var, Ipolex_Const, Ipolex_Add,
Ipolex_Sub, Ipolex_Mul, Ipolex_Pow,
Ipolex_Neg, Ipolex_Const_0, Ipolex_Const_1,
Ipolex_Const_numeral], _, _, _, _)) =
let
val a = type_of_eqn Ipolex_Var;
val drop_conv_a = drop_conv a;
fun conv l r = (case strip_app r of
(\<^const_name>\<open>Var\<close>, [n]) =>
transitive'
(inst [] [l, n] Ipolex_Var)
(cong1' (head_conv rls) (args2 drop_conv_a))
| (\<^const_name>\<open>Const\<close>, [i]) => (case strip_app i of
(\<^const_name>\<open>zero_class.zero\<close>, _) => inst [] [l] Ipolex_Const_0
| (\<^const_name>\<open>one_class.one\<close>, _) => inst [] [l] Ipolex_Const_1
| (\<^const_name>\<open>numeral\<close>, [m]) => inst [] [l, m] Ipolex_Const_numeral
| _ => inst [] [l, i] Ipolex_Const)
| (\<^const_name>\<open>Add\<close>, [P, Q]) =>
transitive'
(inst [] [l, P, Q] Ipolex_Add)
(cong2 (args2 conv) (args2 conv))
| (\<^const_name>\<open>Sub\<close>, [P, Q]) =>
transitive'
(inst [] [l, P, Q] Ipolex_Sub)
(cong2 (args2 conv) (args2 conv))
| (\<^const_name>\<open>Mul\<close>, [P, Q]) =>
transitive'
(inst [] [l, P, Q] Ipolex_Mul)
(cong2 (args2 conv) (args2 conv))
| (\<^const_name>\<open>Pow\<close>, [P, n]) =>
transitive'
(inst [] [l, P, n] Ipolex_Pow)
(cong2 (args2 conv) Thm.reflexive)
| (\<^const_name>\<open>Neg\<close>, [P]) =>
transitive'
(inst [] [l, P] Ipolex_Neg)
(cong1 (args2 conv)))
in conv end;
fun Ipolex_polex_list_conv (rls as
(_, _,
[Ipolex_polex_list_Nil, Ipolex_polex_list_Cons], _, _, _)) l pps =
(case strip_app pps of
(\<^const_name>\<open>Nil\<close>, []) => inst [] [l] Ipolex_polex_list_Nil
| (\<^const_name>\<open>Cons\<close>, [p, pps']) => (case strip_app p of
(\<^const_name>\<open>Pair\<close>, [P, Q]) =>
transitive'
(inst [] [l, P, Q, pps'] Ipolex_polex_list_Cons)
(cong2
(cong2 (args2 (Ipolex_conv rls)) (args2 (Ipolex_conv rls)))
(args2 (Ipolex_polex_list_conv rls)))));
fun dest_conj th =
let
val th1 = th RS @{thm conjunct1};
val th2 = th RS @{thm conjunct2}
in
dest_conj th1 @ dest_conj th2
end handle THM _ => [th];
fun mk_in_carrier ctxt R prems xs =
let
val (_, _, _, [in_carrier_Nil, in_carrier_Cons], _, _) =
get_ring_simps ctxt NONE R;
val props = map fst (Facts.props (Proof_Context.facts_of ctxt)) @ maps dest_conj prems;
val ths = map (fn p as (x, _) =>
(case find_first
((fn \<^Const_>\<open>Trueprop
for \<^Const_>\<open>Set.member _ for \<open>Free (y, _)\<close> \<^Const_>\<open>carrier _ _ for S\<close>\<close>\<close> =>
x = y andalso R aconv S
| _ => false) o Thm.prop_of) props of
SOME th => th
| NONE => error ("Variable " ^ Syntax.string_of_term ctxt (Free p) ^
" not in carrier"))) xs
in
fold_rev (fn th1 => fn th2 => [th1, th2] MRS in_carrier_Cons)
ths in_carrier_Nil
end;
fun mk_ring T = \<^Const>\<open>cring_class_ops T\<close>;
val iterations = \<^cterm>\<open>1000::nat\<close>;
val Trueprop_cong = Thm.combination (Thm.reflexive \<^cterm>\<open>Trueprop\<close>);
fun commutative_ring_conv ctxt prems eqs ct =
let
val cT = Thm.ctyp_of_cterm ct;
val T = Thm.typ_of cT;
val eqs' = map (HOLogic.dest_eq o HOLogic.dest_Trueprop o Thm.prop_of) eqs;
val xs = filter (equal T o snd) (rev (fold Term.add_frees
(map fst eqs' @ map snd eqs' @ [Thm.term_of ct]) []));
val (R, optcT, prem', reif) = (case ring_struct (Thm.term_of ct) of
SOME R => (R, NONE, mk_in_carrier ctxt R prems xs, reif_polex xs)
| NONE => (mk_ring T, SOME cT, @{thm in_carrier_trivial}, reif_polex' xs));
val rls as (_, _, _, _, _, norm_subst_correct) = get_ring_simps ctxt optcT R;
val cxs = Thm.cterm_of ctxt (HOLogic.mk_list T (map Free xs));
val ceqs = Thm.cterm_of ctxt (HOLogic.mk_list \<^typ>\<open>polex \<times> polex\<close>
(map (HOLogic.mk_prod o apply2 reif) eqs'));
val cp = Thm.cterm_of ctxt (reif (Thm.term_of ct));
val prem = Thm.equal_elim
(Trueprop_cong (Thm.symmetric (Ipolex_polex_list_conv rls cxs ceqs)))
(fold_rev (fn th1 => fn th2 => [th1, th2] MRS @{thm conjI})
eqs @{thm TrueI});
in
Thm.transitive
(Thm.symmetric (Ipolex_conv rls cxs cp))
(transitive'
([prem', prem] MRS inst [] [cxs, ceqs, cp, iterations, iterations]
norm_subst_correct)
(cong2' (Ipol_conv rls)
Thm.reflexive
(cv ctxt)))
end;
fun ring_tac in_prems thms ctxt =
tactic_of_conv (fn ct =>
(if in_prems then Conv.prems_conv else Conv.concl_conv)
(Logic.count_prems (Thm.term_of ct))
(Conv.arg_conv (Conv.binop_conv (commutative_ring_conv ctxt [] thms))) ct) THEN'
TRY o (assume_tac ctxt ORELSE' resolve_tac ctxt [@{thm refl}]);
end
\<close>
context cring
begin
-local_setup \<open>
-Local_Theory.declaration {syntax = false, pervasive = false}
- (fn phi => Ring_Tac.Ring_Simps.map (Ring_Tac.insert_rules Ring_Tac.eq_ring_simps
+declaration \<open>fn phi =>
+ Ring_Tac.Ring_Simps.map (Ring_Tac.insert_rules Ring_Tac.eq_ring_simps
(Morphism.term phi \<^term>\<open>R\<close>,
(Morphism.fact phi @{thms Ipol.simps [meta] Ipol_Pc [meta]},
Morphism.fact phi @{thms Ipolex.simps [meta] Ipolex_Const [meta]},
Morphism.fact phi @{thms Ipolex_polex_list.simps [meta]},
Morphism.fact phi @{thms in_carrier_Nil in_carrier_Cons},
singleton (Morphism.fact phi) @{thm head.simps(2) [meta]},
- singleton (Morphism.fact phi) @{thm norm_subst_correct [meta]}))))
+ singleton (Morphism.fact phi) @{thm norm_subst_correct [meta]})))
\<close>
end
method_setup ring = \<open>
Scan.lift (Args.mode "prems") -- Attrib.thms >> (SIMPLE_METHOD' oo uncurry Ring_Tac.ring_tac)
\<close> "simplify equations involving rings"
end
diff --git a/src/HOL/Decision_Procs/Conversions.thy b/src/HOL/Decision_Procs/Conversions.thy
--- a/src/HOL/Decision_Procs/Conversions.thy
+++ b/src/HOL/Decision_Procs/Conversions.thy
@@ -1,839 +1,839 @@
(* Title: HOL/Decision_Procs/Conversions.thy
Author: Stefan Berghofer
*)
theory Conversions
imports Main
begin
ML \<open>
fun tactic_of_conv cv i st =
if i > Thm.nprems_of st then Seq.empty
else Seq.single (Conv.gconv_rule cv i st);
fun binop_conv cv cv' = Conv.combination_conv (Conv.arg_conv cv) cv';
\<close>
ML \<open>
fun err s ct =
error (s ^ ": " ^ Syntax.string_of_term_global (Thm.theory_of_cterm ct) (Thm.term_of ct));
\<close>
attribute_setup meta =
- \<open>Scan.succeed (fn (ctxt, th) => (NONE, SOME (mk_meta_eq th)))\<close>
+ \<open>Scan.succeed (Thm.rule_attribute [] (K mk_meta_eq))\<close>
\<open>convert equality to meta equality\<close>
ML \<open>
fun strip_app ct = ct |> Drule.strip_comb |>> Thm.term_of |>> dest_Const |>> fst;
fun inst cTs cts th =
Thm.instantiate' (map SOME cTs) (map SOME cts) th;
fun transitive' eq eq' = Thm.transitive eq (eq' (Thm.rhs_of eq));
fun type_of_eqn eqn = Thm.ctyp_of_cterm (Thm.dest_arg1 (Thm.cprop_of eqn));
fun cong1 conv ct =
Thm.combination (Thm.reflexive (Thm.dest_fun ct)) (conv (Thm.dest_arg ct));
fun cong1' conv' conv ct =
let val eqn = conv (Thm.dest_arg ct)
in
Thm.transitive
(Thm.combination (Thm.reflexive (Thm.dest_fun ct)) eqn)
(conv' (Thm.rhs_of eqn))
end;
fun cong2 conv1 conv2 ct =
Thm.combination
(Thm.combination
(Thm.reflexive (Thm.dest_fun2 ct))
(conv1 (Thm.dest_arg1 ct)))
(conv2 (Thm.dest_arg ct));
fun cong2' conv conv1 conv2 ct =
let
val eqn1 = conv1 (Thm.dest_arg1 ct);
val eqn2 = conv2 (Thm.dest_arg ct)
in
Thm.transitive
(Thm.combination
(Thm.combination (Thm.reflexive (Thm.dest_fun2 ct)) eqn1)
eqn2)
(conv (Thm.rhs_of eqn1) (Thm.rhs_of eqn2))
end;
fun cong2'' conv eqn1 eqn2 =
let val eqn3 = conv (Thm.rhs_of eqn1) (Thm.rhs_of eqn2)
in
Thm.transitive
(Thm.combination
(Thm.combination (Thm.reflexive (Thm.dest_fun2 (Thm.lhs_of eqn3))) eqn1)
eqn2)
eqn3
end;
fun args1 conv ct = conv (Thm.dest_arg ct);
fun args2 conv ct = conv (Thm.dest_arg1 ct) (Thm.dest_arg ct);
\<close>
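(* These combinators build equational theorems bottom-up: cong1/cong2 rewrite
   the argument(s) of a unary/binary application, args1/args2 feed the
   arguments of an application to a conversion, and transitive' chains an
   instantiated rewrite rule with a conversion applied to its right-hand side. *)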
ML \<open>
fun strip_numeral ct = (case strip_app ct of
(\<^const_name>\<open>uminus\<close>, [n]) => (case strip_app n of
(\<^const_name>\<open>numeral\<close>, [b]) => (\<^const_name>\<open>uminus\<close>, [b])
| _ => ("", []))
| x => x);
\<close>
lemma nat_minus1_eq: "nat (- 1) = 0"
by simp
ML \<open>
fun nat_conv i = (case strip_app i of
(\<^const_name>\<open>zero_class.zero\<close>, []) => @{thm nat_0 [meta]}
| (\<^const_name>\<open>one_class.one\<close>, []) => @{thm nat_one_as_int [meta, symmetric]}
| (\<^const_name>\<open>numeral\<close>, [b]) => inst [] [b] @{thm nat_numeral [meta]}
| (\<^const_name>\<open>uminus\<close>, [b]) => (case strip_app b of
(\<^const_name>\<open>one_class.one\<close>, []) => @{thm nat_minus1_eq [meta]}
| (\<^const_name>\<open>numeral\<close>, [b']) => inst [] [b'] @{thm nat_neg_numeral [meta]}));
\<close>
ML \<open>
fun add_num_conv b b' = (case (strip_app b, strip_app b') of
((\<^const_name>\<open>Num.One\<close>, []), (\<^const_name>\<open>Num.One\<close>, [])) =>
@{thm add_num_simps(1) [meta]}
| ((\<^const_name>\<open>Num.One\<close>, []), (\<^const_name>\<open>Num.Bit0\<close>, [n])) =>
inst [] [n] @{thm add_num_simps(2) [meta]}
| ((\<^const_name>\<open>Num.One\<close>, []), (\<^const_name>\<open>Num.Bit1\<close>, [n])) =>
transitive'
(inst [] [n] @{thm add_num_simps(3) [meta]})
(cong1 (args2 add_num_conv))
| ((\<^const_name>\<open>Num.Bit0\<close>, [m]), (\<^const_name>\<open>Num.One\<close>, [])) =>
inst [] [m] @{thm add_num_simps(4) [meta]}
| ((\<^const_name>\<open>Num.Bit0\<close>, [m]), (\<^const_name>\<open>Num.Bit0\<close>, [n])) =>
transitive'
(inst [] [m, n] @{thm add_num_simps(5) [meta]})
(cong1 (args2 add_num_conv))
| ((\<^const_name>\<open>Num.Bit0\<close>, [m]), (\<^const_name>\<open>Num.Bit1\<close>, [n])) =>
transitive'
(inst [] [m, n] @{thm add_num_simps(6) [meta]})
(cong1 (args2 add_num_conv))
| ((\<^const_name>\<open>Num.Bit1\<close>, [m]), (\<^const_name>\<open>Num.One\<close>, [])) =>
transitive'
(inst [] [m] @{thm add_num_simps(7) [meta]})
(cong1 (args2 add_num_conv))
| ((\<^const_name>\<open>Num.Bit1\<close>, [m]), (\<^const_name>\<open>Num.Bit0\<close>, [n])) =>
transitive'
(inst [] [m, n] @{thm add_num_simps(8) [meta]})
(cong1 (args2 add_num_conv))
| ((\<^const_name>\<open>Num.Bit1\<close>, [m]), (\<^const_name>\<open>Num.Bit1\<close>, [n])) =>
transitive'
(inst [] [m, n] @{thm add_num_simps(9) [meta]})
(cong1 (cong2' add_num_conv (args2 add_num_conv) Thm.reflexive)));
\<close>
ML \<open>
fun BitM_conv m = (case strip_app m of
(\<^const_name>\<open>Num.One\<close>, []) => @{thm BitM.simps(1) [meta]}
| (\<^const_name>\<open>Num.Bit0\<close>, [n]) =>
transitive'
(inst [] [n] @{thm BitM.simps(2) [meta]})
(cong1 (args1 BitM_conv))
| (\<^const_name>\<open>Num.Bit1\<close>, [n]) =>
inst [] [n] @{thm BitM.simps(3) [meta]});
\<close>
lemma dbl_neg_numeral:
"Num.dbl (- Num.numeral k) = - Num.numeral (Num.Bit0 k)"
by simp
ML \<open>
fun dbl_conv a =
let
val dbl_neg_numeral_a = inst [a] [] @{thm dbl_neg_numeral [meta]};
val dbl_0_a = inst [a] [] @{thm dbl_simps(2) [meta]};
val dbl_numeral_a = inst [a] [] @{thm dbl_simps(5) [meta]}
in
fn n =>
case strip_numeral n of
(\<^const_name>\<open>zero_class.zero\<close>, []) => dbl_0_a
| (\<^const_name>\<open>numeral\<close>, [k]) => inst [] [k] dbl_numeral_a
| (\<^const_name>\<open>uminus\<close>, [k]) => inst [] [k] dbl_neg_numeral_a
end;
\<close>
lemma dbl_inc_neg_numeral:
"Num.dbl_inc (- Num.numeral k) = - Num.numeral (Num.BitM k)"
by simp
ML \<open>
fun dbl_inc_conv a =
let
val dbl_inc_neg_numeral_a = inst [a] [] @{thm dbl_inc_neg_numeral [meta]};
val dbl_inc_0_a = inst [a] [] @{thm dbl_inc_simps(2) [folded numeral_One, meta]};
val dbl_inc_numeral_a = inst [a] [] @{thm dbl_inc_simps(5) [meta]};
in
fn n =>
case strip_numeral n of
(\<^const_name>\<open>zero_class.zero\<close>, []) => dbl_inc_0_a
| (\<^const_name>\<open>numeral\<close>, [k]) => inst [] [k] dbl_inc_numeral_a
| (\<^const_name>\<open>uminus\<close>, [k]) =>
transitive'
(inst [] [k] dbl_inc_neg_numeral_a)
(cong1 (cong1 (args1 BitM_conv)))
end;
\<close>
lemma dbl_dec_neg_numeral:
"Num.dbl_dec (- Num.numeral k) = - Num.numeral (Num.Bit1 k)"
by simp
ML \<open>
fun dbl_dec_conv a =
let
val dbl_dec_neg_numeral_a = inst [a] [] @{thm dbl_dec_neg_numeral [meta]};
val dbl_dec_0_a = inst [a] [] @{thm dbl_dec_simps(2) [folded numeral_One, meta]};
val dbl_dec_numeral_a = inst [a] [] @{thm dbl_dec_simps(5) [meta]};
in
fn n =>
case strip_numeral n of
(\<^const_name>\<open>zero_class.zero\<close>, []) => dbl_dec_0_a
| (\<^const_name>\<open>uminus\<close>, [k]) => inst [] [k] dbl_dec_neg_numeral_a
| (\<^const_name>\<open>numeral\<close>, [k]) =>
transitive'
(inst [] [k] dbl_dec_numeral_a)
(cong1 (args1 BitM_conv))
end;
\<close>
ML \<open>
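(* Num.sub k l: subtraction of binary numerals with a (possibly negative)
   result in type a, computed digit by digit via sub_num_simps together with
   the dbl / dbl_inc / dbl_dec conversions above. *)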
fun sub_conv a =
let
val [sub_One_One, sub_One_Bit0, sub_One_Bit1,
sub_Bit0_One, sub_Bit1_One, sub_Bit0_Bit0,
sub_Bit0_Bit1, sub_Bit1_Bit0, sub_Bit1_Bit1] =
map (inst [a] []) @{thms sub_num_simps [meta]};
val dbl_conv_a = dbl_conv a;
val dbl_inc_conv_a = dbl_inc_conv a;
val dbl_dec_conv_a = dbl_dec_conv a;
fun conv m n = (case (strip_app m, strip_app n) of
((\<^const_name>\<open>Num.One\<close>, []), (\<^const_name>\<open>Num.One\<close>, [])) =>
sub_One_One
| ((\<^const_name>\<open>Num.One\<close>, []), (\<^const_name>\<open>Num.Bit0\<close>, [l])) =>
transitive'
(inst [] [l] sub_One_Bit0)
(cong1 (cong1 (args1 BitM_conv)))
| ((\<^const_name>\<open>Num.One\<close>, []), (\<^const_name>\<open>Num.Bit1\<close>, [l])) =>
inst [] [l] sub_One_Bit1
| ((\<^const_name>\<open>Num.Bit0\<close>, [k]), (\<^const_name>\<open>Num.One\<close>, [])) =>
transitive'
(inst [] [k] sub_Bit0_One)
(cong1 (args1 BitM_conv))
| ((\<^const_name>\<open>Num.Bit1\<close>, [k]), (\<^const_name>\<open>Num.One\<close>, [])) =>
inst [] [k] sub_Bit1_One
| ((\<^const_name>\<open>Num.Bit0\<close>, [k]), (\<^const_name>\<open>Num.Bit0\<close>, [l])) =>
transitive'
(inst [] [k, l] sub_Bit0_Bit0)
(cong1' dbl_conv_a (args2 conv))
| ((\<^const_name>\<open>Num.Bit0\<close>, [k]), (\<^const_name>\<open>Num.Bit1\<close>, [l])) =>
transitive'
(inst [] [k, l] sub_Bit0_Bit1)
(cong1' dbl_dec_conv_a (args2 conv))
| ((\<^const_name>\<open>Num.Bit1\<close>, [k]), (\<^const_name>\<open>Num.Bit0\<close>, [l])) =>
transitive'
(inst [] [k, l] sub_Bit1_Bit0)
(cong1' dbl_inc_conv_a (args2 conv))
| ((\<^const_name>\<open>Num.Bit1\<close>, [k]), (\<^const_name>\<open>Num.Bit1\<close>, [l])) =>
transitive'
(inst [] [k, l] sub_Bit1_Bit1)
(cong1' dbl_conv_a (args2 conv)))
in conv end;
\<close>
ML \<open>
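(* expand1 rewrites 1 and - 1 into numeral form (numeral Num.One) so that the
   numeral conversions apply uniformly; norm1_eq undoes this normalisation on
   the right-hand side of a finished equation. *)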
fun expand1 a =
let val numeral_1_eq_1_a = inst [a] [] @{thm numeral_One [meta, symmetric]}
in
fn n =>
case Thm.term_of n of
\<^Const_>\<open>one_class.one _\<close> => numeral_1_eq_1_a
| \<^Const_>\<open>uminus _ for \<^Const_>\<open>one_class.one _\<close>\<close> =>
Thm.combination (Thm.reflexive (Thm.dest_fun n)) numeral_1_eq_1_a
| \<^Const_>\<open>zero_class.zero _\<close> => Thm.reflexive n
| \<^Const_>\<open>numeral _ for _\<close> => Thm.reflexive n
| \<^Const_>\<open>uminus _ for \<^Const_>\<open>numeral _ for _\<close>\<close> => Thm.reflexive n
| _ => err "expand1" n
end;
fun norm1_eq a =
let val numeral_1_eq_1_a = inst [a] [] @{thm numeral_One [meta]}
in
fn eq =>
case Thm.term_of (Thm.rhs_of eq) of
\<^Const_>\<open>Num.numeral _ for \<^Const_>\<open>Num.One\<close>\<close> => Thm.transitive eq numeral_1_eq_1_a
| \<^Const_>\<open>uminus _ for \<^Const_>\<open>Num.numeral _ for \<^Const_>\<open>Num.One\<close>\<close>\<close> =>
Thm.transitive eq
(Thm.combination (Thm.reflexive (Thm.dest_fun (Thm.rhs_of eq)))
numeral_1_eq_1_a)
| _ => eq
end;
\<close>
ML \<open>
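(* Generic addition conversion: 0 + n, m + 0 and numeral + numeral are handled
   here; the parameter f hooks in additional cases, e.g. negative numerals for
   int via plus_neg_conv below. *)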
fun plus_conv f a =
let
val add_0_a = inst [a] [] @{thm add_0 [meta]};
val add_0_right_a = inst [a] [] @{thm add_0_right [meta]};
val numeral_plus_numeral_a = inst [a] [] @{thm numeral_plus_numeral [meta]};
val expand1_a = expand1 a;
fun conv m n = (case (strip_app m, strip_app n) of
((\<^const_name>\<open>zero_class.zero\<close>, []), _) => inst [] [n] add_0_a
| (_, (\<^const_name>\<open>zero_class.zero\<close>, [])) => inst [] [m] add_0_right_a
| ((\<^const_name>\<open>numeral\<close>, [m]), (\<^const_name>\<open>numeral\<close>, [n])) =>
transitive'
(inst [] [m, n] numeral_plus_numeral_a)
(cong1 (args2 add_num_conv))
| _ => cong2'' (f conv) (expand1_a m) (expand1_a n))
in f conv end;
val nat_plus_conv = plus_conv I \<^ctyp>\<open>nat\<close>;
\<close>
lemma neg_numeral_plus_neg_numeral:
"- Num.numeral m + - Num.numeral n = (- Num.numeral (m + n) ::'a::neg_numeral)"
by simp
ML \<open>
fun plus_neg_conv a =
let
val numeral_plus_neg_numeral_a =
inst [a] [] @{thm add_neg_numeral_simps(1) [meta]};
val neg_numeral_plus_numeral_a =
inst [a] [] @{thm add_neg_numeral_simps(2) [meta]};
val neg_numeral_plus_neg_numeral_a =
inst [a] [] @{thm neg_numeral_plus_neg_numeral [meta]};
val sub_conv_a = sub_conv a;
in
fn conv => fn m => fn n =>
case (strip_numeral m, strip_numeral n) of
((\<^const_name>\<open>Num.numeral\<close>, [m]), (\<^const_name>\<open>uminus\<close>, [n])) =>
Thm.transitive
(inst [] [m, n] numeral_plus_neg_numeral_a)
(sub_conv_a m n)
| ((\<^const_name>\<open>uminus\<close>, [m]), (\<^const_name>\<open>Num.numeral\<close>, [n])) =>
Thm.transitive
(inst [] [m, n] neg_numeral_plus_numeral_a)
(sub_conv_a n m)
| ((\<^const_name>\<open>uminus\<close>, [m]), (\<^const_name>\<open>uminus\<close>, [n])) =>
transitive'
(inst [] [m, n] neg_numeral_plus_neg_numeral_a)
(cong1 (cong1 (args2 add_num_conv)))
| _ => conv m n
end;
fun plus_conv' a = norm1_eq a oo plus_conv (plus_neg_conv a) a;
val int_plus_conv = plus_conv' \<^ctyp>\<open>int\<close>;
\<close>
lemma minus_one: "- 1 = - 1" by simp
lemma minus_numeral: "- numeral b = - numeral b" by simp
ML \<open>
fun uminus_conv a =
let
val minus_zero_a = inst [a] [] @{thm minus_zero [meta]};
val minus_one_a = inst [a] [] @{thm minus_one [meta]};
val minus_numeral_a = inst [a] [] @{thm minus_numeral [meta]};
val minus_minus_a = inst [a] [] @{thm minus_minus [meta]}
in
fn n =>
case strip_app n of
(\<^const_name>\<open>zero_class.zero\<close>, []) => minus_zero_a
| (\<^const_name>\<open>one_class.one\<close>, []) => minus_one_a
| (\<^const_name>\<open>Num.numeral\<close>, [m]) => inst [] [m] minus_numeral_a
| (\<^const_name>\<open>uminus\<close>, [m]) => inst [] [m] minus_minus_a
end;
val int_neg_conv = uminus_conv \<^ctyp>\<open>int\<close>;
\<close>
ML \<open>
fun minus_conv a =
let
val [numeral_minus_numeral_a, numeral_minus_neg_numeral_a,
neg_numeral_minus_numeral_a, neg_numeral_minus_neg_numeral_a] =
map (inst [a] []) @{thms diff_numeral_simps [meta]};
val diff_0_a = inst [a] [] @{thm diff_0 [meta]};
val diff_0_right_a = inst [a] [] @{thm diff_0_right [meta]};
val sub_conv_a = sub_conv a;
val uminus_conv_a = uminus_conv a;
val expand1_a = expand1 a;
val norm1_eq_a = norm1_eq a;
fun conv m n = (case (strip_numeral m, strip_numeral n) of
((\<^const_name>\<open>zero_class.zero\<close>, []), _) =>
Thm.transitive (inst [] [n] diff_0_a) (uminus_conv_a n)
| (_, (\<^const_name>\<open>zero_class.zero\<close>, [])) => inst [] [m] diff_0_right_a
| ((\<^const_name>\<open>Num.numeral\<close>, [m]), (\<^const_name>\<open>Num.numeral\<close>, [n])) =>
Thm.transitive
(inst [] [m, n] numeral_minus_numeral_a)
(sub_conv_a m n)
| ((\<^const_name>\<open>Num.numeral\<close>, [m]), (\<^const_name>\<open>uminus\<close>, [n])) =>
transitive'
(inst [] [m, n] numeral_minus_neg_numeral_a)
(cong1 (args2 add_num_conv))
| ((\<^const_name>\<open>uminus\<close>, [m]), (\<^const_name>\<open>Num.numeral\<close>, [n])) =>
transitive'
(inst [] [m, n] neg_numeral_minus_numeral_a)
(cong1 (cong1 (args2 add_num_conv)))
| ((\<^const_name>\<open>uminus\<close>, [m]), (\<^const_name>\<open>uminus\<close>, [n])) =>
Thm.transitive
(inst [] [m, n] neg_numeral_minus_neg_numeral_a)
(sub_conv_a n m)
| _ => cong2'' conv (expand1_a m) (expand1_a n))
in norm1_eq_a oo conv end;
val int_minus_conv = minus_conv \<^ctyp>\<open>int\<close>;
\<close>
ML \<open>
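(* Truncated subtraction on nat: the numeral case goes through diff_nat_numeral,
   i.e. subtraction on int followed by conversion back to nat (nat_conv). *)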
val int_numeral = Thm.apply \<^cterm>\<open>numeral :: num \<Rightarrow> int\<close>;
val nat_minus_refl = Thm.reflexive \<^cterm>\<open>minus :: nat \<Rightarrow> nat \<Rightarrow> nat\<close>;
val expand1_nat = expand1 \<^ctyp>\<open>nat\<close>;
fun nat_minus_conv m n = (case (strip_app m, strip_app n) of
((\<^const_name>\<open>zero_class.zero\<close>, []), _) =>
inst [] [n] @{thm diff_0_eq_0 [meta]}
| (_, (\<^const_name>\<open>zero_class.zero\<close>, [])) =>
inst [] [m] @{thm minus_nat.diff_0 [meta]}
| ((\<^const_name>\<open>numeral\<close>, [m]), (\<^const_name>\<open>numeral\<close>, [n])) =>
transitive'
(inst [] [m, n] @{thm diff_nat_numeral [meta]})
(cong1' nat_conv (args2 int_minus_conv))
| _ => cong2'' nat_minus_conv (expand1_nat m) (expand1_nat n));
\<close>
ML \<open>
fun mult_num_conv m n = (case (strip_app m, strip_app n) of
(_, (\<^const_name>\<open>Num.One\<close>, [])) =>
inst [] [m] @{thm mult_num_simps(1) [meta]}
| ((\<^const_name>\<open>Num.One\<close>, []), _) =>
inst [] [n] @{thm mult_num_simps(2) [meta]}
| ((\<^const_name>\<open>Num.Bit0\<close>, [m]), (\<^const_name>\<open>Num.Bit0\<close>, [n])) =>
transitive'
(inst [] [m, n] @{thm mult_num_simps(3) [meta]})
(cong1 (cong1 (args2 mult_num_conv)))
| ((\<^const_name>\<open>Num.Bit0\<close>, [m]), (\<^const_name>\<open>Num.Bit1\<close>, [n'])) =>
transitive'
(inst [] [m, n'] @{thm mult_num_simps(4) [meta]})
(cong1 (args2 mult_num_conv))
| ((\<^const_name>\<open>Num.Bit1\<close>, [m']), (\<^const_name>\<open>Num.Bit0\<close>, [n])) =>
transitive'
(inst [] [m', n] @{thm mult_num_simps(5) [meta]})
(cong1 (args2 mult_num_conv))
| ((\<^const_name>\<open>Num.Bit1\<close>, [m]), (\<^const_name>\<open>Num.Bit1\<close>, [n])) =>
transitive'
(inst [] [m, n] @{thm mult_num_simps(6) [meta]})
(cong1 (cong2' add_num_conv
(args2 add_num_conv)
(cong1 (args2 mult_num_conv)))));
\<close>
ML \<open>
fun mult_conv f a =
let
val mult_zero_left_a = inst [a] [] @{thm mult_zero_left [meta]};
val mult_zero_right_a = inst [a] [] @{thm mult_zero_right [meta]};
val numeral_times_numeral_a = inst [a] [] @{thm numeral_times_numeral [meta]};
val expand1_a = expand1 a;
val norm1_eq_a = norm1_eq a;
fun conv m n = (case (strip_app m, strip_app n) of
((\<^const_name>\<open>zero_class.zero\<close>, []), _) => inst [] [n] mult_zero_left_a
| (_, (\<^const_name>\<open>zero_class.zero\<close>, [])) => inst [] [m] mult_zero_right_a
| ((\<^const_name>\<open>numeral\<close>, [m]), (\<^const_name>\<open>numeral\<close>, [n])) =>
transitive'
(inst [] [m, n] numeral_times_numeral_a)
(cong1 (args2 mult_num_conv))
| _ => cong2'' (f conv) (expand1_a m) (expand1_a n))
in norm1_eq_a oo f conv end;
val nat_mult_conv = mult_conv I \<^ctyp>\<open>nat\<close>;
\<close>
ML \<open>
fun mult_neg_conv a =
let
val [neg_numeral_times_neg_numeral_a, neg_numeral_times_numeral_a,
numeral_times_neg_numeral_a] =
map (inst [a] []) @{thms mult_neg_numeral_simps [meta]};
in
fn conv => fn m => fn n =>
case (strip_numeral m, strip_numeral n) of
((\<^const_name>\<open>uminus\<close>, [m]), (\<^const_name>\<open>uminus\<close>, [n])) =>
transitive'
(inst [] [m, n] neg_numeral_times_neg_numeral_a)
(cong1 (args2 mult_num_conv))
| ((\<^const_name>\<open>uminus\<close>, [m]), (\<^const_name>\<open>numeral\<close>, [n])) =>
transitive'
(inst [] [m, n] neg_numeral_times_numeral_a)
(cong1 (cong1 (args2 mult_num_conv)))
| ((\<^const_name>\<open>numeral\<close>, [m]), (\<^const_name>\<open>uminus\<close>, [n])) =>
transitive'
(inst [] [m, n] numeral_times_neg_numeral_a)
(cong1 (cong1 (args2 mult_num_conv)))
| _ => conv m n
end;
fun mult_conv' a = mult_conv (mult_neg_conv a) a;
val int_mult_conv = mult_conv' \<^ctyp>\<open>int\<close>;
\<close>
ML \<open>
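(* Equality of binary numerals, decided digit by digit via eq_num_simps; the
   subsequent eq_conv lifts this to 0 / numeral terms of a given type. *)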
fun eq_num_conv m n = (case (strip_app m, strip_app n) of
((\<^const_name>\<open>Num.One\<close>, []), (\<^const_name>\<open>Num.One\<close>, [])) =>
@{thm eq_num_simps(1) [meta]}
| ((\<^const_name>\<open>Num.One\<close>, []), (\<^const_name>\<open>Num.Bit0\<close>, [n])) =>
inst [] [n] @{thm eq_num_simps(2) [meta]}
| ((\<^const_name>\<open>Num.One\<close>, []), (\<^const_name>\<open>Num.Bit1\<close>, [n])) =>
inst [] [n] @{thm eq_num_simps(3) [meta]}
| ((\<^const_name>\<open>Num.Bit0\<close>, [m]), (\<^const_name>\<open>Num.One\<close>, [])) =>
inst [] [m] @{thm eq_num_simps(4) [meta]}
| ((\<^const_name>\<open>Num.Bit1\<close>, [m]), (\<^const_name>\<open>Num.One\<close>, [])) =>
inst [] [m] @{thm eq_num_simps(5) [meta]}
| ((\<^const_name>\<open>Num.Bit0\<close>, [m]), (\<^const_name>\<open>Num.Bit0\<close>, [n])) =>
Thm.transitive
(inst [] [m, n] @{thm eq_num_simps(6) [meta]})
(eq_num_conv m n)
| ((\<^const_name>\<open>Num.Bit0\<close>, [m]), (\<^const_name>\<open>Num.Bit1\<close>, [n])) =>
inst [] [m, n] @{thm eq_num_simps(7) [meta]}
| ((\<^const_name>\<open>Num.Bit1\<close>, [m]), (\<^const_name>\<open>Num.Bit0\<close>, [n])) =>
inst [] [m, n] @{thm eq_num_simps(8) [meta]}
| ((\<^const_name>\<open>Num.Bit1\<close>, [m]), (\<^const_name>\<open>Num.Bit1\<close>, [n])) =>
Thm.transitive
(inst [] [m, n] @{thm eq_num_simps(9) [meta]})
(eq_num_conv m n));
\<close>
ML \<open>
fun eq_conv f a =
let
val zero_eq_zero_a = inst [a] [] @{thm refl [of 0, THEN Eq_TrueI]};
val zero_neq_numeral_a =
inst [a] [] @{thm zero_neq_numeral [THEN Eq_FalseI]};
val numeral_neq_zero_a =
inst [a] [] @{thm numeral_neq_zero [THEN Eq_FalseI]};
val numeral_eq_iff_a = inst [a] [] @{thm numeral_eq_iff [meta]};
val expand1_a = expand1 a;
fun conv m n = (case (strip_app m, strip_app n) of
((\<^const_name>\<open>zero_class.zero\<close>, []), (\<^const_name>\<open>zero_class.zero\<close>, [])) =>
zero_eq_zero_a
| ((\<^const_name>\<open>zero_class.zero\<close>, []), (\<^const_name>\<open>numeral\<close>, [n])) =>
inst [] [n] zero_neq_numeral_a
| ((\<^const_name>\<open>numeral\<close>, [m]), (\<^const_name>\<open>zero_class.zero\<close>, [])) =>
inst [] [m] numeral_neq_zero_a
| ((\<^const_name>\<open>numeral\<close>, [m]), (\<^const_name>\<open>numeral\<close>, [n])) =>
Thm.transitive
(inst [] [m, n] numeral_eq_iff_a)
(eq_num_conv m n)
| _ => cong2'' (f conv) (expand1_a m) (expand1_a n))
in f conv end;
val nat_eq_conv = eq_conv I \<^ctyp>\<open>nat\<close>;
\<close>
ML \<open>
fun eq_neg_conv a =
let
val neg_numeral_neq_zero_a =
inst [a] [] @{thm neg_numeral_neq_zero [THEN Eq_FalseI]};
val zero_neq_neg_numeral_a =
inst [a] [] @{thm zero_neq_neg_numeral [THEN Eq_FalseI]};
val neg_numeral_neq_numeral_a =
inst [a] [] @{thm neg_numeral_neq_numeral [THEN Eq_FalseI]};
val numeral_neq_neg_numeral_a =
inst [a] [] @{thm numeral_neq_neg_numeral [THEN Eq_FalseI]};
val neg_numeral_eq_iff_a = inst [a] [] @{thm neg_numeral_eq_iff [meta]}
in
fn conv => fn m => fn n =>
case (strip_numeral m, strip_numeral n) of
((\<^const_name>\<open>uminus\<close>, [m]), (\<^const_name>\<open>zero_class.zero\<close>, [])) =>
inst [] [m] neg_numeral_neq_zero_a
| ((\<^const_name>\<open>zero_class.zero\<close>, []), (\<^const_name>\<open>uminus\<close>, [n])) =>
inst [] [n] zero_neq_neg_numeral_a
| ((\<^const_name>\<open>Num.numeral\<close>, [m]), (\<^const_name>\<open>uminus\<close>, [n])) =>
inst [] [m, n] numeral_neq_neg_numeral_a
| ((\<^const_name>\<open>uminus\<close>, [m]), (\<^const_name>\<open>Num.numeral\<close>, [n])) =>
inst [] [m, n] neg_numeral_neq_numeral_a
| ((\<^const_name>\<open>uminus\<close>, [m]), (\<^const_name>\<open>uminus\<close>, [n])) =>
Thm.transitive
(inst [] [m, n] neg_numeral_eq_iff_a)
(eq_num_conv m n)
| _ => conv m n
end;
fun eq_conv' a = eq_conv (eq_neg_conv a) a;
val int_eq_conv = eq_conv' \<^ctyp>\<open>int\<close>;
\<close>
ML \<open>
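(* Mutually recursive decision procedures for \<le> and < on binary numerals,
   following le_num_simps and less_num_simps. *)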
fun le_num_conv m n = (case (strip_app m, strip_app n) of
((\<^const_name>\<open>Num.One\<close>, []), _) =>
inst [] [n] @{thm le_num_simps(1) [meta]}
| ((\<^const_name>\<open>Num.Bit0\<close>, [m]), (\<^const_name>\<open>Num.One\<close>, [])) =>
inst [] [m] @{thm le_num_simps(2) [meta]}
| ((\<^const_name>\<open>Num.Bit1\<close>, [m]), (\<^const_name>\<open>Num.One\<close>, [])) =>
inst [] [m] @{thm le_num_simps(3) [meta]}
| ((\<^const_name>\<open>Num.Bit0\<close>, [m]), (\<^const_name>\<open>Num.Bit0\<close>, [n])) =>
Thm.transitive
(inst [] [m, n] @{thm le_num_simps(4) [meta]})
(le_num_conv m n)
| ((\<^const_name>\<open>Num.Bit0\<close>, [m]), (\<^const_name>\<open>Num.Bit1\<close>, [n])) =>
Thm.transitive
(inst [] [m, n] @{thm le_num_simps(5) [meta]})
(le_num_conv m n)
| ((\<^const_name>\<open>Num.Bit1\<close>, [m]), (\<^const_name>\<open>Num.Bit1\<close>, [n])) =>
Thm.transitive
(inst [] [m, n] @{thm le_num_simps(6) [meta]})
(le_num_conv m n)
| ((\<^const_name>\<open>Num.Bit1\<close>, [m]), (\<^const_name>\<open>Num.Bit0\<close>, [n])) =>
Thm.transitive
(inst [] [m, n] @{thm le_num_simps(7) [meta]})
(less_num_conv m n))
and less_num_conv m n = (case (strip_app m, strip_app n) of
(_, (\<^const_name>\<open>Num.One\<close>, [])) =>
inst [] [m] @{thm less_num_simps(1) [meta]}
| ((\<^const_name>\<open>Num.One\<close>, []), (\<^const_name>\<open>Num.Bit0\<close>, [n])) =>
inst [] [n] @{thm less_num_simps(2) [meta]}
| ((\<^const_name>\<open>Num.One\<close>, []), (\<^const_name>\<open>Num.Bit1\<close>, [n])) =>
inst [] [n] @{thm less_num_simps(3) [meta]}
| ((\<^const_name>\<open>Num.Bit0\<close>, [m]), (\<^const_name>\<open>Num.Bit0\<close>, [n])) =>
Thm.transitive
(inst [] [m, n] @{thm less_num_simps(4) [meta]})
(less_num_conv m n)
| ((\<^const_name>\<open>Num.Bit0\<close>, [m]), (\<^const_name>\<open>Num.Bit1\<close>, [n])) =>
Thm.transitive
(inst [] [m, n] @{thm less_num_simps(5) [meta]})
(le_num_conv m n)
| ((\<^const_name>\<open>Num.Bit1\<close>, [m]), (\<^const_name>\<open>Num.Bit1\<close>, [n])) =>
Thm.transitive
(inst [] [m, n] @{thm less_num_simps(6) [meta]})
(less_num_conv m n)
| ((\<^const_name>\<open>Num.Bit1\<close>, [m]), (\<^const_name>\<open>Num.Bit0\<close>, [n])) =>
Thm.transitive
(inst [] [m, n] @{thm less_num_simps(7) [meta]})
(less_num_conv m n));
\<close>
ML \<open>
fun le_conv f a =
let
val zero_le_zero_a = inst [a] [] @{thm order_refl [of 0, THEN Eq_TrueI]};
val zero_le_numeral_a =
inst [a] [] @{thm zero_le_numeral [THEN Eq_TrueI]};
val not_numeral_le_zero_a =
inst [a] [] @{thm not_numeral_le_zero [THEN Eq_FalseI]};
val numeral_le_iff_a = inst [a] [] @{thm numeral_le_iff [meta]};
val expand1_a = expand1 a;
fun conv m n = (case (strip_app m, strip_app n) of
((\<^const_name>\<open>zero_class.zero\<close>, []), (\<^const_name>\<open>zero_class.zero\<close>, [])) =>
zero_le_zero_a
| ((\<^const_name>\<open>zero_class.zero\<close>, []), (\<^const_name>\<open>numeral\<close>, [n])) =>
inst [] [n] zero_le_numeral_a
| ((\<^const_name>\<open>numeral\<close>, [m]), (\<^const_name>\<open>zero_class.zero\<close>, [])) =>
inst [] [m] not_numeral_le_zero_a
| ((\<^const_name>\<open>numeral\<close>, [m]), (\<^const_name>\<open>numeral\<close>, [n])) =>
Thm.transitive
(inst [] [m, n] numeral_le_iff_a)
(le_num_conv m n)
| _ => cong2'' (f conv) (expand1_a m) (expand1_a n))
in f conv end;
val nat_le_conv = le_conv I \<^ctyp>\<open>nat\<close>;
\<close>
ML \<open>
fun le_neg_conv a =
let
val neg_numeral_le_zero_a =
inst [a] [] @{thm neg_numeral_le_zero [THEN Eq_TrueI]};
val not_zero_le_neg_numeral_a =
inst [a] [] @{thm not_zero_le_neg_numeral [THEN Eq_FalseI]};
val neg_numeral_le_numeral_a =
inst [a] [] @{thm neg_numeral_le_numeral [THEN Eq_TrueI]};
val not_numeral_le_neg_numeral_a =
inst [a] [] @{thm not_numeral_le_neg_numeral [THEN Eq_FalseI]};
val neg_numeral_le_iff_a = inst [a] [] @{thm neg_numeral_le_iff [meta]}
in
fn conv => fn m => fn n =>
case (strip_numeral m, strip_numeral n) of
((\<^const_name>\<open>uminus\<close>, [m]), (\<^const_name>\<open>zero_class.zero\<close>, [])) =>
inst [] [m] neg_numeral_le_zero_a
| ((\<^const_name>\<open>zero_class.zero\<close>, []), (\<^const_name>\<open>uminus\<close>, [n])) =>
inst [] [n] not_zero_le_neg_numeral_a
| ((\<^const_name>\<open>Num.numeral\<close>, [m]), (\<^const_name>\<open>uminus\<close>, [n])) =>
inst [] [m, n] not_numeral_le_neg_numeral_a
| ((\<^const_name>\<open>uminus\<close>, [m]), (\<^const_name>\<open>Num.numeral\<close>, [n])) =>
inst [] [m, n] neg_numeral_le_numeral_a
| ((\<^const_name>\<open>uminus\<close>, [m]), (\<^const_name>\<open>uminus\<close>, [n])) =>
Thm.transitive
(inst [] [m, n] neg_numeral_le_iff_a)
(le_num_conv n m)
| _ => conv m n
end;
fun le_conv' a = le_conv (le_neg_conv a) a;
val int_le_conv = le_conv' \<^ctyp>\<open>int\<close>;
\<close>
ML \<open>
fun less_conv f a =
let
val not_zero_less_zero_a = inst [a] [] @{thm less_irrefl [of 0, THEN Eq_FalseI]};
val zero_less_numeral_a =
inst [a] [] @{thm zero_less_numeral [THEN Eq_TrueI]};
val not_numeral_less_zero_a =
inst [a] [] @{thm not_numeral_less_zero [THEN Eq_FalseI]};
val numeral_less_iff_a = inst [a] [] @{thm numeral_less_iff [meta]};
val expand1_a = expand1 a;
fun conv m n = (case (strip_app m, strip_app n) of
((\<^const_name>\<open>zero_class.zero\<close>, []), (\<^const_name>\<open>zero_class.zero\<close>, [])) =>
not_zero_less_zero_a
| ((\<^const_name>\<open>zero_class.zero\<close>, []), (\<^const_name>\<open>numeral\<close>, [n])) =>
inst [] [n] zero_less_numeral_a
| ((\<^const_name>\<open>numeral\<close>, [m]), (\<^const_name>\<open>zero_class.zero\<close>, [])) =>
inst [] [m] not_numeral_less_zero_a
| ((\<^const_name>\<open>numeral\<close>, [m]), (\<^const_name>\<open>numeral\<close>, [n])) =>
Thm.transitive
(inst [] [m, n] numeral_less_iff_a)
(less_num_conv m n)
| _ => cong2'' (f conv) (expand1_a m) (expand1_a n))
in f conv end;
val nat_less_conv = less_conv I \<^ctyp>\<open>nat\<close>;
\<close>
ML \<open>
fun less_neg_conv a =
let
val neg_numeral_less_zero_a =
inst [a] [] @{thm neg_numeral_less_zero [THEN Eq_TrueI]};
val not_zero_less_neg_numeral_a =
inst [a] [] @{thm not_zero_less_neg_numeral [THEN Eq_FalseI]};
val neg_numeral_less_numeral_a =
inst [a] [] @{thm neg_numeral_less_numeral [THEN Eq_TrueI]};
val not_numeral_less_neg_numeral_a =
inst [a] [] @{thm not_numeral_less_neg_numeral [THEN Eq_FalseI]};
val neg_numeral_less_iff_a = inst [a] [] @{thm neg_numeral_less_iff [meta]}
in
fn conv => fn m => fn n =>
case (strip_numeral m, strip_numeral n) of
((\<^const_name>\<open>uminus\<close>, [m]), (\<^const_name>\<open>zero_class.zero\<close>, [])) =>
inst [] [m] neg_numeral_less_zero_a
| ((\<^const_name>\<open>zero_class.zero\<close>, []), (\<^const_name>\<open>uminus\<close>, [n])) =>
inst [] [n] not_zero_less_neg_numeral_a
| ((\<^const_name>\<open>Num.numeral\<close>, [m]), (\<^const_name>\<open>uminus\<close>, [n])) =>
inst [] [m, n] not_numeral_less_neg_numeral_a
| ((\<^const_name>\<open>uminus\<close>, [m]), (\<^const_name>\<open>Num.numeral\<close>, [n])) =>
inst [] [m, n] neg_numeral_less_numeral_a
| ((\<^const_name>\<open>uminus\<close>, [m]), (\<^const_name>\<open>uminus\<close>, [n])) =>
Thm.transitive
(inst [] [m, n] neg_numeral_less_iff_a)
(less_num_conv n m)
| _ => conv m n
end;
fun less_conv' a = less_conv (less_neg_conv a) a;
val int_less_conv = less_conv' \<^ctyp>\<open>int\<close>;
\<close>
ML \<open>
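(* Conditional evaluation: convert the condition first, then only the branch
   selected by its True/False value; the other branch is kept by reflexivity. *)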
fun If_conv a =
let
val if_True = inst [a] [] @{thm if_True [meta]};
val if_False = inst [a] [] @{thm if_False [meta]}
in
fn p => fn x => fn y => fn ct =>
case strip_app ct of
(\<^const_name>\<open>If\<close>, [cb, cx, cy]) =>
let
val p_eq = p cb
val eq = Thm.combination (Thm.reflexive (Thm.dest_fun (Thm.dest_fun2 ct))) p_eq
in
case Thm.term_of (Thm.rhs_of p_eq) of
\<^Const_>\<open>True\<close> =>
let
val x_eq = x cx;
val cx = Thm.rhs_of x_eq;
in
Thm.transitive
(Thm.combination
(Thm.combination eq x_eq)
(Thm.reflexive cy))
(inst [] [cx, cy] if_True)
end
| \<^Const_>\<open>False\<close> =>
let
val y_eq = y cy;
val cy = Thm.rhs_of y_eq;
in
Thm.transitive
(Thm.combination
(Thm.combination eq (Thm.reflexive cx))
y_eq)
(inst [] [cx, cy] if_False)
end
| _ => err "If_conv" (Thm.rhs_of p_eq)
end
end;
\<close>
ML \<open>
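(* List.drop on explicit Cons-lists: compare the index with nat_eq_conv and
   decrement it with nat_minus_conv, following drop_Cons'. *)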
fun drop_conv a =
let
val drop_0_a = inst [a] [] @{thm drop_0 [meta]};
val drop_Cons_a = inst [a] [] @{thm drop_Cons' [meta]};
val If_conv_a = If_conv (type_of_eqn drop_0_a);
fun conv n ys = (case Thm.term_of n of
\<^Const_>\<open>zero_class.zero _\<close> => inst [] [ys] drop_0_a
| _ => (case strip_app ys of
(\<^const_name>\<open>Cons\<close>, [x, xs]) =>
transitive'
(inst [] [n, x, xs] drop_Cons_a)
(If_conv_a (args2 nat_eq_conv)
Thm.reflexive
(cong2' conv (args2 nat_minus_conv) Thm.reflexive))))
in conv end;
\<close>
ML \<open>
fun nth_conv a =
let
val nth_Cons_a = inst [a] [] @{thm nth_Cons' [meta]};
val If_conv_a = If_conv a;
fun conv ys n = (case strip_app ys of
(\<^const_name>\<open>Cons\<close>, [x, xs]) =>
transitive'
(inst [] [x, xs, n] nth_Cons_a)
(If_conv_a (args2 nat_eq_conv)
Thm.reflexive
(cong2' conv Thm.reflexive (args2 nat_minus_conv))))
in conv end;
\<close>
end
diff --git a/src/HOL/Decision_Procs/Reflective_Field.thy b/src/HOL/Decision_Procs/Reflective_Field.thy
--- a/src/HOL/Decision_Procs/Reflective_Field.thy
+++ b/src/HOL/Decision_Procs/Reflective_Field.thy
@@ -1,930 +1,929 @@
(* Title: HOL/Decision_Procs/Reflective_Field.thy
Author: Stefan Berghofer
Reducing equalities in fields to equalities in rings.
*)
theory Reflective_Field
imports Commutative_Ring
begin
datatype fexpr =
FCnst int
| FVar nat
| FAdd fexpr fexpr
| FSub fexpr fexpr
| FMul fexpr fexpr
| FNeg fexpr
| FDiv fexpr fexpr
| FPow fexpr nat
fun (in field) nth_el :: "'a list \<Rightarrow> nat \<Rightarrow> 'a"
where
"nth_el [] n = \<zero>"
| "nth_el (x # xs) 0 = x"
| "nth_el (x # xs) (Suc n) = nth_el xs n"
lemma (in field) nth_el_Cons: "nth_el (x # xs) n = (if n = 0 then x else nth_el xs (n - 1))"
by (cases n) simp_all
lemma (in field) nth_el_closed [simp]: "in_carrier xs \<Longrightarrow> nth_el xs n \<in> carrier R"
by (induct xs n rule: nth_el.induct) (simp_all add: in_carrier_def)
primrec (in field) feval :: "'a list \<Rightarrow> fexpr \<Rightarrow> 'a"
where
"feval xs (FCnst c) = \<guillemotleft>c\<guillemotright>"
| "feval xs (FVar n) = nth_el xs n"
| "feval xs (FAdd a b) = feval xs a \<oplus> feval xs b"
| "feval xs (FSub a b) = feval xs a \<ominus> feval xs b"
| "feval xs (FMul a b) = feval xs a \<otimes> feval xs b"
| "feval xs (FNeg a) = \<ominus> feval xs a"
| "feval xs (FDiv a b) = feval xs a \<oslash> feval xs b"
| "feval xs (FPow a n) = feval xs a [^] n"
lemma (in field) feval_Cnst:
"feval xs (FCnst 0) = \<zero>"
"feval xs (FCnst 1) = \<one>"
"feval xs (FCnst (numeral n)) = \<guillemotleft>numeral n\<guillemotright>"
by simp_all
datatype pexpr =
PExpr1 pexpr1
| PExpr2 pexpr2
and pexpr1 =
PCnst int
| PVar nat
| PAdd pexpr pexpr
| PSub pexpr pexpr
| PNeg pexpr
and pexpr2 =
PMul pexpr pexpr
| PPow pexpr nat
lemma pexpr_cases [case_names PCnst PVar PAdd PSub PNeg PMul PPow]:
assumes
"\<And>c. e = PExpr1 (PCnst c) \<Longrightarrow> P"
"\<And>n. e = PExpr1 (PVar n) \<Longrightarrow> P"
"\<And>e1 e2. e = PExpr1 (PAdd e1 e2) \<Longrightarrow> P"
"\<And>e1 e2. e = PExpr1 (PSub e1 e2) \<Longrightarrow> P"
"\<And>e'. e = PExpr1 (PNeg e') \<Longrightarrow> P"
"\<And>e1 e2. e = PExpr2 (PMul e1 e2) \<Longrightarrow> P"
"\<And>e' n. e = PExpr2 (PPow e' n) \<Longrightarrow> P"
shows P
proof (cases e)
case (PExpr1 e')
then show ?thesis
apply (cases e')
apply simp_all
apply (erule assms)+
done
next
case (PExpr2 e')
then show ?thesis
apply (cases e')
apply simp_all
apply (erule assms)+
done
qed
lemmas pexpr_cases2 = pexpr_cases [case_product pexpr_cases]
fun (in field) peval :: "'a list \<Rightarrow> pexpr \<Rightarrow> 'a"
where
"peval xs (PExpr1 (PCnst c)) = \<guillemotleft>c\<guillemotright>"
| "peval xs (PExpr1 (PVar n)) = nth_el xs n"
| "peval xs (PExpr1 (PAdd a b)) = peval xs a \<oplus> peval xs b"
| "peval xs (PExpr1 (PSub a b)) = peval xs a \<ominus> peval xs b"
| "peval xs (PExpr1 (PNeg a)) = \<ominus> peval xs a"
| "peval xs (PExpr2 (PMul a b)) = peval xs a \<otimes> peval xs b"
| "peval xs (PExpr2 (PPow a n)) = peval xs a [^] n"
lemma (in field) peval_Cnst:
"peval xs (PExpr1 (PCnst 0)) = \<zero>"
"peval xs (PExpr1 (PCnst 1)) = \<one>"
"peval xs (PExpr1 (PCnst (numeral n))) = \<guillemotleft>numeral n\<guillemotright>"
"peval xs (PExpr1 (PCnst (- numeral n))) = \<ominus> \<guillemotleft>numeral n\<guillemotright>"
by simp_all
lemma (in field) peval_closed [simp]:
"in_carrier xs \<Longrightarrow> peval xs e \<in> carrier R"
"in_carrier xs \<Longrightarrow> peval xs (PExpr1 e1) \<in> carrier R"
"in_carrier xs \<Longrightarrow> peval xs (PExpr2 e2) \<in> carrier R"
by (induct e and e1 and e2) simp_all
definition npepow :: "pexpr \<Rightarrow> nat \<Rightarrow> pexpr"
where "npepow e n =
(if n = 0 then PExpr1 (PCnst 1)
else if n = 1 then e
else
(case e of
PExpr1 (PCnst c) \<Rightarrow> PExpr1 (PCnst (c ^ n))
| _ \<Rightarrow> PExpr2 (PPow e n)))"
lemma (in field) npepow_correct:
"in_carrier xs \<Longrightarrow> peval xs (npepow e n) = peval xs (PExpr2 (PPow e n))"
by (cases e rule: pexpr_cases) (simp_all add: npepow_def)
fun npemul :: "pexpr \<Rightarrow> pexpr \<Rightarrow> pexpr"
where "npemul x y =
(case x of
PExpr1 (PCnst c) \<Rightarrow>
if c = 0 then x
else if c = 1 then y else
(case y of
PExpr1 (PCnst d) \<Rightarrow> PExpr1 (PCnst (c * d))
| _ \<Rightarrow> PExpr2 (PMul x y))
| PExpr2 (PPow e1 n) \<Rightarrow>
(case y of
PExpr2 (PPow e2 m) \<Rightarrow>
if n = m then npepow (npemul e1 e2) n
else PExpr2 (PMul x y)
| PExpr1 (PCnst d) \<Rightarrow>
if d = 0 then y
else if d = 1 then x
else PExpr2 (PMul x y)
| _ \<Rightarrow> PExpr2 (PMul x y))
| _ \<Rightarrow>
(case y of
PExpr1 (PCnst d) \<Rightarrow>
if d = 0 then y
else if d = 1 then x
else PExpr2 (PMul x y)
| _ \<Rightarrow> PExpr2 (PMul x y)))"
lemma (in field) npemul_correct:
"in_carrier xs \<Longrightarrow> peval xs (npemul e1 e2) = peval xs (PExpr2 (PMul e1 e2))"
proof (induct e1 e2 rule: npemul.induct)
case (1 x y)
then show ?case
proof (cases x y rule: pexpr_cases2)
case (PPow_PPow e n e' m)
then show ?thesis
by (simp del: npemul.simps add: 1 npepow_correct nat_pow_distrib
npemul.simps [of "PExpr2 (PPow e n)" "PExpr2 (PPow e' m)"])
qed simp_all
qed
declare npemul.simps [simp del]
definition npeadd :: "pexpr \<Rightarrow> pexpr \<Rightarrow> pexpr"
where "npeadd x y =
(case x of
PExpr1 (PCnst c) \<Rightarrow>
if c = 0 then y
else
(case y of
PExpr1 (PCnst d) \<Rightarrow> PExpr1 (PCnst (c + d))
| _ \<Rightarrow> PExpr1 (PAdd x y))
| _ \<Rightarrow>
(case y of
PExpr1 (PCnst d) \<Rightarrow>
if d = 0 then x
else PExpr1 (PAdd x y)
| _ \<Rightarrow> PExpr1 (PAdd x y)))"
lemma (in field) npeadd_correct:
"in_carrier xs \<Longrightarrow> peval xs (npeadd e1 e2) = peval xs (PExpr1 (PAdd e1 e2))"
by (cases e1 e2 rule: pexpr_cases2) (simp_all add: npeadd_def)
definition npesub :: "pexpr \<Rightarrow> pexpr \<Rightarrow> pexpr"
where "npesub x y =
(case y of
PExpr1 (PCnst d) \<Rightarrow>
if d = 0 then x
else
(case x of
PExpr1 (PCnst c) \<Rightarrow> PExpr1 (PCnst (c - d))
| _ \<Rightarrow> PExpr1 (PSub x y))
| _ \<Rightarrow>
(case x of
PExpr1 (PCnst c) \<Rightarrow>
if c = 0 then PExpr1 (PNeg y)
else PExpr1 (PSub x y)
| _ \<Rightarrow> PExpr1 (PSub x y)))"
lemma (in field) npesub_correct:
"in_carrier xs \<Longrightarrow> peval xs (npesub e1 e2) = peval xs (PExpr1 (PSub e1 e2))"
by (cases e1 e2 rule: pexpr_cases2) (simp_all add: npesub_def)
definition npeneg :: "pexpr \<Rightarrow> pexpr"
where "npeneg e =
(case e of
PExpr1 (PCnst c) \<Rightarrow> PExpr1 (PCnst (- c))
| _ \<Rightarrow> PExpr1 (PNeg e))"
lemma (in field) npeneg_correct: "peval xs (npeneg e) = peval xs (PExpr1 (PNeg e))"
by (cases e rule: pexpr_cases) (simp_all add: npeneg_def)
lemma option_pair_cases [case_names None Some]:
obtains (None) "x = None" | (Some) p q where "x = Some (p, q)"
proof (cases x)
case None
then show ?thesis by (rule that)
next
case (Some r)
then show ?thesis
by (cases r, simp) (rule that)
qed
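(* isin e n e' m checks whether a power of e occurs as a factor of e' raised to m:
   on success it returns Some (p, e'') with p <= n such that e'^m equals
   e^(n - p) times e''; see isin_correct below. *)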
fun isin :: "pexpr \<Rightarrow> nat \<Rightarrow> pexpr \<Rightarrow> nat \<Rightarrow> (nat \<times> pexpr) option"
where
"isin e n (PExpr2 (PMul e1 e2)) m =
(case isin e n e1 m of
Some (k, e3) \<Rightarrow>
if k = 0 then Some (0, npemul e3 (npepow e2 m))
else
(case isin e k e2 m of
Some (l, e4) \<Rightarrow> Some (l, npemul e3 e4)
| None \<Rightarrow> Some (k, npemul e3 (npepow e2 m)))
| None \<Rightarrow>
(case isin e n e2 m of
Some (k, e3) \<Rightarrow> Some (k, npemul (npepow e1 m) e3)
| None \<Rightarrow> None))"
| "isin e n (PExpr2 (PPow e' k)) m =
(if k = 0 then None else isin e n e' (k * m))"
| "isin (PExpr1 e) n (PExpr1 e') m =
(if e = e' then
if n \<ge> m then Some (n - m, PExpr1 (PCnst 1))
else Some (0, npepow (PExpr1 e) (m - n))
else None)"
| "isin (PExpr2 e) n (PExpr1 e') m = None"
lemma (in field) isin_correct:
assumes "in_carrier xs"
and "isin e n e' m = Some (p, e'')"
shows "peval xs (PExpr2 (PPow e' m)) = peval xs (PExpr2 (PMul (PExpr2 (PPow e (n - p))) e''))"
and "p \<le> n"
using assms
by (induct e n e' m arbitrary: p e'' rule: isin.induct)
(force
simp add:
nat_pow_distrib nat_pow_pow nat_pow_mult m_ac
npemul_correct npepow_correct
split: option.split_asm prod.split_asm if_split_asm)+
lemma (in field) isin_correct':
"in_carrier xs \<Longrightarrow> isin e n e' 1 = Some (p, e'') \<Longrightarrow>
peval xs e' = peval xs e [^] (n - p) \<otimes> peval xs e''"
"in_carrier xs \<Longrightarrow> isin e n e' 1 = Some (p, e'') \<Longrightarrow> p \<le> n"
using isin_correct [where m = 1] by simp_all
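(* split_aux e1 n e2 factors e1^n and e2 into a common part and the two
   remaining parts; see the abbreviations Left / Common / Right and
   split_aux_correct below. *)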
fun split_aux :: "pexpr \<Rightarrow> nat \<Rightarrow> pexpr \<Rightarrow> pexpr \<times> pexpr \<times> pexpr"
where
"split_aux (PExpr2 (PMul e1 e2)) n e =
(let
(left1, common1, right1) = split_aux e1 n e;
(left2, common2, right2) = split_aux e2 n right1
in (npemul left1 left2, npemul common1 common2, right2))"
| "split_aux (PExpr2 (PPow e' m)) n e =
(if m = 0 then (PExpr1 (PCnst 1), PExpr1 (PCnst 1), e)
else split_aux e' (m * n) e)"
| "split_aux (PExpr1 e') n e =
(case isin (PExpr1 e') n e 1 of
Some (m, e'') \<Rightarrow>
(if m = 0 then (PExpr1 (PCnst 1), npepow (PExpr1 e') n, e'')
else (npepow (PExpr1 e') m, npepow (PExpr1 e') (n - m), e''))
| None \<Rightarrow> (npepow (PExpr1 e') n, PExpr1 (PCnst 1), e))"
hide_const Left Right (* FIXME !? *)
abbreviation Left :: "pexpr \<Rightarrow> pexpr \<Rightarrow> pexpr"
where "Left e1 e2 \<equiv> fst (split_aux e1 (Suc 0) e2)"
abbreviation Common :: "pexpr \<Rightarrow> pexpr \<Rightarrow> pexpr"
where "Common e1 e2 \<equiv> fst (snd (split_aux e1 (Suc 0) e2))"
abbreviation Right :: "pexpr \<Rightarrow> pexpr \<Rightarrow> pexpr"
where "Right e1 e2 \<equiv> snd (snd (split_aux e1 (Suc 0) e2))"
lemma split_aux_induct [case_names 1 2 3]:
assumes I1: "\<And>e1 e2 n e. P e1 n e \<Longrightarrow> P e2 n (snd (snd (split_aux e1 n e))) \<Longrightarrow>
P (PExpr2 (PMul e1 e2)) n e"
and I2: "\<And>e' m n e. (m \<noteq> 0 \<Longrightarrow> P e' (m * n) e) \<Longrightarrow> P (PExpr2 (PPow e' m)) n e"
and I3: "\<And>e' n e. P (PExpr1 e') n e"
shows "P x y z"
proof (induct x y z rule: split_aux.induct)
case 1
from 1(1) 1(2) [OF refl prod.collapse prod.collapse]
show ?case by (rule I1)
next
case 2
then show ?case by (rule I2)
next
case 3
then show ?case by (rule I3)
qed
lemma (in field) split_aux_correct:
"in_carrier xs \<Longrightarrow>
peval xs (PExpr2 (PPow e\<^sub>1 n)) =
peval xs (PExpr2 (PMul (fst (split_aux e\<^sub>1 n e\<^sub>2)) (fst (snd (split_aux e\<^sub>1 n e\<^sub>2)))))"
"in_carrier xs \<Longrightarrow>
peval xs e\<^sub>2 =
peval xs (PExpr2 (PMul (snd (snd (split_aux e\<^sub>1 n e\<^sub>2))) (fst (snd (split_aux e\<^sub>1 n e\<^sub>2)))))"
by (induct e\<^sub>1 n e\<^sub>2 rule: split_aux_induct)
(auto simp add: split_beta
nat_pow_distrib nat_pow_pow nat_pow_mult m_ac
npemul_correct npepow_correct isin_correct'
split: option.split)
lemma (in field) split_aux_correct':
"in_carrier xs \<Longrightarrow>
peval xs e\<^sub>1 = peval xs (Left e\<^sub>1 e\<^sub>2) \<otimes> peval xs (Common e\<^sub>1 e\<^sub>2)"
"in_carrier xs \<Longrightarrow>
peval xs e\<^sub>2 = peval xs (Right e\<^sub>1 e\<^sub>2) \<otimes> peval xs (Common e\<^sub>1 e\<^sub>2)"
using split_aux_correct [where n = 1] by simp_all
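(* fnorm computes a normal form (numerator, denominator, conditions) of a field
   expression: the pexpr list collects the subexpressions that must be nonzero
   for the normalisation to be sound; see fnorm_correct below. *)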
fun fnorm :: "fexpr \<Rightarrow> pexpr \<times> pexpr \<times> pexpr list"
where
"fnorm (FCnst c) = (PExpr1 (PCnst c), PExpr1 (PCnst 1), [])"
| "fnorm (FVar n) = (PExpr1 (PVar n), PExpr1 (PCnst 1), [])"
| "fnorm (FAdd e1 e2) =
(let
(xn, xd, xc) = fnorm e1;
(yn, yd, yc) = fnorm e2;
(left, common, right) = split_aux xd 1 yd
in
(npeadd (npemul xn right) (npemul yn left),
npemul left (npemul right common),
List.union xc yc))"
| "fnorm (FSub e1 e2) =
(let
(xn, xd, xc) = fnorm e1;
(yn, yd, yc) = fnorm e2;
(left, common, right) = split_aux xd 1 yd
in
(npesub (npemul xn right) (npemul yn left),
npemul left (npemul right common),
List.union xc yc))"
| "fnorm (FMul e1 e2) =
(let
(xn, xd, xc) = fnorm e1;
(yn, yd, yc) = fnorm e2;
(left1, common1, right1) = split_aux xn 1 yd;
(left2, common2, right2) = split_aux yn 1 xd
in
(npemul left1 left2,
npemul right2 right1,
List.union xc yc))"
| "fnorm (FNeg e) =
(let (n, d, c) = fnorm e
in (npeneg n, d, c))"
| "fnorm (FDiv e1 e2) =
(let
(xn, xd, xc) = fnorm e1;
(yn, yd, yc) = fnorm e2;
(left1, common1, right1) = split_aux xn 1 yn;
(left2, common2, right2) = split_aux xd 1 yd
in
(npemul left1 right2,
npemul left2 right1,
List.insert yn (List.union xc yc)))"
| "fnorm (FPow e m) =
(let (n, d, c) = fnorm e
in (npepow n m, npepow d m, c))"
abbreviation Num :: "fexpr \<Rightarrow> pexpr"
where "Num e \<equiv> fst (fnorm e)"
abbreviation Denom :: "fexpr \<Rightarrow> pexpr"
where "Denom e \<equiv> fst (snd (fnorm e))"
abbreviation Cond :: "fexpr \<Rightarrow> pexpr list"
where "Cond e \<equiv> snd (snd (fnorm e))"
primrec (in field) nonzero :: "'a list \<Rightarrow> pexpr list \<Rightarrow> bool"
where
"nonzero xs [] \<longleftrightarrow> True"
| "nonzero xs (p # ps) \<longleftrightarrow> peval xs p \<noteq> \<zero> \<and> nonzero xs ps"
lemma (in field) nonzero_singleton: "nonzero xs [p] = (peval xs p \<noteq> \<zero>)"
by simp
lemma (in field) nonzero_append: "nonzero xs (ps @ qs) = (nonzero xs ps \<and> nonzero xs qs)"
by (induct ps) simp_all
lemma (in field) nonzero_idempotent:
"p \<in> set ps \<Longrightarrow> (peval xs p \<noteq> \<zero> \<and> nonzero xs ps) = nonzero xs ps"
by (induct ps) auto
lemma (in field) nonzero_insert:
"nonzero xs (List.insert p ps) = (peval xs p \<noteq> \<zero> \<and> nonzero xs ps)"
by (simp add: List.insert_def nonzero_idempotent)
lemma (in field) nonzero_union:
"nonzero xs (List.union ps qs) = (nonzero xs ps \<and> nonzero xs qs)"
by (induct ps rule: rev_induct)
(auto simp add: List.union_def nonzero_insert nonzero_append)
lemma (in field) fnorm_correct:
assumes "in_carrier xs"
and "nonzero xs (Cond e)"
shows "feval xs e = peval xs (Num e) \<oslash> peval xs (Denom e)"
and "peval xs (Denom e) \<noteq> \<zero>"
using assms
proof (induct e)
case (FCnst c)
{
case 1
show ?case by simp
next
case 2
show ?case by simp
}
next
case (FVar n)
{
case 1
then show ?case by simp
next
case 2
show ?case by simp
}
next
case (FAdd e1 e2)
note split = split_aux_correct' [where xs=xs and e\<^sub>1 = "Denom e1" and e\<^sub>2 = "Denom e2"]
{
case 1
let ?left = "peval xs (Left (Denom e1) (Denom e2))"
let ?common = "peval xs (Common (Denom e1) (Denom e2))"
let ?right = "peval xs (Right (Denom e1) (Denom e2))"
from 1 FAdd have "feval xs (FAdd e1 e2) =
(?common \<otimes> (peval xs (Num e1) \<otimes> ?right \<oplus> peval xs (Num e2) \<otimes> ?left)) \<oslash>
(?common \<otimes> (?left \<otimes> (?right \<otimes> ?common)))"
by (simp add: split_beta split nonzero_union add_frac_eq r_distr m_ac)
also from 1 FAdd have "\<dots> = peval xs (Num (FAdd e1 e2)) \<oslash> peval xs (Denom (FAdd e1 e2))"
by (simp add: split_beta split nonzero_union npeadd_correct npemul_correct integral_iff)
finally show ?case .
next
case 2
with FAdd show ?case
by (simp add: split_beta split nonzero_union npemul_correct integral_iff)
}
next
case (FSub e1 e2)
note split = split_aux_correct' [where xs=xs and e\<^sub>1 = "Denom e1" and e\<^sub>2 = "Denom e2"]
{
case 1
let ?left = "peval xs (Left (Denom e1) (Denom e2))"
let ?common = "peval xs (Common (Denom e1) (Denom e2))"
let ?right = "peval xs (Right (Denom e1) (Denom e2))"
from 1 FSub
have "feval xs (FSub e1 e2) =
(?common \<otimes> (peval xs (Num e1) \<otimes> ?right \<ominus> peval xs (Num e2) \<otimes> ?left)) \<oslash>
(?common \<otimes> (?left \<otimes> (?right \<otimes> ?common)))"
by (simp add: split_beta split nonzero_union diff_frac_eq r_diff_distr m_ac)
also from 1 FSub have "\<dots> = peval xs (Num (FSub e1 e2)) \<oslash> peval xs (Denom (FSub e1 e2))"
by (simp add: split_beta split nonzero_union npesub_correct npemul_correct integral_iff)
finally show ?case .
next
case 2
with FSub show ?case
by (simp add: split_beta split nonzero_union npemul_correct integral_iff)
}
next
case (FMul e1 e2)
note split =
split_aux_correct' [where xs=xs and e\<^sub>1 = "Num e1" and e\<^sub>2 = "Denom e2"]
split_aux_correct' [where xs=xs and e\<^sub>1 = "Num e2" and e\<^sub>2 = "Denom e1"]
{
case 1
let ?left\<^sub>1 = "peval xs (Left (Num e1) (Denom e2))"
let ?common\<^sub>1 = "peval xs (Common (Num e1) (Denom e2))"
let ?right\<^sub>1 = "peval xs (Right (Num e1) (Denom e2))"
let ?left\<^sub>2 = "peval xs (Left (Num e2) (Denom e1))"
let ?common\<^sub>2 = "peval xs (Common (Num e2) (Denom e1))"
let ?right\<^sub>2 = "peval xs (Right (Num e2) (Denom e1))"
from 1 FMul have "feval xs (FMul e1 e2) =
((?common\<^sub>1 \<otimes> ?common\<^sub>2) \<otimes> (?left\<^sub>1 \<otimes> ?left\<^sub>2)) \<oslash>
((?common\<^sub>1 \<otimes> ?common\<^sub>2) \<otimes> (?right\<^sub>2 \<otimes> ?right\<^sub>1))"
by (simp add: split_beta split nonzero_union
nonzero_divide_divide_eq_left m_ac)
also from 1 FMul have "\<dots> = peval xs (Num (FMul e1 e2)) \<oslash> peval xs (Denom (FMul e1 e2))"
by (simp add: split_beta split nonzero_union npemul_correct integral_iff)
finally show ?case .
next
case 2
with FMul show ?case
by (simp add: split_beta split nonzero_union npemul_correct integral_iff)
}
next
case (FNeg e)
{
case 1
with FNeg show ?case
by (simp add: split_beta npeneg_correct)
next
case 2
with FNeg show ?case
by (simp add: split_beta)
}
next
case (FDiv e1 e2)
note split =
split_aux_correct' [where xs=xs and e\<^sub>1 = "Num e1" and e\<^sub>2 = "Num e2"]
split_aux_correct' [where xs=xs and e\<^sub>1 = "Denom e1" and e\<^sub>2 = "Denom e2"]
{
case 1
let ?left\<^sub>1 = "peval xs (Left (Num e1) (Num e2))"
let ?common\<^sub>1 = "peval xs (Common (Num e1) (Num e2))"
let ?right\<^sub>1 = "peval xs (Right (Num e1) (Num e2))"
let ?left\<^sub>2 = "peval xs (Left (Denom e1) (Denom e2))"
let ?common\<^sub>2 = "peval xs (Common (Denom e1) (Denom e2))"
let ?right\<^sub>2 = "peval xs (Right (Denom e1) (Denom e2))"
from 1 FDiv
have "feval xs (FDiv e1 e2) =
((?common\<^sub>1 \<otimes> ?common\<^sub>2) \<otimes> (?left\<^sub>1 \<otimes> ?right\<^sub>2)) \<oslash>
((?common\<^sub>1 \<otimes> ?common\<^sub>2) \<otimes> (?left\<^sub>2 \<otimes> ?right\<^sub>1))"
by (simp add: split_beta split nonzero_union nonzero_insert
nonzero_divide_divide_eq m_ac)
also from 1 FDiv have "\<dots> = peval xs (Num (FDiv e1 e2)) \<oslash> peval xs (Denom (FDiv e1 e2))"
by (simp add: split_beta split nonzero_union nonzero_insert npemul_correct integral_iff)
finally show ?case .
next
case 2
with FDiv show ?case
by (simp add: split_beta split nonzero_union nonzero_insert npemul_correct integral_iff)
}
next
case (FPow e n)
{
case 1
with FPow show ?case
by (simp add: split_beta nonzero_power_divide npepow_correct)
next
case 2
with FPow show ?case
by (simp add: split_beta npepow_correct)
}
qed
lemma (in field) feval_eq0:
assumes "in_carrier xs"
and "fnorm e = (n, d, c)"
and "nonzero xs c"
and "peval xs n = \<zero>"
shows "feval xs e = \<zero>"
using assms fnorm_correct [of xs e] by simp
lemma (in field) fexpr_in_carrier:
assumes "in_carrier xs"
and "nonzero xs (Cond e)"
shows "feval xs e \<in> carrier R"
using assms
proof (induct e)
case (FDiv e1 e2)
then have "feval xs e1 \<in> carrier R" "feval xs e2 \<in> carrier R"
"peval xs (Num e2) \<noteq> \<zero>" "nonzero xs (Cond e2)"
by (simp_all add: nonzero_union nonzero_insert split: prod.split_asm)
from \<open>in_carrier xs\<close> \<open>nonzero xs (Cond e2)\<close>
have "feval xs e2 = peval xs (Num e2) \<oslash> peval xs (Denom e2)"
by (rule fnorm_correct)
moreover from \<open>in_carrier xs\<close> \<open>nonzero xs (Cond e2)\<close>
have "peval xs (Denom e2) \<noteq> \<zero>" by (rule fnorm_correct)
ultimately have "feval xs e2 \<noteq> \<zero>" using \<open>peval xs (Num e2) \<noteq> \<zero>\<close> \<open>in_carrier xs\<close>
by (simp add: divide_eq_0_iff)
with \<open>feval xs e1 \<in> carrier R\<close> \<open>feval xs e2 \<in> carrier R\<close>
show ?case by simp
qed (simp_all add: nonzero_union split: prod.split_asm)
lemma (in field) feval_eq:
assumes "in_carrier xs"
and "fnorm (FSub e e') = (n, d, c)"
and "nonzero xs c"
shows "(feval xs e = feval xs e') = (peval xs n = \<zero>)"
proof -
from assms have "nonzero xs (Cond e)" "nonzero xs (Cond e')"
by (auto simp add: nonzero_union split: prod.split_asm)
with assms fnorm_correct [of xs "FSub e e'"]
have "feval xs e \<ominus> feval xs e' = peval xs n \<oslash> peval xs d"
"peval xs d \<noteq> \<zero>"
by simp_all
show ?thesis
proof
assume "feval xs e = feval xs e'"
with \<open>feval xs e \<ominus> feval xs e' = peval xs n \<oslash> peval xs d\<close>
\<open>in_carrier xs\<close> \<open>nonzero xs (Cond e')\<close>
have "peval xs n \<oslash> peval xs d = \<zero>"
by (simp add: fexpr_in_carrier minus_eq r_neg)
with \<open>peval xs d \<noteq> \<zero>\<close> \<open>in_carrier xs\<close>
show "peval xs n = \<zero>"
by (simp add: divide_eq_0_iff)
next
assume "peval xs n = \<zero>"
with \<open>feval xs e \<ominus> feval xs e' = peval xs n \<oslash> peval xs d\<close> \<open>peval xs d \<noteq> \<zero>\<close>
\<open>nonzero xs (Cond e)\<close> \<open>nonzero xs (Cond e')\<close> \<open>in_carrier xs\<close>
show "feval xs e = feval xs e'"
by (simp add: eq_diff0 fexpr_in_carrier)
qed
qed
ML \<open>
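(* Code-generator based evaluation of fnorm: the term_of_* functions convert the
   computed ML values back into terms, and cv is a computation_conv whose
   results are certified by the fnorm oracle. *)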
val term_of_nat = HOLogic.mk_number \<^Type>\<open>nat\<close> o @{code integer_of_nat};
val term_of_int = HOLogic.mk_number \<^Type>\<open>int\<close> o @{code integer_of_int};
fun term_of_pexpr (@{code PExpr1} x) = \<^Const>\<open>PExpr1\<close> $ term_of_pexpr1 x
| term_of_pexpr (@{code PExpr2} x) = \<^Const>\<open>PExpr2\<close> $ term_of_pexpr2 x
and term_of_pexpr1 (@{code PCnst} k) = \<^Const>\<open>PCnst\<close> $ term_of_int k
| term_of_pexpr1 (@{code PVar} n) = \<^Const>\<open>PVar\<close> $ term_of_nat n
| term_of_pexpr1 (@{code PAdd} (x, y)) = \<^Const>\<open>PAdd\<close> $ term_of_pexpr x $ term_of_pexpr y
| term_of_pexpr1 (@{code PSub} (x, y)) = \<^Const>\<open>PSub\<close> $ term_of_pexpr x $ term_of_pexpr y
| term_of_pexpr1 (@{code PNeg} x) = \<^Const>\<open>PNeg\<close> $ term_of_pexpr x
and term_of_pexpr2 (@{code PMul} (x, y)) = \<^Const>\<open>PMul\<close> $ term_of_pexpr x $ term_of_pexpr y
| term_of_pexpr2 (@{code PPow} (x, n)) = \<^Const>\<open>PPow\<close> $ term_of_pexpr x $ term_of_nat n
fun term_of_result (x, (y, zs)) =
HOLogic.mk_prod (term_of_pexpr x, HOLogic.mk_prod
(term_of_pexpr y, HOLogic.mk_list \<^Type>\<open>pexpr\<close> (map term_of_pexpr zs)));
local
fun fnorm (ctxt, ct, t) =
\<^instantiate>\<open>x = ct and y = \<open>Thm.cterm_of ctxt t\<close>
in cterm \<open>x \<equiv> y\<close> for x y :: \<open>pexpr \<times> pexpr \<times> pexpr list\<close>\<close>;
val (_, raw_fnorm_oracle) = Context.>>> (Context.map_theory_result
(Thm.add_oracle (\<^binding>\<open>fnorm\<close>, fnorm)));
fun fnorm_oracle ctxt ct t = raw_fnorm_oracle (ctxt, ct, t);
in
val cv = @{computation_conv "pexpr \<times> pexpr \<times> pexpr list"
terms: fnorm nat_of_integer Code_Target_Nat.natural
"0::nat" "1::nat" "2::nat" "3::nat"
"0::int" "1::int" "2::int" "3::int" "-1::int"
datatypes: fexpr int integer num}
(fn ctxt => fn result => fn ct => fnorm_oracle ctxt ct (term_of_result result))
end
\<close>
ML \<open>
signature FIELD_TAC =
sig
structure Field_Simps:
sig
type T
val get: Context.generic -> T
val put: T -> Context.generic -> Context.generic
val map: (T -> T) -> Context.generic -> Context.generic
end
val eq_field_simps:
(term * (thm list * thm list * thm list * thm * thm)) *
(term * (thm list * thm list * thm list * thm * thm)) -> bool
val field_tac: bool -> Proof.context -> int -> tactic
end
structure Field_Tac : FIELD_TAC =
struct
open Ring_Tac;
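(* Recognise terms over a field locale instance R (field_struct) and reify them
   into the fexpr datatype: reif_fexpr for the locale version, reif_fexpr' for
   the type-class version. *)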
fun field_struct \<^Const_>\<open>Ring.ring.add _ _ for R _ _\<close> = SOME R
| field_struct \<^Const_>\<open>Ring.a_minus _ _ for R _ _\<close> = SOME R
| field_struct \<^Const_>\<open>Group.monoid.mult _ _ for R _ _\<close> = SOME R
| field_struct \<^Const_>\<open>Ring.a_inv _ _ for R _\<close> = SOME R
| field_struct \<^Const_>\<open>Group.pow _ _ _ for R _ _\<close> = SOME R
| field_struct \<^Const_>\<open>Algebra_Aux.m_div _ _ for R _ _\<close> = SOME R
| field_struct \<^Const_>\<open>Ring.ring.zero _ _ for R\<close> = SOME R
| field_struct \<^Const_>\<open>Group.monoid.one _ _ for R\<close> = SOME R
| field_struct \<^Const_>\<open>Algebra_Aux.of_integer _ _ for R _\<close> = SOME R
| field_struct _ = NONE;
fun reif_fexpr vs \<^Const_>\<open>Ring.ring.add _ _ for _ a b\<close> =
\<^Const>\<open>FAdd for \<open>reif_fexpr vs a\<close> \<open>reif_fexpr vs b\<close>\<close>
| reif_fexpr vs \<^Const_>\<open>Ring.a_minus _ _ for _ a b\<close> =
\<^Const>\<open>FSub for \<open>reif_fexpr vs a\<close> \<open>reif_fexpr vs b\<close>\<close>
| reif_fexpr vs \<^Const_>\<open>Group.monoid.mult _ _ for _ a b\<close> =
\<^Const>\<open>FMul for \<open>reif_fexpr vs a\<close> \<open>reif_fexpr vs b\<close>\<close>
| reif_fexpr vs \<^Const_>\<open>Ring.a_inv _ _ for _ a\<close> =
\<^Const>\<open>FNeg for \<open>reif_fexpr vs a\<close>\<close>
| reif_fexpr vs \<^Const_>\<open>Group.pow _ _ _ for _ a n\<close> =
\<^Const>\<open>FPow for \<open>reif_fexpr vs a\<close> n\<close>
| reif_fexpr vs \<^Const_>\<open>Algebra_Aux.m_div _ _ for _ a b\<close> =
\<^Const>\<open>FDiv for \<open>reif_fexpr vs a\<close> \<open>reif_fexpr vs b\<close>\<close>
| reif_fexpr vs (Free x) =
\<^Const>\<open>FVar for \<open>HOLogic.mk_number HOLogic.natT (find_index (equal x) vs)\<close>\<close>
| reif_fexpr vs \<^Const_>\<open>Ring.ring.zero _ _ for _\<close> = \<^term>\<open>FCnst 0\<close>
| reif_fexpr vs \<^Const_>\<open>Group.monoid.one _ _ for _\<close> = \<^term>\<open>FCnst 1\<close>
| reif_fexpr vs \<^Const_>\<open>Algebra_Aux.of_integer _ _ for _ n\<close> = \<^Const>\<open>FCnst for n\<close>
| reif_fexpr _ _ = error "reif_fexpr: bad expression";
fun reif_fexpr' vs \<^Const_>\<open>plus _ for a b\<close> = \<^Const>\<open>FAdd for \<open>reif_fexpr' vs a\<close> \<open>reif_fexpr' vs b\<close>\<close>
| reif_fexpr' vs \<^Const_>\<open>minus _ for a b\<close> = \<^Const>\<open>FSub for \<open>reif_fexpr' vs a\<close> \<open>reif_fexpr' vs b\<close>\<close>
| reif_fexpr' vs \<^Const_>\<open>times _ for a b\<close> = \<^Const>\<open>FMul for \<open>reif_fexpr' vs a\<close> \<open>reif_fexpr' vs b\<close>\<close>
| reif_fexpr' vs \<^Const_>\<open>uminus _ for a\<close> = \<^Const>\<open>FNeg for \<open>reif_fexpr' vs a\<close>\<close>
| reif_fexpr' vs \<^Const_>\<open>power _ for a n\<close> = \<^Const>\<open>FPow for \<open>reif_fexpr' vs a\<close> n\<close>
| reif_fexpr' vs \<^Const_>\<open>divide _ for a b\<close> = \<^Const>\<open>FDiv for \<open>reif_fexpr' vs a\<close> \<open>reif_fexpr' vs b\<close>\<close>
| reif_fexpr' vs (Free x) =
\<^Const>\<open>FVar for \<open>HOLogic.mk_number HOLogic.natT (find_index (equal x) vs)\<close>\<close>
| reif_fexpr' vs \<^Const_>\<open>zero_class.zero _\<close> = \<^term>\<open>FCnst 0\<close>
| reif_fexpr' vs \<^Const_>\<open>one_class.one _\<close> = \<^term>\<open>FCnst 1\<close>
| reif_fexpr' vs \<^Const_>\<open>numeral _ for b\<close> = \<^Const>\<open>FCnst for \<^Const>\<open>numeral \<^Type>\<open>int\<close> for b\<close>\<close>
| reif_fexpr' _ _ = error "reif_fexpr: bad expression";
fun eq_field_simps
((t, (ths1, ths2, ths3, th4, th)),
(t', (ths1', ths2', ths3', th4', th'))) =
t aconv t' andalso
eq_list Thm.eq_thm (ths1, ths1') andalso
eq_list Thm.eq_thm (ths2, ths2') andalso
eq_list Thm.eq_thm (ths3, ths3') andalso
Thm.eq_thm (th4, th4') andalso
Thm.eq_thm (th, th');
structure Field_Simps = Generic_Data
(struct
type T = (term * (thm list * thm list * thm list * thm * thm)) Net.net
val empty = Net.empty
val merge = Net.merge eq_field_simps
end);
fun get_field_simps ctxt optcT t =
(case get_matching_rules ctxt (Field_Simps.get (Context.Proof ctxt)) t of
SOME (ths1, ths2, ths3, th4, th) =>
let val tr =
Thm.transfer' ctxt #>
(case optcT of NONE => I | SOME cT => inst [cT] [] #> norm)
in (map tr ths1, map tr ths2, map tr ths3, tr th4, tr th) end
| NONE => error "get_field_simps: lookup failed");
fun nth_el_conv (_, _, _, nth_el_Cons, _) =
let
val a = type_of_eqn nth_el_Cons;
val If_conv_a = If_conv a;
fun conv ys n = (case strip_app ys of
(\<^const_name>\<open>Cons\<close>, [x, xs]) =>
transitive'
(inst [] [x, xs, n] nth_el_Cons)
(If_conv_a (args2 nat_eq_conv)
Thm.reflexive
(cong2' conv Thm.reflexive (args2 nat_minus_conv))))
in conv end;
fun feval_conv (rls as
([feval_simps_1, feval_simps_2, feval_simps_3,
feval_simps_4, feval_simps_5, feval_simps_6,
feval_simps_7, feval_simps_8, feval_simps_9,
feval_simps_10, feval_simps_11],
_, _, _, _)) =
let
val nth_el_conv' = nth_el_conv rls;
fun conv xs x = (case strip_app x of
(\<^const_name>\<open>FCnst\<close>, [c]) => (case strip_app c of
(\<^const_name>\<open>zero_class.zero\<close>, _) => inst [] [xs] feval_simps_9
| (\<^const_name>\<open>one_class.one\<close>, _) => inst [] [xs] feval_simps_10
| (\<^const_name>\<open>numeral\<close>, [n]) => inst [] [xs, n] feval_simps_11
| _ => inst [] [xs, c] feval_simps_1)
| (\<^const_name>\<open>FVar\<close>, [n]) =>
transitive' (inst [] [xs, n] feval_simps_2) (args2 nth_el_conv')
| (\<^const_name>\<open>FAdd\<close>, [a, b]) =>
transitive' (inst [] [xs, a, b] feval_simps_3)
(cong2 (args2 conv) (args2 conv))
| (\<^const_name>\<open>FSub\<close>, [a, b]) =>
transitive' (inst [] [xs, a, b] feval_simps_4)
(cong2 (args2 conv) (args2 conv))
| (\<^const_name>\<open>FMul\<close>, [a, b]) =>
transitive' (inst [] [xs, a, b] feval_simps_5)
(cong2 (args2 conv) (args2 conv))
| (\<^const_name>\<open>FNeg\<close>, [a]) =>
transitive' (inst [] [xs, a] feval_simps_6)
(cong1 (args2 conv))
| (\<^const_name>\<open>FDiv\<close>, [a, b]) =>
transitive' (inst [] [xs, a, b] feval_simps_7)
(cong2 (args2 conv) (args2 conv))
| (\<^const_name>\<open>FPow\<close>, [a, n]) =>
transitive' (inst [] [xs, a, n] feval_simps_8)
(cong2 (args2 conv) Thm.reflexive))
in conv end;
fun peval_conv (rls as
(_,
[peval_simps_1, peval_simps_2, peval_simps_3,
peval_simps_4, peval_simps_5, peval_simps_6,
peval_simps_7, peval_simps_8, peval_simps_9,
peval_simps_10, peval_simps_11],
_, _, _)) =
let
val nth_el_conv' = nth_el_conv rls;
fun conv xs x = (case strip_app x of
(\<^const_name>\<open>PExpr1\<close>, [e]) => (case strip_app e of
(\<^const_name>\<open>PCnst\<close>, [c]) => (case strip_numeral c of
(\<^const_name>\<open>zero_class.zero\<close>, _) => inst [] [xs] peval_simps_8
| (\<^const_name>\<open>one_class.one\<close>, _) => inst [] [xs] peval_simps_9
| (\<^const_name>\<open>numeral\<close>, [n]) => inst [] [xs, n] peval_simps_10
| (\<^const_name>\<open>uminus\<close>, [n]) => inst [] [xs, n] peval_simps_11
| _ => inst [] [xs, c] peval_simps_1)
| (\<^const_name>\<open>PVar\<close>, [n]) =>
transitive' (inst [] [xs, n] peval_simps_2) (args2 nth_el_conv')
| (\<^const_name>\<open>PAdd\<close>, [a, b]) =>
transitive' (inst [] [xs, a, b] peval_simps_3)
(cong2 (args2 conv) (args2 conv))
| (\<^const_name>\<open>PSub\<close>, [a, b]) =>
transitive' (inst [] [xs, a, b] peval_simps_4)
(cong2 (args2 conv) (args2 conv))
| (\<^const_name>\<open>PNeg\<close>, [a]) =>
transitive' (inst [] [xs, a] peval_simps_5)
(cong1 (args2 conv)))
| (\<^const_name>\<open>PExpr2\<close>, [e]) => (case strip_app e of
(\<^const_name>\<open>PMul\<close>, [a, b]) =>
transitive' (inst [] [xs, a, b] peval_simps_6)
(cong2 (args2 conv) (args2 conv))
| (\<^const_name>\<open>PPow\<close>, [a, n]) =>
transitive' (inst [] [xs, a, n] peval_simps_7)
(cong2 (args2 conv) Thm.reflexive)))
in conv end;
fun nonzero_conv (rls as
(_, _,
[nonzero_Nil, nonzero_Cons, nonzero_singleton],
_, _)) =
let
val peval_conv' = peval_conv rls;
fun conv xs qs = (case strip_app qs of
(\<^const_name>\<open>Nil\<close>, []) => inst [] [xs] nonzero_Nil
| (\<^const_name>\<open>Cons\<close>, [p, ps]) => (case Thm.term_of ps of
\<^Const_>\<open>Nil _\<close> =>
transitive' (inst [] [xs, p] nonzero_singleton)
(cong1 (cong2 (args2 peval_conv') Thm.reflexive))
| _ => transitive' (inst [] [xs, p, ps] nonzero_Cons)
(cong2 (cong1 (cong2 (args2 peval_conv') Thm.reflexive)) (args2 conv))))
in conv end;
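(* The main tactic: find an equation t = u over a field (either a field locale
   instance located via field_struct, or a field type class), reify both sides
   as fexprs, normalise FSub e e' with the computation cv, and resolve the goal
   with the resulting instance of feval_eq; the remaining ring equation is
   handled by ring_tac in the field method below. *)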
fun field_tac in_prem ctxt =
SUBGOAL (fn (g, i) =>
let
val (prems, concl) = Logic.strip_horn g;
fun find_eq s = (case s of
(_ $ \<^Const_>\<open>HOL.eq T for t u\<close>) =>
(case (field_struct t, field_struct u) of
(SOME R, _) => SOME ((t, u), R, T, NONE, mk_in_carrier ctxt R [], reif_fexpr)
| (_, SOME R) => SOME ((t, u), R, T, NONE, mk_in_carrier ctxt R [], reif_fexpr)
| _ =>
if Sign.of_sort (Proof_Context.theory_of ctxt) (T, \<^sort>\<open>field\<close>)
then SOME ((t, u), mk_ring T, T, SOME T, K @{thm in_carrier_trivial}, reif_fexpr')
else NONE)
| _ => NONE);
val ((t, u), R, T, optT, mkic, reif) =
(case get_first find_eq
(if in_prem then prems else [concl]) of
SOME q => q
| NONE => error "cannot determine field");
val rls as (_, _, _, _, feval_eq) =
get_field_simps ctxt (Option.map (Thm.ctyp_of ctxt) optT) R;
val xs = [] |> Term.add_frees t |> Term.add_frees u |> filter (equal T o snd);
val cxs = Thm.cterm_of ctxt (HOLogic.mk_list T (map Free xs));
val ce = Thm.cterm_of ctxt (reif xs t);
val ce' = Thm.cterm_of ctxt (reif xs u);
val fnorm = cv ctxt \<^instantiate>\<open>e = ce and e' = ce' in cterm \<open>fnorm (FSub e e')\<close>\<close>;
val (_, [n, dc]) = strip_app (Thm.rhs_of fnorm);
val (_, [_, c]) = strip_app dc;
val th =
Conv.fconv_rule (Conv.concl_conv 1 (Conv.arg_conv
(binop_conv
(binop_conv
(K (feval_conv rls cxs ce)) (K (feval_conv rls cxs ce')))
(Conv.arg1_conv (K (peval_conv rls cxs n))))))
([mkic xs,
HOLogic.mk_obj_eq fnorm,
HOLogic.mk_obj_eq (nonzero_conv rls cxs c) RS @{thm iffD2}] MRS
feval_eq);
val th' = Drule.rotate_prems 1
(th RS (if in_prem then @{thm iffD1} else @{thm iffD2}));
in
if in_prem then
dresolve_tac ctxt [th'] 1 THEN defer_tac 1
else
resolve_tac ctxt [th'] 1
end);
end
\<close>
context field
begin
-local_setup \<open>
-Local_Theory.declaration {syntax = false, pervasive = false}
- (fn phi => Field_Tac.Field_Simps.map (Ring_Tac.insert_rules Field_Tac.eq_field_simps
+declaration \<open>fn phi =>
+ Field_Tac.Field_Simps.map (Ring_Tac.insert_rules Field_Tac.eq_field_simps
(Morphism.term phi \<^term>\<open>R\<close>,
(Morphism.fact phi @{thms feval.simps [meta] feval_Cnst [meta]},
Morphism.fact phi @{thms peval.simps [meta] peval_Cnst [meta]},
Morphism.fact phi @{thms nonzero.simps [meta] nonzero_singleton [meta]},
singleton (Morphism.fact phi) @{thm nth_el_Cons [meta]},
- singleton (Morphism.fact phi) @{thm feval_eq}))))
+ singleton (Morphism.fact phi) @{thm feval_eq})))
\<close>
end
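(* the \<open>field\<close> proof method: Field_Tac.field_tac reduces an equation over a field (a premise
   in "prems" mode, the conclusion otherwise) to an equation over the underlying ring, which
   Ring_Tac.ring_tac then finishes off *)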
method_setup field = \<open>
Scan.lift (Args.mode "prems") -- Attrib.thms >> (fn (in_prem, thms) => fn ctxt =>
SIMPLE_METHOD' (Field_Tac.field_tac in_prem ctxt THEN' Ring_Tac.ring_tac in_prem thms ctxt))
\<close> "reduce equations over fields to equations over rings"
end
diff --git a/src/HOL/Decision_Procs/ferrante_rackoff_data.ML b/src/HOL/Decision_Procs/ferrante_rackoff_data.ML
--- a/src/HOL/Decision_Procs/ferrante_rackoff_data.ML
+++ b/src/HOL/Decision_Procs/ferrante_rackoff_data.ML
@@ -1,140 +1,140 @@
(* Title: HOL/Decision_Procs/ferrante_rackoff_data.ML
Author: Amine Chaieb, TU Muenchen
Context data for Ferrante and Rackoff's algorithm for quantifier
elimination in dense linear orders.
*)
signature FERRANTE_RACKOF_DATA =
sig
datatype ord = Lt | Le | Gt | Ge | Eq | NEq | Nox
type entry
val get: Proof.context -> (thm * entry) list
val del: attribute
val add: entry -> attribute
val funs: thm ->
{isolate_conv: morphism -> Proof.context -> cterm list -> cterm -> thm,
whatis: morphism -> cterm -> cterm -> ord,
- simpset: morphism -> Proof.context -> simpset} -> declaration
+ simpset: morphism -> Proof.context -> simpset} -> Morphism.declaration
val match: Proof.context -> cterm -> entry option
end;
structure Ferrante_Rackoff_Data: FERRANTE_RACKOF_DATA =
struct
(* data *)
datatype ord = Lt | Le | Gt | Ge | Eq | NEq | Nox
type entry =
{minf: thm list, pinf: thm list, nmi: thm list, npi: thm list,
ld: thm list, qe: thm, atoms : cterm list} *
{isolate_conv: Proof.context -> cterm list -> cterm -> thm,
whatis : cterm -> cterm -> ord,
simpset : simpset};
val eq_key = Thm.eq_thm;
fun eq_data arg = eq_fst eq_key arg;
structure Data = Generic_Data
(
type T = (thm * entry) list;
val empty = [];
fun merge data : T = AList.merge eq_key (K true) data;
);
val get = Data.get o Context.Proof;
fun del_data key = remove eq_data (key, []);
val del = Thm.declaration_attribute (Data.map o del_data);
fun add entry =
Thm.declaration_attribute (fn key => fn context => context |> Data.map
(del_data key #> cons (key, entry)));
(* extra-logical functions *)
fun funs raw_key {isolate_conv = icv, whatis = wi, simpset = ss} phi context =
context |> Data.map (fn data =>
let
val key = Morphism.thm phi raw_key;
val _ = AList.defined eq_key data key orelse
raise THM ("No data entry for structure key", 0, [key]);
val fns =
{isolate_conv = icv phi, whatis = wi phi, simpset = ss phi (Context.proof_of context)};
in AList.map_entry eq_key key (apsnd (K fns)) data end);
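(* looks up the entry whose atom patterns match the given term, instantiating the stored
   theorems with the resulting type instantiation *)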
fun match ctxt tm =
let
fun match_inst ({minf, pinf, nmi, npi, ld, qe, atoms}, fns) pat =
let
fun h instT =
let
val substT = Thm.instantiate (instT, Vars.empty);
val substT_cterm = Drule.cterm_rule substT;
val minf' = map substT minf
val pinf' = map substT pinf
val nmi' = map substT nmi
val npi' = map substT npi
val ld' = map substT ld
val qe' = substT qe
val atoms' = map substT_cterm atoms
val result = ({minf = minf', pinf = pinf', nmi = nmi', npi = npi',
ld = ld', qe = qe', atoms = atoms'}, fns)
in SOME result end
in (case try Thm.match (pat, tm) of
NONE => NONE
| SOME (instT, _) => h instT)
end;
fun match_struct (_,
entry as ({atoms = atoms, ...}, _): entry) =
get_first (match_inst entry) atoms;
in get_first match_struct (get ctxt) end;
(* concrete syntax *)
local
val minfN = "minf";
val pinfN = "pinf";
val nmiN = "nmi";
val npiN = "npi";
val lin_denseN = "lindense";
val qeN = "qe"
val atomsN = "atoms"
val simpsN = "simps"
fun keyword k = Scan.lift (Args.$$$ k -- Args.colon) >> K ();
val any_keyword =
keyword minfN || keyword pinfN || keyword nmiN
|| keyword npiN || keyword lin_denseN || keyword qeN
|| keyword atomsN || keyword simpsN;
val thms = Scan.repeats (Scan.unless any_keyword Attrib.multi_thm);
val terms = thms >> map Drule.dest_term;
in
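(* attribute syntax: \<open>ferrack minf: ... pinf: ... nmi: ... npi: ... lindense: ... qe: ... atoms: ...\<close>,
   where exactly one qe theorem is expected *)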
val _ =
Theory.setup
(Attrib.setup \<^binding>\<open>ferrack\<close>
((keyword minfN |-- thms)
-- (keyword pinfN |-- thms)
-- (keyword nmiN |-- thms)
-- (keyword npiN |-- thms)
-- (keyword lin_denseN |-- thms)
-- (keyword qeN |-- thms)
-- (keyword atomsN |-- terms) >>
(fn ((((((minf,pinf),nmi),npi),lin_dense),qe), atoms)=>
if length qe = 1 then
add ({minf = minf, pinf = pinf, nmi = nmi, npi = npi, ld = lin_dense,
qe = hd qe, atoms = atoms},
{isolate_conv = undefined, whatis = undefined, simpset = HOL_ss})
else error "only one theorem for qe!"))
"Ferrante Rackoff data");
end;
end;
diff --git a/src/HOL/Eisbach/Tests.thy b/src/HOL/Eisbach/Tests.thy
--- a/src/HOL/Eisbach/Tests.thy
+++ b/src/HOL/Eisbach/Tests.thy
@@ -1,606 +1,604 @@
(* Title: HOL/Eisbach/Tests.thy
Author: Daniel Matichuk, NICTA/UNSW
*)
section \<open>Miscellaneous Eisbach tests\<close>
theory Tests
imports Main Eisbach_Tools
begin
subsection \<open>Named Theorems Tests\<close>
named_theorems foo
method foo declares foo = (rule foo)
lemma
assumes A [foo]: A
shows A
apply foo
done
method abs_used for P = (match (P) in "\<lambda>a. ?Q" \<Rightarrow> fail \<bar> _ \<Rightarrow> -)
subsection \<open>Match Tests\<close>
notepad
begin
have dup: "\<And>A. A \<Longrightarrow> A \<Longrightarrow> A" by simp
fix A y
have "(\<And>x. A x) \<Longrightarrow> A y"
apply (rule dup, match premises in Y: "\<And>B. P B" for P \<Rightarrow> \<open>match (P) in A \<Rightarrow> \<open>print_fact Y, rule Y\<close>\<close>)
apply (rule dup, match premises in Y: "\<And>B :: 'a. P B" for P \<Rightarrow> \<open>match (P) in A \<Rightarrow> \<open>print_fact Y, rule Y\<close>\<close>)
apply (rule dup, match premises in Y: "\<And>B :: 'a. P B" for P \<Rightarrow> \<open>match conclusion in "P y" for y \<Rightarrow> \<open>print_fact Y, print_term y, rule Y[where B=y]\<close>\<close>)
apply (rule dup, match premises in Y: "\<And>B :: 'a. P B" for P \<Rightarrow> \<open>match conclusion in "P z" for z \<Rightarrow> \<open>print_fact Y, print_term y, rule Y[where B=z]\<close>\<close>)
apply (rule dup, match conclusion in "P y" for P \<Rightarrow> \<open>match premises in Y: "\<And>z. P z" \<Rightarrow> \<open>print_fact Y, rule Y[where z=y]\<close>\<close>)
apply (match premises in Y: "\<And>z :: 'a. P z" for P \<Rightarrow> \<open>match conclusion in "P y" \<Rightarrow> \<open>print_fact Y, rule Y[where z=y]\<close>\<close>)
done
assume X: "\<And>x. A x" "A y"
have "A y"
apply (match X in Y:"\<And>B. A B" and Y':"B ?x" for B \<Rightarrow> \<open>print_fact Y[where B=y], print_term B\<close>)
apply (match X in Y:"B ?x" and Y':"B ?x" for B \<Rightarrow> \<open>print_fact Y, print_term B\<close>)
apply (match X in Y:"B x" and Y':"B x" for B x \<Rightarrow> \<open>print_fact Y, print_term B, print_term x\<close>)
apply (insert X)
apply (match premises in Y:"\<And>B. A B" and Y':"B y" for B and y :: 'a \<Rightarrow> \<open>print_fact Y[where B=y], print_term B\<close>)
apply (match premises in Y:"B ?x" and Y':"B ?x" for B \<Rightarrow> \<open>print_fact Y, print_term B\<close>)
apply (match premises in Y:"B x" and Y':"B x" for B x \<Rightarrow> \<open>print_fact Y, print_term B\<close>)
apply (match conclusion in "P x" and "P y" for P x \<Rightarrow> \<open>print_term P, print_term x\<close>)
apply assumption
done
{
fix B x y
assume X: "\<And>x y. B x x y"
have "B x x y"
by (match X in Y:"\<And>y. B y y z" for z \<Rightarrow> \<open>rule Y[where y=x]\<close>)
fix A B
have "(\<And>x y. A (B x) y) \<Longrightarrow> A (B x) y"
by (match premises in Y: "\<And>xx. ?H (B xx)" \<Rightarrow> \<open>rule Y\<close>)
}
(* match focusing retains prems *)
fix B x
have "(\<And>x. A x) \<Longrightarrow> (\<And>z. B z) \<Longrightarrow> A y \<Longrightarrow> B x"
apply (match premises in Y: "\<And>z :: 'a. A z" \<Rightarrow> \<open>match premises in Y': "\<And>z :: 'b. B z" \<Rightarrow> \<open>print_fact Y, print_fact Y', rule Y'[where z=x]\<close>\<close>)
done
(* Attributes *)
fix C
have "(\<And>x :: 'a. A x) \<Longrightarrow> (\<And>z. B z) \<Longrightarrow> A y \<Longrightarrow> B x \<and> B x \<and> A y"
apply (intro conjI)
apply (match premises in Y: "\<And>z :: 'a. A z" and Y'[intro]:"\<And>z :: 'b. B z" \<Rightarrow> fastforce)
apply (match premises in Y: "\<And>z :: 'a. A z" \<Rightarrow> \<open>match premises in Y'[intro]:"\<And>z :: 'b. B z" \<Rightarrow> fastforce\<close>)
apply (match premises in Y[thin]: "\<And>z :: 'a. A z" \<Rightarrow> \<open>(match premises in Y':"\<And>z :: 'a. A z" \<Rightarrow> \<open>print_fact Y,fail\<close> \<bar> _ \<Rightarrow> \<open>print_fact Y\<close>)\<close>)
(*apply (match premises in Y: "\<And>z :: 'b. B z" \<Rightarrow> \<open>(match premises in Y'[thin]:"\<And>z :: 'b. B z" \<Rightarrow> \<open>(match premises in Y':"\<And>z :: 'a. A z" \<Rightarrow> fail \<bar> Y': _ \<Rightarrow> -)\<close>)\<close>)*)
apply assumption
done
fix A B C D
have "\<And>uu'' uu''' uu uu'. (\<And>x :: 'a. A uu' x) \<Longrightarrow> D uu y \<Longrightarrow> (\<And>z. B uu z) \<Longrightarrow> C uu y \<Longrightarrow> (\<And>z y. C uu z) \<Longrightarrow> B uu x \<and> B uu x \<and> C uu y"
apply (match premises in Y[thin]: "\<And>z :: 'a. A ?zz' z" and
Y'[thin]: "\<And>rr :: 'b. B ?zz rr" \<Rightarrow>
\<open>print_fact Y, print_fact Y', intro conjI, rule Y', insert Y', insert Y'[where rr=x]\<close>)
apply (match premises in Y:"B ?u ?x" \<Rightarrow> \<open>rule Y\<close>)
apply (insert TrueI)
apply (match premises in Y'[thin]: "\<And>ff. B uu ff" for uu \<Rightarrow> \<open>insert Y', drule meta_spec[where x=x]\<close>)
apply assumption
done
(* Multi-matches. As many facts as match are bound. *)
fix A B C x
have "(\<And>x :: 'a. A x) \<Longrightarrow> (\<And>y :: 'a. B y) \<Longrightarrow> C y \<Longrightarrow> (A x \<and> B y \<and> C y)"
apply (match premises in Y[thin]: "\<And>z :: 'a. ?A z" (multi) \<Rightarrow> \<open>intro conjI, (rule Y)+\<close>)
apply (match premises in Y[thin]: "\<And>z :: 'a. ?A z" (multi) \<Rightarrow> fail \<bar> "C y" \<Rightarrow> -) (* multi-match must bind something *)
apply (match premises in Y: "C y" \<Rightarrow> \<open>rule Y\<close>)
done
fix A B C x
have "(\<And>x :: 'a. A x) \<Longrightarrow> (\<And>y :: 'a. B y) \<Longrightarrow> C y \<Longrightarrow> (A x \<and> B y \<and> C y)"
apply (match premises in Y[thin]: "\<And>z. ?A z" (multi) \<Rightarrow> \<open>intro conjI, (rule Y)+\<close>)
apply (match premises in Y[thin]: "\<And>z. ?A z" (multi) \<Rightarrow> fail \<bar> "C y" \<Rightarrow> -) (* multi-match must bind something *)
apply (match premises in Y: "C y" \<Rightarrow> \<open>rule Y\<close>)
done
fix A B C P Q and x :: 'a and y :: 'a
have "(\<And>x y :: 'a. A x y \<and> Q) \<Longrightarrow> (\<And>a b. B (a :: 'a) (b :: 'a) \<and> Q) \<Longrightarrow> (\<And>x y. C (x :: 'a) (y :: 'a) \<and> P) \<Longrightarrow> A y x \<and> B y x"
by (match premises in Y: "\<And>z a. ?A (z :: 'a) (a :: 'a) \<and> R" (multi) for R \<Rightarrow> \<open>rule conjI, rule Y[where z=x,THEN conjunct1], rule Y[THEN conjunct1]\<close>)
(* We may use for-fixes in multi-matches too. All bound facts must agree on the fixed term. *)
fix A B C x
have "(\<And>y :: 'a. B y \<and> C y) \<Longrightarrow> (\<And>x :: 'a. A x \<and> B x) \<Longrightarrow> (\<And>y :: 'a. A y \<and> C y) \<Longrightarrow> C y \<Longrightarrow> (A x \<and> B y \<and> C y)"
apply (match premises in Y: "\<And>x :: 'a. P x \<and> ?U x" (multi) for P \<Rightarrow>
\<open>match (P) in B \<Rightarrow> fail
\<bar> "\<lambda>a. B" \<Rightarrow> fail
\<bar> _ \<Rightarrow> -,
intro conjI, (rule Y[THEN conjunct1])\<close>)
apply (rule dup)
apply (match premises in Y':"\<And>x :: 'a. ?U x \<and> Q x" and Y: "\<And>x :: 'a. Q x \<and> ?U x" (multi) for Q \<Rightarrow> \<open>insert Y[THEN conjunct1]\<close>)
apply assumption (* Previous match requires that Q is consistent *)
apply (match premises in Y: "\<And>z :: 'a. ?A z \<longrightarrow> False" (multi) \<Rightarrow> \<open>print_fact Y, fail\<close> \<bar> "C y" \<Rightarrow> \<open>print_term C\<close>) (* multi-match must bind something *)
apply (match premises in Y: "\<And>x. B x \<and> C x" \<Rightarrow> \<open>intro conjI Y[THEN conjunct1]\<close>)
apply (match premises in Y: "C ?x" \<Rightarrow> \<open>rule Y\<close>)
done
(* All bindings must be tried for a particular theorem.
However, all combinations are NOT explored. *)
fix B A C
assume asms:"\<And>a b. B (a :: 'a) (b :: 'a) \<and> Q" "\<And>x :: 'a. A x x \<and> Q" "\<And>a b. C (a :: 'a) (b :: 'a) \<and> Q"
have "B y x \<and> C x y \<and> B x y \<and> C y x \<and> A x x"
apply (intro conjI)
apply (match asms in Y: "\<And>z a. ?A (z :: 'a) (a :: 'a) \<and> R" (multi) for R \<Rightarrow> \<open>rule Y[where z=x,THEN conjunct1]\<close>)
apply (match asms in Y: "\<And>z a. ?A (z :: 'a) (a :: 'a) \<and> R" (multi) for R \<Rightarrow> \<open>rule Y[where a=x,THEN conjunct1]\<close>)
apply (match asms in Y: "\<And>z a. ?A (z :: 'a) (a :: 'a) \<and> R" (multi) for R \<Rightarrow> \<open>rule Y[where a=x,THEN conjunct1]\<close>)
apply (match asms in Y: "\<And>z a. ?A (z :: 'a) (a :: 'a) \<and> R" (multi) for R \<Rightarrow> \<open>rule Y[where z=x,THEN conjunct1]\<close>)
apply (match asms in Y: "\<And>z a. A (z :: 'a) (a :: 'a) \<and> R" for R \<Rightarrow> fail \<bar> _ \<Rightarrow> -)
apply (rule asms[THEN conjunct1])
done
(* Attributes *)
fix A B C x
have "(\<And>x :: 'a. A x \<and> B x) \<Longrightarrow> (\<And>y :: 'a. A y \<and> C y) \<Longrightarrow> (\<And>y :: 'a. B y \<and> C y) \<Longrightarrow> C y \<Longrightarrow> (A x \<and> B y \<and> C y)"
apply (match premises in Y: "\<And>x :: 'a. P x \<and> ?U x" (multi) for P \<Rightarrow> \<open>match Y[THEN conjunct1] in Y':"?H" (multi) \<Rightarrow> \<open>intro conjI,rule Y'\<close>\<close>)
apply (match premises in Y: "\<And>x :: 'a. P x \<and> ?U x" (multi) for P \<Rightarrow> \<open>match Y[THEN conjunct2] in Y':"?H" (multi) \<Rightarrow> \<open>rule Y'\<close>\<close>)
apply assumption
done
(* Removed feature for now *)
(*
fix A B C x
have "(\<And>x :: 'a. A x \<and> B x) \<Longrightarrow> (\<And>y :: 'a. A y \<and> C y) \<Longrightarrow> (\<And>y :: 'a. B y \<and> C y) \<Longrightarrow> C y \<Longrightarrow> (A x \<and> B y \<and> C y)"
apply (match prems in Y: "\<And>x :: 'a. P x \<and> ?U x" (multi) for P \<Rightarrow> \<open>match \<open>K @{thms Y TrueI}\<close> in Y':"?H" (multi) \<Rightarrow> \<open>rule conjI; (rule Y')?\<close>\<close>)
apply (match prems in Y: "\<And>x :: 'a. P x \<and> ?U x" (multi) for P \<Rightarrow> \<open>match \<open>K [@{thm Y}]\<close> in Y':"?H" (multi) \<Rightarrow> \<open>rule Y'\<close>\<close>)
done
*)
(* Testing THEN_ALL_NEW within match *)
fix A B C x
have "(\<And>x :: 'a. A x \<and> B x) \<Longrightarrow> (\<And>y :: 'a. A y \<and> C y) \<Longrightarrow> (\<And>y :: 'a. B y \<and> C y) \<Longrightarrow> C y \<Longrightarrow> (A x \<and> B y \<and> C y)"
apply (match premises in Y: "\<And>x :: 'a. P x \<and> ?U x" (multi) for P \<Rightarrow> \<open>intro conjI ; ((rule Y[THEN conjunct1])?); rule Y[THEN conjunct2] \<close>)
done
(* Cut tests *)
fix A B C D
have "D \<and> C \<Longrightarrow> A \<and> B \<Longrightarrow> A \<longrightarrow> C \<Longrightarrow> D \<longrightarrow> True \<Longrightarrow> C"
by (((match premises in I: "P \<and> Q" (cut)
and I': "P \<longrightarrow> ?U" for P Q \<Rightarrow> \<open>rule mp [OF I' I[THEN conjunct1]]\<close>)?), simp)
have "D \<and> C \<Longrightarrow> A \<and> B \<Longrightarrow> A \<longrightarrow> C \<Longrightarrow> D \<longrightarrow> True \<Longrightarrow> C"
by (match premises in I: "P \<and> Q" (cut 2)
and I': "P \<longrightarrow> ?U" for P Q \<Rightarrow> \<open>rule mp [OF I' I[THEN conjunct1]]\<close>)
have "A \<and> B \<Longrightarrow> A \<longrightarrow> C \<Longrightarrow> C"
by (((match premises in I: "P \<and> Q" (cut)
and I': "P \<longrightarrow> ?U" for P Q \<Rightarrow> \<open>rule mp [OF I' I[THEN conjunct1]]\<close>)?, simp) | simp)
fix f x y
have "f x y \<Longrightarrow> f x y"
by (match conclusion in "f x y" for f x y \<Rightarrow> \<open>print_term f\<close>)
fix A B C
assume X: "A \<and> B" "A \<and> C" C
have "A \<and> B \<and> C"
by (match X in H: "A \<and> ?H" (multi, cut) \<Rightarrow>
\<open>match H in "A \<and> C" and "A \<and> B" \<Rightarrow> fail\<close>
| simp add: X)
(* Thinning an inner focus *)
(* Thinning should persist within a match, even when on an external premise *)
fix A
have "(\<And>x. A x \<and> B) \<Longrightarrow> B \<and> C \<Longrightarrow> C"
apply (match premises in H:"\<And>x. A x \<and> B" \<Rightarrow>
\<open>match premises in H'[thin]: "\<And>x. A x \<and> B" \<Rightarrow>
\<open>match premises in H'':"\<And>x. A x \<and> B" \<Rightarrow> fail
\<bar> _ \<Rightarrow> -\<close>
,match premises in H'':"\<And>x. A x \<and> B" \<Rightarrow> fail \<bar> _ \<Rightarrow> -\<close>)
apply (match premises in H:"\<And>x. A x \<and> B" \<Rightarrow> fail
\<bar> H':_ \<Rightarrow> \<open>rule H'[THEN conjunct2]\<close>)
done
(* Local premises *)
(* Only match premises which actually existed in the goal we just focused. *)
fix A
assume asms: "C \<and> D"
have "B \<and> C \<Longrightarrow> C"
by (match premises in _ \<Rightarrow> \<open>insert asms,
match premises (local) in "B \<and> C" \<Rightarrow> fail
\<bar> H:"C \<and> D" \<Rightarrow> \<open>rule H[THEN conjunct1]\<close>\<close>)
end
(* Testing inner focusing. This fails if we don't smash flex-flex pairs produced
by retrofitting. This needs to be done more carefully to avoid smashing
legitimate pairs. *)
schematic_goal "?A x \<Longrightarrow> A x"
apply (match conclusion in "H" for H \<Rightarrow> \<open>match conclusion in Y for Y \<Rightarrow> \<open>print_term Y\<close>\<close>)
apply assumption
done
(* Ensure short-circuit after first match failure *)
lemma
assumes A: "P \<and> Q"
shows "P"
by ((match A in "P \<and> Q" \<Rightarrow> fail \<bar> "?H" \<Rightarrow> -) | simp add: A)
lemma
assumes A: "D \<and> C" "A \<and> B" "A \<longrightarrow> B"
shows "A"
apply ((match A in U: "P \<and> Q" (cut) and "P' \<longrightarrow> Q'" for P Q P' Q' \<Rightarrow>
\<open>simp add: U\<close> \<bar> "?H" \<Rightarrow> -) | -)
apply (simp add: A)
done
subsection \<open>Uses Tests\<close>
ML \<open>
fun test_internal_fact ctxt factnm =
(case \<^try>\<open>Proof_Context.get_thms ctxt factnm\<close> of
NONE => ()
| SOME _ => error "Found internal fact");
\<close>
method uses_test\<^sub>1 uses uses_test\<^sub>1_uses = (rule uses_test\<^sub>1_uses)
lemma assumes A shows A by (uses_test\<^sub>1 uses_test\<^sub>1_uses: assms)
ML \<open>test_internal_fact \<^context> "uses_test\<^sub>1_uses"\<close>
ML \<open>test_internal_fact \<^context> "Tests.uses_test\<^sub>1_uses"\<close>
ML \<open>test_internal_fact \<^context> "Tests.uses_test\<^sub>1.uses_test\<^sub>1_uses"\<close>
subsection \<open>Basic fact passing\<close>
method find_fact for x y :: bool uses facts1 facts2 =
(match facts1 in U: "x" \<Rightarrow> \<open>insert U,
match facts2 in U: "y" \<Rightarrow> \<open>insert U\<close>\<close>)
lemma assumes A: A and B: B shows "A \<and> B"
apply (find_fact "A" "B" facts1: A facts2: B)
apply (rule conjI; assumption)
done
subsection \<open>Testing term and fact passing in recursion\<close>
method recursion_example for x :: bool uses facts =
(match (x) in
"A \<and> B" for A B \<Rightarrow> \<open>(recursion_example A facts: facts, recursion_example B facts: facts)\<close>
\<bar> "?H" \<Rightarrow> \<open>match facts in U: "x" \<Rightarrow> \<open>insert U\<close>\<close>)
lemma
assumes asms: "A" "B" "C" "D"
shows "(A \<and> B) \<and> (C \<and> D)"
apply (recursion_example "(A \<and> B) \<and> (C \<and> D)" facts: asms)
apply simp
done
(* uses facts are not accumulated *)
method recursion_example' for A :: bool and B :: bool uses facts =
(match facts in
H: "A" and H': "B" \<Rightarrow> \<open>recursion_example' "A" "B" facts: H TrueI\<close>
\<bar> "A" and "True" \<Rightarrow> \<open>recursion_example' "A" "B" facts: TrueI\<close>
\<bar> "True" \<Rightarrow> -
\<bar> "PROP ?P" \<Rightarrow> fail)
lemma
assumes asms: "A" "B"
shows "True"
apply (recursion_example' "A" "B" facts: asms)
apply simp
done
(* Method.sections in existing method *)
method my_simp\<^sub>1 uses my_simp\<^sub>1_facts = (simp add: my_simp\<^sub>1_facts)
lemma assumes A shows A by (my_simp\<^sub>1 my_simp\<^sub>1_facts: assms)
(* Method.sections via Eisbach argument parser *)
method uses_test\<^sub>2 uses uses_test\<^sub>2_uses = (uses_test\<^sub>1 uses_test\<^sub>1_uses: uses_test\<^sub>2_uses)
lemma assumes A shows A by (uses_test\<^sub>2 uses_test\<^sub>2_uses: assms)
subsection \<open>Declaration Tests\<close>
named_theorems declare_facts\<^sub>1
method declares_test\<^sub>1 declares declare_facts\<^sub>1 = (rule declare_facts\<^sub>1)
lemma assumes A shows A by (declares_test\<^sub>1 declare_facts\<^sub>1: assms)
lemma assumes A[declare_facts\<^sub>1]: A shows A by declares_test\<^sub>1
subsection \<open>Rule Instantiation Tests\<close>
method my_allE\<^sub>1 for x :: 'a and P :: "'a \<Rightarrow> bool" =
(erule allE [where x = x and P = P])
lemma "\<forall>x. Q x \<Longrightarrow> Q x" by (my_allE\<^sub>1 x Q)
method my_allE\<^sub>2 for x :: 'a and P :: "'a \<Rightarrow> bool" =
(erule allE [of P x])
lemma "\<forall>x. Q x \<Longrightarrow> Q x" by (my_allE\<^sub>2 x Q)
method my_allE\<^sub>3 for x :: 'a and P :: "'a \<Rightarrow> bool" =
(match allE [where 'a = 'a] in X: "\<And>(x :: 'a) P R. \<forall>x. P x \<Longrightarrow> (P x \<Longrightarrow> R) \<Longrightarrow> R" \<Rightarrow>
\<open>erule X [where x = x and P = P]\<close>)
lemma "\<forall>x. Q x \<Longrightarrow> Q x" by (my_allE\<^sub>3 x Q)
method my_allE\<^sub>4 for x :: 'a and P :: "'a \<Rightarrow> bool" =
(match allE [where 'a = 'a] in X: "\<And>(x :: 'a) P R. \<forall>x. P x \<Longrightarrow> (P x \<Longrightarrow> R) \<Longrightarrow> R" \<Rightarrow>
\<open>erule X [of x P]\<close>)
lemma "\<forall>x. Q x \<Longrightarrow> Q x" by (my_allE\<^sub>4 x Q)
subsection \<open>Polymorphism test\<close>
axiomatization foo' :: "'a \<Rightarrow> 'b \<Rightarrow> 'c \<Rightarrow> bool"
axiomatization where foo'_ax1: "foo' x y z \<Longrightarrow> z \<and> y"
axiomatization where foo'_ax2: "foo' x y y \<Longrightarrow> x \<and> z"
axiomatization where foo'_ax3: "foo' (x :: int) y y \<Longrightarrow> y \<and> y"
lemmas my_thms = foo'_ax1 foo'_ax2 foo'_ax3
definition first_id where "first_id x = x"
lemmas my_thms' = my_thms[of "first_id x" for x]
method print_conclusion = (match conclusion in concl for concl \<Rightarrow> \<open>print_term concl\<close>)
lemma
assumes foo: "\<And>x (y :: bool). foo' (A x) B (A x)"
shows "\<And>z. A z \<and> B"
apply
(match conclusion in "f x y" for f y and x :: "'d :: type" \<Rightarrow> \<open>
match my_thms' in R:"\<And>(x :: 'f :: type). ?P (first_id x) \<Longrightarrow> ?R"
and R':"\<And>(x :: 'f :: type). ?P' (first_id x) \<Longrightarrow> ?R'" \<Rightarrow> \<open>
match (x) in "q :: 'f" for q \<Rightarrow> \<open>
rule R[of q,simplified first_id_def],
print_conclusion,
rule foo
\<close>\<close>\<close>)
done
subsection \<open>Unchecked rule instantiation, with the possibility of runtime errors\<close>
named_theorems my_thms_named
declare foo'_ax3[my_thms_named]
method foo_method3 declares my_thms_named =
(match my_thms_named[of (unchecked) z for z] in R:"PROP ?H" \<Rightarrow> \<open>rule R\<close>)
notepad
begin
(* FIXME: Shouldn't need unchecked keyword here. See Tests_Failing.thy *)
fix A B x
have "foo' x B A \<Longrightarrow> A \<and> B"
by (match my_thms[of (unchecked) z for z] in R:"PROP ?H" \<Rightarrow> \<open>rule R\<close>)
fix A B x
note foo'_ax1[my_thms_named]
have "foo' x B A \<Longrightarrow> A \<and> B"
by (match my_thms_named[where x=z for z] in R:"PROP ?H" \<Rightarrow> \<open>rule R\<close>)
fix A B x
note foo'_ax1[my_thms_named] foo'_ax2[my_thms_named] foo'_ax3[my_thms_named]
have "foo' x B A \<Longrightarrow> A \<and> B"
by foo_method3
end
ML \<open>
structure Data = Generic_Data
(
type T = thm list;
val empty: T = [];
fun merge data : T = Thm.merge_thms data;
);
\<close>
local_setup \<open>Local_Theory.add_thms_dynamic (\<^binding>\<open>test_dyn\<close>, Data.get)\<close>
setup \<open>Context.theory_map (Data.put @{thms TrueI})\<close>
method dynamic_thms_test = (rule test_dyn)
locale foo =
fixes A
assumes A : "A"
begin
-local_setup
- \<open>Local_Theory.declaration {pervasive = false, syntax = false}
- (fn phi => Data.put (Morphism.fact phi @{thms A}))\<close>
+declaration \<open>fn phi => Data.put (Morphism.fact phi @{thms A})\<close>
lemma A by dynamic_thms_test
end
notepad
begin
fix A x
assume X: "\<And>x. A x"
have "A x"
by (match X in H[of x]:"\<And>x. A x" \<Rightarrow> \<open>print_fact H,match H in "A x" \<Rightarrow> \<open>rule H\<close>\<close>)
fix A x B
assume X: "\<And>x :: bool. A x \<Longrightarrow> B" "\<And>x. A x"
assume Y: "A B"
have "B \<and> B \<and> B \<and> B \<and> B \<and> B"
apply (intro conjI)
apply (match X in H[OF X(2)]:"\<And>x. A x \<Longrightarrow> B" \<Rightarrow> \<open>print_fact H,rule H\<close>)
apply (match X in H':"\<And>x. A x" and H[OF H']:"\<And>x. A x \<Longrightarrow> B" \<Rightarrow> \<open>print_fact H',print_fact H,rule H\<close>)
apply (match X in H[of Q]:"\<And>x. A x \<Longrightarrow> ?R" and "?P \<Longrightarrow> Q" for Q \<Rightarrow> \<open>print_fact H,rule H, rule Y\<close>)
apply (match X in H[of Q,OF Y]:"\<And>x. A x \<Longrightarrow> ?R" and "?P \<Longrightarrow> Q" for Q \<Rightarrow> \<open>print_fact H,rule H\<close>)
apply (match X in H[OF Y,intro]:"\<And>x. A x \<Longrightarrow> ?R" \<Rightarrow> \<open>print_fact H,fastforce\<close>)
apply (match X in H[intro]:"\<And>x. A x \<Longrightarrow> ?R" \<Rightarrow> \<open>rule H[where x=B], rule Y\<close>)
done
fix x :: "prop" and A
assume X: "TERM x"
assume Y: "\<And>x :: prop. A x"
have "A TERM x"
apply (match X in "PROP y" for y \<Rightarrow> \<open>rule Y[where x="PROP y"]\<close>)
done
end
subsection \<open>Proper context for method parameters\<close>
method add_simp methods m uses f = (match f in H[simp]:_ \<Rightarrow> m)
method add_my_thms methods m uses f = (match f in H[my_thms_named]:_ \<Rightarrow> m)
method rule_my_thms = (rule my_thms_named)
method rule_my_thms' declares my_thms_named = (rule my_thms_named)
lemma
assumes A: A and B: B
shows
"(A \<or> B) \<and> A \<and> A \<and> A"
apply (intro conjI)
apply (add_simp \<open>add_simp simp f: B\<close> f: A)
apply (add_my_thms rule_my_thms f:A)
apply (add_my_thms rule_my_thms' f:A)
apply (add_my_thms \<open>rule my_thms_named\<close> f:A)
done
subsection \<open>Shallow parser tests\<close>
method all_args for A B methods m1 m2 uses f1 f2 declares my_thms_named = fail
lemma True
by (all_args True False - fail f1: TrueI f2: TrueI my_thms_named: TrueI | rule TrueI)
subsection \<open>Method name internalization test\<close>
method test2 = (simp)
method simp = fail
lemma "A \<Longrightarrow> A" by test2
subsection \<open>Dynamic facts\<close>
named_theorems my_thms_named'
method foo_method1 for x =
(match my_thms_named' [of (unchecked) x] in R: "PROP ?H" \<Rightarrow> \<open>rule R\<close>)
lemma
assumes A [my_thms_named']: "\<And>x. A x"
shows "A y"
by (foo_method1 y)
subsection \<open>Eisbach method invocation from ML\<close>
method test_method for x y uses r = (print_term x, print_term y, rule r)
method_setup test_method' = \<open>
Args.term -- Args.term --
(Scan.lift (Args.$$$ "rule" -- Args.colon) |-- Attrib.thms) >>
(fn ((x, y), r) => fn ctxt =>
Method_Closure.apply_method ctxt \<^method>\<open>test_method\<close> [x, y] [r] [] ctxt)
\<close>
lemma
fixes a b :: nat
assumes "a = b"
shows "b = a"
apply (test_method a b)?
apply (test_method' a b rule: refl)?
apply (test_method' a b rule: assms [symmetric])?
done
subsection \<open>Eisbach methods in locales\<close>
locale my_locale1 = fixes A assumes A: A begin
method apply_A =
(match conclusion in "A" \<Rightarrow>
\<open>match A in U:"A" \<Rightarrow>
\<open>print_term A, print_fact A, rule U\<close>\<close>)
end
locale my_locale2 = fixes B assumes B: B begin
interpretation my_locale1 B by (unfold_locales; rule B)
lemma B by apply_A
end
context fixes C assumes C: C begin
interpretation my_locale1 C by (unfold_locales; rule C)
lemma C by apply_A
end
context begin
interpretation my_locale1 "True \<longrightarrow> True" by (unfold_locales; blast)
lemma "True \<longrightarrow> True" by apply_A
end
locale locale_poly = fixes P assumes P: "\<And>x :: 'a. P x" begin
method solve_P for z :: 'a = (rule P[where x = z])
end
context begin
interpretation locale_poly "\<lambda>x:: nat. 0 \<le> x" by (unfold_locales; blast)
lemma "0 \<le> (n :: nat)" by (solve_P n)
end
subsection \<open>Mutual recursion via higher-order methods\<close>
experiment begin
method inner_method methods passed_method = (rule conjI; passed_method)
method outer_method = (inner_method \<open>outer_method\<close> | assumption)
lemma "Q \<Longrightarrow> R \<Longrightarrow> P \<Longrightarrow> (Q \<and> R) \<and> P"
by outer_method
end
end
diff --git a/src/HOL/Eisbach/method_closure.ML b/src/HOL/Eisbach/method_closure.ML
--- a/src/HOL/Eisbach/method_closure.ML
+++ b/src/HOL/Eisbach/method_closure.ML
@@ -1,251 +1,252 @@
(* Title: HOL/Eisbach/method_closure.ML
Author: Daniel Matichuk, NICTA/UNSW
Facilities for treating method syntax as a closure, with abstraction
over terms, facts and other methods.
The 'method' command allows defining new proof methods by combining
existing ones with their usual syntax.
*)
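(* For example, an Isar definition such as \<open>method foo declares foo = (rule foo)\<close> (see
   HOL/Eisbach/Tests.thy) is compiled into such a closure; apply_method below re-applies a
   stored closure to concrete terms, facts and method arguments. *)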
signature METHOD_CLOSURE =
sig
val apply_method: Proof.context -> string -> term list -> thm list list ->
(Proof.context -> Method.method) list -> Proof.context -> thm list -> context_tactic
val method: binding -> (binding * typ option * mixfix) list -> binding list ->
binding list -> binding list -> Token.src -> local_theory -> string * local_theory
val method_cmd: binding -> (binding * string option * mixfix) list -> binding list ->
binding list -> binding list -> Token.src -> local_theory -> string * local_theory
end;
structure Method_Closure: METHOD_CLOSURE =
struct
(* auxiliary data for method definition *)
structure Method_Definition = Proof_Data
(
type T =
(Proof.context -> Method.method) Symtab.table * (*dynamic methods*)
(term list -> Proof.context -> Method.method) Symtab.table (*recursive methods*);
fun init _ : T = (Symtab.empty, Symtab.empty);
);
fun lookup_dynamic_method ctxt full_name =
(case Symtab.lookup (#1 (Method_Definition.get ctxt)) full_name of
SOME m => m ctxt
| NONE => error ("Illegal use of internal Eisbach method: " ^ quote full_name));
val update_dynamic_method = Method_Definition.map o apfst o Symtab.update;
fun get_recursive_method full_name ts ctxt =
(case Symtab.lookup (#2 (Method_Definition.get ctxt)) full_name of
SOME m => m ts ctxt
| NONE => error ("Illegal use of internal Eisbach method: " ^ quote full_name));
val put_recursive_method = Method_Definition.map o apsnd o Symtab.update;
(* stored method closures *)
type closure = {vars: term list, named_thms: string list, methods: string list, body: Method.text};
structure Data = Generic_Data
(
type T = closure Symtab.table;
val empty: T = Symtab.empty;
fun merge data : T = Symtab.merge (K true) data;
);
fun get_closure ctxt name =
(case Symtab.lookup (Data.get (Context.Proof ctxt)) name of
SOME closure => closure
| NONE => error ("Unknown Eisbach method: " ^ quote name));
fun put_closure binding (closure: closure) lthy =
let
val name = Local_Theory.full_name lthy binding;
in
- lthy |> Local_Theory.declaration {syntax = false, pervasive = true} (fn phi =>
- Data.map
- (Symtab.update (name,
- {vars = map (Morphism.term phi) (#vars closure),
- named_thms = #named_thms closure,
- methods = #methods closure,
- body = (Method.map_source o map) (Token.transform phi) (#body closure)})))
+ lthy |> Local_Theory.declaration {syntax = false, pervasive = true, pos = Binding.pos_of binding}
+ (fn phi =>
+ Data.map
+ (Symtab.update (name,
+ {vars = map (Morphism.term phi) (#vars closure),
+ named_thms = #named_thms closure,
+ methods = #methods closure,
+ body = (Method.map_source o map) (Token.transform phi) (#body closure)})))
end;
(* instantiate and evaluate method text *)
fun method_instantiate vars body ts ctxt =
let
val thy = Proof_Context.theory_of ctxt;
val subst = fold (Pattern.match thy) (vars ~~ ts) (Vartab.empty, Vartab.empty);
val morphism = Morphism.term_morphism "method_instantiate" (Envir.subst_term subst);
val body' = (Method.map_source o map) (Token.transform morphism) body;
in Method.evaluate_runtime body' ctxt end;
(** apply method closure **)
fun recursive_method full_name vars body ts =
let val m = method_instantiate vars body
in put_recursive_method (full_name, m) #> m ts end;
fun apply_method ctxt method_name terms facts methods =
let
fun declare_facts (name :: names) (fact :: facts) =
fold (Context.proof_map o Named_Theorems.add_thm name) fact
#> declare_facts names facts
| declare_facts _ [] = I
| declare_facts [] (_ :: _) = error ("Excessive facts for method " ^ quote method_name);
val {vars, named_thms, methods = method_args, body} = get_closure ctxt method_name;
in
declare_facts named_thms facts
#> fold update_dynamic_method (method_args ~~ methods)
#> recursive_method method_name vars body terms
end;
(** define method closure **)
local
fun setup_local_method binding lthy =
let
val full_name = Local_Theory.full_name lthy binding;
fun dynamic_method ctxt = lookup_dynamic_method ctxt full_name;
in
lthy
|> update_dynamic_method (full_name, K Method.fail)
|> Method.local_setup binding (Scan.succeed dynamic_method) "(internal)"
end;
fun check_named_thm ctxt binding =
let
val bname = Binding.name_of binding;
val pos = Binding.pos_of binding;
val full_name = Named_Theorems.check ctxt (bname, pos);
val parser: Method.modifier parser =
Args.$$$ bname -- Args.colon
>> K {init = I, attribute = Named_Theorems.add full_name, pos = pos};
in (full_name, parser) end;
fun parse_term_args args =
Args.context :|-- (fn ctxt =>
let
val ctxt' = Proof_Context.set_mode (Proof_Context.mode_schematic) ctxt;
fun parse T =
(if T = propT then Syntax.parse_prop ctxt' else Syntax.parse_term ctxt')
#> Type.constraint (Type_Infer.paramify_vars T);
fun do_parse' T =
Parse_Tools.name_term >> Parse_Tools.parse_val_cases (parse T);
fun do_parse (Var (_, T)) = do_parse' T
| do_parse (Free (_, T)) = do_parse' T
| do_parse t = error ("Unexpected method parameter: " ^ Syntax.string_of_term ctxt' t);
fun rep [] x = Scan.succeed [] x
| rep (t :: ts) x = (do_parse t ::: rep ts) x;
fun check ts =
let
val (ts, fs) = split_list ts;
val ts' = Syntax.check_terms ctxt' ts |> Variable.polymorphic ctxt';
val _ = ListPair.app (fn (f, t) => f t) (fs, ts');
in ts' end;
in Scan.lift (rep args) >> check end);
fun parse_method_args method_args =
let
fun bind_method (name, text) ctxt =
let
val method = Method.evaluate_runtime text;
val inner_update = method o update_dynamic_method (name, K (method ctxt));
in update_dynamic_method (name, inner_update) ctxt end;
fun rep [] x = Scan.succeed [] x
| rep (m :: ms) x = ((Method.text_closure >> pair m) ::: rep ms) x;
in rep method_args >> fold bind_method end;
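(* common workhorse behind the \<open>method\<close> and \<open>method_cmd\<close> entry points: set up local bindings
   for the method and its "uses"/"declares" arguments, read the body as a closure, and export
   it through a morphism *)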
fun gen_method add_fixes name vars uses declares methods source lthy =
let
val (uses_internal, lthy1) = lthy
|> Proof_Context.concealed
|> Local_Theory.begin_nested
|-> Proof_Context.private_scope
|> Local_Theory.map_background_naming (Name_Space.add_path (Binding.name_of name))
|> Config.put Method.old_section_parser true
|> fold setup_local_method methods
|> fold_map (fn b => Named_Theorems.declare b "") uses;
val (term_args, lthy2) = lthy1
|> add_fixes vars |-> fold_map Proof_Context.inferred_param |>> map Free;
val (named_thms, modifiers) = map (check_named_thm lthy2) (declares @ uses) |> split_list;
val method_args = map (Local_Theory.full_name lthy2) methods;
fun parser args meth =
apfst (Config.put_generic Method.old_section_parser true) #>
(parse_term_args args --
parse_method_args method_args --|
(Scan.depend (fn context =>
Scan.succeed (fold Named_Theorems.clear uses_internal context, ())) --
Method.sections modifiers)) >> (fn (ts, decl) => meth ts o decl);
val full_name = Local_Theory.full_name lthy name;
val lthy3 = lthy2
|> Method.local_setup (Binding.make (Binding.name_of name, Position.none))
(parser term_args (get_recursive_method full_name)) "(internal)"
|> put_recursive_method (full_name, fn _ => fn _ => Method.fail);
val (text, src) =
Method.read_closure (Config.put Proof_Context.dynamic_facts_dummy true lthy3) source;
val morphism =
Variable.export_morphism lthy3
(lthy
|> Proof_Context.transfer (Proof_Context.theory_of lthy3)
|> fold Token.declare_maxidx src
|> Variable.declare_maxidx (Variable.maxidx_of lthy3));
val text' = (Method.map_source o map) (Token.transform morphism) text;
val term_args' = map (Morphism.term morphism) term_args;
in
lthy3
|> Local_Theory.end_nested
|> Proof_Context.restore_naming lthy
|> put_closure name
{vars = term_args', named_thms = named_thms, methods = method_args, body = text'}
|> Method.local_setup name
(Args.context :|-- (fn ctxt =>
let val {body, vars, ...} = get_closure ctxt full_name in
parser vars (recursive_method full_name vars body) end)) ""
|> pair full_name
end;
in
val method = gen_method Proof_Context.add_fixes;
val method_cmd = gen_method Proof_Context.add_fixes_cmd;
end;
val _ =
Outer_Syntax.local_theory \<^command_keyword>\<open>method\<close> "Eisbach method definition"
(Parse.binding -- Parse.for_fixes --
((Scan.optional (\<^keyword>\<open>methods\<close> |-- Parse.!!! (Scan.repeat1 Parse.binding)) []) --
(Scan.optional (\<^keyword>\<open>uses\<close> |-- Parse.!!! (Scan.repeat1 Parse.binding)) [])) --
(Scan.optional (\<^keyword>\<open>declares\<close> |-- Parse.!!! (Scan.repeat1 Parse.binding)) []) --
Parse.!!! (\<^keyword>\<open>=\<close> |-- Parse.args1 (K true)) >>
(fn ((((name, vars), (methods, uses)), declares), src) =>
#2 o method_cmd name vars uses declares methods src));
end;
diff --git a/src/HOL/Enum.thy b/src/HOL/Enum.thy
--- a/src/HOL/Enum.thy
+++ b/src/HOL/Enum.thy
@@ -1,1160 +1,1160 @@
(* Author: Florian Haftmann, TU Muenchen *)
section \<open>Finite types as explicit enumerations\<close>
theory Enum
imports Map Groups_List
begin
subsection \<open>Class \<open>enum\<close>\<close>
class enum =
fixes enum :: "'a list"
fixes enum_all :: "('a \<Rightarrow> bool) \<Rightarrow> bool"
fixes enum_ex :: "('a \<Rightarrow> bool) \<Rightarrow> bool"
assumes UNIV_enum: "UNIV = set enum"
and enum_distinct: "distinct enum"
assumes enum_all_UNIV: "enum_all P \<longleftrightarrow> Ball UNIV P"
assumes enum_ex_UNIV: "enum_ex P \<longleftrightarrow> Bex UNIV P"
\<comment> \<open>tailored towards simple instantiation\<close>
begin
subclass finite proof
qed (simp add: UNIV_enum)
lemma enum_UNIV:
"set enum = UNIV"
by (simp only: UNIV_enum)
lemma in_enum: "x \<in> set enum"
by (simp add: enum_UNIV)
lemma enum_eq_I:
assumes "\<And>x. x \<in> set xs"
shows "set enum = set xs"
proof -
from assms UNIV_eq_I have "UNIV = set xs" by auto
with enum_UNIV show ?thesis by simp
qed
lemma card_UNIV_length_enum:
"card (UNIV :: 'a set) = length enum"
by (simp add: UNIV_enum distinct_card enum_distinct)
lemma enum_all [simp]:
"enum_all = HOL.All"
by (simp add: fun_eq_iff enum_all_UNIV)
lemma enum_ex [simp]:
"enum_ex = HOL.Ex"
by (simp add: fun_eq_iff enum_ex_UNIV)
end
subsection \<open>Implementations using \<^class>\<open>enum\<close>\<close>
subsubsection \<open>Unbounded operations and quantifiers\<close>
lemma Collect_code [code]:
"Collect P = set (filter P enum)"
by (simp add: enum_UNIV)
lemma vimage_code [code]:
"f -` B = set (filter (\<lambda>x. f x \<in> B) enum_class.enum)"
unfolding vimage_def Collect_code ..
definition card_UNIV :: "'a itself \<Rightarrow> nat"
where
[code del]: "card_UNIV TYPE('a) = card (UNIV :: 'a set)"
lemma [code]:
"card_UNIV TYPE('a :: enum) = card (set (Enum.enum :: 'a list))"
by (simp only: card_UNIV_def enum_UNIV)
lemma all_code [code]: "(\<forall>x. P x) \<longleftrightarrow> enum_all P"
by simp
lemma exists_code [code]: "(\<exists>x. P x) \<longleftrightarrow> enum_ex P"
by simp
lemma exists1_code [code]: "(\<exists>!x. P x) \<longleftrightarrow> list_ex1 P enum"
by (auto simp add: list_ex1_iff enum_UNIV)
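(* e.g. together with the \<open>bool\<close> instance below, \<open>value "\<forall>b :: bool. b \<or> \<not> b"\<close>
   evaluates to \<open>True\<close> via all_code and enum_all *)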
subsubsection \<open>An executable choice operator\<close>
definition
[code del]: "enum_the = The"
lemma [code]:
"The P = (case filter P enum of [x] \<Rightarrow> x | _ \<Rightarrow> enum_the P)"
proof -
{
fix a
assume filter_enum: "filter P enum = [a]"
have "The P = a"
proof (rule the_equality)
fix x
assume "P x"
show "x = a"
proof (rule ccontr)
assume "x \<noteq> a"
from filter_enum obtain us vs
where enum_eq: "enum = us @ [a] @ vs"
and "\<forall> x \<in> set us. \<not> P x"
and "\<forall> x \<in> set vs. \<not> P x"
and "P a"
by (auto simp add: filter_eq_Cons_iff) (simp only: filter_empty_conv[symmetric])
with \<open>P x\<close> in_enum[of x, unfolded enum_eq] \<open>x \<noteq> a\<close> show "False" by auto
qed
next
from filter_enum show "P a" by (auto simp add: filter_eq_Cons_iff)
qed
}
from this show ?thesis
unfolding enum_the_def by (auto split: list.split)
qed
declare [[code abort: enum_the]]
code_printing
constant enum_the \<rightharpoonup> (Eval) "(fn '_ => raise Match)"
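(* \<open>The P\<close> is now executable whenever the generated filter yields exactly one candidate;
   in all other cases evaluation falls back to enum_the and aborts *)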
subsubsection \<open>Equality and order on functions\<close>
instantiation "fun" :: (enum, equal) equal
begin
definition
"HOL.equal f g \<longleftrightarrow> (\<forall>x \<in> set enum. f x = g x)"
instance proof
qed (simp_all add: equal_fun_def fun_eq_iff enum_UNIV)
end
lemma [code]:
"HOL.equal f g \<longleftrightarrow> enum_all (%x. f x = g x)"
by (auto simp add: equal fun_eq_iff)
lemma [code nbe]:
"HOL.equal (f :: _ \<Rightarrow> _) f \<longleftrightarrow> True"
by (fact equal_refl)
lemma order_fun [code]:
fixes f g :: "'a::enum \<Rightarrow> 'b::order"
shows "f \<le> g \<longleftrightarrow> enum_all (\<lambda>x. f x \<le> g x)"
and "f < g \<longleftrightarrow> f \<le> g \<and> enum_ex (\<lambda>x. f x \<noteq> g x)"
by (simp_all add: fun_eq_iff le_fun_def order_less_le)
subsubsection \<open>Operations on relations\<close>
lemma [code]:
"Id = image (\<lambda>x. (x, x)) (set Enum.enum)"
by (auto intro: imageI in_enum)
lemma tranclp_unfold [code]:
"tranclp r a b \<longleftrightarrow> (a, b) \<in> trancl {(x, y). r x y}"
by (simp add: trancl_def)
lemma rtranclp_rtrancl_eq [code]:
"rtranclp r x y \<longleftrightarrow> (x, y) \<in> rtrancl {(x, y). r x y}"
by (simp add: rtrancl_def)
lemma max_ext_eq [code]:
"max_ext R = {(X, Y). finite X \<and> finite Y \<and> Y \<noteq> {} \<and> (\<forall>x. x \<in> X \<longrightarrow> (\<exists>xa \<in> Y. (x, xa) \<in> R))}"
by (auto simp add: max_ext.simps)
lemma max_extp_eq [code]:
"max_extp r x y \<longleftrightarrow> (x, y) \<in> max_ext {(x, y). r x y}"
by (simp add: max_ext_def)
lemma mlex_eq [code]:
"f <*mlex*> R = {(x, y). f x < f y \<or> (f x \<le> f y \<and> (x, y) \<in> R)}"
by (auto simp add: mlex_prod_def)
subsubsection \<open>Bounded accessible part\<close>
primrec bacc :: "('a \<times> 'a) set \<Rightarrow> nat \<Rightarrow> 'a set"
where
"bacc r 0 = {x. \<forall> y. (y, x) \<notin> r}"
| "bacc r (Suc n) = (bacc r n \<union> {x. \<forall>y. (y, x) \<in> r \<longrightarrow> y \<in> bacc r n})"
lemma bacc_subseteq_acc:
"bacc r n \<subseteq> Wellfounded.acc r"
by (induct n) (auto intro: acc.intros)
lemma bacc_mono:
"n \<le> m \<Longrightarrow> bacc r n \<subseteq> bacc r m"
by (induct rule: dec_induct) auto
lemma bacc_upper_bound:
"bacc (r :: ('a \<times> 'a) set) (card (UNIV :: 'a::finite set)) = (\<Union>n. bacc r n)"
proof -
have "mono (bacc r)" unfolding mono_def by (simp add: bacc_mono)
moreover have "\<forall>n. bacc r n = bacc r (Suc n) \<longrightarrow> bacc r (Suc n) = bacc r (Suc (Suc n))" by auto
moreover have "finite (range (bacc r))" by auto
ultimately show ?thesis
by (intro finite_mono_strict_prefix_implies_finite_fixpoint)
(auto intro: finite_mono_remains_stable_implies_strict_prefix)
qed
lemma acc_subseteq_bacc:
assumes "finite r"
shows "Wellfounded.acc r \<subseteq> (\<Union>n. bacc r n)"
proof
fix x
assume "x \<in> Wellfounded.acc r"
then have "\<exists>n. x \<in> bacc r n"
proof (induct x arbitrary: rule: acc.induct)
case (accI x)
then have "\<forall>y. \<exists> n. (y, x) \<in> r \<longrightarrow> y \<in> bacc r n" by simp
from choice[OF this] obtain n where n: "\<forall>y. (y, x) \<in> r \<longrightarrow> y \<in> bacc r (n y)" ..
obtain n where "\<And>y. (y, x) \<in> r \<Longrightarrow> y \<in> bacc r n"
proof
fix y assume y: "(y, x) \<in> r"
with n have "y \<in> bacc r (n y)" by auto
moreover have "n y <= Max ((\<lambda>(y, x). n y) ` r)"
using y \<open>finite r\<close> by (auto intro!: Max_ge)
note bacc_mono[OF this, of r]
ultimately show "y \<in> bacc r (Max ((\<lambda>(y, x). n y) ` r))" by auto
qed
then show ?case
by (auto simp add: Let_def intro!: exI[of _ "Suc n"])
qed
then show "x \<in> (\<Union>n. bacc r n)" by auto
qed
lemma acc_bacc_eq:
fixes A :: "('a :: finite \<times> 'a) set"
assumes "finite A"
shows "Wellfounded.acc A = bacc A (card (UNIV :: 'a set))"
using assms by (metis acc_subseteq_bacc bacc_subseteq_acc bacc_upper_bound order_eq_iff)
lemma [code]:
fixes xs :: "('a::finite \<times> 'a) list"
shows "Wellfounded.acc (set xs) = bacc (set xs) (card_UNIV TYPE('a))"
by (simp add: card_UNIV_def acc_bacc_eq)
subsection \<open>Default instances for \<^class>\<open>enum\<close>\<close>
lemma map_of_zip_enum_is_Some:
assumes "length ys = length (enum :: 'a::enum list)"
shows "\<exists>y. map_of (zip (enum :: 'a::enum list) ys) x = Some y"
proof -
from assms have "x \<in> set (enum :: 'a::enum list) \<longleftrightarrow>
(\<exists>y. map_of (zip (enum :: 'a::enum list) ys) x = Some y)"
by (auto intro!: map_of_zip_is_Some)
then show ?thesis using enum_UNIV by auto
qed
lemma map_of_zip_enum_inject:
fixes xs ys :: "'b::enum list"
assumes length: "length xs = length (enum :: 'a::enum list)"
"length ys = length (enum :: 'a::enum list)"
and map_of: "the \<circ> map_of (zip (enum :: 'a::enum list) xs) = the \<circ> map_of (zip (enum :: 'a::enum list) ys)"
shows "xs = ys"
proof -
have "map_of (zip (enum :: 'a list) xs) = map_of (zip (enum :: 'a list) ys)"
proof
fix x :: 'a
from length map_of_zip_enum_is_Some obtain y1 y2
where "map_of (zip (enum :: 'a list) xs) x = Some y1"
and "map_of (zip (enum :: 'a list) ys) x = Some y2" by blast
moreover from map_of
have "the (map_of (zip (enum :: 'a::enum list) xs) x) = the (map_of (zip (enum :: 'a::enum list) ys) x)"
by (auto dest: fun_cong)
ultimately show "map_of (zip (enum :: 'a::enum list) xs) x = map_of (zip (enum :: 'a::enum list) ys) x"
by simp
qed
with length enum_distinct show "xs = ys" by (rule map_of_zip_inject)
qed
definition all_n_lists :: "(('a :: enum) list \<Rightarrow> bool) \<Rightarrow> nat \<Rightarrow> bool"
where
"all_n_lists P n \<longleftrightarrow> (\<forall>xs \<in> set (List.n_lists n enum). P xs)"
lemma [code]:
"all_n_lists P n \<longleftrightarrow> (if n = 0 then P [] else enum_all (%x. all_n_lists (%xs. P (x # xs)) (n - 1)))"
unfolding all_n_lists_def enum_all
by (cases n) (auto simp add: enum_UNIV)
definition ex_n_lists :: "(('a :: enum) list \<Rightarrow> bool) \<Rightarrow> nat \<Rightarrow> bool"
where
"ex_n_lists P n \<longleftrightarrow> (\<exists>xs \<in> set (List.n_lists n enum). P xs)"
lemma [code]:
"ex_n_lists P n \<longleftrightarrow> (if n = 0 then P [] else enum_ex (%x. ex_n_lists (%xs. P (x # xs)) (n - 1)))"
unfolding ex_n_lists_def enum_ex
by (cases n) (auto simp add: enum_UNIV)
instantiation "fun" :: (enum, enum) enum
begin
definition
"enum = map (\<lambda>ys. the \<circ> map_of (zip (enum::'a list) ys)) (List.n_lists (length (enum::'a::enum list)) enum)"
definition
"enum_all P = all_n_lists (\<lambda>bs. P (the \<circ> map_of (zip enum bs))) (length (enum :: 'a list))"
definition
"enum_ex P = ex_n_lists (\<lambda>bs. P (the \<circ> map_of (zip enum bs))) (length (enum :: 'a list))"
instance proof
show "UNIV = set (enum :: ('a \<Rightarrow> 'b) list)"
proof (rule UNIV_eq_I)
fix f :: "'a \<Rightarrow> 'b"
have "f = the \<circ> map_of (zip (enum :: 'a::enum list) (map f enum))"
by (auto simp add: map_of_zip_map fun_eq_iff intro: in_enum)
then show "f \<in> set enum"
by (auto simp add: enum_fun_def set_n_lists intro: in_enum)
qed
next
from map_of_zip_enum_inject
show "distinct (enum :: ('a \<Rightarrow> 'b) list)"
by (auto intro!: inj_onI simp add: enum_fun_def
distinct_map distinct_n_lists enum_distinct set_n_lists)
next
fix P
show "enum_all (P :: ('a \<Rightarrow> 'b) \<Rightarrow> bool) = Ball UNIV P"
proof
assume "enum_all P"
show "Ball UNIV P"
proof
fix f :: "'a \<Rightarrow> 'b"
have f: "f = the \<circ> map_of (zip (enum :: 'a::enum list) (map f enum))"
by (auto simp add: map_of_zip_map fun_eq_iff intro: in_enum)
from \<open>enum_all P\<close> have "P (the \<circ> map_of (zip enum (map f enum)))"
unfolding enum_all_fun_def all_n_lists_def
apply (simp add: set_n_lists)
apply (erule_tac x="map f enum" in allE)
apply (auto intro!: in_enum)
done
from this f show "P f" by auto
qed
next
assume "Ball UNIV P"
from this show "enum_all P"
unfolding enum_all_fun_def all_n_lists_def by auto
qed
next
fix P
show "enum_ex (P :: ('a \<Rightarrow> 'b) \<Rightarrow> bool) = Bex UNIV P"
proof
assume "enum_ex P"
from this show "Bex UNIV P"
unfolding enum_ex_fun_def ex_n_lists_def by auto
next
assume "Bex UNIV P"
from this obtain f where "P f" ..
have f: "f = the \<circ> map_of (zip (enum :: 'a::enum list) (map f enum))"
by (auto simp add: map_of_zip_map fun_eq_iff intro: in_enum)
from \<open>P f\<close> this have "P (the \<circ> map_of (zip (enum :: 'a::enum list) (map f enum)))"
by auto
from this show "enum_ex P"
unfolding enum_ex_fun_def ex_n_lists_def
apply (auto simp add: set_n_lists)
apply (rule_tac x="map f enum" in exI)
apply (auto intro!: in_enum)
done
qed
qed
end
lemma enum_fun_code [code]: "enum = (let enum_a = (enum :: 'a::{enum, equal} list)
in map (\<lambda>ys. the \<circ> map_of (zip enum_a ys)) (List.n_lists (length enum_a) enum))"
by (simp add: enum_fun_def Let_def)
lemma enum_all_fun_code [code]:
"enum_all P = (let enum_a = (enum :: 'a::{enum, equal} list)
in all_n_lists (\<lambda>bs. P (the \<circ> map_of (zip enum_a bs))) (length enum_a))"
by (simp only: enum_all_fun_def Let_def)
lemma enum_ex_fun_code [code]:
"enum_ex P = (let enum_a = (enum :: 'a::{enum, equal} list)
in ex_n_lists (\<lambda>bs. P (the \<circ> map_of (zip enum_a bs))) (length enum_a))"
by (simp only: enum_ex_fun_def Let_def)
instantiation set :: (enum) enum
begin
definition
"enum = map set (subseqs enum)"
definition
"enum_all P \<longleftrightarrow> (\<forall>A\<in>set enum. P (A::'a set))"
definition
"enum_ex P \<longleftrightarrow> (\<exists>A\<in>set enum. P (A::'a set))"
instance proof
qed (simp_all add: enum_set_def enum_all_set_def enum_ex_set_def subseqs_powset distinct_set_subseqs
enum_distinct enum_UNIV)
end
instantiation unit :: enum
begin
definition
"enum = [()]"
definition
"enum_all P = P ()"
definition
"enum_ex P = P ()"
instance proof
qed (auto simp add: enum_unit_def enum_all_unit_def enum_ex_unit_def)
end
instantiation bool :: enum
begin
definition
"enum = [False, True]"
definition
"enum_all P \<longleftrightarrow> P False \<and> P True"
definition
"enum_ex P \<longleftrightarrow> P False \<or> P True"
instance proof
qed (simp_all only: enum_bool_def enum_all_bool_def enum_ex_bool_def UNIV_bool, simp_all)
end
instantiation prod :: (enum, enum) enum
begin
definition
"enum = List.product enum enum"
definition
"enum_all P = enum_all (%x. enum_all (%y. P (x, y)))"
definition
"enum_ex P = enum_ex (%x. enum_ex (%y. P (x, y)))"
instance
by standard
(simp_all add: enum_prod_def distinct_product
enum_UNIV enum_distinct enum_all_prod_def enum_ex_prod_def)
end
instantiation sum :: (enum, enum) enum
begin
definition
"enum = map Inl enum @ map Inr enum"
definition
"enum_all P \<longleftrightarrow> enum_all (\<lambda>x. P (Inl x)) \<and> enum_all (\<lambda>x. P (Inr x))"
definition
"enum_ex P \<longleftrightarrow> enum_ex (\<lambda>x. P (Inl x)) \<or> enum_ex (\<lambda>x. P (Inr x))"
instance proof
qed (simp_all only: enum_sum_def enum_all_sum_def enum_ex_sum_def UNIV_sum,
auto simp add: enum_UNIV distinct_map enum_distinct)
end
instantiation option :: (enum) enum
begin
definition
"enum = None # map Some enum"
definition
"enum_all P \<longleftrightarrow> P None \<and> enum_all (\<lambda>x. P (Some x))"
definition
"enum_ex P \<longleftrightarrow> P None \<or> enum_ex (\<lambda>x. P (Some x))"
instance proof
qed (simp_all only: enum_option_def enum_all_option_def enum_ex_option_def UNIV_option_conv,
auto simp add: distinct_map enum_UNIV enum_distinct)
end
subsection \<open>Small finite types\<close>
text \<open>We define small finite types for use in Quickcheck\<close>
datatype (plugins only: code "quickcheck" extraction) finite_1 =
a\<^sub>1
notation (output) a\<^sub>1 ("a\<^sub>1")
lemma UNIV_finite_1:
"UNIV = {a\<^sub>1}"
by (auto intro: finite_1.exhaust)
instantiation finite_1 :: enum
begin
definition
"enum = [a\<^sub>1]"
definition
"enum_all P = P a\<^sub>1"
definition
"enum_ex P = P a\<^sub>1"
instance proof
qed (simp_all only: enum_finite_1_def enum_all_finite_1_def enum_ex_finite_1_def UNIV_finite_1, simp_all)
end
instantiation finite_1 :: linorder
begin
definition less_finite_1 :: "finite_1 \<Rightarrow> finite_1 \<Rightarrow> bool"
where
"x < (y :: finite_1) \<longleftrightarrow> False"
definition less_eq_finite_1 :: "finite_1 \<Rightarrow> finite_1 \<Rightarrow> bool"
where
"x \<le> (y :: finite_1) \<longleftrightarrow> True"
instance
apply (intro_classes)
apply (auto simp add: less_finite_1_def less_eq_finite_1_def)
apply (metis (full_types) finite_1.exhaust)
done
end
instance finite_1 :: "{dense_linorder, wellorder}"
by intro_classes (simp_all add: less_finite_1_def)
instantiation finite_1 :: complete_lattice
begin
definition [simp]: "Inf = (\<lambda>_. a\<^sub>1)"
definition [simp]: "Sup = (\<lambda>_. a\<^sub>1)"
definition [simp]: "bot = a\<^sub>1"
definition [simp]: "top = a\<^sub>1"
definition [simp]: "inf = (\<lambda>_ _. a\<^sub>1)"
definition [simp]: "sup = (\<lambda>_ _. a\<^sub>1)"
instance by intro_classes(simp_all add: less_eq_finite_1_def)
end
instance finite_1 :: complete_distrib_lattice
by standard simp_all
instance finite_1 :: complete_linorder ..
lemma finite_1_eq: "x = a\<^sub>1"
by(cases x) simp
simproc_setup finite_1_eq ("x::finite_1") = \<open>
- fn _ => fn _ => fn ct =>
+ K (K (fn ct =>
(case Thm.term_of ct of
Const (\<^const_name>\<open>a\<^sub>1\<close>, _) => NONE
- | _ => SOME (mk_meta_eq @{thm finite_1_eq}))
+ | _ => SOME (mk_meta_eq @{thm finite_1_eq}))))
\<close>
instantiation finite_1 :: complete_boolean_algebra
begin
definition [simp]: "(-) = (\<lambda>_ _. a\<^sub>1)"
definition [simp]: "uminus = (\<lambda>_. a\<^sub>1)"
instance by intro_classes simp_all
end
instantiation finite_1 ::
"{linordered_ring_strict, linordered_comm_semiring_strict, ordered_comm_ring,
ordered_cancel_comm_monoid_diff, comm_monoid_mult, ordered_ring_abs,
one, modulo, sgn, inverse}"
begin
definition [simp]: "Groups.zero = a\<^sub>1"
definition [simp]: "Groups.one = a\<^sub>1"
definition [simp]: "(+) = (\<lambda>_ _. a\<^sub>1)"
definition [simp]: "(*) = (\<lambda>_ _. a\<^sub>1)"
definition [simp]: "(mod) = (\<lambda>_ _. a\<^sub>1)"
definition [simp]: "abs = (\<lambda>_. a\<^sub>1)"
definition [simp]: "sgn = (\<lambda>_. a\<^sub>1)"
definition [simp]: "inverse = (\<lambda>_. a\<^sub>1)"
definition [simp]: "divide = (\<lambda>_ _. a\<^sub>1)"
instance by intro_classes(simp_all add: less_finite_1_def)
end
declare [[simproc del: finite_1_eq]]
hide_const (open) a\<^sub>1
datatype (plugins only: code "quickcheck" extraction) finite_2 =
a\<^sub>1 | a\<^sub>2
notation (output) a\<^sub>1 ("a\<^sub>1")
notation (output) a\<^sub>2 ("a\<^sub>2")
lemma UNIV_finite_2:
"UNIV = {a\<^sub>1, a\<^sub>2}"
by (auto intro: finite_2.exhaust)
instantiation finite_2 :: enum
begin
definition
"enum = [a\<^sub>1, a\<^sub>2]"
definition
"enum_all P \<longleftrightarrow> P a\<^sub>1 \<and> P a\<^sub>2"
definition
"enum_ex P \<longleftrightarrow> P a\<^sub>1 \<or> P a\<^sub>2"
instance proof
qed (simp_all only: enum_finite_2_def enum_all_finite_2_def enum_ex_finite_2_def UNIV_finite_2, simp_all)
end
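(* Illustrative sketch: the enumeration list is complete, in line with the
   class axiom enum_UNIV. *)
lemma "set (enum :: finite_2 list) = UNIV"
by (simp add: enum_finite_2_def UNIV_finite_2)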
instantiation finite_2 :: linorder
begin
definition less_finite_2 :: "finite_2 \<Rightarrow> finite_2 \<Rightarrow> bool"
where
"x < y \<longleftrightarrow> x = a\<^sub>1 \<and> y = a\<^sub>2"
definition less_eq_finite_2 :: "finite_2 \<Rightarrow> finite_2 \<Rightarrow> bool"
where
"x \<le> y \<longleftrightarrow> x = y \<or> x < (y :: finite_2)"
instance
apply (intro_classes)
apply (auto simp add: less_finite_2_def less_eq_finite_2_def)
apply (metis finite_2.nchotomy)+
done
end
instance finite_2 :: wellorder
by(rule wf_wellorderI)(simp add: less_finite_2_def, intro_classes)
instantiation finite_2 :: complete_lattice
begin
definition "\<Sqinter>A = (if a\<^sub>1 \<in> A then a\<^sub>1 else a\<^sub>2)"
definition "\<Squnion>A = (if a\<^sub>2 \<in> A then a\<^sub>2 else a\<^sub>1)"
definition [simp]: "bot = a\<^sub>1"
definition [simp]: "top = a\<^sub>2"
definition "x \<sqinter> y = (if x = a\<^sub>1 \<or> y = a\<^sub>1 then a\<^sub>1 else a\<^sub>2)"
definition "x \<squnion> y = (if x = a\<^sub>2 \<or> y = a\<^sub>2 then a\<^sub>2 else a\<^sub>1)"
lemma neq_finite_2_a\<^sub>1_iff [simp]: "x \<noteq> a\<^sub>1 \<longleftrightarrow> x = a\<^sub>2"
by(cases x) simp_all
lemma neq_finite_2_a\<^sub>1_iff' [simp]: "a\<^sub>1 \<noteq> x \<longleftrightarrow> x = a\<^sub>2"
by(cases x) simp_all
lemma neq_finite_2_a\<^sub>2_iff [simp]: "x \<noteq> a\<^sub>2 \<longleftrightarrow> x = a\<^sub>1"
by(cases x) simp_all
lemma neq_finite_2_a\<^sub>2_iff' [simp]: "a\<^sub>2 \<noteq> x \<longleftrightarrow> x = a\<^sub>1"
by(cases x) simp_all
instance
proof
fix x :: finite_2 and A
assume "x \<in> A"
then show "\<Sqinter>A \<le> x" "x \<le> \<Squnion>A"
by(cases x; auto simp add: less_eq_finite_2_def less_finite_2_def Inf_finite_2_def Sup_finite_2_def)+
qed(auto simp add: less_eq_finite_2_def less_finite_2_def inf_finite_2_def sup_finite_2_def Inf_finite_2_def Sup_finite_2_def)
end
instance finite_2 :: complete_linorder ..
instance finite_2 :: complete_distrib_lattice ..
instantiation finite_2 :: "{field, idom_abs_sgn, idom_modulo}" begin
definition [simp]: "0 = a\<^sub>1"
definition [simp]: "1 = a\<^sub>2"
definition "x + y = (case (x, y) of (a\<^sub>1, a\<^sub>1) \<Rightarrow> a\<^sub>1 | (a\<^sub>2, a\<^sub>2) \<Rightarrow> a\<^sub>1 | _ \<Rightarrow> a\<^sub>2)"
definition "uminus = (\<lambda>x :: finite_2. x)"
definition "(-) = ((+) :: finite_2 \<Rightarrow> _)"
definition "x * y = (case (x, y) of (a\<^sub>2, a\<^sub>2) \<Rightarrow> a\<^sub>2 | _ \<Rightarrow> a\<^sub>1)"
definition "inverse = (\<lambda>x :: finite_2. x)"
definition "divide = ((*) :: finite_2 \<Rightarrow> _)"
definition "x mod y = (case (x, y) of (a\<^sub>2, a\<^sub>1) \<Rightarrow> a\<^sub>2 | _ \<Rightarrow> a\<^sub>1)"
definition "abs = (\<lambda>x :: finite_2. x)"
definition "sgn = (\<lambda>x :: finite_2. x)"
instance
by standard
(subproofs
\<open>simp_all add: plus_finite_2_def uminus_finite_2_def minus_finite_2_def
times_finite_2_def
inverse_finite_2_def divide_finite_2_def modulo_finite_2_def
abs_finite_2_def sgn_finite_2_def
split: finite_2.splits\<close>)
end
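(* Illustrative sketch: with the operations just defined, finite_2 is the
   two-element field GF(2); addition acts like exclusive-or, so every element
   is its own additive inverse. *)
lemma "x + x = (0 :: finite_2)"
by (cases x) (simp_all add: plus_finite_2_def)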
lemma two_finite_2 [simp]:
"2 = a\<^sub>1"
by (simp add: numeral.simps plus_finite_2_def)
lemma dvd_finite_2_unfold:
"x dvd y \<longleftrightarrow> x = a\<^sub>2 \<or> y = a\<^sub>1"
by (auto simp add: dvd_def times_finite_2_def split: finite_2.splits)
instantiation finite_2 :: "{normalization_semidom, unique_euclidean_semiring}" begin
definition [simp]: "normalize = (id :: finite_2 \<Rightarrow> _)"
definition [simp]: "unit_factor = (id :: finite_2 \<Rightarrow> _)"
definition [simp]: "euclidean_size x = (case x of a\<^sub>1 \<Rightarrow> 0 | a\<^sub>2 \<Rightarrow> 1)"
definition [simp]: "division_segment (x :: finite_2) = 1"
instance
by standard
(subproofs
\<open>auto simp add: divide_finite_2_def times_finite_2_def dvd_finite_2_unfold
split: finite_2.splits\<close>)
end
hide_const (open) a\<^sub>1 a\<^sub>2
datatype (plugins only: code "quickcheck" extraction) finite_3 =
a\<^sub>1 | a\<^sub>2 | a\<^sub>3
notation (output) a\<^sub>1 ("a\<^sub>1")
notation (output) a\<^sub>2 ("a\<^sub>2")
notation (output) a\<^sub>3 ("a\<^sub>3")
lemma UNIV_finite_3:
"UNIV = {a\<^sub>1, a\<^sub>2, a\<^sub>3}"
by (auto intro: finite_3.exhaust)
instantiation finite_3 :: enum
begin
definition
"enum = [a\<^sub>1, a\<^sub>2, a\<^sub>3]"
definition
"enum_all P \<longleftrightarrow> P a\<^sub>1 \<and> P a\<^sub>2 \<and> P a\<^sub>3"
definition
"enum_ex P \<longleftrightarrow> P a\<^sub>1 \<or> P a\<^sub>2 \<or> P a\<^sub>3"
instance proof
qed (simp_all only: enum_finite_3_def enum_all_finite_3_def enum_ex_finite_3_def UNIV_finite_3, simp_all)
end
lemma finite_3_not_eq_unfold:
"x \<noteq> a\<^sub>1 \<longleftrightarrow> x \<in> {a\<^sub>2, a\<^sub>3}"
"x \<noteq> a\<^sub>2 \<longleftrightarrow> x \<in> {a\<^sub>1, a\<^sub>3}"
"x \<noteq> a\<^sub>3 \<longleftrightarrow> x \<in> {a\<^sub>1, a\<^sub>2}"
by (cases x; simp)+
instantiation finite_3 :: linorder
begin
definition less_finite_3 :: "finite_3 \<Rightarrow> finite_3 \<Rightarrow> bool"
where
"x < y = (case x of a\<^sub>1 \<Rightarrow> y \<noteq> a\<^sub>1 | a\<^sub>2 \<Rightarrow> y = a\<^sub>3 | a\<^sub>3 \<Rightarrow> False)"
definition less_eq_finite_3 :: "finite_3 \<Rightarrow> finite_3 \<Rightarrow> bool"
where
"x \<le> y \<longleftrightarrow> x = y \<or> x < (y :: finite_3)"
instance proof (intro_classes)
qed (auto simp add: less_finite_3_def less_eq_finite_3_def split: finite_3.split_asm)
end
instance finite_3 :: wellorder
proof(rule wf_wellorderI)
have "inv_image less_than (case_finite_3 0 1 2) = {(x, y). x < y}"
by(auto simp add: less_finite_3_def split: finite_3.splits)
from this[symmetric] show "wf \<dots>" by simp
qed intro_classes
class finite_lattice = finite + lattice + Inf + Sup + bot + top +
assumes Inf_finite_empty: "Inf {} = Sup UNIV"
assumes Inf_finite_insert: "Inf (insert a A) = a \<sqinter> Inf A"
assumes Sup_finite_empty: "Sup {} = Inf UNIV"
assumes Sup_finite_insert: "Sup (insert a A) = a \<squnion> Sup A"
assumes bot_finite_def: "bot = Inf UNIV"
assumes top_finite_def: "top = Sup UNIV"
begin
subclass complete_lattice
proof
fix x A
show "x \<in> A \<Longrightarrow> \<Sqinter>A \<le> x"
by (metis Set.set_insert abel_semigroup.commute local.Inf_finite_insert local.inf.abel_semigroup_axioms local.inf.left_idem local.inf.orderI)
show "x \<in> A \<Longrightarrow> x \<le> \<Squnion>A"
by (metis Set.set_insert insert_absorb2 local.Sup_finite_insert local.sup.absorb_iff2)
next
fix A z
have "\<Squnion> UNIV = z \<squnion> \<Squnion>UNIV"
by (subst Sup_finite_insert [symmetric], simp add: insert_UNIV)
from this have [simp]: "z \<le> \<Squnion>UNIV"
using local.le_iff_sup by auto
have "(\<forall> x. x \<in> A \<longrightarrow> z \<le> x) \<longrightarrow> z \<le> \<Sqinter>A"
by (rule finite_induct [of A "\<lambda> A . (\<forall> x. x \<in> A \<longrightarrow> z \<le> x) \<longrightarrow> z \<le> \<Sqinter>A"])
(simp_all add: Inf_finite_empty Inf_finite_insert)
from this show "(\<And>x. x \<in> A \<Longrightarrow> z \<le> x) \<Longrightarrow> z \<le> \<Sqinter>A"
by simp
have "\<Sqinter> UNIV = z \<sqinter> \<Sqinter>UNIV"
by (subst Inf_finite_insert [symmetric], simp add: insert_UNIV)
from this have [simp]: "\<Sqinter>UNIV \<le> z"
by (simp add: local.inf.absorb_iff2)
have "(\<forall> x. x \<in> A \<longrightarrow> x \<le> z) \<longrightarrow> \<Squnion>A \<le> z"
by (rule finite_induct [of A "\<lambda> A . (\<forall> x. x \<in> A \<longrightarrow> x \<le> z) \<longrightarrow> \<Squnion>A \<le> z" ], simp_all add: Sup_finite_empty Sup_finite_insert)
from this show " (\<And>x. x \<in> A \<Longrightarrow> x \<le> z) \<Longrightarrow> \<Squnion>A \<le> z"
by blast
next
show "\<Sqinter>{} = \<top>"
by (simp add: Inf_finite_empty top_finite_def)
show " \<Squnion>{} = \<bottom>"
by (simp add: Sup_finite_empty bot_finite_def)
qed
end
class finite_distrib_lattice = finite_lattice + distrib_lattice
begin
lemma finite_inf_Sup: "a \<sqinter> (Sup A) = Sup {a \<sqinter> b | b . b \<in> A}"
proof (rule finite_induct [of A "\<lambda> A . a \<sqinter> (Sup A) = Sup {a \<sqinter> b | b . b \<in> A}"], simp_all)
fix x::"'a"
fix F
assume "x \<notin> F"
assume [simp]: "a \<sqinter> \<Squnion>F = \<Squnion>{a \<sqinter> b |b. b \<in> F}"
have [simp]: " insert (a \<sqinter> x) {a \<sqinter> b |b. b \<in> F} = {a \<sqinter> b |b. b = x \<or> b \<in> F}"
by blast
have "a \<sqinter> (x \<squnion> \<Squnion>F) = a \<sqinter> x \<squnion> a \<sqinter> \<Squnion>F"
by (simp add: inf_sup_distrib1)
also have "... = a \<sqinter> x \<squnion> \<Squnion>{a \<sqinter> b |b. b \<in> F}"
by simp
also have "... = \<Squnion>{a \<sqinter> b |b. b = x \<or> b \<in> F}"
by (unfold Sup_insert[THEN sym], simp)
finally show "a \<sqinter> (x \<squnion> \<Squnion>F) = \<Squnion>{a \<sqinter> b |b. b = x \<or> b \<in> F}"
by simp
qed
lemma finite_Inf_Sup: "\<Sqinter>(Sup ` A) \<le> \<Squnion>(Inf ` {f ` A |f. \<forall>Y\<in>A. f Y \<in> Y})"
proof (rule finite_induct [of A "\<lambda>A. \<Sqinter>(Sup ` A) \<le> \<Squnion>(Inf ` {f ` A |f. \<forall>Y\<in>A. f Y \<in> Y})"], simp_all add: finite_UnionD)
fix x::"'a set"
fix F
assume "x \<notin> F"
have [simp]: "{\<Squnion>x \<sqinter> b |b . b \<in> Inf ` {f ` F |f. \<forall>Y\<in>F. f Y \<in> Y} } = {\<Squnion>x \<sqinter> (Inf (f ` F)) |f . (\<forall>Y\<in>F. f Y \<in> Y)}"
by auto
define fa where "fa = (\<lambda> (b::'a) f Y . (if Y = x then b else f Y))"
have "\<And>f b. \<forall>Y\<in>F. f Y \<in> Y \<Longrightarrow> b \<in> x \<Longrightarrow> insert b (f ` (F \<inter> {Y. Y \<noteq> x})) = insert (fa b f x) (fa b f ` F) \<and> fa b f x \<in> x \<and> (\<forall>Y\<in>F. fa b f Y \<in> Y)"
by (auto simp add: fa_def)
from this have B: "\<And>f b. \<forall>Y\<in>F. f Y \<in> Y \<Longrightarrow> b \<in> x \<Longrightarrow> fa b f ` ({x} \<union> F) \<in> {insert (f x) (f ` F) |f. f x \<in> x \<and> (\<forall>Y\<in>F. f Y \<in> Y)}"
by blast
have [simp]: "\<And>f b. \<forall>Y\<in>F. f Y \<in> Y \<Longrightarrow> b \<in> x \<Longrightarrow> b \<sqinter> (\<Sqinter>x\<in>F. f x) \<le> \<Squnion>(Inf ` {insert (f x) (f ` F) |f. f x \<in> x \<and> (\<forall>Y\<in>F. f Y \<in> Y)})"
using B apply (rule SUP_upper2)
using \<open>x \<notin> F\<close> apply (simp_all add: fa_def Inf_union_distrib)
apply (simp add: image_mono Inf_superset_mono inf.coboundedI2)
done
assume "\<Sqinter>(Sup ` F) \<le> \<Squnion>(Inf ` {f ` F |f. \<forall>Y\<in>F. f Y \<in> Y})"
from this have "\<Squnion>x \<sqinter> \<Sqinter>(Sup ` F) \<le> \<Squnion>x \<sqinter> \<Squnion>(Inf ` {f ` F |f. \<forall>Y\<in>F. f Y \<in> Y})"
using inf.coboundedI2 by auto
also have "... = Sup {\<Squnion>x \<sqinter> (Inf (f ` F)) |f . (\<forall>Y\<in>F. f Y \<in> Y)}"
by (simp add: finite_inf_Sup)
also have "... = Sup {Sup {Inf (f ` F) \<sqinter> b | b . b \<in> x} |f . (\<forall>Y\<in>F. f Y \<in> Y)}"
by (subst inf_commute) (simp add: finite_inf_Sup)
also have "... \<le> \<Squnion>(Inf ` {insert (f x) (f ` F) |f. f x \<in> x \<and> (\<forall>Y\<in>F. f Y \<in> Y)})"
apply (rule Sup_least, clarsimp)+
apply (subst inf_commute, simp)
done
finally show "\<Squnion>x \<sqinter> \<Sqinter>(Sup ` F) \<le> \<Squnion>(Inf ` {insert (f x) (f ` F) |f. f x \<in> x \<and> (\<forall>Y\<in>F. f Y \<in> Y)})"
by simp
qed
subclass complete_distrib_lattice
by (standard, rule finite_Inf_Sup)
end
instantiation finite_3 :: finite_lattice
begin
definition "\<Sqinter>A = (if a\<^sub>1 \<in> A then a\<^sub>1 else if a\<^sub>2 \<in> A then a\<^sub>2 else a\<^sub>3)"
definition "\<Squnion>A = (if a\<^sub>3 \<in> A then a\<^sub>3 else if a\<^sub>2 \<in> A then a\<^sub>2 else a\<^sub>1)"
definition [simp]: "bot = a\<^sub>1"
definition [simp]: "top = a\<^sub>3"
definition [simp]: "inf = (min :: finite_3 \<Rightarrow> _)"
definition [simp]: "sup = (max :: finite_3 \<Rightarrow> _)"
instance
proof
qed (auto simp add: Inf_finite_3_def Sup_finite_3_def max_def min_def less_eq_finite_3_def less_finite_3_def split: finite_3.split)
end
instance finite_3 :: complete_lattice ..
instance finite_3 :: finite_distrib_lattice
proof
qed (auto simp add: min_def max_def)
instance finite_3 :: complete_distrib_lattice ..
instance finite_3 :: complete_linorder ..
instantiation finite_3 :: "{field, idom_abs_sgn, idom_modulo}" begin
definition [simp]: "0 = a\<^sub>1"
definition [simp]: "1 = a\<^sub>2"
definition
"x + y = (case (x, y) of
(a\<^sub>1, a\<^sub>1) \<Rightarrow> a\<^sub>1 | (a\<^sub>2, a\<^sub>3) \<Rightarrow> a\<^sub>1 | (a\<^sub>3, a\<^sub>2) \<Rightarrow> a\<^sub>1
| (a\<^sub>1, a\<^sub>2) \<Rightarrow> a\<^sub>2 | (a\<^sub>2, a\<^sub>1) \<Rightarrow> a\<^sub>2 | (a\<^sub>3, a\<^sub>3) \<Rightarrow> a\<^sub>2
| _ \<Rightarrow> a\<^sub>3)"
definition "- x = (case x of a\<^sub>1 \<Rightarrow> a\<^sub>1 | a\<^sub>2 \<Rightarrow> a\<^sub>3 | a\<^sub>3 \<Rightarrow> a\<^sub>2)"
definition "x - y = x + (- y :: finite_3)"
definition "x * y = (case (x, y) of (a\<^sub>2, a\<^sub>2) \<Rightarrow> a\<^sub>2 | (a\<^sub>3, a\<^sub>3) \<Rightarrow> a\<^sub>2 | (a\<^sub>2, a\<^sub>3) \<Rightarrow> a\<^sub>3 | (a\<^sub>3, a\<^sub>2) \<Rightarrow> a\<^sub>3 | _ \<Rightarrow> a\<^sub>1)"
definition "inverse = (\<lambda>x :: finite_3. x)"
definition "x div y = x * inverse (y :: finite_3)"
definition "x mod y = (case y of a\<^sub>1 \<Rightarrow> x | _ \<Rightarrow> a\<^sub>1)"
definition "abs = (\<lambda>x. case x of a\<^sub>3 \<Rightarrow> a\<^sub>2 | _ \<Rightarrow> x)"
definition "sgn = (\<lambda>x :: finite_3. x)"
instance
by standard
(subproofs
\<open>simp_all add: plus_finite_3_def uminus_finite_3_def minus_finite_3_def
times_finite_3_def
inverse_finite_3_def divide_finite_3_def modulo_finite_3_def
abs_finite_3_def sgn_finite_3_def
less_finite_3_def
split: finite_3.splits\<close>)
end
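(* Illustrative sketch: finite_3 is the field GF(3), with a\<^sub>2 playing the role
   of 1 and a\<^sub>3 the role of - 1, so cubing is the identity map. *)
lemma "x * x * x = (x :: finite_3)"
by (cases x) (simp_all add: times_finite_3_def)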
lemma two_finite_3 [simp]:
"2 = a\<^sub>3"
by (simp add: numeral.simps plus_finite_3_def)
lemma dvd_finite_3_unfold:
"x dvd y \<longleftrightarrow> x = a\<^sub>2 \<or> x = a\<^sub>3 \<or> y = a\<^sub>1"
by (cases x) (auto simp add: dvd_def times_finite_3_def split: finite_3.splits)
instantiation finite_3 :: "{normalization_semidom, unique_euclidean_semiring}" begin
definition [simp]: "normalize x = (case x of a\<^sub>3 \<Rightarrow> a\<^sub>2 | _ \<Rightarrow> x)"
definition [simp]: "unit_factor = (id :: finite_3 \<Rightarrow> _)"
definition [simp]: "euclidean_size x = (case x of a\<^sub>1 \<Rightarrow> 0 | _ \<Rightarrow> 1)"
definition [simp]: "division_segment (x :: finite_3) = 1"
instance
proof
fix x :: finite_3
assume "x \<noteq> 0"
then show "is_unit (unit_factor x)"
by (cases x) (simp_all add: dvd_finite_3_unfold)
qed
(subproofs
\<open>auto simp add: divide_finite_3_def times_finite_3_def
dvd_finite_3_unfold inverse_finite_3_def plus_finite_3_def
split: finite_3.splits\<close>)
end
hide_const (open) a\<^sub>1 a\<^sub>2 a\<^sub>3
datatype (plugins only: code "quickcheck" extraction) finite_4 =
a\<^sub>1 | a\<^sub>2 | a\<^sub>3 | a\<^sub>4
notation (output) a\<^sub>1 ("a\<^sub>1")
notation (output) a\<^sub>2 ("a\<^sub>2")
notation (output) a\<^sub>3 ("a\<^sub>3")
notation (output) a\<^sub>4 ("a\<^sub>4")
lemma UNIV_finite_4:
"UNIV = {a\<^sub>1, a\<^sub>2, a\<^sub>3, a\<^sub>4}"
by (auto intro: finite_4.exhaust)
instantiation finite_4 :: enum
begin
definition
"enum = [a\<^sub>1, a\<^sub>2, a\<^sub>3, a\<^sub>4]"
definition
"enum_all P \<longleftrightarrow> P a\<^sub>1 \<and> P a\<^sub>2 \<and> P a\<^sub>3 \<and> P a\<^sub>4"
definition
"enum_ex P \<longleftrightarrow> P a\<^sub>1 \<or> P a\<^sub>2 \<or> P a\<^sub>3 \<or> P a\<^sub>4"
instance proof
qed (simp_all only: enum_finite_4_def enum_all_finite_4_def enum_ex_finite_4_def UNIV_finite_4, simp_all)
end
instantiation finite_4 :: finite_distrib_lattice begin
text \<open>\<^term>\<open>a\<^sub>1\<close> $<$ \<^term>\<open>a\<^sub>2\<close>, \<^term>\<open>a\<^sub>3\<close> $<$ \<^term>\<open>a\<^sub>4\<close>,
but \<^term>\<open>a\<^sub>2\<close> and \<^term>\<open>a\<^sub>3\<close> are incomparable.\<close>
definition
"x < y \<longleftrightarrow> (case (x, y) of
(a\<^sub>1, a\<^sub>1) \<Rightarrow> False | (a\<^sub>1, _) \<Rightarrow> True
| (a\<^sub>2, a\<^sub>4) \<Rightarrow> True
| (a\<^sub>3, a\<^sub>4) \<Rightarrow> True | _ \<Rightarrow> False)"
definition
"x \<le> y \<longleftrightarrow> (case (x, y) of
(a\<^sub>1, _) \<Rightarrow> True
| (a\<^sub>2, a\<^sub>2) \<Rightarrow> True | (a\<^sub>2, a\<^sub>4) \<Rightarrow> True
| (a\<^sub>3, a\<^sub>3) \<Rightarrow> True | (a\<^sub>3, a\<^sub>4) \<Rightarrow> True
| (a\<^sub>4, a\<^sub>4) \<Rightarrow> True | _ \<Rightarrow> False)"
definition
"\<Sqinter>A = (if a\<^sub>1 \<in> A \<or> a\<^sub>2 \<in> A \<and> a\<^sub>3 \<in> A then a\<^sub>1 else if a\<^sub>2 \<in> A then a\<^sub>2 else if a\<^sub>3 \<in> A then a\<^sub>3 else a\<^sub>4)"
definition
"\<Squnion>A = (if a\<^sub>4 \<in> A \<or> a\<^sub>2 \<in> A \<and> a\<^sub>3 \<in> A then a\<^sub>4 else if a\<^sub>2 \<in> A then a\<^sub>2 else if a\<^sub>3 \<in> A then a\<^sub>3 else a\<^sub>1)"
definition [simp]: "bot = a\<^sub>1"
definition [simp]: "top = a\<^sub>4"
definition
"x \<sqinter> y = (case (x, y) of
(a\<^sub>1, _) \<Rightarrow> a\<^sub>1 | (_, a\<^sub>1) \<Rightarrow> a\<^sub>1 | (a\<^sub>2, a\<^sub>3) \<Rightarrow> a\<^sub>1 | (a\<^sub>3, a\<^sub>2) \<Rightarrow> a\<^sub>1
| (a\<^sub>2, _) \<Rightarrow> a\<^sub>2 | (_, a\<^sub>2) \<Rightarrow> a\<^sub>2
| (a\<^sub>3, _) \<Rightarrow> a\<^sub>3 | (_, a\<^sub>3) \<Rightarrow> a\<^sub>3
| _ \<Rightarrow> a\<^sub>4)"
definition
"x \<squnion> y = (case (x, y) of
(a\<^sub>4, _) \<Rightarrow> a\<^sub>4 | (_, a\<^sub>4) \<Rightarrow> a\<^sub>4 | (a\<^sub>2, a\<^sub>3) \<Rightarrow> a\<^sub>4 | (a\<^sub>3, a\<^sub>2) \<Rightarrow> a\<^sub>4
| (a\<^sub>2, _) \<Rightarrow> a\<^sub>2 | (_, a\<^sub>2) \<Rightarrow> a\<^sub>2
| (a\<^sub>3, _) \<Rightarrow> a\<^sub>3 | (_, a\<^sub>3) \<Rightarrow> a\<^sub>3
| _ \<Rightarrow> a\<^sub>1)"
instance
by standard
(subproofs
\<open>auto simp add: less_finite_4_def less_eq_finite_4_def Inf_finite_4_def Sup_finite_4_def
inf_finite_4_def sup_finite_4_def split: finite_4.splits\<close>)
end
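(* Illustrative sketch: the two middle elements are indeed incomparable in the
   order defined above. *)
lemma "\<not> a\<^sub>2 \<le> (a\<^sub>3 :: finite_4) \<and> \<not> a\<^sub>3 \<le> (a\<^sub>2 :: finite_4)"
by (simp add: less_eq_finite_4_def)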
instance finite_4 :: complete_lattice ..
instance finite_4 :: complete_distrib_lattice ..
instantiation finite_4 :: complete_boolean_algebra begin
definition "- x = (case x of a\<^sub>1 \<Rightarrow> a\<^sub>4 | a\<^sub>2 \<Rightarrow> a\<^sub>3 | a\<^sub>3 \<Rightarrow> a\<^sub>2 | a\<^sub>4 \<Rightarrow> a\<^sub>1)"
definition "x - y = x \<sqinter> - (y :: finite_4)"
instance
by standard
(subproofs
\<open>simp_all add: inf_finite_4_def sup_finite_4_def uminus_finite_4_def minus_finite_4_def
split: finite_4.splits\<close>)
end
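(* Illustrative sketch: the complement swaps the two incomparable middle
   elements and exchanges top and bottom. *)
lemma "- (a\<^sub>2 :: finite_4) = a\<^sub>3" and "- (a\<^sub>1 :: finite_4) = a\<^sub>4"
by (simp_all add: uminus_finite_4_def)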
hide_const (open) a\<^sub>1 a\<^sub>2 a\<^sub>3 a\<^sub>4
datatype (plugins only: code "quickcheck" extraction) finite_5 =
a\<^sub>1 | a\<^sub>2 | a\<^sub>3 | a\<^sub>4 | a\<^sub>5
notation (output) a\<^sub>1 ("a\<^sub>1")
notation (output) a\<^sub>2 ("a\<^sub>2")
notation (output) a\<^sub>3 ("a\<^sub>3")
notation (output) a\<^sub>4 ("a\<^sub>4")
notation (output) a\<^sub>5 ("a\<^sub>5")
lemma UNIV_finite_5:
"UNIV = {a\<^sub>1, a\<^sub>2, a\<^sub>3, a\<^sub>4, a\<^sub>5}"
by (auto intro: finite_5.exhaust)
instantiation finite_5 :: enum
begin
definition
"enum = [a\<^sub>1, a\<^sub>2, a\<^sub>3, a\<^sub>4, a\<^sub>5]"
definition
"enum_all P \<longleftrightarrow> P a\<^sub>1 \<and> P a\<^sub>2 \<and> P a\<^sub>3 \<and> P a\<^sub>4 \<and> P a\<^sub>5"
definition
"enum_ex P \<longleftrightarrow> P a\<^sub>1 \<or> P a\<^sub>2 \<or> P a\<^sub>3 \<or> P a\<^sub>4 \<or> P a\<^sub>5"
instance proof
qed (simp_all only: enum_finite_5_def enum_all_finite_5_def enum_ex_finite_5_def UNIV_finite_5, simp_all)
end
instantiation finite_5 :: finite_lattice
begin
text \<open>The non-distributive pentagon lattice $N_5$\<close>
definition
"x < y \<longleftrightarrow> (case (x, y) of
(a\<^sub>1, a\<^sub>1) \<Rightarrow> False | (a\<^sub>1, _) \<Rightarrow> True
| (a\<^sub>2, a\<^sub>3) \<Rightarrow> True | (a\<^sub>2, a\<^sub>5) \<Rightarrow> True
| (a\<^sub>3, a\<^sub>5) \<Rightarrow> True
| (a\<^sub>4, a\<^sub>5) \<Rightarrow> True | _ \<Rightarrow> False)"
definition
"x \<le> y \<longleftrightarrow> (case (x, y) of
(a\<^sub>1, _) \<Rightarrow> True
| (a\<^sub>2, a\<^sub>2) \<Rightarrow> True | (a\<^sub>2, a\<^sub>3) \<Rightarrow> True | (a\<^sub>2, a\<^sub>5) \<Rightarrow> True
| (a\<^sub>3, a\<^sub>3) \<Rightarrow> True | (a\<^sub>3, a\<^sub>5) \<Rightarrow> True
| (a\<^sub>4, a\<^sub>4) \<Rightarrow> True | (a\<^sub>4, a\<^sub>5) \<Rightarrow> True
| (a\<^sub>5, a\<^sub>5) \<Rightarrow> True | _ \<Rightarrow> False)"
definition
"\<Sqinter>A =
(if a\<^sub>1 \<in> A \<or> a\<^sub>4 \<in> A \<and> (a\<^sub>2 \<in> A \<or> a\<^sub>3 \<in> A) then a\<^sub>1
else if a\<^sub>2 \<in> A then a\<^sub>2
else if a\<^sub>3 \<in> A then a\<^sub>3
else if a\<^sub>4 \<in> A then a\<^sub>4
else a\<^sub>5)"
definition
"\<Squnion>A =
(if a\<^sub>5 \<in> A \<or> a\<^sub>4 \<in> A \<and> (a\<^sub>2 \<in> A \<or> a\<^sub>3 \<in> A) then a\<^sub>5
else if a\<^sub>3 \<in> A then a\<^sub>3
else if a\<^sub>2 \<in> A then a\<^sub>2
else if a\<^sub>4 \<in> A then a\<^sub>4
else a\<^sub>1)"
definition [simp]: "bot = a\<^sub>1"
definition [simp]: "top = a\<^sub>5"
definition
"x \<sqinter> y = (case (x, y) of
(a\<^sub>1, _) \<Rightarrow> a\<^sub>1 | (_, a\<^sub>1) \<Rightarrow> a\<^sub>1 | (a\<^sub>2, a\<^sub>4) \<Rightarrow> a\<^sub>1 | (a\<^sub>4, a\<^sub>2) \<Rightarrow> a\<^sub>1 | (a\<^sub>3, a\<^sub>4) \<Rightarrow> a\<^sub>1 | (a\<^sub>4, a\<^sub>3) \<Rightarrow> a\<^sub>1
| (a\<^sub>2, _) \<Rightarrow> a\<^sub>2 | (_, a\<^sub>2) \<Rightarrow> a\<^sub>2
| (a\<^sub>3, _) \<Rightarrow> a\<^sub>3 | (_, a\<^sub>3) \<Rightarrow> a\<^sub>3
| (a\<^sub>4, _) \<Rightarrow> a\<^sub>4 | (_, a\<^sub>4) \<Rightarrow> a\<^sub>4
| _ \<Rightarrow> a\<^sub>5)"
definition
"x \<squnion> y = (case (x, y) of
(a\<^sub>5, _) \<Rightarrow> a\<^sub>5 | (_, a\<^sub>5) \<Rightarrow> a\<^sub>5 | (a\<^sub>2, a\<^sub>4) \<Rightarrow> a\<^sub>5 | (a\<^sub>4, a\<^sub>2) \<Rightarrow> a\<^sub>5 | (a\<^sub>3, a\<^sub>4) \<Rightarrow> a\<^sub>5 | (a\<^sub>4, a\<^sub>3) \<Rightarrow> a\<^sub>5
| (a\<^sub>3, _) \<Rightarrow> a\<^sub>3 | (_, a\<^sub>3) \<Rightarrow> a\<^sub>3
| (a\<^sub>2, _) \<Rightarrow> a\<^sub>2 | (_, a\<^sub>2) \<Rightarrow> a\<^sub>2
| (a\<^sub>4, _) \<Rightarrow> a\<^sub>4 | (_, a\<^sub>4) \<Rightarrow> a\<^sub>4
| _ \<Rightarrow> a\<^sub>1)"
instance
by standard
(subproofs
\<open>auto simp add: less_eq_finite_5_def less_finite_5_def inf_finite_5_def sup_finite_5_def
Inf_finite_5_def Sup_finite_5_def split: finite_5.splits if_split_asm\<close>)
end
instance finite_5 :: complete_lattice ..
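(* Illustrative sketch: $N_5$ is the standard witness for non-distributivity;
   here is one concrete failing instance of the distributive law. *)
lemma "a\<^sub>2 \<squnion> (a\<^sub>4 \<sqinter> a\<^sub>3) \<noteq> (a\<^sub>2 \<squnion> a\<^sub>4) \<sqinter> (a\<^sub>2 \<squnion> (a\<^sub>3 :: finite_5))"
by (simp add: inf_finite_5_def sup_finite_5_def)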
hide_const (open) a\<^sub>1 a\<^sub>2 a\<^sub>3 a\<^sub>4 a\<^sub>5
subsection \<open>Closing up\<close>
hide_type (open) finite_1 finite_2 finite_3 finite_4 finite_5
hide_const (open) enum enum_all enum_ex all_n_lists ex_n_lists ntrancl
end
diff --git a/src/HOL/Finite_Set.thy b/src/HOL/Finite_Set.thy
--- a/src/HOL/Finite_Set.thy
+++ b/src/HOL/Finite_Set.thy
@@ -1,3044 +1,3044 @@
(* Title: HOL/Finite_Set.thy
Author: Tobias Nipkow
Author: Lawrence C Paulson
Author: Markus Wenzel
Author: Jeremy Avigad
Author: Andrei Popescu
*)
section \<open>Finite sets\<close>
theory Finite_Set
imports Product_Type Sum_Type Fields Relation
begin
subsection \<open>Predicate for finite sets\<close>
context notes [[inductive_internals]]
begin
inductive finite :: "'a set \<Rightarrow> bool"
where
emptyI [simp, intro!]: "finite {}"
| insertI [simp, intro!]: "finite A \<Longrightarrow> finite (insert a A)"
end
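(* Illustrative sketch: both introduction rules are declared [simp], so
   finiteness of an explicitly enumerated set is automatic. *)
lemma "finite {a, b, c}"
by simp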
simproc_setup finite_Collect ("finite (Collect P)") = \<open>K Set_Comprehension_Pointfree.simproc\<close>
declare [[simproc del: finite_Collect]]
lemma finite_induct [case_names empty insert, induct set: finite]:
\<comment> \<open>Discharging \<open>x \<notin> F\<close> entails extra work.\<close>
assumes "finite F"
assumes "P {}"
and insert: "\<And>x F. finite F \<Longrightarrow> x \<notin> F \<Longrightarrow> P F \<Longrightarrow> P (insert x F)"
shows "P F"
using \<open>finite F\<close>
proof induct
show "P {}" by fact
next
fix x F
assume F: "finite F" and P: "P F"
show "P (insert x F)"
proof cases
assume "x \<in> F"
then have "insert x F = F" by (rule insert_absorb)
with P show ?thesis by (simp only:)
next
assume "x \<notin> F"
from F this P show ?thesis by (rule insert)
qed
qed
lemma infinite_finite_induct [case_names infinite empty insert]:
assumes infinite: "\<And>A. \<not> finite A \<Longrightarrow> P A"
and empty: "P {}"
and insert: "\<And>x F. finite F \<Longrightarrow> x \<notin> F \<Longrightarrow> P F \<Longrightarrow> P (insert x F)"
shows "P A"
proof (cases "finite A")
case False
with infinite show ?thesis .
next
case True
then show ?thesis by (induct A) (fact empty insert)+
qed
subsubsection \<open>Choice principles\<close>
lemma ex_new_if_finite: \<comment> \<open>does not depend on def of finite at all\<close>
assumes "\<not> finite (UNIV :: 'a set)" and "finite A"
shows "\<exists>a::'a. a \<notin> A"
proof -
from assms have "A \<noteq> UNIV" by blast
then show ?thesis by blast
qed
text \<open>A finite choice principle. Does not need the SOME choice operator.\<close>
lemma finite_set_choice: "finite A \<Longrightarrow> \<forall>x\<in>A. \<exists>y. P x y \<Longrightarrow> \<exists>f. \<forall>x\<in>A. P x (f x)"
proof (induct rule: finite_induct)
case empty
then show ?case by simp
next
case (insert a A)
then obtain f b where f: "\<forall>x\<in>A. P x (f x)" and ab: "P a b"
by auto
show ?case (is "\<exists>f. ?P f")
proof
show "?P (\<lambda>x. if x = a then b else f x)"
using f ab by auto
qed
qed
subsubsection \<open>Finite sets are the images of initial segments of natural numbers\<close>
lemma finite_imp_nat_seg_image_inj_on:
assumes "finite A"
shows "\<exists>(n::nat) f. A = f ` {i. i < n} \<and> inj_on f {i. i < n}"
using assms
proof induct
case empty
show ?case
proof
show "\<exists>f. {} = f ` {i::nat. i < 0} \<and> inj_on f {i. i < 0}"
by simp
qed
next
case (insert a A)
have notinA: "a \<notin> A" by fact
from insert.hyps obtain n f where "A = f ` {i::nat. i < n}" "inj_on f {i. i < n}"
by blast
then have "insert a A = f(n:=a) ` {i. i < Suc n}" and "inj_on (f(n:=a)) {i. i < Suc n}"
using notinA by (auto simp add: image_def Ball_def inj_on_def less_Suc_eq)
then show ?case by blast
qed
lemma nat_seg_image_imp_finite: "A = f ` {i::nat. i < n} \<Longrightarrow> finite A"
proof (induct n arbitrary: A)
case 0
then show ?case by simp
next
case (Suc n)
let ?B = "f ` {i. i < n}"
have finB: "finite ?B" by (rule Suc.hyps[OF refl])
show ?case
proof (cases "\<exists>k<n. f n = f k")
case True
then have "A = ?B"
using Suc.prems by (auto simp:less_Suc_eq)
then show ?thesis
using finB by simp
next
case False
then have "A = insert (f n) ?B"
using Suc.prems by (auto simp:less_Suc_eq)
then show ?thesis using finB by simp
qed
qed
lemma finite_conv_nat_seg_image: "finite A \<longleftrightarrow> (\<exists>n f. A = f ` {i::nat. i < n})"
by (blast intro: nat_seg_image_imp_finite dest: finite_imp_nat_seg_image_inj_on)
lemma finite_imp_inj_to_nat_seg:
assumes "finite A"
shows "\<exists>f n. f ` A = {i::nat. i < n} \<and> inj_on f A"
proof -
from finite_imp_nat_seg_image_inj_on [OF \<open>finite A\<close>]
obtain f and n :: nat where bij: "bij_betw f {i. i<n} A"
by (auto simp: bij_betw_def)
let ?f = "the_inv_into {i. i<n} f"
have "inj_on ?f A \<and> ?f ` A = {i. i<n}"
by (fold bij_betw_def) (rule bij_betw_the_inv_into[OF bij])
then show ?thesis by blast
qed
lemma finite_Collect_less_nat [iff]: "finite {n::nat. n < k}"
by (fastforce simp: finite_conv_nat_seg_image)
lemma finite_Collect_le_nat [iff]: "finite {n::nat. n \<le> k}"
by (simp add: le_eq_less_or_eq Collect_disj_eq)
subsection \<open>Finiteness and common set operations\<close>
lemma rev_finite_subset: "finite B \<Longrightarrow> A \<subseteq> B \<Longrightarrow> finite A"
proof (induct arbitrary: A rule: finite_induct)
case empty
then show ?case by simp
next
case (insert x F A)
have A: "A \<subseteq> insert x F" and r: "A - {x} \<subseteq> F \<Longrightarrow> finite (A - {x})"
by fact+
show "finite A"
proof cases
assume x: "x \<in> A"
with A have "A - {x} \<subseteq> F" by (simp add: subset_insert_iff)
with r have "finite (A - {x})" .
then have "finite (insert x (A - {x}))" ..
also have "insert x (A - {x}) = A"
using x by (rule insert_Diff)
finally show ?thesis .
next
show ?thesis when "A \<subseteq> F"
using that by fact
assume "x \<notin> A"
with A show "A \<subseteq> F"
by (simp add: subset_insert_iff)
qed
qed
lemma finite_subset: "A \<subseteq> B \<Longrightarrow> finite B \<Longrightarrow> finite A"
by (rule rev_finite_subset)
-simproc_setup finite ("finite A") = \<open>fn _ =>
+simproc_setup finite ("finite A") = \<open>
let
val finite_subset = @{thm finite_subset}
val Eq_TrueI = @{thm Eq_TrueI}
fun is_subset A th = case Thm.prop_of th of
(_ $ (Const (\<^const_name>\<open>less_eq\<close>, Type (\<^type_name>\<open>fun\<close>, [Type (\<^type_name>\<open>set\<close>, _), _])) $ A' $ B))
=> if A aconv A' then SOME(B,th) else NONE
| _ => NONE;
fun is_finite th = case Thm.prop_of th of
(_ $ (Const (\<^const_name>\<open>finite\<close>, _) $ A)) => SOME(A,th)
| _ => NONE;
fun comb (A,sub_th) (A',fin_th) ths = if A aconv A' then (sub_th,fin_th) :: ths else ths
- fun proc ss ct =
+ fun proc ctxt ct =
(let
val _ $ A = Thm.term_of ct
- val prems = Simplifier.prems_of ss
+ val prems = Simplifier.prems_of ctxt
val fins = map_filter is_finite prems
val subsets = map_filter (is_subset A) prems
in case fold_product comb subsets fins [] of
(sub_th,fin_th) :: _ => SOME((fin_th RS (sub_th RS finite_subset)) RS Eq_TrueI)
| _ => NONE
end)
-in proc end
+in K proc end
\<close>
(* Needs to be used with care *)
declare [[simproc del: finite]]
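(* Illustrative sketch of careful local use: the simproc can be re-enabled for
   a single goal that carries the required subset and finiteness premises. *)
lemma "finite B \<Longrightarrow> A \<subseteq> B \<Longrightarrow> finite A"
using [[simproc add: finite]] by simp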
lemma finite_UnI:
assumes "finite F" and "finite G"
shows "finite (F \<union> G)"
using assms by induct simp_all
lemma finite_Un [iff]: "finite (F \<union> G) \<longleftrightarrow> finite F \<and> finite G"
by (blast intro: finite_UnI finite_subset [of _ "F \<union> G"])
lemma finite_insert [simp]: "finite (insert a A) \<longleftrightarrow> finite A"
proof -
have "finite {a} \<and> finite A \<longleftrightarrow> finite A" by simp
then have "finite ({a} \<union> A) \<longleftrightarrow> finite A" by (simp only: finite_Un)
then show ?thesis by simp
qed
lemma finite_Int [simp, intro]: "finite F \<or> finite G \<Longrightarrow> finite (F \<inter> G)"
by (blast intro: finite_subset)
lemma finite_Collect_conjI [simp, intro]:
"finite {x. P x} \<or> finite {x. Q x} \<Longrightarrow> finite {x. P x \<and> Q x}"
by (simp add: Collect_conj_eq)
lemma finite_Collect_disjI [simp]:
"finite {x. P x \<or> Q x} \<longleftrightarrow> finite {x. P x} \<and> finite {x. Q x}"
by (simp add: Collect_disj_eq)
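(* Illustrative sketch: together with the nat bounds proved above, these
   Collect rules dispose of simple comprehensions automatically. *)
lemma "finite {n :: nat. n < k \<or> n \<le> m}"
by simp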
lemma finite_Diff [simp, intro]: "finite A \<Longrightarrow> finite (A - B)"
by (rule finite_subset, rule Diff_subset)
lemma finite_Diff2 [simp]:
assumes "finite B"
shows "finite (A - B) \<longleftrightarrow> finite A"
proof -
have "finite A \<longleftrightarrow> finite ((A - B) \<union> (A \<inter> B))"
by (simp add: Un_Diff_Int)
also have "\<dots> \<longleftrightarrow> finite (A - B)"
using \<open>finite B\<close> by simp
finally show ?thesis ..
qed
lemma finite_Diff_insert [iff]: "finite (A - insert a B) \<longleftrightarrow> finite (A - B)"
proof -
have "finite (A - B) \<longleftrightarrow> finite (A - B - {a})" by simp
moreover have "A - insert a B = A - B - {a}" by auto
ultimately show ?thesis by simp
qed
lemma finite_compl [simp]:
"finite (A :: 'a set) \<Longrightarrow> finite (- A) \<longleftrightarrow> finite (UNIV :: 'a set)"
by (simp add: Compl_eq_Diff_UNIV)
lemma finite_Collect_not [simp]:
"finite {x :: 'a. P x} \<Longrightarrow> finite {x. \<not> P x} \<longleftrightarrow> finite (UNIV :: 'a set)"
by (simp add: Collect_neg_eq)
lemma finite_Union [simp, intro]:
"finite A \<Longrightarrow> (\<And>M. M \<in> A \<Longrightarrow> finite M) \<Longrightarrow> finite (\<Union>A)"
by (induct rule: finite_induct) simp_all
lemma finite_UN_I [intro]:
"finite A \<Longrightarrow> (\<And>a. a \<in> A \<Longrightarrow> finite (B a)) \<Longrightarrow> finite (\<Union>a\<in>A. B a)"
by (induct rule: finite_induct) simp_all
lemma finite_UN [simp]: "finite A \<Longrightarrow> finite (\<Union>(B ` A)) \<longleftrightarrow> (\<forall>x\<in>A. finite (B x))"
by (blast intro: finite_subset)
lemma finite_Inter [intro]: "\<exists>A\<in>M. finite A \<Longrightarrow> finite (\<Inter>M)"
by (blast intro: Inter_lower finite_subset)
lemma finite_INT [intro]: "\<exists>x\<in>I. finite (A x) \<Longrightarrow> finite (\<Inter>x\<in>I. A x)"
by (blast intro: INT_lower finite_subset)
lemma finite_imageI [simp, intro]: "finite F \<Longrightarrow> finite (h ` F)"
by (induct rule: finite_induct) simp_all
lemma finite_image_set [simp]: "finite {x. P x} \<Longrightarrow> finite {f x |x. P x}"
by (simp add: image_Collect [symmetric])
lemma finite_image_set2:
"finite {x. P x} \<Longrightarrow> finite {y. Q y} \<Longrightarrow> finite {f x y |x y. P x \<and> Q y}"
by (rule finite_subset [where B = "\<Union>x \<in> {x. P x}. \<Union>y \<in> {y. Q y}. {f x y}"]) auto
lemma finite_imageD:
assumes "finite (f ` A)" and "inj_on f A"
shows "finite A"
using assms
proof (induct "f ` A" arbitrary: A)
case empty
then show ?case by simp
next
case (insert x B)
then have B_A: "insert x B = f ` A"
by simp
then obtain y where "x = f y" and "y \<in> A"
by blast
from B_A \<open>x \<notin> B\<close> have "B = f ` A - {x}"
by blast
with B_A \<open>x \<notin> B\<close> \<open>x = f y\<close> \<open>inj_on f A\<close> \<open>y \<in> A\<close> have "B = f ` (A - {y})"
by (simp add: inj_on_image_set_diff)
moreover from \<open>inj_on f A\<close> have "inj_on f (A - {y})"
by (rule inj_on_diff)
ultimately have "finite (A - {y})"
by (rule insert.hyps)
then show "finite A"
by simp
qed
lemma finite_image_iff: "inj_on f A \<Longrightarrow> finite (f ` A) \<longleftrightarrow> finite A"
using finite_imageD by blast
lemma finite_surj: "finite A \<Longrightarrow> B \<subseteq> f ` A \<Longrightarrow> finite B"
by (erule finite_subset) (rule finite_imageI)
lemma finite_range_imageI: "finite (range g) \<Longrightarrow> finite (range (\<lambda>x. f (g x)))"
by (drule finite_imageI) (simp add: range_composition)
lemma finite_subset_image:
assumes "finite B"
shows "B \<subseteq> f ` A \<Longrightarrow> \<exists>C\<subseteq>A. finite C \<and> B = f ` C"
using assms
proof induct
case empty
then show ?case by simp
next
case insert
then show ?case
by (clarsimp simp del: image_insert simp add: image_insert [symmetric]) blast
qed
lemma all_subset_image: "(\<forall>B. B \<subseteq> f ` A \<longrightarrow> P B) \<longleftrightarrow> (\<forall>B. B \<subseteq> A \<longrightarrow> P(f ` B))"
by (safe elim!: subset_imageE) (use image_mono in \<open>blast+\<close>) (* slow *)
lemma all_finite_subset_image:
"(\<forall>B. finite B \<and> B \<subseteq> f ` A \<longrightarrow> P B) \<longleftrightarrow> (\<forall>B. finite B \<and> B \<subseteq> A \<longrightarrow> P (f ` B))"
proof safe
fix B :: "'a set"
assume B: "finite B" "B \<subseteq> f ` A" and P: "\<forall>B. finite B \<and> B \<subseteq> A \<longrightarrow> P (f ` B)"
show "P B"
using finite_subset_image [OF B] P by blast
qed blast
lemma ex_finite_subset_image:
"(\<exists>B. finite B \<and> B \<subseteq> f ` A \<and> P B) \<longleftrightarrow> (\<exists>B. finite B \<and> B \<subseteq> A \<and> P (f ` B))"
proof safe
fix B :: "'a set"
assume B: "finite B" "B \<subseteq> f ` A" and "P B"
show "\<exists>B. finite B \<and> B \<subseteq> A \<and> P (f ` B)"
using finite_subset_image [OF B] \<open>P B\<close> by blast
qed blast
lemma finite_vimage_IntI: "finite F \<Longrightarrow> inj_on h A \<Longrightarrow> finite (h -` F \<inter> A)"
proof (induct rule: finite_induct)
case (insert x F)
then show ?case
by (simp add: vimage_insert [of h x F] finite_subset [OF inj_on_vimage_singleton] Int_Un_distrib2)
qed simp
lemma finite_finite_vimage_IntI:
assumes "finite F"
and "\<And>y. y \<in> F \<Longrightarrow> finite ((h -` {y}) \<inter> A)"
shows "finite (h -` F \<inter> A)"
proof -
have *: "h -` F \<inter> A = (\<Union> y\<in>F. (h -` {y}) \<inter> A)"
by blast
show ?thesis
by (simp only: * assms finite_UN_I)
qed
lemma finite_vimageI: "finite F \<Longrightarrow> inj h \<Longrightarrow> finite (h -` F)"
using finite_vimage_IntI[of F h UNIV] by auto
lemma finite_vimageD': "finite (f -` A) \<Longrightarrow> A \<subseteq> range f \<Longrightarrow> finite A"
by (auto simp add: subset_image_iff intro: finite_subset[rotated])
lemma finite_vimageD: "finite (h -` F) \<Longrightarrow> surj h \<Longrightarrow> finite F"
by (auto dest: finite_vimageD')
lemma finite_vimage_iff: "bij h \<Longrightarrow> finite (h -` F) \<longleftrightarrow> finite F"
unfolding bij_def by (auto elim: finite_vimageD finite_vimageI)
lemma finite_inverse_image_gen:
assumes "finite A" "inj_on f D"
shows "finite {j\<in>D. f j \<in> A}"
using finite_vimage_IntI [OF assms]
by (simp add: Collect_conj_eq inf_commute vimage_def)
lemma finite_inverse_image:
assumes "finite A" "inj f"
shows "finite {j. f j \<in> A}"
using finite_inverse_image_gen [OF assms] by simp
lemma finite_Collect_bex [simp]:
assumes "finite A"
shows "finite {x. \<exists>y\<in>A. Q x y} \<longleftrightarrow> (\<forall>y\<in>A. finite {x. Q x y})"
proof -
have "{x. \<exists>y\<in>A. Q x y} = (\<Union>y\<in>A. {x. Q x y})" by auto
with assms show ?thesis by simp
qed
lemma finite_Collect_bounded_ex [simp]:
assumes "finite {y. P y}"
shows "finite {x. \<exists>y. P y \<and> Q x y} \<longleftrightarrow> (\<forall>y. P y \<longrightarrow> finite {x. Q x y})"
proof -
have "{x. \<exists>y. P y \<and> Q x y} = (\<Union>y\<in>{y. P y}. {x. Q x y})"
by auto
with assms show ?thesis
by simp
qed
lemma finite_Plus: "finite A \<Longrightarrow> finite B \<Longrightarrow> finite (A <+> B)"
by (simp add: Plus_def)
lemma finite_PlusD:
fixes A :: "'a set" and B :: "'b set"
assumes fin: "finite (A <+> B)"
shows "finite A" "finite B"
proof -
have "Inl ` A \<subseteq> A <+> B"
by auto
then have "finite (Inl ` A :: ('a + 'b) set)"
using fin by (rule finite_subset)
then show "finite A"
by (rule finite_imageD) (auto intro: inj_onI)
next
have "Inr ` B \<subseteq> A <+> B"
by auto
then have "finite (Inr ` B :: ('a + 'b) set)"
using fin by (rule finite_subset)
then show "finite B"
by (rule finite_imageD) (auto intro: inj_onI)
qed
lemma finite_Plus_iff [simp]: "finite (A <+> B) \<longleftrightarrow> finite A \<and> finite B"
by (auto intro: finite_PlusD finite_Plus)
lemma finite_Plus_UNIV_iff [simp]:
"finite (UNIV :: ('a + 'b) set) \<longleftrightarrow> finite (UNIV :: 'a set) \<and> finite (UNIV :: 'b set)"
by (subst UNIV_Plus_UNIV [symmetric]) (rule finite_Plus_iff)
lemma finite_SigmaI [simp, intro]:
"finite A \<Longrightarrow> (\<And>a. a\<in>A \<Longrightarrow> finite (B a)) \<Longrightarrow> finite (SIGMA a:A. B a)"
unfolding Sigma_def by blast
lemma finite_SigmaI2:
assumes "finite {x\<in>A. B x \<noteq> {}}"
and "\<And>a. a \<in> A \<Longrightarrow> finite (B a)"
shows "finite (Sigma A B)"
proof -
from assms have "finite (Sigma {x\<in>A. B x \<noteq> {}} B)"
by auto
also have "Sigma {x:A. B x \<noteq> {}} B = Sigma A B"
by auto
finally show ?thesis .
qed
lemma finite_cartesian_product: "finite A \<Longrightarrow> finite B \<Longrightarrow> finite (A \<times> B)"
by (rule finite_SigmaI)
lemma finite_Prod_UNIV:
"finite (UNIV :: 'a set) \<Longrightarrow> finite (UNIV :: 'b set) \<Longrightarrow> finite (UNIV :: ('a \<times> 'b) set)"
by (simp only: UNIV_Times_UNIV [symmetric] finite_cartesian_product)
lemma finite_cartesian_productD1:
assumes "finite (A \<times> B)" and "B \<noteq> {}"
shows "finite A"
proof -
from assms obtain n f where "A \<times> B = f ` {i::nat. i < n}"
by (auto simp add: finite_conv_nat_seg_image)
then have "fst ` (A \<times> B) = fst ` f ` {i::nat. i < n}"
by simp
with \<open>B \<noteq> {}\<close> have "A = (fst \<circ> f) ` {i::nat. i < n}"
by (simp add: image_comp)
then have "\<exists>n f. A = f ` {i::nat. i < n}"
by blast
then show ?thesis
by (auto simp add: finite_conv_nat_seg_image)
qed
lemma finite_cartesian_productD2:
assumes "finite (A \<times> B)" and "A \<noteq> {}"
shows "finite B"
proof -
from assms obtain n f where "A \<times> B = f ` {i::nat. i < n}"
by (auto simp add: finite_conv_nat_seg_image)
then have "snd ` (A \<times> B) = snd ` f ` {i::nat. i < n}"
by simp
with \<open>A \<noteq> {}\<close> have "B = (snd \<circ> f) ` {i::nat. i < n}"
by (simp add: image_comp)
then have "\<exists>n f. B = f ` {i::nat. i < n}"
by blast
then show ?thesis
by (auto simp add: finite_conv_nat_seg_image)
qed
lemma finite_cartesian_product_iff:
"finite (A \<times> B) \<longleftrightarrow> (A = {} \<or> B = {} \<or> (finite A \<and> finite B))"
by (auto dest: finite_cartesian_productD1 finite_cartesian_productD2 finite_cartesian_product)
lemma finite_prod:
"finite (UNIV :: ('a \<times> 'b) set) \<longleftrightarrow> finite (UNIV :: 'a set) \<and> finite (UNIV :: 'b set)"
using finite_cartesian_product_iff[of UNIV UNIV] by simp
lemma finite_Pow_iff [iff]: "finite (Pow A) \<longleftrightarrow> finite A"
proof
assume "finite (Pow A)"
then have "finite ((\<lambda>x. {x}) ` A)"
by (blast intro: finite_subset) (* somewhat slow *)
then show "finite A"
by (rule finite_imageD [unfolded inj_on_def]) simp
next
assume "finite A"
then show "finite (Pow A)"
by induct (simp_all add: Pow_insert)
qed
corollary finite_Collect_subsets [simp, intro]: "finite A \<Longrightarrow> finite {B. B \<subseteq> A}"
by (simp add: Pow_def [symmetric])
lemma finite_set: "finite (UNIV :: 'a set set) \<longleftrightarrow> finite (UNIV :: 'a set)"
by (simp only: finite_Pow_iff Pow_UNIV[symmetric])
lemma finite_UnionD: "finite (\<Union>A) \<Longrightarrow> finite A"
by (blast intro: finite_subset [OF subset_Pow_Union])
lemma finite_bind:
assumes "finite S"
assumes "\<forall>x \<in> S. finite (f x)"
shows "finite (Set.bind S f)"
using assms by (simp add: bind_UNION)
lemma finite_filter [simp]: "finite S \<Longrightarrow> finite (Set.filter P S)"
unfolding Set.filter_def by simp
lemma finite_set_of_finite_funs:
assumes "finite A" "finite B"
shows "finite {f. \<forall>x. (x \<in> A \<longrightarrow> f x \<in> B) \<and> (x \<notin> A \<longrightarrow> f x = d)}" (is "finite ?S")
proof -
let ?F = "\<lambda>f. {(a,b). a \<in> A \<and> b = f a}"
have "?F ` ?S \<subseteq> Pow(A \<times> B)"
by auto
from finite_subset[OF this] assms have 1: "finite (?F ` ?S)"
by simp
have 2: "inj_on ?F ?S"
by (fastforce simp add: inj_on_def set_eq_iff fun_eq_iff) (* somewhat slow *)
show ?thesis
by (rule finite_imageD [OF 1 2])
qed
lemma not_finite_existsD:
assumes "\<not> finite {a. P a}"
shows "\<exists>a. P a"
proof (rule classical)
assume "\<not> ?thesis"
with assms show ?thesis by auto
qed
lemma finite_converse [iff]: "finite (r\<inverse>) \<longleftrightarrow> finite r"
unfolding converse_def conversep_iff
using [[simproc add: finite_Collect]]
by (auto elim: finite_imageD simp: inj_on_def)
lemma finite_Domain: "finite r \<Longrightarrow> finite (Domain r)"
by (induct set: finite) auto
lemma finite_Range: "finite r \<Longrightarrow> finite (Range r)"
by (induct set: finite) auto
lemma finite_Field: "finite r \<Longrightarrow> finite (Field r)"
by (simp add: Field_def finite_Domain finite_Range)
lemma finite_Image[simp]: "finite R \<Longrightarrow> finite (R `` A)"
by(rule finite_subset[OF _ finite_Range]) auto
subsection \<open>Further induction rules on finite sets\<close>
lemma finite_ne_induct [case_names singleton insert, consumes 2]:
assumes "finite F" and "F \<noteq> {}"
assumes "\<And>x. P {x}"
and "\<And>x F. finite F \<Longrightarrow> F \<noteq> {} \<Longrightarrow> x \<notin> F \<Longrightarrow> P F \<Longrightarrow> P (insert x F)"
shows "P F"
using assms
proof induct
case empty
then show ?case by simp
next
case (insert x F)
then show ?case by cases auto
qed
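(* Illustrative sketch: a minimal application of the rule just proved, where
   the singleton case does the real work. *)
lemma
assumes "finite A" "A \<noteq> {}"
shows "\<exists>x. x \<in> A"
using assms by (induct rule: finite_ne_induct) auto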
lemma finite_subset_induct [consumes 2, case_names empty insert]:
assumes "finite F" and "F \<subseteq> A"
and empty: "P {}"
and insert: "\<And>a F. finite F \<Longrightarrow> a \<in> A \<Longrightarrow> a \<notin> F \<Longrightarrow> P F \<Longrightarrow> P (insert a F)"
shows "P F"
using \<open>finite F\<close> \<open>F \<subseteq> A\<close>
proof induct
show "P {}" by fact
next
fix x F
assume "finite F" and "x \<notin> F" and P: "F \<subseteq> A \<Longrightarrow> P F" and i: "insert x F \<subseteq> A"
show "P (insert x F)"
proof (rule insert)
from i show "x \<in> A" by blast
from i have "F \<subseteq> A" by blast
with P show "P F" .
show "finite F" by fact
show "x \<notin> F" by fact
qed
qed
lemma finite_empty_induct:
assumes "finite A"
and "P A"
and remove: "\<And>a A. finite A \<Longrightarrow> a \<in> A \<Longrightarrow> P A \<Longrightarrow> P (A - {a})"
shows "P {}"
proof -
have "P (A - B)" if "B \<subseteq> A" for B :: "'a set"
proof -
from \<open>finite A\<close> that have "finite B"
by (rule rev_finite_subset)
from this \<open>B \<subseteq> A\<close> show "P (A - B)"
proof induct
case empty
from \<open>P A\<close> show ?case by simp
next
case (insert b B)
have "P (A - B - {b})"
proof (rule remove)
from \<open>finite A\<close> show "finite (A - B)"
by induct auto
from insert show "b \<in> A - B"
by simp
from insert show "P (A - B)"
by simp
qed
also have "A - B - {b} = A - insert b B"
by (rule Diff_insert [symmetric])
finally show ?case .
qed
qed
then have "P (A - A)" by blast
then show ?thesis by simp
qed
lemma finite_update_induct [consumes 1, case_names const update]:
assumes finite: "finite {a. f a \<noteq> c}"
and const: "P (\<lambda>a. c)"
and update: "\<And>a b f. finite {a. f a \<noteq> c} \<Longrightarrow> f a = c \<Longrightarrow> b \<noteq> c \<Longrightarrow> P f \<Longrightarrow> P (f(a := b))"
shows "P f"
using finite
proof (induct "{a. f a \<noteq> c}" arbitrary: f)
case empty
with const show ?case by simp
next
case (insert a A)
then have "A = {a'. (f(a := c)) a' \<noteq> c}" and "f a \<noteq> c"
by auto
with \<open>finite A\<close> have "finite {a'. (f(a := c)) a' \<noteq> c}"
by simp
have "(f(a := c)) a = c"
by simp
from insert \<open>A = {a'. (f(a := c)) a' \<noteq> c}\<close> have "P (f(a := c))"
by simp
with \<open>finite {a'. (f(a := c)) a' \<noteq> c}\<close> \<open>(f(a := c)) a = c\<close> \<open>f a \<noteq> c\<close>
have "P ((f(a := c))(a := f a))"
by (rule update)
then show ?case by simp
qed
lemma finite_subset_induct' [consumes 2, case_names empty insert]:
assumes "finite F" and "F \<subseteq> A"
and empty: "P {}"
and insert: "\<And>a F. \<lbrakk>finite F; a \<in> A; F \<subseteq> A; a \<notin> F; P F \<rbrakk> \<Longrightarrow> P (insert a F)"
shows "P F"
using assms(1,2)
proof induct
show "P {}" by fact
next
fix x F
assume "finite F" and "x \<notin> F" and
P: "F \<subseteq> A \<Longrightarrow> P F" and i: "insert x F \<subseteq> A"
show "P (insert x F)"
proof (rule insert)
from i show "x \<in> A" by blast
from i have "F \<subseteq> A" by blast
with P show "P F" .
show "finite F" by fact
show "x \<notin> F" by fact
show "F \<subseteq> A" by fact
qed
qed
subsection \<open>Class \<open>finite\<close>\<close>
class finite =
assumes finite_UNIV: "finite (UNIV :: 'a set)"
begin
lemma finite [simp]: "finite (A :: 'a set)"
by (rule subset_UNIV finite_UNIV finite_subset)+
lemma finite_code [code]: "finite (A :: 'a set) \<longleftrightarrow> True"
by simp
end
instance prod :: (finite, finite) finite
by standard (simp only: UNIV_Times_UNIV [symmetric] finite_cartesian_product finite)
lemma inj_graph: "inj (\<lambda>f. {(x, y). y = f x})"
by (rule inj_onI) (auto simp add: set_eq_iff fun_eq_iff)
instance "fun" :: (finite, finite) finite
proof
show "finite (UNIV :: ('a \<Rightarrow> 'b) set)"
proof (rule finite_imageD)
let ?graph = "\<lambda>f::'a \<Rightarrow> 'b. {(x, y). y = f x}"
have "range ?graph \<subseteq> Pow UNIV"
by simp
moreover have "finite (Pow (UNIV :: ('a * 'b) set))"
by (simp only: finite_Pow_iff finite)
ultimately show "finite (range ?graph)"
by (rule finite_subset)
show "inj ?graph"
by (rule inj_graph)
qed
qed
instance bool :: finite
by standard (simp add: UNIV_bool)
instance set :: (finite) finite
by standard (simp only: Pow_UNIV [symmetric] finite_Pow_iff finite)
instance unit :: finite
by standard (simp add: UNIV_unit)
instance sum :: (finite, finite) finite
by standard (simp only: UNIV_Plus_UNIV [symmetric] finite_Plus finite)
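(* Illustrative sketch: with these instances, finiteness goals for such types
   are discharged by the [simp] rule "finite" above. *)
lemma "finite (X :: (bool \<times> bool) set)"
by simp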
subsection \<open>A basic fold functional for finite sets\<close>
text \<open>
The intended behaviour is \<open>fold f z {x\<^sub>1, \<dots>, x\<^sub>n} = f x\<^sub>1 (\<dots> (f x\<^sub>n z)\<dots>)\<close>
if \<open>f\<close> is ``left-commutative''.
The commutativity requirement is relativised to the carrier set \<open>S\<close>:
\<close>
locale comp_fun_commute_on =
fixes S :: "'a set"
fixes f :: "'a \<Rightarrow> 'b \<Rightarrow> 'b"
assumes comp_fun_commute_on: "x \<in> S \<Longrightarrow> y \<in> S \<Longrightarrow> f y \<circ> f x = f x \<circ> f y"
begin
lemma fun_left_comm: "x \<in> S \<Longrightarrow> y \<in> S \<Longrightarrow> f y (f x z) = f x (f y z)"
using comp_fun_commute_on by (simp add: fun_eq_iff)
lemma commute_left_comp: "x \<in> S \<Longrightarrow> y \<in> S \<Longrightarrow> f y \<circ> (f x \<circ> g) = f x \<circ> (f y \<circ> g)"
by (simp add: o_assoc comp_fun_commute_on)
end
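(* Illustrative sketch: any commutative and associative operator is
   left-commutative on every carrier set, e.g. addition on the naturals. *)
lemma "comp_fun_commute_on S ((+) :: nat \<Rightarrow> nat \<Rightarrow> nat)"
by unfold_locales (simp add: fun_eq_iff add.left_commute add.commute)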
inductive fold_graph :: "('a \<Rightarrow> 'b \<Rightarrow> 'b) \<Rightarrow> 'b \<Rightarrow> 'a set \<Rightarrow> 'b \<Rightarrow> bool"
for f :: "'a \<Rightarrow> 'b \<Rightarrow> 'b" and z :: 'b
where
emptyI [intro]: "fold_graph f z {} z"
| insertI [intro]: "x \<notin> A \<Longrightarrow> fold_graph f z A y \<Longrightarrow> fold_graph f z (insert x A) (f x y)"
inductive_cases empty_fold_graphE [elim!]: "fold_graph f z {} x"
lemma fold_graph_closed_lemma:
"fold_graph f z A x \<and> x \<in> B"
if "fold_graph g z A x"
"\<And>a b. a \<in> A \<Longrightarrow> b \<in> B \<Longrightarrow> f a b = g a b"
"\<And>a b. a \<in> A \<Longrightarrow> b \<in> B \<Longrightarrow> g a b \<in> B"
"z \<in> B"
using that(1-3)
proof (induction rule: fold_graph.induct)
case (insertI x A y)
have "fold_graph f z A y" "y \<in> B"
unfolding atomize_conj
by (rule insertI.IH) (auto intro: insertI.prems)
then have "g x y \<in> B" and f_eq: "f x y = g x y"
by (auto simp: insertI.prems)
moreover have "fold_graph f z (insert x A) (f x y)"
by (rule fold_graph.insertI; fact)
ultimately
show ?case
by (simp add: f_eq)
qed (auto intro!: that)
lemma fold_graph_closed_eq:
"fold_graph f z A = fold_graph g z A"
if "\<And>a b. a \<in> A \<Longrightarrow> b \<in> B \<Longrightarrow> f a b = g a b"
"\<And>a b. a \<in> A \<Longrightarrow> b \<in> B \<Longrightarrow> g a b \<in> B"
"z \<in> B"
using fold_graph_closed_lemma[of f z A _ B g] fold_graph_closed_lemma[of g z A _ B f] that
by auto
definition fold :: "('a \<Rightarrow> 'b \<Rightarrow> 'b) \<Rightarrow> 'b \<Rightarrow> 'a set \<Rightarrow> 'b"
where "fold f z A = (if finite A then (THE y. fold_graph f z A y) else z)"
lemma fold_closed_eq: "fold f z A = fold g z A"
if "\<And>a b. a \<in> A \<Longrightarrow> b \<in> B \<Longrightarrow> f a b = g a b"
"\<And>a b. a \<in> A \<Longrightarrow> b \<in> B \<Longrightarrow> g a b \<in> B"
"z \<in> B"
unfolding Finite_Set.fold_def
by (subst fold_graph_closed_eq[where B=B and g=g]) (auto simp: that)
text \<open>
A tempting alternative for the definition is
\<^term>\<open>if finite A then THE y. fold_graph f z A y else e\<close>.
It allows the removal of finiteness assumptions from the theorems
\<open>fold_comm\<close>, \<open>fold_reindex\<close> and \<open>fold_distrib\<close>.
The proofs become ugly. It is not worth the effort. (???)
\<close>
lemma finite_imp_fold_graph: "finite A \<Longrightarrow> \<exists>x. fold_graph f z A x"
by (induct rule: finite_induct) auto
subsubsection \<open>From \<^const>\<open>fold_graph\<close> to \<^term>\<open>fold\<close>\<close>
context comp_fun_commute_on
begin
lemma fold_graph_finite:
assumes "fold_graph f z A y"
shows "finite A"
using assms by induct simp_all
lemma fold_graph_insertE_aux:
assumes "A \<subseteq> S"
assumes "fold_graph f z A y" "a \<in> A"
shows "\<exists>y'. y = f a y' \<and> fold_graph f z (A - {a}) y'"
using assms(2-,1)
proof (induct set: fold_graph)
case emptyI
then show ?case by simp
next
case (insertI x A y)
show ?case
proof (cases "x = a")
case True
with insertI show ?thesis by auto
next
case False
then obtain y' where y: "y = f a y'" and y': "fold_graph f z (A - {a}) y'"
using insertI by auto
from insertI have "x \<in> S" "a \<in> S" by auto
then have "f x y = f a (f x y')"
unfolding y by (intro fun_left_comm; simp)
moreover have "fold_graph f z (insert x A - {a}) (f x y')"
using y' and \<open>x \<noteq> a\<close> and \<open>x \<notin> A\<close>
by (simp add: insert_Diff_if fold_graph.insertI)
ultimately show ?thesis
by fast
qed
qed
lemma fold_graph_insertE:
assumes "insert x A \<subseteq> S"
assumes "fold_graph f z (insert x A) v" and "x \<notin> A"
obtains y where "v = f x y" and "fold_graph f z A y"
using assms by (auto dest: fold_graph_insertE_aux[OF \<open>insert x A \<subseteq> S\<close> _ insertI1])
lemma fold_graph_determ:
assumes "A \<subseteq> S"
assumes "fold_graph f z A x" "fold_graph f z A y"
shows "y = x"
using assms(2-,1)
proof (induct arbitrary: y set: fold_graph)
case emptyI
then show ?case by fast
next
case (insertI x A y v)
from \<open>insert x A \<subseteq> S\<close> and \<open>fold_graph f z (insert x A) v\<close> and \<open>x \<notin> A\<close>
obtain y' where "v = f x y'" and "fold_graph f z A y'"
by (rule fold_graph_insertE)
from \<open>fold_graph f z A y'\<close> insertI have "y' = y"
by simp
with \<open>v = f x y'\<close> show "v = f x y"
by simp
qed
lemma fold_equality: "A \<subseteq> S \<Longrightarrow> fold_graph f z A y \<Longrightarrow> fold f z A = y"
by (cases "finite A") (auto simp add: fold_def intro: fold_graph_determ dest: fold_graph_finite)
lemma fold_graph_fold:
assumes "A \<subseteq> S"
assumes "finite A"
shows "fold_graph f z A (fold f z A)"
proof -
from \<open>finite A\<close> have "\<exists>x. fold_graph f z A x"
by (rule finite_imp_fold_graph)
moreover note fold_graph_determ[OF \<open>A \<subseteq> S\<close>]
ultimately have "\<exists>!x. fold_graph f z A x"
by (rule ex_ex1I)
then have "fold_graph f z A (The (fold_graph f z A))"
by (rule theI')
with assms show ?thesis
by (simp add: fold_def)
qed
text \<open>The base case for \<open>fold\<close>:\<close>
lemma (in -) fold_infinite [simp]: "\<not> finite A \<Longrightarrow> fold f z A = z"
by (auto simp: fold_def)
lemma (in -) fold_empty [simp]: "fold f z {} = z"
by (auto simp: fold_def)
text \<open>The various recursion equations for \<^const>\<open>fold\<close>:\<close>
lemma fold_insert [simp]:
assumes "insert x A \<subseteq> S"
assumes "finite A" and "x \<notin> A"
shows "fold f z (insert x A) = f x (fold f z A)"
proof (rule fold_equality[OF \<open>insert x A \<subseteq> S\<close>])
fix z
from \<open>insert x A \<subseteq> S\<close> \<open>finite A\<close> have "fold_graph f z A (fold f z A)"
by (blast intro: fold_graph_fold)
with \<open>x \<notin> A\<close> have "fold_graph f z (insert x A) (f x (fold f z A))"
by (rule fold_graph.insertI)
then show "fold_graph f z (insert x A) (f x (fold f z A))"
by simp
qed
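(* Illustrative sketch: the singleton instance of the recursion equation,
   derivable inside the locale from fold_insert and fold_empty. *)
lemma "a \<in> S \<Longrightarrow> fold f z {a} = f a z"
by (simp add: fold_insert)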
declare (in -) empty_fold_graphE [rule del] fold_graph.intros [rule del]
\<comment> \<open>No more proofs involve these.\<close>
lemma fold_fun_left_comm:
assumes "insert x A \<subseteq> S" "finite A"
shows "f x (fold f z A) = fold f (f x z) A"
using assms(2,1)
proof (induct rule: finite_induct)
case empty
then show ?case by simp
next
case (insert y F)
then have "fold f (f x z) (insert y F) = f y (fold f (f x z) F)"
by simp
also have "\<dots> = f x (f y (fold f z F))"
using insert by (simp add: fun_left_comm[where ?y=x])
also have "\<dots> = f x (fold f z (insert y F))"
proof -
from insert have "insert y F \<subseteq> S" by simp
from fold_insert[OF this] insert show ?thesis by simp
qed
finally show ?case ..
qed
lemma fold_insert2:
"insert x A \<subseteq> S \<Longrightarrow> finite A \<Longrightarrow> x \<notin> A \<Longrightarrow> fold f z (insert x A) = fold f (f x z) A"
by (simp add: fold_fun_left_comm)
lemma fold_rec:
assumes "A \<subseteq> S"
assumes "finite A" and "x \<in> A"
shows "fold f z A = f x (fold f z (A - {x}))"
proof -
have A: "A = insert x (A - {x})"
using \<open>x \<in> A\<close> by blast
then have "fold f z A = fold f z (insert x (A - {x}))"
by simp
also have "\<dots> = f x (fold f z (A - {x}))"
by (rule fold_insert) (use assms in \<open>auto\<close>)
finally show ?thesis .
qed
lemma fold_insert_remove:
assumes "insert x A \<subseteq> S"
assumes "finite A"
shows "fold f z (insert x A) = f x (fold f z (A - {x}))"
proof -
from \<open>finite A\<close> have "finite (insert x A)"
by auto
moreover have "x \<in> insert x A"
by auto
ultimately have "fold f z (insert x A) = f x (fold f z (insert x A - {x}))"
using \<open>insert x A \<subseteq> S\<close> by (blast intro: fold_rec)
then show ?thesis
by simp
qed
lemma fold_set_union_disj:
assumes "A \<subseteq> S" "B \<subseteq> S"
assumes "finite A" "finite B" "A \<inter> B = {}"
shows "Finite_Set.fold f z (A \<union> B) = Finite_Set.fold f (Finite_Set.fold f z A) B"
using \<open>finite B\<close> assms(1,2,3,5)
proof induct
case (insert x F)
have "fold f z (A \<union> insert x F) = f x (fold f (fold f z A) F)"
using insert by auto
also have "\<dots> = fold f (fold f z A) (insert x F)"
using insert by (blast intro: fold_insert[symmetric])
finally show ?case .
qed simp
end
text \<open>Other properties of \<^const>\<open>fold\<close>:\<close>
lemma fold_graph_image:
assumes "inj_on g A"
shows "fold_graph f z (g ` A) = fold_graph (f \<circ> g) z A"
proof
fix w
show "fold_graph f z (g ` A) w = fold_graph (f o g) z A w"
proof
assume "fold_graph f z (g ` A) w"
then show "fold_graph (f \<circ> g) z A w"
using assms
proof (induct "g ` A" w arbitrary: A)
case emptyI
then show ?case by (auto intro: fold_graph.emptyI)
next
case (insertI x A r B)
from \<open>inj_on g B\<close> \<open>x \<notin> A\<close> \<open>insert x A = image g B\<close> obtain x' A'
where "x' \<notin> A'" and [simp]: "B = insert x' A'" "x = g x'" "A = g ` A'"
by (rule inj_img_insertE)
from insertI.prems have "fold_graph (f \<circ> g) z A' r"
by (auto intro: insertI.hyps)
with \<open>x' \<notin> A'\<close> have "fold_graph (f \<circ> g) z (insert x' A') ((f \<circ> g) x' r)"
by (rule fold_graph.insertI)
then show ?case
by simp
qed
next
assume "fold_graph (f \<circ> g) z A w"
then show "fold_graph f z (g ` A) w"
using assms
proof induct
case emptyI
then show ?case
by (auto intro: fold_graph.emptyI)
next
case (insertI x A r)
from \<open>x \<notin> A\<close> insertI.prems have "g x \<notin> g ` A"
by auto
moreover from insertI have "fold_graph f z (g ` A) r"
by simp
ultimately have "fold_graph f z (insert (g x) (g ` A)) (f (g x) r)"
by (rule fold_graph.insertI)
then show ?case
by simp
qed
qed
qed
lemma fold_image:
assumes "inj_on g A"
shows "fold f z (g ` A) = fold (f \<circ> g) z A"
proof (cases "finite A")
case False
with assms show ?thesis
by (auto dest: finite_imageD simp add: fold_def)
next
case True
then show ?thesis
by (auto simp add: fold_def fold_graph_image[OF assms])
qed
lemma fold_cong:
assumes "comp_fun_commute_on S f" "comp_fun_commute_on S g"
and "A \<subseteq> S" "finite A"
and cong: "\<And>x. x \<in> A \<Longrightarrow> f x = g x"
and "s = t" and "A = B"
shows "fold f s A = fold g t B"
proof -
have "fold f s A = fold g s A"
using \<open>finite A\<close> \<open>A \<subseteq> S\<close> cong
proof (induct A)
case empty
then show ?case by simp
next
case insert
interpret f: comp_fun_commute_on S f by (fact \<open>comp_fun_commute_on S f\<close>)
interpret g: comp_fun_commute_on S g by (fact \<open>comp_fun_commute_on S g\<close>)
from insert show ?case by simp
qed
with assms show ?thesis by simp
qed
text \<open>A simplified version for idempotent functions:\<close>
locale comp_fun_idem_on = comp_fun_commute_on +
assumes comp_fun_idem_on: "x \<in> S \<Longrightarrow> f x \<circ> f x = f x"
begin
lemma fun_left_idem: "x \<in> S \<Longrightarrow> f x (f x z) = f x z"
using comp_fun_idem_on by (simp add: fun_eq_iff)
lemma fold_insert_idem:
assumes "insert x A \<subseteq> S"
assumes fin: "finite A"
shows "fold f z (insert x A) = f x (fold f z A)"
proof cases
assume "x \<in> A"
then obtain B where "A = insert x B" and "x \<notin> B"
by (rule set_insert)
then show ?thesis
using assms by (simp add: comp_fun_idem_on fun_left_idem)
next
assume "x \<notin> A"
then show ?thesis
using assms by auto
qed
declare fold_insert [simp del] fold_insert_idem [simp]
lemma fold_insert_idem2: "insert x A \<subseteq> S \<Longrightarrow> finite A \<Longrightarrow> fold f z (insert x A) = fold f (f x z) A"
by (simp add: fold_fun_left_comm)
end
subsubsection \<open>Liftings to \<open>comp_fun_commute_on\<close> etc.\<close>
lemma (in comp_fun_commute_on) comp_comp_fun_commute_on:
"range g \<subseteq> S \<Longrightarrow> comp_fun_commute_on R (f \<circ> g)"
by standard (force intro: comp_fun_commute_on)
lemma (in comp_fun_idem_on) comp_comp_fun_idem_on:
assumes "range g \<subseteq> S"
shows "comp_fun_idem_on R (f \<circ> g)"
proof
interpret f_g: comp_fun_commute_on R "f o g"
by (fact comp_comp_fun_commute_on[OF \<open>range g \<subseteq> S\<close>])
show "x \<in> R \<Longrightarrow> y \<in> R \<Longrightarrow> (f \<circ> g) y \<circ> (f \<circ> g) x = (f \<circ> g) x \<circ> (f \<circ> g) y" for x y
by (fact f_g.comp_fun_commute_on)
qed (use \<open>range g \<subseteq> S\<close> in \<open>force intro: comp_fun_idem_on\<close>)
lemma (in comp_fun_commute_on) comp_fun_commute_on_funpow:
"comp_fun_commute_on S (\<lambda>x. f x ^^ g x)"
proof
fix x y assume "x \<in> S" "y \<in> S"
show "f y ^^ g y \<circ> f x ^^ g x = f x ^^ g x \<circ> f y ^^ g y"
proof (cases "x = y")
case True
then show ?thesis by simp
next
case False
show ?thesis
proof (induct "g x" arbitrary: g)
case 0
then show ?case by simp
next
case (Suc n g)
have hyp1: "f y ^^ g y \<circ> f x = f x \<circ> f y ^^ g y"
proof (induct "g y" arbitrary: g)
case 0
then show ?case by simp
next
case (Suc n g)
define h where "h z = g z - 1" for z
with Suc have "n = h y"
by simp
with Suc have hyp: "f y ^^ h y \<circ> f x = f x \<circ> f y ^^ h y"
by auto
from Suc h_def have "g y = Suc (h y)"
by simp
with \<open>x \<in> S\<close> \<open>y \<in> S\<close> show ?case
by (simp add: comp_assoc hyp) (simp add: o_assoc comp_fun_commute_on)
qed
define h where "h z = (if z = x then g x - 1 else g z)" for z
with Suc have "n = h x"
by simp
with Suc have "f y ^^ h y \<circ> f x ^^ h x = f x ^^ h x \<circ> f y ^^ h y"
by auto
with False h_def have hyp2: "f y ^^ g y \<circ> f x ^^ h x = f x ^^ h x \<circ> f y ^^ g y"
by simp
from Suc h_def have "g x = Suc (h x)"
by simp
then show ?case
by (simp del: funpow.simps add: funpow_Suc_right o_assoc hyp2) (simp add: comp_assoc hyp1)
qed
qed
qed
subsubsection \<open>\<^term>\<open>UNIV\<close> as carrier set\<close>
locale comp_fun_commute =
fixes f :: "'a \<Rightarrow> 'b \<Rightarrow> 'b"
assumes comp_fun_commute: "f y \<circ> f x = f x \<circ> f y"
begin
lemma (in -) comp_fun_commute_def': "comp_fun_commute f = comp_fun_commute_on UNIV f"
unfolding comp_fun_commute_def comp_fun_commute_on_def by blast
text \<open>
We abuse the \<open>rewrites\<close> functionality of locales to remove trivial assumptions that
result from instantiating the carrier set to \<^term>\<open>UNIV\<close>.
\<close>
sublocale comp_fun_commute_on UNIV f
rewrites "\<And>X. (X \<subseteq> UNIV) \<equiv> True"
and "\<And>x. x \<in> UNIV \<equiv> True"
and "\<And>P. (True \<Longrightarrow> P) \<equiv> Trueprop P"
and "\<And>P Q. (True \<Longrightarrow> PROP P \<Longrightarrow> PROP Q) \<equiv> (PROP P \<Longrightarrow> True \<Longrightarrow> PROP Q)"
proof -
show "comp_fun_commute_on UNIV f"
by standard (simp add: comp_fun_commute)
qed simp_all
end
lemma (in comp_fun_commute) comp_comp_fun_commute: "comp_fun_commute (f o g)"
unfolding comp_fun_commute_def' by (fact comp_comp_fun_commute_on)
lemma (in comp_fun_commute) comp_fun_commute_funpow: "comp_fun_commute (\<lambda>x. f x ^^ g x)"
unfolding comp_fun_commute_def' by (fact comp_fun_commute_on_funpow)
locale comp_fun_idem = comp_fun_commute +
assumes comp_fun_idem: "f x o f x = f x"
begin
lemma (in -) comp_fun_idem_def': "comp_fun_idem f = comp_fun_idem_on UNIV f"
unfolding comp_fun_idem_on_def comp_fun_idem_def comp_fun_commute_def'
unfolding comp_fun_idem_axioms_def comp_fun_idem_on_axioms_def
by blast
text \<open>
Again, we abuse the \<open>rewrites\<close> functionality of locales to remove trivial assumptions that
result from instantiating the carrier set to \<^term>\<open>UNIV\<close>.
\<close>
sublocale comp_fun_idem_on UNIV f
rewrites "\<And>X. (X \<subseteq> UNIV) \<equiv> True"
and "\<And>x. x \<in> UNIV \<equiv> True"
and "\<And>P. (True \<Longrightarrow> P) \<equiv> Trueprop P"
and "\<And>P Q. (True \<Longrightarrow> PROP P \<Longrightarrow> PROP Q) \<equiv> (PROP P \<Longrightarrow> True \<Longrightarrow> PROP Q)"
proof -
show "comp_fun_idem_on UNIV f"
by standard (simp_all add: comp_fun_idem comp_fun_commute)
qed simp_all
end
lemma (in comp_fun_idem) comp_comp_fun_idem: "comp_fun_idem (f o g)"
unfolding comp_fun_idem_def' by (fact comp_comp_fun_idem_on)
subsubsection \<open>Expressing set operations via \<^const>\<open>fold\<close>\<close>
lemma comp_fun_commute_const: "comp_fun_commute (\<lambda>_. f)"
by standard (rule refl)
lemma comp_fun_idem_insert: "comp_fun_idem insert"
by standard auto
lemma comp_fun_idem_remove: "comp_fun_idem Set.remove"
by standard auto
lemma (in semilattice_inf) comp_fun_idem_inf: "comp_fun_idem inf"
by standard (auto simp add: inf_left_commute)
lemma (in semilattice_sup) comp_fun_idem_sup: "comp_fun_idem sup"
by standard (auto simp add: sup_left_commute)
lemma union_fold_insert:
assumes "finite A"
shows "A \<union> B = fold insert B A"
proof -
interpret comp_fun_idem insert
by (fact comp_fun_idem_insert)
from \<open>finite A\<close> show ?thesis
by (induct A arbitrary: B) simp_all
qed
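text \<open>As a small sanity check (illustrative only), the previous lemma lets \<open>simp\<close>
  compute \<^const>\<open>fold\<close> on an explicit finite set:\<close>
lemma "Finite_Set.fold Set.insert {c} {a, b} = {a, b, c}"
  by (simp add: union_fold_insert [symmetric])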
lemma minus_fold_remove:
assumes "finite A"
shows "B - A = fold Set.remove B A"
proof -
interpret comp_fun_idem Set.remove
by (fact comp_fun_idem_remove)
from \<open>finite A\<close> have "fold Set.remove B A = B - A"
by (induct A arbitrary: B) auto (* slow *)
then show ?thesis ..
qed
lemma comp_fun_commute_filter_fold:
"comp_fun_commute (\<lambda>x A'. if P x then Set.insert x A' else A')"
proof -
interpret comp_fun_idem Set.insert by (fact comp_fun_idem_insert)
show ?thesis by standard (auto simp: fun_eq_iff)
qed
lemma Set_filter_fold:
assumes "finite A"
shows "Set.filter P A = fold (\<lambda>x A'. if P x then Set.insert x A' else A') {} A"
using assms
proof -
interpret commute_insert: comp_fun_commute "(\<lambda>x A'. if P x then Set.insert x A' else A')"
by (fact comp_fun_commute_filter_fold)
from \<open>finite A\<close> show ?thesis
by induct (auto simp add: Set.filter_def)
qed
lemma inter_Set_filter:
assumes "finite B"
shows "A \<inter> B = Set.filter (\<lambda>x. x \<in> A) B"
using assms
by induct (auto simp: Set.filter_def)
lemma image_fold_insert:
assumes "finite A"
shows "image f A = fold (\<lambda>k A. Set.insert (f k) A) {} A"
proof -
interpret comp_fun_commute "\<lambda>k A. Set.insert (f k) A"
by standard auto
show ?thesis
using assms by (induct A) auto
qed
lemma Ball_fold:
assumes "finite A"
shows "Ball A P = fold (\<lambda>k s. s \<and> P k) True A"
proof -
interpret comp_fun_commute "\<lambda>k s. s \<and> P k"
by standard auto
show ?thesis
using assms by (induct A) auto
qed
lemma Bex_fold:
assumes "finite A"
shows "Bex A P = fold (\<lambda>k s. s \<or> P k) False A"
proof -
interpret comp_fun_commute "\<lambda>k s. s \<or> P k"
by standard auto
show ?thesis
using assms by (induct A) auto
qed
lemma comp_fun_commute_Pow_fold: "comp_fun_commute (\<lambda>x A. A \<union> Set.insert x ` A)"
by (clarsimp simp: fun_eq_iff comp_fun_commute_def) blast
lemma Pow_fold:
assumes "finite A"
shows "Pow A = fold (\<lambda>x A. A \<union> Set.insert x ` A) {{}} A"
proof -
interpret comp_fun_commute "\<lambda>x A. A \<union> Set.insert x ` A"
by (rule comp_fun_commute_Pow_fold)
show ?thesis
using assms by (induct A) (auto simp: Pow_insert)
qed
lemma fold_union_pair:
assumes "finite B"
shows "(\<Union>y\<in>B. {(x, y)}) \<union> A = fold (\<lambda>y. Set.insert (x, y)) A B"
proof -
interpret comp_fun_commute "\<lambda>y. Set.insert (x, y)"
by standard auto
show ?thesis
using assms by (induct arbitrary: A) simp_all
qed
lemma comp_fun_commute_product_fold:
"finite B \<Longrightarrow> comp_fun_commute (\<lambda>x z. fold (\<lambda>y. Set.insert (x, y)) z B)"
by standard (auto simp: fold_union_pair [symmetric])
lemma product_fold:
assumes "finite A" "finite B"
shows "A \<times> B = fold (\<lambda>x z. fold (\<lambda>y. Set.insert (x, y)) z B) {} A"
proof -
interpret commute_product: comp_fun_commute "(\<lambda>x z. fold (\<lambda>y. Set.insert (x, y)) z B)"
by (fact comp_fun_commute_product_fold[OF \<open>finite B\<close>])
from assms show ?thesis unfolding Sigma_def
by (induct A) (simp_all add: fold_union_pair)
qed
context complete_lattice
begin
lemma inf_Inf_fold_inf:
assumes "finite A"
shows "inf (Inf A) B = fold inf B A"
proof -
interpret comp_fun_idem inf
by (fact comp_fun_idem_inf)
from \<open>finite A\<close> fold_fun_left_comm show ?thesis
by (induct A arbitrary: B) (simp_all add: inf_commute fun_eq_iff)
qed
lemma sup_Sup_fold_sup:
assumes "finite A"
shows "sup (Sup A) B = fold sup B A"
proof -
interpret comp_fun_idem sup
by (fact comp_fun_idem_sup)
from \<open>finite A\<close> fold_fun_left_comm show ?thesis
by (induct A arbitrary: B) (simp_all add: sup_commute fun_eq_iff)
qed
lemma Inf_fold_inf: "finite A \<Longrightarrow> Inf A = fold inf top A"
using inf_Inf_fold_inf [of A top] by (simp add: inf_absorb2)
lemma Sup_fold_sup: "finite A \<Longrightarrow> Sup A = fold sup bot A"
using sup_Sup_fold_sup [of A bot] by (simp add: sup_absorb2)
lemma inf_INF_fold_inf:
assumes "finite A"
shows "inf B (\<Sqinter>(f ` A)) = fold (inf \<circ> f) B A" (is "?inf = ?fold")
proof -
interpret comp_fun_idem inf by (fact comp_fun_idem_inf)
interpret comp_fun_idem "inf \<circ> f" by (fact comp_comp_fun_idem)
from \<open>finite A\<close> have "?fold = ?inf"
by (induct A arbitrary: B) (simp_all add: inf_left_commute)
then show ?thesis ..
qed
lemma sup_SUP_fold_sup:
assumes "finite A"
shows "sup B (\<Squnion>(f ` A)) = fold (sup \<circ> f) B A" (is "?sup = ?fold")
proof -
interpret comp_fun_idem sup by (fact comp_fun_idem_sup)
interpret comp_fun_idem "sup \<circ> f" by (fact comp_comp_fun_idem)
from \<open>finite A\<close> have "?fold = ?sup"
by (induct A arbitrary: B) (simp_all add: sup_left_commute)
then show ?thesis ..
qed
lemma INF_fold_inf: "finite A \<Longrightarrow> \<Sqinter>(f ` A) = fold (inf \<circ> f) top A"
using inf_INF_fold_inf [of A top] by simp
lemma SUP_fold_sup: "finite A \<Longrightarrow> \<Squnion>(f ` A) = fold (sup \<circ> f) bot A"
using sup_SUP_fold_sup [of A bot] by simp
lemma finite_Inf_in:
assumes "finite A" "A\<noteq>{}" and inf: "\<And>x y. \<lbrakk>x \<in> A; y \<in> A\<rbrakk> \<Longrightarrow> inf x y \<in> A"
shows "Inf A \<in> A"
proof -
have "Inf B \<in> A" if "B \<le> A" "B\<noteq>{}" for B
using finite_subset [OF \<open>B \<subseteq> A\<close> \<open>finite A\<close>] that
by (induction B) (use inf in \<open>force+\<close>)
then show ?thesis
by (simp add: assms)
qed
lemma finite_Sup_in:
assumes "finite A" "A\<noteq>{}" and sup: "\<And>x y. \<lbrakk>x \<in> A; y \<in> A\<rbrakk> \<Longrightarrow> sup x y \<in> A"
shows "Sup A \<in> A"
proof -
have "Sup B \<in> A" if "B \<le> A" "B\<noteq>{}" for B
using finite_subset [OF \<open>B \<subseteq> A\<close> \<open>finite A\<close>] that
by (induction B) (use sup in \<open>force+\<close>)
then show ?thesis
by (simp add: assms)
qed
end
subsubsection \<open>Expressing relation operations via \<^const>\<open>fold\<close>\<close>
lemma Id_on_fold:
assumes "finite A"
shows "Id_on A = Finite_Set.fold (\<lambda>x. Set.insert (Pair x x)) {} A"
proof -
interpret comp_fun_commute "\<lambda>x. Set.insert (Pair x x)"
by standard auto
from assms show ?thesis
unfolding Id_on_def by (induct A) simp_all
qed
lemma comp_fun_commute_Image_fold:
"comp_fun_commute (\<lambda>(x,y) A. if x \<in> S then Set.insert y A else A)"
proof -
interpret comp_fun_idem Set.insert
by (fact comp_fun_idem_insert)
show ?thesis
by standard (auto simp: fun_eq_iff comp_fun_commute split: prod.split)
qed
lemma Image_fold:
assumes "finite R"
shows "R `` S = Finite_Set.fold (\<lambda>(x,y) A. if x \<in> S then Set.insert y A else A) {} R"
proof -
interpret comp_fun_commute "(\<lambda>(x,y) A. if x \<in> S then Set.insert y A else A)"
by (rule comp_fun_commute_Image_fold)
have *: "\<And>x F. Set.insert x F `` S = (if fst x \<in> S then Set.insert (snd x) (F `` S) else (F `` S))"
by (force intro: rev_ImageI)
show ?thesis
using assms by (induct R) (auto simp: * )
qed
lemma insert_relcomp_union_fold:
assumes "finite S"
shows "{x} O S \<union> X = Finite_Set.fold (\<lambda>(w,z) A'. if snd x = w then Set.insert (fst x,z) A' else A') X S"
proof -
interpret comp_fun_commute "\<lambda>(w,z) A'. if snd x = w then Set.insert (fst x,z) A' else A'"
proof -
interpret comp_fun_idem Set.insert
by (fact comp_fun_idem_insert)
show "comp_fun_commute (\<lambda>(w,z) A'. if snd x = w then Set.insert (fst x,z) A' else A')"
by standard (auto simp add: fun_eq_iff split: prod.split)
qed
have *: "{x} O S = {(x', z). x' = fst x \<and> (snd x, z) \<in> S}"
by (auto simp: relcomp_unfold intro!: exI)
show ?thesis
unfolding * using \<open>finite S\<close> by (induct S) (auto split: prod.split)
qed
lemma insert_relcomp_fold:
assumes "finite S"
shows "Set.insert x R O S =
Finite_Set.fold (\<lambda>(w,z) A'. if snd x = w then Set.insert (fst x,z) A' else A') (R O S) S"
proof -
have "Set.insert x R O S = ({x} O S) \<union> (R O S)"
by auto
then show ?thesis
by (auto simp: insert_relcomp_union_fold [OF assms])
qed
lemma comp_fun_commute_relcomp_fold:
assumes "finite S"
shows "comp_fun_commute (\<lambda>(x,y) A.
Finite_Set.fold (\<lambda>(w,z) A'. if y = w then Set.insert (x,z) A' else A') A S)"
proof -
have *: "\<And>a b A.
Finite_Set.fold (\<lambda>(w, z) A'. if b = w then Set.insert (a, z) A' else A') A S = {(a,b)} O S \<union> A"
by (auto simp: insert_relcomp_union_fold[OF assms] cong: if_cong)
show ?thesis
by standard (auto simp: * )
qed
lemma relcomp_fold:
assumes "finite R" "finite S"
shows "R O S = Finite_Set.fold
(\<lambda>(x,y) A. Finite_Set.fold (\<lambda>(w,z) A'. if y = w then Set.insert (x,z) A' else A') A S) {} R"
proof -
interpret commute_relcomp_fold: comp_fun_commute
"(\<lambda>(x, y) A. Finite_Set.fold (\<lambda>(w, z) A'. if y = w then insert (x, z) A' else A') A S)"
by (fact comp_fun_commute_relcomp_fold[OF \<open>finite S\<close>])
from assms show ?thesis
by (induct R) (auto simp: comp_fun_commute_relcomp_fold insert_relcomp_fold cong: if_cong)
qed
subsection \<open>Locales as mini-packages for fold operations\<close>
subsubsection \<open>The natural case\<close>
locale folding_on =
fixes S :: "'a set"
fixes f :: "'a \<Rightarrow> 'b \<Rightarrow> 'b" and z :: "'b"
assumes comp_fun_commute_on: "x \<in> S \<Longrightarrow> y \<in> S \<Longrightarrow> f y o f x = f x o f y"
begin
interpretation fold?: comp_fun_commute_on S f
by standard (simp add: comp_fun_commute_on)
definition F :: "'a set \<Rightarrow> 'b"
where eq_fold: "F A = Finite_Set.fold f z A"
lemma empty [simp]: "F {} = z"
by (simp add: eq_fold)
lemma infinite [simp]: "\<not> finite A \<Longrightarrow> F A = z"
by (simp add: eq_fold)
lemma insert [simp]:
assumes "insert x A \<subseteq> S" and "finite A" and "x \<notin> A"
shows "F (insert x A) = f x (F A)"
proof -
from fold_insert assms
have "Finite_Set.fold f z (insert x A)
= f x (Finite_Set.fold f z A)"
by simp
with \<open>finite A\<close> show ?thesis by (simp add: eq_fold fun_eq_iff)
qed
lemma remove:
assumes "A \<subseteq> S" and "finite A" and "x \<in> A"
shows "F A = f x (F (A - {x}))"
proof -
from \<open>x \<in> A\<close> obtain B where A: "A = insert x B" and "x \<notin> B"
by (auto dest: mk_disjoint_insert)
moreover from \<open>finite A\<close> A have "finite B" by simp
ultimately show ?thesis
using \<open>A \<subseteq> S\<close> by auto
qed
lemma insert_remove:
assumes "insert x A \<subseteq> S" and "finite A"
shows "F (insert x A) = f x (F (A - {x}))"
using assms by (cases "x \<in> A") (simp_all add: remove insert_absorb)
end
subsubsection \<open>With idempotency\<close>
locale folding_idem_on = folding_on +
assumes comp_fun_idem_on: "x \<in> S \<Longrightarrow> y \<in> S \<Longrightarrow> f x \<circ> f x = f x"
begin
declare insert [simp del]
interpretation fold?: comp_fun_idem_on S f
by standard (simp_all add: comp_fun_commute_on comp_fun_idem_on)
lemma insert_idem [simp]:
assumes "insert x A \<subseteq> S" and "finite A"
shows "F (insert x A) = f x (F A)"
proof -
from fold_insert_idem assms
have "fold f z (insert x A) = f x (fold f z A)" by simp
with \<open>finite A\<close> show ?thesis by (simp add: eq_fold fun_eq_iff)
qed
end
subsubsection \<open>\<^term>\<open>UNIV\<close> as the carrier set\<close>
locale folding =
fixes f :: "'a \<Rightarrow> 'b \<Rightarrow> 'b" and z :: "'b"
assumes comp_fun_commute: "f y \<circ> f x = f x \<circ> f y"
begin
lemma (in -) folding_def': "folding f = folding_on UNIV f"
unfolding folding_def folding_on_def by blast
text \<open>
Again, we abuse the \<open>rewrites\<close> functionality of locales to remove trivial assumptions that
result from instantiating the carrier set to \<^term>\<open>UNIV\<close>.
\<close>
sublocale folding_on UNIV f
rewrites "\<And>X. (X \<subseteq> UNIV) \<equiv> True"
and "\<And>x. x \<in> UNIV \<equiv> True"
and "\<And>P. (True \<Longrightarrow> P) \<equiv> Trueprop P"
and "\<And>P Q. (True \<Longrightarrow> PROP P \<Longrightarrow> PROP Q) \<equiv> (PROP P \<Longrightarrow> True \<Longrightarrow> PROP Q)"
proof -
show "folding_on UNIV f"
by standard (simp add: comp_fun_commute)
qed simp_all
end
locale folding_idem = folding +
assumes comp_fun_idem: "f x \<circ> f x = f x"
begin
lemma (in -) folding_idem_def': "folding_idem f = folding_idem_on UNIV f"
unfolding folding_idem_def folding_def' folding_idem_on_def
unfolding folding_idem_axioms_def folding_idem_on_axioms_def
by blast
text \<open>
Again, we abuse the \<open>rewrites\<close> functionality of locales to remove trivial assumptions that
result from instantiating the carrier set to \<^term>\<open>UNIV\<close>.
\<close>
sublocale folding_idem_on UNIV f
rewrites "\<And>X. (X \<subseteq> UNIV) \<equiv> True"
and "\<And>x. x \<in> UNIV \<equiv> True"
and "\<And>P. (True \<Longrightarrow> P) \<equiv> Trueprop P"
and "\<And>P Q. (True \<Longrightarrow> PROP P \<Longrightarrow> PROP Q) \<equiv> (PROP P \<Longrightarrow> True \<Longrightarrow> PROP Q)"
proof -
show "folding_idem_on UNIV f"
by standard (simp add: comp_fun_idem)
qed simp_all
end
subsection \<open>Finite cardinality\<close>
text \<open>
The traditional definition
\<^prop>\<open>card A \<equiv> LEAST n. \<exists>f. A = {f i |i. i < n}\<close>
is ugly to work with.
But now that we have \<^const>\<open>fold\<close>, things are easy:
\<close>
global_interpretation card: folding "\<lambda>_. Suc" 0
defines card = "folding_on.F (\<lambda>_. Suc) 0"
by standard (rule refl)
lemma card_insert_disjoint: "finite A \<Longrightarrow> x \<notin> A \<Longrightarrow> card (insert x A) = Suc (card A)"
by (fact card.insert)
lemma card_insert_if: "finite A \<Longrightarrow> card (insert x A) = (if x \<in> A then card A else Suc (card A))"
by auto (simp add: card.insert_remove card.remove)
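text \<open>For illustration, these recursion equations already determine the cardinality
  of concrete finite sets, e.g. a doubleton of distinct elements:\<close>
lemma "x \<noteq> y \<Longrightarrow> card {x, y} = Suc (Suc 0)"
  by simp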
lemma card_ge_0_finite: "card A > 0 \<Longrightarrow> finite A"
by (rule ccontr) simp
lemma card_0_eq [simp]: "finite A \<Longrightarrow> card A = 0 \<longleftrightarrow> A = {}"
by (auto dest: mk_disjoint_insert)
lemma finite_UNIV_card_ge_0: "finite (UNIV :: 'a set) \<Longrightarrow> card (UNIV :: 'a set) > 0"
by (rule ccontr) simp
lemma card_eq_0_iff: "card A = 0 \<longleftrightarrow> A = {} \<or> \<not> finite A"
by auto
lemma card_range_greater_zero: "finite (range f) \<Longrightarrow> card (range f) > 0"
by (rule ccontr) (simp add: card_eq_0_iff)
lemma card_gt_0_iff: "0 < card A \<longleftrightarrow> A \<noteq> {} \<and> finite A"
by (simp add: neq0_conv [symmetric] card_eq_0_iff)
lemma card_Suc_Diff1:
assumes "finite A" "x \<in> A" shows "Suc (card (A - {x})) = card A"
proof -
have "Suc (card (A - {x})) = card (insert x (A - {x}))"
using assms by (simp add: card.insert_remove)
also have "... = card A"
using assms by (simp add: card_insert_if)
finally show ?thesis .
qed
lemma card_insert_le_m1:
assumes "n > 0" "card y \<le> n - 1" shows "card (insert x y) \<le> n"
using assms
by (cases "finite y") (auto simp: card_insert_if)
lemma card_Diff_singleton:
assumes "x \<in> A" shows "card (A - {x}) = card A - 1"
proof (cases "finite A")
case True
with assms show ?thesis
by (simp add: card_Suc_Diff1 [symmetric])
qed auto
lemma card_Diff_singleton_if:
"card (A - {x}) = (if x \<in> A then card A - 1 else card A)"
by (simp add: card_Diff_singleton)
lemma card_Diff_insert[simp]:
assumes "a \<in> A" and "a \<notin> B"
shows "card (A - insert a B) = card (A - B) - 1"
proof -
have "A - insert a B = (A - B) - {a}"
using assms by blast
then show ?thesis
using assms by (simp add: card_Diff_singleton)
qed
lemma card_insert_le: "card A \<le> card (insert x A)"
proof (cases "finite A")
case True
then show ?thesis by (simp add: card_insert_if)
qed auto
lemma card_Collect_less_nat[simp]: "card {i::nat. i < n} = n"
by (induct n) (simp_all add:less_Suc_eq Collect_disj_eq)
lemma card_Collect_le_nat[simp]: "card {i::nat. i \<le> n} = Suc n"
using card_Collect_less_nat[of "Suc n"] by (simp add: less_Suc_eq_le)
lemma card_mono:
assumes "finite B" and "A \<subseteq> B"
shows "card A \<le> card B"
proof -
from assms have "finite A"
by (auto intro: finite_subset)
then show ?thesis
using assms
proof (induct A arbitrary: B)
case empty
then show ?case by simp
next
case (insert x A)
then have "x \<in> B"
by simp
from insert have "A \<subseteq> B - {x}" and "finite (B - {x})"
by auto
with insert.hyps have "card A \<le> card (B - {x})"
by auto
with \<open>finite A\<close> \<open>x \<notin> A\<close> \<open>finite B\<close> \<open>x \<in> B\<close> show ?case
by simp (simp only: card.remove)
qed
qed
lemma card_seteq:
assumes "finite B" and A: "A \<subseteq> B" "card B \<le> card A"
shows "A = B"
using assms
proof (induction arbitrary: A rule: finite_induct)
case (insert b B)
then have A: "finite A" "A - {b} \<subseteq> B"
by force+
then have "card B \<le> card (A - {b})"
using insert by (auto simp add: card_Diff_singleton_if)
then have "A - {b} = B"
using A insert.IH by auto
then show ?case
using insert.hyps insert.prems by auto
qed auto
lemma psubset_card_mono: "finite B \<Longrightarrow> A < B \<Longrightarrow> card A < card B"
using card_seteq [of B A] by (auto simp add: psubset_eq)
lemma card_Un_Int:
assumes "finite A" "finite B"
shows "card A + card B = card (A \<union> B) + card (A \<inter> B)"
using assms
proof (induct A)
case empty
then show ?case by simp
next
case insert
then show ?case
by (auto simp add: insert_absorb Int_insert_left)
qed
lemma card_Un_disjoint: "finite A \<Longrightarrow> finite B \<Longrightarrow> A \<inter> B = {} \<Longrightarrow> card (A \<union> B) = card A + card B"
using card_Un_Int [of A B] by simp
lemma card_Un_disjnt: "\<lbrakk>finite A; finite B; disjnt A B\<rbrakk> \<Longrightarrow> card (A \<union> B) = card A + card B"
by (simp add: card_Un_disjoint disjnt_def)
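text \<open>For illustration, \<open>card_Un_Int\<close> is the finite inclusion-exclusion principle;
  with subtraction on \<^typ>\<open>nat\<close> it can be read as follows:\<close>
lemma
  assumes "finite A" "finite B"
  shows "card (A \<union> B) = card A + card B - card (A \<inter> B)"
  using card_Un_Int [OF assms] by simp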
lemma card_Un_le: "card (A \<union> B) \<le> card A + card B"
proof (cases "finite A \<and> finite B")
case True
then show ?thesis
using le_iff_add card_Un_Int [of A B] by auto
qed auto
lemma card_Diff_subset:
assumes "finite B"
and "B \<subseteq> A"
shows "card (A - B) = card A - card B"
using assms
proof (cases "finite A")
case False
with assms show ?thesis
by simp
next
case True
with assms show ?thesis
by (induct B arbitrary: A) simp_all
qed
lemma card_Diff_subset_Int:
assumes "finite (A \<inter> B)"
shows "card (A - B) = card A - card (A \<inter> B)"
proof -
have "A - B = A - A \<inter> B" by auto
with assms show ?thesis
by (simp add: card_Diff_subset)
qed
lemma diff_card_le_card_Diff:
assumes "finite B"
shows "card A - card B \<le> card (A - B)"
proof -
have "card A - card B \<le> card A - card (A \<inter> B)"
using card_mono[OF assms Int_lower2, of A] by arith
also have "\<dots> = card (A - B)"
using assms by (simp add: card_Diff_subset_Int)
finally show ?thesis .
qed
lemma card_le_sym_Diff:
assumes "finite A" "finite B" "card A \<le> card B"
shows "card(A - B) \<le> card(B - A)"
proof -
have "card(A - B) = card A - card (A \<inter> B)" using assms(1,2) by(simp add: card_Diff_subset_Int)
also have "\<dots> \<le> card B - card (A \<inter> B)" using assms(3) by linarith
also have "\<dots> = card(B - A)" using assms(1,2) by(simp add: card_Diff_subset_Int Int_commute)
finally show ?thesis .
qed
lemma card_less_sym_Diff:
assumes "finite A" "finite B" "card A < card B"
shows "card(A - B) < card(B - A)"
proof -
have "card(A - B) = card A - card (A \<inter> B)" using assms(1,2) by(simp add: card_Diff_subset_Int)
also have "\<dots> < card B - card (A \<inter> B)" using assms(1,3) by (simp add: card_mono diff_less_mono)
also have "\<dots> = card(B - A)" using assms(1,2) by(simp add: card_Diff_subset_Int Int_commute)
finally show ?thesis .
qed
lemma card_Diff1_less_iff: "card (A - {x}) < card A \<longleftrightarrow> finite A \<and> x \<in> A"
proof (cases "finite A \<and> x \<in> A")
case True
then show ?thesis
by (auto simp: card_gt_0_iff intro: diff_less)
qed auto
lemma card_Diff1_less: "finite A \<Longrightarrow> x \<in> A \<Longrightarrow> card (A - {x}) < card A"
unfolding card_Diff1_less_iff by auto
lemma card_Diff2_less:
assumes "finite A" "x \<in> A" "y \<in> A" shows "card (A - {x} - {y}) < card A"
proof (cases "x = y")
case True
with assms show ?thesis
by (simp add: card_Diff1_less del: card_Diff_insert)
next
case False
then have "card (A - {x} - {y}) < card (A - {x})" "card (A - {x}) < card A"
using assms by (intro card_Diff1_less; simp)+
then show ?thesis
by (blast intro: less_trans)
qed
lemma card_Diff1_le: "card (A - {x}) \<le> card A"
proof (cases "finite A")
case True
then show ?thesis
by (cases "x \<in> A") (simp_all add: card_Diff1_less less_imp_le)
qed auto
lemma card_psubset: "finite B \<Longrightarrow> A \<subseteq> B \<Longrightarrow> card A < card B \<Longrightarrow> A < B"
by (erule psubsetI) blast
lemma card_le_inj:
assumes fA: "finite A"
and fB: "finite B"
and c: "card A \<le> card B"
shows "\<exists>f. f ` A \<subseteq> B \<and> inj_on f A"
using fA fB c
proof (induct arbitrary: B rule: finite_induct)
case empty
then show ?case by simp
next
case (insert x s t)
then show ?case
proof (induct rule: finite_induct [OF insert.prems(1)])
case 1
then show ?case by simp
next
case (2 y t)
from "2.prems"(1,2,5) "2.hyps"(1,2) have cst: "card s \<le> card t"
by simp
from "2.prems"(3) [OF "2.hyps"(1) cst]
obtain f where *: "f ` s \<subseteq> t" "inj_on f s"
by blast
let ?g = "(\<lambda>a. if a = x then y else f a)"
have "?g ` insert x s \<subseteq> insert y t \<and> inj_on ?g (insert x s)"
using * "2.prems"(2) "2.hyps"(2) unfolding inj_on_def by auto
then show ?case by (rule exI[where ?x="?g"])
qed
qed
lemma card_subset_eq:
assumes fB: "finite B"
and AB: "A \<subseteq> B"
and c: "card A = card B"
shows "A = B"
proof -
from fB AB have fA: "finite A"
by (auto intro: finite_subset)
from fA fB have fBA: "finite (B - A)"
by auto
have e: "A \<inter> (B - A) = {}"
by blast
have eq: "A \<union> (B - A) = B"
using AB by blast
from card_Un_disjoint[OF fA fBA e, unfolded eq c] have "card (B - A) = 0"
by arith
then have "B - A = {}"
unfolding card_eq_0_iff using fA fB by simp
with AB show "A = B"
by blast
qed
lemma insert_partition:
"x \<notin> F \<Longrightarrow> \<forall>c1 \<in> insert x F. \<forall>c2 \<in> insert x F. c1 \<noteq> c2 \<longrightarrow> c1 \<inter> c2 = {} \<Longrightarrow> x \<inter> \<Union>F = {}"
by auto
lemma finite_psubset_induct [consumes 1, case_names psubset]:
assumes finite: "finite A"
and major: "\<And>A. finite A \<Longrightarrow> (\<And>B. B \<subset> A \<Longrightarrow> P B) \<Longrightarrow> P A"
shows "P A"
using finite
proof (induct A taking: card rule: measure_induct_rule)
case (less A)
have fin: "finite A" by fact
have ih: "card B < card A \<Longrightarrow> finite B \<Longrightarrow> P B" for B by fact
have "P B" if "B \<subset> A" for B
proof -
from that have "card B < card A"
using psubset_card_mono fin by blast
moreover
from that have "B \<subseteq> A"
by auto
then have "finite B"
using fin finite_subset by blast
ultimately show ?thesis using ih by simp
qed
with fin show "P A" using major by blast
qed
lemma finite_induct_select [consumes 1, case_names empty select]:
assumes "finite S"
and "P {}"
and select: "\<And>T. T \<subset> S \<Longrightarrow> P T \<Longrightarrow> \<exists>s\<in>S - T. P (insert s T)"
shows "P S"
proof -
have "0 \<le> card S" by simp
then have "\<exists>T \<subseteq> S. card T = card S \<and> P T"
proof (induct rule: dec_induct)
case base with \<open>P {}\<close>
show ?case
by (intro exI[of _ "{}"]) auto
next
case (step n)
then obtain T where T: "T \<subseteq> S" "card T = n" "P T"
by auto
with \<open>n < card S\<close> have "T \<subset> S" "P T"
by auto
with select[of T] obtain s where "s \<in> S" "s \<notin> T" "P (insert s T)"
by auto
with step(2) T \<open>finite S\<close> show ?case
by (intro exI[of _ "insert s T"]) (auto dest: finite_subset)
qed
with \<open>finite S\<close> show "P S"
by (auto dest: card_subset_eq)
qed
lemma remove_induct [case_names empty infinite remove]:
assumes empty: "P ({} :: 'a set)"
and infinite: "\<not> finite B \<Longrightarrow> P B"
and remove: "\<And>A. finite A \<Longrightarrow> A \<noteq> {} \<Longrightarrow> A \<subseteq> B \<Longrightarrow> (\<And>x. x \<in> A \<Longrightarrow> P (A - {x})) \<Longrightarrow> P A"
shows "P B"
proof (cases "finite B")
case False
then show ?thesis by (rule infinite)
next
case True
define A where "A = B"
with True have "finite A" "A \<subseteq> B"
by simp_all
then show "P A"
proof (induct "card A" arbitrary: A)
case 0
then have "A = {}" by auto
with empty show ?case by simp
next
case (Suc n A)
from \<open>A \<subseteq> B\<close> and \<open>finite B\<close> have "finite A"
by (rule finite_subset)
moreover from Suc.hyps have "A \<noteq> {}" by auto
moreover note \<open>A \<subseteq> B\<close>
moreover have "P (A - {x})" if x: "x \<in> A" for x
using x Suc.prems \<open>Suc n = card A\<close> by (intro Suc) auto
ultimately show ?case by (rule remove)
qed
qed
lemma finite_remove_induct [consumes 1, case_names empty remove]:
fixes P :: "'a set \<Rightarrow> bool"
assumes "finite B"
and "P {}"
and "\<And>A. finite A \<Longrightarrow> A \<noteq> {} \<Longrightarrow> A \<subseteq> B \<Longrightarrow> (\<And>x. x \<in> A \<Longrightarrow> P (A - {x})) \<Longrightarrow> P A"
defines "B' \<equiv> B"
shows "P B'"
by (induct B' rule: remove_induct) (simp_all add: assms)
text \<open>Main cardinality theorem.\<close>
lemma card_partition [rule_format]:
"finite C \<Longrightarrow> finite (\<Union>C) \<Longrightarrow> (\<forall>c\<in>C. card c = k) \<Longrightarrow>
(\<forall>c1 \<in> C. \<forall>c2 \<in> C. c1 \<noteq> c2 \<longrightarrow> c1 \<inter> c2 = {}) \<Longrightarrow>
k * card C = card (\<Union>C)"
proof (induct rule: finite_induct)
case empty
then show ?case by simp
next
case (insert x F)
then show ?case
by (simp add: card_Un_disjoint insert_partition finite_subset [of _ "\<Union>(insert _ _)"])
qed
lemma card_eq_UNIV_imp_eq_UNIV:
assumes fin: "finite (UNIV :: 'a set)"
and card: "card A = card (UNIV :: 'a set)"
shows "A = (UNIV :: 'a set)"
proof
show "A \<subseteq> UNIV" by simp
show "UNIV \<subseteq> A"
proof
show "x \<in> A" for x
proof (rule ccontr)
assume "x \<notin> A"
then have "A \<subset> UNIV" by auto
with fin have "card A < card (UNIV :: 'a set)"
by (fact psubset_card_mono)
with card show False by simp
qed
qed
qed
text \<open>The form of a finite set of given cardinality\<close>
lemma card_eq_SucD:
assumes "card A = Suc k"
shows "\<exists>b B. A = insert b B \<and> b \<notin> B \<and> card B = k \<and> (k = 0 \<longrightarrow> B = {})"
proof -
have fin: "finite A"
using assms by (auto intro: ccontr)
moreover have "card A \<noteq> 0"
using assms by auto
ultimately obtain b where b: "b \<in> A"
by auto
show ?thesis
proof (intro exI conjI)
show "A = insert b (A - {b})"
using b by blast
show "b \<notin> A - {b}"
by blast
show "card (A - {b}) = k" and "k = 0 \<longrightarrow> A - {b} = {}"
using assms b fin by (fastforce dest: mk_disjoint_insert)+
qed
qed
lemma card_Suc_eq:
"card A = Suc k \<longleftrightarrow>
(\<exists>b B. A = insert b B \<and> b \<notin> B \<and> card B = k \<and> (k = 0 \<longrightarrow> B = {}))"
by (auto simp: card_insert_if card_gt_0_iff elim!: card_eq_SucD)
lemma card_Suc_eq_finite:
"card A = Suc k \<longleftrightarrow> (\<exists>b B. A = insert b B \<and> b \<notin> B \<and> card B = k \<and> finite B)"
unfolding card_Suc_eq using card_gt_0_iff by fastforce
lemma card_1_singletonE:
assumes "card A = 1"
obtains x where "A = {x}"
using assms by (auto simp: card_Suc_eq)
lemma is_singleton_altdef: "is_singleton A \<longleftrightarrow> card A = 1"
unfolding is_singleton_def
by (auto elim!: card_1_singletonE is_singletonE simp del: One_nat_def)
lemma card_1_singleton_iff: "card A = Suc 0 \<longleftrightarrow> (\<exists>x. A = {x})"
by (simp add: card_Suc_eq)
lemma card_le_Suc0_iff_eq:
assumes "finite A"
shows "card A \<le> Suc 0 \<longleftrightarrow> (\<forall>a1 \<in> A. \<forall>a2 \<in> A. a1 = a2)" (is "?C = ?A")
proof
assume ?C thus ?A using assms by (auto simp: le_Suc_eq dest: card_eq_SucD)
next
assume ?A
show ?C
proof cases
assume "A = {}" thus ?C using \<open>?A\<close> by simp
next
assume "A \<noteq> {}"
then obtain a where "A = {a}" using \<open>?A\<close> by blast
thus ?C by simp
qed
qed
lemma card_le_Suc_iff:
"Suc n \<le> card A = (\<exists>a B. A = insert a B \<and> a \<notin> B \<and> n \<le> card B \<and> finite B)"
proof (cases "finite A")
case True
then show ?thesis
by (fastforce simp: card_Suc_eq less_eq_nat.simps split: nat.splits)
qed auto
lemma finite_fun_UNIVD2:
assumes fin: "finite (UNIV :: ('a \<Rightarrow> 'b) set)"
shows "finite (UNIV :: 'b set)"
proof -
from fin have "finite (range (\<lambda>f :: 'a \<Rightarrow> 'b. f arbitrary))" for arbitrary
by (rule finite_imageI)
moreover have "UNIV = range (\<lambda>f :: 'a \<Rightarrow> 'b. f arbitrary)" for arbitrary
by (rule UNIV_eq_I) auto
ultimately show "finite (UNIV :: 'b set)"
by simp
qed
lemma card_UNIV_unit [simp]: "card (UNIV :: unit set) = 1"
unfolding UNIV_unit by simp
lemma infinite_arbitrarily_large:
assumes "\<not> finite A"
shows "\<exists>B. finite B \<and> card B = n \<and> B \<subseteq> A"
proof (induction n)
case 0
show ?case by (intro exI[of _ "{}"]) auto
next
case (Suc n)
then obtain B where B: "finite B \<and> card B = n \<and> B \<subseteq> A" ..
with \<open>\<not> finite A\<close> have "A \<noteq> B" by auto
with B have "B \<subset> A" by auto
then have "\<exists>x. x \<in> A - B"
by (elim psubset_imp_ex_mem)
then obtain x where x: "x \<in> A - B" ..
with B have "finite (insert x B) \<and> card (insert x B) = Suc n \<and> insert x B \<subseteq> A"
by auto
then show "\<exists>B. finite B \<and> card B = Suc n \<and> B \<subseteq> A" ..
qed
text \<open>Sometimes, to prove that a set is finite, it is convenient to work with finite subsets
and to show that their cardinalities are uniformly bounded. This possibility is formalized in
the next criterion.\<close>
lemma finite_if_finite_subsets_card_bdd:
assumes "\<And>G. G \<subseteq> F \<Longrightarrow> finite G \<Longrightarrow> card G \<le> C"
shows "finite F \<and> card F \<le> C"
proof (cases "finite F")
case False
obtain n::nat where n: "n > max C 0" by auto
obtain G where G: "G \<subseteq> F" "card G = n" using infinite_arbitrarily_large[OF False] by auto
hence "finite G" using \<open>n > max C 0\<close> using card.infinite gr_implies_not0 by blast
hence False using assms G n not_less by auto
thus ?thesis ..
next
case True thus ?thesis using assms[of F] by auto
qed
lemma obtain_subset_with_card_n:
assumes "n \<le> card S"
obtains T where "T \<subseteq> S" "card T = n" "finite T"
proof -
obtain n' where "card S = n + n'"
using le_Suc_ex[OF assms] by blast
with that show thesis
proof (induct n' arbitrary: S)
case 0
thus ?case by (cases "finite S") auto
next
case Suc
thus ?case by (auto simp add: card_Suc_eq)
qed
qed
lemma exists_subset_between:
assumes
"card A \<le> n"
"n \<le> card C"
"A \<subseteq> C"
"finite C"
shows "\<exists>B. A \<subseteq> B \<and> B \<subseteq> C \<and> card B = n"
using assms
proof (induct n arbitrary: A C)
case 0
thus ?case using finite_subset[of A C] by (intro exI[of _ "{}"], auto)
next
case (Suc n A C)
show ?case
proof (cases "A = {}")
case True
from obtain_subset_with_card_n[OF Suc(3)]
obtain B where "B \<subseteq> C" "card B = Suc n" by blast
thus ?thesis unfolding True by blast
next
case False
then obtain a where a: "a \<in> A" by auto
let ?A = "A - {a}"
let ?C = "C - {a}"
have 1: "card ?A \<le> n" using Suc(2-) a
using finite_subset by fastforce
have 2: "card ?C \<ge> n" using Suc(2-) a by auto
from Suc(1)[OF 1 2 _ finite_subset[OF _ Suc(5)]] Suc(2-)
obtain B where "?A \<subseteq> B" "B \<subseteq> ?C" "card B = n" by blast
thus ?thesis using a Suc(2-)
by (intro exI[of _ "insert a B"], auto intro!: card_insert_disjoint finite_subset[of B C])
qed
qed
subsubsection \<open>Cardinality of image\<close>
lemma card_image_le: "finite A \<Longrightarrow> card (f ` A) \<le> card A"
by (induct rule: finite_induct) (simp_all add: le_SucI card_insert_if)
lemma card_image: "inj_on f A \<Longrightarrow> card (f ` A) = card A"
proof (induct A rule: infinite_finite_induct)
case (infinite A)
then have "\<not> finite (f ` A)" by (auto dest: finite_imageD)
with infinite show ?case by simp
qed simp_all
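text \<open>For illustration, \<open>card_image\<close> applies to any concrete injection, e.g. tagging
  every element with a fixed second component:\<close>
lemma "card ((\<lambda>x. (x, c)) ` A) = card A"
  by (simp add: card_image inj_on_def)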
lemma bij_betw_same_card: "bij_betw f A B \<Longrightarrow> card A = card B"
by (auto simp: card_image bij_betw_def)
lemma endo_inj_surj: "finite A \<Longrightarrow> f ` A \<subseteq> A \<Longrightarrow> inj_on f A \<Longrightarrow> f ` A = A"
by (simp add: card_seteq card_image)
lemma eq_card_imp_inj_on:
assumes "finite A" "card(f ` A) = card A"
shows "inj_on f A"
using assms
proof (induct rule:finite_induct)
case empty
show ?case by simp
next
case (insert x A)
then show ?case
using card_image_le [of A f] by (simp add: card_insert_if split: if_splits)
qed
lemma inj_on_iff_eq_card: "finite A \<Longrightarrow> inj_on f A \<longleftrightarrow> card (f ` A) = card A"
by (blast intro: card_image eq_card_imp_inj_on)
lemma card_inj_on_le:
assumes "inj_on f A" "f ` A \<subseteq> B" "finite B"
shows "card A \<le> card B"
proof -
have "finite A"
using assms by (blast intro: finite_imageD dest: finite_subset)
then show ?thesis
using assms by (force intro: card_mono simp: card_image [symmetric])
qed
lemma inj_on_iff_card_le:
"\<lbrakk> finite A; finite B \<rbrakk> \<Longrightarrow> (\<exists>f. inj_on f A \<and> f ` A \<le> B) = (card A \<le> card B)"
using card_inj_on_le[of _ A B] card_le_inj[of A B] by blast
lemma surj_card_le: "finite A \<Longrightarrow> B \<subseteq> f ` A \<Longrightarrow> card B \<le> card A"
by (blast intro: card_image_le card_mono le_trans)
lemma card_bij_eq:
"inj_on f A \<Longrightarrow> f ` A \<subseteq> B \<Longrightarrow> inj_on g B \<Longrightarrow> g ` B \<subseteq> A \<Longrightarrow> finite A \<Longrightarrow> finite B
\<Longrightarrow> card A = card B"
by (auto intro: le_antisym card_inj_on_le)
lemma bij_betw_finite: "bij_betw f A B \<Longrightarrow> finite A \<longleftrightarrow> finite B"
unfolding bij_betw_def using finite_imageD [of f A] by auto
lemma inj_on_finite: "inj_on f A \<Longrightarrow> f ` A \<le> B \<Longrightarrow> finite B \<Longrightarrow> finite A"
using finite_imageD finite_subset by blast
lemma card_vimage_inj_on_le:
assumes "inj_on f D" "finite A"
shows "card (f-`A \<inter> D) \<le> card A"
proof (rule card_inj_on_le)
show "inj_on f (f -` A \<inter> D)"
by (blast intro: assms inj_on_subset)
qed (use assms in auto)
lemma card_vimage_inj: "inj f \<Longrightarrow> A \<subseteq> range f \<Longrightarrow> card (f -` A) = card A"
by (auto 4 3 simp: subset_image_iff inj_vimage_image_eq
intro: card_image[symmetric, OF subset_inj_on])
lemma card_inverse[simp]: "card (R\<inverse>) = card R"
proof -
have *: "\<And>R. prod.swap ` R = R\<inverse>" by auto
{
assume "\<not>finite R"
hence ?thesis
by auto
} moreover {
assume "finite R"
with card_image_le[of R prod.swap] card_image_le[of "R\<inverse>" prod.swap]
have ?thesis by (auto simp: * )
} ultimately show ?thesis by blast
qed
subsubsection \<open>Pigeonhole Principles\<close>
lemma pigeonhole: "card A > card (f ` A) \<Longrightarrow> \<not> inj_on f A "
by (auto dest: card_image less_irrefl_nat)
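text \<open>For illustration, the counting form above can be read back as the familiar
  statement that two distinct arguments must share their image:\<close>
lemma
  assumes "card (f ` A) < card A"
  shows "\<exists>x\<in>A. \<exists>y\<in>A. f x = f y \<and> x \<noteq> y"
proof -
  from assms have "\<not> inj_on f A"
    by (rule pigeonhole)
  then show ?thesis
    by (auto simp: inj_on_def)
qed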
lemma pigeonhole_infinite:
assumes "\<not> finite A" and "finite (f`A)"
shows "\<exists>a0\<in>A. \<not> finite {a\<in>A. f a = f a0}"
using assms(2,1)
proof (induct "f`A" arbitrary: A rule: finite_induct)
case empty
then show ?case by simp
next
case (insert b F)
show ?case
proof (cases "finite {a\<in>A. f a = b}")
case True
with \<open>\<not> finite A\<close> have "\<not> finite (A - {a\<in>A. f a = b})"
by simp
also have "A - {a\<in>A. f a = b} = {a\<in>A. f a \<noteq> b}"
by blast
finally have "\<not> finite {a\<in>A. f a \<noteq> b}" .
from insert(3)[OF _ this] insert(2,4) show ?thesis
by simp (blast intro: rev_finite_subset)
next
case False
then have "{a \<in> A. f a = b} \<noteq> {}" by force
with False show ?thesis by blast
qed
qed
lemma pigeonhole_infinite_rel:
assumes "\<not> finite A"
and "finite B"
and "\<forall>a\<in>A. \<exists>b\<in>B. R a b"
shows "\<exists>b\<in>B. \<not> finite {a:A. R a b}"
proof -
let ?F = "\<lambda>a. {b\<in>B. R a b}"
from finite_Pow_iff[THEN iffD2, OF \<open>finite B\<close>] have "finite (?F ` A)"
by (blast intro: rev_finite_subset)
from pigeonhole_infinite [where f = ?F, OF assms(1) this]
obtain a0 where "a0 \<in> A" and infinite: "\<not> finite {a\<in>A. ?F a = ?F a0}" ..
obtain b0 where "b0 \<in> B" and "R a0 b0"
using \<open>a0 \<in> A\<close> assms(3) by blast
have "finite {a\<in>A. ?F a = ?F a0}" if "finite {a\<in>A. R a b0}"
using \<open>b0 \<in> B\<close> \<open>R a0 b0\<close> that by (blast intro: rev_finite_subset)
with infinite \<open>b0 \<in> B\<close> show ?thesis
by blast
qed
subsubsection \<open>Cardinality of sums\<close>
lemma card_Plus:
assumes "finite A" "finite B"
shows "card (A <+> B) = card A + card B"
proof -
have "Inl`A \<inter> Inr`B = {}" by fast
with assms show ?thesis
by (simp add: Plus_def card_Un_disjoint card_image)
qed
lemma card_Plus_conv_if:
"card (A <+> B) = (if finite A \<and> finite B then card A + card B else 0)"
by (auto simp add: card_Plus)
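text \<open>For illustration, summing a finite set with itself doubles the cardinality:\<close>
lemma "finite A \<Longrightarrow> card (A <+> A) = card A + card A"
  by (simp add: card_Plus)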
text \<open>This relates to equivalence classes; it is based on a theorem of F. Kammüller.\<close>
lemma dvd_partition:
assumes f: "finite (\<Union>C)"
and "\<forall>c\<in>C. k dvd card c" "\<forall>c1\<in>C. \<forall>c2\<in>C. c1 \<noteq> c2 \<longrightarrow> c1 \<inter> c2 = {}"
shows "k dvd card (\<Union>C)"
proof -
have "finite C"
by (rule finite_UnionD [OF f])
then show ?thesis
using assms
proof (induct rule: finite_induct)
case empty
show ?case by simp
next
case (insert c C)
then have "c \<inter> \<Union>C = {}"
by auto
with insert show ?case
by (simp add: card_Un_disjoint)
qed
qed
subsection \<open>Minimal and maximal elements of finite sets\<close>
context begin
qualified lemma
assumes "finite A" and "asymp_on A R" and "transp_on A R" and "\<exists>x \<in> A. P x"
shows
bex_min_element_with_property: "\<exists>x \<in> A. P x \<and> (\<forall>y \<in> A. R y x \<longrightarrow> \<not> P y)" and
bex_max_element_with_property: "\<exists>x \<in> A. P x \<and> (\<forall>y \<in> A. R x y \<longrightarrow> \<not> P y)"
unfolding atomize_conj
using assms
proof (induction A rule: finite_induct)
case empty
hence False
by simp_all
thus ?case ..
next
case (insert x F)
from insert.prems have "asymp_on F R"
using asymp_on_subset by blast
from insert.prems have "transp_on F R"
using transp_on_subset by blast
show ?case
proof (cases "P x")
case True
show ?thesis
proof (cases "\<exists>a\<in>F. P a")
case True
with insert.IH obtain min max where
"min \<in> F" and "P min" and "\<forall>z \<in> F. R z min \<longrightarrow> \<not> P z"
"max \<in> F" and "P max" and "\<forall>z \<in> F. R max z \<longrightarrow> \<not> P z"
using \<open>asymp_on F R\<close> \<open>transp_on F R\<close> by auto
show ?thesis
proof (rule conjI)
show "\<exists>y \<in> insert x F. P y \<and> (\<forall>z \<in> insert x F. R y z \<longrightarrow> \<not> P z)"
proof (cases "R max x")
case True
show ?thesis
proof (intro bexI conjI ballI impI)
show "x \<in> insert x F"
by simp
next
show "P x"
using \<open>P x\<close> by simp
next
fix z assume "z \<in> insert x F" and "R x z"
hence "z = x \<or> z \<in> F"
by simp
thus "\<not> P z"
proof (rule disjE)
assume "z = x"
hence "R x x"
using \<open>R x z\<close> by simp
moreover have "\<not> R x x"
using \<open>asymp_on (insert x F) R\<close>[THEN irreflp_on_if_asymp_on, THEN irreflp_onD]
by simp
ultimately have False
by simp
thus ?thesis ..
next
assume "z \<in> F"
moreover have "R max z"
using \<open>R max x\<close> \<open>R x z\<close>
using \<open>transp_on (insert x F) R\<close>[THEN transp_onD, of max x z]
using \<open>max \<in> F\<close> \<open>z \<in> F\<close> by simp
ultimately show ?thesis
using \<open>\<forall>z \<in> F. R max z \<longrightarrow> \<not> P z\<close> by simp
qed
qed
next
case False
show ?thesis
proof (intro bexI conjI ballI impI)
show "max \<in> insert x F"
using \<open>max \<in> F\<close> by simp
next
show "P max"
using \<open>P max\<close> by simp
next
fix z assume "z \<in> insert x F" and "R max z"
hence "z = x \<or> z \<in> F"
by simp
thus "\<not> P z"
proof (rule disjE)
assume "z = x"
hence False
using \<open>\<not> R max x\<close> \<open>R max z\<close> by simp
thus ?thesis ..
next
assume "z \<in> F"
thus ?thesis
using \<open>R max z\<close> \<open>\<forall>z\<in>F. R max z \<longrightarrow> \<not> P z\<close> by simp
qed
qed
qed
next
show "\<exists>y \<in> insert x F. P y \<and> (\<forall>z \<in> insert x F. R z y \<longrightarrow> \<not> P z)"
proof (cases "R x min")
case True
show ?thesis
proof (intro bexI conjI ballI impI)
show "x \<in> insert x F"
by simp
next
show "P x"
using \<open>P x\<close> by simp
next
fix z assume "z \<in> insert x F" and "R z x"
hence "z = x \<or> z \<in> F"
by simp
thus "\<not> P z"
proof (rule disjE)
assume "z = x"
hence "R x x"
using \<open>R z x\<close> by simp
moreover have "\<not> R x x"
using \<open>asymp_on (insert x F) R\<close>[THEN irreflp_on_if_asymp_on, THEN irreflp_onD]
by simp
ultimately have False
by simp
thus ?thesis ..
next
assume "z \<in> F"
moreover have "R z min"
using \<open>R z x\<close> \<open>R x min\<close>
using \<open>transp_on (insert x F) R\<close>[THEN transp_onD, of z x min]
using \<open>min \<in> F\<close> \<open>z \<in> F\<close> by simp
ultimately show ?thesis
using \<open>\<forall>z \<in> F. R z min \<longrightarrow> \<not> P z\<close> by simp
qed
qed
next
case False
show ?thesis
proof (intro bexI conjI ballI impI)
show "min \<in> insert x F"
using \<open>min \<in> F\<close> by simp
next
show "P min"
using \<open>P min\<close> by simp
next
fix z assume "z \<in> insert x F" and "R z min"
hence "z = x \<or> z \<in> F"
by simp
thus "\<not> P z"
proof (rule disjE)
assume "z = x"
hence False
using \<open>\<not> R x min\<close> \<open>R z min\<close> by simp
thus ?thesis ..
next
assume "z \<in> F"
thus ?thesis
using \<open>R z min\<close> \<open>\<forall>z\<in>F. R z min \<longrightarrow> \<not> P z\<close> by simp
qed
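text \<open>For illustration, on a two-element set the fold above enumerates exactly the
  diagonal pairs:\<close>
lemma "Finite_Set.fold (\<lambda>x. Set.insert (x, x)) {} {a, b} = {(a, a), (b, b)}"
proof -
  have "Finite_Set.fold (\<lambda>x. Set.insert (x, x)) {} {a, b} = Id_on {a, b}"
    by (simp add: Id_on_fold)
  also have "\<dots> = {(a, a), (b, b)}"
    by (auto simp: Id_on_def)
  finally show ?thesis .
qed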
qed
qed
qed
next
case False
then show ?thesis
using \<open>\<exists>a\<in>insert x F. P a\<close>
using \<open>asymp_on (insert x F) R\<close>[THEN asymp_onD, of x] insert_iff[of _ x F]
by blast
qed
next
case False
with insert.prems have "\<exists>x \<in> F. P x"
by simp
with insert.IH have
"\<exists>y \<in> F. P y \<and> (\<forall>z\<in>F. R z y \<longrightarrow> \<not> P z)"
"\<exists>y \<in> F. P y \<and> (\<forall>z\<in>F. R y z \<longrightarrow> \<not> P z)"
using \<open>asymp_on F R\<close> \<open>transp_on F R\<close> by auto
thus ?thesis
using False by auto
qed
qed
qualified lemma
assumes "finite A" and "asymp_on A R" and "transp_on A R" and "A \<noteq> {}"
shows
bex_min_element: "\<exists>m \<in> A. \<forall>x \<in> A. x \<noteq> m \<longrightarrow> \<not> R x m" and
bex_max_element: "\<exists>m \<in> A. \<forall>x \<in> A. x \<noteq> m \<longrightarrow> \<not> R m x"
using \<open>A \<noteq> {}\<close>
bex_min_element_with_property[OF assms(1,2,3), of "\<lambda>_. True", simplified]
bex_max_element_with_property[OF assms(1,2,3), of "\<lambda>_. True", simplified]
by blast+
end
text \<open>The following alternative form might sometimes be easier to work with.\<close>
lemma is_min_element_in_set_iff:
"asymp_on A R \<Longrightarrow> (\<forall>y \<in> A. y \<noteq> x \<longrightarrow> \<not> R y x) \<longleftrightarrow> (\<forall>y. R y x \<longrightarrow> y \<notin> A)"
by (auto dest: asymp_onD)
lemma is_max_element_in_set_iff:
"asymp_on A R \<Longrightarrow> (\<forall>y \<in> A. y \<noteq> x \<longrightarrow> \<not> R x y) \<longleftrightarrow> (\<forall>y. R x y \<longrightarrow> y \<notin> A)"
by (auto dest: asymp_onD)
context begin
qualified lemma
assumes "finite A" and "A \<noteq> {}" and "transp_on A R" and "totalp_on A R"
shows
bex_least_element: "\<exists>l \<in> A. \<forall>x \<in> A. x \<noteq> l \<longrightarrow> R l x" and
bex_greatest_element: "\<exists>g \<in> A. \<forall>x \<in> A. x \<noteq> g \<longrightarrow> R x g"
unfolding atomize_conj
using assms
proof (induction A rule: finite_induct)
case empty
hence False by simp
thus ?case ..
next
case (insert a A')
from insert.prems(2) have transp_on_A': "transp_on A' R"
by (auto intro: transp_onI dest: transp_onD)
from insert.prems(3) have
totalp_on_a_A'_raw: "\<forall>y \<in> A'. a \<noteq> y \<longrightarrow> R a y \<or> R y a" and
totalp_on_A': "totalp_on A' R"
by (simp_all add: totalp_on_def)
show ?case
proof (cases "A' = {}")
case True
thus ?thesis by simp
next
case False
then obtain least greatest where
"least \<in> A'" and least_of_A': "\<forall>x\<in>A'. x \<noteq> least \<longrightarrow> R least x" and
"greatest \<in> A'" and greatest_of_A': "\<forall>x\<in>A'. x \<noteq> greatest \<longrightarrow> R x greatest"
using insert.IH[OF _ transp_on_A' totalp_on_A'] by auto
show ?thesis
proof (rule conjI)
show "\<exists>l\<in>insert a A'. \<forall>x\<in>insert a A'. x \<noteq> l \<longrightarrow> R l x"
proof (cases "R a least")
case True
show ?thesis
proof (intro bexI ballI impI)
show "a \<in> insert a A'"
by simp
next
fix x
show "\<And>x. x \<in> insert a A' \<Longrightarrow> x \<noteq> a \<Longrightarrow> R a x"
using True \<open>least \<in> A'\<close> least_of_A'
using insert.prems(2)[THEN transp_onD, of a least]
by auto
qed
next
case False
show ?thesis
proof (intro bexI ballI impI)
show "least \<in> insert a A'"
using \<open>least \<in> A'\<close> by simp
next
fix x
show "x \<in> insert a A' \<Longrightarrow> x \<noteq> least \<Longrightarrow> R least x"
using False \<open>least \<in> A'\<close> least_of_A' totalp_on_a_A'_raw
by (cases "x = a") auto
qed
qed
next
show "\<exists>g \<in> insert a A'. \<forall>x \<in> insert a A'. x \<noteq> g \<longrightarrow> R x g"
proof (cases "R greatest a")
case True
show ?thesis
proof (intro bexI ballI impI)
show "a \<in> insert a A'"
by simp
next
fix x
show "\<And>x. x \<in> insert a A' \<Longrightarrow> x \<noteq> a \<Longrightarrow> R x a"
using True \<open>greatest \<in> A'\<close> greatest_of_A'
using insert.prems(2)[THEN transp_onD, of _ greatest a]
by auto
qed
next
case False
show ?thesis
proof (intro bexI ballI impI)
show "greatest \<in> insert a A'"
using \<open>greatest \<in> A'\<close> by simp
next
fix x
show "x \<in> insert a A' \<Longrightarrow> x \<noteq> greatest \<Longrightarrow> R x greatest"
using False \<open>greatest \<in> A'\<close> greatest_of_A' totalp_on_a_A'_raw
by (cases "x = a") auto
qed
qed
qed
qed
qed
end
subsubsection \<open>Finite orders\<close>
context order
begin
lemma finite_has_maximal:
assumes "finite A" and "A \<noteq> {}"
shows "\<exists> m \<in> A. \<forall> b \<in> A. m \<le> b \<longrightarrow> m = b"
proof -
obtain m where "m \<in> A" and m_is_max: "\<forall>x\<in>A. x \<noteq> m \<longrightarrow> \<not> m < x"
using Finite_Set.bex_max_element[OF \<open>finite A\<close> _ _ \<open>A \<noteq> {}\<close>, of "(<)"] by auto
moreover have "\<forall>b \<in> A. m \<le> b \<longrightarrow> m = b"
using m_is_max by (auto simp: le_less)
ultimately show ?thesis
by auto
qed
lemma finite_has_maximal2:
"\<lbrakk> finite A; a \<in> A \<rbrakk> \<Longrightarrow> \<exists> m \<in> A. a \<le> m \<and> (\<forall> b \<in> A. m \<le> b \<longrightarrow> m = b)"
using finite_has_maximal[of "{b \<in> A. a \<le> b}"] by fastforce
lemma finite_has_minimal:
assumes "finite A" and "A \<noteq> {}"
shows "\<exists> m \<in> A. \<forall> b \<in> A. b \<le> m \<longrightarrow> m = b"
proof -
obtain m where "m \<in> A" and m_is_min: "\<forall>x\<in>A. x \<noteq> m \<longrightarrow> \<not> x < m"
using Finite_Set.bex_min_element[OF \<open>finite A\<close> _ _ \<open>A \<noteq> {}\<close>, of "(<)"] by auto
moreover have "\<forall>b \<in> A. b \<le> m \<longrightarrow> m = b"
using m_is_min by (auto simp: le_less)
ultimately show ?thesis
by auto
qed
lemma finite_has_minimal2:
"\<lbrakk> finite A; a \<in> A \<rbrakk> \<Longrightarrow> \<exists> m \<in> A. m \<le> a \<and> (\<forall> b \<in> A. b \<le> m \<longrightarrow> m = b)"
using finite_has_minimal[of "{b \<in> A. b \<le> a}"] by fastforce
end
subsubsection \<open>Relating injectivity and surjectivity\<close>
lemma finite_surj_inj:
assumes "finite A" "A \<subseteq> f ` A"
shows "inj_on f A"
proof -
have "f ` A = A"
by (rule card_seteq [THEN sym]) (auto simp add: assms card_image_le)
then show ?thesis using assms
by (simp add: eq_card_imp_inj_on)
qed
lemma finite_UNIV_surj_inj: "finite(UNIV:: 'a set) \<Longrightarrow> surj f \<Longrightarrow> inj f"
for f :: "'a \<Rightarrow> 'a"
by (blast intro: finite_surj_inj subset_UNIV)
lemma finite_UNIV_inj_surj: "finite(UNIV:: 'a set) \<Longrightarrow> inj f \<Longrightarrow> surj f"
for f :: "'a \<Rightarrow> 'a"
by (fastforce simp:surj_def dest!: endo_inj_surj)
lemma surjective_iff_injective_gen:
assumes fS: "finite S"
and fT: "finite T"
and c: "card S = card T"
and ST: "f ` S \<subseteq> T"
shows "(\<forall>y \<in> T. \<exists>x \<in> S. f x = y) \<longleftrightarrow> inj_on f S"
(is "?lhs \<longleftrightarrow> ?rhs")
proof
assume h: "?lhs"
{
fix x y
assume x: "x \<in> S"
assume y: "y \<in> S"
assume f: "f x = f y"
from x fS have S0: "card S \<noteq> 0"
by auto
have "x = y"
proof (rule ccontr)
assume xy: "\<not> ?thesis"
have th: "card S \<le> card (f ` (S - {y}))"
unfolding c
proof (rule card_mono)
show "finite (f ` (S - {y}))"
by (simp add: fS)
have "\<lbrakk>x \<noteq> y; x \<in> S; z \<in> S; f x = f y\<rbrakk>
\<Longrightarrow> \<exists>x \<in> S. x \<noteq> y \<and> f z = f x" for z
by (cases "z = y \<longrightarrow> z = x") auto
then show "T \<subseteq> f ` (S - {y})"
using h xy x y f by fastforce
qed
also have " \<dots> \<le> card (S - {y})"
by (simp add: card_image_le fS)
also have "\<dots> \<le> card S - 1" using y fS by simp
finally show False using S0 by arith
qed
}
then show ?rhs
unfolding inj_on_def by blast
next
assume h: ?rhs
have "f ` S = T"
by (simp add: ST c card_image card_subset_eq fT h)
then show ?lhs by blast
qed
hide_const (open) Finite_Set.fold
subsection \<open>Infinite Sets\<close>
text \<open>
Some elementary facts about infinite sets, mostly by Stephan Merz.
Beware! Because "infinite" merely abbreviates a negation, these
lemmas may not work well with \<open>blast\<close>.
\<close>
abbreviation infinite :: "'a set \<Rightarrow> bool"
where "infinite S \<equiv> \<not> finite S"
text \<open>
Infinite sets are non-empty, and if we remove some elements from an
infinite set, the result is still infinite.
\<close>
lemma infinite_UNIV_nat [iff]: "infinite (UNIV :: nat set)"
proof
assume "finite (UNIV :: nat set)"
with finite_UNIV_inj_surj [of Suc] show False
by simp (blast dest: Suc_neq_Zero surjD)
qed
lemma infinite_UNIV_char_0: "infinite (UNIV :: 'a::semiring_char_0 set)"
proof
assume "finite (UNIV :: 'a set)"
with subset_UNIV have "finite (range of_nat :: 'a set)"
by (rule finite_subset)
moreover have "inj (of_nat :: nat \<Rightarrow> 'a)"
by (simp add: inj_on_def)
ultimately have "finite (UNIV :: nat set)"
by (rule finite_imageD)
then show False
by simp
qed
lemma infinite_imp_nonempty: "infinite S \<Longrightarrow> S \<noteq> {}"
by auto
lemma infinite_remove: "infinite S \<Longrightarrow> infinite (S - {a})"
by simp
lemma Diff_infinite_finite:
assumes "finite T" "infinite S"
shows "infinite (S - T)"
using \<open>finite T\<close>
proof induct
from \<open>infinite S\<close> show "infinite (S - {})"
by auto
next
fix T x
assume ih: "infinite (S - T)"
have "S - (insert x T) = (S - T) - {x}"
by (rule Diff_insert)
with ih show "infinite (S - (insert x T))"
by (simp add: infinite_remove)
qed
lemma Un_infinite: "infinite S \<Longrightarrow> infinite (S \<union> T)"
by simp
lemma infinite_Un: "infinite (S \<union> T) \<longleftrightarrow> infinite S \<or> infinite T"
by simp
lemma infinite_super:
assumes "S \<subseteq> T"
and "infinite S"
shows "infinite T"
proof
assume "finite T"
with \<open>S \<subseteq> T\<close> have "finite S" by (simp add: finite_subset)
with \<open>infinite S\<close> show False by simp
qed
proposition infinite_coinduct [consumes 1, case_names infinite]:
assumes "X A"
and step: "\<And>A. X A \<Longrightarrow> \<exists>x\<in>A. X (A - {x}) \<or> infinite (A - {x})"
shows "infinite A"
proof
assume "finite A"
then show False
using \<open>X A\<close>
proof (induction rule: finite_psubset_induct)
case (psubset A)
then obtain x where "x \<in> A" "X (A - {x}) \<or> infinite (A - {x})"
using local.step psubset.prems by blast
then have "X (A - {x})"
using psubset.hyps by blast
show False
proof (rule psubset.IH [where B = "A - {x}"])
show "A - {x} \<subset> A"
using \<open>x \<in> A\<close> by blast
qed fact
qed
qed
text \<open>
For any function with infinite domain and finite range there is some
element that is the image of infinitely many domain elements. In
particular, any infinite sequence of elements from a finite set
contains some element that occurs infinitely often.
\<close>
lemma inf_img_fin_dom':
assumes img: "finite (f ` A)"
and dom: "infinite A"
shows "\<exists>y \<in> f ` A. infinite (f -` {y} \<inter> A)"
proof (rule ccontr)
have "A \<subseteq> (\<Union>y\<in>f ` A. f -` {y} \<inter> A)" by auto
moreover assume "\<not> ?thesis"
with img have "finite (\<Union>y\<in>f ` A. f -` {y} \<inter> A)" by blast
ultimately have "finite A" by (rule finite_subset)
with dom show False by contradiction
qed
lemma inf_img_fin_domE':
assumes "finite (f ` A)" and "infinite A"
obtains y where "y \<in> f`A" and "infinite (f -` {y} \<inter> A)"
using assms by (blast dest: inf_img_fin_dom')
lemma inf_img_fin_dom:
assumes img: "finite (f`A)" and dom: "infinite A"
shows "\<exists>y \<in> f`A. infinite (f -` {y})"
using inf_img_fin_dom'[OF assms] by auto
lemma inf_img_fin_domE:
assumes "finite (f`A)" and "infinite A"
obtains y where "y \<in> f`A" and "infinite (f -` {y})"
using assms by (blast dest: inf_img_fin_dom)
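text \<open>
  Added usage sketch (not part of the original theory; the lemma name is ad hoc):
  the pigeonhole lemmas above, specialised to a sequence over a finite range.
\<close>
lemma example_infinitely_often:
  fixes f :: "nat \<Rightarrow> 'a"
  assumes "finite (range f)"
  shows "\<exists>y \<in> range f. infinite (f -` {y})"
  by (rule inf_img_fin_dom [OF assms infinite_UNIV_nat])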
proposition finite_image_absD: "finite (abs ` S) \<Longrightarrow> finite S"
for S :: "'a::linordered_ring set"
by (rule ccontr) (auto simp: abs_eq_iff vimage_def dest: inf_img_fin_dom)
subsection \<open>The finite powerset operator\<close>
definition Fpow :: "'a set \<Rightarrow> 'a set set"
where "Fpow A \<equiv> {X. X \<subseteq> A \<and> finite X}"
lemma Fpow_mono: "A \<subseteq> B \<Longrightarrow> Fpow A \<subseteq> Fpow B"
unfolding Fpow_def by auto
lemma empty_in_Fpow: "{} \<in> Fpow A"
unfolding Fpow_def by auto
lemma Fpow_not_empty: "Fpow A \<noteq> {}"
using empty_in_Fpow by blast
lemma Fpow_subset_Pow: "Fpow A \<subseteq> Pow A"
unfolding Fpow_def by auto
lemma Fpow_Pow_finite: "Fpow A = Pow A Int {A. finite A}"
unfolding Fpow_def Pow_def by blast
lemma inj_on_image_Fpow:
assumes "inj_on f A"
shows "inj_on (image f) (Fpow A)"
using assms Fpow_subset_Pow[of A] subset_inj_on[of "image f" "Pow A"]
inj_on_image_Pow by blast
lemma image_Fpow_mono:
assumes "f ` A \<subseteq> B"
shows "(image f) ` (Fpow A) \<subseteq> Fpow B"
using assms by(unfold Fpow_def, auto)
end
diff --git a/src/HOL/Fun.thy b/src/HOL/Fun.thy
--- a/src/HOL/Fun.thy
+++ b/src/HOL/Fun.thy
@@ -1,1410 +1,1410 @@
(* Title: HOL/Fun.thy
Author: Tobias Nipkow, Cambridge University Computer Laboratory
Author: Andrei Popescu, TU Muenchen
Copyright 1994, 2012
*)
section \<open>Notions about functions\<close>
theory Fun
imports Set
keywords "functor" :: thy_goal_defn
begin
lemma apply_inverse: "f x = u \<Longrightarrow> (\<And>x. P x \<Longrightarrow> g (f x) = x) \<Longrightarrow> P x \<Longrightarrow> x = g u"
by auto
text \<open>Uniqueness, so NOT the axiom of choice.\<close>
lemma uniq_choice: "\<forall>x. \<exists>!y. Q x y \<Longrightarrow> \<exists>f. \<forall>x. Q x (f x)"
by (force intro: theI')
lemma b_uniq_choice: "\<forall>x\<in>S. \<exists>!y. Q x y \<Longrightarrow> \<exists>f. \<forall>x\<in>S. Q x (f x)"
by (force intro: theI')
subsection \<open>The Identity Function \<open>id\<close>\<close>
definition id :: "'a \<Rightarrow> 'a"
where "id = (\<lambda>x. x)"
lemma id_apply [simp]: "id x = x"
by (simp add: id_def)
lemma image_id [simp]: "image id = id"
by (simp add: id_def fun_eq_iff)
lemma vimage_id [simp]: "vimage id = id"
by (simp add: id_def fun_eq_iff)
lemma eq_id_iff: "(\<forall>x. f x = x) \<longleftrightarrow> f = id"
by auto
code_printing
constant id \<rightharpoonup> (Haskell) "id"
subsection \<open>The Composition Operator \<open>f \<circ> g\<close>\<close>
definition comp :: "('b \<Rightarrow> 'c) \<Rightarrow> ('a \<Rightarrow> 'b) \<Rightarrow> 'a \<Rightarrow> 'c" (infixl "\<circ>" 55)
where "f \<circ> g = (\<lambda>x. f (g x))"
notation (ASCII)
comp (infixl "o" 55)
lemma comp_apply [simp]: "(f \<circ> g) x = f (g x)"
by (simp add: comp_def)
lemma comp_assoc: "(f \<circ> g) \<circ> h = f \<circ> (g \<circ> h)"
by (simp add: fun_eq_iff)
lemma id_comp [simp]: "id \<circ> g = g"
by (simp add: fun_eq_iff)
lemma comp_id [simp]: "f \<circ> id = f"
by (simp add: fun_eq_iff)
lemma comp_eq_dest: "a \<circ> b = c \<circ> d \<Longrightarrow> a (b v) = c (d v)"
by (simp add: fun_eq_iff)
lemma comp_eq_elim: "a \<circ> b = c \<circ> d \<Longrightarrow> ((\<And>v. a (b v) = c (d v)) \<Longrightarrow> R) \<Longrightarrow> R"
by (simp add: fun_eq_iff)
lemma comp_eq_dest_lhs: "a \<circ> b = c \<Longrightarrow> a (b v) = c v"
by clarsimp
lemma comp_eq_id_dest: "a \<circ> b = id \<circ> c \<Longrightarrow> a (b v) = c v"
by clarsimp
lemma image_comp: "f ` (g ` r) = (f \<circ> g) ` r"
by auto
lemma vimage_comp: "f -` (g -` x) = (g \<circ> f) -` x"
by auto
lemma image_eq_imp_comp: "f ` A = g ` B \<Longrightarrow> (h \<circ> f) ` A = (h \<circ> g) ` B"
by (auto simp: comp_def elim!: equalityE)
lemma image_bind: "f ` (Set.bind A g) = Set.bind A ((`) f \<circ> g)"
by (auto simp add: Set.bind_def)
lemma bind_image: "Set.bind (f ` A) g = Set.bind A (g \<circ> f)"
by (auto simp add: Set.bind_def)
lemma (in group_add) minus_comp_minus [simp]: "uminus \<circ> uminus = id"
by (simp add: fun_eq_iff)
lemma (in boolean_algebra) minus_comp_minus [simp]: "uminus \<circ> uminus = id"
by (simp add: fun_eq_iff)
code_printing
constant comp \<rightharpoonup> (SML) infixl 5 "o" and (Haskell) infixr 9 "."
subsection \<open>The Forward Composition Operator \<open>fcomp\<close>\<close>
definition fcomp :: "('a \<Rightarrow> 'b) \<Rightarrow> ('b \<Rightarrow> 'c) \<Rightarrow> 'a \<Rightarrow> 'c" (infixl "\<circ>>" 60)
where "f \<circ>> g = (\<lambda>x. g (f x))"
lemma fcomp_apply [simp]: "(f \<circ>> g) x = g (f x)"
by (simp add: fcomp_def)
lemma fcomp_assoc: "(f \<circ>> g) \<circ>> h = f \<circ>> (g \<circ>> h)"
by (simp add: fcomp_def)
lemma id_fcomp [simp]: "id \<circ>> g = g"
by (simp add: fcomp_def)
lemma fcomp_id [simp]: "f \<circ>> id = f"
by (simp add: fcomp_def)
lemma fcomp_comp: "fcomp f g = comp g f"
by (simp add: ext)
code_printing
constant fcomp \<rightharpoonup> (Eval) infixl 1 "#>"
no_notation fcomp (infixl "\<circ>>" 60)
subsection \<open>Mapping functions\<close>
definition map_fun :: "('c \<Rightarrow> 'a) \<Rightarrow> ('b \<Rightarrow> 'd) \<Rightarrow> ('a \<Rightarrow> 'b) \<Rightarrow> 'c \<Rightarrow> 'd"
where "map_fun f g h = g \<circ> h \<circ> f"
lemma map_fun_apply [simp]: "map_fun f g h x = g (h (f x))"
by (simp add: map_fun_def)
subsection \<open>Injectivity and Bijectivity\<close>
definition inj_on :: "('a \<Rightarrow> 'b) \<Rightarrow> 'a set \<Rightarrow> bool" \<comment> \<open>injective\<close>
where "inj_on f A \<longleftrightarrow> (\<forall>x\<in>A. \<forall>y\<in>A. f x = f y \<longrightarrow> x = y)"
definition bij_betw :: "('a \<Rightarrow> 'b) \<Rightarrow> 'a set \<Rightarrow> 'b set \<Rightarrow> bool" \<comment> \<open>bijective\<close>
where "bij_betw f A B \<longleftrightarrow> inj_on f A \<and> f ` A = B"
text \<open>
A common special case: functions injective, surjective or bijective over
the entire domain type.
\<close>
abbreviation inj :: "('a \<Rightarrow> 'b) \<Rightarrow> bool"
where "inj f \<equiv> inj_on f UNIV"
abbreviation surj :: "('a \<Rightarrow> 'b) \<Rightarrow> bool"
where "surj f \<equiv> range f = UNIV"
translations \<comment> \<open>The negated case:\<close>
"\<not> CONST surj f" \<leftharpoondown> "CONST range f \<noteq> CONST UNIV"
abbreviation bij :: "('a \<Rightarrow> 'b) \<Rightarrow> bool"
where "bij f \<equiv> bij_betw f UNIV UNIV"
lemma inj_def: "inj f \<longleftrightarrow> (\<forall>x y. f x = f y \<longrightarrow> x = y)"
unfolding inj_on_def by blast
lemma injI: "(\<And>x y. f x = f y \<Longrightarrow> x = y) \<Longrightarrow> inj f"
unfolding inj_def by blast
theorem range_ex1_eq: "inj f \<Longrightarrow> b \<in> range f \<longleftrightarrow> (\<exists>!x. b = f x)"
unfolding inj_def by blast
lemma injD: "inj f \<Longrightarrow> f x = f y \<Longrightarrow> x = y"
by (simp add: inj_def)
lemma inj_on_eq_iff: "inj_on f A \<Longrightarrow> x \<in> A \<Longrightarrow> y \<in> A \<Longrightarrow> f x = f y \<longleftrightarrow> x = y"
by (auto simp: inj_on_def)
lemma inj_on_cong: "(\<And>a. a \<in> A \<Longrightarrow> f a = g a) \<Longrightarrow> inj_on f A \<longleftrightarrow> inj_on g A"
by (auto simp: inj_on_def)
lemma image_strict_mono: "inj_on f B \<Longrightarrow> A \<subset> B \<Longrightarrow> f ` A \<subset> f ` B"
unfolding inj_on_def by blast
lemma inj_compose: "inj f \<Longrightarrow> inj g \<Longrightarrow> inj (f \<circ> g)"
by (simp add: inj_def)
lemma inj_fun: "inj f \<Longrightarrow> inj (\<lambda>x y. f x)"
by (simp add: inj_def fun_eq_iff)
lemma inj_eq: "inj f \<Longrightarrow> f x = f y \<longleftrightarrow> x = y"
by (simp add: inj_on_eq_iff)
lemma inj_on_iff_Uniq: "inj_on f A \<longleftrightarrow> (\<forall>x\<in>A. \<exists>\<^sub>\<le>\<^sub>1y. y\<in>A \<and> f x = f y)"
by (auto simp: Uniq_def inj_on_def)
lemma inj_on_id[simp]: "inj_on id A"
by (simp add: inj_on_def)
lemma inj_on_id2[simp]: "inj_on (\<lambda>x. x) A"
by (simp add: inj_on_def)
lemma inj_on_Int: "inj_on f A \<or> inj_on f B \<Longrightarrow> inj_on f (A \<inter> B)"
unfolding inj_on_def by blast
lemma surj_id: "surj id"
by simp
lemma bij_id[simp]: "bij id"
by (simp add: bij_betw_def)
lemma bij_uminus: "bij (uminus :: 'a \<Rightarrow> 'a::group_add)"
unfolding bij_betw_def inj_on_def
by (force intro: minus_minus [symmetric])
lemma bij_betwE: "bij_betw f A B \<Longrightarrow> \<forall>a\<in>A. f a \<in> B"
unfolding bij_betw_def by auto
lemma inj_onI [intro?]: "(\<And>x y. x \<in> A \<Longrightarrow> y \<in> A \<Longrightarrow> f x = f y \<Longrightarrow> x = y) \<Longrightarrow> inj_on f A"
by (simp add: inj_on_def)
lemma inj_on_inverseI: "(\<And>x. x \<in> A \<Longrightarrow> g (f x) = x) \<Longrightarrow> inj_on f A"
by (auto dest: arg_cong [of concl: g] simp add: inj_on_def)
lemma inj_onD: "inj_on f A \<Longrightarrow> f x = f y \<Longrightarrow> x \<in> A \<Longrightarrow> y \<in> A \<Longrightarrow> x = y"
unfolding inj_on_def by blast
lemma inj_on_subset:
assumes "inj_on f A"
and "B \<subseteq> A"
shows "inj_on f B"
proof (rule inj_onI)
fix a b
assume "a \<in> B" and "b \<in> B"
with assms have "a \<in> A" and "b \<in> A"
by auto
moreover assume "f a = f b"
ultimately show "a = b"
using assms by (auto dest: inj_onD)
qed
lemma comp_inj_on: "inj_on f A \<Longrightarrow> inj_on g (f ` A) \<Longrightarrow> inj_on (g \<circ> f) A"
by (simp add: comp_def inj_on_def)
lemma inj_on_imageI: "inj_on (g \<circ> f) A \<Longrightarrow> inj_on g (f ` A)"
by (auto simp add: inj_on_def)
lemma inj_on_image_iff:
"\<forall>x\<in>A. \<forall>y\<in>A. g (f x) = g (f y) \<longleftrightarrow> g x = g y \<Longrightarrow> inj_on f A \<Longrightarrow> inj_on g (f ` A) \<longleftrightarrow> inj_on g A"
unfolding inj_on_def by blast
lemma inj_on_contraD: "inj_on f A \<Longrightarrow> x \<noteq> y \<Longrightarrow> x \<in> A \<Longrightarrow> y \<in> A \<Longrightarrow> f x \<noteq> f y"
unfolding inj_on_def by blast
lemma inj_singleton [simp]: "inj_on (\<lambda>x. {x}) A"
by (simp add: inj_on_def)
lemma inj_on_empty[iff]: "inj_on f {}"
by (simp add: inj_on_def)
lemma subset_inj_on: "inj_on f B \<Longrightarrow> A \<subseteq> B \<Longrightarrow> inj_on f A"
unfolding inj_on_def by blast
lemma inj_on_Un: "inj_on f (A \<union> B) \<longleftrightarrow> inj_on f A \<and> inj_on f B \<and> f ` (A - B) \<inter> f ` (B - A) = {}"
unfolding inj_on_def by (blast intro: sym)
lemma inj_on_insert [iff]: "inj_on f (insert a A) \<longleftrightarrow> inj_on f A \<and> f a \<notin> f ` (A - {a})"
unfolding inj_on_def by (blast intro: sym)
lemma inj_on_diff: "inj_on f A \<Longrightarrow> inj_on f (A - B)"
unfolding inj_on_def by blast
lemma comp_inj_on_iff: "inj_on f A \<Longrightarrow> inj_on f' (f ` A) \<longleftrightarrow> inj_on (f' \<circ> f) A"
by (auto simp: comp_inj_on inj_on_def)
lemma inj_on_imageI2: "inj_on (f' \<circ> f) A \<Longrightarrow> inj_on f A"
by (auto simp: comp_inj_on inj_on_def)
lemma inj_img_insertE:
assumes "inj_on f A"
assumes "x \<notin> B"
and "insert x B = f ` A"
obtains x' A' where "x' \<notin> A'" and "A = insert x' A'" and "x = f x'" and "B = f ` A'"
proof -
from assms have "x \<in> f ` A" by auto
then obtain x' where *: "x' \<in> A" "x = f x'" by auto
then have A: "A = insert x' (A - {x'})" by auto
with assms * have B: "B = f ` (A - {x'})" by (auto dest: inj_on_contraD)
have "x' \<notin> A - {x'}" by simp
from this A \<open>x = f x'\<close> B show ?thesis ..
qed
lemma linorder_inj_onI:
fixes A :: "'a::order set"
assumes ne: "\<And>x y. \<lbrakk>x < y; x\<in>A; y\<in>A\<rbrakk> \<Longrightarrow> f x \<noteq> f y" and lin: "\<And>x y. \<lbrakk>x\<in>A; y\<in>A\<rbrakk> \<Longrightarrow> x\<le>y \<or> y\<le>x"
shows "inj_on f A"
proof (rule inj_onI)
fix x y
assume eq: "f x = f y" and "x\<in>A" "y\<in>A"
then show "x = y"
using lin [of x y] ne by (force simp: dual_order.order_iff_strict)
qed
lemma linorder_inj_onI':
fixes A :: "'a :: linorder set"
assumes "\<And>i j. i \<in> A \<Longrightarrow> j \<in> A \<Longrightarrow> i < j \<Longrightarrow> f i \<noteq> f j"
shows "inj_on f A"
by (intro linorder_inj_onI) (auto simp add: assms)
lemma linorder_injI:
assumes "\<And>x y::'a::linorder. x < y \<Longrightarrow> f x \<noteq> f y"
shows "inj f"
\<comment> \<open>Courtesy of Stephan Merz\<close>
using assms by (simp add: linorder_inj_onI')
lemma inj_on_image_Pow: "inj_on f A \<Longrightarrow>inj_on (image f) (Pow A)"
unfolding Pow_def inj_on_def by blast
lemma bij_betw_image_Pow: "bij_betw f A B \<Longrightarrow> bij_betw (image f) (Pow A) (Pow B)"
by (auto simp add: bij_betw_def inj_on_image_Pow image_Pow_surj)
lemma surj_def: "surj f \<longleftrightarrow> (\<forall>y. \<exists>x. y = f x)"
by auto
lemma surjI:
assumes "\<And>x. g (f x) = x"
shows "surj g"
using assms [symmetric] by auto
lemma surjD: "surj f \<Longrightarrow> \<exists>x. y = f x"
by (simp add: surj_def)
lemma surjE: "surj f \<Longrightarrow> (\<And>x. y = f x \<Longrightarrow> C) \<Longrightarrow> C"
by (simp add: surj_def) blast
lemma comp_surj: "surj f \<Longrightarrow> surj g \<Longrightarrow> surj (g \<circ> f)"
using image_comp [of g f UNIV] by simp
lemma bij_betw_imageI: "inj_on f A \<Longrightarrow> f ` A = B \<Longrightarrow> bij_betw f A B"
unfolding bij_betw_def by clarify
lemma bij_betw_imp_surj_on: "bij_betw f A B \<Longrightarrow> f ` A = B"
unfolding bij_betw_def by clarify
lemma bij_betw_imp_surj: "bij_betw f A UNIV \<Longrightarrow> surj f"
unfolding bij_betw_def by auto
lemma bij_betw_empty1: "bij_betw f {} A \<Longrightarrow> A = {}"
unfolding bij_betw_def by blast
lemma bij_betw_empty2: "bij_betw f A {} \<Longrightarrow> A = {}"
unfolding bij_betw_def by blast
lemma inj_on_imp_bij_betw: "inj_on f A \<Longrightarrow> bij_betw f A (f ` A)"
unfolding bij_betw_def by simp
lemma bij_betw_DiffI:
assumes "bij_betw f A B" "bij_betw f C D" "C \<subseteq> A" "D \<subseteq> B"
shows "bij_betw f (A - C) (B - D)"
using assms unfolding bij_betw_def inj_on_def by auto
lemma bij_betw_singleton_iff [simp]: "bij_betw f {x} {y} \<longleftrightarrow> f x = y"
by (auto simp: bij_betw_def)
lemma bij_betw_singletonI [intro]: "f x = y \<Longrightarrow> bij_betw f {x} {y}"
by auto
lemma bij_betw_apply: "\<lbrakk>bij_betw f A B; a \<in> A\<rbrakk> \<Longrightarrow> f a \<in> B"
unfolding bij_betw_def by auto
lemma bij_def: "bij f \<longleftrightarrow> inj f \<and> surj f"
by (rule bij_betw_def)
lemma bijI: "inj f \<Longrightarrow> surj f \<Longrightarrow> bij f"
by (rule bij_betw_imageI)
lemma bij_is_inj: "bij f \<Longrightarrow> inj f"
by (simp add: bij_def)
lemma bij_is_surj: "bij f \<Longrightarrow> surj f"
by (simp add: bij_def)
lemma bij_betw_imp_inj_on: "bij_betw f A B \<Longrightarrow> inj_on f A"
by (simp add: bij_betw_def)
lemma bij_betw_trans: "bij_betw f A B \<Longrightarrow> bij_betw g B C \<Longrightarrow> bij_betw (g \<circ> f) A C"
by (auto simp add:bij_betw_def comp_inj_on)
lemma bij_comp: "bij f \<Longrightarrow> bij g \<Longrightarrow> bij (g \<circ> f)"
by (rule bij_betw_trans)
lemma bij_betw_comp_iff: "bij_betw f A A' \<Longrightarrow> bij_betw f' A' A'' \<longleftrightarrow> bij_betw (f' \<circ> f) A A''"
by (auto simp add: bij_betw_def inj_on_def)
lemma bij_betw_comp_iff2:
assumes bij: "bij_betw f' A' A''"
and img: "f ` A \<le> A'"
shows "bij_betw f A A' \<longleftrightarrow> bij_betw (f' \<circ> f) A A''" (is "?L \<longleftrightarrow> ?R")
proof
assume "?L"
then show "?R"
using assms by (auto simp add: bij_betw_comp_iff)
next
assume *: "?R"
have "inj_on (f' \<circ> f) A \<Longrightarrow> inj_on f A"
using inj_on_imageI2 by blast
moreover have "A' \<subseteq> f ` A"
proof
fix a'
assume **: "a' \<in> A'"
with bij have "f' a' \<in> A''"
unfolding bij_betw_def by auto
with * obtain a where 1: "a \<in> A \<and> f' (f a) = f' a'"
unfolding bij_betw_def by force
with img have "f a \<in> A'" by auto
with bij ** 1 have "f a = a'"
unfolding bij_betw_def inj_on_def by auto
with 1 show "a' \<in> f ` A" by auto
qed
ultimately show "?L"
using img * by (auto simp add: bij_betw_def)
qed
lemma bij_betw_inv:
assumes "bij_betw f A B"
shows "\<exists>g. bij_betw g B A"
proof -
have i: "inj_on f A" and s: "f ` A = B"
using assms by (auto simp: bij_betw_def)
let ?P = "\<lambda>b a. a \<in> A \<and> f a = b"
let ?g = "\<lambda>b. The (?P b)"
have g: "?g b = a" if P: "?P b a" for a b
proof -
from that s have ex1: "\<exists>a. ?P b a" by blast
then have uex1: "\<exists>!a. ?P b a" by (blast dest:inj_onD[OF i])
then show ?thesis
using the1_equality[OF uex1, OF P] P by simp
qed
have "inj_on ?g B"
proof (rule inj_onI)
fix x y
assume "x \<in> B" "y \<in> B" "?g x = ?g y"
from s \<open>x \<in> B\<close> obtain a1 where a1: "?P x a1" by blast
from s \<open>y \<in> B\<close> obtain a2 where a2: "?P y a2" by blast
from g [OF a1] a1 g [OF a2] a2 \<open>?g x = ?g y\<close> show "x = y" by simp
qed
moreover have "?g ` B = A"
proof safe
fix b
assume "b \<in> B"
with s obtain a where P: "?P b a" by blast
with g[OF P] show "?g b \<in> A" by auto
next
fix a
assume "a \<in> A"
with s obtain b where P: "?P b a" by blast
with s have "b \<in> B" by blast
with g[OF P] have "\<exists>b\<in>B. a = ?g b" by blast
then show "a \<in> ?g ` B"
by auto
qed
ultimately show ?thesis
by (auto simp: bij_betw_def)
qed
lemma bij_betw_cong: "(\<And>a. a \<in> A \<Longrightarrow> f a = g a) \<Longrightarrow> bij_betw f A A' = bij_betw g A A'"
unfolding bij_betw_def inj_on_def by safe force+ (* somewhat slow *)
lemma bij_betw_id[intro, simp]: "bij_betw id A A"
unfolding bij_betw_def id_def by auto
lemma bij_betw_id_iff: "bij_betw id A B \<longleftrightarrow> A = B"
by (auto simp add: bij_betw_def)
lemma bij_betw_combine:
"bij_betw f A B \<Longrightarrow> bij_betw f C D \<Longrightarrow> B \<inter> D = {} \<Longrightarrow> bij_betw f (A \<union> C) (B \<union> D)"
unfolding bij_betw_def inj_on_Un image_Un by auto
lemma bij_betw_subset: "bij_betw f A A' \<Longrightarrow> B \<subseteq> A \<Longrightarrow> f ` B = B' \<Longrightarrow> bij_betw f B B'"
by (auto simp add: bij_betw_def inj_on_def)
lemma bij_betw_ball: "bij_betw f A B \<Longrightarrow> (\<forall>b \<in> B. phi b) = (\<forall>a \<in> A. phi (f a))"
unfolding bij_betw_def inj_on_def by blast
lemma bij_pointE:
assumes "bij f"
obtains x where "y = f x" and "\<And>x'. y = f x' \<Longrightarrow> x' = x"
proof -
from assms have "inj f" by (rule bij_is_inj)
moreover from assms have "surj f" by (rule bij_is_surj)
then have "y \<in> range f" by simp
ultimately have "\<exists>!x. y = f x" by (simp add: range_ex1_eq)
with that show thesis by blast
qed
lemma bij_iff: \<^marker>\<open>contributor \<open>Amine Chaieb\<close>\<close>
\<open>bij f \<longleftrightarrow> (\<forall>x. \<exists>!y. f y = x)\<close> (is \<open>?P \<longleftrightarrow> ?Q\<close>)
proof
assume ?P
then have \<open>inj f\<close> \<open>surj f\<close>
by (simp_all add: bij_def)
show ?Q
proof
fix y
from \<open>surj f\<close> obtain x where \<open>y = f x\<close>
by (auto simp add: surj_def)
with \<open>inj f\<close> show \<open>\<exists>!x. f x = y\<close>
by (auto simp add: inj_def)
qed
next
assume ?Q
then have \<open>inj f\<close>
by (auto simp add: inj_def)
moreover have \<open>\<exists>x. y = f x\<close> for y
proof -
from \<open>?Q\<close> obtain x where \<open>f x = y\<close>
by blast
then have \<open>y = f x\<close>
by simp
then show ?thesis ..
qed
then have \<open>surj f\<close>
by (auto simp add: surj_def)
ultimately show ?P
by (rule bijI)
qed
lemma bij_betw_partition:
\<open>bij_betw f A B\<close>
if \<open>bij_betw f (A \<union> C) (B \<union> D)\<close> \<open>bij_betw f C D\<close> \<open>A \<inter> C = {}\<close> \<open>B \<inter> D = {}\<close>
proof -
from that have \<open>inj_on f (A \<union> C)\<close> \<open>inj_on f C\<close> \<open>f ` (A \<union> C) = B \<union> D\<close> \<open>f ` C = D\<close>
by (simp_all add: bij_betw_def)
then have \<open>inj_on f A\<close> and \<open>f ` (A - C) \<inter> f ` (C - A) = {}\<close>
by (simp_all add: inj_on_Un)
with \<open>A \<inter> C = {}\<close> have \<open>f ` A \<inter> f ` C = {}\<close>
by auto
with \<open>f ` (A \<union> C) = B \<union> D\<close> \<open>f ` C = D\<close> \<open>B \<inter> D = {}\<close>
have \<open>f ` A = B\<close>
by blast
with \<open>inj_on f A\<close> show ?thesis
by (simp add: bij_betw_def)
qed
lemma surj_image_vimage_eq: "surj f \<Longrightarrow> f ` (f -` A) = A"
by simp
lemma surj_vimage_empty:
assumes "surj f"
shows "f -` A = {} \<longleftrightarrow> A = {}"
using surj_image_vimage_eq [OF \<open>surj f\<close>, of A]
by (intro iffI) fastforce+
lemma inj_vimage_image_eq: "inj f \<Longrightarrow> f -` (f ` A) = A"
unfolding inj_def by blast
lemma vimage_subsetD: "surj f \<Longrightarrow> f -` B \<subseteq> A \<Longrightarrow> B \<subseteq> f ` A"
by (blast intro: sym)
lemma vimage_subsetI: "inj f \<Longrightarrow> B \<subseteq> f ` A \<Longrightarrow> f -` B \<subseteq> A"
unfolding inj_def by blast
lemma vimage_subset_eq: "bij f \<Longrightarrow> f -` B \<subseteq> A \<longleftrightarrow> B \<subseteq> f ` A"
unfolding bij_def by (blast del: subsetI intro: vimage_subsetI vimage_subsetD)
lemma inj_on_image_eq_iff: "inj_on f C \<Longrightarrow> A \<subseteq> C \<Longrightarrow> B \<subseteq> C \<Longrightarrow> f ` A = f ` B \<longleftrightarrow> A = B"
by (fastforce simp: inj_on_def)
lemma inj_on_Un_image_eq_iff: "inj_on f (A \<union> B) \<Longrightarrow> f ` A = f ` B \<longleftrightarrow> A = B"
by (erule inj_on_image_eq_iff) simp_all
lemma inj_on_image_Int: "inj_on f C \<Longrightarrow> A \<subseteq> C \<Longrightarrow> B \<subseteq> C \<Longrightarrow> f ` (A \<inter> B) = f ` A \<inter> f ` B"
unfolding inj_on_def by blast
lemma inj_on_image_set_diff: "inj_on f C \<Longrightarrow> A - B \<subseteq> C \<Longrightarrow> B \<subseteq> C \<Longrightarrow> f ` (A - B) = f ` A - f ` B"
unfolding inj_on_def by blast
lemma image_Int: "inj f \<Longrightarrow> f ` (A \<inter> B) = f ` A \<inter> f ` B"
unfolding inj_def by blast
lemma image_set_diff: "inj f \<Longrightarrow> f ` (A - B) = f ` A - f ` B"
unfolding inj_def by blast
lemma inj_on_image_mem_iff: "inj_on f B \<Longrightarrow> a \<in> B \<Longrightarrow> A \<subseteq> B \<Longrightarrow> f a \<in> f ` A \<longleftrightarrow> a \<in> A"
by (auto simp: inj_on_def)
lemma inj_image_mem_iff: "inj f \<Longrightarrow> f a \<in> f ` A \<longleftrightarrow> a \<in> A"
by (blast dest: injD)
lemma inj_image_subset_iff: "inj f \<Longrightarrow> f ` A \<subseteq> f ` B \<longleftrightarrow> A \<subseteq> B"
by (blast dest: injD)
lemma inj_image_eq_iff: "inj f \<Longrightarrow> f ` A = f ` B \<longleftrightarrow> A = B"
by (blast dest: injD)
lemma surj_Compl_image_subset: "surj f \<Longrightarrow> - (f ` A) \<subseteq> f ` (- A)"
by auto
lemma inj_image_Compl_subset: "inj f \<Longrightarrow> f ` (- A) \<subseteq> - (f ` A)"
by (auto simp: inj_def)
lemma bij_image_Compl_eq: "bij f \<Longrightarrow> f ` (- A) = - (f ` A)"
by (simp add: bij_def inj_image_Compl_subset surj_Compl_image_subset equalityI)
lemma inj_vimage_singleton: "inj f \<Longrightarrow> f -` {a} \<subseteq> {THE x. f x = a}"
\<comment> \<open>The inverse image of a singleton under an injective function is included in a singleton.\<close>
by (simp add: inj_def) (blast intro: the_equality [symmetric])
lemma inj_on_vimage_singleton: "inj_on f A \<Longrightarrow> f -` {a} \<inter> A \<subseteq> {THE x. x \<in> A \<and> f x = a}"
by (auto simp add: inj_on_def intro: the_equality [symmetric])
lemma bij_betw_byWitness:
assumes left: "\<forall>a \<in> A. f' (f a) = a"
and right: "\<forall>a' \<in> A'. f (f' a') = a'"
and "f ` A \<subseteq> A'"
and img2: "f' ` A' \<subseteq> A"
shows "bij_betw f A A'"
using assms
unfolding bij_betw_def inj_on_def
proof safe
fix a b
assume "a \<in> A" "b \<in> A"
with left have "a = f' (f a) \<and> b = f' (f b)" by simp
moreover assume "f a = f b"
ultimately show "a = b" by simp
next
fix a' assume *: "a' \<in> A'"
with img2 have "f' a' \<in> A" by blast
moreover from * right have "a' = f (f' a')" by simp
ultimately show "a' \<in> f ` A" by blast
qed
corollary notIn_Un_bij_betw:
assumes "b \<notin> A"
and "f b \<notin> A'"
and "bij_betw f A A'"
shows "bij_betw f (A \<union> {b}) (A' \<union> {f b})"
proof -
have "bij_betw f {b} {f b}"
unfolding bij_betw_def inj_on_def by simp
with assms show ?thesis
using bij_betw_combine[of f A A' "{b}" "{f b}"] by blast
qed
lemma notIn_Un_bij_betw3:
assumes "b \<notin> A"
and "f b \<notin> A'"
shows "bij_betw f A A' = bij_betw f (A \<union> {b}) (A' \<union> {f b})"
proof
assume "bij_betw f A A'"
then show "bij_betw f (A \<union> {b}) (A' \<union> {f b})"
using assms notIn_Un_bij_betw [of b A f A'] by blast
next
assume *: "bij_betw f (A \<union> {b}) (A' \<union> {f b})"
have "f ` A = A'"
proof safe
fix a
assume **: "a \<in> A"
then have "f a \<in> A' \<union> {f b}"
using * unfolding bij_betw_def by blast
moreover
have False if "f a = f b"
proof -
have "a = b"
using * ** that unfolding bij_betw_def inj_on_def by blast
with \<open>b \<notin> A\<close> ** show ?thesis by blast
qed
ultimately show "f a \<in> A'" by blast
next
fix a'
assume **: "a' \<in> A'"
then have "a' \<in> f ` (A \<union> {b})"
using * by (auto simp add: bij_betw_def)
then obtain a where 1: "a \<in> A \<union> {b} \<and> f a = a'" by blast
moreover
have False if "a = b" using 1 ** \<open>f b \<notin> A'\<close> that by blast
ultimately have "a \<in> A" by blast
with 1 show "a' \<in> f ` A" by blast
qed
then show "bij_betw f A A'"
using * bij_betw_subset[of f "A \<union> {b}" _ A] by blast
qed
lemma inj_on_disjoint_Un:
assumes "inj_on f A" and "inj_on g B"
and "f ` A \<inter> g ` B = {}"
shows "inj_on (\<lambda>x. if x \<in> A then f x else g x) (A \<union> B)"
using assms by (simp add: inj_on_def disjoint_iff) (blast)
lemma bij_betw_disjoint_Un:
assumes "bij_betw f A C" and "bij_betw g B D"
and "A \<inter> B = {}"
and "C \<inter> D = {}"
shows "bij_betw (\<lambda>x. if x \<in> A then f x else g x) (A \<union> B) (C \<union> D)"
using assms by (auto simp: inj_on_disjoint_Un bij_betw_def)
lemma involuntory_imp_bij:
\<open>bij f\<close> if \<open>\<And>x. f (f x) = x\<close>
proof (rule bijI)
from that show \<open>surj f\<close>
by (rule surjI)
show \<open>inj f\<close>
proof (rule injI)
fix x y
assume \<open>f x = f y\<close>
then have \<open>f (f x) = f (f y)\<close>
by simp
then show \<open>x = y\<close>
by (simp add: that)
qed
qed
subsubsection \<open>Inj/surj/bij of Algebraic Operations\<close>
context cancel_semigroup_add
begin
lemma inj_on_add [simp]:
"inj_on ((+) a) A"
by (rule inj_onI) simp
lemma inj_on_add' [simp]:
"inj_on (\<lambda>b. b + a) A"
by (rule inj_onI) simp
lemma bij_betw_add [simp]:
"bij_betw ((+) a) A B \<longleftrightarrow> (+) a ` A = B"
by (simp add: bij_betw_def)
end
context group_add
begin
lemma diff_left_imp_eq: "a - b = a - c \<Longrightarrow> b = c"
unfolding add_uminus_conv_diff[symmetric]
by(drule local.add_left_imp_eq) simp
lemma inj_uminus[simp, intro]: "inj_on uminus A"
by (auto intro!: inj_onI)
lemma surj_uminus[simp]: "surj uminus"
using surjI minus_minus by blast
lemma surj_plus [simp]:
"surj ((+) a)"
proof (standard, simp, standard, simp)
fix x
have "x = a + (-a + x)" by (simp add: add.assoc)
thus "x \<in> range ((+) a)" by blast
qed
lemma surj_plus_right [simp]:
"surj (\<lambda>b. b+a)"
proof (standard, simp, standard, simp)
fix b show "b \<in> range (\<lambda>b. b+a)"
using diff_add_cancel[of b a, symmetric] by blast
qed
lemma inj_on_diff_left [simp]:
\<open>inj_on ((-) a) A\<close>
by (auto intro: inj_onI dest!: diff_left_imp_eq)
lemma inj_on_diff_right [simp]:
\<open>inj_on (\<lambda>b. b - a) A\<close>
by (auto intro: inj_onI simp add: algebra_simps)
lemma surj_diff [simp]:
"surj ((-) a)"
proof (standard, simp, standard, simp)
fix x
have "x = a - (- x + a)" by (simp add: algebra_simps)
thus "x \<in> range ((-) a)" by blast
qed
lemma surj_diff_right [simp]:
"surj (\<lambda>x. x - a)"
proof (standard, simp, standard, simp)
fix x
have "x = x + a - a" by simp
thus "x \<in> range (\<lambda>x. x - a)" by fast
qed
lemma shows bij_plus: "bij ((+) a)" and bij_plus_right: "bij (\<lambda>x. x + a)"
and bij_uminus: "bij uminus"
and bij_diff: "bij ((-) a)" and bij_diff_right: "bij (\<lambda>x. x - a)"
by(simp_all add: bij_def)
lemma translation_subtract_Compl:
"(\<lambda>x. x - a) ` (- t) = - ((\<lambda>x. x - a) ` t)"
by(rule bij_image_Compl_eq)
(auto simp add: bij_def surj_def inj_def diff_eq_eq intro!: add_diff_cancel[symmetric])
lemma translation_diff:
"(+) a ` (s - t) = ((+) a ` s) - ((+) a ` t)"
by auto
lemma translation_subtract_diff:
"(\<lambda>x. x - a) ` (s - t) = ((\<lambda>x. x - a) ` s) - ((\<lambda>x. x - a) ` t)"
by(rule image_set_diff)(simp add: inj_on_def diff_eq_eq)
lemma translation_Int:
"(+) a ` (s \<inter> t) = ((+) a ` s) \<inter> ((+) a ` t)"
by auto
lemma translation_subtract_Int:
"(\<lambda>x. x - a) ` (s \<inter> t) = ((\<lambda>x. x - a) ` s) \<inter> ((\<lambda>x. x - a) ` t)"
by(rule image_Int)(simp add: inj_on_def diff_eq_eq)
end
(* TODO: prove in group_add *)
context ab_group_add
begin
lemma translation_Compl:
"(+) a ` (- t) = - ((+) a ` t)"
proof (rule set_eqI)
fix b
show "b \<in> (+) a ` (- t) \<longleftrightarrow> b \<in> - (+) a ` t"
by (auto simp: image_iff algebra_simps intro!: bexI [of _ "b - a"])
qed
end
subsection \<open>Function Updating\<close>
definition fun_upd :: "('a \<Rightarrow> 'b) \<Rightarrow> 'a \<Rightarrow> 'b \<Rightarrow> ('a \<Rightarrow> 'b)"
where "fun_upd f a b = (\<lambda>x. if x = a then b else f x)"
nonterminal updbinds and updbind
syntax
"_updbind" :: "'a \<Rightarrow> 'a \<Rightarrow> updbind" ("(2_ :=/ _)")
"" :: "updbind \<Rightarrow> updbinds" ("_")
"_updbinds":: "updbind \<Rightarrow> updbinds \<Rightarrow> updbinds" ("_,/ _")
"_Update" :: "'a \<Rightarrow> updbinds \<Rightarrow> 'a" ("_/'((_)')" [1000, 0] 900)
translations
"_Update f (_updbinds b bs)" \<rightleftharpoons> "_Update (_Update f b) bs"
"f(x:=y)" \<rightleftharpoons> "CONST fun_upd f x y"
(* Hint: to define the sum of two functions (or maps), use case_sum.
A nice infix syntax could be defined by
notation
case_sum (infixr "'(+')" 80)
*)
lemma fun_upd_idem_iff: "f(x:=y) = f \<longleftrightarrow> f x = y"
unfolding fun_upd_def
apply safe
apply (erule subst)
apply auto
done
lemma fun_upd_idem: "f x = y \<Longrightarrow> f(x := y) = f"
by (simp only: fun_upd_idem_iff)
lemma fun_upd_triv [iff]: "f(x := f x) = f"
by (simp only: fun_upd_idem)
lemma fun_upd_apply [simp]: "(f(x := y)) z = (if z = x then y else f z)"
by (simp add: fun_upd_def)
(* fun_upd_apply supersedes these two, but they are useful
if fun_upd_apply is intentionally removed from the simpset *)
lemma fun_upd_same: "(f(x := y)) x = y"
by simp
lemma fun_upd_other: "z \<noteq> x \<Longrightarrow> (f(x := y)) z = f z"
by simp
lemma fun_upd_upd [simp]: "f(x := y, x := z) = f(x := z)"
by (simp add: fun_eq_iff)
lemma fun_upd_twist: "a \<noteq> c \<Longrightarrow> (m(a := b))(c := d) = (m(c := d))(a := b)"
by auto
lemma inj_on_fun_updI: "inj_on f A \<Longrightarrow> y \<notin> f ` A \<Longrightarrow> inj_on (f(x := y)) A"
by (auto simp: inj_on_def)
lemma fun_upd_image: "f(x := y) ` A = (if x \<in> A then insert y (f ` (A - {x})) else f ` A)"
by auto
lemma fun_upd_comp: "f \<circ> (g(x := y)) = (f \<circ> g)(x := f y)"
by auto
lemma fun_upd_eqD: "f(x := y) = g(x := z) \<Longrightarrow> y = z"
by (simp add: fun_eq_iff split: if_split_asm)
subsection \<open>\<open>override_on\<close>\<close>
definition override_on :: "('a \<Rightarrow> 'b) \<Rightarrow> ('a \<Rightarrow> 'b) \<Rightarrow> 'a set \<Rightarrow> 'a \<Rightarrow> 'b"
where "override_on f g A = (\<lambda>a. if a \<in> A then g a else f a)"
lemma override_on_emptyset[simp]: "override_on f g {} = f"
by (simp add: override_on_def)
lemma override_on_apply_notin[simp]: "a \<notin> A \<Longrightarrow> (override_on f g A) a = f a"
by (simp add: override_on_def)
lemma override_on_apply_in[simp]: "a \<in> A \<Longrightarrow> (override_on f g A) a = g a"
by (simp add: override_on_def)
lemma override_on_insert: "override_on f g (insert x X) = (override_on f g X)(x:=g x)"
by (simp add: override_on_def fun_eq_iff)
lemma override_on_insert': "override_on f g (insert x X) = (override_on (f(x:=g x)) g X)"
by (simp add: override_on_def fun_eq_iff)
subsection \<open>Inversion of injective functions\<close>
definition the_inv_into :: "'a set \<Rightarrow> ('a \<Rightarrow> 'b) \<Rightarrow> ('b \<Rightarrow> 'a)"
where "the_inv_into A f = (\<lambda>x. THE y. y \<in> A \<and> f y = x)"
lemma the_inv_into_f_f: "inj_on f A \<Longrightarrow> x \<in> A \<Longrightarrow> the_inv_into A f (f x) = x"
unfolding the_inv_into_def inj_on_def by blast
lemma f_the_inv_into_f: "inj_on f A \<Longrightarrow> y \<in> f ` A \<Longrightarrow> f (the_inv_into A f y) = y"
unfolding the_inv_into_def
by (rule the1I2; blast dest: inj_onD)
lemma f_the_inv_into_f_bij_betw:
"bij_betw f A B \<Longrightarrow> (bij_betw f A B \<Longrightarrow> x \<in> B) \<Longrightarrow> f (the_inv_into A f x) = x"
unfolding bij_betw_def by (blast intro: f_the_inv_into_f)
lemma the_inv_into_into: "inj_on f A \<Longrightarrow> x \<in> f ` A \<Longrightarrow> A \<subseteq> B \<Longrightarrow> the_inv_into A f x \<in> B"
unfolding the_inv_into_def
by (rule the1I2; blast dest: inj_onD)
lemma the_inv_into_onto [simp]: "inj_on f A \<Longrightarrow> the_inv_into A f ` (f ` A) = A"
by (fast intro: the_inv_into_into the_inv_into_f_f [symmetric])
lemma the_inv_into_f_eq: "inj_on f A \<Longrightarrow> f x = y \<Longrightarrow> x \<in> A \<Longrightarrow> the_inv_into A f y = x"
by (force simp add: the_inv_into_f_f)
lemma the_inv_into_comp:
"inj_on f (g ` A) \<Longrightarrow> inj_on g A \<Longrightarrow> x \<in> f ` g ` A \<Longrightarrow>
the_inv_into A (f \<circ> g) x = (the_inv_into A g \<circ> the_inv_into (g ` A) f) x"
apply (rule the_inv_into_f_eq)
apply (fast intro: comp_inj_on)
apply (simp add: f_the_inv_into_f the_inv_into_into)
apply (simp add: the_inv_into_into)
done
lemma inj_on_the_inv_into: "inj_on f A \<Longrightarrow> inj_on (the_inv_into A f) (f ` A)"
by (auto intro: inj_onI simp: the_inv_into_f_f)
lemma bij_betw_the_inv_into: "bij_betw f A B \<Longrightarrow> bij_betw (the_inv_into A f) B A"
by (auto simp add: bij_betw_def inj_on_the_inv_into the_inv_into_into)
lemma bij_betw_iff_bijections:
"bij_betw f A B \<longleftrightarrow> (\<exists>g. (\<forall>x \<in> A. f x \<in> B \<and> g(f x) = x) \<and> (\<forall>y \<in> B. g y \<in> A \<and> f(g y) = y))"
(is "?lhs = ?rhs")
proof
show "?lhs \<Longrightarrow> ?rhs"
by (auto simp: bij_betw_def f_the_inv_into_f the_inv_into_f_f the_inv_into_into
exI[where ?x="the_inv_into A f"])
next
show "?rhs \<Longrightarrow> ?lhs"
by (force intro: bij_betw_byWitness)
qed
abbreviation the_inv :: "('a \<Rightarrow> 'b) \<Rightarrow> ('b \<Rightarrow> 'a)"
where "the_inv f \<equiv> the_inv_into UNIV f"
lemma the_inv_f_f: "the_inv f (f x) = x" if "inj f"
using that UNIV_I by (rule the_inv_into_f_f)
subsection \<open>Monotonicity\<close>
definition monotone_on :: "'a set \<Rightarrow> ('a \<Rightarrow> 'a \<Rightarrow> bool) \<Rightarrow> ('b \<Rightarrow> 'b \<Rightarrow> bool) \<Rightarrow> ('a \<Rightarrow> 'b) \<Rightarrow> bool"
where "monotone_on A orda ordb f \<longleftrightarrow> (\<forall>x\<in>A. \<forall>y\<in>A. orda x y \<longrightarrow> ordb (f x) (f y))"
abbreviation monotone :: "('a \<Rightarrow> 'a \<Rightarrow> bool) \<Rightarrow> ('b \<Rightarrow> 'b \<Rightarrow> bool) \<Rightarrow> ('a \<Rightarrow> 'b) \<Rightarrow> bool"
where "monotone \<equiv> monotone_on UNIV"
lemma monotone_def[no_atp]: "monotone orda ordb f \<longleftrightarrow> (\<forall>x y. orda x y \<longrightarrow> ordb (f x) (f y))"
by (simp add: monotone_on_def)
text \<open>Lemma @{thm [source] monotone_def} is provided for backward compatibility.\<close>
lemma monotone_onI:
"(\<And>x y. x \<in> A \<Longrightarrow> y \<in> A \<Longrightarrow> orda x y \<Longrightarrow> ordb (f x) (f y)) \<Longrightarrow> monotone_on A orda ordb f"
by (simp add: monotone_on_def)
lemma monotoneI[intro?]: "(\<And>x y. orda x y \<Longrightarrow> ordb (f x) (f y)) \<Longrightarrow> monotone orda ordb f"
by (rule monotone_onI)
lemma monotone_onD:
"monotone_on A orda ordb f \<Longrightarrow> x \<in> A \<Longrightarrow> y \<in> A \<Longrightarrow> orda x y \<Longrightarrow> ordb (f x) (f y)"
by (simp add: monotone_on_def)
lemma monotoneD[dest?]: "monotone orda ordb f \<Longrightarrow> orda x y \<Longrightarrow> ordb (f x) (f y)"
by (rule monotone_onD[of UNIV, simplified])
lemma monotone_on_subset: "monotone_on A orda ordb f \<Longrightarrow> B \<subseteq> A \<Longrightarrow> monotone_on B orda ordb f"
by (auto intro: monotone_onI dest: monotone_onD)
lemma monotone_on_empty[simp]: "monotone_on {} orda ordb f"
by (auto intro: monotone_onI dest: monotone_onD)
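text \<open>
  Added illustration (not part of the original theory; the lemma name is ad hoc):
  the image operator is monotone with respect to set inclusion.
\<close>
lemma example_monotone_image: "monotone (\<subseteq>) (\<subseteq>) (image f)"
  by (rule monotoneI) (erule image_mono)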
lemma monotone_on_o:
assumes
mono_f: "monotone_on A orda ordb f" and
mono_g: "monotone_on B ordc orda g" and
"g ` B \<subseteq> A"
shows "monotone_on B ordc ordb (f \<circ> g)"
proof (rule monotone_onI)
fix x y assume "x \<in> B" and "y \<in> B" and "ordc x y"
hence "orda (g x) (g y)"
by (rule mono_g[THEN monotone_onD])
moreover from \<open>g ` B \<subseteq> A\<close> \<open>x \<in> B\<close> \<open>y \<in> B\<close> have "g x \<in> A" and "g y \<in> A"
unfolding image_subset_iff by simp_all
ultimately show "ordb ((f \<circ> g) x) ((f \<circ> g) y)"
using mono_f[THEN monotone_onD] by simp
qed
subsubsection \<open>Specializations For @{class ord} Type Class And More\<close>
context ord begin
abbreviation mono_on :: "'a set \<Rightarrow> ('a \<Rightarrow> 'b :: ord) \<Rightarrow> bool"
where "mono_on A \<equiv> monotone_on A (\<le>) (\<le>)"
abbreviation strict_mono_on :: "'a set \<Rightarrow> ('a \<Rightarrow> 'b :: ord) \<Rightarrow> bool"
where "strict_mono_on A \<equiv> monotone_on A (<) (<)"
abbreviation antimono_on :: "'a set \<Rightarrow> ('a \<Rightarrow> 'b :: ord) \<Rightarrow> bool"
where "antimono_on A \<equiv> monotone_on A (\<le>) (\<ge>)"
abbreviation strict_antimono_on :: "'a set \<Rightarrow> ('a \<Rightarrow> 'b :: ord) \<Rightarrow> bool"
where "strict_antimono_on A \<equiv> monotone_on A (<) (>)"
lemma mono_on_def[no_atp]: "mono_on A f \<longleftrightarrow> (\<forall>r s. r \<in> A \<and> s \<in> A \<and> r \<le> s \<longrightarrow> f r \<le> f s)"
by (auto simp add: monotone_on_def)
lemma strict_mono_on_def[no_atp]:
"strict_mono_on A f \<longleftrightarrow> (\<forall>r s. r \<in> A \<and> s \<in> A \<and> r < s \<longrightarrow> f r < f s)"
by (auto simp add: monotone_on_def)
text \<open>Lemmas @{thm [source] mono_on_def} and @{thm [source] strict_mono_on_def} are provided for
backward compatibility.\<close>
lemma mono_onI:
"(\<And>r s. r \<in> A \<Longrightarrow> s \<in> A \<Longrightarrow> r \<le> s \<Longrightarrow> f r \<le> f s) \<Longrightarrow> mono_on A f"
by (rule monotone_onI)
lemma strict_mono_onI:
"(\<And>r s. r \<in> A \<Longrightarrow> s \<in> A \<Longrightarrow> r < s \<Longrightarrow> f r < f s) \<Longrightarrow> strict_mono_on A f"
by (rule monotone_onI)
lemma mono_onD: "\<lbrakk>mono_on A f; r \<in> A; s \<in> A; r \<le> s\<rbrakk> \<Longrightarrow> f r \<le> f s"
by (rule monotone_onD)
lemma strict_mono_onD: "\<lbrakk>strict_mono_on A f; r \<in> A; s \<in> A; r < s\<rbrakk> \<Longrightarrow> f r < f s"
by (rule monotone_onD)
lemma mono_on_subset: "mono_on A f \<Longrightarrow> B \<subseteq> A \<Longrightarrow> mono_on B f"
by (rule monotone_on_subset)
end
lemma mono_on_greaterD:
assumes "mono_on A g" "x \<in> A" "y \<in> A" "g x > (g (y::_::linorder) :: _ :: linorder)"
shows "x > y"
proof (rule ccontr)
assume "\<not>x > y"
hence "x \<le> y" by (simp add: not_less)
from assms(1-3) and this have "g x \<le> g y" by (rule mono_onD)
with assms(4) show False by simp
qed
context order begin
abbreviation mono :: "('a \<Rightarrow> 'b::order) \<Rightarrow> bool"
where "mono \<equiv> mono_on UNIV"
abbreviation strict_mono :: "('a \<Rightarrow> 'b::order) \<Rightarrow> bool"
where "strict_mono \<equiv> strict_mono_on UNIV"
abbreviation antimono :: "('a \<Rightarrow> 'b::order) \<Rightarrow> bool"
where "antimono \<equiv> monotone (\<le>) (\<lambda>x y. y \<le> x)"
lemma mono_def[no_atp]: "mono f \<longleftrightarrow> (\<forall>x y. x \<le> y \<longrightarrow> f x \<le> f y)"
by (simp add: monotone_on_def)
lemma strict_mono_def[no_atp]: "strict_mono f \<longleftrightarrow> (\<forall>x y. x < y \<longrightarrow> f x < f y)"
by (simp add: monotone_on_def)
lemma antimono_def[no_atp]: "antimono f \<longleftrightarrow> (\<forall>x y. x \<le> y \<longrightarrow> f x \<ge> f y)"
by (simp add: monotone_on_def)
text \<open>Lemmas @{thm [source] mono_def}, @{thm [source] strict_mono_def}, and
@{thm [source] antimono_def} are provided for backward compatibility.\<close>
lemma monoI [intro?]: "(\<And>x y. x \<le> y \<Longrightarrow> f x \<le> f y) \<Longrightarrow> mono f"
by (rule monotoneI)
lemma strict_monoI [intro?]: "(\<And>x y. x < y \<Longrightarrow> f x < f y) \<Longrightarrow> strict_mono f"
by (rule monotoneI)
lemma antimonoI [intro?]: "(\<And>x y. x \<le> y \<Longrightarrow> f x \<ge> f y) \<Longrightarrow> antimono f"
by (rule monotoneI)
lemma monoD [dest?]: "mono f \<Longrightarrow> x \<le> y \<Longrightarrow> f x \<le> f y"
by (rule monotoneD)
lemma strict_monoD [dest?]: "strict_mono f \<Longrightarrow> x < y \<Longrightarrow> f x < f y"
by (rule monotoneD)
lemma antimonoD [dest?]: "antimono f \<Longrightarrow> x \<le> y \<Longrightarrow> f x \<ge> f y"
by (rule monotoneD)
lemma monoE:
assumes "mono f"
assumes "x \<le> y"
obtains "f x \<le> f y"
proof
from assms show "f x \<le> f y" by (simp add: mono_def)
qed
lemma antimonoE:
fixes f :: "'a \<Rightarrow> 'b::order"
assumes "antimono f"
assumes "x \<le> y"
obtains "f x \<ge> f y"
proof
from assms show "f x \<ge> f y" by (simp add: antimono_def)
qed
lemma mono_imp_mono_on: "mono f \<Longrightarrow> mono_on A f"
by (rule monotone_on_subset[OF _ subset_UNIV])
lemma strict_mono_mono [dest?]:
assumes "strict_mono f"
shows "mono f"
proof (rule monoI)
fix x y
assume "x \<le> y"
show "f x \<le> f y"
proof (cases "x = y")
case True then show ?thesis by simp
next
case False with \<open>x \<le> y\<close> have "x < y" by simp
with assms strict_monoD have "f x < f y" by auto
then show ?thesis by simp
qed
qed
end
context linorder begin
lemma mono_invE:
fixes f :: "'a \<Rightarrow> 'b::order"
assumes "mono f"
assumes "f x < f y"
obtains "x \<le> y"
proof
show "x \<le> y"
proof (rule ccontr)
assume "\<not> x \<le> y"
then have "y \<le> x" by simp
with \<open>mono f\<close> obtain "f y \<le> f x" by (rule monoE)
with \<open>f x < f y\<close> show False by simp
qed
qed
lemma mono_strict_invE:
fixes f :: "'a \<Rightarrow> 'b::order"
assumes "mono f"
assumes "f x < f y"
obtains "x < y"
proof
show "x < y"
proof (rule ccontr)
assume "\<not> x < y"
then have "y \<le> x" by simp
with \<open>mono f\<close> obtain "f y \<le> f x" by (rule monoE)
with \<open>f x < f y\<close> show False by simp
qed
qed
lemma strict_mono_eq:
assumes "strict_mono f"
shows "f x = f y \<longleftrightarrow> x = y"
proof
assume "f x = f y"
show "x = y" proof (cases x y rule: linorder_cases)
case less with assms strict_monoD have "f x < f y" by auto
with \<open>f x = f y\<close> show ?thesis by simp
next
case equal then show ?thesis .
next
case greater with assms strict_monoD have "f y < f x" by auto
with \<open>f x = f y\<close> show ?thesis by simp
qed
qed simp
lemma strict_mono_less_eq:
assumes "strict_mono f"
shows "f x \<le> f y \<longleftrightarrow> x \<le> y"
proof
assume "x \<le> y"
with assms strict_mono_mono monoD show "f x \<le> f y" by auto
next
assume "f x \<le> f y"
show "x \<le> y" proof (rule ccontr)
assume "\<not> x \<le> y" then have "y < x" by simp
with assms strict_monoD have "f y < f x" by auto
with \<open>f x \<le> f y\<close> show False by simp
qed
qed
lemma strict_mono_less:
assumes "strict_mono f"
shows "f x < f y \<longleftrightarrow> x < y"
using assms
by (auto simp add: less_le Orderings.less_le strict_mono_eq strict_mono_less_eq)
end
lemma strict_mono_inv:
fixes f :: "('a::linorder) \<Rightarrow> ('b::linorder)"
assumes "strict_mono f" and "surj f" and inv: "\<And>x. g (f x) = x"
shows "strict_mono g"
proof
fix x y :: 'b assume "x < y"
from \<open>surj f\<close> obtain x' y' where [simp]: "x = f x'" "y = f y'" by blast
with \<open>x < y\<close> and \<open>strict_mono f\<close> have "x' < y'" by (simp add: strict_mono_less)
with inv show "g x < g y" by simp
qed
lemma strict_mono_on_imp_inj_on:
assumes "strict_mono_on A (f :: (_ :: linorder) \<Rightarrow> (_ :: preorder))"
shows "inj_on f A"
proof (rule inj_onI)
fix x y assume "x \<in> A" "y \<in> A" "f x = f y"
thus "x = y"
by (cases x y rule: linorder_cases)
(auto dest: strict_mono_onD[OF assms, of x y] strict_mono_onD[OF assms, of y x])
qed
lemma strict_mono_on_leD:
assumes "strict_mono_on A (f :: (_ :: linorder) \<Rightarrow> _ :: preorder)" "x \<in> A" "y \<in> A" "x \<le> y"
shows "f x \<le> f y"
proof (cases "x = y")
case True
then show ?thesis by simp
next
case False
with assms have "f x < f y"
using strict_mono_onD[OF assms(1)] by simp
then show ?thesis by (rule less_imp_le)
qed
lemma strict_mono_on_eqD:
fixes f :: "(_ :: linorder) \<Rightarrow> (_ :: preorder)"
assumes "strict_mono_on A f" "f x = f y" "x \<in> A" "y \<in> A"
shows "y = x"
using assms by (cases rule: linorder_cases) (auto dest: strict_mono_onD)
lemma strict_mono_on_imp_mono_on:
"strict_mono_on A (f :: (_ :: linorder) \<Rightarrow> _ :: preorder) \<Longrightarrow> mono_on A f"
by (rule mono_onI, rule strict_mono_on_leD)
lemma mono_imp_strict_mono:
fixes f :: "'a::order \<Rightarrow> 'b::order"
shows "\<lbrakk>mono_on S f; inj_on f S\<rbrakk> \<Longrightarrow> strict_mono_on S f"
by (auto simp add: monotone_on_def order_less_le inj_on_eq_iff)
lemma strict_mono_iff_mono:
fixes f :: "'a::linorder \<Rightarrow> 'b::order"
shows "strict_mono_on S f \<longleftrightarrow> mono_on S f \<and> inj_on f S"
proof
show "strict_mono_on S f \<Longrightarrow> mono_on S f \<and> inj_on f S"
by (simp add: strict_mono_on_imp_inj_on strict_mono_on_imp_mono_on)
qed (auto intro: mono_imp_strict_mono)
lemma antimono_imp_strict_antimono:
fixes f :: "'a::order \<Rightarrow> 'b::order"
shows "\<lbrakk>antimono_on S f; inj_on f S\<rbrakk> \<Longrightarrow> strict_antimono_on S f"
by (auto simp add: monotone_on_def order_less_le inj_on_eq_iff)
lemma strict_antimono_iff_antimono:
fixes f :: "'a::linorder \<Rightarrow> 'b::order"
shows "strict_antimono_on S f \<longleftrightarrow> antimono_on S f \<and> inj_on f S"
proof
show "strict_antimono_on S f \<Longrightarrow> antimono_on S f \<and> inj_on f S"
by (force simp add: monotone_on_def intro: linorder_inj_onI)
qed (auto intro: antimono_imp_strict_antimono)
lemma mono_compose: "mono Q \<Longrightarrow> mono (\<lambda>i x. Q i (f x))"
unfolding mono_def le_fun_def by auto
lemma mono_add:
fixes a :: "'a::ordered_ab_semigroup_add"
shows "mono ((+) a)"
by (simp add: add_left_mono monoI)
lemma (in semilattice_inf) mono_inf: "mono f \<Longrightarrow> f (A \<sqinter> B) \<le> f A \<sqinter> f B"
for f :: "'a \<Rightarrow> 'b::semilattice_inf"
by (auto simp add: mono_def intro: Lattices.inf_greatest)
lemma (in semilattice_sup) mono_sup: "mono f \<Longrightarrow> f A \<squnion> f B \<le> f (A \<squnion> B)"
for f :: "'a \<Rightarrow> 'b::semilattice_sup"
by (auto simp add: mono_def intro: Lattices.sup_least)
lemma (in linorder) min_of_mono: "mono f \<Longrightarrow> min (f m) (f n) = f (min m n)"
by (auto simp: mono_def Orderings.min_def min_def intro: Orderings.antisym)
lemma (in linorder) max_of_mono: "mono f \<Longrightarrow> max (f m) (f n) = f (max m n)"
by (auto simp: mono_def Orderings.max_def max_def intro: Orderings.antisym)
lemma (in linorder)
max_of_antimono: "antimono f \<Longrightarrow> max (f x) (f y) = f (min x y)" and
min_of_antimono: "antimono f \<Longrightarrow> min (f x) (f y) = f (max x y)"
by (auto simp: antimono_def Orderings.max_def max_def Orderings.min_def min_def intro!: antisym)
lemma (in linorder) strict_mono_imp_inj_on: "strict_mono f \<Longrightarrow> inj_on f A"
by (auto intro!: inj_onI dest: strict_mono_eq)
lemma mono_Int: "mono f \<Longrightarrow> f (A \<inter> B) \<subseteq> f A \<inter> f B"
by (fact mono_inf)
lemma mono_Un: "mono f \<Longrightarrow> f A \<union> f B \<subseteq> f (A \<union> B)"
by (fact mono_sup)
subsubsection \<open>Least value operator\<close>
lemma Least_mono: "mono f \<Longrightarrow> \<exists>x\<in>S. \<forall>y\<in>S. x \<le> y \<Longrightarrow> (LEAST y. y \<in> f ` S) = f (LEAST x. x \<in> S)"
for f :: "'a::order \<Rightarrow> 'b::order"
\<comment> \<open>Courtesy of Stephan Merz\<close>
apply clarify
apply (erule_tac P = "\<lambda>x. x \<in> S" in LeastI2_order)
apply fast
apply (rule LeastI2_order)
apply (auto elim: monoD intro!: order_antisym)
done
subsection \<open>Setup\<close>
subsubsection \<open>Proof tools\<close>
text \<open>Simplify terms of the form \<open>f(\<dots>,x:=y,\<dots>,x:=z,\<dots>)\<close> to \<open>f(\<dots>,x:=z,\<dots>)\<close>\<close>
-simproc_setup fun_upd2 ("f(v := w, x := y)") = \<open>fn _ =>
+simproc_setup fun_upd2 ("f(v := w, x := y)") = \<open>
let
fun gen_fun_upd NONE T _ _ = NONE
| gen_fun_upd (SOME f) T x y = SOME (Const (\<^const_name>\<open>fun_upd\<close>, T) $ f $ x $ y)
fun dest_fun_T1 (Type (_, T :: Ts)) = T
fun find_double (t as Const (\<^const_name>\<open>fun_upd\<close>,T) $ f $ x $ y) =
let
fun find (Const (\<^const_name>\<open>fun_upd\<close>,T) $ g $ v $ w) =
if v aconv x then SOME g else gen_fun_upd (find g) T v w
| find t = NONE
in (dest_fun_T1 T, gen_fun_upd (find f) T x y) end
val ss = simpset_of \<^context>
fun proc ctxt ct =
let
val t = Thm.term_of ct
in
(case find_double t of
(T, NONE) => NONE
| (T, SOME rhs) =>
SOME (Goal.prove ctxt [] [] (Logic.mk_equals (t, rhs))
(fn _ =>
resolve_tac ctxt [eq_reflection] 1 THEN
resolve_tac ctxt @{thms ext} 1 THEN
simp_tac (put_simpset ss ctxt) 1)))
end
- in proc end
+ in K proc end
\<close>
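text \<open>
  Added illustration (not part of the original theory; the lemma name is ad hoc):
  with the \<open>fun_upd2\<close> simproc active, a superseded update is removed by \<open>simp\<close>
  even when it is not adjacent to the later update of the same point.
\<close>
lemma example_fun_upd2: "f(a := x, b := y, a := z) = f(b := y, a := z)"
  by simp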
subsubsection \<open>Functorial structure of types\<close>
ML_file \<open>Tools/functor.ML\<close>
functor map_fun: map_fun
by (simp_all add: fun_eq_iff)
functor vimage
by (simp_all add: fun_eq_iff vimage_comp)
text \<open>Legacy theorem names\<close>
lemmas o_def = comp_def
lemmas o_apply = comp_apply
lemmas o_assoc = comp_assoc [symmetric]
lemmas id_o = id_comp
lemmas o_id = comp_id
lemmas o_eq_dest = comp_eq_dest
lemmas o_eq_elim = comp_eq_elim
lemmas o_eq_dest_lhs = comp_eq_dest_lhs
lemmas o_eq_id_dest = comp_eq_id_dest
end
diff --git a/src/HOL/Groups.thy b/src/HOL/Groups.thy
--- a/src/HOL/Groups.thy
+++ b/src/HOL/Groups.thy
@@ -1,1495 +1,1495 @@
(* Title: HOL/Groups.thy
Author: Gertrud Bauer
Author: Steven Obua
Author: Lawrence C Paulson
Author: Markus Wenzel
Author: Jeremy Avigad
*)
section \<open>Groups, also combined with orderings\<close>
theory Groups
imports Orderings
begin
subsection \<open>Dynamic facts\<close>
named_theorems ac_simps "associativity and commutativity simplification rules"
and algebra_simps "algebra simplification rules for rings"
and algebra_split_simps "algebra simplification rules for rings, with potential goal splitting"
and field_simps "algebra simplification rules for fields"
and field_split_simps "algebra simplification rules for fields, with potential goal splitting"
text \<open>
The rewrites accumulated in \<open>algebra_simps\<close> deal with the classical
algebraic structures of groups, rings and the like. They simplify terms by
multiplying everything out (in the case of a ring) and bringing sums and
products into a canonical form (by ordered rewriting). As a result, it
decides group and ring equalities and also helps with inequalities.
Of course it also works for fields, but it knows nothing about
multiplicative inverses or division. This is catered for by \<open>field_simps\<close>.
Facts in \<open>field_simps\<close> multiply through by denominators in (in)equations if they
can be proved to be non-zero (for equations) or positive/negative (for
inequalities). These rules can be too aggressive and are therefore kept
separate from the more benign \<open>algebra_simps\<close>.
Collections \<open>algebra_split_simps\<close> and \<open>field_split_simps\<close>
correspond to \<open>algebra_simps\<close> and \<open>field_simps\<close>
but contain more aggressive rules that may lead to goal splitting.
\<close>
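(* Illustrative sketches (hypothetical goals, usable only once the ring and
   field classes of later theories are available):

     lemma "(a + b) * (a - b) = a * a - b * (b::'a::comm_ring)"
       by (simp add: algebra_simps)

     lemma "z \<noteq> 0 \<Longrightarrow> x / z + y / z = (x + y) / (z::'a::field)"
       by (simp add: field_simps)

   The first goal is multiplied out and normalised by ordered rewriting; the
   second is multiplied through by the denominator z, which is legitimate
   because z is assumed to be non-zero. *)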
subsection \<open>Abstract structures\<close>
text \<open>
These locales provide basic structures for interpretation into bigger
structures; extensions require careful thinking, otherwise undesired effects
may occur due to interpretation.
\<close>
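(* A hypothetical example of such an interpretation, assuming a later theory
   where list append "(@)" and its associativity law append_assoc are
   available:

     interpretation append: semigroup "(@)"
       by standard (rule append_assoc)

   The qualifier makes the locale facts available as append.assoc etc. without
   re-proving them. *)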
locale semigroup =
fixes f :: "'a \<Rightarrow> 'a \<Rightarrow> 'a" (infixl "\<^bold>*" 70)
assumes assoc [ac_simps]: "a \<^bold>* b \<^bold>* c = a \<^bold>* (b \<^bold>* c)"
locale abel_semigroup = semigroup +
assumes commute [ac_simps]: "a \<^bold>* b = b \<^bold>* a"
begin
lemma left_commute [ac_simps]: "b \<^bold>* (a \<^bold>* c) = a \<^bold>* (b \<^bold>* c)"
proof -
have "(b \<^bold>* a) \<^bold>* c = (a \<^bold>* b) \<^bold>* c"
by (simp only: commute)
then show ?thesis
by (simp only: assoc)
qed
end
locale monoid = semigroup +
fixes z :: 'a ("\<^bold>1")
assumes left_neutral [simp]: "\<^bold>1 \<^bold>* a = a"
assumes right_neutral [simp]: "a \<^bold>* \<^bold>1 = a"
locale comm_monoid = abel_semigroup +
fixes z :: 'a ("\<^bold>1")
assumes comm_neutral: "a \<^bold>* \<^bold>1 = a"
begin
sublocale monoid
by standard (simp_all add: commute comm_neutral)
end
locale group = semigroup +
fixes z :: 'a ("\<^bold>1")
fixes inverse :: "'a \<Rightarrow> 'a"
assumes group_left_neutral: "\<^bold>1 \<^bold>* a = a"
assumes left_inverse [simp]: "inverse a \<^bold>* a = \<^bold>1"
begin
lemma left_cancel: "a \<^bold>* b = a \<^bold>* c \<longleftrightarrow> b = c"
proof
assume "a \<^bold>* b = a \<^bold>* c"
then have "inverse a \<^bold>* (a \<^bold>* b) = inverse a \<^bold>* (a \<^bold>* c)" by simp
then have "(inverse a \<^bold>* a) \<^bold>* b = (inverse a \<^bold>* a) \<^bold>* c"
by (simp only: assoc)
then show "b = c" by (simp add: group_left_neutral)
qed simp
sublocale monoid
proof
fix a
have "inverse a \<^bold>* a = \<^bold>1" by simp
then have "inverse a \<^bold>* (a \<^bold>* \<^bold>1) = inverse a \<^bold>* a"
by (simp add: group_left_neutral assoc [symmetric])
with left_cancel show "a \<^bold>* \<^bold>1 = a"
by (simp only: left_cancel)
qed (fact group_left_neutral)
lemma inverse_unique:
assumes "a \<^bold>* b = \<^bold>1"
shows "inverse a = b"
proof -
from assms have "inverse a \<^bold>* (a \<^bold>* b) = inverse a"
by simp
then show ?thesis
by (simp add: assoc [symmetric])
qed
lemma inverse_neutral [simp]: "inverse \<^bold>1 = \<^bold>1"
by (rule inverse_unique) simp
lemma inverse_inverse [simp]: "inverse (inverse a) = a"
by (rule inverse_unique) simp
lemma right_inverse [simp]: "a \<^bold>* inverse a = \<^bold>1"
proof -
have "a \<^bold>* inverse a = inverse (inverse a) \<^bold>* inverse a"
by simp
also have "\<dots> = \<^bold>1"
by (rule left_inverse)
then show ?thesis by simp
qed
lemma inverse_distrib_swap: "inverse (a \<^bold>* b) = inverse b \<^bold>* inverse a"
proof (rule inverse_unique)
have "a \<^bold>* b \<^bold>* (inverse b \<^bold>* inverse a) =
a \<^bold>* (b \<^bold>* inverse b) \<^bold>* inverse a"
by (simp only: assoc)
also have "\<dots> = \<^bold>1"
by simp
finally show "a \<^bold>* b \<^bold>* (inverse b \<^bold>* inverse a) = \<^bold>1" .
qed
lemma right_cancel: "b \<^bold>* a = c \<^bold>* a \<longleftrightarrow> b = c"
proof
assume "b \<^bold>* a = c \<^bold>* a"
then have "b \<^bold>* a \<^bold>* inverse a= c \<^bold>* a \<^bold>* inverse a"
by simp
then show "b = c"
by (simp add: assoc)
qed simp
end
subsection \<open>Generic operations\<close>
class zero =
fixes zero :: 'a ("0")
class one =
fixes one :: 'a ("1")
hide_const (open) zero one
lemma Let_0 [simp]: "Let 0 f = f 0"
unfolding Let_def ..
lemma Let_1 [simp]: "Let 1 f = f 1"
unfolding Let_def ..
setup \<open>
Reorient_Proc.add
(fn Const(\<^const_name>\<open>Groups.zero\<close>, _) => true
| Const(\<^const_name>\<open>Groups.one\<close>, _) => true
| _ => false)
\<close>
-simproc_setup reorient_zero ("0 = x") = Reorient_Proc.proc
-simproc_setup reorient_one ("1 = x") = Reorient_Proc.proc
+simproc_setup reorient_zero ("0 = x") = \<open>K Reorient_Proc.proc\<close>
+simproc_setup reorient_one ("1 = x") = \<open>K Reorient_Proc.proc\<close>
typed_print_translation \<open>
let
fun tr' c = (c, fn ctxt => fn T => fn ts =>
if null ts andalso Printer.type_emphasis ctxt T then
Syntax.const \<^syntax_const>\<open>_constrain\<close> $ Syntax.const c $
Syntax_Phases.term_of_typ ctxt T
else raise Match);
in map tr' [\<^const_syntax>\<open>Groups.one\<close>, \<^const_syntax>\<open>Groups.zero\<close>] end
\<close> \<comment> \<open>show types that are presumably too general\<close>
class plus =
fixes plus :: "'a \<Rightarrow> 'a \<Rightarrow> 'a" (infixl "+" 65)
class minus =
fixes minus :: "'a \<Rightarrow> 'a \<Rightarrow> 'a" (infixl "-" 65)
class uminus =
fixes uminus :: "'a \<Rightarrow> 'a" ("- _" [81] 80)
class times =
fixes times :: "'a \<Rightarrow> 'a \<Rightarrow> 'a" (infixl "*" 70)
subsection \<open>Semigroups and Monoids\<close>
class semigroup_add = plus +
assumes add_assoc [algebra_simps, algebra_split_simps, field_simps, field_split_simps]:
"(a + b) + c = a + (b + c)"
begin
sublocale add: semigroup plus
by standard (fact add_assoc)
end
hide_fact add_assoc
class ab_semigroup_add = semigroup_add +
assumes add_commute [algebra_simps, algebra_split_simps, field_simps, field_split_simps]:
"a + b = b + a"
begin
sublocale add: abel_semigroup plus
by standard (fact add_commute)
declare add.left_commute [algebra_simps, algebra_split_simps, field_simps, field_split_simps]
lemmas add_ac = add.assoc add.commute add.left_commute
end
hide_fact add_commute
lemmas add_ac = add.assoc add.commute add.left_commute
class semigroup_mult = times +
assumes mult_assoc [algebra_simps, algebra_split_simps, field_simps, field_split_simps]:
"(a * b) * c = a * (b * c)"
begin
sublocale mult: semigroup times
by standard (fact mult_assoc)
end
hide_fact mult_assoc
class ab_semigroup_mult = semigroup_mult +
assumes mult_commute [algebra_simps, algebra_split_simps, field_simps, field_split_simps]:
"a * b = b * a"
begin
sublocale mult: abel_semigroup times
by standard (fact mult_commute)
declare mult.left_commute [algebra_simps, algebra_split_simps, field_simps, field_split_simps]
lemmas mult_ac = mult.assoc mult.commute mult.left_commute
end
hide_fact mult_commute
lemmas mult_ac = mult.assoc mult.commute mult.left_commute
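(* For illustration: the ac rules normalise nested products by ordered
   rewriting, so permuted goals close by simp, e.g. *)
lemma "a * (b * c) = c * (b * (a::'a::ab_semigroup_mult))"
  by (simp add: ac_simps)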
class monoid_add = zero + semigroup_add +
assumes add_0_left: "0 + a = a"
and add_0_right: "a + 0 = a"
begin
sublocale add: monoid plus 0
by standard (fact add_0_left add_0_right)+
end
lemma zero_reorient: "0 = x \<longleftrightarrow> x = 0"
by (fact eq_commute)
class comm_monoid_add = zero + ab_semigroup_add +
assumes add_0: "0 + a = a"
begin
subclass monoid_add
by standard (simp_all add: add_0 add.commute [of _ 0])
sublocale add: comm_monoid plus 0
by standard (simp add: ac_simps)
end
class monoid_mult = one + semigroup_mult +
assumes mult_1_left: "1 * a = a"
and mult_1_right: "a * 1 = a"
begin
sublocale mult: monoid times 1
by standard (fact mult_1_left mult_1_right)+
end
lemma one_reorient: "1 = x \<longleftrightarrow> x = 1"
by (fact eq_commute)
class comm_monoid_mult = one + ab_semigroup_mult +
assumes mult_1: "1 * a = a"
begin
subclass monoid_mult
by standard (simp_all add: mult_1 mult.commute [of _ 1])
sublocale mult: comm_monoid times 1
by standard (simp add: ac_simps)
end
class cancel_semigroup_add = semigroup_add +
assumes add_left_imp_eq: "a + b = a + c \<Longrightarrow> b = c"
assumes add_right_imp_eq: "b + a = c + a \<Longrightarrow> b = c"
begin
lemma add_left_cancel [simp]: "a + b = a + c \<longleftrightarrow> b = c"
by (blast dest: add_left_imp_eq)
lemma add_right_cancel [simp]: "b + a = c + a \<longleftrightarrow> b = c"
by (blast dest: add_right_imp_eq)
end
class cancel_ab_semigroup_add = ab_semigroup_add + minus +
assumes add_diff_cancel_left' [simp]: "(a + b) - a = b"
assumes diff_diff_add [algebra_simps, algebra_split_simps, field_simps, field_split_simps]:
"a - b - c = a - (b + c)"
begin
lemma add_diff_cancel_right' [simp]: "(a + b) - b = a"
using add_diff_cancel_left' [of b a] by (simp add: ac_simps)
subclass cancel_semigroup_add
proof
fix a b c :: 'a
assume "a + b = a + c"
then have "a + b - a = a + c - a"
by simp
then show "b = c"
by simp
next
fix a b c :: 'a
assume "b + a = c + a"
then have "b + a - a = c + a - a"
by simp
then show "b = c"
by simp
qed
lemma add_diff_cancel_left [simp]: "(c + a) - (c + b) = a - b"
unfolding diff_diff_add [symmetric] by simp
lemma add_diff_cancel_right [simp]: "(a + c) - (b + c) = a - b"
using add_diff_cancel_left [symmetric] by (simp add: ac_simps)
lemma diff_right_commute: "a - c - b = a - b - c"
by (simp add: diff_diff_add add.commute)
end
class cancel_comm_monoid_add = cancel_ab_semigroup_add + comm_monoid_add
begin
lemma diff_zero [simp]: "a - 0 = a"
using add_diff_cancel_right' [of a 0] by simp
lemma diff_cancel [simp]: "a - a = 0"
proof -
have "(a + 0) - (a + 0) = 0"
by (simp only: add_diff_cancel_left diff_zero)
then show ?thesis by simp
qed
lemma add_implies_diff:
assumes "c + b = a"
shows "c = a - b"
proof -
from assms have "(b + c) - (b + 0) = a - b"
by (simp add: add.commute)
then show "c = a - b" by simp
qed
lemma add_cancel_right_right [simp]: "a = a + b \<longleftrightarrow> b = 0"
(is "?P \<longleftrightarrow> ?Q")
proof
assume ?Q
then show ?P by simp
next
assume ?P
then have "a - a = a + b - a" by simp
then show ?Q by simp
qed
lemma add_cancel_right_left [simp]: "a = b + a \<longleftrightarrow> b = 0"
using add_cancel_right_right [of a b] by (simp add: ac_simps)
lemma add_cancel_left_right [simp]: "a + b = a \<longleftrightarrow> b = 0"
by (auto dest: sym)
lemma add_cancel_left_left [simp]: "b + a = a \<longleftrightarrow> b = 0"
by (auto dest: sym)
end
class comm_monoid_diff = cancel_comm_monoid_add +
assumes zero_diff [simp]: "0 - a = 0"
begin
lemma diff_add_zero [simp]: "a - (a + b) = 0"
proof -
have "a - (a + b) = (a + 0) - (a + b)"
by simp
also have "\<dots> = 0"
by (simp only: add_diff_cancel_left zero_diff)
finally show ?thesis .
qed
end
subsection \<open>Groups\<close>
class group_add = minus + uminus + monoid_add +
assumes left_minus: "- a + a = 0"
assumes add_uminus_conv_diff [simp]: "a + (- b) = a - b"
begin
lemma diff_conv_add_uminus: "a - b = a + (- b)"
by simp
sublocale add: group plus 0 uminus
by standard (simp_all add: left_minus)
lemma minus_unique: "a + b = 0 \<Longrightarrow> - a = b"
by (fact add.inverse_unique)
lemma minus_zero: "- 0 = 0"
by (fact add.inverse_neutral)
lemma minus_minus: "- (- a) = a"
by (fact add.inverse_inverse)
lemma right_minus: "a + - a = 0"
by (fact add.right_inverse)
lemma diff_self [simp]: "a - a = 0"
using right_minus [of a] by simp
subclass cancel_semigroup_add
by standard (simp_all add: add.left_cancel add.right_cancel)
lemma minus_add_cancel [simp]: "- a + (a + b) = b"
by (simp add: add.assoc [symmetric])
lemma add_minus_cancel [simp]: "a + (- a + b) = b"
by (simp add: add.assoc [symmetric])
lemma diff_add_cancel [simp]: "a - b + b = a"
by (simp only: diff_conv_add_uminus add.assoc) simp
lemma add_diff_cancel [simp]: "a + b - b = a"
by (simp only: diff_conv_add_uminus add.assoc) simp
lemma minus_add: "- (a + b) = - b + - a"
by (fact add.inverse_distrib_swap)
lemma right_minus_eq [simp]: "a - b = 0 \<longleftrightarrow> a = b"
proof
assume "a - b = 0"
have "a = (a - b) + b" by (simp add: add.assoc)
also have "\<dots> = b" using \<open>a - b = 0\<close> by simp
finally show "a = b" .
next
assume "a = b"
then show "a - b = 0" by simp
qed
lemma eq_iff_diff_eq_0: "a = b \<longleftrightarrow> a - b = 0"
by (fact right_minus_eq [symmetric])
lemma diff_0 [simp]: "0 - a = - a"
by (simp only: diff_conv_add_uminus add_0_left)
lemma diff_0_right [simp]: "a - 0 = a"
by (simp only: diff_conv_add_uminus minus_zero add_0_right)
lemma diff_minus_eq_add [simp]: "a - - b = a + b"
by (simp only: diff_conv_add_uminus minus_minus)
lemma neg_equal_iff_equal [simp]: "- a = - b \<longleftrightarrow> a = b"
proof
assume "- a = - b"
then have "- (- a) = - (- b)" by simp
then show "a = b" by simp
next
assume "a = b"
then show "- a = - b" by simp
qed
lemma neg_equal_0_iff_equal [simp]: "- a = 0 \<longleftrightarrow> a = 0"
by (subst neg_equal_iff_equal [symmetric]) simp
lemma neg_0_equal_iff_equal [simp]: "0 = - a \<longleftrightarrow> 0 = a"
by (subst neg_equal_iff_equal [symmetric]) simp
text \<open>The next two equations can make the simplifier loop!\<close>
lemma equation_minus_iff: "a = - b \<longleftrightarrow> b = - a"
proof -
have "- (- a) = - b \<longleftrightarrow> - a = b"
by (rule neg_equal_iff_equal)
then show ?thesis
by (simp add: eq_commute)
qed
lemma minus_equation_iff: "- a = b \<longleftrightarrow> - b = a"
proof -
have "- a = - (- b) \<longleftrightarrow> a = -b"
by (rule neg_equal_iff_equal)
then show ?thesis
by (simp add: eq_commute)
qed
lemma eq_neg_iff_add_eq_0: "a = - b \<longleftrightarrow> a + b = 0"
proof
assume "a = - b"
then show "a + b = 0" by simp
next
assume "a + b = 0"
moreover have "a + (b + - b) = (a + b) + - b"
by (simp only: add.assoc)
ultimately show "a = - b"
by simp
qed
lemma add_eq_0_iff2: "a + b = 0 \<longleftrightarrow> a = - b"
by (fact eq_neg_iff_add_eq_0 [symmetric])
lemma neg_eq_iff_add_eq_0: "- a = b \<longleftrightarrow> a + b = 0"
by (auto simp add: add_eq_0_iff2)
lemma add_eq_0_iff: "a + b = 0 \<longleftrightarrow> b = - a"
by (auto simp add: neg_eq_iff_add_eq_0 [symmetric])
lemma minus_diff_eq [simp]: "- (a - b) = b - a"
by (simp only: neg_eq_iff_add_eq_0 diff_conv_add_uminus add.assoc minus_add_cancel) simp
lemma add_diff_eq [algebra_simps, algebra_split_simps, field_simps, field_split_simps]:
"a + (b - c) = (a + b) - c"
by (simp only: diff_conv_add_uminus add.assoc)
lemma diff_add_eq_diff_diff_swap: "a - (b + c) = a - c - b"
by (simp only: diff_conv_add_uminus add.assoc minus_add)
lemma diff_eq_eq [algebra_simps, algebra_split_simps, field_simps, field_split_simps]:
"a - b = c \<longleftrightarrow> a = c + b"
by auto
lemma eq_diff_eq [algebra_simps, algebra_split_simps, field_simps, field_split_simps]:
"a = c - b \<longleftrightarrow> a + b = c"
by auto
lemma diff_diff_eq2 [algebra_simps, algebra_split_simps, field_simps, field_split_simps]:
"a - (b - c) = (a + c) - b"
by (simp only: diff_conv_add_uminus add.assoc) simp
lemma diff_eq_diff_eq: "a - b = c - d \<Longrightarrow> a = b \<longleftrightarrow> c = d"
by (simp only: eq_iff_diff_eq_0 [of a b] eq_iff_diff_eq_0 [of c d])
end
class ab_group_add = minus + uminus + comm_monoid_add +
assumes ab_left_minus: "- a + a = 0"
assumes ab_diff_conv_add_uminus: "a - b = a + (- b)"
begin
subclass group_add
by standard (simp_all add: ab_left_minus ab_diff_conv_add_uminus)
subclass cancel_comm_monoid_add
proof
fix a b c :: 'a
have "b + a - a = b"
by simp
then show "a + b - a = b"
by (simp add: ac_simps)
show "a - b - c = a - (b + c)"
by (simp add: algebra_simps)
qed
lemma uminus_add_conv_diff [simp]: "- a + b = b - a"
by (simp add: add.commute)
lemma minus_add_distrib [simp]: "- (a + b) = - a + - b"
by (simp add: algebra_simps)
lemma diff_add_eq [algebra_simps, algebra_split_simps, field_simps, field_split_simps]:
"(a - b) + c = (a + c) - b"
by (simp add: algebra_simps)
lemma minus_diff_commute:
"- b - a = - a - b"
by (simp only: diff_conv_add_uminus add.commute)
end
subsection \<open>(Partially) Ordered Groups\<close>
text \<open>
The theory of partially ordered groups is taken from the books:
\<^item> \<^emph>\<open>Lattice Theory\<close> by Garrett Birkhoff, American Mathematical Society, 1979
\<^item> \<^emph>\<open>Partially Ordered Algebraic Systems\<close>, Pergamon Press, 1963
Most of the used notions can also be looked up in
\<^item> \<^url>\<open>http://www.mathworld.com\<close> by Eric Weisstein et al.
\<^item> \<^emph>\<open>Algebra I\<close> by van der Waerden, Springer
\<close>
class ordered_ab_semigroup_add = order + ab_semigroup_add +
assumes add_left_mono: "a \<le> b \<Longrightarrow> c + a \<le> c + b"
begin
lemma add_right_mono: "a \<le> b \<Longrightarrow> a + c \<le> b + c"
by (simp add: add.commute [of _ c] add_left_mono)
text \<open>non-strict, in both arguments\<close>
lemma add_mono: "a \<le> b \<Longrightarrow> c \<le> d \<Longrightarrow> a + c \<le> b + d"
by (simp add: add.commute add_left_mono add_right_mono [THEN order_trans])
end
text \<open>Strict monotonicity in both arguments\<close>
class strict_ordered_ab_semigroup_add = ordered_ab_semigroup_add +
assumes add_strict_mono: "a < b \<Longrightarrow> c < d \<Longrightarrow> a + c < b + d"
class ordered_cancel_ab_semigroup_add =
ordered_ab_semigroup_add + cancel_ab_semigroup_add
begin
lemma add_strict_left_mono: "a < b \<Longrightarrow> c + a < c + b"
by (auto simp add: less_le add_left_mono)
lemma add_strict_right_mono: "a < b \<Longrightarrow> a + c < b + c"
by (simp add: add.commute [of _ c] add_strict_left_mono)
subclass strict_ordered_ab_semigroup_add
proof
show "\<And>a b c d. \<lbrakk>a < b; c < d\<rbrakk> \<Longrightarrow> a + c < b + d"
by (iprover intro: add_strict_left_mono add_strict_right_mono less_trans)
qed
lemma add_less_le_mono: "a < b \<Longrightarrow> c \<le> d \<Longrightarrow> a + c < b + d"
by (iprover intro: add_left_mono add_strict_right_mono less_le_trans)
lemma add_le_less_mono: "a \<le> b \<Longrightarrow> c < d \<Longrightarrow> a + c < b + d"
by (iprover intro: add_strict_left_mono add_right_mono less_le_trans)
end
class ordered_ab_semigroup_add_imp_le = ordered_cancel_ab_semigroup_add +
assumes add_le_imp_le_left: "c + a \<le> c + b \<Longrightarrow> a \<le> b"
begin
lemma add_less_imp_less_left:
assumes less: "c + a < c + b"
shows "a < b"
proof -
from less have le: "c + a \<le> c + b"
by (simp add: order_le_less)
have "a \<le> b"
using add_le_imp_le_left [OF le] .
moreover have "a \<noteq> b"
proof (rule ccontr)
assume "\<not> ?thesis"
then have "a = b" by simp
then have "c + a = c + b" by simp
with less show "False" by simp
qed
ultimately show "a < b"
by (simp add: order_le_less)
qed
lemma add_less_imp_less_right: "a + c < b + c \<Longrightarrow> a < b"
by (rule add_less_imp_less_left [of c]) (simp add: add.commute)
lemma add_less_cancel_left [simp]: "c + a < c + b \<longleftrightarrow> a < b"
by (blast intro: add_less_imp_less_left add_strict_left_mono)
lemma add_less_cancel_right [simp]: "a + c < b + c \<longleftrightarrow> a < b"
by (blast intro: add_less_imp_less_right add_strict_right_mono)
lemma add_le_cancel_left [simp]: "c + a \<le> c + b \<longleftrightarrow> a \<le> b"
by (auto simp: dest: add_le_imp_le_left add_left_mono)
lemma add_le_cancel_right [simp]: "a + c \<le> b + c \<longleftrightarrow> a \<le> b"
by (simp add: add.commute [of a c] add.commute [of b c])
lemma add_le_imp_le_right: "a + c \<le> b + c \<Longrightarrow> a \<le> b"
by simp
lemma max_add_distrib_left: "max x y + z = max (x + z) (y + z)"
unfolding max_def by auto
lemma min_add_distrib_left: "min x y + z = min (x + z) (y + z)"
unfolding min_def by auto
lemma max_add_distrib_right: "x + max y z = max (x + y) (x + z)"
unfolding max_def by auto
lemma min_add_distrib_right: "x + min y z = min (x + y) (x + z)"
unfolding min_def by auto
end
subsection \<open>Support for reasoning about signs\<close>
class ordered_comm_monoid_add = comm_monoid_add + ordered_ab_semigroup_add
begin
lemma add_nonneg_nonneg [simp]: "0 \<le> a \<Longrightarrow> 0 \<le> b \<Longrightarrow> 0 \<le> a + b"
using add_mono[of 0 a 0 b] by simp
lemma add_nonpos_nonpos: "a \<le> 0 \<Longrightarrow> b \<le> 0 \<Longrightarrow> a + b \<le> 0"
using add_mono[of a 0 b 0] by simp
lemma add_nonneg_eq_0_iff: "0 \<le> x \<Longrightarrow> 0 \<le> y \<Longrightarrow> x + y = 0 \<longleftrightarrow> x = 0 \<and> y = 0"
using add_left_mono[of 0 y x] add_right_mono[of 0 x y] by auto
lemma add_nonpos_eq_0_iff: "x \<le> 0 \<Longrightarrow> y \<le> 0 \<Longrightarrow> x + y = 0 \<longleftrightarrow> x = 0 \<and> y = 0"
using add_left_mono[of y 0 x] add_right_mono[of x 0 y] by auto
lemma add_increasing: "0 \<le> a \<Longrightarrow> b \<le> c \<Longrightarrow> b \<le> a + c"
using add_mono [of 0 a b c] by simp
lemma add_increasing2: "0 \<le> c \<Longrightarrow> b \<le> a \<Longrightarrow> b \<le> a + c"
by (simp add: add_increasing add.commute [of a])
lemma add_decreasing: "a \<le> 0 \<Longrightarrow> c \<le> b \<Longrightarrow> a + c \<le> b"
using add_mono [of a 0 c b] by simp
lemma add_decreasing2: "c \<le> 0 \<Longrightarrow> a \<le> b \<Longrightarrow> a + c \<le> b"
using add_mono[of a b c 0] by simp
lemma add_pos_nonneg: "0 < a \<Longrightarrow> 0 \<le> b \<Longrightarrow> 0 < a + b"
using less_le_trans[of 0 a "a + b"] by (simp add: add_increasing2)
lemma add_pos_pos: "0 < a \<Longrightarrow> 0 < b \<Longrightarrow> 0 < a + b"
by (intro add_pos_nonneg less_imp_le)
lemma add_nonneg_pos: "0 \<le> a \<Longrightarrow> 0 < b \<Longrightarrow> 0 < a + b"
using add_pos_nonneg[of b a] by (simp add: add_commute)
lemma add_neg_nonpos: "a < 0 \<Longrightarrow> b \<le> 0 \<Longrightarrow> a + b < 0"
using le_less_trans[of "a + b" a 0] by (simp add: add_decreasing2)
lemma add_neg_neg: "a < 0 \<Longrightarrow> b < 0 \<Longrightarrow> a + b < 0"
by (intro add_neg_nonpos less_imp_le)
lemma add_nonpos_neg: "a \<le> 0 \<Longrightarrow> b < 0 \<Longrightarrow> a + b < 0"
using add_neg_nonpos[of b a] by (simp add: add_commute)
lemmas add_sign_intros =
add_pos_nonneg add_pos_pos add_nonneg_pos add_nonneg_nonneg
add_neg_nonpos add_neg_neg add_nonpos_neg add_nonpos_nonpos
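(* For illustration: these introduction rules combine with the classical
   reasoner to discharge simple sign goals, e.g. *)
lemma "0 < a \<Longrightarrow> 0 \<le> b \<Longrightarrow> 0 < b + a"
  by (auto intro: add_sign_intros)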
end
class strict_ordered_comm_monoid_add = comm_monoid_add + strict_ordered_ab_semigroup_add
begin
lemma pos_add_strict: "0 < a \<Longrightarrow> b < c \<Longrightarrow> b < a + c"
using add_strict_mono [of 0 a b c] by simp
end
class ordered_cancel_comm_monoid_add = ordered_comm_monoid_add + cancel_ab_semigroup_add
begin
subclass ordered_cancel_ab_semigroup_add ..
subclass strict_ordered_comm_monoid_add ..
lemma add_strict_increasing: "0 < a \<Longrightarrow> b \<le> c \<Longrightarrow> b < a + c"
using add_less_le_mono [of 0 a b c] by simp
lemma add_strict_increasing2: "0 \<le> a \<Longrightarrow> b < c \<Longrightarrow> b < a + c"
using add_le_less_mono [of 0 a b c] by simp
end
class ordered_ab_semigroup_monoid_add_imp_le = monoid_add + ordered_ab_semigroup_add_imp_le
begin
lemma add_less_same_cancel1 [simp]: "b + a < b \<longleftrightarrow> a < 0"
using add_less_cancel_left [of _ _ 0] by simp
lemma add_less_same_cancel2 [simp]: "a + b < b \<longleftrightarrow> a < 0"
using add_less_cancel_right [of _ _ 0] by simp
lemma less_add_same_cancel1 [simp]: "a < a + b \<longleftrightarrow> 0 < b"
using add_less_cancel_left [of _ 0] by simp
lemma less_add_same_cancel2 [simp]: "a < b + a \<longleftrightarrow> 0 < b"
using add_less_cancel_right [of 0] by simp
lemma add_le_same_cancel1 [simp]: "b + a \<le> b \<longleftrightarrow> a \<le> 0"
using add_le_cancel_left [of _ _ 0] by simp
lemma add_le_same_cancel2 [simp]: "a + b \<le> b \<longleftrightarrow> a \<le> 0"
using add_le_cancel_right [of _ _ 0] by simp
lemma le_add_same_cancel1 [simp]: "a \<le> a + b \<longleftrightarrow> 0 \<le> b"
using add_le_cancel_left [of _ 0] by simp
lemma le_add_same_cancel2 [simp]: "a \<le> b + a \<longleftrightarrow> 0 \<le> b"
using add_le_cancel_right [of 0] by simp
subclass cancel_comm_monoid_add
by standard auto
subclass ordered_cancel_comm_monoid_add
by standard
end
class ordered_ab_group_add = ab_group_add + ordered_ab_semigroup_add
begin
subclass ordered_cancel_ab_semigroup_add ..
subclass ordered_ab_semigroup_monoid_add_imp_le
proof
fix a b c :: 'a
assume "c + a \<le> c + b"
then have "(-c) + (c + a) \<le> (-c) + (c + b)"
by (rule add_left_mono)
then have "((-c) + c) + a \<le> ((-c) + c) + b"
by (simp only: add.assoc)
then show "a \<le> b" by simp
qed
lemma max_diff_distrib_left: "max x y - z = max (x - z) (y - z)"
using max_add_distrib_left [of x y "- z"] by simp
lemma min_diff_distrib_left: "min x y - z = min (x - z) (y - z)"
using min_add_distrib_left [of x y "- z"] by simp
lemma le_imp_neg_le:
assumes "a \<le> b"
shows "- b \<le> - a"
proof -
from assms have "- a + a \<le> - a + b"
by (rule add_left_mono)
then have "0 \<le> - a + b"
by simp
then have "0 + (- b) \<le> (- a + b) + (- b)"
by (rule add_right_mono)
then show ?thesis
by (simp add: algebra_simps)
qed
lemma neg_le_iff_le [simp]: "- b \<le> - a \<longleftrightarrow> a \<le> b"
proof
assume "- b \<le> - a"
then have "- (- a) \<le> - (- b)"
by (rule le_imp_neg_le)
then show "a \<le> b"
by simp
next
assume "a \<le> b"
then show "- b \<le> - a"
by (rule le_imp_neg_le)
qed
lemma neg_le_0_iff_le [simp]: "- a \<le> 0 \<longleftrightarrow> 0 \<le> a"
by (subst neg_le_iff_le [symmetric]) simp
lemma neg_0_le_iff_le [simp]: "0 \<le> - a \<longleftrightarrow> a \<le> 0"
by (subst neg_le_iff_le [symmetric]) simp
lemma neg_less_iff_less [simp]: "- b < - a \<longleftrightarrow> a < b"
by (auto simp add: less_le)
lemma neg_less_0_iff_less [simp]: "- a < 0 \<longleftrightarrow> 0 < a"
by (subst neg_less_iff_less [symmetric]) simp
lemma neg_0_less_iff_less [simp]: "0 < - a \<longleftrightarrow> a < 0"
by (subst neg_less_iff_less [symmetric]) simp
text \<open>The next several equations can make the simplifier loop!\<close>
lemma less_minus_iff: "a < - b \<longleftrightarrow> b < - a"
proof -
have "- (- a) < - b \<longleftrightarrow> b < - a"
by (rule neg_less_iff_less)
then show ?thesis by simp
qed
lemma minus_less_iff: "- a < b \<longleftrightarrow> - b < a"
proof -
have "- a < - (- b) \<longleftrightarrow> - b < a"
by (rule neg_less_iff_less)
then show ?thesis by simp
qed
lemma le_minus_iff: "a \<le> - b \<longleftrightarrow> b \<le> - a"
by (auto simp: order.order_iff_strict less_minus_iff)
lemma minus_le_iff: "- a \<le> b \<longleftrightarrow> - b \<le> a"
by (auto simp add: le_less minus_less_iff)
lemma diff_less_0_iff_less [simp]: "a - b < 0 \<longleftrightarrow> a < b"
proof -
have "a - b < 0 \<longleftrightarrow> a + (- b) < b + (- b)"
by simp
also have "\<dots> \<longleftrightarrow> a < b"
by (simp only: add_less_cancel_right)
finally show ?thesis .
qed
lemmas less_iff_diff_less_0 = diff_less_0_iff_less [symmetric]
lemma diff_less_eq [algebra_simps, algebra_split_simps, field_simps, field_split_simps]:
"a - b < c \<longleftrightarrow> a < c + b"
proof (subst less_iff_diff_less_0 [of a])
show "(a - b < c) = (a - (c + b) < 0)"
by (simp add: algebra_simps less_iff_diff_less_0 [of _ c])
qed
lemma less_diff_eq [algebra_simps, algebra_split_simps, field_simps, field_split_simps]:
"a < c - b \<longleftrightarrow> a + b < c"
proof (subst less_iff_diff_less_0 [of "a + b"])
show "(a < c - b) = (a + b - c < 0)"
by (simp add: algebra_simps less_iff_diff_less_0 [of a])
qed
lemma diff_gt_0_iff_gt [simp]: "a - b > 0 \<longleftrightarrow> a > b"
by (simp add: less_diff_eq)
lemma diff_le_eq [algebra_simps, algebra_split_simps, field_simps, field_split_simps]:
"a - b \<le> c \<longleftrightarrow> a \<le> c + b"
by (auto simp add: le_less diff_less_eq)
lemma le_diff_eq [algebra_simps, algebra_split_simps, field_simps, field_split_simps]:
"a \<le> c - b \<longleftrightarrow> a + b \<le> c"
by (auto simp add: le_less less_diff_eq)
lemma diff_le_0_iff_le [simp]: "a - b \<le> 0 \<longleftrightarrow> a \<le> b"
by (simp add: algebra_simps)
lemmas le_iff_diff_le_0 = diff_le_0_iff_le [symmetric]
lemma diff_ge_0_iff_ge [simp]: "a - b \<ge> 0 \<longleftrightarrow> a \<ge> b"
by (simp add: le_diff_eq)
lemma diff_eq_diff_less: "a - b = c - d \<Longrightarrow> a < b \<longleftrightarrow> c < d"
by (auto simp only: less_iff_diff_less_0 [of a b] less_iff_diff_less_0 [of c d])
lemma diff_eq_diff_less_eq: "a - b = c - d \<Longrightarrow> a \<le> b \<longleftrightarrow> c \<le> d"
by (auto simp only: le_iff_diff_le_0 [of a b] le_iff_diff_le_0 [of c d])
lemma diff_mono: "a \<le> b \<Longrightarrow> d \<le> c \<Longrightarrow> a - c \<le> b - d"
by (simp add: field_simps add_mono)
lemma diff_left_mono: "b \<le> a \<Longrightarrow> c - a \<le> c - b"
by (simp add: field_simps)
lemma diff_right_mono: "a \<le> b \<Longrightarrow> a - c \<le> b - c"
by (simp add: field_simps)
lemma diff_strict_mono: "a < b \<Longrightarrow> d < c \<Longrightarrow> a - c < b - d"
by (simp add: field_simps add_strict_mono)
lemma diff_strict_left_mono: "b < a \<Longrightarrow> c - a < c - b"
by (simp add: field_simps)
lemma diff_strict_right_mono: "a < b \<Longrightarrow> a - c < b - c"
by (simp add: field_simps)
end
locale group_cancel
begin
lemma add1: "(A::'a::comm_monoid_add) \<equiv> k + a \<Longrightarrow> A + b \<equiv> k + (a + b)"
by (simp only: ac_simps)
lemma add2: "(B::'a::comm_monoid_add) \<equiv> k + b \<Longrightarrow> a + B \<equiv> k + (a + b)"
by (simp only: ac_simps)
lemma sub1: "(A::'a::ab_group_add) \<equiv> k + a \<Longrightarrow> A - b \<equiv> k + (a - b)"
by (simp only: add_diff_eq)
lemma sub2: "(B::'a::ab_group_add) \<equiv> k + b \<Longrightarrow> a - B \<equiv> - k + (a - b)"
by (simp only: minus_add diff_conv_add_uminus ac_simps)
lemma neg1: "(A::'a::ab_group_add) \<equiv> k + a \<Longrightarrow> - A \<equiv> - k + - a"
by (simp only: minus_add_distrib)
lemma rule0: "(a::'a::comm_monoid_add) \<equiv> a + 0"
by (simp only: add_0_right)
end
ML_file \<open>Tools/group_cancel.ML\<close>
simproc_setup group_cancel_add ("a + b::'a::ab_group_add") =
\<open>fn phi => fn ss => try Group_Cancel.cancel_add_conv\<close>
simproc_setup group_cancel_diff ("a - b::'a::ab_group_add") =
\<open>fn phi => fn ss => try Group_Cancel.cancel_diff_conv\<close>
simproc_setup group_cancel_eq ("a = (b::'a::ab_group_add)") =
\<open>fn phi => fn ss => try Group_Cancel.cancel_eq_conv\<close>
simproc_setup group_cancel_le ("a \<le> (b::'a::ordered_ab_group_add)") =
\<open>fn phi => fn ss => try Group_Cancel.cancel_le_conv\<close>
simproc_setup group_cancel_less ("a < (b::'a::ordered_ab_group_add)") =
\<open>fn phi => fn ss => try Group_Cancel.cancel_less_conv\<close>
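(* For illustration: these simprocs cancel common or opposite summands in
   sums, differences, equations and inequalities, e.g. *)
lemma "a + b + c - b = a + (c::'a::ab_group_add)"
  by simp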
class linordered_ab_semigroup_add =
linorder + ordered_ab_semigroup_add
class linordered_cancel_ab_semigroup_add =
linorder + ordered_cancel_ab_semigroup_add
begin
subclass linordered_ab_semigroup_add ..
subclass ordered_ab_semigroup_add_imp_le
proof
fix a b c :: 'a
assume le1: "c + a \<le> c + b"
show "a \<le> b"
proof (rule ccontr)
assume *: "\<not> ?thesis"
then have "b \<le> a" by (simp add: linorder_not_le)
then have "c + b \<le> c + a" by (rule add_left_mono)
then have "c + a = c + b"
using le1 by (iprover intro: order.antisym)
then have "a = b"
by simp
with * show False
by (simp add: linorder_not_le [symmetric])
qed
qed
end
class linordered_ab_group_add = linorder + ordered_ab_group_add
begin
subclass linordered_cancel_ab_semigroup_add ..
lemma equal_neg_zero [simp]: "a = - a \<longleftrightarrow> a = 0"
proof
assume "a = 0"
then show "a = - a" by simp
next
assume A: "a = - a"
show "a = 0"
proof (cases "0 \<le> a")
case True
with A have "0 \<le> - a" by auto
with le_minus_iff have "a \<le> 0" by simp
with True show ?thesis by (auto intro: order_trans)
next
case False
then have B: "a \<le> 0" by auto
with A have "- a \<le> 0" by auto
with B show ?thesis by (auto intro: order_trans)
qed
qed
lemma neg_equal_zero [simp]: "- a = a \<longleftrightarrow> a = 0"
by (auto dest: sym)
lemma neg_less_eq_nonneg [simp]: "- a \<le> a \<longleftrightarrow> 0 \<le> a"
proof
assume *: "- a \<le> a"
show "0 \<le> a"
proof (rule classical)
assume "\<not> ?thesis"
then have "a < 0" by auto
with * have "- a < 0" by (rule le_less_trans)
then show ?thesis by auto
qed
next
assume *: "0 \<le> a"
then have "- a \<le> 0" by (simp add: minus_le_iff)
from this * show "- a \<le> a" by (rule order_trans)
qed
lemma neg_less_pos [simp]: "- a < a \<longleftrightarrow> 0 < a"
by (auto simp add: less_le)
lemma less_eq_neg_nonpos [simp]: "a \<le> - a \<longleftrightarrow> a \<le> 0"
using neg_less_eq_nonneg [of "- a"] by simp
lemma less_neg_neg [simp]: "a < - a \<longleftrightarrow> a < 0"
using neg_less_pos [of "- a"] by simp
lemma double_zero [simp]: "a + a = 0 \<longleftrightarrow> a = 0"
proof
assume "a + a = 0"
then have a: "- a = a" by (rule minus_unique)
then show "a = 0" by (simp only: neg_equal_zero)
next
assume "a = 0"
then show "a + a = 0" by simp
qed
lemma double_zero_sym [simp]: "0 = a + a \<longleftrightarrow> a = 0"
using double_zero [of a] by (simp only: eq_commute)
lemma zero_less_double_add_iff_zero_less_single_add [simp]: "0 < a + a \<longleftrightarrow> 0 < a"
proof
assume "0 < a + a"
then have "0 - a < a" by (simp only: diff_less_eq)
then have "- a < a" by simp
then show "0 < a" by simp
next
assume "0 < a"
with this have "0 + 0 < a + a"
by (rule add_strict_mono)
then show "0 < a + a" by simp
qed
lemma zero_le_double_add_iff_zero_le_single_add [simp]: "0 \<le> a + a \<longleftrightarrow> 0 \<le> a"
by (auto simp add: le_less)
lemma double_add_less_zero_iff_single_add_less_zero [simp]: "a + a < 0 \<longleftrightarrow> a < 0"
proof -
have "\<not> a + a < 0 \<longleftrightarrow> \<not> a < 0"
by (simp add: not_less)
then show ?thesis by simp
qed
lemma double_add_le_zero_iff_single_add_le_zero [simp]: "a + a \<le> 0 \<longleftrightarrow> a \<le> 0"
proof -
have "\<not> a + a \<le> 0 \<longleftrightarrow> \<not> a \<le> 0"
by (simp add: not_le)
then show ?thesis by simp
qed
lemma minus_max_eq_min: "- max x y = min (- x) (- y)"
by (auto simp add: max_def min_def)
lemma minus_min_eq_max: "- min x y = max (- x) (- y)"
by (auto simp add: max_def min_def)
end
class abs =
fixes abs :: "'a \<Rightarrow> 'a" ("\<bar>_\<bar>")
class sgn =
fixes sgn :: "'a \<Rightarrow> 'a"
class ordered_ab_group_add_abs = ordered_ab_group_add + abs +
assumes abs_ge_zero [simp]: "\<bar>a\<bar> \<ge> 0"
and abs_ge_self: "a \<le> \<bar>a\<bar>"
and abs_leI: "a \<le> b \<Longrightarrow> - a \<le> b \<Longrightarrow> \<bar>a\<bar> \<le> b"
and abs_minus_cancel [simp]: "\<bar>-a\<bar> = \<bar>a\<bar>"
and abs_triangle_ineq: "\<bar>a + b\<bar> \<le> \<bar>a\<bar> + \<bar>b\<bar>"
begin
lemma abs_minus_le_zero: "- \<bar>a\<bar> \<le> 0"
unfolding neg_le_0_iff_le by simp
lemma abs_of_nonneg [simp]:
assumes nonneg: "0 \<le> a"
shows "\<bar>a\<bar> = a"
proof (rule order.antisym)
show "a \<le> \<bar>a\<bar>" by (rule abs_ge_self)
from nonneg le_imp_neg_le have "- a \<le> 0" by simp
from this nonneg have "- a \<le> a" by (rule order_trans)
then show "\<bar>a\<bar> \<le> a" by (auto intro: abs_leI)
qed
lemma abs_idempotent [simp]: "\<bar>\<bar>a\<bar>\<bar> = \<bar>a\<bar>"
by (rule order.antisym) (auto intro!: abs_ge_self abs_leI order_trans [of "- \<bar>a\<bar>" 0 "\<bar>a\<bar>"])
lemma abs_eq_0 [simp]: "\<bar>a\<bar> = 0 \<longleftrightarrow> a = 0"
proof -
have "\<bar>a\<bar> = 0 \<Longrightarrow> a = 0"
proof (rule order.antisym)
assume zero: "\<bar>a\<bar> = 0"
with abs_ge_self show "a \<le> 0" by auto
from zero have "\<bar>-a\<bar> = 0" by simp
with abs_ge_self [of "- a"] have "- a \<le> 0" by auto
with neg_le_0_iff_le show "0 \<le> a" by auto
qed
then show ?thesis by auto
qed
lemma abs_zero [simp]: "\<bar>0\<bar> = 0"
by simp
lemma abs_0_eq [simp]: "0 = \<bar>a\<bar> \<longleftrightarrow> a = 0"
proof -
have "0 = \<bar>a\<bar> \<longleftrightarrow> \<bar>a\<bar> = 0" by (simp only: eq_ac)
then show ?thesis by simp
qed
lemma abs_le_zero_iff [simp]: "\<bar>a\<bar> \<le> 0 \<longleftrightarrow> a = 0"
proof
assume "\<bar>a\<bar> \<le> 0"
then have "\<bar>a\<bar> = 0" by (rule order.antisym) simp
then show "a = 0" by simp
next
assume "a = 0"
then show "\<bar>a\<bar> \<le> 0" by simp
qed
lemma abs_le_self_iff [simp]: "\<bar>a\<bar> \<le> a \<longleftrightarrow> 0 \<le> a"
proof -
have "0 \<le> \<bar>a\<bar>"
using abs_ge_zero by blast
then have "\<bar>a\<bar> \<le> a \<Longrightarrow> 0 \<le> a"
using order.trans by blast
then show ?thesis
using abs_of_nonneg eq_refl by blast
qed
lemma zero_less_abs_iff [simp]: "0 < \<bar>a\<bar> \<longleftrightarrow> a \<noteq> 0"
by (simp add: less_le)
lemma abs_not_less_zero [simp]: "\<not> \<bar>a\<bar> < 0"
proof -
have "x \<le> y \<Longrightarrow> \<not> y < x" for x y by auto
then show ?thesis by simp
qed
lemma abs_ge_minus_self: "- a \<le> \<bar>a\<bar>"
proof -
have "- a \<le> \<bar>-a\<bar>" by (rule abs_ge_self)
then show ?thesis by simp
qed
lemma abs_minus_commute: "\<bar>a - b\<bar> = \<bar>b - a\<bar>"
proof -
have "\<bar>a - b\<bar> = \<bar>- (a - b)\<bar>"
by (simp only: abs_minus_cancel)
also have "\<dots> = \<bar>b - a\<bar>" by simp
finally show ?thesis .
qed
lemma abs_of_pos: "0 < a \<Longrightarrow> \<bar>a\<bar> = a"
by (rule abs_of_nonneg) (rule less_imp_le)
lemma abs_of_nonpos [simp]:
assumes "a \<le> 0"
shows "\<bar>a\<bar> = - a"
proof -
let ?b = "- a"
have "- ?b \<le> 0 \<Longrightarrow> \<bar>- ?b\<bar> = - (- ?b)"
unfolding abs_minus_cancel [of ?b]
unfolding neg_le_0_iff_le [of ?b]
unfolding minus_minus by (erule abs_of_nonneg)
then show ?thesis using assms by auto
qed
lemma abs_of_neg: "a < 0 \<Longrightarrow> \<bar>a\<bar> = - a"
by (rule abs_of_nonpos) (rule less_imp_le)
lemma abs_le_D1: "\<bar>a\<bar> \<le> b \<Longrightarrow> a \<le> b"
using abs_ge_self by (blast intro: order_trans)
lemma abs_le_D2: "\<bar>a\<bar> \<le> b \<Longrightarrow> - a \<le> b"
using abs_le_D1 [of "- a"] by simp
lemma abs_le_iff: "\<bar>a\<bar> \<le> b \<longleftrightarrow> a \<le> b \<and> - a \<le> b"
by (blast intro: abs_leI dest: abs_le_D1 abs_le_D2)
lemma abs_triangle_ineq2: "\<bar>a\<bar> - \<bar>b\<bar> \<le> \<bar>a - b\<bar>"
proof -
have "\<bar>a\<bar> = \<bar>b + (a - b)\<bar>"
by (simp add: algebra_simps)
then have "\<bar>a\<bar> \<le> \<bar>b\<bar> + \<bar>a - b\<bar>"
by (simp add: abs_triangle_ineq)
then show ?thesis
by (simp add: algebra_simps)
qed
lemma abs_triangle_ineq2_sym: "\<bar>a\<bar> - \<bar>b\<bar> \<le> \<bar>b - a\<bar>"
by (simp only: abs_minus_commute [of b] abs_triangle_ineq2)
lemma abs_triangle_ineq3: "\<bar>\<bar>a\<bar> - \<bar>b\<bar>\<bar> \<le> \<bar>a - b\<bar>"
by (simp add: abs_le_iff abs_triangle_ineq2 abs_triangle_ineq2_sym)
lemma abs_triangle_ineq4: "\<bar>a - b\<bar> \<le> \<bar>a\<bar> + \<bar>b\<bar>"
proof -
have "\<bar>a - b\<bar> = \<bar>a + - b\<bar>"
by (simp add: algebra_simps)
also have "\<dots> \<le> \<bar>a\<bar> + \<bar>- b\<bar>"
by (rule abs_triangle_ineq)
finally show ?thesis by simp
qed
lemma abs_diff_triangle_ineq: "\<bar>a + b - (c + d)\<bar> \<le> \<bar>a - c\<bar> + \<bar>b - d\<bar>"
proof -
have "\<bar>a + b - (c + d)\<bar> = \<bar>(a - c) + (b - d)\<bar>"
by (simp add: algebra_simps)
also have "\<dots> \<le> \<bar>a - c\<bar> + \<bar>b - d\<bar>"
by (rule abs_triangle_ineq)
finally show ?thesis .
qed
lemma abs_add_abs [simp]: "\<bar>\<bar>a\<bar> + \<bar>b\<bar>\<bar> = \<bar>a\<bar> + \<bar>b\<bar>"
(is "?L = ?R")
proof (rule order.antisym)
show "?L \<ge> ?R" by (rule abs_ge_self)
have "?L \<le> \<bar>\<bar>a\<bar>\<bar> + \<bar>\<bar>b\<bar>\<bar>" by (rule abs_triangle_ineq)
also have "\<dots> = ?R" by simp
finally show "?L \<le> ?R" .
qed
end
lemma dense_eq0_I:
fixes x::"'a::{dense_linorder,ordered_ab_group_add_abs}"
assumes "\<And>e. 0 < e \<Longrightarrow> \<bar>x\<bar> \<le> e"
shows "x = 0"
proof (cases "\<bar>x\<bar> = 0")
case False
then have "\<bar>x\<bar> > 0"
by simp
then obtain z where "0 < z" "z < \<bar>x\<bar>"
using dense by force
then show ?thesis
using assms by (simp flip: not_less)
qed auto
hide_fact (open) ab_diff_conv_add_uminus add_0 mult_1 ab_left_minus
lemmas add_0 = add_0_left (* FIXME duplicate *)
lemmas mult_1 = mult_1_left (* FIXME duplicate *)
lemmas ab_left_minus = left_minus (* FIXME duplicate *)
lemmas diff_diff_eq = diff_diff_add (* FIXME duplicate *)
subsection \<open>Canonically ordered monoids\<close>
text \<open>Canonically ordered monoids are never groups.\<close>
class canonically_ordered_monoid_add = comm_monoid_add + order +
assumes le_iff_add: "a \<le> b \<longleftrightarrow> (\<exists>c. b = a + c)"
begin
lemma zero_le[simp]: "0 \<le> x"
by (auto simp: le_iff_add)
lemma le_zero_eq[simp]: "n \<le> 0 \<longleftrightarrow> n = 0"
by (auto intro: order.antisym)
lemma not_less_zero[simp]: "\<not> n < 0"
by (auto simp: less_le)
lemma zero_less_iff_neq_zero: "0 < n \<longleftrightarrow> n \<noteq> 0"
by (auto simp: less_le)
text \<open>This theorem is useful with \<open>blast\<close>\<close>
lemma gr_zeroI: "(n = 0 \<Longrightarrow> False) \<Longrightarrow> 0 < n"
by (rule zero_less_iff_neq_zero[THEN iffD2]) iprover
lemma not_gr_zero[simp]: "\<not> 0 < n \<longleftrightarrow> n = 0"
by (simp add: zero_less_iff_neq_zero)
subclass ordered_comm_monoid_add
proof qed (auto simp: le_iff_add add_ac)
lemma gr_implies_not_zero: "m < n \<Longrightarrow> n \<noteq> 0"
by auto
lemma add_eq_0_iff_both_eq_0[simp]: "x + y = 0 \<longleftrightarrow> x = 0 \<and> y = 0"
by (intro add_nonneg_eq_0_iff zero_le)
lemma zero_eq_add_iff_both_eq_0[simp]: "0 = x + y \<longleftrightarrow> x = 0 \<and> y = 0"
using add_eq_0_iff_both_eq_0[of x y] unfolding eq_commute[of 0] .
lemma less_eqE:
assumes \<open>a \<le> b\<close>
obtains c where \<open>b = a + c\<close>
using assms by (auto simp add: le_iff_add)
lemma lessE:
assumes \<open>a < b\<close>
obtains c where \<open>b = a + c\<close> and \<open>c \<noteq> 0\<close>
proof -
from assms have \<open>a \<le> b\<close> \<open>a \<noteq> b\<close>
by simp_all
from \<open>a \<le> b\<close> obtain c where \<open>b = a + c\<close>
by (rule less_eqE)
moreover have \<open>c \<noteq> 0\<close> using \<open>a \<noteq> b\<close> \<open>b = a + c\<close>
by auto
ultimately show ?thesis
by (rule that)
qed
lemmas zero_order = zero_le le_zero_eq not_less_zero zero_less_iff_neq_zero not_gr_zero
\<comment> \<open>This should be attributed with \<open>[iff]\<close>, but then \<open>blast\<close> fails in \<open>Set\<close>.\<close>
end
class ordered_cancel_comm_monoid_diff =
canonically_ordered_monoid_add + comm_monoid_diff + ordered_ab_semigroup_add_imp_le
begin
context
fixes a b :: 'a
assumes le: "a \<le> b"
begin
lemma add_diff_inverse: "a + (b - a) = b"
using le by (auto simp add: le_iff_add)
lemma add_diff_assoc: "c + (b - a) = c + b - a"
using le by (auto simp add: le_iff_add add.left_commute [of c])
lemma add_diff_assoc2: "b - a + c = b + c - a"
using le by (auto simp add: le_iff_add add.assoc)
lemma diff_add_assoc: "c + b - a = c + (b - a)"
using le by (simp add: add.commute add_diff_assoc)
lemma diff_add_assoc2: "b + c - a = b - a + c"
using le by (simp add: add.commute add_diff_assoc)
lemma diff_diff_right: "c - (b - a) = c + a - b"
by (simp add: add_diff_inverse add_diff_cancel_left [of a c "b - a", symmetric] add.commute)
lemma diff_add: "b - a + a = b"
by (simp add: add.commute add_diff_inverse)
lemma le_add_diff: "c \<le> b + c - a"
by (auto simp add: add.commute diff_add_assoc2 le_iff_add)
lemma le_imp_diff_is_add: "a \<le> b \<Longrightarrow> b - a = c \<longleftrightarrow> b = c + a"
by (auto simp add: add.commute add_diff_inverse)
lemma le_diff_conv2: "c \<le> b - a \<longleftrightarrow> c + a \<le> b"
(is "?P \<longleftrightarrow> ?Q")
proof
assume ?P
then have "c + a \<le> b - a + a"
by (rule add_right_mono)
then show ?Q
by (simp add: add_diff_inverse add.commute)
next
assume ?Q
then have "a + c \<le> a + (b - a)"
by (simp add: add_diff_inverse add.commute)
then show ?P by simp
qed
end
end
subsection \<open>Tools setup\<close>
lemma add_mono_thms_linordered_semiring:
fixes i j k :: "'a::ordered_ab_semigroup_add"
shows "i \<le> j \<and> k \<le> l \<Longrightarrow> i + k \<le> j + l"
and "i = j \<and> k \<le> l \<Longrightarrow> i + k \<le> j + l"
and "i \<le> j \<and> k = l \<Longrightarrow> i + k \<le> j + l"
and "i = j \<and> k = l \<Longrightarrow> i + k = j + l"
by (rule add_mono, clarify+)+
lemma add_mono_thms_linordered_field:
fixes i j k :: "'a::ordered_cancel_ab_semigroup_add"
shows "i < j \<and> k = l \<Longrightarrow> i + k < j + l"
and "i = j \<and> k < l \<Longrightarrow> i + k < j + l"
and "i < j \<and> k \<le> l \<Longrightarrow> i + k < j + l"
and "i \<le> j \<and> k < l \<Longrightarrow> i + k < j + l"
and "i < j \<and> k < l \<Longrightarrow> i + k < j + l"
by (auto intro: add_strict_right_mono add_strict_left_mono
add_less_le_mono add_le_less_mono add_strict_mono)
code_identifier
code_module Groups \<rightharpoonup> (SML) Arith and (OCaml) Arith and (Haskell) Arith
end
diff --git a/src/HOL/HOL.thy b/src/HOL/HOL.thy
--- a/src/HOL/HOL.thy
+++ b/src/HOL/HOL.thy
@@ -1,2177 +1,2177 @@
(* Title: HOL/HOL.thy
Author: Tobias Nipkow, Markus Wenzel, and Larry Paulson
*)
section \<open>The basis of Higher-Order Logic\<close>
theory HOL
imports Pure Tools.Code_Generator
keywords
"try" "solve_direct" "quickcheck" "print_coercions" "print_claset"
"print_induct_rules" :: diag and
"quickcheck_params" :: thy_decl
abbrevs "?<" = "\<exists>\<^sub>\<le>\<^sub>1"
begin
ML_file \<open>~~/src/Tools/misc_legacy.ML\<close>
ML_file \<open>~~/src/Tools/try.ML\<close>
ML_file \<open>~~/src/Tools/quickcheck.ML\<close>
ML_file \<open>~~/src/Tools/solve_direct.ML\<close>
ML_file \<open>~~/src/Tools/IsaPlanner/zipper.ML\<close>
ML_file \<open>~~/src/Tools/IsaPlanner/isand.ML\<close>
ML_file \<open>~~/src/Tools/IsaPlanner/rw_inst.ML\<close>
ML_file \<open>~~/src/Provers/hypsubst.ML\<close>
ML_file \<open>~~/src/Provers/splitter.ML\<close>
ML_file \<open>~~/src/Provers/classical.ML\<close>
ML_file \<open>~~/src/Provers/blast.ML\<close>
ML_file \<open>~~/src/Provers/clasimp.ML\<close>
ML_file \<open>~~/src/Tools/eqsubst.ML\<close>
ML_file \<open>~~/src/Provers/quantifier1.ML\<close>
ML_file \<open>~~/src/Tools/atomize_elim.ML\<close>
ML_file \<open>~~/src/Tools/cong_tac.ML\<close>
ML_file \<open>~~/src/Tools/intuitionistic.ML\<close> setup \<open>Intuitionistic.method_setup \<^binding>\<open>iprover\<close>\<close>
ML_file \<open>~~/src/Tools/project_rule.ML\<close>
ML_file \<open>~~/src/Tools/subtyping.ML\<close>
ML_file \<open>~~/src/Tools/case_product.ML\<close>
ML \<open>Plugin_Name.declare_setup \<^binding>\<open>extraction\<close>\<close>
ML \<open>
Plugin_Name.declare_setup \<^binding>\<open>quickcheck_random\<close>;
Plugin_Name.declare_setup \<^binding>\<open>quickcheck_exhaustive\<close>;
Plugin_Name.declare_setup \<^binding>\<open>quickcheck_bounded_forall\<close>;
Plugin_Name.declare_setup \<^binding>\<open>quickcheck_full_exhaustive\<close>;
Plugin_Name.declare_setup \<^binding>\<open>quickcheck_narrowing\<close>;
\<close>
ML \<open>
Plugin_Name.define_setup \<^binding>\<open>quickcheck\<close>
[\<^plugin>\<open>quickcheck_exhaustive\<close>,
\<^plugin>\<open>quickcheck_random\<close>,
\<^plugin>\<open>quickcheck_bounded_forall\<close>,
\<^plugin>\<open>quickcheck_full_exhaustive\<close>,
\<^plugin>\<open>quickcheck_narrowing\<close>]
\<close>
subsection \<open>Primitive logic\<close>
text \<open>
The definition of the logic is based on Mike Gordon's technical report \<^cite>\<open>"Gordon-TR68"\<close> that
describes the first implementation of HOL. However, there are a number of differences.
In particular, we start with the definite description operator and introduce Hilbert's \<open>\<epsilon>\<close> operator
only much later. Moreover, axiom \<open>(P \<longrightarrow> Q) \<longrightarrow> (Q \<longrightarrow> P) \<longrightarrow> (P = Q)\<close> is derived from the other
axioms. The fact that this axiom is derivable was first noticed by Bruno Barras (for Mike Gordon's
line of HOL systems) and later independently by Alexander Maletzky (for Isabelle/HOL).
\<close>
subsubsection \<open>Core syntax\<close>
setup \<open>Axclass.class_axiomatization (\<^binding>\<open>type\<close>, [])\<close>
default_sort type
setup \<open>Object_Logic.add_base_sort \<^sort>\<open>type\<close>\<close>
setup \<open>Proofterm.set_preproc (Proof_Rewrite_Rules.standard_preproc [])\<close>
axiomatization where fun_arity: "OFCLASS('a \<Rightarrow> 'b, type_class)"
instance "fun" :: (type, type) type by (rule fun_arity)
axiomatization where itself_arity: "OFCLASS('a itself, type_class)"
instance itself :: (type) type by (rule itself_arity)
typedecl bool
judgment Trueprop :: "bool \<Rightarrow> prop" ("(_)" 5)
axiomatization implies :: "[bool, bool] \<Rightarrow> bool" (infixr "\<longrightarrow>" 25)
and eq :: "['a, 'a] \<Rightarrow> bool"
and The :: "('a \<Rightarrow> bool) \<Rightarrow> 'a"
notation (input)
eq (infixl "=" 50)
notation (output)
eq (infix "=" 50)
text \<open>The input syntax for \<open>eq\<close> is more permissive than the output syntax
because of the large amount of material that relies on infixl.\<close>
subsubsection \<open>Defined connectives and quantifiers\<close>
definition True :: bool
where "True \<equiv> ((\<lambda>x::bool. x) = (\<lambda>x. x))"
definition All :: "('a \<Rightarrow> bool) \<Rightarrow> bool" (binder "\<forall>" 10)
where "All P \<equiv> (P = (\<lambda>x. True))"
definition Ex :: "('a \<Rightarrow> bool) \<Rightarrow> bool" (binder "\<exists>" 10)
where "Ex P \<equiv> \<forall>Q. (\<forall>x. P x \<longrightarrow> Q) \<longrightarrow> Q"
definition False :: bool
where "False \<equiv> (\<forall>P. P)"
definition Not :: "bool \<Rightarrow> bool" ("\<not> _" [40] 40)
where not_def: "\<not> P \<equiv> P \<longrightarrow> False"
definition conj :: "[bool, bool] \<Rightarrow> bool" (infixr "\<and>" 35)
where and_def: "P \<and> Q \<equiv> \<forall>R. (P \<longrightarrow> Q \<longrightarrow> R) \<longrightarrow> R"
definition disj :: "[bool, bool] \<Rightarrow> bool" (infixr "\<or>" 30)
where or_def: "P \<or> Q \<equiv> \<forall>R. (P \<longrightarrow> R) \<longrightarrow> (Q \<longrightarrow> R) \<longrightarrow> R"
definition Uniq :: "('a \<Rightarrow> bool) \<Rightarrow> bool"
where "Uniq P \<equiv> (\<forall>x y. P x \<longrightarrow> P y \<longrightarrow> y = x)"
definition Ex1 :: "('a \<Rightarrow> bool) \<Rightarrow> bool"
where "Ex1 P \<equiv> \<exists>x. P x \<and> (\<forall>y. P y \<longrightarrow> y = x)"
subsubsection \<open>Additional concrete syntax\<close>
syntax (ASCII) "_Uniq" :: "pttrn \<Rightarrow> bool \<Rightarrow> bool" ("(4?< _./ _)" [0, 10] 10)
syntax "_Uniq" :: "pttrn \<Rightarrow> bool \<Rightarrow> bool" ("(2\<exists>\<^sub>\<le>\<^sub>1 _./ _)" [0, 10] 10)
translations "\<exists>\<^sub>\<le>\<^sub>1x. P" \<rightleftharpoons> "CONST Uniq (\<lambda>x. P)"
print_translation \<open>
[Syntax_Trans.preserve_binder_abs_tr' \<^const_syntax>\<open>Uniq\<close> \<^syntax_const>\<open>_Uniq\<close>]
\<close> \<comment> \<open>to avoid eta-contraction of body\<close>
syntax (ASCII)
"_Ex1" :: "pttrn \<Rightarrow> bool \<Rightarrow> bool" ("(3EX! _./ _)" [0, 10] 10)
syntax (input)
"_Ex1" :: "pttrn \<Rightarrow> bool \<Rightarrow> bool" ("(3?! _./ _)" [0, 10] 10)
syntax "_Ex1" :: "pttrn \<Rightarrow> bool \<Rightarrow> bool" ("(3\<exists>!_./ _)" [0, 10] 10)
translations "\<exists>!x. P" \<rightleftharpoons> "CONST Ex1 (\<lambda>x. P)"
print_translation \<open>
[Syntax_Trans.preserve_binder_abs_tr' \<^const_syntax>\<open>Ex1\<close> \<^syntax_const>\<open>_Ex1\<close>]
\<close> \<comment> \<open>to avoid eta-contraction of body\<close>
syntax
"_Not_Ex" :: "idts \<Rightarrow> bool \<Rightarrow> bool" ("(3\<nexists>_./ _)" [0, 10] 10)
"_Not_Ex1" :: "pttrn \<Rightarrow> bool \<Rightarrow> bool" ("(3\<nexists>!_./ _)" [0, 10] 10)
translations
"\<nexists>x. P" \<rightleftharpoons> "\<not> (\<exists>x. P)"
"\<nexists>!x. P" \<rightleftharpoons> "\<not> (\<exists>!x. P)"
abbreviation not_equal :: "['a, 'a] \<Rightarrow> bool" (infix "\<noteq>" 50)
where "x \<noteq> y \<equiv> \<not> (x = y)"
notation (ASCII)
Not ("~ _" [40] 40) and
conj (infixr "&" 35) and
disj (infixr "|" 30) and
implies (infixr "-->" 25) and
not_equal (infix "~=" 50)
abbreviation (iff)
iff :: "[bool, bool] \<Rightarrow> bool" (infixr "\<longleftrightarrow>" 25)
where "A \<longleftrightarrow> B \<equiv> A = B"
syntax "_The" :: "[pttrn, bool] \<Rightarrow> 'a" ("(3THE _./ _)" [0, 10] 10)
translations "THE x. P" \<rightleftharpoons> "CONST The (\<lambda>x. P)"
print_translation \<open>
[(\<^const_syntax>\<open>The\<close>, fn _ => fn [Abs abs] =>
let val (x, t) = Syntax_Trans.atomic_abs_tr' abs
in Syntax.const \<^syntax_const>\<open>_The\<close> $ x $ t end)]
\<close> \<comment> \<open>To avoid eta-contraction of body\<close>
nonterminal letbinds and letbind
syntax
"_bind" :: "[pttrn, 'a] \<Rightarrow> letbind" ("(2_ =/ _)" 10)
"" :: "letbind \<Rightarrow> letbinds" ("_")
"_binds" :: "[letbind, letbinds] \<Rightarrow> letbinds" ("_;/ _")
"_Let" :: "[letbinds, 'a] \<Rightarrow> 'a" ("(let (_)/ in (_))" [0, 10] 10)
nonterminal case_syn and cases_syn
syntax
"_case_syntax" :: "['a, cases_syn] \<Rightarrow> 'b" ("(case _ of/ _)" 10)
"_case1" :: "['a, 'b] \<Rightarrow> case_syn" ("(2_ \<Rightarrow>/ _)" 10)
"" :: "case_syn \<Rightarrow> cases_syn" ("_")
"_case2" :: "[case_syn, cases_syn] \<Rightarrow> cases_syn" ("_/ | _")
syntax (ASCII)
"_case1" :: "['a, 'b] \<Rightarrow> case_syn" ("(2_ =>/ _)" 10)
notation (ASCII)
All (binder "ALL " 10) and
Ex (binder "EX " 10)
notation (input)
All (binder "! " 10) and
Ex (binder "? " 10)
subsubsection \<open>Axioms and basic definitions\<close>
axiomatization where
refl: "t = (t::'a)" and
subst: "s = t \<Longrightarrow> P s \<Longrightarrow> P t" and
ext: "(\<And>x::'a. (f x ::'b) = g x) \<Longrightarrow> (\<lambda>x. f x) = (\<lambda>x. g x)"
\<comment> \<open>Extensionality is built into the meta-logic, and this rule expresses
a related property. It is an eta-expanded version of the traditional
rule, and similar to the ABS rule of HOL\<close> and
the_eq_trivial: "(THE x. x = a) = (a::'a)"
axiomatization where
impI: "(P \<Longrightarrow> Q) \<Longrightarrow> P \<longrightarrow> Q" and
mp: "\<lbrakk>P \<longrightarrow> Q; P\<rbrakk> \<Longrightarrow> Q" and
True_or_False: "(P = True) \<or> (P = False)"
definition If :: "bool \<Rightarrow> 'a \<Rightarrow> 'a \<Rightarrow> 'a" ("(if (_)/ then (_)/ else (_))" [0, 0, 10] 10)
where "If P x y \<equiv> (THE z::'a. (P = True \<longrightarrow> z = x) \<and> (P = False \<longrightarrow> z = y))"
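(* Note: by \<open>True_or_False\<close> exactly one of \<open>P = True\<close> and \<open>P = False\<close> holds, and
   since \<open>True\<close> and \<open>False\<close> are distinct (proved below as \<open>True_not_False\<close>) the
   description above is uniquely satisfied: it picks x when P holds and y
   otherwise. The usual reduction rules for \<open>if\<close> on \<open>True\<close> and \<open>False\<close> are
   derived from this characterisation later in this theory. *)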
definition Let :: "'a \<Rightarrow> ('a \<Rightarrow> 'b) \<Rightarrow> 'b"
where "Let s f \<equiv> f s"
translations
"_Let (_binds b bs) e" \<rightleftharpoons> "_Let b (_Let bs e)"
"let x = a in e" \<rightleftharpoons> "CONST Let a (\<lambda>x. e)"
axiomatization undefined :: 'a
class default = fixes default :: 'a
subsection \<open>Fundamental rules\<close>
subsubsection \<open>Equality\<close>
lemma sym: "s = t \<Longrightarrow> t = s"
by (erule subst) (rule refl)
lemma ssubst: "t = s \<Longrightarrow> P s \<Longrightarrow> P t"
by (drule sym) (erule subst)
lemma trans: "\<lbrakk>r = s; s = t\<rbrakk> \<Longrightarrow> r = t"
by (erule subst)
lemma trans_sym [Pure.elim?]: "r = s \<Longrightarrow> t = s \<Longrightarrow> r = t"
by (rule trans [OF _ sym])
lemma meta_eq_to_obj_eq:
assumes "A \<equiv> B"
shows "A = B"
unfolding assms by (rule refl)
text \<open>Useful with \<open>erule\<close> for proving equalities from known equalities.\<close>
(* a = b
| |
c = d *)
lemma box_equals: "\<lbrakk>a = b; a = c; b = d\<rbrakk> \<Longrightarrow> c = d"
by (iprover intro: sym trans)
text \<open>For calculational reasoning:\<close>
lemma forw_subst: "a = b \<Longrightarrow> P b \<Longrightarrow> P a"
by (rule ssubst)
lemma back_subst: "P a \<Longrightarrow> a = b \<Longrightarrow> P b"
by (rule subst)
subsubsection \<open>Congruence rules for application\<close>
text \<open>Similar to \<open>AP_THM\<close> in Gordon's HOL.\<close>
lemma fun_cong: "(f :: 'a \<Rightarrow> 'b) = g \<Longrightarrow> f x = g x"
by (iprover intro: refl elim: subst)
text \<open>Similar to \<open>AP_TERM\<close> in Gordon's HOL and FOL's \<open>subst_context\<close>.\<close>
lemma arg_cong: "x = y \<Longrightarrow> f x = f y"
by (iprover intro: refl elim: subst)
lemma arg_cong2: "\<lbrakk>a = b; c = d\<rbrakk> \<Longrightarrow> f a c = f b d"
by (iprover intro: refl elim: subst)
lemma cong: "\<lbrakk>f = g; (x::'a) = y\<rbrakk> \<Longrightarrow> f x = g y"
by (iprover intro: refl elim: subst)
ML \<open>fun cong_tac ctxt = Cong_Tac.cong_tac ctxt @{thm cong}\<close>
subsubsection \<open>Equality of booleans -- iff\<close>
lemma iffD2: "\<lbrakk>P = Q; Q\<rbrakk> \<Longrightarrow> P"
by (erule ssubst)
lemma rev_iffD2: "\<lbrakk>Q; P = Q\<rbrakk> \<Longrightarrow> P"
by (erule iffD2)
lemma iffD1: "Q = P \<Longrightarrow> Q \<Longrightarrow> P"
by (drule sym) (rule iffD2)
lemma rev_iffD1: "Q \<Longrightarrow> Q = P \<Longrightarrow> P"
by (drule sym) (rule rev_iffD2)
lemma iffE:
assumes major: "P = Q"
and minor: "\<lbrakk>P \<longrightarrow> Q; Q \<longrightarrow> P\<rbrakk> \<Longrightarrow> R"
shows R
by (iprover intro: minor impI major [THEN iffD2] major [THEN iffD1])
subsubsection \<open>True (1)\<close>
lemma TrueI: True
unfolding True_def by (rule refl)
lemma eqTrueE: "P = True \<Longrightarrow> P"
by (erule iffD2) (rule TrueI)
subsubsection \<open>Universal quantifier (1)\<close>
lemma spec: "\<forall>x::'a. P x \<Longrightarrow> P x"
unfolding All_def by (iprover intro: eqTrueE fun_cong)
lemma allE:
assumes major: "\<forall>x. P x" and minor: "P x \<Longrightarrow> R"
shows R
by (iprover intro: minor major [THEN spec])
lemma all_dupE:
assumes major: "\<forall>x. P x" and minor: "\<lbrakk>P x; \<forall>x. P x\<rbrakk> \<Longrightarrow> R"
shows R
by (iprover intro: minor major major [THEN spec])
subsubsection \<open>False\<close>
text \<open>
Depends upon \<open>spec\<close>; it is impossible to do propositional
logic before quantifiers!
\<close>
lemma FalseE: "False \<Longrightarrow> P"
unfolding False_def by (erule spec)
lemma False_neq_True: "False = True \<Longrightarrow> P"
by (erule eqTrueE [THEN FalseE])
subsubsection \<open>Negation\<close>
lemma notI:
assumes "P \<Longrightarrow> False"
shows "\<not> P"
unfolding not_def by (iprover intro: impI assms)
lemma False_not_True: "False \<noteq> True"
by (iprover intro: notI elim: False_neq_True)
lemma True_not_False: "True \<noteq> False"
by (iprover intro: notI dest: sym elim: False_neq_True)
lemma notE: "\<lbrakk>\<not> P; P\<rbrakk> \<Longrightarrow> R"
unfolding not_def
by (iprover intro: mp [THEN FalseE])
subsubsection \<open>Implication\<close>
lemma impE:
assumes "P \<longrightarrow> Q" P "Q \<Longrightarrow> R"
shows R
by (iprover intro: assms mp)
text \<open>Reduces \<open>Q\<close> to \<open>P \<longrightarrow> Q\<close>, allowing substitution in \<open>P\<close>.\<close>
lemma rev_mp: "\<lbrakk>P; P \<longrightarrow> Q\<rbrakk> \<Longrightarrow> Q"
by (rule mp)
lemma contrapos_nn:
assumes major: "\<not> Q"
and minor: "P \<Longrightarrow> Q"
shows "\<not> P"
by (iprover intro: notI minor major [THEN notE])
text \<open>Not used at all, but we already have the other 3 combinations.\<close>
lemma contrapos_pn:
assumes major: "Q"
and minor: "P \<Longrightarrow> \<not> Q"
shows "\<not> P"
by (iprover intro: notI minor major notE)
lemma not_sym: "t \<noteq> s \<Longrightarrow> s \<noteq> t"
by (erule contrapos_nn) (erule sym)
lemma eq_neq_eq_imp_neq: "\<lbrakk>x = a; a \<noteq> b; b = y\<rbrakk> \<Longrightarrow> x \<noteq> y"
by (erule subst, erule ssubst, assumption)
subsubsection \<open>Disjunction (1)\<close>
lemma disjE:
assumes major: "P \<or> Q"
and minorP: "P \<Longrightarrow> R"
and minorQ: "Q \<Longrightarrow> R"
shows R
by (iprover intro: minorP minorQ impI
major [unfolded or_def, THEN spec, THEN mp, THEN mp])
subsubsection \<open>Derivation of \<open>iffI\<close>\<close>
text \<open>In an intuitionistic version of HOL \<open>iffI\<close> needs to be an axiom.\<close>
lemma iffI:
assumes "P \<Longrightarrow> Q" and "Q \<Longrightarrow> P"
shows "P = Q"
proof (rule disjE[OF True_or_False[of P]])
assume 1: "P = True"
note Q = assms(1)[OF eqTrueE[OF this]]
from 1 show ?thesis
proof (rule ssubst)
from True_or_False[of Q] show "True = Q"
proof (rule disjE)
assume "Q = True"
thus ?thesis by(rule sym)
next
assume "Q = False"
with Q have False by (rule rev_iffD1)
thus ?thesis by (rule FalseE)
qed
qed
next
assume 2: "P = False"
thus ?thesis
proof (rule ssubst)
from True_or_False[of Q] show "False = Q"
proof (rule disjE)
assume "Q = True"
from 2 assms(2)[OF eqTrueE[OF this]] have False by (rule iffD1)
thus ?thesis by (rule FalseE)
next
assume "Q = False"
thus ?thesis by(rule sym)
qed
qed
qed
subsubsection \<open>True (2)\<close>
lemma eqTrueI: "P \<Longrightarrow> P = True"
by (iprover intro: iffI TrueI)
subsubsection \<open>Universal quantifier (2)\<close>
lemma allI:
assumes "\<And>x::'a. P x"
shows "\<forall>x. P x"
unfolding All_def by (iprover intro: ext eqTrueI assms)
subsubsection \<open>Existential quantifier\<close>
lemma exI: "P x \<Longrightarrow> \<exists>x::'a. P x"
unfolding Ex_def by (iprover intro: allI allE impI mp)
lemma exE:
assumes major: "\<exists>x::'a. P x"
and minor: "\<And>x. P x \<Longrightarrow> Q"
shows "Q"
by (rule major [unfolded Ex_def, THEN spec, THEN mp]) (iprover intro: impI [THEN allI] minor)
subsubsection \<open>Conjunction\<close>
lemma conjI: "\<lbrakk>P; Q\<rbrakk> \<Longrightarrow> P \<and> Q"
unfolding and_def by (iprover intro: impI [THEN allI] mp)
lemma conjunct1: "\<lbrakk>P \<and> Q\<rbrakk> \<Longrightarrow> P"
unfolding and_def by (iprover intro: impI dest: spec mp)
lemma conjunct2: "\<lbrakk>P \<and> Q\<rbrakk> \<Longrightarrow> Q"
unfolding and_def by (iprover intro: impI dest: spec mp)
lemma conjE:
assumes major: "P \<and> Q"
and minor: "\<lbrakk>P; Q\<rbrakk> \<Longrightarrow> R"
shows R
proof (rule minor)
show P by (rule major [THEN conjunct1])
show Q by (rule major [THEN conjunct2])
qed
lemma context_conjI:
assumes P "P \<Longrightarrow> Q"
shows "P \<and> Q"
by (iprover intro: conjI assms)
subsubsection \<open>Disjunction (2)\<close>
lemma disjI1: "P \<Longrightarrow> P \<or> Q"
unfolding or_def by (iprover intro: allI impI mp)
lemma disjI2: "Q \<Longrightarrow> P \<or> Q"
unfolding or_def by (iprover intro: allI impI mp)
subsubsection \<open>Classical logic\<close>
lemma classical:
assumes "\<not> P \<Longrightarrow> P"
shows P
proof (rule True_or_False [THEN disjE])
show P if "P = True"
using that by (iprover intro: eqTrueE)
show P if "P = False"
proof (intro notI assms)
assume P
with that show False
by (iprover elim: subst)
qed
qed
lemmas ccontr = FalseE [THEN classical]
text \<open>\<open>notE\<close> with premises exchanged; it discharges \<open>\<not> R\<close> so that it can be used to
make elimination rules.\<close>
lemma rev_notE:
assumes premp: P
and premnot: "\<not> R \<Longrightarrow> \<not> P"
shows R
by (iprover intro: ccontr notE [OF premnot premp])
text \<open>Double negation law.\<close>
lemma notnotD: "\<not>\<not> P \<Longrightarrow> P"
by (iprover intro: ccontr notE )
lemma contrapos_pp:
assumes p1: Q
and p2: "\<not> P \<Longrightarrow> \<not> Q"
shows P
by (iprover intro: classical p1 p2 notE)
subsubsection \<open>Unique existence\<close>
lemma Uniq_I [intro?]:
assumes "\<And>x y. \<lbrakk>P x; P y\<rbrakk> \<Longrightarrow> y = x"
shows "Uniq P"
unfolding Uniq_def by (iprover intro: assms allI impI)
lemma Uniq_D [dest?]: "\<lbrakk>Uniq P; P a; P b\<rbrakk> \<Longrightarrow> a=b"
unfolding Uniq_def by (iprover dest: spec mp)
lemma ex1I:
assumes "P a" "\<And>x. P x \<Longrightarrow> x = a"
shows "\<exists>!x. P x"
unfolding Ex1_def by (iprover intro: assms exI conjI allI impI)
text \<open>Sometimes easier to use: the premises have no shared variables. Safe!\<close>
lemma ex_ex1I:
assumes ex_prem: "\<exists>x. P x"
and eq: "\<And>x y. \<lbrakk>P x; P y\<rbrakk> \<Longrightarrow> x = y"
shows "\<exists>!x. P x"
by (iprover intro: ex_prem [THEN exE] ex1I eq)
lemma ex1E:
assumes major: "\<exists>!x. P x" and minor: "\<And>x. \<lbrakk>P x; \<forall>y. P y \<longrightarrow> y = x\<rbrakk> \<Longrightarrow> R"
shows R
proof (rule major [unfolded Ex1_def, THEN exE])
show "\<And>x. P x \<and> (\<forall>y. P y \<longrightarrow> y = x) \<Longrightarrow> R"
by (iprover intro: minor elim: conjE)
qed
lemma ex1_implies_ex: "\<exists>!x. P x \<Longrightarrow> \<exists>x. P x"
by (iprover intro: exI elim: ex1E)
subsubsection \<open>Classical intro rules for disjunction and existential quantifiers\<close>
lemma disjCI:
assumes "\<not> Q \<Longrightarrow> P"
shows "P \<or> Q"
by (rule classical) (iprover intro: assms disjI1 disjI2 notI elim: notE)
lemma excluded_middle: "\<not> P \<or> P"
by (iprover intro: disjCI)
text \<open>
Case distinction as a natural deduction rule.
Note that \<open>\<not> P\<close> is the second case, not the first.
\<close>
lemma case_split [case_names True False]:
assumes "P \<Longrightarrow> Q" "\<not> P \<Longrightarrow> Q"
shows Q
using excluded_middle [of P]
by (iprover intro: assms elim: disjE)
text \<open>Classical implies (\<open>\<longrightarrow>\<close>) elimination.\<close>
lemma impCE:
assumes major: "P \<longrightarrow> Q"
and minor: "\<not> P \<Longrightarrow> R" "Q \<Longrightarrow> R"
shows R
using excluded_middle [of P]
by (iprover intro: minor major [THEN mp] elim: disjE)+
text \<open>
This version of \<open>\<longrightarrow>\<close> elimination works on \<open>Q\<close> before \<open>P\<close>. It works best for
those cases in which \<open>P\<close> holds "almost everywhere". Can't install as
default: would break old proofs.
\<close>
lemma impCE':
assumes major: "P \<longrightarrow> Q"
and minor: "Q \<Longrightarrow> R" "\<not> P \<Longrightarrow> R"
shows R
using assms by (elim impCE)
text \<open>Classical \<open>\<longleftrightarrow>\<close> elimination.\<close>
lemma iffCE:
assumes major: "P = Q"
and minor: "\<lbrakk>P; Q\<rbrakk> \<Longrightarrow> R" "\<lbrakk>\<not> P; \<not> Q\<rbrakk> \<Longrightarrow> R"
shows R
by (rule major [THEN iffE]) (iprover intro: minor elim: impCE notE)
lemma exCI:
assumes "\<forall>x. \<not> P x \<Longrightarrow> P a"
shows "\<exists>x. P x"
by (rule ccontr) (iprover intro: assms exI allI notI notE [of "\<exists>x. P x"])
subsubsection \<open>Intuitionistic Reasoning\<close>
lemma impE':
assumes 1: "P \<longrightarrow> Q"
and 2: "Q \<Longrightarrow> R"
and 3: "P \<longrightarrow> Q \<Longrightarrow> P"
shows R
proof -
from 3 and 1 have P .
with 1 have Q by (rule impE)
with 2 show R .
qed
lemma allE':
assumes 1: "\<forall>x. P x"
and 2: "P x \<Longrightarrow> \<forall>x. P x \<Longrightarrow> Q"
shows Q
proof -
from 1 have "P x" by (rule spec)
from this and 1 show Q by (rule 2)
qed
lemma notE':
assumes 1: "\<not> P"
and 2: "\<not> P \<Longrightarrow> P"
shows R
proof -
from 2 and 1 have P .
with 1 show R by (rule notE)
qed
lemma TrueE: "True \<Longrightarrow> P \<Longrightarrow> P" .
lemma notFalseE: "\<not> False \<Longrightarrow> P \<Longrightarrow> P" .
lemmas [Pure.elim!] = disjE iffE FalseE conjE exE TrueE notFalseE
and [Pure.intro!] = iffI conjI impI TrueI notI allI refl
and [Pure.elim 2] = allE notE' impE'
and [Pure.intro] = exI disjI2 disjI1
lemmas [trans] = trans
and [sym] = sym not_sym
and [Pure.elim?] = iffD1 iffD2 impE
subsubsection \<open>Atomizing meta-level connectives\<close>
axiomatization where
eq_reflection: "x = y \<Longrightarrow> x \<equiv> y" \<comment> \<open>admissible axiom\<close>
lemma atomize_all [atomize]: "(\<And>x. P x) \<equiv> Trueprop (\<forall>x. P x)"
proof
assume "\<And>x. P x"
then show "\<forall>x. P x" ..
next
assume "\<forall>x. P x"
then show "\<And>x. P x" by (rule allE)
qed
lemma atomize_imp [atomize]: "(A \<Longrightarrow> B) \<equiv> Trueprop (A \<longrightarrow> B)"
proof
assume r: "A \<Longrightarrow> B"
show "A \<longrightarrow> B" by (rule impI) (rule r)
next
assume "A \<longrightarrow> B" and A
then show B by (rule mp)
qed
lemma atomize_not: "(A \<Longrightarrow> False) \<equiv> Trueprop (\<not> A)"
proof
assume r: "A \<Longrightarrow> False"
show "\<not> A" by (rule notI) (rule r)
next
assume "\<not> A" and A
then show False by (rule notE)
qed
lemma atomize_eq [atomize, code]: "(x \<equiv> y) \<equiv> Trueprop (x = y)"
proof
assume "x \<equiv> y"
show "x = y" by (unfold \<open>x \<equiv> y\<close>) (rule refl)
next
assume "x = y"
then show "x \<equiv> y" by (rule eq_reflection)
qed
lemma atomize_conj [atomize]: "(A &&& B) \<equiv> Trueprop (A \<and> B)"
proof
assume conj: "A &&& B"
show "A \<and> B"
proof (rule conjI)
from conj show A by (rule conjunctionD1)
from conj show B by (rule conjunctionD2)
qed
next
assume conj: "A \<and> B"
show "A &&& B"
proof -
from conj show A ..
from conj show B ..
qed
qed
lemmas [symmetric, rulify] = atomize_all atomize_imp
and [symmetric, defn] = atomize_all atomize_imp atomize_eq
subsubsection \<open>Atomizing elimination rules\<close>
lemma atomize_exL[atomize_elim]: "(\<And>x. P x \<Longrightarrow> Q) \<equiv> ((\<exists>x. P x) \<Longrightarrow> Q)"
by (rule equal_intr_rule) iprover+
lemma atomize_conjL[atomize_elim]: "(A \<Longrightarrow> B \<Longrightarrow> C) \<equiv> (A \<and> B \<Longrightarrow> C)"
by (rule equal_intr_rule) iprover+
lemma atomize_disjL[atomize_elim]: "((A \<Longrightarrow> C) \<Longrightarrow> (B \<Longrightarrow> C) \<Longrightarrow> C) \<equiv> ((A \<or> B \<Longrightarrow> C) \<Longrightarrow> C)"
by (rule equal_intr_rule) iprover+
lemma atomize_elimL[atomize_elim]: "(\<And>B. (A \<Longrightarrow> B) \<Longrightarrow> B) \<equiv> Trueprop A" ..
subsection \<open>Package setup\<close>
ML_file \<open>Tools/hologic.ML\<close>
ML_file \<open>Tools/rewrite_hol_proof.ML\<close>
setup \<open>Proofterm.set_preproc (Proof_Rewrite_Rules.standard_preproc Rewrite_HOL_Proof.rews)\<close>
subsubsection \<open>Sledgehammer setup\<close>
text \<open>
Theorems blacklisted for Sledgehammer. These theorems typically produce clauses
that are prolific (match too many equality or membership literals) and relate to
seldom-used facts. Some duplicate other rules.
\<close>
named_theorems no_atp "theorems that should be filtered out by Sledgehammer"
subsubsection \<open>Classical Reasoner setup\<close>
lemma imp_elim: "P \<longrightarrow> Q \<Longrightarrow> (\<not> R \<Longrightarrow> P) \<Longrightarrow> (Q \<Longrightarrow> R) \<Longrightarrow> R"
by (rule classical) iprover
lemma swap: "\<not> P \<Longrightarrow> (\<not> R \<Longrightarrow> P) \<Longrightarrow> R"
by (rule classical) iprover
lemma thin_refl: "\<lbrakk>x = x; PROP W\<rbrakk> \<Longrightarrow> PROP W" .
ML \<open>
structure Hypsubst = Hypsubst
(
val dest_eq = HOLogic.dest_eq
val dest_Trueprop = HOLogic.dest_Trueprop
val dest_imp = HOLogic.dest_imp
val eq_reflection = @{thm eq_reflection}
val rev_eq_reflection = @{thm meta_eq_to_obj_eq}
val imp_intr = @{thm impI}
val rev_mp = @{thm rev_mp}
val subst = @{thm subst}
val sym = @{thm sym}
val thin_refl = @{thm thin_refl};
);
open Hypsubst;
structure Classical = Classical
(
val imp_elim = @{thm imp_elim}
val not_elim = @{thm notE}
val swap = @{thm swap}
val classical = @{thm classical}
val sizef = Drule.size_of_thm
val hyp_subst_tacs = [Hypsubst.hyp_subst_tac]
);
structure Basic_Classical: BASIC_CLASSICAL = Classical;
open Basic_Classical;
\<close>
setup \<open>
(*prevent substitution on bool*)
let
fun non_bool_eq (\<^const_name>\<open>HOL.eq\<close>, Type (_, [T, _])) = T <> \<^typ>\<open>bool\<close>
| non_bool_eq _ = false;
fun hyp_subst_tac' ctxt =
SUBGOAL (fn (goal, i) =>
if Term.exists_Const non_bool_eq goal
then Hypsubst.hyp_subst_tac ctxt i
else no_tac);
in
Context_Rules.addSWrapper (fn ctxt => fn tac => hyp_subst_tac' ctxt ORELSE' tac)
end
\<close>
declare iffI [intro!]
and notI [intro!]
and impI [intro!]
and disjCI [intro!]
and conjI [intro!]
and TrueI [intro!]
and refl [intro!]
declare iffCE [elim!]
and FalseE [elim!]
and impCE [elim!]
and disjE [elim!]
and conjE [elim!]
declare ex_ex1I [intro!]
and allI [intro!]
and exI [intro]
declare exE [elim!]
allE [elim]
ML \<open>val HOL_cs = claset_of \<^context>\<close>
lemma contrapos_np: "\<not> Q \<Longrightarrow> (\<not> P \<Longrightarrow> Q) \<Longrightarrow> P"
by (erule swap)
declare ex_ex1I [rule del, intro! 2]
and ex1I [intro]
declare ext [intro]
lemmas [intro?] = ext
and [elim?] = ex1_implies_ex
text \<open>Better than \<open>ex1E\<close> for the classical reasoner: it needs no quantifier duplication!\<close>
lemma alt_ex1E [elim!]:
assumes major: "\<exists>!x. P x"
and minor: "\<And>x. \<lbrakk>P x; \<forall>y y'. P y \<and> P y' \<longrightarrow> y = y'\<rbrakk> \<Longrightarrow> R"
shows R
proof (rule ex1E [OF major minor])
show "\<forall>y y'. P y \<and> P y' \<longrightarrow> y = y'" if "P x" and \<section>: "\<forall>y. P y \<longrightarrow> y = x" for x
using \<open>P x\<close> \<section> \<section> by fast
qed assumption
text \<open>And again using Uniq\<close>
lemma alt_ex1E':
assumes "\<exists>!x. P x" "\<And>x. \<lbrakk>P x; \<exists>\<^sub>\<le>\<^sub>1x. P x\<rbrakk> \<Longrightarrow> R"
shows R
using assms unfolding Uniq_def by fast
lemma ex1_iff_ex_Uniq: "(\<exists>!x. P x) \<longleftrightarrow> (\<exists>x. P x) \<and> (\<exists>\<^sub>\<le>\<^sub>1x. P x)"
unfolding Uniq_def by fast
ML \<open>
structure Blast = Blast
(
structure Classical = Classical
val Trueprop_const = dest_Const \<^Const>\<open>Trueprop\<close>
val equality_name = \<^const_name>\<open>HOL.eq\<close>
val not_name = \<^const_name>\<open>Not\<close>
val notE = @{thm notE}
val ccontr = @{thm ccontr}
val hyp_subst_tac = Hypsubst.blast_hyp_subst_tac
);
val blast_tac = Blast.blast_tac;
\<close>
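text \<open>
  For illustration, a quick check of the classical setup: \<open>blast\<close> now proves purely
  classical tautologies such as Peirce's law.
\<close>
lemma "((P \<longrightarrow> Q) \<longrightarrow> P) \<longrightarrow> P"
  by blast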
subsubsection \<open>THE: definite description operator\<close>
lemma the_equality [intro]:
assumes "P a"
and "\<And>x. P x \<Longrightarrow> x = a"
shows "(THE x. P x) = a"
by (blast intro: assms trans [OF arg_cong [where f=The] the_eq_trivial])
lemma theI:
assumes "P a"
and "\<And>x. P x \<Longrightarrow> x = a"
shows "P (THE x. P x)"
by (iprover intro: assms the_equality [THEN ssubst])
lemma theI': "\<exists>!x. P x \<Longrightarrow> P (THE x. P x)"
by (blast intro: theI)
text \<open>Easier to apply than \<open>theI\<close>: only one occurrence of \<open>P\<close>.\<close>
lemma theI2:
assumes "P a" "\<And>x. P x \<Longrightarrow> x = a" "\<And>x. P x \<Longrightarrow> Q x"
shows "Q (THE x. P x)"
by (iprover intro: assms theI)
lemma the1I2:
assumes "\<exists>!x. P x" "\<And>x. P x \<Longrightarrow> Q x"
shows "Q (THE x. P x)"
by (iprover intro: assms(2) theI2[where P=P and Q=Q] ex1E[OF assms(1)] elim: allE impE)
lemma the1_equality [elim?]: "\<lbrakk>\<exists>!x. P x; P a\<rbrakk> \<Longrightarrow> (THE x. P x) = a"
by blast
lemma the1_equality': "\<lbrakk>\<exists>\<^sub>\<le>\<^sub>1x. P x; P a\<rbrakk> \<Longrightarrow> (THE x. P x) = a"
unfolding Uniq_def by blast
lemma the_sym_eq_trivial: "(THE y. x = y) = x"
by blast
subsubsection \<open>Simplifier\<close>
lemma eta_contract_eq: "(\<lambda>s. f s) = f" ..
lemma subst_all:
\<open>(\<And>x. x = a \<Longrightarrow> PROP P x) \<equiv> PROP P a\<close>
\<open>(\<And>x. a = x \<Longrightarrow> PROP P x) \<equiv> PROP P a\<close>
proof -
show \<open>(\<And>x. x = a \<Longrightarrow> PROP P x) \<equiv> PROP P a\<close>
proof (rule equal_intr_rule)
assume *: \<open>\<And>x. x = a \<Longrightarrow> PROP P x\<close>
show \<open>PROP P a\<close>
by (rule *) (rule refl)
next
fix x
assume \<open>PROP P a\<close> and \<open>x = a\<close>
from \<open>x = a\<close> have \<open>x \<equiv> a\<close>
by (rule eq_reflection)
with \<open>PROP P a\<close> show \<open>PROP P x\<close>
by simp
qed
show \<open>(\<And>x. a = x \<Longrightarrow> PROP P x) \<equiv> PROP P a\<close>
proof (rule equal_intr_rule)
assume *: \<open>\<And>x. a = x \<Longrightarrow> PROP P x\<close>
show \<open>PROP P a\<close>
by (rule *) (rule refl)
next
fix x
assume \<open>PROP P a\<close> and \<open>a = x\<close>
from \<open>a = x\<close> have \<open>a \<equiv> x\<close>
by (rule eq_reflection)
with \<open>PROP P a\<close> show \<open>PROP P x\<close>
by simp
qed
qed
lemma simp_thms:
shows not_not: "(\<not> \<not> P) = P"
and Not_eq_iff: "((\<not> P) = (\<not> Q)) = (P = Q)"
and
"(P \<noteq> Q) = (P = (\<not> Q))"
"(P \<or> \<not>P) = True" "(\<not> P \<or> P) = True"
"(x = x) = True"
and not_True_eq_False [code]: "(\<not> True) = False"
and not_False_eq_True [code]: "(\<not> False) = True"
and
"(\<not> P) \<noteq> P" "P \<noteq> (\<not> P)"
"(True = P) = P"
and eq_True: "(P = True) = P"
and "(False = P) = (\<not> P)"
and eq_False: "(P = False) = (\<not> P)"
and
"(True \<longrightarrow> P) = P" "(False \<longrightarrow> P) = True"
"(P \<longrightarrow> True) = True" "(P \<longrightarrow> P) = True"
"(P \<longrightarrow> False) = (\<not> P)" "(P \<longrightarrow> \<not> P) = (\<not> P)"
"(P \<and> True) = P" "(True \<and> P) = P"
"(P \<and> False) = False" "(False \<and> P) = False"
"(P \<and> P) = P" "(P \<and> (P \<and> Q)) = (P \<and> Q)"
"(P \<and> \<not> P) = False" "(\<not> P \<and> P) = False"
"(P \<or> True) = True" "(True \<or> P) = True"
"(P \<or> False) = P" "(False \<or> P) = P"
"(P \<or> P) = P" "(P \<or> (P \<or> Q)) = (P \<or> Q)" and
"(\<forall>x. P) = P" "(\<exists>x. P) = P" "\<exists>x. x = t" "\<exists>x. t = x"
and
"\<And>P. (\<exists>x. x = t \<and> P x) = P t"
"\<And>P. (\<exists>x. t = x \<and> P x) = P t"
"\<And>P. (\<forall>x. x = t \<longrightarrow> P x) = P t"
"\<And>P. (\<forall>x. t = x \<longrightarrow> P x) = P t"
"(\<forall>x. x \<noteq> t) = False" "(\<forall>x. t \<noteq> x) = False"
by (blast, blast, blast, blast, blast, iprover+)
lemma disj_absorb: "A \<or> A \<longleftrightarrow> A"
by blast
lemma disj_left_absorb: "A \<or> (A \<or> B) \<longleftrightarrow> A \<or> B"
by blast
lemma conj_absorb: "A \<and> A \<longleftrightarrow> A"
by blast
lemma conj_left_absorb: "A \<and> (A \<and> B) \<longleftrightarrow> A \<and> B"
by blast
lemma eq_ac:
shows eq_commute: "a = b \<longleftrightarrow> b = a"
and iff_left_commute: "(P \<longleftrightarrow> (Q \<longleftrightarrow> R)) \<longleftrightarrow> (Q \<longleftrightarrow> (P \<longleftrightarrow> R))"
and iff_assoc: "((P \<longleftrightarrow> Q) \<longleftrightarrow> R) \<longleftrightarrow> (P \<longleftrightarrow> (Q \<longleftrightarrow> R))"
by (iprover, blast+)
lemma neq_commute: "a \<noteq> b \<longleftrightarrow> b \<noteq> a" by iprover
lemma conj_comms:
shows conj_commute: "P \<and> Q \<longleftrightarrow> Q \<and> P"
and conj_left_commute: "P \<and> (Q \<and> R) \<longleftrightarrow> Q \<and> (P \<and> R)" by iprover+
lemma conj_assoc: "(P \<and> Q) \<and> R \<longleftrightarrow> P \<and> (Q \<and> R)" by iprover
lemmas conj_ac = conj_commute conj_left_commute conj_assoc
lemma disj_comms:
shows disj_commute: "P \<or> Q \<longleftrightarrow> Q \<or> P"
and disj_left_commute: "P \<or> (Q \<or> R) \<longleftrightarrow> Q \<or> (P \<or> R)" by iprover+
lemma disj_assoc: "(P \<or> Q) \<or> R \<longleftrightarrow> P \<or> (Q \<or> R)" by iprover
lemmas disj_ac = disj_commute disj_left_commute disj_assoc
lemma conj_disj_distribL: "P \<and> (Q \<or> R) \<longleftrightarrow> P \<and> Q \<or> P \<and> R" by iprover
lemma conj_disj_distribR: "(P \<or> Q) \<and> R \<longleftrightarrow> P \<and> R \<or> Q \<and> R" by iprover
lemma disj_conj_distribL: "P \<or> (Q \<and> R) \<longleftrightarrow> (P \<or> Q) \<and> (P \<or> R)" by iprover
lemma disj_conj_distribR: "(P \<and> Q) \<or> R \<longleftrightarrow> (P \<or> R) \<and> (Q \<or> R)" by iprover
lemma imp_conjR: "(P \<longrightarrow> (Q \<and> R)) = ((P \<longrightarrow> Q) \<and> (P \<longrightarrow> R))" by iprover
lemma imp_conjL: "((P \<and> Q) \<longrightarrow> R) = (P \<longrightarrow> (Q \<longrightarrow> R))" by iprover
lemma imp_disjL: "((P \<or> Q) \<longrightarrow> R) = ((P \<longrightarrow> R) \<and> (Q \<longrightarrow> R))" by iprover
text \<open>These two are specialized, but \<open>imp_disj_not1\<close> is useful in \<open>Auth/Yahalom\<close>.\<close>
lemma imp_disj_not1: "(P \<longrightarrow> Q \<or> R) \<longleftrightarrow> (\<not> Q \<longrightarrow> P \<longrightarrow> R)" by blast
lemma imp_disj_not2: "(P \<longrightarrow> Q \<or> R) \<longleftrightarrow> (\<not> R \<longrightarrow> P \<longrightarrow> Q)" by blast
lemma imp_disj1: "((P \<longrightarrow> Q) \<or> R) \<longleftrightarrow> (P \<longrightarrow> Q \<or> R)" by blast
lemma imp_disj2: "(Q \<or> (P \<longrightarrow> R)) \<longleftrightarrow> (P \<longrightarrow> Q \<or> R)" by blast
lemma imp_cong: "(P = P') \<Longrightarrow> (P' \<Longrightarrow> (Q = Q')) \<Longrightarrow> ((P \<longrightarrow> Q) \<longleftrightarrow> (P' \<longrightarrow> Q'))"
by iprover
lemma de_Morgan_disj: "\<not> (P \<or> Q) \<longleftrightarrow> \<not> P \<and> \<not> Q" by iprover
lemma de_Morgan_conj: "\<not> (P \<and> Q) \<longleftrightarrow> \<not> P \<or> \<not> Q" by blast
lemma not_imp: "\<not> (P \<longrightarrow> Q) \<longleftrightarrow> P \<and> \<not> Q" by blast
lemma not_iff: "P \<noteq> Q \<longleftrightarrow> (P \<longleftrightarrow> \<not> Q)" by blast
lemma disj_not1: "\<not> P \<or> Q \<longleftrightarrow> (P \<longrightarrow> Q)" by blast
lemma disj_not2: "P \<or> \<not> Q \<longleftrightarrow> (Q \<longrightarrow> P)" by blast \<comment> \<open>changes orientation :-(\<close>
lemma imp_conv_disj: "(P \<longrightarrow> Q) \<longleftrightarrow> (\<not> P) \<or> Q" by blast
lemma disj_imp: "P \<or> Q \<longleftrightarrow> \<not> P \<longrightarrow> Q" by blast
lemma iff_conv_conj_imp: "(P \<longleftrightarrow> Q) \<longleftrightarrow> (P \<longrightarrow> Q) \<and> (Q \<longrightarrow> P)" by iprover
lemma cases_simp: "(P \<longrightarrow> Q) \<and> (\<not> P \<longrightarrow> Q) \<longleftrightarrow> Q"
\<comment> \<open>Avoids duplication of subgoals after \<open>if_split\<close>, when the true and false\<close>
\<comment> \<open>cases boil down to the same thing.\<close>
by blast
lemma not_all: "\<not> (\<forall>x. P x) \<longleftrightarrow> (\<exists>x. \<not> P x)" by blast
lemma imp_all: "((\<forall>x. P x) \<longrightarrow> Q) \<longleftrightarrow> (\<exists>x. P x \<longrightarrow> Q)" by blast
lemma not_ex: "\<not> (\<exists>x. P x) \<longleftrightarrow> (\<forall>x. \<not> P x)" by iprover
lemma imp_ex: "((\<exists>x. P x) \<longrightarrow> Q) \<longleftrightarrow> (\<forall>x. P x \<longrightarrow> Q)" by iprover
lemma all_not_ex: "(\<forall>x. P x) \<longleftrightarrow> \<not> (\<exists>x. \<not> P x)" by blast
declare All_def [no_atp]
lemma ex_disj_distrib: "(\<exists>x. P x \<or> Q x) \<longleftrightarrow> (\<exists>x. P x) \<or> (\<exists>x. Q x)" by iprover
lemma all_conj_distrib: "(\<forall>x. P x \<and> Q x) \<longleftrightarrow> (\<forall>x. P x) \<and> (\<forall>x. Q x)" by iprover
text \<open>
\<^medskip> The \<open>\<and>\<close> congruence rule: not included by default!
May slow rewrite proofs down by as much as 50\%.\<close>
lemma conj_cong: "P = P' \<Longrightarrow> (P' \<Longrightarrow> Q = Q') \<Longrightarrow> (P \<and> Q) = (P' \<and> Q')"
by iprover
lemma rev_conj_cong: "Q = Q' \<Longrightarrow> (Q' \<Longrightarrow> P = P') \<Longrightarrow> (P \<and> Q) = (P' \<and> Q')"
by iprover
text \<open>The \<open>|\<close> congruence rule: not included by default!\<close>
lemma disj_cong: "P = P' \<Longrightarrow> (\<not> P' \<Longrightarrow> Q = Q') \<Longrightarrow> (P \<or> Q) = (P' \<or> Q')"
by blast
text \<open>\<^medskip> if-then-else rules\<close>
lemma if_True [code]: "(if True then x else y) = x"
unfolding If_def by blast
lemma if_False [code]: "(if False then x else y) = y"
unfolding If_def by blast
lemma if_P: "P \<Longrightarrow> (if P then x else y) = x"
unfolding If_def by blast
lemma if_not_P: "\<not> P \<Longrightarrow> (if P then x else y) = y"
unfolding If_def by blast
lemma if_split: "P (if Q then x else y) = ((Q \<longrightarrow> P x) \<and> (\<not> Q \<longrightarrow> P y))"
proof (rule case_split [of Q])
show ?thesis if Q
using that by (simplesubst if_P) blast+
show ?thesis if "\<not> Q"
using that by (simplesubst if_not_P) blast+
qed
lemma if_split_asm: "P (if Q then x else y) = (\<not> ((Q \<and> \<not> P x) \<or> (\<not> Q \<and> \<not> P y)))"
by (simplesubst if_split) blast
lemmas if_splits [no_atp] = if_split if_split_asm
lemma if_cancel: "(if c then x else x) = x"
by (simplesubst if_split) blast
lemma if_eq_cancel: "(if x = y then y else x) = x"
by (simplesubst if_split) blast
lemma if_bool_eq_conj: "(if P then Q else R) = ((P \<longrightarrow> Q) \<and> (\<not> P \<longrightarrow> R))"
\<comment> \<open>This form is useful for expanding \<open>if\<close>s on the RIGHT of the \<open>\<Longrightarrow>\<close> symbol.\<close>
by (rule if_split)
lemma if_bool_eq_disj: "(if P then Q else R) = ((P \<and> Q) \<or> (\<not> P \<and> R))"
\<comment> \<open>And this form is useful for expanding \<open>if\<close>s on the LEFT.\<close>
by (simplesubst if_split) blast
lemma Eq_TrueI: "P \<Longrightarrow> P \<equiv> True" unfolding atomize_eq by iprover
lemma Eq_FalseI: "\<not> P \<Longrightarrow> P \<equiv> False" unfolding atomize_eq by iprover
text \<open>\<^medskip> let rules for simproc\<close>
lemma Let_folded: "f x \<equiv> g x \<Longrightarrow> Let x f \<equiv> Let x g"
by (unfold Let_def)
lemma Let_unfold: "f x \<equiv> g \<Longrightarrow> Let x f \<equiv> g"
by (unfold Let_def)
text \<open>
The following copy of the implication operator is useful for
fine-tuning congruence rules. It instructs the simplifier to simplify
its premise.
\<close>
definition simp_implies :: "prop \<Rightarrow> prop \<Rightarrow> prop" (infixr "=simp=>" 1)
where "simp_implies \<equiv> (\<Longrightarrow>)"
lemma simp_impliesI:
assumes PQ: "(PROP P \<Longrightarrow> PROP Q)"
shows "PROP P =simp=> PROP Q"
unfolding simp_implies_def
by (iprover intro: PQ)
lemma simp_impliesE:
assumes PQ: "PROP P =simp=> PROP Q"
and P: "PROP P"
and QR: "PROP Q \<Longrightarrow> PROP R"
shows "PROP R"
by (iprover intro: QR P PQ [unfolded simp_implies_def])
lemma simp_implies_cong:
assumes PP' :"PROP P \<equiv> PROP P'"
and P'QQ': "PROP P' \<Longrightarrow> (PROP Q \<equiv> PROP Q')"
shows "(PROP P =simp=> PROP Q) \<equiv> (PROP P' =simp=> PROP Q')"
unfolding simp_implies_def
proof (rule equal_intr_rule)
assume PQ: "PROP P \<Longrightarrow> PROP Q"
and P': "PROP P'"
from PP' [symmetric] and P' have "PROP P"
by (rule equal_elim_rule1)
then have "PROP Q" by (rule PQ)
with P'QQ' [OF P'] show "PROP Q'" by (rule equal_elim_rule1)
next
assume P'Q': "PROP P' \<Longrightarrow> PROP Q'"
and P: "PROP P"
from PP' and P have P': "PROP P'" by (rule equal_elim_rule1)
then have "PROP Q'" by (rule P'Q')
with P'QQ' [OF P', symmetric] show "PROP Q"
by (rule equal_elim_rule1)
qed
lemma uncurry:
assumes "P \<longrightarrow> Q \<longrightarrow> R"
shows "P \<and> Q \<longrightarrow> R"
using assms by blast
lemma iff_allI:
assumes "\<And>x. P x = Q x"
shows "(\<forall>x. P x) = (\<forall>x. Q x)"
using assms by blast
lemma iff_exI:
assumes "\<And>x. P x = Q x"
shows "(\<exists>x. P x) = (\<exists>x. Q x)"
using assms by blast
lemma all_comm: "(\<forall>x y. P x y) = (\<forall>y x. P x y)"
by blast
lemma ex_comm: "(\<exists>x y. P x y) = (\<exists>y x. P x y)"
by blast
ML_file \<open>Tools/simpdata.ML\<close>
ML \<open>open Simpdata\<close>
setup \<open>
map_theory_simpset (put_simpset HOL_basic_ss) #>
Simplifier.method_setup Splitter.split_modifiers
\<close>
simproc_setup defined_Ex ("\<exists>x. P x") = \<open>K Quantifier1.rearrange_Ex\<close>
simproc_setup defined_All ("\<forall>x. P x") = \<open>K Quantifier1.rearrange_All\<close>
simproc_setup defined_all("\<And>x. PROP P x") = \<open>K Quantifier1.rearrange_all\<close>
text \<open>Simproc for proving \<open>(y = x) \<equiv> False\<close> from premise \<open>\<not> (x = y)\<close>:\<close>
-simproc_setup neq ("x = y") = \<open>fn _ =>
+simproc_setup neq ("x = y") = \<open>
let
val neq_to_EQ_False = @{thm not_sym} RS @{thm Eq_FalseI};
fun is_neq eq lhs rhs thm =
(case Thm.prop_of thm of
_ $ (Not $ (eq' $ l' $ r')) =>
Not = HOLogic.Not andalso eq' = eq andalso
r' aconv lhs andalso l' aconv rhs
| _ => false);
fun proc ss ct =
(case Thm.term_of ct of
eq $ lhs $ rhs =>
(case find_first (is_neq eq lhs rhs) (Simplifier.prems_of ss) of
SOME thm => SOME (thm RS neq_to_EQ_False)
| NONE => NONE)
| _ => NONE);
- in proc end
+ in K proc end
\<close>
simproc_setup let_simp ("Let x f") = \<open>
let
fun count_loose (Bound i) k = if i >= k then 1 else 0
| count_loose (s $ t) k = count_loose s k + count_loose t k
| count_loose (Abs (_, _, t)) k = count_loose t (k + 1)
| count_loose _ _ = 0;
fun is_trivial_let (Const (\<^const_name>\<open>Let\<close>, _) $ x $ t) =
(case t of
Abs (_, _, t') => count_loose t' 0 <= 1
| _ => true);
in
- fn _ => fn ctxt => fn ct =>
+ K (fn ctxt => fn ct =>
if is_trivial_let (Thm.term_of ct)
then SOME @{thm Let_def} (*no or one occurrence of the bound variable*)
else
let (*Norbert Schirmer's case*)
val t = Thm.term_of ct;
val (t', ctxt') = yield_singleton (Variable.import_terms false) t ctxt;
in
Option.map (hd o Variable.export ctxt' ctxt o single)
(case t' of Const (\<^const_name>\<open>Let\<close>,_) $ x $ f => (* x and f are already in normal form *)
if is_Free x orelse is_Bound x orelse is_Const x
then SOME @{thm Let_def}
else
let
val n = case f of (Abs (x, _, _)) => x | _ => "x";
val cx = Thm.cterm_of ctxt x;
val xT = Thm.typ_of_cterm cx;
val cf = Thm.cterm_of ctxt f;
val fx_g = Simplifier.rewrite ctxt (Thm.apply cf cx);
val (_ $ _ $ g) = Thm.prop_of fx_g;
val g' = abstract_over (x, g);
val abs_g'= Abs (n, xT, g');
in
if g aconv g' then
let
val rl =
infer_instantiate ctxt [(("f", 0), cf), (("x", 0), cx)] @{thm Let_unfold};
in SOME (rl OF [fx_g]) end
else if (Envir.beta_eta_contract f) aconv (Envir.beta_eta_contract abs_g')
then NONE (*avoid identity conversion*)
else
let
val g'x = abs_g' $ x;
val g_g'x = Thm.symmetric (Thm.beta_conversion false (Thm.cterm_of ctxt g'x));
val rl =
@{thm Let_folded} |> infer_instantiate ctxt
[(("f", 0), Thm.cterm_of ctxt f),
(("x", 0), cx),
(("g", 0), Thm.cterm_of ctxt abs_g')];
in SOME (rl OF [Thm.transitive fx_g g_g'x]) end
end
| _ => NONE)
- end
+ end)
end
\<close>
lemma True_implies_equals: "(True \<Longrightarrow> PROP P) \<equiv> PROP P"
proof
assume "True \<Longrightarrow> PROP P"
from this [OF TrueI] show "PROP P" .
next
assume "PROP P"
then show "PROP P" .
qed
lemma implies_True_equals: "(PROP P \<Longrightarrow> True) \<equiv> Trueprop True"
by standard (intro TrueI)
lemma False_implies_equals: "(False \<Longrightarrow> P) \<equiv> Trueprop True"
by standard simp_all
(* It seems that making this a simp rule is slower than using the simproc below *)
lemma implies_False_swap:
"(False \<Longrightarrow> PROP P \<Longrightarrow> PROP Q) \<equiv> (PROP P \<Longrightarrow> False \<Longrightarrow> PROP Q)"
by (rule swap_prems_eq)
ML \<open>
fun eliminate_false_implies ct =
let
val (prems, concl) = Logic.strip_horn (Thm.term_of ct)
fun go n =
if n > 1 then
Conv.rewr_conv @{thm Pure.swap_prems_eq}
then_conv Conv.arg_conv (go (n - 1))
then_conv Conv.rewr_conv @{thm HOL.implies_True_equals}
else
Conv.rewr_conv @{thm HOL.False_implies_equals}
in
case concl of
Const (@{const_name HOL.Trueprop}, _) $ _ => SOME (go (length prems) ct)
| _ => NONE
end
\<close>
simproc_setup eliminate_false_implies ("False \<Longrightarrow> PROP P") = \<open>K (K eliminate_false_implies)\<close>
lemma ex_simps:
"\<And>P Q. (\<exists>x. P x \<and> Q) = ((\<exists>x. P x) \<and> Q)"
"\<And>P Q. (\<exists>x. P \<and> Q x) = (P \<and> (\<exists>x. Q x))"
"\<And>P Q. (\<exists>x. P x \<or> Q) = ((\<exists>x. P x) \<or> Q)"
"\<And>P Q. (\<exists>x. P \<or> Q x) = (P \<or> (\<exists>x. Q x))"
"\<And>P Q. (\<exists>x. P x \<longrightarrow> Q) = ((\<forall>x. P x) \<longrightarrow> Q)"
"\<And>P Q. (\<exists>x. P \<longrightarrow> Q x) = (P \<longrightarrow> (\<exists>x. Q x))"
\<comment> \<open>Miniscoping: pushing in existential quantifiers.\<close>
by (iprover | blast)+
lemma all_simps:
"\<And>P Q. (\<forall>x. P x \<and> Q) = ((\<forall>x. P x) \<and> Q)"
"\<And>P Q. (\<forall>x. P \<and> Q x) = (P \<and> (\<forall>x. Q x))"
"\<And>P Q. (\<forall>x. P x \<or> Q) = ((\<forall>x. P x) \<or> Q)"
"\<And>P Q. (\<forall>x. P \<or> Q x) = (P \<or> (\<forall>x. Q x))"
"\<And>P Q. (\<forall>x. P x \<longrightarrow> Q) = ((\<exists>x. P x) \<longrightarrow> Q)"
"\<And>P Q. (\<forall>x. P \<longrightarrow> Q x) = (P \<longrightarrow> (\<forall>x. Q x))"
\<comment> \<open>Miniscoping: pushing in universal quantifiers.\<close>
by (iprover | blast)+
lemmas [simp] =
triv_forall_equality \<comment> \<open>prunes params\<close>
True_implies_equals implies_True_equals \<comment> \<open>prune \<open>True\<close> in asms\<close>
False_implies_equals \<comment> \<open>prune \<open>False\<close> in asms\<close>
if_True
if_False
if_cancel
if_eq_cancel
imp_disjL \<comment> \<open>In general it seems wrong to add distributive laws by default: they
might cause exponential blow-up. But \<open>imp_disjL\<close> has been in for a while
and cannot be removed without affecting existing proofs. Moreover,
rewriting by \<open>(P \<or> Q \<longrightarrow> R) = ((P \<longrightarrow> R) \<and> (Q \<longrightarrow> R))\<close> might be justified on the
grounds that it allows simplification of \<open>R\<close> in the two cases.\<close>
conj_assoc
disj_assoc
de_Morgan_conj
de_Morgan_disj
imp_disj1
imp_disj2
not_imp
disj_not1
not_all
not_ex
cases_simp
the_eq_trivial
the_sym_eq_trivial
ex_simps
all_simps
simp_thms
subst_all
lemmas [cong] = imp_cong simp_implies_cong
lemmas [split] = if_split
ML \<open>val HOL_ss = simpset_of \<^context>\<close>
text \<open>Simplifies \<open>x\<close> assuming \<open>c\<close> and \<open>y\<close> assuming \<open>\<not> c\<close>.\<close>
lemma if_cong:
assumes "b = c"
and "c \<Longrightarrow> x = u"
and "\<not> c \<Longrightarrow> y = v"
shows "(if b then x else y) = (if c then u else v)"
using assms by simp
text \<open>Prevents simplification of \<open>x\<close> and \<open>y\<close>:
faster and allows the execution of functional programs.\<close>
lemma if_weak_cong [cong]:
assumes "b = c"
shows "(if b then x else y) = (if c then x else y)"
using assms by (rule arg_cong)
text \<open>Prevents simplification of \<open>t\<close>: much faster.\<close>
lemma let_weak_cong:
assumes "a = b"
shows "(let x = a in t x) = (let x = b in t x)"
using assms by (rule arg_cong)
text \<open>To tidy up the result of a simproc. Only the RHS will be simplified.\<close>
lemma eq_cong2:
assumes "u = u'"
shows "(t \<equiv> u) \<equiv> (t \<equiv> u')"
using assms by simp
lemma if_distrib: "f (if c then x else y) = (if c then f x else f y)"
by simp
lemma if_distribR: "(if b then f else g) x = (if b then f x else g x)"
by simp
lemma all_if_distrib: "(\<forall>x. if x = a then P x else Q x) \<longleftrightarrow> P a \<and> (\<forall>x. x\<noteq>a \<longrightarrow> Q x)"
by auto
lemma ex_if_distrib: "(\<exists>x. if x = a then P x else Q x) \<longleftrightarrow> P a \<or> (\<exists>x. x\<noteq>a \<and> Q x)"
by auto
lemma if_if_eq_conj: "(if P then if Q then x else y else y) = (if P \<and> Q then x else y)"
by simp
text \<open>As a simplification rule, it replaces all function equalities by
first-order equalities.\<close>
lemma fun_eq_iff: "f = g \<longleftrightarrow> (\<forall>x. f x = g x)"
by auto
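text \<open>
  For illustration, a small example: adding \<open>fun_eq_iff\<close> lets the simplifier reduce
  an equation between functions to a first-order statement about their values.
\<close>
lemma "(\<lambda>x. x \<and> True) = (\<lambda>x. x)"
  by (simp add: fun_eq_iff)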
subsubsection \<open>Generic cases and induction\<close>
text \<open>Rule projections:\<close>
ML \<open>
structure Project_Rule = Project_Rule
(
val conjunct1 = @{thm conjunct1}
val conjunct2 = @{thm conjunct2}
val mp = @{thm mp}
);
\<close>
context
begin
qualified definition "induct_forall P \<equiv> \<forall>x. P x"
qualified definition "induct_implies A B \<equiv> A \<longrightarrow> B"
qualified definition "induct_equal x y \<equiv> x = y"
qualified definition "induct_conj A B \<equiv> A \<and> B"
qualified definition "induct_true \<equiv> True"
qualified definition "induct_false \<equiv> False"
lemma induct_forall_eq: "(\<And>x. P x) \<equiv> Trueprop (induct_forall (\<lambda>x. P x))"
by (unfold atomize_all induct_forall_def)
lemma induct_implies_eq: "(A \<Longrightarrow> B) \<equiv> Trueprop (induct_implies A B)"
by (unfold atomize_imp induct_implies_def)
lemma induct_equal_eq: "(x \<equiv> y) \<equiv> Trueprop (induct_equal x y)"
by (unfold atomize_eq induct_equal_def)
lemma induct_conj_eq: "(A &&& B) \<equiv> Trueprop (induct_conj A B)"
by (unfold atomize_conj induct_conj_def)
lemmas induct_atomize' = induct_forall_eq induct_implies_eq induct_conj_eq
lemmas induct_atomize = induct_atomize' induct_equal_eq
lemmas induct_rulify' [symmetric] = induct_atomize'
lemmas induct_rulify [symmetric] = induct_atomize
lemmas induct_rulify_fallback =
induct_forall_def induct_implies_def induct_equal_def induct_conj_def
induct_true_def induct_false_def
lemma induct_forall_conj: "induct_forall (\<lambda>x. induct_conj (A x) (B x)) =
induct_conj (induct_forall A) (induct_forall B)"
by (unfold induct_forall_def induct_conj_def) iprover
lemma induct_implies_conj: "induct_implies C (induct_conj A B) =
induct_conj (induct_implies C A) (induct_implies C B)"
by (unfold induct_implies_def induct_conj_def) iprover
lemma induct_conj_curry: "(induct_conj A B \<Longrightarrow> PROP C) \<equiv> (A \<Longrightarrow> B \<Longrightarrow> PROP C)"
proof
assume r: "induct_conj A B \<Longrightarrow> PROP C"
assume ab: A B
show "PROP C" by (rule r) (simp add: induct_conj_def ab)
next
assume r: "A \<Longrightarrow> B \<Longrightarrow> PROP C"
assume ab: "induct_conj A B"
show "PROP C" by (rule r) (simp_all add: ab [unfolded induct_conj_def])
qed
lemmas induct_conj = induct_forall_conj induct_implies_conj induct_conj_curry
lemma induct_trueI: "induct_true"
by (simp add: induct_true_def)
text \<open>Method setup.\<close>
ML_file \<open>~~/src/Tools/induct.ML\<close>
ML \<open>
structure Induct = Induct
(
val cases_default = @{thm case_split}
val atomize = @{thms induct_atomize}
val rulify = @{thms induct_rulify'}
val rulify_fallback = @{thms induct_rulify_fallback}
val equal_def = @{thm induct_equal_def}
fun dest_def (Const (\<^const_name>\<open>induct_equal\<close>, _) $ t $ u) = SOME (t, u)
| dest_def _ = NONE
fun trivial_tac ctxt = match_tac ctxt @{thms induct_trueI}
)
\<close>
ML_file \<open>~~/src/Tools/induction.ML\<close>
declaration \<open>
fn _ => Induct.map_simpset (fn ss => ss
addsimprocs
[Simplifier.make_simproc \<^context> "swap_induct_false"
{lhss = [\<^term>\<open>induct_false \<Longrightarrow> PROP P \<Longrightarrow> PROP Q\<close>],
proc = fn _ => fn _ => fn ct =>
(case Thm.term_of ct of
_ $ (P as _ $ \<^Const_>\<open>induct_false\<close>) $ (_ $ Q $ _) =>
if P <> Q then SOME Drule.swap_prems_eq else NONE
| _ => NONE)},
Simplifier.make_simproc \<^context> "induct_equal_conj_curry"
{lhss = [\<^term>\<open>induct_conj P Q \<Longrightarrow> PROP R\<close>],
proc = fn _ => fn _ => fn ct =>
(case Thm.term_of ct of
_ $ (_ $ P) $ _ =>
let
fun is_conj \<^Const_>\<open>induct_conj for P Q\<close> =
is_conj P andalso is_conj Q
| is_conj \<^Const_>\<open>induct_equal _ for _ _\<close> = true
| is_conj \<^Const_>\<open>induct_true\<close> = true
| is_conj \<^Const_>\<open>induct_false\<close> = true
| is_conj _ = false
in if is_conj P then SOME @{thm induct_conj_curry} else NONE end
| _ => NONE)}]
|> Simplifier.set_mksimps (fn ctxt =>
Simpdata.mksimps Simpdata.mksimps_pairs ctxt #>
map (rewrite_rule ctxt (map Thm.symmetric @{thms induct_rulify_fallback}))))
\<close>
text \<open>Pre-simplification of induction and cases rules\<close>
lemma [induct_simp]: "(\<And>x. induct_equal x t \<Longrightarrow> PROP P x) \<equiv> PROP P t"
unfolding induct_equal_def
proof
assume r: "\<And>x. x = t \<Longrightarrow> PROP P x"
show "PROP P t" by (rule r [OF refl])
next
fix x
assume "PROP P t" "x = t"
then show "PROP P x" by simp
qed
lemma [induct_simp]: "(\<And>x. induct_equal t x \<Longrightarrow> PROP P x) \<equiv> PROP P t"
unfolding induct_equal_def
proof
assume r: "\<And>x. t = x \<Longrightarrow> PROP P x"
show "PROP P t" by (rule r [OF refl])
next
fix x
assume "PROP P t" "t = x"
then show "PROP P x" by simp
qed
lemma [induct_simp]: "(induct_false \<Longrightarrow> P) \<equiv> Trueprop induct_true"
unfolding induct_false_def induct_true_def
by (iprover intro: equal_intr_rule)
lemma [induct_simp]: "(induct_true \<Longrightarrow> PROP P) \<equiv> PROP P"
unfolding induct_true_def
proof
assume "True \<Longrightarrow> PROP P"
then show "PROP P" using TrueI .
next
assume "PROP P"
then show "PROP P" .
qed
lemma [induct_simp]: "(PROP P \<Longrightarrow> induct_true) \<equiv> Trueprop induct_true"
unfolding induct_true_def
by (iprover intro: equal_intr_rule)
lemma [induct_simp]: "(\<And>x::'a::{}. induct_true) \<equiv> Trueprop induct_true"
unfolding induct_true_def
by (iprover intro: equal_intr_rule)
lemma [induct_simp]: "induct_implies induct_true P \<equiv> P"
by (simp add: induct_implies_def induct_true_def)
lemma [induct_simp]: "x = x \<longleftrightarrow> True"
by (rule simp_thms)
end
ML_file \<open>~~/src/Tools/induct_tacs.ML\<close>
subsubsection \<open>Coherent logic\<close>
ML_file \<open>~~/src/Tools/coherent.ML\<close>
ML \<open>
structure Coherent = Coherent
(
val atomize_elimL = @{thm atomize_elimL};
val atomize_exL = @{thm atomize_exL};
val atomize_conjL = @{thm atomize_conjL};
val atomize_disjL = @{thm atomize_disjL};
val operator_names = [\<^const_name>\<open>HOL.disj\<close>, \<^const_name>\<open>HOL.conj\<close>, \<^const_name>\<open>Ex\<close>];
);
\<close>
subsubsection \<open>Reorienting equalities\<close>
ML \<open>
signature REORIENT_PROC =
sig
val add : (term -> bool) -> theory -> theory
- val proc : morphism -> Proof.context -> cterm -> thm option
+ val proc : Proof.context -> cterm -> thm option
end;
structure Reorient_Proc : REORIENT_PROC =
struct
structure Data = Theory_Data
(
type T = ((term -> bool) * stamp) list;
val empty = [];
fun merge data : T = Library.merge (eq_snd (op =)) data;
);
fun add m = Data.map (cons (m, stamp ()));
fun matches thy t = exists (fn (m, _) => m t) (Data.get thy);
val meta_reorient = @{thm eq_commute [THEN eq_reflection]};
- fun proc phi ctxt ct =
+ fun proc ctxt ct =
let
val thy = Proof_Context.theory_of ctxt;
in
case Thm.term_of ct of
(_ $ t $ u) => if matches thy u then NONE else SOME meta_reorient
| _ => NONE
end;
end;
\<close>
subsection \<open>Other simple lemmas and lemma duplicates\<close>
lemma eq_iff_swap: "(x = y \<longleftrightarrow> P) \<Longrightarrow> (y = x \<longleftrightarrow> P)"
by blast
lemma all_cong1: "(\<And>x. P x = P' x) \<Longrightarrow> (\<forall>x. P x) = (\<forall>x. P' x)"
by auto
lemma ex_cong1: "(\<And>x. P x = P' x) \<Longrightarrow> (\<exists>x. P x) = (\<exists>x. P' x)"
by auto
lemma all_cong: "(\<And>x. Q x \<Longrightarrow> P x = P' x) \<Longrightarrow> (\<forall>x. Q x \<longrightarrow> P x) = (\<forall>x. Q x \<longrightarrow> P' x)"
by auto
lemma ex_cong: "(\<And>x. Q x \<Longrightarrow> P x = P' x) \<Longrightarrow> (\<exists>x. Q x \<and> P x) = (\<exists>x. Q x \<and> P' x)"
by auto
lemma ex1_eq [iff]: "\<exists>!x. x = t" "\<exists>!x. t = x"
by blast+
lemma choice_eq: "(\<forall>x. \<exists>!y. P x y) = (\<exists>!f. \<forall>x. P x (f x))" (is "?lhs = ?rhs")
proof (intro iffI allI)
assume L: ?lhs
then have \<section>: "\<forall>x. P x (THE y. P x y)"
by (best intro: theI')
show ?rhs
by (rule ex1I) (use L \<section> in \<open>fast+\<close>)
next
fix x
assume R: ?rhs
then obtain f where f: "\<forall>x. P x (f x)" and f1: "\<And>y. (\<forall>x. P x (y x)) \<Longrightarrow> y = f"
by (blast elim: ex1E)
show "\<exists>!y. P x y"
proof (rule ex1I)
show "P x (f x)"
using f by blast
show "y = f x" if "P x y" for y
proof -
have "P z (if z = x then y else f z)" for z
using f that by (auto split: if_split)
with f1 [of "\<lambda>z. if z = x then y else f z"] f
show ?thesis
by (auto simp add: split: if_split_asm dest: fun_cong)
qed
qed
qed
lemmas eq_sym_conv = eq_commute
lemma nnf_simps:
"(\<not> (P \<and> Q)) = (\<not> P \<or> \<not> Q)"
"(\<not> (P \<or> Q)) = (\<not> P \<and> \<not> Q)"
"(P \<longrightarrow> Q) = (\<not> P \<or> Q)"
"(P = Q) = ((P \<and> Q) \<or> (\<not> P \<and> \<not> Q))"
"(\<not> (P = Q)) = ((P \<and> \<not> Q) \<or> (\<not> P \<and> Q))"
"(\<not> \<not> P) = P"
by blast+
subsection \<open>Basic ML bindings\<close>
ML \<open>
val FalseE = @{thm FalseE}
val Let_def = @{thm Let_def}
val TrueI = @{thm TrueI}
val allE = @{thm allE}
val allI = @{thm allI}
val all_dupE = @{thm all_dupE}
val arg_cong = @{thm arg_cong}
val box_equals = @{thm box_equals}
val ccontr = @{thm ccontr}
val classical = @{thm classical}
val conjE = @{thm conjE}
val conjI = @{thm conjI}
val conjunct1 = @{thm conjunct1}
val conjunct2 = @{thm conjunct2}
val disjCI = @{thm disjCI}
val disjE = @{thm disjE}
val disjI1 = @{thm disjI1}
val disjI2 = @{thm disjI2}
val eq_reflection = @{thm eq_reflection}
val ex1E = @{thm ex1E}
val ex1I = @{thm ex1I}
val ex1_implies_ex = @{thm ex1_implies_ex}
val exE = @{thm exE}
val exI = @{thm exI}
val excluded_middle = @{thm excluded_middle}
val ext = @{thm ext}
val fun_cong = @{thm fun_cong}
val iffD1 = @{thm iffD1}
val iffD2 = @{thm iffD2}
val iffI = @{thm iffI}
val impE = @{thm impE}
val impI = @{thm impI}
val meta_eq_to_obj_eq = @{thm meta_eq_to_obj_eq}
val mp = @{thm mp}
val notE = @{thm notE}
val notI = @{thm notI}
val not_all = @{thm not_all}
val not_ex = @{thm not_ex}
val not_iff = @{thm not_iff}
val not_not = @{thm not_not}
val not_sym = @{thm not_sym}
val refl = @{thm refl}
val rev_mp = @{thm rev_mp}
val spec = @{thm spec}
val ssubst = @{thm ssubst}
val subst = @{thm subst}
val sym = @{thm sym}
val trans = @{thm trans}
\<close>
locale cnf
begin
lemma clause2raw_notE: "\<lbrakk>P; \<not>P\<rbrakk> \<Longrightarrow> False" by auto
lemma clause2raw_not_disj: "\<lbrakk>\<not> P; \<not> Q\<rbrakk> \<Longrightarrow> \<not> (P \<or> Q)" by auto
lemma clause2raw_not_not: "P \<Longrightarrow> \<not>\<not> P" by auto
lemma iff_refl: "(P::bool) = P" by auto
lemma iff_trans: "[| (P::bool) = Q; Q = R |] ==> P = R" by auto
lemma conj_cong: "[| P = P'; Q = Q' |] ==> (P \<and> Q) = (P' \<and> Q')" by auto
lemma disj_cong: "[| P = P'; Q = Q' |] ==> (P \<or> Q) = (P' \<or> Q')" by auto
lemma make_nnf_imp: "[| (\<not>P) = P'; Q = Q' |] ==> (P \<longrightarrow> Q) = (P' \<or> Q')" by auto
lemma make_nnf_iff: "[| P = P'; (\<not>P) = NP; Q = Q'; (\<not>Q) = NQ |] ==> (P = Q) = ((P' \<or> NQ) \<and> (NP \<or> Q'))" by auto
lemma make_nnf_not_false: "(\<not>False) = True" by auto
lemma make_nnf_not_true: "(\<not>True) = False" by auto
lemma make_nnf_not_conj: "[| (\<not>P) = P'; (\<not>Q) = Q' |] ==> (\<not>(P \<and> Q)) = (P' \<or> Q')" by auto
lemma make_nnf_not_disj: "[| (\<not>P) = P'; (\<not>Q) = Q' |] ==> (\<not>(P \<or> Q)) = (P' \<and> Q')" by auto
lemma make_nnf_not_imp: "[| P = P'; (\<not>Q) = Q' |] ==> (\<not>(P \<longrightarrow> Q)) = (P' \<and> Q')" by auto
lemma make_nnf_not_iff: "[| P = P'; (\<not>P) = NP; Q = Q'; (\<not>Q) = NQ |] ==> (\<not>(P = Q)) = ((P' \<or> Q') \<and> (NP \<or> NQ))" by auto
lemma make_nnf_not_not: "P = P' ==> (\<not>\<not>P) = P'" by auto
lemma simp_TF_conj_True_l: "[| P = True; Q = Q' |] ==> (P \<and> Q) = Q'" by auto
lemma simp_TF_conj_True_r: "[| P = P'; Q = True |] ==> (P \<and> Q) = P'" by auto
lemma simp_TF_conj_False_l: "P = False ==> (P \<and> Q) = False" by auto
lemma simp_TF_conj_False_r: "Q = False ==> (P \<and> Q) = False" by auto
lemma simp_TF_disj_True_l: "P = True ==> (P \<or> Q) = True" by auto
lemma simp_TF_disj_True_r: "Q = True ==> (P \<or> Q) = True" by auto
lemma simp_TF_disj_False_l: "[| P = False; Q = Q' |] ==> (P \<or> Q) = Q'" by auto
lemma simp_TF_disj_False_r: "[| P = P'; Q = False |] ==> (P \<or> Q) = P'" by auto
lemma make_cnf_disj_conj_l: "[| (P \<or> R) = PR; (Q \<or> R) = QR |] ==> ((P \<and> Q) \<or> R) = (PR \<and> QR)" by auto
lemma make_cnf_disj_conj_r: "[| (P \<or> Q) = PQ; (P \<or> R) = PR |] ==> (P \<or> (Q \<and> R)) = (PQ \<and> PR)" by auto
lemma make_cnfx_disj_ex_l: "((\<exists>(x::bool). P x) \<or> Q) = (\<exists>x. P x \<or> Q)" by auto
lemma make_cnfx_disj_ex_r: "(P \<or> (\<exists>(x::bool). Q x)) = (\<exists>x. P \<or> Q x)" by auto
lemma make_cnfx_newlit: "(P \<or> Q) = (\<exists>x. (P \<or> x) \<and> (Q \<or> \<not>x))" by auto
lemma make_cnfx_ex_cong: "(\<forall>(x::bool). P x = Q x) \<Longrightarrow> (\<exists>x. P x) = (\<exists>x. Q x)" by auto
lemma weakening_thm: "[| P; Q |] ==> Q" by auto
lemma cnftac_eq_imp: "[| P = Q; P |] ==> Q" by auto
end
ML_file \<open>Tools/cnf.ML\<close>
section \<open>\<open>NO_MATCH\<close> simproc\<close>
text \<open>
The simplification procedure can be used to avoid simplification of terms
of a certain form.
\<close>
definition NO_MATCH :: "'a \<Rightarrow> 'b \<Rightarrow> bool"
where "NO_MATCH pat val \<equiv> True"
lemma NO_MATCH_cong[cong]: "NO_MATCH pat val = NO_MATCH pat val"
by (rule refl)
declare [[coercion_args NO_MATCH - -]]
-simproc_setup NO_MATCH ("NO_MATCH pat val") = \<open>fn _ => fn ctxt => fn ct =>
+simproc_setup NO_MATCH ("NO_MATCH pat val") = \<open>K (fn ctxt => fn ct =>
let
val thy = Proof_Context.theory_of ctxt
val dest_binop = Term.dest_comb #> apfst (Term.dest_comb #> snd)
val m = Pattern.matches thy (dest_binop (Thm.term_of ct))
- in if m then NONE else SOME @{thm NO_MATCH_def} end
+ in if m then NONE else SOME @{thm NO_MATCH_def} end)
\<close>
text \<open>
This setup ensures that a rewrite rule of the form \<^term>\<open>NO_MATCH pat val \<Longrightarrow> t\<close>
is only applied if the pattern \<open>pat\<close> does not match the value \<open>val\<close>.
\<close>
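text \<open>
  For illustration, a toy rule of that shape: the \<open>NO_MATCH\<close> premise is trivially true,
  so the lemma is easily proved; declared as a simp rule, it would rewrite \<open>P \<and> True\<close>
  only when \<open>P\<close> is not literally \<open>True\<close>.
\<close>
lemma "NO_MATCH True P \<Longrightarrow> (P \<and> True) = P"
  by simp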
text \<open>
Tagging a premise of a simp rule with ASSUMPTION forces the simplifier
not to simplify the argument and to solve it by an assumption.
\<close>
definition ASSUMPTION :: "bool \<Rightarrow> bool"
where "ASSUMPTION A \<equiv> A"
lemma ASSUMPTION_cong[cong]: "ASSUMPTION A = ASSUMPTION A"
by (rule refl)
lemma ASSUMPTION_I: "A \<Longrightarrow> ASSUMPTION A"
by (simp add: ASSUMPTION_def)
lemma ASSUMPTION_D: "ASSUMPTION A \<Longrightarrow> A"
by (simp add: ASSUMPTION_def)
setup \<open>
let
val asm_sol = mk_solver "ASSUMPTION" (fn ctxt =>
resolve_tac ctxt [@{thm ASSUMPTION_I}] THEN'
resolve_tac ctxt (Simplifier.prems_of ctxt))
in
map_theory_simpset (fn ctxt => Simplifier.addSolver (ctxt,asm_sol))
end
\<close>
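text \<open>
  For illustration, a toy conditional rewrite guarded by \<open>ASSUMPTION\<close>: here the guard
  is discharged by unfolding \<open>ASSUMPTION_def\<close>, whereas as a simp rule the guard would
  only be solved against a literal assumption of the current goal.
\<close>
lemma "ASSUMPTION P \<Longrightarrow> (P \<and> Q) = Q"
  by (simp add: ASSUMPTION_def)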
subsection \<open>Code generator setup\<close>
subsubsection \<open>Generic code generator preprocessor setup\<close>
lemma conj_left_cong: "P \<longleftrightarrow> Q \<Longrightarrow> P \<and> R \<longleftrightarrow> Q \<and> R"
by (fact arg_cong)
lemma disj_left_cong: "P \<longleftrightarrow> Q \<Longrightarrow> P \<or> R \<longleftrightarrow> Q \<or> R"
by (fact arg_cong)
setup \<open>
Code_Preproc.map_pre (put_simpset HOL_basic_ss) #>
Code_Preproc.map_post (put_simpset HOL_basic_ss) #>
Code_Simp.map_ss (put_simpset HOL_basic_ss #>
Simplifier.add_cong @{thm conj_left_cong} #>
Simplifier.add_cong @{thm disj_left_cong})
\<close>
subsubsection \<open>Equality\<close>
class equal =
fixes equal :: "'a \<Rightarrow> 'a \<Rightarrow> bool"
assumes equal_eq: "equal x y \<longleftrightarrow> x = y"
begin
lemma equal: "equal = (=)"
by (rule ext equal_eq)+
lemma equal_refl: "equal x x \<longleftrightarrow> True"
unfolding equal by (rule iffI TrueI refl)+
lemma eq_equal: "(=) \<equiv> equal"
by (rule eq_reflection) (rule ext, rule ext, rule sym, rule equal_eq)
end
declare eq_equal [symmetric, code_post]
declare eq_equal [code]
setup \<open>
Code_Preproc.map_pre (fn ctxt =>
ctxt addsimprocs
[Simplifier.make_simproc \<^context> "equal"
{lhss = [\<^term>\<open>HOL.eq\<close>],
proc = fn _ => fn _ => fn ct =>
(case Thm.term_of ct of
Const (_, Type (\<^type_name>\<open>fun\<close>, [Type _, _])) => SOME @{thm eq_equal}
| _ => NONE)}])
\<close>
subsubsection \<open>Generic code generator foundation\<close>
text \<open>Datatype \<^typ>\<open>bool\<close>\<close>
code_datatype True False
lemma [code]:
shows "False \<and> P \<longleftrightarrow> False"
and "True \<and> P \<longleftrightarrow> P"
and "P \<and> False \<longleftrightarrow> False"
and "P \<and> True \<longleftrightarrow> P"
by simp_all
lemma [code]:
shows "False \<or> P \<longleftrightarrow> P"
and "True \<or> P \<longleftrightarrow> True"
and "P \<or> False \<longleftrightarrow> P"
and "P \<or> True \<longleftrightarrow> True"
by simp_all
lemma [code]:
shows "(False \<longrightarrow> P) \<longleftrightarrow> True"
and "(True \<longrightarrow> P) \<longleftrightarrow> P"
and "(P \<longrightarrow> False) \<longleftrightarrow> \<not> P"
and "(P \<longrightarrow> True) \<longleftrightarrow> True"
by simp_all
text \<open>More about \<^typ>\<open>prop\<close>\<close>
lemma [code nbe]:
shows "(True \<Longrightarrow> PROP Q) \<equiv> PROP Q"
and "(PROP Q \<Longrightarrow> True) \<equiv> Trueprop True"
and "(P \<Longrightarrow> R) \<equiv> Trueprop (P \<longrightarrow> R)"
by (auto intro!: equal_intr_rule)
lemma Trueprop_code [code]: "Trueprop True \<equiv> Code_Generator.holds"
by (auto intro!: equal_intr_rule holds)
declare Trueprop_code [symmetric, code_post]
text \<open>Equality\<close>
declare simp_thms(6) [code nbe]
instantiation itself :: (type) equal
begin
definition equal_itself :: "'a itself \<Rightarrow> 'a itself \<Rightarrow> bool"
where "equal_itself x y \<longleftrightarrow> x = y"
instance
by standard (fact equal_itself_def)
end
lemma equal_itself_code [code]: "equal TYPE('a) TYPE('a) \<longleftrightarrow> True"
by (simp add: equal)
setup \<open>Sign.add_const_constraint (\<^const_name>\<open>equal\<close>, SOME \<^typ>\<open>'a::type \<Rightarrow> 'a \<Rightarrow> bool\<close>)\<close>
lemma equal_alias_cert: "OFCLASS('a, equal_class) \<equiv> (((=) :: 'a \<Rightarrow> 'a \<Rightarrow> bool) \<equiv> equal)"
(is "?ofclass \<equiv> ?equal")
proof
assume "PROP ?ofclass"
show "PROP ?equal"
by (tactic \<open>ALLGOALS (resolve_tac \<^context> [Thm.unconstrainT @{thm eq_equal}])\<close>)
(fact \<open>PROP ?ofclass\<close>)
next
assume "PROP ?equal"
show "PROP ?ofclass" proof
qed (simp add: \<open>PROP ?equal\<close>)
qed
setup \<open>Sign.add_const_constraint (\<^const_name>\<open>equal\<close>, SOME \<^typ>\<open>'a::equal \<Rightarrow> 'a \<Rightarrow> bool\<close>)\<close>
setup \<open>Nbe.add_const_alias @{thm equal_alias_cert}\<close>
text \<open>Cases\<close>
lemma Let_case_cert:
assumes "CASE \<equiv> (\<lambda>x. Let x f)"
shows "CASE x \<equiv> f x"
using assms by simp_all
setup \<open>
Code.declare_case_global @{thm Let_case_cert} #>
Code.declare_undefined_global \<^const_name>\<open>undefined\<close>
\<close>
declare [[code abort: undefined]]
subsubsection \<open>Generic code generator target languages\<close>
text \<open>type \<^typ>\<open>bool\<close>\<close>
code_printing
type_constructor bool \<rightharpoonup>
(SML) "bool" and (OCaml) "bool" and (Haskell) "Bool" and (Scala) "Boolean"
| constant True \<rightharpoonup>
(SML) "true" and (OCaml) "true" and (Haskell) "True" and (Scala) "true"
| constant False \<rightharpoonup>
(SML) "false" and (OCaml) "false" and (Haskell) "False" and (Scala) "false"
code_reserved SML
bool true false
code_reserved OCaml
bool
code_reserved Scala
Boolean
code_printing
constant Not \<rightharpoonup>
(SML) "not" and (OCaml) "not" and (Haskell) "not" and (Scala) "'! _"
| constant HOL.conj \<rightharpoonup>
(SML) infixl 1 "andalso" and (OCaml) infixl 3 "&&" and (Haskell) infixr 3 "&&" and (Scala) infixl 3 "&&"
| constant HOL.disj \<rightharpoonup>
(SML) infixl 0 "orelse" and (OCaml) infixl 2 "||" and (Haskell) infixl 2 "||" and (Scala) infixl 1 "||"
| constant HOL.implies \<rightharpoonup>
(SML) "!(if (_)/ then (_)/ else true)"
and (OCaml) "!(if (_)/ then (_)/ else true)"
and (Haskell) "!(if (_)/ then (_)/ else True)"
and (Scala) "!(if ((_))/ (_)/ else true)"
| constant If \<rightharpoonup>
(SML) "!(if (_)/ then (_)/ else (_))"
and (OCaml) "!(if (_)/ then (_)/ else (_))"
and (Haskell) "!(if (_)/ then (_)/ else (_))"
and (Scala) "!(if ((_))/ (_)/ else (_))"
code_reserved SML
not
code_reserved OCaml
not
code_identifier
code_module Pure \<rightharpoonup>
(SML) HOL and (OCaml) HOL and (Haskell) HOL and (Scala) HOL
text \<open>Using built-in Haskell equality.\<close>
code_printing
type_class equal \<rightharpoonup> (Haskell) "Eq"
| constant HOL.equal \<rightharpoonup> (Haskell) infix 4 "=="
| constant HOL.eq \<rightharpoonup> (Haskell) infix 4 "=="
text \<open>\<open>undefined\<close>\<close>
code_printing
constant undefined \<rightharpoonup>
(SML) "!(raise/ Fail/ \"undefined\")"
and (OCaml) "failwith/ \"undefined\""
and (Haskell) "error/ \"undefined\""
and (Scala) "!sys.error(\"undefined\")"
subsubsection \<open>Evaluation and normalization by evaluation\<close>
method_setup eval = \<open>
let
fun eval_tac ctxt =
let val conv = Code_Runtime.dynamic_holds_conv
in
CONVERSION (Conv.params_conv ~1 (Conv.concl_conv ~1 o conv) ctxt) THEN'
resolve_tac ctxt [TrueI]
end
in
Scan.succeed (SIMPLE_METHOD' o eval_tac)
end
\<close> "solve goal by evaluation"
method_setup normalization = \<open>
Scan.succeed (fn ctxt =>
SIMPLE_METHOD'
(CHANGED_PROP o
(CONVERSION (Nbe.dynamic_conv ctxt)
THEN_ALL_NEW (TRY o resolve_tac ctxt [TrueI]))))
\<close> "solve goal by normalization"
subsection \<open>Counterexample Search Units\<close>
subsubsection \<open>Quickcheck\<close>
quickcheck_params [size = 5, iterations = 50]
subsubsection \<open>Nitpick setup\<close>
named_theorems nitpick_unfold "alternative definitions of constants as needed by Nitpick"
and nitpick_simp "equational specification of constants as needed by Nitpick"
and nitpick_psimp "partial equational specification of constants as needed by Nitpick"
and nitpick_choice_spec "choice specification of constants as needed by Nitpick"
declare if_bool_eq_conj [nitpick_unfold, no_atp]
and if_bool_eq_disj [no_atp]
subsection \<open>Preprocessing for the predicate compiler\<close>
named_theorems code_pred_def "alternative definitions of constants for the Predicate Compiler"
and code_pred_inline "inlining definitions for the Predicate Compiler"
and code_pred_simp "simplification rules for the optimisations in the Predicate Compiler"
subsection \<open>Legacy tactics and ML bindings\<close>
ML \<open>
(* combination of (spec RS spec RS ...(j times) ... spec RS mp) *)
local
fun wrong_prem (Const (\<^const_name>\<open>All\<close>, _) $ Abs (_, _, t)) = wrong_prem t
| wrong_prem (Bound _) = true
| wrong_prem _ = false;
val filter_right = filter (not o wrong_prem o HOLogic.dest_Trueprop o hd o Thm.prems_of);
fun smp i = funpow i (fn m => filter_right ([spec] RL m)) [mp];
in
fun smp_tac ctxt j = EVERY' [dresolve_tac ctxt (smp j), assume_tac ctxt];
end;
local
val nnf_ss =
simpset_of (put_simpset HOL_basic_ss \<^context> addsimps @{thms simp_thms nnf_simps});
in
fun nnf_conv ctxt = Simplifier.rewrite (put_simpset nnf_ss ctxt);
end
\<close>
hide_const (open) eq equal
end
diff --git a/src/HOL/HOLCF/Cfun.thy b/src/HOL/HOLCF/Cfun.thy
--- a/src/HOL/HOLCF/Cfun.thy
+++ b/src/HOL/HOLCF/Cfun.thy
@@ -1,523 +1,523 @@
(* Title: HOL/HOLCF/Cfun.thy
Author: Franz Regensburger
Author: Brian Huffman
*)
section \<open>The type of continuous functions\<close>
theory Cfun
imports Cpodef Fun_Cpo Product_Cpo
begin
default_sort cpo
subsection \<open>Definition of continuous function type\<close>
definition "cfun = {f::'a \<Rightarrow> 'b. cont f}"
cpodef ('a, 'b) cfun ("(_ \<rightarrow>/ _)" [1, 0] 0) = "cfun :: ('a \<Rightarrow> 'b) set"
by (auto simp: cfun_def intro: cont_const adm_cont)
type_notation (ASCII)
cfun (infixr "->" 0)
notation (ASCII)
Rep_cfun ("(_$/_)" [999,1000] 999)
notation
Rep_cfun ("(_\<cdot>/_)" [999,1000] 999)
subsection \<open>Syntax for continuous lambda abstraction\<close>
syntax "_cabs" :: "[logic, logic] \<Rightarrow> logic"
parse_translation \<open>
(* rewrite (_cabs x t) => (Abs_cfun (%x. t)) *)
[Syntax_Trans.mk_binder_tr (\<^syntax_const>\<open>_cabs\<close>, \<^const_syntax>\<open>Abs_cfun\<close>)]
\<close>
print_translation \<open>
[(\<^const_syntax>\<open>Abs_cfun\<close>, fn _ => fn [Abs abs] =>
let val (x, t) = Syntax_Trans.atomic_abs_tr' abs
in Syntax.const \<^syntax_const>\<open>_cabs\<close> $ x $ t end)]
\<close> \<comment> \<open>To avoid eta-contraction of body\<close>
text \<open>Syntax for nested abstractions\<close>
syntax (ASCII)
"_Lambda" :: "[cargs, logic] \<Rightarrow> logic" ("(3LAM _./ _)" [1000, 10] 10)
syntax
"_Lambda" :: "[cargs, logic] \<Rightarrow> logic" ("(3\<Lambda> _./ _)" [1000, 10] 10)
parse_ast_translation \<open>
(* rewrite (LAM x y z. t) => (_cabs x (_cabs y (_cabs z t))) *)
(* cf. Syntax.lambda_ast_tr from src/Pure/Syntax/syn_trans.ML *)
let
fun Lambda_ast_tr [pats, body] =
Ast.fold_ast_p \<^syntax_const>\<open>_cabs\<close>
(Ast.unfold_ast \<^syntax_const>\<open>_cargs\<close> (Ast.strip_positions pats), body)
| Lambda_ast_tr asts = raise Ast.AST ("Lambda_ast_tr", asts);
in [(\<^syntax_const>\<open>_Lambda\<close>, K Lambda_ast_tr)] end
\<close>
print_ast_translation \<open>
(* rewrite (_cabs x (_cabs y (_cabs z t))) => (LAM x y z. t) *)
(* cf. Syntax.abs_ast_tr' from src/Pure/Syntax/syn_trans.ML *)
let
fun cabs_ast_tr' asts =
(case Ast.unfold_ast_p \<^syntax_const>\<open>_cabs\<close>
(Ast.Appl (Ast.Constant \<^syntax_const>\<open>_cabs\<close> :: asts)) of
([], _) => raise Ast.AST ("cabs_ast_tr'", asts)
| (xs, body) => Ast.Appl
[Ast.Constant \<^syntax_const>\<open>_Lambda\<close>,
Ast.fold_ast \<^syntax_const>\<open>_cargs\<close> xs, body]);
in [(\<^syntax_const>\<open>_cabs\<close>, K cabs_ast_tr')] end
\<close>
text \<open>Dummy patterns for continuous abstraction\<close>
translations
"\<Lambda> _. t" \<rightharpoonup> "CONST Abs_cfun (\<lambda>_. t)"
subsection \<open>Continuous function space is pointed\<close>
lemma bottom_cfun: "\<bottom> \<in> cfun"
by (simp add: cfun_def inst_fun_pcpo)
instance cfun :: (cpo, discrete_cpo) discrete_cpo
by intro_classes (simp add: below_cfun_def Rep_cfun_inject)
instance cfun :: (cpo, pcpo) pcpo
by (rule typedef_pcpo [OF type_definition_cfun below_cfun_def bottom_cfun])
lemmas Rep_cfun_strict =
typedef_Rep_strict [OF type_definition_cfun below_cfun_def bottom_cfun]
lemmas Abs_cfun_strict =
typedef_Abs_strict [OF type_definition_cfun below_cfun_def bottom_cfun]
text \<open>function application is strict in its first argument\<close>
lemma Rep_cfun_strict1 [simp]: "\<bottom>\<cdot>x = \<bottom>"
by (simp add: Rep_cfun_strict)
lemma LAM_strict [simp]: "(\<Lambda> x. \<bottom>) = \<bottom>"
by (simp add: inst_fun_pcpo [symmetric] Abs_cfun_strict)
text \<open>for compatibility with older HOLCF versions\<close>
lemma inst_cfun_pcpo: "\<bottom> = (\<Lambda> x. \<bottom>)"
by simp
subsection \<open>Basic properties of continuous functions\<close>
text \<open>Beta-equality for continuous functions\<close>
lemma Abs_cfun_inverse2: "cont f \<Longrightarrow> Rep_cfun (Abs_cfun f) = f"
by (simp add: Abs_cfun_inverse cfun_def)
lemma beta_cfun: "cont f \<Longrightarrow> (\<Lambda> x. f x)\<cdot>u = f u"
by (simp add: Abs_cfun_inverse2)
subsubsection \<open>Beta-reduction simproc\<close>
text \<open>
Given the term \<^term>\<open>(\<Lambda> x. f x)\<cdot>y\<close>, the procedure tries to
construct the theorem \<^term>\<open>(\<Lambda> x. f x)\<cdot>y \<equiv> f y\<close>. If this
theorem cannot be completely solved by the cont2cont rules, then
the procedure returns the ordinary conditional \<open>beta_cfun\<close>
rule.
The simproc does not solve any goals beyond those that would already be
solved by using \<open>beta_cfun\<close> as a simp rule. The advantage of the
simproc is that it avoids the deeply nested calls to the simplifier
that large continuity side conditions would otherwise cause.
Update: The simproc now uses rule \<open>Abs_cfun_inverse2\<close> instead
of \<open>beta_cfun\<close>, to avoid problems with eta-contraction.
\<close>
simproc_setup beta_cfun_proc ("Rep_cfun (Abs_cfun f)") = \<open>
- fn phi => fn ctxt => fn ct =>
+ K (fn ctxt => fn ct =>
let
val f = #2 (Thm.dest_comb (#2 (Thm.dest_comb ct)));
val [T, U] = Thm.dest_ctyp (Thm.ctyp_of_cterm f);
val tr = Thm.instantiate' [SOME T, SOME U] [SOME f] (mk_meta_eq @{thm Abs_cfun_inverse2});
val rules = Named_Theorems.get ctxt \<^named_theorems>\<open>cont2cont\<close>;
val tac = SOLVED' (REPEAT_ALL_NEW (match_tac ctxt (rev rules)));
- in SOME (perhaps (SINGLE (tac 1)) tr) end
+ in SOME (perhaps (SINGLE (tac 1)) tr) end)
\<close>
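text \<open>
A small hypothetical check (not part of the original theory): continuity of
the body follows from the \<open>cont2cont\<close> rules, so the simplifier contracts the
redex.
\<close>
lemma "(\<Lambda> x. x)\<cdot>y = y"
  by simp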
text \<open>Eta-equality for continuous functions\<close>
lemma eta_cfun: "(\<Lambda> x. f\<cdot>x) = f"
by (rule Rep_cfun_inverse)
text \<open>Extensionality for continuous functions\<close>
lemma cfun_eq_iff: "f = g \<longleftrightarrow> (\<forall>x. f\<cdot>x = g\<cdot>x)"
by (simp add: Rep_cfun_inject [symmetric] fun_eq_iff)
lemma cfun_eqI: "(\<And>x. f\<cdot>x = g\<cdot>x) \<Longrightarrow> f = g"
by (simp add: cfun_eq_iff)
text \<open>Extensionality wrt. ordering for continuous functions\<close>
lemma cfun_below_iff: "f \<sqsubseteq> g \<longleftrightarrow> (\<forall>x. f\<cdot>x \<sqsubseteq> g\<cdot>x)"
by (simp add: below_cfun_def fun_below_iff)
lemma cfun_belowI: "(\<And>x. f\<cdot>x \<sqsubseteq> g\<cdot>x) \<Longrightarrow> f \<sqsubseteq> g"
by (simp add: cfun_below_iff)
text \<open>Congruence for continuous function application\<close>
lemma cfun_cong: "f = g \<Longrightarrow> x = y \<Longrightarrow> f\<cdot>x = g\<cdot>y"
by simp
lemma cfun_fun_cong: "f = g \<Longrightarrow> f\<cdot>x = g\<cdot>x"
by simp
lemma cfun_arg_cong: "x = y \<Longrightarrow> f\<cdot>x = f\<cdot>y"
by simp
subsection \<open>Continuity of application\<close>
lemma cont_Rep_cfun1: "cont (\<lambda>f. f\<cdot>x)"
by (rule cont_Rep_cfun [OF cont_id, THEN cont2cont_fun])
lemma cont_Rep_cfun2: "cont (\<lambda>x. f\<cdot>x)"
using Rep_cfun [where x = f] by (simp add: cfun_def)
lemmas monofun_Rep_cfun = cont_Rep_cfun [THEN cont2mono]
lemmas monofun_Rep_cfun1 = cont_Rep_cfun1 [THEN cont2mono]
lemmas monofun_Rep_cfun2 = cont_Rep_cfun2 [THEN cont2mono]
text \<open>contlub, cont properties of \<^term>\<open>Rep_cfun\<close> in each argument\<close>
lemma contlub_cfun_arg: "chain Y \<Longrightarrow> f\<cdot>(\<Squnion>i. Y i) = (\<Squnion>i. f\<cdot>(Y i))"
by (rule cont_Rep_cfun2 [THEN cont2contlubE])
lemma contlub_cfun_fun: "chain F \<Longrightarrow> (\<Squnion>i. F i)\<cdot>x = (\<Squnion>i. F i\<cdot>x)"
by (rule cont_Rep_cfun1 [THEN cont2contlubE])
text \<open>monotonicity of application\<close>
lemma monofun_cfun_fun: "f \<sqsubseteq> g \<Longrightarrow> f\<cdot>x \<sqsubseteq> g\<cdot>x"
by (simp add: cfun_below_iff)
lemma monofun_cfun_arg: "x \<sqsubseteq> y \<Longrightarrow> f\<cdot>x \<sqsubseteq> f\<cdot>y"
by (rule monofun_Rep_cfun2 [THEN monofunE])
lemma monofun_cfun: "f \<sqsubseteq> g \<Longrightarrow> x \<sqsubseteq> y \<Longrightarrow> f\<cdot>x \<sqsubseteq> g\<cdot>y"
by (rule below_trans [OF monofun_cfun_fun monofun_cfun_arg])
text \<open>ch2ch - rules for the type \<^typ>\<open>'a \<rightarrow> 'b\<close>\<close>
lemma chain_monofun: "chain Y \<Longrightarrow> chain (\<lambda>i. f\<cdot>(Y i))"
by (erule monofun_Rep_cfun2 [THEN ch2ch_monofun])
lemma ch2ch_Rep_cfunR: "chain Y \<Longrightarrow> chain (\<lambda>i. f\<cdot>(Y i))"
by (rule monofun_Rep_cfun2 [THEN ch2ch_monofun])
lemma ch2ch_Rep_cfunL: "chain F \<Longrightarrow> chain (\<lambda>i. (F i)\<cdot>x)"
by (rule monofun_Rep_cfun1 [THEN ch2ch_monofun])
lemma ch2ch_Rep_cfun [simp]: "chain F \<Longrightarrow> chain Y \<Longrightarrow> chain (\<lambda>i. (F i)\<cdot>(Y i))"
by (simp add: chain_def monofun_cfun)
lemma ch2ch_LAM [simp]:
"(\<And>x. chain (\<lambda>i. S i x)) \<Longrightarrow> (\<And>i. cont (\<lambda>x. S i x)) \<Longrightarrow> chain (\<lambda>i. \<Lambda> x. S i x)"
by (simp add: chain_def cfun_below_iff)
text \<open>contlub, cont properties of \<^term>\<open>Rep_cfun\<close> in both arguments\<close>
lemma lub_APP: "chain F \<Longrightarrow> chain Y \<Longrightarrow> (\<Squnion>i. F i\<cdot>(Y i)) = (\<Squnion>i. F i)\<cdot>(\<Squnion>i. Y i)"
by (simp add: contlub_cfun_fun contlub_cfun_arg diag_lub)
lemma lub_LAM:
assumes "\<And>x. chain (\<lambda>i. F i x)"
and "\<And>i. cont (\<lambda>x. F i x)"
shows "(\<Squnion>i. \<Lambda> x. F i x) = (\<Lambda> x. \<Squnion>i. F i x)"
using assms by (simp add: lub_cfun lub_fun ch2ch_lambda)
lemmas lub_distribs = lub_APP lub_LAM
text \<open>strictness\<close>
lemma strictI: "f\<cdot>x = \<bottom> \<Longrightarrow> f\<cdot>\<bottom> = \<bottom>"
apply (rule bottomI)
apply (erule subst)
apply (rule minimal [THEN monofun_cfun_arg])
done
text \<open>type \<^typ>\<open>'a \<rightarrow> 'b\<close> is chain complete\<close>
lemma lub_cfun: "chain F \<Longrightarrow> (\<Squnion>i. F i) = (\<Lambda> x. \<Squnion>i. F i\<cdot>x)"
by (simp add: lub_cfun lub_fun ch2ch_lambda)
subsection \<open>Continuity simplification procedure\<close>
text \<open>cont2cont lemma for \<^term>\<open>Rep_cfun\<close>\<close>
lemma cont2cont_APP [simp, cont2cont]:
assumes f: "cont (\<lambda>x. f x)"
assumes t: "cont (\<lambda>x. t x)"
shows "cont (\<lambda>x. (f x)\<cdot>(t x))"
proof -
from cont_Rep_cfun1 f have "cont (\<lambda>x. (f x)\<cdot>y)" for y
by (rule cont_compose)
with t cont_Rep_cfun2 show "cont (\<lambda>x. (f x)\<cdot>(t x))"
by (rule cont_apply)
qed
text \<open>
Two specific lemmas for the combination of LCF and HOL terms.
These lemmas are needed in theories that use types like \<^typ>\<open>'a \<rightarrow> 'b \<Rightarrow> 'c\<close>.
\<close>
lemma cont_APP_app [simp]: "cont f \<Longrightarrow> cont g \<Longrightarrow> cont (\<lambda>x. ((f x)\<cdot>(g x)) s)"
by (rule cont2cont_APP [THEN cont2cont_fun])
lemma cont_APP_app_app [simp]: "cont f \<Longrightarrow> cont g \<Longrightarrow> cont (\<lambda>x. ((f x)\<cdot>(g x)) s t)"
by (rule cont_APP_app [THEN cont2cont_fun])
text \<open>cont2mono Lemma for \<^term>\<open>\<lambda>x. LAM y. c1(x)(y)\<close>\<close>
lemma cont2mono_LAM:
"\<lbrakk>\<And>x. cont (\<lambda>y. f x y); \<And>y. monofun (\<lambda>x. f x y)\<rbrakk>
\<Longrightarrow> monofun (\<lambda>x. \<Lambda> y. f x y)"
by (simp add: monofun_def cfun_below_iff)
text \<open>cont2cont Lemma for \<^term>\<open>\<lambda>x. LAM y. f x y\<close>\<close>
text \<open>
Not suitable as a cont2cont rule, because on nested lambdas
it causes exponential blow-up in the number of subgoals.
\<close>
lemma cont2cont_LAM:
assumes f1: "\<And>x. cont (\<lambda>y. f x y)"
assumes f2: "\<And>y. cont (\<lambda>x. f x y)"
shows "cont (\<lambda>x. \<Lambda> y. f x y)"
proof (rule cont_Abs_cfun)
from f1 show "f x \<in> cfun" for x
by (simp add: cfun_def)
from f2 show "cont f"
by (rule cont2cont_lambda)
qed
text \<open>
This version does work as a cont2cont rule, since it
has only a single subgoal.
\<close>
lemma cont2cont_LAM' [simp, cont2cont]:
fixes f :: "'a::cpo \<Rightarrow> 'b::cpo \<Rightarrow> 'c::cpo"
assumes f: "cont (\<lambda>p. f (fst p) (snd p))"
shows "cont (\<lambda>x. \<Lambda> y. f x y)"
using assms by (simp add: cont2cont_LAM prod_cont_iff)
lemma cont2cont_LAM_discrete [simp, cont2cont]:
"(\<And>y::'a::discrete_cpo. cont (\<lambda>x. f x y)) \<Longrightarrow> cont (\<lambda>x. \<Lambda> y. f x y)"
by (simp add: cont2cont_LAM)
subsection \<open>Miscellaneous\<close>
text \<open>Monotonicity of \<^term>\<open>Abs_cfun\<close>\<close>
lemma monofun_LAM: "cont f \<Longrightarrow> cont g \<Longrightarrow> (\<And>x. f x \<sqsubseteq> g x) \<Longrightarrow> (\<Lambda> x. f x) \<sqsubseteq> (\<Lambda> x. g x)"
by (simp add: cfun_below_iff)
text \<open>some lemmata for functions with flat/chfin domain/range types\<close>
lemma chfin_Rep_cfunR: "chain Y \<Longrightarrow> \<forall>s. \<exists>n. (LUB i. Y i)\<cdot>s = Y n\<cdot>s"
for Y :: "nat \<Rightarrow> 'a::cpo \<rightarrow> 'b::chfin"
apply (rule allI)
apply (subst contlub_cfun_fun)
apply assumption
apply (fast intro!: lub_eqI chfin lub_finch2 chfin2finch ch2ch_Rep_cfunL)
done
lemma adm_chfindom: "adm (\<lambda>(u::'a::cpo \<rightarrow> 'b::chfin). P(u\<cdot>s))"
by (rule adm_subst, simp, rule adm_chfin)
subsection \<open>Continuous injection-retraction pairs\<close>
text \<open>Continuous retractions are strict.\<close>
lemma retraction_strict: "\<forall>x. f\<cdot>(g\<cdot>x) = x \<Longrightarrow> f\<cdot>\<bottom> = \<bottom>"
apply (rule bottomI)
apply (drule_tac x="\<bottom>" in spec)
apply (erule subst)
apply (rule monofun_cfun_arg)
apply (rule minimal)
done
lemma injection_eq: "\<forall>x. f\<cdot>(g\<cdot>x) = x \<Longrightarrow> (g\<cdot>x = g\<cdot>y) = (x = y)"
apply (rule iffI)
apply (drule_tac f=f in cfun_arg_cong)
apply simp
apply simp
done
lemma injection_below: "\<forall>x. f\<cdot>(g\<cdot>x) = x \<Longrightarrow> (g\<cdot>x \<sqsubseteq> g\<cdot>y) = (x \<sqsubseteq> y)"
apply (rule iffI)
apply (drule_tac f=f in monofun_cfun_arg)
apply simp
apply (erule monofun_cfun_arg)
done
lemma injection_defined_rev: "\<forall>x. f\<cdot>(g\<cdot>x) = x \<Longrightarrow> g\<cdot>z = \<bottom> \<Longrightarrow> z = \<bottom>"
apply (drule_tac f=f in cfun_arg_cong)
apply (simp add: retraction_strict)
done
lemma injection_defined: "\<forall>x. f\<cdot>(g\<cdot>x) = x \<Longrightarrow> z \<noteq> \<bottom> \<Longrightarrow> g\<cdot>z \<noteq> \<bottom>"
by (erule contrapos_nn, rule injection_defined_rev)
text \<open>a result about functions with flat codomain\<close>
lemma flat_eqI: "x \<sqsubseteq> y \<Longrightarrow> x \<noteq> \<bottom> \<Longrightarrow> x = y"
for x y :: "'a::flat"
by (drule ax_flat) simp
lemma flat_codom: "f\<cdot>x = c \<Longrightarrow> f\<cdot>\<bottom> = \<bottom> \<or> (\<forall>z. f\<cdot>z = c)"
for c :: "'b::flat"
apply (cases "f\<cdot>x = \<bottom>")
apply (rule disjI1)
apply (rule bottomI)
apply (erule_tac t="\<bottom>" in subst)
apply (rule minimal [THEN monofun_cfun_arg])
apply clarify
apply (rule_tac a = "f\<cdot>\<bottom>" in refl [THEN box_equals])
apply (erule minimal [THEN monofun_cfun_arg, THEN flat_eqI])
apply (erule minimal [THEN monofun_cfun_arg, THEN flat_eqI])
done
subsection \<open>Identity and composition\<close>
definition ID :: "'a \<rightarrow> 'a"
where "ID = (\<Lambda> x. x)"
definition cfcomp :: "('b \<rightarrow> 'c) \<rightarrow> ('a \<rightarrow> 'b) \<rightarrow> 'a \<rightarrow> 'c"
where oo_def: "cfcomp = (\<Lambda> f g x. f\<cdot>(g\<cdot>x))"
abbreviation cfcomp_syn :: "['b \<rightarrow> 'c, 'a \<rightarrow> 'b] \<Rightarrow> 'a \<rightarrow> 'c" (infixr "oo" 100)
where "f oo g == cfcomp\<cdot>f\<cdot>g"
lemma ID1 [simp]: "ID\<cdot>x = x"
by (simp add: ID_def)
lemma cfcomp1: "(f oo g) = (\<Lambda> x. f\<cdot>(g\<cdot>x))"
by (simp add: oo_def)
lemma cfcomp2 [simp]: "(f oo g)\<cdot>x = f\<cdot>(g\<cdot>x)"
by (simp add: cfcomp1)
lemma cfcomp_LAM: "cont g \<Longrightarrow> f oo (\<Lambda> x. g x) = (\<Lambda> x. f\<cdot>(g x))"
by (simp add: cfcomp1)
lemma cfcomp_strict [simp]: "\<bottom> oo f = \<bottom>"
by (simp add: cfun_eq_iff)
text \<open>
Show that the interpretation of (pcpo, \<open>_\<rightarrow>_\<close>) forms a category.
\<^item> The class of objects is the interpretation of the syntactic class pcpo.
\<^item> The class of arrows between objects \<^typ>\<open>'a\<close> and \<^typ>\<open>'b\<close> is the interpretation of \<^typ>\<open>'a \<rightarrow> 'b\<close>.
\<^item> The identity arrow is the interpretation of \<^term>\<open>ID\<close>.
\<^item> The composition of f and g is the interpretation of \<open>oo\<close>.
\<close>
lemma ID2 [simp]: "f oo ID = f"
by (rule cfun_eqI, simp)
lemma ID3 [simp]: "ID oo f = f"
by (rule cfun_eqI) simp
lemma assoc_oo: "f oo (g oo h) = (f oo g) oo h"
by (rule cfun_eqI) simp
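text \<open>
A hypothetical sanity check (not part of the original theory): composition
unfolds under application.
\<close>
lemma "((f oo g) oo h)\<cdot>x = f\<cdot>(g\<cdot>(h\<cdot>x))"
  by simp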
subsection \<open>Strictified functions\<close>
default_sort pcpo
definition seq :: "'a \<rightarrow> 'b \<rightarrow> 'b"
where "seq = (\<Lambda> x. if x = \<bottom> then \<bottom> else ID)"
lemma cont2cont_if_bottom [cont2cont, simp]:
assumes f: "cont (\<lambda>x. f x)"
and g: "cont (\<lambda>x. g x)"
shows "cont (\<lambda>x. if f x = \<bottom> then \<bottom> else g x)"
proof (rule cont_apply [OF f])
show "cont (\<lambda>y. if y = \<bottom> then \<bottom> else g x)" for x
unfolding cont_def is_lub_def is_ub_def ball_simps
by (simp add: lub_eq_bottom_iff)
show "cont (\<lambda>x. if y = \<bottom> then \<bottom> else g x)" for y
by (simp add: g)
qed
lemma seq_conv_if: "seq\<cdot>x = (if x = \<bottom> then \<bottom> else ID)"
by (simp add: seq_def)
lemma seq_simps [simp]:
"seq\<cdot>\<bottom> = \<bottom>"
"seq\<cdot>x\<cdot>\<bottom> = \<bottom>"
"x \<noteq> \<bottom> \<Longrightarrow> seq\<cdot>x = ID"
by (simp_all add: seq_conv_if)
definition strictify :: "('a \<rightarrow> 'b) \<rightarrow> 'a \<rightarrow> 'b"
where "strictify = (\<Lambda> f x. seq\<cdot>x\<cdot>(f\<cdot>x))"
lemma strictify_conv_if: "strictify\<cdot>f\<cdot>x = (if x = \<bottom> then \<bottom> else f\<cdot>x)"
by (simp add: strictify_def)
lemma strictify1 [simp]: "strictify\<cdot>f\<cdot>\<bottom> = \<bottom>"
by (simp add: strictify_conv_if)
lemma strictify2 [simp]: "x \<noteq> \<bottom> \<Longrightarrow> strictify\<cdot>f\<cdot>x = f\<cdot>x"
by (simp add: strictify_conv_if)
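text \<open>
A hypothetical illustration (not part of the original theory): \<open>strictify\<close>
turns even a constant function into a strict one.
\<close>
lemma "strictify\<cdot>(\<Lambda> y. x)\<cdot>\<bottom> = \<bottom>"
  by simp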
subsection \<open>Continuity of let-bindings\<close>
lemma cont2cont_Let:
assumes f: "cont (\<lambda>x. f x)"
assumes g1: "\<And>y. cont (\<lambda>x. g x y)"
assumes g2: "\<And>x. cont (\<lambda>y. g x y)"
shows "cont (\<lambda>x. let y = f x in g x y)"
unfolding Let_def using f g2 g1 by (rule cont_apply)
lemma cont2cont_Let' [simp, cont2cont]:
assumes f: "cont (\<lambda>x. f x)"
assumes g: "cont (\<lambda>p. g (fst p) (snd p))"
shows "cont (\<lambda>x. let y = f x in g x y)"
using f
proof (rule cont2cont_Let)
from g show "cont (\<lambda>y. g x y)" for x
by (simp add: prod_cont_iff)
from g show "cont (\<lambda>x. g x y)" for y
by (simp add: prod_cont_iff)
qed
text \<open>The simple version (suggested by Joachim Breitner) is needed if
the type of the defined term is not a cpo.\<close>
lemma cont2cont_Let_simple [simp, cont2cont]:
assumes "\<And>y. cont (\<lambda>x. g x y)"
shows "cont (\<lambda>x. let y = t in g x y)"
unfolding Let_def using assms .
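text \<open>
A hypothetical example (not part of the original theory): with the rules
above the simplifier discharges continuity goals involving let-bindings.
\<close>
lemma "cont (\<lambda>x. let y = x in y)"
  by simp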
end
diff --git a/src/HOL/HOLCF/Pcpo.thy b/src/HOL/HOLCF/Pcpo.thy
--- a/src/HOL/HOLCF/Pcpo.thy
+++ b/src/HOL/HOLCF/Pcpo.thy
@@ -1,258 +1,258 @@
(* Title: HOL/HOLCF/Pcpo.thy
Author: Franz Regensburger
*)
section \<open>Classes cpo and pcpo\<close>
theory Pcpo
imports Porder
begin
subsection \<open>Complete partial orders\<close>
text \<open>The class cpo of chain complete partial orders\<close>
class cpo = po +
assumes cpo: "chain S \<Longrightarrow> \<exists>x. range S <<| x"
begin
text \<open>in a cpo, everything equal to THE lub has lub properties for every chain\<close>
lemma cpo_lubI: "chain S \<Longrightarrow> range S <<| (\<Squnion>i. S i)"
by (fast dest: cpo elim: is_lub_lub)
lemma thelubE: "\<lbrakk>chain S; (\<Squnion>i. S i) = l\<rbrakk> \<Longrightarrow> range S <<| l"
by (blast dest: cpo intro: is_lub_lub)
text \<open>Properties of the lub\<close>
lemma is_ub_thelub: "chain S \<Longrightarrow> S x \<sqsubseteq> (\<Squnion>i. S i)"
by (blast dest: cpo intro: is_lub_lub [THEN is_lub_rangeD1])
lemma is_lub_thelub: "\<lbrakk>chain S; range S <| x\<rbrakk> \<Longrightarrow> (\<Squnion>i. S i) \<sqsubseteq> x"
by (blast dest: cpo intro: is_lub_lub [THEN is_lubD2])
lemma lub_below_iff: "chain S \<Longrightarrow> (\<Squnion>i. S i) \<sqsubseteq> x \<longleftrightarrow> (\<forall>i. S i \<sqsubseteq> x)"
by (simp add: is_lub_below_iff [OF cpo_lubI] is_ub_def)
lemma lub_below: "\<lbrakk>chain S; \<And>i. S i \<sqsubseteq> x\<rbrakk> \<Longrightarrow> (\<Squnion>i. S i) \<sqsubseteq> x"
by (simp add: lub_below_iff)
lemma below_lub: "\<lbrakk>chain S; x \<sqsubseteq> S i\<rbrakk> \<Longrightarrow> x \<sqsubseteq> (\<Squnion>i. S i)"
by (erule below_trans, erule is_ub_thelub)
lemma lub_range_mono: "\<lbrakk>range X \<subseteq> range Y; chain Y; chain X\<rbrakk> \<Longrightarrow> (\<Squnion>i. X i) \<sqsubseteq> (\<Squnion>i. Y i)"
apply (erule lub_below)
apply (subgoal_tac "\<exists>j. X i = Y j")
apply clarsimp
apply (erule is_ub_thelub)
apply auto
done
lemma lub_range_shift: "chain Y \<Longrightarrow> (\<Squnion>i. Y (i + j)) = (\<Squnion>i. Y i)"
apply (rule below_antisym)
apply (rule lub_range_mono)
apply fast
apply assumption
apply (erule chain_shift)
apply (rule lub_below)
apply assumption
apply (rule_tac i="i" in below_lub)
apply (erule chain_shift)
apply (erule chain_mono)
apply (rule le_add1)
done
lemma maxinch_is_thelub: "chain Y \<Longrightarrow> max_in_chain i Y = ((\<Squnion>i. Y i) = Y i)"
apply (rule iffI)
apply (fast intro!: lub_eqI lub_finch1)
apply (unfold max_in_chain_def)
apply (safe intro!: below_antisym)
apply (fast elim!: chain_mono)
apply (drule sym)
apply (force elim!: is_ub_thelub)
done
text \<open>the \<open>\<sqsubseteq>\<close> relation between two chains is preserved by their lubs\<close>
lemma lub_mono: "\<lbrakk>chain X; chain Y; \<And>i. X i \<sqsubseteq> Y i\<rbrakk> \<Longrightarrow> (\<Squnion>i. X i) \<sqsubseteq> (\<Squnion>i. Y i)"
by (fast elim: lub_below below_lub)
text \<open>the = relation between two chains is preserved by their lubs\<close>
lemma lub_eq: "(\<And>i. X i = Y i) \<Longrightarrow> (\<Squnion>i. X i) = (\<Squnion>i. Y i)"
by simp
lemma ch2ch_lub:
assumes 1: "\<And>j. chain (\<lambda>i. Y i j)"
assumes 2: "\<And>i. chain (\<lambda>j. Y i j)"
shows "chain (\<lambda>i. \<Squnion>j. Y i j)"
apply (rule chainI)
apply (rule lub_mono [OF 2 2])
apply (rule chainE [OF 1])
done
lemma diag_lub:
assumes 1: "\<And>j. chain (\<lambda>i. Y i j)"
assumes 2: "\<And>i. chain (\<lambda>j. Y i j)"
shows "(\<Squnion>i. \<Squnion>j. Y i j) = (\<Squnion>i. Y i i)"
proof (rule below_antisym)
have 3: "chain (\<lambda>i. Y i i)"
apply (rule chainI)
apply (rule below_trans)
apply (rule chainE [OF 1])
apply (rule chainE [OF 2])
done
have 4: "chain (\<lambda>i. \<Squnion>j. Y i j)"
by (rule ch2ch_lub [OF 1 2])
show "(\<Squnion>i. \<Squnion>j. Y i j) \<sqsubseteq> (\<Squnion>i. Y i i)"
apply (rule lub_below [OF 4])
apply (rule lub_below [OF 2])
apply (rule below_lub [OF 3])
apply (rule below_trans)
apply (rule chain_mono [OF 1 max.cobounded1])
apply (rule chain_mono [OF 2 max.cobounded2])
done
show "(\<Squnion>i. Y i i) \<sqsubseteq> (\<Squnion>i. \<Squnion>j. Y i j)"
apply (rule lub_mono [OF 3 4])
apply (rule is_ub_thelub [OF 2])
done
qed
lemma ex_lub:
assumes 1: "\<And>j. chain (\<lambda>i. Y i j)"
assumes 2: "\<And>i. chain (\<lambda>j. Y i j)"
shows "(\<Squnion>i. \<Squnion>j. Y i j) = (\<Squnion>j. \<Squnion>i. Y i j)"
by (simp add: diag_lub 1 2)
end
subsection \<open>Pointed cpos\<close>
text \<open>The class pcpo of pointed cpos\<close>
class pcpo = cpo +
assumes least: "\<exists>x. \<forall>y. x \<sqsubseteq> y"
begin
definition bottom :: "'a" ("\<bottom>")
where "bottom = (THE x. \<forall>y. x \<sqsubseteq> y)"
lemma minimal [iff]: "\<bottom> \<sqsubseteq> x"
unfolding bottom_def
apply (rule the1I2)
apply (rule ex_ex1I)
apply (rule least)
apply (blast intro: below_antisym)
apply simp
done
end
text \<open>Old "UU" syntax:\<close>
syntax UU :: logic
translations "UU" \<rightharpoonup> "CONST bottom"
text \<open>Simproc to rewrite \<^term>\<open>\<bottom> = x\<close> to \<^term>\<open>x = \<bottom>\<close>.\<close>
setup \<open>Reorient_Proc.add (fn \<^Const_>\<open>bottom _\<close> => true | _ => false)\<close>
-simproc_setup reorient_bottom ("\<bottom> = x") = Reorient_Proc.proc
+simproc_setup reorient_bottom ("\<bottom> = x") = \<open>K Reorient_Proc.proc\<close>
text \<open>useful lemmas about \<^term>\<open>\<bottom>\<close>\<close>
lemma below_bottom_iff [simp]: "x \<sqsubseteq> \<bottom> \<longleftrightarrow> x = \<bottom>"
by (simp add: po_eq_conv)
lemma eq_bottom_iff: "x = \<bottom> \<longleftrightarrow> x \<sqsubseteq> \<bottom>"
by simp
lemma bottomI: "x \<sqsubseteq> \<bottom> \<Longrightarrow> x = \<bottom>"
by (subst eq_bottom_iff)
lemma lub_eq_bottom_iff: "chain Y \<Longrightarrow> (\<Squnion>i. Y i) = \<bottom> \<longleftrightarrow> (\<forall>i. Y i = \<bottom>)"
by (simp only: eq_bottom_iff lub_below_iff)
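text \<open>
A hypothetical consequence (not part of the original theory): a chain with a
non-bottom element has a non-bottom lub.
\<close>
lemma "chain Y \<Longrightarrow> Y i \<noteq> \<bottom> \<Longrightarrow> (\<Squnion>j. Y j) \<noteq> \<bottom>"
  by (auto simp add: lub_eq_bottom_iff)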
subsection \<open>Chain-finite and flat cpos\<close>
text \<open>further useful classes for HOLCF domains\<close>
class chfin = po +
assumes chfin: "chain Y \<Longrightarrow> \<exists>n. max_in_chain n Y"
begin
subclass cpo
apply standard
apply (frule chfin)
apply (blast intro: lub_finch1)
done
lemma chfin2finch: "chain Y \<Longrightarrow> finite_chain Y"
by (simp add: chfin finite_chain_def)
end
class flat = pcpo +
assumes ax_flat: "x \<sqsubseteq> y \<Longrightarrow> x = \<bottom> \<or> x = y"
begin
subclass chfin
proof
fix Y
assume *: "chain Y"
show "\<exists>n. max_in_chain n Y"
apply (unfold max_in_chain_def)
apply (cases "\<forall>i. Y i = \<bottom>")
apply simp
apply simp
apply (erule exE)
apply (rule_tac x="i" in exI)
apply clarify
using * apply (blast dest: chain_mono ax_flat)
done
qed
lemma flat_below_iff: "x \<sqsubseteq> y \<longleftrightarrow> x = \<bottom> \<or> x = y"
by (safe dest!: ax_flat)
lemma flat_eq: "a \<noteq> \<bottom> \<Longrightarrow> a \<sqsubseteq> b = (a = b)"
by (safe dest!: ax_flat)
end
subsection \<open>Discrete cpos\<close>
class discrete_cpo = below +
assumes discrete_cpo [simp]: "x \<sqsubseteq> y \<longleftrightarrow> x = y"
begin
subclass po
by standard simp_all
text \<open>In a discrete cpo, every chain is constant\<close>
lemma discrete_chain_const:
assumes S: "chain S"
shows "\<exists>x. S = (\<lambda>i. x)"
proof (intro exI ext)
fix i :: nat
from S le0 have "S 0 \<sqsubseteq> S i" by (rule chain_mono)
then have "S 0 = S i" by simp
then show "S i = S 0" by (rule sym)
qed
subclass chfin
proof
fix S :: "nat \<Rightarrow> 'a"
assume S: "chain S"
then have "\<exists>x. S = (\<lambda>i. x)"
by (rule discrete_chain_const)
then have "max_in_chain 0 S"
by (auto simp: max_in_chain_def)
then show "\<exists>i. max_in_chain i S" ..
qed
end
end
diff --git a/src/HOL/HOLCF/Tools/fixrec.ML b/src/HOL/HOLCF/Tools/fixrec.ML
--- a/src/HOL/HOLCF/Tools/fixrec.ML
+++ b/src/HOL/HOLCF/Tools/fixrec.ML
@@ -1,403 +1,403 @@
(* Title: HOL/HOLCF/Tools/fixrec.ML
Author: Amber Telfer and Brian Huffman
Recursive function definition package for HOLCF.
*)
signature FIXREC =
sig
val add_fixrec: (binding * typ option * mixfix) list
-> (bool * (Attrib.binding * term)) list -> local_theory -> local_theory
val add_fixrec_cmd: (binding * string option * mixfix) list
-> (bool * (Attrib.binding * string)) list -> local_theory -> local_theory
val add_matchers: (string * string) list -> theory -> theory
val fixrec_simp_tac: Proof.context -> int -> tactic
end
structure Fixrec : FIXREC =
struct
open HOLCF_Library
infixr 6 ->>
infix -->>
infix 9 `
val def_cont_fix_eq = @{thm def_cont_fix_eq}
val def_cont_fix_ind = @{thm def_cont_fix_ind}
fun fixrec_err s = error ("fixrec definition error:\n" ^ s)
(*************************************************************************)
(***************************** building types ****************************)
(*************************************************************************)
local
fun binder_cfun \<^Type>\<open>cfun T U\<close> = T :: binder_cfun U
| binder_cfun \<^Type>\<open>fun T U\<close> = T :: binder_cfun U
| binder_cfun _ = []
fun body_cfun \<^Type>\<open>cfun _ U\<close> = body_cfun U
| body_cfun \<^Type>\<open>fun _ U\<close> = body_cfun U
| body_cfun T = T
in
fun matcherT (T, U) =
body_cfun T ->> (binder_cfun T -->> U) ->> U
end
(*************************************************************************)
(***************************** building terms ****************************)
(*************************************************************************)
val mk_trp = HOLogic.mk_Trueprop
(* splits a cterm into the right and lefthand sides of equality *)
fun dest_eqs t = HOLogic.dest_eq (HOLogic.dest_Trueprop t)
(* similar to Thm.head_of, but for continuous application *)
fun chead_of \<^Const_>\<open>Rep_cfun _ _ for f _\<close> = chead_of f
| chead_of u = u
infix 1 === val (op ===) = HOLogic.mk_eq
fun mk_mplus (t, u) =
let val T = dest_matchT (Term.fastype_of t)
in \<^Const>\<open>Fixrec.mplus T\<close> ` t ` u end
fun mk_run t =
let
val mT = Term.fastype_of t
val T = dest_matchT mT
val run = \<^Const>\<open>Fixrec.run T\<close>
in
case t of
\<^Const_>\<open>Rep_cfun _ _\<close> $ \<^Const_>\<open>Fixrec.succeed _\<close> $ u => u
| _ => run ` t
end
(*************************************************************************)
(************* fixed-point definitions and unfolding theorems ************)
(*************************************************************************)
structure FixrecUnfoldData = Generic_Data
(
type T = thm Symtab.table
val empty = Symtab.empty
fun merge data : T = Symtab.merge (K true) data
)
local
fun name_of (Const (n, _)) = n
| name_of (Free (n, _)) = n
| name_of t = raise TERM ("Fixrec.add_unfold: lhs not a constant", [t])
val lhs_name =
name_of o head_of o fst o HOLogic.dest_eq o HOLogic.dest_Trueprop o Thm.prop_of
in
val add_unfold : attribute =
Thm.declaration_attribute
(fn th => FixrecUnfoldData.map (Symtab.insert (K true) (lhs_name th, th)))
end
fun add_fixdefs
(fixes : ((binding * typ) * mixfix) list)
(spec : (Attrib.binding * term) list)
(lthy : local_theory) =
let
val thy = Proof_Context.theory_of lthy
val names = map (Binding.name_of o fst o fst) fixes
val all_names = space_implode "_" names
val (lhss, rhss) = ListPair.unzip (map (dest_eqs o snd) spec)
val functional = lambda_tuple lhss (mk_tuple rhss)
val fixpoint = mk_fix (mk_cabs functional)
val cont_thm =
let
val prop = mk_trp (mk_cont functional)
fun err _ = error (
"Continuity proof failed please check that cont2cont rules\n" ^
"or simp rules are configured for all non-HOLCF constants.\n" ^
"The error occurred for the goal statement:\n" ^
Syntax.string_of_term lthy prop)
val rules = Named_Theorems.get lthy \<^named_theorems>\<open>cont2cont\<close>
val fast_tac = SOLVED' (REPEAT_ALL_NEW (match_tac lthy (rev rules)))
val slow_tac = SOLVED' (simp_tac lthy)
val tac = fast_tac 1 ORELSE slow_tac 1 ORELSE err
in
Goal.prove lthy [] [] prop (K tac)
end
fun one_def (Free(n,_)) r =
let val b = Long_Name.base_name n
in ((Binding.name (Thm.def_name b), []), r) end
| one_def _ _ = fixrec_err "fixdefs: lhs not of correct form"
fun defs [] _ = []
| defs (l::[]) r = [one_def l r]
| defs (l::ls) r = one_def l (mk_fst r) :: defs ls (mk_snd r)
val fixdefs = defs lhss fixpoint
val (fixdef_thms : (term * (string * thm)) list, lthy) = lthy
|> fold_map Local_Theory.define (map (apfst fst) fixes ~~ fixdefs)
fun pair_equalI (thm1, thm2) = @{thm Pair_equalI} OF [thm1, thm2]
val tuple_fixdef_thm = foldr1 pair_equalI (map (snd o snd) fixdef_thms)
val P = Var (("P", 0), map Term.fastype_of lhss ---> \<^Type>\<open>bool\<close>)
val predicate = lambda_tuple lhss (list_comb (P, lhss))
val tuple_induct_thm = (def_cont_fix_ind OF [tuple_fixdef_thm, cont_thm])
|> Thm.instantiate' [] [SOME (Thm.global_cterm_of thy predicate)]
|> Local_Defs.unfold lthy @{thms split_paired_all split_conv split_strict}
val tuple_unfold_thm = (def_cont_fix_eq OF [tuple_fixdef_thm, cont_thm])
|> Local_Defs.unfold lthy @{thms split_conv}
fun unfolds [] _ = []
| unfolds (n::[]) thm = [(n, thm)]
| unfolds (n::ns) thm = let
val thmL = thm RS @{thm Pair_eqD1}
val thmR = thm RS @{thm Pair_eqD2}
in (n, thmL) :: unfolds ns thmR end
val unfold_thms = unfolds names tuple_unfold_thm
val induct_note : Attrib.binding * Thm.thm list =
let
val thm_name = Binding.qualify true all_names (Binding.name "induct")
in
((thm_name, []), [tuple_induct_thm])
end
fun unfold_note (name, thm) : Attrib.binding * Thm.thm list =
let
val thm_name = Binding.qualify true name (Binding.name "unfold")
- val src = Attrib.internal (K add_unfold)
+ val src = Attrib.internal \<^here> (K add_unfold)
in
((thm_name, [src]), [thm])
end
val (_, lthy) = lthy
|> fold_map Local_Theory.note (induct_note :: map unfold_note unfold_thms)
in
(lthy, names, fixdef_thms, map snd unfold_thms)
end
(*************************************************************************)
(*********** monadic notation and pattern matching compilation ***********)
(*************************************************************************)
structure FixrecMatchData = Theory_Data
(
type T = string Symtab.table
val empty = Symtab.empty
fun merge data = Symtab.merge (K true) data
)
(* associate match functions with pattern constants *)
fun add_matchers ms = FixrecMatchData.map (fold Symtab.update ms)
fun taken_names (t : term) : bstring list =
let
fun taken (Const(a,_), bs) = insert (op =) (Long_Name.base_name a) bs
| taken (Free(a,_) , bs) = insert (op =) a bs
| taken (f $ u , bs) = taken (f, taken (u, bs))
| taken (Abs(a,_,t), bs) = taken (t, insert (op =) a bs)
| taken (_ , bs) = bs
in
taken (t, [])
end
(* builds a monadic term for matching a pattern *)
(* returns (rhs, free variable, used varnames) *)
fun compile_pat match_name pat rhs taken =
let
fun comp_pat p rhs taken =
if is_Free p then (rhs, p, taken)
else comp_con (fastype_of p) p rhs [] taken
(* compiles a monadic term for a constructor pattern *)
and comp_con T p rhs vs taken =
case p of
\<^Const_>\<open>Rep_cfun _ _ for f x\<close> =>
let val (rhs', v, taken') = comp_pat x rhs taken
in comp_con T f rhs' (v::vs) taken' end
| f $ x =>
let val (rhs', v, taken') = comp_pat x rhs taken
in comp_con T f rhs' (v::vs) taken' end
| Const (c, cT) =>
let
val n = singleton (Name.variant_list taken) "v"
val v = Free(n, T)
val m = Const(match_name c, matcherT (cT, fastype_of rhs))
val k = big_lambdas vs rhs
in
(m`v`k, v, n::taken)
end
| _ => raise TERM ("fixrec: invalid pattern ", [p])
in
comp_pat pat rhs taken
end
(* builds a monadic term for matching a function definition pattern *)
(* returns (constant, (vars, matcher)) *)
fun compile_lhs match_name pat rhs vs taken =
case pat of
\<^Const_>\<open>Rep_cfun _ _ for f x\<close> =>
let val (rhs', v, taken') = compile_pat match_name x rhs taken
in compile_lhs match_name f rhs' (v::vs) taken' end
| Free(_,_) => (pat, (vs, rhs))
| Const(_,_) => (pat, (vs, rhs))
| _ => fixrec_err ("invalid function pattern: "
^ ML_Syntax.print_term pat)
fun strip_alls t =
(case try Logic.dest_all_global t of
SOME (_, u) => strip_alls u
| NONE => t)
fun compile_eq match_name eq =
let
val (lhs,rhs) = dest_eqs (Logic.strip_imp_concl (strip_alls eq))
in
compile_lhs match_name lhs (mk_succeed rhs) [] (taken_names eq)
end
(* this is the pattern-matching compiler function *)
fun compile_eqs match_name eqs =
let
val (consts, matchers) =
ListPair.unzip (map (compile_eq match_name) eqs)
val const =
case distinct (op =) consts of
[n] => n
| [] => fixrec_err "no defining equations for function"
| _ => fixrec_err "all equations in block must define the same function"
val vars =
case distinct (op = o apply2 length) (map fst matchers) of
[vars] => vars
| _ => fixrec_err "all equations in block must have the same arity"
(* rename so all matchers use same free variables *)
fun rename (vs, t) = Term.subst_free (filter_out (op =) (vs ~~ vars)) t
val rhs = big_lambdas vars (mk_run (foldr1 mk_mplus (map rename matchers)))
in
mk_trp (const === rhs)
end
(*************************************************************************)
(********************** Proving associated theorems **********************)
(*************************************************************************)
fun eta_tac i = CONVERSION Thm.eta_conversion i
fun fixrec_simp_tac ctxt =
let
val tab = FixrecUnfoldData.get (Context.Proof ctxt)
val concl = HOLogic.dest_Trueprop o Logic.strip_imp_concl o strip_alls
fun tac (t, i) =
let
val (c, _) =
(dest_Const o head_of o chead_of o fst o HOLogic.dest_eq o concl) t
val unfold_thm = the (Symtab.lookup tab c)
val rule = unfold_thm RS @{thm ssubst_lhs}
in
CHANGED (resolve_tac ctxt [rule] i THEN eta_tac i THEN asm_simp_tac ctxt i)
end
in
SUBGOAL (fn ti => the_default no_tac (try tac ti))
end
(* proves a block of pattern matching equations as theorems, using unfold *)
fun make_simps ctxt (unfold_thm, eqns : (Attrib.binding * term) list) =
let
val rule = unfold_thm RS @{thm ssubst_lhs}
val tac = resolve_tac ctxt [rule] 1 THEN eta_tac 1 THEN asm_simp_tac ctxt 1
fun prove_term t = Goal.prove ctxt [] [] t (K tac)
fun prove_eqn (bind, eqn_t) = (bind, prove_term eqn_t)
in
map prove_eqn eqns
end
(*************************************************************************)
(************************* Main fixrec function **************************)
(*************************************************************************)
local
(* code adapted from HOL/Tools/Datatype/primrec.ML *)
fun gen_fixrec
prep_spec
(raw_fixes : (binding * 'a option * mixfix) list)
(raw_spec' : (bool * (Attrib.binding * 'b)) list)
(lthy : local_theory) =
let
val (skips, raw_spec) = ListPair.unzip raw_spec'
val (fixes : ((binding * typ) * mixfix) list,
spec : (Attrib.binding * term) list) =
fst (prep_spec raw_fixes (map (fn s => (s, [], [])) raw_spec) lthy)
val names = map (Binding.name_of o fst o fst) fixes
fun check_head name =
member (op =) names name orelse
fixrec_err ("Illegal equation head. Expected " ^ commas_quote names)
val chead_of_spec =
chead_of o fst o dest_eqs o Logic.strip_imp_concl o strip_alls o snd
fun name_of (Free (n, _)) = tap check_head n
| name_of _ = fixrec_err ("unknown term")
val all_names = map (name_of o chead_of_spec) spec
fun block_of_name n =
map_filter
(fn (m,eq) => if m = n then SOME eq else NONE)
(all_names ~~ (spec ~~ skips))
val blocks = map block_of_name names
val matcher_tab = FixrecMatchData.get (Proof_Context.theory_of lthy)
fun match_name c =
case Symtab.lookup matcher_tab c of SOME m => m
| NONE => fixrec_err ("unknown pattern constructor: " ^ c)
val matches = map (compile_eqs match_name) (map (map (snd o fst)) blocks)
val spec' = map (pair Binding.empty_atts) matches
val (lthy, _, _, unfold_thms) =
add_fixdefs fixes spec' lthy
val blocks' = map (map fst o filter_out snd) blocks
val simps : (Attrib.binding * thm) list list =
map (make_simps lthy) (unfold_thms ~~ blocks')
fun mk_bind n : Attrib.binding =
(Binding.qualify true n (Binding.name "simps"), @{attributes [simp]})
val simps1 : (Attrib.binding * thm list) list =
map (fn (n,xs) => (mk_bind n, map snd xs)) (names ~~ simps)
val simps2 : (Attrib.binding * thm list) list =
map (apsnd (fn thm => [thm])) (flat simps)
val (_, lthy) = lthy
|> fold_map Local_Theory.note (simps1 @ simps2)
in
lthy
end
in
val add_fixrec = gen_fixrec Specification.check_multi_specs
val add_fixrec_cmd = gen_fixrec Specification.read_multi_specs
end (* local *)
(*************************************************************************)
(******************************** Parsers ********************************)
(*************************************************************************)
val opt_thm_name' : (bool * Attrib.binding) parser =
\<^keyword>\<open>(\<close> -- \<^keyword>\<open>unchecked\<close> -- \<^keyword>\<open>)\<close> >> K (true, Binding.empty_atts)
|| Parse_Spec.opt_thm_name ":" >> pair false
val spec' : (bool * (Attrib.binding * string)) parser =
opt_thm_name' -- Parse.prop >> (fn ((a, b), c) => (a, (b, c)))
val multi_specs' : (bool * (Attrib.binding * string)) list parser =
let val unexpected = Scan.ahead (Parse.name || \<^keyword>\<open>[\<close> || \<^keyword>\<open>(\<close>)
in Parse.enum1 "|" (spec' --| Scan.option (unexpected -- Parse.!!! \<^keyword>\<open>|\<close>)) end
val _ =
Outer_Syntax.local_theory \<^command_keyword>\<open>fixrec\<close> "define recursive functions (HOLCF)"
(Parse.vars -- (Parse.where_ |-- Parse.!!! multi_specs')
>> (fn (vars, specs) => add_fixrec_cmd vars specs))
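(* A hypothetical usage sketch (not part of this file), assuming a HOLCF theory
   with a lazy-list domain:

     domain 'a llist = lNil | lCons (lazy 'a) (lazy "'a llist")

     fixrec lmap :: "('a \<rightarrow> 'b) \<rightarrow> 'a llist \<rightarrow> 'b llist"
       where "lmap\<cdot>f\<cdot>lNil = lNil"
           | "lmap\<cdot>f\<cdot>(lCons\<cdot>x\<cdot>xs) = lCons\<cdot>(f\<cdot>x)\<cdot>(lmap\<cdot>f\<cdot>xs)"

   The package then provides the facts lmap.simps, lmap.unfold and lmap.induct. *)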
end
diff --git a/src/HOL/Library/Extended_Nat.thy b/src/HOL/Library/Extended_Nat.thy
--- a/src/HOL/Library/Extended_Nat.thy
+++ b/src/HOL/Library/Extended_Nat.thy
@@ -1,707 +1,707 @@
(* Title: HOL/Library/Extended_Nat.thy
Author: David von Oheimb, TU Muenchen; Florian Haftmann, TU Muenchen
Contributions: David Trachtenherz, TU Muenchen
*)
section \<open>Extended natural numbers (i.e. with infinity)\<close>
theory Extended_Nat
imports Main Countable Order_Continuity
begin
class infinity =
fixes infinity :: "'a" ("\<infinity>")
context
fixes f :: "nat \<Rightarrow> 'a::{canonically_ordered_monoid_add, linorder_topology, complete_linorder}"
begin
lemma sums_SUP[simp, intro]: "f sums (SUP n. \<Sum>i<n. f i)"
unfolding sums_def by (intro LIMSEQ_SUP monoI sum_mono2 zero_le) auto
lemma suminf_eq_SUP: "suminf f = (SUP n. \<Sum>i<n. f i)"
using sums_SUP by (rule sums_unique[symmetric])
end
subsection \<open>Type definition\<close>
text \<open>
We extend the standard natural numbers by a special value indicating
infinity.
\<close>
typedef enat = "UNIV :: nat option set" ..
text \<open>TODO: introduce enat as a coinductive datatype; enat is just \<^const>\<open>of_nat\<close>\<close>
definition enat :: "nat \<Rightarrow> enat" where
"enat n = Abs_enat (Some n)"
instantiation enat :: infinity
begin
definition "\<infinity> = Abs_enat None"
instance ..
end
instance enat :: countable
proof
show "\<exists>to_nat::enat \<Rightarrow> nat. inj to_nat"
by (rule exI[of _ "to_nat \<circ> Rep_enat"]) (simp add: inj_on_def Rep_enat_inject)
qed
old_rep_datatype enat "\<infinity> :: enat"
proof -
fix P i assume "\<And>j. P (enat j)" "P \<infinity>"
then show "P i"
proof induct
case (Abs_enat y) then show ?case
by (cases y rule: option.exhaust)
(auto simp: enat_def infinity_enat_def)
qed
qed (auto simp add: enat_def infinity_enat_def Abs_enat_inject)
declare [[coercion "enat::nat\<Rightarrow>enat"]]
lemmas enat2_cases = enat.exhaust[case_product enat.exhaust]
lemmas enat3_cases = enat.exhaust[case_product enat.exhaust enat.exhaust]
lemma not_infinity_eq [iff]: "(x \<noteq> \<infinity>) = (\<exists>i. x = enat i)"
by (cases x) auto
lemma not_enat_eq [iff]: "(\<forall>y. x \<noteq> enat y) = (x = \<infinity>)"
by (cases x) auto
lemma enat_ex_split: "(\<exists>c::enat. P c) \<longleftrightarrow> P \<infinity> \<or> (\<exists>c::nat. P c)"
by (metis enat.exhaust)
primrec the_enat :: "enat \<Rightarrow> nat"
where "the_enat (enat n) = n"
subsection \<open>Constructors and numbers\<close>
instantiation enat :: zero_neq_one
begin
definition
"0 = enat 0"
definition
"1 = enat 1"
instance
proof qed (simp add: zero_enat_def one_enat_def)
end
definition eSuc :: "enat \<Rightarrow> enat" where
"eSuc i = (case i of enat n \<Rightarrow> enat (Suc n) | \<infinity> \<Rightarrow> \<infinity>)"
lemma enat_0 [code_post]: "enat 0 = 0"
by (simp add: zero_enat_def)
lemma enat_1 [code_post]: "enat 1 = 1"
by (simp add: one_enat_def)
lemma enat_0_iff: "enat x = 0 \<longleftrightarrow> x = 0" "0 = enat x \<longleftrightarrow> x = 0"
by (auto simp add: zero_enat_def)
lemma enat_1_iff: "enat x = 1 \<longleftrightarrow> x = 1" "1 = enat x \<longleftrightarrow> x = 1"
by (auto simp add: one_enat_def)
lemma one_eSuc: "1 = eSuc 0"
by (simp add: zero_enat_def one_enat_def eSuc_def)
lemma infinity_ne_i0 [simp]: "(\<infinity>::enat) \<noteq> 0"
by (simp add: zero_enat_def)
lemma i0_ne_infinity [simp]: "0 \<noteq> (\<infinity>::enat)"
by (simp add: zero_enat_def)
lemma zero_one_enat_neq:
"\<not> 0 = (1::enat)"
"\<not> 1 = (0::enat)"
unfolding zero_enat_def one_enat_def by simp_all
lemma infinity_ne_i1 [simp]: "(\<infinity>::enat) \<noteq> 1"
by (simp add: one_enat_def)
lemma i1_ne_infinity [simp]: "1 \<noteq> (\<infinity>::enat)"
by (simp add: one_enat_def)
lemma eSuc_enat: "eSuc (enat n) = enat (Suc n)"
by (simp add: eSuc_def)
lemma eSuc_infinity [simp]: "eSuc \<infinity> = \<infinity>"
by (simp add: eSuc_def)
lemma eSuc_ne_0 [simp]: "eSuc n \<noteq> 0"
by (simp add: eSuc_def zero_enat_def split: enat.splits)
lemma zero_ne_eSuc [simp]: "0 \<noteq> eSuc n"
by (rule eSuc_ne_0 [symmetric])
lemma eSuc_inject [simp]: "eSuc m = eSuc n \<longleftrightarrow> m = n"
by (simp add: eSuc_def split: enat.splits)
lemma eSuc_enat_iff: "eSuc x = enat y \<longleftrightarrow> (\<exists>n. y = Suc n \<and> x = enat n)"
by (cases y) (auto simp: enat_0 eSuc_enat[symmetric])
lemma enat_eSuc_iff: "enat y = eSuc x \<longleftrightarrow> (\<exists>n. y = Suc n \<and> enat n = x)"
by (cases y) (auto simp: enat_0 eSuc_enat[symmetric])
subsection \<open>Addition\<close>
instantiation enat :: comm_monoid_add
begin
definition [nitpick_simp]:
"m + n = (case m of \<infinity> \<Rightarrow> \<infinity> | enat m \<Rightarrow> (case n of \<infinity> \<Rightarrow> \<infinity> | enat n \<Rightarrow> enat (m + n)))"
lemma plus_enat_simps [simp, code]:
fixes q :: enat
shows "enat m + enat n = enat (m + n)"
and "\<infinity> + q = \<infinity>"
and "q + \<infinity> = \<infinity>"
by (simp_all add: plus_enat_def split: enat.splits)
instance
proof
fix n m q :: enat
show "n + m + q = n + (m + q)"
by (cases n m q rule: enat3_cases) auto
show "n + m = m + n"
by (cases n m rule: enat2_cases) auto
show "0 + n = n"
by (cases n) (simp_all add: zero_enat_def)
qed
end
lemma eSuc_plus_1:
"eSuc n = n + 1"
by (cases n) (simp_all add: eSuc_enat one_enat_def)
lemma plus_1_eSuc:
"1 + q = eSuc q"
"q + 1 = eSuc q"
by (simp_all add: eSuc_plus_1 ac_simps)
lemma iadd_Suc: "eSuc m + n = eSuc (m + n)"
by (simp_all add: eSuc_plus_1 ac_simps)
lemma iadd_Suc_right: "m + eSuc n = eSuc (m + n)"
by (simp only: add.commute[of m] iadd_Suc)
subsection \<open>Multiplication\<close>
instantiation enat :: "{comm_semiring_1, semiring_no_zero_divisors}"
begin
definition times_enat_def [nitpick_simp]:
"m * n = (case m of \<infinity> \<Rightarrow> if n = 0 then 0 else \<infinity> | enat m \<Rightarrow>
(case n of \<infinity> \<Rightarrow> if m = 0 then 0 else \<infinity> | enat n \<Rightarrow> enat (m * n)))"
lemma times_enat_simps [simp, code]:
"enat m * enat n = enat (m * n)"
"\<infinity> * \<infinity> = (\<infinity>::enat)"
"\<infinity> * enat n = (if n = 0 then 0 else \<infinity>)"
"enat m * \<infinity> = (if m = 0 then 0 else \<infinity>)"
unfolding times_enat_def zero_enat_def
by (simp_all split: enat.split)
instance
proof
fix a b c :: enat
show "(a * b) * c = a * (b * c)"
unfolding times_enat_def zero_enat_def
by (simp split: enat.split)
show comm: "a * b = b * a"
unfolding times_enat_def zero_enat_def
by (simp split: enat.split)
show "1 * a = a"
unfolding times_enat_def zero_enat_def one_enat_def
by (simp split: enat.split)
show distr: "(a + b) * c = a * c + b * c"
unfolding times_enat_def zero_enat_def
by (simp split: enat.split add: distrib_right)
show "0 * a = 0"
unfolding times_enat_def zero_enat_def
by (simp split: enat.split)
show "a * 0 = 0"
unfolding times_enat_def zero_enat_def
by (simp split: enat.split)
show "a * (b + c) = a * b + a * c"
by (cases a b c rule: enat3_cases) (auto simp: times_enat_def zero_enat_def distrib_left)
show "a \<noteq> 0 \<Longrightarrow> b \<noteq> 0 \<Longrightarrow> a * b \<noteq> 0"
by (cases a b rule: enat2_cases) (auto simp: times_enat_def zero_enat_def)
qed
end
lemma mult_eSuc: "eSuc m * n = n + m * n"
unfolding eSuc_plus_1 by (simp add: algebra_simps)
lemma mult_eSuc_right: "m * eSuc n = m + m * n"
unfolding eSuc_plus_1 by (simp add: algebra_simps)
lemma of_nat_eq_enat: "of_nat n = enat n"
apply (induct n)
apply (simp add: enat_0)
apply (simp add: plus_1_eSuc eSuc_enat)
done
instance enat :: semiring_char_0
proof
have "inj enat" by (rule injI) simp
then show "inj (\<lambda>n. of_nat n :: enat)" by (simp add: of_nat_eq_enat)
qed
lemma imult_is_infinity: "((a::enat) * b = \<infinity>) = (a = \<infinity> \<and> b \<noteq> 0 \<or> b = \<infinity> \<and> a \<noteq> 0)"
by (auto simp add: times_enat_def zero_enat_def split: enat.split)
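text \<open>
A hypothetical illustration (not part of the original theory): the simp rules
above compute with \<open>\<infinity>\<close> as expected.
\<close>
lemma "enat m + \<infinity> = \<infinity>" and "\<infinity> * enat 0 = (0::enat)"
  by simp_all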
subsection \<open>Numerals\<close>
lemma numeral_eq_enat:
"numeral k = enat (numeral k)"
using of_nat_eq_enat [of "numeral k"] by simp
lemma enat_numeral [code_abbrev]:
"enat (numeral k) = numeral k"
using numeral_eq_enat ..
lemma infinity_ne_numeral [simp]: "(\<infinity>::enat) \<noteq> numeral k"
by (simp add: numeral_eq_enat)
lemma numeral_ne_infinity [simp]: "numeral k \<noteq> (\<infinity>::enat)"
by (simp add: numeral_eq_enat)
lemma eSuc_numeral [simp]: "eSuc (numeral k) = numeral (k + Num.One)"
by (simp only: eSuc_plus_1 numeral_plus_one)
subsection \<open>Subtraction\<close>
instantiation enat :: minus
begin
definition diff_enat_def:
"a - b = (case a of (enat x) \<Rightarrow> (case b of (enat y) \<Rightarrow> enat (x - y) | \<infinity> \<Rightarrow> 0)
| \<infinity> \<Rightarrow> \<infinity>)"
instance ..
end
lemma idiff_enat_enat [simp, code]: "enat a - enat b = enat (a - b)"
by (simp add: diff_enat_def)
lemma idiff_infinity [simp, code]: "\<infinity> - n = (\<infinity>::enat)"
by (simp add: diff_enat_def)
lemma idiff_infinity_right [simp, code]: "enat a - \<infinity> = 0"
by (simp add: diff_enat_def)
lemma idiff_0 [simp]: "(0::enat) - n = 0"
by (cases n, simp_all add: zero_enat_def)
lemmas idiff_enat_0 [simp] = idiff_0 [unfolded zero_enat_def]
lemma idiff_0_right [simp]: "(n::enat) - 0 = n"
by (cases n) (simp_all add: zero_enat_def)
lemmas idiff_enat_0_right [simp] = idiff_0_right [unfolded zero_enat_def]
lemma idiff_self [simp]: "n \<noteq> \<infinity> \<Longrightarrow> (n::enat) - n = 0"
by (auto simp: zero_enat_def)
lemma eSuc_minus_eSuc [simp]: "eSuc n - eSuc m = n - m"
by (simp add: eSuc_def split: enat.split)
lemma eSuc_minus_1 [simp]: "eSuc n - 1 = n"
by (simp add: one_enat_def flip: eSuc_enat zero_enat_def)
(*lemmas idiff_self_eq_0_enat = idiff_self_eq_0[unfolded zero_enat_def]*)
subsection \<open>Ordering\<close>
instantiation enat :: linordered_ab_semigroup_add
begin
definition [nitpick_simp]:
"m \<le> n = (case n of enat n1 \<Rightarrow> (case m of enat m1 \<Rightarrow> m1 \<le> n1 | \<infinity> \<Rightarrow> False)
| \<infinity> \<Rightarrow> True)"
definition [nitpick_simp]:
"m < n = (case m of enat m1 \<Rightarrow> (case n of enat n1 \<Rightarrow> m1 < n1 | \<infinity> \<Rightarrow> True)
| \<infinity> \<Rightarrow> False)"
lemma enat_ord_simps [simp]:
"enat m \<le> enat n \<longleftrightarrow> m \<le> n"
"enat m < enat n \<longleftrightarrow> m < n"
"q \<le> (\<infinity>::enat)"
"q < (\<infinity>::enat) \<longleftrightarrow> q \<noteq> \<infinity>"
"(\<infinity>::enat) \<le> q \<longleftrightarrow> q = \<infinity>"
"(\<infinity>::enat) < q \<longleftrightarrow> False"
by (simp_all add: less_eq_enat_def less_enat_def split: enat.splits)
lemma numeral_le_enat_iff[simp]:
shows "numeral m \<le> enat n \<longleftrightarrow> numeral m \<le> n"
by (auto simp: numeral_eq_enat)
lemma numeral_less_enat_iff[simp]:
shows "numeral m < enat n \<longleftrightarrow> numeral m < n"
by (auto simp: numeral_eq_enat)
lemma enat_ord_code [code]:
"enat m \<le> enat n \<longleftrightarrow> m \<le> n"
"enat m < enat n \<longleftrightarrow> m < n"
"q \<le> (\<infinity>::enat) \<longleftrightarrow> True"
"enat m < \<infinity> \<longleftrightarrow> True"
"\<infinity> \<le> enat n \<longleftrightarrow> False"
"(\<infinity>::enat) < q \<longleftrightarrow> False"
by simp_all
instance
by standard (auto simp add: less_eq_enat_def less_enat_def plus_enat_def split: enat.splits)
end
instance enat :: dioid
proof
fix a b :: enat show "(a \<le> b) = (\<exists>c. b = a + c)"
by (cases a b rule: enat2_cases) (auto simp: le_iff_add enat_ex_split)
qed
instance enat :: "{linordered_nonzero_semiring, strict_ordered_comm_monoid_add}"
proof
fix a b c :: enat
show "a \<le> b \<Longrightarrow> 0 \<le> c \<Longrightarrow>c * a \<le> c * b"
unfolding times_enat_def less_eq_enat_def zero_enat_def
by (simp split: enat.splits)
show "a < b \<Longrightarrow> c < d \<Longrightarrow> a + c < b + d" for a b c d :: enat
by (cases a b c d rule: enat2_cases[case_product enat2_cases]) auto
show "a < b \<Longrightarrow> a + 1 < b + 1"
by (metis add_right_mono eSuc_minus_1 eSuc_plus_1 less_le)
qed (simp add: zero_enat_def one_enat_def)
(* BH: These equations are already proven generally for any type in
class linordered_semidom. However, enat is not in that class because
it does not have the cancellation property. Would it be worthwhile to
generalize linordered_semidom to a new class that includes enat? *)
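(* For instance, cancellation fails for enat because addition with \<infinity> absorbs:
\<infinity> + 1 = \<infinity> + 2 although 1 \<noteq> 2. Illustrative sketch, assuming the
[simp] rules for enat addition above suffice: *)
lemma "(\<infinity>::enat) + 1 = \<infinity> + 2"
by simp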
lemma add_diff_assoc_enat: "z \<le> y \<Longrightarrow> x + (y - z) = x + y - (z::enat)"
by(cases x)(auto simp add: diff_enat_def split: enat.split)
lemma enat_ord_number [simp]:
"(numeral m :: enat) \<le> numeral n \<longleftrightarrow> (numeral m :: nat) \<le> numeral n"
"(numeral m :: enat) < numeral n \<longleftrightarrow> (numeral m :: nat) < numeral n"
by (simp_all add: numeral_eq_enat)
lemma infinity_ileE [elim!]: "\<infinity> \<le> enat m \<Longrightarrow> R"
by (simp add: zero_enat_def less_eq_enat_def split: enat.splits)
lemma infinity_ilessE [elim!]: "\<infinity> < enat m \<Longrightarrow> R"
by simp
lemma eSuc_ile_mono [simp]: "eSuc n \<le> eSuc m \<longleftrightarrow> n \<le> m"
by (simp add: eSuc_def less_eq_enat_def split: enat.splits)
lemma eSuc_mono [simp]: "eSuc n < eSuc m \<longleftrightarrow> n < m"
by (simp add: eSuc_def less_enat_def split: enat.splits)
lemma ile_eSuc [simp]: "n \<le> eSuc n"
by (simp add: eSuc_def less_eq_enat_def split: enat.splits)
lemma not_eSuc_ilei0 [simp]: "\<not> eSuc n \<le> 0"
by (simp add: zero_enat_def eSuc_def less_eq_enat_def split: enat.splits)
lemma i0_iless_eSuc [simp]: "0 < eSuc n"
by (simp add: zero_enat_def eSuc_def less_enat_def split: enat.splits)
lemma iless_eSuc0[simp]: "(n < eSuc 0) = (n = 0)"
by (simp add: zero_enat_def eSuc_def less_enat_def split: enat.split)
lemma ileI1: "m < n \<Longrightarrow> eSuc m \<le> n"
by (simp add: eSuc_def less_eq_enat_def less_enat_def split: enat.splits)
lemma Suc_ile_eq: "enat (Suc m) \<le> n \<longleftrightarrow> enat m < n"
by (cases n) auto
lemma iless_Suc_eq [simp]: "enat m < eSuc n \<longleftrightarrow> enat m \<le> n"
by (auto simp add: eSuc_def less_enat_def split: enat.splits)
lemma imult_infinity: "(0::enat) < n \<Longrightarrow> \<infinity> * n = \<infinity>"
by (simp add: zero_enat_def less_enat_def split: enat.splits)
lemma imult_infinity_right: "(0::enat) < n \<Longrightarrow> n * \<infinity> = \<infinity>"
by (simp add: zero_enat_def less_enat_def split: enat.splits)
lemma enat_0_less_mult_iff: "(0 < (m::enat) * n) = (0 < m \<and> 0 < n)"
by (simp only: zero_less_iff_neq_zero mult_eq_0_iff, simp)
lemma mono_eSuc: "mono eSuc"
by (simp add: mono_def)
lemma min_enat_simps [simp]:
"min (enat m) (enat n) = enat (min m n)"
"min q 0 = 0"
"min 0 q = 0"
"min q (\<infinity>::enat) = q"
"min (\<infinity>::enat) q = q"
by (auto simp add: min_def)
lemma max_enat_simps [simp]:
"max (enat m) (enat n) = enat (max m n)"
"max q 0 = q"
"max 0 q = q"
"max q \<infinity> = (\<infinity>::enat)"
"max \<infinity> q = (\<infinity>::enat)"
by (simp_all add: max_def)
lemma enat_ile: "n \<le> enat m \<Longrightarrow> \<exists>k. n = enat k"
by (cases n) simp_all
lemma enat_iless: "n < enat m \<Longrightarrow> \<exists>k. n = enat k"
by (cases n) simp_all
lemma iadd_le_enat_iff:
"x + y \<le> enat n \<longleftrightarrow> (\<exists>y' x'. x = enat x' \<and> y = enat y' \<and> x' + y' \<le> n)"
by(cases x y rule: enat.exhaust[case_product enat.exhaust]) simp_all
lemma chain_incr: "\<forall>i. \<exists>j. Y i < Y j \<Longrightarrow> \<exists>j. enat k < Y j"
apply (induct_tac k)
apply (simp (no_asm) only: enat_0)
apply (fast intro: le_less_trans [OF zero_le])
apply (erule exE)
apply (drule spec)
apply (erule exE)
apply (drule ileI1)
apply (rule eSuc_enat [THEN subst])
apply (rule exI)
apply (erule (1) le_less_trans)
done
lemma eSuc_max: "eSuc (max x y) = max (eSuc x) (eSuc y)"
by (simp add: eSuc_def split: enat.split)
lemma eSuc_Max:
assumes "finite A" "A \<noteq> {}"
shows "eSuc (Max A) = Max (eSuc ` A)"
using assms proof induction
case (insert x A)
thus ?case by(cases "A = {}")(simp_all add: eSuc_max)
qed simp
instantiation enat :: "{order_bot, order_top}"
begin
definition bot_enat :: enat where "bot_enat = 0"
definition top_enat :: enat where "top_enat = \<infinity>"
instance
by standard (simp_all add: bot_enat_def top_enat_def)
end
lemma finite_enat_bounded:
assumes le_fin: "\<And>y. y \<in> A \<Longrightarrow> y \<le> enat n"
shows "finite A"
proof (rule finite_subset)
show "finite (enat ` {..n})" by blast
have "A \<subseteq> {..enat n}" using le_fin by fastforce
also have "\<dots> \<subseteq> enat ` {..n}"
apply (rule subsetI)
subgoal for x by (cases x) auto
done
finally show "A \<subseteq> enat ` {..n}" .
qed
subsection \<open>Cancellation simprocs\<close>
lemma add_diff_cancel_enat[simp]: "x \<noteq> \<infinity> \<Longrightarrow> x + y - x = (y::enat)"
by (metis add.commute add.right_neutral add_diff_assoc_enat idiff_self order_refl)
lemma enat_add_left_cancel: "a + b = a + c \<longleftrightarrow> a = (\<infinity>::enat) \<or> b = c"
unfolding plus_enat_def by (simp split: enat.split)
lemma enat_add_left_cancel_le: "a + b \<le> a + c \<longleftrightarrow> a = (\<infinity>::enat) \<or> b \<le> c"
unfolding plus_enat_def by (simp split: enat.split)
lemma enat_add_left_cancel_less: "a + b < a + c \<longleftrightarrow> a \<noteq> (\<infinity>::enat) \<and> b < c"
unfolding plus_enat_def by (simp split: enat.split)
lemma plus_eq_infty_iff_enat: "(m::enat) + n = \<infinity> \<longleftrightarrow> m = \<infinity> \<or> n = \<infinity>"
using enat_add_left_cancel by fastforce
ML \<open>
structure Cancel_Enat_Common =
struct
(* copied from src/HOL/Tools/nat_numeral_simprocs.ML *)
fun find_first_t _ _ [] = raise TERM("find_first_t", [])
| find_first_t past u (t::terms) =
if u aconv t then (rev past @ terms)
else find_first_t (t::past) u terms
fun dest_summing (Const (\<^const_name>\<open>Groups.plus\<close>, _) $ t $ u, ts) =
dest_summing (t, dest_summing (u, ts))
| dest_summing (t, ts) = t :: ts
val mk_sum = Arith_Data.long_mk_sum
fun dest_sum t = dest_summing (t, [])
val find_first = find_first_t []
val trans_tac = Numeral_Simprocs.trans_tac
val norm_ss =
simpset_of (put_simpset HOL_basic_ss \<^context>
addsimps @{thms ac_simps add_0_left add_0_right})
fun norm_tac ctxt = ALLGOALS (simp_tac (put_simpset norm_ss ctxt))
fun simplify_meta_eq ctxt cancel_th th =
Arith_Data.simplify_meta_eq [] ctxt
([th, cancel_th] MRS trans)
fun mk_eq (a, b) = HOLogic.mk_Trueprop (HOLogic.mk_eq (a, b))
end
structure Eq_Enat_Cancel = ExtractCommonTermFun
(open Cancel_Enat_Common
val mk_bal = HOLogic.mk_eq
val dest_bal = HOLogic.dest_bin \<^const_name>\<open>HOL.eq\<close> \<^typ>\<open>enat\<close>
fun simp_conv _ _ = SOME @{thm enat_add_left_cancel}
)
structure Le_Enat_Cancel = ExtractCommonTermFun
(open Cancel_Enat_Common
val mk_bal = HOLogic.mk_binrel \<^const_name>\<open>Orderings.less_eq\<close>
val dest_bal = HOLogic.dest_bin \<^const_name>\<open>Orderings.less_eq\<close> \<^typ>\<open>enat\<close>
fun simp_conv _ _ = SOME @{thm enat_add_left_cancel_le}
)
structure Less_Enat_Cancel = ExtractCommonTermFun
(open Cancel_Enat_Common
val mk_bal = HOLogic.mk_binrel \<^const_name>\<open>Orderings.less\<close>
val dest_bal = HOLogic.dest_bin \<^const_name>\<open>Orderings.less\<close> \<^typ>\<open>enat\<close>
fun simp_conv _ _ = SOME @{thm enat_add_left_cancel_less}
)
\<close>
simproc_setup enat_eq_cancel
("(l::enat) + m = n" | "(l::enat) = m + n") =
- \<open>fn phi => fn ctxt => fn ct => Eq_Enat_Cancel.proc ctxt (Thm.term_of ct)\<close>
+ \<open>K (fn ctxt => fn ct => Eq_Enat_Cancel.proc ctxt (Thm.term_of ct))\<close>
simproc_setup enat_le_cancel
("(l::enat) + m \<le> n" | "(l::enat) \<le> m + n") =
- \<open>fn phi => fn ctxt => fn ct => Le_Enat_Cancel.proc ctxt (Thm.term_of ct)\<close>
+ \<open>K (fn ctxt => fn ct => Le_Enat_Cancel.proc ctxt (Thm.term_of ct))\<close>
simproc_setup enat_less_cancel
("(l::enat) + m < n" | "(l::enat) < m + n") =
- \<open>fn phi => fn ctxt => fn ct => Less_Enat_Cancel.proc ctxt (Thm.term_of ct)\<close>
+ \<open>K (fn ctxt => fn ct => Less_Enat_Cancel.proc ctxt (Thm.term_of ct))\<close>
text \<open>TODO: add regression tests for these simprocs\<close>
text \<open>TODO: add simprocs for combining and cancelling numerals\<close>
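text \<open>A minimal sketch of the kind of regression test meant by the first TODO above,
for the \<open>enat_eq_cancel\<close> simproc (an illustrative assumption: the goal below is
expected to be closed by the simproc via \<open>enat_add_left_cancel\<close> alone):\<close>
lemma "(a::enat) + b = a + c \<longleftrightarrow> a = \<infinity> \<or> b = c"
by simp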
subsection \<open>Well-ordering\<close>
lemma less_enatE:
"\<lbrakk>n < enat m; \<And>k. n = enat k \<Longrightarrow> k < m \<Longrightarrow> P\<rbrakk> \<Longrightarrow> P"
by (induct n) auto
lemma less_infinityE:
"\<lbrakk>n < \<infinity>; \<And>k. n = enat k \<Longrightarrow> P\<rbrakk> \<Longrightarrow> P"
by (induct n) auto
lemma enat_less_induct:
assumes prem: "\<And>n. \<forall>m::enat. m < n \<longrightarrow> P m \<Longrightarrow> P n" shows "P n"
proof -
have P_enat: "\<And>k. P (enat k)"
apply (rule nat_less_induct)
apply (rule prem, clarify)
apply (erule less_enatE, simp)
done
show ?thesis
proof (induct n)
fix nat
show "P (enat nat)" by (rule P_enat)
next
show "P \<infinity>"
apply (rule prem, clarify)
apply (erule less_infinityE)
apply (simp add: P_enat)
done
qed
qed
instance enat :: wellorder
proof
fix P and n
assume hyp: "(\<And>n::enat. (\<And>m::enat. m < n \<Longrightarrow> P m) \<Longrightarrow> P n)"
show "P n" by (blast intro: enat_less_induct hyp)
qed
subsection \<open>Complete Lattice\<close>
instantiation enat :: complete_lattice
begin
definition inf_enat :: "enat \<Rightarrow> enat \<Rightarrow> enat" where
"inf_enat = min"
definition sup_enat :: "enat \<Rightarrow> enat \<Rightarrow> enat" where
"sup_enat = max"
definition Inf_enat :: "enat set \<Rightarrow> enat" where
"Inf_enat A = (if A = {} then \<infinity> else (LEAST x. x \<in> A))"
definition Sup_enat :: "enat set \<Rightarrow> enat" where
"Sup_enat A = (if A = {} then 0 else if finite A then Max A else \<infinity>)"
instance
proof
fix x :: "enat" and A :: "enat set"
{ assume "x \<in> A" then show "Inf A \<le> x"
unfolding Inf_enat_def by (auto intro: Least_le) }
{ assume "\<And>y. y \<in> A \<Longrightarrow> x \<le> y" then show "x \<le> Inf A"
unfolding Inf_enat_def
by (cases "A = {}") (auto intro: LeastI2_ex) }
{ assume "x \<in> A" then show "x \<le> Sup A"
unfolding Sup_enat_def by (cases "finite A") auto }
{ assume "\<And>y. y \<in> A \<Longrightarrow> y \<le> x" then show "Sup A \<le> x"
unfolding Sup_enat_def using finite_enat_bounded by auto }
qed (simp_all add:
inf_enat_def sup_enat_def bot_enat_def top_enat_def Inf_enat_def Sup_enat_def)
end
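text \<open>For illustration, a small consequence of the definition of \<open>Sup\<close> in the
instantiation above (a sketch; the proof method is an assumption, relying on the
standard simp rules for \<open>Max\<close> on finite sets):\<close>
lemma "Sup {enat 1, enat 3} = enat 3"
by (simp add: Sup_enat_def max_def)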
instance enat :: complete_linorder ..
lemma eSuc_Sup: "A \<noteq> {} \<Longrightarrow> eSuc (Sup A) = Sup (eSuc ` A)"
by(auto simp add: Sup_enat_def eSuc_Max inj_on_def dest: finite_imageD)
lemma sup_continuous_eSuc: "sup_continuous f \<Longrightarrow> sup_continuous (\<lambda>x. eSuc (f x))"
using eSuc_Sup [of "_ ` UNIV"] by (auto simp: sup_continuous_def image_comp)
subsection \<open>Traditional theorem names\<close>
lemmas enat_defs = zero_enat_def one_enat_def eSuc_def
plus_enat_def less_eq_enat_def less_enat_def
lemma iadd_is_0: "(m + n = (0::enat)) = (m = 0 \<and> n = 0)"
by (rule add_eq_0_iff_both_eq_0)
lemma i0_lb : "(0::enat) \<le> n"
by (rule zero_le)
lemma ile0_eq: "n \<le> (0::enat) \<longleftrightarrow> n = 0"
by (rule le_zero_eq)
lemma not_iless0: "\<not> n < (0::enat)"
by (rule not_less_zero)
lemma i0_less[simp]: "(0::enat) < n \<longleftrightarrow> n \<noteq> 0"
by (rule zero_less_iff_neq_zero)
lemma imult_is_0: "((m::enat) * n = 0) = (m = 0 \<or> n = 0)"
by (rule mult_eq_0_iff)
end
diff --git a/src/HOL/Library/Extended_Nonnegative_Real.thy b/src/HOL/Library/Extended_Nonnegative_Real.thy
--- a/src/HOL/Library/Extended_Nonnegative_Real.thy
+++ b/src/HOL/Library/Extended_Nonnegative_Real.thy
@@ -1,2047 +1,2047 @@
(* Title: HOL/Library/Extended_Nonnegative_Real.thy
Author: Johannes Hölzl
*)
section \<open>The type of non-negative extended real numbers\<close>
theory Extended_Nonnegative_Real
imports Extended_Real Indicator_Function
begin
lemma ereal_ineq_diff_add:
assumes "b \<noteq> (-\<infinity>::ereal)" "a \<ge> b"
shows "a = b + (a-b)"
by (metis add.commute assms ereal_eq_minus_iff ereal_minus_le_iff ereal_plus_eq_PInfty)
lemma Limsup_const_add:
fixes c :: "'a::{complete_linorder, linorder_topology, topological_monoid_add, ordered_ab_semigroup_add}"
shows "F \<noteq> bot \<Longrightarrow> Limsup F (\<lambda>x. c + f x) = c + Limsup F f"
by (rule Limsup_compose_continuous_mono)
(auto intro!: monoI add_mono continuous_on_add continuous_on_id continuous_on_const)
lemma Liminf_const_add:
fixes c :: "'a::{complete_linorder, linorder_topology, topological_monoid_add, ordered_ab_semigroup_add}"
shows "F \<noteq> bot \<Longrightarrow> Liminf F (\<lambda>x. c + f x) = c + Liminf F f"
by (rule Liminf_compose_continuous_mono)
(auto intro!: monoI add_mono continuous_on_add continuous_on_id continuous_on_const)
lemma Liminf_add_const:
fixes c :: "'a::{complete_linorder, linorder_topology, topological_monoid_add, ordered_ab_semigroup_add}"
shows "F \<noteq> bot \<Longrightarrow> Liminf F (\<lambda>x. f x + c) = Liminf F f + c"
by (rule Liminf_compose_continuous_mono)
(auto intro!: monoI add_mono continuous_on_add continuous_on_id continuous_on_const)
lemma sums_offset:
fixes f g :: "nat \<Rightarrow> 'a :: {t2_space, topological_comm_monoid_add}"
assumes "(\<lambda>n. f (n + i)) sums l" shows "f sums (l + (\<Sum>j<i. f j))"
proof -
have "(\<lambda>k. (\<Sum>n<k. f (n + i)) + (\<Sum>j<i. f j)) \<longlonglongrightarrow> l + (\<Sum>j<i. f j)"
using assms by (auto intro!: tendsto_add simp: sums_def)
moreover
{ fix k :: nat
have "(\<Sum>j<k + i. f j) = (\<Sum>j=i..<k + i. f j) + (\<Sum>j=0..<i. f j)"
by (subst sum.union_disjoint[symmetric]) (auto intro!: sum.cong)
also have "(\<Sum>j=i..<k + i. f j) = (\<Sum>j\<in>(\<lambda>n. n + i)`{0..<k}. f j)"
unfolding image_add_atLeastLessThan by simp
finally have "(\<Sum>j<k + i. f j) = (\<Sum>n<k. f (n + i)) + (\<Sum>j<i. f j)"
by (auto simp: inj_on_def atLeast0LessThan sum.reindex) }
ultimately have "(\<lambda>k. (\<Sum>n<k + i. f n)) \<longlonglongrightarrow> l + (\<Sum>j<i. f j)"
by simp
then show ?thesis
unfolding sums_def by (rule LIMSEQ_offset)
qed
lemma suminf_offset:
fixes f g :: "nat \<Rightarrow> 'a :: {t2_space, topological_comm_monoid_add}"
shows "summable (\<lambda>j. f (j + i)) \<Longrightarrow> suminf f = (\<Sum>j. f (j + i)) + (\<Sum>j<i. f j)"
by (intro sums_unique[symmetric] sums_offset summable_sums)
lemma eventually_at_left_1: "(\<And>z::real. 0 < z \<Longrightarrow> z < 1 \<Longrightarrow> P z) \<Longrightarrow> eventually P (at_left 1)"
by (subst eventually_at_left[of 0]) (auto intro: exI[of _ 0])
lemma mult_eq_1:
fixes a b :: "'a :: {ordered_semiring, comm_monoid_mult}"
shows "0 \<le> a \<Longrightarrow> a \<le> 1 \<Longrightarrow> b \<le> 1 \<Longrightarrow> a * b = 1 \<longleftrightarrow> (a = 1 \<and> b = 1)"
by (metis mult.left_neutral eq_iff mult.commute mult_right_mono)
lemma ereal_add_diff_cancel:
fixes a b :: ereal
shows "\<bar>b\<bar> \<noteq> \<infinity> \<Longrightarrow> (a + b) - b = a"
by (cases a b rule: ereal2_cases) auto
lemma add_top:
fixes x :: "'a::{order_top, ordered_comm_monoid_add}"
shows "0 \<le> x \<Longrightarrow> x + top = top"
by (intro top_le add_increasing order_refl)
lemma top_add:
fixes x :: "'a::{order_top, ordered_comm_monoid_add}"
shows "0 \<le> x \<Longrightarrow> top + x = top"
by (intro top_le add_increasing2 order_refl)
lemma le_lfp: "mono f \<Longrightarrow> x \<le> lfp f \<Longrightarrow> f x \<le> lfp f"
by (subst lfp_unfold) (auto dest: monoD)
lemma lfp_transfer:
assumes \<alpha>: "sup_continuous \<alpha>" and f: "sup_continuous f" and mg: "mono g"
assumes bot: "\<alpha> bot \<le> lfp g" and eq: "\<And>x. x \<le> lfp f \<Longrightarrow> \<alpha> (f x) = g (\<alpha> x)"
shows "\<alpha> (lfp f) = lfp g"
proof (rule antisym)
note mf = sup_continuous_mono[OF f]
have f_le_lfp: "(f ^^ i) bot \<le> lfp f" for i
by (induction i) (auto intro: le_lfp mf)
have "\<alpha> ((f ^^ i) bot) \<le> lfp g" for i
by (induction i) (auto simp: bot eq f_le_lfp intro!: le_lfp mg)
then show "\<alpha> (lfp f) \<le> lfp g"
unfolding sup_continuous_lfp[OF f]
by (subst \<alpha>[THEN sup_continuousD])
(auto intro!: mono_funpow sup_continuous_mono[OF f] SUP_least)
show "lfp g \<le> \<alpha> (lfp f)"
by (rule lfp_lowerbound) (simp add: eq[symmetric] lfp_fixpoint[OF mf])
qed
lemma sup_continuous_applyD: "sup_continuous f \<Longrightarrow> sup_continuous (\<lambda>x. f x h)"
using sup_continuous_apply[THEN sup_continuous_compose] .
lemma sup_continuous_SUP[order_continuous_intros]:
fixes M :: "_ \<Rightarrow> _ \<Rightarrow> 'a::complete_lattice"
assumes M: "\<And>i. i \<in> I \<Longrightarrow> sup_continuous (M i)"
shows "sup_continuous (SUP i\<in>I. M i)"
unfolding sup_continuous_def by (auto simp add: sup_continuousD [OF M] image_comp intro: SUP_commute)
lemma sup_continuous_apply_SUP[order_continuous_intros]:
fixes M :: "_ \<Rightarrow> _ \<Rightarrow> 'a::complete_lattice"
shows "(\<And>i. i \<in> I \<Longrightarrow> sup_continuous (M i)) \<Longrightarrow> sup_continuous (\<lambda>x. SUP i\<in>I. M i x)"
unfolding SUP_apply[symmetric] by (rule sup_continuous_SUP)
lemma sup_continuous_lfp'[order_continuous_intros]:
assumes 1: "sup_continuous f"
assumes 2: "\<And>g. sup_continuous g \<Longrightarrow> sup_continuous (f g)"
shows "sup_continuous (lfp f)"
proof -
have "sup_continuous ((f ^^ i) bot)" for i
proof (induction i)
case (Suc i) then show ?case
by (auto intro!: 2)
qed (simp add: bot_fun_def sup_continuous_const)
then show ?thesis
unfolding sup_continuous_lfp[OF 1] by (intro order_continuous_intros)
qed
lemma sup_continuous_lfp''[order_continuous_intros]:
assumes 1: "\<And>s. sup_continuous (f s)"
assumes 2: "\<And>g. sup_continuous g \<Longrightarrow> sup_continuous (\<lambda>s. f s (g s))"
shows "sup_continuous (\<lambda>x. lfp (f x))"
proof -
have "sup_continuous (\<lambda>x. (f x ^^ i) bot)" for i
proof (induction i)
case (Suc i) then show ?case
by (auto intro!: 2)
qed (simp add: bot_fun_def sup_continuous_const)
then show ?thesis
unfolding sup_continuous_lfp[OF 1] by (intro order_continuous_intros)
qed
lemma mono_INF_fun:
"(\<And>x y. mono (F x y)) \<Longrightarrow> mono (\<lambda>z x. INF y \<in> X x. F x y z :: 'a :: complete_lattice)"
by (auto intro!: INF_mono[OF bexI] simp: le_fun_def mono_def)
lemma continuous_on_cmult_ereal:
"\<bar>c::ereal\<bar> \<noteq> \<infinity> \<Longrightarrow> continuous_on A f \<Longrightarrow> continuous_on A (\<lambda>x. c * f x)"
using tendsto_cmult_ereal[of c f "f x" "at x within A" for x]
by (auto simp: continuous_on_def simp del: tendsto_cmult_ereal)
lemma real_of_nat_Sup:
assumes "A \<noteq> {}" "bdd_above A"
shows "of_nat (Sup A) = (SUP a\<in>A. of_nat a :: real)"
proof (intro antisym)
show "(SUP a\<in>A. of_nat a::real) \<le> of_nat (Sup A)"
using assms by (intro cSUP_least of_nat_mono) (auto intro: cSup_upper)
have "Sup A \<in> A"
using assms by (auto simp: Sup_nat_def bdd_above_nat)
then show "of_nat (Sup A) \<le> (SUP a\<in>A. of_nat a::real)"
by (intro cSUP_upper bdd_above_image_mono assms) (auto simp: mono_def)
qed
lemma (in complete_lattice) SUP_sup_const1:
"I \<noteq> {} \<Longrightarrow> (SUP i\<in>I. sup c (f i)) = sup c (SUP i\<in>I. f i)"
using SUP_sup_distrib[of "\<lambda>_. c" I f] by simp
lemma (in complete_lattice) SUP_sup_const2:
"I \<noteq> {} \<Longrightarrow> (SUP i\<in>I. sup (f i) c) = sup (SUP i\<in>I. f i) c"
using SUP_sup_distrib[of f I "\<lambda>_. c"] by simp
lemma one_less_of_natD:
assumes "(1::'a::linordered_semidom) < of_nat n" shows "1 < n"
by (cases n) (use assms in auto)
subsection \<open>Defining the extended non-negative reals\<close>
text \<open>Basic definitions and type class setup\<close>
typedef ennreal = "{x :: ereal. 0 \<le> x}"
morphisms enn2ereal e2ennreal'
by auto
definition "e2ennreal x = e2ennreal' (max 0 x)"
lemma enn2ereal_range: "e2ennreal ` {0..} = UNIV"
proof -
have "\<exists>y\<ge>0. x = e2ennreal y" for x
by (cases x) (auto simp: e2ennreal_def max_absorb2)
then show ?thesis
by (auto simp: image_iff Bex_def)
qed
lemma type_definition_ennreal': "type_definition enn2ereal e2ennreal {x. 0 \<le> x}"
using type_definition_ennreal
by (auto simp: type_definition_def e2ennreal_def max_absorb2)
setup_lifting type_definition_ennreal'
declare [[coercion e2ennreal]]
instantiation ennreal :: complete_linorder
begin
lift_definition top_ennreal :: ennreal is top by (rule top_greatest)
lift_definition bot_ennreal :: ennreal is 0 by (rule order_refl)
lift_definition sup_ennreal :: "ennreal \<Rightarrow> ennreal \<Rightarrow> ennreal" is sup by (rule le_supI1)
lift_definition inf_ennreal :: "ennreal \<Rightarrow> ennreal \<Rightarrow> ennreal" is inf by (rule le_infI)
lift_definition Inf_ennreal :: "ennreal set \<Rightarrow> ennreal" is "Inf"
by (rule Inf_greatest)
lift_definition Sup_ennreal :: "ennreal set \<Rightarrow> ennreal" is "sup 0 \<circ> Sup"
by auto
lift_definition less_eq_ennreal :: "ennreal \<Rightarrow> ennreal \<Rightarrow> bool" is "(\<le>)" .
lift_definition less_ennreal :: "ennreal \<Rightarrow> ennreal \<Rightarrow> bool" is "(<)" .
instance
by standard
(transfer ; auto simp: Inf_lower Inf_greatest Sup_upper Sup_least le_max_iff_disj max.absorb1)+
end
lemma pcr_ennreal_enn2ereal[simp]: "pcr_ennreal (enn2ereal x) x"
by (simp add: ennreal.pcr_cr_eq cr_ennreal_def)
lemma rel_fun_eq_pcr_ennreal: "rel_fun (=) pcr_ennreal f g \<longleftrightarrow> f = enn2ereal \<circ> g"
by (auto simp: rel_fun_def ennreal.pcr_cr_eq cr_ennreal_def)
instantiation ennreal :: infinity
begin
definition infinity_ennreal :: ennreal
where
[simp]: "\<infinity> = (top::ennreal)"
instance ..
end
instantiation ennreal :: "{semiring_1_no_zero_divisors, comm_semiring_1}"
begin
lift_definition one_ennreal :: ennreal is 1 by simp
lift_definition zero_ennreal :: ennreal is 0 by simp
lift_definition plus_ennreal :: "ennreal \<Rightarrow> ennreal \<Rightarrow> ennreal" is "(+)" by simp
lift_definition times_ennreal :: "ennreal \<Rightarrow> ennreal \<Rightarrow> ennreal" is "(*)" by simp
instance
by standard (transfer; auto simp: field_simps ereal_right_distrib)+
end
instantiation ennreal :: minus
begin
lift_definition minus_ennreal :: "ennreal \<Rightarrow> ennreal \<Rightarrow> ennreal" is "\<lambda>a b. max 0 (a - b)"
by simp
instance ..
end
instance ennreal :: numeral ..
instantiation ennreal :: inverse
begin
lift_definition inverse_ennreal :: "ennreal \<Rightarrow> ennreal" is inverse
by (rule inverse_ereal_ge0I)
definition divide_ennreal :: "ennreal \<Rightarrow> ennreal \<Rightarrow> ennreal"
where "x div y = x * inverse (y :: ennreal)"
instance ..
end
lemma ennreal_zero_less_one: "0 < (1::ennreal)" \<comment> \<open>TODO: remove\<close>
by transfer auto
instance ennreal :: dioid
proof (standard; transfer)
fix a b :: ereal assume "0 \<le> a" "0 \<le> b" then show "(a \<le> b) = (\<exists>c\<in>Collect ((\<le>) 0). b = a + c)"
unfolding ereal_ex_split Bex_def
by (cases a b rule: ereal2_cases) (auto intro!: exI[of _ "real_of_ereal (b - a)"])
qed
instance ennreal :: ordered_comm_semiring
by standard
(transfer ; auto intro: add_mono mult_mono mult_ac ereal_left_distrib ereal_mult_left_mono)+
instance ennreal :: linordered_nonzero_semiring
proof
fix a b::ennreal
show "a < b \<Longrightarrow> a + 1 < b + 1"
by transfer (simp add: add_right_mono ereal_add_cancel_right less_le)
qed (transfer; simp)
instance ennreal :: strict_ordered_ab_semigroup_add
proof
fix a b c d :: ennreal show "a < b \<Longrightarrow> c < d \<Longrightarrow> a + c < b + d"
by transfer (auto intro!: ereal_add_strict_mono)
qed
declare [[coercion "of_nat :: nat \<Rightarrow> ennreal"]]
lemma e2ennreal_neg: "x \<le> 0 \<Longrightarrow> e2ennreal x = 0"
unfolding zero_ennreal_def e2ennreal_def by (simp add: max_absorb1)
lemma e2ennreal_mono: "x \<le> y \<Longrightarrow> e2ennreal x \<le> e2ennreal y"
by (cases "0 \<le> x" "0 \<le> y" rule: bool.exhaust[case_product bool.exhaust])
(auto simp: e2ennreal_neg less_eq_ennreal.abs_eq eq_onp_def)
lemma enn2ereal_nonneg[simp]: "0 \<le> enn2ereal x"
using ennreal.enn2ereal[of x] by simp
lemma ereal_ennreal_cases:
obtains b where "0 \<le> a" "a = enn2ereal b" | "a < 0"
using e2ennreal'_inverse[of a, symmetric] by (cases "0 \<le> a") (auto intro: enn2ereal_nonneg)
lemma rel_fun_liminf[transfer_rule]: "rel_fun (rel_fun (=) pcr_ennreal) pcr_ennreal liminf liminf"
proof -
have "rel_fun (rel_fun (=) pcr_ennreal) pcr_ennreal (\<lambda>x. sup 0 (liminf x)) liminf"
unfolding liminf_SUP_INF[abs_def] by (transfer_prover_start, transfer_step+; simp)
then show ?thesis
apply (subst (asm) (2) rel_fun_def)
apply (subst (2) rel_fun_def)
apply (auto simp: comp_def max.absorb2 Liminf_bounded rel_fun_eq_pcr_ennreal)
done
qed
lemma rel_fun_limsup[transfer_rule]: "rel_fun (rel_fun (=) pcr_ennreal) pcr_ennreal limsup limsup"
proof -
have "rel_fun (rel_fun (=) pcr_ennreal) pcr_ennreal (\<lambda>x. INF n. sup 0 (SUP i\<in>{n..}. x i)) limsup"
unfolding limsup_INF_SUP[abs_def] by (transfer_prover_start, transfer_step+; simp)
then show ?thesis
unfolding limsup_INF_SUP[abs_def]
apply (subst (asm) (2) rel_fun_def)
apply (subst (2) rel_fun_def)
apply (auto simp: comp_def max.absorb2 Sup_upper2 rel_fun_eq_pcr_ennreal)
apply (subst (asm) max.absorb2)
apply (auto intro: SUP_upper2)
done
qed
lemma sum_enn2ereal[simp]: "(\<And>i. i \<in> I \<Longrightarrow> 0 \<le> f i) \<Longrightarrow> (\<Sum>i\<in>I. enn2ereal (f i)) = enn2ereal (sum f I)"
by (induction I rule: infinite_finite_induct) (auto simp: sum_nonneg zero_ennreal.rep_eq plus_ennreal.rep_eq)
lemma transfer_e2ennreal_sum [transfer_rule]:
"rel_fun (rel_fun (=) pcr_ennreal) (rel_fun (=) pcr_ennreal) sum sum"
by (auto intro!: rel_funI simp: rel_fun_eq_pcr_ennreal comp_def)
lemma enn2ereal_of_nat[simp]: "enn2ereal (of_nat n) = ereal n"
by (induction n) (auto simp: zero_ennreal.rep_eq one_ennreal.rep_eq plus_ennreal.rep_eq)
lemma enn2ereal_numeral[simp]: "enn2ereal (numeral a) = numeral a"
by (metis enn2ereal_of_nat numeral_eq_ereal of_nat_numeral)
lemma transfer_numeral[transfer_rule]: "pcr_ennreal (numeral a) (numeral a)"
unfolding cr_ennreal_def pcr_ennreal_def by auto
subsection \<open>Cancellation simprocs\<close>
lemma ennreal_add_left_cancel: "a + b = a + c \<longleftrightarrow> a = (\<infinity>::ennreal) \<or> b = c"
unfolding infinity_ennreal_def by transfer (simp add: top_ereal_def ereal_add_cancel_left)
lemma ennreal_add_left_cancel_le: "a + b \<le> a + c \<longleftrightarrow> a = (\<infinity>::ennreal) \<or> b \<le> c"
unfolding infinity_ennreal_def by transfer (simp add: ereal_add_le_add_iff top_ereal_def disj_commute)
lemma ereal_add_left_cancel_less:
fixes a b c :: ereal
shows "0 \<le> a \<Longrightarrow> 0 \<le> b \<Longrightarrow> a + b < a + c \<longleftrightarrow> a \<noteq> \<infinity> \<and> b < c"
by (cases a b c rule: ereal3_cases) auto
lemma ennreal_add_left_cancel_less: "a + b < a + c \<longleftrightarrow> a \<noteq> (\<infinity>::ennreal) \<and> b < c"
unfolding infinity_ennreal_def
by transfer (simp add: top_ereal_def ereal_add_left_cancel_less)
ML \<open>
structure Cancel_Ennreal_Common =
struct
(* copied from src/HOL/Tools/nat_numeral_simprocs.ML *)
fun find_first_t _ _ [] = raise TERM("find_first_t", [])
| find_first_t past u (t::terms) =
if u aconv t then (rev past @ terms)
else find_first_t (t::past) u terms
fun dest_summing (Const (\<^const_name>\<open>Groups.plus\<close>, _) $ t $ u, ts) =
dest_summing (t, dest_summing (u, ts))
| dest_summing (t, ts) = t :: ts
val mk_sum = Arith_Data.long_mk_sum
fun dest_sum t = dest_summing (t, [])
val find_first = find_first_t []
val trans_tac = Numeral_Simprocs.trans_tac
val norm_ss =
simpset_of (put_simpset HOL_basic_ss \<^context>
addsimps @{thms ac_simps add_0_left add_0_right})
fun norm_tac ctxt = ALLGOALS (simp_tac (put_simpset norm_ss ctxt))
fun simplify_meta_eq ctxt cancel_th th =
Arith_Data.simplify_meta_eq [] ctxt
([th, cancel_th] MRS trans)
fun mk_eq (a, b) = HOLogic.mk_Trueprop (HOLogic.mk_eq (a, b))
end
structure Eq_Ennreal_Cancel = ExtractCommonTermFun
(open Cancel_Ennreal_Common
val mk_bal = HOLogic.mk_eq
val dest_bal = HOLogic.dest_bin \<^const_name>\<open>HOL.eq\<close> \<^typ>\<open>ennreal\<close>
fun simp_conv _ _ = SOME @{thm ennreal_add_left_cancel}
)
structure Le_Ennreal_Cancel = ExtractCommonTermFun
(open Cancel_Ennreal_Common
val mk_bal = HOLogic.mk_binrel \<^const_name>\<open>Orderings.less_eq\<close>
val dest_bal = HOLogic.dest_bin \<^const_name>\<open>Orderings.less_eq\<close> \<^typ>\<open>ennreal\<close>
fun simp_conv _ _ = SOME @{thm ennreal_add_left_cancel_le}
)
structure Less_Ennreal_Cancel = ExtractCommonTermFun
(open Cancel_Ennreal_Common
val mk_bal = HOLogic.mk_binrel \<^const_name>\<open>Orderings.less\<close>
val dest_bal = HOLogic.dest_bin \<^const_name>\<open>Orderings.less\<close> \<^typ>\<open>ennreal\<close>
fun simp_conv _ _ = SOME @{thm ennreal_add_left_cancel_less}
)
\<close>
simproc_setup ennreal_eq_cancel
("(l::ennreal) + m = n" | "(l::ennreal) = m + n") =
- \<open>fn phi => fn ctxt => fn ct => Eq_Ennreal_Cancel.proc ctxt (Thm.term_of ct)\<close>
+ \<open>K (fn ctxt => fn ct => Eq_Ennreal_Cancel.proc ctxt (Thm.term_of ct))\<close>
simproc_setup ennreal_le_cancel
("(l::ennreal) + m \<le> n" | "(l::ennreal) \<le> m + n") =
- \<open>fn phi => fn ctxt => fn ct => Le_Ennreal_Cancel.proc ctxt (Thm.term_of ct)\<close>
+ \<open>K (fn ctxt => fn ct => Le_Ennreal_Cancel.proc ctxt (Thm.term_of ct))\<close>
simproc_setup ennreal_less_cancel
("(l::ennreal) + m < n" | "(l::ennreal) < m + n") =
- \<open>fn phi => fn ctxt => fn ct => Less_Ennreal_Cancel.proc ctxt (Thm.term_of ct)\<close>
+ \<open>K (fn ctxt => fn ct => Less_Ennreal_Cancel.proc ctxt (Thm.term_of ct))\<close>
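text \<open>An illustrative sanity check for these simprocs, analogous to the \<open>enat\<close>
case (a sketch; assumed to be discharged by \<open>ennreal_le_cancel\<close> via
\<open>ennreal_add_left_cancel_le\<close>):\<close>
lemma "(a::ennreal) + b \<le> a + c \<longleftrightarrow> a = \<infinity> \<or> b \<le> c"
by simp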
subsection \<open>Order with top\<close>
lemma ennreal_zero_less_top[simp]: "0 < (top::ennreal)"
by transfer (simp add: top_ereal_def)
lemma ennreal_one_less_top[simp]: "1 < (top::ennreal)"
by transfer (simp add: top_ereal_def)
lemma ennreal_zero_neq_top[simp]: "0 \<noteq> (top::ennreal)"
by transfer (simp add: top_ereal_def)
lemma ennreal_top_neq_zero[simp]: "(top::ennreal) \<noteq> 0"
by transfer (simp add: top_ereal_def)
lemma ennreal_top_neq_one[simp]: "top \<noteq> (1::ennreal)"
by transfer (simp add: top_ereal_def one_ereal_def flip: ereal_max)
lemma ennreal_one_neq_top[simp]: "1 \<noteq> (top::ennreal)"
by transfer (simp add: top_ereal_def one_ereal_def flip: ereal_max)
lemma ennreal_add_less_top[simp]:
fixes a b :: ennreal
shows "a + b < top \<longleftrightarrow> a < top \<and> b < top"
by transfer (auto simp: top_ereal_def)
lemma ennreal_add_eq_top[simp]:
fixes a b :: ennreal
shows "a + b = top \<longleftrightarrow> a = top \<or> b = top"
by transfer (auto simp: top_ereal_def)
lemma ennreal_sum_less_top[simp]:
fixes f :: "'a \<Rightarrow> ennreal"
shows "finite I \<Longrightarrow> (\<Sum>i\<in>I. f i) < top \<longleftrightarrow> (\<forall>i\<in>I. f i < top)"
by (induction I rule: finite_induct) auto
lemma ennreal_sum_eq_top[simp]:
fixes f :: "'a \<Rightarrow> ennreal"
shows "finite I \<Longrightarrow> (\<Sum>i\<in>I. f i) = top \<longleftrightarrow> (\<exists>i\<in>I. f i = top)"
by (induction I rule: finite_induct) auto
lemma ennreal_mult_eq_top_iff:
fixes a b :: ennreal
shows "a * b = top \<longleftrightarrow> (a = top \<and> b \<noteq> 0) \<or> (b = top \<and> a \<noteq> 0)"
by transfer (auto simp: top_ereal_def)
lemma ennreal_top_eq_mult_iff:
fixes a b :: ennreal
shows "top = a * b \<longleftrightarrow> (a = top \<and> b \<noteq> 0) \<or> (b = top \<and> a \<noteq> 0)"
using ennreal_mult_eq_top_iff[of a b] by auto
lemma ennreal_mult_less_top:
fixes a b :: ennreal
shows "a * b < top \<longleftrightarrow> (a = 0 \<or> b = 0 \<or> (a < top \<and> b < top))"
by transfer (auto simp add: top_ereal_def)
lemma top_power_ennreal: "top ^ n = (if n = 0 then 1 else top :: ennreal)"
by (induction n) (simp_all add: ennreal_mult_eq_top_iff)
lemma ennreal_prod_eq_0[simp]:
fixes f :: "'a \<Rightarrow> ennreal"
shows "(prod f A = 0) = (finite A \<and> (\<exists>i\<in>A. f i = 0))"
by (induction A rule: infinite_finite_induct) auto
lemma ennreal_prod_eq_top:
fixes f :: "'a \<Rightarrow> ennreal"
shows "(\<Prod>i\<in>I. f i) = top \<longleftrightarrow> (finite I \<and> ((\<forall>i\<in>I. f i \<noteq> 0) \<and> (\<exists>i\<in>I. f i = top)))"
by (induction I rule: infinite_finite_induct) (auto simp: ennreal_mult_eq_top_iff)
lemma ennreal_top_mult: "top * a = (if a = 0 then 0 else top :: ennreal)"
by (simp add: ennreal_mult_eq_top_iff)
lemma ennreal_mult_top: "a * top = (if a = 0 then 0 else top :: ennreal)"
by (simp add: ennreal_mult_eq_top_iff)
lemma enn2ereal_eq_top_iff[simp]: "enn2ereal x = \<infinity> \<longleftrightarrow> x = top"
by transfer (simp add: top_ereal_def)
lemma enn2ereal_top[simp]: "enn2ereal top = \<infinity>"
by transfer (simp add: top_ereal_def)
lemma e2ennreal_infty[simp]: "e2ennreal \<infinity> = top"
by (simp add: top_ennreal.abs_eq top_ereal_def)
lemma ennreal_top_minus[simp]: "top - x = (top::ennreal)"
by transfer (auto simp: top_ereal_def max_def)
lemma minus_top_ennreal: "x - top = (if x = top then top else 0:: ennreal)"
by transfer (use ereal_eq_minus_iff top_ereal_def in force)
lemma bot_ennreal: "bot = (0::ennreal)"
by transfer rule
lemma ennreal_of_nat_neq_top[simp]: "of_nat i \<noteq> (top::ennreal)"
by (induction i) auto
lemma numeral_eq_of_nat: "(numeral a::ennreal) = of_nat (numeral a)"
by simp
lemma of_nat_less_top: "of_nat i < (top::ennreal)"
using less_le_trans[of "of_nat i" "of_nat (Suc i)" "top::ennreal"]
by simp
lemma top_neq_numeral[simp]: "top \<noteq> (numeral i::ennreal)"
using of_nat_less_top[of "numeral i"] by simp
lemma ennreal_numeral_less_top[simp]: "numeral i < (top::ennreal)"
using of_nat_less_top[of "numeral i"] by simp
lemma ennreal_add_bot[simp]: "bot + x = (x::ennreal)"
by transfer simp
lemma add_top_right_ennreal [simp]: "x + top = (top :: ennreal)"
by (cases x) auto
lemma add_top_left_ennreal [simp]: "top + x = (top :: ennreal)"
by (cases x) auto
lemma ennreal_top_mult_left [simp]: "x \<noteq> 0 \<Longrightarrow> x * top = (top :: ennreal)"
by (subst ennreal_mult_eq_top_iff) auto
lemma ennreal_top_mult_right [simp]: "x \<noteq> 0 \<Longrightarrow> top * x = (top :: ennreal)"
by (subst ennreal_mult_eq_top_iff) auto
lemma power_top_ennreal [simp]: "n > 0 \<Longrightarrow> top ^ n = (top :: ennreal)"
by (induction n) auto
lemma power_eq_top_ennreal_iff: "x ^ n = top \<longleftrightarrow> x = (top :: ennreal) \<and> n > 0"
by (induction n) (auto simp: ennreal_mult_eq_top_iff)
lemma ennreal_mult_le_mult_iff: "c \<noteq> 0 \<Longrightarrow> c \<noteq> top \<Longrightarrow> c * a \<le> c * b \<longleftrightarrow> a \<le> (b :: ennreal)"
including ennreal.lifting
by (transfer, subst ereal_mult_le_mult_iff) (auto simp: top_ereal_def)
lemma power_mono_ennreal: "x \<le> y \<Longrightarrow> x ^ n \<le> (y ^ n :: ennreal)"
by (induction n) (auto intro!: mult_mono)
instance ennreal :: semiring_char_0
proof (standard, safe intro!: linorder_injI)
have *: "1 + of_nat k \<noteq> (0::ennreal)" for k
using add_pos_nonneg[OF zero_less_one, of "of_nat k :: ennreal"] by auto
fix x y :: nat assume "x < y" "of_nat x = (of_nat y::ennreal)" then show False
by (auto simp add: less_iff_Suc_add *)
qed
subsection \<open>Arithmetic\<close>
lemma ennreal_minus_zero[simp]: "a - (0::ennreal) = a"
by transfer (auto simp: max_def)
lemma ennreal_add_diff_cancel_right[simp]:
fixes x y z :: ennreal shows "y \<noteq> top \<Longrightarrow> (x + y) - y = x"
by transfer (metis ereal_eq_minus_iff max_absorb2 not_MInfty_nonneg top_ereal_def)
lemma ennreal_add_diff_cancel_left[simp]:
fixes x y z :: ennreal shows "y \<noteq> top \<Longrightarrow> (y + x) - y = x"
by (simp add: add.commute)
lemma
fixes a b :: ennreal
shows "a - b = 0 \<Longrightarrow> a \<le> b"
by transfer (metis ereal_diff_gr0 le_cases max.absorb2 not_less)
lemma ennreal_minus_cancel:
fixes a b c :: ennreal
shows "c \<noteq> top \<Longrightarrow> a \<le> c \<Longrightarrow> b \<le> c \<Longrightarrow> c - a = c - b \<Longrightarrow> a = b"
by (metis ennreal_add_diff_cancel_left ennreal_add_diff_cancel_right ennreal_add_eq_top less_eqE)
lemma sup_const_add_ennreal:
fixes a b c :: "ennreal"
shows "sup (c + a) (c + b) = c + sup a b"
by transfer (metis add_left_mono le_cases sup.absorb2 sup.orderE)
lemma ennreal_diff_add_assoc:
fixes a b c :: ennreal
shows "a \<le> b \<Longrightarrow> c + b - a = c + (b - a)"
by (metis add.left_commute ennreal_add_diff_cancel_left ennreal_add_eq_top ennreal_top_minus less_eqE)
lemma mult_divide_eq_ennreal:
fixes a b :: ennreal
shows "b \<noteq> 0 \<Longrightarrow> b \<noteq> top \<Longrightarrow> (a * b) / b = a"
unfolding divide_ennreal_def
apply transfer
by (metis abs_ereal_ge0 divide_ereal_def ereal_divide_eq ereal_times_divide_eq top_ereal_def)
lemma divide_mult_eq: "a \<noteq> 0 \<Longrightarrow> a \<noteq> \<infinity> \<Longrightarrow> x * a / (b * a) = x / (b::ennreal)"
unfolding divide_ennreal_def infinity_ennreal_def
apply transfer
subgoal for a b c
apply (cases a b c rule: ereal3_cases)
apply (auto simp: top_ereal_def)
done
done
lemma ennreal_mult_divide_eq:
fixes a b :: ennreal
shows "b \<noteq> 0 \<Longrightarrow> b \<noteq> top \<Longrightarrow> (a * b) / b = a"
by (fact mult_divide_eq_ennreal)
lemma ennreal_add_diff_cancel:
fixes a b :: ennreal
shows "b \<noteq> \<infinity> \<Longrightarrow> (a + b) - b = a"
by simp
lemma ennreal_minus_eq_0:
"a - b = 0 \<Longrightarrow> a \<le> (b::ennreal)"
by transfer (metis ereal_diff_gr0 le_cases max.absorb2 not_less)
lemma ennreal_mono_minus_cancel:
fixes a b c :: ennreal
shows "a - b \<le> a - c \<Longrightarrow> a < top \<Longrightarrow> b \<le> a \<Longrightarrow> c \<le> a \<Longrightarrow> c \<le> b"
by transfer
(auto simp add: max.absorb2 ereal_diff_positive top_ereal_def dest: ereal_mono_minus_cancel)
lemma ennreal_mono_minus:
fixes a b c :: ennreal
shows "c \<le> b \<Longrightarrow> a - b \<le> a - c"
by transfer (meson ereal_minus_mono max.mono order_refl)
lemma ennreal_minus_pos_iff:
fixes a b :: ennreal
shows "a < top \<or> b < top \<Longrightarrow> 0 < a - b \<Longrightarrow> b < a"
by transfer (use add.left_neutral ereal_minus_le_iff less_irrefl not_less in fastforce)
lemma ennreal_inverse_top[simp]: "inverse top = (0::ennreal)"
by transfer (simp add: top_ereal_def ereal_inverse_eq_0)
lemma ennreal_inverse_zero[simp]: "inverse 0 = (top::ennreal)"
by transfer (simp add: top_ereal_def ereal_inverse_eq_0)
lemma ennreal_top_divide: "top / (x::ennreal) = (if x = top then 0 else top)"
unfolding divide_ennreal_def
by transfer (simp add: top_ereal_def ereal_inverse_eq_0 ereal_0_gt_inverse)
lemma ennreal_zero_divide[simp]: "0 / (x::ennreal) = 0"
by (simp add: divide_ennreal_def)
lemma ennreal_divide_zero[simp]: "x / (0::ennreal) = (if x = 0 then 0 else top)"
by (simp add: divide_ennreal_def ennreal_mult_top)
lemma ennreal_divide_top[simp]: "x / (top::ennreal) = 0"
by (simp add: divide_ennreal_def ennreal_top_mult)
lemma ennreal_times_divide: "a * (b / c) = a * b / (c::ennreal)"
unfolding divide_ennreal_def
by transfer (simp add: divide_ereal_def[symmetric] ereal_times_divide_eq)
lemma ennreal_zero_less_divide: "0 < a / b \<longleftrightarrow> (0 < a \<and> b < (top::ennreal))"
unfolding divide_ennreal_def
by transfer (auto simp: ereal_zero_less_0_iff top_ereal_def ereal_0_gt_inverse)
lemma add_divide_distrib_ennreal: "(a + b) / c = a / c + b / (c :: ennreal)"
by (simp add: divide_ennreal_def ring_distribs)
lemma divide_right_mono_ennreal:
fixes a b c :: ennreal
shows "a \<le> b \<Longrightarrow> a / c \<le> b / c"
unfolding divide_ennreal_def by (intro mult_mono) auto
lemma ennreal_mult_strict_right_mono: "(a::ennreal) < c \<Longrightarrow> 0 < b \<Longrightarrow> b < top \<Longrightarrow> a * b < c * b"
by transfer (auto intro!: ereal_mult_strict_right_mono)
lemma ennreal_indicator_less[simp]:
"indicator A x \<le> (indicator B x::ennreal) \<longleftrightarrow> (x \<in> A \<longrightarrow> x \<in> B)"
by (simp add: indicator_def not_le)
lemma ennreal_inverse_positive: "0 < inverse x \<longleftrightarrow> (x::ennreal) \<noteq> top"
by transfer (simp add: ereal_0_gt_inverse top_ereal_def)
lemma ennreal_inverse_mult': "((0 < b \<or> a < top) \<and> (0 < a \<or> b < top)) \<Longrightarrow> inverse (a * b::ennreal) = inverse a * inverse b"
apply transfer
subgoal for a b
by (cases a b rule: ereal2_cases) (auto simp: top_ereal_def)
done
lemma ennreal_inverse_mult: "a < top \<Longrightarrow> b < top \<Longrightarrow> inverse (a * b::ennreal) = inverse a * inverse b"
apply transfer
subgoal for a b
by (cases a b rule: ereal2_cases) (auto simp: top_ereal_def)
done
lemma ennreal_inverse_1[simp]: "inverse (1::ennreal) = 1"
by transfer simp
lemma ennreal_inverse_eq_0_iff[simp]: "inverse (a::ennreal) = 0 \<longleftrightarrow> a = top"
by transfer (simp add: ereal_inverse_eq_0 top_ereal_def)
lemma ennreal_inverse_eq_top_iff[simp]: "inverse (a::ennreal) = top \<longleftrightarrow> a = 0"
by transfer (simp add: top_ereal_def)
lemma ennreal_divide_eq_0_iff[simp]: "(a::ennreal) / b = 0 \<longleftrightarrow> (a = 0 \<or> b = top)"
by (simp add: divide_ennreal_def)
lemma ennreal_divide_eq_top_iff: "(a::ennreal) / b = top \<longleftrightarrow> ((a \<noteq> 0 \<and> b = 0) \<or> (a = top \<and> b \<noteq> top))"
by (auto simp add: divide_ennreal_def ennreal_mult_eq_top_iff)
lemma one_divide_one_divide_ennreal[simp]: "1 / (1 / c) = (c::ennreal)"
including ennreal.lifting
unfolding divide_ennreal_def
by transfer auto
lemma ennreal_mult_left_cong:
"((a::ennreal) \<noteq> 0 \<Longrightarrow> b = c) \<Longrightarrow> a * b = a * c"
by (cases "a = 0") simp_all
lemma ennreal_mult_right_cong:
"((a::ennreal) \<noteq> 0 \<Longrightarrow> b = c) \<Longrightarrow> b * a = c * a"
by (cases "a = 0") simp_all
lemma ennreal_zero_less_mult_iff: "0 < a * b \<longleftrightarrow> 0 < a \<and> 0 < (b::ennreal)"
by transfer (auto simp add: ereal_zero_less_0_iff le_less)
lemma less_diff_eq_ennreal:
fixes a b c :: ennreal
shows "b < top \<or> c < top \<Longrightarrow> a < b - c \<longleftrightarrow> a + c < b"
apply transfer
subgoal for a b c
by (cases a b c rule: ereal3_cases) (auto split: split_max)
done
lemma diff_add_cancel_ennreal:
fixes a b :: ennreal shows "a \<le> b \<Longrightarrow> b - a + a = b"
unfolding infinity_ennreal_def
by transfer (metis (no_types) add.commute ereal_diff_positive ereal_ineq_diff_add max_def not_MInfty_nonneg)
lemma ennreal_diff_self[simp]: "a \<noteq> top \<Longrightarrow> a - a = (0::ennreal)"
by transfer (simp add: top_ereal_def)
lemma ennreal_minus_mono:
fixes a b c :: ennreal
shows "a \<le> c \<Longrightarrow> d \<le> b \<Longrightarrow> a - b \<le> c - d"
by transfer (meson ereal_minus_mono max.mono order_refl)
lemma ennreal_minus_eq_top[simp]: "a - (b::ennreal) = top \<longleftrightarrow> a = top"
by (metis add_top diff_add_cancel_ennreal ennreal_mono_minus ennreal_top_minus zero_le)
lemma ennreal_divide_self[simp]: "a \<noteq> 0 \<Longrightarrow> a < top \<Longrightarrow> a / a = (1::ennreal)"
by (metis mult_1 mult_divide_eq_ennreal top.not_eq_extremum)
subsection \<open>Coercion from \<^typ>\<open>real\<close> to \<^typ>\<open>ennreal\<close>\<close>
lift_definition ennreal :: "real \<Rightarrow> ennreal" is "sup 0 \<circ> ereal"
by simp
declare [[coercion ennreal]]
lemma ennreal_cong: "x = y \<Longrightarrow> ennreal x = ennreal y"
by simp
lemma ennreal_cases[cases type: ennreal]:
fixes x :: ennreal
obtains (real) r :: real where "0 \<le> r" "x = ennreal r" | (top) "x = top"
apply transfer
subgoal for x thesis
by (cases x) (auto simp: max.absorb2 top_ereal_def)
done
lemmas ennreal2_cases = ennreal_cases[case_product ennreal_cases]
lemmas ennreal3_cases = ennreal_cases[case_product ennreal2_cases]
lemma ennreal_neq_top[simp]: "ennreal r \<noteq> top"
by transfer (simp add: top_ereal_def zero_ereal_def flip: ereal_max)
lemma top_neq_ennreal[simp]: "top \<noteq> ennreal r"
using ennreal_neq_top[of r] by (auto simp del: ennreal_neq_top)
lemma ennreal_less_top[simp]: "ennreal x < top"
by transfer (simp add: top_ereal_def max_def)
lemma ennreal_neg: "x \<le> 0 \<Longrightarrow> ennreal x = 0"
by transfer (simp add: max.absorb1)
lemma ennreal_inj[simp]:
"0 \<le> a \<Longrightarrow> 0 \<le> b \<Longrightarrow> ennreal a = ennreal b \<longleftrightarrow> a = b"
by (transfer fixing: a b) (auto simp: max_absorb2)
lemma ennreal_le_iff[simp]: "0 \<le> y \<Longrightarrow> ennreal x \<le> ennreal y \<longleftrightarrow> x \<le> y"
by (auto simp: ennreal_def zero_ereal_def less_eq_ennreal.abs_eq eq_onp_def split: split_max)
lemma le_ennreal_iff: "0 \<le> r \<Longrightarrow> x \<le> ennreal r \<longleftrightarrow> (\<exists>q\<ge>0. x = ennreal q \<and> q \<le> r)"
by (cases x) (auto simp: top_unique)
lemma ennreal_less_iff: "0 \<le> r \<Longrightarrow> ennreal r < ennreal q \<longleftrightarrow> r < q"
unfolding not_le[symmetric] by auto
lemma ennreal_eq_zero_iff[simp]: "0 \<le> x \<Longrightarrow> ennreal x = 0 \<longleftrightarrow> x = 0"
by transfer (auto simp: max_absorb2)
lemma ennreal_less_zero_iff[simp]: "0 < ennreal x \<longleftrightarrow> 0 < x"
by transfer (auto simp: max_def)
lemma ennreal_lessI: "0 < q \<Longrightarrow> r < q \<Longrightarrow> ennreal r < ennreal q"
by (cases "0 \<le> r") (auto simp: ennreal_less_iff ennreal_neg)
lemma ennreal_leI: "x \<le> y \<Longrightarrow> ennreal x \<le> ennreal y"
by (cases "0 \<le> y") (auto simp: ennreal_neg)
lemma enn2ereal_ennreal[simp]: "0 \<le> x \<Longrightarrow> enn2ereal (ennreal x) = x"
by transfer (simp add: max_absorb2)
lemma e2ennreal_enn2ereal[simp]: "e2ennreal (enn2ereal x) = x"
by (simp add: e2ennreal_def max_absorb2 ennreal.enn2ereal_inverse)
lemma enn2ereal_e2ennreal: "x \<ge> 0 \<Longrightarrow> enn2ereal (e2ennreal x) = x"
by (metis e2ennreal_enn2ereal ereal_ennreal_cases not_le)
lemma e2ennreal_ereal [simp]: "e2ennreal (ereal x) = ennreal x"
by (metis e2ennreal_def enn2ereal_inverse ennreal.rep_eq sup_ereal_def)
lemma ennreal_0[simp]: "ennreal 0 = 0"
by (simp add: ennreal_def max.absorb1 zero_ennreal.abs_eq)
lemma ennreal_1[simp]: "ennreal 1 = 1"
by transfer (simp add: max_absorb2)
lemma ennreal_eq_0_iff: "ennreal x = 0 \<longleftrightarrow> x \<le> 0"
by (cases "0 \<le> x") (auto simp: ennreal_neg)
lemma ennreal_le_iff2: "ennreal x \<le> ennreal y \<longleftrightarrow> ((0 \<le> y \<and> x \<le> y) \<or> (x \<le> 0 \<and> y \<le> 0))"
by (cases "0 \<le> y") (auto simp: ennreal_eq_0_iff ennreal_neg)
lemma ennreal_eq_1[simp]: "ennreal x = 1 \<longleftrightarrow> x = 1"
by (cases "0 \<le> x") (auto simp: ennreal_neg simp flip: ennreal_1)
lemma ennreal_le_1[simp]: "ennreal x \<le> 1 \<longleftrightarrow> x \<le> 1"
by (cases "0 \<le> x") (auto simp: ennreal_neg simp flip: ennreal_1)
lemma ennreal_ge_1[simp]: "ennreal x \<ge> 1 \<longleftrightarrow> x \<ge> 1"
by (cases "0 \<le> x") (auto simp: ennreal_neg simp flip: ennreal_1)
lemma one_less_ennreal[simp]: "1 < ennreal x \<longleftrightarrow> 1 < x"
by (meson ennreal_le_1 linorder_not_le)
lemma ennreal_plus[simp]:
"0 \<le> a \<Longrightarrow> 0 \<le> b \<Longrightarrow> ennreal (a + b) = ennreal a + ennreal b"
by (transfer fixing: a b) (auto simp: max_absorb2)
lemma add_mono_ennreal: "x < ennreal y \<Longrightarrow> x' < ennreal y' \<Longrightarrow> x + x' < ennreal (y + y')"
by (metis (full_types) add_strict_mono ennreal_less_zero_iff ennreal_plus less_le not_less zero_le)
lemma sum_ennreal[simp]: "(\<And>i. i \<in> I \<Longrightarrow> 0 \<le> f i) \<Longrightarrow> (\<Sum>i\<in>I. ennreal (f i)) = ennreal (sum f I)"
by (induction I rule: infinite_finite_induct) (auto simp: sum_nonneg)
lemma sum_list_ennreal[simp]:
assumes "\<And>x. x \<in> set xs \<Longrightarrow> f x \<ge> 0"
shows "sum_list (map (\<lambda>x. ennreal (f x)) xs) = ennreal (sum_list (map f xs))"
using assms
proof (induction xs)
case (Cons x xs)
from Cons have "(\<Sum>x\<leftarrow>x # xs. ennreal (f x)) = ennreal (f x) + ennreal (sum_list (map f xs))"
by simp
also from Cons.prems have "\<dots> = ennreal (f x + sum_list (map f xs))"
by (intro ennreal_plus [symmetric] sum_list_nonneg) auto
finally show ?case by simp
qed simp_all
lemma ennreal_of_nat_eq_real_of_nat: "of_nat i = ennreal (of_nat i)"
by (induction i) simp_all
lemma of_nat_le_ennreal_iff[simp]: "0 \<le> r \<Longrightarrow> of_nat i \<le> ennreal r \<longleftrightarrow> of_nat i \<le> r"
by (simp add: ennreal_of_nat_eq_real_of_nat)
lemma ennreal_le_of_nat_iff[simp]: "ennreal r \<le> of_nat i \<longleftrightarrow> r \<le> of_nat i"
by (simp add: ennreal_of_nat_eq_real_of_nat)
lemma ennreal_indicator: "ennreal (indicator A x) = indicator A x"
by (auto split: split_indicator)
lemma ennreal_numeral[simp]: "ennreal (numeral n) = numeral n"
using ennreal_of_nat_eq_real_of_nat[of "numeral n"] by simp
lemma ennreal_less_numeral_iff [simp]: "ennreal n < numeral w \<longleftrightarrow> n < numeral w"
by (metis ennreal_less_iff ennreal_numeral less_le not_less zero_less_numeral)
lemma numeral_less_ennreal_iff [simp]: "numeral w < ennreal n \<longleftrightarrow> numeral w < n"
using ennreal_less_iff zero_le_numeral by fastforce
lemma numeral_le_ennreal_iff [simp]: "numeral n \<le> ennreal m \<longleftrightarrow> numeral n \<le> m"
by (metis not_le ennreal_less_numeral_iff)
lemma min_ennreal: "0 \<le> x \<Longrightarrow> 0 \<le> y \<Longrightarrow> min (ennreal x) (ennreal y) = ennreal (min x y)"
by (auto split: split_min)
lemma ennreal_half[simp]: "ennreal (1/2) = inverse 2"
by transfer (simp add: max.absorb2)
lemma ennreal_minus: "0 \<le> q \<Longrightarrow> ennreal r - ennreal q = ennreal (r - q)"
by transfer
(simp add: max.absorb2 zero_ereal_def flip: ereal_max)
lemma ennreal_minus_top[simp]: "ennreal a - top = 0"
by (simp add: minus_top_ennreal)
lemma e2eenreal_enn2ereal_diff [simp]:
"e2ennreal(enn2ereal x - enn2ereal y) = x - y" for x y
by (cases x, cases y, auto simp add: ennreal_minus e2ennreal_neg)
lemma ennreal_mult: "0 \<le> a \<Longrightarrow> 0 \<le> b \<Longrightarrow> ennreal (a * b) = ennreal a * ennreal b"
by transfer (simp add: max_absorb2)
lemma ennreal_mult': "0 \<le> a \<Longrightarrow> ennreal (a * b) = ennreal a * ennreal b"
by (cases "0 \<le> b") (auto simp: ennreal_mult ennreal_neg mult_nonneg_nonpos)
lemma indicator_mult_ennreal: "indicator A x * ennreal r = ennreal (indicator A x * r)"
by (simp split: split_indicator)
lemma ennreal_mult'': "0 \<le> b \<Longrightarrow> ennreal (a * b) = ennreal a * ennreal b"
by (cases "0 \<le> a") (auto simp: ennreal_mult ennreal_neg mult_nonpos_nonneg)
lemma numeral_mult_ennreal: "0 \<le> x \<Longrightarrow> numeral b * ennreal x = ennreal (numeral b * x)"
by (simp add: ennreal_mult)
lemma ennreal_power: "0 \<le> r \<Longrightarrow> ennreal r ^ n = ennreal (r ^ n)"
by (induction n) (auto simp: ennreal_mult)
lemma power_eq_top_ennreal: "x ^ n = top \<longleftrightarrow> (n \<noteq> 0 \<and> (x::ennreal) = top)"
by (cases x rule: ennreal_cases)
(auto simp: ennreal_power top_power_ennreal)
lemma inverse_ennreal: "0 < r \<Longrightarrow> inverse (ennreal r) = ennreal (inverse r)"
by transfer (simp add: max.absorb2)
lemma divide_ennreal: "0 \<le> r \<Longrightarrow> 0 < q \<Longrightarrow> ennreal r / ennreal q = ennreal (r / q)"
by (simp add: divide_ennreal_def inverse_ennreal ennreal_mult[symmetric] inverse_eq_divide)
lemma ennreal_inverse_power: "inverse (x ^ n :: ennreal) = inverse x ^ n"
proof (cases x rule: ennreal_cases)
case top with power_eq_top_ennreal[of x n] show ?thesis
by (cases "n = 0") auto
next
case (real r) then show ?thesis
proof (cases "x = 0")
case False then show ?thesis
by (smt (verit, best) ennreal_0 ennreal_power inverse_ennreal
inverse_nonnegative_iff_nonnegative power_inverse real zero_less_power)
qed (simp add: top_power_ennreal)
qed
lemma power_divide_distrib_ennreal [algebra_simps]:
"(x / y) ^ n = x ^ n / (y ^ n :: ennreal)"
by (simp add: divide_ennreal_def algebra_simps ennreal_inverse_power)
lemma ennreal_divide_numeral: "0 \<le> x \<Longrightarrow> ennreal x / numeral b = ennreal (x / numeral b)"
by (subst divide_ennreal[symmetric]) auto
lemma prod_ennreal: "(\<And>i. i \<in> A \<Longrightarrow> 0 \<le> f i) \<Longrightarrow> (\<Prod>i\<in>A. ennreal (f i)) = ennreal (prod f A)"
by (induction A rule: infinite_finite_induct)
(auto simp: ennreal_mult prod_nonneg)
lemma prod_mono_ennreal:
assumes "\<And>x. x \<in> A \<Longrightarrow> f x \<le> (g x :: ennreal)"
shows "prod f A \<le> prod g A"
using assms by (induction A rule: infinite_finite_induct) (auto intro!: mult_mono)
lemma mult_right_ennreal_cancel: "a * ennreal c = b * ennreal c \<longleftrightarrow> (a = b \<or> c \<le> 0)"
proof (cases "0 \<le> c")
case True
then show ?thesis
by (metis ennreal_eq_0_iff ennreal_mult_right_cong ennreal_neq_top mult_divide_eq_ennreal)
qed (use ennreal_neg in auto)
lemma ennreal_le_epsilon:
"(\<And>e::real. y < top \<Longrightarrow> 0 < e \<Longrightarrow> x \<le> y + ennreal e) \<Longrightarrow> x \<le> y"
apply (cases y rule: ennreal_cases)
apply (cases x rule: ennreal_cases)
apply (auto simp flip: ennreal_plus simp add: top_unique intro: zero_less_one field_le_epsilon)
done
lemma ennreal_rat_dense:
fixes x y :: ennreal
shows "x < y \<Longrightarrow> \<exists>r::rat. x < real_of_rat r \<and> real_of_rat r < y"
proof transfer
fix x y :: ereal assume xy: "0 \<le> x" "0 \<le> y" "x < y"
moreover
from ereal_dense3[OF \<open>x < y\<close>]
obtain r where r: "x < ereal (real_of_rat r)" "ereal (real_of_rat r) < y"
by auto
then have "0 \<le> r"
using le_less_trans[OF \<open>0 \<le> x\<close> \<open>x < ereal (real_of_rat r)\<close>] by auto
with r show "\<exists>r. x < (sup 0 \<circ> ereal) (real_of_rat r) \<and> (sup 0 \<circ> ereal) (real_of_rat r) < y"
by (intro exI[of _ r]) (auto simp: max_absorb2)
qed
lemma ennreal_Ex_less_of_nat: "(x::ennreal) < top \<Longrightarrow> \<exists>n. x < of_nat n"
by (cases x rule: ennreal_cases)
(auto simp: ennreal_of_nat_eq_real_of_nat ennreal_less_iff reals_Archimedean2)
subsection \<open>Coercion from \<^typ>\<open>ennreal\<close> to \<^typ>\<open>real\<close>\<close>
definition "enn2real x = real_of_ereal (enn2ereal x)"
lemma enn2real_nonneg[simp]: "0 \<le> enn2real x"
by (auto simp: enn2real_def intro!: real_of_ereal_pos enn2ereal_nonneg)
lemma enn2real_mono: "a \<le> b \<Longrightarrow> b < top \<Longrightarrow> enn2real a \<le> enn2real b"
by (auto simp add: enn2real_def less_eq_ennreal.rep_eq intro!: real_of_ereal_positive_mono enn2ereal_nonneg)
lemma enn2real_of_nat[simp]: "enn2real (of_nat n) = n"
by (auto simp: enn2real_def)
lemma enn2real_ennreal[simp]: "0 \<le> r \<Longrightarrow> enn2real (ennreal r) = r"
by (simp add: enn2real_def)
lemma ennreal_enn2real[simp]: "r < top \<Longrightarrow> ennreal (enn2real r) = r"
by (cases r rule: ennreal_cases) auto
lemma real_of_ereal_enn2ereal[simp]: "real_of_ereal (enn2ereal x) = enn2real x"
by (simp add: enn2real_def)
lemma enn2real_top[simp]: "enn2real top = 0"
unfolding enn2real_def top_ennreal.rep_eq top_ereal_def by simp
lemma enn2real_0[simp]: "enn2real 0 = 0"
unfolding enn2real_def zero_ennreal.rep_eq by simp
lemma enn2real_1[simp]: "enn2real 1 = 1"
unfolding enn2real_def one_ennreal.rep_eq by simp
lemma enn2real_numeral[simp]: "enn2real (numeral n) = (numeral n)"
unfolding enn2real_def by simp
lemma enn2real_mult: "enn2real (a * b) = enn2real a * enn2real b"
unfolding enn2real_def
by (simp del: real_of_ereal_enn2ereal add: times_ennreal.rep_eq)
lemma enn2real_leI: "0 \<le> B \<Longrightarrow> x \<le> ennreal B \<Longrightarrow> enn2real x \<le> B"
by (cases x rule: ennreal_cases) (auto simp: top_unique)
lemma enn2real_positive_iff: "0 < enn2real x \<longleftrightarrow> (0 < x \<and> x < top)"
by (cases x rule: ennreal_cases) auto
lemma enn2real_eq_posreal_iff[simp]: "c > 0 \<Longrightarrow> enn2real x = c \<longleftrightarrow> x = c"
by (cases x) auto
lemma ennreal_enn2real_if: "ennreal (enn2real r) = (if r = top then 0 else r)"
by(auto intro!: ennreal_enn2real simp add: less_top)
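(* Illustrative examples (editor's sketch, not part of the original theory):
   the [simp] rules above evaluate the coercion on concrete values, including
   the degenerate value at top. *)
lemma "enn2real (ennreal 2) = 2"
  by simp
lemma "enn2real top = 0"
  by simp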
subsection \<open>Coercion from \<^typ>\<open>enat\<close> to \<^typ>\<open>ennreal\<close>\<close>
definition ennreal_of_enat :: "enat \<Rightarrow> ennreal"
where
"ennreal_of_enat n = (case n of \<infinity> \<Rightarrow> top | enat n \<Rightarrow> of_nat n)"
declare [[coercion ennreal_of_enat]]
declare [[coercion "of_nat :: nat \<Rightarrow> ennreal"]]
lemma ennreal_of_enat_infty[simp]: "ennreal_of_enat \<infinity> = \<infinity>"
by (simp add: ennreal_of_enat_def)
lemma ennreal_of_enat_enat[simp]: "ennreal_of_enat (enat n) = of_nat n"
by (simp add: ennreal_of_enat_def)
lemma ennreal_of_enat_0[simp]: "ennreal_of_enat 0 = 0"
using ennreal_of_enat_enat[of 0] unfolding enat_0 by simp
lemma ennreal_of_enat_1[simp]: "ennreal_of_enat 1 = 1"
using ennreal_of_enat_enat[of 1] unfolding enat_1 by simp
lemma ennreal_top_neq_of_nat[simp]: "(top::ennreal) \<noteq> of_nat i"
using ennreal_of_nat_neq_top[of i] by metis
lemma ennreal_of_enat_inj[simp]: "ennreal_of_enat i = ennreal_of_enat j \<longleftrightarrow> i = j"
by (cases i j rule: enat.exhaust[case_product enat.exhaust]) auto
lemma ennreal_of_enat_le_iff[simp]: "ennreal_of_enat m \<le> ennreal_of_enat n \<longleftrightarrow> m \<le> n"
by (auto simp: ennreal_of_enat_def top_unique split: enat.split)
lemma of_nat_less_ennreal_of_nat[simp]: "of_nat n \<le> ennreal_of_enat x \<longleftrightarrow> of_nat n \<le> x"
by (cases x) (auto simp: of_nat_eq_enat)
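(* Illustrative examples (editor's sketch, not part of the original theory):
   on finite values the coercion agrees with the of_nat embedding, and it
   maps \<infinity> to \<infinity>. *)
lemma "ennreal_of_enat (enat 3) = 3"
  by simp
lemma "ennreal_of_enat \<infinity> = (\<infinity>::ennreal)"
  by simp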
lemma ennreal_of_enat_Sup: "ennreal_of_enat (Sup X) = (SUP x\<in>X. ennreal_of_enat x)"
proof -
have "ennreal_of_enat (Sup X) \<le> (SUP x \<in> X. ennreal_of_enat x)"
unfolding Sup_enat_def
proof (clarsimp, intro conjI impI)
fix x assume "finite X" "X \<noteq> {}"
then show "ennreal_of_enat (Max X) \<le> (SUP x \<in> X. ennreal_of_enat x)"
by (intro SUP_upper Max_in)
next
assume "infinite X" "X \<noteq> {}"
have "\<exists>y\<in>X. r < ennreal_of_enat y" if r: "r < top" for r
proof -
obtain n where n: "r < of_nat n"
using ennreal_Ex_less_of_nat[OF r] ..
have "\<not> (X \<subseteq> enat ` {.. n})"
using \<open>infinite X\<close> by (auto dest: finite_subset)
then obtain x where x: "x \<in> X" "x \<notin> enat ` {..n}"
by blast
then have "of_nat n \<le> x"
by (cases x) (auto simp: of_nat_eq_enat)
with x show ?thesis
by (auto intro!: bexI[of _ x] less_le_trans[OF n])
qed
then have "(SUP x \<in> X. ennreal_of_enat x) = top"
by simp
then show "top \<le> (SUP x \<in> X. ennreal_of_enat x)"
unfolding top_unique by simp
qed
then show ?thesis
by (auto intro!: antisym Sup_least intro: Sup_upper)
qed
lemma ennreal_of_enat_eSuc[simp]: "ennreal_of_enat (eSuc x) = 1 + ennreal_of_enat x"
by (cases x) (auto simp: eSuc_enat)
(* Contributed by Dominique Unruh *)
lemma ennreal_of_enat_plus[simp]: \<open>ennreal_of_enat (a+b) = ennreal_of_enat a + ennreal_of_enat b\<close>
apply (induct a)
apply (metis enat.exhaust ennreal_add_eq_top ennreal_of_enat_enat ennreal_of_enat_infty infinity_ennreal_def of_nat_add plus_enat_simps(1) plus_eq_infty_iff_enat)
apply simp
done
(* Contributed by Dominique Unruh *)
lemma sum_ennreal_of_enat[simp]: "(\<Sum>i\<in>I. ennreal_of_enat (f i)) = ennreal_of_enat (sum f I)"
by (induct I rule: infinite_finite_induct) (auto simp: sum_nonneg)
subsection \<open>Topology on \<^typ>\<open>ennreal\<close>\<close>
lemma enn2ereal_Iio: "enn2ereal -` {..<a} = (if 0 \<le> a then {..< e2ennreal a} else {})"
using enn2ereal_nonneg
by (cases a rule: ereal_ennreal_cases)
(auto simp add: vimage_def set_eq_iff ennreal.enn2ereal_inverse less_ennreal.rep_eq e2ennreal_def max_absorb2
simp del: enn2ereal_nonneg
intro: le_less_trans less_imp_le)
lemma enn2ereal_Ioi: "enn2ereal -` {a <..} = (if 0 \<le> a then {e2ennreal a <..} else UNIV)"
by (cases a rule: ereal_ennreal_cases)
(auto simp add: vimage_def set_eq_iff ennreal.enn2ereal_inverse less_ennreal.rep_eq e2ennreal_def max_absorb2
intro: less_le_trans)
instantiation ennreal :: linear_continuum_topology
begin
definition open_ennreal :: "ennreal set \<Rightarrow> bool"
where "(open :: ennreal set \<Rightarrow> bool) = generate_topology (range lessThan \<union> range greaterThan)"
instance
proof
show "\<exists>a b::ennreal. a \<noteq> b"
using zero_neq_one by (intro exI)
show "\<And>x y::ennreal. x < y \<Longrightarrow> \<exists>z>x. z < y"
proof transfer
fix x y :: ereal
assume *: "0 \<le> x"
assume "x < y"
from dense[OF this] obtain z where "x < z \<and> z < y" ..
with * show "\<exists>z\<in>Collect ((\<le>) 0). x < z \<and> z < y"
by (intro bexI[of _ z]) auto
qed
qed (rule open_ennreal_def)
end
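(* Illustrative example (editor's sketch, not part of the original theory):
   the instance above equips ennreal with its order topology, so generic
   facts such as open_greaterThanLessThan become available. *)
lemma "open {0 <..< (1::ennreal)}"
  by (simp add: open_greaterThanLessThan)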
lemma continuous_on_e2ennreal: "continuous_on A e2ennreal"
proof (rule continuous_on_subset)
show "continuous_on ({0..} \<union> {..0}) e2ennreal"
proof (rule continuous_on_closed_Un)
show "continuous_on {0 ..} e2ennreal"
by (rule continuous_onI_mono)
(auto simp add: less_eq_ennreal.abs_eq eq_onp_def enn2ereal_range)
show "continuous_on {.. 0} e2ennreal"
by (subst continuous_on_cong[OF refl, of _ _ "\<lambda>_. 0"])
(auto simp add: e2ennreal_neg continuous_on_const)
qed auto
show "A \<subseteq> {0..} \<union> {..0::ereal}"
by auto
qed
lemma continuous_at_e2ennreal: "continuous (at x within A) e2ennreal"
by (rule continuous_on_imp_continuous_within[OF continuous_on_e2ennreal, of _ UNIV]) auto
lemma continuous_on_enn2ereal: "continuous_on UNIV enn2ereal"
by (rule continuous_on_generate_topology[OF open_generated_order])
(auto simp add: enn2ereal_Iio enn2ereal_Ioi)
lemma continuous_at_enn2ereal: "continuous (at x within A) enn2ereal"
by (rule continuous_on_imp_continuous_within[OF continuous_on_enn2ereal]) auto
lemma sup_continuous_e2ennreal[order_continuous_intros]:
assumes f: "sup_continuous f" shows "sup_continuous (\<lambda>x. e2ennreal (f x))"
proof (rule sup_continuous_compose[OF _ f])
show "sup_continuous e2ennreal"
by (simp add: continuous_at_e2ennreal continuous_at_left_imp_sup_continuous e2ennreal_mono mono_def)
qed
lemma sup_continuous_enn2ereal[order_continuous_intros]:
assumes f: "sup_continuous f" shows "sup_continuous (\<lambda>x. enn2ereal (f x))"
proof (rule sup_continuous_compose[OF _ f])
show "sup_continuous enn2ereal"
by (simp add: continuous_at_enn2ereal continuous_at_left_imp_sup_continuous less_eq_ennreal.rep_eq mono_def)
qed
lemma sup_continuous_mult_left_ennreal':
fixes c :: "ennreal"
shows "sup_continuous (\<lambda>x. c * x)"
unfolding sup_continuous_def
by transfer (auto simp: SUP_ereal_mult_left max.absorb2 SUP_upper2)
lemma sup_continuous_mult_left_ennreal[order_continuous_intros]:
"sup_continuous f \<Longrightarrow> sup_continuous (\<lambda>x. c * f x :: ennreal)"
by (rule sup_continuous_compose[OF sup_continuous_mult_left_ennreal'])
lemma sup_continuous_mult_right_ennreal[order_continuous_intros]:
"sup_continuous f \<Longrightarrow> sup_continuous (\<lambda>x. f x * c :: ennreal)"
using sup_continuous_mult_left_ennreal[of f c] by (simp add: mult.commute)
lemma sup_continuous_divide_ennreal[order_continuous_intros]:
fixes f g :: "'a::complete_lattice \<Rightarrow> ennreal"
shows "sup_continuous f \<Longrightarrow> sup_continuous (\<lambda>x. f x / c)"
unfolding divide_ennreal_def by (rule sup_continuous_mult_right_ennreal)
lemma transfer_enn2ereal_continuous_on [transfer_rule]:
"rel_fun (=) (rel_fun (rel_fun (=) pcr_ennreal) (=)) continuous_on continuous_on"
proof -
have "continuous_on A f" if "continuous_on A (\<lambda>x. enn2ereal (f x))" for A and f :: "'a \<Rightarrow> ennreal"
using continuous_on_compose2[OF continuous_on_e2ennreal[of "{0..}"] that]
by (auto simp: ennreal.enn2ereal_inverse subset_eq e2ennreal_def max_absorb2)
moreover
have "continuous_on A (\<lambda>x. enn2ereal (f x))" if "continuous_on A f" for A and f :: "'a \<Rightarrow> ennreal"
using continuous_on_compose2[OF continuous_on_enn2ereal that] by auto
ultimately
show ?thesis
by (auto simp add: rel_fun_def ennreal.pcr_cr_eq cr_ennreal_def)
qed
lemma transfer_sup_continuous[transfer_rule]:
"(rel_fun (rel_fun (=) pcr_ennreal) (=)) sup_continuous sup_continuous"
proof (safe intro!: rel_funI dest!: rel_fun_eq_pcr_ennreal[THEN iffD1])
show "sup_continuous (enn2ereal \<circ> f) \<Longrightarrow> sup_continuous f" for f :: "'a \<Rightarrow> _"
using sup_continuous_e2ennreal[of "enn2ereal \<circ> f"] by simp
show "sup_continuous f \<Longrightarrow> sup_continuous (enn2ereal \<circ> f)" for f :: "'a \<Rightarrow> _"
using sup_continuous_enn2ereal[of f] by (simp add: comp_def)
qed
lemma continuous_on_ennreal[tendsto_intros]:
"continuous_on A f \<Longrightarrow> continuous_on A (\<lambda>x. ennreal (f x))"
by transfer (auto intro!: continuous_on_max continuous_on_const continuous_on_ereal)
lemma tendsto_ennrealD:
assumes lim: "((\<lambda>x. ennreal (f x)) \<longlongrightarrow> ennreal x) F"
assumes *: "\<forall>\<^sub>F x in F. 0 \<le> f x" and x: "0 \<le> x"
shows "(f \<longlongrightarrow> x) F"
proof -
have "((\<lambda>x. enn2ereal (ennreal (f x))) \<longlongrightarrow> enn2ereal (ennreal x)) F
\<longleftrightarrow> (f \<longlongrightarrow> enn2ereal (ennreal x)) F"
using "*" eventually_mono
by (intro tendsto_cong) fastforce
then show ?thesis
using assms(1) continuous_at_enn2ereal isCont_tendsto_compose x by fastforce
qed
lemma tendsto_ennreal_iff [simp]:
\<open>((\<lambda>x. ennreal (f x)) \<longlongrightarrow> ennreal x) F \<longleftrightarrow> (f \<longlongrightarrow> x) F\<close> (is \<open>?P \<longleftrightarrow> ?Q\<close>)
if \<open>\<forall>\<^sub>F x in F. 0 \<le> f x\<close> \<open>0 \<le> x\<close>
proof
assume \<open>?P\<close>
then show \<open>?Q\<close>
using that by (rule tendsto_ennrealD)
next
assume \<open>?Q\<close>
have \<open>continuous_on UNIV ereal\<close>
using continuous_on_ereal [of _ id] by simp
then have \<open>continuous_on UNIV (e2ennreal \<circ> ereal)\<close>
by (rule continuous_on_compose) (simp_all add: continuous_on_e2ennreal)
then have \<open>((\<lambda>x. (e2ennreal \<circ> ereal) (f x)) \<longlongrightarrow> (e2ennreal \<circ> ereal) x) F\<close>
using \<open>?Q\<close> by (rule continuous_on_tendsto_compose) simp_all
then show \<open>?P\<close>
by (simp flip: e2ennreal_ereal)
qed
lemma tendsto_enn2ereal_iff[simp]: "((\<lambda>i. enn2ereal (f i)) \<longlongrightarrow> enn2ereal x) F \<longleftrightarrow> (f \<longlongrightarrow> x) F"
using continuous_on_enn2ereal[THEN continuous_on_tendsto_compose, of f x F]
continuous_on_e2ennreal[THEN continuous_on_tendsto_compose, of "\<lambda>x. enn2ereal (f x)" "enn2ereal x" F UNIV]
by auto
lemma ennreal_tendsto_0_iff: "(\<And>n. f n \<ge> 0) \<Longrightarrow> ((\<lambda>n. ennreal (f n)) \<longlonglongrightarrow> 0) \<longleftrightarrow> (f \<longlonglongrightarrow> 0)"
by (metis (mono_tags) ennreal_0 eventuallyI order_refl tendsto_ennreal_iff)
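(* Illustrative example (editor's sketch, not part of the original theory):
   transferring a real limit to ennreal via ennreal_tendsto_0_iff.  The
   auxiliary fact LIMSEQ_realpow_zero is assumed to be the standard library
   lemma for geometric sequences; its name may vary between versions. *)
lemma "(\<lambda>n. ennreal ((1 / 2) ^ n)) \<longlonglongrightarrow> 0"
proof -
  have "(\<lambda>n. (1 / 2 :: real) ^ n) \<longlonglongrightarrow> 0"
    by (rule LIMSEQ_realpow_zero) auto
  then show ?thesis
    by (simp add: ennreal_tendsto_0_iff)
qed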
lemma continuous_on_add_ennreal:
fixes f g :: "'a::topological_space \<Rightarrow> ennreal"
shows "continuous_on A f \<Longrightarrow> continuous_on A g \<Longrightarrow> continuous_on A (\<lambda>x. f x + g x)"
by (transfer fixing: A) (auto intro!: tendsto_add_ereal_nonneg simp: continuous_on_def)
lemma continuous_on_inverse_ennreal[continuous_intros]:
fixes f :: "'a::topological_space \<Rightarrow> ennreal"
shows "continuous_on A f \<Longrightarrow> continuous_on A (\<lambda>x. inverse (f x))"
proof (transfer fixing: A)
show "pred_fun top ((\<le>) 0) f \<Longrightarrow> continuous_on A (\<lambda>x. inverse (f x))" if "continuous_on A f"
for f :: "'a \<Rightarrow> ereal"
using continuous_on_compose2[OF continuous_on_inverse_ereal that] by (auto simp: subset_eq)
qed
instance ennreal :: topological_comm_monoid_add
proof
show "((\<lambda>x. fst x + snd x) \<longlongrightarrow> a + b) (nhds a \<times>\<^sub>F nhds b)" for a b :: ennreal
using continuous_on_add_ennreal[of UNIV fst snd]
using tendsto_at_iff_tendsto_nhds[symmetric, of "\<lambda>x::(ennreal \<times> ennreal). fst x + snd x"]
by (auto simp: continuous_on_eq_continuous_at)
(simp add: isCont_def nhds_prod[symmetric])
qed
lemma sup_continuous_add_ennreal[order_continuous_intros]:
fixes f g :: "'a::complete_lattice \<Rightarrow> ennreal"
shows "sup_continuous f \<Longrightarrow> sup_continuous g \<Longrightarrow> sup_continuous (\<lambda>x. f x + g x)"
by transfer (auto intro!: sup_continuous_add)
lemma ennreal_suminf_lessD: "(\<Sum>i. f i :: ennreal) < x \<Longrightarrow> f i < x"
using le_less_trans[OF sum_le_suminf[OF summableI, of "{i}" f]] by simp
lemma sums_ennreal[simp]: "(\<And>i. 0 \<le> f i) \<Longrightarrow> 0 \<le> x \<Longrightarrow> (\<lambda>i. ennreal (f i)) sums ennreal x \<longleftrightarrow> f sums x"
unfolding sums_def by (simp add: always_eventually sum_nonneg)
lemma summable_suminf_not_top: "(\<And>i. 0 \<le> f i) \<Longrightarrow> (\<Sum>i. ennreal (f i)) \<noteq> top \<Longrightarrow> summable f"
using summable_sums[OF summableI, of "\<lambda>i. ennreal (f i)"]
by (cases "\<Sum>i. ennreal (f i)" rule: ennreal_cases)
(auto simp: summable_def)
lemma suminf_ennreal[simp]:
"(\<And>i. 0 \<le> f i) \<Longrightarrow> (\<Sum>i. ennreal (f i)) \<noteq> top \<Longrightarrow> (\<Sum>i. ennreal (f i)) = ennreal (\<Sum>i. f i)"
by (rule sums_unique[symmetric]) (simp add: summable_suminf_not_top suminf_nonneg summable_sums)
lemma sums_enn2ereal[simp]: "(\<lambda>i. enn2ereal (f i)) sums enn2ereal x \<longleftrightarrow> f sums x"
unfolding sums_def by (simp add: always_eventually sum_nonneg)
lemma suminf_enn2ereal[simp]: "(\<Sum>i. enn2ereal (f i)) = enn2ereal (suminf f)"
by (rule sums_unique[symmetric]) (simp add: summable_sums)
lemma transfer_e2ennreal_suminf [transfer_rule]: "rel_fun (rel_fun (=) pcr_ennreal) pcr_ennreal suminf suminf"
by (auto simp: rel_funI rel_fun_eq_pcr_ennreal comp_def)
lemma ennreal_suminf_cmult[simp]: "(\<Sum>i. r * f i) = r * (\<Sum>i. f i::ennreal)"
by transfer (auto intro!: suminf_cmult_ereal)
lemma ennreal_suminf_multc[simp]: "(\<Sum>i. f i * r) = (\<Sum>i. f i::ennreal) * r"
using ennreal_suminf_cmult[of r f] by (simp add: ac_simps)
lemma ennreal_suminf_divide[simp]: "(\<Sum>i. f i / r) = (\<Sum>i. f i::ennreal) / r"
by (simp add: divide_ennreal_def)
lemma ennreal_suminf_neq_top: "summable f \<Longrightarrow> (\<And>i. 0 \<le> f i) \<Longrightarrow> (\<Sum>i. ennreal (f i)) \<noteq> top"
using sums_ennreal[of f "suminf f"]
by (simp add: suminf_nonneg flip: sums_unique summable_sums_iff del: sums_ennreal)
lemma suminf_ennreal_eq:
"(\<And>i. 0 \<le> f i) \<Longrightarrow> f sums x \<Longrightarrow> (\<Sum>i. ennreal (f i)) = ennreal x"
using suminf_nonneg[of f] sums_unique[of f x]
by (intro sums_unique[symmetric]) (auto simp: summable_sums_iff)
lemma ennreal_suminf_bound_add:
fixes f :: "nat \<Rightarrow> ennreal"
shows "(\<And>N. (\<Sum>n<N. f n) + y \<le> x) \<Longrightarrow> suminf f + y \<le> x"
by transfer (auto intro!: suminf_bound_add)
lemma ennreal_suminf_SUP_eq_directed:
fixes f :: "'a \<Rightarrow> nat \<Rightarrow> ennreal"
assumes *: "\<And>N i j. i \<in> I \<Longrightarrow> j \<in> I \<Longrightarrow> finite N \<Longrightarrow> \<exists>k\<in>I. \<forall>n\<in>N. f i n \<le> f k n \<and> f j n \<le> f k n"
shows "(\<Sum>n. SUP i\<in>I. f i n) = (SUP i\<in>I. \<Sum>n. f i n)"
proof cases
assume "I \<noteq> {}"
then obtain i where "i \<in> I" by auto
from * show ?thesis
by (transfer fixing: I)
(auto simp: max_absorb2 SUP_upper2[OF \<open>i \<in> I\<close>] suminf_nonneg summable_ereal_pos \<open>I \<noteq> {}\<close>
intro!: suminf_SUP_eq_directed)
qed (simp add: bot_ennreal)
lemma INF_ennreal_add_const:
fixes f g :: "nat \<Rightarrow> ennreal"
shows "(INF i. f i + c) = (INF i. f i) + c"
using continuous_at_Inf_mono[of "\<lambda>x. x + c" "f`UNIV"]
using continuous_add[of "at_right (Inf (range f))", of "\<lambda>x. x" "\<lambda>x. c"]
by (auto simp: mono_def image_comp)
lemma INF_ennreal_const_add:
fixes f g :: "nat \<Rightarrow> ennreal"
shows "(INF i. c + f i) = c + (INF i. f i)"
using INF_ennreal_add_const[of f c] by (simp add: ac_simps)
lemma SUP_mult_left_ennreal: "c * (SUP i\<in>I. f i) = (SUP i\<in>I. c * f i ::ennreal)"
proof cases
assume "I \<noteq> {}" then show ?thesis
by transfer (auto simp add: SUP_ereal_mult_left max_absorb2 SUP_upper2)
qed (simp add: bot_ennreal)
lemma SUP_mult_right_ennreal: "(SUP i\<in>I. f i) * c = (SUP i\<in>I. f i * c ::ennreal)"
using SUP_mult_left_ennreal by (simp add: mult.commute)
lemma SUP_divide_ennreal: "(SUP i\<in>I. f i) / c = (SUP i\<in>I. f i / c ::ennreal)"
using SUP_mult_right_ennreal by (simp add: divide_ennreal_def)
lemma ennreal_SUP_of_nat_eq_top: "(SUP x. of_nat x :: ennreal) = top"
proof (intro antisym top_greatest le_SUP_iff[THEN iffD2] allI impI)
fix y :: ennreal assume "y < top"
then obtain r where "y = ennreal r"
by (cases y rule: ennreal_cases) auto
then show "\<exists>i\<in>UNIV. y < of_nat i"
using reals_Archimedean2[of "max 1 r"] zero_less_one
by (simp add: ennreal_Ex_less_of_nat)
qed
lemma ennreal_SUP_eq_top:
fixes f :: "'a \<Rightarrow> ennreal"
assumes "\<And>n. \<exists>i\<in>I. of_nat n \<le> f i"
shows "(SUP i \<in> I. f i) = top"
proof -
have "(SUP x. of_nat x :: ennreal) \<le> (SUP i \<in> I. f i)"
using assms by (auto intro!: SUP_least intro: SUP_upper2)
then show ?thesis
by (auto simp: ennreal_SUP_of_nat_eq_top top_unique)
qed
lemma ennreal_INF_const_minus:
fixes f :: "'a \<Rightarrow> ennreal"
shows "I \<noteq> {} \<Longrightarrow> (SUP x\<in>I. c - f x) = c - (INF x\<in>I. f x)"
by (transfer fixing: I)
(simp add: sup_max[symmetric] SUP_sup_const1 SUP_ereal_minus_right del: sup_ereal_def)
lemma of_nat_Sup_ennreal:
assumes "A \<noteq> {}" "bdd_above A"
shows "of_nat (Sup A) = (SUP a\<in>A. of_nat a :: ennreal)"
proof (intro antisym)
show "(SUP a\<in>A. of_nat a::ennreal) \<le> of_nat (Sup A)"
by (intro SUP_least of_nat_mono) (auto intro: cSup_upper assms)
have "Sup A \<in> A"
using assms by (auto simp: Sup_nat_def bdd_above_nat)
then show "of_nat (Sup A) \<le> (SUP a\<in>A. of_nat a::ennreal)"
by (intro SUP_upper)
qed
lemma ennreal_tendsto_const_minus:
fixes g :: "'a \<Rightarrow> ennreal"
assumes ae: "\<forall>\<^sub>F x in F. g x \<le> c"
assumes g: "((\<lambda>x. c - g x) \<longlongrightarrow> 0) F"
shows "(g \<longlongrightarrow> c) F"
proof (cases c rule: ennreal_cases)
case top with tendsto_unique[OF _ g, of "top"] show ?thesis
by (cases "F = bot") auto
next
case (real r)
then have "\<forall>x. \<exists>q\<ge>0. g x \<le> c \<longrightarrow> (g x = ennreal q \<and> q \<le> r)"
by (auto simp: le_ennreal_iff)
then obtain f where *: "0 \<le> f x" "g x = ennreal (f x)" "f x \<le> r" if "g x \<le> c" for x
by metis
from ae have ae2: "\<forall>\<^sub>F x in F. c - g x = ennreal (r - f x) \<and> f x \<le> r \<and> g x = ennreal (f x) \<and> 0 \<le> f x"
proof eventually_elim
fix x assume "g x \<le> c" with *[of x] \<open>0 \<le> r\<close> show "c - g x = ennreal (r - f x) \<and> f x \<le> r \<and> g x = ennreal (f x) \<and> 0 \<le> f x"
by (auto simp: real ennreal_minus)
qed
with g have "((\<lambda>x. ennreal (r - f x)) \<longlongrightarrow> ennreal 0) F"
by (auto simp add: tendsto_cong eventually_conj_iff)
with ae2 have "((\<lambda>x. r - f x) \<longlongrightarrow> 0) F"
by (subst (asm) tendsto_ennreal_iff) (auto elim: eventually_mono)
then have "(f \<longlongrightarrow> r) F"
by (rule Lim_transform2[OF tendsto_const])
with ae2 have "((\<lambda>x. ennreal (f x)) \<longlongrightarrow> ennreal r) F"
by (subst tendsto_ennreal_iff) (auto elim: eventually_mono simp: real)
with ae2 show ?thesis
by (auto simp: real tendsto_cong eventually_conj_iff)
qed
lemma ennreal_SUP_add:
fixes f g :: "nat \<Rightarrow> ennreal"
shows "incseq f \<Longrightarrow> incseq g \<Longrightarrow> (SUP i. f i + g i) = Sup (f ` UNIV) + Sup (g ` UNIV)"
unfolding incseq_def le_fun_def
by transfer
(simp add: SUP_ereal_add incseq_def le_fun_def max_absorb2 SUP_upper2)
lemma ennreal_SUP_sum:
fixes f :: "'a \<Rightarrow> nat \<Rightarrow> ennreal"
shows "(\<And>i. i \<in> I \<Longrightarrow> incseq (f i)) \<Longrightarrow> (SUP n. \<Sum>i\<in>I. f i n) = (\<Sum>i\<in>I. SUP n. f i n)"
unfolding incseq_def
by transfer
(simp add: SUP_ereal_sum incseq_def SUP_upper2 max_absorb2 sum_nonneg)
lemma ennreal_liminf_minus:
fixes f :: "nat \<Rightarrow> ennreal"
shows "(\<And>n. f n \<le> c) \<Longrightarrow> liminf (\<lambda>n. c - f n) = c - limsup f"
apply transfer
apply (simp add: ereal_diff_positive liminf_ereal_cminus)
by (metis max.absorb2 ereal_diff_positive Limsup_bounded eventually_sequentiallyI)
lemma ennreal_continuous_on_cmult:
"(c::ennreal) < top \<Longrightarrow> continuous_on A f \<Longrightarrow> continuous_on A (\<lambda>x. c * f x)"
by (transfer fixing: A) (auto intro: continuous_on_cmult_ereal)
lemma ennreal_tendsto_cmult:
"(c::ennreal) < top \<Longrightarrow> (f \<longlongrightarrow> x) F \<Longrightarrow> ((\<lambda>x. c * f x) \<longlongrightarrow> c * x) F"
by (rule continuous_on_tendsto_compose[where g=f, OF ennreal_continuous_on_cmult, where s=UNIV])
(auto simp: continuous_on_id)
lemma tendsto_ennrealI[intro, simp, tendsto_intros]:
"(f \<longlongrightarrow> x) F \<Longrightarrow> ((\<lambda>x. ennreal (f x)) \<longlongrightarrow> ennreal x) F"
by (auto simp: ennreal_def
intro!: continuous_on_tendsto_compose[OF continuous_on_e2ennreal[of UNIV]] tendsto_max)
lemma tendsto_enn2erealI [tendsto_intros]:
assumes "(f \<longlongrightarrow> l) F"
shows "((\<lambda>i. enn2ereal(f i)) \<longlongrightarrow> enn2ereal l) F"
using tendsto_enn2ereal_iff assms by auto
lemma tendsto_e2ennrealI [tendsto_intros]:
assumes "(f \<longlongrightarrow> l) F"
shows "((\<lambda>i. e2ennreal(f i)) \<longlongrightarrow> e2ennreal l) F"
proof -
have *: "e2ennreal (max x 0) = e2ennreal x" for x
by (simp add: e2ennreal_def max.commute)
have "((\<lambda>i. max (f i) 0) \<longlongrightarrow> max l 0) F"
apply (intro tendsto_intros) using assms by auto
then have "((\<lambda>i. enn2ereal(e2ennreal (max (f i) 0))) \<longlongrightarrow> enn2ereal (e2ennreal (max l 0))) F"
by (subst enn2ereal_e2ennreal, auto)+
then have "((\<lambda>i. e2ennreal (max (f i) 0)) \<longlongrightarrow> e2ennreal (max l 0)) F"
using tendsto_enn2ereal_iff by auto
then show ?thesis
unfolding * by auto
qed
lemma ennreal_suminf_minus:
fixes f g :: "nat \<Rightarrow> ennreal"
shows "(\<And>i. g i \<le> f i) \<Longrightarrow> suminf f \<noteq> top \<Longrightarrow> suminf g \<noteq> top \<Longrightarrow> (\<Sum>i. f i - g i) = suminf f - suminf g"
by transfer
(auto simp add: max.absorb2 ereal_diff_positive suminf_le_pos top_ereal_def intro!: suminf_ereal_minus)
lemma ennreal_Sup_countable_SUP:
"A \<noteq> {} \<Longrightarrow> \<exists>f::nat \<Rightarrow> ennreal. incseq f \<and> range f \<subseteq> A \<and> Sup A = (SUP i. f i)"
unfolding incseq_def
apply transfer
subgoal for A
using Sup_countable_SUP[of A]
by (force simp add: incseq_def[symmetric] SUP_upper2 max.absorb2 image_subset_iff Sup_upper2 cong: conj_cong)
done
lemma ennreal_Inf_countable_INF:
"A \<noteq> {} \<Longrightarrow> \<exists>f::nat \<Rightarrow> ennreal. decseq f \<and> range f \<subseteq> A \<and> Inf A = (INF i. f i)"
unfolding decseq_def
apply transfer
subgoal for A
using Inf_countable_INF[of A]
apply (clarsimp simp flip: decseq_def)
subgoal for f
by (intro exI[of _ f]) auto
done
done
lemma ennreal_SUP_countable_SUP:
"A \<noteq> {} \<Longrightarrow> \<exists>f::nat \<Rightarrow> ennreal. range f \<subseteq> g`A \<and> Sup (g ` A) = Sup (f ` UNIV)"
using ennreal_Sup_countable_SUP [of "g`A"] by auto
lemma of_nat_tendsto_top_ennreal: "(\<lambda>n::nat. of_nat n :: ennreal) \<longlonglongrightarrow> top"
using LIMSEQ_SUP[of "of_nat :: nat \<Rightarrow> ennreal"]
by (simp add: ennreal_SUP_of_nat_eq_top incseq_def)
lemma SUP_sup_continuous_ennreal:
fixes f :: "ennreal \<Rightarrow> 'a::complete_lattice"
assumes f: "sup_continuous f" and "I \<noteq> {}"
shows "(SUP i\<in>I. f (g i)) = f (SUP i\<in>I. g i)"
proof (rule antisym)
show "(SUP i\<in>I. f (g i)) \<le> f (SUP i\<in>I. g i)"
by (rule mono_SUP[OF sup_continuous_mono[OF f]])
from ennreal_Sup_countable_SUP[of "g`I"] \<open>I \<noteq> {}\<close>
obtain M :: "nat \<Rightarrow> ennreal" where "incseq M" and M: "range M \<subseteq> g ` I" and eq: "(SUP i \<in> I. g i) = (SUP i. M i)"
by auto
have "f (SUP i \<in> I. g i) = (SUP i \<in> range M. f i)"
unfolding eq sup_continuousD[OF f \<open>mono M\<close>] by (simp add: image_comp)
also have "\<dots> \<le> (SUP i \<in> I. f (g i))"
by (insert M, drule SUP_subset_mono) (auto simp add: image_comp)
finally show "f (SUP i \<in> I. g i) \<le> (SUP i \<in> I. f (g i))" .
qed
lemma ennreal_suminf_SUP_eq:
fixes f :: "nat \<Rightarrow> nat \<Rightarrow> ennreal"
shows "(\<And>i. incseq (\<lambda>n. f n i)) \<Longrightarrow> (\<Sum>i. SUP n. f n i) = (SUP n. \<Sum>i. f n i)"
apply (rule ennreal_suminf_SUP_eq_directed)
subgoal for N n j
by (auto simp: incseq_def intro!:exI[of _ "max n j"])
done
lemma ennreal_SUP_add_left:
fixes c :: ennreal
shows "I \<noteq> {} \<Longrightarrow> (SUP i\<in>I. f i + c) = (SUP i\<in>I. f i) + c"
apply transfer
apply (simp add: SUP_ereal_add_left)
by (metis SUP_upper all_not_in_conv ereal_le_add_mono1 max.absorb2 max.bounded_iff)
lemma ennreal_SUP_const_minus:
fixes f :: "'a \<Rightarrow> ennreal"
shows "I \<noteq> {} \<Longrightarrow> c < top \<Longrightarrow> (INF x\<in>I. c - f x) = c - (SUP x\<in>I. f x)"
apply (transfer fixing: I)
unfolding ex_in_conv[symmetric]
apply (auto simp add: SUP_upper2 sup_absorb2 simp flip: sup_ereal_def)
apply (subst INF_ereal_minus_right[symmetric])
apply (auto simp del: sup_ereal_def simp add: sup_INF)
done
(* Contributed by Dominique Unruh *)
lemma isCont_ennreal[simp]: \<open>isCont ennreal x\<close>
apply (auto intro!: sequentially_imp_eventually_within simp: continuous_within tendsto_def)
by (metis tendsto_def tendsto_ennrealI)
(* Contributed by Dominique Unruh *)
lemma isCont_ennreal_of_enat[simp]: \<open>isCont ennreal_of_enat x\<close>
proof -
have continuous_at_open:
\<comment> \<open>Lemma copied from \<^session>\<open>HOL-Analysis\<close> to avoid a dependency.\<close>
"continuous (at x) f \<longleftrightarrow> (\<forall>t. open t \<and> f x \<in> t --> (\<exists>s. open s \<and> x \<in> s \<and> (\<forall>x' \<in> s. (f x') \<in> t)))" for f :: \<open>enat \<Rightarrow> 'z::topological_space\<close>
unfolding continuous_within_topological [of x UNIV f]
unfolding imp_conjL
by (intro all_cong imp_cong ex_cong conj_cong refl) auto
show ?thesis
proof (subst continuous_at_open, intro allI impI, cases \<open>x = \<infinity>\<close>)
case True
fix t assume \<open>open t \<and> ennreal_of_enat x \<in> t\<close>
then have \<open>\<exists>y<\<infinity>. {y <.. \<infinity>} \<subseteq> t\<close>
by (rule_tac open_left[where y=0]) (auto simp: True)
then obtain y where \<open>{y<..} \<subseteq> t\<close> and \<open>y \<noteq> \<infinity>\<close>
by fastforce
from \<open>y \<noteq> \<infinity>\<close>
obtain x' where x'y: \<open>ennreal_of_enat x' > y\<close> and \<open>x' \<noteq> \<infinity>\<close>
by (metis enat.simps(3) ennreal_Ex_less_of_nat ennreal_of_enat_enat infinity_ennreal_def top.not_eq_extremum)
define s where \<open>s = {x'<..}\<close>
have \<open>open s\<close>
by (simp add: s_def)
moreover have \<open>x \<in> s\<close>
by (simp add: \<open>x' \<noteq> \<infinity>\<close> s_def True)
moreover have \<open>ennreal_of_enat z \<in> t\<close> if \<open>z \<in> s\<close> for z
by (metis x'y \<open>{y<..} \<subseteq> t\<close> ennreal_of_enat_le_iff greaterThan_iff le_less_trans less_imp_le not_less s_def subsetD that)
ultimately show \<open>\<exists>s. open s \<and> x \<in> s \<and> (\<forall>z\<in>s. ennreal_of_enat z \<in> t)\<close>
by auto
next
case False
fix t assume asm: \<open>open t \<and> ennreal_of_enat x \<in> t\<close>
define s where \<open>s = {x}\<close>
have \<open>open s\<close>
using False open_enat_iff s_def by blast
moreover have \<open>x \<in> s\<close>
using s_def by auto
moreover have \<open>ennreal_of_enat z \<in> t\<close> if \<open>z \<in> s\<close> for z
using asm s_def that by blast
ultimately show \<open>\<exists>s. open s \<and> x \<in> s \<and> (\<forall>z\<in>s. ennreal_of_enat z \<in> t)\<close>
by auto
qed
qed
subsection \<open>Approximation lemmas\<close>
lemma INF_approx_ennreal:
fixes x::ennreal and e::real
assumes "e > 0"
assumes INF: "x = (INF i \<in> A. f i)"
assumes "x \<noteq> \<infinity>"
shows "\<exists>i \<in> A. f i < x + e"
proof -
have "(INF i \<in> A. f i) < x + e"
unfolding INF[symmetric] using \<open>0<e\<close> \<open>x \<noteq> \<infinity>\<close> by (cases x) auto
then show ?thesis
unfolding INF_less_iff .
qed
lemma SUP_approx_ennreal:
fixes x::ennreal and e::real
assumes "e > 0" "A \<noteq> {}"
assumes SUP: "x = (SUP i \<in> A. f i)"
assumes "x \<noteq> \<infinity>"
shows "\<exists>i \<in> A. x < f i + e"
proof -
have "x < x + e"
using \<open>0<e\<close> \<open>x \<noteq> \<infinity>\<close> by (cases x) auto
also have "x + e = (SUP i \<in> A. f i + e)"
unfolding SUP ennreal_SUP_add_left[OF \<open>A \<noteq> {}\<close>] ..
finally show ?thesis
unfolding less_SUP_iff .
qed
lemma ennreal_approx_SUP:
fixes x::ennreal
assumes f_bound: "\<And>i. i \<in> A \<Longrightarrow> f i \<le> x"
assumes approx: "\<And>e. (e::real) > 0 \<Longrightarrow> \<exists>i \<in> A. x \<le> f i + e"
shows "x = (SUP i \<in> A. f i)"
proof (rule antisym)
show "x \<le> (SUP i\<in>A. f i)"
proof (rule ennreal_le_epsilon)
fix e :: real assume "0 < e"
from approx[OF this] obtain i where "i \<in> A" and *: "x \<le> f i + ennreal e"
by blast
from * have "x \<le> f i + e"
by simp
also have "\<dots> \<le> (SUP i\<in>A. f i) + e"
by (intro add_mono \<open>i \<in> A\<close> SUP_upper order_refl)
finally show "x \<le> (SUP i\<in>A. f i) + e" .
qed
qed (intro SUP_least f_bound)
lemma ennreal_approx_INF:
fixes x::ennreal
assumes f_bound: "\<And>i. i \<in> A \<Longrightarrow> x \<le> f i"
assumes approx: "\<And>e. (e::real) > 0 \<Longrightarrow> \<exists>i \<in> A. f i \<le> x + e"
shows "x = (INF i \<in> A. f i)"
proof (rule antisym)
show "(INF i\<in>A. f i) \<le> x"
proof (rule ennreal_le_epsilon)
fix e :: real assume "0 < e"
from approx[OF this] obtain i where "i\<in>A" "f i \<le> x + ennreal e"
by blast
then have "(INF i\<in>A. f i) \<le> f i"
by (intro INF_lower)
also have "\<dots> \<le> x + e"
by fact
finally show "(INF i\<in>A. f i) \<le> x + e" .
qed
qed (intro INF_greatest f_bound)
lemma ennreal_approx_unit:
"(\<And>a::ennreal. 0 < a \<Longrightarrow> a < 1 \<Longrightarrow> a * z \<le> y) \<Longrightarrow> z \<le> y"
apply (subst SUP_mult_right_ennreal[of "\<lambda>x. x" "{0 <..< 1}" z, simplified])
apply (auto intro: SUP_least)
done
lemma suminf_ennreal2:
"(\<And>i. 0 \<le> f i) \<Longrightarrow> summable f \<Longrightarrow> (\<Sum>i. ennreal (f i)) = ennreal (\<Sum>i. f i)"
using suminf_ennreal_eq by blast
lemma less_top_ennreal: "x < top \<longleftrightarrow> (\<exists>r\<ge>0. x = ennreal r)"
by (cases x) auto
lemma enn2real_less_iff[simp]: "x < top \<Longrightarrow> enn2real x < c \<longleftrightarrow> x < c"
using ennreal_less_iff less_top_ennreal by auto
lemma enn2real_le_iff[simp]: "\<lbrakk>x < top; c > 0\<rbrakk> \<Longrightarrow> enn2real x \<le> c \<longleftrightarrow> x \<le> c"
by (cases x) auto
lemma enn2real_less:
assumes "enn2real e < r" "e \<noteq> top" shows "e < ennreal r"
using enn2real_less_iff assms top.not_eq_extremum by blast
lemma enn2real_le:
assumes "enn2real e \<le> r" "e \<noteq> top" shows "e \<le> ennreal r"
by (metis assms enn2real_less ennreal_enn2real_if eq_iff less_le)
lemma tendsto_top_iff_ennreal:
fixes f :: "'a \<Rightarrow> ennreal"
shows "(f \<longlongrightarrow> top) F \<longleftrightarrow> (\<forall>l\<ge>0. eventually (\<lambda>x. ennreal l < f x) F)"
by (auto simp: less_top_ennreal order_tendsto_iff)
lemma ennreal_tendsto_top_eq_at_top:
"((\<lambda>z. ennreal (f z)) \<longlongrightarrow> top) F \<longleftrightarrow> (LIM z F. f z :> at_top)"
unfolding filterlim_at_top_dense tendsto_top_iff_ennreal
apply (auto simp: ennreal_less_iff)
subgoal for y
by (auto elim!: eventually_mono allE[of _ "max 0 y"])
done
lemma tendsto_0_if_Limsup_eq_0_ennreal:
fixes f :: "_ \<Rightarrow> ennreal"
shows "Limsup F f = 0 \<Longrightarrow> (f \<longlongrightarrow> 0) F"
using Liminf_le_Limsup[of F f] tendsto_iff_Liminf_eq_Limsup[of F f 0]
by (cases "F = bot") auto
lemma diff_le_self_ennreal[simp]: "a - b \<le> (a::ennreal)"
by (cases a b rule: ennreal2_cases) (auto simp: ennreal_minus)
lemma ennreal_ineq_diff_add: "b \<le> a \<Longrightarrow> a = b + (a - b::ennreal)"
by transfer (auto simp: ereal_diff_positive max.absorb2 ereal_ineq_diff_add)
lemma ennreal_mult_strict_left_mono: "(a::ennreal) < c \<Longrightarrow> 0 < b \<Longrightarrow> b < top \<Longrightarrow> b * a < b * c"
by transfer (auto intro!: ereal_mult_strict_left_mono)
lemma ennreal_between: "0 < e \<Longrightarrow> 0 < x \<Longrightarrow> x < top \<Longrightarrow> x - e < (x::ennreal)"
by transfer (auto intro!: ereal_between)
lemma minus_less_iff_ennreal: "b < top \<Longrightarrow> b \<le> a \<Longrightarrow> a - b < c \<longleftrightarrow> a < c + (b::ennreal)"
by transfer
(auto simp: top_ereal_def ereal_minus_less le_less)
lemma tendsto_zero_ennreal:
assumes ev: "\<And>r. 0 < r \<Longrightarrow> \<forall>\<^sub>F x in F. f x < ennreal r"
shows "(f \<longlongrightarrow> 0) F"
proof (rule order_tendstoI)
fix e::ennreal assume "e > 0"
obtain e'::real where "e' > 0" "ennreal e' < e"
using \<open>0 < e\<close> dense[of 0 "if e = top then 1 else (enn2real e)"]
by (cases e) (auto simp: ennreal_less_iff)
from ev[OF \<open>e' > 0\<close>] show "\<forall>\<^sub>F x in F. f x < e"
by eventually_elim (insert \<open>ennreal e' < e\<close>, auto)
qed simp
lifting_update ennreal.lifting
lifting_forget ennreal.lifting
subsection \<open>\<^typ>\<open>ennreal\<close> theorems\<close>
lemma neq_top_trans: fixes x y :: ennreal shows "\<lbrakk> y \<noteq> top; x \<le> y \<rbrakk> \<Longrightarrow> x \<noteq> top"
by (auto simp: top_unique)
lemma diff_diff_ennreal: fixes a b :: ennreal shows "a \<le> b \<Longrightarrow> b \<noteq> \<infinity> \<Longrightarrow> b - (b - a) = a"
by (cases a b rule: ennreal2_cases) (auto simp: ennreal_minus top_unique)
lemma ennreal_less_one_iff[simp]: "ennreal x < 1 \<longleftrightarrow> x < 1"
by (cases "0 \<le> x") (auto simp: ennreal_neg ennreal_less_iff simp flip: ennreal_1)
lemma SUP_const_minus_ennreal:
fixes f :: "'a \<Rightarrow> ennreal" shows "I \<noteq> {} \<Longrightarrow> (SUP x\<in>I. c - f x) = c - (INF x\<in>I. f x)"
including ennreal.lifting
by (transfer fixing: I)
(simp add: SUP_sup_distrib[symmetric] SUP_ereal_minus_right
flip: sup_ereal_def)
lemma zero_minus_ennreal[simp]: "0 - (a::ennreal) = 0"
including ennreal.lifting
by transfer (simp split: split_max)
lemma diff_diff_commute_ennreal:
fixes a b c :: ennreal shows "a - b - c = a - c - b"
by (cases a b c rule: ennreal3_cases) (simp_all add: ennreal_minus field_simps)
lemma diff_gr0_ennreal: "b < (a::ennreal) \<Longrightarrow> 0 < a - b"
including ennreal.lifting by transfer (auto simp: ereal_diff_gr0 ereal_diff_positive split: split_max)
lemma divide_le_posI_ennreal:
fixes x y z :: ennreal
shows "x > 0 \<Longrightarrow> z \<le> x * y \<Longrightarrow> z / x \<le> y"
by (cases x y z rule: ennreal3_cases)
(auto simp: divide_ennreal ennreal_mult[symmetric] field_simps top_unique)
lemma add_diff_eq_ennreal:
fixes x y z :: ennreal
shows "z \<le> y \<Longrightarrow> x + (y - z) = x + y - z"
using ennreal_diff_add_assoc by auto
lemma add_diff_inverse_ennreal:
fixes x y :: ennreal shows "x \<le> y \<Longrightarrow> x + (y - x) = y"
by (cases x) (simp_all add: top_unique add_diff_eq_ennreal)
lemma add_diff_eq_iff_ennreal[simp]:
fixes x y :: ennreal shows "x + (y - x) = y \<longleftrightarrow> x \<le> y"
proof
assume *: "x + (y - x) = y" show "x \<le> y"
by (subst *[symmetric]) simp
qed (simp add: add_diff_inverse_ennreal)
lemma add_diff_le_ennreal: "a + b - c \<le> a + (b - c::ennreal)"
apply (cases a b c rule: ennreal3_cases)
subgoal for a' b' c'
by (cases "0 \<le> b' - c'") (simp_all add: ennreal_minus top_add ennreal_neg flip: ennreal_plus)
apply (simp_all add: top_add flip: ennreal_plus)
done
lemma diff_eq_0_ennreal: "a < top \<Longrightarrow> a \<le> b \<Longrightarrow> a - b = (0::ennreal)"
using ennreal_minus_pos_iff gr_zeroI not_less by blast
lemma diff_diff_ennreal': fixes x y z :: ennreal shows "z \<le> y \<Longrightarrow> y - z \<le> x \<Longrightarrow> x - (y - z) = x + z - y"
by (cases x; cases y; cases z)
(auto simp add: top_add add_top minus_top_ennreal ennreal_minus top_unique
simp flip: ennreal_plus)
lemma diff_diff_ennreal'': fixes x y z :: ennreal
shows "z \<le> y \<Longrightarrow> x - (y - z) = (if y - z \<le> x then x + z - y else 0)"
by (cases x; cases y; cases z)
(auto simp add: top_add add_top minus_top_ennreal ennreal_minus top_unique ennreal_neg
simp flip: ennreal_plus)
lemma power_less_top_ennreal: fixes x :: ennreal shows "x ^ n < top \<longleftrightarrow> x < top \<or> n = 0"
using power_eq_top_ennreal[of x n] by (auto simp: less_top)
lemma ennreal_divide_times: "(a / b) * c = a * (c / b :: ennreal)"
by (simp add: mult.commute ennreal_times_divide)
lemma diff_less_top_ennreal: "a - b < top \<longleftrightarrow> a < (top :: ennreal)"
by (cases a; cases b) (auto simp: ennreal_minus)
lemma divide_less_ennreal: "b \<noteq> 0 \<Longrightarrow> b < top \<Longrightarrow> a / b < c \<longleftrightarrow> a < (c * b :: ennreal)"
by (cases a; cases b; cases c)
(auto simp: divide_ennreal ennreal_mult[symmetric] ennreal_less_iff field_simps ennreal_top_mult ennreal_top_divide)
lemma one_less_numeral[simp]: "1 < (numeral n::ennreal) \<longleftrightarrow> (num.One < n)"
by (simp flip: ennreal_1 ennreal_numeral add: ennreal_less_iff)
lemma divide_eq_1_ennreal: "a / b = (1::ennreal) \<longleftrightarrow> (b \<noteq> top \<and> b \<noteq> 0 \<and> b = a)"
by (cases a ; cases b; cases "b = 0") (auto simp: ennreal_top_divide divide_ennreal split: if_split_asm)
lemma ennreal_mult_cancel_left: "(a * b = a * c) = (a = top \<and> b \<noteq> 0 \<and> c \<noteq> 0 \<or> a = 0 \<or> b = (c::ennreal))"
by (cases a; cases b; cases c) (auto simp: ennreal_mult[symmetric] ennreal_mult_top ennreal_top_mult)
lemma ennreal_minus_if: "ennreal a - ennreal b = ennreal (if 0 \<le> b then (if b \<le> a then a - b else 0) else a)"
by (auto simp: ennreal_minus ennreal_neg)
lemma ennreal_plus_if: "ennreal a + ennreal b = ennreal (if 0 \<le> a then (if 0 \<le> b then a + b else a) else b)"
by (auto simp: ennreal_neg)
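(* Illustrative example (editor's sketch, not part of the original theory):
   ennreal truncates negative reals to 0, which is exactly the case the two
   rules above single out. *)
lemma "ennreal (- 1) = 0"
  by (simp add: ennreal_neg)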
lemma power_le_one_iff: "0 \<le> (a::real) \<Longrightarrow> a ^ n \<le> 1 \<longleftrightarrow> (n = 0 \<or> a \<le> 1)"
by (metis (mono_tags, opaque_lifting) le_less neq0_conv not_le one_le_power power_0 power_eq_imp_eq_base power_le_one zero_le_one)
lemma ennreal_diff_le_mono_left: "a \<le> b \<Longrightarrow> a - c \<le> (b::ennreal)"
using ennreal_mono_minus[of 0 c a, THEN order_trans, of b] by simp
lemma ennreal_minus_le_iff: "a - b \<le> c \<longleftrightarrow> (a \<le> b + (c::ennreal) \<and> (a = top \<and> b = top \<longrightarrow> c = top))"
by (cases a; cases b; cases c)
(auto simp: top_unique top_add add_top ennreal_minus simp flip: ennreal_plus)
lemma ennreal_le_minus_iff: "a \<le> b - c \<longleftrightarrow> (a + c \<le> (b::ennreal) \<or> (a = 0 \<and> b \<le> c))"
by (cases a; cases b; cases c)
(auto simp: top_unique top_add add_top ennreal_minus ennreal_le_iff2
simp flip: ennreal_plus)
lemma diff_add_eq_diff_diff_swap_ennreal: "x - (y + z :: ennreal) = x - y - z"
by (cases x; cases y; cases z)
(auto simp: ennreal_minus_if add_top top_add simp flip: ennreal_plus)
lemma diff_add_assoc2_ennreal: "b \<le> a \<Longrightarrow> (a - b + c::ennreal) = a + c - b"
by (cases a; cases b; cases c)
(auto simp add: ennreal_minus_if ennreal_plus_if add_top top_add top_unique simp del: ennreal_plus)
lemma diff_gt_0_iff_gt_ennreal: "0 < a - b \<longleftrightarrow> (a = top \<and> b = top \<or> b < (a::ennreal))"
by (cases a; cases b) (auto simp: ennreal_minus_if ennreal_less_iff)
lemma diff_eq_0_iff_ennreal: "(a - b::ennreal) = 0 \<longleftrightarrow> (a < top \<and> a \<le> b)"
by (cases a) (auto simp: ennreal_minus_eq_0 diff_eq_0_ennreal)
lemma add_diff_self_ennreal: "a + (b - a::ennreal) = (if a \<le> b then b else a)"
by (auto simp: diff_eq_0_iff_ennreal less_top)
lemma diff_add_self_ennreal: "(b - a + a::ennreal) = (if a \<le> b then b else a)"
by (auto simp: diff_add_cancel_ennreal diff_eq_0_iff_ennreal less_top)
lemma ennreal_minus_cancel_iff:
fixes a b c :: ennreal
shows "a - b = a - c \<longleftrightarrow> (b = c \<or> (a \<le> b \<and> a \<le> c) \<or> a = top)"
by (cases a; cases b; cases c) (auto simp: ennreal_minus_if)
text \<open>The next lemma fails for $a = top$: take $b = c = 1$, for instance.\<close>
lemma ennreal_right_diff_distrib:
fixes a b c :: ennreal
assumes "a \<noteq> top"
shows "a * (b - c) = a * b - a * c"
apply (cases a; cases b; cases c)
apply (use assms in \<open>auto simp add: ennreal_mult_top ennreal_minus ennreal_mult' [symmetric]\<close>)
apply (simp add: algebra_simps)
done
lemma SUP_diff_ennreal:
"c < top \<Longrightarrow> (SUP i\<in>I. f i - c :: ennreal) = (SUP i\<in>I. f i) - c"
by (auto intro!: SUP_eqI ennreal_minus_mono SUP_least intro: SUP_upper
simp: ennreal_minus_cancel_iff ennreal_minus_le_iff less_top[symmetric])
lemma ennreal_SUP_add_right:
fixes c :: ennreal shows "I \<noteq> {} \<Longrightarrow> c + (SUP i\<in>I. f i) = (SUP i\<in>I. c + f i)"
using ennreal_SUP_add_left[of I f c] by (simp add: add.commute)
lemma SUP_add_directed_ennreal:
fixes f g :: "_ \<Rightarrow> ennreal"
assumes directed: "\<And>i j. i \<in> I \<Longrightarrow> j \<in> I \<Longrightarrow> \<exists>k\<in>I. f i + g j \<le> f k + g k"
shows "(SUP i\<in>I. f i + g i) = (SUP i\<in>I. f i) + (SUP i\<in>I. g i)"
proof (cases "I = {}")
case False
show ?thesis
proof (rule antisym)
show "(SUP i\<in>I. f i + g i) \<le> (SUP i\<in>I. f i) + (SUP i\<in>I. g i)"
by (rule SUP_least; intro add_mono SUP_upper)
next
have "(SUP i\<in>I. f i) + (SUP i\<in>I. g i) = (SUP i\<in>I. f i + (SUP i\<in>I. g i))"
by (intro ennreal_SUP_add_left[symmetric] \<open>I \<noteq> {}\<close>)
also have "\<dots> = (SUP i\<in>I. (SUP j\<in>I. f i + g j))"
using \<open>I \<noteq> {}\<close> by (simp add: ennreal_SUP_add_right)
also have "\<dots> \<le> (SUP i\<in>I. f i + g i)"
using directed by (intro SUP_least) (blast intro: SUP_upper2)
finally show "(SUP i\<in>I. f i) + (SUP i\<in>I. g i) \<le> (SUP i\<in>I. f i + g i)" .
qed
qed (simp add: bot_ereal_def)
lemma enn2real_eq_0_iff: "enn2real x = 0 \<longleftrightarrow> x = 0 \<or> x = top"
by (cases x) auto
lemma continuous_on_diff_ennreal:
"continuous_on A f \<Longrightarrow> continuous_on A g \<Longrightarrow> (\<And>x. x \<in> A \<Longrightarrow> f x \<noteq> top) \<Longrightarrow> (\<And>x. x \<in> A \<Longrightarrow> g x \<noteq> top) \<Longrightarrow> continuous_on A (\<lambda>z. f z - g z::ennreal)"
including ennreal.lifting
proof (transfer fixing: A, simp add: top_ereal_def)
fix f g :: "'a \<Rightarrow> ereal" assume "\<forall>x. 0 \<le> f x" "\<forall>x. 0 \<le> g x" "continuous_on A f" "continuous_on A g"
moreover assume "f x \<noteq> \<infinity>" "g x \<noteq> \<infinity>" if "x \<in> A" for x
ultimately show "continuous_on A (\<lambda>z. max 0 (f z - g z))"
by (intro continuous_on_max continuous_on_const continuous_on_diff_ereal) auto
qed
lemma tendsto_diff_ennreal:
"(f \<longlongrightarrow> x) F \<Longrightarrow> (g \<longlongrightarrow> y) F \<Longrightarrow> x \<noteq> top \<Longrightarrow> y \<noteq> top \<Longrightarrow> ((\<lambda>z. f z - g z::ennreal) \<longlongrightarrow> x - y) F"
using continuous_on_tendsto_compose[where f="\<lambda>x. fst x - snd x::ennreal" and s="{(x, y). x \<noteq> top \<and> y \<noteq> top}" and g="\<lambda>x. (f x, g x)" and l="(x, y)" and F="F",
OF continuous_on_diff_ennreal]
by (auto simp: tendsto_Pair eventually_conj_iff less_top order_tendstoD continuous_on_fst continuous_on_snd continuous_on_id)
declare lim_real_of_ereal [tendsto_intros]
lemma tendsto_enn2real [tendsto_intros]:
assumes "(u \<longlongrightarrow> ennreal l) F" "l \<ge> 0"
shows "((\<lambda>n. enn2real (u n)) \<longlongrightarrow> l) F"
unfolding enn2real_def
by (metis assms enn2ereal_ennreal lim_real_of_ereal tendsto_enn2erealI)
end
diff --git a/src/HOL/Library/Multiset.thy b/src/HOL/Library/Multiset.thy
--- a/src/HOL/Library/Multiset.thy
+++ b/src/HOL/Library/Multiset.thy
@@ -1,4526 +1,4526 @@
(* Title: HOL/Library/Multiset.thy
Author: Tobias Nipkow, Markus Wenzel, Lawrence C Paulson, Norbert Voelker
Author: Andrei Popescu, TU Muenchen
Author: Jasmin Blanchette, Inria, LORIA, MPII
Author: Dmitriy Traytel, TU Muenchen
Author: Mathias Fleury, MPII
Author: Martin Desharnais, MPI-INF Saarbruecken
*)
section \<open>(Finite) Multisets\<close>
theory Multiset
imports Cancellation
begin
subsection \<open>The type of multisets\<close>
typedef 'a multiset = \<open>{f :: 'a \<Rightarrow> nat. finite {x. f x > 0}}\<close>
morphisms count Abs_multiset
proof
show \<open>(\<lambda>x. 0::nat) \<in> {f. finite {x. f x > 0}}\<close>
by simp
qed
setup_lifting type_definition_multiset
lemma count_Abs_multiset:
\<open>count (Abs_multiset f) = f\<close> if \<open>finite {x. f x > 0}\<close>
by (rule Abs_multiset_inverse) (simp add: that)
lemma multiset_eq_iff: "M = N \<longleftrightarrow> (\<forall>a. count M a = count N a)"
by (simp only: count_inject [symmetric] fun_eq_iff)
lemma multiset_eqI: "(\<And>x. count A x = count B x) \<Longrightarrow> A = B"
using multiset_eq_iff by auto
text \<open>Preservation of the representing set \<^term>\<open>multiset\<close>.\<close>
lemma diff_preserves_multiset:
\<open>finite {x. 0 < M x - N x}\<close> if \<open>finite {x. 0 < M x}\<close> for M N :: \<open>'a \<Rightarrow> nat\<close>
using that by (rule rev_finite_subset) auto
lemma filter_preserves_multiset:
\<open>finite {x. 0 < (if P x then M x else 0)}\<close> if \<open>finite {x. 0 < M x}\<close> for M N :: \<open>'a \<Rightarrow> nat\<close>
using that by (rule rev_finite_subset) auto
lemmas in_multiset = diff_preserves_multiset filter_preserves_multiset
subsection \<open>Representing multisets\<close>
text \<open>Multiset enumeration\<close>
instantiation multiset :: (type) cancel_comm_monoid_add
begin
lift_definition zero_multiset :: \<open>'a multiset\<close>
is \<open>\<lambda>a. 0\<close>
by simp
abbreviation empty_mset :: \<open>'a multiset\<close> (\<open>{#}\<close>)
where \<open>empty_mset \<equiv> 0\<close>
lift_definition plus_multiset :: \<open>'a multiset \<Rightarrow> 'a multiset \<Rightarrow> 'a multiset\<close>
is \<open>\<lambda>M N a. M a + N a\<close>
by simp
lift_definition minus_multiset :: \<open>'a multiset \<Rightarrow> 'a multiset \<Rightarrow> 'a multiset\<close>
is \<open>\<lambda>M N a. M a - N a\<close>
by (rule diff_preserves_multiset)
instance
by (standard; transfer) (simp_all add: fun_eq_iff)
end
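(* Illustrative example (editor's sketch, not part of the original theory):
   the instance makes the generic monoid laws available for multisets, e.g.
   {#} is the additive unit and union is commutative. *)
lemma "(A::'a multiset) + {#} + B = B + A"
  by (simp add: add.commute)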
context
begin
qualified definition is_empty :: "'a multiset \<Rightarrow> bool" where
[code_abbrev]: "is_empty A \<longleftrightarrow> A = {#}"
end
lemma add_mset_in_multiset:
\<open>finite {x. 0 < (if x = a then Suc (M x) else M x)}\<close>
if \<open>finite {x. 0 < M x}\<close>
using that by (simp add: flip: insert_Collect)
lift_definition add_mset :: "'a \<Rightarrow> 'a multiset \<Rightarrow> 'a multiset" is
"\<lambda>a M b. if b = a then Suc (M b) else M b"
by (rule add_mset_in_multiset)
syntax
"_multiset" :: "args \<Rightarrow> 'a multiset" ("{#(_)#}")
translations
"{#x, xs#}" == "CONST add_mset x {#xs#}"
"{#x#}" == "CONST add_mset x {#}"
lemma count_empty [simp]: "count {#} a = 0"
by (simp add: zero_multiset.rep_eq)
lemma count_add_mset [simp]:
"count (add_mset b A) a = (if b = a then Suc (count A a) else count A a)"
by (simp add: add_mset.rep_eq)
lemma count_single: "count {#b#} a = (if b = a then 1 else 0)"
by simp
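(* Illustrative example (editor's sketch, not part of the original theory):
   count records multiplicities, so a doubly inserted element is counted
   twice. *)
lemma "count {#a, a#} a = Suc (Suc 0)"
  by simp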
lemma
add_mset_not_empty [simp]: \<open>add_mset a A \<noteq> {#}\<close> and
empty_not_add_mset [simp]: "{#} \<noteq> add_mset a A"
by (auto simp: multiset_eq_iff)
lemma add_mset_add_mset_same_iff [simp]:
"add_mset a A = add_mset a B \<longleftrightarrow> A = B"
by (auto simp: multiset_eq_iff)
lemma add_mset_commute:
"add_mset x (add_mset y M) = add_mset y (add_mset x M)"
by (auto simp: multiset_eq_iff)
subsection \<open>Basic operations\<close>
subsubsection \<open>Conversion to set and membership\<close>
definition set_mset :: \<open>'a multiset \<Rightarrow> 'a set\<close>
where \<open>set_mset M = {x. count M x > 0}\<close>
abbreviation member_mset :: \<open>'a \<Rightarrow> 'a multiset \<Rightarrow> bool\<close>
where \<open>member_mset a M \<equiv> a \<in> set_mset M\<close>
notation
member_mset (\<open>'(\<in>#')\<close>) and
member_mset (\<open>(_/ \<in># _)\<close> [50, 51] 50)
notation (ASCII)
member_mset (\<open>'(:#')\<close>) and
member_mset (\<open>(_/ :# _)\<close> [50, 51] 50)
abbreviation not_member_mset :: \<open>'a \<Rightarrow> 'a multiset \<Rightarrow> bool\<close>
where \<open>not_member_mset a M \<equiv> a \<notin> set_mset M\<close>
notation
not_member_mset (\<open>'(\<notin>#')\<close>) and
not_member_mset (\<open>(_/ \<notin># _)\<close> [50, 51] 50)
notation (ASCII)
not_member_mset (\<open>'(~:#')\<close>) and
not_member_mset (\<open>(_/ ~:# _)\<close> [50, 51] 50)
context
begin
qualified abbreviation Ball :: "'a multiset \<Rightarrow> ('a \<Rightarrow> bool) \<Rightarrow> bool"
where "Ball M \<equiv> Set.Ball (set_mset M)"
qualified abbreviation Bex :: "'a multiset \<Rightarrow> ('a \<Rightarrow> bool) \<Rightarrow> bool"
where "Bex M \<equiv> Set.Bex (set_mset M)"
end
syntax
"_MBall" :: "pttrn \<Rightarrow> 'a set \<Rightarrow> bool \<Rightarrow> bool" ("(3\<forall>_\<in>#_./ _)" [0, 0, 10] 10)
"_MBex" :: "pttrn \<Rightarrow> 'a set \<Rightarrow> bool \<Rightarrow> bool" ("(3\<exists>_\<in>#_./ _)" [0, 0, 10] 10)
syntax (ASCII)
"_MBall" :: "pttrn \<Rightarrow> 'a set \<Rightarrow> bool \<Rightarrow> bool" ("(3\<forall>_:#_./ _)" [0, 0, 10] 10)
"_MBex" :: "pttrn \<Rightarrow> 'a set \<Rightarrow> bool \<Rightarrow> bool" ("(3\<exists>_:#_./ _)" [0, 0, 10] 10)
translations
"\<forall>x\<in>#A. P" \<rightleftharpoons> "CONST Multiset.Ball A (\<lambda>x. P)"
"\<exists>x\<in>#A. P" \<rightleftharpoons> "CONST Multiset.Bex A (\<lambda>x. P)"
print_translation \<open>
[Syntax_Trans.preserve_binder_abs2_tr' \<^const_syntax>\<open>Multiset.Ball\<close> \<^syntax_const>\<open>_MBall\<close>,
Syntax_Trans.preserve_binder_abs2_tr' \<^const_syntax>\<open>Multiset.Bex\<close> \<^syntax_const>\<open>_MBex\<close>]
\<close> \<comment> \<open>to avoid eta-contraction of body\<close>
lemma count_eq_zero_iff:
"count M x = 0 \<longleftrightarrow> x \<notin># M"
by (auto simp add: set_mset_def)
lemma not_in_iff:
"x \<notin># M \<longleftrightarrow> count M x = 0"
by (auto simp add: count_eq_zero_iff)
lemma count_greater_zero_iff [simp]:
"count M x > 0 \<longleftrightarrow> x \<in># M"
by (auto simp add: set_mset_def)
lemma count_inI:
assumes "count M x = 0 \<Longrightarrow> False"
shows "x \<in># M"
proof (rule ccontr)
assume "x \<notin># M"
with assms show False by (simp add: not_in_iff)
qed
lemma in_countE:
assumes "x \<in># M"
obtains n where "count M x = Suc n"
proof -
from assms have "count M x > 0" by simp
then obtain n where "count M x = Suc n"
using gr0_conv_Suc by blast
with that show thesis .
qed
lemma count_greater_eq_Suc_zero_iff [simp]:
"count M x \<ge> Suc 0 \<longleftrightarrow> x \<in># M"
by (simp add: Suc_le_eq)
lemma count_greater_eq_one_iff [simp]:
"count M x \<ge> 1 \<longleftrightarrow> x \<in># M"
by simp
lemma set_mset_empty [simp]:
"set_mset {#} = {}"
by (simp add: set_mset_def)
lemma set_mset_single:
"set_mset {#b#} = {b}"
by (simp add: set_mset_def)
lemma set_mset_eq_empty_iff [simp]:
"set_mset M = {} \<longleftrightarrow> M = {#}"
by (auto simp add: multiset_eq_iff count_eq_zero_iff)
lemma finite_set_mset [iff]:
"finite (set_mset M)"
using count [of M] by simp
lemma set_mset_add_mset_insert [simp]: \<open>set_mset (add_mset a A) = insert a (set_mset A)\<close>
by (auto simp flip: count_greater_eq_Suc_zero_iff split: if_splits)
lemma multiset_nonemptyE [elim]:
assumes "A \<noteq> {#}"
obtains x where "x \<in># A"
proof -
have "\<exists>x. x \<in># A" by (rule ccontr) (insert assms, auto)
with that show ?thesis by blast
qed
subsubsection \<open>Union\<close>
lemma count_union [simp]:
"count (M + N) a = count M a + count N a"
by (simp add: plus_multiset.rep_eq)
lemma set_mset_union [simp]:
"set_mset (M + N) = set_mset M \<union> set_mset N"
by (simp only: set_eq_iff count_greater_zero_iff [symmetric] count_union) simp
lemma union_mset_add_mset_left [simp]:
"add_mset a A + B = add_mset a (A + B)"
by (auto simp: multiset_eq_iff)
lemma union_mset_add_mset_right [simp]:
"A + add_mset a B = add_mset a (A + B)"
by (auto simp: multiset_eq_iff)
lemma add_mset_add_single: \<open>add_mset a A = A + {#a#}\<close>
by (subst union_mset_add_mset_right, subst add.comm_neutral) standard
subsubsection \<open>Difference\<close>
instance multiset :: (type) comm_monoid_diff
by standard (transfer; simp add: fun_eq_iff)
lemma count_diff [simp]:
"count (M - N) a = count M a - count N a"
by (simp add: minus_multiset.rep_eq)
lemma add_mset_diff_bothsides:
\<open>add_mset a M - add_mset a A = M - A\<close>
by (auto simp: multiset_eq_iff)
lemma in_diff_count:
"a \<in># M - N \<longleftrightarrow> count N a < count M a"
by (simp add: set_mset_def)
lemma count_in_diffI:
assumes "\<And>n. count N x = n + count M x \<Longrightarrow> False"
shows "x \<in># M - N"
proof (rule ccontr)
assume "x \<notin># M - N"
then have "count N x = (count N x - count M x) + count M x"
by (simp add: in_diff_count not_less)
with assms show False by auto
qed
lemma in_diff_countE:
assumes "x \<in># M - N"
obtains n where "count M x = Suc n + count N x"
proof -
from assms have "count M x - count N x > 0" by (simp add: in_diff_count)
then have "count M x > count N x" by simp
then obtain n where "count M x = Suc n + count N x"
using less_iff_Suc_add by auto
with that show thesis .
qed
lemma in_diffD:
assumes "a \<in># M - N"
shows "a \<in># M"
proof -
have "0 \<le> count N a" by simp
also from assms have "count N a < count M a"
by (simp add: in_diff_count)
finally show ?thesis by simp
qed
lemma set_mset_diff:
"set_mset (M - N) = {a. count N a < count M a}"
by (simp add: set_mset_def)
lemma diff_empty [simp]: "M - {#} = M \<and> {#} - M = {#}"
by rule (fact Groups.diff_zero, fact Groups.zero_diff)
lemma diff_cancel: "A - A = {#}"
by (fact Groups.diff_cancel)
lemma diff_union_cancelR: "M + N - N = (M::'a multiset)"
by (fact add_diff_cancel_right')
lemma diff_union_cancelL: "N + M - N = (M::'a multiset)"
by (fact add_diff_cancel_left')
lemma diff_right_commute:
fixes M N Q :: "'a multiset"
shows "M - N - Q = M - Q - N"
by (fact diff_right_commute)
lemma diff_add:
fixes M N Q :: "'a multiset"
shows "M - (N + Q) = M - N - Q"
by (rule sym) (fact diff_diff_add)
lemma insert_DiffM [simp]: "x \<in># M \<Longrightarrow> add_mset x (M - {#x#}) = M"
by (clarsimp simp: multiset_eq_iff)
lemma insert_DiffM2: "x \<in># M \<Longrightarrow> (M - {#x#}) + {#x#} = M"
by simp
lemma diff_union_swap: "a \<noteq> b \<Longrightarrow> add_mset b (M - {#a#}) = add_mset b M - {#a#}"
by (auto simp add: multiset_eq_iff)
lemma diff_add_mset_swap [simp]: "b \<notin># A \<Longrightarrow> add_mset b M - A = add_mset b (M - A)"
by (auto simp add: multiset_eq_iff simp: not_in_iff)
lemma diff_union_swap2 [simp]: "y \<in># M \<Longrightarrow> add_mset x M - {#y#} = add_mset x (M - {#y#})"
by (metis add_mset_diff_bothsides diff_union_swap diff_zero insert_DiffM)
lemma diff_diff_add_mset [simp]: "(M::'a multiset) - N - P = M - (N + P)"
by (rule diff_diff_add)
lemma diff_union_single_conv:
"a \<in># J \<Longrightarrow> I + J - {#a#} = I + (J - {#a#})"
by (simp add: multiset_eq_iff Suc_le_eq)
lemma mset_add [elim?]:
assumes "a \<in># A"
obtains B where "A = add_mset a B"
proof -
from assms have "A = add_mset a (A - {#a#})"
by simp
with that show thesis .
qed
lemma union_iff:
"a \<in># A + B \<longleftrightarrow> a \<in># A \<or> a \<in># B"
by auto
lemma count_minus_inter_lt_count_minus_inter_iff:
"count (M2 - M1) y < count (M1 - M2) y \<longleftrightarrow> y \<in># M1 - M2"
by (meson count_greater_zero_iff gr_implies_not_zero in_diff_count leI order.strict_trans2
order_less_asym)
lemma minus_inter_eq_minus_inter_iff:
"(M1 - M2) = (M2 - M1) \<longleftrightarrow> set_mset (M1 - M2) = set_mset (M2 - M1)"
by (metis add.commute count_diff count_eq_zero_iff diff_add_zero in_diff_countE multiset_eq_iff)
subsubsection \<open>Min and Max\<close>
abbreviation Min_mset :: "'a::linorder multiset \<Rightarrow> 'a" where
"Min_mset m \<equiv> Min (set_mset m)"
abbreviation Max_mset :: "'a::linorder multiset \<Rightarrow> 'a" where
"Max_mset m \<equiv> Max (set_mset m)"
subsubsection \<open>Equality of multisets\<close>
lemma single_eq_single [simp]: "{#a#} = {#b#} \<longleftrightarrow> a = b"
by (auto simp add: multiset_eq_iff)
lemma union_eq_empty [iff]: "M + N = {#} \<longleftrightarrow> M = {#} \<and> N = {#}"
by (auto simp add: multiset_eq_iff)
lemma empty_eq_union [iff]: "{#} = M + N \<longleftrightarrow> M = {#} \<and> N = {#}"
by (auto simp add: multiset_eq_iff)
lemma multi_self_add_other_not_self [simp]: "M = add_mset x M \<longleftrightarrow> False"
by (auto simp add: multiset_eq_iff)
lemma add_mset_remove_trivial [simp]: \<open>add_mset x M - {#x#} = M\<close>
by (auto simp: multiset_eq_iff)
lemma diff_single_trivial: "\<not> x \<in># M \<Longrightarrow> M - {#x#} = M"
by (auto simp add: multiset_eq_iff not_in_iff)
lemma diff_single_eq_union: "x \<in># M \<Longrightarrow> M - {#x#} = N \<longleftrightarrow> M = add_mset x N"
by auto
lemma union_single_eq_diff: "add_mset x M = N \<Longrightarrow> M = N - {#x#}"
unfolding add_mset_add_single[of _ M] by (fact add_implies_diff)
lemma union_single_eq_member: "add_mset x M = N \<Longrightarrow> x \<in># N"
by auto
lemma add_mset_remove_trivial_If:
"add_mset a (N - {#a#}) = (if a \<in># N then N else add_mset a N)"
by (simp add: diff_single_trivial)
lemma add_mset_remove_trivial_eq: \<open>N = add_mset a (N - {#a#}) \<longleftrightarrow> a \<in># N\<close>
by (auto simp: add_mset_remove_trivial_If)
lemma union_is_single:
"M + N = {#a#} \<longleftrightarrow> M = {#a#} \<and> N = {#} \<or> M = {#} \<and> N = {#a#}"
(is "?lhs = ?rhs")
proof
show ?lhs if ?rhs using that by auto
show ?rhs if ?lhs
by (metis Multiset.diff_cancel add.commute add_diff_cancel_left' diff_add_zero diff_single_trivial insert_DiffM that)
qed
lemma single_is_union: "{#a#} = M + N \<longleftrightarrow> {#a#} = M \<and> N = {#} \<or> M = {#} \<and> {#a#} = N"
by (auto simp add: eq_commute [of "{#a#}" "M + N"] union_is_single)
lemma add_eq_conv_diff:
"add_mset a M = add_mset b N \<longleftrightarrow> M = N \<and> a = b \<or> M = add_mset b (N - {#a#}) \<and> N = add_mset a (M - {#b#})"
(is "?lhs \<longleftrightarrow> ?rhs")
(* shorter: by (simp add: multiset_eq_iff) fastforce *)
proof
show ?lhs if ?rhs
using that
by (auto simp add: add_mset_commute[of a b])
show ?rhs if ?lhs
proof (cases "a = b")
case True with \<open>?lhs\<close> show ?thesis by simp
next
case False
from \<open>?lhs\<close> have "a \<in># add_mset b N" by (rule union_single_eq_member)
with False have "a \<in># N" by auto
moreover from \<open>?lhs\<close> have "M = add_mset b N - {#a#}" by (rule union_single_eq_diff)
moreover note False
ultimately show ?thesis by (auto simp add: diff_right_commute [of _ "{#a#}"])
qed
qed
lemma add_mset_eq_single [iff]: "add_mset b M = {#a#} \<longleftrightarrow> b = a \<and> M = {#}"
by (auto simp: add_eq_conv_diff)
lemma single_eq_add_mset [iff]: "{#a#} = add_mset b M \<longleftrightarrow> b = a \<and> M = {#}"
by (auto simp: add_eq_conv_diff)
lemma insert_noteq_member:
assumes BC: "add_mset b B = add_mset c C"
and bnotc: "b \<noteq> c"
shows "c \<in># B"
proof -
have "c \<in># add_mset c C" by simp
have nc: "\<not> c \<in># {#b#}" using bnotc by simp
then have "c \<in># add_mset b B" using BC by simp
then show "c \<in># B" using nc by simp
qed
lemma add_eq_conv_ex:
"(add_mset a M = add_mset b N) =
(M = N \<and> a = b \<or> (\<exists>K. M = add_mset b K \<and> N = add_mset a K))"
by (auto simp add: add_eq_conv_diff)
lemma multi_member_split: "x \<in># M \<Longrightarrow> \<exists>A. M = add_mset x A"
by (rule exI [where x = "M - {#x#}"]) simp
lemma multiset_add_sub_el_shuffle:
assumes "c \<in># B"
and "b \<noteq> c"
shows "add_mset b (B - {#c#}) = add_mset b B - {#c#}"
proof -
from \<open>c \<in># B\<close> obtain A where B: "B = add_mset c A"
by (blast dest: multi_member_split)
have "add_mset b A = add_mset c (add_mset b A) - {#c#}" by simp
then have "add_mset b A = add_mset b (add_mset c A) - {#c#}"
by (simp add: \<open>b \<noteq> c\<close>)
then show ?thesis using B by simp
qed
lemma add_mset_eq_singleton_iff[iff]:
"add_mset x M = {#y#} \<longleftrightarrow> M = {#} \<and> x = y"
by auto
subsubsection \<open>Pointwise ordering induced by count\<close>
definition subseteq_mset :: "'a multiset \<Rightarrow> 'a multiset \<Rightarrow> bool" (infix "\<subseteq>#" 50)
where "A \<subseteq># B \<longleftrightarrow> (\<forall>a. count A a \<le> count B a)"
definition subset_mset :: "'a multiset \<Rightarrow> 'a multiset \<Rightarrow> bool" (infix "\<subset>#" 50)
where "A \<subset># B \<longleftrightarrow> A \<subseteq># B \<and> A \<noteq> B"
abbreviation (input) supseteq_mset :: "'a multiset \<Rightarrow> 'a multiset \<Rightarrow> bool" (infix "\<supseteq>#" 50)
where "supseteq_mset A B \<equiv> B \<subseteq># A"
abbreviation (input) supset_mset :: "'a multiset \<Rightarrow> 'a multiset \<Rightarrow> bool" (infix "\<supset>#" 50)
where "supset_mset A B \<equiv> B \<subset># A"
notation (input)
subseteq_mset (infix "\<le>#" 50) and
supseteq_mset (infix "\<ge>#" 50)
notation (ASCII)
subseteq_mset (infix "<=#" 50) and
subset_mset (infix "<#" 50) and
supseteq_mset (infix ">=#" 50) and
supset_mset (infix ">#" 50)
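(* Illustrative sketch, not part of the original theory: a concrete instance of the
   pointwise ordering just defined; the proof invocation is an assumption and may need
   adjustment. *)
lemma "{#1::nat, 2#} \<subseteq># {#2, 1, 1#}"
by (auto simp add: subseteq_mset_def)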
global_interpretation subset_mset: ordering \<open>(\<subseteq>#)\<close> \<open>(\<subset>#)\<close>
by standard (auto simp add: subset_mset_def subseteq_mset_def multiset_eq_iff intro: order.trans order.antisym)
interpretation subset_mset: ordered_ab_semigroup_add_imp_le \<open>(+)\<close> \<open>(-)\<close> \<open>(\<subseteq>#)\<close> \<open>(\<subset>#)\<close>
by standard (auto simp add: subset_mset_def subseteq_mset_def multiset_eq_iff intro: order_trans antisym)
\<comment> \<open>FIXME: avoid junk stemming from type class interpretation\<close>
interpretation subset_mset: ordered_ab_semigroup_monoid_add_imp_le "(+)" 0 "(-)" "(\<subseteq>#)" "(\<subset>#)"
by standard
\<comment> \<open>FIXME: avoid junk stemming from type class interpretation\<close>
lemma mset_subset_eqI:
"(\<And>a. count A a \<le> count B a) \<Longrightarrow> A \<subseteq># B"
by (simp add: subseteq_mset_def)
lemma mset_subset_eq_count:
"A \<subseteq># B \<Longrightarrow> count A a \<le> count B a"
by (simp add: subseteq_mset_def)
lemma mset_subset_eq_exists_conv: "(A::'a multiset) \<subseteq># B \<longleftrightarrow> (\<exists>C. B = A + C)"
unfolding subseteq_mset_def
apply (rule iffI)
apply (rule exI [where x = "B - A"])
apply (auto intro: multiset_eq_iff [THEN iffD2])
done
interpretation subset_mset: ordered_cancel_comm_monoid_diff "(+)" 0 "(\<subseteq>#)" "(\<subset>#)" "(-)"
by standard (simp, fact mset_subset_eq_exists_conv)
\<comment> \<open>FIXME: avoid junk stemming from type class interpretation\<close>
declare subset_mset.add_diff_assoc[simp] subset_mset.add_diff_assoc2[simp]
lemma mset_subset_eq_mono_add_right_cancel: "(A::'a multiset) + C \<subseteq># B + C \<longleftrightarrow> A \<subseteq># B"
by (fact subset_mset.add_le_cancel_right)
lemma mset_subset_eq_mono_add_left_cancel: "C + (A::'a multiset) \<subseteq># C + B \<longleftrightarrow> A \<subseteq># B"
by (fact subset_mset.add_le_cancel_left)
lemma mset_subset_eq_mono_add: "(A::'a multiset) \<subseteq># B \<Longrightarrow> C \<subseteq># D \<Longrightarrow> A + C \<subseteq># B + D"
by (fact subset_mset.add_mono)
lemma mset_subset_eq_add_left: "(A::'a multiset) \<subseteq># A + B"
by simp
lemma mset_subset_eq_add_right: "B \<subseteq># (A::'a multiset) + B"
by simp
lemma single_subset_iff [simp]:
"{#a#} \<subseteq># M \<longleftrightarrow> a \<in># M"
by (auto simp add: subseteq_mset_def Suc_le_eq)
lemma mset_subset_eq_single: "a \<in># B \<Longrightarrow> {#a#} \<subseteq># B"
by simp
lemma mset_subset_eq_add_mset_cancel: \<open>add_mset a A \<subseteq># add_mset a B \<longleftrightarrow> A \<subseteq># B\<close>
unfolding add_mset_add_single[of _ A] add_mset_add_single[of _ B]
by (rule mset_subset_eq_mono_add_right_cancel)
lemma multiset_diff_union_assoc:
fixes A B C D :: "'a multiset"
shows "C \<subseteq># B \<Longrightarrow> A + B - C = A + (B - C)"
by (fact subset_mset.diff_add_assoc)
lemma mset_subset_eq_multiset_union_diff_commute:
fixes A B C D :: "'a multiset"
shows "B \<subseteq># A \<Longrightarrow> A - B + C = A + C - B"
by (fact subset_mset.add_diff_assoc2)
lemma diff_subset_eq_self[simp]:
"(M::'a multiset) - N \<subseteq># M"
by (simp add: subseteq_mset_def)
lemma mset_subset_eqD:
assumes "A \<subseteq># B" and "x \<in># A"
shows "x \<in># B"
proof -
from \<open>x \<in># A\<close> have "count A x > 0" by simp
also from \<open>A \<subseteq># B\<close> have "count A x \<le> count B x"
by (simp add: subseteq_mset_def)
finally show ?thesis by simp
qed
lemma mset_subsetD:
"A \<subset># B \<Longrightarrow> x \<in># A \<Longrightarrow> x \<in># B"
by (auto intro: mset_subset_eqD [of A])
lemma set_mset_mono:
"A \<subseteq># B \<Longrightarrow> set_mset A \<subseteq> set_mset B"
by (metis mset_subset_eqD subsetI)
lemma mset_subset_eq_insertD:
"add_mset x A \<subseteq># B \<Longrightarrow> x \<in># B \<and> A \<subset># B"
apply (rule conjI)
apply (simp add: mset_subset_eqD)
apply (clarsimp simp: subset_mset_def subseteq_mset_def)
apply safe
apply (erule_tac x = a in allE)
apply (auto split: if_split_asm)
done
lemma mset_subset_insertD:
"add_mset x A \<subset># B \<Longrightarrow> x \<in># B \<and> A \<subset># B"
by (rule mset_subset_eq_insertD) simp
lemma mset_subset_of_empty[simp]: "A \<subset># {#} \<longleftrightarrow> False"
by (simp only: subset_mset.not_less_zero)
lemma empty_subset_add_mset[simp]: "{#} \<subset># add_mset x M"
by (auto intro: subset_mset.gr_zeroI)
lemma empty_le: "{#} \<subseteq># A"
by (fact subset_mset.zero_le)
lemma insert_subset_eq_iff:
"add_mset a A \<subseteq># B \<longleftrightarrow> a \<in># B \<and> A \<subseteq># B - {#a#}"
using le_diff_conv2 [of "Suc 0" "count B a" "count A a"]
apply (auto simp add: subseteq_mset_def not_in_iff Suc_le_eq)
apply (rule ccontr)
apply (auto simp add: not_in_iff)
done
lemma insert_union_subset_iff:
"add_mset a A \<subset># B \<longleftrightarrow> a \<in># B \<and> A \<subset># B - {#a#}"
by (auto simp add: insert_subset_eq_iff subset_mset_def)
lemma subset_eq_diff_conv:
"A - C \<subseteq># B \<longleftrightarrow> A \<subseteq># B + C"
by (simp add: subseteq_mset_def le_diff_conv)
lemma multi_psub_of_add_self [simp]: "A \<subset># add_mset x A"
by (auto simp: subset_mset_def subseteq_mset_def)
lemma multi_psub_self: "A \<subset># A = False"
by simp
lemma mset_subset_add_mset [simp]: "add_mset x N \<subset># add_mset x M \<longleftrightarrow> N \<subset># M"
unfolding add_mset_add_single[of _ N] add_mset_add_single[of _ M]
by (fact subset_mset.add_less_cancel_right)
lemma mset_subset_diff_self: "c \<in># B \<Longrightarrow> B - {#c#} \<subset># B"
by (auto simp: subset_mset_def elim: mset_add)
lemma Diff_eq_empty_iff_mset: "A - B = {#} \<longleftrightarrow> A \<subseteq># B"
by (auto simp: multiset_eq_iff subseteq_mset_def)
lemma add_mset_subseteq_single_iff[iff]: "add_mset a M \<subseteq># {#b#} \<longleftrightarrow> M = {#} \<and> a = b"
proof
assume A: "add_mset a M \<subseteq># {#b#}"
then have \<open>a = b\<close>
by (auto dest: mset_subset_eq_insertD)
then show "M={#} \<and> a=b"
using A by (simp add: mset_subset_eq_add_mset_cancel)
qed simp
subsubsection \<open>Intersection and bounded union\<close>
definition inter_mset :: \<open>'a multiset \<Rightarrow> 'a multiset \<Rightarrow> 'a multiset\<close> (infixl \<open>\<inter>#\<close> 70)
where \<open>A \<inter># B = A - (A - B)\<close>
lemma count_inter_mset [simp]:
\<open>count (A \<inter># B) x = min (count A x) (count B x)\<close>
by (simp add: inter_mset_def)
(*global_interpretation subset_mset: semilattice_order \<open>(\<inter>#)\<close> \<open>(\<subseteq>#)\<close> \<open>(\<subset>#)\<close>
by standard (simp_all add: multiset_eq_iff subseteq_mset_def subset_mset_def min_def)*)
interpretation subset_mset: semilattice_inf \<open>(\<inter>#)\<close> \<open>(\<subseteq>#)\<close> \<open>(\<subset>#)\<close>
by standard (simp_all add: multiset_eq_iff subseteq_mset_def)
\<comment> \<open>FIXME: avoid junk stemming from type class interpretation\<close>
definition union_mset :: \<open>'a multiset \<Rightarrow> 'a multiset \<Rightarrow> 'a multiset\<close> (infixl \<open>\<union>#\<close> 70)
where \<open>A \<union># B = A + (B - A)\<close>
lemma count_union_mset [simp]:
\<open>count (A \<union># B) x = max (count A x) (count B x)\<close>
by (simp add: union_mset_def)
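(* Illustrative sketch, not part of the original theory: intersection takes the pointwise
   minimum of counts and bounded union the pointwise maximum; the proof invocations are
   assumptions and may need adjustment. *)
lemma "{#1::nat, 1, 2#} \<inter># {#1, 2, 2#} = {#1, 2#}"
by (auto simp add: multiset_eq_iff)
lemma "{#1::nat, 1#} \<union># {#1, 2#} = {#1, 1, 2#}"
by (auto simp add: multiset_eq_iff)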
global_interpretation subset_mset: semilattice_neutr_order \<open>(\<union>#)\<close> \<open>{#}\<close> \<open>(\<supseteq>#)\<close> \<open>(\<supset>#)\<close>
apply standard
apply (simp_all add: multiset_eq_iff subseteq_mset_def subset_mset_def max_def)
apply (auto simp add: le_antisym dest: sym)
apply (metis nat_le_linear)+
done
interpretation subset_mset: semilattice_sup \<open>(\<union>#)\<close> \<open>(\<subseteq>#)\<close> \<open>(\<subset>#)\<close>
proof -
have [simp]: "m \<le> n \<Longrightarrow> q \<le> n \<Longrightarrow> m + (q - m) \<le> n" for m n q :: nat
by arith
show "class.semilattice_sup (\<union>#) (\<subseteq>#) (\<subset>#)"
by standard (auto simp add: union_mset_def subseteq_mset_def)
qed \<comment> \<open>FIXME: avoid junk stemming from type class interpretation\<close>
interpretation subset_mset: bounded_lattice_bot "(\<inter>#)" "(\<subseteq>#)" "(\<subset>#)"
"(\<union>#)" "{#}"
by standard auto
\<comment> \<open>FIXME: avoid junk stemming from type class interpretation\<close>
subsubsection \<open>Additional intersection facts\<close>
lemma set_mset_inter [simp]:
"set_mset (A \<inter># B) = set_mset A \<inter> set_mset B"
by (simp only: set_mset_def) auto
lemma diff_intersect_left_idem [simp]:
"M - M \<inter># N = M - N"
by (simp add: multiset_eq_iff min_def)
lemma diff_intersect_right_idem [simp]:
"M - N \<inter># M = M - N"
by (simp add: multiset_eq_iff min_def)
lemma multiset_inter_single[simp]: "a \<noteq> b \<Longrightarrow> {#a#} \<inter># {#b#} = {#}"
by (rule multiset_eqI) auto
lemma multiset_union_diff_commute:
assumes "B \<inter># C = {#}"
shows "A + B - C = A - C + B"
proof (rule multiset_eqI)
fix x
from assms have "min (count B x) (count C x) = 0"
by (auto simp add: multiset_eq_iff)
then have "count B x = 0 \<or> count C x = 0"
unfolding min_def by (auto split: if_splits)
then show "count (A + B - C) x = count (A - C + B) x"
by auto
qed
lemma disjunct_not_in:
"A \<inter># B = {#} \<longleftrightarrow> (\<forall>a. a \<notin># A \<or> a \<notin># B)" (is "?P \<longleftrightarrow> ?Q")
proof
assume ?P
show ?Q
proof
fix a
from \<open>?P\<close> have "min (count A a) (count B a) = 0"
by (simp add: multiset_eq_iff)
then have "count A a = 0 \<or> count B a = 0"
by (cases "count A a \<le> count B a") (simp_all add: min_def)
then show "a \<notin># A \<or> a \<notin># B"
by (simp add: not_in_iff)
qed
next
assume ?Q
show ?P
proof (rule multiset_eqI)
fix a
from \<open>?Q\<close> have "count A a = 0 \<or> count B a = 0"
by (auto simp add: not_in_iff)
then show "count (A \<inter># B) a = count {#} a"
by auto
qed
qed
lemma inter_mset_empty_distrib_right: "A \<inter># (B + C) = {#} \<longleftrightarrow> A \<inter># B = {#} \<and> A \<inter># C = {#}"
by (meson disjunct_not_in union_iff)
lemma inter_mset_empty_distrib_left: "(A + B) \<inter># C = {#} \<longleftrightarrow> A \<inter># C = {#} \<and> B \<inter># C = {#}"
by (meson disjunct_not_in union_iff)
lemma add_mset_inter_add_mset [simp]:
"add_mset a A \<inter># add_mset a B = add_mset a (A \<inter># B)"
by (rule multiset_eqI) simp
lemma add_mset_disjoint [simp]:
"add_mset a A \<inter># B = {#} \<longleftrightarrow> a \<notin># B \<and> A \<inter># B = {#}"
"{#} = add_mset a A \<inter># B \<longleftrightarrow> a \<notin># B \<and> {#} = A \<inter># B"
by (auto simp: disjunct_not_in)
lemma disjoint_add_mset [simp]:
"B \<inter># add_mset a A = {#} \<longleftrightarrow> a \<notin># B \<and> B \<inter># A = {#}"
"{#} = A \<inter># add_mset b B \<longleftrightarrow> b \<notin># A \<and> {#} = A \<inter># B"
by (auto simp: disjunct_not_in)
lemma inter_add_left1: "\<not> x \<in># N \<Longrightarrow> (add_mset x M) \<inter># N = M \<inter># N"
by (simp add: multiset_eq_iff not_in_iff)
lemma inter_add_left2: "x \<in># N \<Longrightarrow> (add_mset x M) \<inter># N = add_mset x (M \<inter># (N - {#x#}))"
by (auto simp add: multiset_eq_iff elim: mset_add)
lemma inter_add_right1: "\<not> x \<in># N \<Longrightarrow> N \<inter># (add_mset x M) = N \<inter># M"
by (simp add: multiset_eq_iff not_in_iff)
lemma inter_add_right2: "x \<in># N \<Longrightarrow> N \<inter># (add_mset x M) = add_mset x ((N - {#x#}) \<inter># M)"
by (auto simp add: multiset_eq_iff elim: mset_add)
lemma disjunct_set_mset_diff:
assumes "M \<inter># N = {#}"
shows "set_mset (M - N) = set_mset M"
proof (rule set_eqI)
fix a
from assms have "a \<notin># M \<or> a \<notin># N"
by (simp add: disjunct_not_in)
then show "a \<in># M - N \<longleftrightarrow> a \<in># M"
by (auto dest: in_diffD) (simp add: in_diff_count not_in_iff)
qed
lemma at_most_one_mset_mset_diff:
assumes "a \<notin># M - {#a#}"
shows "set_mset (M - {#a#}) = set_mset M - {a}"
using assms by (auto simp add: not_in_iff in_diff_count set_eq_iff)
lemma more_than_one_mset_mset_diff:
assumes "a \<in># M - {#a#}"
shows "set_mset (M - {#a#}) = set_mset M"
proof (rule set_eqI)
fix b
have "Suc 0 < count M b \<Longrightarrow> count M b > 0" by arith
then show "b \<in># M - {#a#} \<longleftrightarrow> b \<in># M"
using assms by (auto simp add: in_diff_count)
qed
lemma inter_iff:
"a \<in># A \<inter># B \<longleftrightarrow> a \<in># A \<and> a \<in># B"
by simp
lemma inter_union_distrib_left:
"A \<inter># B + C = (A + C) \<inter># (B + C)"
by (simp add: multiset_eq_iff min_add_distrib_left)
lemma inter_union_distrib_right:
"C + A \<inter># B = (C + A) \<inter># (C + B)"
using inter_union_distrib_left [of A B C] by (simp add: ac_simps)
lemma inter_subset_eq_union:
"A \<inter># B \<subseteq># A + B"
by (auto simp add: subseteq_mset_def)
subsubsection \<open>Additional bounded union facts\<close>
lemma set_mset_sup [simp]:
\<open>set_mset (A \<union># B) = set_mset A \<union> set_mset B\<close>
by (simp only: set_mset_def) (auto simp add: less_max_iff_disj)
lemma sup_union_left1 [simp]: "\<not> x \<in># N \<Longrightarrow> (add_mset x M) \<union># N = add_mset x (M \<union># N)"
by (simp add: multiset_eq_iff not_in_iff)
lemma sup_union_left2: "x \<in># N \<Longrightarrow> (add_mset x M) \<union># N = add_mset x (M \<union># (N - {#x#}))"
by (simp add: multiset_eq_iff)
lemma sup_union_right1 [simp]: "\<not> x \<in># N \<Longrightarrow> N \<union># (add_mset x M) = add_mset x (N \<union># M)"
by (simp add: multiset_eq_iff not_in_iff)
lemma sup_union_right2: "x \<in># N \<Longrightarrow> N \<union># (add_mset x M) = add_mset x ((N - {#x#}) \<union># M)"
by (simp add: multiset_eq_iff)
lemma sup_union_distrib_left:
"A \<union># B + C = (A + C) \<union># (B + C)"
by (simp add: multiset_eq_iff max_add_distrib_left)
lemma union_sup_distrib_right:
"C + A \<union># B = (C + A) \<union># (C + B)"
using sup_union_distrib_left [of A B C] by (simp add: ac_simps)
lemma union_diff_inter_eq_sup:
"A + B - A \<inter># B = A \<union># B"
by (auto simp add: multiset_eq_iff)
lemma union_diff_sup_eq_inter:
"A + B - A \<union># B = A \<inter># B"
by (auto simp add: multiset_eq_iff)
lemma add_mset_union:
\<open>add_mset a A \<union># add_mset a B = add_mset a (A \<union># B)\<close>
by (auto simp: multiset_eq_iff max_def)
subsection \<open>Replicate and repeat operations\<close>
definition replicate_mset :: "nat \<Rightarrow> 'a \<Rightarrow> 'a multiset" where
"replicate_mset n x = (add_mset x ^^ n) {#}"
lemma replicate_mset_0[simp]: "replicate_mset 0 x = {#}"
unfolding replicate_mset_def by simp
lemma replicate_mset_Suc [simp]: "replicate_mset (Suc n) x = add_mset x (replicate_mset n x)"
unfolding replicate_mset_def by (induct n) (auto intro: add.commute)
lemma count_replicate_mset[simp]: "count (replicate_mset n x) y = (if y = x then n else 0)"
unfolding replicate_mset_def by (induct n) auto
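(* Illustrative sketch, not part of the original theory; the proof invocation is an
   assumption and may need adjustment. *)
lemma "replicate_mset (Suc (Suc 0)) a = {#a, a#}"
by simp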
lift_definition repeat_mset :: \<open>nat \<Rightarrow> 'a multiset \<Rightarrow> 'a multiset\<close>
is \<open>\<lambda>n M a. n * M a\<close> by simp
lemma count_repeat_mset [simp]: "count (repeat_mset i A) a = i * count A a"
by transfer rule
lemma repeat_mset_0 [simp]:
\<open>repeat_mset 0 M = {#}\<close>
by transfer simp
lemma repeat_mset_Suc [simp]:
\<open>repeat_mset (Suc n) M = M + repeat_mset n M\<close>
by transfer simp
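(* Illustrative sketch, not part of the original theory: repeat_mset scales every count;
   the proof invocation is an assumption and may need adjustment. *)
lemma "repeat_mset (Suc (Suc 0)) {#a#} = {#a, a#}"
by simp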
lemma repeat_mset_right [simp]: "repeat_mset a (repeat_mset b A) = repeat_mset (a * b) A"
by (auto simp: multiset_eq_iff left_diff_distrib')
lemma left_diff_repeat_mset_distrib': \<open>repeat_mset (i - j) u = repeat_mset i u - repeat_mset j u\<close>
by (auto simp: multiset_eq_iff left_diff_distrib')
lemma left_add_mult_distrib_mset:
"repeat_mset i u + (repeat_mset j u + k) = repeat_mset (i+j) u + k"
by (auto simp: multiset_eq_iff add_mult_distrib)
lemma repeat_mset_distrib:
"repeat_mset (m + n) A = repeat_mset m A + repeat_mset n A"
by (auto simp: multiset_eq_iff Nat.add_mult_distrib)
lemma repeat_mset_distrib2[simp]:
"repeat_mset n (A + B) = repeat_mset n A + repeat_mset n B"
by (auto simp: multiset_eq_iff add_mult_distrib2)
lemma repeat_mset_replicate_mset[simp]:
"repeat_mset n {#a#} = replicate_mset n a"
by (auto simp: multiset_eq_iff)
lemma repeat_mset_distrib_add_mset[simp]:
"repeat_mset n (add_mset a A) = replicate_mset n a + repeat_mset n A"
by (auto simp: multiset_eq_iff)
lemma repeat_mset_empty[simp]: "repeat_mset n {#} = {#}"
by transfer simp
subsubsection \<open>Simprocs\<close>
lemma repeat_mset_iterate_add: \<open>repeat_mset n M = iterate_add n M\<close>
unfolding iterate_add_def by (induction n) auto
lemma mset_subseteq_add_iff1:
"j \<le> (i::nat) \<Longrightarrow> (repeat_mset i u + m \<subseteq># repeat_mset j u + n) = (repeat_mset (i-j) u + m \<subseteq># n)"
by (auto simp add: subseteq_mset_def nat_le_add_iff1)
lemma mset_subseteq_add_iff2:
"i \<le> (j::nat) \<Longrightarrow> (repeat_mset i u + m \<subseteq># repeat_mset j u + n) = (m \<subseteq># repeat_mset (j-i) u + n)"
by (auto simp add: subseteq_mset_def nat_le_add_iff2)
lemma mset_subset_add_iff1:
"j \<le> (i::nat) \<Longrightarrow> (repeat_mset i u + m \<subset># repeat_mset j u + n) = (repeat_mset (i-j) u + m \<subset># n)"
unfolding subset_mset_def repeat_mset_iterate_add
by (simp add: iterate_add_eq_add_iff1 mset_subseteq_add_iff1[unfolded repeat_mset_iterate_add])
lemma mset_subset_add_iff2:
"i \<le> (j::nat) \<Longrightarrow> (repeat_mset i u + m \<subset># repeat_mset j u + n) = (m \<subset># repeat_mset (j-i) u + n)"
unfolding subset_mset_def repeat_mset_iterate_add
by (simp add: iterate_add_eq_add_iff2 mset_subseteq_add_iff2[unfolded repeat_mset_iterate_add])
ML_file \<open>multiset_simprocs.ML\<close>
lemma add_mset_replicate_mset_safe[cancelation_simproc_pre]: \<open>NO_MATCH {#} M \<Longrightarrow> add_mset a M = {#a#} + M\<close>
by simp
declare repeat_mset_iterate_add[cancelation_simproc_pre]
declare iterate_add_distrib[cancelation_simproc_pre]
declare repeat_mset_iterate_add[symmetric, cancelation_simproc_post]
declare add_mset_not_empty[cancelation_simproc_eq_elim]
empty_not_add_mset[cancelation_simproc_eq_elim]
subset_mset.le_zero_eq[cancelation_simproc_eq_elim]
empty_not_add_mset[cancelation_simproc_eq_elim]
add_mset_not_empty[cancelation_simproc_eq_elim]
subset_mset.le_zero_eq[cancelation_simproc_eq_elim]
le_zero_eq[cancelation_simproc_eq_elim]
simproc_setup mseteq_cancel
("(l::'a multiset) + m = n" | "(l::'a multiset) = m + n" |
"add_mset a m = n" | "m = add_mset a n" |
"replicate_mset p a = n" | "m = replicate_mset p a" |
"repeat_mset p m = n" | "m = repeat_mset p m") =
- \<open>fn phi => Cancel_Simprocs.eq_cancel\<close>
+ \<open>K Cancel_Simprocs.eq_cancel\<close>
simproc_setup msetsubset_cancel
("(l::'a multiset) + m \<subset># n" | "(l::'a multiset) \<subset># m + n" |
"add_mset a m \<subset># n" | "m \<subset># add_mset a n" |
"replicate_mset p r \<subset># n" | "m \<subset># replicate_mset p r" |
"repeat_mset p m \<subset># n" | "m \<subset># repeat_mset p m") =
- \<open>fn phi => Multiset_Simprocs.subset_cancel_msets\<close>
+ \<open>K Multiset_Simprocs.subset_cancel_msets\<close>
simproc_setup msetsubset_eq_cancel
("(l::'a multiset) + m \<subseteq># n" | "(l::'a multiset) \<subseteq># m + n" |
"add_mset a m \<subseteq># n" | "m \<subseteq># add_mset a n" |
"replicate_mset p r \<subseteq># n" | "m \<subseteq># replicate_mset p r" |
"repeat_mset p m \<subseteq># n" | "m \<subseteq># repeat_mset p m") =
- \<open>fn phi => Multiset_Simprocs.subseteq_cancel_msets\<close>
+ \<open>K Multiset_Simprocs.subseteq_cancel_msets\<close>
simproc_setup msetdiff_cancel
("((l::'a multiset) + m) - n" | "(l::'a multiset) - (m + n)" |
"add_mset a m - n" | "m - add_mset a n" |
"replicate_mset p r - n" | "m - replicate_mset p r" |
"repeat_mset p m - n" | "m - repeat_mset p m") =
- \<open>fn phi => Cancel_Simprocs.diff_cancel\<close>
+ \<open>K Cancel_Simprocs.diff_cancel\<close>
subsubsection \<open>Conditionally complete lattice\<close>
instantiation multiset :: (type) Inf
begin
lift_definition Inf_multiset :: "'a multiset set \<Rightarrow> 'a multiset" is
"\<lambda>A i. if A = {} then 0 else Inf ((\<lambda>f. f i) ` A)"
proof -
fix A :: "('a \<Rightarrow> nat) set"
assume *: "\<And>f. f \<in> A \<Longrightarrow> finite {x. 0 < f x}"
show \<open>finite {i. 0 < (if A = {} then 0 else INF f\<in>A. f i)}\<close>
proof (cases "A = {}")
case False
then obtain f where "f \<in> A" by blast
hence "{i. Inf ((\<lambda>f. f i) ` A) > 0} \<subseteq> {i. f i > 0}"
by (auto intro: less_le_trans[OF _ cInf_lower])
moreover from \<open>f \<in> A\<close> * have "finite \<dots>" by simp
ultimately have "finite {i. Inf ((\<lambda>f. f i) ` A) > 0}" by (rule finite_subset)
with False show ?thesis by simp
qed simp_all
qed
instance ..
end
lemma Inf_multiset_empty: "Inf {} = {#}"
by transfer simp_all
lemma count_Inf_multiset_nonempty: "A \<noteq> {} \<Longrightarrow> count (Inf A) x = Inf ((\<lambda>X. count X x) ` A)"
by transfer simp_all
instantiation multiset :: (type) Sup
begin
definition Sup_multiset :: "'a multiset set \<Rightarrow> 'a multiset" where
"Sup_multiset A = (if A \<noteq> {} \<and> subset_mset.bdd_above A then
Abs_multiset (\<lambda>i. Sup ((\<lambda>X. count X i) ` A)) else {#})"
lemma Sup_multiset_empty: "Sup {} = {#}"
by (simp add: Sup_multiset_def)
lemma Sup_multiset_unbounded: "\<not> subset_mset.bdd_above A \<Longrightarrow> Sup A = {#}"
by (simp add: Sup_multiset_def)
instance ..
end
lemma bdd_above_multiset_imp_bdd_above_count:
assumes "subset_mset.bdd_above (A :: 'a multiset set)"
shows "bdd_above ((\<lambda>X. count X x) ` A)"
proof -
from assms obtain Y where Y: "\<forall>X\<in>A. X \<subseteq># Y"
by (meson subset_mset.bdd_above.E)
hence "count X x \<le> count Y x" if "X \<in> A" for X
using that by (auto intro: mset_subset_eq_count)
thus ?thesis by (intro bdd_aboveI[of _ "count Y x"]) auto
qed
lemma bdd_above_multiset_imp_finite_support:
assumes "A \<noteq> {}" "subset_mset.bdd_above (A :: 'a multiset set)"
shows "finite (\<Union>X\<in>A. {x. count X x > 0})"
proof -
from assms obtain Y where Y: "\<forall>X\<in>A. X \<subseteq># Y"
by (meson subset_mset.bdd_above.E)
hence "count X x \<le> count Y x" if "X \<in> A" for X x
using that by (auto intro: mset_subset_eq_count)
hence "(\<Union>X\<in>A. {x. count X x > 0}) \<subseteq> {x. count Y x > 0}"
by safe (erule less_le_trans)
moreover have "finite \<dots>" by simp
ultimately show ?thesis by (rule finite_subset)
qed
lemma Sup_multiset_in_multiset:
\<open>finite {i. 0 < (SUP M\<in>A. count M i)}\<close>
if \<open>A \<noteq> {}\<close> \<open>subset_mset.bdd_above A\<close>
proof -
have "{i. Sup ((\<lambda>X. count X i) ` A) > 0} \<subseteq> (\<Union>X\<in>A. {i. 0 < count X i})"
proof safe
fix i assume pos: "(SUP X\<in>A. count X i) > 0"
show "i \<in> (\<Union>X\<in>A. {i. 0 < count X i})"
proof (rule ccontr)
assume "i \<notin> (\<Union>X\<in>A. {i. 0 < count X i})"
hence "\<forall>X\<in>A. count X i \<le> 0" by (auto simp: count_eq_zero_iff)
with that have "(SUP X\<in>A. count X i) \<le> 0"
by (intro cSup_least bdd_above_multiset_imp_bdd_above_count) auto
with pos show False by simp
qed
qed
moreover from that have "finite \<dots>"
by (rule bdd_above_multiset_imp_finite_support)
ultimately show "finite {i. Sup ((\<lambda>X. count X i) ` A) > 0}"
by (rule finite_subset)
qed
lemma count_Sup_multiset_nonempty:
\<open>count (Sup A) x = (SUP X\<in>A. count X x)\<close>
if \<open>A \<noteq> {}\<close> \<open>subset_mset.bdd_above A\<close>
using that by (simp add: Sup_multiset_def Sup_multiset_in_multiset count_Abs_multiset)
interpretation subset_mset: conditionally_complete_lattice Inf Sup "(\<inter>#)" "(\<subseteq>#)" "(\<subset>#)" "(\<union>#)"
proof
fix X :: "'a multiset" and A
assume "X \<in> A"
show "Inf A \<subseteq># X"
proof (rule mset_subset_eqI)
fix x
from \<open>X \<in> A\<close> have "A \<noteq> {}" by auto
hence "count (Inf A) x = (INF X\<in>A. count X x)"
by (simp add: count_Inf_multiset_nonempty)
also from \<open>X \<in> A\<close> have "\<dots> \<le> count X x"
by (intro cInf_lower) simp_all
finally show "count (Inf A) x \<le> count X x" .
qed
next
fix X :: "'a multiset" and A
assume nonempty: "A \<noteq> {}" and le: "\<And>Y. Y \<in> A \<Longrightarrow> X \<subseteq># Y"
show "X \<subseteq># Inf A"
proof (rule mset_subset_eqI)
fix x
from nonempty have "count X x \<le> (INF X\<in>A. count X x)"
by (intro cInf_greatest) (auto intro: mset_subset_eq_count le)
also from nonempty have "\<dots> = count (Inf A) x" by (simp add: count_Inf_multiset_nonempty)
finally show "count X x \<le> count (Inf A) x" .
qed
next
fix X :: "'a multiset" and A
assume X: "X \<in> A" and bdd: "subset_mset.bdd_above A"
show "X \<subseteq># Sup A"
proof (rule mset_subset_eqI)
fix x
from X have "A \<noteq> {}" by auto
have "count X x \<le> (SUP X\<in>A. count X x)"
by (intro cSUP_upper X bdd_above_multiset_imp_bdd_above_count bdd)
also from count_Sup_multiset_nonempty[OF \<open>A \<noteq> {}\<close> bdd]
have "(SUP X\<in>A. count X x) = count (Sup A) x" by simp
finally show "count X x \<le> count (Sup A) x" .
qed
next
fix X :: "'a multiset" and A
assume nonempty: "A \<noteq> {}" and ge: "\<And>Y. Y \<in> A \<Longrightarrow> Y \<subseteq># X"
from ge have bdd: "subset_mset.bdd_above A"
by blast
show "Sup A \<subseteq># X"
proof (rule mset_subset_eqI)
fix x
from count_Sup_multiset_nonempty[OF \<open>A \<noteq> {}\<close> bdd]
have "count (Sup A) x = (SUP X\<in>A. count X x)" .
also from nonempty have "\<dots> \<le> count X x"
by (intro cSup_least) (auto intro: mset_subset_eq_count ge)
finally show "count (Sup A) x \<le> count X x" .
qed
qed \<comment> \<open>FIXME: avoid junk stemming from type class interpretation\<close>
lemma set_mset_Inf:
assumes "A \<noteq> {}"
shows "set_mset (Inf A) = (\<Inter>X\<in>A. set_mset X)"
proof safe
fix x X assume "x \<in># Inf A" "X \<in> A"
hence nonempty: "A \<noteq> {}" by (auto simp: Inf_multiset_empty)
from \<open>x \<in># Inf A\<close> have "{#x#} \<subseteq># Inf A" by auto
also from \<open>X \<in> A\<close> have "\<dots> \<subseteq># X" by (rule subset_mset.cInf_lower) simp_all
finally show "x \<in># X" by simp
next
fix x assume x: "x \<in> (\<Inter>X\<in>A. set_mset X)"
hence "{#x#} \<subseteq># X" if "X \<in> A" for X using that by auto
from assms and this have "{#x#} \<subseteq># Inf A" by (rule subset_mset.cInf_greatest)
thus "x \<in># Inf A" by simp
qed
lemma in_Inf_multiset_iff:
assumes "A \<noteq> {}"
shows "x \<in># Inf A \<longleftrightarrow> (\<forall>X\<in>A. x \<in># X)"
proof -
from assms have "set_mset (Inf A) = (\<Inter>X\<in>A. set_mset X)" by (rule set_mset_Inf)
also have "x \<in> \<dots> \<longleftrightarrow> (\<forall>X\<in>A. x \<in># X)" by simp
finally show ?thesis .
qed
lemma in_Inf_multisetD: "x \<in># Inf A \<Longrightarrow> X \<in> A \<Longrightarrow> x \<in># X"
by (subst (asm) in_Inf_multiset_iff) auto
lemma set_mset_Sup:
assumes "subset_mset.bdd_above A"
shows "set_mset (Sup A) = (\<Union>X\<in>A. set_mset X)"
proof safe
fix x assume "x \<in># Sup A"
hence nonempty: "A \<noteq> {}" by (auto simp: Sup_multiset_empty)
show "x \<in> (\<Union>X\<in>A. set_mset X)"
proof (rule ccontr)
assume x: "x \<notin> (\<Union>X\<in>A. set_mset X)"
have "count X x \<le> count (Sup A) x" if "X \<in> A" for X x
using that by (intro mset_subset_eq_count subset_mset.cSup_upper assms)
with x have "X \<subseteq># Sup A - {#x#}" if "X \<in> A" for X
using that by (auto simp: subseteq_mset_def algebra_simps not_in_iff)
hence "Sup A \<subseteq># Sup A - {#x#}" by (intro subset_mset.cSup_least nonempty)
with \<open>x \<in># Sup A\<close> show False
by (auto simp: subseteq_mset_def simp flip: count_greater_zero_iff
dest!: spec[of _ x])
qed
next
fix x X assume "x \<in> set_mset X" "X \<in> A"
hence "{#x#} \<subseteq># X" by auto
also have "X \<subseteq># Sup A" by (intro subset_mset.cSup_upper \<open>X \<in> A\<close> assms)
finally show "x \<in> set_mset (Sup A)" by simp
qed
lemma in_Sup_multiset_iff:
assumes "subset_mset.bdd_above A"
shows "x \<in># Sup A \<longleftrightarrow> (\<exists>X\<in>A. x \<in># X)"
proof -
from assms have "set_mset (Sup A) = (\<Union>X\<in>A. set_mset X)" by (rule set_mset_Sup)
also have "x \<in> \<dots> \<longleftrightarrow> (\<exists>X\<in>A. x \<in># X)" by simp
finally show ?thesis .
qed
lemma in_Sup_multisetD:
assumes "x \<in># Sup A"
shows "\<exists>X\<in>A. x \<in># X"
proof -
have "subset_mset.bdd_above A"
by (rule ccontr) (insert assms, simp_all add: Sup_multiset_unbounded)
with assms show ?thesis by (simp add: in_Sup_multiset_iff)
qed
interpretation subset_mset: distrib_lattice "(\<inter>#)" "(\<subseteq>#)" "(\<subset>#)" "(\<union>#)"
proof
fix A B C :: "'a multiset"
show "A \<union># (B \<inter># C) = A \<union># B \<inter># (A \<union># C)"
by (intro multiset_eqI) simp_all
qed \<comment> \<open>FIXME: avoid junk stemming from type class interpretation\<close>
subsubsection \<open>Filter (with comprehension syntax)\<close>
text \<open>Multiset comprehension\<close>
lift_definition filter_mset :: "('a \<Rightarrow> bool) \<Rightarrow> 'a multiset \<Rightarrow> 'a multiset"
is "\<lambda>P M. \<lambda>x. if P x then M x else 0"
by (rule filter_preserves_multiset)
syntax (ASCII)
"_MCollect" :: "pttrn \<Rightarrow> 'a multiset \<Rightarrow> bool \<Rightarrow> 'a multiset" ("(1{#_ :# _./ _#})")
syntax
"_MCollect" :: "pttrn \<Rightarrow> 'a multiset \<Rightarrow> bool \<Rightarrow> 'a multiset" ("(1{#_ \<in># _./ _#})")
translations
"{#x \<in># M. P#}" == "CONST filter_mset (\<lambda>x. P) M"
lemma count_filter_mset [simp]:
"count (filter_mset P M) a = (if P a then count M a else 0)"
by (simp add: filter_mset.rep_eq)
lemma set_mset_filter [simp]:
"set_mset (filter_mset P M) = {a \<in> set_mset M. P a}"
by (simp only: set_eq_iff count_greater_zero_iff [symmetric] count_filter_mset) simp
lemma filter_empty_mset [simp]: "filter_mset P {#} = {#}"
by (rule multiset_eqI) simp
lemma filter_single_mset: "filter_mset P {#x#} = (if P x then {#x#} else {#})"
by (rule multiset_eqI) simp
lemma filter_union_mset [simp]: "filter_mset P (M + N) = filter_mset P M + filter_mset P N"
by (rule multiset_eqI) simp
lemma filter_diff_mset [simp]: "filter_mset P (M - N) = filter_mset P M - filter_mset P N"
by (rule multiset_eqI) simp
lemma filter_inter_mset [simp]: "filter_mset P (M \<inter># N) = filter_mset P M \<inter># filter_mset P N"
by (rule multiset_eqI) simp
lemma filter_sup_mset[simp]: "filter_mset P (A \<union># B) = filter_mset P A \<union># filter_mset P B"
by (rule multiset_eqI) simp
lemma filter_mset_add_mset [simp]:
"filter_mset P (add_mset x A) =
(if P x then add_mset x (filter_mset P A) else filter_mset P A)"
by (auto simp: multiset_eq_iff)
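(* Illustrative sketch, not part of the original theory: filtering a concrete multiset
   with the comprehension syntax introduced above; the proof invocation is an assumption
   and may need adjustment. *)
lemma "{#n \<in># {#1::nat, 2, 3#}. n < 3#} = {#1, 2#}"
by simp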
lemma multiset_filter_subset[simp]: "filter_mset f M \<subseteq># M"
by (simp add: mset_subset_eqI)
lemma multiset_filter_mono:
assumes "A \<subseteq># B"
shows "filter_mset f A \<subseteq># filter_mset f B"
proof -
from assms[unfolded mset_subset_eq_exists_conv]
obtain C where B: "B = A + C" by auto
show ?thesis unfolding B by auto
qed
lemma filter_mset_eq_conv:
"filter_mset P M = N \<longleftrightarrow> N \<subseteq># M \<and> (\<forall>b\<in>#N. P b) \<and> (\<forall>a\<in>#M - N. \<not> P a)" (is "?P \<longleftrightarrow> ?Q")
proof
assume ?P then show ?Q by auto (simp add: multiset_eq_iff in_diff_count)
next
assume ?Q
then obtain Q where M: "M = N + Q"
by (auto simp add: mset_subset_eq_exists_conv)
then have MN: "M - N = Q" by simp
show ?P
proof (rule multiset_eqI)
fix a
from \<open>?Q\<close> MN have *: "\<not> P a \<Longrightarrow> a \<notin># N" "P a \<Longrightarrow> a \<notin># Q"
by auto
show "count (filter_mset P M) a = count N a"
proof (cases "a \<in># M")
case True
with * show ?thesis
by (simp add: not_in_iff M)
next
case False then have "count M a = 0"
by (simp add: not_in_iff)
with M show ?thesis by simp
qed
qed
qed
lemma filter_filter_mset: "filter_mset P (filter_mset Q M) = {#x \<in># M. Q x \<and> P x#}"
by (auto simp: multiset_eq_iff)
lemma
filter_mset_True[simp]: "{#y \<in># M. True#} = M" and
filter_mset_False[simp]: "{#y \<in># M. False#} = {#}"
by (auto simp: multiset_eq_iff)
lemma filter_mset_cong0:
assumes "\<And>x. x \<in># M \<Longrightarrow> f x \<longleftrightarrow> g x"
shows "filter_mset f M = filter_mset g M"
proof (rule subset_mset.antisym; unfold subseteq_mset_def; rule allI)
fix x
show "count (filter_mset f M) x \<le> count (filter_mset g M) x"
using assms by (cases "x \<in># M") (simp_all add: not_in_iff)
next
fix x
show "count (filter_mset g M) x \<le> count (filter_mset f M) x"
using assms by (cases "x \<in># M") (simp_all add: not_in_iff)
qed
lemma filter_mset_cong:
assumes "M = M'" and "\<And>x. x \<in># M' \<Longrightarrow> f x \<longleftrightarrow> g x"
shows "filter_mset f M = filter_mset g M'"
unfolding \<open>M = M'\<close>
using assms by (auto intro: filter_mset_cong0)
subsubsection \<open>Size\<close>
definition wcount where "wcount f M = (\<lambda>x. count M x * Suc (f x))"
lemma wcount_union: "wcount f (M + N) a = wcount f M a + wcount f N a"
by (auto simp: wcount_def add_mult_distrib)
lemma wcount_add_mset:
"wcount f (add_mset x M) a = (if x = a then Suc (f a) else 0) + wcount f M a"
unfolding add_mset_add_single[of _ M] wcount_union by (auto simp: wcount_def)
definition size_multiset :: "('a \<Rightarrow> nat) \<Rightarrow> 'a multiset \<Rightarrow> nat" where
"size_multiset f M = sum (wcount f M) (set_mset M)"
lemmas size_multiset_eq = size_multiset_def[unfolded wcount_def]
instantiation multiset :: (type) size
begin
definition size_multiset where
size_multiset_overloaded_def: "size_multiset = Multiset.size_multiset (\<lambda>_. 0)"
instance ..
end
lemmas size_multiset_overloaded_eq =
size_multiset_overloaded_def[THEN fun_cong, unfolded size_multiset_eq, simplified]
lemma size_multiset_empty [simp]: "size_multiset f {#} = 0"
by (simp add: size_multiset_def)
lemma size_empty [simp]: "size {#} = 0"
by (simp add: size_multiset_overloaded_def)
lemma size_multiset_single: "size_multiset f {#b#} = Suc (f b)"
by (simp add: size_multiset_eq)
lemma size_single: "size {#b#} = 1"
by (simp add: size_multiset_overloaded_def size_multiset_single)
lemma sum_wcount_Int:
"finite A \<Longrightarrow> sum (wcount f N) (A \<inter> set_mset N) = sum (wcount f N) A"
by (induct rule: finite_induct)
(simp_all add: Int_insert_left wcount_def count_eq_zero_iff)
lemma size_multiset_union [simp]:
"size_multiset f (M + N::'a multiset) = size_multiset f M + size_multiset f N"
apply (simp add: size_multiset_def sum_Un_nat sum.distrib sum_wcount_Int wcount_union)
apply (subst Int_commute)
apply (simp add: sum_wcount_Int)
done
lemma size_multiset_add_mset [simp]:
"size_multiset f (add_mset a M) = Suc (f a) + size_multiset f M"
unfolding add_mset_add_single[of _ M] size_multiset_union by (auto simp: size_multiset_single)
lemma size_add_mset [simp]: "size (add_mset a A) = Suc (size A)"
by (simp add: size_multiset_overloaded_def wcount_add_mset)
lemma size_union [simp]: "size (M + N::'a multiset) = size M + size N"
by (auto simp add: size_multiset_overloaded_def)
lemma size_multiset_eq_0_iff_empty [iff]:
"size_multiset f M = 0 \<longleftrightarrow> M = {#}"
by (auto simp add: size_multiset_eq count_eq_zero_iff)
lemma size_eq_0_iff_empty [iff]: "(size M = 0) = (M = {#})"
by (auto simp add: size_multiset_overloaded_def)
lemma nonempty_has_size: "(S \<noteq> {#}) = (0 < size S)"
by (metis gr0I gr_implies_not0 size_empty size_eq_0_iff_empty)
lemma size_eq_Suc_imp_elem: "size M = Suc n \<Longrightarrow> \<exists>a. a \<in># M"
apply (unfold size_multiset_overloaded_eq)
apply (drule sum_SucD)
apply auto
done
lemma size_eq_Suc_imp_eq_union:
assumes "size M = Suc n"
shows "\<exists>a N. M = add_mset a N"
proof -
from assms obtain a where "a \<in># M"
by (erule size_eq_Suc_imp_elem [THEN exE])
then have "M = add_mset a (M - {#a#})" by simp
then show ?thesis by blast
qed
lemma size_mset_mono:
fixes A B :: "'a multiset"
assumes "A \<subseteq># B"
shows "size A \<le> size B"
proof -
from assms[unfolded mset_subset_eq_exists_conv]
obtain C where B: "B = A + C" by auto
show ?thesis unfolding B by (induct C) auto
qed
lemma size_filter_mset_lesseq[simp]: "size (filter_mset f M) \<le> size M"
by (rule size_mset_mono[OF multiset_filter_subset])
lemma size_Diff_submset:
"M \<subseteq># M' \<Longrightarrow> size (M' - M) = size M' - size(M::'a multiset)"
by (metis add_diff_cancel_left' size_union mset_subset_eq_exists_conv)
subsection \<open>Induction and case splits\<close>
theorem multiset_induct [case_names empty add, induct type: multiset]:
assumes empty: "P {#}"
assumes add: "\<And>x M. P M \<Longrightarrow> P (add_mset x M)"
shows "P M"
proof (induct "size M" arbitrary: M)
case 0 thus "P M" by (simp add: empty)
next
case (Suc k)
obtain N x where "M = add_mset x N"
using \<open>Suc k = size M\<close> [symmetric]
using size_eq_Suc_imp_eq_union by fast
with Suc add show "P M" by simp
qed
lemma multiset_induct_min[case_names empty add]:
fixes M :: "'a::linorder multiset"
assumes
empty: "P {#}" and
add: "\<And>x M. P M \<Longrightarrow> (\<forall>y \<in># M. y \<ge> x) \<Longrightarrow> P (add_mset x M)"
shows "P M"
proof (induct "size M" arbitrary: M)
case (Suc k)
note ih = this(1) and Sk_eq_sz_M = this(2)
let ?y = "Min_mset M"
let ?N = "M - {#?y#}"
have M: "M = add_mset ?y ?N"
by (metis Min_in Sk_eq_sz_M finite_set_mset insert_DiffM lessI not_less_zero
set_mset_eq_empty_iff size_empty)
show ?case
by (subst M, rule add, rule ih, metis M Sk_eq_sz_M nat.inject size_add_mset,
meson Min_le finite_set_mset in_diffD)
qed (simp add: empty)
lemma multiset_induct_max[case_names empty add]:
fixes M :: "'a::linorder multiset"
assumes
empty: "P {#}" and
add: "\<And>x M. P M \<Longrightarrow> (\<forall>y \<in># M. y \<le> x) \<Longrightarrow> P (add_mset x M)"
shows "P M"
proof (induct "size M" arbitrary: M)
case (Suc k)
note ih = this(1) and Sk_eq_sz_M = this(2)
let ?y = "Max_mset M"
let ?N = "M - {#?y#}"
have M: "M = add_mset ?y ?N"
by (metis Max_in Sk_eq_sz_M finite_set_mset insert_DiffM lessI not_less_zero
set_mset_eq_empty_iff size_empty)
show ?case
by (subst M, rule add, rule ih, metis M Sk_eq_sz_M nat.inject size_add_mset,
meson Max_ge finite_set_mset in_diffD)
qed (simp add: empty)
lemma multi_nonempty_split: "M \<noteq> {#} \<Longrightarrow> \<exists>A a. M = add_mset a A"
by (induct M) auto
lemma multiset_cases [cases type]:
obtains (empty) "M = {#}"
| (add) x N where "M = add_mset x N"
by (induct M) simp_all
lemma multi_drop_mem_not_eq: "c \<in># B \<Longrightarrow> B - {#c#} \<noteq> B"
by (cases "B = {#}") (auto dest: multi_member_split)
lemma union_filter_mset_complement[simp]:
"\<forall>x. P x = (\<not> Q x) \<Longrightarrow> filter_mset P M + filter_mset Q M = M"
by (subst multiset_eq_iff) auto
lemma multiset_partition: "M = {#x \<in># M. P x#} + {#x \<in># M. \<not> P x#}"
by simp
lemma mset_subset_size: "A \<subset># B \<Longrightarrow> size A < size B"
proof (induct A arbitrary: B)
case empty
then show ?case
using nonempty_has_size by auto
next
case (add x A)
have "add_mset x A \<subseteq># B"
by (meson add.prems subset_mset_def)
then show ?case
by (metis (no_types) add.prems add.right_neutral add_diff_cancel_left' leD nat_neq_iff
size_Diff_submset size_eq_0_iff_empty size_mset_mono subset_mset.le_iff_add subset_mset_def)
qed
lemma size_1_singleton_mset: "size M = 1 \<Longrightarrow> \<exists>a. M = {#a#}"
by (cases M) auto
subsubsection \<open>Strong induction and subset induction for multisets\<close>
text \<open>Well-foundedness of strict subset relation\<close>
lemma wf_subset_mset_rel: "wf {(M, N :: 'a multiset). M \<subset># N}"
apply (rule wf_measure [THEN wf_subset, where f1=size])
apply (clarsimp simp: measure_def inv_image_def mset_subset_size)
done
lemma wfP_subset_mset[simp]: "wfP (\<subset>#)"
by (rule wf_subset_mset_rel[to_pred])
lemma full_multiset_induct [case_names less]:
assumes ih: "\<And>B. \<forall>(A::'a multiset). A \<subset># B \<longrightarrow> P A \<Longrightarrow> P B"
shows "P B"
apply (rule wf_subset_mset_rel [THEN wf_induct])
apply (rule ih, auto)
done
lemma multi_subset_induct [consumes 2, case_names empty add]:
assumes "F \<subseteq># A"
and empty: "P {#}"
and insert: "\<And>a F. a \<in># A \<Longrightarrow> P F \<Longrightarrow> P (add_mset a F)"
shows "P F"
proof -
from \<open>F \<subseteq># A\<close>
show ?thesis
proof (induct F)
show "P {#}" by fact
next
fix x F
assume P: "F \<subseteq># A \<Longrightarrow> P F" and i: "add_mset x F \<subseteq># A"
show "P (add_mset x F)"
proof (rule insert)
from i show "x \<in># A" by (auto dest: mset_subset_eq_insertD)
from i have "F \<subseteq># A" by (auto dest: mset_subset_eq_insertD)
with P show "P F" .
qed
qed
qed
subsection \<open>Least and greatest elements\<close>
context begin
qualified lemma
assumes
"M \<noteq> {#}" and
"transp_on (set_mset M) R" and
"totalp_on (set_mset M) R"
shows
bex_least_element: "(\<exists>l \<in># M. \<forall>x \<in># M. x \<noteq> l \<longrightarrow> R l x)" and
bex_greatest_element: "(\<exists>g \<in># M. \<forall>x \<in># M. x \<noteq> g \<longrightarrow> R x g)"
using assms
by (auto intro: Finite_Set.bex_least_element Finite_Set.bex_greatest_element)
end
subsection \<open>The fold combinator\<close>
definition fold_mset :: "('a \<Rightarrow> 'b \<Rightarrow> 'b) \<Rightarrow> 'b \<Rightarrow> 'a multiset \<Rightarrow> 'b"
where
"fold_mset f s M = Finite_Set.fold (\<lambda>x. f x ^^ count M x) s (set_mset M)"
lemma fold_mset_empty [simp]: "fold_mset f s {#} = s"
by (simp add: fold_mset_def)
context comp_fun_commute
begin
lemma fold_mset_add_mset [simp]: "fold_mset f s (add_mset x M) = f x (fold_mset f s M)"
proof -
interpret mset: comp_fun_commute "\<lambda>y. f y ^^ count M y"
by (fact comp_fun_commute_funpow)
interpret mset_union: comp_fun_commute "\<lambda>y. f y ^^ count (add_mset x M) y"
by (fact comp_fun_commute_funpow)
show ?thesis
proof (cases "x \<in> set_mset M")
case False
then have *: "count (add_mset x M) x = 1"
by (simp add: not_in_iff)
from False have "Finite_Set.fold (\<lambda>y. f y ^^ count (add_mset x M) y) s (set_mset M) =
Finite_Set.fold (\<lambda>y. f y ^^ count M y) s (set_mset M)"
by (auto intro!: Finite_Set.fold_cong comp_fun_commute_on_funpow)
with False * show ?thesis
by (simp add: fold_mset_def del: count_add_mset)
next
case True
define N where "N = set_mset M - {x}"
from N_def True have *: "set_mset M = insert x N" "x \<notin> N" "finite N" by auto
then have "Finite_Set.fold (\<lambda>y. f y ^^ count (add_mset x M) y) s N =
Finite_Set.fold (\<lambda>y. f y ^^ count M y) s N"
by (auto intro!: Finite_Set.fold_cong comp_fun_commute_on_funpow)
with * show ?thesis by (simp add: fold_mset_def del: count_add_mset) simp
qed
qed
corollary fold_mset_single: "fold_mset f s {#x#} = f x s"
by simp
lemma fold_mset_fun_left_comm: "f x (fold_mset f s M) = fold_mset f (f x s) M"
by (induct M) (simp_all add: fun_left_comm)
lemma fold_mset_union [simp]: "fold_mset f s (M + N) = fold_mset f (fold_mset f s M) N"
by (induct M) (simp_all add: fold_mset_fun_left_comm)
lemma fold_mset_fusion:
assumes "comp_fun_commute g"
and *: "\<And>x y. h (g x y) = f x (h y)"
shows "h (fold_mset g w A) = fold_mset f (h w) A"
proof -
interpret comp_fun_commute g by (fact assms)
from * show ?thesis by (induct A) auto
qed
end
lemma union_fold_mset_add_mset: "A + B = fold_mset add_mset A B"
proof -
interpret comp_fun_commute add_mset
by standard auto
show ?thesis
by (induction B) auto
qed
text \<open>
A note on code generation: When defining some function containing a
subterm \<^term>\<open>fold_mset F\<close>, code generation is not automatic. When
interpreting locale \<open>comp_fun_commute\<close> with \<open>F\<close>, the
would-be code thms for \<^const>\<open>fold_mset\<close> become thms like
\<^term>\<open>fold_mset F z {#} = z\<close>, where \<open>F\<close> is not a pattern but
contains defined symbols, i.e.\ it is not a code thm. Hence a separate
constant with its own code thms needs to be introduced for \<open>F\<close>. See the image operator below.
\<close>
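(* Hypothetical sketch of the scheme described above, not part of the original theory:
   instead of using fold_mset directly in code, one would introduce a dedicated constant
   such as the one below and then prove code equations with that constant as head symbol.
   The name example_msum is made up for illustration; the image operator below follows
   the same pattern. *)
definition example_msum :: "nat multiset \<Rightarrow> nat"
where "example_msum = fold_mset (+) 0"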
subsection \<open>Image\<close>
definition image_mset :: "('a \<Rightarrow> 'b) \<Rightarrow> 'a multiset \<Rightarrow> 'b multiset" where
"image_mset f = fold_mset (add_mset \<circ> f) {#}"
lemma comp_fun_commute_mset_image: "comp_fun_commute (add_mset \<circ> f)"
by unfold_locales (simp add: fun_eq_iff)
lemma image_mset_empty [simp]: "image_mset f {#} = {#}"
by (simp add: image_mset_def)
lemma image_mset_single: "image_mset f {#x#} = {#f x#}"
by (simp add: comp_fun_commute.fold_mset_add_mset comp_fun_commute_mset_image image_mset_def)
lemma image_mset_union [simp]: "image_mset f (M + N) = image_mset f M + image_mset f N"
proof -
interpret comp_fun_commute "add_mset \<circ> f"
by (fact comp_fun_commute_mset_image)
show ?thesis by (induct N) (simp_all add: image_mset_def)
qed
corollary image_mset_add_mset [simp]:
"image_mset f (add_mset a M) = add_mset (f a) (image_mset f M)"
unfolding image_mset_union add_mset_add_single[of a M] by (simp add: image_mset_single)
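(* Illustrative sketch, not part of the original theory: applying a function under the
   image operator to a concrete multiset; the proof invocation is an assumption and may
   need adjustment. *)
lemma "image_mset (\<lambda>n. n + n) {#a, b#} = {#a + a, b + b#}"
by simp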
lemma set_image_mset [simp]: "set_mset (image_mset f M) = image f (set_mset M)"
by (induct M) simp_all
lemma size_image_mset [simp]: "size (image_mset f M) = size M"
by (induct M) simp_all
lemma image_mset_is_empty_iff [simp]: "image_mset f M = {#} \<longleftrightarrow> M = {#}"
by (cases M) auto
lemma image_mset_If:
"image_mset (\<lambda>x. if P x then f x else g x) A =
image_mset f (filter_mset P A) + image_mset g (filter_mset (\<lambda>x. \<not>P x) A)"
by (induction A) auto
lemma image_mset_Diff:
assumes "B \<subseteq># A"
shows "image_mset f (A - B) = image_mset f A - image_mset f B"
proof -
have "image_mset f (A - B + B) = image_mset f (A - B) + image_mset f B"
by simp
also from assms have "A - B + B = A"
by (simp add: subset_mset.diff_add)
finally show ?thesis by simp
qed
lemma count_image_mset:
\<open>count (image_mset f A) x = (\<Sum>y\<in>f -` {x} \<inter> set_mset A. count A y)\<close>
proof (induction A)
case empty
then show ?case by simp
next
case (add x A)
moreover have *: "(if x = y then Suc n else n) = n + (if x = y then 1 else 0)" for n y
by simp
ultimately show ?case
by (auto simp: sum.distrib intro!: sum.mono_neutral_left)
qed
lemma count_image_mset':
\<open>count (image_mset f X) y = (\<Sum>x | x \<in># X \<and> y = f x. count X x)\<close>
by (auto simp add: count_image_mset simp flip: singleton_conv2 simp add: Collect_conj_eq ac_simps)
lemma image_mset_subseteq_mono: "A \<subseteq># B \<Longrightarrow> image_mset f A \<subseteq># image_mset f B"
by (metis image_mset_union subset_mset.le_iff_add)
lemma image_mset_subset_mono: "M \<subset># N \<Longrightarrow> image_mset f M \<subset># image_mset f N"
by (metis (no_types) Diff_eq_empty_iff_mset image_mset_Diff image_mset_is_empty_iff
image_mset_subseteq_mono subset_mset.less_le_not_le)
syntax (ASCII)
"_comprehension_mset" :: "'a \<Rightarrow> 'b \<Rightarrow> 'b multiset \<Rightarrow> 'a multiset" ("({#_/. _ :# _#})")
syntax
"_comprehension_mset" :: "'a \<Rightarrow> 'b \<Rightarrow> 'b multiset \<Rightarrow> 'a multiset" ("({#_/. _ \<in># _#})")
translations
"{#e. x \<in># M#}" \<rightleftharpoons> "CONST image_mset (\<lambda>x. e) M"
syntax (ASCII)
"_comprehension_mset'" :: "'a \<Rightarrow> 'b \<Rightarrow> 'b multiset \<Rightarrow> bool \<Rightarrow> 'a multiset" ("({#_/ | _ :# _./ _#})")
syntax
"_comprehension_mset'" :: "'a \<Rightarrow> 'b \<Rightarrow> 'b multiset \<Rightarrow> bool \<Rightarrow> 'a multiset" ("({#_/ | _ \<in># _./ _#})")
translations
"{#e | x\<in>#M. P#}" \<rightharpoonup> "{#e. x \<in># {# x\<in>#M. P#}#}"
text \<open>
This allows one to write not just filters like \<^term>\<open>{#x\<in>#M. x<c#}\<close>
but also images like \<^term>\<open>{#x+x. x\<in>#M #}\<close> and @{term [source]
"{#x+x|x\<in>#M. x<c#}"}, where the latter is currently displayed as
\<^term>\<open>{#x+x|x\<in>#M. x<c#}\<close>.
\<close>
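(* Illustrative sketch, not part of the original theory: concrete instances of the image
   comprehension and the combined image/filter comprehension syntax; the proof
   invocations are assumptions and may need adjustment. *)
lemma "{#n + n. n \<in># {#1::nat, 2#}#} = {#2, 4#}"
by simp
lemma "{#n + n | n \<in># {#1::nat, 2, 3#}. n < 3#} = {#2, 4#}"
by simp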
lemma in_image_mset: "y \<in># {#f x. x \<in># M#} \<longleftrightarrow> y \<in> f ` set_mset M"
by simp
functor image_mset: image_mset
proof -
fix f g show "image_mset f \<circ> image_mset g = image_mset (f \<circ> g)"
proof
fix A
show "(image_mset f \<circ> image_mset g) A = image_mset (f \<circ> g) A"
by (induct A) simp_all
qed
show "image_mset id = id"
proof
fix A
show "image_mset id A = id A"
by (induct A) simp_all
qed
qed
declare
image_mset.id [simp]
image_mset.identity [simp]
lemma image_mset_id[simp]: "image_mset id x = x"
unfolding id_def by auto
lemma image_mset_cong: "(\<And>x. x \<in># M \<Longrightarrow> f x = g x) \<Longrightarrow> {#f x. x \<in># M#} = {#g x. x \<in># M#}"
by (induct M) auto
lemma image_mset_cong_pair:
"(\<forall>x y. (x, y) \<in># M \<longrightarrow> f x y = g x y) \<Longrightarrow> {#f x y. (x, y) \<in># M#} = {#g x y. (x, y) \<in># M#}"
by (metis image_mset_cong split_cong)
lemma image_mset_const_eq:
"{#c. a \<in># M#} = replicate_mset (size M) c"
by (induct M) simp_all
lemma image_mset_filter_mset_swap:
"image_mset f (filter_mset (\<lambda>x. P (f x)) M) = filter_mset P (image_mset f M)"
by (induction M rule: multiset_induct) simp_all
lemma image_mset_eq_plusD:
"image_mset f A = B + C \<Longrightarrow> \<exists>B' C'. A = B' + C' \<and> B = image_mset f B' \<and> C = image_mset f C'"
proof (induction A arbitrary: B C)
case empty
thus ?case by simp
next
case (add x A)
show ?case
proof (cases "f x \<in># B")
case True
with add.prems have "image_mset f A = (B - {#f x#}) + C"
by (metis add_mset_remove_trivial image_mset_add_mset mset_subset_eq_single
subset_mset.add_diff_assoc2)
thus ?thesis
using add.IH add.prems by force
next
case False
with add.prems have "image_mset f A = B + (C - {#f x#})"
by (metis diff_single_eq_union diff_union_single_conv image_mset_add_mset union_iff
union_single_eq_member)
then show ?thesis
using add.IH add.prems by force
qed
qed
lemma image_mset_eq_image_mset_plusD:
assumes "image_mset f A = image_mset f B + C" and inj_f: "inj_on f (set_mset A \<union> set_mset B)"
shows "\<exists>C'. A = B + C' \<and> C = image_mset f C'"
using assms
proof (induction A arbitrary: B C)
case empty
thus ?case by simp
next
case (add x A)
show ?case
proof (cases "x \<in># B")
case True
with add.prems have "image_mset f A = image_mset f (B - {#x#}) + C"
by (smt (verit, del_insts) add.left_commute add_cancel_right_left diff_union_cancelL
diff_union_single_conv image_mset_union union_mset_add_mset_left
union_mset_add_mset_right)
with add.IH have "\<exists>M3'. A = B - {#x#} + M3' \<and> image_mset f M3' = C"
by (smt (verit, del_insts) True Un_insert_left Un_insert_right add.prems(2) inj_on_insert
insert_DiffM set_mset_add_mset_insert)
with True show ?thesis
by auto
next
case False
with add.prems(2) have "f x \<notin># image_mset f B"
by auto
with add.prems(1) have "image_mset f A = image_mset f B + (C - {#f x#})"
by (metis (no_types, lifting) diff_union_single_conv image_eqI image_mset_Diff
image_mset_single mset_subset_eq_single set_image_mset union_iff union_single_eq_diff
union_single_eq_member)
with add.prems(2) add.IH have "\<exists>M3'. A = B + M3' \<and> C - {#f x#} = image_mset f M3'"
by auto
then show ?thesis
by (metis add.prems(1) add_diff_cancel_left' image_mset_Diff mset_subset_eq_add_left
union_mset_add_mset_right)
qed
qed
lemma image_mset_eq_plus_image_msetD:
"image_mset f A = B + image_mset f C \<Longrightarrow> inj_on f (set_mset A \<union> set_mset C) \<Longrightarrow>
\<exists>B'. A = B' + C \<and> B = image_mset f B'"
unfolding add.commute[of B] add.commute[of _ C]
by (rule image_mset_eq_image_mset_plusD; assumption)
subsection \<open>Further conversions\<close>
primrec mset :: "'a list \<Rightarrow> 'a multiset" where
"mset [] = {#}" |
"mset (a # x) = add_mset a (mset x)"
lemma in_multiset_in_set:
"x \<in># mset xs \<longleftrightarrow> x \<in> set xs"
by (induct xs) simp_all
lemma count_mset:
"count (mset xs) x = length (filter (\<lambda>y. x = y) xs)"
by (induct xs) simp_all
lemma mset_zero_iff[simp]: "(mset x = {#}) = (x = [])"
by (induct x) auto
lemma mset_zero_iff_right[simp]: "({#} = mset x) = (x = [])"
by (induct x) auto
lemma count_mset_gt_0: "x \<in> set xs \<Longrightarrow> count (mset xs) x > 0"
by (induction xs) auto
lemma count_mset_0_iff [simp]: "count (mset xs) x = 0 \<longleftrightarrow> x \<notin> set xs"
by (induction xs) auto
lemma mset_single_iff[iff]: "mset xs = {#x#} \<longleftrightarrow> xs = [x]"
by (cases xs) auto
lemma mset_single_iff_right[iff]: "{#x#} = mset xs \<longleftrightarrow> xs = [x]"
by (cases xs) auto
lemma set_mset_mset[simp]: "set_mset (mset xs) = set xs"
by (induct xs) auto
lemma set_mset_comp_mset [simp]: "set_mset \<circ> mset = set"
by (simp add: fun_eq_iff)
lemma size_mset [simp]: "size (mset xs) = length xs"
by (induct xs) simp_all
lemma mset_append [simp]: "mset (xs @ ys) = mset xs + mset ys"
by (induct xs arbitrary: ys) auto
lemma mset_filter[simp]: "mset (filter P xs) = {#x \<in># mset xs. P x #}"
by (induct xs) simp_all
lemma mset_rev [simp]:
"mset (rev xs) = mset xs"
by (induct xs) simp_all
lemma surj_mset: "surj mset"
unfolding surj_def
proof (rule allI)
fix M
show "\<exists>xs. M = mset xs"
by (induction M) (auto intro: exI[of _ "_ # _"])
qed
lemma distinct_count_atmost_1:
"distinct x = (\<forall>a. count (mset x) a = (if a \<in> set x then 1 else 0))"
proof (induct x)
case Nil then show ?case by simp
next
case (Cons x xs) show ?case (is "?lhs \<longleftrightarrow> ?rhs")
proof
assume ?lhs then show ?rhs using Cons by simp
next
assume ?rhs then have "x \<notin> set xs"
by (simp split: if_splits)
moreover from \<open>?rhs\<close> have "(\<forall>a. count (mset xs) a =
(if a \<in> set xs then 1 else 0))"
by (auto split: if_splits simp add: count_eq_zero_iff)
ultimately show ?lhs using Cons by simp
qed
qed
lemma mset_eq_setD:
assumes "mset xs = mset ys"
shows "set xs = set ys"
proof -
from assms have "set_mset (mset xs) = set_mset (mset ys)"
by simp
then show ?thesis by simp
qed
lemma set_eq_iff_mset_eq_distinct:
\<open>distinct x \<Longrightarrow> distinct y \<Longrightarrow> set x = set y \<longleftrightarrow> mset x = mset y\<close>
by (auto simp: multiset_eq_iff distinct_count_atmost_1)
lemma set_eq_iff_mset_remdups_eq:
\<open>set x = set y \<longleftrightarrow> mset (remdups x) = mset (remdups y)\<close>
apply (rule iffI)
apply (simp add: set_eq_iff_mset_eq_distinct[THEN iffD1])
apply (drule distinct_remdups [THEN distinct_remdups
[THEN set_eq_iff_mset_eq_distinct [THEN iffD2]]])
apply simp
done
lemma mset_eq_imp_distinct_iff:
\<open>distinct xs \<longleftrightarrow> distinct ys\<close> if \<open>mset xs = mset ys\<close>
using that by (auto simp add: distinct_count_atmost_1 dest: mset_eq_setD)
lemma nth_mem_mset: "i < length ls \<Longrightarrow> (ls ! i) \<in># mset ls"
proof (induct ls arbitrary: i)
case Nil
then show ?case by simp
next
case Cons
then show ?case by (cases i) auto
qed
lemma mset_remove1[simp]: "mset (remove1 a xs) = mset xs - {#a#}"
by (induct xs) (auto simp add: multiset_eq_iff)
lemma mset_eq_length:
assumes "mset xs = mset ys"
shows "length xs = length ys"
using assms by (metis size_mset)
lemma mset_eq_length_filter:
assumes "mset xs = mset ys"
shows "length (filter (\<lambda>x. z = x) xs) = length (filter (\<lambda>y. z = y) ys)"
using assms by (metis count_mset)
lemma fold_multiset_equiv:
\<open>List.fold f xs = List.fold f ys\<close>
if f: \<open>\<And>x y. x \<in> set xs \<Longrightarrow> y \<in> set xs \<Longrightarrow> f x \<circ> f y = f y \<circ> f x\<close>
and \<open>mset xs = mset ys\<close>
using f \<open>mset xs = mset ys\<close> [symmetric] proof (induction xs arbitrary: ys)
case Nil
then show ?case by simp
next
case (Cons x xs)
then have *: \<open>set ys = set (x # xs)\<close>
by (blast dest: mset_eq_setD)
have \<open>\<And>x y. x \<in> set ys \<Longrightarrow> y \<in> set ys \<Longrightarrow> f x \<circ> f y = f y \<circ> f x\<close>
by (rule Cons.prems(1)) (simp_all add: *)
moreover from * have \<open>x \<in> set ys\<close>
by simp
ultimately have \<open>List.fold f ys = List.fold f (remove1 x ys) \<circ> f x\<close>
by (fact fold_remove1_split)
moreover from Cons.prems have \<open>List.fold f xs = List.fold f (remove1 x ys)\<close>
by (auto intro: Cons.IH)
ultimately show ?case
by simp
qed
lemma fold_permuted_eq:
\<open>List.fold (\<odot>) xs z = List.fold (\<odot>) ys z\<close>
if \<open>mset xs = mset ys\<close>
and \<open>P z\<close> and P: \<open>\<And>x z. x \<in> set xs \<Longrightarrow> P z \<Longrightarrow> P (x \<odot> z)\<close>
and f: \<open>\<And>x y z. x \<in> set xs \<Longrightarrow> y \<in> set xs \<Longrightarrow> P z \<Longrightarrow> x \<odot> (y \<odot> z) = y \<odot> (x \<odot> z)\<close>
for f (infixl \<open>\<odot>\<close> 70)
using \<open>P z\<close> P f \<open>mset xs = mset ys\<close> [symmetric] proof (induction xs arbitrary: ys z)
case Nil
then show ?case by simp
next
case (Cons x xs)
then have *: \<open>set ys = set (x # xs)\<close>
by (blast dest: mset_eq_setD)
have \<open>P z\<close>
by (fact Cons.prems(1))
moreover have \<open>\<And>x z. x \<in> set ys \<Longrightarrow> P z \<Longrightarrow> P (x \<odot> z)\<close>
by (rule Cons.prems(2)) (simp_all add: *)
moreover have \<open>\<And>x y z. x \<in> set ys \<Longrightarrow> y \<in> set ys \<Longrightarrow> P z \<Longrightarrow> x \<odot> (y \<odot> z) = y \<odot> (x \<odot> z)\<close>
by (rule Cons.prems(3)) (simp_all add: *)
moreover from * have \<open>x \<in> set ys\<close>
by simp
ultimately have \<open>fold (\<odot>) ys z = fold (\<odot>) (remove1 x ys) (x \<odot> z)\<close>
by (induction ys arbitrary: z) auto
moreover from Cons.prems have \<open>fold (\<odot>) xs (x \<odot> z) = fold (\<odot>) (remove1 x ys) (x \<odot> z)\<close>
by (auto intro: Cons.IH)
ultimately show ?case
by simp
qed
lemma mset_shuffles: "zs \<in> shuffles xs ys \<Longrightarrow> mset zs = mset xs + mset ys"
by (induction xs ys arbitrary: zs rule: shuffles.induct) auto
lemma mset_insort [simp]: "mset (insort x xs) = add_mset x (mset xs)"
by (induct xs) simp_all
lemma mset_map[simp]: "mset (map f xs) = image_mset f (mset xs)"
by (induct xs) simp_all
global_interpretation mset_set: folding add_mset "{#}"
defines mset_set = "folding_on.F add_mset {#}"
by standard (simp add: fun_eq_iff)
lemma sum_multiset_singleton [simp]: "sum (\<lambda>n. {#n#}) A = mset_set A"
by (induction A rule: infinite_finite_induct) auto
lemma count_mset_set [simp]:
"finite A \<Longrightarrow> x \<in> A \<Longrightarrow> count (mset_set A) x = 1" (is "PROP ?P")
"\<not> finite A \<Longrightarrow> count (mset_set A) x = 0" (is "PROP ?Q")
"x \<notin> A \<Longrightarrow> count (mset_set A) x = 0" (is "PROP ?R")
proof -
have *: "count (mset_set A) x = 0" if "x \<notin> A" for A
proof (cases "finite A")
case False then show ?thesis by simp
next
case True from True \<open>x \<notin> A\<close> show ?thesis by (induct A) auto
qed
then show "PROP ?P" "PROP ?Q" "PROP ?R"
by (auto elim!: Set.set_insert)
qed \<comment> \<open>TODO: maybe define \<^const>\<open>mset_set\<close> also in terms of \<^const>\<open>Abs_multiset\<close>\<close>
lemma elem_mset_set[simp, intro]: "finite A \<Longrightarrow> x \<in># mset_set A \<longleftrightarrow> x \<in> A"
by (induct A rule: finite_induct) simp_all
lemma mset_set_Union:
"finite A \<Longrightarrow> finite B \<Longrightarrow> A \<inter> B = {} \<Longrightarrow> mset_set (A \<union> B) = mset_set A + mset_set B"
by (induction A rule: finite_induct) auto
lemma filter_mset_mset_set [simp]:
"finite A \<Longrightarrow> filter_mset P (mset_set A) = mset_set {x\<in>A. P x}"
proof (induction A rule: finite_induct)
case (insert x A)
from insert.hyps have "filter_mset P (mset_set (insert x A)) =
filter_mset P (mset_set A) + mset_set (if P x then {x} else {})"
by simp
also have "filter_mset P (mset_set A) = mset_set {x\<in>A. P x}"
by (rule insert.IH)
also from insert.hyps
have "\<dots> + mset_set (if P x then {x} else {}) =
mset_set ({x \<in> A. P x} \<union> (if P x then {x} else {}))" (is "_ = mset_set ?A")
by (intro mset_set_Union [symmetric]) simp_all
also from insert.hyps have "?A = {y\<in>insert x A. P y}" by auto
finally show ?case .
qed simp_all
lemma mset_set_Diff:
assumes "finite A" "B \<subseteq> A"
shows "mset_set (A - B) = mset_set A - mset_set B"
proof -
from assms have "mset_set ((A - B) \<union> B) = mset_set (A - B) + mset_set B"
by (intro mset_set_Union) (auto dest: finite_subset)
also from assms have "A - B \<union> B = A" by blast
finally show ?thesis by simp
qed
lemma mset_set_set: "distinct xs \<Longrightarrow> mset_set (set xs) = mset xs"
by (induction xs) simp_all
lemma count_mset_set': "count (mset_set A) x = (if finite A \<and> x \<in> A then 1 else 0)"
by auto
lemma subset_imp_msubset_mset_set:
assumes "A \<subseteq> B" "finite B"
shows "mset_set A \<subseteq># mset_set B"
proof (rule mset_subset_eqI)
fix x :: 'a
from assms have "finite A" by (rule finite_subset)
with assms show "count (mset_set A) x \<le> count (mset_set B) x"
by (cases "x \<in> A"; cases "x \<in> B") auto
qed
lemma mset_set_set_mset_msubset: "mset_set (set_mset A) \<subseteq># A"
proof (rule mset_subset_eqI)
fix x show "count (mset_set (set_mset A)) x \<le> count A x"
by (cases "x \<in># A") simp_all
qed
lemma mset_set_upto_eq_mset_upto:
\<open>mset_set {..<n} = mset [0..<n]\<close>
by (induction n) (auto simp: ac_simps lessThan_Suc)
context linorder
begin
definition sorted_list_of_multiset :: "'a multiset \<Rightarrow> 'a list"
where
"sorted_list_of_multiset M = fold_mset insort [] M"
lemma sorted_list_of_multiset_empty [simp]:
"sorted_list_of_multiset {#} = []"
by (simp add: sorted_list_of_multiset_def)
lemma sorted_list_of_multiset_singleton [simp]:
"sorted_list_of_multiset {#x#} = [x]"
proof -
interpret comp_fun_commute insort by (fact comp_fun_commute_insort)
show ?thesis by (simp add: sorted_list_of_multiset_def)
qed
lemma sorted_list_of_multiset_insert [simp]:
"sorted_list_of_multiset (add_mset x M) = List.insort x (sorted_list_of_multiset M)"
proof -
interpret comp_fun_commute insort by (fact comp_fun_commute_insort)
show ?thesis by (simp add: sorted_list_of_multiset_def)
qed
end
lemma mset_sorted_list_of_multiset[simp]: "mset (sorted_list_of_multiset M) = M"
by (induct M) simp_all
lemma sorted_list_of_multiset_mset[simp]: "sorted_list_of_multiset (mset xs) = sort xs"
by (induct xs) simp_all
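text \<open>A small concrete instance of the conversion back to sorted lists; the equation
should follow by simplification with the rules above:\<close>
lemma "sorted_list_of_multiset {#2, 1, 3::nat#} = [1, 2, 3]"
by simp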
lemma finite_set_mset_mset_set[simp]: "finite A \<Longrightarrow> set_mset (mset_set A) = A"
by auto
lemma mset_set_empty_iff: "mset_set A = {#} \<longleftrightarrow> A = {} \<or> infinite A"
using finite_set_mset_mset_set by fastforce
lemma infinite_set_mset_mset_set: "\<not> finite A \<Longrightarrow> set_mset (mset_set A) = {}"
by simp
lemma set_sorted_list_of_multiset [simp]:
"set (sorted_list_of_multiset M) = set_mset M"
by (induct M) (simp_all add: set_insort_key)
lemma sorted_list_of_mset_set [simp]:
"sorted_list_of_multiset (mset_set A) = sorted_list_of_set A"
by (cases "finite A") (induct A rule: finite_induct, simp_all)
lemma mset_upt [simp]: "mset [m..<n] = mset_set {m..<n}"
by (induction n) (simp_all add: atLeastLessThanSuc)
lemma image_mset_map_of:
"distinct (map fst xs) \<Longrightarrow> {#the (map_of xs i). i \<in># mset (map fst xs)#} = mset (map snd xs)"
proof (induction xs)
case (Cons x xs)
have "{#the (map_of (x # xs) i). i \<in># mset (map fst (x # xs))#} =
add_mset (snd x) {#the (if i = fst x then Some (snd x) else map_of xs i).
i \<in># mset (map fst xs)#}" (is "_ = add_mset _ ?A") by simp
also from Cons.prems have "?A = {#the (map_of xs i). i :# mset (map fst xs)#}"
by (cases x, intro image_mset_cong) (auto simp: in_multiset_in_set)
also from Cons.prems have "\<dots> = mset (map snd xs)" by (intro Cons.IH) simp_all
finally show ?case by simp
qed simp_all
lemma msubset_mset_set_iff[simp]:
assumes "finite A" "finite B"
shows "mset_set A \<subseteq># mset_set B \<longleftrightarrow> A \<subseteq> B"
using assms set_mset_mono subset_imp_msubset_mset_set by fastforce
lemma mset_set_eq_iff[simp]:
assumes "finite A" "finite B"
shows "mset_set A = mset_set B \<longleftrightarrow> A = B"
using assms by (fastforce dest: finite_set_mset_mset_set)
lemma image_mset_mset_set: \<^marker>\<open>contributor \<open>Lukas Bulwahn\<close>\<close>
assumes "inj_on f A"
shows "image_mset f (mset_set A) = mset_set (f ` A)"
proof cases
assume "finite A"
from this \<open>inj_on f A\<close> show ?thesis
by (induct A) auto
next
assume "infinite A"
from this \<open>inj_on f A\<close> have "infinite (f ` A)"
using finite_imageD by blast
from \<open>infinite A\<close> \<open>infinite (f ` A)\<close> show ?thesis by simp
qed
subsection \<open>More properties of the replicate and repeat operations\<close>
lemma in_replicate_mset[simp]: "x \<in># replicate_mset n y \<longleftrightarrow> n > 0 \<and> x = y"
unfolding replicate_mset_def by (induct n) auto
lemma set_mset_replicate_mset_subset[simp]: "set_mset (replicate_mset n x) = (if n = 0 then {} else {x})"
by (auto split: if_splits)
lemma size_replicate_mset[simp]: "size (replicate_mset n M) = n"
by (induct n, simp_all)
lemma count_le_replicate_mset_subset_eq: "n \<le> count M x \<longleftrightarrow> replicate_mset n x \<subseteq># M"
by (auto simp add: mset_subset_eqI) (metis count_replicate_mset subseteq_mset_def)
lemma filter_eq_replicate_mset: "{#y \<in># D. y = x#} = replicate_mset (count D x) x"
by (induct D) simp_all
lemma replicate_count_mset_eq_filter_eq: "replicate (count (mset xs) k) k = filter (HOL.eq k) xs"
by (induct xs) auto
lemma replicate_mset_eq_empty_iff [simp]: "replicate_mset n a = {#} \<longleftrightarrow> n = 0"
by (induct n) simp_all
lemma replicate_mset_eq_iff:
"replicate_mset m a = replicate_mset n b \<longleftrightarrow> m = 0 \<and> n = 0 \<or> m = n \<and> a = b"
by (auto simp add: multiset_eq_iff)
lemma repeat_mset_cancel1: "repeat_mset a A = repeat_mset a B \<longleftrightarrow> A = B \<or> a = 0"
by (auto simp: multiset_eq_iff)
lemma repeat_mset_cancel2: "repeat_mset a A = repeat_mset b A \<longleftrightarrow> a = b \<or> A = {#}"
by (auto simp: multiset_eq_iff)
lemma repeat_mset_eq_empty_iff: "repeat_mset n A = {#} \<longleftrightarrow> n = 0 \<or> A = {#}"
by (cases n) auto
lemma image_replicate_mset [simp]:
"image_mset f (replicate_mset n a) = replicate_mset n (f a)"
by (induct n) simp_all
lemma replicate_mset_msubseteq_iff:
"replicate_mset m a \<subseteq># replicate_mset n b \<longleftrightarrow> m = 0 \<or> a = b \<and> m \<le> n"
by (cases m)
(auto simp: insert_subset_eq_iff simp flip: count_le_replicate_mset_subset_eq)
lemma msubseteq_replicate_msetE:
assumes "A \<subseteq># replicate_mset n a"
obtains m where "m \<le> n" and "A = replicate_mset m a"
proof (cases "n = 0")
case True
with assms that show thesis
by simp
next
case False
from assms have "set_mset A \<subseteq> set_mset (replicate_mset n a)"
by (rule set_mset_mono)
with False have "set_mset A \<subseteq> {a}"
by simp
then have "\<exists>m. A = replicate_mset m a"
proof (induction A)
case empty
then show ?case
by simp
next
case (add b A)
then obtain m where "A = replicate_mset m a"
by auto
with add.prems show ?case
by (auto intro: exI [of _ "Suc m"])
qed
then obtain m where A: "A = replicate_mset m a" ..
with assms have "m \<le> n"
by (auto simp add: replicate_mset_msubseteq_iff)
then show thesis using A ..
qed
subsection \<open>Big operators\<close>
locale comm_monoid_mset = comm_monoid
begin
interpretation comp_fun_commute f
by standard (simp add: fun_eq_iff left_commute)
interpretation comp?: comp_fun_commute "f \<circ> g"
by (fact comp_comp_fun_commute)
context
begin
definition F :: "'a multiset \<Rightarrow> 'a"
where eq_fold: "F M = fold_mset f \<^bold>1 M"
lemma empty [simp]: "F {#} = \<^bold>1"
by (simp add: eq_fold)
lemma singleton [simp]: "F {#x#} = x"
proof -
interpret comp_fun_commute
by standard (simp add: fun_eq_iff left_commute)
show ?thesis by (simp add: eq_fold)
qed
lemma union [simp]: "F (M + N) = F M \<^bold>* F N"
proof -
interpret comp_fun_commute f
by standard (simp add: fun_eq_iff left_commute)
show ?thesis
by (induct N) (simp_all add: left_commute eq_fold)
qed
lemma add_mset [simp]: "F (add_mset x N) = x \<^bold>* F N"
unfolding add_mset_add_single[of x N] union by (simp add: ac_simps)
lemma insert [simp]:
shows "F (image_mset g (add_mset x A)) = g x \<^bold>* F (image_mset g A)"
by (simp add: eq_fold)
lemma remove:
assumes "x \<in># A"
shows "F A = x \<^bold>* F (A - {#x#})"
using multi_member_split[OF assms] by auto
lemma neutral:
"\<forall>x\<in>#A. x = \<^bold>1 \<Longrightarrow> F A = \<^bold>1"
by (induct A) simp_all
lemma neutral_const [simp]:
"F (image_mset (\<lambda>_. \<^bold>1) A) = \<^bold>1"
by (simp add: neutral)
private lemma F_image_mset_product:
"F {#g x j \<^bold>* F {#g i j. i \<in># A#}. j \<in># B#} =
F (image_mset (g x) B) \<^bold>* F {#F {#g i j. i \<in># A#}. j \<in># B#}"
by (induction B) (simp_all add: left_commute semigroup.assoc semigroup_axioms)
lemma swap:
"F (image_mset (\<lambda>i. F (image_mset (g i) B)) A) =
F (image_mset (\<lambda>j. F (image_mset (\<lambda>i. g i j) A)) B)"
apply (induction A, simp)
apply (induction B, auto simp add: F_image_mset_product ac_simps)
done
lemma distrib: "F (image_mset (\<lambda>x. g x \<^bold>* h x) A) = F (image_mset g A) \<^bold>* F (image_mset h A)"
by (induction A) (auto simp: ac_simps)
lemma union_disjoint:
"A \<inter># B = {#} \<Longrightarrow> F (image_mset g (A \<union># B)) = F (image_mset g A) \<^bold>* F (image_mset g B)"
by (induction A) (auto simp: ac_simps)
end
end
lemma comp_fun_commute_plus_mset[simp]: "comp_fun_commute ((+) :: 'a multiset \<Rightarrow> _ \<Rightarrow> _)"
by standard (simp add: add_ac comp_def)
declare comp_fun_commute.fold_mset_add_mset[OF comp_fun_commute_plus_mset, simp]
lemma in_mset_fold_plus_iff[iff]: "x \<in># fold_mset (+) M NN \<longleftrightarrow> x \<in># M \<or> (\<exists>N. N \<in># NN \<and> x \<in># N)"
by (induct NN) auto
context comm_monoid_add
begin
sublocale sum_mset: comm_monoid_mset plus 0
defines sum_mset = sum_mset.F ..
lemma sum_unfold_sum_mset:
"sum f A = sum_mset (image_mset f (mset_set A))"
by (cases "finite A") (induct A rule: finite_induct, simp_all)
end
notation sum_mset ("\<Sum>\<^sub>#")
syntax (ASCII)
"_sum_mset_image" :: "pttrn \<Rightarrow> 'b set \<Rightarrow> 'a \<Rightarrow> 'a::comm_monoid_add" ("(3SUM _:#_. _)" [0, 51, 10] 10)
syntax
"_sum_mset_image" :: "pttrn \<Rightarrow> 'b set \<Rightarrow> 'a \<Rightarrow> 'a::comm_monoid_add" ("(3\<Sum>_\<in>#_. _)" [0, 51, 10] 10)
translations
"\<Sum>i \<in># A. b" \<rightleftharpoons> "CONST sum_mset (CONST image_mset (\<lambda>i. b) A)"
context comm_monoid_add
begin
lemma sum_mset_sum_list:
"sum_mset (mset xs) = sum_list xs"
by (induction xs) auto
end
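text \<open>In contrast to \<open>sum\<close> over a set, \<open>sum_mset\<close> takes multiplicities into account;
for instance, the duplicated \<open>2\<close> below contributes twice (a routine computation):\<close>
lemma "\<Sum>\<^sub># {#1, 2, 2::nat#} = 5"
by simp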
context canonically_ordered_monoid_add
begin
lemma sum_mset_0_iff [simp]:
"sum_mset M = 0 \<longleftrightarrow> (\<forall>x \<in> set_mset M. x = 0)"
by (induction M) auto
end
context ordered_comm_monoid_add
begin
lemma sum_mset_mono:
"sum_mset (image_mset f K) \<le> sum_mset (image_mset g K)"
if "\<And>i. i \<in># K \<Longrightarrow> f i \<le> g i"
using that by (induction K) (simp_all add: add_mono)
end
context cancel_comm_monoid_add
begin
lemma sum_mset_diff:
"sum_mset (M - N) = sum_mset M - sum_mset N" if "N \<subseteq># M" for M N :: "'a multiset"
using that by (auto simp add: subset_mset.le_iff_add)
end
context semiring_0
begin
lemma sum_mset_distrib_left:
"c * (\<Sum>x \<in># M. f x) = (\<Sum>x \<in># M. c * f(x))"
by (induction M) (simp_all add: algebra_simps)
lemma sum_mset_distrib_right:
"(\<Sum>x \<in># M. f x) * c = (\<Sum>x \<in># M. f x * c)"
by (induction M) (simp_all add: algebra_simps)
end
lemma sum_mset_product:
fixes f :: "'a::{comm_monoid_add,times} \<Rightarrow> 'b::semiring_0"
shows "(\<Sum>i \<in># A. f i) * (\<Sum>i \<in># B. g i) = (\<Sum>i\<in>#A. \<Sum>j\<in>#B. f i * g j)"
by (subst sum_mset.swap) (simp add: sum_mset_distrib_left sum_mset_distrib_right)
context semiring_1
begin
lemma sum_mset_replicate_mset [simp]:
"sum_mset (replicate_mset n a) = of_nat n * a"
by (induction n) (simp_all add: algebra_simps)
lemma sum_mset_delta:
"sum_mset (image_mset (\<lambda>x. if x = y then c else 0) A) = c * of_nat (count A y)"
by (induction A) (simp_all add: algebra_simps)
lemma sum_mset_delta':
"sum_mset (image_mset (\<lambda>x. if y = x then c else 0) A) = c * of_nat (count A y)"
by (induction A) (simp_all add: algebra_simps)
end
lemma of_nat_sum_mset [simp]:
"of_nat (sum_mset A) = sum_mset (image_mset of_nat A)"
by (induction A) auto
lemma size_eq_sum_mset:
"size M = (\<Sum>a\<in>#M. 1)"
using image_mset_const_eq [of "1::nat" M] by simp
lemma size_mset_set [simp]:
"size (mset_set A) = card A"
by (simp only: size_eq_sum_mset card_eq_sum sum_unfold_sum_mset)
lemma sum_mset_constant [simp]:
fixes y :: "'b::semiring_1"
shows \<open>(\<Sum>x\<in>#A. y) = of_nat (size A) * y\<close>
by (induction A) (auto simp: algebra_simps)
lemma set_mset_Union_mset[simp]: "set_mset (\<Sum>\<^sub># MM) = (\<Union>M \<in> set_mset MM. set_mset M)"
by (induct MM) auto
lemma in_Union_mset_iff[iff]: "x \<in># \<Sum>\<^sub># MM \<longleftrightarrow> (\<exists>M. M \<in># MM \<and> x \<in># M)"
by (induct MM) auto
lemma count_sum:
"count (sum f A) x = sum (\<lambda>a. count (f a) x) A"
by (induct A rule: infinite_finite_induct) simp_all
lemma sum_eq_empty_iff:
assumes "finite A"
shows "sum f A = {#} \<longleftrightarrow> (\<forall>a\<in>A. f a = {#})"
using assms by induct simp_all
lemma Union_mset_empty_conv[simp]: "\<Sum>\<^sub># M = {#} \<longleftrightarrow> (\<forall>i\<in>#M. i = {#})"
by (induction M) auto
lemma Union_image_single_mset[simp]: "\<Sum>\<^sub># (image_mset (\<lambda>x. {#x#}) m) = m"
by(induction m) auto
context comm_monoid_mult
begin
sublocale prod_mset: comm_monoid_mset times 1
defines prod_mset = prod_mset.F ..
lemma prod_mset_empty:
"prod_mset {#} = 1"
by (fact prod_mset.empty)
lemma prod_mset_singleton:
"prod_mset {#x#} = x"
by (fact prod_mset.singleton)
lemma prod_mset_Un:
"prod_mset (A + B) = prod_mset A * prod_mset B"
by (fact prod_mset.union)
lemma prod_mset_prod_list:
"prod_mset (mset xs) = prod_list xs"
by (induct xs) auto
lemma prod_mset_replicate_mset [simp]:
"prod_mset (replicate_mset n a) = a ^ n"
by (induct n) simp_all
lemma prod_unfold_prod_mset:
"prod f A = prod_mset (image_mset f (mset_set A))"
by (cases "finite A") (induct A rule: finite_induct, simp_all)
lemma prod_mset_multiplicity:
"prod_mset M = prod (\<lambda>x. x ^ count M x) (set_mset M)"
by (simp add: fold_mset_def prod.eq_fold prod_mset.eq_fold funpow_times_power comp_def)
lemma prod_mset_delta: "prod_mset (image_mset (\<lambda>x. if x = y then c else 1) A) = c ^ count A y"
by (induction A) simp_all
lemma prod_mset_delta': "prod_mset (image_mset (\<lambda>x. if y = x then c else 1) A) = c ^ count A y"
by (induction A) simp_all
lemma prod_mset_subset_imp_dvd:
assumes "A \<subseteq># B"
shows "prod_mset A dvd prod_mset B"
proof -
from assms have "B = (B - A) + A" by (simp add: subset_mset.diff_add)
also have "prod_mset \<dots> = prod_mset (B - A) * prod_mset A" by simp
also have "prod_mset A dvd \<dots>" by simp
finally show ?thesis .
qed
lemma dvd_prod_mset:
assumes "x \<in># A"
shows "x dvd prod_mset A"
using assms prod_mset_subset_imp_dvd [of "{#x#}" A] by simp
end
notation prod_mset ("\<Prod>\<^sub>#")
syntax (ASCII)
"_prod_mset_image" :: "pttrn \<Rightarrow> 'b set \<Rightarrow> 'a \<Rightarrow> 'a::comm_monoid_mult" ("(3PROD _:#_. _)" [0, 51, 10] 10)
syntax
"_prod_mset_image" :: "pttrn \<Rightarrow> 'b set \<Rightarrow> 'a \<Rightarrow> 'a::comm_monoid_mult" ("(3\<Prod>_\<in>#_. _)" [0, 51, 10] 10)
translations
"\<Prod>i \<in># A. b" \<rightleftharpoons> "CONST prod_mset (CONST image_mset (\<lambda>i. b) A)"
lemma prod_mset_constant [simp]: "(\<Prod>_\<in>#A. c) = c ^ size A"
by (simp add: image_mset_const_eq)
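text \<open>Analogously to \<open>sum_mset\<close>, repeated elements contribute repeatedly to \<open>prod_mset\<close>;
a small computed instance:\<close>
lemma "\<Prod>\<^sub># {#2, 2, 3::nat#} = 12"
by simp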
lemma (in semidom) prod_mset_zero_iff [iff]:
"prod_mset A = 0 \<longleftrightarrow> 0 \<in># A"
by (induct A) auto
lemma (in semidom_divide) prod_mset_diff:
assumes "B \<subseteq># A" and "0 \<notin># B"
shows "prod_mset (A - B) = prod_mset A div prod_mset B"
proof -
from assms obtain C where "A = B + C"
by (metis subset_mset.add_diff_inverse)
with assms show ?thesis by simp
qed
lemma (in semidom_divide) prod_mset_minus:
assumes "a \<in># A" and "a \<noteq> 0"
shows "prod_mset (A - {#a#}) = prod_mset A div a"
using assms prod_mset_diff [of "{#a#}" A] by auto
lemma (in normalization_semidom) normalize_prod_mset_normalize:
"normalize (prod_mset (image_mset normalize A)) = normalize (prod_mset A)"
proof (induction A)
case (add x A)
have "normalize (prod_mset (image_mset normalize (add_mset x A))) =
normalize (x * normalize (prod_mset (image_mset normalize A)))"
by simp
also note add.IH
finally show ?case by simp
qed auto
lemma (in algebraic_semidom) is_unit_prod_mset_iff:
"is_unit (prod_mset A) \<longleftrightarrow> (\<forall>x \<in># A. is_unit x)"
by (induct A) (auto simp: is_unit_mult_iff)
lemma (in normalization_semidom_multiplicative) normalize_prod_mset:
"normalize (prod_mset A) = prod_mset (image_mset normalize A)"
by (induct A) (simp_all add: normalize_mult)
lemma (in normalization_semidom_multiplicative) normalized_prod_msetI:
assumes "\<And>a. a \<in># A \<Longrightarrow> normalize a = a"
shows "normalize (prod_mset A) = prod_mset A"
proof -
from assms have "image_mset normalize A = A"
by (induct A) simp_all
then show ?thesis by (simp add: normalize_prod_mset)
qed
subsection \<open>Multiset as order-ignorant lists\<close>
context linorder
begin
lemma mset_insort [simp]:
"mset (insort_key k x xs) = add_mset x (mset xs)"
by (induct xs) simp_all
lemma mset_sort [simp]:
"mset (sort_key k xs) = mset xs"
by (induct xs) simp_all
text \<open>
This lemma shows which properties of a candidate result \<open>ys\<close> suffice to conclude
that \<open>sort_key f xs = ys\<close>: \<open>ys\<close> must have the same multiset of elements as \<open>xs\<close>,
list elements with equal keys in the same order as \<open>xs\<close>, and be sorted w.r.t. the key \<open>f\<close>.
\<close>
lemma properties_for_sort_key:
assumes "mset ys = mset xs"
and "\<And>k. k \<in> set ys \<Longrightarrow> filter (\<lambda>x. f k = f x) ys = filter (\<lambda>x. f k = f x) xs"
and "sorted (map f ys)"
shows "sort_key f xs = ys"
using assms
proof (induct xs arbitrary: ys)
case Nil then show ?case by simp
next
case (Cons x xs)
from Cons.prems(2) have
"\<forall>k \<in> set ys. filter (\<lambda>x. f k = f x) (remove1 x ys) = filter (\<lambda>x. f k = f x) xs"
by (simp add: filter_remove1)
with Cons.prems have "sort_key f xs = remove1 x ys"
by (auto intro!: Cons.hyps simp add: sorted_map_remove1)
moreover from Cons.prems have "x \<in># mset ys"
by auto
then have "x \<in> set ys"
by simp
ultimately show ?case using Cons.prems by (simp add: insort_key_remove1)
qed
lemma properties_for_sort:
assumes multiset: "mset ys = mset xs"
and "sorted ys"
shows "sort xs = ys"
proof (rule properties_for_sort_key)
from multiset show "mset ys = mset xs" .
from \<open>sorted ys\<close> show "sorted (map (\<lambda>x. x) ys)" by simp
from multiset have "length (filter (\<lambda>y. k = y) ys) = length (filter (\<lambda>x. k = x) xs)" for k
by (rule mset_eq_length_filter)
then have "replicate (length (filter (\<lambda>y. k = y) ys)) k =
replicate (length (filter (\<lambda>x. k = x) xs)) k" for k
by simp
then show "k \<in> set ys \<Longrightarrow> filter (\<lambda>y. k = y) ys = filter (\<lambda>x. k = x) xs" for k
by (simp add: replicate_length_filter)
qed
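text \<open>As a sanity check, the characterization is consistent with evaluating \<open>sort\<close>
directly: \<open>[1, 2, 3]\<close> has the same multiset of elements as \<open>[2, 3, 1]\<close> and is sorted,
so it is the result admitted by \<open>properties_for_sort\<close>:\<close>
lemma "sort [2, 3, 1::nat] = [1, 2, 3]"
by simp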
lemma sort_key_inj_key_eq:
assumes mset_equal: "mset xs = mset ys"
and "inj_on f (set xs)"
and "sorted (map f ys)"
shows "sort_key f xs = ys"
proof (rule properties_for_sort_key)
from mset_equal
show "mset ys = mset xs" by simp
from \<open>sorted (map f ys)\<close>
show "sorted (map f ys)" .
show "[x\<leftarrow>ys . f k = f x] = [x\<leftarrow>xs . f k = f x]" if "k \<in> set ys" for k
proof -
from mset_equal
have set_equal: "set xs = set ys" by (rule mset_eq_setD)
with that have "insert k (set ys) = set ys" by auto
with \<open>inj_on f (set xs)\<close> have inj: "inj_on f (insert k (set ys))"
by (simp add: set_equal)
from inj have "[x\<leftarrow>ys . f k = f x] = filter (HOL.eq k) ys"
by (auto intro!: inj_on_filter_key_eq)
also have "\<dots> = replicate (count (mset ys) k) k"
by (simp add: replicate_count_mset_eq_filter_eq)
also have "\<dots> = replicate (count (mset xs) k) k"
using mset_equal by simp
also have "\<dots> = filter (HOL.eq k) xs"
by (simp add: replicate_count_mset_eq_filter_eq)
also have "\<dots> = [x\<leftarrow>xs . f k = f x]"
using inj by (auto intro!: inj_on_filter_key_eq [symmetric] simp add: set_equal)
finally show ?thesis .
qed
qed
lemma sort_key_eq_sort_key:
assumes "mset xs = mset ys"
and "inj_on f (set xs)"
shows "sort_key f xs = sort_key f ys"
by (rule sort_key_inj_key_eq) (simp_all add: assms)
lemma sort_key_by_quicksort:
"sort_key f xs = sort_key f [x\<leftarrow>xs. f x < f (xs ! (length xs div 2))]
@ [x\<leftarrow>xs. f x = f (xs ! (length xs div 2))]
@ sort_key f [x\<leftarrow>xs. f x > f (xs ! (length xs div 2))]" (is "sort_key f ?lhs = ?rhs")
proof (rule properties_for_sort_key)
show "mset ?rhs = mset ?lhs"
by (rule multiset_eqI) auto
show "sorted (map f ?rhs)"
by (auto simp add: sorted_append intro: sorted_map_same)
next
fix l
assume "l \<in> set ?rhs"
let ?pivot = "f (xs ! (length xs div 2))"
have *: "\<And>x. f l = f x \<longleftrightarrow> f x = f l" by auto
have "[x \<leftarrow> sort_key f xs . f x = f l] = [x \<leftarrow> xs. f x = f l]"
unfolding filter_sort by (rule properties_for_sort_key) (auto intro: sorted_map_same)
with * have **: "[x \<leftarrow> sort_key f xs . f l = f x] = [x \<leftarrow> xs. f l = f x]" by simp
have "\<And>x P. P (f x) ?pivot \<and> f l = f x \<longleftrightarrow> P (f l) ?pivot \<and> f l = f x" by auto
then have "\<And>P. [x \<leftarrow> sort_key f xs . P (f x) ?pivot \<and> f l = f x] =
[x \<leftarrow> sort_key f xs. P (f l) ?pivot \<and> f l = f x]" by simp
note *** = this [of "(<)"] this [of "(>)"] this [of "(=)"]
show "[x \<leftarrow> ?rhs. f l = f x] = [x \<leftarrow> ?lhs. f l = f x]"
proof (cases "f l" ?pivot rule: linorder_cases)
case less
then have "f l \<noteq> ?pivot" and "\<not> f l > ?pivot" by auto
with less show ?thesis
by (simp add: filter_sort [symmetric] ** ***)
next
case equal then show ?thesis
by (simp add: * less_le)
next
case greater
then have "f l \<noteq> ?pivot" and "\<not> f l < ?pivot" by auto
with greater show ?thesis
by (simp add: filter_sort [symmetric] ** ***)
qed
qed
lemma sort_by_quicksort:
"sort xs = sort [x\<leftarrow>xs. x < xs ! (length xs div 2)]
@ [x\<leftarrow>xs. x = xs ! (length xs div 2)]
@ sort [x\<leftarrow>xs. x > xs ! (length xs div 2)]" (is "sort ?lhs = ?rhs")
using sort_key_by_quicksort [of "\<lambda>x. x", symmetric] by simp
text \<open>A stable parameterized quicksort (a concrete instance of \<open>part\<close> is given after the end of this context).\<close>
definition part :: "('b \<Rightarrow> 'a) \<Rightarrow> 'a \<Rightarrow> 'b list \<Rightarrow> 'b list \<times> 'b list \<times> 'b list" where
"part f pivot xs = ([x \<leftarrow> xs. f x < pivot], [x \<leftarrow> xs. f x = pivot], [x \<leftarrow> xs. pivot < f x])"
lemma part_code [code]:
"part f pivot [] = ([], [], [])"
"part f pivot (x # xs) = (let (lts, eqs, gts) = part f pivot xs; x' = f x in
if x' < pivot then (x # lts, eqs, gts)
else if x' > pivot then (lts, eqs, x # gts)
else (lts, x # eqs, gts))"
by (auto simp add: part_def Let_def split_def)
lemma sort_key_by_quicksort_code [code]:
"sort_key f xs =
(case xs of
[] \<Rightarrow> []
| [x] \<Rightarrow> xs
| [x, y] \<Rightarrow> (if f x \<le> f y then xs else [y, x])
| _ \<Rightarrow>
let (lts, eqs, gts) = part f (f (xs ! (length xs div 2))) xs
in sort_key f lts @ eqs @ sort_key f gts)"
proof (cases xs)
case Nil then show ?thesis by simp
next
case (Cons _ ys) note hyps = Cons show ?thesis
proof (cases ys)
case Nil with hyps show ?thesis by simp
next
case (Cons _ zs) note hyps = hyps Cons show ?thesis
proof (cases zs)
case Nil with hyps show ?thesis by auto
next
case Cons
from sort_key_by_quicksort [of f xs]
have "sort_key f xs = (let (lts, eqs, gts) = part f (f (xs ! (length xs div 2))) xs
in sort_key f lts @ eqs @ sort_key f gts)"
by (simp only: split_def Let_def part_def fst_conv snd_conv)
with hyps Cons show ?thesis by (simp only: list.cases)
qed
qed
qed
end
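text \<open>A small instance of \<open>part\<close> at a concrete type, illustrating the three-way split
and the stability of the middle component (the elements with key \<open>2\<close> keep their order);
it should follow by unfolding \<open>part_def\<close>:\<close>
lemma "part fst (2::nat) [(1, ''a''), (2, ''b''), (3, ''c''), (2, ''d'')] =
([(1, ''a'')], [(2, ''b''), (2, ''d'')], [(3, ''c'')])"
by (simp add: part_def)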
hide_const (open) part
lemma mset_remdups_subset_eq: "mset (remdups xs) \<subseteq># mset xs"
by (induct xs) (auto intro: subset_mset.order_trans)
lemma mset_update:
"i < length ls \<Longrightarrow> mset (ls[i := v]) = add_mset v (mset ls - {#ls ! i#})"
proof (induct ls arbitrary: i)
case Nil then show ?case by simp
next
case (Cons x xs)
show ?case
proof (cases i)
case 0 then show ?thesis by simp
next
case (Suc i')
with Cons show ?thesis
by (cases \<open>x = xs ! i'\<close>) auto
qed
qed
lemma mset_swap:
"i < length ls \<Longrightarrow> j < length ls \<Longrightarrow>
mset (ls[j := ls ! i, i := ls ! j]) = mset ls"
by (cases "i = j") (simp_all add: mset_update nth_mem_mset)
lemma mset_eq_finite:
\<open>finite {ys. mset ys = mset xs}\<close>
proof -
have \<open>{ys. mset ys = mset xs} \<subseteq> {ys. set ys \<subseteq> set xs \<and> length ys \<le> length xs}\<close>
by (auto simp add: dest: mset_eq_setD mset_eq_length)
moreover have \<open>finite {ys. set ys \<subseteq> set xs \<and> length ys \<le> length xs}\<close>
using finite_lists_length_le by blast
ultimately show ?thesis
by (rule finite_subset)
qed
subsection \<open>The multiset order\<close>
definition mult1 :: "('a \<times> 'a) set \<Rightarrow> ('a multiset \<times> 'a multiset) set" where
"mult1 r = {(N, M). \<exists>a M0 K. M = add_mset a M0 \<and> N = M0 + K \<and>
(\<forall>b. b \<in># K \<longrightarrow> (b, a) \<in> r)}"
definition mult :: "('a \<times> 'a) set \<Rightarrow> ('a multiset \<times> 'a multiset) set" where
"mult r = (mult1 r)\<^sup>+"
definition multp :: "('a \<Rightarrow> 'a \<Rightarrow> bool) \<Rightarrow> 'a multiset \<Rightarrow> 'a multiset \<Rightarrow> bool" where
"multp r M N \<longleftrightarrow> (M, N) \<in> mult {(x, y). r x y}"
declare multp_def[pred_set_conv]
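text \<open>
Informally, \<open>(N, M) \<in> mult r\<close> holds if \<open>M\<close> can be turned into \<open>N\<close> by one or more steps,
each of which removes some element \<open>a\<close> and adds arbitrarily many (possibly zero)
elements that are all \<open>r\<close>-smaller than \<open>a\<close>. For example, for \<open>r = {(m, n). m < n}\<close> on
\<open>nat\<close> we have \<open>({#1, 1, 2#}, {#3#}) \<in> mult r\<close>, and hence \<open>multp (<) {#1, 1, 2#} {#3#}\<close>,
by the single step that replaces \<open>3\<close> with the smaller elements \<open>1\<close>, \<open>1\<close> and \<open>2\<close>.
\<close>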
lemma mult1I:
assumes "M = add_mset a M0" and "N = M0 + K" and "\<And>b. b \<in># K \<Longrightarrow> (b, a) \<in> r"
shows "(N, M) \<in> mult1 r"
using assms unfolding mult1_def by blast
lemma mult1E:
assumes "(N, M) \<in> mult1 r"
obtains a M0 K where "M = add_mset a M0" "N = M0 + K" "\<And>b. b \<in># K \<Longrightarrow> (b, a) \<in> r"
using assms unfolding mult1_def by blast
lemma mono_mult1:
assumes "r \<subseteq> r'" shows "mult1 r \<subseteq> mult1 r'"
unfolding mult1_def using assms by blast
lemma mono_mult:
assumes "r \<subseteq> r'" shows "mult r \<subseteq> mult r'"
unfolding mult_def using mono_mult1[OF assms] trancl_mono by blast
lemma mono_multp[mono]: "r \<le> r' \<Longrightarrow> multp r \<le> multp r'"
unfolding le_fun_def le_bool_def
proof (intro allI impI)
fix M N :: "'a multiset"
assume "\<forall>x xa. r x xa \<longrightarrow> r' x xa"
hence "{(x, y). r x y} \<subseteq> {(x, y). r' x y}"
by blast
thus "multp r M N \<Longrightarrow> multp r' M N"
unfolding multp_def
by (fact mono_mult[THEN subsetD, rotated])
qed
lemma not_less_empty [iff]: "(M, {#}) \<notin> mult1 r"
by (simp add: mult1_def)
subsubsection \<open>Well-foundedness\<close>
lemma less_add:
assumes mult1: "(N, add_mset a M0) \<in> mult1 r"
shows
"(\<exists>M. (M, M0) \<in> mult1 r \<and> N = add_mset a M) \<or>
(\<exists>K. (\<forall>b. b \<in># K \<longrightarrow> (b, a) \<in> r) \<and> N = M0 + K)"
proof -
let ?r = "\<lambda>K a. \<forall>b. b \<in># K \<longrightarrow> (b, a) \<in> r"
let ?R = "\<lambda>N M. \<exists>a M0 K. M = add_mset a M0 \<and> N = M0 + K \<and> ?r K a"
obtain a' M0' K where M0: "add_mset a M0 = add_mset a' M0'"
and N: "N = M0' + K"
and r: "?r K a'"
using mult1 unfolding mult1_def by auto
show ?thesis (is "?case1 \<or> ?case2")
proof -
from M0 consider "M0 = M0'" "a = a'"
| K' where "M0 = add_mset a' K'" "M0' = add_mset a K'"
by atomize_elim (simp only: add_eq_conv_ex)
then show ?thesis
proof cases
case 1
with N r have "?r K a \<and> N = M0 + K" by simp
then have ?case2 ..
then show ?thesis ..
next
case 2
from N 2(2) have n: "N = add_mset a (K' + K)" by simp
with r 2(1) have "?R (K' + K) M0" by blast
with n have ?case1 by (simp add: mult1_def)
then show ?thesis ..
qed
qed
qed
lemma all_accessible:
assumes "wf r"
shows "\<forall>M. M \<in> Wellfounded.acc (mult1 r)"
proof
let ?R = "mult1 r"
let ?W = "Wellfounded.acc ?R"
{
fix M M0 a
assume M0: "M0 \<in> ?W"
and wf_hyp: "\<And>b. (b, a) \<in> r \<Longrightarrow> (\<forall>M \<in> ?W. add_mset b M \<in> ?W)"
and acc_hyp: "\<forall>M. (M, M0) \<in> ?R \<longrightarrow> add_mset a M \<in> ?W"
have "add_mset a M0 \<in> ?W"
proof (rule accI [of "add_mset a M0"])
fix N
assume "(N, add_mset a M0) \<in> ?R"
then consider M where "(M, M0) \<in> ?R" "N = add_mset a M"
| K where "\<forall>b. b \<in># K \<longrightarrow> (b, a) \<in> r" "N = M0 + K"
by atomize_elim (rule less_add)
then show "N \<in> ?W"
proof cases
case 1
from acc_hyp have "(M, M0) \<in> ?R \<longrightarrow> add_mset a M \<in> ?W" ..
from this and \<open>(M, M0) \<in> ?R\<close> have "add_mset a M \<in> ?W" ..
then show "N \<in> ?W" by (simp only: \<open>N = add_mset a M\<close>)
next
case 2
from this(1) have "M0 + K \<in> ?W"
proof (induct K)
case empty
from M0 show "M0 + {#} \<in> ?W" by simp
next
case (add x K)
from add.prems have "(x, a) \<in> r" by simp
with wf_hyp have "\<forall>M \<in> ?W. add_mset x M \<in> ?W" by blast
moreover from add have "M0 + K \<in> ?W" by simp
ultimately have "add_mset x (M0 + K) \<in> ?W" ..
then show "M0 + (add_mset x K) \<in> ?W" by simp
qed
then show "N \<in> ?W" by (simp only: 2(2))
qed
qed
} note tedious_reasoning = this
show "M \<in> ?W" for M
proof (induct M)
show "{#} \<in> ?W"
proof (rule accI)
fix b assume "(b, {#}) \<in> ?R"
with not_less_empty show "b \<in> ?W" by contradiction
qed
fix M a assume "M \<in> ?W"
from \<open>wf r\<close> have "\<forall>M \<in> ?W. add_mset a M \<in> ?W"
proof induct
fix a
assume r: "\<And>b. (b, a) \<in> r \<Longrightarrow> (\<forall>M \<in> ?W. add_mset b M \<in> ?W)"
show "\<forall>M \<in> ?W. add_mset a M \<in> ?W"
proof
fix M assume "M \<in> ?W"
then show "add_mset a M \<in> ?W"
by (rule acc_induct) (rule tedious_reasoning [OF _ r])
qed
qed
from this and \<open>M \<in> ?W\<close> show "add_mset a M \<in> ?W" ..
qed
qed
lemma wf_mult1: "wf r \<Longrightarrow> wf (mult1 r)"
by (rule acc_wfI) (rule all_accessible)
lemma wf_mult: "wf r \<Longrightarrow> wf (mult r)"
unfolding mult_def by (rule wf_trancl) (rule wf_mult1)
lemma wfP_multp: "wfP r \<Longrightarrow> wfP (multp r)"
unfolding multp_def wfP_def
by (simp add: wf_mult)
subsubsection \<open>Closure-free presentation\<close>
text \<open>One direction.\<close>
lemma mult_implies_one_step:
assumes
trans: "trans r" and
MN: "(M, N) \<in> mult r"
shows "\<exists>I J K. N = I + J \<and> M = I + K \<and> J \<noteq> {#} \<and> (\<forall>k \<in> set_mset K. \<exists>j \<in> set_mset J. (k, j) \<in> r)"
using MN unfolding mult_def mult1_def
proof (induction rule: converse_trancl_induct)
case (base y)
then show ?case by force
next
case (step y z) note yz = this(1) and zN = this(2) and N_decomp = this(3)
obtain I J K where
N: "N = I + J" "z = I + K" "J \<noteq> {#}" "\<forall>k\<in>#K. \<exists>j\<in>#J. (k, j) \<in> r"
using N_decomp by blast
obtain a M0 K' where
z: "z = add_mset a M0" and y: "y = M0 + K'" and K: "\<forall>b. b \<in># K' \<longrightarrow> (b, a) \<in> r"
using yz by blast
show ?case
proof (cases "a \<in># K")
case True
moreover have "\<exists>j\<in>#J. (k, j) \<in> r" if "k \<in># K'" for k
using K N trans True by (meson that transE)
ultimately show ?thesis
by (rule_tac x = I in exI, rule_tac x = J in exI, rule_tac x = "(K - {#a#}) + K'" in exI)
(use z y N in \<open>auto simp del: subset_mset.add_diff_assoc2 dest: in_diffD\<close>)
next
case False
then have "a \<in># I" by (metis N(2) union_iff union_single_eq_member z)
moreover have "M0 = I + K - {#a#}"
using N(2) z by force
ultimately show ?thesis
by (rule_tac x = "I - {#a#}" in exI, rule_tac x = "add_mset a J" in exI,
rule_tac x = "K + K'" in exI)
(use z y N False K in \<open>auto simp: add.assoc\<close>)
qed
qed
lemma multp_implies_one_step:
"transp R \<Longrightarrow> multp R M N \<Longrightarrow> \<exists>I J K. N = I + J \<and> M = I + K \<and> J \<noteq> {#} \<and> (\<forall>k\<in>#K. \<exists>x\<in>#J. R k x)"
by (rule mult_implies_one_step[to_pred])
lemma one_step_implies_mult:
assumes
"J \<noteq> {#}" and
"\<forall>k \<in> set_mset K. \<exists>j \<in> set_mset J. (k, j) \<in> r"
shows "(I + K, I + J) \<in> mult r"
using assms
proof (induction "size J" arbitrary: I J K)
case 0
then show ?case by auto
next
case (Suc n) note IH = this(1) and size_J = this(2)[THEN sym]
obtain J' a where J: "J = add_mset a J'"
using size_J by (blast dest: size_eq_Suc_imp_eq_union)
show ?case
proof (cases "J' = {#}")
case True
then show ?thesis
using J Suc by (fastforce simp add: mult_def mult1_def)
next
case [simp]: False
have K: "K = {#x \<in># K. (x, a) \<in> r#} + {#x \<in># K. (x, a) \<notin> r#}"
by simp
have "(I + K, (I + {# x \<in># K. (x, a) \<in> r #}) + J') \<in> mult r"
using IH[of J' "{# x \<in># K. (x, a) \<notin> r#}" "I + {# x \<in># K. (x, a) \<in> r#}"]
J Suc.prems K size_J by (auto simp: ac_simps)
moreover have "(I + {#x \<in># K. (x, a) \<in> r#} + J', I + J) \<in> mult r"
by (fastforce simp: J mult1_def mult_def)
ultimately show ?thesis
unfolding mult_def by simp
qed
qed
lemma one_step_implies_multp:
"J \<noteq> {#} \<Longrightarrow> \<forall>k\<in>#K. \<exists>j\<in>#J. R k j \<Longrightarrow> multp R (I + K) (I + J)"
by (rule one_step_implies_mult[of _ _ "{(x, y). r x y}" for r, folded multp_def, simplified])
lemma subset_implies_mult:
assumes sub: "A \<subset># B"
shows "(A, B) \<in> mult r"
proof -
have ApBmA: "A + (B - A) = B"
using sub by simp
have BmA: "B - A \<noteq> {#}"
using sub by (simp add: Diff_eq_empty_iff_mset subset_mset.less_le_not_le)
thus ?thesis
by (rule one_step_implies_mult[of "B - A" "{#}" _ A, unfolded ApBmA, simplified])
qed
lemma subset_implies_multp: "A \<subset># B \<Longrightarrow> multp r A B"
by (rule subset_implies_mult[of _ _ "{(x, y). r x y}" for r, folded multp_def])
lemma multp_repeat_mset_repeat_msetI:
assumes "transp R" and "multp R A B" and "n \<noteq> 0"
shows "multp R (repeat_mset n A) (repeat_mset n B)"
proof -
from \<open>transp R\<close> \<open>multp R A B\<close> obtain I J K where
"B = I + J" and "A = I + K" and "J \<noteq> {#}" and "\<forall>k \<in># K. \<exists>x \<in># J. R k x"
by (auto dest: multp_implies_one_step)
have repeat_n_A_eq: "repeat_mset n A = repeat_mset n I + repeat_mset n K"
using \<open>A = I + K\<close> by simp
have repeat_n_B_eq: "repeat_mset n B = repeat_mset n I + repeat_mset n J"
using \<open>B = I + J\<close> by simp
show ?thesis
unfolding repeat_n_A_eq repeat_n_B_eq
proof (rule one_step_implies_multp)
from \<open>n \<noteq> 0\<close> show "repeat_mset n J \<noteq> {#}"
using \<open>J \<noteq> {#}\<close>
by (simp add: repeat_mset_eq_empty_iff)
next
show "\<forall>k \<in># repeat_mset n K. \<exists>j \<in># repeat_mset n J. R k j"
using \<open>\<forall>k \<in># K. \<exists>x \<in># J. R k x\<close>
by (metis count_greater_zero_iff nat_0_less_mult_iff repeat_mset.rep_eq)
qed
qed
subsubsection \<open>Monotonicity\<close>
lemma multp_mono_strong:
assumes "multp R M1 M2" and "transp R" and
S_if_R: "\<And>x y. x \<in> set_mset M1 \<Longrightarrow> y \<in> set_mset M2 \<Longrightarrow> R x y \<Longrightarrow> S x y"
shows "multp S M1 M2"
proof -
obtain I J K where "M2 = I + J" and "M1 = I + K" and "J \<noteq> {#}" and "\<forall>k\<in>#K. \<exists>x\<in>#J. R k x"
using multp_implies_one_step[OF \<open>transp R\<close> \<open>multp R M1 M2\<close>] by auto
show ?thesis
unfolding \<open>M2 = I + J\<close> \<open>M1 = I + K\<close>
proof (rule one_step_implies_multp[OF \<open>J \<noteq> {#}\<close>])
show "\<forall>k\<in>#K. \<exists>j\<in>#J. S k j"
using S_if_R
by (metis \<open>M1 = I + K\<close> \<open>M2 = I + J\<close> \<open>\<forall>k\<in>#K. \<exists>x\<in>#J. R k x\<close> union_iff)
qed
qed
lemma mult_mono_strong:
assumes "(M1, M2) \<in> mult r" and "trans r" and
S_if_R: "\<And>x y. x \<in> set_mset M1 \<Longrightarrow> y \<in> set_mset M2 \<Longrightarrow> (x, y) \<in> r \<Longrightarrow> (x, y) \<in> s"
shows "(M1, M2) \<in> mult s"
using assms multp_mono_strong[of "\<lambda>x y. (x, y) \<in> r" M1 M2 "\<lambda>x y. (x, y) \<in> s",
unfolded multp_def transp_trans_eq, simplified]
by blast
lemma monotone_on_multp_multp_image_mset:
assumes "monotone_on A orda ordb f" and "transp orda"
shows "monotone_on {M. set_mset M \<subseteq> A} (multp orda) (multp ordb) (image_mset f)"
proof (rule monotone_onI)
fix M1 M2
assume
M1_in: "M1 \<in> {M. set_mset M \<subseteq> A}" and
M2_in: "M2 \<in> {M. set_mset M \<subseteq> A}" and
M1_lt_M2: "multp orda M1 M2"
from multp_implies_one_step[OF \<open>transp orda\<close> M1_lt_M2] obtain I J K where
M2_eq: "M2 = I + J" and
M1_eq: "M1 = I + K" and
J_neq_mempty: "J \<noteq> {#}" and
ball_K_less: "\<forall>k\<in>#K. \<exists>x\<in>#J. orda k x"
by metis
have "multp ordb (image_mset f I + image_mset f K) (image_mset f I + image_mset f J)"
proof (intro one_step_implies_multp ballI)
show "image_mset f J \<noteq> {#}"
using J_neq_mempty by simp
next
fix k' assume "k'\<in>#image_mset f K"
then obtain k where "k' = f k" and k_in: "k \<in># K"
by auto
then obtain j where j_in: "j\<in>#J" and "orda k j"
using ball_K_less by auto
have "ordb (f k) (f j)"
proof (rule \<open>monotone_on A orda ordb f\<close>[THEN monotone_onD, OF _ _ \<open>orda k j\<close>])
show "k \<in> A"
using M1_eq M1_in k_in by auto
next
show "j \<in> A"
using M2_eq M2_in j_in by auto
qed
thus "\<exists>j\<in>#image_mset f J. ordb k' j"
using \<open>j \<in># J\<close> \<open>k' = f k\<close> by auto
qed
thus "multp ordb (image_mset f M1) (image_mset f M2)"
by (simp add: M1_eq M2_eq)
qed
lemma monotone_multp_multp_image_mset:
assumes "monotone orda ordb f" and "transp orda"
shows "monotone (multp orda) (multp ordb) (image_mset f)"
by (rule monotone_on_multp_multp_image_mset[OF assms, simplified])
lemma multp_image_mset_image_msetI:
assumes "multp (\<lambda>x y. R (f x) (f y)) M1 M2" and "transp R"
shows "multp R (image_mset f M1) (image_mset f M2)"
proof -
from \<open>transp R\<close> have "transp (\<lambda>x y. R (f x) (f y))"
by (auto intro: transpI dest: transpD)
with \<open>multp (\<lambda>x y. R (f x) (f y)) M1 M2\<close> obtain I J K where
"M2 = I + J" and "M1 = I + K" and "J \<noteq> {#}" and "\<forall>k\<in>#K. \<exists>x\<in>#J. R (f k) (f x)"
using multp_implies_one_step by blast
have "multp R (image_mset f I + image_mset f K) (image_mset f I + image_mset f J)"
proof (rule one_step_implies_multp)
show "image_mset f J \<noteq> {#}"
by (simp add: \<open>J \<noteq> {#}\<close>)
next
show "\<forall>k\<in>#image_mset f K. \<exists>j\<in>#image_mset f J. R k j"
by (simp add: \<open>\<forall>k\<in>#K. \<exists>x\<in>#J. R (f k) (f x)\<close>)
qed
thus ?thesis
by (simp add: \<open>M1 = I + K\<close> \<open>M2 = I + J\<close>)
qed
lemma multp_image_mset_image_msetD:
assumes
"multp R (image_mset f A) (image_mset f B)" and
"transp R" and
inj_on_f: "inj_on f (set_mset A \<union> set_mset B)"
shows "multp (\<lambda>x y. R (f x) (f y)) A B"
proof -
from assms(1,2) obtain I J K where
f_B_eq: "image_mset f B = I + J" and
f_A_eq: "image_mset f A = I + K" and
J_neq_mempty: "J \<noteq> {#}" and
ball_K_less: "\<forall>k\<in>#K. \<exists>x\<in>#J. R k x"
by (auto dest: multp_implies_one_step)
from f_B_eq obtain I' J' where
B_def: "B = I' + J'" and I_def: "I = image_mset f I'" and J_def: "J = image_mset f J'"
using image_mset_eq_plusD by blast
from inj_on_f have inj_on_f': "inj_on f (set_mset A \<union> set_mset I')"
by (rule inj_on_subset) (auto simp add: B_def)
from f_A_eq obtain K' where
A_def: "A = I' + K'" and K_def: "K = image_mset f K'"
by (auto simp: I_def dest: image_mset_eq_image_mset_plusD[OF _ inj_on_f'])
show ?thesis
unfolding A_def B_def
proof (intro one_step_implies_multp ballI)
from J_neq_mempty show "J' \<noteq> {#}"
by (simp add: J_def)
next
fix k assume "k \<in># K'"
with ball_K_less obtain j' where "j' \<in># J" and "R (f k) j'"
using K_def by auto
moreover then obtain j where "j \<in># J'" and "f j = j'"
using J_def by auto
ultimately show "\<exists>j\<in>#J'. R (f k) (f j)"
by blast
qed
qed
subsubsection \<open>The multiset extension is cancellative for multiset union\<close>
lemma mult_cancel:
assumes "trans s" and "irrefl_on (set_mset Z) s"
shows "(X + Z, Y + Z) \<in> mult s \<longleftrightarrow> (X, Y) \<in> mult s" (is "?L \<longleftrightarrow> ?R")
proof
assume ?L thus ?R
using \<open>irrefl_on (set_mset Z) s\<close>
proof (induct Z)
case (add z Z)
obtain X' Y' Z' where *: "add_mset z X + Z = Z' + X'" "add_mset z Y + Z = Z' + Y'" "Y' \<noteq> {#}"
"\<forall>x \<in> set_mset X'. \<exists>y \<in> set_mset Y'. (x, y) \<in> s"
using mult_implies_one_step[OF \<open>trans s\<close> add(2)] by auto
consider Z2 where "Z' = add_mset z Z2" | X2 Y2 where "X' = add_mset z X2" "Y' = add_mset z Y2"
using *(1,2) by (metis add_mset_remove_trivial_If insert_iff set_mset_add_mset_insert union_iff)
thus ?case
proof (cases)
case 1 thus ?thesis
using * one_step_implies_mult[of Y' X' s Z2] add(3)
by (auto simp: add.commute[of _ "{#_#}"] add.assoc intro: add(1) elim: irrefl_on_subset)
next
case 2 then obtain y where "y \<in> set_mset Y2" "(z, y) \<in> s"
using *(4) \<open>irrefl_on (set_mset (add_mset z Z)) s\<close>
by (auto simp: irrefl_on_def)
moreover from this transD[OF \<open>trans s\<close> _ this(2)]
have "x' \<in> set_mset X2 \<Longrightarrow> \<exists>y \<in> set_mset Y2. (x', y) \<in> s" for x'
using 2 *(4)[rule_format, of x'] by auto
ultimately show ?thesis
using * one_step_implies_mult[of Y2 X2 s Z'] 2 add(3)
by (force simp: add.commute[of "{#_#}"] add.assoc[symmetric] intro: add(1)
elim: irrefl_on_subset)
qed
qed auto
next
assume ?R then obtain I J K
where "Y = I + J" "X = I + K" "J \<noteq> {#}" "\<forall>k \<in> set_mset K. \<exists>j \<in> set_mset J. (k, j) \<in> s"
using mult_implies_one_step[OF \<open>trans s\<close>] by blast
thus ?L using one_step_implies_mult[of J K s "I + Z"] by (auto simp: ac_simps)
qed
lemma multp_cancel:
"transp R \<Longrightarrow> irreflp_on (set_mset Z) R \<Longrightarrow> multp R (X + Z) (Y + Z) \<longleftrightarrow> multp R X Y"
by (rule mult_cancel[to_pred])
lemma mult_cancel_add_mset:
"trans r \<Longrightarrow> irrefl_on {z} r \<Longrightarrow>
((add_mset z X, add_mset z Y) \<in> mult r) = ((X, Y) \<in> mult r)"
by (rule mult_cancel[of _ "{#_#}", simplified])
lemma multp_cancel_add_mset:
"transp R \<Longrightarrow> irreflp_on {z} R \<Longrightarrow> multp R (add_mset z X) (add_mset z Y) = multp R X Y"
by (rule mult_cancel_add_mset[to_pred, folded bot_set_def])
lemma mult_cancel_max0:
assumes "trans s" and "irrefl_on (set_mset X \<inter> set_mset Y) s"
shows "(X, Y) \<in> mult s \<longleftrightarrow> (X - X \<inter># Y, Y - X \<inter># Y) \<in> mult s" (is "?L \<longleftrightarrow> ?R")
proof -
have "(X - X \<inter># Y + X \<inter># Y, Y - X \<inter># Y + X \<inter># Y) \<in> mult s \<longleftrightarrow> (X - X \<inter># Y, Y - X \<inter># Y) \<in> mult s"
proof (rule mult_cancel)
from assms show "trans s"
by simp
next
from assms show "irrefl_on (set_mset (X \<inter># Y)) s"
by simp
qed
moreover have "X - X \<inter># Y + X \<inter># Y = X" "Y - X \<inter># Y + X \<inter># Y = Y"
by (auto simp flip: count_inject)
ultimately show ?thesis
by simp
qed
lemma mult_cancel_max:
"trans r \<Longrightarrow> irrefl_on (set_mset X \<inter> set_mset Y) r \<Longrightarrow>
(X, Y) \<in> mult r \<longleftrightarrow> (X - Y, Y - X) \<in> mult r"
by (rule mult_cancel_max0[simplified])
lemma multp_cancel_max:
"transp R \<Longrightarrow> irreflp_on (set_mset X \<inter> set_mset Y) R \<Longrightarrow> multp R X Y \<longleftrightarrow> multp R (X - Y) (Y - X)"
by (rule mult_cancel_max[to_pred])
subsubsection \<open>Strict partial-order properties\<close>
lemma mult1_lessE:
assumes "(N, M) \<in> mult1 {(a, b). r a b}" and "asymp r"
obtains a M0 K where "M = add_mset a M0" "N = M0 + K"
"a \<notin># K" "\<And>b. b \<in># K \<Longrightarrow> r b a"
proof -
from assms obtain a M0 K where "M = add_mset a M0" "N = M0 + K" and
*: "b \<in># K \<Longrightarrow> r b a" for b by (blast elim: mult1E)
moreover from * [of a] have "a \<notin># K"
using \<open>asymp r\<close> by (meson asympD)
ultimately show thesis by (auto intro: that)
qed
lemma trans_mult: "trans r \<Longrightarrow> trans (mult r)"
by (simp add: mult_def)
lemma transp_multp: "transp r \<Longrightarrow> transp (multp r)"
unfolding multp_def transp_trans_eq
by (fact trans_mult[of "{(x, y). r x y}" for r, folded transp_trans])
lemma irrefl_mult:
assumes "trans r" "irrefl r"
shows "irrefl (mult r)"
proof (intro irreflI notI)
fix M
assume "(M, M) \<in> mult r"
then obtain I J K where "M = I + J" and "M = I + K"
and "J \<noteq> {#}" and "(\<forall>k\<in>set_mset K. \<exists>j\<in>set_mset J. (k, j) \<in> r)"
using mult_implies_one_step[OF \<open>trans r\<close>] by blast
then have *: "K \<noteq> {#}" and **: "\<forall>k\<in>set_mset K. \<exists>j\<in>set_mset K. (k, j) \<in> r" by auto
have "finite (set_mset K)" by simp
hence "set_mset K = {}"
using **
proof (induction rule: finite_induct)
case empty
thus ?case by simp
next
case (insert x F)
have False
using \<open>irrefl r\<close>[unfolded irrefl_def, rule_format]
using \<open>trans r\<close>[THEN transD]
by (metis equals0D insert.IH insert.prems insertE insertI1 insertI2)
thus ?case ..
qed
with * show False by simp
qed
lemma irreflp_multp: "transp R \<Longrightarrow> irreflp R \<Longrightarrow> irreflp (multp R)"
by (rule irrefl_mult[of "{(x, y). r x y}" for r,
folded transp_trans_eq irreflp_irrefl_eq, simplified, folded multp_def])
instantiation multiset :: (preorder) order begin
definition less_multiset :: "'a multiset \<Rightarrow> 'a multiset \<Rightarrow> bool"
where "M < N \<longleftrightarrow> multp (<) M N"
definition less_eq_multiset :: "'a multiset \<Rightarrow> 'a multiset \<Rightarrow> bool"
where "less_eq_multiset M N \<longleftrightarrow> M < N \<or> M = N"
instance
proof intro_classes
fix M N :: "'a multiset"
show "(M < N) = (M \<le> N \<and> \<not> N \<le> M)"
unfolding less_eq_multiset_def less_multiset_def
by (metis irreflp_def irreflp_on_less irreflp_multp transpE transp_on_less transp_multp)
next
fix M :: "'a multiset"
show "M \<le> M"
unfolding less_eq_multiset_def
by simp
next
fix M1 M2 M3 :: "'a multiset"
show "M1 \<le> M2 \<Longrightarrow> M2 \<le> M3 \<Longrightarrow> M1 \<le> M3"
unfolding less_eq_multiset_def less_multiset_def
using transp_multp[OF transp_on_less, THEN transpD]
by blast
next
fix M N :: "'a multiset"
show "M \<le> N \<Longrightarrow> N \<le> M \<Longrightarrow> M = N"
unfolding less_eq_multiset_def less_multiset_def
using transp_multp[OF transp_on_less, THEN transpD]
using irreflp_multp[OF transp_on_less irreflp_on_less, unfolded irreflp_def, rule_format]
by blast
qed
end
lemma mset_le_irrefl [elim!]:
fixes M :: "'a::preorder multiset"
shows "M < M \<Longrightarrow> R"
by simp
lemma wfP_less_multiset[simp]:
assumes wfP_less: "wfP ((<) :: ('a :: preorder) \<Rightarrow> 'a \<Rightarrow> bool)"
shows "wfP ((<) :: 'a multiset \<Rightarrow> 'a multiset \<Rightarrow> bool)"
using wfP_multp[OF wfP_less] less_multiset_def
by (metis wfPUNIVI wfP_induct)
subsubsection \<open>Strict total-order properties\<close>
lemma total_on_mult:
assumes "total_on A r" and "trans r" and "\<And>M. M \<in> B \<Longrightarrow> set_mset M \<subseteq> A"
shows "total_on B (mult r)"
proof (rule total_onI)
fix M1 M2 assume "M1 \<in> B" and "M2 \<in> B" and "M1 \<noteq> M2"
let ?I = "M1 \<inter># M2"
show "(M1, M2) \<in> mult r \<or> (M2, M1) \<in> mult r"
proof (cases "M1 - ?I = {#} \<or> M2 - ?I = {#}")
case True
with \<open>M1 \<noteq> M2\<close> show ?thesis
by (metis Diff_eq_empty_iff_mset diff_intersect_left_idem diff_intersect_right_idem
subset_implies_mult subset_mset.less_le)
next
case False
from assms(1) have "total_on (set_mset (M1 - ?I)) r"
by (meson \<open>M1 \<in> B\<close> assms(3) diff_subset_eq_self set_mset_mono total_on_subset)
with False obtain greatest1 where
greatest1_in: "greatest1 \<in># M1 - ?I" and
greatest1_greatest: "\<forall>x \<in># M1 - ?I. greatest1 \<noteq> x \<longrightarrow> (x, greatest1) \<in> r"
using Multiset.bex_greatest_element[to_set, of "M1 - ?I" r]
by (metis assms(2) subset_UNIV trans_on_subset)
from assms(1) have "total_on (set_mset (M2 - ?I)) r"
by (meson \<open>M2 \<in> B\<close> assms(3) diff_subset_eq_self set_mset_mono total_on_subset)
with False obtain greatest2 where
greatest2_in: "greatest2 \<in># M2 - ?I" and
greatest2_greatest: "\<forall>x \<in># M2 - ?I. greatest2 \<noteq> x \<longrightarrow> (x, greatest2) \<in> r"
using Multiset.bex_greatest_element[to_set, of "M2 - ?I" r]
by (metis assms(2) subset_UNIV trans_on_subset)
have "greatest1 \<noteq> greatest2"
using greatest1_in \<open>greatest2 \<in># M2 - ?I\<close>
by (metis diff_intersect_left_idem diff_intersect_right_idem dual_order.eq_iff in_diff_count
in_diff_countE le_add_same_cancel2 less_irrefl zero_le)
hence "(greatest1, greatest2) \<in> r \<or> (greatest2, greatest1) \<in> r"
using \<open>total_on A r\<close>[unfolded total_on_def, rule_format, of greatest1 greatest2]
\<open>M1 \<in> B\<close> \<open>M2 \<in> B\<close> greatest1_in greatest2_in assms(3)
by (meson in_diffD in_mono)
thus ?thesis
proof (elim disjE)
assume "(greatest1, greatest2) \<in> r"
have "(?I + (M1 - ?I), ?I + (M2 - ?I)) \<in> mult r"
proof (rule one_step_implies_mult[of "M2 - ?I" "M1 - ?I" r ?I])
show "M2 - ?I \<noteq> {#}"
using False by force
next
show "\<forall>k\<in>#M1 - ?I. \<exists>j\<in>#M2 - ?I. (k, j) \<in> r"
using \<open>(greatest1, greatest2) \<in> r\<close> greatest2_in greatest1_greatest
by (metis assms(2) transD)
qed
hence "(M1, M2) \<in> mult r"
by (metis subset_mset.add_diff_inverse subset_mset.inf.cobounded1
subset_mset.inf.cobounded2)
thus "(M1, M2) \<in> mult r \<or> (M2, M1) \<in> mult r" ..
next
assume "(greatest2, greatest1) \<in> r"
have "(?I + (M2 - ?I), ?I + (M1 - ?I)) \<in> mult r"
proof (rule one_step_implies_mult[of "M1 - ?I" "M2 - ?I" r ?I])
show "M1 - M1 \<inter># M2 \<noteq> {#}"
using False by force
next
show "\<forall>k\<in>#M2 - ?I. \<exists>j\<in>#M1 - ?I. (k, j) \<in> r"
using \<open>(greatest2, greatest1) \<in> r\<close> greatest1_in greatest2_greatest
by (metis assms(2) transD)
qed
hence "(M2, M1) \<in> mult r"
by (metis subset_mset.add_diff_inverse subset_mset.inf.cobounded1
subset_mset.inf.cobounded2)
thus "(M1, M2) \<in> mult r \<or> (M2, M1) \<in> mult r" ..
qed
qed
qed
lemma total_mult: "total r \<Longrightarrow> trans r \<Longrightarrow> total (mult r)"
by (rule total_on_mult[of UNIV r UNIV, simplified])
lemma totalp_on_multp:
"totalp_on A R \<Longrightarrow> transp R \<Longrightarrow> (\<And>M. M \<in> B \<Longrightarrow> set_mset M \<subseteq> A) \<Longrightarrow> totalp_on B (multp R)"
using total_on_mult[of A "{(x,y). R x y}" B, to_pred]
by (simp add: multp_def total_on_def totalp_on_def)
lemma totalp_multp: "totalp R \<Longrightarrow> transp R \<Longrightarrow> totalp (multp R)"
by (rule totalp_on_multp[of UNIV R UNIV, simplified])
subsection \<open>Quasi-executable version of the multiset extension\<close>
text \<open>
Predicate variants of \<open>mult\<close> and the reflexive closure of \<open>mult\<close>, which are
executable whenever the given predicate \<open>P\<close> is. Together with the standard
code equations for \<open>(\<inter>#)\<close> and \<open>(-)\<close>, this should yield quadratic
(with respect to calls to \<open>P\<close>) implementations of \<open>multp_code\<close> and \<open>multeqp_code\<close>.
\<close>
definition multp_code :: "('a \<Rightarrow> 'a \<Rightarrow> bool) \<Rightarrow> 'a multiset \<Rightarrow> 'a multiset \<Rightarrow> bool" where
"multp_code P N M =
(let Z = M \<inter># N; X = M - Z in
X \<noteq> {#} \<and> (let Y = N - Z in (\<forall>y \<in> set_mset Y. \<exists>x \<in> set_mset X. P y x)))"
definition multeqp_code :: "('a \<Rightarrow> 'a \<Rightarrow> bool) \<Rightarrow> 'a multiset \<Rightarrow> 'a multiset \<Rightarrow> bool" where
"multeqp_code P N M =
(let Z = M \<inter># N; X = M - Z; Y = N - Z in
(\<forall>y \<in> set_mset Y. \<exists>x \<in> set_mset X. P y x))"
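text \<open>
  A concrete instance for illustration (not part of the original development): for
  \<open>N = {#1::nat, 2#}\<close>, \<open>M = {#2::nat, 3#}\<close> and \<open>P = (<)\<close> we get \<open>Z = {#2#}\<close>,
  \<open>X = M - Z = {#3#}\<close> and \<open>Y = N - Z = {#1#}\<close>; since \<open>X \<noteq> {#}\<close> and \<open>1 < 3\<close>,
  both \<open>multp_code (<) N M\<close> and \<open>multeqp_code (<) N M\<close> hold, i.e. \<open>N\<close> is smaller
  than \<open>M\<close> in the multiset extension of \<open>(<)\<close>.
\<close>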
lemma multp_code_iff_mult:
assumes "irrefl_on (set_mset N \<inter> set_mset M) R" and "trans R" and
[simp]: "\<And>x y. P x y \<longleftrightarrow> (x, y) \<in> R"
shows "multp_code P N M \<longleftrightarrow> (N, M) \<in> mult R" (is "?L \<longleftrightarrow> ?R")
proof -
have *: "M \<inter># N + (N - M \<inter># N) = N" "M \<inter># N + (M - M \<inter># N) = M"
"(M - M \<inter># N) \<inter># (N - M \<inter># N) = {#}" by (auto simp flip: count_inject)
show ?thesis
proof
assume ?L thus ?R
using one_step_implies_mult[of "M - M \<inter># N" "N - M \<inter># N" R "M \<inter># N"] *
by (auto simp: multp_code_def Let_def)
next
{ fix I J K :: "'a multiset" assume "(I + J) \<inter># (I + K) = {#}"
then have "I = {#}" by (metis inter_union_distrib_right union_eq_empty)
} note [dest!] = this
assume ?R thus ?L
using mult_cancel_max
using mult_implies_one_step[OF assms(2), of "N - M \<inter># N" "M - M \<inter># N"]
mult_cancel_max[OF assms(2,1)] * by (auto simp: multp_code_def)
qed
qed
lemma multp_code_iff_multp:
"irreflp_on (set_mset M \<inter> set_mset N) R \<Longrightarrow> transp R \<Longrightarrow> multp_code R M N \<longleftrightarrow> multp R M N"
using multp_code_iff_mult[simplified, to_pred, of M N R R] by simp
lemma multp_code_eq_multp:
assumes "irreflp R" and "transp R"
shows "multp_code R = multp R"
proof (intro ext)
fix M N
show "multp_code R M N = multp R M N"
proof (rule multp_code_iff_multp)
from assms show "irreflp_on (set_mset M \<inter> set_mset N) R"
by (auto intro: irreflp_on_subset)
next
from assms show "transp R"
by simp
qed
qed
lemma multeqp_code_iff_reflcl_mult:
assumes "irrefl_on (set_mset N \<inter> set_mset M) R" and "trans R" and "\<And>x y. P x y \<longleftrightarrow> (x, y) \<in> R"
shows "multeqp_code P N M \<longleftrightarrow> (N, M) \<in> (mult R)\<^sup>="
proof -
{ assume "N \<noteq> M" "M - M \<inter># N = {#}"
then obtain y where "count N y \<noteq> count M y" by (auto simp flip: count_inject)
then have "\<exists>y. count M y < count N y" using \<open>M - M \<inter># N = {#}\<close>
by (auto simp flip: count_inject dest!: le_neq_implies_less fun_cong[of _ _ y])
}
then have "multeqp_code P N M \<longleftrightarrow> multp_code P N M \<or> N = M"
by (auto simp: multeqp_code_def multp_code_def Let_def in_diff_count)
thus ?thesis
using multp_code_iff_mult[OF assms] by simp
qed
lemma multeqp_code_iff_reflclp_multp:
"irreflp_on (set_mset M \<inter> set_mset N) R \<Longrightarrow> transp R \<Longrightarrow> multeqp_code R M N \<longleftrightarrow> (multp R)\<^sup>=\<^sup>= M N"
using multeqp_code_iff_reflcl_mult[simplified, to_pred, of M N R R] by simp
lemma multeqp_code_eq_reflclp_multp:
assumes "irreflp R" and "transp R"
shows "multeqp_code R = (multp R)\<^sup>=\<^sup>="
proof (intro ext)
fix M N
show "multeqp_code R M N \<longleftrightarrow> (multp R)\<^sup>=\<^sup>= M N"
proof (rule multeqp_code_iff_reflclp_multp)
from assms show "irreflp_on (set_mset M \<inter> set_mset N) R"
by (auto intro: irreflp_on_subset)
next
from assms show "transp R"
by simp
qed
qed
subsubsection \<open>Monotonicity of multiset union\<close>
lemma mult1_union: "(B, D) \<in> mult1 r \<Longrightarrow> (C + B, C + D) \<in> mult1 r"
by (force simp: mult1_def)
lemma union_le_mono2: "B < D \<Longrightarrow> C + B < C + (D::'a::preorder multiset)"
apply (unfold less_multiset_def multp_def mult_def)
apply (erule trancl_induct)
apply (blast intro: mult1_union)
apply (blast intro: mult1_union trancl_trans)
done
lemma union_le_mono1: "B < D \<Longrightarrow> B + C < D + (C::'a::preorder multiset)"
apply (subst add.commute [of B C])
apply (subst add.commute [of D C])
apply (erule union_le_mono2)
done
lemma union_less_mono:
fixes A B C D :: "'a::preorder multiset"
shows "A < C \<Longrightarrow> B < D \<Longrightarrow> A + B < C + D"
by (blast intro!: union_le_mono1 union_le_mono2 less_trans)
instantiation multiset :: (preorder) ordered_ab_semigroup_add
begin
instance
by standard (auto simp add: less_eq_multiset_def intro: union_le_mono2)
end
subsubsection \<open>Termination proofs with multiset orders\<close>
lemma multi_member_skip: "x \<in># XS \<Longrightarrow> x \<in># {# y #} + XS"
and multi_member_this: "x \<in># {# x #} + XS"
and multi_member_last: "x \<in># {# x #}"
by auto
definition "ms_strict = mult pair_less"
definition "ms_weak = ms_strict \<union> Id"
lemma ms_reduction_pair: "reduction_pair (ms_strict, ms_weak)"
unfolding reduction_pair_def ms_strict_def ms_weak_def pair_less_def
by (auto intro: wf_mult1 wf_trancl simp: mult_def)
lemma smsI:
"(set_mset A, set_mset B) \<in> max_strict \<Longrightarrow> (Z + A, Z + B) \<in> ms_strict"
unfolding ms_strict_def
by (rule one_step_implies_mult) (auto simp add: max_strict_def pair_less_def elim!:max_ext.cases)
lemma wmsI:
"(set_mset A, set_mset B) \<in> max_strict \<or> A = {#} \<and> B = {#}
\<Longrightarrow> (Z + A, Z + B) \<in> ms_weak"
unfolding ms_weak_def ms_strict_def
by (auto simp add: pair_less_def max_strict_def elim!:max_ext.cases intro: one_step_implies_mult)
inductive pw_leq
where
pw_leq_empty: "pw_leq {#} {#}"
| pw_leq_step: "\<lbrakk>(x,y) \<in> pair_leq; pw_leq X Y \<rbrakk> \<Longrightarrow> pw_leq ({#x#} + X) ({#y#} + Y)"
lemma pw_leq_lstep:
"(x, y) \<in> pair_leq \<Longrightarrow> pw_leq {#x#} {#y#}"
by (drule pw_leq_step) (rule pw_leq_empty, simp)
lemma pw_leq_split:
assumes "pw_leq X Y"
shows "\<exists>A B Z. X = A + Z \<and> Y = B + Z \<and> ((set_mset A, set_mset B) \<in> max_strict \<or> (B = {#} \<and> A = {#}))"
using assms
proof induct
case pw_leq_empty thus ?case by auto
next
case (pw_leq_step x y X Y)
then obtain A B Z where
[simp]: "X = A + Z" "Y = B + Z"
and 1[simp]: "(set_mset A, set_mset B) \<in> max_strict \<or> (B = {#} \<and> A = {#})"
by auto
from pw_leq_step consider "x = y" | "(x, y) \<in> pair_less"
unfolding pair_leq_def by auto
thus ?case
proof cases
case [simp]: 1
have "{#x#} + X = A + ({#y#}+Z) \<and> {#y#} + Y = B + ({#y#}+Z) \<and>
((set_mset A, set_mset B) \<in> max_strict \<or> (B = {#} \<and> A = {#}))"
by auto
thus ?thesis by blast
next
case 2
let ?A' = "{#x#} + A" and ?B' = "{#y#} + B"
have "{#x#} + X = ?A' + Z"
"{#y#} + Y = ?B' + Z"
by auto
moreover have
"(set_mset ?A', set_mset ?B') \<in> max_strict"
using 1 2 unfolding max_strict_def
by (auto elim!: max_ext.cases)
ultimately show ?thesis by blast
qed
qed
lemma
assumes pwleq: "pw_leq Z Z'"
shows ms_strictI: "(set_mset A, set_mset B) \<in> max_strict \<Longrightarrow> (Z + A, Z' + B) \<in> ms_strict"
and ms_weakI1: "(set_mset A, set_mset B) \<in> max_strict \<Longrightarrow> (Z + A, Z' + B) \<in> ms_weak"
and ms_weakI2: "(Z + {#}, Z' + {#}) \<in> ms_weak"
proof -
from pw_leq_split[OF pwleq]
obtain A' B' Z''
where [simp]: "Z = A' + Z''" "Z' = B' + Z''"
and mx_or_empty: "(set_mset A', set_mset B') \<in> max_strict \<or> (A' = {#} \<and> B' = {#})"
by blast
{
assume max: "(set_mset A, set_mset B) \<in> max_strict"
from mx_or_empty
have "(Z'' + (A + A'), Z'' + (B + B')) \<in> ms_strict"
proof
assume max': "(set_mset A', set_mset B') \<in> max_strict"
with max have "(set_mset (A + A'), set_mset (B + B')) \<in> max_strict"
by (auto simp: max_strict_def intro: max_ext_additive)
thus ?thesis by (rule smsI)
next
assume [simp]: "A' = {#} \<and> B' = {#}"
show ?thesis by (rule smsI) (auto intro: max)
qed
thus "(Z + A, Z' + B) \<in> ms_strict" by (simp add: ac_simps)
thus "(Z + A, Z' + B) \<in> ms_weak" by (simp add: ms_weak_def)
}
from mx_or_empty
have "(Z'' + A', Z'' + B') \<in> ms_weak" by (rule wmsI)
thus "(Z + {#}, Z' + {#}) \<in> ms_weak" by (simp add: ac_simps)
qed
lemma empty_neutral: "{#} + x = x" "x + {#} = x"
and nonempty_plus: "{# x #} + rs \<noteq> {#}"
and nonempty_single: "{# x #} \<noteq> {#}"
by auto
setup \<open>
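(* Register multiset support with the size-change termination prover
   (ScnpReconstruct): term constructors for multisets, tactics for membership and
   non-emptiness goals, and the ms_strict/ms_weak reduction pair proved above. *)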
let
fun msetT T = \<^Type>\<open>multiset T\<close>;
fun mk_mset T [] = \<^instantiate>\<open>'a = T in term \<open>{#}\<close>\<close>
| mk_mset T [x] = \<^instantiate>\<open>'a = T and x in term \<open>{#x#}\<close>\<close>
| mk_mset T (x :: xs) = \<^Const>\<open>plus \<open>msetT T\<close> for \<open>mk_mset T [x]\<close> \<open>mk_mset T xs\<close>\<close>
fun mset_member_tac ctxt m i =
if m <= 0 then
resolve_tac ctxt @{thms multi_member_this} i ORELSE
resolve_tac ctxt @{thms multi_member_last} i
else
resolve_tac ctxt @{thms multi_member_skip} i THEN mset_member_tac ctxt (m - 1) i
fun mset_nonempty_tac ctxt =
resolve_tac ctxt @{thms nonempty_plus} ORELSE'
resolve_tac ctxt @{thms nonempty_single}
fun regroup_munion_conv ctxt =
Function_Lib.regroup_conv ctxt \<^const_abbrev>\<open>empty_mset\<close> \<^const_name>\<open>plus\<close>
(map (fn t => t RS eq_reflection) (@{thms ac_simps} @ @{thms empty_neutral}))
fun unfold_pwleq_tac ctxt i =
(resolve_tac ctxt @{thms pw_leq_step} i THEN (fn st => unfold_pwleq_tac ctxt (i + 1) st))
ORELSE (resolve_tac ctxt @{thms pw_leq_lstep} i)
ORELSE (resolve_tac ctxt @{thms pw_leq_empty} i)
val set_mset_simps = [@{thm set_mset_empty}, @{thm set_mset_single}, @{thm set_mset_union},
@{thm Un_insert_left}, @{thm Un_empty_left}]
in
ScnpReconstruct.multiset_setup (ScnpReconstruct.Multiset
{
msetT=msetT, mk_mset=mk_mset, mset_regroup_conv=regroup_munion_conv,
mset_member_tac=mset_member_tac, mset_nonempty_tac=mset_nonempty_tac,
mset_pwleq_tac=unfold_pwleq_tac, set_of_simps=set_mset_simps,
smsI'= @{thm ms_strictI}, wmsI2''= @{thm ms_weakI2}, wmsI1= @{thm ms_weakI1},
reduction_pair = @{thm ms_reduction_pair}
})
end
\<close>
subsection \<open>Legacy theorem bindings\<close>
lemmas multi_count_eq = multiset_eq_iff [symmetric]
lemma union_commute: "M + N = N + (M::'a multiset)"
by (fact add.commute)
lemma union_assoc: "(M + N) + K = M + (N + (K::'a multiset))"
by (fact add.assoc)
lemma union_lcomm: "M + (N + K) = N + (M + (K::'a multiset))"
by (fact add.left_commute)
lemmas union_ac = union_assoc union_commute union_lcomm add_mset_commute
lemma union_right_cancel: "M + K = N + K \<longleftrightarrow> M = (N::'a multiset)"
by (fact add_right_cancel)
lemma union_left_cancel: "K + M = K + N \<longleftrightarrow> M = (N::'a multiset)"
by (fact add_left_cancel)
lemma multi_union_self_other_eq: "(A::'a multiset) + X = A + Y \<Longrightarrow> X = Y"
by (fact add_left_imp_eq)
lemma mset_subset_trans: "(M::'a multiset) \<subset># K \<Longrightarrow> K \<subset># N \<Longrightarrow> M \<subset># N"
by (fact subset_mset.less_trans)
lemma multiset_inter_commute: "A \<inter># B = B \<inter># A"
by (fact subset_mset.inf.commute)
lemma multiset_inter_assoc: "A \<inter># (B \<inter># C) = A \<inter># B \<inter># C"
by (fact subset_mset.inf.assoc [symmetric])
lemma multiset_inter_left_commute: "A \<inter># (B \<inter># C) = B \<inter># (A \<inter># C)"
by (fact subset_mset.inf.left_commute)
lemmas multiset_inter_ac =
multiset_inter_commute
multiset_inter_assoc
multiset_inter_left_commute
lemma mset_le_not_refl: "\<not> M < (M::'a::preorder multiset)"
by (fact less_irrefl)
lemma mset_le_trans: "K < M \<Longrightarrow> M < N \<Longrightarrow> K < (N::'a::preorder multiset)"
by (fact less_trans)
lemma mset_le_not_sym: "M < N \<Longrightarrow> \<not> N < (M::'a::preorder multiset)"
by (fact less_not_sym)
lemma mset_le_asym: "M < N \<Longrightarrow> (\<not> P \<Longrightarrow> N < (M::'a::preorder multiset)) \<Longrightarrow> P"
by (fact less_asym)
declaration \<open>
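(* Nitpick term postprocessor: a model of type 'a multiset is essentially a map
   from elements to counts; rebuild it as a readable term of the form
   add_mset a (add_mset b ... {#}) for display. *)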
let
fun multiset_postproc _ maybe_name all_values (T as Type (_, [elem_T])) (Const _ $ t') =
let
val (maybe_opt, ps) =
Nitpick_Model.dest_plain_fun t'
||> (~~)
||> map (apsnd (snd o HOLogic.dest_number))
fun elems_for t =
(case AList.lookup (=) ps t of
SOME n => replicate n t
| NONE => [Const (maybe_name, elem_T --> elem_T) $ t])
in
(case maps elems_for (all_values elem_T) @
(if maybe_opt then [Const (Nitpick_Model.unrep_mixfix (), elem_T)] else []) of
[] => \<^Const>\<open>Groups.zero T\<close>
| ts => foldl1 (fn (s, t) => \<^Const>\<open>add_mset elem_T for s t\<close>) ts)
end
| multiset_postproc _ _ _ _ t = t
in Nitpick_Model.register_term_postprocessor \<^typ>\<open>'a multiset\<close> multiset_postproc end
\<close>
subsection \<open>Naive implementation using lists\<close>
code_datatype mset
lemma [code]: "{#} = mset []"
by simp
lemma [code]: "add_mset x (mset xs) = mset (x # xs)"
by simp
lemma [code]: "Multiset.is_empty (mset xs) \<longleftrightarrow> List.null xs"
by (simp add: Multiset.is_empty_def List.null_def)
lemma union_code [code]: "mset xs + mset ys = mset (xs @ ys)"
by simp
lemma [code]: "image_mset f (mset xs) = mset (map f xs)"
by simp
lemma [code]: "filter_mset f (mset xs) = mset (filter f xs)"
by simp
lemma [code]: "mset xs - mset ys = mset (fold remove1 ys xs)"
by (rule sym, induct ys arbitrary: xs) (simp_all add: diff_add diff_right_commute diff_diff_add)
lemma [code]:
"mset xs \<inter># mset ys =
mset (snd (fold (\<lambda>x (ys, zs).
if x \<in> set ys then (remove1 x ys, x # zs) else (ys, zs)) xs (ys, [])))"
proof -
have "\<And>zs. mset (snd (fold (\<lambda>x (ys, zs).
if x \<in> set ys then (remove1 x ys, x # zs) else (ys, zs)) xs (ys, zs))) =
(mset xs \<inter># mset ys) + mset zs"
by (induct xs arbitrary: ys)
(auto simp add: inter_add_right1 inter_add_right2 ac_simps)
then show ?thesis by simp
qed
lemma [code]:
"mset xs \<union># mset ys =
mset (case_prod append (fold (\<lambda>x (ys, zs). (remove1 x ys, x # zs)) xs (ys, [])))"
proof -
have "\<And>zs. mset (case_prod append (fold (\<lambda>x (ys, zs). (remove1 x ys, x # zs)) xs (ys, zs))) =
(mset xs \<union># mset ys) + mset zs"
by (induct xs arbitrary: ys) (simp_all add: multiset_eq_iff)
then show ?thesis by simp
qed
declare in_multiset_in_set [code_unfold]
lemma [code]: "count (mset xs) x = fold (\<lambda>y. if x = y then Suc else id) xs 0"
proof -
have "\<And>n. fold (\<lambda>y. if x = y then Suc else id) xs n = count (mset xs) x + n"
by (induct xs) simp_all
then show ?thesis by simp
qed
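text \<open>
  A small illustration (not part of the original theory): with the code equations
  above, ground multiset terms evaluate directly, e.g.
\<close>
value "count (mset [1::nat, 2, 2, 3]) 2" \<comment> \<open>expected result: \<open>2\<close>\<close>
value "count (mset [1::nat, 2, 2] \<inter># mset [2, 3]) 2" \<comment> \<open>expected result: \<open>1\<close>\<close>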
declare set_mset_mset [code]
declare sorted_list_of_multiset_mset [code]
lemma [code]: \<comment> \<open>not very efficient, but representation-ignorant!\<close>
"mset_set A = mset (sorted_list_of_set A)"
apply (cases "finite A")
apply simp_all
apply (induct A rule: finite_induct)
apply simp_all
done
declare size_mset [code]
fun subset_eq_mset_impl :: "'a list \<Rightarrow> 'a list \<Rightarrow> bool option" where
"subset_eq_mset_impl [] ys = Some (ys \<noteq> [])"
| "subset_eq_mset_impl (Cons x xs) ys = (case List.extract ((=) x) ys of
None \<Rightarrow> None
| Some (ys1,_,ys2) \<Rightarrow> subset_eq_mset_impl xs (ys1 @ ys2))"
lemma subset_eq_mset_impl: "(subset_eq_mset_impl xs ys = None \<longleftrightarrow> \<not> mset xs \<subseteq># mset ys) \<and>
(subset_eq_mset_impl xs ys = Some True \<longleftrightarrow> mset xs \<subset># mset ys) \<and>
(subset_eq_mset_impl xs ys = Some False \<longrightarrow> mset xs = mset ys)"
proof (induct xs arbitrary: ys)
case (Nil ys)
show ?case by (auto simp: subset_mset.zero_less_iff_neq_zero)
next
case (Cons x xs ys)
show ?case
proof (cases "List.extract ((=) x) ys")
case None
hence x: "x \<notin> set ys" by (simp add: extract_None_iff)
{
assume "mset (x # xs) \<subseteq># mset ys"
from set_mset_mono[OF this] x have False by simp
} note nle = this
moreover
{
assume "mset (x # xs) \<subset># mset ys"
hence "mset (x # xs) \<subseteq># mset ys" by auto
from nle[OF this] have False .
}
ultimately show ?thesis using None by auto
next
case (Some res)
obtain ys1 y ys2 where res: "res = (ys1,y,ys2)" by (cases res, auto)
note Some = Some[unfolded res]
from extract_SomeE[OF Some] have "ys = ys1 @ x # ys2" by simp
hence id: "mset ys = add_mset x (mset (ys1 @ ys2))"
by auto
show ?thesis unfolding subset_eq_mset_impl.simps
unfolding Some option.simps split
unfolding id
using Cons[of "ys1 @ ys2"]
unfolding subset_mset_def subseteq_mset_def by auto
qed
qed
lemma [code]: "mset xs \<subseteq># mset ys \<longleftrightarrow> subset_eq_mset_impl xs ys \<noteq> None"
using subset_eq_mset_impl[of xs ys] by (cases "subset_eq_mset_impl xs ys", auto)
lemma [code]: "mset xs \<subset># mset ys \<longleftrightarrow> subset_eq_mset_impl xs ys = Some True"
using subset_eq_mset_impl[of xs ys] by (cases "subset_eq_mset_impl xs ys", auto)
instantiation multiset :: (equal) equal
begin
definition
[code del]: "HOL.equal A (B :: 'a multiset) \<longleftrightarrow> A = B"
lemma [code]: "HOL.equal (mset xs) (mset ys) \<longleftrightarrow> subset_eq_mset_impl xs ys = Some False"
unfolding equal_multiset_def
using subset_eq_mset_impl[of xs ys] by (cases "subset_eq_mset_impl xs ys", auto)
instance
by standard (simp add: equal_multiset_def)
end
declare sum_mset_sum_list [code]
lemma [code]: "prod_mset (mset xs) = fold times xs 1"
proof -
have "\<And>x. fold times xs x = prod_mset (mset xs) * x"
by (induct xs) (simp_all add: ac_simps)
then show ?thesis by simp
qed
text \<open>
Exercise for the casual reader: add implementations for \<^term>\<open>(\<le>)\<close>
and \<^term>\<open>(<)\<close> (multiset order).
\<close>
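text \<open>
  One possible route, offered only as a sketch here (this lemma and its proof are
  not part of the original theory): over a preorder, the strict multiset order
  coincides with \<open>multp_code (<)\<close> by \<open>multp_code_eq_multp\<close>, so \<open>multp_code\<close> (and
  analogously \<open>multeqp_code\<close> for \<open>(\<le>)\<close>) could serve as the basis of a code equation.
\<close>
lemma less_multiset_code_sketch:
  "(M :: 'a::preorder multiset) < N \<longleftrightarrow> multp_code (<) M N"
  \<comment> \<open>illustrative sketch; not from the original theory\<close>
  unfolding less_multiset_def
  by (simp only: multp_code_eq_multp[OF irreflp_on_less transp_on_less])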
text \<open>Quickcheck generators\<close>
context
includes term_syntax
begin
definition
msetify :: "'a::typerep list \<times> (unit \<Rightarrow> Code_Evaluation.term)
\<Rightarrow> 'a multiset \<times> (unit \<Rightarrow> Code_Evaluation.term)" where
[code_unfold]: "msetify xs = Code_Evaluation.valtermify mset {\<cdot>} xs"
end
instantiation multiset :: (random) random
begin
context
includes state_combinator_syntax
begin
definition
"Quickcheck_Random.random i = Quickcheck_Random.random i \<circ>\<rightarrow> (\<lambda>xs. Pair (msetify xs))"
instance ..
end
end
instantiation multiset :: (full_exhaustive) full_exhaustive
begin
definition full_exhaustive_multiset :: "('a multiset \<times> (unit \<Rightarrow> term) \<Rightarrow> (bool \<times> term list) option) \<Rightarrow> natural \<Rightarrow> (bool \<times> term list) option"
where
"full_exhaustive_multiset f i = Quickcheck_Exhaustive.full_exhaustive (\<lambda>xs. f (msetify xs)) i"
instance ..
end
hide_const (open) msetify
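text \<open>
  A brief, illustrative use of these generators (not part of the original theory);
  the statement below is deliberately false, so a counterexample of size two is
  expected:
\<close>
lemma "size (M :: nat multiset) < 2"
  quickcheck[random]
  oops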
subsection \<open>BNF setup\<close>
definition rel_mset where
"rel_mset R X Y \<longleftrightarrow> (\<exists>xs ys. mset xs = X \<and> mset ys = Y \<and> list_all2 R xs ys)"
lemma mset_zip_take_Cons_drop_twice:
assumes "length xs = length ys" "j \<le> length xs"
shows "mset (zip (take j xs @ x # drop j xs) (take j ys @ y # drop j ys)) =
add_mset (x,y) (mset (zip xs ys))"
using assms
proof (induct xs ys arbitrary: x y j rule: list_induct2)
case Nil
thus ?case
by simp
next
case (Cons x xs y ys)
thus ?case
proof (cases "j = 0")
case True
thus ?thesis
by simp
next
case False
then obtain k where k: "j = Suc k"
by (cases j) simp
hence "k \<le> length xs"
using Cons.prems by auto
hence "mset (zip (take k xs @ x # drop k xs) (take k ys @ y # drop k ys)) =
add_mset (x,y) (mset (zip xs ys))"
by (rule Cons.hyps(2))
thus ?thesis
unfolding k by auto
qed
qed
lemma ex_mset_zip_left:
assumes "length xs = length ys" "mset xs' = mset xs"
shows "\<exists>ys'. length ys' = length xs' \<and> mset (zip xs' ys') = mset (zip xs ys)"
using assms
proof (induct xs ys arbitrary: xs' rule: list_induct2)
case Nil
thus ?case
by auto
next
case (Cons x xs y ys xs')
obtain j where j_len: "j < length xs'" and nth_j: "xs' ! j = x"
by (metis Cons.prems in_set_conv_nth list.set_intros(1) mset_eq_setD)
define xsa where "xsa = take j xs' @ drop (Suc j) xs'"
have "mset xs' = {#x#} + mset xsa"
unfolding xsa_def using j_len nth_j
by (metis Cons_nth_drop_Suc union_mset_add_mset_right add_mset_remove_trivial add_diff_cancel_left'
append_take_drop_id mset.simps(2) mset_append)
hence ms_x: "mset xsa = mset xs"
by (simp add: Cons.prems)
then obtain ysa where
len_a: "length ysa = length xsa" and ms_a: "mset (zip xsa ysa) = mset (zip xs ys)"
using Cons.hyps(2) by blast
define ys' where "ys' = take j ysa @ y # drop j ysa"
have xs': "xs' = take j xsa @ x # drop j xsa"
using ms_x j_len nth_j Cons.prems xsa_def
by (metis append_eq_append_conv append_take_drop_id diff_Suc_Suc Cons_nth_drop_Suc length_Cons
length_drop size_mset)
have j_len': "j \<le> length xsa"
using j_len xs' xsa_def
by (metis add_Suc_right append_take_drop_id length_Cons length_append less_eq_Suc_le not_less)
have "length ys' = length xs'"
unfolding ys'_def using Cons.prems len_a ms_x
by (metis add_Suc_right append_take_drop_id length_Cons length_append mset_eq_length)
moreover have "mset (zip xs' ys') = mset (zip (x # xs) (y # ys))"
unfolding xs' ys'_def
by (rule trans[OF mset_zip_take_Cons_drop_twice])
(auto simp: len_a ms_a j_len')
ultimately show ?case
by blast
qed
lemma list_all2_reorder_left_invariance:
assumes rel: "list_all2 R xs ys" and ms_x: "mset xs' = mset xs"
shows "\<exists>ys'. list_all2 R xs' ys' \<and> mset ys' = mset ys"
proof -
have len: "length xs = length ys"
using rel list_all2_conv_all_nth by auto
obtain ys' where
len': "length xs' = length ys'" and ms_xy: "mset (zip xs' ys') = mset (zip xs ys)"
using len ms_x by (metis ex_mset_zip_left)
have "list_all2 R xs' ys'"
using assms(1) len' ms_xy unfolding list_all2_iff by (blast dest: mset_eq_setD)
moreover have "mset ys' = mset ys"
using len len' ms_xy map_snd_zip mset_map by metis
ultimately show ?thesis
by blast
qed
lemma ex_mset: "\<exists>xs. mset xs = X"
by (induct X) (simp, metis mset.simps(2))
inductive pred_mset :: "('a \<Rightarrow> bool) \<Rightarrow> 'a multiset \<Rightarrow> bool"
where
"pred_mset P {#}"
| "\<lbrakk>P a; pred_mset P M\<rbrakk> \<Longrightarrow> pred_mset P (add_mset a M)"
lemma pred_mset_iff: \<comment> \<open>TODO: alias for \<^const>\<open>Multiset.Ball\<close>\<close>
\<open>pred_mset P M \<longleftrightarrow> Multiset.Ball M P\<close> (is \<open>?P \<longleftrightarrow> ?Q\<close>)
proof
assume ?P
then show ?Q by induction simp_all
next
assume ?Q
then show ?P
by (induction M) (auto intro: pred_mset.intros)
qed
bnf "'a multiset"
map: image_mset
sets: set_mset
bd: natLeq
wits: "{#}"
rel: rel_mset
pred: pred_mset
proof -
show "image_mset id = id"
by (rule image_mset.id)
show "image_mset (g \<circ> f) = image_mset g \<circ> image_mset f" for f g
unfolding comp_def by (rule ext) (simp add: comp_def image_mset.compositionality)
show "(\<And>z. z \<in> set_mset X \<Longrightarrow> f z = g z) \<Longrightarrow> image_mset f X = image_mset g X" for f g X
by (induct X) simp_all
show "set_mset \<circ> image_mset f = (`) f \<circ> set_mset" for f
by auto
show "card_order natLeq"
by (rule natLeq_card_order)
show "BNF_Cardinal_Arithmetic.cinfinite natLeq"
by (rule natLeq_cinfinite)
show "regularCard natLeq"
by (rule regularCard_natLeq)
show "ordLess2 (card_of (set_mset X)) natLeq" for X
by transfer
(auto simp: finite_iff_ordLess_natLeq[symmetric])
show "rel_mset R OO rel_mset S \<le> rel_mset (R OO S)" for R S
unfolding rel_mset_def[abs_def] OO_def
apply clarify
subgoal for X Z Y xs ys' ys zs
apply (drule list_all2_reorder_left_invariance [where xs = ys' and ys = zs and xs' = ys])
apply (auto intro: list_all2_trans)
done
done
show "rel_mset R =
(\<lambda>x y. \<exists>z. set_mset z \<subseteq> {(x, y). R x y} \<and>
image_mset fst z = x \<and> image_mset snd z = y)" for R
unfolding rel_mset_def[abs_def]
apply (rule ext)+
apply safe
apply (rule_tac x = "mset (zip xs ys)" in exI;
auto simp: in_set_zip list_all2_iff simp flip: mset_map)
apply (rename_tac XY)
apply (cut_tac X = XY in ex_mset)
apply (erule exE)
apply (rename_tac xys)
apply (rule_tac x = "map fst xys" in exI)
apply (auto simp: mset_map)
apply (rule_tac x = "map snd xys" in exI)
apply (auto simp: mset_map list_all2I subset_eq zip_map_fst_snd)
done
show "z \<in> set_mset {#} \<Longrightarrow> False" for z
by auto
show "pred_mset P = (\<lambda>x. Ball (set_mset x) P)" for P
by (simp add: fun_eq_iff pred_mset_iff)
qed
inductive rel_mset' :: \<open>('a \<Rightarrow> 'b \<Rightarrow> bool) \<Rightarrow> 'a multiset \<Rightarrow> 'b multiset \<Rightarrow> bool\<close>
where
Zero[intro]: "rel_mset' R {#} {#}"
| Plus[intro]: "\<lbrakk>R a b; rel_mset' R M N\<rbrakk> \<Longrightarrow> rel_mset' R (add_mset a M) (add_mset b N)"
lemma rel_mset_Zero: "rel_mset R {#} {#}"
unfolding rel_mset_def Grp_def by auto
declare multiset.count[simp]
declare count_Abs_multiset[simp]
declare multiset.count_inverse[simp]
lemma rel_mset_Plus:
assumes ab: "R a b"
and MN: "rel_mset R M N"
shows "rel_mset R (add_mset a M) (add_mset b N)"
proof -
have "\<exists>ya. add_mset a (image_mset fst y) = image_mset fst ya \<and>
add_mset b (image_mset snd y) = image_mset snd ya \<and>
set_mset ya \<subseteq> {(x, y). R x y}"
if "R a b" and "set_mset y \<subseteq> {(x, y). R x y}" for y
using that by (intro exI[of _ "add_mset (a,b) y"]) auto
thus ?thesis
using assms
unfolding multiset.rel_compp_Grp Grp_def by blast
qed
lemma rel_mset'_imp_rel_mset: "rel_mset' R M N \<Longrightarrow> rel_mset R M N"
by (induct rule: rel_mset'.induct) (auto simp: rel_mset_Zero rel_mset_Plus)
lemma rel_mset_size: "rel_mset R M N \<Longrightarrow> size M = size N"
unfolding multiset.rel_compp_Grp Grp_def by auto
lemma rel_mset_Zero_iff [simp]:
shows "rel_mset rel {#} Y \<longleftrightarrow> Y = {#}" and "rel_mset rel X {#} \<longleftrightarrow> X = {#}"
by (auto simp add: rel_mset_Zero dest: rel_mset_size)
lemma multiset_induct2[case_names empty addL addR]:
assumes empty: "P {#} {#}"
and addL: "\<And>a M N. P M N \<Longrightarrow> P (add_mset a M) N"
and addR: "\<And>a M N. P M N \<Longrightarrow> P M (add_mset a N)"
shows "P M N"
apply(induct N rule: multiset_induct)
apply(induct M rule: multiset_induct, rule empty, erule addL)
apply(induct M rule: multiset_induct, erule addR, erule addR)
done
lemma multiset_induct2_size[consumes 1, case_names empty add]:
assumes c: "size M = size N"
and empty: "P {#} {#}"
and add: "\<And>a b M N. P M N \<Longrightarrow> P (add_mset a M) (add_mset b N)"
shows "P M N"
using c
proof (induct M arbitrary: N rule: measure_induct_rule[of size])
case (less M)
show ?case
proof(cases "M = {#}")
case True hence "N = {#}" using less.prems by auto
thus ?thesis using True empty by auto
next
case False then obtain M1 a where M: "M = add_mset a M1" by (metis multi_nonempty_split)
have "N \<noteq> {#}" using False less.prems by auto
then obtain N1 b where N: "N = add_mset b N1" by (metis multi_nonempty_split)
have "size M1 = size N1" using less.prems unfolding M N by auto
thus ?thesis using M N less.hyps add by auto
qed
qed
lemma msed_map_invL:
assumes "image_mset f (add_mset a M) = N"
shows "\<exists>N1. N = add_mset (f a) N1 \<and> image_mset f M = N1"
proof -
have "f a \<in># N"
using assms multiset.set_map[of f "add_mset a M"] by auto
then obtain N1 where N: "N = add_mset (f a) N1" using multi_member_split by metis
have "image_mset f M = N1" using assms unfolding N by simp
thus ?thesis using N by blast
qed
lemma msed_map_invR:
assumes "image_mset f M = add_mset b N"
shows "\<exists>M1 a. M = add_mset a M1 \<and> f a = b \<and> image_mset f M1 = N"
proof -
obtain a where a: "a \<in># M" and fa: "f a = b"
using multiset.set_map[of f M] unfolding assms
by (metis image_iff union_single_eq_member)
then obtain M1 where M: "M = add_mset a M1" using multi_member_split by metis
have "image_mset f M1 = N" using assms unfolding M fa[symmetric] by simp
thus ?thesis using M fa by blast
qed
lemma msed_rel_invL:
assumes "rel_mset R (add_mset a M) N"
shows "\<exists>N1 b. N = add_mset b N1 \<and> R a b \<and> rel_mset R M N1"
proof -
obtain K where KM: "image_mset fst K = add_mset a M"
and KN: "image_mset snd K = N" and sK: "set_mset K \<subseteq> {(a, b). R a b}"
using assms
unfolding multiset.rel_compp_Grp Grp_def by auto
obtain K1 ab where K: "K = add_mset ab K1" and a: "fst ab = a"
and K1M: "image_mset fst K1 = M" using msed_map_invR[OF KM] by auto
obtain N1 where N: "N = add_mset (snd ab) N1" and K1N1: "image_mset snd K1 = N1"
using msed_map_invL[OF KN[unfolded K]] by auto
have Rab: "R a (snd ab)" using sK a unfolding K by auto
have "rel_mset R M N1" using sK K1M K1N1
unfolding K multiset.rel_compp_Grp Grp_def by auto
thus ?thesis using N Rab by auto
qed
lemma msed_rel_invR:
assumes "rel_mset R M (add_mset b N)"
shows "\<exists>M1 a. M = add_mset a M1 \<and> R a b \<and> rel_mset R M1 N"
proof -
obtain K where KN: "image_mset snd K = add_mset b N"
and KM: "image_mset fst K = M" and sK: "set_mset K \<subseteq> {(a, b). R a b}"
using assms
unfolding multiset.rel_compp_Grp Grp_def by auto
obtain K1 ab where K: "K = add_mset ab K1" and b: "snd ab = b"
and K1N: "image_mset snd K1 = N" using msed_map_invR[OF KN] by auto
obtain M1 where M: "M = add_mset (fst ab) M1" and K1M1: "image_mset fst K1 = M1"
using msed_map_invL[OF KM[unfolded K]] by auto
have Rab: "R (fst ab) b" using sK b unfolding K by auto
have "rel_mset R M1 N" using sK K1N K1M1
unfolding K multiset.rel_compp_Grp Grp_def by auto
thus ?thesis using M Rab by auto
qed
lemma rel_mset_imp_rel_mset':
assumes "rel_mset R M N"
shows "rel_mset' R M N"
using assms proof(induct M arbitrary: N rule: measure_induct_rule[of size])
case (less M)
have c: "size M = size N" using rel_mset_size[OF less.prems] .
show ?case
proof(cases "M = {#}")
case True hence "N = {#}" using c by simp
thus ?thesis using True rel_mset'.Zero by auto
next
case False then obtain M1 a where M: "M = add_mset a M1" by (metis multi_nonempty_split)
obtain N1 b where N: "N = add_mset b N1" and R: "R a b" and ms: "rel_mset R M1 N1"
using msed_rel_invL[OF less.prems[unfolded M]] by auto
have "rel_mset' R M1 N1" using less.hyps[of M1 N1] ms unfolding M by simp
thus ?thesis using rel_mset'.Plus[of R a b, OF R] unfolding M N by simp
qed
qed
lemma rel_mset_rel_mset': "rel_mset R M N = rel_mset' R M N"
using rel_mset_imp_rel_mset' rel_mset'_imp_rel_mset by auto
text \<open>The main end product for \<^const>\<open>rel_mset\<close> is the following inductive characterization:\<close>
lemmas rel_mset_induct[case_names empty add, induct pred: rel_mset] =
rel_mset'.induct[unfolded rel_mset_rel_mset'[symmetric]]
subsection \<open>Size setup\<close>
lemma size_multiset_o_map: "size_multiset g \<circ> image_mset f = size_multiset (g \<circ> f)"
apply (rule ext)
subgoal for x by (induct x) auto
done
setup \<open>
BNF_LFP_Size.register_size_global \<^type_name>\<open>multiset\<close> \<^const_name>\<open>size_multiset\<close>
@{thm size_multiset_overloaded_def}
@{thms size_multiset_empty size_multiset_single size_multiset_union size_empty size_single
size_union}
@{thms size_multiset_o_map}
\<close>
subsection \<open>Lemmas about Size\<close>
lemma size_mset_SucE: "size A = Suc n \<Longrightarrow> (\<And>a B. A = {#a#} + B \<Longrightarrow> size B = n \<Longrightarrow> P) \<Longrightarrow> P"
by (cases A) (auto simp add: ac_simps)
lemma size_Suc_Diff1: "x \<in># M \<Longrightarrow> Suc (size (M - {#x#})) = size M"
using arg_cong[OF insert_DiffM, of _ _ size] by simp
lemma size_Diff_singleton: "x \<in># M \<Longrightarrow> size (M - {#x#}) = size M - 1"
by (simp flip: size_Suc_Diff1)
lemma size_Diff_singleton_if: "size (A - {#x#}) = (if x \<in># A then size A - 1 else size A)"
by (simp add: diff_single_trivial size_Diff_singleton)
lemma size_Un_Int: "size A + size B = size (A \<union># B) + size (A \<inter># B)"
by (metis inter_subset_eq_union size_union subset_mset.diff_add union_diff_inter_eq_sup)
lemma size_Un_disjoint: "A \<inter># B = {#} \<Longrightarrow> size (A \<union># B) = size A + size B"
using size_Un_Int[of A B] by simp
lemma size_Diff_subset_Int: "size (M - M') = size M - size (M \<inter># M')"
by (metis diff_intersect_left_idem size_Diff_submset subset_mset.inf_le1)
lemma diff_size_le_size_Diff: "size (M :: _ multiset) - size M' \<le> size (M - M')"
by (simp add: diff_le_mono2 size_Diff_subset_Int size_mset_mono)
lemma size_Diff1_less: "x\<in># M \<Longrightarrow> size (M - {#x#}) < size M"
by (rule Suc_less_SucD) (simp add: size_Suc_Diff1)
lemma size_Diff2_less: "x\<in># M \<Longrightarrow> y\<in># M \<Longrightarrow> size (M - {#x#} - {#y#}) < size M"
by (metis less_imp_diff_less size_Diff1_less size_Diff_subset_Int)
lemma size_Diff1_le: "size (M - {#x#}) \<le> size M"
by (cases "x \<in># M") (simp_all add: size_Diff1_less less_imp_le diff_single_trivial)
lemma size_psubset: "M \<subseteq># M' \<Longrightarrow> size M < size M' \<Longrightarrow> M \<subset># M'"
using less_irrefl subset_mset_def by blast
lifting_update multiset.lifting
lifting_forget multiset.lifting
hide_const (open) wcount
end
diff --git a/src/HOL/Library/Multiset_Order.thy b/src/HOL/Library/Multiset_Order.thy
--- a/src/HOL/Library/Multiset_Order.thy
+++ b/src/HOL/Library/Multiset_Order.thy
@@ -1,856 +1,856 @@
(* Title: HOL/Library/Multiset_Order.thy
Author: Dmitriy Traytel, TU Muenchen
Author: Jasmin Blanchette, Inria, LORIA, MPII
Author: Martin Desharnais, MPI-INF Saarbruecken
*)
section \<open>More Theorems about the Multiset Order\<close>
theory Multiset_Order
imports Multiset
begin
subsection \<open>Alternative Characterizations\<close>
subsubsection \<open>The Dershowitz--Manna Ordering\<close>
definition multp\<^sub>D\<^sub>M where
"multp\<^sub>D\<^sub>M r M N \<longleftrightarrow>
(\<exists>X Y. X \<noteq> {#} \<and> X \<subseteq># N \<and> M = (N - X) + Y \<and> (\<forall>k. k \<in># Y \<longrightarrow> (\<exists>a. a \<in># X \<and> r k a)))"
lemma multp\<^sub>D\<^sub>M_imp_multp:
"multp\<^sub>D\<^sub>M r M N \<Longrightarrow> multp r M N"
proof -
assume "multp\<^sub>D\<^sub>M r M N"
then obtain X Y where
"X \<noteq> {#}" and "X \<subseteq># N" and "M = N - X + Y" and "\<forall>k. k \<in># Y \<longrightarrow> (\<exists>a. a \<in># X \<and> r k a)"
unfolding multp\<^sub>D\<^sub>M_def by blast
then have "multp r (N - X + Y) (N - X + X)"
by (intro one_step_implies_multp) (auto simp: Bex_def trans_def)
with \<open>M = N - X + Y\<close> \<open>X \<subseteq># N\<close> show "multp r M N"
by (metis subset_mset.diff_add)
qed
subsubsection \<open>The Huet--Oppen Ordering\<close>
definition multp\<^sub>H\<^sub>O where
"multp\<^sub>H\<^sub>O r M N \<longleftrightarrow> M \<noteq> N \<and> (\<forall>y. count N y < count M y \<longrightarrow> (\<exists>x. r y x \<and> count M x < count N x))"
lemma multp_imp_multp\<^sub>H\<^sub>O:
assumes "asymp r" and "transp r"
shows "multp r M N \<Longrightarrow> multp\<^sub>H\<^sub>O r M N"
unfolding multp_def mult_def
proof (induction rule: trancl_induct)
case (base P)
then show ?case
using \<open>asymp r\<close>
by (auto elim!: mult1_lessE simp: count_eq_zero_iff multp\<^sub>H\<^sub>O_def split: if_splits
dest!: Suc_lessD)
next
case (step N P)
from step(3) have "M \<noteq> N" and
**: "\<And>y. count N y < count M y \<Longrightarrow> (\<exists>x. r y x \<and> count M x < count N x)"
by (simp_all add: multp\<^sub>H\<^sub>O_def)
from step(2) obtain M0 a K where
*: "P = add_mset a M0" "N = M0 + K" "a \<notin># K" "\<And>b. b \<in># K \<Longrightarrow> r b a"
using \<open>asymp r\<close> by (auto elim: mult1_lessE)
from \<open>M \<noteq> N\<close> ** *(1,2,3) have "M \<noteq> P"
using *(4) \<open>asymp r\<close>
by (metis asympD add_cancel_right_right add_diff_cancel_left' add_mset_add_single count_inI
count_union diff_diff_add_mset diff_single_trivial in_diff_count multi_member_last)
moreover
{ assume "count P a \<le> count M a"
with \<open>a \<notin># K\<close> have "count N a < count M a" unfolding *(1,2)
by (auto simp add: not_in_iff)
with ** obtain z where z: "r a z" "count M z < count N z"
by blast
with * have "count N z \<le> count P z"
using \<open>asymp r\<close>
by (metis add_diff_cancel_left' add_mset_add_single asympD diff_diff_add_mset
diff_single_trivial in_diff_count not_le_imp_less)
with z have "\<exists>z. r a z \<and> count M z < count P z" by auto
} note count_a = this
{ fix y
assume count_y: "count P y < count M y"
have "\<exists>x. r y x \<and> count M x < count P x"
proof (cases "y = a")
case True
with count_y count_a show ?thesis by auto
next
case False
show ?thesis
proof (cases "y \<in># K")
case True
with *(4) have "r y a" by simp
then show ?thesis
by (cases "count P a \<le> count M a") (auto dest: count_a intro: \<open>transp r\<close>[THEN transpD])
next
case False
with \<open>y \<noteq> a\<close> have "count P y = count N y" unfolding *(1,2)
by (simp add: not_in_iff)
with count_y ** obtain z where z: "r y z" "count M z < count N z" by auto
show ?thesis
proof (cases "z \<in># K")
case True
with *(4) have "r z a" by simp
with z(1) show ?thesis
by (cases "count P a \<le> count M a") (auto dest!: count_a intro: \<open>transp r\<close>[THEN transpD])
next
case False
with \<open>a \<notin># K\<close> have "count N z \<le> count P z" unfolding *
by (auto simp add: not_in_iff)
with z show ?thesis by auto
qed
qed
qed
}
ultimately show ?case unfolding multp\<^sub>H\<^sub>O_def by blast
qed
lemma multp\<^sub>H\<^sub>O_imp_multp\<^sub>D\<^sub>M: "multp\<^sub>H\<^sub>O r M N \<Longrightarrow> multp\<^sub>D\<^sub>M r M N"
unfolding multp\<^sub>D\<^sub>M_def
proof (intro iffI exI conjI)
assume "multp\<^sub>H\<^sub>O r M N"
then obtain z where z: "count M z < count N z"
unfolding multp\<^sub>H\<^sub>O_def by (auto simp: multiset_eq_iff nat_neq_iff)
define X where "X = N - M"
define Y where "Y = M - N"
from z show "X \<noteq> {#}" unfolding X_def by (auto simp: multiset_eq_iff not_less_eq_eq Suc_le_eq)
from z show "X \<subseteq># N" unfolding X_def by auto
show "M = (N - X) + Y" unfolding X_def Y_def multiset_eq_iff count_union count_diff by force
show "\<forall>k. k \<in># Y \<longrightarrow> (\<exists>a. a \<in># X \<and> r k a)"
proof (intro allI impI)
fix k
assume "k \<in># Y"
then have "count N k < count M k" unfolding Y_def
by (auto simp add: in_diff_count)
with \<open>multp\<^sub>H\<^sub>O r M N\<close> obtain a where "r k a" and "count M a < count N a"
unfolding multp\<^sub>H\<^sub>O_def by blast
then show "\<exists>a. a \<in># X \<and> r k a" unfolding X_def
by (auto simp add: in_diff_count)
qed
qed
lemma multp_eq_multp\<^sub>D\<^sub>M: "asymp r \<Longrightarrow> transp r \<Longrightarrow> multp r = multp\<^sub>D\<^sub>M r"
using multp\<^sub>D\<^sub>M_imp_multp multp_imp_multp\<^sub>H\<^sub>O[THEN multp\<^sub>H\<^sub>O_imp_multp\<^sub>D\<^sub>M]
by blast
lemma multp_eq_multp\<^sub>H\<^sub>O: "asymp r \<Longrightarrow> transp r \<Longrightarrow> multp r = multp\<^sub>H\<^sub>O r"
using multp\<^sub>H\<^sub>O_imp_multp\<^sub>D\<^sub>M[THEN multp\<^sub>D\<^sub>M_imp_multp] multp_imp_multp\<^sub>H\<^sub>O
by blast
lemma multp\<^sub>D\<^sub>M_plus_plusI[simp]:
assumes "multp\<^sub>D\<^sub>M R M1 M2"
shows "multp\<^sub>D\<^sub>M R (M + M1) (M + M2)"
proof -
from assms obtain X Y where
"X \<noteq> {#}" and "X \<subseteq># M2" and "M1 = M2 - X + Y" and "\<forall>k. k \<in># Y \<longrightarrow> (\<exists>a. a \<in># X \<and> R k a)"
unfolding multp\<^sub>D\<^sub>M_def by auto
show "multp\<^sub>D\<^sub>M R (M + M1) (M + M2)"
unfolding multp\<^sub>D\<^sub>M_def
proof (intro exI conjI)
show "X \<noteq> {#}"
using \<open>X \<noteq> {#}\<close> by simp
next
show "X \<subseteq># M + M2"
using \<open>X \<subseteq># M2\<close>
by (simp add: subset_mset.add_increasing)
next
show "M + M1 = M + M2 - X + Y"
using \<open>X \<subseteq># M2\<close> \<open>M1 = M2 - X + Y\<close>
by (metis multiset_diff_union_assoc union_assoc)
next
show "\<forall>k. k \<in># Y \<longrightarrow> (\<exists>a. a \<in># X \<and> R k a)"
using \<open>\<forall>k. k \<in># Y \<longrightarrow> (\<exists>a. a \<in># X \<and> R k a)\<close> by simp
qed
qed
lemma multp\<^sub>H\<^sub>O_plus_plus[simp]: "multp\<^sub>H\<^sub>O R (M + M1) (M + M2) \<longleftrightarrow> multp\<^sub>H\<^sub>O R M1 M2"
unfolding multp\<^sub>H\<^sub>O_def by simp
lemma strict_subset_implies_multp\<^sub>D\<^sub>M: "A \<subset># B \<Longrightarrow> multp\<^sub>D\<^sub>M r A B"
unfolding multp\<^sub>D\<^sub>M_def
by (metis add.right_neutral add_diff_cancel_right' empty_iff mset_subset_eq_add_right
set_mset_empty subset_mset.lessE)
lemma strict_subset_implies_multp\<^sub>H\<^sub>O: "A \<subset># B \<Longrightarrow> multp\<^sub>H\<^sub>O r A B"
unfolding multp\<^sub>H\<^sub>O_def
by (simp add: leD mset_subset_eq_count)
lemma multp\<^sub>H\<^sub>O_implies_one_step_strong:
assumes "multp\<^sub>H\<^sub>O R A B"
defines "J \<equiv> B - A" and "K \<equiv> A - B"
shows "J \<noteq> {#}" and "\<forall>k \<in># K. \<exists>x \<in># J. R k x"
proof -
show "J \<noteq> {#}"
using \<open>multp\<^sub>H\<^sub>O R A B\<close>
by (metis Diff_eq_empty_iff_mset J_def add.right_neutral multp\<^sub>D\<^sub>M_def multp\<^sub>H\<^sub>O_imp_multp\<^sub>D\<^sub>M
multp\<^sub>H\<^sub>O_plus_plus subset_mset.add_diff_inverse subset_mset.le_zero_eq)
show "\<forall>k\<in>#K. \<exists>x\<in>#J. R k x"
using \<open>multp\<^sub>H\<^sub>O R A B\<close>
by (metis J_def K_def in_diff_count multp\<^sub>H\<^sub>O_def)
qed
lemma multp\<^sub>H\<^sub>O_minus_inter_minus_inter_iff:
fixes M1 M2 :: "_ multiset"
shows "multp\<^sub>H\<^sub>O R (M1 - M2) (M2 - M1) \<longleftrightarrow> multp\<^sub>H\<^sub>O R M1 M2"
by (metis diff_intersect_left_idem multiset_inter_commute multp\<^sub>H\<^sub>O_plus_plus
subset_mset.add_diff_inverse subset_mset.inf.cobounded1)
lemma multp\<^sub>H\<^sub>O_iff_set_mset_less\<^sub>H\<^sub>O_set_mset:
"multp\<^sub>H\<^sub>O R M1 M2 \<longleftrightarrow> (set_mset (M1 - M2) \<noteq> set_mset (M2 - M1) \<and>
(\<forall>y \<in># M1 - M2. (\<exists>x \<in># M2 - M1. R y x)))"
unfolding multp\<^sub>H\<^sub>O_minus_inter_minus_inter_iff[of R M1 M2, symmetric]
unfolding multp\<^sub>H\<^sub>O_def
unfolding count_minus_inter_lt_count_minus_inter_iff
unfolding minus_inter_eq_minus_inter_iff
by auto
subsubsection \<open>Monotonicity\<close>
lemma multp\<^sub>D\<^sub>M_mono_strong:
"multp\<^sub>D\<^sub>M R M1 M2 \<Longrightarrow> (\<And>x y. x \<in># M1 \<Longrightarrow> y \<in># M2 \<Longrightarrow> R x y \<Longrightarrow> S x y) \<Longrightarrow> multp\<^sub>D\<^sub>M S M1 M2"
unfolding multp\<^sub>D\<^sub>M_def
by (metis add_diff_cancel_left' in_diffD subset_mset.diff_add)
lemma multp\<^sub>H\<^sub>O_mono_strong:
"multp\<^sub>H\<^sub>O R M1 M2 \<Longrightarrow> (\<And>x y. x \<in># M1 \<Longrightarrow> y \<in># M2 \<Longrightarrow> R x y \<Longrightarrow> S x y) \<Longrightarrow> multp\<^sub>H\<^sub>O S M1 M2"
unfolding multp\<^sub>H\<^sub>O_def
by (metis count_inI less_zeroE)
subsubsection \<open>Properties of Orders\<close>
paragraph \<open>Asymmetry\<close>
text \<open>The following lemma is a negative result: asymmetry of an arbitrary binary relation cannot
simply be lifted to @{const multp\<^sub>H\<^sub>O}. Four distinct values suffice to build a
counterexample.\<close>
lemma asymp_not_liftable_to_multp\<^sub>H\<^sub>O:
fixes a b c d :: 'a
assumes "distinct [a, b, c, d]"
shows "\<not> (\<forall>(R :: 'a \<Rightarrow> 'a \<Rightarrow> bool). asymp R \<longrightarrow> asymp (multp\<^sub>H\<^sub>O R))"
proof -
define R :: "'a \<Rightarrow> 'a \<Rightarrow> bool" where
"R = (\<lambda>x y. x = a \<and> y = c \<or> x = b \<and> y = d \<or> x = c \<and> y = b \<or> x = d \<and> y = a)"
from assms(1) have "{#a, b#} \<noteq> {#c, d#}"
by (metis add_mset_add_single distinct.simps(2) list.set(1) list.simps(15) multi_member_this
set_mset_add_mset_insert set_mset_single)
from assms(1) have "asymp R"
by (auto simp: R_def intro: asymp_onI)
moreover have "\<not> asymp (multp\<^sub>H\<^sub>O R)"
unfolding asymp_on_def Set.ball_simps not_all not_imp not_not
proof (intro exI conjI)
show "multp\<^sub>H\<^sub>O R {#a, b#} {#c, d#}"
unfolding multp\<^sub>H\<^sub>O_def
using \<open>{#a, b#} \<noteq> {#c, d#}\<close> R_def assms by auto
next
show "multp\<^sub>H\<^sub>O R {#c, d#} {#a, b#}"
unfolding multp\<^sub>H\<^sub>O_def
using \<open>{#a, b#} \<noteq> {#c, d#}\<close> R_def assms by auto
qed
ultimately show ?thesis
unfolding not_all not_imp by auto
qed
text \<open>However, if the binary relation is both asymmetric and transitive, then @{const multp\<^sub>H\<^sub>O} is
also asymmetric.\<close>
lemma asymp_on_multp\<^sub>H\<^sub>O:
assumes "asymp_on A R" and "transp_on A R" and
B_sub_A: "\<And>M. M \<in> B \<Longrightarrow> set_mset M \<subseteq> A"
shows "asymp_on B (multp\<^sub>H\<^sub>O R)"
proof (rule asymp_onI)
fix M1 M2 :: "'a multiset"
assume "M1 \<in> B" "M2 \<in> B" "multp\<^sub>H\<^sub>O R M1 M2"
from \<open>transp_on A R\<close> B_sub_A have tran: "transp_on (set_mset (M1 - M2)) R"
using \<open>M1 \<in> B\<close>
by (meson in_diffD subset_eq transp_on_subset)
from \<open>asymp_on A R\<close> B_sub_A have asym: "asymp_on (set_mset (M1 - M2)) R"
using \<open>M1 \<in> B\<close>
by (meson in_diffD subset_eq asymp_on_subset)
show "\<not> multp\<^sub>H\<^sub>O R M2 M1"
proof (cases "M1 - M2 = {#}")
case True
then show ?thesis
using multp\<^sub>H\<^sub>O_implies_one_step_strong(1) by metis
next
case False
hence "\<exists>m\<in>#M1 - M2. \<forall>x\<in>#M1 - M2. x \<noteq> m \<longrightarrow> \<not> R m x"
using Finite_Set.bex_max_element[of "set_mset (M1 - M2)" R, OF finite_set_mset asym tran]
by simp
with \<open>transp_on A R\<close> B_sub_A have "\<exists>y\<in>#M2 - M1. \<forall>x\<in>#M1 - M2. \<not> R y x"
using \<open>multp\<^sub>H\<^sub>O R M1 M2\<close>[THEN multp\<^sub>H\<^sub>O_implies_one_step_strong(2)]
using asym[THEN irreflp_on_if_asymp_on, THEN irreflp_onD]
by (metis \<open>M1 \<in> B\<close> \<open>M2 \<in> B\<close> in_diffD subsetD transp_onD)
thus ?thesis
unfolding multp\<^sub>H\<^sub>O_iff_set_mset_less\<^sub>H\<^sub>O_set_mset by simp
qed
qed
lemma asymp_multp\<^sub>H\<^sub>O:
assumes "asymp R" and "transp R"
shows "asymp (multp\<^sub>H\<^sub>O R)"
using assms asymp_on_multp\<^sub>H\<^sub>O[of UNIV, simplified] by metis
paragraph \<open>Irreflexivity\<close>
lemma irreflp_on_multp\<^sub>H\<^sub>O[simp]: "irreflp_on B (multp\<^sub>H\<^sub>O R)"
by (simp add: irreflp_onI multp\<^sub>H\<^sub>O_def)
paragraph \<open>Transitivity\<close>
lemma transp_on_multp\<^sub>H\<^sub>O:
assumes "asymp_on A R" and "transp_on A R" and
B_sub_A: "\<And>M. M \<in> B \<Longrightarrow> set_mset M \<subseteq> A"
shows "transp_on B (multp\<^sub>H\<^sub>O R)"
proof (rule transp_onI)
from assms have "asymp_on B (multp\<^sub>H\<^sub>O R)"
using asymp_on_multp\<^sub>H\<^sub>O by metis
fix M1 M2 M3
assume hyps: "M1 \<in> B" "M2 \<in> B" "M3 \<in> B" "multp\<^sub>H\<^sub>O R M1 M2" "multp\<^sub>H\<^sub>O R M2 M3"
from assms have
[intro]: "asymp_on (set_mset M1 \<union> set_mset M2) R" "transp_on (set_mset M1 \<union> set_mset M2) R"
using \<open>M1 \<in> B\<close> \<open>M2 \<in> B\<close>
by (simp_all add: asymp_on_subset transp_on_subset)
from assms have "transp_on (set_mset M1) R"
by (meson transp_on_subset hyps(1))
from \<open>multp\<^sub>H\<^sub>O R M1 M2\<close> have
"M1 \<noteq> M2" and
"\<forall>y. count M2 y < count M1 y \<longrightarrow> (\<exists>x. R y x \<and> count M1 x < count M2 x)"
unfolding multp\<^sub>H\<^sub>O_def by simp_all
from \<open>multp\<^sub>H\<^sub>O R M2 M3\<close> have
"M2 \<noteq> M3" and
"\<forall>y. count M3 y < count M2 y \<longrightarrow> (\<exists>x. R y x \<and> count M2 x < count M3 x)"
unfolding multp\<^sub>H\<^sub>O_def by simp_all
show "multp\<^sub>H\<^sub>O R M1 M3"
proof (rule ccontr)
let ?P = "\<lambda>x. count M3 x < count M1 x \<and> (\<forall>y. R x y \<longrightarrow> count M1 y \<ge> count M3 y)"
assume "\<not> multp\<^sub>H\<^sub>O R M1 M3"
hence "M1 = M3 \<or> (\<exists>x. ?P x)"
unfolding multp\<^sub>H\<^sub>O_def by force
thus False
proof (elim disjE)
assume "M1 = M3"
thus False
using \<open>asymp_on B (multp\<^sub>H\<^sub>O R)\<close>[THEN asymp_onD]
using \<open>M2 \<in> B\<close> \<open>M3 \<in> B\<close> \<open>multp\<^sub>H\<^sub>O R M1 M2\<close> \<open>multp\<^sub>H\<^sub>O R M2 M3\<close>
by metis
next
assume "\<exists>x. ?P x"
hence "\<exists>x \<in># M1 + M2. ?P x"
by (auto simp: count_inI)
have "\<exists>y \<in># M1 + M2. ?P y \<and> (\<forall>z \<in># M1 + M2. R y z \<longrightarrow> \<not> ?P z)"
proof (rule Finite_Set.bex_max_element_with_property)
show "\<exists>x \<in># M1 + M2. ?P x"
using \<open>\<exists>x. ?P x\<close>
by (auto simp: count_inI)
qed auto
then obtain x where
"x \<in># M1 + M2" and
"count M3 x < count M1 x" and
"\<forall>y. R x y \<longrightarrow> count M1 y \<ge> count M3 y" and
"\<forall>y \<in># M1 + M2. R x y \<longrightarrow> count M3 y < count M1 y \<longrightarrow> (\<exists>z. R y z \<and> count M1 z < count M3 z)"
by force
let ?Q = "\<lambda>x'. R\<^sup>=\<^sup>= x x' \<and> count M3 x' < count M2 x'"
show False
proof (cases "\<exists>x'. ?Q x'")
case True
have "\<exists>y \<in># M1 + M2. ?Q y \<and> (\<forall>z \<in># M1 + M2. R y z \<longrightarrow> \<not> ?Q z)"
proof (rule Finite_Set.bex_max_element_with_property)
show "\<exists>x \<in># M1 + M2. ?Q x"
using \<open>\<exists>x. ?Q x\<close>
by (auto simp: count_inI)
qed auto
then obtain x' where
"x' \<in># M1 + M2" and
"R\<^sup>=\<^sup>= x x'" and
"count M3 x' < count M2 x'" and
maximality_x': "\<forall>z \<in># M1 + M2. R x' z \<longrightarrow> \<not> (R\<^sup>=\<^sup>= x z) \<or> count M3 z \<ge> count M2 z"
by (auto simp: linorder_not_less)
with \<open>multp\<^sub>H\<^sub>O R M2 M3\<close> obtain y' where
"R x' y'" and "count M2 y' < count M3 y'"
unfolding multp\<^sub>H\<^sub>O_def by auto
hence "count M2 y' < count M1 y'"
by (smt (verit) \<open>R\<^sup>=\<^sup>= x x'\<close> \<open>\<forall>y. R x y \<longrightarrow> count M3 y \<le> count M1 y\<close>
\<open>count M3 x < count M1 x\<close> \<open>count M3 x' < count M2 x'\<close> assms(2) count_inI
dual_order.strict_trans1 hyps(1) hyps(2) hyps(3) less_nat_zero_code B_sub_A subsetD
sup2E transp_onD)
with \<open>multp\<^sub>H\<^sub>O R M1 M2\<close> obtain y'' where
"R y' y''" and "count M1 y'' < count M2 y''"
unfolding multp\<^sub>H\<^sub>O_def by auto
hence "count M3 y'' < count M2 y''"
by (smt (verit, del_insts) \<open>R x' y'\<close> \<open>R\<^sup>=\<^sup>= x x'\<close> \<open>\<forall>y. R x y \<longrightarrow> count M3 y \<le> count M1 y\<close>
\<open>count M2 y' < count M3 y'\<close> \<open>count M3 x < count M1 x\<close> \<open>count M3 x' < count M2 x'\<close>
assms(2) count_greater_zero_iff dual_order.strict_trans1 hyps(1) hyps(2) hyps(3)
less_nat_zero_code linorder_not_less B_sub_A subset_iff sup2E transp_onD)
moreover have "count M2 y'' \<le> count M3 y''"
proof -
have "y'' \<in># M1 + M2"
by (metis \<open>count M1 y'' < count M2 y''\<close> count_inI not_less_iff_gr_or_eq union_iff)
moreover have "R x' y''"
by (metis \<open>R x' y'\<close> \<open>R y' y''\<close> \<open>count M2 y' < count M1 y'\<close>
\<open>transp_on (set_mset M1 \<union> set_mset M2) R\<close> \<open>x' \<in># M1 + M2\<close> calculation count_inI
nat_neq_iff set_mset_union transp_onD union_iff)
moreover have "R\<^sup>=\<^sup>= x y''"
using \<open>R\<^sup>=\<^sup>= x x'\<close>
by (metis (mono_tags, opaque_lifting) \<open>transp_on (set_mset M1 \<union> set_mset M2) R\<close>
\<open>x \<in># M1 + M2\<close> \<open>x' \<in># M1 + M2\<close> calculation(1) calculation(2) set_mset_union sup2I1
transp_onD transp_on_reflclp)
ultimately show ?thesis
using maximality_x'[rule_format, of y''] by metis
qed
ultimately show ?thesis
by linarith
next
case False
hence "\<And>x'. R\<^sup>=\<^sup>= x x' \<Longrightarrow> count M2 x' \<le> count M3 x'"
by auto
hence "count M2 x \<le> count M3 x"
by simp
hence "count M2 x < count M1 x"
using \<open>count M3 x < count M1 x\<close> by linarith
with \<open>multp\<^sub>H\<^sub>O R M1 M2\<close> obtain y where
"R x y" and "count M1 y < count M2 y"
unfolding multp\<^sub>H\<^sub>O_def by auto
hence "count M3 y < count M2 y"
using \<open>\<forall>y. R x y \<longrightarrow> count M3 y \<le> count M1 y\<close> dual_order.strict_trans2 by metis
then show ?thesis
using False \<open>R x y\<close> by auto
qed
qed
qed
qed
lemma transp_multp\<^sub>H\<^sub>O:
assumes "asymp R" and "transp R"
shows "transp (multp\<^sub>H\<^sub>O R)"
using assms transp_on_multp\<^sub>H\<^sub>O[of UNIV, simplified] by metis
paragraph \<open>Totality\<close>
lemma totalp_on_multp\<^sub>D\<^sub>M:
"totalp_on A R \<Longrightarrow> (\<And>M. M \<in> B \<Longrightarrow> set_mset M \<subseteq> A) \<Longrightarrow> totalp_on B (multp\<^sub>D\<^sub>M R)"
by (smt (verit, ccfv_SIG) count_inI in_mono multp\<^sub>H\<^sub>O_def multp\<^sub>H\<^sub>O_imp_multp\<^sub>D\<^sub>M not_less_iff_gr_or_eq
totalp_onD totalp_onI)
lemma totalp_multp\<^sub>D\<^sub>M: "totalp R \<Longrightarrow> totalp (multp\<^sub>D\<^sub>M R)"
by (rule totalp_on_multp\<^sub>D\<^sub>M[of UNIV R UNIV, simplified])
lemma totalp_on_multp\<^sub>H\<^sub>O:
"totalp_on A R \<Longrightarrow> (\<And>M. M \<in> B \<Longrightarrow> set_mset M \<subseteq> A) \<Longrightarrow> totalp_on B (multp\<^sub>H\<^sub>O R)"
by (smt (verit, ccfv_SIG) count_inI in_mono multp\<^sub>H\<^sub>O_def not_less_iff_gr_or_eq totalp_onD
totalp_onI)
lemma totalp_multp\<^sub>H\<^sub>O: "totalp R \<Longrightarrow> totalp (multp\<^sub>H\<^sub>O R)"
by (rule totalp_on_multp\<^sub>H\<^sub>O[of UNIV R UNIV, simplified])
paragraph \<open>Type Classes\<close>
context preorder
begin
lemma order_mult: "class.order
(\<lambda>M N. (M, N) \<in> mult {(x, y). x < y} \<or> M = N)
(\<lambda>M N. (M, N) \<in> mult {(x, y). x < y})"
(is "class.order ?le ?less")
proof -
have irrefl: "\<And>M :: 'a multiset. \<not> ?less M M"
proof
fix M :: "'a multiset"
have "trans {(x'::'a, x). x' < x}"
by (rule transI) (blast intro: less_trans)
moreover
assume "(M, M) \<in> mult {(x, y). x < y}"
ultimately have "\<exists>I J K. M = I + J \<and> M = I + K
\<and> J \<noteq> {#} \<and> (\<forall>k\<in>set_mset K. \<exists>j\<in>set_mset J. (k, j) \<in> {(x, y). x < y})"
by (rule mult_implies_one_step)
then obtain I J K where "M = I + J" and "M = I + K"
and "J \<noteq> {#}" and "(\<forall>k\<in>set_mset K. \<exists>j\<in>set_mset J. (k, j) \<in> {(x, y). x < y})" by blast
then have aux1: "K \<noteq> {#}" and aux2: "\<forall>k\<in>set_mset K. \<exists>j\<in>set_mset K. k < j" by auto
have "finite (set_mset K)" by simp
moreover note aux2
ultimately have "set_mset K = {}"
by (induct rule: finite_induct)
(simp, metis (mono_tags) insert_absorb insert_iff insert_not_empty less_irrefl less_trans)
with aux1 show False by simp
qed
have trans: "\<And>K M N :: 'a multiset. ?less K M \<Longrightarrow> ?less M N \<Longrightarrow> ?less K N"
unfolding mult_def by (blast intro: trancl_trans)
show "class.order ?le ?less"
by standard (auto simp add: less_eq_multiset_def irrefl dest: trans)
qed
text \<open>The Dershowitz--Manna ordering:\<close>
definition less_multiset\<^sub>D\<^sub>M where
"less_multiset\<^sub>D\<^sub>M M N \<longleftrightarrow>
(\<exists>X Y. X \<noteq> {#} \<and> X \<subseteq># N \<and> M = (N - X) + Y \<and> (\<forall>k. k \<in># Y \<longrightarrow> (\<exists>a. a \<in># X \<and> k < a)))"
text \<open>The Huet--Oppen ordering:\<close>
definition less_multiset\<^sub>H\<^sub>O where
"less_multiset\<^sub>H\<^sub>O M N \<longleftrightarrow> M \<noteq> N \<and> (\<forall>y. count N y < count M y \<longrightarrow> (\<exists>x. y < x \<and> count M x < count N x))"
lemma mult_imp_less_multiset\<^sub>H\<^sub>O:
"(M, N) \<in> mult {(x, y). x < y} \<Longrightarrow> less_multiset\<^sub>H\<^sub>O M N"
unfolding multp_def[of "(<)", symmetric]
using multp_imp_multp\<^sub>H\<^sub>O[of "(<)"]
by (simp add: less_multiset\<^sub>H\<^sub>O_def multp\<^sub>H\<^sub>O_def)
lemma less_multiset\<^sub>D\<^sub>M_imp_mult:
"less_multiset\<^sub>D\<^sub>M M N \<Longrightarrow> (M, N) \<in> mult {(x, y). x < y}"
unfolding multp_def[of "(<)", symmetric]
by (rule multp\<^sub>D\<^sub>M_imp_multp[of "(<)" M N]) (simp add: less_multiset\<^sub>D\<^sub>M_def multp\<^sub>D\<^sub>M_def)
lemma less_multiset\<^sub>H\<^sub>O_imp_less_multiset\<^sub>D\<^sub>M: "less_multiset\<^sub>H\<^sub>O M N \<Longrightarrow> less_multiset\<^sub>D\<^sub>M M N"
unfolding less_multiset\<^sub>D\<^sub>M_def less_multiset\<^sub>H\<^sub>O_def
unfolding multp\<^sub>D\<^sub>M_def[symmetric] multp\<^sub>H\<^sub>O_def[symmetric]
by (rule multp\<^sub>H\<^sub>O_imp_multp\<^sub>D\<^sub>M)
lemma mult_less_multiset\<^sub>D\<^sub>M: "(M, N) \<in> mult {(x, y). x < y} \<longleftrightarrow> less_multiset\<^sub>D\<^sub>M M N"
unfolding multp_def[of "(<)", symmetric]
using multp_eq_multp\<^sub>D\<^sub>M[of "(<)", simplified]
by (simp add: multp\<^sub>D\<^sub>M_def less_multiset\<^sub>D\<^sub>M_def)
lemma mult_less_multiset\<^sub>H\<^sub>O: "(M, N) \<in> mult {(x, y). x < y} \<longleftrightarrow> less_multiset\<^sub>H\<^sub>O M N"
unfolding multp_def[of "(<)", symmetric]
using multp_eq_multp\<^sub>H\<^sub>O[of "(<)", simplified]
by (simp add: multp\<^sub>H\<^sub>O_def less_multiset\<^sub>H\<^sub>O_def)
lemmas mult\<^sub>D\<^sub>M = mult_less_multiset\<^sub>D\<^sub>M[unfolded less_multiset\<^sub>D\<^sub>M_def]
lemmas mult\<^sub>H\<^sub>O = mult_less_multiset\<^sub>H\<^sub>O[unfolded less_multiset\<^sub>H\<^sub>O_def]
end
lemma less_multiset_less_multiset\<^sub>H\<^sub>O: "M < N \<longleftrightarrow> less_multiset\<^sub>H\<^sub>O M N"
unfolding less_multiset_def multp_def mult\<^sub>H\<^sub>O less_multiset\<^sub>H\<^sub>O_def ..
lemma less_multiset\<^sub>D\<^sub>M:
"M < N \<longleftrightarrow> (\<exists>X Y. X \<noteq> {#} \<and> X \<subseteq># N \<and> M = N - X + Y \<and> (\<forall>k. k \<in># Y \<longrightarrow> (\<exists>a. a \<in># X \<and> k < a)))"
by (rule mult\<^sub>D\<^sub>M[folded multp_def less_multiset_def])
lemma less_multiset\<^sub>H\<^sub>O:
"M < N \<longleftrightarrow> M \<noteq> N \<and> (\<forall>y. count N y < count M y \<longrightarrow> (\<exists>x>y. count M x < count N x))"
by (rule mult\<^sub>H\<^sub>O[folded multp_def less_multiset_def])
lemma subset_eq_imp_le_multiset:
shows "M \<subseteq># N \<Longrightarrow> M \<le> N"
unfolding less_eq_multiset_def less_multiset\<^sub>H\<^sub>O
by (simp add: less_le_not_le subseteq_mset_def)
(* FIXME: "le" should be "less" in this and other names *)
lemma le_multiset_right_total: "M < add_mset x M"
unfolding less_eq_multiset_def less_multiset\<^sub>H\<^sub>O by simp
lemma less_eq_multiset_empty_left[simp]:
shows "{#} \<le> M"
by (simp add: subset_eq_imp_le_multiset)
lemma ex_gt_imp_less_multiset: "(\<exists>y. y \<in># N \<and> (\<forall>x. x \<in># M \<longrightarrow> x < y)) \<Longrightarrow> M < N"
unfolding less_multiset\<^sub>H\<^sub>O
by (metis count_eq_zero_iff count_greater_zero_iff less_le_not_le)
lemma less_eq_multiset_empty_right[simp]: "M \<noteq> {#} \<Longrightarrow> \<not> M \<le> {#}"
by (metis less_eq_multiset_empty_left antisym)
(* FIXME: "le" should be "less" in this and other names *)
lemma le_multiset_empty_left[simp]: "M \<noteq> {#} \<Longrightarrow> {#} < M"
by (simp add: less_multiset\<^sub>H\<^sub>O)
(* FIXME: "le" should be "less" in this and other names *)
lemma le_multiset_empty_right[simp]: "\<not> M < {#}"
using subset_mset.le_zero_eq less_multiset_def multp_def less_multiset\<^sub>D\<^sub>M by blast
(* FIXME: "le" should be "less" in this and other names *)
lemma union_le_diff_plus: "P \<subseteq># M \<Longrightarrow> N < P \<Longrightarrow> M - P + N < M"
by (drule subset_mset.diff_add[symmetric]) (metis union_le_mono2)
instantiation multiset :: (preorder) ordered_ab_semigroup_monoid_add_imp_le
begin
lemma less_eq_multiset\<^sub>H\<^sub>O:
"M \<le> N \<longleftrightarrow> (\<forall>y. count N y < count M y \<longrightarrow> (\<exists>x. y < x \<and> count M x < count N x))"
by (auto simp: less_eq_multiset_def less_multiset\<^sub>H\<^sub>O)
instance by standard (auto simp: less_eq_multiset\<^sub>H\<^sub>O)
lemma
fixes M N :: "'a multiset"
shows
less_eq_multiset_plus_left: "N \<le> (M + N)" and
less_eq_multiset_plus_right: "M \<le> (M + N)"
by simp_all
lemma
fixes M N :: "'a multiset"
shows
le_multiset_plus_left_nonempty: "M \<noteq> {#} \<Longrightarrow> N < M + N" and
le_multiset_plus_right_nonempty: "N \<noteq> {#} \<Longrightarrow> M < M + N"
by simp_all
end
lemma all_lt_Max_imp_lt_mset: "N \<noteq> {#} \<Longrightarrow> (\<forall>a \<in># M. a < Max (set_mset N)) \<Longrightarrow> M < N"
by (meson Max_in[OF finite_set_mset] ex_gt_imp_less_multiset set_mset_eq_empty_iff)
lemma lt_imp_ex_count_lt: "M < N \<Longrightarrow> \<exists>y. count M y < count N y"
by (meson less_eq_multiset\<^sub>H\<^sub>O less_le_not_le)
lemma subset_imp_less_mset: "A \<subset># B \<Longrightarrow> A < B"
by (simp add: order.not_eq_order_implies_strict subset_eq_imp_le_multiset)
lemma image_mset_strict_mono:
assumes
mono_f: "\<forall>x \<in> set_mset M. \<forall>y \<in> set_mset N. x < y \<longrightarrow> f x < f y" and
less: "M < N"
shows "image_mset f M < image_mset f N"
proof -
obtain Y X where
y_nemp: "Y \<noteq> {#}" and y_sub_N: "Y \<subseteq># N" and M_eq: "M = N - Y + X" and
ex_y: "\<forall>x. x \<in># X \<longrightarrow> (\<exists>y. y \<in># Y \<and> x < y)"
using less[unfolded less_multiset\<^sub>D\<^sub>M] by blast
have x_sub_M: "X \<subseteq># M"
using M_eq by simp
let ?fY = "image_mset f Y"
let ?fX = "image_mset f X"
show ?thesis
unfolding less_multiset\<^sub>D\<^sub>M
proof (intro exI conjI)
show "image_mset f M = image_mset f N - ?fY + ?fX"
using M_eq[THEN arg_cong, of "image_mset f"] y_sub_N
by (metis image_mset_Diff image_mset_union)
next
obtain y where y: "\<forall>x. x \<in># X \<longrightarrow> y x \<in># Y \<and> x < y x"
using ex_y by moura
show "\<forall>fx. fx \<in># ?fX \<longrightarrow> (\<exists>fy. fy \<in># ?fY \<and> fx < fy)"
proof (intro allI impI)
fix fx
assume "fx \<in># ?fX"
then obtain x where fx: "fx = f x" and x_in: "x \<in># X"
by auto
hence y_in: "y x \<in># Y" and y_gt: "x < y x"
using y[rule_format, OF x_in] by blast+
hence "f (y x) \<in># ?fY \<and> f x < f (y x)"
using mono_f y_sub_N x_sub_M x_in
by (metis image_eqI in_image_mset mset_subset_eqD)
thus "\<exists>fy. fy \<in># ?fY \<and> fx < fy"
unfolding fx by auto
qed
qed (auto simp: y_nemp y_sub_N image_mset_subseteq_mono)
qed
lemma image_mset_mono:
assumes
mono_f: "\<forall>x \<in> set_mset M. \<forall>y \<in> set_mset N. x < y \<longrightarrow> f x < f y" and
less: "M \<le> N"
shows "image_mset f M \<le> image_mset f N"
by (metis eq_iff image_mset_strict_mono less less_imp_le mono_f order.not_eq_order_implies_strict)
lemma mset_lt_single_right_iff[simp]: "M < {#y#} \<longleftrightarrow> (\<forall>x \<in># M. x < y)" for y :: "'a::linorder"
proof (rule iffI)
assume M_lt_y: "M < {#y#}"
show "\<forall>x \<in># M. x < y"
proof
fix x
assume x_in: "x \<in># M"
hence M: "M - {#x#} + {#x#} = M"
by (meson insert_DiffM2)
hence "\<not> {#x#} < {#y#} \<Longrightarrow> x < y"
using x_in M_lt_y
by (metis diff_single_eq_union le_multiset_empty_left less_add_same_cancel2 mset_le_trans)
also have "\<not> {#y#} < M"
using M_lt_y mset_le_not_sym by blast
ultimately show "x < y"
by (metis (no_types) Max_ge all_lt_Max_imp_lt_mset empty_iff finite_set_mset insertE
less_le_trans linorder_less_linear mset_le_not_sym set_mset_add_mset_insert
set_mset_eq_empty_iff x_in)
qed
next
assume y_max: "\<forall>x \<in># M. x < y"
show "M < {#y#}"
by (rule all_lt_Max_imp_lt_mset) (auto intro!: y_max)
qed
lemma mset_le_single_right_iff[simp]:
"M \<le> {#y#} \<longleftrightarrow> M = {#y#} \<or> (\<forall>x \<in># M. x < y)" for y :: "'a::linorder"
by (meson less_eq_multiset_def mset_lt_single_right_iff)
subsubsection \<open>Simplifications\<close>
lemma multp\<^sub>H\<^sub>O_repeat_mset_repeat_mset[simp]:
assumes "n \<noteq> 0"
shows "multp\<^sub>H\<^sub>O R (repeat_mset n A) (repeat_mset n B) \<longleftrightarrow> multp\<^sub>H\<^sub>O R A B"
proof (rule iffI)
assume hyp: "multp\<^sub>H\<^sub>O R (repeat_mset n A) (repeat_mset n B)"
hence
1: "repeat_mset n A \<noteq> repeat_mset n B" and
2: "\<forall>y. n * count B y < n * count A y \<longrightarrow> (\<exists>x. R y x \<and> n * count A x < n * count B x)"
by (simp_all add: multp\<^sub>H\<^sub>O_def)
from 1 \<open>n \<noteq> 0\<close> have "A \<noteq> B"
by auto
moreover from 2 \<open>n \<noteq> 0\<close> have "\<forall>y. count B y < count A y \<longrightarrow> (\<exists>x. R y x \<and> count A x < count B x)"
by auto
ultimately show "multp\<^sub>H\<^sub>O R A B"
by (simp add: multp\<^sub>H\<^sub>O_def)
next
assume "multp\<^sub>H\<^sub>O R A B"
hence 1: "A \<noteq> B" and 2: "\<forall>y. count B y < count A y \<longrightarrow> (\<exists>x. R y x \<and> count A x < count B x)"
by (simp_all add: multp\<^sub>H\<^sub>O_def)
from 1 have "repeat_mset n A \<noteq> repeat_mset n B"
by (simp add: assms repeat_mset_cancel1)
moreover from 2 have "\<forall>y. n * count B y < n * count A y \<longrightarrow>
(\<exists>x. R y x \<and> n * count A x < n * count B x)"
by auto
ultimately show "multp\<^sub>H\<^sub>O R (repeat_mset n A) (repeat_mset n B)"
by (simp add: multp\<^sub>H\<^sub>O_def)
qed
lemma multp\<^sub>H\<^sub>O_double_double[simp]: "multp\<^sub>H\<^sub>O R (A + A) (B + B) \<longleftrightarrow> multp\<^sub>H\<^sub>O R A B"
using multp\<^sub>H\<^sub>O_repeat_mset_repeat_mset[of 2]
by (simp add: numeral_Bit0)
subsection \<open>Simprocs\<close>
lemma mset_le_add_iff1:
"j \<le> (i::nat) \<Longrightarrow> (repeat_mset i u + m \<le> repeat_mset j u + n) = (repeat_mset (i-j) u + m \<le> n)"
proof -
assume "j \<le> i"
then have "j + (i - j) = i"
using le_add_diff_inverse by blast
then show ?thesis
by (metis (no_types) add_le_cancel_left left_add_mult_distrib_mset)
qed
lemma mset_le_add_iff2:
"i \<le> (j::nat) \<Longrightarrow> (repeat_mset i u + m \<le> repeat_mset j u + n) = (m \<le> repeat_mset (j-i) u + n)"
proof -
assume "i \<le> j"
then have "i + (j - i) = j"
using le_add_diff_inverse by blast
then show ?thesis
by (metis (no_types) add_le_cancel_left left_add_mult_distrib_mset)
qed
simproc_setup msetless_cancel
("(l::'a::preorder multiset) + m < n" | "(l::'a multiset) < m + n" |
"add_mset a m < n" | "m < add_mset a n" |
"replicate_mset p a < n" | "m < replicate_mset p a" |
"repeat_mset p m < n" | "m < repeat_mset p n") =
- \<open>fn phi => Cancel_Simprocs.less_cancel\<close>
+ \<open>K Cancel_Simprocs.less_cancel\<close>
simproc_setup msetle_cancel
("(l::'a::preorder multiset) + m \<le> n" | "(l::'a multiset) \<le> m + n" |
"add_mset a m \<le> n" | "m \<le> add_mset a n" |
"replicate_mset p a \<le> n" | "m \<le> replicate_mset p a" |
"repeat_mset p m \<le> n" | "m \<le> repeat_mset p n") =
- \<open>fn phi => Cancel_Simprocs.less_eq_cancel\<close>
+ \<open>K Cancel_Simprocs.less_eq_cancel\<close>
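text \<open>These cancellation simprocs remove material common to both sides of \<open><\<close> and \<open>\<le>\<close>
on multisets: for instance, a goal of the form \<open>add_mset a M < add_mset a N\<close> can be reduced
to \<open>M < N\<close>; the lemmas \<open>mset_le_add_iff1\<close> and \<open>mset_le_add_iff2\<close> above provide the
corresponding cancellation step for \<open>repeat_mset\<close>.\<close>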
subsection \<open>Additional facts and instantiations\<close>
lemma ex_gt_count_imp_le_multiset:
"(\<forall>y :: 'a :: order. y \<in># M + N \<longrightarrow> y \<le> x) \<Longrightarrow> count M x < count N x \<Longrightarrow> M < N"
unfolding less_multiset\<^sub>H\<^sub>O
by (metis count_greater_zero_iff le_imp_less_or_eq less_imp_not_less not_gr_zero union_iff)
lemma mset_lt_single_iff[iff]: "{#x#} < {#y#} \<longleftrightarrow> x < y"
unfolding less_multiset\<^sub>H\<^sub>O by simp
lemma mset_le_single_iff[iff]: "{#x#} \<le> {#y#} \<longleftrightarrow> x \<le> y" for x y :: "'a::order"
unfolding less_eq_multiset\<^sub>H\<^sub>O by force
instance multiset :: (linorder) linordered_cancel_ab_semigroup_add
by standard (metis less_eq_multiset\<^sub>H\<^sub>O not_less_iff_gr_or_eq)
lemma less_eq_multiset_total:
fixes M N :: "'a :: linorder multiset"
shows "\<not> M \<le> N \<Longrightarrow> N \<le> M"
by simp
instantiation multiset :: (wellorder) wellorder
begin
lemma wf_less_multiset: "wf {(M :: 'a multiset, N). M < N}"
unfolding less_multiset_def multp_def by (auto intro: wf_mult wf)
instance by standard (metis less_multiset_def multp_def wf wf_def wf_mult)
end
instantiation multiset :: (preorder) order_bot
begin
definition bot_multiset :: "'a multiset" where "bot_multiset = {#}"
instance by standard (simp add: bot_multiset_def)
end
instance multiset :: (preorder) no_top
proof standard
fix x :: "'a multiset"
obtain a :: 'a where True by simp
have "x < x + (x + {#a#})"
by simp
then show "\<exists>y. x < y"
by blast
qed
instance multiset :: (preorder) ordered_cancel_comm_monoid_add
by standard
instantiation multiset :: (linorder) distrib_lattice
begin
definition inf_multiset :: "'a multiset \<Rightarrow> 'a multiset \<Rightarrow> 'a multiset" where
"inf_multiset A B = (if A < B then A else B)"
definition sup_multiset :: "'a multiset \<Rightarrow> 'a multiset \<Rightarrow> 'a multiset" where
"sup_multiset A B = (if B > A then B else A)"
instance
by intro_classes (auto simp: inf_multiset_def sup_multiset_def)
end
end
diff --git a/src/HOL/Library/adhoc_overloading.ML b/src/HOL/Library/adhoc_overloading.ML
--- a/src/HOL/Library/adhoc_overloading.ML
+++ b/src/HOL/Library/adhoc_overloading.ML
@@ -1,245 +1,245 @@
(* Author: Alexander Krauss, TU Muenchen
Author: Christian Sternagel, University of Innsbruck
Adhoc overloading of constants based on their types.
*)
signature ADHOC_OVERLOADING =
sig
val is_overloaded: Proof.context -> string -> bool
val generic_add_overloaded: string -> Context.generic -> Context.generic
val generic_remove_overloaded: string -> Context.generic -> Context.generic
val generic_add_variant: string -> term -> Context.generic -> Context.generic
(*If the list of variants is empty at the end of "generic_remove_variant", then
"generic_remove_overloaded" is called implicitly.*)
val generic_remove_variant: string -> term -> Context.generic -> Context.generic
val show_variants: bool Config.T
end
structure Adhoc_Overloading: ADHOC_OVERLOADING =
struct
val show_variants = Attrib.setup_config_bool \<^binding>\<open>show_variants\<close> (K false);
(* errors *)
fun err_duplicate_variant oconst =
error ("Duplicate variant of " ^ quote oconst);
fun err_not_a_variant oconst =
error ("Not a variant of " ^ quote oconst);
fun err_not_overloaded oconst =
error ("Constant " ^ quote oconst ^ " is not declared as overloaded");
fun err_unresolved_overloading ctxt0 (c, T) t instances =
let
val ctxt = Config.put show_variants true ctxt0
val const_space = Proof_Context.const_space ctxt
val prt_const =
Pretty.block [Name_Space.pretty ctxt const_space c, Pretty.str " ::", Pretty.brk 1,
Pretty.quote (Syntax.pretty_typ ctxt T)]
in
error (Pretty.string_of (Pretty.chunks [
Pretty.block [
Pretty.str "Unresolved adhoc overloading of constant", Pretty.brk 1,
prt_const, Pretty.brk 1, Pretty.str "in term", Pretty.brk 1,
Pretty.block [Pretty.quote (Syntax.pretty_term ctxt t)]],
Pretty.block (
(if null instances then [Pretty.str "no instances"]
else Pretty.fbreaks (
Pretty.str "multiple instances:" ::
map (Pretty.mark Markup.item o Syntax.pretty_term ctxt) instances)))]))
end;
(* generic data *)
fun variants_eq ((v1, T1), (v2, T2)) =
Term.aconv_untyped (v1, v2) andalso T1 = T2;
structure Overload_Data = Generic_Data
(
type T =
{variants : (term * typ) list Symtab.table,
oconsts : string Termtab.table};
val empty = {variants = Symtab.empty, oconsts = Termtab.empty};
fun merge
({variants = vtab1, oconsts = otab1},
{variants = vtab2, oconsts = otab2}) : T =
let
fun merge_oconsts _ (oconst1, oconst2) =
if oconst1 = oconst2 then oconst1
else err_duplicate_variant oconst1;
in
{variants = Symtab.merge_list variants_eq (vtab1, vtab2),
oconsts = Termtab.join merge_oconsts (otab1, otab2)}
end;
);
fun map_tables f g =
Overload_Data.map (fn {variants = vtab, oconsts = otab} =>
{variants = f vtab, oconsts = g otab});
val is_overloaded = Symtab.defined o #variants o Overload_Data.get o Context.Proof;
val get_variants = Symtab.lookup o #variants o Overload_Data.get o Context.Proof;
val get_overloaded = Termtab.lookup o #oconsts o Overload_Data.get o Context.Proof;
fun generic_add_overloaded oconst context =
if is_overloaded (Context.proof_of context) oconst then context
else map_tables (Symtab.update (oconst, [])) I context;
fun generic_remove_overloaded oconst context =
let
fun remove_oconst_and_variants context oconst =
let
val remove_variants =
(case get_variants (Context.proof_of context) oconst of
NONE => I
| SOME vs => fold (Termtab.remove (op =) o rpair oconst o fst) vs);
in map_tables (Symtab.delete_safe oconst) remove_variants context end;
in
if is_overloaded (Context.proof_of context) oconst then remove_oconst_and_variants context oconst
else err_not_overloaded oconst
end;
local
fun generic_variant add oconst t context =
let
val ctxt = Context.proof_of context;
val _ = if is_overloaded ctxt oconst then () else err_not_overloaded oconst;
val T = t |> fastype_of;
val t' = Term.map_types (K dummyT) t;
in
if add then
let
val _ =
(case get_overloaded ctxt t' of
NONE => ()
| SOME oconst' => err_duplicate_variant oconst');
in
map_tables (Symtab.cons_list (oconst, (t', T))) (Termtab.update (t', oconst)) context
end
else
let
val _ =
if member variants_eq (the (get_variants ctxt oconst)) (t', T) then ()
else err_not_a_variant oconst;
in
map_tables (Symtab.map_entry oconst (remove1 variants_eq (t', T)))
(Termtab.delete_safe t') context
|> (fn context =>
(case get_variants (Context.proof_of context) oconst of
SOME [] => generic_remove_overloaded oconst context
| _ => context))
end
end;
in
val generic_add_variant = generic_variant true;
val generic_remove_variant = generic_variant false;
end;
(* check / uncheck *)
fun unifiable_with thy T1 T2 =
let
val maxidx1 = Term.maxidx_of_typ T1;
val T2' = Logic.incr_tvar (maxidx1 + 1) T2;
val maxidx2 = Term.maxidx_typ T2' maxidx1;
in can (Sign.typ_unify thy (T1, T2')) (Vartab.empty, maxidx2) end;
fun get_candidates ctxt (c, T) =
get_variants ctxt c
|> Option.map (map_filter (fn (t, T') =>
if unifiable_with (Proof_Context.theory_of ctxt) T T' then SOME t
else NONE));
fun insert_variants ctxt t (oconst as Const (c, T)) =
(case get_candidates ctxt (c, T) of
SOME [] => err_unresolved_overloading ctxt (c, T) t []
| SOME [variant] => variant
| _ => oconst)
| insert_variants _ _ oconst = oconst;
fun insert_overloaded ctxt =
let
fun proc t =
Term.map_types (K dummyT) t
|> get_overloaded ctxt
|> Option.map (Const o rpair (Term.type_of t));
in
Pattern.rewrite_term_top (Proof_Context.theory_of ctxt) [] [proc]
end;
fun check ctxt =
map (fn t => Term.map_aterms (insert_variants ctxt t) t);
fun uncheck ctxt ts =
if Config.get ctxt show_variants orelse exists (is_none o try Term.type_of) ts then ts
else map (insert_overloaded ctxt) ts;
fun reject_unresolved ctxt =
let
val the_candidates = the o get_candidates ctxt;
fun check_unresolved t =
(case filter (is_overloaded ctxt o fst) (Term.add_consts t []) of
[] => t
| (cT :: _) => err_unresolved_overloading ctxt cT t (the_candidates cT));
in map check_unresolved end;
(* setup *)
val _ = Context.>>
(Syntax_Phases.term_check 0 "adhoc_overloading" check
#> Syntax_Phases.term_check 1 "adhoc_overloading_unresolved_check" reject_unresolved
#> Syntax_Phases.term_uncheck 0 "adhoc_overloading" uncheck);
(* commands *)
fun generic_adhoc_overloading_cmd add =
if add then
fold (fn (oconst, ts) =>
generic_add_overloaded oconst
#> fold (generic_add_variant oconst) ts)
else
fold (fn (oconst, ts) =>
fold (generic_remove_variant oconst) ts);
fun adhoc_overloading_cmd' add args phi =
let val args' = args
|> map (apsnd (map_filter (fn t =>
let val t' = Morphism.term phi t;
in if Term.aconv_untyped (t, t') then SOME t' else NONE end)));
in generic_adhoc_overloading_cmd add args' end;
fun adhoc_overloading_cmd add raw_args lthy =
let
fun const_name ctxt =
fst o dest_Const o Proof_Context.read_const {proper = false, strict = false} ctxt; (* FIXME {proper = true, strict = true} (!?) *)
fun read_term ctxt = singleton (Variable.polymorphic ctxt) o Syntax.read_term ctxt;
val args =
raw_args
|> map (apfst (const_name lthy))
|> map (apsnd (map (read_term lthy)));
in
- Local_Theory.declaration {syntax = true, pervasive = false}
+ Local_Theory.declaration {syntax = true, pervasive = false, pos = Position.thread_data ()}
(adhoc_overloading_cmd' add args) lthy
end;
val _ =
Outer_Syntax.local_theory \<^command_keyword>\<open>adhoc_overloading\<close>
"add adhoc overloading for constants / fixed variables"
(Parse.and_list1 (Parse.const -- Scan.repeat Parse.term) >> adhoc_overloading_cmd true);
val _ =
Outer_Syntax.local_theory \<^command_keyword>\<open>no_adhoc_overloading\<close>
"delete adhoc overloading for constants / fixed variables"
(Parse.and_list1 (Parse.const -- Scan.repeat Parse.term) >> adhoc_overloading_cmd false);
end;
diff --git a/src/HOL/List.thy b/src/HOL/List.thy
--- a/src/HOL/List.thy
+++ b/src/HOL/List.thy
@@ -1,8354 +1,8354 @@
(* Title: HOL/List.thy
Author: Tobias Nipkow; proofs tidied by LCP
*)
section \<open>The datatype of finite lists\<close>
theory List
imports Sledgehammer Lifting_Set
begin
datatype (set: 'a) list =
Nil ("[]")
| Cons (hd: 'a) (tl: "'a list") (infixr "#" 65)
for
map: map
rel: list_all2
pred: list_all
where
"tl [] = []"
datatype_compat list
lemma [case_names Nil Cons, cases type: list]:
\<comment> \<open>for backward compatibility -- names of variables differ\<close>
"(y = [] \<Longrightarrow> P) \<Longrightarrow> (\<And>a list. y = a # list \<Longrightarrow> P) \<Longrightarrow> P"
by (rule list.exhaust)
lemma [case_names Nil Cons, induct type: list]:
\<comment> \<open>for backward compatibility -- names of variables differ\<close>
"P [] \<Longrightarrow> (\<And>a list. P list \<Longrightarrow> P (a # list)) \<Longrightarrow> P list"
by (rule list.induct)
text \<open>Compatibility:\<close>
setup \<open>Sign.mandatory_path "list"\<close>
lemmas inducts = list.induct
lemmas recs = list.rec
lemmas cases = list.case
setup \<open>Sign.parent_path\<close>
lemmas set_simps = list.set (* legacy *)
syntax
\<comment> \<open>list Enumeration\<close>
"_list" :: "args => 'a list" ("[(_)]")
translations
"[x, xs]" == "x#[xs]"
"[x]" == "x#[]"
subsection \<open>Basic list processing functions\<close>
primrec (nonexhaustive) last :: "'a list \<Rightarrow> 'a" where
"last (x # xs) = (if xs = [] then x else last xs)"
primrec butlast :: "'a list \<Rightarrow> 'a list" where
"butlast [] = []" |
"butlast (x # xs) = (if xs = [] then [] else x # butlast xs)"
lemma set_rec: "set xs = rec_list {} (\<lambda>x _. insert x) xs"
by (induct xs) auto
definition coset :: "'a list \<Rightarrow> 'a set" where
[simp]: "coset xs = - set xs"
primrec append :: "'a list \<Rightarrow> 'a list \<Rightarrow> 'a list" (infixr "@" 65) where
append_Nil: "[] @ ys = ys" |
append_Cons: "(x#xs) @ ys = x # xs @ ys"
primrec rev :: "'a list \<Rightarrow> 'a list" where
"rev [] = []" |
"rev (x # xs) = rev xs @ [x]"
primrec filter:: "('a \<Rightarrow> bool) \<Rightarrow> 'a list \<Rightarrow> 'a list" where
"filter P [] = []" |
"filter P (x # xs) = (if P x then x # filter P xs else filter P xs)"
text \<open>Special input syntax for filter:\<close>
syntax (ASCII)
"_filter" :: "[pttrn, 'a list, bool] => 'a list" ("(1[_<-_./ _])")
syntax
"_filter" :: "[pttrn, 'a list, bool] => 'a list" ("(1[_\<leftarrow>_ ./ _])")
translations
"[x<-xs . P]" \<rightharpoonup> "CONST filter (\<lambda>x. P) xs"
primrec fold :: "('a \<Rightarrow> 'b \<Rightarrow> 'b) \<Rightarrow> 'a list \<Rightarrow> 'b \<Rightarrow> 'b" where
fold_Nil: "fold f [] = id" |
fold_Cons: "fold f (x # xs) = fold f xs \<circ> f x"
primrec foldr :: "('a \<Rightarrow> 'b \<Rightarrow> 'b) \<Rightarrow> 'a list \<Rightarrow> 'b \<Rightarrow> 'b" where
foldr_Nil: "foldr f [] = id" |
foldr_Cons: "foldr f (x # xs) = f x \<circ> foldr f xs"
primrec foldl :: "('b \<Rightarrow> 'a \<Rightarrow> 'b) \<Rightarrow> 'b \<Rightarrow> 'a list \<Rightarrow> 'b" where
foldl_Nil: "foldl f a [] = a" |
foldl_Cons: "foldl f a (x # xs) = foldl f (f a x) xs"
primrec concat:: "'a list list \<Rightarrow> 'a list" where
"concat [] = []" |
"concat (x # xs) = x @ concat xs"
primrec drop:: "nat \<Rightarrow> 'a list \<Rightarrow> 'a list" where
drop_Nil: "drop n [] = []" |
drop_Cons: "drop n (x # xs) = (case n of 0 \<Rightarrow> x # xs | Suc m \<Rightarrow> drop m xs)"
\<comment> \<open>Warning: simpset does not contain this definition, but separate
theorems for \<open>n = 0\<close> and \<open>n = Suc k\<close>\<close>
primrec take:: "nat \<Rightarrow> 'a list \<Rightarrow> 'a list" where
take_Nil:"take n [] = []" |
take_Cons: "take n (x # xs) = (case n of 0 \<Rightarrow> [] | Suc m \<Rightarrow> x # take m xs)"
\<comment> \<open>Warning: simpset does not contain this definition, but separate
theorems for \<open>n = 0\<close> and \<open>n = Suc k\<close>\<close>
primrec (nonexhaustive) nth :: "'a list => nat => 'a" (infixl "!" 100) where
nth_Cons: "(x # xs) ! n = (case n of 0 \<Rightarrow> x | Suc k \<Rightarrow> xs ! k)"
\<comment> \<open>Warning: simpset does not contain this definition, but separate
theorems for \<open>n = 0\<close> and \<open>n = Suc k\<close>\<close>
primrec list_update :: "'a list \<Rightarrow> nat \<Rightarrow> 'a \<Rightarrow> 'a list" where
"list_update [] i v = []" |
"list_update (x # xs) i v =
(case i of 0 \<Rightarrow> v # xs | Suc j \<Rightarrow> x # list_update xs j v)"
nonterminal lupdbinds and lupdbind
syntax
"_lupdbind":: "['a, 'a] => lupdbind" ("(2_ :=/ _)")
"" :: "lupdbind => lupdbinds" ("_")
"_lupdbinds" :: "[lupdbind, lupdbinds] => lupdbinds" ("_,/ _")
"_LUpdate" :: "['a, lupdbinds] => 'a" ("_/[(_)]" [1000,0] 900)
translations
"_LUpdate xs (_lupdbinds b bs)" == "_LUpdate (_LUpdate xs b) bs"
"xs[i:=x]" == "CONST list_update xs i x"
primrec takeWhile :: "('a \<Rightarrow> bool) \<Rightarrow> 'a list \<Rightarrow> 'a list" where
"takeWhile P [] = []" |
"takeWhile P (x # xs) = (if P x then x # takeWhile P xs else [])"
primrec dropWhile :: "('a \<Rightarrow> bool) \<Rightarrow> 'a list \<Rightarrow> 'a list" where
"dropWhile P [] = []" |
"dropWhile P (x # xs) = (if P x then dropWhile P xs else x # xs)"
primrec zip :: "'a list \<Rightarrow> 'b list \<Rightarrow> ('a \<times> 'b) list" where
"zip xs [] = []" |
zip_Cons: "zip xs (y # ys) =
(case xs of [] \<Rightarrow> [] | z # zs \<Rightarrow> (z, y) # zip zs ys)"
\<comment> \<open>Warning: simpset does not contain this definition, but separate
theorems for \<open>xs = []\<close> and \<open>xs = z # zs\<close>\<close>
abbreviation map2 :: "('a \<Rightarrow> 'b \<Rightarrow> 'c) \<Rightarrow> 'a list \<Rightarrow> 'b list \<Rightarrow> 'c list" where
"map2 f xs ys \<equiv> map (\<lambda>(x,y). f x y) (zip xs ys)"
primrec product :: "'a list \<Rightarrow> 'b list \<Rightarrow> ('a \<times> 'b) list" where
"product [] _ = []" |
"product (x#xs) ys = map (Pair x) ys @ product xs ys"
hide_const (open) product
primrec product_lists :: "'a list list \<Rightarrow> 'a list list" where
"product_lists [] = [[]]" |
"product_lists (xs # xss) = concat (map (\<lambda>x. map (Cons x) (product_lists xss)) xs)"
primrec upt :: "nat \<Rightarrow> nat \<Rightarrow> nat list" ("(1[_..</_'])") where
upt_0: "[i..<0] = []" |
upt_Suc: "[i..<(Suc j)] = (if i \<le> j then [i..<j] @ [j] else [])"
definition insert :: "'a \<Rightarrow> 'a list \<Rightarrow> 'a list" where
"insert x xs = (if x \<in> set xs then xs else x # xs)"
definition union :: "'a list \<Rightarrow> 'a list \<Rightarrow> 'a list" where
"union = fold insert"
hide_const (open) insert union
hide_fact (open) insert_def union_def
primrec find :: "('a \<Rightarrow> bool) \<Rightarrow> 'a list \<Rightarrow> 'a option" where
"find _ [] = None" |
"find P (x#xs) = (if P x then Some x else find P xs)"
text \<open>In the context of multisets, \<open>count_list\<close> is equivalent to
\<^term>\<open>count \<circ> mset\<close> and it is advisable to use the latter.\<close>
primrec count_list :: "'a list \<Rightarrow> 'a \<Rightarrow> nat" where
"count_list [] y = 0" |
"count_list (x#xs) y = (if x=y then count_list xs y + 1 else count_list xs y)"
definition
"extract" :: "('a \<Rightarrow> bool) \<Rightarrow> 'a list \<Rightarrow> ('a list * 'a * 'a list) option"
where "extract P xs =
(case dropWhile (Not \<circ> P) xs of
[] \<Rightarrow> None |
y#ys \<Rightarrow> Some(takeWhile (Not \<circ> P) xs, y, ys))"
hide_const (open) "extract"
primrec those :: "'a option list \<Rightarrow> 'a list option"
where
"those [] = Some []" |
"those (x # xs) = (case x of
None \<Rightarrow> None
| Some y \<Rightarrow> map_option (Cons y) (those xs))"
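text \<open>\<open>those\<close> collects the values of an option list if every entry is present:
\<open>those [Some a, Some b] = Some [a, b]\<close>, whereas \<open>those [Some a, None] = None\<close>.\<close>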
primrec remove1 :: "'a \<Rightarrow> 'a list \<Rightarrow> 'a list" where
"remove1 x [] = []" |
"remove1 x (y # xs) = (if x = y then xs else y # remove1 x xs)"
primrec removeAll :: "'a \<Rightarrow> 'a list \<Rightarrow> 'a list" where
"removeAll x [] = []" |
"removeAll x (y # xs) = (if x = y then removeAll x xs else y # removeAll x xs)"
primrec distinct :: "'a list \<Rightarrow> bool" where
"distinct [] \<longleftrightarrow> True" |
"distinct (x # xs) \<longleftrightarrow> x \<notin> set xs \<and> distinct xs"
fun successively :: "('a \<Rightarrow> 'a \<Rightarrow> bool) \<Rightarrow> 'a list \<Rightarrow> bool" where
"successively P [] = True" |
"successively P [x] = True" |
"successively P (x # y # xs) = (P x y \<and> successively P (y#xs))"
definition distinct_adj where
"distinct_adj = successively (\<noteq>)"
primrec remdups :: "'a list \<Rightarrow> 'a list" where
"remdups [] = []" |
"remdups (x # xs) = (if x \<in> set xs then remdups xs else x # remdups xs)"
fun remdups_adj :: "'a list \<Rightarrow> 'a list" where
"remdups_adj [] = []" |
"remdups_adj [x] = [x]" |
"remdups_adj (x # y # xs) = (if x = y then remdups_adj (x # xs) else x # remdups_adj (y # xs))"
primrec replicate :: "nat \<Rightarrow> 'a \<Rightarrow> 'a list" where
replicate_0: "replicate 0 x = []" |
replicate_Suc: "replicate (Suc n) x = x # replicate n x"
text \<open>
Function \<open>size\<close> is overloaded for all datatypes. Users may
refer to the list version as \<open>length\<close>.\<close>
abbreviation length :: "'a list \<Rightarrow> nat" where
"length \<equiv> size"
definition enumerate :: "nat \<Rightarrow> 'a list \<Rightarrow> (nat \<times> 'a) list" where
enumerate_eq_zip: "enumerate n xs = zip [n..<n + length xs] xs"
primrec rotate1 :: "'a list \<Rightarrow> 'a list" where
"rotate1 [] = []" |
"rotate1 (x # xs) = xs @ [x]"
definition rotate :: "nat \<Rightarrow> 'a list \<Rightarrow> 'a list" where
"rotate n = rotate1 ^^ n"
definition nths :: "'a list => nat set => 'a list" where
"nths xs A = map fst (filter (\<lambda>p. snd p \<in> A) (zip xs [0..<size xs]))"
primrec subseqs :: "'a list \<Rightarrow> 'a list list" where
"subseqs [] = [[]]" |
"subseqs (x#xs) = (let xss = subseqs xs in map (Cons x) xss @ xss)"
primrec n_lists :: "nat \<Rightarrow> 'a list \<Rightarrow> 'a list list" where
"n_lists 0 xs = [[]]" |
"n_lists (Suc n) xs = concat (map (\<lambda>ys. map (\<lambda>y. y # ys) xs) (n_lists n xs))"
hide_const (open) n_lists
function splice :: "'a list \<Rightarrow> 'a list \<Rightarrow> 'a list" where
"splice [] ys = ys" |
"splice (x#xs) ys = x # splice ys xs"
by pat_completeness auto
termination
by(relation "measure(\<lambda>(xs,ys). size xs + size ys)") auto
function shuffles where
"shuffles [] ys = {ys}"
| "shuffles xs [] = {xs}"
| "shuffles (x # xs) (y # ys) = (#) x ` shuffles xs (y # ys) \<union> (#) y ` shuffles (x # xs) ys"
by pat_completeness simp_all
termination by lexicographic_order
text\<open>Use only if you cannot use \<^const>\<open>Min\<close> instead:\<close>
fun min_list :: "'a::ord list \<Rightarrow> 'a" where
"min_list (x # xs) = (case xs of [] \<Rightarrow> x | _ \<Rightarrow> min x (min_list xs))"
text\<open>Returns first minimum:\<close>
fun arg_min_list :: "('a \<Rightarrow> ('b::linorder)) \<Rightarrow> 'a list \<Rightarrow> 'a" where
"arg_min_list f [x] = x" |
"arg_min_list f (x#y#zs) = (let m = arg_min_list f (y#zs) in if f x \<le> f m then x else m)"
text\<open>
\begin{figure}[htbp]
\fbox{
\begin{tabular}{l}
@{lemma "[a,b]@[c,d] = [a,b,c,d]" by simp}\\
@{lemma "length [a,b,c] = 3" by simp}\\
@{lemma "set [a,b,c] = {a,b,c}" by simp}\\
@{lemma "map f [a,b,c] = [f a, f b, f c]" by simp}\\
@{lemma "rev [a,b,c] = [c,b,a]" by simp}\\
@{lemma "hd [a,b,c,d] = a" by simp}\\
@{lemma "tl [a,b,c,d] = [b,c,d]" by simp}\\
@{lemma "last [a,b,c,d] = d" by simp}\\
@{lemma "butlast [a,b,c,d] = [a,b,c]" by simp}\\
@{lemma[source] "filter (\<lambda>n::nat. n<2) [0,2,1] = [0,1]" by simp}\\
@{lemma "concat [[a,b],[c,d,e],[],[f]] = [a,b,c,d,e,f]" by simp}\\
@{lemma "fold f [a,b,c] x = f c (f b (f a x))" by simp}\\
@{lemma "foldr f [a,b,c] x = f a (f b (f c x))" by simp}\\
@{lemma "foldl f x [a,b,c] = f (f (f x a) b) c" by simp}\\
@{lemma "successively (\<noteq>) [True,False,True,False]" by simp}\\
@{lemma "zip [a,b,c] [x,y,z] = [(a,x),(b,y),(c,z)]" by simp}\\
@{lemma "zip [a,b] [x,y,z] = [(a,x),(b,y)]" by simp}\\
@{lemma "enumerate 3 [a,b,c] = [(3,a),(4,b),(5,c)]" by normalization}\\
@{lemma "List.product [a,b] [c,d] = [(a, c), (a, d), (b, c), (b, d)]" by simp}\\
@{lemma "product_lists [[a,b], [c], [d,e]] = [[a,c,d], [a,c,e], [b,c,d], [b,c,e]]" by simp}\\
@{lemma "splice [a,b,c] [x,y,z] = [a,x,b,y,c,z]" by simp}\\
@{lemma "splice [a,b,c,d] [x,y] = [a,x,b,y,c,d]" by simp}\\
@{lemma "shuffles [a,b] [c,d] = {[a,b,c,d],[a,c,b,d],[a,c,d,b],[c,a,b,d],[c,a,d,b],[c,d,a,b]}"
by (simp add: insert_commute)}\\
@{lemma "take 2 [a,b,c,d] = [a,b]" by simp}\\
@{lemma "take 6 [a,b,c,d] = [a,b,c,d]" by simp}\\
@{lemma "drop 2 [a,b,c,d] = [c,d]" by simp}\\
@{lemma "drop 6 [a,b,c,d] = []" by simp}\\
@{lemma "takeWhile (%n::nat. n<3) [1,2,3,0] = [1,2]" by simp}\\
@{lemma "dropWhile (%n::nat. n<3) [1,2,3,0] = [3,0]" by simp}\\
@{lemma "distinct [2,0,1::nat]" by simp}\\
@{lemma "remdups [2,0,2,1::nat,2] = [0,1,2]" by simp}\\
@{lemma "remdups_adj [2,2,3,1,1::nat,2,1] = [2,3,1,2,1]" by simp}\\
@{lemma "List.insert 2 [0::nat,1,2] = [0,1,2]" by (simp add: List.insert_def)}\\
@{lemma "List.insert 3 [0::nat,1,2] = [3,0,1,2]" by (simp add: List.insert_def)}\\
@{lemma "List.union [2,3,4] [0::int,1,2] = [4,3,0,1,2]" by (simp add: List.insert_def List.union_def)}\\
@{lemma "List.find (%i::int. i>0) [0,0] = None" by simp}\\
@{lemma "List.find (%i::int. i>0) [0,1,0,2] = Some 1" by simp}\\
@{lemma "count_list [0,1,0,2::int] 0 = 2" by (simp)}\\
@{lemma "List.extract (%i::int. i>0) [0,0] = None" by(simp add: extract_def)}\\
@{lemma "List.extract (%i::int. i>0) [0,1,0,2] = Some([0], 1, [0,2])" by(simp add: extract_def)}\\
@{lemma "remove1 2 [2,0,2,1::nat,2] = [0,2,1,2]" by simp}\\
@{lemma "removeAll 2 [2,0,2,1::nat,2] = [0,1]" by simp}\\
@{lemma "nth [a,b,c,d] 2 = c" by simp}\\
@{lemma "[a,b,c,d][2 := x] = [a,b,x,d]" by simp}\\
@{lemma "nths [a,b,c,d,e] {0,2,3} = [a,c,d]" by (simp add:nths_def)}\\
@{lemma "subseqs [a,b] = [[a, b], [a], [b], []]" by simp}\\
@{lemma "List.n_lists 2 [a,b,c] = [[a, a], [b, a], [c, a], [a, b], [b, b], [c, b], [a, c], [b, c], [c, c]]" by (simp add: eval_nat_numeral)}\\
@{lemma "rotate1 [a,b,c,d] = [b,c,d,a]" by simp}\\
@{lemma "rotate 3 [a,b,c,d] = [d,a,b,c]" by (simp add:rotate_def eval_nat_numeral)}\\
@{lemma "replicate 4 a = [a,a,a,a]" by (simp add:eval_nat_numeral)}\\
@{lemma "[2..<5] = [2,3,4]" by (simp add:eval_nat_numeral)}\\
@{lemma "min_list [3,1,-2::int] = -2" by (simp)}\\
@{lemma "arg_min_list (\<lambda>i. i*i) [3,-1,1,-2::int] = -1" by (simp)}
\end{tabular}}
\caption{Characteristic examples}
\label{fig:Characteristic}
\end{figure}
Figure~\ref{fig:Characteristic} shows characteristic examples
that should give an intuitive understanding of the above functions.
\<close>
text\<open>The following simple sort(ed) functions are intended for proofs,
not for efficient implementations.\<close>
text \<open>A sorted predicate w.r.t. a relation:\<close>
fun sorted_wrt :: "('a \<Rightarrow> 'a \<Rightarrow> bool) \<Rightarrow> 'a list \<Rightarrow> bool" where
"sorted_wrt P [] = True" |
"sorted_wrt P (x # ys) = ((\<forall>y \<in> set ys. P x y) \<and> sorted_wrt P ys)"
text \<open>A class-based sorted predicate:\<close>
context linorder
begin
abbreviation sorted :: "'a list \<Rightarrow> bool" where
"sorted \<equiv> sorted_wrt (\<le>)"
lemma sorted_simps: "sorted [] = True" "sorted (x # ys) = ((\<forall>y \<in> set ys. x\<le>y) \<and> sorted ys)"
by auto
lemma strict_sorted_simps: "sorted_wrt (<) [] = True" "sorted_wrt (<) (x # ys) = ((\<forall>y \<in> set ys. x<y) \<and> sorted_wrt (<) ys)"
by auto
primrec insort_key :: "('b \<Rightarrow> 'a) \<Rightarrow> 'b \<Rightarrow> 'b list \<Rightarrow> 'b list" where
"insort_key f x [] = [x]" |
"insort_key f x (y#ys) =
(if f x \<le> f y then (x#y#ys) else y#(insort_key f x ys))"
definition sort_key :: "('b \<Rightarrow> 'a) \<Rightarrow> 'b list \<Rightarrow> 'b list" where
"sort_key f xs = foldr (insort_key f) xs []"
definition insort_insert_key :: "('b \<Rightarrow> 'a) \<Rightarrow> 'b \<Rightarrow> 'b list \<Rightarrow> 'b list" where
"insort_insert_key f x xs =
(if f x \<in> f ` set xs then xs else insort_key f x xs)"
abbreviation "sort \<equiv> sort_key (\<lambda>x. x)"
abbreviation "insort \<equiv> insort_key (\<lambda>x. x)"
abbreviation "insort_insert \<equiv> insort_insert_key (\<lambda>x. x)"
definition stable_sort_key :: "(('b \<Rightarrow> 'a) \<Rightarrow> 'b list \<Rightarrow> 'b list) \<Rightarrow> bool" where
"stable_sort_key sk =
(\<forall>f xs k. filter (\<lambda>y. f y = k) (sk f xs) = filter (\<lambda>y. f y = k) xs)"
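text \<open>Intuitively, a key-based sorting function \<open>sk\<close> is stable iff elements with equal keys
keep their relative order: for \<open>f = fst\<close>, a stable sort of \<open>[(2, a), (1, b), (2, c)]\<close> must
list \<open>(2, a)\<close> before \<open>(2, c)\<close>, because filtering by the key \<open>2\<close> has to return these entries
in their original order.\<close>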
lemma strict_sorted_iff: "sorted_wrt (<) l \<longleftrightarrow> sorted l \<and> distinct l"
by (induction l) (auto iff: antisym_conv1)
lemma strict_sorted_imp_sorted: "sorted_wrt (<) xs \<Longrightarrow> sorted xs"
by (auto simp: strict_sorted_iff)
end
subsubsection \<open>List comprehension\<close>
text\<open>Input syntax for Haskell-like list comprehension notation.
Typical example: \<open>[(x,y). x \<leftarrow> xs, y \<leftarrow> ys, x \<noteq> y]\<close>,
the list of all pairs of distinct elements from \<open>xs\<close> and \<open>ys\<close>.
The syntax is as in Haskell, except that \<open>|\<close> becomes a dot
(like in Isabelle's set comprehension): \<open>[e. x \<leftarrow> xs, \<dots>]\<close> rather than
\verb![e| x <- xs, ...]!.
The qualifiers after the dot are
\begin{description}
\item[generators] \<open>p \<leftarrow> xs\<close>,
where \<open>p\<close> is a pattern and \<open>xs\<close> an expression of list type, or
\item[guards] \<open>b\<close>, where \<open>b\<close> is a boolean expression.
%\item[local bindings] @ {text"let x = e"}.
\end{description}
Just like in Haskell, list comprehension is just a shorthand. To avoid
misunderstandings, the translation into desugared form is not reversed
upon output. Note that the translation of \<open>[e. x \<leftarrow> xs]\<close> is
optimized to \<^term>\<open>map (%x. e) xs\<close>.
It is easy to write short list comprehensions which stand for complex
expressions. During proofs, they may become unreadable (and
mangled). In such cases it can be advisable to introduce separate
definitions for the list comprehensions in question.\<close>
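text \<open>For instance, \<open>[x * x. x \<leftarrow> [1, 2, 3::nat], x \<noteq> 2]\<close> abbreviates
\<open>concat (map (\<lambda>x. if x \<noteq> 2 then [x * x] else []) [1, 2, 3])\<close>, which evaluates to \<open>[1, 9]\<close>.\<close>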
nonterminal lc_qual and lc_quals
syntax
"_listcompr" :: "'a \<Rightarrow> lc_qual \<Rightarrow> lc_quals \<Rightarrow> 'a list" ("[_ . __")
"_lc_gen" :: "'a \<Rightarrow> 'a list \<Rightarrow> lc_qual" ("_ \<leftarrow> _")
"_lc_test" :: "bool \<Rightarrow> lc_qual" ("_")
(*"_lc_let" :: "letbinds => lc_qual" ("let _")*)
"_lc_end" :: "lc_quals" ("]")
"_lc_quals" :: "lc_qual \<Rightarrow> lc_quals \<Rightarrow> lc_quals" (", __")
syntax (ASCII)
"_lc_gen" :: "'a \<Rightarrow> 'a list \<Rightarrow> lc_qual" ("_ <- _")
parse_translation \<open>
let
val NilC = Syntax.const \<^const_syntax>\<open>Nil\<close>;
val ConsC = Syntax.const \<^const_syntax>\<open>Cons\<close>;
val mapC = Syntax.const \<^const_syntax>\<open>map\<close>;
val concatC = Syntax.const \<^const_syntax>\<open>concat\<close>;
val IfC = Syntax.const \<^const_syntax>\<open>If\<close>;
val dummyC = Syntax.const \<^const_syntax>\<open>Pure.dummy_pattern\<close>
fun single x = ConsC $ x $ NilC;
fun pat_tr ctxt p e opti = (* %x. case x of p => e | _ => [] *)
let
(* FIXME proper name context!? *)
val x =
Free (singleton (Name.variant_list (fold Term.add_free_names [p, e] [])) "x", dummyT);
val e = if opti then single e else e;
val case1 = Syntax.const \<^syntax_const>\<open>_case1\<close> $ p $ e;
val case2 =
Syntax.const \<^syntax_const>\<open>_case1\<close> $ dummyC $ NilC;
val cs = Syntax.const \<^syntax_const>\<open>_case2\<close> $ case1 $ case2;
in Syntax_Trans.abs_tr [x, Case_Translation.case_tr false ctxt [x, cs]] end;
fun pair_pat_tr (x as Free _) e = Syntax_Trans.abs_tr [x, e]
| pair_pat_tr (_ $ p1 $ p2) e =
Syntax.const \<^const_syntax>\<open>case_prod\<close> $ pair_pat_tr p1 (pair_pat_tr p2 e)
| pair_pat_tr dummy e = Syntax_Trans.abs_tr [Syntax.const "_idtdummy", e]
fun pair_pat ctxt (Const (\<^const_syntax>\<open>Pair\<close>,_) $ s $ t) =
pair_pat ctxt s andalso pair_pat ctxt t
| pair_pat ctxt (Free (s,_)) =
let
val thy = Proof_Context.theory_of ctxt;
val s' = Proof_Context.intern_const ctxt s;
in not (Sign.declared_const thy s') end
| pair_pat _ t = (t = dummyC);
fun abs_tr ctxt p e opti =
let val p = Term_Position.strip_positions p
in if pair_pat ctxt p
then (pair_pat_tr p e, true)
else (pat_tr ctxt p e opti, false)
end
fun lc_tr ctxt [e, Const (\<^syntax_const>\<open>_lc_test\<close>, _) $ b, qs] =
let
val res =
(case qs of
Const (\<^syntax_const>\<open>_lc_end\<close>, _) => single e
| Const (\<^syntax_const>\<open>_lc_quals\<close>, _) $ q $ qs => lc_tr ctxt [e, q, qs]);
in IfC $ b $ res $ NilC end
| lc_tr ctxt
[e, Const (\<^syntax_const>\<open>_lc_gen\<close>, _) $ p $ es,
Const(\<^syntax_const>\<open>_lc_end\<close>, _)] =
(case abs_tr ctxt p e true of
(f, true) => mapC $ f $ es
| (f, false) => concatC $ (mapC $ f $ es))
| lc_tr ctxt
[e, Const (\<^syntax_const>\<open>_lc_gen\<close>, _) $ p $ es,
Const (\<^syntax_const>\<open>_lc_quals\<close>, _) $ q $ qs] =
let val e' = lc_tr ctxt [e, q, qs];
in concatC $ (mapC $ (fst (abs_tr ctxt p e' false)) $ es) end;
in [(\<^syntax_const>\<open>_listcompr\<close>, lc_tr)] end
\<close>
ML_val \<open>
let
val read = Syntax.read_term \<^context> o Syntax.implode_input;
fun check s1 s2 =
read s1 aconv read s2 orelse
error ("Check failed: " ^
quote (#1 (Input.source_content s1)) ^ Position.here_list [Input.pos_of s1, Input.pos_of s2]);
in
check \<open>[(x,y,z). b]\<close> \<open>if b then [(x, y, z)] else []\<close>;
check \<open>[(x,y,z). (x,_,y)\<leftarrow>xs]\<close> \<open>map (\<lambda>(x,_,y). (x, y, z)) xs\<close>;
check \<open>[e x y. (x,_)\<leftarrow>xs, y\<leftarrow>ys]\<close> \<open>concat (map (\<lambda>(x,_). map (\<lambda>y. e x y) ys) xs)\<close>;
check \<open>[(x,y,z). x<a, x>b]\<close> \<open>if x < a then if b < x then [(x, y, z)] else [] else []\<close>;
check \<open>[(x,y,z). x\<leftarrow>xs, x>b]\<close> \<open>concat (map (\<lambda>x. if b < x then [(x, y, z)] else []) xs)\<close>;
check \<open>[(x,y,z). x<a, x\<leftarrow>xs]\<close> \<open>if x < a then map (\<lambda>x. (x, y, z)) xs else []\<close>;
check \<open>[(x,y). Cons True x \<leftarrow> xs]\<close>
\<open>concat (map (\<lambda>xa. case xa of [] \<Rightarrow> [] | True # x \<Rightarrow> [(x, y)] | False # x \<Rightarrow> []) xs)\<close>;
check \<open>[(x,y,z). Cons x [] \<leftarrow> xs]\<close>
\<open>concat (map (\<lambda>xa. case xa of [] \<Rightarrow> [] | [x] \<Rightarrow> [(x, y, z)] | x # aa # lista \<Rightarrow> []) xs)\<close>;
check \<open>[(x,y,z). x<a, x>b, x=d]\<close>
\<open>if x < a then if b < x then if x = d then [(x, y, z)] else [] else [] else []\<close>;
check \<open>[(x,y,z). x<a, x>b, y\<leftarrow>ys]\<close>
\<open>if x < a then if b < x then map (\<lambda>y. (x, y, z)) ys else [] else []\<close>;
check \<open>[(x,y,z). x<a, (_,x)\<leftarrow>xs,y>b]\<close>
\<open>if x < a then concat (map (\<lambda>(_,x). if b < y then [(x, y, z)] else []) xs) else []\<close>;
check \<open>[(x,y,z). x<a, x\<leftarrow>xs, y\<leftarrow>ys]\<close>
\<open>if x < a then concat (map (\<lambda>x. map (\<lambda>y. (x, y, z)) ys) xs) else []\<close>;
check \<open>[(x,y,z). x\<leftarrow>xs, x>b, y<a]\<close>
\<open>concat (map (\<lambda>x. if b < x then if y < a then [(x, y, z)] else [] else []) xs)\<close>;
check \<open>[(x,y,z). x\<leftarrow>xs, x>b, y\<leftarrow>ys]\<close>
\<open>concat (map (\<lambda>x. if b < x then map (\<lambda>y. (x, y, z)) ys else []) xs)\<close>;
check \<open>[(x,y,z). x\<leftarrow>xs, (y,_)\<leftarrow>ys,y>x]\<close>
\<open>concat (map (\<lambda>x. concat (map (\<lambda>(y,_). if x < y then [(x, y, z)] else []) ys)) xs)\<close>;
check \<open>[(x,y,z). x\<leftarrow>xs, y\<leftarrow>ys,z\<leftarrow>zs]\<close>
\<open>concat (map (\<lambda>x. concat (map (\<lambda>y. map (\<lambda>z. (x, y, z)) zs) ys)) xs)\<close>
end;
\<close>
ML \<open>
(* Simproc for rewriting list comprehensions applied to List.set to set
comprehension. *)
signature LIST_TO_SET_COMPREHENSION =
sig
val simproc : Proof.context -> cterm -> thm option
end
structure List_to_Set_Comprehension : LIST_TO_SET_COMPREHENSION =
struct
(* conversion *)
fun all_exists_conv cv ctxt ct =
(case Thm.term_of ct of
Const (\<^const_name>\<open>Ex\<close>, _) $ Abs _ =>
Conv.arg_conv (Conv.abs_conv (all_exists_conv cv o #2) ctxt) ct
| _ => cv ctxt ct)
fun all_but_last_exists_conv cv ctxt ct =
(case Thm.term_of ct of
Const (\<^const_name>\<open>Ex\<close>, _) $ Abs (_, _, Const (\<^const_name>\<open>Ex\<close>, _) $ _) =>
Conv.arg_conv (Conv.abs_conv (all_but_last_exists_conv cv o #2) ctxt) ct
| _ => cv ctxt ct)
fun Collect_conv cv ctxt ct =
(case Thm.term_of ct of
Const (\<^const_name>\<open>Collect\<close>, _) $ Abs _ => Conv.arg_conv (Conv.abs_conv cv ctxt) ct
| _ => raise CTERM ("Collect_conv", [ct]))
fun rewr_conv' th = Conv.rewr_conv (mk_meta_eq th)
fun conjunct_assoc_conv ct =
Conv.try_conv
(rewr_conv' @{thm conj_assoc} then_conv HOLogic.conj_conv Conv.all_conv conjunct_assoc_conv) ct
fun right_hand_set_comprehension_conv conv ctxt =
HOLogic.Trueprop_conv (HOLogic.eq_conv Conv.all_conv
(Collect_conv (all_exists_conv conv o #2) ctxt))
(* term abstraction of list comprehension patterns *)
datatype termlets = If | Case of typ * int
local
val set_Nil_I = @{lemma "set [] = {x. False}" by (simp add: empty_def [symmetric])}
val set_singleton = @{lemma "set [a] = {x. x = a}" by simp}
val inst_Collect_mem_eq = @{lemma "set A = {x. x \<in> set A}" by simp}
val del_refl_eq = @{lemma "(t = t \<and> P) \<equiv> P" by simp}
fun mk_set T = Const (\<^const_name>\<open>set\<close>, HOLogic.listT T --> HOLogic.mk_setT T)
fun dest_set (Const (\<^const_name>\<open>set\<close>, _) $ xs) = xs
fun dest_singleton_list (Const (\<^const_name>\<open>Cons\<close>, _) $ t $ (Const (\<^const_name>\<open>Nil\<close>, _))) = t
| dest_singleton_list t = raise TERM ("dest_singleton_list", [t])
(*We check that one case returns a singleton list and all other cases
return [], and return the index of the one singleton list case.*)
fun possible_index_of_singleton_case cases =
let
fun check (i, case_t) s =
(case strip_abs_body case_t of
(Const (\<^const_name>\<open>Nil\<close>, _)) => s
| _ => (case s of SOME NONE => SOME (SOME i) | _ => NONE))
in
fold_index check cases (SOME NONE) |> the_default NONE
end
(*returns (condition, continuation term) option*)
fun dest_if (Const (\<^const_name>\<open>If\<close>, _) $ cond $ then_t $ Const (\<^const_name>\<open>Nil\<close>, _)) =
SOME (cond, then_t)
| dest_if _ = NONE
(*returns (case_expr, type, index, chosen_case, constr_name) option*)
fun dest_case ctxt case_term =
let
val (case_const, args) = strip_comb case_term
in
(case try dest_Const case_const of
SOME (c, T) =>
(case Ctr_Sugar.ctr_sugar_of_case ctxt c of
SOME {ctrs, ...} =>
(case possible_index_of_singleton_case (fst (split_last args)) of
SOME i =>
let
val constr_names = map (fst o dest_Const) ctrs
val (Ts, _) = strip_type T
val T' = List.last Ts
in SOME (List.last args, T', i, nth args i, nth constr_names i) end
| NONE => NONE)
| NONE => NONE)
| NONE => NONE)
end
fun tac ctxt [] =
resolve_tac ctxt [set_singleton] 1 ORELSE
resolve_tac ctxt [inst_Collect_mem_eq] 1
| tac ctxt (If :: cont) =
Splitter.split_tac ctxt @{thms if_split} 1
THEN resolve_tac ctxt @{thms conjI} 1
THEN resolve_tac ctxt @{thms impI} 1
THEN Subgoal.FOCUS (fn {prems, context = ctxt', ...} =>
CONVERSION (right_hand_set_comprehension_conv (K
(HOLogic.conj_conv (Conv.rewr_conv (List.last prems RS @{thm Eq_TrueI})) Conv.all_conv
then_conv
rewr_conv' @{lemma "(True \<and> P) = P" by simp})) ctxt') 1) ctxt 1
THEN tac ctxt cont
THEN resolve_tac ctxt @{thms impI} 1
THEN Subgoal.FOCUS (fn {prems, context = ctxt', ...} =>
CONVERSION (right_hand_set_comprehension_conv (K
(HOLogic.conj_conv (Conv.rewr_conv (List.last prems RS @{thm Eq_FalseI})) Conv.all_conv
then_conv rewr_conv' @{lemma "(False \<and> P) = False" by simp})) ctxt') 1) ctxt 1
THEN resolve_tac ctxt [set_Nil_I] 1
| tac ctxt (Case (T, i) :: cont) =
let
val SOME {injects, distincts, case_thms, split, ...} =
Ctr_Sugar.ctr_sugar_of ctxt (fst (dest_Type T))
in
(* do case distinction *)
Splitter.split_tac ctxt [split] 1
THEN EVERY (map_index (fn (i', _) =>
(if i' < length case_thms - 1 then resolve_tac ctxt @{thms conjI} 1 else all_tac)
THEN REPEAT_DETERM (resolve_tac ctxt @{thms allI} 1)
THEN resolve_tac ctxt @{thms impI} 1
THEN (if i' = i then
(* continue recursively *)
Subgoal.FOCUS (fn {prems, context = ctxt', ...} =>
CONVERSION (Thm.eta_conversion then_conv right_hand_set_comprehension_conv (K
((HOLogic.conj_conv
(HOLogic.eq_conv Conv.all_conv (rewr_conv' (List.last prems)) then_conv
(Conv.try_conv (Conv.rewrs_conv (map mk_meta_eq injects))))
Conv.all_conv)
then_conv (Conv.try_conv (Conv.rewr_conv del_refl_eq))
then_conv conjunct_assoc_conv)) ctxt'
then_conv
(HOLogic.Trueprop_conv
(HOLogic.eq_conv Conv.all_conv (Collect_conv (fn (_, ctxt'') =>
Conv.repeat_conv
(all_but_last_exists_conv
(K (rewr_conv'
@{lemma "(\<exists>x. x = t \<and> P x) = P t" by simp})) ctxt'')) ctxt')))) 1) ctxt 1
THEN tac ctxt cont
else
Subgoal.FOCUS (fn {prems, context = ctxt', ...} =>
CONVERSION
(right_hand_set_comprehension_conv (K
(HOLogic.conj_conv
((HOLogic.eq_conv Conv.all_conv
(rewr_conv' (List.last prems))) then_conv
(Conv.rewrs_conv (map (fn th => th RS @{thm Eq_FalseI}) distincts)))
Conv.all_conv then_conv
(rewr_conv' @{lemma "(False \<and> P) = False" by simp}))) ctxt' then_conv
HOLogic.Trueprop_conv
(HOLogic.eq_conv Conv.all_conv
(Collect_conv (fn (_, ctxt'') =>
Conv.repeat_conv
(Conv.bottom_conv
(K (rewr_conv' @{lemma "(\<exists>x. P) = P" by simp})) ctxt'')) ctxt'))) 1) ctxt 1
THEN resolve_tac ctxt [set_Nil_I] 1)) case_thms)
end
in
fun simproc ctxt redex =
let
fun make_inner_eqs bound_vs Tis eqs t =
(case dest_case ctxt t of
SOME (x, T, i, cont, constr_name) =>
let
val (vs, body) = strip_abs (Envir.eta_long (map snd bound_vs) cont)
val x' = incr_boundvars (length vs) x
val eqs' = map (incr_boundvars (length vs)) eqs
val constr_t =
list_comb
(Const (constr_name, map snd vs ---> T), map Bound (((length vs) - 1) downto 0))
val constr_eq = Const (\<^const_name>\<open>HOL.eq\<close>, T --> T --> \<^typ>\<open>bool\<close>) $ constr_t $ x'
in
make_inner_eqs (rev vs @ bound_vs) (Case (T, i) :: Tis) (constr_eq :: eqs') body
end
| NONE =>
(case dest_if t of
SOME (condition, cont) => make_inner_eqs bound_vs (If :: Tis) (condition :: eqs) cont
| NONE =>
if null eqs then NONE (*no rewriting, nothing to be done*)
else
let
val Type (\<^type_name>\<open>list\<close>, [rT]) = fastype_of1 (map snd bound_vs, t)
val pat_eq =
(case try dest_singleton_list t of
SOME t' =>
Const (\<^const_name>\<open>HOL.eq\<close>, rT --> rT --> \<^typ>\<open>bool\<close>) $
Bound (length bound_vs) $ t'
| NONE =>
Const (\<^const_name>\<open>Set.member\<close>, rT --> HOLogic.mk_setT rT --> \<^typ>\<open>bool\<close>) $
Bound (length bound_vs) $ (mk_set rT $ t))
val reverse_bounds = curry subst_bounds
((map Bound ((length bound_vs - 1) downto 0)) @ [Bound (length bound_vs)])
val eqs' = map reverse_bounds eqs
val pat_eq' = reverse_bounds pat_eq
val inner_t =
fold (fn (_, T) => fn t => HOLogic.exists_const T $ absdummy T t)
(rev bound_vs) (fold (curry HOLogic.mk_conj) eqs' pat_eq')
val lhs = Thm.term_of redex
val rhs = HOLogic.mk_Collect ("x", rT, inner_t)
val rewrite_rule_t = HOLogic.mk_Trueprop (HOLogic.mk_eq (lhs, rhs))
in
SOME
((Goal.prove ctxt [] [] rewrite_rule_t
(fn {context = ctxt', ...} => tac ctxt' (rev Tis))) RS @{thm eq_reflection})
end))
in
make_inner_eqs [] [] [] (dest_set (Thm.term_of redex))
end
end
end
\<close>
simproc_setup list_to_set_comprehension ("set xs") =
\<open>K List_to_Set_Comprehension.simproc\<close>
code_datatype set coset
hide_const (open) coset
subsubsection \<open>\<^const>\<open>Nil\<close> and \<^const>\<open>Cons\<close>\<close>
lemma not_Cons_self [simp]:
"xs \<noteq> x # xs"
by (induct xs) auto
lemma not_Cons_self2 [simp]: "x # xs \<noteq> xs"
by (rule not_Cons_self [symmetric])
lemma neq_Nil_conv: "(xs \<noteq> []) = (\<exists>y ys. xs = y # ys)"
by (induct xs) auto
lemma tl_Nil: "tl xs = [] \<longleftrightarrow> xs = [] \<or> (\<exists>x. xs = [x])"
by (cases xs) auto
lemmas Nil_tl = tl_Nil[THEN eq_iff_swap]
lemma length_induct:
"(\<And>xs. \<forall>ys. length ys < length xs \<longrightarrow> P ys \<Longrightarrow> P xs) \<Longrightarrow> P xs"
by (fact measure_induct)
lemma induct_list012:
"\<lbrakk>P []; \<And>x. P [x]; \<And>x y zs. \<lbrakk> P zs; P (y # zs) \<rbrakk> \<Longrightarrow> P (x # y # zs)\<rbrakk> \<Longrightarrow> P xs"
by induction_schema (pat_completeness, lexicographic_order)
lemma list_nonempty_induct [consumes 1, case_names single cons]:
"\<lbrakk> xs \<noteq> []; \<And>x. P [x]; \<And>x xs. xs \<noteq> [] \<Longrightarrow> P xs \<Longrightarrow> P (x # xs)\<rbrakk> \<Longrightarrow> P xs"
by(induction xs rule: induct_list012) auto
lemma inj_split_Cons: "inj_on (\<lambda>(xs, n). n#xs) X"
by (auto intro!: inj_onI)
lemma inj_on_Cons1 [simp]: "inj_on ((#) x) A"
by(simp add: inj_on_def)
subsubsection \<open>\<^const>\<open>length\<close>\<close>
text \<open>
Needs to come before \<open>@\<close> because of theorem \<open>append_eq_append_conv\<close>.
\<close>
lemma length_append [simp]: "length (xs @ ys) = length xs + length ys"
by (induct xs) auto
lemma length_map [simp]: "length (map f xs) = length xs"
by (induct xs) auto
lemma length_rev [simp]: "length (rev xs) = length xs"
by (induct xs) auto
lemma length_tl [simp]: "length (tl xs) = length xs - 1"
by (cases xs) auto
lemma length_0_conv [iff]: "(length xs = 0) = (xs = [])"
by (induct xs) auto
lemma length_greater_0_conv [iff]: "(0 < length xs) = (xs \<noteq> [])"
by (induct xs) auto
lemma length_pos_if_in_set: "x \<in> set xs \<Longrightarrow> length xs > 0"
by auto
lemma length_Suc_conv: "(length xs = Suc n) = (\<exists>y ys. xs = y # ys \<and> length ys = n)"
by (induct xs) auto
lemmas Suc_length_conv = length_Suc_conv[THEN eq_iff_swap]
lemma Suc_le_length_iff:
"(Suc n \<le> length xs) = (\<exists>x ys. xs = x # ys \<and> n \<le> length ys)"
by (metis Suc_le_D[of n] Suc_le_mono[of n] Suc_length_conv[of _ xs])
lemma impossible_Cons: "length xs \<le> length ys \<Longrightarrow> xs = x # ys = False"
by (induct xs) auto
lemma list_induct2 [consumes 1, case_names Nil Cons]:
"length xs = length ys \<Longrightarrow> P [] [] \<Longrightarrow>
(\<And>x xs y ys. length xs = length ys \<Longrightarrow> P xs ys \<Longrightarrow> P (x#xs) (y#ys))
\<Longrightarrow> P xs ys"
proof (induct xs arbitrary: ys)
case (Cons x xs ys) then show ?case by (cases ys) simp_all
qed simp
lemma list_induct3 [consumes 2, case_names Nil Cons]:
"length xs = length ys \<Longrightarrow> length ys = length zs \<Longrightarrow> P [] [] [] \<Longrightarrow>
(\<And>x xs y ys z zs. length xs = length ys \<Longrightarrow> length ys = length zs \<Longrightarrow> P xs ys zs \<Longrightarrow> P (x#xs) (y#ys) (z#zs))
\<Longrightarrow> P xs ys zs"
proof (induct xs arbitrary: ys zs)
case Nil then show ?case by simp
next
case (Cons x xs ys zs) then show ?case by (cases ys, simp_all)
(cases zs, simp_all)
qed
lemma list_induct4 [consumes 3, case_names Nil Cons]:
"length xs = length ys \<Longrightarrow> length ys = length zs \<Longrightarrow> length zs = length ws \<Longrightarrow>
P [] [] [] [] \<Longrightarrow> (\<And>x xs y ys z zs w ws. length xs = length ys \<Longrightarrow>
length ys = length zs \<Longrightarrow> length zs = length ws \<Longrightarrow> P xs ys zs ws \<Longrightarrow>
P (x#xs) (y#ys) (z#zs) (w#ws)) \<Longrightarrow> P xs ys zs ws"
proof (induct xs arbitrary: ys zs ws)
case Nil then show ?case by simp
next
case (Cons x xs ys zs ws) then show ?case by ((cases ys, simp_all), (cases zs,simp_all)) (cases ws, simp_all)
qed
lemma list_induct2':
"\<lbrakk> P [] [];
\<And>x xs. P (x#xs) [];
\<And>y ys. P [] (y#ys);
\<And>x xs y ys. P xs ys \<Longrightarrow> P (x#xs) (y#ys) \<rbrakk>
\<Longrightarrow> P xs ys"
by (induct xs arbitrary: ys) (case_tac x, auto)+
lemma list_all2_iff:
"list_all2 P xs ys \<longleftrightarrow> length xs = length ys \<and> (\<forall>(x, y) \<in> set (zip xs ys). P x y)"
by (induct xs ys rule: list_induct2') auto
lemma neq_if_length_neq: "length xs \<noteq> length ys \<Longrightarrow> (xs = ys) == False"
by (rule Eq_FalseI) auto
subsubsection \<open>\<open>@\<close> -- append\<close>
global_interpretation append: monoid append Nil
proof
fix xs ys zs :: "'a list"
show "(xs @ ys) @ zs = xs @ (ys @ zs)"
by (induct xs) simp_all
show "xs @ [] = xs"
by (induct xs) simp_all
qed simp
lemma append_assoc [simp]: "(xs @ ys) @ zs = xs @ (ys @ zs)"
by (fact append.assoc)
lemma append_Nil2: "xs @ [] = xs"
by (fact append.right_neutral)
lemma append_is_Nil_conv [iff]: "(xs @ ys = []) = (xs = [] \<and> ys = [])"
by (induct xs) auto
lemmas Nil_is_append_conv [iff] = append_is_Nil_conv[THEN eq_iff_swap]
lemma append_self_conv [iff]: "(xs @ ys = xs) = (ys = [])"
by (induct xs) auto
lemmas self_append_conv [iff] = append_self_conv[THEN eq_iff_swap]
lemma append_eq_append_conv [simp]:
"length xs = length ys \<or> length us = length vs
\<Longrightarrow> (xs@us = ys@vs) = (xs=ys \<and> us=vs)"
by (induct xs arbitrary: ys; case_tac ys; force)
lemma append_eq_append_conv2: "(xs @ ys = zs @ ts) =
(\<exists>us. xs = zs @ us \<and> us @ ys = ts \<or> xs @ us = zs \<and> ys = us @ ts)"
proof (induct xs arbitrary: ys zs ts)
case (Cons x xs)
then show ?case
by (cases zs) auto
qed fastforce
lemma same_append_eq [iff, induct_simp]: "(xs @ ys = xs @ zs) = (ys = zs)"
by simp
lemma append1_eq_conv [iff]: "(xs @ [x] = ys @ [y]) = (xs = ys \<and> x = y)"
by simp
lemma append_same_eq [iff, induct_simp]: "(ys @ xs = zs @ xs) = (ys = zs)"
by simp
lemma append_self_conv2 [iff]: "(xs @ ys = ys) = (xs = [])"
using append_same_eq [of _ _ "[]"] by auto
lemmas self_append_conv2 [iff] = append_self_conv2[THEN eq_iff_swap]
lemma hd_Cons_tl: "xs \<noteq> [] \<Longrightarrow> hd xs # tl xs = xs"
by (fact list.collapse)
lemma hd_append: "hd (xs @ ys) = (if xs = [] then hd ys else hd xs)"
by (induct xs) auto
lemma hd_append2 [simp]: "xs \<noteq> [] \<Longrightarrow> hd (xs @ ys) = hd xs"
by (simp add: hd_append split: list.split)
lemma tl_append: "tl (xs @ ys) = (case xs of [] \<Rightarrow> tl ys | z#zs \<Rightarrow> zs @ ys)"
by (simp split: list.split)
lemma tl_append2 [simp]: "xs \<noteq> [] \<Longrightarrow> tl (xs @ ys) = tl xs @ ys"
by (simp add: tl_append split: list.split)
lemma tl_append_if: "tl (xs @ ys) = (if xs = [] then tl ys else tl xs @ ys)"
by (simp)
lemma Cons_eq_append_conv: "x#xs = ys@zs =
(ys = [] \<and> x#xs = zs \<or> (\<exists>ys'. x#ys' = ys \<and> xs = ys'@zs))"
by(cases ys) auto
lemma append_eq_Cons_conv: "(ys@zs = x#xs) =
(ys = [] \<and> zs = x#xs \<or> (\<exists>ys'. ys = x#ys' \<and> ys'@zs = xs))"
by(cases ys) auto
lemma longest_common_prefix:
"\<exists>ps xs' ys'. xs = ps @ xs' \<and> ys = ps @ ys'
\<and> (xs' = [] \<or> ys' = [] \<or> hd xs' \<noteq> hd ys')"
by (induct xs ys rule: list_induct2')
(blast, blast, blast,
metis (no_types, opaque_lifting) append_Cons append_Nil list.sel(1))
text \<open>Trivial rules for solving \<open>@\<close>-equations automatically.\<close>
lemma eq_Nil_appendI: "xs = ys \<Longrightarrow> xs = [] @ ys"
by simp
lemma Cons_eq_appendI: "\<lbrakk>x # xs1 = ys; xs = xs1 @ zs\<rbrakk> \<Longrightarrow> x # xs = ys @ zs"
by auto
lemma append_eq_appendI: "\<lbrakk>xs @ xs1 = zs; ys = xs1 @ us\<rbrakk> \<Longrightarrow> xs @ ys = zs @ us"
by auto
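text \<open>
  Illustration only (not part of the original theory): used as introduction rules,
  these lemmas let such \<open>@\<close>-equations be decomposed by unification, e.g.
\<close>
lemma "a # xs @ ys = (a # xs) @ ys"
  by (intro Cons_eq_appendI refl)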
text \<open>
Simplification procedure for all list equalities.
Currently only tries to rearrange \<open>@\<close> to see if
- both lists end in a singleton list,
- or both lists end in the same list.
\<close>
simproc_setup list_eq ("(xs::'a list) = ys") = \<open>
let
fun last (cons as Const (\<^const_name>\<open>Cons\<close>, _) $ _ $ xs) =
(case xs of Const (\<^const_name>\<open>Nil\<close>, _) => cons | _ => last xs)
| last (Const(\<^const_name>\<open>append\<close>,_) $ _ $ ys) = last ys
| last t = t;
fun list1 (Const(\<^const_name>\<open>Cons\<close>,_) $ _ $ Const(\<^const_name>\<open>Nil\<close>,_)) = true
| list1 _ = false;
fun butlast ((cons as Const(\<^const_name>\<open>Cons\<close>,_) $ x) $ xs) =
(case xs of Const (\<^const_name>\<open>Nil\<close>, _) => xs | _ => cons $ butlast xs)
| butlast ((app as Const (\<^const_name>\<open>append\<close>, _) $ xs) $ ys) = app $ butlast ys
| butlast xs = Const(\<^const_name>\<open>Nil\<close>, fastype_of xs);
val rearr_ss =
simpset_of (put_simpset HOL_basic_ss \<^context>
addsimps [@{thm append_assoc}, @{thm append_Nil}, @{thm append_Cons}]);
fun list_eq ctxt (F as (eq as Const(_,eqT)) $ lhs $ rhs) =
let
val lastl = last lhs and lastr = last rhs;
fun rearr conv =
let
val lhs1 = butlast lhs and rhs1 = butlast rhs;
val Type(_,listT::_) = eqT
val appT = [listT,listT] ---> listT
val app = Const(\<^const_name>\<open>append\<close>,appT)
val F2 = eq $ (app$lhs1$lastl) $ (app$rhs1$lastr)
val eq = HOLogic.mk_Trueprop (HOLogic.mk_eq (F,F2));
val thm = Goal.prove ctxt [] [] eq
(K (simp_tac (put_simpset rearr_ss ctxt) 1));
in SOME ((conv RS (thm RS trans)) RS eq_reflection) end;
in
if list1 lastl andalso list1 lastr then rearr @{thm append1_eq_conv}
else if lastl aconv lastr then rearr @{thm append_same_eq}
else NONE
end;
- in fn _ => fn ctxt => fn ct => list_eq ctxt (Thm.term_of ct) end
+ in K (fn ctxt => fn ct => list_eq ctxt (Thm.term_of ct)) end
\<close>
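text \<open>
  Illustration only (not part of the original theory): equalities whose sides end in
  singleton lists or in a common suffix are decomposed by the simplifier via the
  \<open>[iff]\<close> rules above; the procedure \<open>list_eq\<close> extends this to occurrences of \<open>@\<close>
  that first have to be rearranged.
\<close>
lemma "(xs @ [a] = ys @ [b]) \<longleftrightarrow> (xs = ys \<and> a = b)"
  by simp
lemma "(us @ zs = vs @ zs) \<longleftrightarrow> us = vs"
  by simp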
subsubsection \<open>\<^const>\<open>map\<close>\<close>
lemma hd_map: "xs \<noteq> [] \<Longrightarrow> hd (map f xs) = f (hd xs)"
by (cases xs) simp_all
lemma map_tl: "map f (tl xs) = tl (map f xs)"
by (cases xs) simp_all
lemma map_ext: "(\<And>x. x \<in> set xs \<longrightarrow> f x = g x) \<Longrightarrow> map f xs = map g xs"
by (induct xs) simp_all
lemma map_ident [simp]: "map (\<lambda>x. x) = (\<lambda>xs. xs)"
by (rule ext, induct_tac xs) auto
lemma map_append [simp]: "map f (xs @ ys) = map f xs @ map f ys"
by (induct xs) auto
lemma map_map [simp]: "map f (map g xs) = map (f \<circ> g) xs"
by (induct xs) auto
lemma map_comp_map[simp]: "((map f) \<circ> (map g)) = map(f \<circ> g)"
by (rule ext) simp
lemma rev_map: "rev (map f xs) = map f (rev xs)"
by (induct xs) auto
lemma map_eq_conv[simp]: "(map f xs = map g xs) = (\<forall>x \<in> set xs. f x = g x)"
by (induct xs) auto
lemma map_cong [fundef_cong]:
"xs = ys \<Longrightarrow> (\<And>x. x \<in> set ys \<Longrightarrow> f x = g x) \<Longrightarrow> map f xs = map g ys"
by simp
lemma map_is_Nil_conv [iff]: "(map f xs = []) = (xs = [])"
by (rule list.map_disc_iff)
lemmas Nil_is_map_conv [iff] = map_is_Nil_conv[THEN eq_iff_swap]
lemma map_eq_Cons_conv:
"(map f xs = y#ys) = (\<exists>z zs. xs = z#zs \<and> f z = y \<and> map f zs = ys)"
by (cases xs) auto
lemma Cons_eq_map_conv:
"(x#xs = map f ys) = (\<exists>z zs. ys = z#zs \<and> x = f z \<and> xs = map f zs)"
by (cases ys) auto
lemmas map_eq_Cons_D = map_eq_Cons_conv [THEN iffD1]
lemmas Cons_eq_map_D = Cons_eq_map_conv [THEN iffD1]
declare map_eq_Cons_D [dest!] Cons_eq_map_D [dest!]
lemma ex_map_conv:
"(\<exists>xs. ys = map f xs) = (\<forall>y \<in> set ys. \<exists>x. y = f x)"
by(induct ys, auto simp add: Cons_eq_map_conv)
lemma map_eq_imp_length_eq:
assumes "map f xs = map g ys"
shows "length xs = length ys"
using assms
proof (induct ys arbitrary: xs)
case Nil then show ?case by simp
next
case (Cons y ys) then obtain z zs where xs: "xs = z # zs" by auto
from Cons xs have "map f zs = map g ys" by simp
with Cons have "length zs = length ys" by blast
with xs show ?case by simp
qed
lemma map_inj_on:
assumes map: "map f xs = map f ys" and inj: "inj_on f (set xs Un set ys)"
shows "xs = ys"
using map_eq_imp_length_eq [OF map] assms
proof (induct rule: list_induct2)
case (Cons x xs y ys)
then show ?case
by (auto intro: sym)
qed auto
lemma inj_on_map_eq_map:
"inj_on f (set xs Un set ys) \<Longrightarrow> (map f xs = map f ys) = (xs = ys)"
by(blast dest:map_inj_on)
lemma map_injective:
"map f xs = map f ys \<Longrightarrow> inj f \<Longrightarrow> xs = ys"
by (induct ys arbitrary: xs) (auto dest!:injD)
lemma inj_map_eq_map[simp]: "inj f \<Longrightarrow> (map f xs = map f ys) = (xs = ys)"
by(blast dest:map_injective)
lemma inj_mapI: "inj f \<Longrightarrow> inj (map f)"
by (iprover dest: map_injective injD intro: inj_onI)
lemma inj_mapD: "inj (map f) \<Longrightarrow> inj f"
by (metis (no_types, opaque_lifting) injI list.inject list.simps(9) the_inv_f_f)
lemma inj_map[iff]: "inj (map f) = inj f"
by (blast dest: inj_mapD intro: inj_mapI)
lemma inj_on_mapI: "inj_on f (\<Union>(set ` A)) \<Longrightarrow> inj_on (map f) A"
by (blast intro:inj_onI dest:inj_onD map_inj_on)
lemma map_idI: "(\<And>x. x \<in> set xs \<Longrightarrow> f x = x) \<Longrightarrow> map f xs = xs"
by (induct xs, auto)
lemma map_fun_upd [simp]: "y \<notin> set xs \<Longrightarrow> map (f(y:=v)) xs = map f xs"
by (induct xs) auto
lemma map_fst_zip[simp]:
"length xs = length ys \<Longrightarrow> map fst (zip xs ys) = xs"
by (induct rule:list_induct2, simp_all)
lemma map_snd_zip[simp]:
"length xs = length ys \<Longrightarrow> map snd (zip xs ys) = ys"
by (induct rule:list_induct2, simp_all)
lemma map_fst_zip_take:
"map fst (zip xs ys) = take (min (length xs) (length ys)) xs"
by (induct xs ys rule: list_induct2') simp_all
lemma map_snd_zip_take:
"map snd (zip xs ys) = take (min (length xs) (length ys)) ys"
by (induct xs ys rule: list_induct2') simp_all
lemma map2_map_map: "map2 h (map f xs) (map g xs) = map (\<lambda>x. h (f x) (g x)) xs"
by (induction xs) (auto)
functor map: map
by (simp_all add: id_def)
declare map.id [simp]
subsubsection \<open>\<^const>\<open>rev\<close>\<close>
lemma rev_append [simp]: "rev (xs @ ys) = rev ys @ rev xs"
by (induct xs) auto
lemma rev_rev_ident [simp]: "rev (rev xs) = xs"
by (induct xs) auto
lemma rev_swap: "(rev xs = ys) = (xs = rev ys)"
by auto
lemma rev_is_Nil_conv [iff]: "(rev xs = []) = (xs = [])"
by (induct xs) auto
lemmas Nil_is_rev_conv [iff] = rev_is_Nil_conv[THEN eq_iff_swap]
lemma rev_singleton_conv [simp]: "(rev xs = [x]) = (xs = [x])"
by (cases xs) auto
lemma singleton_rev_conv [simp]: "([x] = rev xs) = ([x] = xs)"
by (cases xs) auto
lemma rev_is_rev_conv [iff]: "(rev xs = rev ys) = (xs = ys)"
proof (induct xs arbitrary: ys)
case Nil
then show ?case by force
next
case Cons
then show ?case by (cases ys) auto
qed
lemma inj_on_rev[iff]: "inj_on rev A"
by(simp add:inj_on_def)
lemma rev_induct [case_names Nil snoc]:
assumes "P []" and "\<And>x xs. P xs \<Longrightarrow> P (xs @ [x])"
shows "P xs"
proof -
have "P (rev (rev xs))"
by (rule_tac list = "rev xs" in list.induct, simp_all add: assms)
then show ?thesis by simp
qed
lemma rev_exhaust [case_names Nil snoc]:
"(xs = [] \<Longrightarrow> P) \<Longrightarrow>(\<And>ys y. xs = ys @ [y] \<Longrightarrow> P) \<Longrightarrow> P"
by (induct xs rule: rev_induct) auto
lemmas rev_cases = rev_exhaust
lemma rev_nonempty_induct [consumes 1, case_names single snoc]:
assumes "xs \<noteq> []"
and single: "\<And>x. P [x]"
and snoc': "\<And>x xs. xs \<noteq> [] \<Longrightarrow> P xs \<Longrightarrow> P (xs@[x])"
shows "P xs"
using \<open>xs \<noteq> []\<close> proof (induct xs rule: rev_induct)
case (snoc x xs) then show ?case
proof (cases xs)
case Nil thus ?thesis by (simp add: single)
next
case Cons with snoc show ?thesis by (fastforce intro!: snoc')
qed
qed simp
lemma rev_eq_Cons_iff[iff]: "(rev xs = y#ys) = (xs = rev ys @ [y])"
by(rule rev_cases[of xs]) auto
lemma length_Suc_conv_rev: "(length xs = Suc n) = (\<exists>y ys. xs = ys @ [y] \<and> length ys = n)"
by (induct xs rule: rev_induct) auto
subsubsection \<open>\<^const>\<open>set\<close>\<close>
declare list.set[code_post] \<comment> \<open>pretty output\<close>
lemma finite_set [iff]: "finite (set xs)"
by (induct xs) auto
lemma set_append [simp]: "set (xs @ ys) = (set xs \<union> set ys)"
by (induct xs) auto
lemma hd_in_set[simp]: "xs \<noteq> [] \<Longrightarrow> hd xs \<in> set xs"
by(cases xs) auto
lemma set_subset_Cons: "set xs \<subseteq> set (x # xs)"
by auto
lemma set_ConsD: "y \<in> set (x # xs) \<Longrightarrow> y=x \<or> y \<in> set xs"
by auto
lemma set_empty [iff]: "(set xs = {}) = (xs = [])"
by (induct xs) auto
lemmas set_empty2[iff] = set_empty[THEN eq_iff_swap]
lemma set_rev [simp]: "set (rev xs) = set xs"
by (induct xs) auto
lemma set_map [simp]: "set (map f xs) = f`(set xs)"
by (induct xs) auto
lemma set_filter [simp]: "set (filter P xs) = {x. x \<in> set xs \<and> P x}"
by (induct xs) auto
lemma set_upt [simp]: "set[i..<j] = {i..<j}"
by (induct j) auto
lemma split_list: "x \<in> set xs \<Longrightarrow> \<exists>ys zs. xs = ys @ x # zs"
proof (induct xs)
case Nil thus ?case by simp
next
case Cons thus ?case by (auto intro: Cons_eq_appendI)
qed
lemma in_set_conv_decomp: "x \<in> set xs \<longleftrightarrow> (\<exists>ys zs. xs = ys @ x # zs)"
by (auto elim: split_list)
lemma split_list_first: "x \<in> set xs \<Longrightarrow> \<exists>ys zs. xs = ys @ x # zs \<and> x \<notin> set ys"
proof (induct xs)
case Nil thus ?case by simp
next
case (Cons a xs)
show ?case
proof cases
assume "x = a" thus ?case using Cons by fastforce
next
assume "x \<noteq> a" thus ?case using Cons by(fastforce intro!: Cons_eq_appendI)
qed
qed
lemma in_set_conv_decomp_first:
"(x \<in> set xs) = (\<exists>ys zs. xs = ys @ x # zs \<and> x \<notin> set ys)"
by (auto dest!: split_list_first)
lemma split_list_last: "x \<in> set xs \<Longrightarrow> \<exists>ys zs. xs = ys @ x # zs \<and> x \<notin> set zs"
proof (induct xs rule: rev_induct)
case Nil thus ?case by simp
next
case (snoc a xs)
show ?case
proof cases
assume "x = a" thus ?case using snoc by (auto intro!: exI)
next
assume "x \<noteq> a" thus ?case using snoc by fastforce
qed
qed
lemma in_set_conv_decomp_last:
"(x \<in> set xs) = (\<exists>ys zs. xs = ys @ x # zs \<and> x \<notin> set zs)"
by (auto dest!: split_list_last)
lemma split_list_prop: "\<exists>x \<in> set xs. P x \<Longrightarrow> \<exists>ys x zs. xs = ys @ x # zs \<and> P x"
proof (induct xs)
case Nil thus ?case by simp
next
case Cons thus ?case
by(simp add:Bex_def)(metis append_Cons append.simps(1))
qed
lemma split_list_propE:
assumes "\<exists>x \<in> set xs. P x"
obtains ys x zs where "xs = ys @ x # zs" and "P x"
using split_list_prop [OF assms] by blast
lemma split_list_first_prop:
"\<exists>x \<in> set xs. P x \<Longrightarrow>
\<exists>ys x zs. xs = ys@x#zs \<and> P x \<and> (\<forall>y \<in> set ys. \<not> P y)"
proof (induct xs)
case Nil thus ?case by simp
next
case (Cons x xs)
show ?case
proof cases
assume "P x"
hence "x # xs = [] @ x # xs \<and> P x \<and> (\<forall>y\<in>set []. \<not> P y)" by simp
thus ?thesis by fast
next
assume "\<not> P x"
hence "\<exists>x\<in>set xs. P x" using Cons(2) by simp
thus ?thesis using \<open>\<not> P x\<close> Cons(1) by (metis append_Cons set_ConsD)
qed
qed
lemma split_list_first_propE:
assumes "\<exists>x \<in> set xs. P x"
obtains ys x zs where "xs = ys @ x # zs" and "P x" and "\<forall>y \<in> set ys. \<not> P y"
using split_list_first_prop [OF assms] by blast
lemma split_list_first_prop_iff:
"(\<exists>x \<in> set xs. P x) \<longleftrightarrow>
(\<exists>ys x zs. xs = ys@x#zs \<and> P x \<and> (\<forall>y \<in> set ys. \<not> P y))"
by (rule, erule split_list_first_prop) auto
lemma split_list_last_prop:
"\<exists>x \<in> set xs. P x \<Longrightarrow>
\<exists>ys x zs. xs = ys@x#zs \<and> P x \<and> (\<forall>z \<in> set zs. \<not> P z)"
proof(induct xs rule:rev_induct)
case Nil thus ?case by simp
next
case (snoc x xs)
show ?case
proof cases
assume "P x" thus ?thesis by (auto intro!: exI)
next
assume "\<not> P x"
hence "\<exists>x\<in>set xs. P x" using snoc(2) by simp
thus ?thesis using \<open>\<not> P x\<close> snoc(1) by fastforce
qed
qed
lemma split_list_last_propE:
assumes "\<exists>x \<in> set xs. P x"
obtains ys x zs where "xs = ys @ x # zs" and "P x" and "\<forall>z \<in> set zs. \<not> P z"
using split_list_last_prop [OF assms] by blast
lemma split_list_last_prop_iff:
"(\<exists>x \<in> set xs. P x) \<longleftrightarrow>
(\<exists>ys x zs. xs = ys@x#zs \<and> P x \<and> (\<forall>z \<in> set zs. \<not> P z))"
by rule (erule split_list_last_prop, auto)
lemma finite_list: "finite A \<Longrightarrow> \<exists>xs. set xs = A"
by (erule finite_induct) (auto simp add: list.set(2)[symmetric] simp del: list.set(2))
lemma card_length: "card (set xs) \<le> length xs"
by (induct xs) (auto simp add: card_insert_if)
lemma set_minus_filter_out:
"set xs - {y} = set (filter (\<lambda>x. \<not> (x = y)) xs)"
by (induct xs) auto
lemma append_Cons_eq_iff:
"\<lbrakk> x \<notin> set xs; x \<notin> set ys \<rbrakk> \<Longrightarrow>
xs @ x # ys = xs' @ x # ys' \<longleftrightarrow> (xs = xs' \<and> ys = ys')"
by(auto simp: append_eq_Cons_conv Cons_eq_append_conv append_eq_append_conv2)
subsubsection \<open>\<^const>\<open>concat\<close>\<close>
lemma concat_append [simp]: "concat (xs @ ys) = concat xs @ concat ys"
by (induct xs) auto
lemma concat_eq_Nil_conv [simp]: "(concat xss = []) = (\<forall>xs \<in> set xss. xs = [])"
by (induct xss) auto
lemmas Nil_eq_concat_conv [simp] = concat_eq_Nil_conv[THEN eq_iff_swap]
lemma set_concat [simp]: "set (concat xs) = (\<Union>x\<in>set xs. set x)"
by (induct xs) auto
lemma concat_map_singleton[simp]: "concat(map (%x. [f x]) xs) = map f xs"
by (induct xs) auto
lemma map_concat: "map f (concat xs) = concat (map (map f) xs)"
by (induct xs) auto
lemma rev_concat: "rev (concat xs) = concat (map rev (rev xs))"
by (induct xs) auto
lemma length_concat_rev[simp]: "length (concat (rev xs)) = length (concat xs)"
by (induction xs) auto
lemma concat_eq_concat_iff: "\<forall>(x, y) \<in> set (zip xs ys). length x = length y \<Longrightarrow> length xs = length ys \<Longrightarrow> (concat xs = concat ys) = (xs = ys)"
proof (induct xs arbitrary: ys)
case (Cons x xs ys)
thus ?case by (cases ys) auto
qed (auto)
lemma concat_injective: "concat xs = concat ys \<Longrightarrow> length xs = length ys \<Longrightarrow> \<forall>(x, y) \<in> set (zip xs ys). length x = length y \<Longrightarrow> xs = ys"
by (simp add: concat_eq_concat_iff)
lemma concat_eq_appendD:
assumes "concat xss = ys @ zs" "xss \<noteq> []"
shows "\<exists>xss1 xs xs' xss2. xss = xss1 @ (xs @ xs') # xss2 \<and> ys = concat xss1 @ xs \<and> zs = xs' @ concat xss2"
using assms
proof(induction xss arbitrary: ys)
case (Cons xs xss)
from Cons.prems consider
us where "xs @ us = ys" "concat xss = us @ zs" |
us where "xs = ys @ us" "us @ concat xss = zs"
by(auto simp add: append_eq_append_conv2)
then show ?case
proof cases
case 1
then show ?thesis using Cons.IH[OF 1(2)]
by(cases xss)(auto intro: exI[where x="[]"], metis append.assoc append_Cons concat.simps(2))
qed(auto intro: exI[where x="[]"])
qed simp
lemma concat_eq_append_conv:
"concat xss = ys @ zs \<longleftrightarrow>
(if xss = [] then ys = [] \<and> zs = []
else \<exists>xss1 xs xs' xss2. xss = xss1 @ (xs @ xs') # xss2 \<and> ys = concat xss1 @ xs \<and> zs = xs' @ concat xss2)"
by(auto dest: concat_eq_appendD)
lemma hd_concat: "\<lbrakk>xs \<noteq> []; hd xs \<noteq> []\<rbrakk> \<Longrightarrow> hd (concat xs) = hd (hd xs)"
by (metis concat.simps(2) hd_Cons_tl hd_append2)
simproc_setup list_neq ("(xs::'a list) = ys") = \<open>
(*
Reduces xs=ys to False if xs and ys cannot be of the same length.
This is the case if the atomic sublists of one are a submultiset
of those of the other list and there are fewer Cons's in one than the other.
*)
let
fun len (Const(\<^const_name>\<open>Nil\<close>,_)) acc = acc
| len (Const(\<^const_name>\<open>Cons\<close>,_) $ _ $ xs) (ts,n) = len xs (ts,n+1)
| len (Const(\<^const_name>\<open>append\<close>,_) $ xs $ ys) acc = len xs (len ys acc)
| len (Const(\<^const_name>\<open>rev\<close>,_) $ xs) acc = len xs acc
| len (Const(\<^const_name>\<open>map\<close>,_) $ _ $ xs) acc = len xs acc
| len (Const(\<^const_name>\<open>concat\<close>,T) $ (Const(\<^const_name>\<open>rev\<close>,_) $ xss)) acc
= len (Const(\<^const_name>\<open>concat\<close>,T) $ xss) acc
| len t (ts,n) = (t::ts,n);
val ss = simpset_of \<^context>;
fun list_neq ctxt ct =
let
val (Const(_,eqT) $ lhs $ rhs) = Thm.term_of ct;
val (ls,m) = len lhs ([],0) and (rs,n) = len rhs ([],0);
fun prove_neq() =
let
val Type(_,listT::_) = eqT;
val size = HOLogic.size_const listT;
val eq_len = HOLogic.mk_eq (size $ lhs, size $ rhs);
val neq_len = HOLogic.mk_Trueprop (HOLogic.Not $ eq_len);
val thm = Goal.prove ctxt [] [] neq_len
(K (simp_tac (put_simpset ss ctxt) 1));
in SOME (thm RS @{thm neq_if_length_neq}) end
in
if m < n andalso submultiset (op aconv) (ls,rs) orelse
n < m andalso submultiset (op aconv) (rs,ls)
then prove_neq() else NONE
end;
in K list_neq end
\<close>
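text \<open>
  Illustration only (not part of the original theory): a disequality of the kind the
  procedure \<open>list_neq\<close> is meant to discharge, where the two sides cannot have the
  same length.
\<close>
lemma "xs @ x # ys \<noteq> xs @ ys"
  by simp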
subsubsection \<open>\<^const>\<open>filter\<close>\<close>
lemma filter_append [simp]: "filter P (xs @ ys) = filter P xs @ filter P ys"
by (induct xs) auto
lemma rev_filter: "rev (filter P xs) = filter P (rev xs)"
by (induct xs) simp_all
lemma filter_filter [simp]: "filter P (filter Q xs) = filter (\<lambda>x. Q x \<and> P x) xs"
by (induct xs) auto
lemma filter_concat: "filter p (concat xs) = concat (map (filter p) xs)"
by (induct xs) auto
lemma length_filter_le [simp]: "length (filter P xs) \<le> length xs"
by (induct xs) (auto simp add: le_SucI)
lemma sum_length_filter_compl:
"length(filter P xs) + length(filter (\<lambda>x. \<not>P x) xs) = length xs"
by(induct xs) simp_all
lemma filter_True [simp]: "\<forall>x \<in> set xs. P x \<Longrightarrow> filter P xs = xs"
by (induct xs) auto
lemma filter_False [simp]: "\<forall>x \<in> set xs. \<not> P x \<Longrightarrow> filter P xs = []"
by (induct xs) auto
lemma filter_empty_conv: "(filter P xs = []) = (\<forall>x\<in>set xs. \<not> P x)"
by (induct xs) simp_all
lemmas empty_filter_conv = filter_empty_conv[THEN eq_iff_swap]
lemma filter_id_conv: "(filter P xs = xs) = (\<forall>x\<in>set xs. P x)"
proof (induct xs)
case (Cons x xs)
then show ?case
using length_filter_le
by (simp add: impossible_Cons)
qed auto
lemma filter_map: "filter P (map f xs) = map f (filter (P \<circ> f) xs)"
by (induct xs) simp_all
lemma length_filter_map[simp]:
"length (filter P (map f xs)) = length(filter (P \<circ> f) xs)"
by (simp add:filter_map)
lemma filter_is_subset [simp]: "set (filter P xs) \<le> set xs"
by auto
lemma length_filter_less:
"\<lbrakk> x \<in> set xs; \<not> P x \<rbrakk> \<Longrightarrow> length(filter P xs) < length xs"
proof (induct xs)
case Nil thus ?case by simp
next
case (Cons x xs) thus ?case
using Suc_le_eq by fastforce
qed
lemma length_filter_conv_card:
"length(filter p xs) = card{i. i < length xs \<and> p(xs!i)}"
proof (induct xs)
case Nil thus ?case by simp
next
case (Cons x xs)
let ?S = "{i. i < length xs \<and> p(xs!i)}"
have fin: "finite ?S" by(fast intro: bounded_nat_set_is_finite)
show ?case (is "?l = card ?S'")
proof (cases)
assume "p x"
hence eq: "?S' = insert 0 (Suc ` ?S)"
by(auto simp: image_def split:nat.split dest:gr0_implies_Suc)
have "length (filter p (x # xs)) = Suc(card ?S)"
using Cons \<open>p x\<close> by simp
also have "\<dots> = Suc(card(Suc ` ?S))" using fin
by (simp add: card_image)
also have "\<dots> = card ?S'" using eq fin
by (simp add:card_insert_if)
finally show ?thesis .
next
assume "\<not> p x"
hence eq: "?S' = Suc ` ?S"
by(auto simp add: image_def split:nat.split elim:lessE)
have "length (filter p (x # xs)) = card ?S"
using Cons \<open>\<not> p x\<close> by simp
also have "\<dots> = card(Suc ` ?S)" using fin
by (simp add: card_image)
also have "\<dots> = card ?S'" using eq fin
by (simp add:card_insert_if)
finally show ?thesis .
qed
qed
lemma Cons_eq_filterD:
"x#xs = filter P ys \<Longrightarrow>
\<exists>us vs. ys = us @ x # vs \<and> (\<forall>u\<in>set us. \<not> P u) \<and> P x \<and> xs = filter P vs"
(is "_ \<Longrightarrow> \<exists>us vs. ?P ys us vs")
proof(induct ys)
case Nil thus ?case by simp
next
case (Cons y ys)
show ?case (is "\<exists>x. ?Q x")
proof cases
assume Py: "P y"
show ?thesis
proof cases
assume "x = y"
with Py Cons.prems have "?Q []" by simp
then show ?thesis ..
next
assume "x \<noteq> y"
with Py Cons.prems show ?thesis by simp
qed
next
assume "\<not> P y"
with Cons obtain us vs where "?P (y#ys) (y#us) vs" by fastforce
then have "?Q (y#us)" by simp
then show ?thesis ..
qed
qed
lemma filter_eq_ConsD:
"filter P ys = x#xs \<Longrightarrow>
\<exists>us vs. ys = us @ x # vs \<and> (\<forall>u\<in>set us. \<not> P u) \<and> P x \<and> xs = filter P vs"
by(rule Cons_eq_filterD) simp
lemma filter_eq_Cons_iff:
"(filter P ys = x#xs) =
(\<exists>us vs. ys = us @ x # vs \<and> (\<forall>u\<in>set us. \<not> P u) \<and> P x \<and> xs = filter P vs)"
by(auto dest:filter_eq_ConsD)
lemmas Cons_eq_filter_iff = filter_eq_Cons_iff[THEN eq_iff_swap]
lemma inj_on_filter_key_eq:
assumes "inj_on f (insert y (set xs))"
shows "filter (\<lambda>x. f y = f x) xs = filter (HOL.eq y) xs"
using assms by (induct xs) auto
lemma filter_cong[fundef_cong]:
"xs = ys \<Longrightarrow> (\<And>x. x \<in> set ys \<Longrightarrow> P x = Q x) \<Longrightarrow> filter P xs = filter Q ys"
by (induct ys arbitrary: xs) auto
subsubsection \<open>List partitioning\<close>
primrec partition :: "('a \<Rightarrow> bool) \<Rightarrow>'a list \<Rightarrow> 'a list \<times> 'a list" where
"partition P [] = ([], [])" |
"partition P (x # xs) =
(let (yes, no) = partition P xs
in if P x then (x # yes, no) else (yes, x # no))"
lemma partition_filter1: "fst (partition P xs) = filter P xs"
by (induct xs) (auto simp add: Let_def split_def)
lemma partition_filter2: "snd (partition P xs) = filter (Not \<circ> P) xs"
by (induct xs) (auto simp add: Let_def split_def)
lemma partition_P:
assumes "partition P xs = (yes, no)"
shows "(\<forall>p \<in> set yes. P p) \<and> (\<forall>p \<in> set no. \<not> P p)"
proof -
from assms have "yes = fst (partition P xs)" and "no = snd (partition P xs)"
by simp_all
then show ?thesis by (simp_all add: partition_filter1 partition_filter2)
qed
lemma partition_set:
assumes "partition P xs = (yes, no)"
shows "set yes \<union> set no = set xs"
proof -
from assms have "yes = fst (partition P xs)" and "no = snd (partition P xs)"
by simp_all
then show ?thesis by (auto simp add: partition_filter1 partition_filter2)
qed
lemma partition_filter_conv[simp]:
"partition f xs = (filter f xs,filter (Not \<circ> f) xs)"
unfolding partition_filter2[symmetric]
unfolding partition_filter1[symmetric] by simp
declare partition.simps[simp del]
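text \<open>
  Illustration only (not part of the original theory): by \<open>partition_filter_conv\<close>,
  \<open>partition\<close> splits a list into the elements satisfying the predicate and the rest.
\<close>
lemma "partition (\<lambda>n::nat. n < 2) [0, 1, 2, 3] = ([0, 1], [2, 3])"
  by simp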
subsubsection \<open>\<^const>\<open>nth\<close>\<close>
lemma nth_Cons_0 [simp, code]: "(x # xs)!0 = x"
by auto
lemma nth_Cons_Suc [simp, code]: "(x # xs)!(Suc n) = xs!n"
by auto
declare nth.simps [simp del]
lemma nth_Cons_pos[simp]: "0 < n \<Longrightarrow> (x#xs) ! n = xs ! (n - 1)"
by(auto simp: Nat.gr0_conv_Suc)
lemma nth_append:
"(xs @ ys)!n = (if n < length xs then xs!n else ys!(n - length xs))"
proof (induct xs arbitrary: n)
case (Cons x xs)
then show ?case
using less_Suc_eq_0_disj by auto
qed simp
lemma nth_append_length [simp]: "(xs @ x # ys) ! length xs = x"
by (induct xs) auto
lemma nth_append_length_plus[simp]: "(xs @ ys) ! (length xs + n) = ys ! n"
by (induct xs) auto
lemma nth_map [simp]: "n < length xs \<Longrightarrow> (map f xs)!n = f(xs!n)"
proof (induct xs arbitrary: n)
case (Cons x xs)
then show ?case
using less_Suc_eq_0_disj by auto
qed simp
lemma nth_tl: "n < length (tl xs) \<Longrightarrow> tl xs ! n = xs ! Suc n"
by (induction xs) auto
lemma hd_conv_nth: "xs \<noteq> [] \<Longrightarrow> hd xs = xs!0"
by(cases xs) simp_all
lemma list_eq_iff_nth_eq:
"(xs = ys) = (length xs = length ys \<and> (\<forall>i<length xs. xs!i = ys!i))"
proof (induct xs arbitrary: ys)
case (Cons x xs ys)
show ?case
proof (cases ys)
case (Cons y ys)
with Cons.hyps show ?thesis by fastforce
qed simp
qed force
lemma map_equality_iff:
"map f xs = map g ys \<longleftrightarrow> length xs = length ys \<and> (\<forall>i<length ys. f (xs!i) = g (ys!i))"
by (fastforce simp: list_eq_iff_nth_eq)
lemma set_conv_nth: "set xs = {xs!i | i. i < length xs}"
proof (induct xs)
case (Cons x xs)
have "insert x {xs ! i |i. i < length xs} = {(x # xs) ! i |i. i < Suc (length xs)}" (is "?L=?R")
proof
show "?L \<subseteq> ?R"
by force
show "?R \<subseteq> ?L"
using less_Suc_eq_0_disj by auto
qed
with Cons show ?case
by simp
qed simp
lemma in_set_conv_nth: "(x \<in> set xs) = (\<exists>i < length xs. xs!i = x)"
by(auto simp:set_conv_nth)
lemma nth_equal_first_eq:
assumes "x \<notin> set xs"
assumes "n \<le> length xs"
shows "(x # xs) ! n = x \<longleftrightarrow> n = 0" (is "?lhs \<longleftrightarrow> ?rhs")
proof
assume ?lhs
show ?rhs
proof (rule ccontr)
assume "n \<noteq> 0"
then have "n > 0" by simp
with \<open>?lhs\<close> have "xs ! (n - 1) = x" by simp
moreover from \<open>n > 0\<close> \<open>n \<le> length xs\<close> have "n - 1 < length xs" by simp
ultimately have "\<exists>i<length xs. xs ! i = x" by auto
with \<open>x \<notin> set xs\<close> in_set_conv_nth [of x xs] show False by simp
qed
next
assume ?rhs then show ?lhs by simp
qed
lemma nth_non_equal_first_eq:
assumes "x \<noteq> y"
shows "(x # xs) ! n = y \<longleftrightarrow> xs ! (n - 1) = y \<and> n > 0" (is "?lhs \<longleftrightarrow> ?rhs")
proof
assume "?lhs" with assms have "n > 0" by (cases n) simp_all
with \<open>?lhs\<close> show ?rhs by simp
next
assume "?rhs" then show "?lhs" by simp
qed
lemma list_ball_nth: "\<lbrakk>n < length xs; \<forall>x \<in> set xs. P x\<rbrakk> \<Longrightarrow> P(xs!n)"
by (auto simp add: set_conv_nth)
lemma nth_mem [simp]: "n < length xs \<Longrightarrow> xs!n \<in> set xs"
by (auto simp add: set_conv_nth)
lemma all_nth_imp_all_set:
"\<lbrakk>\<forall>i < length xs. P(xs!i); x \<in> set xs\<rbrakk> \<Longrightarrow> P x"
by (auto simp add: set_conv_nth)
lemma all_set_conv_all_nth:
"(\<forall>x \<in> set xs. P x) = (\<forall>i. i < length xs \<longrightarrow> P (xs ! i))"
by (auto simp add: set_conv_nth)
lemma rev_nth:
"n < size xs \<Longrightarrow> rev xs ! n = xs ! (length xs - Suc n)"
proof (induct xs arbitrary: n)
case Nil thus ?case by simp
next
case (Cons x xs)
hence n: "n < Suc (length xs)" by simp
moreover
{ assume "n < length xs"
with n obtain n' where n': "length xs - n = Suc n'"
by (cases "length xs - n", auto)
moreover
from n' have "length xs - Suc n = n'" by simp
ultimately
have "xs ! (length xs - Suc n) = (x # xs) ! (length xs - n)" by simp
}
ultimately
show ?case by (clarsimp simp add: Cons nth_append)
qed
lemma Skolem_list_nth:
"(\<forall>i<k. \<exists>x. P i x) = (\<exists>xs. size xs = k \<and> (\<forall>i<k. P i (xs!i)))"
(is "_ = (\<exists>xs. ?P k xs)")
proof(induct k)
case 0 show ?case by simp
next
case (Suc k)
show ?case (is "?L = ?R" is "_ = (\<exists>xs. ?P' xs)")
proof
assume "?R" thus "?L" using Suc by auto
next
assume "?L"
with Suc obtain x xs where "?P k xs \<and> P k x" by (metis less_Suc_eq)
hence "?P'(xs@[x])" by(simp add:nth_append less_Suc_eq)
thus "?R" ..
qed
qed
subsubsection \<open>\<^const>\<open>list_update\<close>\<close>
lemma length_list_update [simp]: "length(xs[i:=x]) = length xs"
by (induct xs arbitrary: i) (auto split: nat.split)
lemma nth_list_update:
"i < length xs\<Longrightarrow> (xs[i:=x])!j = (if i = j then x else xs!j)"
by (induct xs arbitrary: i j) (auto simp add: nth_Cons split: nat.split)
lemma nth_list_update_eq [simp]: "i < length xs \<Longrightarrow> (xs[i:=x])!i = x"
by (simp add: nth_list_update)
lemma nth_list_update_neq [simp]: "i \<noteq> j \<Longrightarrow> xs[i:=x]!j = xs!j"
by (induct xs arbitrary: i j) (auto simp add: nth_Cons split: nat.split)
lemma list_update_id[simp]: "xs[i := xs!i] = xs"
by (induct xs arbitrary: i) (simp_all split:nat.splits)
lemma list_update_beyond[simp]: "length xs \<le> i \<Longrightarrow> xs[i:=x] = xs"
proof (induct xs arbitrary: i)
case (Cons x xs i)
then show ?case
by (metis leD length_list_update list_eq_iff_nth_eq nth_list_update_neq)
qed simp
lemma list_update_nonempty[simp]: "xs[k:=x] = [] \<longleftrightarrow> xs=[]"
by (simp only: length_0_conv[symmetric] length_list_update)
lemma list_update_same_conv:
"i < length xs \<Longrightarrow> (xs[i := x] = xs) = (xs!i = x)"
by (induct xs arbitrary: i) (auto split: nat.split)
lemma list_update_append1:
"i < size xs \<Longrightarrow> (xs @ ys)[i:=x] = xs[i:=x] @ ys"
by (induct xs arbitrary: i)(auto split:nat.split)
lemma list_update_append:
"(xs @ ys) [n:= x] =
(if n < length xs then xs[n:= x] @ ys else xs @ (ys [n-length xs:= x]))"
by (induct xs arbitrary: n) (auto split:nat.splits)
lemma list_update_length [simp]:
"(xs @ x # ys)[length xs := y] = (xs @ y # ys)"
by (induct xs, auto)
lemma map_update: "map f (xs[k:= y]) = (map f xs)[k := f y]"
by(induct xs arbitrary: k)(auto split:nat.splits)
lemma rev_update:
"k < length xs \<Longrightarrow> rev (xs[k:= y]) = (rev xs)[length xs - k - 1 := y]"
by (induct xs arbitrary: k) (auto simp: list_update_append split:nat.splits)
lemma update_zip:
"(zip xs ys)[i:=xy] = zip (xs[i:=fst xy]) (ys[i:=snd xy])"
by (induct ys arbitrary: i xy xs) (auto, case_tac xs, auto split: nat.split)
lemma set_update_subset_insert: "set(xs[i:=x]) \<le> insert x (set xs)"
by (induct xs arbitrary: i) (auto split: nat.split)
lemma set_update_subsetI: "\<lbrakk>set xs \<subseteq> A; x \<in> A\<rbrakk> \<Longrightarrow> set(xs[i := x]) \<subseteq> A"
by (blast dest!: set_update_subset_insert [THEN subsetD])
lemma set_update_memI: "n < length xs \<Longrightarrow> x \<in> set (xs[n := x])"
by (induct xs arbitrary: n) (auto split:nat.splits)
lemma list_update_overwrite[simp]:
"xs [i := x, i := y] = xs [i := y]"
by (induct xs arbitrary: i) (simp_all split: nat.split)
lemma list_update_swap:
"i \<noteq> i' \<Longrightarrow> xs [i := x, i' := x'] = xs [i' := x', i := x]"
by (induct xs arbitrary: i i') (simp_all split: nat.split)
lemma list_update_code [code]:
"[][i := y] = []"
"(x # xs)[0 := y] = y # xs"
"(x # xs)[Suc i := y] = x # xs[i := y]"
by simp_all
subsubsection \<open>\<^const>\<open>last\<close> and \<^const>\<open>butlast\<close>\<close>
lemma hd_Nil_eq_last: "hd Nil = last Nil"
unfolding hd_def last_def by simp
lemma last_snoc [simp]: "last (xs @ [x]) = x"
by (induct xs) auto
lemma butlast_snoc [simp]: "butlast (xs @ [x]) = xs"
by (induct xs) auto
lemma last_ConsL: "xs = [] \<Longrightarrow> last(x#xs) = x"
by simp
lemma last_ConsR: "xs \<noteq> [] \<Longrightarrow> last(x#xs) = last xs"
by simp
lemma last_append: "last(xs @ ys) = (if ys = [] then last xs else last ys)"
by (induct xs) (auto)
lemma last_appendL[simp]: "ys = [] \<Longrightarrow> last(xs @ ys) = last xs"
by(simp add:last_append)
lemma last_appendR[simp]: "ys \<noteq> [] \<Longrightarrow> last(xs @ ys) = last ys"
by(simp add:last_append)
lemma last_tl: "xs = [] \<or> tl xs \<noteq> [] \<Longrightarrow>last (tl xs) = last xs"
by (induct xs) simp_all
lemma butlast_tl: "butlast (tl xs) = tl (butlast xs)"
by (induct xs) simp_all
lemma hd_rev: "hd(rev xs) = last xs"
by (metis hd_Cons_tl hd_Nil_eq_last last_snoc rev_eq_Cons_iff rev_is_Nil_conv)
lemma last_rev: "last(rev xs) = hd xs"
by (metis hd_rev rev_swap)
lemma last_in_set[simp]: "as \<noteq> [] \<Longrightarrow> last as \<in> set as"
by (induct as) auto
lemma length_butlast [simp]: "length (butlast xs) = length xs - 1"
by (induct xs rule: rev_induct) auto
lemma butlast_append:
"butlast (xs @ ys) = (if ys = [] then butlast xs else xs @ butlast ys)"
by (induct xs arbitrary: ys) auto
lemma append_butlast_last_id [simp]:
"xs \<noteq> [] \<Longrightarrow> butlast xs @ [last xs] = xs"
by (induct xs) auto
lemma in_set_butlastD: "x \<in> set (butlast xs) \<Longrightarrow> x \<in> set xs"
by (induct xs) (auto split: if_split_asm)
lemma in_set_butlast_appendI:
"x \<in> set (butlast xs) \<or> x \<in> set (butlast ys) \<Longrightarrow> x \<in> set (butlast (xs @ ys))"
by (auto dest: in_set_butlastD simp add: butlast_append)
lemma last_drop[simp]: "n < length xs \<Longrightarrow> last (drop n xs) = last xs"
by (induct xs arbitrary: n)(auto split:nat.split)
lemma nth_butlast:
assumes "n < length (butlast xs)" shows "butlast xs ! n = xs ! n"
proof (cases xs)
case (Cons y ys)
moreover from assms have "butlast xs ! n = (butlast xs @ [last xs]) ! n"
by (simp add: nth_append)
ultimately show ?thesis using append_butlast_last_id by simp
qed simp
lemma last_conv_nth: "xs\<noteq>[] \<Longrightarrow> last xs = xs!(length xs - 1)"
by(induct xs)(auto simp:neq_Nil_conv)
lemma butlast_conv_take: "butlast xs = take (length xs - 1) xs"
by (induction xs rule: induct_list012) simp_all
lemma last_list_update:
"xs \<noteq> [] \<Longrightarrow> last(xs[k:=x]) = (if k = size xs - 1 then x else last xs)"
by (auto simp: last_conv_nth)
lemma butlast_list_update:
"butlast(xs[k:=x]) =
(if k = size xs - 1 then butlast xs else (butlast xs)[k:=x])"
by(cases xs rule:rev_cases)(auto simp: list_update_append split: nat.splits)
lemma last_map: "xs \<noteq> [] \<Longrightarrow> last (map f xs) = f (last xs)"
by (cases xs rule: rev_cases) simp_all
lemma map_butlast: "map f (butlast xs) = butlast (map f xs)"
by (induct xs) simp_all
lemma snoc_eq_iff_butlast:
"xs @ [x] = ys \<longleftrightarrow> (ys \<noteq> [] \<and> butlast ys = xs \<and> last ys = x)"
by fastforce
corollary longest_common_suffix:
"\<exists>ss xs' ys'. xs = xs' @ ss \<and> ys = ys' @ ss
\<and> (xs' = [] \<or> ys' = [] \<or> last xs' \<noteq> last ys')"
using longest_common_prefix[of "rev xs" "rev ys"]
unfolding rev_swap rev_append by (metis last_rev rev_is_Nil_conv)
lemma butlast_rev [simp]: "butlast (rev xs) = rev (tl xs)"
by (cases xs) simp_all
subsubsection \<open>\<^const>\<open>take\<close> and \<^const>\<open>drop\<close>\<close>
lemma take_0: "take 0 xs = []"
by (induct xs) auto
lemma drop_0: "drop 0 xs = xs"
by (induct xs) auto
lemma take0[simp]: "take 0 = (\<lambda>xs. [])"
by(rule ext) (rule take_0)
lemma drop0[simp]: "drop 0 = (\<lambda>x. x)"
by(rule ext) (rule drop_0)
lemma take_Suc_Cons [simp]: "take (Suc n) (x # xs) = x # take n xs"
by simp
lemma drop_Suc_Cons [simp]: "drop (Suc n) (x # xs) = drop n xs"
by simp
declare take_Cons [simp del] and drop_Cons [simp del]
lemma take_Suc: "xs \<noteq> [] \<Longrightarrow> take (Suc n) xs = hd xs # take n (tl xs)"
by(clarsimp simp add:neq_Nil_conv)
lemma drop_Suc: "drop (Suc n) xs = drop n (tl xs)"
by(cases xs, simp_all)
lemma hd_take[simp]: "j > 0 \<Longrightarrow> hd (take j xs) = hd xs"
by (metis gr0_conv_Suc list.sel(1) take.simps(1) take_Suc)
lemma take_tl: "take n (tl xs) = tl (take (Suc n) xs)"
by (induct xs arbitrary: n) simp_all
lemma drop_tl: "drop n (tl xs) = tl(drop n xs)"
by(induct xs arbitrary: n, simp_all add:drop_Cons drop_Suc split:nat.split)
lemma tl_take: "tl (take n xs) = take (n - 1) (tl xs)"
by (cases n, simp, cases xs, auto)
lemma tl_drop: "tl (drop n xs) = drop n (tl xs)"
by (simp only: drop_tl)
lemma nth_via_drop: "drop n xs = y#ys \<Longrightarrow> xs!n = y"
by (induct xs arbitrary: n, simp)(auto simp: drop_Cons nth_Cons split: nat.splits)
lemma take_Suc_conv_app_nth:
"i < length xs \<Longrightarrow> take (Suc i) xs = take i xs @ [xs!i]"
proof (induct xs arbitrary: i)
case Nil
then show ?case by simp
next
case Cons
then show ?case by (cases i) auto
qed
lemma Cons_nth_drop_Suc:
"i < length xs \<Longrightarrow> (xs!i) # (drop (Suc i) xs) = drop i xs"
proof (induct xs arbitrary: i)
case Nil
then show ?case by simp
next
case Cons
then show ?case by (cases i) auto
qed
lemma length_take [simp]: "length (take n xs) = min (length xs) n"
by (induct n arbitrary: xs) (auto, case_tac xs, auto)
lemma length_drop [simp]: "length (drop n xs) = (length xs - n)"
by (induct n arbitrary: xs) (auto, case_tac xs, auto)
lemma take_all [simp]: "length xs \<le> n \<Longrightarrow> take n xs = xs"
by (induct n arbitrary: xs) (auto, case_tac xs, auto)
lemma drop_all [simp]: "length xs \<le> n \<Longrightarrow> drop n xs = []"
by (induct n arbitrary: xs) (auto, case_tac xs, auto)
lemma take_all_iff [simp]: "take n xs = xs \<longleftrightarrow> length xs \<le> n"
by (metis length_take min.order_iff take_all)
(* Looks like a good simp rule but can cause looping;
too much interaction between take and length
lemmas take_all_iff2[simp] = take_all_iff[THEN eq_iff_swap]
*)
lemma take_eq_Nil[simp]: "(take n xs = []) = (n = 0 \<or> xs = [])"
by(induct xs arbitrary: n)(auto simp: take_Cons split:nat.split)
lemmas take_eq_Nil2[simp] = take_eq_Nil[THEN eq_iff_swap]
lemma drop_eq_Nil [simp]: "drop n xs = [] \<longleftrightarrow> length xs \<le> n"
by (metis diff_is_0_eq drop_all length_drop list.size(3))
lemmas drop_eq_Nil2 [simp] = drop_eq_Nil[THEN eq_iff_swap]
lemma take_append [simp]:
"take n (xs @ ys) = (take n xs @ take (n - length xs) ys)"
by (induct n arbitrary: xs) (auto, case_tac xs, auto)
lemma drop_append [simp]:
"drop n (xs @ ys) = drop n xs @ drop (n - length xs) ys"
by (induct n arbitrary: xs) (auto, case_tac xs, auto)
lemma take_take [simp]: "take n (take m xs) = take (min n m) xs"
proof (induct m arbitrary: xs n)
case 0
then show ?case by simp
next
case Suc
then show ?case by (cases xs; cases n) simp_all
qed
lemma drop_drop [simp]: "drop n (drop m xs) = drop (n + m) xs"
proof (induct m arbitrary: xs)
case 0
then show ?case by simp
next
case Suc
then show ?case by (cases xs) simp_all
qed
lemma take_drop: "take n (drop m xs) = drop m (take (n + m) xs)"
proof (induct m arbitrary: xs n)
case 0
then show ?case by simp
next
case Suc
then show ?case by (cases xs; cases n) simp_all
qed
lemma drop_take: "drop n (take m xs) = take (m-n) (drop n xs)"
by(induct xs arbitrary: m n)(auto simp: take_Cons drop_Cons split: nat.split)
lemma append_take_drop_id [simp]: "take n xs @ drop n xs = xs"
proof (induct n arbitrary: xs)
case 0
then show ?case by simp
next
case Suc
then show ?case by (cases xs) simp_all
qed
lemma take_map: "take n (map f xs) = map f (take n xs)"
proof (induct n arbitrary: xs)
case 0
then show ?case by simp
next
case Suc
then show ?case by (cases xs) simp_all
qed
lemma drop_map: "drop n (map f xs) = map f (drop n xs)"
proof (induct n arbitrary: xs)
case 0
then show ?case by simp
next
case Suc
then show ?case by (cases xs) simp_all
qed
lemma rev_take: "rev (take i xs) = drop (length xs - i) (rev xs)"
proof (induct xs arbitrary: i)
case Nil
then show ?case by simp
next
case Cons
then show ?case by (cases i) auto
qed
lemma rev_drop: "rev (drop i xs) = take (length xs - i) (rev xs)"
proof (induct xs arbitrary: i)
case Nil
then show ?case by simp
next
case Cons
then show ?case by (cases i) auto
qed
lemma drop_rev: "drop n (rev xs) = rev (take (length xs - n) xs)"
by (cases "length xs < n") (auto simp: rev_take)
lemma take_rev: "take n (rev xs) = rev (drop (length xs - n) xs)"
by (cases "length xs < n") (auto simp: rev_drop)
lemma nth_take [simp]: "i < n \<Longrightarrow> (take n xs)!i = xs!i"
proof (induct xs arbitrary: i n)
case Nil
then show ?case by simp
next
case Cons
then show ?case by (cases n; cases i) simp_all
qed
lemma nth_drop [simp]:
"n \<le> length xs \<Longrightarrow> (drop n xs)!i = xs!(n + i)"
proof (induct n arbitrary: xs)
case 0
then show ?case by simp
next
case Suc
then show ?case by (cases xs) simp_all
qed
lemma butlast_take:
"n \<le> length xs \<Longrightarrow> butlast (take n xs) = take (n - 1) xs"
by (simp add: butlast_conv_take)
lemma butlast_drop: "butlast (drop n xs) = drop n (butlast xs)"
by (simp add: butlast_conv_take drop_take ac_simps)
lemma take_butlast: "n < length xs \<Longrightarrow> take n (butlast xs) = take n xs"
by (simp add: butlast_conv_take)
lemma drop_butlast: "drop n (butlast xs) = butlast (drop n xs)"
by (simp add: butlast_conv_take drop_take ac_simps)
lemma butlast_power: "(butlast ^^ n) xs = take (length xs - n) xs"
by (induct n) (auto simp: butlast_take)
lemma hd_drop_conv_nth: "n < length xs \<Longrightarrow> hd(drop n xs) = xs!n"
by(simp add: hd_conv_nth)
lemma set_take_subset_set_take:
"m \<le> n \<Longrightarrow> set(take m xs) \<le> set(take n xs)"
proof (induct xs arbitrary: m n)
case (Cons x xs m n) then show ?case
by (cases n) (auto simp: take_Cons)
qed simp
lemma set_take_subset: "set(take n xs) \<subseteq> set xs"
by(induct xs arbitrary: n)(auto simp:take_Cons split:nat.split)
lemma set_drop_subset: "set(drop n xs) \<subseteq> set xs"
by(induct xs arbitrary: n)(auto simp:drop_Cons split:nat.split)
lemma set_drop_subset_set_drop:
"m \<ge> n \<Longrightarrow> set(drop m xs) \<le> set(drop n xs)"
proof (induct xs arbitrary: m n)
case (Cons x xs m n)
then show ?case
by (clarsimp simp: drop_Cons split: nat.split) (metis set_drop_subset subset_iff)
qed simp
lemma in_set_takeD: "x \<in> set(take n xs) \<Longrightarrow> x \<in> set xs"
using set_take_subset by fast
lemma in_set_dropD: "x \<in> set(drop n xs) \<Longrightarrow> x \<in> set xs"
using set_drop_subset by fast
lemma append_eq_conv_conj:
"(xs @ ys = zs) = (xs = take (length xs) zs \<and> ys = drop (length xs) zs)"
proof (induct xs arbitrary: zs)
case (Cons x xs zs) then show ?case
by (cases zs, auto)
qed auto
lemma map_eq_append_conv:
"map f xs = ys @ zs \<longleftrightarrow> (\<exists>us vs. xs = us @ vs \<and> ys = map f us \<and> zs = map f vs)"
proof -
have "map f xs \<noteq> ys @ zs \<and> map f xs \<noteq> ys @ zs \<or> map f xs \<noteq> ys @ zs \<or> map f xs = ys @ zs \<and>
(\<exists>bs bsa. xs = bs @ bsa \<and> ys = map f bs \<and> zs = map f bsa)"
by (metis append_eq_conv_conj append_take_drop_id drop_map take_map)
then show ?thesis
using map_append by blast
qed
lemmas append_eq_map_conv = map_eq_append_conv[THEN eq_iff_swap]
lemma take_add: "take (i+j) xs = take i xs @ take j (drop i xs)"
proof (induct xs arbitrary: i)
case (Cons x xs i) then show ?case
by (cases i, auto)
qed auto
lemma append_eq_append_conv_if:
"(xs\<^sub>1 @ xs\<^sub>2 = ys\<^sub>1 @ ys\<^sub>2) =
(if size xs\<^sub>1 \<le> size ys\<^sub>1
then xs\<^sub>1 = take (size xs\<^sub>1) ys\<^sub>1 \<and> xs\<^sub>2 = drop (size xs\<^sub>1) ys\<^sub>1 @ ys\<^sub>2
else take (size ys\<^sub>1) xs\<^sub>1 = ys\<^sub>1 \<and> drop (size ys\<^sub>1) xs\<^sub>1 @ xs\<^sub>2 = ys\<^sub>2)"
proof (induct xs\<^sub>1 arbitrary: ys\<^sub>1)
case (Cons a xs\<^sub>1 ys\<^sub>1) then show ?case
by (cases ys\<^sub>1, auto)
qed auto
lemma take_hd_drop:
"n < length xs \<Longrightarrow> take n xs @ [hd (drop n xs)] = take (Suc n) xs"
by (induct xs arbitrary: n) (simp_all add:drop_Cons split:nat.split)
lemma id_take_nth_drop:
"i < length xs \<Longrightarrow> xs = take i xs @ xs!i # drop (Suc i) xs"
proof -
assume si: "i < length xs"
hence "xs = take (Suc i) xs @ drop (Suc i) xs" by auto
moreover
from si have "take (Suc i) xs = take i xs @ [xs!i]"
using take_Suc_conv_app_nth by blast
ultimately show ?thesis by auto
qed
lemma take_update_cancel[simp]: "n \<le> m \<Longrightarrow> take n (xs[m := y]) = take n xs"
by(simp add: list_eq_iff_nth_eq)
lemma drop_update_cancel[simp]: "n < m \<Longrightarrow> drop m (xs[n := x]) = drop m xs"
by(simp add: list_eq_iff_nth_eq)
lemma upd_conv_take_nth_drop:
"i < length xs \<Longrightarrow> xs[i:=a] = take i xs @ a # drop (Suc i) xs"
proof -
assume i: "i < length xs"
have "xs[i:=a] = (take i xs @ xs!i # drop (Suc i) xs)[i:=a]"
by(rule arg_cong[OF id_take_nth_drop[OF i]])
also have "\<dots> = take i xs @ a # drop (Suc i) xs"
using i by (simp add: list_update_append)
finally show ?thesis .
qed
lemma take_update_swap: "take m (xs[n := x]) = (take m xs)[n := x]"
proof (cases "n \<ge> length xs")
case False
then show ?thesis
by (simp add: upd_conv_take_nth_drop take_Cons drop_take min_def diff_Suc split: nat.split)
qed auto
lemma drop_update_swap:
assumes "m \<le> n" shows "drop m (xs[n := x]) = (drop m xs)[n-m := x]"
proof (cases "n \<ge> length xs")
case False
with assms show ?thesis
by (simp add: upd_conv_take_nth_drop drop_take)
qed auto
lemma nth_image: "l \<le> size xs \<Longrightarrow> nth xs ` {0..<l} = set(take l xs)"
by (simp add: set_conv_nth) force
subsubsection \<open>\<^const>\<open>takeWhile\<close> and \<^const>\<open>dropWhile\<close>\<close>
lemma length_takeWhile_le: "length (takeWhile P xs) \<le> length xs"
by (induct xs) auto
lemma takeWhile_dropWhile_id [simp]: "takeWhile P xs @ dropWhile P xs = xs"
by (induct xs) auto
lemma takeWhile_append1 [simp]:
"\<lbrakk>x \<in> set xs; \<not>P(x)\<rbrakk> \<Longrightarrow> takeWhile P (xs @ ys) = takeWhile P xs"
by (induct xs) auto
lemma takeWhile_append2 [simp]:
"(\<And>x. x \<in> set xs \<Longrightarrow> P x) \<Longrightarrow> takeWhile P (xs @ ys) = xs @ takeWhile P ys"
by (induct xs) auto
lemma takeWhile_append:
"takeWhile P (xs @ ys) = (if \<forall>x\<in>set xs. P x then xs @ takeWhile P ys else takeWhile P xs)"
using takeWhile_append1[of _ xs P ys] takeWhile_append2[of xs P ys] by auto
lemma takeWhile_tail: "\<not> P x \<Longrightarrow> takeWhile P (xs @ (x#l)) = takeWhile P xs"
by (induct xs) auto
lemma takeWhile_eq_Nil_iff: "takeWhile P xs = [] \<longleftrightarrow> xs = [] \<or> \<not>P (hd xs)"
by (cases xs) auto
lemma takeWhile_nth: "j < length (takeWhile P xs) \<Longrightarrow> takeWhile P xs ! j = xs ! j"
by (metis nth_append takeWhile_dropWhile_id)
lemma takeWhile_takeWhile: "takeWhile Q (takeWhile P xs) = takeWhile (\<lambda>x. P x \<and> Q x) xs"
by(induct xs) simp_all
lemma dropWhile_nth: "j < length (dropWhile P xs) \<Longrightarrow>
dropWhile P xs ! j = xs ! (j + length (takeWhile P xs))"
by (metis add.commute nth_append_length_plus takeWhile_dropWhile_id)
lemma length_dropWhile_le: "length (dropWhile P xs) \<le> length xs"
by (induct xs) auto
lemma dropWhile_append1 [simp]:
"\<lbrakk>x \<in> set xs; \<not>P(x)\<rbrakk> \<Longrightarrow> dropWhile P (xs @ ys) = (dropWhile P xs)@ys"
by (induct xs) auto
lemma dropWhile_append2 [simp]:
"(\<And>x. x \<in> set xs \<Longrightarrow> P(x)) \<Longrightarrow> dropWhile P (xs @ ys) = dropWhile P ys"
by (induct xs) auto
lemma dropWhile_append3:
"\<not> P y \<Longrightarrow>dropWhile P (xs @ y # ys) = dropWhile P xs @ y # ys"
by (induct xs) auto
lemma dropWhile_append:
"dropWhile P (xs @ ys) = (if \<forall>x\<in>set xs. P x then dropWhile P ys else dropWhile P xs @ ys)"
using dropWhile_append1[of _ xs P ys] dropWhile_append2[of xs P ys] by auto
lemma dropWhile_last:
"x \<in> set xs \<Longrightarrow> \<not> P x \<Longrightarrow> last (dropWhile P xs) = last xs"
by (auto simp add: dropWhile_append3 in_set_conv_decomp)
lemma set_dropWhileD: "x \<in> set (dropWhile P xs) \<Longrightarrow> x \<in> set xs"
by (induct xs) (auto split: if_split_asm)
lemma set_takeWhileD: "x \<in> set (takeWhile P xs) \<Longrightarrow> x \<in> set xs \<and> P x"
by (induct xs) (auto split: if_split_asm)
lemma takeWhile_eq_all_conv[simp]:
"(takeWhile P xs = xs) = (\<forall>x \<in> set xs. P x)"
by(induct xs, auto)
lemma dropWhile_eq_Nil_conv[simp]:
"(dropWhile P xs = []) = (\<forall>x \<in> set xs. P x)"
by(induct xs, auto)
lemma dropWhile_eq_Cons_conv:
"(dropWhile P xs = y#ys) = (xs = takeWhile P xs @ y # ys \<and> \<not> P y)"
by(induct xs, auto)
lemma dropWhile_eq_self_iff: "dropWhile P xs = xs \<longleftrightarrow> xs = [] \<or> \<not>P (hd xs)"
by (cases xs) (auto simp: dropWhile_eq_Cons_conv)
lemma dropWhile_dropWhile1: "(\<And>x. Q x \<Longrightarrow> P x) \<Longrightarrow> dropWhile Q (dropWhile P xs) = dropWhile P xs"
by(induct xs) simp_all
lemma dropWhile_dropWhile2: "(\<And>x. P x \<Longrightarrow> Q x) \<Longrightarrow> takeWhile P (takeWhile Q xs) = takeWhile P xs"
by(induct xs) simp_all
lemma dropWhile_takeWhile:
"(\<And>x. P x \<Longrightarrow> Q x) \<Longrightarrow> dropWhile P (takeWhile Q xs) = takeWhile Q (dropWhile P xs)"
by (induction xs) auto
lemma distinct_takeWhile[simp]: "distinct xs \<Longrightarrow> distinct (takeWhile P xs)"
by (induct xs) (auto dest: set_takeWhileD)
lemma distinct_dropWhile[simp]: "distinct xs \<Longrightarrow> distinct (dropWhile P xs)"
by (induct xs) auto
lemma takeWhile_map: "takeWhile P (map f xs) = map f (takeWhile (P \<circ> f) xs)"
by (induct xs) auto
lemma dropWhile_map: "dropWhile P (map f xs) = map f (dropWhile (P \<circ> f) xs)"
by (induct xs) auto
lemma takeWhile_eq_take: "takeWhile P xs = take (length (takeWhile P xs)) xs"
by (induct xs) auto
lemma dropWhile_eq_drop: "dropWhile P xs = drop (length (takeWhile P xs)) xs"
by (induct xs) auto
lemma hd_dropWhile: "dropWhile P xs \<noteq> [] \<Longrightarrow> \<not> P (hd (dropWhile P xs))"
by (induct xs) auto
lemma takeWhile_eq_filter:
assumes "\<And> x. x \<in> set (dropWhile P xs) \<Longrightarrow> \<not> P x"
shows "takeWhile P xs = filter P xs"
proof -
have A: "filter P xs = filter P (takeWhile P xs @ dropWhile P xs)"
by simp
have B: "filter P (dropWhile P xs) = []"
unfolding filter_empty_conv using assms by blast
have "filter P xs = takeWhile P xs"
unfolding A filter_append B
by (auto simp add: filter_id_conv dest: set_takeWhileD)
thus ?thesis ..
qed
lemma takeWhile_eq_take_P_nth:
"\<lbrakk> \<And> i. \<lbrakk> i < n ; i < length xs \<rbrakk> \<Longrightarrow> P (xs ! i) ; n < length xs \<Longrightarrow> \<not> P (xs ! n) \<rbrakk> \<Longrightarrow>
takeWhile P xs = take n xs"
proof (induct xs arbitrary: n)
case Nil
thus ?case by simp
next
case (Cons x xs)
show ?case
proof (cases n)
case 0
with Cons show ?thesis by simp
next
case [simp]: (Suc n')
have "P x" using Cons.prems(1)[of 0] by simp
moreover have "takeWhile P xs = take n' xs"
proof (rule Cons.hyps)
fix i
assume "i < n'" "i < length xs"
thus "P (xs ! i)" using Cons.prems(1)[of "Suc i"] by simp
next
assume "n' < length xs"
thus "\<not> P (xs ! n')" using Cons by auto
qed
ultimately show ?thesis by simp
qed
qed
lemma nth_length_takeWhile:
"length (takeWhile P xs) < length xs \<Longrightarrow> \<not> P (xs ! length (takeWhile P xs))"
by (induct xs) auto
lemma length_takeWhile_less_P_nth:
assumes all: "\<And> i. i < j \<Longrightarrow> P (xs ! i)" and "j \<le> length xs"
shows "j \<le> length (takeWhile P xs)"
proof (rule classical)
assume "\<not> ?thesis"
hence "length (takeWhile P xs) < length xs" using assms by simp
thus ?thesis using all \<open>\<not> ?thesis\<close> nth_length_takeWhile[of P xs] by auto
qed
lemma takeWhile_neq_rev: "\<lbrakk>distinct xs; x \<in> set xs\<rbrakk> \<Longrightarrow>
takeWhile (\<lambda>y. y \<noteq> x) (rev xs) = rev (tl (dropWhile (\<lambda>y. y \<noteq> x) xs))"
by(induct xs) (auto simp: takeWhile_tail[where l="[]"])
lemma dropWhile_neq_rev: "\<lbrakk>distinct xs; x \<in> set xs\<rbrakk> \<Longrightarrow>
dropWhile (\<lambda>y. y \<noteq> x) (rev xs) = x # rev (takeWhile (\<lambda>y. y \<noteq> x) xs)"
proof (induct xs)
case (Cons a xs)
then show ?case
by(auto, subst dropWhile_append2, auto)
qed simp
lemma takeWhile_not_last:
"distinct xs \<Longrightarrow> takeWhile (\<lambda>y. y \<noteq> last xs) xs = butlast xs"
by(induction xs rule: induct_list012) auto
lemma takeWhile_cong [fundef_cong]:
"\<lbrakk>l = k; \<And>x. x \<in> set l \<Longrightarrow> P x = Q x\<rbrakk>
\<Longrightarrow> takeWhile P l = takeWhile Q k"
by (induct k arbitrary: l) (simp_all)
lemma dropWhile_cong [fundef_cong]:
"\<lbrakk>l = k; \<And>x. x \<in> set l \<Longrightarrow> P x = Q x\<rbrakk>
\<Longrightarrow> dropWhile P l = dropWhile Q k"
by (induct k arbitrary: l, simp_all)
lemma takeWhile_idem [simp]:
"takeWhile P (takeWhile P xs) = takeWhile P xs"
by (induct xs) auto
lemma dropWhile_idem [simp]:
"dropWhile P (dropWhile P xs) = dropWhile P xs"
by (induct xs) auto
subsubsection \<open>\<^const>\<open>zip\<close>\<close>
lemma zip_Nil [simp]: "zip [] ys = []"
by (induct ys) auto
lemma zip_Cons_Cons [simp]: "zip (x # xs) (y # ys) = (x, y) # zip xs ys"
by simp
declare zip_Cons [simp del]
lemma [code]:
"zip [] ys = []"
"zip xs [] = []"
"zip (x # xs) (y # ys) = (x, y) # zip xs ys"
by (fact zip_Nil zip.simps(1) zip_Cons_Cons)+
lemma zip_Cons1:
"zip (x#xs) ys = (case ys of [] \<Rightarrow> [] | y#ys \<Rightarrow> (x,y)#zip xs ys)"
by(auto split:list.split)
lemma length_zip [simp]:
"length (zip xs ys) = min (length xs) (length ys)"
by (induct xs ys rule:list_induct2') auto
lemma zip_obtain_same_length:
assumes "\<And>zs ws n. length zs = length ws \<Longrightarrow> n = min (length xs) (length ys)
\<Longrightarrow> zs = take n xs \<Longrightarrow> ws = take n ys \<Longrightarrow> P (zip zs ws)"
shows "P (zip xs ys)"
proof -
let ?n = "min (length xs) (length ys)"
have "P (zip (take ?n xs) (take ?n ys))"
by (rule assms) simp_all
moreover have "zip xs ys = zip (take ?n xs) (take ?n ys)"
proof (induct xs arbitrary: ys)
case Nil then show ?case by simp
next
case (Cons x xs) then show ?case by (cases ys) simp_all
qed
ultimately show ?thesis by simp
qed
lemma zip_append1:
"zip (xs @ ys) zs =
zip xs (take (length xs) zs) @ zip ys (drop (length xs) zs)"
by (induct xs zs rule:list_induct2') auto
lemma zip_append2:
"zip xs (ys @ zs) =
zip (take (length ys) xs) ys @ zip (drop (length ys) xs) zs"
by (induct xs ys rule:list_induct2') auto
lemma zip_append [simp]:
"\<lbrakk>length xs = length us\<rbrakk> \<Longrightarrow>
zip (xs@ys) (us@vs) = zip xs us @ zip ys vs"
by (simp add: zip_append1)
lemma zip_rev:
"length xs = length ys \<Longrightarrow> zip (rev xs) (rev ys) = rev (zip xs ys)"
by (induct rule:list_induct2, simp_all)
lemma zip_map_map:
"zip (map f xs) (map g ys) = map (\<lambda> (x, y). (f x, g y)) (zip xs ys)"
proof (induct xs arbitrary: ys)
case (Cons x xs) note Cons_x_xs = Cons.hyps
show ?case
proof (cases ys)
case (Cons y ys')
show ?thesis unfolding Cons using Cons_x_xs by simp
qed simp
qed simp
lemma zip_map1:
"zip (map f xs) ys = map (\<lambda>(x, y). (f x, y)) (zip xs ys)"
using zip_map_map[of f xs "\<lambda>x. x" ys] by simp
lemma zip_map2:
"zip xs (map f ys) = map (\<lambda>(x, y). (x, f y)) (zip xs ys)"
using zip_map_map[of "\<lambda>x. x" xs f ys] by simp
lemma map_zip_map:
"map f (zip (map g xs) ys) = map (%(x,y). f(g x, y)) (zip xs ys)"
by (auto simp: zip_map1)
lemma map_zip_map2:
"map f (zip xs (map g ys)) = map (%(x,y). f(x, g y)) (zip xs ys)"
by (auto simp: zip_map2)
text\<open>Courtesy of Andreas Lochbihler:\<close>
lemma zip_same_conv_map: "zip xs xs = map (\<lambda>x. (x, x)) xs"
by(induct xs) auto
lemma nth_zip [simp]:
"\<lbrakk>i < length xs; i < length ys\<rbrakk> \<Longrightarrow> (zip xs ys)!i = (xs!i, ys!i)"
proof (induct ys arbitrary: i xs)
case (Cons y ys)
then show ?case
by (cases xs) (simp_all add: nth.simps split: nat.split)
qed auto
lemma set_zip:
"set (zip xs ys) = {(xs!i, ys!i) | i. i < min (length xs) (length ys)}"
by(simp add: set_conv_nth cong: rev_conj_cong)
lemma zip_same: "((a,b) \<in> set (zip xs xs)) = (a \<in> set xs \<and> a = b)"
by(induct xs) auto
lemma zip_update: "zip (xs[i:=x]) (ys[i:=y]) = (zip xs ys)[i:=(x,y)]"
by (simp add: update_zip)
lemma zip_replicate [simp]:
"zip (replicate i x) (replicate j y) = replicate (min i j) (x,y)"
proof (induct i arbitrary: j)
case (Suc i)
then show ?case
by (cases j, auto)
qed auto
lemma zip_replicate1: "zip (replicate n x) ys = map (Pair x) (take n ys)"
by(induction ys arbitrary: n)(case_tac [2] n, simp_all)
lemma take_zip: "take n (zip xs ys) = zip (take n xs) (take n ys)"
proof (induct n arbitrary: xs ys)
case 0
then show ?case by simp
next
case Suc
then show ?case by (cases xs; cases ys) simp_all
qed
lemma drop_zip: "drop n (zip xs ys) = zip (drop n xs) (drop n ys)"
proof (induct n arbitrary: xs ys)
case 0
then show ?case by simp
next
case Suc
then show ?case by (cases xs; cases ys) simp_all
qed
lemma zip_takeWhile_fst: "zip (takeWhile P xs) ys = takeWhile (P \<circ> fst) (zip xs ys)"
proof (induct xs arbitrary: ys)
case Nil
then show ?case by simp
next
case Cons
then show ?case by (cases ys) auto
qed
lemma zip_takeWhile_snd: "zip xs (takeWhile P ys) = takeWhile (P \<circ> snd) (zip xs ys)"
proof (induct xs arbitrary: ys)
case Nil
then show ?case by simp
next
case Cons
then show ?case by (cases ys) auto
qed
lemma set_zip_leftD: "(x,y)\<in> set (zip xs ys) \<Longrightarrow> x \<in> set xs"
by (induct xs ys rule:list_induct2') auto
lemma set_zip_rightD: "(x,y)\<in> set (zip xs ys) \<Longrightarrow> y \<in> set ys"
by (induct xs ys rule:list_induct2') auto
lemma in_set_zipE:
"(x,y) \<in> set(zip xs ys) \<Longrightarrow> (\<lbrakk> x \<in> set xs; y \<in> set ys \<rbrakk> \<Longrightarrow> R) \<Longrightarrow> R"
by(blast dest: set_zip_leftD set_zip_rightD)
lemma zip_map_fst_snd: "zip (map fst zs) (map snd zs) = zs"
by (induct zs) simp_all
lemma zip_eq_conv:
"length xs = length ys \<Longrightarrow> zip xs ys = zs \<longleftrightarrow> map fst zs = xs \<and> map snd zs = ys"
by (auto simp add: zip_map_fst_snd)
lemma in_set_zip:
"p \<in> set (zip xs ys) \<longleftrightarrow> (\<exists>n. xs ! n = fst p \<and> ys ! n = snd p
\<and> n < length xs \<and> n < length ys)"
by (cases p) (auto simp add: set_zip)
lemma in_set_impl_in_set_zip1:
assumes "length xs = length ys"
assumes "x \<in> set xs"
obtains y where "(x, y) \<in> set (zip xs ys)"
proof -
from assms have "x \<in> set (map fst (zip xs ys))" by simp
from this that show ?thesis by fastforce
qed
lemma in_set_impl_in_set_zip2:
assumes "length xs = length ys"
assumes "y \<in> set ys"
obtains x where "(x, y) \<in> set (zip xs ys)"
proof -
from assms have "y \<in> set (map snd (zip xs ys))" by simp
from this that show ?thesis by fastforce
qed
lemma zip_eq_Nil_iff[simp]:
"zip xs ys = [] \<longleftrightarrow> xs = [] \<or> ys = []"
by (cases xs; cases ys) simp_all
lemmas Nil_eq_zip_iff[simp] = zip_eq_Nil_iff[THEN eq_iff_swap]
lemma zip_eq_ConsE:
assumes "zip xs ys = xy # xys"
obtains x xs' y ys' where "xs = x # xs'"
and "ys = y # ys'" and "xy = (x, y)"
and "xys = zip xs' ys'"
proof -
from assms have "xs \<noteq> []" and "ys \<noteq> []"
using zip_eq_Nil_iff [of xs ys] by simp_all
then obtain x xs' y ys' where xs: "xs = x # xs'"
and ys: "ys = y # ys'"
by (cases xs; cases ys) auto
with assms have "xy = (x, y)" and "xys = zip xs' ys'"
by simp_all
with xs ys show ?thesis ..
qed
lemma semilattice_map2:
"semilattice (map2 (\<^bold>*))" if "semilattice (\<^bold>*)"
for f (infixl "\<^bold>*" 70)
proof -
from that interpret semilattice f .
show ?thesis
proof
show "map2 (\<^bold>*) (map2 (\<^bold>*) xs ys) zs = map2 (\<^bold>*) xs (map2 (\<^bold>*) ys zs)"
for xs ys zs :: "'a list"
proof (induction "zip xs (zip ys zs)" arbitrary: xs ys zs)
case Nil
from Nil [symmetric] show ?case
by auto
next
case (Cons xyz xyzs)
from Cons.hyps(2) [symmetric] show ?case
by (rule zip_eq_ConsE) (erule zip_eq_ConsE,
auto intro: Cons.hyps(1) simp add: ac_simps)
qed
show "map2 (\<^bold>*) xs ys = map2 (\<^bold>*) ys xs"
for xs ys :: "'a list"
proof (induction "zip xs ys" arbitrary: xs ys)
case Nil
then show ?case
by auto
next
case (Cons xy xys)
from Cons.hyps(2) [symmetric] show ?case
by (rule zip_eq_ConsE) (auto intro: Cons.hyps(1) simp add: ac_simps)
qed
show "map2 (\<^bold>*) xs xs = xs"
for xs :: "'a list"
by (induction xs) simp_all
qed
qed
lemma pair_list_eqI:
assumes "map fst xs = map fst ys" and "map snd xs = map snd ys"
shows "xs = ys"
proof -
from assms(1) have "length xs = length ys" by (rule map_eq_imp_length_eq)
from this assms show ?thesis
by (induct xs ys rule: list_induct2) (simp_all add: prod_eqI)
qed
lemma hd_zip:
\<open>hd (zip xs ys) = (hd xs, hd ys)\<close> if \<open>xs \<noteq> []\<close> and \<open>ys \<noteq> []\<close>
using that by (cases xs; cases ys) simp_all
lemma last_zip:
\<open>last (zip xs ys) = (last xs, last ys)\<close> if \<open>xs \<noteq> []\<close> and \<open>ys \<noteq> []\<close>
and \<open>length xs = length ys\<close>
using that by (cases xs rule: rev_cases; cases ys rule: rev_cases) simp_all
subsubsection \<open>\<^const>\<open>list_all2\<close>\<close>
lemma list_all2_lengthD [intro?]:
"list_all2 P xs ys \<Longrightarrow> length xs = length ys"
by (simp add: list_all2_iff)
lemma list_all2_Nil [iff, code]: "list_all2 P [] ys = (ys = [])"
by (simp add: list_all2_iff)
lemma list_all2_Nil2 [iff, code]: "list_all2 P xs [] = (xs = [])"
by (simp add: list_all2_iff)
lemma list_all2_Cons [iff, code]:
"list_all2 P (x # xs) (y # ys) = (P x y \<and> list_all2 P xs ys)"
by (auto simp add: list_all2_iff)
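text \<open>For illustration (hand-worked): \<open>list_all2 (<) [1, 2] [2, 3]\<close> holds since \<open>1 < 2\<close> and
\<open>2 < 3\<close>, whereas \<open>list_all2 (<) [1, 2] [2]\<close> fails already because the lengths differ.\<close>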
lemma list_all2_Cons1:
"list_all2 P (x # xs) ys = (\<exists>z zs. ys = z # zs \<and> P x z \<and> list_all2 P xs zs)"
by (cases ys) auto
lemma list_all2_Cons2:
"list_all2 P xs (y # ys) = (\<exists>z zs. xs = z # zs \<and> P z y \<and> list_all2 P zs ys)"
by (cases xs) auto
lemma list_all2_induct
[consumes 1, case_names Nil Cons, induct set: list_all2]:
assumes P: "list_all2 P xs ys"
assumes Nil: "R [] []"
assumes Cons: "\<And>x xs y ys.
\<lbrakk>P x y; list_all2 P xs ys; R xs ys\<rbrakk> \<Longrightarrow> R (x # xs) (y # ys)"
shows "R xs ys"
using P
by (induct xs arbitrary: ys) (auto simp add: list_all2_Cons1 Nil Cons)
lemma list_all2_rev [iff]:
"list_all2 P (rev xs) (rev ys) = list_all2 P xs ys"
by (simp add: list_all2_iff zip_rev cong: conj_cong)
lemma list_all2_rev1:
"list_all2 P (rev xs) ys = list_all2 P xs (rev ys)"
by (subst list_all2_rev [symmetric]) simp
lemma list_all2_append1:
"list_all2 P (xs @ ys) zs =
(\<exists>us vs. zs = us @ vs \<and> length us = length xs \<and> length vs = length ys \<and>
list_all2 P xs us \<and> list_all2 P ys vs)" (is "?lhs = ?rhs")
proof
assume ?lhs
then show ?rhs
apply (rule_tac x = "take (length xs) zs" in exI)
apply (rule_tac x = "drop (length xs) zs" in exI)
apply (force split: nat_diff_split simp add: list_all2_iff zip_append1)
done
next
assume ?rhs
then show ?lhs
by (auto simp add: list_all2_iff)
qed
lemma list_all2_append2:
"list_all2 P xs (ys @ zs) =
(\<exists>us vs. xs = us @ vs \<and> length us = length ys \<and> length vs = length zs \<and>
list_all2 P us ys \<and> list_all2 P vs zs)" (is "?lhs = ?rhs")
proof
assume ?lhs
then show ?rhs
apply (rule_tac x = "take (length ys) xs" in exI)
apply (rule_tac x = "drop (length ys) xs" in exI)
apply (force split: nat_diff_split simp add: list_all2_iff zip_append2)
done
next
assume ?rhs
then show ?lhs
by (auto simp add: list_all2_iff)
qed
lemma list_all2_append:
"length xs = length ys \<Longrightarrow>
list_all2 P (xs@us) (ys@vs) = (list_all2 P xs ys \<and> list_all2 P us vs)"
by (induct rule:list_induct2, simp_all)
lemma list_all2_appendI [intro?, trans]:
"\<lbrakk> list_all2 P a b; list_all2 P c d \<rbrakk> \<Longrightarrow> list_all2 P (a@c) (b@d)"
by (simp add: list_all2_append list_all2_lengthD)
lemma list_all2_conv_all_nth:
"list_all2 P xs ys =
(length xs = length ys \<and> (\<forall>i < length xs. P (xs!i) (ys!i)))"
by (force simp add: list_all2_iff set_zip)
lemma list_all2_trans:
assumes tr: "!!a b c. P1 a b \<Longrightarrow> P2 b c \<Longrightarrow> P3 a c"
shows "!!bs cs. list_all2 P1 as bs \<Longrightarrow> list_all2 P2 bs cs \<Longrightarrow> list_all2 P3 as cs"
(is "!!bs cs. PROP ?Q as bs cs")
proof (induct as)
fix x xs bs assume I1: "!!bs cs. PROP ?Q xs bs cs"
show "!!cs. PROP ?Q (x # xs) bs cs"
proof (induct bs)
fix y ys cs assume I2: "!!cs. PROP ?Q (x # xs) ys cs"
show "PROP ?Q (x # xs) (y # ys) cs"
by (induct cs) (auto intro: tr I1 I2)
qed simp
qed simp
lemma list_all2_all_nthI [intro?]:
"length a = length b \<Longrightarrow> (\<And>n. n < length a \<Longrightarrow> P (a!n) (b!n)) \<Longrightarrow> list_all2 P a b"
by (simp add: list_all2_conv_all_nth)
lemma list_all2I:
"\<forall>x \<in> set (zip a b). case_prod P x \<Longrightarrow> length a = length b \<Longrightarrow> list_all2 P a b"
by (simp add: list_all2_iff)
lemma list_all2_nthD:
"\<lbrakk> list_all2 P xs ys; p < size xs \<rbrakk> \<Longrightarrow> P (xs!p) (ys!p)"
by (simp add: list_all2_conv_all_nth)
lemma list_all2_nthD2:
"\<lbrakk>list_all2 P xs ys; p < size ys\<rbrakk> \<Longrightarrow> P (xs!p) (ys!p)"
by (frule list_all2_lengthD) (auto intro: list_all2_nthD)
lemma list_all2_map1:
"list_all2 P (map f as) bs = list_all2 (\<lambda>x y. P (f x) y) as bs"
by (simp add: list_all2_conv_all_nth)
lemma list_all2_map2:
"list_all2 P as (map f bs) = list_all2 (\<lambda>x y. P x (f y)) as bs"
by (auto simp add: list_all2_conv_all_nth)
lemma list_all2_refl [intro?]:
"(\<And>x. P x x) \<Longrightarrow> list_all2 P xs xs"
by (simp add: list_all2_conv_all_nth)
lemma list_all2_update_cong:
"\<lbrakk> list_all2 P xs ys; P x y \<rbrakk> \<Longrightarrow> list_all2 P (xs[i:=x]) (ys[i:=y])"
by (cases "i < length ys") (auto simp add: list_all2_conv_all_nth nth_list_update)
lemma list_all2_takeI [simp,intro?]:
"list_all2 P xs ys \<Longrightarrow> list_all2 P (take n xs) (take n ys)"
proof (induct xs arbitrary: n ys)
case (Cons x xs)
then show ?case
by (cases n) (auto simp: list_all2_Cons1)
qed auto
lemma list_all2_dropI [simp,intro?]:
"list_all2 P xs ys \<Longrightarrow> list_all2 P (drop n xs) (drop n ys)"
proof (induct xs arbitrary: n ys)
case (Cons x xs)
then show ?case
by (cases n) (auto simp: list_all2_Cons1)
qed auto
lemma list_all2_mono [intro?]:
"list_all2 P xs ys \<Longrightarrow> (\<And>xs ys. P xs ys \<Longrightarrow> Q xs ys) \<Longrightarrow> list_all2 Q xs ys"
by (rule list.rel_mono_strong)
lemma list_all2_eq:
"xs = ys \<longleftrightarrow> list_all2 (=) xs ys"
by (induct xs ys rule: list_induct2') auto
lemma list_eq_iff_zip_eq:
"xs = ys \<longleftrightarrow> length xs = length ys \<and> (\<forall>(x,y) \<in> set (zip xs ys). x = y)"
by(auto simp add: set_zip list_all2_eq list_all2_conv_all_nth cong: conj_cong)
lemma list_all2_same: "list_all2 P xs xs \<longleftrightarrow> (\<forall>x\<in>set xs. P x x)"
by(auto simp add: list_all2_conv_all_nth set_conv_nth)
lemma zip_assoc:
"zip xs (zip ys zs) = map (\<lambda>((x, y), z). (x, y, z)) (zip (zip xs ys) zs)"
by(rule list_all2_all_nthI[where P="(=)", unfolded list.rel_eq]) simp_all
lemma zip_commute: "zip xs ys = map (\<lambda>(x, y). (y, x)) (zip ys xs)"
by(rule list_all2_all_nthI[where P="(=)", unfolded list.rel_eq]) simp_all
lemma zip_left_commute:
"zip xs (zip ys zs) = map (\<lambda>(y, (x, z)). (x, y, z)) (zip ys (zip xs zs))"
by(rule list_all2_all_nthI[where P="(=)", unfolded list.rel_eq]) simp_all
lemma zip_replicate2: "zip xs (replicate n y) = map (\<lambda>x. (x, y)) (take n xs)"
by(subst zip_commute)(simp add: zip_replicate1)
subsubsection \<open>\<^const>\<open>List.product\<close> and \<^const>\<open>product_lists\<close>\<close>
lemma product_concat_map:
"List.product xs ys = concat (map (\<lambda>x. map (\<lambda>y. (x,y)) ys) xs)"
by(induction xs) (simp)+
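text \<open>For illustration (hand-worked): \<open>List.product [1, 2] [x, y] = [(1, x), (1, y), (2, x), (2, y)]\<close>,
i.e. all pairs, enumerated as the preceding equation spells out.\<close>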
lemma set_product[simp]: "set (List.product xs ys) = set xs \<times> set ys"
by (induct xs) auto
lemma length_product [simp]:
"length (List.product xs ys) = length xs * length ys"
by (induct xs) simp_all
lemma product_nth:
assumes "n < length xs * length ys"
shows "List.product xs ys ! n = (xs ! (n div length ys), ys ! (n mod length ys))"
using assms proof (induct xs arbitrary: n)
case Nil then show ?case by simp
next
case (Cons x xs n)
then have "length ys > 0" by auto
with Cons show ?case
by (auto simp add: nth_append not_less le_mod_geq le_div_geq)
qed
lemma in_set_product_lists_length:
"xs \<in> set (product_lists xss) \<Longrightarrow> length xs = length xss"
by (induct xss arbitrary: xs) auto
lemma product_lists_set:
"set (product_lists xss) = {xs. list_all2 (\<lambda>x ys. x \<in> set ys) xs xss}" (is "?L = Collect ?R")
proof (intro equalityI subsetI, unfold mem_Collect_eq)
fix xs assume "xs \<in> ?L"
then have "length xs = length xss" by (rule in_set_product_lists_length)
from this \<open>xs \<in> ?L\<close> show "?R xs" by (induct xs xss rule: list_induct2) auto
next
fix xs assume "?R xs"
then show "xs \<in> ?L" by induct auto
qed
subsubsection \<open>\<^const>\<open>fold\<close> with natural argument order\<close>
lemma fold_simps [code]: \<comment> \<open>eta-expanded variant for generated code -- enables tail-recursion optimisation in Scala\<close>
"fold f [] s = s"
"fold f (x # xs) s = fold f xs (f x s)"
by simp_all
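text \<open>For illustration (a hand-worked evaluation, not needed for the proofs): the equations above
compute \<open>fold (+) [1, 2, 3] 0 = fold (+) [2, 3] 1 = fold (+) [3] 3 = fold (+) [] 6 = 6\<close>,
threading the accumulator from left to right; this shape is what makes the generated code
tail-recursive.\<close>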
lemma fold_remove1_split:
"\<lbrakk> \<And>x y. x \<in> set xs \<Longrightarrow> y \<in> set xs \<Longrightarrow> f x \<circ> f y = f y \<circ> f x;
x \<in> set xs \<rbrakk>
\<Longrightarrow> fold f xs = fold f (remove1 x xs) \<circ> f x"
by (induct xs) (auto simp add: comp_assoc)
lemma fold_cong [fundef_cong]:
"a = b \<Longrightarrow> xs = ys \<Longrightarrow> (\<And>x. x \<in> set xs \<Longrightarrow> f x = g x)
\<Longrightarrow> fold f xs a = fold g ys b"
by (induct ys arbitrary: a b xs) simp_all
lemma fold_id: "(\<And>x. x \<in> set xs \<Longrightarrow> f x = id) \<Longrightarrow> fold f xs = id"
by (induct xs) simp_all
lemma fold_commute:
"(\<And>x. x \<in> set xs \<Longrightarrow> h \<circ> g x = f x \<circ> h) \<Longrightarrow> h \<circ> fold g xs = fold f xs \<circ> h"
by (induct xs) (simp_all add: fun_eq_iff)
lemma fold_commute_apply:
assumes "\<And>x. x \<in> set xs \<Longrightarrow> h \<circ> g x = f x \<circ> h"
shows "h (fold g xs s) = fold f xs (h s)"
proof -
from assms have "h \<circ> fold g xs = fold f xs \<circ> h" by (rule fold_commute)
then show ?thesis by (simp add: fun_eq_iff)
qed
lemma fold_invariant:
"\<lbrakk> \<And>x. x \<in> set xs \<Longrightarrow> Q x; P s; \<And>x s. Q x \<Longrightarrow> P s \<Longrightarrow> P (f x s) \<rbrakk>
\<Longrightarrow> P (fold f xs s)"
by (induct xs arbitrary: s) simp_all
lemma fold_append [simp]: "fold f (xs @ ys) = fold f ys \<circ> fold f xs"
by (induct xs) simp_all
lemma fold_map [code_unfold]: "fold g (map f xs) = fold (g \<circ> f) xs"
by (induct xs) simp_all
lemma fold_filter:
"fold f (filter P xs) = fold (\<lambda>x. if P x then f x else id) xs"
by (induct xs) simp_all
lemma fold_rev:
"(\<And>x y. x \<in> set xs \<Longrightarrow> y \<in> set xs \<Longrightarrow> f y \<circ> f x = f x \<circ> f y)
\<Longrightarrow> fold f (rev xs) = fold f xs"
by (induct xs) (simp_all add: fold_commute_apply fun_eq_iff)
lemma fold_Cons_rev: "fold Cons xs = append (rev xs)"
by (induct xs) simp_all
lemma rev_conv_fold [code]: "rev xs = fold Cons xs []"
by (simp add: fold_Cons_rev)
lemma fold_append_concat_rev: "fold append xss = append (concat (rev xss))"
by (induct xss) simp_all
text \<open>\<^const>\<open>Finite_Set.fold\<close> and \<^const>\<open>fold\<close>\<close>
lemma (in comp_fun_commute_on) fold_set_fold_remdups:
assumes "set xs \<subseteq> S"
shows "Finite_Set.fold f y (set xs) = fold f (remdups xs) y"
by (rule sym, use assms in \<open>induct xs arbitrary: y\<close>)
(simp_all add: insert_absorb fold_fun_left_comm)
lemma (in comp_fun_idem_on) fold_set_fold:
assumes "set xs \<subseteq> S"
shows "Finite_Set.fold f y (set xs) = fold f xs y"
by (rule sym, use assms in \<open>induct xs arbitrary: y\<close>) (simp_all add: fold_fun_left_comm)
lemma union_set_fold [code]: "set xs \<union> A = fold Set.insert xs A"
proof -
interpret comp_fun_idem Set.insert
by (fact comp_fun_idem_insert)
show ?thesis by (simp add: union_fold_insert fold_set_fold)
qed
lemma union_coset_filter [code]:
"List.coset xs \<union> A = List.coset (List.filter (\<lambda>x. x \<notin> A) xs)"
by auto
lemma minus_set_fold [code]: "A - set xs = fold Set.remove xs A"
proof -
interpret comp_fun_idem Set.remove
by (fact comp_fun_idem_remove)
show ?thesis
by (simp add: minus_fold_remove [of _ A] fold_set_fold)
qed
lemma minus_coset_filter [code]:
"A - List.coset xs = set (List.filter (\<lambda>x. x \<in> A) xs)"
by auto
lemma inter_set_filter [code]:
"A \<inter> set xs = set (List.filter (\<lambda>x. x \<in> A) xs)"
by auto
lemma inter_coset_fold [code]:
"A \<inter> List.coset xs = fold Set.remove xs A"
by (simp add: Diff_eq [symmetric] minus_set_fold)
lemma (in semilattice_set) set_eq_fold [code]:
"F (set (x # xs)) = fold f xs x"
proof -
interpret comp_fun_idem f
by standard (simp_all add: fun_eq_iff left_commute)
show ?thesis by (simp add: eq_fold fold_set_fold)
qed
lemma (in complete_lattice) Inf_set_fold:
"Inf (set xs) = fold inf xs top"
proof -
interpret comp_fun_idem "inf :: 'a \<Rightarrow> 'a \<Rightarrow> 'a"
by (fact comp_fun_idem_inf)
show ?thesis by (simp add: Inf_fold_inf fold_set_fold inf_commute)
qed
declare Inf_set_fold [where 'a = "'a set", code]
lemma (in complete_lattice) Sup_set_fold:
"Sup (set xs) = fold sup xs bot"
proof -
interpret comp_fun_idem "sup :: 'a \<Rightarrow> 'a \<Rightarrow> 'a"
by (fact comp_fun_idem_sup)
show ?thesis by (simp add: Sup_fold_sup fold_set_fold sup_commute)
qed
declare Sup_set_fold [where 'a = "'a set", code]
lemma (in complete_lattice) INF_set_fold:
"\<Sqinter>(f ` set xs) = fold (inf \<circ> f) xs top"
using Inf_set_fold [of "map f xs"] by (simp add: fold_map)
lemma (in complete_lattice) SUP_set_fold:
"\<Squnion>(f ` set xs) = fold (sup \<circ> f) xs bot"
using Sup_set_fold [of "map f xs"] by (simp add: fold_map)
subsubsection \<open>Fold variants: \<^const>\<open>foldr\<close> and \<^const>\<open>foldl\<close>\<close>
text \<open>Correspondence with \<^const>\<open>fold\<close>:\<close>
lemma foldr_conv_fold [code_abbrev]: "foldr f xs = fold f (rev xs)"
by (induct xs) simp_all
lemma foldl_conv_fold: "foldl f s xs = fold (\<lambda>x s. f s x) xs s"
by (induct xs arbitrary: s) simp_all
lemma foldr_conv_foldl: \<comment> \<open>The ``Third Duality Theorem'' in Bird \& Wadler:\<close>
"foldr f xs a = foldl (\<lambda>x y. f y x) a (rev xs)"
by (simp add: foldr_conv_fold foldl_conv_fold)
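text \<open>For illustration (hand-worked): \<open>foldr f [a, b] z = f a (f b z)\<close>, and likewise
\<open>foldl (\<lambda>x y. f y x) z (rev [a, b]) = foldl (\<lambda>x y. f y x) z [b, a] = f a (f b z)\<close>,
an instance of the duality just proved.\<close>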
lemma foldl_conv_foldr:
"foldl f a xs = foldr (\<lambda>x y. f y x) (rev xs) a"
by (simp add: foldr_conv_fold foldl_conv_fold)
lemma foldr_fold:
"(\<And>x y. x \<in> set xs \<Longrightarrow> y \<in> set xs \<Longrightarrow> f y \<circ> f x = f x \<circ> f y)
\<Longrightarrow> foldr f xs = fold f xs"
unfolding foldr_conv_fold by (rule fold_rev)
lemma foldr_cong [fundef_cong]:
"a = b \<Longrightarrow> l = k \<Longrightarrow> (\<And>a x. x \<in> set l \<Longrightarrow> f x a = g x a) \<Longrightarrow> foldr f l a = foldr g k b"
by (auto simp add: foldr_conv_fold intro!: fold_cong)
lemma foldl_cong [fundef_cong]:
"a = b \<Longrightarrow> l = k \<Longrightarrow> (\<And>a x. x \<in> set l \<Longrightarrow> f a x = g a x) \<Longrightarrow> foldl f a l = foldl g b k"
by (auto simp add: foldl_conv_fold intro!: fold_cong)
lemma foldr_append [simp]: "foldr f (xs @ ys) a = foldr f xs (foldr f ys a)"
by (simp add: foldr_conv_fold)
lemma foldl_append [simp]: "foldl f a (xs @ ys) = foldl f (foldl f a xs) ys"
by (simp add: foldl_conv_fold)
lemma foldr_map [code_unfold]: "foldr g (map f xs) a = foldr (g \<circ> f) xs a"
by (simp add: foldr_conv_fold fold_map rev_map)
lemma foldr_filter:
"foldr f (filter P xs) = foldr (\<lambda>x. if P x then f x else id) xs"
by (simp add: foldr_conv_fold rev_filter fold_filter)
lemma foldl_map [code_unfold]:
"foldl g a (map f xs) = foldl (\<lambda>a x. g a (f x)) a xs"
by (simp add: foldl_conv_fold fold_map comp_def)
lemma concat_conv_foldr [code]:
"concat xss = foldr append xss []"
by (simp add: fold_append_concat_rev foldr_conv_fold)
subsubsection \<open>\<^const>\<open>upt\<close>\<close>
lemma upt_rec[code]: "[i..<j] = (if i<j then i#[Suc i..<j] else [])"
\<comment> \<open>simp does not terminate!\<close>
by (induct j) auto
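text \<open>For illustration (hand-worked): unfolding \<open>upt_rec\<close> gives
\<open>[2..<5] = 2 # [3..<5] = 2 # 3 # [4..<5] = [2, 3, 4]\<close>, and \<open>[3..<3] = []\<close>.\<close>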
lemmas upt_rec_numeral[simp] = upt_rec[of "numeral m" "numeral n"] for m n
lemma upt_conv_Nil [simp]: "j \<le> i \<Longrightarrow> [i..<j] = []"
by (subst upt_rec) simp
lemma upt_eq_Nil_conv[simp]: "([i..<j] = []) = (j = 0 \<or> j \<le> i)"
by(induct j)simp_all
lemma upt_eq_Cons_conv:
"([i..<j] = x#xs) = (i < j \<and> i = x \<and> [i+1..<j] = xs)"
proof (induct j arbitrary: x xs)
case (Suc j)
then show ?case
by (simp add: upt_rec)
qed simp
lemma upt_Suc_append: "i \<le> j \<Longrightarrow> [i..<(Suc j)] = [i..<j]@[j]"
\<comment> \<open>Only needed if \<open>upt_Suc\<close> is deleted from the simpset.\<close>
by simp
lemma upt_conv_Cons: "i < j \<Longrightarrow> [i..<j] = i # [Suc i..<j]"
by (simp add: upt_rec)
lemma upt_conv_Cons_Cons: \<comment> \<open>no precondition\<close>
"m # n # ns = [m..<q] \<longleftrightarrow> n # ns = [Suc m..<q]"
proof (cases "m < q")
case False then show ?thesis by simp
next
case True then show ?thesis by (simp add: upt_conv_Cons)
qed
lemma upt_add_eq_append: "i<=j \<Longrightarrow> [i..<j+k] = [i..<j]@[j..<j+k]"
\<comment> \<open>LOOPS as a simprule, since \<open>j \<le> j\<close>.\<close>
by (induct k) auto
lemma length_upt [simp]: "length [i..<j] = j - i"
by (induct j) (auto simp add: Suc_diff_le)
lemma nth_upt [simp]: "i + k < j \<Longrightarrow> [i..<j] ! k = i + k"
by (induct j) (auto simp add: less_Suc_eq nth_append split: nat_diff_split)
lemma hd_upt[simp]: "i < j \<Longrightarrow> hd[i..<j] = i"
by(simp add:upt_conv_Cons)
lemma tl_upt [simp]: "tl [m..<n] = [Suc m..<n]"
by (simp add: upt_rec)
lemma last_upt[simp]: "i < j \<Longrightarrow> last[i..<j] = j - 1"
by(cases j)(auto simp: upt_Suc_append)
lemma take_upt [simp]: "i+m \<le> n \<Longrightarrow> take m [i..<n] = [i..<i+m]"
proof (induct m arbitrary: i)
case (Suc m)
then show ?case
by (subst take_Suc_conv_app_nth) auto
qed simp
lemma drop_upt[simp]: "drop m [i..<j] = [i+m..<j]"
by(induct j) auto
lemma map_Suc_upt: "map Suc [m..<n] = [Suc m..<Suc n]"
by (induct n) auto
lemma map_add_upt: "map (\<lambda>i. i + n) [0..<m] = [n..<m + n]"
by (induct m) simp_all
lemma nth_map_upt: "i < n-m \<Longrightarrow> (map f [m..<n]) ! i = f(m+i)"
proof (induct n m arbitrary: i rule: diff_induct)
case (3 x y)
then show ?case
by (metis add.commute length_upt less_diff_conv nth_map nth_upt)
qed auto
lemma map_decr_upt: "map (\<lambda>n. n - Suc 0) [Suc m..<Suc n] = [m..<n]"
by (induct n) simp_all
lemma map_upt_Suc: "map f [0 ..< Suc n] = f 0 # map (\<lambda>i. f (Suc i)) [0 ..< n]"
by (induct n arbitrary: f) auto
lemma nth_take_lemma:
"k \<le> length xs \<Longrightarrow> k \<le> length ys \<Longrightarrow>
(\<And>i. i < k \<longrightarrow> xs!i = ys!i) \<Longrightarrow> take k xs = take k ys"
proof (induct k arbitrary: xs ys)
case (Suc k)
then show ?case
apply (simp add: less_Suc_eq_0_disj)
by (simp add: Suc.prems(3) take_Suc_conv_app_nth)
qed simp
lemma nth_equalityI:
"\<lbrakk>length xs = length ys; \<And>i. i < length xs \<Longrightarrow> xs!i = ys!i\<rbrakk> \<Longrightarrow> xs = ys"
by (frule nth_take_lemma [OF le_refl eq_imp_le]) simp_all
lemma map_nth:
"map (\<lambda>i. xs ! i) [0..<length xs] = xs"
by (rule nth_equalityI, auto)
lemma list_all2_antisym:
"\<lbrakk> (\<And>x y. \<lbrakk>P x y; Q y x\<rbrakk> \<Longrightarrow> x = y); list_all2 P xs ys; list_all2 Q ys xs \<rbrakk>
\<Longrightarrow> xs = ys"
by (simp add: list_all2_conv_all_nth nth_equalityI)
lemma take_equalityI: "(\<forall>i. take i xs = take i ys) \<Longrightarrow> xs = ys"
\<comment> \<open>The famous take-lemma.\<close>
by (metis length_take min.commute order_refl take_all)
lemma take_Cons':
"take n (x # xs) = (if n = 0 then [] else x # take (n - 1) xs)"
by (cases n) simp_all
lemma drop_Cons':
"drop n (x # xs) = (if n = 0 then x # xs else drop (n - 1) xs)"
by (cases n) simp_all
lemma nth_Cons': "(x # xs)!n = (if n = 0 then x else xs!(n - 1))"
by (cases n) simp_all
lemma take_Cons_numeral [simp]:
"take (numeral v) (x # xs) = x # take (numeral v - 1) xs"
by (simp add: take_Cons')
lemma drop_Cons_numeral [simp]:
"drop (numeral v) (x # xs) = drop (numeral v - 1) xs"
by (simp add: drop_Cons')
lemma nth_Cons_numeral [simp]:
"(x # xs) ! numeral v = xs ! (numeral v - 1)"
by (simp add: nth_Cons')
lemma map_upt_eqI:
\<open>map f [m..<n] = xs\<close> if \<open>length xs = n - m\<close>
\<open>\<And>i. i < length xs \<Longrightarrow> xs ! i = f (m + i)\<close>
proof (rule nth_equalityI)
from \<open>length xs = n - m\<close> show \<open>length (map f [m..<n]) = length xs\<close>
by simp
next
fix i
assume \<open>i < length (map f [m..<n])\<close>
then have \<open>i < n - m\<close>
by simp
with that have \<open>xs ! i = f (m + i)\<close>
by simp
with \<open>i < n - m\<close> show \<open>map f [m..<n] ! i = xs ! i\<close>
by simp
qed
subsubsection \<open>\<open>upto\<close>: interval-list on \<^typ>\<open>int\<close>\<close>
function upto :: "int \<Rightarrow> int \<Rightarrow> int list" ("(1[_../_])") where
"upto i j = (if i \<le> j then i # [i+1..j] else [])"
by auto
termination
by(relation "measure(%(i::int,j). nat(j - i + 1))") auto
declare upto.simps[simp del]
lemmas upto_rec_numeral [simp] =
upto.simps[of "numeral m" "numeral n"]
upto.simps[of "numeral m" "- numeral n"]
upto.simps[of "- numeral m" "numeral n"]
upto.simps[of "- numeral m" "- numeral n"] for m n
lemma upto_empty[simp]: "j < i \<Longrightarrow> [i..j] = []"
by(simp add: upto.simps)
lemma upto_single[simp]: "[i..i] = [i]"
by(simp add: upto.simps)
lemma upto_Nil[simp]: "[i..j] = [] \<longleftrightarrow> j < i"
by (simp add: upto.simps)
lemmas upto_Nil2[simp] = upto_Nil[THEN eq_iff_swap]
lemma upto_rec1: "i \<le> j \<Longrightarrow> [i..j] = i#[i+1..j]"
by(simp add: upto.simps)
lemma upto_rec2: "i \<le> j \<Longrightarrow> [i..j] = [i..j - 1]@[j]"
proof(induct "nat(j-i)" arbitrary: i j)
case 0 thus ?case by(simp add: upto.simps)
next
case (Suc n)
hence "n = nat (j - (i + 1))" "i < j" by linarith+
from this(2) Suc.hyps(1)[OF this(1)] Suc(2,3) upto_rec1 show ?case by simp
qed
lemma length_upto[simp]: "length [i..j] = nat(j - i + 1)"
by(induction i j rule: upto.induct) (auto simp: upto.simps)
lemma set_upto[simp]: "set[i..j] = {i..j}"
proof(induct i j rule:upto.induct)
case (1 i j)
from this show ?case
unfolding upto.simps[of i j] by auto
qed
lemma nth_upto[simp]: "i + int k \<le> j \<Longrightarrow> [i..j] ! k = i + int k"
proof(induction i j arbitrary: k rule: upto.induct)
case (1 i j)
then show ?case
by (auto simp add: upto_rec1 [of i j] nth_Cons')
qed
lemma upto_split1:
"i \<le> j \<Longrightarrow> j \<le> k \<Longrightarrow> [i..k] = [i..j-1] @ [j..k]"
proof (induction j rule: int_ge_induct)
case base thus ?case by (simp add: upto_rec1)
next
case step thus ?case using upto_rec1 upto_rec2 by simp
qed
lemma upto_split2:
"i \<le> j \<Longrightarrow> j \<le> k \<Longrightarrow> [i..k] = [i..j] @ [j+1..k]"
using upto_rec1 upto_rec2 upto_split1 by auto
lemma upto_split3: "\<lbrakk> i \<le> j; j \<le> k \<rbrakk> \<Longrightarrow> [i..k] = [i..j-1] @ j # [j+1..k]"
using upto_rec1 upto_split1 by auto
text\<open>Tail-recursive version for code generation:\<close>
definition upto_aux :: "int \<Rightarrow> int \<Rightarrow> int list \<Rightarrow> int list" where
"upto_aux i j js = [i..j] @ js"
lemma upto_aux_rec [code]:
"upto_aux i j js = (if j<i then js else upto_aux i (j - 1) (j#js))"
by (simp add: upto_aux_def upto_rec2)
lemma upto_code[code]: "[i..j] = upto_aux i j []"
by(simp add: upto_aux_def)
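text \<open>For illustration (a hand-worked evaluation): the accumulator assembles the interval from the
right, e.g. \<open>upto_aux 1 3 [] = upto_aux 1 2 [3] = upto_aux 1 1 [2, 3] = upto_aux 1 0 [1, 2, 3] =
[1, 2, 3]\<close>, so \<open>[1..3]\<close> is computed without any list appends.\<close>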
subsubsection \<open>\<^const>\<open>successively\<close>\<close>
lemma successively_Cons:
"successively P (x # xs) \<longleftrightarrow> xs = [] \<or> P x (hd xs) \<and> successively P xs"
by (cases xs) auto
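text \<open>For illustration (hand-worked): \<open>successively (<) [1, 2, 3]\<close> only requires \<open>1 < 2\<close> and
\<open>2 < 3\<close>, whereas \<open>sorted_wrt (<) [1, 2, 3]\<close> additionally requires \<open>1 < 3\<close>; for transitive
relations the two coincide, see \<open>successively_conv_sorted_wrt\<close> below.\<close>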
lemma successively_cong [cong]:
assumes "\<And>x y. x \<in> set xs \<Longrightarrow> y \<in> set xs \<Longrightarrow> P x y \<longleftrightarrow> Q x y" "xs = ys"
shows "successively P xs \<longleftrightarrow> successively Q ys"
unfolding assms(2) [symmetric] using assms(1)
by (induction xs) (auto simp: successively_Cons)
lemma successively_append_iff:
"successively P (xs @ ys) \<longleftrightarrow>
successively P xs \<and> successively P ys \<and>
(xs = [] \<or> ys = [] \<or> P (last xs) (hd ys))"
by (induction xs) (auto simp: successively_Cons)
lemma successively_if_sorted_wrt: "sorted_wrt P xs \<Longrightarrow> successively P xs"
by (induction xs rule: induct_list012) auto
lemma successively_iff_sorted_wrt_strong:
assumes "\<And>x y z. x \<in> set xs \<Longrightarrow> y \<in> set xs \<Longrightarrow> z \<in> set xs \<Longrightarrow>
P x y \<Longrightarrow> P y z \<Longrightarrow> P x z"
shows "successively P xs \<longleftrightarrow> sorted_wrt P xs"
proof
assume "successively P xs"
from this and assms show "sorted_wrt P xs"
proof (induction xs rule: induct_list012)
case (3 x y xs)
from "3.prems" have "P x y"
by auto
have IH: "sorted_wrt P (y # xs)"
using "3.prems"
by(intro "3.IH"(2) list.set_intros(2))(simp, blast intro: list.set_intros(2))
have "P x z" if asm: "z \<in> set xs" for z
proof -
from IH and asm have "P y z"
by auto
with \<open>P x y\<close> show "P x z"
using "3.prems" asm by auto
qed
with IH and \<open>P x y\<close> show ?case by auto
qed auto
qed (use successively_if_sorted_wrt in blast)
lemma successively_conv_sorted_wrt:
assumes "transp P"
shows "successively P xs \<longleftrightarrow> sorted_wrt P xs"
using assms unfolding transp_def
by (intro successively_iff_sorted_wrt_strong) blast
lemma successively_rev [simp]: "successively P (rev xs) \<longleftrightarrow> successively (\<lambda>x y. P y x) xs"
by (induction xs rule: remdups_adj.induct)
(auto simp: successively_append_iff successively_Cons)
lemma successively_map: "successively P (map f xs) \<longleftrightarrow> successively (\<lambda>x y. P (f x) (f y)) xs"
by (induction xs rule: induct_list012) auto
lemma successively_mono:
assumes "successively P xs"
assumes "\<And>x y. x \<in> set xs \<Longrightarrow> y \<in> set xs \<Longrightarrow> P x y \<Longrightarrow> Q x y"
shows "successively Q xs"
using assms by (induction Q xs rule: successively.induct) auto
lemma successively_altdef:
"successively = (\<lambda>P. rec_list True (\<lambda>x xs b. case xs of [] \<Rightarrow> True | y # _ \<Rightarrow> P x y \<and> b))"
proof (intro ext)
fix P and xs :: "'a list"
show "successively P xs = rec_list True (\<lambda>x xs b. case xs of [] \<Rightarrow> True | y # _ \<Rightarrow> P x y \<and> b) xs"
by (induction xs) (auto simp: successively_Cons split: list.splits)
qed
subsubsection \<open>\<^const>\<open>distinct\<close> and \<^const>\<open>remdups\<close> and \<^const>\<open>remdups_adj\<close>\<close>
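text \<open>For illustration (hand-worked): \<open>remdups [1, 2, 1, 3] = [2, 1, 3]\<close> (the last occurrence of
each element is kept), whereas \<open>remdups_adj [1, 1, 2, 2, 2, 1] = [1, 2, 1]\<close> only collapses
adjacent repetitions.\<close>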
lemma distinct_tl: "distinct xs \<Longrightarrow> distinct (tl xs)"
by (cases xs) simp_all
lemma distinct_append [simp]:
"distinct (xs @ ys) = (distinct xs \<and> distinct ys \<and> set xs \<inter> set ys = {})"
by (induct xs) auto
lemma distinct_rev[simp]: "distinct(rev xs) = distinct xs"
by(induct xs) auto
lemma set_remdups [simp]: "set (remdups xs) = set xs"
by (induct xs) (auto simp add: insert_absorb)
lemma distinct_remdups [iff]: "distinct (remdups xs)"
by (induct xs) auto
lemma distinct_remdups_id: "distinct xs \<Longrightarrow> remdups xs = xs"
by (induct xs, auto)
lemma remdups_id_iff_distinct [simp]: "remdups xs = xs \<longleftrightarrow> distinct xs"
by (metis distinct_remdups distinct_remdups_id)
lemma finite_distinct_list: "finite A \<Longrightarrow> \<exists>xs. set xs = A \<and> distinct xs"
by (metis distinct_remdups finite_list set_remdups)
lemma remdups_eq_nil_iff [simp]: "(remdups x = []) = (x = [])"
by (induct x, auto)
lemmas remdups_eq_nil_right_iff [simp] = remdups_eq_nil_iff[THEN eq_iff_swap]
lemma length_remdups_leq[iff]: "length(remdups xs) \<le> length xs"
by (induct xs) auto
lemma length_remdups_eq[iff]:
"(length (remdups xs) = length xs) = (remdups xs = xs)"
proof (induct xs)
case (Cons a xs)
then show ?case
by simp (metis Suc_n_not_le_n impossible_Cons length_remdups_leq)
qed auto
lemma remdups_filter: "remdups(filter P xs) = filter P (remdups xs)"
by (induct xs) auto
lemma distinct_map:
"distinct(map f xs) = (distinct xs \<and> inj_on f (set xs))"
by (induct xs) auto
lemma distinct_map_filter:
"distinct (map f xs) \<Longrightarrow> distinct (map f (filter P xs))"
by (induct xs) auto
lemma distinct_filter [simp]: "distinct xs \<Longrightarrow> distinct (filter P xs)"
by (induct xs) auto
lemma distinct_upt[simp]: "distinct[i..<j]"
by (induct j) auto
lemma distinct_upto[simp]: "distinct[i..j]"
proof (induction i j rule: upto.induct)
case (1 i j)
then show ?case
by (simp add: upto.simps [of i])
qed
lemma distinct_take[simp]: "distinct xs \<Longrightarrow> distinct (take i xs)"
proof (induct xs arbitrary: i)
case (Cons a xs)
then show ?case
by (metis Cons.prems append_take_drop_id distinct_append)
qed auto
lemma distinct_drop[simp]: "distinct xs \<Longrightarrow> distinct (drop i xs)"
proof (induct xs arbitrary: i)
case (Cons a xs)
then show ?case
by (metis Cons.prems append_take_drop_id distinct_append)
qed auto
lemma distinct_list_update:
assumes d: "distinct xs" and a: "a \<notin> set xs - {xs!i}"
shows "distinct (xs[i:=a])"
proof (cases "i < length xs")
case True
with a have anot: "a \<notin> set (take i xs @ xs ! i # drop (Suc i) xs) - {xs!i}"
by simp (metis in_set_dropD in_set_takeD)
show ?thesis
proof (cases "a = xs!i")
case True
with d show ?thesis
by auto
next
case False
have "set (take i xs) \<inter> set (drop (Suc i) xs) = {}"
by (metis True d disjoint_insert(1) distinct_append id_take_nth_drop list.set(2))
then show ?thesis
using d False anot \<open>i < length xs\<close> by (simp add: upd_conv_take_nth_drop)
qed
next
case False with d show ?thesis by auto
qed
lemma distinct_concat:
"\<lbrakk> distinct xs;
\<And> ys. ys \<in> set xs \<Longrightarrow> distinct ys;
\<And> ys zs. \<lbrakk> ys \<in> set xs ; zs \<in> set xs ; ys \<noteq> zs \<rbrakk> \<Longrightarrow> set ys \<inter> set zs = {}
\<rbrakk> \<Longrightarrow> distinct (concat xs)"
by (induct xs) auto
text \<open>An iff-version of @{thm distinct_concat} is available further down as \<open>distinct_concat_iff\<close>.\<close>
text \<open>The following indexed characterisation of \<^const>\<open>distinct\<close> is best avoided, but it is sometimes useful.\<close>
lemma distinct_conv_nth: "distinct xs = (\<forall>i < size xs. \<forall>j < size xs. i \<noteq> j \<longrightarrow> xs!i \<noteq> xs!j)"
proof (induct xs)
case (Cons x xs)
show ?case
apply (auto simp add: Cons nth_Cons less_Suc_eq_le split: nat.split_asm)
apply (metis Suc_leI in_set_conv_nth length_pos_if_in_set lessI less_imp_le_nat less_nat_zero_code)
apply (metis Suc_le_eq)
done
qed auto
lemma nth_eq_iff_index_eq:
"\<lbrakk> distinct xs; i < length xs; j < length xs \<rbrakk> \<Longrightarrow> (xs!i = xs!j) = (i = j)"
by(auto simp: distinct_conv_nth)
lemma distinct_Ex1:
"distinct xs \<Longrightarrow> x \<in> set xs \<Longrightarrow> (\<exists>!i. i < length xs \<and> xs ! i = x)"
by (auto simp: in_set_conv_nth nth_eq_iff_index_eq)
lemma inj_on_nth: "distinct xs \<Longrightarrow> \<forall>i \<in> I. i < length xs \<Longrightarrow> inj_on (nth xs) I"
by (rule inj_onI) (simp add: nth_eq_iff_index_eq)
lemma bij_betw_nth:
assumes "distinct xs" "A = {..<length xs}" "B = set xs"
shows "bij_betw ((!) xs) A B"
using assms unfolding bij_betw_def
by (auto intro!: inj_on_nth simp: set_conv_nth)
lemma set_update_distinct: "\<lbrakk> distinct xs; n < length xs \<rbrakk> \<Longrightarrow>
set(xs[n := x]) = insert x (set xs - {xs!n})"
by(auto simp: set_eq_iff in_set_conv_nth nth_list_update nth_eq_iff_index_eq)
lemma distinct_swap[simp]: "\<lbrakk> i < size xs; j < size xs\<rbrakk> \<Longrightarrow>
distinct(xs[i := xs!j, j := xs!i]) = distinct xs"
apply (simp add: distinct_conv_nth nth_list_update)
apply (safe; metis)
done
lemma set_swap[simp]:
"\<lbrakk> i < size xs; j < size xs \<rbrakk> \<Longrightarrow> set(xs[i := xs!j, j := xs!i]) = set xs"
by(simp add: set_conv_nth nth_list_update) metis
lemma distinct_card: "distinct xs \<Longrightarrow> card (set xs) = size xs"
by (induct xs) auto
lemma card_distinct: "card (set xs) = size xs \<Longrightarrow> distinct xs"
proof (induct xs)
case (Cons x xs)
show ?case
proof (cases "x \<in> set xs")
case False with Cons show ?thesis by simp
next
case True with Cons.prems
have "card (set xs) = Suc (length xs)"
by (simp add: card_insert_if split: if_split_asm)
moreover have "card (set xs) \<le> length xs" by (rule card_length)
ultimately have False by simp
thus ?thesis ..
qed
qed simp
lemma distinct_length_filter: "distinct xs \<Longrightarrow> length (filter P xs) = card ({x. P x} Int set xs)"
by (induct xs) (auto)
lemma not_distinct_decomp: "\<not> distinct ws \<Longrightarrow> \<exists>xs ys zs y. ws = xs@[y]@ys@[y]@zs"
proof (induct n == "length ws" arbitrary:ws)
case (Suc n ws)
then show ?case
using length_Suc_conv [of ws n]
apply (auto simp: eq_commute)
apply (metis append_Nil in_set_conv_decomp_first)
by (metis append_Cons)
qed simp
lemma not_distinct_conv_prefix:
defines "dec as xs y ys \<equiv> y \<in> set xs \<and> distinct xs \<and> as = xs @ y # ys"
shows "\<not>distinct as \<longleftrightarrow> (\<exists>xs y ys. dec as xs y ys)" (is "?L = ?R")
proof
assume "?L" then show "?R"
proof (induct "length as" arbitrary: as rule: less_induct)
case less
obtain xs ys zs y where decomp: "as = (xs @ y # ys) @ y # zs"
using not_distinct_decomp[OF less.prems] by auto
show ?case
proof (cases "distinct (xs @ y # ys)")
case True
with decomp have "dec as (xs @ y # ys) y zs" by (simp add: dec_def)
then show ?thesis by blast
next
case False
with less decomp obtain xs' y' ys' where "dec (xs @ y # ys) xs' y' ys'"
by atomize_elim auto
with decomp have "dec as xs' y' (ys' @ y # zs)" by (simp add: dec_def)
then show ?thesis by blast
qed
qed
qed (auto simp: dec_def)
lemma distinct_product:
"distinct xs \<Longrightarrow> distinct ys \<Longrightarrow> distinct (List.product xs ys)"
by (induct xs) (auto intro: inj_onI simp add: distinct_map)
lemma distinct_product_lists:
assumes "\<forall>xs \<in> set xss. distinct xs"
shows "distinct (product_lists xss)"
using assms proof (induction xss)
case (Cons xs xss) note * = this
then show ?case
proof (cases "product_lists xss")
case Nil then show ?thesis by (induct xs) simp_all
next
case (Cons ps pss) with * show ?thesis
by (auto intro!: inj_onI distinct_concat simp add: distinct_map)
qed
qed simp
lemma length_remdups_concat:
"length (remdups (concat xss)) = card (\<Union>xs\<in>set xss. set xs)"
by (simp add: distinct_card [symmetric])
lemma remdups_append2:
"remdups (xs @ remdups ys) = remdups (xs @ ys)"
by(induction xs) auto
lemma length_remdups_card_conv: "length(remdups xs) = card(set xs)"
proof -
have xs: "concat[xs] = xs" by simp
from length_remdups_concat[of "[xs]"] show ?thesis unfolding xs by simp
qed
lemma remdups_remdups: "remdups (remdups xs) = remdups xs"
by (induct xs) simp_all
lemma distinct_butlast:
assumes "distinct xs"
shows "distinct (butlast xs)"
proof (cases "xs = []")
case False
from \<open>xs \<noteq> []\<close> obtain ys y where "xs = ys @ [y]" by (cases xs rule: rev_cases) auto
with \<open>distinct xs\<close> show ?thesis by simp
qed (auto)
lemma remdups_map_remdups:
"remdups (map f (remdups xs)) = remdups (map f xs)"
by (induct xs) simp_all
lemma distinct_zipI1:
assumes "distinct xs"
shows "distinct (zip xs ys)"
proof (rule zip_obtain_same_length)
fix xs' :: "'a list" and ys' :: "'b list" and n
assume "length xs' = length ys'"
assume "xs' = take n xs"
with assms have "distinct xs'" by simp
with \<open>length xs' = length ys'\<close> show "distinct (zip xs' ys')"
by (induct xs' ys' rule: list_induct2) (auto elim: in_set_zipE)
qed
lemma distinct_zipI2:
assumes "distinct ys"
shows "distinct (zip xs ys)"
proof (rule zip_obtain_same_length)
fix xs' :: "'b list" and ys' :: "'a list" and n
assume "length xs' = length ys'"
assume "ys' = take n ys"
with assms have "distinct ys'" by simp
with \<open>length xs' = length ys'\<close> show "distinct (zip xs' ys')"
by (induct xs' ys' rule: list_induct2) (auto elim: in_set_zipE)
qed
lemma set_take_disj_set_drop_if_distinct:
"distinct vs \<Longrightarrow> i \<le> j \<Longrightarrow> set (take i vs) \<inter> set (drop j vs) = {}"
by (auto simp: in_set_conv_nth distinct_conv_nth)
(* The next two lemmas help Sledgehammer. *)
lemma distinct_singleton: "distinct [x]" by simp
lemma distinct_length_2_or_more:
"distinct (a # b # xs) \<longleftrightarrow> (a \<noteq> b \<and> distinct (a # xs) \<and> distinct (b # xs))"
by force
lemma remdups_adj_altdef: "(remdups_adj xs = ys) \<longleftrightarrow>
(\<exists>f::nat => nat. mono f \<and> f ` {0 ..< size xs} = {0 ..< size ys}
\<and> (\<forall>i < size xs. xs!i = ys!(f i))
\<and> (\<forall>i. i + 1 < size xs \<longrightarrow> (xs!i = xs!(i+1) \<longleftrightarrow> f i = f(i+1))))" (is "?L \<longleftrightarrow> (\<exists>f. ?p f xs ys)")
proof
assume ?L
then show "\<exists>f. ?p f xs ys"
proof (induct xs arbitrary: ys rule: remdups_adj.induct)
case (1 ys)
thus ?case by (intro exI[of _ id]) (auto simp: mono_def)
next
case (2 x ys)
thus ?case by (intro exI[of _ id]) (auto simp: mono_def)
next
case (3 x1 x2 xs ys)
let ?xs = "x1 # x2 # xs"
let ?cond = "x1 = x2"
define zs where "zs = remdups_adj (x2 # xs)"
from 3(1-2)[of zs]
obtain f where p: "?p f (x2 # xs) zs" unfolding zs_def by (cases ?cond) auto
then have f0: "f 0 = 0"
by (intro mono_image_least[where f=f]) blast+
from p have mono: "mono f" and f_xs_zs: "f ` {0..<length (x2 # xs)} = {0..<length zs}" by auto
have ys: "ys = (if x1 = x2 then zs else x1 # zs)"
unfolding 3(3)[symmetric] zs_def by auto
have zs0: "zs ! 0 = x2" unfolding zs_def by (induct xs) auto
have zsne: "zs \<noteq> []" unfolding zs_def by (induct xs) auto
let ?Succ = "if ?cond then id else Suc"
let ?x1 = "if ?cond then id else Cons x1"
let ?f = "\<lambda> i. if i = 0 then 0 else ?Succ (f (i - 1))"
have ys: "ys = ?x1 zs" unfolding ys by (cases ?cond, auto)
have mono: "mono ?f" using \<open>mono f\<close> unfolding mono_def by auto
show ?case unfolding ys
proof (intro exI[of _ ?f] conjI allI impI)
show "mono ?f" by fact
next
fix i assume i: "i < length ?xs"
with p show "?xs ! i = ?x1 zs ! (?f i)" using zs0 by auto
next
fix i assume i: "i + 1 < length ?xs"
with p show "(?xs ! i = ?xs ! (i + 1)) = (?f i = ?f (i + 1))"
by (cases i) (auto simp: f0)
next
have id: "{0 ..< length (?x1 zs)} = insert 0 (?Succ ` {0 ..< length zs})"
using zsne by (cases ?cond, auto)
{ fix i assume "i < Suc (length xs)"
hence "Suc i \<in> {0..<Suc (Suc (length xs))} \<inter> Collect ((<) 0)" by auto
from imageI[OF this, of "\<lambda>i. ?Succ (f (i - Suc 0))"]
have "?Succ (f i) \<in> (\<lambda>i. ?Succ (f (i - Suc 0))) ` ({0..<Suc (Suc (length xs))} \<inter> Collect ((<) 0))" by auto
}
then show "?f ` {0 ..< length ?xs} = {0 ..< length (?x1 zs)}"
unfolding id f_xs_zs[symmetric] by auto
qed
qed
next
assume "\<exists> f. ?p f xs ys"
then show ?L
proof (induct xs arbitrary: ys rule: remdups_adj.induct)
case 1 then show ?case by auto
next
case (2 x) then obtain f where f_img: "f ` {0 ..< size [x]} = {0 ..< size ys}"
and f_nth: "\<And>i. i < size [x] \<Longrightarrow> [x]!i = ys!(f i)"
by blast
have "length ys = card (f ` {0 ..< size [x]})"
using f_img by auto
then have *: "length ys = 1" by auto
then have "f 0 = 0" using f_img by auto
with * show ?case using f_nth by (cases ys) auto
next
case (3 x1 x2 xs)
from "3.prems" obtain f where f_mono: "mono f"
and f_img: "f ` {0..<length (x1 # x2 # xs)} = {0..<length ys}"
and f_nth:
"\<And>i. i < length (x1 # x2 # xs) \<Longrightarrow> (x1 # x2 # xs) ! i = ys ! f i"
"\<And>i. i + 1 < length (x1 # x2 #xs) \<Longrightarrow>
((x1 # x2 # xs) ! i = (x1 # x2 # xs) ! (i + 1)) = (f i = f (i + 1))"
by blast
show ?case
proof cases
assume "x1 = x2"
let ?f' = "f \<circ> Suc"
have "remdups_adj (x1 # xs) = ys"
proof (intro "3.hyps" exI conjI impI allI)
show "mono ?f'"
using f_mono by (simp add: mono_iff_le_Suc)
next
have "?f' ` {0 ..< length (x1 # xs)} = f ` {Suc 0 ..< length (x1 # x2 # xs)}"
using less_Suc_eq_0_disj by auto
also have "\<dots> = f ` {0 ..< length (x1 # x2 # xs)}"
proof -
have "f 0 = f (Suc 0)" using \<open>x1 = x2\<close> f_nth[of 0] by simp
then show ?thesis
using less_Suc_eq_0_disj by auto
qed
also have "\<dots> = {0 ..< length ys}" by fact
finally show "?f' ` {0 ..< length (x1 # xs)} = {0 ..< length ys}" .
qed (insert f_nth[of "Suc i" for i], auto simp: \<open>x1 = x2\<close>)
then show ?thesis using \<open>x1 = x2\<close> by simp
next
assume "x1 \<noteq> x2"
have two: "Suc (Suc 0) \<le> length ys"
proof -
have "2 = card {f 0, f 1}" using \<open>x1 \<noteq> x2\<close> f_nth[of 0] by auto
also have "\<dots> \<le> card (f ` {0..< length (x1 # x2 # xs)})"
by (rule card_mono) auto
finally show ?thesis using f_img by simp
qed
have "f 0 = 0" using f_mono f_img by (rule mono_image_least) simp
have "f (Suc 0) = Suc 0"
proof (rule ccontr)
assume "f (Suc 0) \<noteq> Suc 0"
then have "Suc 0 < f (Suc 0)" using f_nth[of 0] \<open>x1 \<noteq> x2\<close> \<open>f 0 = 0\<close> by auto
then have "\<And>i. Suc 0 < f (Suc i)"
using f_mono
by (meson Suc_le_mono le0 less_le_trans monoD)
then have "Suc 0 \<noteq> f i" for i using \<open>f 0 = 0\<close>
by (cases i) fastforce+
then have "Suc 0 \<notin> f ` {0 ..< length (x1 # x2 # xs)}" by auto
then show False using f_img two by auto
qed
obtain ys' where "ys = x1 # x2 # ys'"
using two f_nth[of 0] f_nth[of 1]
by (auto simp: Suc_le_length_iff \<open>f 0 = 0\<close> \<open>f (Suc 0) = Suc 0\<close>)
have Suc0_le_f_Suc: "Suc 0 \<le> f (Suc i)" for i
by (metis Suc_le_mono \<open>f (Suc 0) = Suc 0\<close> f_mono le0 mono_def)
define f' where "f' x = f (Suc x) - 1" for x
have f_Suc: "f (Suc i) = Suc (f' i)" for i
using Suc0_le_f_Suc[of i] by (auto simp: f'_def)
have "remdups_adj (x2 # xs) = (x2 # ys')"
proof (intro "3.hyps" exI conjI impI allI)
show "mono f'"
using Suc0_le_f_Suc f_mono by (auto simp: f'_def mono_iff_le_Suc le_diff_iff)
next
have "f' ` {0 ..< length (x2 # xs)} = (\<lambda>x. f x - 1) ` {0 ..< length (x1 # x2 #xs)}"
by (auto simp: f'_def \<open>f 0 = 0\<close> \<open>f (Suc 0) = Suc 0\<close> image_def Bex_def less_Suc_eq_0_disj)
also have "\<dots> = (\<lambda>x. x - 1) ` f ` {0 ..< length (x1 # x2 #xs)}"
by (auto simp: image_comp)
also have "\<dots> = (\<lambda>x. x - 1) ` {0 ..< length ys}"
by (simp only: f_img)
also have "\<dots> = {0 ..< length (x2 # ys')}"
using \<open>ys = _\<close> by (fastforce intro: rev_image_eqI)
finally show "f' ` {0 ..< length (x2 # xs)} = {0 ..< length (x2 # ys')}" .
qed (insert f_nth[of "Suc i" for i] \<open>x1 \<noteq> x2\<close>, auto simp add: f_Suc \<open>ys = _\<close>)
then show ?case using \<open>ys = _\<close> \<open>x1 \<noteq> x2\<close> by simp
qed
qed
qed
lemma hd_remdups_adj[simp]: "hd (remdups_adj xs) = hd xs"
by (induction xs rule: remdups_adj.induct) simp_all
lemma remdups_adj_Cons: "remdups_adj (x # xs) =
(case remdups_adj xs of [] \<Rightarrow> [x] | y # xs \<Rightarrow> if x = y then y # xs else x # y # xs)"
by (induct xs arbitrary: x) (auto split: list.splits)
lemma remdups_adj_append_two:
"remdups_adj (xs @ [x,y]) = remdups_adj (xs @ [x]) @ (if x = y then [] else [y])"
by (induct xs rule: remdups_adj.induct, simp_all)
lemma remdups_adj_adjacent:
"Suc i < length (remdups_adj xs) \<Longrightarrow> remdups_adj xs ! i \<noteq> remdups_adj xs ! Suc i"
proof (induction xs arbitrary: i rule: remdups_adj.induct)
case (3 x y xs i)
thus ?case by (cases i, cases "x = y") (simp, auto simp: hd_conv_nth[symmetric])
qed simp_all
lemma remdups_adj_rev[simp]: "remdups_adj (rev xs) = rev (remdups_adj xs)"
by (induct xs rule: remdups_adj.induct, simp_all add: remdups_adj_append_two)
lemma remdups_adj_length[simp]: "length (remdups_adj xs) \<le> length xs"
by (induct xs rule: remdups_adj.induct, auto)
lemma remdups_adj_length_ge1[simp]: "xs \<noteq> [] \<Longrightarrow> length (remdups_adj xs) \<ge> Suc 0"
by (induct xs rule: remdups_adj.induct, simp_all)
lemma remdups_adj_Nil_iff[simp]: "remdups_adj xs = [] \<longleftrightarrow> xs = []"
by (induct xs rule: remdups_adj.induct, simp_all)
lemma remdups_adj_set[simp]: "set (remdups_adj xs) = set xs"
by (induct xs rule: remdups_adj.induct, simp_all)
lemma last_remdups_adj [simp]: "last (remdups_adj xs) = last xs"
by (induction xs rule: remdups_adj.induct) auto
lemma remdups_adj_Cons_alt[simp]: "x # tl (remdups_adj (x # xs)) = remdups_adj (x # xs)"
by (induct xs rule: remdups_adj.induct, auto)
lemma remdups_adj_distinct: "distinct xs \<Longrightarrow> remdups_adj xs = xs"
by (induct xs rule: remdups_adj.induct, simp_all)
lemma remdups_adj_append:
"remdups_adj (xs\<^sub>1 @ x # xs\<^sub>2) = remdups_adj (xs\<^sub>1 @ [x]) @ tl (remdups_adj (x # xs\<^sub>2))"
by (induct xs\<^sub>1 rule: remdups_adj.induct, simp_all)
lemma remdups_adj_singleton:
"remdups_adj xs = [x] \<Longrightarrow> xs = replicate (length xs) x"
by (induct xs rule: remdups_adj.induct, auto split: if_split_asm)
lemma remdups_adj_map_injective:
assumes "inj f"
shows "remdups_adj (map f xs) = map f (remdups_adj xs)"
by (induct xs rule: remdups_adj.induct) (auto simp add: injD[OF assms])
lemma remdups_adj_replicate:
"remdups_adj (replicate n x) = (if n = 0 then [] else [x])"
by (induction n) (auto simp: remdups_adj_Cons)
lemma remdups_upt [simp]: "remdups [m..<n] = [m..<n]"
proof (cases "m \<le> n")
case False then show ?thesis by simp
next
case True then obtain q where "n = m + q"
by (auto simp add: le_iff_add)
moreover have "remdups [m..<m + q] = [m..<m + q]"
by (induct q) simp_all
ultimately show ?thesis by simp
qed
lemma successively_remdups_adjI:
"successively P xs \<Longrightarrow> successively P (remdups_adj xs)"
by (induction xs rule: remdups_adj.induct) (auto simp: successively_Cons)
lemma successively_remdups_adj_iff:
"(\<And>x. x \<in> set xs \<Longrightarrow> P x x) \<Longrightarrow>
successively P (remdups_adj xs) \<longleftrightarrow> successively P xs"
by (induction xs rule: remdups_adj.induct)(auto simp: successively_Cons)
lemma remdups_adj_Cons':
"remdups_adj (x # xs) = x # remdups_adj (dropWhile (\<lambda>y. y = x) xs)"
by (induction xs) auto
lemma remdups_adj_singleton_iff:
"length (remdups_adj xs) = Suc 0 \<longleftrightarrow> xs \<noteq> [] \<and> xs = replicate (length xs) (hd xs)"
proof safe
assume *: "xs = replicate (length xs) (hd xs)" and [simp]: "xs \<noteq> []"
show "length (remdups_adj xs) = Suc 0"
by (subst *) (auto simp: remdups_adj_replicate)
next
assume "length (remdups_adj xs) = Suc 0"
thus "xs = replicate (length xs) (hd xs)"
by (induction xs rule: remdups_adj.induct) (auto split: if_splits)
qed auto
lemma tl_remdups_adj:
"ys \<noteq> [] \<Longrightarrow> tl (remdups_adj ys) = remdups_adj (dropWhile (\<lambda>x. x = hd ys) (tl ys))"
by (cases ys) (simp_all add: remdups_adj_Cons')
lemma remdups_adj_append_dropWhile:
"remdups_adj (xs @ y # ys) = remdups_adj (xs @ [y]) @ remdups_adj (dropWhile (\<lambda>x. x = y) ys)"
by (subst remdups_adj_append) (simp add: tl_remdups_adj)
lemma remdups_adj_append':
assumes "xs = [] \<or> ys = [] \<or> last xs \<noteq> hd ys"
shows "remdups_adj (xs @ ys) = remdups_adj xs @ remdups_adj ys"
proof -
have ?thesis if [simp]: "xs \<noteq> []" "ys \<noteq> []" and "last xs \<noteq> hd ys"
proof -
obtain x xs' where xs: "xs = xs' @ [x]"
by (cases xs rule: rev_cases) auto
have "remdups_adj (xs' @ x # ys) = remdups_adj (xs' @ [x]) @ remdups_adj ys"
using \<open>last xs \<noteq> hd ys\<close> unfolding xs
by (metis (full_types) dropWhile_eq_self_iff last_snoc remdups_adj_append_dropWhile)
thus ?thesis by (simp add: xs)
qed
thus ?thesis using assms
by (cases "xs = []"; cases "ys = []") auto
qed
lemma remdups_adj_append'': "xs \<noteq> []
\<Longrightarrow> remdups_adj (xs @ ys) = remdups_adj xs @ remdups_adj (dropWhile (\<lambda>y. y = last xs) ys)"
by (induction xs rule: remdups_adj.induct) (auto simp: remdups_adj_Cons')
subsubsection \<open>\<^const>\<open>distinct_adj\<close>\<close>
lemma distinct_adj_Nil [simp]: "distinct_adj []"
and distinct_adj_singleton [simp]: "distinct_adj [x]"
and distinct_adj_Cons_Cons [simp]: "distinct_adj (x # y # xs) \<longleftrightarrow> x \<noteq> y \<and> distinct_adj (y # xs)"
by (auto simp: distinct_adj_def)
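text \<open>For illustration (hand-worked): \<open>distinct_adj [1, 2, 1]\<close> holds, since only neighbouring
elements must differ, although \<open>distinct [1, 2, 1]\<close> fails; conversely \<open>distinct_adj [1, 2, 2, 3]\<close>
fails because of the repeated \<open>2\<close>.\<close>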
lemma distinct_adj_Cons: "distinct_adj (x # xs) \<longleftrightarrow> xs = [] \<or> x \<noteq> hd xs \<and> distinct_adj xs"
by (cases xs) auto
lemma distinct_adj_ConsD: "distinct_adj (x # xs) \<Longrightarrow> distinct_adj xs"
by (cases xs) auto
lemma distinct_adj_remdups_adj[simp]: "distinct_adj (remdups_adj xs)"
by (induction xs rule: remdups_adj.induct) (auto simp: distinct_adj_Cons)
lemma distinct_adj_altdef: "distinct_adj xs \<longleftrightarrow> remdups_adj xs = xs"
proof
assume "remdups_adj xs = xs"
with distinct_adj_remdups_adj[of xs] show "distinct_adj xs"
by simp
next
assume "distinct_adj xs"
thus "remdups_adj xs = xs"
by (induction xs rule: induct_list012) auto
qed
lemma distinct_adj_rev [simp]: "distinct_adj (rev xs) \<longleftrightarrow> distinct_adj xs"
by (simp add: distinct_adj_def eq_commute)
lemma distinct_adj_append_iff:
"distinct_adj (xs @ ys) \<longleftrightarrow>
distinct_adj xs \<and> distinct_adj ys \<and> (xs = [] \<or> ys = [] \<or> last xs \<noteq> hd ys)"
by (auto simp: distinct_adj_def successively_append_iff)
lemma distinct_adj_appendD1 [dest]: "distinct_adj (xs @ ys) \<Longrightarrow> distinct_adj xs"
and distinct_adj_appendD2 [dest]: "distinct_adj (xs @ ys) \<Longrightarrow> distinct_adj ys"
by (auto simp: distinct_adj_append_iff)
lemma distinct_adj_mapI: "distinct_adj xs \<Longrightarrow> inj_on f (set xs) \<Longrightarrow> distinct_adj (map f xs)"
unfolding distinct_adj_def successively_map
by (erule successively_mono) (auto simp: inj_on_def)
lemma distinct_adj_mapD: "distinct_adj (map f xs) \<Longrightarrow> distinct_adj xs"
unfolding distinct_adj_def successively_map by (erule successively_mono) auto
lemma distinct_adj_map_iff: "inj_on f (set xs) \<Longrightarrow> distinct_adj (map f xs) \<longleftrightarrow> distinct_adj xs"
using distinct_adj_mapD distinct_adj_mapI by blast
subsubsection \<open>\<^const>\<open>insert\<close>\<close>
lemma in_set_insert [simp]:
"x \<in> set xs \<Longrightarrow> List.insert x xs = xs"
by (simp add: List.insert_def)
lemma not_in_set_insert [simp]:
"x \<notin> set xs \<Longrightarrow> List.insert x xs = x # xs"
by (simp add: List.insert_def)
lemma insert_Nil [simp]: "List.insert x [] = [x]"
by simp
lemma set_insert [simp]: "set (List.insert x xs) = insert x (set xs)"
by (auto simp add: List.insert_def)
lemma distinct_insert [simp]: "distinct (List.insert x xs) = distinct xs"
by (simp add: List.insert_def)
lemma insert_remdups:
"List.insert x (remdups xs) = remdups (List.insert x xs)"
by (simp add: List.insert_def)
subsubsection \<open>\<^const>\<open>List.union\<close>\<close>
text\<open>This is all one should need to know about union:\<close>
lemma set_union[simp]: "set (List.union xs ys) = set xs \<union> set ys"
unfolding List.union_def
by(induct xs arbitrary: ys) simp_all
lemma distinct_union[simp]: "distinct(List.union xs ys) = distinct ys"
unfolding List.union_def
by(induct xs arbitrary: ys) simp_all
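(* A small illustration of the two lemmas above, assuming List.union_def is essentially
   a fold of List.insert over the first argument: List.union [1::nat, 2, 3] [4, 2] has
   set {1, 2, 3, 4} and is distinct because [4, 2] is distinct, whereas
   List.union [1::nat] [2, 2] is not distinct because [2, 2] is not, matching distinct_union. *)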
subsubsection \<open>\<^const>\<open>List.find\<close>\<close>
lemma find_None_iff: "List.find P xs = None \<longleftrightarrow> \<not> (\<exists>x. x \<in> set xs \<and> P x)"
proof (induction xs)
case Nil thus ?case by simp
next
case (Cons x xs) thus ?case by (fastforce split: if_splits)
qed
lemmas find_None_iff2 = find_None_iff[THEN eq_iff_swap]
lemma find_Some_iff:
"List.find P xs = Some x \<longleftrightarrow>
(\<exists>i<length xs. P (xs!i) \<and> x = xs!i \<and> (\<forall>j<i. \<not> P (xs!j)))"
proof (induction xs)
case Nil thus ?case by simp
next
case (Cons x xs) thus ?case
apply(auto simp: nth_Cons' split: if_splits)
using diff_Suc_1[unfolded One_nat_def] less_Suc_eq_0_disj by fastforce
qed
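(* For instance, List.find (\<lambda>n::nat. 3 < n) [1, 4, 2, 5] = Some 4: index 1 is the first
   position whose element satisfies the predicate, exactly as find_Some_iff describes;
   with \<lambda>n::nat. 9 < n the result is None, as in find_None_iff. *)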
lemmas find_Some_iff2 = find_Some_iff[THEN eq_iff_swap]
lemma find_cong[fundef_cong]:
assumes "xs = ys" and "\<And>x. x \<in> set ys \<Longrightarrow> P x = Q x"
shows "List.find P xs = List.find Q ys"
proof (cases "List.find P xs")
case None thus ?thesis by (metis find_None_iff assms)
next
case (Some x)
hence "List.find Q ys = Some x" using assms
by (auto simp add: find_Some_iff)
thus ?thesis using Some by auto
qed
lemma find_dropWhile:
"List.find P xs = (case dropWhile (Not \<circ> P) xs
of [] \<Rightarrow> None
| x # _ \<Rightarrow> Some x)"
by (induct xs) simp_all
subsubsection \<open>\<^const>\<open>count_list\<close>\<close>
text \<open>This library is intentionally minimal. See the remark about multisets at the point above where @{const count_list} is defined.\<close>
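(* For example, count_list [0::nat, 1, 0] 0 = 2 and count_list [0::nat, 1, 0] 2 = 0,
   the latter being an instance of count_list_0_iff below. *)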
lemma count_list_append[simp]: "count_list (xs @ ys) x = count_list xs x + count_list ys x"
by (induction xs) simp_all
lemma count_list_0_iff: "count_list xs x = 0 \<longleftrightarrow> x \<notin> set xs"
by (induction xs) simp_all
lemma count_notin[simp]: "x \<notin> set xs \<Longrightarrow> count_list xs x = 0"
by(simp add: count_list_0_iff)
lemma count_le_length: "count_list xs x \<le> length xs"
by (induction xs) auto
lemma count_list_map_ge: "count_list xs x \<le> count_list (map f xs) (f x)"
by (induction xs) auto
lemma count_list_inj_map:
"\<lbrakk> inj_on f (set xs); x \<in> set xs \<rbrakk> \<Longrightarrow> count_list (map f xs) (f x) = count_list xs x"
by (induction xs) (simp, fastforce)
lemma count_list_rev[simp]: "count_list (rev xs) x = count_list xs x"
by (induction xs) auto
lemma sum_count_set:
"set xs \<subseteq> X \<Longrightarrow> finite X \<Longrightarrow> sum (count_list xs) X = length xs"
proof (induction xs arbitrary: X)
case (Cons x xs)
then show ?case
using sum.remove [of X x "count_list xs"]
by (auto simp: sum.If_cases simp flip: diff_eq)
qed simp
subsubsection \<open>\<^const>\<open>List.extract\<close>\<close>
lemma extract_None_iff: "List.extract P xs = None \<longleftrightarrow> \<not> (\<exists> x\<in>set xs. P x)"
by(auto simp: extract_def dropWhile_eq_Cons_conv split: list.splits)
(metis in_set_conv_decomp)
lemma extract_SomeE:
"List.extract P xs = Some (ys, y, zs) \<Longrightarrow>
xs = ys @ y # zs \<and> P y \<and> \<not> (\<exists> y \<in> set ys. P y)"
by(auto simp: extract_def dropWhile_eq_Cons_conv split: list.splits)
lemma extract_Some_iff:
"List.extract P xs = Some (ys, y, zs) \<longleftrightarrow>
xs = ys @ y # zs \<and> P y \<and> \<not> (\<exists> y \<in> set ys. P y)"
by(auto simp: extract_def dropWhile_eq_Cons_conv dest: set_takeWhileD split: list.splits)
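(* A concrete instance of extract_Some_iff: List.extract (\<lambda>n::nat. 3 < n) [1, 3, 4, 5]
   = Some ([1, 3], 4, [5]), i.e. the prefix without a match, the first match, and the
   remaining suffix. *)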
lemma extract_Nil_code[code]: "List.extract P [] = None"
by(simp add: extract_def)
lemma extract_Cons_code[code]:
"List.extract P (x # xs) = (if P x then Some ([], x, xs) else
(case List.extract P xs of
None \<Rightarrow> None |
Some (ys, y, zs) \<Rightarrow> Some (x#ys, y, zs)))"
by(auto simp add: extract_def comp_def split: list.splits)
(metis dropWhile_eq_Nil_conv list.distinct(1))
subsubsection \<open>\<^const>\<open>remove1\<close>\<close>
lemma remove1_append:
"remove1 x (xs @ ys) =
(if x \<in> set xs then remove1 x xs @ ys else xs @ remove1 x ys)"
by (induct xs) auto
lemma remove1_commute: "remove1 x (remove1 y zs) = remove1 y (remove1 x zs)"
by (induct zs) auto
lemma in_set_remove1[simp]:
"a \<noteq> b \<Longrightarrow> a \<in> set(remove1 b xs) = (a \<in> set xs)"
by (induct xs) auto
lemma set_remove1_subset: "set(remove1 x xs) \<subseteq> set xs"
by (induct xs) auto
lemma set_remove1_eq [simp]: "distinct xs \<Longrightarrow> set(remove1 x xs) = set xs - {x}"
by (induct xs) auto
lemma length_remove1:
"length(remove1 x xs) = (if x \<in> set xs then length xs - 1 else length xs)"
by (induct xs) (auto dest!:length_pos_if_in_set)
lemma remove1_filter_not[simp]:
"\<not> P x \<Longrightarrow> remove1 x (filter P xs) = filter P xs"
by(induct xs) auto
lemma filter_remove1:
"filter Q (remove1 x xs) = remove1 x (filter Q xs)"
by (induct xs) auto
lemma notin_set_remove1[simp]: "x \<notin> set xs \<Longrightarrow> x \<notin> set(remove1 y xs)"
by(insert set_remove1_subset) fast
lemma distinct_remove1[simp]: "distinct xs \<Longrightarrow> distinct(remove1 x xs)"
by (induct xs) simp_all
lemma remove1_remdups:
"distinct xs \<Longrightarrow> remove1 x (remdups xs) = remdups (remove1 x xs)"
by (induct xs) simp_all
lemma remove1_idem: "x \<notin> set xs \<Longrightarrow> remove1 x xs = xs"
by (induct xs) simp_all
lemma remove1_split:
"a \<in> set xs \<Longrightarrow> remove1 a xs = ys \<longleftrightarrow> (\<exists>ls rs. xs = ls @ a # rs \<and> a \<notin> set ls \<and> ys = ls @ rs)"
by (metis remove1.simps(2) remove1_append split_list_first)
subsubsection \<open>\<^const>\<open>removeAll\<close>\<close>
lemma removeAll_filter_not_eq:
"removeAll x = filter (\<lambda>y. x \<noteq> y)"
proof
fix xs
show "removeAll x xs = filter (\<lambda>y. x \<noteq> y) xs"
by (induct xs) auto
qed
lemma removeAll_append[simp]:
"removeAll x (xs @ ys) = removeAll x xs @ removeAll x ys"
by (induct xs) auto
lemma set_removeAll[simp]: "set(removeAll x xs) = set xs - {x}"
by (induct xs) auto
lemma removeAll_id[simp]: "x \<notin> set xs \<Longrightarrow> removeAll x xs = xs"
by (induct xs) auto
(* Needs count:: 'a \<Rightarrow> 'a list \<Rightarrow> nat
lemma length_removeAll:
"length(removeAll x xs) = length xs - count x xs"
*)
lemma removeAll_filter_not[simp]:
"\<not> P x \<Longrightarrow> removeAll x (filter P xs) = filter P xs"
by(induct xs) auto
lemma distinct_removeAll:
"distinct xs \<Longrightarrow> distinct (removeAll x xs)"
by (simp add: removeAll_filter_not_eq)
lemma distinct_remove1_removeAll:
"distinct xs \<Longrightarrow> remove1 x xs = removeAll x xs"
by (induct xs) simp_all
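(* The difference between the two constants in one example: remove1 removes only the
   first occurrence, removeAll every occurrence, so remove1 (2::nat) [1, 2, 3, 2] = [1, 3, 2]
   while removeAll (2::nat) [1, 2, 3, 2] = [1, 3]; on distinct lists both coincide,
   as the preceding lemma states. *)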
lemma map_removeAll_inj_on: "inj_on f (insert x (set xs)) \<Longrightarrow>
map f (removeAll x xs) = removeAll (f x) (map f xs)"
by (induct xs) (simp_all add:inj_on_def)
lemma map_removeAll_inj: "inj f \<Longrightarrow>
map f (removeAll x xs) = removeAll (f x) (map f xs)"
by (rule map_removeAll_inj_on, erule subset_inj_on, rule subset_UNIV)
lemma length_removeAll_less_eq [simp]:
"length (removeAll x xs) \<le> length xs"
by (simp add: removeAll_filter_not_eq)
lemma length_removeAll_less [termination_simp]:
"x \<in> set xs \<Longrightarrow> length (removeAll x xs) < length xs"
by (auto dest: length_filter_less simp add: removeAll_filter_not_eq)
lemma distinct_concat_iff: "distinct (concat xs) \<longleftrightarrow>
distinct (removeAll [] xs) \<and>
(\<forall>ys. ys \<in> set xs \<longrightarrow> distinct ys) \<and>
(\<forall>ys zs. ys \<in> set xs \<and> zs \<in> set xs \<and> ys \<noteq> zs \<longrightarrow> set ys \<inter> set zs = {})"
apply (induct xs)
apply(simp_all, safe, auto)
by (metis Int_iff UN_I empty_iff equals0I set_empty)
subsubsection \<open>\<^const>\<open>replicate\<close>\<close>
lemma length_replicate [simp]: "length (replicate n x) = n"
by (induct n) auto
lemma replicate_eqI:
assumes "length xs = n" and "\<And>y. y \<in> set xs \<Longrightarrow> y = x"
shows "xs = replicate n x"
using assms
proof (induct xs arbitrary: n)
case Nil then show ?case by simp
next
case (Cons x xs) then show ?case by (cases n) simp_all
qed
lemma Ex_list_of_length: "\<exists>xs. length xs = n"
by (rule exI[of _ "replicate n undefined"]) simp
lemma map_replicate [simp]: "map f (replicate n x) = replicate n (f x)"
by (induct n) auto
lemma map_replicate_const:
"map (\<lambda> x. k) lst = replicate (length lst) k"
by (induct lst) auto
lemma replicate_app_Cons_same:
"(replicate n x) @ (x # xs) = x # replicate n x @ xs"
by (induct n) auto
lemma rev_replicate [simp]: "rev (replicate n x) = replicate n x"
by (induct n) (auto simp: replicate_app_Cons_same)
lemma replicate_add: "replicate (n + m) x = replicate n x @ replicate m x"
by (induct n) auto
text\<open>Courtesy of Matthias Daum:\<close>
lemma append_replicate_commute:
"replicate n x @ replicate k x = replicate k x @ replicate n x"
by (metis add.commute replicate_add)
text\<open>Courtesy of Andreas Lochbihler:\<close>
lemma filter_replicate:
"filter P (replicate n x) = (if P x then replicate n x else [])"
by(induct n) auto
lemma hd_replicate [simp]: "n \<noteq> 0 \<Longrightarrow> hd (replicate n x) = x"
by (induct n) auto
lemma tl_replicate [simp]: "tl (replicate n x) = replicate (n - 1) x"
by (induct n) auto
lemma last_replicate [simp]: "n \<noteq> 0 \<Longrightarrow> last (replicate n x) = x"
by (atomize (full), induct n) auto
lemma nth_replicate[simp]: "i < n \<Longrightarrow> (replicate n x)!i = x"
by (induct n arbitrary: i)(auto simp: nth_Cons split: nat.split)
text\<open>Courtesy of Matthias Daum (2 lemmas):\<close>
lemma take_replicate[simp]: "take i (replicate k x) = replicate (min i k) x"
proof (cases "k \<le> i")
case True
then show ?thesis
by (simp add: min_def)
next
case False
then have "replicate k x = replicate i x @ replicate (k - i) x"
by (simp add: replicate_add [symmetric])
then show ?thesis
by (simp add: min_def)
qed
lemma drop_replicate[simp]: "drop i (replicate k x) = replicate (k-i) x"
proof (induct k arbitrary: i)
case (Suc k)
then show ?case
by (simp add: drop_Cons')
qed simp
lemma set_replicate_Suc: "set (replicate (Suc n) x) = {x}"
by (induct n) auto
lemma set_replicate [simp]: "n \<noteq> 0 \<Longrightarrow> set (replicate n x) = {x}"
by (fast dest!: not0_implies_Suc intro!: set_replicate_Suc)
lemma set_replicate_conv_if: "set (replicate n x) = (if n = 0 then {} else {x})"
by auto
lemma in_set_replicate[simp]: "(x \<in> set (replicate n y)) = (x = y \<and> n \<noteq> 0)"
by (simp add: set_replicate_conv_if)
lemma card_set_1_iff_replicate:
"card(set xs) = Suc 0 \<longleftrightarrow> xs \<noteq> [] \<and> (\<exists>x. xs = replicate (length xs) x)"
by (metis card_1_singleton_iff empty_iff insert_iff replicate_eqI set_empty2 set_replicate)
lemma Ball_set_replicate[simp]:
"(\<forall>x \<in> set(replicate n a). P x) = (P a \<or> n=0)"
by(simp add: set_replicate_conv_if)
lemma Bex_set_replicate[simp]:
"(\<exists>x \<in> set(replicate n a). P x) = (P a \<and> n\<noteq>0)"
by(simp add: set_replicate_conv_if)
lemma replicate_append_same:
"replicate i x @ [x] = x # replicate i x"
by (induct i) simp_all
lemma map_replicate_trivial:
"map (\<lambda>i. x) [0..<i] = replicate i x"
by (induct i) (simp_all add: replicate_append_same)
lemma concat_replicate_trivial[simp]:
"concat (replicate i []) = []"
by (induct i) (auto simp add: map_replicate_const)
lemma replicate_empty[simp]: "(replicate n x = []) \<longleftrightarrow> n=0"
by (induct n) auto
lemmas empty_replicate[simp] = replicate_empty[THEN eq_iff_swap]
lemma replicate_eq_replicate[simp]:
"(replicate m x = replicate n y) \<longleftrightarrow> (m=n \<and> (m\<noteq>0 \<longrightarrow> x=y))"
proof (induct m arbitrary: n)
case (Suc m n)
then show ?case
by (induct n) auto
qed simp
lemma takeWhile_replicate[simp]:
"takeWhile P (replicate n x) = (if P x then replicate n x else [])"
using takeWhile_eq_Nil_iff by fastforce
lemma dropWhile_replicate[simp]:
"dropWhile P (replicate n x) = (if P x then [] else replicate n x)"
using dropWhile_eq_self_iff by fastforce
lemma replicate_length_filter:
"replicate (length (filter (\<lambda>y. x = y) xs)) x = filter (\<lambda>y. x = y) xs"
by (induct xs) auto
lemma comm_append_are_replicate:
"xs @ ys = ys @ xs \<Longrightarrow> \<exists>m n zs. concat (replicate m zs) = xs \<and> concat (replicate n zs) = ys"
proof (induction "length (xs @ ys) + length xs" arbitrary: xs ys rule: less_induct)
case less
consider (1) "length ys < length xs" | (2) "xs = []" | (3) "length xs \<le> length ys \<and> xs \<noteq> []"
by linarith
then show ?case
proof (cases)
case 1
then show ?thesis
using less.hyps[OF _ less.prems[symmetric]] nat_add_left_cancel_less by auto
next
case 2
then have "concat (replicate 0 ys) = xs \<and> concat (replicate 1 ys) = ys"
by simp
then show ?thesis
by blast
next
case 3
then have "length xs \<le> length ys" and "xs \<noteq> []"
by blast+
from \<open>length xs \<le> length ys\<close> and \<open>xs @ ys = ys @ xs\<close>
obtain ws where "ys = xs @ ws"
by (auto simp: append_eq_append_conv2)
from this and \<open>xs \<noteq> []\<close>
have "length ws < length ys"
by simp
from \<open>xs @ ys = ys @ xs\<close>[unfolded \<open>ys = xs @ ws\<close>]
have "xs @ ws = ws @ xs"
by simp
from less.hyps[OF _ this] \<open>length ws < length ys\<close>
obtain m n' zs where "concat (replicate m zs) = xs" and "concat (replicate n' zs) = ws"
by auto
then have "concat (replicate (m+n') zs) = ys"
using \<open>ys = xs @ ws\<close>
by (simp add: replicate_add)
then show ?thesis
using \<open>concat (replicate m zs) = xs\<close> by blast
qed
qed
lemma comm_append_is_replicate:
fixes xs ys :: "'a list"
assumes "xs \<noteq> []" "ys \<noteq> []"
assumes "xs @ ys = ys @ xs"
shows "\<exists>n zs. n > 1 \<and> concat (replicate n zs) = xs @ ys"
proof -
obtain m n zs where "concat (replicate m zs) = xs"
and "concat (replicate n zs) = ys"
using comm_append_are_replicate[OF assms(3)] by blast
then have "m + n > 1" and "concat (replicate (m+n) zs) = xs @ ys"
using \<open>xs \<noteq> []\<close> and \<open>ys \<noteq> []\<close>
by (auto simp: replicate_add)
then show ?thesis by blast
qed
lemma Cons_replicate_eq:
"x # xs = replicate n y \<longleftrightarrow> x = y \<and> n > 0 \<and> xs = replicate (n - 1) x"
by (induct n) auto
lemma replicate_length_same:
"(\<forall>y\<in>set xs. y = x) \<Longrightarrow> replicate (length xs) x = xs"
by (induct xs) simp_all
lemma foldr_replicate [simp]:
"foldr f (replicate n x) = f x ^^ n"
by (induct n) (simp_all)
lemma fold_replicate [simp]:
"fold f (replicate n x) = f x ^^ n"
by (subst foldr_fold [symmetric]) simp_all
subsubsection \<open>\<^const>\<open>enumerate\<close>\<close>
lemma enumerate_simps [simp, code]:
"enumerate n [] = []"
"enumerate n (x # xs) = (n, x) # enumerate (Suc n) xs"
by (simp_all add: enumerate_eq_zip upt_rec)
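(* Unfolding enumerate_simps, e.g. enumerate (2::nat) [x, y, z] = [(2, x), (3, y), (4, z)]:
   the indices count up from the given start while the elements are kept unchanged,
   cf. map_fst_enumerate and map_snd_enumerate below. *)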
lemma length_enumerate [simp]:
"length (enumerate n xs) = length xs"
by (simp add: enumerate_eq_zip)
lemma map_fst_enumerate [simp]:
"map fst (enumerate n xs) = [n..<n + length xs]"
by (simp add: enumerate_eq_zip)
lemma map_snd_enumerate [simp]:
"map snd (enumerate n xs) = xs"
by (simp add: enumerate_eq_zip)
lemma in_set_enumerate_eq:
"p \<in> set (enumerate n xs) \<longleftrightarrow> n \<le> fst p \<and> fst p < length xs + n \<and> nth xs (fst p - n) = snd p"
proof -
{ fix m
assume "n \<le> m"
moreover assume "m < length xs + n"
ultimately have "[n..<n + length xs] ! (m - n) = m \<and>
xs ! (m - n) = xs ! (m - n) \<and> m - n < length xs" by auto
then have "\<exists>q. [n..<n + length xs] ! q = m \<and>
xs ! q = xs ! (m - n) \<and> q < length xs" ..
} then show ?thesis by (cases p) (auto simp add: enumerate_eq_zip in_set_zip)
qed
lemma nth_enumerate_eq: "m < length xs \<Longrightarrow> enumerate n xs ! m = (n + m, xs ! m)"
by (simp add: enumerate_eq_zip)
lemma enumerate_replicate_eq:
"enumerate n (replicate m a) = map (\<lambda>q. (q, a)) [n..<n + m]"
by (rule pair_list_eqI)
(simp_all add: enumerate_eq_zip comp_def map_replicate_const)
lemma enumerate_Suc_eq:
"enumerate (Suc n) xs = map (apfst Suc) (enumerate n xs)"
by (rule pair_list_eqI)
(simp_all add: not_le, simp del: map_map add: map_Suc_upt map_map [symmetric])
lemma distinct_enumerate [simp]:
"distinct (enumerate n xs)"
by (simp add: enumerate_eq_zip distinct_zipI1)
lemma enumerate_append_eq:
"enumerate n (xs @ ys) = enumerate n xs @ enumerate (n + length xs) ys"
by (simp add: enumerate_eq_zip add.assoc zip_append2)
lemma enumerate_map_upt:
"enumerate n (map f [n..<m]) = map (\<lambda>k. (k, f k)) [n..<m]"
by (cases "n \<le> m") (simp_all add: zip_map2 zip_same_conv_map enumerate_eq_zip)
subsubsection \<open>\<^const>\<open>rotate1\<close> and \<^const>\<open>rotate\<close>\<close>
lemma rotate0[simp]: "rotate 0 = id"
by(simp add:rotate_def)
lemma rotate_Suc[simp]: "rotate (Suc n) xs = rotate1(rotate n xs)"
by(simp add:rotate_def)
lemma rotate_add:
"rotate (m+n) = rotate m \<circ> rotate n"
by(simp add:rotate_def funpow_add)
lemma rotate_rotate: "rotate m (rotate n xs) = rotate (m+n) xs"
by(simp add:rotate_add)
lemma rotate1_map: "rotate1 (map f xs) = map f (rotate1 xs)"
by(cases xs) simp_all
lemma rotate1_rotate_swap: "rotate1 (rotate n xs) = rotate n (rotate1 xs)"
by(simp add:rotate_def funpow_swap1)
lemma rotate1_length01[simp]: "length xs \<le> 1 \<Longrightarrow> rotate1 xs = xs"
by(cases xs) simp_all
lemma rotate_length01[simp]: "length xs \<le> 1 \<Longrightarrow> rotate n xs = xs"
by (induct n) (simp_all add:rotate_def)
lemma rotate1_hd_tl: "xs \<noteq> [] \<Longrightarrow> rotate1 xs = tl xs @ [hd xs]"
by (cases xs) simp_all
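(* For instance, rotate1 [1::nat, 2, 3, 4] = [2, 3, 4, 1] and
   rotate 2 [1::nat, 2, 3, 4] = [3, 4, 1, 2] = drop 2 [1, 2, 3, 4] @ take 2 [1, 2, 3, 4],
   as rotate_drop_take below makes precise. *)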
lemma rotate_drop_take:
"rotate n xs = drop (n mod length xs) xs @ take (n mod length xs) xs"
proof (induct n)
case (Suc n)
show ?case
proof (cases "xs = []")
case False
then show ?thesis
proof (cases "n mod length xs = 0")
case True
then show ?thesis
by (auto simp add: mod_Suc False Suc.hyps drop_Suc rotate1_hd_tl take_Suc Suc_length_conv)
next
case False
with \<open>xs \<noteq> []\<close> Suc
show ?thesis
by (simp add: rotate_def mod_Suc rotate1_hd_tl drop_Suc[symmetric] drop_tl[symmetric]
take_hd_drop linorder_not_le)
qed
qed simp
qed simp
lemma rotate_conv_mod: "rotate n xs = rotate (n mod length xs) xs"
by(simp add:rotate_drop_take)
lemma rotate_id[simp]: "n mod length xs = 0 \<Longrightarrow> rotate n xs = xs"
by(simp add:rotate_drop_take)
lemma length_rotate1[simp]: "length(rotate1 xs) = length xs"
by (cases xs) simp_all
lemma length_rotate[simp]: "length(rotate n xs) = length xs"
by (induct n arbitrary: xs) (simp_all add:rotate_def)
lemma distinct1_rotate[simp]: "distinct(rotate1 xs) = distinct xs"
by (cases xs) auto
lemma distinct_rotate[simp]: "distinct(rotate n xs) = distinct xs"
by (induct n) (simp_all add:rotate_def)
lemma rotate_map: "rotate n (map f xs) = map f (rotate n xs)"
by(simp add:rotate_drop_take take_map drop_map)
lemma set_rotate1[simp]: "set(rotate1 xs) = set xs"
by (cases xs) auto
lemma set_rotate[simp]: "set(rotate n xs) = set xs"
by (induct n) (simp_all add:rotate_def)
lemma rotate1_replicate[simp]: "rotate1 (replicate n a) = replicate n a"
by (cases n) (simp_all add: replicate_append_same)
lemma rotate1_is_Nil_conv[simp]: "(rotate1 xs = []) = (xs = [])"
by (cases xs) auto
lemma rotate_is_Nil_conv[simp]: "(rotate n xs = []) = (xs = [])"
by (induct n) (simp_all add:rotate_def)
lemma rotate_rev:
"rotate n (rev xs) = rev(rotate (length xs - (n mod length xs)) xs)"
proof (cases "length xs = 0 \<or> n mod length xs = 0")
case False
then show ?thesis
by(simp add:rotate_drop_take rev_drop rev_take)
qed force
lemma hd_rotate_conv_nth:
assumes "xs \<noteq> []" shows "hd(rotate n xs) = xs!(n mod length xs)"
proof -
have "n mod length xs < length xs"
using assms by simp
then show ?thesis
by (metis drop_eq_Nil hd_append2 hd_drop_conv_nth leD rotate_drop_take)
qed
lemma rotate_append: "rotate (length l) (l @ q) = q @ l"
by (induct l arbitrary: q) (auto simp add: rotate1_rotate_swap)
lemma nth_rotate:
\<open>rotate m xs ! n = xs ! ((m + n) mod length xs)\<close> if \<open>n < length xs\<close>
using that apply (auto simp add: rotate_drop_take nth_append not_less less_diff_conv ac_simps dest!: le_Suc_ex)
apply (metis add.commute mod_add_right_eq mod_less)
apply (metis (no_types, lifting) Nat.diff_diff_right add.commute add_diff_cancel_right' diff_le_self dual_order.strict_trans2 length_greater_0_conv less_nat_zero_code list.size(3) mod_add_right_eq mod_add_self2 mod_le_divisor mod_less)
done
lemma nth_rotate1:
\<open>rotate1 xs ! n = xs ! (Suc n mod length xs)\<close> if \<open>n < length xs\<close>
using that nth_rotate [of n xs 1] by simp
lemma inj_rotate1: "inj rotate1"
proof
fix xs ys :: "'a list" show "rotate1 xs = rotate1 ys \<Longrightarrow> xs = ys"
by (cases xs; cases ys; simp)
qed
lemma surj_rotate1: "surj rotate1"
proof (safe, simp_all)
fix xs :: "'a list" show "xs \<in> range rotate1"
proof (cases xs rule: rev_exhaust)
case Nil
hence "xs = rotate1 []" by auto
thus ?thesis by fast
next
case (snoc as a)
hence "xs = rotate1 (a#as)" by force
thus ?thesis by fast
qed
qed
lemma bij_rotate1: "bij (rotate1 :: 'a list \<Rightarrow> 'a list)"
using bijI inj_rotate1 surj_rotate1 by blast
lemma rotate1_fixpoint_card: "rotate1 xs = xs \<Longrightarrow> xs = [] \<or> card(set xs) = 1"
by(induction xs) (auto simp: card_insert_if[OF finite_set] append_eq_Cons_conv)
subsubsection \<open>\<^const>\<open>nths\<close> --- a generalization of \<^const>\<open>nth\<close> to sets\<close>
lemma nths_empty [simp]: "nths xs {} = []"
by (auto simp add: nths_def)
lemma nths_nil [simp]: "nths [] A = []"
by (auto simp add: nths_def)
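(* nths selects exactly the positions whose index lies in the given set, keeping their
   order, e.g. nths [a, b, c, d] {0, 2} = [a, c]. *)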
lemma nths_all: "\<forall>i < length xs. i \<in> I \<Longrightarrow> nths xs I = xs"
apply (simp add: nths_def)
apply (subst filter_True)
apply (auto simp: in_set_zip subset_iff)
done
lemma length_nths:
"length (nths xs I) = card{i. i < length xs \<and> i \<in> I}"
by(simp add: nths_def length_filter_conv_card cong:conj_cong)
lemma nths_shift_lemma_Suc:
"map fst (filter (\<lambda>p. P(Suc(snd p))) (zip xs is)) =
map fst (filter (\<lambda>p. P(snd p)) (zip xs (map Suc is)))"
proof (induct xs arbitrary: "is")
case (Cons x xs "is")
show ?case
by (cases "is") (auto simp add: Cons.hyps)
qed simp
lemma nths_shift_lemma:
"map fst (filter (\<lambda>p. snd p \<in> A) (zip xs [i..<i + length xs])) =
map fst (filter (\<lambda>p. snd p + i \<in> A) (zip xs [0..<length xs]))"
by (induct xs rule: rev_induct) (simp_all add: add.commute)
lemma nths_append:
"nths (l @ l') A = nths l A @ nths l' {j. j + length l \<in> A}"
unfolding nths_def
proof (induct l' rule: rev_induct)
case (snoc x xs)
then show ?case
by (simp add: upt_add_eq_append[of 0] nths_shift_lemma add.commute)
qed auto
lemma nths_Cons:
"nths (x # l) A = (if 0 \<in> A then [x] else []) @ nths l {j. Suc j \<in> A}"
proof (induct l rule: rev_induct)
case (snoc x xs)
then show ?case
by (simp flip: append_Cons add: nths_append)
qed (auto simp: nths_def)
lemma nths_map: "nths (map f xs) I = map f (nths xs I)"
by(induction xs arbitrary: I) (simp_all add: nths_Cons)
lemma set_nths: "set(nths xs I) = {xs!i|i. i<size xs \<and> i \<in> I}"
by (induct xs arbitrary: I) (auto simp: nths_Cons nth_Cons split:nat.split dest!: gr0_implies_Suc)
lemma set_nths_subset: "set(nths xs I) \<subseteq> set xs"
by(auto simp add:set_nths)
lemma notin_set_nthsI[simp]: "x \<notin> set xs \<Longrightarrow> x \<notin> set(nths xs I)"
by(auto simp add:set_nths)
lemma in_set_nthsD: "x \<in> set(nths xs I) \<Longrightarrow> x \<in> set xs"
by(auto simp add:set_nths)
lemma nths_singleton [simp]: "nths [x] A = (if 0 \<in> A then [x] else [])"
by (simp add: nths_Cons)
lemma distinct_nthsI[simp]: "distinct xs \<Longrightarrow> distinct (nths xs I)"
by (induct xs arbitrary: I) (auto simp: nths_Cons)
lemma nths_upt_eq_take [simp]: "nths l {..<n} = take n l"
by (induct l rule: rev_induct) (simp_all split: nat_diff_split add: nths_append)
lemma nths_nths: "nths (nths xs A) B = nths xs {i \<in> A. \<exists>j \<in> B. card {i' \<in> A. i' < i} = j}"
by (induction xs arbitrary: A B) (auto simp add: nths_Cons card_less_Suc card_less_Suc2)
lemma drop_eq_nths: "drop n xs = nths xs {i. i \<ge> n}"
by (induction xs arbitrary: n) (auto simp add: nths_Cons nths_all drop_Cons' intro: arg_cong2[where f=nths, OF refl])
lemma nths_drop: "nths (drop n xs) I = nths xs ((+) n ` I)"
by(force simp: drop_eq_nths nths_nths simp flip: atLeastLessThan_iff
intro: arg_cong2[where f=nths, OF refl])
lemma filter_eq_nths: "filter P xs = nths xs {i. i<length xs \<and> P(xs!i)}"
by(induction xs) (auto simp: nths_Cons)
lemma filter_in_nths:
"distinct xs \<Longrightarrow> filter (%x. x \<in> set (nths xs s)) xs = nths xs s"
proof (induct xs arbitrary: s)
case Nil thus ?case by simp
next
case (Cons a xs)
then have "\<forall>x. x \<in> set xs \<longrightarrow> x \<noteq> a" by auto
with Cons show ?case by(simp add: nths_Cons cong:filter_cong)
qed
subsubsection \<open>\<^const>\<open>subseqs\<close> and \<^const>\<open>List.n_lists\<close>\<close>
lemma length_subseqs: "length (subseqs xs) = 2 ^ length xs"
by (induct xs) (simp_all add: Let_def)
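(* For example, subseqs [1::nat, 2] consists of the 2 ^ 2 = 4 subsequences
   [1, 2], [1], [2] and [], in line with length_subseqs above and subseqs_powset below. *)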
lemma subseqs_powset: "set ` set (subseqs xs) = Pow (set xs)"
proof -
have aux: "\<And>x A. set ` Cons x ` A = insert x ` set ` A"
by (auto simp add: image_def)
have "set (map set (subseqs xs)) = Pow (set xs)"
by (induct xs) (simp_all add: aux Let_def Pow_insert Un_commute comp_def del: map_map)
then show ?thesis by simp
qed
lemma distinct_set_subseqs:
assumes "distinct xs"
shows "distinct (map set (subseqs xs))"
by (simp add: assms card_Pow card_distinct distinct_card length_subseqs subseqs_powset)
lemma n_lists_Nil [simp]: "List.n_lists n [] = (if n = 0 then [[]] else [])"
by (induct n) simp_all
lemma length_n_lists_elem: "ys \<in> set (List.n_lists n xs) \<Longrightarrow> length ys = n"
by (induct n arbitrary: ys) auto
lemma set_n_lists: "set (List.n_lists n xs) = {ys. length ys = n \<and> set ys \<subseteq> set xs}"
proof (rule set_eqI)
fix ys :: "'a list"
show "ys \<in> set (List.n_lists n xs) \<longleftrightarrow> ys \<in> {ys. length ys = n \<and> set ys \<subseteq> set xs}"
proof -
have "ys \<in> set (List.n_lists n xs) \<Longrightarrow> length ys = n"
by (induct n arbitrary: ys) auto
moreover have "\<And>x. ys \<in> set (List.n_lists n xs) \<Longrightarrow> x \<in> set ys \<Longrightarrow> x \<in> set xs"
by (induct n arbitrary: ys) auto
moreover have "set ys \<subseteq> set xs \<Longrightarrow> ys \<in> set (List.n_lists (length ys) xs)"
by (induct ys) auto
ultimately show ?thesis by auto
qed
qed
lemma subseqs_refl: "xs \<in> set (subseqs xs)"
by (induct xs) (simp_all add: Let_def)
lemma subset_subseqs: "X \<subseteq> set xs \<Longrightarrow> X \<in> set ` set (subseqs xs)"
unfolding subseqs_powset by simp
lemma Cons_in_subseqsD: "y # ys \<in> set (subseqs xs) \<Longrightarrow> ys \<in> set (subseqs xs)"
by (induct xs) (auto simp: Let_def)
lemma subseqs_distinctD: "\<lbrakk> ys \<in> set (subseqs xs); distinct xs \<rbrakk> \<Longrightarrow> distinct ys"
proof (induct xs arbitrary: ys)
case (Cons x xs ys)
then show ?case
by (auto simp: Let_def) (metis Pow_iff contra_subsetD image_eqI subseqs_powset)
qed simp
subsubsection \<open>\<^const>\<open>splice\<close>\<close>
lemma splice_Nil2 [simp]: "splice xs [] = xs"
by (cases xs) simp_all
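(* splice interleaves its arguments, starting with the first one, e.g.
   splice [1::nat, 2, 3] [4, 5] = [1, 4, 2, 5, 3]; its length is 3 + 2,
   as length_splice below states. *)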
lemma length_splice[simp]: "length(splice xs ys) = length xs + length ys"
by (induct xs ys rule: splice.induct) auto
lemma split_Nil_iff[simp]: "splice xs ys = [] \<longleftrightarrow> xs = [] \<and> ys = []"
by (induct xs ys rule: splice.induct) auto
lemma splice_replicate[simp]: "splice (replicate m x) (replicate n x) = replicate (m+n) x"
proof (induction "replicate m x" "replicate n x" arbitrary: m n rule: splice.induct)
case (2 x xs)
then show ?case
by (auto simp add: Cons_replicate_eq dest: gr0_implies_Suc)
qed auto
subsubsection \<open>\<^const>\<open>shuffles\<close>\<close>
lemma shuffles_commutes: "shuffles xs ys = shuffles ys xs"
by (induction xs ys rule: shuffles.induct) (simp_all add: Un_commute)
lemma Nil_in_shuffles[simp]: "[] \<in> shuffles xs ys \<longleftrightarrow> xs = [] \<and> ys = []"
by (induct xs ys rule: shuffles.induct) auto
lemma shufflesE:
"zs \<in> shuffles xs ys \<Longrightarrow>
(zs = xs \<Longrightarrow> ys = [] \<Longrightarrow> P) \<Longrightarrow>
(zs = ys \<Longrightarrow> xs = [] \<Longrightarrow> P) \<Longrightarrow>
(\<And>x xs' z zs'. xs = x # xs' \<Longrightarrow> zs = z # zs' \<Longrightarrow> x = z \<Longrightarrow> zs' \<in> shuffles xs' ys \<Longrightarrow> P) \<Longrightarrow>
(\<And>y ys' z zs'. ys = y # ys' \<Longrightarrow> zs = z # zs' \<Longrightarrow> y = z \<Longrightarrow> zs' \<in> shuffles xs ys' \<Longrightarrow> P) \<Longrightarrow> P"
by (induct xs ys rule: shuffles.induct) auto
lemma Cons_in_shuffles_iff:
"z # zs \<in> shuffles xs ys \<longleftrightarrow>
(xs \<noteq> [] \<and> hd xs = z \<and> zs \<in> shuffles (tl xs) ys \<or>
ys \<noteq> [] \<and> hd ys = z \<and> zs \<in> shuffles xs (tl ys))"
by (induct xs ys rule: shuffles.induct) auto
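(* For example, shuffles [1::nat, 2] [3] = {[1, 2, 3], [1, 3, 2], [3, 1, 2]}: all
   interleavings that keep the relative order of each argument. One of them, [1, 3, 2],
   is splice [1, 2] [3], cf. splice_in_shuffles below. *)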
lemma splice_in_shuffles [simp, intro]: "splice xs ys \<in> shuffles xs ys"
by (induction xs ys rule: splice.induct) (simp_all add: Cons_in_shuffles_iff shuffles_commutes)
lemma Nil_in_shufflesI: "xs = [] \<Longrightarrow> ys = [] \<Longrightarrow> [] \<in> shuffles xs ys"
by simp
lemma Cons_in_shuffles_leftI: "zs \<in> shuffles xs ys \<Longrightarrow> z # zs \<in> shuffles (z # xs) ys"
by (cases ys) auto
lemma Cons_in_shuffles_rightI: "zs \<in> shuffles xs ys \<Longrightarrow> z # zs \<in> shuffles xs (z # ys)"
by (cases xs) auto
lemma finite_shuffles [simp, intro]: "finite (shuffles xs ys)"
by (induction xs ys rule: shuffles.induct) simp_all
lemma length_shuffles: "zs \<in> shuffles xs ys \<Longrightarrow> length zs = length xs + length ys"
by (induction xs ys arbitrary: zs rule: shuffles.induct) auto
lemma set_shuffles: "zs \<in> shuffles xs ys \<Longrightarrow> set zs = set xs \<union> set ys"
by (induction xs ys arbitrary: zs rule: shuffles.induct) auto
lemma distinct_disjoint_shuffles:
assumes "distinct xs" "distinct ys" "set xs \<inter> set ys = {}" "zs \<in> shuffles xs ys"
shows "distinct zs"
using assms
proof (induction xs ys arbitrary: zs rule: shuffles.induct)
case (3 x xs y ys)
show ?case
proof (cases zs)
case (Cons z zs')
with "3.prems" and "3.IH"[of zs'] show ?thesis by (force dest: set_shuffles)
qed simp_all
qed simp_all
lemma Cons_shuffles_subset1: "(#) x ` shuffles xs ys \<subseteq> shuffles (x # xs) ys"
by (cases ys) auto
lemma Cons_shuffles_subset2: "(#) y ` shuffles xs ys \<subseteq> shuffles xs (y # ys)"
by (cases xs) auto
lemma filter_shuffles:
"filter P ` shuffles xs ys = shuffles (filter P xs) (filter P ys)"
proof -
have *: "filter P ` (#) x ` A = (if P x then (#) x ` filter P ` A else filter P ` A)" for x A
by (auto simp: image_image)
show ?thesis
by (induction xs ys rule: shuffles.induct)
(simp_all split: if_splits add: image_Un * Un_absorb1 Un_absorb2
Cons_shuffles_subset1 Cons_shuffles_subset2)
qed
lemma filter_shuffles_disjoint1:
assumes "set xs \<inter> set ys = {}" "zs \<in> shuffles xs ys"
shows "filter (\<lambda>x. x \<in> set xs) zs = xs" (is "filter ?P _ = _")
and "filter (\<lambda>x. x \<notin> set xs) zs = ys" (is "filter ?Q _ = _")
using assms
proof -
from assms have "filter ?P zs \<in> filter ?P ` shuffles xs ys" by blast
also have "filter ?P ` shuffles xs ys = shuffles (filter ?P xs) (filter ?P ys)"
by (rule filter_shuffles)
also have "filter ?P xs = xs" by (rule filter_True) simp_all
also have "filter ?P ys = []" by (rule filter_False) (insert assms(1), auto)
also have "shuffles xs [] = {xs}" by simp
finally show "filter ?P zs = xs" by simp
next
from assms have "filter ?Q zs \<in> filter ?Q ` shuffles xs ys" by blast
also have "filter ?Q ` shuffles xs ys = shuffles (filter ?Q xs) (filter ?Q ys)"
by (rule filter_shuffles)
also have "filter ?Q ys = ys" by (rule filter_True) (insert assms(1), auto)
also have "filter ?Q xs = []" by (rule filter_False) (insert assms(1), auto)
also have "shuffles [] ys = {ys}" by simp
finally show "filter ?Q zs = ys" by simp
qed
lemma filter_shuffles_disjoint2:
assumes "set xs \<inter> set ys = {}" "zs \<in> shuffles xs ys"
shows "filter (\<lambda>x. x \<in> set ys) zs = ys" "filter (\<lambda>x. x \<notin> set ys) zs = xs"
using filter_shuffles_disjoint1[of ys xs zs] assms
by (simp_all add: shuffles_commutes Int_commute)
lemma partition_in_shuffles:
"xs \<in> shuffles (filter P xs) (filter (\<lambda>x. \<not>P x) xs)"
proof (induction xs)
case (Cons x xs)
show ?case
proof (cases "P x")
case True
hence "x # xs \<in> (#) x ` shuffles (filter P xs) (filter (\<lambda>x. \<not>P x) xs)"
by (intro imageI Cons.IH)
also have "\<dots> \<subseteq> shuffles (filter P (x # xs)) (filter (\<lambda>x. \<not>P x) (x # xs))"
by (simp add: True Cons_shuffles_subset1)
finally show ?thesis .
next
case False
hence "x # xs \<in> (#) x ` shuffles (filter P xs) (filter (\<lambda>x. \<not>P x) xs)"
by (intro imageI Cons.IH)
also have "\<dots> \<subseteq> shuffles (filter P (x # xs)) (filter (\<lambda>x. \<not>P x) (x # xs))"
by (simp add: False Cons_shuffles_subset2)
finally show ?thesis .
qed
qed auto
lemma inv_image_partition:
assumes "\<And>x. x \<in> set xs \<Longrightarrow> P x" "\<And>y. y \<in> set ys \<Longrightarrow> \<not>P y"
shows "partition P -` {(xs, ys)} = shuffles xs ys"
proof (intro equalityI subsetI)
fix zs assume zs: "zs \<in> shuffles xs ys"
hence [simp]: "set zs = set xs \<union> set ys" by (rule set_shuffles)
from assms have "filter P zs = filter (\<lambda>x. x \<in> set xs) zs"
"filter (\<lambda>x. \<not>P x) zs = filter (\<lambda>x. x \<in> set ys) zs"
by (intro filter_cong refl; force)+
moreover from assms have "set xs \<inter> set ys = {}" by auto
ultimately show "zs \<in> partition P -` {(xs, ys)}" using zs
by (simp add: o_def filter_shuffles_disjoint1 filter_shuffles_disjoint2)
next
fix zs assume "zs \<in> partition P -` {(xs, ys)}"
thus "zs \<in> shuffles xs ys" using partition_in_shuffles[of zs] by (auto simp: o_def)
qed
subsubsection \<open>Transpose\<close>
function transpose where
"transpose [] = []" |
"transpose ([] # xss) = transpose xss" |
"transpose ((x#xs) # xss) =
(x # [h. (h#t) \<leftarrow> xss]) # transpose (xs # [t. (h#t) \<leftarrow> xss])"
by pat_completeness auto
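(* transpose also works on ragged lists of lists, e.g.
   transpose [[1::nat, 2], [3, 4, 5], [6]] = [[1, 3, 6], [2, 4], [5]];
   the result has length 3, the maximum row length, cf. length_transpose below.
   Termination is established after the auxiliary lemmas that follow. *)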
lemma transpose_aux_filter_head:
"concat (map (case_list [] (\<lambda>h t. [h])) xss) =
map (\<lambda>xs. hd xs) (filter (\<lambda>ys. ys \<noteq> []) xss)"
by (induct xss) (auto split: list.split)
lemma transpose_aux_filter_tail:
"concat (map (case_list [] (\<lambda>h t. [t])) xss) =
map (\<lambda>xs. tl xs) (filter (\<lambda>ys. ys \<noteq> []) xss)"
by (induct xss) (auto split: list.split)
lemma transpose_aux_max:
"max (Suc (length xs)) (foldr (\<lambda>xs. max (length xs)) xss 0) =
Suc (max (length xs) (foldr (\<lambda>x. max (length x - Suc 0)) (filter (\<lambda>ys. ys \<noteq> []) xss) 0))"
(is "max _ ?foldB = Suc (max _ ?foldA)")
proof (cases "(filter (\<lambda>ys. ys \<noteq> []) xss) = []")
case True
hence "foldr (\<lambda>xs. max (length xs)) xss 0 = 0"
proof (induct xss)
case (Cons x xs)
then have "x = []" by (cases x) auto
with Cons show ?case by auto
qed simp
thus ?thesis using True by simp
next
case False
have foldA: "?foldA = foldr (\<lambda>x. max (length x)) (filter (\<lambda>ys. ys \<noteq> []) xss) 0 - 1"
by (induct xss) auto
have foldB: "?foldB = foldr (\<lambda>x. max (length x)) (filter (\<lambda>ys. ys \<noteq> []) xss) 0"
by (induct xss) auto
have "0 < ?foldB"
proof -
from False
obtain z zs where zs: "(filter (\<lambda>ys. ys \<noteq> []) xss) = z#zs" by (auto simp: neq_Nil_conv)
hence "z \<in> set (filter (\<lambda>ys. ys \<noteq> []) xss)" by auto
hence "z \<noteq> []" by auto
thus ?thesis
unfolding foldB zs
by (auto simp: max_def intro: less_le_trans)
qed
thus ?thesis
unfolding foldA foldB max_Suc_Suc[symmetric]
by simp
qed
termination transpose
by (relation "measure (\<lambda>xs. foldr (\<lambda>xs. max (length xs)) xs 0 + length xs)")
(auto simp: transpose_aux_filter_tail foldr_map comp_def transpose_aux_max less_Suc_eq_le)
lemma transpose_empty: "(transpose xs = []) \<longleftrightarrow> (\<forall>x \<in> set xs. x = [])"
by (induct rule: transpose.induct) simp_all
lemma length_transpose:
fixes xs :: "'a list list"
shows "length (transpose xs) = foldr (\<lambda>xs. max (length xs)) xs 0"
by (induct rule: transpose.induct)
(auto simp: transpose_aux_filter_tail foldr_map comp_def transpose_aux_max
max_Suc_Suc[symmetric] simp del: max_Suc_Suc)
lemma nth_transpose:
fixes xs :: "'a list list"
assumes "i < length (transpose xs)"
shows "transpose xs ! i = map (\<lambda>xs. xs ! i) (filter (\<lambda>ys. i < length ys) xs)"
using assms proof (induct arbitrary: i rule: transpose.induct)
case (3 x xs xss)
define XS where "XS = (x # xs) # xss"
hence [simp]: "XS \<noteq> []" by auto
thus ?case
proof (cases i)
case 0
thus ?thesis by (simp add: transpose_aux_filter_head hd_conv_nth)
next
case (Suc j)
have *: "\<And>xss. xs # map tl xss = map tl ((x#xs)#xss)" by simp
have **: "\<And>xss. (x#xs) # filter (\<lambda>ys. ys \<noteq> []) xss = filter (\<lambda>ys. ys \<noteq> []) ((x#xs)#xss)" by simp
{ fix xs :: \<open>'a list\<close> have "Suc j < length xs \<longleftrightarrow> xs \<noteq> [] \<and> j < length xs - Suc 0"
by (cases xs) simp_all
} note *** = this
have j_less: "j < length (transpose (xs # concat (map (case_list [] (\<lambda>h t. [t])) xss)))"
using "3.prems" by (simp add: transpose_aux_filter_tail length_transpose Suc)
show ?thesis
unfolding transpose.simps \<open>i = Suc j\<close> nth_Cons_Suc "3.hyps"[OF j_less]
apply (auto simp: transpose_aux_filter_tail filter_map comp_def length_transpose * ** *** XS_def[symmetric])
by (simp add: nth_tl)
qed
qed simp_all
lemma transpose_map_map:
"transpose (map (map f) xs) = map (map f) (transpose xs)"
proof (rule nth_equalityI)
have [simp]: "length (transpose (map (map f) xs)) = length (transpose xs)"
by (simp add: length_transpose foldr_map comp_def)
show "length (transpose (map (map f) xs)) = length (map (map f) (transpose xs))" by simp
fix i assume "i < length (transpose (map (map f) xs))"
thus "transpose (map (map f) xs) ! i = map (map f) (transpose xs) ! i"
by (simp add: nth_transpose filter_map comp_def)
qed
subsubsection \<open>\<^const>\<open>min\<close> and \<^const>\<open>arg_min\<close>\<close>
lemma min_list_Min: "xs \<noteq> [] \<Longrightarrow> min_list xs = Min (set xs)"
by (induction xs rule: induct_list012)(auto)
lemma f_arg_min_list_f: "xs \<noteq> [] \<Longrightarrow> f (arg_min_list f xs) = Min (f ` (set xs))"
by(induction f xs rule: arg_min_list.induct) (auto simp: min_def intro!: antisym)
lemma arg_min_list_in: "xs \<noteq> [] \<Longrightarrow> arg_min_list f xs \<in> set xs"
by(induction xs rule: induct_list012) (auto simp: Let_def)
subsubsection \<open>(In)finiteness\<close>
lemma finite_maxlen:
"finite (M::'a list set) \<Longrightarrow> \<exists>n. \<forall>s\<in>M. size s < n"
proof (induct rule: finite.induct)
case emptyI show ?case by simp
next
case (insertI M xs)
then obtain n where "\<forall>s\<in>M. length s < n" by blast
hence "\<forall>s\<in>insert xs M. size s < max n (size xs) + 1" by auto
thus ?case ..
qed
lemma lists_length_Suc_eq:
"{xs. set xs \<subseteq> A \<and> length xs = Suc n} =
(\<lambda>(xs, n). n#xs) ` ({xs. set xs \<subseteq> A \<and> length xs = n} \<times> A)"
by (auto simp: length_Suc_conv)
lemma
assumes "finite A"
shows finite_lists_length_eq: "finite {xs. set xs \<subseteq> A \<and> length xs = n}"
and card_lists_length_eq: "card {xs. set xs \<subseteq> A \<and> length xs = n} = (card A)^n"
using \<open>finite A\<close>
by (induct n)
(auto simp: card_image inj_split_Cons lists_length_Suc_eq cong: conj_cong)
lemma finite_lists_length_le:
assumes "finite A" shows "finite {xs. set xs \<subseteq> A \<and> length xs \<le> n}"
(is "finite ?S")
proof-
have "?S = (\<Union>n\<in>{0..n}. {xs. set xs \<subseteq> A \<and> length xs = n})" by auto
thus ?thesis by (auto intro!: finite_lists_length_eq[OF \<open>finite A\<close>] simp only:)
qed
lemma card_lists_length_le:
assumes "finite A" shows "card {xs. set xs \<subseteq> A \<and> length xs \<le> n} = (\<Sum>i\<le>n. card A^i)"
proof -
have "(\<Sum>i\<le>n. card A^i) = card (\<Union>i\<le>n. {xs. set xs \<subseteq> A \<and> length xs = i})"
using \<open>finite A\<close>
by (subst card_UN_disjoint)
(auto simp add: card_lists_length_eq finite_lists_length_eq)
also have "(\<Union>i\<le>n. {xs. set xs \<subseteq> A \<and> length xs = i}) = {xs. set xs \<subseteq> A \<and> length xs \<le> n}"
by auto
finally show ?thesis by simp
qed
lemma finite_lists_distinct_length_eq [intro]:
assumes "finite A"
shows "finite {xs. length xs = n \<and> distinct xs \<and> set xs \<subseteq> A}" (is "finite ?S")
proof -
have "finite {xs. set xs \<subseteq> A \<and> length xs = n}"
using \<open>finite A\<close> by (rule finite_lists_length_eq)
moreover have "?S \<subseteq> {xs. set xs \<subseteq> A \<and> length xs = n}" by auto
ultimately show ?thesis using finite_subset by auto
qed
lemma card_lists_distinct_length_eq:
assumes "finite A" "k \<le> card A"
shows "card {xs. length xs = k \<and> distinct xs \<and> set xs \<subseteq> A} = \<Prod>{card A - k + 1 .. card A}"
using assms
proof (induct k)
case 0
then have "{xs. length xs = 0 \<and> distinct xs \<and> set xs \<subseteq> A} = {[]}" by auto
then show ?case by simp
next
case (Suc k)
let "?k_list" = "\<lambda>k xs. length xs = k \<and> distinct xs \<and> set xs \<subseteq> A"
have inj_Cons: "\<And>A. inj_on (\<lambda>(xs, n). n # xs) A" by (rule inj_onI) auto
from Suc have "k \<le> card A" by simp
moreover note \<open>finite A\<close>
moreover have "finite {xs. ?k_list k xs}"
by (rule finite_subset) (use finite_lists_length_eq[OF \<open>finite A\<close>, of k] in auto)
moreover have "\<And>i j. i \<noteq> j \<longrightarrow> {i} \<times> (A - set i) \<inter> {j} \<times> (A - set j) = {}"
by auto
moreover have "\<And>i. i \<in> {xs. ?k_list k xs} \<Longrightarrow> card (A - set i) = card A - k"
by (simp add: card_Diff_subset distinct_card)
moreover have "{xs. ?k_list (Suc k) xs} =
(\<lambda>(xs, n). n#xs) ` \<Union>((\<lambda>xs. {xs} \<times> (A - set xs)) ` {xs. ?k_list k xs})"
by (auto simp: length_Suc_conv)
moreover have "Suc (card A - Suc k) = card A - k" using Suc.prems by simp
then have "(card A - k) * \<Prod>{Suc (card A - k)..card A} = \<Prod>{Suc (card A - Suc k)..card A}"
by (subst prod.insert[symmetric]) (simp add: atLeastAtMost_insertL)+
ultimately show ?case
by (simp add: card_image inj_Cons card_UN_disjoint Suc.hyps algebra_simps)
qed
lemma card_lists_distinct_length_eq':
assumes "k < card A"
shows "card {xs. length xs = k \<and> distinct xs \<and> set xs \<subseteq> A} = \<Prod>{card A - k + 1 .. card A}"
proof -
from \<open>k < card A\<close> have "finite A" and "k \<le> card A" using card.infinite by force+
from this show ?thesis by (rule card_lists_distinct_length_eq)
qed
lemma infinite_UNIV_listI: "\<not> finite(UNIV::'a list set)"
by (metis UNIV_I finite_maxlen length_replicate less_irrefl)
lemma same_length_different:
assumes "xs \<noteq> ys" and "length xs = length ys"
shows "\<exists>pre x xs' y ys'. x\<noteq>y \<and> xs = pre @ [x] @ xs' \<and> ys = pre @ [y] @ ys'"
using assms
proof (induction xs arbitrary: ys)
case Nil
then show ?case by auto
next
case (Cons x xs)
then obtain z zs where ys: "ys = Cons z zs"
by (metis length_Suc_conv)
show ?case
proof (cases "x=z")
case True
then have "xs \<noteq> zs" "length xs = length zs"
using Cons.prems ys by auto
then obtain pre u xs' v ys' where "u\<noteq>v" and xs: "xs = pre @ [u] @ xs'" and zs: "zs = pre @ [v] @ys'"
using Cons.IH by meson
then have "x # xs = (z#pre) @ [u] @ xs' \<and> ys = (z#pre) @ [v] @ ys'"
by (simp add: True ys)
with \<open>u\<noteq>v\<close> show ?thesis
by blast
next
case False
then have "x # xs = [] @ [x] @ xs \<and> ys = [] @ [z] @ zs"
by (simp add: ys)
then show ?thesis
using False by blast
qed
qed
subsection \<open>Sorting\<close>
subsubsection \<open>\<^const>\<open>sorted_wrt\<close>\<close>
text \<open>Sometimes the second equation in the definition of \<^const>\<open>sorted_wrt\<close> is too aggressive
because it relates each list element to \emph{all} its successors. Then this equation
should be removed and \<open>sorted_wrt2_simps\<close> should be added instead.\<close>
lemma sorted_wrt1: "sorted_wrt P [x] = True"
by(simp)
lemma sorted_wrt2: "transp P \<Longrightarrow> sorted_wrt P (x # y # zs) = (P x y \<and> sorted_wrt P (y # zs))"
proof (induction zs arbitrary: x y)
case (Cons z zs)
then show ?case
by simp (meson transpD)+
qed auto
lemmas sorted_wrt2_simps = sorted_wrt1 sorted_wrt2
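(* Roughly, the default second equation rewrites sorted_wrt P (x # y # zs) to a
   conjunction of P x z for every z in set (y # zs), which is quadratic when unfolded
   fully, whereas for transitive P the rules above rewrite it to just
   P x y \<and> sorted_wrt P (y # zs). *)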
lemma sorted_wrt_true [simp]:
"sorted_wrt (\<lambda>_ _. True) xs"
by (induction xs) simp_all
lemma sorted_wrt_append:
"sorted_wrt P (xs @ ys) \<longleftrightarrow>
sorted_wrt P xs \<and> sorted_wrt P ys \<and> (\<forall>x\<in>set xs. \<forall>y\<in>set ys. P x y)"
by (induction xs) auto
lemma sorted_wrt_map:
"sorted_wrt R (map f xs) = sorted_wrt (\<lambda>x y. R (f x) (f y)) xs"
by (induction xs) simp_all
lemma
assumes "sorted_wrt f xs"
shows sorted_wrt_take: "sorted_wrt f (take n xs)"
and sorted_wrt_drop: "sorted_wrt f (drop n xs)"
proof -
from assms have "sorted_wrt f (take n xs @ drop n xs)" by simp
thus "sorted_wrt f (take n xs)" and "sorted_wrt f (drop n xs)"
unfolding sorted_wrt_append by simp_all
qed
lemma sorted_wrt_filter:
"sorted_wrt f xs \<Longrightarrow> sorted_wrt f (filter P xs)"
by (induction xs) auto
lemma sorted_wrt_rev:
"sorted_wrt P (rev xs) = sorted_wrt (\<lambda>x y. P y x) xs"
by (induction xs) (auto simp add: sorted_wrt_append)
lemma sorted_wrt_mono_rel:
"(\<And>x y. \<lbrakk> x \<in> set xs; y \<in> set xs; P x y \<rbrakk> \<Longrightarrow> Q x y) \<Longrightarrow> sorted_wrt P xs \<Longrightarrow> sorted_wrt Q xs"
by(induction xs)(auto)
lemma sorted_wrt01: "length xs \<le> 1 \<Longrightarrow> sorted_wrt P xs"
by(auto simp: le_Suc_eq length_Suc_conv)
lemma sorted_wrt_iff_nth_less:
"sorted_wrt P xs = (\<forall>i j. i < j \<longrightarrow> j < length xs \<longrightarrow> P (xs ! i) (xs ! j))"
by (induction xs) (auto simp add: in_set_conv_nth Ball_def nth_Cons split: nat.split)
lemma sorted_wrt_nth_less:
"\<lbrakk> sorted_wrt P xs; i < j; j < length xs \<rbrakk> \<Longrightarrow> P (xs ! i) (xs ! j)"
by(auto simp: sorted_wrt_iff_nth_less)
lemma sorted_wrt_iff_nth_Suc_transp: assumes "transp P"
shows "sorted_wrt P xs \<longleftrightarrow> (\<forall>i. Suc i < length xs \<longrightarrow> P (xs!i) (xs!(Suc i)))" (is "?L = ?R")
proof
assume ?L
thus ?R
by (simp add: sorted_wrt_iff_nth_less)
next
assume ?R
have "i < j \<Longrightarrow> j < length xs \<Longrightarrow> P (xs ! i) (xs ! j)" for i j
by(induct i j rule: less_Suc_induct)(simp add: \<open>?R\<close>, meson assms transpE transp_on_less)
thus ?L
by (simp add: sorted_wrt_iff_nth_less)
qed
lemma sorted_wrt_upt[simp]: "sorted_wrt (<) [m..<n]"
by(induction n) (auto simp: sorted_wrt_append)
lemma sorted_wrt_upto[simp]: "sorted_wrt (<) [i..j]"
proof(induct i j rule:upto.induct)
case (1 i j)
from this show ?case
unfolding upto.simps[of i j] by auto
qed
text \<open>Each element is greater than or equal to its index:\<close>
lemma sorted_wrt_less_idx:
"sorted_wrt (<) ns \<Longrightarrow> i < length ns \<Longrightarrow> i \<le> ns!i"
proof (induction ns arbitrary: i rule: rev_induct)
case Nil thus ?case by simp
next
case snoc
thus ?case
by (simp add: nth_append sorted_wrt_append)
(metis less_antisym not_less nth_mem)
qed
subsubsection \<open>\<^const>\<open>sorted\<close>\<close>
context linorder
begin
text \<open>Sometimes the second equation in the definition of \<^const>\<open>sorted\<close> is too aggressive
because it relates each list element to \emph{all} its successors. Then this equation
should be removed and \<open>sorted2_simps\<close> should be added instead.
Executable code is one such use case.\<close>
lemma sorted0: "sorted [] = True"
by simp
lemma sorted1: "sorted [x] = True"
by simp
lemma sorted2: "sorted (x # y # zs) = (x \<le> y \<and> sorted (y # zs))"
by auto
lemmas sorted2_simps = sorted1 sorted2
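(* With sorted2_simps, e.g. sorted [1::nat, 2, 3] reduces to 1 \<le> 2 \<and> 2 \<le> 3,
   one comparison per adjacent pair, instead of also comparing 1 with 3 as the
   default equation would. *)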
lemma sorted_append:
"sorted (xs@ys) = (sorted xs \<and> sorted ys \<and> (\<forall>x \<in> set xs. \<forall>y \<in> set ys. x\<le>y))"
by (simp add: sorted_wrt_append)
lemma sorted_map:
"sorted (map f xs) = sorted_wrt (\<lambda>x y. f x \<le> f y) xs"
by (simp add: sorted_wrt_map)
lemma sorted01: "length xs \<le> 1 \<Longrightarrow> sorted xs"
by (simp add: sorted_wrt01)
lemma sorted_tl:
"sorted xs \<Longrightarrow> sorted (tl xs)"
by (cases xs) (simp_all)
lemma sorted_iff_nth_mono_less:
"sorted xs = (\<forall>i j. i < j \<longrightarrow> j < length xs \<longrightarrow> xs ! i \<le> xs ! j)"
by (simp add: sorted_wrt_iff_nth_less)
lemma sorted_iff_nth_mono:
"sorted xs = (\<forall>i j. i \<le> j \<longrightarrow> j < length xs \<longrightarrow> xs ! i \<le> xs ! j)"
by (auto simp: sorted_iff_nth_mono_less nat_less_le)
lemma sorted_nth_mono:
"sorted xs \<Longrightarrow> i \<le> j \<Longrightarrow> j < length xs \<Longrightarrow> xs!i \<le> xs!j"
by (auto simp: sorted_iff_nth_mono)
lemma sorted_iff_nth_Suc:
"sorted xs \<longleftrightarrow> (\<forall>i. Suc i < length xs \<longrightarrow> xs!i \<le> xs!(Suc i))"
by(simp add: sorted_wrt_iff_nth_Suc_transp)
lemma sorted_rev_nth_mono:
"sorted (rev xs) \<Longrightarrow> i \<le> j \<Longrightarrow> j < length xs \<Longrightarrow> xs!j \<le> xs!i"
by (metis local.nle_le order_class.antisym_conv1 sorted_wrt_iff_nth_less sorted_wrt_rev)
lemma sorted_rev_iff_nth_mono:
"sorted (rev xs) \<longleftrightarrow> (\<forall> i j. i \<le> j \<longrightarrow> j < length xs \<longrightarrow> xs!j \<le> xs!i)" (is "?L = ?R")
proof
assume ?L thus ?R
by (blast intro: sorted_rev_nth_mono)
next
assume ?R
have "rev xs ! k \<le> rev xs ! l" if asms: "k \<le> l" "l < length(rev xs)" for k l
proof -
have "k < length xs" "l < length xs"
"length xs - Suc l \<le> length xs - Suc k" "length xs - Suc k < length xs"
using asms by auto
thus "rev xs ! k \<le> rev xs ! l"
by (simp add: \<open>?R\<close> rev_nth)
qed
thus ?L by (simp add: sorted_iff_nth_mono)
qed
lemma sorted_rev_iff_nth_Suc:
"sorted (rev xs) \<longleftrightarrow> (\<forall>i. Suc i < length xs \<longrightarrow> xs!(Suc i) \<le> xs!i)"
proof-
interpret dual: linorder "(\<lambda>x y. y \<le> x)" "(\<lambda>x y. y < x)"
using dual_linorder .
show ?thesis
using dual_linorder dual.sorted_iff_nth_Suc dual.sorted_iff_nth_mono
unfolding sorted_rev_iff_nth_mono by simp
qed
lemma sorted_map_remove1:
"sorted (map f xs) \<Longrightarrow> sorted (map f (remove1 x xs))"
by (induct xs) (auto)
lemma sorted_remove1: "sorted xs \<Longrightarrow> sorted (remove1 a xs)"
using sorted_map_remove1 [of "\<lambda>x. x"] by simp
lemma sorted_butlast:
assumes "sorted xs"
shows "sorted (butlast xs)"
by (simp add: assms butlast_conv_take sorted_wrt_take)
lemma sorted_replicate [simp]: "sorted(replicate n x)"
by(induction n) (auto)
lemma sorted_remdups[simp]:
"sorted xs \<Longrightarrow> sorted (remdups xs)"
by (induct xs) (auto)
lemma sorted_remdups_adj[simp]:
"sorted xs \<Longrightarrow> sorted (remdups_adj xs)"
by (induct xs rule: remdups_adj.induct, simp_all split: if_split_asm)
lemma sorted_nths: "sorted xs \<Longrightarrow> sorted (nths xs I)"
by(induction xs arbitrary: I)(auto simp: nths_Cons)
lemma sorted_distinct_set_unique:
assumes "sorted xs" "distinct xs" "sorted ys" "distinct ys" "set xs = set ys"
shows "xs = ys"
proof -
from assms have 1: "length xs = length ys" by (auto dest!: distinct_card)
from assms show ?thesis
proof(induct rule:list_induct2[OF 1])
case 1 show ?case by simp
next
case (2 x xs y ys)
then show ?case
by (cases \<open>x = y\<close>) (auto simp add: insert_eq_iff)
qed
qed
lemma map_sorted_distinct_set_unique:
assumes "inj_on f (set xs \<union> set ys)"
assumes "sorted (map f xs)" "distinct (map f xs)"
"sorted (map f ys)" "distinct (map f ys)"
assumes "set xs = set ys"
shows "xs = ys"
using assms map_inj_on sorted_distinct_set_unique by fastforce
lemma sorted_dropWhile: "sorted xs \<Longrightarrow> sorted (dropWhile P xs)"
by (auto dest: sorted_wrt_drop simp add: dropWhile_eq_drop)
lemma sorted_takeWhile: "sorted xs \<Longrightarrow> sorted (takeWhile P xs)"
by (subst takeWhile_eq_take) (auto dest: sorted_wrt_take)
lemma sorted_filter:
"sorted (map f xs) \<Longrightarrow> sorted (map f (filter P xs))"
by (induct xs) simp_all
lemma foldr_max_sorted:
assumes "sorted (rev xs)"
shows "foldr max xs y = (if xs = [] then y else max (xs ! 0) y)"
using assms
proof (induct xs)
case (Cons x xs)
then have "sorted (rev xs)" using sorted_append by auto
with Cons show ?case
by (cases xs) (auto simp add: sorted_append max_def)
qed simp
lemma filter_equals_takeWhile_sorted_rev:
assumes sorted: "sorted (rev (map f xs))"
shows "filter (\<lambda>x. t < f x) xs = takeWhile (\<lambda> x. t < f x) xs"
(is "filter ?P xs = ?tW")
proof (rule takeWhile_eq_filter[symmetric])
let "?dW" = "dropWhile ?P xs"
fix x assume x: "x \<in> set ?dW"
then obtain i where i: "i < length ?dW" and nth_i: "x = ?dW ! i"
unfolding in_set_conv_nth by auto
hence "length ?tW + i < length (?tW @ ?dW)"
unfolding length_append by simp
hence i': "length (map f ?tW) + i < length (map f xs)" by simp
have "(map f ?tW @ map f ?dW) ! (length (map f ?tW) + i) \<le>
(map f ?tW @ map f ?dW) ! (length (map f ?tW) + 0)"
using sorted_rev_nth_mono[OF sorted _ i', of "length ?tW"]
unfolding map_append[symmetric] by simp
hence "f x \<le> f (?dW ! 0)"
unfolding nth_append_length_plus nth_i
using i preorder_class.le_less_trans[OF le0 i] by simp
also have "... \<le> t"
by (metis hd_conv_nth hd_dropWhile length_greater_0_conv length_pos_if_in_set local.leI x)
finally show "\<not> t < f x" by simp
qed
lemma sorted_map_same:
"sorted (map f (filter (\<lambda>x. f x = g xs) xs))"
proof (induct xs arbitrary: g)
case Nil then show ?case by simp
next
case (Cons x xs)
then have "sorted (map f (filter (\<lambda>y. f y = (\<lambda>xs. f x) xs) xs))" .
moreover from Cons have "sorted (map f (filter (\<lambda>y. f y = (g \<circ> Cons x) xs) xs))" .
ultimately show ?case by simp_all
qed
lemma sorted_same:
"sorted (filter (\<lambda>x. x = g xs) xs)"
using sorted_map_same [of "\<lambda>x. x"] by simp
end
lemma sorted_upt[simp]: "sorted [m..<n]"
by(simp add: sorted_wrt_mono_rel[OF _ sorted_wrt_upt])
lemma sorted_upto[simp]: "sorted [m..n]"
by(simp add: sorted_wrt_mono_rel[OF _ sorted_wrt_upto])
subsubsection \<open>Sorting functions\<close>
text\<open>Currently it is not shown that \<^const>\<open>sort\<close> returns a
permutation of its input because the nicest proof is via multisets,
which are not part of Main. Alternatively one could use a function that
counts the number of occurrences of an element in a list, such as
\<^const>\<open>count_list\<close> above, and state the correctness property in terms of
that instead of multisets.\<close>
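(* A sketch of that alternative specification in terms of count_list from above
   (stated only as an illustration, not proved here):
     \<forall>x. count_list (sort xs) x = count_list xs x
   Together with sorted_sort below, this pins down sort xs uniquely, since a sorted
   list is determined by how often each element occurs in it. *)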
context linorder
begin
lemma set_insort_key:
"set (insort_key f x xs) = insert x (set xs)"
by (induct xs) auto
lemma length_insort [simp]:
"length (insort_key f x xs) = Suc (length xs)"
by (induct xs) simp_all
lemma insort_key_left_comm:
assumes "f x \<noteq> f y"
shows "insort_key f y (insort_key f x xs) = insort_key f x (insort_key f y xs)"
by (induct xs) (auto simp add: assms dest: order.antisym)
lemma insort_left_comm:
"insort x (insort y xs) = insort y (insort x xs)"
by (cases "x = y") (auto intro: insort_key_left_comm)
lemma comp_fun_commute_insort: "comp_fun_commute insort"
proof
qed (simp add: insort_left_comm fun_eq_iff)
lemma sort_key_simps [simp]:
"sort_key f [] = []"
"sort_key f (x#xs) = insort_key f x (sort_key f xs)"
by (simp_all add: sort_key_def)
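(* By sort_key_simps, sort is insertion sort, e.g.
   sort [3::nat, 1, 2] = insort 3 (insort 1 (insort 2 [])) = insort 3 [1, 2] = [1, 2, 3]. *)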
lemma sort_key_conv_fold:
assumes "inj_on f (set xs)"
shows "sort_key f xs = fold (insort_key f) xs []"
proof -
have "fold (insort_key f) (rev xs) = fold (insort_key f) xs"
proof (rule fold_rev, rule ext)
fix zs
fix x y
assume "x \<in> set xs" "y \<in> set xs"
with assms have *: "f y = f x \<Longrightarrow> y = x" by (auto dest: inj_onD)
have **: "x = y \<longleftrightarrow> y = x" by auto
show "(insort_key f y \<circ> insort_key f x) zs = (insort_key f x \<circ> insort_key f y) zs"
by (induct zs) (auto intro: * simp add: **)
qed
then show ?thesis by (simp add: sort_key_def foldr_conv_fold)
qed
lemma sort_conv_fold:
"sort xs = fold insort xs []"
by (rule sort_key_conv_fold) simp
lemma length_sort[simp]: "length (sort_key f xs) = length xs"
by (induct xs, auto)
lemma set_sort[simp]: "set(sort_key f xs) = set xs"
by (induct xs) (simp_all add: set_insort_key)
lemma distinct_insort: "distinct (insort_key f x xs) = (x \<notin> set xs \<and> distinct xs)"
by(induct xs)(auto simp: set_insort_key)
lemma distinct_insort_key:
"distinct (map f (insort_key f x xs)) = (f x \<notin> f ` set xs \<and> (distinct (map f xs)))"
by (induct xs) (auto simp: set_insort_key)
lemma distinct_sort[simp]: "distinct (sort_key f xs) = distinct xs"
by (induct xs) (simp_all add: distinct_insort)
lemma sorted_insort_key: "sorted (map f (insort_key f x xs)) = sorted (map f xs)"
by (induct xs) (auto simp: set_insort_key)
lemma sorted_insort: "sorted (insort x xs) = sorted xs"
using sorted_insort_key [where f="\<lambda>x. x"] by simp
theorem sorted_sort_key [simp]: "sorted (map f (sort_key f xs))"
by (induct xs) (auto simp:sorted_insort_key)
theorem sorted_sort [simp]: "sorted (sort xs)"
using sorted_sort_key [where f="\<lambda>x. x"] by simp
lemma insort_not_Nil [simp]:
"insort_key f a xs \<noteq> []"
by (induction xs) simp_all
lemma insort_is_Cons: "\<forall>x\<in>set xs. f a \<le> f x \<Longrightarrow> insort_key f a xs = a # xs"
by (cases xs) auto
lemma sort_key_id_if_sorted: "sorted (map f xs) \<Longrightarrow> sort_key f xs = xs"
by (induction xs) (auto simp add: insort_is_Cons)
text \<open>Subsumed by @{thm sort_key_id_if_sorted} but easier to find:\<close>
lemma sorted_sort_id: "sorted xs \<Longrightarrow> sort xs = xs"
by (simp add: sort_key_id_if_sorted)
lemma insort_key_remove1:
assumes "a \<in> set xs" and "sorted (map f xs)" and "hd (filter (\<lambda>x. f a = f x) xs) = a"
shows "insort_key f a (remove1 a xs) = xs"
using assms proof (induct xs)
case (Cons x xs)
then show ?case
proof (cases "x = a")
case False
then have "f x \<noteq> f a" using Cons.prems by auto
then have "f x < f a" using Cons.prems by auto
with \<open>f x \<noteq> f a\<close> show ?thesis using Cons by (auto simp: insort_is_Cons)
qed (auto simp: insort_is_Cons)
qed simp
lemma insort_remove1:
assumes "a \<in> set xs" and "sorted xs"
shows "insort a (remove1 a xs) = xs"
proof (rule insort_key_remove1)
define n where "n = length (filter ((=) a) xs) - 1"
from \<open>a \<in> set xs\<close> show "a \<in> set xs" .
from \<open>sorted xs\<close> show "sorted (map (\<lambda>x. x) xs)" by simp
from \<open>a \<in> set xs\<close> have "a \<in> set (filter ((=) a) xs)" by auto
then have "set (filter ((=) a) xs) \<noteq> {}" by auto
then have "filter ((=) a) xs \<noteq> []" by (auto simp only: set_empty)
then have "length (filter ((=) a) xs) > 0" by simp
then have n: "Suc n = length (filter ((=) a) xs)" by (simp add: n_def)
moreover have "replicate (Suc n) a = a # replicate n a"
by simp
ultimately show "hd (filter ((=) a) xs) = a" by (simp add: replicate_length_filter)
qed
lemma finite_sorted_distinct_unique:
assumes "finite A" shows "\<exists>!xs. set xs = A \<and> sorted xs \<and> distinct xs"
proof -
obtain xs where "distinct xs" "A = set xs"
using finite_distinct_list [OF assms] by metis
then show ?thesis
by (rule_tac a="sort xs" in ex1I) (auto simp: sorted_distinct_set_unique)
qed
lemma insort_insert_key_triv:
"f x \<in> f ` set xs \<Longrightarrow> insort_insert_key f x xs = xs"
by (simp add: insort_insert_key_def)
lemma insort_insert_triv:
"x \<in> set xs \<Longrightarrow> insort_insert x xs = xs"
using insort_insert_key_triv [of "\<lambda>x. x"] by simp
lemma insort_insert_insort_key:
"f x \<notin> f ` set xs \<Longrightarrow> insort_insert_key f x xs = insort_key f x xs"
by (simp add: insort_insert_key_def)
lemma insort_insert_insort:
"x \<notin> set xs \<Longrightarrow> insort_insert x xs = insort x xs"
using insort_insert_insort_key [of "\<lambda>x. x"] by simp
lemma set_insort_insert:
"set (insort_insert x xs) = insert x (set xs)"
by (auto simp add: insort_insert_key_def set_insort_key)
lemma distinct_insort_insert:
assumes "distinct xs"
shows "distinct (insort_insert_key f x xs)"
using assms by (induct xs) (auto simp add: insort_insert_key_def set_insort_key)
lemma sorted_insort_insert_key:
assumes "sorted (map f xs)"
shows "sorted (map f (insort_insert_key f x xs))"
using assms by (simp add: insort_insert_key_def sorted_insort_key)
lemma sorted_insort_insert:
assumes "sorted xs"
shows "sorted (insort_insert x xs)"
using assms sorted_insort_insert_key [of "\<lambda>x. x"] by simp
lemma filter_insort_triv:
"\<not> P x \<Longrightarrow> filter P (insort_key f x xs) = filter P xs"
by (induct xs) simp_all
lemma filter_insort:
"sorted (map f xs) \<Longrightarrow> P x \<Longrightarrow> filter P (insort_key f x xs) = insort_key f x (filter P xs)"
by (induct xs) (auto, subst insort_is_Cons, auto)
lemma filter_sort:
"filter P (sort_key f xs) = sort_key f (filter P xs)"
by (induct xs) (simp_all add: filter_insort_triv filter_insort)
lemma remove1_insort_key [simp]:
"remove1 x (insort_key f x xs) = xs"
by (induct xs) simp_all
end
lemma sort_upt [simp]: "sort [m..<n] = [m..<n]"
by (rule sort_key_id_if_sorted) simp
lemma sort_upto [simp]: "sort [i..j] = [i..j]"
by (rule sort_key_id_if_sorted) simp
lemma sorted_find_Min:
"sorted xs \<Longrightarrow> \<exists>x \<in> set xs. P x \<Longrightarrow> List.find P xs = Some (Min {x\<in>set xs. P x})"
proof (induct xs)
case Nil then show ?case by simp
next
case (Cons x xs) show ?case proof (cases "P x")
case True
with Cons show ?thesis by (auto intro: Min_eqI [symmetric])
next
case False then have "{y. (y = x \<or> y \<in> set xs) \<and> P y} = {y \<in> set xs. P y}"
by auto
with Cons False show ?thesis by (simp_all)
qed
qed
lemma sorted_enumerate [simp]: "sorted (map fst (enumerate n xs))"
by (simp add: enumerate_eq_zip)
lemma sorted_insort_is_snoc: "sorted xs \<Longrightarrow> \<forall>x \<in> set xs. a \<ge> x \<Longrightarrow> insort a xs = xs @ [a]"
by (induct xs) (auto dest!: insort_is_Cons)
text \<open>Stability of \<^const>\<open>sort_key\<close>:\<close>
lemma sort_key_stable: "filter (\<lambda>y. f y = k) (sort_key f xs) = filter (\<lambda>y. f y = k) xs"
by (induction xs) (auto simp: filter_insort insort_is_Cons filter_insort_triv)
corollary stable_sort_key_sort_key: "stable_sort_key sort_key"
by(simp add: stable_sort_key_def sort_key_stable)
lemma sort_key_const: "sort_key (\<lambda>x. c) xs = xs"
by (metis (mono_tags) filter_True sort_key_stable)
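text\<open>An added illustration (not part of the original theory) of the stability
property above: the two pairs with equal key \<open>2\<close> keep their relative order.\<close>
lemma "sort_key fst [(2::nat, True), (1, False), (2, False)] = [(1, False), (2, True), (2, False)]"
  (* added illustration only *)
  by simp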
subsubsection \<open>\<^const>\<open>transpose\<close> on sorted lists\<close>
lemma sorted_transpose[simp]: "sorted (rev (map length (transpose xs)))"
by (auto simp: sorted_iff_nth_mono rev_nth nth_transpose
length_filter_conv_card intro: card_mono)
lemma transpose_max_length:
"foldr (\<lambda>xs. max (length xs)) (transpose xs) 0 = length (filter (\<lambda>x. x \<noteq> []) xs)"
(is "?L = ?R")
proof (cases "transpose xs = []")
case False
have "?L = foldr max (map length (transpose xs)) 0"
by (simp add: foldr_map comp_def)
also have "... = length (transpose xs ! 0)"
using False sorted_transpose by (simp add: foldr_max_sorted)
finally show ?thesis
using False by (simp add: nth_transpose)
next
case True
hence "filter (\<lambda>x. x \<noteq> []) xs = []"
by (auto intro!: filter_False simp: transpose_empty)
thus ?thesis by (simp add: transpose_empty True)
qed
lemma length_transpose_sorted:
fixes xs :: "'a list list"
assumes sorted: "sorted (rev (map length xs))"
shows "length (transpose xs) = (if xs = [] then 0 else length (xs ! 0))"
proof (cases "xs = []")
case False
thus ?thesis
using foldr_max_sorted[OF sorted] False
unfolding length_transpose foldr_map comp_def
by simp
qed simp
lemma nth_nth_transpose_sorted[simp]:
fixes xs :: "'a list list"
assumes sorted: "sorted (rev (map length xs))"
and i: "i < length (transpose xs)"
and j: "j < length (filter (\<lambda>ys. i < length ys) xs)"
shows "transpose xs ! i ! j = xs ! j ! i"
using j filter_equals_takeWhile_sorted_rev[OF sorted, of i]
nth_transpose[OF i] nth_map[OF j]
by (simp add: takeWhile_nth)
lemma transpose_column_length:
fixes xs :: "'a list list"
assumes sorted: "sorted (rev (map length xs))" and "i < length xs"
shows "length (filter (\<lambda>ys. i < length ys) (transpose xs)) = length (xs ! i)"
proof -
have "xs \<noteq> []" using \<open>i < length xs\<close> by auto
note filter_equals_takeWhile_sorted_rev[OF sorted, simp]
{ fix j assume "j \<le> i"
note sorted_rev_nth_mono[OF sorted, of j i, simplified, OF this \<open>i < length xs\<close>]
} note sortedE = this[consumes 1]
have "{j. j < length (transpose xs) \<and> i < length (transpose xs ! j)}
= {..< length (xs ! i)}"
proof safe
fix j
assume "j < length (transpose xs)" and "i < length (transpose xs ! j)"
with this(2) nth_transpose[OF this(1)]
have "i < length (takeWhile (\<lambda>ys. j < length ys) xs)" by simp
from nth_mem[OF this] takeWhile_nth[OF this]
show "j < length (xs ! i)" by (auto dest: set_takeWhileD)
next
fix j assume "j < length (xs ! i)"
thus "j < length (transpose xs)"
using foldr_max_sorted[OF sorted] \<open>xs \<noteq> []\<close> sortedE[OF le0]
by (auto simp: length_transpose comp_def foldr_map)
have "Suc i \<le> length (takeWhile (\<lambda>ys. j < length ys) xs)"
using \<open>i < length xs\<close> \<open>j < length (xs ! i)\<close> less_Suc_eq_le
by (auto intro!: length_takeWhile_less_P_nth dest!: sortedE)
with nth_transpose[OF \<open>j < length (transpose xs)\<close>]
show "i < length (transpose xs ! j)" by simp
qed
thus ?thesis by (simp add: length_filter_conv_card)
qed
lemma transpose_column:
fixes xs :: "'a list list"
assumes sorted: "sorted (rev (map length xs))" and "i < length xs"
shows "map (\<lambda>ys. ys ! i) (filter (\<lambda>ys. i < length ys) (transpose xs))
= xs ! i" (is "?R = _")
proof (rule nth_equalityI)
show length: "length ?R = length (xs ! i)"
using transpose_column_length[OF assms] by simp
fix j assume j: "j < length ?R"
note * = less_le_trans[OF this, unfolded length_map, OF length_filter_le]
from j have j_less: "j < length (xs ! i)" using length by simp
have i_less_tW: "Suc i \<le> length (takeWhile (\<lambda>ys. Suc j \<le> length ys) xs)"
proof (rule length_takeWhile_less_P_nth)
show "Suc i \<le> length xs" using \<open>i < length xs\<close> by simp
fix k assume "k < Suc i"
hence "k \<le> i" by auto
with sorted_rev_nth_mono[OF sorted this] \<open>i < length xs\<close>
have "length (xs ! i) \<le> length (xs ! k)" by simp
thus "Suc j \<le> length (xs ! k)" using j_less by simp
qed
have i_less_filter: "i < length (filter (\<lambda>ys. j < length ys) xs) "
unfolding filter_equals_takeWhile_sorted_rev[OF sorted, of j]
using i_less_tW by (simp_all add: Suc_le_eq)
from j show "?R ! j = xs ! i ! j"
unfolding filter_equals_takeWhile_sorted_rev[OF sorted_transpose, of i]
by (simp add: takeWhile_nth nth_nth_transpose_sorted[OF sorted * i_less_filter])
qed
lemma transpose_transpose:
fixes xs :: "'a list list"
assumes sorted: "sorted (rev (map length xs))"
shows "transpose (transpose xs) = takeWhile (\<lambda>x. x \<noteq> []) xs" (is "?L = ?R")
proof -
have len: "length ?L = length ?R"
unfolding length_transpose transpose_max_length
using filter_equals_takeWhile_sorted_rev[OF sorted, of 0]
by simp
{ fix i assume "i < length ?R"
with less_le_trans[OF _ length_takeWhile_le[of _ xs]]
have "i < length xs" by simp
} note * = this
show ?thesis
by (rule nth_equalityI)
(simp_all add: len nth_transpose transpose_column[OF sorted] * takeWhile_nth)
qed
theorem transpose_rectangle:
assumes "xs = [] \<Longrightarrow> n = 0"
assumes rect: "\<And> i. i < length xs \<Longrightarrow> length (xs ! i) = n"
shows "transpose xs = map (\<lambda> i. map (\<lambda> j. xs ! j ! i) [0..<length xs]) [0..<n]"
(is "?trans = ?map")
proof (rule nth_equalityI)
have "sorted (rev (map length xs))"
by (auto simp: rev_nth rect sorted_iff_nth_mono)
from foldr_max_sorted[OF this] assms
show len: "length ?trans = length ?map"
by (simp_all add: length_transpose foldr_map comp_def)
moreover
{ fix i assume "i < n" hence "filter (\<lambda>ys. i < length ys) xs = xs"
using rect by (auto simp: in_set_conv_nth intro!: filter_True) }
ultimately show "\<And>i. i < length (transpose xs) \<Longrightarrow> ?trans ! i = ?map ! i"
by (auto simp: nth_transpose intro: nth_equalityI)
qed
subsubsection \<open>\<open>sorted_key_list_of_set\<close>\<close>
text\<open>
This function maps (finite) linearly ordered sets to sorted lists.
The linear order is obtained by a key function that maps the elements of the set to a type
that is linearly ordered.
Warning: in most cases it is not a good idea to convert from
sets to lists; instead, one should convert in the other direction (via \<^const>\<open>set\<close>).
Note: this is a generalisation of the older \<open>sorted_list_of_set\<close> that is obtained by setting
the key function to the identity. Consequently, new theorems should be added to the locale
below. They should also be aliased to more convenient names for use with \<open>sorted_list_of_set\<close>
as seen further below.
\<close>
definition (in linorder) sorted_key_list_of_set :: "('b \<Rightarrow> 'a) \<Rightarrow> 'b set \<Rightarrow> 'b list"
where "sorted_key_list_of_set f \<equiv> folding_on.F (insort_key f) []"
locale folding_insort_key = lo?: linorder "less_eq :: 'a \<Rightarrow> 'a \<Rightarrow> bool" less
for less_eq (infix "\<preceq>" 50) and less (infix "\<prec>" 50) +
fixes S
fixes f :: "'b \<Rightarrow> 'a"
assumes inj_on: "inj_on f S"
begin
lemma insort_key_commute:
"x \<in> S \<Longrightarrow> y \<in> S \<Longrightarrow> insort_key f y o insort_key f x = insort_key f x o insort_key f y"
proof(rule ext, goal_cases)
case (1 xs)
with inj_on show ?case by (induction xs) (auto simp: inj_onD)
qed
sublocale fold_insort_key: folding_on S "insort_key f" "[]"
rewrites "folding_on.F (insort_key f) [] = sorted_key_list_of_set f"
proof -
show "folding_on S (insort_key f)"
by standard (simp add: insort_key_commute)
qed (simp add: sorted_key_list_of_set_def)
lemma idem_if_sorted_distinct:
assumes "set xs \<subseteq> S" and "sorted (map f xs)" "distinct xs"
shows "sorted_key_list_of_set f (set xs) = xs"
proof(cases "S = {}")
case True
then show ?thesis using \<open>set xs \<subseteq> S\<close> by auto
next
case False
with assms show ?thesis
proof(induction xs)
case (Cons a xs)
with Cons show ?case by (cases xs) auto
qed simp
qed
lemma sorted_key_list_of_set_empty:
"sorted_key_list_of_set f {} = []"
by (fact fold_insort_key.empty)
lemma sorted_key_list_of_set_insert:
assumes "insert x A \<subseteq> S" and "finite A" "x \<notin> A"
shows "sorted_key_list_of_set f (insert x A)
= insort_key f x (sorted_key_list_of_set f A)"
using assms by (fact fold_insort_key.insert)
lemma sorted_key_list_of_set_insert_remove [simp]:
assumes "insert x A \<subseteq> S" and "finite A"
shows "sorted_key_list_of_set f (insert x A)
= insort_key f x (sorted_key_list_of_set f (A - {x}))"
using assms by (fact fold_insort_key.insert_remove)
lemma sorted_key_list_of_set_eq_Nil_iff [simp]:
assumes "A \<subseteq> S" and "finite A"
shows "sorted_key_list_of_set f A = [] \<longleftrightarrow> A = {}"
using assms by (auto simp: fold_insort_key.remove)
lemma set_sorted_key_list_of_set [simp]:
assumes "A \<subseteq> S" and "finite A"
shows "set (sorted_key_list_of_set f A) = A"
using assms(2,1)
by (induct A rule: finite_induct) (simp_all add: set_insort_key)
lemma sorted_sorted_key_list_of_set [simp]:
assumes "A \<subseteq> S"
shows "sorted (map f (sorted_key_list_of_set f A))"
proof (cases "finite A")
case True thus ?thesis using \<open>A \<subseteq> S\<close>
by (induction A) (simp_all add: sorted_insort_key)
next
case False thus ?thesis by simp
qed
lemma distinct_if_distinct_map: "distinct (map f xs) \<Longrightarrow> distinct xs"
using inj_on by (simp add: distinct_map)
lemma distinct_sorted_key_list_of_set [simp]:
assumes "A \<subseteq> S"
shows "distinct (map f (sorted_key_list_of_set f A))"
proof (cases "finite A")
case True thus ?thesis using \<open>A \<subseteq> S\<close> inj_on
by (induction A) (force simp: distinct_insort_key dest: inj_onD)+
next
case False thus ?thesis by simp
qed
lemma length_sorted_key_list_of_set [simp]:
assumes "A \<subseteq> S"
shows "length (sorted_key_list_of_set f A) = card A"
proof (cases "finite A")
case True
with assms inj_on show ?thesis
using distinct_card[symmetric, OF distinct_sorted_key_list_of_set]
by (auto simp: subset_inj_on intro!: card_image)
qed auto
lemmas sorted_key_list_of_set =
set_sorted_key_list_of_set sorted_sorted_key_list_of_set distinct_sorted_key_list_of_set
lemma sorted_key_list_of_set_remove:
assumes "insert x A \<subseteq> S" and "finite A"
shows "sorted_key_list_of_set f (A - {x}) = remove1 x (sorted_key_list_of_set f A)"
proof (cases "x \<in> A")
case False with assms have "x \<notin> set (sorted_key_list_of_set f A)" by simp
with False show ?thesis by (simp add: remove1_idem)
next
case True then obtain B where A: "A = insert x B" by (rule Set.set_insert)
with assms show ?thesis by simp
qed
lemma strict_sorted_key_list_of_set [simp]:
"A \<subseteq> S \<Longrightarrow> sorted_wrt (\<prec>) (map f (sorted_key_list_of_set f A))"
by (cases "finite A") (auto simp: strict_sorted_iff subset_inj_on[OF inj_on])
lemma finite_set_strict_sorted:
assumes "A \<subseteq> S" and "finite A"
obtains l where "sorted_wrt (\<prec>) (map f l)" "set l = A" "length l = card A"
using assms
by (meson length_sorted_key_list_of_set set_sorted_key_list_of_set strict_sorted_key_list_of_set)
lemma (in linorder) strict_sorted_equal:
assumes "sorted_wrt (<) xs"
and "sorted_wrt (<) ys"
and "set ys = set xs"
shows "ys = xs"
using assms
proof (induction xs arbitrary: ys)
case (Cons x xs)
show ?case
proof (cases ys)
case Nil
then show ?thesis
using Cons.prems by auto
next
case (Cons y ys')
then have "xs = ys'"
by (metis Cons.prems list.inject sorted_distinct_set_unique strict_sorted_iff)
moreover have "x = y"
using Cons.prems \<open>xs = ys'\<close> local.Cons by fastforce
ultimately show ?thesis
using local.Cons by blast
qed
qed auto
lemma (in linorder) strict_sorted_equal_Uniq: "\<exists>\<^sub>\<le>\<^sub>1xs. sorted_wrt (<) xs \<and> set xs = A"
by (simp add: Uniq_def strict_sorted_equal)
lemma sorted_key_list_of_set_inject:
assumes "A \<subseteq> S" "B \<subseteq> S"
assumes "sorted_key_list_of_set f A = sorted_key_list_of_set f B" "finite A" "finite B"
shows "A = B"
using assms set_sorted_key_list_of_set by metis
lemma sorted_key_list_of_set_unique:
assumes "A \<subseteq> S" and "finite A"
shows "sorted_wrt (\<prec>) (map f l) \<and> set l = A \<and> length l = card A
\<longleftrightarrow> sorted_key_list_of_set f A = l"
using assms
by (auto simp: strict_sorted_iff card_distinct idem_if_sorted_distinct)
end
context linorder
begin
definition "sorted_list_of_set \<equiv> sorted_key_list_of_set (\<lambda>x::'a. x)"
text \<open>
We abuse the \<open>rewrites\<close> functionality of locales to remove trivial assumptions that result
from instantiating the key function to the identity.
\<close>
sublocale sorted_list_of_set: folding_insort_key "(\<le>)" "(<)" UNIV "(\<lambda>x. x)"
rewrites "sorted_key_list_of_set (\<lambda>x. x) = sorted_list_of_set"
and "\<And>xs. map (\<lambda>x. x) xs \<equiv> xs"
and "\<And>X. (X \<subseteq> UNIV) \<equiv> True"
and "\<And>x. x \<in> UNIV \<equiv> True"
and "\<And>P. (True \<Longrightarrow> P) \<equiv> Trueprop P"
and "\<And>P Q. (True \<Longrightarrow> PROP P \<Longrightarrow> PROP Q) \<equiv> (PROP P \<Longrightarrow> True \<Longrightarrow> PROP Q)"
proof -
show "folding_insort_key (\<le>) (<) UNIV (\<lambda>x. x)"
by standard simp
qed (simp_all add: sorted_list_of_set_def)
text \<open>Alias theorems for backwards compatibility and ease of use.\<close>
lemmas sorted_list_of_set = sorted_list_of_set.sorted_key_list_of_set and
sorted_list_of_set_empty = sorted_list_of_set.sorted_key_list_of_set_empty and
sorted_list_of_set_insert = sorted_list_of_set.sorted_key_list_of_set_insert and
sorted_list_of_set_insert_remove = sorted_list_of_set.sorted_key_list_of_set_insert_remove and
sorted_list_of_set_eq_Nil_iff = sorted_list_of_set.sorted_key_list_of_set_eq_Nil_iff and
set_sorted_list_of_set = sorted_list_of_set.set_sorted_key_list_of_set and
sorted_sorted_list_of_set = sorted_list_of_set.sorted_sorted_key_list_of_set and
distinct_sorted_list_of_set = sorted_list_of_set.distinct_sorted_key_list_of_set and
length_sorted_list_of_set = sorted_list_of_set.length_sorted_key_list_of_set and
sorted_list_of_set_remove = sorted_list_of_set.sorted_key_list_of_set_remove and
strict_sorted_list_of_set = sorted_list_of_set.strict_sorted_key_list_of_set and
sorted_list_of_set_inject = sorted_list_of_set.sorted_key_list_of_set_inject and
sorted_list_of_set_unique = sorted_list_of_set.sorted_key_list_of_set_unique and
finite_set_strict_sorted = sorted_list_of_set.finite_set_strict_sorted
lemma sorted_list_of_set_sort_remdups [code]:
"sorted_list_of_set (set xs) = sort (remdups xs)"
proof -
interpret comp_fun_commute insort by (fact comp_fun_commute_insort)
show ?thesis
by (simp add: sorted_list_of_set.fold_insort_key.eq_fold sort_conv_fold fold_set_fold_remdups)
qed
end
lemma sorted_list_of_set_range [simp]:
"sorted_list_of_set {m..<n} = [m..<n]"
by (rule sorted_distinct_set_unique) simp_all
lemma sorted_list_of_set_lessThan_Suc [simp]:
"sorted_list_of_set {..<Suc k} = sorted_list_of_set {..<k} @ [k]"
using le0 lessThan_atLeast0 sorted_list_of_set_range upt_Suc_append by presburger
lemma sorted_list_of_set_atMost_Suc [simp]:
"sorted_list_of_set {..Suc k} = sorted_list_of_set {..k} @ [Suc k]"
using lessThan_Suc_atMost sorted_list_of_set_lessThan_Suc by fastforce
lemma sorted_list_of_set_nonempty:
assumes "finite A" "A \<noteq> {}"
shows "sorted_list_of_set A = Min A # sorted_list_of_set (A - {Min A})"
using assms
by (auto simp: less_le simp flip: sorted_list_of_set.sorted_key_list_of_set_unique intro: Min_in)
lemma sorted_list_of_set_greaterThanLessThan:
assumes "Suc i < j"
shows "sorted_list_of_set {i<..<j} = Suc i # sorted_list_of_set {Suc i<..<j}"
proof -
have "{i<..<j} = insert (Suc i) {Suc i<..<j}"
using assms by auto
then show ?thesis
by (metis assms atLeastSucLessThan_greaterThanLessThan sorted_list_of_set_range upt_conv_Cons)
qed
lemma sorted_list_of_set_greaterThanAtMost:
assumes "Suc i \<le> j"
shows "sorted_list_of_set {i<..j} = Suc i # sorted_list_of_set {Suc i<..j}"
using sorted_list_of_set_greaterThanLessThan [of i "Suc j"]
by (metis assms greaterThanAtMost_def greaterThanLessThan_eq le_imp_less_Suc lessThan_Suc_atMost)
lemma nth_sorted_list_of_set_greaterThanLessThan:
"n < j - Suc i \<Longrightarrow> sorted_list_of_set {i<..<j} ! n = Suc (i+n)"
by (induction n arbitrary: i) (auto simp: sorted_list_of_set_greaterThanLessThan)
lemma nth_sorted_list_of_set_greaterThanAtMost:
"n < j - i \<Longrightarrow> sorted_list_of_set {i<..j} ! n = Suc (i+n)"
using nth_sorted_list_of_set_greaterThanLessThan [of n "Suc j" i]
by (simp add: greaterThanAtMost_def greaterThanLessThan_eq lessThan_Suc_atMost)
subsubsection \<open>\<open>lists\<close>: the list-forming operator over sets\<close>
inductive_set
lists :: "'a set => 'a list set"
for A :: "'a set"
where
Nil [intro!, simp]: "[] \<in> lists A"
| Cons [intro!, simp]: "\<lbrakk>a \<in> A; l \<in> lists A\<rbrakk> \<Longrightarrow> a#l \<in> lists A"
inductive_cases listsE [elim!]: "x#l \<in> lists A"
inductive_cases listspE [elim!]: "listsp A (x # l)"
inductive_simps listsp_simps[code]:
"listsp A []"
"listsp A (x # xs)"
lemma listsp_mono [mono]: "A \<le> B \<Longrightarrow> listsp A \<le> listsp B"
by (rule predicate1I, erule listsp.induct, blast+)
lemmas lists_mono = listsp_mono [to_set]
lemma listsp_infI:
assumes l: "listsp A l" shows "listsp B l \<Longrightarrow> listsp (inf A B) l" using l
by induct blast+
lemmas lists_IntI = listsp_infI [to_set]
lemma listsp_inf_eq [simp]: "listsp (inf A B) = inf (listsp A) (listsp B)"
proof (rule mono_inf [where f=listsp, THEN order_antisym])
show "mono listsp" by (simp add: mono_def listsp_mono)
show "inf (listsp A) (listsp B) \<le> listsp (inf A B)" by (blast intro!: listsp_infI)
qed
lemmas listsp_conj_eq [simp] = listsp_inf_eq [simplified inf_fun_def inf_bool_def]
lemmas lists_Int_eq [simp] = listsp_inf_eq [to_set]
lemma Cons_in_lists_iff[simp]: "x#xs \<in> lists A \<longleftrightarrow> x \<in> A \<and> xs \<in> lists A"
by auto
lemma append_in_listsp_conv [iff]:
"(listsp A (xs @ ys)) = (listsp A xs \<and> listsp A ys)"
by (induct xs) auto
lemmas append_in_lists_conv [iff] = append_in_listsp_conv [to_set]
lemma in_listsp_conv_set: "(listsp A xs) = (\<forall>x \<in> set xs. A x)"
\<comment> \<open>eliminate \<open>listsp\<close> in favour of \<open>set\<close>\<close>
by (induct xs) auto
lemmas in_lists_conv_set [code_unfold] = in_listsp_conv_set [to_set]
lemma in_listspD [dest!]: "listsp A xs \<Longrightarrow> \<forall>x\<in>set xs. A x"
by (rule in_listsp_conv_set [THEN iffD1])
lemmas in_listsD [dest!] = in_listspD [to_set]
lemma in_listspI [intro!]: "\<forall>x\<in>set xs. A x \<Longrightarrow> listsp A xs"
by (rule in_listsp_conv_set [THEN iffD2])
lemmas in_listsI [intro!] = in_listspI [to_set]
lemma lists_eq_set: "lists A = {xs. set xs \<le> A}"
by auto
lemma lists_empty [simp]: "lists {} = {[]}"
by auto
lemma lists_UNIV [simp]: "lists UNIV = UNIV"
by auto
lemma lists_image: "lists (f`A) = map f ` lists A"
proof -
{ fix xs have "\<forall>x\<in>set xs. x \<in> f ` A \<Longrightarrow> xs \<in> map f ` lists A"
by (induct xs) (auto simp del: list.map simp add: list.map[symmetric] intro!: imageI) }
then show ?thesis by auto
qed
subsubsection \<open>Inductive definition for membership\<close>
inductive ListMem :: "'a \<Rightarrow> 'a list \<Rightarrow> bool"
where
elem: "ListMem x (x # xs)"
| insert: "ListMem x xs \<Longrightarrow> ListMem x (y # xs)"
lemma ListMem_iff: "(ListMem x xs) = (x \<in> set xs)"
proof
show "ListMem x xs \<Longrightarrow> x \<in> set xs"
by (induct set: ListMem) auto
show "x \<in> set xs \<Longrightarrow> ListMem x xs"
by (induct xs) (auto intro: ListMem.intros)
qed
subsubsection \<open>Lists as Cartesian products\<close>
text\<open>\<open>set_Cons A Xs\<close>: the set of lists with head drawn from
\<^term>\<open>A\<close> and tail drawn from \<^term>\<open>Xs\<close>.\<close>
definition set_Cons :: "'a set \<Rightarrow> 'a list set \<Rightarrow> 'a list set" where
"set_Cons A XS = {z. \<exists>x xs. z = x # xs \<and> x \<in> A \<and> xs \<in> XS}"
lemma set_Cons_sing_Nil [simp]: "set_Cons A {[]} = (%x. [x])`A"
by (auto simp add: set_Cons_def)
text\<open>Yields the set of lists that have the same length as the argument list and
whose elements are drawn from the corresponding sets in that list.\<close>
primrec listset :: "'a set list \<Rightarrow> 'a list set" where
"listset [] = {[]}" |
"listset (A # As) = set_Cons A (listset As)"
subsection \<open>Relations on Lists\<close>
subsubsection \<open>Length Lexicographic Ordering\<close>
text\<open>These orderings preserve well-foundedness: shorter lists
precede longer lists. These orderings are not used in dictionaries.\<close>
primrec \<comment> \<open>The lexicographic ordering for lists of the specified length\<close>
lexn :: "('a \<times> 'a) set \<Rightarrow> nat \<Rightarrow> ('a list \<times> 'a list) set" where
"lexn r 0 = {}" |
"lexn r (Suc n) =
(map_prod (%(x, xs). x#xs) (%(x, xs). x#xs) ` (r <*lex*> lexn r n)) Int
{(xs, ys). length xs = Suc n \<and> length ys = Suc n}"
definition lex :: "('a \<times> 'a) set \<Rightarrow> ('a list \<times> 'a list) set" where
"lex r = (\<Union>n. lexn r n)" \<comment> \<open>Holds only between lists of the same length\<close>
definition lenlex :: "('a \<times> 'a) set => ('a list \<times> 'a list) set" where
"lenlex r = inv_image (less_than <*lex*> lex r) (\<lambda>xs. (length xs, xs))"
\<comment> \<open>Compares lists by their length and then lexicographically\<close>
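text\<open>An added illustration (not part of the original theory): under \<open>lenlex\<close> a
shorter list precedes a longer one regardless of its entries (proof omitted).\<close>
lemma "([5::nat, 5], [0, 0, 0]) \<in> lenlex less_than"
  (* added illustration only; proof intentionally omitted *)
  oops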
lemma wf_lexn: assumes "wf r" shows "wf (lexn r n)"
proof (induct n)
case (Suc n)
have inj: "inj (\<lambda>(x, xs). x # xs)"
using assms by (auto simp: inj_on_def)
have wf: "wf (map_prod (\<lambda>(x, xs). x # xs) (\<lambda>(x, xs). x # xs) ` (r <*lex*> lexn r n))"
by (simp add: Suc.hyps assms wf_lex_prod wf_map_prod_image [OF _ inj])
then show ?case
by (rule wf_subset) auto
qed auto
lemma lexn_length:
"(xs, ys) \<in> lexn r n \<Longrightarrow> length xs = n \<and> length ys = n"
by (induct n arbitrary: xs ys) auto
lemma wf_lex [intro!]:
assumes "wf r" shows "wf (lex r)"
unfolding lex_def
proof (rule wf_UN)
show "wf (lexn r i)" for i
by (simp add: assms wf_lexn)
show "\<And>i j. lexn r i \<noteq> lexn r j \<Longrightarrow> Domain (lexn r i) \<inter> Range (lexn r j) = {}"
by (metis DomainE Int_emptyI RangeE lexn_length)
qed
lemma lexn_conv:
"lexn r n =
{(xs,ys). length xs = n \<and> length ys = n \<and>
(\<exists>xys x y xs' ys'. xs= xys @ x#xs' \<and> ys= xys @ y # ys' \<and> (x, y) \<in> r)}"
proof (induction n)
case (Suc n)
then show ?case
apply (simp add: image_Collect lex_prod_def, safe, blast)
apply (rule_tac x = "ab # xys" in exI, simp)
apply (case_tac xys; force)
done
qed auto
text\<open>By Mathias Fleury:\<close>
proposition lexn_transI:
assumes "trans r" shows "trans (lexn r n)"
unfolding trans_def
proof (intro allI impI)
fix as bs cs
assume asbs: "(as, bs) \<in> lexn r n" and bscs: "(bs, cs) \<in> lexn r n"
obtain abs a b as' bs' where
n: "length as = n" and "length bs = n" and
as: "as = abs @ a # as'" and
bs: "bs = abs @ b # bs'" and
abr: "(a, b) \<in> r"
using asbs unfolding lexn_conv by blast
obtain bcs b' c' cs' bs' where
n': "length cs = n" and "length bs = n" and
bs': "bs = bcs @ b' # bs'" and
cs: "cs = bcs @ c' # cs'" and
b'c'r: "(b', c') \<in> r"
using bscs unfolding lexn_conv by blast
consider (le) "length bcs < length abs"
| (eq) "length bcs = length abs"
| (ge) "length bcs > length abs" by linarith
thus "(as, cs) \<in> lexn r n"
proof cases
let ?k = "length bcs"
case le
hence "as ! ?k = bs ! ?k" unfolding as bs by (simp add: nth_append)
hence "(as ! ?k, cs ! ?k) \<in> r" using b'c'r unfolding bs' cs by auto
moreover
have "length bcs < length as" using le unfolding as by simp
from id_take_nth_drop[OF this]
have "as = take ?k as @ as ! ?k # drop (Suc ?k) as" .
moreover
have "length bcs < length cs" unfolding cs by simp
from id_take_nth_drop[OF this]
have "cs = take ?k cs @ cs ! ?k # drop (Suc ?k) cs" .
moreover have "take ?k as = take ?k cs"
using le arg_cong[OF bs, of "take (length bcs)"]
unfolding cs as bs' by auto
ultimately show ?thesis using n n' unfolding lexn_conv by auto
next
let ?k = "length abs"
case ge
hence "bs ! ?k = cs ! ?k" unfolding bs' cs by (simp add: nth_append)
hence "(as ! ?k, cs ! ?k) \<in> r" using abr unfolding as bs by auto
moreover
have "length abs < length as" using ge unfolding as by simp
from id_take_nth_drop[OF this]
have "as = take ?k as @ as ! ?k # drop (Suc ?k) as" .
moreover have "length abs < length cs" using n n' unfolding as by simp
from id_take_nth_drop[OF this]
have "cs = take ?k cs @ cs ! ?k # drop (Suc ?k) cs" .
moreover have "take ?k as = take ?k cs"
using ge arg_cong[OF bs', of "take (length abs)"]
unfolding cs as bs by auto
ultimately show ?thesis using n n' unfolding lexn_conv by auto
next
let ?k = "length abs"
case eq
hence *: "abs = bcs" "b = b'" using bs bs' by auto
hence "(a, c') \<in> r"
using abr b'c'r assms unfolding trans_def by blast
with * show ?thesis using n n' unfolding lexn_conv as bs cs by auto
qed
qed
corollary lex_transI:
assumes "trans r" shows "trans (lex r)"
using lexn_transI [OF assms]
by (clarsimp simp add: lex_def trans_def) (metis lexn_length)
lemma lex_conv:
"lex r =
{(xs,ys). length xs = length ys \<and>
(\<exists>xys x y xs' ys'. xs = xys @ x # xs' \<and> ys = xys @ y # ys' \<and> (x, y) \<in> r)}"
by (force simp add: lex_def lexn_conv)
lemma wf_lenlex [intro!]: "wf r \<Longrightarrow> wf (lenlex r)"
by (unfold lenlex_def) blast
lemma lenlex_conv:
"lenlex r = {(xs,ys). length xs < length ys \<or>
length xs = length ys \<and> (xs, ys) \<in> lex r}"
by (auto simp add: lenlex_def Id_on_def lex_prod_def inv_image_def)
lemma total_lenlex:
assumes "total r"
shows "total (lenlex r)"
proof -
have "(xs,ys) \<in> lexn r (length xs) \<or> (ys,xs) \<in> lexn r (length xs)"
if "xs \<noteq> ys" and len: "length xs = length ys" for xs ys
proof -
obtain pre x xs' y ys' where "x\<noteq>y" and xs: "xs = pre @ [x] @ xs'" and ys: "ys = pre @ [y] @ys'"
by (meson len \<open>xs \<noteq> ys\<close> same_length_different)
then consider "(x,y) \<in> r" | "(y,x) \<in> r"
by (meson UNIV_I assms total_on_def)
then show ?thesis
by cases (use len in \<open>(force simp add: lexn_conv xs ys)+\<close>)
qed
then show ?thesis
by (fastforce simp: lenlex_def total_on_def lex_def)
qed
lemma lenlex_transI [intro]: "trans r \<Longrightarrow> trans (lenlex r)"
unfolding lenlex_def
by (meson lex_transI trans_inv_image trans_less_than trans_lex_prod)
lemma Nil_notin_lex [iff]: "([], ys) \<notin> lex r"
by (simp add: lex_conv)
lemma Nil2_notin_lex [iff]: "(xs, []) \<notin> lex r"
by (simp add:lex_conv)
lemma Cons_in_lex [simp]:
"(x # xs, y # ys) \<in> lex r \<longleftrightarrow> (x, y) \<in> r \<and> length xs = length ys \<or> x = y \<and> (xs, ys) \<in> lex r"
(is "?lhs = ?rhs")
proof
assume ?lhs then show ?rhs
by (simp add: lex_conv) (metis hd_append list.sel(1) list.sel(3) tl_append2)
next
assume ?rhs then show ?lhs
by (simp add: lex_conv) (blast intro: Cons_eq_appendI)
qed
lemma Nil_lenlex_iff1 [simp]: "([], ns) \<in> lenlex r \<longleftrightarrow> ns \<noteq> []"
and Nil_lenlex_iff2 [simp]: "(ns,[]) \<notin> lenlex r"
by (auto simp: lenlex_def)
lemma Cons_lenlex_iff:
"((m # ms, n # ns) \<in> lenlex r) \<longleftrightarrow>
length ms < length ns
\<or> length ms = length ns \<and> (m,n) \<in> r
\<or> (m = n \<and> (ms,ns) \<in> lenlex r)"
by (auto simp: lenlex_def)
lemma lenlex_irreflexive: "(\<And>x. (x,x) \<notin> r) \<Longrightarrow> (xs,xs) \<notin> lenlex r"
by (induction xs) (auto simp add: Cons_lenlex_iff)
lemma lenlex_trans:
"\<lbrakk>(x,y) \<in> lenlex r; (y,z) \<in> lenlex r; trans r\<rbrakk> \<Longrightarrow> (x,z) \<in> lenlex r"
by (meson lenlex_transI transD)
lemma lenlex_length: "(ms, ns) \<in> lenlex r \<Longrightarrow> length ms \<le> length ns"
by (auto simp: lenlex_def)
lemma lex_append_rightI:
"(xs, ys) \<in> lex r \<Longrightarrow> length vs = length us \<Longrightarrow> (xs @ us, ys @ vs) \<in> lex r"
by (fastforce simp: lex_def lexn_conv)
lemma lex_append_leftI:
"(ys, zs) \<in> lex r \<Longrightarrow> (xs @ ys, xs @ zs) \<in> lex r"
by (induct xs) auto
lemma lex_append_leftD:
"\<forall>x. (x,x) \<notin> r \<Longrightarrow> (xs @ ys, xs @ zs) \<in> lex r \<Longrightarrow> (ys, zs) \<in> lex r"
by (induct xs) auto
lemma lex_append_left_iff:
"\<forall>x. (x,x) \<notin> r \<Longrightarrow> (xs @ ys, xs @ zs) \<in> lex r \<longleftrightarrow> (ys, zs) \<in> lex r"
by(metis lex_append_leftD lex_append_leftI)
lemma lex_take_index:
assumes "(xs, ys) \<in> lex r"
obtains i where "i < length xs" and "i < length ys" and "take i xs = take i ys"
and "(xs ! i, ys ! i) \<in> r"
proof -
obtain n us x xs' y ys' where "(xs, ys) \<in> lexn r n" and "length xs = n" and "length ys = n"
and "xs = us @ x # xs'" and "ys = us @ y # ys'" and "(x, y) \<in> r"
using assms by (fastforce simp: lex_def lexn_conv)
then show ?thesis by (intro that [of "length us"]) auto
qed
lemma irrefl_lex: "irrefl r \<Longrightarrow> irrefl (lex r)"
by (meson irrefl_def lex_take_index)
lemma lexl_not_refl [simp]: "irrefl r \<Longrightarrow> (x,x) \<notin> lex r"
by (meson irrefl_def lex_take_index)
subsubsection \<open>Lexicographic Ordering\<close>
text \<open>Classical lexicographic ordering on lists, i.e. "a" < "ab" < "b".
This ordering does \emph{not} preserve well-foundedness.
Author: N. Voelker, March 2005.\<close>
definition lexord :: "('a \<times> 'a) set \<Rightarrow> ('a list \<times> 'a list) set" where
"lexord r = {(x,y). \<exists> a v. y = x @ a # v \<or>
(\<exists> u a b v w. (a,b) \<in> r \<and> x = u @ (a # v) \<and> y = u @ (b # w))}"
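text\<open>An added illustration (not part of the original theory) of the chain mentioned
above, with lists of naturals in place of strings: \<open>[0]\<close> precedes \<open>[0, 1]\<close>, which
precedes \<open>[1]\<close> (proof omitted).\<close>
lemma "([0::nat], [0, 1]) \<in> lexord {(x, y). x < y} \<and> ([0::nat, 1], [1]) \<in> lexord {(x, y). x < y}"
  (* added illustration only; proof intentionally omitted *)
  oops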
lemma lexord_Nil_left[simp]: "([],y) \<in> lexord r = (\<exists> a x. y = a # x)"
by (unfold lexord_def, induct_tac y, auto)
lemma lexord_Nil_right[simp]: "(x,[]) \<notin> lexord r"
by (unfold lexord_def, induct_tac x, auto)
lemma lexord_cons_cons[simp]:
"(a # x, b # y) \<in> lexord r \<longleftrightarrow> (a,b)\<in> r \<or> (a = b \<and> (x,y)\<in> lexord r)" (is "?lhs = ?rhs")
proof
assume ?lhs
then show ?rhs
apply (simp add: lexord_def)
apply (metis hd_append list.sel(1) list.sel(3) tl_append2)
done
qed (auto simp add: lexord_def; (blast | meson Cons_eq_appendI))
lemmas lexord_simps = lexord_Nil_left lexord_Nil_right lexord_cons_cons
lemma lexord_same_pref_iff:
"(xs @ ys, xs @ zs) \<in> lexord r \<longleftrightarrow> (\<exists>x \<in> set xs. (x,x) \<in> r) \<or> (ys, zs) \<in> lexord r"
by(induction xs) auto
lemma lexord_same_pref_if_irrefl[simp]:
"irrefl r \<Longrightarrow> (xs @ ys, xs @ zs) \<in> lexord r \<longleftrightarrow> (ys, zs) \<in> lexord r"
by (simp add: irrefl_def lexord_same_pref_iff)
lemma lexord_append_rightI: "\<exists> b z. y = b # z \<Longrightarrow> (x, x @ y) \<in> lexord r"
by (metis append_Nil2 lexord_Nil_left lexord_same_pref_iff)
lemma lexord_append_left_rightI:
"(a,b) \<in> r \<Longrightarrow> (u @ a # x, u @ b # y) \<in> lexord r"
by (simp add: lexord_same_pref_iff)
lemma lexord_append_leftI: "(u,v) \<in> lexord r \<Longrightarrow> (x @ u, x @ v) \<in> lexord r"
by (simp add: lexord_same_pref_iff)
lemma lexord_append_leftD:
"\<lbrakk>(x @ u, x @ v) \<in> lexord r; (\<forall>a. (a,a) \<notin> r) \<rbrakk> \<Longrightarrow> (u,v) \<in> lexord r"
by (simp add: lexord_same_pref_iff)
lemma lexord_take_index_conv:
"((x,y) \<in> lexord r) =
((length x < length y \<and> take (length x) y = x) \<or>
(\<exists>i. i < min(length x)(length y) \<and> take i x = take i y \<and> (x!i,y!i) \<in> r))"
proof -
have "(\<exists>a v. y = x @ a # v) = (length x < length y \<and> take (length x) y = x)"
by (metis Cons_nth_drop_Suc append_eq_conv_conj drop_all list.simps(3) not_le)
moreover
have "(\<exists>u a b. (a, b) \<in> r \<and> (\<exists>v. x = u @ a # v) \<and> (\<exists>w. y = u @ b # w)) =
(\<exists>i<length x. i < length y \<and> take i x = take i y \<and> (x ! i, y ! i) \<in> r)"
apply safe
using less_iff_Suc_add apply auto[1]
by (metis id_take_nth_drop)
ultimately show ?thesis
by (auto simp: lexord_def Let_def)
qed
\<comment> \<open>lexord is an extension of the partial ordering List.lex\<close>
lemma lexord_lex: "(x,y) \<in> lex r = ((x,y) \<in> lexord r \<and> length x = length y)"
proof (induction x arbitrary: y)
case (Cons a x y) then show ?case
by (cases y) (force+)
qed auto
lemma lexord_sufI:
assumes "(u,w) \<in> lexord r" "length w \<le> length u"
shows "(u@v,w@z) \<in> lexord r"
proof-
from leD[OF assms(2)] assms(1)[unfolded lexord_take_index_conv[of u w r] min_absorb2[OF assms(2)]]
obtain i where "take i u = take i w" and "(u!i,w!i) \<in> r" and "i < length w"
by blast
hence "((u@v)!i, (w@z)!i) \<in> r"
unfolding nth_append using less_le_trans[OF \<open>i < length w\<close> assms(2)] \<open>(u!i,w!i) \<in> r\<close>
by presburger
moreover have "i < min (length (u@v)) (length (w@z))"
using assms(2) \<open>i < length w\<close> by simp
moreover have "take i (u@v) = take i (w@z)"
using assms(2) \<open>i < length w\<close> \<open>take i u = take i w\<close> by simp
ultimately show ?thesis
using lexord_take_index_conv by blast
qed
lemma lexord_sufE:
assumes "(xs@zs,ys@qs) \<in> lexord r" "xs \<noteq> ys" "length xs = length ys" "length zs = length qs"
shows "(xs,ys) \<in> lexord r"
proof-
obtain i where "i < length (xs@zs)" and "i < length (ys@qs)" and "take i (xs@zs) = take i (ys@qs)"
and "((xs@zs) ! i, (ys@qs) ! i) \<in> r"
using assms(1) lex_take_index[unfolded lexord_lex,of "xs @ zs" "ys @ qs" r]
length_append[of xs zs, unfolded assms(3,4), folded length_append[of ys qs]]
by blast
have "length (take i xs) = length (take i ys)"
by (simp add: assms(3))
have "i < length xs"
using assms(2,3) le_less_linear take_all[of xs i] take_all[of ys i]
\<open>take i (xs @ zs) = take i (ys @ qs)\<close> append_eq_append_conv take_append
by metis
hence "(xs ! i, ys ! i) \<in> r"
using \<open>((xs @ zs) ! i, (ys @ qs) ! i) \<in> r\<close> assms(3) by (simp add: nth_append)
moreover have "take i xs = take i ys"
using assms(3) \<open>take i (xs @ zs) = take i (ys @ qs)\<close> by auto
ultimately show ?thesis
unfolding lexord_take_index_conv using \<open>i < length xs\<close> assms(3) by fastforce
qed
lemma lexord_irreflexive: "\<forall>x. (x,x) \<notin> r \<Longrightarrow> (xs,xs) \<notin> lexord r"
by (induct xs) auto
text\<open>By Ren\'e Thiemann:\<close>
lemma lexord_partial_trans:
"(\<And>x y z. x \<in> set xs \<Longrightarrow> (x,y) \<in> r \<Longrightarrow> (y,z) \<in> r \<Longrightarrow> (x,z) \<in> r)
\<Longrightarrow> (xs,ys) \<in> lexord r \<Longrightarrow> (ys,zs) \<in> lexord r \<Longrightarrow> (xs,zs) \<in> lexord r"
proof (induct xs arbitrary: ys zs)
case Nil
from Nil(3) show ?case unfolding lexord_def by (cases zs, auto)
next
case (Cons x xs yys zzs)
from Cons(3) obtain y ys where yys: "yys = y # ys" unfolding lexord_def
by (cases yys, auto)
note Cons = Cons[unfolded yys]
from Cons(3) have one: "(x,y) \<in> r \<or> x = y \<and> (xs,ys) \<in> lexord r" by auto
from Cons(4) obtain z zs where zzs: "zzs = z # zs" unfolding lexord_def
by (cases zzs, auto)
note Cons = Cons[unfolded zzs]
from Cons(4) have two: "(y,z) \<in> r \<or> y = z \<and> (ys,zs) \<in> lexord r" by auto
{
assume "(xs,ys) \<in> lexord r" and "(ys,zs) \<in> lexord r"
from Cons(1)[OF _ this] Cons(2)
have "(xs,zs) \<in> lexord r" by auto
} note ind1 = this
{
assume "(x,y) \<in> r" and "(y,z) \<in> r"
from Cons(2)[OF _ this] have "(x,z) \<in> r" by auto
} note ind2 = this
from one two ind1 ind2
have "(x,z) \<in> r \<or> x = z \<and> (xs,zs) \<in> lexord r" by blast
thus ?case unfolding zzs by auto
qed
lemma lexord_trans:
"\<lbrakk> (x, y) \<in> lexord r; (y, z) \<in> lexord r; trans r \<rbrakk> \<Longrightarrow> (x, z) \<in> lexord r"
by(auto simp: trans_def intro:lexord_partial_trans)
lemma lexord_transI: "trans r \<Longrightarrow> trans (lexord r)"
by (meson lexord_trans transI)
lemma total_lexord: "total r \<Longrightarrow> total (lexord r)"
unfolding total_on_def
proof clarsimp
fix x y
assume "\<forall>x y. x \<noteq> y \<longrightarrow> (x, y) \<in> r \<or> (y, x) \<in> r"
and "(x::'a list) \<noteq> y"
and "(y, x) \<notin> lexord r"
then
show "(x, y) \<in> lexord r"
proof (induction x arbitrary: y)
case Nil
then show ?case
by (metis lexord_Nil_left list.exhaust)
next
case (Cons a x y) then show ?case
by (cases y) (force+)
qed
qed
corollary lexord_linear: "(\<forall>a b. (a,b) \<in> r \<or> a = b \<or> (b,a) \<in> r) \<Longrightarrow> (x,y) \<in> lexord r \<or> x = y \<or> (y,x) \<in> lexord r"
using total_lexord by (metis UNIV_I total_on_def)
lemma lexord_irrefl:
"irrefl R \<Longrightarrow> irrefl (lexord R)"
by (simp add: irrefl_def lexord_irreflexive)
lemma lexord_asym:
assumes "asym R"
shows "asym (lexord R)"
proof
fix xs ys
assume "(xs, ys) \<in> lexord R"
then show "(ys, xs) \<notin> lexord R"
proof (induct xs arbitrary: ys)
case Nil
then show ?case by simp
next
case (Cons x xs)
then obtain z zs where ys: "ys = z # zs" by (cases ys) auto
with assms Cons show ?case by (auto dest: asymD)
qed
qed
lemma lexord_asymmetric:
assumes "asym R"
assumes hyp: "(a, b) \<in> lexord R"
shows "(b, a) \<notin> lexord R"
proof -
from \<open>asym R\<close> have "asym (lexord R)" by (rule lexord_asym)
then show ?thesis by (auto simp: hyp dest: asymD)
qed
lemma asym_lex: "asym R \<Longrightarrow> asym (lex R)"
by (meson asymI asymD irrefl_lex lexord_asym lexord_lex)
lemma asym_lenlex: "asym R \<Longrightarrow> asym (lenlex R)"
by (simp add: lenlex_def asym_inv_image asym_less_than asym_lex asym_lex_prod)
lemma lenlex_append1:
assumes len: "(us,xs) \<in> lenlex R" and eq: "length vs = length ys"
shows "(us @ vs, xs @ ys) \<in> lenlex R"
using len
proof (induction us)
case Nil
then show ?case
by (simp add: lenlex_def eq)
next
case (Cons u us)
with lex_append_rightI show ?case
by (fastforce simp add: lenlex_def eq)
qed
lemma lenlex_append2 [simp]:
assumes "irrefl R"
shows "(us @ xs, us @ ys) \<in> lenlex R \<longleftrightarrow> (xs, ys) \<in> lenlex R"
proof (induction us)
case Nil
then show ?case
by (simp add: lenlex_def)
next
case (Cons u us)
with assms show ?case
by (auto simp: lenlex_def irrefl_def)
qed
text \<open>
Predicate version of lexicographic order integrated with Isabelle's order type classes.
Author: Andreas Lochbihler
\<close>
context ord
begin
context
notes [[inductive_internals]]
begin
inductive lexordp :: "'a list \<Rightarrow> 'a list \<Rightarrow> bool"
where
Nil: "lexordp [] (y # ys)"
| Cons: "x < y \<Longrightarrow> lexordp (x # xs) (y # ys)"
| Cons_eq:
"\<lbrakk> \<not> x < y; \<not> y < x; lexordp xs ys \<rbrakk> \<Longrightarrow> lexordp (x # xs) (y # ys)"
end
lemma lexordp_simps [simp, code]:
"lexordp [] ys = (ys \<noteq> [])"
"lexordp xs [] = False"
"lexordp (x # xs) (y # ys) \<longleftrightarrow> x < y \<or> \<not> y < x \<and> lexordp xs ys"
by(subst lexordp.simps, fastforce simp add: neq_Nil_conv)+
inductive lexordp_eq :: "'a list \<Rightarrow> 'a list \<Rightarrow> bool" where
Nil: "lexordp_eq [] ys"
| Cons: "x < y \<Longrightarrow> lexordp_eq (x # xs) (y # ys)"
| Cons_eq: "\<lbrakk> \<not> x < y; \<not> y < x; lexordp_eq xs ys \<rbrakk> \<Longrightarrow> lexordp_eq (x # xs) (y # ys)"
lemma lexordp_eq_simps [simp, code]:
"lexordp_eq [] ys = True"
"lexordp_eq xs [] \<longleftrightarrow> xs = []"
"lexordp_eq (x # xs) [] = False"
"lexordp_eq (x # xs) (y # ys) \<longleftrightarrow> x < y \<or> \<not> y < x \<and> lexordp_eq xs ys"
by(subst lexordp_eq.simps, fastforce)+
lemma lexordp_append_rightI: "ys \<noteq> Nil \<Longrightarrow> lexordp xs (xs @ ys)"
by(induct xs)(auto simp add: neq_Nil_conv)
lemma lexordp_append_left_rightI: "x < y \<Longrightarrow> lexordp (us @ x # xs) (us @ y # ys)"
by(induct us) auto
lemma lexordp_eq_refl: "lexordp_eq xs xs"
by(induct xs) simp_all
lemma lexordp_append_leftI: "lexordp us vs \<Longrightarrow> lexordp (xs @ us) (xs @ vs)"
by(induct xs) auto
lemma lexordp_append_leftD: "\<lbrakk> lexordp (xs @ us) (xs @ vs); \<forall>a. \<not> a < a \<rbrakk> \<Longrightarrow> lexordp us vs"
by(induct xs) auto
lemma lexordp_irreflexive:
assumes irrefl: "\<forall>x. \<not> x < x"
shows "\<not> lexordp xs xs"
proof
assume "lexordp xs xs"
thus False by(induct xs ys\<equiv>xs)(simp_all add: irrefl)
qed
lemma lexordp_into_lexordp_eq:
"lexordp xs ys \<Longrightarrow> lexordp_eq xs ys"
by (induction rule: lexordp.induct) simp_all
lemma lexordp_eq_pref: "lexordp_eq u (u @ v)"
by (metis append_Nil2 lexordp_append_rightI lexordp_eq_refl lexordp_into_lexordp_eq)
end
declare ord.lexordp_simps [simp, code]
declare ord.lexordp_eq_simps [simp, code]
context order
begin
lemma lexordp_antisym:
assumes "lexordp xs ys" "lexordp ys xs"
shows False
using assms by induct auto
lemma lexordp_irreflexive': "\<not> lexordp xs xs"
by(rule lexordp_irreflexive) simp
end
context linorder begin
lemma lexordp_cases [consumes 1, case_names Nil Cons Cons_eq, cases pred: lexordp]:
assumes "lexordp xs ys"
obtains (Nil) y ys' where "xs = []" "ys = y # ys'"
| (Cons) x xs' y ys' where "xs = x # xs'" "ys = y # ys'" "x < y"
| (Cons_eq) x xs' ys' where "xs = x # xs'" "ys = x # ys'" "lexordp xs' ys'"
using assms by cases (fastforce simp add: not_less_iff_gr_or_eq)+
lemma lexordp_induct [consumes 1, case_names Nil Cons Cons_eq, induct pred: lexordp]:
assumes major: "lexordp xs ys"
and Nil: "\<And>y ys. P [] (y # ys)"
and Cons: "\<And>x xs y ys. x < y \<Longrightarrow> P (x # xs) (y # ys)"
and Cons_eq: "\<And>x xs ys. \<lbrakk> lexordp xs ys; P xs ys \<rbrakk> \<Longrightarrow> P (x # xs) (x # ys)"
shows "P xs ys"
using major by induct (simp_all add: Nil Cons not_less_iff_gr_or_eq Cons_eq)
lemma lexordp_iff:
"lexordp xs ys \<longleftrightarrow> (\<exists>x vs. ys = xs @ x # vs) \<or> (\<exists>us a b vs ws. a < b \<and> xs = us @ a # vs \<and> ys = us @ b # ws)"
(is "?lhs = ?rhs")
proof
assume ?lhs thus ?rhs
proof induct
case Cons_eq thus ?case by simp (metis append.simps(2))
qed(fastforce intro: disjI2 del: disjCI intro: exI[where x="[]"])+
next
assume ?rhs thus ?lhs
by(auto intro: lexordp_append_leftI[where us="[]", simplified] lexordp_append_leftI)
qed
lemma lexordp_conv_lexord:
"lexordp xs ys \<longleftrightarrow> (xs, ys) \<in> lexord {(x, y). x < y}"
by(simp add: lexordp_iff lexord_def)
lemma lexordp_eq_antisym:
assumes "lexordp_eq xs ys" "lexordp_eq ys xs"
shows "xs = ys"
using assms by induct simp_all
lemma lexordp_eq_trans:
assumes "lexordp_eq xs ys" and "lexordp_eq ys zs"
shows "lexordp_eq xs zs"
using assms
by (induct arbitrary: zs) (case_tac zs; auto)+
lemma lexordp_trans:
assumes "lexordp xs ys" "lexordp ys zs"
shows "lexordp xs zs"
using assms
by (induct arbitrary: zs) (case_tac zs; auto)+
lemma lexordp_linear: "lexordp xs ys \<or> xs = ys \<or> lexordp ys xs"
by(induct xs arbitrary: ys; case_tac ys; fastforce)
lemma lexordp_conv_lexordp_eq: "lexordp xs ys \<longleftrightarrow> lexordp_eq xs ys \<and> \<not> lexordp_eq ys xs"
(is "?lhs \<longleftrightarrow> ?rhs")
proof
assume ?lhs
hence "\<not> lexordp_eq ys xs" by induct simp_all
with \<open>?lhs\<close> show ?rhs by (simp add: lexordp_into_lexordp_eq)
next
assume ?rhs
hence "lexordp_eq xs ys" "\<not> lexordp_eq ys xs" by simp_all
thus ?lhs by induct simp_all
qed
lemma lexordp_eq_conv_lexord: "lexordp_eq xs ys \<longleftrightarrow> xs = ys \<or> lexordp xs ys"
by(auto simp add: lexordp_conv_lexordp_eq lexordp_eq_refl dest: lexordp_eq_antisym)
lemma lexordp_eq_linear: "lexordp_eq xs ys \<or> lexordp_eq ys xs"
by (induct xs arbitrary: ys) (case_tac ys; auto)+
lemma lexordp_linorder: "class.linorder lexordp_eq lexordp"
by unfold_locales
(auto simp add: lexordp_conv_lexordp_eq lexordp_eq_refl lexordp_eq_antisym intro: lexordp_eq_trans del: disjCI intro: lexordp_eq_linear)
end
subsubsection \<open>Lexicographic combination of measure functions\<close>
text \<open>These are useful for termination proofs.\<close>
definition "measures fs = inv_image (lex less_than) (%a. map (%f. f a) fs)"
lemma wf_measures[simp]: "wf (measures fs)"
unfolding measures_def
by blast
lemma in_measures[simp]:
"(x, y) \<in> measures [] = False"
"(x, y) \<in> measures (f # fs)
= (f x < f y \<or> (f x = f y \<and> (x, y) \<in> measures fs))"
unfolding measures_def
by auto
lemma measures_less: "f x < f y \<Longrightarrow> (x, y) \<in> measures (f#fs)"
by simp
lemma measures_lesseq: "f x \<le> f y \<Longrightarrow> (x, y) \<in> measures fs \<Longrightarrow> (x, y) \<in> measures (f#fs)"
by auto
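text\<open>An added illustration (not part of the original theory): in
\<open>measures [fst, snd]\<close> the first measure function is compared first, so a decrease
in the first component already suffices.\<close>
lemma "((1::nat, 5::nat), (2, 3)) \<in> measures [fst, snd]"
  (* added illustration only *)
  by simp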
subsubsection \<open>Lifting Relations to Lists: one element\<close>
definition listrel1 :: "('a \<times> 'a) set \<Rightarrow> ('a list \<times> 'a list) set" where
"listrel1 r = {(xs,ys).
\<exists>us z z' vs. xs = us @ z # vs \<and> (z,z') \<in> r \<and> ys = us @ z' # vs}"
lemma listrel1I:
"\<lbrakk> (x, y) \<in> r; xs = us @ x # vs; ys = us @ y # vs \<rbrakk> \<Longrightarrow>
(xs, ys) \<in> listrel1 r"
unfolding listrel1_def by auto
lemma listrel1E:
"\<lbrakk> (xs, ys) \<in> listrel1 r;
!!x y us vs. \<lbrakk> (x, y) \<in> r; xs = us @ x # vs; ys = us @ y # vs \<rbrakk> \<Longrightarrow> P
\<rbrakk> \<Longrightarrow> P"
unfolding listrel1_def by auto
lemma not_Nil_listrel1 [iff]: "([], xs) \<notin> listrel1 r"
unfolding listrel1_def by blast
lemma not_listrel1_Nil [iff]: "(xs, []) \<notin> listrel1 r"
unfolding listrel1_def by blast
lemma Cons_listrel1_Cons [iff]:
"(x # xs, y # ys) \<in> listrel1 r \<longleftrightarrow>
(x,y) \<in> r \<and> xs = ys \<or> x = y \<and> (xs, ys) \<in> listrel1 r"
by (simp add: listrel1_def Cons_eq_append_conv) (blast)
lemma listrel1I1: "(x,y) \<in> r \<Longrightarrow> (x # xs, y # xs) \<in> listrel1 r"
by fast
lemma listrel1I2: "(xs, ys) \<in> listrel1 r \<Longrightarrow> (x # xs, x # ys) \<in> listrel1 r"
by fast
lemma append_listrel1I:
"(xs, ys) \<in> listrel1 r \<and> us = vs \<or> xs = ys \<and> (us, vs) \<in> listrel1 r
\<Longrightarrow> (xs @ us, ys @ vs) \<in> listrel1 r"
unfolding listrel1_def
by auto (blast intro: append_eq_appendI)+
lemma Cons_listrel1E1[elim!]:
assumes "(x # xs, ys) \<in> listrel1 r"
and "\<And>y. ys = y # xs \<Longrightarrow> (x, y) \<in> r \<Longrightarrow> R"
and "\<And>zs. ys = x # zs \<Longrightarrow> (xs, zs) \<in> listrel1 r \<Longrightarrow> R"
shows R
using assms by (cases ys) blast+
lemma Cons_listrel1E2[elim!]:
assumes "(xs, y # ys) \<in> listrel1 r"
and "\<And>x. xs = x # ys \<Longrightarrow> (x, y) \<in> r \<Longrightarrow> R"
and "\<And>zs. xs = y # zs \<Longrightarrow> (zs, ys) \<in> listrel1 r \<Longrightarrow> R"
shows R
using assms by (cases xs) blast+
lemma snoc_listrel1_snoc_iff:
"(xs @ [x], ys @ [y]) \<in> listrel1 r
\<longleftrightarrow> (xs, ys) \<in> listrel1 r \<and> x = y \<or> xs = ys \<and> (x,y) \<in> r" (is "?L \<longleftrightarrow> ?R")
proof
assume ?L thus ?R
by (fastforce simp: listrel1_def snoc_eq_iff_butlast butlast_append)
next
assume ?R then show ?L unfolding listrel1_def by force
qed
lemma listrel1_eq_len: "(xs,ys) \<in> listrel1 r \<Longrightarrow> length xs = length ys"
unfolding listrel1_def by auto
lemma listrel1_mono:
"r \<subseteq> s \<Longrightarrow> listrel1 r \<subseteq> listrel1 s"
unfolding listrel1_def by blast
lemma listrel1_converse: "listrel1 (r\<inverse>) = (listrel1 r)\<inverse>"
unfolding listrel1_def by blast
lemma in_listrel1_converse:
"(x,y) \<in> listrel1 (r\<inverse>) \<longleftrightarrow> (x,y) \<in> (listrel1 r)\<inverse>"
unfolding listrel1_def by blast
lemma listrel1_iff_update:
"(xs,ys) \<in> (listrel1 r)
\<longleftrightarrow> (\<exists>y n. (xs ! n, y) \<in> r \<and> n < length xs \<and> ys = xs[n:=y])" (is "?L \<longleftrightarrow> ?R")
proof
assume "?L"
then obtain x y u v where "xs = u @ x # v" "ys = u @ y # v" "(x,y) \<in> r"
unfolding listrel1_def by auto
then have "ys = xs[length u := y]" and "length u < length xs"
and "(xs ! length u, y) \<in> r" by auto
then show "?R" by auto
next
assume "?R"
then obtain x y n where "(xs!n, y) \<in> r" "n < size xs" "ys = xs[n:=y]" "x = xs!n"
by auto
then obtain u v where "xs = u @ x # v" and "ys = u @ y # v" and "(x, y) \<in> r"
by (auto intro: upd_conv_take_nth_drop id_take_nth_drop)
then show "?L" by (auto simp: listrel1_def)
qed
text\<open>Accessible part and well-foundedness:\<close>
lemma Cons_acc_listrel1I [intro!]:
"x \<in> Wellfounded.acc r \<Longrightarrow> xs \<in> Wellfounded.acc (listrel1 r) \<Longrightarrow> (x # xs) \<in> Wellfounded.acc (listrel1 r)"
proof (induction arbitrary: xs set: Wellfounded.acc)
case outer: (1 u)
show ?case
proof (induct xs rule: acc_induct)
case 1
show "xs \<in> Wellfounded.acc (listrel1 r)"
by (simp add: outer.prems)
qed (metis (no_types, lifting) Cons_listrel1E2 acc.simps outer.IH)
qed
lemma lists_accD: "xs \<in> lists (Wellfounded.acc r) \<Longrightarrow> xs \<in> Wellfounded.acc (listrel1 r)"
proof (induct set: lists)
case Nil
then show ?case
by (meson acc.intros not_listrel1_Nil)
next
case (Cons a l)
then show ?case
by blast
qed
lemma lists_accI: "xs \<in> Wellfounded.acc (listrel1 r) \<Longrightarrow> xs \<in> lists (Wellfounded.acc r)"
proof (induction set: Wellfounded.acc)
case (1 x)
then have "\<And>u v. \<lbrakk>u \<in> set x; (v, u) \<in> r\<rbrakk> \<Longrightarrow> v \<in> Wellfounded.acc r"
by (metis in_lists_conv_set in_set_conv_decomp listrel1I)
then show ?case
by (meson acc.intros in_listsI)
qed
lemma wf_listrel1_iff[simp]: "wf(listrel1 r) = wf r"
by (auto simp: wf_acc_iff
intro: lists_accD lists_accI[THEN Cons_in_lists_iff[THEN iffD1, THEN conjunct1]])
subsubsection \<open>Lifting Relations to Lists: all elements\<close>
inductive_set
listrel :: "('a \<times> 'b) set \<Rightarrow> ('a list \<times> 'b list) set"
for r :: "('a \<times> 'b) set"
where
Nil: "([],[]) \<in> listrel r"
| Cons: "\<lbrakk>(x,y) \<in> r; (xs,ys) \<in> listrel r\<rbrakk> \<Longrightarrow> (x#xs, y#ys) \<in> listrel r"
inductive_cases listrel_Nil1 [elim!]: "([],xs) \<in> listrel r"
inductive_cases listrel_Nil2 [elim!]: "(xs,[]) \<in> listrel r"
inductive_cases listrel_Cons1 [elim!]: "(y#ys,xs) \<in> listrel r"
inductive_cases listrel_Cons2 [elim!]: "(xs,y#ys) \<in> listrel r"
lemma listrel_eq_len: "(xs, ys) \<in> listrel r \<Longrightarrow> length xs = length ys"
by(induct rule: listrel.induct) auto
lemma listrel_iff_zip [code_unfold]: "(xs,ys) \<in> listrel r \<longleftrightarrow>
length xs = length ys \<and> (\<forall>(x,y) \<in> set(zip xs ys). (x,y) \<in> r)" (is "?L \<longleftrightarrow> ?R")
proof
assume ?L thus ?R by induct (auto intro: listrel_eq_len)
next
assume ?R thus ?L
apply (clarify)
by (induct rule: list_induct2) (auto intro: listrel.intros)
qed
lemma listrel_iff_nth: "(xs,ys) \<in> listrel r \<longleftrightarrow>
length xs = length ys \<and> (\<forall>n < length xs. (xs!n, ys!n) \<in> r)" (is "?L \<longleftrightarrow> ?R")
by (auto simp add: all_set_conv_all_nth listrel_iff_zip)
lemma listrel_mono: "r \<subseteq> s \<Longrightarrow> listrel r \<subseteq> listrel s"
by (meson listrel_iff_nth subrelI subset_eq)
lemma listrel_subset:
assumes "r \<subseteq> A \<times> A" shows "listrel r \<subseteq> lists A \<times> lists A"
proof clarify
show "a \<in> lists A \<and> b \<in> lists A" if "(a, b) \<in> listrel r" for a b
using that assms by (induction rule: listrel.induct, auto)
qed
lemma listrel_refl_on:
assumes "refl_on A r" shows "refl_on (lists A) (listrel r)"
proof -
have "l \<in> lists A \<Longrightarrow> (l, l) \<in> listrel r" for l
using assms unfolding refl_on_def
by (induction l, auto intro: listrel.intros)
then show ?thesis
by (meson assms listrel_subset refl_on_def)
qed
lemma listrel_sym: "sym r \<Longrightarrow> sym (listrel r)"
by (simp add: listrel_iff_nth sym_def)
lemma listrel_trans:
assumes "trans r" shows "trans (listrel r)"
proof -
have "(x, z) \<in> listrel r" if "(x, y) \<in> listrel r" "(y, z) \<in> listrel r" for x y z
using that
proof induction
case (Cons x y xs ys)
then show ?case
by clarsimp (metis assms listrel.Cons listrel_iff_nth transD)
qed auto
then show ?thesis
using transI by blast
qed
theorem equiv_listrel: "equiv A r \<Longrightarrow> equiv (lists A) (listrel r)"
by (simp add: equiv_def listrel_refl_on listrel_sym listrel_trans)
lemma listrel_rtrancl_refl[iff]: "(xs,xs) \<in> listrel(r\<^sup>*)"
using listrel_refl_on[of UNIV, OF refl_rtrancl]
by(auto simp: refl_on_def)
lemma listrel_rtrancl_trans:
"\<lbrakk>(xs,ys) \<in> listrel(r\<^sup>*); (ys,zs) \<in> listrel(r\<^sup>*)\<rbrakk> \<Longrightarrow> (xs,zs) \<in> listrel(r\<^sup>*)"
by (metis listrel_trans trans_def trans_rtrancl)
lemma listrel_Nil [simp]: "listrel r `` {[]} = {[]}"
by (blast intro: listrel.intros)
lemma listrel_Cons:
"listrel r `` {x#xs} = set_Cons (r``{x}) (listrel r `` {xs})"
by (auto simp add: set_Cons_def intro: listrel.intros)
text \<open>Relating \<^term>\<open>listrel1\<close>, \<^term>\<open>listrel\<close> and closures:\<close>
lemma listrel1_rtrancl_subset_rtrancl_listrel1: "listrel1 (r\<^sup>*) \<subseteq> (listrel1 r)\<^sup>*"
proof (rule subrelI)
fix xs ys assume 1: "(xs,ys) \<in> listrel1 (r\<^sup>*)"
{ fix x y us vs
have "(x,y) \<in> r\<^sup>* \<Longrightarrow> (us @ x # vs, us @ y # vs) \<in> (listrel1 r)\<^sup>*"
proof(induct rule: rtrancl.induct)
case rtrancl_refl show ?case by simp
next
case rtrancl_into_rtrancl thus ?case
by (metis listrel1I rtrancl.rtrancl_into_rtrancl)
qed }
thus "(xs,ys) \<in> (listrel1 r)\<^sup>*" using 1 by(blast elim: listrel1E)
qed
lemma rtrancl_listrel1_eq_len: "(x,y) \<in> (listrel1 r)\<^sup>* \<Longrightarrow> length x = length y"
by (induct rule: rtrancl.induct) (auto intro: listrel1_eq_len)
lemma rtrancl_listrel1_ConsI1:
"(xs,ys) \<in> (listrel1 r)\<^sup>* \<Longrightarrow> (x#xs,x#ys) \<in> (listrel1 r)\<^sup>*"
proof (induction rule: rtrancl.induct)
case (rtrancl_into_rtrancl a b c)
then show ?case
by (metis listrel1I2 rtrancl.rtrancl_into_rtrancl)
qed auto
lemma rtrancl_listrel1_ConsI2:
"(x,y) \<in> r\<^sup>* \<Longrightarrow> (xs, ys) \<in> (listrel1 r)\<^sup>* \<Longrightarrow> (x # xs, y # ys) \<in> (listrel1 r)\<^sup>*"
by (meson in_mono listrel1I1 listrel1_rtrancl_subset_rtrancl_listrel1 rtrancl_listrel1_ConsI1 rtrancl_trans)
lemma listrel1_subset_listrel:
"r \<subseteq> r' \<Longrightarrow> refl r' \<Longrightarrow> listrel1 r \<subseteq> listrel(r')"
by(auto elim!: listrel1E simp add: listrel_iff_zip set_zip refl_on_def)
lemma listrel_reflcl_if_listrel1:
"(xs,ys) \<in> listrel1 r \<Longrightarrow> (xs,ys) \<in> listrel(r\<^sup>*)"
by(erule listrel1E)(auto simp add: listrel_iff_zip set_zip)
lemma listrel_rtrancl_eq_rtrancl_listrel1: "listrel (r\<^sup>*) = (listrel1 r)\<^sup>*"
proof
{ fix x y assume "(x,y) \<in> listrel (r\<^sup>*)"
then have "(x,y) \<in> (listrel1 r)\<^sup>*"
by induct (auto intro: rtrancl_listrel1_ConsI2) }
then show "listrel (r\<^sup>*) \<subseteq> (listrel1 r)\<^sup>*"
by (rule subrelI)
next
show "listrel (r\<^sup>*) \<supseteq> (listrel1 r)\<^sup>*"
proof(rule subrelI)
fix xs ys assume "(xs,ys) \<in> (listrel1 r)\<^sup>*"
then show "(xs,ys) \<in> listrel (r\<^sup>*)"
proof induct
case base show ?case by(auto simp add: listrel_iff_zip set_zip)
next
case (step ys zs)
thus ?case by (metis listrel_reflcl_if_listrel1 listrel_rtrancl_trans)
qed
qed
qed
lemma rtrancl_listrel1_if_listrel:
"(xs,ys) \<in> listrel r \<Longrightarrow> (xs,ys) \<in> (listrel1 r)\<^sup>*"
by(metis listrel_rtrancl_eq_rtrancl_listrel1 subsetD[OF listrel_mono] r_into_rtrancl subsetI)
lemma listrel_subset_rtrancl_listrel1: "listrel r \<subseteq> (listrel1 r)\<^sup>*"
by(fast intro:rtrancl_listrel1_if_listrel)
subsection \<open>Size function\<close>
lemma [measure_function]: "is_measure f \<Longrightarrow> is_measure (size_list f)"
by (rule is_measure_trivial)
lemma [measure_function]: "is_measure f \<Longrightarrow> is_measure (size_option f)"
by (rule is_measure_trivial)
lemma size_list_estimation[termination_simp]:
"x \<in> set xs \<Longrightarrow> y < f x \<Longrightarrow> y < size_list f xs"
by (induct xs) auto
lemma size_list_estimation'[termination_simp]:
"x \<in> set xs \<Longrightarrow> y \<le> f x \<Longrightarrow> y \<le> size_list f xs"
by (induct xs) auto
lemma size_list_map[simp]: "size_list f (map g xs) = size_list (f \<circ> g) xs"
by (induct xs) auto
lemma size_list_append[simp]: "size_list f (xs @ ys) = size_list f xs + size_list f ys"
by (induct xs, auto)
lemma size_list_pointwise[termination_simp]:
"(\<And>x. x \<in> set xs \<Longrightarrow> f x \<le> g x) \<Longrightarrow> size_list f xs \<le> size_list g xs"
by (induct xs) force+
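text \<open>Editor's illustration (not part of the original theory): on a concrete list,
\<^const>\<open>size_list\<close> adds the measures of the elements plus one unit per list constructor.\<close>
lemma "size_list f [a, b] = f a + f b + Suc (Suc 0)"
by simp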
subsection \<open>Monad operation\<close>
definition bind :: "'a list \<Rightarrow> ('a \<Rightarrow> 'b list) \<Rightarrow> 'b list" where
"bind xs f = concat (map f xs)"
hide_const (open) bind
lemma bind_simps [simp]:
"List.bind [] f = []"
"List.bind (x # xs) f = f x @ List.bind xs f"
by (simp_all add: bind_def)
lemma list_bind_cong [fundef_cong]:
assumes "xs = ys" "(\<And>x. x \<in> set xs \<Longrightarrow> f x = g x)"
shows "List.bind xs f = List.bind ys g"
proof -
from assms(2) have "List.bind xs f = List.bind xs g"
by (induction xs) simp_all
with assms(1) show ?thesis by simp
qed
lemma set_list_bind: "set (List.bind xs f) = (\<Union>x\<in>set xs. set (f x))"
by (induction xs) simp_all
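text \<open>Editor's illustration (not part of the original theory): \<^const>\<open>List.bind\<close>
simply appends the images of the elements in order.\<close>
lemma "List.bind [a, b] f = f a @ f b"
by simp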
subsection \<open>Code generation\<close>
text\<open>Optional tail recursive version of \<^const>\<open>map\<close>. Can avoid
stack overflow in some target languages.\<close>
fun map_tailrec_rev :: "('a \<Rightarrow> 'b) \<Rightarrow> 'a list \<Rightarrow> 'b list \<Rightarrow> 'b list" where
"map_tailrec_rev f [] bs = bs" |
"map_tailrec_rev f (a#as) bs = map_tailrec_rev f as (f a # bs)"
lemma map_tailrec_rev:
"map_tailrec_rev f as bs = rev(map f as) @ bs"
by(induction as arbitrary: bs) simp_all
definition map_tailrec :: "('a \<Rightarrow> 'b) \<Rightarrow> 'a list \<Rightarrow> 'b list" where
"map_tailrec f as = rev (map_tailrec_rev f as [])"
text\<open>Code equation:\<close>
lemma map_eq_map_tailrec: "map = map_tailrec"
by(simp add: fun_eq_iff map_tailrec_def map_tailrec_rev)
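text \<open>Editor's illustration (not part of the original theory): the tail-recursive
variant agrees with \<^const>\<open>map\<close> on a concrete list.\<close>
lemma "map_tailrec f [a, b, c] = [f a, f b, f c]"
by (simp add: map_eq_map_tailrec [symmetric])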
subsubsection \<open>Counterparts for set-related operations\<close>
definition member :: "'a list \<Rightarrow> 'a \<Rightarrow> bool" where
[code_abbrev]: "member xs x \<longleftrightarrow> x \<in> set xs"
text \<open>
Use \<open>member\<close> only for generating executable code. Otherwise use
\<^prop>\<open>x \<in> set xs\<close> instead --- it is much easier to reason about.
\<close>
lemma member_rec [code]:
"member (x # xs) y \<longleftrightarrow> x = y \<or> member xs y"
"member [] y \<longleftrightarrow> False"
by (auto simp add: member_def)
lemma in_set_member (* FIXME delete candidate *):
"x \<in> set xs \<longleftrightarrow> member xs x"
by (simp add: member_def)
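text \<open>Editor's illustration (not part of the original theory): on concrete lists,
\<^const>\<open>member\<close> behaves exactly like membership in the element set.\<close>
lemma "member [a, b] b" and "\<not> member [] b"
by (simp_all add: member_def)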
lemmas list_all_iff [code_abbrev] = fun_cong[OF list.pred_set]
definition list_ex :: "('a \<Rightarrow> bool) \<Rightarrow> 'a list \<Rightarrow> bool" where
list_ex_iff [code_abbrev]: "list_ex P xs \<longleftrightarrow> Bex (set xs) P"
definition list_ex1 :: "('a \<Rightarrow> bool) \<Rightarrow> 'a list \<Rightarrow> bool" where
list_ex1_iff [code_abbrev]: "list_ex1 P xs \<longleftrightarrow> (\<exists>! x. x \<in> set xs \<and> P x)"
text \<open>
Usually you should prefer \<open>\<forall>x\<in>set xs\<close>, \<open>\<exists>x\<in>set xs\<close>
and \<open>\<exists>!x. x\<in>set xs \<and> _\<close> over \<^const>\<open>list_all\<close>, \<^const>\<open>list_ex\<close>
and \<^const>\<open>list_ex1\<close> in specifications.
\<close>
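text \<open>Editor's illustration (not part of the original theory): on concrete lists the
combinators unfold to the expected conjunction and disjunction.\<close>
lemma "list_all P [a, b] \<longleftrightarrow> P a \<and> P b" and "list_ex P [a, b] \<longleftrightarrow> P a \<or> P b"
by (simp_all add: list_all_iff list_ex_iff)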
lemma list_all_simps [code]:
"list_all P (x # xs) \<longleftrightarrow> P x \<and> list_all P xs"
"list_all P [] \<longleftrightarrow> True"
by (simp_all add: list_all_iff)
lemma list_ex_simps [simp, code]:
"list_ex P (x # xs) \<longleftrightarrow> P x \<or> list_ex P xs"
"list_ex P [] \<longleftrightarrow> False"
by (simp_all add: list_ex_iff)
lemma list_ex1_simps [simp, code]:
"list_ex1 P [] = False"
"list_ex1 P (x # xs) = (if P x then list_all (\<lambda>y. \<not> P y \<or> x = y) xs else list_ex1 P xs)"
by (auto simp add: list_ex1_iff list_all_iff)
lemma Ball_set_list_all: (* FIXME delete candidate *)
"Ball (set xs) P \<longleftrightarrow> list_all P xs"
by (simp add: list_all_iff)
lemma Bex_set_list_ex: (* FIXME delete candidate *)
"Bex (set xs) P \<longleftrightarrow> list_ex P xs"
by (simp add: list_ex_iff)
lemma list_all_append [simp]:
"list_all P (xs @ ys) \<longleftrightarrow> list_all P xs \<and> list_all P ys"
by (auto simp add: list_all_iff)
lemma list_ex_append [simp]:
"list_ex P (xs @ ys) \<longleftrightarrow> list_ex P xs \<or> list_ex P ys"
by (auto simp add: list_ex_iff)
lemma list_all_rev [simp]:
"list_all P (rev xs) \<longleftrightarrow> list_all P xs"
by (simp add: list_all_iff)
lemma list_ex_rev [simp]:
"list_ex P (rev xs) \<longleftrightarrow> list_ex P xs"
by (simp add: list_ex_iff)
lemma list_all_length:
"list_all P xs \<longleftrightarrow> (\<forall>n < length xs. P (xs ! n))"
by (auto simp add: list_all_iff set_conv_nth)
lemma list_ex_length:
"list_ex P xs \<longleftrightarrow> (\<exists>n < length xs. P (xs ! n))"
by (auto simp add: list_ex_iff set_conv_nth)
lemmas list_all_cong [fundef_cong] = list.pred_cong
lemma list_ex_cong [fundef_cong]:
"xs = ys \<Longrightarrow> (\<And>x. x \<in> set ys \<Longrightarrow> f x = g x) \<Longrightarrow> list_ex f xs = list_ex g ys"
by (simp add: list_ex_iff)
definition can_select :: "('a \<Rightarrow> bool) \<Rightarrow> 'a set \<Rightarrow> bool" where
[code_abbrev]: "can_select P A = (\<exists>!x\<in>A. P x)"
lemma can_select_set_list_ex1 [code]:
"can_select P (set A) = list_ex1 P A"
by (simp add: list_ex1_iff can_select_def)
text \<open>Executable checks for relations on sets\<close>
definition listrel1p :: "('a \<Rightarrow> 'a \<Rightarrow> bool) \<Rightarrow> 'a list \<Rightarrow> 'a list \<Rightarrow> bool" where
"listrel1p r xs ys = ((xs, ys) \<in> listrel1 {(x, y). r x y})"
lemma [code_unfold]:
"(xs, ys) \<in> listrel1 r = listrel1p (\<lambda>x y. (x, y) \<in> r) xs ys"
unfolding listrel1p_def by auto
lemma [code]:
"listrel1p r [] xs = False"
"listrel1p r xs [] = False"
"listrel1p r (x # xs) (y # ys) \<longleftrightarrow>
r x y \<and> xs = ys \<or> x = y \<and> listrel1p r xs ys"
by (simp add: listrel1p_def)+
definition
lexordp :: "('a \<Rightarrow> 'a \<Rightarrow> bool) \<Rightarrow> 'a list \<Rightarrow> 'a list \<Rightarrow> bool" where
"lexordp r xs ys = ((xs, ys) \<in> lexord {(x, y). r x y})"
lemma [code_unfold]:
"(xs, ys) \<in> lexord r = lexordp (\<lambda>x y. (x, y) \<in> r) xs ys"
unfolding lexordp_def by auto
lemma [code]:
"lexordp r xs [] = False"
"lexordp r [] (y#ys) = True"
"lexordp r (x # xs) (y # ys) = (r x y \<or> (x = y \<and> lexordp r xs ys))"
unfolding lexordp_def by auto
text \<open>Bounded quantification and summation over nats.\<close>
lemma atMost_upto [code_unfold]:
"{..n} = set [0..<Suc n]"
by auto
lemma atLeast_upt [code_unfold]:
"{..<n} = set [0..<n]"
by auto
lemma greaterThanLessThan_upt [code_unfold]:
"{n<..<m} = set [Suc n..<m]"
by auto
lemmas atLeastLessThan_upt [code_unfold] = set_upt [symmetric]
lemma greaterThanAtMost_upt [code_unfold]:
"{n<..m} = set [Suc n..<Suc m]"
by auto
lemma atLeastAtMost_upt [code_unfold]:
"{n..m} = set [n..<Suc m]"
by auto
lemma all_nat_less_eq [code_unfold]:
"(\<forall>m<n::nat. P m) \<longleftrightarrow> (\<forall>m \<in> {0..<n}. P m)"
by auto
lemma ex_nat_less_eq [code_unfold]:
"(\<exists>m<n::nat. P m) \<longleftrightarrow> (\<exists>m \<in> {0..<n}. P m)"
by auto
lemma all_nat_less [code_unfold]:
"(\<forall>m\<le>n::nat. P m) \<longleftrightarrow> (\<forall>m \<in> {0..n}. P m)"
by auto
lemma ex_nat_less [code_unfold]:
"(\<exists>m\<le>n::nat. P m) \<longleftrightarrow> (\<exists>m \<in> {0..n}. P m)"
by auto
text\<open>Bounded \<open>LEAST\<close> operator:\<close>
definition "Bleast S P = (LEAST x. x \<in> S \<and> P x)"
definition "abort_Bleast S P = (LEAST x. x \<in> S \<and> P x)"
declare [[code abort: abort_Bleast]]
lemma Bleast_code [code]:
"Bleast (set xs) P = (case filter P (sort xs) of
x#xs \<Rightarrow> x |
[] \<Rightarrow> abort_Bleast (set xs) P)"
proof (cases "filter P (sort xs)")
case Nil thus ?thesis by (simp add: Bleast_def abort_Bleast_def)
next
case (Cons x ys)
have "(LEAST x. x \<in> set xs \<and> P x) = x"
proof (rule Least_equality)
show "x \<in> set xs \<and> P x"
by (metis Cons Cons_eq_filter_iff in_set_conv_decomp set_sort)
next
fix y assume "y \<in> set xs \<and> P y"
hence "y \<in> set (filter P xs)" by auto
thus "x \<le> y"
by (metis Cons eq_iff filter_sort set_ConsD set_sort sorted_wrt.simps(2) sorted_sort)
qed
thus ?thesis using Cons by (simp add: Bleast_def)
qed
declare Bleast_def[symmetric, code_unfold]
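text \<open>Editor's illustration (not part of the original theory): executing the code equation
above on a small list; \<open>sort\<close> and \<open>filter\<close> reduce by simplification here.\<close>
lemma "Bleast (set [3, 1, 2::nat]) (\<lambda>x. 1 < x) = 2"
by (simp add: Bleast_code)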
text \<open>Summation over ints.\<close>
lemma greaterThanLessThan_upto [code_unfold]:
"{i<..<j::int} = set [i+1..j - 1]"
by auto
lemma atLeastLessThan_upto [code_unfold]:
"{i..<j::int} = set [i..j - 1]"
by auto
lemma greaterThanAtMost_upto [code_unfold]:
"{i<..j::int} = set [i+1..j]"
by auto
lemmas atLeastAtMost_upto [code_unfold] = set_upto [symmetric]
subsubsection \<open>Optimizing by rewriting\<close>
definition null :: "'a list \<Rightarrow> bool" where
[code_abbrev]: "null xs \<longleftrightarrow> xs = []"
text \<open>
Efficient emptiness check is implemented by \<^const>\<open>null\<close>.
\<close>
lemma null_rec [code]:
"null (x # xs) \<longleftrightarrow> False"
"null [] \<longleftrightarrow> True"
by (simp_all add: null_def)
lemma eq_Nil_null: (* FIXME delete candidate *)
"xs = [] \<longleftrightarrow> null xs"
by (simp add: null_def)
lemma equal_Nil_null [code_unfold]:
"HOL.equal xs [] \<longleftrightarrow> null xs"
"HOL.equal [] = null"
by (auto simp add: equal null_def)
definition maps :: "('a \<Rightarrow> 'b list) \<Rightarrow> 'a list \<Rightarrow> 'b list" where
[code_abbrev]: "maps f xs = concat (map f xs)"
definition map_filter :: "('a \<Rightarrow> 'b option) \<Rightarrow> 'a list \<Rightarrow> 'b list" where
[code_post]: "map_filter f xs = map (the \<circ> f) (filter (\<lambda>x. f x \<noteq> None) xs)"
text \<open>
Operations \<^const>\<open>maps\<close> and \<^const>\<open>map_filter\<close> avoid
intermediate lists on execution -- do not use for proving.
\<close>
lemma maps_simps [code]:
"maps f (x # xs) = f x @ maps f xs"
"maps f [] = []"
by (simp_all add: maps_def)
lemma map_filter_simps [code]:
"map_filter f (x # xs) = (case f x of None \<Rightarrow> map_filter f xs | Some y \<Rightarrow> y # map_filter f xs)"
"map_filter f [] = []"
by (simp_all add: map_filter_def split: option.split)
lemma concat_map_maps: (* FIXME delete candidate *)
"concat (map f xs) = maps f xs"
by (simp add: maps_def)
lemma map_filter_map_filter [code_unfold]:
"map f (filter P xs) = map_filter (\<lambda>x. if P x then Some (f x) else None) xs"
by (simp add: map_filter_def)
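text \<open>Editor's illustration (not part of the original theory): how the two operations
unfold on small lists.\<close>
lemma "maps f [a, b] = f a @ f b" and "map_filter Some [a, b] = [a, b]"
by (simp_all add: maps_simps map_filter_simps)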
text \<open>Optimized code for \<open>\<forall>i\<in>{a..b::int}\<close> and \<open>\<forall>n\<in>{a..<b::nat}\<close>
and similarly for \<open>\<exists>\<close>.\<close>
definition all_interval_nat :: "(nat \<Rightarrow> bool) \<Rightarrow> nat \<Rightarrow> nat \<Rightarrow> bool" where
"all_interval_nat P i j \<longleftrightarrow> (\<forall>n \<in> {i..<j}. P n)"
lemma [code]:
"all_interval_nat P i j \<longleftrightarrow> i \<ge> j \<or> P i \<and> all_interval_nat P (Suc i) j"
proof -
have *: "\<And>n. P i \<Longrightarrow> \<forall>n\<in>{Suc i..<j}. P n \<Longrightarrow> i \<le> n \<Longrightarrow> n < j \<Longrightarrow> P n"
using le_less_Suc_eq by fastforce
show ?thesis by (auto simp add: all_interval_nat_def intro: *)
qed
lemma list_all_iff_all_interval_nat [code_unfold]:
"list_all P [i..<j] \<longleftrightarrow> all_interval_nat P i j"
by (simp add: list_all_iff all_interval_nat_def)
lemma list_ex_iff_not_all_inverval_nat [code_unfold]:
"list_ex P [i..<j] \<longleftrightarrow> \<not> (all_interval_nat (Not \<circ> P) i j)"
by (simp add: list_ex_iff all_interval_nat_def)
definition all_interval_int :: "(int \<Rightarrow> bool) \<Rightarrow> int \<Rightarrow> int \<Rightarrow> bool" where
"all_interval_int P i j \<longleftrightarrow> (\<forall>k \<in> {i..j}. P k)"
lemma [code]:
"all_interval_int P i j \<longleftrightarrow> i > j \<or> P i \<and> all_interval_int P (i + 1) j"
proof -
have *: "\<And>k. P i \<Longrightarrow> \<forall>k\<in>{i+1..j}. P k \<Longrightarrow> i \<le> k \<Longrightarrow> k \<le> j \<Longrightarrow> P k"
by (smt (verit, best) atLeastAtMost_iff)
show ?thesis by (auto simp add: all_interval_int_def intro: *)
qed
lemma list_all_iff_all_interval_int [code_unfold]:
"list_all P [i..j] \<longleftrightarrow> all_interval_int P i j"
by (simp add: list_all_iff all_interval_int_def)
lemma list_ex_iff_not_all_inverval_int [code_unfold]:
"list_ex P [i..j] \<longleftrightarrow> \<not> (all_interval_int (Not \<circ> P) i j)"
by (simp add: list_ex_iff all_interval_int_def)
text \<open>Optimized code (tail-recursive) for \<^term>\<open>length\<close>\<close>
definition gen_length :: "nat \<Rightarrow> 'a list \<Rightarrow> nat"
where "gen_length n xs = n + length xs"
lemma gen_length_code [code]:
"gen_length n [] = n"
"gen_length n (x # xs) = gen_length (Suc n) xs"
by(simp_all add: gen_length_def)
declare list.size(3-4)[code del]
lemma length_code [code]: "length = gen_length 0"
by(simp add: gen_length_def fun_eq_iff)
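text \<open>Editor's illustration (not part of the original theory): the accumulator version
counts one \<open>Suc\<close> per list element.\<close>
lemma "gen_length n [a, b] = Suc (Suc n)"
by (simp add: gen_length_code)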
hide_const (open) member null maps map_filter all_interval_nat all_interval_int gen_length
subsubsection \<open>Pretty lists\<close>
ML \<open>
(* Code generation for list literals. *)
signature LIST_CODE =
sig
val add_literal_list: string -> theory -> theory
end;
structure List_Code : LIST_CODE =
struct
open Basic_Code_Thingol;
fun implode_list t =
let
fun dest_cons (IConst { sym = Code_Symbol.Constant \<^const_name>\<open>Cons\<close>, ... } `$ t1 `$ t2) = SOME (t1, t2)
| dest_cons _ = NONE;
val (ts, t') = Code_Thingol.unfoldr dest_cons t;
in case t'
of IConst { sym = Code_Symbol.Constant \<^const_name>\<open>Nil\<close>, ... } => SOME ts
| _ => NONE
end;
fun print_list (target_fxy, target_cons) pr fxy t1 t2 =
Code_Printer.brackify_infix (target_fxy, Code_Printer.R) fxy (
pr (Code_Printer.INFX (target_fxy, Code_Printer.X)) t1,
Code_Printer.str target_cons,
pr (Code_Printer.INFX (target_fxy, Code_Printer.R)) t2
);
fun add_literal_list target =
let
fun pretty literals pr _ vars fxy [(t1, _), (t2, _)] =
case Option.map (cons t1) (implode_list t2)
of SOME ts =>
Code_Printer.literal_list literals (map (pr vars Code_Printer.NOBR) ts)
| NONE =>
print_list (Code_Printer.infix_cons literals) (pr vars) fxy t1 t2;
in
Code_Target.set_printings (Code_Symbol.Constant (\<^const_name>\<open>Cons\<close>,
[(target, SOME (Code_Printer.complex_const_syntax (2, pretty)))]))
end
end;
\<close>
code_printing
type_constructor list \<rightharpoonup>
(SML) "_ list"
and (OCaml) "_ list"
and (Haskell) "![(_)]"
and (Scala) "List[(_)]"
| constant Nil \<rightharpoonup>
(SML) "[]"
and (OCaml) "[]"
and (Haskell) "[]"
and (Scala) "!Nil"
| class_instance list :: equal \<rightharpoonup>
(Haskell) -
| constant "HOL.equal :: 'a list \<Rightarrow> 'a list \<Rightarrow> bool" \<rightharpoonup>
(Haskell) infix 4 "=="
setup \<open>fold (List_Code.add_literal_list) ["SML", "OCaml", "Haskell", "Scala"]\<close>
code_reserved SML
list
code_reserved OCaml
list
subsubsection \<open>Use convenient predefined operations\<close>
code_printing
constant "(@)" \<rightharpoonup>
(SML) infixr 7 "@"
and (OCaml) infixr 6 "@"
and (Haskell) infixr 5 "++"
and (Scala) infixl 7 "++"
| constant map \<rightharpoonup>
(Haskell) "map"
| constant filter \<rightharpoonup>
(Haskell) "filter"
| constant concat \<rightharpoonup>
(Haskell) "concat"
| constant List.maps \<rightharpoonup>
(Haskell) "concatMap"
| constant rev \<rightharpoonup>
(Haskell) "reverse"
| constant zip \<rightharpoonup>
(Haskell) "zip"
| constant List.null \<rightharpoonup>
(Haskell) "null"
| constant takeWhile \<rightharpoonup>
(Haskell) "takeWhile"
| constant dropWhile \<rightharpoonup>
(Haskell) "dropWhile"
| constant list_all \<rightharpoonup>
(Haskell) "all"
| constant list_ex \<rightharpoonup>
(Haskell) "any"
subsubsection \<open>Implementation of sets by lists\<close>
lemma is_empty_set [code]:
"Set.is_empty (set xs) \<longleftrightarrow> List.null xs"
by (simp add: Set.is_empty_def null_def)
lemma empty_set [code]:
"{} = set []"
by simp
lemma UNIV_coset [code]:
"UNIV = List.coset []"
by simp
lemma compl_set [code]:
"- set xs = List.coset xs"
by simp
lemma compl_coset [code]:
"- List.coset xs = set xs"
by simp
lemma [code]:
"x \<in> set xs \<longleftrightarrow> List.member xs x"
"x \<in> List.coset xs \<longleftrightarrow> \<not> List.member xs x"
by (simp_all add: member_def)
lemma insert_code [code]:
"insert x (set xs) = set (List.insert x xs)"
"insert x (List.coset xs) = List.coset (removeAll x xs)"
by simp_all
lemma remove_code [code]:
"Set.remove x (set xs) = set (removeAll x xs)"
"Set.remove x (List.coset xs) = List.coset (List.insert x xs)"
by (simp_all add: remove_def Compl_insert)
lemma filter_set [code]:
"Set.filter P (set xs) = set (filter P xs)"
by auto
lemma image_set [code]:
"image f (set xs) = set (map f xs)"
by simp
lemma subset_code [code]:
"set xs \<le> B \<longleftrightarrow> (\<forall>x\<in>set xs. x \<in> B)"
"A \<le> List.coset ys \<longleftrightarrow> (\<forall>y\<in>set ys. y \<notin> A)"
"List.coset [] \<subseteq> set [] \<longleftrightarrow> False"
by auto
text \<open>A frequent case -- avoid intermediate sets\<close>
lemma [code_unfold]:
"set xs \<subseteq> set ys \<longleftrightarrow> list_all (\<lambda>x. x \<in> set ys) xs"
by (auto simp: list_all_iff)
lemma Ball_set [code]:
"Ball (set xs) P \<longleftrightarrow> list_all P xs"
by (simp add: list_all_iff)
lemma Bex_set [code]:
"Bex (set xs) P \<longleftrightarrow> list_ex P xs"
by (simp add: list_ex_iff)
lemma card_set [code]:
"card (set xs) = length (remdups xs)"
by (simp add: length_remdups_card_conv)
lemma the_elem_set [code]:
"the_elem (set [x]) = x"
by simp
lemma Pow_set [code]:
"Pow (set []) = {{}}"
"Pow (set (x # xs)) = (let A = Pow (set xs) in A \<union> insert x ` A)"
by (simp_all add: Pow_insert Let_def)
definition map_project :: "('a \<Rightarrow> 'b option) \<Rightarrow> 'a set \<Rightarrow> 'b set" where
"map_project f A = {b. \<exists> a \<in> A. f a = Some b}"
lemma [code]:
"map_project f (set xs) = set (List.map_filter f xs)"
by (auto simp add: map_project_def map_filter_def image_def)
hide_const (open) map_project
text \<open>Operations on relations\<close>
lemma product_code [code]:
"Product_Type.product (set xs) (set ys) = set [(x, y). x \<leftarrow> xs, y \<leftarrow> ys]"
by (auto simp add: Product_Type.product_def)
lemma Id_on_set [code]:
"Id_on (set xs) = set [(x, x). x \<leftarrow> xs]"
by (auto simp add: Id_on_def)
lemma [code]:
"R `` S = List.map_project (\<lambda>(x, y). if x \<in> S then Some y else None) R"
unfolding map_project_def by (auto split: prod.split if_split_asm)
lemma trancl_set_ntrancl [code]:
"trancl (set xs) = ntrancl (card (set xs) - 1) (set xs)"
by (simp add: finite_trancl_ntranl)
lemma set_relcomp [code]:
"set xys O set yzs = set ([(fst xy, snd yz). xy \<leftarrow> xys, yz \<leftarrow> yzs, snd xy = fst yz])"
by auto (auto simp add: Bex_def image_def)
lemma wf_set [code]:
"wf (set xs) = acyclic (set xs)"
by (simp add: wf_iff_acyclic_if_finite)
subsection \<open>Setup for Lifting/Transfer\<close>
subsubsection \<open>Transfer rules for the Transfer package\<close>
context includes lifting_syntax
begin
lemma tl_transfer [transfer_rule]:
"(list_all2 A ===> list_all2 A) tl tl"
unfolding tl_def[abs_def] by transfer_prover
lemma butlast_transfer [transfer_rule]:
"(list_all2 A ===> list_all2 A) butlast butlast"
by (rule rel_funI, erule list_all2_induct, auto)
lemma map_rec: "map f xs = rec_list Nil (%x _ y. Cons (f x) y) xs"
by (induct xs) auto
lemma append_transfer [transfer_rule]:
"(list_all2 A ===> list_all2 A ===> list_all2 A) append append"
unfolding List.append_def by transfer_prover
lemma rev_transfer [transfer_rule]:
"(list_all2 A ===> list_all2 A) rev rev"
unfolding List.rev_def by transfer_prover
lemma filter_transfer [transfer_rule]:
"((A ===> (=)) ===> list_all2 A ===> list_all2 A) filter filter"
unfolding List.filter_def by transfer_prover
lemma fold_transfer [transfer_rule]:
"((A ===> B ===> B) ===> list_all2 A ===> B ===> B) fold fold"
unfolding List.fold_def by transfer_prover
lemma foldr_transfer [transfer_rule]:
"((A ===> B ===> B) ===> list_all2 A ===> B ===> B) foldr foldr"
unfolding List.foldr_def by transfer_prover
lemma foldl_transfer [transfer_rule]:
"((B ===> A ===> B) ===> B ===> list_all2 A ===> B) foldl foldl"
unfolding List.foldl_def by transfer_prover
lemma concat_transfer [transfer_rule]:
"(list_all2 (list_all2 A) ===> list_all2 A) concat concat"
unfolding List.concat_def by transfer_prover
lemma drop_transfer [transfer_rule]:
"((=) ===> list_all2 A ===> list_all2 A) drop drop"
unfolding List.drop_def by transfer_prover
lemma take_transfer [transfer_rule]:
"((=) ===> list_all2 A ===> list_all2 A) take take"
unfolding List.take_def by transfer_prover
lemma list_update_transfer [transfer_rule]:
"(list_all2 A ===> (=) ===> A ===> list_all2 A) list_update list_update"
unfolding list_update_def by transfer_prover
lemma takeWhile_transfer [transfer_rule]:
"((A ===> (=)) ===> list_all2 A ===> list_all2 A) takeWhile takeWhile"
unfolding takeWhile_def by transfer_prover
lemma dropWhile_transfer [transfer_rule]:
"((A ===> (=)) ===> list_all2 A ===> list_all2 A) dropWhile dropWhile"
unfolding dropWhile_def by transfer_prover
lemma zip_transfer [transfer_rule]:
"(list_all2 A ===> list_all2 B ===> list_all2 (rel_prod A B)) zip zip"
unfolding zip_def by transfer_prover
lemma product_transfer [transfer_rule]:
"(list_all2 A ===> list_all2 B ===> list_all2 (rel_prod A B)) List.product List.product"
unfolding List.product_def by transfer_prover
lemma product_lists_transfer [transfer_rule]:
"(list_all2 (list_all2 A) ===> list_all2 (list_all2 A)) product_lists product_lists"
unfolding product_lists_def by transfer_prover
lemma insert_transfer [transfer_rule]:
assumes [transfer_rule]: "bi_unique A"
shows "(A ===> list_all2 A ===> list_all2 A) List.insert List.insert"
unfolding List.insert_def [abs_def] by transfer_prover
lemma find_transfer [transfer_rule]:
"((A ===> (=)) ===> list_all2 A ===> rel_option A) List.find List.find"
unfolding List.find_def by transfer_prover
lemma those_transfer [transfer_rule]:
"(list_all2 (rel_option P) ===> rel_option (list_all2 P)) those those"
unfolding List.those_def by transfer_prover
lemma remove1_transfer [transfer_rule]:
assumes [transfer_rule]: "bi_unique A"
shows "(A ===> list_all2 A ===> list_all2 A) remove1 remove1"
unfolding remove1_def by transfer_prover
lemma removeAll_transfer [transfer_rule]:
assumes [transfer_rule]: "bi_unique A"
shows "(A ===> list_all2 A ===> list_all2 A) removeAll removeAll"
unfolding removeAll_def by transfer_prover
lemma successively_transfer [transfer_rule]:
"((A ===> A ===> (=)) ===> list_all2 A ===> (=)) successively successively"
unfolding successively_altdef by transfer_prover
lemma distinct_transfer [transfer_rule]:
assumes [transfer_rule]: "bi_unique A"
shows "(list_all2 A ===> (=)) distinct distinct"
unfolding distinct_def by transfer_prover
lemma distinct_adj_transfer [transfer_rule]:
assumes "bi_unique A"
shows "(list_all2 A ===> (=)) distinct_adj distinct_adj"
unfolding rel_fun_def
proof (intro allI impI)
fix xs ys assume "list_all2 A xs ys"
thus "distinct_adj xs \<longleftrightarrow> distinct_adj ys"
proof (induction rule: list_all2_induct)
case (Cons x xs y ys)
show ?case
by (metis Cons assms bi_unique_def distinct_adj_Cons list.rel_sel)
qed auto
qed
lemma remdups_transfer [transfer_rule]:
assumes [transfer_rule]: "bi_unique A"
shows "(list_all2 A ===> list_all2 A) remdups remdups"
unfolding remdups_def by transfer_prover
lemma remdups_adj_transfer [transfer_rule]:
assumes [transfer_rule]: "bi_unique A"
shows "(list_all2 A ===> list_all2 A) remdups_adj remdups_adj"
proof (rule rel_funI, erule list_all2_induct)
qed (auto simp: remdups_adj_Cons assms[unfolded bi_unique_def] split: list.splits)
lemma replicate_transfer [transfer_rule]:
"((=) ===> A ===> list_all2 A) replicate replicate"
unfolding replicate_def by transfer_prover
lemma length_transfer [transfer_rule]:
"(list_all2 A ===> (=)) length length"
unfolding size_list_overloaded_def size_list_def by transfer_prover
lemma rotate1_transfer [transfer_rule]:
"(list_all2 A ===> list_all2 A) rotate1 rotate1"
unfolding rotate1_def by transfer_prover
lemma rotate_transfer [transfer_rule]:
"((=) ===> list_all2 A ===> list_all2 A) rotate rotate"
unfolding rotate_def [abs_def] by transfer_prover
lemma nths_transfer [transfer_rule]:
"(list_all2 A ===> rel_set (=) ===> list_all2 A) nths nths"
unfolding nths_def [abs_def] by transfer_prover
lemma subseqs_transfer [transfer_rule]:
"(list_all2 A ===> list_all2 (list_all2 A)) subseqs subseqs"
unfolding subseqs_def [abs_def] by transfer_prover
lemma partition_transfer [transfer_rule]:
"((A ===> (=)) ===> list_all2 A ===> rel_prod (list_all2 A) (list_all2 A))
partition partition"
unfolding partition_def by transfer_prover
lemma lists_transfer [transfer_rule]:
"(rel_set A ===> rel_set (list_all2 A)) lists lists"
proof (rule rel_funI, rule rel_setI)
show "\<lbrakk>l \<in> lists X; rel_set A X Y\<rbrakk> \<Longrightarrow> \<exists>y\<in>lists Y. list_all2 A l y" for X Y l
proof (induction l rule: lists.induct)
case (Cons a l)
then show ?case
by (simp only: rel_set_def list_all2_Cons1, metis lists.Cons)
qed auto
show "\<lbrakk>l \<in> lists Y; rel_set A X Y\<rbrakk> \<Longrightarrow> \<exists>x\<in>lists X. list_all2 A x l" for X Y l
proof (induction l rule: lists.induct)
case (Cons a l)
then show ?case
by (simp only: rel_set_def list_all2_Cons2, metis lists.Cons)
qed auto
qed
lemma set_Cons_transfer [transfer_rule]:
"(rel_set A ===> rel_set (list_all2 A) ===> rel_set (list_all2 A))
set_Cons set_Cons"
unfolding rel_fun_def rel_set_def set_Cons_def
by (fastforce simp add: list_all2_Cons1 list_all2_Cons2)
lemma listset_transfer [transfer_rule]:
"(list_all2 (rel_set A) ===> rel_set (list_all2 A)) listset listset"
unfolding listset_def by transfer_prover
lemma null_transfer [transfer_rule]:
"(list_all2 A ===> (=)) List.null List.null"
unfolding rel_fun_def List.null_def by auto
lemma list_all_transfer [transfer_rule]:
"((A ===> (=)) ===> list_all2 A ===> (=)) list_all list_all"
unfolding list_all_iff [abs_def] by transfer_prover
lemma list_ex_transfer [transfer_rule]:
"((A ===> (=)) ===> list_all2 A ===> (=)) list_ex list_ex"
unfolding list_ex_iff [abs_def] by transfer_prover
lemma splice_transfer [transfer_rule]:
"(list_all2 A ===> list_all2 A ===> list_all2 A) splice splice"
apply (rule rel_funI, erule list_all2_induct, simp add: rel_fun_def, simp)
apply (rule rel_funI)
apply (erule_tac xs=x in list_all2_induct, simp, simp add: rel_fun_def)
done
lemma shuffles_transfer [transfer_rule]:
"(list_all2 A ===> list_all2 A ===> rel_set (list_all2 A)) shuffles shuffles"
proof (intro rel_funI, goal_cases)
case (1 xs xs' ys ys')
thus ?case
proof (induction xs ys arbitrary: xs' ys' rule: shuffles.induct)
case (3 x xs y ys xs' ys')
from "3.prems" obtain x' xs'' where xs': "xs' = x' # xs''" by (cases xs') auto
from "3.prems" obtain y' ys'' where ys': "ys' = y' # ys''" by (cases ys') auto
have [transfer_rule]: "A x x'" "A y y'" "list_all2 A xs xs''" "list_all2 A ys ys''"
using "3.prems" by (simp_all add: xs' ys')
have [transfer_rule]: "rel_set (list_all2 A) (shuffles xs (y # ys)) (shuffles xs'' ys')" and
[transfer_rule]: "rel_set (list_all2 A) (shuffles (x # xs) ys) (shuffles xs' ys'')"
using "3.prems" by (auto intro!: "3.IH" simp: xs' ys')
have "rel_set (list_all2 A) ((#) x ` shuffles xs (y # ys) \<union> (#) y ` shuffles (x # xs) ys)
((#) x' ` shuffles xs'' ys' \<union> (#) y' ` shuffles xs' ys'')" by transfer_prover
thus ?case by (simp add: xs' ys')
qed (auto simp: rel_set_def)
qed
lemma rtrancl_parametric [transfer_rule]:
assumes [transfer_rule]: "bi_unique A" "bi_total A"
shows "(rel_set (rel_prod A A) ===> rel_set (rel_prod A A)) rtrancl rtrancl"
unfolding rtrancl_def by transfer_prover
lemma monotone_parametric [transfer_rule]:
assumes [transfer_rule]: "bi_total A"
shows "((A ===> A ===> (=)) ===> (B ===> B ===> (=)) ===> (A ===> B) ===> (=)) monotone monotone"
unfolding monotone_def[abs_def] by transfer_prover
lemma fun_ord_parametric [transfer_rule]:
assumes [transfer_rule]: "bi_total C"
shows "((A ===> B ===> (=)) ===> (C ===> A) ===> (C ===> B) ===> (=)) fun_ord fun_ord"
unfolding fun_ord_def[abs_def] by transfer_prover
lemma fun_lub_parametric [transfer_rule]:
assumes [transfer_rule]: "bi_total A" "bi_unique A"
shows "((rel_set A ===> B) ===> rel_set (C ===> A) ===> C ===> B) fun_lub fun_lub"
unfolding fun_lub_def[abs_def] by transfer_prover
end
end
diff --git a/src/HOL/Nat.thy b/src/HOL/Nat.thy
--- a/src/HOL/Nat.thy
+++ b/src/HOL/Nat.thy
@@ -1,2572 +1,2583 @@
(* Title: HOL/Nat.thy
Author: Tobias Nipkow
Author: Lawrence C Paulson
Author: Markus Wenzel
*)
section \<open>Natural numbers\<close>
theory Nat
imports Inductive Typedef Fun Rings
begin
subsection \<open>Type \<open>ind\<close>\<close>
typedecl ind
axiomatization Zero_Rep :: ind and Suc_Rep :: "ind \<Rightarrow> ind"
\<comment> \<open>The axiom of infinity in 2 parts:\<close>
where Suc_Rep_inject: "Suc_Rep x = Suc_Rep y \<Longrightarrow> x = y"
and Suc_Rep_not_Zero_Rep: "Suc_Rep x \<noteq> Zero_Rep"
subsection \<open>Type nat\<close>
text \<open>Type definition\<close>
inductive Nat :: "ind \<Rightarrow> bool"
where
Zero_RepI: "Nat Zero_Rep"
| Suc_RepI: "Nat i \<Longrightarrow> Nat (Suc_Rep i)"
typedef nat = "{n. Nat n}"
morphisms Rep_Nat Abs_Nat
using Nat.Zero_RepI by auto
lemma Nat_Rep_Nat: "Nat (Rep_Nat n)"
using Rep_Nat by simp
lemma Nat_Abs_Nat_inverse: "Nat n \<Longrightarrow> Rep_Nat (Abs_Nat n) = n"
using Abs_Nat_inverse by simp
lemma Nat_Abs_Nat_inject: "Nat n \<Longrightarrow> Nat m \<Longrightarrow> Abs_Nat n = Abs_Nat m \<longleftrightarrow> n = m"
using Abs_Nat_inject by simp
instantiation nat :: zero
begin
definition Zero_nat_def: "0 = Abs_Nat Zero_Rep"
instance ..
end
definition Suc :: "nat \<Rightarrow> nat"
where "Suc n = Abs_Nat (Suc_Rep (Rep_Nat n))"
lemma Suc_not_Zero: "Suc m \<noteq> 0"
by (simp add: Zero_nat_def Suc_def Suc_RepI Zero_RepI
Nat_Abs_Nat_inject Suc_Rep_not_Zero_Rep Nat_Rep_Nat)
lemma Zero_not_Suc: "0 \<noteq> Suc m"
by (rule not_sym) (rule Suc_not_Zero)
lemma Suc_Rep_inject': "Suc_Rep x = Suc_Rep y \<longleftrightarrow> x = y"
by (rule iffI, rule Suc_Rep_inject) simp_all
lemma nat_induct0:
assumes "P 0" and "\<And>n. P n \<Longrightarrow> P (Suc n)"
shows "P n"
proof -
have "P (Abs_Nat (Rep_Nat n))"
using assms unfolding Zero_nat_def Suc_def
by (iprover intro: Nat_Rep_Nat [THEN Nat.induct] elim: Nat_Abs_Nat_inverse [THEN subst])
then show ?thesis
by (simp add: Rep_Nat_inverse)
qed
free_constructors case_nat for "0 :: nat" | Suc pred
where "pred (0 :: nat) = (0 :: nat)"
proof atomize_elim
fix n
show "n = 0 \<or> (\<exists>m. n = Suc m)"
by (induction n rule: nat_induct0) auto
next
fix n m
show "(Suc n = Suc m) = (n = m)"
by (simp add: Suc_def Nat_Abs_Nat_inject Nat_Rep_Nat Suc_RepI Suc_Rep_inject' Rep_Nat_inject)
next
fix n
show "0 \<noteq> Suc n"
by (simp add: Suc_not_Zero)
qed
\<comment> \<open>Avoid name clashes by prefixing the output of \<open>old_rep_datatype\<close> with \<open>old\<close>.\<close>
setup \<open>Sign.mandatory_path "old"\<close>
old_rep_datatype "0 :: nat" Suc
by (erule nat_induct0) auto
setup \<open>Sign.parent_path\<close>
\<comment> \<open>But erase the prefix for properties that are not generated by \<open>free_constructors\<close>.\<close>
setup \<open>Sign.mandatory_path "nat"\<close>
declare old.nat.inject[iff del]
and old.nat.distinct(1)[simp del, induct_simp del]
lemmas induct = old.nat.induct
lemmas inducts = old.nat.inducts
lemmas rec = old.nat.rec
lemmas simps = nat.inject nat.distinct nat.case nat.rec
setup \<open>Sign.parent_path\<close>
abbreviation rec_nat :: "'a \<Rightarrow> (nat \<Rightarrow> 'a \<Rightarrow> 'a) \<Rightarrow> nat \<Rightarrow> 'a"
where "rec_nat \<equiv> old.rec_nat"
declare nat.sel[code del]
hide_const (open) Nat.pred \<comment> \<open>hide everything related to the selector\<close>
hide_fact
nat.case_eq_if
nat.collapse
nat.expand
nat.sel
nat.exhaust_sel
nat.split_sel
nat.split_sel_asm
lemma nat_exhaust [case_names 0 Suc, cases type: nat]:
"(y = 0 \<Longrightarrow> P) \<Longrightarrow> (\<And>nat. y = Suc nat \<Longrightarrow> P) \<Longrightarrow> P"
\<comment> \<open>for backward compatibility -- names of variables differ\<close>
by (rule old.nat.exhaust)
lemma nat_induct [case_names 0 Suc, induct type: nat]:
fixes n
assumes "P 0" and "\<And>n. P n \<Longrightarrow> P (Suc n)"
shows "P n"
\<comment> \<open>for backward compatibility -- names of variables differ\<close>
using assms by (rule nat.induct)
hide_fact
nat_exhaust
nat_induct0
ML \<open>
val nat_basic_lfp_sugar =
let
val ctr_sugar = the (Ctr_Sugar.ctr_sugar_of_global \<^theory> \<^type_name>\<open>nat\<close>);
val recx = Logic.varify_types_global \<^term>\<open>rec_nat\<close>;
val C = body_type (fastype_of recx);
in
{T = HOLogic.natT, fp_res_index = 0, C = C, fun_arg_Tsss = [[], [[HOLogic.natT, C]]],
ctr_sugar = ctr_sugar, recx = recx, rec_thms = @{thms nat.rec}}
end;
\<close>
setup \<open>
let
fun basic_lfp_sugars_of _ [\<^typ>\<open>nat\<close>] _ _ ctxt =
([], [0], [nat_basic_lfp_sugar], [], [], [], TrueI (*dummy*), [], false, ctxt)
| basic_lfp_sugars_of bs arg_Ts callers callssss ctxt =
BNF_LFP_Rec_Sugar.default_basic_lfp_sugars_of bs arg_Ts callers callssss ctxt;
in
BNF_LFP_Rec_Sugar.register_lfp_rec_extension
{nested_simps = [], special_endgame_tac = K (K (K (K no_tac))), is_new_datatype = K (K true),
basic_lfp_sugars_of = basic_lfp_sugars_of, rewrite_nested_rec_call = NONE}
end
\<close>
text \<open>Injectiveness and distinctness lemmas\<close>
lemma inj_Suc [simp]:
"inj_on Suc N"
by (simp add: inj_on_def)
lemma bij_betw_Suc [simp]:
"bij_betw Suc M N \<longleftrightarrow> Suc ` M = N"
by (simp add: bij_betw_def)
lemma Suc_neq_Zero: "Suc m = 0 \<Longrightarrow> R"
by (rule notE) (rule Suc_not_Zero)
lemma Zero_neq_Suc: "0 = Suc m \<Longrightarrow> R"
by (rule Suc_neq_Zero) (erule sym)
lemma Suc_inject: "Suc x = Suc y \<Longrightarrow> x = y"
by (rule inj_Suc [THEN injD])
lemma n_not_Suc_n: "n \<noteq> Suc n"
by (induct n) simp_all
lemma Suc_n_not_n: "Suc n \<noteq> n"
by (rule not_sym) (rule n_not_Suc_n)
text \<open>A special form of induction for reasoning about \<^term>\<open>m < n\<close> and \<^term>\<open>m - n\<close>.\<close>
lemma diff_induct:
assumes "\<And>x. P x 0"
and "\<And>y. P 0 (Suc y)"
and "\<And>x y. P x y \<Longrightarrow> P (Suc x) (Suc y)"
shows "P m n"
proof (induct n arbitrary: m)
case 0
show ?case by (rule assms(1))
next
case (Suc n)
show ?case
proof (induct m)
case 0
show ?case by (rule assms(2))
next
case (Suc m)
from \<open>P m n\<close> show ?case by (rule assms(3))
qed
qed
subsection \<open>Arithmetic operators\<close>
instantiation nat :: comm_monoid_diff
begin
primrec plus_nat
where
add_0: "0 + n = (n::nat)"
| add_Suc: "Suc m + n = Suc (m + n)"
lemma add_0_right [simp]: "m + 0 = m"
for m :: nat
by (induct m) simp_all
lemma add_Suc_right [simp]: "m + Suc n = Suc (m + n)"
by (induct m) simp_all
declare add_0 [code]
lemma add_Suc_shift [code]: "Suc m + n = m + Suc n"
by simp
primrec minus_nat
where
diff_0 [code]: "m - 0 = (m::nat)"
| diff_Suc: "m - Suc n = (case m - n of 0 \<Rightarrow> 0 | Suc k \<Rightarrow> k)"
declare diff_Suc [simp del]
lemma diff_0_eq_0 [simp, code]: "0 - n = 0"
for n :: nat
by (induct n) (simp_all add: diff_Suc)
lemma diff_Suc_Suc [simp, code]: "Suc m - Suc n = m - n"
by (induct n) (simp_all add: diff_Suc)
instance
proof
fix n m q :: nat
show "(n + m) + q = n + (m + q)" by (induct n) simp_all
show "n + m = m + n" by (induct n) simp_all
show "m + n - m = n" by (induct m) simp_all
show "n - m - q = n - (m + q)" by (induct q) (simp_all add: diff_Suc)
show "0 + n = n" by simp
show "0 - n = 0" by simp
qed
end
hide_fact (open) add_0 add_0_right diff_0
instantiation nat :: comm_semiring_1_cancel
begin
definition One_nat_def [simp]: "1 = Suc 0"
primrec times_nat
where
mult_0: "0 * n = (0::nat)"
| mult_Suc: "Suc m * n = n + (m * n)"
lemma mult_0_right [simp]: "m * 0 = 0"
for m :: nat
by (induct m) simp_all
lemma mult_Suc_right [simp]: "m * Suc n = m + (m * n)"
by (induct m) (simp_all add: add.left_commute)
lemma add_mult_distrib: "(m + n) * k = (m * k) + (n * k)"
for m n k :: nat
by (induct m) (simp_all add: add.assoc)
instance
proof
fix k n m q :: nat
show "0 \<noteq> (1::nat)"
by simp
show "1 * n = n"
by simp
show "n * m = m * n"
by (induct n) simp_all
show "(n * m) * q = n * (m * q)"
by (induct n) (simp_all add: add_mult_distrib)
show "(n + m) * q = n * q + m * q"
by (rule add_mult_distrib)
show "k * (m - n) = (k * m) - (k * n)"
by (induct m n rule: diff_induct) simp_all
qed
end
subsubsection \<open>Addition\<close>
text \<open>Reasoning about \<open>m + 0 = 0\<close>, etc.\<close>
lemma add_is_0 [iff]: "m + n = 0 \<longleftrightarrow> m = 0 \<and> n = 0"
for m n :: nat
by (cases m) simp_all
lemma add_is_1: "m + n = Suc 0 \<longleftrightarrow> m = Suc 0 \<and> n = 0 \<or> m = 0 \<and> n = Suc 0"
by (cases m) simp_all
lemma one_is_add: "Suc 0 = m + n \<longleftrightarrow> m = Suc 0 \<and> n = 0 \<or> m = 0 \<and> n = Suc 0"
by (rule trans, rule eq_commute, rule add_is_1)
lemma add_eq_self_zero: "m + n = m \<Longrightarrow> n = 0"
for m n :: nat
by (induct m) simp_all
lemma plus_1_eq_Suc:
"plus 1 = Suc"
by (simp add: fun_eq_iff)
lemma Suc_eq_plus1: "Suc n = n + 1"
by simp
lemma Suc_eq_plus1_left: "Suc n = 1 + n"
by simp
subsubsection \<open>Difference\<close>
lemma Suc_diff_diff [simp]: "(Suc m - n) - Suc k = m - n - k"
by (simp add: diff_diff_add)
lemma diff_Suc_1 [simp]: "Suc n - 1 = n"
by simp
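text \<open>Editor's illustration (not part of the original theory): a typical use of the
\<open>diff_induct\<close> principle from above, mixing \<open>-\<close> and \<open>+\<close>.\<close>
lemma "(m - n) + n = m \<or> m - n = 0" for m n :: nat
by (induct m n rule: diff_induct) auto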
subsubsection \<open>Multiplication\<close>
lemma mult_is_0 [simp]: "m * n = 0 \<longleftrightarrow> m = 0 \<or> n = 0" for m n :: nat
by (induct m) auto
lemma mult_eq_1_iff [simp]: "m * n = Suc 0 \<longleftrightarrow> m = Suc 0 \<and> n = Suc 0"
proof (induct m)
case 0
then show ?case by simp
next
case (Suc m)
then show ?case by (induct n) auto
qed
lemma one_eq_mult_iff [simp]: "Suc 0 = m * n \<longleftrightarrow> m = Suc 0 \<and> n = Suc 0"
by (simp add: eq_commute flip: mult_eq_1_iff)
lemma nat_mult_eq_1_iff [simp]: "m * n = 1 \<longleftrightarrow> m = 1 \<and> n = 1"
and nat_1_eq_mult_iff [simp]: "1 = m * n \<longleftrightarrow> m = 1 \<and> n = 1" for m n :: nat
by auto
lemma mult_cancel1 [simp]: "k * m = k * n \<longleftrightarrow> m = n \<or> k = 0"
for k m n :: nat
proof -
have "k \<noteq> 0 \<Longrightarrow> k * m = k * n \<Longrightarrow> m = n"
proof (induct n arbitrary: m)
case 0
then show "m = 0" by simp
next
case (Suc n)
then show "m = Suc n"
by (cases m) (simp_all add: eq_commute [of 0])
qed
then show ?thesis by auto
qed
lemma mult_cancel2 [simp]: "m * k = n * k \<longleftrightarrow> m = n \<or> k = 0"
for k m n :: nat
by (simp add: mult.commute)
lemma Suc_mult_cancel1: "Suc k * m = Suc k * n \<longleftrightarrow> m = n"
by (subst mult_cancel1) simp
subsection \<open>Orders on \<^typ>\<open>nat\<close>\<close>
subsubsection \<open>Operation definition\<close>
instantiation nat :: linorder
begin
primrec less_eq_nat
where
"(0::nat) \<le> n \<longleftrightarrow> True"
| "Suc m \<le> n \<longleftrightarrow> (case n of 0 \<Rightarrow> False | Suc n \<Rightarrow> m \<le> n)"
declare less_eq_nat.simps [simp del]
lemma le0 [iff]: "0 \<le> n" for n :: nat
by (simp add: less_eq_nat.simps)
lemma [code]: "0 \<le> n \<longleftrightarrow> True"
for n :: nat
by simp
definition less_nat
where less_eq_Suc_le: "n < m \<longleftrightarrow> Suc n \<le> m"
lemma Suc_le_mono [iff]: "Suc n \<le> Suc m \<longleftrightarrow> n \<le> m"
by (simp add: less_eq_nat.simps(2))
lemma Suc_le_eq [code]: "Suc m \<le> n \<longleftrightarrow> m < n"
unfolding less_eq_Suc_le ..
lemma le_0_eq [iff]: "n \<le> 0 \<longleftrightarrow> n = 0"
for n :: nat
by (induct n) (simp_all add: less_eq_nat.simps(2))
lemma not_less0 [iff]: "\<not> n < 0"
for n :: nat
by (simp add: less_eq_Suc_le)
lemma less_nat_zero_code [code]: "n < 0 \<longleftrightarrow> False"
for n :: nat
by simp
lemma Suc_less_eq [iff]: "Suc m < Suc n \<longleftrightarrow> m < n"
by (simp add: less_eq_Suc_le)
lemma less_Suc_eq_le [code]: "m < Suc n \<longleftrightarrow> m \<le> n"
by (simp add: less_eq_Suc_le)
lemma Suc_less_eq2: "Suc n < m \<longleftrightarrow> (\<exists>m'. m = Suc m' \<and> n < m')"
by (cases m) auto
lemma le_SucI: "m \<le> n \<Longrightarrow> m \<le> Suc n"
by (induct m arbitrary: n) (simp_all add: less_eq_nat.simps(2) split: nat.splits)
lemma Suc_leD: "Suc m \<le> n \<Longrightarrow> m \<le> n"
by (cases n) (auto intro: le_SucI)
lemma less_SucI: "m < n \<Longrightarrow> m < Suc n"
by (simp add: less_eq_Suc_le) (erule Suc_leD)
lemma Suc_lessD: "Suc m < n \<Longrightarrow> m < n"
by (simp add: less_eq_Suc_le) (erule Suc_leD)
instance
proof
fix n m q :: nat
show "n < m \<longleftrightarrow> n \<le> m \<and> \<not> m \<le> n"
proof (induct n arbitrary: m)
case 0
then show ?case
by (cases m) (simp_all add: less_eq_Suc_le)
next
case (Suc n)
then show ?case
by (cases m) (simp_all add: less_eq_Suc_le)
qed
show "n \<le> n"
by (induct n) simp_all
then show "n = m" if "n \<le> m" and "m \<le> n"
using that by (induct n arbitrary: m)
(simp_all add: less_eq_nat.simps(2) split: nat.splits)
show "n \<le> q" if "n \<le> m" and "m \<le> q"
using that
proof (induct n arbitrary: m q)
case 0
show ?case by simp
next
case (Suc n)
then show ?case
by (simp_all (no_asm_use) add: less_eq_nat.simps(2) split: nat.splits, clarify,
simp_all (no_asm_use) add: less_eq_nat.simps(2) split: nat.splits, clarify,
simp_all (no_asm_use) add: less_eq_nat.simps(2) split: nat.splits)
qed
show "n \<le> m \<or> m \<le> n"
by (induct n arbitrary: m)
(simp_all add: less_eq_nat.simps(2) split: nat.splits)
qed
end
instantiation nat :: order_bot
begin
definition bot_nat :: nat
where "bot_nat = 0"
instance
by standard (simp add: bot_nat_def)
end
instance nat :: no_top
by standard (auto intro: less_Suc_eq_le [THEN iffD2])
subsubsection \<open>Introduction properties\<close>
lemma lessI [iff]: "n < Suc n"
by (simp add: less_Suc_eq_le)
lemma zero_less_Suc [iff]: "0 < Suc n"
by (simp add: less_Suc_eq_le)
subsubsection \<open>Elimination properties\<close>
lemma less_not_refl: "\<not> n < n"
for n :: nat
by (rule order_less_irrefl)
lemma less_not_refl2: "n < m \<Longrightarrow> m \<noteq> n"
for m n :: nat
by (rule not_sym) (rule less_imp_neq)
lemma less_not_refl3: "s < t \<Longrightarrow> s \<noteq> t"
for s t :: nat
by (rule less_imp_neq)
lemma less_irrefl_nat: "n < n \<Longrightarrow> R"
for n :: nat
by (rule notE, rule less_not_refl)
lemma less_zeroE: "n < 0 \<Longrightarrow> R"
for n :: nat
by (rule notE) (rule not_less0)
lemma less_Suc_eq: "m < Suc n \<longleftrightarrow> m < n \<or> m = n"
unfolding less_Suc_eq_le le_less ..
lemma less_Suc0 [iff]: "(n < Suc 0) = (n = 0)"
by (simp add: less_Suc_eq)
lemma less_one [iff]: "n < 1 \<longleftrightarrow> n = 0"
for n :: nat
unfolding One_nat_def by (rule less_Suc0)
lemma Suc_mono: "m < n \<Longrightarrow> Suc m < Suc n"
by simp
text \<open>"Less than" is antisymmetric, sort of.\<close>
lemma less_antisym: "\<not> n < m \<Longrightarrow> n < Suc m \<Longrightarrow> m = n"
unfolding not_less less_Suc_eq_le by (rule antisym)
lemma nat_neq_iff: "m \<noteq> n \<longleftrightarrow> m < n \<or> n < m"
for m n :: nat
by (rule linorder_neq_iff)
subsubsection \<open>Inductive (?) properties\<close>
lemma Suc_lessI: "m < n \<Longrightarrow> Suc m \<noteq> n \<Longrightarrow> Suc m < n"
unfolding less_eq_Suc_le [of m] le_less by simp
lemma lessE:
assumes major: "i < k"
and 1: "k = Suc i \<Longrightarrow> P"
and 2: "\<And>j. i < j \<Longrightarrow> k = Suc j \<Longrightarrow> P"
shows P
proof -
from major have "\<exists>j. i \<le> j \<and> k = Suc j"
unfolding less_eq_Suc_le by (induct k) simp_all
then have "(\<exists>j. i < j \<and> k = Suc j) \<or> k = Suc i"
by (auto simp add: less_le)
with 1 2 show P by auto
qed
lemma less_SucE:
assumes major: "m < Suc n"
and less: "m < n \<Longrightarrow> P"
and eq: "m = n \<Longrightarrow> P"
shows P
proof (rule major [THEN lessE])
show "Suc n = Suc m \<Longrightarrow> P"
using eq by blast
show "\<And>j. \<lbrakk>m < j; Suc n = Suc j\<rbrakk> \<Longrightarrow> P"
by (blast intro: less)
qed
lemma Suc_lessE:
assumes major: "Suc i < k"
and minor: "\<And>j. i < j \<Longrightarrow> k = Suc j \<Longrightarrow> P"
shows P
proof (rule major [THEN lessE])
show "k = Suc (Suc i) \<Longrightarrow> P"
using lessI minor by iprover
show "\<And>j. \<lbrakk>Suc i < j; k = Suc j\<rbrakk> \<Longrightarrow> P"
using Suc_lessD minor by iprover
qed
lemma Suc_less_SucD: "Suc m < Suc n \<Longrightarrow> m < n"
by simp
lemma less_trans_Suc:
assumes le: "i < j"
shows "j < k \<Longrightarrow> Suc i < k"
proof (induct k)
case 0
then show ?case by simp
next
case (Suc k)
with le show ?case
by simp (auto simp add: less_Suc_eq dest: Suc_lessD)
qed
text \<open>Can be used with \<open>less_Suc_eq\<close> to get \<^prop>\<open>n = m \<or> n < m\<close>.\<close>
lemma not_less_eq: "\<not> m < n \<longleftrightarrow> n < Suc m"
by (simp only: not_less less_Suc_eq_le)
lemma not_less_eq_eq: "\<not> m \<le> n \<longleftrightarrow> Suc n \<le> m"
by (simp only: not_le Suc_le_eq)
text \<open>Properties of "less than or equal".\<close>
lemma le_imp_less_Suc: "m \<le> n \<Longrightarrow> m < Suc n"
by (simp only: less_Suc_eq_le)
lemma Suc_n_not_le_n: "\<not> Suc n \<le> n"
by (simp add: not_le less_Suc_eq_le)
lemma le_Suc_eq: "m \<le> Suc n \<longleftrightarrow> m \<le> n \<or> m = Suc n"
by (simp add: less_Suc_eq_le [symmetric] less_Suc_eq)
lemma le_SucE: "m \<le> Suc n \<Longrightarrow> (m \<le> n \<Longrightarrow> R) \<Longrightarrow> (m = Suc n \<Longrightarrow> R) \<Longrightarrow> R"
by (drule le_Suc_eq [THEN iffD1], iprover+)
lemma Suc_leI: "m < n \<Longrightarrow> Suc m \<le> n"
by (simp only: Suc_le_eq)
text \<open>Stronger version of \<open>Suc_leD\<close>.\<close>
lemma Suc_le_lessD: "Suc m \<le> n \<Longrightarrow> m < n"
by (simp only: Suc_le_eq)
lemma less_imp_le_nat: "m < n \<Longrightarrow> m \<le> n" for m n :: nat
unfolding less_eq_Suc_le by (rule Suc_leD)
text \<open>For instance, \<open>(Suc m < Suc n) = (Suc m \<le> n) = (m < n)\<close>\<close>
lemmas le_simps = less_imp_le_nat less_Suc_eq_le Suc_le_eq
text \<open>Equivalence of \<open>m \<le> n\<close> and \<open>m < n \<or> m = n\<close>\<close>
lemma less_or_eq_imp_le: "m < n \<or> m = n \<Longrightarrow> m \<le> n"
for m n :: nat
unfolding le_less .
lemma le_eq_less_or_eq: "m \<le> n \<longleftrightarrow> m < n \<or> m = n"
for m n :: nat
by (rule le_less)
text \<open>Useful with \<open>blast\<close>.\<close>
lemma eq_imp_le: "m = n \<Longrightarrow> m \<le> n"
for m n :: nat
by auto
lemma le_refl: "n \<le> n"
for n :: nat
by simp
lemma le_trans: "i \<le> j \<Longrightarrow> j \<le> k \<Longrightarrow> i \<le> k"
for i j k :: nat
by (rule order_trans)
lemma le_antisym: "m \<le> n \<Longrightarrow> n \<le> m \<Longrightarrow> m = n"
for m n :: nat
by (rule antisym)
lemma nat_less_le: "m < n \<longleftrightarrow> m \<le> n \<and> m \<noteq> n"
for m n :: nat
by (rule less_le)
lemma le_neq_implies_less: "m \<le> n \<Longrightarrow> m \<noteq> n \<Longrightarrow> m < n"
for m n :: nat
unfolding less_le ..
lemma nat_le_linear: "m \<le> n \<or> n \<le> m"
for m n :: nat
by (rule linear)
lemmas linorder_neqE_nat = linorder_neqE [where 'a = nat]
lemma le_less_Suc_eq: "m \<le> n \<Longrightarrow> n < Suc m \<longleftrightarrow> n = m"
unfolding less_Suc_eq_le by auto
lemma not_less_less_Suc_eq: "\<not> n < m \<Longrightarrow> n < Suc m \<longleftrightarrow> n = m"
unfolding not_less by (rule le_less_Suc_eq)
lemmas not_less_simps = not_less_less_Suc_eq le_less_Suc_eq
lemma not0_implies_Suc: "n \<noteq> 0 \<Longrightarrow> \<exists>m. n = Suc m"
by (cases n) simp_all
lemma gr0_implies_Suc: "n > 0 \<Longrightarrow> \<exists>m. n = Suc m"
by (cases n) simp_all
lemma gr_implies_not0: "m < n \<Longrightarrow> n \<noteq> 0"
for m n :: nat
by (cases n) simp_all
lemma neq0_conv[iff]: "n \<noteq> 0 \<longleftrightarrow> 0 < n"
for n :: nat
by (cases n) simp_all
text \<open>This theorem is useful with \<open>blast\<close>\<close>
lemma gr0I: "(n = 0 \<Longrightarrow> False) \<Longrightarrow> 0 < n"
for n :: nat
by (rule neq0_conv[THEN iffD1]) iprover
lemma gr0_conv_Suc: "0 < n \<longleftrightarrow> (\<exists>m. n = Suc m)"
by (fast intro: not0_implies_Suc)
lemma not_gr0 [iff]: "\<not> 0 < n \<longleftrightarrow> n = 0"
for n :: nat
using neq0_conv by blast
lemma Suc_le_D: "Suc n \<le> m' \<Longrightarrow> \<exists>m. m' = Suc m"
by (induct m') simp_all
text \<open>Useful in certain inductive arguments\<close>
lemma less_Suc_eq_0_disj: "m < Suc n \<longleftrightarrow> m = 0 \<or> (\<exists>j. m = Suc j \<and> j < n)"
by (cases m) simp_all
lemma All_less_Suc: "(\<forall>i < Suc n. P i) = (P n \<and> (\<forall>i < n. P i))"
by (auto simp: less_Suc_eq)
lemma All_less_Suc2: "(\<forall>i < Suc n. P i) = (P 0 \<and> (\<forall>i < n. P(Suc i)))"
by (auto simp: less_Suc_eq_0_disj)
lemma Ex_less_Suc: "(\<exists>i < Suc n. P i) = (P n \<or> (\<exists>i < n. P i))"
by (auto simp: less_Suc_eq)
lemma Ex_less_Suc2: "(\<exists>i < Suc n. P i) = (P 0 \<or> (\<exists>i < n. P(Suc i)))"
by (auto simp: less_Suc_eq_0_disj)
text \<open>@{term mono} (non-strict) doesn't imply increasing, as the function could be constant\<close>
lemma strict_mono_imp_increasing:
fixes n::nat
assumes "strict_mono f" shows "f n \<ge> n"
proof (induction n)
case 0
then show ?case
by auto
next
case (Suc n)
then show ?case
unfolding not_less_eq_eq [symmetric]
using Suc_n_not_le_n assms order_trans strict_mono_less_eq by blast
qed
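text \<open>Editor's illustration (not part of the original theory): a constant function witnesses
that plain \<^const>\<open>mono\<close> does not imply \<open>f n \<ge> n\<close>.\<close>
lemma "mono (\<lambda>n::nat. 0::nat)"
by (simp add: mono_def)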
subsubsection \<open>Monotonicity of Addition\<close>
lemma Suc_pred [simp]: "n > 0 \<Longrightarrow> Suc (n - Suc 0) = n"
by (simp add: diff_Suc split: nat.split)
lemma Suc_diff_1 [simp]: "0 < n \<Longrightarrow> Suc (n - 1) = n"
unfolding One_nat_def by (rule Suc_pred)
lemma nat_add_left_cancel_le [simp]: "k + m \<le> k + n \<longleftrightarrow> m \<le> n"
for k m n :: nat
by (induct k) simp_all
lemma nat_add_left_cancel_less [simp]: "k + m < k + n \<longleftrightarrow> m < n"
for k m n :: nat
by (induct k) simp_all
lemma add_gr_0 [iff]: "m + n > 0 \<longleftrightarrow> m > 0 \<or> n > 0"
for m n :: nat
by (auto dest: gr0_implies_Suc)
text \<open>strict, in 1st argument\<close>
lemma add_less_mono1: "i < j \<Longrightarrow> i + k < j + k"
for i j k :: nat
by (induct k) simp_all
text \<open>strict, in both arguments\<close>
lemma add_less_mono:
fixes i j k l :: nat
assumes "i < j" "k < l" shows "i + k < j + l"
proof -
have "i + k < j + k"
by (simp add: add_less_mono1 assms)
also have "... < j + l"
using \<open>i < j\<close> by (induction j) (auto simp: assms)
finally show ?thesis .
qed
lemma less_imp_Suc_add: "m < n \<Longrightarrow> \<exists>k. n = Suc (m + k)"
proof (induct n)
case 0
then show ?case by simp
next
case Suc
then show ?case
by (simp add: order_le_less)
(blast elim!: less_SucE intro!: Nat.add_0_right [symmetric] add_Suc_right [symmetric])
qed
lemma le_Suc_ex: "k \<le> l \<Longrightarrow> (\<exists>n. l = k + n)"
for k l :: nat
by (auto simp: less_Suc_eq_le[symmetric] dest: less_imp_Suc_add)
lemma less_natE:
assumes \<open>m < n\<close>
obtains q where \<open>n = Suc (m + q)\<close>
using assms by (auto dest: less_imp_Suc_add intro: that)
text \<open>strict, in 1st argument; proof is by induction on \<open>k > 0\<close>\<close>
lemma mult_less_mono2:
fixes i j :: nat
assumes "i < j" and "0 < k"
shows "k * i < k * j"
using \<open>0 < k\<close>
proof (induct k)
case 0
then show ?case by simp
next
case (Suc k)
with \<open>i < j\<close> show ?case
by (cases k) (simp_all add: add_less_mono)
qed
text \<open>Addition is the inverse of subtraction:
if \<^term>\<open>n \<le> m\<close> then \<^term>\<open>n + (m - n) = m\<close>.\<close>
lemma add_diff_inverse_nat: "\<not> m < n \<Longrightarrow> n + (m - n) = m"
for m n :: nat
by (induct m n rule: diff_induct) simp_all
lemma nat_le_iff_add: "m \<le> n \<longleftrightarrow> (\<exists>k. n = m + k)"
for m n :: nat
using nat_add_left_cancel_le[of m 0] by (auto dest: le_Suc_ex)
text \<open>The naturals form an ordered \<open>semidom\<close> and a \<open>dioid\<close>.\<close>
instance nat :: linordered_semidom
proof
fix m n q :: nat
show "0 < (1::nat)"
by simp
show "m \<le> n \<Longrightarrow> q + m \<le> q + n"
by simp
show "m < n \<Longrightarrow> 0 < q \<Longrightarrow> q * m < q * n"
by (simp add: mult_less_mono2)
show "m \<noteq> 0 \<Longrightarrow> n \<noteq> 0 \<Longrightarrow> m * n \<noteq> 0"
by simp
show "n \<le> m \<Longrightarrow> (m - n) + n = m"
by (simp add: add_diff_inverse_nat add.commute linorder_not_less)
qed
instance nat :: dioid
by standard (rule nat_le_iff_add)
declare le0[simp del] \<comment> \<open>This is now @{thm zero_le}\<close>
declare le_0_eq[simp del] \<comment> \<open>This is now @{thm le_zero_eq}\<close>
declare not_less0[simp del] \<comment> \<open>This is now @{thm not_less_zero}\<close>
declare not_gr0[simp del] \<comment> \<open>This is now @{thm not_gr_zero}\<close>
instance nat :: ordered_cancel_comm_monoid_add ..
instance nat :: ordered_cancel_comm_monoid_diff ..
subsubsection \<open>\<^term>\<open>min\<close> and \<^term>\<open>max\<close>\<close>
global_interpretation bot_nat_0: ordering_top \<open>(\<ge>)\<close> \<open>(>)\<close> \<open>0::nat\<close>
by standard simp
global_interpretation max_nat: semilattice_neutr_order max \<open>0::nat\<close> \<open>(\<ge>)\<close> \<open>(>)\<close>
by standard (simp add: max_def)
lemma mono_Suc: "mono Suc"
by (rule monoI) simp
lemma min_0L [simp]: "min 0 n = 0"
for n :: nat
by (rule min_absorb1) simp
lemma min_0R [simp]: "min n 0 = 0"
for n :: nat
by (rule min_absorb2) simp
lemma min_Suc_Suc [simp]: "min (Suc m) (Suc n) = Suc (min m n)"
by (simp add: mono_Suc min_of_mono)
lemma min_Suc1: "min (Suc n) m = (case m of 0 \<Rightarrow> 0 | Suc m' \<Rightarrow> Suc(min n m'))"
by (simp split: nat.split)
lemma min_Suc2: "min m (Suc n) = (case m of 0 \<Rightarrow> 0 | Suc m' \<Rightarrow> Suc(min m' n))"
by (simp split: nat.split)
lemma max_0L [simp]: "max 0 n = n"
for n :: nat
by (fact max_nat.left_neutral)
lemma max_0R [simp]: "max n 0 = n"
for n :: nat
by (fact max_nat.right_neutral)
lemma max_Suc_Suc [simp]: "max (Suc m) (Suc n) = Suc (max m n)"
by (simp add: mono_Suc max_of_mono)
lemma max_Suc1: "max (Suc n) m = (case m of 0 \<Rightarrow> Suc n | Suc m' \<Rightarrow> Suc (max n m'))"
by (simp split: nat.split)
lemma max_Suc2: "max m (Suc n) = (case m of 0 \<Rightarrow> Suc n | Suc m' \<Rightarrow> Suc (max m' n))"
by (simp split: nat.split)
lemma nat_mult_min_left: "min m n * q = min (m * q) (n * q)"
for m n q :: nat
by (simp add: min_def not_le)
(auto dest: mult_right_le_imp_le mult_right_less_imp_less le_less_trans)
lemma nat_mult_min_right: "m * min n q = min (m * n) (m * q)"
for m n q :: nat
by (simp add: min_def not_le)
(auto dest: mult_left_le_imp_le mult_left_less_imp_less le_less_trans)
lemma nat_add_max_left: "max m n + q = max (m + q) (n + q)"
for m n q :: nat
by (simp add: max_def)
lemma nat_add_max_right: "m + max n q = max (m + n) (m + q)"
for m n q :: nat
by (simp add: max_def)
lemma nat_mult_max_left: "max m n * q = max (m * q) (n * q)"
for m n q :: nat
by (simp add: max_def not_le)
(auto dest: mult_right_le_imp_le mult_right_less_imp_less le_less_trans)
lemma nat_mult_max_right: "m * max n q = max (m * n) (m * q)"
for m n q :: nat
by (simp add: max_def not_le)
(auto dest: mult_left_le_imp_le mult_left_less_imp_less le_less_trans)
subsubsection \<open>Additional theorems about \<^term>\<open>(\<le>)\<close>\<close>
text \<open>Complete induction, aka course-of-values induction\<close>
instance nat :: wellorder
proof
fix P and n :: nat
assume step: "(\<And>m. m < n \<Longrightarrow> P m) \<Longrightarrow> P n" for n :: nat
have "\<And>q. q \<le> n \<Longrightarrow> P q"
proof (induct n)
case (0 n)
have "P 0" by (rule step) auto
with 0 show ?case by auto
next
case (Suc m n)
then have "n \<le> m \<or> n = Suc m"
by (simp add: le_Suc_eq)
then show ?case
proof
assume "n \<le> m"
then show "P n" by (rule Suc(1))
next
assume n: "n = Suc m"
show "P n" by (rule step) (rule Suc(1), simp add: n le_simps)
qed
qed
then show "P n" by auto
qed
lemma Least_eq_0[simp]: "P 0 \<Longrightarrow> Least P = 0"
for P :: "nat \<Rightarrow> bool"
by (rule Least_equality[OF _ le0])
lemma Least_Suc:
assumes "P n" "\<not> P 0"
shows "(LEAST n. P n) = Suc (LEAST m. P (Suc m))"
proof (cases n)
case (Suc m)
show ?thesis
proof (rule antisym)
show "(LEAST x. P x) \<le> Suc (LEAST x. P (Suc x))"
using assms Suc by (force intro: LeastI Least_le)
have \<section>: "P (LEAST x. P x)"
by (blast intro: LeastI assms)
show "Suc (LEAST m. P (Suc m)) \<le> (LEAST n. P n)"
proof (cases "(LEAST n. P n)")
case 0
then show ?thesis
using \<section> by (simp add: assms)
next
case Suc
with \<section> show ?thesis
by (auto simp: Least_le)
qed
qed
qed (use assms in auto)
lemma Least_Suc2: "P n \<Longrightarrow> Q m \<Longrightarrow> \<not> P 0 \<Longrightarrow> \<forall>k. P (Suc k) = Q k \<Longrightarrow> Least P = Suc (Least Q)"
by (erule (1) Least_Suc [THEN ssubst]) simp
lemma ex_least_nat_le:
fixes P :: "nat \<Rightarrow> bool"
assumes "P n" "\<not> P 0"
shows "\<exists>k\<le>n. (\<forall>i<k. \<not> P i) \<and> P k"
proof (cases n)
case (Suc m)
with assms show ?thesis
by (blast intro: Least_le LeastI_ex dest: not_less_Least)
qed (use assms in auto)
lemma ex_least_nat_less:
fixes P :: "nat \<Rightarrow> bool"
assumes "P n" "\<not> P 0"
shows "\<exists>k<n. (\<forall>i\<le>k. \<not> P i) \<and> P (Suc k)"
proof (cases n)
case (Suc m)
then obtain k where k: "k \<le> n" "\<forall>i<k. \<not> P i" "P k"
using ex_least_nat_le [OF assms] by blast
show ?thesis
by (cases k) (use assms k less_eq_Suc_le in auto)
qed (use assms in auto)
lemma nat_less_induct:
fixes P :: "nat \<Rightarrow> bool"
assumes "\<And>n. \<forall>m. m < n \<longrightarrow> P m \<Longrightarrow> P n"
shows "P n"
using assms less_induct by blast
lemma measure_induct_rule [case_names less]:
fixes f :: "'a \<Rightarrow> 'b::wellorder"
assumes step: "\<And>x. (\<And>y. f y < f x \<Longrightarrow> P y) \<Longrightarrow> P x"
shows "P a"
by (induct m \<equiv> "f a" arbitrary: a rule: less_induct) (auto intro: step)
text \<open>old style induction rules:\<close>
lemma measure_induct:
fixes f :: "'a \<Rightarrow> 'b::wellorder"
shows "(\<And>x. \<forall>y. f y < f x \<longrightarrow> P y \<Longrightarrow> P x) \<Longrightarrow> P a"
by (rule measure_induct_rule [of f P a]) iprover
lemma full_nat_induct:
assumes step: "\<And>n. (\<forall>m. Suc m \<le> n \<longrightarrow> P m) \<Longrightarrow> P n"
shows "P n"
by (rule less_induct) (auto intro: step simp:le_simps)
text\<open>An induction rule for establishing binary relations\<close>
lemma less_Suc_induct [consumes 1]:
assumes less: "i < j"
and step: "\<And>i. P i (Suc i)"
and trans: "\<And>i j k. i < j \<Longrightarrow> j < k \<Longrightarrow> P i j \<Longrightarrow> P j k \<Longrightarrow> P i k"
shows "P i j"
proof -
from less obtain k where j: "j = Suc (i + k)"
by (auto dest: less_imp_Suc_add)
have "P i (Suc (i + k))"
proof (induct k)
case 0
show ?case by (simp add: step)
next
case (Suc k)
have "0 + i < Suc k + i" by (rule add_less_mono1) simp
then have "i < Suc (i + k)" by (simp add: add.commute)
from trans[OF this lessI Suc step]
show ?case by simp
qed
then show "P i j" by (simp add: j)
qed
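text \<open>Added illustration (a sketch, not from the original theory): since \<open>(\<le>)\<close> contains every
successor step and is transitive, the rule above re-derives \<open>i < j \<Longrightarrow> i \<le> j\<close>. The statement is of
course provable directly; the point is only to display the two cases. The proof assumes the basic
facts \<open>le_SucI\<close> and \<open>order_trans\<close> available at this point.\<close>
lemma
  fixes i j :: nat
  assumes "i < j"
  shows "i \<le> j"
  using assms
proof (induct i j rule: less_Suc_induct)
  case (1 i)  \<comment> \<open>successor step\<close>
  show ?case by (simp add: le_SucI)
next
  case (2 i j k)  \<comment> \<open>transitivity step\<close>
  then show ?case by (blast intro: order_trans)
qed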
text \<open>
The method of infinite descent, frequently used in number theory.
Provided by Roelof Oosterhuis.
\<open>P n\<close> is true for all natural numbers if
\<^item> case ``0'': given \<open>n = 0\<close> prove \<open>P n\<close>
\<^item> case ``smaller'': given \<open>n > 0\<close> and \<open>\<not> P n\<close> prove there exists
a smaller natural number \<open>m\<close> such that \<open>\<not> P m\<close>.
\<close>
lemma infinite_descent: "(\<And>n. \<not> P n \<Longrightarrow> \<exists>m<n. \<not> P m) \<Longrightarrow> P n" for P :: "nat \<Rightarrow> bool"
\<comment> \<open>compact version without explicit base case\<close>
by (induct n rule: less_induct) auto
lemma infinite_descent0 [case_names 0 smaller]:
fixes P :: "nat \<Rightarrow> bool"
assumes "P 0"
and "\<And>n. n > 0 \<Longrightarrow> \<not> P n \<Longrightarrow> \<exists>m. m < n \<and> \<not> P m"
shows "P n"
proof (rule infinite_descent)
fix n
show "\<not> P n \<Longrightarrow> \<exists>m<n. \<not> P m"
using assms by (cases "n > 0") auto
qed
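text \<open>Added illustration (a sketch, not from the original theory): a deliberately trivial
application of \<open>infinite_descent0\<close>, only to display its case structure; in the ``smaller'' case
the negated statement is already contradictory, so the simplifier closes the goal.\<close>
lemma "n + 0 = (n::nat)"
proof (induct n rule: infinite_descent0)
  case 0
  show ?case by simp
next
  case (smaller n)
  then show ?case by simp
qed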
text \<open>
Infinite descent using a mapping to \<open>nat\<close>:
\<open>P x\<close> is true for all \<open>x \<in> D\<close> if there exists a \<open>V \<in> D \<Rightarrow> nat\<close> and
\<^item> case ``0'': given \<open>V x = 0\<close> prove \<open>P x\<close>
\<^item> case ``smaller'': given \<open>V x > 0\<close> and \<open>\<not> P x\<close> prove
there exists a \<open>y \<in> D\<close> such that \<open>V y < V x\<close> and \<open>\<not> P y\<close>.
\<close>
corollary infinite_descent0_measure [case_names 0 smaller]:
fixes V :: "'a \<Rightarrow> nat"
assumes 1: "\<And>x. V x = 0 \<Longrightarrow> P x"
and 2: "\<And>x. V x > 0 \<Longrightarrow> \<not> P x \<Longrightarrow> \<exists>y. V y < V x \<and> \<not> P y"
shows "P x"
proof -
obtain n where "n = V x" by auto
moreover have "\<And>x. V x = n \<Longrightarrow> P x"
proof (induct n rule: infinite_descent0)
case 0
with 1 show "P x" by auto
next
case (smaller n)
then obtain x where *: "V x = n " and "V x > 0 \<and> \<not> P x" by auto
with 2 obtain y where "V y < V x \<and> \<not> P y" by auto
with * obtain m where "m = V y \<and> m < n \<and> \<not> P y" by auto
then show ?case by auto
qed
ultimately show "P x" by auto
qed
text \<open>Again, without explicit base case:\<close>
lemma infinite_descent_measure:
fixes V :: "'a \<Rightarrow> nat"
assumes "\<And>x. \<not> P x \<Longrightarrow> \<exists>y. V y < V x \<and> \<not> P y"
shows "P x"
proof -
from assms obtain n where "n = V x" by auto
moreover have "\<And>x. V x = n \<Longrightarrow> P x"
proof -
have "\<exists>m < V x. \<exists>y. V y = m \<and> \<not> P y" if "\<not> P x" for x
using assms and that by auto
then show "\<And>x. V x = n \<Longrightarrow> P x"
by (induct n rule: infinite_descent, auto)
qed
ultimately show "P x" by auto
qed
text \<open>A (clumsy) way of lifting \<open><\<close> monotonicity to \<open>\<le>\<close> monotonicity\<close>
lemma less_mono_imp_le_mono:
fixes f :: "nat \<Rightarrow> nat"
and i j :: nat
assumes "\<And>i j::nat. i < j \<Longrightarrow> f i < f j"
and "i \<le> j"
shows "f i \<le> f j"
using assms by (auto simp add: order_le_less)
text \<open>non-strict, in 1st argument\<close>
lemma add_le_mono1: "i \<le> j \<Longrightarrow> i + k \<le> j + k"
for i j k :: nat
by (rule add_right_mono)
text \<open>non-strict, in both arguments\<close>
lemma add_le_mono: "i \<le> j \<Longrightarrow> k \<le> l \<Longrightarrow> i + k \<le> j + l"
for i j k l :: nat
by (rule add_mono)
lemma le_add2: "n \<le> m + n"
for m n :: nat
by simp
lemma le_add1: "n \<le> n + m"
for m n :: nat
by simp
lemma less_add_Suc1: "i < Suc (i + m)"
by (rule le_less_trans, rule le_add1, rule lessI)
lemma less_add_Suc2: "i < Suc (m + i)"
by (rule le_less_trans, rule le_add2, rule lessI)
lemma less_iff_Suc_add: "m < n \<longleftrightarrow> (\<exists>k. n = Suc (m + k))"
by (iprover intro!: less_add_Suc1 less_imp_Suc_add)
lemma trans_le_add1: "i \<le> j \<Longrightarrow> i \<le> j + m"
for i j m :: nat
by (rule le_trans, assumption, rule le_add1)
lemma trans_le_add2: "i \<le> j \<Longrightarrow> i \<le> m + j"
for i j m :: nat
by (rule le_trans, assumption, rule le_add2)
lemma trans_less_add1: "i < j \<Longrightarrow> i < j + m"
for i j m :: nat
by (rule less_le_trans, assumption, rule le_add1)
lemma trans_less_add2: "i < j \<Longrightarrow> i < m + j"
for i j m :: nat
by (rule less_le_trans, assumption, rule le_add2)
lemma add_lessD1: "i + j < k \<Longrightarrow> i < k"
for i j k :: nat
by (rule le_less_trans [of _ "i+j"]) (simp_all add: le_add1)
lemma not_add_less1 [iff]: "\<not> i + j < i"
for i j :: nat
by simp
lemma not_add_less2 [iff]: "\<not> j + i < i"
for i j :: nat
by simp
lemma add_leD1: "m + k \<le> n \<Longrightarrow> m \<le> n"
for k m n :: nat
by (rule order_trans [of _ "m + k"]) (simp_all add: le_add1)
lemma add_leD2: "m + k \<le> n \<Longrightarrow> k \<le> n"
for k m n :: nat
by (force simp add: add.commute dest: add_leD1)
lemma add_leE: "m + k \<le> n \<Longrightarrow> (m \<le> n \<Longrightarrow> k \<le> n \<Longrightarrow> R) \<Longrightarrow> R"
for k m n :: nat
by (blast dest: add_leD1 add_leD2)
text \<open>needs \<open>\<And>k\<close> for \<open>ac_simps\<close> to work\<close>
lemma less_add_eq_less: "\<And>k. k < l \<Longrightarrow> m + l = k + n \<Longrightarrow> m < n"
for l m n :: nat
by (force simp del: add_Suc_right simp add: less_iff_Suc_add add_Suc_right [symmetric] ac_simps)
subsubsection \<open>More results about difference\<close>
lemma Suc_diff_le: "n \<le> m \<Longrightarrow> Suc m - n = Suc (m - n)"
by (induct m n rule: diff_induct) simp_all
lemma diff_less_Suc: "m - n < Suc m"
by (induct m n rule: diff_induct) (auto simp: less_Suc_eq)
lemma diff_le_self [simp]: "m - n \<le> m"
for m n :: nat
by (induct m n rule: diff_induct) (simp_all add: le_SucI)
lemma less_imp_diff_less: "j < k \<Longrightarrow> j - n < k"
for j k n :: nat
by (rule le_less_trans, rule diff_le_self)
lemma diff_Suc_less [simp]: "0 < n \<Longrightarrow> n - Suc i < n"
by (cases n) (auto simp add: le_simps)
lemma diff_add_assoc: "k \<le> j \<Longrightarrow> (i + j) - k = i + (j - k)"
for i j k :: nat
by (fact ordered_cancel_comm_monoid_diff_class.diff_add_assoc)
lemma add_diff_assoc [simp]: "k \<le> j \<Longrightarrow> i + (j - k) = i + j - k"
for i j k :: nat
by (fact ordered_cancel_comm_monoid_diff_class.add_diff_assoc)
lemma diff_add_assoc2: "k \<le> j \<Longrightarrow> (j + i) - k = (j - k) + i"
for i j k :: nat
by (fact ordered_cancel_comm_monoid_diff_class.diff_add_assoc2)
lemma add_diff_assoc2 [simp]: "k \<le> j \<Longrightarrow> j - k + i = j + i - k"
for i j k :: nat
by (fact ordered_cancel_comm_monoid_diff_class.add_diff_assoc2)
lemma le_imp_diff_is_add: "i \<le> j \<Longrightarrow> (j - i = k) = (j = k + i)"
for i j k :: nat
by auto
lemma diff_is_0_eq [simp]: "m - n = 0 \<longleftrightarrow> m \<le> n"
for m n :: nat
by (induct m n rule: diff_induct) simp_all
lemma diff_is_0_eq' [simp]: "m \<le> n \<Longrightarrow> m - n = 0"
for m n :: nat
by (rule iffD2, rule diff_is_0_eq)
lemma zero_less_diff [simp]: "0 < n - m \<longleftrightarrow> m < n"
for m n :: nat
by (induct m n rule: diff_induct) simp_all
lemma less_imp_add_positive:
assumes "i < j"
shows "\<exists>k::nat. 0 < k \<and> i + k = j"
proof
from assms show "0 < j - i \<and> i + (j - i) = j"
by (simp add: order_less_imp_le)
qed
text \<open>a nice rewrite for bounded subtraction\<close>
lemma nat_minus_add_max: "n - m + m = max n m"
for m n :: nat
by (simp add: max_def not_le order_less_imp_le)
lemma nat_diff_split: "P (a - b) \<longleftrightarrow> (a < b \<longrightarrow> P 0) \<and> (\<forall>d. a = b + d \<longrightarrow> P d)"
for a b :: nat
\<comment> \<open>elimination of \<open>-\<close> on \<open>nat\<close>\<close>
by (cases "a < b") (auto simp add: not_less le_less dest!: add_eq_self_zero [OF sym])
lemma nat_diff_split_asm: "P (a - b) \<longleftrightarrow> \<not> (a < b \<and> \<not> P 0 \<or> (\<exists>d. a = b + d \<and> \<not> P d))"
for a b :: nat
\<comment> \<open>elimination of \<open>-\<close> on \<open>nat\<close> in assumptions\<close>
by (auto split: nat_diff_split)
lemma Suc_pred': "0 < n \<Longrightarrow> n = Suc(n - 1)"
by simp
lemma add_eq_if: "m + n = (if m = 0 then n else Suc ((m - 1) + n))"
unfolding One_nat_def by (cases m) simp_all
lemma mult_eq_if: "m * n = (if m = 0 then 0 else n + ((m - 1) * n))"
for m n :: nat
by (cases m) simp_all
lemma Suc_diff_eq_diff_pred: "0 < n \<Longrightarrow> Suc m - n = m - (n - 1)"
by (cases n) simp_all
lemma diff_Suc_eq_diff_pred: "m - Suc n = (m - 1) - n"
by (cases m) simp_all
lemma Let_Suc [simp]: "Let (Suc n) f \<equiv> f (Suc n)"
by (fact Let_def)
subsubsection \<open>Monotonicity of multiplication\<close>
lemma mult_le_mono1: "i \<le> j \<Longrightarrow> i * k \<le> j * k"
for i j k :: nat
by (simp add: mult_right_mono)
lemma mult_le_mono2: "i \<le> j \<Longrightarrow> k * i \<le> k * j"
for i j k :: nat
by (simp add: mult_left_mono)
text \<open>\<open>\<le>\<close> monotonicity, BOTH arguments\<close>
lemma mult_le_mono: "i \<le> j \<Longrightarrow> k \<le> l \<Longrightarrow> i * k \<le> j * l"
for i j k l :: nat
by (simp add: mult_mono)
lemma mult_less_mono1: "i < j \<Longrightarrow> 0 < k \<Longrightarrow> i * k < j * k"
for i j k :: nat
by (simp add: mult_strict_right_mono)
text \<open>Differs from the standard \<open>zero_less_mult_iff\<close> in that there are no negative numbers.\<close>
lemma nat_0_less_mult_iff [simp]: "0 < m * n \<longleftrightarrow> 0 < m \<and> 0 < n"
for m n :: nat
proof (induct m)
case 0
then show ?case by simp
next
case (Suc m)
then show ?case by (cases n) simp_all
qed
lemma one_le_mult_iff [simp]: "Suc 0 \<le> m * n \<longleftrightarrow> Suc 0 \<le> m \<and> Suc 0 \<le> n"
proof (induct m)
case 0
then show ?case by simp
next
case (Suc m)
then show ?case by (cases n) simp_all
qed
lemma mult_less_cancel2 [simp]: "m * k < n * k \<longleftrightarrow> 0 < k \<and> m < n"
for k m n :: nat
proof (intro iffI conjI)
assume m: "m * k < n * k"
then show "0 < k"
by (cases k) auto
show "m < n"
proof (cases k)
case 0
then show ?thesis
using m by auto
next
case (Suc k')
then show ?thesis
using m
by (simp flip: linorder_not_le) (blast intro: add_mono mult_le_mono1)
qed
next
assume "0 < k \<and> m < n"
then show "m * k < n * k"
by (blast intro: mult_less_mono1)
qed
lemma mult_less_cancel1 [simp]: "k * m < k * n \<longleftrightarrow> 0 < k \<and> m < n"
for k m n :: nat
by (simp add: mult.commute [of k])
lemma mult_le_cancel1 [simp]: "k * m \<le> k * n \<longleftrightarrow> (0 < k \<longrightarrow> m \<le> n)"
for k m n :: nat
by (simp add: linorder_not_less [symmetric], auto)
lemma mult_le_cancel2 [simp]: "m * k \<le> n * k \<longleftrightarrow> (0 < k \<longrightarrow> m \<le> n)"
for k m n :: nat
by (simp add: linorder_not_less [symmetric], auto)
lemma Suc_mult_less_cancel1: "Suc k * m < Suc k * n \<longleftrightarrow> m < n"
by (subst mult_less_cancel1) simp
lemma Suc_mult_le_cancel1: "Suc k * m \<le> Suc k * n \<longleftrightarrow> m \<le> n"
by (subst mult_le_cancel1) simp
lemma le_square: "m \<le> m * m"
for m :: nat
by (cases m) (auto intro: le_add1)
lemma le_cube: "m \<le> m * (m * m)"
for m :: nat
by (cases m) (auto intro: le_add1)
text \<open>Lemma for \<open>gcd\<close>\<close>
lemma mult_eq_self_implies_10:
fixes m n :: nat
assumes "m = m * n" shows "n = 1 \<or> m = 0"
proof (rule disjCI)
assume "m \<noteq> 0"
show "n = 1"
proof (cases n "1::nat" rule: linorder_cases)
case greater
show ?thesis
using assms mult_less_mono2 [OF greater, of m] \<open>m \<noteq> 0\<close> by auto
qed (use assms \<open>m \<noteq> 0\<close> in auto)
qed
lemma mono_times_nat:
fixes n :: nat
assumes "n > 0"
shows "mono (times n)"
proof
fix m q :: nat
assume "m \<le> q"
with assms show "n * m \<le> n * q" by simp
qed
text \<open>The lattice order on \<^typ>\<open>nat\<close>.\<close>
instantiation nat :: distrib_lattice
begin
definition "(inf :: nat \<Rightarrow> nat \<Rightarrow> nat) = min"
definition "(sup :: nat \<Rightarrow> nat \<Rightarrow> nat) = max"
instance
by intro_classes
(auto simp add: inf_nat_def sup_nat_def max_def not_le min_def
intro: order_less_imp_le antisym elim!: order_trans order_less_trans)
end
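text \<open>Added illustration (a sketch, not from the original theory): on \<^typ>\<open>nat\<close> the lattice
operations just compute \<^term>\<open>min\<close> and \<^term>\<open>max\<close>.\<close>
lemma
  shows "inf (Suc 0) (0::nat) = 0"
    and "sup (Suc 0) (0::nat) = Suc 0"
  by (simp_all add: inf_nat_def sup_nat_def)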
subsection \<open>Natural operation of natural numbers on functions\<close>
text \<open>
We use the same logical constant for the power operations on
functions and relations, in order to share the same syntax.
\<close>
consts compow :: "nat \<Rightarrow> 'a \<Rightarrow> 'a"
abbreviation compower :: "'a \<Rightarrow> nat \<Rightarrow> 'a" (infixr "^^" 80)
where "f ^^ n \<equiv> compow n f"
notation (latex output)
compower ("(_\<^bsup>_\<^esup>)" [1000] 1000)
text \<open>\<open>f ^^ n = f \<circ> \<dots> \<circ> f\<close>, the \<open>n\<close>-fold composition of \<open>f\<close>\<close>
overloading
funpow \<equiv> "compow :: nat \<Rightarrow> ('a \<Rightarrow> 'a) \<Rightarrow> ('a \<Rightarrow> 'a)"
begin
primrec funpow :: "nat \<Rightarrow> ('a \<Rightarrow> 'a) \<Rightarrow> 'a \<Rightarrow> 'a"
where
"funpow 0 f = id"
| "funpow (Suc n) f = f \<circ> funpow n f"
end
lemma funpow_0 [simp]: "(f ^^ 0) x = x"
by simp
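text \<open>Added illustration (a sketch, not from the original theory): iterating \<open>Suc\<close> twice with the
\<open>^^\<close> notation unfolds as expected.\<close>
lemma "(Suc ^^ Suc (Suc 0)) n = Suc (Suc n)"
  by simp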
lemma funpow_Suc_right: "f ^^ Suc n = f ^^ n \<circ> f"
proof (induct n)
case 0
then show ?case by simp
next
fix n
assume "f ^^ Suc n = f ^^ n \<circ> f"
then show "f ^^ Suc (Suc n) = f ^^ Suc n \<circ> f"
by (simp add: o_assoc)
qed
lemmas funpow_simps_right = funpow.simps(1) funpow_Suc_right
text \<open>For code generation.\<close>
context
begin
qualified definition funpow :: "nat \<Rightarrow> ('a \<Rightarrow> 'a) \<Rightarrow> 'a \<Rightarrow> 'a"
where funpow_code_def [code_abbrev]: "funpow = compow"
lemma [code]:
"funpow (Suc n) f = f \<circ> funpow n f"
"funpow 0 f = id"
by (simp_all add: funpow_code_def)
end
lemma funpow_add: "f ^^ (m + n) = f ^^ m \<circ> f ^^ n"
by (induct m) simp_all
lemma funpow_mult: "(f ^^ m) ^^ n = f ^^ (m * n)"
for f :: "'a \<Rightarrow> 'a"
by (induct n) (simp_all add: funpow_add)
lemma funpow_swap1: "f ((f ^^ n) x) = (f ^^ n) (f x)"
proof -
have "f ((f ^^ n) x) = (f ^^ (n + 1)) x" by simp
also have "\<dots> = (f ^^ n \<circ> f ^^ 1) x" by (simp only: funpow_add)
also have "\<dots> = (f ^^ n) (f x)" by simp
finally show ?thesis .
qed
lemma comp_funpow: "comp f ^^ n = comp (f ^^ n)"
for f :: "'a \<Rightarrow> 'a"
by (induct n) simp_all
lemma Suc_funpow[simp]: "Suc ^^ n = ((+) n)"
by (induct n) simp_all
lemma id_funpow[simp]: "id ^^ n = id"
by (induct n) simp_all
lemma funpow_mono: "mono f \<Longrightarrow> A \<le> B \<Longrightarrow> (f ^^ n) A \<le> (f ^^ n) B"
for f :: "'a \<Rightarrow> ('a::order)"
by (induct n arbitrary: A B)
(auto simp del: funpow.simps(2) simp add: funpow_Suc_right mono_def)
lemma funpow_mono2:
assumes "mono f"
and "i \<le> j"
and "x \<le> y"
and "x \<le> f x"
shows "(f ^^ i) x \<le> (f ^^ j) y"
using assms(2,3)
proof (induct j arbitrary: y)
case 0
then show ?case by simp
next
case (Suc j)
show ?case
proof(cases "i = Suc j")
case True
with assms(1) Suc show ?thesis
by (simp del: funpow.simps add: funpow_simps_right monoD funpow_mono)
next
case False
with assms(1,4) Suc show ?thesis
by (simp del: funpow.simps add: funpow_simps_right le_eq_less_or_eq less_Suc_eq_le)
(simp add: Suc.hyps monoD order_subst1)
qed
qed
lemma inj_fn[simp]:
fixes f::"'a \<Rightarrow> 'a"
assumes "inj f"
shows "inj (f^^n)"
proof (induction n)
case Suc thus ?case using inj_compose[OF assms Suc.IH] by (simp del: comp_apply)
qed simp
lemma surj_fn[simp]:
fixes f::"'a \<Rightarrow> 'a"
assumes "surj f"
shows "surj (f^^n)"
proof (induction n)
case Suc thus ?case by (simp add: comp_surj[OF Suc.IH assms] del: comp_apply)
qed simp
lemma bij_fn[simp]:
fixes f::"'a \<Rightarrow> 'a"
assumes "bij f"
shows "bij (f^^n)"
by (rule bijI[OF inj_fn[OF bij_is_inj[OF assms]] surj_fn[OF bij_is_surj[OF assms]]])
lemma bij_betw_funpow: \<^marker>\<open>contributor \<open>Lars Noschinski\<close>\<close>
assumes "bij_betw f S S" shows "bij_betw (f ^^ n) S S"
proof (induct n)
case 0 then show ?case by (auto simp: id_def[symmetric])
next
case (Suc n)
then show ?case unfolding funpow.simps using assms by (rule bij_betw_trans)
qed
subsection \<open>Kleene iteration\<close>
lemma Kleene_iter_lpfp:
fixes f :: "'a::order_bot \<Rightarrow> 'a"
assumes "mono f"
and "f p \<le> p"
shows "(f ^^ k) bot \<le> p"
proof (induct k)
case 0
show ?case by simp
next
case Suc
show ?case
using monoD[OF assms(1) Suc] assms(2) by simp
qed
lemma lfp_Kleene_iter:
assumes "mono f"
and "(f ^^ Suc k) bot = (f ^^ k) bot"
shows "lfp f = (f ^^ k) bot"
proof (rule antisym)
show "lfp f \<le> (f ^^ k) bot"
proof (rule lfp_lowerbound)
show "f ((f ^^ k) bot) \<le> (f ^^ k) bot"
using assms(2) by simp
qed
show "(f ^^ k) bot \<le> lfp f"
using Kleene_iter_lpfp[OF assms(1)] lfp_unfold[OF assms(1)] by simp
qed
lemma mono_pow: "mono f \<Longrightarrow> mono (f ^^ n)"
for f :: "'a \<Rightarrow> 'a::complete_lattice"
by (induct n) (auto simp: mono_def)
lemma lfp_funpow:
assumes f: "mono f"
shows "lfp (f ^^ Suc n) = lfp f"
proof (rule antisym)
show "lfp f \<le> lfp (f ^^ Suc n)"
proof (rule lfp_lowerbound)
have "f (lfp (f ^^ Suc n)) = lfp (\<lambda>x. f ((f ^^ n) x))"
unfolding funpow_Suc_right by (simp add: lfp_rolling f mono_pow comp_def)
then show "f (lfp (f ^^ Suc n)) \<le> lfp (f ^^ Suc n)"
by (simp add: comp_def)
qed
have "(f ^^ n) (lfp f) = lfp f" for n
by (induct n) (auto intro: f lfp_fixpoint)
then show "lfp (f ^^ Suc n) \<le> lfp f"
by (intro lfp_lowerbound) (simp del: funpow.simps)
qed
lemma gfp_funpow:
assumes f: "mono f"
shows "gfp (f ^^ Suc n) = gfp f"
proof (rule antisym)
show "gfp f \<ge> gfp (f ^^ Suc n)"
proof (rule gfp_upperbound)
have "f (gfp (f ^^ Suc n)) = gfp (\<lambda>x. f ((f ^^ n) x))"
unfolding funpow_Suc_right by (simp add: gfp_rolling f mono_pow comp_def)
then show "f (gfp (f ^^ Suc n)) \<ge> gfp (f ^^ Suc n)"
by (simp add: comp_def)
qed
have "(f ^^ n) (gfp f) = gfp f" for n
by (induct n) (auto intro: f gfp_fixpoint)
then show "gfp (f ^^ Suc n) \<ge> gfp f"
by (intro gfp_upperbound) (simp del: funpow.simps)
qed
lemma Kleene_iter_gpfp:
fixes f :: "'a::order_top \<Rightarrow> 'a"
assumes "mono f"
and "p \<le> f p"
shows "p \<le> (f ^^ k) top"
proof (induct k)
case 0
show ?case by simp
next
case Suc
show ?case
using monoD[OF assms(1) Suc] assms(2) by simp
qed
lemma gfp_Kleene_iter:
assumes "mono f"
and "(f ^^ Suc k) top = (f ^^ k) top"
shows "gfp f = (f ^^ k) top"
(is "?lhs = ?rhs")
proof (rule antisym)
have "?rhs \<le> f ?rhs"
using assms(2) by simp
then show "?rhs \<le> ?lhs"
by (rule gfp_upperbound)
show "?lhs \<le> ?rhs"
using Kleene_iter_gpfp[OF assms(1)] gfp_unfold[OF assms(1)] by simp
qed
subsection \<open>Embedding of the naturals into any \<open>semiring_1\<close>: \<^term>\<open>of_nat\<close>\<close>
context semiring_1
begin
definition of_nat :: "nat \<Rightarrow> 'a"
where "of_nat n = (plus 1 ^^ n) 0"
lemma of_nat_simps [simp]:
shows of_nat_0: "of_nat 0 = 0"
and of_nat_Suc: "of_nat (Suc m) = 1 + of_nat m"
by (simp_all add: of_nat_def)
lemma of_nat_1 [simp]: "of_nat 1 = 1"
by (simp add: of_nat_def)
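text \<open>Added illustration (a sketch, not from the original theory): unfolding the embedding on a
small argument.\<close>
lemma "of_nat (Suc (Suc 0)) = 1 + 1"
  by simp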
lemma of_nat_add [simp]: "of_nat (m + n) = of_nat m + of_nat n"
by (induct m) (simp_all add: ac_simps)
lemma of_nat_mult [simp]: "of_nat (m * n) = of_nat m * of_nat n"
by (induct m) (simp_all add: ac_simps distrib_right)
lemma mult_of_nat_commute: "of_nat x * y = y * of_nat x"
by (induct x) (simp_all add: algebra_simps)
primrec of_nat_aux :: "('a \<Rightarrow> 'a) \<Rightarrow> nat \<Rightarrow> 'a \<Rightarrow> 'a"
where
"of_nat_aux inc 0 i = i"
| "of_nat_aux inc (Suc n) i = of_nat_aux inc n (inc i)" \<comment> \<open>tail recursive\<close>
lemma of_nat_code: "of_nat n = of_nat_aux (\<lambda>i. i + 1) n 0"
proof (induct n)
case 0
then show ?case by simp
next
case (Suc n)
have "\<And>i. of_nat_aux (\<lambda>i. i + 1) n (i + 1) = of_nat_aux (\<lambda>i. i + 1) n i + 1"
by (induct n) simp_all
from this [of 0] have "of_nat_aux (\<lambda>i. i + 1) n 1 = of_nat_aux (\<lambda>i. i + 1) n 0 + 1"
by simp
with Suc show ?case
by (simp add: add.commute)
qed
lemma of_nat_of_bool [simp]:
"of_nat (of_bool P) = of_bool P"
by auto
end
declare of_nat_code [code]
context semiring_1_cancel
begin
lemma of_nat_diff:
\<open>of_nat (m - n) = of_nat m - of_nat n\<close> if \<open>n \<le> m\<close>
proof -
from that obtain q where \<open>m = n + q\<close>
by (blast dest: le_Suc_ex)
then show ?thesis
by simp
qed
end
text \<open>Class for unital semirings with characteristic zero.
Includes non-ordered rings like the complex numbers.\<close>
class semiring_char_0 = semiring_1 +
assumes inj_of_nat: "inj of_nat"
begin
lemma of_nat_eq_iff [simp]: "of_nat m = of_nat n \<longleftrightarrow> m = n"
by (auto intro: inj_of_nat injD)
text \<open>Special cases where either operand is zero\<close>
lemma of_nat_0_eq_iff [simp]: "0 = of_nat n \<longleftrightarrow> 0 = n"
by (fact of_nat_eq_iff [of 0 n, unfolded of_nat_0])
lemma of_nat_eq_0_iff [simp]: "of_nat m = 0 \<longleftrightarrow> m = 0"
by (fact of_nat_eq_iff [of m 0, unfolded of_nat_0])
lemma of_nat_1_eq_iff [simp]: "1 = of_nat n \<longleftrightarrow> n=1"
using of_nat_eq_iff by fastforce
lemma of_nat_eq_1_iff [simp]: "of_nat n = 1 \<longleftrightarrow> n=1"
using of_nat_eq_iff by fastforce
lemma of_nat_neq_0 [simp]: "of_nat (Suc n) \<noteq> 0"
unfolding of_nat_eq_0_iff by simp
lemma of_nat_0_neq [simp]: "0 \<noteq> of_nat (Suc n)"
unfolding of_nat_0_eq_iff by simp
end
class ring_char_0 = ring_1 + semiring_char_0
context linordered_nonzero_semiring
begin
lemma of_nat_0_le_iff [simp]: "0 \<le> of_nat n"
by (induct n) simp_all
lemma of_nat_less_0_iff [simp]: "\<not> of_nat m < 0"
by (simp add: not_less)
lemma of_nat_mono[simp]: "i \<le> j \<Longrightarrow> of_nat i \<le> of_nat j"
by (auto simp: le_iff_add intro!: add_increasing2)
lemma of_nat_less_iff [simp]: "of_nat m < of_nat n \<longleftrightarrow> m < n"
proof(induct m n rule: diff_induct)
case (1 m) then show ?case
by auto
next
case (2 n) then show ?case
by (simp add: add_pos_nonneg)
next
case (3 m n)
then show ?case
by (auto simp: add_commute [of 1] add_mono1 not_less add_right_mono leD)
qed
lemma of_nat_le_iff [simp]: "of_nat m \<le> of_nat n \<longleftrightarrow> m \<le> n"
by (simp add: not_less [symmetric] linorder_not_less [symmetric])
lemma less_imp_of_nat_less: "m < n \<Longrightarrow> of_nat m < of_nat n"
by simp
lemma of_nat_less_imp_less: "of_nat m < of_nat n \<Longrightarrow> m < n"
by simp
text \<open>Every \<open>linordered_nonzero_semiring\<close> has characteristic zero.\<close>
subclass semiring_char_0
by standard (auto intro!: injI simp add: order.eq_iff)
text \<open>Special cases where either operand is zero\<close>
lemma of_nat_le_0_iff [simp]: "of_nat m \<le> 0 \<longleftrightarrow> m = 0"
by (rule of_nat_le_iff [of _ 0, simplified])
lemma of_nat_0_less_iff [simp]: "0 < of_nat n \<longleftrightarrow> 0 < n"
by (rule of_nat_less_iff [of 0, simplified])
end
context linordered_nonzero_semiring
begin
lemma of_nat_max: "of_nat (max x y) = max (of_nat x) (of_nat y)"
by (auto simp: max_def ord_class.max_def)
lemma of_nat_min: "of_nat (min x y) = min (of_nat x) (of_nat y)"
by (auto simp: min_def ord_class.min_def)
end
context linordered_semidom
begin
subclass linordered_nonzero_semiring ..
subclass semiring_char_0 ..
end
context linordered_idom
begin
lemma abs_of_nat [simp]:
"\<bar>of_nat n\<bar> = of_nat n"
by (simp add: abs_if)
lemma sgn_of_nat [simp]:
"sgn (of_nat n) = of_bool (n > 0)"
by simp
end
lemma of_nat_id [simp]: "of_nat n = n"
by (induct n) simp_all
lemma of_nat_eq_id [simp]: "of_nat = id"
by (auto simp add: fun_eq_iff)
subsection \<open>The set of natural numbers\<close>
context semiring_1
begin
definition Nats :: "'a set" ("\<nat>")
where "\<nat> = range of_nat"
lemma of_nat_in_Nats [simp]: "of_nat n \<in> \<nat>"
by (simp add: Nats_def)
lemma Nats_0 [simp]: "0 \<in> \<nat>"
using of_nat_0 [symmetric] unfolding Nats_def
by (rule range_eqI)
lemma Nats_1 [simp]: "1 \<in> \<nat>"
using of_nat_1 [symmetric] unfolding Nats_def
by (rule range_eqI)
lemma Nats_add [simp]: "a \<in> \<nat> \<Longrightarrow> b \<in> \<nat> \<Longrightarrow> a + b \<in> \<nat>"
unfolding Nats_def using of_nat_add [symmetric]
by (blast intro: range_eqI)
lemma Nats_mult [simp]: "a \<in> \<nat> \<Longrightarrow> b \<in> \<nat> \<Longrightarrow> a * b \<in> \<nat>"
unfolding Nats_def using of_nat_mult [symmetric]
by (blast intro: range_eqI)
lemma Nats_cases [cases set: Nats]:
assumes "x \<in> \<nat>"
obtains (of_nat) n where "x = of_nat n"
unfolding Nats_def
proof -
from \<open>x \<in> \<nat>\<close> have "x \<in> range of_nat" unfolding Nats_def .
then obtain n where "x = of_nat n" ..
then show thesis ..
qed
lemma Nats_induct [case_names of_nat, induct set: Nats]: "x \<in> \<nat> \<Longrightarrow> (\<And>n. P (of_nat n)) \<Longrightarrow> P x"
by (rule Nats_cases) auto
end
lemma Nats_diff [simp]:
fixes a:: "'a::linordered_idom"
assumes "a \<in> \<nat>" "b \<in> \<nat>" "b \<le> a" shows "a - b \<in> \<nat>"
proof -
obtain i where i: "a = of_nat i"
using Nats_cases assms by blast
obtain j where j: "b = of_nat j"
using Nats_cases assms by blast
have "j \<le> i"
using \<open>b \<le> a\<close> i j of_nat_le_iff by blast
then have *: "of_nat i - of_nat j = (of_nat (i-j) :: 'a)"
by (simp add: of_nat_diff)
then show ?thesis
by (simp add: * i j)
qed
subsection \<open>Further arithmetic facts concerning the natural numbers\<close>
lemma subst_equals:
assumes "t = s" and "u = t"
shows "u = s"
using assms(2,1) by (rule trans)
locale nat_arith
begin
lemma add1: "(A::'a::comm_monoid_add) \<equiv> k + a \<Longrightarrow> A + b \<equiv> k + (a + b)"
by (simp only: ac_simps)
lemma add2: "(B::'a::comm_monoid_add) \<equiv> k + b \<Longrightarrow> a + B \<equiv> k + (a + b)"
by (simp only: ac_simps)
lemma suc1: "A == k + a \<Longrightarrow> Suc A \<equiv> k + Suc a"
by (simp only: add_Suc_right)
lemma rule0: "(a::'a::comm_monoid_add) \<equiv> a + 0"
by (simp only: add_0_right)
end
ML_file \<open>Tools/nat_arith.ML\<close>
simproc_setup nateq_cancel_sums
("(l::nat) + m = n" | "(l::nat) = m + n" | "Suc m = n" | "m = Suc n") =
- \<open>fn phi => try o Nat_Arith.cancel_eq_conv\<close>
+ \<open>K (try o Nat_Arith.cancel_eq_conv)\<close>
simproc_setup natless_cancel_sums
("(l::nat) + m < n" | "(l::nat) < m + n" | "Suc m < n" | "m < Suc n") =
- \<open>fn phi => try o Nat_Arith.cancel_less_conv\<close>
+ \<open>K (try o Nat_Arith.cancel_less_conv)\<close>
simproc_setup natle_cancel_sums
("(l::nat) + m \<le> n" | "(l::nat) \<le> m + n" | "Suc m \<le> n" | "m \<le> Suc n") =
- \<open>fn phi => try o Nat_Arith.cancel_le_conv\<close>
+ \<open>K (try o Nat_Arith.cancel_le_conv)\<close>
simproc_setup natdiff_cancel_sums
("(l::nat) + m - n" | "(l::nat) - (m + n)" | "Suc m - n" | "m - Suc n") =
- \<open>fn phi => try o Nat_Arith.cancel_diff_conv\<close>
+ \<open>K (try o Nat_Arith.cancel_diff_conv)\<close>
context order
begin
lemma lift_Suc_mono_le:
assumes mono: "\<And>n. f n \<le> f (Suc n)"
and "n \<le> n'"
shows "f n \<le> f n'"
proof (cases "n < n'")
case True
then show ?thesis
by (induct n n' rule: less_Suc_induct) (auto intro: mono)
next
case False
with \<open>n \<le> n'\<close> show ?thesis by auto
qed
lemma lift_Suc_antimono_le:
assumes mono: "\<And>n. f n \<ge> f (Suc n)"
and "n \<le> n'"
shows "f n \<ge> f n'"
proof (cases "n < n'")
case True
then show ?thesis
by (induct n n' rule: less_Suc_induct) (auto intro: mono)
next
case False
with \<open>n \<le> n'\<close> show ?thesis by auto
qed
lemma lift_Suc_mono_less:
assumes mono: "\<And>n. f n < f (Suc n)"
and "n < n'"
shows "f n < f n'"
using \<open>n < n'\<close> by (induct n n' rule: less_Suc_induct) (auto intro: mono)
lemma lift_Suc_mono_less_iff: "(\<And>n. f n < f (Suc n)) \<Longrightarrow> f n < f m \<longleftrightarrow> n < m"
by (blast intro: less_asym' lift_Suc_mono_less [of f]
dest: linorder_not_less[THEN iffD1] le_eq_less_or_eq [THEN iffD1])
end
lemma mono_iff_le_Suc: "mono f \<longleftrightarrow> (\<forall>n. f n \<le> f (Suc n))"
unfolding mono_def by (auto intro: lift_Suc_mono_le [of f])
lemma antimono_iff_le_Suc: "antimono f \<longleftrightarrow> (\<forall>n. f (Suc n) \<le> f n)"
unfolding antimono_def by (auto intro: lift_Suc_antimono_le [of f])
+lemma strict_mono_Suc_iff: "strict_mono f \<longleftrightarrow> (\<forall>n. f n < f (Suc n))"
+proof (intro iffI strict_monoI)
+ assume *: "\<forall>n. f n < f (Suc n)"
+ fix m n :: nat assume "m < n"
+ thus "f m < f n"
+ by (induction rule: less_Suc_induct) (use * in auto)
+qed (auto simp: strict_mono_def)
+
+lemma strict_mono_add: "strict_mono (\<lambda>n::'a::linordered_semidom. n + k)"
+ by (auto simp: strict_mono_def)
+
lemma mono_nat_linear_lb:
fixes f :: "nat \<Rightarrow> nat"
assumes "\<And>m n. m < n \<Longrightarrow> f m < f n"
shows "f m + k \<le> f (m + k)"
proof (induct k)
case 0
then show ?case by simp
next
case (Suc k)
then have "Suc (f m + k) \<le> Suc (f (m + k))" by simp
also from assms [of "m + k" "Suc (m + k)"] have "Suc (f (m + k)) \<le> f (Suc (m + k))"
by (simp add: Suc_le_eq)
finally show ?case by simp
qed
text \<open>Subtraction laws, mostly by Clemens Ballarin\<close>
lemma diff_less_mono:
fixes a b c :: nat
assumes "a < b" and "c \<le> a"
shows "a - c < b - c"
proof -
from assms obtain d e where "b = c + (d + e)" and "a = c + e" and "d > 0"
by (auto dest!: le_Suc_ex less_imp_Suc_add simp add: ac_simps)
then show ?thesis by simp
qed
lemma less_diff_conv: "i < j - k \<longleftrightarrow> i + k < j"
for i j k :: nat
by (cases "k \<le> j") (auto simp add: not_le dest: less_imp_Suc_add le_Suc_ex)
lemma less_diff_conv2: "k \<le> j \<Longrightarrow> j - k < i \<longleftrightarrow> j < i + k"
for j k i :: nat
by (auto dest: le_Suc_ex)
lemma le_diff_conv: "j - k \<le> i \<longleftrightarrow> j \<le> i + k"
for j k i :: nat
by (cases "k \<le> j") (auto simp add: not_le dest!: less_imp_Suc_add le_Suc_ex)
lemma diff_diff_cancel [simp]: "i \<le> n \<Longrightarrow> n - (n - i) = i"
for i n :: nat
by (auto dest: le_Suc_ex)
lemma diff_less [simp]: "0 < n \<Longrightarrow> 0 < m \<Longrightarrow> m - n < m"
for m n :: nat
by (auto dest: less_imp_Suc_add)
text \<open>Simplification of relational expressions involving subtraction\<close>
lemma diff_diff_eq: "k \<le> m \<Longrightarrow> k \<le> n \<Longrightarrow> m - k - (n - k) = m - n"
for m n k :: nat
by (auto dest!: le_Suc_ex)
hide_fact (open) diff_diff_eq
lemma eq_diff_iff: "k \<le> m \<Longrightarrow> k \<le> n \<Longrightarrow> m - k = n - k \<longleftrightarrow> m = n"
for m n k :: nat
by (auto dest: le_Suc_ex)
lemma less_diff_iff: "k \<le> m \<Longrightarrow> k \<le> n \<Longrightarrow> m - k < n - k \<longleftrightarrow> m < n"
for m n k :: nat
by (auto dest!: le_Suc_ex)
lemma le_diff_iff: "k \<le> m \<Longrightarrow> k \<le> n \<Longrightarrow> m - k \<le> n - k \<longleftrightarrow> m \<le> n"
for m n k :: nat
by (auto dest!: le_Suc_ex)
lemma le_diff_iff': "a \<le> c \<Longrightarrow> b \<le> c \<Longrightarrow> c - a \<le> c - b \<longleftrightarrow> b \<le> a"
for a b c :: nat
by (force dest: le_Suc_ex)
text \<open>(Anti)Monotonicity of subtraction -- by Stephan Merz\<close>
lemma diff_le_mono: "m \<le> n \<Longrightarrow> m - l \<le> n - l"
for m n l :: nat
by (auto dest: less_imp_le less_imp_Suc_add split: nat_diff_split)
lemma diff_le_mono2: "m \<le> n \<Longrightarrow> l - n \<le> l - m"
for m n l :: nat
by (auto dest: less_imp_le le_Suc_ex less_imp_Suc_add less_le_trans split: nat_diff_split)
lemma diff_less_mono2: "m < n \<Longrightarrow> m < l \<Longrightarrow> l - n < l - m"
for m n l :: nat
by (auto dest: less_imp_Suc_add split: nat_diff_split)
lemma diffs0_imp_equal: "m - n = 0 \<Longrightarrow> n - m = 0 \<Longrightarrow> m = n"
for m n :: nat
by (simp split: nat_diff_split)
lemma min_diff: "min (m - i) (n - i) = min m n - i"
for m n i :: nat
by (cases m n rule: le_cases)
(auto simp add: not_le min.absorb1 min.absorb2 min.absorb_iff1 [symmetric] diff_le_mono)
lemma inj_on_diff_nat:
fixes k :: nat
assumes "\<And>n. n \<in> N \<Longrightarrow> k \<le> n"
shows "inj_on (\<lambda>n. n - k) N"
proof (rule inj_onI)
fix x y
assume a: "x \<in> N" "y \<in> N" "x - k = y - k"
with assms have "x - k + k = y - k + k" by auto
with a assms show "x = y" by (auto simp add: eq_diff_iff)
qed
text \<open>Rewriting to pull differences out\<close>
lemma diff_diff_right [simp]: "k \<le> j \<Longrightarrow> i - (j - k) = i + k - j"
for i j k :: nat
by (fact diff_diff_right)
lemma diff_Suc_diff_eq1 [simp]:
assumes "k \<le> j"
shows "i - Suc (j - k) = i + k - Suc j"
proof -
from assms have *: "Suc (j - k) = Suc j - k"
by (simp add: Suc_diff_le)
from assms have "k \<le> Suc j"
by (rule order_trans) simp
with diff_diff_right [of k "Suc j" i] * show ?thesis
by simp
qed
lemma diff_Suc_diff_eq2 [simp]:
assumes "k \<le> j"
shows "Suc (j - k) - i = Suc j - (k + i)"
proof -
from assms obtain n where "j = k + n"
by (auto dest: le_Suc_ex)
moreover have "Suc n - i = (k + Suc n) - (k + i)"
using add_diff_cancel_left [of k "Suc n" i] by simp
ultimately show ?thesis by simp
qed
lemma Suc_diff_Suc:
assumes "n < m"
shows "Suc (m - Suc n) = m - n"
proof -
from assms obtain q where "m = n + Suc q"
by (auto dest: less_imp_Suc_add)
moreover define r where "r = Suc q"
ultimately have "Suc (m - Suc n) = r" and "m = n + r"
by simp_all
then show ?thesis by simp
qed
lemma one_less_mult: "Suc 0 < n \<Longrightarrow> Suc 0 < m \<Longrightarrow> Suc 0 < m * n"
using less_1_mult [of n m] by (simp add: ac_simps)
lemma n_less_m_mult_n: "0 < n \<Longrightarrow> Suc 0 < m \<Longrightarrow> n < m * n"
using mult_strict_right_mono [of 1 m n] by simp
lemma n_less_n_mult_m: "0 < n \<Longrightarrow> Suc 0 < m \<Longrightarrow> n < n * m"
using mult_strict_left_mono [of 1 m n] by simp
text \<open>Induction starting beyond zero\<close>
lemma nat_induct_at_least [consumes 1, case_names base Suc]:
"P n" if "n \<ge> m" "P m" "\<And>n. n \<ge> m \<Longrightarrow> P n \<Longrightarrow> P (Suc n)"
proof -
define q where "q = n - m"
with \<open>n \<ge> m\<close> have "n = m + q"
by simp
moreover have "P (m + q)"
by (induction q) (use that in simp_all)
ultimately show "P n"
by simp
qed
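text \<open>Added illustration (a sketch, not from the original theory): a statement that is of course
provable directly, used here only to display the case structure of the rule above.\<close>
lemma
  fixes n :: nat
  assumes "n \<ge> Suc 0"
  shows "0 < n"
  using assms
proof (induction rule: nat_induct_at_least)
  case base
  show ?case by simp
next
  case (Suc n)
  then show ?case by simp
qed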
lemma nat_induct_non_zero [consumes 1, case_names 1 Suc]:
"P n" if "n > 0" "P 1" "\<And>n. n > 0 \<Longrightarrow> P n \<Longrightarrow> P (Suc n)"
proof -
from \<open>n > 0\<close> have "n \<ge> 1"
by (cases n) simp_all
moreover note \<open>P 1\<close>
moreover have "\<And>n. n \<ge> 1 \<Longrightarrow> P n \<Longrightarrow> P (Suc n)"
using \<open>\<And>n. n > 0 \<Longrightarrow> P n \<Longrightarrow> P (Suc n)\<close>
by (simp add: Suc_le_eq)
ultimately show "P n"
by (rule nat_induct_at_least)
qed
text \<open>Specialized induction principles that work "backwards":\<close>
lemma inc_induct [consumes 1, case_names base step]:
assumes less: "i \<le> j"
and base: "P j"
and step: "\<And>n. i \<le> n \<Longrightarrow> n < j \<Longrightarrow> P (Suc n) \<Longrightarrow> P n"
shows "P i"
using less step
proof (induct "j - i" arbitrary: i)
case (0 i)
then have "i = j" by simp
with base show ?case by simp
next
case (Suc d n)
from Suc.hyps have "n \<noteq> j" by auto
with Suc have "n < j" by (simp add: less_le)
from \<open>Suc d = j - n\<close> have "d + 1 = j - n" by simp
then have "d + 1 - 1 = j - n - 1" by simp
then have "d = j - n - 1" by simp
then have "d = j - (n + 1)" by (simp add: diff_diff_eq)
then have "d = j - Suc n" by simp
moreover from \<open>n < j\<close> have "Suc n \<le> j" by (simp add: Suc_le_eq)
ultimately have "P (Suc n)"
proof (rule Suc.hyps)
fix q
assume "Suc n \<le> q"
then have "n \<le> q" by (simp add: Suc_le_eq less_imp_le)
moreover assume "q < j"
moreover assume "P (Suc q)"
ultimately show "P q" by (rule Suc.prems)
qed
with order_refl \<open>n < j\<close> show "P n" by (rule Suc.prems)
qed
lemma strict_inc_induct [consumes 1, case_names base step]:
assumes less: "i < j"
and base: "\<And>i. j = Suc i \<Longrightarrow> P i"
and step: "\<And>i. i < j \<Longrightarrow> P (Suc i) \<Longrightarrow> P i"
shows "P i"
using less proof (induct "j - i - 1" arbitrary: i)
case (0 i)
from \<open>i < j\<close> obtain n where "j = i + n" and "n > 0"
by (auto dest!: less_imp_Suc_add)
with 0 have "j = Suc i"
by (auto intro: order_antisym simp add: Suc_le_eq)
with base show ?case by simp
next
case (Suc d i)
from \<open>Suc d = j - i - 1\<close> have *: "Suc d = j - Suc i"
by (simp add: diff_diff_add)
then have "Suc d - 1 = j - Suc i - 1" by simp
then have "d = j - Suc i - 1" by simp
moreover from * have "j - Suc i \<noteq> 0" by auto
then have "Suc i < j" by (simp add: not_le)
ultimately have "P (Suc i)" by (rule Suc.hyps)
with \<open>i < j\<close> show "P i" by (rule step)
qed
lemma zero_induct_lemma: "P k \<Longrightarrow> (\<And>n. P (Suc n) \<Longrightarrow> P n) \<Longrightarrow> P (k - i)"
using inc_induct[of "k - i" k P, simplified] by blast
lemma zero_induct: "P k \<Longrightarrow> (\<And>n. P (Suc n) \<Longrightarrow> P n) \<Longrightarrow> P 0"
using inc_induct[of 0 k P] by blast
text \<open>Further induction rule similar to @{thm inc_induct}.\<close>
lemma dec_induct [consumes 1, case_names base step]:
"i \<le> j \<Longrightarrow> P i \<Longrightarrow> (\<And>n. i \<le> n \<Longrightarrow> n < j \<Longrightarrow> P n \<Longrightarrow> P (Suc n)) \<Longrightarrow> P j"
proof (induct j arbitrary: i)
case 0
then show ?case by simp
next
case (Suc j)
from Suc.prems consider "i \<le> j" | "i = Suc j"
by (auto simp add: le_Suc_eq)
then show ?case
proof cases
case 1
moreover have "j < Suc j" by simp
moreover have "P j" using \<open>i \<le> j\<close> \<open>P i\<close>
proof (rule Suc.hyps)
fix q
assume "i \<le> q"
moreover assume "q < j" then have "q < Suc j"
by (simp add: less_Suc_eq)
moreover assume "P q"
ultimately show "P (Suc q)" by (rule Suc.prems)
qed
ultimately show "P (Suc j)" by (rule Suc.prems)
next
case 2
with \<open>P i\<close> show "P (Suc j)" by simp
qed
qed
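text \<open>Added illustration (a sketch, not from the original theory): \<open>dec_induct\<close> re-derives
\<open>i \<le> j \<Longrightarrow> i + (j - i) = j\<close>, a fact also available more directly; the step case assumes
\<open>Suc_diff_le\<close> proved above.\<close>
lemma
  fixes i j :: nat
  assumes "i \<le> j"
  shows "i + (j - i) = j"
  using assms
proof (induction rule: dec_induct)
  case base
  show ?case by simp
next
  case (step n)
  then show ?case by (simp add: Suc_diff_le)
qed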
lemma transitive_stepwise_le:
assumes "m \<le> n" "\<And>x. R x x" "\<And>x y z. R x y \<Longrightarrow> R y z \<Longrightarrow> R x z" and "\<And>n. R n (Suc n)"
shows "R m n"
using \<open>m \<le> n\<close>
by (induction rule: dec_induct) (use assms in blast)+
subsubsection \<open>Greatest operator\<close>
lemma ex_has_greatest_nat:
"P (k::nat) \<Longrightarrow> \<forall>y. P y \<longrightarrow> y \<le> b \<Longrightarrow> \<exists>x. P x \<and> (\<forall>y. P y \<longrightarrow> y \<le> x)"
proof (induction "b-k" arbitrary: b k rule: less_induct)
case less
show ?case
proof cases
assume "\<exists>n>k. P n"
then obtain n where "n>k" "P n" by blast
have "n \<le> b" using \<open>P n\<close> less.prems(2) by auto
hence "b-n < b-k"
by(rule diff_less_mono2[OF \<open>k<n\<close> less_le_trans[OF \<open>k<n\<close>]])
from less.hyps[OF this \<open>P n\<close> less.prems(2)]
show ?thesis .
next
assume "\<not> (\<exists>n>k. P n)"
hence "\<forall>y. P y \<longrightarrow> y \<le> k" by (auto simp: not_less)
thus ?thesis using less.prems(1) by auto
qed
qed
lemma
fixes k::nat
assumes "P k" and minor: "\<And>y. P y \<Longrightarrow> y \<le> b"
shows GreatestI_nat: "P (Greatest P)"
and Greatest_le_nat: "k \<le> Greatest P"
proof -
obtain x where "P x" "\<And>y. P y \<Longrightarrow> y \<le> x"
using assms ex_has_greatest_nat by blast
with \<open>P k\<close> show "P (Greatest P)" "k \<le> Greatest P"
using GreatestI2_order by blast+
qed
lemma GreatestI_ex_nat:
"\<lbrakk> \<exists>k::nat. P k; \<And>y. P y \<Longrightarrow> y \<le> b \<rbrakk> \<Longrightarrow> P (Greatest P)"
by (blast intro: GreatestI_nat)
subsection \<open>Monotonicity of \<open>funpow\<close>\<close>
lemma funpow_increasing: "m \<le> n \<Longrightarrow> mono f \<Longrightarrow> (f ^^ n) \<top> \<le> (f ^^ m) \<top>"
for f :: "'a::{lattice,order_top} \<Rightarrow> 'a"
by (induct rule: inc_induct)
(auto simp del: funpow.simps(2) simp add: funpow_Suc_right
intro: order_trans[OF _ funpow_mono])
lemma funpow_decreasing: "m \<le> n \<Longrightarrow> mono f \<Longrightarrow> (f ^^ m) \<bottom> \<le> (f ^^ n) \<bottom>"
for f :: "'a::{lattice,order_bot} \<Rightarrow> 'a"
by (induct rule: dec_induct)
(auto simp del: funpow.simps(2) simp add: funpow_Suc_right
intro: order_trans[OF _ funpow_mono])
lemma mono_funpow: "mono Q \<Longrightarrow> mono (\<lambda>i. (Q ^^ i) \<bottom>)"
for Q :: "'a::{lattice,order_bot} \<Rightarrow> 'a"
by (auto intro!: funpow_decreasing simp: mono_def)
lemma antimono_funpow: "mono Q \<Longrightarrow> antimono (\<lambda>i. (Q ^^ i) \<top>)"
for Q :: "'a::{lattice,order_top} \<Rightarrow> 'a"
by (auto intro!: funpow_increasing simp: antimono_def)
subsection \<open>The divides relation on \<^typ>\<open>nat\<close>\<close>
lemma dvd_1_left [iff]: "Suc 0 dvd k"
by (simp add: dvd_def)
lemma dvd_1_iff_1 [simp]: "m dvd Suc 0 \<longleftrightarrow> m = Suc 0"
by (simp add: dvd_def)
lemma nat_dvd_1_iff_1 [simp]: "m dvd 1 \<longleftrightarrow> m = 1"
for m :: nat
by (simp add: dvd_def)
lemma dvd_antisym: "m dvd n \<Longrightarrow> n dvd m \<Longrightarrow> m = n"
for m n :: nat
unfolding dvd_def by (force dest: mult_eq_self_implies_10 simp add: mult.assoc)
lemma dvd_diff_nat [simp]: "k dvd m \<Longrightarrow> k dvd n \<Longrightarrow> k dvd (m - n)"
for k m n :: nat
unfolding dvd_def by (blast intro: right_diff_distrib' [symmetric])
lemma dvd_diffD:
fixes k m n :: nat
assumes "k dvd m - n" "k dvd n" "n \<le> m"
shows "k dvd m"
proof -
have "k dvd n + (m - n)"
using assms by (blast intro: dvd_add)
with assms show ?thesis
by simp
qed
lemma dvd_diffD1: "k dvd m - n \<Longrightarrow> k dvd m \<Longrightarrow> n \<le> m \<Longrightarrow> k dvd n"
for k m n :: nat
by (drule_tac m = m in dvd_diff_nat) auto
lemma dvd_mult_cancel:
fixes m n k :: nat
assumes "k * m dvd k * n" and "0 < k"
shows "m dvd n"
proof -
from assms(1) obtain q where "k * n = (k * m) * q" ..
then have "k * n = k * (m * q)" by (simp add: ac_simps)
with \<open>0 < k\<close> have "n = m * q" by (auto simp add: mult_left_cancel)
then show ?thesis ..
qed
lemma dvd_mult_cancel1:
fixes m n :: nat
assumes "0 < m"
shows "m * n dvd m \<longleftrightarrow> n = 1"
proof
assume "m * n dvd m"
then have "m * n dvd m * 1"
by simp
then have "n dvd 1"
by (iprover intro: assms dvd_mult_cancel)
then show "n = 1"
by auto
qed auto
lemma dvd_mult_cancel2: "0 < m \<Longrightarrow> n * m dvd m \<longleftrightarrow> n = 1"
for m n :: nat
using dvd_mult_cancel1 [of m n] by (simp add: ac_simps)
lemma dvd_imp_le: "k dvd n \<Longrightarrow> 0 < n \<Longrightarrow> k \<le> n"
for k n :: nat
by (auto elim!: dvdE) (auto simp add: gr0_conv_Suc)
lemma nat_dvd_not_less: "0 < m \<Longrightarrow> m < n \<Longrightarrow> \<not> n dvd m"
for m n :: nat
by (auto elim!: dvdE) (auto simp add: gr0_conv_Suc)
lemma less_eq_dvd_minus:
fixes m n :: nat
assumes "m \<le> n"
shows "m dvd n \<longleftrightarrow> m dvd n - m"
proof -
from assms have "n = m + (n - m)" by simp
then obtain q where "n = m + q" ..
then show ?thesis by (simp add: add.commute [of m])
qed
lemma dvd_minus_self: "m dvd n - m \<longleftrightarrow> n < m \<or> m dvd n"
for m n :: nat
by (cases "n < m") (auto elim!: dvdE simp add: not_less le_imp_diff_is_add dest: less_imp_le)
lemma dvd_minus_add:
fixes m n q r :: nat
assumes "q \<le> n" "q \<le> r * m"
shows "m dvd n - q \<longleftrightarrow> m dvd n + (r * m - q)"
proof -
have "m dvd n - q \<longleftrightarrow> m dvd r * m + (n - q)"
using dvd_add_times_triv_left_iff [of m r] by simp
also from assms have "\<dots> \<longleftrightarrow> m dvd r * m + n - q" by simp
also from assms have "\<dots> \<longleftrightarrow> m dvd (r * m - q) + n" by simp
also have "\<dots> \<longleftrightarrow> m dvd n + (r * m - q)" by (simp add: add.commute)
finally show ?thesis .
qed
subsection \<open>Aliases\<close>
lemma nat_mult_1: "1 * n = n"
for n :: nat
by (fact mult_1_left)
lemma nat_mult_1_right: "n * 1 = n"
for n :: nat
by (fact mult_1_right)
lemma diff_mult_distrib: "(m - n) * k = (m * k) - (n * k)"
for k m n :: nat
by (fact left_diff_distrib')
lemma diff_mult_distrib2: "k * (m - n) = (k * m) - (k * n)"
for k m n :: nat
by (fact right_diff_distrib')
(*Used in AUTO2; Groups.le_diff_conv2 (with variables renamed) does not work here for some reason*)
lemma le_diff_conv2: "k \<le> j \<Longrightarrow> (i \<le> j - k) = (i + k \<le> j)"
for i j k :: nat
by (fact le_diff_conv2)
lemma diff_self_eq_0 [simp]: "m - m = 0"
for m :: nat
by (fact diff_cancel)
lemma diff_diff_left [simp]: "i - j - k = i - (j + k)"
for i j k :: nat
by (fact diff_diff_add)
lemma diff_commute: "i - j - k = i - k - j"
for i j k :: nat
by (fact diff_right_commute)
lemma diff_add_inverse: "(n + m) - n = m"
for m n :: nat
by (fact add_diff_cancel_left')
lemma diff_add_inverse2: "(m + n) - n = m"
for m n :: nat
by (fact add_diff_cancel_right')
lemma diff_cancel: "(k + m) - (k + n) = m - n"
for k m n :: nat
by (fact add_diff_cancel_left)
lemma diff_cancel2: "(m + k) - (n + k) = m - n"
for k m n :: nat
by (fact add_diff_cancel_right)
lemma diff_add_0: "n - (n + m) = 0"
for m n :: nat
by (fact diff_add_zero)
lemma add_mult_distrib2: "k * (m + n) = (k * m) + (k * n)"
for k m n :: nat
by (fact distrib_left)
lemmas nat_distrib =
add_mult_distrib distrib_left diff_mult_distrib diff_mult_distrib2
subsection \<open>Size of a datatype value\<close>
class size =
fixes size :: "'a \<Rightarrow> nat" \<comment> \<open>see further theory \<open>Wellfounded\<close>\<close>
instantiation nat :: size
begin
definition size_nat where [simp, code]: "size (n::nat) = n"
instance ..
end
lemmas size_nat = size_nat_def
lemma size_neq_size_imp_neq: "size x \<noteq> size y \<Longrightarrow> x \<noteq> y"
by (erule contrapos_nn) (rule arg_cong)
subsection \<open>Code module namespace\<close>
code_identifier
code_module Nat \<rightharpoonup> (SML) Arith and (OCaml) Arith and (Haskell) Arith
hide_const (open) of_nat_aux
end
diff --git a/src/HOL/Nominal/nominal_inductive.ML b/src/HOL/Nominal/nominal_inductive.ML
--- a/src/HOL/Nominal/nominal_inductive.ML
+++ b/src/HOL/Nominal/nominal_inductive.ML
@@ -1,698 +1,698 @@
(* Title: HOL/Nominal/nominal_inductive.ML
Author: Stefan Berghofer, TU Muenchen
Infrastructure for proving equivariance and strong induction theorems
for inductive predicates involving nominal datatypes.
*)
signature NOMINAL_INDUCTIVE =
sig
val prove_strong_ind: string -> (string * string list) list -> local_theory -> Proof.state
val prove_eqvt: string -> string list -> local_theory -> local_theory
end
structure NominalInductive : NOMINAL_INDUCTIVE =
struct
val inductive_forall_def = @{thm HOL.induct_forall_def};
val inductive_atomize = @{thms induct_atomize};
val inductive_rulify = @{thms induct_rulify};
fun rulify_term thy = Raw_Simplifier.rewrite_term thy inductive_rulify [];
fun atomize_conv ctxt =
Raw_Simplifier.rewrite_cterm (true, false, false) (K (K NONE))
(put_simpset HOL_basic_ss ctxt addsimps inductive_atomize);
fun atomize_intr ctxt = Conv.fconv_rule (Conv.prems_conv ~1 (atomize_conv ctxt));
fun atomize_induct ctxt = Conv.fconv_rule (Conv.prems_conv ~1
(Conv.params_conv ~1 (Conv.prems_conv ~1 o atomize_conv) ctxt));
fun preds_of ps t = inter (op = o apsnd dest_Free) ps (Term.add_frees t []);
val fresh_prod = @{thm fresh_prod};
val perm_bool = mk_meta_eq @{thm perm_bool_def};
val perm_boolI = @{thm perm_boolI};
val (_, [perm_boolI_pi, _]) = Drule.strip_comb (snd (Thm.dest_comb
(Drule.strip_imp_concl (Thm.cprop_of perm_boolI))));
fun mk_perm_bool ctxt pi th =
th RS infer_instantiate ctxt [(#1 (dest_Var (Thm.term_of perm_boolI_pi)), pi)] perm_boolI;
fun mk_perm_bool_simproc names =
Simplifier.make_simproc \<^context> "perm_bool"
{lhss = [\<^term>\<open>perm pi x\<close>],
proc = fn _ => fn _ => fn ct =>
(case Thm.term_of ct of
Const (\<^const_name>\<open>Nominal.perm\<close>, _) $ _ $ t =>
if member (op =) names (the_default "" (try (head_of #> dest_Const #> fst) t))
then SOME perm_bool else NONE
| _ => NONE)};
fun transp ([] :: _) = []
| transp xs = map hd xs :: transp (map tl xs);
fun add_binders thy i (t as (_ $ _)) bs = (case strip_comb t of
(Const (s, T), ts) => (case strip_type T of
(Ts, Type (tname, _)) =>
(case NominalDatatype.get_nominal_datatype thy tname of
NONE => fold (add_binders thy i) ts bs
| SOME {descr, index, ...} => (case AList.lookup op =
(#3 (the (AList.lookup op = descr index))) s of
NONE => fold (add_binders thy i) ts bs
| SOME cargs => fst (fold (fn (xs, x) => fn (bs', cargs') =>
let val (cargs1, (u, _) :: cargs2) = chop (length xs) cargs'
in (add_binders thy i u
(fold (fn (u, T) =>
if exists (fn j => j < i) (loose_bnos u) then I
else insert (op aconv o apply2 fst)
(incr_boundvars (~i) u, T)) cargs1 bs'), cargs2)
end) cargs (bs, ts ~~ Ts))))
| _ => fold (add_binders thy i) ts bs)
| (u, ts) => add_binders thy i u (fold (add_binders thy i) ts bs))
| add_binders thy i (Abs (_, _, t)) bs = add_binders thy (i + 1) t bs
| add_binders thy i _ bs = bs;
fun split_conj f names (Const (\<^const_name>\<open>HOL.conj\<close>, _) $ p $ q) _ = (case head_of p of
Const (name, _) =>
if member (op =) names name then SOME (f p q) else NONE
| _ => NONE)
| split_conj _ _ _ _ = NONE;
fun strip_all [] t = t
| strip_all (_ :: xs) (Const (\<^const_name>\<open>All\<close>, _) $ Abs (s, T, t)) = strip_all xs t;
(*********************************************************************)
(* maps R ... & (ALL pi_1 ... pi_n z. P z (pi_1 o ... o pi_n o t)) *)
(* or ALL pi_1 ... pi_n z. P z (pi_1 o ... o pi_n o t) *)
(* to R ... & id (ALL z. P z (pi_1 o ... o pi_n o t)) *)
(* or id (ALL z. P z (pi_1 o ... o pi_n o t)) *)
(* *)
(* where "id" protects the subformula from simplification *)
(*********************************************************************)
fun inst_conj_all names ps pis (Const (\<^const_name>\<open>HOL.conj\<close>, _) $ p $ q) _ =
(case head_of p of
Const (name, _) =>
if member (op =) names name then SOME (HOLogic.mk_conj (p,
Const (\<^const_name>\<open>Fun.id\<close>, HOLogic.boolT --> HOLogic.boolT) $
(subst_bounds (pis, strip_all pis q))))
else NONE
| _ => NONE)
| inst_conj_all names ps pis t u =
if member (op aconv) ps (head_of u) then
SOME (Const (\<^const_name>\<open>Fun.id\<close>, HOLogic.boolT --> HOLogic.boolT) $
(subst_bounds (pis, strip_all pis t)))
else NONE
| inst_conj_all _ _ _ _ _ = NONE;
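(* Concrete instance of the mapping documented above, for n = 1 and a supplied
permutation pi (schematic and illustrative only): the conjunct
R x & (ALL pi' z. P z (pi' o x)) becomes R x & id (ALL z. P z (pi o x)),
i.e. the quantified permutations are stripped by strip_all, instantiated by
subst_bounds with the given pis, and the result is wrapped in Fun.id so that
subsequent simplification leaves it untouched. *)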
fun inst_conj_all_tac ctxt k = EVERY
[TRY (EVERY [eresolve_tac ctxt [conjE] 1, resolve_tac ctxt [conjI] 1, assume_tac ctxt 1]),
REPEAT_DETERM_N k (eresolve_tac ctxt [allE] 1),
simp_tac (put_simpset HOL_basic_ss ctxt addsimps [@{thm id_apply}]) 1];
fun map_term f t u = (case f t u of
NONE => map_term' f t u | x => x)
and map_term' f (t $ u) (t' $ u') = (case (map_term f t t', map_term f u u') of
(NONE, NONE) => NONE
| (SOME t'', NONE) => SOME (t'' $ u)
| (NONE, SOME u'') => SOME (t $ u'')
| (SOME t'', SOME u'') => SOME (t'' $ u''))
| map_term' f (Abs (s, T, t)) (Abs (s', T', t')) = (case map_term f t t' of
NONE => NONE
| SOME t'' => SOME (Abs (s, T, t'')))
| map_term' _ _ _ = NONE;
(*********************************************************************)
(* Prove F[f t] from F[t], where F is monotone *)
(*********************************************************************)
fun map_thm ctxt f tac monos opt th =
let
val prop = Thm.prop_of th;
fun prove t =
Goal.prove ctxt [] [] t (fn {context = goal_ctxt, ...} =>
EVERY [cut_facts_tac [th] 1, eresolve_tac goal_ctxt [rev_mp] 1,
REPEAT_DETERM (FIRSTGOAL (resolve_tac goal_ctxt monos)),
REPEAT_DETERM (resolve_tac goal_ctxt [impI] 1 THEN (assume_tac goal_ctxt 1 ORELSE tac))])
in Option.map prove (map_term f prop (the_default prop opt)) end;
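(* Typical use further below: with f = split_conj (K o I) names and
tac = eresolve_tac ctxt [conjunct1] 1, a theorem containing a conjunction
P ts & Q ts (P one of the inductive predicates) under a monotone context is
mapped to the corresponding theorem with that conjunction replaced by P ts;
the context is handled by the rules in monos, the conjunct by conjunct1. *)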
val eta_contract_cterm = Thm.dest_arg o Thm.cprop_of o Thm.eta_conversion;
fun first_order_matchs pats objs = Thm.first_order_match
(eta_contract_cterm (Conjunction.mk_conjunction_balanced pats),
eta_contract_cterm (Conjunction.mk_conjunction_balanced objs));
fun first_order_mrs ths th = ths MRS
Thm.instantiate (first_order_matchs (cprems_of th) (map Thm.cprop_of ths)) th;
fun prove_strong_ind s avoids lthy =
let
val thy = Proof_Context.theory_of lthy;
val ({names, ...}, {raw_induct, intrs, elims, ...}) =
Inductive.the_inductive_global lthy (Sign.intern_const thy s);
val ind_params = Inductive.params_of raw_induct;
val raw_induct = atomize_induct lthy raw_induct;
val elims = map (atomize_induct lthy) elims;
val monos = Inductive.get_monos lthy;
val eqvt_thms = NominalThmDecls.get_eqvt_thms lthy;
val _ = (case subtract (op =) (fold (Term.add_const_names o Thm.prop_of) eqvt_thms []) names of
[] => ()
| xs => error ("Missing equivariance theorem for predicate(s): " ^
commas_quote xs));
val induct_cases = map (fst o fst) (fst (Rule_Cases.get (the
(Induct.lookup_inductP lthy (hd names)))));
val (raw_induct', ctxt') = lthy
|> yield_singleton (Variable.import_terms false) (Thm.prop_of raw_induct);
val concls = raw_induct' |> Logic.strip_imp_concl |> HOLogic.dest_Trueprop |>
HOLogic.dest_conj |> map (HOLogic.dest_imp ##> strip_comb);
val ps = map (fst o snd) concls;
val _ = (case duplicates (op = o apply2 fst) avoids of
[] => ()
| xs => error ("Duplicate case names: " ^ commas_quote (map fst xs)));
val _ = avoids |> forall (fn (a, xs) => null (duplicates (op =) xs) orelse
error ("Duplicate variable names for case " ^ quote a));
val _ = (case subtract (op =) induct_cases (map fst avoids) of
[] => ()
| xs => error ("No such case(s) in inductive definition: " ^ commas_quote xs));
val avoids' = if null induct_cases then replicate (length intrs) ("", [])
else map (fn name =>
(name, the_default [] (AList.lookup op = avoids name))) induct_cases;
fun mk_avoids params (name, ps) =
let val k = length params - 1
in map (fn x => case find_index (equal x o fst) params of
~1 => error ("No such variable in case " ^ quote name ^
" of inductive definition: " ^ quote x)
| i => (Bound (k - i), snd (nth params i))) ps
end;
val prems = map (fn (prem, avoid) =>
let
val prems = map (incr_boundvars 1) (Logic.strip_assums_hyp prem);
val concl = incr_boundvars 1 (Logic.strip_assums_concl prem);
val params = Logic.strip_params prem
in
(params,
fold (add_binders thy 0) (prems @ [concl]) [] @
map (apfst (incr_boundvars 1)) (mk_avoids params avoid),
prems, strip_comb (HOLogic.dest_Trueprop concl))
end) (Logic.strip_imp_prems raw_induct' ~~ avoids');
val atomTs = distinct op = (maps (map snd o #2) prems);
val ind_sort = if null atomTs then \<^sort>\<open>type\<close>
else Sign.minimize_sort thy (Sign.certify_sort thy (map (fn T => Sign.intern_class thy
("fs_" ^ Long_Name.base_name (fst (dest_Type T)))) atomTs));
val (fs_ctxt_tyname, _) = Name.variant "'n" (Variable.names_of ctxt');
val ([fs_ctxt_name], ctxt'') = Variable.variant_fixes ["z"] ctxt';
val fsT = TFree (fs_ctxt_tyname, ind_sort);
val inductive_forall_def' = Thm.instantiate'
[SOME (Thm.global_ctyp_of thy fsT)] [] inductive_forall_def;
fun lift_pred' t (Free (s, T)) ts =
list_comb (Free (s, fsT --> T), t :: ts);
val lift_pred = lift_pred' (Bound 0);
fun lift_prem (t as (f $ u)) =
let val (p, ts) = strip_comb t
in
if member (op =) ps p then HOLogic.mk_induct_forall fsT $
Abs ("z", fsT, lift_pred p (map (incr_boundvars 1) ts))
else lift_prem f $ lift_prem u
end
| lift_prem (Abs (s, T, t)) = Abs (s, T, lift_prem t)
| lift_prem t = t;
fun mk_distinct [] = []
| mk_distinct ((x, T) :: xs) = map_filter (fn (y, U) =>
if T = U then SOME (HOLogic.mk_Trueprop
(HOLogic.mk_not (HOLogic.eq_const T $ x $ y)))
else NONE) xs @ mk_distinct xs;
fun mk_fresh (x, T) = HOLogic.mk_Trueprop
(NominalDatatype.fresh_const T fsT $ x $ Bound 0);
val (prems', prems'') = split_list (map (fn (params, bvars, prems, (p, ts)) =>
let
val params' = params @ [("y", fsT)];
val prem = Logic.list_implies
(map mk_fresh bvars @ mk_distinct bvars @
map (fn prem =>
if null (preds_of ps prem) then prem
else lift_prem prem) prems,
HOLogic.mk_Trueprop (lift_pred p ts));
val vs = map (Var o apfst (rpair 0)) (Term.rename_wrt_term prem params')
in
(Logic.list_all (params', prem), (rev vs, subst_bounds (vs, prem)))
end) prems);
val ind_vars =
(Old_Datatype_Prop.indexify_names (replicate (length atomTs) "pi") ~~
map NominalAtoms.mk_permT atomTs) @ [("z", fsT)];
val ind_Ts = rev (map snd ind_vars);
val concl = HOLogic.mk_Trueprop (foldr1 HOLogic.mk_conj
(map (fn (prem, (p, ts)) => HOLogic.mk_imp (prem,
HOLogic.list_all (ind_vars, lift_pred p
(map (fold_rev (NominalDatatype.mk_perm ind_Ts)
(map Bound (length atomTs downto 1))) ts)))) concls));
val concl' = HOLogic.mk_Trueprop (foldr1 HOLogic.mk_conj
(map (fn (prem, (p, ts)) => HOLogic.mk_imp (prem,
lift_pred' (Free (fs_ctxt_name, fsT)) p ts)) concls));
val vc_compat = map (fn (params, bvars, prems, (p, ts)) =>
map (fn q => Logic.list_all (params, incr_boundvars ~1 (Logic.list_implies
(map_filter (fn prem =>
if null (preds_of ps prem) then SOME prem
else map_term (split_conj (K o I) names) prem prem) prems, q))))
(mk_distinct bvars @
maps (fn (t, T) => map (fn (u, U) => HOLogic.mk_Trueprop
(NominalDatatype.fresh_const U T $ u $ t)) bvars)
(ts ~~ binder_types (fastype_of p)))) prems;
val perm_pi_simp = Global_Theory.get_thms thy "perm_pi_simp";
val pt2_atoms = map (fn aT => Global_Theory.get_thm thy
("pt_" ^ Long_Name.base_name (fst (dest_Type aT)) ^ "2")) atomTs;
fun eqvt_ss ctxt =
put_simpset HOL_basic_ss ctxt
addsimps (eqvt_thms @ perm_pi_simp @ pt2_atoms)
addsimprocs [mk_perm_bool_simproc [\<^const_name>\<open>Fun.id\<close>],
NominalPermeq.perm_simproc_app, NominalPermeq.perm_simproc_fun];
val fresh_bij = Global_Theory.get_thms thy "fresh_bij";
val perm_bij = Global_Theory.get_thms thy "perm_bij";
val fs_atoms = map (fn aT => Global_Theory.get_thm thy
("fs_" ^ Long_Name.base_name (fst (dest_Type aT)) ^ "1")) atomTs;
val exists_fresh' = Global_Theory.get_thms thy "exists_fresh'";
val fresh_atm = Global_Theory.get_thms thy "fresh_atm";
val swap_simps = Global_Theory.get_thms thy "swap_simps";
val perm_fresh_fresh = Global_Theory.get_thms thy "perm_fresh_fresh";
fun obtain_fresh_name ts T (freshs1, freshs2, ctxt) =
let
(** protect terms so that fresh_prod does not interfere with the **)
(** pairs used in introduction rules of the inductive predicate **)
fun protect t =
let val T = fastype_of t in Const (\<^const_name>\<open>Fun.id\<close>, T --> T) $ t end;
val p = foldr1 HOLogic.mk_prod (map protect ts @ freshs1);
val ex = Goal.prove ctxt [] [] (HOLogic.mk_Trueprop
(HOLogic.exists_const T $ Abs ("x", T,
NominalDatatype.fresh_const T (fastype_of p) $
Bound 0 $ p)))
(fn {context = goal_ctxt, ...} => EVERY
[resolve_tac goal_ctxt exists_fresh' 1,
resolve_tac goal_ctxt fs_atoms 1]);
val (([(_, cx)], ths), ctxt') = Obtain.result
(fn goal_ctxt => EVERY
[eresolve_tac goal_ctxt [exE] 1,
full_simp_tac (put_simpset HOL_ss goal_ctxt addsimps (fresh_prod :: fresh_atm)) 1,
full_simp_tac (put_simpset HOL_basic_ss goal_ctxt addsimps [@{thm id_apply}]) 1,
REPEAT (eresolve_tac goal_ctxt [conjE] 1)])
[ex] ctxt
in (freshs1 @ [Thm.term_of cx], freshs2 @ ths, ctxt') end;
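(* Sketch of what obtain_fresh_name yields for an atom type T (term names are
illustrative): it proves EX c::T. c # (id t1, ..., id tn, a1, ..., ak) by
exists_fresh' and the corresponding fs_... instance, obtains the witness c,
and after simplification with fresh_prod returns c together with the
individual freshness facts c # t_i and c # a_j via freshs1 and freshs2. *)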
fun mk_ind_proof ctxt thss =
Goal.prove ctxt [] prems' concl' (fn {prems = ihyps, context = goal_ctxt} =>
let val th = Goal.prove goal_ctxt [] [] concl (fn {context = goal_ctxt1, ...} =>
resolve_tac goal_ctxt1 [raw_induct] 1 THEN
EVERY (maps (fn ((((_, bvars, oprems, _), vc_compat_ths), ihyp), (vs, ihypt)) =>
[REPEAT (resolve_tac goal_ctxt1 [allI] 1),
simp_tac (eqvt_ss goal_ctxt1) 1,
SUBPROOF (fn {prems = gprems, params, concl, context = goal_ctxt2, ...} =>
let
val (params', (pis, z)) =
chop (length params - length atomTs - 1) (map (Thm.term_of o #2) params) ||>
split_last;
val bvars' = map
(fn (Bound i, T) => (nth params' (length params' - i), T)
| (t, T) => (t, T)) bvars;
val pi_bvars = map (fn (t, _) =>
fold_rev (NominalDatatype.mk_perm []) pis t) bvars';
val (P, ts) = strip_comb (HOLogic.dest_Trueprop (Thm.term_of concl));
val (freshs1, freshs2, ctxt') = fold
(obtain_fresh_name (ts @ pi_bvars))
(map snd bvars') ([], [], goal_ctxt2);
val freshs2' = NominalDatatype.mk_not_sym freshs2;
val pis' = map NominalDatatype.perm_of_pair (pi_bvars ~~ freshs1);
fun concat_perm pi1 pi2 =
let val T = fastype_of pi1
in if T = fastype_of pi2 then
Const (\<^const_name>\<open>append\<close>, T --> T --> T) $ pi1 $ pi2
else pi2
end;
val pis'' = fold (concat_perm #> map) pis' pis;
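(* For example, with pis' = [[(a, c)]] (a single freshening swap on some atom
type) and pis = [pi1, pi2] where only pi1 is a permutation of that atom type,
the fold above yields [[(a, c)] @ pi1, pi2]: each swap is prepended to the
quantified permutation of the matching atom type, while permutations of other
atom types are left unchanged. *)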
val env = Pattern.first_order_match thy (ihypt, Thm.prop_of ihyp)
(Vartab.empty, Vartab.empty);
val ihyp' = Thm.instantiate (TVars.empty,
Vars.make (map (fn (v, t) => (dest_Var v, Thm.global_cterm_of thy t))
(map (Envir.subst_term env) vs ~~
map (fold_rev (NominalDatatype.mk_perm [])
(rev pis' @ pis)) params' @ [z]))) ihyp;
fun mk_pi th =
Simplifier.simplify (put_simpset HOL_basic_ss ctxt' addsimps [@{thm id_apply}]
addsimprocs [NominalDatatype.perm_simproc])
(Simplifier.simplify (eqvt_ss ctxt')
(fold_rev (mk_perm_bool ctxt' o Thm.cterm_of ctxt')
(rev pis' @ pis) th));
val (gprems1, gprems2) = split_list
(map (fn (th, t) =>
if null (preds_of ps t) then (SOME th, mk_pi th)
else
(map_thm ctxt' (split_conj (K o I) names)
(eresolve_tac ctxt' [conjunct1] 1) monos NONE th,
mk_pi (the (map_thm ctxt' (inst_conj_all names ps (rev pis''))
(inst_conj_all_tac ctxt' (length pis'')) monos (SOME t) th))))
(gprems ~~ oprems)) |>> map_filter I;
val vc_compat_ths' = map (fn th =>
let
val th' = first_order_mrs gprems1 th;
val (bop, lhs, rhs) = (case Thm.concl_of th' of
_ $ (fresh $ lhs $ rhs) =>
(fn t => fn u => fresh $ t $ u, lhs, rhs)
| _ $ (_ $ (_ $ lhs $ rhs)) =>
(curry (HOLogic.mk_not o HOLogic.mk_eq), lhs, rhs));
val th'' = Goal.prove ctxt' [] [] (HOLogic.mk_Trueprop
(bop (fold_rev (NominalDatatype.mk_perm []) pis lhs)
(fold_rev (NominalDatatype.mk_perm []) pis rhs)))
(fn {context = goal_ctxt3, ...} =>
simp_tac (put_simpset HOL_basic_ss goal_ctxt3 addsimps
(fresh_bij @ perm_bij)) 1 THEN resolve_tac goal_ctxt3 [th'] 1)
in Simplifier.simplify (eqvt_ss ctxt' addsimps fresh_atm) th'' end)
vc_compat_ths;
val vc_compat_ths'' = NominalDatatype.mk_not_sym vc_compat_ths';
(** Since swap_simps simplifies (pi :: 'a prm) o (x :: 'b) to x **)
(** we have to pre-simplify the rewrite rules **)
val swap_simps_simpset = put_simpset HOL_ss ctxt' addsimps swap_simps @
map (Simplifier.simplify (put_simpset HOL_ss ctxt' addsimps swap_simps))
(vc_compat_ths'' @ freshs2');
val th = Goal.prove ctxt' [] []
(HOLogic.mk_Trueprop (list_comb (P $ hd ts,
map (fold (NominalDatatype.mk_perm []) pis') (tl ts))))
(fn {context = goal_ctxt4, ...} =>
EVERY ([simp_tac (eqvt_ss goal_ctxt4) 1,
resolve_tac goal_ctxt4 [ihyp'] 1,
REPEAT_DETERM_N (Thm.nprems_of ihyp - length gprems)
(simp_tac swap_simps_simpset 1),
REPEAT_DETERM_N (length gprems)
(simp_tac (put_simpset HOL_basic_ss goal_ctxt4
addsimps [inductive_forall_def']
addsimprocs [NominalDatatype.perm_simproc]) 1 THEN
resolve_tac goal_ctxt4 gprems2 1)]));
val final = Goal.prove ctxt' [] [] (Thm.term_of concl)
(fn {context = goal_ctxt5, ...} =>
cut_facts_tac [th] 1 THEN full_simp_tac (put_simpset HOL_ss goal_ctxt5
addsimps vc_compat_ths'' @ freshs2' @
perm_fresh_fresh @ fresh_atm) 1);
val final' = Proof_Context.export ctxt' goal_ctxt2 [final];
in resolve_tac goal_ctxt2 final' 1 end) goal_ctxt1 1])
(prems ~~ thss ~~ ihyps ~~ prems'')))
in
cut_facts_tac [th] 1 THEN REPEAT (eresolve_tac goal_ctxt [conjE] 1) THEN
REPEAT (REPEAT (resolve_tac goal_ctxt [conjI, impI] 1) THEN
eresolve_tac goal_ctxt [impE] 1 THEN assume_tac goal_ctxt 1 THEN
REPEAT (eresolve_tac goal_ctxt @{thms allE_Nil} 1) THEN
asm_full_simp_tac goal_ctxt 1)
end) |> singleton (Proof_Context.export ctxt lthy);
(** strong case analysis rule **)
val cases_prems = map (fn ((name, avoids), rule) =>
let
val ([rule'], ctxt') = Variable.import_terms false [Thm.prop_of rule] lthy;
val prem :: prems = Logic.strip_imp_prems rule';
val concl = Logic.strip_imp_concl rule'
in
(prem,
List.drop (snd (strip_comb (HOLogic.dest_Trueprop prem)), length ind_params),
concl,
fold_map (fn (prem, (_, avoid)) => fn ctxt =>
let
val prems = Logic.strip_assums_hyp prem;
val params = Logic.strip_params prem;
val bnds = fold (add_binders thy 0) prems [] @ mk_avoids params avoid;
fun mk_subst (p as (s, T)) (i, j, ctxt, ps, qs, is, ts) =
if member (op = o apsnd fst) bnds (Bound i) then
let
val ([s'], ctxt') = Variable.variant_fixes [s] ctxt;
val t = Free (s', T)
in (i + 1, j, ctxt', ps, (t, T) :: qs, i :: is, t :: ts) end
else (i + 1, j + 1, ctxt, p :: ps, qs, is, Bound j :: ts);
val (_, _, ctxt', ps, qs, is, ts) = fold_rev mk_subst params
(0, 0, ctxt, [], [], [], [])
in
((ps, qs, is, map (curry subst_bounds (rev ts)) prems), ctxt')
end) (prems ~~ avoids) ctxt')
end)
(Inductive.partition_rules' raw_induct (intrs ~~ avoids') ~~
elims);
val cases_prems' =
map (fn (prem, args, concl, (prems, _)) =>
let
fun mk_prem (ps, [], _, prems) =
Logic.list_all (ps, Logic.list_implies (prems, concl))
| mk_prem (ps, qs, _, prems) =
Logic.list_all (ps, Logic.mk_implies
(Logic.list_implies
(mk_distinct qs @
maps (fn (t, T) => map (fn u => HOLogic.mk_Trueprop
(NominalDatatype.fresh_const T (fastype_of u) $ t $ u))
args) qs,
HOLogic.mk_Trueprop (foldr1 HOLogic.mk_conj
(map HOLogic.dest_Trueprop prems))),
concl))
in map mk_prem prems end) cases_prems;
fun cases_eqvt_simpset ctxt =
put_simpset HOL_ss ctxt
addsimps eqvt_thms @ swap_simps @ perm_pi_simp
addsimprocs [NominalPermeq.perm_simproc_app, NominalPermeq.perm_simproc_fun];
fun simp_fresh_atm ctxt =
Simplifier.simplify (put_simpset HOL_basic_ss ctxt addsimps fresh_atm);
fun mk_cases_proof ((((name, thss), elim), (prem, args, concl, (prems, ctxt))),
prems') =
(name, Goal.prove ctxt [] (prem :: prems') concl
(fn {prems = hyp :: hyps, context = ctxt1} =>
EVERY (resolve_tac ctxt1 [hyp RS elim] 1 ::
map (fn (((_, vc_compat_ths), case_hyp), (_, qs, is, _)) =>
SUBPROOF (fn {prems = case_hyps, params, context = ctxt2, concl, ...} =>
if null qs then
resolve_tac ctxt2 [first_order_mrs case_hyps case_hyp] 1
else
let
val params' = map (Thm.term_of o #2 o nth (rev params)) is;
val tab = params' ~~ map fst qs;
val (hyps1, hyps2) = chop (length args) case_hyps;
(* turns a = t and [x1 # t, ..., xn # t] *)
(* into [x1 # a, ..., xn # a] *)
fun inst_fresh th' ths =
let val (ths1, ths2) = chop (length qs) ths
in
(map (fn th =>
let
val (cf, ct) =
Thm.dest_comb (Thm.dest_arg (Thm.cprop_of th));
val arg_cong' = Thm.instantiate'
[SOME (Thm.ctyp_of_cterm ct)]
[NONE, SOME ct, SOME cf] (arg_cong RS iffD2);
val inst = Thm.first_order_match (ct,
Thm.dest_arg (Thm.dest_arg (Thm.cprop_of th')))
in [th', th] MRS Thm.instantiate inst arg_cong'
end) ths1,
ths2)
end;
val (vc_compat_ths1, vc_compat_ths2) =
chop (length vc_compat_ths - length args * length qs)
(map (first_order_mrs hyps2) vc_compat_ths);
val vc_compat_ths' =
NominalDatatype.mk_not_sym vc_compat_ths1 @
flat (fst (fold_map inst_fresh hyps1 vc_compat_ths2));
val (freshs1, freshs2, ctxt3) = fold
(obtain_fresh_name (args @ map fst qs @ params'))
(map snd qs) ([], [], ctxt2);
val freshs2' = NominalDatatype.mk_not_sym freshs2;
val pis = map (NominalDatatype.perm_of_pair)
((freshs1 ~~ map fst qs) @ (params' ~~ freshs1));
val mk_pis = fold_rev (mk_perm_bool ctxt3) (map (Thm.cterm_of ctxt3) pis);
val obj = Thm.global_cterm_of thy (foldr1 HOLogic.mk_conj (map (map_aterms
(fn x as Free _ =>
if member (op =) args x then x
else (case AList.lookup op = tab x of
SOME y => y
| NONE => fold_rev (NominalDatatype.mk_perm []) pis x)
| x => x) o HOLogic.dest_Trueprop o Thm.prop_of) case_hyps));
val inst = Thm.first_order_match (Thm.dest_arg
(Drule.strip_imp_concl (hd (cprems_of case_hyp))), obj);
val th = Goal.prove ctxt3 [] [] (Thm.term_of concl)
(fn {context = ctxt4, ...} =>
resolve_tac ctxt4 [Thm.instantiate inst case_hyp] 1 THEN
SUBPROOF (fn {context = ctxt5, prems = fresh_hyps, ...} =>
let
val fresh_hyps' = NominalDatatype.mk_not_sym fresh_hyps;
val case_simpset = cases_eqvt_simpset ctxt5 addsimps freshs2' @
map (simp_fresh_atm ctxt5) (vc_compat_ths' @ fresh_hyps');
val fresh_fresh_simpset = case_simpset addsimps perm_fresh_fresh;
val hyps1' = map
(mk_pis #> Simplifier.simplify fresh_fresh_simpset) hyps1;
val hyps2' = map
(mk_pis #> Simplifier.simplify case_simpset) hyps2;
val case_hyps' = hyps1' @ hyps2'
in
simp_tac case_simpset 1 THEN
REPEAT_DETERM (TRY (resolve_tac ctxt5 [conjI] 1) THEN
resolve_tac ctxt5 case_hyps' 1)
end) ctxt4 1)
val final = Proof_Context.export ctxt3 ctxt2 [th]
in resolve_tac ctxt2 final 1 end) ctxt1 1)
(thss ~~ hyps ~~ prems))) |>
singleton (Proof_Context.export ctxt lthy))
in
ctxt'' |>
Proof.theorem NONE (fn thss => fn lthy1 =>
let
val rec_name = space_implode "_" (map Long_Name.base_name names);
val rec_qualified = Binding.qualify false rec_name;
val ind_case_names = Rule_Cases.case_names induct_cases;
val induct_cases' = Inductive.partition_rules' raw_induct
(intrs ~~ induct_cases);
val thss' = map (map (atomize_intr lthy1)) thss;
val thsss = Inductive.partition_rules' raw_induct (intrs ~~ thss');
val strong_raw_induct =
mk_ind_proof lthy1 thss' |> Inductive.rulify lthy1;
val strong_cases = map (mk_cases_proof ##> Inductive.rulify lthy1)
(thsss ~~ elims ~~ cases_prems ~~ cases_prems');
val strong_induct_atts =
- map (Attrib.internal o K)
+ map (Attrib.internal \<^here> o K)
[ind_case_names, Rule_Cases.consumes (~ (Thm.nprems_of strong_raw_induct))];
val strong_induct =
if length names > 1 then strong_raw_induct
else strong_raw_induct RSN (2, rev_mp);
val ((_, [strong_induct']), lthy2) = lthy1 |> Local_Theory.note
((rec_qualified (Binding.name "strong_induct"), strong_induct_atts), [strong_induct]);
val strong_inducts =
Project_Rule.projects lthy1 (1 upto length names) strong_induct';
in
lthy2 |>
Local_Theory.notes
[((rec_qualified (Binding.name "strong_inducts"), []),
strong_inducts |> map (fn th => ([th],
- [Attrib.internal (K ind_case_names),
- Attrib.internal (K (Rule_Cases.consumes (1 - Thm.nprems_of th)))])))] |> snd |>
+ [Attrib.internal \<^here> (K ind_case_names),
+ Attrib.internal \<^here> (K (Rule_Cases.consumes (1 - Thm.nprems_of th)))])))] |> snd |>
Local_Theory.notes (map (fn ((name, elim), (_, cases)) =>
((Binding.qualified_name (Long_Name.qualify (Long_Name.base_name name) "strong_cases"),
- [Attrib.internal (K (Rule_Cases.case_names (map snd cases))),
- Attrib.internal (K (Rule_Cases.consumes (1 - Thm.nprems_of elim)))]), [([elim], [])]))
+ [Attrib.internal \<^here> (K (Rule_Cases.case_names (map snd cases))),
+ Attrib.internal \<^here> (K (Rule_Cases.consumes (1 - Thm.nprems_of elim)))]), [([elim], [])]))
(strong_cases ~~ induct_cases')) |> snd
end)
(map (map (rulify_term thy #> rpair [])) vc_compat)
end;
fun prove_eqvt s xatoms lthy =
let
val thy = Proof_Context.theory_of lthy;
val ({names, ...}, {raw_induct, intrs, elims, ...}) =
Inductive.the_inductive_global lthy (Sign.intern_const thy s);
val raw_induct = atomize_induct lthy raw_induct;
val elims = map (atomize_induct lthy) elims;
val intrs = map (atomize_intr lthy) intrs;
val monos = Inductive.get_monos lthy;
val intrs' = Inductive.unpartition_rules intrs
(map (fn (((s, ths), (_, k)), th) =>
(s, ths ~~ Inductive.infer_intro_vars thy th k ths))
(Inductive.partition_rules raw_induct intrs ~~
Inductive.arities_of raw_induct ~~ elims));
val k = length (Inductive.params_of raw_induct);
val atoms' = NominalAtoms.atoms_of thy;
val atoms =
if null xatoms then atoms' else
let val atoms = map (Sign.intern_type thy) xatoms
in
(case duplicates op = atoms of
[] => ()
| xs => error ("Duplicate atoms: " ^ commas xs);
case subtract (op =) atoms' atoms of
[] => ()
| xs => error ("No such atoms: " ^ commas xs);
atoms)
end;
val perm_pi_simp = Global_Theory.get_thms thy "perm_pi_simp";
val (([t], [pi]), ctxt1) = lthy |>
Variable.import_terms false [Thm.concl_of raw_induct] ||>>
Variable.variant_fixes ["pi"];
fun eqvt_simpset ctxt = put_simpset HOL_basic_ss ctxt addsimps
(NominalThmDecls.get_eqvt_thms ctxt @ perm_pi_simp) addsimprocs
[mk_perm_bool_simproc names,
NominalPermeq.perm_simproc_app, NominalPermeq.perm_simproc_fun];
val ps = map (fst o HOLogic.dest_imp)
(HOLogic.dest_conj (HOLogic.dest_Trueprop t));
fun eqvt_tac ctxt pi (intr, vs) st =
let
fun eqvt_err s =
let val ([t], ctxt') = Variable.import_terms true [Thm.prop_of intr] ctxt
in error ("Could not prove equivariance for introduction rule\n" ^
Syntax.string_of_term ctxt' t ^ "\n" ^ s)
end;
val res = SUBPROOF (fn {context = goal_ctxt, prems, params, ...} =>
let
val prems' = map (fn th => the_default th (map_thm goal_ctxt
(split_conj (K I) names) (eresolve_tac goal_ctxt [conjunct2] 1) monos NONE th)) prems;
val prems'' = map (fn th => Simplifier.simplify (eqvt_simpset goal_ctxt)
(mk_perm_bool goal_ctxt (Thm.cterm_of goal_ctxt pi) th)) prems';
val intr' = infer_instantiate goal_ctxt (map (#1 o dest_Var) vs ~~
map (Thm.cterm_of goal_ctxt o NominalDatatype.mk_perm [] pi o Thm.term_of o #2) params)
intr
in (resolve_tac goal_ctxt [intr'] THEN_ALL_NEW (TRY o resolve_tac goal_ctxt prems'')) 1
end) ctxt 1 st
in
case (Seq.pull res handle THM (s, _, _) => eqvt_err s) of
NONE => eqvt_err ("Rule does not match goal\n" ^
Syntax.string_of_term ctxt (hd (Thm.prems_of st)))
| SOME (th, _) => Seq.single th
end;
val thss = map (fn atom =>
let val pi' = Free (pi, NominalAtoms.mk_permT (Type (atom, [])))
in map (fn th => zero_var_indexes (th RS mp))
(Old_Datatype_Aux.split_conj_thm (Goal.prove ctxt1 [] []
(HOLogic.mk_Trueprop (foldr1 HOLogic.mk_conj (map (fn p =>
let
val (h, ts) = strip_comb p;
val (ts1, ts2) = chop k ts
in
HOLogic.mk_imp (p, list_comb (h, ts1 @
map (NominalDatatype.mk_perm [] pi') ts2))
end) ps)))
(fn {context = goal_ctxt, ...} =>
EVERY (resolve_tac goal_ctxt [raw_induct] 1 :: map (fn intr_vs =>
full_simp_tac (eqvt_simpset goal_ctxt) 1 THEN
eqvt_tac goal_ctxt pi' intr_vs) intrs')) |>
singleton (Proof_Context.export ctxt1 lthy)))
end) atoms
in
lthy |>
Local_Theory.notes (map (fn (name, ths) =>
((Binding.qualified_name (Long_Name.qualify (Long_Name.base_name name) "eqvt"),
- [Attrib.internal (K NominalThmDecls.eqvt_add)]), [(ths, [])]))
+ @{attributes [eqvt]}), [(ths, [])]))
(names ~~ transp thss)) |> snd
end;
(* outer syntax *)
val _ =
Outer_Syntax.local_theory_to_proof \<^command_keyword>\<open>nominal_inductive\<close>
"prove equivariance and strong induction theorem for inductive predicate involving nominal datatypes"
(Parse.name -- Scan.optional (\<^keyword>\<open>avoids\<close> |-- Parse.and_list1 (Parse.name --
(\<^keyword>\<open>:\<close> |-- Scan.repeat1 Parse.name))) [] >> (fn (name, avoids) =>
prove_strong_ind name avoids));
val _ =
Outer_Syntax.local_theory \<^command_keyword>\<open>equivariance\<close>
"prove equivariance for inductive predicate involving nominal datatypes"
(Parse.name -- Scan.optional (\<^keyword>\<open>[\<close> |-- Parse.list1 Parse.name --| \<^keyword>\<open>]\<close>) [] >>
(fn (name, atoms) => prove_eqvt name atoms));
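(* Hypothetical usage of the two commands declared above, for an inductive
predicate "typing" over a nominal datatype (all names are illustrative):
equivariance typing
nominal_inductive typing
avoids t_Lam: x
"equivariance" proves the equivariance property and notes the resulting
theorems as typing.eqvt; "nominal_inductive" opens a goal state for the
vc_compat side conditions and, once they are discharged, notes
typing.strong_induct, typing.strong_inducts and the strong_cases rules. *)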
end
diff --git a/src/HOL/Nominal/nominal_inductive2.ML b/src/HOL/Nominal/nominal_inductive2.ML
--- a/src/HOL/Nominal/nominal_inductive2.ML
+++ b/src/HOL/Nominal/nominal_inductive2.ML
@@ -1,505 +1,505 @@
(* Title: HOL/Nominal/nominal_inductive2.ML
Author: Stefan Berghofer, TU Muenchen
Infrastructure for proving equivariance and strong induction theorems
for inductive predicates involving nominal datatypes.
Experimental version that allows avoiding lists of atoms.
*)
signature NOMINAL_INDUCTIVE2 =
sig
val prove_strong_ind: string -> string option -> (string * string list) list ->
local_theory -> Proof.state
end
structure NominalInductive2 : NOMINAL_INDUCTIVE2 =
struct
val inductive_forall_def = @{thm HOL.induct_forall_def};
val inductive_atomize = @{thms induct_atomize};
val inductive_rulify = @{thms induct_rulify};
fun rulify_term thy = Raw_Simplifier.rewrite_term thy inductive_rulify [];
fun atomize_conv ctxt =
Raw_Simplifier.rewrite_cterm (true, false, false) (K (K NONE))
(put_simpset HOL_basic_ss ctxt addsimps inductive_atomize);
fun atomize_intr ctxt = Conv.fconv_rule (Conv.prems_conv ~1 (atomize_conv ctxt));
fun atomize_induct ctxt = Conv.fconv_rule (Conv.prems_conv ~1
(Conv.params_conv ~1 (Conv.prems_conv ~1 o atomize_conv) ctxt));
fun fresh_postprocess ctxt =
Simplifier.full_simplify (put_simpset HOL_basic_ss ctxt addsimps
[@{thm fresh_star_set_eq}, @{thm fresh_star_Un_elim},
@{thm fresh_star_insert_elim}, @{thm fresh_star_empty_elim}]);
fun preds_of ps t = inter (op = o apsnd dest_Free) ps (Term.add_frees t []);
val perm_bool = mk_meta_eq @{thm perm_bool_def};
val perm_boolI = @{thm perm_boolI};
val (_, [perm_boolI_pi, _]) = Drule.strip_comb (snd (Thm.dest_comb
(Drule.strip_imp_concl (Thm.cprop_of perm_boolI))));
fun mk_perm_bool ctxt pi th =
th RS infer_instantiate ctxt [(#1 (dest_Var (Thm.term_of perm_boolI_pi)), pi)] perm_boolI;
fun mk_perm_bool_simproc names =
Simplifier.make_simproc \<^context> "perm_bool"
{lhss = [\<^term>\<open>perm pi x\<close>],
proc = fn _ => fn _ => fn ct =>
(case Thm.term_of ct of
Const (\<^const_name>\<open>Nominal.perm\<close>, _) $ _ $ t =>
if member (op =) names (the_default "" (try (head_of #> dest_Const #> fst) t))
then SOME perm_bool else NONE
| _ => NONE)};
fun transp ([] :: _) = []
| transp xs = map hd xs :: transp (map tl xs);
fun add_binders thy i (t as (_ $ _)) bs = (case strip_comb t of
(Const (s, T), ts) => (case strip_type T of
(Ts, Type (tname, _)) =>
(case NominalDatatype.get_nominal_datatype thy tname of
NONE => fold (add_binders thy i) ts bs
| SOME {descr, index, ...} => (case AList.lookup op =
(#3 (the (AList.lookup op = descr index))) s of
NONE => fold (add_binders thy i) ts bs
| SOME cargs => fst (fold (fn (xs, x) => fn (bs', cargs') =>
let val (cargs1, (u, _) :: cargs2) = chop (length xs) cargs'
in (add_binders thy i u
(fold (fn (u, T) =>
if exists (fn j => j < i) (loose_bnos u) then I
else AList.map_default op = (T, [])
(insert op aconv (incr_boundvars (~i) u)))
cargs1 bs'), cargs2)
end) cargs (bs, ts ~~ Ts))))
| _ => fold (add_binders thy i) ts bs)
| (u, ts) => add_binders thy i u (fold (add_binders thy i) ts bs))
| add_binders thy i (Abs (_, _, t)) bs = add_binders thy (i + 1) t bs
| add_binders thy i _ bs = bs;
fun split_conj f names (Const (\<^const_name>\<open>HOL.conj\<close>, _) $ p $ q) _ = (case head_of p of
Const (name, _) =>
if member (op =) names name then SOME (f p q) else NONE
| _ => NONE)
| split_conj _ _ _ _ = NONE;
fun strip_all [] t = t
| strip_all (_ :: xs) (Const (\<^const_name>\<open>All\<close>, _) $ Abs (s, T, t)) = strip_all xs t;
(*********************************************************************)
(* maps R ... & (ALL pi_1 ... pi_n z. P z (pi_1 o ... o pi_n o t)) *)
(* or ALL pi_1 ... pi_n z. P z (pi_1 o ... o pi_n o t) *)
(* to R ... & id (ALL z. P z (pi_1 o ... o pi_n o t)) *)
(* or id (ALL z. P z (pi_1 o ... o pi_n o t)) *)
(* *)
(* where "id" protects the subformula from simplification *)
(*********************************************************************)
fun inst_conj_all names ps pis (Const (\<^const_name>\<open>HOL.conj\<close>, _) $ p $ q) _ =
(case head_of p of
Const (name, _) =>
if member (op =) names name then SOME (HOLogic.mk_conj (p,
Const (\<^const_name>\<open>Fun.id\<close>, HOLogic.boolT --> HOLogic.boolT) $
(subst_bounds (pis, strip_all pis q))))
else NONE
| _ => NONE)
| inst_conj_all names ps pis t u =
if member (op aconv) ps (head_of u) then
SOME (Const (\<^const_name>\<open>Fun.id\<close>, HOLogic.boolT --> HOLogic.boolT) $
(subst_bounds (pis, strip_all pis t)))
else NONE
| inst_conj_all _ _ _ _ _ = NONE;
fun inst_conj_all_tac ctxt k = EVERY
[TRY (EVERY [eresolve_tac ctxt [conjE] 1, resolve_tac ctxt [conjI] 1, assume_tac ctxt 1]),
REPEAT_DETERM_N k (eresolve_tac ctxt [allE] 1),
simp_tac (put_simpset HOL_basic_ss ctxt addsimps [@{thm id_apply}]) 1];
fun map_term f t u = (case f t u of
NONE => map_term' f t u | x => x)
and map_term' f (t $ u) (t' $ u') = (case (map_term f t t', map_term f u u') of
(NONE, NONE) => NONE
| (SOME t'', NONE) => SOME (t'' $ u)
| (NONE, SOME u'') => SOME (t $ u'')
| (SOME t'', SOME u'') => SOME (t'' $ u''))
| map_term' f (Abs (s, T, t)) (Abs (s', T', t')) = (case map_term f t t' of
NONE => NONE
| SOME t'' => SOME (Abs (s, T, t'')))
| map_term' _ _ _ = NONE;
(*********************************************************************)
(* Prove F[f t] from F[t], where F is monotone *)
(*********************************************************************)
fun map_thm ctxt f tac monos opt th =
let
val prop = Thm.prop_of th;
fun prove t =
Goal.prove ctxt [] [] t (fn {context = goal_ctxt, ...} =>
EVERY [cut_facts_tac [th] 1, eresolve_tac goal_ctxt [rev_mp] 1,
REPEAT_DETERM (FIRSTGOAL (resolve_tac goal_ctxt monos)),
REPEAT_DETERM (resolve_tac goal_ctxt [impI] 1 THEN (assume_tac goal_ctxt 1 ORELSE tac))])
in Option.map prove (map_term f prop (the_default prop opt)) end;
fun abs_params params t =
let val vs = map (Var o apfst (rpair 0)) (Term.rename_wrt_term t params)
in (Logic.list_all (params, t), (rev vs, subst_bounds (vs, t))) end;
fun inst_params thy (vs, p) th cts =
let val env = Pattern.first_order_match thy (p, Thm.prop_of th)
(Vartab.empty, Vartab.empty)
in
Thm.instantiate (TVars.empty, Vars.make (map (dest_Var o Envir.subst_term env) vs ~~ cts)) th
end;
fun prove_strong_ind s alt_name avoids lthy =
let
val thy = Proof_Context.theory_of lthy;
val ({names, ...}, {raw_induct, intrs, elims, ...}) =
Inductive.the_inductive_global lthy (Sign.intern_const thy s);
val ind_params = Inductive.params_of raw_induct;
val raw_induct = atomize_induct lthy raw_induct;
val elims = map (atomize_induct lthy) elims;
val monos = Inductive.get_monos lthy;
val eqvt_thms = NominalThmDecls.get_eqvt_thms lthy;
val _ = (case subtract (op =) (fold (Term.add_const_names o Thm.prop_of) eqvt_thms []) names of
[] => ()
| xs => error ("Missing equivariance theorem for predicate(s): " ^
commas_quote xs));
val induct_cases = map (fst o fst) (fst (Rule_Cases.get (the
(Induct.lookup_inductP lthy (hd names)))));
val induct_cases' = if null induct_cases then replicate (length intrs) ""
else induct_cases;
val (raw_induct', ctxt') = lthy
|> yield_singleton (Variable.import_terms false) (Thm.prop_of raw_induct);
val concls = raw_induct' |> Logic.strip_imp_concl |> HOLogic.dest_Trueprop |>
HOLogic.dest_conj |> map (HOLogic.dest_imp ##> strip_comb);
val ps = map (fst o snd) concls;
val _ = (case duplicates (op = o apply2 fst) avoids of
[] => ()
| xs => error ("Duplicate case names: " ^ commas_quote (map fst xs)));
val _ = (case subtract (op =) induct_cases (map fst avoids) of
[] => ()
| xs => error ("No such case(s) in inductive definition: " ^ commas_quote xs));
fun mk_avoids params name sets =
let
val (_, ctxt') = Proof_Context.add_fixes
(map (fn (s, T) => (Binding.name s, SOME T, NoSyn)) params) lthy;
fun mk s =
let
val t = Syntax.read_term ctxt' s;
val t' = fold_rev absfree params t |>
funpow (length params) (fn Abs (_, _, t) => t)
in (t', HOLogic.dest_setT (fastype_of t)) end
handle TERM _ =>
error ("Expression " ^ quote s ^ " to be avoided in case " ^
quote name ^ " is not a set type");
fun add_set p [] = [p]
| add_set (t, T) ((u, U) :: ps) =
if T = U then
let val S = HOLogic.mk_setT T
in (Const (\<^const_name>\<open>sup\<close>, S --> S --> S) $ u $ t, T) :: ps
end
else (u, U) :: add_set (t, T) ps
in
fold (mk #> add_set) sets []
end;
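(* For example, an avoids specification T_Let: "{x}" and "set xs" with x and
xs ranging over the same atom type is collapsed by the fold above into the
single entry {x} \<union> set xs, whereas set expressions over distinct atom
types are kept as separate entries; expressions that are not of set type are
rejected with the error above. *)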
val prems = map (fn (prem, name) =>
let
val prems = map (incr_boundvars 1) (Logic.strip_assums_hyp prem);
val concl = incr_boundvars 1 (Logic.strip_assums_concl prem);
val params = Logic.strip_params prem
in
(params,
if null avoids then
map (fn (T, ts) => (HOLogic.mk_set T ts, T))
(fold (add_binders thy 0) (prems @ [concl]) [])
else case AList.lookup op = avoids name of
NONE => []
| SOME sets =>
map (apfst (incr_boundvars 1)) (mk_avoids params name sets),
prems, strip_comb (HOLogic.dest_Trueprop concl))
end) (Logic.strip_imp_prems raw_induct' ~~ induct_cases');
val atomTs = distinct op = (maps (map snd o #2) prems);
val atoms = map (fst o dest_Type) atomTs;
val ind_sort = if null atomTs then \<^sort>\<open>type\<close>
else Sign.minimize_sort thy (Sign.certify_sort thy (map (fn a => Sign.intern_class thy
("fs_" ^ Long_Name.base_name a)) atoms));
val (fs_ctxt_tyname, _) = Name.variant "'n" (Variable.names_of ctxt');
val ([fs_ctxt_name], ctxt'') = Variable.variant_fixes ["z"] ctxt';
val fsT = TFree (fs_ctxt_tyname, ind_sort);
val inductive_forall_def' = Thm.instantiate'
[SOME (Thm.global_ctyp_of thy fsT)] [] inductive_forall_def;
fun lift_pred' t (Free (s, T)) ts =
list_comb (Free (s, fsT --> T), t :: ts);
val lift_pred = lift_pred' (Bound 0);
fun lift_prem (t as (f $ u)) =
let val (p, ts) = strip_comb t
in
if member (op =) ps p then HOLogic.mk_induct_forall fsT $
Abs ("z", fsT, lift_pred p (map (incr_boundvars 1) ts))
else lift_prem f $ lift_prem u
end
| lift_prem (Abs (s, T, t)) = Abs (s, T, lift_prem t)
| lift_prem t = t;
fun mk_fresh (x, T) = HOLogic.mk_Trueprop
(NominalDatatype.fresh_star_const T fsT $ x $ Bound 0);
val (prems', prems'') = split_list (map (fn (params, sets, prems, (p, ts)) =>
let
val params' = params @ [("y", fsT)];
val prem = Logic.list_implies
(map mk_fresh sets @
map (fn prem =>
if null (preds_of ps prem) then prem
else lift_prem prem) prems,
HOLogic.mk_Trueprop (lift_pred p ts));
in abs_params params' prem end) prems);
val ind_vars =
(Old_Datatype_Prop.indexify_names (replicate (length atomTs) "pi") ~~
map NominalAtoms.mk_permT atomTs) @ [("z", fsT)];
val ind_Ts = rev (map snd ind_vars);
val concl = HOLogic.mk_Trueprop (foldr1 HOLogic.mk_conj
(map (fn (prem, (p, ts)) => HOLogic.mk_imp (prem,
HOLogic.list_all (ind_vars, lift_pred p
(map (fold_rev (NominalDatatype.mk_perm ind_Ts)
(map Bound (length atomTs downto 1))) ts)))) concls));
val concl' = HOLogic.mk_Trueprop (foldr1 HOLogic.mk_conj
(map (fn (prem, (p, ts)) => HOLogic.mk_imp (prem,
lift_pred' (Free (fs_ctxt_name, fsT)) p ts)) concls));
val (vc_compat, vc_compat') = map (fn (params, sets, prems, (p, ts)) =>
map (fn q => abs_params params (incr_boundvars ~1 (Logic.list_implies
(map_filter (fn prem =>
if null (preds_of ps prem) then SOME prem
else map_term (split_conj (K o I) names) prem prem) prems, q))))
(maps (fn (t, T) => map (fn (u, U) => HOLogic.mk_Trueprop
(NominalDatatype.fresh_star_const U T $ u $ t)) sets)
(ts ~~ binder_types (fastype_of p)) @
map (fn (u, U) => HOLogic.mk_Trueprop (Const (\<^const_name>\<open>finite\<close>,
HOLogic.mk_setT U --> HOLogic.boolT) $ u)) sets) |>
split_list) prems |> split_list;
val perm_pi_simp = Global_Theory.get_thms thy "perm_pi_simp";
val pt2_atoms = map (fn a => Global_Theory.get_thm thy
("pt_" ^ Long_Name.base_name a ^ "2")) atoms;
fun eqvt_ss ctxt =
put_simpset HOL_basic_ss ctxt
addsimps (eqvt_thms @ perm_pi_simp @ pt2_atoms)
addsimprocs [mk_perm_bool_simproc [\<^const_name>\<open>Fun.id\<close>],
NominalPermeq.perm_simproc_app, NominalPermeq.perm_simproc_fun];
val fresh_star_bij = Global_Theory.get_thms thy "fresh_star_bij";
val pt_insts = map (NominalAtoms.pt_inst_of thy) atoms;
val at_insts = map (NominalAtoms.at_inst_of thy) atoms;
val dj_thms = maps (fn a =>
map (NominalAtoms.dj_thm_of thy a) (remove (op =) a atoms)) atoms;
val finite_ineq = map2 (fn th => fn th' => th' RS (th RS
@{thm pt_set_finite_ineq})) pt_insts at_insts;
val perm_set_forget =
map (fn th => th RS @{thm dj_perm_set_forget}) dj_thms;
val perm_freshs_freshs = atomTs ~~ map2 (fn th => fn th' => th' RS (th RS
@{thm pt_freshs_freshs})) pt_insts at_insts;
fun obtain_fresh_name ts sets (T, fin) (freshs, ths1, ths2, ths3, ctxt) =
let
val thy = Proof_Context.theory_of ctxt;
(** protect terms so that fresh_star_prod_set does not interfere with **)
(** the pairs used in introduction rules of the inductive predicate **)
fun protect t =
let val T = fastype_of t in Const (\<^const_name>\<open>Fun.id\<close>, T --> T) $ t end;
val p = foldr1 HOLogic.mk_prod (map protect ts);
val atom = fst (dest_Type T);
val {at_inst, ...} = NominalAtoms.the_atom_info thy atom;
val fs_atom = Global_Theory.get_thm thy
("fs_" ^ Long_Name.base_name atom ^ "1");
val avoid_th = Thm.instantiate'
[SOME (Thm.global_ctyp_of thy (fastype_of p))] [SOME (Thm.global_cterm_of thy p)]
([at_inst, fin, fs_atom] MRS @{thm at_set_avoiding});
val (([(_, cx)], th1 :: th2 :: ths), ctxt') = Obtain.result
(fn ctxt' => EVERY
[resolve_tac ctxt' [avoid_th] 1,
full_simp_tac (put_simpset HOL_ss ctxt' addsimps [@{thm fresh_star_prod_set}]) 1,
full_simp_tac (put_simpset HOL_basic_ss ctxt' addsimps [@{thm id_apply}]) 1,
rotate_tac 1 1,
REPEAT (eresolve_tac ctxt' [conjE] 1)])
[] ctxt;
val (Ts1, _ :: Ts2) = chop_prefix (not o equal T) (map snd sets);
val pTs = map NominalAtoms.mk_permT (Ts1 @ Ts2);
val (pis1, pis2) = chop (length Ts1)
(map Bound (length pTs - 1 downto 0));
val _ $ (f $ (_ $ pi $ l) $ r) = Thm.prop_of th2
val th2' =
Goal.prove ctxt' [] []
(Logic.list_all (map (pair "pi") pTs, HOLogic.mk_Trueprop
(f $ fold_rev (NominalDatatype.mk_perm (rev pTs))
(pis1 @ pi :: pis2) l $ r)))
(fn {context = goal_ctxt, ...} =>
cut_facts_tac [th2] 1 THEN
full_simp_tac (put_simpset HOL_basic_ss goal_ctxt addsimps perm_set_forget) 1) |>
Simplifier.simplify (eqvt_ss ctxt')
in
(freshs @ [Thm.term_of cx],
ths1 @ ths, ths2 @ [th1], ths3 @ [th2'], ctxt')
end;
fun mk_ind_proof ctxt thss =
Goal.prove ctxt [] prems' concl' (fn {prems = ihyps, context = goal_ctxt} =>
let val th = Goal.prove goal_ctxt [] [] concl (fn {context = goal_ctxt1, ...} =>
resolve_tac goal_ctxt1 [raw_induct] 1 THEN
EVERY (maps (fn (((((_, sets, oprems, _),
vc_compat_ths), vc_compat_vs), ihyp), vs_ihypt) =>
[REPEAT (resolve_tac goal_ctxt1 [allI] 1), simp_tac (eqvt_ss goal_ctxt1) 1,
SUBPROOF (fn {prems = gprems, params, concl, context = goal_ctxt2, ...} =>
let
val (cparams', (pis, z)) =
chop (length params - length atomTs - 1) (map #2 params) ||>
(map Thm.term_of #> split_last);
val params' = map Thm.term_of cparams'
val sets' = map (apfst (curry subst_bounds (rev params'))) sets;
val pi_sets = map (fn (t, _) =>
fold_rev (NominalDatatype.mk_perm []) pis t) sets';
val (P, ts) = strip_comb (HOLogic.dest_Trueprop (Thm.term_of concl));
val gprems1 = map_filter (fn (th, t) =>
if null (preds_of ps t) then SOME th
else
map_thm goal_ctxt2 (split_conj (K o I) names)
(eresolve_tac goal_ctxt2 [conjunct1] 1) monos NONE th)
(gprems ~~ oprems);
val vc_compat_ths' = map2 (fn th => fn p =>
let
val th' = gprems1 MRS inst_params thy p th cparams';
val (h, ts) =
strip_comb (HOLogic.dest_Trueprop (Thm.concl_of th'))
in
Goal.prove goal_ctxt2 [] []
(HOLogic.mk_Trueprop (list_comb (h,
map (fold_rev (NominalDatatype.mk_perm []) pis) ts)))
(fn {context = goal_ctxt3, ...} =>
simp_tac (put_simpset HOL_basic_ss goal_ctxt3 addsimps
(fresh_star_bij @ finite_ineq)) 1 THEN resolve_tac goal_ctxt3 [th'] 1)
end) vc_compat_ths vc_compat_vs;
val (vc_compat_ths1, vc_compat_ths2) =
chop (length vc_compat_ths - length sets) vc_compat_ths';
val vc_compat_ths1' = map
(Conv.fconv_rule (Conv.arg_conv (Conv.arg_conv
(Simplifier.rewrite (eqvt_ss goal_ctxt2))))) vc_compat_ths1;
val (pis', fresh_ths1, fresh_ths2, fresh_ths3, ctxt'') = fold
(obtain_fresh_name ts sets)
(map snd sets' ~~ vc_compat_ths2) ([], [], [], [], goal_ctxt2);
fun concat_perm pi1 pi2 =
let val T = fastype_of pi1
in if T = fastype_of pi2 then
Const (\<^const_name>\<open>append\<close>, T --> T --> T) $ pi1 $ pi2
else pi2
end;
val pis'' = fold_rev (concat_perm #> map) pis' pis;
val ihyp' = inst_params thy vs_ihypt ihyp
(map (fold_rev (NominalDatatype.mk_perm [])
(pis' @ pis) #> Thm.global_cterm_of thy) params' @ [Thm.global_cterm_of thy z]);
fun mk_pi th =
Simplifier.simplify
(put_simpset HOL_basic_ss ctxt'' addsimps [@{thm id_apply}]
addsimprocs [NominalDatatype.perm_simproc])
(Simplifier.simplify (eqvt_ss ctxt'')
(fold_rev (mk_perm_bool ctxt'' o Thm.cterm_of ctxt'')
(pis' @ pis) th));
val gprems2 = map (fn (th, t) =>
if null (preds_of ps t) then mk_pi th
else
mk_pi (the (map_thm ctxt'' (inst_conj_all names ps (rev pis''))
(inst_conj_all_tac ctxt'' (length pis'')) monos (SOME t) th)))
(gprems ~~ oprems);
val perm_freshs_freshs' = map (fn (th, (_, T)) =>
th RS the (AList.lookup op = perm_freshs_freshs T))
(fresh_ths2 ~~ sets);
val th = Goal.prove ctxt'' [] []
(HOLogic.mk_Trueprop (list_comb (P $ hd ts,
map (fold_rev (NominalDatatype.mk_perm []) pis') (tl ts))))
(fn {context = goal_ctxt4, ...} =>
EVERY ([simp_tac (eqvt_ss goal_ctxt4) 1,
resolve_tac goal_ctxt4 [ihyp'] 1] @
map (fn th => resolve_tac goal_ctxt4 [th] 1) fresh_ths3 @
[REPEAT_DETERM_N (length gprems)
(simp_tac (put_simpset HOL_basic_ss goal_ctxt4
addsimps [inductive_forall_def']
addsimprocs [NominalDatatype.perm_simproc]) 1 THEN
resolve_tac goal_ctxt4 gprems2 1)]));
val final = Goal.prove ctxt'' [] [] (Thm.term_of concl)
(fn {context = goal_ctxt5, ...} =>
cut_facts_tac [th] 1 THEN full_simp_tac (put_simpset HOL_ss goal_ctxt5
addsimps vc_compat_ths1' @ fresh_ths1 @
perm_freshs_freshs') 1);
val final' = Proof_Context.export ctxt'' goal_ctxt2 [final];
in resolve_tac goal_ctxt2 final' 1 end) goal_ctxt1 1])
(prems ~~ thss ~~ vc_compat' ~~ ihyps ~~ prems'')))
in
cut_facts_tac [th] 1 THEN REPEAT (eresolve_tac goal_ctxt [conjE] 1) THEN
REPEAT (REPEAT (resolve_tac goal_ctxt [conjI, impI] 1) THEN
eresolve_tac goal_ctxt [impE] 1 THEN
assume_tac goal_ctxt 1 THEN REPEAT (eresolve_tac goal_ctxt @{thms allE_Nil} 1) THEN
asm_full_simp_tac goal_ctxt 1)
end) |>
fresh_postprocess ctxt |>
singleton (Proof_Context.export ctxt lthy);
in
ctxt'' |>
Proof.theorem NONE (fn thss => fn lthy1 =>
let
val rec_name = space_implode "_" (map Long_Name.base_name names);
val rec_qualified = Binding.qualify false rec_name;
val ind_case_names = Rule_Cases.case_names induct_cases;
val induct_cases' = Inductive.partition_rules' raw_induct
(intrs ~~ induct_cases);
val thss' = map (map (atomize_intr lthy1)) thss;
val thsss = Inductive.partition_rules' raw_induct (intrs ~~ thss');
val strong_raw_induct =
mk_ind_proof lthy1 thss' |> Inductive.rulify lthy1;
val strong_induct_atts =
- map (Attrib.internal o K)
+ map (Attrib.internal \<^here> o K)
[ind_case_names, Rule_Cases.consumes (~ (Thm.nprems_of strong_raw_induct))];
val strong_induct =
if length names > 1 then strong_raw_induct
else strong_raw_induct RSN (2, rev_mp);
val (induct_name, inducts_name) =
case alt_name of
NONE => (rec_qualified (Binding.name "strong_induct"),
rec_qualified (Binding.name "strong_inducts"))
| SOME s => (Binding.name s, Binding.name (s ^ "s"));
val ((_, [strong_induct']), lthy2) = lthy1 |> Local_Theory.note
((induct_name, strong_induct_atts), [strong_induct]);
val strong_inducts =
Project_Rule.projects lthy2 (1 upto length names) strong_induct'
in
lthy2 |>
Local_Theory.notes [((inducts_name, []),
strong_inducts |> map (fn th => ([th],
- [Attrib.internal (K ind_case_names),
- Attrib.internal (K (Rule_Cases.consumes (1 - Thm.nprems_of th)))])))] |> snd
+ [Attrib.internal \<^here> (K ind_case_names),
+ Attrib.internal \<^here> (K (Rule_Cases.consumes (1 - Thm.nprems_of th)))])))] |> snd
end)
(map (map (rulify_term thy #> rpair [])) vc_compat)
end;
(* outer syntax *)
val _ =
Outer_Syntax.local_theory_to_proof \<^command_keyword>\<open>nominal_inductive2\<close>
"prove strong induction theorem for inductive predicate involving nominal datatypes"
(Parse.name --
Scan.option (\<^keyword>\<open>(\<close> |-- Parse.!!! (Parse.name --| \<^keyword>\<open>)\<close>)) --
(Scan.optional (\<^keyword>\<open>avoids\<close> |-- Parse.enum1 "|" (Parse.name --
(\<^keyword>\<open>:\<close> |-- Parse.and_list1 Parse.term))) []) >> (fn ((name, rule_name), avoids) =>
prove_strong_ind name rule_name avoids));
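(* Hypothetical usage of the command declared above, for an inductive
predicate "typing" over a nominal datatype (all names are illustrative):
nominal_inductive2 typing
avoids T_Lam: "{x}" | T_Let: "set xs"
This opens a goal state for the freshness and finiteness side conditions and,
once they are discharged, notes typing.strong_induct and typing.strong_inducts
(or the names derived from the optional parenthesized argument). *)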
end
diff --git a/src/HOL/Nonstandard_Analysis/NSA.thy b/src/HOL/Nonstandard_Analysis/NSA.thy
--- a/src/HOL/Nonstandard_Analysis/NSA.thy
+++ b/src/HOL/Nonstandard_Analysis/NSA.thy
@@ -1,1672 +1,1672 @@
(* Title: HOL/Nonstandard_Analysis/NSA.thy
Author: Jacques D. Fleuriot, University of Cambridge
Author: Lawrence C Paulson, University of Cambridge
*)
section \<open>Infinite Numbers, Infinitesimals, Infinitely Close Relation\<close>
theory NSA
imports HyperDef "HOL-Library.Lub_Glb"
begin
definition hnorm :: "'a::real_normed_vector star \<Rightarrow> real star"
where [transfer_unfold]: "hnorm = *f* norm"
definition Infinitesimal :: "('a::real_normed_vector) star set"
where "Infinitesimal = {x. \<forall>r \<in> Reals. 0 < r \<longrightarrow> hnorm x < r}"
definition HFinite :: "('a::real_normed_vector) star set"
where "HFinite = {x. \<exists>r \<in> Reals. hnorm x < r}"
definition HInfinite :: "('a::real_normed_vector) star set"
where "HInfinite = {x. \<forall>r \<in> Reals. r < hnorm x}"
definition approx :: "'a::real_normed_vector star \<Rightarrow> 'a star \<Rightarrow> bool" (infixl "\<approx>" 50)
where "x \<approx> y \<longleftrightarrow> x - y \<in> Infinitesimal"
\<comment> \<open>the ``infinitely close'' relation\<close>
definition st :: "hypreal \<Rightarrow> hypreal"
where "st = (\<lambda>x. SOME r. x \<in> HFinite \<and> r \<in> \<real> \<and> r \<approx> x)"
\<comment> \<open>the standard part of a hyperreal\<close>
definition monad :: "'a::real_normed_vector star \<Rightarrow> 'a star set"
where "monad x = {y. x \<approx> y}"
definition galaxy :: "'a::real_normed_vector star \<Rightarrow> 'a star set"
where "galaxy x = {y. (x + -y) \<in> HFinite}"
lemma SReal_def: "\<real> \<equiv> {x. \<exists>r. x = hypreal_of_real r}"
by (simp add: Reals_def image_def)
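text \<open>A small sanity check that could be stated at this point (not part of
the original theory; the lemma name is illustrative): by the definitions
above, infinite closeness just says that the norm of the difference lies
below every positive standard real.\<close>
lemma approx_via_hnorm: "x \<approx> y \<longleftrightarrow> (\<forall>r \<in> \<real>. 0 < r \<longrightarrow> hnorm (x - y) < r)"
by (simp add: approx_def Infinitesimal_def)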
subsection \<open>Nonstandard Extension of the Norm Function\<close>
definition scaleHR :: "real star \<Rightarrow> 'a star \<Rightarrow> 'a::real_normed_vector star"
where [transfer_unfold]: "scaleHR = starfun2 scaleR"
lemma Standard_hnorm [simp]: "x \<in> Standard \<Longrightarrow> hnorm x \<in> Standard"
by (simp add: hnorm_def)
lemma star_of_norm [simp]: "star_of (norm x) = hnorm (star_of x)"
by transfer (rule refl)
lemma hnorm_ge_zero [simp]: "\<And>x::'a::real_normed_vector star. 0 \<le> hnorm x"
by transfer (rule norm_ge_zero)
lemma hnorm_eq_zero [simp]: "\<And>x::'a::real_normed_vector star. hnorm x = 0 \<longleftrightarrow> x = 0"
by transfer (rule norm_eq_zero)
lemma hnorm_triangle_ineq: "\<And>x y::'a::real_normed_vector star. hnorm (x + y) \<le> hnorm x + hnorm y"
by transfer (rule norm_triangle_ineq)
lemma hnorm_triangle_ineq3: "\<And>x y::'a::real_normed_vector star. \<bar>hnorm x - hnorm y\<bar> \<le> hnorm (x - y)"
by transfer (rule norm_triangle_ineq3)
lemma hnorm_scaleR: "\<And>x::'a::real_normed_vector star. hnorm (a *\<^sub>R x) = \<bar>star_of a\<bar> * hnorm x"
by transfer (rule norm_scaleR)
lemma hnorm_scaleHR: "\<And>a (x::'a::real_normed_vector star). hnorm (scaleHR a x) = \<bar>a\<bar> * hnorm x"
by transfer (rule norm_scaleR)
lemma hnorm_mult_ineq: "\<And>x y::'a::real_normed_algebra star. hnorm (x * y) \<le> hnorm x * hnorm y"
by transfer (rule norm_mult_ineq)
lemma hnorm_mult: "\<And>x y::'a::real_normed_div_algebra star. hnorm (x * y) = hnorm x * hnorm y"
by transfer (rule norm_mult)
lemma hnorm_hyperpow: "\<And>(x::'a::{real_normed_div_algebra} star) n. hnorm (x pow n) = hnorm x pow n"
by transfer (rule norm_power)
lemma hnorm_one [simp]: "hnorm (1::'a::real_normed_div_algebra star) = 1"
by transfer (rule norm_one)
lemma hnorm_zero [simp]: "hnorm (0::'a::real_normed_vector star) = 0"
by transfer (rule norm_zero)
lemma zero_less_hnorm_iff [simp]: "\<And>x::'a::real_normed_vector star. 0 < hnorm x \<longleftrightarrow> x \<noteq> 0"
by transfer (rule zero_less_norm_iff)
lemma hnorm_minus_cancel [simp]: "\<And>x::'a::real_normed_vector star. hnorm (- x) = hnorm x"
by transfer (rule norm_minus_cancel)
lemma hnorm_minus_commute: "\<And>a b::'a::real_normed_vector star. hnorm (a - b) = hnorm (b - a)"
by transfer (rule norm_minus_commute)
lemma hnorm_triangle_ineq2: "\<And>a b::'a::real_normed_vector star. hnorm a - hnorm b \<le> hnorm (a - b)"
by transfer (rule norm_triangle_ineq2)
lemma hnorm_triangle_ineq4: "\<And>a b::'a::real_normed_vector star. hnorm (a - b) \<le> hnorm a + hnorm b"
by transfer (rule norm_triangle_ineq4)
lemma abs_hnorm_cancel [simp]: "\<And>a::'a::real_normed_vector star. \<bar>hnorm a\<bar> = hnorm a"
by transfer (rule abs_norm_cancel)
lemma hnorm_of_hypreal [simp]: "\<And>r. hnorm (of_hypreal r::'a::real_normed_algebra_1 star) = \<bar>r\<bar>"
by transfer (rule norm_of_real)
lemma nonzero_hnorm_inverse:
"\<And>a::'a::real_normed_div_algebra star. a \<noteq> 0 \<Longrightarrow> hnorm (inverse a) = inverse (hnorm a)"
by transfer (rule nonzero_norm_inverse)
lemma hnorm_inverse:
"\<And>a::'a::{real_normed_div_algebra, division_ring} star. hnorm (inverse a) = inverse (hnorm a)"
by transfer (rule norm_inverse)
lemma hnorm_divide: "\<And>a b::'a::{real_normed_field, field} star. hnorm (a / b) = hnorm a / hnorm b"
by transfer (rule norm_divide)
lemma hypreal_hnorm_def [simp]: "\<And>r::hypreal. hnorm r = \<bar>r\<bar>"
by transfer (rule real_norm_def)
lemma hnorm_add_less:
"\<And>(x::'a::real_normed_vector star) y r s. hnorm x < r \<Longrightarrow> hnorm y < s \<Longrightarrow> hnorm (x + y) < r + s"
by transfer (rule norm_add_less)
lemma hnorm_mult_less:
"\<And>(x::'a::real_normed_algebra star) y r s. hnorm x < r \<Longrightarrow> hnorm y < s \<Longrightarrow> hnorm (x * y) < r * s"
by transfer (rule norm_mult_less)
lemma hnorm_scaleHR_less: "\<bar>x\<bar> < r \<Longrightarrow> hnorm y < s \<Longrightarrow> hnorm (scaleHR x y) < r * s"
by (simp only: hnorm_scaleHR) (simp add: mult_strict_mono')
subsection \<open>Closure Laws for the Standard Reals\<close>
lemma Reals_add_cancel: "x + y \<in> \<real> \<Longrightarrow> y \<in> \<real> \<Longrightarrow> x \<in> \<real>"
by (drule (1) Reals_diff) simp
lemma SReal_hrabs: "x \<in> \<real> \<Longrightarrow> \<bar>x\<bar> \<in> \<real>"
for x :: hypreal
by (simp add: Reals_eq_Standard)
lemma SReal_hypreal_of_real [simp]: "hypreal_of_real x \<in> \<real>"
by (simp add: Reals_eq_Standard)
lemma SReal_divide_numeral: "r \<in> \<real> \<Longrightarrow> r / (numeral w::hypreal) \<in> \<real>"
by simp
text \<open>\<open>\<epsilon>\<close> is not in Reals because it is an infinitesimal\<close>
lemma SReal_epsilon_not_mem: "\<epsilon> \<notin> \<real>"
by (auto simp: SReal_def hypreal_of_real_not_eq_epsilon [symmetric])
lemma SReal_omega_not_mem: "\<omega> \<notin> \<real>"
by (auto simp: SReal_def hypreal_of_real_not_eq_omega [symmetric])
lemma SReal_UNIV_real: "{x. hypreal_of_real x \<in> \<real>} = (UNIV::real set)"
by simp
lemma SReal_iff: "x \<in> \<real> \<longleftrightarrow> (\<exists>y. x = hypreal_of_real y)"
by (simp add: SReal_def)
lemma hypreal_of_real_image: "hypreal_of_real `(UNIV::real set) = \<real>"
by (simp add: Reals_eq_Standard Standard_def)
lemma inv_hypreal_of_real_image: "inv hypreal_of_real ` \<real> = UNIV"
by (simp add: Reals_eq_Standard Standard_def inj_star_of)
lemma SReal_dense: "x \<in> \<real> \<Longrightarrow> y \<in> \<real> \<Longrightarrow> x < y \<Longrightarrow> \<exists>r \<in> Reals. x < r \<and> r < y"
for x y :: hypreal
using dense by (fastforce simp add: SReal_def)
subsection \<open>Set of Finite Elements is a Subring of the Extended Reals\<close>
lemma HFinite_add: "x \<in> HFinite \<Longrightarrow> y \<in> HFinite \<Longrightarrow> x + y \<in> HFinite"
unfolding HFinite_def by (blast intro!: Reals_add hnorm_add_less)
lemma HFinite_mult: "x \<in> HFinite \<Longrightarrow> y \<in> HFinite \<Longrightarrow> x * y \<in> HFinite"
for x y :: "'a::real_normed_algebra star"
unfolding HFinite_def by (blast intro!: Reals_mult hnorm_mult_less)
lemma HFinite_scaleHR: "x \<in> HFinite \<Longrightarrow> y \<in> HFinite \<Longrightarrow> scaleHR x y \<in> HFinite"
by (auto simp: HFinite_def intro!: Reals_mult hnorm_scaleHR_less)
lemma HFinite_minus_iff: "- x \<in> HFinite \<longleftrightarrow> x \<in> HFinite"
by (simp add: HFinite_def)
lemma HFinite_star_of [simp]: "star_of x \<in> HFinite"
by (simp add: HFinite_def) (metis SReal_hypreal_of_real gt_ex star_of_less star_of_norm)
lemma SReal_subset_HFinite: "(\<real>::hypreal set) \<subseteq> HFinite"
by (auto simp add: SReal_def)
lemma HFiniteD: "x \<in> HFinite \<Longrightarrow> \<exists>t \<in> Reals. hnorm x < t"
by (simp add: HFinite_def)
lemma HFinite_hrabs_iff [iff]: "\<bar>x\<bar> \<in> HFinite \<longleftrightarrow> x \<in> HFinite"
for x :: hypreal
by (simp add: HFinite_def)
lemma HFinite_hnorm_iff [iff]: "hnorm x \<in> HFinite \<longleftrightarrow> x \<in> HFinite"
for x :: hypreal
by (simp add: HFinite_def)
lemma HFinite_numeral [simp]: "numeral w \<in> HFinite"
unfolding star_numeral_def by (rule HFinite_star_of)
text \<open>As always with numerals, \<open>0\<close> and \<open>1\<close> are special cases.\<close>
lemma HFinite_0 [simp]: "0 \<in> HFinite"
unfolding star_zero_def by (rule HFinite_star_of)
lemma HFinite_1 [simp]: "1 \<in> HFinite"
unfolding star_one_def by (rule HFinite_star_of)
lemma hrealpow_HFinite: "x \<in> HFinite \<Longrightarrow> x ^ n \<in> HFinite"
for x :: "'a::{real_normed_algebra,monoid_mult} star"
by (induct n) (auto intro: HFinite_mult)
lemma HFinite_bounded:
fixes x y :: hypreal
assumes "x \<in> HFinite" and y: "y \<le> x" "0 \<le> y" shows "y \<in> HFinite"
proof (cases "x \<le> 0")
case True
then have "y = 0"
using y by auto
then show ?thesis
by simp
next
case False
then show ?thesis
using assms le_less_trans by (auto simp: HFinite_def)
qed
subsection \<open>Set of Infinitesimals is a Subring of the Hyperreals\<close>
lemma InfinitesimalI: "(\<And>r. r \<in> \<real> \<Longrightarrow> 0 < r \<Longrightarrow> hnorm x < r) \<Longrightarrow> x \<in> Infinitesimal"
by (simp add: Infinitesimal_def)
lemma InfinitesimalD: "x \<in> Infinitesimal \<Longrightarrow> \<forall>r \<in> Reals. 0 < r \<longrightarrow> hnorm x < r"
by (simp add: Infinitesimal_def)
lemma InfinitesimalI2: "(\<And>r. 0 < r \<Longrightarrow> hnorm x < star_of r) \<Longrightarrow> x \<in> Infinitesimal"
by (auto simp add: Infinitesimal_def SReal_def)
lemma InfinitesimalD2: "x \<in> Infinitesimal \<Longrightarrow> 0 < r \<Longrightarrow> hnorm x < star_of r"
by (auto simp add: Infinitesimal_def SReal_def)
lemma Infinitesimal_zero [iff]: "0 \<in> Infinitesimal"
by (simp add: Infinitesimal_def)
lemma Infinitesimal_add:
assumes "x \<in> Infinitesimal" "y \<in> Infinitesimal"
shows "x + y \<in> Infinitesimal"
proof (rule InfinitesimalI)
show "hnorm (x + y) < r"
if "r \<in> \<real>" and "0 < r" for r :: "real star"
proof -
have "hnorm x < r/2" "hnorm y < r/2"
using InfinitesimalD SReal_divide_numeral assms half_gt_zero that by blast+
then show ?thesis
using hnorm_add_less by fastforce
qed
qed
lemma Infinitesimal_minus_iff [simp]: "- x \<in> Infinitesimal \<longleftrightarrow> x \<in> Infinitesimal"
by (simp add: Infinitesimal_def)
lemma Infinitesimal_hnorm_iff: "hnorm x \<in> Infinitesimal \<longleftrightarrow> x \<in> Infinitesimal"
by (simp add: Infinitesimal_def)
lemma Infinitesimal_hrabs_iff [iff]: "\<bar>x\<bar> \<in> Infinitesimal \<longleftrightarrow> x \<in> Infinitesimal"
for x :: hypreal
by (simp add: abs_if)
lemma Infinitesimal_of_hypreal_iff [simp]:
"(of_hypreal x::'a::real_normed_algebra_1 star) \<in> Infinitesimal \<longleftrightarrow> x \<in> Infinitesimal"
by (subst Infinitesimal_hnorm_iff [symmetric]) simp
lemma Infinitesimal_diff: "x \<in> Infinitesimal \<Longrightarrow> y \<in> Infinitesimal \<Longrightarrow> x - y \<in> Infinitesimal"
using Infinitesimal_add [of x "- y"] by simp
lemma Infinitesimal_mult:
fixes x y :: "'a::real_normed_algebra star"
assumes "x \<in> Infinitesimal" "y \<in> Infinitesimal"
shows "x * y \<in> Infinitesimal"
proof (rule InfinitesimalI)
show "hnorm (x * y) < r"
if "r \<in> \<real>" and "0 < r" for r :: "real star"
proof -
have "hnorm x < 1" "hnorm y < r"
using assms that by (auto simp add: InfinitesimalD)
then show ?thesis
using hnorm_mult_less by fastforce
qed
qed
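text \<open>A small sketch, relying only on the closure lemmas above, of the kind of
algebraic combination promised by the subsection title:\<close>
lemma "x \<in> Infinitesimal \<Longrightarrow> y \<in> Infinitesimal \<Longrightarrow> x * y + x \<in> Infinitesimal"
for x y :: hypreal
by (blast intro: Infinitesimal_add Infinitesimal_mult)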
lemma Infinitesimal_HFinite_mult:
fixes x y :: "'a::real_normed_algebra star"
assumes "x \<in> Infinitesimal" "y \<in> HFinite"
shows "x * y \<in> Infinitesimal"
proof (rule InfinitesimalI)
obtain t where "hnorm y < t" "t \<in> Reals"
using HFiniteD \<open>y \<in> HFinite\<close> by blast
then have "t > 0"
using hnorm_ge_zero le_less_trans by blast
show "hnorm (x * y) < r"
if "r \<in> \<real>" and "0 < r" for r :: "real star"
proof -
have "hnorm x < r/t"
by (meson InfinitesimalD Reals_divide \<open>hnorm y < t\<close> \<open>t \<in> \<real>\<close> assms(1) divide_pos_pos hnorm_ge_zero le_less_trans that)
then have "hnorm (x * y) < (r / t) * t"
using \<open>hnorm y < t\<close> hnorm_mult_less by blast
then show ?thesis
using \<open>0 < t\<close> by auto
qed
qed
lemma Infinitesimal_HFinite_scaleHR:
assumes "x \<in> Infinitesimal" "y \<in> HFinite"
shows "scaleHR x y \<in> Infinitesimal"
proof (rule InfinitesimalI)
obtain t where "hnorm y < t" "t \<in> Reals"
using HFiniteD \<open>y \<in> HFinite\<close> by blast
then have "t > 0"
using hnorm_ge_zero le_less_trans by blast
show "hnorm (scaleHR x y) < r"
if "r \<in> \<real>" and "0 < r" for r :: "real star"
proof -
have "\<bar>x\<bar> * hnorm y < (r / t) * t"
by (metis InfinitesimalD Reals_divide \<open>0 < t\<close> \<open>hnorm y < t\<close> \<open>t \<in> \<real>\<close> assms(1) divide_pos_pos hnorm_ge_zero hypreal_hnorm_def mult_strict_mono' that)
then show ?thesis
by (simp add: \<open>0 < t\<close> hnorm_scaleHR less_imp_not_eq2)
qed
qed
lemma Infinitesimal_HFinite_mult2:
fixes x y :: "'a::real_normed_algebra star"
assumes "x \<in> Infinitesimal" "y \<in> HFinite"
shows "y * x \<in> Infinitesimal"
proof (rule InfinitesimalI)
obtain t where "hnorm y < t" "t \<in> Reals"
using HFiniteD \<open>y \<in> HFinite\<close> by blast
then have "t > 0"
using hnorm_ge_zero le_less_trans by blast
show "hnorm (y * x) < r"
if "r \<in> \<real>" and "0 < r" for r :: "real star"
proof -
have "hnorm x < r/t"
by (meson InfinitesimalD Reals_divide \<open>hnorm y < t\<close> \<open>t \<in> \<real>\<close> assms(1) divide_pos_pos hnorm_ge_zero le_less_trans that)
then have "hnorm (y * x) < t * (r / t)"
using \<open>hnorm y < t\<close> hnorm_mult_less by blast
then show ?thesis
using \<open>0 < t\<close> by auto
qed
qed
lemma Infinitesimal_scaleR2:
assumes "x \<in> Infinitesimal" shows "a *\<^sub>R x \<in> Infinitesimal"
by (metis HFinite_star_of Infinitesimal_HFinite_mult2 Infinitesimal_hnorm_iff assms hnorm_scaleR hypreal_hnorm_def star_of_norm)
lemma Compl_HFinite: "- HFinite = HInfinite"
proof -
have "r < hnorm x" if *: "\<And>s. s \<in> \<real> \<Longrightarrow> s \<le> hnorm x" and "r \<in> \<real>"
for x :: "'a star" and r :: hypreal
using * [of "r+1"] \<open>r \<in> \<real>\<close> by auto
then show ?thesis
by (auto simp add: HInfinite_def HFinite_def linorder_not_less)
qed
lemma HInfinite_inverse_Infinitesimal:
"x \<in> HInfinite \<Longrightarrow> inverse x \<in> Infinitesimal"
for x :: "'a::real_normed_div_algebra star"
by (simp add: HInfinite_def InfinitesimalI hnorm_inverse inverse_less_imp_less)
lemma inverse_Infinitesimal_iff_HInfinite:
"x \<noteq> 0 \<Longrightarrow> inverse x \<in> Infinitesimal \<longleftrightarrow> x \<in> HInfinite"
for x :: "'a::real_normed_div_algebra star"
by (metis Compl_HFinite Compl_iff HInfinite_inverse_Infinitesimal InfinitesimalD Infinitesimal_HFinite_mult Reals_1 hnorm_one left_inverse less_irrefl zero_less_one)
lemma HInfiniteI: "(\<And>r. r \<in> \<real> \<Longrightarrow> r < hnorm x) \<Longrightarrow> x \<in> HInfinite"
by (simp add: HInfinite_def)
lemma HInfiniteD: "x \<in> HInfinite \<Longrightarrow> r \<in> \<real> \<Longrightarrow> r < hnorm x"
by (simp add: HInfinite_def)
lemma HInfinite_mult:
fixes x y :: "'a::real_normed_div_algebra star"
assumes "x \<in> HInfinite" "y \<in> HInfinite" shows "x * y \<in> HInfinite"
proof (rule HInfiniteI, simp only: hnorm_mult)
have "x \<noteq> 0"
using Compl_HFinite HFinite_0 assms by blast
show "r < hnorm x * hnorm y"
if "r \<in> \<real>" for r :: "real star"
proof -
have "r = r * 1"
by simp
also have "\<dots> < hnorm x * hnorm y"
by (meson HInfiniteD Reals_1 \<open>x \<noteq> 0\<close> assms le_numeral_extra(1) mult_strict_mono that zero_less_hnorm_iff)
finally show ?thesis .
qed
qed
lemma hypreal_add_zero_less_le_mono: "r < x \<Longrightarrow> 0 \<le> y \<Longrightarrow> r < x + y"
for r x y :: hypreal
by simp
lemma HInfinite_add_ge_zero: "x \<in> HInfinite \<Longrightarrow> 0 \<le> y \<Longrightarrow> 0 \<le> x \<Longrightarrow> x + y \<in> HInfinite"
for x y :: hypreal
by (auto simp: abs_if add.commute HInfinite_def)
lemma HInfinite_add_ge_zero2: "x \<in> HInfinite \<Longrightarrow> 0 \<le> y \<Longrightarrow> 0 \<le> x \<Longrightarrow> y + x \<in> HInfinite"
for x y :: hypreal
by (auto intro!: HInfinite_add_ge_zero simp add: add.commute)
lemma HInfinite_add_gt_zero: "x \<in> HInfinite \<Longrightarrow> 0 < y \<Longrightarrow> 0 < x \<Longrightarrow> x + y \<in> HInfinite"
for x y :: hypreal
by (blast intro: HInfinite_add_ge_zero order_less_imp_le)
lemma HInfinite_minus_iff: "- x \<in> HInfinite \<longleftrightarrow> x \<in> HInfinite"
by (simp add: HInfinite_def)
lemma HInfinite_add_le_zero: "x \<in> HInfinite \<Longrightarrow> y \<le> 0 \<Longrightarrow> x \<le> 0 \<Longrightarrow> x + y \<in> HInfinite"
for x y :: hypreal
by (metis (no_types, lifting) HInfinite_add_ge_zero2 HInfinite_minus_iff add.inverse_distrib_swap neg_0_le_iff_le)
lemma HInfinite_add_lt_zero: "x \<in> HInfinite \<Longrightarrow> y < 0 \<Longrightarrow> x < 0 \<Longrightarrow> x + y \<in> HInfinite"
for x y :: hypreal
by (blast intro: HInfinite_add_le_zero order_less_imp_le)
lemma not_Infinitesimal_not_zero: "x \<notin> Infinitesimal \<Longrightarrow> x \<noteq> 0"
by auto
lemma HFinite_diff_Infinitesimal_hrabs:
"x \<in> HFinite - Infinitesimal \<Longrightarrow> \<bar>x\<bar> \<in> HFinite - Infinitesimal"
for x :: hypreal
by blast
lemma hnorm_le_Infinitesimal: "e \<in> Infinitesimal \<Longrightarrow> hnorm x \<le> e \<Longrightarrow> x \<in> Infinitesimal"
by (auto simp: Infinitesimal_def abs_less_iff)
lemma hnorm_less_Infinitesimal: "e \<in> Infinitesimal \<Longrightarrow> hnorm x < e \<Longrightarrow> x \<in> Infinitesimal"
by (erule hnorm_le_Infinitesimal, erule order_less_imp_le)
lemma hrabs_le_Infinitesimal: "e \<in> Infinitesimal \<Longrightarrow> \<bar>x\<bar> \<le> e \<Longrightarrow> x \<in> Infinitesimal"
for x :: hypreal
by (erule hnorm_le_Infinitesimal) simp
lemma hrabs_less_Infinitesimal: "e \<in> Infinitesimal \<Longrightarrow> \<bar>x\<bar> < e \<Longrightarrow> x \<in> Infinitesimal"
for x :: hypreal
by (erule hnorm_less_Infinitesimal) simp
lemma Infinitesimal_interval:
"e \<in> Infinitesimal \<Longrightarrow> e' \<in> Infinitesimal \<Longrightarrow> e' < x \<Longrightarrow> x < e \<Longrightarrow> x \<in> Infinitesimal"
for x :: hypreal
by (auto simp add: Infinitesimal_def abs_less_iff)
lemma Infinitesimal_interval2:
"e \<in> Infinitesimal \<Longrightarrow> e' \<in> Infinitesimal \<Longrightarrow> e' \<le> x \<Longrightarrow> x \<le> e \<Longrightarrow> x \<in> Infinitesimal"
for x :: hypreal
by (auto intro: Infinitesimal_interval simp add: order_le_less)
lemma lemma_Infinitesimal_hyperpow: "x \<in> Infinitesimal \<Longrightarrow> 0 < N \<Longrightarrow> \<bar>x pow N\<bar> \<le> \<bar>x\<bar>"
for x :: hypreal
apply (clarsimp simp: Infinitesimal_def)
by (metis Reals_1 abs_ge_zero hyperpow_Suc_le_self2 hyperpow_hrabs hypnat_gt_zero_iff2 zero_less_one)
lemma Infinitesimal_hyperpow: "x \<in> Infinitesimal \<Longrightarrow> 0 < N \<Longrightarrow> x pow N \<in> Infinitesimal"
for x :: hypreal
using hrabs_le_Infinitesimal lemma_Infinitesimal_hyperpow by blast
lemma hrealpow_hyperpow_Infinitesimal_iff:
"(x ^ n \<in> Infinitesimal) \<longleftrightarrow> x pow (hypnat_of_nat n) \<in> Infinitesimal"
by (simp only: hyperpow_hypnat_of_nat)
lemma Infinitesimal_hrealpow: "x \<in> Infinitesimal \<Longrightarrow> 0 < n \<Longrightarrow> x ^ n \<in> Infinitesimal"
for x :: hypreal
by (simp add: hrealpow_hyperpow_Infinitesimal_iff Infinitesimal_hyperpow)
lemma not_Infinitesimal_mult:
"x \<notin> Infinitesimal \<Longrightarrow> y \<notin> Infinitesimal \<Longrightarrow> x * y \<notin> Infinitesimal"
for x y :: "'a::real_normed_div_algebra star"
by (metis (no_types, lifting) inverse_Infinitesimal_iff_HInfinite ComplI Compl_HFinite Infinitesimal_HFinite_mult divide_inverse eq_divide_imp inverse_inverse_eq mult_zero_right)
lemma Infinitesimal_mult_disj: "x * y \<in> Infinitesimal \<Longrightarrow> x \<in> Infinitesimal \<or> y \<in> Infinitesimal"
for x y :: "'a::real_normed_div_algebra star"
using not_Infinitesimal_mult by blast
lemma HFinite_Infinitesimal_not_zero: "x \<in> HFinite-Infinitesimal \<Longrightarrow> x \<noteq> 0"
by blast
lemma HFinite_Infinitesimal_diff_mult:
"x \<in> HFinite - Infinitesimal \<Longrightarrow> y \<in> HFinite - Infinitesimal \<Longrightarrow> x * y \<in> HFinite - Infinitesimal"
for x y :: "'a::real_normed_div_algebra star"
by (simp add: HFinite_mult not_Infinitesimal_mult)
lemma Infinitesimal_subset_HFinite: "Infinitesimal \<subseteq> HFinite"
using HFinite_def InfinitesimalD Reals_1 zero_less_one by blast
lemma Infinitesimal_star_of_mult: "x \<in> Infinitesimal \<Longrightarrow> x * star_of r \<in> Infinitesimal"
for x :: "'a::real_normed_algebra star"
by (erule HFinite_star_of [THEN [2] Infinitesimal_HFinite_mult])
lemma Infinitesimal_star_of_mult2: "x \<in> Infinitesimal \<Longrightarrow> star_of r * x \<in> Infinitesimal"
for x :: "'a::real_normed_algebra star"
by (erule HFinite_star_of [THEN [2] Infinitesimal_HFinite_mult2])
subsection \<open>The Infinitely Close Relation\<close>
lemma mem_infmal_iff: "x \<in> Infinitesimal \<longleftrightarrow> x \<approx> 0"
by (simp add: Infinitesimal_def approx_def)
lemma approx_minus_iff: "x \<approx> y \<longleftrightarrow> x - y \<approx> 0"
by (simp add: approx_def)
lemma approx_minus_iff2: "x \<approx> y \<longleftrightarrow> - y + x \<approx> 0"
by (simp add: approx_def add.commute)
lemma approx_refl [iff]: "x \<approx> x"
by (simp add: approx_def Infinitesimal_def)
lemma approx_sym: "x \<approx> y \<Longrightarrow> y \<approx> x"
by (metis Infinitesimal_minus_iff approx_def minus_diff_eq)
lemma approx_trans:
assumes "x \<approx> y" "y \<approx> z" shows "x \<approx> z"
proof -
have "x - y \<in> Infinitesimal" "z - y \<in> Infinitesimal"
using assms approx_def approx_sym by auto
then have "x - z \<in> Infinitesimal"
using Infinitesimal_diff by force
then show ?thesis
by (simp add: approx_def)
qed
lemma approx_trans2: "r \<approx> x \<Longrightarrow> s \<approx> x \<Longrightarrow> r \<approx> s"
by (blast intro: approx_sym approx_trans)
lemma approx_trans3: "x \<approx> r \<Longrightarrow> x \<approx> s \<Longrightarrow> r \<approx> s"
by (blast intro: approx_sym approx_trans)
lemma approx_reorient: "x \<approx> y \<longleftrightarrow> y \<approx> x"
by (blast intro: approx_sym)
text \<open>Reorientation simplification procedure: reorients (polymorphic)
\<open>0 \<approx> x\<close>, \<open>1 \<approx> x\<close> and \<open>numeral w \<approx> x\<close>, provided \<open>x\<close> is not itself \<open>0\<close>, \<open>1\<close> or a numeral.\<close>
simproc_setup approx_reorient_simproc
("0 \<approx> x" | "1 \<approx> y" | "numeral w \<approx> z" | "- 1 \<approx> y" | "- numeral w \<approx> r") =
\<open>
let val rule = @{thm approx_reorient} RS eq_reflection
- fun proc phi ss ct =
+ fun proc ct =
case Thm.term_of ct of
_ $ t $ u => if can HOLogic.dest_number u then NONE
else if can HOLogic.dest_number t then SOME rule else NONE
| _ => NONE
- in proc end
+ in K (K proc) end
\<close>
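text \<open>For illustration: assuming the simproc is installed in the default simpset,
a goal of this shape should be provable by \<open>simp\<close> alone, since the left-hand side
is reoriented first:\<close>
lemma "1 \<approx> (y::hypreal) \<longleftrightarrow> y \<approx> 1"
by simp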
lemma Infinitesimal_approx_minus: "x - y \<in> Infinitesimal \<longleftrightarrow> x \<approx> y"
by (simp add: approx_minus_iff [symmetric] mem_infmal_iff)
lemma approx_monad_iff: "x \<approx> y \<longleftrightarrow> monad x = monad y"
apply (simp add: monad_def set_eq_iff)
using approx_reorient approx_trans by blast
lemma Infinitesimal_approx: "x \<in> Infinitesimal \<Longrightarrow> y \<in> Infinitesimal \<Longrightarrow> x \<approx> y"
by (simp add: Infinitesimal_diff approx_def)
lemma approx_add: "a \<approx> b \<Longrightarrow> c \<approx> d \<Longrightarrow> a + c \<approx> b + d"
proof (unfold approx_def)
assume inf: "a - b \<in> Infinitesimal" "c - d \<in> Infinitesimal"
have "a + c - (b + d) = (a - b) + (c - d)" by simp
also have "... \<in> Infinitesimal"
using inf by (rule Infinitesimal_add)
finally show "a + c - (b + d) \<in> Infinitesimal" .
qed
lemma approx_minus: "a \<approx> b \<Longrightarrow> - a \<approx> - b"
by (metis approx_def approx_sym minus_diff_eq minus_diff_minus)
lemma approx_minus2: "- a \<approx> - b \<Longrightarrow> a \<approx> b"
by (auto dest: approx_minus)
lemma approx_minus_cancel [simp]: "- a \<approx> - b \<longleftrightarrow> a \<approx> b"
by (blast intro: approx_minus approx_minus2)
lemma approx_add_minus: "a \<approx> b \<Longrightarrow> c \<approx> d \<Longrightarrow> a + - c \<approx> b + - d"
by (blast intro!: approx_add approx_minus)
lemma approx_diff: "a \<approx> b \<Longrightarrow> c \<approx> d \<Longrightarrow> a - c \<approx> b - d"
using approx_add [of a b "- c" "- d"] by simp
lemma approx_mult1: "a \<approx> b \<Longrightarrow> c \<in> HFinite \<Longrightarrow> a * c \<approx> b * c"
for a b c :: "'a::real_normed_algebra star"
by (simp add: approx_def Infinitesimal_HFinite_mult left_diff_distrib [symmetric])
lemma approx_mult2: "a \<approx> b \<Longrightarrow> c \<in> HFinite \<Longrightarrow> c * a \<approx> c * b"
for a b c :: "'a::real_normed_algebra star"
by (simp add: approx_def Infinitesimal_HFinite_mult2 right_diff_distrib [symmetric])
lemma approx_mult_subst: "u \<approx> v * x \<Longrightarrow> x \<approx> y \<Longrightarrow> v \<in> HFinite \<Longrightarrow> u \<approx> v * y"
for u v x y :: "'a::real_normed_algebra star"
by (blast intro: approx_mult2 approx_trans)
lemma approx_mult_subst2: "u \<approx> x * v \<Longrightarrow> x \<approx> y \<Longrightarrow> v \<in> HFinite \<Longrightarrow> u \<approx> y * v"
for u v x y :: "'a::real_normed_algebra star"
by (blast intro: approx_mult1 approx_trans)
lemma approx_mult_subst_star_of: "u \<approx> x * star_of v \<Longrightarrow> x \<approx> y \<Longrightarrow> u \<approx> y * star_of v"
for u x y :: "'a::real_normed_algebra star"
by (auto intro: approx_mult_subst2)
lemma approx_eq_imp: "a = b \<Longrightarrow> a \<approx> b"
by (simp add: approx_def)
lemma Infinitesimal_minus_approx: "x \<in> Infinitesimal \<Longrightarrow> - x \<approx> x"
by (blast intro: Infinitesimal_minus_iff [THEN iffD2] mem_infmal_iff [THEN iffD1] approx_trans2)
lemma bex_Infinitesimal_iff: "(\<exists>y \<in> Infinitesimal. x - z = y) \<longleftrightarrow> x \<approx> z"
by (simp add: approx_def)
lemma bex_Infinitesimal_iff2: "(\<exists>y \<in> Infinitesimal. x = z + y) \<longleftrightarrow> x \<approx> z"
by (force simp add: bex_Infinitesimal_iff [symmetric])
lemma Infinitesimal_add_approx: "y \<in> Infinitesimal \<Longrightarrow> x + y = z \<Longrightarrow> x \<approx> z"
using approx_sym bex_Infinitesimal_iff2 by blast
lemma Infinitesimal_add_approx_self: "y \<in> Infinitesimal \<Longrightarrow> x \<approx> x + y"
by (simp add: Infinitesimal_add_approx)
lemma Infinitesimal_add_approx_self2: "y \<in> Infinitesimal \<Longrightarrow> x \<approx> y + x"
by (auto dest: Infinitesimal_add_approx_self simp add: add.commute)
lemma Infinitesimal_add_minus_approx_self: "y \<in> Infinitesimal \<Longrightarrow> x \<approx> x + - y"
by (blast intro!: Infinitesimal_add_approx_self Infinitesimal_minus_iff [THEN iffD2])
lemma Infinitesimal_add_cancel: "y \<in> Infinitesimal \<Longrightarrow> x + y \<approx> z \<Longrightarrow> x \<approx> z"
using Infinitesimal_add_approx approx_trans by blast
lemma Infinitesimal_add_right_cancel: "y \<in> Infinitesimal \<Longrightarrow> x \<approx> z + y \<Longrightarrow> x \<approx> z"
by (metis Infinitesimal_add_approx_self approx_monad_iff)
lemma approx_add_left_cancel: "d + b \<approx> d + c \<Longrightarrow> b \<approx> c"
by (metis add_diff_cancel_left bex_Infinitesimal_iff)
lemma approx_add_right_cancel: "b + d \<approx> c + d \<Longrightarrow> b \<approx> c"
by (simp add: approx_def)
lemma approx_add_mono1: "b \<approx> c \<Longrightarrow> d + b \<approx> d + c"
by (simp add: approx_add)
lemma approx_add_mono2: "b \<approx> c \<Longrightarrow> b + a \<approx> c + a"
by (simp add: add.commute approx_add_mono1)
lemma approx_add_left_iff [simp]: "a + b \<approx> a + c \<longleftrightarrow> b \<approx> c"
by (fast elim: approx_add_left_cancel approx_add_mono1)
lemma approx_add_right_iff [simp]: "b + a \<approx> c + a \<longleftrightarrow> b \<approx> c"
by (simp add: add.commute)
lemma approx_HFinite: "x \<in> HFinite \<Longrightarrow> x \<approx> y \<Longrightarrow> y \<in> HFinite"
by (metis HFinite_add Infinitesimal_subset_HFinite approx_sym subsetD bex_Infinitesimal_iff2)
lemma approx_star_of_HFinite: "x \<approx> star_of D \<Longrightarrow> x \<in> HFinite"
by (rule approx_sym [THEN [2] approx_HFinite], auto)
lemma approx_mult_HFinite: "a \<approx> b \<Longrightarrow> c \<approx> d \<Longrightarrow> b \<in> HFinite \<Longrightarrow> d \<in> HFinite \<Longrightarrow> a * c \<approx> b * d"
for a b c d :: "'a::real_normed_algebra star"
by (meson approx_HFinite approx_mult2 approx_mult_subst2 approx_sym)
lemma scaleHR_left_diff_distrib: "\<And>a b x. scaleHR (a - b) x = scaleHR a x - scaleHR b x"
by transfer (rule scaleR_left_diff_distrib)
lemma approx_scaleR1: "a \<approx> star_of b \<Longrightarrow> c \<in> HFinite \<Longrightarrow> scaleHR a c \<approx> b *\<^sub>R c"
unfolding approx_def
by (metis Infinitesimal_HFinite_scaleHR scaleHR_def scaleHR_left_diff_distrib star_scaleR_def starfun2_star_of)
lemma approx_scaleR2: "a \<approx> b \<Longrightarrow> c *\<^sub>R a \<approx> c *\<^sub>R b"
by (simp add: approx_def Infinitesimal_scaleR2 scaleR_right_diff_distrib [symmetric])
lemma approx_scaleR_HFinite: "a \<approx> star_of b \<Longrightarrow> c \<approx> d \<Longrightarrow> d \<in> HFinite \<Longrightarrow> scaleHR a c \<approx> b *\<^sub>R d"
by (meson approx_HFinite approx_scaleR1 approx_scaleR2 approx_sym approx_trans)
lemma approx_mult_star_of: "a \<approx> star_of b \<Longrightarrow> c \<approx> star_of d \<Longrightarrow> a * c \<approx> star_of b * star_of d"
for a c :: "'a::real_normed_algebra star"
by (blast intro!: approx_mult_HFinite approx_star_of_HFinite HFinite_star_of)
lemma approx_SReal_mult_cancel_zero:
fixes a x :: hypreal
assumes "a \<in> \<real>" "a \<noteq> 0" "a * x \<approx> 0" shows "x \<approx> 0"
proof -
have "inverse a \<in> HFinite"
using Reals_inverse SReal_subset_HFinite assms(1) by blast
then show ?thesis
using assms by (auto dest: approx_mult2 simp add: mult.assoc [symmetric])
qed
lemma approx_mult_SReal1: "a \<in> \<real> \<Longrightarrow> x \<approx> 0 \<Longrightarrow> x * a \<approx> 0"
for a x :: hypreal
by (auto dest: SReal_subset_HFinite [THEN subsetD] approx_mult1)
lemma approx_mult_SReal2: "a \<in> \<real> \<Longrightarrow> x \<approx> 0 \<Longrightarrow> a * x \<approx> 0"
for a x :: hypreal
by (auto dest: SReal_subset_HFinite [THEN subsetD] approx_mult2)
lemma approx_mult_SReal_zero_cancel_iff [simp]: "a \<in> \<real> \<Longrightarrow> a \<noteq> 0 \<Longrightarrow> a * x \<approx> 0 \<longleftrightarrow> x \<approx> 0"
for a x :: hypreal
by (blast intro: approx_SReal_mult_cancel_zero approx_mult_SReal2)
lemma approx_SReal_mult_cancel:
fixes a w z :: hypreal
assumes "a \<in> \<real>" "a \<noteq> 0" "a * w \<approx> a * z" shows "w \<approx> z"
proof -
have "inverse a \<in> HFinite"
using Reals_inverse SReal_subset_HFinite assms(1) by blast
then show ?thesis
using assms by (auto dest: approx_mult2 simp add: mult.assoc [symmetric])
qed
lemma approx_SReal_mult_cancel_iff1 [simp]: "a \<in> \<real> \<Longrightarrow> a \<noteq> 0 \<Longrightarrow> a * w \<approx> a * z \<longleftrightarrow> w \<approx> z"
for a w z :: hypreal
by (meson SReal_subset_HFinite approx_SReal_mult_cancel approx_mult2 subsetD)
lemma approx_le_bound:
fixes z :: hypreal
assumes "z \<le> f" " f \<approx> g" "g \<le> z" shows "f \<approx> z"
proof -
obtain y where "z \<le> g + y" and "y \<in> Infinitesimal" "f = g + y"
using assms bex_Infinitesimal_iff2 by auto
then have "z - g \<in> Infinitesimal"
using assms(3) hrabs_le_Infinitesimal by auto
then show ?thesis
by (metis approx_def approx_trans2 assms(2))
qed
lemma approx_hnorm: "x \<approx> y \<Longrightarrow> hnorm x \<approx> hnorm y"
for x y :: "'a::real_normed_vector star"
proof (unfold approx_def)
assume "x - y \<in> Infinitesimal"
then have "hnorm (x - y) \<in> Infinitesimal"
by (simp only: Infinitesimal_hnorm_iff)
moreover have "(0::real star) \<in> Infinitesimal"
by (rule Infinitesimal_zero)
moreover have "0 \<le> \<bar>hnorm x - hnorm y\<bar>"
by (rule abs_ge_zero)
moreover have "\<bar>hnorm x - hnorm y\<bar> \<le> hnorm (x - y)"
by (rule hnorm_triangle_ineq3)
ultimately have "\<bar>hnorm x - hnorm y\<bar> \<in> Infinitesimal"
by (rule Infinitesimal_interval2)
then show "hnorm x - hnorm y \<in> Infinitesimal"
by (simp only: Infinitesimal_hrabs_iff)
qed
subsection \<open>Zero is the Only Infinitesimal that is also a Real\<close>
lemma Infinitesimal_less_SReal: "x \<in> \<real> \<Longrightarrow> y \<in> Infinitesimal \<Longrightarrow> 0 < x \<Longrightarrow> y < x"
for x y :: hypreal
using InfinitesimalD by fastforce
lemma Infinitesimal_less_SReal2: "y \<in> Infinitesimal \<Longrightarrow> \<forall>r \<in> Reals. 0 < r \<longrightarrow> y < r"
for y :: hypreal
by (blast intro: Infinitesimal_less_SReal)
lemma SReal_not_Infinitesimal: "0 < y \<Longrightarrow> y \<in> \<real> \<Longrightarrow> y \<notin> Infinitesimal"
for y :: hypreal
by (auto simp add: Infinitesimal_def abs_if)
lemma SReal_minus_not_Infinitesimal: "y < 0 \<Longrightarrow> y \<in> \<real> \<Longrightarrow> y \<notin> Infinitesimal"
for y :: hypreal
using Infinitesimal_minus_iff Reals_minus SReal_not_Infinitesimal neg_0_less_iff_less by blast
lemma SReal_Int_Infinitesimal_zero: "\<real> Int Infinitesimal = {0::hypreal}"
proof -
have "x = 0" if "x \<in> \<real>" "x \<in> Infinitesimal" for x :: "real star"
using that SReal_minus_not_Infinitesimal SReal_not_Infinitesimal not_less_iff_gr_or_eq by blast
then show ?thesis
by auto
qed
lemma SReal_Infinitesimal_zero: "x \<in> \<real> \<Longrightarrow> x \<in> Infinitesimal \<Longrightarrow> x = 0"
for x :: hypreal
using SReal_Int_Infinitesimal_zero by blast
lemma SReal_HFinite_diff_Infinitesimal: "x \<in> \<real> \<Longrightarrow> x \<noteq> 0 \<Longrightarrow> x \<in> HFinite - Infinitesimal"
for x :: hypreal
by (auto dest: SReal_Infinitesimal_zero SReal_subset_HFinite [THEN subsetD])
lemma hypreal_of_real_HFinite_diff_Infinitesimal:
"hypreal_of_real x \<noteq> 0 \<Longrightarrow> hypreal_of_real x \<in> HFinite - Infinitesimal"
by (rule SReal_HFinite_diff_Infinitesimal) auto
lemma star_of_Infinitesimal_iff_0 [iff]: "star_of x \<in> Infinitesimal \<longleftrightarrow> x = 0"
proof
show "x = 0" if "star_of x \<in> Infinitesimal"
proof -
have "hnorm (star_n (\<lambda>n. x)) \<in> Standard"
by (metis Reals_eq_Standard SReal_iff star_of_def star_of_norm)
then show ?thesis
by (metis InfinitesimalD2 less_irrefl star_of_norm that zero_less_norm_iff)
qed
qed auto
lemma star_of_HFinite_diff_Infinitesimal: "x \<noteq> 0 \<Longrightarrow> star_of x \<in> HFinite - Infinitesimal"
by simp
lemma numeral_not_Infinitesimal [simp]:
"numeral w \<noteq> (0::hypreal) \<Longrightarrow> (numeral w :: hypreal) \<notin> Infinitesimal"
by (fast dest: Reals_numeral [THEN SReal_Infinitesimal_zero])
text \<open>Again \<open>1\<close> is a special case, but this time not \<open>0\<close>, since \<open>0\<close> is itself infinitesimal.\<close>
lemma one_not_Infinitesimal [simp]:
"(1::'a::{real_normed_vector,zero_neq_one} star) \<notin> Infinitesimal"
by (metis star_of_Infinitesimal_iff_0 star_one_def zero_neq_one)
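text \<open>A small sketch combining the two facts just noted: \<open>0\<close> is infinitesimal
while \<open>1\<close> is not.\<close>
lemma "(0::hypreal) \<in> Infinitesimal \<and> (1::hypreal) \<notin> Infinitesimal"
by simp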
lemma approx_SReal_not_zero: "y \<in> \<real> \<Longrightarrow> x \<approx> y \<Longrightarrow> y \<noteq> 0 \<Longrightarrow> x \<noteq> 0"
for x y :: hypreal
using SReal_Infinitesimal_zero approx_sym mem_infmal_iff by auto
lemma HFinite_diff_Infinitesimal_approx:
"x \<approx> y \<Longrightarrow> y \<in> HFinite - Infinitesimal \<Longrightarrow> x \<in> HFinite - Infinitesimal"
by (meson Diff_iff approx_HFinite approx_sym approx_trans3 mem_infmal_iff)
text \<open>The premise \<open>y \<noteq> 0\<close> is essential; otherwise \<open>x / y = 0\<close> and we lose the
\<open>HFinite\<close> premise.\<close>
lemma Infinitesimal_ratio:
"y \<noteq> 0 \<Longrightarrow> y \<in> Infinitesimal \<Longrightarrow> x / y \<in> HFinite \<Longrightarrow> x \<in> Infinitesimal"
for x y :: "'a::{real_normed_div_algebra,field} star"
using Infinitesimal_HFinite_mult by fastforce
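text \<open>For illustration of the degenerate case mentioned above: with \<open>y = 0\<close> the
quotient collapses, so the \<open>HFinite\<close> premise says nothing about \<open>x\<close>.\<close>
lemma "(x::hypreal) / 0 = 0"
by simp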
lemma Infinitesimal_SReal_divide: "x \<in> Infinitesimal \<Longrightarrow> y \<in> \<real> \<Longrightarrow> x / y \<in> Infinitesimal"
for x y :: hypreal
by (metis HFinite_star_of Infinitesimal_HFinite_mult Reals_inverse SReal_iff divide_inverse)
section \<open>Standard Part Theorem\<close>
text \<open>
Every finite \<open>x \<in> R*\<close> is infinitely close to a unique real number
(i.e. a member of \<open>Reals\<close>).
\<close>
subsection \<open>Uniqueness: Two Infinitely Close Reals are Equal\<close>
lemma star_of_approx_iff [simp]: "star_of x \<approx> star_of y \<longleftrightarrow> x = y"
by (metis approx_def right_minus_eq star_of_Infinitesimal_iff_0 star_of_simps(2))
lemma SReal_approx_iff: "x \<in> \<real> \<Longrightarrow> y \<in> \<real> \<Longrightarrow> x \<approx> y \<longleftrightarrow> x = y"
for x y :: hypreal
by (meson Reals_diff SReal_Infinitesimal_zero approx_def approx_refl right_minus_eq)
lemma numeral_approx_iff [simp]:
"(numeral v \<approx> (numeral w :: 'a::{numeral,real_normed_vector} star)) = (numeral v = (numeral w :: 'a))"
by (metis star_of_approx_iff star_of_numeral)
text \<open>And also for \<open>0 \<approx> numeral w\<close> and \<open>1 \<approx> numeral w\<close>, as well as \<open>numeral w \<approx> 0\<close> and \<open>numeral w \<approx> 1\<close>.\<close>
lemma [simp]:
"(numeral w \<approx> (0::'a::{numeral,real_normed_vector} star)) = (numeral w = (0::'a))"
"((0::'a::{numeral,real_normed_vector} star) \<approx> numeral w) = (numeral w = (0::'a))"
"(numeral w \<approx> (1::'b::{numeral,one,real_normed_vector} star)) = (numeral w = (1::'b))"
"((1::'b::{numeral,one,real_normed_vector} star) \<approx> numeral w) = (numeral w = (1::'b))"
"\<not> (0 \<approx> (1::'c::{zero_neq_one,real_normed_vector} star))"
"\<not> (1 \<approx> (0::'c::{zero_neq_one,real_normed_vector} star))"
unfolding star_numeral_def star_zero_def star_one_def star_of_approx_iff
by (auto intro: sym)
lemma star_of_approx_numeral_iff [simp]: "star_of k \<approx> numeral w \<longleftrightarrow> k = numeral w"
by (subst star_of_approx_iff [symmetric]) auto
lemma star_of_approx_zero_iff [simp]: "star_of k \<approx> 0 \<longleftrightarrow> k = 0"
by (simp_all add: star_of_approx_iff [symmetric])
lemma star_of_approx_one_iff [simp]: "star_of k \<approx> 1 \<longleftrightarrow> k = 1"
by (simp_all add: star_of_approx_iff [symmetric])
lemma approx_unique_real: "r \<in> \<real> \<Longrightarrow> s \<in> \<real> \<Longrightarrow> r \<approx> x \<Longrightarrow> s \<approx> x \<Longrightarrow> r = s"
for r s :: hypreal
by (blast intro: SReal_approx_iff [THEN iffD1] approx_trans2)
subsection \<open>Existence of Unique Real Infinitely Close\<close>
subsubsection \<open>Lifting of the Ub and Lub Properties\<close>
lemma hypreal_of_real_isUb_iff: "isUb \<real> (hypreal_of_real ` Q) (hypreal_of_real Y) = isUb UNIV Q Y"
for Q :: "real set" and Y :: real
by (simp add: isUb_def setle_def)
lemma hypreal_of_real_isLub_iff:
"isLub \<real> (hypreal_of_real ` Q) (hypreal_of_real Y) = isLub (UNIV :: real set) Q Y" (is "?lhs = ?rhs")
for Q :: "real set" and Y :: real
proof
assume ?lhs
then show ?rhs
by (simp add: isLub_def leastP_def) (metis hypreal_of_real_isUb_iff mem_Collect_eq setge_def star_of_le)
next
assume ?rhs
then show ?lhs
apply (simp add: isLub_def leastP_def hypreal_of_real_isUb_iff setge_def)
by (metis SReal_iff hypreal_of_real_isUb_iff isUb_def star_of_le)
qed
lemma lemma_isUb_hypreal_of_real: "isUb \<real> P Y \<Longrightarrow> \<exists>Yo. isUb \<real> P (hypreal_of_real Yo)"
by (auto simp add: SReal_iff isUb_def)
lemma lemma_isLub_hypreal_of_real: "isLub \<real> P Y \<Longrightarrow> \<exists>Yo. isLub \<real> P (hypreal_of_real Yo)"
by (auto simp add: isLub_def leastP_def isUb_def SReal_iff)
lemma SReal_complete:
fixes P :: "hypreal set"
assumes "isUb \<real> P Y" "P \<subseteq> \<real>" "P \<noteq> {}"
shows "\<exists>t. isLub \<real> P t"
proof -
obtain Q where "P = hypreal_of_real ` Q"
by (metis \<open>P \<subseteq> \<real>\<close> hypreal_of_real_image subset_imageE)
then show ?thesis
by (metis assms(1) \<open>P \<noteq> {}\<close> equals0I hypreal_of_real_isLub_iff hypreal_of_real_isUb_iff image_empty lemma_isUb_hypreal_of_real reals_complete)
qed
text \<open>Lemmas about lubs.\<close>
lemma lemma_st_part_lub:
fixes x :: hypreal
assumes "x \<in> HFinite"
shows "\<exists>t. isLub \<real> {s. s \<in> \<real> \<and> s < x} t"
proof -
obtain t where t: "t \<in> \<real>" "hnorm x < t"
using HFiniteD assms by blast
then have "isUb \<real> {s. s \<in> \<real> \<and> s < x} t"
by (simp add: abs_less_iff isUbI le_less_linear less_imp_not_less setleI)
moreover have "\<exists>y. y \<in> \<real> \<and> y < x"
using t by (rule_tac x = "-t" in exI) (auto simp add: abs_less_iff)
ultimately show ?thesis
using SReal_complete by fastforce
qed
lemma hypreal_setle_less_trans: "S *<= x \<Longrightarrow> x < y \<Longrightarrow> S *<= y"
for x y :: hypreal
by (meson le_less_trans less_imp_le setle_def)
lemma hypreal_gt_isUb: "isUb R S x \<Longrightarrow> x < y \<Longrightarrow> y \<in> R \<Longrightarrow> isUb R S y"
for x y :: hypreal
using hypreal_setle_less_trans isUb_def by blast
lemma lemma_SReal_ub: "x \<in> \<real> \<Longrightarrow> isUb \<real> {s. s \<in> \<real> \<and> s < x} x"
for x :: hypreal
by (auto intro: isUbI setleI order_less_imp_le)
lemma lemma_SReal_lub:
fixes x :: hypreal
assumes "x \<in> \<real>" shows "isLub \<real> {s. s \<in> \<real> \<and> s < x} x"
proof -
have "x \<le> y" if "isUb \<real> {s \<in> \<real>. s < x} y" for y
proof -
have "y \<in> \<real>"
using isUbD2a that by blast
show ?thesis
proof (cases x y rule: linorder_cases)
case greater
then obtain r where "y < r" "r < x"
using dense by blast
then show ?thesis
using isUbD [OF that]
by simp (meson SReal_dense \<open>y \<in> \<real>\<close> assms greater not_le)
qed auto
qed
with assms show ?thesis
by (simp add: isLubI2 isUbI setgeI setleI)
qed
lemma lemma_st_part_major:
fixes x r t :: hypreal
assumes x: "x \<in> HFinite" and r: "r \<in> \<real>" "0 < r" and t: "isLub \<real> {s. s \<in> \<real> \<and> s < x} t"
shows "\<bar>x - t\<bar> < r"
proof -
have "t \<in> \<real>"
using isLubD1a t by blast
have lemma_st_part_gt_ub: "x < r \<Longrightarrow> r \<in> \<real> \<Longrightarrow> isUb \<real> {s. s \<in> \<real> \<and> s < x} r"
for r :: hypreal
by (auto dest: order_less_trans intro: order_less_imp_le intro!: isUbI setleI)
have "isUb \<real> {s \<in> \<real>. s < x} t"
by (simp add: t isLub_isUb)
then have "\<not> r + t < x"
by (metis (mono_tags, lifting) Reals_add \<open>t \<in> \<real>\<close> add_le_same_cancel2 isUbD leD mem_Collect_eq r)
then have "x - t \<le> r"
by simp
moreover have "\<not> x < t - r"
using lemma_st_part_gt_ub isLub_le_isUb \<open>t \<in> \<real>\<close> r t x by fastforce
then have "- (x - t) \<le> r"
by linarith
moreover have False if "x - t = r \<or> - (x - t) = r"
proof -
have "x \<in> \<real>"
by (metis \<open>t \<in> \<real>\<close> \<open>r \<in> \<real>\<close> that Reals_add_cancel Reals_minus_iff add_uminus_conv_diff)
then have "isLub \<real> {s \<in> \<real>. s < x} x"
by (rule lemma_SReal_lub)
then show False
using r t that x isLub_unique by force
qed
ultimately show ?thesis
using abs_less_iff dual_order.order_iff_strict by blast
qed
lemma lemma_st_part_major2:
"x \<in> HFinite \<Longrightarrow> isLub \<real> {s. s \<in> \<real> \<and> s < x} t \<Longrightarrow> \<forall>r \<in> Reals. 0 < r \<longrightarrow> \<bar>x - t\<bar> < r"
for x t :: hypreal
by (blast dest!: lemma_st_part_major)
text \<open>Existence of a real number infinitely close to any finite hyperreal: the Standard Part Theorem.\<close>
lemma lemma_st_part_Ex: "x \<in> HFinite \<Longrightarrow> \<exists>t\<in>Reals. \<forall>r \<in> Reals. 0 < r \<longrightarrow> \<bar>x - t\<bar> < r"
for x :: hypreal
by (meson isLubD1a lemma_st_part_lub lemma_st_part_major2)
lemma st_part_Ex: "x \<in> HFinite \<Longrightarrow> \<exists>t\<in>Reals. x \<approx> t"
for x :: hypreal
by (metis InfinitesimalI approx_def hypreal_hnorm_def lemma_st_part_Ex)
text \<open>There is a unique real infinitely close.\<close>
lemma st_part_Ex1: "x \<in> HFinite \<Longrightarrow> \<exists>!t::hypreal. t \<in> \<real> \<and> x \<approx> t"
by (meson SReal_approx_iff approx_trans2 st_part_Ex)
subsection \<open>Finite, Infinite and Infinitesimal\<close>
lemma HFinite_Int_HInfinite_empty [simp]: "HFinite Int HInfinite = {}"
using Compl_HFinite by blast
lemma HFinite_not_HInfinite:
assumes x: "x \<in> HFinite" shows "x \<notin> HInfinite"
using Compl_HFinite x by blast
lemma not_HFinite_HInfinite: "x \<notin> HFinite \<Longrightarrow> x \<in> HInfinite"
using Compl_HFinite by blast
lemma HInfinite_HFinite_disj: "x \<in> HInfinite \<or> x \<in> HFinite"
by (blast intro: not_HFinite_HInfinite)
lemma HInfinite_HFinite_iff: "x \<in> HInfinite \<longleftrightarrow> x \<notin> HFinite"
by (blast dest: HFinite_not_HInfinite not_HFinite_HInfinite)
lemma HFinite_HInfinite_iff: "x \<in> HFinite \<longleftrightarrow> x \<notin> HInfinite"
by (simp add: HInfinite_HFinite_iff)
lemma HInfinite_diff_HFinite_Infinitesimal_disj:
"x \<notin> Infinitesimal \<Longrightarrow> x \<in> HInfinite \<or> x \<in> HFinite - Infinitesimal"
by (fast intro: not_HFinite_HInfinite)
lemma HFinite_inverse: "x \<in> HFinite \<Longrightarrow> x \<notin> Infinitesimal \<Longrightarrow> inverse x \<in> HFinite"
for x :: "'a::real_normed_div_algebra star"
using HInfinite_inverse_Infinitesimal not_HFinite_HInfinite by force
lemma HFinite_inverse2: "x \<in> HFinite - Infinitesimal \<Longrightarrow> inverse x \<in> HFinite"
for x :: "'a::real_normed_div_algebra star"
by (blast intro: HFinite_inverse)
text \<open>In fact a stronger statement is possible, as the next lemma shows.\<close>
lemma Infinitesimal_inverse_HFinite: "x \<notin> Infinitesimal \<Longrightarrow> inverse x \<in> HFinite"
for x :: "'a::real_normed_div_algebra star"
using HFinite_HInfinite_iff HInfinite_inverse_Infinitesimal by fastforce
lemma HFinite_not_Infinitesimal_inverse:
"x \<in> HFinite - Infinitesimal \<Longrightarrow> inverse x \<in> HFinite - Infinitesimal"
for x :: "'a::real_normed_div_algebra star"
using HFinite_Infinitesimal_not_zero HFinite_inverse2 Infinitesimal_HFinite_mult2 by fastforce
lemma approx_inverse:
fixes x y :: "'a::real_normed_div_algebra star"
assumes "x \<approx> y" and y: "y \<in> HFinite - Infinitesimal" shows "inverse x \<approx> inverse y"
proof -
have x: "x \<in> HFinite - Infinitesimal"
using HFinite_diff_Infinitesimal_approx assms(1) y by blast
with y HFinite_inverse2 have "inverse x \<in> HFinite" "inverse y \<in> HFinite"
by blast+
then have "inverse y * x \<approx> 1"
by (metis Diff_iff approx_mult2 assms(1) left_inverse not_Infinitesimal_not_zero y)
then show ?thesis
by (metis (no_types, lifting) DiffD2 HFinite_Infinitesimal_not_zero Infinitesimal_mult_disj x approx_def approx_sym left_diff_distrib left_inverse)
qed
(*Used for NSLIM_inverse, NSLIMSEQ_inverse*)
lemmas star_of_approx_inverse = star_of_HFinite_diff_Infinitesimal [THEN [2] approx_inverse]
lemmas hypreal_of_real_approx_inverse = hypreal_of_real_HFinite_diff_Infinitesimal [THEN [2] approx_inverse]
lemma inverse_add_Infinitesimal_approx:
"x \<in> HFinite - Infinitesimal \<Longrightarrow> h \<in> Infinitesimal \<Longrightarrow> inverse (x + h) \<approx> inverse x"
for x h :: "'a::real_normed_div_algebra star"
by (auto intro: approx_inverse approx_sym Infinitesimal_add_approx_self)
lemma inverse_add_Infinitesimal_approx2:
"x \<in> HFinite - Infinitesimal \<Longrightarrow> h \<in> Infinitesimal \<Longrightarrow> inverse (h + x) \<approx> inverse x"
for x h :: "'a::real_normed_div_algebra star"
by (metis add.commute inverse_add_Infinitesimal_approx)
lemma inverse_add_Infinitesimal_approx_Infinitesimal:
"x \<in> HFinite - Infinitesimal \<Longrightarrow> h \<in> Infinitesimal \<Longrightarrow> inverse (x + h) - inverse x \<approx> h"
for x h :: "'a::real_normed_div_algebra star"
by (meson Infinitesimal_approx bex_Infinitesimal_iff inverse_add_Infinitesimal_approx)
lemma Infinitesimal_square_iff: "x \<in> Infinitesimal \<longleftrightarrow> x * x \<in> Infinitesimal"
for x :: "'a::real_normed_div_algebra star"
using Infinitesimal_mult Infinitesimal_mult_disj by auto
declare Infinitesimal_square_iff [symmetric, simp]
lemma HFinite_square_iff [simp]: "x * x \<in> HFinite \<longleftrightarrow> x \<in> HFinite"
for x :: "'a::real_normed_div_algebra star"
using HFinite_HInfinite_iff HFinite_mult HInfinite_mult by blast
lemma HInfinite_square_iff [simp]: "x * x \<in> HInfinite \<longleftrightarrow> x \<in> HInfinite"
for x :: "'a::real_normed_div_algebra star"
by (auto simp add: HInfinite_HFinite_iff)
lemma approx_HFinite_mult_cancel: "a \<in> HFinite - Infinitesimal \<Longrightarrow> a * w \<approx> a * z \<Longrightarrow> w \<approx> z"
for a w z :: "'a::real_normed_div_algebra star"
by (metis DiffD2 Infinitesimal_mult_disj bex_Infinitesimal_iff right_diff_distrib)
lemma approx_HFinite_mult_cancel_iff1: "a \<in> HFinite - Infinitesimal \<Longrightarrow> a * w \<approx> a * z \<longleftrightarrow> w \<approx> z"
for a w z :: "'a::real_normed_div_algebra star"
by (auto intro: approx_mult2 approx_HFinite_mult_cancel)
lemma HInfinite_HFinite_add_cancel: "x + y \<in> HInfinite \<Longrightarrow> y \<in> HFinite \<Longrightarrow> x \<in> HInfinite"
using HFinite_add HInfinite_HFinite_iff by blast
lemma HInfinite_HFinite_add: "x \<in> HInfinite \<Longrightarrow> y \<in> HFinite \<Longrightarrow> x + y \<in> HInfinite"
by (metis (no_types, opaque_lifting) HFinite_HInfinite_iff HFinite_add HFinite_minus_iff add.commute add_minus_cancel)
lemma HInfinite_ge_HInfinite: "x \<in> HInfinite \<Longrightarrow> x \<le> y \<Longrightarrow> 0 \<le> x \<Longrightarrow> y \<in> HInfinite"
for x y :: hypreal
by (auto intro: HFinite_bounded simp add: HInfinite_HFinite_iff)
lemma Infinitesimal_inverse_HInfinite: "x \<in> Infinitesimal \<Longrightarrow> x \<noteq> 0 \<Longrightarrow> inverse x \<in> HInfinite"
for x :: "'a::real_normed_div_algebra star"
by (metis Infinitesimal_HFinite_mult not_HFinite_HInfinite one_not_Infinitesimal right_inverse)
lemma HInfinite_HFinite_not_Infinitesimal_mult:
"x \<in> HInfinite \<Longrightarrow> y \<in> HFinite - Infinitesimal \<Longrightarrow> x * y \<in> HInfinite"
for x y :: "'a::real_normed_div_algebra star"
by (metis (no_types, opaque_lifting) HFinite_HInfinite_iff HFinite_Infinitesimal_not_zero HFinite_inverse2 HFinite_mult mult.assoc mult.right_neutral right_inverse)
lemma HInfinite_HFinite_not_Infinitesimal_mult2:
"x \<in> HInfinite \<Longrightarrow> y \<in> HFinite - Infinitesimal \<Longrightarrow> y * x \<in> HInfinite"
for x y :: "'a::real_normed_div_algebra star"
by (metis Diff_iff HInfinite_HFinite_iff HInfinite_inverse_Infinitesimal Infinitesimal_HFinite_mult2 divide_inverse mult_zero_right nonzero_eq_divide_eq)
lemma HInfinite_gt_SReal: "x \<in> HInfinite \<Longrightarrow> 0 < x \<Longrightarrow> y \<in> \<real> \<Longrightarrow> y < x"
for x y :: hypreal
by (auto dest!: bspec simp add: HInfinite_def abs_if order_less_imp_le)
lemma HInfinite_gt_zero_gt_one: "x \<in> HInfinite \<Longrightarrow> 0 < x \<Longrightarrow> 1 < x"
for x :: hypreal
by (auto intro: HInfinite_gt_SReal)
lemma not_HInfinite_one [simp]: "1 \<notin> HInfinite"
by (simp add: HInfinite_HFinite_iff)
lemma approx_hrabs_disj: "\<bar>x\<bar> \<approx> x \<or> \<bar>x\<bar> \<approx> -x"
for x :: hypreal
by (simp add: abs_if)
subsection \<open>Theorems about Monads\<close>
lemma monad_hrabs_Un_subset: "monad \<bar>x\<bar> \<le> monad x \<union> monad (- x)"
for x :: hypreal
by (simp add: abs_if)
lemma Infinitesimal_monad_eq: "e \<in> Infinitesimal \<Longrightarrow> monad (x + e) = monad x"
by (fast intro!: Infinitesimal_add_approx_self [THEN approx_sym] approx_monad_iff [THEN iffD1])
lemma mem_monad_iff: "u \<in> monad x \<longleftrightarrow> - u \<in> monad (- x)"
by (simp add: monad_def)
lemma Infinitesimal_monad_zero_iff: "x \<in> Infinitesimal \<longleftrightarrow> x \<in> monad 0"
by (auto intro: approx_sym simp add: monad_def mem_infmal_iff)
lemma monad_zero_minus_iff: "x \<in> monad 0 \<longleftrightarrow> - x \<in> monad 0"
by (simp add: Infinitesimal_monad_zero_iff [symmetric])
lemma monad_zero_hrabs_iff: "x \<in> monad 0 \<longleftrightarrow> \<bar>x\<bar> \<in> monad 0"
for x :: hypreal
using Infinitesimal_monad_zero_iff by blast
lemma mem_monad_self [simp]: "x \<in> monad x"
by (simp add: monad_def)
subsection \<open>Proof that \<^term>\<open>x \<approx> y\<close> implies \<^term>\<open>\<bar>x\<bar> \<approx> \<bar>y\<bar>\<close>\<close>
lemma approx_subset_monad: "x \<approx> y \<Longrightarrow> {x, y} \<le> monad x"
by (simp (no_asm)) (simp add: approx_monad_iff)
lemma approx_subset_monad2: "x \<approx> y \<Longrightarrow> {x, y} \<le> monad y"
using approx_subset_monad approx_sym by auto
lemma mem_monad_approx: "u \<in> monad x \<Longrightarrow> x \<approx> u"
by (simp add: monad_def)
lemma approx_mem_monad: "x \<approx> u \<Longrightarrow> u \<in> monad x"
by (simp add: monad_def)
lemma approx_mem_monad2: "x \<approx> u \<Longrightarrow> x \<in> monad u"
using approx_mem_monad approx_sym by blast
lemma approx_mem_monad_zero: "x \<approx> y \<Longrightarrow> x \<in> monad 0 \<Longrightarrow> y \<in> monad 0"
using approx_trans monad_def by blast
lemma Infinitesimal_approx_hrabs: "x \<approx> y \<Longrightarrow> x \<in> Infinitesimal \<Longrightarrow> \<bar>x\<bar> \<approx> \<bar>y\<bar>"
for x y :: hypreal
using approx_hnorm by fastforce
lemma less_Infinitesimal_less: "0 < x \<Longrightarrow> x \<notin> Infinitesimal \<Longrightarrow> e \<in> Infinitesimal \<Longrightarrow> e < x"
for x :: hypreal
using Infinitesimal_interval less_linear by blast
lemma Ball_mem_monad_gt_zero: "0 < x \<Longrightarrow> x \<notin> Infinitesimal \<Longrightarrow> u \<in> monad x \<Longrightarrow> 0 < u"
for u x :: hypreal
by (metis bex_Infinitesimal_iff2 less_Infinitesimal_less less_add_same_cancel2 mem_monad_approx)
lemma Ball_mem_monad_less_zero: "x < 0 \<Longrightarrow> x \<notin> Infinitesimal \<Longrightarrow> u \<in> monad x \<Longrightarrow> u < 0"
for u x :: hypreal
by (metis Ball_mem_monad_gt_zero approx_monad_iff less_asym linorder_neqE_linordered_idom mem_infmal_iff mem_monad_approx mem_monad_self)
lemma lemma_approx_gt_zero: "0 < x \<Longrightarrow> x \<notin> Infinitesimal \<Longrightarrow> x \<approx> y \<Longrightarrow> 0 < y"
for x y :: hypreal
by (blast dest: Ball_mem_monad_gt_zero approx_subset_monad)
lemma lemma_approx_less_zero: "x < 0 \<Longrightarrow> x \<notin> Infinitesimal \<Longrightarrow> x \<approx> y \<Longrightarrow> y < 0"
for x y :: hypreal
by (blast dest: Ball_mem_monad_less_zero approx_subset_monad)
lemma approx_hrabs: "x \<approx> y \<Longrightarrow> \<bar>x\<bar> \<approx> \<bar>y\<bar>"
for x y :: hypreal
by (drule approx_hnorm) simp
lemma approx_hrabs_zero_cancel: "\<bar>x\<bar> \<approx> 0 \<Longrightarrow> x \<approx> 0"
for x :: hypreal
using mem_infmal_iff by blast
lemma approx_hrabs_add_Infinitesimal: "e \<in> Infinitesimal \<Longrightarrow> \<bar>x\<bar> \<approx> \<bar>x + e\<bar>"
for e x :: hypreal
by (fast intro: approx_hrabs Infinitesimal_add_approx_self)
lemma approx_hrabs_add_minus_Infinitesimal: "e \<in> Infinitesimal \<Longrightarrow> \<bar>x\<bar> \<approx> \<bar>x + -e\<bar>"
for e x :: hypreal
by (fast intro: approx_hrabs Infinitesimal_add_minus_approx_self)
lemma hrabs_add_Infinitesimal_cancel:
"e \<in> Infinitesimal \<Longrightarrow> e' \<in> Infinitesimal \<Longrightarrow> \<bar>x + e\<bar> = \<bar>y + e'\<bar> \<Longrightarrow> \<bar>x\<bar> \<approx> \<bar>y\<bar>"
for e e' x y :: hypreal
by (metis approx_hrabs_add_Infinitesimal approx_trans2)
lemma hrabs_add_minus_Infinitesimal_cancel:
"e \<in> Infinitesimal \<Longrightarrow> e' \<in> Infinitesimal \<Longrightarrow> \<bar>x + -e\<bar> = \<bar>y + -e'\<bar> \<Longrightarrow> \<bar>x\<bar> \<approx> \<bar>y\<bar>"
for e e' x y :: hypreal
by (meson Infinitesimal_minus_iff hrabs_add_Infinitesimal_cancel)
subsection \<open>More \<^term>\<open>HFinite\<close> and \<^term>\<open>Infinitesimal\<close> Theorems\<close>
text \<open>
An interesting and slightly counterintuitive theorem: it is needed to prove
that an open interval is an NS open set.
\<close>
lemma Infinitesimal_add_hypreal_of_real_less:
assumes "x < y" and u: "u \<in> Infinitesimal"
shows "hypreal_of_real x + u < hypreal_of_real y"
proof -
have "\<bar>u\<bar> < hypreal_of_real y - hypreal_of_real x"
using InfinitesimalD \<open>x < y\<close> u by fastforce
then show ?thesis
by (simp add: abs_less_iff)
qed
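text \<open>A concrete instance (a sketch using only the lemma above): an infinitesimal
perturbation cannot push a real number past a strictly larger real.\<close>
lemma "u \<in> Infinitesimal \<Longrightarrow> hypreal_of_real 1 + u < hypreal_of_real 2"
by (simp add: Infinitesimal_add_hypreal_of_real_less)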
lemma Infinitesimal_add_hrabs_hypreal_of_real_less:
"x \<in> Infinitesimal \<Longrightarrow> \<bar>hypreal_of_real r\<bar> < hypreal_of_real y \<Longrightarrow>
\<bar>hypreal_of_real r + x\<bar> < hypreal_of_real y"
by (metis Infinitesimal_add_hypreal_of_real_less approx_hrabs_add_Infinitesimal approx_sym bex_Infinitesimal_iff2 star_of_abs star_of_less)
lemma Infinitesimal_add_hrabs_hypreal_of_real_less2:
"x \<in> Infinitesimal \<Longrightarrow> \<bar>hypreal_of_real r\<bar> < hypreal_of_real y \<Longrightarrow>
\<bar>x + hypreal_of_real r\<bar> < hypreal_of_real y"
using Infinitesimal_add_hrabs_hypreal_of_real_less by fastforce
lemma hypreal_of_real_le_add_Infininitesimal_cancel:
assumes le: "hypreal_of_real x + u \<le> hypreal_of_real y + v"
and u: "u \<in> Infinitesimal" and v: "v \<in> Infinitesimal"
shows "hypreal_of_real x \<le> hypreal_of_real y"
proof (rule ccontr)
assume "\<not> hypreal_of_real x \<le> hypreal_of_real y"
then have "hypreal_of_real y + (v - u) < hypreal_of_real x"
by (simp add: Infinitesimal_add_hypreal_of_real_less Infinitesimal_diff u v)
then show False
by (simp add: add_diff_eq add_le_imp_le_diff le leD)
qed
lemma hypreal_of_real_le_add_Infininitesimal_cancel2:
"u \<in> Infinitesimal \<Longrightarrow> v \<in> Infinitesimal \<Longrightarrow>
hypreal_of_real x + u \<le> hypreal_of_real y + v \<Longrightarrow> x \<le> y"
by (blast intro: star_of_le [THEN iffD1] intro!: hypreal_of_real_le_add_Infininitesimal_cancel)
lemma hypreal_of_real_less_Infinitesimal_le_zero:
"hypreal_of_real x < e \<Longrightarrow> e \<in> Infinitesimal \<Longrightarrow> hypreal_of_real x \<le> 0"
by (metis Infinitesimal_interval eq_iff le_less_linear star_of_Infinitesimal_iff_0 star_of_eq_0)
lemma Infinitesimal_add_not_zero: "h \<in> Infinitesimal \<Longrightarrow> x \<noteq> 0 \<Longrightarrow> star_of x + h \<noteq> 0"
by (metis Infinitesimal_add_approx_self star_of_approx_zero_iff)
lemma monad_hrabs_less: "y \<in> monad x \<Longrightarrow> 0 < hypreal_of_real e \<Longrightarrow> \<bar>y - x\<bar> < hypreal_of_real e"
by (simp add: Infinitesimal_approx_minus approx_sym less_Infinitesimal_less mem_monad_approx)
lemma mem_monad_SReal_HFinite: "x \<in> monad (hypreal_of_real a) \<Longrightarrow> x \<in> HFinite"
using HFinite_star_of approx_HFinite mem_monad_approx by blast
subsection \<open>Theorems about Standard Part\<close>
lemma st_approx_self: "x \<in> HFinite \<Longrightarrow> st x \<approx> x"
by (metis (no_types, lifting) approx_refl approx_trans3 someI_ex st_def st_part_Ex st_part_Ex1)
lemma st_SReal: "x \<in> HFinite \<Longrightarrow> st x \<in> \<real>"
by (metis (mono_tags, lifting) approx_sym someI_ex st_def st_part_Ex)
lemma st_HFinite: "x \<in> HFinite \<Longrightarrow> st x \<in> HFinite"
by (erule st_SReal [THEN SReal_subset_HFinite [THEN subsetD]])
lemma st_unique: "r \<in> \<real> \<Longrightarrow> r \<approx> x \<Longrightarrow> st x = r"
by (meson SReal_subset_HFinite approx_HFinite approx_unique_real st_SReal st_approx_self subsetD)
lemma st_SReal_eq: "x \<in> \<real> \<Longrightarrow> st x = x"
by (metis approx_refl st_unique)
lemma st_hypreal_of_real [simp]: "st (hypreal_of_real x) = hypreal_of_real x"
by (rule SReal_hypreal_of_real [THEN st_SReal_eq])
lemma st_eq_approx: "x \<in> HFinite \<Longrightarrow> y \<in> HFinite \<Longrightarrow> st x = st y \<Longrightarrow> x \<approx> y"
by (auto dest!: st_approx_self elim!: approx_trans3)
lemma approx_st_eq:
assumes x: "x \<in> HFinite" and y: "y \<in> HFinite" and xy: "x \<approx> y"
shows "st x = st y"
proof -
have "st x \<approx> x" "st y \<approx> y" "st x \<in> \<real>" "st y \<in> \<real>"
by (simp_all add: st_approx_self st_SReal x y)
with xy show ?thesis
by (fast elim: approx_trans approx_trans2 SReal_approx_iff [THEN iffD1])
qed
lemma st_eq_approx_iff: "x \<in> HFinite \<Longrightarrow> y \<in> HFinite \<Longrightarrow> x \<approx> y \<longleftrightarrow> st x = st y"
by (blast intro: approx_st_eq st_eq_approx)
lemma st_Infinitesimal_add_SReal: "x \<in> \<real> \<Longrightarrow> e \<in> Infinitesimal \<Longrightarrow> st (x + e) = x"
by (simp add: Infinitesimal_add_approx_self st_unique)
lemma st_Infinitesimal_add_SReal2: "x \<in> \<real> \<Longrightarrow> e \<in> Infinitesimal \<Longrightarrow> st (e + x) = x"
by (metis add.commute st_Infinitesimal_add_SReal)
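text \<open>A worked instance, relying only on the lemmas above: the standard part of a
standard real perturbed by an infinitesimal is that real itself.\<close>
lemma "e \<in> Infinitesimal \<Longrightarrow> st (hypreal_of_real 3 + e) = hypreal_of_real 3"
by (simp add: st_Infinitesimal_add_SReal SReal_hypreal_of_real)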
lemma HFinite_st_Infinitesimal_add: "x \<in> HFinite \<Longrightarrow> \<exists>e \<in> Infinitesimal. x = st(x) + e"
by (blast dest!: st_approx_self [THEN approx_sym] bex_Infinitesimal_iff2 [THEN iffD2])
lemma st_add: "x \<in> HFinite \<Longrightarrow> y \<in> HFinite \<Longrightarrow> st (x + y) = st x + st y"
by (simp add: st_unique st_SReal st_approx_self approx_add)
lemma st_numeral [simp]: "st (numeral w) = numeral w"
by (rule Reals_numeral [THEN st_SReal_eq])
lemma st_neg_numeral [simp]: "st (- numeral w) = - numeral w"
using st_unique by auto
lemma st_0 [simp]: "st 0 = 0"
by (simp add: st_SReal_eq)
lemma st_1 [simp]: "st 1 = 1"
by (simp add: st_SReal_eq)
lemma st_neg_1 [simp]: "st (- 1) = - 1"
by (simp add: st_SReal_eq)
lemma st_minus: "x \<in> HFinite \<Longrightarrow> st (- x) = - st x"
by (simp add: st_unique st_SReal st_approx_self approx_minus)
lemma st_diff: "\<lbrakk>x \<in> HFinite; y \<in> HFinite\<rbrakk> \<Longrightarrow> st (x - y) = st x - st y"
by (simp add: st_unique st_SReal st_approx_self approx_diff)
lemma st_mult: "\<lbrakk>x \<in> HFinite; y \<in> HFinite\<rbrakk> \<Longrightarrow> st (x * y) = st x * st y"
by (simp add: st_unique st_SReal st_approx_self approx_mult_HFinite)
lemma st_Infinitesimal: "x \<in> Infinitesimal \<Longrightarrow> st x = 0"
by (simp add: st_unique mem_infmal_iff)
lemma st_not_Infinitesimal: "st(x) \<noteq> 0 \<Longrightarrow> x \<notin> Infinitesimal"
by (fast intro: st_Infinitesimal)
lemma st_inverse: "x \<in> HFinite \<Longrightarrow> st x \<noteq> 0 \<Longrightarrow> st (inverse x) = inverse (st x)"
by (simp add: approx_inverse st_SReal st_approx_self st_not_Infinitesimal st_unique)
lemma st_divide [simp]: "x \<in> HFinite \<Longrightarrow> y \<in> HFinite \<Longrightarrow> st y \<noteq> 0 \<Longrightarrow> st (x / y) = st x / st y"
by (simp add: divide_inverse st_mult st_not_Infinitesimal HFinite_inverse st_inverse)
lemma st_idempotent [simp]: "x \<in> HFinite \<Longrightarrow> st (st x) = st x"
by (blast intro: st_HFinite st_approx_self approx_st_eq)
lemma Infinitesimal_add_st_less:
"x \<in> HFinite \<Longrightarrow> y \<in> HFinite \<Longrightarrow> u \<in> Infinitesimal \<Longrightarrow> st x < st y \<Longrightarrow> st x + u < st y"
by (metis Infinitesimal_add_hypreal_of_real_less SReal_iff st_SReal star_of_less)
lemma Infinitesimal_add_st_le_cancel:
"x \<in> HFinite \<Longrightarrow> y \<in> HFinite \<Longrightarrow> u \<in> Infinitesimal \<Longrightarrow>
st x \<le> st y + u \<Longrightarrow> st x \<le> st y"
by (meson Infinitesimal_add_st_less leD le_less_linear)
lemma st_le: "x \<in> HFinite \<Longrightarrow> y \<in> HFinite \<Longrightarrow> x \<le> y \<Longrightarrow> st x \<le> st y"
by (metis approx_le_bound approx_sym linear st_SReal st_approx_self st_part_Ex1)
lemma st_zero_le: "0 \<le> x \<Longrightarrow> x \<in> HFinite \<Longrightarrow> 0 \<le> st x"
by (metis HFinite_0 st_0 st_le)
lemma st_zero_ge: "x \<le> 0 \<Longrightarrow> x \<in> HFinite \<Longrightarrow> st x \<le> 0"
by (metis HFinite_0 st_0 st_le)
lemma st_hrabs: "x \<in> HFinite \<Longrightarrow> \<bar>st x\<bar> = st \<bar>x\<bar>"
by (simp add: order_class.order.antisym st_zero_ge linorder_not_le st_zero_le abs_if st_minus linorder_not_less)
subsection \<open>Alternative Definitions using Free Ultrafilter\<close>
subsubsection \<open>\<^term>\<open>HFinite\<close>\<close>
lemma HFinite_FreeUltrafilterNat:
assumes "star_n X \<in> HFinite"
shows "\<exists>u. eventually (\<lambda>n. norm (X n) < u) \<U>"
proof -
obtain r where "hnorm (star_n X) < hypreal_of_real r"
using HFiniteD SReal_iff assms by fastforce
then have "\<forall>\<^sub>F n in \<U>. norm (X n) < r"
by (simp add: hnorm_def star_n_less star_of_def starfun_star_n)
then show ?thesis ..
qed
lemma FreeUltrafilterNat_HFinite:
assumes "eventually (\<lambda>n. norm (X n) < u) \<U>"
shows "star_n X \<in> HFinite"
proof -
have "hnorm (star_n X) < hypreal_of_real u"
by (simp add: assms hnorm_def star_n_less star_of_def starfun_star_n)
then show ?thesis
by (meson HInfiniteD SReal_hypreal_of_real less_asym not_HFinite_HInfinite)
qed
lemma HFinite_FreeUltrafilterNat_iff:
"star_n X \<in> HFinite \<longleftrightarrow> (\<exists>u. eventually (\<lambda>n. norm (X n) < u) \<U>)"
using FreeUltrafilterNat_HFinite HFinite_FreeUltrafilterNat by blast
subsubsection \<open>\<^term>\<open>HInfinite\<close>\<close>
text \<open>Sets of this kind must be excluded from the free ultrafilter when characterising infinite numbers.\<close>
lemma FreeUltrafilterNat_const_Finite:
"eventually (\<lambda>n. norm (X n) = u) \<U> \<Longrightarrow> star_n X \<in> HFinite"
by (simp add: FreeUltrafilterNat_HFinite [where u = "u+1"] eventually_mono)
lemma HInfinite_FreeUltrafilterNat:
assumes "star_n X \<in> HInfinite" shows "\<forall>\<^sub>F n in \<U>. u < norm (X n)"
proof -
have "\<not> (\<forall>\<^sub>F n in \<U>. norm (X n) < u + 1)"
using FreeUltrafilterNat_HFinite HFinite_HInfinite_iff assms by auto
then show ?thesis
by (auto simp flip: FreeUltrafilterNat.eventually_not_iff elim: eventually_mono)
qed
lemma FreeUltrafilterNat_HInfinite:
assumes "\<And>u. eventually (\<lambda>n. u < norm (X n)) \<U>"
shows "star_n X \<in> HInfinite"
proof -
{ fix u
assume "\<forall>\<^sub>Fn in \<U>. norm (X n) < u" "\<forall>\<^sub>Fn in \<U>. u < norm (X n)"
then have "\<forall>\<^sub>F x in \<U>. False"
by eventually_elim auto
then have False
by (simp add: eventually_False FreeUltrafilterNat.proper) }
then show ?thesis
using HFinite_FreeUltrafilterNat HInfinite_HFinite_iff assms by blast
qed
lemma HInfinite_FreeUltrafilterNat_iff:
"star_n X \<in> HInfinite \<longleftrightarrow> (\<forall>u. eventually (\<lambda>n. u < norm (X n)) \<U>)"
using HInfinite_FreeUltrafilterNat FreeUltrafilterNat_HInfinite by blast
subsubsection \<open>\<^term>\<open>Infinitesimal\<close>\<close>
lemma ball_SReal_eq: "(\<forall>x::hypreal \<in> Reals. P x) \<longleftrightarrow> (\<forall>x::real. P (star_of x))"
by (auto simp: SReal_def)
lemma Infinitesimal_FreeUltrafilterNat_iff:
"(star_n X \<in> Infinitesimal) = (\<forall>u>0. eventually (\<lambda>n. norm (X n) < u) \<U>)" (is "?lhs = ?rhs")
proof -
have "?lhs \<longleftrightarrow> (\<forall>r>0. hnorm (star_n X) < hypreal_of_real r)"
by (simp add: Infinitesimal_def ball_SReal_eq)
also have "... \<longleftrightarrow> ?rhs"
by (simp add: hnorm_def starfun_star_n star_of_def star_less_def starP2_star_n)
finally show ?thesis .
qed
text \<open>Infinitesimals as smaller than \<open>1/n\<close> for all positive \<open>n::nat\<close>.\<close>
lemma lemma_Infinitesimal: "(\<forall>r. 0 < r \<longrightarrow> x < r) \<longleftrightarrow> (\<forall>n. x < inverse (real (Suc n)))"
by (meson inverse_positive_iff_positive less_trans of_nat_0_less_iff reals_Archimedean zero_less_Suc)
lemma lemma_Infinitesimal2:
"(\<forall>r \<in> Reals. 0 < r \<longrightarrow> x < r) \<longleftrightarrow> (\<forall>n. x < inverse(hypreal_of_nat (Suc n)))" (is "_ = ?rhs")
proof (intro iffI strip)
assume R: ?rhs
fix r::hypreal
assume "r \<in> \<real>" "0 < r"
then obtain n y where "inverse (real (Suc n)) < y" and r: "r = hypreal_of_real y"
by (metis SReal_iff reals_Archimedean star_of_0_less)
then have "inverse (1 + hypreal_of_nat n) < hypreal_of_real y"
by (metis of_nat_Suc star_of_inverse star_of_less star_of_nat_def)
then show "x < r"
by (metis R r le_less_trans less_imp_le of_nat_Suc)
qed (meson Reals_inverse Reals_of_nat of_nat_0_less_iff positive_imp_inverse_positive zero_less_Suc)
lemma Infinitesimal_hypreal_of_nat_iff:
"Infinitesimal = {x. \<forall>n. hnorm x < inverse (hypreal_of_nat (Suc n))}"
using Infinitesimal_def lemma_Infinitesimal2 by auto
subsection \<open>Proof that \<open>\<omega>\<close> is an infinite number\<close>
text \<open>It will follow that \<open>\<epsilon>\<close> is an infinitesimal number.\<close>
lemma Suc_Un_eq: "{n. n < Suc m} = {n. n < m} Un {n. n = m}"
by (auto simp add: less_Suc_eq)
text \<open>Prove that any segment is finite and hence cannot belong to \<open>\<U>\<close>.\<close>
lemma finite_real_of_nat_segment: "finite {n::nat. real n < real (m::nat)}"
by auto
lemma finite_real_of_nat_less_real: "finite {n::nat. real n < u}"
proof -
obtain m where "u < real m"
using reals_Archimedean2 by blast
then have "{n. real n < u} \<subseteq> {..<m}"
by force
then show ?thesis
using finite_nat_iff_bounded by force
qed
lemma finite_real_of_nat_le_real: "finite {n::nat. real n \<le> u}"
by (metis infinite_nat_iff_unbounded leD le_nat_floor mem_Collect_eq)
lemma finite_rabs_real_of_nat_le_real: "finite {n::nat. \<bar>real n\<bar> \<le> u}"
by (simp add: finite_real_of_nat_le_real)
lemma rabs_real_of_nat_le_real_FreeUltrafilterNat:
"\<not> eventually (\<lambda>n. \<bar>real n\<bar> \<le> u) \<U>"
by (blast intro!: FreeUltrafilterNat.finite finite_rabs_real_of_nat_le_real)
lemma FreeUltrafilterNat_nat_gt_real: "eventually (\<lambda>n. u < real n) \<U>"
proof -
have "{n::nat. \<not> u < real n} = {n. real n \<le> u}"
by auto
then show ?thesis
by (auto simp add: FreeUltrafilterNat.finite' finite_real_of_nat_le_real)
qed
text \<open>The complement of \<open>{n. \<bar>real n\<bar> \<le> u}\<close>, namely \<open>{n. u < \<bar>real n\<bar>}\<close>, is in
\<open>\<U>\<close> by a defining property of free ultrafilters.\<close>
text \<open>\<^term>\<open>\<omega>\<close> is a member of \<^term>\<open>HInfinite\<close>.\<close>
theorem HInfinite_omega [simp]: "\<omega> \<in> HInfinite"
proof -
have "\<forall>\<^sub>F n in \<U>. u < norm (1 + real n)" for u
using FreeUltrafilterNat_nat_gt_real [of "u-1"] eventually_mono by fastforce
then show ?thesis
by (simp add: omega_def FreeUltrafilterNat_HInfinite)
qed
text \<open>Epsilon is a member of Infinitesimal.\<close>
lemma Infinitesimal_epsilon [simp]: "\<epsilon> \<in> Infinitesimal"
by (auto intro!: HInfinite_inverse_Infinitesimal HInfinite_omega
simp add: epsilon_inverse_omega)
lemma HFinite_epsilon [simp]: "\<epsilon> \<in> HFinite"
by (auto intro: Infinitesimal_subset_HFinite [THEN subsetD])
lemma epsilon_approx_zero [simp]: "\<epsilon> \<approx> 0"
by (simp add: mem_infmal_iff [symmetric])
text \<open>Needed for the proof that the hyperreal \<open>[<X(n)>]\<close> satisfies
\<open>[<X(n)>] \<approx> hypreal_of_real a\<close>, given that \<open>\<forall>n. |X n - a| < 1/n\<close>.
Used in the proof of \<open>NSLIM \<Rightarrow> LIM\<close>.\<close>
lemma real_of_nat_less_inverse_iff: "0 < u \<Longrightarrow> u < inverse (real(Suc n)) \<longleftrightarrow> real(Suc n) < inverse u"
using less_imp_inverse_less by force
lemma finite_inverse_real_of_posnat_gt_real: "0 < u \<Longrightarrow> finite {n. u < inverse (real (Suc n))}"
proof (simp only: real_of_nat_less_inverse_iff)
have "{n. 1 + real n < inverse u} = {n. real n < inverse u - 1}"
by fastforce
then show "finite {n. real (Suc n) < inverse u}"
using finite_real_of_nat_less_real [of "inverse u - 1"]
by auto
qed
lemma finite_inverse_real_of_posnat_ge_real:
assumes "0 < u"
shows "finite {n. u \<le> inverse (real (Suc n))}"
proof -
have "\<forall>na. u \<le> inverse (1 + real na) \<longrightarrow> na \<le> ceiling (inverse u)"
by (smt (verit, best) assms ceiling_less_cancel ceiling_of_nat inverse_inverse_eq inverse_le_iff_le)
then show ?thesis
apply (auto simp add: finite_nat_set_iff_bounded_le)
by (meson assms inverse_positive_iff_positive le_nat_iff less_imp_le zero_less_ceiling)
qed
lemma inverse_real_of_posnat_ge_real_FreeUltrafilterNat:
"0 < u \<Longrightarrow> \<not> eventually (\<lambda>n. u \<le> inverse(real(Suc n))) \<U>"
by (blast intro!: FreeUltrafilterNat.finite finite_inverse_real_of_posnat_ge_real)
lemma FreeUltrafilterNat_inverse_real_of_posnat:
"0 < u \<Longrightarrow> eventually (\<lambda>n. inverse(real(Suc n)) < u) \<U>"
by (drule inverse_real_of_posnat_ge_real_FreeUltrafilterNat)
(simp add: FreeUltrafilterNat.eventually_not_iff not_le[symmetric])
text \<open>Example of a hypersequence (i.e. an extended standard sequence)
whose term at a hypernatural index is an infinitesimal, i.e.
the whn'th term of the hypersequence is a member of Infinitesimal.\<close>
lemma SEQ_Infinitesimal: "( *f* (\<lambda>n::nat. inverse(real(Suc n)))) whn \<in> Infinitesimal"
by (simp add: hypnat_omega_def starfun_star_n star_n_inverse Infinitesimal_FreeUltrafilterNat_iff
FreeUltrafilterNat_inverse_real_of_posnat del: of_nat_Suc)
text \<open>Example where we get a hyperreal from a real sequence
for which a particular property holds. The theorem is
used in proofs about the equivalence of nonstandard and
standard neighbourhoods. Also used for the equivalence of
nonstandard and standard definitions of pointwise
limit.\<close>
text \<open>\<open>|X(n) - x| < 1/n \<Longrightarrow> |[<X n>] - hypreal_of_real x| \<in> Infinitesimal\<close>\<close>
lemma real_seq_to_hypreal_Infinitesimal:
"\<forall>n. norm (X n - x) < inverse (real (Suc n)) \<Longrightarrow> star_n X - star_of x \<in> Infinitesimal"
unfolding star_n_diff star_of_def Infinitesimal_FreeUltrafilterNat_iff star_n_inverse
by (auto dest!: FreeUltrafilterNat_inverse_real_of_posnat
intro: order_less_trans elim!: eventually_mono)
lemma real_seq_to_hypreal_approx:
"\<forall>n. norm (X n - x) < inverse (real (Suc n)) \<Longrightarrow> star_n X \<approx> star_of x"
by (metis bex_Infinitesimal_iff real_seq_to_hypreal_Infinitesimal)
lemma real_seq_to_hypreal_approx2:
"\<forall>n. norm (x - X n) < inverse(real(Suc n)) \<Longrightarrow> star_n X \<approx> star_of x"
by (metis norm_minus_commute real_seq_to_hypreal_approx)
lemma real_seq_to_hypreal_Infinitesimal2:
"\<forall>n. norm(X n - Y n) < inverse(real(Suc n)) \<Longrightarrow> star_n X - star_n Y \<in> Infinitesimal"
unfolding Infinitesimal_FreeUltrafilterNat_iff star_n_diff
by (auto dest!: FreeUltrafilterNat_inverse_real_of_posnat
intro: order_less_trans elim!: eventually_mono)
end
diff --git a/src/HOL/Num.thy b/src/HOL/Num.thy
--- a/src/HOL/Num.thy
+++ b/src/HOL/Num.thy
@@ -1,1553 +1,1553 @@
(* Title: HOL/Num.thy
Author: Florian Haftmann
Author: Brian Huffman
*)
section \<open>Binary Numerals\<close>
theory Num
imports BNF_Least_Fixpoint Transfer
begin
subsection \<open>The \<open>num\<close> type\<close>
datatype num = One | Bit0 num | Bit1 num
text \<open>Increment function for type \<^typ>\<open>num\<close>\<close>
primrec inc :: "num \<Rightarrow> num"
where
"inc One = Bit0 One"
| "inc (Bit0 x) = Bit1 x"
| "inc (Bit1 x) = Bit0 (inc x)"
text \<open>Converting between type \<^typ>\<open>num\<close> and type \<^typ>\<open>nat\<close>\<close>
primrec nat_of_num :: "num \<Rightarrow> nat"
where
"nat_of_num One = Suc 0"
| "nat_of_num (Bit0 x) = nat_of_num x + nat_of_num x"
| "nat_of_num (Bit1 x) = Suc (nat_of_num x + nat_of_num x)"
primrec num_of_nat :: "nat \<Rightarrow> num"
where
"num_of_nat 0 = One"
| "num_of_nat (Suc n) = (if 0 < n then inc (num_of_nat n) else One)"
lemma nat_of_num_pos: "0 < nat_of_num x"
by (induct x) simp_all
lemma nat_of_num_neq_0: "nat_of_num x \<noteq> 0"
by (induct x) simp_all
lemma nat_of_num_inc: "nat_of_num (inc x) = Suc (nat_of_num x)"
by (induct x) simp_all
lemma num_of_nat_double: "0 < n \<Longrightarrow> num_of_nat (n + n) = Bit0 (num_of_nat n)"
by (induct n) simp_all
text \<open>Type \<^typ>\<open>num\<close> is isomorphic to the strictly positive natural numbers.\<close>
lemma nat_of_num_inverse: "num_of_nat (nat_of_num x) = x"
by (induct x) (simp_all add: num_of_nat_double nat_of_num_pos)
lemma num_of_nat_inverse: "0 < n \<Longrightarrow> nat_of_num (num_of_nat n) = n"
by (induct n) (simp_all add: nat_of_num_inc)
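text \<open>The positivity assumption in \<open>num_of_nat_inverse\<close> cannot be dropped:
by definition \<open>num_of_nat 0 = One\<close>, so \<open>nat_of_num (num_of_nat 0) = Suc 0 \<noteq> 0\<close>.\<close>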
lemma num_eq_iff: "x = y \<longleftrightarrow> nat_of_num x = nat_of_num y"
apply safe
apply (drule arg_cong [where f=num_of_nat])
apply (simp add: nat_of_num_inverse)
done
lemma num_induct [case_names One inc]:
fixes P :: "num \<Rightarrow> bool"
assumes One: "P One"
and inc: "\<And>x. P x \<Longrightarrow> P (inc x)"
shows "P x"
proof -
obtain n where n: "Suc n = nat_of_num x"
by (cases "nat_of_num x") (simp_all add: nat_of_num_neq_0)
have "P (num_of_nat (Suc n))"
proof (induct n)
case 0
from One show ?case by simp
next
case (Suc n)
then have "P (inc (num_of_nat (Suc n)))" by (rule inc)
then show "P (num_of_nat (Suc (Suc n)))" by simp
qed
with n show "P x"
by (simp add: nat_of_num_inverse)
qed
text \<open>
From now on, there are two possible models for \<^typ>\<open>num\<close>: as positive
naturals (rule \<open>num_induct\<close>) and as a digit representation (rules
\<open>num.induct\<close>, \<open>num.cases\<close>).
\<close>
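text \<open>For instance, \<open>numeral_add\<close> below is proved by induction in the first model
(rule \<open>num_induct\<close>: base case \<open>One\<close>, step \<open>inc\<close>), whereas \<open>add_One_commute\<close> is
proved by structural induction over the digit constructors.\<close>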
subsection \<open>Numeral operations\<close>
instantiation num :: "{plus,times,linorder}"
begin
definition [code del]: "m + n = num_of_nat (nat_of_num m + nat_of_num n)"
definition [code del]: "m * n = num_of_nat (nat_of_num m * nat_of_num n)"
definition [code del]: "m \<le> n \<longleftrightarrow> nat_of_num m \<le> nat_of_num n"
definition [code del]: "m < n \<longleftrightarrow> nat_of_num m < nat_of_num n"
instance
by standard (auto simp add: less_num_def less_eq_num_def num_eq_iff)
end
lemma nat_of_num_add: "nat_of_num (x + y) = nat_of_num x + nat_of_num y"
unfolding plus_num_def
by (intro num_of_nat_inverse add_pos_pos nat_of_num_pos)
lemma nat_of_num_mult: "nat_of_num (x * y) = nat_of_num x * nat_of_num y"
unfolding times_num_def
by (intro num_of_nat_inverse mult_pos_pos nat_of_num_pos)
lemma add_num_simps [simp, code]:
"One + One = Bit0 One"
"One + Bit0 n = Bit1 n"
"One + Bit1 n = Bit0 (n + One)"
"Bit0 m + One = Bit1 m"
"Bit0 m + Bit0 n = Bit0 (m + n)"
"Bit0 m + Bit1 n = Bit1 (m + n)"
"Bit1 m + One = Bit0 (m + One)"
"Bit1 m + Bit0 n = Bit1 (m + n)"
"Bit1 m + Bit1 n = Bit0 (m + n + One)"
by (simp_all add: num_eq_iff nat_of_num_add)
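text \<open>A worked instance of these rules:
\<open>Bit1 One + Bit1 One = Bit0 (One + One + One) = Bit0 (Bit0 One + One) = Bit0 (Bit1 One)\<close>,
i.e. \<open>3 + 3 = 6\<close> in binary.\<close>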
lemma mult_num_simps [simp, code]:
"m * One = m"
"One * n = n"
"Bit0 m * Bit0 n = Bit0 (Bit0 (m * n))"
"Bit0 m * Bit1 n = Bit0 (m * Bit1 n)"
"Bit1 m * Bit0 n = Bit0 (Bit1 m * n)"
"Bit1 m * Bit1 n = Bit1 (m + n + Bit0 (m * n))"
by (simp_all add: num_eq_iff nat_of_num_add nat_of_num_mult distrib_right distrib_left)
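text \<open>Similarly for multiplication:
\<open>Bit1 One * Bit1 One = Bit1 (One + One + Bit0 (One * One)) = Bit1 (Bit0 One + Bit0 One) = Bit1 (Bit0 (Bit0 One))\<close>,
i.e. \<open>3 * 3 = 9\<close>.\<close>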
lemma eq_num_simps:
"One = One \<longleftrightarrow> True"
"One = Bit0 n \<longleftrightarrow> False"
"One = Bit1 n \<longleftrightarrow> False"
"Bit0 m = One \<longleftrightarrow> False"
"Bit1 m = One \<longleftrightarrow> False"
"Bit0 m = Bit0 n \<longleftrightarrow> m = n"
"Bit0 m = Bit1 n \<longleftrightarrow> False"
"Bit1 m = Bit0 n \<longleftrightarrow> False"
"Bit1 m = Bit1 n \<longleftrightarrow> m = n"
by simp_all
lemma le_num_simps [simp, code]:
"One \<le> n \<longleftrightarrow> True"
"Bit0 m \<le> One \<longleftrightarrow> False"
"Bit1 m \<le> One \<longleftrightarrow> False"
"Bit0 m \<le> Bit0 n \<longleftrightarrow> m \<le> n"
"Bit0 m \<le> Bit1 n \<longleftrightarrow> m \<le> n"
"Bit1 m \<le> Bit1 n \<longleftrightarrow> m \<le> n"
"Bit1 m \<le> Bit0 n \<longleftrightarrow> m < n"
using nat_of_num_pos [of n] nat_of_num_pos [of m]
by (auto simp add: less_eq_num_def less_num_def)
lemma less_num_simps [simp, code]:
"m < One \<longleftrightarrow> False"
"One < Bit0 n \<longleftrightarrow> True"
"One < Bit1 n \<longleftrightarrow> True"
"Bit0 m < Bit0 n \<longleftrightarrow> m < n"
"Bit0 m < Bit1 n \<longleftrightarrow> m \<le> n"
"Bit1 m < Bit1 n \<longleftrightarrow> m < n"
"Bit1 m < Bit0 n \<longleftrightarrow> m < n"
using nat_of_num_pos [of n] nat_of_num_pos [of m]
by (auto simp add: less_eq_num_def less_num_def)
lemma le_num_One_iff: "x \<le> num.One \<longleftrightarrow> x = num.One"
by (simp add: antisym_conv)
text \<open>Rules using \<open>One\<close> and \<open>inc\<close> as constructors.\<close>
lemma add_One: "x + One = inc x"
by (simp add: num_eq_iff nat_of_num_add nat_of_num_inc)
lemma add_One_commute: "One + n = n + One"
by (induct n) simp_all
lemma add_inc: "x + inc y = inc (x + y)"
by (simp add: num_eq_iff nat_of_num_add nat_of_num_inc)
lemma mult_inc: "x * inc y = x * y + x"
by (simp add: num_eq_iff nat_of_num_mult nat_of_num_add nat_of_num_inc)
text \<open>The \<^const>\<open>num_of_nat\<close> conversion.\<close>
lemma num_of_nat_One: "n \<le> 1 \<Longrightarrow> num_of_nat n = One"
by (cases n) simp_all
lemma num_of_nat_plus_distrib:
"0 < m \<Longrightarrow> 0 < n \<Longrightarrow> num_of_nat (m + n) = num_of_nat m + num_of_nat n"
by (induct n) (auto simp add: add_One add_One_commute add_inc)
text \<open>A double-and-decrement function.\<close>
primrec BitM :: "num \<Rightarrow> num"
where
"BitM One = One"
| "BitM (Bit0 n) = Bit1 (BitM n)"
| "BitM (Bit1 n) = Bit1 (Bit0 n)"
lemma BitM_plus_one: "BitM n + One = Bit0 n"
by (induct n) simp_all
lemma one_plus_BitM: "One + BitM n = Bit0 n"
unfolding add_One_commute BitM_plus_one ..
lemma BitM_inc_eq:
\<open>Num.BitM (Num.inc n) = Num.Bit1 n\<close>
by (induction n) simp_all
lemma inc_BitM_eq:
\<open>Num.inc (Num.BitM n) = num.Bit0 n\<close>
by (simp add: BitM_plus_one[symmetric] add_One)
text \<open>Squaring and exponentiation.\<close>
primrec sqr :: "num \<Rightarrow> num"
where
"sqr One = One"
| "sqr (Bit0 n) = Bit0 (Bit0 (sqr n))"
| "sqr (Bit1 n) = Bit1 (Bit0 (sqr n + n))"
primrec pow :: "num \<Rightarrow> num \<Rightarrow> num"
where
"pow x One = x"
| "pow x (Bit0 y) = sqr (pow x y)"
| "pow x (Bit1 y) = sqr (pow x y) * x"
lemma nat_of_num_sqr: "nat_of_num (sqr x) = nat_of_num x * nat_of_num x"
by (induct x) (simp_all add: algebra_simps nat_of_num_add)
lemma sqr_conv_mult: "sqr x = x * x"
by (simp add: num_eq_iff nat_of_num_sqr nat_of_num_mult)
lemma num_double [simp]:
"num.Bit0 num.One * n = num.Bit0 n"
by (simp add: num_eq_iff nat_of_num_mult)
subsection \<open>Binary numerals\<close>
text \<open>
We embed binary representations into a generic algebraic
structure using \<open>numeral\<close>.
\<close>
class numeral = one + semigroup_add
begin
primrec numeral :: "num \<Rightarrow> 'a"
where
numeral_One: "numeral One = 1"
| numeral_Bit0: "numeral (Bit0 n) = numeral n + numeral n"
| numeral_Bit1: "numeral (Bit1 n) = numeral n + numeral n + 1"
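text \<open>Only \<open>1\<close> and \<open>+\<close> are needed to interpret a numeral, matching the class
assumptions. For example, \<open>numeral (Bit1 (Bit0 One)) = (1 + 1) + (1 + 1) + 1\<close>,
the element commonly written \<open>5\<close>.\<close>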
lemma numeral_code [code]:
"numeral One = 1"
"numeral (Bit0 n) = (let m = numeral n in m + m)"
"numeral (Bit1 n) = (let m = numeral n in m + m + 1)"
by (simp_all add: Let_def)
lemma one_plus_numeral_commute: "1 + numeral x = numeral x + 1"
proof (induct x)
case One
then show ?case by simp
next
case Bit0
then show ?case by (simp add: add.assoc [symmetric]) (simp add: add.assoc)
next
case Bit1
then show ?case by (simp add: add.assoc [symmetric]) (simp add: add.assoc)
qed
lemma numeral_inc: "numeral (inc x) = numeral x + 1"
proof (induct x)
case One
then show ?case by simp
next
case Bit0
then show ?case by simp
next
case (Bit1 x)
have "numeral x + (1 + numeral x) + 1 = numeral x + (numeral x + 1) + 1"
by (simp only: one_plus_numeral_commute)
with Bit1 show ?case
by (simp add: add.assoc)
qed
declare numeral.simps [simp del]
abbreviation "Numeral1 \<equiv> numeral One"
declare numeral_One [code_post]
end
text \<open>Numeral syntax.\<close>
syntax
"_Numeral" :: "num_const \<Rightarrow> 'a" ("_")
ML_file \<open>Tools/numeral.ML\<close>
parse_translation \<open>
let
fun numeral_tr [(c as Const (\<^syntax_const>\<open>_constrain\<close>, _)) $ t $ u] =
c $ numeral_tr [t] $ u
| numeral_tr [Const (num, _)] =
(Numeral.mk_number_syntax o #value o Lexicon.read_num) num
| numeral_tr ts = raise TERM ("numeral_tr", ts);
in [(\<^syntax_const>\<open>_Numeral\<close>, K numeral_tr)] end
\<close>
typed_print_translation \<open>
let
fun num_tr' ctxt T [n] =
let
val k = Numeral.dest_num_syntax n;
val t' =
Syntax.const \<^syntax_const>\<open>_Numeral\<close> $
Syntax.free (string_of_int k);
in
(case T of
Type (\<^type_name>\<open>fun\<close>, [_, T']) =>
if Printer.type_emphasis ctxt T' then
Syntax.const \<^syntax_const>\<open>_constrain\<close> $ t' $
Syntax_Phases.term_of_typ ctxt T'
else t'
| _ => if T = dummyT then t' else raise Match)
end;
in
[(\<^const_syntax>\<open>numeral\<close>, num_tr')]
end
\<close>
subsection \<open>Class-specific numeral rules\<close>
text \<open>\<^const>\<open>numeral\<close> is a morphism.\<close>
subsubsection \<open>Structures with addition: class \<open>numeral\<close>\<close>
context numeral
begin
lemma numeral_add: "numeral (m + n) = numeral m + numeral n"
by (induct n rule: num_induct)
(simp_all only: numeral_One add_One add_inc numeral_inc add.assoc)
lemma numeral_plus_numeral: "numeral m + numeral n = numeral (m + n)"
by (rule numeral_add [symmetric])
lemma numeral_plus_one: "numeral n + 1 = numeral (n + One)"
using numeral_add [of n One] by (simp add: numeral_One)
lemma one_plus_numeral: "1 + numeral n = numeral (One + n)"
using numeral_add [of One n] by (simp add: numeral_One)
lemma one_add_one: "1 + 1 = 2"
using numeral_add [of One One] by (simp add: numeral_One)
lemmas add_numeral_special =
numeral_plus_one one_plus_numeral one_add_one
end
subsubsection \<open>Structures with negation: class \<open>neg_numeral\<close>\<close>
class neg_numeral = numeral + group_add
begin
lemma uminus_numeral_One: "- Numeral1 = - 1"
by (simp add: numeral_One)
text \<open>Numerals form an abelian subgroup.\<close>
inductive is_num :: "'a \<Rightarrow> bool"
where
"is_num 1"
| "is_num x \<Longrightarrow> is_num (- x)"
| "is_num x \<Longrightarrow> is_num y \<Longrightarrow> is_num (x + y)"
lemma is_num_numeral: "is_num (numeral k)"
by (induct k) (simp_all add: numeral.simps is_num.intros)
lemma is_num_add_commute: "is_num x \<Longrightarrow> is_num y \<Longrightarrow> x + y = y + x"
proof(induction x rule: is_num.induct)
case 1
then show ?case
proof (induction y rule: is_num.induct)
case 1
then show ?case by simp
next
case (2 y)
then have "y + (1 + - y) + y = y + (- y + 1) + y"
by (simp add: add.assoc)
then have "y + (1 + - y) = y + (- y + 1)"
by simp
then show ?case
by (rule add_left_imp_eq[of y])
next
case (3 x y)
then have "1 + (x + y) = x + 1 + y"
by (simp add: add.assoc [symmetric])
then show ?case using 3
by (simp add: add.assoc)
qed
next
case (2 x)
then have "x + (- x + y) + x = x + (y + - x) + x"
by (simp add: add.assoc)
then have "x + (- x + y) = x + (y + - x)"
by simp
then show ?case
by (rule add_left_imp_eq[of x])
next
case (3 x z)
moreover have "x + (y + z) = (x + y) + z"
by (simp add: add.assoc[symmetric])
ultimately show ?case
by (simp add: add.assoc)
qed
lemma is_num_add_left_commute: "is_num x \<Longrightarrow> is_num y \<Longrightarrow> x + (y + z) = y + (x + z)"
by (simp only: add.assoc [symmetric] is_num_add_commute)
lemmas is_num_normalize =
add.assoc is_num_add_commute is_num_add_left_commute
is_num.intros is_num_numeral
minus_add
definition dbl :: "'a \<Rightarrow> 'a"
where "dbl x = x + x"
definition dbl_inc :: "'a \<Rightarrow> 'a"
where "dbl_inc x = x + x + 1"
definition dbl_dec :: "'a \<Rightarrow> 'a"
where "dbl_dec x = x + x - 1"
definition sub :: "num \<Rightarrow> num \<Rightarrow> 'a"
where "sub k l = numeral k - numeral l"
lemma numeral_BitM: "numeral (BitM n) = numeral (Bit0 n) - 1"
by (simp only: BitM_plus_one [symmetric] numeral_add numeral_One eq_diff_eq)
lemma sub_inc_One_eq:
\<open>Num.sub (Num.inc n) num.One = numeral n\<close>
by (simp_all add: sub_def diff_eq_eq numeral_inc numeral.numeral_One)
lemma dbl_simps [simp]:
"dbl (- numeral k) = - dbl (numeral k)"
"dbl 0 = 0"
"dbl 1 = 2"
"dbl (- 1) = - 2"
"dbl (numeral k) = numeral (Bit0 k)"
by (simp_all add: dbl_def numeral.simps minus_add)
lemma dbl_inc_simps [simp]:
"dbl_inc (- numeral k) = - dbl_dec (numeral k)"
"dbl_inc 0 = 1"
"dbl_inc 1 = 3"
"dbl_inc (- 1) = - 1"
"dbl_inc (numeral k) = numeral (Bit1 k)"
by (simp_all add: dbl_inc_def dbl_dec_def numeral.simps numeral_BitM is_num_normalize algebra_simps
del: add_uminus_conv_diff)
lemma dbl_dec_simps [simp]:
"dbl_dec (- numeral k) = - dbl_inc (numeral k)"
"dbl_dec 0 = - 1"
"dbl_dec 1 = 1"
"dbl_dec (- 1) = - 3"
"dbl_dec (numeral k) = numeral (BitM k)"
by (simp_all add: dbl_dec_def dbl_inc_def numeral.simps numeral_BitM is_num_normalize)
lemma sub_num_simps [simp]:
"sub One One = 0"
"sub One (Bit0 l) = - numeral (BitM l)"
"sub One (Bit1 l) = - numeral (Bit0 l)"
"sub (Bit0 k) One = numeral (BitM k)"
"sub (Bit1 k) One = numeral (Bit0 k)"
"sub (Bit0 k) (Bit0 l) = dbl (sub k l)"
"sub (Bit0 k) (Bit1 l) = dbl_dec (sub k l)"
"sub (Bit1 k) (Bit0 l) = dbl_inc (sub k l)"
"sub (Bit1 k) (Bit1 l) = dbl (sub k l)"
by (simp_all add: dbl_def dbl_dec_def dbl_inc_def sub_def numeral.simps
numeral_BitM is_num_normalize del: add_uminus_conv_diff add: diff_conv_add_uminus)
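text \<open>A worked instance: \<open>sub (Bit0 One) (Bit1 One) = dbl_dec (sub One One) = dbl_dec 0 = - 1\<close>,
i.e. \<open>2 - 3 = - 1\<close>, computed digit by digit.\<close>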
lemma add_neg_numeral_simps:
"numeral m + - numeral n = sub m n"
"- numeral m + numeral n = sub n m"
"- numeral m + - numeral n = - (numeral m + numeral n)"
by (simp_all add: sub_def numeral_add numeral.simps is_num_normalize
del: add_uminus_conv_diff add: diff_conv_add_uminus)
lemma add_neg_numeral_special:
"1 + - numeral m = sub One m"
"- numeral m + 1 = sub One m"
"numeral m + - 1 = sub m One"
"- 1 + numeral n = sub n One"
"- 1 + - numeral n = - numeral (inc n)"
"- numeral m + - 1 = - numeral (inc m)"
"1 + - 1 = 0"
"- 1 + 1 = 0"
"- 1 + - 1 = - 2"
by (simp_all add: sub_def numeral_add numeral.simps is_num_normalize right_minus numeral_inc
del: add_uminus_conv_diff add: diff_conv_add_uminus)
lemma diff_numeral_simps:
"numeral m - numeral n = sub m n"
"numeral m - - numeral n = numeral (m + n)"
"- numeral m - numeral n = - numeral (m + n)"
"- numeral m - - numeral n = sub n m"
by (simp_all add: sub_def numeral_add numeral.simps is_num_normalize
del: add_uminus_conv_diff add: diff_conv_add_uminus)
lemma diff_numeral_special:
"1 - numeral n = sub One n"
"numeral m - 1 = sub m One"
"1 - - numeral n = numeral (One + n)"
"- numeral m - 1 = - numeral (m + One)"
"- 1 - numeral n = - numeral (inc n)"
"numeral m - - 1 = numeral (inc m)"
"- 1 - - numeral n = sub n One"
"- numeral m - - 1 = sub One m"
"1 - 1 = 0"
"- 1 - 1 = - 2"
"1 - - 1 = 2"
"- 1 - - 1 = 0"
by (simp_all add: sub_def numeral_add numeral.simps is_num_normalize numeral_inc
del: add_uminus_conv_diff add: diff_conv_add_uminus)
end
subsubsection \<open>Structures with multiplication: class \<open>semiring_numeral\<close>\<close>
class semiring_numeral = semiring + monoid_mult
begin
subclass numeral ..
lemma numeral_mult: "numeral (m * n) = numeral m * numeral n"
by (induct n rule: num_induct)
(simp_all add: numeral_One mult_inc numeral_inc numeral_add distrib_left)
lemma numeral_times_numeral: "numeral m * numeral n = numeral (m * n)"
by (rule numeral_mult [symmetric])
lemma mult_2: "2 * z = z + z"
by (simp add: one_add_one [symmetric] distrib_right)
lemma mult_2_right: "z * 2 = z + z"
by (simp add: one_add_one [symmetric] distrib_left)
lemma left_add_twice:
"a + (a + b) = 2 * a + b"
by (simp add: mult_2 ac_simps)
end
subsubsection \<open>Structures with a zero: class \<open>semiring_1\<close>\<close>
context semiring_1
begin
subclass semiring_numeral ..
lemma of_nat_numeral [simp]: "of_nat (numeral n) = numeral n"
by (induct n) (simp_all only: numeral.simps numeral_class.numeral.simps of_nat_add of_nat_1)
end
lemma nat_of_num_numeral [code_abbrev]: "nat_of_num = numeral"
proof
fix n
have "numeral n = nat_of_num n"
by (induct n) (simp_all add: numeral.simps)
then show "nat_of_num n = numeral n"
by simp
qed
lemma nat_of_num_code [code]:
"nat_of_num One = 1"
"nat_of_num (Bit0 n) = (let m = nat_of_num n in m + m)"
"nat_of_num (Bit1 n) = (let m = nat_of_num n in Suc (m + m))"
by (simp_all add: Let_def)
subsubsection \<open>Equality: class \<open>semiring_char_0\<close>\<close>
context semiring_char_0
begin
lemma numeral_eq_iff: "numeral m = numeral n \<longleftrightarrow> m = n"
by (simp only: of_nat_numeral [symmetric] nat_of_num_numeral [symmetric]
of_nat_eq_iff num_eq_iff)
lemma numeral_eq_one_iff: "numeral n = 1 \<longleftrightarrow> n = One"
by (rule numeral_eq_iff [of n One, unfolded numeral_One])
lemma one_eq_numeral_iff: "1 = numeral n \<longleftrightarrow> One = n"
by (rule numeral_eq_iff [of One n, unfolded numeral_One])
lemma numeral_neq_zero: "numeral n \<noteq> 0"
by (simp add: of_nat_numeral [symmetric] nat_of_num_numeral [symmetric] nat_of_num_pos)
lemma zero_neq_numeral: "0 \<noteq> numeral n"
unfolding eq_commute [of 0] by (rule numeral_neq_zero)
lemmas eq_numeral_simps [simp] =
numeral_eq_iff
numeral_eq_one_iff
one_eq_numeral_iff
numeral_neq_zero
zero_neq_numeral
end
subsubsection \<open>Comparisons: class \<open>linordered_nonzero_semiring\<close>\<close>
context linordered_nonzero_semiring
begin
lemma numeral_le_iff: "numeral m \<le> numeral n \<longleftrightarrow> m \<le> n"
proof -
have "of_nat (numeral m) \<le> of_nat (numeral n) \<longleftrightarrow> m \<le> n"
by (simp only: less_eq_num_def nat_of_num_numeral of_nat_le_iff)
then show ?thesis by simp
qed
lemma one_le_numeral: "1 \<le> numeral n"
using numeral_le_iff [of num.One n] by (simp add: numeral_One)
lemma numeral_le_one_iff: "numeral n \<le> 1 \<longleftrightarrow> n \<le> num.One"
using numeral_le_iff [of n num.One] by (simp add: numeral_One)
lemma numeral_less_iff: "numeral m < numeral n \<longleftrightarrow> m < n"
proof -
have "of_nat (numeral m) < of_nat (numeral n) \<longleftrightarrow> m < n"
unfolding less_num_def nat_of_num_numeral of_nat_less_iff ..
then show ?thesis by simp
qed
lemma not_numeral_less_one: "\<not> numeral n < 1"
using numeral_less_iff [of n num.One] by (simp add: numeral_One)
lemma one_less_numeral_iff: "1 < numeral n \<longleftrightarrow> num.One < n"
using numeral_less_iff [of num.One n] by (simp add: numeral_One)
lemma zero_le_numeral: "0 \<le> numeral n"
using dual_order.trans one_le_numeral zero_le_one by blast
lemma zero_less_numeral: "0 < numeral n"
using less_linear not_numeral_less_one order.strict_trans zero_less_one by blast
lemma not_numeral_le_zero: "\<not> numeral n \<le> 0"
by (simp add: not_le zero_less_numeral)
lemma not_numeral_less_zero: "\<not> numeral n < 0"
by (simp add: not_less zero_le_numeral)
lemmas le_numeral_extra =
zero_le_one not_one_le_zero
order_refl [of 0] order_refl [of 1]
lemmas less_numeral_extra =
zero_less_one not_one_less_zero
less_irrefl [of 0] less_irrefl [of 1]
lemmas le_numeral_simps [simp] =
numeral_le_iff
one_le_numeral
numeral_le_one_iff
zero_le_numeral
not_numeral_le_zero
lemmas less_numeral_simps [simp] =
numeral_less_iff
one_less_numeral_iff
not_numeral_less_one
zero_less_numeral
not_numeral_less_zero
lemma min_0_1 [simp]:
fixes min' :: "'a \<Rightarrow> 'a \<Rightarrow> 'a"
defines "min' \<equiv> min"
shows
"min' 0 1 = 0"
"min' 1 0 = 0"
"min' 0 (numeral x) = 0"
"min' (numeral x) 0 = 0"
"min' 1 (numeral x) = 1"
"min' (numeral x) 1 = 1"
by (simp_all add: min'_def min_def le_num_One_iff)
lemma max_0_1 [simp]:
fixes max' :: "'a \<Rightarrow> 'a \<Rightarrow> 'a"
defines "max' \<equiv> max"
shows
"max' 0 1 = 1"
"max' 1 0 = 1"
"max' 0 (numeral x) = numeral x"
"max' (numeral x) 0 = numeral x"
"max' 1 (numeral x) = numeral x"
"max' (numeral x) 1 = numeral x"
by (simp_all add: max'_def max_def le_num_One_iff)
end
text \<open>Unfold \<open>min\<close> and \<open>max\<close> on numerals.\<close>
lemmas max_number_of [simp] =
max_def [of "numeral u" "numeral v"]
max_def [of "numeral u" "- numeral v"]
max_def [of "- numeral u" "numeral v"]
max_def [of "- numeral u" "- numeral v"] for u v
lemmas min_number_of [simp] =
min_def [of "numeral u" "numeral v"]
min_def [of "numeral u" "- numeral v"]
min_def [of "- numeral u" "numeral v"]
min_def [of "- numeral u" "- numeral v"] for u v
subsubsection \<open>Multiplication and negation: class \<open>ring_1\<close>\<close>
context ring_1
begin
subclass neg_numeral ..
lemma mult_neg_numeral_simps:
"- numeral m * - numeral n = numeral (m * n)"
"- numeral m * numeral n = - numeral (m * n)"
"numeral m * - numeral n = - numeral (m * n)"
by (simp_all only: mult_minus_left mult_minus_right minus_minus numeral_mult)
lemma mult_minus1 [simp]: "- 1 * z = - z"
by (simp add: numeral.simps)
lemma mult_minus1_right [simp]: "z * - 1 = - z"
by (simp add: numeral.simps)
lemma minus_sub_one_diff_one [simp]:
\<open>- sub m One - 1 = - numeral m\<close>
proof -
have \<open>sub m One + 1 = numeral m\<close>
by (simp flip: eq_diff_eq add: diff_numeral_special)
then have \<open>- (sub m One + 1) = - numeral m\<close>
by simp
then show ?thesis
by simp
qed
end
subsubsection \<open>Equality using \<open>iszero\<close> for rings with non-zero characteristic\<close>
context ring_1
begin
definition iszero :: "'a \<Rightarrow> bool"
where "iszero z \<longleftrightarrow> z = 0"
lemma iszero_0 [simp]: "iszero 0"
by (simp add: iszero_def)
lemma not_iszero_1 [simp]: "\<not> iszero 1"
by (simp add: iszero_def)
lemma not_iszero_Numeral1: "\<not> iszero Numeral1"
by (simp add: numeral_One)
lemma not_iszero_neg_1 [simp]: "\<not> iszero (- 1)"
by (simp add: iszero_def)
lemma not_iszero_neg_Numeral1: "\<not> iszero (- Numeral1)"
by (simp add: numeral_One)
lemma iszero_neg_numeral [simp]: "iszero (- numeral w) \<longleftrightarrow> iszero (numeral w)"
unfolding iszero_def by (rule neg_equal_0_iff_equal)
lemma eq_iff_iszero_diff: "x = y \<longleftrightarrow> iszero (x - y)"
unfolding iszero_def by (rule eq_iff_diff_eq_0)
text \<open>
The \<open>eq_numeral_iff_iszero\<close> lemmas are not declared \<open>[simp]\<close> by default,
because for rings of characteristic zero, better simp rules are possible.
For a type like integers mod \<open>n\<close>, type-instantiated versions of these rules
should be added to the simplifier, along with a type-specific rule for
deciding propositions of the form \<open>iszero (numeral w)\<close>.
bh: Maybe it would not be so bad to just declare these as simp rules anyway?
I should test whether these rules take precedence over the \<open>ring_char_0\<close>
rules in the simplifier.
\<close>
lemma eq_numeral_iff_iszero:
"numeral x = numeral y \<longleftrightarrow> iszero (sub x y)"
"numeral x = - numeral y \<longleftrightarrow> iszero (numeral (x + y))"
"- numeral x = numeral y \<longleftrightarrow> iszero (numeral (x + y))"
"- numeral x = - numeral y \<longleftrightarrow> iszero (sub y x)"
"numeral x = 1 \<longleftrightarrow> iszero (sub x One)"
"1 = numeral y \<longleftrightarrow> iszero (sub One y)"
"- numeral x = 1 \<longleftrightarrow> iszero (numeral (x + One))"
"1 = - numeral y \<longleftrightarrow> iszero (numeral (One + y))"
"numeral x = 0 \<longleftrightarrow> iszero (numeral x)"
"0 = numeral y \<longleftrightarrow> iszero (numeral y)"
"- numeral x = 0 \<longleftrightarrow> iszero (numeral x)"
"0 = - numeral y \<longleftrightarrow> iszero (numeral y)"
unfolding eq_iff_iszero_diff diff_numeral_simps diff_numeral_special
by simp_all
end
subsubsection \<open>Equality and negation: class \<open>ring_char_0\<close>\<close>
context ring_char_0
begin
lemma not_iszero_numeral [simp]: "\<not> iszero (numeral w)"
by (simp add: iszero_def)
lemma neg_numeral_eq_iff: "- numeral m = - numeral n \<longleftrightarrow> m = n"
by simp
lemma numeral_neq_neg_numeral: "numeral m \<noteq> - numeral n"
by (simp add: eq_neg_iff_add_eq_0 numeral_plus_numeral)
lemma neg_numeral_neq_numeral: "- numeral m \<noteq> numeral n"
by (rule numeral_neq_neg_numeral [symmetric])
lemma zero_neq_neg_numeral: "0 \<noteq> - numeral n"
by simp
lemma neg_numeral_neq_zero: "- numeral n \<noteq> 0"
by simp
lemma one_neq_neg_numeral: "1 \<noteq> - numeral n"
using numeral_neq_neg_numeral [of One n] by (simp add: numeral_One)
lemma neg_numeral_neq_one: "- numeral n \<noteq> 1"
using neg_numeral_neq_numeral [of n One] by (simp add: numeral_One)
lemma neg_one_neq_numeral: "- 1 \<noteq> numeral n"
using neg_numeral_neq_numeral [of One n] by (simp add: numeral_One)
lemma numeral_neq_neg_one: "numeral n \<noteq> - 1"
using numeral_neq_neg_numeral [of n One] by (simp add: numeral_One)
lemma neg_one_eq_numeral_iff: "- 1 = - numeral n \<longleftrightarrow> n = One"
using neg_numeral_eq_iff [of One n] by (auto simp add: numeral_One)
lemma numeral_eq_neg_one_iff: "- numeral n = - 1 \<longleftrightarrow> n = One"
using neg_numeral_eq_iff [of n One] by (auto simp add: numeral_One)
lemma neg_one_neq_zero: "- 1 \<noteq> 0"
by simp
lemma zero_neq_neg_one: "0 \<noteq> - 1"
by simp
lemma neg_one_neq_one: "- 1 \<noteq> 1"
using neg_numeral_neq_numeral [of One One] by (simp only: numeral_One not_False_eq_True)
lemma one_neq_neg_one: "1 \<noteq> - 1"
using numeral_neq_neg_numeral [of One One] by (simp only: numeral_One not_False_eq_True)
lemmas eq_neg_numeral_simps [simp] =
neg_numeral_eq_iff
numeral_neq_neg_numeral neg_numeral_neq_numeral
one_neq_neg_numeral neg_numeral_neq_one
zero_neq_neg_numeral neg_numeral_neq_zero
neg_one_neq_numeral numeral_neq_neg_one
neg_one_eq_numeral_iff numeral_eq_neg_one_iff
neg_one_neq_zero zero_neq_neg_one
neg_one_neq_one one_neq_neg_one
end
subsubsection \<open>Structures with negation and order: class \<open>linordered_idom\<close>\<close>
context linordered_idom
begin
subclass ring_char_0 ..
lemma neg_numeral_le_iff: "- numeral m \<le> - numeral n \<longleftrightarrow> n \<le> m"
by (simp only: neg_le_iff_le numeral_le_iff)
lemma neg_numeral_less_iff: "- numeral m < - numeral n \<longleftrightarrow> n < m"
by (simp only: neg_less_iff_less numeral_less_iff)
lemma neg_numeral_less_zero: "- numeral n < 0"
by (simp only: neg_less_0_iff_less zero_less_numeral)
lemma neg_numeral_le_zero: "- numeral n \<le> 0"
by (simp only: neg_le_0_iff_le zero_le_numeral)
lemma not_zero_less_neg_numeral: "\<not> 0 < - numeral n"
by (simp only: not_less neg_numeral_le_zero)
lemma not_zero_le_neg_numeral: "\<not> 0 \<le> - numeral n"
by (simp only: not_le neg_numeral_less_zero)
lemma neg_numeral_less_numeral: "- numeral m < numeral n"
using neg_numeral_less_zero zero_less_numeral by (rule less_trans)
lemma neg_numeral_le_numeral: "- numeral m \<le> numeral n"
by (simp only: less_imp_le neg_numeral_less_numeral)
lemma not_numeral_less_neg_numeral: "\<not> numeral m < - numeral n"
by (simp only: not_less neg_numeral_le_numeral)
lemma not_numeral_le_neg_numeral: "\<not> numeral m \<le> - numeral n"
by (simp only: not_le neg_numeral_less_numeral)
lemma neg_numeral_less_one: "- numeral m < 1"
by (rule neg_numeral_less_numeral [of m One, unfolded numeral_One])
lemma neg_numeral_le_one: "- numeral m \<le> 1"
by (rule neg_numeral_le_numeral [of m One, unfolded numeral_One])
lemma not_one_less_neg_numeral: "\<not> 1 < - numeral m"
by (simp only: not_less neg_numeral_le_one)
lemma not_one_le_neg_numeral: "\<not> 1 \<le> - numeral m"
by (simp only: not_le neg_numeral_less_one)
lemma not_numeral_less_neg_one: "\<not> numeral m < - 1"
using not_numeral_less_neg_numeral [of m One] by (simp add: numeral_One)
lemma not_numeral_le_neg_one: "\<not> numeral m \<le> - 1"
using not_numeral_le_neg_numeral [of m One] by (simp add: numeral_One)
lemma neg_one_less_numeral: "- 1 < numeral m"
using neg_numeral_less_numeral [of One m] by (simp add: numeral_One)
lemma neg_one_le_numeral: "- 1 \<le> numeral m"
using neg_numeral_le_numeral [of One m] by (simp add: numeral_One)
lemma neg_numeral_less_neg_one_iff: "- numeral m < - 1 \<longleftrightarrow> m \<noteq> One"
by (cases m) simp_all
lemma neg_numeral_le_neg_one: "- numeral m \<le> - 1"
by simp
lemma not_neg_one_less_neg_numeral: "\<not> - 1 < - numeral m"
by simp
lemma not_neg_one_le_neg_numeral_iff: "\<not> - 1 \<le> - numeral m \<longleftrightarrow> m \<noteq> One"
by (cases m) simp_all
lemma sub_non_negative: "sub n m \<ge> 0 \<longleftrightarrow> n \<ge> m"
by (simp only: sub_def le_diff_eq) simp
lemma sub_positive: "sub n m > 0 \<longleftrightarrow> n > m"
by (simp only: sub_def less_diff_eq) simp
lemma sub_non_positive: "sub n m \<le> 0 \<longleftrightarrow> n \<le> m"
by (simp only: sub_def diff_le_eq) simp
lemma sub_negative: "sub n m < 0 \<longleftrightarrow> n < m"
by (simp only: sub_def diff_less_eq) simp
lemmas le_neg_numeral_simps [simp] =
neg_numeral_le_iff
neg_numeral_le_numeral not_numeral_le_neg_numeral
neg_numeral_le_zero not_zero_le_neg_numeral
neg_numeral_le_one not_one_le_neg_numeral
neg_one_le_numeral not_numeral_le_neg_one
neg_numeral_le_neg_one not_neg_one_le_neg_numeral_iff
lemma le_minus_one_simps [simp]:
"- 1 \<le> 0"
"- 1 \<le> 1"
"\<not> 0 \<le> - 1"
"\<not> 1 \<le> - 1"
by simp_all
lemmas less_neg_numeral_simps [simp] =
neg_numeral_less_iff
neg_numeral_less_numeral not_numeral_less_neg_numeral
neg_numeral_less_zero not_zero_less_neg_numeral
neg_numeral_less_one not_one_less_neg_numeral
neg_one_less_numeral not_numeral_less_neg_one
neg_numeral_less_neg_one_iff not_neg_one_less_neg_numeral
lemma less_minus_one_simps [simp]:
"- 1 < 0"
"- 1 < 1"
"\<not> 0 < - 1"
"\<not> 1 < - 1"
by (simp_all add: less_le)
lemma abs_numeral [simp]: "\<bar>numeral n\<bar> = numeral n"
by simp
lemma abs_neg_numeral [simp]: "\<bar>- numeral n\<bar> = numeral n"
by (simp only: abs_minus_cancel abs_numeral)
lemma abs_neg_one [simp]: "\<bar>- 1\<bar> = 1"
by simp
end
subsubsection \<open>Natural numbers\<close>
lemma numeral_num_of_nat:
"numeral (num_of_nat n) = n" if "n > 0"
using that nat_of_num_numeral num_of_nat_inverse by simp
lemma Suc_1 [simp]: "Suc 1 = 2"
unfolding Suc_eq_plus1 by (rule one_add_one)
lemma Suc_numeral [simp]: "Suc (numeral n) = numeral (n + One)"
unfolding Suc_eq_plus1 by (rule numeral_plus_one)
definition pred_numeral :: "num \<Rightarrow> nat"
where "pred_numeral k = numeral k - 1"
declare [[code drop: pred_numeral]]
lemma numeral_eq_Suc: "numeral k = Suc (pred_numeral k)"
by (simp add: pred_numeral_def)
lemma eval_nat_numeral:
"numeral One = Suc 0"
"numeral (Bit0 n) = Suc (numeral (BitM n))"
"numeral (Bit1 n) = Suc (numeral (Bit0 n))"
by (simp_all add: numeral.simps BitM_plus_one)
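text \<open>Together with \<open>BitM.simps\<close>, these equations unfold a numeral into successor form,
e.g. \<open>numeral (Bit0 (Bit0 One)) = Suc (numeral (Bit1 One)) = Suc (Suc (numeral (Bit0 One)))
= Suc (Suc (Suc (Suc 0)))\<close> for the numeral \<open>4\<close>.\<close>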
lemma pred_numeral_simps [simp]:
"pred_numeral One = 0"
"pred_numeral (Bit0 k) = numeral (BitM k)"
"pred_numeral (Bit1 k) = numeral (Bit0 k)"
by (simp_all only: pred_numeral_def eval_nat_numeral diff_Suc_Suc diff_0)
lemma pred_numeral_inc [simp]:
"pred_numeral (Num.inc k) = numeral k"
by (simp only: pred_numeral_def numeral_inc diff_add_inverse2)
lemma numeral_2_eq_2: "2 = Suc (Suc 0)"
by (simp add: eval_nat_numeral)
lemma numeral_3_eq_3: "3 = Suc (Suc (Suc 0))"
by (simp add: eval_nat_numeral)
lemma numeral_1_eq_Suc_0: "Numeral1 = Suc 0"
by (simp only: numeral_One One_nat_def)
lemma Suc_nat_number_of_add: "Suc (numeral v + n) = numeral (v + One) + n"
by simp
lemma numerals: "Numeral1 = (1::nat)" "2 = Suc (Suc 0)"
by (rule numeral_One) (rule numeral_2_eq_2)
lemmas numeral_nat = eval_nat_numeral BitM.simps One_nat_def
text \<open>Comparisons involving \<^term>\<open>Suc\<close>.\<close>
lemma eq_numeral_Suc [simp]: "numeral k = Suc n \<longleftrightarrow> pred_numeral k = n"
by (simp add: numeral_eq_Suc)
lemma Suc_eq_numeral [simp]: "Suc n = numeral k \<longleftrightarrow> n = pred_numeral k"
by (simp add: numeral_eq_Suc)
lemma less_numeral_Suc [simp]: "numeral k < Suc n \<longleftrightarrow> pred_numeral k < n"
by (simp add: numeral_eq_Suc)
lemma less_Suc_numeral [simp]: "Suc n < numeral k \<longleftrightarrow> n < pred_numeral k"
by (simp add: numeral_eq_Suc)
lemma le_numeral_Suc [simp]: "numeral k \<le> Suc n \<longleftrightarrow> pred_numeral k \<le> n"
by (simp add: numeral_eq_Suc)
lemma le_Suc_numeral [simp]: "Suc n \<le> numeral k \<longleftrightarrow> n \<le> pred_numeral k"
by (simp add: numeral_eq_Suc)
lemma diff_Suc_numeral [simp]: "Suc n - numeral k = n - pred_numeral k"
by (simp add: numeral_eq_Suc)
lemma diff_numeral_Suc [simp]: "numeral k - Suc n = pred_numeral k - n"
by (simp add: numeral_eq_Suc)
lemma max_Suc_numeral [simp]: "max (Suc n) (numeral k) = Suc (max n (pred_numeral k))"
by (simp add: numeral_eq_Suc)
lemma max_numeral_Suc [simp]: "max (numeral k) (Suc n) = Suc (max (pred_numeral k) n)"
by (simp add: numeral_eq_Suc)
lemma min_Suc_numeral [simp]: "min (Suc n) (numeral k) = Suc (min n (pred_numeral k))"
by (simp add: numeral_eq_Suc)
lemma min_numeral_Suc [simp]: "min (numeral k) (Suc n) = Suc (min (pred_numeral k) n)"
by (simp add: numeral_eq_Suc)
text \<open>For \<^term>\<open>case_nat\<close> and \<^term>\<open>rec_nat\<close>.\<close>
lemma case_nat_numeral [simp]: "case_nat a f (numeral v) = (let pv = pred_numeral v in f pv)"
by (simp add: numeral_eq_Suc)
lemma case_nat_add_eq_if [simp]:
"case_nat a f ((numeral v) + n) = (let pv = pred_numeral v in f (pv + n))"
by (simp add: numeral_eq_Suc)
lemma rec_nat_numeral [simp]:
"rec_nat a f (numeral v) = (let pv = pred_numeral v in f pv (rec_nat a f pv))"
by (simp add: numeral_eq_Suc Let_def)
lemma rec_nat_add_eq_if [simp]:
"rec_nat a f (numeral v + n) = (let pv = pred_numeral v in f (pv + n) (rec_nat a f (pv + n)))"
by (simp add: numeral_eq_Suc Let_def)
text \<open>Case analysis on \<^term>\<open>n < 2\<close>.\<close>
lemma less_2_cases: "n < 2 \<Longrightarrow> n = 0 \<or> n = Suc 0"
by (auto simp add: numeral_2_eq_2)
lemma less_2_cases_iff: "n < 2 \<longleftrightarrow> n = 0 \<or> n = Suc 0"
by (auto simp add: numeral_2_eq_2)
text \<open>Removal of Small Numerals: 0, 1 and (in additive positions) 2.\<close>
text \<open>bh: Are these rules really a good idea? LCP: well, it already happens for 0 and 1!\<close>
lemma add_2_eq_Suc [simp]: "2 + n = Suc (Suc n)"
by simp
lemma add_2_eq_Suc' [simp]: "n + 2 = Suc (Suc n)"
by simp
text \<open>Can be used to eliminate long strings of Sucs, but not by default.\<close>
lemma Suc3_eq_add_3: "Suc (Suc (Suc n)) = 3 + n"
by simp
lemmas nat_1_add_1 = one_add_one [where 'a=nat] (* legacy *)
context semiring_numeral
begin
lemma numeral_add_unfold_funpow:
\<open>numeral k + a = ((+) 1 ^^ numeral k) a\<close>
proof (rule sym, induction k arbitrary: a)
case One
then show ?case
by (simp add: numeral_One Num.numeral_One)
next
case (Bit0 k)
then show ?case
by (simp add: numeral_Bit0 Num.numeral_Bit0 ac_simps funpow_add)
next
case (Bit1 k)
then show ?case
by (simp add: numeral_Bit1 Num.numeral_Bit1 ac_simps funpow_add)
qed
end
context semiring_1
begin
lemma numeral_unfold_funpow:
\<open>numeral k = ((+) 1 ^^ numeral k) 0\<close>
using numeral_add_unfold_funpow [of k 0] by simp
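text \<open>Here the iteration count \<open>numeral k\<close> is a natural number, while the result lives
in the semiring; for example \<open>numeral (Bit0 One) = ((+) 1 ^^ 2) 0 = 1 + (1 + 0)\<close>.\<close>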
end
context
includes lifting_syntax
begin
lemma transfer_rule_numeral:
\<open>((=) ===> R) numeral numeral\<close>
if [transfer_rule]: \<open>R 0 0\<close> \<open>R 1 1\<close>
\<open>(R ===> R ===> R) (+) (+)\<close>
for R :: \<open>'a::{semiring_numeral,monoid_add} \<Rightarrow> 'b::{semiring_numeral,monoid_add} \<Rightarrow> bool\<close>
proof -
have "((=) ===> R) (\<lambda>k. ((+) 1 ^^ numeral k) 0) (\<lambda>k. ((+) 1 ^^ numeral k) 0)"
by transfer_prover
moreover have \<open>numeral = (\<lambda>k. ((+) (1::'a) ^^ numeral k) 0)\<close>
using numeral_add_unfold_funpow [where ?'a = 'a, of _ 0]
by (simp add: fun_eq_iff)
moreover have \<open>numeral = (\<lambda>k. ((+) (1::'b) ^^ numeral k) 0)\<close>
using numeral_add_unfold_funpow [where ?'a = 'b, of _ 0]
by (simp add: fun_eq_iff)
ultimately show ?thesis
by simp
qed
end
subsection \<open>Particular lemmas concerning \<^term>\<open>2\<close>\<close>
context linordered_field
begin
subclass field_char_0 ..
lemma half_gt_zero_iff: "0 < a / 2 \<longleftrightarrow> 0 < a"
by (auto simp add: field_simps)
lemma half_gt_zero [simp]: "0 < a \<Longrightarrow> 0 < a / 2"
by (simp add: half_gt_zero_iff)
end
subsection \<open>Numeral equations as default simplification rules\<close>
declare (in numeral) numeral_One [simp]
declare (in numeral) numeral_plus_numeral [simp]
declare (in numeral) add_numeral_special [simp]
declare (in neg_numeral) add_neg_numeral_simps [simp]
declare (in neg_numeral) add_neg_numeral_special [simp]
declare (in neg_numeral) diff_numeral_simps [simp]
declare (in neg_numeral) diff_numeral_special [simp]
declare (in semiring_numeral) numeral_times_numeral [simp]
declare (in ring_1) mult_neg_numeral_simps [simp]
subsubsection \<open>Special Simplification for Constants\<close>
text \<open>These distributive laws move literals inside sums and differences.\<close>
lemmas distrib_right_numeral [simp] = distrib_right [of _ _ "numeral v"] for v
lemmas distrib_left_numeral [simp] = distrib_left [of "numeral v"] for v
lemmas left_diff_distrib_numeral [simp] = left_diff_distrib [of _ _ "numeral v"] for v
lemmas right_diff_distrib_numeral [simp] = right_diff_distrib [of "numeral v"] for v
text \<open>These are actually for fields, like real.\<close>
lemmas zero_less_divide_iff_numeral [simp, no_atp] = zero_less_divide_iff [of "numeral w"] for w
lemmas divide_less_0_iff_numeral [simp, no_atp] = divide_less_0_iff [of "numeral w"] for w
lemmas zero_le_divide_iff_numeral [simp, no_atp] = zero_le_divide_iff [of "numeral w"] for w
lemmas divide_le_0_iff_numeral [simp, no_atp] = divide_le_0_iff [of "numeral w"] for w
text \<open>Replaces \<open>inverse #nn\<close> by \<open>1/#nn\<close>. It looks
strange, but then other simprocs simplify the quotient.\<close>
lemmas inverse_eq_divide_numeral [simp] =
inverse_eq_divide [of "numeral w"] for w
lemmas inverse_eq_divide_neg_numeral [simp] =
inverse_eq_divide [of "- numeral w"] for w
text \<open>These laws simplify inequalities, moving unary minus from a term
into the literal.\<close>
lemmas equation_minus_iff_numeral [no_atp] =
equation_minus_iff [of "numeral v"] for v
lemmas minus_equation_iff_numeral [no_atp] =
minus_equation_iff [of _ "numeral v"] for v
lemmas le_minus_iff_numeral [no_atp] =
le_minus_iff [of "numeral v"] for v
lemmas minus_le_iff_numeral [no_atp] =
minus_le_iff [of _ "numeral v"] for v
lemmas less_minus_iff_numeral [no_atp] =
less_minus_iff [of "numeral v"] for v
lemmas minus_less_iff_numeral [no_atp] =
minus_less_iff [of _ "numeral v"] for v
(* FIXME maybe simproc *)
text \<open>Cancellation of constant factors in comparisons (\<open><\<close> and \<open>\<le>\<close>)\<close>
lemmas mult_less_cancel_left_numeral [simp, no_atp] = mult_less_cancel_left [of "numeral v"] for v
lemmas mult_less_cancel_right_numeral [simp, no_atp] = mult_less_cancel_right [of _ "numeral v"] for v
lemmas mult_le_cancel_left_numeral [simp, no_atp] = mult_le_cancel_left [of "numeral v"] for v
lemmas mult_le_cancel_right_numeral [simp, no_atp] = mult_le_cancel_right [of _ "numeral v"] for v
text \<open>Multiplying out constant divisors in comparisons (\<open><\<close>, \<open>\<le>\<close> and \<open>=\<close>)\<close>
named_theorems divide_const_simps "simplification rules to simplify comparisons involving constant divisors"
lemmas le_divide_eq_numeral1 [simp,divide_const_simps] =
pos_le_divide_eq [of "numeral w", OF zero_less_numeral]
neg_le_divide_eq [of "- numeral w", OF neg_numeral_less_zero] for w
lemmas divide_le_eq_numeral1 [simp,divide_const_simps] =
pos_divide_le_eq [of "numeral w", OF zero_less_numeral]
neg_divide_le_eq [of "- numeral w", OF neg_numeral_less_zero] for w
lemmas less_divide_eq_numeral1 [simp,divide_const_simps] =
pos_less_divide_eq [of "numeral w", OF zero_less_numeral]
neg_less_divide_eq [of "- numeral w", OF neg_numeral_less_zero] for w
lemmas divide_less_eq_numeral1 [simp,divide_const_simps] =
pos_divide_less_eq [of "numeral w", OF zero_less_numeral]
neg_divide_less_eq [of "- numeral w", OF neg_numeral_less_zero] for w
lemmas eq_divide_eq_numeral1 [simp,divide_const_simps] =
eq_divide_eq [of _ _ "numeral w"]
eq_divide_eq [of _ _ "- numeral w"] for w
lemmas divide_eq_eq_numeral1 [simp,divide_const_simps] =
divide_eq_eq [of _ "numeral w"]
divide_eq_eq [of _ "- numeral w"] for w
subsubsection \<open>Optional Simplification Rules Involving Constants\<close>
text \<open>Simplify quotients that are compared with a literal constant.\<close>
lemmas le_divide_eq_numeral [divide_const_simps] =
le_divide_eq [of "numeral w"]
le_divide_eq [of "- numeral w"] for w
lemmas divide_le_eq_numeral [divide_const_simps] =
divide_le_eq [of _ _ "numeral w"]
divide_le_eq [of _ _ "- numeral w"] for w
lemmas less_divide_eq_numeral [divide_const_simps] =
less_divide_eq [of "numeral w"]
less_divide_eq [of "- numeral w"] for w
lemmas divide_less_eq_numeral [divide_const_simps] =
divide_less_eq [of _ _ "numeral w"]
divide_less_eq [of _ _ "- numeral w"] for w
lemmas eq_divide_eq_numeral [divide_const_simps] =
eq_divide_eq [of "numeral w"]
eq_divide_eq [of "- numeral w"] for w
lemmas divide_eq_eq_numeral [divide_const_simps] =
divide_eq_eq [of _ _ "numeral w"]
divide_eq_eq [of _ _ "- numeral w"] for w
text \<open>Not good as automatic simprules because they cause case splits.\<close>
lemmas [divide_const_simps] =
le_divide_eq_1 divide_le_eq_1 less_divide_eq_1 divide_less_eq_1
subsection \<open>Setting up simprocs\<close>
lemma mult_numeral_1: "Numeral1 * a = a"
for a :: "'a::semiring_numeral"
by simp
lemma mult_numeral_1_right: "a * Numeral1 = a"
for a :: "'a::semiring_numeral"
by simp
lemma divide_numeral_1: "a / Numeral1 = a"
for a :: "'a::field"
by simp
lemma inverse_numeral_1: "inverse Numeral1 = (Numeral1::'a::division_ring)"
by simp
text \<open>
Theorem lists for the cancellation simprocs. The use of a binary
numeral for 1 reduces the number of special cases.
\<close>
lemma mult_1s_semiring_numeral:
"Numeral1 * a = a"
"a * Numeral1 = a"
for a :: "'a::semiring_numeral"
by simp_all
lemma mult_1s_ring_1:
"- Numeral1 * b = - b"
"b * - Numeral1 = - b"
for b :: "'a::ring_1"
by simp_all
lemmas mult_1s = mult_1s_semiring_numeral mult_1s_ring_1
setup \<open>
Reorient_Proc.add
(fn Const (\<^const_name>\<open>numeral\<close>, _) $ _ => true
| Const (\<^const_name>\<open>uminus\<close>, _) $ (Const (\<^const_name>\<open>numeral\<close>, _) $ _) => true
| _ => false)
\<close>
simproc_setup reorient_numeral ("numeral w = x" | "- numeral w = y") =
- Reorient_Proc.proc
+ \<open>K Reorient_Proc.proc\<close>
subsubsection \<open>Simplification of arithmetic operations on integer constants\<close>
lemmas arith_special = (* already declared simp above *)
add_numeral_special add_neg_numeral_special
diff_numeral_special
lemmas arith_extra_simps = (* rules already in simpset *)
numeral_plus_numeral add_neg_numeral_simps add_0_left add_0_right
minus_zero
diff_numeral_simps diff_0 diff_0_right
numeral_times_numeral mult_neg_numeral_simps
mult_zero_left mult_zero_right
abs_numeral abs_neg_numeral
text \<open>
For making a minimal simpset, one must include these default simprules.
Also include \<open>simp_thms\<close>.
\<close>
lemmas arith_simps =
add_num_simps mult_num_simps sub_num_simps
BitM.simps dbl_simps dbl_inc_simps dbl_dec_simps
abs_zero abs_one arith_extra_simps
lemmas more_arith_simps =
neg_le_iff_le
minus_zero left_minus right_minus
mult_1_left mult_1_right
mult_minus_left mult_minus_right
minus_add_distrib minus_minus mult.assoc
lemmas of_nat_simps =
of_nat_0 of_nat_1 of_nat_Suc of_nat_add of_nat_mult
text \<open>Simplification of relational operations.\<close>
lemmas eq_numeral_extra =
zero_neq_one one_neq_zero
lemmas rel_simps =
le_num_simps less_num_simps eq_num_simps
le_numeral_simps le_neg_numeral_simps le_minus_one_simps le_numeral_extra
less_numeral_simps less_neg_numeral_simps less_minus_one_simps less_numeral_extra
eq_numeral_simps eq_neg_numeral_simps eq_numeral_extra
lemma Let_numeral [simp]: "Let (numeral v) f = f (numeral v)"
\<comment> \<open>Unfold all \<open>let\<close>s involving constants\<close>
unfolding Let_def ..
lemma Let_neg_numeral [simp]: "Let (- numeral v) f = f (- numeral v)"
\<comment> \<open>Unfold all \<open>let\<close>s involving constants\<close>
unfolding Let_def ..
declaration \<open>
let
fun number_of ctxt T n =
if not (Sign.of_sort (Proof_Context.theory_of ctxt) (T, \<^sort>\<open>numeral\<close>))
then raise CTERM ("number_of", [])
else Numeral.mk_cnumber (Thm.ctyp_of ctxt T) n;
in
K (
Lin_Arith.set_number_of number_of
#> Lin_Arith.add_simps
@{thms arith_simps more_arith_simps rel_simps pred_numeral_simps
arith_special numeral_One of_nat_simps uminus_numeral_One
Suc_numeral Let_numeral Let_neg_numeral Let_0 Let_1
le_Suc_numeral le_numeral_Suc less_Suc_numeral less_numeral_Suc
Suc_eq_numeral eq_numeral_Suc mult_Suc mult_Suc_right of_nat_numeral})
end
\<close>
subsubsection \<open>Simplification of arithmetic when nested to the right\<close>
lemma add_numeral_left [simp]: "numeral v + (numeral w + z) = (numeral(v + w) + z)"
by (simp_all add: add.assoc [symmetric])
lemma add_neg_numeral_left [simp]:
"numeral v + (- numeral w + y) = (sub v w + y)"
"- numeral v + (numeral w + y) = (sub w v + y)"
"- numeral v + (- numeral w + y) = (- numeral(v + w) + y)"
by (simp_all add: add.assoc [symmetric])
lemma mult_numeral_left_semiring_numeral:
"numeral v * (numeral w * z) = (numeral(v * w) * z :: 'a::semiring_numeral)"
by (simp add: mult.assoc [symmetric])
lemma mult_numeral_left_ring_1:
"- numeral v * (numeral w * y) = (- numeral(v * w) * y :: 'a::ring_1)"
"numeral v * (- numeral w * y) = (- numeral(v * w) * y :: 'a::ring_1)"
"- numeral v * (- numeral w * y) = (numeral(v * w) * y :: 'a::ring_1)"
by (simp_all add: mult.assoc [symmetric])
lemmas mult_numeral_left [simp] =
mult_numeral_left_semiring_numeral
mult_numeral_left_ring_1
hide_const (open) One Bit0 Bit1 BitM inc pow sqr sub dbl dbl_inc dbl_dec
subsection \<open>Code module namespace\<close>
code_identifier
code_module Num \<rightharpoonup> (SML) Arith and (OCaml) Arith and (Haskell) Arith
subsection \<open>Printing of evaluated natural numbers as numerals\<close>
lemma [code_post]:
"Suc 0 = 1"
"Suc 1 = 2"
"Suc (numeral n) = numeral (Num.inc n)"
by (simp_all add: numeral_inc)
lemmas [code_post] = Num.inc.simps
subsection \<open>More on auxiliary conversion\<close>
context semiring_1
begin
lemma numeral_num_of_nat_unfold:
\<open>numeral (num_of_nat n) = (if n = 0 then 1 else of_nat n)\<close>
by (induction n) (simp_all add: numeral_inc ac_simps)
lemma num_of_nat_numeral_eq [simp]:
\<open>num_of_nat (numeral q) = q\<close>
proof (induction q)
case One
then show ?case
by simp
next
case (Bit0 q)
then have "num_of_nat (numeral (num.Bit0 q)) = num_of_nat (numeral q + numeral q)"
by (simp only: Num.numeral_Bit0 Num.numeral_add)
also have "\<dots> = num.Bit0 (num_of_nat (numeral q))"
by (rule num_of_nat_double) simp
finally show ?case
using Bit0.IH by simp
next
case (Bit1 q)
then have "num_of_nat (numeral (num.Bit1 q)) = num_of_nat (numeral q + numeral q + 1)"
by (simp only: Num.numeral_Bit1 Num.numeral_add)
also have "\<dots> = num_of_nat (numeral q + numeral q) + num_of_nat 1"
by (rule num_of_nat_plus_distrib) auto
also have "\<dots> = num.Bit0 (num_of_nat (numeral q)) + num_of_nat 1"
by (subst num_of_nat_double) auto
finally show ?case
using Bit1.IH by simp
qed
end
end
diff --git a/src/HOL/Numeral_Simprocs.thy b/src/HOL/Numeral_Simprocs.thy
--- a/src/HOL/Numeral_Simprocs.thy
+++ b/src/HOL/Numeral_Simprocs.thy
@@ -1,299 +1,299 @@
(* Author: Various *)
section \<open>Combination and Cancellation Simprocs for Numeral Expressions\<close>
theory Numeral_Simprocs
imports Parity
begin
ML_file \<open>~~/src/Provers/Arith/assoc_fold.ML\<close>
ML_file \<open>~~/src/Provers/Arith/cancel_numerals.ML\<close>
ML_file \<open>~~/src/Provers/Arith/combine_numerals.ML\<close>
ML_file \<open>~~/src/Provers/Arith/cancel_numeral_factor.ML\<close>
ML_file \<open>~~/src/Provers/Arith/extract_common_term.ML\<close>
lemmas semiring_norm =
Let_def arith_simps diff_nat_numeral rel_simps
if_False if_True
add_Suc add_numeral_left
add_neg_numeral_left mult_numeral_left
numeral_One [symmetric] uminus_numeral_One [symmetric] Suc_eq_plus1
eq_numeral_iff_iszero not_iszero_Numeral1
text \<open>For \<open>combine_numerals\<close>\<close>
lemma left_add_mult_distrib: "i*u + (j*u + k) = (i+j)*u + (k::nat)"
by (simp add: add_mult_distrib)
text \<open>For \<open>cancel_numerals\<close>\<close>
lemma nat_diff_add_eq1:
"j <= (i::nat) ==> ((i*u + m) - (j*u + n)) = (((i-j)*u + m) - n)"
by (simp split: nat_diff_split add: add_mult_distrib)
lemma nat_diff_add_eq2:
"i <= (j::nat) ==> ((i*u + m) - (j*u + n)) = (m - ((j-i)*u + n))"
by (simp split: nat_diff_split add: add_mult_distrib)
lemma nat_eq_add_iff1:
"j <= (i::nat) ==> (i*u + m = j*u + n) = ((i-j)*u + m = n)"
by (auto split: nat_diff_split simp add: add_mult_distrib)
lemma nat_eq_add_iff2:
"i <= (j::nat) ==> (i*u + m = j*u + n) = (m = (j-i)*u + n)"
by (auto split: nat_diff_split simp add: add_mult_distrib)
lemma nat_less_add_iff1:
"j <= (i::nat) ==> (i*u + m < j*u + n) = ((i-j)*u + m < n)"
by (auto split: nat_diff_split simp add: add_mult_distrib)
lemma nat_less_add_iff2:
"i <= (j::nat) ==> (i*u + m < j*u + n) = (m < (j-i)*u + n)"
by (auto split: nat_diff_split simp add: add_mult_distrib)
lemma nat_le_add_iff1:
"j <= (i::nat) ==> (i*u + m <= j*u + n) = ((i-j)*u + m <= n)"
by (auto split: nat_diff_split simp add: add_mult_distrib)
lemma nat_le_add_iff2:
"i <= (j::nat) ==> (i*u + m <= j*u + n) = (m <= (j-i)*u + n)"
by (auto split: nat_diff_split simp add: add_mult_distrib)
text \<open>For \<open>cancel_numeral_factors\<close>\<close>
lemma nat_mult_le_cancel1: "(0::nat) < k ==> (k*m <= k*n) = (m<=n)"
by auto
lemma nat_mult_less_cancel1: "(0::nat) < k ==> (k*m < k*n) = (m<n)"
by auto
lemma nat_mult_eq_cancel1: "(0::nat) < k ==> (k*m = k*n) = (m=n)"
by auto
lemma nat_mult_div_cancel1: "(0::nat) < k ==> (k*m) div (k*n) = (m div n)"
by auto
lemma nat_mult_dvd_cancel_disj[simp]:
"(k*m) dvd (k*n) = (k=0 \<or> m dvd (n::nat))"
by (auto simp: dvd_eq_mod_eq_0 mod_mult_mult1)
lemma nat_mult_dvd_cancel1: "0 < k \<Longrightarrow> (k*m) dvd (k*n::nat) = (m dvd n)"
by auto
text \<open>For \<open>cancel_factor\<close>\<close>
lemmas nat_mult_le_cancel_disj = mult_le_cancel1
lemmas nat_mult_less_cancel_disj = mult_less_cancel1
lemma nat_mult_eq_cancel_disj:
fixes k m n :: nat
shows "k * m = k * n \<longleftrightarrow> k = 0 \<or> m = n"
by (fact mult_cancel_left)
lemma nat_mult_div_cancel_disj:
fixes k m n :: nat
shows "(k * m) div (k * n) = (if k = 0 then 0 else m div n)"
by (fact div_mult_mult1_if)
lemma numeral_times_minus_swap:
fixes x:: "'a::comm_ring_1" shows "numeral w * -x = x * - numeral w"
by (simp add: ac_simps)
ML_file \<open>Tools/numeral_simprocs.ML\<close>
simproc_setup semiring_assoc_fold
("(a::'a::comm_semiring_1_cancel) * b") =
- \<open>fn phi => Numeral_Simprocs.assoc_fold\<close>
+ \<open>K Numeral_Simprocs.assoc_fold\<close>
(* TODO: see whether the type class can be generalized further *)
simproc_setup int_combine_numerals
("(i::'a::comm_ring_1) + j" | "(i::'a::comm_ring_1) - j") =
- \<open>fn phi => Numeral_Simprocs.combine_numerals\<close>
+ \<open>K Numeral_Simprocs.combine_numerals\<close>
simproc_setup field_combine_numerals
("(i::'a::{field,ring_char_0}) + j"
|"(i::'a::{field,ring_char_0}) - j") =
- \<open>fn phi => Numeral_Simprocs.field_combine_numerals\<close>
+ \<open>K Numeral_Simprocs.field_combine_numerals\<close>
simproc_setup inteq_cancel_numerals
("(l::'a::comm_ring_1) + m = n"
|"(l::'a::comm_ring_1) = m + n"
|"(l::'a::comm_ring_1) - m = n"
|"(l::'a::comm_ring_1) = m - n"
|"(l::'a::comm_ring_1) * m = n"
|"(l::'a::comm_ring_1) = m * n"
|"- (l::'a::comm_ring_1) = m"
|"(l::'a::comm_ring_1) = - m") =
- \<open>fn phi => Numeral_Simprocs.eq_cancel_numerals\<close>
+ \<open>K Numeral_Simprocs.eq_cancel_numerals\<close>
simproc_setup intless_cancel_numerals
("(l::'a::linordered_idom) + m < n"
|"(l::'a::linordered_idom) < m + n"
|"(l::'a::linordered_idom) - m < n"
|"(l::'a::linordered_idom) < m - n"
|"(l::'a::linordered_idom) * m < n"
|"(l::'a::linordered_idom) < m * n"
|"- (l::'a::linordered_idom) < m"
|"(l::'a::linordered_idom) < - m") =
- \<open>fn phi => Numeral_Simprocs.less_cancel_numerals\<close>
+ \<open>K Numeral_Simprocs.less_cancel_numerals\<close>
simproc_setup intle_cancel_numerals
("(l::'a::linordered_idom) + m \<le> n"
|"(l::'a::linordered_idom) \<le> m + n"
|"(l::'a::linordered_idom) - m \<le> n"
|"(l::'a::linordered_idom) \<le> m - n"
|"(l::'a::linordered_idom) * m \<le> n"
|"(l::'a::linordered_idom) \<le> m * n"
|"- (l::'a::linordered_idom) \<le> m"
|"(l::'a::linordered_idom) \<le> - m") =
- \<open>fn phi => Numeral_Simprocs.le_cancel_numerals\<close>
+ \<open>K Numeral_Simprocs.le_cancel_numerals\<close>
simproc_setup ring_eq_cancel_numeral_factor
("(l::'a::{idom,ring_char_0}) * m = n"
|"(l::'a::{idom,ring_char_0}) = m * n") =
- \<open>fn phi => Numeral_Simprocs.eq_cancel_numeral_factor\<close>
+ \<open>K Numeral_Simprocs.eq_cancel_numeral_factor\<close>
simproc_setup ring_less_cancel_numeral_factor
("(l::'a::linordered_idom) * m < n"
|"(l::'a::linordered_idom) < m * n") =
- \<open>fn phi => Numeral_Simprocs.less_cancel_numeral_factor\<close>
+ \<open>K Numeral_Simprocs.less_cancel_numeral_factor\<close>
simproc_setup ring_le_cancel_numeral_factor
("(l::'a::linordered_idom) * m <= n"
|"(l::'a::linordered_idom) <= m * n") =
- \<open>fn phi => Numeral_Simprocs.le_cancel_numeral_factor\<close>
+ \<open>K Numeral_Simprocs.le_cancel_numeral_factor\<close>
(* TODO: remove comm_ring_1 constraint if possible *)
simproc_setup int_div_cancel_numeral_factors
("((l::'a::{euclidean_semiring_cancel,comm_ring_1,ring_char_0}) * m) div n"
|"(l::'a::{euclidean_semiring_cancel,comm_ring_1,ring_char_0}) div (m * n)") =
- \<open>fn phi => Numeral_Simprocs.div_cancel_numeral_factor\<close>
+ \<open>K Numeral_Simprocs.div_cancel_numeral_factor\<close>
simproc_setup divide_cancel_numeral_factor
("((l::'a::{field,ring_char_0}) * m) / n"
|"(l::'a::{field,ring_char_0}) / (m * n)"
|"((numeral v)::'a::{field,ring_char_0}) / (numeral w)") =
- \<open>fn phi => Numeral_Simprocs.divide_cancel_numeral_factor\<close>
+ \<open>K Numeral_Simprocs.divide_cancel_numeral_factor\<close>
simproc_setup ring_eq_cancel_factor
("(l::'a::idom) * m = n" | "(l::'a::idom) = m * n") =
- \<open>fn phi => Numeral_Simprocs.eq_cancel_factor\<close>
+ \<open>K Numeral_Simprocs.eq_cancel_factor\<close>
simproc_setup linordered_ring_le_cancel_factor
("(l::'a::linordered_idom) * m <= n"
|"(l::'a::linordered_idom) <= m * n") =
- \<open>fn phi => Numeral_Simprocs.le_cancel_factor\<close>
+ \<open>K Numeral_Simprocs.le_cancel_factor\<close>
simproc_setup linordered_ring_less_cancel_factor
("(l::'a::linordered_idom) * m < n"
|"(l::'a::linordered_idom) < m * n") =
- \<open>fn phi => Numeral_Simprocs.less_cancel_factor\<close>
+ \<open>K Numeral_Simprocs.less_cancel_factor\<close>
simproc_setup int_div_cancel_factor
("((l::'a::euclidean_semiring_cancel) * m) div n"
|"(l::'a::euclidean_semiring_cancel) div (m * n)") =
- \<open>fn phi => Numeral_Simprocs.div_cancel_factor\<close>
+ \<open>K Numeral_Simprocs.div_cancel_factor\<close>
simproc_setup int_mod_cancel_factor
("((l::'a::euclidean_semiring_cancel) * m) mod n"
|"(l::'a::euclidean_semiring_cancel) mod (m * n)") =
- \<open>fn phi => Numeral_Simprocs.mod_cancel_factor\<close>
+ \<open>K Numeral_Simprocs.mod_cancel_factor\<close>
simproc_setup dvd_cancel_factor
("((l::'a::idom) * m) dvd n"
|"(l::'a::idom) dvd (m * n)") =
- \<open>fn phi => Numeral_Simprocs.dvd_cancel_factor\<close>
+ \<open>K Numeral_Simprocs.dvd_cancel_factor\<close>
simproc_setup divide_cancel_factor
("((l::'a::field) * m) / n"
|"(l::'a::field) / (m * n)") =
- \<open>fn phi => Numeral_Simprocs.divide_cancel_factor\<close>
+ \<open>K Numeral_Simprocs.divide_cancel_factor\<close>
ML_file \<open>Tools/nat_numeral_simprocs.ML\<close>
simproc_setup nat_combine_numerals
("(i::nat) + j" | "Suc (i + j)") =
- \<open>fn phi => Nat_Numeral_Simprocs.combine_numerals\<close>
+ \<open>K Nat_Numeral_Simprocs.combine_numerals\<close>
simproc_setup nateq_cancel_numerals
("(l::nat) + m = n" | "(l::nat) = m + n" |
"(l::nat) * m = n" | "(l::nat) = m * n" |
"Suc m = n" | "m = Suc n") =
- \<open>fn phi => Nat_Numeral_Simprocs.eq_cancel_numerals\<close>
+ \<open>K Nat_Numeral_Simprocs.eq_cancel_numerals\<close>
simproc_setup natless_cancel_numerals
("(l::nat) + m < n" | "(l::nat) < m + n" |
"(l::nat) * m < n" | "(l::nat) < m * n" |
"Suc m < n" | "m < Suc n") =
- \<open>fn phi => Nat_Numeral_Simprocs.less_cancel_numerals\<close>
+ \<open>K Nat_Numeral_Simprocs.less_cancel_numerals\<close>
simproc_setup natle_cancel_numerals
("(l::nat) + m \<le> n" | "(l::nat) \<le> m + n" |
"(l::nat) * m \<le> n" | "(l::nat) \<le> m * n" |
"Suc m \<le> n" | "m \<le> Suc n") =
- \<open>fn phi => Nat_Numeral_Simprocs.le_cancel_numerals\<close>
+ \<open>K Nat_Numeral_Simprocs.le_cancel_numerals\<close>
simproc_setup natdiff_cancel_numerals
("((l::nat) + m) - n" | "(l::nat) - (m + n)" |
"(l::nat) * m - n" | "(l::nat) - m * n" |
"Suc m - n" | "m - Suc n") =
- \<open>fn phi => Nat_Numeral_Simprocs.diff_cancel_numerals\<close>
+ \<open>K Nat_Numeral_Simprocs.diff_cancel_numerals\<close>
simproc_setup nat_eq_cancel_numeral_factor
("(l::nat) * m = n" | "(l::nat) = m * n") =
- \<open>fn phi => Nat_Numeral_Simprocs.eq_cancel_numeral_factor\<close>
+ \<open>K Nat_Numeral_Simprocs.eq_cancel_numeral_factor\<close>
simproc_setup nat_less_cancel_numeral_factor
("(l::nat) * m < n" | "(l::nat) < m * n") =
- \<open>fn phi => Nat_Numeral_Simprocs.less_cancel_numeral_factor\<close>
+ \<open>K Nat_Numeral_Simprocs.less_cancel_numeral_factor\<close>
simproc_setup nat_le_cancel_numeral_factor
("(l::nat) * m <= n" | "(l::nat) <= m * n") =
- \<open>fn phi => Nat_Numeral_Simprocs.le_cancel_numeral_factor\<close>
+ \<open>K Nat_Numeral_Simprocs.le_cancel_numeral_factor\<close>
simproc_setup nat_div_cancel_numeral_factor
("((l::nat) * m) div n" | "(l::nat) div (m * n)") =
- \<open>fn phi => Nat_Numeral_Simprocs.div_cancel_numeral_factor\<close>
+ \<open>K Nat_Numeral_Simprocs.div_cancel_numeral_factor\<close>
simproc_setup nat_dvd_cancel_numeral_factor
("((l::nat) * m) dvd n" | "(l::nat) dvd (m * n)") =
- \<open>fn phi => Nat_Numeral_Simprocs.dvd_cancel_numeral_factor\<close>
+ \<open>K Nat_Numeral_Simprocs.dvd_cancel_numeral_factor\<close>
simproc_setup nat_eq_cancel_factor
("(l::nat) * m = n" | "(l::nat) = m * n") =
- \<open>fn phi => Nat_Numeral_Simprocs.eq_cancel_factor\<close>
+ \<open>K Nat_Numeral_Simprocs.eq_cancel_factor\<close>
simproc_setup nat_less_cancel_factor
("(l::nat) * m < n" | "(l::nat) < m * n") =
- \<open>fn phi => Nat_Numeral_Simprocs.less_cancel_factor\<close>
+ \<open>K Nat_Numeral_Simprocs.less_cancel_factor\<close>
simproc_setup nat_le_cancel_factor
("(l::nat) * m <= n" | "(l::nat) <= m * n") =
- \<open>fn phi => Nat_Numeral_Simprocs.le_cancel_factor\<close>
+ \<open>K Nat_Numeral_Simprocs.le_cancel_factor\<close>
simproc_setup nat_div_cancel_factor
("((l::nat) * m) div n" | "(l::nat) div (m * n)") =
- \<open>fn phi => Nat_Numeral_Simprocs.div_cancel_factor\<close>
+ \<open>K Nat_Numeral_Simprocs.div_cancel_factor\<close>
simproc_setup nat_dvd_cancel_factor
("((l::nat) * m) dvd n" | "(l::nat) dvd (m * n)") =
- \<open>fn phi => Nat_Numeral_Simprocs.dvd_cancel_factor\<close>
+ \<open>K Nat_Numeral_Simprocs.dvd_cancel_factor\<close>
declaration \<open>
K (Lin_Arith.add_simprocs
[\<^simproc>\<open>semiring_assoc_fold\<close>,
\<^simproc>\<open>int_combine_numerals\<close>,
\<^simproc>\<open>inteq_cancel_numerals\<close>,
\<^simproc>\<open>intless_cancel_numerals\<close>,
\<^simproc>\<open>intle_cancel_numerals\<close>,
\<^simproc>\<open>field_combine_numerals\<close>,
\<^simproc>\<open>nat_combine_numerals\<close>,
\<^simproc>\<open>nateq_cancel_numerals\<close>,
\<^simproc>\<open>natless_cancel_numerals\<close>,
\<^simproc>\<open>natle_cancel_numerals\<close>,
\<^simproc>\<open>natdiff_cancel_numerals\<close>,
Numeral_Simprocs.field_divide_cancel_numeral_factor])
\<close>
end
diff --git a/src/HOL/Parity.thy b/src/HOL/Parity.thy
--- a/src/HOL/Parity.thy
+++ b/src/HOL/Parity.thy
@@ -1,1381 +1,1379 @@
(* Title: HOL/Parity.thy
Author: Jeremy Avigad
Author: Jacques D. Fleuriot
*)
section \<open>Parity in rings and semirings\<close>
theory Parity
imports Euclidean_Rings
begin
subsection \<open>Ring structures with parity and \<open>even\<close>/\<open>odd\<close> predicates\<close>
class semiring_parity = comm_semiring_1 + semiring_modulo +
assumes even_iff_mod_2_eq_zero: "2 dvd a \<longleftrightarrow> a mod 2 = 0"
and odd_iff_mod_2_eq_one: "\<not> 2 dvd a \<longleftrightarrow> a mod 2 = 1"
and odd_one [simp]: "\<not> 2 dvd 1"
begin
abbreviation even :: "'a \<Rightarrow> bool"
where "even a \<equiv> 2 dvd a"
abbreviation odd :: "'a \<Rightarrow> bool"
where "odd a \<equiv> \<not> 2 dvd a"
end
class ring_parity = ring + semiring_parity
begin
subclass comm_ring_1 ..
end
instance nat :: semiring_parity
by standard (simp_all add: dvd_eq_mod_eq_0)
instance int :: ring_parity
by standard (auto simp add: dvd_eq_mod_eq_0)
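(*
  A quick sanity check of the three semiring_parity axioms on the integers, as
  a hypothetical Haskell sketch (evenIff, oddIff and parityAxioms are made-up
  names); Haskell's mod rounds towards negative infinity, like the modulus on
  int used here.

  evenIff, oddIff :: Integer -> Bool
  evenIff a = (a `mod` 2 == 0) == even a   -- even_iff_mod_2_eq_zero
  oddIff  a = (a `mod` 2 == 1) == odd a    -- odd_iff_mod_2_eq_one

  parityAxioms :: Bool
  parityAxioms = odd (1 :: Integer)        -- odd_one
    && all (\a -> evenIff a && oddIff a) [-50 .. 50]
*)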
context semiring_parity
begin
lemma parity_cases [case_names even odd]:
assumes "even a \<Longrightarrow> a mod 2 = 0 \<Longrightarrow> P"
assumes "odd a \<Longrightarrow> a mod 2 = 1 \<Longrightarrow> P"
shows P
using assms by (cases "even a")
(simp_all add: even_iff_mod_2_eq_zero [symmetric] odd_iff_mod_2_eq_one [symmetric])
lemma odd_of_bool_self [simp]:
\<open>odd (of_bool p) \<longleftrightarrow> p\<close>
by (cases p) simp_all
lemma not_mod_2_eq_0_eq_1 [simp]:
"a mod 2 \<noteq> 0 \<longleftrightarrow> a mod 2 = 1"
by (cases a rule: parity_cases) simp_all
lemma not_mod_2_eq_1_eq_0 [simp]:
"a mod 2 \<noteq> 1 \<longleftrightarrow> a mod 2 = 0"
by (cases a rule: parity_cases) simp_all
lemma evenE [elim?]:
assumes "even a"
obtains b where "a = 2 * b"
using assms by (rule dvdE)
lemma oddE [elim?]:
assumes "odd a"
obtains b where "a = 2 * b + 1"
proof -
have "a = 2 * (a div 2) + a mod 2"
by (simp add: mult_div_mod_eq)
with assms have "a = 2 * (a div 2) + 1"
by (simp add: odd_iff_mod_2_eq_one)
then show ?thesis ..
qed
lemma mod_2_eq_odd:
"a mod 2 = of_bool (odd a)"
by (auto elim: oddE simp add: even_iff_mod_2_eq_zero)
lemma of_bool_odd_eq_mod_2:
"of_bool (odd a) = a mod 2"
by (simp add: mod_2_eq_odd)
lemma even_mod_2_iff [simp]:
\<open>even (a mod 2) \<longleftrightarrow> even a\<close>
by (simp add: mod_2_eq_odd)
lemma mod2_eq_if:
"a mod 2 = (if even a then 0 else 1)"
by (simp add: mod_2_eq_odd)
lemma even_zero [simp]:
"even 0"
by (fact dvd_0_right)
lemma odd_even_add:
"even (a + b)" if "odd a" and "odd b"
proof -
from that obtain c d where "a = 2 * c + 1" and "b = 2 * d + 1"
by (blast elim: oddE)
then have "a + b = 2 * c + 2 * d + (1 + 1)"
by (simp only: ac_simps)
also have "\<dots> = 2 * (c + d + 1)"
by (simp add: algebra_simps)
finally show ?thesis ..
qed
lemma even_add [simp]:
"even (a + b) \<longleftrightarrow> (even a \<longleftrightarrow> even b)"
by (auto simp add: dvd_add_right_iff dvd_add_left_iff odd_even_add)
lemma odd_add [simp]:
"odd (a + b) \<longleftrightarrow> \<not> (odd a \<longleftrightarrow> odd b)"
by simp
lemma even_plus_one_iff [simp]:
"even (a + 1) \<longleftrightarrow> odd a"
by (auto simp add: dvd_add_right_iff intro: odd_even_add)
lemma even_mult_iff [simp]:
"even (a * b) \<longleftrightarrow> even a \<or> even b" (is "?P \<longleftrightarrow> ?Q")
proof
assume ?Q
then show ?P
by auto
next
assume ?P
show ?Q
proof (rule ccontr)
assume "\<not> (even a \<or> even b)"
then have "odd a" and "odd b"
by auto
then obtain r s where "a = 2 * r + 1" and "b = 2 * s + 1"
by (blast elim: oddE)
then have "a * b = (2 * r + 1) * (2 * s + 1)"
by simp
also have "\<dots> = 2 * (2 * r * s + r + s) + 1"
by (simp add: algebra_simps)
finally have "odd (a * b)"
by simp
with \<open>?P\<close> show False
by auto
qed
qed
lemma even_numeral [simp]: "even (numeral (Num.Bit0 n))"
proof -
have "even (2 * numeral n)"
unfolding even_mult_iff by simp
then have "even (numeral n + numeral n)"
unfolding mult_2 .
then show ?thesis
unfolding numeral.simps .
qed
lemma odd_numeral [simp]: "odd (numeral (Num.Bit1 n))"
proof
assume "even (numeral (num.Bit1 n))"
then have "even (numeral n + numeral n + 1)"
unfolding numeral.simps .
then have "even (2 * numeral n + 1)"
unfolding mult_2 .
then have "2 dvd numeral n * 2 + 1"
by (simp add: ac_simps)
then have "2 dvd 1"
using dvd_add_times_triv_left_iff [of 2 "numeral n" 1] by simp
then show False by simp
qed
lemma odd_numeral_BitM [simp]:
\<open>odd (numeral (Num.BitM w))\<close>
by (cases w) simp_all
lemma even_power [simp]: "even (a ^ n) \<longleftrightarrow> even a \<and> n > 0"
by (induct n) auto
lemma even_prod_iff:
\<open>even (prod f A) \<longleftrightarrow> (\<exists>a\<in>A. even (f a))\<close> if \<open>finite A\<close>
using that by (induction A) simp_all
lemma mask_eq_sum_exp:
\<open>2 ^ n - 1 = (\<Sum>m\<in>{q. q < n}. 2 ^ m)\<close>
proof -
have *: \<open>{q. q < Suc m} = insert m {q. q < m}\<close> for m
by auto
have \<open>2 ^ n = (\<Sum>m\<in>{q. q < n}. 2 ^ m) + 1\<close>
by (induction n) (simp_all add: ac_simps mult_2 *)
then have \<open>2 ^ n - 1 = (\<Sum>m\<in>{q. q < n}. 2 ^ m) + 1 - 1\<close>
by simp
then show ?thesis
by simp
qed
lemma (in -) mask_eq_sum_exp_nat:
\<open>2 ^ n - Suc 0 = (\<Sum>m\<in>{q. q < n}. 2 ^ m)\<close>
using mask_eq_sum_exp [where ?'a = nat] by simp
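(*
  The mask identity is easy to spot-check numerically; a hypothetical Haskell
  one-liner (maskCheck is a made-up name, only the Prelude is assumed):

  -- 2^n - 1 equals the sum of all smaller powers of two (mask_eq_sum_exp).
  maskCheck :: Bool
  maskCheck = all ok [0 .. 64]
    where
      ok :: Integer -> Bool
      ok n = (2 :: Integer) ^ n - 1 == sum [2 ^ m | m <- [0 .. n - 1]]
*)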
end
context ring_parity
begin
lemma even_minus:
"even (- a) \<longleftrightarrow> even a"
by (fact dvd_minus_iff)
lemma even_diff [simp]:
"even (a - b) \<longleftrightarrow> even (a + b)"
using even_add [of a "- b"] by simp
end
subsection \<open>Instance for \<^typ>\<open>nat\<close>\<close>
lemma even_Suc_Suc_iff [simp]:
"even (Suc (Suc n)) \<longleftrightarrow> even n"
using dvd_add_triv_right_iff [of 2 n] by simp
lemma even_Suc [simp]: "even (Suc n) \<longleftrightarrow> odd n"
using even_plus_one_iff [of n] by simp
lemma even_diff_nat [simp]:
"even (m - n) \<longleftrightarrow> m < n \<or> even (m + n)" for m n :: nat
proof (cases "n \<le> m")
case True
then have "m - n + n * 2 = m + n" by (simp add: mult_2_right)
moreover have "even (m - n) \<longleftrightarrow> even (m - n + n * 2)" by simp
ultimately have "even (m - n) \<longleftrightarrow> even (m + n)" by (simp only:)
then show ?thesis by auto
next
case False
then show ?thesis by simp
qed
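(*
  even_diff_nat is specific to the truncated subtraction on nat. A hypothetical
  Haskell check, modelling nat subtraction as max 0 (m - n); natMinus and
  evenDiffNat are made-up names.

  natMinus :: Integer -> Integer -> Integer
  natMinus m n = max 0 (m - n)   -- truncated (nat) subtraction

  -- even (m - n) <-> m < n \/ even (m + n)
  evenDiffNat :: Bool
  evenDiffNat = and [ even (natMinus m n) == (m < n || even (m + n))
                    | m <- [0 .. 30], n <- [0 .. 30] ]
*)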
lemma odd_pos:
"odd n \<Longrightarrow> 0 < n" for n :: nat
by (auto elim: oddE)
lemma Suc_double_not_eq_double:
"Suc (2 * m) \<noteq> 2 * n"
proof
assume "Suc (2 * m) = 2 * n"
moreover have "odd (Suc (2 * m))" and "even (2 * n)"
by simp_all
ultimately show False by simp
qed
lemma double_not_eq_Suc_double:
"2 * m \<noteq> Suc (2 * n)"
using Suc_double_not_eq_double [of n m] by simp
lemma odd_Suc_minus_one [simp]: "odd n \<Longrightarrow> Suc (n - Suc 0) = n"
by (auto elim: oddE)
lemma even_Suc_div_two [simp]:
"even n \<Longrightarrow> Suc n div 2 = n div 2"
by auto
lemma odd_Suc_div_two [simp]:
"odd n \<Longrightarrow> Suc n div 2 = Suc (n div 2)"
by (auto elim: oddE)
lemma odd_two_times_div_two_nat [simp]:
assumes "odd n"
shows "2 * (n div 2) = n - (1 :: nat)"
proof -
from assms have "2 * (n div 2) + 1 = n"
by (auto elim: oddE)
then have "Suc (2 * (n div 2)) - 1 = n - 1"
by simp
then show ?thesis
by simp
qed
lemma not_mod2_eq_Suc_0_eq_0 [simp]:
"n mod 2 \<noteq> Suc 0 \<longleftrightarrow> n mod 2 = 0"
using not_mod_2_eq_1_eq_0 [of n] by simp
lemma odd_card_imp_not_empty:
\<open>A \<noteq> {}\<close> if \<open>odd (card A)\<close>
using that by auto
lemma nat_induct2 [case_names 0 1 step]:
assumes "P 0" "P 1" and step: "\<And>n::nat. P n \<Longrightarrow> P (n + 2)"
shows "P n"
proof (induct n rule: less_induct)
case (less n)
show ?case
proof (cases "n < Suc (Suc 0)")
case True
then show ?thesis
using assms by (auto simp: less_Suc_eq)
next
case False
then obtain k where k: "n = Suc (Suc k)"
by (force simp: not_less nat_le_iff_add)
then have "k<n"
by simp
with less assms have "P (k+2)"
by blast
then show ?thesis
by (simp add: k)
qed
qed
context semiring_parity
begin
lemma even_sum_iff:
\<open>even (sum f A) \<longleftrightarrow> even (card {a\<in>A. odd (f a)})\<close> if \<open>finite A\<close>
using that proof (induction A)
case empty
then show ?case
by simp
next
case (insert a A)
moreover have \<open>{b \<in> insert a A. odd (f b)} = (if odd (f a) then {a} else {}) \<union> {b \<in> A. odd (f b)}\<close>
by auto
ultimately show ?case
by simp
qed
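(*
  even_sum_iff reduces the parity of a finite sum to the parity of the number
  of odd summands. A hypothetical Haskell spot check over small lists of
  distinct elements (evenSumIff and checkEvenSum are made-up names):

  evenSumIff :: (Integer -> Integer) -> [Integer] -> Bool
  evenSumIff f xs =
    even (sum (map f xs)) == even (length (filter (odd . f) xs))

  checkEvenSum :: Bool
  checkEvenSum =
    and [ evenSumIff f xs
        | f  <- [id, (\x -> 3 * x), (\x -> x + 1), (\x -> x * x)]
        , xs <- [[], [1], [1 .. 5], [2, 4 .. 10], [0, 3, 7, 9]] ]
*)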
lemma even_mask_iff [simp]:
\<open>even (2 ^ n - 1) \<longleftrightarrow> n = 0\<close>
proof (cases \<open>n = 0\<close>)
case True
then show ?thesis
by simp
next
case False
then have \<open>{a. a = 0 \<and> a < n} = {0}\<close>
by auto
then show ?thesis
by (auto simp add: mask_eq_sum_exp even_sum_iff)
qed
lemma even_of_nat_iff [simp]:
"even (of_nat n) \<longleftrightarrow> even n"
by (induction n) simp_all
end
subsection \<open>Parity and powers\<close>
context ring_1
begin
lemma power_minus_even [simp]: "even n \<Longrightarrow> (- a) ^ n = a ^ n"
by (auto elim: evenE)
lemma power_minus_odd [simp]: "odd n \<Longrightarrow> (- a) ^ n = - (a ^ n)"
by (auto elim: oddE)
lemma uminus_power_if:
"(- a) ^ n = (if even n then a ^ n else - (a ^ n))"
by auto
lemma neg_one_even_power [simp]: "even n \<Longrightarrow> (- 1) ^ n = 1"
by simp
lemma neg_one_odd_power [simp]: "odd n \<Longrightarrow> (- 1) ^ n = - 1"
by simp
lemma neg_one_power_add_eq_neg_one_power_diff: "k \<le> n \<Longrightarrow> (- 1) ^ (n + k) = (- 1) ^ (n - k)"
by (cases "even (n + k)") auto
lemma minus_one_power_iff: "(- 1) ^ n = (if even n then 1 else - 1)"
by (induct n) auto
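(*
  A hypothetical Haskell spot check of minus_one_power_iff (and hence of
  neg_one_even_power and neg_one_odd_power); minusOnePower is a made-up name.

  minusOnePower :: Bool
  minusOnePower =
    all (\n -> (-1 :: Integer) ^ n == (if even n then 1 else -1)) [0 .. 20 :: Int]
*)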
end
context linordered_idom
begin
lemma zero_le_even_power: "even n \<Longrightarrow> 0 \<le> a ^ n"
by (auto elim: evenE)
lemma zero_le_odd_power: "odd n \<Longrightarrow> 0 \<le> a ^ n \<longleftrightarrow> 0 \<le> a"
by (auto simp add: power_even_eq zero_le_mult_iff elim: oddE)
lemma zero_le_power_eq: "0 \<le> a ^ n \<longleftrightarrow> even n \<or> odd n \<and> 0 \<le> a"
by (auto simp add: zero_le_even_power zero_le_odd_power)
lemma zero_less_power_eq: "0 < a ^ n \<longleftrightarrow> n = 0 \<or> even n \<and> a \<noteq> 0 \<or> odd n \<and> 0 < a"
proof -
have [simp]: "0 = a ^ n \<longleftrightarrow> a = 0 \<and> n > 0"
unfolding power_eq_0_iff [of a n, symmetric] by blast
show ?thesis
unfolding less_le zero_le_power_eq by auto
qed
lemma power_less_zero_eq [simp]: "a ^ n < 0 \<longleftrightarrow> odd n \<and> a < 0"
unfolding not_le [symmetric] zero_le_power_eq by auto
lemma power_le_zero_eq: "a ^ n \<le> 0 \<longleftrightarrow> n > 0 \<and> (odd n \<and> a \<le> 0 \<or> even n \<and> a = 0)"
unfolding not_less [symmetric] zero_less_power_eq by auto
lemma power_even_abs: "even n \<Longrightarrow> \<bar>a\<bar> ^ n = a ^ n"
using power_abs [of a n] by (simp add: zero_le_even_power)
lemma power_mono_even:
assumes "even n" and "\<bar>a\<bar> \<le> \<bar>b\<bar>"
shows "a ^ n \<le> b ^ n"
proof -
have "0 \<le> \<bar>a\<bar>" by auto
with \<open>\<bar>a\<bar> \<le> \<bar>b\<bar>\<close> have "\<bar>a\<bar> ^ n \<le> \<bar>b\<bar> ^ n"
by (rule power_mono)
with \<open>even n\<close> show ?thesis
by (simp add: power_even_abs)
qed
lemma power_mono_odd:
assumes "odd n" and "a \<le> b"
shows "a ^ n \<le> b ^ n"
proof (cases "b < 0")
case True
with \<open>a \<le> b\<close> have "- b \<le> - a" and "0 \<le> - b" by auto
then have "(- b) ^ n \<le> (- a) ^ n" by (rule power_mono)
with \<open>odd n\<close> show ?thesis by simp
next
case False
then have "0 \<le> b" by auto
show ?thesis
proof (cases "a < 0")
case True
then have "n \<noteq> 0" and "a \<le> 0" using \<open>odd n\<close> [THEN odd_pos] by auto
then have "a ^ n \<le> 0" unfolding power_le_zero_eq using \<open>odd n\<close> by auto
moreover from \<open>0 \<le> b\<close> have "0 \<le> b ^ n" by auto
ultimately show ?thesis by auto
next
case False
then have "0 \<le> a" by auto
with \<open>a \<le> b\<close> show ?thesis
using power_mono by auto
qed
qed
text \<open>Simplify when the exponent is a numeral\<close>
lemma zero_le_power_eq_numeral [simp]:
"0 \<le> a ^ numeral w \<longleftrightarrow> even (numeral w :: nat) \<or> odd (numeral w :: nat) \<and> 0 \<le> a"
by (fact zero_le_power_eq)
lemma zero_less_power_eq_numeral [simp]:
"0 < a ^ numeral w \<longleftrightarrow>
numeral w = (0 :: nat) \<or>
even (numeral w :: nat) \<and> a \<noteq> 0 \<or>
odd (numeral w :: nat) \<and> 0 < a"
by (fact zero_less_power_eq)
lemma power_le_zero_eq_numeral [simp]:
"a ^ numeral w \<le> 0 \<longleftrightarrow>
(0 :: nat) < numeral w \<and>
(odd (numeral w :: nat) \<and> a \<le> 0 \<or> even (numeral w :: nat) \<and> a = 0)"
by (fact power_le_zero_eq)
lemma power_less_zero_eq_numeral [simp]:
"a ^ numeral w < 0 \<longleftrightarrow> odd (numeral w :: nat) \<and> a < 0"
by (fact power_less_zero_eq)
lemma power_even_abs_numeral [simp]:
"even (numeral w :: nat) \<Longrightarrow> \<bar>a\<bar> ^ numeral w = a ^ numeral w"
by (fact power_even_abs)
end
subsection \<open>Instance for \<^typ>\<open>int\<close>\<close>
lemma even_diff_iff:
"even (k - l) \<longleftrightarrow> even (k + l)" for k l :: int
by (fact even_diff)
lemma even_abs_add_iff:
"even (\<bar>k\<bar> + l) \<longleftrightarrow> even (k + l)" for k l :: int
by simp
lemma even_add_abs_iff:
"even (k + \<bar>l\<bar>) \<longleftrightarrow> even (k + l)" for k l :: int
by simp
lemma even_nat_iff: "0 \<le> k \<Longrightarrow> even (nat k) \<longleftrightarrow> even k"
by (simp add: even_of_nat_iff [of "nat k", where ?'a = int, symmetric])
context
assumes "SORT_CONSTRAINT('a::division_ring)"
begin
lemma power_int_minus_left:
"power_int (-a :: 'a) n = (if even n then power_int a n else -power_int a n)"
by (auto simp: power_int_def minus_one_power_iff even_nat_iff)
lemma power_int_minus_left_even [simp]: "even n \<Longrightarrow> power_int (-a :: 'a) n = power_int a n"
by (simp add: power_int_minus_left)
lemma power_int_minus_left_odd [simp]: "odd n \<Longrightarrow> power_int (-a :: 'a) n = -power_int a n"
by (simp add: power_int_minus_left)
lemma power_int_minus_left_distrib:
"NO_MATCH (-1) x \<Longrightarrow> power_int (-a :: 'a) n = power_int (-1) n * power_int a n"
by (simp add: power_int_minus_left)
lemma power_int_minus_one_minus: "power_int (-1 :: 'a) (-n) = power_int (-1) n"
by (simp add: power_int_minus_left)
lemma power_int_minus_one_diff_commute: "power_int (-1 :: 'a) (a - b) = power_int (-1) (b - a)"
by (subst power_int_minus_one_minus [symmetric]) auto
lemma power_int_minus_one_mult_self [simp]:
"power_int (-1 :: 'a) m * power_int (-1) m = 1"
by (simp add: power_int_minus_left)
lemma power_int_minus_one_mult_self' [simp]:
"power_int (-1 :: 'a) m * (power_int (-1) m * b) = b"
by (simp add: power_int_minus_left)
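(*
  Over a field, the sign behaviour of power_int for a negated base mirrors the
  natural-number case. A hypothetical Haskell check using (^^) on Rational,
  which likewise means recip (a ^ k) for a negative exponent -k
  (powerIntMinusLeft is a made-up name):

  powerIntMinusLeft :: Bool
  powerIntMinusLeft =
    and [ (-a) ^^ n == (if even n then a ^^ n else negate (a ^^ n))
        | a <- [1, 1 / 2, 3 / 4, 5] :: [Rational]
        , n <- [-6 .. 6] :: [Integer] ]
*)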
end
subsection \<open>Special case: euclidean rings containing the natural numbers\<close>
class unique_euclidean_semiring_with_nat = semidom + semiring_char_0 + unique_euclidean_semiring +
assumes of_nat_div: "of_nat (m div n) = of_nat m div of_nat n"
and division_segment_of_nat [simp]: "division_segment (of_nat n) = 1"
and division_segment_euclidean_size [simp]: "division_segment a * of_nat (euclidean_size a) = a"
begin
lemma division_segment_eq_iff:
"a = b" if "division_segment a = division_segment b"
and "euclidean_size a = euclidean_size b"
using that division_segment_euclidean_size [of a] by simp
lemma euclidean_size_of_nat [simp]:
"euclidean_size (of_nat n) = n"
proof -
have "division_segment (of_nat n) * of_nat (euclidean_size (of_nat n)) = of_nat n"
by (fact division_segment_euclidean_size)
then show ?thesis by simp
qed
lemma of_nat_euclidean_size:
"of_nat (euclidean_size a) = a div division_segment a"
proof -
have "of_nat (euclidean_size a) = division_segment a * of_nat (euclidean_size a) div division_segment a"
by (subst nonzero_mult_div_cancel_left) simp_all
also have "\<dots> = a div division_segment a"
by simp
finally show ?thesis .
qed
lemma division_segment_1 [simp]:
"division_segment 1 = 1"
using division_segment_of_nat [of 1] by simp
lemma division_segment_numeral [simp]:
"division_segment (numeral k) = 1"
using division_segment_of_nat [of "numeral k"] by simp
lemma euclidean_size_1 [simp]:
"euclidean_size 1 = 1"
using euclidean_size_of_nat [of 1] by simp
lemma euclidean_size_numeral [simp]:
"euclidean_size (numeral k) = numeral k"
using euclidean_size_of_nat [of "numeral k"] by simp
lemma of_nat_dvd_iff:
"of_nat m dvd of_nat n \<longleftrightarrow> m dvd n" (is "?P \<longleftrightarrow> ?Q")
proof (cases "m = 0")
case True
then show ?thesis
by simp
next
case False
show ?thesis
proof
assume ?Q
then show ?P
by auto
next
assume ?P
with False have "of_nat n = of_nat n div of_nat m * of_nat m"
by simp
then have "of_nat n = of_nat (n div m * m)"
by (simp add: of_nat_div)
then have "n = n div m * m"
by (simp only: of_nat_eq_iff)
then have "n = m * (n div m)"
by (simp add: ac_simps)
then show ?Q ..
qed
qed
lemma of_nat_mod:
"of_nat (m mod n) = of_nat m mod of_nat n"
proof -
have "of_nat m div of_nat n * of_nat n + of_nat m mod of_nat n = of_nat m"
by (simp add: div_mult_mod_eq)
also have "of_nat m = of_nat (m div n * n + m mod n)"
by simp
finally show ?thesis
by (simp only: of_nat_div of_nat_mult of_nat_add) simp
qed
lemma one_div_two_eq_zero [simp]:
"1 div 2 = 0"
proof -
from of_nat_div [symmetric] have "of_nat 1 div of_nat 2 = of_nat 0"
by (simp only:) simp
then show ?thesis
by simp
qed
lemma one_mod_two_eq_one [simp]:
"1 mod 2 = 1"
proof -
from of_nat_mod [symmetric] have "of_nat 1 mod of_nat 2 = of_nat 1"
by (simp only:) simp
then show ?thesis
by simp
qed
lemma one_mod_2_pow_eq [simp]:
"1 mod (2 ^ n) = of_bool (n > 0)"
proof -
have "1 mod (2 ^ n) = of_nat (1 mod (2 ^ n))"
using of_nat_mod [of 1 "2 ^ n"] by simp
also have "\<dots> = of_bool (n > 0)"
by simp
finally show ?thesis .
qed
lemma one_div_2_pow_eq [simp]:
"1 div (2 ^ n) = of_bool (n = 0)"
using div_mult_mod_eq [of 1 "2 ^ n"] by auto
lemma div_mult2_eq':
\<open>a div (of_nat m * of_nat n) = a div of_nat m div of_nat n\<close>
proof (cases \<open>m = 0 \<or> n = 0\<close>)
case True
then show ?thesis
by auto
next
case False
then have \<open>m > 0\<close> \<open>n > 0\<close>
by simp_all
show ?thesis
proof (cases \<open>of_nat m * of_nat n dvd a\<close>)
case True
then obtain b where \<open>a = (of_nat m * of_nat n) * b\<close> ..
then have \<open>a = of_nat m * (of_nat n * b)\<close>
by (simp add: ac_simps)
then show ?thesis
by simp
next
case False
define q where \<open>q = a div (of_nat m * of_nat n)\<close>
define r where \<open>r = a mod (of_nat m * of_nat n)\<close>
from \<open>m > 0\<close> \<open>n > 0\<close> \<open>\<not> of_nat m * of_nat n dvd a\<close> r_def have "division_segment r = 1"
using division_segment_of_nat [of "m * n"] by (simp add: division_segment_mod)
with division_segment_euclidean_size [of r]
have "of_nat (euclidean_size r) = r"
by simp
have "a mod (of_nat m * of_nat n) div (of_nat m * of_nat n) = 0"
by simp
with \<open>m > 0\<close> \<open>n > 0\<close> r_def have "r div (of_nat m * of_nat n) = 0"
by simp
with \<open>of_nat (euclidean_size r) = r\<close>
have "of_nat (euclidean_size r) div (of_nat m * of_nat n) = 0"
by simp
then have "of_nat (euclidean_size r div (m * n)) = 0"
by (simp add: of_nat_div)
then have "of_nat (euclidean_size r div m div n) = 0"
by (simp add: div_mult2_eq)
with \<open>of_nat (euclidean_size r) = r\<close> have "r div of_nat m div of_nat n = 0"
by (simp add: of_nat_div)
with \<open>m > 0\<close> \<open>n > 0\<close> q_def
have "q = (r div of_nat m + q * of_nat n * of_nat m div of_nat m) div of_nat n"
by simp
moreover have \<open>a = q * (of_nat m * of_nat n) + r\<close>
by (simp add: q_def r_def div_mult_mod_eq)
ultimately show \<open>a div (of_nat m * of_nat n) = a div of_nat m div of_nat n\<close>
using q_def [symmetric] div_plus_div_distrib_dvd_right [of \<open>of_nat m\<close> \<open>q * (of_nat m * of_nat n)\<close> r]
by (simp add: ac_simps)
qed
qed
lemma mod_mult2_eq':
"a mod (of_nat m * of_nat n) = of_nat m * (a div of_nat m mod of_nat n) + a mod of_nat m"
proof -
have "a div (of_nat m * of_nat n) * (of_nat m * of_nat n) + a mod (of_nat m * of_nat n) = a div of_nat m div of_nat n * of_nat n * of_nat m + (a div of_nat m mod of_nat n * of_nat m + a mod of_nat m)"
by (simp add: combine_common_factor div_mult_mod_eq)
moreover have "a div of_nat m div of_nat n * of_nat n * of_nat m = of_nat n * of_nat m * (a div of_nat m div of_nat n)"
by (simp add: ac_simps)
ultimately show ?thesis
by (simp add: div_mult2_eq' mult_commute)
qed
lemma div_mult2_numeral_eq:
"a div numeral k div numeral l = a div numeral (k * l)" (is "?A = ?B")
proof -
have "?A = a div of_nat (numeral k) div of_nat (numeral l)"
by simp
also have "\<dots> = a div (of_nat (numeral k) * of_nat (numeral l))"
by (fact div_mult2_eq' [symmetric])
also have "\<dots> = ?B"
by simp
finally show ?thesis .
qed
lemma numeral_Bit0_div_2:
"numeral (num.Bit0 n) div 2 = numeral n"
proof -
have "numeral (num.Bit0 n) = numeral n + numeral n"
by (simp only: numeral.simps)
also have "\<dots> = numeral n * 2"
by (simp add: mult_2_right)
finally have "numeral (num.Bit0 n) div 2 = numeral n * 2 div 2"
by simp
also have "\<dots> = numeral n"
by (rule nonzero_mult_div_cancel_right) simp
finally show ?thesis .
qed
lemma numeral_Bit1_div_2:
"numeral (num.Bit1 n) div 2 = numeral n"
proof -
have "numeral (num.Bit1 n) = numeral n + numeral n + 1"
by (simp only: numeral.simps)
also have "\<dots> = numeral n * 2 + 1"
by (simp add: mult_2_right)
finally have "numeral (num.Bit1 n) div 2 = (numeral n * 2 + 1) div 2"
by simp
also have "\<dots> = numeral n * 2 div 2 + 1 div 2"
using dvd_triv_right by (rule div_plus_div_distrib_dvd_left)
also have "\<dots> = numeral n * 2 div 2"
by simp
also have "\<dots> = numeral n"
by (rule nonzero_mult_div_cancel_right) simp
finally show ?thesis .
qed
lemma exp_mod_exp:
\<open>2 ^ m mod 2 ^ n = of_bool (m < n) * 2 ^ m\<close>
proof -
have \<open>(2::nat) ^ m mod 2 ^ n = of_bool (m < n) * 2 ^ m\<close> (is \<open>?lhs = ?rhs\<close>)
by (auto simp add: not_less monoid_mult_class.power_add dest!: le_Suc_ex)
then have \<open>of_nat ?lhs = of_nat ?rhs\<close>
by simp
then show ?thesis
by (simp add: of_nat_mod)
qed
lemma mask_mod_exp:
\<open>(2 ^ n - 1) mod 2 ^ m = 2 ^ min m n - 1\<close>
proof -
have \<open>(2 ^ n - 1) mod 2 ^ m = 2 ^ min m n - (1::nat)\<close> (is \<open>?lhs = ?rhs\<close>)
proof (cases \<open>n \<le> m\<close>)
case True
then show ?thesis
by (simp add: Suc_le_lessD)
next
case False
then have \<open>m < n\<close>
by simp
then obtain q where n: \<open>n = Suc q + m\<close>
by (auto dest: less_imp_Suc_add)
then have \<open>min m n = m\<close>
by simp
moreover have \<open>(2::nat) ^ m \<le> 2 * 2 ^ q * 2 ^ m\<close>
using mult_le_mono1 [of 1 \<open>2 * 2 ^ q\<close> \<open>2 ^ m\<close>] by simp
with n have \<open>2 ^ n - 1 = (2 ^ Suc q - 1) * 2 ^ m + (2 ^ m - (1::nat))\<close>
by (simp add: monoid_mult_class.power_add algebra_simps)
ultimately show ?thesis
by (simp only: euclidean_semiring_cancel_class.mod_mult_self3) simp
qed
then have \<open>of_nat ?lhs = of_nat ?rhs\<close>
by simp
then show ?thesis
by (simp add: of_nat_mod of_nat_diff)
qed
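(*
  Both identities are easy to spot-check on the natural numbers; a hypothetical
  Haskell sketch (expModChecks is a made-up name):

  -- 2^m mod 2^n = (if m < n then 2^m else 0)    (exp_mod_exp)
  -- (2^n - 1) mod 2^m = 2^(min m n) - 1         (mask_mod_exp)
  expModChecks :: Bool
  expModChecks =
    and [ (2 ^ m) `mod` (2 ^ n) == (if m < n then 2 ^ m else 0)
          && (2 ^ n - 1) `mod` (2 ^ m) == 2 ^ min m n - 1
        | m <- range, n <- range ]
    where
      range = [0 .. 12] :: [Integer]
*)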
lemma of_bool_half_eq_0 [simp]:
\<open>of_bool b div 2 = 0\<close>
by simp
end
class unique_euclidean_ring_with_nat = ring + unique_euclidean_semiring_with_nat
instance nat :: unique_euclidean_semiring_with_nat
by standard (simp_all add: dvd_eq_mod_eq_0)
instance int :: unique_euclidean_ring_with_nat
by standard (auto simp add: divide_int_def division_segment_int_def elim: contrapos_np)
context unique_euclidean_semiring_with_nat
begin
subclass semiring_parity
proof
show "2 dvd a \<longleftrightarrow> a mod 2 = 0" for a
by (fact dvd_eq_mod_eq_0)
show "\<not> 2 dvd a \<longleftrightarrow> a mod 2 = 1" for a
proof
assume "a mod 2 = 1"
then show "\<not> 2 dvd a"
by auto
next
assume "\<not> 2 dvd a"
have eucl: "euclidean_size (a mod 2) = 1"
proof (rule order_antisym)
show "euclidean_size (a mod 2) \<le> 1"
using mod_size_less [of 2 a] by simp
show "1 \<le> euclidean_size (a mod 2)"
using \<open>\<not> 2 dvd a\<close> by (simp add: Suc_le_eq dvd_eq_mod_eq_0)
qed
from \<open>\<not> 2 dvd a\<close> have "\<not> of_nat 2 dvd division_segment a * of_nat (euclidean_size a)"
by simp
then have "\<not> of_nat 2 dvd of_nat (euclidean_size a)"
by (auto simp only: dvd_mult_unit_iff' is_unit_division_segment)
then have "\<not> 2 dvd euclidean_size a"
using of_nat_dvd_iff [of 2] by simp
then have "euclidean_size a mod 2 = 1"
by (simp add: semidom_modulo_class.dvd_eq_mod_eq_0)
then have "of_nat (euclidean_size a mod 2) = of_nat 1"
by simp
then have "of_nat (euclidean_size a) mod 2 = 1"
by (simp add: of_nat_mod)
from \<open>\<not> 2 dvd a\<close> eucl
show "a mod 2 = 1"
by (auto intro: division_segment_eq_iff simp add: division_segment_mod)
qed
show "\<not> is_unit 2"
proof (rule notI)
assume "is_unit 2"
then have "of_nat 2 dvd of_nat 1"
by simp
then have "is_unit (2::nat)"
by (simp only: of_nat_dvd_iff)
then show False
by simp
qed
qed
lemma even_succ_div_two [simp]:
"even a \<Longrightarrow> (a + 1) div 2 = a div 2"
by (cases "a = 0") (auto elim!: evenE dest: mult_not_zero)
lemma odd_succ_div_two [simp]:
"odd a \<Longrightarrow> (a + 1) div 2 = a div 2 + 1"
by (auto elim!: oddE simp add: add.assoc)
lemma even_two_times_div_two:
"even a \<Longrightarrow> 2 * (a div 2) = a"
by (fact dvd_mult_div_cancel)
lemma odd_two_times_div_two_succ [simp]:
"odd a \<Longrightarrow> 2 * (a div 2) + 1 = a"
using mult_div_mod_eq [of 2 a]
by (simp add: even_iff_mod_2_eq_zero)
lemma coprime_left_2_iff_odd [simp]:
"coprime 2 a \<longleftrightarrow> odd a"
proof
assume "odd a"
show "coprime 2 a"
proof (rule coprimeI)
fix b
assume "b dvd 2" "b dvd a"
then have "b dvd a mod 2"
by (auto intro: dvd_mod)
with \<open>odd a\<close> show "is_unit b"
by (simp add: mod_2_eq_odd)
qed
next
assume "coprime 2 a"
show "odd a"
proof (rule notI)
assume "even a"
then obtain b where "a = 2 * b" ..
with \<open>coprime 2 a\<close> have "coprime 2 (2 * b)"
by simp
moreover have "\<not> coprime 2 (2 * b)"
by (rule not_coprimeI [of 2]) simp_all
ultimately show False
by blast
qed
qed
lemma coprime_right_2_iff_odd [simp]:
"coprime a 2 \<longleftrightarrow> odd a"
using coprime_left_2_iff_odd [of a] by (simp add: ac_simps)
end
context unique_euclidean_ring_with_nat
begin
subclass ring_parity ..
lemma minus_1_mod_2_eq [simp]:
"- 1 mod 2 = 1"
by (simp add: mod_2_eq_odd)
lemma minus_1_div_2_eq [simp]:
"- 1 div 2 = - 1"
proof -
from div_mult_mod_eq [of "- 1" 2]
have "- 1 div 2 * 2 = - 1 * 2"
using add_implies_diff by fastforce
then show ?thesis
using mult_right_cancel [of 2 "- 1 div 2" "- 1"] by simp
qed
end
context unique_euclidean_semiring_with_nat
begin
lemma even_mask_div_iff':
\<open>even ((2 ^ m - 1) div 2 ^ n) \<longleftrightarrow> m \<le> n\<close>
proof -
have \<open>even ((2 ^ m - 1) div 2 ^ n) \<longleftrightarrow> even (of_nat ((2 ^ m - Suc 0) div 2 ^ n))\<close>
by (simp only: of_nat_div) (simp add: of_nat_diff)
also have \<open>\<dots> \<longleftrightarrow> even ((2 ^ m - Suc 0) div 2 ^ n)\<close>
by simp
also have \<open>\<dots> \<longleftrightarrow> m \<le> n\<close>
proof (cases \<open>m \<le> n\<close>)
case True
then show ?thesis
by (simp add: Suc_le_lessD)
next
case False
then obtain r where r: \<open>m = n + Suc r\<close>
using less_imp_Suc_add by fastforce
from r have \<open>{q. q < m} \<inter> {q. 2 ^ n dvd (2::nat) ^ q} = {q. n \<le> q \<and> q < m}\<close>
by (auto simp add: dvd_power_iff_le)
moreover from r have \<open>{q. q < m} \<inter> {q. \<not> 2 ^ n dvd (2::nat) ^ q} = {q. q < n}\<close>
by (auto simp add: dvd_power_iff_le)
moreover from False have \<open>{q. n \<le> q \<and> q < m \<and> q \<le> n} = {n}\<close>
by auto
then have \<open>odd ((\<Sum>a\<in>{q. n \<le> q \<and> q < m}. 2 ^ a div (2::nat) ^ n) + sum ((^) 2) {q. q < n} div 2 ^ n)\<close>
by (simp_all add: euclidean_semiring_cancel_class.power_diff_power_eq semiring_parity_class.even_sum_iff not_less mask_eq_sum_exp_nat [symmetric])
ultimately have \<open>odd (sum ((^) (2::nat)) {q. q < m} div 2 ^ n)\<close>
by (subst euclidean_semiring_cancel_class.sum_div_partition) simp_all
with False show ?thesis
by (simp add: mask_eq_sum_exp_nat)
qed
finally show ?thesis .
qed
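(*
  A hypothetical Haskell spot check of even_mask_div_iff' (evenMaskDiv is a
  made-up name): dividing the mask 2^m - 1 by 2^n leaves an odd value exactly
  when at least one 1-bit survives, i.e. when n < m.

  evenMaskDiv :: Bool
  evenMaskDiv =
    and [ even ((2 ^ m - 1) `div` 2 ^ n) == (m <= n)
        | m <- range, n <- range ]
    where
      range = [0 .. 12] :: [Integer]
*)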
end
subsection \<open>Generic symbolic computations\<close>
text \<open>
The following type class contains everything necessary to formulate
a division algorithm in ring structures with numerals, restricted
to its positive segments.
\<close>
class unique_euclidean_semiring_with_nat_division = unique_euclidean_semiring_with_nat +
fixes divmod :: \<open>num \<Rightarrow> num \<Rightarrow> 'a \<times> 'a\<close>
and divmod_step :: \<open>'a \<Rightarrow> 'a \<times> 'a \<Rightarrow> 'a \<times> 'a\<close> \<comment> \<open>
These are conceptually definitions but force generated code
to be monomorphic wrt. particular instances of this class, which
yields a significant speedup.\<close>
assumes divmod_def: \<open>divmod m n = (numeral m div numeral n, numeral m mod numeral n)\<close>
and divmod_step_def [simp]: \<open>divmod_step l (q, r) =
(if euclidean_size l \<le> euclidean_size r then (2 * q + 1, r - l)
else (2 * q, r))\<close> \<comment> \<open>
This is a formulation of one step (referring to one digit position)
in school-method division: compare the divisor, shifted to the current
digit position, with the remainder from previous division steps
and evaluate accordingly.\<close>
begin
lemma fst_divmod:
\<open>fst (divmod m n) = numeral m div numeral n\<close>
by (simp add: divmod_def)
lemma snd_divmod:
\<open>snd (divmod m n) = numeral m mod numeral n\<close>
by (simp add: divmod_def)
text \<open>
What follows is a formulation of school-method division:
if the dividend is smaller than the divisor, terminate;
otherwise, shift the divisor to the left until it exceeds the
dividend and then reiterate single division steps in the
opposite direction.
\<close>
lemma divmod_divmod_step:
\<open>divmod m n = (if m < n then (0, numeral m)
else divmod_step (numeral n) (divmod m (Num.Bit0 n)))\<close>
proof (cases \<open>m < n\<close>)
case True
then show ?thesis
by (simp add: prod_eq_iff fst_divmod snd_divmod flip: of_nat_numeral of_nat_div of_nat_mod)
next
case False
define r s t where \<open>r = (numeral m :: nat)\<close> \<open>s = (numeral n :: nat)\<close> \<open>t = 2 * s\<close>
then have *: \<open>numeral m = of_nat r\<close> \<open>numeral n = of_nat s\<close> \<open>numeral (num.Bit0 n) = of_nat t\<close>
and \<open>\<not> s \<le> r mod s\<close>
by (simp_all add: not_le)
have t: \<open>2 * (r div t) = r div s - r div s mod 2\<close>
\<open>r mod t = s * (r div s mod 2) + r mod s\<close>
by (simp add: Rings.minus_mod_eq_mult_div Groups.mult.commute [of 2] Euclidean_Rings.div_mult2_eq \<open>t = 2 * s\<close>)
(use mod_mult2_eq [of r s 2] in \<open>simp add: ac_simps \<open>t = 2 * s\<close>\<close>)
have rs: \<open>r div s mod 2 = 0 \<or> r div s mod 2 = Suc 0\<close>
by auto
from \<open>\<not> s \<le> r mod s\<close> have \<open>s \<le> r mod t \<Longrightarrow>
r div s = Suc (2 * (r div t)) \<and>
r mod s = r mod t - s\<close>
using rs
by (auto simp add: t)
moreover have \<open>r mod t < s \<Longrightarrow>
r div s = 2 * (r div t) \<and>
r mod s = r mod t\<close>
using rs
by (auto simp add: t)
ultimately show ?thesis
by (simp add: divmod_def prod_eq_iff split_def Let_def
not_less mod_eq_0_iff_dvd Rings.mod_eq_0_iff_dvd False not_le *)
(simp add: flip: of_nat_numeral of_nat_mult add.commute [of 1] of_nat_div of_nat_mod of_nat_Suc of_nat_diff)
qed
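(*
  The recursion just established can be read as an executable shift-and-subtract
  algorithm. A hypothetical Haskell sketch on non-negative integers (divmodStep
  and divmodNat are made-up names; this is not the generated code):

  -- One division step (cf. divmod_step): if the shifted divisor still fits
  -- into the remainder, emit a 1 bit and subtract, otherwise emit a 0 bit.
  divmodStep :: Integer -> (Integer, Integer) -> (Integer, Integer)
  divmodStep l (q, r)
    | r >= l    = (2 * q + 1, r - l)
    | otherwise = (2 * q, r)

  -- Cf. divmod_divmod_step: double the divisor until it exceeds the dividend,
  -- then undo the doublings one by one, performing a division step each time.
  divmodNat :: Integer -> Integer -> (Integer, Integer)
  divmodNat m n
    | m < n     = (0, m)
    | otherwise = divmodStep n (divmodNat m (2 * n))

  -- Example: divmodNat 23 5
  --   = divmodStep 5 (divmodNat 23 10)
  --   = divmodStep 5 (divmodStep 10 (divmodNat 23 20))
  --   = divmodStep 5 (divmodStep 10 (divmodStep 20 (divmodNat 23 40)))
  --   = divmodStep 5 (divmodStep 10 (divmodStep 20 (0, 23)))
  --   = divmodStep 5 (divmodStep 10 (1, 3))
  --   = divmodStep 5 (2, 3)
  --   = (4, 3)
  divmodOk :: Bool
  divmodOk = and [ divmodNat m n == m `divMod` n | m <- [0 .. 60], n <- [1 .. 20] ]
*)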
text \<open>The division rewrite proper -- first, trivial results involving \<open>1\<close>\<close>
lemma divmod_trivial [simp]:
"divmod m Num.One = (numeral m, 0)"
"divmod num.One (num.Bit0 n) = (0, Numeral1)"
"divmod num.One (num.Bit1 n) = (0, Numeral1)"
using divmod_divmod_step [of "Num.One"] by (simp_all add: divmod_def)
text \<open>Division by an even number is a right-shift\<close>
lemma divmod_cancel [simp]:
\<open>divmod (Num.Bit0 m) (Num.Bit0 n) = (case divmod m n of (q, r) \<Rightarrow> (q, 2 * r))\<close> (is ?P)
\<open>divmod (Num.Bit1 m) (Num.Bit0 n) = (case divmod m n of (q, r) \<Rightarrow> (q, 2 * r + 1))\<close> (is ?Q)
proof -
define r s where \<open>r = (numeral m :: nat)\<close> \<open>s = (numeral n :: nat)\<close>
then have *: \<open>numeral m = of_nat r\<close> \<open>numeral n = of_nat s\<close>
\<open>numeral (num.Bit0 m) = of_nat (2 * r)\<close> \<open>numeral (num.Bit0 n) = of_nat (2 * s)\<close>
\<open>numeral (num.Bit1 m) = of_nat (Suc (2 * r))\<close>
by simp_all
have **: \<open>Suc (2 * r) div 2 = r\<close>
by simp
show ?P and ?Q
by (simp_all add: divmod_def *)
(simp_all flip: of_nat_numeral of_nat_div of_nat_mod of_nat_mult add.commute [of 1] of_nat_Suc
add: Euclidean_Rings.mod_mult_mult1 div_mult2_eq [of _ 2] mod_mult2_eq [of _ 2] **)
qed
text \<open>The really hard work\<close>
lemma divmod_steps [simp]:
"divmod (num.Bit0 m) (num.Bit1 n) =
(if m \<le> n then (0, numeral (num.Bit0 m))
else divmod_step (numeral (num.Bit1 n))
(divmod (num.Bit0 m)
(num.Bit0 (num.Bit1 n))))"
"divmod (num.Bit1 m) (num.Bit1 n) =
(if m < n then (0, numeral (num.Bit1 m))
else divmod_step (numeral (num.Bit1 n))
(divmod (num.Bit1 m)
(num.Bit0 (num.Bit1 n))))"
by (simp_all add: divmod_divmod_step)
lemmas divmod_algorithm_code = divmod_trivial divmod_cancel divmod_steps
text \<open>Special case: divisibility\<close>
definition divides_aux :: "'a \<times> 'a \<Rightarrow> bool"
where
"divides_aux qr \<longleftrightarrow> snd qr = 0"
lemma divides_aux_eq [simp]:
"divides_aux (q, r) \<longleftrightarrow> r = 0"
by (simp add: divides_aux_def)
lemma dvd_numeral_simp [simp]:
"numeral m dvd numeral n \<longleftrightarrow> divides_aux (divmod n m)"
by (simp add: divmod_def mod_eq_0_iff_dvd)
text \<open>Generic computation of quotient and remainder\<close>
lemma numeral_div_numeral [simp]:
"numeral k div numeral l = fst (divmod k l)"
by (simp add: fst_divmod)
lemma numeral_mod_numeral [simp]:
"numeral k mod numeral l = snd (divmod k l)"
by (simp add: snd_divmod)
lemma one_div_numeral [simp]:
"1 div numeral n = fst (divmod num.One n)"
by (simp add: fst_divmod)
lemma one_mod_numeral [simp]:
"1 mod numeral n = snd (divmod num.One n)"
by (simp add: snd_divmod)
end
instantiation nat :: unique_euclidean_semiring_with_nat_division
begin
definition divmod_nat :: "num \<Rightarrow> num \<Rightarrow> nat \<times> nat"
where
divmod'_nat_def: "divmod_nat m n = (numeral m div numeral n, numeral m mod numeral n)"
definition divmod_step_nat :: "nat \<Rightarrow> nat \<times> nat \<Rightarrow> nat \<times> nat"
where
"divmod_step_nat l qr = (let (q, r) = qr
in if r \<ge> l then (2 * q + 1, r - l)
else (2 * q, r))"
instance
by standard (simp_all add: divmod'_nat_def divmod_step_nat_def)
end
declare divmod_algorithm_code [where ?'a = nat, code]
lemma Suc_0_div_numeral [simp]:
\<open>Suc 0 div numeral Num.One = 1\<close>
\<open>Suc 0 div numeral (Num.Bit0 n) = 0\<close>
\<open>Suc 0 div numeral (Num.Bit1 n) = 0\<close>
by simp_all
lemma Suc_0_mod_numeral [simp]:
\<open>Suc 0 mod numeral Num.One = 0\<close>
\<open>Suc 0 mod numeral (Num.Bit0 n) = 1\<close>
\<open>Suc 0 mod numeral (Num.Bit1 n) = 1\<close>
by simp_all
instantiation int :: unique_euclidean_semiring_with_nat_division
begin
definition divmod_int :: "num \<Rightarrow> num \<Rightarrow> int \<times> int"
where
"divmod_int m n = (numeral m div numeral n, numeral m mod numeral n)"
definition divmod_step_int :: "int \<Rightarrow> int \<times> int \<Rightarrow> int \<times> int"
where
"divmod_step_int l qr = (let (q, r) = qr
in if \<bar>l\<bar> \<le> \<bar>r\<bar> then (2 * q + 1, r - l)
else (2 * q, r))"
instance
by standard (auto simp add: divmod_int_def divmod_step_int_def)
end
declare divmod_algorithm_code [where ?'a = int, code]
context
begin
qualified definition adjust_div :: "int \<times> int \<Rightarrow> int"
where
"adjust_div qr = (let (q, r) = qr in q + of_bool (r \<noteq> 0))"
qualified lemma adjust_div_eq [simp, code]:
"adjust_div (q, r) = q + of_bool (r \<noteq> 0)"
by (simp add: adjust_div_def)
qualified definition adjust_mod :: "num \<Rightarrow> int \<Rightarrow> int"
where
[simp]: "adjust_mod l r = (if r = 0 then 0 else numeral l - r)"
lemma minus_numeral_div_numeral [simp]:
"- numeral m div numeral n = - (adjust_div (divmod m n) :: int)"
proof -
have "int (fst (divmod m n)) = fst (divmod m n)"
by (simp only: fst_divmod divide_int_def) auto
then show ?thesis
by (auto simp add: split_def Let_def adjust_div_def divides_aux_def divide_int_def)
qed
lemma minus_numeral_mod_numeral [simp]:
"- numeral m mod numeral n = adjust_mod n (snd (divmod m n) :: int)"
proof (cases "snd (divmod m n) = (0::int)")
case True
then show ?thesis
by (simp add: mod_eq_0_iff_dvd divides_aux_def)
next
case False
then have "int (snd (divmod m n)) = snd (divmod m n)" if "snd (divmod m n) \<noteq> (0::int)"
by (simp only: snd_divmod modulo_int_def) auto
then show ?thesis
by (simp add: divides_aux_def adjust_div_def)
(simp add: divides_aux_def modulo_int_def)
qed
lemma numeral_div_minus_numeral [simp]:
"numeral m div - numeral n = - (adjust_div (divmod m n) :: int)"
proof -
have "int (fst (divmod m n)) = fst (divmod m n)"
by (simp only: fst_divmod divide_int_def) auto
then show ?thesis
by (auto simp add: split_def Let_def adjust_div_def divides_aux_def divide_int_def)
qed
lemma numeral_mod_minus_numeral [simp]:
"numeral m mod - numeral n = - adjust_mod n (snd (divmod m n) :: int)"
proof (cases "snd (divmod m n) = (0::int)")
case True
then show ?thesis
by (simp add: mod_eq_0_iff_dvd divides_aux_def)
next
case False
then have "int (snd (divmod m n)) = snd (divmod m n)" if "snd (divmod m n) \<noteq> (0::int)"
by (simp only: snd_divmod modulo_int_def) auto
then show ?thesis
by (simp add: divides_aux_def adjust_div_def)
(simp add: divides_aux_def modulo_int_def)
qed
lemma minus_one_div_numeral [simp]:
"- 1 div numeral n = - (adjust_div (divmod Num.One n) :: int)"
using minus_numeral_div_numeral [of Num.One n] by simp
lemma minus_one_mod_numeral [simp]:
"- 1 mod numeral n = adjust_mod n (snd (divmod Num.One n) :: int)"
using minus_numeral_mod_numeral [of Num.One n] by simp
lemma one_div_minus_numeral [simp]:
"1 div - numeral n = - (adjust_div (divmod Num.One n) :: int)"
using numeral_div_minus_numeral [of Num.One n] by simp
lemma one_mod_minus_numeral [simp]:
"1 mod - numeral n = - adjust_mod n (snd (divmod Num.One n) :: int)"
using numeral_mod_minus_numeral [of Num.One n] by simp
lemma [code]:
fixes k :: int
shows
"k div 0 = 0"
"k mod 0 = k"
"0 div k = 0"
"0 mod k = 0"
"k div Int.Pos Num.One = k"
"k mod Int.Pos Num.One = 0"
"k div Int.Neg Num.One = - k"
"k mod Int.Neg Num.One = 0"
"Int.Pos m div Int.Pos n = (fst (divmod m n) :: int)"
"Int.Pos m mod Int.Pos n = (snd (divmod m n) :: int)"
"Int.Neg m div Int.Pos n = - (adjust_div (divmod m n) :: int)"
"Int.Neg m mod Int.Pos n = adjust_mod n (snd (divmod m n) :: int)"
"Int.Pos m div Int.Neg n = - (adjust_div (divmod m n) :: int)"
"Int.Pos m mod Int.Neg n = - adjust_mod n (snd (divmod m n) :: int)"
"Int.Neg m div Int.Neg n = (fst (divmod m n) :: int)"
"Int.Neg m mod Int.Neg n = - (snd (divmod m n) :: int)"
by simp_all
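(*
  adjust_div and adjust_mod reduce division with negative operands to division
  of the underlying positive numerals. A hypothetical Haskell check against the
  Prelude's div/mod, which also round towards negative infinity (adjustDiv,
  adjustMod and negDivOk are made-up names):

  adjustDiv :: (Integer, Integer) -> Integer
  adjustDiv (q, r) = q + (if r /= 0 then 1 else 0)

  adjustMod :: Integer -> Integer -> Integer
  adjustMod l r = if r == 0 then 0 else l - r

  -- The sign-handling code equations above, for positive p and q:
  negDivOk :: Bool
  negDivOk =
    and [    (-p) `div` q    == negate (adjustDiv (p `divMod` q))
          && (-p) `mod` q    == adjustMod q (p `mod` q)
          && p `div` (-q)    == negate (adjustDiv (p `divMod` q))
          && p `mod` (-q)    == negate (adjustMod q (p `mod` q))
          && (-p) `div` (-q) == p `div` q
          && (-p) `mod` (-q) == negate (p `mod` q)
        | p <- [1 .. 30], q <- [1 .. 15] ]
*)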
end
lemma divmod_BitM_2_eq [simp]:
\<open>divmod (Num.BitM m) (Num.Bit0 Num.One) = (numeral m - 1, (1 :: int))\<close>
by (cases m) simp_all
subsubsection \<open>Computation by simplification\<close>
lemma euclidean_size_nat_less_eq_iff:
\<open>euclidean_size m \<le> euclidean_size n \<longleftrightarrow> m \<le> n\<close> for m n :: nat
by simp
lemma euclidean_size_int_less_eq_iff:
\<open>euclidean_size k \<le> euclidean_size l \<longleftrightarrow> \<bar>k\<bar> \<le> \<bar>l\<bar>\<close> for k l :: int
by auto
simproc_setup numeral_divmod
("0 div 0 :: 'a :: unique_euclidean_semiring_with_nat_division" | "0 mod 0 :: 'a :: unique_euclidean_semiring_with_nat_division" |
"0 div 1 :: 'a :: unique_euclidean_semiring_with_nat_division" | "0 mod 1 :: 'a :: unique_euclidean_semiring_with_nat_division" |
"0 div - 1 :: int" | "0 mod - 1 :: int" |
"0 div numeral b :: 'a :: unique_euclidean_semiring_with_nat_division" | "0 mod numeral b :: 'a :: unique_euclidean_semiring_with_nat_division" |
"0 div - numeral b :: int" | "0 mod - numeral b :: int" |
"1 div 0 :: 'a :: unique_euclidean_semiring_with_nat_division" | "1 mod 0 :: 'a :: unique_euclidean_semiring_with_nat_division" |
"1 div 1 :: 'a :: unique_euclidean_semiring_with_nat_division" | "1 mod 1 :: 'a :: unique_euclidean_semiring_with_nat_division" |
"1 div - 1 :: int" | "1 mod - 1 :: int" |
"1 div numeral b :: 'a :: unique_euclidean_semiring_with_nat_division" | "1 mod numeral b :: 'a :: unique_euclidean_semiring_with_nat_division" |
"1 div - numeral b :: int" |"1 mod - numeral b :: int" |
"- 1 div 0 :: int" | "- 1 mod 0 :: int" | "- 1 div 1 :: int" | "- 1 mod 1 :: int" |
"- 1 div - 1 :: int" | "- 1 mod - 1 :: int" | "- 1 div numeral b :: int" | "- 1 mod numeral b :: int" |
"- 1 div - numeral b :: int" | "- 1 mod - numeral b :: int" |
"numeral a div 0 :: 'a :: unique_euclidean_semiring_with_nat_division" | "numeral a mod 0 :: 'a :: unique_euclidean_semiring_with_nat_division" |
"numeral a div 1 :: 'a :: unique_euclidean_semiring_with_nat_division" | "numeral a mod 1 :: 'a :: unique_euclidean_semiring_with_nat_division" |
"numeral a div - 1 :: int" | "numeral a mod - 1 :: int" |
"numeral a div numeral b :: 'a :: unique_euclidean_semiring_with_nat_division" | "numeral a mod numeral b :: 'a :: unique_euclidean_semiring_with_nat_division" |
"numeral a div - numeral b :: int" | "numeral a mod - numeral b :: int" |
"- numeral a div 0 :: int" | "- numeral a mod 0 :: int" |
"- numeral a div 1 :: int" | "- numeral a mod 1 :: int" |
"- numeral a div - 1 :: int" | "- numeral a mod - 1 :: int" |
"- numeral a div numeral b :: int" | "- numeral a mod numeral b :: int" |
"- numeral a div - numeral b :: int" | "- numeral a mod - numeral b :: int") = \<open>
let
val if_cong = the (Code.get_case_cong \<^theory> \<^const_name>\<open>If\<close>);
fun successful_rewrite ctxt ct =
let
val thm = Simplifier.rewrite ctxt ct
in if Thm.is_reflexive thm then NONE else SOME thm end;
- in fn phi =>
- let
- val simps = Morphism.fact phi (@{thms div_0 mod_0 div_by_0 mod_by_0 div_by_1 mod_by_1
- one_div_numeral one_mod_numeral minus_one_div_numeral minus_one_mod_numeral
- one_div_minus_numeral one_mod_minus_numeral
- numeral_div_numeral numeral_mod_numeral minus_numeral_div_numeral minus_numeral_mod_numeral
- numeral_div_minus_numeral numeral_mod_minus_numeral
- div_minus_minus mod_minus_minus Parity.adjust_div_eq of_bool_eq one_neq_zero
- numeral_neq_zero neg_equal_0_iff_equal arith_simps arith_special divmod_trivial
- divmod_cancel divmod_steps divmod_step_def fst_conv snd_conv numeral_One
- case_prod_beta rel_simps Parity.adjust_mod_def div_minus1_right mod_minus1_right
- minus_minus numeral_times_numeral mult_zero_right mult_1_right
- euclidean_size_nat_less_eq_iff euclidean_size_int_less_eq_iff diff_nat_numeral nat_numeral}
- @ [@{lemma "0 = 0 \<longleftrightarrow> True" by simp}]);
- fun prepare_simpset ctxt = HOL_ss |> Simplifier.simpset_map ctxt
- (Simplifier.add_cong if_cong #> fold Simplifier.add_simp simps)
- in fn ctxt => successful_rewrite (Simplifier.put_simpset (prepare_simpset ctxt) ctxt) end
- end
+ val simps = @{thms div_0 mod_0 div_by_0 mod_by_0 div_by_1 mod_by_1
+ one_div_numeral one_mod_numeral minus_one_div_numeral minus_one_mod_numeral
+ one_div_minus_numeral one_mod_minus_numeral
+ numeral_div_numeral numeral_mod_numeral minus_numeral_div_numeral minus_numeral_mod_numeral
+ numeral_div_minus_numeral numeral_mod_minus_numeral
+ div_minus_minus mod_minus_minus Parity.adjust_div_eq of_bool_eq one_neq_zero
+ numeral_neq_zero neg_equal_0_iff_equal arith_simps arith_special divmod_trivial
+ divmod_cancel divmod_steps divmod_step_def fst_conv snd_conv numeral_One
+ case_prod_beta rel_simps Parity.adjust_mod_def div_minus1_right mod_minus1_right
+ minus_minus numeral_times_numeral mult_zero_right mult_1_right
+ euclidean_size_nat_less_eq_iff euclidean_size_int_less_eq_iff diff_nat_numeral nat_numeral}
+ @ [@{lemma "0 = 0 \<longleftrightarrow> True" by simp}];
+ val simpset =
+ HOL_ss |> Simplifier.simpset_map \<^context>
+ (Simplifier.add_cong if_cong #> fold Simplifier.add_simp simps);
+ in K (fn ctxt => successful_rewrite (Simplifier.put_simpset simpset ctxt)) end
\<close> \<comment> \<open>
There is space for improvement here: the calculation itself
could be carried out outside the logic, and a generic simproc
(simplifier setup) for such calculations would be helpful.
\<close>
subsection \<open>Computing congruences modulo \<open>2 ^ q\<close>\<close>
context unique_euclidean_semiring_with_nat_division
begin
lemma cong_exp_iff_simps:
"numeral n mod numeral Num.One = 0
\<longleftrightarrow> True"
"numeral (Num.Bit0 n) mod numeral (Num.Bit0 q) = 0
\<longleftrightarrow> numeral n mod numeral q = 0"
"numeral (Num.Bit1 n) mod numeral (Num.Bit0 q) = 0
\<longleftrightarrow> False"
"numeral m mod numeral Num.One = (numeral n mod numeral Num.One)
\<longleftrightarrow> True"
"numeral Num.One mod numeral (Num.Bit0 q) = (numeral Num.One mod numeral (Num.Bit0 q))
\<longleftrightarrow> True"
"numeral Num.One mod numeral (Num.Bit0 q) = (numeral (Num.Bit0 n) mod numeral (Num.Bit0 q))
\<longleftrightarrow> False"
"numeral Num.One mod numeral (Num.Bit0 q) = (numeral (Num.Bit1 n) mod numeral (Num.Bit0 q))
\<longleftrightarrow> (numeral n mod numeral q) = 0"
"numeral (Num.Bit0 m) mod numeral (Num.Bit0 q) = (numeral Num.One mod numeral (Num.Bit0 q))
\<longleftrightarrow> False"
"numeral (Num.Bit0 m) mod numeral (Num.Bit0 q) = (numeral (Num.Bit0 n) mod numeral (Num.Bit0 q))
\<longleftrightarrow> numeral m mod numeral q = (numeral n mod numeral q)"
"numeral (Num.Bit0 m) mod numeral (Num.Bit0 q) = (numeral (Num.Bit1 n) mod numeral (Num.Bit0 q))
\<longleftrightarrow> False"
"numeral (Num.Bit1 m) mod numeral (Num.Bit0 q) = (numeral Num.One mod numeral (Num.Bit0 q))
\<longleftrightarrow> (numeral m mod numeral q) = 0"
"numeral (Num.Bit1 m) mod numeral (Num.Bit0 q) = (numeral (Num.Bit0 n) mod numeral (Num.Bit0 q))
\<longleftrightarrow> False"
"numeral (Num.Bit1 m) mod numeral (Num.Bit0 q) = (numeral (Num.Bit1 n) mod numeral (Num.Bit0 q))
\<longleftrightarrow> numeral m mod numeral q = (numeral n mod numeral q)"
by (auto simp add: case_prod_beta dest: arg_cong [of _ _ even])
end
code_identifier
code_module Parity \<rightharpoonup> (SML) Arith and (OCaml) Arith and (Haskell) Arith
lemmas even_of_nat = even_of_nat_iff
end
diff --git a/src/HOL/Product_Type.thy b/src/HOL/Product_Type.thy
--- a/src/HOL/Product_Type.thy
+++ b/src/HOL/Product_Type.thy
@@ -1,1376 +1,1376 @@
(* Title: HOL/Product_Type.thy
Author: Lawrence C Paulson, Cambridge University Computer Laboratory
Copyright 1992 University of Cambridge
*)
section \<open>Cartesian products\<close>
theory Product_Type
imports Typedef Inductive Fun
keywords "inductive_set" "coinductive_set" :: thy_defn
begin
subsection \<open>\<^typ>\<open>bool\<close> is a datatype\<close>
free_constructors (discs_sels) case_bool for True | False
by auto
text \<open>Avoid name clashes by prefixing the output of \<open>old_rep_datatype\<close> with \<open>old\<close>.\<close>
setup \<open>Sign.mandatory_path "old"\<close>
old_rep_datatype True False by (auto intro: bool_induct)
setup \<open>Sign.parent_path\<close>
text \<open>But erase the prefix for properties that are not generated by \<open>free_constructors\<close>.\<close>
setup \<open>Sign.mandatory_path "bool"\<close>
lemmas induct = old.bool.induct
lemmas inducts = old.bool.inducts
lemmas rec = old.bool.rec
lemmas simps = bool.distinct bool.case bool.rec
setup \<open>Sign.parent_path\<close>
declare case_split [cases type: bool]
\<comment> \<open>prefer plain propositional version\<close>
lemma [code]: "HOL.equal False P \<longleftrightarrow> \<not> P"
and [code]: "HOL.equal True P \<longleftrightarrow> P"
and [code]: "HOL.equal P False \<longleftrightarrow> \<not> P"
and [code]: "HOL.equal P True \<longleftrightarrow> P"
and [code nbe]: "HOL.equal P P \<longleftrightarrow> True"
by (simp_all add: equal)
lemma If_case_cert:
assumes "CASE \<equiv> (\<lambda>b. If b f g)"
shows "(CASE True \<equiv> f) &&& (CASE False \<equiv> g)"
using assms by simp_all
setup \<open>Code.declare_case_global @{thm If_case_cert}\<close>
code_printing
constant "HOL.equal :: bool \<Rightarrow> bool \<Rightarrow> bool" \<rightharpoonup> (Haskell) infix 4 "=="
| class_instance "bool" :: "equal" \<rightharpoonup> (Haskell) -
subsection \<open>The \<open>unit\<close> type\<close>
typedef unit = "{True}"
by auto
definition Unity :: unit ("'(')")
where "() = Abs_unit True"
lemma unit_eq [no_atp]: "u = ()"
by (induct u) (simp add: Unity_def)
text \<open>
Simplification procedure for @{thm [source] unit_eq}. Cannot use
this rule directly --- it loops!
\<close>
simproc_setup unit_eq ("x::unit") = \<open>
- fn _ => fn _ => fn ct =>
+ K (K (fn ct =>
if HOLogic.is_unit (Thm.term_of ct) then NONE
- else SOME (mk_meta_eq @{thm unit_eq})
+ else SOME (mk_meta_eq @{thm unit_eq})))
\<close>
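A minimal illustration, assuming the simproc is active as set up above: any term of type \<open>unit\<close> other than \<open>()\<close> itself is normalised to \<open>()\<close>, so goals like the following are closed by plain simplification.
lemma "f (u::unit) = f ()"
  by simp \<comment> \<open>\<open>u\<close> is rewritten to \<open>()\<close> by the \<open>unit_eq\<close> simproc\<close>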
free_constructors case_unit for "()"
by auto
text \<open>Avoid name clashes by prefixing the output of \<open>old_rep_datatype\<close> with \<open>old\<close>.\<close>
setup \<open>Sign.mandatory_path "old"\<close>
old_rep_datatype "()" by simp
setup \<open>Sign.parent_path\<close>
text \<open>But erase the prefix for properties that are not generated by \<open>free_constructors\<close>.\<close>
setup \<open>Sign.mandatory_path "unit"\<close>
lemmas induct = old.unit.induct
lemmas inducts = old.unit.inducts
lemmas rec = old.unit.rec
lemmas simps = unit.case unit.rec
setup \<open>Sign.parent_path\<close>
lemma unit_all_eq1: "(\<And>x::unit. PROP P x) \<equiv> PROP P ()"
by simp
lemma unit_all_eq2: "(\<And>x::unit. PROP P) \<equiv> PROP P"
by (rule triv_forall_equality)
text \<open>
This rewrite counters the effect of simproc \<open>unit_eq\<close> on @{term
[source] "\<lambda>u::unit. f u"}, replacing it by @{term [source]
f} rather than by @{term [source] "\<lambda>u. f ()"}.
\<close>
lemma unit_abs_eta_conv [simp]: "(\<lambda>u::unit. f ()) = f"
by (rule ext) simp
lemma UNIV_unit: "UNIV = {()}"
by auto
instantiation unit :: default
begin
definition "default = ()"
instance ..
end
instantiation unit :: "{complete_boolean_algebra,complete_linorder,wellorder}"
begin
definition less_eq_unit :: "unit \<Rightarrow> unit \<Rightarrow> bool"
where "(_::unit) \<le> _ \<longleftrightarrow> True"
lemma less_eq_unit [iff]: "u \<le> v" for u v :: unit
by (simp add: less_eq_unit_def)
definition less_unit :: "unit \<Rightarrow> unit \<Rightarrow> bool"
where "(_::unit) < _ \<longleftrightarrow> False"
lemma less_unit [iff]: "\<not> u < v" for u v :: unit
by (simp_all add: less_eq_unit_def less_unit_def)
definition bot_unit :: unit
where [code_unfold]: "\<bottom> = ()"
definition top_unit :: unit
where [code_unfold]: "\<top> = ()"
definition inf_unit :: "unit \<Rightarrow> unit \<Rightarrow> unit"
where [simp]: "_ \<sqinter> _ = ()"
definition sup_unit :: "unit \<Rightarrow> unit \<Rightarrow> unit"
where [simp]: "_ \<squnion> _ = ()"
definition Inf_unit :: "unit set \<Rightarrow> unit"
where [simp]: "\<Sqinter>_ = ()"
definition Sup_unit :: "unit set \<Rightarrow> unit"
where [simp]: "\<Squnion>_ = ()"
definition uminus_unit :: "unit \<Rightarrow> unit"
where [simp]: "- _ = ()"
declare less_eq_unit_def [abs_def, code_unfold]
less_unit_def [abs_def, code_unfold]
inf_unit_def [abs_def, code_unfold]
sup_unit_def [abs_def, code_unfold]
Inf_unit_def [abs_def, code_unfold]
Sup_unit_def [abs_def, code_unfold]
uminus_unit_def [abs_def, code_unfold]
instance
by intro_classes auto
end
lemma [code]: "HOL.equal u v \<longleftrightarrow> True" for u v :: unit
unfolding equal unit_eq [of u] unit_eq [of v] by (rule iffI TrueI refl)+
code_printing
type_constructor unit \<rightharpoonup>
(SML) "unit"
and (OCaml) "unit"
and (Haskell) "()"
and (Scala) "Unit"
| constant Unity \<rightharpoonup>
(SML) "()"
and (OCaml) "()"
and (Haskell) "()"
and (Scala) "()"
| class_instance unit :: equal \<rightharpoonup>
(Haskell) -
| constant "HOL.equal :: unit \<Rightarrow> unit \<Rightarrow> bool" \<rightharpoonup>
(Haskell) infix 4 "=="
code_reserved SML
unit
code_reserved OCaml
unit
code_reserved Scala
Unit
subsection \<open>The product type\<close>
subsubsection \<open>Type definition\<close>
definition Pair_Rep :: "'a \<Rightarrow> 'b \<Rightarrow> 'a \<Rightarrow> 'b \<Rightarrow> bool"
where "Pair_Rep a b = (\<lambda>x y. x = a \<and> y = b)"
definition "prod = {f. \<exists>a b. f = Pair_Rep (a::'a) (b::'b)}"
typedef ('a, 'b) prod ("(_ \<times>/ _)" [21, 20] 20) = "prod :: ('a \<Rightarrow> 'b \<Rightarrow> bool) set"
unfolding prod_def by auto
type_notation (ASCII)
prod (infixr "*" 20)
definition Pair :: "'a \<Rightarrow> 'b \<Rightarrow> 'a \<times> 'b"
where "Pair a b = Abs_prod (Pair_Rep a b)"
lemma prod_cases: "(\<And>a b. P (Pair a b)) \<Longrightarrow> P p"
by (cases p) (auto simp add: prod_def Pair_def Pair_Rep_def)
free_constructors case_prod for Pair fst snd
proof -
fix P :: bool and p :: "'a \<times> 'b"
show "(\<And>x1 x2. p = Pair x1 x2 \<Longrightarrow> P) \<Longrightarrow> P"
by (cases p) (auto simp add: prod_def Pair_def Pair_Rep_def)
next
fix a c :: 'a and b d :: 'b
have "Pair_Rep a b = Pair_Rep c d \<longleftrightarrow> a = c \<and> b = d"
by (auto simp add: Pair_Rep_def fun_eq_iff)
moreover have "Pair_Rep a b \<in> prod" and "Pair_Rep c d \<in> prod"
by (auto simp add: prod_def)
ultimately show "Pair a b = Pair c d \<longleftrightarrow> a = c \<and> b = d"
by (simp add: Pair_def Abs_prod_inject)
qed
text \<open>Avoid name clashes by prefixing the output of \<open>old_rep_datatype\<close> with \<open>old\<close>.\<close>
setup \<open>Sign.mandatory_path "old"\<close>
old_rep_datatype Pair
by (erule prod_cases) (rule prod.inject)
setup \<open>Sign.parent_path\<close>
text \<open>But erase the prefix for properties that are not generated by \<open>free_constructors\<close>.\<close>
setup \<open>Sign.mandatory_path "prod"\<close>
declare old.prod.inject [iff del]
lemmas induct = old.prod.induct
lemmas inducts = old.prod.inducts
lemmas rec = old.prod.rec
lemmas simps = prod.inject prod.case prod.rec
setup \<open>Sign.parent_path\<close>
declare prod.case [nitpick_simp del]
declare old.prod.case_cong_weak [cong del]
declare prod.case_eq_if [mono]
declare prod.split [no_atp]
declare prod.split_asm [no_atp]
text \<open>
@{thm [source] prod.split} could be declared as \<open>[split]\<close>
once the Splitter has been sped up significantly:
precompute the constants involved and do nothing unless the
current goal contains one of those constants.
\<close>
subsubsection \<open>Tuple syntax\<close>
text \<open>
Patterns -- extends pre-defined type \<^typ>\<open>pttrn\<close> used in
abstractions.
\<close>
nonterminal tuple_args and patterns
syntax
"_tuple" :: "'a \<Rightarrow> tuple_args \<Rightarrow> 'a \<times> 'b" ("(1'(_,/ _'))")
"_tuple_arg" :: "'a \<Rightarrow> tuple_args" ("_")
"_tuple_args" :: "'a \<Rightarrow> tuple_args \<Rightarrow> tuple_args" ("_,/ _")
"_pattern" :: "pttrn \<Rightarrow> patterns \<Rightarrow> pttrn" ("'(_,/ _')")
"" :: "pttrn \<Rightarrow> patterns" ("_")
"_patterns" :: "pttrn \<Rightarrow> patterns \<Rightarrow> patterns" ("_,/ _")
"_unit" :: pttrn ("'(')")
translations
"(x, y)" \<rightleftharpoons> "CONST Pair x y"
"_pattern x y" \<rightleftharpoons> "CONST Pair x y"
"_patterns x y" \<rightleftharpoons> "CONST Pair x y"
"_tuple x (_tuple_args y z)" \<rightleftharpoons> "_tuple x (_tuple_arg (_tuple y z))"
"\<lambda>(x, y, zs). b" \<rightleftharpoons> "CONST case_prod (\<lambda>x (y, zs). b)"
"\<lambda>(x, y). b" \<rightleftharpoons> "CONST case_prod (\<lambda>x y. b)"
"_abs (CONST Pair x y) t" \<rightharpoonup> "\<lambda>(x, y). t"
\<comment> \<open>This rule accommodates tuples in \<open>case C \<dots> (x, y) \<dots> \<Rightarrow> \<dots>\<close>:
The \<open>(x, y)\<close> is parsed as \<open>Pair x y\<close> because it is \<open>logic\<close>,
not \<open>pttrn\<close>.\<close>
"\<lambda>(). b" \<rightleftharpoons> "CONST case_unit b"
"_abs (CONST Unity) t" \<rightharpoonup> "\<lambda>(). t"
text \<open>print \<^term>\<open>case_prod f\<close> as \<^term>\<open>\<lambda>(x, y). f x y\<close> and
\<^term>\<open>case_prod (\<lambda>x. f x)\<close> as \<^term>\<open>\<lambda>(x, y). f x y\<close>\<close>
typed_print_translation \<open>
let
fun case_prod_guess_names_tr' T [Abs (x, _, Abs _)] = raise Match
| case_prod_guess_names_tr' T [Abs (x, xT, t)] =
(case (head_of t) of
Const (\<^const_syntax>\<open>case_prod\<close>, _) => raise Match
| _ =>
let
val (_ :: yT :: _) = binder_types (domain_type T) handle Bind => raise Match;
val (y, t') = Syntax_Trans.atomic_abs_tr' ("y", yT, incr_boundvars 1 t $ Bound 0);
val (x', t'') = Syntax_Trans.atomic_abs_tr' (x, xT, t');
in
Syntax.const \<^syntax_const>\<open>_abs\<close> $
(Syntax.const \<^syntax_const>\<open>_pattern\<close> $ x' $ y) $ t''
end)
| case_prod_guess_names_tr' T [t] =
(case head_of t of
Const (\<^const_syntax>\<open>case_prod\<close>, _) => raise Match
| _ =>
let
val (xT :: yT :: _) = binder_types (domain_type T) handle Bind => raise Match;
val (y, t') =
Syntax_Trans.atomic_abs_tr' ("y", yT, incr_boundvars 2 t $ Bound 1 $ Bound 0);
val (x', t'') = Syntax_Trans.atomic_abs_tr' ("x", xT, t');
in
Syntax.const \<^syntax_const>\<open>_abs\<close> $
(Syntax.const \<^syntax_const>\<open>_pattern\<close> $ x' $ y) $ t''
end)
| case_prod_guess_names_tr' _ _ = raise Match;
in [(\<^const_syntax>\<open>case_prod\<close>, K case_prod_guess_names_tr')] end
\<close>
text \<open>Reconstruct the pattern from (nested) \<^const>\<open>case_prod\<close>s,
avoiding eta-contraction of the body; this is required for an enclosing "let"
whenever "let" does not itself avoid eta-contraction, which has been observed to occur.\<close>
print_translation \<open>
let
fun case_prod_tr' [Abs (x, T, t as (Abs abs))] =
(* case_prod (\<lambda>x y. t) \<Rightarrow> \<lambda>(x, y) t *)
let
val (y, t') = Syntax_Trans.atomic_abs_tr' abs;
val (x', t'') = Syntax_Trans.atomic_abs_tr' (x, T, t');
in
Syntax.const \<^syntax_const>\<open>_abs\<close> $
(Syntax.const \<^syntax_const>\<open>_pattern\<close> $ x' $ y) $ t''
end
| case_prod_tr' [Abs (x, T, (s as Const (\<^const_syntax>\<open>case_prod\<close>, _) $ t))] =
(* case_prod (\<lambda>x. (case_prod (\<lambda>y z. t))) \<Rightarrow> \<lambda>(x, y, z). t *)
let
val Const (\<^syntax_const>\<open>_abs\<close>, _) $
(Const (\<^syntax_const>\<open>_pattern\<close>, _) $ y $ z) $ t' =
case_prod_tr' [t];
val (x', t'') = Syntax_Trans.atomic_abs_tr' (x, T, t');
in
Syntax.const \<^syntax_const>\<open>_abs\<close> $
(Syntax.const \<^syntax_const>\<open>_pattern\<close> $ x' $
(Syntax.const \<^syntax_const>\<open>_patterns\<close> $ y $ z)) $ t''
end
| case_prod_tr' [Const (\<^const_syntax>\<open>case_prod\<close>, _) $ t] =
(* case_prod (case_prod (\<lambda>x y z. t)) \<Rightarrow> \<lambda>((x, y), z). t *)
case_prod_tr' [(case_prod_tr' [t])]
(* inner case_prod_tr' creates next pattern *)
| case_prod_tr' [Const (\<^syntax_const>\<open>_abs\<close>, _) $ x_y $ Abs abs] =
(* case_prod (\<lambda>pttrn z. t) \<Rightarrow> \<lambda>(pttrn, z). t *)
let val (z, t) = Syntax_Trans.atomic_abs_tr' abs in
Syntax.const \<^syntax_const>\<open>_abs\<close> $
(Syntax.const \<^syntax_const>\<open>_pattern\<close> $ x_y $ z) $ t
end
| case_prod_tr' _ = raise Match;
in [(\<^const_syntax>\<open>case_prod\<close>, K case_prod_tr')] end
\<close>
subsubsection \<open>Code generator setup\<close>
code_printing
type_constructor prod \<rightharpoonup>
(SML) infix 2 "*"
and (OCaml) infix 2 "*"
and (Haskell) "!((_),/ (_))"
and (Scala) "((_),/ (_))"
| constant Pair \<rightharpoonup>
(SML) "!((_),/ (_))"
and (OCaml) "!((_),/ (_))"
and (Haskell) "!((_),/ (_))"
and (Scala) "!((_),/ (_))"
| class_instance prod :: equal \<rightharpoonup>
(Haskell) -
| constant "HOL.equal :: 'a \<times> 'b \<Rightarrow> 'a \<times> 'b \<Rightarrow> bool" \<rightharpoonup>
(Haskell) infix 4 "=="
| constant fst \<rightharpoonup> (Haskell) "fst"
| constant snd \<rightharpoonup> (Haskell) "snd"
subsubsection \<open>Fundamental operations and properties\<close>
lemma Pair_inject: "(a, b) = (a', b') \<Longrightarrow> (a = a' \<Longrightarrow> b = b' \<Longrightarrow> R) \<Longrightarrow> R"
by simp
lemma surj_pair [simp]: "\<exists>x y. p = (x, y)"
by (cases p) simp
lemma fst_eqD: "fst (x, y) = a \<Longrightarrow> x = a"
by simp
lemma snd_eqD: "snd (x, y) = a \<Longrightarrow> y = a"
by simp
lemma case_prod_unfold [nitpick_unfold]: "case_prod = (\<lambda>c p. c (fst p) (snd p))"
by (simp add: fun_eq_iff split: prod.split)
lemma case_prod_conv [simp, code]: "(case (a, b) of (c, d) \<Rightarrow> f c d) = f a b"
by (fact prod.case)
lemmas surjective_pairing = prod.collapse [symmetric]
lemma prod_eq_iff: "s = t \<longleftrightarrow> fst s = fst t \<and> snd s = snd t"
by (cases s, cases t) simp
lemma prod_eqI [intro?]: "fst p = fst q \<Longrightarrow> snd p = snd q \<Longrightarrow> p = q"
by (simp add: prod_eq_iff)
lemma case_prodI: "f a b \<Longrightarrow> case (a, b) of (c, d) \<Rightarrow> f c d"
by (rule prod.case [THEN iffD2])
lemma case_prodD: "(case (a, b) of (c, d) \<Rightarrow> f c d) \<Longrightarrow> f a b"
by (rule prod.case [THEN iffD1])
lemma case_prod_Pair [simp]: "case_prod Pair = id"
by (simp add: fun_eq_iff split: prod.split)
lemma case_prod_eta: "(\<lambda>(x, y). f (x, y)) = f"
\<comment> \<open>Subsumes the old \<open>split_Pair\<close> when \<^term>\<open>f\<close> is the identity function.\<close>
by (simp add: fun_eq_iff split: prod.split)
(* This looks like a sensible simp-rule but appears to do more harm than good:
lemma case_prod_const [simp]: "(\<lambda>(_,_). c) = (\<lambda>_. c)"
by(rule case_prod_eta)
*)
lemma case_prod_comp: "(case x of (a, b) \<Rightarrow> (f \<circ> g) a b) = f (g (fst x)) (snd x)"
by (cases x) simp
lemma The_case_prod: "The (case_prod P) = (THE xy. P (fst xy) (snd xy))"
by (simp add: case_prod_unfold)
lemma cond_case_prod_eta: "(\<And>x y. f x y = g (x, y)) \<Longrightarrow> (\<lambda>(x, y). f x y) = g"
by (simp add: case_prod_eta)
lemma split_paired_all [no_atp]: "(\<And>x. PROP P x) \<equiv> (\<And>a b. PROP P (a, b))"
proof
fix a b
assume "\<And>x. PROP P x"
then show "PROP P (a, b)" .
next
fix x
assume "\<And>a b. PROP P (a, b)"
from \<open>PROP P (fst x, snd x)\<close> show "PROP P x" by simp
qed
text \<open>
The rule @{thm [source] split_paired_all} does not work with the
Simplifier because it also affects premises in congruence rules,
where this can lead to premises of the form \<open>\<And>a b. \<dots> = ?P(a, b)\<close>
which cannot be solved by reflexivity.
\<close>
lemmas split_tupled_all = split_paired_all unit_all_eq2
ML \<open>
(* replace parameters of product type by individual component parameters *)
local (* filtering with exists_paired_all is an essential optimization *)
fun exists_paired_all (Const (\<^const_name>\<open>Pure.all\<close>, _) $ Abs (_, T, t)) =
can HOLogic.dest_prodT T orelse exists_paired_all t
| exists_paired_all (t $ u) = exists_paired_all t orelse exists_paired_all u
| exists_paired_all (Abs (_, _, t)) = exists_paired_all t
| exists_paired_all _ = false;
val ss =
simpset_of
(put_simpset HOL_basic_ss \<^context>
addsimps [@{thm split_paired_all}, @{thm unit_all_eq2}, @{thm unit_abs_eta_conv}]
addsimprocs [\<^simproc>\<open>unit_eq\<close>]);
in
fun split_all_tac ctxt = SUBGOAL (fn (t, i) =>
if exists_paired_all t then safe_full_simp_tac (put_simpset ss ctxt) i else no_tac);
fun unsafe_split_all_tac ctxt = SUBGOAL (fn (t, i) =>
if exists_paired_all t then full_simp_tac (put_simpset ss ctxt) i else no_tac);
fun split_all ctxt th =
if exists_paired_all (Thm.prop_of th)
then full_simplify (put_simpset ss ctxt) th else th;
end;
\<close>
setup \<open>map_theory_claset (fn ctxt => ctxt addSbefore ("split_all_tac", split_all_tac))\<close>
lemma split_paired_All [simp, no_atp]: "(\<forall>x. P x) \<longleftrightarrow> (\<forall>a b. P (a, b))"
\<comment> \<open>\<open>[iff]\<close> is not a good idea because it makes \<open>blast\<close> loop\<close>
by fast
lemma split_paired_Ex [simp, no_atp]: "(\<exists>x. P x) \<longleftrightarrow> (\<exists>a b. P (a, b))"
by fast
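An illustrative goal, assuming a theory based on Main so that \<open>nat\<close> and \<open>ac_simps\<close> are available: the quantifier over a pair is first split by @{thm [source] split_paired_All}, after which ordinary rewriting finishes the proof.
lemma "\<forall>p::nat \<times> nat. fst p + snd p = snd p + fst p"
  by (simp add: ac_simps) \<comment> \<open>\<open>\<forall>p\<close> becomes \<open>\<forall>a b\<close> before commutativity applies\<close>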
lemma split_paired_The [no_atp]: "(THE x. P x) = (THE (a, b). P (a, b))"
\<comment> \<open>Can't be added to simpset: loops!\<close>
by (simp add: case_prod_eta)
text \<open>
Simplification procedure for @{thm [source] cond_case_prod_eta}. Using
@{thm [source] case_prod_eta} as a rewrite rule is not general enough,
and using @{thm [source] cond_case_prod_eta} directly would render some
existing proofs very inefficient; similarly for \<open>prod.case_eq_if\<close>.
\<close>
ML \<open>
local
val cond_case_prod_eta_ss =
simpset_of (put_simpset HOL_basic_ss \<^context> addsimps @{thms cond_case_prod_eta});
fun Pair_pat k 0 (Bound m) = (m = k)
| Pair_pat k i (Const (\<^const_name>\<open>Pair\<close>, _) $ Bound m $ t) =
i > 0 andalso m = k + i andalso Pair_pat k (i - 1) t
| Pair_pat _ _ _ = false;
fun no_args k i (Abs (_, _, t)) = no_args (k + 1) i t
| no_args k i (t $ u) = no_args k i t andalso no_args k i u
| no_args k i (Bound m) = m < k orelse m > k + i
| no_args _ _ _ = true;
fun split_pat tp i (Abs (_, _, t)) = if tp 0 i t then SOME (i, t) else NONE
| split_pat tp i (Const (\<^const_name>\<open>case_prod\<close>, _) $ Abs (_, _, t)) = split_pat tp (i + 1) t
| split_pat tp i _ = NONE;
fun metaeq ctxt lhs rhs = mk_meta_eq (Goal.prove ctxt [] []
(HOLogic.mk_Trueprop (HOLogic.mk_eq (lhs, rhs)))
(K (simp_tac (put_simpset cond_case_prod_eta_ss ctxt) 1)));
fun beta_term_pat k i (Abs (_, _, t)) = beta_term_pat (k + 1) i t
| beta_term_pat k i (t $ u) =
Pair_pat k i (t $ u) orelse (beta_term_pat k i t andalso beta_term_pat k i u)
| beta_term_pat k i t = no_args k i t;
fun eta_term_pat k i (f $ arg) = no_args k i f andalso Pair_pat k i arg
| eta_term_pat _ _ _ = false;
fun subst arg k i (Abs (x, T, t)) = Abs (x, T, subst arg (k+1) i t)
| subst arg k i (t $ u) =
if Pair_pat k i (t $ u) then incr_boundvars k arg
else (subst arg k i t $ subst arg k i u)
| subst arg k i t = t;
in
fun beta_proc ctxt (s as Const (\<^const_name>\<open>case_prod\<close>, _) $ Abs (_, _, t) $ arg) =
(case split_pat beta_term_pat 1 t of
SOME (i, f) => SOME (metaeq ctxt s (subst arg 0 i f))
| NONE => NONE)
| beta_proc _ _ = NONE;
fun eta_proc ctxt (s as Const (\<^const_name>\<open>case_prod\<close>, _) $ Abs (_, _, t)) =
(case split_pat eta_term_pat 1 t of
SOME (_, ft) => SOME (metaeq ctxt s (let val f $ _ = ft in f end))
| NONE => NONE)
| eta_proc _ _ = NONE;
end;
\<close>
simproc_setup case_prod_beta ("case_prod f z") =
- \<open>fn _ => fn ctxt => fn ct => beta_proc ctxt (Thm.term_of ct)\<close>
+ \<open>K (fn ctxt => fn ct => beta_proc ctxt (Thm.term_of ct))\<close>
simproc_setup case_prod_eta ("case_prod f") =
- \<open>fn _ => fn ctxt => fn ct => eta_proc ctxt (Thm.term_of ct)\<close>
+ \<open>K (fn ctxt => fn ct => eta_proc ctxt (Thm.term_of ct))\<close>
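An illustrative goal, assuming the two simprocs above are active: an instance of @{thm [source] case_prod_eta} is then expected to go through by plain simplification.
lemma "(\<lambda>(x, y). g (x, y)) = g"
  by simp \<comment> \<open>expected to be solved by the \<open>case_prod_eta\<close> simproc\<close>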
lemma case_prod_beta': "(\<lambda>(x,y). f x y) = (\<lambda>x. f (fst x) (snd x))"
by (auto simp: fun_eq_iff)
text \<open>
\<^medskip> \<^const>\<open>case_prod\<close> used as a logical connective or set former.
\<^medskip> These rules are for use with \<open>blast\<close>; one could instead
call \<open>simp\<close> using @{thm [source] prod.split} as a rewrite rule.\<close>
lemma case_prodI2:
"\<And>p. (\<And>a b. p = (a, b) \<Longrightarrow> c a b) \<Longrightarrow> case p of (a, b) \<Rightarrow> c a b"
by (simp add: split_tupled_all)
lemma case_prodI2':
"\<And>p. (\<And>a b. (a, b) = p \<Longrightarrow> c a b x) \<Longrightarrow> (case p of (a, b) \<Rightarrow> c a b) x"
by (simp add: split_tupled_all)
lemma case_prodE [elim!]:
"(case p of (a, b) \<Rightarrow> c a b) \<Longrightarrow> (\<And>x y. p = (x, y) \<Longrightarrow> c x y \<Longrightarrow> Q) \<Longrightarrow> Q"
by (induct p) simp
lemma case_prodE' [elim!]:
"(case p of (a, b) \<Rightarrow> c a b) z \<Longrightarrow> (\<And>x y. p = (x, y) \<Longrightarrow> c x y z \<Longrightarrow> Q) \<Longrightarrow> Q"
by (induct p) simp
lemma case_prodE2:
assumes q: "Q (case z of (a, b) \<Rightarrow> P a b)"
and r: "\<And>x y. z = (x, y) \<Longrightarrow> Q (P x y) \<Longrightarrow> R"
shows R
proof (rule r)
show "z = (fst z, snd z)" by simp
then show "Q (P (fst z) (snd z))"
using q by (simp add: case_prod_unfold)
qed
lemma case_prodD': "(case (a, b) of (c, d) \<Rightarrow> R c d) c \<Longrightarrow> R a b c"
by simp
lemma mem_case_prodI: "z \<in> c a b \<Longrightarrow> z \<in> (case (a, b) of (d, e) \<Rightarrow> c d e)"
by simp
lemma mem_case_prodI2 [intro!]:
"\<And>p. (\<And>a b. p = (a, b) \<Longrightarrow> z \<in> c a b) \<Longrightarrow> z \<in> (case p of (a, b) \<Rightarrow> c a b)"
by (simp only: split_tupled_all) simp
declare mem_case_prodI [intro!] \<comment> \<open>postponed to maintain traditional declaration order!\<close>
declare case_prodI2' [intro!] \<comment> \<open>postponed to maintain traditional declaration order!\<close>
declare case_prodI2 [intro!] \<comment> \<open>postponed to maintain traditional declaration order!\<close>
declare case_prodI [intro!] \<comment> \<open>postponed to maintain traditional declaration order!\<close>
lemma mem_case_prodE [elim!]:
assumes "z \<in> case_prod c p"
obtains x y where "p = (x, y)" and "z \<in> c x y"
using assms by (rule case_prodE2)
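An illustrative sketch exercising the elimination rules above with \<open>blast\<close>:
lemma "(case p of (a, b) \<Rightarrow> c a b) \<Longrightarrow> (\<exists>a b. p = (a, b) \<and> c a b)"
  by blast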
ML \<open>
local (* filtering with exists_p_split is an essential optimization *)
fun exists_p_split (Const (\<^const_name>\<open>case_prod\<close>,_) $ _ $ (Const (\<^const_name>\<open>Pair\<close>,_)$_$_)) = true
| exists_p_split (t $ u) = exists_p_split t orelse exists_p_split u
| exists_p_split (Abs (_, _, t)) = exists_p_split t
| exists_p_split _ = false;
in
fun split_conv_tac ctxt = SUBGOAL (fn (t, i) =>
if exists_p_split t
then safe_full_simp_tac (put_simpset HOL_basic_ss ctxt addsimps @{thms case_prod_conv}) i
else no_tac);
end;
\<close>
(* This prevents applications of splitE to already-split arguments, which would lead
to quite time-consuming computations (in particular for nested tuples) *)
setup \<open>map_theory_claset (fn ctxt => ctxt addSbefore ("split_conv_tac", split_conv_tac))\<close>
lemma split_eta_SetCompr [simp, no_atp]: "(\<lambda>u. \<exists>x y. u = (x, y) \<and> P (x, y)) = P"
by (rule ext) fast
lemma split_eta_SetCompr2 [simp, no_atp]: "(\<lambda>u. \<exists>x y. u = (x, y) \<and> P x y) = case_prod P"
by (rule ext) fast
lemma split_part [simp]: "(\<lambda>(a,b). P \<and> Q a b) = (\<lambda>ab. P \<and> case_prod Q ab)"
\<comment> \<open>Allows simplifications of nested splits in case of independent predicates.\<close>
by (rule ext) blast
(* Do NOT make this a simp rule as it
a) only helps in special situations
b) can lead to nontermination in the presence of split_def
*)
lemma split_comp_eq:
fixes f :: "'a \<Rightarrow> 'b \<Rightarrow> 'c"
and g :: "'d \<Rightarrow> 'a"
shows "(\<lambda>u. f (g (fst u)) (snd u)) = case_prod (\<lambda>x. f (g x))"
by (rule ext) auto
lemma pair_imageI [intro]: "(a, b) \<in> A \<Longrightarrow> f a b \<in> (\<lambda>(a, b). f a b) ` A"
by (rule image_eqI [where x = "(a, b)"]) auto
lemma Collect_const_case_prod[simp]: "{(a,b). P} = (if P then UNIV else {})"
by auto
lemma The_split_eq [simp]: "(THE (x',y'). x = x' \<and> y = y') = (x, y)"
by blast
(*
the following would be slightly more general,
but cannot be used as rewrite rule:
### Cannot add premise as rewrite rule because it contains (type) unknowns:
### ?y = .x
Goal "[| P y; !!x. P x ==> x = y |] ==> (@(x',y). x = x' & P y) = (x,y)"
by (rtac some_equality 1)
by ( Simp_tac 1)
by (split_all_tac 1)
by (Asm_full_simp_tac 1)
qed "The_split_eq";
*)
lemma case_prod_beta: "case_prod f p = f (fst p) (snd p)"
by (fact prod.case_eq_if)
lemma prod_cases3 [cases type]:
obtains (fields) a b c where "y = (a, b, c)"
proof (cases y)
case (Pair a b)
with that show ?thesis
by (cases b) blast
qed
lemma prod_induct3 [case_names fields, induct type]:
"(\<And>a b c. P (a, b, c)) \<Longrightarrow> P x"
by (cases x) blast
lemma prod_cases4 [cases type]:
obtains (fields) a b c d where "y = (a, b, c, d)"
proof (cases y)
case (fields a b c)
with that show ?thesis
by (cases c) blast
qed
lemma prod_induct4 [case_names fields, induct type]:
"(\<And>a b c d. P (a, b, c, d)) \<Longrightarrow> P x"
by (cases x) blast
lemma prod_cases5 [cases type]:
obtains (fields) a b c d e where "y = (a, b, c, d, e)"
proof (cases y)
case (fields a b c d)
with that show ?thesis
by (cases d) blast
qed
lemma prod_induct5 [case_names fields, induct type]:
"(\<And>a b c d e. P (a, b, c, d, e)) \<Longrightarrow> P x"
by (cases x) blast
lemma prod_cases6 [cases type]:
obtains (fields) a b c d e f where "y = (a, b, c, d, e, f)"
proof (cases y)
case (fields a b c d e)
with that show ?thesis
by (cases e) blast
qed
lemma prod_induct6 [case_names fields, induct type]:
"(\<And>a b c d e f. P (a, b, c, d, e, f)) \<Longrightarrow> P x"
by (cases x) blast
lemma prod_cases7 [cases type]:
obtains (fields) a b c d e f g where "y = (a, b, c, d, e, f, g)"
proof (cases y)
case (fields a b c d e f)
with that show ?thesis
by (cases f) blast
qed
lemma prod_induct7 [case_names fields, induct type]:
"(\<And>a b c d e f g. P (a, b, c, d, e, f, g)) \<Longrightarrow> P x"
by (cases x) blast
definition internal_case_prod :: "('a \<Rightarrow> 'b \<Rightarrow> 'c) \<Rightarrow> 'a \<times> 'b \<Rightarrow> 'c"
where "internal_case_prod \<equiv> case_prod"
lemma internal_case_prod_conv: "internal_case_prod c (a, b) = c a b"
by (simp only: internal_case_prod_def case_prod_conv)
ML_file \<open>Tools/split_rule.ML\<close>
hide_const internal_case_prod
subsubsection \<open>Derived operations\<close>
definition curry :: "('a \<times> 'b \<Rightarrow> 'c) \<Rightarrow> 'a \<Rightarrow> 'b \<Rightarrow> 'c"
where "curry = (\<lambda>c x y. c (x, y))"
lemma curry_conv [simp, code]: "curry f a b = f (a, b)"
by (simp add: curry_def)
lemma curryI [intro!]: "f (a, b) \<Longrightarrow> curry f a b"
by (simp add: curry_def)
lemma curryD [dest!]: "curry f a b \<Longrightarrow> f (a, b)"
by (simp add: curry_def)
lemma curryE: "curry f a b \<Longrightarrow> (f (a, b) \<Longrightarrow> Q) \<Longrightarrow> Q"
by (simp add: curry_def)
lemma curry_case_prod [simp]: "curry (case_prod f) = f"
by (simp add: curry_def case_prod_unfold)
lemma case_prod_curry [simp]: "case_prod (curry f) = f"
by (simp add: curry_def case_prod_unfold)
lemma curry_K: "curry (\<lambda>x. c) = (\<lambda>x y. c)"
by (simp add: fun_eq_iff)
text \<open>The composition-uncurry combinator.\<close>
definition scomp :: "('a \<Rightarrow> 'b \<times> 'c) \<Rightarrow> ('b \<Rightarrow> 'c \<Rightarrow> 'd) \<Rightarrow> 'a \<Rightarrow> 'd" (infixl "\<circ>\<rightarrow>" 60)
where "f \<circ>\<rightarrow> g = (\<lambda>x. case_prod g (f x))"
no_notation scomp (infixl "\<circ>\<rightarrow>" 60)
bundle state_combinator_syntax
begin
notation fcomp (infixl "\<circ>>" 60)
notation scomp (infixl "\<circ>\<rightarrow>" 60)
end
context
includes state_combinator_syntax
begin
lemma scomp_unfold: "(\<circ>\<rightarrow>) = (\<lambda>f g x. g (fst (f x)) (snd (f x)))"
by (simp add: fun_eq_iff scomp_def case_prod_unfold)
lemma scomp_apply [simp]: "(f \<circ>\<rightarrow> g) x = case_prod g (f x)"
by (simp add: scomp_unfold case_prod_unfold)
lemma Pair_scomp: "Pair x \<circ>\<rightarrow> f = f x"
by (simp add: fun_eq_iff)
lemma scomp_Pair: "x \<circ>\<rightarrow> Pair = x"
by (simp add: fun_eq_iff)
lemma scomp_scomp: "(f \<circ>\<rightarrow> g) \<circ>\<rightarrow> h = f \<circ>\<rightarrow> (\<lambda>x. g x \<circ>\<rightarrow> h)"
by (simp add: fun_eq_iff scomp_unfold)
lemma scomp_fcomp: "(f \<circ>\<rightarrow> g) \<circ>> h = f \<circ>\<rightarrow> (\<lambda>x. g x \<circ>> h)"
by (simp add: fun_eq_iff scomp_unfold fcomp_def)
lemma fcomp_scomp: "(f \<circ>> g) \<circ>\<rightarrow> h = f \<circ>> (g \<circ>\<rightarrow> h)"
by (simp add: fun_eq_iff scomp_unfold)
end
code_printing
constant scomp \<rightharpoonup> (Eval) infixl 3 "#->"
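A toy evaluation, as an illustrative sketch; the bundle is included locally so that the \<open>\<circ>\<rightarrow>\<close> notation is in scope.
context
  includes state_combinator_syntax
begin
lemma "(Pair True \<circ>\<rightarrow> (\<lambda>a b. (a \<and> b, b))) False = (False, False)"
  by (simp add: scomp_def)
end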
text \<open>
\<^term>\<open>map_prod\<close> --- action of the product functor upon functions.
\<close>
definition map_prod :: "('a \<Rightarrow> 'c) \<Rightarrow> ('b \<Rightarrow> 'd) \<Rightarrow> 'a \<times> 'b \<Rightarrow> 'c \<times> 'd"
where "map_prod f g = (\<lambda>(x, y). (f x, g y))"
lemma map_prod_simp [simp, code]: "map_prod f g (a, b) = (f a, g b)"
by (simp add: map_prod_def)
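For example (an illustrative goal), both components are mapped independently:
lemma "map_prod Not id (True, False) = (False, False)"
  by simp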
functor map_prod: map_prod
by (auto simp add: split_paired_all)
lemma fst_map_prod [simp]: "fst (map_prod f g x) = f (fst x)"
by (cases x) simp_all
lemma snd_map_prod [simp]: "snd (map_prod f g x) = g (snd x)"
by (cases x) simp_all
lemma fst_comp_map_prod [simp]: "fst \<circ> map_prod f g = f \<circ> fst"
by (rule ext) simp_all
lemma snd_comp_map_prod [simp]: "snd \<circ> map_prod f g = g \<circ> snd"
by (rule ext) simp_all
lemma map_prod_compose: "map_prod (f1 \<circ> f2) (g1 \<circ> g2) = (map_prod f1 g1 \<circ> map_prod f2 g2)"
by (rule ext) (simp add: map_prod.compositionality comp_def)
lemma map_prod_ident [simp]: "map_prod (\<lambda>x. x) (\<lambda>y. y) = (\<lambda>z. z)"
by (rule ext) (simp add: map_prod.identity)
lemma map_prod_imageI [intro]: "(a, b) \<in> R \<Longrightarrow> (f a, g b) \<in> map_prod f g ` R"
by (rule image_eqI) simp_all
lemma prod_fun_imageE [elim!]:
assumes major: "c \<in> map_prod f g ` R"
and cases: "\<And>x y. c = (f x, g y) \<Longrightarrow> (x, y) \<in> R \<Longrightarrow> P"
shows P
proof (rule major [THEN imageE])
fix x
assume "c = map_prod f g x" "x \<in> R"
then show P
using cases by (cases x) simp
qed
definition apfst :: "('a \<Rightarrow> 'c) \<Rightarrow> 'a \<times> 'b \<Rightarrow> 'c \<times> 'b"
where "apfst f = map_prod f id"
definition apsnd :: "('b \<Rightarrow> 'c) \<Rightarrow> 'a \<times> 'b \<Rightarrow> 'a \<times> 'c"
where "apsnd f = map_prod id f"
lemma apfst_conv [simp, code]: "apfst f (x, y) = (f x, y)"
by (simp add: apfst_def)
lemma apsnd_conv [simp, code]: "apsnd f (x, y) = (x, f y)"
by (simp add: apsnd_def)
lemma fst_apfst [simp]: "fst (apfst f x) = f (fst x)"
by (cases x) simp
lemma fst_comp_apfst [simp]: "fst \<circ> apfst f = f \<circ> fst"
by (simp add: fun_eq_iff)
lemma fst_apsnd [simp]: "fst (apsnd f x) = fst x"
by (cases x) simp
lemma fst_comp_apsnd [simp]: "fst \<circ> apsnd f = fst"
by (simp add: fun_eq_iff)
lemma snd_apfst [simp]: "snd (apfst f x) = snd x"
by (cases x) simp
lemma snd_comp_apfst [simp]: "snd \<circ> apfst f = snd"
by (simp add: fun_eq_iff)
lemma snd_apsnd [simp]: "snd (apsnd f x) = f (snd x)"
by (cases x) simp
lemma snd_comp_apsnd [simp]: "snd \<circ> apsnd f = f \<circ> snd"
by (simp add: fun_eq_iff)
lemma apfst_compose: "apfst f (apfst g x) = apfst (f \<circ> g) x"
by (cases x) simp
lemma apsnd_compose: "apsnd f (apsnd g x) = apsnd (f \<circ> g) x"
by (cases x) simp
lemma apfst_apsnd [simp]: "apfst f (apsnd g x) = (f (fst x), g (snd x))"
by (cases x) simp
lemma apsnd_apfst [simp]: "apsnd f (apfst g x) = (g (fst x), f (snd x))"
by (cases x) simp
lemma apfst_id [simp]: "apfst id = id"
by (simp add: fun_eq_iff)
lemma apsnd_id [simp]: "apsnd id = id"
by (simp add: fun_eq_iff)
lemma apfst_eq_conv [simp]: "apfst f x = apfst g x \<longleftrightarrow> f (fst x) = g (fst x)"
by (cases x) simp
lemma apsnd_eq_conv [simp]: "apsnd f x = apsnd g x \<longleftrightarrow> f (snd x) = g (snd x)"
by (cases x) simp
lemma apsnd_apfst_commute: "apsnd f (apfst g p) = apfst g (apsnd f p)"
by simp
context
begin
local_setup \<open>Local_Theory.map_background_naming (Name_Space.mandatory_path "prod")\<close>
definition swap :: "'a \<times> 'b \<Rightarrow> 'b \<times> 'a"
where "swap p = (snd p, fst p)"
end
lemma swap_simp [simp]: "prod.swap (x, y) = (y, x)"
by (simp add: prod.swap_def)
lemma swap_swap [simp]: "prod.swap (prod.swap p) = p"
by (cases p) simp
lemma swap_comp_swap [simp]: "prod.swap \<circ> prod.swap = id"
by (simp add: fun_eq_iff)
lemma pair_in_swap_image [simp]: "(y, x) \<in> prod.swap ` A \<longleftrightarrow> (x, y) \<in> A"
by (auto intro!: image_eqI)
lemma inj_swap [simp]: "inj_on prod.swap A"
by (rule inj_onI) auto
lemma swap_inj_on: "inj_on (\<lambda>(i, j). (j, i)) A"
by (rule inj_onI) auto
lemma surj_swap [simp]: "surj prod.swap"
by (rule surjI [of _ prod.swap]) simp
lemma bij_swap [simp]: "bij prod.swap"
by (simp add: bij_def)
lemma case_swap [simp]: "(case prod.swap p of (y, x) \<Rightarrow> f x y) = (case p of (x, y) \<Rightarrow> f x y)"
by (cases p) simp
lemma fst_swap [simp]: "fst (prod.swap x) = snd x"
by (cases x) simp
lemma snd_swap [simp]: "snd (prod.swap x) = fst x"
by (cases x) simp
text \<open>Disjoint union of a family of sets -- Sigma.\<close>
definition Sigma :: "'a set \<Rightarrow> ('a \<Rightarrow> 'b set) \<Rightarrow> ('a \<times> 'b) set"
where "Sigma A B \<equiv> \<Union>x\<in>A. \<Union>y\<in>B x. {Pair x y}"
abbreviation Times :: "'a set \<Rightarrow> 'b set \<Rightarrow> ('a \<times> 'b) set" (infixr "\<times>" 80)
where "A \<times> B \<equiv> Sigma A (\<lambda>_. B)"
hide_const (open) Times
bundle no_Set_Product_syntax begin
no_notation Product_Type.Times (infixr "\<times>" 80)
end
bundle Set_Product_syntax begin
notation Product_Type.Times (infixr "\<times>" 80)
end
syntax
"_Sigma" :: "pttrn \<Rightarrow> 'a set \<Rightarrow> 'b set \<Rightarrow> ('a \<times> 'b) set" ("(3SIGMA _:_./ _)" [0, 0, 10] 10)
translations
"SIGMA x:A. B" \<rightleftharpoons> "CONST Sigma A (\<lambda>x. B)"
lemma SigmaI [intro!]: "a \<in> A \<Longrightarrow> b \<in> B a \<Longrightarrow> (a, b) \<in> Sigma A B"
unfolding Sigma_def by blast
lemma SigmaE [elim!]: "c \<in> Sigma A B \<Longrightarrow> (\<And>x y. x \<in> A \<Longrightarrow> y \<in> B x \<Longrightarrow> c = (x, y) \<Longrightarrow> P) \<Longrightarrow> P"
\<comment> \<open>The general elimination rule.\<close>
unfolding Sigma_def by blast
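An illustrative consequence, as a sketch: every dependent pair lies in the corresponding non-dependent product.
lemma "Sigma A B \<subseteq> A \<times> (\<Union>x\<in>A. B x)"
  by blast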
text \<open>
Elimination of \<^term>\<open>(a, b) \<in> A \<times> B\<close> -- introduces no
eigenvariables.
\<close>
lemma SigmaD1: "(a, b) \<in> Sigma A B \<Longrightarrow> a \<in> A"
by blast
lemma SigmaD2: "(a, b) \<in> Sigma A B \<Longrightarrow> b \<in> B a"
by blast
lemma SigmaE2: "(a, b) \<in> Sigma A B \<Longrightarrow> (a \<in> A \<Longrightarrow> b \<in> B a \<Longrightarrow> P) \<Longrightarrow> P"
by blast
lemma Sigma_cong: "A = B \<Longrightarrow> (\<And>x. x \<in> B \<Longrightarrow> C x = D x) \<Longrightarrow> (SIGMA x:A. C x) = (SIGMA x:B. D x)"
by auto
lemma Sigma_mono: "A \<subseteq> C \<Longrightarrow> (\<And>x. x \<in> A \<Longrightarrow> B x \<subseteq> D x) \<Longrightarrow> Sigma A B \<subseteq> Sigma C D"
by blast
lemma Sigma_empty1 [simp]: "Sigma {} B = {}"
by blast
lemma Sigma_empty2 [simp]: "A \<times> {} = {}"
by blast
lemma UNIV_Times_UNIV [simp]: "UNIV \<times> UNIV = UNIV"
by auto
lemma Compl_Times_UNIV1 [simp]: "- (UNIV \<times> A) = UNIV \<times> (-A)"
by auto
lemma Compl_Times_UNIV2 [simp]: "- (A \<times> UNIV) = (-A) \<times> UNIV"
by auto
lemma mem_Sigma_iff [iff]: "(a, b) \<in> Sigma A B \<longleftrightarrow> a \<in> A \<and> b \<in> B a"
by blast
lemma mem_Times_iff: "x \<in> A \<times> B \<longleftrightarrow> fst x \<in> A \<and> snd x \<in> B"
by (induct x) simp
lemma Sigma_empty_iff: "(SIGMA i:I. X i) = {} \<longleftrightarrow> (\<forall>i\<in>I. X i = {})"
by auto
lemma Times_subset_cancel2: "x \<in> C \<Longrightarrow> A \<times> C \<subseteq> B \<times> C \<longleftrightarrow> A \<subseteq> B"
by blast
lemma Times_eq_cancel2: "x \<in> C \<Longrightarrow> A \<times> C = B \<times> C \<longleftrightarrow> A = B"
by (blast elim: equalityE)
lemma Collect_case_prod_Sigma: "{(x, y). P x \<and> Q x y} = (SIGMA x:Collect P. Collect (Q x))"
by blast
lemma Collect_case_prod [simp]: "{(a, b). P a \<and> Q b} = Collect P \<times> Collect Q "
by (fact Collect_case_prod_Sigma)
lemma Collect_case_prodD: "x \<in> Collect (case_prod A) \<Longrightarrow> A (fst x) (snd x)"
by auto
lemma Collect_case_prod_mono: "A \<le> B \<Longrightarrow> Collect (case_prod A) \<subseteq> Collect (case_prod B)"
by auto (auto elim!: le_funE)
lemma Collect_split_mono_strong:
"X = fst ` A \<Longrightarrow> Y = snd ` A \<Longrightarrow> \<forall>a\<in>X. \<forall>b \<in> Y. P a b \<longrightarrow> Q a b
\<Longrightarrow> A \<subseteq> Collect (case_prod P) \<Longrightarrow> A \<subseteq> Collect (case_prod Q)"
by fastforce
lemma UN_Times_distrib: "(\<Union>(a, b)\<in>A \<times> B. E a \<times> F b) = \<Union>(E ` A) \<times> \<Union>(F ` B)"
\<comment> \<open>Suggested by Pierre Chartier\<close>
by blast
lemma split_paired_Ball_Sigma [simp, no_atp]: "(\<forall>z\<in>Sigma A B. P z) \<longleftrightarrow> (\<forall>x\<in>A. \<forall>y\<in>B x. P (x, y))"
by blast
lemma split_paired_Bex_Sigma [simp, no_atp]: "(\<exists>z\<in>Sigma A B. P z) \<longleftrightarrow> (\<exists>x\<in>A. \<exists>y\<in>B x. P (x, y))"
by blast
lemma Sigma_Un_distrib1: "Sigma (I \<union> J) C = Sigma I C \<union> Sigma J C"
by blast
lemma Sigma_Un_distrib2: "(SIGMA i:I. A i \<union> B i) = Sigma I A \<union> Sigma I B"
by blast
lemma Sigma_Int_distrib1: "Sigma (I \<inter> J) C = Sigma I C \<inter> Sigma J C"
by blast
lemma Sigma_Int_distrib2: "(SIGMA i:I. A i \<inter> B i) = Sigma I A \<inter> Sigma I B"
by blast
lemma Sigma_Diff_distrib1: "Sigma (I - J) C = Sigma I C - Sigma J C"
by blast
lemma Sigma_Diff_distrib2: "(SIGMA i:I. A i - B i) = Sigma I A - Sigma I B"
by blast
lemma Sigma_Union: "Sigma (\<Union>X) B = (\<Union>A\<in>X. Sigma A B)"
by blast
lemma Pair_vimage_Sigma: "Pair x -` Sigma A f = (if x \<in> A then f x else {})"
by auto
text \<open>
Non-dependent versions are needed to avoid the need for higher-order
matching, especially when the rules are re-oriented.
\<close>
lemma Times_Un_distrib1: "(A \<union> B) \<times> C = A \<times> C \<union> B \<times> C "
by (fact Sigma_Un_distrib1)
lemma Times_Int_distrib1: "(A \<inter> B) \<times> C = A \<times> C \<inter> B \<times> C "
by (fact Sigma_Int_distrib1)
lemma Times_Diff_distrib1: "(A - B) \<times> C = A \<times> C - B \<times> C "
by (fact Sigma_Diff_distrib1)
lemma Times_empty [simp]: "A \<times> B = {} \<longleftrightarrow> A = {} \<or> B = {}"
by auto
lemma times_subset_iff: "A \<times> C \<subseteq> B \<times> D \<longleftrightarrow> A={} \<or> C={} \<or> A \<subseteq> B \<and> C \<subseteq> D"
by blast
lemma times_eq_iff: "A \<times> B = C \<times> D \<longleftrightarrow> A = C \<and> B = D \<or> (A = {} \<or> B = {}) \<and> (C = {} \<or> D = {})"
by auto
lemma fst_image_times [simp]: "fst ` (A \<times> B) = (if B = {} then {} else A)"
by force
lemma snd_image_times [simp]: "snd ` (A \<times> B) = (if A = {} then {} else B)"
by force
lemma fst_image_Sigma: "fst ` (Sigma A B) = {x \<in> A. B(x) \<noteq> {}}"
by force
lemma snd_image_Sigma: "snd ` (Sigma A B) = (\<Union> x \<in> A. B x)"
by force
lemma vimage_fst: "fst -` A = A \<times> UNIV"
by auto
lemma vimage_snd: "snd -` A = UNIV \<times> A"
by auto
lemma insert_Times_insert [simp]:
"insert a A \<times> insert b B = insert (a,b) (A \<times> insert b B \<union> insert a A \<times> B)"
by blast
lemma vimage_Times: "f -` (A \<times> B) = (fst \<circ> f) -` A \<inter> (snd \<circ> f) -` B"
proof (rule set_eqI)
show "x \<in> f -` (A \<times> B) \<longleftrightarrow> x \<in> (fst \<circ> f) -` A \<inter> (snd \<circ> f) -` B" for x
by (cases "f x") (auto split: prod.split)
qed
lemma Times_Int_Times: "A \<times> B \<inter> C \<times> D = (A \<inter> C) \<times> (B \<inter> D)"
by auto
lemma image_paired_Times:
"(\<lambda>(x,y). (f x, g y)) ` (A \<times> B) = (f ` A) \<times> (g ` B)"
by auto
lemma product_swap: "prod.swap ` (A \<times> B) = B \<times> A"
by (auto simp add: set_eq_iff)
lemma swap_product: "(\<lambda>(i, j). (j, i)) ` (A \<times> B) = B \<times> A"
by (auto simp add: set_eq_iff)
lemma image_split_eq_Sigma: "(\<lambda>x. (f x, g x)) ` A = Sigma (f ` A) (\<lambda>x. g ` (f -` {x} \<inter> A))"
proof (safe intro!: imageI)
fix a b
assume *: "a \<in> A" "b \<in> A" and eq: "f a = f b"
show "(f b, g a) \<in> (\<lambda>x. (f x, g x)) ` A"
using * eq[symmetric] by auto
qed simp_all
lemma subset_fst_snd: "A \<subseteq> (fst ` A \<times> snd ` A)"
by force
lemma inj_on_apfst [simp]: "inj_on (apfst f) (A \<times> UNIV) \<longleftrightarrow> inj_on f A"
by (auto simp add: inj_on_def)
lemma inj_apfst [simp]: "inj (apfst f) \<longleftrightarrow> inj f"
using inj_on_apfst[of f UNIV] by simp
lemma inj_on_apsnd [simp]: "inj_on (apsnd f) (UNIV \<times> A) \<longleftrightarrow> inj_on f A"
by (auto simp add: inj_on_def)
lemma inj_apsnd [simp]: "inj (apsnd f) \<longleftrightarrow> inj f"
using inj_on_apsnd[of f UNIV] by simp
context
begin
qualified definition product :: "'a set \<Rightarrow> 'b set \<Rightarrow> ('a \<times> 'b) set"
where [code_abbrev]: "product A B = A \<times> B"
lemma member_product: "x \<in> Product_Type.product A B \<longleftrightarrow> x \<in> A \<times> B"
by (simp add: product_def)
end
text \<open>The following \<^const>\<open>map_prod\<close> lemmas are due to Joachim Breitner:\<close>
lemma map_prod_inj_on:
assumes "inj_on f A"
and "inj_on g B"
shows "inj_on (map_prod f g) (A \<times> B)"
proof (rule inj_onI)
fix x :: "'a \<times> 'c"
fix y :: "'a \<times> 'c"
assume "x \<in> A \<times> B"
then have "fst x \<in> A" and "snd x \<in> B" by auto
assume "y \<in> A \<times> B"
then have "fst y \<in> A" and "snd y \<in> B" by auto
assume "map_prod f g x = map_prod f g y"
then have "fst (map_prod f g x) = fst (map_prod f g y)" by auto
then have "f (fst x) = f (fst y)" by (cases x, cases y) auto
with \<open>inj_on f A\<close> and \<open>fst x \<in> A\<close> and \<open>fst y \<in> A\<close> have "fst x = fst y"
by (auto dest: inj_onD)
moreover from \<open>map_prod f g x = map_prod f g y\<close>
have "snd (map_prod f g x) = snd (map_prod f g y)" by auto
then have "g (snd x) = g (snd y)" by (cases x, cases y) auto
with \<open>inj_on g B\<close> and \<open>snd x \<in> B\<close> and \<open>snd y \<in> B\<close> have "snd x = snd y"
by (auto dest: inj_onD)
ultimately show "x = y" by (rule prod_eqI)
qed
lemma map_prod_surj:
fixes f :: "'a \<Rightarrow> 'b"
and g :: "'c \<Rightarrow> 'd"
assumes "surj f" and "surj g"
shows "surj (map_prod f g)"
unfolding surj_def
proof
fix y :: "'b \<times> 'd"
from \<open>surj f\<close> obtain a where "fst y = f a"
by (auto elim: surjE)
moreover
from \<open>surj g\<close> obtain b where "snd y = g b"
by (auto elim: surjE)
ultimately have "(fst y, snd y) = map_prod f g (a,b)"
by auto
then show "\<exists>x. y = map_prod f g x"
by auto
qed
lemma map_prod_surj_on:
assumes "f ` A = A'" and "g ` B = B'"
shows "map_prod f g ` (A \<times> B) = A' \<times> B'"
unfolding image_def
proof (rule set_eqI, rule iffI)
fix x :: "'a \<times> 'c"
assume "x \<in> {y::'a \<times> 'c. \<exists>x::'b \<times> 'd\<in>A \<times> B. y = map_prod f g x}"
then obtain y where "y \<in> A \<times> B" and "x = map_prod f g y"
by blast
from \<open>image f A = A'\<close> and \<open>y \<in> A \<times> B\<close> have "f (fst y) \<in> A'"
by auto
moreover from \<open>image g B = B'\<close> and \<open>y \<in> A \<times> B\<close> have "g (snd y) \<in> B'"
by auto
ultimately have "(f (fst y), g (snd y)) \<in> (A' \<times> B')"
by auto
with \<open>x = map_prod f g y\<close> show "x \<in> A' \<times> B'"
by (cases y) auto
next
fix x :: "'a \<times> 'c"
assume "x \<in> A' \<times> B'"
then have "fst x \<in> A'" and "snd x \<in> B'"
by auto
from \<open>image f A = A'\<close> and \<open>fst x \<in> A'\<close> have "fst x \<in> image f A"
by auto
then obtain a where "a \<in> A" and "fst x = f a"
by (rule imageE)
moreover from \<open>image g B = B'\<close> and \<open>snd x \<in> B'\<close> obtain b where "b \<in> B" and "snd x = g b"
by auto
ultimately have "(fst x, snd x) = map_prod f g (a, b)"
by auto
moreover from \<open>a \<in> A\<close> and \<open>b \<in> B\<close> have "(a , b) \<in> A \<times> B"
by auto
ultimately have "\<exists>y \<in> A \<times> B. x = map_prod f g y"
by auto
then show "x \<in> {x. \<exists>y \<in> A \<times> B. x = map_prod f g y}"
by auto
qed
subsection \<open>Simproc for rewriting a set comprehension into a pointfree expression\<close>
ML_file \<open>Tools/set_comprehension_pointfree.ML\<close>
setup \<open>
Code_Preproc.map_pre (fn ctxt => ctxt addsimprocs
[Simplifier.make_simproc \<^context> "set comprehension"
{lhss = [\<^term>\<open>Collect P\<close>],
proc = K Set_Comprehension_Pointfree.code_simproc}])
\<close>
subsection \<open>Lemmas about disjointness\<close>
lemma disjnt_Times1_iff [simp]: "disjnt (C \<times> A) (C \<times> B) \<longleftrightarrow> C = {} \<or> disjnt A B"
by (auto simp: disjnt_def)
lemma disjnt_Times2_iff [simp]: "disjnt (A \<times> C) (B \<times> C) \<longleftrightarrow> C = {} \<or> disjnt A B"
by (auto simp: disjnt_def)
lemma disjnt_Sigma_iff: "disjnt (Sigma A C) (Sigma B C) \<longleftrightarrow> (\<forall>i \<in> A\<inter>B. C i = {}) \<or> disjnt A B"
by (auto simp: disjnt_def)
subsection \<open>Inductively defined sets\<close>
(* simplify {(x1, ..., xn). (x1, ..., xn) : S} to S *)
simproc_setup Collect_mem ("Collect t") = \<open>
- fn _ => fn ctxt => fn ct =>
+ K (fn ctxt => fn ct =>
(case Thm.term_of ct of
S as Const (\<^const_name>\<open>Collect\<close>, Type (\<^type_name>\<open>fun\<close>, [_, T])) $ t =>
let val (u, _, ps) = HOLogic.strip_ptupleabs t in
(case u of
(c as Const (\<^const_name>\<open>Set.member\<close>, _)) $ q $ S' =>
(case try (HOLogic.strip_ptuple ps) q of
NONE => NONE
| SOME ts =>
if not (Term.is_open S') andalso
ts = map Bound (length ps downto 0)
then
let val simp =
full_simp_tac (put_simpset HOL_basic_ss ctxt
addsimps [@{thm split_paired_all}, @{thm case_prod_conv}]) 1
in
SOME (Goal.prove ctxt [] []
(Const (\<^const_name>\<open>Pure.eq\<close>, T --> T --> propT) $ S $ S')
(K (EVERY
[resolve_tac ctxt [eq_reflection] 1,
resolve_tac ctxt @{thms subset_antisym} 1,
resolve_tac ctxt @{thms subsetI} 1,
dresolve_tac ctxt @{thms CollectD} 1, simp,
resolve_tac ctxt @{thms subsetI} 1,
resolve_tac ctxt @{thms CollectI} 1, simp])))
end
else NONE)
| _ => NONE)
end
- | _ => NONE)
+ | _ => NONE))
\<close>
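An illustrative goal, assuming the simproc above is active in the default simpset: a comprehension that merely restates membership collapses to the underlying set.
lemma "{(x, y). (x, y) \<in> S} = S"
  by simp \<comment> \<open>expected to be rewritten by the \<open>Collect_mem\<close> simproc\<close>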
ML_file \<open>Tools/inductive_set.ML\<close>
subsection \<open>Legacy theorem bindings and duplicates\<close>
lemmas fst_conv = prod.sel(1)
lemmas snd_conv = prod.sel(2)
lemmas split_def = case_prod_unfold
lemmas split_beta' = case_prod_beta'
lemmas split_beta = prod.case_eq_if
lemmas split_conv = case_prod_conv
lemmas split = case_prod_conv
hide_const (open) prod
end
diff --git a/src/HOL/Real_Asymp/exp_log_expression.ML b/src/HOL/Real_Asymp/exp_log_expression.ML
--- a/src/HOL/Real_Asymp/exp_log_expression.ML
+++ b/src/HOL/Real_Asymp/exp_log_expression.ML
@@ -1,689 +1,689 @@
signature EXP_LOG_EXPRESSION = sig
exception DUP
datatype expr =
ConstExpr of term
| X
| Uminus of expr
| Add of expr * expr
| Minus of expr * expr
| Inverse of expr
| Mult of expr * expr
| Div of expr * expr
| Ln of expr
| Exp of expr
| Power of expr * term
| LnPowr of expr * expr
| ExpLn of expr
| Powr of expr * expr
| Powr_Nat of expr * expr
| Powr' of expr * term
| Root of expr * term
| Absolute of expr
| Sgn of expr
| Min of expr * expr
| Max of expr * expr
| Floor of expr
| Ceiling of expr
| Frac of expr
| NatMod of expr * expr
| Sin of expr
| Cos of expr
| ArcTan of expr
| Custom of string * term * expr list
val preproc_term_conv : Proof.context -> conv
val expr_to_term : expr -> term
val reify : Proof.context -> term -> expr * thm
val reify_simple : Proof.context -> term -> expr * thm
type custom_handler =
Lazy_Eval.eval_ctxt -> term -> thm list * Asymptotic_Basis.basis -> thm * Asymptotic_Basis.basis
val register_custom :
binding -> term -> custom_handler -> local_theory -> local_theory
val register_custom_from_thm :
binding -> thm -> custom_handler -> local_theory -> local_theory
val expand_custom : Proof.context -> string -> custom_handler option
val to_mathematica : expr -> string
val to_maple : expr -> string
val to_maxima : expr -> string
val to_sympy : expr -> string
val to_sage : expr -> string
val reify_mathematica : Proof.context -> term -> string
val reify_maple : Proof.context -> term -> string
val reify_maxima : Proof.context -> term -> string
val reify_sympy : Proof.context -> term -> string
val reify_sage : Proof.context -> term -> string
val limit_mathematica : string -> string
val limit_maple : string -> string
val limit_maxima : string -> string
val limit_sympy : string -> string
val limit_sage : string -> string
end
structure Exp_Log_Expression : EXP_LOG_EXPRESSION = struct
datatype expr =
ConstExpr of term
| X
| Uminus of expr
| Add of expr * expr
| Minus of expr * expr
| Inverse of expr
| Mult of expr * expr
| Div of expr * expr
| Ln of expr
| Exp of expr
| Power of expr * term
| LnPowr of expr * expr
| ExpLn of expr
| Powr of expr * expr
| Powr_Nat of expr * expr
| Powr' of expr * term
| Root of expr * term
| Absolute of expr
| Sgn of expr
| Min of expr * expr
| Max of expr * expr
| Floor of expr
| Ceiling of expr
| Frac of expr
| NatMod of expr * expr
| Sin of expr
| Cos of expr
| ArcTan of expr
| Custom of string * term * expr list
type custom_handler =
Lazy_Eval.eval_ctxt -> term -> thm list * Asymptotic_Basis.basis -> thm * Asymptotic_Basis.basis
type entry = {
name : string,
pat : term,
net_pat : term,
expand : custom_handler
}
type entry' = {
pat : term,
net_pat : term,
expand : custom_handler
}
exception DUP
structure Custom_Funs = Generic_Data
(
type T = {
name_table : entry' Name_Space.table,
net : entry Item_Net.T
}
fun eq_entry ({name = name1, ...}, {name = name2, ...}) = (name1 = name2)
val empty =
{
name_table = Name_Space.empty_table "exp_log_custom_function",
net = Item_Net.init eq_entry (fn {net_pat, ...} => [net_pat])
}
fun merge ({name_table = tbl1, net = net1}, {name_table = tbl2, net = net2}) =
{name_table = Name_Space.join_tables (fn _ => raise DUP) (tbl1, tbl2),
net = Item_Net.merge (net1, net2)}
)
fun rewrite' ctxt old_prems bounds thms ct =
let
val thy = Proof_Context.theory_of ctxt
fun apply_rule t thm =
let
val lhs = thm |> Thm.concl_of |> Logic.dest_equals |> fst
val _ = Pattern.first_order_match thy (lhs, t) (Vartab.empty, Vartab.empty)
val insts = (lhs, t) |> apply2 (Thm.cterm_of ctxt) |> Thm.first_order_match
val thm = Thm.instantiate insts thm
val prems = Thm.prems_of thm
val frees = fold Term.add_frees prems []
in
if exists (member op = bounds o fst) frees then
NONE
else
let
val thm' = thm OF (map (Thm.assume o Thm.cterm_of ctxt) prems)
val prems' = fold (insert op aconv) prems old_prems
val crhs = thm |> Thm.concl_of |> Logic.dest_equals |> snd |> Thm.cterm_of ctxt
in
SOME (thm', crhs, prems')
end
end
handle Pattern.MATCH => NONE
fun rewrite_subterm prems ct (Abs (x, _, _)) =
let
val ((v, ct'), ctxt') = Variable.dest_abs_cterm ct ctxt;
val (thm, prems) = rewrite' ctxt' prems (x :: bounds) thms ct'
in
if Thm.is_reflexive thm then
(Thm.reflexive ct, prems)
else
(Thm.abstract_rule x v thm, prems)
end
| rewrite_subterm prems ct (_ $ _) =
let
val (cs, ct) = Thm.dest_comb ct
val (thm, prems') = rewrite' ctxt prems bounds thms cs
val (thm', prems'') = rewrite' ctxt prems' bounds thms ct
in
(Thm.combination thm thm', prems'')
end
| rewrite_subterm prems ct _ = (Thm.reflexive ct, prems)
val t = Thm.term_of ct
in
case get_first (apply_rule t) thms of
NONE => rewrite_subterm old_prems ct t
| SOME (thm, rhs, prems) =>
case rewrite' ctxt prems bounds thms rhs of
(thm', prems) => (Thm.transitive thm thm', prems)
end
fun rewrite ctxt thms ct =
let
val thm1 = Thm.eta_long_conversion ct
val rhs = thm1 |> Thm.cprop_of |> Thm.dest_comb |> snd
val (thm2, prems) = rewrite' ctxt [] [] thms rhs
val rhs = thm2 |> Thm.cprop_of |> Thm.dest_comb |> snd
val thm3 = Thm.eta_conversion rhs
val thm = Thm.transitive thm1 (Thm.transitive thm2 thm3)
in
fold (fn prem => fn thm => Thm.implies_intr (Thm.cterm_of ctxt prem) thm) prems thm
end
fun preproc_term_conv ctxt =
let
val thms = Named_Theorems.get ctxt \<^named_theorems>\<open>real_asymp_reify_simps\<close>
val thms = map (fn thm => thm RS @{thm HOL.eq_reflection}) thms
in
rewrite ctxt thms
end
fun register_custom' binding pat expand context =
let
val n = pat |> fastype_of |> strip_type |> fst |> length
val maxidx = Term.maxidx_of_term pat
val vars = map (fn i => Var ((Name.uu_, maxidx + i), \<^typ>\<open>real\<close>)) (1 upto n)
val net_pat = Library.foldl betapply (pat, vars)
val {name_table = tbl, net = net} = Custom_Funs.get context
val entry' = {pat = pat, net_pat = net_pat, expand = expand}
val (name, tbl) = Name_Space.define context true (binding, entry') tbl
val entry = {name = name, pat = pat, net_pat = net_pat, expand = expand}
val net = Item_Net.update entry net
in
Custom_Funs.put {name_table = tbl, net = net} context
end
fun register_custom binding pat expand =
let
fun decl phi =
register_custom' binding (Morphism.term phi pat) expand
in
- Local_Theory.declaration {syntax = false, pervasive = false} decl
+ Local_Theory.declaration {syntax = false, pervasive = false, pos = Binding.pos_of binding} decl
end
fun register_custom_from_thm binding thm expand =
let
val pat = thm |> Thm.concl_of |> HOLogic.dest_Trueprop |> dest_comb |> snd
in
register_custom binding pat expand
end
fun expand_custom ctxt name =
let
val {name_table, ...} = Custom_Funs.get (Context.Proof ctxt)
in
case Name_Space.lookup name_table name of
NONE => NONE
| SOME {expand, ...} => SOME expand
end
fun expr_to_term e =
let
fun expr_to_term' (ConstExpr c) = c
| expr_to_term' X = Bound 0
| expr_to_term' (Add (a, b)) =
\<^term>\<open>(+) :: real => _\<close> $ expr_to_term' a $ expr_to_term' b
| expr_to_term' (Mult (a, b)) =
\<^term>\<open>(*) :: real => _\<close> $ expr_to_term' a $ expr_to_term' b
| expr_to_term' (Minus (a, b)) =
\<^term>\<open>(-) :: real => _\<close> $ expr_to_term' a $ expr_to_term' b
| expr_to_term' (Div (a, b)) =
\<^term>\<open>(/) :: real => _\<close> $ expr_to_term' a $ expr_to_term' b
| expr_to_term' (Uminus a) =
\<^term>\<open>uminus :: real => _\<close> $ expr_to_term' a
| expr_to_term' (Inverse a) =
\<^term>\<open>inverse :: real => _\<close> $ expr_to_term' a
| expr_to_term' (Ln a) =
\<^term>\<open>ln :: real => _\<close> $ expr_to_term' a
| expr_to_term' (Exp a) =
\<^term>\<open>exp :: real => _\<close> $ expr_to_term' a
| expr_to_term' (Powr (a,b)) =
\<^term>\<open>(powr) :: real => _\<close> $ expr_to_term' a $ expr_to_term' b
| expr_to_term' (Powr_Nat (a,b)) =
\<^term>\<open>powr_nat :: real => _\<close> $ expr_to_term' a $ expr_to_term' b
| expr_to_term' (LnPowr (a,b)) =
\<^term>\<open>ln :: real => _\<close> $
(\<^term>\<open>(powr) :: real => _\<close> $ expr_to_term' a $ expr_to_term' b)
| expr_to_term' (ExpLn a) =
\<^term>\<open>exp :: real => _\<close> $ (\<^term>\<open>ln :: real => _\<close> $ expr_to_term' a)
| expr_to_term' (Powr' (a,b)) =
\<^term>\<open>(powr) :: real => _\<close> $ expr_to_term' a $ b
| expr_to_term' (Power (a,b)) =
\<^term>\<open>(^) :: real => _\<close> $ expr_to_term' a $ b
| expr_to_term' (Floor a) =
\<^term>\<open>Multiseries_Expansion.rfloor\<close> $ expr_to_term' a
| expr_to_term' (Ceiling a) =
\<^term>\<open>Multiseries_Expansion.rceil\<close> $ expr_to_term' a
| expr_to_term' (Frac a) =
\<^term>\<open>Archimedean_Field.frac :: real \<Rightarrow> real\<close> $ expr_to_term' a
| expr_to_term' (NatMod (a,b)) =
\<^term>\<open>Multiseries_Expansion.rnatmod\<close> $ expr_to_term' a $ expr_to_term' b
| expr_to_term' (Root (a,b)) =
\<^term>\<open>root :: nat \<Rightarrow> real \<Rightarrow> _\<close> $ b $ expr_to_term' a
| expr_to_term' (Sin a) =
\<^term>\<open>sin :: real => _\<close> $ expr_to_term' a
| expr_to_term' (ArcTan a) =
\<^term>\<open>arctan :: real => _\<close> $ expr_to_term' a
| expr_to_term' (Cos a) =
\<^term>\<open>cos :: real => _\<close> $ expr_to_term' a
| expr_to_term' (Absolute a) =
\<^term>\<open>abs :: real => _\<close> $ expr_to_term' a
| expr_to_term' (Sgn a) =
\<^term>\<open>sgn :: real => _\<close> $ expr_to_term' a
| expr_to_term' (Min (a,b)) =
\<^term>\<open>min :: real => _\<close> $ expr_to_term' a $ expr_to_term' b
| expr_to_term' (Max (a,b)) =
\<^term>\<open>max :: real => _\<close> $ expr_to_term' a $ expr_to_term' b
| expr_to_term' (Custom (_, t, args)) = Envir.beta_eta_contract (
fold (fn e => fn t => betapply (t, expr_to_term' e )) args t)
in
Abs ("x", \<^typ>\<open>real\<close>, expr_to_term' e)
end
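(* Example (following the equations above): the reified expression
     Mult (X, Exp (Uminus X))
   is mapped back to an abstraction over a real variable whose body is
     times $ Bound 0 $ (exp $ (uminus $ Bound 0)),
   i.e. the function \<lambda>x. x * exp (- x). *)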
fun reify_custom ctxt t =
let
val thy = Proof_Context.theory_of ctxt
val t = Envir.beta_eta_contract t
val t' = Envir.beta_eta_contract (Term.abs ("x", \<^typ>\<open>real\<close>) t)
val {net, ...} = Custom_Funs.get (Context.Proof ctxt)
val entries = Item_Net.retrieve_matching net (Term.subst_bound (Free ("x", \<^typ>\<open>real\<close>), t))
fun go {pat, name, ...} =
let
val n = pat |> fastype_of |> strip_type |> fst |> length
val maxidx = Term.maxidx_of_term t'
val vs = map (fn i => (Name.uu_, maxidx + i)) (1 upto n)
val args = map (fn v => Var (v, \<^typ>\<open>real => real\<close>) $ Bound 0) vs
val pat' =
Envir.beta_eta_contract (Term.abs ("x", \<^typ>\<open>real\<close>)
(Library.foldl betapply (pat, args)))
val (T_insts, insts) = Pattern.match thy (pat', t') (Vartab.empty, Vartab.empty)
fun map_option _ [] acc = SOME (rev acc)
| map_option f (x :: xs) acc =
case f x of
NONE => NONE
| SOME y => map_option f xs (y :: acc)
val t = Envir.subst_term (T_insts, insts) pat
in
Option.map (pair (name, t)) (map_option (Option.map snd o Vartab.lookup insts) vs [])
end
handle Pattern.MATCH => NONE
in
get_first go entries
end
fun reify_aux ctxt t' t =
let
fun is_const t =
fastype_of (Abs ("x", \<^typ>\<open>real\<close>, t)) = \<^typ>\<open>real \<Rightarrow> real\<close>
andalso not (exists_subterm (fn t => t = Bound 0) t)
fun is_const' t = not (exists_subterm (fn t => t = Bound 0) t)
fun reify'' (\<^term>\<open>(+) :: real => _\<close> $ s $ t) =
Add (reify' s, reify' t)
| reify'' (\<^term>\<open>(-) :: real => _\<close> $ s $ t) =
Minus (reify' s, reify' t)
| reify'' (\<^term>\<open>(*) :: real => _\<close> $ s $ t) =
Mult (reify' s, reify' t)
| reify'' (\<^term>\<open>(/) :: real => _\<close> $ s $ t) =
Div (reify' s, reify' t)
| reify'' (\<^term>\<open>uminus :: real => _\<close> $ s) =
Uminus (reify' s)
| reify'' (\<^term>\<open>inverse :: real => _\<close> $ s) =
Inverse (reify' s)
| reify'' (\<^term>\<open>ln :: real => _\<close> $ (\<^term>\<open>(powr) :: real => _\<close> $ s $ t)) =
LnPowr (reify' s, reify' t)
| reify'' (\<^term>\<open>exp :: real => _\<close> $ (\<^term>\<open>ln :: real => _\<close> $ s)) =
ExpLn (reify' s)
| reify'' (\<^term>\<open>ln :: real => _\<close> $ s) =
Ln (reify' s)
| reify'' (\<^term>\<open>exp :: real => _\<close> $ s) =
Exp (reify' s)
| reify'' (\<^term>\<open>(powr) :: real => _\<close> $ s $ t) =
(if is_const t then Powr' (reify' s, t) else Powr (reify' s, reify' t))
| reify'' (\<^term>\<open>powr_nat :: real => _\<close> $ s $ t) =
Powr_Nat (reify' s, reify' t)
| reify'' (\<^term>\<open>(^) :: real => _\<close> $ s $ t) =
(if is_const' t then Power (reify' s, t) else raise TERM ("reify", [t']))
| reify'' (\<^term>\<open>root\<close> $ s $ t) =
(if is_const' s then Root (reify' t, s) else raise TERM ("reify", [t']))
| reify'' (\<^term>\<open>abs :: real => _\<close> $ s) =
Absolute (reify' s)
| reify'' (\<^term>\<open>sgn :: real => _\<close> $ s) =
Sgn (reify' s)
| reify'' (\<^term>\<open>min :: real => _\<close> $ s $ t) =
Min (reify' s, reify' t)
| reify'' (\<^term>\<open>max :: real => _\<close> $ s $ t) =
Max (reify' s, reify' t)
| reify'' (\<^term>\<open>Multiseries_Expansion.rfloor\<close> $ s) =
Floor (reify' s)
| reify'' (\<^term>\<open>Multiseries_Expansion.rceil\<close> $ s) =
Ceiling (reify' s)
| reify'' (\<^term>\<open>Archimedean_Field.frac :: real \<Rightarrow> real\<close> $ s) =
Frac (reify' s)
| reify'' (\<^term>\<open>Multiseries_Expansion.rnatmod\<close> $ s $ t) =
NatMod (reify' s, reify' t)
| reify'' (\<^term>\<open>sin :: real => _\<close> $ s) =
Sin (reify' s)
| reify'' (\<^term>\<open>arctan :: real => _\<close> $ s) =
ArcTan (reify' s)
| reify'' (\<^term>\<open>cos :: real => _\<close> $ s) =
Cos (reify' s)
| reify'' (Bound 0) = X
| reify'' t =
case reify_custom ctxt t of
SOME ((name, t), ts) =>
let
val args = map (reify_aux ctxt t') ts
in
Custom (name, t, args)
end
| NONE => raise TERM ("reify", [t'])
and reify' t = if is_const t then ConstExpr t else reify'' t
in
case Envir.eta_long [] t of
Abs (_, \<^typ>\<open>real\<close>, t'') => reify' t''
| _ => raise TERM ("reify", [t])
end
fun reify ctxt t =
let
val thm = preproc_term_conv ctxt (Thm.cterm_of ctxt t)
val rhs = thm |> Thm.concl_of |> Logic.dest_equals |> snd
in
(reify_aux ctxt t rhs, thm)
end
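(* Rough usage sketch: given a term t of type real => real, "reify ctxt t" first normalises
   t with the real_asymp_reify_simps preprocessing step and then reifies the result, so
   (assuming preprocessing leaves the term unchanged)
     reify ctxt \<^term>\<open>\<lambda>x::real. x * ln x\<close>
   returns the pair of Mult (X, Ln X) and the equation produced by preproc_term_conv. *)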
fun reify_simple_aux ctxt t' t =
let
fun is_const t =
fastype_of (Abs ("x", \<^typ>\<open>real\<close>, t)) = \<^typ>\<open>real \<Rightarrow> real\<close>
andalso not (exists_subterm (fn t => t = Bound 0) t)
fun is_const' t = not (exists_subterm (fn t => t = Bound 0) t)
fun reify'' (\<^term>\<open>(+) :: real => _\<close> $ s $ t) =
Add (reify'' s, reify'' t)
| reify'' (\<^term>\<open>(-) :: real => _\<close> $ s $ t) =
Minus (reify'' s, reify'' t)
| reify'' (\<^term>\<open>(*) :: real => _\<close> $ s $ t) =
Mult (reify'' s, reify'' t)
| reify'' (\<^term>\<open>(/) :: real => _\<close> $ s $ t) =
Div (reify'' s, reify'' t)
| reify'' (\<^term>\<open>uminus :: real => _\<close> $ s) =
Uminus (reify'' s)
| reify'' (\<^term>\<open>inverse :: real => _\<close> $ s) =
Inverse (reify'' s)
| reify'' (\<^term>\<open>ln :: real => _\<close> $ s) =
Ln (reify'' s)
| reify'' (\<^term>\<open>exp :: real => _\<close> $ s) =
Exp (reify'' s)
| reify'' (\<^term>\<open>(powr) :: real => _\<close> $ s $ t) =
Powr (reify'' s, reify'' t)
| reify'' (\<^term>\<open>powr_nat :: real => _\<close> $ s $ t) =
Powr_Nat (reify'' s, reify'' t)
| reify'' (\<^term>\<open>(^) :: real => _\<close> $ s $ t) =
(if is_const' t then Power (reify'' s, t) else raise TERM ("reify", [t']))
| reify'' (\<^term>\<open>root\<close> $ s $ t) =
(if is_const' s then Root (reify'' t, s) else raise TERM ("reify", [t']))
| reify'' (\<^term>\<open>abs :: real => _\<close> $ s) =
Absolute (reify'' s)
| reify'' (\<^term>\<open>sgn :: real => _\<close> $ s) =
Sgn (reify'' s)
| reify'' (\<^term>\<open>min :: real => _\<close> $ s $ t) =
Min (reify'' s, reify'' t)
| reify'' (\<^term>\<open>max :: real => _\<close> $ s $ t) =
Max (reify'' s, reify'' t)
| reify'' (\<^term>\<open>Multiseries_Expansion.rfloor\<close> $ s) =
Floor (reify'' s)
| reify'' (\<^term>\<open>Multiseries_Expansion.rceil\<close> $ s) =
Ceiling (reify'' s)
| reify'' (\<^term>\<open>Archimedean_Field.frac :: real \<Rightarrow> real\<close> $ s) =
Frac (reify'' s)
| reify'' (\<^term>\<open>Multiseries_Expansion.rnatmod\<close> $ s $ t) =
NatMod (reify'' s, reify'' t)
| reify'' (\<^term>\<open>sin :: real => _\<close> $ s) =
Sin (reify'' s)
| reify'' (\<^term>\<open>cos :: real => _\<close> $ s) =
Cos (reify'' s)
| reify'' (Bound 0) = X
| reify'' t =
if is_const t then
ConstExpr t
else
case reify_custom ctxt t of
SOME ((name, t), ts) =>
let
val args = map (reify_aux ctxt t') ts
in
Custom (name, t, args)
end
| NONE => raise TERM ("reify", [t'])
in
case Envir.eta_long [] t of
Abs (_, \<^typ>\<open>real\<close>, t'') => reify'' t''
| _ => raise TERM ("reify", [t])
end
fun reify_simple ctxt t =
let
val thm = preproc_term_conv ctxt (Thm.cterm_of ctxt t)
val rhs = thm |> Thm.concl_of |> Logic.dest_equals |> snd
in
(reify_simple_aux ctxt t rhs, thm)
end
fun simple_print_const (Free (x, _)) = x
| simple_print_const (\<^term>\<open>uminus :: real => real\<close> $ a) =
"(-" ^ simple_print_const a ^ ")"
| simple_print_const (\<^term>\<open>(+) :: real => _\<close> $ a $ b) =
"(" ^ simple_print_const a ^ "+" ^ simple_print_const b ^ ")"
| simple_print_const (\<^term>\<open>(-) :: real => _\<close> $ a $ b) =
"(" ^ simple_print_const a ^ "-" ^ simple_print_const b ^ ")"
| simple_print_const (\<^term>\<open>(*) :: real => _\<close> $ a $ b) =
"(" ^ simple_print_const a ^ "*" ^ simple_print_const b ^ ")"
| simple_print_const (\<^term>\<open>inverse :: real => _\<close> $ a) =
"(1 / " ^ simple_print_const a ^ ")"
| simple_print_const (\<^term>\<open>(/) :: real => _\<close> $ a $ b) =
"(" ^ simple_print_const a ^ "/" ^ simple_print_const b ^ ")"
| simple_print_const t = Int.toString (snd (HOLogic.dest_number t))
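(* Examples of this fall-back printer: a free variable c prints as "c", the numeral
   \<^term>\<open>3 :: real\<close> prints as "3" via HOLogic.dest_number, and a compound constant
   such as (c + 1) / 2 prints fully parenthesised as "((c+1)/2)". *)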
fun to_mathematica (Add (a, b)) = "(" ^ to_mathematica a ^ " + " ^ to_mathematica b ^ ")"
| to_mathematica (Minus (a, b)) = "(" ^ to_mathematica a ^ " - " ^ to_mathematica b ^ ")"
| to_mathematica (Mult (a, b)) = "(" ^ to_mathematica a ^ " * " ^ to_mathematica b ^ ")"
| to_mathematica (Div (a, b)) = "(" ^ to_mathematica a ^ " / " ^ to_mathematica b ^ ")"
| to_mathematica (Powr (a, b)) = "(" ^ to_mathematica a ^ " ^ " ^ to_mathematica b ^ ")"
| to_mathematica (Powr_Nat (a, b)) = "(" ^ to_mathematica a ^ " ^ " ^ to_mathematica b ^ ")"
| to_mathematica (Powr' (a, b)) = "(" ^ to_mathematica a ^ " ^ " ^
to_mathematica (ConstExpr b) ^ ")"
| to_mathematica (LnPowr (a, b)) = "Log[" ^ to_mathematica a ^ " ^ " ^ to_mathematica b ^ "]"
| to_mathematica (ExpLn a) = "Exp[Log[" ^ to_mathematica a ^ "]]"
| to_mathematica (Power (a, b)) = "(" ^ to_mathematica a ^ " ^ " ^
to_mathematica (ConstExpr b) ^ ")"
| to_mathematica (Root (a, \<^term>\<open>2::real\<close>)) = "Sqrt[" ^ to_mathematica a ^ "]"
| to_mathematica (Root (a, b)) = "Surd[" ^ to_mathematica a ^ ", " ^
to_mathematica (ConstExpr b) ^ "]"
| to_mathematica (Uminus a) = "(-" ^ to_mathematica a ^ ")"
| to_mathematica (Inverse a) = "(1/(" ^ to_mathematica a ^ "))"
| to_mathematica (Exp a) = "Exp[" ^ to_mathematica a ^ "]"
| to_mathematica (Ln a) = "Log[" ^ to_mathematica a ^ "]"
| to_mathematica (Sin a) = "Sin[" ^ to_mathematica a ^ "]"
| to_mathematica (Cos a) = "Cos[" ^ to_mathematica a ^ "]"
| to_mathematica (ArcTan a) = "ArcTan[" ^ to_mathematica a ^ "]"
| to_mathematica (Absolute a) = "Abs[" ^ to_mathematica a ^ "]"
| to_mathematica (Sgn a) = "Sign[" ^ to_mathematica a ^ "]"
| to_mathematica (Min (a, b)) = "Min[" ^ to_mathematica a ^ ", " ^ to_mathematica b ^ "]"
| to_mathematica (Max (a, b)) = "Max[" ^ to_mathematica a ^ ", " ^ to_mathematica b ^ "]"
| to_mathematica (Floor a) = "Floor[" ^ to_mathematica a ^ "]"
| to_mathematica (Ceiling a) = "Ceiling[" ^ to_mathematica a ^ "]"
| to_mathematica (Frac a) = "Mod[" ^ to_mathematica a ^ ", 1]"
| to_mathematica (ConstExpr t) = simple_print_const t
| to_mathematica X = "X"
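(* Example: the reified expression Mult (X, Exp (Uminus X)) is rendered for Mathematica as
   "(X * Exp[(-X)])"; combined with limit_mathematica below this becomes a complete query. *)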
(* TODO: correct translation of frac() for Maple and Sage *)
fun to_maple (Add (a, b)) = "(" ^ to_maple a ^ " + " ^ to_maple b ^ ")"
| to_maple (Minus (a, b)) = "(" ^ to_maple a ^ " - " ^ to_maple b ^ ")"
| to_maple (Mult (a, b)) = "(" ^ to_maple a ^ " * " ^ to_maple b ^ ")"
| to_maple (Div (a, b)) = "(" ^ to_maple a ^ " / " ^ to_maple b ^ ")"
| to_maple (Powr (a, b)) = "(" ^ to_maple a ^ " ^ " ^ to_maple b ^ ")"
| to_maple (Powr_Nat (a, b)) = "(" ^ to_maple a ^ " ^ " ^ to_maple b ^ ")"
| to_maple (Powr' (a, b)) = "(" ^ to_maple a ^ " ^ " ^
to_maple (ConstExpr b) ^ ")"
| to_maple (LnPowr (a, b)) = "ln(" ^ to_maple a ^ " ^ " ^ to_maple b ^ ")"
| to_maple (ExpLn a) = "exp(ln(" ^ to_maple a ^ "))"
| to_maple (Power (a, b)) = "(" ^ to_maple a ^ " ^ " ^
to_maple (ConstExpr b) ^ ")"
| to_maple (Root (a, \<^term>\<open>2::real\<close>)) = "sqrt(" ^ to_maple a ^ ")"
| to_maple (Root (a, b)) = "root(" ^ to_maple a ^ ", " ^
to_maple (ConstExpr b) ^ ")"
| to_maple (Uminus a) = "(-" ^ to_maple a ^ ")"
| to_maple (Inverse a) = "(1/(" ^ to_maple a ^ "))"
| to_maple (Exp a) = "exp(" ^ to_maple a ^ ")"
| to_maple (Ln a) = "ln(" ^ to_maple a ^ ")"
| to_maple (Sin a) = "sin(" ^ to_maple a ^ ")"
| to_maple (Cos a) = "cos(" ^ to_maple a ^ ")"
| to_maple (ArcTan a) = "arctan(" ^ to_maple a ^ ")"
| to_maple (Absolute a) = "abs(" ^ to_maple a ^ ")"
| to_maple (Sgn a) = "signum(" ^ to_maple a ^ ")"
| to_maple (Min (a, b)) = "min(" ^ to_maple a ^ ", " ^ to_maple b ^ ")"
| to_maple (Max (a, b)) = "max(" ^ to_maple a ^ ", " ^ to_maple b ^ ")"
| to_maple (Floor a) = "floor(" ^ to_maple a ^ ")"
| to_maple (Ceiling a) = "ceil(" ^ to_maple a ^ ")"
| to_maple (Frac a) = "frac(" ^ to_maple a ^ ")"
| to_maple (ConstExpr t) = simple_print_const t
| to_maple X = "x"
fun to_maxima (Add (a, b)) = "(" ^ to_maxima a ^ " + " ^ to_maxima b ^ ")"
| to_maxima (Minus (a, b)) = "(" ^ to_maxima a ^ " - " ^ to_maxima b ^ ")"
| to_maxima (Mult (a, b)) = "(" ^ to_maxima a ^ " * " ^ to_maxima b ^ ")"
| to_maxima (Div (a, b)) = "(" ^ to_maxima a ^ " / " ^ to_maxima b ^ ")"
| to_maxima (Powr (a, b)) = "(" ^ to_maxima a ^ " ^ " ^ to_maxima b ^ ")"
| to_maxima (Powr_Nat (a, b)) = "(" ^ to_maxima a ^ " ^ " ^ to_maxima b ^ ")"
| to_maxima (Powr' (a, b)) = "(" ^ to_maxima a ^ " ^ " ^
to_maxima (ConstExpr b) ^ ")"
| to_maxima (ExpLn a) = "exp (log (" ^ to_maxima a ^ "))"
| to_maxima (LnPowr (a, b)) = "log(" ^ to_maxima a ^ " ^ " ^ to_maxima b ^ ")"
| to_maxima (Power (a, b)) = "(" ^ to_maxima a ^ " ^ " ^
to_maxima (ConstExpr b) ^ ")"
| to_maxima (Root (a, \<^term>\<open>2::real\<close>)) = "sqrt(" ^ to_maxima a ^ ")"
| to_maxima (Root (a, b)) = to_maxima a ^ "^(1/" ^
to_maxima (ConstExpr b) ^ ")"
| to_maxima (Uminus a) = "(-" ^ to_maxima a ^ ")"
| to_maxima (Inverse a) = "(1/(" ^ to_maxima a ^ "))"
| to_maxima (Exp a) = "exp(" ^ to_maxima a ^ ")"
| to_maxima (Ln a) = "log(" ^ to_maxima a ^ ")"
| to_maxima (Sin a) = "sin(" ^ to_maxima a ^ ")"
| to_maxima (Cos a) = "cos(" ^ to_maxima a ^ ")"
| to_maxima (ArcTan a) = "atan(" ^ to_maxima a ^ ")"
| to_maxima (Absolute a) = "abs(" ^ to_maxima a ^ ")"
| to_maxima (Sgn a) = "signum(" ^ to_maxima a ^ ")"
| to_maxima (Min (a, b)) = "min(" ^ to_maxima a ^ ", " ^ to_maxima b ^ ")"
| to_maxima (Max (a, b)) = "max(" ^ to_maxima a ^ ", " ^ to_maxima b ^ ")"
| to_maxima (Floor a) = "floor(" ^ to_maxima a ^ ")"
| to_maxima (Ceiling a) = "ceiling(" ^ to_maxima a ^ ")"
| to_maxima (Frac a) = let val x = to_maxima a in "(" ^ x ^ " - floor(" ^ x ^ "))" end
| to_maxima (ConstExpr t) = simple_print_const t
| to_maxima X = "x"
fun to_sympy (Add (a, b)) = "(" ^ to_sympy a ^ " + " ^ to_sympy b ^ ")"
| to_sympy (Minus (a, b)) = "(" ^ to_sympy a ^ " - " ^ to_sympy b ^ ")"
| to_sympy (Mult (a, b)) = "(" ^ to_sympy a ^ " * " ^ to_sympy b ^ ")"
| to_sympy (Div (a, b)) = "(" ^ to_sympy a ^ " / " ^ to_sympy b ^ ")"
| to_sympy (Powr (a, b)) = "(" ^ to_sympy a ^ " ** " ^ to_sympy b ^ ")"
| to_sympy (Powr_Nat (a, b)) = "(" ^ to_sympy a ^ " ** " ^ to_sympy b ^ ")"
| to_sympy (Powr' (a, b)) = "(" ^ to_sympy a ^ " ** " ^
to_sympy (ConstExpr b) ^ ")"
| to_sympy (ExpLn a) = "exp (log (" ^ to_sympy a ^ "))"
| to_sympy (LnPowr (a, b)) = "log(" ^ to_sympy a ^ " ** " ^ to_sympy b ^ ")"
| to_sympy (Power (a, b)) = "(" ^ to_sympy a ^ " ** " ^
to_sympy (ConstExpr b) ^ ")"
| to_sympy (Root (a, \<^term>\<open>2::real\<close>)) = "sqrt(" ^ to_sympy a ^ ")"
| to_sympy (Root (a, b)) = "root(" ^ to_sympy a ^ ", " ^ to_sympy (ConstExpr b) ^ ")"
| to_sympy (Uminus a) = "(-" ^ to_sympy a ^ ")"
| to_sympy (Inverse a) = "(1/(" ^ to_sympy a ^ "))"
| to_sympy (Exp a) = "exp(" ^ to_sympy a ^ ")"
| to_sympy (Ln a) = "log(" ^ to_sympy a ^ ")"
| to_sympy (Sin a) = "sin(" ^ to_sympy a ^ ")"
| to_sympy (Cos a) = "cos(" ^ to_sympy a ^ ")"
| to_sympy (ArcTan a) = "atan(" ^ to_sympy a ^ ")"
| to_sympy (Absolute a) = "abs(" ^ to_sympy a ^ ")"
| to_sympy (Sgn a) = "sign(" ^ to_sympy a ^ ")"
| to_sympy (Min (a, b)) = "min(" ^ to_sympy a ^ ", " ^ to_sympy b ^ ")"
| to_sympy (Max (a, b)) = "max(" ^ to_sympy a ^ ", " ^ to_sympy b ^ ")"
| to_sympy (Floor a) = "floor(" ^ to_sympy a ^ ")"
| to_sympy (Ceiling a) = "ceiling(" ^ to_sympy a ^ ")"
| to_sympy (Frac a) = "frac(" ^ to_sympy a ^ ")"
| to_sympy (ConstExpr t) = simple_print_const t
| to_sympy X = "x"
fun to_sage (Add (a, b)) = "(" ^ to_sage a ^ " + " ^ to_sage b ^ ")"
| to_sage (Minus (a, b)) = "(" ^ to_sage a ^ " - " ^ to_sage b ^ ")"
| to_sage (Mult (a, b)) = "(" ^ to_sage a ^ " * " ^ to_sage b ^ ")"
| to_sage (Div (a, b)) = "(" ^ to_sage a ^ " / " ^ to_sage b ^ ")"
| to_sage (Powr (a, b)) = "(" ^ to_sage a ^ " ^ " ^ to_sage b ^ ")"
| to_sage (Powr_Nat (a, b)) = "(" ^ to_sage a ^ " ^ " ^ to_sage b ^ ")"
| to_sage (Powr' (a, b)) = "(" ^ to_sage a ^ " ^ " ^
to_sage (ConstExpr b) ^ ")"
| to_sage (ExpLn a) = "exp (log (" ^ to_sage a ^ "))"
| to_sage (LnPowr (a, b)) = "log(" ^ to_sage a ^ " ^ " ^ to_sage b ^ ")"
| to_sage (Power (a, b)) = "(" ^ to_sage a ^ " ^ " ^
to_sage (ConstExpr b) ^ ")"
| to_sage (Root (a, \<^term>\<open>2::real\<close>)) = "sqrt(" ^ to_sage a ^ ")"
| to_sage (Root (a, b)) = to_sage a ^ "^(1/" ^ to_sage (ConstExpr b) ^ ")"
| to_sage (Uminus a) = "(-" ^ to_sage a ^ ")"
| to_sage (Inverse a) = "(1/(" ^ to_sage a ^ "))"
| to_sage (Exp a) = "exp(" ^ to_sage a ^ ")"
| to_sage (Ln a) = "log(" ^ to_sage a ^ ")"
| to_sage (Sin a) = "sin(" ^ to_sage a ^ ")"
| to_sage (Cos a) = "cos(" ^ to_sage a ^ ")"
| to_sage (ArcTan a) = "atan(" ^ to_sage a ^ ")"
| to_sage (Absolute a) = "abs(" ^ to_sage a ^ ")"
| to_sage (Sgn a) = "sign(" ^ to_sage a ^ ")"
| to_sage (Min (a, b)) = "min(" ^ to_sage a ^ ", " ^ to_sage b ^ ")"
| to_sage (Max (a, b)) = "max(" ^ to_sage a ^ ", " ^ to_sage b ^ ")"
| to_sage (Floor a) = "floor(" ^ to_sage a ^ ")"
| to_sage (Ceiling a) = "ceil(" ^ to_sage a ^ ")"
| to_sage (Frac a) = "frac(" ^ to_sage a ^ ")"
| to_sage (ConstExpr t) = simple_print_const t
| to_sage X = "x"
fun reify_mathematica ctxt = to_mathematica o fst o reify_simple ctxt
fun reify_maple ctxt = to_maple o fst o reify_simple ctxt
fun reify_maxima ctxt = to_maxima o fst o reify_simple ctxt
fun reify_sympy ctxt = to_sympy o fst o reify_simple ctxt
fun reify_sage ctxt = to_sage o fst o reify_simple ctxt
fun limit_mathematica s = "Limit[" ^ s ^ ", X -> Infinity]"
fun limit_maple s = "limit(" ^ s ^ ", x = infinity);"
fun limit_maxima s = "limit(" ^ s ^ ", x, inf);"
fun limit_sympy s = "limit(" ^ s ^ ", x, oo)"
fun limit_sage s = "limit(" ^ s ^ ", x = Infinity)"
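(* Putting the pieces together (sketch, assuming preprocessing leaves the term unchanged):
   for a function such as \<lambda>x::real. x * exp (- x), the diagnostic query for
   Mathematica would be built as
     limit_mathematica (reify_mathematica ctxt \<^term>\<open>\<lambda>x::real. x * exp (- x)\<close>)
   yielding the string "Limit[(X * Exp[(-X)]), X -> Infinity]"; the other limit_*
   functions produce the analogous queries for Maple, Maxima, SymPy and Sage. *)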
end
diff --git a/src/HOL/Set.thy b/src/HOL/Set.thy
--- a/src/HOL/Set.thy
+++ b/src/HOL/Set.thy
@@ -1,2044 +1,2042 @@
(* Title: HOL/Set.thy
Author: Tobias Nipkow
Author: Lawrence C Paulson
Author: Markus Wenzel
*)
section \<open>Set theory for higher-order logic\<close>
theory Set
imports Lattices Boolean_Algebras
begin
subsection \<open>Sets as predicates\<close>
typedecl 'a set
axiomatization Collect :: "('a \<Rightarrow> bool) \<Rightarrow> 'a set" \<comment> \<open>comprehension\<close>
and member :: "'a \<Rightarrow> 'a set \<Rightarrow> bool" \<comment> \<open>membership\<close>
where mem_Collect_eq [iff, code_unfold]: "member a (Collect P) = P a"
and Collect_mem_eq [simp]: "Collect (\<lambda>x. member x A) = A"
notation
member ("'(\<in>')") and
member ("(_/ \<in> _)" [51, 51] 50)
abbreviation not_member
where "not_member x A \<equiv> \<not> (x \<in> A)" \<comment> \<open>non-membership\<close>
notation
not_member ("'(\<notin>')") and
not_member ("(_/ \<notin> _)" [51, 51] 50)
notation (ASCII)
member ("'(:')") and
member ("(_/ : _)" [51, 51] 50) and
not_member ("'(~:')") and
not_member ("(_/ ~: _)" [51, 51] 50)
text \<open>Set comprehensions\<close>
syntax
"_Coll" :: "pttrn \<Rightarrow> bool \<Rightarrow> 'a set" ("(1{_./ _})")
translations
"{x. P}" \<rightleftharpoons> "CONST Collect (\<lambda>x. P)"
syntax (ASCII)
"_Collect" :: "pttrn \<Rightarrow> 'a set \<Rightarrow> bool \<Rightarrow> 'a set" ("(1{(_/: _)./ _})")
syntax
"_Collect" :: "pttrn \<Rightarrow> 'a set \<Rightarrow> bool \<Rightarrow> 'a set" ("(1{(_/ \<in> _)./ _})")
translations
"{p:A. P}" \<rightharpoonup> "CONST Collect (\<lambda>p. p \<in> A \<and> P)"
lemma CollectI: "P a \<Longrightarrow> a \<in> {x. P x}"
by simp
lemma CollectD: "a \<in> {x. P x} \<Longrightarrow> P a"
by simp
lemma Collect_cong: "(\<And>x. P x = Q x) \<Longrightarrow> {x. P x} = {x. Q x}"
by simp
text \<open>
Simproc for pulling \<open>x = t\<close> in \<open>{x. \<dots> \<and> x = t \<and> \<dots>}\<close>
to the front (and similarly for \<open>t = x\<close>):
\<close>
simproc_setup defined_Collect ("{x. P x \<and> Q x}") = \<open>
- fn _ => Quantifier1.rearrange_Collect
+ K (Quantifier1.rearrange_Collect
(fn ctxt =>
resolve_tac ctxt @{thms Collect_cong} 1 THEN
resolve_tac ctxt @{thms iffI} 1 THEN
ALLGOALS
(EVERY' [REPEAT_DETERM o eresolve_tac ctxt @{thms conjE},
- DEPTH_SOLVE_1 o (assume_tac ctxt ORELSE' resolve_tac ctxt @{thms conjI})]))
+ DEPTH_SOLVE_1 o (assume_tac ctxt ORELSE' resolve_tac ctxt @{thms conjI})])))
\<close>
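(* For instance, this simproc rearranges {x. P x \<and> x = a} into {x. x = a \<and> P x}, after
   which the one-point rules (cf. Collect_conv_if below) reduce it to (if P a then {a} else {}). *)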
lemmas CollectE = CollectD [elim_format]
lemma set_eqI:
assumes "\<And>x. x \<in> A \<longleftrightarrow> x \<in> B"
shows "A = B"
proof -
from assms have "{x. x \<in> A} = {x. x \<in> B}"
by simp
then show ?thesis by simp
qed
lemma set_eq_iff: "A = B \<longleftrightarrow> (\<forall>x. x \<in> A \<longleftrightarrow> x \<in> B)"
by (auto intro:set_eqI)
lemma Collect_eqI:
assumes "\<And>x. P x = Q x"
shows "Collect P = Collect Q"
using assms by (auto intro: set_eqI)
text \<open>Lifting of predicate class instances\<close>
instantiation set :: (type) boolean_algebra
begin
definition less_eq_set
where "A \<le> B \<longleftrightarrow> (\<lambda>x. member x A) \<le> (\<lambda>x. member x B)"
definition less_set
where "A < B \<longleftrightarrow> (\<lambda>x. member x A) < (\<lambda>x. member x B)"
definition inf_set
where "A \<sqinter> B = Collect ((\<lambda>x. member x A) \<sqinter> (\<lambda>x. member x B))"
definition sup_set
where "A \<squnion> B = Collect ((\<lambda>x. member x A) \<squnion> (\<lambda>x. member x B))"
definition bot_set
where "\<bottom> = Collect \<bottom>"
definition top_set
where "\<top> = Collect \<top>"
definition uminus_set
where "- A = Collect (- (\<lambda>x. member x A))"
definition minus_set
where "A - B = Collect ((\<lambda>x. member x A) - (\<lambda>x. member x B))"
instance
by standard
(simp_all add: less_eq_set_def less_set_def inf_set_def sup_set_def
bot_set_def top_set_def uminus_set_def minus_set_def
less_le_not_le sup_inf_distrib1 diff_eq set_eqI fun_eq_iff
del: inf_apply sup_apply bot_apply top_apply minus_apply uminus_apply)
end
text \<open>Set enumerations\<close>
abbreviation empty :: "'a set" ("{}")
where "{} \<equiv> bot"
definition insert :: "'a \<Rightarrow> 'a set \<Rightarrow> 'a set"
where insert_compr: "insert a B = {x. x = a \<or> x \<in> B}"
syntax
"_Finset" :: "args \<Rightarrow> 'a set" ("{(_)}")
translations
"{x, xs}" \<rightleftharpoons> "CONST insert x {xs}"
"{x}" \<rightleftharpoons> "CONST insert x {}"
subsection \<open>Subsets and bounded quantifiers\<close>
abbreviation subset :: "'a set \<Rightarrow> 'a set \<Rightarrow> bool"
where "subset \<equiv> less"
abbreviation subset_eq :: "'a set \<Rightarrow> 'a set \<Rightarrow> bool"
where "subset_eq \<equiv> less_eq"
notation
subset ("'(\<subset>')") and
subset ("(_/ \<subset> _)" [51, 51] 50) and
subset_eq ("'(\<subseteq>')") and
subset_eq ("(_/ \<subseteq> _)" [51, 51] 50)
abbreviation (input)
supset :: "'a set \<Rightarrow> 'a set \<Rightarrow> bool" where
"supset \<equiv> greater"
abbreviation (input)
supset_eq :: "'a set \<Rightarrow> 'a set \<Rightarrow> bool" where
"supset_eq \<equiv> greater_eq"
notation
supset ("'(\<supset>')") and
supset ("(_/ \<supset> _)" [51, 51] 50) and
supset_eq ("'(\<supseteq>')") and
supset_eq ("(_/ \<supseteq> _)" [51, 51] 50)
notation (ASCII output)
subset ("'(<')") and
subset ("(_/ < _)" [51, 51] 50) and
subset_eq ("'(<=')") and
subset_eq ("(_/ <= _)" [51, 51] 50)
definition Ball :: "'a set \<Rightarrow> ('a \<Rightarrow> bool) \<Rightarrow> bool"
where "Ball A P \<longleftrightarrow> (\<forall>x. x \<in> A \<longrightarrow> P x)" \<comment> \<open>bounded universal quantifiers\<close>
definition Bex :: "'a set \<Rightarrow> ('a \<Rightarrow> bool) \<Rightarrow> bool"
where "Bex A P \<longleftrightarrow> (\<exists>x. x \<in> A \<and> P x)" \<comment> \<open>bounded existential quantifiers\<close>
syntax (ASCII)
"_Ball" :: "pttrn \<Rightarrow> 'a set \<Rightarrow> bool \<Rightarrow> bool" ("(3ALL (_/:_)./ _)" [0, 0, 10] 10)
"_Bex" :: "pttrn \<Rightarrow> 'a set \<Rightarrow> bool \<Rightarrow> bool" ("(3EX (_/:_)./ _)" [0, 0, 10] 10)
"_Bex1" :: "pttrn \<Rightarrow> 'a set \<Rightarrow> bool \<Rightarrow> bool" ("(3EX! (_/:_)./ _)" [0, 0, 10] 10)
"_Bleast" :: "id \<Rightarrow> 'a set \<Rightarrow> bool \<Rightarrow> 'a" ("(3LEAST (_/:_)./ _)" [0, 0, 10] 10)
syntax (input)
"_Ball" :: "pttrn \<Rightarrow> 'a set \<Rightarrow> bool \<Rightarrow> bool" ("(3! (_/:_)./ _)" [0, 0, 10] 10)
"_Bex" :: "pttrn \<Rightarrow> 'a set \<Rightarrow> bool \<Rightarrow> bool" ("(3? (_/:_)./ _)" [0, 0, 10] 10)
"_Bex1" :: "pttrn \<Rightarrow> 'a set \<Rightarrow> bool \<Rightarrow> bool" ("(3?! (_/:_)./ _)" [0, 0, 10] 10)
syntax
"_Ball" :: "pttrn \<Rightarrow> 'a set \<Rightarrow> bool \<Rightarrow> bool" ("(3\<forall>(_/\<in>_)./ _)" [0, 0, 10] 10)
"_Bex" :: "pttrn \<Rightarrow> 'a set \<Rightarrow> bool \<Rightarrow> bool" ("(3\<exists>(_/\<in>_)./ _)" [0, 0, 10] 10)
"_Bex1" :: "pttrn \<Rightarrow> 'a set \<Rightarrow> bool \<Rightarrow> bool" ("(3\<exists>!(_/\<in>_)./ _)" [0, 0, 10] 10)
"_Bleast" :: "id \<Rightarrow> 'a set \<Rightarrow> bool \<Rightarrow> 'a" ("(3LEAST(_/\<in>_)./ _)" [0, 0, 10] 10)
translations
"\<forall>x\<in>A. P" \<rightleftharpoons> "CONST Ball A (\<lambda>x. P)"
"\<exists>x\<in>A. P" \<rightleftharpoons> "CONST Bex A (\<lambda>x. P)"
"\<exists>!x\<in>A. P" \<rightharpoonup> "\<exists>!x. x \<in> A \<and> P"
"LEAST x:A. P" \<rightharpoonup> "LEAST x. x \<in> A \<and> P"
syntax (ASCII output)
"_setlessAll" :: "[idt, 'a, bool] \<Rightarrow> bool" ("(3ALL _<_./ _)" [0, 0, 10] 10)
"_setlessEx" :: "[idt, 'a, bool] \<Rightarrow> bool" ("(3EX _<_./ _)" [0, 0, 10] 10)
"_setleAll" :: "[idt, 'a, bool] \<Rightarrow> bool" ("(3ALL _<=_./ _)" [0, 0, 10] 10)
"_setleEx" :: "[idt, 'a, bool] \<Rightarrow> bool" ("(3EX _<=_./ _)" [0, 0, 10] 10)
"_setleEx1" :: "[idt, 'a, bool] \<Rightarrow> bool" ("(3EX! _<=_./ _)" [0, 0, 10] 10)
syntax
"_setlessAll" :: "[idt, 'a, bool] \<Rightarrow> bool" ("(3\<forall>_\<subset>_./ _)" [0, 0, 10] 10)
"_setlessEx" :: "[idt, 'a, bool] \<Rightarrow> bool" ("(3\<exists>_\<subset>_./ _)" [0, 0, 10] 10)
"_setleAll" :: "[idt, 'a, bool] \<Rightarrow> bool" ("(3\<forall>_\<subseteq>_./ _)" [0, 0, 10] 10)
"_setleEx" :: "[idt, 'a, bool] \<Rightarrow> bool" ("(3\<exists>_\<subseteq>_./ _)" [0, 0, 10] 10)
"_setleEx1" :: "[idt, 'a, bool] \<Rightarrow> bool" ("(3\<exists>!_\<subseteq>_./ _)" [0, 0, 10] 10)
translations
"\<forall>A\<subset>B. P" \<rightharpoonup> "\<forall>A. A \<subset> B \<longrightarrow> P"
"\<exists>A\<subset>B. P" \<rightharpoonup> "\<exists>A. A \<subset> B \<and> P"
"\<forall>A\<subseteq>B. P" \<rightharpoonup> "\<forall>A. A \<subseteq> B \<longrightarrow> P"
"\<exists>A\<subseteq>B. P" \<rightharpoonup> "\<exists>A. A \<subseteq> B \<and> P"
"\<exists>!A\<subseteq>B. P" \<rightharpoonup> "\<exists>!A. A \<subseteq> B \<and> P"
print_translation \<open>
let
val All_binder = Mixfix.binder_name \<^const_syntax>\<open>All\<close>;
val Ex_binder = Mixfix.binder_name \<^const_syntax>\<open>Ex\<close>;
val impl = \<^const_syntax>\<open>HOL.implies\<close>;
val conj = \<^const_syntax>\<open>HOL.conj\<close>;
val sbset = \<^const_syntax>\<open>subset\<close>;
val sbset_eq = \<^const_syntax>\<open>subset_eq\<close>;
val trans =
[((All_binder, impl, sbset), \<^syntax_const>\<open>_setlessAll\<close>),
((All_binder, impl, sbset_eq), \<^syntax_const>\<open>_setleAll\<close>),
((Ex_binder, conj, sbset), \<^syntax_const>\<open>_setlessEx\<close>),
((Ex_binder, conj, sbset_eq), \<^syntax_const>\<open>_setleEx\<close>)];
fun mk v (v', T) c n P =
if v = v' andalso not (Term.exists_subterm (fn Free (x, _) => x = v | _ => false) n)
then Syntax.const c $ Syntax_Trans.mark_bound_body (v', T) $ n $ P
else raise Match;
fun tr' q = (q, fn _ =>
(fn [Const (\<^syntax_const>\<open>_bound\<close>, _) $ Free (v, Type (\<^type_name>\<open>set\<close>, _)),
Const (c, _) $
(Const (d, _) $ (Const (\<^syntax_const>\<open>_bound\<close>, _) $ Free (v', T)) $ n) $ P] =>
(case AList.lookup (=) trans (q, c, d) of
NONE => raise Match
| SOME l => mk v (v', T) l n P)
| _ => raise Match));
in
[tr' All_binder, tr' Ex_binder]
end
\<close>
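(* Effect of this print translation (illustration): a proposition of the form
   \<forall>A. A \<subseteq> B \<longrightarrow> P A is displayed as \<forall>A\<subseteq>B. P A, and
   \<exists>A. A \<subset> B \<and> P A as \<exists>A\<subset>B. P A, matching the input
   translations above. *)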
text \<open>
\<^medskip>
Translate between \<open>{e | x1\<dots>xn. P}\<close> and \<open>{u. \<exists>x1\<dots>xn. u = e \<and> P}\<close>;
\<open>{y. \<exists>x1\<dots>xn. y = e \<and> P}\<close> is only translated if \<open>[0..n] \<subseteq> bvs e\<close>.
\<close>
syntax
"_Setcompr" :: "'a \<Rightarrow> idts \<Rightarrow> bool \<Rightarrow> 'a set" ("(1{_ |/_./ _})")
parse_translation \<open>
let
val ex_tr = snd (Syntax_Trans.mk_binder_tr ("EX ", \<^const_syntax>\<open>Ex\<close>));
fun nvars (Const (\<^syntax_const>\<open>_idts\<close>, _) $ _ $ idts) = nvars idts + 1
| nvars _ = 1;
fun setcompr_tr ctxt [e, idts, b] =
let
val eq = Syntax.const \<^const_syntax>\<open>HOL.eq\<close> $ Bound (nvars idts) $ e;
val P = Syntax.const \<^const_syntax>\<open>HOL.conj\<close> $ eq $ b;
val exP = ex_tr ctxt [idts, P];
in Syntax.const \<^const_syntax>\<open>Collect\<close> $ absdummy dummyT exP end;
in [(\<^syntax_const>\<open>_Setcompr\<close>, setcompr_tr)] end
\<close>
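(* Parsing example: {f x |x. P x} becomes CONST Collect (\<lambda>u. \<exists>x. u = f x \<and> P x),
   in line with the translation scheme described in the text above. *)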
print_translation \<open>
[Syntax_Trans.preserve_binder_abs2_tr' \<^const_syntax>\<open>Ball\<close> \<^syntax_const>\<open>_Ball\<close>,
Syntax_Trans.preserve_binder_abs2_tr' \<^const_syntax>\<open>Bex\<close> \<^syntax_const>\<open>_Bex\<close>]
\<close> \<comment> \<open>to avoid eta-contraction of body\<close>
print_translation \<open>
let
val ex_tr' = snd (Syntax_Trans.mk_binder_tr' (\<^const_syntax>\<open>Ex\<close>, "DUMMY"));
fun setcompr_tr' ctxt [Abs (abs as (_, _, P))] =
let
fun check (Const (\<^const_syntax>\<open>Ex\<close>, _) $ Abs (_, _, P), n) = check (P, n + 1)
| check (Const (\<^const_syntax>\<open>HOL.conj\<close>, _) $
(Const (\<^const_syntax>\<open>HOL.eq\<close>, _) $ Bound m $ e) $ P, n) =
n > 0 andalso m = n andalso not (loose_bvar1 (P, n)) andalso
subset (=) (0 upto (n - 1), add_loose_bnos (e, 0, []))
| check _ = false;
fun tr' (_ $ abs) =
let val _ $ idts $ (_ $ (_ $ _ $ e) $ Q) = ex_tr' ctxt [abs]
in Syntax.const \<^syntax_const>\<open>_Setcompr\<close> $ e $ idts $ Q end;
in
if check (P, 0) then tr' P
else
let
val (x as _ $ Free(xN, _), t) = Syntax_Trans.atomic_abs_tr' abs;
val M = Syntax.const \<^syntax_const>\<open>_Coll\<close> $ x $ t;
in
case t of
Const (\<^const_syntax>\<open>HOL.conj\<close>, _) $
(Const (\<^const_syntax>\<open>Set.member\<close>, _) $
(Const (\<^syntax_const>\<open>_bound\<close>, _) $ Free (yN, _)) $ A) $ P =>
if xN = yN then Syntax.const \<^syntax_const>\<open>_Collect\<close> $ x $ A $ P else M
| _ => M
end
end;
in [(\<^const_syntax>\<open>Collect\<close>, setcompr_tr')] end
\<close>
simproc_setup defined_Bex ("\<exists>x\<in>A. P x \<and> Q x") = \<open>
- fn _ => Quantifier1.rearrange_Bex
- (fn ctxt => unfold_tac ctxt @{thms Bex_def})
+ K (Quantifier1.rearrange_Bex (fn ctxt => unfold_tac ctxt @{thms Bex_def}))
\<close>
simproc_setup defined_All ("\<forall>x\<in>A. P x \<longrightarrow> Q x") = \<open>
- fn _ => Quantifier1.rearrange_Ball
- (fn ctxt => unfold_tac ctxt @{thms Ball_def})
+ K (Quantifier1.rearrange_Ball (fn ctxt => unfold_tac ctxt @{thms Ball_def}))
\<close>
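(* These simprocs pull the defining equation to the front of the bounded quantifier, e.g.
   \<exists>x\<in>A. P x \<and> x = a is rearranged so that bex_one_point1 below applies,
   giving a \<in> A \<and> P a; dually for the bounded universal quantifier and ball_one_point1. *)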
lemma ballI [intro!]: "(\<And>x. x \<in> A \<Longrightarrow> P x) \<Longrightarrow> \<forall>x\<in>A. P x"
by (simp add: Ball_def)
lemmas strip = impI allI ballI
lemma bspec [dest?]: "\<forall>x\<in>A. P x \<Longrightarrow> x \<in> A \<Longrightarrow> P x"
by (simp add: Ball_def)
text \<open>Gives better instantiation for bound:\<close>
setup \<open>
map_theory_claset (fn ctxt =>
ctxt addbefore ("bspec", fn ctxt' => dresolve_tac ctxt' @{thms bspec} THEN' assume_tac ctxt'))
\<close>
ML \<open>
structure Simpdata =
struct
open Simpdata;
val mksimps_pairs = [(\<^const_name>\<open>Ball\<close>, @{thms bspec})] @ mksimps_pairs;
end;
open Simpdata;
\<close>
declaration \<open>fn _ => Simplifier.map_ss (Simplifier.set_mksimps (mksimps mksimps_pairs))\<close>
lemma ballE [elim]: "\<forall>x\<in>A. P x \<Longrightarrow> (P x \<Longrightarrow> Q) \<Longrightarrow> (x \<notin> A \<Longrightarrow> Q) \<Longrightarrow> Q"
unfolding Ball_def by blast
lemma bexI [intro]: "P x \<Longrightarrow> x \<in> A \<Longrightarrow> \<exists>x\<in>A. P x"
\<comment> \<open>Normally the best argument order: \<open>P x\<close> constrains the choice of \<open>x \<in> A\<close>.\<close>
unfolding Bex_def by blast
lemma rev_bexI [intro?]: "x \<in> A \<Longrightarrow> P x \<Longrightarrow> \<exists>x\<in>A. P x"
\<comment> \<open>The best argument order when there is only one \<open>x \<in> A\<close>.\<close>
unfolding Bex_def by blast
lemma bexCI: "(\<forall>x\<in>A. \<not> P x \<Longrightarrow> P a) \<Longrightarrow> a \<in> A \<Longrightarrow> \<exists>x\<in>A. P x"
unfolding Bex_def by blast
lemma bexE [elim!]: "\<exists>x\<in>A. P x \<Longrightarrow> (\<And>x. x \<in> A \<Longrightarrow> P x \<Longrightarrow> Q) \<Longrightarrow> Q"
unfolding Bex_def by blast
lemma ball_triv [simp]: "(\<forall>x\<in>A. P) \<longleftrightarrow> ((\<exists>x. x \<in> A) \<longrightarrow> P)"
\<comment> \<open>trivial rewrite rule.\<close>
by (simp add: Ball_def)
lemma bex_triv [simp]: "(\<exists>x\<in>A. P) \<longleftrightarrow> ((\<exists>x. x \<in> A) \<and> P)"
\<comment> \<open>Dual form for existentials.\<close>
by (simp add: Bex_def)
lemma bex_triv_one_point1 [simp]: "(\<exists>x\<in>A. x = a) \<longleftrightarrow> a \<in> A"
by blast
lemma bex_triv_one_point2 [simp]: "(\<exists>x\<in>A. a = x) \<longleftrightarrow> a \<in> A"
by blast
lemma bex_one_point1 [simp]: "(\<exists>x\<in>A. x = a \<and> P x) \<longleftrightarrow> a \<in> A \<and> P a"
by blast
lemma bex_one_point2 [simp]: "(\<exists>x\<in>A. a = x \<and> P x) \<longleftrightarrow> a \<in> A \<and> P a"
by blast
lemma ball_one_point1 [simp]: "(\<forall>x\<in>A. x = a \<longrightarrow> P x) \<longleftrightarrow> (a \<in> A \<longrightarrow> P a)"
by blast
lemma ball_one_point2 [simp]: "(\<forall>x\<in>A. a = x \<longrightarrow> P x) \<longleftrightarrow> (a \<in> A \<longrightarrow> P a)"
by blast
lemma ball_conj_distrib: "(\<forall>x\<in>A. P x \<and> Q x) \<longleftrightarrow> (\<forall>x\<in>A. P x) \<and> (\<forall>x\<in>A. Q x)"
by blast
lemma bex_disj_distrib: "(\<exists>x\<in>A. P x \<or> Q x) \<longleftrightarrow> (\<exists>x\<in>A. P x) \<or> (\<exists>x\<in>A. Q x)"
by blast
text \<open>Congruence rules\<close>
lemma ball_cong:
"\<lbrakk> A = B; \<And>x. x \<in> B \<Longrightarrow> P x \<longleftrightarrow> Q x \<rbrakk> \<Longrightarrow>
(\<forall>x\<in>A. P x) \<longleftrightarrow> (\<forall>x\<in>B. Q x)"
by (simp add: Ball_def)
lemma ball_cong_simp [cong]:
"\<lbrakk> A = B; \<And>x. x \<in> B =simp=> P x \<longleftrightarrow> Q x \<rbrakk> \<Longrightarrow>
(\<forall>x\<in>A. P x) \<longleftrightarrow> (\<forall>x\<in>B. Q x)"
by (simp add: simp_implies_def Ball_def)
lemma bex_cong:
"\<lbrakk> A = B; \<And>x. x \<in> B \<Longrightarrow> P x \<longleftrightarrow> Q x \<rbrakk> \<Longrightarrow>
(\<exists>x\<in>A. P x) \<longleftrightarrow> (\<exists>x\<in>B. Q x)"
by (simp add: Bex_def cong: conj_cong)
lemma bex_cong_simp [cong]:
"\<lbrakk> A = B; \<And>x. x \<in> B =simp=> P x \<longleftrightarrow> Q x \<rbrakk> \<Longrightarrow>
(\<exists>x\<in>A. P x) \<longleftrightarrow> (\<exists>x\<in>B. Q x)"
by (simp add: simp_implies_def Bex_def cong: conj_cong)
lemma bex1_def: "(\<exists>!x\<in>X. P x) \<longleftrightarrow> (\<exists>x\<in>X. P x) \<and> (\<forall>x\<in>X. \<forall>y\<in>X. P x \<longrightarrow> P y \<longrightarrow> x = y)"
by auto
subsection \<open>Basic operations\<close>
subsubsection \<open>Subsets\<close>
lemma subsetI [intro!]: "(\<And>x. x \<in> A \<Longrightarrow> x \<in> B) \<Longrightarrow> A \<subseteq> B"
by (simp add: less_eq_set_def le_fun_def)
text \<open>
\<^medskip>
Map the type \<open>'a set \<Rightarrow> anything\<close> to just \<open>'a\<close>; for overloading constants
whose first argument has type \<open>'a set\<close>.
\<close>
lemma subsetD [elim, intro?]: "A \<subseteq> B \<Longrightarrow> c \<in> A \<Longrightarrow> c \<in> B"
by (simp add: less_eq_set_def le_fun_def)
\<comment> \<open>Rule in Modus Ponens style.\<close>
lemma rev_subsetD [intro?,no_atp]: "c \<in> A \<Longrightarrow> A \<subseteq> B \<Longrightarrow> c \<in> B"
\<comment> \<open>The same, with reversed premises for use with @{method erule} -- cf. @{thm rev_mp}.\<close>
by (rule subsetD)
lemma subsetCE [elim,no_atp]: "A \<subseteq> B \<Longrightarrow> (c \<notin> A \<Longrightarrow> P) \<Longrightarrow> (c \<in> B \<Longrightarrow> P) \<Longrightarrow> P"
\<comment> \<open>Classical elimination rule.\<close>
by (auto simp add: less_eq_set_def le_fun_def)
lemma subset_eq: "A \<subseteq> B \<longleftrightarrow> (\<forall>x\<in>A. x \<in> B)"
by blast
lemma contra_subsetD [no_atp]: "A \<subseteq> B \<Longrightarrow> c \<notin> B \<Longrightarrow> c \<notin> A"
by blast
lemma subset_refl: "A \<subseteq> A"
by (fact order_refl) (* already [iff] *)
lemma subset_trans: "A \<subseteq> B \<Longrightarrow> B \<subseteq> C \<Longrightarrow> A \<subseteq> C"
by (fact order_trans)
lemma subset_not_subset_eq [code]: "A \<subset> B \<longleftrightarrow> A \<subseteq> B \<and> \<not> B \<subseteq> A"
by (fact less_le_not_le)
lemma eq_mem_trans: "a = b \<Longrightarrow> b \<in> A \<Longrightarrow> a \<in> A"
by simp
lemmas basic_trans_rules [trans] =
order_trans_rules rev_subsetD subsetD eq_mem_trans
subsubsection \<open>Equality\<close>
lemma subset_antisym [intro!]: "A \<subseteq> B \<Longrightarrow> B \<subseteq> A \<Longrightarrow> A = B"
\<comment> \<open>Anti-symmetry of the subset relation.\<close>
by (iprover intro: set_eqI subsetD)
text \<open>\<^medskip> Equality rules from ZF set theory -- are they appropriate here?\<close>
lemma equalityD1: "A = B \<Longrightarrow> A \<subseteq> B"
by simp
lemma equalityD2: "A = B \<Longrightarrow> B \<subseteq> A"
by simp
text \<open>
\<^medskip>
Be careful when adding this to the claset as \<open>subset_empty\<close> is in the
simpset: \<^prop>\<open>A = {}\<close> goes to \<^prop>\<open>{} \<subseteq> A\<close> and \<^prop>\<open>A \<subseteq> {}\<close>
and then back to \<^prop>\<open>A = {}\<close>!
\<close>
lemma equalityE: "A = B \<Longrightarrow> (A \<subseteq> B \<Longrightarrow> B \<subseteq> A \<Longrightarrow> P) \<Longrightarrow> P"
by simp
lemma equalityCE [elim]: "A = B \<Longrightarrow> (c \<in> A \<Longrightarrow> c \<in> B \<Longrightarrow> P) \<Longrightarrow> (c \<notin> A \<Longrightarrow> c \<notin> B \<Longrightarrow> P) \<Longrightarrow> P"
by blast
lemma eqset_imp_iff: "A = B \<Longrightarrow> x \<in> A \<longleftrightarrow> x \<in> B"
by simp
lemma eqelem_imp_iff: "x = y \<Longrightarrow> x \<in> A \<longleftrightarrow> y \<in> A"
by simp
subsubsection \<open>The empty set\<close>
lemma empty_def: "{} = {x. False}"
by (simp add: bot_set_def bot_fun_def)
lemma empty_iff [simp]: "c \<in> {} \<longleftrightarrow> False"
by (simp add: empty_def)
lemma emptyE [elim!]: "a \<in> {} \<Longrightarrow> P"
by simp
lemma empty_subsetI [iff]: "{} \<subseteq> A"
\<comment> \<open>One effect is to delete the ASSUMPTION \<^prop>\<open>{} \<subseteq> A\<close>\<close>
by blast
lemma equals0I: "(\<And>y. y \<in> A \<Longrightarrow> False) \<Longrightarrow> A = {}"
by blast
lemma equals0D: "A = {} \<Longrightarrow> a \<notin> A"
\<comment> \<open>Use for reasoning about disjointness: \<open>A \<inter> B = {}\<close>\<close>
by blast
lemma ball_empty [simp]: "Ball {} P \<longleftrightarrow> True"
by (simp add: Ball_def)
lemma bex_empty [simp]: "Bex {} P \<longleftrightarrow> False"
by (simp add: Bex_def)
subsubsection \<open>The universal set -- UNIV\<close>
abbreviation UNIV :: "'a set"
where "UNIV \<equiv> top"
lemma UNIV_def: "UNIV = {x. True}"
by (simp add: top_set_def top_fun_def)
lemma UNIV_I [simp]: "x \<in> UNIV"
by (simp add: UNIV_def)
declare UNIV_I [intro] \<comment> \<open>unsafe makes it less likely to cause problems\<close>
lemma UNIV_witness [intro?]: "\<exists>x. x \<in> UNIV"
by simp
lemma subset_UNIV: "A \<subseteq> UNIV"
by (fact top_greatest) (* already simp *)
text \<open>
\<^medskip>
Eta-contracting these two rules (to remove \<open>P\<close>) causes them
to be ignored because of their interaction with congruence rules.
\<close>
lemma ball_UNIV [simp]: "Ball UNIV P \<longleftrightarrow> All P"
by (simp add: Ball_def)
lemma bex_UNIV [simp]: "Bex UNIV P \<longleftrightarrow> Ex P"
by (simp add: Bex_def)
lemma UNIV_eq_I: "(\<And>x. x \<in> A) \<Longrightarrow> UNIV = A"
by auto
lemma UNIV_not_empty [iff]: "UNIV \<noteq> {}"
by (blast elim: equalityE)
lemma empty_not_UNIV[simp]: "{} \<noteq> UNIV"
by blast
subsubsection \<open>The Powerset operator -- Pow\<close>
definition Pow :: "'a set \<Rightarrow> 'a set set"
where Pow_def: "Pow A = {B. B \<subseteq> A}"
lemma Pow_iff [iff]: "A \<in> Pow B \<longleftrightarrow> A \<subseteq> B"
by (simp add: Pow_def)
lemma PowI: "A \<subseteq> B \<Longrightarrow> A \<in> Pow B"
by (simp add: Pow_def)
lemma PowD: "A \<in> Pow B \<Longrightarrow> A \<subseteq> B"
by (simp add: Pow_def)
lemma Pow_bottom: "{} \<in> Pow B"
by simp
lemma Pow_top: "A \<in> Pow A"
by simp
lemma Pow_not_empty: "Pow A \<noteq> {}"
using Pow_top by blast
subsubsection \<open>Set complement\<close>
lemma Compl_iff [simp]: "c \<in> - A \<longleftrightarrow> c \<notin> A"
by (simp add: fun_Compl_def uminus_set_def)
lemma ComplI [intro!]: "(c \<in> A \<Longrightarrow> False) \<Longrightarrow> c \<in> - A"
by (simp add: fun_Compl_def uminus_set_def) blast
text \<open>
\<^medskip>
This form, with negated conclusion, works well with the Classical prover.
Negated assumptions behave like formulae on the right side of the
notional turnstile \dots
\<close>
lemma ComplD [dest!]: "c \<in> - A \<Longrightarrow> c \<notin> A"
by simp
lemmas ComplE = ComplD [elim_format]
lemma Compl_eq: "- A = {x. \<not> x \<in> A}"
by blast
subsubsection \<open>Binary intersection\<close>
abbreviation inter :: "'a set \<Rightarrow> 'a set \<Rightarrow> 'a set" (infixl "\<inter>" 70)
where "(\<inter>) \<equiv> inf"
notation (ASCII)
inter (infixl "Int" 70)
lemma Int_def: "A \<inter> B = {x. x \<in> A \<and> x \<in> B}"
by (simp add: inf_set_def inf_fun_def)
lemma Int_iff [simp]: "c \<in> A \<inter> B \<longleftrightarrow> c \<in> A \<and> c \<in> B"
unfolding Int_def by blast
lemma IntI [intro!]: "c \<in> A \<Longrightarrow> c \<in> B \<Longrightarrow> c \<in> A \<inter> B"
by simp
lemma IntD1: "c \<in> A \<inter> B \<Longrightarrow> c \<in> A"
by simp
lemma IntD2: "c \<in> A \<inter> B \<Longrightarrow> c \<in> B"
by simp
lemma IntE [elim!]: "c \<in> A \<inter> B \<Longrightarrow> (c \<in> A \<Longrightarrow> c \<in> B \<Longrightarrow> P) \<Longrightarrow> P"
by simp
subsubsection \<open>Binary union\<close>
abbreviation union :: "'a set \<Rightarrow> 'a set \<Rightarrow> 'a set" (infixl "\<union>" 65)
where "union \<equiv> sup"
notation (ASCII)
union (infixl "Un" 65)
lemma Un_def: "A \<union> B = {x. x \<in> A \<or> x \<in> B}"
by (simp add: sup_set_def sup_fun_def)
lemma Un_iff [simp]: "c \<in> A \<union> B \<longleftrightarrow> c \<in> A \<or> c \<in> B"
unfolding Un_def by blast
lemma UnI1 [elim?]: "c \<in> A \<Longrightarrow> c \<in> A \<union> B"
by simp
lemma UnI2 [elim?]: "c \<in> B \<Longrightarrow> c \<in> A \<union> B"
by simp
text \<open>\<^medskip> Classical introduction rule: no commitment to \<open>A\<close> vs. \<open>B\<close>.\<close>
lemma UnCI [intro!]: "(c \<notin> B \<Longrightarrow> c \<in> A) \<Longrightarrow> c \<in> A \<union> B"
by auto
lemma UnE [elim!]: "c \<in> A \<union> B \<Longrightarrow> (c \<in> A \<Longrightarrow> P) \<Longrightarrow> (c \<in> B \<Longrightarrow> P) \<Longrightarrow> P"
unfolding Un_def by blast
lemma insert_def: "insert a B = {x. x = a} \<union> B"
by (simp add: insert_compr Un_def)
subsubsection \<open>Set difference\<close>
lemma Diff_iff [simp]: "c \<in> A - B \<longleftrightarrow> c \<in> A \<and> c \<notin> B"
by (simp add: minus_set_def fun_diff_def)
lemma DiffI [intro!]: "c \<in> A \<Longrightarrow> c \<notin> B \<Longrightarrow> c \<in> A - B"
by simp
lemma DiffD1: "c \<in> A - B \<Longrightarrow> c \<in> A"
by simp
lemma DiffD2: "c \<in> A - B \<Longrightarrow> c \<in> B \<Longrightarrow> P"
by simp
lemma DiffE [elim!]: "c \<in> A - B \<Longrightarrow> (c \<in> A \<Longrightarrow> c \<notin> B \<Longrightarrow> P) \<Longrightarrow> P"
by simp
lemma set_diff_eq: "A - B = {x. x \<in> A \<and> x \<notin> B}"
by blast
lemma Compl_eq_Diff_UNIV: "- A = (UNIV - A)"
by blast
subsubsection \<open>Augmenting a set -- \<^const>\<open>insert\<close>\<close>
lemma insert_iff [simp]: "a \<in> insert b A \<longleftrightarrow> a = b \<or> a \<in> A"
unfolding insert_def by blast
lemma insertI1: "a \<in> insert a B"
by simp
lemma insertI2: "a \<in> B \<Longrightarrow> a \<in> insert b B"
by simp
lemma insertE [elim!]: "a \<in> insert b A \<Longrightarrow> (a = b \<Longrightarrow> P) \<Longrightarrow> (a \<in> A \<Longrightarrow> P) \<Longrightarrow> P"
unfolding insert_def by blast
lemma insertCI [intro!]: "(a \<notin> B \<Longrightarrow> a = b) \<Longrightarrow> a \<in> insert b B"
\<comment> \<open>Classical introduction rule.\<close>
by auto
lemma subset_insert_iff: "A \<subseteq> insert x B \<longleftrightarrow> (if x \<in> A then A - {x} \<subseteq> B else A \<subseteq> B)"
by auto
lemma set_insert:
assumes "x \<in> A"
obtains B where "A = insert x B" and "x \<notin> B"
proof
show "A = insert x (A - {x})" using assms by blast
show "x \<notin> A - {x}" by blast
qed
lemma insert_ident: "x \<notin> A \<Longrightarrow> x \<notin> B \<Longrightarrow> insert x A = insert x B \<longleftrightarrow> A = B"
by auto
lemma insert_eq_iff:
assumes "a \<notin> A" "b \<notin> B"
shows "insert a A = insert b B \<longleftrightarrow>
(if a = b then A = B else \<exists>C. A = insert b C \<and> b \<notin> C \<and> B = insert a C \<and> a \<notin> C)"
(is "?L \<longleftrightarrow> ?R")
proof
show ?R if ?L
proof (cases "a = b")
case True
with assms \<open>?L\<close> show ?R
by (simp add: insert_ident)
next
case False
let ?C = "A - {b}"
have "A = insert b ?C \<and> b \<notin> ?C \<and> B = insert a ?C \<and> a \<notin> ?C"
using assms \<open>?L\<close> \<open>a \<noteq> b\<close> by auto
then show ?R using \<open>a \<noteq> b\<close> by auto
qed
show ?L if ?R
using that by (auto split: if_splits)
qed
lemma insert_UNIV: "insert x UNIV = UNIV"
by auto
subsubsection \<open>Singletons, using insert\<close>
lemma singletonI [intro!]: "a \<in> {a}"
\<comment> \<open>Redundant? But unlike \<open>insertCI\<close>, it proves the subgoal immediately!\<close>
by (rule insertI1)
lemma singletonD [dest!]: "b \<in> {a} \<Longrightarrow> b = a"
by blast
lemmas singletonE = singletonD [elim_format]
lemma singleton_iff: "b \<in> {a} \<longleftrightarrow> b = a"
by blast
lemma singleton_inject [dest!]: "{a} = {b} \<Longrightarrow> a = b"
by blast
lemma singleton_insert_inj_eq [iff]: "{b} = insert a A \<longleftrightarrow> a = b \<and> A \<subseteq> {b}"
by blast
lemma singleton_insert_inj_eq' [iff]: "insert a A = {b} \<longleftrightarrow> a = b \<and> A \<subseteq> {b}"
by blast
lemma subset_singletonD: "A \<subseteq> {x} \<Longrightarrow> A = {} \<or> A = {x}"
by fast
lemma subset_singleton_iff: "X \<subseteq> {a} \<longleftrightarrow> X = {} \<or> X = {a}"
by blast
lemma subset_singleton_iff_Uniq: "(\<exists>a. A \<subseteq> {a}) \<longleftrightarrow> (\<exists>\<^sub>\<le>\<^sub>1x. x \<in> A)"
unfolding Uniq_def by blast
lemma singleton_conv [simp]: "{x. x = a} = {a}"
by blast
lemma singleton_conv2 [simp]: "{x. a = x} = {a}"
by blast
lemma Diff_single_insert: "A - {x} \<subseteq> B \<Longrightarrow> A \<subseteq> insert x B"
by blast
lemma subset_Diff_insert: "A \<subseteq> B - insert x C \<longleftrightarrow> A \<subseteq> B - C \<and> x \<notin> A"
by blast
lemma doubleton_eq_iff: "{a, b} = {c, d} \<longleftrightarrow> a = c \<and> b = d \<or> a = d \<and> b = c"
by (blast elim: equalityE)
lemma Un_singleton_iff: "A \<union> B = {x} \<longleftrightarrow> A = {} \<and> B = {x} \<or> A = {x} \<and> B = {} \<or> A = {x} \<and> B = {x}"
by auto
lemma singleton_Un_iff: "{x} = A \<union> B \<longleftrightarrow> A = {} \<and> B = {x} \<or> A = {x} \<and> B = {} \<or> A = {x} \<and> B = {x}"
by auto
subsubsection \<open>Image of a set under a function\<close>
text \<open>Frequently \<open>b\<close> does not have the syntactic form of \<open>f x\<close>.\<close>
definition image :: "('a \<Rightarrow> 'b) \<Rightarrow> 'a set \<Rightarrow> 'b set" (infixr "`" 90)
where "f ` A = {y. \<exists>x\<in>A. y = f x}"
lemma image_eqI [simp, intro]: "b = f x \<Longrightarrow> x \<in> A \<Longrightarrow> b \<in> f ` A"
unfolding image_def by blast
lemma imageI: "x \<in> A \<Longrightarrow> f x \<in> f ` A"
by (rule image_eqI) (rule refl)
lemma rev_image_eqI: "x \<in> A \<Longrightarrow> b = f x \<Longrightarrow> b \<in> f ` A"
\<comment> \<open>This version's more effective when we already have the required \<open>x\<close>.\<close>
by (rule image_eqI)
lemma imageE [elim!]:
assumes "b \<in> (\<lambda>x. f x) ` A" \<comment> \<open>The eta-expansion gives variable-name preservation.\<close>
obtains x where "b = f x" and "x \<in> A"
using assms unfolding image_def by blast
lemma Compr_image_eq: "{x \<in> f ` A. P x} = f ` {x \<in> A. P (f x)}"
by auto
lemma image_Un: "f ` (A \<union> B) = f ` A \<union> f ` B"
by blast
lemma image_iff: "z \<in> f ` A \<longleftrightarrow> (\<exists>x\<in>A. z = f x)"
by blast
lemma image_subsetI: "(\<And>x. x \<in> A \<Longrightarrow> f x \<in> B) \<Longrightarrow> f ` A \<subseteq> B"
\<comment> \<open>Replaces the three steps \<open>subsetI\<close>, \<open>imageE\<close>,
\<open>hypsubst\<close>, but breaks too many existing proofs.\<close>
by blast
lemma image_subset_iff: "f ` A \<subseteq> B \<longleftrightarrow> (\<forall>x\<in>A. f x \<in> B)"
\<comment> \<open>This rewrite rule would confuse users if made default.\<close>
by blast
lemma subset_imageE:
assumes "B \<subseteq> f ` A"
obtains C where "C \<subseteq> A" and "B = f ` C"
proof -
from assms have "B = f ` {a \<in> A. f a \<in> B}" by fast
moreover have "{a \<in> A. f a \<in> B} \<subseteq> A" by blast
ultimately show thesis by (blast intro: that)
qed
lemma subset_image_iff: "B \<subseteq> f ` A \<longleftrightarrow> (\<exists>AA\<subseteq>A. B = f ` AA)"
by (blast elim: subset_imageE)
lemma image_ident [simp]: "(\<lambda>x. x) ` Y = Y"
by blast
lemma image_empty [simp]: "f ` {} = {}"
by blast
lemma image_insert [simp]: "f ` insert a B = insert (f a) (f ` B)"
by blast
lemma image_constant: "x \<in> A \<Longrightarrow> (\<lambda>x. c) ` A = {c}"
by auto
lemma image_constant_conv: "(\<lambda>x. c) ` A = (if A = {} then {} else {c})"
by auto
lemma image_image: "f ` (g ` A) = (\<lambda>x. f (g x)) ` A"
by blast
lemma insert_image [simp]: "x \<in> A \<Longrightarrow> insert (f x) (f ` A) = f ` A"
by blast
lemma image_is_empty [iff]: "f ` A = {} \<longleftrightarrow> A = {}"
by blast
lemma empty_is_image [iff]: "{} = f ` A \<longleftrightarrow> A = {}"
by blast
lemma image_Collect: "f ` {x. P x} = {f x | x. P x}"
\<comment> \<open>NOT suitable as a default simp rule: the RHS isn't simpler than the LHS,
with its implicit quantifier and conjunction. Also image enjoys better
equational properties than does the RHS.\<close>
by blast
lemma if_image_distrib [simp]:
"(\<lambda>x. if P x then f x else g x) ` S = f ` (S \<inter> {x. P x}) \<union> g ` (S \<inter> {x. \<not> P x})"
by auto
lemma image_cong:
"f ` M = g ` N" if "M = N" "\<And>x. x \<in> N \<Longrightarrow> f x = g x"
using that by (simp add: image_def)
lemma image_cong_simp [cong]:
"f ` M = g ` N" if "M = N" "\<And>x. x \<in> N =simp=> f x = g x"
using that image_cong [of M N f g] by (simp add: simp_implies_def)
lemma image_Int_subset: "f ` (A \<inter> B) \<subseteq> f ` A \<inter> f ` B"
by blast
lemma image_diff_subset: "f ` A - f ` B \<subseteq> f ` (A - B)"
by blast
lemma Setcompr_eq_image: "{f x |x. x \<in> A} = f ` A"
by blast
lemma setcompr_eq_image: "{f x |x. P x} = f ` {x. P x}"
by auto
lemma ball_imageD: "\<forall>x\<in>f ` A. P x \<Longrightarrow> \<forall>x\<in>A. P (f x)"
by simp
lemma bex_imageD: "\<exists>x\<in>f ` A. P x \<Longrightarrow> \<exists>x\<in>A. P (f x)"
by auto
lemma image_add_0 [simp]: "(+) (0::'a::comm_monoid_add) ` S = S"
by auto
theorem Cantors_theorem: "\<nexists>f. f ` A = Pow A"
proof
assume "\<exists>f. f ` A = Pow A"
then obtain f where f: "f ` A = Pow A" ..
let ?X = "{a \<in> A. a \<notin> f a}"
have "?X \<in> Pow A" by blast
then have "?X \<in> f ` A" by (simp only: f)
then obtain x where "x \<in> A" and "f x = ?X" by blast
then show False by blast
qed
text \<open>\<^medskip> Range of a function -- just an abbreviation for image!\<close>
abbreviation range :: "('a \<Rightarrow> 'b) \<Rightarrow> 'b set" \<comment> \<open>of function\<close>
where "range f \<equiv> f ` UNIV"
lemma range_eqI: "b = f x \<Longrightarrow> b \<in> range f"
by simp
lemma rangeI: "f x \<in> range f"
by simp
lemma rangeE [elim?]: "b \<in> range (\<lambda>x. f x) \<Longrightarrow> (\<And>x. b = f x \<Longrightarrow> P) \<Longrightarrow> P"
by (rule imageE)
lemma range_subsetD: "range f \<subseteq> B \<Longrightarrow> f i \<in> B"
by blast
lemma full_SetCompr_eq: "{u. \<exists>x. u = f x} = range f"
by auto
lemma range_composition: "range (\<lambda>x. f (g x)) = f ` range g"
by auto
lemma range_constant [simp]: "range (\<lambda>_. x) = {x}"
by (simp add: image_constant)
lemma range_eq_singletonD: "range f = {a} \<Longrightarrow> f x = a"
by auto
subsubsection \<open>Some rules with \<open>if\<close>\<close>
text \<open>Elimination of \<open>{x. \<dots> \<and> x = t \<and> \<dots>}\<close>.\<close>
lemma Collect_conv_if: "{x. x = a \<and> P x} = (if P a then {a} else {})"
by auto
lemma Collect_conv_if2: "{x. a = x \<and> P x} = (if P a then {a} else {})"
by auto
text \<open>
Rewrite rules for boolean case-splitting: faster than \<open>if_split [split]\<close>.
\<close>
lemma if_split_eq1: "(if Q then x else y) = b \<longleftrightarrow> (Q \<longrightarrow> x = b) \<and> (\<not> Q \<longrightarrow> y = b)"
by (rule if_split)
lemma if_split_eq2: "a = (if Q then x else y) \<longleftrightarrow> (Q \<longrightarrow> a = x) \<and> (\<not> Q \<longrightarrow> a = y)"
by (rule if_split)
text \<open>
Split ifs on either side of the membership relation.
Not for \<open>[simp]\<close> -- can cause goals to blow up!
\<close>
lemma if_split_mem1: "(if Q then x else y) \<in> b \<longleftrightarrow> (Q \<longrightarrow> x \<in> b) \<and> (\<not> Q \<longrightarrow> y \<in> b)"
by (rule if_split)
lemma if_split_mem2: "(a \<in> (if Q then x else y)) \<longleftrightarrow> (Q \<longrightarrow> a \<in> x) \<and> (\<not> Q \<longrightarrow> a \<in> y)"
by (rule if_split [where P = "\<lambda>S. a \<in> S"])
lemmas split_ifs = if_bool_eq_conj if_split_eq1 if_split_eq2 if_split_mem1 if_split_mem2
(*Would like to add these, but the existing code only searches for the
outer-level constant, which in this case is just Set.member; we instead need
to use term-nets to associate patterns with rules. Also, if a rule fails to
apply, then the formula should be kept.
[("uminus", Compl_iff RS iffD1), ("minus", [Diff_iff RS iffD1]),
("Int", [IntD1,IntD2]),
("Collect", [CollectD]), ("Inter", [InterD]), ("INTER", [INT_D])]
*)
subsection \<open>Further operations and lemmas\<close>
subsubsection \<open>The ``proper subset'' relation\<close>
lemma psubsetI [intro!]: "A \<subseteq> B \<Longrightarrow> A \<noteq> B \<Longrightarrow> A \<subset> B"
unfolding less_le by blast
lemma psubsetE [elim!]: "A \<subset> B \<Longrightarrow> (A \<subseteq> B \<Longrightarrow> \<not> B \<subseteq> A \<Longrightarrow> R) \<Longrightarrow> R"
unfolding less_le by blast
lemma psubset_insert_iff:
"A \<subset> insert x B \<longleftrightarrow> (if x \<in> B then A \<subset> B else if x \<in> A then A - {x} \<subset> B else A \<subseteq> B)"
by (auto simp add: less_le subset_insert_iff)
lemma psubset_eq: "A \<subset> B \<longleftrightarrow> A \<subseteq> B \<and> A \<noteq> B"
by (simp only: less_le)
lemma psubset_imp_subset: "A \<subset> B \<Longrightarrow> A \<subseteq> B"
by (simp add: psubset_eq)
lemma psubset_trans: "A \<subset> B \<Longrightarrow> B \<subset> C \<Longrightarrow> A \<subset> C"
unfolding less_le by (auto dest: subset_antisym)
lemma psubsetD: "A \<subset> B \<Longrightarrow> c \<in> A \<Longrightarrow> c \<in> B"
unfolding less_le by (auto dest: subsetD)
lemma psubset_subset_trans: "A \<subset> B \<Longrightarrow> B \<subseteq> C \<Longrightarrow> A \<subset> C"
by (auto simp add: psubset_eq)
lemma subset_psubset_trans: "A \<subseteq> B \<Longrightarrow> B \<subset> C \<Longrightarrow> A \<subset> C"
by (auto simp add: psubset_eq)
lemma psubset_imp_ex_mem: "A \<subset> B \<Longrightarrow> \<exists>b. b \<in> B - A"
unfolding less_le by blast
lemma atomize_ball: "(\<And>x. x \<in> A \<Longrightarrow> P x) \<equiv> Trueprop (\<forall>x\<in>A. P x)"
by (simp only: Ball_def atomize_all atomize_imp)
lemmas [symmetric, rulify] = atomize_ball
and [symmetric, defn] = atomize_ball
lemma image_Pow_mono: "f ` A \<subseteq> B \<Longrightarrow> image f ` Pow A \<subseteq> Pow B"
by blast
lemma image_Pow_surj: "f ` A = B \<Longrightarrow> image f ` Pow A = Pow B"
by (blast elim: subset_imageE)
subsubsection \<open>Derived rules involving subsets.\<close>
text \<open>\<open>insert\<close>.\<close>
lemma subset_insertI: "B \<subseteq> insert a B"
by (rule subsetI) (erule insertI2)
lemma subset_insertI2: "A \<subseteq> B \<Longrightarrow> A \<subseteq> insert b B"
by blast
lemma subset_insert: "x \<notin> A \<Longrightarrow> A \<subseteq> insert x B \<longleftrightarrow> A \<subseteq> B"
by blast
text \<open>\<^medskip> Finite Union -- the least upper bound of two sets.\<close>
lemma Un_upper1: "A \<subseteq> A \<union> B"
by (fact sup_ge1)
lemma Un_upper2: "B \<subseteq> A \<union> B"
by (fact sup_ge2)
lemma Un_least: "A \<subseteq> C \<Longrightarrow> B \<subseteq> C \<Longrightarrow> A \<union> B \<subseteq> C"
by (fact sup_least)
text \<open>\<^medskip> Finite Intersection -- the greatest lower bound of two sets.\<close>
lemma Int_lower1: "A \<inter> B \<subseteq> A"
by (fact inf_le1)
lemma Int_lower2: "A \<inter> B \<subseteq> B"
by (fact inf_le2)
lemma Int_greatest: "C \<subseteq> A \<Longrightarrow> C \<subseteq> B \<Longrightarrow> C \<subseteq> A \<inter> B"
by (fact inf_greatest)
text \<open>\<^medskip> Set difference.\<close>
lemma Diff_subset[simp]: "A - B \<subseteq> A"
by blast
lemma Diff_subset_conv: "A - B \<subseteq> C \<longleftrightarrow> A \<subseteq> B \<union> C"
by blast
subsubsection \<open>Equalities involving union, intersection, inclusion, etc.\<close>
text \<open>\<open>{}\<close>.\<close>
lemma Collect_const [simp]: "{s. P} = (if P then UNIV else {})"
\<comment> \<open>supersedes \<open>Collect_False_empty\<close>\<close>
by auto
lemma subset_empty [simp]: "A \<subseteq> {} \<longleftrightarrow> A = {}"
by (fact bot_unique)
lemma not_psubset_empty [iff]: "\<not> (A < {})"
by (fact not_less_bot) (* FIXME: already simp *)
lemma Collect_subset [simp]: "{x\<in>A. P x} \<subseteq> A" by auto
lemma Collect_empty_eq [simp]: "Collect P = {} \<longleftrightarrow> (\<forall>x. \<not> P x)"
by blast
lemma empty_Collect_eq [simp]: "{} = Collect P \<longleftrightarrow> (\<forall>x. \<not> P x)"
by blast
lemma Collect_neg_eq: "{x. \<not> P x} = - {x. P x}"
by blast
lemma Collect_disj_eq: "{x. P x \<or> Q x} = {x. P x} \<union> {x. Q x}"
by blast
lemma Collect_imp_eq: "{x. P x \<longrightarrow> Q x} = - {x. P x} \<union> {x. Q x}"
by blast
lemma Collect_conj_eq: "{x. P x \<and> Q x} = {x. P x} \<inter> {x. Q x}"
by blast
lemma Collect_mono_iff: "Collect P \<subseteq> Collect Q \<longleftrightarrow> (\<forall>x. P x \<longrightarrow> Q x)"
by blast
text \<open>\<^medskip> \<open>insert\<close>.\<close>
lemma insert_is_Un: "insert a A = {a} \<union> A"
\<comment> \<open>NOT SUITABLE FOR REWRITING since \<open>{a} \<equiv> insert a {}\<close>\<close>
by blast
lemma insert_not_empty [simp]: "insert a A \<noteq> {}"
and empty_not_insert [simp]: "{} \<noteq> insert a A"
by blast+
lemma insert_absorb: "a \<in> A \<Longrightarrow> insert a A = A"
\<comment> \<open>\<open>[simp]\<close> causes recursive calls when there are nested inserts\<close>
\<comment> \<open>with \<^emph>\<open>quadratic\<close> running time\<close>
by blast
lemma insert_absorb2 [simp]: "insert x (insert x A) = insert x A"
by blast
lemma insert_commute: "insert x (insert y A) = insert y (insert x A)"
by blast
lemma insert_subset [simp]: "insert x A \<subseteq> B \<longleftrightarrow> x \<in> B \<and> A \<subseteq> B"
by blast
lemma mk_disjoint_insert: "a \<in> A \<Longrightarrow> \<exists>B. A = insert a B \<and> a \<notin> B"
\<comment> \<open>use new \<open>B\<close> rather than \<open>A - {a}\<close> to avoid infinite unfolding\<close>
by (rule exI [where x = "A - {a}"]) blast
lemma insert_Collect: "insert a (Collect P) = {u. u \<noteq> a \<longrightarrow> P u}"
by auto
lemma insert_inter_insert [simp]: "insert a A \<inter> insert a B = insert a (A \<inter> B)"
by blast
lemma insert_disjoint [simp]:
"insert a A \<inter> B = {} \<longleftrightarrow> a \<notin> B \<and> A \<inter> B = {}"
"{} = insert a A \<inter> B \<longleftrightarrow> a \<notin> B \<and> {} = A \<inter> B"
by auto
lemma disjoint_insert [simp]:
"B \<inter> insert a A = {} \<longleftrightarrow> a \<notin> B \<and> B \<inter> A = {}"
"{} = A \<inter> insert b B \<longleftrightarrow> b \<notin> A \<and> {} = A \<inter> B"
by auto
text \<open>\<^medskip> \<open>Int\<close>\<close>
lemma Int_absorb: "A \<inter> A = A"
by (fact inf_idem) (* already simp *)
lemma Int_left_absorb: "A \<inter> (A \<inter> B) = A \<inter> B"
by (fact inf_left_idem)
lemma Int_commute: "A \<inter> B = B \<inter> A"
by (fact inf_commute)
lemma Int_left_commute: "A \<inter> (B \<inter> C) = B \<inter> (A \<inter> C)"
by (fact inf_left_commute)
lemma Int_assoc: "(A \<inter> B) \<inter> C = A \<inter> (B \<inter> C)"
by (fact inf_assoc)
lemmas Int_ac = Int_assoc Int_left_absorb Int_commute Int_left_commute
\<comment> \<open>Intersection is an AC-operator\<close>
lemma Int_absorb1: "B \<subseteq> A \<Longrightarrow> A \<inter> B = B"
by (fact inf_absorb2)
lemma Int_absorb2: "A \<subseteq> B \<Longrightarrow> A \<inter> B = A"
by (fact inf_absorb1)
lemma Int_empty_left: "{} \<inter> B = {}"
by (fact inf_bot_left) (* already simp *)
lemma Int_empty_right: "A \<inter> {} = {}"
by (fact inf_bot_right) (* already simp *)
lemma disjoint_eq_subset_Compl: "A \<inter> B = {} \<longleftrightarrow> A \<subseteq> - B"
by blast
lemma disjoint_iff: "A \<inter> B = {} \<longleftrightarrow> (\<forall>x. x\<in>A \<longrightarrow> x \<notin> B)"
by blast
lemma disjoint_iff_not_equal: "A \<inter> B = {} \<longleftrightarrow> (\<forall>x\<in>A. \<forall>y\<in>B. x \<noteq> y)"
by blast
lemma Int_UNIV_left: "UNIV \<inter> B = B"
by (fact inf_top_left) (* already simp *)
lemma Int_UNIV_right: "A \<inter> UNIV = A"
by (fact inf_top_right) (* already simp *)
lemma Int_Un_distrib: "A \<inter> (B \<union> C) = (A \<inter> B) \<union> (A \<inter> C)"
by (fact inf_sup_distrib1)
lemma Int_Un_distrib2: "(B \<union> C) \<inter> A = (B \<inter> A) \<union> (C \<inter> A)"
by (fact inf_sup_distrib2)
lemma Int_UNIV [simp]: "A \<inter> B = UNIV \<longleftrightarrow> A = UNIV \<and> B = UNIV"
by (fact inf_eq_top_iff) (* already simp *)
lemma Int_subset_iff [simp]: "C \<subseteq> A \<inter> B \<longleftrightarrow> C \<subseteq> A \<and> C \<subseteq> B"
by (fact le_inf_iff)
lemma Int_Collect: "x \<in> A \<inter> {x. P x} \<longleftrightarrow> x \<in> A \<and> P x"
by blast
text \<open>\<^medskip> \<open>Un\<close>.\<close>
lemma Un_absorb: "A \<union> A = A"
by (fact sup_idem) (* already simp *)
lemma Un_left_absorb: "A \<union> (A \<union> B) = A \<union> B"
by (fact sup_left_idem)
lemma Un_commute: "A \<union> B = B \<union> A"
by (fact sup_commute)
lemma Un_left_commute: "A \<union> (B \<union> C) = B \<union> (A \<union> C)"
by (fact sup_left_commute)
lemma Un_assoc: "(A \<union> B) \<union> C = A \<union> (B \<union> C)"
by (fact sup_assoc)
lemmas Un_ac = Un_assoc Un_left_absorb Un_commute Un_left_commute
\<comment> \<open>Union is an AC-operator\<close>
lemma Un_absorb1: "A \<subseteq> B \<Longrightarrow> A \<union> B = B"
by (fact sup_absorb2)
lemma Un_absorb2: "B \<subseteq> A \<Longrightarrow> A \<union> B = A"
by (fact sup_absorb1)
lemma Un_empty_left: "{} \<union> B = B"
by (fact sup_bot_left) (* already simp *)
lemma Un_empty_right: "A \<union> {} = A"
by (fact sup_bot_right) (* already simp *)
lemma Un_UNIV_left: "UNIV \<union> B = UNIV"
by (fact sup_top_left) (* already simp *)
lemma Un_UNIV_right: "A \<union> UNIV = UNIV"
by (fact sup_top_right) (* already simp *)
lemma Un_insert_left [simp]: "(insert a B) \<union> C = insert a (B \<union> C)"
by blast
lemma Un_insert_right [simp]: "A \<union> (insert a B) = insert a (A \<union> B)"
by blast
lemma Int_insert_left: "(insert a B) \<inter> C = (if a \<in> C then insert a (B \<inter> C) else B \<inter> C)"
by auto
lemma Int_insert_left_if0 [simp]: "a \<notin> C \<Longrightarrow> (insert a B) \<inter> C = B \<inter> C"
by auto
lemma Int_insert_left_if1 [simp]: "a \<in> C \<Longrightarrow> (insert a B) \<inter> C = insert a (B \<inter> C)"
by auto
lemma Int_insert_right: "A \<inter> (insert a B) = (if a \<in> A then insert a (A \<inter> B) else A \<inter> B)"
by auto
lemma Int_insert_right_if0 [simp]: "a \<notin> A \<Longrightarrow> A \<inter> (insert a B) = A \<inter> B"
by auto
lemma Int_insert_right_if1 [simp]: "a \<in> A \<Longrightarrow> A \<inter> (insert a B) = insert a (A \<inter> B)"
by auto
lemma Un_Int_distrib: "A \<union> (B \<inter> C) = (A \<union> B) \<inter> (A \<union> C)"
by (fact sup_inf_distrib1)
lemma Un_Int_distrib2: "(B \<inter> C) \<union> A = (B \<union> A) \<inter> (C \<union> A)"
by (fact sup_inf_distrib2)
lemma Un_Int_crazy: "(A \<inter> B) \<union> (B \<inter> C) \<union> (C \<inter> A) = (A \<union> B) \<inter> (B \<union> C) \<inter> (C \<union> A)"
by blast
lemma subset_Un_eq: "A \<subseteq> B \<longleftrightarrow> A \<union> B = B"
by (fact le_iff_sup)
lemma Un_empty [iff]: "A \<union> B = {} \<longleftrightarrow> A = {} \<and> B = {}"
by (fact sup_eq_bot_iff) (* FIXME: already simp *)
lemma Un_subset_iff [simp]: "A \<union> B \<subseteq> C \<longleftrightarrow> A \<subseteq> C \<and> B \<subseteq> C"
by (fact le_sup_iff)
lemma Un_Diff_Int: "(A - B) \<union> (A \<inter> B) = A"
by blast
lemma Diff_Int2: "A \<inter> C - B \<inter> C = A \<inter> C - B"
by blast
lemma subset_UnE:
assumes "C \<subseteq> A \<union> B"
obtains A' B' where "A' \<subseteq> A" "B' \<subseteq> B" "C = A' \<union> B'"
proof
show "C \<inter> A \<subseteq> A" "C \<inter> B \<subseteq> B" "C = (C \<inter> A) \<union> (C \<inter> B)"
using assms by blast+
qed
lemma Un_Int_eq [simp]: "(S \<union> T) \<inter> S = S" "(S \<union> T) \<inter> T = T" "S \<inter> (S \<union> T) = S" "T \<inter> (S \<union> T) = T"
by auto
lemma Int_Un_eq [simp]: "(S \<inter> T) \<union> S = S" "(S \<inter> T) \<union> T = T" "S \<union> (S \<inter> T) = S" "T \<union> (S \<inter> T) = T"
by auto
text \<open>\<^medskip> Set complement\<close>
lemma Compl_disjoint [simp]: "A \<inter> - A = {}"
by (fact inf_compl_bot)
lemma Compl_disjoint2 [simp]: "- A \<inter> A = {}"
by (fact compl_inf_bot)
lemma Compl_partition: "A \<union> - A = UNIV"
by (fact sup_compl_top)
lemma Compl_partition2: "- A \<union> A = UNIV"
by (fact compl_sup_top)
lemma double_complement: "- (-A) = A" for A :: "'a set"
by (fact double_compl) (* already simp *)
lemma Compl_Un: "- (A \<union> B) = (- A) \<inter> (- B)"
by (fact compl_sup) (* already simp *)
lemma Compl_Int: "- (A \<inter> B) = (- A) \<union> (- B)"
by (fact compl_inf) (* already simp *)
lemma subset_Compl_self_eq: "A \<subseteq> - A \<longleftrightarrow> A = {}"
by blast
lemma Un_Int_assoc_eq: "(A \<inter> B) \<union> C = A \<inter> (B \<union> C) \<longleftrightarrow> C \<subseteq> A"
\<comment> \<open>Halmos, Naive Set Theory, page 16.\<close>
by blast
lemma Compl_UNIV_eq: "- UNIV = {}"
by (fact compl_top_eq) (* already simp *)
lemma Compl_empty_eq: "- {} = UNIV"
by (fact compl_bot_eq) (* already simp *)
lemma Compl_subset_Compl_iff [iff]: "- A \<subseteq> - B \<longleftrightarrow> B \<subseteq> A"
by (fact compl_le_compl_iff) (* FIXME: already simp *)
lemma Compl_eq_Compl_iff [iff]: "- A = - B \<longleftrightarrow> A = B"
for A B :: "'a set"
by (fact compl_eq_compl_iff) (* FIXME: already simp *)
lemma Compl_insert: "- insert x A = (- A) - {x}"
by blast
text \<open>\<^medskip> Bounded quantifiers.
The following are not added to the default simpset because
(a) they duplicate the body and (b) there are no similar rules for \<open>Int\<close>.
\<close>
lemma ball_Un: "(\<forall>x \<in> A \<union> B. P x) \<longleftrightarrow> (\<forall>x\<in>A. P x) \<and> (\<forall>x\<in>B. P x)"
by blast
lemma bex_Un: "(\<exists>x \<in> A \<union> B. P x) \<longleftrightarrow> (\<exists>x\<in>A. P x) \<or> (\<exists>x\<in>B. P x)"
by blast
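(* Illustrative sketch, not part of the original theory: adding \<open>ball_Un\<close> explicitly lets the
   simplifier distribute a bounded quantifier over a union. *)
lemma "(\<forall>x\<in>A \<union> B. P x) \<Longrightarrow> (\<forall>x\<in>B. P x) \<and> (\<forall>x\<in>A. P x)"
  by (simp add: ball_Un)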
text \<open>\<^medskip> Set difference.\<close>
lemma Diff_eq: "A - B = A \<inter> (- B)"
by blast
lemma Diff_eq_empty_iff [simp]: "A - B = {} \<longleftrightarrow> A \<subseteq> B"
by blast
lemma Diff_cancel [simp]: "A - A = {}"
by blast
lemma Diff_idemp [simp]: "(A - B) - B = A - B"
for A B :: "'a set"
by blast
lemma Diff_triv: "A \<inter> B = {} \<Longrightarrow> A - B = A"
by (blast elim: equalityE)
lemma empty_Diff [simp]: "{} - A = {}"
by blast
lemma Diff_empty [simp]: "A - {} = A"
by blast
lemma Diff_UNIV [simp]: "A - UNIV = {}"
by blast
lemma Diff_insert0 [simp]: "x \<notin> A \<Longrightarrow> A - insert x B = A - B"
by blast
lemma Diff_insert: "A - insert a B = A - B - {a}"
\<comment> \<open>NOT SUITABLE FOR REWRITING since \<open>{a} \<equiv> insert a {}\<close>\<close>
by blast
lemma Diff_insert2: "A - insert a B = A - {a} - B"
\<comment> \<open>NOT SUITABLE FOR REWRITING since \<open>{a} \<equiv> insert a {}\<close>\<close>
by blast
lemma insert_Diff_if: "insert x A - B = (if x \<in> B then A - B else insert x (A - B))"
by auto
lemma insert_Diff1 [simp]: "x \<in> B \<Longrightarrow> insert x A - B = A - B"
by blast
lemma insert_Diff_single[simp]: "insert a (A - {a}) = insert a A"
by blast
lemma insert_Diff: "a \<in> A \<Longrightarrow> insert a (A - {a}) = A"
by blast
lemma Diff_insert_absorb: "x \<notin> A \<Longrightarrow> (insert x A) - {x} = A"
by auto
lemma Diff_disjoint [simp]: "A \<inter> (B - A) = {}"
by blast
lemma Diff_partition: "A \<subseteq> B \<Longrightarrow> A \<union> (B - A) = B"
by blast
lemma double_diff: "A \<subseteq> B \<Longrightarrow> B \<subseteq> C \<Longrightarrow> B - (C - A) = A"
by blast
lemma Un_Diff_cancel [simp]: "A \<union> (B - A) = A \<union> B"
by blast
lemma Un_Diff_cancel2 [simp]: "(B - A) \<union> A = B \<union> A"
by blast
lemma Diff_Un: "A - (B \<union> C) = (A - B) \<inter> (A - C)"
by blast
lemma Diff_Int: "A - (B \<inter> C) = (A - B) \<union> (A - C)"
by blast
lemma Diff_Diff_Int: "A - (A - B) = A \<inter> B"
by blast
lemma Un_Diff: "(A \<union> B) - C = (A - C) \<union> (B - C)"
by blast
lemma Int_Diff: "(A \<inter> B) - C = A \<inter> (B - C)"
by blast
lemma Diff_Int_distrib: "C \<inter> (A - B) = (C \<inter> A) - (C \<inter> B)"
by blast
lemma Diff_Int_distrib2: "(A - B) \<inter> C = (A \<inter> C) - (B \<inter> C)"
by blast
lemma Diff_Compl [simp]: "A - (- B) = A \<inter> B"
by auto
lemma Compl_Diff_eq [simp]: "- (A - B) = - A \<union> B"
by blast
lemma subset_Compl_singleton [simp]: "A \<subseteq> - {b} \<longleftrightarrow> b \<notin> A"
by blast
text \<open>\<^medskip> Quantification over type \<^typ>\<open>bool\<close>.\<close>
lemma bool_induct: "P True \<Longrightarrow> P False \<Longrightarrow> P x"
by (cases x) auto
lemma all_bool_eq: "(\<forall>b. P b) \<longleftrightarrow> P True \<and> P False"
by (auto intro: bool_induct)
lemma bool_contrapos: "P x \<Longrightarrow> \<not> P False \<Longrightarrow> P True"
by (cases x) auto
lemma ex_bool_eq: "(\<exists>b. P b) \<longleftrightarrow> P True \<or> P False"
by (auto intro: bool_contrapos)
lemma UNIV_bool: "UNIV = {False, True}"
by (auto intro: bool_induct)
text \<open>\<^medskip> \<open>Pow\<close>\<close>
lemma Pow_empty [simp]: "Pow {} = {{}}"
by (auto simp add: Pow_def)
lemma Pow_singleton_iff [simp]: "Pow X = {Y} \<longleftrightarrow> X = {} \<and> Y = {}"
by blast (* somewhat slow *)
lemma Pow_insert: "Pow (insert a A) = Pow A \<union> (insert a ` Pow A)"
by (blast intro: image_eqI [where ?x = "u - {a}" for u])
lemma Pow_Compl: "Pow (- A) = {- B | B. A \<in> Pow B}"
by (blast intro: exI [where ?x = "- u" for u])
lemma Pow_UNIV [simp]: "Pow UNIV = UNIV"
by blast
lemma Un_Pow_subset: "Pow A \<union> Pow B \<subseteq> Pow (A \<union> B)"
by blast
lemma Pow_Int_eq [simp]: "Pow (A \<inter> B) = Pow A \<inter> Pow B"
by blast
text \<open>\<^medskip> Miscellany.\<close>
lemma Int_Diff_disjoint: "A \<inter> B \<inter> (A - B) = {}"
by blast
lemma Int_Diff_Un: "A \<inter> B \<union> (A - B) = A"
by blast
lemma set_eq_subset: "A = B \<longleftrightarrow> A \<subseteq> B \<and> B \<subseteq> A"
by blast
lemma subset_iff: "A \<subseteq> B \<longleftrightarrow> (\<forall>t. t \<in> A \<longrightarrow> t \<in> B)"
by blast
lemma subset_iff_psubset_eq: "A \<subseteq> B \<longleftrightarrow> A \<subset> B \<or> A = B"
unfolding less_le by blast
lemma all_not_in_conv [simp]: "(\<forall>x. x \<notin> A) \<longleftrightarrow> A = {}"
by blast
lemma ex_in_conv: "(\<exists>x. x \<in> A) \<longleftrightarrow> A \<noteq> {}"
by blast
lemma ball_simps [simp, no_atp]:
"\<And>A P Q. (\<forall>x\<in>A. P x \<or> Q) \<longleftrightarrow> ((\<forall>x\<in>A. P x) \<or> Q)"
"\<And>A P Q. (\<forall>x\<in>A. P \<or> Q x) \<longleftrightarrow> (P \<or> (\<forall>x\<in>A. Q x))"
"\<And>A P Q. (\<forall>x\<in>A. P \<longrightarrow> Q x) \<longleftrightarrow> (P \<longrightarrow> (\<forall>x\<in>A. Q x))"
"\<And>A P Q. (\<forall>x\<in>A. P x \<longrightarrow> Q) \<longleftrightarrow> ((\<exists>x\<in>A. P x) \<longrightarrow> Q)"
"\<And>P. (\<forall>x\<in>{}. P x) \<longleftrightarrow> True"
"\<And>P. (\<forall>x\<in>UNIV. P x) \<longleftrightarrow> (\<forall>x. P x)"
"\<And>a B P. (\<forall>x\<in>insert a B. P x) \<longleftrightarrow> (P a \<and> (\<forall>x\<in>B. P x))"
"\<And>P Q. (\<forall>x\<in>Collect Q. P x) \<longleftrightarrow> (\<forall>x. Q x \<longrightarrow> P x)"
"\<And>A P f. (\<forall>x\<in>f`A. P x) \<longleftrightarrow> (\<forall>x\<in>A. P (f x))"
"\<And>A P. (\<not> (\<forall>x\<in>A. P x)) \<longleftrightarrow> (\<exists>x\<in>A. \<not> P x)"
by auto
lemma bex_simps [simp, no_atp]:
"\<And>A P Q. (\<exists>x\<in>A. P x \<and> Q) \<longleftrightarrow> ((\<exists>x\<in>A. P x) \<and> Q)"
"\<And>A P Q. (\<exists>x\<in>A. P \<and> Q x) \<longleftrightarrow> (P \<and> (\<exists>x\<in>A. Q x))"
"\<And>P. (\<exists>x\<in>{}. P x) \<longleftrightarrow> False"
"\<And>P. (\<exists>x\<in>UNIV. P x) \<longleftrightarrow> (\<exists>x. P x)"
"\<And>a B P. (\<exists>x\<in>insert a B. P x) \<longleftrightarrow> (P a \<or> (\<exists>x\<in>B. P x))"
"\<And>P Q. (\<exists>x\<in>Collect Q. P x) \<longleftrightarrow> (\<exists>x. Q x \<and> P x)"
"\<And>A P f. (\<exists>x\<in>f`A. P x) \<longleftrightarrow> (\<exists>x\<in>A. P (f x))"
"\<And>A P. (\<not>(\<exists>x\<in>A. P x)) \<longleftrightarrow> (\<forall>x\<in>A. \<not> P x)"
by auto
lemma ex_image_cong_iff [simp, no_atp]:
"(\<exists>x. x\<in>f`A) \<longleftrightarrow> A \<noteq> {}" "(\<exists>x. x\<in>f`A \<and> P x) \<longleftrightarrow> (\<exists>x\<in>A. P (f x))"
by auto
subsubsection \<open>Monotonicity of various operations\<close>
lemma image_mono: "A \<subseteq> B \<Longrightarrow> f ` A \<subseteq> f ` B"
by blast
lemma Pow_mono: "A \<subseteq> B \<Longrightarrow> Pow A \<subseteq> Pow B"
by blast
lemma insert_mono: "C \<subseteq> D \<Longrightarrow> insert a C \<subseteq> insert a D"
by blast
lemma Un_mono: "A \<subseteq> C \<Longrightarrow> B \<subseteq> D \<Longrightarrow> A \<union> B \<subseteq> C \<union> D"
by (fact sup_mono)
lemma Int_mono: "A \<subseteq> C \<Longrightarrow> B \<subseteq> D \<Longrightarrow> A \<inter> B \<subseteq> C \<inter> D"
by (fact inf_mono)
lemma Diff_mono: "A \<subseteq> C \<Longrightarrow> D \<subseteq> B \<Longrightarrow> A - B \<subseteq> C - D"
by blast
lemma Compl_anti_mono: "A \<subseteq> B \<Longrightarrow> - B \<subseteq> - A"
by (fact compl_mono)
text \<open>\<^medskip> Monotonicity of implications.\<close>
lemma in_mono: "A \<subseteq> B \<Longrightarrow> x \<in> A \<longrightarrow> x \<in> B"
by (rule impI) (erule subsetD)
lemma conj_mono: "P1 \<longrightarrow> Q1 \<Longrightarrow> P2 \<longrightarrow> Q2 \<Longrightarrow> (P1 \<and> P2) \<longrightarrow> (Q1 \<and> Q2)"
by iprover
lemma disj_mono: "P1 \<longrightarrow> Q1 \<Longrightarrow> P2 \<longrightarrow> Q2 \<Longrightarrow> (P1 \<or> P2) \<longrightarrow> (Q1 \<or> Q2)"
by iprover
lemma imp_mono: "Q1 \<longrightarrow> P1 \<Longrightarrow> P2 \<longrightarrow> Q2 \<Longrightarrow> (P1 \<longrightarrow> P2) \<longrightarrow> (Q1 \<longrightarrow> Q2)"
by iprover
lemma imp_refl: "P \<longrightarrow> P" ..
lemma not_mono: "Q \<longrightarrow> P \<Longrightarrow> \<not> P \<longrightarrow> \<not> Q"
by iprover
lemma ex_mono: "(\<And>x. P x \<longrightarrow> Q x) \<Longrightarrow> (\<exists>x. P x) \<longrightarrow> (\<exists>x. Q x)"
by iprover
lemma all_mono: "(\<And>x. P x \<longrightarrow> Q x) \<Longrightarrow> (\<forall>x. P x) \<longrightarrow> (\<forall>x. Q x)"
by iprover
lemma Collect_mono: "(\<And>x. P x \<longrightarrow> Q x) \<Longrightarrow> Collect P \<subseteq> Collect Q"
by blast
lemma Int_Collect_mono: "A \<subseteq> B \<Longrightarrow> (\<And>x. x \<in> A \<Longrightarrow> P x \<longrightarrow> Q x) \<Longrightarrow> A \<inter> Collect P \<subseteq> B \<inter> Collect Q"
by blast
lemmas basic_monos =
subset_refl imp_refl disj_mono conj_mono ex_mono Collect_mono in_mono
lemma eq_to_mono: "a = b \<Longrightarrow> c = d \<Longrightarrow> b \<longrightarrow> d \<Longrightarrow> a \<longrightarrow> c"
by iprover
subsubsection \<open>Inverse image of a function\<close>
definition vimage :: "('a \<Rightarrow> 'b) \<Rightarrow> 'b set \<Rightarrow> 'a set" (infixr "-`" 90)
where "f -` B \<equiv> {x. f x \<in> B}"
lemma vimage_eq [simp]: "a \<in> f -` B \<longleftrightarrow> f a \<in> B"
unfolding vimage_def by blast
lemma vimage_singleton_eq: "a \<in> f -` {b} \<longleftrightarrow> f a = b"
by simp
lemma vimageI [intro]: "f a = b \<Longrightarrow> b \<in> B \<Longrightarrow> a \<in> f -` B"
unfolding vimage_def by blast
lemma vimageI2: "f a \<in> A \<Longrightarrow> a \<in> f -` A"
unfolding vimage_def by fast
lemma vimageE [elim!]: "a \<in> f -` B \<Longrightarrow> (\<And>x. f a = x \<Longrightarrow> x \<in> B \<Longrightarrow> P) \<Longrightarrow> P"
unfolding vimage_def by blast
lemma vimageD: "a \<in> f -` A \<Longrightarrow> f a \<in> A"
unfolding vimage_def by fast
lemma vimage_empty [simp]: "f -` {} = {}"
by blast
lemma vimage_Compl: "f -` (- A) = - (f -` A)"
by blast
lemma vimage_Un [simp]: "f -` (A \<union> B) = (f -` A) \<union> (f -` B)"
by blast
lemma vimage_Int [simp]: "f -` (A \<inter> B) = (f -` A) \<inter> (f -` B)"
by fast
lemma vimage_Collect_eq [simp]: "f -` Collect P = {y. P (f y)}"
by blast
lemma vimage_Collect: "(\<And>x. P (f x) = Q x) \<Longrightarrow> f -` (Collect P) = Collect Q"
by blast
lemma vimage_insert: "f -` (insert a B) = (f -` {a}) \<union> (f -` B)"
\<comment> \<open>NOT suitable for rewriting because of the recurrence of \<open>{a}\<close>.\<close>
by blast
lemma vimage_Diff: "f -` (A - B) = (f -` A) - (f -` B)"
by blast
lemma vimage_UNIV [simp]: "f -` UNIV = UNIV"
by blast
lemma vimage_mono: "A \<subseteq> B \<Longrightarrow> f -` A \<subseteq> f -` B"
\<comment> \<open>monotonicity\<close>
by blast
lemma vimage_image_eq: "f -` (f ` A) = {y. \<exists>x\<in>A. f x = f y}"
by (blast intro: sym)
lemma image_vimage_subset: "f ` (f -` A) \<subseteq> A"
by blast
lemma image_vimage_eq [simp]: "f ` (f -` A) = A \<inter> range f"
by blast
lemma image_subset_iff_subset_vimage: "f ` A \<subseteq> B \<longleftrightarrow> A \<subseteq> f -` B"
by blast
lemma subset_vimage_iff: "A \<subseteq> f -` B \<longleftrightarrow> (\<forall>x\<in>A. f x \<in> B)"
by auto
lemma vimage_const [simp]: "((\<lambda>x. c) -` A) = (if c \<in> A then UNIV else {})"
by auto
lemma vimage_if [simp]: "((\<lambda>x. if x \<in> B then c else d) -` A) =
(if c \<in> A then (if d \<in> A then UNIV else B)
else if d \<in> A then - B else {})"
by (auto simp add: vimage_def)
lemma vimage_inter_cong: "(\<And> w. w \<in> S \<Longrightarrow> f w = g w) \<Longrightarrow> f -` y \<inter> S = g -` y \<inter> S"
by auto
lemma vimage_ident [simp]: "(\<lambda>x. x) -` Y = Y"
by blast
subsubsection \<open>Singleton sets\<close>
definition is_singleton :: "'a set \<Rightarrow> bool"
where "is_singleton A \<longleftrightarrow> (\<exists>x. A = {x})"
lemma is_singletonI [simp, intro!]: "is_singleton {x}"
unfolding is_singleton_def by simp
lemma is_singletonI': "A \<noteq> {} \<Longrightarrow> (\<And>x y. x \<in> A \<Longrightarrow> y \<in> A \<Longrightarrow> x = y) \<Longrightarrow> is_singleton A"
unfolding is_singleton_def by blast
lemma is_singletonE: "is_singleton A \<Longrightarrow> (\<And>x. A = {x} \<Longrightarrow> P) \<Longrightarrow> P"
unfolding is_singleton_def by blast
subsubsection \<open>Getting the contents of a singleton set\<close>
definition the_elem :: "'a set \<Rightarrow> 'a"
where "the_elem X = (THE x. X = {x})"
lemma the_elem_eq [simp]: "the_elem {x} = x"
by (simp add: the_elem_def)
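(* Illustrative sketch, not part of the original theory: \<open>the_elem\<close> depends only on the set,
   not on how the singleton is written down. *)
lemma "the_elem (insert a {a}) = a"
  by simp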
lemma is_singleton_the_elem: "is_singleton A \<longleftrightarrow> A = {the_elem A}"
by (auto simp: is_singleton_def)
lemma the_elem_image_unique:
assumes "A \<noteq> {}"
and *: "\<And>y. y \<in> A \<Longrightarrow> f y = f x"
shows "the_elem (f ` A) = f x"
unfolding the_elem_def
proof (rule the1_equality)
from \<open>A \<noteq> {}\<close> obtain y where "y \<in> A" by auto
with * have "f x = f y" by simp
with \<open>y \<in> A\<close> have "f x \<in> f ` A" by blast
with * show "f ` A = {f x}" by auto
then show "\<exists>!x. f ` A = {x}" by auto
qed
subsubsection \<open>Monad operation\<close>
definition bind :: "'a set \<Rightarrow> ('a \<Rightarrow> 'b set) \<Rightarrow> 'b set"
where "bind A f = {x. \<exists>B \<in> f`A. x \<in> B}"
hide_const (open) bind
lemma bind_bind: "Set.bind (Set.bind A B) C = Set.bind A (\<lambda>x. Set.bind (B x) C)"
for A :: "'a set"
by (auto simp: bind_def)
lemma empty_bind [simp]: "Set.bind {} f = {}"
by (simp add: bind_def)
lemma nonempty_bind_const: "A \<noteq> {} \<Longrightarrow> Set.bind A (\<lambda>_. B) = B"
by (auto simp: bind_def)
lemma bind_const: "Set.bind A (\<lambda>_. B) = (if A = {} then {} else B)"
by (auto simp: bind_def)
lemma bind_singleton_conv_image: "Set.bind A (\<lambda>x. {f x}) = f ` A"
by (auto simp: bind_def)
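(* Illustrative sketch, not part of the original theory: the left unit law of the monad
   operation, directly from the definition. *)
lemma "Set.bind {a} f = f a"
  by (auto simp: bind_def)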
subsubsection \<open>Operations for execution\<close>
definition is_empty :: "'a set \<Rightarrow> bool"
where [code_abbrev]: "is_empty A \<longleftrightarrow> A = {}"
hide_const (open) is_empty
definition remove :: "'a \<Rightarrow> 'a set \<Rightarrow> 'a set"
where [code_abbrev]: "remove x A = A - {x}"
hide_const (open) remove
lemma member_remove [simp]: "x \<in> Set.remove y A \<longleftrightarrow> x \<in> A \<and> x \<noteq> y"
by (simp add: remove_def)
definition filter :: "('a \<Rightarrow> bool) \<Rightarrow> 'a set \<Rightarrow> 'a set"
where [code_abbrev]: "filter P A = {a \<in> A. P a}"
hide_const (open) filter
lemma member_filter [simp]: "x \<in> Set.filter P A \<longleftrightarrow> x \<in> A \<and> P x"
by (simp add: filter_def)
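(* Illustrative sketch, not part of the original theory: filtering by membership in another
   set is just intersection. *)
lemma "Set.filter (\<lambda>x. x \<in> B) A = A \<inter> B"
  by auto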
instantiation set :: (equal) equal
begin
definition "HOL.equal A B \<longleftrightarrow> A \<subseteq> B \<and> B \<subseteq> A"
instance by standard (auto simp add: equal_set_def)
end
text \<open>Misc\<close>
definition pairwise :: "('a \<Rightarrow> 'a \<Rightarrow> bool) \<Rightarrow> 'a set \<Rightarrow> bool"
where "pairwise R S \<longleftrightarrow> (\<forall>x \<in> S. \<forall>y \<in> S. x \<noteq> y \<longrightarrow> R x y)"
lemma pairwise_alt: "pairwise R S \<longleftrightarrow> (\<forall>x\<in>S. \<forall>y\<in>S-{x}. R x y)"
by (auto simp add: pairwise_def)
lemma pairwise_trivial [simp]: "pairwise (\<lambda>i j. j \<noteq> i) I"
by (auto simp: pairwise_def)
lemma pairwiseI [intro?]:
"pairwise R S" if "\<And>x y. x \<in> S \<Longrightarrow> y \<in> S \<Longrightarrow> x \<noteq> y \<Longrightarrow> R x y"
using that by (simp add: pairwise_def)
lemma pairwiseD:
"R x y" and "R y x"
if "pairwise R S" "x \<in> S" and "y \<in> S" and "x \<noteq> y"
using that by (simp_all add: pairwise_def)
lemma pairwise_empty [simp]: "pairwise P {}"
by (simp add: pairwise_def)
lemma pairwise_singleton [simp]: "pairwise P {A}"
by (simp add: pairwise_def)
lemma pairwise_insert:
"pairwise r (insert x s) \<longleftrightarrow> (\<forall>y. y \<in> s \<and> y \<noteq> x \<longrightarrow> r x y \<and> r y x) \<and> pairwise r s"
by (force simp: pairwise_def)
lemma pairwise_subset: "pairwise P S \<Longrightarrow> T \<subseteq> S \<Longrightarrow> pairwise P T"
by (force simp: pairwise_def)
lemma pairwise_mono: "\<lbrakk>pairwise P A; \<And>x y. P x y \<Longrightarrow> Q x y; B \<subseteq> A\<rbrakk> \<Longrightarrow> pairwise Q B"
by (fastforce simp: pairwise_def)
lemma pairwise_imageI:
"pairwise P (f ` A)"
if "\<And>x y. x \<in> A \<Longrightarrow> y \<in> A \<Longrightarrow> x \<noteq> y \<Longrightarrow> f x \<noteq> f y \<Longrightarrow> P (f x) (f y)"
using that by (auto intro: pairwiseI)
lemma pairwise_image: "pairwise r (f ` s) \<longleftrightarrow> pairwise (\<lambda>x y. (f x \<noteq> f y) \<longrightarrow> r (f x) (f y)) s"
by (force simp: pairwise_def)
definition disjnt :: "'a set \<Rightarrow> 'a set \<Rightarrow> bool"
where "disjnt A B \<longleftrightarrow> A \<inter> B = {}"
lemma disjnt_self_iff_empty [simp]: "disjnt S S \<longleftrightarrow> S = {}"
by (auto simp: disjnt_def)
lemma disjnt_iff: "disjnt A B \<longleftrightarrow> (\<forall>x. \<not> (x \<in> A \<and> x \<in> B))"
by (force simp: disjnt_def)
lemma disjnt_sym: "disjnt A B \<Longrightarrow> disjnt B A"
using disjnt_iff by blast
lemma disjnt_empty1 [simp]: "disjnt {} A" and disjnt_empty2 [simp]: "disjnt A {}"
by (auto simp: disjnt_def)
lemma disjnt_insert1 [simp]: "disjnt (insert a X) Y \<longleftrightarrow> a \<notin> Y \<and> disjnt X Y"
by (simp add: disjnt_def)
lemma disjnt_insert2 [simp]: "disjnt Y (insert a X) \<longleftrightarrow> a \<notin> Y \<and> disjnt Y X"
by (simp add: disjnt_def)
lemma disjnt_subset1 : "\<lbrakk>disjnt X Y; Z \<subseteq> X\<rbrakk> \<Longrightarrow> disjnt Z Y"
by (auto simp: disjnt_def)
lemma disjnt_subset2 : "\<lbrakk>disjnt X Y; Z \<subseteq> Y\<rbrakk> \<Longrightarrow> disjnt X Z"
by (auto simp: disjnt_def)
lemma disjnt_Un1 [simp]: "disjnt (A \<union> B) C \<longleftrightarrow> disjnt A C \<and> disjnt B C"
by (auto simp: disjnt_def)
lemma disjnt_Un2 [simp]: "disjnt C (A \<union> B) \<longleftrightarrow> disjnt C A \<and> disjnt C B"
by (auto simp: disjnt_def)
lemma disjnt_Diff1: "disjnt (X-Y) (U-V)" and disjnt_Diff2: "disjnt (U-V) (X-Y)" if "X \<subseteq> V"
using that by (auto simp: disjnt_def)
lemma disjoint_image_subset: "\<lbrakk>pairwise disjnt \<A>; \<And>X. X \<in> \<A> \<Longrightarrow> f X \<subseteq> X\<rbrakk> \<Longrightarrow> pairwise disjnt (f `\<A>)"
unfolding disjnt_def pairwise_def by fast
lemma pairwise_disjnt_iff: "pairwise disjnt \<A> \<longleftrightarrow> (\<forall>x. \<exists>\<^sub>\<le>\<^sub>1 X. X \<in> \<A> \<and> x \<in> X)"
by (auto simp: Uniq_def disjnt_iff pairwise_def)
lemma disjnt_insert: \<^marker>\<open>contributor \<open>Lars Hupel\<close>\<close>
\<open>disjnt (insert x M) N\<close> if \<open>x \<notin> N\<close> \<open>disjnt M N\<close>
using that by (simp add: disjnt_def)
lemma Int_emptyI: "(\<And>x. x \<in> A \<Longrightarrow> x \<in> B \<Longrightarrow> False) \<Longrightarrow> A \<inter> B = {}"
by blast
lemma in_image_insert_iff:
assumes "\<And>C. C \<in> B \<Longrightarrow> x \<notin> C"
shows "A \<in> insert x ` B \<longleftrightarrow> x \<in> A \<and> A - {x} \<in> B" (is "?P \<longleftrightarrow> ?Q")
proof
assume ?P then show ?Q
using assms by auto
next
assume ?Q
then have "x \<in> A" and "A - {x} \<in> B"
by simp_all
from \<open>A - {x} \<in> B\<close> have "insert x (A - {x}) \<in> insert x ` B"
by (rule imageI)
also from \<open>x \<in> A\<close>
have "insert x (A - {x}) = A"
by auto
finally show ?P .
qed
hide_const (open) member not_member
lemmas equalityI = subset_antisym
lemmas set_mp = subsetD
lemmas set_rev_mp = rev_subsetD
ML \<open>
val Ball_def = @{thm Ball_def}
val Bex_def = @{thm Bex_def}
val CollectD = @{thm CollectD}
val CollectE = @{thm CollectE}
val CollectI = @{thm CollectI}
val Collect_conj_eq = @{thm Collect_conj_eq}
val Collect_mem_eq = @{thm Collect_mem_eq}
val IntD1 = @{thm IntD1}
val IntD2 = @{thm IntD2}
val IntE = @{thm IntE}
val IntI = @{thm IntI}
val Int_Collect = @{thm Int_Collect}
val UNIV_I = @{thm UNIV_I}
val UNIV_witness = @{thm UNIV_witness}
val UnE = @{thm UnE}
val UnI1 = @{thm UnI1}
val UnI2 = @{thm UnI2}
val ballE = @{thm ballE}
val ballI = @{thm ballI}
val bexCI = @{thm bexCI}
val bexE = @{thm bexE}
val bexI = @{thm bexI}
val bex_triv = @{thm bex_triv}
val bspec = @{thm bspec}
val contra_subsetD = @{thm contra_subsetD}
val equalityCE = @{thm equalityCE}
val equalityD1 = @{thm equalityD1}
val equalityD2 = @{thm equalityD2}
val equalityE = @{thm equalityE}
val equalityI = @{thm equalityI}
val imageE = @{thm imageE}
val imageI = @{thm imageI}
val image_Un = @{thm image_Un}
val image_insert = @{thm image_insert}
val insert_commute = @{thm insert_commute}
val insert_iff = @{thm insert_iff}
val mem_Collect_eq = @{thm mem_Collect_eq}
val rangeE = @{thm rangeE}
val rangeI = @{thm rangeI}
val range_eqI = @{thm range_eqI}
val subsetCE = @{thm subsetCE}
val subsetD = @{thm subsetD}
val subsetI = @{thm subsetI}
val subset_refl = @{thm subset_refl}
val subset_trans = @{thm subset_trans}
val vimageD = @{thm vimageD}
val vimageE = @{thm vimageE}
val vimageI = @{thm vimageI}
val vimageI2 = @{thm vimageI2}
val vimage_Collect = @{thm vimage_Collect}
val vimage_Int = @{thm vimage_Int}
val vimage_Un = @{thm vimage_Un}
\<close>
end
diff --git a/src/HOL/Statespace/state_space.ML b/src/HOL/Statespace/state_space.ML
--- a/src/HOL/Statespace/state_space.ML
+++ b/src/HOL/Statespace/state_space.ML
@@ -1,731 +1,736 @@
(* Title: HOL/Statespace/state_space.ML
Author: Norbert Schirmer, TU Muenchen, 2007
Author: Norbert Schirmer, Apple, 2021
*)
signature STATE_SPACE =
sig
val distinct_compsN : string
val getN : string
val putN : string
val injectN : string
val namespaceN : string
val projectN : string
val valuetypesN : string
val namespace_definition :
bstring ->
typ ->
(xstring, string) Expression.expr * (binding * string option * mixfix) list ->
string list -> string list -> theory -> theory
val define_statespace :
string list ->
string ->
((string * bool) * (string list * bstring * (string * string) list)) list ->
(string * string) list -> theory -> theory
val define_statespace_i :
string option ->
string list ->
string ->
((string * bool) * (typ list * bstring * (string * string) list)) list ->
(string * typ) list -> theory -> theory
val statespace_decl :
((string list * bstring) *
(((string * bool) * (string list * xstring * (bstring * bstring) list)) list *
(bstring * string) list)) parser
val neq_x_y : Proof.context -> term -> term -> thm option
val distinctNameSolver : Simplifier.solver
val distinctTree_tac : Proof.context -> int -> tactic
val distinct_simproc : Simplifier.simproc
val is_statespace : Context.generic -> xstring -> bool
val get_comp' : Context.generic -> string -> (typ * string list) option
val get_comp : Context.generic -> string -> (typ * string) option (* legacy wrapper, eventually replace by primed version *)
val get_comps : Context.generic -> (typ * string list) Termtab.table
val silent : bool Config.T
val gen_lookup_tr : Proof.context -> term -> string -> term
val lookup_swap_tr : Proof.context -> term list -> term
val lookup_tr : Proof.context -> term list -> term
val lookup_tr' : Proof.context -> term list -> term
val gen'_update_tr :
bool -> bool -> Proof.context -> string -> term -> term -> term
val gen_update_tr : (* legacy wrapper, eventually replace by primed version *)
bool -> Proof.context -> string -> term -> term -> term
val update_tr : Proof.context -> term list -> term
val update_tr' : Proof.context -> term list -> term
val trace_data: Context.generic -> unit
end;
structure StateSpace : STATE_SPACE =
struct
(* Names *)
val distinct_compsN = "distinct_names"
val namespaceN = "_namespace"
val valuetypesN = "_valuetypes"
val projectN = "project"
val injectN = "inject"
val getN = "get"
val putN = "put"
val project_injectL = "StateSpaceLocale.project_inject";
(* Library *)
fun fold1 f xs = fold f (tl xs) (hd xs)
fun fold1' f [] x = x
| fold1' f xs _ = fold1 f xs
fun sorted_subset eq [] ys = true
| sorted_subset eq (x::xs) [] = false
| sorted_subset eq (x::xs) (y::ys) = if eq (x,y) then sorted_subset eq xs ys
else sorted_subset eq (x::xs) ys;
fun comps_of_distinct_thm thm = Thm.prop_of thm
|> (fn (_$(_$t)) => DistinctTreeProver.dest_tree t) |> map (fst o dest_Free) |> sort_strings;
fun insert_tagged_distinct_thms tagged_thm tagged_thms =
let
fun ins t1 [] = [t1]
| ins (t1 as (names1, _)) ((t2 as (names2, _))::thms) =
if Ord_List.subset string_ord (names1, names2) then t2::thms
else if Ord_List.subset string_ord (names2, names1) then ins t1 thms
else t2 :: ins t1 thms
in
ins tagged_thm tagged_thms
end
fun join_tagged_distinct_thms tagged_thms1 tagged_thms2 =
tagged_thms1 |> fold insert_tagged_distinct_thms tagged_thms2
fun tag_distinct_thm thm = (comps_of_distinct_thm thm, thm)
val tag_distinct_thms = map tag_distinct_thm
fun join_distinct_thms (thms1, thms2) =
if pointer_eq (thms1, thms2) then thms1
else join_tagged_distinct_thms (tag_distinct_thms thms1) (tag_distinct_thms thms2) |> map snd
fun insert_distinct_thm thm thms = join_distinct_thms (thms, [thm])
fun join_declinfo_entry name (T1:typ, names1:string list) (T2, names2) =
let
fun typ_info T names = @{make_string} T ^ " " ^ Pretty.string_of (Pretty.str_list "(" ")" names);
in
if T1 = T2 then (T1, distinct (op =) (names1 @ names2))
else error ("statespace component '" ^ name ^ "' disagrees on type:\n " ^
typ_info T1 names1 ^ " vs. " ^ typ_info T2 names2
)
end
fun guess_name (Free (x,_)) = x
| guess_name _ = "unknown"
val join_declinfo = Termtab.join (fn t => uncurry (join_declinfo_entry (guess_name t)))
(*
A component might appear in *different* statespaces within the same context. However, it must
be mapped to the same type. Note that this information is currently only properly maintained
within contexts where all components are actually "fixes" and not arbitrary terms. Moreover, on
the theory level the info stays empty.
This means that on the theory level we do not make use of the syntax and the tree-based distinct simprocs.
N.B.: It might still make sense (from a performance perspective) to overcome this limitation
and even use the simprocs when a statespace is interpreted for concrete names like HOL-strings.
Once the distinct-theorem is proven by the interpretation, the simproc can use the positions in
the tree to derive distinctness of two strings instead of falling back on HOL-string comparison.
*)
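(* Editorial illustration, not from the original file: if the same fixed component ends up
   being recorded at two different types, join_declinfo_entry above is where this is detected,
   producing an error of the form "statespace component 'x' disagrees on type: ...". *)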
type statespace_info =
{args: (string * sort) list, (* type arguments *)
parents: (typ list * string * string option list) list,
(* type instantiation, state-space name, component renamings *)
components: (string * typ) list,
types: typ list (* range types of state space *)
};
structure Data = Generic_Data
(
type T =
(typ * string list) Termtab.table * (*declinfo: type, names of statespaces of component*)
thm list Symtab.table * (*distinctthm: minimal list of maximal distinctness-assumptions for a component name*)
statespace_info Symtab.table;
val empty = (Termtab.empty, Symtab.empty, Symtab.empty);
fun merge ((declinfo1, distinctthm1, statespaces1), (declinfo2, distinctthm2, statespaces2)) =
(join_declinfo (declinfo1, declinfo2),
Symtab.join (K join_distinct_thms) (distinctthm1, distinctthm2),
Symtab.merge (K true) (statespaces1, statespaces2));
);
val get_declinfo = #1 o Data.get
val get_distinctthm = #2 o Data.get
val get_statespaces = #3 o Data.get
val map_declinfo = Data.map o @{apply 3(1)}
val map_distinctthm = Data.map o @{apply 3(2)}
val map_statespaces = Data.map o @{apply 3(3)}
fun trace_data context =
tracing ("StateSpace.Data: " ^ @{make_string}
{declinfo = get_declinfo context,
distinctthm = get_distinctthm context,
statespaces = get_statespaces context})
fun update_declinfo (n,v) = map_declinfo (fn declinfo =>
let val vs = apsnd single v
in Termtab.map_default (n, vs) (join_declinfo_entry (guess_name n) vs) declinfo end);
fun expression_no_pos (expr, fixes) : Expression.expression =
(map (fn (name, inst) => ((name, Position.none), inst)) expr, fixes);
fun prove_interpretation_in ctxt_tac (name, expr) thy =
thy
|> Interpretation.global_sublocale_cmd (name, Position.none) (expression_no_pos expr) []
|> Proof.global_terminal_proof
((Method.Basic (fn ctxt => SIMPLE_METHOD (ctxt_tac ctxt)), Position.no_range), NONE)
|> Proof_Context.theory_of
fun add_locale name expr elems thy =
thy
|> Expression.add_locale (Binding.name name) (Binding.name name) [] expr elems
|> snd
|> Local_Theory.exit;
fun add_locale_cmd name expr elems thy =
thy
|> Expression.add_locale_cmd (Binding.name name) Binding.empty [] (expression_no_pos expr) elems
|> snd
|> Local_Theory.exit;
fun is_statespace context name =
Symtab.defined (get_statespaces context) (Locale.intern (Context.theory_of context) name)
fun add_statespace name args parents components types =
map_statespaces (Symtab.update_new (name, {args=args,parents=parents, components=components,types=types}))
val get_statespace = Symtab.lookup o get_statespaces
val the_statespace = the oo get_statespace
fun mk_free ctxt name =
if Variable.is_fixed ctxt name orelse Variable.is_declared ctxt name
then
let
val n' = Variable.intern_fixed ctxt name |> perhaps Long_Name.dest_hidden;
val free = Free (n', Proof_Context.infer_type ctxt (n', dummyT))
in SOME (free) end
else (tracing ("variable not fixed or declared: " ^ name); NONE)
-val get_dist_thm = Symtab.lookup o get_distinctthm;
+fun get_dist_thm context name =
+ Symtab.lookup_list (get_distinctthm context) name
+ |> map (Thm.transfer'' context)
fun get_dist_thm2 ctxt x y =
(let
val dist_thms = [x, y] |> map (#1 o dest_Free)
- |> map (these o get_dist_thm (Context.Proof ctxt)) |> flat;
+ |> maps (get_dist_thm (Context.Proof ctxt));
fun get_paths dist_thm =
let
val ctree = Thm.cprop_of dist_thm |> Thm.dest_comb |> #2 |> Thm.dest_comb |> #2;
val tree = Thm.term_of ctree;
val x_path = the (DistinctTreeProver.find_tree x tree);
val y_path = the (DistinctTreeProver.find_tree y tree);
in SOME (dist_thm, x_path, y_path) end
handle Option.Option => NONE
val result = get_first get_paths dist_thms
in
result
end)
fun get_comp' context name =
mk_free (Context.proof_of context) name
|> Option.mapPartial (fn t =>
let
val declinfo = get_declinfo context
in
case Termtab.lookup declinfo t of
NONE => (* during syntax phase, types of fixes might not yet be constrained completely *)
AList.lookup (fn (x, Free (n,_)) => n = x | _ => false) (Termtab.dest declinfo) name
| some => some
end)
(* legacy wrapper *)
fun get_comp ctxt name =
get_comp' ctxt name |> Option.map (apsnd (fn ns => if null ns then "" else hd ns))
val get_comps = get_declinfo
(*** Tactics ***)
fun neq_x_y ctxt x y =
(let
val (dist_thm, x_path, y_path) = the (get_dist_thm2 ctxt x y);
val thm = DistinctTreeProver.distinctTreeProver ctxt dist_thm x_path y_path;
in SOME thm
end handle Option.Option => NONE)
fun distinctTree_tac ctxt = SUBGOAL (fn (goal, i) =>
(case goal of
Const (\<^const_name>\<open>Trueprop\<close>, _) $
(Const (\<^const_name>\<open>Not\<close>, _) $
(Const (\<^const_name>\<open>HOL.eq\<close>, _) $ (x as Free _) $ (y as Free _))) =>
(case neq_x_y ctxt x y of
SOME neq => resolve_tac ctxt [neq] i
| NONE => no_tac)
| _ => no_tac));
val distinctNameSolver = mk_solver "distinctNameSolver" distinctTree_tac;
val distinct_simproc =
Simplifier.make_simproc \<^context> "StateSpace.distinct_simproc"
{lhss = [\<^term>\<open>x = y\<close>],
proc = fn _ => fn ctxt => fn ct =>
(case Thm.term_of ct of
Const (\<^const_name>\<open>HOL.eq\<close>,_) $ (x as Free _) $ (y as Free _) =>
Option.map (fn neq => DistinctTreeProver.neq_to_eq_False OF [neq])
(neq_x_y ctxt x y)
| _ => NONE)};
fun interprete_parent name dist_thm_name parent_expr thy =
let
fun solve_tac ctxt = CSUBGOAL (fn (goal, i) =>
let
val distinct_thm = Proof_Context.get_thm ctxt dist_thm_name;
val rule = DistinctTreeProver.distinct_implProver ctxt distinct_thm goal;
in resolve_tac ctxt [rule] i end);
fun tac ctxt =
Locale.intro_locales_tac {strict = true, eager = true} ctxt [] THEN ALLGOALS (solve_tac ctxt);
in
thy |> prove_interpretation_in tac (name, parent_expr)
end;
fun namespace_definition name nameT parent_expr parent_comps new_comps thy =
let
val all_comps = parent_comps @ new_comps;
val vars = (map (fn n => (Binding.name n, NONE, NoSyn)) all_comps);
val dist_thm_name = distinct_compsN;
val dist_thm_full_name = dist_thm_name;
fun type_attr phi = Thm.declaration_attribute (fn thm => fn context =>
(case context of
Context.Theory _ => context
| Context.Proof ctxt =>
let
val declinfo = get_declinfo context
val tt = get_distinctthm context;
val all_names = comps_of_distinct_thm thm;
- fun upd name = Symtab.map_default (name, [thm]) (insert_distinct_thm thm)
+ val thm0 = Thm.trim_context thm;
+ fun upd name = Symtab.map_default (name, [thm0]) (insert_distinct_thm thm0)
val tt' = tt |> fold upd all_names;
val context' =
Context_Position.set_visible false ctxt
addsimprocs [distinct_simproc]
|> Context_Position.restore_visible ctxt
|> Context.Proof
|> map_declinfo (K declinfo)
|> map_distinctthm (K tt');
in context' end));
- val attr = Attrib.internal type_attr;
+ val attr = Attrib.internal \<^here> type_attr;
val assume =
((Binding.name dist_thm_name, [attr]),
[(HOLogic.Trueprop $
(Const (\<^const_name>\<open>all_distinct\<close>, Type (\<^type_name>\<open>tree\<close>, [nameT]) --> HOLogic.boolT) $
DistinctTreeProver.mk_tree (fn n => Free (n, nameT)) nameT
(sort fast_string_ord all_comps)), [])]);
in
thy
|> add_locale name ([], vars) [Element.Assumes [assume]]
|> Proof_Context.theory_of
|> interprete_parent name dist_thm_full_name parent_expr
end;
fun encode_dot x = if x = #"." then #"_" else x;
fun encode_type (TFree (s, _)) = s
| encode_type (TVar ((s,i),_)) = "?" ^ s ^ string_of_int i
| encode_type (Type (n,Ts)) =
let
val Ts' = fold1' (fn x => fn y => x ^ "_" ^ y) (map encode_type Ts) "";
val n' = String.map encode_dot n;
in if Ts'="" then n' else Ts' ^ "_" ^ n' end;
fun project_name T = projectN ^"_"^encode_type T;
fun inject_name T = injectN ^"_"^encode_type T;
fun add_declaration name decl thy =
thy
|> Named_Target.init [] name
- |> (fn lthy => Local_Theory.declaration {syntax = true, pervasive = false} (decl lthy) lthy)
+ |> (fn lthy =>
+ Local_Theory.declaration {syntax = true, pervasive = false, pos = Position.thread_data ()}
+ (decl lthy) lthy)
|> Local_Theory.exit_global;
fun parent_components thy (Ts, pname, renaming) =
let
fun rename [] xs = xs
| rename (NONE::rs) (x::xs) = x::rename rs xs
| rename (SOME r::rs) ((x,T)::xs) = (r,T)::rename rs xs;
val {args, parents, components, ...} = the_statespace (Context.Theory thy) pname;
val inst = map fst args ~~ Ts;
val subst = Term.map_type_tfree (the o AList.lookup (op =) inst o fst);
val parent_comps =
maps (fn (Ts',n,rs) => parent_components thy (map subst Ts', n, rs)) parents;
val all_comps = rename renaming (parent_comps @ map (apsnd subst) components);
in all_comps end;
fun statespace_definition state_type args name parents parent_comps components thy =
let
val full_name = Sign.full_bname thy name;
val all_comps = parent_comps @ components;
val components' = map (fn (n,T) => (n,(T,full_name))) components;
fun parent_expr (prefix, (_, n, rs)) =
(suffix namespaceN n, (prefix, (Expression.Positional rs,[])));
val parents_expr = map parent_expr parents;
fun distinct_types Ts =
let val tab = fold (fn T => fn tab => Typtab.update (T,()) tab) Ts Typtab.empty;
in map fst (Typtab.dest tab) end;
val Ts = distinct_types (map snd all_comps);
val arg_names = map fst args;
val valueN = singleton (Name.variant_list arg_names) "'value";
val nameN = singleton (Name.variant_list (valueN :: arg_names)) "'name";
val valueT = TFree (valueN, Sign.defaultS thy);
val nameT = TFree (nameN, Sign.defaultS thy);
val stateT = nameT --> valueT;
fun projectT T = valueT --> T;
fun injectT T = T --> valueT;
val locinsts = map (fn T => (project_injectL,
((encode_type T,false),(Expression.Positional
[SOME (Free (project_name T,projectT T)),
SOME (Free ((inject_name T,injectT T)))],[])))) Ts;
val locs = maps (fn T => [(Binding.name (project_name T),NONE,NoSyn),
(Binding.name (inject_name T),NONE,NoSyn)]) Ts;
val constrains = maps (fn T => [(project_name T,projectT T),(inject_name T,injectT T)]) Ts;
fun interprete_parent_valuetypes (prefix, (Ts, pname, _)) thy =
let
val {args,types,...} = the_statespace (Context.Theory thy) pname;
val inst = map fst args ~~ Ts;
val subst = Term.map_type_tfree (the o AList.lookup (op =) inst o fst);
val pars = maps ((fn T => [project_name T,inject_name T]) o subst) types;
val expr = ([(suffix valuetypesN name,
(prefix, (Expression.Positional (map SOME pars),[])))],[]);
in
prove_interpretation_in (fn ctxt => ALLGOALS (solve_tac ctxt (Assumption.all_prems_of ctxt)))
(suffix valuetypesN name, expr) thy
end;
fun interprete_parent (prefix, (_, pname, rs)) =
let
val expr = ([(pname, (prefix, (Expression.Positional rs,[])))],[])
in prove_interpretation_in
(fn ctxt => Locale.intro_locales_tac {strict = true, eager = false} ctxt [])
(full_name, expr) end;
fun declare_declinfo updates lthy phi ctxt =
let
fun upd_prf ctxt =
let
fun upd (n,v) =
let
val nT = Proof_Context.infer_type (Local_Theory.target_of lthy) (n, dummyT)
in Context.proof_map
(update_declinfo (Morphism.term phi (Free (n,nT)),v))
end;
val ctxt' = ctxt |> fold upd updates
in ctxt' end;
in Context.mapping I upd_prf ctxt end;
fun string_of_typ T =
Print_Mode.setmp []
(Syntax.string_of_typ (Config.put show_sorts true (Syntax.init_pretty_global thy))) T;
val fixestate = (case state_type of
NONE => []
| SOME s =>
let
val fx = Element.Fixes [(Binding.name s,SOME (string_of_typ stateT),NoSyn)];
val cs = Element.Constrains
(map (fn (n,T) => (n,string_of_typ T))
((map (fn (n,_) => (n,nameT)) all_comps) @
constrains))
in [fx,cs] end
)
in thy
|> namespace_definition
(suffix namespaceN name) nameT (parents_expr,[])
(map fst parent_comps) (map fst components)
|> Context.theory_map (add_statespace full_name args (map snd parents) components [])
|> add_locale (suffix valuetypesN name) (locinsts,locs) []
|> Proof_Context.theory_of
|> fold interprete_parent_valuetypes parents
|> add_locale_cmd name
([(suffix namespaceN full_name ,(("",false),(Expression.Named [],[]))),
(suffix valuetypesN full_name,(("",false),(Expression.Named [],[])))],[]) fixestate
|> Proof_Context.theory_of
|> fold interprete_parent parents
|> add_declaration full_name (declare_declinfo components')
end;
(* prepare arguments *)
fun read_typ ctxt raw_T env =
let
val ctxt' = fold (Variable.declare_typ o TFree) env ctxt;
val T = Syntax.read_typ ctxt' raw_T;
val env' = Term.add_tfreesT T env;
in (T, env') end;
fun cert_typ ctxt raw_T env =
let
val thy = Proof_Context.theory_of ctxt;
val T = Type.no_tvars (Sign.certify_typ thy raw_T)
handle TYPE (msg, _, _) => error msg;
val env' = Term.add_tfreesT T env;
in (T, env') end;
fun gen_define_statespace prep_typ state_space args name parents comps thy =
let (* - args distinct
- only args may occur in comps and parent-instantiations
- number of insts must match parent args
- no duplicate renamings
- renaming should occur in namespace
*)
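(* Editorial illustration (hypothetical), not from the original file: a declaration such as
     statespace bad = x::'a
   is rejected by the checks below with "Extra free type variable(s) 'a", because 'a is not
   listed among the type arguments of the statespace. *)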
val _ = writeln ("Defining statespace " ^ quote name ^ " ...");
val ctxt = Proof_Context.init_global thy;
fun add_parent (prefix, (Ts, pname, rs)) env =
let
val prefix' =
(case prefix of
("", mandatory) => (pname, mandatory)
| _ => prefix);
val full_pname = Sign.full_bname thy pname;
val {args,components,...} =
(case get_statespace (Context.Theory thy) full_pname of
SOME r => r
| NONE => error ("Undefined statespace " ^ quote pname));
val (Ts',env') = fold_map (prep_typ ctxt) Ts env
handle ERROR msg => cat_error msg
("The error(s) above occurred in parent statespace specification "
^ quote pname);
val err_insts = if length args <> length Ts' then
["number of type instantiation(s) does not match arguments of parent statespace "
^ quote pname]
else [];
val rnames = map fst rs
val err_dup_renamings = (case duplicates (op =) rnames of
[] => []
| dups => ["Duplicate renaming(s) for " ^ commas dups])
val cnames = map fst components;
val err_rename_unknowns = (case subtract (op =) cnames rnames of
[] => []
| rs => ["Unknown components " ^ commas rs]);
val rs' = map (AList.lookup (op =) rs o fst) components;
val errs = err_insts @ err_dup_renamings @ err_rename_unknowns
in
if null errs then ((prefix', (Ts', full_pname, rs')), env')
else error (cat_lines (errs @ ["in parent statespace " ^ quote pname]))
end;
val (parents',env) = fold_map add_parent parents [];
val err_dup_args =
(case duplicates (op =) args of
[] => []
| dups => ["Duplicate type argument(s) " ^ commas dups]);
val err_dup_components =
(case duplicates (op =) (map fst comps) of
[] => []
| dups => ["Duplicate state-space components " ^ commas dups]);
fun prep_comp (n,T) env =
let val (T', env') = prep_typ ctxt T env handle ERROR msg =>
cat_error msg ("The error(s) above occurred in component " ^ quote n)
in ((n,T'), env') end;
val (comps',env') = fold_map prep_comp comps env;
val err_extra_frees =
(case subtract (op =) args (map fst env') of
[] => []
| extras => ["Extra free type variable(s) " ^ commas extras]);
val defaultS = Sign.defaultS thy;
val args' = map (fn x => (x, AList.lookup (op =) env x |> the_default defaultS)) args;
fun fst_eq ((x:string,_),(y,_)) = x = y;
fun snd_eq ((_,t:typ),(_,u)) = t = u;
val raw_parent_comps = maps (parent_components thy o snd) parents';
fun check_type (n,T) =
(case distinct (snd_eq) (filter (curry fst_eq (n,T)) raw_parent_comps) of
[] => []
| [_] => []
| rs => ["Different types for component " ^ quote n ^ ": " ^
commas (map (Syntax.string_of_typ ctxt o snd) rs)])
val err_dup_types = maps check_type (duplicates fst_eq raw_parent_comps)
val parent_comps = distinct (fst_eq) raw_parent_comps;
val all_comps = parent_comps @ comps';
val err_comp_in_parent = (case duplicates (op =) (map fst all_comps) of
[] => []
| xs => ["Components already defined in parents: " ^ commas_quote xs]);
val errs = err_dup_args @ err_dup_components @ err_extra_frees @
err_dup_types @ err_comp_in_parent;
in if null errs
then thy |> statespace_definition state_space args' name parents' parent_comps comps'
else error (cat_lines errs)
end
handle ERROR msg => cat_error msg ("Failed to define statespace " ^ quote name);
val define_statespace = gen_define_statespace read_typ NONE;
val define_statespace_i = gen_define_statespace cert_typ;
(*** parse/print - translations ***)
val silent = Attrib.setup_config_bool \<^binding>\<open>statespace_silent\<close> (K false);
fun gen_lookup_tr ctxt s n =
(case get_comp' (Context.Proof ctxt) n of
SOME (T, _) =>
Syntax.const \<^const_name>\<open>StateFun.lookup\<close> $
Syntax.free (project_name T) $ Syntax.free n $ s
| NONE =>
if Config.get ctxt silent
then Syntax.const \<^const_name>\<open>StateFun.lookup\<close> $
Syntax.const \<^const_syntax>\<open>undefined\<close> $ Syntax.free n $ s
else raise TERM ("StateSpace.gen_lookup_tr: component " ^ quote n ^ " not defined", []));
fun lookup_tr ctxt [s, x] =
(case Term_Position.strip_positions x of
Free (n,_) => gen_lookup_tr ctxt s n
| _ => raise Match);
fun lookup_swap_tr ctxt [Free (n,_),s] = gen_lookup_tr ctxt s n;
fun lookup_tr' ctxt [_ $ Free (prj, _), n as (_ $ Free (name, _)), s] =
(case get_comp' (Context.Proof ctxt) name of
SOME (T, _) =>
if prj = project_name T
then Syntax.const "_statespace_lookup" $ s $ n
else raise Match
| NONE => raise Match)
| lookup_tr' _ _ = raise Match;
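(*Analogous translations for component update ("_statespace_update"):
  gen'_update_tr expands an update of component n to StateFun.update with the
  projection/injection pair belonging to the component's type (Fun.id for both if
  the id flag is set), wrapping the new value in K_statefun when const_val is set;
  update_tr' is the corresponding print translation.*)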
fun gen'_update_tr const_val id ctxt n v s =
let
fun pname T = if id then \<^const_name>\<open>Fun.id\<close> else project_name T;
fun iname T = if id then \<^const_name>\<open>Fun.id\<close> else inject_name T;
val v = if const_val then (Syntax.const \<^const_name>\<open>K_statefun\<close> $ v) else v
in
(case get_comp' (Context.Proof ctxt) n of
SOME (T, _) =>
Syntax.const \<^const_name>\<open>StateFun.update\<close> $
Syntax.free (pname T) $ Syntax.free (iname T) $
Syntax.free n $ v $ s
| NONE =>
if Config.get ctxt silent then
Syntax.const \<^const_name>\<open>StateFun.update\<close> $
Syntax.const \<^const_syntax>\<open>undefined\<close> $ Syntax.const \<^const_syntax>\<open>undefined\<close> $
Syntax.free n $ v $ s
else raise TERM ("StateSpace.gen_update_tr: component " ^ n ^ " not defined", []))
end;
val gen_update_tr = gen'_update_tr true
fun update_tr ctxt [s, x, v] =
(case Term_Position.strip_positions x of
Free (n, _) => gen'_update_tr true false ctxt n v s
| _ => raise Match);
fun update_tr' ctxt
[_ $ Free (prj, _), _ $ Free (inj, _), n as (_ $ Free (name, _)), (Const (k, _) $ v), s] =
if Long_Name.base_name k = Long_Name.base_name \<^const_name>\<open>K_statefun\<close> then
(case get_comp' (Context.Proof ctxt) name of
SOME (T, _) =>
if inj = inject_name T andalso prj = project_name T then
Syntax.const "_statespace_update" $ s $ n $ v
else raise Match
| NONE => raise Match)
else raise Match
| update_tr' _ _ = raise Match;
(*** outer syntax ***)
local
val type_insts =
Parse.typ >> single ||
\<^keyword>\<open>(\<close> |-- Parse.!!! (Parse.list1 Parse.typ --| \<^keyword>\<open>)\<close>)
val comp = Parse.name -- (\<^keyword>\<open>::\<close> |-- Parse.!!! Parse.typ);
fun plus1_unless test scan =
scan ::: Scan.repeat (\<^keyword>\<open>+\<close> |-- Scan.unless test (Parse.!!! scan));
val mapsto = \<^keyword>\<open>=\<close>;
val rename = Parse.name -- (mapsto |-- Parse.name);
val renames = Scan.optional (\<^keyword>\<open>[\<close> |-- Parse.!!! (Parse.list1 rename --| \<^keyword>\<open>]\<close>)) [];
val parent =
Parse_Spec.locale_prefix --
((type_insts -- Parse.name) || (Parse.name >> pair [])) -- renames
>> (fn ((prefix, (insts, name)), renames) => (prefix, (insts, name, renames)));
in
val statespace_decl =
Parse.type_args -- Parse.name --
(\<^keyword>\<open>=\<close> |--
((Scan.repeat1 comp >> pair []) ||
(plus1_unless comp parent --
Scan.optional (\<^keyword>\<open>+\<close> |-- Parse.!!! (Scan.repeat1 comp)) [])));
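(*Illustrative examples of the concrete syntax accepted by statespace_decl
  (component and parent names made up):
    statespace vars =
      n :: nat
      b :: bool
    statespace 'a varsX = NB: vars [n = N, b = B] + vars + x :: 'a
  i.e. optional type arguments and a name, "=", and then either a list of typed
  components or a "+"-separated list of (optionally prefixed, type-instantiated
  and renamed) parent statespaces followed by further components.*)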
val _ =
Outer_Syntax.command \<^command_keyword>\<open>statespace\<close> "define state-space as locale context"
(statespace_decl >> (fn ((args, name), (parents, comps)) =>
Toplevel.theory (define_statespace args name parents comps)));
end;
end;
diff --git a/src/HOL/Tools/BNF/bnf_def.ML b/src/HOL/Tools/BNF/bnf_def.ML
--- a/src/HOL/Tools/BNF/bnf_def.ML
+++ b/src/HOL/Tools/BNF/bnf_def.ML
@@ -1,2187 +1,2187 @@
(* Title: HOL/Tools/BNF/bnf_def.ML
Author: Dmitriy Traytel, TU Muenchen
Author: Jasmin Blanchette, TU Muenchen
Author: Martin Desharnais, TU Muenchen
Author: Jan van Brügge, TU Muenchen
Copyright 2012, 2013, 2014, 2022
Definition of bounded natural functors.
*)
signature BNF_DEF =
sig
type bnf
type nonemptiness_witness = {I: int list, wit: term, prop: thm list}
val morph_bnf: morphism -> bnf -> bnf
val morph_bnf_defs: morphism -> bnf -> bnf
val permute_deads: (typ list -> typ list) -> bnf -> bnf
val transfer_bnf: theory -> bnf -> bnf
val bnf_of: Proof.context -> string -> bnf option
val bnf_of_global: theory -> string -> bnf option
val bnf_interpretation: string -> (bnf -> local_theory -> local_theory) -> theory -> theory
val interpret_bnf: (string -> bool) -> bnf -> local_theory -> local_theory
val register_bnf_raw: string -> bnf -> local_theory -> local_theory
val register_bnf: (string -> bool) -> string -> bnf -> local_theory -> local_theory
val name_of_bnf: bnf -> binding
val T_of_bnf: bnf -> typ
val live_of_bnf: bnf -> int
val lives_of_bnf: bnf -> typ list
val dead_of_bnf: bnf -> int
val deads_of_bnf: bnf -> typ list
val bd_of_bnf: bnf -> term
val nwits_of_bnf: bnf -> int
val mapN: string
val predN: string
val relN: string
val setN: string
val mk_setN: int -> string
val mk_witN: int -> string
val map_of_bnf: bnf -> term
val pred_of_bnf: bnf -> term
val rel_of_bnf: bnf -> term
val sets_of_bnf: bnf -> term list
val mk_T_of_bnf: typ list -> typ list -> bnf -> typ
val mk_bd_of_bnf: typ list -> typ list -> bnf -> term
val mk_map_of_bnf: typ list -> typ list -> typ list -> bnf -> term
val mk_pred_of_bnf: typ list -> typ list -> bnf -> term
val mk_rel_of_bnf: typ list -> typ list -> typ list -> bnf -> term
val mk_sets_of_bnf: typ list list -> typ list list -> bnf -> term list
val mk_wits_of_bnf: typ list list -> typ list list -> bnf -> (int list * term) list
val bd_Card_order_of_bnf: bnf -> thm
val bd_Cinfinite_of_bnf: bnf -> thm
val bd_Cnotzero_of_bnf: bnf -> thm
val bd_card_order_of_bnf: bnf -> thm
val bd_cinfinite_of_bnf: bnf -> thm
val bd_regularCard_of_bnf: bnf -> thm
val collect_set_map_of_bnf: bnf -> thm
val in_bd_of_bnf: bnf -> thm
val in_cong_of_bnf: bnf -> thm
val in_mono_of_bnf: bnf -> thm
val in_rel_of_bnf: bnf -> thm
val inj_map_of_bnf: bnf -> thm
val inj_map_strong_of_bnf: bnf -> thm
val le_rel_OO_of_bnf: bnf -> thm
val map_comp0_of_bnf: bnf -> thm
val map_comp_of_bnf: bnf -> thm
val map_cong0_of_bnf: bnf -> thm
val map_cong_of_bnf: bnf -> thm
val map_cong_pred_of_bnf: bnf -> thm
val map_cong_simp_of_bnf: bnf -> thm
val map_def_of_bnf: bnf -> thm
val map_id0_of_bnf: bnf -> thm
val map_id_of_bnf: bnf -> thm
val map_ident0_of_bnf: bnf -> thm
val map_ident_of_bnf: bnf -> thm
val map_transfer_of_bnf: bnf -> thm
val pred_cong0_of_bnf: bnf -> thm
val pred_cong_of_bnf: bnf -> thm
val pred_cong_simp_of_bnf: bnf -> thm
val pred_def_of_bnf: bnf -> thm
val pred_map_of_bnf: bnf -> thm
val pred_mono_strong0_of_bnf: bnf -> thm
val pred_mono_strong_of_bnf: bnf -> thm
val pred_mono_of_bnf: bnf -> thm
val pred_set_of_bnf: bnf -> thm
val pred_rel_of_bnf: bnf -> thm
val pred_transfer_of_bnf: bnf -> thm
val pred_True_of_bnf: bnf -> thm
val rel_Grp_of_bnf: bnf -> thm
val rel_OO_Grp_of_bnf: bnf -> thm
val rel_OO_of_bnf: bnf -> thm
val rel_cong0_of_bnf: bnf -> thm
val rel_cong_of_bnf: bnf -> thm
val rel_cong_simp_of_bnf: bnf -> thm
val rel_conversep_of_bnf: bnf -> thm
val rel_def_of_bnf: bnf -> thm
val rel_eq_of_bnf: bnf -> thm
val rel_flip_of_bnf: bnf -> thm
val rel_map_of_bnf: bnf -> thm list
val rel_mono_of_bnf: bnf -> thm
val rel_mono_strong0_of_bnf: bnf -> thm
val rel_mono_strong_of_bnf: bnf -> thm
val rel_eq_onp_of_bnf: bnf -> thm
val rel_refl_of_bnf: bnf -> thm
val rel_refl_strong_of_bnf: bnf -> thm
val rel_reflp_of_bnf: bnf -> thm
val rel_symp_of_bnf: bnf -> thm
val rel_transfer_of_bnf: bnf -> thm
val rel_transp_of_bnf: bnf -> thm
val set_bd_of_bnf: bnf -> thm list
val set_defs_of_bnf: bnf -> thm list
val set_map0_of_bnf: bnf -> thm list
val set_map_of_bnf: bnf -> thm list
val set_transfer_of_bnf: bnf -> thm list
val wit_thms_of_bnf: bnf -> thm list
val wit_thmss_of_bnf: bnf -> thm list list
val mk_map: int -> typ list -> typ list -> term -> term
val mk_pred: typ list -> term -> term
val mk_rel: int -> typ list -> typ list -> term -> term
val mk_set: typ list -> term -> term
val build_map: Proof.context -> typ list -> typ list -> (typ * typ -> term) -> typ * typ -> term
val build_rel: (string * (int * term)) list -> Proof.context -> typ list -> typ list ->
(typ * typ -> term) -> typ * typ -> term
val build_set: Proof.context -> typ -> typ -> term
val flatten_type_args_of_bnf: bnf -> 'a -> 'a list -> 'a list
val map_flattened_map_args: Proof.context -> string -> (term list -> 'a list) -> term list ->
'a list
val mk_witness: int list * term -> thm list -> nonemptiness_witness
val mk_wit_goals: term list -> term list -> term list -> int list * term -> term list
val minimize_wits: (''a list * 'b) list -> (''a list * 'b) list
val wits_of_bnf: bnf -> nonemptiness_witness list
val zip_axioms: 'a -> 'a -> 'a -> 'a list -> 'a -> 'a -> 'a -> 'a list -> 'a -> 'a -> 'a -> 'a list
datatype inline_policy = Dont_Inline | Hardly_Inline | Smart_Inline | Do_Inline
datatype fact_policy = Dont_Note | Note_Some | Note_All
val bnf_internals: bool Config.T
val bnf_timing: bool Config.T
val user_policy: fact_policy -> Proof.context -> fact_policy
val note_bnf_thms: fact_policy -> (binding -> binding) -> binding -> bnf -> local_theory ->
bnf * local_theory
val note_bnf_defs: bnf -> local_theory -> bnf * local_theory
val print_bnfs: Proof.context -> unit
val prepare_def: inline_policy -> (Proof.context -> fact_policy) -> bool ->
(binding -> binding) -> (Proof.context -> 'a -> typ) -> (Proof.context -> 'b -> term) ->
typ list option -> binding -> binding -> binding -> binding list ->
((((((binding * 'a) * 'b) * 'b list) * 'b) * 'b list) * 'b option) * 'b option ->
Proof.context ->
string * term list * ((Proof.context -> thm list -> tactic) option * term list list) *
((thm list -> thm list list) -> thm list list -> Proof.context -> bnf * local_theory) *
local_theory * thm list
val define_bnf_consts: inline_policy -> fact_policy -> bool -> typ list option ->
binding -> binding -> binding -> binding list ->
((((((binding * typ) * term) * term list) * term) * term list) * term option) * term option ->
local_theory ->
((typ list * typ list * typ list * typ) *
(term * term list * term * (int list * term) list * term * term) *
(thm * thm list * thm * thm list * thm * thm) *
((typ list -> typ list -> typ list -> term) *
(typ list -> typ list -> term -> term) *
(typ list -> typ list -> typ -> typ) *
(typ list -> typ list -> typ list -> term) *
(typ list -> typ list -> term) *
(typ list -> typ list -> typ list -> term) *
(typ list -> typ list -> term))) * local_theory
val bnf_def: inline_policy -> (Proof.context -> fact_policy) -> bool -> (binding -> binding) ->
(Proof.context -> tactic) list -> (Proof.context -> tactic) -> typ list option -> binding ->
binding -> binding -> binding list ->
((((((binding * typ) * term) * term list) * term) * term list) * term option) * term option ->
local_theory -> bnf * local_theory
val bnf_cmd: (((((((binding * string) * string) * string list) * string) * string list)
* string option) * string option) * (Proof.context -> Plugin_Name.filter) ->
Proof.context -> Proof.state
end;
structure BNF_Def : BNF_DEF =
struct
open BNF_Util
open BNF_Tactics
open BNF_Def_Tactics
val fundefcong_attrs = @{attributes [fundef_cong]};
val mono_attrs = @{attributes [mono]};
type axioms = {
map_id0: thm,
map_comp0: thm,
map_cong0: thm,
set_map0: thm list,
bd_card_order: thm,
bd_cinfinite: thm,
bd_regularCard: thm,
set_bd: thm list,
le_rel_OO: thm,
rel_OO_Grp: thm,
pred_set: thm
};
fun mk_axioms' ((((((((((id, comp), cong), map), c_o), cinf), creg), set_bd), le_rel_OO), rel), pred) =
{map_id0 = id, map_comp0 = comp, map_cong0 = cong, set_map0 = map, bd_card_order = c_o,
bd_cinfinite = cinf, bd_regularCard = creg, set_bd = set_bd, le_rel_OO = le_rel_OO, rel_OO_Grp = rel, pred_set = pred};
fun dest_cons [] = raise List.Empty
| dest_cons (x :: xs) = (x, xs);
fun mk_axioms n thms = thms
|> map the_single
|> dest_cons
||>> dest_cons
||>> dest_cons
||>> chop n
||>> dest_cons
||>> dest_cons
||>> dest_cons
||>> chop n
||>> dest_cons
||>> dest_cons
||> the_single
|> mk_axioms';
fun zip_axioms mid mcomp mcong smap bdco bdinf bdreg sbd le_rel_OO rel pred =
[mid, mcomp, mcong] @ smap @ [bdco, bdinf, bdreg] @ sbd @ [le_rel_OO, rel, pred];
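(*zip_axioms arranges the individual axioms into one flat list (of goals or
  theorems), and mk_axioms n takes such a list apart again into the axioms record;
  n is the number of live type arguments, i.e. the number of set_map0 and set_bd
  entries.*)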
fun map_axioms f {map_id0, map_comp0, map_cong0, set_map0, bd_card_order, bd_cinfinite,
bd_regularCard, set_bd, le_rel_OO, rel_OO_Grp, pred_set} =
{map_id0 = f map_id0,
map_comp0 = f map_comp0,
map_cong0 = f map_cong0,
set_map0 = map f set_map0,
bd_card_order = f bd_card_order,
bd_cinfinite = f bd_cinfinite,
bd_regularCard = f bd_regularCard,
set_bd = map f set_bd,
le_rel_OO = f le_rel_OO,
rel_OO_Grp = f rel_OO_Grp,
pred_set = f pred_set};
val morph_axioms = map_axioms o Morphism.thm;
type defs = {
map_def: thm,
set_defs: thm list,
rel_def: thm,
pred_def: thm
}
fun mk_defs map sets rel pred = {map_def = map, set_defs = sets, rel_def = rel, pred_def = pred};
fun map_defs f {map_def, set_defs, rel_def, pred_def} =
{map_def = f map_def, set_defs = map f set_defs, rel_def = f rel_def, pred_def = f pred_def};
val morph_defs = map_defs o Morphism.thm;
type facts = {
bd_Card_order: thm,
bd_Cinfinite: thm,
bd_Cnotzero: thm,
collect_set_map: thm lazy,
in_bd: thm lazy,
in_cong: thm lazy,
in_mono: thm lazy,
in_rel: thm lazy,
inj_map: thm lazy,
inj_map_strong: thm lazy,
map_comp: thm lazy,
map_cong: thm lazy,
map_cong_simp: thm lazy,
map_cong_pred: thm lazy,
map_id: thm lazy,
map_ident0: thm lazy,
map_ident: thm lazy,
map_ident_strong: thm lazy,
map_transfer: thm lazy,
rel_eq: thm lazy,
rel_flip: thm lazy,
set_map: thm lazy list,
rel_cong0: thm lazy,
rel_cong: thm lazy,
rel_cong_simp: thm lazy,
rel_map: thm list lazy,
rel_mono: thm lazy,
rel_mono_strong0: thm lazy,
rel_mono_strong: thm lazy,
set_transfer: thm list lazy,
rel_Grp: thm lazy,
rel_conversep: thm lazy,
rel_OO: thm lazy,
rel_refl: thm lazy,
rel_refl_strong: thm lazy,
rel_reflp: thm lazy,
rel_symp: thm lazy,
rel_transp: thm lazy,
rel_transfer: thm lazy,
rel_eq_onp: thm lazy,
pred_transfer: thm lazy,
pred_True: thm lazy,
pred_map: thm lazy,
pred_rel: thm lazy,
pred_mono_strong0: thm lazy,
pred_mono_strong: thm lazy,
pred_mono: thm lazy,
pred_cong0: thm lazy,
pred_cong: thm lazy,
pred_cong_simp: thm lazy
};
fun mk_facts bd_Card_order bd_Cinfinite bd_Cnotzero collect_set_map in_bd in_cong in_mono in_rel
inj_map inj_map_strong map_comp map_cong map_cong_simp map_cong_pred map_id map_ident0 map_ident
map_ident_strong map_transfer rel_eq rel_flip set_map rel_cong0 rel_cong rel_cong_simp rel_map
rel_mono rel_mono_strong0 rel_mono_strong set_transfer rel_Grp rel_conversep rel_OO rel_refl
rel_refl_strong rel_reflp rel_symp rel_transp rel_transfer rel_eq_onp pred_transfer pred_True
pred_map pred_rel pred_mono_strong0 pred_mono_strong pred_mono pred_cong0 pred_cong
pred_cong_simp = {
bd_Card_order = bd_Card_order,
bd_Cinfinite = bd_Cinfinite,
bd_Cnotzero = bd_Cnotzero,
collect_set_map = collect_set_map,
in_bd = in_bd,
in_cong = in_cong,
in_mono = in_mono,
in_rel = in_rel,
inj_map = inj_map,
inj_map_strong = inj_map_strong,
map_comp = map_comp,
map_cong = map_cong,
map_cong_simp = map_cong_simp,
map_cong_pred = map_cong_pred,
map_id = map_id,
map_ident0 = map_ident0,
map_ident = map_ident,
map_ident_strong = map_ident_strong,
map_transfer = map_transfer,
rel_eq = rel_eq,
rel_flip = rel_flip,
set_map = set_map,
rel_cong0 = rel_cong0,
rel_cong = rel_cong,
rel_cong_simp = rel_cong_simp,
rel_map = rel_map,
rel_mono = rel_mono,
rel_mono_strong0 = rel_mono_strong0,
rel_mono_strong = rel_mono_strong,
rel_transfer = rel_transfer,
rel_Grp = rel_Grp,
rel_conversep = rel_conversep,
rel_OO = rel_OO,
rel_refl = rel_refl,
rel_refl_strong = rel_refl_strong,
rel_reflp = rel_reflp,
rel_symp = rel_symp,
rel_transp = rel_transp,
set_transfer = set_transfer,
rel_eq_onp = rel_eq_onp,
pred_transfer = pred_transfer,
pred_True = pred_True,
pred_map = pred_map,
pred_rel = pred_rel,
pred_mono_strong0 = pred_mono_strong0,
pred_mono_strong = pred_mono_strong,
pred_mono = pred_mono,
pred_cong0 = pred_cong0,
pred_cong = pred_cong,
pred_cong_simp = pred_cong_simp};
fun map_facts f {
bd_Card_order,
bd_Cinfinite,
bd_Cnotzero,
collect_set_map,
in_bd,
in_cong,
in_mono,
in_rel,
inj_map,
inj_map_strong,
map_comp,
map_cong,
map_cong_simp,
map_cong_pred,
map_id,
map_ident0,
map_ident,
map_ident_strong,
map_transfer,
rel_eq,
rel_flip,
set_map,
rel_cong0,
rel_cong,
rel_cong_simp,
rel_map,
rel_mono,
rel_mono_strong0,
rel_mono_strong,
rel_transfer,
rel_Grp,
rel_conversep,
rel_OO,
rel_refl,
rel_refl_strong,
rel_reflp,
rel_symp,
rel_transp,
set_transfer,
rel_eq_onp,
pred_transfer,
pred_True,
pred_map,
pred_rel,
pred_mono_strong0,
pred_mono_strong,
pred_mono,
pred_cong0,
pred_cong,
pred_cong_simp} =
{bd_Card_order = f bd_Card_order,
bd_Cinfinite = f bd_Cinfinite,
bd_Cnotzero = f bd_Cnotzero,
collect_set_map = Lazy.map f collect_set_map,
in_bd = Lazy.map f in_bd,
in_cong = Lazy.map f in_cong,
in_mono = Lazy.map f in_mono,
in_rel = Lazy.map f in_rel,
inj_map = Lazy.map f inj_map,
inj_map_strong = Lazy.map f inj_map_strong,
map_comp = Lazy.map f map_comp,
map_cong = Lazy.map f map_cong,
map_cong_simp = Lazy.map f map_cong_simp,
map_cong_pred = Lazy.map f map_cong_pred,
map_id = Lazy.map f map_id,
map_ident0 = Lazy.map f map_ident0,
map_ident = Lazy.map f map_ident,
map_ident_strong = Lazy.map f map_ident_strong,
map_transfer = Lazy.map f map_transfer,
rel_eq = Lazy.map f rel_eq,
rel_flip = Lazy.map f rel_flip,
set_map = map (Lazy.map f) set_map,
rel_cong0 = Lazy.map f rel_cong0,
rel_cong = Lazy.map f rel_cong,
rel_cong_simp = Lazy.map f rel_cong_simp,
rel_map = Lazy.map (map f) rel_map,
rel_mono = Lazy.map f rel_mono,
rel_mono_strong0 = Lazy.map f rel_mono_strong0,
rel_mono_strong = Lazy.map f rel_mono_strong,
rel_transfer = Lazy.map f rel_transfer,
rel_Grp = Lazy.map f rel_Grp,
rel_conversep = Lazy.map f rel_conversep,
rel_OO = Lazy.map f rel_OO,
rel_refl = Lazy.map f rel_refl,
rel_refl_strong = Lazy.map f rel_refl_strong,
rel_reflp = Lazy.map f rel_reflp,
rel_symp = Lazy.map f rel_symp,
rel_transp = Lazy.map f rel_transp,
set_transfer = Lazy.map (map f) set_transfer,
rel_eq_onp = Lazy.map f rel_eq_onp,
pred_transfer = Lazy.map f pred_transfer,
pred_True = Lazy.map f pred_True,
pred_map = Lazy.map f pred_map,
pred_rel = Lazy.map f pred_rel,
pred_mono_strong0 = Lazy.map f pred_mono_strong0,
pred_mono_strong = Lazy.map f pred_mono_strong,
pred_mono = Lazy.map f pred_mono,
pred_cong0 = Lazy.map f pred_cong0,
pred_cong = Lazy.map f pred_cong,
pred_cong_simp = Lazy.map f pred_cong_simp};
val morph_facts = map_facts o Morphism.thm;
type nonemptiness_witness = {
I: int list,
wit: term,
prop: thm list
};
fun mk_witness (I, wit) prop = {I = I, wit = wit, prop = prop};
fun map_witness f g {I, wit, prop} = {I = I, wit = f wit, prop = map g prop};
fun morph_witness phi = map_witness (Morphism.term phi) (Morphism.thm phi);
datatype bnf = BNF of {
name: binding,
T: typ,
live: int,
lives: typ list, (*source type variables of map*)
lives': typ list, (*target type variables of map*)
dead: int,
deads: typ list,
map: term,
sets: term list,
bd: term,
axioms: axioms,
defs: defs,
facts: facts,
nwits: int,
wits: nonemptiness_witness list,
rel: term,
pred: term
};
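(*The "facts" field holds derived theorems as lazy values; the *_of_bnf accessors
  below force them on demand, so derived theorems are only proved when they are
  actually needed (e.g. when noted or requested via an accessor).*)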
(* getters *)
fun rep_bnf (BNF bnf) = bnf;
val name_of_bnf = #name o rep_bnf;
val T_of_bnf = #T o rep_bnf;
fun mk_T_of_bnf Ds Ts bnf =
let val bnf_rep = rep_bnf bnf
in Term.typ_subst_atomic ((#deads bnf_rep ~~ Ds) @ (#lives bnf_rep ~~ Ts)) (#T bnf_rep) end;
val live_of_bnf = #live o rep_bnf;
val lives_of_bnf = #lives o rep_bnf;
val dead_of_bnf = #dead o rep_bnf;
val deads_of_bnf = #deads o rep_bnf;
val axioms_of_bnf = #axioms o rep_bnf;
val facts_of_bnf = #facts o rep_bnf;
val nwits_of_bnf = #nwits o rep_bnf;
val wits_of_bnf = #wits o rep_bnf;
fun flatten_type_args_of_bnf bnf dead_x xs =
let
val Type (_, Ts) = T_of_bnf bnf;
val lives = lives_of_bnf bnf;
val deads = deads_of_bnf bnf;
in
permute_like_unique (op =) (deads @ lives) Ts (replicate (length deads) dead_x @ xs)
end;
(*terms*)
val map_of_bnf = #map o rep_bnf;
val sets_of_bnf = #sets o rep_bnf;
fun mk_map_of_bnf Ds Ts Us bnf =
let val bnf_rep = rep_bnf bnf;
in
Term.subst_atomic_types
((#deads bnf_rep ~~ Ds) @ (#lives bnf_rep ~~ Ts) @ (#lives' bnf_rep ~~ Us)) (#map bnf_rep)
end;
fun mk_sets_of_bnf Dss Tss bnf =
let val bnf_rep = rep_bnf bnf;
in
map2 (fn (Ds, Ts) => Term.subst_atomic_types
((#deads bnf_rep ~~ Ds) @ (#lives bnf_rep ~~ Ts))) (Dss ~~ Tss) (#sets bnf_rep)
end;
val bd_of_bnf = #bd o rep_bnf;
fun mk_bd_of_bnf Ds Ts bnf =
let val bnf_rep = rep_bnf bnf;
in Term.subst_atomic_types ((#deads bnf_rep ~~ Ds) @ (#lives bnf_rep ~~ Ts)) (#bd bnf_rep) end;
fun mk_wits_of_bnf Dss Tss bnf =
let
val bnf_rep = rep_bnf bnf;
val wits = map (fn x => (#I x, #wit x)) (#wits bnf_rep);
in
map2 (fn (Ds, Ts) => apsnd (Term.subst_atomic_types
((#deads bnf_rep ~~ Ds) @ (#lives bnf_rep ~~ Ts)))) (Dss ~~ Tss) wits
end;
val rel_of_bnf = #rel o rep_bnf;
fun mk_rel_of_bnf Ds Ts Us bnf =
let val bnf_rep = rep_bnf bnf;
in
Term.subst_atomic_types
((#deads bnf_rep ~~ Ds) @ (#lives bnf_rep ~~ Ts) @ (#lives' bnf_rep ~~ Us)) (#rel bnf_rep)
end;
val pred_of_bnf = #pred o rep_bnf;
fun mk_pred_of_bnf Ds Ts bnf =
let val bnf_rep = rep_bnf bnf;
in
Term.subst_atomic_types
((#deads bnf_rep ~~ Ds) @ (#lives bnf_rep ~~ Ts)) (#pred bnf_rep)
end;
(*thms*)
val bd_Card_order_of_bnf = #bd_Card_order o #facts o rep_bnf;
val bd_Cinfinite_of_bnf = #bd_Cinfinite o #facts o rep_bnf;
val bd_Cnotzero_of_bnf = #bd_Cnotzero o #facts o rep_bnf;
val bd_card_order_of_bnf = #bd_card_order o #axioms o rep_bnf;
val bd_cinfinite_of_bnf = #bd_cinfinite o #axioms o rep_bnf;
val bd_regularCard_of_bnf = #bd_regularCard o #axioms o rep_bnf;
val collect_set_map_of_bnf = Lazy.force o #collect_set_map o #facts o rep_bnf;
val in_bd_of_bnf = Lazy.force o #in_bd o #facts o rep_bnf;
val in_cong_of_bnf = Lazy.force o #in_cong o #facts o rep_bnf;
val in_mono_of_bnf = Lazy.force o #in_mono o #facts o rep_bnf;
val in_rel_of_bnf = Lazy.force o #in_rel o #facts o rep_bnf;
val inj_map_of_bnf = Lazy.force o #inj_map o #facts o rep_bnf;
val inj_map_strong_of_bnf = Lazy.force o #inj_map_strong o #facts o rep_bnf;
val le_rel_OO_of_bnf = #le_rel_OO o #axioms o rep_bnf;
val map_comp0_of_bnf = #map_comp0 o #axioms o rep_bnf;
val map_comp_of_bnf = Lazy.force o #map_comp o #facts o rep_bnf;
val map_cong0_of_bnf = #map_cong0 o #axioms o rep_bnf;
val map_cong_of_bnf = Lazy.force o #map_cong o #facts o rep_bnf;
val map_cong_pred_of_bnf = Lazy.force o #map_cong_pred o #facts o rep_bnf;
val map_cong_simp_of_bnf = Lazy.force o #map_cong_simp o #facts o rep_bnf;
val map_def_of_bnf = #map_def o #defs o rep_bnf;
val map_id0_of_bnf = #map_id0 o #axioms o rep_bnf;
val map_id_of_bnf = Lazy.force o #map_id o #facts o rep_bnf;
val map_ident0_of_bnf = Lazy.force o #map_ident0 o #facts o rep_bnf;
val map_ident_of_bnf = Lazy.force o #map_ident o #facts o rep_bnf;
val map_ident_strong_of_bnf = Lazy.force o #map_ident_strong o #facts o rep_bnf;
val map_transfer_of_bnf = Lazy.force o #map_transfer o #facts o rep_bnf;
val rel_eq_onp_of_bnf = Lazy.force o #rel_eq_onp o #facts o rep_bnf;
val pred_def_of_bnf = #pred_def o #defs o rep_bnf;
val pred_map_of_bnf = Lazy.force o #pred_map o #facts o rep_bnf;
val pred_mono_strong0_of_bnf = Lazy.force o #pred_mono_strong0 o #facts o rep_bnf;
val pred_mono_strong_of_bnf = Lazy.force o #pred_mono_strong o #facts o rep_bnf;
val pred_mono_of_bnf = Lazy.force o #pred_mono o #facts o rep_bnf;
val pred_cong0_of_bnf = Lazy.force o #pred_cong0 o #facts o rep_bnf;
val pred_cong_of_bnf = Lazy.force o #pred_cong o #facts o rep_bnf;
val pred_cong_simp_of_bnf = Lazy.force o #pred_cong_simp o #facts o rep_bnf;
val pred_rel_of_bnf = Lazy.force o #pred_rel o #facts o rep_bnf;
val pred_set_of_bnf = #pred_set o #axioms o rep_bnf;
val pred_transfer_of_bnf = Lazy.force o #pred_transfer o #facts o rep_bnf;
val pred_True_of_bnf = Lazy.force o #pred_True o #facts o rep_bnf;
val rel_Grp_of_bnf = Lazy.force o #rel_Grp o #facts o rep_bnf;
val rel_OO_Grp_of_bnf = #rel_OO_Grp o #axioms o rep_bnf;
val rel_OO_of_bnf = Lazy.force o #rel_OO o #facts o rep_bnf;
val rel_cong0_of_bnf = Lazy.force o #rel_cong0 o #facts o rep_bnf;
val rel_cong_of_bnf = Lazy.force o #rel_cong o #facts o rep_bnf;
val rel_cong_simp_of_bnf = Lazy.force o #rel_cong_simp o #facts o rep_bnf;
val rel_conversep_of_bnf = Lazy.force o #rel_conversep o #facts o rep_bnf;
val rel_def_of_bnf = #rel_def o #defs o rep_bnf;
val rel_eq_of_bnf = Lazy.force o #rel_eq o #facts o rep_bnf;
val rel_flip_of_bnf = Lazy.force o #rel_flip o #facts o rep_bnf;
val rel_map_of_bnf = Lazy.force o #rel_map o #facts o rep_bnf;
val rel_mono_of_bnf = Lazy.force o #rel_mono o #facts o rep_bnf;
val rel_mono_strong0_of_bnf = Lazy.force o #rel_mono_strong0 o #facts o rep_bnf;
val rel_mono_strong_of_bnf = Lazy.force o #rel_mono_strong o #facts o rep_bnf;
val rel_refl_of_bnf = Lazy.force o #rel_refl o #facts o rep_bnf;
val rel_refl_strong_of_bnf = Lazy.force o #rel_refl_strong o #facts o rep_bnf;
val rel_reflp_of_bnf = Lazy.force o #rel_reflp o #facts o rep_bnf;
val rel_symp_of_bnf = Lazy.force o #rel_symp o #facts o rep_bnf;
val rel_transfer_of_bnf = Lazy.force o #rel_transfer o #facts o rep_bnf;
val rel_transp_of_bnf = Lazy.force o #rel_transp o #facts o rep_bnf;
val set_bd_of_bnf = #set_bd o #axioms o rep_bnf;
val set_defs_of_bnf = #set_defs o #defs o rep_bnf;
val set_map0_of_bnf = #set_map0 o #axioms o rep_bnf;
val set_map_of_bnf = map Lazy.force o #set_map o #facts o rep_bnf;
val set_transfer_of_bnf = Lazy.force o #set_transfer o #facts o rep_bnf;
val wit_thms_of_bnf = maps #prop o wits_of_bnf;
val wit_thmss_of_bnf = map #prop o wits_of_bnf;
fun mk_bnf name T live lives lives' dead deads map sets bd axioms defs facts wits rel pred =
BNF {name = name, T = T,
live = live, lives = lives, lives' = lives', dead = dead, deads = deads,
map = map, sets = sets, bd = bd,
axioms = axioms, defs = defs, facts = facts,
nwits = length wits, wits = wits, rel = rel, pred = pred};
fun map_bnf f1 f2 f3 f4 f5 f6 f7 f8 f9 f10 f11 f12 f13 f14 f15 f16 f17
(BNF {name = name, T = T, live = live, lives = lives, lives' = lives',
dead = dead, deads = deads, map = map, sets = sets, bd = bd,
axioms = axioms, defs = defs, facts = facts,
nwits = nwits, wits = wits, rel = rel, pred = pred}) =
BNF {name = f1 name, T = f2 T,
live = f3 live, lives = f4 lives, lives' = f5 lives', dead = f6 dead, deads = f7 deads,
map = f8 map, sets = f9 sets, bd = f10 bd,
axioms = f11 axioms, defs = f12 defs, facts = f13 facts,
nwits = f14 nwits, wits = f15 wits, rel = f16 rel, pred = f17 pred};
fun morph_bnf phi =
let
val Tphi = Morphism.typ phi;
val tphi = Morphism.term phi;
in
map_bnf (Morphism.binding phi) Tphi I (map Tphi) (map Tphi) I (map Tphi) tphi (map tphi) tphi
(morph_axioms phi) (morph_defs phi) (morph_facts phi) I (map (morph_witness phi)) tphi tphi
end;
fun morph_bnf_defs phi = map_bnf I I I I I I I I I I I (morph_defs phi) I I I I I;
fun permute_deads perm = map_bnf I I I I I I perm I I I I I I I I I I;
val transfer_bnf = morph_bnf o Morphism.transfer_morphism;
structure Data = Generic_Data
(
type T = bnf Symtab.table;
val empty = Symtab.empty;
fun merge data : T = Symtab.merge (K true) data;
);
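(*Registered BNFs are kept in generic context data, indexed by the full name of
  the type constructor (or of the given binding); on lookup the stored BNF is
  transferred to the current background theory.*)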
fun bnf_of_generic context =
Option.map (transfer_bnf (Context.theory_of context)) o Symtab.lookup (Data.get context);
val bnf_of = bnf_of_generic o Context.Proof;
val bnf_of_global = bnf_of_generic o Context.Theory;
(* Utilities *)
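(*The normalize_* functions instantiate the schematic type variables of the given
  set, relator and predicator terms so that their types match the expected
  live/dead instances; ill-typed relators and predicators are rejected with
  "Bad relator" resp. "Bad predicator".*)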
fun normalize_set insts instA set =
let
val (T, T') = dest_funT (fastype_of set);
val A = fst (Term.dest_TVar (HOLogic.dest_setT T'));
val params = Term.add_tvar_namesT T [];
in Term.subst_TVars ((A :: params) ~~ (instA :: insts)) set end;
fun normalize_rel ctxt instTs instA instB rel =
let
val thy = Proof_Context.theory_of ctxt;
val tyenv =
Sign.typ_match thy (fastype_of rel, Library.foldr (op -->) (instTs, mk_pred2T instA instB))
Vartab.empty;
in Envir.subst_term (tyenv, Vartab.empty) rel end
handle Type.TYPE_MATCH => error "Bad relator";
fun normalize_pred ctxt instTs instA pred =
let
val thy = Proof_Context.theory_of ctxt;
val tyenv =
Sign.typ_match thy (fastype_of pred, Library.foldr (op -->) (instTs, mk_pred1T instA))
Vartab.empty;
in Envir.subst_term (tyenv, Vartab.empty) pred end
handle Type.TYPE_MATCH => error "Bad predicator";
fun normalize_wit insts CA As wit =
let
fun strip_param (Ts, T as Type (\<^type_name>\<open>fun\<close>, [T1, T2])) =
if Type.raw_instance (CA, T) then (Ts, T) else strip_param (T1 :: Ts, T2)
| strip_param x = x;
val (Ts, T) = strip_param ([], fastype_of wit);
val subst = Term.add_tvar_namesT T [] ~~ insts;
fun find y = find_index (fn x => x = y) As;
in
(map (find o Term.typ_subst_TVars subst) (rev Ts), Term.subst_TVars subst wit)
end;
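(*Keep only witnesses whose sets I of used argument positions are minimal: a
  witness is dropped whenever another witness exists whose index set is a subset
  of its own.*)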
fun minimize_wits wits =
let
fun minimize done [] = done
| minimize done ((I, wit) :: todo) =
if exists (fn (J, _) => subset (op =) (J, I)) (done @ todo)
then minimize done todo
else minimize ((I, wit) :: done) todo;
in minimize [] wits end;
fun mk_map live Ts Us t =
let val (Type (_, Ts0), Type (_, Us0)) = strip_typeN (live + 1) (fastype_of t) |>> List.last in
Term.subst_atomic_types (Ts0 @ Us0 ~~ Ts @ Us) t
end;
fun mk_pred Ts t =
let val Type (_, Ts0) = domain_type (body_fun_type (fastype_of t)) in
Term.subst_atomic_types (Ts0 ~~ Ts) t
end;
val mk_set = mk_pred;
fun mk_rel live Ts Us t =
let val [Type (_, Ts0), Type (_, Us0)] = binder_types (snd (strip_typeN live (fastype_of t))) in
Term.subst_atomic_types (Ts0 @ Us0 ~~ Ts @ Us) t
end;
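(*Generic builder behind build_map and build_rel: descend through the common
  structure of a pair of types, using the map function (resp. relator) of a
  registered BNF -- or a fixed constant from pre_cst_table, e.g. image or rel_set
  for "set" -- at each type constructor, the identity (resp. equality) on types
  that do not involve the distinguished simple types, and build_simple on the
  simple types themselves.*)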
fun build_map_or_rel mk const of_bnf dest pre_cst_table ctxt simple_Ts simple_Us build_simple =
let
fun build (TU as (T, U)) =
if exists (curry (op =) T) simple_Ts orelse exists (curry (op =) U) simple_Us then
build_simple TU
else if T = U andalso not (exists_subtype_in simple_Ts T) andalso
not (exists_subtype_in simple_Us U) then
const T
else
(case TU of
(Type (s, Ts), Type (s', Us)) =>
if s = s' then
let
fun recurse (live, cst0) =
let
val cst = mk live Ts Us cst0;
val TUs' = map dest (fst (strip_typeN live (fastype_of cst)));
in Term.list_comb (cst, map build TUs') end;
in
(case AList.lookup (op =) pre_cst_table s of
NONE =>
(case bnf_of ctxt s of
SOME bnf => recurse (live_of_bnf bnf, of_bnf bnf)
| NONE => build_simple TU)
| SOME entry => recurse entry)
end
else
build_simple TU
| _ => build_simple TU);
in build end;
val build_map = build_map_or_rel mk_map HOLogic.id_const map_of_bnf dest_funT
[(\<^type_name>\<open>set\<close>, (1, \<^term>\<open>image\<close>))];
val build_rel = build_map_or_rel mk_rel HOLogic.eq_const rel_of_bnf dest_pred2T o append
[(\<^type_name>\<open>set\<close>, (1, \<^term>\<open>rel_set\<close>)), (\<^type_name>\<open>fun\<close>, (2, \<^term>\<open>rel_fun\<close>))];
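(*build_set ctxt A T constructs a term of type T => A set collecting all atoms of
  type A inside a value of type T, by composing the set functions of the BNFs
  occurring in T with unions and indexed unions.*)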
fun build_set ctxt A =
let
fun build T =
Abs (Name.uu, T,
if T = A then
HOLogic.mk_set A [Bound 0]
else
(case T of
Type (s, Ts) =>
let
val sets = map (mk_set Ts) (sets_of_bnf (the (bnf_of ctxt s)))
|> filter (exists_subtype_in [A] o range_type o fastype_of);
val set_apps = map (fn set => Term.betapply (set, Bound 0)) sets;
fun recurse set_app =
let val Type (\<^type_name>\<open>set\<close>, [elemT]) = fastype_of set_app in
if elemT = A then set_app else mk_UNION set_app (build elemT)
end;
in
if null set_apps then HOLogic.mk_set A []
else Library.foldl1 mk_union (map recurse set_apps)
end
| _ => HOLogic.mk_set A []));
in build end;
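(*Bring the arguments fs into the argument order of the type constructor of BNF s
  (with Term.dummy at dead positions), apply map_args, and rearrange the result to
  match the original order of fs again, dropping entries at dead positions.*)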
fun map_flattened_map_args ctxt s map_args fs =
let
val flat_fs = flatten_type_args_of_bnf (the (bnf_of ctxt s)) Term.dummy fs;
val flat_fs' = map_args flat_fs;
in
permute_like_unique (op aconv) flat_fs fs flat_fs'
end;
(* Names *)
val mapN = "map";
val setN = "set";
fun mk_setN i = setN ^ nonzero_string_of_int i;
val bdN = "bd";
val witN = "wit";
fun mk_witN i = witN ^ nonzero_string_of_int i;
val relN = "rel";
val predN = "pred";
val bd_Card_orderN = "bd_Card_order";
val bd_CinfiniteN = "bd_Cinfinite";
val bd_CnotzeroN = "bd_Cnotzero";
val bd_card_orderN = "bd_card_order";
val bd_cinfiniteN = "bd_cinfinite";
val bd_regularCardN = "bd_regularCard";
val collect_set_mapN = "collect_set_map";
val in_bdN = "in_bd";
val in_monoN = "in_mono";
val in_relN = "in_rel";
val inj_mapN = "inj_map";
val inj_map_strongN = "inj_map_strong";
val map_comp0N = "map_comp0";
val map_compN = "map_comp";
val map_cong0N = "map_cong0";
val map_congN = "map_cong";
val map_cong_simpN = "map_cong_simp";
val map_cong_predN = "map_cong_pred";
val map_id0N = "map_id0";
val map_idN = "map_id";
val map_identN = "map_ident";
val map_ident_strongN = "map_ident_strong";
val map_transferN = "map_transfer";
val pred_mono_strong0N = "pred_mono_strong0";
val pred_mono_strongN = "pred_mono_strong";
val pred_monoN = "pred_mono";
val pred_transferN = "pred_transfer";
val pred_TrueN = "pred_True";
val pred_mapN = "pred_map";
val pred_relN = "pred_rel";
val pred_setN = "pred_set";
val pred_congN = "pred_cong";
val pred_cong_simpN = "pred_cong_simp";
val rel_GrpN = "rel_Grp";
val rel_comppN = "rel_compp";
val rel_compp_GrpN = "rel_compp_Grp";
val rel_congN = "rel_cong";
val rel_cong_simpN = "rel_cong_simp";
val rel_conversepN = "rel_conversep";
val rel_eqN = "rel_eq";
val rel_eq_onpN = "rel_eq_onp";
val rel_flipN = "rel_flip";
val rel_mapN = "rel_map";
val rel_monoN = "rel_mono";
val rel_mono_strong0N = "rel_mono_strong0";
val rel_mono_strongN = "rel_mono_strong";
val rel_reflN = "rel_refl";
val rel_refl_strongN = "rel_refl_strong";
val rel_reflpN = "rel_reflp";
val rel_sympN = "rel_symp";
val rel_transferN = "rel_transfer";
val rel_transpN = "rel_transp";
val set_bdN = "set_bd";
val set_map0N = "set_map0";
val set_mapN = "set_map";
val set_transferN = "set_transfer";
datatype inline_policy = Dont_Inline | Hardly_Inline | Smart_Inline | Do_Inline;
datatype fact_policy = Dont_Note | Note_Some | Note_All;
val bnf_internals = Attrib.setup_config_bool \<^binding>\<open>bnf_internals\<close> (K false);
val bnf_timing = Attrib.setup_config_bool \<^binding>\<open>bnf_timing\<close> (K false);
fun user_policy policy ctxt = if Config.get ctxt bnf_internals then Note_All else policy;
val smart_max_inline_term_size = 25; (*FUDGE*)
fun note_bnf_thms fact_policy qualify0 bnf_b bnf lthy =
let
val axioms = axioms_of_bnf bnf;
val facts = facts_of_bnf bnf;
val wits = wits_of_bnf bnf;
val qualify =
let val qs = Binding.path_of bnf_b;
in fold_rev (fn (s, mand) => Binding.qualify mand s) qs #> qualify0 end;
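(*Note the internal, low-level facts only under Note_All, and the user-facing
  facts under any policy other than Dont_Note; all bindings are qualified by the
  name of the BNF.*)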
fun note_if_note_all (noted0, lthy0) =
let
val witNs = if length wits = 1 then [witN] else map mk_witN (1 upto length wits);
val notes =
[(bd_card_orderN, [#bd_card_order axioms]),
(bd_cinfiniteN, [#bd_cinfinite axioms]),
(bd_regularCardN, [#bd_regularCard axioms]),
(bd_Card_orderN, [#bd_Card_order facts]),
(bd_CinfiniteN, [#bd_Cinfinite facts]),
(bd_CnotzeroN, [#bd_Cnotzero facts]),
(collect_set_mapN, [Lazy.force (#collect_set_map facts)]),
(in_bdN, [Lazy.force (#in_bd facts)]),
(in_monoN, [Lazy.force (#in_mono facts)]),
(map_comp0N, [#map_comp0 axioms]),
(rel_mono_strong0N, [Lazy.force (#rel_mono_strong0 facts)]),
(pred_mono_strong0N, [Lazy.force (#pred_mono_strong0 facts)]),
(set_map0N, #set_map0 axioms),
(set_bdN, #set_bd axioms)] @
(witNs ~~ wit_thmss_of_bnf bnf)
|> map (fn (thmN, thms) =>
((qualify (Binding.qualify true (Binding.name_of bnf_b) (Binding.name thmN)), []),
[(thms, [])]));
in
Local_Theory.notes notes lthy0 |>> append noted0
end;
fun note_unless_dont_note (noted0, lthy0) =
let
val notes =
[(in_relN, [Lazy.force (#in_rel facts)], []),
(inj_mapN, [Lazy.force (#inj_map facts)], []),
(inj_map_strongN, [Lazy.force (#inj_map_strong facts)], []),
(map_compN, [Lazy.force (#map_comp facts)], []),
(map_cong0N, [#map_cong0 axioms], []),
(map_congN, [Lazy.force (#map_cong facts)], fundefcong_attrs),
(map_cong_simpN, [Lazy.force (#map_cong_simp facts)], []),
(map_cong_predN, [Lazy.force (#map_cong_pred facts)], []),
(map_idN, [Lazy.force (#map_id facts)], []),
(map_id0N, [#map_id0 axioms], []),
(map_transferN, [Lazy.force (#map_transfer facts)], []),
(map_identN, [Lazy.force (#map_ident facts)], []),
(map_ident_strongN, [Lazy.force (#map_ident_strong facts)], []),
(pred_monoN, [Lazy.force (#pred_mono facts)], mono_attrs),
(pred_mono_strongN, [Lazy.force (#pred_mono_strong facts)], []),
(pred_congN, [Lazy.force (#pred_cong facts)], fundefcong_attrs),
(pred_cong_simpN, [Lazy.force (#pred_cong_simp facts)], []),
(pred_mapN, [Lazy.force (#pred_map facts)], []),
(pred_relN, [Lazy.force (#pred_rel facts)], []),
(pred_transferN, [Lazy.force (#pred_transfer facts)], []),
(pred_TrueN, [Lazy.force (#pred_True facts)], []),
(pred_setN, [#pred_set axioms], []),
(rel_comppN, [Lazy.force (#rel_OO facts)], []),
(rel_compp_GrpN, no_refl [#rel_OO_Grp axioms], []),
(rel_conversepN, [Lazy.force (#rel_conversep facts)], []),
(rel_eqN, [Lazy.force (#rel_eq facts)], []),
(rel_eq_onpN, [Lazy.force (#rel_eq_onp facts)], []),
(rel_flipN, [Lazy.force (#rel_flip facts)], []),
(rel_GrpN, [Lazy.force (#rel_Grp facts)], []),
(rel_mapN, Lazy.force (#rel_map facts), []),
(rel_monoN, [Lazy.force (#rel_mono facts)], mono_attrs),
(rel_mono_strongN, [Lazy.force (#rel_mono_strong facts)], []),
(rel_congN, [Lazy.force (#rel_cong facts)], fundefcong_attrs),
(rel_cong_simpN, [Lazy.force (#rel_cong_simp facts)], []),
(rel_reflN, [Lazy.force (#rel_refl facts)], []),
(rel_refl_strongN, [Lazy.force (#rel_refl_strong facts)], []),
(rel_reflpN, [Lazy.force (#rel_reflp facts)], []),
(rel_sympN, [Lazy.force (#rel_symp facts)], []),
(rel_transpN, [Lazy.force (#rel_transp facts)], []),
(rel_transferN, [Lazy.force (#rel_transfer facts)], []),
(set_mapN, map Lazy.force (#set_map facts), []),
(set_transferN, Lazy.force (#set_transfer facts), [])]
|> filter_out (null o #2)
|> map (fn (thmN, thms, attrs) =>
((qualify (Binding.qualify true (Binding.name_of bnf_b) (Binding.name thmN)), attrs),
[(thms, [])]));
in
Local_Theory.notes notes lthy0 |>> append noted0
end;
in
([], lthy)
|> fact_policy = Note_All ? note_if_note_all
|> fact_policy <> Dont_Note ? note_unless_dont_note
|>> (fn [] => bnf | noted => morph_bnf (substitute_noted_thm noted) bnf)
end;
fun note_bnf_defs bnf lthy =
let
fun mk_def_binding cst_of =
Thm.def_binding (Binding.qualified_name (dest_Const (cst_of bnf) |> fst));
val notes =
[(mk_def_binding map_of_bnf, map_def_of_bnf bnf),
(mk_def_binding rel_of_bnf, rel_def_of_bnf bnf),
(mk_def_binding pred_of_bnf, pred_def_of_bnf bnf)] @
@{map 2} (pair o mk_def_binding o K) (sets_of_bnf bnf) (set_defs_of_bnf bnf)
|> map (fn (b, thm) => ((b, []), [([thm], [])]));
in
lthy
|> Local_Theory.notes notes
|>> (fn noted => morph_bnf (substitute_noted_thm noted) bnf)
end;
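(*Non-emptiness witness goals: for a witness wit with argument positions I, any
  element z of the i-th set of (wit x1 ... xn) must equal the i-th argument if i
  is in I, and the i-th set must be empty (False conclusion) otherwise.*)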
fun mk_wit_goals zs bs sets (I, wit) =
let
val xs = map (nth bs) I;
fun wit_goal i =
let
val z = nth zs i;
val set_wit = nth sets i $ Term.list_comb (wit, xs);
val concl = HOLogic.mk_Trueprop
(if member (op =) I i then HOLogic.mk_eq (z, nth bs i) else \<^term>\<open>False\<close>);
in
fold_rev Logic.all (z :: xs) (Logic.mk_implies (mk_Trueprop_mem (z, set_wit), concl))
end;
in
map wit_goal (0 upto length sets - 1)
end;
(* Define new BNFs *)
fun define_bnf_consts const_policy fact_policy internal Ds_opt map_b rel_b pred_b set_bs
(((((((bnf_b, T_rhs), map_rhs), set_rhss), bd_rhs), wit_rhss), rel_rhs_opt), pred_rhs_opt)
no_defs_lthy =
let
val live = length set_rhss;
val def_qualify = Binding.qualify false (Binding.name_of bnf_b);
fun mk_prefix_binding pre = Binding.prefix_name (pre ^ "_") bnf_b;
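(*Either inline the right-hand side, returning it paired with the reflexivity
  theorem in place of a definition, or introduce a genuine constant; inlining is
  only considered for user-specified or unnoted definitions and further depends on
  the inline policy and the size and shape of the rhs.*)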
fun maybe_define user_specified (b, rhs) lthy =
let
val inline =
(user_specified orelse fact_policy = Dont_Note) andalso
(case const_policy of
Dont_Inline => false
| Hardly_Inline => Term.is_Free rhs orelse Term.is_Const rhs
| Smart_Inline => Term.size_of_term rhs <= smart_max_inline_term_size
| Do_Inline => true);
in
if inline then
((rhs, Drule.reflexive_thm), lthy)
else
let val b = b () in
apfst (apsnd snd)
((if internal then Local_Theory.define_internal else Local_Theory.define)
((b, NoSyn), ((Binding.concealed (Thm.def_binding b), []), rhs)) lthy)
end
end;
val map_bind_def =
(fn () => def_qualify (if Binding.is_empty map_b then mk_prefix_binding mapN else map_b),
map_rhs);
val set_binds_defs =
let
fun set_name i get_b =
(case try (nth set_bs) (i - 1) of
SOME b => if Binding.is_empty b then get_b else K b
| NONE => get_b) #> def_qualify;
val bs = if live = 1 then [set_name 1 (fn () => mk_prefix_binding setN)]
else map (fn i => set_name i (fn () => mk_prefix_binding (mk_setN i))) (1 upto live);
in bs ~~ set_rhss end;
val bd_bind_def = (fn () => def_qualify (mk_prefix_binding bdN), bd_rhs);
val (((bnf_map_term, raw_map_def),
(bnf_set_terms, raw_set_defs)),
(lthy, lthy_old)) =
no_defs_lthy
|> (snd o Local_Theory.begin_nested)
|> maybe_define true map_bind_def
||>> apfst split_list o fold_map (maybe_define true) set_binds_defs
||> `Local_Theory.end_nested;
val phi = Proof_Context.export_morphism lthy_old lthy;
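(*phi exports the terms and theorems produced inside the nested target back into
  the enclosing local theory.*)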
val ((bnf_bd_term, raw_bd_def), (lthy, lthy_old)) =
lthy
|> (snd o Local_Theory.begin_nested)
|> maybe_define true bd_bind_def
||> `Local_Theory.end_nested;
val phi' = Proof_Context.export_morphism lthy_old lthy;
val bnf_map_def = Morphism.thm phi raw_map_def;
val bnf_set_defs = map (Morphism.thm phi) raw_set_defs;
val bnf_bd_def = Morphism.thm phi' raw_bd_def;
val bnf_map = Morphism.term phi bnf_map_term;
(*TODO: handle errors*)
(*simple shape analysis of a map function*)
val ((alphas, betas), (Calpha, _)) =
fastype_of bnf_map
|> strip_typeN live
|>> map_split dest_funT
||> dest_funT
handle TYPE _ => error "Bad map function";
val Calpha_params = map TVar (Term.add_tvarsT Calpha []);
val bnf_T = Morphism.typ phi T_rhs;
val bad_args = Term.add_tfreesT bnf_T [];
val _ = null bad_args orelse error ("Locally fixed type arguments " ^
commas_quote (map (Syntax.string_of_typ no_defs_lthy o TFree) bad_args));
val bnf_sets =
map2 (normalize_set Calpha_params) alphas (map (Morphism.term phi) bnf_set_terms);
val bnf_bd =
Term.subst_TVars (Term.add_tvar_namesT bnf_T [] ~~ Calpha_params)
(Morphism.term phi' bnf_bd_term);
(*TODO: assert Ds = (TVars of bnf_map) \ (alphas @ betas) as sets*)
val deads = (case Ds_opt of
NONE => subtract (op =) (alphas @ betas) (map TVar (Term.add_tvars bnf_map []))
| SOME Ds => map (Morphism.typ phi) Ds);
(*TODO: further checks of type of bnf_map*)
(*TODO: check types of bnf_sets*)
(*TODO: check type of bnf_bd*)
(*TODO: check type of bnf_rel*)
fun mk_bnf_map Ds As' Bs' =
Term.subst_atomic_types ((deads ~~ Ds) @ (alphas ~~ As') @ (betas ~~ Bs')) bnf_map;
fun mk_bnf_t Ds As' = Term.subst_atomic_types ((deads ~~ Ds) @ (alphas ~~ As'));
fun mk_bnf_T Ds As' = Term.typ_subst_atomic ((deads ~~ Ds) @ (alphas ~~ As'));
val (((As, Bs), unsorted_Ds), names_lthy) = lthy
|> mk_TFrees live
||>> mk_TFrees live
||>> mk_TFrees (length deads);
val Ds = map2 (resort_tfree_or_tvar o Type.sort_of_atyp) deads unsorted_Ds;
val RTs = map2 (curry HOLogic.mk_prodT) As Bs;
val pred2RTs = map2 mk_pred2T As Bs;
val (Rs, Rs') = names_lthy |> mk_Frees' "R" pred2RTs |> fst;
val CA = mk_bnf_T Ds As Calpha;
val CR = mk_bnf_T Ds RTs Calpha;
val setRs =
@{map 3} (fn R => fn T => fn U =>
HOLogic.Collect_const (HOLogic.mk_prodT (T, U)) $ HOLogic.mk_case_prod R) Rs As Bs;
(*Grp (in (Collect (split R1) .. Collect (split Rn))) (map fst .. fst)^--1 OO
Grp (in (Collect (split R1) .. Collect (split Rn))) (map snd .. snd)*)
val rel_spec =
let
val map1 = Term.list_comb (mk_bnf_map Ds RTs As, map fst_const RTs);
val map2 = Term.list_comb (mk_bnf_map Ds RTs Bs, map snd_const RTs);
val bnf_in = mk_in setRs (map (mk_bnf_t Ds RTs) bnf_sets) CR;
in
mk_rel_compp (mk_conversep (mk_Grp bnf_in map1), mk_Grp bnf_in map2)
|> fold_rev Term.absfree Rs'
end;
val rel_rhs = the_default rel_spec rel_rhs_opt;
val rel_bind_def =
(fn () => def_qualify (if Binding.is_empty rel_b then mk_prefix_binding relN else rel_b),
rel_rhs);
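(*Default predicator: pred P1 ... Pn x holds iff every element of the i-th set of
  x satisfies Pi (and is simply True for a nullary BNF).*)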
val pred_spec =
if live = 0 then Term.absdummy (mk_bnf_T Ds As Calpha) \<^term>\<open>True\<close> else
let
val sets = map (mk_bnf_t Ds As) bnf_sets;
val argTs = map mk_pred1T As;
val T = mk_bnf_T Ds As Calpha;
val ((Ps, Ps'), x) = lthy
|> mk_Frees' "P" argTs
||>> yield_singleton (mk_Frees "x") T
|> fst;
val conjs = map2 (fn set => fn P => mk_Ball (set $ x) P) sets Ps;
in
fold_rev Term.absfree Ps'
(Term.absfree (dest_Free x) (Library.foldr1 HOLogic.mk_conj conjs))
end;
val pred_rhs = the_default pred_spec pred_rhs_opt;
val pred_bind_def =
(fn () => def_qualify (if Binding.is_empty pred_b then mk_prefix_binding predN else pred_b),
pred_rhs);
val wit_rhss =
if null wit_rhss then
[fold_rev Term.absdummy As (Term.list_comb (mk_bnf_map Ds As As,
map2 (fn T => fn i => Term.absdummy T (Bound i)) As (live downto 1)) $
Const (\<^const_name>\<open>undefined\<close>, CA))]
else wit_rhss;
val nwits = length wit_rhss;
val wit_binds_defs =
let
val bs = if nwits = 1 then [fn () => def_qualify (mk_prefix_binding witN)]
else map (fn i => fn () => def_qualify (mk_prefix_binding (mk_witN i))) (1 upto nwits);
in bs ~~ wit_rhss end;
val ((((bnf_rel_term, raw_rel_def), (bnf_pred_term, raw_pred_def)),
(bnf_wit_terms, raw_wit_defs)), (lthy, lthy_old)) =
lthy
|> (snd o Local_Theory.begin_nested)
|> maybe_define (is_some rel_rhs_opt) rel_bind_def
||>> maybe_define (is_some pred_rhs_opt) pred_bind_def
||>> apfst split_list o fold_map (maybe_define (not (null wit_rhss))) wit_binds_defs
||> `Local_Theory.end_nested;
val phi = Proof_Context.export_morphism lthy_old lthy;
val bnf_rel_def = Morphism.thm phi raw_rel_def;
val bnf_rel = Morphism.term phi bnf_rel_term;
fun mk_bnf_rel Ds As' Bs' =
normalize_rel lthy (map2 mk_pred2T As' Bs') (mk_bnf_T Ds As' Calpha) (mk_bnf_T Ds Bs' Calpha)
bnf_rel;
val bnf_pred_def = Morphism.thm phi raw_pred_def;
val bnf_pred = Morphism.term phi bnf_pred_term;
fun mk_bnf_pred Ds As' =
normalize_pred lthy (map mk_pred1T As') (mk_bnf_T Ds As' Calpha) bnf_pred;
val bnf_wit_defs = map (Morphism.thm phi) raw_wit_defs;
val bnf_wits =
map (normalize_wit Calpha_params Calpha alphas o Morphism.term phi) bnf_wit_terms;
fun mk_rel_spec Ds' As' Bs' =
Term.subst_atomic_types ((Ds ~~ Ds') @ (As ~~ As') @ (Bs ~~ Bs')) rel_spec;
fun mk_pred_spec Ds' As' =
Term.subst_atomic_types ((Ds ~~ Ds') @ (As ~~ As')) pred_spec;
in
(((alphas, betas, deads, Calpha),
(bnf_map, bnf_sets, bnf_bd, bnf_wits, bnf_rel, bnf_pred),
(bnf_map_def, bnf_set_defs, bnf_bd_def, bnf_wit_defs, bnf_rel_def, bnf_pred_def),
(mk_bnf_map, mk_bnf_t, mk_bnf_T, mk_bnf_rel, mk_bnf_pred, mk_rel_spec, mk_pred_spec)), lthy)
end;
fun prepare_def const_policy mk_fact_policy internal qualify prep_typ prep_term Ds_opt map_b rel_b
pred_b set_bs (((((((raw_bnf_b, raw_bnf_T), raw_map), raw_sets), raw_bd), raw_wits), raw_rel_opt),
raw_pred_opt) no_defs_lthy =
let
val fact_policy = mk_fact_policy no_defs_lthy;
val bnf_b = qualify raw_bnf_b;
val live = length raw_sets;
val T_rhs = prep_typ no_defs_lthy raw_bnf_T;
val map_rhs = prep_term no_defs_lthy raw_map;
val set_rhss = map (prep_term no_defs_lthy) raw_sets;
val bd_rhs = prep_term no_defs_lthy raw_bd;
val wit_rhss = map (prep_term no_defs_lthy) raw_wits;
val rel_rhs_opt = Option.map (prep_term no_defs_lthy) raw_rel_opt;
val pred_rhs_opt = Option.map (prep_term no_defs_lthy) raw_pred_opt;
fun err T =
error ("Trying to register the type " ^ quote (Syntax.string_of_typ no_defs_lthy T) ^
" as unnamed BNF");
val (bnf_b, key) =
if Binding.is_empty bnf_b then
(case T_rhs of
Type (C, Ts) =>
if forall (can dest_TFree) Ts andalso not (has_duplicates (op =) Ts) then
(Binding.qualified_name C, C)
else
err T_rhs
| T => err T)
else
(bnf_b, Local_Theory.full_name no_defs_lthy bnf_b);
val (((alphas, betas, deads, Calpha),
(bnf_map, bnf_sets, bnf_bd, bnf_wits, bnf_rel, bnf_pred),
(bnf_map_def, bnf_set_defs, bnf_bd_def, bnf_wit_defs, bnf_rel_def, bnf_pred_def),
(mk_bnf_map_Ds, mk_bnf_t_Ds, mk_bnf_T_Ds, _, _, mk_rel_spec, mk_pred_spec)), lthy) =
define_bnf_consts const_policy fact_policy internal Ds_opt map_b rel_b pred_b set_bs
(((((((bnf_b, T_rhs), map_rhs), set_rhss), bd_rhs), wit_rhss), rel_rhs_opt), pred_rhs_opt)
no_defs_lthy;
val dead = length deads;
val (((((((As', Bs'), Cs), unsorted_Ds), Es), B1Ts), B2Ts), (Ts, T)) = lthy
|> mk_TFrees live
||>> mk_TFrees live
||>> mk_TFrees live
||>> mk_TFrees dead
||>> mk_TFrees live
||>> mk_TFrees live
||>> mk_TFrees live
||> fst o mk_TFrees 1
||> the_single
||> `(replicate live);
val Ds = map2 (resort_tfree_or_tvar o Type.sort_of_atyp) deads unsorted_Ds;
val mk_bnf_map = mk_bnf_map_Ds Ds;
val mk_bnf_t = mk_bnf_t_Ds Ds;
val mk_bnf_T = mk_bnf_T_Ds Ds;
val pred1PTs = map mk_pred1T As';
val pred1QTs = map mk_pred1T Bs';
val pred2RTs = map2 mk_pred2T As' Bs';
val pred2RTsAsCs = map2 mk_pred2T As' Cs;
val pred2RTsBsCs = map2 mk_pred2T Bs' Cs;
val pred2RTsBsEs = map2 mk_pred2T Bs' Es;
val pred2RTsCsBs = map2 mk_pred2T Cs Bs';
val pred2RTsCsEs = map2 mk_pred2T Cs Es;
val pred2RT's = map2 mk_pred2T Bs' As';
val self_pred2RTs = map2 mk_pred2T As' As';
val transfer_domRTs = map2 mk_pred2T As' B1Ts;
val transfer_ranRTs = map2 mk_pred2T Bs' B2Ts;
val CA' = mk_bnf_T As' Calpha;
val CB' = mk_bnf_T Bs' Calpha;
val CC' = mk_bnf_T Cs Calpha;
val CE' = mk_bnf_T Es Calpha;
val CB1 = mk_bnf_T B1Ts Calpha;
val CB2 = mk_bnf_T B2Ts Calpha;
val bnf_map_AsAs = mk_bnf_map As' As';
val bnf_map_AsBs = mk_bnf_map As' Bs';
val bnf_map_AsCs = mk_bnf_map As' Cs;
val bnf_map_BsCs = mk_bnf_map Bs' Cs;
val bnf_sets_As = map (mk_bnf_t As') bnf_sets;
val bnf_sets_Bs = map (mk_bnf_t Bs') bnf_sets;
val bnf_bd_As = mk_bnf_t As' bnf_bd;
fun mk_bnf_rel RTs CA CB = normalize_rel lthy RTs CA CB bnf_rel;
fun mk_bnf_pred PTs CA = normalize_pred lthy PTs CA bnf_pred;
val ((((((((((((((((((((((((((fs, fs'), gs), hs), is), x), x'), y), y'), zs), zs'), ys), As),
As_copy), bs), (Ps, Ps')), Ps_copy), Qs), Rs), Rs_copy), Ss), S_AsCs), S_CsBs), S_BsEs),
transfer_domRs), transfer_ranRs), _) = lthy
|> mk_Frees "f" (map2 (curry op -->) As' Bs')
||>> mk_Frees "f" (map2 (curry op -->) As' Bs')
||>> mk_Frees "g" (map2 (curry op -->) Bs' Cs)
||>> mk_Frees "h" (map2 (curry op -->) As' Ts)
||>> mk_Frees "i" (map2 (curry op -->) As' Cs)
||>> yield_singleton (mk_Frees "x") CA'
||>> yield_singleton (mk_Frees "x") CA'
||>> yield_singleton (mk_Frees "y") CB'
||>> yield_singleton (mk_Frees "y") CB'
||>> mk_Frees "z" As'
||>> mk_Frees "z" As'
||>> mk_Frees "y" Bs'
||>> mk_Frees "A" (map HOLogic.mk_setT As')
||>> mk_Frees "A" (map HOLogic.mk_setT As')
||>> mk_Frees "b" As'
||>> mk_Frees' "P" pred1PTs
||>> mk_Frees "P" pred1PTs
||>> mk_Frees "Q" pred1QTs
||>> mk_Frees "R" pred2RTs
||>> mk_Frees "R" pred2RTs
||>> mk_Frees "S" pred2RTsBsCs
||>> mk_Frees "S" pred2RTsAsCs
||>> mk_Frees "S" pred2RTsCsBs
||>> mk_Frees "S" pred2RTsBsEs
||>> mk_Frees "R" transfer_domRTs
||>> mk_Frees "S" transfer_ranRTs;
val fs_copy = map2 (retype_const_or_free o fastype_of) fs gs;
val x_copy = retype_const_or_free CA' y';
val y_copy = retype_const_or_free CB' x';
val rel = mk_bnf_rel pred2RTs CA' CB';
val pred = mk_bnf_pred pred1PTs CA';
val pred' = mk_bnf_pred pred1QTs CB';
val relCsEs = mk_bnf_rel pred2RTsCsEs CC' CE';
val relAsAs = mk_bnf_rel self_pred2RTs CA' CA';
val bnf_wit_As = map (apsnd (mk_bnf_t As')) bnf_wits;
val map_id0_goal =
let val bnf_map_app_id = Term.list_comb (bnf_map_AsAs, map HOLogic.id_const As') in
mk_Trueprop_eq (bnf_map_app_id, HOLogic.id_const CA')
end;
val map_comp0_goal =
let
val bnf_map_app_comp = Term.list_comb (bnf_map_AsCs, map2 (curry HOLogic.mk_comp) gs fs);
val comp_bnf_map_app = HOLogic.mk_comp
(Term.list_comb (bnf_map_BsCs, gs), Term.list_comb (bnf_map_AsBs, fs));
in
fold_rev Logic.all (fs @ gs) (mk_Trueprop_eq (bnf_map_app_comp, comp_bnf_map_app))
end;
fun mk_map_cong_prem mk_implies x z set f f_copy =
Logic.all z (mk_implies (mk_Trueprop_mem (z, set $ x), mk_Trueprop_eq (f $ z, f_copy $ z)));
val map_cong0_goal =
let
val prems = @{map 4} (mk_map_cong_prem Logic.mk_implies x) zs bnf_sets_As fs fs_copy;
val eq = mk_Trueprop_eq (Term.list_comb (bnf_map_AsBs, fs) $ x,
Term.list_comb (bnf_map_AsBs, fs_copy) $ x);
in
fold_rev Logic.all (x :: fs @ fs_copy) (Logic.list_implies (prems, eq))
end;
val set_map0s_goal =
let
fun mk_goal setA setB f =
let
val set_comp_map = HOLogic.mk_comp (setB, Term.list_comb (bnf_map_AsBs, fs));
val image_comp_set = HOLogic.mk_comp (mk_image f, setA);
in
fold_rev Logic.all fs (mk_Trueprop_eq (set_comp_map, image_comp_set))
end;
in
@{map 3} mk_goal bnf_sets_As bnf_sets_Bs fs
end;
val card_order_bd_goal = HOLogic.mk_Trueprop (mk_card_order bnf_bd_As);
val cinfinite_bd_goal = HOLogic.mk_Trueprop (mk_cinfinite bnf_bd_As);
val regularCard_bd_goal = HOLogic.mk_Trueprop (mk_regularCard bnf_bd_As);
val set_bds_goal =
let
fun mk_goal set =
Logic.all x (HOLogic.mk_Trueprop (mk_ordLess (mk_card_of (set $ x)) bnf_bd_As));
in
map mk_goal bnf_sets_As
end;
val relAsCs = mk_bnf_rel pred2RTsAsCs CA' CC';
val relBsCs = mk_bnf_rel pred2RTsBsCs CB' CC';
val relCsBs = mk_bnf_rel pred2RTsCsBs CC' CB';
val rel_OO_lhs = Term.list_comb (relAsCs, map2 (curry mk_rel_compp) Rs Ss);
val rel_OO_rhs = mk_rel_compp (Term.list_comb (rel, Rs), Term.list_comb (relBsCs, Ss));
val le_rel_OO_goal =
fold_rev Logic.all (Rs @ Ss) (HOLogic.mk_Trueprop (mk_leq rel_OO_rhs rel_OO_lhs));
val rel_OO_Grp_goal = fold_rev Logic.all Rs (mk_Trueprop_eq (Term.list_comb (rel, Rs),
Term.list_comb (mk_rel_spec Ds As' Bs', Rs)));
val pred_set_goal = fold_rev Logic.all Ps (mk_Trueprop_eq (Term.list_comb (pred, Ps),
Term.list_comb (mk_pred_spec Ds As', Ps)));
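(*All BNF axioms as proof obligations, handed to the user or to the supplied
  tactics, in exactly the order in which mk_axioms will later take the proved
  theorems apart.*)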
val goals = zip_axioms map_id0_goal map_comp0_goal map_cong0_goal set_map0s_goal
card_order_bd_goal cinfinite_bd_goal regularCard_bd_goal set_bds_goal le_rel_OO_goal rel_OO_Grp_goal pred_set_goal;
val mk_wit_goals = mk_wit_goals bs zs bnf_sets_As;
fun triv_wit_tac ctxt = mk_trivial_wit_tac ctxt bnf_wit_defs;
val wit_goalss =
(if null raw_wits then SOME triv_wit_tac else NONE, map mk_wit_goals bnf_wit_As);
fun after_qed mk_wit_thms thms lthy =
let
val (axioms, nontriv_wit_thms) = apfst (mk_axioms live) (chop (length goals) thms);
val bd_Card_order = #bd_card_order axioms RS @{thm conjunct2[OF card_order_on_Card_order]};
val bd_Cinfinite = @{thm conjI} OF [#bd_cinfinite axioms, bd_Card_order];
val bd_Cnotzero = bd_Cinfinite RS @{thm Cinfinite_Cnotzero};
fun mk_collect_set_map () =
let
val defT = mk_bnf_T Ts Calpha --> HOLogic.mk_setT T;
val collect_map = HOLogic.mk_comp (mk_collect (map (mk_bnf_t Ts) bnf_sets) defT,
Term.list_comb (mk_bnf_map As' Ts, hs));
val image_collect = mk_collect
(map2 (fn h => fn set => HOLogic.mk_comp (mk_image h, set)) hs bnf_sets_As) defT;
(*collect {set1 ... setm} o map f1 ... fm = collect {f1` o set1 ... fm` o setm}*)
val goal = fold_rev Logic.all hs (mk_Trueprop_eq (collect_map, image_collect));
in
Goal.prove_sorry lthy [] [] goal (fn {context = ctxt, prems = _} =>
mk_collect_set_map_tac ctxt (#set_map0 axioms))
|> Thm.close_derivation \<^here>
end;
val collect_set_map = Lazy.lazy mk_collect_set_map;
fun mk_in_mono () =
let
val prems_mono = map2 (HOLogic.mk_Trueprop oo mk_leq) As As_copy;
val in_mono_goal =
fold_rev Logic.all (As @ As_copy)
(Logic.list_implies (prems_mono, HOLogic.mk_Trueprop
(mk_leq (mk_in As bnf_sets_As CA') (mk_in As_copy bnf_sets_As CA'))));
in
Goal.prove_sorry lthy [] [] in_mono_goal (fn {context = ctxt, prems = _} =>
mk_in_mono_tac ctxt live)
|> Thm.close_derivation \<^here>
end;
val in_mono = Lazy.lazy mk_in_mono;
fun mk_in_cong () =
let
val prems_cong = map2 (curry mk_Trueprop_eq) As As_copy;
val in_cong_goal =
fold_rev Logic.all (As @ As_copy)
(Logic.list_implies (prems_cong,
mk_Trueprop_eq (mk_in As bnf_sets_As CA', mk_in As_copy bnf_sets_As CA')));
in
Goal.prove_sorry lthy [] [] in_cong_goal
(fn {context = ctxt, prems = _} => (TRY o hyp_subst_tac ctxt THEN' rtac ctxt refl) 1)
|> Thm.close_derivation \<^here>
end;
val in_cong = Lazy.lazy mk_in_cong;
val map_id = Lazy.lazy (fn () => mk_map_id (#map_id0 axioms));
val map_ident0 = Lazy.lazy (fn () => mk_map_ident lthy (#map_id0 axioms));
val map_ident = Lazy.lazy (fn () => mk_map_ident lthy (Lazy.force map_id));
val map_ident_strong = Lazy.lazy (fn () =>
mk_map_ident_strong lthy (#map_cong0 axioms) (Lazy.force map_id));
val map_comp = Lazy.lazy (fn () => mk_map_comp (#map_comp0 axioms));
fun mk_map_cong mk_implies () =
let
val prem0 = mk_Trueprop_eq (x, x_copy);
val prems = @{map 4} (mk_map_cong_prem mk_implies x_copy) zs bnf_sets_As fs fs_copy;
val eq = mk_Trueprop_eq (Term.list_comb (bnf_map_AsBs, fs) $ x,
Term.list_comb (bnf_map_AsBs, fs_copy) $ x_copy);
val goal = fold_rev Logic.all (x :: x_copy :: fs @ fs_copy)
(Logic.list_implies (prem0 :: prems, eq));
in
Goal.prove_sorry lthy [] [] goal (fn {context = ctxt, prems = _} =>
unfold_thms_tac ctxt @{thms simp_implies_def} THEN
mk_map_cong_tac ctxt (#map_cong0 axioms))
|> Thm.close_derivation \<^here>
end;
val map_cong = Lazy.lazy (mk_map_cong Logic.mk_implies);
val map_cong_simp = Lazy.lazy (mk_map_cong (fn (a, b) => \<^term>\<open>simp_implies\<close> $ a $ b));
fun mk_inj_map () =
let
val prems = map (HOLogic.mk_Trueprop o mk_inj) fs;
val concl = HOLogic.mk_Trueprop (mk_inj (Term.list_comb (bnf_map_AsBs, fs)));
val goal = fold_rev Logic.all fs (Logic.list_implies (prems, concl));
in
Goal.prove_sorry lthy [] [] goal (fn {context = ctxt, prems = _} =>
mk_inj_map_tac ctxt live (Lazy.force map_id) (Lazy.force map_comp) (#map_cong0 axioms)
(Lazy.force map_cong))
|> Thm.close_derivation \<^here>
end;
val inj_map = Lazy.lazy mk_inj_map;
val set_map = map (fn thm => Lazy.lazy (fn () => mk_set_map thm)) (#set_map0 axioms);
val wit_thms =
if null nontriv_wit_thms then mk_wit_thms (map Lazy.force set_map) else nontriv_wit_thms;
fun mk_in_bd () =
let
val bdT = fst (dest_relT (fastype_of bnf_bd_As));
val bdTs = replicate live bdT;
val bd_bnfT = mk_bnf_T bdTs Calpha;
val surj_imp_ordLeq_inst = (if live = 0 then TrueI else
let
val ranTs = map (fn AT => mk_sumT (AT, HOLogic.unitT)) As';
val funTs = map (fn T => bdT --> T) ranTs;
val ran_bnfT = mk_bnf_T ranTs Calpha;
val (revTs, Ts) = `rev (bd_bnfT :: funTs);
val cTs = map (SOME o Thm.ctyp_of lthy) [ran_bnfT,
Library.foldr1 HOLogic.mk_prodT Ts];
val tinst = fold (fn T => fn t =>
HOLogic.mk_case_prod (Term.absdummy T t)) (tl revTs)
(Term.absdummy (hd revTs) (Term.list_comb (mk_bnf_map bdTs ranTs,
map Bound (live - 1 downto 0)) $ Bound live));
val cts = [NONE, SOME (Thm.cterm_of lthy tinst)];
in
Thm.instantiate' cTs cts @{thm surj_imp_ordLeq}
end);
val bd = mk_cexp
(if live = 0 then ctwo
else mk_csum (Library.foldr1 (uncurry mk_csum) (map mk_card_of As)) ctwo)
(mk_csum bnf_bd_As (mk_card_of (HOLogic.mk_UNIV bd_bnfT)));
val in_bd_goal =
fold_rev Logic.all As
(HOLogic.mk_Trueprop (mk_ordLeq (mk_card_of (mk_in As bnf_sets_As CA')) bd));
val weak_set_bds = map (fn thm => @{thm ordLess_imp_ordLeq} OF [thm]) (#set_bd axioms);
in
Goal.prove_sorry lthy [] [] in_bd_goal
(fn {context = ctxt, prems = _} => mk_in_bd_tac ctxt live surj_imp_ordLeq_inst
(Lazy.force map_comp) (Lazy.force map_id) (#map_cong0 axioms)
(map Lazy.force set_map) weak_set_bds (#bd_card_order axioms)
bd_Card_order bd_Cinfinite bd_Cnotzero)
|> Thm.close_derivation \<^here>
end;
val in_bd = Lazy.lazy mk_in_bd;
val rel_OO_Grp = #rel_OO_Grp axioms;
val rel_OO_Grps = no_refl [rel_OO_Grp];
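(*rel_Grp: the relator applied to graphs of functions equals the graph of the map function restricted to the "in" set*)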
fun mk_rel_Grp () =
let
val lhs = Term.list_comb (rel, map2 mk_Grp As fs);
val rhs = mk_Grp (mk_in As bnf_sets_As CA') (Term.list_comb (bnf_map_AsBs, fs));
val goal = fold_rev Logic.all (As @ fs) (mk_Trueprop_eq (lhs, rhs));
in
Goal.prove_sorry lthy [] [] goal
(fn {context = ctxt, prems = _} => mk_rel_Grp_tac ctxt rel_OO_Grps (#map_id0 axioms)
(#map_cong0 axioms) (Lazy.force map_id) (Lazy.force map_comp)
(map Lazy.force set_map))
|> Thm.close_derivation \<^here>
end;
val rel_Grp = Lazy.lazy mk_rel_Grp;
fun mk_rel_prems f = map2 (HOLogic.mk_Trueprop oo f) Rs Rs_copy;
fun mk_rel_concl f = HOLogic.mk_Trueprop
(f (Term.list_comb (rel, Rs), Term.list_comb (rel, Rs_copy)));
fun mk_rel_mono () =
let
val mono_prems = mk_rel_prems mk_leq;
val mono_concl = mk_rel_concl (uncurry mk_leq);
in
Goal.prove_sorry lthy [] []
(fold_rev Logic.all (Rs @ Rs_copy) (Logic.list_implies (mono_prems, mono_concl)))
(fn {context = ctxt, prems = _} =>
mk_rel_mono_tac ctxt rel_OO_Grps (Lazy.force in_mono))
|> Thm.close_derivation \<^here>
end;
fun mk_rel_cong0 () =
let
val cong_prems = mk_rel_prems (curry HOLogic.mk_eq);
val cong_concl = mk_rel_concl HOLogic.mk_eq;
in
Goal.prove_sorry lthy [] []
(fold_rev Logic.all (Rs @ Rs_copy) (Logic.list_implies (cong_prems, cong_concl)))
(fn {context = ctxt, prems = _} => (TRY o hyp_subst_tac ctxt THEN' rtac ctxt refl) 1)
|> Thm.close_derivation \<^here>
end;
val rel_mono = Lazy.lazy mk_rel_mono;
val rel_cong0 = Lazy.lazy mk_rel_cong0;
fun mk_rel_eq () =
Goal.prove_sorry lthy [] []
(mk_Trueprop_eq (Term.list_comb (relAsAs, map HOLogic.eq_const As'),
HOLogic.eq_const CA'))
(fn {context = ctxt, prems = _} =>
mk_rel_eq_tac ctxt live (Lazy.force rel_Grp) (Lazy.force rel_cong0) (#map_id0 axioms))
|> Thm.close_derivation \<^here>;
val rel_eq = Lazy.lazy mk_rel_eq;
fun mk_rel_conversep () =
let
val relBsAs = mk_bnf_rel pred2RT's CB' CA';
val lhs = Term.list_comb (relBsAs, map mk_conversep Rs);
val rhs = mk_conversep (Term.list_comb (rel, Rs));
val le_goal = fold_rev Logic.all Rs (HOLogic.mk_Trueprop (mk_leq lhs rhs));
val le_thm = Goal.prove_sorry lthy [] [] le_goal
(fn {context = ctxt, prems = _} => mk_rel_conversep_le_tac ctxt rel_OO_Grps
(Lazy.force rel_eq) (#map_cong0 axioms) (Lazy.force map_comp)
(map Lazy.force set_map))
|> Thm.close_derivation \<^here>;
val goal = fold_rev Logic.all Rs (mk_Trueprop_eq (lhs, rhs));
in
Goal.prove_sorry lthy [] [] goal
(fn {context = ctxt, prems = _} =>
mk_rel_conversep_tac ctxt le_thm (Lazy.force rel_mono))
|> Thm.close_derivation \<^here>
end;
val rel_conversep = Lazy.lazy mk_rel_conversep;
fun mk_rel_OO () =
Goal.prove_sorry lthy [] []
(fold_rev Logic.all (Rs @ Ss) (HOLogic.mk_Trueprop (mk_leq rel_OO_lhs rel_OO_rhs)))
(fn {context = ctxt, prems = _} => mk_rel_OO_le_tac ctxt rel_OO_Grps (Lazy.force rel_eq)
(#map_cong0 axioms) (Lazy.force map_comp) (map Lazy.force set_map))
|> Thm.close_derivation \<^here>
|> (fn thm => @{thm antisym} OF [thm, #le_rel_OO axioms]);
val rel_OO = Lazy.lazy mk_rel_OO;
fun mk_in_rel () = trans OF [rel_OO_Grp, @{thm OO_Grp_alt}] RS @{thm predicate2_eqD};
val in_rel = Lazy.lazy mk_in_rel;
fun mk_rel_flip () =
unfold_thms lthy @{thms conversep_iff}
(Lazy.force rel_conversep RS @{thm predicate2_eqD});
val rel_flip = Lazy.lazy mk_rel_flip;
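(*rel_mono_strong0: from "rel Rs x y" and pointwise implications "R_i a b ==> S_i a b" on the elements of the set functions, conclude "rel Ss x y"*)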
fun mk_rel_mono_strong0 () =
let
fun mk_prem setA setB R S a b =
HOLogic.mk_Trueprop
(mk_Ball (setA $ x) (Term.absfree (dest_Free a)
(mk_Ball (setB $ y) (Term.absfree (dest_Free b)
(HOLogic.mk_imp (R $ a $ b, S $ a $ b))))));
val prems = HOLogic.mk_Trueprop (Term.list_comb (rel, Rs) $ x $ y) ::
@{map 6} mk_prem bnf_sets_As bnf_sets_Bs Rs Rs_copy zs ys;
val concl = HOLogic.mk_Trueprop (Term.list_comb (rel, Rs_copy) $ x $ y);
in
Goal.prove_sorry lthy [] []
(fold_rev Logic.all (x :: y :: Rs @ Rs_copy) (Logic.list_implies (prems, concl)))
(fn {context = ctxt, prems = _} => mk_rel_mono_strong0_tac ctxt (Lazy.force in_rel)
(map Lazy.force set_map))
|> Thm.close_derivation \<^here>
end;
val rel_mono_strong0 = Lazy.lazy mk_rel_mono_strong0;
val rel_mono_strong = Lazy.map (Object_Logic.rulify lthy) rel_mono_strong0;
fun mk_rel_cong_prem mk_implies x x' z z' set set' R R_copy =
Logic.all z (Logic.all z'
(mk_implies (mk_Trueprop_mem (z, set $ x), mk_implies (mk_Trueprop_mem (z', set' $ x'),
mk_Trueprop_eq (R $ z $ z', R_copy $ z $ z')))));
fun mk_rel_cong mk_implies () =
let
val prem0 = mk_Trueprop_eq (x, x_copy);
val prem1 = mk_Trueprop_eq (y, y_copy);
val prems = @{map 6} (mk_rel_cong_prem mk_implies x_copy y_copy)
zs ys bnf_sets_As bnf_sets_Bs Rs Rs_copy;
val eq = mk_Trueprop_eq (Term.list_comb (rel, Rs) $ x $ y,
Term.list_comb (rel, Rs_copy) $ x_copy $ y_copy);
in
fold (Variable.add_free_names lthy) (eq :: prem0 :: prem1 :: prems) []
|> (fn vars => Goal.prove_sorry lthy vars (prem0 :: prem1 :: prems) eq
(fn {context = ctxt, prems} =>
mk_rel_cong_tac ctxt (chop 2 prems) (Lazy.force rel_mono_strong)))
|> Thm.close_derivation \<^here>
end;
val rel_cong = Lazy.lazy (mk_rel_cong Logic.mk_implies);
val rel_cong_simp = Lazy.lazy (mk_rel_cong (fn (a, b) => \<^term>\<open>simp_implies\<close> $ a $ b));
fun mk_pred_prems f = map2 (HOLogic.mk_Trueprop oo f) Ps Ps_copy;
fun mk_pred_concl f = HOLogic.mk_Trueprop
(f (Term.list_comb (pred, Ps), Term.list_comb (pred, Ps_copy)));
fun mk_pred_cong0 () =
let
val cong_prems = mk_pred_prems (curry HOLogic.mk_eq);
val cong_concl = mk_pred_concl HOLogic.mk_eq;
in
Goal.prove_sorry lthy [] []
(fold_rev Logic.all (Ps @ Ps_copy) (Logic.list_implies (cong_prems, cong_concl)))
(fn {context = ctxt, prems = _} => (TRY o hyp_subst_tac ctxt THEN' rtac ctxt refl) 1)
|> Thm.close_derivation \<^here>
end;
val pred_cong0 = Lazy.lazy mk_pred_cong0;
fun mk_rel_eq_onp () =
let
val lhs = Term.list_comb (relAsAs, map mk_eq_onp Ps);
val rhs = mk_eq_onp (Term.list_comb (pred, Ps));
in
Goal.prove_sorry lthy (map fst Ps') [] (mk_Trueprop_eq (lhs, rhs))
(fn {context = ctxt, prems = _} =>
mk_rel_eq_onp_tac ctxt (#pred_set axioms) (#map_id0 axioms) (Lazy.force rel_Grp))
|> Thm.close_derivation \<^here>
end;
val rel_eq_onp = Lazy.lazy mk_rel_eq_onp;
val pred_rel = Lazy.map (fn thm => thm RS sym RS @{thm eq_onp_eqD}) rel_eq_onp;
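(*predicator analogues of the relator rules: monotonicity and congruence for pred, derived via eq_onp from the corresponding rel facts*)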
fun mk_pred_mono_strong0 () =
let
fun mk_prem setA P Q a =
HOLogic.mk_Trueprop
(mk_Ball (setA $ x) (Term.absfree (dest_Free a) (HOLogic.mk_imp (P $ a, Q $ a))));
val prems = HOLogic.mk_Trueprop (Term.list_comb (pred, Ps) $ x) ::
@{map 4} mk_prem bnf_sets_As Ps Ps_copy zs;
val concl = HOLogic.mk_Trueprop (Term.list_comb (pred, Ps_copy) $ x);
in
Goal.prove_sorry lthy [] []
(fold_rev Logic.all (x :: Ps @ Ps_copy) (Logic.list_implies (prems, concl)))
(fn {context = ctxt, prems = _} =>
mk_pred_mono_strong0_tac ctxt (Lazy.force pred_rel) (Lazy.force rel_mono_strong0))
|> Thm.close_derivation \<^here>
end;
val pred_mono_strong0 = Lazy.lazy mk_pred_mono_strong0;
val pred_mono_strong = Lazy.map (Object_Logic.rulify lthy) pred_mono_strong0;
fun mk_pred_mono () =
let
val mono_prems = mk_pred_prems mk_leq;
val mono_concl = mk_pred_concl (uncurry mk_leq);
in
Goal.prove_sorry lthy [] []
(fold_rev Logic.all (Ps @ Ps_copy) (Logic.list_implies (mono_prems, mono_concl)))
(fn {context = ctxt, prems = _} =>
mk_pred_mono_tac ctxt (Lazy.force rel_eq_onp) (Lazy.force rel_mono))
|> Thm.close_derivation \<^here>
end;
val pred_mono = Lazy.lazy mk_pred_mono;
fun mk_pred_cong_prem mk_implies x z set P P_copy =
Logic.all z
(mk_implies (mk_Trueprop_mem (z, set $ x), mk_Trueprop_eq (P $ z, P_copy $ z)));
fun mk_pred_cong mk_implies () =
let
val prem0 = mk_Trueprop_eq (x, x_copy);
val prems = @{map 4} (mk_pred_cong_prem mk_implies x_copy) zs bnf_sets_As Ps Ps_copy;
val eq = mk_Trueprop_eq (Term.list_comb (pred, Ps) $ x,
Term.list_comb (pred, Ps_copy) $ x_copy);
in
fold (Variable.add_free_names lthy) (eq :: prem0 :: prems) []
|> (fn vars => Goal.prove_sorry lthy vars (prem0 :: prems) eq
(fn {context = ctxt, prems} =>
mk_rel_cong_tac ctxt (chop 1 prems) (Lazy.force pred_mono_strong)))
|> Thm.close_derivation \<^here>
end;
val pred_cong = Lazy.lazy (mk_pred_cong Logic.mk_implies);
val pred_cong_simp = Lazy.lazy (mk_pred_cong (fn (a, b) => \<^term>\<open>simp_implies\<close> $ a $ b));
fun mk_map_cong_pred () =
let
val prem0 = mk_Trueprop_eq (x, x_copy);
fun mk_eq f g z = Term.absfree (dest_Free z) (HOLogic.mk_eq (f $ z, g $ z));
val prem = HOLogic.mk_Trueprop
(Term.list_comb (pred, @{map 3} mk_eq fs fs_copy zs) $ x_copy);
val eq = mk_Trueprop_eq (Term.list_comb (bnf_map_AsBs, fs) $ x,
Term.list_comb (bnf_map_AsBs, fs_copy) $ x_copy);
val goal = fold_rev Logic.all (x :: x_copy :: fs @ fs_copy)
(Logic.list_implies ([prem0, prem], eq));
in
Goal.prove_sorry lthy [] [] goal (fn {context = ctxt, prems = _} =>
unfold_thms_tac ctxt [#pred_set axioms] THEN
HEADGOAL (EVERY' [REPEAT_DETERM o etac ctxt conjE,
etac ctxt (Lazy.force map_cong) THEN_ALL_NEW
(etac ctxt @{thm bspec} THEN' assume_tac ctxt)]))
|> Thm.close_derivation \<^here>
end;
val map_cong_pred = Lazy.lazy mk_map_cong_pred;
fun mk_rel_map () =
let
fun mk_goal lhs rhs =
fold_rev Logic.all ([x, y] @ S_CsBs @ S_AsCs @ is @ gs) (mk_Trueprop_eq (lhs, rhs));
val lhss =
[Term.list_comb (relCsBs, S_CsBs) $ (Term.list_comb (bnf_map_AsCs, is) $ x) $ y,
Term.list_comb (relAsCs, S_AsCs) $ x $ (Term.list_comb (bnf_map_BsCs, gs) $ y)];
val rhss =
[Term.list_comb (rel, @{map 3} (fn f => fn P => fn T =>
mk_vimage2p f (HOLogic.id_const T) $ P) is S_CsBs Bs') $ x $ y,
Term.list_comb (rel, @{map 3} (fn f => fn P => fn T =>
mk_vimage2p (HOLogic.id_const T) f $ P) gs S_AsCs As') $ x $ y];
val goals = map2 mk_goal lhss rhss;
in
goals
|> map (fn goal => Goal.prove_sorry lthy [] [] goal
(fn {context = ctxt, prems = _} =>
mk_rel_map0_tac ctxt live (Lazy.force rel_OO) (Lazy.force rel_conversep)
(Lazy.force rel_Grp) (Lazy.force map_id)))
|> map (unfold_thms lthy @{thms vimage2p_def[of id, simplified id_apply]
vimage2p_def[of _ id, simplified id_apply]})
|> map (Thm.close_derivation \<^here>)
end;
val rel_map = Lazy.lazy mk_rel_map;
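(*reflexivity, symmetry and transitivity properties of the relator*)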
fun mk_rel_refl () = @{thm ge_eq_refl[OF ord_eq_le_trans]} OF
[Lazy.force rel_eq RS sym, Lazy.force rel_mono OF (replicate live @{thm refl_ge_eq})];
val rel_refl = Lazy.lazy mk_rel_refl;
fun mk_rel_refl_strong () =
(rule_by_tactic lthy (ALLGOALS (Object_Logic.full_atomize_tac lthy))
((Lazy.force rel_eq RS @{thm predicate2_eqD}) RS @{thm iffD2[OF _ refl]} RS
Lazy.force rel_mono_strong)) OF
(replicate live @{thm diag_imp_eq_le});
val rel_refl_strong = Lazy.lazy mk_rel_refl_strong;
fun mk_rel_preserves mk_prop prop_conv_thm thm () =
let
val Rs = map2 retype_const_or_free self_pred2RTs Rs;
val prems = map (HOLogic.mk_Trueprop o mk_prop) Rs;
val goal = HOLogic.mk_Trueprop (mk_prop (Term.list_comb (relAsAs, Rs)));
val vars = fold (Variable.add_free_names lthy) (goal :: prems) [];
in
Goal.prove_sorry lthy vars [] (Logic.list_implies (prems, goal))
(fn {context = ctxt, prems = _} =>
unfold_thms_tac ctxt [prop_conv_thm] THEN
HEADGOAL (rtac ctxt (Lazy.force thm RS sym RS @{thm ord_eq_le_trans})
THEN' rtac ctxt (Lazy.force rel_mono) THEN_ALL_NEW assume_tac ctxt))
|> Thm.close_derivation \<^here>
end;
val rel_reflp = Lazy.lazy (mk_rel_preserves mk_reflp @{thm reflp_eq} rel_eq);
val rel_symp = Lazy.lazy (mk_rel_preserves mk_symp @{thm symp_conversep} rel_conversep);
val rel_transp = Lazy.lazy (mk_rel_preserves mk_transp @{thm transp_relcompp} rel_OO);
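(*pred_True: the predicator applied to constantly true predicates is constantly true*)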
fun mk_pred_True () =
let
val lhs = Term.list_comb (pred, map (fn T => absdummy T \<^term>\<open>True\<close>) As');
val rhs = absdummy CA' \<^term>\<open>True\<close>;
val goal = mk_Trueprop_eq (lhs, rhs);
in
Goal.prove_sorry lthy [] [] goal
(fn {context = ctxt, prems = _} =>
HEADGOAL (EVERY' (map (rtac ctxt) [ext, Lazy.force pred_rel RS trans,
Lazy.force rel_cong0 RS fun_cong RS fun_cong RS trans OF
replicate live @{thm eq_onp_True},
Lazy.force rel_eq RS fun_cong RS fun_cong RS trans, @{thm eqTrueI[OF refl]}])))
|> Thm.close_derivation \<^here>
end;
val pred_True = Lazy.lazy mk_pred_True;
fun mk_pred_map () =
let
val lhs = Term.list_comb (pred', Qs) $ (Term.list_comb (bnf_map_AsBs, fs) $ x);
val rhs = Term.list_comb (pred, @{map 2} (curry HOLogic.mk_comp) Qs fs) $ x;
val goal = mk_Trueprop_eq (lhs, rhs);
val vars = Variable.add_free_names lthy goal [];
val pred_set = #pred_set axioms RS fun_cong RS sym;
in
Goal.prove_sorry lthy vars [] goal
(fn {context = ctxt, prems = _} =>
HEADGOAL (rtac ctxt (pred_set RSN (2, pred_set RSN (2, box_equals)))) THEN
unfold_thms_tac ctxt
(@{thms Ball_image_comp ball_empty} @ map Lazy.force set_map) THEN
HEADGOAL (rtac ctxt refl))
|> Thm.close_derivation \<^here>
end;
val pred_map = Lazy.lazy mk_pred_map;
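(*parametricity (transfer) rules for the map, pred, rel and set functions*)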
fun mk_map_transfer () =
let
val rels = map2 mk_rel_fun transfer_domRs transfer_ranRs;
val rel = mk_rel_fun
(Term.list_comb (mk_bnf_rel transfer_domRTs CA' CB1, transfer_domRs))
(Term.list_comb (mk_bnf_rel transfer_ranRTs CB' CB2, transfer_ranRs));
val concl = HOLogic.mk_Trueprop
(fold_rev mk_rel_fun rels rel $ bnf_map_AsBs $ mk_bnf_map B1Ts B2Ts);
in
Goal.prove_sorry lthy [] []
(fold_rev Logic.all (transfer_domRs @ transfer_ranRs) concl)
(fn {context = ctxt, prems = _} => mk_map_transfer_tac ctxt (Lazy.force rel_mono)
(Lazy.force in_rel) (map Lazy.force set_map) (#map_cong0 axioms)
(Lazy.force map_comp))
|> Thm.close_derivation \<^here>
end;
val map_transfer = Lazy.lazy mk_map_transfer;
fun mk_pred_transfer () =
let
val iff = HOLogic.eq_const HOLogic.boolT;
val prem_rels = map (fn T => mk_rel_fun T iff) Rs;
val prem_elems = mk_rel_fun (Term.list_comb (rel, Rs)) iff;
val goal = HOLogic.mk_Trueprop
(fold_rev mk_rel_fun prem_rels prem_elems $ pred $ pred');
val vars = Variable.add_free_names lthy goal [];
in
Goal.prove_sorry lthy vars [] goal (fn {context = ctxt, prems = _} =>
mk_pred_transfer_tac ctxt live (Lazy.force in_rel) (Lazy.force pred_map)
(Lazy.force pred_cong))
|> Thm.close_derivation \<^here>
end;
val pred_transfer = Lazy.lazy mk_pred_transfer;
fun mk_rel_transfer () =
let
val iff = HOLogic.eq_const HOLogic.boolT;
val prem_rels =
map2 (fn T1 => fn T2 => mk_rel_fun T1 (mk_rel_fun T2 iff)) S_AsCs S_BsEs;
val prem_elems =
mk_rel_fun (Term.list_comb (mk_bnf_rel pred2RTsAsCs CA' CC', S_AsCs))
(mk_rel_fun (Term.list_comb (mk_bnf_rel pred2RTsBsEs CB' CE', S_BsEs)) iff);
val goal =
HOLogic.mk_Trueprop (fold_rev mk_rel_fun prem_rels prem_elems $ rel $ relCsEs);
val vars = Variable.add_free_names lthy goal [];
in
Goal.prove_sorry lthy vars [] goal
(fn {context = ctxt, prems = _} =>
mk_rel_transfer_tac ctxt (Lazy.force in_rel) (Lazy.force rel_map)
(Lazy.force rel_mono_strong))
|> Thm.close_derivation \<^here>
end;
val rel_transfer = Lazy.lazy mk_rel_transfer;
fun mk_set_transfer () =
let
val rel_sets = map2 (fn A => fn B => mk_rel 1 [A] [B] \<^term>\<open>rel_set\<close>) As' Bs';
val rel_Rs = Term.list_comb (rel, Rs);
val goals = @{map 4} (fn R => fn rel_set => fn setA => fn setB => HOLogic.mk_Trueprop
(mk_rel_fun rel_Rs (rel_set $ R) $ setA $ setB)) Rs rel_sets bnf_sets_As bnf_sets_Bs;
in
if null goals then []
else
let
val goal = Logic.mk_conjunction_balanced goals;
val vars = Variable.add_free_names lthy goal [];
in
Goal.prove_sorry lthy vars [] goal
(fn {context = ctxt, prems = _} =>
mk_set_transfer_tac ctxt (Lazy.force in_rel) (map Lazy.force set_map))
|> Thm.close_derivation \<^here>
|> Conjunction.elim_balanced (length goals)
end
end;
val set_transfer = Lazy.lazy mk_set_transfer;
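(*inj_map_strong: injectivity of the map function assuming injectivity only on the elements of the set functions*)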
fun mk_inj_map_strong () =
let
val assms = @{map 5} (fn setA => fn z => fn f => fn z' => fn f' =>
fold_rev Logic.all [z, z']
(Logic.mk_implies (mk_Trueprop_mem (z, setA $ x),
Logic.mk_implies (mk_Trueprop_mem (z', setA $ x'),
Logic.mk_implies (mk_Trueprop_eq (f $ z, f' $ z'),
mk_Trueprop_eq (z, z')))))) bnf_sets_As zs fs zs' fs';
val concl = Logic.mk_implies
(mk_Trueprop_eq
(Term.list_comb (bnf_map_AsBs, fs) $ x,
Term.list_comb (bnf_map_AsBs, fs') $ x'),
mk_Trueprop_eq (x, x'));
val goal = fold_rev Logic.all (x :: x' :: fs @ fs')
(fold_rev (curry Logic.mk_implies) assms concl);
in
Goal.prove_sorry lthy [] [] goal (fn {context = ctxt, prems = _} =>
mk_inj_map_strong_tac ctxt (Lazy.force rel_eq) (Lazy.force rel_map)
(Lazy.force rel_mono_strong))
|> Thm.close_derivation \<^here>
end;
val inj_map_strong = Lazy.lazy mk_inj_map_strong;
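(*assemble the derived definitions, facts and witnesses into the final BNF record and note the theorems*)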
val defs = mk_defs bnf_map_def bnf_set_defs bnf_rel_def bnf_pred_def;
val facts = mk_facts bd_Card_order bd_Cinfinite bd_Cnotzero collect_set_map in_bd in_cong
in_mono in_rel inj_map inj_map_strong map_comp map_cong map_cong_simp map_cong_pred map_id
map_ident0 map_ident map_ident_strong map_transfer rel_eq rel_flip set_map rel_cong0 rel_cong
rel_cong_simp rel_map rel_mono rel_mono_strong0 rel_mono_strong set_transfer rel_Grp rel_conversep
rel_OO rel_refl rel_refl_strong rel_reflp rel_symp rel_transp rel_transfer rel_eq_onp
pred_transfer pred_True pred_map pred_rel pred_mono_strong0 pred_mono_strong pred_mono
pred_cong0 pred_cong pred_cong_simp;
val wits = map2 mk_witness bnf_wits wit_thms;
val bnf_rel =
Term.subst_atomic_types ((Ds ~~ deads) @ (As' ~~ alphas) @ (Bs' ~~ betas)) rel;
val bnf_pred = Term.subst_atomic_types ((Ds ~~ deads) @ (As' ~~ alphas)) pred;
val bnf = mk_bnf bnf_b Calpha live alphas betas dead deads bnf_map bnf_sets bnf_bd axioms
defs facts wits bnf_rel bnf_pred;
in
note_bnf_thms fact_policy qualify bnf_b bnf lthy
end;
val one_step_defs =
no_reflexive (bnf_map_def :: bnf_bd_def :: bnf_set_defs @ bnf_wit_defs @
[bnf_rel_def, bnf_pred_def]);
in
(key, goals, wit_goalss, after_qed, lthy, one_step_defs)
end;
structure BNF_Plugin = Plugin(type T = bnf);
fun bnf_interpretation name f =
BNF_Plugin.interpretation name
(fn bnf => fn lthy => f (transfer_bnf (Proof_Context.theory_of lthy) bnf) lthy);
val interpret_bnf = BNF_Plugin.data;
fun register_bnf_raw key bnf =
- Local_Theory.declaration {syntax = false, pervasive = true}
+ Local_Theory.declaration {syntax = false, pervasive = true, pos = \<^here>}
(fn phi => Data.map (Symtab.update (key, morph_bnf phi bnf)));
fun register_bnf plugins key bnf =
register_bnf_raw key bnf #> interpret_bnf plugins bnf;
fun bnf_def const_policy fact_policy internal qualify tacs wit_tac Ds map_b rel_b pred_b set_bs
raw_csts =
(fn (_, goals, (triv_tac_opt, wit_goalss), after_qed, lthy, one_step_defs) =>
let
fun mk_wits_tac ctxt set_maps =
TRYALL Goal.conjunction_tac THEN
(case triv_tac_opt of
SOME tac => tac ctxt set_maps
| NONE => unfold_thms_tac ctxt one_step_defs THEN wit_tac ctxt);
val wit_goals = map Logic.mk_conjunction_balanced wit_goalss;
fun mk_wit_thms set_maps =
Goal.prove_sorry lthy [] [] (Logic.mk_conjunction_balanced wit_goals)
(fn {context = ctxt, prems = _} => mk_wits_tac ctxt set_maps)
|> Thm.close_derivation \<^here>
|> Conjunction.elim_balanced (length wit_goals)
|> map2 (Conjunction.elim_balanced o length) wit_goalss
|> (map o map) (Thm.forall_elim_vars 0);
in
map2 (Thm.close_derivation \<^here> oo Goal.prove_sorry lthy [] [])
goals (map (fn tac => fn {context = ctxt, prems = _} =>
unfold_thms_tac ctxt one_step_defs THEN tac ctxt) tacs)
|> (fn thms => after_qed mk_wit_thms (map single thms) lthy)
end) o prepare_def const_policy fact_policy internal qualify (K I) (K I) Ds map_b rel_b pred_b
set_bs raw_csts;
fun bnf_cmd (raw_csts, raw_plugins) =
(fn (key, goals, (triv_tac_opt, wit_goalss), after_qed, lthy, defs) =>
let
val plugins = raw_plugins lthy;
val wit_goals = map Logic.mk_conjunction_balanced wit_goalss;
fun mk_triv_wit_thms tac set_maps =
Goal.prove_sorry lthy [] [] (Logic.mk_conjunction_balanced wit_goals)
(fn {context = ctxt, prems = _} => TRYALL Goal.conjunction_tac THEN tac ctxt set_maps)
|> Thm.close_derivation \<^here>
|> Conjunction.elim_balanced (length wit_goals)
|> map2 (Conjunction.elim_balanced o length) wit_goalss
|> (map o map) (Thm.forall_elim_vars 0);
val (mk_wit_thms, nontriv_wit_goals) =
(case triv_tac_opt of
NONE => (fn _ => [], map (map (rpair [])) wit_goalss)
| SOME tac => (mk_triv_wit_thms tac, []));
in
lthy
|> Proof.theorem NONE (uncurry (register_bnf plugins key) oo after_qed mk_wit_thms)
(map (single o rpair []) goals @ nontriv_wit_goals)
|> Proof.unfolding ([[(@{thm OO_Grp_alt} :: @{thm mem_Collect_eq} :: defs, [])]])
|> Proof.refine_singleton (Method.Basic (fn ctxt =>
Method.SIMPLE_METHOD (TRYALL (rtac ctxt refl))))
end) o prepare_def Do_Inline (user_policy Note_Some) false I Syntax.read_typ Syntax.read_term
NONE Binding.empty Binding.empty Binding.empty [] raw_csts;
fun print_bnfs ctxt =
let
fun pretty_set sets i = Pretty.block
[Pretty.str (mk_setN (i + 1) ^ ":"), Pretty.brk 1,
Pretty.quote (Syntax.pretty_term ctxt (nth sets i))];
fun pretty_bnf (key, BNF {T, map, sets, bd, live, lives, dead, deads, ...}) =
Pretty.big_list
(Pretty.string_of (Pretty.block [Pretty.str key, Pretty.str ":", Pretty.brk 1,
Pretty.quote (Syntax.pretty_typ ctxt T)]))
([Pretty.block [Pretty.str "live:", Pretty.brk 1, Pretty.str (string_of_int live),
Pretty.brk 3, Pretty.list "[" "]" (List.map (Syntax.pretty_typ ctxt) lives)],
Pretty.block [Pretty.str "dead:", Pretty.brk 1, Pretty.str (string_of_int dead),
Pretty.brk 3, Pretty.list "[" "]" (List.map (Syntax.pretty_typ ctxt) deads)],
Pretty.block [Pretty.str (mapN ^ ":"), Pretty.brk 1,
Pretty.quote (Syntax.pretty_term ctxt map)]] @
List.map (pretty_set sets) (0 upto length sets - 1) @
[Pretty.block [Pretty.str (bdN ^ ":"), Pretty.brk 1,
Pretty.quote (Syntax.pretty_term ctxt bd)]]);
in
Pretty.big_list "Registered bounded natural functors:"
(map pretty_bnf (sort_by fst (Symtab.dest (Data.get (Context.Proof ctxt)))))
|> Pretty.writeln
end;
val _ =
Outer_Syntax.command \<^command_keyword>\<open>print_bnfs\<close>
"print all bounded natural functors"
(Scan.succeed (Toplevel.keep (print_bnfs o Toplevel.context_of)));
val _ =
Outer_Syntax.local_theory_to_proof \<^command_keyword>\<open>bnf\<close>
"register a type as a bounded natural functor"
(parse_opt_binding_colon -- Parse.typ --|
(Parse.reserved "map" -- \<^keyword>\<open>:\<close>) -- Parse.term --
Scan.optional ((Parse.reserved "sets" -- \<^keyword>\<open>:\<close>) |--
Scan.repeat1 (Scan.unless (Parse.reserved "bd") Parse.term)) [] --|
(Parse.reserved "bd" -- \<^keyword>\<open>:\<close>) -- Parse.term --
Scan.optional ((Parse.reserved "wits" -- \<^keyword>\<open>:\<close>) |--
Scan.repeat1 (Scan.unless (Parse.reserved "rel" ||
Parse.reserved "plugins") Parse.term)) [] --
Scan.option ((Parse.reserved "rel" -- \<^keyword>\<open>:\<close>) |-- Parse.term) --
Scan.option ((Parse.reserved "pred" -- \<^keyword>\<open>:\<close>) |-- Parse.term) --
Scan.optional Plugin_Name.parse_filter (K Plugin_Name.default_filter)
>> bnf_cmd);
end;
diff --git a/src/HOL/Tools/BNF/bnf_fp_def_sugar.ML b/src/HOL/Tools/BNF/bnf_fp_def_sugar.ML
--- a/src/HOL/Tools/BNF/bnf_fp_def_sugar.ML
+++ b/src/HOL/Tools/BNF/bnf_fp_def_sugar.ML
@@ -1,2927 +1,2927 @@
(*  Title:      HOL/Tools/BNF/bnf_fp_def_sugar.ML
    Author:     Jasmin Blanchette, TU Muenchen
    Author:     Martin Desharnais, TU Muenchen
    Copyright   2012, 2013, 2014
Sugared datatype and codatatype constructions.
*)
signature BNF_FP_DEF_SUGAR =
sig
type fp_ctr_sugar =
{ctrXs_Tss: typ list list,
ctor_iff_dtor: thm,
ctr_defs: thm list,
ctr_sugar: Ctr_Sugar.ctr_sugar,
ctr_transfers: thm list,
case_transfers: thm list,
disc_transfers: thm list,
sel_transfers: thm list}
type fp_bnf_sugar =
{map_thms: thm list,
map_disc_iffs: thm list,
map_selss: thm list list,
rel_injects: thm list,
rel_distincts: thm list,
rel_sels: thm list,
rel_intros: thm list,
rel_cases: thm list,
pred_injects: thm list,
set_thms: thm list,
set_selssss: thm list list list list,
set_introssss: thm list list list list,
set_cases: thm list}
type fp_co_induct_sugar =
{co_rec: term,
common_co_inducts: thm list,
co_inducts: thm list,
co_rec_def: thm,
co_rec_thms: thm list,
co_rec_discs: thm list,
co_rec_disc_iffs: thm list,
co_rec_selss: thm list list,
co_rec_codes: thm list,
co_rec_transfers: thm list,
co_rec_o_maps: thm list,
common_rel_co_inducts: thm list,
rel_co_inducts: thm list,
common_set_inducts: thm list,
set_inducts: thm list}
type fp_sugar =
{T: typ,
BT: typ,
X: typ,
fp: BNF_Util.fp_kind,
fp_res_index: int,
fp_res: BNF_FP_Util.fp_result,
pre_bnf: BNF_Def.bnf,
fp_bnf: BNF_Def.bnf,
absT_info: BNF_Comp.absT_info,
fp_nesting_bnfs: BNF_Def.bnf list,
live_nesting_bnfs: BNF_Def.bnf list,
fp_ctr_sugar: fp_ctr_sugar,
fp_bnf_sugar: fp_bnf_sugar,
fp_co_induct_sugar: fp_co_induct_sugar option}
val co_induct_of: 'a list -> 'a
val strong_co_induct_of: 'a list -> 'a
val morph_fp_bnf_sugar: morphism -> fp_bnf_sugar -> fp_bnf_sugar
val morph_fp_co_induct_sugar: morphism -> fp_co_induct_sugar -> fp_co_induct_sugar
val morph_fp_ctr_sugar: morphism -> fp_ctr_sugar -> fp_ctr_sugar
val morph_fp_sugar: morphism -> fp_sugar -> fp_sugar
val transfer_fp_sugar: theory -> fp_sugar -> fp_sugar
val fp_sugar_of: Proof.context -> string -> fp_sugar option
val fp_sugar_of_global: theory -> string -> fp_sugar option
val fp_sugars_of: Proof.context -> fp_sugar list
val fp_sugars_of_global: theory -> fp_sugar list
val fp_sugars_interpretation: string -> (fp_sugar list -> local_theory -> local_theory) ->
theory -> theory
val interpret_fp_sugars: (string -> bool) -> fp_sugar list -> local_theory -> local_theory
val register_fp_sugars_raw: fp_sugar list -> local_theory -> local_theory
val register_fp_sugars: (string -> bool) -> fp_sugar list -> local_theory -> local_theory
val merge_type_args: BNF_Util.fp_kind -> ''a list * ''a list -> ''a list
val type_args_named_constrained_of_spec: (((('a * 'b) * 'c) * 'd) * 'e) * 'f -> 'a
val type_binding_of_spec: (((('a * 'b) * 'c) * 'd) * 'e) * 'f -> 'b
val mixfix_of_spec: ((('a * 'b) * 'c) * 'd) * 'e -> 'b
val mixfixed_ctr_specs_of_spec: (('a * 'b) * 'c) * 'd -> 'b
val map_binding_of_spec: ('a * ('b * 'c * 'd)) * 'e -> 'b
val rel_binding_of_spec: ('a * ('b * 'c * 'd)) * 'e -> 'c
val pred_binding_of_spec: ('a * ('b * 'c * 'd)) * 'e -> 'd
val sel_default_eqs_of_spec: 'a * 'b -> 'b
val mk_parametricity_goal: Proof.context -> term list -> term -> term -> term
val flat_corec_preds_predsss_gettersss: 'a list -> 'a list list list -> 'a list list list ->
'a list
val mk_ctor: typ list -> term -> term
val mk_dtor: typ list -> term -> term
val mk_bnf_sets: BNF_Def.bnf -> string * term list
val liveness_of_fp_bnf: int -> BNF_Def.bnf -> bool list
val nesting_bnfs: Proof.context -> typ list list list -> typ list -> BNF_Def.bnf list
val massage_simple_notes: string -> (bstring * 'a list * (int -> 'b)) list ->
((binding * 'c list) * ('a list * 'b) list) list
val massage_multi_notes: string list -> typ list ->
(string * 'a list list * (string -> 'b)) list ->
((binding * 'b) * ('a list * 'c list) list) list
val define_ctrs_dtrs_for_type: string -> typ -> term -> term -> thm -> thm -> int -> int list ->
term -> binding list -> mixfix list -> typ list list -> local_theory ->
(term list list * term list * thm * thm list) * local_theory
val wrap_ctrs: (string -> bool) -> BNF_Util.fp_kind -> bool -> string -> thm -> int -> int list ->
thm -> thm -> binding list -> binding list list -> term list -> term list -> thm -> thm list ->
local_theory -> Ctr_Sugar.ctr_sugar * local_theory
val derive_map_set_rel_pred_thms: (string -> bool) -> BNF_Util.fp_kind -> int -> typ list ->
typ list -> typ -> typ -> thm list -> thm list -> thm list -> thm list -> thm list ->
thm list -> thm list -> thm list -> thm list -> string -> BNF_Def.bnf -> BNF_Def.bnf list ->
typ -> term -> thm -> thm -> thm -> thm list -> thm -> thm -> thm list -> thm -> thm list ->
thm list -> thm list -> typ list list -> Ctr_Sugar.ctr_sugar -> local_theory ->
(thm list * thm list * thm list list * thm list * thm list * thm list * thm list * thm list
* thm list * thm list * thm list list list list * thm list list list list * thm list
* thm list * thm list * thm list * thm list) * local_theory
type lfp_sugar_thms = (thm list * thm * Token.src list) * (thm list list * Token.src list)
val morph_lfp_sugar_thms: morphism -> lfp_sugar_thms -> lfp_sugar_thms
val transfer_lfp_sugar_thms: theory -> lfp_sugar_thms -> lfp_sugar_thms
type gfp_sugar_thms =
((thm list * thm) list * (Token.src list * Token.src list))
* thm list list
* thm list list
* (thm list list * Token.src list)
* (thm list list list * Token.src list)
val morph_gfp_sugar_thms: morphism -> gfp_sugar_thms -> gfp_sugar_thms
val transfer_gfp_sugar_thms: theory -> gfp_sugar_thms -> gfp_sugar_thms
val mk_co_recs_prelims: Proof.context -> BNF_Util.fp_kind -> typ list list list -> typ list ->
typ list -> typ list -> typ list -> int list -> int list list -> term list ->
term list
* (typ list list * typ list list list list * term list list * term list list list list) option
* (string * term list * term list list
* (((term list list * term list list * term list list list list * term list list list list)
* term list list list) * typ list)) option
val repair_nullary_single_ctr: typ list list -> typ list list
val mk_corec_p_pred_types: typ list -> int list -> typ list list
val mk_corec_fun_arg_types: typ list list list -> typ list -> typ list -> typ list -> int list ->
int list list -> term ->
typ list list
* (typ list list list list * typ list list list * typ list list list list * typ list)
val define_co_rec_as: BNF_Util.fp_kind -> typ list -> typ -> binding -> term -> local_theory ->
(term * thm) * local_theory
val define_rec:
typ list list * typ list list list list * term list list * term list list list list ->
(string -> binding) -> typ list -> typ list -> term list -> term -> Proof.context ->
(term * thm) * Proof.context
val define_corec: 'a * term list * term list list
* (((term list list * term list list * term list list list list * term list list list list)
* term list list list) * typ list) -> (string -> binding) -> 'b list -> typ list ->
term list -> term -> local_theory -> (term * thm) * local_theory
val mk_induct_raw_prem: (term -> term) -> Proof.context -> typ list list ->
(string * term list) list -> term -> term -> typ list -> typ list ->
term list * ((term * (term * term)) list * (int * term)) list * term
val finish_induct_prem: Proof.context -> int -> term list ->
term list * ((term * (term * term)) list * (int * term)) list * term -> term
val mk_coinduct_prem: Proof.context -> typ list list -> typ list list -> term list -> term ->
term -> term -> int -> term list -> term list list -> term list -> term list list ->
typ list list -> term
val mk_induct_attrs: term list list -> Token.src list
val mk_coinduct_attrs: typ list -> term list list -> term list list -> int list list ->
Token.src list * Token.src list
val derive_induct_recs_thms_for_types: (string -> bool) -> BNF_Def.bnf list ->
('a * typ list list list list * term list list * 'b) option -> thm -> thm list ->
BNF_Def.bnf list -> BNF_Def.bnf list -> typ list -> typ list -> typ list ->
typ list list list -> thm list -> thm list -> thm list -> term list list -> thm list list ->
term list -> thm list -> Proof.context -> lfp_sugar_thms
val derive_coinduct_thms_for_types: Proof.context -> bool -> (term -> term) -> BNF_Def.bnf list ->
thm -> thm list -> BNF_Def.bnf list -> typ list -> typ list -> typ list list list -> int list ->
thm list -> thm list -> (thm -> thm) -> thm list list -> Ctr_Sugar.ctr_sugar list ->
(thm list * thm) list
val derive_coinduct_corecs_thms_for_types: Proof.context -> BNF_Def.bnf list ->
string * term list * term list list
* (((term list list * term list list * term list list list list * term list list list list)
* term list list list) * typ list) ->
thm -> thm list -> thm list -> thm list -> BNF_Def.bnf list -> typ list -> typ list ->
typ list -> typ list list list -> int list list -> int list list -> int list -> thm list ->
thm list -> (thm -> thm) -> thm list list -> Ctr_Sugar.ctr_sugar list -> term list ->
thm list -> gfp_sugar_thms
val co_datatypes: BNF_Util.fp_kind -> (mixfix list -> binding list -> binding list ->
binding list -> binding list list -> binding list -> (string * sort) list ->
typ list * typ list list -> BNF_Def.bnf list -> BNF_Comp.absT_info list -> local_theory ->
BNF_FP_Util.fp_result * local_theory) ->
Ctr_Sugar.ctr_options
* ((((((binding option * (typ * sort)) list * binding) * mixfix)
* ((binding, binding * typ) Ctr_Sugar.ctr_spec * mixfix) list) *
(binding * binding * binding))
* term list) list ->
local_theory -> local_theory
val co_datatype_cmd: BNF_Util.fp_kind ->
(mixfix list -> binding list -> binding list -> binding list -> binding list list ->
binding list -> (string * sort) list -> typ list * typ list list -> BNF_Def.bnf list ->
BNF_Comp.absT_info list -> local_theory -> BNF_FP_Util.fp_result * Proof.context) ->
((Proof.context -> Plugin_Name.filter) * bool)
* ((((((binding option * (string * string option)) list * binding) * mixfix)
* ((binding, binding * string) Ctr_Sugar.ctr_spec * mixfix) list)
* (binding * binding * binding)) * string list) list ->
Proof.context -> local_theory
val parse_ctr_arg: (binding * string) parser
val parse_ctr_specs: ((binding, binding * string) Ctr_Sugar.ctr_spec * mixfix) list parser
val parse_spec: ((((((binding option * (string * string option)) list * binding) * mixfix)
* ((binding, binding * string) Ctr_Sugar.ctr_spec * mixfix) list)
* (binding * binding * binding)) * string list) parser
val parse_co_datatype: (Ctr_Sugar.ctr_options_cmd
* ((((((binding option * (string * string option)) list * binding) * mixfix)
* ((binding, binding * string) Ctr_Sugar.ctr_spec * mixfix) list)
* (binding * binding * binding)) * string list) list) parser
val parse_co_datatype_cmd: BNF_Util.fp_kind -> (mixfix list -> binding list -> binding list ->
binding list -> binding list list -> binding list -> (string * sort) list ->
typ list * typ list list -> BNF_Def.bnf list -> BNF_Comp.absT_info list -> local_theory ->
BNF_FP_Util.fp_result * local_theory) ->
(local_theory -> local_theory) parser
end;
structure BNF_FP_Def_Sugar : BNF_FP_DEF_SUGAR =
struct
open Ctr_Sugar
open BNF_FP_Rec_Sugar_Util
open BNF_Util
open BNF_Comp
open BNF_Def
open BNF_FP_Util
open BNF_FP_Def_Sugar_Tactics
val Eq_prefix = "Eq_";
val case_transferN = "case_transfer";
val ctor_iff_dtorN = "ctor_iff_dtor";
val ctr_transferN = "ctr_transfer";
val disc_transferN = "disc_transfer";
val sel_transferN = "sel_transfer";
val corec_codeN = "corec_code";
val corec_transferN = "corec_transfer";
val map_disc_iffN = "map_disc_iff";
val map_o_corecN = "map_o_corec";
val map_selN = "map_sel";
val pred_injectN = "pred_inject";
val rec_o_mapN = "rec_o_map";
val rec_transferN = "rec_transfer";
val set0N = "set0";
val set_casesN = "set_cases";
val set_introsN = "set_intros";
val set_inductN = "set_induct";
val set_selN = "set_sel";
type fp_ctr_sugar =
{ctrXs_Tss: typ list list,
ctor_iff_dtor: thm,
ctr_defs: thm list,
ctr_sugar: Ctr_Sugar.ctr_sugar,
ctr_transfers: thm list,
case_transfers: thm list,
disc_transfers: thm list,
sel_transfers: thm list};
type fp_bnf_sugar =
{map_thms: thm list,
map_disc_iffs: thm list,
map_selss: thm list list,
rel_injects: thm list,
rel_distincts: thm list,
rel_sels: thm list,
rel_intros: thm list,
rel_cases: thm list,
pred_injects: thm list,
set_thms: thm list,
set_selssss: thm list list list list,
set_introssss: thm list list list list,
set_cases: thm list};
type fp_co_induct_sugar =
{co_rec: term,
common_co_inducts: thm list,
co_inducts: thm list,
co_rec_def: thm,
co_rec_thms: thm list,
co_rec_discs: thm list,
co_rec_disc_iffs: thm list,
co_rec_selss: thm list list,
co_rec_codes: thm list,
co_rec_transfers: thm list,
co_rec_o_maps: thm list,
common_rel_co_inducts: thm list,
rel_co_inducts: thm list,
common_set_inducts: thm list,
set_inducts: thm list};
type fp_sugar =
{T: typ,
BT: typ,
X: typ,
fp: fp_kind,
fp_res_index: int,
fp_res: fp_result,
pre_bnf: bnf,
fp_bnf: bnf,
absT_info: absT_info,
fp_nesting_bnfs: bnf list,
live_nesting_bnfs: bnf list,
fp_ctr_sugar: fp_ctr_sugar,
fp_bnf_sugar: fp_bnf_sugar,
fp_co_induct_sugar: fp_co_induct_sugar option};
fun co_induct_of (i :: _) = i;
fun strong_co_induct_of [_, s] = s;
fun morph_fp_bnf_sugar phi ({map_thms, map_disc_iffs, map_selss, rel_injects, rel_distincts,
rel_sels, rel_intros, rel_cases, pred_injects, set_thms, set_selssss, set_introssss,
set_cases} : fp_bnf_sugar) =
{map_thms = map (Morphism.thm phi) map_thms,
map_disc_iffs = map (Morphism.thm phi) map_disc_iffs,
map_selss = map (map (Morphism.thm phi)) map_selss,
rel_injects = map (Morphism.thm phi) rel_injects,
rel_distincts = map (Morphism.thm phi) rel_distincts,
rel_sels = map (Morphism.thm phi) rel_sels,
rel_intros = map (Morphism.thm phi) rel_intros,
rel_cases = map (Morphism.thm phi) rel_cases,
pred_injects = map (Morphism.thm phi) pred_injects,
set_thms = map (Morphism.thm phi) set_thms,
set_selssss = map (map (map (map (Morphism.thm phi)))) set_selssss,
set_introssss = map (map (map (map (Morphism.thm phi)))) set_introssss,
set_cases = map (Morphism.thm phi) set_cases};
fun morph_fp_co_induct_sugar phi ({co_rec, common_co_inducts, co_inducts, co_rec_def, co_rec_thms,
co_rec_discs, co_rec_disc_iffs, co_rec_selss, co_rec_codes, co_rec_transfers, co_rec_o_maps,
common_rel_co_inducts, rel_co_inducts, common_set_inducts, set_inducts} : fp_co_induct_sugar) =
{co_rec = Morphism.term phi co_rec,
common_co_inducts = map (Morphism.thm phi) common_co_inducts,
co_inducts = map (Morphism.thm phi) co_inducts,
co_rec_def = Morphism.thm phi co_rec_def,
co_rec_thms = map (Morphism.thm phi) co_rec_thms,
co_rec_discs = map (Morphism.thm phi) co_rec_discs,
co_rec_disc_iffs = map (Morphism.thm phi) co_rec_disc_iffs,
co_rec_selss = map (map (Morphism.thm phi)) co_rec_selss,
co_rec_codes = map (Morphism.thm phi) co_rec_codes,
co_rec_transfers = map (Morphism.thm phi) co_rec_transfers,
co_rec_o_maps = map (Morphism.thm phi) co_rec_o_maps,
common_rel_co_inducts = map (Morphism.thm phi) common_rel_co_inducts,
rel_co_inducts = map (Morphism.thm phi) rel_co_inducts,
common_set_inducts = map (Morphism.thm phi) common_set_inducts,
set_inducts = map (Morphism.thm phi) set_inducts};
fun morph_fp_ctr_sugar phi ({ctrXs_Tss, ctor_iff_dtor, ctr_defs, ctr_sugar, ctr_transfers,
case_transfers, disc_transfers, sel_transfers} : fp_ctr_sugar) =
{ctrXs_Tss = map (map (Morphism.typ phi)) ctrXs_Tss,
ctor_iff_dtor = Morphism.thm phi ctor_iff_dtor,
ctr_defs = map (Morphism.thm phi) ctr_defs,
ctr_sugar = morph_ctr_sugar phi ctr_sugar,
ctr_transfers = map (Morphism.thm phi) ctr_transfers,
case_transfers = map (Morphism.thm phi) case_transfers,
disc_transfers = map (Morphism.thm phi) disc_transfers,
sel_transfers = map (Morphism.thm phi) sel_transfers};
fun morph_fp_sugar phi ({T, BT, X, fp, fp_res, fp_res_index, pre_bnf, fp_bnf, absT_info,
fp_nesting_bnfs, live_nesting_bnfs, fp_ctr_sugar, fp_bnf_sugar,
fp_co_induct_sugar} : fp_sugar) =
{T = Morphism.typ phi T,
BT = Morphism.typ phi BT,
X = Morphism.typ phi X,
fp = fp,
fp_res = morph_fp_result phi fp_res,
fp_res_index = fp_res_index,
pre_bnf = morph_bnf phi pre_bnf,
fp_bnf = morph_bnf phi fp_bnf,
absT_info = morph_absT_info phi absT_info,
fp_nesting_bnfs = map (morph_bnf phi) fp_nesting_bnfs,
live_nesting_bnfs = map (morph_bnf phi) live_nesting_bnfs,
fp_ctr_sugar = morph_fp_ctr_sugar phi fp_ctr_sugar,
fp_bnf_sugar = morph_fp_bnf_sugar phi fp_bnf_sugar,
fp_co_induct_sugar = Option.map (morph_fp_co_induct_sugar phi) fp_co_induct_sugar};
val transfer_fp_sugar = morph_fp_sugar o Morphism.transfer_morphism;
structure Data = Generic_Data
(
type T = fp_sugar Symtab.table;
val empty = Symtab.empty;
fun merge data : T = Symtab.merge (K true) data;
);
fun fp_sugar_of_generic context =
Option.map (transfer_fp_sugar (Context.theory_of context)) o Symtab.lookup (Data.get context);
fun fp_sugars_of_generic context =
Symtab.fold (cons o transfer_fp_sugar (Context.theory_of context) o snd) (Data.get context) [];
val fp_sugar_of = fp_sugar_of_generic o Context.Proof;
val fp_sugar_of_global = fp_sugar_of_generic o Context.Theory;
val fp_sugars_of = fp_sugars_of_generic o Context.Proof;
val fp_sugars_of_global = fp_sugars_of_generic o Context.Theory;
structure FP_Sugar_Plugin = Plugin(type T = fp_sugar list);
fun fp_sugars_interpretation name f =
FP_Sugar_Plugin.interpretation name (fn fp_sugars => fn lthy =>
f (map (transfer_fp_sugar (Proof_Context.theory_of lthy)) fp_sugars) lthy);
val interpret_fp_sugars = FP_Sugar_Plugin.data;
val register_fp_sugars_raw =
fold (fn fp_sugar as {T = Type (s, _), ...} =>
- Local_Theory.declaration {syntax = false, pervasive = true}
+ Local_Theory.declaration {syntax = false, pervasive = true, pos = \<^here>}
(fn phi => Data.map (Symtab.update (s, morph_fp_sugar phi fp_sugar))));
fun register_fp_sugars plugins fp_sugars =
register_fp_sugars_raw fp_sugars #> interpret_fp_sugars plugins fp_sugars;
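(*build one fp_sugar record per type of the mutual specification and register it, together with the underlying BNFs, with all enabled plugins*)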
fun interpret_bnfs_register_fp_sugars plugins Ts BTs Xs fp pre_bnfs absT_infos fp_nesting_bnfs
live_nesting_bnfs fp_res ctrXs_Tsss ctor_iff_dtors ctr_defss ctr_sugars co_recs co_rec_defs
map_thmss common_co_inducts co_inductss co_rec_thmss co_rec_discss co_rec_selsss rel_injectss
rel_distinctss map_disc_iffss map_selsss rel_selss rel_intross rel_casess pred_injectss
set_thmss set_selsssss set_introsssss set_casess ctr_transferss case_transferss disc_transferss
sel_transferss co_rec_disc_iffss co_rec_codess co_rec_transferss common_rel_co_inducts
rel_co_inductss common_set_inducts set_inductss co_rec_o_mapss noted =
let
val fp_sugars =
map_index (fn (kk, T) =>
{T = T, BT = nth BTs kk, X = nth Xs kk, fp = fp, fp_res = fp_res, fp_res_index = kk,
pre_bnf = nth pre_bnfs kk, absT_info = nth absT_infos kk,
fp_bnf = nth (#bnfs fp_res) kk,
fp_nesting_bnfs = fp_nesting_bnfs, live_nesting_bnfs = live_nesting_bnfs,
fp_ctr_sugar =
{ctrXs_Tss = nth ctrXs_Tsss kk,
ctor_iff_dtor = nth ctor_iff_dtors kk,
ctr_defs = nth ctr_defss kk,
ctr_sugar = nth ctr_sugars kk,
ctr_transfers = nth ctr_transferss kk,
case_transfers = nth case_transferss kk,
disc_transfers = nth disc_transferss kk,
sel_transfers = nth sel_transferss kk},
fp_bnf_sugar =
{map_thms = nth map_thmss kk,
map_disc_iffs = nth map_disc_iffss kk,
map_selss = nth map_selsss kk,
rel_injects = nth rel_injectss kk,
rel_distincts = nth rel_distinctss kk,
rel_sels = nth rel_selss kk,
rel_intros = nth rel_intross kk,
rel_cases = nth rel_casess kk,
pred_injects = nth pred_injectss kk,
set_thms = nth set_thmss kk,
set_selssss = nth set_selsssss kk,
set_introssss = nth set_introsssss kk,
set_cases = nth set_casess kk},
fp_co_induct_sugar = SOME
{co_rec = nth co_recs kk,
common_co_inducts = common_co_inducts,
co_inducts = nth co_inductss kk,
co_rec_def = nth co_rec_defs kk,
co_rec_thms = nth co_rec_thmss kk,
co_rec_discs = nth co_rec_discss kk,
co_rec_disc_iffs = nth co_rec_disc_iffss kk,
co_rec_selss = nth co_rec_selsss kk,
co_rec_codes = nth co_rec_codess kk,
co_rec_transfers = nth co_rec_transferss kk,
co_rec_o_maps = nth co_rec_o_mapss kk,
common_rel_co_inducts = common_rel_co_inducts,
rel_co_inducts = nth rel_co_inductss kk,
common_set_inducts = common_set_inducts,
set_inducts = nth set_inductss kk}}
|> morph_fp_sugar (substitute_noted_thm noted)) Ts;
in
register_fp_sugars_raw fp_sugars
#> fold (interpret_bnf plugins) (#bnfs fp_res)
#> interpret_fp_sugars plugins fp_sugars
end;
fun quasi_unambiguous_case_names names =
let
val ps = map (`Long_Name.base_name) names;
val dups = Library.duplicates (op =) (map fst ps);
fun underscore s =
let val ss = Long_Name.explode s
in space_implode "_" (drop (length ss - 2) ss) end;
in
map (fn (base, full) => if member (op =) dups base then underscore full else base) ps
|> Name.variant_list []
end;
fun zipper_map f =
let
fun zed _ [] = []
| zed xs (y :: ys) = f (xs, y, ys) :: zed (xs @ [y]) ys;
in zed [] end;
fun cannot_merge_types fp =
error ("Mutually " ^ co_prefix fp ^ "recursive types must have the same type parameters");
fun merge_type_arg fp T T' = if T = T' then T else cannot_merge_types fp;
fun merge_type_args fp (As, As') =
if length As = length As' then map2 (merge_type_arg fp) As As' else cannot_merge_types fp;
fun type_args_named_constrained_of_spec (((((ncAs, _), _), _), _), _) = ncAs;
fun type_binding_of_spec (((((_, b), _), _), _), _) = b;
fun mixfix_of_spec ((((_, mx), _), _), _) = mx;
fun mixfixed_ctr_specs_of_spec (((_, mx_ctr_specs), _), _) = mx_ctr_specs;
fun map_binding_of_spec ((_, (b, _, _)), _) = b;
fun rel_binding_of_spec ((_, (_, b, _)), _) = b;
fun pred_binding_of_spec ((_, (_, _, b)), _) = b;
fun sel_default_eqs_of_spec (_, ts) = ts;
fun ctr_sugar_kind_of_fp_kind Least_FP = Datatype
| ctr_sugar_kind_of_fp_kind Greatest_FP = Codatatype;
fun uncurry_thm 0 thm = thm
| uncurry_thm 1 thm = thm
| uncurry_thm n thm = rotate_prems ~1 (uncurry_thm (n - 1) (rotate_prems 1 (conjI RS thm)));
fun choose_binary_fun fs AB =
find_first (fastype_of #> binder_types #> (fn [A, B] => AB = (A, B))) fs;
fun build_binary_fun_app fs t u =
Option.map (rapp u o rapp t) (choose_binary_fun fs (fastype_of t, fastype_of u));
fun build_the_rel ctxt Rs Ts A B =
build_rel [] ctxt Ts [] (the o choose_binary_fun Rs) (A, B);
fun build_rel_app ctxt Rs Ts t u =
build_the_rel ctxt Rs Ts (fastype_of t) (fastype_of u) $ t $ u;
fun build_set_app ctxt A t = Term.betapply (build_set ctxt A (fastype_of t), t);
fun mk_parametricity_goal ctxt Rs t u =
let val prem = build_the_rel ctxt Rs [] (fastype_of t) (fastype_of u) in
HOLogic.mk_Trueprop (prem $ t $ u)
end;
val name_of_set = name_of_const "set function" domain_type;
val fundefcong_attrs = @{attributes [fundef_cong]};
val nitpicksimp_attrs = @{attributes [nitpick_simp]};
val simp_attrs = @{attributes [simp]};
val lists_bmoc = fold (fn xs => fn t => Term.list_comb (t, xs));
fun flat_corec_predss_getterss qss gss = maps (op @) (qss ~~ gss);
fun flat_corec_preds_predsss_gettersss [] [qss] [gss] = flat_corec_predss_getterss qss gss
| flat_corec_preds_predsss_gettersss (p :: ps) (qss :: qsss) (gss :: gsss) =
p :: flat_corec_predss_getterss qss gss @ flat_corec_preds_predsss_gettersss ps qsss gsss;
fun mk_flip (x, Type (_, [T1, Type (_, [T2, T3])])) =
Abs ("x", T1, Abs ("y", T2, Var (x, T2 --> T1 --> T3) $ Bound 0 $ Bound 1));
fun flip_rels ctxt n thm =
let
val Rs = Term.add_vars (Thm.prop_of thm) [];
val Rs' = rev (drop (length Rs - n) Rs);
in
infer_instantiate ctxt (map (fn f => (fst f, Thm.cterm_of ctxt (mk_flip f))) Rs') thm
end;
fun mk_ctor_or_dtor get_T Ts t =
let val Type (_, Ts0) = get_T (fastype_of t) in
Term.subst_atomic_types (Ts0 ~~ Ts) t
end;
val mk_ctor = mk_ctor_or_dtor range_type;
val mk_dtor = mk_ctor_or_dtor domain_type;
fun mk_bnf_sets bnf =
let
val Type (T_name, Us) = T_of_bnf bnf;
val lives = lives_of_bnf bnf;
val sets = sets_of_bnf bnf;
fun mk_set U =
(case find_index (curry (op =) U) lives of
~1 => Term.dummy
| i => nth sets i);
in
(T_name, map mk_set Us)
end;
fun mk_xtor_co_recs thy fp fpTs Cs ts0 =
let
val nn = length fpTs;
val (fpTs0, Cs0) =
map ((fp = Greatest_FP ? swap) o dest_funT o snd o strip_typeN nn o fastype_of) ts0
|> split_list;
val rho = tvar_subst thy (fpTs0 @ Cs0) (fpTs @ Cs);
in
map (Term.subst_TVars rho) ts0
end;
fun liveness_of_fp_bnf n bnf =
(case T_of_bnf bnf of
Type (_, Ts) => map (not o member (op =) (deads_of_bnf bnf)) Ts
| _ => replicate n false);
fun add_nesting_bnf_names Us =
let
fun add (Type (s, Ts)) ss =
let val (needs, ss') = fold_map add Ts ss in
if exists I needs then (true, insert (op =) s ss') else (false, ss')
end
| add T ss = (member (op =) Us T, ss);
in snd oo add end;
fun nesting_bnfs ctxt ctr_Tsss Us =
map_filter (bnf_of ctxt) (fold (fold (fold (add_nesting_bnf_names Us))) ctr_Tsss []);
fun indexify proj xs f p = f (find_index (curry (op =) (proj p)) xs) p;
fun massage_simple_notes base =
filter_out (null o #2)
#> map (fn (thmN, thms, f_attrs) =>
((Binding.qualify true base (Binding.name thmN), []),
map_index (fn (i, thm) => ([thm], f_attrs i)) thms));
fun massage_multi_notes b_names Ts =
maps (fn (thmN, thmss, attrs) =>
@{map 3} (fn b_name => fn Type (T_name, _) => fn thms =>
((Binding.qualify true b_name (Binding.name thmN), attrs T_name), [(thms, [])]))
b_names Ts thmss)
#> filter_out (null o fst o hd o snd);
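(*define the constructors of one (co)datatype in terms of the abstract ctor and prove the ctor_iff_dtor theorem*)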
fun define_ctrs_dtrs_for_type fp_b_name fpT ctor dtor ctor_dtor dtor_ctor n ks abs ctr_bindings
ctr_mixfixes ctr_Tss lthy =
let
val ctor_absT = domain_type (fastype_of ctor);
val (((w, xss), u'), _) = lthy
|> yield_singleton (mk_Frees "w") ctor_absT
||>> mk_Freess "x" ctr_Tss
||>> yield_singleton Variable.variant_fixes fp_b_name;
val u = Free (u', fpT);
val ctor_iff_dtor_thm =
let
val goal =
fold_rev Logic.all [w, u]
(mk_Trueprop_eq (HOLogic.mk_eq (u, ctor $ w), HOLogic.mk_eq (dtor $ u, w)));
val vars = Variable.add_free_names lthy goal [];
in
Goal.prove_sorry lthy vars [] goal (fn {context = ctxt, ...} =>
mk_ctor_iff_dtor_tac ctxt (map (SOME o Thm.ctyp_of lthy) [ctor_absT, fpT])
(Thm.cterm_of lthy ctor) (Thm.cterm_of lthy dtor) ctor_dtor dtor_ctor)
|> Thm.close_derivation \<^here>
end;
val ctr_rhss =
map2 (fn k => fn xs => fold_rev Term.lambda xs (ctor $ mk_absumprod ctor_absT abs n k xs))
ks xss;
val ((raw_ctrs, raw_ctr_defs), (lthy, lthy_old)) = lthy
|> (snd o Local_Theory.begin_nested)
|> apfst split_list o @{fold_map 3} (fn b => fn mx => fn rhs =>
Local_Theory.define ((b, mx),
((Thm.make_def_binding (Config.get lthy bnf_internals) b, []), rhs))
#>> apsnd snd) ctr_bindings ctr_mixfixes ctr_rhss
||> `Local_Theory.end_nested;
val phi = Proof_Context.export_morphism lthy_old lthy;
val ctr_defs = map (Morphism.thm phi) raw_ctr_defs;
val ctrs0 = map (Morphism.term phi) raw_ctrs;
in
((xss, ctrs0, ctor_iff_dtor_thm, ctr_defs), lthy)
end;
fun wrap_ctrs plugins fp discs_sels fp_b_name ctor_inject n ms abs_inject type_definition
disc_bindings sel_bindingss sel_default_eqs ctrs0 ctor_iff_dtor_thm ctr_defs lthy =
let
val sumEN_thm' = unfold_thms lthy @{thms unit_all_eq1} (mk_absumprodE type_definition ms);
fun exhaust_tac {context = ctxt, prems = _} =
mk_exhaust_tac ctxt n ctr_defs ctor_iff_dtor_thm sumEN_thm';
val inject_tacss =
map2 (fn ctr_def => fn 0 => [] | _ => [fn {context = ctxt, ...} =>
mk_inject_tac ctxt ctr_def ctor_inject abs_inject]) ctr_defs ms;
val half_distinct_tacss =
map (map (fn (def, def') => fn {context = ctxt, ...} =>
mk_half_distinct_tac ctxt ctor_inject abs_inject [def, def']))
(mk_half_pairss (`I ctr_defs));
val tacss = [exhaust_tac] :: inject_tacss @ half_distinct_tacss;
fun ctr_spec_of disc_b ctr0 sel_bs = ((disc_b, ctr0), sel_bs);
val ctr_specs = @{map 3} ctr_spec_of disc_bindings ctrs0 sel_bindingss;
val (ctr_sugar as {case_cong, ...}, lthy) =
free_constructors (ctr_sugar_kind_of_fp_kind fp) tacss
((((plugins, discs_sels), standard_binding), ctr_specs), sel_default_eqs) lthy;
val anonymous_notes =
[([case_cong], fundefcong_attrs)]
|> map (fn (thms, attrs) => ((Binding.empty, attrs), [(thms, [])]));
val notes =
if Config.get lthy bnf_internals then
[(ctor_iff_dtorN, [ctor_iff_dtor_thm], K [])]
|> massage_simple_notes fp_b_name
else
[];
in
(ctr_sugar, lthy |> Local_Theory.notes (anonymous_notes @ notes) |> snd)
end;
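(*derive map, set, rel and pred theorems for one (co)datatype from the corresponding properties of the underlying BNFs*)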
fun derive_map_set_rel_pred_thms plugins fp live As Bs C E abs_inverses ctr_defs fp_nesting_set_maps
fp_nesting_rel_eq_onps live_nesting_map_id0s live_nesting_set_maps live_nesting_rel_eqs
live_nesting_rel_eq_onps fp_nested_rel_eq_onps fp_b_name fp_bnf fp_bnfs fpT ctor ctor_dtor
dtor_ctor pre_map_def pre_set_defs pre_rel_def fp_map_thm fp_set_thms fp_rel_thm
extra_unfolds_map extra_unfolds_set extra_unfolds_rel ctr_Tss
({casex, case_thms, discs, selss, sel_defs, ctrs, exhaust, exhaust_discs, disc_thmss, sel_thmss,
injects, distincts, distinct_discsss, ...} : ctr_sugar)
lthy =
let
val n = length ctr_Tss;
val ms = map length ctr_Tss;
val B_ify_T = Term.typ_subst_atomic (As ~~ Bs);
val fpBT = B_ify_T fpT;
val live_AsBs = filter (op <>) (As ~~ Bs);
val live_As = map fst live_AsBs;
val fTs = map (op -->) live_AsBs;
val ((((((((xss, yss), fs), Ps), Rs), ta), tb), thesis), names_lthy) = lthy
|> fold (fold Variable.declare_typ) [As, Bs]
|> mk_Freess "x" ctr_Tss
||>> mk_Freess "y" (map (map B_ify_T) ctr_Tss)
||>> mk_Frees "f" fTs
||>> mk_Frees "P" (map mk_pred1T live_As)
||>> mk_Frees "R" (map (uncurry mk_pred2T) live_AsBs)
||>> yield_singleton (mk_Frees "a") fpT
||>> yield_singleton (mk_Frees "b") fpBT
||>> apfst HOLogic.mk_Trueprop o yield_singleton (mk_Frees "thesis") HOLogic.boolT;
val ctrAs = map (mk_ctr As) ctrs;
val ctrBs = map (mk_ctr Bs) ctrs;
val ctr_defs' =
map2 (fn m => fn def => mk_unabs_def m (HOLogic.mk_obj_eq def)) ms ctr_defs;
val ABfs = live_AsBs ~~ fs;
fun derive_rel_case relAsBs rel_inject_thms rel_distinct_thms =
let
val rel_Rs_a_b = list_comb (relAsBs, Rs) $ ta $ tb;
fun mk_assms ctrA ctrB ctxt =
let
val argA_Ts = binder_types (fastype_of ctrA);
val argB_Ts = binder_types (fastype_of ctrB);
val ((argAs, argBs), names_ctxt) = ctxt
|> mk_Frees "x" argA_Ts
||>> mk_Frees "y" argB_Ts;
val ctrA_args = list_comb (ctrA, argAs);
val ctrB_args = list_comb (ctrB, argBs);
in
(fold_rev Logic.all (argAs @ argBs) (Logic.list_implies
(mk_Trueprop_eq (ta, ctrA_args) :: mk_Trueprop_eq (tb, ctrB_args) ::
map2 (HOLogic.mk_Trueprop oo build_rel_app lthy Rs []) argAs argBs,
thesis)),
names_ctxt)
end;
val (assms, names_lthy) = @{fold_map 2} mk_assms ctrAs ctrBs names_lthy;
val goal = Logic.list_implies (HOLogic.mk_Trueprop rel_Rs_a_b :: assms, thesis);
in
Goal.prove_sorry lthy [] [] goal (fn {context = ctxt, prems = _} =>
mk_rel_case_tac ctxt (Thm.cterm_of ctxt ta) (Thm.cterm_of ctxt tb) exhaust injects
rel_inject_thms distincts rel_distinct_thms live_nesting_rel_eqs)
|> singleton (Proof_Context.export names_lthy lthy)
|> Thm.close_derivation \<^here>
end;
fun derive_case_transfer rel_case_thm =
let
val (S, names_lthy) = yield_singleton (mk_Frees "S") (mk_pred2T C E) names_lthy;
val caseA = mk_case As C casex;
val caseB = mk_case Bs E casex;
val goal = mk_parametricity_goal names_lthy (S :: Rs) caseA caseB;
in
Goal.prove_sorry lthy [] [] goal (fn {context = ctxt, prems = _} =>
mk_case_transfer_tac ctxt rel_case_thm case_thms)
|> singleton (Proof_Context.export names_lthy lthy)
|> Thm.close_derivation \<^here>
end;
in
if live = 0 then
if plugins transfer_plugin then
let
val relAsBs = HOLogic.eq_const fpT;
val rel_case_thm = derive_rel_case relAsBs [] [];
val case_transfer_thm = derive_case_transfer rel_case_thm;
val notes =
[(case_transferN, [case_transfer_thm], K [])]
|> massage_simple_notes fp_b_name;
val (noted, lthy') = lthy
|> Local_Theory.notes notes;
val subst = Morphism.thm (substitute_noted_thm noted);
in
(([], [], [], [], [], [], [], [], [], [], [], [], [], [], [subst case_transfer_thm], [],
[]), lthy')
end
else
(([], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], []), lthy)
else
let
val mapx = mk_map live As Bs (map_of_bnf fp_bnf);
val relAsBs = mk_rel live As Bs (rel_of_bnf fp_bnf);
val setAs = map (mk_set As) (sets_of_bnf fp_bnf);
val discAs = map (mk_disc_or_sel As) discs;
val discBs = map (mk_disc_or_sel Bs) discs;
val selAss = map (map (mk_disc_or_sel As)) selss;
val selBss = map (map (mk_disc_or_sel Bs)) selss;
val map_ctor_thm =
if fp = Least_FP then
fp_map_thm
else
let
val ctorA = mk_ctor As ctor;
val ctorB = mk_ctor Bs ctor;
val y_T = domain_type (fastype_of ctorA);
val (y as Free (y_s, _), _) = lthy
|> yield_singleton (mk_Frees "y") y_T;
val ctor_cong =
infer_instantiate' lthy [NONE, NONE, SOME (Thm.cterm_of lthy ctorB)] arg_cong;
val fp_map_thm' = fp_map_thm
|> infer_instantiate' lthy (replicate live NONE @
[SOME (Thm.cterm_of lthy (ctorA $ y))])
|> unfold_thms lthy [dtor_ctor];
in
(fp_map_thm' RS ctor_cong RS (ctor_dtor RS sym RS trans))
|> Drule.generalize (Names.empty, Names.make1_set y_s)
end;
val map_thms =
let
fun mk_goal ctrA ctrB xs ys =
let
val fmap = list_comb (mapx, fs);
fun mk_arg (x as Free (_, T)) (Free (_, U)) =
if T = U then x
else build_map lthy [] [] (the o AList.lookup (op =) ABfs) (T, U) $ x;
val xs' = map2 mk_arg xs ys;
in
mk_Trueprop_eq (fmap $ list_comb (ctrA, xs), list_comb (ctrB, xs'))
end;
val goals = @{map 4} mk_goal ctrAs ctrBs xss yss;
val goal = Logic.mk_conjunction_balanced goals;
val vars = Variable.add_free_names lthy goal [];
in
Goal.prove_sorry lthy vars [] goal (fn {context = ctxt, prems = _} =>
mk_map_tac ctxt abs_inverses pre_map_def map_ctor_thm live_nesting_map_id0s ctr_defs'
extra_unfolds_map)
|> Thm.close_derivation \<^here>
|> Conjunction.elim_balanced (length goals)
end;
val set0_thms =
let
fun mk_goal A setA ctrA xs =
let
val sets = map (build_set_app lthy A)
(filter (exists_subtype_in [A] o fastype_of) xs);
in
mk_Trueprop_eq (setA $ list_comb (ctrA, xs),
(if null sets then HOLogic.mk_set A [] else Library.foldl1 mk_union sets))
end;
val goals =
@{map 2} (fn live_A => fn setA => map2 (mk_goal live_A setA) ctrAs xss) live_As setAs
|> flat;
in
if null goals then
[]
else
let
val goal = Logic.mk_conjunction_balanced goals;
val vars = Variable.add_free_names lthy goal [];
in
Goal.prove_sorry lthy vars [] goal (fn {context = ctxt, prems = _} =>
mk_set0_tac ctxt abs_inverses pre_set_defs dtor_ctor fp_set_thms
fp_nesting_set_maps live_nesting_set_maps ctr_defs' extra_unfolds_set)
|> Thm.close_derivation \<^here>
|> Conjunction.elim_balanced (length goals)
end
end;
val set_thms = set0_thms
|> map (unfold_thms lthy @{thms insert_is_Un[THEN sym] Un_empty_left Un_insert_left});
val rel_ctor_thm =
if fp = Least_FP then
fp_rel_thm
else
let
val ctorA = mk_ctor As ctor;
val ctorB = mk_ctor Bs ctor;
val y_T = domain_type (fastype_of ctorA);
val z_T = domain_type (fastype_of ctorB);
val ((y as Free (y_s, _), z as Free (z_s, _)), _) = lthy
|> yield_singleton (mk_Frees "y") y_T
||>> yield_singleton (mk_Frees "z") z_T;
in
fp_rel_thm
|> infer_instantiate' lthy (replicate live NONE @
[SOME (Thm.cterm_of lthy (ctorA $ y)), SOME (Thm.cterm_of lthy (ctorB $ z))])
|> unfold_thms lthy [dtor_ctor]
|> Drule.generalize (Names.empty, Names.make2_set y_s z_s)
end;
val rel_inject_thms =
let
fun mk_goal ctrA ctrB xs ys =
let
val lhs = list_comb (relAsBs, Rs) $ list_comb (ctrA, xs) $ list_comb (ctrB, ys);
val conjuncts = map2 (build_rel_app lthy Rs []) xs ys;
in
HOLogic.mk_Trueprop
(if null conjuncts then lhs
else HOLogic.mk_eq (lhs, Library.foldr1 HOLogic.mk_conj conjuncts))
end;
val goals = @{map 4} mk_goal ctrAs ctrBs xss yss;
val goal = Logic.mk_conjunction_balanced goals;
val vars = Variable.add_free_names lthy goal [];
in
Goal.prove_sorry lthy vars [] goal (fn {context = ctxt, prems = _} =>
mk_rel_tac ctxt abs_inverses pre_rel_def rel_ctor_thm live_nesting_rel_eqs ctr_defs'
extra_unfolds_rel)
|> Thm.close_derivation \<^here>
|> Conjunction.elim_balanced (length goals)
end;
val half_rel_distinct_thmss =
let
fun mk_goal ((ctrA, xs), (ctrB, ys)) =
HOLogic.mk_Trueprop (HOLogic.mk_not
(list_comb (relAsBs, Rs) $ list_comb (ctrA, xs) $ list_comb (ctrB, ys)));
val rel_infos = (ctrAs ~~ xss, ctrBs ~~ yss);
val goalss = map (map mk_goal) (mk_half_pairss rel_infos);
val goals = flat goalss;
in
unflat goalss
(if null goals then
[]
else
let
val goal = Logic.mk_conjunction_balanced goals;
val vars = Variable.add_free_names lthy goal [];
in
Goal.prove_sorry lthy vars [] goal (fn {context = ctxt, prems = _} =>
mk_rel_tac ctxt abs_inverses pre_rel_def rel_ctor_thm live_nesting_rel_eqs
ctr_defs' extra_unfolds_rel)
|> Thm.close_derivation \<^here>
|> Conjunction.elim_balanced (length goals)
end)
end;
val rel_flip = rel_flip_of_bnf fp_bnf;
fun mk_other_half_rel_distinct_thm thm =
flip_rels lthy live thm RS (rel_flip RS sym RS @{thm arg_cong[of _ _ Not]} RS iffD2);
val other_half_rel_distinct_thmss =
map (map mk_other_half_rel_distinct_thm) half_rel_distinct_thmss;
val (rel_distinct_thms, _) =
join_halves n half_rel_distinct_thmss other_half_rel_distinct_thmss;
fun mk_rel_intro_thm m thm =
uncurry_thm m (thm RS iffD2) handle THM _ => thm;
val rel_intro_thms = map2 mk_rel_intro_thm ms rel_inject_thms;
val rel_code_thms =
map (fn thm => thm RS @{thm eq_False[THEN iffD2]}) rel_distinct_thms @
map2 (fn thm => fn 0 => thm RS @{thm eq_True[THEN iffD2]} | _ => thm) rel_inject_thms ms;
val ctr_transfer_thms =
let
val goals = map2 (mk_parametricity_goal names_lthy Rs) ctrAs ctrBs;
val goal = Logic.mk_conjunction_balanced goals;
val vars = Variable.add_free_names lthy goal [];
in
Goal.prove_sorry lthy vars [] goal
(fn {context = ctxt, prems = _} =>
mk_ctr_transfer_tac ctxt rel_intro_thms live_nesting_rel_eqs)
|> Thm.close_derivation \<^here>
|> Conjunction.elim_balanced (length goals)
end;
val (set_cases_thms, set_cases_attrss) =
let
fun mk_prems assms elem t ctxt =
(case fastype_of t of
Type (type_name, xs) =>
(case bnf_of ctxt type_name of
NONE => ([], ctxt)
| SOME bnf =>
apfst flat (fold_map (fn set => fn ctxt =>
let
val T = HOLogic.dest_setT (range_type (fastype_of set));
val new_var = not (T = fastype_of elem);
val (x, ctxt') =
if new_var then yield_singleton (mk_Frees "x") T ctxt else (elem, ctxt);
in
mk_prems (mk_Trueprop_mem (x, set $ t) :: assms) elem x ctxt'
|>> map (new_var ? Logic.all x)
end) (map (mk_set xs) (sets_of_bnf bnf)) ctxt))
| T => rpair ctxt
(if T = fastype_of elem then [fold (curry Logic.mk_implies) assms thesis] else []));
in
split_list (map (fn set =>
let
val A = HOLogic.dest_setT (range_type (fastype_of set));
val (elem, names_lthy) = yield_singleton (mk_Frees "e") A names_lthy;
val premss =
map (fn ctr =>
let
val (args, names_lthy) =
mk_Frees "z" (binder_types (fastype_of ctr)) names_lthy;
in
flat (zipper_map (fn (prev_args, arg, next_args) =>
let
val (args_with_elem, args_without_elem) =
if fastype_of arg = A then
(prev_args @ [elem] @ next_args, prev_args @ next_args)
else
`I (prev_args @ [arg] @ next_args);
in
mk_prems [mk_Trueprop_eq (ta, Term.list_comb (ctr, args_with_elem))]
elem arg names_lthy
|> fst
|> map (fold_rev Logic.all args_without_elem)
end) args)
end) ctrAs;
val goal = Logic.mk_implies (mk_Trueprop_mem (elem, set $ ta), thesis);
val vars = Variable.add_free_names lthy goal [];
val thm =
Goal.prove_sorry lthy vars (flat premss) goal (fn {context = ctxt, prems} =>
mk_set_cases_tac ctxt (Thm.cterm_of ctxt ta) prems exhaust set_thms)
|> Thm.close_derivation \<^here>
|> rotate_prems ~1;
val cases_set_attr =
- Attrib.internal (K (Induct.cases_pred (name_of_set set)));
+ Attrib.internal \<^here> (K (Induct.cases_pred (name_of_set set)));
val ctr_names = quasi_unambiguous_case_names (flat
(map (uncurry mk_names o map_prod length name_of_ctr) (premss ~~ ctrAs)));
in
(* TODO: @{attributes [elim?]} *)
(thm, [Attrib.consumes 1, cases_set_attr, Attrib.case_names ctr_names])
end) setAs)
end;
val (set_intros_thmssss, set_intros_thms) =
let
fun mk_goals A setA ctr_args t ctxt =
(case fastype_of t of
Type (type_name, innerTs) =>
(case bnf_of ctxt type_name of
NONE => ([], ctxt)
| SOME bnf =>
apfst flat (fold_map (fn set => fn ctxt =>
let
val T = HOLogic.dest_setT (range_type (fastype_of set));
val (y, ctxt') = yield_singleton (mk_Frees "y") T ctxt;
val assm = mk_Trueprop_mem (y, set $ t);
in
apfst (map (Logic.mk_implies o pair assm)) (mk_goals A setA ctr_args y ctxt')
end) (map (mk_set innerTs) (sets_of_bnf bnf)) ctxt))
| T => (if T = A then [mk_Trueprop_mem (t, setA $ ctr_args)] else [], ctxt));
val (goalssss, _) =
fold_map (fn set =>
let val A = HOLogic.dest_setT (range_type (fastype_of set)) in
@{fold_map 2} (fn ctr => fn xs =>
fold_map (mk_goals A set (Term.list_comb (ctr, xs))) xs)
ctrAs xss
end) setAs lthy;
val goals = flat (flat (flat goalssss));
in
`(unflattt goalssss)
(if null goals then
[]
else
let
val goal = Logic.mk_conjunction_balanced goals;
val vars = Variable.add_free_names lthy goal [];
in
Goal.prove_sorry lthy vars [] goal
(fn {context = ctxt, prems = _} => mk_set_intros_tac ctxt set0_thms)
|> Thm.close_derivation \<^here>
|> Conjunction.elim_balanced (length goals)
end)
end;
val rel_sel_thms =
let
val n = length discAs;
fun mk_conjunct n k discA selAs discB selBs =
(if k = n then [] else [HOLogic.mk_eq (discA $ ta, discB $ tb)]) @
(if null selAs then
[]
else
[Library.foldr HOLogic.mk_imp
(if n = 1 then [] else [discA $ ta, discB $ tb],
Library.foldr1 HOLogic.mk_conj (map2 (build_rel_app names_lthy Rs [])
(map (rapp ta) selAs) (map (rapp tb) selBs)))]);
val goals =
if n = 0 then
[]
else
[mk_Trueprop_eq (build_rel_app names_lthy Rs [] ta tb,
(case flat (@{map 5} (mk_conjunct n) (1 upto n) discAs selAss discBs selBss) of
[] => \<^term>\<open>True\<close>
| conjuncts => Library.foldr1 HOLogic.mk_conj conjuncts))];
fun prove goal =
Variable.add_free_names lthy goal []
|> (fn vars => Goal.prove_sorry lthy vars [] goal (fn {context = ctxt, prems = _} =>
mk_rel_sel_tac ctxt (Thm.cterm_of ctxt ta) (Thm.cterm_of ctxt tb) exhaust
(flat disc_thmss) (flat sel_thmss) rel_inject_thms distincts rel_distinct_thms
live_nesting_rel_eqs))
|> Thm.close_derivation \<^here>;
in
map prove goals
end;
val (rel_case_thm, rel_case_attrs) =
let
val thm = derive_rel_case relAsBs rel_inject_thms rel_distinct_thms;
val ctr_names = quasi_unambiguous_case_names (map name_of_ctr ctrAs);
in
(thm, [Attrib.case_names ctr_names, Attrib.consumes 1] @ @{attributes [cases pred]})
end;
val case_transfer_thm = derive_case_transfer rel_case_thm;
val sel_transfer_thms =
if null selAss then
[]
else
let
val shared_sels = foldl1 (uncurry (inter (op =))) (map (op ~~) (selAss ~~ selBss));
val goals = map (uncurry (mk_parametricity_goal names_lthy Rs)) shared_sels;
in
if null goals then
[]
else
let
val goal = Logic.mk_conjunction_balanced goals;
val vars = Variable.add_free_names lthy goal [];
in
Goal.prove_sorry lthy vars [] goal
(fn {context = ctxt, prems = _} =>
mk_sel_transfer_tac ctxt n sel_defs case_transfer_thm)
|> Thm.close_derivation \<^here>
|> Conjunction.elim_balanced (length goals)
end
end;
val disc_transfer_thms =
let val goals = map2 (mk_parametricity_goal names_lthy Rs) discAs discBs in
if null goals then
[]
else
let
val goal = Logic.mk_conjunction_balanced goals;
val vars = Variable.add_free_names lthy goal [];
in
Goal.prove_sorry lthy vars [] goal
(fn {context = ctxt, prems = _} => mk_disc_transfer_tac ctxt
(the_single rel_sel_thms) (the_single exhaust_discs)
(flat (flat distinct_discsss)))
|> Thm.close_derivation \<^here>
|> Conjunction.elim_balanced (length goals)
end
end;
val map_disc_iff_thms =
let
val discsB = map (mk_disc_or_sel Bs) discs;
val discsA_t = map (fn disc1 => Term.betapply (disc1, ta)) discAs;
fun mk_goal (discA_t, discB) =
if head_of discA_t aconv HOLogic.Not orelse is_refl_bool discA_t then
NONE
else
SOME (mk_Trueprop_eq (betapply (discB, (Term.list_comb (mapx, fs) $ ta)), discA_t));
val goals = map_filter mk_goal (discsA_t ~~ discsB);
in
if null goals then
[]
else
let
val goal = Logic.mk_conjunction_balanced goals;
val vars = Variable.add_free_names lthy goal [];
in
Goal.prove_sorry lthy vars [] goal
(fn {context = ctxt, prems = _} =>
mk_map_disc_iff_tac ctxt (Thm.cterm_of ctxt ta) exhaust (flat disc_thmss)
map_thms)
|> Thm.close_derivation \<^here>
|> Conjunction.elim_balanced (length goals)
end
end;
val (map_sel_thmss, map_sel_thms) =
let
fun mk_goal discA selA selB =
let
val prem = Term.betapply (discA, ta);
val lhs = selB $ (Term.list_comb (mapx, fs) $ ta);
val lhsT = fastype_of lhs;
val map_rhsT =
map_atyps (perhaps (AList.lookup (op =) (map swap live_AsBs))) lhsT;
val map_rhs = build_map lthy [] []
(the o (AList.lookup (op =) (live_AsBs ~~ fs))) (map_rhsT, lhsT);
val rhs = (case map_rhs of
Const (\<^const_name>\<open>id\<close>, _) => selA $ ta
| _ => map_rhs $ (selA $ ta));
val concl = mk_Trueprop_eq (lhs, rhs);
in
if is_refl_bool prem then concl
else Logic.mk_implies (HOLogic.mk_Trueprop prem, concl)
end;
val goalss = @{map 3} (map2 o mk_goal) discAs selAss selBss;
val goals = flat goalss;
in
`(unflat goalss)
(if null goals then
[]
else
let
val goal = Logic.mk_conjunction_balanced goals;
val vars = Variable.add_free_names lthy goal [];
in
Goal.prove_sorry lthy vars [] goal
(fn {context = ctxt, prems = _} =>
mk_map_sel_tac ctxt (Thm.cterm_of ctxt ta) exhaust (flat disc_thmss)
map_thms (flat sel_thmss) live_nesting_map_id0s)
|> Thm.close_derivation \<^here>
|> Conjunction.elim_balanced (length goals)
end)
end;
val (set_sel_thmssss, set_sel_thms) =
let
fun mk_goal setA discA selA ctxt =
let
val prem = Term.betapply (discA, ta);
val sel_rangeT = range_type (fastype_of selA);
val A = HOLogic.dest_setT (range_type (fastype_of setA));
fun traverse_nested_types t ctxt =
(case fastype_of t of
Type (type_name, innerTs) =>
(case bnf_of ctxt type_name of
NONE => ([], ctxt)
| SOME bnf =>
let
fun seq_assm a set ctxt =
let
val T = HOLogic.dest_setT (range_type (fastype_of set));
val (x, ctxt') = yield_singleton (mk_Frees "x") T ctxt;
val assm = mk_Trueprop_mem (x, set $ a);
in
traverse_nested_types x ctxt'
|>> map (Logic.mk_implies o pair assm)
end;
in
fold_map (seq_assm t o mk_set innerTs) (sets_of_bnf bnf) ctxt
|>> flat
end)
| T =>
if T = A then ([mk_Trueprop_mem (t, setA $ ta)], ctxt) else ([], ctxt));
val (concls, ctxt') =
if sel_rangeT = A then ([mk_Trueprop_mem (selA $ ta, setA $ ta)], ctxt)
else traverse_nested_types (selA $ ta) ctxt;
in
if exists_subtype_in [A] sel_rangeT then
if is_refl_bool prem then (concls, ctxt')
else (map (Logic.mk_implies o pair (HOLogic.mk_Trueprop prem)) concls, ctxt')
else
([], ctxt)
end;
val (goalssss, _) =
fold_map (fn set => @{fold_map 2} (fold_map o mk_goal set) discAs selAss)
setAs names_lthy;
val goals = flat (flat (flat goalssss));
in
`(unflattt goalssss)
(if null goals then
[]
else
let
val goal = Logic.mk_conjunction_balanced goals;
val vars = Variable.add_free_names lthy goal [];
in
Goal.prove_sorry lthy vars [] goal
(fn {context = ctxt, prems = _} =>
mk_set_sel_tac ctxt (Thm.cterm_of ctxt ta) exhaust (flat disc_thmss)
(flat sel_thmss) set0_thms)
|> Thm.close_derivation \<^here>
|> Conjunction.elim_balanced (length goals)
end)
end;
val pred_injects =
let
val rel_eq_onp_with_tops_of =
Conv.fconv_rule (HOLogic.Trueprop_conv (Conv.arg1_conv
(Conv.top_sweep_rewrs_conv @{thms eq_onp_top_eq_eq[symmetric, THEN eq_reflection]} lthy)));
val eq_onps = map rel_eq_onp_with_tops_of
(map rel_eq_onp_of_bnf fp_bnfs @ fp_nesting_rel_eq_onps @ live_nesting_rel_eq_onps @
fp_nested_rel_eq_onps);
val cTs = map (SOME o Thm.ctyp_of lthy) (maps (replicate 2) live_As);
val cts = map (SOME o Thm.cterm_of lthy) (map mk_eq_onp Ps);
val get_rhs = Thm.concl_of #> HOLogic.dest_Trueprop #> HOLogic.dest_eq #> snd;
val pred_eq_onp_conj =
List.foldr (fn (_, thm) => thm RS @{thm eq_onp_live_step}) @{thm refl[of True]};
fun predify_rel_inject rel_inject =
let
val conjuncts = try (get_rhs #> HOLogic.dest_conj) rel_inject |> the_default [];
fun postproc thm =
if null conjuncts then
thm RS (@{thm eq_onp_same_args} RS iffD1)
else
@{thm box_equals} OF [thm, @{thm eq_onp_same_args},
pred_eq_onp_conj conjuncts |> unfold_thms lthy @{thms simp_thms(21)}];
in
rel_inject
|> Thm.instantiate' cTs cts
|> Conv.fconv_rule (HOLogic.Trueprop_conv (Conv.arg_conv
(Raw_Simplifier.rewrite lthy false
@{thms eq_onp_top_eq_eq[symmetric, THEN eq_reflection]})))
|> unfold_thms lthy eq_onps
|> postproc
|> unfold_thms lthy @{thms top_conj}
end;
in
rel_inject_thms
|> map (unfold_thms lthy [@{thm conj_assoc}])
|> map predify_rel_inject
|> Proof_Context.export names_lthy lthy
end;
val anonymous_notes =
[(rel_code_thms, nitpicksimp_attrs)]
|> map (fn (thms, attrs) => ((Binding.empty, attrs), [(thms, [])]));
val notes =
(if Config.get lthy bnf_internals then
[(set0N, set0_thms, K [])]
else
[]) @
[(case_transferN, [case_transfer_thm], K []),
(ctr_transferN, ctr_transfer_thms, K []),
(disc_transferN, disc_transfer_thms, K []),
(sel_transferN, sel_transfer_thms, K []),
(mapN, map_thms, K (nitpicksimp_attrs @ simp_attrs)),
(map_disc_iffN, map_disc_iff_thms, K simp_attrs),
(map_selN, map_sel_thms, K []),
(pred_injectN, pred_injects, K simp_attrs),
(rel_casesN, [rel_case_thm], K rel_case_attrs),
(rel_distinctN, rel_distinct_thms, K simp_attrs),
(rel_injectN, rel_inject_thms, K simp_attrs),
(rel_introsN, rel_intro_thms, K []),
(rel_selN, rel_sel_thms, K []),
(setN, set_thms, K (case_fp fp nitpicksimp_attrs [] @ simp_attrs)),
(set_casesN, set_cases_thms, nth set_cases_attrss),
(set_introsN, set_intros_thms, K []),
(set_selN, set_sel_thms, K [])]
|> massage_simple_notes fp_b_name;
val (noted, lthy') = lthy
|> uncurry (Spec_Rules.add Binding.empty Spec_Rules.equational)
(`(single o lhs_head_of o hd) map_thms)
|> fp = Least_FP ?
uncurry (Spec_Rules.add Binding.empty Spec_Rules.equational)
(`(single o lhs_head_of o hd) rel_code_thms)
|> uncurry (Spec_Rules.add Binding.empty Spec_Rules.equational)
(`(single o lhs_head_of o hd) set0_thms)
|> plugins code_plugin ? Code.declare_default_eqns (map (rpair true) (rel_code_thms @ map_thms @ set_thms))
|> Local_Theory.notes (anonymous_notes @ notes);
val subst = Morphism.thm (substitute_noted_thm noted);
in
((map subst map_thms,
map subst map_disc_iff_thms,
map (map subst) map_sel_thmss,
map subst rel_inject_thms,
map subst rel_distinct_thms,
map subst rel_sel_thms,
map subst rel_intro_thms,
[subst rel_case_thm],
map subst pred_injects,
map subst set_thms,
map (map (map (map subst))) set_sel_thmssss,
map (map (map (map subst))) set_intros_thmssss,
map subst set_cases_thms,
map subst ctr_transfer_thms,
[subst case_transfer_thm],
map subst disc_transfer_thms,
map subst sel_transfer_thms), lthy')
end
end;
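(*bundles of theorems produced for (co)datatypes: induction and recursor theorems for least fixpoints, coinduction and corecursor theorems for greatest fixpoints*)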
type lfp_sugar_thms = (thm list * thm * Token.src list) * (thm list list * Token.src list);
fun morph_lfp_sugar_thms phi ((inducts, induct, induct_attrs), (recss, rec_attrs)) =
((map (Morphism.thm phi) inducts, Morphism.thm phi induct, induct_attrs),
(map (map (Morphism.thm phi)) recss, rec_attrs)) : lfp_sugar_thms;
val transfer_lfp_sugar_thms = morph_lfp_sugar_thms o Morphism.transfer_morphism;
type gfp_sugar_thms =
((thm list * thm) list * (Token.src list * Token.src list))
* thm list list
* thm list list
* (thm list list * Token.src list)
* (thm list list list * Token.src list);
fun morph_gfp_sugar_thms phi ((coinducts_pairs, coinduct_attrs_pair),
corecss, corec_discss, (corec_disc_iffss, corec_disc_iff_attrs),
(corec_selsss, corec_sel_attrs)) =
((map (apfst (map (Morphism.thm phi)) o apsnd (Morphism.thm phi)) coinducts_pairs,
coinduct_attrs_pair),
map (map (Morphism.thm phi)) corecss,
map (map (Morphism.thm phi)) corec_discss,
(map (map (Morphism.thm phi)) corec_disc_iffss, corec_disc_iff_attrs),
(map (map (map (Morphism.thm phi))) corec_selsss, corec_sel_attrs)) : gfp_sugar_thms;
val transfer_gfp_sugar_thms = morph_gfp_sugar_thms o Morphism.transfer_morphism;
fun unzip_recT (Type (\<^type_name>\<open>prod\<close>, [_, TFree x]))
(T as Type (\<^type_name>\<open>prod\<close>, Ts as [_, TFree y])) =
if x = y then [T] else Ts
| unzip_recT _ (Type (\<^type_name>\<open>prod\<close>, Ts as [_, TFree _])) = Ts
| unzip_recT _ T = [T];
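(*compute argument types and fresh variables for the high-level recursor from the low-level ctor_rec function types*)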
fun mk_recs_args_types ctxt ctr_Tsss Cs absTs repTs ns mss ctor_rec_fun_Ts =
let
val Css = map2 replicate ns Cs;
val x_Tssss =
@{map 6} (fn absT => fn repT => fn n => fn ms => fn ctr_Tss => fn ctor_rec_fun_T =>
map2 (map2 unzip_recT)
ctr_Tss (dest_absumprodT absT repT n ms (domain_type ctor_rec_fun_T)))
absTs repTs ns mss ctr_Tsss ctor_rec_fun_Ts;
val x_Tsss' = map (map flat_rec_arg_args) x_Tssss;
val f_Tss = map2 (map2 (curry (op --->))) x_Tsss' Css;
val ((fss, xssss), _) = ctxt
|> mk_Freess "f" f_Tss
||>> mk_Freessss "x" x_Tssss;
in
(f_Tss, x_Tssss, fss, xssss)
end;
fun unzip_corecT (Type (\<^type_name>\<open>sum\<close>, _)) T = [T]
| unzip_corecT _ (Type (\<^type_name>\<open>sum\<close>, Ts)) = Ts
| unzip_corecT _ T = [T];
(*avoid "'a itself" arguments in corecursors*)
fun repair_nullary_single_ctr [[]] = [[HOLogic.unitT]]
| repair_nullary_single_ctr Tss = Tss;
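(*compute the predicate ("q") and getter ("g") argument types of the high-level corecursor*)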
fun mk_corec_fun_arg_types0 ctr_Tsss Cs absTs repTs ns mss fun_Ts =
let
val ctr_Tsss' = map repair_nullary_single_ctr ctr_Tsss;
val g_absTs = map range_type fun_Ts;
val g_Tsss =
map repair_nullary_single_ctr (@{map 5} dest_absumprodT absTs repTs ns mss g_absTs);
val g_Tssss = @{map 3} (fn C => map2 (map2 (map (curry (op -->) C) oo unzip_corecT)))
Cs ctr_Tsss' g_Tsss;
val q_Tssss = map (map (map (fn [_] => [] | [_, T] => [mk_pred1T (domain_type T)]))) g_Tssss;
in
(q_Tssss, g_Tsss, g_Tssss, g_absTs)
end;
fun mk_corec_p_pred_types Cs ns = map2 (fn n => replicate (Int.max (0, n - 1)) o mk_pred1T) ns Cs;
fun mk_corec_fun_arg_types ctr_Tsss Cs absTs repTs ns mss dtor_corec =
(mk_corec_p_pred_types Cs ns,
mk_corec_fun_arg_types0 ctr_Tsss Cs absTs repTs ns mss
(binder_fun_types (fastype_of dtor_corec)));
fun mk_corecs_args_types ctxt ctr_Tsss Cs absTs repTs ns mss dtor_corec_fun_Ts =
let
val p_Tss = mk_corec_p_pred_types Cs ns;
val (q_Tssss, g_Tsss, g_Tssss, corec_types) =
mk_corec_fun_arg_types0 ctr_Tsss Cs absTs repTs ns mss dtor_corec_fun_Ts;
val (((((Free (x, _), cs), pss), qssss), gssss), _) = ctxt
|> yield_singleton (mk_Frees "x") dummyT
||>> mk_Frees "a" Cs
||>> mk_Freess "p" p_Tss
||>> mk_Freessss "q" q_Tssss
||>> mk_Freessss "g" g_Tssss;
val cpss = map2 (map o rapp) cs pss;
fun build_sum_inj mk_inj = build_map ctxt [] [] (uncurry mk_inj o dest_sumT o snd);
fun build_dtor_corec_arg _ [] [cg] = cg
| build_dtor_corec_arg T [cq] [cg, cg'] =
mk_If cq (build_sum_inj Inl_const (fastype_of cg, T) $ cg)
(build_sum_inj Inr_const (fastype_of cg', T) $ cg');
val pgss = @{map 3} flat_corec_preds_predsss_gettersss pss qssss gssss;
val cqssss = map2 (map o map o map o rapp) cs qssss;
val cgssss = map2 (map o map o map o rapp) cs gssss;
val cqgsss = @{map 3} (@{map 3} (@{map 3} build_dtor_corec_arg)) g_Tsss cqssss cgssss;
in
(x, cs, cpss, (((pgss, pss, qssss, gssss), cqgsss), corec_types))
end;
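(*set up the xtor (co)recursors and, depending on the kind of fixpoint, the argument types of either the recursor or the corecursor*)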
fun mk_co_recs_prelims ctxt fp ctr_Tsss fpTs Cs absTs repTs ns mss xtor_co_recs0 =
let
val thy = Proof_Context.theory_of ctxt;
val (xtor_co_rec_fun_Ts, xtor_co_recs) =
mk_xtor_co_recs thy fp fpTs Cs xtor_co_recs0 |> `(binder_fun_types o fastype_of o hd);
val (recs_args_types, corecs_args_types) =
if fp = Least_FP then
mk_recs_args_types ctxt ctr_Tsss Cs absTs repTs ns mss xtor_co_rec_fun_Ts
|> (rpair NONE o SOME)
else
mk_corecs_args_types ctxt ctr_Tsss Cs absTs repTs ns mss xtor_co_rec_fun_Ts
|> (pair NONE o SOME);
in
(xtor_co_recs, recs_args_types, corecs_args_types)
end;
fun mk_preds_getterss_join c cps absT abs cqgss =
let
val n = length cqgss;
val ts = map2 (mk_absumprod absT abs n) (1 upto n) cqgss;
in
Term.lambda c (mk_IfN absT cps ts)
end;
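(*define a (co)recursor constant with the given binding and right-hand side in a nested local theory target*)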
fun define_co_rec_as fp Cs fpT b rhs lthy0 =
let
val thy = Proof_Context.theory_of lthy0;
val ((cst, (_, def)), (lthy', lthy)) = lthy0
|> (snd o Local_Theory.begin_nested)
|> Local_Theory.define
((b, NoSyn), ((Thm.make_def_binding (Config.get lthy0 bnf_internals) b, []), rhs))
||> `Local_Theory.end_nested;
val phi = Proof_Context.export_morphism lthy lthy';
val cst' = mk_co_rec thy fp Cs fpT (Morphism.term phi cst);
val def' = Morphism.thm phi def;
in
((cst', def'), lthy')
end;
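(*define the high-level recursor in terms of the low-level ctor_rec*)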
fun define_rec (_, _, fss, xssss) mk_binding fpTs Cs reps ctor_rec =
let
val nn = length fpTs;
val (ctor_rec_absTs, fpT) = strip_typeN nn (fastype_of ctor_rec)
|>> map domain_type ||> domain_type;
in
define_co_rec_as Least_FP Cs fpT (mk_binding recN)
(fold_rev (fold_rev Term.lambda) fss (Term.list_comb (ctor_rec,
@{map 4} (fn ctor_rec_absT => fn rep => fn fs => fn xsss =>
mk_case_absumprod ctor_rec_absT rep fs (map (map HOLogic.mk_tuple) xsss)
(map flat_rec_arg_args xsss))
ctor_rec_absTs reps fss xssss)))
end;
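(*define the high-level corecursor in terms of the low-level dtor_corec*)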
fun define_corec (_, cs, cpss, (((pgss, _, _, _), cqgsss), f_absTs)) mk_binding fpTs Cs abss
dtor_corec =
let
val nn = length fpTs;
val fpT = range_type (snd (strip_typeN nn (fastype_of dtor_corec)));
in
define_co_rec_as Greatest_FP Cs fpT (mk_binding corecN)
(fold_rev (fold_rev Term.lambda) pgss (Term.list_comb (dtor_corec,
@{map 5} mk_preds_getterss_join cs cpss f_absTs abss cqgsss)))
end;
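(*construct the raw induction premises, collecting induction hypotheses for (possibly nested) recursive arguments via the set functions of the nesting BNFs*)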
fun mk_induct_raw_prem_prems names_ctxt Xss setss_fp_nesting (x as Free (s, Type (T_name, Ts0)))
(Type (_, Xs_Ts0)) =
(case AList.lookup (op =) setss_fp_nesting T_name of
NONE => []
| SOME raw_sets0 =>
let
val (Xs_Ts, (Ts, raw_sets)) =
filter (exists_subtype_in (flat Xss) o fst) (Xs_Ts0 ~~ (Ts0 ~~ raw_sets0))
|> split_list ||> split_list;
val sets = map (mk_set Ts0) raw_sets;
val (ys, names_ctxt') = names_ctxt |> mk_Frees s Ts;
val xysets = map (pair x) (ys ~~ sets);
val ppremss = map2 (mk_induct_raw_prem_prems names_ctxt' Xss setss_fp_nesting) ys Xs_Ts;
in
flat (map2 (map o apfst o cons) xysets ppremss)
end)
| mk_induct_raw_prem_prems _ Xss _ (x as Free (_, Type _)) X =
[([], (find_index (fn Xs => member (op =) Xs X) Xss + 1, x))]
| mk_induct_raw_prem_prems _ _ _ _ _ = [];
fun mk_induct_raw_prem alter_x names_ctxt Xss setss_fp_nesting p ctr ctr_Ts ctrXs_Ts =
let
val (xs, names_ctxt') = names_ctxt |> mk_Frees "x" ctr_Ts;
val pprems =
flat (map2 (mk_induct_raw_prem_prems names_ctxt' Xss setss_fp_nesting) xs ctrXs_Ts);
val y = Term.list_comb (ctr, map alter_x xs);
val p' = enforce_type names_ctxt domain_type (fastype_of y) p;
in (xs, pprems, HOLogic.mk_Trueprop (p' $ y)) end;
fun close_induct_prem_prem nn ps xs t =
fold_rev Logic.all (map Free (drop (nn + length xs)
(rev (Term.add_frees t (map dest_Free xs @ map_filter (try dest_Free) ps))))) t;
fun finish_induct_prem_prem ctxt nn ps xs (xysets, (j, x)) =
let val p' = enforce_type ctxt domain_type (fastype_of x) (nth ps (j - 1)) in
close_induct_prem_prem nn ps xs (Logic.list_implies (map (fn (x', (y, set)) =>
mk_Trueprop_mem (y, set $ x')) xysets,
HOLogic.mk_Trueprop (p' $ x)))
end;
fun finish_induct_prem ctxt nn ps (xs, raw_pprems, concl) =
fold_rev Logic.all xs (Logic.list_implies
(map (finish_induct_prem_prem ctxt nn ps xs) raw_pprems, concl));
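(*construct the coinduction premise conclusions: related values have equal discriminators and componentwise related selector values*)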
fun mk_coinduct_prem_ctr_concls ctxt Xss fpTss rs' n k udisc usels vdisc vsels ctrXs_Ts =
let
fun build_the_rel T Xs_T =
build_rel [] ctxt [] [] (fn (T, X) =>
nth rs' (find_index (fn Xs => member (op =) Xs X) Xss)
|> enforce_type ctxt domain_type T)
(T, Xs_T)
|> Term.subst_atomic_types (flat Xss ~~ flat fpTss);
fun build_rel_app usel vsel Xs_T =
fold rapp [usel, vsel] (build_the_rel (fastype_of usel) Xs_T);
in
(if k = n then [] else [HOLogic.mk_eq (udisc, vdisc)]) @
(if null usels then
[]
else
[Library.foldr HOLogic.mk_imp (if n = 1 then [] else [udisc, vdisc],
Library.foldr1 HOLogic.mk_conj (@{map 3} build_rel_app usels vsels ctrXs_Ts))])
end;
fun mk_coinduct_prem_concl ctxt Xss fpTss rs' n udiscs uselss vdiscs vselss ctrXs_Tss =
@{map 6} (mk_coinduct_prem_ctr_concls ctxt Xss fpTss rs' n)
(1 upto n) udiscs uselss vdiscs vselss ctrXs_Tss
|> flat |> Library.foldr1 HOLogic.mk_conj
handle List.Empty => \<^term>\<open>True\<close>;
fun mk_coinduct_prem ctxt Xss fpTss rs' uvr u v n udiscs uselss vdiscs vselss ctrXs_Tss =
fold_rev Logic.all [u, v] (Logic.mk_implies (HOLogic.mk_Trueprop uvr,
HOLogic.mk_Trueprop (mk_coinduct_prem_concl ctxt Xss fpTss rs' n udiscs uselss vdiscs vselss
ctrXs_Tss)));
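(*post-process the raw (co)induction theorem: split the conjunction into one theorem per mutual type and adjust the premise order*)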
fun postproc_co_induct ctxt nn prop prop_conj =
Drule.zero_var_indexes
#> `(conj_dests nn)
#>> map (fn thm => Thm.permute_prems 0 ~1 (thm RS prop))
##> (fn thm => Thm.permute_prems 0 (~ nn)
(if nn = 1 then thm RS prop
else funpow nn (fn thm => unfold_thms ctxt @{thms conj_assoc} (thm RS prop_conj)) thm));
fun mk_induct_attrs ctrss =
let val induct_cases = quasi_unambiguous_case_names (maps (map name_of_ctr) ctrss);
in [Attrib.case_names induct_cases] end;
fun derive_rel_induct_thms_for_types ctxt nn fpA_Ts As Bs ctrAss ctrAs_Tsss exhausts ctor_rel_induct
ctor_defss ctor_injects pre_rel_defs abs_inverses live_nesting_rel_eqs =
let
val B_ify_T = Term.typ_subst_atomic (As ~~ Bs);
val B_ify = Term.map_types B_ify_T;
val fpB_Ts = map B_ify_T fpA_Ts;
val ctrBs_Tsss = map (map (map B_ify_T)) ctrAs_Tsss;
val ctrBss = map (map B_ify) ctrAss;
val ((((Rs, IRs), ctrAsss), ctrBsss), names_ctxt) = ctxt
|> mk_Frees "R" (map2 mk_pred2T As Bs)
||>> mk_Frees "IR" (map2 mk_pred2T fpA_Ts fpB_Ts)
||>> mk_Freesss "a" ctrAs_Tsss
||>> mk_Freesss "b" ctrBs_Tsss;
val prems =
let
fun mk_prem ctrA ctrB argAs argBs =
fold_rev Logic.all (argAs @ argBs) (fold_rev (curry Logic.mk_implies)
(map2 (HOLogic.mk_Trueprop oo build_rel_app names_ctxt (Rs @ IRs) fpA_Ts) argAs argBs)
(HOLogic.mk_Trueprop (build_rel_app names_ctxt (Rs @ IRs) fpA_Ts
(Term.list_comb (ctrA, argAs)) (Term.list_comb (ctrB, argBs)))));
in
flat (@{map 4} (@{map 4} mk_prem) ctrAss ctrBss ctrAsss ctrBsss)
end;
val goal = HOLogic.mk_Trueprop (Library.foldr1 HOLogic.mk_conj (map2 mk_leq
(map2 (build_the_rel ctxt (Rs @ IRs) []) fpA_Ts fpB_Ts) IRs));
val vars = Variable.add_free_names ctxt goal [];
val rel_induct0_thm =
Goal.prove_sorry ctxt vars prems goal (fn {context = ctxt, prems} =>
mk_rel_induct0_tac ctxt ctor_rel_induct prems (map (Thm.cterm_of ctxt) IRs) exhausts
ctor_defss ctor_injects pre_rel_defs abs_inverses live_nesting_rel_eqs)
|> Thm.close_derivation \<^here>;
in
(postproc_co_induct ctxt nn @{thm predicate2D} @{thm predicate2D_conj} rel_induct0_thm,
mk_induct_attrs ctrAss)
end;
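(*derive the induction and recursor equation theorems for datatypes (least fixpoints)*)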
fun derive_induct_recs_thms_for_types plugins pre_bnfs rec_args_typess ctor_induct ctor_rec_thms
live_nesting_bnfs fp_nesting_bnfs fpTs Cs Xs ctrXs_Tsss pre_abs_inverses pre_type_definitions
abs_inverses ctrss ctr_defss recs rec_defs ctxt =
let
val ctr_Tsss = map (map (binder_types o fastype_of)) ctrss;
val nn = length pre_bnfs;
val ns = map length ctr_Tsss;
val mss = map (map length) ctr_Tsss;
val pre_map_defs = map map_def_of_bnf pre_bnfs;
val pre_set_defss = map set_defs_of_bnf pre_bnfs;
val live_nesting_map_ident0s = map map_ident0_of_bnf live_nesting_bnfs;
val fp_nesting_map_ident0s = map map_ident0_of_bnf fp_nesting_bnfs;
val fp_nesting_set_maps = maps set_map_of_bnf fp_nesting_bnfs;
val fp_b_names = map base_name_of_typ fpTs;
val (((ps, xsss), us'), names_ctxt) = ctxt
|> mk_Frees "P" (map mk_pred1T fpTs)
||>> mk_Freesss "x" ctr_Tsss
||>> Variable.variant_fixes fp_b_names;
val us = map2 (curry Free) us' fpTs;
val setss_fp_nesting = map mk_bnf_sets fp_nesting_bnfs;
val (induct_thms, induct_thm) =
let
val raw_premss = @{map 4} (@{map 3}
o mk_induct_raw_prem I names_ctxt (map single Xs) setss_fp_nesting)
ps ctrss ctr_Tsss ctrXs_Tsss;
val concl =
HOLogic.mk_Trueprop (Library.foldr1 HOLogic.mk_conj (map2 (curry (op $)) ps us));
val goal =
Library.foldr (Logic.list_implies o apfst (map (finish_induct_prem ctxt nn ps)))
(raw_premss, concl);
val vars = Variable.add_free_names ctxt goal [];
val kksss = map (map (map (fst o snd) o #2)) raw_premss;
val ctor_induct' = ctor_induct OF (map2 mk_absumprodE pre_type_definitions mss);
val thm =
Goal.prove_sorry ctxt vars [] goal (fn {context = ctxt, ...} =>
mk_induct_tac ctxt nn ns mss kksss (flat ctr_defss) ctor_induct' pre_abs_inverses
abs_inverses fp_nesting_set_maps pre_set_defss)
|> Thm.close_derivation \<^here>;
in
`(conj_dests nn) thm
end;
val xctrss = map2 (map2 (curry Term.list_comb)) ctrss xsss;
fun mk_rec_thmss (_, x_Tssss, fss, _) recs rec_defs ctor_rec_thms =
let
val frecs = map (lists_bmoc fss) recs;
fun mk_goal frec xctr f xs fxs =
fold_rev (fold_rev Logic.all) (xs :: fss)
(mk_Trueprop_eq (frec $ xctr, Term.list_comb (f, fxs)));
fun maybe_tick (T, U) u f =
if try (fst o HOLogic.dest_prodT) U = SOME T then
Term.lambda u (HOLogic.mk_prod (u, f $ u))
else
f;
fun build_rec (x as Free (_, T)) U =
if T = U then
x
else
let
val build_simple =
indexify (perhaps (try (snd o HOLogic.dest_prodT)) o snd) Cs
(fn kk => fn TU => maybe_tick TU (nth us kk) (nth frecs kk));
in
build_map ctxt [] [] build_simple (T, U) $ x
end;
val fxsss = map2 (map2 (flat_rec_arg_args oo map2 (map o build_rec))) xsss x_Tssss;
val goalss = @{map 5} (@{map 4} o mk_goal) frecs xctrss fss xsss fxsss;
val tacss = @{map 4} (map ooo
mk_rec_tac pre_map_defs (fp_nesting_map_ident0s @ live_nesting_map_ident0s) rec_defs)
ctor_rec_thms pre_abs_inverses abs_inverses ctr_defss;
fun prove goal tac =
Goal.prove_sorry ctxt [] [] goal (tac o #context)
|> Thm.close_derivation \<^here>;
in
map2 (map2 prove) goalss tacss
end;
val rec_thmss = mk_rec_thmss (the rec_args_typess) recs rec_defs ctor_rec_thms;
in
((induct_thms, induct_thm, mk_induct_attrs ctrss),
(rec_thmss, nitpicksimp_attrs @ simp_attrs))
end;
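(*case_names and case_conclusion attributes for the coinduction rules*)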
fun mk_coinduct_attrs fpTs ctrss discss mss =
let
val fp_b_names = map base_name_of_typ fpTs;
fun mk_coinduct_concls ms discs ctrs =
let
fun mk_disc_concl disc = [name_of_disc disc];
fun mk_ctr_concl 0 _ = []
| mk_ctr_concl _ ctr = [name_of_ctr ctr];
val disc_concls = map mk_disc_concl (fst (split_last discs)) @ [[]];
val ctr_concls = map2 mk_ctr_concl ms ctrs;
in
flat (map2 append disc_concls ctr_concls)
end;
val coinduct_cases = quasi_unambiguous_case_names (map (prefix Eq_prefix) fp_b_names);
val coinduct_conclss =
@{map 3} (quasi_unambiguous_case_names ooo mk_coinduct_concls) mss discss ctrss;
val coinduct_case_names_attr = Attrib.case_names coinduct_cases;
val coinduct_case_concl_attrs =
map2 (fn casex => fn concls => Attrib.case_conclusion (casex, concls))
coinduct_cases coinduct_conclss;
val common_coinduct_attrs = coinduct_case_names_attr :: coinduct_case_concl_attrs;
val coinduct_attrs = Attrib.consumes 1 :: coinduct_case_names_attr :: coinduct_case_concl_attrs;
in
(coinduct_attrs, common_coinduct_attrs)
end;
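(*derive the "rel_coinduct" theorems from the low-level dtor_rel_coinduct rule*)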
fun derive_rel_coinduct_thms_for_types ctxt nn fpA_Ts ns As Bs mss (ctr_sugars : ctr_sugar list)
abs_inverses abs_injects ctor_injects dtor_ctors rel_pre_defs ctor_defss dtor_rel_coinduct
live_nesting_rel_eqs =
let
val B_ify_T = Term.typ_subst_atomic (As ~~ Bs);
val fpB_Ts = map B_ify_T fpA_Ts;
val (Rs, IRs, fpAs, fpBs, _) =
let
val fp_names = map base_name_of_typ fpA_Ts;
val ((((Rs, IRs), fpAs_names), fpBs_names), names_ctxt) = ctxt
|> mk_Frees "R" (map2 mk_pred2T As Bs)
||>> mk_Frees "IR" (map2 mk_pred2T fpA_Ts fpB_Ts)
||>> Variable.variant_fixes fp_names
||>> Variable.variant_fixes (map (suffix "'") fp_names);
in
(Rs, IRs, map2 (curry Free) fpAs_names fpA_Ts, map2 (curry Free) fpBs_names fpB_Ts,
names_ctxt)
end;
val ((discA_tss, selA_tsss), (discB_tss, selB_tsss)) =
let
val discss = map #discs ctr_sugars;
val selsss = map #selss ctr_sugars;
fun mk_discss ts Ts = map2 (map o rapp) ts (map (map (mk_disc_or_sel Ts)) discss);
fun mk_selsss ts Ts =
map2 (map o map o rapp) ts (map (map (map (mk_disc_or_sel Ts))) selsss);
in
((mk_discss fpAs As, mk_selsss fpAs As),
(mk_discss fpBs Bs, mk_selsss fpBs Bs))
end;
val prems =
let
fun mk_prem_ctr_concls n k discA_t selA_ts discB_t selB_ts =
(if k = n then [] else [HOLogic.mk_eq (discA_t, discB_t)]) @
(case (selA_ts, selB_ts) of
([], []) => []
| (_ :: _, _ :: _) =>
[Library.foldr HOLogic.mk_imp
(if n = 1 then [] else [discA_t, discB_t],
Library.foldr1 HOLogic.mk_conj
(map2 (build_rel_app ctxt (Rs @ IRs) fpA_Ts) selA_ts selB_ts))]);
fun mk_prem_concl n discA_ts selA_tss discB_ts selB_tss =
Library.foldr1 HOLogic.mk_conj (flat (@{map 5} (mk_prem_ctr_concls n)
(1 upto n) discA_ts selA_tss discB_ts selB_tss))
handle List.Empty => \<^term>\<open>True\<close>;
fun mk_prem IR tA tB n discA_ts selA_tss discB_ts selB_tss =
fold_rev Logic.all [tA, tB] (Logic.mk_implies (HOLogic.mk_Trueprop (IR $ tA $ tB),
HOLogic.mk_Trueprop (mk_prem_concl n discA_ts selA_tss discB_ts selB_tss)));
in
@{map 8} mk_prem IRs fpAs fpBs ns discA_tss selA_tsss discB_tss selB_tsss
end;
val goal = HOLogic.mk_Trueprop (Library.foldr1 HOLogic.mk_conj (map2 mk_leq
IRs (map2 (build_the_rel ctxt (Rs @ IRs) []) fpA_Ts fpB_Ts)));
val vars = Variable.add_free_names ctxt goal [];
val rel_coinduct0_thm =
Goal.prove_sorry ctxt vars prems goal (fn {context = ctxt, prems} =>
mk_rel_coinduct0_tac ctxt dtor_rel_coinduct (map (Thm.cterm_of ctxt) IRs) prems
(map #exhaust ctr_sugars) (map (flat o #disc_thmss) ctr_sugars)
(map (flat o #sel_thmss) ctr_sugars) ctor_defss dtor_ctors ctor_injects abs_injects
rel_pre_defs abs_inverses live_nesting_rel_eqs)
|> Thm.close_derivation \<^here>;
in
(postproc_co_induct ctxt nn @{thm predicate2D} @{thm predicate2D_conj} rel_coinduct0_thm,
mk_coinduct_attrs fpA_Ts (map #ctrs ctr_sugars) (map #discs ctr_sugars) mss)
end;
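(*derive the "set_induct" rules from the low-level dtor_set_inducts*)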
fun derive_set_induct_thms_for_types ctxt nn fpTs ctrss setss dtor_set_inducts exhausts set_pre_defs
ctor_defs dtor_ctors Abs_pre_inverses =
let
fun mk_prems A Ps ctr_args t ctxt =
(case fastype_of t of
Type (type_name, innerTs) =>
(case bnf_of ctxt type_name of
NONE => ([], ctxt)
| SOME bnf =>
let
fun seq_assm a set ctxt =
let
val X = HOLogic.dest_setT (range_type (fastype_of set));
val (x, ctxt') = yield_singleton (mk_Frees "x") X ctxt;
val assm = mk_Trueprop_mem (x, set $ a);
in
(case build_binary_fun_app Ps x a of
NONE =>
mk_prems A Ps ctr_args x ctxt'
|>> map (Logic.all x o Logic.mk_implies o pair assm)
| SOME f =>
([Logic.all x
(Logic.mk_implies (assm,
Logic.mk_implies (HOLogic.mk_Trueprop f,
HOLogic.mk_Trueprop (the (build_binary_fun_app Ps x ctr_args)))))],
ctxt'))
end;
in
fold_map (seq_assm t o mk_set innerTs) (sets_of_bnf bnf) ctxt
|>> flat
end)
| T =>
if T = A then ([HOLogic.mk_Trueprop (the (build_binary_fun_app Ps t ctr_args))], ctxt)
else ([], ctxt));
fun mk_prems_for_ctr A Ps ctr ctxt =
let
val (args, ctxt') = mk_Frees "z" (binder_types (fastype_of ctr)) ctxt;
in
fold_map (mk_prems A Ps (list_comb (ctr, args))) args ctxt'
|>> map (fold_rev Logic.all args) o flat
|>> (fn prems => (prems, mk_names (length prems) (name_of_ctr ctr)))
end;
fun mk_prems_and_concl_for_type A Ps ((fpT, ctrs), set) ctxt =
let
val ((x, fp), ctxt') = ctxt
|> yield_singleton (mk_Frees "x") A
||>> yield_singleton (mk_Frees "a") fpT;
val concl = mk_Ball (set $ fp) (Term.absfree (dest_Free x)
(the (build_binary_fun_app Ps x fp)));
in
fold_map (mk_prems_for_ctr A Ps) ctrs ctxt'
|>> split_list
|>> map_prod flat flat
|>> apfst (rpair concl)
end;
fun mk_thm ctxt fpTs ctrss sets =
let
val A = HOLogic.dest_setT (range_type (fastype_of (hd sets)));
val (Ps, ctxt') = mk_Frees "P" (map (fn fpT => A --> fpT --> HOLogic.boolT) fpTs) ctxt;
val (((prems, concl), case_names), ctxt'') =
fold_map (mk_prems_and_concl_for_type A Ps) (fpTs ~~ ctrss ~~ sets) ctxt'
|>> apfst split_list o split_list
|>> apfst (apfst flat)
|>> apfst (apsnd (Library.foldr1 HOLogic.mk_conj))
|>> apsnd flat;
val vars = fold (Variable.add_free_names ctxt) (concl :: prems) [];
val thm =
Goal.prove_sorry ctxt vars prems (HOLogic.mk_Trueprop concl)
(fn {context = ctxt, prems} =>
mk_set_induct0_tac ctxt (map (Thm.cterm_of ctxt'') Ps) prems dtor_set_inducts
exhausts set_pre_defs ctor_defs dtor_ctors Abs_pre_inverses)
|> Thm.close_derivation \<^here>;
val case_names_attr = Attrib.case_names (quasi_unambiguous_case_names case_names);
- val induct_set_attrs = map (Attrib.internal o K o Induct.induct_pred o name_of_set) sets;
+ val induct_set_attrs = map (Attrib.internal \<^here> o K o Induct.induct_pred o name_of_set) sets;
in
(thm, case_names_attr :: induct_set_attrs)
end
val consumes_attr = Attrib.consumes 1;
in
map (mk_thm ctxt fpTs ctrss
#> nn = 1 ? map_prod (fn thm => rotate_prems ~1 (thm RS bspec)) (cons consumes_attr))
(transpose setss)
end;
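(*derive the strong coinduction rule by replacing each witness relation R in the plain coinduction rule with the union of R and equality*)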
fun mk_coinduct_strong_thm coind rel_eqs rel_monos mk_vimage2p ctxt =
let
val n = Thm.nprems_of coind;
val m = Thm.nprems_of (hd rel_monos) - n;
fun mk_inst phi =
(phi, Thm.cterm_of ctxt (mk_union (Var phi, HOLogic.eq_const (fst (dest_pred2T (#2 phi))))));
val insts = Term.add_vars (Thm.prop_of coind) [] |> rev |> take n |> map mk_inst;
fun mk_unfold rel_eq rel_mono =
let
val eq = iffD2 OF [rel_eq RS @{thm predicate2_eqD}, refl];
val mono = rel_mono OF (replicate m @{thm order_refl} @ replicate n @{thm eq_subset});
in mk_vimage2p (eq RS (mono RS @{thm predicate2D})) RS eqTrueI end;
val unfolds = map2 mk_unfold rel_eqs rel_monos @ @{thms sup_fun_def sup_bool_def
imp_disjL all_conj_distrib subst_eq_imp simp_thms(18,21,35)};
in
Thm.instantiate (TVars.empty, Vars.make insts) coind
|> unfold_thms ctxt unfolds
end;
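(*derive the plain and, if requested, strong coinduction theorems for codatatypes*)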
fun derive_coinduct_thms_for_types ctxt strong alter_r pre_bnfs dtor_coinduct dtor_ctors
live_nesting_bnfs fpTs Xs ctrXs_Tsss ns pre_abs_inverses abs_inverses mk_vimage2p ctr_defss
(ctr_sugars : ctr_sugar list) =
let
val nn = length pre_bnfs;
val pre_rel_defs = map rel_def_of_bnf pre_bnfs;
val live_nesting_rel_eqs = map rel_eq_of_bnf live_nesting_bnfs;
val fp_b_names = map base_name_of_typ fpTs;
val discss = map #discs ctr_sugars;
val selsss = map #selss ctr_sugars;
val exhausts = map #exhaust ctr_sugars;
val disc_thmsss = map #disc_thmss ctr_sugars;
val sel_thmsss = map #sel_thmss ctr_sugars;
val (((rs, us'), vs'), _) = ctxt
|> mk_Frees "R" (map (fn T => mk_pred2T T T) fpTs)
||>> Variable.variant_fixes fp_b_names
||>> Variable.variant_fixes (map (suffix "'") fp_b_names);
val us = map2 (curry Free) us' fpTs;
val udiscss = map2 (map o rapp) us discss;
val uselsss = map2 (map o map o rapp) us selsss;
val vs = map2 (curry Free) vs' fpTs;
val vdiscss = map2 (map o rapp) vs discss;
val vselsss = map2 (map o map o rapp) vs selsss;
val uvrs = @{map 3} (fn r => fn u => fn v => r $ u $ v) rs us vs;
val uv_eqs = map2 (curry HOLogic.mk_eq) us vs;
val strong_rs =
@{map 4} (fn u => fn v => fn uvr => fn uv_eq =>
fold_rev Term.lambda [u, v] (HOLogic.mk_disj (uvr, uv_eq))) us vs uvrs uv_eqs;
val concl =
HOLogic.mk_Trueprop (Library.foldr1 HOLogic.mk_conj
(@{map 3} (fn uvr => fn u => fn v => HOLogic.mk_imp (uvr, HOLogic.mk_eq (u, v)))
uvrs us vs))
fun mk_goal rs0' =
Logic.list_implies (@{map 9} (mk_coinduct_prem ctxt (map single Xs) (map single fpTs)
(map alter_r rs0'))
uvrs us vs ns udiscss uselsss vdiscss vselsss ctrXs_Tsss,
concl);
val goals = map mk_goal ([rs] @ (if strong then [strong_rs] else []));
fun prove dtor_coinduct' goal =
Variable.add_free_names ctxt goal []
|> (fn vars => Goal.prove_sorry ctxt vars [] goal (fn {context = ctxt, ...} =>
mk_coinduct_tac ctxt live_nesting_rel_eqs nn ns dtor_coinduct' pre_rel_defs pre_abs_inverses
abs_inverses dtor_ctors exhausts ctr_defss disc_thmsss sel_thmsss))
|> Thm.close_derivation \<^here>;
val rel_eqs = map rel_eq_of_bnf pre_bnfs;
val rel_monos = map rel_mono_of_bnf pre_bnfs;
val dtor_coinducts =
[dtor_coinduct] @
(if strong then [mk_coinduct_strong_thm dtor_coinduct rel_eqs rel_monos mk_vimage2p ctxt]
else []);
in
map2 (postproc_co_induct ctxt nn mp @{thm conj_commute[THEN iffD1]} oo prove)
dtor_coinducts goals
end;
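(*derive the coinduction theorems together with the corecursor equations and the "corec_disc", "corec_disc_iff", and "corec_sel" theorems*)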
fun derive_coinduct_corecs_thms_for_types ctxt pre_bnfs
(x, cs, cpss, (((pgss, _, _, _), cqgsss), _)) dtor_coinduct dtor_injects dtor_ctors
dtor_corec_thms live_nesting_bnfs fpTs Cs Xs ctrXs_Tsss kss mss ns pre_abs_inverses abs_inverses
mk_vimage2p ctr_defss (ctr_sugars : ctr_sugar list) corecs corec_defs =
let
fun mk_ctor_dtor_corec_thm dtor_inject dtor_ctor corec =
iffD1 OF [dtor_inject, trans OF [corec, dtor_ctor RS sym]];
val ctor_dtor_corec_thms =
@{map 3} mk_ctor_dtor_corec_thm dtor_injects dtor_ctors dtor_corec_thms;
val pre_map_defs = map map_def_of_bnf pre_bnfs;
val live_nesting_map_ident0s = map map_ident0_of_bnf live_nesting_bnfs;
val fp_b_names = map base_name_of_typ fpTs;
val ctrss = map #ctrs ctr_sugars;
val discss = map #discs ctr_sugars;
val selsss = map #selss ctr_sugars;
val disc_thmsss = map #disc_thmss ctr_sugars;
val discIss = map #discIs ctr_sugars;
val sel_thmsss = map #sel_thmss ctr_sugars;
val coinduct_thms_pairs = derive_coinduct_thms_for_types ctxt true I pre_bnfs dtor_coinduct
dtor_ctors live_nesting_bnfs fpTs Xs ctrXs_Tsss ns pre_abs_inverses abs_inverses mk_vimage2p
ctr_defss ctr_sugars;
fun mk_maybe_not pos = not pos ? HOLogic.mk_not;
val gcorecs = map (lists_bmoc pgss) corecs;
val corec_thmss =
let
val (us', _) = ctxt
|> Variable.variant_fixes fp_b_names;
val us = map2 (curry Free) us' fpTs;
fun mk_goal c cps gcorec n k ctr m cfs' =
fold_rev (fold_rev Logic.all) ([c] :: pgss)
(Logic.list_implies (seq_conds (HOLogic.mk_Trueprop oo mk_maybe_not) n k cps,
mk_Trueprop_eq (gcorec $ c, Term.list_comb (ctr, take m cfs'))));
val mk_U = typ_subst_nonatomic (map2 (fn C => fn fpT => (mk_sumT (fpT, C), fpT)) Cs fpTs);
fun tack (c, u) f =
let val x' = Free (x, mk_sumT (fastype_of u, fastype_of c)) in
Term.lambda x' (mk_case_sum (Term.lambda u u, Term.lambda c (f $ c)) $ x')
end;
fun build_corec cqg =
let val T = fastype_of cqg in
if exists_subtype_in Cs T then
let
val U = mk_U T;
val build_simple =
indexify fst (map2 (curry mk_sumT) fpTs Cs)
(fn kk => fn _ => tack (nth cs kk, nth us kk) (nth gcorecs kk));
in
build_map ctxt [] [] build_simple (T, U) $ cqg
end
else
cqg
end;
val cqgsss' = map (map (map build_corec)) cqgsss;
val goalss = @{map 8} (@{map 4} oooo mk_goal) cs cpss gcorecs ns kss ctrss mss cqgsss';
val tacss =
@{map 4} (map ooo mk_corec_tac corec_defs live_nesting_map_ident0s)
ctor_dtor_corec_thms pre_map_defs abs_inverses ctr_defss;
fun prove goal tac =
Goal.prove_sorry ctxt [] [] goal (tac o #context)
|> Thm.close_derivation \<^here>;
in
map2 (map2 prove) goalss tacss
|> map (map (unfold_thms ctxt @{thms case_sum_if}))
end;
val corec_disc_iff_thmss =
let
fun mk_goal c cps gcorec n k disc =
mk_Trueprop_eq (disc $ (gcorec $ c),
if n = 1 then \<^Const>\<open>True\<close>
else Library.foldr1 HOLogic.mk_conj (seq_conds mk_maybe_not n k cps));
val goalss = @{map 6} (map2 oooo mk_goal) cs cpss gcorecs ns kss discss;
fun mk_case_split' cp = Thm.instantiate' [] [SOME (Thm.cterm_of ctxt cp)] @{thm case_split};
val case_splitss' = map (map mk_case_split') cpss;
val tacss = @{map 3} (map oo mk_corec_disc_iff_tac) case_splitss' corec_thmss disc_thmsss;
fun prove goal tac =
Variable.add_free_names ctxt goal []
|> (fn vars => Goal.prove_sorry ctxt vars [] goal (tac o #context))
|> Thm.close_derivation \<^here>;
fun proves [_] [_] = []
| proves goals tacs = map2 prove goals tacs;
in
map2 proves goalss tacss
end;
fun mk_corec_disc_thms corecs discIs = map (op RS) (corecs ~~ discIs);
val corec_disc_thmss = map2 mk_corec_disc_thms corec_thmss discIss;
fun mk_corec_sel_thm corec_thm sel sel_thm =
let
val (domT, ranT) = dest_funT (fastype_of sel);
val arg_cong' =
Thm.instantiate' (map (SOME o Thm.ctyp_of ctxt) [domT, ranT])
[NONE, NONE, SOME (Thm.cterm_of ctxt sel)] arg_cong
|> Thm.varifyT_global;
val sel_thm' = sel_thm RSN (2, trans);
in
corec_thm RS arg_cong' RS sel_thm'
end;
fun mk_corec_sel_thms corec_thmss =
@{map 3} (@{map 3} (map2 o mk_corec_sel_thm)) corec_thmss selsss sel_thmsss;
val corec_sel_thmsss = mk_corec_sel_thms corec_thmss;
in
((coinduct_thms_pairs,
mk_coinduct_attrs fpTs (map #ctrs ctr_sugars) (map #discs ctr_sugars) mss),
corec_thmss,
corec_disc_thmss,
(corec_disc_iff_thmss, simp_attrs),
(corec_sel_thmsss, simp_attrs))
end;
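(*main entry point: check the (co)datatype specifications, construct the fixpoint BNFs, define constructors, discriminators, selectors, and (co)recursors, and derive and note the associated theorems; e.g. a specification such as  datatype 'a tree = Leaf | Node 'a "'a tree" "'a tree"  is processed here and yields tree.induct, tree.rec, tree.map, tree.set, tree.rel_inject, etc.*)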
fun define_co_datatypes prepare_plugins prepare_constraint prepare_typ prepare_term fp construct_fp
((raw_plugins, discs_sels0), specs) lthy =
let
val plugins = prepare_plugins lthy raw_plugins;
val discs_sels = discs_sels0 orelse fp = Greatest_FP;
val nn = length specs;
val fp_bs = map type_binding_of_spec specs;
val fp_b_names = map Binding.name_of fp_bs;
val fp_common_name = mk_common_name fp_b_names;
val map_bs = map map_binding_of_spec specs;
val rel_bs = map rel_binding_of_spec specs;
val pred_bs = map pred_binding_of_spec specs;
fun prepare_type_arg (_, (ty, c)) =
let val TFree (s, _) = prepare_typ lthy ty in
TFree (s, prepare_constraint lthy c)
end;
val Ass0 = map (map prepare_type_arg o type_args_named_constrained_of_spec) specs;
val unsorted_Ass0 = map (map (resort_tfree_or_tvar \<^sort>\<open>type\<close>)) Ass0;
val unsorted_As = Library.foldr1 (merge_type_args fp) unsorted_Ass0;
val num_As = length unsorted_As;
val set_boss = map (map fst o type_args_named_constrained_of_spec) specs;
val set_bss = map (map (the_default Binding.empty)) set_boss;
fun add_fake_type spec =
Typedecl.basic_typedecl {final = true}
(type_binding_of_spec spec, num_As, Mixfix.reset_pos (mixfix_of_spec spec));
val (fake_T_names, fake_lthy) = fold_map add_fake_type specs lthy;
val qsoty = quote o Syntax.string_of_typ fake_lthy;
val _ = (case Library.duplicates (op =) unsorted_As of [] => ()
| A :: _ => error ("Duplicate type parameter " ^ qsoty A ^ " in " ^ co_prefix fp ^
"datatype specification"));
val bad_args =
map (Logic.type_map (singleton (Variable.polymorphic lthy))) unsorted_As
|> filter_out Term.is_TVar;
val _ = null bad_args orelse
error ("Locally fixed type argument " ^ qsoty (hd bad_args) ^ " in " ^ co_prefix fp ^
"datatype specification");
val mixfixes = map mixfix_of_spec specs;
val _ = (case Library.duplicates Binding.eq_name fp_bs of [] => ()
| b :: _ => error ("Duplicate type name declaration " ^ quote (Binding.name_of b)));
val mx_ctr_specss = map mixfixed_ctr_specs_of_spec specs;
val ctr_specss = map (map fst) mx_ctr_specss;
val ctr_mixfixess = map (map snd) mx_ctr_specss;
val disc_bindingss = map (map disc_of_ctr_spec) ctr_specss;
val ctr_bindingss =
map2 (fn fp_b_name => map (Binding.qualify false fp_b_name o ctr_of_ctr_spec)) fp_b_names
ctr_specss;
val ctr_argsss = map (map args_of_ctr_spec) ctr_specss;
val sel_bindingsss = map (map (map fst)) ctr_argsss;
val fake_ctr_Tsss0 = map (map (map (prepare_typ fake_lthy o snd))) ctr_argsss;
val raw_sel_default_eqss = map sel_default_eqs_of_spec specs;
val (As :: _) :: fake_ctr_Tsss =
burrow (burrow (Syntax.check_typs fake_lthy)) (Ass0 :: fake_ctr_Tsss0);
val As' = map dest_TFree As;
val rhs_As' = fold (fold (fold Term.add_tfreesT)) fake_ctr_Tsss [];
val _ = (case subtract (op =) As' rhs_As' of [] => ()
| extras => error ("Extra type variables on right-hand side: " ^
commas (map (qsoty o TFree) extras)));
val fake_Ts = map (fn s => Type (s, As)) fake_T_names;
val ((((Bs0, Cs), Es), Xs), _) = lthy
|> fold (Variable.declare_typ o resort_tfree_or_tvar dummyS) unsorted_As
|> mk_TFrees num_As
||>> mk_TFrees nn
||>> mk_TFrees nn
||>> variant_tfrees fp_b_names;
fun eq_fpT_check (T as Type (s, Ts)) (T' as Type (s', Ts')) =
s = s' andalso (Ts = Ts' orelse
error ("Wrong type arguments in " ^ co_prefix fp ^ "recursive type " ^ qsoty T ^
" (expected " ^ qsoty T' ^ ")"))
| eq_fpT_check _ _ = false;
fun freeze_fp (T as Type (s, Ts)) =
(case find_index (eq_fpT_check T) fake_Ts of
~1 => Type (s, map freeze_fp Ts)
| kk => nth Xs kk)
| freeze_fp T = T;
val unfreeze_fp = Term.typ_subst_atomic (Xs ~~ fake_Ts);
val ctrXs_Tsss = map (map (map freeze_fp)) fake_ctr_Tsss;
val ctrXs_repTs = map mk_sumprodT_balanced ctrXs_Tsss;
val _ =
let
fun add_deps i =
fold (fn T => fold_index (fn (j, X) =>
(i <> j andalso exists_subtype_in [X] T) ? insert (op =) (i, j)) Xs);
val add_missing_nodes = fold (AList.default (op =) o rpair []) (0 upto nn - 1);
val deps = fold_index (uncurry (fold o add_deps)) ctrXs_Tsss []
|> AList.group (op =)
|> add_missing_nodes;
val G = Int_Graph.make (map (apfst (rpair ())) deps);
val sccs = map (sort int_ord) (Int_Graph.strong_conn G);
val str_of_scc = prefix (co_prefix fp ^ "datatype ") o
space_implode " and " o map (suffix " = \<dots>" o Long_Name.base_name);
fun warn [_] = ()
| warn sccs =
warning ("Defined types not fully mutually " ^ co_prefix fp ^ "recursive\n\
\Alternative specification:\n" ^
cat_lines (map (prefix " " o str_of_scc o map (nth fp_b_names)) sccs));
in
warn (order_strong_conn (op =) Int_Graph.make Int_Graph.topological_order deps sccs)
end;
val killed_As =
map_filter (fn (A, set_bos) => if exists is_none set_bos then SOME A else NONE)
(As ~~ transpose set_boss);
val (((pre_bnfs, absT_infos), _), (fp_res as {bnfs = fp_bnfs as any_fp_bnf :: _, ctors = ctors0,
dtors = dtors0, xtor_co_recs = xtor_co_recs0, xtor_co_induct, dtor_ctors, ctor_dtors,
ctor_injects, dtor_injects, xtor_maps, xtor_setss, xtor_rels, xtor_co_rec_thms,
xtor_rel_co_induct, dtor_set_inducts, xtor_co_rec_transfers, xtor_co_rec_o_maps, ...},
lthy)) =
fixpoint_bnf false I (construct_fp mixfixes map_bs rel_bs pred_bs set_bss) fp_bs
(map dest_TFree As) (map dest_TFree killed_As) (map dest_TFree Xs) ctrXs_repTs
empty_comp_cache lthy
handle BAD_DEAD (X, X_backdrop) =>
(case X_backdrop of
Type (bad_tc, _) =>
let
val fake_T = qsoty (unfreeze_fp X);
val fake_T_backdrop = qsoty (unfreeze_fp X_backdrop);
fun register_hint () =
"\nUse the " ^ quote (#1 \<^command_keyword>\<open>bnf\<close>) ^ " command to register " ^
quote bad_tc ^ " as a bounded natural functor to allow nested (co)recursion through \
\it";
in
if is_some (bnf_of lthy bad_tc) orelse is_some (fp_sugar_of lthy bad_tc) then
error ("Inadmissible " ^ co_prefix fp ^ "recursive occurrence of type " ^ fake_T ^
" in type expression " ^ fake_T_backdrop)
else if is_some (Old_Datatype_Data.get_info (Proof_Context.theory_of lthy)
bad_tc) then
error ("Unsupported " ^ co_prefix fp ^ "recursive occurrence of type " ^ fake_T ^
" via the old-style datatype " ^ quote bad_tc ^ " in type expression " ^
fake_T_backdrop ^ register_hint ())
else
error ("Unsupported " ^ co_prefix fp ^ "recursive occurrence of type " ^ fake_T ^
" via type constructor " ^ quote bad_tc ^ " in type expression " ^ fake_T_backdrop ^
register_hint ())
end);
val time = time lthy;
val timer = time (Timer.startRealTimer ());
val fp_nesting_bnfs = nesting_bnfs lthy ctrXs_Tsss Xs;
val live_nesting_bnfs = nesting_bnfs lthy ctrXs_Tsss As;
val pre_map_defs = map map_def_of_bnf pre_bnfs;
val pre_set_defss = map set_defs_of_bnf pre_bnfs;
val pre_rel_defs = map rel_def_of_bnf pre_bnfs;
val fp_nesting_set_maps = maps set_map_of_bnf fp_nesting_bnfs;
val fp_nesting_rel_eq_onps = map rel_eq_onp_of_bnf fp_nesting_bnfs;
val live_nesting_map_id0s = map map_id0_of_bnf live_nesting_bnfs;
val live_nesting_map_ident0s = map map_ident0_of_bnf live_nesting_bnfs;
val live_nesting_set_maps = maps set_map_of_bnf live_nesting_bnfs;
val live_nesting_rel_eqs = map rel_eq_of_bnf live_nesting_bnfs;
val live_nesting_rel_eq_onps = map rel_eq_onp_of_bnf live_nesting_bnfs;
val liveness = liveness_of_fp_bnf num_As any_fp_bnf;
val live = live_of_bnf any_fp_bnf;
val _ =
if live = 0 andalso exists (not o Binding.is_empty) (map_bs @ rel_bs @ pred_bs) then
warning "Map function, relator, and predicator names ignored"
else
();
val Bs = @{map 3} (fn alive => fn A as TFree (_, S) => fn B =>
if alive then resort_tfree_or_tvar S B else A)
liveness As Bs0;
val B_ify_T = Term.typ_subst_atomic (As ~~ Bs);
val B_ify = Term.map_types B_ify_T;
val live_AsBs = filter (op <>) (As ~~ Bs);
val abss = map #abs absT_infos;
val reps = map #rep absT_infos;
val absTs = map #absT absT_infos;
val repTs = map #repT absT_infos;
val abs_injects = map #abs_inject absT_infos;
val abs_inverses = map #abs_inverse absT_infos;
val type_definitions = map #type_definition absT_infos;
val ctors = map (mk_ctor As) ctors0;
val dtors = map (mk_dtor As) dtors0;
val fpTs = map (domain_type o fastype_of) dtors;
val fpBTs = map B_ify_T fpTs;
val real_unfreeze_fp = Term.typ_subst_atomic (Xs ~~ fpTs);
val ctr_Tsss = map (map (map real_unfreeze_fp)) ctrXs_Tsss;
val ns = map length ctr_Tsss;
val kss = map (fn n => 1 upto n) ns;
val mss = map (map length) ctr_Tsss;
val (xtor_co_recs, recs_args_types, corecs_args_types) =
mk_co_recs_prelims lthy fp ctr_Tsss fpTs Cs absTs repTs ns mss xtor_co_recs0;
fun define_ctrs_dtrs_for_type_etc fp_bnf fp_b fpT C E ctor dtor xtor_co_rec ctor_dtor dtor_ctor
ctor_inject pre_map_def pre_set_defs pre_rel_def fp_map_thm fp_set_thms fp_rel_thm n ks ms
abs abs_inject type_definition ctr_bindings ctr_mixfixes ctr_Tss disc_bindings sel_bindingss
raw_sel_default_eqs lthy =
let
val fp_b_name = Binding.name_of fp_b;
val ((xss, ctrs0, ctor_iff_dtor_thm, ctr_defs), lthy) =
define_ctrs_dtrs_for_type fp_b_name fpT ctor dtor ctor_dtor dtor_ctor n ks abs
ctr_bindings ctr_mixfixes ctr_Tss lthy;
val ctrs = map (mk_ctr As) ctrs0;
val sel_default_eqs =
let
val sel_Tss = map (map (curry (op -->) fpT)) ctr_Tss;
val sel_bTs =
flat sel_bindingss ~~ flat sel_Tss
|> filter_out (Binding.is_empty o fst)
|> distinct (Binding.eq_name o apply2 fst);
val sel_default_lthy = fake_local_theory_for_sel_defaults sel_bTs lthy
in
map (prepare_term sel_default_lthy) raw_sel_default_eqs
end;
fun mk_binding pre =
Binding.qualify false fp_b_name (Binding.prefix_name (pre ^ "_") fp_b);
fun massage_res (ctr_sugar, maps_sets_rels) =
(maps_sets_rels, (ctrs, xss, ctor_iff_dtor_thm, ctr_defs, ctr_sugar));
in
(wrap_ctrs plugins fp discs_sels fp_b_name ctor_inject n ms abs_inject type_definition
disc_bindings sel_bindingss sel_default_eqs ctrs0 ctor_iff_dtor_thm ctr_defs
#> (fn (ctr_sugar, lthy) =>
derive_map_set_rel_pred_thms plugins fp live As Bs C E abs_inverses ctr_defs
fp_nesting_set_maps fp_nesting_rel_eq_onps live_nesting_map_id0s live_nesting_set_maps
live_nesting_rel_eqs live_nesting_rel_eq_onps [] fp_b_name fp_bnf fp_bnfs fpT ctor
ctor_dtor dtor_ctor pre_map_def pre_set_defs pre_rel_def fp_map_thm fp_set_thms
fp_rel_thm [] [] [] ctr_Tss ctr_sugar lthy
|>> pair ctr_sugar)
##>>
(if fp = Least_FP then define_rec (the recs_args_types) mk_binding fpTs Cs reps
else define_corec (the corecs_args_types) mk_binding fpTs Cs abss) xtor_co_rec
#>> apfst massage_res, lthy)
end;
fun wrap_ctrs_derive_map_set_rel_pred_thms_define_co_rec_for_types (wrap_one_etcs, lthy) =
fold_map I wrap_one_etcs lthy
|>> apsnd split_list o apfst (apsnd @{split_list 5} o apfst @{split_list 17} o split_list)
o split_list;
fun mk_simp_thms ({injects, distincts, case_thms, ...} : ctr_sugar) co_recs map_thms rel_injects
rel_distincts set_thmss =
injects @ distincts @ case_thms @ co_recs @ map_thms @ rel_injects @ rel_distincts @
set_thmss;
fun mk_co_rec_transfer_goals lthy co_recs =
let
val BE_ify = Term.subst_atomic_types (live_AsBs @ (Cs ~~ Es));
val ((Rs, Ss), names_lthy) = lthy
|> mk_Frees "R" (map (uncurry mk_pred2T) live_AsBs)
||>> mk_Frees "S" (map2 mk_pred2T Cs Es);
val co_recBs = map BE_ify co_recs;
in
(Rs, Ss, map2 (mk_parametricity_goal lthy (Rs @ Ss)) co_recs co_recBs, names_lthy)
end;
fun derive_rec_transfer_thms lthy recs rec_defs (SOME (_, _, _, xsssss)) =
let
val (Rs, Ss, goals, _) = mk_co_rec_transfer_goals lthy recs;
val goal = Logic.mk_conjunction_balanced goals;
val vars = Variable.add_free_names lthy goal [];
in
Goal.prove_sorry lthy vars [] goal
(fn {context = ctxt, prems = _} =>
mk_rec_transfer_tac ctxt nn ns (map (Thm.cterm_of ctxt) Ss)
(map (Thm.cterm_of ctxt) Rs) xsssss rec_defs xtor_co_rec_transfers pre_rel_defs
live_nesting_rel_eqs)
|> Thm.close_derivation \<^here>
|> Conjunction.elim_balanced nn
end;
fun derive_rec_o_map_thmss lthy recs rec_defs =
if live = 0 then
replicate nn []
else
let
fun variant_names n pre = fst (Variable.variant_fixes (replicate n pre) lthy);
val maps0 = map map_of_bnf fp_bnfs;
val f_names = variant_names num_As "f";
val fs = map2 (curry Free) f_names (map (op -->) (As ~~ Bs));
val live_gs = AList.find (op =) (fs ~~ liveness) true;
val gmaps = map (fn map0 => Term.list_comb (mk_map live As Bs map0, live_gs)) maps0;
val rec_arg_Ts = binder_fun_types (hd (map fastype_of recs));
val num_rec_args = length rec_arg_Ts;
val g_Ts = map B_ify_T rec_arg_Ts;
val g_names = variant_names num_rec_args "g";
val gs = map2 (curry Free) g_names g_Ts;
val grecs = map (fn recx => Term.list_comb (Term.map_types B_ify_T recx, gs)) recs;
val rec_o_map_lhss = map2 (curry HOLogic.mk_comp) grecs gmaps;
val ABfs = (As ~~ Bs) ~~ fs;
fun mk_rec_arg_arg (x as Free (_, T)) =
let val U = B_ify_T T in
if T = U then x else build_map lthy [] [] (the o AList.lookup (op =) ABfs) (T, U) $ x
end;
fun mk_rec_o_map_arg rec_arg_T h =
let
val x_Ts = binder_types rec_arg_T;
val m = length x_Ts;
val x_names = variant_names m "x";
val xs = map2 (curry Free) x_names x_Ts;
val xs' = map mk_rec_arg_arg xs;
in
fold_rev Term.lambda xs (Term.list_comb (h, xs'))
end;
fun mk_rec_o_map_rhs recx =
let val args = map2 mk_rec_o_map_arg rec_arg_Ts gs in
Term.list_comb (recx, args)
end;
val rec_o_map_rhss = map mk_rec_o_map_rhs recs;
val rec_o_map_goals =
map2 (fold_rev (fold_rev Logic.all) [fs, gs] o HOLogic.mk_Trueprop oo
curry HOLogic.mk_eq) rec_o_map_lhss rec_o_map_rhss;
val rec_o_map_thms =
@{map 3} (fn goal => fn rec_def => fn ctor_rec_o_map =>
Goal.prove_sorry lthy [] [] goal (fn {context = ctxt, ...} =>
mk_co_rec_o_map_tac ctxt rec_def pre_map_defs live_nesting_map_ident0s
abs_inverses ctor_rec_o_map)
|> Thm.close_derivation \<^here>)
rec_o_map_goals rec_defs xtor_co_rec_o_maps;
in
map single rec_o_map_thms
end;
fun derive_note_induct_recs_thms_for_types
((((map_thmss, map_disc_iffss, map_selsss, rel_injectss, rel_distinctss, rel_selss,
rel_intross, rel_casess, pred_injectss, set_thmss, set_selsssss, set_introsssss,
set_casess, ctr_transferss, case_transferss, disc_transferss, sel_transferss),
(ctrss, _, ctor_iff_dtors, ctr_defss, ctr_sugars)),
(recs, rec_defs)), lthy) =
let
val ((induct_thms, induct_thm, induct_attrs), (rec_thmss, rec_attrs)) =
derive_induct_recs_thms_for_types plugins pre_bnfs recs_args_types xtor_co_induct
xtor_co_rec_thms live_nesting_bnfs fp_nesting_bnfs fpTs Cs Xs ctrXs_Tsss abs_inverses
type_definitions abs_inverses ctrss ctr_defss recs rec_defs lthy;
val rec_transfer_thmss =
map single (derive_rec_transfer_thms lthy recs rec_defs recs_args_types);
- val induct_type_attr = Attrib.internal o K o Induct.induct_type;
- val induct_pred_attr = Attrib.internal o K o Induct.induct_pred;
+ val induct_type_attr = Attrib.internal \<^here> o K o Induct.induct_type;
+ val induct_pred_attr = Attrib.internal \<^here> o K o Induct.induct_pred;
val ((rel_induct_thmss, common_rel_induct_thms),
(rel_induct_attrs, common_rel_induct_attrs)) =
if live = 0 then
((replicate nn [], []), ([], []))
else
let
val ((rel_induct_thms, common_rel_induct_thm), rel_induct_attrs) =
derive_rel_induct_thms_for_types lthy nn fpTs As Bs ctrss ctr_Tsss
(map #exhaust ctr_sugars) xtor_rel_co_induct ctr_defss ctor_injects
pre_rel_defs abs_inverses live_nesting_rel_eqs;
in
((map single rel_induct_thms, single common_rel_induct_thm),
(rel_induct_attrs, rel_induct_attrs))
end;
val rec_o_map_thmss = derive_rec_o_map_thmss lthy recs rec_defs;
val simp_thmss =
@{map 6} mk_simp_thms ctr_sugars rec_thmss map_thmss rel_injectss rel_distinctss
set_thmss;
val common_notes =
(if nn > 1 then
[(inductN, [induct_thm], K induct_attrs),
(rel_inductN, common_rel_induct_thms, K common_rel_induct_attrs)]
else
[])
|> massage_simple_notes fp_common_name;
val notes =
[(inductN, map single induct_thms, fn T_name => induct_attrs @ [induct_type_attr T_name]),
(recN, rec_thmss, K rec_attrs),
(rec_o_mapN, rec_o_map_thmss, K []),
(rec_transferN, rec_transfer_thmss, K []),
(rel_inductN, rel_induct_thmss, K (rel_induct_attrs @ [induct_pred_attr ""])),
(simpsN, simp_thmss, K [])]
|> massage_multi_notes fp_b_names fpTs;
in
lthy
|> Spec_Rules.add Binding.empty Spec_Rules.equational recs (flat rec_thmss)
|> plugins code_plugin ? Code.declare_default_eqns (map (rpair true) (flat rec_thmss))
|> Local_Theory.notes (common_notes @ notes)
(* for "datatype_realizer.ML": *)
|>> name_noted_thms
(fst (dest_Type (hd fpTs)) ^ implode (map (prefix "_") (tl fp_b_names))) inductN
|-> interpret_bnfs_register_fp_sugars plugins fpTs fpBTs Xs Least_FP pre_bnfs absT_infos
fp_nesting_bnfs live_nesting_bnfs fp_res ctrXs_Tsss ctor_iff_dtors ctr_defss ctr_sugars
recs rec_defs map_thmss [induct_thm] (map single induct_thms) rec_thmss (replicate nn [])
(replicate nn []) rel_injectss rel_distinctss map_disc_iffss map_selsss rel_selss
rel_intross rel_casess pred_injectss set_thmss set_selsssss set_introsssss set_casess
ctr_transferss case_transferss disc_transferss sel_transferss (replicate nn [])
(replicate nn []) rec_transfer_thmss common_rel_induct_thms rel_induct_thmss []
(replicate nn []) rec_o_map_thmss
end;
fun derive_corec_transfer_thms lthy corecs corec_defs =
let
val (Rs, Ss, goals, _) = mk_co_rec_transfer_goals lthy corecs;
val (_, _, _, (((pgss, pss, qssss, gssss), _), _)) = the corecs_args_types;
val goal = Logic.mk_conjunction_balanced goals;
val vars = Variable.add_free_names lthy goal [];
in
Goal.prove_sorry lthy vars [] goal
(fn {context = ctxt, prems = _} =>
mk_corec_transfer_tac ctxt (map (Thm.cterm_of ctxt) Ss) (map (Thm.cterm_of ctxt) Rs)
type_definitions corec_defs xtor_co_rec_transfers pre_rel_defs
live_nesting_rel_eqs (flat pgss) pss qssss gssss)
|> Thm.close_derivation \<^here>
|> Conjunction.elim_balanced (length goals)
end;
fun derive_map_o_corec_thmss lthy0 lthy2 corecs corec_defs =
if live = 0 then
replicate nn []
else
let
fun variant_names n pre = fst (Variable.variant_fixes (replicate n pre) lthy0);
val maps0 = map map_of_bnf fp_bnfs;
val f_names = variant_names num_As "f";
val fs = map2 (curry Free) f_names (map (op -->) (As ~~ Bs));
val live_fs = AList.find (op =) (fs ~~ liveness) true;
val fmaps = map (fn map0 => Term.list_comb (mk_map live As Bs map0, live_fs)) maps0;
val corec_arg_Ts = binder_fun_types (hd (map fastype_of corecs));
val num_rec_args = length corec_arg_Ts;
val g_names = variant_names num_rec_args "g";
val gs = map2 (curry Free) g_names corec_arg_Ts;
val gcorecs = map (fn corecx => Term.list_comb (corecx, gs)) corecs;
val map_o_corec_lhss = map2 (curry HOLogic.mk_comp) fmaps gcorecs;
val ABfs = (As ~~ Bs) ~~ fs;
fun mk_map_o_corec_arg corec_argB_T g =
let
val T = range_type (fastype_of g);
val U = range_type corec_argB_T;
in
if T = U then
g
else
HOLogic.mk_comp (build_map lthy2 [] [] (the o AList.lookup (op =) ABfs) (T, U), g)
end;
fun mk_map_o_corec_rhs corecx =
let val args = map2 (mk_map_o_corec_arg o B_ify_T) corec_arg_Ts gs in
Term.list_comb (B_ify corecx, args)
end;
val map_o_corec_rhss = map mk_map_o_corec_rhs corecs;
val map_o_corec_goals =
map2 (fold_rev (fold_rev Logic.all) [fs, gs] o HOLogic.mk_Trueprop oo
curry HOLogic.mk_eq) map_o_corec_lhss map_o_corec_rhss;
val map_o_corec_thms =
@{map 3} (fn goal => fn corec_def => fn dtor_map_o_corec =>
Goal.prove_sorry lthy2 [] [] goal (fn {context = ctxt, ...} =>
mk_co_rec_o_map_tac ctxt corec_def pre_map_defs live_nesting_map_ident0s
abs_inverses dtor_map_o_corec)
|> Thm.close_derivation \<^here>)
map_o_corec_goals corec_defs xtor_co_rec_o_maps;
in
map single map_o_corec_thms
end;
fun derive_note_coinduct_corecs_thms_for_types
((((map_thmss, map_disc_iffss, map_selsss, rel_injectss, rel_distinctss, rel_selss,
rel_intross, rel_casess, pred_injectss, set_thmss, set_selsssss, set_introsssss,
set_casess, ctr_transferss, case_transferss, disc_transferss, sel_transferss),
(_, _, ctor_iff_dtors, ctr_defss, ctr_sugars)),
(corecs, corec_defs)), lthy) =
let
val (([(coinduct_thms, coinduct_thm), (coinduct_strong_thms, coinduct_strong_thm)],
(coinduct_attrs, common_coinduct_attrs)),
corec_thmss, corec_disc_thmss,
(corec_disc_iff_thmss, corec_disc_iff_attrs), (corec_sel_thmsss, corec_sel_attrs)) =
derive_coinduct_corecs_thms_for_types lthy pre_bnfs (the corecs_args_types) xtor_co_induct
dtor_injects dtor_ctors xtor_co_rec_thms live_nesting_bnfs fpTs Cs Xs ctrXs_Tsss kss mss
ns abs_inverses abs_inverses I ctr_defss ctr_sugars corecs corec_defs;
fun distinct_prems ctxt thm =
Goal.prove (*no sorry*) ctxt [] []
(thm |> Thm.prop_of |> Logic.strip_horn |>> distinct (op aconv) |> Logic.list_implies)
(fn _ => HEADGOAL (cut_tac thm THEN' assume_tac ctxt) THEN ALLGOALS eq_assume_tac);
fun eq_ifIN _ [thm] = thm
| eq_ifIN ctxt (thm :: thms) =
distinct_prems ctxt (@{thm eq_ifI} OF
(map (unfold_thms ctxt @{thms atomize_imp[of _ "t = u" for t u]})
[thm, eq_ifIN ctxt thms]));
val corec_code_thms = map (eq_ifIN lthy) corec_thmss;
val corec_sel_thmss = map flat corec_sel_thmsss;
- val coinduct_type_attr = Attrib.internal o K o Induct.coinduct_type;
- val coinduct_pred_attr = Attrib.internal o K o Induct.coinduct_pred;
+ val coinduct_type_attr = Attrib.internal \<^here> o K o Induct.coinduct_type;
+ val coinduct_pred_attr = Attrib.internal \<^here> o K o Induct.coinduct_pred;
val flat_corec_thms = append oo append;
val corec_transfer_thmss = map single (derive_corec_transfer_thms lthy corecs corec_defs);
val ((rel_coinduct_thmss, common_rel_coinduct_thms),
(rel_coinduct_attrs, common_rel_coinduct_attrs)) =
if live = 0 then
((replicate nn [], []), ([], []))
else
let
val ((rel_coinduct_thms, common_rel_coinduct_thm),
(rel_coinduct_attrs, common_rel_coinduct_attrs)) =
derive_rel_coinduct_thms_for_types lthy nn fpTs ns As Bs mss ctr_sugars abs_inverses
abs_injects ctor_injects dtor_ctors pre_rel_defs ctr_defss xtor_rel_co_induct
live_nesting_rel_eqs;
in
((map single rel_coinduct_thms, single common_rel_coinduct_thm),
(rel_coinduct_attrs, common_rel_coinduct_attrs))
end;
val map_o_corec_thmss = derive_map_o_corec_thmss lthy lthy corecs corec_defs;
val (set_induct_thms, set_induct_attrss) =
derive_set_induct_thms_for_types lthy nn fpTs (map #ctrs ctr_sugars)
(map (map (mk_set As)) (map sets_of_bnf fp_bnfs)) dtor_set_inducts
(map #exhaust ctr_sugars) (flat pre_set_defss) (flat ctr_defss) dtor_ctors abs_inverses
|> split_list;
val simp_thmss =
@{map 6} mk_simp_thms ctr_sugars
(@{map 3} flat_corec_thms corec_disc_thmss corec_disc_iff_thmss corec_sel_thmss)
map_thmss rel_injectss rel_distinctss set_thmss;
val common_notes =
(set_inductN, set_induct_thms, nth set_induct_attrss) ::
(if nn > 1 then
[(coinductN, [coinduct_thm], K common_coinduct_attrs),
(coinduct_strongN, [coinduct_strong_thm], K common_coinduct_attrs),
(rel_coinductN, common_rel_coinduct_thms, K common_rel_coinduct_attrs)]
else [])
|> massage_simple_notes fp_common_name;
val notes =
[(coinductN, map single coinduct_thms,
fn T_name => coinduct_attrs @ [coinduct_type_attr T_name]),
(coinduct_strongN, map single coinduct_strong_thms, K coinduct_attrs),
(corecN, corec_thmss, K []),
(corec_codeN, map single corec_code_thms, K (nitpicksimp_attrs)),
(corec_discN, corec_disc_thmss, K []),
(corec_disc_iffN, corec_disc_iff_thmss, K corec_disc_iff_attrs),
(corec_selN, corec_sel_thmss, K corec_sel_attrs),
(corec_transferN, corec_transfer_thmss, K []),
(map_o_corecN, map_o_corec_thmss, K []),
(rel_coinductN, rel_coinduct_thmss, K (rel_coinduct_attrs @ [coinduct_pred_attr ""])),
(simpsN, simp_thmss, K [])]
|> massage_multi_notes fp_b_names fpTs;
in
lthy
|> fold (Spec_Rules.add Binding.empty Spec_Rules.equational corecs)
[flat corec_sel_thmss, flat corec_thmss]
|> plugins code_plugin ? Code.declare_default_eqns (map (rpair true) corec_code_thms)
|> Local_Theory.notes (common_notes @ notes)
|-> interpret_bnfs_register_fp_sugars plugins fpTs fpBTs Xs Greatest_FP pre_bnfs absT_infos
fp_nesting_bnfs live_nesting_bnfs fp_res ctrXs_Tsss ctor_iff_dtors ctr_defss ctr_sugars
corecs corec_defs map_thmss [coinduct_thm, coinduct_strong_thm]
(transpose [coinduct_thms, coinduct_strong_thms]) corec_thmss corec_disc_thmss
corec_sel_thmsss rel_injectss rel_distinctss map_disc_iffss map_selsss rel_selss
rel_intross rel_casess pred_injectss set_thmss set_selsssss set_introsssss set_casess
ctr_transferss case_transferss disc_transferss sel_transferss corec_disc_iff_thmss
(map single corec_code_thms) corec_transfer_thmss common_rel_coinduct_thms
rel_coinduct_thmss set_induct_thms (replicate nn (if nn = 1 then set_induct_thms else []))
map_o_corec_thmss
end;
val lthy = lthy
|> live > 0 ? fold2 (fn Type (s, _) => fn bnf => register_bnf_raw s bnf) fpTs fp_bnfs
|> @{fold_map 29} define_ctrs_dtrs_for_type_etc fp_bnfs fp_bs fpTs Cs Es ctors dtors
xtor_co_recs ctor_dtors dtor_ctors ctor_injects pre_map_defs pre_set_defss pre_rel_defs
xtor_maps xtor_setss xtor_rels ns kss mss abss abs_injects type_definitions ctr_bindingss
ctr_mixfixess ctr_Tsss disc_bindingss sel_bindingsss raw_sel_default_eqss
|> wrap_ctrs_derive_map_set_rel_pred_thms_define_co_rec_for_types
|> case_fp fp derive_note_induct_recs_thms_for_types
derive_note_coinduct_corecs_thms_for_types;
val timer = time (timer ("Constructors, discriminators, selectors, etc., for the new " ^
co_prefix fp ^ "datatype"));
in
lthy
end;
fun co_datatypes fp = define_co_datatypes (K I) (K I) (K I) (K I) fp;
fun co_datatype_cmd fp construct_fp options lthy =
define_co_datatypes Plugin_Name.make_filter Typedecl.read_constraint Syntax.parse_typ
Syntax.parse_term fp construct_fp options lthy
handle EMPTY_DATATYPE s => error ("Cannot define empty datatype " ^ quote s);
val parse_ctr_arg =
\<^keyword>\<open>(\<close> |-- parse_binding_colon -- Parse.typ --| \<^keyword>\<open>)\<close>
|| Parse.typ >> pair Binding.empty;
val parse_ctr_specs =
Parse.enum1 "|" (parse_ctr_spec Parse.binding parse_ctr_arg -- Parse.opt_mixfix);
val parse_spec =
parse_type_args_named_constrained -- Parse.binding -- Parse.opt_mixfix --
(\<^keyword>\<open>=\<close> |-- parse_ctr_specs) -- parse_map_rel_pred_bindings -- parse_sel_default_eqs;
val parse_co_datatype = parse_ctr_options -- Parse.and_list1 parse_spec;
fun parse_co_datatype_cmd fp construct_fp =
parse_co_datatype >> co_datatype_cmd fp construct_fp;
end;
diff --git a/src/HOL/Tools/BNF/bnf_fp_n2m_sugar.ML b/src/HOL/Tools/BNF/bnf_fp_n2m_sugar.ML
--- a/src/HOL/Tools/BNF/bnf_fp_n2m_sugar.ML
+++ b/src/HOL/Tools/BNF/bnf_fp_n2m_sugar.ML
@@ -1,519 +1,519 @@
(* Title: HOL/Tools/BNF/bnf_fp_n2m_sugar.ML
Author: Jasmin Blanchette, TU Muenchen
Copyright 2013
Sugared flattening of nested to mutual (co)recursion.
*)
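(* Illustration (added sketch, not part of the original sources): "nested to mutual"
flattening recasts (co)recursion through an already registered BNF as mutual (co)recursion.
For a hypothetical specification such as
  datatype 'a tree = Node 'a "'a tree list"
a primrec over 'a tree that descends into the list argument is treated internally as a
recursion over two mutually dependent fixpoints, one for 'a tree and one for "'a tree list";
mutualize_fp_sugars below constructs the corresponding sugar. *)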
signature BNF_FP_N2M_SUGAR =
sig
val unfold_lets_splits: term -> term
val unfold_splits_lets: term -> term
val dest_map: Proof.context -> string -> term -> term * term list
val dest_pred: Proof.context -> string -> term -> term * term list
val mutualize_fp_sugars: (string -> bool) -> BNF_Util.fp_kind -> int list -> binding list ->
typ list -> term list -> term list list list list -> BNF_FP_Def_Sugar.fp_sugar list ->
local_theory ->
(BNF_FP_Def_Sugar.fp_sugar list
* (BNF_FP_Def_Sugar.lfp_sugar_thms option * BNF_FP_Def_Sugar.gfp_sugar_thms option))
* local_theory
val nested_to_mutual_fps: (string -> bool) -> BNF_Util.fp_kind -> binding list -> typ list ->
term list -> (term * term list list) list list -> local_theory ->
(typ list * int list * BNF_FP_Def_Sugar.fp_sugar list
* (BNF_FP_Def_Sugar.lfp_sugar_thms option * BNF_FP_Def_Sugar.gfp_sugar_thms option))
* local_theory
end;
structure BNF_FP_N2M_Sugar : BNF_FP_N2M_SUGAR =
struct
open Ctr_Sugar
open BNF_Util
open BNF_Def
open BNF_Comp
open BNF_FP_Util
open BNF_FP_Def_Sugar
open BNF_FP_N2M
val n2mN = "n2m_";
type n2m_sugar = fp_sugar list * (lfp_sugar_thms option * gfp_sugar_thms option);
structure Data = Generic_Data
(
type T = n2m_sugar Typtab.table;
val empty = Typtab.empty;
fun merge data : T = Typtab.merge (K true) data;
);
fun morph_n2m_sugar phi (fp_sugars, (lfp_sugar_thms_opt, gfp_sugar_thms_opt)) =
(map (morph_fp_sugar phi) fp_sugars,
(Option.map (morph_lfp_sugar_thms phi) lfp_sugar_thms_opt,
Option.map (morph_gfp_sugar_thms phi) gfp_sugar_thms_opt));
val transfer_n2m_sugar = morph_n2m_sugar o Morphism.transfer_morphism;
fun n2m_sugar_of ctxt =
Typtab.lookup (Data.get (Context.Proof ctxt))
#> Option.map (transfer_n2m_sugar (Proof_Context.theory_of ctxt));
fun register_n2m_sugar key n2m_sugar =
- Local_Theory.declaration {syntax = false, pervasive = false}
+ Local_Theory.declaration {syntax = false, pervasive = false, pos = \<^here>}
(fn phi => Data.map (Typtab.update (key, morph_n2m_sugar phi n2m_sugar)));
fun unfold_lets_splits (Const (\<^const_name>\<open>Let\<close>, _) $ t $ u) =
unfold_lets_splits (betapply (unfold_splits_lets u, t))
| unfold_lets_splits (t $ u) = betapply (unfold_lets_splits t, unfold_lets_splits u)
| unfold_lets_splits (Abs (s, T, t)) = Abs (s, T, unfold_lets_splits t)
| unfold_lets_splits t = t
and unfold_splits_lets ((t as Const (\<^const_name>\<open>case_prod\<close>, _)) $ u) =
(case unfold_splits_lets u of
u' as Abs (s1, T1, Abs (s2, T2, _)) =>
let val v = Var ((s1 ^ s2, Term.maxidx_of_term u' + 1), HOLogic.mk_prodT (T1, T2)) in
lambda v (incr_boundvars 1 (betapplys (u', [HOLogic.mk_fst v, HOLogic.mk_snd v])))
end
| _ => t $ unfold_lets_splits u)
| unfold_splits_lets (t as Const (\<^const_name>\<open>Let\<close>, _) $ _ $ _) = unfold_lets_splits t
| unfold_splits_lets (t $ u) = betapply (unfold_splits_lets t, unfold_lets_splits u)
| unfold_splits_lets (Abs (s, T, t)) = Abs (s, T, unfold_splits_lets t)
| unfold_splits_lets t = unfold_lets_splits t;
fun dest_either_map_or_pred map_or_pred_of_bnf ctxt T_name call =
let
val bnf = the (bnf_of ctxt T_name);
val const0 = map_or_pred_of_bnf bnf;
val live = live_of_bnf bnf;
val (f_Ts, _) = strip_typeN live (fastype_of const0);
val fs = map_index (fn (i, T) => Var (("f", i), T)) f_Ts;
val pat = betapplys (const0, fs);
val (_, tenv) = fo_match ctxt call pat;
in
(const0, Vartab.fold_rev (fn (_, (_, f)) => cons f) tenv [])
end;
val dest_map = dest_either_map_or_pred map_of_bnf;
val dest_pred = dest_either_map_or_pred pred_of_bnf;
fun dest_map_or_pred ctxt T_name call =
(case try (dest_map ctxt T_name) call of
SOME res => res
| NONE => dest_pred ctxt T_name call);
fun dest_abs_or_applied_map_or_pred _ _ (Abs (_, _, t)) = (Term.dummy, [t])
| dest_abs_or_applied_map_or_pred ctxt s (t1 $ _) = dest_map_or_pred ctxt s t1;
fun map_partition f xs =
fold_rev (fn x => fn (ys, (good, bad)) =>
case f x of SOME y => (y :: ys, (x :: good, bad)) | NONE => (ys, (good, x :: bad)))
xs ([], ([], []));
fun key_of_fp_eqs fp As fpTs Xs ctrXs_repTs =
Type (case_fp fp "l" "g", fpTs @ Xs @ ctrXs_repTs)
|> Term.map_atyps (fn T as TFree (_, S) =>
(case find_index (curry (op =) T) As of
~1 => T
| j => TFree ("'" ^ string_of_int j, S)));
fun get_indices callers t =
callers
|> map_index (fn (i, v) => if exists_subterm (equal v) t then SOME i else NONE)
|> map_filter I;
fun mutualize_fp_sugars plugins fp mutual_cliques bs fpTs callers callssss fp_sugars0 no_defs_lthy =
let
val thy = Proof_Context.theory_of no_defs_lthy;
val qsotm = quote o Syntax.string_of_term no_defs_lthy;
fun incompatible_calls ts =
error ("Incompatible " ^ co_prefix fp ^ "recursive calls: " ^ commas (map qsotm ts));
fun mutual_self_call caller t =
error ("Unsupported mutual self-call " ^ qsotm t ^ " from " ^ qsotm caller);
fun nested_self_call t =
error ("Unsupported nested self-call " ^ qsotm t);
val b_names = map Binding.name_of bs;
val fp_b_names = map base_name_of_typ fpTs;
val nn = length fpTs;
val kks = 0 upto nn - 1;
fun target_ctr_sugar_of_fp_sugar fpT ({T, fp_ctr_sugar = {ctr_sugar, ...}, ...} : fp_sugar) =
let
val rho = Vartab.fold (cons o apsnd snd) (Sign.typ_match thy (T, fpT) Vartab.empty) [];
val phi = Morphism.term_morphism "BNF" (Term.subst_TVars rho);
in
morph_ctr_sugar phi ctr_sugar
end;
val ctor_iff_dtors = map (#ctor_iff_dtor o #fp_ctr_sugar) fp_sugars0;
val ctr_defss = map (#ctr_defs o #fp_ctr_sugar) fp_sugars0;
val mapss = map (#map_thms o #fp_bnf_sugar) fp_sugars0;
val ctr_sugars = map2 target_ctr_sugar_of_fp_sugar fpTs fp_sugars0;
val ctrss = map #ctrs ctr_sugars;
val ctr_Tss = map (map fastype_of) ctrss;
val As' = fold (fold Term.add_tfreesT) ctr_Tss [];
val As = map TFree As';
val ((Cs, Xs), _) =
no_defs_lthy
|> fold Variable.declare_typ As
|> mk_TFrees nn
||>> variant_tfrees fp_b_names;
fun check_call_dead live_call call =
if null (get_indices callers call) then () else incompatible_calls [live_call, call];
fun freeze_fpTs_type_based_default (T as Type (s, Ts)) =
(case filter (curry (op =) T o snd) (map_index I fpTs) of
[(kk, _)] => nth Xs kk
| _ => Type (s, map freeze_fpTs_type_based_default Ts))
| freeze_fpTs_type_based_default T = T;
fun freeze_fpTs_mutual_call kk fpT calls T =
(case fold (union (op =)) (map (get_indices callers) calls) [] of
[] => if T = fpT then nth Xs kk else freeze_fpTs_type_based_default T
| [kk'] =>
if T = fpT andalso kk' <> kk then
mutual_self_call (nth callers kk)
(the (find_first (not o null o get_indices callers) calls))
else if T = nth fpTs kk' then
nth Xs kk'
else
freeze_fpTs_type_based_default T
| _ => incompatible_calls calls);
fun freeze_fpTs_map kk (fpT as Type (_, Ts')) (callss, (live_call :: _, dead_calls))
(Type (s, Ts)) =
if Ts' = Ts then
nested_self_call live_call
else
(List.app (check_call_dead live_call) dead_calls;
Type (s, map2 (freeze_fpTs_call kk fpT)
(flatten_type_args_of_bnf (the (bnf_of no_defs_lthy s)) [] (transpose callss)) Ts))
and freeze_fpTs_call kk fpT calls (T as Type (s, _)) =
(case map_partition (try (snd o dest_map no_defs_lthy s)) calls of
([], _) =>
(case map_partition (try (snd o dest_abs_or_applied_map_or_pred no_defs_lthy s)) calls of
([], _) => freeze_fpTs_mutual_call kk fpT calls T
| callsp => freeze_fpTs_map kk fpT callsp T)
| callsp => freeze_fpTs_map kk fpT callsp T)
| freeze_fpTs_call _ _ _ T = T;
val ctr_Tsss = map (map binder_types) ctr_Tss;
val ctrXs_Tsss = @{map 4} (map2 o map2 oo freeze_fpTs_call) kks fpTs callssss ctr_Tsss;
val ctrXs_repTs = map mk_sumprodT_balanced ctrXs_Tsss;
val ns = map length ctr_Tsss;
val kss = map (fn n => 1 upto n) ns;
val mss = map (map length) ctr_Tsss;
val key = key_of_fp_eqs fp As fpTs Xs ctrXs_repTs;
in
(case n2m_sugar_of no_defs_lthy key of
SOME n2m_sugar => (n2m_sugar, no_defs_lthy)
| NONE =>
let
val base_fp_names = Name.variant_list [] fp_b_names;
val fp_bs = map2 (fn b_name => fn base_fp_name =>
Binding.qualify true b_name (Binding.name (n2mN ^ base_fp_name)))
b_names base_fp_names;
val Type (s, Us) :: _ = fpTs;
val killed_As' =
(case bnf_of no_defs_lthy s of
NONE => As'
| SOME bnf =>
let
val Type (_, Ts) = T_of_bnf bnf;
val deads = deads_of_bnf bnf;
val dead_Us =
map_filter (fn (T, U) => if member (op =) deads T then SOME U else NONE) (Ts ~~ Us);
in fold Term.add_tfreesT dead_Us [] end);
val fp_absT_infos = map #absT_info fp_sugars0;
val indexed_fp_ress = map (apsnd #fp_res o `(#fp_res_index)) fp_sugars0;
val (((pre_bnfs, absT_infos), _), (fp_res as {xtor_co_recs = xtor_co_recs0, xtor_co_induct,
dtor_injects, dtor_ctors, xtor_co_rec_thms, ...}, lthy)) =
fixpoint_bnf false I (construct_mutualized_fp fp mutual_cliques fpTs indexed_fp_ress)
fp_bs As' killed_As' (map dest_TFree Xs) ctrXs_repTs empty_comp_cache no_defs_lthy;
val time = time lthy;
val timer = time (Timer.startRealTimer ());
val fp_abs_inverses = map #abs_inverse fp_absT_infos;
val fp_type_definitions = map #type_definition fp_absT_infos;
val abss = map #abs absT_infos;
val reps = map #rep absT_infos;
val absTs = map #absT absT_infos;
val repTs = map #repT absT_infos;
val abs_inverses = map #abs_inverse absT_infos;
val fp_nesting_bnfs = nesting_bnfs lthy ctrXs_Tsss Xs;
val live_nesting_bnfs = nesting_bnfs lthy ctrXs_Tsss As;
val (xtor_co_recs, recs_args_types, corecs_args_types) =
mk_co_recs_prelims lthy fp ctr_Tsss fpTs Cs absTs repTs ns mss xtor_co_recs0;
fun mk_binding b pre = Binding.prefix_name (pre ^ "_") b;
val ((co_recs, co_rec_defs), lthy) =
@{fold_map 2} (fn b =>
if fp = Least_FP then define_rec (the recs_args_types) (mk_binding b) fpTs Cs reps
else define_corec (the corecs_args_types) (mk_binding b) fpTs Cs abss)
fp_bs xtor_co_recs lthy
|>> split_list;
val timer = time (timer ("High-level " ^ co_prefix fp ^ "recursors"));
val ((common_co_inducts, co_inductss', co_rec_thmss, co_rec_disc_thmss, co_rec_sel_thmsss),
fp_sugar_thms) =
if fp = Least_FP then
derive_induct_recs_thms_for_types plugins pre_bnfs recs_args_types xtor_co_induct
xtor_co_rec_thms live_nesting_bnfs fp_nesting_bnfs fpTs Cs Xs ctrXs_Tsss
fp_abs_inverses fp_type_definitions abs_inverses ctrss ctr_defss co_recs co_rec_defs
lthy
|> `(fn ((inducts, induct, _), (rec_thmss, _)) =>
([induct], [inducts], rec_thmss, replicate nn [], replicate nn []))
||> (fn info => (SOME info, NONE))
else
derive_coinduct_corecs_thms_for_types lthy pre_bnfs (the corecs_args_types)
xtor_co_induct dtor_injects dtor_ctors xtor_co_rec_thms live_nesting_bnfs fpTs Cs Xs
ctrXs_Tsss kss mss ns fp_abs_inverses abs_inverses
(fn thm => thm RS @{thm vimage2p_refl}) ctr_defss ctr_sugars co_recs co_rec_defs
|> `(fn ((coinduct_thms_pairs, _), corec_thmss, corec_disc_thmss, _,
(corec_sel_thmsss, _)) =>
(map snd coinduct_thms_pairs, map fst coinduct_thms_pairs, corec_thmss,
corec_disc_thmss, corec_sel_thmsss))
||> (fn info => (NONE, SOME info));
val timer = time (timer ("High-level " ^ co_prefix fp ^ "induction principles"));
val names_lthy = lthy |> fold Variable.declare_typ (As @ Cs @ Xs);
val phi = Proof_Context.export_morphism names_lthy lthy;
fun mk_target_fp_sugar T X kk pre_bnf absT_info ctrXs_Tss ctor_iff_dtor ctr_defs ctr_sugar
co_rec co_rec_def map_thms co_inducts co_rec_thms co_rec_disc_thms co_rec_sel_thmss
({fp_ctr_sugar = {ctr_transfers, case_transfers, disc_transfers, sel_transfers, ...},
fp_bnf_sugar = {map_disc_iffs, map_selss, rel_injects, rel_distincts, rel_sels,
rel_intros, rel_cases, pred_injects, set_thms, set_selssss, set_introssss,
set_cases, ...},
fp_co_induct_sugar = SOME {co_rec_disc_iffs, co_rec_codes, co_rec_transfers,
co_rec_o_maps, common_rel_co_inducts, rel_co_inducts, common_set_inducts,
set_inducts, ...},
...} : fp_sugar) =
{T = T, BT = T (*wrong but harmless*), X = X, fp = fp, fp_res = fp_res, fp_res_index = kk,
pre_bnf = pre_bnf, fp_bnf = nth (#bnfs fp_res) kk, absT_info = absT_info,
fp_nesting_bnfs = fp_nesting_bnfs, live_nesting_bnfs = live_nesting_bnfs,
fp_ctr_sugar =
{ctrXs_Tss = ctrXs_Tss,
ctor_iff_dtor = ctor_iff_dtor,
ctr_defs = ctr_defs,
ctr_sugar = ctr_sugar,
ctr_transfers = ctr_transfers,
case_transfers = case_transfers,
disc_transfers = disc_transfers,
sel_transfers = sel_transfers},
fp_bnf_sugar =
{map_thms = map_thms,
map_disc_iffs = map_disc_iffs,
map_selss = map_selss,
rel_injects = rel_injects,
rel_distincts = rel_distincts,
rel_sels = rel_sels,
rel_intros = rel_intros,
rel_cases = rel_cases,
pred_injects = pred_injects,
set_thms = set_thms,
set_selssss = set_selssss,
set_introssss = set_introssss,
set_cases = set_cases},
fp_co_induct_sugar = SOME
{co_rec = co_rec,
common_co_inducts = common_co_inducts,
co_inducts = co_inducts,
co_rec_def = co_rec_def,
co_rec_thms = co_rec_thms,
co_rec_discs = co_rec_disc_thms,
co_rec_disc_iffs = co_rec_disc_iffs,
co_rec_selss = co_rec_sel_thmss,
co_rec_codes = co_rec_codes,
co_rec_transfers = co_rec_transfers,
co_rec_o_maps = co_rec_o_maps,
common_rel_co_inducts = common_rel_co_inducts,
rel_co_inducts = rel_co_inducts,
common_set_inducts = common_set_inducts,
set_inducts = set_inducts}}
|> morph_fp_sugar phi;
val target_fp_sugars =
@{map 17} mk_target_fp_sugar fpTs Xs kks pre_bnfs absT_infos ctrXs_Tsss ctor_iff_dtors
ctr_defss ctr_sugars co_recs co_rec_defs mapss (transpose co_inductss') co_rec_thmss
co_rec_disc_thmss co_rec_sel_thmsss fp_sugars0;
val n2m_sugar = (target_fp_sugars, fp_sugar_thms);
in
(n2m_sugar, lthy |> register_n2m_sugar key n2m_sugar)
end)
end;
fun indexify_callsss ctrs callsss =
let
fun indexify_ctr ctr =
(case AList.lookup Term.aconv_untyped callsss ctr of
NONE => replicate (num_binder_types (fastype_of ctr)) []
| SOME callss => map (map (Envir.beta_eta_contract o unfold_lets_splits)) callss);
in
map indexify_ctr ctrs
end;
fun retypargs tyargs (Type (s, _)) = Type (s, tyargs);
fun fold_subtype_pairs f (T as Type (s, Ts), U as Type (s', Us)) =
f (T, U) #> (if s = s' then fold (fold_subtype_pairs f) (Ts ~~ Us) else I)
| fold_subtype_pairs f TU = f TU;
val impossible_caller = Bound ~1;
fun nested_to_mutual_fps plugins fp actual_bs actual_Ts actual_callers actual_callssss0 lthy =
let
val qsoty = quote o Syntax.string_of_typ lthy;
val qsotys = space_implode " or " o map qsoty;
fun not_co_datatype0 T = error (qsoty T ^ " is not a " ^ co_prefix fp ^ "datatype");
fun not_co_datatype (T as Type (s, _)) =
if fp = Least_FP andalso
is_some (Old_Datatype_Data.get_info (Proof_Context.theory_of lthy) s) then
error (qsoty T ^ " is an old-style datatype")
else
not_co_datatype0 T
| not_co_datatype T = not_co_datatype0 T;
fun not_mutually_nested_rec Ts1 Ts2 =
error (qsotys Ts1 ^ " is neither mutually " ^ co_prefix fp ^ "recursive with " ^ qsotys Ts2 ^
" nor nested " ^ co_prefix fp ^ "recursive through " ^
(if Ts1 = Ts2 andalso length Ts1 = 1 then "itself" else qsotys Ts2));
val sorted_actual_Ts =
sort (prod_ord int_ord Term_Ord.typ_ord o apply2 (`Term.size_of_typ)) actual_Ts;
fun the_ctrs_of (Type (s, Ts)) = map (mk_ctr Ts) (#ctrs (the (ctr_sugar_of lthy s)));
fun gen_rhss_in gen_Ts rho (subTs as Type (_, sub_tyargs) :: _) =
let
fun maybe_insert (T, Type (_, gen_tyargs)) =
member (op =) subTs T ? insert (op =) gen_tyargs
| maybe_insert _ = I;
val gen_ctrs = maps the_ctrs_of gen_Ts;
val gen_ctr_Ts = maps (binder_types o fastype_of) gen_ctrs;
val ctr_Ts = map (Term.typ_subst_atomic rho) gen_ctr_Ts;
in
(case fold (fold_subtype_pairs maybe_insert) (ctr_Ts ~~ gen_ctr_Ts) [] of
[] => [map (typ_subst_nonatomic (map swap rho)) sub_tyargs]
| gen_tyargss => gen_tyargss)
end;
fun the_fp_sugar_of (T as Type (T_name, _)) =
(case fp_sugar_of lthy T_name of
SOME (fp_sugar as {fp = fp', fp_co_induct_sugar = SOME _, ...}) =>
if fp = fp' then fp_sugar else not_co_datatype T
| _ => not_co_datatype T);
fun gather_types _ _ rev_seens gen_seen [] = (rev rev_seens, gen_seen)
| gather_types lthy rho rev_seens gen_seen ((T as Type (_, tyargs)) :: Ts) =
let
val {T = Type (_, tyargs0), fp_res = {Ts = mutual_Ts0, ...}, ...} = the_fp_sugar_of T;
val mutual_Ts = map (retypargs tyargs) mutual_Ts0;
val rev_seen = flat rev_seens;
val _ = null rev_seens orelse exists (exists_strict_subtype_in rev_seen) mutual_Ts orelse
not_mutually_nested_rec mutual_Ts rev_seen;
fun fresh_tyargs () =
let
val (unsorted_gen_tyargs, lthy') =
variant_tfrees (replicate (length tyargs) "z") lthy
|>> map Logic.varifyT_global;
val gen_tyargs =
map2 (resort_tfree_or_tvar o snd o dest_TFree_or_TVar) tyargs0 unsorted_gen_tyargs;
val rho' = (gen_tyargs ~~ tyargs) @ rho;
in
(rho', gen_tyargs, gen_seen, lthy')
end;
val (rho', gen_tyargs, gen_seen', lthy') =
if exists (exists_subtype_in (flat rev_seens)) mutual_Ts then
(case gen_rhss_in gen_seen rho mutual_Ts of
[] => fresh_tyargs ()
| gen_tyargs :: gen_tyargss_tl =>
let
val unify_pairs = split_list (maps (curry (op ~~) gen_tyargs) gen_tyargss_tl);
val mgu = Type.raw_unifys unify_pairs Vartab.empty;
val gen_tyargs' = map (Envir.norm_type mgu) gen_tyargs;
val gen_seen' = map (Envir.norm_type mgu) gen_seen;
in
(rho, gen_tyargs', gen_seen', lthy)
end)
else
fresh_tyargs ();
val gen_mutual_Ts = map (retypargs gen_tyargs) mutual_Ts0;
val other_mutual_Ts = remove1 (op =) T mutual_Ts;
val Ts' = fold (remove1 (op =)) other_mutual_Ts Ts;
in
gather_types lthy' rho' (mutual_Ts :: rev_seens) (gen_seen' @ gen_mutual_Ts) Ts'
end
| gather_types _ _ _ _ (T :: _) = not_co_datatype T;
val (perm_Tss, perm_gen_Ts) = gather_types lthy [] [] [] sorted_actual_Ts;
val (perm_mutual_cliques, perm_Ts) =
split_list (flat (map_index (fn (i, perm_Ts) => map (pair i) perm_Ts) perm_Tss));
val perm_frozen_gen_Ts = map Logic.unvarifyT_global perm_gen_Ts;
val missing_Ts = fold (remove1 (op =)) actual_Ts perm_Ts;
val Ts = actual_Ts @ missing_Ts;
val nn = length Ts;
val kks = 0 upto nn - 1;
val callssss0 = pad_list [] nn actual_callssss0;
val common_name = mk_common_name (map Binding.name_of actual_bs);
val bs = pad_list (Binding.name common_name) nn actual_bs;
val callers = pad_list impossible_caller nn actual_callers;
fun permute xs = permute_like (op =) Ts perm_Ts xs;
fun unpermute perm_xs = permute_like (op =) perm_Ts Ts perm_xs;
(* The assignment of callers to mutual cliques is incorrect in general. This code would need to
inspect the actual calls to discover the correct cliques in the presence of type duplicates.
However, the naive scheme implemented here supports "prim(co)rec" specifications following a
reasonable ordering of the duplicates (e.g., keeping the cliques together). *)
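(* Example (illustrative assumption, not from the original sources): if the same codatatype
occurs twice among the targets, keeping its two occurrences adjacent lets the index-based
permutation below assign both callers to one clique, whereas interleaving them with other
types may split the clique incorrectly. *)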
val perm_bs = permute bs;
val perm_callers = permute callers;
val perm_kks = permute kks;
val perm_callssss0 = permute callssss0;
val perm_fp_sugars0 = map (the o fp_sugar_of lthy o fst o dest_Type) perm_Ts;
val perm_callssss =
map2 (indexify_callsss o #ctrs o #ctr_sugar o #fp_ctr_sugar) perm_fp_sugars0 perm_callssss0;
val ((perm_fp_sugars, fp_sugar_thms), lthy) =
if length perm_Tss = 1 then
((perm_fp_sugars0, (NONE, NONE)), lthy)
else
mutualize_fp_sugars plugins fp perm_mutual_cliques perm_bs perm_frozen_gen_Ts perm_callers
perm_callssss perm_fp_sugars0 lthy;
val fp_sugars = unpermute perm_fp_sugars;
in
((missing_Ts, perm_kks, fp_sugars, fp_sugar_thms), lthy)
end;
end;
diff --git a/src/HOL/Tools/BNF/bnf_gfp_grec.ML b/src/HOL/Tools/BNF/bnf_gfp_grec.ML
--- a/src/HOL/Tools/BNF/bnf_gfp_grec.ML
+++ b/src/HOL/Tools/BNF/bnf_gfp_grec.ML
@@ -1,3228 +1,3228 @@
(* Title: HOL/Tools/BNF/bnf_gfp_grec.ML
Author: Jasmin Blanchette, Inria, LORIA, MPII
Author: Aymeric Bouzy, Ecole polytechnique
Author: Dmitriy Traytel, ETH Zürich
Copyright 2015, 2016
Generalized corecursor construction.
*)
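(* Background note (added sketch, assuming the standard corecursion-up-to setting): the
generalized corecursor built in this module underlies the "corec" command and lets
corecursive calls occur under registered "friendly" operations, not only under constructors.
A hypothetical example, assuming a stream codatatype with constructor ## and a previously
registered friend \<oplus> for pointwise addition:
  corec fib :: "nat stream" where
    "fib = 0 ## (fib \<oplus> (1 ## fib))"
Here every corecursive occurrence of fib is guarded by ## or \<oplus>. *)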
signature BNF_GFP_GREC =
sig
val Tsubst: typ -> typ -> typ -> typ
val substT: typ -> typ -> term -> term
val freeze_types: Proof.context -> (indexname * sort) list -> typ list -> typ list
val dummify_atomic_types: term -> term
val define_const: bool -> binding -> int -> string -> term -> local_theory ->
(term * thm) * local_theory
type buffer =
{Oper: term,
VLeaf: term,
CLeaf: term,
ctr_wrapper: term,
friends: (typ * term) Symtab.table}
val map_buffer: (term -> term) -> buffer -> buffer
val specialize_buffer_types: buffer -> buffer
type dtor_coinduct_info =
{dtor_coinduct: thm,
cong_def: thm,
cong_locale: thm,
cong_base: thm,
cong_refl: thm,
cong_sym: thm,
cong_trans: thm,
cong_alg_intros: thm list}
type corec_info =
{fp_b: binding,
version: int,
fpT: typ,
Y: typ,
Z: typ,
friend_names: string list,
sig_fp_sugars: BNF_FP_Def_Sugar.fp_sugar list,
ssig_fp_sugar: BNF_FP_Def_Sugar.fp_sugar,
Lam: term,
proto_sctr: term,
flat: term,
eval_core: term,
eval: term,
algLam: term,
corecUU: term,
dtor_transfer: thm,
Lam_transfer: thm,
Lam_pointful_natural: thm,
proto_sctr_transfer: thm,
flat_simps: thm list,
eval_core_simps: thm list,
eval_thm: thm,
eval_simps: thm list,
all_algLam_algs: thm list,
algLam_thm: thm,
dtor_algLam: thm,
corecUU_thm: thm,
corecUU_unique: thm,
corecUU_transfer: thm,
buffer: buffer,
all_dead_k_bnfs: BNF_Def.bnf list,
Retr: term,
equivp_Retr: thm,
Retr_coinduct: thm,
dtor_coinduct_info: dtor_coinduct_info}
type friend_info =
{algrho: term,
dtor_algrho: thm,
algLam_algrho: thm}
val not_codatatype: Proof.context -> typ -> 'a
val mk_fp_binding: binding -> string -> binding
val bnf_kill_all_but: int -> BNF_Def.bnf -> local_theory -> BNF_Def.bnf * local_theory
val print_corec_infos: Proof.context -> unit
val has_no_corec_info: Proof.context -> string -> bool
val corec_info_of: typ -> local_theory -> corec_info * local_theory
val maybe_corec_info_of: Proof.context -> typ -> corec_info option
val corec_infos_of: Proof.context -> string -> corec_info list
val corec_infos_of_generic: Context.generic -> Symtab.key -> corec_info list
val prepare_friend_corec: string -> typ -> local_theory ->
(corec_info * binding * int * typ * typ * typ * typ * typ * BNF_Def.bnf * BNF_Def.bnf
* BNF_FP_Def_Sugar.fp_sugar * BNF_FP_Def_Sugar.fp_sugar * buffer) * local_theory
val register_friend_corec: string -> binding -> int -> typ -> typ -> typ -> BNF_Def.bnf ->
BNF_FP_Def_Sugar.fp_sugar -> BNF_FP_Def_Sugar.fp_sugar -> term -> term -> thm -> corec_info ->
local_theory -> friend_info * local_theory
end;
structure BNF_GFP_Grec : BNF_GFP_GREC =
struct
open Ctr_Sugar
open BNF_Util
open BNF_Def
open BNF_Comp
open BNF_FP_Util
open BNF_LFP
open BNF_FP_Def_Sugar
open BNF_LFP_Rec_Sugar
open BNF_GFP_Grec_Tactics
val algLamN = "algLam";
val algLam_algLamN = "algLam_algLam";
val algLam_algrhoN = "algLam_algrho";
val algrhoN = "algrho";
val CLeafN = "CLeaf";
val congN = "congclp";
val cong_alg_introsN = "cong_alg_intros";
val cong_localeN = "cong_locale";
val corecUUN = "corecUU";
val corecUU_transferN = "corecUU_transfer";
val corecUU_uniqueN = "corecUU_unique";
val cutSsigN = "cutSsig";
val dtor_algLamN = "dtor_algLam";
val dtor_algrhoN = "dtor_algrho";
val dtor_coinductN = "dtor_coinduct";
val dtor_transferN = "dtor_transfer";
val embLN = "embL";
val embLLN = "embLL";
val embLRN = "embLR";
val embL_pointful_naturalN = "embL_pointful_natural";
val embL_transferN = "embL_transfer";
val equivp_RetrN = "equivp_Retr";
val evalN = "eval";
val eval_coreN = "eval_core";
val eval_core_pointful_naturalN = "eval_core_pointful_natural";
val eval_core_transferN = "eval_core_transfer";
val eval_flatN = "eval_flat";
val eval_simpsN = "eval_simps";
val flatN = "flat";
val flat_pointful_naturalN = "flat_pointful_natural";
val flat_transferN = "flat_transfer";
val k_as_ssig_naturalN = "k_as_ssig_natural";
val k_as_ssig_transferN = "k_as_ssig_transfer";
val LamN = "Lam";
val Lam_transferN = "Lam_transfer";
val Lam_pointful_naturalN = "Lam_pointful_natural";
val OperN = "Oper";
val proto_sctrN = "proto_sctr";
val proto_sctr_pointful_naturalN = "proto_sctr_pointful_natural";
val proto_sctr_transferN = "proto_sctr_transfer";
val rho_transferN = "rho_transfer";
val Retr_coinductN = "Retr_coinduct";
val sctrN = "sctr";
val sctr_transferN = "sctr_transfer";
val sctr_pointful_naturalN = "sctr_pointful_natural";
val sigN = "sig";
val SigN = "Sig";
val Sig_pointful_naturalN = "Sig_pointful_natural";
val corecUN = "corecU";
val corecU_ctorN = "corecU_ctor";
val corecU_uniqueN = "corecU_unique";
val unsigN = "unsig";
val VLeafN = "VLeaf";
val s_prefix = "s"; (* transforms "sig" into "ssig" *)
fun not_codatatype ctxt T =
error ("Not a codatatype: " ^ Syntax.string_of_typ ctxt T);
fun mutual_codatatype () =
error ("Mutually corecursive codatatypes are not supported (try " ^
quote (#1 \<^command_keyword>\<open>primcorec\<close>) ^ " instead of " ^
quote (#1 \<^command_keyword>\<open>corec\<close>) ^ ")");
fun noncorecursive_codatatype () =
error ("Noncorecursive codatatypes are not supported (try " ^
quote (#1 \<^command_keyword>\<open>definition\<close>) ^ " instead of " ^
quote (#1 \<^command_keyword>\<open>corec\<close>) ^ ")");
fun singleton_codatatype ctxt =
error ("Singleton corecursive codatatypes are not supported (use " ^
quote (Syntax.string_of_typ ctxt \<^typ>\<open>unit\<close>) ^ " instead)");
fun merge_lists eq old1 old2 = (old1 |> subtract eq old2) @ old2;
fun add_type_namesT (Type (s, Ts)) = insert (op =) s #> fold add_type_namesT Ts
| add_type_namesT _ = I;
fun Tsubst Y T = Term.typ_subst_atomic [(Y, T)];
fun substT Y T = Term.subst_atomic_types [(Y, T)];
fun freeze_types ctxt except_tvars Ts =
let
val As = fold Term.add_tvarsT Ts [] |> subtract (op =) except_tvars;
val (Bs, _) = ctxt |> mk_TFrees' (map snd As);
in
map (Term.typ_subst_TVars (map fst As ~~ Bs)) Ts
end;
fun typ_unify_disjointly thy (T, T') =
if T = T' then
T
else
let
val tvars = Term.add_tvar_namesT T [];
val tvars' = Term.add_tvar_namesT T' [];
val maxidx' = maxidx_of_typ T';
val T = T |> exists (member (op =) tvars') tvars ? Logic.incr_tvar (maxidx' + 1);
val maxidx = Integer.max (maxidx_of_typ T) maxidx';
val (tyenv, _) = Sign.typ_unify thy (T, T') (Vartab.empty, maxidx);
in
Envir.subst_type tyenv T
end;
val dummify_atomic_types = Term.map_types (Term.map_atyps (K Term.dummyT));
fun mk_internal internal ctxt f =
if internal andalso not (Config.get ctxt bnf_internals) then f else I
fun mk_fp_binding fp_b pre = Binding.map_name (K pre) fp_b
|> Binding.qualify true (Binding.name_of fp_b);
fun mk_version_binding version = Binding.qualify false ("v" ^ string_of_int version);
fun mk_version_fp_binding internal ctxt =
mk_internal internal ctxt Binding.concealed ooo (mk_fp_binding oo mk_version_binding);
(*FIXME: get rid of ugly names when typedef and primrec respect qualification*)
fun mk_version_binding_ugly version = Binding.suffix_name ("_v" ^ string_of_int version);
fun mk_version_fp_binding_ugly internal ctxt version fp_b pre =
Binding.prefix_name (pre ^ "_") fp_b
|> mk_version_binding_ugly version
|> mk_internal internal ctxt Binding.concealed;
fun mk_mapN ctxt live_AsBs TA bnf =
let val TB = Term.typ_subst_atomic live_AsBs TA in
enforce_type ctxt (snd o strip_typeN (length live_AsBs)) (TA --> TB) (map_of_bnf bnf)
end;
fun mk_relN ctxt live_AsBs TA bnf =
let val TB = Term.typ_subst_atomic live_AsBs TA in
enforce_type ctxt (snd o strip_typeN (length live_AsBs)) (mk_pred2T TA TB) (rel_of_bnf bnf)
end;
fun mk_map1 ctxt Y Z = mk_mapN ctxt [(Y, Z)];
fun mk_rel1 ctxt Y Z = mk_relN ctxt [(Y, Z)];
fun define_const internal fp_b version name rhs lthy =
let
val b = mk_version_fp_binding internal lthy version fp_b name;
val ((free, (_, def_free)), (lthy, lthy_old)) = lthy
|> (snd o Local_Theory.begin_nested)
|> Local_Theory.define ((b, NoSyn), ((Thm.def_binding b |> Binding.concealed, []), rhs))
||> `Local_Theory.end_nested;
val phi = Proof_Context.export_morphism lthy_old lthy;
val const = Morphism.term phi free;
val const' = enforce_type lthy I (fastype_of free) const;
in
((const', Morphism.thm phi def_free), lthy)
end;
fun define_single_primrec b eqs lthy =
let
val (([free], [def_free], [simps_free]), (lthy, lthy_old)) = lthy
|> (snd o Local_Theory.begin_nested)
|> Local_Theory.map_background_naming (mk_internal true lthy Name_Space.concealed) (*TODO check*)
|> primrec false [] [(b, NONE, NoSyn)] (map (fn eq => ((Binding.empty_atts, eq), [], [])) eqs)
||> `Local_Theory.end_nested;
val phi = Proof_Context.export_morphism lthy_old lthy;
val const = Morphism.term phi free;
val const' = enforce_type lthy I (fastype_of free) const;
in
((const', Morphism.thm phi def_free, map (Morphism.thm phi) simps_free), lthy)
end;
type buffer =
{Oper: term,
VLeaf: term,
CLeaf: term,
ctr_wrapper: term,
friends: (typ * term) Symtab.table};
fun map_buffer f {Oper, VLeaf, CLeaf, ctr_wrapper, friends} =
{Oper = f Oper, VLeaf = f VLeaf, CLeaf = f CLeaf, ctr_wrapper = f ctr_wrapper,
friends = Symtab.map (K (apsnd f)) friends};
fun morph_buffer phi = map_buffer (Morphism.term phi);
fun specialize_buffer_types {Oper, VLeaf, CLeaf, ctr_wrapper, friends} =
let
val ssig_T as Type (_, Ts) = body_type (fastype_of VLeaf);
val Y = List.last Ts;
val ssigifyT = substT Y ssig_T;
in
{Oper = Oper, VLeaf = VLeaf, CLeaf = CLeaf, ctr_wrapper = ssigifyT ctr_wrapper,
friends = Symtab.map (K (apsnd ssigifyT)) friends}
end;
type dtor_coinduct_info =
{dtor_coinduct: thm,
cong_def: thm,
cong_locale: thm,
cong_base: thm,
cong_refl: thm,
cong_sym: thm,
cong_trans: thm,
cong_alg_intros: thm list};
fun map_dtor_coinduct_info f {dtor_coinduct, cong_def, cong_locale, cong_base, cong_refl, cong_sym,
cong_trans, cong_alg_intros} =
{dtor_coinduct = f dtor_coinduct, cong_def = f cong_def, cong_locale = f cong_locale,
cong_base = f cong_base, cong_refl = f cong_refl, cong_sym = f cong_sym,
cong_trans = f cong_trans, cong_alg_intros = map f cong_alg_intros};
fun morph_dtor_coinduct_info phi = map_dtor_coinduct_info (Morphism.thm phi);
type corec_ad =
{fpT: typ,
friend_names: string list};
fun morph_corec_ad phi {fpT, friend_names} =
{fpT = Morphism.typ phi fpT, friend_names = friend_names};
type corec_info =
{fp_b: binding,
version: int,
fpT: typ,
Y: typ,
Z: typ,
friend_names: string list,
sig_fp_sugars: fp_sugar list,
ssig_fp_sugar: fp_sugar,
Lam: term,
proto_sctr: term,
flat: term,
eval_core: term,
eval: term,
algLam: term,
corecUU: term,
dtor_transfer: thm,
Lam_transfer: thm,
Lam_pointful_natural: thm,
proto_sctr_transfer: thm,
flat_simps: thm list,
eval_core_simps: thm list,
eval_thm: thm,
eval_simps: thm list,
all_algLam_algs: thm list,
algLam_thm: thm,
dtor_algLam: thm,
corecUU_thm: thm,
corecUU_unique: thm,
corecUU_transfer: thm,
buffer: buffer,
all_dead_k_bnfs: bnf list,
Retr: term,
equivp_Retr: thm,
Retr_coinduct: thm,
dtor_coinduct_info: dtor_coinduct_info};
fun morph_corec_info phi
({fp_b, version, fpT, Y, Z, friend_names, sig_fp_sugars, ssig_fp_sugar, Lam, proto_sctr, flat,
eval_core, eval, algLam, corecUU, dtor_transfer, Lam_transfer, Lam_pointful_natural,
proto_sctr_transfer, flat_simps, eval_core_simps, eval_thm, eval_simps, all_algLam_algs,
algLam_thm, dtor_algLam, corecUU_thm, corecUU_unique, corecUU_transfer, buffer,
all_dead_k_bnfs, Retr, equivp_Retr, Retr_coinduct, dtor_coinduct_info} : corec_info) =
{fp_b = fp_b, version = version, fpT = Morphism.typ phi fpT, Y = Morphism.typ phi Y,
Z = Morphism.typ phi Z, friend_names = friend_names, sig_fp_sugars = sig_fp_sugars (*no morph*),
ssig_fp_sugar = ssig_fp_sugar (*no morph*), Lam = Morphism.term phi Lam,
proto_sctr = Morphism.term phi proto_sctr, flat = Morphism.term phi flat,
eval_core = Morphism.term phi eval_core, eval = Morphism.term phi eval,
algLam = Morphism.term phi algLam, corecUU = Morphism.term phi corecUU,
dtor_transfer = dtor_transfer, Lam_transfer = Morphism.thm phi Lam_transfer,
Lam_pointful_natural = Morphism.thm phi Lam_pointful_natural,
proto_sctr_transfer = Morphism.thm phi proto_sctr_transfer,
flat_simps = map (Morphism.thm phi) flat_simps,
eval_core_simps = map (Morphism.thm phi) eval_core_simps, eval_thm = Morphism.thm phi eval_thm,
eval_simps = map (Morphism.thm phi) eval_simps,
all_algLam_algs = map (Morphism.thm phi) all_algLam_algs,
algLam_thm = Morphism.thm phi algLam_thm, dtor_algLam = Morphism.thm phi dtor_algLam,
corecUU_thm = Morphism.thm phi corecUU_thm, corecUU_unique = Morphism.thm phi corecUU_unique,
corecUU_transfer = Morphism.thm phi corecUU_transfer, buffer = morph_buffer phi buffer,
all_dead_k_bnfs = map (morph_bnf phi) all_dead_k_bnfs, Retr = Morphism.term phi Retr,
equivp_Retr = Morphism.thm phi equivp_Retr, Retr_coinduct = Morphism.thm phi Retr_coinduct,
dtor_coinduct_info = morph_dtor_coinduct_info phi dtor_coinduct_info};
datatype ('a, 'b) expr =
Ad of 'a * (local_theory -> 'b * local_theory) |
Info of 'b;
fun is_Ad (Ad _) = true
| is_Ad _ = false;
fun is_Info (Info _) = true
| is_Info _ = false;
type corec_info_expr = (corec_ad, corec_info) expr;
fun morph_corec_info_expr phi (Ad (ad, f)) = Ad (morph_corec_ad phi ad, f)
| morph_corec_info_expr phi (Info info) = Info (morph_corec_info phi info);
val transfer_corec_info_expr = morph_corec_info_expr o Morphism.transfer_morphism;
type corec_data = int Symtab.table * corec_info_expr list Symtab.table list;
structure Data = Generic_Data
(
type T = corec_data;
val empty = (Symtab.empty, [Symtab.empty]);
fun merge ((version_tab1, info_tabs1), (version_tab2, info_tabs2)) : T =
(Symtab.join (K Int.max) (version_tab1, version_tab2), info_tabs1 @ info_tabs2);
);
fun corec_ad_of_expr (Ad (ad, _)) = ad
| corec_ad_of_expr (Info {fpT, friend_names, ...}) = {fpT = fpT, friend_names = friend_names};
fun corec_info_exprs_of_generic context fpT_name =
let
val thy = Context.theory_of context;
val info_tabs = snd (Data.get context);
in
maps (fn info_tab => these (Symtab.lookup info_tab fpT_name)) info_tabs
|> map (transfer_corec_info_expr thy)
end;
val corec_info_exprs_of = corec_info_exprs_of_generic o Context.Proof;
val keep_corec_infos = map_filter (fn Ad _ => NONE | Info info => SOME info);
val corec_infos_of_generic = keep_corec_infos oo corec_info_exprs_of_generic;
val corec_infos_of = keep_corec_infos oo corec_info_exprs_of;
fun str_of_corec_ad ctxt {fpT, friend_names} =
"[" ^ Syntax.string_of_typ ctxt fpT ^ "; " ^ commas friend_names ^ "]";
fun str_of_corec_info ctxt {fpT, version, friend_names, ...} =
"{" ^ Syntax.string_of_typ ctxt fpT ^ "; " ^ commas friend_names ^ "; v" ^ string_of_int version ^
"}";
fun str_of_corec_info_expr ctxt (Ad (ad, _)) = str_of_corec_ad ctxt ad
| str_of_corec_info_expr ctxt (Info info) = str_of_corec_info ctxt info;
fun print_corec_infos ctxt =
Symtab.fold (fn (fpT_name, exprs) => fn () =>
writeln (fpT_name ^ ":\n" ^
cat_lines (map (prefix " " o str_of_corec_info_expr ctxt) exprs)))
(the_single (snd (Data.get (Context.Proof ctxt)))) ();
val has_no_corec_info = null oo corec_info_exprs_of;
fun get_name_next_version_of fpT_name ctxt =
let
val (version_tab, info_tabs) = Data.get (Context.Theory (Proof_Context.theory_of ctxt));
val fp_base = Long_Name.base_name fpT_name;
val fp_b = Binding.name fp_base;
val version_tab' = Symtab.map_default (fp_base, ~1) (Integer.add 1) version_tab;
val SOME version = Symtab.lookup version_tab' fp_base;
val ctxt' = ctxt
|> Local_Theory.background_theory (Context.theory_map (Data.put (version_tab', info_tabs)));
in
((fp_b, version), ctxt')
end;
type friend_info =
{algrho: term,
dtor_algrho: thm,
algLam_algrho: thm};
fun morph_friend_info phi ({algrho, dtor_algrho, algLam_algrho} : friend_info) =
{algrho = Morphism.term phi algrho, dtor_algrho = Morphism.thm phi dtor_algrho,
algLam_algrho = Morphism.thm phi algLam_algrho};
fun checked_fp_sugar_of ctxt fpT_name =
let
val fp_sugar as {X, fp_res = {Ts = fpTs, ...}, fp_ctr_sugar = {ctrXs_Tss, ...}, ...} =
(case fp_sugar_of ctxt fpT_name of
SOME (fp_sugar as {fp = Greatest_FP, ...}) => fp_sugar
| _ => not_codatatype ctxt (Type (fpT_name, [] (*yuck*))));
val _ =
if length fpTs > 1 then
mutual_codatatype ()
else if not (exists (exists (Term.exists_subtype (curry (op =) X))) ctrXs_Tss) then
noncorecursive_codatatype ()
else if ctrXs_Tss = [[X]] then
singleton_codatatype ctxt
else
();
in
fp_sugar
end;
fun bnf_kill_all_but nn bnf lthy =
((empty_comp_cache, empty_unfolds), lthy)
|> kill_bnf I (live_of_bnf bnf - nn) bnf
||> snd;
fun bnf_with_deads_and_lives dead_Es live_As Y fpT T lthy =
let
val qsoty = quote o Syntax.string_of_typ lthy;
val unfreeze_fp = Tsubst Y fpT;
fun flatten_tyargs Ass =
map dest_TFree live_As
|> filter (fn T => exists (fn Ts => member (op =) Ts T) Ass);
val ((bnf, _), (_, lthy)) =
bnf_of_typ false Do_Inline I flatten_tyargs [Term.dest_TFree Y] (map Term.dest_TFree dead_Es)
T ((empty_comp_cache, empty_unfolds), lthy)
handle BAD_DEAD (Y, Y_backdrop) =>
(case Y_backdrop of
Type (bad_tc, _) =>
let
val T = qsoty (unfreeze_fp Y);
val T_backdrop = qsoty (unfreeze_fp Y_backdrop);
fun register_hint () =
"\nUse the " ^ quote (#1 \<^command_keyword>\<open>bnf\<close>) ^ " command to register " ^
quote bad_tc ^ " as a bounded natural functor to allow nested (co)recursion through \
\it";
in
if is_some (bnf_of lthy bad_tc) orelse is_some (fp_sugar_of lthy bad_tc) then
error ("Inadmissible occurrence of type " ^ T ^ " in type expression " ^
T_backdrop)
else
error ("Unsupported occurrence of type " ^ T ^ " via type constructor " ^
quote bad_tc ^ " in type expression " ^ T_backdrop ^ register_hint ())
end);
val phi =
Morphism.term_morphism "BNF" (Raw_Simplifier.rewrite_term (Proof_Context.theory_of lthy)
@{thms BNF_Composition.id_bnf_def} [])
$> Morphism.thm_morphism "BNF" (unfold_thms lthy @{thms BNF_Composition.id_bnf_def});
in
(morph_bnf phi bnf, lthy)
end;
fun define_sig_type fp_b version fp_alives Es Y rhsT lthy =
let
val T_b = mk_version_fp_binding_ugly true lthy version fp_b sigN;
val ctr_b = mk_version_fp_binding false lthy version fp_b SigN;
val sel_b = mk_version_fp_binding true lthy version fp_b unsigN;
val lthy = (snd o Local_Theory.begin_nested) lthy;
val T_name = Local_Theory.full_name lthy T_b;
val tyargs = map2 (fn alive => fn T =>
(if alive then SOME Binding.empty else NONE, (T, Type.sort_of_atyp T)))
(fp_alives @ [true]) (Es @ [Y]);
val ctr_specs = [(((Binding.empty, ctr_b), [(sel_b, rhsT)]), NoSyn)];
val spec = (((((tyargs, T_b), NoSyn), ctr_specs),
(Binding.empty, Binding.empty, Binding.empty)), []);
val plugins = Plugin_Name.make_filter lthy (K (curry (op =) transfer_plugin));
val discs_sels = true;
val lthy = lthy
|> Local_Theory.map_background_naming (mk_internal true lthy Name_Space.concealed) (*TODO check*)
|> with_typedef_threshold ~1
(co_datatypes Least_FP construct_lfp ((plugins, discs_sels), [spec]))
|> Local_Theory.end_nested;
val SOME fp_sugar = fp_sugar_of lthy T_name;
in
(fp_sugar, lthy)
end;
fun define_ssig_type fp_b version fp_alives Es Y fpT lthy =
let
val sig_T_b = mk_version_fp_binding_ugly true lthy version fp_b sigN;
val T_b = Binding.prefix_name s_prefix sig_T_b;
val Oper_b = mk_version_fp_binding false lthy version fp_b OperN;
val VLeaf_b = mk_version_fp_binding false lthy version fp_b VLeafN;
val CLeaf_b = mk_version_fp_binding false lthy version fp_b CLeafN;
val lthy = (snd o Local_Theory.begin_nested) lthy;
val sig_T_name = Local_Theory.full_name lthy sig_T_b;
val T_name = Long_Name.map_base_name (prefix s_prefix) sig_T_name;
val As = Es @ [Y];
val ssig_sig_T = Type (sig_T_name, Es @ [Type (T_name, As)]);
val tyargs = map2 (fn alive => fn T =>
(if alive then SOME Binding.empty else NONE, (T, Type.sort_of_atyp T)))
(fp_alives @ [true]) (Es @ [Y]);
val ctr_specs =
[(((Binding.empty, Oper_b), [(Binding.empty, ssig_sig_T)]), NoSyn),
(((Binding.empty, VLeaf_b), [(Binding.empty, Y)]), NoSyn),
(((Binding.empty, CLeaf_b), [(Binding.empty, fpT)]), NoSyn)];
val spec = (((((tyargs, T_b), NoSyn), ctr_specs),
(Binding.empty, Binding.empty, Binding.empty)), []);
val plugins = Plugin_Name.make_filter lthy (K (curry (op =) transfer_plugin));
val discs_sels = false;
val lthy = lthy
|> Local_Theory.map_background_naming (mk_internal true lthy Name_Space.concealed) (*TODO check*)
|> with_typedef_threshold ~1
(co_datatypes Least_FP construct_lfp ((plugins, discs_sels), [spec]))
|> Local_Theory.end_nested;
val SOME fp_sugar = fp_sugar_of lthy T_name;
in
(fp_sugar, lthy)
end;
fun embed_Sig ctxt Sig inl_or_r t =
Library.foldl1 HOLogic.mk_comp [Sig, inl_or_r, dummify_atomic_types t]
|> Syntax.check_term ctxt;
fun mk_ctr_wrapper_friends ctxt friend_name friend_T old_sig_T k_T Sig old_buffer =
let
val embed_Sig_inl = embed_Sig ctxt Sig (Inl_const old_sig_T k_T);
val ctr_wrapper = embed_Sig_inl (#ctr_wrapper old_buffer);
val friends = Symtab.map (K (apsnd embed_Sig_inl)) (#friends old_buffer)
|> Symtab.update_new (friend_name, (friend_T,
HOLogic.mk_comp (Sig, Inr_const old_sig_T k_T)));
in
(ctr_wrapper, friends)
end;
fun pre_type_of_ctor Y ctor =
let
val (fp_preT, fpT) = dest_funT (fastype_of ctor);
in
typ_subst_nonatomic [(fpT, Y)] fp_preT
end;
fun mk_k_as_ssig Z old_sig_T k_T ssig_T Sig dead_sig_map Oper VLeaf =
let
val inr' = Inr_const old_sig_T k_T;
val dead_sig_map' = substT Z ssig_T dead_sig_map;
in
Library.foldl1 HOLogic.mk_comp [Oper, dead_sig_map' $ VLeaf, Sig, inr']
end;
fun define_embL name fp_b version Y Z fpT old_sig_T old_ssig_T other_summand_T ssig_T Inl_or_r_const
dead_old_sig_map Sig old_Oper old_VLeaf old_CLeaf Oper VLeaf CLeaf lthy =
let
val embL_b = mk_version_fp_binding true lthy version fp_b name;
val old_ssig_old_sig_T = Tsubst Y old_ssig_T old_sig_T;
val ssig_old_sig_T = Tsubst Y ssig_T old_sig_T;
val ssig_other_summand_T = Tsubst Y ssig_T other_summand_T;
val sigx = Var (("s", 0), old_ssig_old_sig_T);
val x = Var (("x", 0), Y);
val j = Var (("j", 0), fpT);
val embL = Free (Binding.name_of embL_b, old_ssig_T --> ssig_T);
val dead_old_sig_map' = Term.subst_atomic_types [(Y, old_ssig_T), (Z, ssig_T)] dead_old_sig_map;
val Sig' = substT Y ssig_T Sig;
val inl' = Inl_or_r_const ssig_old_sig_T ssig_other_summand_T;
val Oper_eq = mk_Trueprop_eq (embL $ (old_Oper $ sigx),
Oper $ (Sig' $ (inl' $ (dead_old_sig_map' $ embL $ sigx))))
|> Logic.all sigx;
val VLeaf_eq = mk_Trueprop_eq (embL $ (old_VLeaf $ x), VLeaf $ x)
|> Logic.all x;
val CLeaf_eq = mk_Trueprop_eq (embL $ (old_CLeaf $ j), CLeaf $ j)
|> Logic.all j;
in
define_single_primrec embL_b [Oper_eq, VLeaf_eq, CLeaf_eq] lthy
end;
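(* Base-case definition of Lam: the composition
   unsig o sig_map (Oper o Sig o pre_map VLeaf o snd). *)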
fun define_Lam_base fp_b version Y Z preT ssig_T dead_pre_map Sig unsig dead_sig_map Oper VLeaf
lthy =
let
val YpreT = HOLogic.mk_prodT (Y, preT);
val snd' = snd_const YpreT;
val dead_pre_map' = substT Z ssig_T dead_pre_map;
val Sig' = substT Y ssig_T Sig;
val unsig' = substT Y ssig_T unsig;
val dead_sig_map' = Term.subst_atomic_types [(Y, YpreT), (Z, ssig_T)] dead_sig_map;
val rhs = HOLogic.mk_comp (unsig', dead_sig_map'
$ Library.foldl1 HOLogic.mk_comp [Oper, Sig', dead_pre_map' $ VLeaf, snd']);
in
define_const true fp_b version LamN rhs lthy
end;
fun define_Lam_step_or_merge fp_b version Y preT unsig left_case right_case lthy =
let
val YpreT = HOLogic.mk_prodT (Y, preT);
val unsig' = substT Y YpreT unsig;
val rhs = HOLogic.mk_comp (mk_case_sum (left_case, right_case), unsig');
in
define_const true fp_b version LamN rhs lthy
end;
fun define_Lam_step fp_b version Y Z preT old_ssig_T ssig_T dead_pre_map unsig rho embL old_Lam
lthy =
let
val dead_pre_map' = Term.subst_atomic_types [(Y, old_ssig_T), (Z, ssig_T)] dead_pre_map;
val left_case = HOLogic.mk_comp (dead_pre_map' $ embL, old_Lam);
in
define_Lam_step_or_merge fp_b version Y preT unsig left_case rho lthy
end;
fun define_Lam_merge fp_b version Y Z preT old1_ssig_T old2_ssig_T ssig_T dead_pre_map unsig embLL
embLR old1_Lam old2_Lam lthy =
let
val dead_pre_map' = Term.subst_atomic_types [(Y, old1_ssig_T), (Z, ssig_T)] dead_pre_map;
val dead_pre_map'' = Term.subst_atomic_types [(Y, old2_ssig_T), (Z, ssig_T)] dead_pre_map;
val left_case = HOLogic.mk_comp (dead_pre_map' $ embLL, old1_Lam);
val right_case = HOLogic.mk_comp (dead_pre_map'' $ embLR, old2_Lam);
in
define_Lam_step_or_merge fp_b version Y preT unsig left_case right_case lthy
end;
fun define_proto_sctr_step_or_merge fp_b version old_sig_T right_T Sig old_proto_sctr =
let
val rhs = Library.foldl1 HOLogic.mk_comp [Sig, Inl_const old_sig_T right_T, old_proto_sctr];
in
define_const true fp_b version proto_sctrN rhs
end;
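(* Defines flat by primitive recursion: flat (Oper s) = Oper (sig_map flat s),
   flat (VLeaf x) = x, and flat (CLeaf j) = CLeaf j, effectively the "join" that collapses
   one layer of nesting of the extended signature. *)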
fun define_flat fp_b version Y Z fpT sig_T ssig_T Oper VLeaf CLeaf dead_sig_map lthy =
let
val flat_b = mk_version_fp_binding true lthy version fp_b flatN;
val ssig_sig_T = Tsubst Y ssig_T sig_T;
val ssig_ssig_sig_T = Tsubst Y ssig_T ssig_sig_T;
val ssig_ssig_T = Tsubst Y ssig_T ssig_T;
val sigx = Var (("s", 0), ssig_ssig_sig_T);
val x = Var (("x", 0), ssig_T);
val j = Var (("j", 0), fpT);
val flat = Free (Binding.name_of flat_b, ssig_ssig_T --> ssig_T);
val Oper' = substT Y ssig_T Oper;
val VLeaf' = substT Y ssig_T VLeaf;
val CLeaf' = substT Y ssig_T CLeaf;
val dead_sig_map' = Term.subst_atomic_types [(Y, ssig_ssig_T), (Z, ssig_T)] dead_sig_map;
val Oper_eq = mk_Trueprop_eq (flat $ (Oper' $ sigx), Oper $ (dead_sig_map' $ flat $ sigx))
|> Logic.all sigx;
val VLeaf_eq = mk_Trueprop_eq (flat $ (VLeaf' $ x), x)
|> Logic.all x;
val CLeaf_eq = mk_Trueprop_eq (flat $ (CLeaf' $ j), CLeaf $ j)
|> Logic.all j;
in
define_single_primrec flat_b [Oper_eq, VLeaf_eq, CLeaf_eq] lthy
end;
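(* Defines eval_core by primitive recursion over the extended signature at Y * preT:
   the Oper case goes through Lam and flat, the VLeaf case through the second projection,
   and the CLeaf case through dtor. *)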
fun define_eval_core fp_b version Y Z preT fpT sig_T ssig_T dtor Oper VLeaf CLeaf dead_pre_map
dead_sig_map dead_ssig_map flat Lam lthy =
let
val eval_core_b = mk_version_fp_binding true lthy version fp_b eval_coreN;
val YpreT = HOLogic.mk_prodT (Y, preT);
val Ypre_ssig_T = Tsubst Y YpreT ssig_T;
val Ypre_ssig_sig_T = Tsubst Y Ypre_ssig_T sig_T;
val ssig_preT = Tsubst Y ssig_T preT;
val ssig_YpreT = Tsubst Y ssig_T YpreT;
val ssig_ssig_T = Tsubst Y ssig_T ssig_T;
val sigx = Var (("s", 0), Ypre_ssig_sig_T);
val x = Var (("x", 0), YpreT);
val j = Var (("j", 0), fpT);
val eval_core = Free (Binding.name_of eval_core_b, Ypre_ssig_T --> ssig_preT);
val Oper' = substT Y YpreT Oper;
val VLeaf' = substT Y YpreT VLeaf;
val CLeaf' = substT Y YpreT CLeaf;
val dead_pre_map' = Term.subst_atomic_types [(Y, ssig_ssig_T), (Z, ssig_T)] dead_pre_map;
val dead_pre_map'' = substT Z ssig_T dead_pre_map;
val dead_pre_map''' = Term.subst_atomic_types [(Y, fpT), (Z, ssig_T)] dead_pre_map;
val dead_sig_map' = Term.subst_atomic_types [(Y, Ypre_ssig_T), (Z, ssig_YpreT)] dead_sig_map;
val dead_ssig_map' = Term.subst_atomic_types [(Y, YpreT), (Z, Y)] dead_ssig_map;
val Lam' = substT Y ssig_T Lam;
val fst' = fst_const YpreT;
val snd' = snd_const YpreT;
val Oper_eq = mk_Trueprop_eq (eval_core $ (Oper' $ sigx),
dead_pre_map' $ flat $ (Lam' $ (dead_sig_map' $ (Abs (Name.uu, Ypre_ssig_T,
HOLogic.mk_prod (dead_ssig_map' $ fst' $ Bound 0, eval_core $ Bound 0))) $ sigx)))
|> Logic.all sigx;
val VLeaf_eq = mk_Trueprop_eq (eval_core $ (VLeaf' $ x), dead_pre_map'' $ VLeaf $ (snd' $ x))
|> Logic.all x;
val CLeaf_eq = mk_Trueprop_eq (eval_core $ (CLeaf' $ j), dead_pre_map''' $ CLeaf $ (dtor $ j))
|> Logic.all j;
in
define_single_primrec eval_core_b [Oper_eq, VLeaf_eq, CLeaf_eq] lthy
end;
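(* Defines eval as dtor_unfold applied to eval_core o ssig_map <id, dtor>. *)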
fun define_eval fp_b version Y Z preT fpT ssig_T dtor dtor_unfold dead_ssig_map eval_core lthy =
let
val fp_preT = Tsubst Y fpT preT;
val fppreT = HOLogic.mk_prodT (fpT, fp_preT);
val fp_ssig_T = Tsubst Y fpT ssig_T;
val dtor_unfold' = substT Z fp_ssig_T dtor_unfold;
val dead_ssig_map' = Term.subst_atomic_types [(Y, fpT), (Z, fppreT)] dead_ssig_map;
val eval_core' = substT Y fpT eval_core;
val id' = HOLogic.id_const fpT;
val rhs = dtor_unfold' $ HOLogic.mk_comp (eval_core', dead_ssig_map' $ mk_convol (id', dtor));
in
define_const true fp_b version evalN rhs lthy
end;
fun define_cutSsig fp_b version Y Z preT ssig_T dead_pre_map VLeaf dead_ssig_map flat eval_core
lthy =
let
val ssig_preT = Tsubst Y ssig_T preT;
val ssig_ssig_T = Tsubst Y ssig_T ssig_T;
val ssig_ssig_preT = HOLogic.mk_prodT (ssig_T, ssig_preT);
val h = Var (("h", 0), Y --> ssig_preT);
val dead_pre_map' = Term.subst_atomic_types [(Y, ssig_ssig_T), (Z, ssig_T)] dead_pre_map;
val dead_ssig_map' = substT Z ssig_ssig_preT dead_ssig_map;
val eval_core' = substT Y ssig_T eval_core;
val rhs = Library.foldl1 HOLogic.mk_comp [dead_pre_map' $ flat, eval_core',
dead_ssig_map' $ mk_convol (VLeaf, h)]
|> Term.lambda h;
in
define_const true fp_b version cutSsigN rhs lthy
end;
fun define_algLam fp_b version Y Z fpT ssig_T Oper VLeaf dead_sig_map eval lthy =
let
val fp_ssig_T = Tsubst Y fpT ssig_T;
val Oper' = substT Y fpT Oper;
val VLeaf' = substT Y fpT VLeaf;
val dead_sig_map' = Term.subst_atomic_types [(Y, fpT), (Z, fp_ssig_T)] dead_sig_map;
val rhs = Library.foldl1 HOLogic.mk_comp [eval, Oper', dead_sig_map' $ VLeaf'];
in
define_const true fp_b version algLamN rhs lthy
end;
fun define_corecU fp_b version Y Z preT ssig_T dtor_unfold VLeaf cutSsig lthy =
let
val ssig_preT = Tsubst Y ssig_T preT;
val h = Var (("h", 0), Y --> ssig_preT);
val dtor_unfold' = substT Z ssig_T dtor_unfold;
val rhs = HOLogic.mk_comp (dtor_unfold' $ (cutSsig $ h), VLeaf)
|> Term.lambda h;
in
define_const true fp_b version corecUN rhs lthy
end;
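(* Defines corecUU in terms of corecU:
   corecUU h = corecU (pre_map flat o eval_core o ssig_map <sctr, id> o h). *)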
fun define_corecUU fp_b version Y Z preT ssig_T dead_pre_map dead_ssig_map flat eval_core sctr
corecU lthy =
let
val ssig_preT = Tsubst Y ssig_T preT;
val ssig_ssig_T = Tsubst Y ssig_T ssig_T;
val ssig_ssig_preT = HOLogic.mk_prodT (ssig_T, ssig_preT);
val ssig_pre_ssig_T = Tsubst Y ssig_preT ssig_T;
val h = Var (("h", 0), Y --> ssig_pre_ssig_T);
val dead_pre_map' = Term.subst_atomic_types [(Y, ssig_ssig_T), (Z, ssig_T)] dead_pre_map;
val eval_core' = substT Y ssig_T eval_core;
val dead_ssig_map' =
Term.subst_atomic_types [(Y, ssig_preT), (Z, ssig_ssig_preT)] dead_ssig_map;
val id' = HOLogic.id_const ssig_preT;
val rhs = corecU $ Library.foldl1 HOLogic.mk_comp
[dead_pre_map' $ flat, eval_core', dead_ssig_map' $ mk_convol (sctr, id'), h]
|> Term.lambda h;
in
define_const true fp_b version corecUUN rhs lthy
end;
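(* The derive_*_transfer functions below each state a parametricity (rel_fun) goal for one of
   the defined constants and discharge it with a dedicated tactic or with the transfer prover;
   those using Goal.prove rather than Goal.prove_sorry must hold even in quick_and_dirty mode. *)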
fun derive_sig_transfer maybe_swap ctxt live_AsBs pre_rel sig_rel Rs R const pre_rel_def
preT_rel_eqs transfer_thm =
let
val RRpre_rel = list_comb (pre_rel, Rs) $ R;
val RRsig_rel = list_comb (sig_rel, Rs) $ R;
val constB = Term.subst_atomic_types live_AsBs const;
val goal = uncurry mk_rel_fun (maybe_swap (RRpre_rel, RRsig_rel)) $ const $ constB
|> HOLogic.mk_Trueprop;
in
Variable.add_free_names ctxt goal []
|> (fn vars => Goal.prove_sorry ctxt vars [] goal (fn {context = ctxt, prems = _} =>
mk_sig_transfer_tac ctxt pre_rel_def preT_rel_eqs transfer_thm))
|> Thm.close_derivation \<^here>
end;
fun derive_transfer_by_transfer_prover ctxt live_AsBs Rs R const const_defs rel_eqs transfers =
let
val constB = Term.subst_atomic_types live_AsBs const;
val goal = mk_parametricity_goal ctxt (Rs @ [R]) const constB;
in
Variable.add_free_names ctxt goal []
|> (fn vars => Goal.prove (*no sorry*) ctxt vars [] goal (fn {context = ctxt, prems = _} =>
mk_transfer_by_transfer_prover_tac ctxt (const_defs @ map (fn thm => thm RS sym) rel_eqs)
rel_eqs transfers))
|> Thm.close_derivation \<^here>
end;
fun derive_dtor_transfer ctxt live_EsFs Y Z pre_rel fp_rel Rs dtor dtor_rel_thm =
let
val Type (\<^type_name>\<open>fun\<close>, [fpT, Type (\<^type_name>\<open>fun\<close>, [fpTB, \<^typ>\<open>bool\<close>])]) =
snd (strip_typeN (length live_EsFs) (fastype_of fp_rel));
val pre_rel' = Term.subst_atomic_types [(Y, fpT), (Z, fpTB)] pre_rel;
val Rpre_rel = list_comb (pre_rel', Rs);
val Rfp_rel = list_comb (fp_rel, Rs);
val dtorB = Term.subst_atomic_types live_EsFs dtor;
val goal = HOLogic.mk_Trueprop (mk_rel_fun Rfp_rel (Rpre_rel $ Rfp_rel) $ dtor $ dtorB);
in
Variable.add_free_names ctxt goal []
|> (fn vars => Goal.prove (*no sorry*) ctxt vars [] goal (fn {context = ctxt, prems = _} =>
mk_dtor_transfer_tac ctxt dtor_rel_thm))
|> Thm.close_derivation \<^here>
end;
fun derive_Lam_or_eval_core_transfer ctxt live_AsBs Y Z preT ssig_T Rs R pre_rel sig_or_ssig_rel
ssig_rel const const_def rel_eqs transfers =
let
val YpreT = HOLogic.mk_prodT (Y, preT);
val ZpreTB = typ_subst_atomic live_AsBs YpreT;
val ssig_TB = typ_subst_atomic live_AsBs ssig_T;
val pre_rel' = Term.subst_atomic_types [(Y, ssig_T), (Z, ssig_TB)] pre_rel;
val sig_or_ssig_rel' = Term.subst_atomic_types [(Y, YpreT), (Z, ZpreTB)] sig_or_ssig_rel;
val Rsig_or_ssig_rel' = list_comb (sig_or_ssig_rel', Rs);
val RRpre_rel = list_comb (pre_rel, Rs) $ R;
val RRssig_rel = list_comb (ssig_rel, Rs) $ R;
val Rpre_rel' = list_comb (pre_rel', Rs);
val constB = subst_atomic_types live_AsBs const;
val goal = mk_rel_fun (Rsig_or_ssig_rel' $ mk_rel_prod R RRpre_rel) (Rpre_rel' $ RRssig_rel)
$ const $ constB
|> HOLogic.mk_Trueprop;
in
Variable.add_free_names ctxt goal []
|> (fn vars => Goal.prove (*no sorry*) ctxt vars [] goal (fn {context = ctxt, prems = _} =>
mk_transfer_by_transfer_prover_tac ctxt [const_def] rel_eqs transfers))
|> Thm.close_derivation \<^here>
end;
fun derive_proto_sctr_transfer_step_or_merge ctxt Y Z R dead_pre_rel dead_sig_rel proto_sctr
proto_sctr_def fp_k_T_rel_eqs transfers =
let
val proto_sctrZ = substT Y Z proto_sctr;
val goal = mk_rel_fun (dead_pre_rel $ R) (dead_sig_rel $ R) $ proto_sctr $ proto_sctrZ
|> HOLogic.mk_Trueprop;
in
Variable.add_free_names ctxt goal []
|> (fn vars => Goal.prove (*no sorry*) ctxt vars [] goal (fn {context = ctxt, prems = _} =>
mk_transfer_by_transfer_prover_tac ctxt [proto_sctr_def] fp_k_T_rel_eqs transfers))
|> Thm.close_derivation \<^here>
end;
fun derive_sctr_transfer ctxt live_AsBs Y Z ssig_T Rs R pre_rel ssig_rel sctr sctr_def
fp_k_T_rel_eqs transfers =
let
val ssig_TB = typ_subst_atomic live_AsBs ssig_T;
val pre_rel' = Term.subst_atomic_types [(Y, ssig_T), (Z, ssig_TB)] pre_rel;
val Rpre_rel' = list_comb (pre_rel', Rs);
val RRssig_rel = list_comb (ssig_rel, Rs) $ R;
val sctrB = subst_atomic_types live_AsBs sctr;
val goal = HOLogic.mk_Trueprop (mk_rel_fun (Rpre_rel' $ RRssig_rel) RRssig_rel $ sctr $ sctrB);
in
Variable.add_free_names ctxt goal []
|> (fn vars => Goal.prove (*no sorry*) ctxt vars [] goal (fn {context = ctxt, prems = _} =>
mk_transfer_by_transfer_prover_tac ctxt [sctr_def] fp_k_T_rel_eqs transfers))
|> Thm.close_derivation \<^here>
end;
fun derive_corecUU_transfer ctxt live_AsBs Y Z Rs R preT ssig_T pre_rel fp_rel ssig_rel corecUU
cutSsig_def corecU_def corecUU_def fp_k_T_rel_eqs transfers =
let
val ssig_preT = Tsubst Y ssig_T preT;
val ssig_TB = typ_subst_atomic live_AsBs ssig_T;
val ssig_preTB = typ_subst_atomic live_AsBs ssig_preT;
val pre_rel' = Term.subst_atomic_types [(Y, ssig_T), (Z, ssig_TB)] pre_rel;
val ssig_rel' = Term.subst_atomic_types [(Y, ssig_preT), (Z, ssig_preTB)] ssig_rel;
val Rpre_rel' = list_comb (pre_rel', Rs);
val Rfp_rel = list_comb (fp_rel, Rs);
val RRssig_rel = list_comb (ssig_rel, Rs) $ R;
val Rssig_rel' = list_comb (ssig_rel', Rs);
val corecUUB = subst_atomic_types live_AsBs corecUU;
val goal = mk_rel_fun (mk_rel_fun R (Rssig_rel' $ (Rpre_rel' $ RRssig_rel)))
(mk_rel_fun R Rfp_rel) $ corecUU $ corecUUB
|> HOLogic.mk_Trueprop;
in
Variable.add_free_names ctxt goal []
|> (fn vars => Goal.prove (*no sorry*) ctxt vars [] goal (fn {context = ctxt, prems = _} =>
mk_transfer_by_transfer_prover_tac ctxt [cutSsig_def, corecU_def, corecUU_def] fp_k_T_rel_eqs
transfers))
|> Thm.close_derivation \<^here>
end;
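(* Naturality goals: mk_natural_goal builds an equation of the form u o u_map = t_map o t,
   assembling the map functions with build_map from simple_T_mapfs and fs; the following
   derive_natural_* functions prove such goals by unfolding map theorems or from a transfer
   theorem. *)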
fun mk_natural_goal ctxt simple_T_mapfs fs t u =
let
fun build_simple (T, _) =
(case AList.lookup (op =) simple_T_mapfs T of
SOME mapf => mapf
| NONE => the (find_first (fn f => domain_type (fastype_of f) = T) fs));
val simple_Ts = map fst simple_T_mapfs;
val t_map = build_map ctxt simple_Ts [] build_simple (apply2 (range_type o fastype_of) (t, u));
val u_map = build_map ctxt simple_Ts [] build_simple (apply2 (domain_type o fastype_of) (t, u));
in
mk_Trueprop_eq (HOLogic.mk_comp (u, u_map), HOLogic.mk_comp (t_map, t))
end;
fun derive_natural_by_unfolding ctxt live_AsBs preT pre_map fs f const map_thms =
let
val ffpre_map = list_comb (pre_map, fs) $ f;
val constB = subst_atomic_types live_AsBs const;
val goal = mk_natural_goal ctxt [(preT, ffpre_map)] (fs @ [f]) const constB;
in
Variable.add_free_names ctxt goal []
|> (fn vars => Goal.prove_sorry ctxt vars [] goal (fn {context = ctxt, prems = _} =>
mk_natural_by_unfolding_tac ctxt map_thms))
|> Thm.close_derivation \<^here>
end;
fun derive_natural_from_transfer ctxt live_AsBs simple_T_mapfs fs f const transfer bnfs subst_bnfs =
let
val m = length live_AsBs;
val constB = Term.subst_atomic_types live_AsBs const;
val goal = mk_natural_goal ctxt simple_T_mapfs (fs @ [f]) const constB;
in
Variable.add_free_names ctxt goal []
|> (fn vars => Goal.prove_sorry ctxt vars [] goal (fn {context = ctxt, prems = _} =>
mk_natural_from_transfer_tac ctxt m (replicate m true) transfer [] (map rel_Grp_of_bnf bnfs)
(map rel_Grp_of_bnf subst_bnfs)))
|> Thm.close_derivation \<^here>
end;
fun derive_natural_from_transfer_with_pre_type ctxt live_AsBs Y Z preT ssig_T pre_map ssig_map fs
f =
let
val ssig_TB = typ_subst_atomic live_AsBs ssig_T;
val preT' = Term.typ_subst_atomic [(Y, ssig_T), (Z, ssig_TB)] preT;
val ffpre_map = list_comb (pre_map, fs) $ f;
val pre_map' = Term.subst_atomic_types [(Y, ssig_T), (Z, ssig_TB)] pre_map;
val fpre_map' = list_comb (pre_map', fs);
val ffssig_map = list_comb (ssig_map, fs) $ f;
val preT_mapfs = [(preT, ffpre_map), (preT', fpre_map' $ ffssig_map)];
in
derive_natural_from_transfer ctxt live_AsBs preT_mapfs fs f
end;
fun derive_Lam_Inl_Inr ctxt Y Z preT old_sig_T old_ssig_T k_T ssig_T dead_pre_map Sig embL old_Lam
Lam rho unsig_thm Lam_def =
let
val YpreT = HOLogic.mk_prodT (Y, preT);
val Ypre_old_sig_T = Tsubst Y YpreT old_sig_T;
val Ypre_k_T = Tsubst Y YpreT k_T;
val inl' = Inl_const Ypre_old_sig_T Ypre_k_T;
val inr' = Inr_const Ypre_old_sig_T Ypre_k_T;
val dead_pre_map' = Term.subst_atomic_types [(Y, old_ssig_T), (Z, ssig_T)] dead_pre_map;
val Sig' = substT Y YpreT Sig;
val Lam_o_Sig = HOLogic.mk_comp (Lam, Sig');
val inl_goal = mk_Trueprop_eq (HOLogic.mk_comp (Lam_o_Sig, inl'),
HOLogic.mk_comp (dead_pre_map' $ embL, old_Lam));
val inr_goal = mk_Trueprop_eq (HOLogic.mk_comp (Lam_o_Sig, inr'), rho);
val goals = [inl_goal, inr_goal];
val goal = Logic.mk_conjunction_balanced goals;
in
Variable.add_free_names ctxt goal []
|> (fn vars => Goal.prove_sorry ctxt vars [] goal
(fn {context = ctxt, prems = _} => mk_Lam_Inl_Inr_tac ctxt unsig_thm Lam_def))
|> Conjunction.elim_balanced (length goals)
|> map (Thm.close_derivation \<^here>)
end;
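(* The next three lemmas about flat and eval_core (flat_VLeaf, flat_flat, eval_core_flat)
   are proved by induction via ssig_induct, instantiated with the given variable x, followed
   by simplification with the supplied map and simp rules. *)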
fun derive_flat_VLeaf ctxt Y Z ssig_T x VLeaf dead_ssig_map flat ssig_induct fp_map_id sig_map_cong
sig_map_ident sig_map_comp ssig_map_thms flat_simps =
let
val x' = substT Y ssig_T x;
val dead_ssig_map' = substT Z ssig_T dead_ssig_map;
val goal = mk_Trueprop_eq (flat $ (dead_ssig_map' $ VLeaf $ x'), x');
val ssig_induct' = infer_instantiate' ctxt [NONE, SOME (Thm.cterm_of ctxt x')] ssig_induct;
in
Variable.add_free_names ctxt goal []
|> (fn vars => Goal.prove_sorry ctxt vars [] goal (fn {context = ctxt, prems = _} =>
mk_flat_VLeaf_or_flat_tac ctxt ssig_induct' sig_map_cong
(fp_map_id :: sig_map_ident :: sig_map_comp :: ssig_map_thms @ flat_simps @
@{thms o_apply id_apply id_def[symmetric]})))
|> Thm.close_derivation \<^here>
end;
fun derive_flat_flat ctxt Y Z ssig_T x dead_ssig_map flat ssig_induct fp_map_id sig_map_cong
sig_map_comp ssig_map_thms flat_simps =
let
val ssig_ssig_T = Tsubst Y ssig_T ssig_T;
val ssig_ssig_ssig_T = Tsubst Y ssig_T ssig_ssig_T;
val x' = substT Y ssig_ssig_ssig_T x;
val dead_ssig_map' = Term.subst_atomic_types [(Y, ssig_ssig_T), (Z, ssig_T)] dead_ssig_map;
val flat' = substT Y ssig_T flat;
val goal = mk_Trueprop_eq (flat $ (dead_ssig_map' $ flat $ x'), flat $ (flat' $ x'));
val ssig_induct' = infer_instantiate' ctxt [NONE, SOME (Thm.cterm_of ctxt x')] ssig_induct;
in
Variable.add_free_names ctxt goal []
|> (fn vars => Goal.prove_sorry ctxt vars [] goal (fn {context = ctxt, prems = _} =>
mk_flat_VLeaf_or_flat_tac ctxt ssig_induct' sig_map_cong
(o_apply :: fp_map_id :: sig_map_comp :: ssig_map_thms @ flat_simps)))
|> Thm.close_derivation \<^here>
end;
fun derive_eval_core_flat ctxt Y Z preT ssig_T dead_pre_map dead_ssig_map flat eval_core x
ssig_induct dead_pre_map_id dead_pre_map_comp0 dead_pre_map_comp fp_map_id sig_map_comp
sig_map_cong ssig_map_thms ssig_map_comp flat_simps flat_pointful_natural flat_flat
Lam_pointful_natural eval_core_simps =
let
val YpreT = HOLogic.mk_prodT (Y, preT);
val ssig_ssig_T = Tsubst Y ssig_T ssig_T;
val Ypre_ssig_T = Tsubst Y YpreT ssig_T;
val Ypre_ssig_ssig_T = Tsubst Y YpreT ssig_ssig_T;
val ssig_YpreT = Tsubst Y ssig_T YpreT;
val dead_pre_map' = Term.subst_atomic_types [(Y, ssig_ssig_T), (Z, ssig_T)] dead_pre_map;
val dead_ssig_map' = Term.subst_atomic_types [(Y, Ypre_ssig_T), (Z, ssig_YpreT)] dead_ssig_map;
val dead_ssig_map'' = Term.subst_atomic_types [(Y, YpreT), (Z, Y)] dead_ssig_map;
val flat' = substT Y YpreT flat;
val eval_core' = substT Y ssig_T eval_core;
val x' = substT Y Ypre_ssig_ssig_T x;
val fst' = fst_const YpreT;
val goal = mk_Trueprop_eq (eval_core $ (flat' $ x'), dead_pre_map' $ flat
$ (eval_core' $ (dead_ssig_map' $ mk_convol (dead_ssig_map'' $ fst', eval_core) $ x')));
val ssig_induct' = infer_instantiate' ctxt [NONE, SOME (Thm.cterm_of ctxt x')] ssig_induct;
in
Variable.add_free_names ctxt goal []
|> (fn vars => Goal.prove_sorry ctxt vars [] goal (fn {context = ctxt, prems = _} =>
mk_eval_core_flat_tac ctxt ssig_induct' dead_pre_map_id dead_pre_map_comp0 dead_pre_map_comp
fp_map_id sig_map_comp sig_map_cong ssig_map_thms ssig_map_comp flat_simps
flat_pointful_natural flat_flat Lam_pointful_natural eval_core_simps))
|> Thm.close_derivation \<^here>
end;
fun derive_eval_thm ctxt dtor_inject dtor_unfold_thm eval_def =
(trans OF [iffD2 OF [dtor_inject, HOLogic.mk_obj_eq eval_def RS fun_cong], dtor_unfold_thm])
|> unfold_thms ctxt [o_apply, eval_def RS symmetric_thm];
fun derive_eval_flat ctxt Y Z fpT ssig_T dead_ssig_map flat eval x dead_pre_map_comp0
dtor_unfold_unique ssig_map_id ssig_map_comp flat_pointful_natural eval_core_pointful_natural
eval_core_flat eval_thm =
let
val fp_ssig_T = Tsubst Y fpT ssig_T;
val fp_ssig_ssig_T = Tsubst Y fp_ssig_T ssig_T;
val dead_ssig_map' = Term.subst_atomic_types [(Y, fp_ssig_T), (Z, fpT)] dead_ssig_map;
val flat' = substT Y fpT flat;
val x' = substT Y fp_ssig_ssig_T x;
val goal = mk_Trueprop_eq (eval $ (flat' $ x'), eval $ (dead_ssig_map' $ eval $ x'));
val cond_eval_o_flat =
infer_instantiate' ctxt [SOME (Thm.cterm_of ctxt (HOLogic.mk_comp (eval, flat')))]
(trans OF [dtor_unfold_unique, dtor_unfold_unique RS sym] RS fun_cong)
OF [ext, ext];
in
Variable.add_free_names ctxt goal []
|> (fn vars => Goal.prove_sorry ctxt vars [] goal (fn {context = ctxt, prems = _} =>
mk_eval_flat_tac ctxt dead_pre_map_comp0 ssig_map_id ssig_map_comp flat_pointful_natural
eval_core_pointful_natural eval_core_flat eval_thm cond_eval_o_flat))
|> Thm.close_derivation \<^here>
end;
fun derive_eval_Oper ctxt live Y Z fpT sig_T ssig_T dead_sig_map Oper eval algLam x sig_map_ident
sig_map_comp0 sig_map_comp Oper_natural_pointful VLeaf_natural flat_simps eval_flat algLam_def =
let
val fp_ssig_T = Tsubst Y fpT ssig_T;
val fp_ssig_sig_T = Tsubst Y fp_ssig_T sig_T;
val dead_sig_map' = Term.subst_atomic_types [(Y, fp_ssig_T), (Z, fpT)] dead_sig_map;
val Oper' = substT Y fpT Oper;
val x' = substT Y fp_ssig_sig_T x;
val goal = mk_Trueprop_eq (eval $ (Oper' $ x'), algLam $ (dead_sig_map' $ eval $ x'));
in
Variable.add_free_names ctxt goal []
|> (fn vars => Goal.prove_sorry ctxt vars [] goal (fn {context = ctxt, prems = _} =>
mk_eval_Oper_tac ctxt live sig_map_ident sig_map_comp0 sig_map_comp Oper_natural_pointful
VLeaf_natural flat_simps eval_flat algLam_def))
|> Thm.close_derivation \<^here>
end;
fun derive_eval_V_or_CLeaf ctxt Y fpT V_or_CLeaf eval x dead_pre_map_id dead_pre_map_comp fp_map_id
dtor_unfold_unique V_or_CLeaf_map_thm eval_core_simps eval_thm =
let
val V_or_CLeaf' = substT Y fpT V_or_CLeaf;
val x' = substT Y fpT x;
val goal = mk_Trueprop_eq (eval $ (V_or_CLeaf' $ x'), x');
in
Variable.add_free_names ctxt goal []
|> (fn vars => Goal.prove_sorry ctxt vars [] goal (fn {context = ctxt, prems = _} =>
mk_eval_V_or_CLeaf_tac ctxt dead_pre_map_id dead_pre_map_comp fp_map_id dtor_unfold_unique
V_or_CLeaf_map_thm eval_core_simps eval_thm))
|> Thm.close_derivation \<^here>
end;
fun derive_extdd_mor ctxt Y Z preT fpT ssig_T dead_pre_map dtor extdd cutSsig f g dead_pre_map_comp0
dead_pre_map_comp VLeaf_map_thm ssig_map_comp flat_pointful_natural eval_core_pointful_natural
eval_thm eval_flat eval_VLeaf cutSsig_def =
let
val ssig_preT = Tsubst Y ssig_T preT;
val dead_pre_map' = Term.subst_atomic_types [(Y, ssig_T), (Z, fpT)] dead_pre_map;
val f' = substT Z fpT f;
val g' = substT Z ssig_preT g;
val extdd_f = extdd $ f';
val prem = mk_Trueprop_eq (HOLogic.mk_comp (dead_pre_map' $ extdd_f, g'),
HOLogic.mk_comp (dtor, f'));
val goal = mk_Trueprop_eq (HOLogic.mk_comp (dead_pre_map' $ extdd_f, cutSsig $ g'),
HOLogic.mk_comp (dtor, extdd_f));
in
fold (Variable.add_free_names ctxt) [prem, goal] []
|> (fn vars => Goal.prove_sorry ctxt vars [prem] goal (fn {context = ctxt, prems = [prem]} =>
mk_extdd_mor_tac ctxt dead_pre_map_comp0 dead_pre_map_comp VLeaf_map_thm ssig_map_comp
flat_pointful_natural eval_core_pointful_natural eval_thm eval_flat eval_VLeaf cutSsig_def
prem))
|> Thm.close_derivation \<^here>
end;
fun derive_mor_cutSsig_flat ctxt Y Z preT fpT ssig_T dead_pre_map dead_ssig_map dtor flat eval_core
eval cutSsig f g dead_pre_map_comp0 dead_pre_map_comp dead_pre_map_cong dtor_unfold_unique
dead_ssig_map_comp0 ssig_map_comp flat_simps flat_pointful_natural eval_core_pointful_natural
flat_flat flat_VLeaf eval_core_flat cutSsig_def cutSsig_def_pointful_natural eval_thm =
let
val ssig_preT = Tsubst Y ssig_T preT;
val substYZ = Term.subst_atomic_types [(Y, ssig_T), (Z, fpT)];
val dead_pre_map' = substYZ dead_pre_map;
val dead_ssig_map' = substYZ dead_ssig_map;
val f' = substYZ f;
val g' = substT Z ssig_preT g;
val cutSsig_g = cutSsig $ g';
val id' = HOLogic.id_const ssig_T;
val convol' = mk_convol (id', cutSsig_g);
val dead_ssig_map'' =
Term.subst_atomic_types [(Y, ssig_T), (Z, range_type (fastype_of convol'))] dead_ssig_map;
val eval_core' = substT Y ssig_T eval_core;
val eval_core_o_map = HOLogic.mk_comp (eval_core', dead_ssig_map'' $ convol');
val prem = mk_Trueprop_eq (HOLogic.mk_comp (dead_pre_map' $ f', cutSsig_g),
HOLogic.mk_comp (dtor, f'));
val goal = mk_Trueprop_eq (HOLogic.mk_comp (eval, dead_ssig_map' $ f'),
HOLogic.mk_comp (f', flat));
in
fold (Variable.add_free_names ctxt) [prem, goal] []
|> (fn vars => Goal.prove_sorry ctxt vars [prem] goal (fn {context = ctxt, prems = [prem]} =>
mk_mor_cutSsig_flat_tac ctxt eval_core_o_map dead_pre_map_comp0 dead_pre_map_comp
dead_pre_map_cong dtor_unfold_unique dead_ssig_map_comp0 ssig_map_comp flat_simps
flat_pointful_natural eval_core_pointful_natural flat_flat flat_VLeaf eval_core_flat
cutSsig_def cutSsig_def_pointful_natural eval_thm prem))
|> Thm.close_derivation \<^here>
end;
fun derive_extdd_o_VLeaf ctxt Y Z preT fpT ssig_T dead_pre_map dtor VLeaf extdd f g
dead_pre_map_comp0 dead_pre_map_comp dtor_inject ssig_map_thms eval_core_simps eval_thm
eval_VLeaf =
let
val ssig_preT = Tsubst Y ssig_T preT;
val substYZ = Term.subst_atomic_types [(Y, ssig_T), (Z, fpT)];
val dead_pre_map' = substYZ dead_pre_map;
val f' = substT Z fpT f;
val g' = substT Z ssig_preT g;
val extdd_f = extdd $ f';
val prem = mk_Trueprop_eq (HOLogic.mk_comp (dead_pre_map' $ extdd_f, g'),
HOLogic.mk_comp (dtor, f'));
val goal = mk_Trueprop_eq (HOLogic.mk_comp (extdd_f, VLeaf), f');
in
fold (Variable.add_free_names ctxt) [prem, goal] []
|> (fn vars => Goal.prove_sorry ctxt vars [prem] goal (fn {context = ctxt, prems = [prem]} =>
mk_extdd_o_VLeaf_tac ctxt dead_pre_map_comp0 dead_pre_map_comp dtor_inject ssig_map_thms
eval_core_simps eval_thm eval_VLeaf prem))
|> Thm.close_derivation \<^here>
end;
fun derive_corecU_pointfree ctxt Y Z preT fpT ssig_T dead_pre_map dtor extdd corecU g
dead_pre_map_comp dtor_unfold_thm ssig_map_thms dead_ssig_map_comp0 flat_simps flat_VLeaf
eval_core_simps cutSsig_def mor_cutSsig_flat corecU_def =
let
val ssig_preT = Tsubst Y ssig_T preT;
val substYZ = Term.subst_atomic_types [(Y, ssig_T), (Z, fpT)];
val dead_pre_map' = substYZ dead_pre_map;
val g' = substT Z ssig_preT g;
val corecU_g = corecU $ g';
val goal = mk_Trueprop_eq (HOLogic.mk_comp (dead_pre_map' $ (extdd $ corecU_g), g'),
HOLogic.mk_comp (dtor, corecU_g));
in
Variable.add_free_names ctxt goal []
|> (fn vars => Goal.prove_sorry ctxt vars [] goal (fn {context = ctxt, prems = _} =>
mk_corecU_pointfree_tac ctxt dead_pre_map_comp dtor_unfold_thm ssig_map_thms
dead_ssig_map_comp0 flat_simps flat_VLeaf eval_core_simps cutSsig_def mor_cutSsig_flat
corecU_def))
|> Thm.close_derivation \<^here>
end;
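(* Derives the ctor-level characteristic equation of corecU and its uniqueness theorem from
   the pointfree property established by derive_corecU_pointfree. *)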
fun derive_corecU_ctor_unique ctxt Y Z preT fpT ssig_T dead_pre_map ctor dtor VLeaf extdd corecU f g
dead_pre_map_comp ctor_dtor dtor_unfold_thm dtor_unfold_unique ssig_map_thms dead_ssig_map_comp0
flat_simps flat_VLeaf eval_core_simps extdd_mor extdd_o_VLeaf cutSsig_def mor_cutSsig_flat
corecU_def =
let
val corecU_pointfree = derive_corecU_pointfree ctxt Y Z preT fpT ssig_T dead_pre_map dtor extdd
corecU g dead_pre_map_comp dtor_unfold_thm ssig_map_thms dead_ssig_map_comp0 flat_simps
flat_VLeaf eval_core_simps cutSsig_def mor_cutSsig_flat corecU_def;
val corecU_thm = corecU_pointfree RS @{thm comp_eq_dest};
val corecU_ctor =
let
val arg_cong' =
infer_instantiate' ctxt [NONE, NONE, SOME (Thm.cterm_of ctxt ctor)] arg_cong;
in
unfold_thms ctxt [ctor_dtor] (corecU_thm RS arg_cong')
end;
val corecU_unique =
let
val substYZ = Term.subst_atomic_types [(Y, ssig_T), (Z, fpT)];
val f' = substYZ f;
val abs_f_o_VLeaf = Term.lambda f' (HOLogic.mk_comp (f', VLeaf));
val inject_refine' = infer_instantiate' ctxt [SOME (Thm.cterm_of ctxt abs_f_o_VLeaf),
SOME (Thm.cterm_of ctxt extdd)] @{thm inject_refine};
in
unfold_thms ctxt @{thms atomize_imp}
(((inject_refine' OF [extdd_o_VLeaf, extdd_o_VLeaf] RS iffD1)
OF [asm_rl, corecU_pointfree])
OF [asm_rl, trans OF [dtor_unfold_unique, dtor_unfold_unique RS sym]
OF [extdd_mor, corecU_pointfree RS extdd_mor]])
RS @{thm obj_distinct_prems}
end;
in
(corecU_ctor, corecU_unique)
end;
fun derive_dtor_algLam ctxt Y Z preT fpT sig_T ssig_T dead_pre_map dtor dead_sig_map Lam eval algLam
x pre_map_comp dead_pre_map_id dead_pre_map_comp0 dead_pre_map_comp sig_map_comp
Oper_pointful_natural ssig_map_thms dead_ssig_map_comp0 Lam_pointful_natural eval_core_simps
eval_thm eval_flat eval_VLeaf algLam_def =
let
val fp_preT = Tsubst Y fpT preT;
val fppreT = HOLogic.mk_prodT (fpT, fp_preT);
val fp_sig_T = Tsubst Y fpT sig_T;
val fp_ssig_T = Tsubst Y fpT ssig_T;
val id' = HOLogic.id_const fpT;
val convol' = mk_convol (id', dtor);
val dead_pre_map' = Term.subst_atomic_types [(Y, fp_ssig_T), (Z, fpT)] dead_pre_map;
val dead_sig_map' = Term.subst_atomic_types [(Y, fpT), (Z, fppreT)] dead_sig_map;
val Lam' = substT Y fpT Lam;
val x' = substT Y fp_sig_T x;
val goal = mk_Trueprop_eq (dtor $ (algLam $ x'),
dead_pre_map' $ eval $ (Lam' $ (dead_sig_map' $ convol' $ x')));
in
Variable.add_free_names ctxt goal []
|> (fn vars => Goal.prove_sorry ctxt vars [] goal (fn {context = ctxt, prems = _} =>
mk_dtor_algLam_tac ctxt pre_map_comp dead_pre_map_id dead_pre_map_comp0 dead_pre_map_comp
sig_map_comp Oper_pointful_natural ssig_map_thms dead_ssig_map_comp0 Lam_pointful_natural
eval_core_simps eval_thm eval_flat eval_VLeaf algLam_def))
|> Thm.close_derivation \<^here>
end;
fun derive_algLam_base ctxt Y Z preT fpT dead_pre_map ctor dtor algLam proto_sctr dead_pre_map_id
dead_pre_map_comp ctor_dtor dtor_ctor dtor_unfold_unique unsig_thm Sig_pointful_natural
ssig_map_thms Lam_def flat_simps eval_core_simps eval_thm algLam_def =
let
val fp_preT = Tsubst Y fpT preT;
val proto_sctr' = substT Y fpT proto_sctr;
val dead_pre_map' = Term.subst_atomic_types [(Y, fpT), (Z, fp_preT)] dead_pre_map;
val dead_pre_map_dtor = dead_pre_map' $ dtor;
val goal = mk_Trueprop_eq (HOLogic.mk_comp (algLam, proto_sctr'), ctor);
in
Variable.add_free_names ctxt goal []
|> (fn vars => Goal.prove_sorry ctxt vars [] goal (fn {context = ctxt, prems = _} =>
mk_algLam_base_tac ctxt dead_pre_map_dtor dead_pre_map_id dead_pre_map_comp ctor_dtor
dtor_ctor dtor_unfold_unique unsig_thm Sig_pointful_natural ssig_map_thms Lam_def flat_simps
eval_core_simps eval_thm algLam_def))
|> Thm.close_derivation \<^here>
end;
fun derive_flat_embL ctxt Y Z old_ssig_T ssig_T dead_old_ssig_map embL old_flat flat x
old_ssig_induct fp_map_id Sig_pointful_natural old_sig_map_comp old_sig_map_cong
old_ssig_map_thms old_flat_simps flat_simps embL_simps =
let
val old_ssig_old_ssig_T = Tsubst Y old_ssig_T old_ssig_T;
val dead_old_ssig_map' =
Term.subst_atomic_types [(Y, old_ssig_T), (Z, ssig_T)] dead_old_ssig_map;
val embL' = substT Y ssig_T embL;
val x' = substT Y old_ssig_old_ssig_T x;
val goal = mk_Trueprop_eq (flat $ (embL' $ (dead_old_ssig_map' $ embL $ x')),
embL $ (old_flat $ x'));
val old_ssig_induct' =
infer_instantiate' ctxt [NONE, SOME (Thm.cterm_of ctxt x')] old_ssig_induct;
in
Variable.add_free_names ctxt goal []
|> (fn vars => Goal.prove_sorry ctxt vars [] goal (fn {context = ctxt, prems = _} =>
mk_flat_embL_tac ctxt old_ssig_induct' fp_map_id Sig_pointful_natural old_sig_map_comp
old_sig_map_cong old_ssig_map_thms old_flat_simps flat_simps embL_simps))
|> Thm.close_derivation \<^here>
end;
fun derive_eval_core_embL ctxt Y Z preT old_ssig_T ssig_T dead_pre_map embL old_eval_core eval_core
x old_ssig_induct dead_pre_map_comp0 dead_pre_map_comp Sig_pointful_natural unsig_thm
old_sig_map_comp old_sig_map_cong old_Lam_pointful_natural Lam_def flat_embL embL_simps
embL_pointful_natural old_eval_core_simps eval_core_simps =
let
val YpreT = HOLogic.mk_prodT (Y, preT);
val Ypre_old_ssig_T = Tsubst Y YpreT old_ssig_T;
val dead_pre_map' = Term.subst_atomic_types [(Y, old_ssig_T), (Z, ssig_T)] dead_pre_map;
val embL' = substT Y YpreT embL;
val x' = substT Y Ypre_old_ssig_T x;
val goal =
mk_Trueprop_eq (eval_core $ (embL' $ x'), dead_pre_map' $ embL $ (old_eval_core $ x'));
val old_ssig_induct' =
infer_instantiate' ctxt [NONE, SOME (Thm.cterm_of ctxt x')] old_ssig_induct;
in
Variable.add_free_names ctxt goal []
|> (fn vars => Goal.prove_sorry ctxt vars [] goal (fn {context = ctxt, prems = _} =>
mk_eval_core_embL_tac ctxt old_ssig_induct' dead_pre_map_comp0 dead_pre_map_comp
Sig_pointful_natural unsig_thm old_sig_map_comp old_sig_map_cong old_Lam_pointful_natural
Lam_def flat_embL old_eval_core_simps eval_core_simps embL_simps embL_pointful_natural))
|> Thm.close_derivation \<^here>
end;
fun derive_eval_embL ctxt Y fpT embL old_eval eval dead_pre_map_comp0 dtor_unfold_unique
embL_pointful_natural eval_core_embL old_eval_thm eval_thm =
let
val embL' = substT Y fpT embL;
val goal = mk_Trueprop_eq (HOLogic.mk_comp (eval, embL'), old_eval);
in
Variable.add_free_names ctxt goal []
|> (fn vars => Goal.prove_sorry ctxt vars [] goal (fn {context = ctxt, prems = _} =>
mk_eval_embL_tac ctxt dead_pre_map_comp0 dtor_unfold_unique embL_pointful_natural
eval_core_embL old_eval_thm eval_thm))
|> Thm.close_derivation \<^here>
end;
fun derive_algLam_algLam ctxt Inx_const Y fpT Sig old_algLam algLam dead_pre_map_comp dtor_inject
unsig_thm sig_map_thm Lam_def eval_embL old_dtor_algLam dtor_algLam =
let
val Sig' = substT Y fpT Sig;
val (left_T, right_T) = dest_sumT (domain_type (fastype_of Sig'));
val inx' = Inx_const left_T right_T;
val goal = mk_Trueprop_eq (Library.foldl1 HOLogic.mk_comp [algLam, Sig', inx'], old_algLam);
in
Variable.add_free_names ctxt goal []
|> (fn vars => Goal.prove_sorry ctxt vars [] goal (fn {context = ctxt, prems = _} =>
mk_algLam_algLam_tac ctxt dead_pre_map_comp dtor_inject unsig_thm sig_map_thm Lam_def
eval_embL old_dtor_algLam dtor_algLam))
|> Thm.close_derivation \<^here>
end;
fun derive_eval_core_k_as_ssig ctxt Y preT k_T rho eval_core k_as_ssig x pre_map_comp
dead_pre_map_id sig_map_comp ssig_map_thms Lam_natural_pointful Lam_Inr flat_VLeaf
eval_core_simps =
let
val YpreT = HOLogic.mk_prodT (Y, preT);
val Ypre_k_T = Tsubst Y YpreT k_T;
val k_as_ssig' = substT Y YpreT k_as_ssig;
val x' = substT Y Ypre_k_T x;
val goal = mk_Trueprop_eq (eval_core $ (k_as_ssig' $ x'), rho $ x');
in
Variable.add_free_names ctxt goal []
|> (fn vars => Goal.prove_sorry ctxt vars [] goal (fn {context = ctxt, prems = _} =>
mk_eval_core_k_as_ssig_tac ctxt pre_map_comp dead_pre_map_id sig_map_comp ssig_map_thms
Lam_natural_pointful Lam_Inr flat_VLeaf eval_core_simps))
|> Thm.close_derivation \<^here>
end;
fun derive_algLam_algrho ctxt Y fpT Sig algLam algrho algLam_def algrho_def =
let
val Sig' = substT Y fpT Sig;
val (left_T, right_T) = dest_sumT (domain_type (fastype_of Sig'));
val inr' = Inr_const left_T right_T;
val goal = mk_Trueprop_eq (Library.foldl1 HOLogic.mk_comp [algLam, Sig', inr'], algrho);
in
Variable.add_free_names ctxt goal []
|> (fn vars => Goal.prove_sorry ctxt vars [] goal (fn {context = ctxt, prems = _} =>
mk_algLam_algrho_tac ctxt algLam_def algrho_def))
|> Thm.close_derivation \<^here>
end;
fun derive_dtor_algrho ctxt Y Z preT fpT k_T ssig_T dead_pre_map dead_k_map dtor rho eval algrho x
eval_thm k_as_ssig_natural_pointful eval_core_k_as_ssig algrho_def =
let
val YpreT = HOLogic.mk_prodT (Y, preT);
val fppreT = Tsubst Y fpT YpreT;
val fp_k_T = Tsubst Y fpT k_T;
val fp_ssig_T = Tsubst Y fpT ssig_T;
val id' = HOLogic.id_const fpT;
val convol' = mk_convol (id', dtor);
val dead_pre_map' = Term.subst_atomic_types [(Y, fp_ssig_T), (Z, fpT)] dead_pre_map;
val dead_k_map' = Term.subst_atomic_types [(Y, fpT), (Z, fppreT)] dead_k_map;
val rho' = substT Y fpT rho;
val x' = substT Y fp_k_T x;
val goal = mk_Trueprop_eq (dtor $ (algrho $ x'),
dead_pre_map' $ eval $ (rho' $ (dead_k_map' $ convol' $ x')));
in
Variable.add_free_names ctxt goal []
|> (fn vars => Goal.prove_sorry ctxt vars [] goal (fn {context = ctxt, prems = _} =>
mk_dtor_algrho_tac ctxt eval_thm k_as_ssig_natural_pointful eval_core_k_as_ssig algrho_def))
|> Thm.close_derivation \<^here>
end;
fun derive_algLam_step_or_merge ctxt Y fpT ctor proto_sctr algLam proto_sctr_def old_algLam_pointful
algLam_algLam =
let
val proto_sctr' = substT Y fpT proto_sctr;
val goal = mk_Trueprop_eq (HOLogic.mk_comp (algLam, proto_sctr'), ctor);
val algLam_algLam_pointful = mk_pointful ctxt algLam_algLam;
in
Variable.add_free_names ctxt goal []
|> (fn vars => Goal.prove_sorry ctxt vars [] goal (fn {context = ctxt, prems = _} =>
mk_algLam_step_tac ctxt proto_sctr_def old_algLam_pointful algLam_algLam_pointful))
|> Thm.close_derivation \<^here>
end;
fun derive_eval_sctr ctxt Y Z fpT ssig_T dead_pre_map ctor eval sctr proto_sctr_pointful_natural
eval_Oper algLam_thm sctr_def =
let
val fp_ssig_T = Tsubst Y fpT ssig_T;
val dead_pre_map' = Term.subst_atomic_types [(Y, fp_ssig_T), (Z, fpT)] dead_pre_map;
val sctr' = substT Y fpT sctr;
val goal = mk_Trueprop_eq (HOLogic.mk_comp (eval, sctr'),
HOLogic.mk_comp (ctor, dead_pre_map' $ eval));
in
Variable.add_free_names ctxt goal []
|> (fn vars => Goal.prove_sorry ctxt vars [] goal (fn {context = ctxt, prems = _} =>
mk_eval_sctr_tac ctxt proto_sctr_pointful_natural eval_Oper algLam_thm sctr_def))
|> Thm.close_derivation \<^here>
end;
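(* Derives the pointfree fixpoint equation of corecUU together with uniqueness: any g
   satisfying the same equation equals corecUU f. *)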
fun derive_corecUU_pointfree_unique ctxt Y Z preT fpT ssig_T dead_pre_map ctor dead_ssig_map eval
corecUU f g dead_pre_map_comp0 dead_pre_map_comp dtor_ctor dtor_inject ssig_map_comp
flat_pointful_natural eval_core_pointful_natural eval_thm eval_flat corecU_ctor corecU_unique
sctr_pointful_natural eval_sctr_pointful corecUU_def =
let
val ssig_preT = Tsubst Y ssig_T preT;
val ssig_pre_ssig_T = Tsubst Y ssig_preT ssig_T;
val fp_ssig_T = Tsubst Y fpT ssig_T;
val dead_pre_map' = Term.subst_atomic_types [(Y, fp_ssig_T), (Z, fpT)] dead_pre_map;
val dead_pre_map'' = Term.subst_atomic_types [(Y, ssig_T), (Z, fp_ssig_T)] dead_pre_map;
val dead_ssig_map' = Term.subst_atomic_types [(Y, ssig_preT), (Z, fpT)] dead_ssig_map;
val dead_ssig_map'' = substT Z fpT dead_ssig_map;
val f' = substT Z ssig_pre_ssig_T f;
val g' = substT Z fpT g;
val corecUU_f = corecUU $ f';
fun mk_eq fpf =
mk_Trueprop_eq (fpf, Library.foldl1 HOLogic.mk_comp [eval, dead_ssig_map' $
Library.foldl1 HOLogic.mk_comp [ctor, dead_pre_map' $ eval, dead_pre_map''
$ (dead_ssig_map'' $ fpf)],
f']);
val corecUU_pointfree =
let
val goal = mk_eq corecUU_f;
in
Variable.add_free_names ctxt goal []
|> (fn vars => Goal.prove_sorry ctxt vars [] goal (fn {context = ctxt, prems = _} =>
mk_corecUU_pointfree_tac ctxt dead_pre_map_comp0 dead_pre_map_comp dtor_ctor dtor_inject
ssig_map_comp flat_pointful_natural eval_core_pointful_natural eval_thm eval_flat
corecU_ctor sctr_pointful_natural eval_sctr_pointful corecUU_def))
|> Thm.close_derivation \<^here>
end;
val corecUU_unique =
let
val prem = mk_eq g';
val goal = mk_Trueprop_eq (g', corecUU_f);
in
fold (Variable.add_free_names ctxt) [prem, goal] []
|> (fn vars => Goal.prove_sorry ctxt vars [prem] goal
(fn {context = ctxt, prems = [prem]} =>
mk_corecUU_unique_tac ctxt dead_pre_map_comp0 dead_pre_map_comp dtor_ctor
ssig_map_comp flat_pointful_natural eval_core_pointful_natural eval_thm eval_flat
corecU_unique sctr_pointful_natural eval_sctr_pointful corecUU_def prem))
|> Thm.close_derivation \<^here>
end;
in
(corecUU_pointfree, corecUU_unique)
end;
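(* Bundles the definitions of flat, eval_core, eval, cutSsig, algLam and corecU for one
   corecursor version and derives the transfer rules for flat and eval_core along the way. *)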
fun define_flat_etc fp_b version live_AsBs Y Z preT fpT sig_T ssig_T Oper VLeaf CLeaf pre_rel
dead_pre_map dtor dtor_unfold dead_sig_map ssig_rel dead_ssig_map Lam Rs R pre_map_transfer
fp_k_T_rel_eqs sig_map_transfer ssig_map_transfer Lam_transfer dtor_transfer lthy =
let
val (flat_data as (flat, flat_def, _), lthy) = lthy
|> define_flat fp_b version Y Z fpT sig_T ssig_T Oper VLeaf CLeaf dead_sig_map;
val (eval_core_data as (eval_core, eval_core_def, _), lthy) = lthy
|> define_eval_core fp_b version Y Z preT fpT sig_T ssig_T dtor Oper VLeaf CLeaf dead_pre_map
dead_sig_map dead_ssig_map flat Lam;
val ((eval_data as (eval, _), cutSsig_data as (cutSsig, _)), lthy) = lthy
|> define_eval fp_b version Y Z preT fpT ssig_T dtor dtor_unfold dead_ssig_map eval_core
||>> define_cutSsig fp_b version Y Z preT ssig_T dead_pre_map VLeaf dead_ssig_map flat
eval_core;
val ((algLam_data, unfold_data), lthy) = lthy
|> define_algLam fp_b version Y Z fpT ssig_T Oper VLeaf dead_sig_map eval
||>> define_corecU fp_b version Y Z preT ssig_T dtor_unfold VLeaf cutSsig;
val flat_transfer = derive_transfer_by_transfer_prover lthy live_AsBs Rs R flat [flat_def] []
[sig_map_transfer];
val eval_core_transfer = derive_Lam_or_eval_core_transfer lthy live_AsBs Y Z preT ssig_T Rs R
pre_rel ssig_rel ssig_rel eval_core eval_core_def fp_k_T_rel_eqs
[pre_map_transfer, sig_map_transfer, ssig_map_transfer, flat_transfer, Lam_transfer,
dtor_transfer];
in
(((((((flat_data, flat_transfer), (eval_core_data, eval_core_transfer)), eval_data),
cutSsig_data), algLam_data), unfold_data), lthy)
end;
fun derive_Sig_natural_etc ctxt live live_AsBs Y Z preT fpT k_or_fpT sig_T ssig_T pre_map
dead_pre_map ctor dtor Sig dead_sig_map Oper VLeaf CLeaf ssig_map dead_ssig_map Lam flat
eval_core eval cutSsig algLam corecU x fs f g ctor_dtor dtor_inject dtor_unfold_thm
dtor_unfold_unique sig_map_thm ssig_induct ssig_map_thms Oper_map_thm VLeaf_map_thm
CLeaf_map_thm Lam_transfer flat_simps flat_transfer eval_core_simps eval_core_transfer eval_def
cutSsig_def algLam_def corecU_def live_pre_bnf pre_bnf dead_pre_bnf fp_bnf sig_bnf ssig_bnf
dead_ssig_bnf =
let
val SOME prod_bnf = bnf_of ctxt \<^type_name>\<open>prod\<close>;
val f' = substT Z fpT f;
val dead_ssig_map' = substT Z fpT dead_ssig_map;
val extdd = Term.lambda f' (HOLogic.mk_comp (eval, dead_ssig_map' $ f'));
val live_pre_map_def = map_def_of_bnf live_pre_bnf;
val pre_map_comp = map_comp_of_bnf pre_bnf;
val dead_pre_map_id = map_id_of_bnf dead_pre_bnf;
val dead_pre_map_comp0 = map_comp0_of_bnf dead_pre_bnf;
val dead_pre_map_comp = map_comp_of_bnf dead_pre_bnf;
val dead_pre_map_cong = map_cong_of_bnf dead_pre_bnf;
val fp_map_id = map_id_of_bnf fp_bnf;
val sig_map_ident = map_ident_of_bnf sig_bnf;
val sig_map_comp0 = map_comp0_of_bnf sig_bnf;
val sig_map_comp = map_comp_of_bnf sig_bnf;
val sig_map_cong = map_cong_of_bnf sig_bnf;
val ssig_map_id = map_id_of_bnf ssig_bnf;
val ssig_map_comp = map_comp_of_bnf ssig_bnf;
val dead_ssig_map_comp0 = map_comp0_of_bnf dead_ssig_bnf;
val k_preT_map_id0s =
map map_id0_of_bnf (map_filter (bnf_of ctxt) (fold add_type_namesT [preT, k_or_fpT] []));
val Sig_natural = derive_natural_by_unfolding ctxt live_AsBs preT pre_map fs f Sig
([sig_map_thm, live_pre_map_def, @{thm BNF_Composition.id_bnf_def}] @ k_preT_map_id0s);
val Oper_natural =
derive_natural_by_unfolding ctxt live_AsBs preT pre_map fs f Oper [Oper_map_thm];
val VLeaf_natural =
derive_natural_by_unfolding ctxt live_AsBs preT pre_map fs f VLeaf [VLeaf_map_thm];
val Lam_natural = derive_natural_from_transfer_with_pre_type ctxt live_AsBs Y Z preT ssig_T
pre_map ssig_map fs f Lam Lam_transfer [prod_bnf, pre_bnf, sig_bnf, ssig_bnf] [];
val flat_natural = derive_natural_from_transfer ctxt live_AsBs [] fs f flat flat_transfer
[ssig_bnf] [];
val eval_core_natural = derive_natural_from_transfer_with_pre_type ctxt live_AsBs Y Z preT
ssig_T pre_map ssig_map fs f eval_core eval_core_transfer [prod_bnf, pre_bnf, ssig_bnf] [];
val Sig_pointful_natural = mk_pointful ctxt Sig_natural RS sym;
val Oper_natural_pointful = mk_pointful ctxt Oper_natural;
val Oper_pointful_natural = Oper_natural_pointful RS sym;
val flat_pointful_natural = mk_pointful ctxt flat_natural RS sym;
val Lam_natural_pointful = mk_pointful ctxt Lam_natural;
val Lam_pointful_natural = Lam_natural_pointful RS sym;
val eval_core_pointful_natural = mk_pointful ctxt eval_core_natural RS sym;
val cutSsig_def_pointful_natural = mk_pointful ctxt (HOLogic.mk_obj_eq cutSsig_def) RS sym;
val flat_VLeaf = derive_flat_VLeaf ctxt Y Z ssig_T x VLeaf dead_ssig_map flat ssig_induct
fp_map_id sig_map_cong sig_map_ident sig_map_comp ssig_map_thms flat_simps;
val flat_flat = derive_flat_flat ctxt Y Z ssig_T x dead_ssig_map flat ssig_induct fp_map_id
sig_map_cong sig_map_comp ssig_map_thms flat_simps;
val eval_core_flat = derive_eval_core_flat ctxt Y Z preT ssig_T dead_pre_map dead_ssig_map flat
eval_core x ssig_induct dead_pre_map_id dead_pre_map_comp0 dead_pre_map_comp fp_map_id
sig_map_comp sig_map_cong ssig_map_thms ssig_map_comp flat_simps flat_pointful_natural
flat_flat Lam_pointful_natural eval_core_simps;
val eval_thm = derive_eval_thm ctxt dtor_inject dtor_unfold_thm eval_def;
val eval_flat = derive_eval_flat ctxt Y Z fpT ssig_T dead_ssig_map flat eval x
dead_pre_map_comp0 dtor_unfold_unique ssig_map_id ssig_map_comp flat_pointful_natural
eval_core_pointful_natural eval_core_flat eval_thm;
val eval_Oper = derive_eval_Oper ctxt live Y Z fpT sig_T ssig_T dead_sig_map Oper eval algLam x
sig_map_ident sig_map_comp0 sig_map_comp Oper_natural_pointful VLeaf_natural flat_simps
eval_flat algLam_def;
val eval_VLeaf = derive_eval_V_or_CLeaf ctxt Y fpT VLeaf eval x dead_pre_map_id
dead_pre_map_comp fp_map_id dtor_unfold_unique VLeaf_map_thm eval_core_simps eval_thm;
val eval_CLeaf = derive_eval_V_or_CLeaf ctxt Y fpT CLeaf eval x dead_pre_map_id
dead_pre_map_comp fp_map_id dtor_unfold_unique CLeaf_map_thm eval_core_simps eval_thm;
val extdd_mor = derive_extdd_mor ctxt Y Z preT fpT ssig_T dead_pre_map dtor extdd cutSsig f g
dead_pre_map_comp0 dead_pre_map_comp VLeaf_map_thm ssig_map_comp flat_pointful_natural
eval_core_pointful_natural eval_thm eval_flat eval_VLeaf cutSsig_def;
val mor_cutSsig_flat = derive_mor_cutSsig_flat ctxt Y Z preT fpT ssig_T dead_pre_map
dead_ssig_map dtor flat eval_core eval cutSsig f g dead_pre_map_comp0 dead_pre_map_comp
dead_pre_map_cong dtor_unfold_unique dead_ssig_map_comp0 ssig_map_comp flat_simps
flat_pointful_natural eval_core_pointful_natural flat_flat flat_VLeaf eval_core_flat
cutSsig_def cutSsig_def_pointful_natural eval_thm;
val extdd_o_VLeaf = derive_extdd_o_VLeaf ctxt Y Z preT fpT ssig_T dead_pre_map dtor VLeaf extdd
f g dead_pre_map_comp0 dead_pre_map_comp dtor_inject ssig_map_thms eval_core_simps eval_thm
eval_VLeaf;
val (corecU_ctor, corecU_unique) = derive_corecU_ctor_unique ctxt Y Z preT fpT ssig_T
dead_pre_map ctor dtor VLeaf extdd corecU f g dead_pre_map_comp ctor_dtor dtor_unfold_thm
dtor_unfold_unique ssig_map_thms dead_ssig_map_comp0 flat_simps flat_VLeaf eval_core_simps
extdd_mor extdd_o_VLeaf cutSsig_def mor_cutSsig_flat corecU_def;
val dtor_algLam = derive_dtor_algLam ctxt Y Z preT fpT sig_T ssig_T dead_pre_map dtor
dead_sig_map Lam eval algLam x pre_map_comp dead_pre_map_id dead_pre_map_comp0
dead_pre_map_comp sig_map_comp Oper_pointful_natural ssig_map_thms dead_ssig_map_comp0
Lam_pointful_natural eval_core_simps eval_thm eval_flat eval_VLeaf algLam_def;
in
(Sig_pointful_natural, flat_pointful_natural, Lam_natural_pointful, Lam_pointful_natural,
flat_VLeaf, eval_core_pointful_natural, eval_thm, eval_flat,
[eval_Oper, eval_VLeaf, eval_CLeaf], corecU_ctor, corecU_unique, dtor_algLam)
end;
fun derive_embL_natural_etc ctxt Inx_const old_ssig_bnf ssig_bnf Y Z preT fpT old_ssig_T ssig_T
dead_pre_map Sig dead_old_ssig_map embL old_algLam algLam old_flat flat old_eval_core eval_core
old_eval eval x f old_ssig_induct dead_pre_map_comp0 dead_pre_map_comp fp_map_id dtor_inject
dtor_unfold_unique Sig_pointful_natural unsig_thm sig_map_thm old_sig_map_comp old_sig_map_cong
old_ssig_map_thms old_Lam_pointful_natural Lam_def old_flat_simps flat_simps embL_simps
embL_transfer old_eval_core_simps eval_core_simps old_eval_thm eval_thm old_dtor_algLam
dtor_algLam old_algLam_thm =
let
val embL_natural = derive_natural_from_transfer ctxt [(Y, Z)] [] [] f embL embL_transfer
[old_ssig_bnf, ssig_bnf] [];
val embL_pointful_natural = mk_pointful ctxt embL_natural RS sym;
val old_algLam_pointful = mk_pointful ctxt old_algLam_thm;
val flat_embL = derive_flat_embL ctxt Y Z old_ssig_T ssig_T dead_old_ssig_map embL old_flat flat
x old_ssig_induct fp_map_id Sig_pointful_natural old_sig_map_comp old_sig_map_cong
old_ssig_map_thms old_flat_simps flat_simps embL_simps;
val eval_core_embL = derive_eval_core_embL ctxt Y Z preT old_ssig_T ssig_T dead_pre_map embL
old_eval_core eval_core x old_ssig_induct dead_pre_map_comp0 dead_pre_map_comp
Sig_pointful_natural unsig_thm old_sig_map_comp old_sig_map_cong old_Lam_pointful_natural
Lam_def flat_embL embL_simps embL_pointful_natural old_eval_core_simps eval_core_simps;
val eval_embL = derive_eval_embL ctxt Y fpT embL old_eval eval dead_pre_map_comp0
dtor_unfold_unique embL_pointful_natural eval_core_embL old_eval_thm eval_thm;
val algLam_algLam = derive_algLam_algLam ctxt Inx_const Y fpT Sig old_algLam algLam
dead_pre_map_comp dtor_inject unsig_thm sig_map_thm Lam_def eval_embL old_dtor_algLam
dtor_algLam;
in
(embL_pointful_natural, old_algLam_pointful, eval_embL, algLam_algLam)
end;
fun define_corecUU_etc fp_b version live_AsBs Y Z preT fpT ssig_T pre_map dead_pre_map pre_rel
fp_rel ctor Oper ssig_map dead_ssig_map ssig_rel proto_sctr flat eval_core eval corecU fs f g Rs
R pre_map_transfer fp_k_T_rel_eqs dtor_unfold_transfer dtor_transfer ssig_map_transfer
proto_sctr_transfer proto_sctr_pointful_natural flat_transfer flat_pointful_natural
eval_core_transfer eval_core_pointful_natural eval_thm eval_flat eval_Oper algLam_thm
cutSsig_def corecU_def corecU_ctor corecU_unique pre_bnf dead_pre_bnf fp_res ssig_fp_sugar
lthy =
let
val ssig_bnf = #fp_bnf ssig_fp_sugar;
val dead_pre_map_comp0 = map_comp0_of_bnf dead_pre_bnf;
val dead_pre_map_comp = map_comp_of_bnf dead_pre_bnf;
val [dtor_ctor] = #dtor_ctors fp_res;
val [dtor_inject] = #dtor_injects fp_res;
val ssig_map_comp = map_comp_of_bnf ssig_bnf;
val sctr_rhs = HOLogic.mk_comp (Oper, substT Y ssig_T proto_sctr);
val ((sctr, sctr_def), lthy) = lthy
|> define_const true fp_b version sctrN sctr_rhs;
val (corecUU_data as (corecUU, corecUU_def), lthy) = lthy
|> define_corecUU fp_b version Y Z preT ssig_T dead_pre_map dead_ssig_map flat eval_core sctr
corecU;
val eval_sctr = derive_eval_sctr lthy Y Z fpT ssig_T dead_pre_map ctor eval sctr
proto_sctr_pointful_natural eval_Oper algLam_thm sctr_def;
val sctr_transfer = derive_sctr_transfer lthy live_AsBs Y Z ssig_T Rs R pre_rel ssig_rel sctr
sctr_def fp_k_T_rel_eqs [proto_sctr_transfer];
val sctr_natural = derive_natural_from_transfer_with_pre_type lthy live_AsBs Y Z preT ssig_T
pre_map ssig_map fs f sctr sctr_transfer [pre_bnf, ssig_bnf] [];
val sctr_pointful_natural = mk_pointful lthy sctr_natural RS sym;
val eval_sctr_pointful = mk_pointful lthy eval_sctr RS sym;
val (corecUU_pointfree, corecUU_unique) = derive_corecUU_pointfree_unique lthy Y Z preT fpT
ssig_T dead_pre_map ctor dead_ssig_map eval corecUU f g dead_pre_map_comp0 dead_pre_map_comp
dtor_ctor dtor_inject ssig_map_comp flat_pointful_natural eval_core_pointful_natural eval_thm
eval_flat corecU_ctor corecU_unique sctr_pointful_natural eval_sctr_pointful corecUU_def;
val corecUU_thm = mk_pointful lthy corecUU_pointfree;
val corecUU_transfer = derive_corecUU_transfer lthy live_AsBs Y Z Rs R preT ssig_T pre_rel
fp_rel ssig_rel corecUU cutSsig_def corecU_def corecUU_def fp_k_T_rel_eqs
[pre_map_transfer, dtor_unfold_transfer, dtor_transfer, ssig_map_transfer, flat_transfer,
eval_core_transfer, sctr_transfer, @{thm convol_transfer} (*FIXME: needed?*)];
in
((corecUU_data, corecUU_thm, corecUU_unique, corecUU_transfer, eval_sctr, sctr_transfer,
sctr_pointful_natural), lthy)
end;
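(* Retr R a b holds iff pre_rel R (dtor a) (dtor b); the lemmas below show that Retr
   preserves equivalence relations and that R <= Retr R implies R <= (=), the raw
   coinduction principle. *)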
fun mk_equivp T = Const (\<^const_name>\<open>equivp\<close>, mk_predT [mk_pred2T T T]);
fun derive_equivp_Retr ctxt fpT Retr R dead_pre_rel_refl_thm dead_pre_rel_flip_thm
dead_pre_rel_mono_thm dead_pre_rel_compp_thm =
let
val prem = HOLogic.mk_Trueprop (mk_equivp fpT $ R);
val goal = Logic.mk_implies (prem, HOLogic.mk_Trueprop (mk_equivp fpT $ (betapply (Retr, R))));
in
Variable.add_free_names ctxt goal []
|> (fn vars => Goal.prove_sorry ctxt vars [] goal (fn {context = ctxt, prems = _} =>
mk_equivp_Retr_tac ctxt dead_pre_rel_refl_thm dead_pre_rel_flip_thm dead_pre_rel_mono_thm
dead_pre_rel_compp_thm))
|> Thm.close_derivation \<^here>
end;
fun derive_Retr_coinduct ctxt fpT Retr R dtor_rel_coinduct_thm rel_eq_thm =
let
val goal = HOLogic.mk_Trueprop (list_all_free [R]
(HOLogic.mk_imp (mk_leq R (Retr $ R), mk_leq R (HOLogic.eq_const fpT))));
in
Goal.prove_sorry ctxt [] [] goal (fn {context = ctxt, prems = _} =>
mk_Retr_coinduct_tac ctxt dtor_rel_coinduct_thm rel_eq_thm)
|> Thm.close_derivation \<^here>
end;
fun derive_Retr fp_sugar fpT dead_pre_bnf ctor dtor names_lthy lthy =
let
val (R, _) = names_lthy
|> yield_singleton (mk_Frees "R") (mk_pred2T fpT fpT);
val pre_fpT = pre_type_of_ctor fpT ctor;
val fp_pre_rel = mk_rel1 lthy fpT fpT pre_fpT dead_pre_bnf;
val Retr = Abs ("R", fastype_of R, Abs ("a", fpT,
Abs ("b", fpT, list_comb (fp_pre_rel, [Bound 2, dtor $ Bound 1, dtor $ Bound 0]))));
val equivp_Retr = derive_equivp_Retr lthy fpT Retr R (rel_refl_of_bnf dead_pre_bnf)
(rel_flip_of_bnf dead_pre_bnf) (rel_mono_of_bnf dead_pre_bnf) (rel_OO_of_bnf dead_pre_bnf);
val Retr_coinduct = derive_Retr_coinduct lthy fpT Retr R
(fp_sugar |> #fp_res |> #xtor_rel_co_induct) (fp_sugar |> #fp_bnf |> rel_eq_of_bnf);
in
(Retr, equivp_Retr, Retr_coinduct)
end;
fun mk_gen_cong fpT eval_domT =
let val fp_relT = mk_pred2T fpT fpT in
Const (\<^const_name>\<open>cong.gen_cong\<close>,
[mk_predT [fp_relT, eval_domT, eval_domT], eval_domT --> fpT, fp_relT] ---> fp_relT)
end;
fun mk_cong_locale rel eval Retr =
Const (\<^const_name>\<open>cong\<close>, mk_predT (map fastype_of [rel, eval, Retr]));
fun derive_cong_locale ctxt rel eval Retr0 tac =
let
val Retr = enforce_type ctxt domain_type (domain_type (fastype_of rel)) Retr0;
val goal = HOLogic.mk_Trueprop (list_comb (mk_cong_locale rel eval Retr, [rel, eval, Retr]));
in
Variable.add_free_names ctxt goal []
|> (fn vars => Goal.prove_sorry ctxt vars [] goal (fn {context = ctxt, prems = _} => tac ctxt))
|> Thm.close_derivation \<^here>
end;
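(* Congruence infrastructure: derive_cong_general introduces the cong constant as
   cong.gen_cong applied to the dead ssig relator and eval, discharges the cong locale
   assumptions, and instantiates the generic reflexivity, symmetry, transitivity and
   coinduction rules. *)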
fun derive_cong_general fp_b version fpT dead_ssig_bnf dead_pre_bnf eval Retr equivp_Retr
Retr_coinduct eval_thm eval_core_transfer lthy =
let
val eval_domT = domain_type (fastype_of eval);
fun cong_locale_tac ctxt =
mk_cong_locale_tac ctxt (rel_mono_of_bnf dead_pre_bnf) (rel_map_of_bnf dead_pre_bnf)
equivp_Retr (rel_mono_of_bnf dead_ssig_bnf) (rel_map_of_bnf dead_ssig_bnf) eval_thm
eval_core_transfer;
val rel = mk_rel1 lthy fpT fpT eval_domT dead_ssig_bnf;
val cong_rhs = list_comb (mk_gen_cong fpT eval_domT, [rel, eval]);
val ((_, cong_def), lthy) = lthy
|> define_const false fp_b version congN cong_rhs;
val cong_locale = derive_cong_locale lthy rel eval Retr cong_locale_tac;
val fold_cong_def = Local_Defs.fold lthy [cong_def];
fun instance_of_gen thm = fold_cong_def (thm OF [cong_locale]);
val cong_base = instance_of_gen @{thm cong.imp_gen_cong};
val cong_refl = instance_of_gen @{thm cong.gen_cong_reflp};
val cong_sym = instance_of_gen @{thm cong.gen_cong_symp};
val cong_trans = instance_of_gen @{thm cong.gen_cong_transp};
fun mk_cong_rho thm = thm RS instance_of_gen @{thm cong.gen_cong_rho};
val dtor_coinduct = @{thm predicate2I_obj} RS
(Retr_coinduct RS instance_of_gen @{thm cong.coinduction} RS @{thm predicate2D_obj});
in
(cong_def, cong_locale, cong_base, cong_refl, cong_sym, cong_trans, dtor_coinduct, mk_cong_rho,
lthy)
end;
fun derive_cong_base fp_b version fpT dead_ssig_bnf ssig_fp_bnf_sugar dead_pre_bnf eval eval_thm
eval_core_transfer eval_VLeaf eval_sctr sctr_transfer Retr equivp_Retr Retr_coinduct lthy =
let
val (cong_def, cong_locale, cong_base, cong_refl, cong_sym, cong_trans, dtor_coinduct,
mk_cong_rho, lthy) =
derive_cong_general fp_b version fpT dead_ssig_bnf dead_pre_bnf eval Retr equivp_Retr
Retr_coinduct eval_thm eval_core_transfer lthy;
val dead_pre_map_id0 = map_id0_of_bnf dead_pre_bnf;
val dead_pre_map_comp0 = map_comp0_of_bnf dead_pre_bnf;
val dead_pre_map_cong0 = map_cong0_of_bnf dead_pre_bnf;
val dead_pre_map_cong0' =
@{thm box_equals[OF _ o_apply[symmetric] id_apply[symmetric]]} RS dead_pre_map_cong0 RS ext;
val dead_pre_rel_map = rel_map_of_bnf dead_pre_bnf;
val ctor_alt_thm = eval_VLeaf RS (@{thm eq_comp_compI} OF [eval_sctr,
trans OF [dead_pre_map_comp0 RS sym, trans OF [dead_pre_map_cong0', dead_pre_map_id0]]]);
val cong_ctor_intro = mk_cong_rho ctor_alt_thm
|> unfold_thms lthy [o_apply]
|> (fn thm => sctr_transfer RS rel_funD RS thm)
|> unfold_thms lthy (id_apply :: dead_pre_rel_map @ #rel_injects ssig_fp_bnf_sugar);
in
({cong_def = cong_def, cong_locale = cong_locale, cong_base = cong_base, cong_refl = cong_refl,
cong_sym = cong_sym, cong_trans = cong_trans, dtor_coinduct = dtor_coinduct,
cong_alg_intros = [cong_ctor_intro]}, lthy)
end;
fun update_cong_alg_intros ctxt cong_def cong_locale old_cong_def old_cong_locale emb =
let
fun instance_of_gen thm = Local_Defs.fold ctxt [cong_def] (thm OF [cong_locale]);
fun instance_of_old_gen thm = Local_Defs.fold ctxt [old_cong_def] (thm OF [old_cong_locale]);
val emb_idem = @{thm ord_le_eq_trans} OF [emb, instance_of_gen @{thm cong.gen_cong_idem}];
fun mk_rel_mono bnf = instance_of_old_gen @{thm cong.leq_gen_cong} RS rel_mono_of_bnf bnf RS
@{thm predicate2D};
fun mk_intro bnf thm = mk_rel_mono bnf RS (@{thm predicate2D} OF [emb_idem, thm]);
in
map2 mk_intro
end;
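(* Extend the coinduction-up-to infrastructure after registering a friend: add the introduction rule for the new "algrho" constant and re-derive the old introduction rules for the new congruence relation. *)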
fun derive_cong_step fp_b version fpT dead_ssig_bnf dead_pre_bnf eval eval_thm eval_core_transfer
old_dtor_coinduct_info algrho_def k_as_ssig_transfer Retr equivp_Retr Retr_coinduct
eval_embL embL_transfer old_all_dead_k_bnfs lthy =
let
val old_cong_def = #cong_def old_dtor_coinduct_info;
val old_cong_locale = #cong_locale old_dtor_coinduct_info;
val old_cong_alg_intros = #cong_alg_intros old_dtor_coinduct_info;
val (cong_def, cong_locale, cong_base, cong_refl, cong_sym, cong_trans, dtor_coinduct,
mk_cong_rho, lthy) =
derive_cong_general fp_b version fpT dead_ssig_bnf dead_pre_bnf eval Retr equivp_Retr
Retr_coinduct eval_thm eval_core_transfer lthy;
val cong_alg_intro =
k_as_ssig_transfer RS rel_funD RS mk_cong_rho (HOLogic.mk_obj_eq algrho_def);
val gen_cong_emb =
(@{thm gen_cong_emb} OF [old_cong_locale, cong_locale, eval_embL, embL_transfer])
|> Local_Defs.fold lthy [old_cong_def, cong_def];
val cong_alg_intros = update_cong_alg_intros lthy cong_def cong_locale old_cong_def
old_cong_locale gen_cong_emb old_all_dead_k_bnfs old_cong_alg_intros;
in
({cong_def = cong_def, cong_locale = cong_locale, cong_base = cong_base, cong_refl = cong_refl,
cong_sym = cong_sym, cong_trans = cong_trans, dtor_coinduct = dtor_coinduct,
cong_alg_intros = cong_alg_intro :: cong_alg_intros}, lthy)
end;
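(* Merge the coinduction-up-to infrastructures of two corecursor states for the same codatatype, eliminating duplicate friends. *)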
fun derive_cong_merge fp_b version fpT old1_friend_names old2_friend_names dead_ssig_bnf
dead_pre_bnf eval eval_thm eval_core_transfer old1_dtor_coinduct_info old2_dtor_coinduct_info
Retr equivp_Retr Retr_coinduct eval_embLL embLL_transfer eval_embLR embLR_transfer
old1_all_dead_k_bnfs old2_all_dead_k_bnfs lthy =
let
val old1_cong_def = #cong_def old1_dtor_coinduct_info;
val old1_cong_locale = #cong_locale old1_dtor_coinduct_info;
val old1_cong_alg_intros = #cong_alg_intros old1_dtor_coinduct_info;
val old2_cong_def = #cong_def old2_dtor_coinduct_info;
val old2_cong_locale = #cong_locale old2_dtor_coinduct_info;
val old2_cong_alg_intros = #cong_alg_intros old2_dtor_coinduct_info;
val (cong_def, cong_locale, cong_base, cong_refl, cong_sym, cong_trans, dtor_coinduct, _,
lthy) =
derive_cong_general fp_b version fpT dead_ssig_bnf dead_pre_bnf eval Retr equivp_Retr
Retr_coinduct eval_thm eval_core_transfer lthy;
val emb1 = (@{thm gen_cong_emb} OF [old1_cong_locale, cong_locale, eval_embLL, embLL_transfer])
|> Local_Defs.fold lthy [old1_cong_def, cong_def];
val emb2 = (@{thm gen_cong_emb} OF [old2_cong_locale, cong_locale, eval_embLR, embLR_transfer])
|> Local_Defs.fold lthy [old2_cong_def, cong_def];
val cong_alg_intros1 = update_cong_alg_intros lthy cong_def cong_locale old1_cong_def
old1_cong_locale emb1 old1_all_dead_k_bnfs old1_cong_alg_intros;
val cong_alg_intros2 = update_cong_alg_intros lthy cong_def cong_locale old2_cong_def
old2_cong_locale emb2 old2_all_dead_k_bnfs old2_cong_alg_intros;
val (cong_algrho_intros1, cong_ctor_intro1) = split_last cong_alg_intros1;
val (cong_algrho_intros2, _) = split_last cong_alg_intros2;
val (old1_all_rho_k_bnfs, old1_Sig_bnf) = split_last old1_all_dead_k_bnfs;
val (old2_all_rho_k_bnfs, _) = split_last old2_all_dead_k_bnfs;
val (friend_names, (cong_algrho_intros, all_rho_k_bnfs)) =
merge_lists (op = o apply2 fst)
(old1_friend_names ~~ (cong_algrho_intros1 ~~ old1_all_rho_k_bnfs))
(old2_friend_names ~~ (cong_algrho_intros2 ~~ old2_all_rho_k_bnfs))
|> split_list ||> split_list;
in
(({cong_def = cong_def, cong_locale = cong_locale, cong_base = cong_base, cong_refl = cong_refl,
cong_sym = cong_sym, cong_trans = cong_trans, dtor_coinduct = dtor_coinduct,
cong_alg_intros = cong_algrho_intros @ [cong_ctor_intro1]}, all_rho_k_bnfs @ [old1_Sig_bnf],
friend_names), lthy)
end;
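(* Construct the initial corecursor state for a codatatype without friends: define the "sig" and "ssig" types together with "Lam", "flat", "eval", "corecU", and "corecUU", derive their characteristic theorems, and note them as facts when the "bnf_internals" option is set. *)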
fun derive_corecUU_base fpT_name lthy =
let
val fp_sugar as {T = Type (_, fpT_args0), pre_bnf, fp_bnf, fp_res, ...} =
checked_fp_sugar_of lthy fpT_name;
val fpT_Ss = map Type.sort_of_atyp fpT_args0;
val fp_alives = liveness_of_fp_bnf (length fpT_args0) fp_bnf;
val (((Es, Fs0), [Y, Z]), names_lthy) = lthy
|> mk_TFrees' fpT_Ss
||>> mk_TFrees' fpT_Ss
||>> mk_TFrees 2;
val Fs = @{map 3} (fn alive => fn E => fn F => if alive then F else E) fp_alives Es Fs0;
val As = Es @ [Y];
val Bs = Es @ [Z];
val live_EsFs = filter (op <>) (Es ~~ Fs);
val live_AsBs = live_EsFs @ [(Y, Z)];
val fTs = map (op -->) live_EsFs;
val RTs = map (uncurry mk_pred2T) live_EsFs;
val live = length live_EsFs;
val ((((((x, fs), f), g), Rs), R), names_lthy) = names_lthy
|> yield_singleton (mk_Frees "x") Y
||>> mk_Frees "f" fTs
||>> yield_singleton (mk_Frees "f") (Y --> Z)
||>> yield_singleton (mk_Frees "g") (Y --> Z)
||>> mk_Frees "R" RTs
||>> yield_singleton (mk_Frees "R") (mk_pred2T Y Z);
val ctor = mk_ctor Es (the_single (#ctors fp_res));
val dtor = mk_dtor Es (the_single (#dtors fp_res));
val fpT = Type (fpT_name, Es);
val preT = pre_type_of_ctor Y ctor;
val ((fp_b, version), lthy) = lthy |> get_name_next_version_of fpT_name;
val ((sig_fp_sugar, ssig_fp_sugar), lthy) = lthy
|> define_sig_type fp_b version fp_alives Es Y preT
||>> define_ssig_type fp_b version fp_alives Es Y fpT;
val sig_bnf = #fp_bnf sig_fp_sugar;
val ssig_bnf = #fp_bnf ssig_fp_sugar;
val (((dead_pre_bnf, dead_sig_bnf), dead_ssig_bnf), lthy) = lthy
|> bnf_kill_all_but 1 pre_bnf
||>> bnf_kill_all_but 1 sig_bnf
||>> bnf_kill_all_but 1 ssig_bnf;
val sig_fp_ctr_sugar = #fp_ctr_sugar sig_fp_sugar;
val ssig_fp_ctr_sugar = #fp_ctr_sugar ssig_fp_sugar;
val sig_fp_bnf_sugar = #fp_bnf_sugar sig_fp_sugar;
val ssig_fp_bnf_sugar = #fp_bnf_sugar ssig_fp_sugar;
val ssig_fp_induct_sugar = the (#fp_co_induct_sugar ssig_fp_sugar);
val sig_ctr_sugar = #ctr_sugar sig_fp_ctr_sugar;
val ssig_ctr_sugar = #ctr_sugar ssig_fp_ctr_sugar;
val sig_T_name = fst (dest_Type (#T sig_fp_sugar));
val ssig_T_name = fst (dest_Type (#T ssig_fp_sugar));
val sig_T = Type (sig_T_name, As);
val ssig_T = Type (ssig_T_name, As);
val pre_map = mk_mapN lthy live_AsBs preT pre_bnf;
val pre_rel = mk_relN lthy live_AsBs preT pre_bnf;
val dead_pre_map = mk_map1 lthy Y Z preT dead_pre_bnf;
val fp_rel = mk_relN lthy live_EsFs fpT fp_bnf;
val dtor_unfold = mk_co_rec (Proof_Context.theory_of lthy) Greatest_FP [Z] fpT
(the_single (#xtor_un_folds fp_res));
val Sig = mk_ctr As (the_single (#ctrs sig_ctr_sugar));
val unsig = mk_disc_or_sel As (the_single (the_single (#selss sig_ctr_sugar)));
val sig_rel = mk_relN lthy live_AsBs sig_T sig_bnf;
val dead_sig_map = mk_map 1 As Bs (map_of_bnf dead_sig_bnf);
val [Oper, VLeaf, CLeaf] = map (mk_ctr As) (#ctrs ssig_ctr_sugar);
val ssig_map = mk_mapN lthy live_AsBs ssig_T ssig_bnf;
val ssig_rel = mk_relN lthy live_AsBs ssig_T ssig_bnf;
val dead_ssig_map = mk_map 1 As Bs (map_of_bnf dead_ssig_bnf);
val ((Lam, Lam_def), lthy) = lthy
|> define_Lam_base fp_b version Y Z preT ssig_T dead_pre_map Sig unsig dead_sig_map Oper
VLeaf;
val proto_sctr = Sig;
val pre_map_transfer = map_transfer_of_bnf pre_bnf;
val pre_rel_def = rel_def_of_bnf pre_bnf;
val dead_pre_map_id = map_id_of_bnf dead_pre_bnf;
val dead_pre_map_comp = map_comp_of_bnf dead_pre_bnf;
val fp_rel_eq = rel_eq_of_bnf fp_bnf;
val [ctor_dtor] = #ctor_dtors fp_res;
val [dtor_ctor] = #dtor_ctors fp_res;
val [dtor_inject] = #dtor_injects fp_res;
val [dtor_unfold_thm] = #xtor_un_fold_thms fp_res;
val dtor_unfold_unique = #xtor_un_fold_unique fp_res;
val [dtor_unfold_transfer] = #xtor_un_fold_transfers fp_res;
val [dtor_rel_thm] = #xtor_rels fp_res;
val unsig_thm = the_single (the_single (#sel_thmss sig_ctr_sugar));
val [sig_map_thm] = #map_thms sig_fp_bnf_sugar;
val [Oper_map_thm, VLeaf_map_thm, CLeaf_map_thm] = #map_thms ssig_fp_bnf_sugar;
val sig_map_transfer = map_transfer_of_bnf sig_bnf;
val ssig_map_thms = #map_thms ssig_fp_bnf_sugar;
val ssig_map_transfer = map_transfer_of_bnf ssig_bnf;
val ssig_induct = the_single (#co_inducts ssig_fp_induct_sugar);
val dtor_transfer = derive_dtor_transfer lthy live_EsFs Y Z pre_rel fp_rel Rs dtor dtor_rel_thm;
val preT_rel_eqs = map rel_eq_of_bnf (map_filter (bnf_of lthy) (add_type_namesT preT []));
val Sig_transfer = derive_sig_transfer I lthy live_AsBs pre_rel sig_rel Rs R Sig pre_rel_def
preT_rel_eqs (the_single (#ctr_transfers sig_fp_ctr_sugar));
val proto_sctr_transfer = Sig_transfer;
val unsig_transfer = derive_sig_transfer swap lthy live_AsBs pre_rel sig_rel Rs R unsig
pre_rel_def preT_rel_eqs (the_single (#sel_transfers sig_fp_ctr_sugar));
val Lam_transfer = derive_Lam_or_eval_core_transfer lthy live_AsBs Y Z preT ssig_T Rs R pre_rel
sig_rel ssig_rel Lam Lam_def []
[pre_map_transfer, sig_map_transfer, Sig_transfer, unsig_transfer];
val ((((((((flat, _, flat_simps), flat_transfer),
((eval_core, _, eval_core_simps), eval_core_transfer)), (eval, eval_def)),
(cutSsig, cutSsig_def)), (algLam, algLam_def)), (corecU, corecU_def)), lthy) = lthy
|> define_flat_etc fp_b version live_AsBs Y Z preT fpT sig_T ssig_T Oper VLeaf CLeaf pre_rel
dead_pre_map dtor dtor_unfold dead_sig_map ssig_rel dead_ssig_map Lam Rs R pre_map_transfer
[fp_rel_eq] sig_map_transfer ssig_map_transfer Lam_transfer dtor_transfer;
val (Sig_pointful_natural, flat_pointful_natural, _, Lam_pointful_natural, _,
eval_core_pointful_natural, eval_thm, eval_flat, eval_simps as [eval_Oper, eval_VLeaf, _],
corecU_ctor, corecU_unique, dtor_algLam) =
derive_Sig_natural_etc lthy live live_AsBs Y Z preT fpT fpT sig_T ssig_T pre_map dead_pre_map
ctor dtor Sig dead_sig_map Oper VLeaf CLeaf ssig_map dead_ssig_map Lam flat eval_core eval
cutSsig algLam corecU x fs f g ctor_dtor dtor_inject dtor_unfold_thm dtor_unfold_unique
sig_map_thm ssig_induct ssig_map_thms Oper_map_thm VLeaf_map_thm CLeaf_map_thm Lam_transfer
flat_simps flat_transfer eval_core_simps eval_core_transfer eval_def cutSsig_def algLam_def
corecU_def pre_bnf pre_bnf dead_pre_bnf fp_bnf sig_bnf ssig_bnf dead_ssig_bnf;
val proto_sctr_pointful_natural = Sig_pointful_natural;
val algLam_thm = derive_algLam_base lthy Y Z preT fpT dead_pre_map ctor dtor algLam proto_sctr
dead_pre_map_id dead_pre_map_comp ctor_dtor dtor_ctor dtor_unfold_unique unsig_thm
Sig_pointful_natural ssig_map_thms Lam_def flat_simps eval_core_simps eval_thm algLam_def;
val (((corecUU, _), corecUU_thm, corecUU_unique, corecUU_transfer, eval_sctr, sctr_transfer,
sctr_pointful_natural), lthy) = lthy
|> define_corecUU_etc fp_b version live_AsBs Y Z preT fpT ssig_T pre_map dead_pre_map pre_rel
fp_rel ctor Oper ssig_map dead_ssig_map ssig_rel proto_sctr flat eval_core eval corecU fs f
g Rs R pre_map_transfer [] dtor_unfold_transfer dtor_transfer ssig_map_transfer
proto_sctr_transfer proto_sctr_pointful_natural flat_transfer flat_pointful_natural
eval_core_transfer eval_core_pointful_natural eval_thm eval_flat eval_Oper algLam_thm
cutSsig_def corecU_def corecU_ctor corecU_unique pre_bnf dead_pre_bnf fp_res ssig_fp_sugar;
val (Retr, equivp_Retr, Retr_coinduct) = lthy
|> derive_Retr fp_sugar fpT dead_pre_bnf ctor dtor names_lthy;
val (dtor_coinduct_info, lthy) = lthy
|> derive_cong_base fp_b version fpT dead_ssig_bnf ssig_fp_bnf_sugar dead_pre_bnf eval
eval_thm eval_core_transfer eval_VLeaf eval_sctr sctr_transfer Retr equivp_Retr Retr_coinduct;
val buffer =
{Oper = Oper, VLeaf = VLeaf, CLeaf = CLeaf, ctr_wrapper = Sig, friends = Symtab.empty};
val notes =
[(corecUU_transferN, [corecUU_transfer])] @
(if Config.get lthy bnf_internals then
[(algLamN, [algLam_thm]),
(cong_alg_introsN, #cong_alg_intros dtor_coinduct_info),
(cong_localeN, [#cong_locale dtor_coinduct_info]),
(corecU_ctorN, [corecU_ctor]),
(corecU_uniqueN, [corecU_unique]),
(corecUUN, [corecUU_thm]),
(corecUU_uniqueN, [corecUU_unique]),
(dtor_algLamN, [dtor_algLam]),
(dtor_coinductN, [#dtor_coinduct dtor_coinduct_info]),
(dtor_transferN, [dtor_transfer]),
(equivp_RetrN, [equivp_Retr]),
(evalN, [eval_thm]),
(eval_core_pointful_naturalN, [eval_core_pointful_natural]),
(eval_core_transferN, [eval_core_transfer]),
(eval_flatN, [eval_flat]),
(eval_simpsN, eval_simps),
(flat_pointful_naturalN, [flat_pointful_natural]),
(flat_transferN, [flat_transfer]),
(Lam_pointful_naturalN, [Lam_pointful_natural]),
(Lam_transferN, [Lam_transfer]),
(proto_sctr_pointful_naturalN, [proto_sctr_pointful_natural]),
(proto_sctr_transferN, [proto_sctr_transfer]),
(Retr_coinductN, [Retr_coinduct]),
(sctr_pointful_naturalN, [sctr_pointful_natural]),
(sctr_transferN, [sctr_transfer]),
(Sig_pointful_naturalN, [Sig_pointful_natural])]
else
[])
|> map (fn (thmN, thms) =>
((mk_version_fp_binding true lthy version fp_b thmN, []), [(thms, [])]));
in
({fp_b = fp_b, version = version, fpT = fpT, Y = Y, Z = Z, friend_names = [],
sig_fp_sugars = [sig_fp_sugar], ssig_fp_sugar = ssig_fp_sugar, Lam = Lam,
proto_sctr = proto_sctr, flat = flat, eval_core = eval_core, eval = eval, algLam = algLam,
corecUU = corecUU, dtor_transfer = dtor_transfer, Lam_transfer = Lam_transfer,
Lam_pointful_natural = Lam_pointful_natural, proto_sctr_transfer = proto_sctr_transfer,
flat_simps = flat_simps, eval_core_simps = eval_core_simps, eval_thm = eval_thm,
eval_simps = eval_simps, all_algLam_algs = [algLam_thm], algLam_thm = algLam_thm,
dtor_algLam = dtor_algLam, corecUU_thm = corecUU_thm, corecUU_unique = corecUU_unique,
corecUU_transfer = corecUU_transfer, buffer = buffer, all_dead_k_bnfs = [dead_pre_bnf],
Retr = Retr, equivp_Retr = equivp_Retr, Retr_coinduct = Retr_coinduct,
dtor_coinduct_info = dtor_coinduct_info}
|> morph_corec_info (Local_Theory.target_morphism lthy),
lthy |> Local_Theory.notes notes |> snd)
end;
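(* Extend an existing corecursor state with a newly registered friendly operation: embed the old signature via "embL", define the new "Lam", "eval", and "corecUU", and derive the friend-specific "algrho" facts. *)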
fun derive_corecUU_step (fpT as Type (fpT_name, res_Ds))
({friend_names = old_friend_names, sig_fp_sugars = old_sig_fp_sugars as old_sig_fp_sugar :: _,
ssig_fp_sugar = old_ssig_fp_sugar, Lam = old_Lam0, proto_sctr = old_proto_sctr0,
flat = old_flat0, eval_core = old_eval_core0, eval = old_eval0, algLam = old_algLam0,
dtor_transfer, Lam_transfer = old_Lam_transfer,
Lam_pointful_natural = old_Lam_pointful_natural,
proto_sctr_transfer = old_proto_sctr_transfer, flat_simps = old_flat_simps,
eval_core_simps = old_eval_core_simps, eval_thm = old_eval_thm,
all_algLam_algs = old_all_algLam_algs, algLam_thm = old_algLam_thm,
dtor_algLam = old_dtor_algLam, buffer = old_buffer, all_dead_k_bnfs = old_all_dead_k_bnfs,
Retr = old_Retr0, equivp_Retr, Retr_coinduct, dtor_coinduct_info = old_dtor_coinduct_info,
...} : corec_info)
friend_name friend_T fp_b version Y Z k_T dead_k_bnf sig_fp_sugar ssig_fp_sugar rho rho_transfer
lthy =
let
val {pre_bnf = live_pre_bnf, fp_bnf = live_fp_bnf, fp_res, ...} =
checked_fp_sugar_of lthy fpT_name;
val names_lthy = lthy
|> fold Variable.declare_typ [Y, Z];
(* FIXME *)
val live_EsFs = [];
val live_AsBs = live_EsFs @ [(Y, Z)];
val live = length live_EsFs;
val ((((x, f), g), R), _) = names_lthy
|> yield_singleton (mk_Frees "x") Y
||>> yield_singleton (mk_Frees "f") (Y --> Z)
||>> yield_singleton (mk_Frees "g") (Y --> Z)
||>> yield_singleton (mk_Frees "R") (mk_pred2T Y Z);
(* FIXME *)
val fs = [];
val Rs = [];
val ctor = mk_ctor res_Ds (the_single (#ctors fp_res));
val dtor = mk_dtor res_Ds (the_single (#dtors fp_res));
val friend_names = friend_name :: old_friend_names;
val old_sig_bnf = #fp_bnf old_sig_fp_sugar;
val old_ssig_bnf = #fp_bnf old_ssig_fp_sugar;
val sig_bnf = #fp_bnf sig_fp_sugar;
val ssig_bnf = #fp_bnf ssig_fp_sugar;
val ((((((dead_pre_bnf, dead_fp_bnf), dead_old_sig_bnf), dead_old_ssig_bnf), dead_sig_bnf),
dead_ssig_bnf), lthy) = lthy
|> bnf_kill_all_but 1 live_pre_bnf
||>> bnf_kill_all_but 0 live_fp_bnf
||>> bnf_kill_all_but 1 old_sig_bnf
||>> bnf_kill_all_but 1 old_ssig_bnf
||>> bnf_kill_all_but 1 sig_bnf
||>> bnf_kill_all_but 1 ssig_bnf;
(* FIXME *)
val pre_bnf = dead_pre_bnf;
val fp_bnf = dead_fp_bnf;
val old_ssig_fp_ctr_sugar = #fp_ctr_sugar old_ssig_fp_sugar;
val sig_fp_ctr_sugar = #fp_ctr_sugar sig_fp_sugar;
val ssig_fp_ctr_sugar = #fp_ctr_sugar ssig_fp_sugar;
val sig_fp_bnf_sugar = #fp_bnf_sugar sig_fp_sugar;
val old_ssig_fp_bnf_sugar = #fp_bnf_sugar old_ssig_fp_sugar;
val ssig_fp_bnf_sugar = #fp_bnf_sugar ssig_fp_sugar;
val old_ssig_fp_induct_sugar = the (#fp_co_induct_sugar old_ssig_fp_sugar);
val ssig_fp_induct_sugar = the (#fp_co_induct_sugar ssig_fp_sugar);
val old_ssig_ctr_sugar = #ctr_sugar old_ssig_fp_ctr_sugar;
val sig_ctr_sugar = #ctr_sugar sig_fp_ctr_sugar;
val ssig_ctr_sugar = #ctr_sugar ssig_fp_ctr_sugar;
val old_sig_T_name = fst (dest_Type (#T old_sig_fp_sugar));
val old_ssig_T_name = fst (dest_Type (#T old_ssig_fp_sugar));
val sig_T_name = fst (dest_Type (#T sig_fp_sugar));
val ssig_T_name = fst (dest_Type (#T ssig_fp_sugar));
val res_As = res_Ds @ [Y];
val res_Bs = res_Ds @ [Z];
val preT = pre_type_of_ctor Y ctor;
val YpreT = HOLogic.mk_prodT (Y, preT);
val old_sig_T = Type (old_sig_T_name, res_As);
val old_ssig_T = Type (old_ssig_T_name, res_As);
val sig_T = Type (sig_T_name, res_As);
val ssig_T = Type (ssig_T_name, res_As);
val old_Lam_domT = Tsubst Y YpreT old_sig_T;
val old_eval_core_domT = Tsubst Y YpreT old_ssig_T;
val pre_map = mk_mapN lthy live_AsBs preT pre_bnf;
val pre_rel = mk_relN lthy live_AsBs preT pre_bnf;
val dead_pre_map = mk_map1 lthy Y Z preT dead_pre_bnf;
val dead_pre_rel = mk_rel1 lthy Y Z preT dead_pre_bnf;
val fp_rel = mk_relN lthy live_EsFs fpT fp_bnf;
val dtor_unfold = mk_co_rec (Proof_Context.theory_of lthy) Greatest_FP [Z] fpT
(the_single (#xtor_un_folds fp_res));
val dead_k_map = mk_map1 lthy Y Z k_T dead_k_bnf;
val Sig = mk_ctr res_As (the_single (#ctrs sig_ctr_sugar));
val unsig = mk_disc_or_sel res_As (the_single (the_single (#selss sig_ctr_sugar)));
val sig_rel = mk_relN lthy live_AsBs sig_T sig_bnf;
val dead_old_sig_map = mk_map 1 res_As res_Bs (map_of_bnf dead_old_sig_bnf);
val dead_sig_map = mk_map 1 res_As res_Bs (map_of_bnf dead_sig_bnf);
val dead_sig_rel = mk_rel 1 res_As res_Bs (rel_of_bnf dead_sig_bnf);
val [old_Oper, old_VLeaf, old_CLeaf] = map (mk_ctr res_As) (#ctrs old_ssig_ctr_sugar);
val [Oper, VLeaf, CLeaf] = map (mk_ctr res_As) (#ctrs ssig_ctr_sugar);
val dead_old_ssig_map = mk_map 1 res_As res_Bs (map_of_bnf dead_old_ssig_bnf);
val ssig_map = mk_mapN lthy live_AsBs ssig_T ssig_bnf;
val ssig_rel = mk_relN lthy live_AsBs ssig_T ssig_bnf;
val dead_ssig_map = mk_map 1 res_As res_Bs (map_of_bnf dead_ssig_bnf);
val old_Lam = enforce_type lthy domain_type old_Lam_domT old_Lam0;
val old_proto_sctr = enforce_type lthy domain_type preT old_proto_sctr0;
val old_flat = enforce_type lthy range_type old_ssig_T old_flat0;
val old_eval_core = enforce_type lthy domain_type old_eval_core_domT old_eval_core0;
val old_eval = enforce_type lthy range_type fpT old_eval0;
val old_algLam = enforce_type lthy range_type fpT old_algLam0;
val ((embL, embL_def, embL_simps), lthy) = lthy
|> define_embL embLN fp_b version Y Z fpT old_sig_T old_ssig_T k_T ssig_T Inl_const
dead_old_sig_map Sig old_Oper old_VLeaf old_CLeaf Oper VLeaf CLeaf;
val ((Lam, Lam_def), lthy) = lthy
|> define_Lam_step fp_b version Y Z preT old_ssig_T ssig_T dead_pre_map unsig rho embL
old_Lam;
val ((proto_sctr, proto_sctr_def), lthy) = lthy
|> define_proto_sctr_step_or_merge fp_b version old_sig_T k_T Sig old_proto_sctr;
val pre_map_comp = map_comp_of_bnf pre_bnf;
val pre_map_transfer = map_transfer_of_bnf pre_bnf;
val dead_pre_map_id = map_id_of_bnf dead_pre_bnf;
val dead_pre_map_comp0 = map_comp0_of_bnf dead_pre_bnf;
val dead_pre_map_comp = map_comp_of_bnf dead_pre_bnf;
val fp_map_id = map_id_of_bnf fp_bnf;
val [ctor_dtor] = #ctor_dtors fp_res;
val [dtor_inject] = #dtor_injects fp_res;
val [dtor_unfold_thm] = #xtor_un_fold_thms fp_res;
val dtor_unfold_unique = #xtor_un_fold_unique fp_res;
val [dtor_unfold_transfer] = #xtor_un_fold_transfers fp_res;
val fp_k_T_rel_eqs =
map rel_eq_of_bnf (map_filter (bnf_of lthy) (fold add_type_namesT [fpT, k_T] []));
val unsig_thm = the_single (the_single (#sel_thmss sig_ctr_sugar));
val [sig_map_thm] = #map_thms sig_fp_bnf_sugar;
val old_sig_map_comp = map_comp_of_bnf old_sig_bnf;
val old_sig_map_cong = map_cong_of_bnf old_sig_bnf;
val old_ssig_map_thms = #map_thms old_ssig_fp_bnf_sugar;
val [Oper_map_thm, VLeaf_map_thm, CLeaf_map_thm] = #map_thms ssig_fp_bnf_sugar;
val old_sig_map_transfer = map_transfer_of_bnf old_sig_bnf;
val sig_map_comp = map_comp_of_bnf sig_bnf;
val sig_map_transfer = map_transfer_of_bnf sig_bnf;
val ssig_map_thms = #map_thms ssig_fp_bnf_sugar;
val ssig_map_transfer = map_transfer_of_bnf ssig_bnf;
val old_ssig_induct = the_single (#co_inducts old_ssig_fp_induct_sugar);
val ssig_induct = the_single (#co_inducts ssig_fp_induct_sugar);
val proto_sctr_transfer = derive_proto_sctr_transfer_step_or_merge lthy Y Z R dead_pre_rel
dead_sig_rel proto_sctr proto_sctr_def fp_k_T_rel_eqs [old_proto_sctr_transfer];
val embL_transfer = derive_transfer_by_transfer_prover lthy live_AsBs Rs R embL [embL_def]
fp_k_T_rel_eqs [old_sig_map_transfer];
val Lam_transfer = derive_Lam_or_eval_core_transfer lthy live_AsBs Y Z preT ssig_T Rs R pre_rel
sig_rel ssig_rel Lam Lam_def fp_k_T_rel_eqs
[pre_map_transfer, old_Lam_transfer, embL_transfer, rho_transfer];
val ((((((((flat, _, flat_simps), flat_transfer),
((eval_core, _, eval_core_simps), eval_core_transfer)), (eval, eval_def)),
(cutSsig, cutSsig_def)), (algLam, algLam_def)), (corecU, corecU_def)), lthy) = lthy
|> define_flat_etc fp_b version live_AsBs Y Z preT fpT sig_T ssig_T Oper VLeaf CLeaf pre_rel
dead_pre_map dtor dtor_unfold dead_sig_map ssig_rel dead_ssig_map Lam Rs R pre_map_transfer
fp_k_T_rel_eqs sig_map_transfer ssig_map_transfer Lam_transfer dtor_transfer;
val (Sig_pointful_natural, flat_pointful_natural, Lam_natural_pointful, Lam_pointful_natural,
flat_VLeaf, eval_core_pointful_natural, eval_thm, eval_flat,
eval_simps as [eval_Oper, _, _], corecU_ctor, corecU_unique, dtor_algLam) =
derive_Sig_natural_etc lthy live live_AsBs Y Z preT fpT k_T sig_T ssig_T pre_map dead_pre_map
ctor dtor Sig dead_sig_map Oper VLeaf CLeaf ssig_map dead_ssig_map Lam flat eval_core eval
cutSsig algLam corecU x fs f g ctor_dtor dtor_inject dtor_unfold_thm dtor_unfold_unique
sig_map_thm ssig_induct ssig_map_thms Oper_map_thm VLeaf_map_thm CLeaf_map_thm Lam_transfer
flat_simps flat_transfer eval_core_simps eval_core_transfer eval_def cutSsig_def algLam_def
corecU_def live_pre_bnf pre_bnf dead_pre_bnf fp_bnf sig_bnf ssig_bnf dead_ssig_bnf;
val proto_sctr_natural = derive_natural_from_transfer_with_pre_type lthy live_AsBs Y Z preT
ssig_T pre_map ssig_map fs f proto_sctr proto_sctr_transfer [pre_bnf, sig_bnf] [];
val proto_sctr_pointful_natural = mk_pointful lthy proto_sctr_natural RS sym;
val (embL_pointful_natural, old_algLam_pointful, eval_embL, algLam_algLam) =
derive_embL_natural_etc lthy Inl_const old_ssig_bnf ssig_bnf Y Z preT fpT old_ssig_T ssig_T
dead_pre_map Sig dead_old_ssig_map embL old_algLam algLam old_flat flat old_eval_core
eval_core old_eval eval x f old_ssig_induct dead_pre_map_comp0 dead_pre_map_comp fp_map_id
dtor_inject dtor_unfold_unique Sig_pointful_natural unsig_thm sig_map_thm old_sig_map_comp
old_sig_map_cong old_ssig_map_thms old_Lam_pointful_natural Lam_def old_flat_simps
flat_simps embL_simps embL_transfer old_eval_core_simps eval_core_simps old_eval_thm
eval_thm old_dtor_algLam dtor_algLam old_algLam_thm;
val algLam_thm = derive_algLam_step_or_merge lthy Y fpT ctor proto_sctr algLam proto_sctr_def
old_algLam_pointful algLam_algLam;
val k_as_ssig = mk_k_as_ssig Z old_sig_T k_T ssig_T Sig dead_sig_map Oper VLeaf;
val k_as_ssig' = substT Y fpT k_as_ssig;
val algrho_rhs = HOLogic.mk_comp (eval, k_as_ssig');
val ((algrho, algrho_def), lthy) = lthy
|> define_const true fp_b version algrhoN algrho_rhs;
val k_as_ssig_transfer = derive_transfer_by_transfer_prover lthy live_AsBs Rs R k_as_ssig []
fp_k_T_rel_eqs [sig_map_transfer];
val k_as_ssig_natural = derive_natural_from_transfer lthy [(Y, Z)] [] [] f k_as_ssig
k_as_ssig_transfer [ssig_bnf] [dead_k_bnf];
val k_as_ssig_natural_pointful = mk_pointful lthy k_as_ssig_natural;
val [_, Lam_Inr] = derive_Lam_Inl_Inr lthy Y Z preT old_sig_T old_ssig_T k_T ssig_T
dead_pre_map Sig embL old_Lam Lam rho unsig_thm Lam_def;
val eval_core_k_as_ssig = derive_eval_core_k_as_ssig lthy Y preT k_T rho eval_core k_as_ssig x
pre_map_comp dead_pre_map_id sig_map_comp ssig_map_thms Lam_natural_pointful Lam_Inr
flat_VLeaf eval_core_simps;
val algLam_algrho = derive_algLam_algrho lthy Y fpT Sig algLam algrho algLam_def algrho_def;
val dtor_algrho = derive_dtor_algrho lthy Y Z preT fpT k_T ssig_T dead_pre_map dead_k_map dtor
rho eval algrho x eval_thm k_as_ssig_natural_pointful eval_core_k_as_ssig algrho_def;
val all_algLam_algs = algLam_algLam :: algLam_algrho :: old_all_algLam_algs;
val (((corecUU, _), corecUU_thm, corecUU_unique, corecUU_transfer, _, sctr_transfer,
sctr_pointful_natural), lthy) = lthy
|> define_corecUU_etc fp_b version live_AsBs Y Z preT fpT ssig_T pre_map dead_pre_map pre_rel
fp_rel ctor Oper ssig_map dead_ssig_map ssig_rel proto_sctr flat eval_core eval corecU fs f
g Rs R pre_map_transfer fp_k_T_rel_eqs dtor_unfold_transfer dtor_transfer ssig_map_transfer
proto_sctr_transfer proto_sctr_pointful_natural flat_transfer flat_pointful_natural
eval_core_transfer eval_core_pointful_natural eval_thm eval_flat eval_Oper algLam_thm
cutSsig_def corecU_def corecU_ctor corecU_unique pre_bnf dead_pre_bnf fp_res ssig_fp_sugar;
val (ctr_wrapper, friends) =
mk_ctr_wrapper_friends lthy friend_name friend_T old_sig_T k_T Sig old_buffer;
val Retr = enforce_type lthy (domain_type o domain_type) fpT old_Retr0;
val (dtor_coinduct_info, lthy) = lthy
|> derive_cong_step fp_b version fpT dead_ssig_bnf dead_pre_bnf eval eval_thm
eval_core_transfer old_dtor_coinduct_info algrho_def k_as_ssig_transfer Retr equivp_Retr
Retr_coinduct eval_embL embL_transfer old_all_dead_k_bnfs;
val buffer =
{Oper = Oper, VLeaf = VLeaf, CLeaf = CLeaf, ctr_wrapper = ctr_wrapper, friends = friends};
val notes =
[(corecUU_transferN, [corecUU_transfer])] @
(if Config.get lthy bnf_internals then
[(algLamN, [algLam_thm]),
(algLam_algLamN, [algLam_algLam]),
(algLam_algrhoN, [algLam_algrho]),
(cong_alg_introsN, #cong_alg_intros dtor_coinduct_info),
(cong_localeN, [#cong_locale dtor_coinduct_info]),
(corecU_ctorN, [corecU_ctor]),
(corecU_uniqueN, [corecU_unique]),
(corecUUN, [corecUU_thm]),
(corecUU_uniqueN, [corecUU_unique]),
(dtor_algLamN, [dtor_algLam]),
(dtor_algrhoN, [dtor_algrho]),
(dtor_coinductN, [#dtor_coinduct dtor_coinduct_info]),
(embL_pointful_naturalN, [embL_pointful_natural]),
(embL_transferN, [embL_transfer]),
(evalN, [eval_thm]),
(eval_core_pointful_naturalN, [eval_core_pointful_natural]),
(eval_core_transferN, [eval_core_transfer]),
(eval_flatN, [eval_flat]),
(eval_simpsN, eval_simps),
(flat_pointful_naturalN, [flat_pointful_natural]),
(flat_transferN, [flat_transfer]),
(k_as_ssig_naturalN, [k_as_ssig_natural]),
(k_as_ssig_transferN, [k_as_ssig_transfer]),
(Lam_pointful_naturalN, [Lam_pointful_natural]),
(Lam_transferN, [Lam_transfer]),
(proto_sctr_pointful_naturalN, [proto_sctr_pointful_natural]),
(proto_sctr_transferN, [proto_sctr_transfer]),
(rho_transferN, [rho_transfer]),
(sctr_pointful_naturalN, [sctr_pointful_natural]),
(sctr_transferN, [sctr_transfer]),
(Sig_pointful_naturalN, [Sig_pointful_natural])]
else
[])
|> map (fn (thmN, thms) =>
((mk_version_fp_binding true lthy version fp_b thmN, []), [(thms, [])]));
val phi = Local_Theory.target_morphism lthy;
in
(({fp_b = fp_b, version = version, fpT = fpT, Y = Y, Z = Z, friend_names = friend_names,
sig_fp_sugars = sig_fp_sugar :: old_sig_fp_sugars, ssig_fp_sugar = ssig_fp_sugar, Lam = Lam,
proto_sctr = proto_sctr, flat = flat, eval_core = eval_core, eval = eval, algLam = algLam,
corecUU = corecUU, dtor_transfer = dtor_transfer, Lam_transfer = Lam_transfer,
Lam_pointful_natural = Lam_pointful_natural, proto_sctr_transfer = proto_sctr_transfer,
flat_simps = flat_simps, eval_core_simps = eval_core_simps, eval_thm = eval_thm,
eval_simps = eval_simps, all_algLam_algs = all_algLam_algs, algLam_thm = algLam_thm,
dtor_algLam = dtor_algLam, corecUU_thm = corecUU_thm, corecUU_unique = corecUU_unique,
corecUU_transfer = corecUU_transfer, buffer = buffer,
all_dead_k_bnfs = dead_k_bnf :: old_all_dead_k_bnfs, Retr = Retr, equivp_Retr = equivp_Retr,
Retr_coinduct = Retr_coinduct, dtor_coinduct_info = dtor_coinduct_info}
|> morph_corec_info phi,
({algrho = algrho, dtor_algrho = dtor_algrho, algLam_algrho = algLam_algrho}
|> morph_friend_info phi)),
lthy |> Local_Theory.notes notes |> snd)
end;
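(* Merge two corecursor states for the same codatatype into one that knows the friends of both, using the embeddings "embLL" and "embLR" of the two old signatures. *)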
fun derive_corecUU_merge (fpT as Type (fpT_name, res_Ds))
({friend_names = old1_friend_names,
sig_fp_sugars = old1_sig_fp_sugars as old1_sig_fp_sugar :: _,
ssig_fp_sugar = old1_ssig_fp_sugar, Lam = old1_Lam0, proto_sctr = old1_proto_sctr0,
flat = old1_flat0, eval_core = old1_eval_core0, eval = old1_eval0, algLam = old1_algLam0,
dtor_transfer, Lam_transfer = old1_Lam_transfer,
Lam_pointful_natural = old1_Lam_pointful_natural,
proto_sctr_transfer = old1_proto_sctr_transfer, flat_simps = old1_flat_simps,
eval_core_simps = old1_eval_core_simps, eval_thm = old1_eval_thm,
all_algLam_algs = old1_all_algLam_algs, algLam_thm = old1_algLam_thm,
dtor_algLam = old1_dtor_algLam, buffer = old1_buffer, all_dead_k_bnfs = old1_all_dead_k_bnfs,
Retr = old1_Retr0, equivp_Retr, Retr_coinduct, dtor_coinduct_info = old1_dtor_coinduct_info,
...} : corec_info)
({friend_names = old2_friend_names,
sig_fp_sugars = old2_sig_fp_sugars as old2_sig_fp_sugar :: _,
ssig_fp_sugar = old2_ssig_fp_sugar, Lam = old2_Lam0, flat = old2_flat0,
eval_core = old2_eval_core0, eval = old2_eval0, algLam = old2_algLam0,
Lam_transfer = old2_Lam_transfer, Lam_pointful_natural = old2_Lam_pointful_natural,
flat_simps = old2_flat_simps, eval_core_simps = old2_eval_core_simps,
eval_thm = old2_eval_thm, all_algLam_algs = old2_all_algLam_algs,
algLam_thm = old2_algLam_thm, dtor_algLam = old2_dtor_algLam, buffer = old2_buffer,
all_dead_k_bnfs = old2_all_dead_k_bnfs, dtor_coinduct_info = old2_dtor_coinduct_info, ...}
: corec_info)
lthy =
let
val {T = Type (_, fpT_args0), pre_bnf = live_pre_bnf, fp_bnf = live_fp_bnf, fp_res, ...} =
checked_fp_sugar_of lthy fpT_name;
val fpT_Ss = map Type.sort_of_atyp fpT_args0;
val live_fp_alives = liveness_of_fp_bnf (length fpT_args0) live_fp_bnf;
val ((Ds, [Y, Z]), names_lthy) = lthy
|> mk_TFrees' fpT_Ss
||>> mk_TFrees 2;
(* FIXME *)
val live_EsFs = [];
val live_AsBs = live_EsFs @ [(Y, Z)];
val live = length live_EsFs;
val ((((x, f), g), R), _) = names_lthy
|> yield_singleton (mk_Frees "x") Y
||>> yield_singleton (mk_Frees "f") (Y --> Z)
||>> yield_singleton (mk_Frees "g") (Y --> Z)
||>> yield_singleton (mk_Frees "R") (mk_pred2T Y Z);
(* FIXME *)
val fs = [];
val Rs = [];
val ctor = mk_ctor res_Ds (the_single (#ctors fp_res));
val dtor = mk_dtor res_Ds (the_single (#dtors fp_res));
val old1_sig_T_name = fst (dest_Type (#T old1_sig_fp_sugar));
val old2_sig_T_name = fst (dest_Type (#T old2_sig_fp_sugar));
val old1_ssig_T_name = fst (dest_Type (#T old1_ssig_fp_sugar));
val old2_ssig_T_name = fst (dest_Type (#T old2_ssig_fp_sugar));
val fp_alives = map (K false) live_fp_alives;
val As = Ds @ [Y];
val res_As = res_Ds @ [Y];
val res_Bs = res_Ds @ [Z];
val preT = pre_type_of_ctor Y ctor;
val YpreT = HOLogic.mk_prodT (Y, preT);
val fpT0 = Type (fpT_name, Ds);
val old1_sig_T0 = Type (old1_sig_T_name, As);
val old2_sig_T0 = Type (old2_sig_T_name, As);
val old1_sig_T = Type (old1_sig_T_name, res_As);
val old2_sig_T = Type (old2_sig_T_name, res_As);
val old1_ssig_T = Type (old1_ssig_T_name, res_As);
val old2_ssig_T = Type (old2_ssig_T_name, res_As);
val old1_Lam_domT = Tsubst Y YpreT old1_sig_T;
val old2_Lam_domT = Tsubst Y YpreT old2_sig_T;
val old1_eval_core_domT = Tsubst Y YpreT old1_ssig_T;
val old2_eval_core_domT = Tsubst Y YpreT old2_ssig_T;
val ((fp_b, version), lthy) = lthy |> get_name_next_version_of fpT_name;
val ((sig_fp_sugar, ssig_fp_sugar), lthy) = lthy
|> define_sig_type fp_b version fp_alives Ds Y (mk_sumT (old1_sig_T0, old2_sig_T0))
||>> define_ssig_type fp_b version fp_alives Ds Y fpT0;
val sig_T_name = fst (dest_Type (#T sig_fp_sugar));
val ssig_T_name = fst (dest_Type (#T ssig_fp_sugar));
val old1_sig_bnf = #fp_bnf old1_sig_fp_sugar;
val old2_sig_bnf = #fp_bnf old2_sig_fp_sugar;
val old1_ssig_bnf = #fp_bnf old1_ssig_fp_sugar;
val old2_ssig_bnf = #fp_bnf old2_ssig_fp_sugar;
val sig_bnf = #fp_bnf sig_fp_sugar;
val ssig_bnf = #fp_bnf ssig_fp_sugar;
val ((((((((dead_pre_bnf, dead_fp_bnf), dead_old1_sig_bnf), dead_old2_sig_bnf),
dead_old1_ssig_bnf), dead_old2_ssig_bnf), dead_sig_bnf), dead_ssig_bnf), lthy) = lthy
|> bnf_kill_all_but 1 live_pre_bnf
||>> bnf_kill_all_but 0 live_fp_bnf
||>> bnf_kill_all_but 1 old1_sig_bnf
||>> bnf_kill_all_but 1 old2_sig_bnf
||>> bnf_kill_all_but 1 old1_ssig_bnf
||>> bnf_kill_all_but 1 old2_ssig_bnf
||>> bnf_kill_all_but 1 sig_bnf
||>> bnf_kill_all_but 1 ssig_bnf;
(* FIXME *)
val pre_bnf = dead_pre_bnf;
val fp_bnf = dead_fp_bnf;
val old1_ssig_fp_ctr_sugar = #fp_ctr_sugar old1_ssig_fp_sugar;
val old2_ssig_fp_ctr_sugar = #fp_ctr_sugar old2_ssig_fp_sugar;
val sig_fp_ctr_sugar = #fp_ctr_sugar sig_fp_sugar;
val ssig_fp_ctr_sugar = #fp_ctr_sugar ssig_fp_sugar;
val sig_fp_bnf_sugar = #fp_bnf_sugar sig_fp_sugar;
val old1_ssig_fp_bnf_sugar = #fp_bnf_sugar old1_ssig_fp_sugar;
val old2_ssig_fp_bnf_sugar = #fp_bnf_sugar old2_ssig_fp_sugar;
val ssig_fp_bnf_sugar = #fp_bnf_sugar ssig_fp_sugar;
val old1_ssig_fp_induct_sugar = the (#fp_co_induct_sugar old1_ssig_fp_sugar);
val old2_ssig_fp_induct_sugar = the (#fp_co_induct_sugar old2_ssig_fp_sugar);
val ssig_fp_induct_sugar = the (#fp_co_induct_sugar ssig_fp_sugar);
val old1_ssig_ctr_sugar = #ctr_sugar old1_ssig_fp_ctr_sugar;
val old2_ssig_ctr_sugar = #ctr_sugar old2_ssig_fp_ctr_sugar;
val sig_ctr_sugar = #ctr_sugar sig_fp_ctr_sugar;
val ssig_ctr_sugar = #ctr_sugar ssig_fp_ctr_sugar;
val sig_T = Type (sig_T_name, res_As);
val ssig_T = Type (ssig_T_name, res_As);
val pre_map = mk_mapN lthy live_AsBs preT pre_bnf;
val pre_rel = mk_relN lthy live_AsBs preT pre_bnf;
val dead_pre_map = mk_map1 lthy Y Z preT dead_pre_bnf;
val dead_pre_rel = mk_rel1 lthy Y Z preT dead_pre_bnf;
val fp_rel = mk_relN lthy live_EsFs fpT fp_bnf;
val dtor_unfold = mk_co_rec (Proof_Context.theory_of lthy) Greatest_FP [Z] fpT
(the_single (#xtor_un_folds fp_res));
val Sig = mk_ctr res_As (the_single (#ctrs sig_ctr_sugar));
val unsig = mk_disc_or_sel res_As (the_single (the_single (#selss sig_ctr_sugar)));
val sig_rel = mk_relN lthy live_AsBs sig_T sig_bnf;
val dead_old1_sig_map = mk_map 1 res_As res_Bs (map_of_bnf dead_old1_sig_bnf);
val dead_old2_sig_map = mk_map 1 res_As res_Bs (map_of_bnf dead_old2_sig_bnf);
val dead_sig_map = mk_map 1 res_As res_Bs (map_of_bnf dead_sig_bnf);
val dead_sig_rel = mk_rel 1 res_As res_Bs (rel_of_bnf dead_sig_bnf);
val [old1_Oper, old1_VLeaf, old1_CLeaf] = map (mk_ctr res_As) (#ctrs old1_ssig_ctr_sugar);
val [old2_Oper, old2_VLeaf, old2_CLeaf] = map (mk_ctr res_As) (#ctrs old2_ssig_ctr_sugar);
val [Oper, VLeaf, CLeaf] = map (mk_ctr res_As) (#ctrs ssig_ctr_sugar);
val old1_ssig_map = mk_map 1 res_As res_Bs (map_of_bnf dead_old1_ssig_bnf);
val old2_ssig_map = mk_map 1 res_As res_Bs (map_of_bnf dead_old2_ssig_bnf);
val ssig_map = mk_mapN lthy live_AsBs ssig_T ssig_bnf;
val ssig_rel = mk_relN lthy live_AsBs ssig_T ssig_bnf;
val dead_ssig_map = mk_map 1 res_As res_Bs (map_of_bnf dead_ssig_bnf);
val old1_Lam = enforce_type lthy domain_type old1_Lam_domT old1_Lam0;
val old2_Lam = enforce_type lthy domain_type old2_Lam_domT old2_Lam0;
val old1_proto_sctr = enforce_type lthy domain_type preT old1_proto_sctr0;
val old1_flat = enforce_type lthy range_type old1_ssig_T old1_flat0;
val old2_flat = enforce_type lthy range_type old2_ssig_T old2_flat0;
val old1_eval_core = enforce_type lthy domain_type old1_eval_core_domT old1_eval_core0;
val old2_eval_core = enforce_type lthy domain_type old2_eval_core_domT old2_eval_core0;
val old1_eval = enforce_type lthy range_type fpT old1_eval0;
val old2_eval = enforce_type lthy range_type fpT old2_eval0;
val old1_algLam = enforce_type lthy range_type fpT old1_algLam0;
val old2_algLam = enforce_type lthy range_type fpT old2_algLam0;
val ((embLL, embLL_def, embLL_simps), lthy) = lthy
|> define_embL embLLN fp_b version Y Z fpT old1_sig_T old1_ssig_T old2_sig_T ssig_T Inl_const
dead_old1_sig_map Sig old1_Oper old1_VLeaf old1_CLeaf Oper VLeaf CLeaf;
val ((embLR, embLR_def, embLR_simps), lthy) = lthy
|> define_embL embLRN fp_b version Y Z fpT old2_sig_T old2_ssig_T old1_sig_T ssig_T
(fn T => fn U => Inr_const U T) dead_old2_sig_map Sig old2_Oper old2_VLeaf old2_CLeaf Oper
VLeaf CLeaf;
val ((Lam, Lam_def), lthy) = lthy
|> define_Lam_merge fp_b version Y Z preT old1_ssig_T old2_ssig_T ssig_T dead_pre_map unsig
embLL embLR old1_Lam old2_Lam;
val ((proto_sctr, proto_sctr_def), lthy) = lthy
|> define_proto_sctr_step_or_merge fp_b version old1_sig_T old2_sig_T Sig old1_proto_sctr;
val pre_map_transfer = map_transfer_of_bnf pre_bnf;
val dead_pre_map_comp0 = map_comp0_of_bnf dead_pre_bnf;
val dead_pre_map_comp = map_comp_of_bnf dead_pre_bnf;
val fp_map_id = map_id_of_bnf fp_bnf;
val fp_rel_eq = rel_eq_of_bnf fp_bnf;
val [ctor_dtor] = #ctor_dtors fp_res;
val [dtor_inject] = #dtor_injects fp_res;
val [dtor_unfold_thm] = #xtor_un_fold_thms fp_res;
val dtor_unfold_unique = #xtor_un_fold_unique fp_res;
val [dtor_unfold_transfer] = #xtor_un_fold_transfers fp_res;
val unsig_thm = the_single (the_single (#sel_thmss sig_ctr_sugar));
val [sig_map_thm] = #map_thms sig_fp_bnf_sugar;
val old1_sig_map_comp = map_comp_of_bnf old1_sig_bnf;
val old2_sig_map_comp = map_comp_of_bnf old2_sig_bnf;
val old1_sig_map_cong = map_cong_of_bnf old1_sig_bnf;
val old2_sig_map_cong = map_cong_of_bnf old2_sig_bnf;
val old1_ssig_map_thms = #map_thms old1_ssig_fp_bnf_sugar;
val old2_ssig_map_thms = #map_thms old2_ssig_fp_bnf_sugar;
val [Oper_map_thm, VLeaf_map_thm, CLeaf_map_thm] = #map_thms ssig_fp_bnf_sugar;
val old1_sig_map_transfer = map_transfer_of_bnf old1_sig_bnf;
val old2_sig_map_transfer = map_transfer_of_bnf old2_sig_bnf;
val sig_map_transfer = map_transfer_of_bnf sig_bnf;
val ssig_map_thms = #map_thms ssig_fp_bnf_sugar;
val ssig_map_transfer = map_transfer_of_bnf ssig_bnf;
val old1_ssig_induct = the_single (#co_inducts old1_ssig_fp_induct_sugar);
val old2_ssig_induct = the_single (#co_inducts old2_ssig_fp_induct_sugar);
val ssig_induct = the_single (#co_inducts ssig_fp_induct_sugar);
val proto_sctr_transfer = derive_proto_sctr_transfer_step_or_merge lthy Y Z R dead_pre_rel
dead_sig_rel proto_sctr proto_sctr_def [] [old1_proto_sctr_transfer];
val embLL_transfer = derive_transfer_by_transfer_prover lthy live_AsBs Rs R embLL [embLL_def] []
[old1_sig_map_transfer];
val embLR_transfer = derive_transfer_by_transfer_prover lthy live_AsBs Rs R embLR [embLR_def] []
[old2_sig_map_transfer];
val Lam_transfer = derive_Lam_or_eval_core_transfer lthy live_AsBs Y Z preT ssig_T Rs R
pre_rel sig_rel ssig_rel Lam Lam_def []
[pre_map_transfer, old1_Lam_transfer, old2_Lam_transfer, embLL_transfer, embLR_transfer];
val ((((((((flat, _, flat_simps), flat_transfer),
((eval_core, _, eval_core_simps), eval_core_transfer)), (eval, eval_def)),
(cutSsig, cutSsig_def)), (algLam, algLam_def)), (corecU, corecU_def)), lthy) = lthy
|> define_flat_etc fp_b version live_AsBs Y Z preT fpT sig_T ssig_T Oper VLeaf CLeaf pre_rel
dead_pre_map dtor dtor_unfold dead_sig_map ssig_rel dead_ssig_map Lam Rs R pre_map_transfer
[fp_rel_eq] sig_map_transfer ssig_map_transfer Lam_transfer dtor_transfer;
val (Sig_pointful_natural, flat_pointful_natural, _, Lam_pointful_natural, _,
eval_core_pointful_natural, eval_thm, eval_flat, eval_simps as [eval_Oper, _, _],
corecU_ctor, corecU_unique, dtor_algLam) =
derive_Sig_natural_etc lthy live live_AsBs Y Z preT fpT fpT sig_T ssig_T pre_map dead_pre_map
ctor dtor Sig dead_sig_map Oper VLeaf CLeaf ssig_map dead_ssig_map Lam flat eval_core eval
cutSsig algLam corecU x fs f g ctor_dtor dtor_inject dtor_unfold_thm dtor_unfold_unique
sig_map_thm ssig_induct ssig_map_thms Oper_map_thm VLeaf_map_thm CLeaf_map_thm Lam_transfer
flat_simps flat_transfer eval_core_simps eval_core_transfer eval_def cutSsig_def algLam_def
corecU_def live_pre_bnf pre_bnf dead_pre_bnf fp_bnf sig_bnf ssig_bnf dead_ssig_bnf;
val proto_sctr_natural = derive_natural_from_transfer_with_pre_type lthy live_AsBs Y Z preT
ssig_T pre_map ssig_map fs f proto_sctr proto_sctr_transfer [pre_bnf, sig_bnf] [];
val proto_sctr_pointful_natural = mk_pointful lthy proto_sctr_natural RS sym;
val (embLL_pointful_natural, old1_algLam_pointful, eval_embLL, algLam_algLamL) =
derive_embL_natural_etc lthy Inl_const old1_ssig_bnf ssig_bnf Y Z preT fpT old1_ssig_T ssig_T
dead_pre_map Sig old1_ssig_map embLL old1_algLam algLam old1_flat flat old1_eval_core
eval_core old1_eval eval x f old1_ssig_induct dead_pre_map_comp0 dead_pre_map_comp fp_map_id
dtor_inject dtor_unfold_unique Sig_pointful_natural unsig_thm sig_map_thm old1_sig_map_comp
old1_sig_map_cong old1_ssig_map_thms old1_Lam_pointful_natural Lam_def old1_flat_simps
flat_simps embLL_simps embLL_transfer old1_eval_core_simps eval_core_simps old1_eval_thm
eval_thm old1_dtor_algLam dtor_algLam old1_algLam_thm;
val (embLR_pointful_natural, _, eval_embLR, algLam_algLamR) =
derive_embL_natural_etc lthy Inr_const old2_ssig_bnf ssig_bnf Y Z preT fpT old2_ssig_T ssig_T
dead_pre_map Sig old2_ssig_map embLR old2_algLam algLam old2_flat flat old2_eval_core
eval_core old2_eval eval x f old2_ssig_induct dead_pre_map_comp0 dead_pre_map_comp fp_map_id
dtor_inject dtor_unfold_unique Sig_pointful_natural unsig_thm sig_map_thm old2_sig_map_comp
old2_sig_map_cong old2_ssig_map_thms old2_Lam_pointful_natural Lam_def old2_flat_simps
flat_simps embLR_simps embLR_transfer old2_eval_core_simps eval_core_simps old2_eval_thm
eval_thm old2_dtor_algLam dtor_algLam old2_algLam_thm;
val algLam_thm = derive_algLam_step_or_merge lthy Y fpT ctor proto_sctr algLam proto_sctr_def
old1_algLam_pointful algLam_algLamL;
val all_algLam_algs = algLam_algLamL :: algLam_algLamR ::
merge_lists (Thm.eq_thm_prop o apply2 zero_var_indexes) old1_all_algLam_algs
old2_all_algLam_algs;
val (((corecUU, _), corecUU_thm, corecUU_unique, corecUU_transfer, _, sctr_transfer,
sctr_pointful_natural), lthy) = lthy
|> define_corecUU_etc fp_b version live_AsBs Y Z preT fpT ssig_T pre_map dead_pre_map pre_rel
fp_rel ctor Oper ssig_map dead_ssig_map ssig_rel proto_sctr flat eval_core eval corecU fs f
g Rs R pre_map_transfer [] dtor_unfold_transfer dtor_transfer ssig_map_transfer
proto_sctr_transfer proto_sctr_pointful_natural flat_transfer flat_pointful_natural
eval_core_transfer eval_core_pointful_natural eval_thm eval_flat eval_Oper algLam_thm
cutSsig_def corecU_def corecU_ctor corecU_unique pre_bnf dead_pre_bnf fp_res ssig_fp_sugar;
val Retr = enforce_type lthy (domain_type o domain_type) fpT old1_Retr0;
val embed_Sig_inl = embed_Sig lthy Sig (Inl_const old1_sig_T old2_sig_T);
val embed_Sig_inr = embed_Sig lthy Sig (Inr_const old1_sig_T old2_sig_T);
val ctr_wrapper = embed_Sig_inl (#ctr_wrapper old1_buffer);
val friends = Symtab.merge (K true)
(Symtab.map (K (apsnd embed_Sig_inl)) (#friends old1_buffer),
Symtab.map (K (apsnd embed_Sig_inr)) (#friends old2_buffer));
val old_fp_sugars =
merge_lists (op = o apply2 (fst o dest_Type o #T)) old1_sig_fp_sugars old2_sig_fp_sugars;
val ((dtor_coinduct_info, all_dead_k_bnfs, friend_names), lthy) = lthy
|> derive_cong_merge fp_b version fpT old1_friend_names old2_friend_names dead_ssig_bnf
dead_pre_bnf eval eval_thm eval_core_transfer old1_dtor_coinduct_info
old2_dtor_coinduct_info Retr equivp_Retr Retr_coinduct eval_embLL embLL_transfer eval_embLR
embLR_transfer old1_all_dead_k_bnfs old2_all_dead_k_bnfs;
val buffer =
{Oper = Oper, VLeaf = VLeaf, CLeaf = CLeaf, ctr_wrapper = ctr_wrapper, friends = friends};
val notes =
[(corecUU_transferN, [corecUU_transfer])] @
(if Config.get lthy bnf_internals then
[(algLamN, [algLam_thm]),
(algLam_algLamN, [algLam_algLamL, algLam_algLamR]),
(cong_alg_introsN, #cong_alg_intros dtor_coinduct_info),
(cong_localeN, [#cong_locale dtor_coinduct_info]),
(corecU_ctorN, [corecU_ctor]),
(corecU_uniqueN, [corecU_unique]),
(corecUUN, [corecUU_thm]),
(corecUU_uniqueN, [corecUU_unique]),
(dtor_algLamN, [dtor_algLam]),
(dtor_coinductN, [#dtor_coinduct dtor_coinduct_info]),
(eval_core_pointful_naturalN, [eval_core_pointful_natural]),
(eval_core_transferN, [eval_core_transfer]),
(embL_pointful_naturalN, [embLL_pointful_natural, embLR_pointful_natural]),
(embL_transferN, [embLL_transfer, embLR_transfer]),
(evalN, [eval_thm]),
(eval_flatN, [eval_flat]),
(eval_simpsN, eval_simps),
(flat_pointful_naturalN, [flat_pointful_natural]),
(flat_transferN, [flat_transfer]),
(Lam_pointful_naturalN, [Lam_pointful_natural]),
(Lam_transferN, [Lam_transfer]),
(proto_sctr_pointful_naturalN, [proto_sctr_pointful_natural]),
(proto_sctr_transferN, [proto_sctr_transfer]),
(sctr_pointful_naturalN, [sctr_pointful_natural]),
(sctr_transferN, [sctr_transfer]),
(Sig_pointful_naturalN, [Sig_pointful_natural])]
else
[])
|> map (fn (thmN, thms) =>
((mk_version_fp_binding true lthy version fp_b thmN, []), [(thms, [])]));
in
({fp_b = fp_b, version = version, fpT = fpT, Y = Y, Z = Z, friend_names = friend_names,
sig_fp_sugars = sig_fp_sugar :: old_fp_sugars, ssig_fp_sugar = ssig_fp_sugar, Lam = Lam,
proto_sctr = proto_sctr, flat = flat, eval_core = eval_core, eval = eval, algLam = algLam,
corecUU = corecUU, dtor_transfer = dtor_transfer, Lam_transfer = Lam_transfer,
Lam_pointful_natural = Lam_pointful_natural, proto_sctr_transfer = proto_sctr_transfer,
flat_simps = flat_simps, eval_core_simps = eval_core_simps, eval_thm = eval_thm,
eval_simps = eval_simps, all_algLam_algs = all_algLam_algs, algLam_thm = algLam_thm,
dtor_algLam = dtor_algLam, corecUU_thm = corecUU_thm, corecUU_unique = corecUU_unique,
corecUU_transfer = corecUU_transfer, buffer = buffer, all_dead_k_bnfs = all_dead_k_bnfs,
Retr = Retr, equivp_Retr = equivp_Retr, Retr_coinduct = Retr_coinduct,
dtor_coinduct_info = dtor_coinduct_info}
|> morph_corec_info (Local_Theory.target_morphism lthy),
lthy |> Local_Theory.notes notes |> snd)
end;
fun set_corec_info_exprs fpT_name f =
- Local_Theory.declaration {syntax = false, pervasive = true} (fn phi =>
+ Local_Theory.declaration {syntax = false, pervasive = true, pos = Position.thread_data ()} (fn phi =>
let val exprs = f phi in
Data.map (apsnd (fn [info_tab] => [Symtab.map_default (fpT_name, exprs) (K exprs) info_tab]))
end);
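(* The second ad(vertisement) subsumes the first if its codatatype is at least as general and its friend set includes that of the first. *)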
fun subsume_corec_info_ad ctxt {fpT = fpT1, friend_names = friend_names1}
{fpT = fpT2, friend_names = friend_names2} =
Sign.typ_instance (Proof_Context.theory_of ctxt) (fpT1, fpT2) andalso
subset (op =) (friend_names1, friend_names2);
fun subsume_corec_info_expr ctxt expr1 expr2 =
subsume_corec_info_ad ctxt (corec_ad_of_expr expr1) (corec_ad_of_expr expr2);
fun instantiate_corec_info thy res_T (info as {fpT, ...}) =
let
val As_rho = tvar_subst thy [fpT] [res_T];
val substAT = Term.typ_subst_TVars As_rho;
val substA = Term.subst_TVars As_rho;
val phi = Morphism.typ_morphism "BNF" substAT $> Morphism.term_morphism "BNF" substA;
in
morph_corec_info phi info
end;
fun instantiate_corec_info_expr thy res_T (Ad ({friend_names, ...}, f)) =
Ad ({fpT = res_T, friend_names = friend_names}, f #>> instantiate_corec_info thy res_T)
| instantiate_corec_info_expr thy res_T (Info info) =
Info (instantiate_corec_info thy res_T info);
fun ensure_Info expr = corec_info_of_expr expr #>> Info
and ensure_Info_if_Info old_expr (expr, lthy) =
if is_Info old_expr then ensure_Info expr lthy else (expr, lthy)
and merge_corec_info_exprs old_exprs expr1 expr2 lthy =
if subsume_corec_info_expr lthy expr2 expr1 then
if subsume_corec_info_expr lthy expr1 expr2 andalso is_Ad expr1 then
(expr2, lthy)
else
ensure_Info_if_Info expr2 (expr1, lthy)
else if subsume_corec_info_expr lthy expr1 expr2 then
ensure_Info_if_Info expr1 (expr2, lthy)
else
let
val thy = Proof_Context.theory_of lthy;
val {fpT = fpT1, friend_names = friend_names1} = corec_ad_of_expr expr1;
val {fpT = fpT2, friend_names = friend_names2} = corec_ad_of_expr expr2;
val fpT0 = typ_unify_disjointly thy (fpT1, fpT2);
val fpT = singleton (freeze_types lthy []) fpT0;
val friend_names = merge_lists (op =) friend_names1 friend_names2;
val expr =
Ad ({fpT = fpT, friend_names = friend_names},
corec_info_of_expr expr1
##>> corec_info_of_expr expr2
#-> uncurry (derive_corecUU_merge fpT));
val old_same_type_exprs =
if old_exprs then
[]
|> Sign.typ_instance thy (fpT1, fpT0) ? cons expr1
|> Sign.typ_instance thy (fpT2, fpT0) ? cons expr2
else
[];
in
(expr, lthy) |> fold ensure_Info_if_Info old_same_type_exprs
end
and insert_corec_info_expr expr exprs lthy =
let
val thy = Proof_Context.theory_of lthy;
val {fpT = new_fpT, ...} = corec_ad_of_expr expr;
val is_Tinst = curry (Sign.typ_instance thy);
fun is_Tequiv T U = is_Tinst T U andalso is_Tinst U T;
val (((equiv_exprs, sub_exprs), sup_exprs), incomp_exprs) = exprs
|> List.partition ((fn {fpT, ...} => is_Tequiv fpT new_fpT) o corec_ad_of_expr)
||>> List.partition ((fn {fpT, ...} => is_Tinst fpT new_fpT) o corec_ad_of_expr)
||>> List.partition ((fn {fpT, ...} => is_Tinst new_fpT fpT) o corec_ad_of_expr);
fun add_instantiated_incomp_expr expr exprs =
let val {fpT, ...} = corec_ad_of_expr expr in
(case try (typ_unify_disjointly thy) (fpT, new_fpT) of
SOME new_T =>
let val subsumes = (fn {fpT, ...} => is_Tinst new_T fpT) o corec_ad_of_expr in
if exists (exists subsumes) [exprs, sub_exprs] then exprs
else instantiate_corec_info_expr thy new_T expr :: exprs
end
| NONE => exprs)
end;
val unincomp_exprs = fold add_instantiated_incomp_expr incomp_exprs [];
val ((merged_sub_exprs, merged_unincomp_exprs), lthy) = lthy
|> fold_map (merge_corec_info_exprs true expr) sub_exprs
||>> fold_map (merge_corec_info_exprs false expr) unincomp_exprs;
val (merged_equiv_expr, lthy) = (expr, lthy)
|> fold (uncurry o merge_corec_info_exprs true) equiv_exprs;
in
(merged_unincomp_exprs @ merged_sub_exprs @ merged_equiv_expr :: sup_exprs @ incomp_exprs
|> sort (rev_order o int_ord o apply2 (length o #friend_names o corec_ad_of_expr)),
lthy)
end
and register_corec_info (info as {fpT = Type (fpT_name, _), ...}) lthy =
let
val (exprs, lthy) = insert_corec_info_expr (Info info) (corec_info_exprs_of lthy fpT_name) lthy;
in
lthy |> set_corec_info_exprs fpT_name (fn phi => map (morph_corec_info_expr phi) exprs)
end
and corec_info_of_expr (Ad (_, f)) lthy = f lthy
| corec_info_of_expr (Info info) lthy = (info, lthy);
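(* Return the corecursor info expressions registered for the given codatatype, constructing and registering the friendless base state first if none exist yet. *)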
fun nonempty_corec_info_exprs_of fpT_name lthy =
(case corec_info_exprs_of lthy fpT_name of
[] =>
derive_corecUU_base fpT_name lthy
|> (fn (info, lthy) =>
([Info info], lthy
|> set_corec_info_exprs fpT_name (fn phi => [Info (morph_corec_info phi info)])))
| exprs => (exprs, lthy));
fun corec_info_of res_T lthy =
(case res_T of
Type (fpT_name, _) =>
let
val (exprs, lthy) = nonempty_corec_info_exprs_of fpT_name lthy;
val thy = Proof_Context.theory_of lthy;
val expr =
(case find_first ((fn {fpT, ...} => Sign.typ_instance thy (res_T, fpT)) o corec_ad_of_expr)
exprs of
SOME expr => expr
| NONE => error ("Invalid type: " ^ Syntax.string_of_typ lthy res_T));
val (info, lthy) = corec_info_of_expr expr lthy;
in
(instantiate_corec_info thy res_T info, lthy |> is_Ad expr ? register_corec_info info)
end
| _ => not_codatatype lthy res_T);
fun maybe_corec_info_of ctxt res_T =
(case res_T of
Type (fpT_name, _) =>
let
val thy = Proof_Context.theory_of ctxt;
val infos = corec_infos_of ctxt fpT_name;
in
find_first (fn {fpT, ...} => Sign.typ_instance thy (res_T, fpT)) infos
|> Option.map (instantiate_corec_info thy res_T)
end
| _ => not_codatatype ctxt res_T);
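(* Set up the registration of a friendly operation: check its type, define the extended "sig" and "ssig" types, and assemble the data needed by "register_friend_corec". *)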
fun prepare_friend_corec friend_name friend_T lthy =
let
val (arg_Ts, res_T) = strip_type friend_T;
val Type (fpT_name, res_Ds) =
(case res_T of
T as Type _ => T
| T => error (not_codatatype lthy T));
val _ = not (null arg_Ts) orelse
error "Function with no argument cannot be registered as friend";
val {T = Type (fpT_name, fpT_args0), pre_bnf, fp_bnf = live_fp_bnf, fp_res, ...} =
checked_fp_sugar_of lthy fpT_name;
val num_fp_tyargs = length fpT_args0;
val fpT_Ss = map Type.sort_of_atyp fpT_args0;
val live_fp_alives = liveness_of_fp_bnf num_fp_tyargs live_fp_bnf;
val (old_info as {friend_names = old_friend_names, sig_fp_sugars = old_sig_fp_sugar :: _,
buffer = old_buffer, ...}, lthy) =
corec_info_of res_T lthy;
val old_sig_T_name = fst (dest_Type (#T old_sig_fp_sugar));
val old_sig_alives = liveness_of_fp_bnf (num_fp_tyargs + 1) (#fp_bnf old_sig_fp_sugar);
(* FIXME: later *)
val fp_alives = fst (split_last old_sig_alives);
val fp_alives = map (K false) live_fp_alives;
val _ = not (member (op =) old_friend_names friend_name) orelse
error ("Function " ^ quote (Syntax.string_of_term lthy (Const (friend_name, friend_T))) ^
" already registered as friend");
val lthy = lthy |> Variable.declare_typ friend_T;
val ((Ds, [Y, Z]), _) = lthy
|> mk_TFrees' fpT_Ss
||>> mk_TFrees 2;
(* FIXME *)
val dead_Ds = Ds;
val live_As = [Y];
val ctor = mk_ctor res_Ds (the_single (#ctors fp_res));
val fpT0 = Type (fpT_name, Ds);
val k_Ts0 = map (typ_subst_nonatomic (res_Ds ~~ Ds) o typ_subst_nonatomic [(res_T, Y)]) arg_Ts;
val k_T0 = mk_tupleT_balanced k_Ts0;
val As = Ds @ [Y];
val res_As = res_Ds @ [Y];
val k_As = fold Term.add_tfreesT k_Ts0 [];
val _ = (case filter_out (member (op =) As o TFree) k_As of [] => ()
| k_A :: _ => error ("Cannot have type variable " ^
quote (Syntax.string_of_typ lthy (TFree k_A)) ^
" in the argument types when it does not occur as an immediate argument of the result \
\type constructor"));
val substDT = Term.typ_subst_atomic (Ds ~~ res_Ds);
val old_sig_T0 = Type (old_sig_T_name, As);
val ((fp_b, version), lthy) = lthy |> get_name_next_version_of fpT_name;
val (((dead_k_bnf, sig_fp_sugar), ssig_fp_sugar), lthy) = lthy
|> bnf_with_deads_and_lives dead_Ds live_As Y fpT0 k_T0
||>> define_sig_type fp_b version fp_alives Ds Y (mk_sumT (old_sig_T0, k_T0))
||>> define_ssig_type fp_b version fp_alives Ds Y fpT0;
val _ = live_of_bnf dead_k_bnf = 1 orelse
error "Impossible type for friend (the result codatatype must occur live in the arguments)";
val (dead_pre_bnf, lthy) = lthy
|> bnf_kill_all_but 1 pre_bnf;
val sig_fp_ctr_sugar = #fp_ctr_sugar sig_fp_sugar;
val ssig_fp_ctr_sugar = #fp_ctr_sugar ssig_fp_sugar;
val sig_ctr_sugar = #ctr_sugar sig_fp_ctr_sugar;
val ssig_ctr_sugar = #ctr_sugar ssig_fp_ctr_sugar;
val ssig_T_name = fst (dest_Type (#T ssig_fp_sugar));
val preT = pre_type_of_ctor Y ctor;
val old_sig_T = substDT old_sig_T0;
val k_T = substDT k_T0;
val ssig_T = Type (ssig_T_name, res_As);
val Sig = mk_ctr res_As (the_single (#ctrs sig_ctr_sugar));
val [Oper, VLeaf, CLeaf] = map (mk_ctr res_As) (#ctrs ssig_ctr_sugar);
val (ctr_wrapper, friends) =
mk_ctr_wrapper_friends lthy friend_name friend_T old_sig_T k_T Sig old_buffer;
val buffer =
{Oper = Oper, VLeaf = VLeaf, CLeaf = CLeaf, ctr_wrapper = ctr_wrapper, friends = friends};
in
((old_info, fp_b, version, Y, Z, preT, k_T, ssig_T, dead_pre_bnf, dead_k_bnf, sig_fp_sugar,
ssig_fp_sugar, buffer), lthy)
end;
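(* Derive the extended corecursor for the new friend (via "derive_corecUU_step") and register the resulting corecursor state. *)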
fun register_friend_corec key fp_b version Y Z k_T dead_k_bnf sig_fp_sugar ssig_fp_sugar
friend_const rho rho_transfer old_info lthy =
let
val friend_T = fastype_of friend_const;
val res_T = body_type friend_T;
in
derive_corecUU_step res_T old_info key friend_T fp_b version Y Z k_T dead_k_bnf sig_fp_sugar
ssig_fp_sugar rho rho_transfer lthy
|> (fn ((info, friend_info), lthy) => (friend_info, register_corec_info info lthy))
end;
fun merge_corec_info_exprss exprs1 exprs2 lthy =
let
fun all_friend_names_of exprs =
fold (union (op =)) (map (#friend_names o corec_ad_of_expr) exprs) [];
val friend_names1 = all_friend_names_of exprs1;
val friend_names2 = all_friend_names_of exprs2;
in
if subset (op =) (friend_names2, friend_names1) then
if subset (op =) (friend_names1, friend_names2) andalso
length (filter is_Info exprs2) > length (filter is_Info exprs1) then
(exprs2, lthy)
else
(exprs1, lthy)
else if subset (op =) (friend_names1, friend_names2) then
(exprs2, lthy)
else
fold_rev (uncurry o insert_corec_info_expr) exprs2 (exprs1, lthy)
end;
fun merge_corec_info_tabs info_tab1 info_tab2 lthy =
let
val fpT_names = union (op =) (Symtab.keys info_tab1) (Symtab.keys info_tab2);
fun add_infos_of fpT_name (info_tab, lthy) =
(case Symtab.lookup info_tab1 fpT_name of
NONE =>
(Symtab.update_new (fpT_name, the (Symtab.lookup info_tab2 fpT_name)) info_tab, lthy)
| SOME exprs1 =>
(case Symtab.lookup info_tab2 fpT_name of
NONE => (Symtab.update_new (fpT_name, exprs1) info_tab, lthy)
| SOME exprs2 =>
let val (exprs, lthy) = merge_corec_info_exprss exprs1 exprs2 lthy in
(Symtab.update_new (fpT_name, exprs) info_tab, lthy)
end));
in
fold add_infos_of fpT_names (Symtab.empty, lthy)
end;
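(* Merge the corecursor info tables stemming from different theory branches into a single table; raise Same.SAME if there is only one table and hence nothing to do. *)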
fun consolidate lthy =
(case snd (Data.get (Context.Proof lthy)) of
[_] => raise Same.SAME
| info_tab :: info_tabs =>
let
val (info_tab', lthy) = fold_rev (uncurry o merge_corec_info_tabs) info_tabs (info_tab, lthy);
in
- Local_Theory.declaration {syntax = false, pervasive = true} (fn phi =>
+ Local_Theory.declaration {syntax = false, pervasive = true, pos = Position.thread_data ()} (fn phi =>
Data.map (apsnd (fn _ => [Symtab.map (K (map (morph_corec_info_expr phi))) info_tab'])))
lthy
end);
fun consolidate_global thy =
SOME (Named_Target.theory_map consolidate thy)
handle Same.SAME => NONE;
val _ = Theory.setup (Theory.at_begin consolidate_global);
end;
diff --git a/src/HOL/Tools/BNF/bnf_gfp_grec_sugar.ML b/src/HOL/Tools/BNF/bnf_gfp_grec_sugar.ML
--- a/src/HOL/Tools/BNF/bnf_gfp_grec_sugar.ML
+++ b/src/HOL/Tools/BNF/bnf_gfp_grec_sugar.ML
@@ -1,2391 +1,2394 @@
(* Title: HOL/Tools/BNF/bnf_gfp_grec_sugar.ML
Author: Aymeric Bouzy, Ecole polytechnique
Author: Jasmin Blanchette, Inria, LORIA, MPII
Author: Dmitriy Traytel, ETH Zürich
Copyright 2015, 2016
Generalized corecursor sugar ("corec" and friends).
*)
signature BNF_GFP_GREC_SUGAR =
sig
datatype corec_option =
Plugins_Option of Proof.context -> Plugin_Name.filter |
Friend_Option |
Transfer_Option
val parse_corec_equation: Proof.context -> term list -> term -> term list * term
val explore_corec_equation: Proof.context -> bool -> bool -> string -> term ->
BNF_GFP_Grec_Sugar_Util.s_parse_info -> typ -> term list * term -> term list * term
val build_corecUU_arg_and_goals: bool -> term -> term list * term -> local_theory ->
(((thm list * thm list * thm list) * term list) * term) * local_theory
val derive_eq_corecUU: Proof.context -> BNF_GFP_Grec.corec_info -> term -> term -> thm -> thm
val derive_unique: Proof.context -> morphism -> term -> BNF_GFP_Grec.corec_info -> string ->
thm -> thm
val corec_cmd: bool -> corec_option list -> (binding * string option * mixfix) list * string ->
local_theory -> local_theory
val corecursive_cmd: bool -> corec_option list ->
(binding * string option * mixfix) list * string -> local_theory -> Proof.state
val friend_of_corec_cmd: (string * string option) * string -> local_theory -> Proof.state
val coinduction_upto_cmd: string * string -> local_theory -> local_theory
end;
structure BNF_GFP_Grec_Sugar : BNF_GFP_GREC_SUGAR =
struct
open Ctr_Sugar
open BNF_Util
open BNF_Tactics
open BNF_Def
open BNF_Comp
open BNF_FP_Util
open BNF_FP_Def_Sugar
open BNF_FP_N2M_Sugar
open BNF_GFP_Util
open BNF_GFP_Rec_Sugar
open BNF_FP_Rec_Sugar_Transfer
open BNF_GFP_Grec
open BNF_GFP_Grec_Sugar_Util
open BNF_GFP_Grec_Sugar_Tactics
val cong_N = "cong_";
val baseN = "base";
val reflN = "refl";
val symN = "sym";
val transN = "trans";
val cong_introsN = prefix cong_N "intros";
val codeN = "code";
val coinductN = "coinduct";
val coinduct_uptoN = "coinduct_upto";
val corecN = "corec";
val ctrN = "ctr";
val discN = "disc";
val disc_iffN = "disc_iff";
val eq_algrhoN = "eq_algrho";
val eq_corecUUN = "eq_corecUU";
val friendN = "friend";
val inner_elimN = "inner_elim";
val inner_inductN = "inner_induct";
val inner_simpN = "inner_simp";
val rhoN = "rho";
val selN = "sel";
val uniqueN = "unique";
val inner_fp_suffix = "_inner_fp";
val nitpicksimp_attrs = @{attributes [nitpick_simp]};
val simp_attrs = @{attributes [simp]};
val unfold_id_thms1 =
map (fn thm => thm RS eq_reflection) @{thms id_bnf_o o_id_bnf id_apply o_apply} @
@{thms fst_def[abs_def, symmetric] snd_def[abs_def, symmetric]};
fun unfold_id_bnf_etc lthy =
let val thy = Proof_Context.theory_of lthy in
Raw_Simplifier.rewrite_term thy unfold_id_thms1 []
#> Raw_Simplifier.rewrite_term thy @{thms BNF_Composition.id_bnf_def} []
end;
datatype corec_option =
Plugins_Option of Proof.context -> Plugin_Name.filter |
Friend_Option |
Transfer_Option;
val corec_option_parser = Parse.group (K "option")
(Plugin_Name.parse_filter >> Plugins_Option
|| Parse.reserved "friend" >> K Friend_Option
|| Parse.reserved "transfer" >> K Transfer_Option);
type codatatype_extra =
{case_dtor: thm,
case_trivial: thm,
abs_rep_transfers: thm list};
fun morph_codatatype_extra phi ({case_dtor, case_trivial, abs_rep_transfers} : codatatype_extra) =
{case_dtor = Morphism.thm phi case_dtor, case_trivial = Morphism.thm phi case_trivial,
abs_rep_transfers = map (Morphism.thm phi) abs_rep_transfers};
val transfer_codatatype_extra = morph_codatatype_extra o Morphism.transfer_morphism;
type coinduct_extra =
{coinduct: thm,
coinduct_attrs: Token.src list,
cong_intro_pairs: (string * thm) list};
fun morph_coinduct_extra phi ({coinduct, coinduct_attrs, cong_intro_pairs} : coinduct_extra) =
{coinduct = Morphism.thm phi coinduct, coinduct_attrs = coinduct_attrs,
cong_intro_pairs = map (apsnd (Morphism.thm phi)) cong_intro_pairs};
val transfer_coinduct_extra = morph_coinduct_extra o Morphism.transfer_morphism;
type friend_extra =
{eq_algrhos: thm list,
algrho_eqs: thm list};
val empty_friend_extra = {eq_algrhos = [], algrho_eqs = []};
fun merge_friend_extras ({eq_algrhos = eq_algrhos1, algrho_eqs = algrho_eqs1},
{eq_algrhos = eq_algrhos2, algrho_eqs = algrho_eqs2}) =
{eq_algrhos = union Thm.eq_thm_prop eq_algrhos1 eq_algrhos2,
algrho_eqs = union Thm.eq_thm_prop algrho_eqs1 algrho_eqs2};
type corec_sugar_data =
codatatype_extra Symtab.table * coinduct_extra Symtab.table * friend_extra Symtab.table;
structure Data = Generic_Data
(
type T = corec_sugar_data;
val empty = (Symtab.empty, Symtab.empty, Symtab.empty);
fun merge data : T =
(Symtab.merge (K true) (apply2 #1 data), Symtab.merge (K true) (apply2 #2 data),
Symtab.join (K merge_friend_extras) (apply2 #3 data));
);
fun register_codatatype_extra fpT_name extra =
- Local_Theory.declaration {syntax = false, pervasive = true} (fn phi =>
- Data.map (@{apply 3(1)} (Symtab.update (fpT_name, morph_codatatype_extra phi extra))));
+ Local_Theory.declaration {syntax = false, pervasive = true, pos = Position.thread_data ()}
+ (fn phi =>
+ Data.map (@{apply 3(1)} (Symtab.update (fpT_name, morph_codatatype_extra phi extra))));
fun codatatype_extra_of ctxt =
Symtab.lookup (#1 (Data.get (Context.Proof ctxt)))
#> Option.map (transfer_codatatype_extra (Proof_Context.theory_of ctxt));
fun all_codatatype_extras_of ctxt =
Symtab.dest (#1 (Data.get (Context.Proof ctxt)));
fun register_coinduct_extra fpT_name extra =
- Local_Theory.declaration {syntax = false, pervasive = true} (fn phi =>
- Data.map (@{apply 3(2)} (Symtab.update (fpT_name, morph_coinduct_extra phi extra))));
+ Local_Theory.declaration {syntax = false, pervasive = true, pos = Position.thread_data ()}
+ (fn phi =>
+ Data.map (@{apply 3(2)} (Symtab.update (fpT_name, morph_coinduct_extra phi extra))));
fun coinduct_extra_of ctxt =
Symtab.lookup (#2 (Data.get (Context.Proof ctxt)))
#> Option.map (transfer_coinduct_extra (Proof_Context.theory_of ctxt));
fun register_friend_extra fun_name eq_algrho algrho_eq =
- Local_Theory.declaration {syntax = false, pervasive = true} (fn phi =>
- Data.map (@{apply 3(3)} (Symtab.map_default (fun_name, empty_friend_extra)
- (fn {eq_algrhos, algrho_eqs} =>
- {eq_algrhos = Morphism.thm phi eq_algrho :: eq_algrhos,
- algrho_eqs = Morphism.thm phi algrho_eq :: algrho_eqs}))));
+ Local_Theory.declaration {syntax = false, pervasive = true, pos = Position.thread_data ()}
+ (fn phi =>
+ Data.map (@{apply 3(3)} (Symtab.map_default (fun_name, empty_friend_extra)
+ (fn {eq_algrhos, algrho_eqs} =>
+ {eq_algrhos = Morphism.thm phi eq_algrho :: eq_algrhos,
+ algrho_eqs = Morphism.thm phi algrho_eq :: algrho_eqs}))));
fun all_friend_extras_of ctxt =
Symtab.dest (#3 (Data.get (Context.Proof ctxt)));
fun coinduct_extras_of_generic context =
corec_infos_of_generic context
#> map (#corecUU #> dest_Const #> fst #> Symtab.lookup (#2 (Data.get context)) #> the
#> transfer_coinduct_extra (Context.theory_of context));
fun get_coinduct_uptos fpT_name context =
coinduct_extras_of_generic context fpT_name |> map #coinduct;
fun get_cong_all_intros fpT_name context =
coinduct_extras_of_generic context fpT_name |> maps (#cong_intro_pairs #> map snd);
fun get_cong_intros fpT_name name context =
coinduct_extras_of_generic context fpT_name
|> map_filter (#cong_intro_pairs #> (fn ps => AList.lookup (op =) ps name));
fun ctr_names_of_fp_name lthy fpT_name =
fpT_name |> fp_sugar_of lthy |> the |> #fp_ctr_sugar |> #ctr_sugar |> #ctrs
|> map (Long_Name.base_name o name_of_ctr);
fun register_coinduct_dynamic_base fpT_name lthy =
let val fp_b = Binding.name (Long_Name.base_name fpT_name) in
lthy
|> fold Local_Theory.add_thms_dynamic
((mk_fp_binding fp_b coinduct_uptoN, get_coinduct_uptos fpT_name) ::
map (fn N =>
let val N = cong_N ^ N in
(mk_fp_binding fp_b N, get_cong_intros fpT_name N)
end)
([baseN, reflN, symN, transN] @ ctr_names_of_fp_name lthy fpT_name))
|> Local_Theory.add_thms_dynamic
(mk_fp_binding fp_b cong_introsN, get_cong_all_intros fpT_name)
end;
fun register_coinduct_dynamic_friend fpT_name friend_name =
let
val fp_b = Binding.name (Long_Name.base_name fpT_name);
val friend_base_name = cong_N ^ Long_Name.base_name friend_name;
in
Local_Theory.add_thms_dynamic
(mk_fp_binding fp_b friend_base_name, get_cong_intros fpT_name friend_base_name)
end;
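(* Note: proves that the case combinator of the codatatype can be expressed via its destructor
   ("dtor") and the representation function of the abstract type. *)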
fun derive_case_dtor ctxt fpT_name =
let
val thy = Proof_Context.theory_of ctxt;
val SOME ({fp_res_index, fp_res = {dtors = dtors0, dtor_ctors, ...},
absT_info = {rep = rep0, abs_inverse, ...},
fp_ctr_sugar = {ctr_defs, ctr_sugar = {casex, exhaust, case_thms, ...}, ...}, ...}) =
fp_sugar_of ctxt fpT_name;
val (f_Ts, Type (_, [fpT as Type (_, As), _])) = strip_fun_type (fastype_of casex);
val x_Tss = map binder_types f_Ts;
val (((u, fs), xss), _) = ctxt
|> yield_singleton (mk_Frees "y") fpT
||>> mk_Frees "f" f_Ts
||>> mk_Freess "x" x_Tss;
val dtor0 = nth dtors0 fp_res_index;
val dtor = mk_dtor As dtor0;
val u' = dtor $ u;
val absT = fastype_of u';
val rep = mk_rep absT rep0;
val goal = mk_Trueprop_eq (list_comb (casex, fs) $ u,
mk_case_absumprod absT rep fs xss xss $ u')
|> Raw_Simplifier.rewrite_term thy @{thms comp_def[THEN eq_reflection]} [];
val vars = map (fst o dest_Free) (u :: fs);
val dtor_ctor = nth dtor_ctors fp_res_index;
in
Goal.prove_sorry ctxt vars [] goal (fn {context = ctxt, prems = _} =>
mk_case_dtor_tac ctxt u abs_inverse dtor_ctor ctr_defs exhaust case_thms)
|> Thm.close_derivation \<^here>
end;
fun derive_case_trivial ctxt fpT_name =
let
val SOME {casex, exhaust, case_thms, ...} = ctr_sugar_of ctxt fpT_name;
val Type (_, As0) = domain_type (body_fun_type (fastype_of casex));
val (As, _) = ctxt
|> mk_TFrees' (map Type.sort_of_atyp As0);
val fpT = Type (fpT_name, As);
val (var_name, ()) = singleton (Variable.variant_frees ctxt []) ("x", ());
val var = Free (var_name, fpT);
val goal = mk_Trueprop_eq (expand_to_ctr_term ctxt fpT var, var);
val exhaust' = infer_instantiate' ctxt [SOME (Thm.cterm_of ctxt var)] exhaust;
in
Goal.prove_sorry ctxt [var_name] [] goal (fn {context = ctxt, prems = _} =>
HEADGOAL (rtac ctxt exhaust') THEN ALLGOALS (hyp_subst_tac ctxt) THEN
unfold_thms_tac ctxt case_thms THEN ALLGOALS (rtac ctxt refl))
|> Thm.close_derivation \<^here>
end;
fun mk_abs_rep_transfers ctxt fpT_name =
[mk_abs_transfer ctxt fpT_name, mk_rep_transfer ctxt fpT_name]
handle Fail _ => [];
fun ensure_codatatype_extra fpT_name ctxt =
(case codatatype_extra_of ctxt fpT_name of
NONE =>
let val abs_rep_transfers = mk_abs_rep_transfers ctxt fpT_name in
ctxt
|> register_codatatype_extra fpT_name
{case_dtor = derive_case_dtor ctxt fpT_name,
case_trivial = derive_case_trivial ctxt fpT_name,
abs_rep_transfers = abs_rep_transfers}
|> set_transfer_rule_attrs abs_rep_transfers
end
| SOME {abs_rep_transfers, ...} => ctxt |> set_transfer_rule_attrs abs_rep_transfers);
fun setup_base fpT_name =
register_coinduct_dynamic_base fpT_name
#> ensure_codatatype_extra fpT_name;
fun is_set ctxt (const_name, T) =
(case T of
Type (\<^type_name>\<open>fun\<close>, [Type (fpT_name, _), Type (\<^type_name>\<open>set\<close>, [_])]) =>
(case bnf_of ctxt fpT_name of
SOME bnf => exists (fn Const (s, _) => s = const_name | _ => false) (sets_of_bnf bnf)
| NONE => false)
| _ => false);
fun case_eq_if_thms_of_term ctxt t =
let val ctr_sugars = map_filter (ctr_sugar_of_case ctxt o fst) (Term.add_consts t []) in
maps #case_eq_ifs ctr_sugars
end;
fun all_algrho_eqs_of ctxt =
maps (#algrho_eqs o snd) (all_friend_extras_of ctxt);
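(* Note: proves the code equation for a function defined via the extended corecursor
   ("corecUU"), using its definition and the BNF map/simp facts gathered below. *)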
fun derive_code ctxt inner_fp_simps goal
{sig_fp_sugars, ssig_fp_sugar, eval, eval_simps, all_algLam_algs, corecUU_thm, ...} fun_t
fun_def =
let
val fun_T = fastype_of fun_t;
val (arg_Ts, Type (fpT_name, _)) = strip_type fun_T;
val num_args = length arg_Ts;
val SOME {pre_bnf, fp_bnf, absT_info, fp_nesting_bnfs, live_nesting_bnfs, fp_ctr_sugar, ...} =
fp_sugar_of ctxt fpT_name;
val SOME {case_trivial, ...} = codatatype_extra_of ctxt fpT_name;
val ctr_sugar = #ctr_sugar fp_ctr_sugar;
val pre_map_def = map_def_of_bnf pre_bnf;
val abs_inverse = #abs_inverse absT_info;
val ctr_defs = #ctr_defs fp_ctr_sugar;
val case_eq_ifs = #case_eq_ifs ctr_sugar @ case_eq_if_thms_of_term ctxt goal;
val all_sig_map_thms = maps (#map_thms o #fp_bnf_sugar) sig_fp_sugars;
val fp_map_ident = map_ident_of_bnf fp_bnf;
val fpsig_nesting_bnfs = fp_nesting_bnfs @ maps #live_nesting_bnfs sig_fp_sugars;
val fpsig_nesting_T_names = map (fst o dest_Type o T_of_bnf) fpsig_nesting_bnfs;
val fpsig_nesting_fp_sugars = map_filter (fp_sugar_of ctxt) fpsig_nesting_T_names;
val fpsig_nesting_fp_bnf_sugars = map #fp_bnf_sugar fpsig_nesting_fp_sugars;
val ssig_fp_bnf_sugar = #fp_bnf_sugar ssig_fp_sugar;
val ssig_bnf = #fp_bnf ssig_fp_sugar;
val ssig_map = map_of_bnf ssig_bnf;
val fpsig_nesting_maps = map map_of_bnf fpsig_nesting_bnfs;
val fpsig_nesting_map_ident0s = map map_ident0_of_bnf fpsig_nesting_bnfs;
val fpsig_nesting_map_comps = map map_comp_of_bnf fpsig_nesting_bnfs;
val fpsig_nesting_map_thms = maps #map_thms fpsig_nesting_fp_bnf_sugars;
val live_nesting_map_ident0s = map map_ident0_of_bnf live_nesting_bnfs;
val ssig_map_thms = #map_thms ssig_fp_bnf_sugar;
val all_algLam_alg_pointfuls = map (mk_pointful ctxt) all_algLam_algs;
in
Variable.add_free_names ctxt goal []
|> (fn vars => Goal.prove_sorry ctxt vars [] goal (fn {context = ctxt, prems = _} =>
mk_code_tac ctxt num_args fpsig_nesting_maps ssig_map eval pre_map_def abs_inverse
fpsig_nesting_map_ident0s fpsig_nesting_map_comps fpsig_nesting_map_thms
live_nesting_map_ident0s fp_map_ident case_trivial ctr_defs case_eq_ifs corecUU_thm
all_sig_map_thms ssig_map_thms all_algLam_alg_pointfuls (all_algrho_eqs_of ctxt) eval_simps
inner_fp_simps fun_def))
|> Thm.close_derivation \<^here>
end;
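(* Note: proves a uniqueness rule stating that any function satisfying the same code equation
   is equal to the defined function. *)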
fun derive_unique ctxt phi code_goal
{sig_fp_sugars, ssig_fp_sugar, eval, eval_simps, all_algLam_algs, corecUU_unique, ...} fpT_name
eq_corecUU =
let
val SOME {pre_bnf, fp_bnf, absT_info, fp_nesting_bnfs, live_nesting_bnfs, fp_ctr_sugar, ...} =
fp_sugar_of ctxt fpT_name;
val SOME {case_trivial, ...} = codatatype_extra_of ctxt fpT_name;
val ctr_sugar = #ctr_sugar fp_ctr_sugar;
val pre_map_def = map_def_of_bnf pre_bnf;
val abs_inverse = #abs_inverse absT_info;
val ctr_defs = #ctr_defs fp_ctr_sugar;
val case_eq_ifs = #case_eq_ifs ctr_sugar @ case_eq_if_thms_of_term ctxt code_goal;
val all_sig_map_thms = maps (#map_thms o #fp_bnf_sugar) sig_fp_sugars;
val fp_map_ident = map_ident_of_bnf fp_bnf;
val fpsig_nesting_bnfs = fp_nesting_bnfs @ maps #live_nesting_bnfs sig_fp_sugars;
val fpsig_nesting_T_names = map (fst o dest_Type o T_of_bnf) fpsig_nesting_bnfs;
val fpsig_nesting_fp_sugars = map_filter (fp_sugar_of ctxt) fpsig_nesting_T_names;
val fpsig_nesting_fp_bnf_sugars = map #fp_bnf_sugar fpsig_nesting_fp_sugars;
val ssig_fp_bnf_sugar = #fp_bnf_sugar ssig_fp_sugar;
val ssig_bnf = #fp_bnf ssig_fp_sugar;
val ssig_map = map_of_bnf ssig_bnf;
val fpsig_nesting_maps = map map_of_bnf fpsig_nesting_bnfs;
val fpsig_nesting_map_ident0s = map map_ident0_of_bnf fpsig_nesting_bnfs;
val fpsig_nesting_map_comps = map map_comp_of_bnf fpsig_nesting_bnfs;
val fpsig_nesting_map_thms = maps #map_thms fpsig_nesting_fp_bnf_sugars;
val live_nesting_map_ident0s = map map_ident0_of_bnf live_nesting_bnfs;
val ssig_map_thms = #map_thms ssig_fp_bnf_sugar;
val all_algLam_alg_pointfuls = map (mk_pointful ctxt) all_algLam_algs;
val \<^Const_>\<open>Trueprop for \<^Const_>\<open>HOL.eq _ for lhs rhs\<close>\<close> = code_goal;
val (fun_t, args) = strip_comb lhs;
val closed_rhs = fold_rev lambda args rhs;
val fun_T = fastype_of fun_t;
val num_args = num_binder_types fun_T;
val f = Free (singleton (Variable.variant_frees ctxt []) ("f", fun_T));
val is_self_call = curry (op aconv) fun_t;
val has_self_call = exists_subterm is_self_call;
fun fify args (t $ u) = fify (u :: args) t $ fify [] u
| fify _ (Abs (s, T, t)) = Abs (s, T, fify [] t)
| fify args t = if t = fun_t andalso not (exists has_self_call args) then f else t;
val goal = Logic.mk_implies (mk_Trueprop_eq (f, fify [] closed_rhs), mk_Trueprop_eq (f, fun_t))
|> Morphism.term phi;
in
Goal.prove_sorry ctxt [fst (dest_Free f)] [] goal (fn {context = ctxt, prems = _} =>
mk_unique_tac ctxt num_args fpsig_nesting_maps ssig_map eval pre_map_def abs_inverse
fpsig_nesting_map_ident0s fpsig_nesting_map_comps fpsig_nesting_map_thms
live_nesting_map_ident0s fp_map_ident case_trivial ctr_defs case_eq_ifs all_sig_map_thms
ssig_map_thms all_algLam_alg_pointfuls (all_algrho_eqs_of ctxt) eval_simps corecUU_unique
eq_corecUU)
|> Thm.close_derivation \<^here>
end;
fun derive_last_disc ctxt fcT_name =
let
val SOME {T = fcT, discs, exhaust, disc_thmss, ...} = ctr_sugar_of ctxt fcT_name;
val (u, _) = ctxt
|> yield_singleton (mk_Frees "x") fcT;
val udiscs = map (rapp u) discs;
val (not_udiscs, last_udisc) = split_last udiscs
|>> map HOLogic.mk_not;
val goal = mk_Trueprop_eq (last_udisc, foldr1 HOLogic.mk_conj not_udiscs);
in
Goal.prove_sorry ctxt [fst (dest_Free u)] [] goal (fn {context = ctxt, prems = _} =>
mk_last_disc_tac ctxt u exhaust (flat disc_thmss))
|> Thm.close_derivation \<^here>
end;
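(* Note: proves that the friendly operation equals its curried "algrho" companion and derives
   the corresponding uncurried equation ("algrho_eq"). *)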
fun derive_eq_algrho ctxt {sig_fp_sugars, ssig_fp_sugar, eval, eval_simps, all_algLam_algs,
corecUU_unique, ...}
({algrho = algrho0, dtor_algrho, ...} : friend_info) fun_t k_T code_goal const_transfers rho_def
eq_corecUU =
let
val fun_T = fastype_of fun_t;
val (arg_Ts, Type (fpT_name, Ts)) = strip_type fun_T;
val num_args = length arg_Ts;
val SOME {fp_res_index, fp_res, pre_bnf, fp_bnf, absT_info, fp_nesting_bnfs, live_nesting_bnfs,
fp_ctr_sugar, ...} =
fp_sugar_of ctxt fpT_name;
val SOME {case_dtor, ...} = codatatype_extra_of ctxt fpT_name;
val fp_nesting_Ts = map T_of_bnf fp_nesting_bnfs;
fun is_nullary_disc_def (\<^Const>\<open>Trueprop\<close> $ (Const (\<^const_name>\<open>HOL.eq\<close>, _) $ _
$ (Const (\<^const_name>\<open>HOL.eq\<close>, _) $ _ $ _))) = true
| is_nullary_disc_def (Const (\<^const_name>\<open>Pure.eq\<close>, _) $ _
$ (Const (\<^const_name>\<open>HOL.eq\<close>, _) $ _ $ _)) = true
| is_nullary_disc_def _ = false;
val dtor_ctor = nth (#dtor_ctors fp_res) fp_res_index;
val ctor_iff_dtor = #ctor_iff_dtor fp_ctr_sugar;
val ctr_sugar = #ctr_sugar fp_ctr_sugar;
val pre_map_def = map_def_of_bnf pre_bnf;
val abs_inverse = #abs_inverse absT_info;
val ctr_defs = #ctr_defs fp_ctr_sugar;
val nullary_disc_defs = filter (is_nullary_disc_def o Thm.prop_of) (#disc_defs ctr_sugar);
val disc_sel_eq_cases = #disc_eq_cases ctr_sugar @ #sel_defs ctr_sugar;
val case_eq_ifs = #case_eq_ifs ctr_sugar @ case_eq_if_thms_of_term ctxt code_goal;
val all_sig_map_thms = maps (#map_thms o #fp_bnf_sugar) sig_fp_sugars;
fun add_tnameT (Type (s, Ts)) = insert (op =) s #> fold add_tnameT Ts
| add_tnameT _ = I;
fun map_disc_sels'_of s =
(case fp_sugar_of ctxt s of
SOME {fp_bnf_sugar = {map_disc_iffs, map_selss, ...}, ...} =>
let
val map_selss' =
if length map_selss <= 1 then map_selss
else map (map (unfold_thms ctxt (no_refl [derive_last_disc ctxt s]))) map_selss;
in
map_disc_iffs @ flat map_selss'
end
| NONE => []);
fun mk_const_pointful_natural const_transfer =
SOME (mk_pointful_natural_from_transfer ctxt const_transfer)
handle UNNATURAL () => NONE;
val const_pointful_natural_opts = map mk_const_pointful_natural const_transfers;
val const_pointful_naturals = map_filter I const_pointful_natural_opts;
val fp_nesting_k_T_names = fold add_tnameT (k_T :: fp_nesting_Ts) [];
val fp_nesting_k_map_disc_sels' = maps map_disc_sels'_of fp_nesting_k_T_names;
val fp_map_ident = map_ident_of_bnf fp_bnf;
val fpsig_nesting_bnfs = fp_nesting_bnfs @ maps #live_nesting_bnfs sig_fp_sugars;
val fpsig_nesting_T_names = map (fst o dest_Type o T_of_bnf) fpsig_nesting_bnfs;
val fpsig_nesting_fp_sugars = map_filter (fp_sugar_of ctxt) fpsig_nesting_T_names;
val fpsig_nesting_fp_bnf_sugars = map #fp_bnf_sugar fpsig_nesting_fp_sugars;
val ssig_fp_bnf_sugar = #fp_bnf_sugar ssig_fp_sugar;
val ssig_bnf = #fp_bnf ssig_fp_sugar;
val ssig_map = map_of_bnf ssig_bnf;
val fpsig_nesting_maps = map map_of_bnf fpsig_nesting_bnfs;
val fpsig_nesting_map_ident0s = map map_ident0_of_bnf fpsig_nesting_bnfs;
val fpsig_nesting_map_comps = map map_comp_of_bnf fpsig_nesting_bnfs;
val fpsig_nesting_map_thms = maps #map_thms fpsig_nesting_fp_bnf_sugars;
val live_nesting_map_ident0s = map map_ident0_of_bnf live_nesting_bnfs;
val ssig_map_thms = #map_thms ssig_fp_bnf_sugar;
val all_algLam_alg_pointfuls = map (mk_pointful ctxt) all_algLam_algs;
val ctor = nth (#ctors fp_res) fp_res_index;
val abs = #abs absT_info;
val rep = #rep absT_info;
val algrho = mk_ctr Ts algrho0;
val goal = mk_Trueprop_eq (fun_t, abs_curried_balanced arg_Ts algrho);
fun const_of_transfer thm =
(case Thm.prop_of thm of \<^Const>\<open>Trueprop\<close> $ (_ $ cst $ _) => cst);
val eq_algrho =
Goal.prove (*no sorry*) ctxt [] [] goal (fn {context = ctxt, prems = _} =>
mk_eq_algrho_tac ctxt fpsig_nesting_maps abs rep ctor ssig_map eval pre_map_def abs_inverse
fpsig_nesting_map_ident0s fpsig_nesting_map_comps fpsig_nesting_map_thms
live_nesting_map_ident0s fp_map_ident dtor_ctor ctor_iff_dtor ctr_defs nullary_disc_defs
disc_sel_eq_cases case_dtor case_eq_ifs const_pointful_naturals
fp_nesting_k_map_disc_sels' rho_def dtor_algrho corecUU_unique eq_corecUU all_sig_map_thms
ssig_map_thms all_algLam_alg_pointfuls (all_algrho_eqs_of ctxt) eval_simps)
|> Thm.close_derivation \<^here>
handle e as ERROR _ =>
(case filter (is_none o snd) (const_transfers ~~ const_pointful_natural_opts) of
[] => Exn.reraise e
| thm_nones =>
error ("Failed to state naturality property for " ^
commas (map (Syntax.string_of_term ctxt o const_of_transfer o fst) thm_nones)));
val algrho_eq = eq_algrho RS (mk_curry_uncurryN_balanced ctxt num_args RS iffD2) RS sym;
in
(eq_algrho, algrho_eq)
end;
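(* Note: prepares the parametricity goal for "rho": without abs/rep transfer rules the goal is
   rewritten with the pre-BNF relator definition; the returned function folds the rho definition
   back into the proved theorem. *)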
fun prime_rho_transfer_goal ctxt fpT_name rho_def goal =
let
val thy = Proof_Context.theory_of ctxt;
val SOME {pre_bnf, ...} = fp_sugar_of ctxt fpT_name;
val SOME {abs_rep_transfers, ...} = codatatype_extra_of ctxt fpT_name;
val simps = rel_def_of_bnf pre_bnf :: rho_transfer_simps;
val fold_rho = unfold_thms ctxt [rho_def RS @{thm symmetric}];
fun derive_unprimed rho_transfer' =
Variable.add_free_names ctxt goal []
|> (fn vars => Goal.prove_sorry ctxt vars [] goal (fn {context = ctxt, prems = _} =>
unfold_thms_tac ctxt simps THEN HEADGOAL (rtac ctxt rho_transfer')))
|> Thm.close_derivation \<^here>;
val goal' = Raw_Simplifier.rewrite_term thy simps [] goal;
in
if null abs_rep_transfers then (goal', derive_unprimed #> fold_rho)
else (goal, fold_rho)
end;
fun derive_rho_transfer_folded ctxt fpT_name const_transfers rho_def goal =
let
val SOME {pre_bnf, ...} = fp_sugar_of ctxt fpT_name;
val SOME {abs_rep_transfers, ...} = codatatype_extra_of ctxt fpT_name;
in
Variable.add_free_names ctxt goal []
|> (fn vars => Goal.prove (*no sorry*) ctxt vars [] goal (fn {context = ctxt, prems = _} =>
mk_rho_transfer_tac ctxt (null abs_rep_transfers) (rel_def_of_bnf pre_bnf)
const_transfers))
|> unfold_thms ctxt [rho_def RS @{thm symmetric}]
|> Thm.close_derivation \<^here>
end;
fun mk_cong_intro_ctr_or_friend_goal ctxt fpT Rcong alg =
let
val xy_Ts = binder_types (fastype_of alg);
val ((xs, ys), _) = ctxt
|> mk_Frees "x" xy_Ts
||>> mk_Frees "y" xy_Ts;
fun mk_prem xy_T x y =
build_rel [] ctxt [fpT] [] (fn (T, _) => if T = fpT then Rcong else HOLogic.eq_const T)
(xy_T, xy_T) $ x $ y;
val prems = @{map 3} mk_prem xy_Ts xs ys;
val concl = Rcong $ list_comb (alg, xs) $ list_comb (alg, ys);
in
Logic.list_implies (map HOLogic.mk_Trueprop prems, HOLogic.mk_Trueprop concl)
end;
fun derive_cong_ctr_intros ctxt cong_ctor_intro =
let
val \<^Const_>\<open>Pure.imp\<close> $ _ $ (\<^Const_>\<open>Trueprop\<close> $ ((Rcong as _ $ _) $ _ $ (ctor $ _))) =
Thm.prop_of cong_ctor_intro;
val fpT as Type (fpT_name, fp_argTs) = range_type (fastype_of ctor);
val SOME {pre_bnf, absT_info = {abs_inverse, ...},
fp_ctr_sugar = {ctr_defs, ctr_sugar = {ctrs = ctrs0, ...}, ...}, ...} =
fp_sugar_of ctxt fpT_name;
val ctrs = map (mk_ctr fp_argTs) ctrs0;
val pre_rel_def = rel_def_of_bnf pre_bnf;
fun prove ctr_def goal =
Variable.add_free_names ctxt goal []
|> (fn vars => Goal.prove_sorry ctxt vars [] goal (fn {context = ctxt, prems = _} =>
mk_cong_intro_ctr_or_friend_tac ctxt ctr_def [pre_rel_def, abs_inverse] cong_ctor_intro))
|> Thm.close_derivation \<^here>;
val goals = map (mk_cong_intro_ctr_or_friend_goal ctxt fpT Rcong) ctrs;
in
map2 prove ctr_defs goals
end;
fun derive_cong_friend_intro ctxt cong_algrho_intro =
let
val \<^Const_>\<open>Pure.imp\<close> $ _ $ (\<^Const_>\<open>Trueprop\<close> $ ((Rcong as _ $ _) $ _
$ ((algrho as Const (algrho_name, _)) $ _))) =
Thm.prop_of cong_algrho_intro;
val fpT as Type (_, fp_argTs) = range_type (fastype_of algrho);
fun has_algrho (\<^Const_>\<open>Trueprop\<close> $ (Const (\<^const_name>\<open>HOL.eq\<close>, _) $ _ $ rhs)) =
fst (dest_Const (head_of (strip_abs_body rhs))) = algrho_name;
val eq_algrho :: _ =
maps (filter (has_algrho o Thm.prop_of) o #eq_algrhos o snd) (all_friend_extras_of ctxt);
val \<^Const_>\<open>Trueprop\<close> $ (Const (\<^const_name>\<open>HOL.eq\<close>, _) $ friend0 $ _) = Thm.prop_of eq_algrho;
val friend = mk_ctr fp_argTs friend0;
val goal = mk_cong_intro_ctr_or_friend_goal ctxt fpT Rcong friend;
in
Variable.add_free_names ctxt goal []
|> (fn vars => Goal.prove_sorry ctxt vars [] goal (fn {context = ctxt, prems = _} =>
mk_cong_intro_ctr_or_friend_tac ctxt eq_algrho [] cong_algrho_intro))
|> Thm.close_derivation \<^here>
end;
fun derive_cong_intros lthy ctr_names friend_names
({cong_base, cong_refl, cong_sym, cong_trans, cong_alg_intros, ...} : dtor_coinduct_info) =
let
val cong_ctor_intro :: cong_algrho_intros = rev cong_alg_intros;
val names = map (prefix cong_N) ([baseN, reflN, symN, transN] @ ctr_names @ friend_names);
val thms = [cong_base, cong_refl, cong_sym, cong_trans] @
derive_cong_ctr_intros lthy cong_ctor_intro @
map (derive_cong_friend_intro lthy) cong_algrho_intros;
in
names ~~ thms
end;
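(* Note: instantiates the abstract dtor coinduction-up-to principle to the user-level
   constructors and discriminators of the given codatatype, yielding the coinduction rule
   together with its attributes. *)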
fun derive_coinduct ctxt (fpT as Type (fpT_name, fpT_args)) dtor_coinduct =
let
val thy = Proof_Context.theory_of ctxt;
val \<^Const_>\<open>Pure.imp\<close> $ (\<^Const_>\<open>Trueprop\<close> $ (_ $ Abs (_, _, _ $
Abs (_, _, \<^Const_>\<open>implies\<close> $ _ $ (_ $ (cong0 $ _) $ _ $ _))))) $ _ =
Thm.prop_of dtor_coinduct;
val SOME {X as TVar ((X_s, _), _), fp_res = {dtor_ctors, ...}, pre_bnf,
absT_info = {abs_inverse, ...}, live_nesting_bnfs,
fp_ctr_sugar = {ctrXs_Tss, ctr_defs,
ctr_sugar = ctr_sugar0 as {T = Type (_, T0_args), ctrs = ctrs0, discs = discs0, ...},
...}, ...} =
fp_sugar_of ctxt fpT_name;
val n = length ctrXs_Tss;
val ms = map length ctrXs_Tss;
val X' = TVar ((X_s, maxidx_of_typ fpT + 1), \<^sort>\<open>type\<close>);
val As_rho = tvar_subst thy T0_args fpT_args;
val substXAT = Term.typ_subst_TVars As_rho o Tsubst X X';
val substXA = Term.subst_TVars As_rho o substT X X';
val phi = Morphism.typ_morphism "BNF" substXAT $> Morphism.term_morphism "BNF" substXA;
fun mk_applied_cong arg =
enforce_type ctxt domain_type (fastype_of arg) cong0 $ arg;
val thm = derive_coinduct_thms_for_types ctxt false mk_applied_cong [pre_bnf] dtor_coinduct
dtor_ctors live_nesting_bnfs [fpT] [substXAT X] [map (map substXAT) ctrXs_Tss] [n]
[abs_inverse] [abs_inverse] I [ctr_defs] [morph_ctr_sugar phi ctr_sugar0]
|> map snd |> the_single;
val (attrs, _) = mk_coinduct_attrs [fpT] [ctrs0] [discs0] [ms];
in
(thm, attrs)
end;
type explore_parameters =
{bound_Us: typ list,
bound_Ts: typ list,
U: typ,
T: typ};
fun update_UT {bound_Us, bound_Ts, ...} U T =
{bound_Us = bound_Us, bound_Ts = bound_Ts, U = U, T = T};
fun explore_nested lthy explore {bound_Us, bound_Ts, U, T} t =
let
fun build_simple (T, U) =
if T = U then
\<^term>\<open>%y. y\<close>
else
Bound 0
|> explore {bound_Us = T :: bound_Us, bound_Ts = T :: bound_Ts, U = U, T = T}
|> (fn t => Abs (Name.uu, T, t));
in
betapply (build_map lthy [] [] build_simple (T, U), t)
end;
fun add_boundvar t = betapply (incr_boundvars 1 t, Bound 0);
fun explore_fun (arg_U :: arg_Us) explore {bound_Us, bound_Ts, U, T} t =
let val arg_name = the_default Name.uu (try (fn (Abs (s, _, _)) => s) t) in
add_boundvar t
|> explore_fun arg_Us explore
{bound_Us = arg_U :: bound_Us, bound_Ts = domain_type T :: bound_Ts, U = range_type U,
T = range_type T}
|> (fn t => Abs (arg_name, arg_U, t))
end
| explore_fun [] explore params t = explore params t;
fun massage_fun explore (params as {T, U, ...}) =
if can dest_funT T then explore_fun [domain_type U] explore params else explore params;
fun massage_star massages explore =
let
fun after_massage massages' t params t' =
if t aconv t' then massage_any massages' params t else massage_any massages params t'
and massage_any [] params t = explore params t
| massage_any (massage :: massages') params t =
massage (after_massage massages' t) params t;
in
massage_any massages
end;
fun massage_let explore params t =
(case strip_comb t of
(Const (\<^const_name>\<open>Let\<close>, _), [_, _]) => unfold_lets_splits t
| _ => t)
|> explore params;
fun check_corec_equation ctxt fun_frees (lhs, rhs) =
let
val (fun_t, arg_ts) = strip_comb lhs;
fun check_fun_name () =
null fun_frees orelse member (op aconv) fun_frees fun_t orelse
ill_formed_equation_head ctxt [] fun_t;
fun check_no_other_frees () =
(case Term.add_frees rhs [] |> map Free |> subtract (op =) (fun_frees @ arg_ts)
|> find_first (not o Variable.is_fixed ctxt o fst o dest_Free) of
NONE => ()
| SOME t => extra_variable_in_rhs ctxt [] t);
in
check_duplicate_variables_in_lhs ctxt [] arg_ts;
check_fun_name ();
check_all_fun_arg_frees ctxt [] (filter_out is_Var arg_ts);
check_no_other_frees ()
end;
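(* Note: splits a corec specification into left-hand-side arguments and right-hand side, checks
   it for well-formedness, and eta-expands missing arguments with fresh variables. *)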
fun parse_corec_equation ctxt fun_frees eq =
let
val (lhs, rhs) = HOLogic.dest_eq (HOLogic.dest_Trueprop (drop_all eq))
handle TERM _ => ill_formed_equation_lhs_rhs ctxt [eq];
val _ = check_corec_equation ctxt fun_frees (lhs, rhs);
val (fun_t, arg_ts) = strip_comb lhs;
val (arg_Ts, _) = strip_type (fastype_of fun_t);
val added_Ts = drop (length arg_ts) arg_Ts;
val free_names = mk_names (length added_Ts) "x" ~~ added_Ts;
val free_args = Variable.variant_frees ctxt [rhs, lhs] free_names |> map Free;
in
(arg_ts @ free_args, list_comb (rhs, free_args))
end;
fun morph_views phi (code, ctrs, discs, disc_iffs, sels) =
(Morphism.term phi code, map (Morphism.term phi) ctrs, map (Morphism.term phi) discs,
map (Morphism.term phi) disc_iffs, map (Morphism.term phi) sels);
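(* Note: generates the different views of a corec specification: the code equation and the
   constructor, discriminator, discriminator-iff, and selector properties. *)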
fun generate_views ctxt eq fun_t (lhs_free_args, rhs) =
let
val lhs = list_comb (fun_t, lhs_free_args);
val T as Type (T_name, Ts) = fastype_of rhs;
val SOME {fp_ctr_sugar = {ctr_sugar = {ctrs = ctrs0, discs = discs0, selss = selss0, ...}, ...},
...} =
fp_sugar_of ctxt T_name;
val ctrs = map (mk_ctr Ts) ctrs0;
val discs = map (mk_disc_or_sel Ts) discs0;
val selss = map (map (mk_disc_or_sel Ts)) selss0;
val code_view = drop_all eq;
fun can_case_expand t = not (can (dest_ctr ctxt T_name) t);
fun generate_raw_views conds t raw_views =
let
fun analyse (ctr :: ctrs) (disc :: discs) ctr' =
if ctr = ctr' then
(conds, disc, ctr)
else
analyse ctrs discs ctr';
in
(analyse ctrs discs (fst (strip_comb t))) :: raw_views
end;
fun generate_disc_views raw_views =
if length discs = 1 then
([], [])
else
let
fun collect_condss_disc condss [] _ = condss
| collect_condss_disc condss ((conds, disc', _) :: raw_views) disc =
collect_condss_disc (condss |> disc = disc' ? cons conds) raw_views disc;
val grouped_disc_views = discs
|> map (collect_condss_disc [] raw_views)
|> curry (op ~~) (map (fn disc => disc $ lhs) discs);
fun mk_disc_iff_props props [] = props
| mk_disc_iff_props _ ((lhs, \<^Const_>\<open>True\<close>) :: _) = [lhs]
| mk_disc_iff_props props ((lhs, rhs) :: views) =
mk_disc_iff_props ((HOLogic.mk_eq (lhs, rhs)) :: props) views;
in
(grouped_disc_views
|> map swap,
grouped_disc_views
|> map (apsnd (s_dnf #> mk_conjs))
|> mk_disc_iff_props []
|> map (fn eq => ([[]], eq)))
end;
fun generate_ctr_views raw_views =
let
fun collect_condss_ctr condss [] _ = condss
| collect_condss_ctr condss ((conds, _, ctr') :: raw_views) ctr =
collect_condss_ctr (condss |> ctr = ctr' ? cons conds) raw_views ctr;
fun mk_ctr_eq ctr_sels ctr =
let
fun extract_arg n sel _(*bound_Ts*) fun_t arg_ts =
if ctr = fun_t then
nth arg_ts n
else
let val t = list_comb (fun_t, arg_ts) in
if can_case_expand t then
sel $ t
else
Term.dummy_pattern (range_type (fastype_of sel))
end;
in
ctr_sels
|> map_index (uncurry extract_arg)
|> map (fn extract => massage_corec_code_rhs ctxt extract [] rhs)
|> curry list_comb ctr
|> curry HOLogic.mk_eq lhs
end;
fun remove_condss_if_alone [(_, concl)] = [([[]], concl)]
| remove_condss_if_alone views = views;
in
ctrs
|> `(map (collect_condss_ctr [] raw_views))
||> map2 mk_ctr_eq selss
|> op ~~
|> filter_out (null o fst)
|> remove_condss_if_alone
end;
fun generate_sel_views raw_views only_one_ctr =
let
fun mk_sel_positions sel =
let
fun get_sel_position _ [] = NONE
| get_sel_position i (sel' :: sels) =
if sel = sel' then SOME i else get_sel_position (i + 1) sels;
in
ctrs ~~ map (get_sel_position 0) selss
|> map_filter (fn (ctr, pos_opt) =>
if is_some pos_opt then SOME (ctr, the pos_opt) else NONE)
end;
fun collect_sel_condss0 condss [] _ = condss
| collect_sel_condss0 condss ((conds, _, ctr) :: raw_views) sel_positions =
let val condss' = condss |> is_some (AList.lookup (op =) sel_positions ctr) ? cons conds
in
collect_sel_condss0 condss' raw_views sel_positions
end;
val collect_sel_condss =
if only_one_ctr then K [[]] else collect_sel_condss0 [] raw_views;
fun mk_sel_rhs sel_positions sel =
let
val sel_T = range_type (fastype_of sel);
fun extract_sel_value _(*bound_Ts*) fun_t arg_ts =
(case AList.lookup (op =) sel_positions fun_t of
SOME n => nth arg_ts n
| NONE =>
let val t = list_comb (fun_t, arg_ts) in
if can_case_expand t then
sel $ t
else
Term.dummy_pattern sel_T
end);
in
massage_corec_code_rhs ctxt extract_sel_value [] rhs
end;
val ordered_sels = distinct (op =) (flat selss);
val sel_positionss = map mk_sel_positions ordered_sels;
val sel_rhss = map2 mk_sel_rhs sel_positionss ordered_sels;
val sel_lhss = map (rapp lhs o mk_disc_or_sel Ts) ordered_sels;
val sel_condss = map collect_sel_condss sel_positionss;
fun is_undefined (Const (\<^const_name>\<open>undefined\<close>, _)) = true
| is_undefined _ = false;
in
sel_condss ~~ (sel_lhss ~~ sel_rhss)
|> filter_out (is_undefined o snd o snd)
|> map (apsnd HOLogic.mk_eq)
end;
fun mk_atomic_prop fun_args (condss, concl) =
(Logic.list_all (map dest_Free fun_args, abstract_over_list fun_args
(Logic.list_implies (map HOLogic.mk_Trueprop (s_dnf condss), HOLogic.mk_Trueprop concl))));
val raw_views = rhs
|> massage_let_if_case ctxt (K false) (fn _(*bound_Ts*) => fn t => t
|> can_case_expand t ? expand_to_ctr_term ctxt T) (K (K ())) (K I) []
|> (fn expanded_rhs => fold_rev_let_if_case ctxt generate_raw_views [] expanded_rhs [])
|> rev;
val (disc_views, disc_iff_views) = generate_disc_views raw_views;
val ctr_views = generate_ctr_views raw_views;
val sel_views = generate_sel_views raw_views (length ctr_views = 1);
val mk_props = filter_out (null o fst) #> map (mk_atomic_prop lhs_free_args);
in
(code_view, mk_props ctr_views, mk_props disc_views, mk_props disc_iff_views,
mk_props sel_views)
end;
fun find_all_associated_types [] _ = []
| find_all_associated_types ((Type (_, Ts1), Type (_, Ts2)) :: TTs) T =
find_all_associated_types ((Ts1 ~~ Ts2) @ TTs) T
| find_all_associated_types ((T1, T2) :: TTs) T =
find_all_associated_types TTs T |> T1 = T ? cons T2;
fun as_member_of tab = try dest_Const #> Option.mapPartial (fst #> Symtab.lookup tab);
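(* Note: traverses the right-hand side of a friend specification and extracts the "rho"
   function describing its one-step behavior, defining it as a constant and recording the
   parametric constants encountered along the way. *)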
fun extract_rho_from_equation
({ctr_guards, inner_buffer = {Oper, VLeaf, CLeaf, ctr_wrapper, friends}, ...},
{pattern_ctrs, discs, sels, it, mk_case})
b version Y preT ssig_T friend_tm (lhs_args, rhs) lthy =
let
val thy = Proof_Context.theory_of lthy;
val res_T = fastype_of rhs;
val YpreT = HOLogic.mk_prodT (Y, preT);
fun fpT_to new_T T =
if T = res_T then
new_T
else
(case T of
Type (s, Ts) => Type (s, map (fpT_to new_T) Ts)
| _ => T);
fun build_params bound_Us bound_Ts T =
{bound_Us = bound_Us, bound_Ts = bound_Ts, U = T, T = T};
fun typ_before explore {bound_Us, bound_Ts, ...} t =
explore (build_params bound_Us bound_Ts (fastype_of1 (bound_Ts, t))) t;
val is_self_call = curry (op aconv) friend_tm;
val has_self_call = Term.exists_subterm is_self_call;
fun has_res_T bound_Ts t = fastype_of1 (bound_Ts, t) = res_T;
fun contains_res_T (Type (s, Ts)) = s = fst (dest_Type res_T) orelse exists contains_res_T Ts
| contains_res_T _ = false;
val res_T_lhs_args = filter (exists_type contains_res_T) lhs_args;
val is_res_T_lhs_arg = member (op =) res_T_lhs_args;
fun is_constant t =
not (Term.exists_subterm is_res_T_lhs_arg t orelse has_self_call t orelse loose_bvar (t, 0));
fun is_nested_type T = T <> res_T andalso T <> YpreT andalso T <> ssig_T;
val is_valid_case_argumentT = not o member (op =) [Y, ssig_T];
fun is_same_type_constr (Type (s, _)) (Type (s', _)) = (s = s')
| is_same_type_constr _ _ = false;
exception NO_ENCAPSULATION of unit;
val parametric_consts = Unsynchronized.ref [];
(* We are assuming that set functions are marked with "[transfer_rule]" (cf. the "transfer"
plugin). Otherwise, the "eq_algrho" tactic might fail. *)
fun is_special_parametric_const (x as (s, _)) =
s = \<^const_name>\<open>id\<close> orelse is_set lthy x;
fun add_parametric_const s general_T T U =
let
fun tupleT_of_funT T =
let val (Ts, T) = strip_type T in
mk_tupleT_balanced (Ts @ [T])
end;
fun funT_of_tupleT n =
dest_tupleT_balanced (n + 1)
#> split_last
#> op --->;
val m = num_binder_types general_T;
val param1_T = Type_Infer.paramify_vars general_T;
val param2_T = Type_Infer.paramify_vars param1_T;
val deadfixed_T =
build_map lthy [] [] (mk_undefined o op -->) (apply2 tupleT_of_funT (param1_T, param2_T))
|> singleton (Type_Infer_Context.infer_types lthy)
|> singleton (Type_Infer.fixate lthy false)
|> type_of
|> dest_funT
|-> generalize_types 1
|> funT_of_tupleT m;
val j = maxidx_of_typ deadfixed_T + 1;
fun varifyT (Type (s, Ts)) = Type (s, map varifyT Ts)
| varifyT (TFree (s, T)) = TVar ((s, j), T)
| varifyT T = T;
val dedvarified_T = varifyT deadfixed_T;
val new_vars = Sign.typ_match thy (dedvarified_T, T) Vartab.empty
|> Vartab.dest
|> filter (curry (op =) j o snd o fst)
|> Vartab.make;
val deadinstantiated_T = map_atyps (Type.devar new_vars) dedvarified_T;
val final_T =
if Sign.typ_instance thy (U, deadinstantiated_T) then deadfixed_T else general_T;
in
parametric_consts := insert (op =) (s, final_T) (!parametric_consts)
end;
fun encapsulate (params as {U, T, ...}) t =
if U = T then
t
else if T = Y then
VLeaf $ t
else if T = res_T then
CLeaf $ t
else if T = YpreT then
it $ t
else if is_nested_type T andalso is_same_type_constr T U then
explore_nested lthy encapsulate params t
else
raise NO_ENCAPSULATION ();
fun build_function_after_encapsulation fun_t fun_t' (params as {bound_Us, ...}) arg_ts arg_ts' =
let
val arg_Us' = fst (strip_typeN (length arg_ts) (fastype_of1 (bound_Us, fun_t')));
fun the_or_error arg NONE =
error ("Illegal argument " ^ quote (Syntax.string_of_term lthy arg) ^
" to " ^ quote (Syntax.string_of_term lthy fun_t))
| the_or_error _ (SOME arg') = arg';
in
arg_ts'
|> `(map (curry fastype_of1 bound_Us))
|>> map2 (update_UT params) arg_Us'
|> op ~~
|> map (try (uncurry encapsulate))
|> map2 the_or_error arg_ts
|> curry list_comb fun_t'
end;
fun rebuild_function_after_exploration old_fn new_fn explore params arg_ts =
arg_ts
|> map (typ_before explore params)
|> build_function_after_encapsulation old_fn new_fn params arg_ts;
fun update_case Us U casex =
let
val Type (T_name, _) = domain_type (snd (strip_fun_type (fastype_of casex)));
val SOME {fp_ctr_sugar = {ctr_sugar = {T = Type (_, Ts), casex, ...}, ...}, ...} =
fp_sugar_of lthy T_name;
val T = body_type (fastype_of casex);
in
Term.subst_atomic_types ((T :: Ts) ~~ (U :: Us)) casex
end;
fun deduce_according_type default_T [] = default_T
| deduce_according_type default_T Ts = (case distinct (op =) Ts of
U :: [] => U
| _ => fpT_to ssig_T default_T);
fun massage_if explore_cond explore (params as {bound_Us, bound_Ts, ...}) t =
(case strip_comb t of
(const as Const (\<^const_name>\<open>If\<close>, _), obj :: (branches as [_, _])) =>
(case List.partition Term.is_dummy_pattern (map (explore params) branches) of
(dummy_branch' :: _, []) => dummy_branch'
| (_, [branch']) => branch'
| (_, branches') =>
let
val brancheUs = map (curry fastype_of1 bound_Us) branches';
val U = deduce_according_type (fastype_of1 (bound_Ts, hd branches)) brancheUs;
val const_obj' = (If_const U, obj)
||> explore_cond (update_UT params \<^typ>\<open>bool\<close> \<^typ>\<open>bool\<close>)
|> op $;
in
build_function_after_encapsulation (const $ obj) const_obj' params branches branches'
end)
| _ => explore params t);
fun massage_map explore (params as {bound_Us, bound_Ts, T = Type (T_name, Ts), ...})
(t as func $ mapped_arg) =
if is_self_call (head_of func) then
explore params t
else
(case try (dest_map lthy T_name) func of
SOME (map_tm, fs) =>
let
val n = length fs;
val mapped_arg' = mapped_arg
|> `(curry fastype_of1 bound_Ts)
|>> build_params bound_Us bound_Ts
|-> explore;
in
(case fastype_of1 (bound_Us, mapped_arg') of
Type (U_name, Us0) =>
if U_name = T_name then
let
val Us = map (fpT_to ssig_T) Us0;
val temporary_map = map_tm
|> mk_map n Us Ts;
val map_fn_Ts = fastype_of #> strip_fun_type #> fst;
val binder_Uss = map_fn_Ts temporary_map
|> map (map (fpT_to ssig_T) o binder_types);
val fun_paramss = map_fn_Ts (head_of func)
|> map (build_params bound_Us bound_Ts);
val fs' = fs
|> @{map 4} explore_fun binder_Uss (replicate n explore) fun_paramss;
val SOME bnf = bnf_of lthy T_name;
val Type (_, bnf_Ts) = T_of_bnf bnf;
val typ_alist =
lives_of_bnf bnf ~~ map (curry fastype_of1 bound_Us #> range_type) fs';
val Us' = map2 the_default Us (map (AList.lookup (op =) typ_alist) bnf_Ts);
val map_tm' = map_tm |> mk_map n Us Us';
in
build_function_after_encapsulation func (list_comb (map_tm', fs')) params
[mapped_arg] [mapped_arg']
end
else
explore params t
| _ => explore params t)
end
| NONE => explore params t)
| massage_map explore params t = explore params t;
fun massage_comp explore (params as {bound_Us, ...}) t =
(case strip_comb t of
(Const (\<^const_name>\<open>comp\<close>, _), f1 :: f2 :: args) =>
let
val args' = map (typ_before explore params) args;
val f2' = typ_before (explore_fun (map (curry fastype_of1 bound_Us) args') explore) params
f2;
val f1' = typ_before (explore_fun [range_type (fastype_of1 (bound_Us, f2'))] explore)
params f1;
in
betapply (f1', list_comb (f2', args'))
end
| _ => explore params t);
fun massage_ctr explore (params as {T = T as Type (s, Ts), bound_Us, ...}) t =
if T <> res_T then
(case try (dest_ctr lthy s) t of
SOME (ctr, args) =>
let
val args' = map (typ_before explore params) args;
val SOME {T = Type (_, ctr_Ts), ...} = ctr_sugar_of lthy s;
val temp_ctr = mk_ctr ctr_Ts ctr;
val argUs = map (curry fastype_of1 bound_Us) args';
val typ_alist = binder_types (fastype_of temp_ctr) ~~ argUs;
val Us = ctr_Ts
|> map (find_all_associated_types typ_alist)
|> map2 deduce_according_type Ts;
val ctr' = mk_ctr Us ctr;
in
build_function_after_encapsulation ctr ctr' params args args'
end
| NONE => explore params t)
else
explore params t
| massage_ctr explore params t = explore params t;
fun const_of [] _ = NONE
| const_of ((sel as Const (s1, _)) :: r) (const as Const (s2, _)) =
if s1 = s2 then SOME sel else const_of r const
| const_of _ _ = NONE;
fun massage_disc explore (params as {T, bound_Us, bound_Ts, ...}) t =
(case (strip_comb t, T = \<^typ>\<open>bool\<close>) of
((fun_t, arg :: []), true) =>
let val arg_T = fastype_of1 (bound_Ts, arg) in
if arg_T <> res_T then
(case arg_T |> try (fst o dest_Type) |> Option.mapPartial (ctr_sugar_of lthy) of
SOME {discs, T = Type (_, Ts), ...} =>
(case const_of discs fun_t of
SOME disc =>
let
val arg' = arg |> typ_before explore params;
val Type (_, Us) = fastype_of1 (bound_Us, arg');
val disc' = disc |> Term.subst_TVars (map (fst o dest_TVar) Ts ~~ Us);
in
disc' $ arg'
end
| NONE => explore params t)
| NONE => explore params t)
else
explore params t
end
| _ => explore params t);
fun massage_sel explore (params as {bound_Us, bound_Ts, ...}) t =
let val (fun_t, args) = strip_comb t in
if args = [] then
explore params t
else
let val T = fastype_of1 (bound_Ts, hd args) in
(case (Option.mapPartial (ctr_sugar_of lthy) (try (fst o dest_Type) T), T <> res_T) of
(SOME {selss, T = Type (_, Ts), ...}, true) =>
(case const_of (flat selss) fun_t of
SOME sel =>
let
val args' = args |> map (typ_before explore params);
val Type (_, Us) = fastype_of1 (bound_Us, hd args');
val sel' = sel |> Term.subst_TVars (map (fst o dest_TVar) Ts ~~ Us);
in
build_function_after_encapsulation sel sel' params args args'
end
| NONE => explore params t)
| _ => explore params t)
end
end;
fun massage_equality explore (params as {bound_Us, bound_Ts, ...})
(t as Const (\<^const_name>\<open>HOL.eq\<close>, _) $ t1 $ t2) =
let
val check_is_VLeaf =
not o (Term.exists_subterm (fn t => t aconv CLeaf orelse t aconv Oper));
fun try_pattern_matching (fun_t, arg_ts) t =
(case as_member_of pattern_ctrs fun_t of
SOME (disc, sels) =>
let val t' = typ_before explore params t in
if fastype_of1 (bound_Us, t') = YpreT then
let
val arg_ts' = map (typ_before explore params) arg_ts;
val sels_t' = map (fn sel => betapply (sel, t')) sels;
val Ts = map (curry fastype_of1 bound_Us) arg_ts';
val Us = map (curry fastype_of1 bound_Us) sels_t';
val arg_ts' = map2 encapsulate (map2 (update_UT params) Us Ts) arg_ts';
in
if forall check_is_VLeaf arg_ts' then
SOME (Library.foldl1 HOLogic.mk_conj
(betapply (disc, t') :: (map HOLogic.mk_eq (arg_ts' ~~ sels_t'))))
else
NONE
end
else
NONE
end
| NONE => NONE);
in
(case try_pattern_matching (strip_comb t1) t2 of
SOME cond => cond
| NONE => (case try_pattern_matching (strip_comb t2) t1 of
SOME cond => cond
| NONE =>
let
val T = fastype_of1 (bound_Ts, t1);
val params' = build_params bound_Us bound_Ts T;
val t1' = explore params' t1;
val t2' = explore params' t2;
in
if fastype_of1 (bound_Us, t1') = T andalso fastype_of1 (bound_Us, t2') = T then
HOLogic.mk_eq (t1', t2')
else
error ("Unsupported condition: " ^ quote (Syntax.string_of_term lthy t))
end))
end
| massage_equality explore params t = explore params t;
fun infer_types (TVar _) (TVar _) = []
| infer_types (U as TVar _) T = [(U, T)]
| infer_types (Type (s', Us)) (Type (s, Ts)) =
if s' = s then flat (map2 infer_types Us Ts) else []
| infer_types _ _ = [];
fun group_by_fst associations [] = associations
| group_by_fst associations ((a, b) :: r) = group_by_fst (add_association a b associations) r
and add_association a b [] = [(a, [b])]
| add_association a b ((c, d) :: r) =
if a = c then (c, b :: d) :: r
else (c, d) :: (add_association a b r);
fun new_TVar known_TVars =
Name.invent_list (map (fst o fst o dest_TVar) known_TVars) "x" 1
|> (fn [s] => TVar ((s, 0), []));
fun instantiate_type inferred_types =
Term.typ_subst_TVars (map (apfst (fst o dest_TVar)) inferred_types);
fun chose_unknown_TVar (T as TVar _) = SOME T
| chose_unknown_TVar (Type (_, Ts)) =
fold (curry merge_options) (map chose_unknown_TVar Ts) NONE
| chose_unknown_TVar _ = NONE;
(* The function under definition might not be defined yet when this is queried. *)
fun maybe_const_type ctxt (s, T) =
Sign.const_type (Proof_Context.theory_of ctxt) s |> the_default T;
fun massage_const polymorphic explore (params as {bound_Us, ...}) t =
let val (fun_t, arg_ts) = strip_comb t in
(case fun_t of
Const (fun_x as (s, fun_T)) =>
let val general_T = if polymorphic then maybe_const_type lthy fun_x else fun_T in
if fun_t aconv friend_tm orelse contains_res_T (body_type general_T) orelse
is_constant t then
explore params t
else
let
val inferred_types = infer_types general_T fun_T;
fun prepare_skeleton [] _ = []
| prepare_skeleton ((T, U) :: inferred_types) As =
let
fun schematize_res_T U As =
if U = res_T then
let val A = new_TVar As in
(A, A :: As)
end
else
(case U of
Type (s, Us) => fold_map schematize_res_T Us As |>> curry Type s
| _ => (U, As));
val (U', As') = schematize_res_T U As;
in
(T, U') :: (prepare_skeleton inferred_types As')
end;
val inferred_types' = prepare_skeleton inferred_types (map fst inferred_types);
val skeleton_T = instantiate_type inferred_types' general_T;
fun explore_if_possible (exp_arg as (_, true)) _ = exp_arg
| explore_if_possible (exp_arg as (arg, false)) arg_T =
if exists (exists_subtype is_TVar) (binder_types arg_T) then exp_arg
else (typ_before (explore_fun (binder_types arg_T) explore) params arg, true);
fun collect_inferred_types [] _ = []
| collect_inferred_types ((arg, explored) :: exp_arg_ts) (arg_T :: arg_Ts) =
(if explored then infer_types arg_T (fastype_of1 (bound_Us, arg)) else []) @
collect_inferred_types exp_arg_ts arg_Ts;
fun propagate exp_arg_ts skeleton_T =
let
val arg_gen_Ts = binder_types skeleton_T;
val exp_arg_ts = map2 explore_if_possible exp_arg_ts arg_gen_Ts;
val inferred_types = collect_inferred_types exp_arg_ts arg_gen_Ts
|> group_by_fst []
|> map (apsnd (deduce_according_type ssig_T));
in
(exp_arg_ts, instantiate_type inferred_types skeleton_T)
end;
val remaining_to_be_explored = filter_out snd #> length;
fun try_exploring_args exp_arg_ts skeleton_T =
let
val n = remaining_to_be_explored exp_arg_ts;
val (exp_arg_ts', skeleton_T') = propagate exp_arg_ts skeleton_T;
val n' = remaining_to_be_explored exp_arg_ts';
fun try_instantiating A T =
try (try_exploring_args exp_arg_ts') (instantiate_type [(A, T)] skeleton_T');
in
if n' = 0 then
SOME (exp_arg_ts', skeleton_T')
else if n = n' then
if exists_subtype is_TVar skeleton_T' then
let val SOME A = chose_unknown_TVar skeleton_T' in
(case try_instantiating A ssig_T of
SOME result => result
| NONE => (case try_instantiating A YpreT of
SOME result => result
| NONE => (case try_instantiating A res_T of
SOME result => result
| NONE => NONE)))
end
else
NONE
else
try_exploring_args exp_arg_ts' skeleton_T'
end;
in
(case try_exploring_args (map (fn arg => (arg, false)) arg_ts) skeleton_T of
SOME (exp_arg_ts, fun_U) =>
let
val arg_ts' = map fst exp_arg_ts;
val fun_t' = Const (s, fun_U);
fun finish_off () =
let
val t' =
build_function_after_encapsulation fun_t fun_t' params arg_ts arg_ts';
in
if can type_of1 (bound_Us, t') then
(if fun_T = fun_U orelse is_special_parametric_const (s, fun_T) then ()
else add_parametric_const s general_T fun_T fun_U;
t')
else
explore params t
end;
in
if polymorphic then
finish_off ()
else
(case try finish_off () of
SOME t' => t'
| NONE => explore params t)
end
| NONE => explore params t)
end
end
| _ => explore params t)
end;
fun massage_rho explore =
massage_star [massage_let, massage_if explore_cond, massage_case, massage_fun, massage_comp,
massage_map, massage_ctr, massage_sel, massage_disc, massage_equality,
massage_const false, massage_const true]
explore
and massage_case explore (params as {bound_Ts, bound_Us, ...}) t =
(case strip_comb t of
(casex as Const (case_x as (c, _)), args as _ :: _ :: _) =>
(case try strip_fun_type (maybe_const_type lthy case_x) of
SOME (gen_branch_Ts, gen_body_fun_T) =>
let
val gen_branch_ms = map num_binder_types gen_branch_Ts;
val n = length gen_branch_ms;
val (branches, obj_leftovers) = chop n args;
in
if n < length args then
(case gen_body_fun_T of
Type (_, [Type (T_name, _), _]) =>
if case_of lthy T_name = SOME (c, true) then
let
val brancheTs = binder_fun_types (fastype_of1 (bound_Ts, casex));
val obj_leftover_Ts = map (curry fastype_of1 bound_Ts) obj_leftovers;
val obj_leftovers' =
if is_constant (hd obj_leftovers) then
obj_leftovers
else
(obj_leftover_Ts, obj_leftovers)
|>> map (build_params bound_Us bound_Ts)
|> op ~~
|> map (uncurry explore_inner);
val obj_leftoverUs = obj_leftovers' |> map (curry fastype_of1 bound_Us);
val _ = is_valid_case_argumentT (hd obj_leftoverUs) orelse
error (quote (Syntax.string_of_term lthy (hd obj_leftovers)) ^
" is not a valid case argument");
val Us = obj_leftoverUs |> hd |> dest_Type |> snd;
val branche_binderUss =
(if hd obj_leftoverUs = YpreT then mk_case HOLogic.boolT
else update_case Us HOLogic.boolT casex)
|> fastype_of
|> binder_fun_types
|> map binder_types;
val b_params = map (build_params bound_Us bound_Ts) brancheTs;
val branches' = branches
|> @{map 4} explore_fun branche_binderUss (replicate n explore) b_params;
val brancheUs = map (curry fastype_of1 bound_Us) branches';
val U = deduce_according_type (body_type (hd brancheTs))
(map body_type brancheUs);
val casex' =
if hd obj_leftoverUs = YpreT then mk_case U else update_case Us U casex;
in
build_function_after_encapsulation casex casex' params
(branches @ obj_leftovers) (branches' @ obj_leftovers')
end
else
explore params t
| _ => explore params t)
else
explore params t
end
| NONE => explore params t)
| _ => explore params t)
and explore_cond params t =
if has_self_call t then unexpected_rec_call_in lthy [] t else explore_inner params t
and explore_inner params t =
massage_rho explore_inner_general params t
and explore_inner_general (params as {bound_Us, bound_Ts, T, ...}) t =
let val (fun_t, arg_ts) = strip_comb t in
if is_constant t then
t
else
(case (as_member_of discs fun_t,
length arg_ts = 1 andalso has_res_T bound_Ts (the_single arg_ts)) of
(SOME disc', true) =>
let
val arg' = explore_inner params (the_single arg_ts);
val arg_U = fastype_of1 (bound_Us, arg');
in
if arg_U = res_T then
fun_t $ arg'
else if arg_U = YpreT then
disc' $ arg'
else
error ("Discriminator " ^ quote (Syntax.string_of_term lthy fun_t) ^
" cannot be applied to non-variable " ^
quote (Syntax.string_of_term lthy (hd arg_ts)))
end
| _ =>
(case as_member_of sels fun_t of
SOME sel' =>
let
val arg_ts' = map (explore_inner params) arg_ts;
val arg_U = fastype_of1 (bound_Us, hd arg_ts');
in
if arg_U = res_T then
build_function_after_encapsulation fun_t fun_t params arg_ts arg_ts'
else if arg_U = YpreT then
build_function_after_encapsulation fun_t sel' params arg_ts arg_ts'
else
error ("Selector " ^ quote (Syntax.string_of_term lthy fun_t) ^
" cannot be applied to non-variable " ^
quote (Syntax.string_of_term lthy (hd arg_ts)))
end
| NONE =>
(case as_member_of friends fun_t of
SOME (_, friend') =>
rebuild_function_after_exploration fun_t friend' explore_inner params arg_ts
|> curry (op $) Oper
| NONE =>
(case as_member_of ctr_guards fun_t of
SOME ctr_guard' =>
rebuild_function_after_exploration fun_t ctr_guard' explore_inner params arg_ts
|> curry (op $) ctr_wrapper
|> curry (op $) Oper
| NONE =>
if is_Bound fun_t then
rebuild_function_after_exploration fun_t fun_t explore_inner params arg_ts
else if is_Free fun_t then
let val fun_t' = map_types (fpT_to YpreT) fun_t in
rebuild_function_after_exploration fun_t fun_t' explore_inner params arg_ts
end
else if T = res_T then
error (quote (Syntax.string_of_term lthy fun_t) ^
" not polymorphic enough to be applied like this and no friend")
else
error (quote (Syntax.string_of_term lthy fun_t) ^
" not polymorphic enough to be applied like this")))))
end;
fun explore_ctr params t =
massage_rho explore_ctr_general params t
and explore_ctr_general params t =
let
val (fun_t, arg_ts) = strip_comb t;
val ctr_opt = as_member_of ctr_guards fun_t;
in
if is_some ctr_opt then
rebuild_function_after_exploration fun_t (the ctr_opt) explore_inner params arg_ts
else
not_constructor_in_rhs lthy [] fun_t
end;
val rho_rhs = rhs
|> explore_ctr (build_params [] [] (fastype_of rhs))
|> abs_tuple_balanced (map (map_types (fpT_to YpreT)) lhs_args)
|> unfold_id_bnf_etc lthy;
in
lthy
|> define_const false b version rhoN rho_rhs
|>> pair (!parametric_consts, rho_rhs)
end;
fun mk_rho_parametricity_goal ctxt Y Z preT ssig_T dead_pre_rel dead_k_rel dead_ssig_rel rho_rhs =
let
val YpreT = HOLogic.mk_prodT (Y, preT);
val ZpreT = Tsubst Y Z YpreT;
val ssigZ_T = Tsubst Y Z ssig_T;
val dead_pre_rel' = Term.subst_atomic_types [(Y, ssig_T), (Z, ssigZ_T)] dead_pre_rel;
val dead_k_rel' = Term.subst_atomic_types [(Y, YpreT), (Z, ZpreT)] dead_k_rel;
val (R, _) = ctxt
|> yield_singleton (mk_Frees "R") (mk_pred2T Y Z);
val rho_rel = mk_rel_fun (dead_k_rel' $ mk_rel_prod R (dead_pre_rel $ R))
(dead_pre_rel' $ (dead_ssig_rel $ R));
val rho_rhsZ = substT Y Z rho_rhs;
in
HOLogic.mk_Trueprop (rho_rel $ rho_rhs $ rho_rhsZ)
end;
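(* Note: extracts "rho" from the friend's defining equation and returns the parametricity
   (transfer) goals for it and for the parametric constants it mentions. *)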
fun extract_rho_return_transfer_goals fun_b version dead_pre_bnf dead_k_bnf Y Z preT k_T ssig_T
ssig_fp_sugar friend_parse_info fun_t parsed_eq lthy =
let
fun mk_rel T bnf =
let
val ZT = Tsubst Y Z T;
val rel_T = mk_predT [mk_pred2T Y Z, T, ZT];
in
enforce_type lthy I rel_T (rel_of_bnf bnf)
end;
val ssig_bnf = #fp_bnf ssig_fp_sugar;
val (dead_ssig_bnf, lthy') = bnf_kill_all_but 1 ssig_bnf lthy;
val dead_pre_rel = mk_rel preT dead_pre_bnf;
val dead_k_rel = mk_rel k_T dead_k_bnf;
val dead_ssig_rel = mk_rel ssig_T dead_ssig_bnf;
val (((parametric_consts, rho_rhs), rho_data), lthy'') =
extract_rho_from_equation friend_parse_info fun_b version Y preT ssig_T fun_t parsed_eq lthy';
val const_transfer_goals = map (mk_const_transfer_goal lthy'') parametric_consts;
val rho_transfer_goal =
mk_rho_parametricity_goal lthy'' Y Z preT ssig_T dead_pre_rel dead_k_rel dead_ssig_rel rho_rhs;
in
((rho_data, (const_transfer_goals, rho_transfer_goal)), lthy'')
end;
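(* Massages the right-hand side of a corecursive specification into the shape expected by
corecUU, rewriting constructor applications, friend calls, and self-calls in terms of the
buffer constants (Oper, VLeaf, CLeaf, ctr_wrapper). *)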
fun explore_corec_equation ctxt could_be_friend friend fun_name fun_free
{outer_buffer, ctr_guards, inner_buffer} res_T (args, rhs) =
let
val is_self_call = curry (op aconv) fun_free;
val has_self_call = Term.exists_subterm is_self_call;
val outer_ssig_T = body_type (fastype_of (#Oper outer_buffer));
fun inner_fp_of (Free (s, _)) =
Free (s ^ inner_fp_suffix, mk_tupleT_balanced (map fastype_of args) --> outer_ssig_T);
fun build_params bound_Ts U T =
{bound_Us = bound_Ts, bound_Ts = bound_Ts, U = U, T = T};
fun rebuild_function_after_exploration new_fn explore {bound_Ts, ...} arg_ts =
let
val binder_types_old_fn = map (curry fastype_of1 bound_Ts) arg_ts;
val binder_types_new_fn = new_fn
|> binder_types o (curry fastype_of1 bound_Ts)
|> take (length binder_types_old_fn);
val paramss =
map2 (build_params bound_Ts) binder_types_new_fn binder_types_old_fn;
in
map2 explore paramss arg_ts
|> curry list_comb new_fn
end;
fun massage_map_corec explore {bound_Ts, U, T, ...} t =
let val explore' = explore ooo build_params in
massage_nested_corec_call ctxt has_self_call explore' explore' bound_Ts U T t
end;
fun massage_comp explore params t =
(case strip_comb t of
(Const (\<^const_name>\<open>comp\<close>, _), f1 :: f2 :: args) =>
explore params (betapply (f1, (betapplys (f2, args))))
| _ => explore params t);
fun massage_fun explore (params as {bound_Us, bound_Ts, U, T}) t =
if can dest_funT T then
let
val arg_T = domain_type T;
val arg_name = the_default Name.uu (try (fn (Abs (s, _, _)) => s) t);
in
add_boundvar t
|> explore {bound_Us = arg_T :: bound_Us, bound_Ts = arg_T :: bound_Ts,
U = range_type U, T = range_type T}
|> (fn t => Abs (arg_name, arg_T, t))
end
else
explore params t;
fun massage_let_if_case_corec explore {bound_Ts, U, T, ...} t =
massage_let_if_case ctxt has_self_call (fn bound_Ts => explore (build_params bound_Ts U T))
(K (unexpected_corec_call_in ctxt [t])) (K (unsupported_case_around_corec_call ctxt [t]))
bound_Ts t;
val massage_map_let_if_case =
massage_star [massage_map_corec, massage_fun, massage_comp, massage_let_if_case_corec];
fun explore_arg _ t =
if has_self_call t then
error (quote (Syntax.string_of_term ctxt t) ^ " contains a nested corecursive call" ^
(if could_be_friend then " (try specifying \"(friend)\")" else ""))
else
t;
fun explore_inner params t =
massage_map_let_if_case explore_inner_general params t
and explore_inner_general (params as {bound_Ts, T, ...}) t =
if T = res_T then
let val (f_t, arg_ts) = strip_comb t in
if has_self_call t then
(case as_member_of (#friends inner_buffer) f_t of
SOME (_, friend') =>
rebuild_function_after_exploration friend' explore_inner params arg_ts
|> curry (op $) (#Oper inner_buffer)
| NONE =>
(case as_member_of ctr_guards f_t of
SOME ctr_guard' =>
rebuild_function_after_exploration ctr_guard' explore_inner params arg_ts
|> curry (op $) (#ctr_wrapper inner_buffer)
|> curry (op $) (#Oper inner_buffer)
| NONE =>
if is_self_call f_t then
if friend andalso exists has_self_call arg_ts then
(case Symtab.lookup (#friends inner_buffer) fun_name of
SOME (_, friend') =>
rebuild_function_after_exploration friend' explore_inner params arg_ts
|> curry (op $) (#Oper inner_buffer))
else
let val arg_Ts = binder_types (fastype_of1 (bound_Ts, f_t)) in
map2 explore_arg (map2 (update_UT params) arg_Ts arg_Ts) arg_ts
|> mk_tuple1_balanced bound_Ts
|> curry (op $) (#VLeaf inner_buffer)
end
else
error (quote (Syntax.string_of_term ctxt f_t) ^ " not registered as friend")))
else
#CLeaf inner_buffer $ t
end
else if has_self_call t then
error (quote (Syntax.string_of_term ctxt t) ^ " contains a corecursive call but has type " ^
quote (Syntax.string_of_typ ctxt T))
else
explore_nested ctxt explore_inner_general params t;
fun explore_outer params t =
massage_map_let_if_case explore_outer_general params t
and explore_outer_general (params as {bound_Ts, T, ...}) t =
if T = res_T then
let val (f_t, arg_ts) = strip_comb t in
(case as_member_of ctr_guards f_t of
SOME ctr_guard' =>
rebuild_function_after_exploration ctr_guard' explore_inner params arg_ts
|> curry (op $) (#VLeaf outer_buffer)
| NONE =>
if not (has_self_call t) then
t
|> expand_to_ctr_term ctxt T
|> massage_let_if_case_corec explore_outer_general params
else
(case as_member_of (#friends outer_buffer) f_t of
SOME (_, friend') =>
rebuild_function_after_exploration friend' explore_outer params arg_ts
|> curry (op $) (#Oper outer_buffer)
| NONE =>
if is_self_call f_t then
let val arg_Ts = binder_types (fastype_of1 (bound_Ts, f_t)) in
map2 explore_arg (map2 (update_UT params) arg_Ts arg_Ts) arg_ts
|> mk_tuple1_balanced bound_Ts
|> curry (op $) (inner_fp_of f_t)
end
else
error (quote (Syntax.string_of_term ctxt f_t) ^ " not registered as friend")))
end
else if has_self_call t then
error (quote (Syntax.string_of_term ctxt t) ^ " contains a corecursive call but has type " ^
quote (Syntax.string_of_typ ctxt T))
else
explore_nested ctxt explore_outer_general params t;
in
(args, rhs
|> explore_outer (build_params [] outer_ssig_T res_T)
|> abs_tuple_balanced args)
end;
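(* The function being defined is corecUU applied to the massaged right-hand side, curried over
the original argument types. *)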
fun mk_corec_fun_def_rhs ctxt arg_Ts corecUU0 corecUU_arg =
let val corecUU = enforce_type ctxt domain_type (fastype_of corecUU_arg) corecUU0 in
abs_curried_balanced arg_Ts (corecUU $ unfold_id_bnf_etc ctxt corecUU_arg)
end;
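(* Option parsing for the commands: a plugin filter plus the Boolean "friend" and "transfer"
flags. *)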
fun get_options ctxt opts =
let
val plugins = get_first (fn Plugins_Option f => SOME (f ctxt) | _ => NONE) (rev opts)
|> the_default Plugin_Name.default_filter;
val friend = exists (can (fn Friend_Option => ())) opts;
val transfer = exists (can (fn Transfer_Option => ())) opts;
in
(plugins, friend, transfer)
end;
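(* Defines the inner fixpoint of the specification via the function package, discharging
pattern completeness automatically; yields its elim, induct, and simp rules. *)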
fun add_function binding parsed_eq lthy =
let
fun pat_completeness_auto ctxt =
Pat_Completeness.pat_completeness_tac ctxt 1 THEN auto_tac ctxt;
val ({defname, pelims = [[pelim]], pinducts = [pinduct], psimps = [psimp], ...}, lthy') =
Function.add_function [(Binding.concealed binding, NONE, NoSyn)]
[(((Binding.concealed Binding.empty, []), parsed_eq), [], [])]
Function_Common.default_config pat_completeness_auto lthy;
in
((defname, (pelim, pinduct, psimp)), lthy')
end;
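(* If the explored right-hand side still refers to the inner fixpoint variable, defines that
fixpoint with the function package and either proves termination on the spot or returns the
termination goal; otherwise the explored term itself serves as the corecUU argument. *)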
fun build_corecUU_arg_and_goals prove_termin (Free (fun_base_name, _)) (arg_ts, explored_rhs) lthy =
let
val inner_fp_name0 = fun_base_name ^ inner_fp_suffix;
val inner_fp_free = Free (inner_fp_name0, fastype_of explored_rhs);
in
if Term.exists_subterm (curry (op aconv) inner_fp_free) explored_rhs then
let
val arg = mk_tuple_balanced arg_ts;
val inner_fp_eq =
mk_Trueprop_eq (betapply (inner_fp_free, arg), betapply (explored_rhs, arg));
val ((inner_fp_name, (pelim, pinduct, psimp)), lthy') =
add_function (Binding.name inner_fp_name0) inner_fp_eq lthy;
fun mk_triple elim induct simp = ([elim], [induct], [simp]);
fun prepare_termin () =
let
val {goal, ...} = Proof.goal (Function.termination NONE lthy');
val termin_goal = goal |> Thm.concl_of |> Logic.unprotect |> Envir.beta_eta_contract;
in
(lthy', (mk_triple pelim pinduct psimp, [termin_goal]))
end;
val (lthy'', (inner_fp_triple, termin_goals)) =
if prove_termin then
(case try (Function.prove_termination NONE
(Function_Common.termination_prover_tac true lthy')) lthy' of
NONE => prepare_termin ()
| SOME ({elims = SOME [[elim]], inducts = SOME [induct], simps = SOME [simp], ...},
lthy'') =>
(lthy'', (mk_triple elim induct simp, [])))
else
prepare_termin ();
val inner_fp_const = (Binding.name_of inner_fp_name, fastype_of explored_rhs)
|>> Proof_Context.read_const {proper = true, strict = false} lthy'
|> (fn (Const (s, _), T) => Const (s, T));
in
(((inner_fp_triple, termin_goals), inner_fp_const), lthy'')
end
else
(((([], [], []), []), explored_rhs), lthy)
end;
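(* Proves "fun_t = corecUU ... corecUU_arg" from the code equation, unfolding the relevant map,
case, and evaluation rules of the involved (co)datatypes and signature types. *)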
fun derive_eq_corecUU ctxt {sig_fp_sugars, ssig_fp_sugar, eval, corecUU, eval_simps,
all_algLam_algs, corecUU_unique, ...}
fun_t corecUU_arg fun_code =
let
val fun_T = fastype_of fun_t;
val (arg_Ts, Type (fpT_name, _)) = strip_type fun_T;
val num_args = length arg_Ts;
val SOME {pre_bnf, fp_bnf, absT_info, fp_nesting_bnfs, live_nesting_bnfs, fp_ctr_sugar, ...} =
fp_sugar_of ctxt fpT_name;
val SOME {case_trivial, ...} = codatatype_extra_of ctxt fpT_name;
val ctr_sugar = #ctr_sugar fp_ctr_sugar;
val pre_map_def = map_def_of_bnf pre_bnf;
val abs_inverse = #abs_inverse absT_info;
val ctr_defs = #ctr_defs fp_ctr_sugar;
val case_eq_ifs = #case_eq_ifs ctr_sugar @ case_eq_if_thms_of_term ctxt (Thm.prop_of fun_code);
val all_sig_map_thms = maps (#map_thms o #fp_bnf_sugar) sig_fp_sugars;
val fp_map_ident = map_ident_of_bnf fp_bnf;
val fpsig_nesting_bnfs = fp_nesting_bnfs @ maps #live_nesting_bnfs sig_fp_sugars;
val fpsig_nesting_T_names = map (fst o dest_Type o T_of_bnf) fpsig_nesting_bnfs;
val fpsig_nesting_fp_sugars = map_filter (fp_sugar_of ctxt) fpsig_nesting_T_names;
val fpsig_nesting_fp_bnf_sugars = map #fp_bnf_sugar fpsig_nesting_fp_sugars;
val ssig_fp_bnf_sugar = #fp_bnf_sugar ssig_fp_sugar;
val ssig_bnf = #fp_bnf ssig_fp_sugar;
val ssig_map = map_of_bnf ssig_bnf;
val fpsig_nesting_maps = map map_of_bnf fpsig_nesting_bnfs;
val fpsig_nesting_map_ident0s = map map_ident0_of_bnf fpsig_nesting_bnfs;
val fpsig_nesting_map_comps = map map_comp_of_bnf fpsig_nesting_bnfs;
val fpsig_nesting_map_thms = maps #map_thms fpsig_nesting_fp_bnf_sugars;
val live_nesting_map_ident0s = map map_ident0_of_bnf live_nesting_bnfs;
val ssig_map_thms = #map_thms ssig_fp_bnf_sugar;
val all_algLam_alg_pointfuls = map (mk_pointful ctxt) all_algLam_algs;
val def_rhs = mk_corec_fun_def_rhs ctxt arg_Ts corecUU corecUU_arg;
val goal = mk_Trueprop_eq (fun_t, def_rhs);
in
Goal.prove_sorry ctxt [] [] goal (fn {context = ctxt, prems = _} =>
mk_eq_corecUU_tac ctxt num_args fpsig_nesting_maps ssig_map eval pre_map_def abs_inverse
fpsig_nesting_map_ident0s fpsig_nesting_map_comps fpsig_nesting_map_thms
live_nesting_map_ident0s fp_map_ident case_trivial ctr_defs case_eq_ifs all_sig_map_thms
ssig_map_thms all_algLam_alg_pointfuls (all_algrho_eqs_of ctxt) eval_simps corecUU_unique
fun_code)
|> Thm.close_derivation \<^here>
end;
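(* Derives the coinduction up-to rule and the congruence introduction rules for a corecursion
state, reusing a previously registered "coinduct_extra" when the corecUU constant is already at
its most general type. *)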
fun derive_coinduct_cong_intros
({fpT = fpT0 as Type (fpT_name, _), friend_names = friend_names0,
corecUU = Const (corecUU_name, _), dtor_coinduct_info as {dtor_coinduct, ...}, ...})
lthy =
let
val thy = Proof_Context.theory_of lthy;
val phi = Proof_Context.export_morphism lthy (Local_Theory.target_of lthy);
val fpT = Morphism.typ phi fpT0;
val general_fpT = body_type (Sign.the_const_type thy corecUU_name);
val most_general = Sign.typ_instance thy (general_fpT, fpT);
in
(case (most_general, coinduct_extra_of lthy corecUU_name) of
(true, SOME extra) => ((false, extra), lthy)
| _ =>
let
val ctr_names = ctr_names_of_fp_name lthy fpT_name;
val friend_names = friend_names0 |> map Long_Name.base_name |> rev;
val cong_intro_pairs = derive_cong_intros lthy ctr_names friend_names dtor_coinduct_info;
val (coinduct, coinduct_attrs) = derive_coinduct lthy fpT0 dtor_coinduct;
val ((_, [coinduct]), lthy) = (* TODO check: only if most_general?*)
Local_Theory.note ((Binding.empty, coinduct_attrs), [coinduct]) lthy;
val extra = {coinduct = coinduct, coinduct_attrs = coinduct_attrs,
cong_intro_pairs = cong_intro_pairs};
in
((most_general, extra), lthy |> most_general ? register_coinduct_extra corecUU_name extra)
end)
end;
fun update_coinduct_cong_intross_dynamic fpT_name lthy =
let val all_corec_infos = corec_infos_of lthy fpT_name in
lthy
|> fold_map (apfst snd oo derive_coinduct_cong_intros) all_corec_infos
|> snd
end;
fun derive_and_update_coinduct_cong_intross [] = pair (false, [])
| derive_and_update_coinduct_cong_intross (corec_infos as {fpT = Type (fpT_name, _), ...} :: _) =
fold_map derive_coinduct_cong_intros corec_infos
#>> split_list
#> (fn ((changeds, extras), lthy) =>
if exists I changeds then
((true, extras), lthy |> update_coinduct_cong_intross_dynamic fpT_name)
else
((false, extras), lthy));
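(* Common preprocessing for 'corec' and 'corecursive': read and check the specification,
optionally set up the friend machinery, explore the equation, and return a definition
continuation together with the outstanding termination and transfer goals. *)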
fun prepare_corec_ursive_cmd int long_cmd opts (raw_fixes, raw_eq) lthy =
let
val _ = can the_single raw_fixes orelse
error "Mutually corecursive functions not supported";
val (plugins, friend, transfer) = get_options lthy opts;
val ([((b, fun_T), mx)], [(_, eq)]) =
fst (Specification.read_multi_specs raw_fixes [((Binding.empty_atts, raw_eq), [], [])] lthy);
val _ = check_top_sort lthy b fun_T;
val (arg_Ts, res_T) = strip_type fun_T;
val fpT_name = (case res_T of Type (s, _) => s | _ => not_codatatype lthy res_T);
val fun_free = Free (Binding.name_of b, fun_T);
val parsed_eq = parse_corec_equation lthy [fun_free] eq;
val fun_name = Local_Theory.full_name lthy b;
val fun_t = Const (fun_name, fun_T);
(* FIXME: does this work with locales that fix variables? *)
val no_base = has_no_corec_info lthy fpT_name;
val lthy1 = lthy |> no_base ? setup_base fpT_name;
fun extract_rho lthy' =
let
val lthy'' = lthy' |> Variable.declare_typ fun_T;
val (prepared as (_, _, version, Y, Z, preT, k_T, ssig_T, dead_pre_bnf, dead_k_bnf, _,
ssig_fp_sugar, buffer), lthy''') =
prepare_friend_corec fun_name fun_T lthy'';
val friend_parse_info = friend_parse_info_of lthy''' arg_Ts res_T buffer;
val parsed_eq' = parsed_eq ||> subst_atomic [(fun_free, fun_t)];
in
lthy'''
|> extract_rho_return_transfer_goals b version dead_pre_bnf dead_k_bnf Y Z preT k_T ssig_T
ssig_fp_sugar friend_parse_info fun_t parsed_eq'
|>> pair prepared
end;
val ((prepareds, (rho_datas, transfer_goal_datas)), lthy2) =
if friend then extract_rho lthy1 |>> (apfst single ##> (apfst single #> apsnd single))
else (([], ([], [])), lthy1);
val ((buffer, corec_infos), lthy3) =
if friend then
((#13 (the_single prepareds), []), lthy2)
else
corec_info_of res_T lthy2
||> no_base ? update_coinduct_cong_intross_dynamic fpT_name
|>> (fn info as {buffer, ...} => (buffer, [info]));
val corec_parse_info = corec_parse_info_of lthy3 arg_Ts res_T buffer;
val explored_eq =
explore_corec_equation lthy3 true friend fun_name fun_free corec_parse_info res_T parsed_eq;
val (((inner_fp_triple, termin_goals), corecUU_arg), lthy4) =
build_corecUU_arg_and_goals (not long_cmd) fun_free explored_eq lthy3;
fun def_fun (inner_fp_elims0, inner_fp_inducts0, inner_fp_simps0) const_transfers
rho_transfers_foldeds lthy5 =
let
fun register_friend lthy' =
let
val [(old_corec_info, fp_b, version, Y, Z, _, k_T, _, _, dead_k_bnf, sig_fp_sugar,
ssig_fp_sugar, _)] = prepareds;
val [(rho, rho_def)] = rho_datas;
val [(_, rho_transfer_goal)] = transfer_goal_datas;
val Type (fpT_name, _) = res_T;
val rho_transfer_folded =
(case rho_transfers_foldeds of
[] =>
derive_rho_transfer_folded lthy' fpT_name const_transfers rho_def rho_transfer_goal
| [thm] => thm);
in
lthy'
|> register_coinduct_dynamic_friend fpT_name fun_name
|> register_friend_corec fun_name fp_b version Y Z k_T dead_k_bnf sig_fp_sugar
ssig_fp_sugar fun_t rho rho_transfer_folded old_corec_info
end;
val (friend_infos, lthy6) = lthy5 |> (if friend then register_friend #>> single else pair []);
val (corec_info as {corecUU = corecUU0, ...}, lthy7) =
(case corec_infos of
[] => corec_info_of res_T lthy6
| [info] => (info, lthy6));
val def_rhs = mk_corec_fun_def_rhs lthy7 arg_Ts corecUU0 corecUU_arg;
val def = ((b, mx), ((Binding.concealed (Thm.def_binding b), []), def_rhs));
val ((fun_lhs0, (_, fun_def0)), (lthy9, lthy8')) = lthy7
|> (snd o Local_Theory.begin_nested)
|> Local_Theory.define def
|> tap (fn (def, lthy') => print_def_consts int [def] lthy')
||> `Local_Theory.end_nested;
val parsed_eq = parse_corec_equation lthy9 [fun_free] eq;
val views0 = generate_views lthy9 eq fun_free parsed_eq;
val lthy9' = lthy9 |> fold Variable.declare_typ (res_T :: arg_Ts);
val phi = Proof_Context.export_morphism lthy8' lthy9';
val fun_lhs = Morphism.term phi fun_lhs0;
val fun_def = Morphism.thm phi fun_def0;
val inner_fp_elims = map (Morphism.thm phi) inner_fp_elims0;
val inner_fp_inducts = map (Morphism.thm phi) inner_fp_inducts0;
val inner_fp_simps = map (Morphism.thm phi) inner_fp_simps0;
val (code_goal, _, _, _, _) = morph_views phi views0;
fun derive_and_note_friend_extra_theorems lthy' =
let
val k_T = #7 (the_single prepareds);
val rho_def = snd (the_single rho_datas);
val (eq_algrho, algrho_eq) = derive_eq_algrho lthy' corec_info (the_single friend_infos)
fun_lhs k_T code_goal const_transfers rho_def fun_def;
val notes =
(if Config.get lthy' bnf_internals then
[(eq_algrhoN, [eq_algrho])]
else
[])
|> map (fn (thmN, thms) =>
((Binding.qualify true (Binding.name_of b)
(Binding.qualify false friendN (Binding.name thmN)), []),
[(thms, [])]));
in
lthy'
|> register_friend_extra fun_name eq_algrho algrho_eq
|> Local_Theory.notes notes |> snd
end;
val lthy10 = lthy9 |> friend ? derive_and_note_friend_extra_theorems;
val code_thm = derive_code lthy10 inner_fp_simps code_goal corec_info fun_lhs fun_def;
(* TODO:
val ctr_thmss = map mk_thm (#2 views);
val disc_thmss = map mk_thm (#3 views);
val disc_iff_thmss = map mk_thm (#4 views);
val sel_thmss = map mk_thm (#5 views);
*)
val uniques =
if null inner_fp_simps then
[derive_unique lthy10 phi (#1 views0) corec_info fpT_name fun_def]
else
[];
(* TODO:
val disc_iff_or_disc_thmss =
map2 (fn [] => I | disc_iffs => K disc_iffs) disc_iff_thmss disc_thmss;
val simp_thmss = map2 append disc_iff_or_disc_thmss sel_thmss;
*)
val ((_, [{cong_intro_pairs, coinduct, coinduct_attrs}]), lthy11) = lthy10
|> derive_and_update_coinduct_cong_intross [corec_info];
val cong_intros_pairs = AList.group (op =) cong_intro_pairs;
val anonymous_notes = [];
(* TODO:
[(flat disc_iff_or_disc_thmss, simp_attrs)]
|> map (fn (thms, attrs) => ((Binding.empty, attrs), [(thms, [])]));
*)
val notes =
[(cong_introsN, maps snd cong_intros_pairs, []),
(codeN, [code_thm], nitpicksimp_attrs),
(coinductN, [coinduct], coinduct_attrs),
(inner_inductN, inner_fp_inducts, []),
(uniqueN, uniques, [])] @
map (fn (thmN, thms) => (thmN, thms, [])) cong_intros_pairs @
(if Config.get lthy11 bnf_internals then
[(inner_elimN, inner_fp_elims, []),
(inner_simpN, inner_fp_simps, [])]
else
[])
(* TODO:
(ctrN, ctr_thms, []),
(discN, disc_thms, []),
(disc_iffN, disc_iff_thms, []),
(selN, sel_thms, simp_attrs),
(simpsN, simp_thms, []),
*)
|> map (fn (thmN, thms, attrs) =>
((Binding.qualify true (Binding.name_of b)
(Binding.qualify false corecN (Binding.name thmN)), attrs),
[(thms, [])]))
|> filter_out (null o fst o hd o snd);
in
lthy11
(* TODO:
|> Spec_Rules.add Spec_Rules.equational ([fun_lhs], flat sel_thmss)
|> Spec_Rules.add Spec_Rules.equational ([fun_lhs], flat ctr_thmss)
*)
|> Spec_Rules.add Binding.empty Spec_Rules.equational [fun_lhs] [code_thm]
|> plugins code_plugin ? Code.declare_default_eqns [(code_thm, true)]
|> Local_Theory.notes (anonymous_notes @ notes)
|> snd
end;
fun prove_transfer_goal ctxt goal =
Variable.add_free_names ctxt goal []
|> (fn vars => Goal.prove (*no sorry*) ctxt vars [] goal (fn {context = ctxt, prems = _} =>
HEADGOAL (Transfer.transfer_prover_tac ctxt)))
|> Thm.close_derivation \<^here>;
fun maybe_prove_transfer_goal ctxt goal =
(case try (prove_transfer_goal ctxt) goal of
SOME thm => apfst (cons thm)
| NONE => apsnd (cons goal));
val const_transfer_goals = fold (union (op aconv) o fst) transfer_goal_datas [];
val (const_transfers, const_transfer_goals') =
if long_cmd then ([], const_transfer_goals)
else fold (maybe_prove_transfer_goal lthy4) const_transfer_goals ([], []);
in
((def_fun, (([res_T], prepareds, rho_datas, map snd transfer_goal_datas),
(inner_fp_triple, termin_goals), (const_transfers, const_transfer_goals'))), lthy4)
end;
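(* 'corec' expects all termination and transfer goals to be discharged automatically;
otherwise the user is pointed to 'corecursive'. *)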
fun corec_cmd int opts (raw_fixes, raw_eq) lthy =
let
val ((def_fun, (_, (inner_fp_triple, termin_goals), (const_transfers, const_transfer_goals))),
lthy') =
prepare_corec_ursive_cmd int false opts (raw_fixes, raw_eq) lthy;
in
if not (null termin_goals) then
error ("Termination prover failed (try " ^ quote (#1 \<^command_keyword>\<open>corecursive\<close>) ^
" instead of " ^ quote (#1 \<^command_keyword>\<open>corec\<close>) ^ ")")
else if not (null const_transfer_goals) then
error ("Transfer prover failed (try " ^ quote (#1 \<^command_keyword>\<open>corecursive\<close>) ^
" instead of " ^ quote (#1 \<^command_keyword>\<open>corec\<close>) ^ ")")
else
def_fun inner_fp_triple const_transfers [] lthy'
end;
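(* 'corecursive' leaves the termination goals (and any unproved transfer goals) to the user as
a structured proof. *)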
fun corecursive_cmd int opts (raw_fixes, raw_eq) lthy =
let
val ((def_fun, (([Type (fpT_name, _)], prepareds, rho_datas, rho_transfer_goals),
(inner_fp_triple, termin_goals), (const_transfers, const_transfer_goals))), lthy') =
prepare_corec_ursive_cmd int true opts (raw_fixes, raw_eq) lthy;
val (rho_transfer_goals', unprime_rho_transfer_and_folds) =
@{map 3} (fn (_, _, _, _, _, _, _, _, _, _, _, _, _) => fn (_, rho_def) =>
prime_rho_transfer_goal lthy' fpT_name rho_def)
prepareds rho_datas rho_transfer_goals
|> split_list;
in
Proof.theorem NONE (fn [termin_thms, const_transfers', rho_transfers'] =>
let
val remove_domain_condition =
full_simplify (put_simpset HOL_basic_ss lthy'
addsimps (@{thm True_implies_equals} :: termin_thms));
in
def_fun (@{apply 3} (map remove_domain_condition) inner_fp_triple)
(const_transfers @ const_transfers')
(map2 (fn f => f) unprime_rho_transfer_and_folds rho_transfers')
end)
(map (map (rpair [])) [termin_goals, const_transfer_goals, rho_transfer_goals']) lthy'
end;
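(* 'friend_of_corec' registers an existing constant as a friendly operation; the user must
prove the supplied code equation, the constant transfer goals, and the rho transfer goal. *)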
fun friend_of_corec_cmd ((raw_fun_name, raw_fun_T_opt), raw_eq) lthy =
let
val Const (fun_name, _) =
Proof_Context.read_const {proper = true, strict = false} lthy raw_fun_name;
val fake_lthy = lthy
|> (case raw_fun_T_opt of
SOME raw_T =>
Proof_Context.add_const_constraint (fun_name, SOME (Syntax.read_typ lthy raw_T))
| NONE => I)
handle TYPE (s, _, _) => error s;
val fun_b = Binding.name (Long_Name.base_name fun_name);
val code_goal = Syntax.read_prop fake_lthy raw_eq;
val fun_T =
(case code_goal of
\<^Const_>\<open>Trueprop\<close> $ (Const (\<^const_name>\<open>HOL.eq\<close>, _) $ t $ _) => fastype_of (head_of t)
| _ => ill_formed_equation_lhs_rhs lthy [code_goal]);
val fun_t = Const (fun_name, fun_T);
val (arg_Ts, res_T as Type (fpT_name, _)) = strip_type fun_T;
val no_base = has_no_corec_info lthy fpT_name;
val lthy1 = lthy |> no_base ? setup_base fpT_name;
val lthy2 = lthy1 |> Variable.declare_typ fun_T;
val ((old_corec_info, fp_b, version, Y, Z, preT, k_T, ssig_T, dead_pre_bnf, dead_k_bnf,
sig_fp_sugar, ssig_fp_sugar, buffer), lthy3) =
prepare_friend_corec fun_name fun_T lthy2;
val friend_parse_info = friend_parse_info_of lthy3 arg_Ts res_T buffer;
val parsed_eq = parse_corec_equation lthy3 [] code_goal;
val (((rho, rho_def), (const_transfer_goals, rho_transfer_goal)), lthy4) =
extract_rho_return_transfer_goals fun_b version dead_pre_bnf dead_k_bnf Y Z preT k_T ssig_T
ssig_fp_sugar friend_parse_info fun_t parsed_eq lthy3;
fun register_friend_extra_and_note_thms code_goal code_thm const_transfers k_T friend_info
lthy5 =
let
val (corec_info, lthy6) = corec_info_of res_T lthy5;
val fun_free = Free (Binding.name_of fun_b, fun_T);
fun freeze_fun (t as Const (s, T)) = if s = fun_name andalso T = fun_T then fun_free else t
| freeze_fun t = t;
val eq = Term.map_aterms freeze_fun code_goal;
val parsed_eq = parse_corec_equation lthy6 [fun_free] eq;
val corec_parse_info = corec_parse_info_of lthy6 arg_Ts res_T buffer;
val explored_eq = explore_corec_equation lthy6 false false fun_name fun_free corec_parse_info
res_T parsed_eq;
val ((_, corecUU_arg), _) = build_corecUU_arg_and_goals false fun_free explored_eq lthy6;
val eq_corecUU = derive_eq_corecUU lthy6 corec_info fun_t corecUU_arg code_thm;
val (eq_algrho, algrho_eq) = derive_eq_algrho lthy6 corec_info friend_info fun_t k_T
code_goal const_transfers rho_def eq_corecUU;
val ((_, [{cong_intro_pairs, coinduct, coinduct_attrs}]), lthy7) = lthy6
|> register_friend_extra fun_name eq_algrho algrho_eq
|> register_coinduct_dynamic_friend fpT_name fun_name
|> derive_and_update_coinduct_cong_intross [corec_info];
val cong_intros_pairs = AList.group (op =) cong_intro_pairs;
val unique = derive_unique lthy7 Morphism.identity code_goal corec_info fpT_name eq_corecUU;
val notes =
[(codeN, [code_thm], []),
(coinductN, [coinduct], coinduct_attrs),
(cong_introsN, maps snd cong_intros_pairs, []),
(uniqueN, [unique], [])] @
map (fn (thmN, thms) => (thmN, thms, [])) cong_intros_pairs @
(if Config.get lthy7 bnf_internals then
[(eq_algrhoN, [eq_algrho], []),
(eq_corecUUN, [eq_corecUU], [])]
else
[])
|> map (fn (thmN, thms, attrs) =>
((Binding.qualify true (Binding.name_of fun_b)
(Binding.qualify false friendN (Binding.name thmN)), attrs),
[(thms, [])]));
in
lthy7
|> Local_Theory.notes notes |> snd
end;
val (rho_transfer_goal', unprime_rho_transfer_and_fold) =
prime_rho_transfer_goal lthy4 fpT_name rho_def rho_transfer_goal;
in
lthy4
|> Proof.theorem NONE (fn [[code_thm], const_transfers, [rho_transfer']] =>
register_friend_corec fun_name fp_b version Y Z k_T dead_k_bnf sig_fp_sugar ssig_fp_sugar
fun_t rho (unprime_rho_transfer_and_fold rho_transfer') old_corec_info
#-> register_friend_extra_and_note_thms code_goal code_thm const_transfers k_T)
(map (map (rpair [])) [[code_goal], const_transfer_goals, [rho_transfer_goal']])
|> Proof.refine_singleton (Method.primitive_text (K I))
end;
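(* 'coinduction_upto' derives and notes a coinduction up-to principle and the associated
congruence introduction rules for the given codatatype, qualified by the given base name and
the current version. *)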
fun coinduction_upto_cmd (base_name, raw_fpT) lthy =
let
val fpT as Type (fpT_name, _) = Syntax.read_typ lthy raw_fpT;
val no_base = has_no_corec_info lthy fpT_name;
val (corec_info as {version, ...}, lthy1) = lthy
|> corec_info_of fpT;
val lthy2 = lthy1 |> no_base ? setup_base fpT_name;
val ((changed, [{cong_intro_pairs, coinduct, coinduct_attrs}]), lthy3) = lthy2
|> derive_and_update_coinduct_cong_intross [corec_info];
val lthy4 = lthy3 |> (changed orelse no_base) ? update_coinduct_cong_intross_dynamic fpT_name;
val cong_intros_pairs = AList.group (op =) cong_intro_pairs;
val notes =
[(cong_introsN, maps snd cong_intros_pairs, []),
(coinduct_uptoN, [coinduct], coinduct_attrs)] @
map (fn (thmN, thms) => (thmN, thms, [])) cong_intros_pairs
|> map (fn (thmN, thms, attrs) =>
(((Binding.qualify true base_name
(Binding.qualify false ("v" ^ string_of_int version) (Binding.name thmN))), attrs),
[(thms, [])]));
in
lthy4 |> Local_Theory.notes notes |> snd
end;
fun consolidate lthy =
let
val corec_infoss = map (corec_infos_of lthy o fst) (all_codatatype_extras_of lthy);
val (changeds, lthy') = lthy
|> fold_map (apfst fst oo derive_and_update_coinduct_cong_intross) corec_infoss;
in
if exists I changeds then lthy' else raise Same.SAME
end;
fun consolidate_global thy =
SOME (Named_Target.theory_map consolidate thy)
handle Same.SAME => NONE;
val _ = Outer_Syntax.local_theory \<^command_keyword>\<open>corec\<close>
"define nonprimitive corecursive functions"
((Scan.optional (\<^keyword>\<open>(\<close> |-- Parse.!!! (Parse.list1 corec_option_parser)
--| \<^keyword>\<open>)\<close>) []) -- (Parse.vars --| Parse.where_ -- Parse.prop)
>> uncurry (corec_cmd true));
val _ = Outer_Syntax.local_theory_to_proof \<^command_keyword>\<open>corecursive\<close>
"define nonprimitive corecursive functions"
((Scan.optional (\<^keyword>\<open>(\<close> |-- Parse.!!! (Parse.list1 corec_option_parser)
--| \<^keyword>\<open>)\<close>) []) -- (Parse.vars --| Parse.where_ -- Parse.prop)
>> uncurry (corecursive_cmd true));
val _ = Outer_Syntax.local_theory_to_proof \<^command_keyword>\<open>friend_of_corec\<close>
"register a function as a legal context for nonprimitive corecursion"
(Parse.const -- Scan.option (\<^keyword>\<open>::\<close> |-- Parse.typ) --| Parse.where_ -- Parse.prop
>> friend_of_corec_cmd);
val _ = Outer_Syntax.local_theory \<^command_keyword>\<open>coinduction_upto\<close>
"derive a coinduction up-to principle and a corresponding congruence closure"
(Parse.name --| \<^keyword>\<open>:\<close> -- Parse.typ >> coinduction_upto_cmd);
val _ = Theory.setup (Theory.at_begin consolidate_global);
end;
diff --git a/src/HOL/Tools/BNF/bnf_lfp_size.ML b/src/HOL/Tools/BNF/bnf_lfp_size.ML
--- a/src/HOL/Tools/BNF/bnf_lfp_size.ML
+++ b/src/HOL/Tools/BNF/bnf_lfp_size.ML
@@ -1,399 +1,399 @@
(* Title: HOL/Tools/BNF/bnf_lfp_size.ML
Author: Jasmin Blanchette, TU Muenchen
Copyright 2014
Generation of size functions for datatypes.
*)
signature BNF_LFP_SIZE =
sig
val register_size: string -> string -> thm -> thm list -> thm list -> local_theory -> local_theory
val register_size_global: string -> string -> thm -> thm list -> thm list -> theory -> theory
val size_of: Proof.context -> string -> (string * (thm * thm list * thm list)) option
val size_of_global: theory -> string -> (string * (thm * thm list * thm list)) option
end;
structure BNF_LFP_Size : BNF_LFP_SIZE =
struct
open BNF_Util
open BNF_Tactics
open BNF_Def
open BNF_FP_Def_Sugar
val size_N = "size_";
val sizeN = "size";
val size_genN = "size_gen";
val size_gen_o_mapN = "size_gen_o_map";
val size_neqN = "size_neq";
val nitpicksimp_attrs = @{attributes [nitpick_simp]};
val simp_attrs = @{attributes [simp]};
fun mk_plus_nat (t1, t2) = Const (\<^const_name>\<open>Groups.plus\<close>,
HOLogic.natT --> HOLogic.natT --> HOLogic.natT) $ t1 $ t2;
fun mk_to_natT T = T --> HOLogic.natT;
fun mk_abs_zero_nat T = Term.absdummy T HOLogic.zero;
fun mk_unabs_def_unused_0 n =
funpow n (fn thm => thm RS @{thm fun_cong_unused_0} handle THM _ => thm RS fun_cong);
structure Data = Generic_Data
(
type T = (string * (thm * thm list * thm list)) Symtab.table;
val empty = Symtab.empty;
fun merge data = Symtab.merge (K true) data;
);
fun check_size_type thy T_name size_name =
let
val n = Sign.arity_number thy T_name;
val As = map (fn s => TFree (s, \<^sort>\<open>type\<close>)) (Name.invent Name.context Name.aT n);
val T = Type (T_name, As);
val size_T = map mk_to_natT As ---> mk_to_natT T;
val size_const = Const (size_name, size_T);
in
can (Thm.global_cterm_of thy) size_const orelse
error ("Constant " ^ quote size_name ^ " registered as size function for " ^ quote T_name ^
" must have type\n" ^ quote (Syntax.string_of_typ_global thy size_T))
end;
fun register_size T_name size_name overloaded_size_def size_simps size_gen_o_maps lthy =
(check_size_type (Proof_Context.theory_of lthy) T_name size_name;
Context.proof_map (Data.map (Symtab.update
(T_name, (size_name, (overloaded_size_def, size_simps, size_gen_o_maps)))))
lthy);
fun register_size_global T_name size_name overloaded_size_def size_simps size_gen_o_maps thy =
(check_size_type thy T_name size_name;
Context.theory_map (Data.map (Symtab.update
(T_name, (size_name, (overloaded_size_def, size_simps, size_gen_o_maps)))))
thy);
val size_of = Symtab.lookup o Data.get o Context.Proof;
val size_of_global = Symtab.lookup o Data.get o Context.Theory;
fun all_overloaded_size_defs_of ctxt =
Symtab.fold (fn (_, (_, (overloaded_size_def, _, _))) =>
can (Logic.dest_equals o Thm.prop_of) overloaded_size_def ? cons overloaded_size_def)
(Data.get (Context.Proof ctxt)) [];
val size_gen_o_map_simps = @{thms inj_on_id snd_comp_apfst[simplified apfst_def]};
fun mk_size_gen_o_map_tac ctxt size_def rec_o_map inj_maps size_maps =
unfold_thms_tac ctxt [size_def] THEN
HEADGOAL (rtac ctxt (rec_o_map RS trans) THEN'
asm_simp_tac (ss_only (inj_maps @ size_maps @ size_gen_o_map_simps) ctxt)) THEN
IF_UNSOLVED (unfold_thms_tac ctxt @{thms id_def o_def} THEN HEADGOAL (rtac ctxt refl));
fun mk_size_neq ctxt cts exhaust sizes =
HEADGOAL (rtac ctxt (infer_instantiate' ctxt (map SOME cts) exhaust)) THEN
ALLGOALS (hyp_subst_tac ctxt) THEN
unfold_thms_tac ctxt (@{thm neq0_conv} :: sizes) THEN
ALLGOALS (REPEAT_DETERM o (rtac ctxt @{thm zero_less_Suc} ORELSE'
rtac ctxt @{thm trans_less_add2}));
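(* For each newly defined datatype, generates a polymorphic size function "size_t" taking one
weight function per type parameter, instantiates the overloaded "size" by setting all weights
to zero, and derives the corresponding simp, size_gen_o_map, and size_neq rules. *)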
fun generate_datatype_size (fp_sugars as ({T = Type (_, As), BT = Type (_, Bs), fp = Least_FP,
fp_res = {bnfs = fp_bnfs, ...}, fp_nesting_bnfs, live_nesting_bnfs,
fp_co_induct_sugar = SOME _, ...} : fp_sugar) :: _)
lthy0 =
let
val data = Data.get (Context.Proof lthy0);
val Ts = map #T fp_sugars;

val T_names = map (fst o dest_Type) Ts;
val nn = length Ts;
val B_ify = Term.typ_subst_atomic (As ~~ Bs);
val recs = map (#co_rec o the o #fp_co_induct_sugar) fp_sugars;
val rec_thmss = map (#co_rec_thms o the o #fp_co_induct_sugar) fp_sugars;
val rec_Ts as rec_T1 :: _ = map fastype_of recs;
val rec_arg_Ts = binder_fun_types rec_T1;
val Cs = map body_type rec_Ts;
val Cs_rho = map (rpair HOLogic.natT) Cs;
val substCnatT = Term.subst_atomic_types Cs_rho;
val f_Ts = map mk_to_natT As;
val f_TsB = map mk_to_natT Bs;
val num_As = length As;
fun variant_names n pre = fst (Variable.variant_fixes (replicate n pre) lthy0);
val f_names = variant_names num_As "f";
val fs = map2 (curry Free) f_names f_Ts;
val fsB = map2 (curry Free) f_names f_TsB;
val As_fs = As ~~ fs;
val size_bs =
map ((fn base => Binding.qualify false base (Binding.name (prefix size_N base))) o
Long_Name.base_name) T_names;
fun is_prod_C \<^type_name>\<open>prod\<close> [_, T'] = member (op =) Cs T'
| is_prod_C _ _ = false;
fun mk_size_of_typ (T as TFree _) =
pair (case AList.lookup (op =) As_fs T of
SOME f => f
| NONE => if member (op =) Cs T then Term.absdummy T (Bound 0) else mk_abs_zero_nat T)
| mk_size_of_typ (T as Type (s, Ts)) =
if is_prod_C s Ts then
pair (snd_const T)
else if exists (exists_subtype_in (As @ Cs)) Ts then
(case Symtab.lookup data s of
SOME (size_name, (_, _, size_gen_o_maps)) =>
let
val (args, size_gen_o_mapss') = fold_map mk_size_of_typ Ts [];
val size_T = map fastype_of args ---> mk_to_natT T;
val size_const = Const (size_name, size_T);
in
append (size_gen_o_maps :: size_gen_o_mapss')
#> pair (Term.list_comb (size_const, args))
end
| _ => pair (mk_abs_zero_nat T))
else
pair (mk_abs_zero_nat T);
fun mk_size_of_arg t =
mk_size_of_typ (fastype_of t) #>> (fn s => substCnatT (betapply (s, t)));
fun is_recursive_or_plain_case Ts =
exists (exists_subtype_in Cs) Ts orelse forall (not o exists_subtype_in As) Ts;
(* We want the size function to enjoy the following properties:
1. The size of a list should coincide with its length.
2. All the nonrecursive constructors of a type should have the same size.
3. Each constructor through which nested recursion takes place should count as at least
one in the generic size function.
4. The "size" function should be definable as "size_t (%_. 0) ... (%_. 0)", where "size_t"
is the generic function.
This explains the somewhat convoluted logic ahead. *)
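(* For instance, for 'a list these conventions yield "size_list f [] = 0" and
"size_list f (x # xs) = f x + size_list f xs + Suc 0", so that the overloaded "size"
(i.e. "size_list (%_. 0)") coincides with "length", as required by property 1. *)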
val base_case =
if forall (is_recursive_or_plain_case o binder_types) rec_arg_Ts then HOLogic.zero
else HOLogic.Suc_zero;
fun mk_size_arg rec_arg_T =
let
val x_Ts = binder_types rec_arg_T;
val m = length x_Ts;
val x_names = variant_names m "x";
val xs = map2 (curry Free) x_names x_Ts;
val (summands, size_gen_o_mapss) =
fold_map mk_size_of_arg xs []
|>> remove (op =) HOLogic.zero;
val sum =
if null summands then base_case else foldl1 mk_plus_nat (summands @ [HOLogic.Suc_zero]);
in
append size_gen_o_mapss
#> pair (fold_rev Term.lambda (map substCnatT xs) sum)
end;
fun mk_size_rhs recx =
fold_map mk_size_arg rec_arg_Ts
#>> (fn args => fold_rev Term.lambda fs (Term.list_comb (substCnatT recx, args)));
val maybe_conceal_def_binding = Thm.def_binding
#> not (Config.get lthy0 bnf_internals) ? Binding.concealed;
val (size_rhss, nested_size_gen_o_mapss) = fold_map mk_size_rhs recs [];
val size_Ts = map fastype_of size_rhss;
val nested_size_gen_o_maps_complete = forall (not o null) nested_size_gen_o_mapss;
val nested_size_gen_o_maps = fold (union Thm.eq_thm_prop) nested_size_gen_o_mapss [];
val ((raw_size_consts, raw_size_defs), (lthy1, lthy1_old)) = lthy0
|> (snd o Local_Theory.begin_nested)
|> apfst split_list o @{fold_map 2} (fn b => fn rhs =>
Local_Theory.define ((b, NoSyn), ((maybe_conceal_def_binding b, []), rhs))
#>> apsnd snd)
size_bs size_rhss
||> `Local_Theory.end_nested;
val phi = Proof_Context.export_morphism lthy1_old lthy1;
val size_defs = map (Morphism.thm phi) raw_size_defs;
val size_consts0 = map (Morphism.term phi) raw_size_consts;
val size_consts = map2 retype_const_or_free size_Ts size_consts0;
val size_constsB = map (Term.map_types B_ify) size_consts;
val zeros = map mk_abs_zero_nat As;
val overloaded_size_rhss = map (fn c => Term.list_comb (c, zeros)) size_consts;
val overloaded_size_Ts = map fastype_of overloaded_size_rhss;
val overloaded_size_consts = map (curry Const \<^const_name>\<open>size\<close>) overloaded_size_Ts;
val overloaded_size_def_bs =
map (maybe_conceal_def_binding o Binding.suffix_name "_overloaded") size_bs;
fun define_overloaded_size def_b lhs0 rhs lthy =
let
val Free (c, _) = Syntax.check_term lthy lhs0;
val ((_, (_, thm)), lthy') = lthy
|> Local_Theory.define ((Binding.name c, NoSyn), ((def_b, []), rhs));
val thy_ctxt = Proof_Context.init_global (Proof_Context.theory_of lthy');
val thm' = singleton (Proof_Context.export lthy' thy_ctxt) thm;
in (thm', lthy') end;
val (overloaded_size_defs, lthy2) = lthy1
|> Local_Theory.background_theory_result
(Class.instantiation (T_names, map dest_TFree As, [HOLogic.class_size])
#> @{fold_map 3} define_overloaded_size overloaded_size_def_bs overloaded_size_consts
overloaded_size_rhss
##> Class.prove_instantiation_instance (fn ctxt => Class.intro_classes_tac ctxt [])
##> Local_Theory.exit_global);
val size_defs' =
map (mk_unabs_def (num_As + 1) o HOLogic.mk_obj_eq) size_defs;
val size_defs_unused_0 =
map (mk_unabs_def_unused_0 (num_As + 1) o HOLogic.mk_obj_eq) size_defs;
val overloaded_size_defs' =
map (mk_unabs_def 1 o HOLogic.mk_obj_eq) overloaded_size_defs;
val nested_size_maps =
map (mk_pointful lthy2) nested_size_gen_o_maps @ nested_size_gen_o_maps;
val all_inj_maps =
@{thm prod.inj_map} :: map inj_map_of_bnf (fp_bnfs @ fp_nesting_bnfs @ live_nesting_bnfs)
|> distinct Thm.eq_thm_prop;
fun derive_size_simp size_def' simp0 =
(trans OF [size_def', simp0])
|> Simplifier.asm_full_simplify (ss_only (@{thms inj_on_convol_ident id_def o_def
snd_conv} @ all_inj_maps @ nested_size_maps) lthy2)
|> Local_Defs.fold lthy2 size_defs_unused_0;
fun derive_overloaded_size_simp overloaded_size_def' simp0 =
(trans OF [overloaded_size_def', simp0])
|> unfold_thms lthy2 @{thms add_0_left add_0_right}
|> Local_Defs.fold lthy2 (overloaded_size_defs @ all_overloaded_size_defs_of lthy2);
val size_simpss = map2 (map o derive_size_simp) size_defs' rec_thmss;
val size_simps = flat size_simpss;
val overloaded_size_simpss =
map2 (map o derive_overloaded_size_simp) overloaded_size_defs' size_simpss;
val overloaded_size_simps = flat overloaded_size_simpss;
val size_thmss = map2 append size_simpss overloaded_size_simpss;
val size_gen_thmss = size_simpss;
fun rhs_is_zero thm =
let val Const (trueprop, _) $ (Const (eq, _) $ _ $ rhs) = Thm.prop_of thm in
trueprop = \<^const_name>\<open>Trueprop\<close> andalso eq = \<^const_name>\<open>HOL.eq\<close> andalso
rhs = HOLogic.zero
end;
val size_neq_thmss = @{map 3} (fn fp_sugar => fn size => fn size_thms =>
if exists rhs_is_zero size_thms then
[]
else
let
val (xs, _) = mk_Frees "x" (binder_types (fastype_of size)) lthy2;
val goal =
HOLogic.mk_Trueprop (BNF_LFP_Util.mk_not_eq (list_comb (size, xs)) HOLogic.zero);
val vars = Variable.add_free_names lthy2 goal [];
val thm =
Goal.prove_sorry lthy2 vars [] goal (fn {context = ctxt, ...} =>
mk_size_neq ctxt (map (Thm.cterm_of lthy2) xs)
(#exhaust (#ctr_sugar (#fp_ctr_sugar fp_sugar))) size_thms)
|> single
|> map (Thm.close_derivation \<^here>);
in thm end) fp_sugars overloaded_size_consts overloaded_size_simpss;
val ABs = As ~~ Bs;
val g_names = variant_names num_As "g";
val gs = map2 (curry Free) g_names (map (op -->) ABs);
val liveness = map (op <>) ABs;
val live_gs = AList.find (op =) (gs ~~ liveness) true;
val live = length live_gs;
val maps0 = map map_of_bnf fp_bnfs;
val size_gen_o_map_thmss =
if live = 0 then
replicate nn []
else
let
val gmaps = map (fn map0 => Term.list_comb (mk_map live As Bs map0, live_gs)) maps0;
val size_gen_o_map_conds =
if exists (can Logic.dest_implies o Thm.prop_of) nested_size_gen_o_maps then
map (HOLogic.mk_Trueprop o mk_inj) live_gs
else
[];
val fsizes = map (fn size_constB => Term.list_comb (size_constB, fsB)) size_constsB;
val size_gen_o_map_lhss = map2 (curry HOLogic.mk_comp) fsizes gmaps;
val fgs = map2 (fn fB => fn g as Free (_, Type (_, [A, B])) =>
if A = B then fB else HOLogic.mk_comp (fB, g)) fsB gs;
val size_gen_o_map_rhss = map (fn c => Term.list_comb (c, fgs)) size_consts;
val size_gen_o_map_goals =
map2 (fold_rev (fold_rev Logic.all) [fsB, gs] o
curry Logic.list_implies size_gen_o_map_conds o HOLogic.mk_Trueprop oo
curry HOLogic.mk_eq) size_gen_o_map_lhss size_gen_o_map_rhss;
val rec_o_maps =
fold_rev (curry (op @) o #co_rec_o_maps o the o #fp_co_induct_sugar) fp_sugars [];
val size_gen_o_map_thmss =
if nested_size_gen_o_maps_complete
andalso forall (fn TFree (_, S) => S = \<^sort>\<open>type\<close>) As then
@{map 3} (fn goal => fn size_def => fn rec_o_map =>
Goal.prove_sorry lthy2 [] [] goal (fn {context = ctxt, ...} =>
mk_size_gen_o_map_tac ctxt size_def rec_o_map all_inj_maps nested_size_maps)
|> Thm.close_derivation \<^here>
|> single)
size_gen_o_map_goals size_defs rec_o_maps
else
replicate nn [];
in
size_gen_o_map_thmss
end;
val massage_multi_notes =
maps (fn (thmN, thmss, attrs) =>
map2 (fn T_name => fn thms =>
((Binding.qualify true (Long_Name.base_name T_name) (Binding.name thmN), attrs),
[(thms, [])]))
T_names thmss)
#> filter_out (null o fst o hd o snd);
val notes =
[(sizeN, size_thmss, nitpicksimp_attrs @ simp_attrs),
(size_genN, size_gen_thmss, []),
(size_gen_o_mapN, size_gen_o_map_thmss, []),
(size_neqN, size_neq_thmss, [])]
|> massage_multi_notes;
val (noted, lthy3) =
lthy2
|> Spec_Rules.add Binding.empty Spec_Rules.equational size_consts size_simps
|> Spec_Rules.add Binding.empty Spec_Rules.equational
overloaded_size_consts overloaded_size_simps
|> Code.declare_default_eqns (map (rpair true) (flat size_thmss))
(*Ideally, this would be issued only if the "code" plugin is enabled.*)
|> Local_Theory.notes notes;
val phi0 = substitute_noted_thm noted;
in
lthy3
- |> Local_Theory.declaration {syntax = false, pervasive = true}
+ |> Local_Theory.declaration {syntax = false, pervasive = true, pos = \<^here>}
(fn phi => Data.map (@{fold 3} (fn T_name => fn Const (size_name, _) =>
fn overloaded_size_def =>
let val morph = Morphism.thm (phi0 $> phi) in
Symtab.update (T_name, (size_name, (morph overloaded_size_def,
map morph overloaded_size_simps, maps (map morph) size_gen_o_map_thmss)))
end)
T_names size_consts overloaded_size_defs))
end
| generate_datatype_size _ lthy = lthy;
val size_plugin = Plugin_Name.declare_setup \<^binding>\<open>size\<close>;
val _ = Theory.setup (fp_sugars_interpretation size_plugin generate_datatype_size);
end;
diff --git a/src/HOL/Tools/Ctr_Sugar/ctr_sugar.ML b/src/HOL/Tools/Ctr_Sugar/ctr_sugar.ML
--- a/src/HOL/Tools/Ctr_Sugar/ctr_sugar.ML
+++ b/src/HOL/Tools/Ctr_Sugar/ctr_sugar.ML
@@ -1,1272 +1,1272 @@
(* Title: HOL/Tools/Ctr_Sugar/ctr_sugar.ML
Author: Jasmin Blanchette, TU Muenchen
Author: Martin Desharnais, TU Muenchen
Copyright 2012, 2013
Wrapping existing freely generated type's constructors.
*)
signature CTR_SUGAR =
sig
datatype ctr_sugar_kind = Datatype | Codatatype | Record | Unknown
type ctr_sugar =
{kind: ctr_sugar_kind,
T: typ,
ctrs: term list,
casex: term,
discs: term list,
selss: term list list,
exhaust: thm,
nchotomy: thm,
injects: thm list,
distincts: thm list,
case_thms: thm list,
case_cong: thm,
case_cong_weak: thm,
case_distribs: thm list,
split: thm,
split_asm: thm,
disc_defs: thm list,
disc_thmss: thm list list,
discIs: thm list,
disc_eq_cases: thm list,
sel_defs: thm list,
sel_thmss: thm list list,
distinct_discsss: thm list list list,
exhaust_discs: thm list,
exhaust_sels: thm list,
collapses: thm list,
expands: thm list,
split_sels: thm list,
split_sel_asms: thm list,
case_eq_ifs: thm list};
val morph_ctr_sugar: morphism -> ctr_sugar -> ctr_sugar
val transfer_ctr_sugar: theory -> ctr_sugar -> ctr_sugar
val ctr_sugar_of: Proof.context -> string -> ctr_sugar option
val ctr_sugar_of_global: theory -> string -> ctr_sugar option
val ctr_sugars_of: Proof.context -> ctr_sugar list
val ctr_sugars_of_global: theory -> ctr_sugar list
val ctr_sugar_of_case: Proof.context -> string -> ctr_sugar option
val ctr_sugar_of_case_global: theory -> string -> ctr_sugar option
val ctr_sugar_interpretation: string -> (ctr_sugar -> local_theory -> local_theory) -> theory ->
theory
val interpret_ctr_sugar: (string -> bool) -> ctr_sugar -> local_theory -> local_theory
val register_ctr_sugar_raw: ctr_sugar -> local_theory -> local_theory
val register_ctr_sugar: (string -> bool) -> ctr_sugar -> local_theory -> local_theory
val default_register_ctr_sugar_global: (string -> bool) -> ctr_sugar -> theory -> theory
val mk_half_pairss: 'a list * 'a list -> ('a * 'a) list list
val join_halves: int -> 'a list list -> 'a list list -> 'a list * 'a list list list
val mk_ctr: typ list -> term -> term
val mk_case: typ list -> typ -> term -> term
val mk_disc_or_sel: typ list -> term -> term
val name_of_ctr: term -> string
val name_of_disc: term -> string
val dest_ctr: Proof.context -> string -> term -> term * term list
val dest_case: Proof.context -> string -> typ list -> term ->
(ctr_sugar * term list * term list) option
type ('c, 'a) ctr_spec = (binding * 'c) * 'a list
val disc_of_ctr_spec: ('c, 'a) ctr_spec -> binding
val ctr_of_ctr_spec: ('c, 'a) ctr_spec -> 'c
val args_of_ctr_spec: ('c, 'a) ctr_spec -> 'a list
val code_plugin: string
type ctr_options = (string -> bool) * bool
type ctr_options_cmd = (Proof.context -> string -> bool) * bool
val fake_local_theory_for_sel_defaults: (binding * typ) list -> Proof.context -> Proof.context
val free_constructors: ctr_sugar_kind ->
({prems: thm list, context: Proof.context} -> tactic) list list ->
((ctr_options * binding) * (term, binding) ctr_spec list) * term list -> local_theory ->
ctr_sugar * local_theory
val free_constructors_cmd: ctr_sugar_kind ->
((((Proof.context -> Plugin_Name.filter) * bool) * binding)
* ((binding * string) * binding list) list) * string list ->
Proof.context -> Proof.state
val default_ctr_options: ctr_options
val default_ctr_options_cmd: ctr_options_cmd
val parse_bound_term: (binding * string) parser
val parse_ctr_options: ctr_options_cmd parser
val parse_ctr_spec: 'c parser -> 'a parser -> ('c, 'a) ctr_spec parser
val parse_sel_default_eqs: string list parser
end;
structure Ctr_Sugar : CTR_SUGAR =
struct
open Ctr_Sugar_Util
open Ctr_Sugar_Tactics
open Ctr_Sugar_Code
datatype ctr_sugar_kind = Datatype | Codatatype | Record | Unknown;
type ctr_sugar =
{kind: ctr_sugar_kind,
T: typ,
ctrs: term list,
casex: term,
discs: term list,
selss: term list list,
exhaust: thm,
nchotomy: thm,
injects: thm list,
distincts: thm list,
case_thms: thm list,
case_cong: thm,
case_cong_weak: thm,
case_distribs: thm list,
split: thm,
split_asm: thm,
disc_defs: thm list,
disc_thmss: thm list list,
discIs: thm list,
disc_eq_cases: thm list,
sel_defs: thm list,
sel_thmss: thm list list,
distinct_discsss: thm list list list,
exhaust_discs: thm list,
exhaust_sels: thm list,
collapses: thm list,
expands: thm list,
split_sels: thm list,
split_sel_asms: thm list,
case_eq_ifs: thm list};
fun morph_ctr_sugar phi ({kind, T, ctrs, casex, discs, selss, exhaust, nchotomy, injects, distincts,
case_thms, case_cong, case_cong_weak, case_distribs, split, split_asm, disc_defs, disc_thmss,
discIs, disc_eq_cases, sel_defs, sel_thmss, distinct_discsss, exhaust_discs, exhaust_sels,
collapses, expands, split_sels, split_sel_asms, case_eq_ifs} : ctr_sugar) =
{kind = kind,
T = Morphism.typ phi T,
ctrs = map (Morphism.term phi) ctrs,
casex = Morphism.term phi casex,
discs = map (Morphism.term phi) discs,
selss = map (map (Morphism.term phi)) selss,
exhaust = Morphism.thm phi exhaust,
nchotomy = Morphism.thm phi nchotomy,
injects = map (Morphism.thm phi) injects,
distincts = map (Morphism.thm phi) distincts,
case_thms = map (Morphism.thm phi) case_thms,
case_cong = Morphism.thm phi case_cong,
case_cong_weak = Morphism.thm phi case_cong_weak,
case_distribs = map (Morphism.thm phi) case_distribs,
split = Morphism.thm phi split,
split_asm = Morphism.thm phi split_asm,
disc_defs = map (Morphism.thm phi) disc_defs,
disc_thmss = map (map (Morphism.thm phi)) disc_thmss,
discIs = map (Morphism.thm phi) discIs,
disc_eq_cases = map (Morphism.thm phi) disc_eq_cases,
sel_defs = map (Morphism.thm phi) sel_defs,
sel_thmss = map (map (Morphism.thm phi)) sel_thmss,
distinct_discsss = map (map (map (Morphism.thm phi))) distinct_discsss,
exhaust_discs = map (Morphism.thm phi) exhaust_discs,
exhaust_sels = map (Morphism.thm phi) exhaust_sels,
collapses = map (Morphism.thm phi) collapses,
expands = map (Morphism.thm phi) expands,
split_sels = map (Morphism.thm phi) split_sels,
split_sel_asms = map (Morphism.thm phi) split_sel_asms,
case_eq_ifs = map (Morphism.thm phi) case_eq_ifs};
val transfer_ctr_sugar = morph_ctr_sugar o Morphism.transfer_morphism;
structure Data = Generic_Data
(
type T = (Position.T * ctr_sugar) Symtab.table;
val empty = Symtab.empty;
fun merge data : T = Symtab.merge (K true) data;
);
fun ctr_sugar_of_generic context =
Option.map (transfer_ctr_sugar (Context.theory_of context) o #2) o Symtab.lookup (Data.get context);
fun ctr_sugars_of_generic context =
Symtab.fold (cons o transfer_ctr_sugar (Context.theory_of context) o #2 o #2) (Data.get context) [];
fun ctr_sugar_of_case_generic context s =
find_first (fn {casex = Const (s', _), ...} => s' = s | _ => false)
(ctr_sugars_of_generic context);
val ctr_sugar_of = ctr_sugar_of_generic o Context.Proof;
val ctr_sugar_of_global = ctr_sugar_of_generic o Context.Theory;
val ctr_sugars_of = ctr_sugars_of_generic o Context.Proof;
val ctr_sugars_of_global = ctr_sugars_of_generic o Context.Theory;
val ctr_sugar_of_case = ctr_sugar_of_case_generic o Context.Proof;
val ctr_sugar_of_case_global = ctr_sugar_of_case_generic o Context.Theory;
structure Ctr_Sugar_Plugin = Plugin(type T = ctr_sugar);
fun ctr_sugar_interpretation name f =
Ctr_Sugar_Plugin.interpretation name (fn ctr_sugar => fn lthy =>
f (transfer_ctr_sugar (Proof_Context.theory_of lthy) ctr_sugar) lthy);
val interpret_ctr_sugar = Ctr_Sugar_Plugin.data;
fun register_ctr_sugar_raw (ctr_sugar as {T = Type (name, _), ...}) =
- Local_Theory.declaration {syntax = false, pervasive = true}
+ Local_Theory.declaration {syntax = false, pervasive = true, pos = \<^here>}
(fn phi => fn context =>
let val pos = Position.thread_data ()
in Data.map (Symtab.update (name, (pos, morph_ctr_sugar phi ctr_sugar))) context end);
fun register_ctr_sugar plugins ctr_sugar =
register_ctr_sugar_raw ctr_sugar #> interpret_ctr_sugar plugins ctr_sugar;
fun default_register_ctr_sugar_global plugins (ctr_sugar as {T = Type (name, _), ...}) thy =
let
val tab = Data.get (Context.Theory thy);
val pos = Position.thread_data ();
in
if Symtab.defined tab name then thy
else
thy
|> Context.theory_map (Data.put (Symtab.update_new (name, (pos, ctr_sugar)) tab))
|> Named_Target.theory_map (Ctr_Sugar_Plugin.data plugins ctr_sugar)
end;
val is_prefix = "is_";
val un_prefix = "un_";
val not_prefix = "not_";
fun mk_unN 1 1 suf = un_prefix ^ suf
| mk_unN _ l suf = un_prefix ^ suf ^ string_of_int l;
val caseN = "case";
val case_congN = "case_cong";
val case_eq_ifN = "case_eq_if";
val collapseN = "collapse";
val discN = "disc";
val disc_eq_caseN = "disc_eq_case";
val discIN = "discI";
val distinctN = "distinct";
val distinct_discN = "distinct_disc";
val exhaustN = "exhaust";
val exhaust_discN = "exhaust_disc";
val expandN = "expand";
val injectN = "inject";
val nchotomyN = "nchotomy";
val selN = "sel";
val exhaust_selN = "exhaust_sel";
val splitN = "split";
val split_asmN = "split_asm";
val split_selN = "split_sel";
val split_sel_asmN = "split_sel_asm";
val splitsN = "splits";
val split_selsN = "split_sels";
val case_cong_weak_thmsN = "case_cong_weak";
val case_distribN = "case_distrib";
val cong_attrs = @{attributes [cong]};
val dest_attrs = @{attributes [dest]};
val safe_elim_attrs = @{attributes [elim!]};
val iff_attrs = @{attributes [iff]};
val inductsimp_attrs = @{attributes [induct_simp]};
val nitpicksimp_attrs = @{attributes [nitpick_simp]};
val simp_attrs = @{attributes [simp]};
fun unflat_lookup eq xs ys = map (fn xs' => permute_like_unique eq xs xs' ys);
fun mk_half_pairss' _ ([], []) = []
| mk_half_pairss' indent (x :: xs, _ :: ys) =
indent @ fold_rev (cons o single o pair x) ys (mk_half_pairss' ([] :: indent) (xs, ys));
fun mk_half_pairss p = mk_half_pairss' [[]] p;
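(* E.g. "mk_half_pairss ([x1, x2, x3], [y1, y2, y3])" yields
"[[], [(x1, y2)], [(x1, y3)], [], [], [(x2, y3)], [], [], []]", i.e. the strict upper triangle
of an n-by-n matrix listed row by row; "join_halves" splices it back together with the
transposed other half to recover all pairs. *)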
fun join_halves n half_xss other_half_xss =
(splice (flat half_xss) (flat other_half_xss),
map2 (map2 append) (Library.chop_groups n half_xss)
(transpose (Library.chop_groups n other_half_xss)));
fun mk_undefined T = Const (\<^const_name>\<open>undefined\<close>, T);
fun mk_ctr Ts t =
let val Type (_, Ts0) = body_type (fastype_of t) in
subst_nonatomic_types (Ts0 ~~ Ts) t
end;
fun mk_case Ts T t =
let val (Type (_, Ts0), body) = strip_type (fastype_of t) |>> List.last in
subst_nonatomic_types ((body, T) :: (Ts0 ~~ Ts)) t
end;
fun mk_disc_or_sel Ts t =
subst_nonatomic_types (snd (Term.dest_Type (domain_type (fastype_of t))) ~~ Ts) t;
val name_of_ctr = name_of_const "constructor" body_type;
fun name_of_disc t =
(case head_of t of
Abs (_, _, \<^Const_>\<open>Not for \<open>t' $ Bound 0\<close>\<close>) =>
Long_Name.map_base_name (prefix not_prefix) (name_of_disc t')
| Abs (_, _, \<^Const_>\<open>HOL.eq _ for \<open>Bound 0\<close> t'\<close>) =>
Long_Name.map_base_name (prefix is_prefix) (name_of_disc t')
| Abs (_, _, \<^Const_>\<open>Not for \<^Const_>\<open>HOL.eq _ for \<open>Bound 0\<close> t'\<close>\<close>) =>
Long_Name.map_base_name (prefix (not_prefix ^ is_prefix)) (name_of_disc t')
| t' => name_of_const "discriminator" (perhaps (try domain_type)) t');
val base_name_of_ctr = Long_Name.base_name o name_of_ctr;
fun dest_ctr ctxt s t =
let val (f, args) = Term.strip_comb t in
(case ctr_sugar_of ctxt s of
SOME {ctrs, ...} =>
(case find_first (can (fo_match ctxt f)) ctrs of
SOME f' => (f', args)
| NONE => raise Fail "dest_ctr")
| NONE => raise Fail "dest_ctr")
end;
fun dest_case ctxt s Ts t =
(case Term.strip_comb t of
(Const (c, _), args as _ :: _) =>
(case ctr_sugar_of ctxt s of
SOME (ctr_sugar as {casex = Const (case_name, _), discs = discs0, selss = selss0, ...}) =>
if case_name = c then
let val n = length discs0 in
if n < length args then
let
val (branches, obj :: leftovers) = chop n args;
val discs = map (mk_disc_or_sel Ts) discs0;
val selss = map (map (mk_disc_or_sel Ts)) selss0;
val conds = map (rapp obj) discs;
val branch_argss = map (fn sels => map (rapp obj) sels @ leftovers) selss;
val branches' = map2 (curry Term.betapplys) branches branch_argss;
in
SOME (ctr_sugar, conds, branches')
end
else
NONE
end
else
NONE
| _ => NONE)
| _ => NONE);
fun const_or_free_name (Const (s, _)) = Long_Name.base_name s
| const_or_free_name (Free (s, _)) = s
| const_or_free_name t = raise TERM ("const_or_free_name", [t])
fun extract_sel_default ctxt t =
let
fun malformed () =
error ("Malformed selector default value equation: " ^ Syntax.string_of_term ctxt t);
val ((sel, (ctr, vars)), rhs) =
fst (Term.replace_dummy_patterns (Syntax.check_term ctxt t) 0)
|> HOLogic.dest_eq
|>> (Term.dest_comb
#>> const_or_free_name
##> (Term.strip_comb #>> (Term.dest_Const #> fst)))
handle TERM _ => malformed ();
in
if forall (is_Free orf is_Var) vars andalso not (has_duplicates (op aconv) vars) then
((ctr, sel), fold_rev Term.lambda vars rhs)
else
malformed ()
end;
(* Ideally, we would enrich the context with constants rather than free variables. *)
fun fake_local_theory_for_sel_defaults sel_bTs =
Proof_Context.allow_dummies
#> Proof_Context.add_fixes (map (fn (b, T) => (b, SOME T, NoSyn)) sel_bTs)
#> snd;
type ('c, 'a) ctr_spec = (binding * 'c) * 'a list;
fun disc_of_ctr_spec ((disc, _), _) = disc;
fun ctr_of_ctr_spec ((_, ctr), _) = ctr;
fun args_of_ctr_spec (_, args) = args;
val code_plugin = Plugin_Name.declare_setup \<^binding>\<open>code\<close>;
fun prepare_free_constructors kind prep_plugins prep_term
((((raw_plugins, discs_sels), raw_case_binding), ctr_specs), sel_default_eqs) no_defs_lthy =
let
val plugins = prep_plugins no_defs_lthy raw_plugins;
(* TODO: sanity checks on arguments *)
val raw_ctrs = map ctr_of_ctr_spec ctr_specs;
val raw_disc_bindings = map disc_of_ctr_spec ctr_specs;
val raw_sel_bindingss = map args_of_ctr_spec ctr_specs;
val n = length raw_ctrs;
val ks = 1 upto n;
val _ = n > 0 orelse error "No constructors specified";
val ctrs0 = map (prep_term no_defs_lthy) raw_ctrs;
val (fcT_name, As0) =
(case body_type (fastype_of (hd ctrs0)) of
Type T' => T'
| _ => error "Expected type constructor in body type of constructor");
val _ = forall ((fn Type (T_name, _) => T_name = fcT_name | _ => false) o body_type
o fastype_of) (tl ctrs0) orelse error "Constructors not constructing same type";
val fc_b_name = Long_Name.base_name fcT_name;
val fc_b = Binding.name fc_b_name;
fun qualify mandatory = Binding.qualify mandatory fc_b_name;
val (unsorted_As, [B, C]) =
no_defs_lthy
|> variant_tfrees (map (fst o dest_TFree_or_TVar) As0)
||> fst o mk_TFrees 2;
val As = map2 (resort_tfree_or_tvar o snd o dest_TFree_or_TVar) As0 unsorted_As;
val fcT = Type (fcT_name, As);
val ctrs = map (mk_ctr As) ctrs0;
val ctr_Tss = map (binder_types o fastype_of) ctrs;
val ms = map length ctr_Tss;
fun can_definitely_rely_on_disc k =
not (Binding.is_empty (nth raw_disc_bindings (k - 1))) orelse nth ms (k - 1) = 0;
fun can_rely_on_disc k =
can_definitely_rely_on_disc k orelse (k = 1 andalso not (can_definitely_rely_on_disc 2));
fun should_omit_disc_binding k = n = 1 orelse (n = 2 andalso can_rely_on_disc (3 - k));
val equal_binding = \<^binding>\<open>=\<close>;
fun is_disc_binding_valid b =
not (Binding.is_empty b orelse Binding.eq_name (b, equal_binding));
val standard_disc_binding = Binding.name o prefix is_prefix o base_name_of_ctr;
val disc_bindings =
raw_disc_bindings
|> @{map 4} (fn k => fn m => fn ctr => fn disc =>
qualify false
(if Binding.is_empty disc then
if m = 0 then equal_binding
else if should_omit_disc_binding k then disc
else standard_disc_binding ctr
else if Binding.eq_name (disc, standard_binding) then
standard_disc_binding ctr
else
disc)) ks ms ctrs0;
fun standard_sel_binding m l = Binding.name o mk_unN m l o base_name_of_ctr;
val sel_bindingss =
@{map 3} (fn ctr => fn m => map2 (fn l => fn sel =>
qualify false
(if Binding.is_empty sel orelse Binding.eq_name (sel, standard_binding) then
standard_sel_binding m l ctr
else
sel)) (1 upto m) o pad_list Binding.empty m) ctrs0 ms raw_sel_bindingss;
val add_bindings =
Variable.add_fixes (distinct (op =) (filter Symbol_Pos.is_identifier
(map Binding.name_of (disc_bindings @ flat sel_bindingss))))
#> snd;
val case_Ts = map (fn Ts => Ts ---> B) ctr_Tss;
val (((((((((u, exh_y), xss), yss), fs), gs), w), (p, p'))), _) = no_defs_lthy
|> add_bindings
|> yield_singleton (mk_Frees fc_b_name) fcT
||>> yield_singleton (mk_Frees "y") fcT (* for compatibility with "datatype_realizer.ML" *)
||>> mk_Freess "x" ctr_Tss
||>> mk_Freess "y" ctr_Tss
||>> mk_Frees "f" case_Ts
||>> mk_Frees "g" case_Ts
||>> yield_singleton (mk_Frees "z") B
||>> yield_singleton (apfst (op ~~) oo mk_Frees' "P") HOLogic.boolT;
val q = Free (fst p', mk_pred1T B);
val xctrs = map2 (curry Term.list_comb) ctrs xss;
val yctrs = map2 (curry Term.list_comb) ctrs yss;
val xfs = map2 (curry Term.list_comb) fs xss;
val xgs = map2 (curry Term.list_comb) gs xss;
(* TODO: Eta-expansion is for compatibility with the old datatype package (but it also provides
nicer names). Consider removing. *)
val eta_fs = map2 (fold_rev Term.lambda) xss xfs;
val eta_gs = map2 (fold_rev Term.lambda) xss xgs;
val case_binding =
qualify false
(if Binding.is_empty raw_case_binding orelse
Binding.eq_name (raw_case_binding, standard_binding) then
Binding.prefix_name (caseN ^ "_") fc_b
else
raw_case_binding);
fun mk_case_disj xctr xf xs =
list_exists_free xs (HOLogic.mk_conj (HOLogic.mk_eq (u, xctr), HOLogic.mk_eq (w, xf)));
val case_rhs = fold_rev (fold_rev Term.lambda) [fs, [u]]
(Const (\<^const_name>\<open>The\<close>, (B --> HOLogic.boolT) --> B) $
Term.lambda w (Library.foldr1 HOLogic.mk_disj (@{map 3} mk_case_disj xctrs xfs xss)));
val ((raw_case, (_, raw_case_def)), (lthy, lthy_old)) = no_defs_lthy
|> (snd o Local_Theory.begin_nested)
|> Local_Theory.define ((case_binding, NoSyn),
((Binding.concealed (Thm.def_binding case_binding), []), case_rhs))
||> `Local_Theory.end_nested;
val phi = Proof_Context.export_morphism lthy_old lthy;
val case_def = Morphism.thm phi raw_case_def;
val case0 = Morphism.term phi raw_case;
val casex = mk_case As B case0;
val casexC = mk_case As C case0;
val casexBool = mk_case As HOLogic.boolT case0;
fun mk_uu_eq () = HOLogic.mk_eq (u, u);
val exist_xs_u_eq_ctrs =
map2 (fn xctr => fn xs => list_exists_free xs (HOLogic.mk_eq (u, xctr))) xctrs xss;
val unique_disc_no_def = TrueI; (*arbitrary marker*)
val alternate_disc_no_def = FalseE; (*arbitrary marker*)
fun alternate_disc_lhs get_udisc k =
HOLogic.mk_not
(let val b = nth disc_bindings (k - 1) in
if is_disc_binding_valid b then get_udisc b (k - 1) else nth exist_xs_u_eq_ctrs (k - 1)
end);
val no_discs_sels =
not discs_sels andalso
forall (forall Binding.is_empty) (raw_disc_bindings :: raw_sel_bindingss) andalso
null sel_default_eqs;
val (all_sels_distinct, discs, selss, disc_defs, sel_defs, sel_defss, lthy) =
if no_discs_sels then
(true, [], [], [], [], [], lthy)
else
let
val all_sel_bindings = flat sel_bindingss;
val num_all_sel_bindings = length all_sel_bindings;
val uniq_sel_bindings = distinct Binding.eq_name all_sel_bindings;
val all_sels_distinct = (length uniq_sel_bindings = num_all_sel_bindings);
val sel_binding_index =
if all_sels_distinct then
1 upto num_all_sel_bindings
else
map (fn b => find_index (curry Binding.eq_name b) uniq_sel_bindings) all_sel_bindings;
val all_proto_sels = flat (@{map 3} (fn k => fn xs => map (pair k o pair xs)) ks xss xss);
val sel_infos =
AList.group (op =) (sel_binding_index ~~ all_proto_sels)
|> sort (int_ord o apply2 fst)
|> map snd |> curry (op ~~) uniq_sel_bindings;
val sel_bindings = map fst sel_infos;
val sel_defaults =
if null sel_default_eqs then
[]
else
let
val sel_Ts = map (curry (op -->) fcT o fastype_of o snd o snd o hd o snd) sel_infos;
val fake_lthy =
fake_local_theory_for_sel_defaults (sel_bindings ~~ sel_Ts) no_defs_lthy;
in
map (extract_sel_default fake_lthy o prep_term fake_lthy) sel_default_eqs
end;
fun disc_free b = Free (Binding.name_of b, mk_pred1T fcT);
fun disc_spec b exist_xs_u_eq_ctr = mk_Trueprop_eq (disc_free b $ u, exist_xs_u_eq_ctr);
fun alternate_disc k =
Term.lambda u (alternate_disc_lhs (K o rapp u o disc_free) (3 - k));
fun mk_sel_case_args b proto_sels T =
@{map 3} (fn Const (c, _) => fn Ts => fn k =>
(case AList.lookup (op =) proto_sels k of
NONE =>
(case filter (curry (op =) (c, Binding.name_of b) o fst) sel_defaults of
[] => fold_rev (Term.lambda o curry Free Name.uu) Ts (mk_undefined T)
| [(_, t)] => t
| _ => error "Multiple default values for selector/constructor pair")
| SOME (xs, x) => fold_rev Term.lambda xs x)) ctrs ctr_Tss ks;
fun sel_spec b proto_sels =
let
val _ =
(case duplicates (op =) (map fst proto_sels) of
k :: _ => error ("Duplicate selector name " ^ quote (Binding.name_of b) ^
" for constructor " ^ quote (Syntax.string_of_term lthy (nth ctrs (k - 1))))
| [] => ())
val T =
(case distinct (op =) (map (fastype_of o snd o snd) proto_sels) of
[T] => T
| T :: T' :: _ => error ("Inconsistent range type for selector " ^
quote (Binding.name_of b) ^ ": " ^ quote (Syntax.string_of_typ lthy T) ^
" vs. " ^ quote (Syntax.string_of_typ lthy T')));
in
mk_Trueprop_eq (Free (Binding.name_of b, fcT --> T) $ u,
Term.list_comb (mk_case As T case0, mk_sel_case_args b proto_sels T) $ u)
end;
fun unflat_selss xs = unflat_lookup Binding.eq_name sel_bindings xs sel_bindingss;
val (((raw_discs, raw_disc_defs), (raw_sels, raw_sel_defs)), (lthy', lthy)) =
lthy
|> (snd o Local_Theory.begin_nested)
|> apfst split_list o @{fold_map 3} (fn k => fn exist_xs_u_eq_ctr => fn b =>
if Binding.is_empty b then
if n = 1 then pair (Term.lambda u (mk_uu_eq ()), unique_disc_no_def)
else pair (alternate_disc k, alternate_disc_no_def)
else if Binding.eq_name (b, equal_binding) then
pair (Term.lambda u exist_xs_u_eq_ctr, refl)
else
Specification.definition (SOME (b, NONE, NoSyn)) [] []
((Thm.def_binding b, []), disc_spec b exist_xs_u_eq_ctr) #>> apsnd snd)
ks exist_xs_u_eq_ctrs disc_bindings
||>> apfst split_list o fold_map (fn (b, proto_sels) =>
Specification.definition (SOME (b, NONE, NoSyn)) [] []
((Thm.def_binding b, []), sel_spec b proto_sels) #>> apsnd snd) sel_infos
||> `Local_Theory.end_nested;
val phi = Proof_Context.export_morphism lthy lthy';
val disc_defs = map (Morphism.thm phi) raw_disc_defs;
val sel_defs = map (Morphism.thm phi) raw_sel_defs;
val sel_defss = unflat_selss sel_defs;
val discs0 = map (Morphism.term phi) raw_discs;
val selss0 = unflat_selss (map (Morphism.term phi) raw_sels);
val discs = map (mk_disc_or_sel As) discs0;
val selss = map (map (mk_disc_or_sel As)) selss0;
in
(all_sels_distinct, discs, selss, disc_defs, sel_defs, sel_defss, lthy')
end;
fun mk_imp_p Qs = Logic.list_implies (Qs, HOLogic.mk_Trueprop p);
val exhaust_goal =
let fun mk_prem xctr xs = fold_rev Logic.all xs (mk_imp_p [mk_Trueprop_eq (exh_y, xctr)]) in
fold_rev Logic.all [p, exh_y] (mk_imp_p (map2 mk_prem xctrs xss))
end;
val inject_goalss =
let
fun mk_goal _ _ [] [] = []
| mk_goal xctr yctr xs ys =
[fold_rev Logic.all (xs @ ys) (mk_Trueprop_eq (HOLogic.mk_eq (xctr, yctr),
Library.foldr1 HOLogic.mk_conj (map2 (curry HOLogic.mk_eq) xs ys)))];
in
@{map 4} mk_goal xctrs yctrs xss yss
end;
val half_distinct_goalss =
let
fun mk_goal ((xs, xc), (xs', xc')) =
fold_rev Logic.all (xs @ xs')
(HOLogic.mk_Trueprop (HOLogic.mk_not (HOLogic.mk_eq (xc, xc'))));
in
map (map mk_goal) (mk_half_pairss (`I (xss ~~ xctrs)))
end;
val goalss = [exhaust_goal] :: inject_goalss @ half_distinct_goalss;
fun after_qed ([exhaust_thm] :: thmss) lthy =
let
val ((((((((u, u'), (xss, xss')), fs), gs), h), v), p), _) = lthy
|> add_bindings
|> yield_singleton (apfst (op ~~) oo mk_Frees' fc_b_name) fcT
||>> mk_Freess' "x" ctr_Tss
||>> mk_Frees "f" case_Ts
||>> mk_Frees "g" case_Ts
||>> yield_singleton (mk_Frees "h") (B --> C)
||>> yield_singleton (mk_Frees (fc_b_name ^ "'")) fcT
||>> yield_singleton (mk_Frees "P") HOLogic.boolT;
val xfs = map2 (curry Term.list_comb) fs xss;
val xgs = map2 (curry Term.list_comb) gs xss;
val fcase = Term.list_comb (casex, fs);
val ufcase = fcase $ u;
val vfcase = fcase $ v;
val eta_fcase = Term.list_comb (casex, eta_fs);
val eta_gcase = Term.list_comb (casex, eta_gs);
val eta_ufcase = eta_fcase $ u;
val eta_vgcase = eta_gcase $ v;
fun mk_uu_eq () = HOLogic.mk_eq (u, u);
val uv_eq = mk_Trueprop_eq (u, v);
val ((inject_thms, inject_thmss), half_distinct_thmss) = chop n thmss |>> `flat;
val rho_As =
map (fn (T, U) => (dest_TVar T, Thm.ctyp_of lthy U))
(map Logic.varifyT_global As ~~ As);
fun inst_thm t thm =
Thm.instantiate' [] [SOME (Thm.cterm_of lthy t)]
(Thm.instantiate (TVars.make rho_As, Vars.empty) (Drule.zero_var_indexes thm));
val uexhaust_thm = inst_thm u exhaust_thm;
val exhaust_cases = map base_name_of_ctr ctrs;
val other_half_distinct_thmss = map (map (fn thm => thm RS not_sym)) half_distinct_thmss;
val (distinct_thms, (distinct_thmsss', distinct_thmsss)) =
join_halves n half_distinct_thmss other_half_distinct_thmss ||> `transpose;
val nchotomy_thm =
let
val goal =
HOLogic.mk_Trueprop (HOLogic.mk_all (fst u', snd u',
Library.foldr1 HOLogic.mk_disj exist_xs_u_eq_ctrs));
in
Goal.prove_sorry lthy [] [] goal (fn {context = ctxt, prems = _} =>
mk_nchotomy_tac ctxt n exhaust_thm)
|> Thm.close_derivation \<^here>
end;
val case_thms =
let
val goals =
@{map 3} (fn xctr => fn xf => fn xs =>
fold_rev Logic.all (fs @ xs) (mk_Trueprop_eq (fcase $ xctr, xf))) xctrs xfs xss;
in
@{map 4} (fn k => fn goal => fn injects => fn distinctss =>
Goal.prove_sorry lthy [] [] goal (fn {context = ctxt, ...} =>
mk_case_tac ctxt n k case_def injects distinctss)
|> Thm.close_derivation \<^here>)
ks goals inject_thmss distinct_thmsss
end;
val (case_cong_thm, case_cong_weak_thm) =
let
fun mk_prem xctr xs xf xg =
fold_rev Logic.all xs (Logic.mk_implies (mk_Trueprop_eq (v, xctr),
mk_Trueprop_eq (xf, xg)));
val goal =
Logic.list_implies (uv_eq :: @{map 4} mk_prem xctrs xss xfs xgs,
mk_Trueprop_eq (eta_ufcase, eta_vgcase));
val weak_goal = Logic.mk_implies (uv_eq, mk_Trueprop_eq (ufcase, vfcase));
val vars = Variable.add_free_names lthy goal [];
val weak_vars = Variable.add_free_names lthy weak_goal [];
in
(Goal.prove_sorry lthy vars [] goal (fn {context = ctxt, prems = _} =>
mk_case_cong_tac ctxt uexhaust_thm case_thms),
Goal.prove_sorry lthy weak_vars [] weak_goal (fn {context = ctxt, prems = _} =>
etac ctxt arg_cong 1))
|> apply2 (Thm.close_derivation \<^here>)
end;
val split_lhs = q $ ufcase;
fun mk_split_conjunct xctr xs f_xs =
list_all_free xs (HOLogic.mk_imp (HOLogic.mk_eq (u, xctr), q $ f_xs));
fun mk_split_disjunct xctr xs f_xs =
list_exists_free xs (HOLogic.mk_conj (HOLogic.mk_eq (u, xctr),
HOLogic.mk_not (q $ f_xs)));
fun mk_split_goal xctrs xss xfs =
mk_Trueprop_eq (split_lhs, Library.foldr1 HOLogic.mk_conj
(@{map 3} mk_split_conjunct xctrs xss xfs));
fun mk_split_asm_goal xctrs xss xfs =
mk_Trueprop_eq (split_lhs, HOLogic.mk_not (Library.foldr1 HOLogic.mk_disj
(@{map 3} mk_split_disjunct xctrs xss xfs)));
fun prove_split selss goal =
Variable.add_free_names lthy goal []
|> (fn vars => Goal.prove_sorry lthy vars [] goal (fn {context = ctxt, prems = _} =>
mk_split_tac ctxt uexhaust_thm case_thms selss inject_thmss distinct_thmsss))
|> Thm.close_derivation \<^here>;
fun prove_split_asm asm_goal split_thm =
Variable.add_free_names lthy asm_goal []
|> (fn vars => Goal.prove_sorry lthy vars [] asm_goal (fn {context = ctxt, ...} =>
mk_split_asm_tac ctxt split_thm))
|> Thm.close_derivation \<^here>;
val (split_thm, split_asm_thm) =
let
val goal = mk_split_goal xctrs xss xfs;
val asm_goal = mk_split_asm_goal xctrs xss xfs;
val thm = prove_split (replicate n []) goal;
val asm_thm = prove_split_asm asm_goal thm;
in
(thm, asm_thm)
end;
val (sel_defs, all_sel_thms, sel_thmss, nontriv_disc_defs, disc_thmss, nontriv_disc_thmss,
discI_thms, nontriv_discI_thms, distinct_disc_thms, distinct_disc_thmsss,
exhaust_disc_thms, exhaust_sel_thms, all_collapse_thms, safe_collapse_thms,
expand_thms, split_sel_thms, split_sel_asm_thms, case_eq_if_thms, disc_eq_case_thms) =
if no_discs_sels then
([], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [])
else
let
val udiscs = map (rapp u) discs;
val uselss = map (map (rapp u)) selss;
val usel_ctrs = map2 (curry Term.list_comb) ctrs uselss;
val usel_fs = map2 (curry Term.list_comb) fs uselss;
val vdiscs = map (rapp v) discs;
val vselss = map (map (rapp v)) selss;
fun make_sel_thm xs' case_thm sel_def =
zero_var_indexes
(Variable.gen_all lthy
(Drule.rename_bvars'
(map (SOME o fst) xs')
(Thm.forall_intr_vars (case_thm RS (sel_def RS trans)))));
val sel_thmss = @{map 3} (map oo make_sel_thm) xss' case_thms sel_defss;
fun has_undefined_rhs thm =
(case snd (HOLogic.dest_eq (HOLogic.dest_Trueprop (Thm.prop_of thm))) of
Const (\<^const_name>\<open>undefined\<close>, _) => true
| _ => false);
val all_sel_thms =
(if all_sels_distinct andalso null sel_default_eqs then
flat sel_thmss
else
map_product (fn s => fn (xs', c) => make_sel_thm xs' c s) sel_defs
(xss' ~~ case_thms))
|> filter_out has_undefined_rhs;
fun mk_unique_disc_def () =
let
val m = the_single ms;
val goal = mk_Trueprop_eq (mk_uu_eq (), the_single exist_xs_u_eq_ctrs);
val vars = Variable.add_free_names lthy goal [];
in
Goal.prove_sorry lthy vars [] goal (fn {context = ctxt, prems = _} =>
mk_unique_disc_def_tac ctxt m uexhaust_thm)
|> Thm.close_derivation \<^here>
end;
fun mk_alternate_disc_def k =
let
val goal =
mk_Trueprop_eq (alternate_disc_lhs (K (nth udiscs)) (3 - k),
nth exist_xs_u_eq_ctrs (k - 1));
val vars = Variable.add_free_names lthy goal [];
in
Goal.prove_sorry lthy vars [] goal (fn {context = ctxt, ...} =>
mk_alternate_disc_def_tac ctxt k (nth disc_defs (2 - k))
(nth distinct_thms (2 - k)) uexhaust_thm)
|> Thm.close_derivation \<^here>
end;
val has_alternate_disc_def =
exists (fn def => Thm.eq_thm_prop (def, alternate_disc_no_def)) disc_defs;
val nontriv_disc_defs = disc_defs
|> filter_out (member Thm.eq_thm_prop [unique_disc_no_def, alternate_disc_no_def,
refl]);
val disc_defs' =
map2 (fn k => fn def =>
if Thm.eq_thm_prop (def, unique_disc_no_def) then mk_unique_disc_def ()
else if Thm.eq_thm_prop (def, alternate_disc_no_def) then mk_alternate_disc_def k
else def) ks disc_defs;
val discD_thms = map (fn def => def RS iffD1) disc_defs';
val discI_thms =
map2 (fn m => fn def => funpow m (fn thm => exI RS thm) (def RS iffD2)) ms
disc_defs';
val not_discI_thms =
map2 (fn m => fn def => funpow m (fn thm => allI RS thm)
(unfold_thms lthy @{thms not_ex} (def RS @{thm ssubst[of _ _ Not]})))
ms disc_defs';
val (disc_thmss', disc_thmss) =
let
fun mk_thm discI _ [] = refl RS discI
| mk_thm _ not_discI [distinct] = distinct RS not_discI;
fun mk_thms discI not_discI distinctss = map (mk_thm discI not_discI) distinctss;
in
@{map 3} mk_thms discI_thms not_discI_thms distinct_thmsss' |> `transpose
end;
val nontriv_disc_thmss =
map2 (fn b => if is_disc_binding_valid b then I else K []) disc_bindings disc_thmss;
fun is_discI_triv b =
(n = 1 andalso Binding.is_empty b) orelse Binding.eq_name (b, equal_binding);
val nontriv_discI_thms =
flat (map2 (fn b => if is_discI_triv b then K [] else single) disc_bindings
discI_thms);
val (distinct_disc_thms, (distinct_disc_thmsss', distinct_disc_thmsss)) =
let
fun mk_goal [] = []
| mk_goal [((_, udisc), (_, udisc'))] =
[Logic.all u (Logic.mk_implies (HOLogic.mk_Trueprop udisc,
HOLogic.mk_Trueprop (HOLogic.mk_not udisc')))];
fun prove tac goal =
Goal.prove_sorry lthy [] [] goal (fn {context = ctxt, prems = _} => tac ctxt)
|> Thm.close_derivation \<^here>;
val half_pairss = mk_half_pairss (`I (ms ~~ discD_thms ~~ udiscs));
val half_goalss = map mk_goal half_pairss;
val half_thmss =
@{map 3} (fn [] => K (K []) | [goal] => fn [(((m, discD), _), _)] =>
fn disc_thm => [prove (fn ctxt =>
mk_half_distinct_disc_tac ctxt m discD disc_thm) goal])
half_goalss half_pairss (flat disc_thmss');
val other_half_goalss = map (mk_goal o map swap) half_pairss;
val other_half_thmss =
map2 (map2 (fn thm => prove (fn ctxt =>
mk_other_half_distinct_disc_tac ctxt thm))) half_thmss
other_half_goalss;
in
join_halves n half_thmss other_half_thmss ||> `transpose
|>> has_alternate_disc_def ? K []
end;
val exhaust_disc_thm =
let
fun mk_prem udisc = mk_imp_p [HOLogic.mk_Trueprop udisc];
val goal = fold_rev Logic.all [p, u] (mk_imp_p (map mk_prem udiscs));
in
Goal.prove_sorry lthy [] [] goal (fn {context = ctxt, prems = _} =>
mk_exhaust_disc_tac ctxt n exhaust_thm discI_thms)
|> Thm.close_derivation \<^here>
end;
val (safe_collapse_thms, all_collapse_thms) =
let
fun mk_goal m udisc usel_ctr =
let
val prem = HOLogic.mk_Trueprop udisc;
val concl = mk_Trueprop_eq ((usel_ctr, u) |> m = 0 ? swap);
in
(prem aconv concl, Logic.all u (Logic.mk_implies (prem, concl)))
end;
val (trivs, goals) = @{map 3} mk_goal ms udiscs usel_ctrs |> split_list;
val thms =
@{map 5} (fn m => fn discD => fn sel_thms => fn triv => fn goal =>
Goal.prove_sorry lthy [] [] goal (fn {context = ctxt, ...} =>
mk_collapse_tac ctxt m discD sel_thms ORELSE HEADGOAL (assume_tac ctxt))
|> Thm.close_derivation \<^here>
|> not triv ? perhaps (try (fn thm => refl RS thm)))
ms discD_thms sel_thmss trivs goals;
in
(map_filter (fn (true, _) => NONE | (false, thm) => SOME thm) (trivs ~~ thms),
thms)
end;
val swapped_all_collapse_thms =
map2 (fn m => fn thm => if m = 0 then thm else thm RS sym) ms all_collapse_thms;
val exhaust_sel_thm =
let
fun mk_prem usel_ctr = mk_imp_p [mk_Trueprop_eq (u, usel_ctr)];
val goal = fold_rev Logic.all [p, u] (mk_imp_p (map mk_prem usel_ctrs));
in
Goal.prove_sorry lthy [] [] goal (fn {context = ctxt, prems = _} =>
mk_exhaust_sel_tac ctxt n exhaust_disc_thm swapped_all_collapse_thms)
|> Thm.close_derivation \<^here>
end;
val expand_thm =
let
fun mk_prems k udisc usels vdisc vsels =
(if k = n then [] else [mk_Trueprop_eq (udisc, vdisc)]) @
(if null usels then
[]
else
[Logic.list_implies
(if n = 1 then [] else map HOLogic.mk_Trueprop [udisc, vdisc],
HOLogic.mk_Trueprop (Library.foldr1 HOLogic.mk_conj
(map2 (curry HOLogic.mk_eq) usels vsels)))]);
val goal =
Library.foldr Logic.list_implies
(@{map 5} mk_prems ks udiscs uselss vdiscs vselss, uv_eq);
val uncollapse_thms =
map2 (fn thm => fn [] => thm | _ => thm RS sym) all_collapse_thms uselss;
val vars = Variable.add_free_names lthy goal [];
in
Goal.prove_sorry lthy vars [] goal (fn {context = ctxt, prems = _} =>
mk_expand_tac ctxt n ms (inst_thm u exhaust_disc_thm)
(inst_thm v exhaust_disc_thm) uncollapse_thms distinct_disc_thmsss
distinct_disc_thmsss')
|> Thm.close_derivation \<^here>
end;
val (split_sel_thm, split_sel_asm_thm) =
let
val zss = map (K []) xss;
val goal = mk_split_goal usel_ctrs zss usel_fs;
val asm_goal = mk_split_asm_goal usel_ctrs zss usel_fs;
val thm = prove_split sel_thmss goal;
val asm_thm = prove_split_asm asm_goal thm;
in
(thm, asm_thm)
end;
val case_eq_if_thm =
let
val goal = mk_Trueprop_eq (ufcase, mk_IfN B udiscs usel_fs);
val vars = Variable.add_free_names lthy goal [];
in
Goal.prove_sorry lthy vars [] goal (fn {context = ctxt, ...} =>
mk_case_eq_if_tac ctxt n uexhaust_thm case_thms disc_thmss' sel_thmss)
|> Thm.close_derivation \<^here>
end;
val disc_eq_case_thms =
let
fun const_of_bool b = if b then \<^Const>\<open>True\<close> else \<^Const>\<open>False\<close>;
fun mk_case_args n = map_index (fn (k, argTs) =>
fold_rev Term.absdummy argTs (const_of_bool (n = k))) ctr_Tss;
val goals = map_index (fn (n, udisc) =>
mk_Trueprop_eq (udisc, list_comb (casexBool, mk_case_args n) $ u)) udiscs;
val goal = Logic.mk_conjunction_balanced goals;
val vars = Variable.add_free_names lthy goal [];
in
Goal.prove_sorry lthy vars [] goal
(fn {context = ctxt, ...} => mk_disc_eq_case_tac ctxt (Thm.cterm_of ctxt u)
exhaust_thm (flat nontriv_disc_thmss) distinct_thms case_thms)
|> Thm.close_derivation \<^here>
|> Conjunction.elim_balanced (length goals)
end;
in
(sel_defs, all_sel_thms, sel_thmss, nontriv_disc_defs, disc_thmss, nontriv_disc_thmss,
discI_thms, nontriv_discI_thms, distinct_disc_thms, distinct_disc_thmsss,
[exhaust_disc_thm], [exhaust_sel_thm], all_collapse_thms, safe_collapse_thms,
[expand_thm], [split_sel_thm], [split_sel_asm_thm], [case_eq_if_thm],
disc_eq_case_thms)
end;
val case_distrib_thm =
let
val args = @{map 2} (fn f => fn argTs =>
let val (args, _) = mk_Frees "x" argTs lthy in
fold_rev Term.lambda args (h $ list_comb (f, args))
end) fs ctr_Tss;
val goal = mk_Trueprop_eq (h $ ufcase, list_comb (casexC, args) $ u);
val vars = Variable.add_free_names lthy goal [];
in
Goal.prove_sorry lthy vars [] goal (fn {context = ctxt, ...} =>
mk_case_distrib_tac ctxt (Thm.cterm_of ctxt u) exhaust_thm case_thms)
|> Thm.close_derivation \<^here>
end;
- val exhaust_case_names_attr = Attrib.internal (K (Rule_Cases.case_names exhaust_cases));
- val cases_type_attr = Attrib.internal (K (Induct.cases_type fcT_name));
+ val exhaust_case_names_attr = Attrib.internal \<^here> (K (Rule_Cases.case_names exhaust_cases));
+ val cases_type_attr = Attrib.internal \<^here> (K (Induct.cases_type fcT_name));
val nontriv_disc_eq_thmss =
map (map (fn th => th RS @{thm eq_False[THEN iffD2]}
handle THM _ => th RS @{thm eq_True[THEN iffD2]})) nontriv_disc_thmss;
val anonymous_notes =
[(map (fn th => th RS notE) distinct_thms, safe_elim_attrs),
(flat nontriv_disc_eq_thmss, nitpicksimp_attrs)]
|> map (fn (thms, attrs) => ((Binding.empty, attrs), [(thms, [])]));
val notes =
[(caseN, case_thms, nitpicksimp_attrs @ simp_attrs),
(case_congN, [case_cong_thm], []),
(case_cong_weak_thmsN, [case_cong_weak_thm], cong_attrs),
(case_distribN, [case_distrib_thm], []),
(case_eq_ifN, case_eq_if_thms, []),
(collapseN, safe_collapse_thms, if ms = [0] then [] else simp_attrs),
(discN, flat nontriv_disc_thmss, simp_attrs),
(disc_eq_caseN, disc_eq_case_thms, []),
(discIN, nontriv_discI_thms, []),
(distinctN, distinct_thms, simp_attrs @ inductsimp_attrs),
(distinct_discN, distinct_disc_thms, dest_attrs),
(exhaustN, [exhaust_thm], [exhaust_case_names_attr, cases_type_attr]),
(exhaust_discN, exhaust_disc_thms, [exhaust_case_names_attr]),
(exhaust_selN, exhaust_sel_thms, [exhaust_case_names_attr]),
(expandN, expand_thms, []),
(injectN, inject_thms, iff_attrs @ inductsimp_attrs),
(nchotomyN, [nchotomy_thm], []),
(selN, all_sel_thms, nitpicksimp_attrs @ simp_attrs),
(splitN, [split_thm], []),
(split_asmN, [split_asm_thm], []),
(split_selN, split_sel_thms, []),
(split_sel_asmN, split_sel_asm_thms, []),
(split_selsN, split_sel_thms @ split_sel_asm_thms, []),
(splitsN, [split_thm, split_asm_thm], [])]
|> filter_out (null o #2)
|> map (fn (thmN, thms, attrs) =>
((qualify true (Binding.name thmN), attrs), [(thms, [])]));
val (noted, lthy') = lthy
|> Spec_Rules.add Binding.empty Spec_Rules.equational [casex] case_thms
|> fold (uncurry (Spec_Rules.add Binding.empty Spec_Rules.equational))
(AList.group (eq_list (op aconv)) (map (`(single o lhs_head_of)) all_sel_thms))
|> fold (uncurry (Spec_Rules.add Binding.empty Spec_Rules.equational))
(filter_out (null o snd) (map single discs ~~ nontriv_disc_eq_thmss))
- |> Local_Theory.declaration {syntax = false, pervasive = true}
+ |> Local_Theory.declaration {syntax = false, pervasive = true, pos = \<^here>}
(fn phi => Case_Translation.register
(Morphism.term phi casex) (map (Morphism.term phi) ctrs))
|> plugins code_plugin ?
(Code.declare_default_eqns (map (rpair true) (flat nontriv_disc_eq_thmss @ case_thms @ all_sel_thms))
- #> Local_Theory.declaration {syntax = false, pervasive = false}
+ #> Local_Theory.declaration {syntax = false, pervasive = false, pos = \<^here>}
(fn phi => Context.mapping
(add_ctr_code fcT_name (map (Morphism.typ phi) As)
(map (dest_Const o Morphism.term phi) ctrs) (Morphism.fact phi inject_thms)
(Morphism.fact phi distinct_thms) (Morphism.fact phi case_thms))
I))
|> Local_Theory.notes (anonymous_notes @ notes)
(* for "datatype_realizer.ML": *)
|>> name_noted_thms fcT_name exhaustN;
val ctr_sugar =
{kind = kind, T = fcT, ctrs = ctrs, casex = casex, discs = discs, selss = selss,
exhaust = exhaust_thm, nchotomy = nchotomy_thm, injects = inject_thms,
distincts = distinct_thms, case_thms = case_thms, case_cong = case_cong_thm,
case_cong_weak = case_cong_weak_thm, case_distribs = [case_distrib_thm],
split = split_thm, split_asm = split_asm_thm, disc_defs = nontriv_disc_defs,
disc_thmss = disc_thmss, discIs = discI_thms, disc_eq_cases = disc_eq_case_thms,
sel_defs = sel_defs, sel_thmss = sel_thmss, distinct_discsss = distinct_disc_thmsss,
exhaust_discs = exhaust_disc_thms, exhaust_sels = exhaust_sel_thms,
collapses = all_collapse_thms, expands = expand_thms, split_sels = split_sel_thms,
split_sel_asms = split_sel_asm_thms, case_eq_ifs = case_eq_if_thms}
|> morph_ctr_sugar (substitute_noted_thm noted);
in
(ctr_sugar, lthy' |> register_ctr_sugar plugins ctr_sugar)
end;
in
(goalss, after_qed, lthy)
end;
fun free_constructors kind tacss = (fn (goalss, after_qed, lthy) =>
map2 (map2 (Thm.close_derivation \<^here> oo Goal.prove_sorry lthy [] [])) goalss tacss
|> (fn thms => after_qed thms lthy)) oo prepare_free_constructors kind (K I) (K I);
fun free_constructors_cmd kind = (fn (goalss, after_qed, lthy) =>
Proof.theorem NONE (snd oo after_qed) (map (map (rpair [])) goalss) lthy) oo
prepare_free_constructors kind Plugin_Name.make_filter Syntax.read_term;
val parse_bound_term = Parse.binding --| \<^keyword>\<open>:\<close> -- Parse.term;
type ctr_options = Plugin_Name.filter * bool;
type ctr_options_cmd = (Proof.context -> Plugin_Name.filter) * bool;
val default_ctr_options : ctr_options = (Plugin_Name.default_filter, false);
val default_ctr_options_cmd : ctr_options_cmd = (K Plugin_Name.default_filter, false);
val parse_ctr_options =
Scan.optional (\<^keyword>\<open>(\<close> |-- Parse.list1
(Plugin_Name.parse_filter >> (apfst o K)
|| Parse.reserved "discs_sels" >> (apsnd o K o K true)) --|
\<^keyword>\<open>)\<close>
>> (fn fs => fold I fs default_ctr_options_cmd))
default_ctr_options_cmd;
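(* Usage note (sketch): the options are a parenthesised, comma-separated list before the
   binding, e.g. "(discs_sels)" to force generation of discriminators and selectors, or a
   plugin filter such as "(plugins del: code)" as accepted by Plugin_Name.parse_filter. *)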
fun parse_ctr_spec parse_ctr parse_arg =
parse_opt_binding_colon -- parse_ctr -- Scan.repeat parse_arg;
val parse_ctr_specs = Parse.enum1 "|" (parse_ctr_spec Parse.term Parse.binding);
val parse_sel_default_eqs = Scan.optional (\<^keyword>\<open>where\<close> |-- Parse.enum1 "|" Parse.prop) [];
val _ =
Outer_Syntax.local_theory_to_proof \<^command_keyword>\<open>free_constructors\<close>
"register an existing freely generated type's constructors"
(parse_ctr_options -- Parse.binding --| \<^keyword>\<open>for\<close> -- parse_ctr_specs
-- parse_sel_default_eqs
>> free_constructors_cmd Unknown);
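(* Usage sketch (hypothetical names t, C1, C2, sel2): the command takes a case-combinator
   binding, "for", constructor specifications separated by "|" (each an optional
   discriminator binding with colon, a constructor term, and selector bindings), and
   optional selector default equations after "where", e.g.

     free_constructors case_t for C1 | is_C2: C2 sel2
       where "sel2 C1 = undefined"

   As a local_theory_to_proof command it then opens a proof of the exhaustiveness,
   injectivity, and distinctness goals assembled in prepare_free_constructors. *)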
(** external views **)
(* document antiquotations *)
local
fun antiquote_setup binding co =
Document_Output.antiquotation_pretty_source_embedded binding
((Scan.ahead (Scan.lift Parse.not_eof) >> Token.pos_of) --
Args.type_name {proper = true, strict = true})
(fn ctxt => fn (pos, type_name) =>
let
fun err () =
error ("Bad " ^ Binding.name_of binding ^ ": " ^ quote type_name ^ Position.here pos);
in
(case ctr_sugar_of ctxt type_name of
NONE => err ()
| SOME {kind, T = T0, ctrs = ctrs0, ...} =>
let
val _ = if co = (kind = Codatatype) then () else err ();
val T = Logic.unvarifyT_global T0;
val ctrs = map Logic.unvarify_global ctrs0;
val pretty_typ_bracket = Syntax.pretty_typ (Config.put pretty_priority 1001 ctxt);
fun pretty_ctr ctr =
Pretty.block (Pretty.breaks (Syntax.pretty_term ctxt ctr ::
map pretty_typ_bracket (binder_types (fastype_of ctr))));
in
Pretty.block (Pretty.keyword1 (Binding.name_of binding) :: Pretty.brk 1 ::
Syntax.pretty_typ ctxt T :: Pretty.str " =" :: Pretty.brk 1 ::
flat (separate [Pretty.brk 1, Pretty.str "| "] (map (single o pretty_ctr) ctrs)))
end)
end);
in
val _ =
Theory.setup
(antiquote_setup \<^binding>\<open>datatype\<close> false #>
antiquote_setup \<^binding>\<open>codatatype\<close> true);
end;
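(* Usage note (sketch): in document text, @{datatype nat} should render roughly as
   "datatype nat = 0 | Suc nat", and @{codatatype} does the same for types registered
   with kind Codatatype; the exact layout depends on the pretty-printing context. *)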
(* theory export *)
val _ =
(Theory.setup o Thy_Info.add_presentation) (fn context => fn thy =>
if Export_Theory.export_enabled context then
let
val parents = map (Data.get o Context.Theory) (Theory.parents_of thy);
val datatypes =
(Data.get (Context.Theory thy), []) |-> Symtab.fold
(fn (name, (pos, {kind, T, ctrs, ...})) =>
if kind = Record orelse exists (fn tab => Symtab.defined tab name) parents then I
else
let
val pos_properties = Thy_Info.adjust_pos_properties context pos;
val typ = Logic.unvarifyT_global T;
val constrs = map Logic.unvarify_global ctrs;
val typargs = rev (fold Term.add_tfrees (Logic.mk_type typ :: constrs) []);
val constructors = map (fn t => (t, Term.type_of t)) constrs;
in
cons (pos_properties, (name, (kind = Codatatype, (typargs, (typ, constructors)))))
end);
in
if null datatypes then ()
else
Export_Theory.export_body thy "datatypes"
let open XML.Encode Term_XML.Encode in
list (pair properties (pair string (pair bool (pair (list (pair string sort))
(pair typ (list (pair (term (Sign.consts_of thy)) typ))))))) datatypes
end
end
else ());
end;
diff --git a/src/HOL/Tools/Function/function.ML b/src/HOL/Tools/Function/function.ML
--- a/src/HOL/Tools/Function/function.ML
+++ b/src/HOL/Tools/Function/function.ML
@@ -1,288 +1,288 @@
(* Title: HOL/Tools/Function/function.ML
Author: Alexander Krauss, TU Muenchen
Main entry points to the function package.
*)
signature FUNCTION =
sig
type info = Function_Common.info
val add_function: (binding * typ option * mixfix) list ->
Specification.multi_specs -> Function_Common.function_config ->
(Proof.context -> tactic) -> local_theory -> info * local_theory
val add_function_cmd: (binding * string option * mixfix) list ->
Specification.multi_specs_cmd -> Function_Common.function_config ->
(Proof.context -> tactic) -> bool -> local_theory -> info * local_theory
val function: (binding * typ option * mixfix) list ->
Specification.multi_specs -> Function_Common.function_config ->
local_theory -> Proof.state
val function_cmd: (binding * string option * mixfix) list ->
Specification.multi_specs_cmd -> Function_Common.function_config ->
bool -> local_theory -> Proof.state
val prove_termination: term option -> tactic -> local_theory ->
info * local_theory
val prove_termination_cmd: string option -> tactic -> local_theory ->
info * local_theory
val termination : term option -> local_theory -> Proof.state
val termination_cmd : string option -> local_theory -> Proof.state
val get_congs : Proof.context -> thm list
val get_info : Proof.context -> term -> info
end
structure Function : FUNCTION =
struct
open Function_Lib
open Function_Common
val simp_attribs =
@{attributes [simp, nitpick_simp]}
val psimp_attribs =
@{attributes [nitpick_psimp]}
fun note_derived (a, atts) (fname, thms) =
Local_Theory.note ((derived_name fname a, atts), thms) #> apfst snd
fun add_simps fnames post sort extra_qualify label mod_binding moreatts simps lthy =
let
val spec = post simps
|> map (apfst (apsnd (fn ats => moreatts @ ats)))
|> map (apfst (apfst extra_qualify))
val (saved_spec_simps, lthy') =
fold_map Local_Theory.note spec lthy
val saved_simps = maps snd saved_spec_simps
val simps_by_f = sort saved_simps
fun note fname simps =
Local_Theory.note ((mod_binding (derived_name fname label), []), simps) #> snd
in (saved_simps, fold2 note fnames simps_by_f lthy') end
fun prepare_function do_print prep fixspec eqns config lthy =
let
val ((fixes0, spec0), ctxt') = prep fixspec eqns lthy
val fixes = map (apfst (apfst Binding.name_of)) fixes0
val spec = map (fn (bnd, prop) => (bnd, [prop])) spec0
val (eqs, post, sort_cont, cnames) = get_preproc lthy config ctxt' fixes spec
val fnames = map (fst o fst) fixes0
val defname = Binding.conglomerate fnames;
val FunctionConfig {partials, default, ...} = config
val _ =
if is_some default
then legacy_feature "\"function (default)\" -- use 'partial_function' instead"
else ()
val ((goal_state, cont), lthy') =
Function_Mutual.prepare_function_mutual config defname fixes0 eqs lthy
fun afterqed [[proof]] lthy1 =
let
val result = cont lthy1 (Thm.close_derivation \<^here> proof)
val FunctionResult {fs, R, dom, psimps, simple_pinducts,
termination, domintros, cases, ...} = result
val pelims = Function_Elims.mk_partial_elim_rules lthy1 result
val concealed_partial = if partials then I else Binding.concealed
val addsmps = add_simps fnames post sort_cont
val (((((psimps', [pinducts']), [termination']), cases'), pelims'), lthy2) =
lthy1
|> addsmps (concealed_partial o Binding.qualify false "partial")
"psimps" concealed_partial psimp_attribs psimps
||>> Local_Theory.notes [((concealed_partial (derived_name defname "pinduct"), []),
simple_pinducts |> map (fn th => ([th],
[Attrib.case_names cnames, Attrib.consumes (1 - Thm.nprems_of th)] @
@{attributes [induct pred]})))]
||>> (apfst snd o
Local_Theory.note
((Binding.concealed (derived_name defname "termination"), []), [termination]))
||>> fold_map (note_derived ("cases", [Attrib.case_names cnames]))
(fnames ~~ map single cases)
||>> fold_map (note_derived ("pelims", [Attrib.consumes 1, Attrib.constraints 1]))
(fnames ~~ pelims)
||> (case domintros of NONE => I | SOME thms =>
Local_Theory.note ((derived_name defname "domintros", []), thms) #> snd)
val info =
{ add_simps=addsmps, fnames=fnames, case_names=cnames, psimps=psimps',
pinducts=snd pinducts', simps=NONE, inducts=NONE, termination=termination', totality=NONE,
fs=fs, R=R, dom=dom, defname=defname, is_partial=true, cases=flat cases',
pelims=pelims',elims=NONE}
val _ =
Proof_Display.print_consts do_print (Position.thread_data ()) lthy2
(K false) (map fst fixes)
in
(info,
- lthy2 |> Local_Theory.declaration {syntax = false, pervasive = false}
+ lthy2 |> Local_Theory.declaration {syntax = false, pervasive = false, pos = \<^here>}
(fn phi => add_function_data (transform_function_data phi info)))
end
in
((goal_state, afterqed), lthy')
end
fun gen_add_function do_print prep fixspec eqns config tac lthy =
let
val ((goal_state, afterqed), lthy') =
prepare_function do_print prep fixspec eqns config lthy
val pattern_thm =
case SINGLE (tac lthy') goal_state of
NONE => error "pattern completeness and compatibility proof failed"
| SOME st => Goal.finish lthy' st
in
lthy'
|> afterqed [[pattern_thm]]
end
val add_function = gen_add_function false Specification.check_multi_specs
fun add_function_cmd a b c d int = gen_add_function int Specification.read_multi_specs a b c d
fun gen_function do_print prep fixspec eqns config lthy =
let
val ((goal_state, afterqed), lthy') =
prepare_function do_print prep fixspec eqns config lthy
in
lthy'
|> Proof.theorem NONE (snd oo afterqed) [[(Logic.unprotect (Thm.concl_of goal_state), [])]]
|> Proof.refine_singleton (Method.primitive_text (K (K goal_state)))
end
val function = gen_function false Specification.check_multi_specs
fun function_cmd a b c int = gen_function int Specification.read_multi_specs a b c
fun prepare_termination_proof prep_binding prep_term raw_term_opt lthy =
let
val term_opt = Option.map (prep_term lthy) raw_term_opt
val info =
(case term_opt of
SOME t =>
(case import_function_data t lthy of
SOME info => info
| NONE => error ("Not a function: " ^ quote (Syntax.string_of_term lthy t)))
| NONE =>
(case import_last_function lthy of
SOME info => info
| NONE => error "Not a function"))
val { termination, fs, R, add_simps, case_names, psimps,
pinducts, defname, fnames, cases, dom, pelims, ...} = info
val domT = domain_type (fastype_of R)
val goal = HOLogic.mk_Trueprop (HOLogic.mk_all ("x", domT, mk_acc domT R $ Free ("x", domT)))
fun afterqed [[raw_totality]] lthy1 =
let
val totality = Thm.close_derivation \<^here> raw_totality
val remove_domain_condition =
full_simplify (put_simpset HOL_basic_ss lthy1
addsimps [totality, @{thm True_implies_equals}])
val tsimps = map remove_domain_condition psimps
val tinduct = map remove_domain_condition pinducts
val telims = map (map remove_domain_condition) pelims
in
lthy1
|> add_simps prep_binding "simps" prep_binding simp_attribs tsimps
||> Code.declare_default_eqns (map (rpair true) tsimps)
||>> Local_Theory.note
((prep_binding (derived_name defname "induct"), [Attrib.case_names case_names]), tinduct)
||>> fold_map (note_derived ("elims", [Attrib.consumes 1, Attrib.constraints 1]))
(map prep_binding fnames ~~ telims)
|-> (fn ((simps,(_,inducts)), elims) => fn lthy2 =>
let val info' = { is_partial=false, defname=defname, fnames=fnames, add_simps=add_simps,
case_names=case_names, fs=fs, R=R, dom=dom, psimps=psimps, pinducts=pinducts,
simps=SOME simps, inducts=SOME inducts, termination=termination, totality=SOME totality,
cases=cases, pelims=pelims, elims=SOME elims}
|> transform_function_data (Morphism.binding_morphism "" prep_binding)
in
(info',
lthy2
- |> Local_Theory.declaration {syntax = false, pervasive = false}
+ |> Local_Theory.declaration {syntax = false, pervasive = false, pos = \<^here>}
(fn phi => add_function_data (transform_function_data phi info'))
|> Spec_Rules.add Binding.empty Spec_Rules.equational_recdef fs tsimps)
end)
end
in
(goal, afterqed, termination)
end
fun gen_prove_termination prep_term raw_term_opt tac lthy =
let
val (goal, afterqed, termination) =
prepare_termination_proof I prep_term raw_term_opt lthy
val totality = Goal.prove lthy [] [] goal (K tac)
in
afterqed [[totality]] lthy
end
val prove_termination = gen_prove_termination Syntax.check_term
val prove_termination_cmd = gen_prove_termination Syntax.read_term
fun gen_termination prep_term raw_term_opt lthy =
let
val (goal, afterqed, termination) =
prepare_termination_proof Binding.reset_pos prep_term raw_term_opt lthy
in
lthy
|> Proof_Context.note_thms ""
((Binding.empty, [Context_Rules.rule_del]), [([allI], [])]) |> snd
|> Proof_Context.note_thms ""
((Binding.empty, [Context_Rules.intro_bang (SOME 1)]), [([allI], [])]) |> snd
|> Proof_Context.note_thms ""
((Binding.name "termination", [Context_Rules.intro_bang (SOME 0)]),
[([Goal.norm_result lthy termination], [])]) |> snd
|> Proof.theorem NONE (snd oo afterqed) [[(goal, [])]]
end
val termination = gen_termination Syntax.check_term
val termination_cmd = gen_termination Syntax.read_term
(* Datatype hook to declare datatype congs as "function_congs" *)
fun add_case_cong n thy =
let
val cong = #case_cong (Old_Datatype_Data.the_info thy n)
|> safe_mk_meta_eq
in
Context.theory_map (Function_Context_Tree.add_function_cong cong) thy
end
val _ = Theory.setup (Old_Datatype_Data.interpretation (K (fold add_case_cong)))
(* get info *)
val get_congs = Function_Context_Tree.get_function_congs
fun get_info ctxt t = Function_Common.retrieve_function_data ctxt t
|> the_single |> snd
(* outer syntax *)
val _ =
Outer_Syntax.local_theory_to_proof' \<^command_keyword>\<open>function\<close>
"define general recursive functions"
(function_parser default_config
>> (fn (config, (fixes, specs)) => function_cmd fixes specs config))
val _ =
Outer_Syntax.local_theory_to_proof \<^command_keyword>\<open>termination\<close>
"prove termination of a recursive function"
(Scan.option Parse.term >> termination_cmd)
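(* Usage sketch (hypothetical function f): the two commands are typically combined as

     function f :: "nat => nat" where
       "f 0 = 0"
     | "f (Suc n) = f n"
       by pat_completeness auto
     termination by lexicographic_order

   where the "by" step discharges the pattern-completeness/compatibility goal set up in
   prepare_function, and "termination" opens the well-foundedness goal built in
   prepare_termination_proof. *)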
end
diff --git a/src/HOL/Tools/Function/partial_function.ML b/src/HOL/Tools/Function/partial_function.ML
--- a/src/HOL/Tools/Function/partial_function.ML
+++ b/src/HOL/Tools/Function/partial_function.ML
@@ -1,322 +1,322 @@
(* Title: HOL/Tools/Function/partial_function.ML
Author: Alexander Krauss, TU Muenchen
Partial function definitions based on least fixed points in ccpos.
*)
signature PARTIAL_FUNCTION =
sig
- val init: string -> term -> term -> thm -> thm -> thm option -> declaration
+ val init: string -> term -> term -> thm -> thm -> thm option -> Morphism.declaration
val mono_tac: Proof.context -> int -> tactic
val add_partial_function: string -> (binding * typ option * mixfix) list ->
Attrib.binding * term -> local_theory -> (term * thm) * local_theory
val add_partial_function_cmd: string -> (binding * string option * mixfix) list ->
Attrib.binding * string -> local_theory -> (term * thm) * local_theory
val transform_result: morphism -> term * thm -> term * thm
end;
structure Partial_Function: PARTIAL_FUNCTION =
struct
open Function_Lib
(*** Context Data ***)
datatype setup_data = Setup_Data of
{fixp: term,
mono: term,
fixp_eq: thm,
fixp_induct: thm,
fixp_induct_user: thm option};
fun transform_setup_data phi (Setup_Data {fixp, mono, fixp_eq, fixp_induct, fixp_induct_user}) =
let
val term = Morphism.term phi;
val thm = Morphism.thm phi;
in
Setup_Data
{fixp = term fixp, mono = term mono, fixp_eq = thm fixp_eq,
fixp_induct = thm fixp_induct, fixp_induct_user = Option.map thm fixp_induct_user}
end;
structure Modes = Generic_Data
(
type T = setup_data Symtab.table;
val empty = Symtab.empty;
fun merge data = Symtab.merge (K true) data;
)
fun init mode fixp mono fixp_eq fixp_induct fixp_induct_user phi =
let
val data' =
Setup_Data
{fixp = fixp, mono = mono, fixp_eq = fixp_eq,
fixp_induct = fixp_induct, fixp_induct_user = fixp_induct_user}
|> transform_setup_data (phi $> Morphism.trim_context_morphism);
in Modes.map (Symtab.update (mode, data')) end;
val known_modes = Symtab.keys o Modes.get o Context.Proof;
fun lookup_mode ctxt =
Symtab.lookup (Modes.get (Context.Proof ctxt))
#> Option.map (transform_setup_data (Morphism.transfer_morphism' ctxt));
(*** Automated monotonicity proofs ***)
(*rewrite conclusion with k-th assumption*)
fun rewrite_with_asm_tac ctxt k =
Subgoal.FOCUS (fn {context = ctxt', prems, ...} =>
Local_Defs.unfold0_tac ctxt' [nth prems k]) ctxt;
fun dest_case ctxt t =
case strip_comb t of
(Const (case_comb, _), args) =>
(case Ctr_Sugar.ctr_sugar_of_case ctxt case_comb of
NONE => NONE
| SOME {case_thms, ...} =>
let
val lhs = Thm.prop_of (hd case_thms)
|> HOLogic.dest_Trueprop |> HOLogic.dest_eq |> fst;
val arity = length (snd (strip_comb lhs));
val conv = funpow (length args - arity) Conv.fun_conv
(Conv.rewrs_conv (map mk_meta_eq case_thms));
in
SOME (nth args (arity - 1), conv)
end)
| _ => NONE;
(*split on case expressions*)
val split_cases_tac = Subgoal.FOCUS_PARAMS (fn {context = ctxt, ...} =>
SUBGOAL (fn (t, i) => case t of
_ $ (_ $ Abs (_, _, body)) =>
(case dest_case ctxt body of
NONE => no_tac
| SOME (arg, conv) =>
let open Conv in
if Term.is_open arg then no_tac
else ((DETERM o Induct.cases_tac ctxt false [[SOME arg]] NONE [])
THEN_ALL_NEW (rewrite_with_asm_tac ctxt 0)
THEN_ALL_NEW eresolve_tac ctxt @{thms thin_rl}
THEN_ALL_NEW (CONVERSION
(params_conv ~1 (fn ctxt' =>
arg_conv (arg_conv (abs_conv (K conv) ctxt'))) ctxt))) i
end)
| _ => no_tac) 1);
(*monotonicity proof: apply rules + split case expressions*)
fun mono_tac ctxt =
K (Local_Defs.unfold0_tac ctxt [@{thm curry_def}])
THEN' (TRY o REPEAT_ALL_NEW
(resolve_tac ctxt (rev (Named_Theorems.get ctxt \<^named_theorems>\<open>partial_function_mono\<close>))
ORELSE' split_cases_tac ctxt));
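(* Note (sketch): the rules used by mono_tac come from the named theorem collection
   "partial_function_mono", so additional combinators can typically be supported by
   proving a suitable monotonicity rule and declaring it with [partial_function_mono];
   split_cases_tac additionally handles case combinators registered via Ctr_Sugar. *)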
(*** Auxiliary functions ***)
(*Returns t $ u, but instantiates the type of t to make the
application type correct*)
fun apply_inst ctxt t u =
let
val thy = Proof_Context.theory_of ctxt;
val T = domain_type (fastype_of t);
val T' = fastype_of u;
val subst = Sign.typ_match thy (T, T') Vartab.empty
handle Type.TYPE_MATCH => raise TYPE ("apply_inst", [T, T'], [t, u])
in
map_types (Envir.norm_type subst) t $ u
end;
fun head_conv cv ct =
if can Thm.dest_comb ct then Conv.fun_conv (head_conv cv) ct else cv ct;
(*** currying transformation ***)
fun curry_const (A, B, C) =
Const (\<^const_name>\<open>Product_Type.curry\<close>,
[HOLogic.mk_prodT (A, B) --> C, A, B] ---> C);
fun mk_curry f =
case fastype_of f of
Type ("fun", [Type (_, [S, T]), U]) =>
curry_const (S, T, U) $ f
| T => raise TYPE ("mk_curry", [T], [f]);
(* iterated versions. Nonstandard left-nested tuples arise naturally
from "split o split o split"*)
fun curry_n arity = funpow (arity - 1) mk_curry;
fun uncurry_n arity = funpow (arity - 1) HOLogic.mk_case_prod;
val curry_uncurry_ss =
simpset_of (put_simpset HOL_basic_ss \<^context>
addsimps [@{thm Product_Type.curry_case_prod}, @{thm Product_Type.case_prod_curry}])
val split_conv_ss =
simpset_of (put_simpset HOL_basic_ss \<^context>
addsimps [@{thm Product_Type.split_conv}]);
val curry_K_ss =
simpset_of (put_simpset HOL_basic_ss \<^context>
addsimps [@{thm Product_Type.curry_K}]);
(* instantiate generic fixpoint induction and eliminate the canonical assumptions;
curry induction predicate *)
fun specialize_fixp_induct ctxt fT fT_uc curry uncurry mono_thm f_def rule =
let
val ([P], ctxt') = Variable.variant_fixes ["P"] ctxt
val P_inst = Abs ("f", fT_uc, Free (P, fT --> HOLogic.boolT) $ (curry $ Bound 0))
in
(* FIXME ctxt vs. ctxt' (!?) *)
rule
|> infer_instantiate' ctxt
((map o Option.map) (Thm.cterm_of ctxt) [SOME uncurry, NONE, SOME curry, NONE, SOME P_inst])
|> Tactic.rule_by_tactic ctxt
(Simplifier.simp_tac (put_simpset curry_uncurry_ss ctxt) 3 (* discharge U (C f) = f *)
THEN Simplifier.simp_tac (put_simpset curry_K_ss ctxt) 4 (* simplify bot case *)
THEN Simplifier.full_simp_tac (put_simpset curry_uncurry_ss ctxt) 5) (* simplify induction step *)
|> (fn thm => thm OF [mono_thm, f_def])
|> Conv.fconv_rule (Conv.concl_conv ~1 (* simplify conclusion *)
(Raw_Simplifier.rewrite ctxt false [mk_meta_eq @{thm Product_Type.curry_case_prod}]))
|> singleton (Variable.export ctxt' ctxt)
end
fun mk_curried_induct args ctxt inst_rule =
let
val cert = Thm.cterm_of ctxt
(* FIXME ctxt vs. ctxt' (!?) *)
val ([P], ctxt') = Variable.variant_fixes ["P"] ctxt
val split_paired_all_conv =
Conv.every_conv (replicate (length args - 1) (Conv.rewr_conv @{thm split_paired_all}))
val split_params_conv =
Conv.params_conv ~1 (fn _ => Conv.implies_conv split_paired_all_conv Conv.all_conv)
val (P_var, x_var) =
Thm.prop_of inst_rule |> Logic.strip_imp_concl |> HOLogic.dest_Trueprop
|> strip_comb |> apsnd hd
|> apply2 dest_Var
val P_rangeT = range_type (snd P_var)
val PT = map (snd o dest_Free) args ---> P_rangeT
val x_inst = cert (foldl1 HOLogic.mk_prod args)
val P_inst = cert (uncurry_n (length args) (Free (P, PT)))
val inst_rule' = inst_rule
|> Tactic.rule_by_tactic ctxt
(Simplifier.simp_tac (put_simpset curry_uncurry_ss ctxt) 4
THEN Simplifier.simp_tac (put_simpset curry_uncurry_ss ctxt) 3
THEN CONVERSION (split_params_conv ctxt
then_conv (Conv.forall_conv (K split_paired_all_conv) ctxt)) 3)
|> Thm.instantiate (TVars.empty, Vars.make2 (P_var, P_inst) (x_var, x_inst))
|> Simplifier.full_simplify (put_simpset split_conv_ss ctxt)
|> singleton (Variable.export ctxt' ctxt)
in
inst_rule'
end;
(*** partial_function definition ***)
fun transform_result phi (t, thm) = (Morphism.term phi t, Morphism.thm phi thm);
fun gen_add_partial_function prep mode fixes_raw eqn_raw lthy =
let
val setup_data = the (lookup_mode lthy mode)
handle Option.Option => error (cat_lines ["Unknown mode " ^ quote mode ^ ".",
"Known modes are " ^ commas_quote (known_modes lthy) ^ "."]);
val Setup_Data {fixp, mono, fixp_eq, fixp_induct, fixp_induct_user} = setup_data;
val ((fixes, [(eq_abinding, eqn)]), _) = prep fixes_raw [(eqn_raw, [], [])] lthy;
val ((_, plain_eqn), args_ctxt) = Variable.focus NONE eqn lthy;
val ((f_binding, fT), mixfix) = the_single fixes;
val f_bname = Binding.name_of f_binding;
fun note_qualified (name, thms) =
Local_Theory.note ((derived_name f_binding name, []), thms) #> snd
val (lhs, rhs) = HOLogic.dest_eq (HOLogic.dest_Trueprop plain_eqn);
val (head, args) = strip_comb lhs;
val argnames = map (fst o dest_Free) args;
val F = fold_rev lambda (head :: args) rhs;
val arity = length args;
val (aTs, bTs) = chop arity (binder_types fT);
val tupleT = foldl1 HOLogic.mk_prodT aTs;
val fT_uc = tupleT :: bTs ---> body_type fT;
val f_uc = Var ((f_bname, 0), fT_uc);
val x_uc = Var (("x", 1), tupleT);
val uncurry = lambda head (uncurry_n arity head);
val curry = lambda f_uc (curry_n arity f_uc);
val F_uc =
lambda f_uc (uncurry_n arity (F $ curry_n arity f_uc));
val mono_goal = apply_inst lthy mono (lambda f_uc (F_uc $ f_uc $ x_uc))
|> HOLogic.mk_Trueprop
|> Logic.all x_uc;
val mono_thm = Goal.prove_internal lthy [] (Thm.cterm_of lthy mono_goal)
(K (mono_tac lthy 1))
val inst_mono_thm = Thm.forall_elim (Thm.cterm_of lthy x_uc) mono_thm
val f_def_rhs = curry_n arity (apply_inst lthy fixp F_uc);
val f_def_binding =
Thm.make_def_binding (Config.get lthy Function_Lib.function_internals) f_binding
val ((f, (_, f_def)), lthy') =
Local_Theory.define ((f_binding, mixfix), ((f_def_binding, []), f_def_rhs)) lthy;
val eqn = HOLogic.mk_eq (list_comb (f, args),
Term.betapplys (F, f :: args))
|> HOLogic.mk_Trueprop;
val unfold =
(infer_instantiate' lthy' (map (SOME o Thm.cterm_of lthy') [uncurry, F, curry]) fixp_eq
OF [inst_mono_thm, f_def])
|> Tactic.rule_by_tactic lthy' (Simplifier.simp_tac (put_simpset curry_uncurry_ss lthy') 1);
val specialized_fixp_induct =
specialize_fixp_induct lthy' fT fT_uc curry uncurry inst_mono_thm f_def fixp_induct
|> Drule.rename_bvars' (map SOME (f_bname :: f_bname :: argnames));
val mk_raw_induct =
infer_instantiate' args_ctxt
((map o Option.map) (Thm.cterm_of args_ctxt) [SOME uncurry, NONE, SOME curry])
#> mk_curried_induct args args_ctxt
#> singleton (Variable.export args_ctxt lthy')
#> (fn thm => infer_instantiate' lthy'
[SOME (Thm.cterm_of lthy' F)] thm OF [inst_mono_thm, f_def])
#> Drule.rename_bvars' (map SOME (f_bname :: argnames @ argnames))
val raw_induct = Option.map mk_raw_induct fixp_induct_user
val rec_rule =
let open Conv in
Goal.prove lthy' (map (fst o dest_Free) args) [] eqn (fn _ =>
CONVERSION ((arg_conv o arg1_conv o head_conv o rewr_conv) (mk_meta_eq unfold)) 1
THEN resolve_tac lthy' @{thms refl} 1)
end;
val ((_, [rec_rule']), lthy'') = lthy' |> Local_Theory.note (eq_abinding, [rec_rule])
in
lthy''
|> Spec_Rules.add Binding.empty Spec_Rules.equational_recdef [f] [rec_rule']
|> note_qualified ("simps", [rec_rule'])
|> note_qualified ("mono", [mono_thm])
|> (case raw_induct of NONE => I | SOME thm => note_qualified ("raw_induct", [thm]))
|> note_qualified ("fixp_induct", [specialized_fixp_induct])
|> pair (f, rec_rule')
end;
val add_partial_function = gen_add_partial_function Specification.check_multi_specs;
val add_partial_function_cmd = gen_add_partial_function Specification.read_multi_specs;
val mode = \<^keyword>\<open>(\<close> |-- Parse.name --| \<^keyword>\<open>)\<close>;
val _ =
Outer_Syntax.local_theory \<^command_keyword>\<open>partial_function\<close> "define partial function"
((mode -- (Parse.vars -- (Parse.where_ |-- Parse_Spec.simple_spec)))
>> (fn (mode, (vars, spec)) => add_partial_function_cmd mode vars spec #> #2));
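(* Usage sketch (hypothetical function; assumes the "option" mode registered elsewhere in
   Isabelle/HOL via init):

     partial_function (option) collatz :: "nat => nat option" where
       "collatz n = (if n <= 1 then Some n
                     else if even n then collatz (n div 2) else collatz (3 * n + 1))"

   No termination proof is required; the constant is introduced as a least fixed point and
   the recursion equation is noted as collatz.simps via note_qualified above. *)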
end;
diff --git a/src/HOL/Tools/Lifting/lifting_def.ML b/src/HOL/Tools/Lifting/lifting_def.ML
--- a/src/HOL/Tools/Lifting/lifting_def.ML
+++ b/src/HOL/Tools/Lifting/lifting_def.ML
@@ -1,675 +1,675 @@
(* Title: HOL/Tools/Lifting/lifting_def.ML
Author: Ondrej Kuncar
Definitions for constants on quotient types.
*)
signature LIFTING_DEF =
sig
datatype code_eq = UNKNOWN_EQ | NONE_EQ | ABS_EQ | REP_EQ
type lift_def
val rty_of_lift_def: lift_def -> typ
val qty_of_lift_def: lift_def -> typ
val rhs_of_lift_def: lift_def -> term
val lift_const_of_lift_def: lift_def -> term
val def_thm_of_lift_def: lift_def -> thm
val rsp_thm_of_lift_def: lift_def -> thm
val abs_eq_of_lift_def: lift_def -> thm
val rep_eq_of_lift_def: lift_def -> thm option
val code_eq_of_lift_def: lift_def -> code_eq
val transfer_rules_of_lift_def: lift_def -> thm list
val morph_lift_def: morphism -> lift_def -> lift_def
val inst_of_lift_def: Proof.context -> typ -> lift_def -> lift_def
val mk_lift_const_of_lift_def: typ -> lift_def -> term
type config = { notes: bool }
val map_config: (bool -> bool) -> config -> config
val default_config: config
val generate_parametric_transfer_rule:
Proof.context -> thm -> thm -> thm
val add_lift_def:
config -> binding * mixfix -> typ -> term -> thm -> thm list -> local_theory ->
lift_def * local_theory
val prepare_lift_def:
(binding * mixfix -> typ -> term -> thm -> thm list -> Proof.context ->
lift_def * local_theory) ->
binding * mixfix -> typ -> term -> thm list -> local_theory ->
term option * (thm -> Proof.context -> lift_def * local_theory)
val gen_lift_def:
(binding * mixfix -> typ -> term -> thm -> thm list -> local_theory ->
lift_def * local_theory) ->
binding * mixfix -> typ -> term -> (Proof.context -> tactic) -> thm list ->
local_theory -> lift_def * local_theory
val lift_def:
config -> binding * mixfix -> typ -> term -> (Proof.context -> tactic) -> thm list ->
local_theory -> lift_def * local_theory
val can_generate_code_cert: thm -> bool
end
structure Lifting_Def: LIFTING_DEF =
struct
open Lifting_Util
infix 0 MRSL
datatype code_eq = UNKNOWN_EQ | NONE_EQ | ABS_EQ | REP_EQ
datatype lift_def = LIFT_DEF of {
rty: typ,
qty: typ,
rhs: term,
lift_const: term,
def_thm: thm,
rsp_thm: thm,
abs_eq: thm,
rep_eq: thm option,
code_eq: code_eq,
transfer_rules: thm list
};
fun rep_lift_def (LIFT_DEF lift_def) = lift_def;
val rty_of_lift_def = #rty o rep_lift_def;
val qty_of_lift_def = #qty o rep_lift_def;
val rhs_of_lift_def = #rhs o rep_lift_def;
val lift_const_of_lift_def = #lift_const o rep_lift_def;
val def_thm_of_lift_def = #def_thm o rep_lift_def;
val rsp_thm_of_lift_def = #rsp_thm o rep_lift_def;
val abs_eq_of_lift_def = #abs_eq o rep_lift_def;
val rep_eq_of_lift_def = #rep_eq o rep_lift_def;
val code_eq_of_lift_def = #code_eq o rep_lift_def;
val transfer_rules_of_lift_def = #transfer_rules o rep_lift_def;
fun mk_lift_def rty qty rhs lift_const def_thm rsp_thm abs_eq rep_eq code_eq transfer_rules =
LIFT_DEF {rty = rty, qty = qty,
rhs = rhs, lift_const = lift_const,
def_thm = def_thm, rsp_thm = rsp_thm, abs_eq = abs_eq, rep_eq = rep_eq,
code_eq = code_eq, transfer_rules = transfer_rules };
fun map_lift_def f1 f2 f3 f4 f5 f6 f7 f8 f9 f10
(LIFT_DEF {rty = rty, qty = qty, rhs = rhs, lift_const = lift_const,
def_thm = def_thm, rsp_thm = rsp_thm, abs_eq = abs_eq, rep_eq = rep_eq, code_eq = code_eq,
transfer_rules = transfer_rules }) =
LIFT_DEF {rty = f1 rty, qty = f2 qty, rhs = f3 rhs, lift_const = f4 lift_const,
def_thm = f5 def_thm, rsp_thm = f6 rsp_thm, abs_eq = f7 abs_eq, rep_eq = f8 rep_eq,
code_eq = f9 code_eq, transfer_rules = f10 transfer_rules }
fun morph_lift_def phi =
let
val mtyp = Morphism.typ phi
val mterm = Morphism.term phi
val mthm = Morphism.thm phi
in
map_lift_def mtyp mtyp mterm mterm mthm mthm mthm (Option.map mthm) I (map mthm)
end
fun mk_inst_of_lift_def qty lift_def = Vartab.empty |> Type.raw_match (qty_of_lift_def lift_def, qty)
fun mk_lift_const_of_lift_def qty lift_def = Envir.subst_term_types (mk_inst_of_lift_def qty lift_def)
(lift_const_of_lift_def lift_def)
fun inst_of_lift_def ctxt qty lift_def =
let
val instT =
Vartab.fold (fn (a, (S, T)) => cons ((a, S), Thm.ctyp_of ctxt T))
(mk_inst_of_lift_def qty lift_def) []
val phi = Morphism.instantiate_morphism (TVars.make instT, Vars.empty)
in morph_lift_def phi lift_def end
(* Config *)
type config = { notes: bool };
fun map_config f1 { notes = notes } = { notes = f1 notes }
val default_config = { notes = true };
(* Reflexivity prover *)
fun mono_eq_prover ctxt prop =
let
val refl_rules = Lifting_Info.get_reflexivity_rules ctxt
val transfer_rules = Transfer.get_transfer_raw ctxt
fun main_tac i = (REPEAT_ALL_NEW (DETERM o resolve_tac ctxt refl_rules) THEN_ALL_NEW
(REPEAT_ALL_NEW (DETERM o resolve_tac ctxt transfer_rules))) i
in
SOME (Goal.prove ctxt [] [] prop (K (main_tac 1)))
handle ERROR _ => NONE
end
fun try_prove_refl_rel ctxt rel =
let
fun mk_ge_eq x =
let
val T = fastype_of x
in
Const (\<^const_name>\<open>less_eq\<close>, T --> T --> HOLogic.boolT) $
(Const (\<^const_name>\<open>HOL.eq\<close>, T)) $ x
end;
val goal = HOLogic.mk_Trueprop (mk_ge_eq rel);
in
mono_eq_prover ctxt goal
end;
fun try_prove_reflexivity ctxt prop =
let
val cprop = Thm.cterm_of ctxt prop
val rule = @{thm ge_eq_refl}
val concl_pat = Drule.strip_imp_concl (Thm.cprop_of rule)
val insts = Thm.first_order_match (concl_pat, cprop)
val rule = Drule.instantiate_normalize insts rule
val prop = hd (Thm.prems_of rule)
in
case mono_eq_prover ctxt prop of
SOME thm => SOME (thm RS rule)
| NONE => NONE
end
(*
Generates a parametrized transfer rule.
transfer_rule - of the form T t f
parametric_transfer_rule - of the form par_R t' t
Result: par_T t' f; after substituting op= for those relations in par_R that relate
a type constructor to the same type constructor, it is the merge of (par_R' OO T) t' f
obtained via Lifting_Term.merge_transfer_relations
*)
fun generate_parametric_transfer_rule ctxt0 transfer_rule parametric_transfer_rule =
let
fun preprocess ctxt thm =
let
val tm = (strip_args 2 o HOLogic.dest_Trueprop o Thm.concl_of) thm;
val param_rel = (snd o dest_comb o fst o dest_comb) tm;
val free_vars = Term.add_vars param_rel [];
fun make_subst (xi, typ) subst =
let
val [rty, rty'] = binder_types typ
in
if Term.is_TVar rty andalso Term.is_Type rty' then
(xi, Thm.cterm_of ctxt (HOLogic.eq_const rty')) :: subst
else
subst
end;
val inst_thm = infer_instantiate ctxt (fold make_subst free_vars []) thm;
in
Conv.fconv_rule
((Conv.concl_conv (Thm.nprems_of inst_thm) o
HOLogic.Trueprop_conv o Conv.fun2_conv o Conv.arg1_conv)
(Raw_Simplifier.rewrite ctxt false (Transfer.get_sym_relator_eq ctxt))) inst_thm
end
fun inst_relcomppI ctxt ant1 ant2 =
let
val t1 = (HOLogic.dest_Trueprop o Thm.concl_of) ant1
val t2 = (HOLogic.dest_Trueprop o Thm.prop_of) ant2
val fun1 = Thm.cterm_of ctxt (strip_args 2 t1)
val args1 = map (Thm.cterm_of ctxt) (get_args 2 t1)
val fun2 = Thm.cterm_of ctxt (strip_args 2 t2)
val args2 = map (Thm.cterm_of ctxt) (get_args 1 t2)
val relcomppI = Drule.incr_indexes2 ant1 ant2 @{thm relcomppI}
val vars = map #1 (rev (Term.add_vars (Thm.prop_of relcomppI) []))
in
infer_instantiate ctxt (vars ~~ ([fun1] @ args1 @ [fun2] @ args2)) relcomppI
end
fun zip_transfer_rules ctxt thm =
let
fun mk_POS ty = Const (\<^const_name>\<open>POS\<close>, ty --> ty --> HOLogic.boolT)
val rel = (Thm.dest_fun2 o Thm.dest_arg o Thm.cprop_of) thm
val typ = Thm.typ_of_cterm rel
val POS_const = Thm.cterm_of ctxt (mk_POS typ)
val var = Thm.cterm_of ctxt (Var (("X", Thm.maxidx_of_cterm rel + 1), typ))
val goal =
Thm.apply (Thm.cterm_of ctxt HOLogic.Trueprop) (Thm.apply (Thm.apply POS_const rel) var)
in
[Lifting_Term.merge_transfer_relations ctxt goal, thm] MRSL @{thm POS_apply}
end
val thm =
inst_relcomppI ctxt0 parametric_transfer_rule transfer_rule
OF [parametric_transfer_rule, transfer_rule]
val preprocessed_thm = preprocess ctxt0 thm
val (fixed_thm, ctxt1) = ctxt0
|> yield_singleton (apfst snd oo Variable.import true) preprocessed_thm
val assms = cprems_of fixed_thm
val add_transfer_rule = Thm.attribute_declaration Transfer.transfer_add
val (prems, ctxt2) = ctxt1 |> fold_map Thm.assume_hyps assms
val ctxt3 = ctxt2 |> Context.proof_map (fold add_transfer_rule prems)
val zipped_thm =
fixed_thm
|> undisch_all
|> zip_transfer_rules ctxt3
|> implies_intr_list assms
|> singleton (Variable.export ctxt3 ctxt0)
in
zipped_thm
end
fun print_generate_transfer_info msg =
let
val error_msg = cat_lines
["Generation of a parametric transfer rule failed.",
(Pretty.string_of (Pretty.block
[Pretty.str "Reason:", Pretty.brk 2, msg]))]
in
error error_msg
end
fun map_ter _ x [] = x
| map_ter f _ xs = map f xs
fun generate_transfer_rules lthy quot_thm rsp_thm def_thm par_thms =
let
val transfer_rule =
([quot_thm, rsp_thm, def_thm] MRSL @{thm Quotient_to_transfer})
|> Lifting_Term.parametrize_transfer_rule lthy
in
(map_ter (generate_parametric_transfer_rule lthy transfer_rule) [transfer_rule] par_thms
handle Lifting_Term.MERGE_TRANSFER_REL msg => (print_generate_transfer_info msg; [transfer_rule]))
end
(* Generation of the code certificate from the rsp theorem *)
fun get_body_types (Type ("fun", [_, U]), Type ("fun", [_, V])) = get_body_types (U, V)
| get_body_types (U, V) = (U, V)
fun get_binder_types (Type ("fun", [T, U]), Type ("fun", [V, W])) = (T, V) :: get_binder_types (U, W)
| get_binder_types _ = []
fun get_binder_types_by_rel (Const (\<^const_name>\<open>rel_fun\<close>, _) $ _ $ S) (Type ("fun", [T, U]), Type ("fun", [V, W])) =
(T, V) :: get_binder_types_by_rel S (U, W)
| get_binder_types_by_rel _ _ = []
fun get_body_type_by_rel (Const (\<^const_name>\<open>rel_fun\<close>, _) $ _ $ S) (Type ("fun", [_, U]), Type ("fun", [_, V])) =
get_body_type_by_rel S (U, V)
| get_body_type_by_rel _ (U, V) = (U, V)
fun get_binder_rels (Const (\<^const_name>\<open>rel_fun\<close>, _) $ R $ S) = R :: get_binder_rels S
| get_binder_rels _ = []
fun force_rty_type ctxt rty rhs =
let
val thy = Proof_Context.theory_of ctxt
val rhs_schematic = singleton (Variable.polymorphic ctxt) rhs
val rty_schematic = fastype_of rhs_schematic
val match = Sign.typ_match thy (rty_schematic, rty) Vartab.empty
in
Envir.subst_term_types match rhs_schematic
end
fun unabs_def ctxt def =
let
val (_, rhs) = Thm.dest_equals (Thm.cprop_of def)
fun dest_abs (Abs (var_name, T, _)) = (var_name, T)
| dest_abs tm = raise TERM("get_abs_var",[tm])
val (var_name, T) = dest_abs (Thm.term_of rhs)
val (new_var_names, ctxt') = Variable.variant_fixes [var_name] ctxt
val refl_thm = Thm.reflexive (Thm.cterm_of ctxt' (Free (hd new_var_names, T)))
in
Thm.combination def refl_thm |>
singleton (Proof_Context.export ctxt' ctxt)
end
fun unabs_all_def ctxt def =
let
val (_, rhs) = Thm.dest_equals (Thm.cprop_of def)
val xs = strip_abs_vars (Thm.term_of rhs)
in
fold (K (unabs_def ctxt)) xs def
end
val map_fun_unfolded =
@{thm map_fun_def[abs_def]} |>
unabs_def \<^context> |>
unabs_def \<^context> |>
Local_Defs.unfold0 \<^context> [@{thm comp_def}]
fun unfold_fun_maps ctm =
let
fun unfold_conv ctm =
case (Thm.term_of ctm) of
Const (\<^const_name>\<open>map_fun\<close>, _) $ _ $ _ =>
(Conv.arg_conv unfold_conv then_conv Conv.rewr_conv map_fun_unfolded) ctm
| _ => Conv.all_conv ctm
in
(Conv.fun_conv unfold_conv) ctm
end
fun unfold_fun_maps_beta ctm =
let val try_beta_conv = Conv.try_conv (Thm.beta_conversion false)
in
(unfold_fun_maps then_conv try_beta_conv) ctm
end
fun prove_rel ctxt rsp_thm (rty, qty) =
let
val ty_args = get_binder_types (rty, qty)
fun disch_arg args_ty thm =
let
val quot_thm = Lifting_Term.prove_quot_thm ctxt args_ty
in
[quot_thm, thm] MRSL @{thm apply_rsp''}
end
in
fold disch_arg ty_args rsp_thm
end
exception CODE_CERT_GEN of string
fun simplify_code_eq ctxt def_thm =
Local_Defs.unfold0 ctxt [@{thm o_apply}, @{thm map_fun_def}, @{thm id_apply}] def_thm
(*
quot_thm - quotient theorem (Quotient R Abs Rep T).
returns: whether the Lifting package is able to generate code for the abstract type
represented by quot_thm
*)
fun can_generate_code_cert quot_thm =
case quot_thm_rel quot_thm of
Const (\<^const_name>\<open>HOL.eq\<close>, _) => true
| Const (\<^const_name>\<open>eq_onp\<close>, _) $ _ => true
| _ => false
fun generate_rep_eq ctxt def_thm rsp_thm (rty, qty) =
let
val unfolded_def = Conv.fconv_rule (Conv.arg_conv unfold_fun_maps_beta) def_thm
val unabs_def = unabs_all_def ctxt unfolded_def
in
if body_type rty = body_type qty then
SOME (simplify_code_eq ctxt (HOLogic.mk_obj_eq unabs_def))
else
let
val quot_thm = Lifting_Term.prove_quot_thm ctxt (get_body_types (rty, qty))
val rel_fun = prove_rel ctxt rsp_thm (rty, qty)
val rep_abs_thm = [quot_thm, rel_fun] MRSL @{thm Quotient_rep_abs_eq}
in
case mono_eq_prover ctxt (hd (Thm.prems_of rep_abs_thm)) of
SOME mono_eq_thm =>
let
val rep_abs_eq = mono_eq_thm RS rep_abs_thm
val rep = Thm.cterm_of ctxt (quot_thm_rep quot_thm)
val rep_refl = HOLogic.mk_obj_eq (Thm.reflexive rep)
val repped_eq = [rep_refl, HOLogic.mk_obj_eq unabs_def] MRSL @{thm cong}
val code_cert = [repped_eq, rep_abs_eq] MRSL trans
in
SOME (simplify_code_eq ctxt code_cert)
end
| NONE => NONE
end
end
fun generate_abs_eq ctxt def_thm rsp_thm quot_thm =
let
val abs_eq_with_assms =
let
val (rty, qty) = quot_thm_rty_qty quot_thm
val rel = quot_thm_rel quot_thm
val ty_args = get_binder_types_by_rel rel (rty, qty)
val body_type = get_body_type_by_rel rel (rty, qty)
val quot_ret_thm = Lifting_Term.prove_quot_thm ctxt body_type
val rep_abs_folded_unmapped_thm =
let
val rep_id = [quot_thm, def_thm] MRSL @{thm Quotient_Rep_eq}
val ctm = Thm.dest_equals_lhs (Thm.cprop_of rep_id)
val unfolded_maps_eq = unfold_fun_maps ctm
val t1 = [quot_thm, def_thm, rsp_thm] MRSL @{thm Quotient_rep_abs_fold_unmap}
val prems_pat = (hd o Drule.cprems_of) t1
val insts = Thm.first_order_match (prems_pat, Thm.cprop_of unfolded_maps_eq)
in
unfolded_maps_eq RS (Drule.instantiate_normalize insts t1)
end
in
rep_abs_folded_unmapped_thm
|> fold (fn _ => fn thm => thm RS @{thm rel_funD2}) ty_args
|> (fn x => x RS (@{thm Quotient_rel_abs2} OF [quot_ret_thm]))
end
val prem_rels = get_binder_rels (quot_thm_rel quot_thm);
val proved_assms = prem_rels |> map (try_prove_refl_rel ctxt)
|> map_index (apfst (fn x => x + 1)) |> filter (is_some o snd) |> map (apsnd the)
|> map (apsnd (fn assm => assm RS @{thm ge_eq_refl}))
val abs_eq = fold_rev (fn (i, assm) => fn thm => assm RSN (i, thm)) proved_assms abs_eq_with_assms
in
simplify_code_eq ctxt abs_eq
end
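(* Registration with the code generator: the abs_eq theorem is preferred as an ordinary code
   equation; otherwise the rep_eq theorem is used, either as an ordinary equation (when the
   raw and abstract body types coincide) or as an abstract code equation. *)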
fun register_code_eq_thy abs_eq_thm opt_rep_eq_thm (rty, qty) thy =
let
fun no_abstr (t $ u) = no_abstr t andalso no_abstr u
| no_abstr (Abs (_, _, t)) = no_abstr t
| no_abstr (Const (name, _)) = not (Code.is_abstr thy name)
| no_abstr _ = true
fun is_valid_eq eqn = can (Code.assert_eqn thy) (mk_meta_eq eqn, true)
andalso no_abstr (Thm.prop_of eqn)
fun is_valid_abs_eq abs_eq = can (Code.assert_abs_eqn thy NONE) (mk_meta_eq abs_eq)
in
if is_valid_eq abs_eq_thm then
(ABS_EQ, Code.declare_default_eqns_global [(abs_eq_thm, true)] thy)
else
let
val (rty_body, qty_body) = get_body_types (rty, qty)
in
if rty_body = qty_body then
(REP_EQ, Code.declare_default_eqns_global [(the opt_rep_eq_thm, true)] thy)
else
if is_some opt_rep_eq_thm andalso is_valid_abs_eq (the opt_rep_eq_thm)
then
(REP_EQ, Code.declare_abstract_eqn_global (the opt_rep_eq_thm) thy)
else
(NONE_EQ, thy)
end
end
local
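(* no_no_code checks, recursively over the type structure, that none of the involved
   abstract types is registered as a no_code type; only in that case is a code equation
   registered at all. *)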
fun no_no_code ctxt (rty, qty) =
if same_type_constrs (rty, qty) then
forall (no_no_code ctxt) (Targs rty ~~ Targs qty)
else
if Term.is_Type qty then
if Lifting_Info.is_no_code_type ctxt (Tname qty) then false
else
let
val (rty', rtyq) = Lifting_Term.instantiate_rtys ctxt (rty, qty)
val (rty's, rtyqs) = (Targs rty', Targs rtyq)
in
forall (no_no_code ctxt) (rty's ~~ rtyqs)
end
else
true
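(* The code equations have to travel through the note/attribute machinery, so they are
   encoded as a single theorem: encode_code_eq packs the abs_eq, the optional rep_eq (with
   TrueI as placeholder) and both types into a balanced conjunction, and decode_code_eq
   recovers them again on the theory level. *)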
fun encode_code_eq ctxt abs_eq opt_rep_eq (rty, qty) =
let
fun mk_type typ = typ |> Logic.mk_type |> Thm.cterm_of ctxt |> Drule.mk_term
in
Conjunction.intr_balanced [abs_eq, (the_default TrueI opt_rep_eq), mk_type rty, mk_type qty]
end
exception DECODE
fun decode_code_eq thm =
if Thm.nprems_of thm > 0 then raise DECODE
else
let
val [abs_eq, rep_eq, rty, qty] = Conjunction.elim_balanced 4 thm
val opt_rep_eq = if Thm.eq_thm_prop (rep_eq, TrueI) then NONE else SOME rep_eq
fun dest_type typ = typ |> Drule.dest_term |> Thm.term_of |> Logic.dest_type
in
(abs_eq, opt_rep_eq, (dest_type rty, dest_type qty))
end
structure Data = Generic_Data
(
type T = code_eq option
val empty = NONE
fun merge _ = NONE
);
fun register_encoded_code_eq thm thy =
let
val (abs_eq_thm, opt_rep_eq_thm, (rty, qty)) = decode_code_eq thm
val (code_eq, thy) = register_code_eq_thy abs_eq_thm opt_rep_eq_thm (rty, qty) thy
in
Context.theory_map (Data.put (SOME code_eq)) thy
end
handle DECODE => thy
val register_code_eq_attribute = Thm.declaration_attribute
(fn thm => Context.mapping (register_encoded_code_eq thm) I)
- val register_code_eq_attrib = Attrib.internal (K register_code_eq_attribute)
+ val register_code_eq_attrib = Attrib.internal \<^here> (K register_code_eq_attribute)
in
fun register_code_eq abs_eq_thm opt_rep_eq_thm (rty, qty) lthy =
let
val encoded_code_eq = encode_code_eq lthy abs_eq_thm opt_rep_eq_thm (rty, qty)
in
if no_no_code lthy (rty, qty) then
let
val lthy' = lthy
|> (#2 oo Local_Theory.note) ((Binding.empty, [register_code_eq_attrib]), [encoded_code_eq])
val opt_code_eq = Data.get (Context.Theory (Proof_Context.theory_of lthy'))
val code_eq =
if is_some opt_code_eq then the opt_code_eq
else UNKNOWN_EQ (* UNKNOWN_EQ means that we are in a locale and we do not know
which code equation is going to be used. This is going to be resolved at the
point when an interpretation of the locale is executed. *)
val lthy'' = lthy'
- |> Local_Theory.declaration {syntax = false, pervasive = true} (K (Data.put NONE))
+ |> Local_Theory.declaration {syntax = false, pervasive = true, pos = \<^here>} (K (Data.put NONE))
in (code_eq, lthy'') end
else
(NONE_EQ, lthy)
end
end
(*
Defines an operation on an abstract type in terms of a corresponding operation
on a representation type.
var - the binding and mixfix of the new constant being defined
qty - the abstract type of the new constant
rhs - a term representing the new constant on the raw level
rsp_thm - a respectfulness theorem in the internal tagged form (like '(R ===> R ===> R) f f'),
i.e. "(Lifting_Term.equiv_relation (fastype_of rhs, qty)) $ rhs $ rhs"
par_thms - parametricity theorems for rhs
*)
fun add_lift_def (config: config) (binding, mx) qty rhs rsp_thm par_thms lthy0 =
let
val rty = fastype_of rhs
val quot_thm = Lifting_Term.prove_quot_thm lthy0 (rty, qty)
val absrep_trm = quot_thm_abs quot_thm
val rty_forced = (domain_type o fastype_of) absrep_trm
val forced_rhs = force_rty_type lthy0 rty_forced rhs
val lhs = Free (Binding.name_of binding, qty)
val prop = Logic.mk_equals (lhs, absrep_trm $ forced_rhs)
val (_, prop') = Local_Defs.cert_def lthy0 (K []) prop
val (_, newrhs) = Local_Defs.abs_def prop'
val var = ((#notes config = false ? Binding.concealed) binding, mx)
val def_name = Thm.make_def_binding (#notes config) (#1 var)
val ((lift_const, (_ , def_thm)), lthy1) = lthy0
|> Local_Theory.define (var, ((def_name, []), newrhs))
val transfer_rules = generate_transfer_rules lthy1 quot_thm rsp_thm def_thm par_thms
val abs_eq_thm = generate_abs_eq lthy1 def_thm rsp_thm quot_thm
val opt_rep_eq_thm = generate_rep_eq lthy1 def_thm rsp_thm (rty_forced, qty)
fun notes names =
let
val lhs_name = Binding.reset_pos (#1 var)
val rsp_thmN = Binding.qualify_name true lhs_name "rsp"
val abs_eq_thmN = Binding.qualify_name true lhs_name "abs_eq"
val rep_eq_thmN = Binding.qualify_name true lhs_name "rep_eq"
val transfer_ruleN = Binding.qualify_name true lhs_name "transfer"
val notes =
[(rsp_thmN, [], [rsp_thm]),
(transfer_ruleN, @{attributes [transfer_rule]}, transfer_rules),
(abs_eq_thmN, [], [abs_eq_thm])]
@ (case opt_rep_eq_thm of SOME rep_eq_thm => [(rep_eq_thmN, [], [rep_eq_thm])] | NONE => [])
in
if names then map (fn (name, attrs, thms) => ((name, []), [(thms, attrs)])) notes
else map_filter (fn (_, attrs, thms) => if null attrs then NONE
else SOME (Binding.empty_atts, [(thms, attrs)])) notes
end
val (code_eq, lthy2) = lthy1
|> register_code_eq abs_eq_thm opt_rep_eq_thm (rty_forced, qty)
val lift_def = mk_lift_def rty_forced qty newrhs lift_const def_thm rsp_thm abs_eq_thm
opt_rep_eq_thm code_eq transfer_rules
in
lthy2
|> (snd o Local_Theory.begin_nested)
|> Local_Theory.notes (notes (#notes config)) |> snd
|> `(fn lthy => morph_lift_def (Local_Theory.target_morphism lthy) lift_def)
||> Local_Theory.end_nested
end
(* This is not a very cheap way of getting the rules, but there are only a few active
liftings in the current setting *)
fun get_cr_pcr_eqs ctxt =
let
fun collect (data : Lifting_Info.quotient) l =
if is_some (#pcr_info data)
then ((Thm.symmetric o safe_mk_meta_eq o Thm.transfer' ctxt o #pcr_cr_eq o the o #pcr_info) data :: l)
else l
val table = Lifting_Info.get_quotients ctxt
in
Symtab.fold (fn (_, data) => fn l => collect data l) table []
end
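(* prepare_lift_def produces the (parametrized) respectfulness goal for the given rhs,
   unless the reflexivity prover discharges it automatically, together with a continuation
   that performs the actual definition once the goal has been proved. *)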
fun prepare_lift_def add_lift_def var qty rhs par_thms ctxt =
let
val rsp_rel = Lifting_Term.equiv_relation ctxt (fastype_of rhs, qty)
val rty_forced = (domain_type o fastype_of) rsp_rel;
val forced_rhs = force_rty_type ctxt rty_forced rhs;
val cr_to_pcr_conv = HOLogic.Trueprop_conv (Conv.fun2_conv
(Raw_Simplifier.rewrite ctxt false (get_cr_pcr_eqs ctxt)))
val (prsp_tm, rsp_prsp_eq) = HOLogic.mk_Trueprop (rsp_rel $ forced_rhs $ forced_rhs)
|> Thm.cterm_of ctxt
|> cr_to_pcr_conv
|> `Thm.concl_of
|>> Logic.dest_equals
|>> snd
val to_rsp = rsp_prsp_eq RS Drule.equal_elim_rule2
val opt_proven_rsp_thm = try_prove_reflexivity ctxt prsp_tm
fun after_qed internal_rsp_thm =
add_lift_def var qty rhs (internal_rsp_thm RS to_rsp) par_thms
in
case opt_proven_rsp_thm of
SOME thm => (NONE, K (after_qed thm))
| NONE => (SOME prsp_tm, after_qed)
end
fun gen_lift_def add_lift_def var qty rhs tac par_thms lthy =
let
val (goal, after_qed) = prepare_lift_def add_lift_def var qty rhs par_thms lthy
in
case goal of
SOME goal =>
let
val rsp_thm =
Goal.prove_sorry lthy [] [] goal (tac o #context)
|> Thm.close_derivation \<^here>
in
after_qed rsp_thm lthy
end
| NONE => after_qed Drule.dummy_thm lthy
end
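(* lift_def: prove the respectfulness goal with the supplied tactic and then perform the
   definition via add_lift_def. *)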
val lift_def = gen_lift_def o add_lift_def
end (* structure *)
diff --git a/src/HOL/Tools/Lifting/lifting_def_code_dt.ML b/src/HOL/Tools/Lifting/lifting_def_code_dt.ML
--- a/src/HOL/Tools/Lifting/lifting_def_code_dt.ML
+++ b/src/HOL/Tools/Lifting/lifting_def_code_dt.ML
@@ -1,833 +1,833 @@
(* Title: HOL/Tools/Lifting/lifting_def_code_dt.ML
Author: Ondrej Kuncar
Workaround that allows us to execute lifted constants that have
as a return type a datatype containing a subtype; lift_definition command
*)
signature LIFTING_DEF_CODE_DT =
sig
type rep_isom_data
val isom_of_rep_isom_data: rep_isom_data -> term
val transfer_of_rep_isom_data: rep_isom_data -> thm
val bundle_name_of_rep_isom_data: rep_isom_data -> string
val pointer_of_rep_isom_data: rep_isom_data -> string
type code_dt
val rty_of_code_dt: code_dt -> typ
val qty_of_code_dt: code_dt -> typ
val wit_of_code_dt: code_dt -> term
val wit_thm_of_code_dt: code_dt -> thm
val rep_isom_data_of_code_dt: code_dt -> rep_isom_data option
val morph_code_dt: morphism -> code_dt -> code_dt
val mk_witness_of_code_dt: typ -> code_dt -> term
val mk_rep_isom_of_code_dt: typ -> code_dt -> term option
val code_dt_of: Proof.context -> typ * typ -> code_dt option
val code_dt_of_global: theory -> typ * typ -> code_dt option
val all_code_dt_of: Proof.context -> code_dt list
val all_code_dt_of_global: theory -> code_dt list
type config_code_dt = { code_dt: bool, lift_config: Lifting_Def.config }
val default_config_code_dt: config_code_dt
val add_lift_def_code_dt:
config_code_dt -> binding * mixfix -> typ -> term -> thm -> thm list -> local_theory ->
Lifting_Def.lift_def * local_theory
val lift_def_code_dt:
config_code_dt -> binding * mixfix -> typ -> term -> (Proof.context -> tactic) -> thm list ->
local_theory -> Lifting_Def.lift_def * local_theory
val lift_def_cmd:
string list * (binding * string option * mixfix) * string * (Facts.ref * Token.src list) list ->
local_theory -> Proof.state
end
structure Lifting_Def_Code_Dt: LIFTING_DEF_CODE_DT =
struct
open Ctr_Sugar_Util BNF_Util BNF_FP_Util BNF_FP_Def_Sugar Lifting_Def Lifting_Util
infix 0 MRSL
(** data structures **)
(* all type variables in qty are in rty *)
datatype rep_isom_data = REP_ISOM of { isom: term, transfer: thm, bundle_name: string, pointer: string }
fun isom_of_rep_isom_data (REP_ISOM rep_isom) = #isom rep_isom;
fun transfer_of_rep_isom_data (REP_ISOM rep_isom) = #transfer rep_isom;
fun bundle_name_of_rep_isom_data (REP_ISOM rep_isom) = #bundle_name rep_isom;
fun pointer_of_rep_isom_data (REP_ISOM rep_isom) = #pointer rep_isom;
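(* code_dt stores, per pair of raw type rty and abstract type qty, a non-emptiness witness
   together with its theorem and, once it has been set up, the rep_isom_data describing the
   executable isomorphic copy of qty. *)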
datatype code_dt = CODE_DT of { rty: typ, qty: typ, wit: term, wit_thm: thm,
rep_isom_data: rep_isom_data option };
fun rty_of_code_dt (CODE_DT code_dt) = #rty code_dt;
fun qty_of_code_dt (CODE_DT code_dt) = #qty code_dt;
fun wit_of_code_dt (CODE_DT code_dt) = #wit code_dt;
fun wit_thm_of_code_dt (CODE_DT code_dt) = #wit_thm code_dt;
fun rep_isom_data_of_code_dt (CODE_DT code_dt) = #rep_isom_data code_dt;
fun ty_alpha_equiv (T, U) = Type.raw_instance (T, U) andalso Type.raw_instance (U, T);
fun code_dt_eq c = (ty_alpha_equiv o apply2 rty_of_code_dt) c
andalso (ty_alpha_equiv o apply2 qty_of_code_dt) c;
fun term_of_code_dt code_dt = code_dt |> `rty_of_code_dt ||> qty_of_code_dt |> HOLogic.mk_prodT
|> Net.encode_type |> single;
(* modulo renaming, typ must contain TVars *)
fun is_code_dt_of_type (rty, qty) code_dt = code_dt |> `rty_of_code_dt ||> qty_of_code_dt
|> HOLogic.mk_prodT |> curry ty_alpha_equiv (HOLogic.mk_prodT (rty, qty));
fun mk_rep_isom_data isom transfer bundle_name pointer =
REP_ISOM { isom = isom, transfer = transfer, bundle_name = bundle_name, pointer = pointer}
fun mk_code_dt rty qty wit wit_thm rep_isom_data =
CODE_DT { rty = rty, qty = qty, wit = wit, wit_thm = wit_thm, rep_isom_data = rep_isom_data };
fun map_rep_isom_data f1 f2 f3 f4
(REP_ISOM { isom = isom, transfer = transfer, bundle_name = bundle_name, pointer = pointer }) =
REP_ISOM { isom = f1 isom, transfer = f2 transfer, bundle_name = f3 bundle_name, pointer = f4 pointer };
fun map_code_dt f1 f2 f3 f4 f5 f6 f7 f8
(CODE_DT {rty = rty, qty = qty, wit = wit, wit_thm = wit_thm, rep_isom_data = rep_isom_data}) =
CODE_DT {rty = f1 rty, qty = f2 qty, wit = f3 wit, wit_thm = f4 wit_thm,
rep_isom_data = Option.map (map_rep_isom_data f5 f6 f7 f8) rep_isom_data};
fun update_rep_isom isom transfer binding pointer i = mk_code_dt (rty_of_code_dt i) (qty_of_code_dt i)
(wit_of_code_dt i) (wit_thm_of_code_dt i) (SOME (mk_rep_isom_data isom transfer binding pointer))
fun morph_code_dt phi =
let
val mty = Morphism.typ phi
val mterm = Morphism.term phi
val mthm = Morphism.thm phi
in
map_code_dt mty mty mterm mthm mterm mthm I I
end
val transfer_code_dt = morph_code_dt o Morphism.transfer_morphism;
structure Data = Generic_Data
(
type T = code_dt Item_Net.T
val empty = Item_Net.init code_dt_eq term_of_code_dt
val merge = Item_Net.merge
);
fun code_dt_of_generic context (rty, qty) =
let
val typ = HOLogic.mk_prodT (rty, qty)
val prefiltered = Item_Net.retrieve_matching (Data.get context) (Net.encode_type typ)
in
prefiltered |> filter (is_code_dt_of_type (rty, qty))
|> map (transfer_code_dt (Context.theory_of context)) |> find_first (fn _ => true)
end;
fun code_dt_of ctxt (rty, qty) =
let
val sch_rty = Logic.type_map (singleton (Variable.polymorphic ctxt)) rty
val sch_qty = Logic.type_map (singleton (Variable.polymorphic ctxt)) qty
in
code_dt_of_generic (Context.Proof ctxt) (sch_rty, sch_qty)
end;
fun code_dt_of_global thy (rty, qty) =
let
val sch_rty = Logic.varifyT_global rty
val sch_qty = Logic.varifyT_global qty
in
code_dt_of_generic (Context.Theory thy) (sch_rty, sch_qty)
end;
fun all_code_dt_of_generic context =
Item_Net.content (Data.get context) |> map (transfer_code_dt (Context.theory_of context));
val all_code_dt_of = all_code_dt_of_generic o Context.Proof;
val all_code_dt_of_global = all_code_dt_of_generic o Context.Theory;
fun update_code_dt code_dt =
(snd o Local_Theory.begin_nested)
- #> Local_Theory.declaration {syntax = false, pervasive = true}
+ #> Local_Theory.declaration {syntax = false, pervasive = true, pos = \<^here>}
(fn phi => Data.map (Item_Net.update (morph_code_dt phi code_dt)))
#> Local_Theory.end_nested
fun mk_match_of_code_dt qty code_dt = Vartab.empty |> Type.raw_match (qty_of_code_dt code_dt, qty)
|> Vartab.dest |> map (fn (x, (S, T)) => (TVar (x, S), T));
fun mk_witness_of_code_dt qty code_dt =
Term.subst_atomic_types (mk_match_of_code_dt qty code_dt) (wit_of_code_dt code_dt)
fun mk_rep_isom_of_code_dt qty code_dt = Option.map
(isom_of_rep_isom_data #> Term.subst_atomic_types (mk_match_of_code_dt qty code_dt))
(rep_isom_data_of_code_dt code_dt)
(** unique name for a type **)
fun var_name name sort = if sort = \<^sort>\<open>{type}\<close> orelse sort = [] then ["x" ^ name]
else "x" ^ name :: "x_" :: sort @ ["x_"];
fun concat_Tnames (Type (name, ts)) = name :: maps concat_Tnames ts
| concat_Tnames (TFree (name, sort)) = var_name name sort
| concat_Tnames (TVar ((name, _), sort)) = var_name name sort;
fun unique_Tname (rty, qty) =
let
val Tnames = map Long_Name.base_name (concat_Tnames rty @ ["x_x"] @ concat_Tnames qty);
in
fold (Binding.qualify false) (tl Tnames) (Binding.name (hd Tnames))
end;
(** witnesses **)
fun mk_undefined T = Const (\<^const_name>\<open>undefined\<close>, T);
fun mk_witness quot_thm =
let
val wit_thm = quot_thm RS @{thm type_definition_Quotient_not_empty_witness}
val wit = quot_thm_rep quot_thm $ mk_undefined (quot_thm_rty_qty quot_thm |> snd)
in
(wit, wit_thm)
end
(** config **)
type config_code_dt = { code_dt: bool, lift_config: config }
val default_config_code_dt = { code_dt = false, lift_config = default_config }
(** Main code **)
val ld_no_notes = { notes = false }
fun comp_lift_error _ _ = error "Composition of abstract types has not been implemented yet."
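(* lift records a non-emptiness witness for (rty, qty), derived from the quotient theorem,
   unless a code_dt entry already exists; it fails if qty was not defined as a subtype. *)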
fun lift qty (quot_thm, (lthy, rel_eq_onps)) =
let
val quot_thm = Lifting_Term.force_qty_type lthy qty quot_thm
val (rty, qty) = quot_thm_rty_qty quot_thm;
in
if is_none (code_dt_of lthy (rty, qty)) then
let
val (wit, wit_thm) = (mk_witness quot_thm
handle THM _ => error ("code_dt: " ^ quote (Tname qty) ^ " was not defined as a subtype."))
val code_dt = mk_code_dt rty qty wit wit_thm NONE
in
(quot_thm, (update_code_dt code_dt lthy, rel_eq_onps))
end
else
(quot_thm, (lthy, rel_eq_onps))
end;
fun case_tac rule =
Subgoal.FOCUS_PARAMS (fn {context = ctxt, params, ...} =>
HEADGOAL (rtac ctxt (infer_instantiate' ctxt [SOME (snd (hd params))] rule)));
fun bundle_name_of_bundle_binding binding phi context =
Name_Space.full_name (Name_Space.naming_of context) (Morphism.binding phi binding);
fun prove_schematic_quot_thm actions ctxt =
Lifting_Term.prove_schematic_quot_thm actions (Lifting_Info.get_quotients ctxt) ctxt
fun prove_code_dt (rty, qty) lthy =
let
val (fold_quot_thm: (local_theory * thm list) Lifting_Term.fold_quot_thm) =
{ constr = constr, lift = lift, comp_lift = comp_lift_error };
in prove_schematic_quot_thm fold_quot_thm lthy (rty, qty) (lthy, []) |> snd end
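(* add_lift_def_code_dt first performs the ordinary lifting definition; if no code equation
   could be registered and the code_dt option is set, the constant is additionally defined
   with the isomorphic copy of its result type as return type, and an executable alternative
   equation (registered as [code]) is derived from that auxiliary definition. *)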
and add_lift_def_code_dt config var qty rhs rsp_thm par_thms lthy =
let
fun binop_conv2 cv1 cv2 = Conv.combination_conv (Conv.arg_conv cv1) cv2
fun ret_rel_conv conv ctm =
case (Thm.term_of ctm) of
Const (\<^const_name>\<open>rel_fun\<close>, _) $ _ $ _ =>
binop_conv2 Conv.all_conv conv ctm
| _ => conv ctm
fun R_conv rel_eq_onps ctxt =
Conv.top_sweep_rewrs_conv @{thms eq_onp_top_eq_eq[symmetric, THEN eq_reflection]} ctxt
then_conv Conv.bottom_rewrs_conv rel_eq_onps ctxt
val (ret_lift_def, lthy1) = add_lift_def (#lift_config config) var qty rhs rsp_thm par_thms lthy
in
if (not (#code_dt config) orelse (code_eq_of_lift_def ret_lift_def <> NONE_EQ)
andalso (code_eq_of_lift_def ret_lift_def <> UNKNOWN_EQ))
(* Let us try even in the case of UNKNOWN_EQ. If this leads to problems, the user can always
say that they do not want this workaround. *)
then (ret_lift_def, lthy1)
else
let
val lift_def = inst_of_lift_def lthy1 qty ret_lift_def
val rty = rty_of_lift_def lift_def
val rty_ret = body_type rty
val qty_ret = body_type qty
val (lthy2, rel_eq_onps) = prove_code_dt (rty_ret, qty_ret) lthy1
val code_dt = code_dt_of lthy2 (rty_ret, qty_ret)
in
if is_none code_dt orelse is_none (rep_isom_data_of_code_dt (the code_dt))
then (ret_lift_def, lthy2)
else
let
val code_dt = the code_dt
val rhs = dest_comb (rhs_of_lift_def lift_def) |> snd
val rep_isom_data = code_dt |> rep_isom_data_of_code_dt |> the
val pointer = pointer_of_rep_isom_data rep_isom_data
val quot_active =
Lifting_Info.lookup_restore_data lthy2 pointer |> the |> #quotient |> #quot_thm
|> Lifting_Info.lookup_quot_thm_quotients lthy2 |> is_some
val qty_code_dt_bundle_name = bundle_name_of_rep_isom_data rep_isom_data
val rep_isom = mk_rep_isom_of_code_dt qty_ret code_dt |> the
val lthy3 = if quot_active then lthy2 else Bundle.includes [qty_code_dt_bundle_name] lthy2
fun qty_isom_of_rep_isom rep = rep |> dest_Const |> snd |> domain_type
val qty_isom = qty_isom_of_rep_isom rep_isom
val f'_var = (Binding.suffix_name "_aux" (fst var), NoSyn);
val f'_qty = strip_type qty |> fst |> rpair qty_isom |> op --->
val f'_rsp_rel = Lifting_Term.equiv_relation lthy3 (rty, f'_qty);
val rsp = rsp_thm_of_lift_def lift_def
val rel_eq_onps_conv =
HOLogic.Trueprop_conv (Conv.fun2_conv (ret_rel_conv (R_conv rel_eq_onps lthy3)))
val rsp_norm = Conv.fconv_rule rel_eq_onps_conv rsp
val f'_rsp_goal = HOLogic.mk_Trueprop (f'_rsp_rel $ rhs $ rhs);
val f'_rsp = Goal.prove_sorry lthy3 [] [] f'_rsp_goal
(fn {context = ctxt, prems = _} =>
HEADGOAL (CONVERSION (rel_eq_onps_conv) THEN' rtac ctxt rsp_norm))
|> Thm.close_derivation \<^here>
val (f'_lift_def, lthy4) = add_lift_def ld_no_notes f'_var f'_qty rhs f'_rsp [] lthy3
val f'_lift_def = inst_of_lift_def lthy4 f'_qty f'_lift_def
val f'_lift_const = mk_lift_const_of_lift_def f'_qty f'_lift_def
val (args, args_ctxt) = mk_Frees "x" (binder_types qty) lthy4
val f_alt_def_goal_lhs = list_comb (lift_const_of_lift_def lift_def, args);
val f_alt_def_goal_rhs = rep_isom $ list_comb (f'_lift_const, args);
val f_alt_def_goal = HOLogic.mk_Trueprop (HOLogic.mk_eq (f_alt_def_goal_lhs, f_alt_def_goal_rhs));
fun f_alt_def_tac ctxt i =
EVERY' [Transfer.gen_frees_tac [] ctxt, DETERM o Transfer.transfer_tac true ctxt,
SELECT_GOAL (Local_Defs.unfold0_tac ctxt [id_apply]), rtac ctxt refl] i;
val rep_isom_transfer = transfer_of_rep_isom_data rep_isom_data
val (_, transfer_ctxt) = args_ctxt
|> Proof_Context.note_thms ""
(Binding.empty_atts, [([rep_isom_transfer], [Transfer.transfer_add])])
val f_alt_def =
Goal.prove_sorry transfer_ctxt [] [] f_alt_def_goal
(fn {context = goal_ctxt, ...} => HEADGOAL (f_alt_def_tac goal_ctxt))
|> Thm.close_derivation \<^here>
|> singleton (Variable.export transfer_ctxt lthy4)
val lthy5 = lthy4
|> Local_Theory.note ((Binding.empty, @{attributes [code]}), [f_alt_def])
|> snd
(* if we are processing a mutual datatype (there is a cycle!), the corresponding quotient
will be needed later and will also be forgotten later *)
|> (if quot_active then I else Lifting_Setup.lifting_forget pointer)
in
(ret_lift_def, lthy5)
end
end
end
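(* mk_rep_isom makes the isomorphism between qty and its copy qty_isom executable: it lifts
   the identity to Rep_isom/Abs_isom, stores them in the code_dt data, defines discriminators
   and selectors on qty_isom, and finally proves a [code] equation for Rep_isom in terms of
   them. *)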
and mk_rep_isom qty_isom_bundle (rty, qty, qty_isom) lthy0 =
let
(* logical definition of qty qty_isom isomorphism *)
val uTname = unique_Tname (rty, qty)
fun eq_onp_to_top_tac ctxt = SELECT_GOAL (Local_Defs.unfold0_tac ctxt
(@{thm eq_onp_top_eq_eq[symmetric]} :: Lifting_Info.get_relator_eq_onp_rules ctxt))
fun lift_isom_tac ctxt = HEADGOAL (eq_onp_to_top_tac ctxt
THEN' (rtac ctxt @{thm id_transfer}));
val (rep_isom_lift_def, lthy1) = lthy0
|> (snd o Local_Theory.begin_nested)
|> lift_def ld_no_notes (Binding.qualify_name true uTname "Rep_isom", NoSyn)
(qty_isom --> qty) (HOLogic.id_const rty) lift_isom_tac []
|>> inst_of_lift_def lthy0 (qty_isom --> qty);
val (abs_isom, lthy2) = lthy1
|> lift_def ld_no_notes (Binding.qualify_name true uTname "Abs_isom", NoSyn)
(qty --> qty_isom) (HOLogic.id_const rty) lift_isom_tac []
|>> mk_lift_const_of_lift_def (qty --> qty_isom);
val rep_isom = lift_const_of_lift_def rep_isom_lift_def
val pointer = Lifting_Setup.pointer_of_bundle_binding lthy2 qty_isom_bundle
fun code_dt phi context =
code_dt_of lthy2 (rty, qty) |> the
|> update_rep_isom rep_isom (transfer_rules_of_lift_def rep_isom_lift_def |> hd)
(bundle_name_of_bundle_binding qty_isom_bundle phi context) pointer;
val lthy3 = lthy2
- |> Local_Theory.declaration {syntax = false, pervasive = true}
+ |> Local_Theory.declaration {syntax = false, pervasive = true, pos = \<^here>}
(fn phi => fn context => Data.map (Item_Net.update (morph_code_dt phi (code_dt phi context))) context)
|> Local_Theory.end_nested
(* in order to make the qty qty_isom isomorphism executable we have to define discriminators
and selectors for qty_isom *)
val (rty_name, typs) = dest_Type rty
val (_, qty_typs) = dest_Type qty
val fp = BNF_FP_Def_Sugar.fp_sugar_of lthy3 rty_name
val fp = if is_some fp then the fp
else error ("code_dt: " ^ quote rty_name ^ " is not a datatype.")
val ctr_sugar = fp |> #fp_ctr_sugar |> #ctr_sugar
val ctrs = map (Ctr_Sugar.mk_ctr typs) (#ctrs ctr_sugar);
val qty_ctrs = map (Ctr_Sugar.mk_ctr qty_typs) (#ctrs ctr_sugar);
val ctr_Tss = map (dest_Const #> snd #> binder_types) ctrs;
val qty_ctr_Tss = map (dest_Const #> snd #> binder_types) qty_ctrs;
val n = length ctrs;
val ks = 1 upto n;
val (xss, _) = mk_Freess "x" ctr_Tss lthy3;
fun sel_retT (rty' as Type (s, rtys'), qty' as Type (s', qtys')) =
if (rty', qty') = (rty, qty) then qty_isom else (if s = s'
then Type (s, map sel_retT (rtys' ~~ qtys')) else qty')
| sel_retT (_, qty') = qty';
val sel_retTs = map2 (map2 (sel_retT oo pair)) ctr_Tss qty_ctr_Tss
fun lazy_prove_code_dt (rty, qty) lthy =
if is_none (code_dt_of lthy (rty, qty)) then prove_code_dt (rty, qty) lthy |> fst else lthy;
val lthy4 = fold2 (fold2 (lazy_prove_code_dt oo pair)) ctr_Tss sel_retTs lthy3
val sel_argss = @{map 4} (fn k => fn xs => @{map 2} (fn x => fn qty_ret =>
(k, qty_ret, (xs, x)))) ks xss xss sel_retTs;
fun mk_sel_casex (_, _, (_, x)) = Ctr_Sugar.mk_case typs (x |> dest_Free |> snd) (#casex ctr_sugar);
val dis_casex = Ctr_Sugar.mk_case typs HOLogic.boolT (#casex ctr_sugar);
fun mk_sel_case_args lthy ctr_Tss ks (k, qty_ret, (xs, x)) =
let
val T = x |> dest_Free |> snd;
fun gen_undef_wit Ts wits =
case code_dt_of lthy (T, qty_ret) of
SOME code_dt =>
(fold_rev (Term.lambda o curry Free Name.uu) Ts (mk_witness_of_code_dt qty_ret code_dt),
wit_thm_of_code_dt code_dt :: wits)
| NONE => (fold_rev (Term.lambda o curry Free Name.uu) Ts (mk_undefined T), wits)
in
@{fold_map 2} (fn Ts => fn k' => fn wits =>
(if k = k' then (fold_rev Term.lambda xs x, wits) else gen_undef_wit Ts wits)) ctr_Tss ks []
end;
fun mk_sel_rhs arg =
let val (sel_rhs, wits) = mk_sel_case_args lthy4 ctr_Tss ks arg
in (arg |> #2, wits, list_comb (mk_sel_casex arg, sel_rhs)) end;
fun mk_dis_case_args args k = map (fn (k', arg) => (if k = k'
then fold_rev Term.lambda arg \<^Const>\<open>True\<close> else fold_rev Term.lambda arg \<^Const>\<open>False\<close>)) args;
val sel_rhs = map (map mk_sel_rhs) sel_argss
val dis_rhs = map (fn k => list_comb (dis_casex, mk_dis_case_args (ks ~~ xss) k)) ks
val dis_qty = qty_isom --> HOLogic.boolT;
val dis_names = map (fn k => Binding.qualify_name true uTname ("dis" ^ string_of_int k)) ks;
val (diss, lthy5) = @{fold_map 2} (fn b => fn rhs => fn lthy =>
lift_def ld_no_notes (b, NoSyn) dis_qty rhs (K all_tac) [] lthy
|>> mk_lift_const_of_lift_def dis_qty) dis_names dis_rhs lthy4
val unfold_lift_sel_rsp = @{lemma "(\<And>x. P1 x \<Longrightarrow> P2 (f x)) \<Longrightarrow> (rel_fun (eq_onp P1) (eq_onp P2)) f f"
by (simp add: eq_onp_same_args rel_fun_eq_onp_rel)}
fun lift_sel_tac exhaust_rule dt_rules wits ctxt i =
(Method.insert_tac ctxt wits THEN'
eq_onp_to_top_tac ctxt THEN' (* normalize *)
rtac ctxt unfold_lift_sel_rsp THEN'
case_tac exhaust_rule ctxt THEN_ALL_NEW (
EVERY' [hyp_subst_tac ctxt, (* does not kill wits because = was rewritten to eq_onp top *)
Raw_Simplifier.rewrite_goal_tac ctxt (map safe_mk_meta_eq dt_rules),
REPEAT_DETERM o etac ctxt conjE, assume_tac ctxt])) i
val pred_simps = Transfer.lookup_pred_data lthy5 (Tname rty) |> the |> Transfer.pred_simps
val sel_tac = lift_sel_tac (#exhaust ctr_sugar) (#case_thms ctr_sugar @ pred_simps)
val sel_names =
map (fn (k, xs) =>
map (fn k' => Binding.qualify_name true uTname ("sel" ^ string_of_int k ^ string_of_int k'))
(1 upto length xs)) (ks ~~ ctr_Tss);
val (selss, lthy6) = @{fold_map 2} (@{fold_map 2} (fn b => fn (qty_ret, wits, rhs) => fn lthy =>
lift_def_code_dt { code_dt = true, lift_config = ld_no_notes }
(b, NoSyn) (qty_isom --> qty_ret) rhs (HEADGOAL o sel_tac wits) [] lthy
|>> mk_lift_const_of_lift_def (qty_isom --> qty_ret))) sel_names sel_rhs lthy5
(* now we can execute the qty qty_isom isomorphism *)
fun mk_type_definition newT oldT RepC AbsC A =
let
val typedefC =
Const (\<^const_name>\<open>type_definition\<close>,
(newT --> oldT) --> (oldT --> newT) --> HOLogic.mk_setT oldT --> HOLogic.boolT);
in typedefC $ RepC $ AbsC $ A end;
val typedef_goal = mk_type_definition qty_isom qty rep_isom abs_isom (HOLogic.mk_UNIV qty) |>
HOLogic.mk_Trueprop;
fun typ_isom_tac ctxt i =
EVERY' [ SELECT_GOAL (Local_Defs.unfold0_tac ctxt @{thms type_definition_def}),
DETERM o Transfer.transfer_tac true ctxt,
SELECT_GOAL (Local_Defs.unfold0_tac ctxt @{thms eq_onp_top_eq_eq}) (* normalize *),
Raw_Simplifier.rewrite_goal_tac ctxt
(map safe_mk_meta_eq @{thms id_apply simp_thms Ball_def}),
rtac ctxt TrueI] i;
val (_, transfer_ctxt) =
Proof_Context.note_thms ""
(Binding.empty_atts,
[(@{thms right_total_UNIV_transfer}, [Transfer.transfer_add]),
(@{thms Domain_eq_top}, [Transfer.transfer_domain_add])]) lthy6;
val quot_thm_isom =
Goal.prove_sorry transfer_ctxt [] [] typedef_goal
(fn {context = goal_ctxt, ...} => typ_isom_tac goal_ctxt 1)
|> Thm.close_derivation \<^here>
|> singleton (Variable.export transfer_ctxt lthy6)
|> (fn thm => @{thm UNIV_typedef_to_Quotient} OF [thm, @{thm reflexive}])
val qty_isom_name = Tname qty_isom;
val quot_isom_rep =
let
val (quotients : Lifting_Term.quotients) =
Symtab.insert (Lifting_Info.quotient_eq)
(qty_isom_name, {quot_thm = quot_thm_isom, pcr_info = NONE}) Symtab.empty
val id_actions = { constr = K I, lift = K I, comp_lift = K I }
in
fn ctxt => fn (rty, qty) => Lifting_Term.prove_schematic_quot_thm id_actions quotients
ctxt (rty, qty) () |> fst |> Lifting_Term.force_qty_type ctxt qty
|> quot_thm_rep
end;
val (x, x_ctxt) = yield_singleton (mk_Frees "x") qty_isom lthy6;
fun mk_ctr ctr ctr_Ts sels =
let
val sel_ret_Ts = map (dest_Const #> snd #> body_type) sels;
fun rep_isom lthy t (rty, qty) =
let
val rep = quot_isom_rep lthy (rty, qty)
in
if is_Const rep andalso (rep |> dest_Const |> fst) = \<^const_name>\<open>id\<close> then
t else rep $ t
end;
in
@{fold 3} (fn sel => fn ctr_T => fn sel_ret_T => fn ctr =>
ctr $ rep_isom x_ctxt (sel $ x) (ctr_T, sel_ret_T)) sels ctr_Ts sel_ret_Ts ctr
end;
(* stolen from Metis *)
exception BREAK_LIST
fun break_list (x :: xs) = (x, xs)
| break_list _ = raise BREAK_LIST
val (ctr, ctrs) = qty_ctrs |> rev |> break_list;
val (ctr_Ts, ctr_Tss) = qty_ctr_Tss |> rev |> break_list;
val (sel, rselss) = selss |> rev |> break_list;
val rdiss = rev diss |> tl;
val first_ctr = mk_ctr ctr ctr_Ts sel;
fun mk_If_ctr dis ctr ctr_Ts sel elsex = mk_If (dis$x) (mk_ctr ctr ctr_Ts sel) elsex;
val rhs = @{fold 4} mk_If_ctr rdiss ctrs ctr_Tss rselss first_ctr;
val rep_isom_code_goal = HOLogic.mk_Trueprop (HOLogic.mk_eq (rep_isom$x, rhs));
local
val rep_isom_code_tac_rules = map safe_mk_meta_eq @{thms refl id_apply if_splits simp_thms}
in
fun rep_isom_code_tac (ctr_sugar:Ctr_Sugar.ctr_sugar) ctxt i =
let
val exhaust = ctr_sugar |> #exhaust
val cases = ctr_sugar |> #case_thms
val map_ids = fp |> #fp_nesting_bnfs |> map BNF_Def.map_id0_of_bnf
val simp_rules = map safe_mk_meta_eq (cases @ map_ids) @ rep_isom_code_tac_rules
in
EVERY' [Transfer.gen_frees_tac [] ctxt, DETERM o (Transfer.transfer_tac true ctxt),
case_tac exhaust ctxt THEN_ALL_NEW EVERY' [hyp_subst_tac ctxt,
Raw_Simplifier.rewrite_goal_tac ctxt simp_rules, rtac ctxt TrueI ]] i
end
end
(* stolen from bnf_fp_n2m.ML *)
fun force_typ ctxt T =
Term.map_types Type_Infer.paramify_vars
#> Type.constraint T
#> singleton (Type_Infer_Context.infer_types ctxt);
(* The following checks that the types in rty satisfy the arity constraints imposed by the
datatype fp. Otherwise rep_isom_code_tac (especially the transfer step in it) could fail in
such a way that it is not easy to see that the problem is caused by sorts.
*)
val _ = yield_singleton (mk_Frees "x") (#T fp) x_ctxt |> fst |> force_typ x_ctxt qty
val rep_isom_code =
Goal.prove_sorry x_ctxt [] [] rep_isom_code_goal
(fn {context = goal_ctxt, ...} => rep_isom_code_tac ctr_sugar goal_ctxt 1)
|> Thm.close_derivation \<^here>
|> singleton(Variable.export x_ctxt lthy6)
in
lthy6
|> snd o Local_Theory.note ((Binding.empty, @{attributes [code]}), [rep_isom_code])
|> Lifting_Setup.lifting_forget pointer
|> pair (selss, diss, rep_isom_code)
end
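(* constr handles one abstract type during the traversal of the return type: unless a
   code_dt entry already exists, it introduces (by typedef) the subtype of rty given by the
   quotient's eq_onp predicate as an isomorphic copy of qty, sets up lifting for it and
   builds the executable rep isomorphism via mk_rep_isom. *)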
and constr qty (quot_thm, (lthy0, rel_eq_onps)) =
let
val quot_thm = Lifting_Term.force_qty_type lthy0 qty quot_thm
val (rty, qty) = quot_thm_rty_qty quot_thm
val rty_name = Tname rty;
val pred_data = Transfer.lookup_pred_data lthy0 rty_name
val pred_data = if is_some pred_data then the pred_data
else error ("code_dt: " ^ quote rty_name ^ " is not a datatype.")
val rel_eq_onp = safe_mk_meta_eq (Transfer.rel_eq_onp pred_data);
val rel_eq_onps = insert Thm.eq_thm rel_eq_onp rel_eq_onps
fun R_conv ctxt =
Conv.top_sweep_rewrs_conv @{thms eq_onp_top_eq_eq[symmetric, THEN eq_reflection]} ctxt
then_conv Conv.rewr_conv rel_eq_onp
val quot_thm =
Conv.fconv_rule (HOLogic.Trueprop_conv (Quotient_R_conv (R_conv lthy0))) quot_thm;
in
if is_none (code_dt_of lthy0 (rty, qty)) then
let
val non_empty_pred = quot_thm RS @{thm type_definition_Quotient_not_empty}
val pred = quot_thm_rel quot_thm |> dest_comb |> snd;
val (pred, lthy1) = lthy0
|> (snd o Local_Theory.begin_nested)
|> yield_singleton (Variable.import_terms true) pred;
val TFrees = Term.add_tfreesT qty []
fun non_empty_typedef_tac non_empty_pred ctxt i =
(Method.insert_tac ctxt [non_empty_pred] THEN'
SELECT_GOAL (Local_Defs.unfold0_tac ctxt [mem_Collect_eq]) THEN' assume_tac ctxt) i
val uTname = unique_Tname (rty, qty)
val Tdef_set = HOLogic.mk_Collect ("x", rty, pred $ Free("x", rty));
val ((_, tcode_dt), lthy2) = lthy1
|> conceal_naming_result
(typedef (Binding.concealed uTname, TFrees, NoSyn)
Tdef_set NONE (fn lthy => HEADGOAL (non_empty_typedef_tac non_empty_pred lthy)));
val type_definition_thm = tcode_dt |> snd |> #type_definition;
val qty_isom = tcode_dt |> fst |> #abs_type;
val (binding, lthy3) = lthy2
|> conceal_naming_result
(Lifting_Setup.setup_by_typedef_thm {notes = false} type_definition_thm)
||> Local_Theory.end_nested
val (wit, wit_thm) = mk_witness quot_thm;
val code_dt = mk_code_dt rty qty wit wit_thm NONE;
val lthy4 = lthy3
|> update_code_dt code_dt
|> mk_rep_isom binding (rty, qty, qty_isom) |> snd
in
(quot_thm, (lthy4, rel_eq_onps))
end
else
(quot_thm, (lthy0, rel_eq_onps))
end
and lift_def_code_dt config = gen_lift_def (add_lift_def_code_dt config)
(** from parsed parameters to the config record **)
fun map_config_code_dt f1 f2 ({code_dt = code_dt, lift_config = lift_config}: config_code_dt) =
{code_dt = f1 code_dt, lift_config = f2 lift_config}
fun update_config_code_dt nval = map_config_code_dt (K nval) I
val config_flags = [("code_dt", update_config_code_dt true)]
fun evaluate_params params =
let
fun eval_param param config =
case AList.lookup (op =) config_flags param of
SOME update => update config
| NONE => error ("Unknown parameter: " ^ (quote param))
in
fold eval_param params default_config_code_dt
end
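(* e.g. "lift_definition (code_dt) c :: T is rhs" passes ["code_dt"] here and thereby
   enables the workaround for this definition. *)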
(**
lift_definition command. It opens a proof of a corresponding respectfulness
theorem in a user-friendly, readable form. Then add_lift_def_code_dt is called internally.
**)
local
val eq_onp_assms_tac_fixed_rules = map (Transfer.prep_transfer_domain_thm \<^context>)
[@{thm pcr_Domainp_total}, @{thm pcr_Domainp_par_left_total}, @{thm pcr_Domainp_par},
@{thm pcr_Domainp}]
in
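(* Rewrites the internal respectfulness goal (phrased with rel_fun/eq_onp) into the readable
   form that is presented to the user, returning a rule that transports a proof of the
   readable goal back to the internal one. *)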
fun mk_readable_rsp_thm_eq tm ctxt =
let
val ctm = Thm.cterm_of ctxt tm
fun assms_rewr_conv tactic rule ct =
let
fun prove_extra_assms thm =
let
val assms = cprems_of thm
fun finish thm = if Thm.no_prems thm then SOME (Goal.conclude thm) else NONE
fun prove ctm = Option.mapPartial finish (SINGLE tactic (Goal.init ctm))
in
map_interrupt prove assms
end
fun cconl_of thm = Drule.strip_imp_concl (Thm.cprop_of thm)
fun lhs_of thm = fst (Thm.dest_equals (cconl_of thm))
fun rhs_of thm = snd (Thm.dest_equals (cconl_of thm))
val rule1 = Thm.incr_indexes (Thm.maxidx_of_cterm ct + 1) rule;
val lhs = lhs_of rule1;
val rule2 = Thm.rename_boundvars (Thm.term_of lhs) (Thm.term_of ct) rule1;
val rule3 =
Thm.instantiate (Thm.match (lhs, ct)) rule2
handle Pattern.MATCH => raise CTERM ("assms_rewr_conv", [lhs, ct]);
val proved_assms = prove_extra_assms rule3
in
case proved_assms of
SOME proved_assms =>
let
val rule3 = proved_assms MRSL rule3
val rule4 =
if lhs_of rule3 aconvc ct then rule3
else
let val ceq = Thm.dest_fun2 (Thm.cprop_of rule3)
in rule3 COMP Thm.trivial (Thm.mk_binop ceq ct (rhs_of rule3)) end
in Thm.transitive rule4 (Thm.beta_conversion true (rhs_of rule4)) end
| NONE => Conv.no_conv ct
end
fun assms_rewrs_conv tactic rules = Conv.first_conv (map (assms_rewr_conv tactic) rules)
fun simp_arrows_conv ctm =
let
val unfold_conv = Conv.rewrs_conv
[@{thm rel_fun_eq_eq_onp[THEN eq_reflection]},
@{thm rel_fun_eq_onp_rel[THEN eq_reflection]},
@{thm rel_fun_eq[THEN eq_reflection]},
@{thm rel_fun_eq_rel[THEN eq_reflection]},
@{thm rel_fun_def[THEN eq_reflection]}]
fun binop_conv2 cv1 cv2 = Conv.combination_conv (Conv.arg_conv cv1) cv2
val eq_onp_assms_tac_rules = @{thm left_unique_OO} ::
eq_onp_assms_tac_fixed_rules @ (Transfer.get_transfer_raw ctxt)
val intro_top_rule = @{thm eq_onp_top_eq_eq[symmetric, THEN eq_reflection]}
val kill_tops = Conv.top_sweep_rewrs_conv @{thms eq_onp_top_eq_eq[THEN eq_reflection]} ctxt
val eq_onp_assms_tac = (CONVERSION kill_tops THEN'
TRY o REPEAT_ALL_NEW (resolve_tac ctxt eq_onp_assms_tac_rules)
THEN_ALL_NEW (DETERM o Transfer.eq_tac ctxt)) 1
val relator_eq_onp_conv = Conv.bottom_conv
(K (Conv.try_conv (assms_rewrs_conv eq_onp_assms_tac
(intro_top_rule :: Lifting_Info.get_relator_eq_onp_rules ctxt)))) ctxt
then_conv kill_tops
val relator_eq_conv = Conv.bottom_conv
(K (Conv.try_conv (Conv.rewrs_conv (Transfer.get_relator_eq ctxt)))) ctxt
in
case (Thm.term_of ctm) of
Const (\<^const_name>\<open>rel_fun\<close>, _) $ _ $ _ =>
(binop_conv2 simp_arrows_conv simp_arrows_conv then_conv unfold_conv) ctm
| _ => (relator_eq_onp_conv then_conv relator_eq_conv) ctm
end
val unfold_ret_val_invs = Conv.bottom_conv
(K (Conv.try_conv (Conv.rewr_conv @{thm eq_onp_same_args[THEN eq_reflection]}))) ctxt
val unfold_inv_conv =
Conv.top_sweep_rewrs_conv @{thms eq_onp_def[THEN eq_reflection]} ctxt
val simp_conv = HOLogic.Trueprop_conv (Conv.fun2_conv simp_arrows_conv)
val univq_conv = Conv.rewr_conv @{thm HOL.all_simps(6)[symmetric, THEN eq_reflection]}
val univq_prenex_conv = Conv.top_conv (K (Conv.try_conv univq_conv)) ctxt
val beta_conv = Thm.beta_conversion true
val eq_thm =
(simp_conv then_conv univq_prenex_conv then_conv beta_conv then_conv unfold_ret_val_invs
then_conv unfold_inv_conv) ctm
in
Object_Logic.rulify ctxt (eq_thm RS Drule.equal_elim_rule2)
end
end
fun rename_to_tnames ctxt term =
let
fun all_typs (Const (\<^const_name>\<open>Pure.all\<close>, _) $ Abs (_, T, t)) = T :: all_typs t
| all_typs _ = []
fun rename (Const (\<^const_name>\<open>Pure.all\<close>, T1) $ Abs (_, T2, t)) (new_name :: names) =
(Const (\<^const_name>\<open>Pure.all\<close>, T1) $ Abs (new_name, T2, rename t names))
| rename t _ = t
val (fixed_def_t, _) = yield_singleton (Variable.importT_terms) term ctxt
val new_names = Old_Datatype_Prop.make_tnames (all_typs fixed_def_t)
in
rename term new_names
end
fun quot_thm_err ctxt (rty, qty) pretty_msg =
let
val error_msg = cat_lines
["Lifting failed for the following types:",
Pretty.string_of (Pretty.block
[Pretty.str "Raw type:", Pretty.brk 2, Syntax.pretty_typ ctxt rty]),
Pretty.string_of (Pretty.block
[Pretty.str "Abstract type:", Pretty.brk 2, Syntax.pretty_typ ctxt qty]),
"",
(Pretty.string_of (Pretty.block
[Pretty.str "Reason:", Pretty.brk 2, pretty_msg]))]
in
error error_msg
end
fun check_rty_err ctxt (rty_schematic, rty_forced) (raw_var, rhs_raw) =
let
val (_, ctxt') = Proof_Context.read_var raw_var ctxt
val rhs = Syntax.read_term ctxt' rhs_raw
val error_msg = cat_lines
["Lifting failed for the following term:",
Pretty.string_of (Pretty.block
[Pretty.str "Term:", Pretty.brk 2, Syntax.pretty_term ctxt rhs]),
Pretty.string_of (Pretty.block
[Pretty.str "Type:", Pretty.brk 2, Syntax.pretty_typ ctxt rty_schematic]),
"",
(Pretty.string_of (Pretty.block
[Pretty.str "Reason:",
Pretty.brk 2,
Pretty.str "The type of the term cannot be instantiated to",
Pretty.brk 1,
Pretty.quote (Syntax.pretty_typ ctxt rty_forced),
Pretty.str "."]))]
in
error error_msg
end
fun lift_def_cmd (params, raw_var, rhs_raw, par_xthms) lthy0 =
let
val config = evaluate_params params
val ((binding, SOME qty, mx), lthy1) = Proof_Context.read_var raw_var lthy0
val var = (binding, mx)
val rhs = Syntax.read_term lthy1 rhs_raw
val par_thms = Attrib.eval_thms lthy1 par_xthms
val (goal, after_qed) = lthy1
|> prepare_lift_def (add_lift_def_code_dt config) var qty rhs par_thms
val (goal, after_qed) =
case goal of
NONE => (goal, K (after_qed Drule.dummy_thm))
| SOME prsp_tm =>
let
val readable_rsp_thm_eq = mk_readable_rsp_thm_eq prsp_tm lthy1
val (readable_rsp_tm, _) = Logic.dest_implies (Thm.prop_of readable_rsp_thm_eq)
val readable_rsp_tm_tnames = rename_to_tnames lthy1 readable_rsp_tm
fun after_qed' [[thm]] lthy =
let
val internal_rsp_thm =
Goal.prove lthy [] [] prsp_tm (fn {context = goal_ctxt, ...} =>
rtac goal_ctxt readable_rsp_thm_eq 1 THEN
Proof_Context.fact_tac goal_ctxt [thm] 1)
in after_qed internal_rsp_thm lthy end
in (SOME readable_rsp_tm_tnames, after_qed') end
fun after_qed_with_err_handling thmss ctxt = (after_qed thmss ctxt
handle Lifting_Term.QUOT_THM (rty, qty, msg) => quot_thm_err lthy1 (rty, qty) msg)
handle Lifting_Term.CHECK_RTY (rty_schematic, rty_forced) =>
check_rty_err lthy1 (rty_schematic, rty_forced) (raw_var, rhs_raw);
in
lthy1
|> Proof.theorem NONE (snd oo after_qed_with_err_handling) [map (rpair []) (the_list goal)]
end
fun lift_def_cmd_with_err_handling (params, (raw_var, rhs_raw, par_xthms)) lthy =
(lift_def_cmd (params, raw_var, rhs_raw, par_xthms) lthy
handle Lifting_Term.QUOT_THM (rty, qty, msg) => quot_thm_err lthy (rty, qty) msg)
handle Lifting_Term.CHECK_RTY (rty_schematic, rty_forced) =>
check_rty_err lthy (rty_schematic, rty_forced) (raw_var, rhs_raw);
val parse_param = Parse.name
val parse_params = Scan.optional (Args.parens (Parse.list parse_param)) [];
(* command syntax *)
val _ =
Outer_Syntax.local_theory_to_proof \<^command_keyword>\<open>lift_definition\<close>
"definition for constants over the quotient type"
(parse_params --
(((Parse.binding -- (\<^keyword>\<open>::\<close> |-- (Parse.typ >> SOME) -- Parse.opt_mixfix')
>> Scan.triple2) --
(\<^keyword>\<open>is\<close> |-- Parse.term) --
Scan.optional (\<^keyword>\<open>parametric\<close> |-- Parse.!!! Parse.thms1) []) >> Scan.triple1)
>> lift_def_cmd_with_err_handling);
end
diff --git a/src/HOL/Tools/Lifting/lifting_setup.ML b/src/HOL/Tools/Lifting/lifting_setup.ML
--- a/src/HOL/Tools/Lifting/lifting_setup.ML
+++ b/src/HOL/Tools/Lifting/lifting_setup.ML
@@ -1,1048 +1,1046 @@
(* Title: HOL/Tools/Lifting/lifting_setup.ML
Author: Ondrej Kuncar
Setting up the lifting infrastructure.
*)
signature LIFTING_SETUP =
sig
exception SETUP_LIFTING_INFR of string
type config = { notes: bool };
val default_config: config;
val setup_by_quotient: config -> thm -> thm option -> thm option -> local_theory ->
binding * local_theory
val setup_by_typedef_thm: config -> thm -> local_theory -> binding * local_theory
val lifting_restore: Lifting_Info.quotient -> Context.generic -> Context.generic
val lifting_forget: string -> local_theory -> local_theory
val update_transfer_rules: string -> local_theory -> local_theory
val pointer_of_bundle_binding: Proof.context -> binding -> string
end
structure Lifting_Setup: LIFTING_SETUP =
struct
open Lifting_Util
infix 0 MRSL
exception SETUP_LIFTING_INFR of string
(* Config *)
type config = { notes: bool };
val default_config = { notes = true };
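(* Defines the correspondence relation cr_<qty>, with cr_<qty> x y iff x = Rep y, from the
   given representation function Rep. *)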
fun define_crel (config: config) rep_fun lthy =
let
val (qty, rty) = (dest_funT o fastype_of) rep_fun
val rep_fun_graph = (HOLogic.eq_const rty) $ Bound 1 $ (rep_fun $ Bound 0)
val def_term = Abs ("x", rty, Abs ("y", qty, rep_fun_graph))
val qty_name = (Binding.name o Long_Name.base_name o fst o dest_Type) qty
val crel_name = Binding.prefix_name "cr_" qty_name
val (fixed_def_term, lthy1) = lthy |> yield_singleton (Variable.importT_terms) def_term
val ((_, (_ , def_thm)), lthy2) =
if #notes config then
Local_Theory.define
((crel_name, NoSyn), ((Thm.def_binding crel_name, []), fixed_def_term)) lthy1
else
Local_Theory.define
((Binding.concealed crel_name, NoSyn), (Binding.empty_atts, fixed_def_term)) lthy1
in
(def_thm, lthy2)
end
fun print_define_pcrel_warning msg =
let
val warning_msg = cat_lines
["Generation of a parametrized correspondence relation failed.",
(Pretty.string_of (Pretty.block
[Pretty.str "Reason:", Pretty.brk 2, msg]))]
in
warning warning_msg
end
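(* Defines the parametrized correspondence relation pcr_<qty> as the composition
   param_rel OO cr_<qty>, where param_rel is the parametrized relator generated for the raw
   type; if no such relator can be generated, only a warning is printed. *)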
fun define_pcrel (config: config) crel lthy0 =
let
val (fixed_crel, lthy1) = yield_singleton Variable.importT_terms crel lthy0
val [rty', qty] = (binder_types o fastype_of) fixed_crel
val (param_rel, args) = Lifting_Term.generate_parametrized_relator lthy1 rty'
val rty_raw = (domain_type o range_type o fastype_of) param_rel
val tyenv_match = Sign.typ_match (Proof_Context.theory_of lthy1) (rty_raw, rty') Vartab.empty
val param_rel_subst = Envir.subst_term (tyenv_match,Vartab.empty) param_rel
val args_subst = map (Envir.subst_term (tyenv_match,Vartab.empty)) args
val (instT, lthy2) = lthy1
|> Variable.declare_names fixed_crel
|> Variable.importT_inst (param_rel_subst :: args_subst)
val args_fixed = (map (Term_Subst.instantiate (instT, Vars.empty))) args_subst
val param_rel_fixed = Term_Subst.instantiate (instT, Vars.empty) param_rel_subst
val rty = (domain_type o fastype_of) param_rel_fixed
val relcomp_op = Const (\<^const_name>\<open>relcompp\<close>,
(rty --> rty' --> HOLogic.boolT) -->
(rty' --> qty --> HOLogic.boolT) -->
rty --> qty --> HOLogic.boolT)
val qty_name = (fst o dest_Type) qty
val pcrel_name = Binding.prefix_name "pcr_" ((Binding.name o Long_Name.base_name) qty_name)
val relator_type = foldr1 (op -->) ((map type_of args_fixed) @ [rty, qty, HOLogic.boolT])
val lhs = Library.foldl (op $) ((Free (Binding.name_of pcrel_name, relator_type)), args_fixed)
val rhs = relcomp_op $ param_rel_fixed $ fixed_crel
val definition_term = Logic.mk_equals (lhs, rhs)
fun note_def lthy =
Specification.definition (SOME (pcrel_name, SOME relator_type, NoSyn)) [] []
(Binding.empty_atts, definition_term) lthy |>> (snd #> snd);
fun raw_def lthy =
let
val ((_, rhs), prove) =
Local_Defs.derived_def lthy (K []) {conditional = true} definition_term;
val ((_, (_, raw_th)), lthy') =
Local_Theory.define
((Binding.concealed pcrel_name, NoSyn), (Binding.empty_atts, rhs)) lthy;
val th = prove lthy' raw_th;
in
(th, lthy')
end
val (def_thm, lthy3) = if #notes config then note_def lthy2 else raw_def lthy2
in
(SOME def_thm, lthy3)
end
handle Lifting_Term.PARAM_QUOT_THM (_, msg) => (print_define_pcrel_warning msg; (NONE, lthy0))
local
val eq_OO_meta = mk_meta_eq @{thm eq_OO}
fun print_generate_pcr_cr_eq_error ctxt term =
let
val goal = Const (\<^const_name>\<open>HOL.eq\<close>, dummyT) $ term $ Const (\<^const_name>\<open>HOL.eq\<close>, dummyT)
val error_msg = cat_lines
["Generation of a pcr_cr_eq failed.",
(Pretty.string_of (Pretty.block
[Pretty.str "Reason: Cannot prove this: ", Pretty.brk 2, Syntax.pretty_term ctxt goal])),
"Most probably a relator_eq rule for one of the involved types is missing."]
in
error error_msg
end
in
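(* Derives pcr_cr_eq, an equation of the form "pcr_<qty> op= ... op= = cr_<qty>", by
   instantiating all parametricity arguments of the pcr relation with equality and
   normalizing with the relator_eq rules and eq_OO. *)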
fun define_pcr_cr_eq (config: config) lthy pcr_rel_def =
let
val lhs = (Thm.term_of o Thm.lhs_of) pcr_rel_def
val qty_name =
(Binding.name o Long_Name.base_name o fst o dest_Type o
List.last o binder_types o fastype_of) lhs
val args = (snd o strip_comb) lhs
fun make_inst var ctxt =
let
val typ = snd (relation_types (#2 (dest_Var var)))
val sort = Type.sort_of_atyp typ
val (fresh_var, ctxt') = yield_singleton Variable.invent_types sort ctxt
val inst = (#1 (dest_Var var), Thm.cterm_of ctxt' (HOLogic.eq_const (TFree fresh_var)))
in (inst, ctxt') end
val (args_inst, args_ctxt) = fold_map make_inst args lthy
val pcr_cr_eq =
pcr_rel_def
|> infer_instantiate args_ctxt args_inst
|> Conv.fconv_rule (Conv.arg_conv (Conv.arg1_conv
(Conv.bottom_rewrs_conv (Transfer.get_relator_eq args_ctxt) args_ctxt)))
in
case (Thm.term_of o Thm.rhs_of) pcr_cr_eq of
Const (\<^const_name>\<open>relcompp\<close>, _) $ Const (\<^const_name>\<open>HOL.eq\<close>, _) $ _ =>
let
val thm =
pcr_cr_eq
|> Conv.fconv_rule (Conv.arg_conv (Conv.rewr_conv eq_OO_meta))
|> HOLogic.mk_obj_eq
|> singleton (Variable.export args_ctxt lthy)
val lthy' = lthy
|> #notes config ?
(Local_Theory.note ((Binding.qualify_name true qty_name "pcr_cr_eq", []), [thm]) #> #2)
in
(thm, lthy')
end
| Const (\<^const_name>\<open>relcompp\<close>, _) $ t $ _ => print_generate_pcr_cr_eq_error args_ctxt t
| _ => error "generate_pcr_cr_eq: implementation error"
end
end
fun define_code_constr quot_thm lthy =
let
val abs = quot_thm_abs quot_thm
in
if is_Const abs then
let
val (fixed_abs, lthy') = yield_singleton Variable.importT_terms abs lthy
in
Local_Theory.background_theory
(Code.declare_datatype_global [dest_Const fixed_abs]) lthy'
end
else
lthy
end
fun define_abs_type quot_thm =
Lifting_Def.can_generate_code_cert quot_thm ?
Code.declare_abstype (quot_thm RS @{thm Quotient_abs_rep});
local
exception QUOT_ERROR of Pretty.T list
in
fun quot_thm_sanity_check ctxt quot_thm =
let
val _ =
if (Thm.nprems_of quot_thm > 0) then
raise QUOT_ERROR [Pretty.block
[Pretty.str "The Quotient theorem has extra assumptions:",
Pretty.brk 1,
Thm.pretty_thm ctxt quot_thm]]
else ()
val _ = quot_thm |> Thm.concl_of |> HOLogic.dest_Trueprop |> dest_Quotient
handle TERM _ => raise QUOT_ERROR
[Pretty.block
[Pretty.str "The Quotient theorem is not of the right form:",
Pretty.brk 1,
Thm.pretty_thm ctxt quot_thm]]
val ((_, [quot_thm_fixed]), ctxt') = Variable.importT [quot_thm] ctxt
val (rty, qty) = quot_thm_rty_qty quot_thm_fixed
val rty_tfreesT = Term.add_tfree_namesT rty []
val qty_tfreesT = Term.add_tfree_namesT qty []
val extra_rty_tfrees =
case subtract (op =) qty_tfreesT rty_tfreesT of
[] => []
| extras => [Pretty.block ([Pretty.str "Extra variables in the raw type:",
Pretty.brk 1] @
((Pretty.commas o map (Pretty.str o quote)) extras) @
[Pretty.str "."])]
val not_type_constr =
case qty of
Type _ => []
| _ => [Pretty.block [Pretty.str "The quotient type ",
Pretty.quote (Syntax.pretty_typ ctxt' qty),
Pretty.brk 1,
Pretty.str "is not a type constructor."]]
val errs = extra_rty_tfrees @ not_type_constr
in
if null errs then () else raise QUOT_ERROR errs
end
handle QUOT_ERROR errs => error (cat_lines (["Sanity check of the quotient theorem failed:"]
@ (map (Pretty.string_of o Pretty.item o single) errs)))
end
fun lifting_bundle qty_full_name qinfo lthy =
let
val thy = Proof_Context.theory_of lthy
val binding =
Binding.qualify_name true (qty_full_name |> Long_Name.base_name |> Binding.name) "lifting"
val morphed_binding = Morphism.binding (Local_Theory.target_morphism lthy) binding
val bundle_name =
Name_Space.full_name (Name_Space.naming_of (Context.Theory thy)) morphed_binding
fun phi_qinfo phi = Lifting_Info.transform_quotient phi qinfo
val dummy_thm = Thm.transfer thy Drule.dummy_thm
val restore_lifting_att =
- ([dummy_thm],
- [map (Token.make_string o rpair Position.none)
- ["Lifting.lifting_restore_internal", bundle_name]])
+ ([dummy_thm], [map Token.make_string0 ["Lifting.lifting_restore_internal", bundle_name]])
in
lthy
- |> Local_Theory.declaration {syntax = false, pervasive = true}
+ |> Local_Theory.declaration {syntax = false, pervasive = true, pos = \<^here>}
(fn phi => Lifting_Info.init_restore_data bundle_name (phi_qinfo phi))
|> Bundle.bundle ((binding, [restore_lifting_att])) []
|> pair binding
end
fun setup_lifting_infr config quot_thm opt_reflp_thm lthy =
let
val _ = quot_thm_sanity_check lthy quot_thm
val (_, qty) = quot_thm_rty_qty quot_thm
val (pcrel_def, lthy1) = define_pcrel config (quot_thm_crel quot_thm) lthy
(**)
val pcrel_def = Option.map (Morphism.thm (Local_Theory.target_morphism lthy1)) pcrel_def
(**)
val (pcr_cr_eq, lthy2) =
case pcrel_def of
SOME pcrel_def => apfst SOME (define_pcr_cr_eq config lthy1 pcrel_def)
| NONE => (NONE, lthy1)
val pcr_info = case pcrel_def of
SOME pcrel_def => SOME { pcrel_def = pcrel_def, pcr_cr_eq = the pcr_cr_eq }
| NONE => NONE
val quotients = { quot_thm = quot_thm, pcr_info = pcr_info }
val qty_full_name = (fst o dest_Type) qty
fun quot_info phi = Lifting_Info.transform_quotient phi quotients
- val reflexivity_rule_attr = Attrib.internal (K Lifting_Info.add_reflexivity_rule_attribute)
+ val reflexivity_rule_attr = Attrib.internal \<^here> (K Lifting_Info.add_reflexivity_rule_attribute)
val lthy3 =
case opt_reflp_thm of
SOME reflp_thm =>
lthy2
|> (#2 oo Local_Theory.note)
((Binding.empty, [reflexivity_rule_attr]), [reflp_thm RS @{thm reflp_ge_eq}])
|> define_code_constr quot_thm
| NONE => lthy2 |> define_abs_type quot_thm
in
lthy3
- |> Local_Theory.declaration {syntax = false, pervasive = true}
+ |> Local_Theory.declaration {syntax = false, pervasive = true, pos = \<^here>}
(fn phi => Lifting_Info.update_quotients qty_full_name (quot_info phi))
|> lifting_bundle qty_full_name quotients
end
local
fun importT_inst_exclude exclude ts ctxt =
let
val tvars = rev (subtract op= exclude (fold Term.add_tvars ts []))
val (tfrees, ctxt') = Variable.invent_types (map #2 tvars) ctxt
in (TVars.make (tvars ~~ map TFree tfrees), ctxt') end
fun import_inst_exclude exclude ts ctxt =
let
val excludeT = fold (Term.add_tvarsT o snd) exclude []
val (instT, ctxt') = importT_inst_exclude excludeT ts ctxt
val vars = map (apsnd (Term_Subst.instantiateT instT))
(rev (subtract op= exclude (fold Term.add_vars ts [])))
val (xs, ctxt'') = Variable.variant_fixes (map (#1 o #1) vars) ctxt'
val inst = Vars.make (vars ~~ map Free (xs ~~ map #2 vars))
in ((instT, inst), ctxt'') end
fun import_terms_exclude exclude ts ctxt =
let val (inst, ctxt') = import_inst_exclude exclude ts ctxt
in (map (Term_Subst.instantiate inst) ts, ctxt') end
in
fun reduce_goal not_fix goal tac ctxt =
let
val (fixed_goal, ctxt') = yield_singleton (import_terms_exclude not_fix) goal ctxt
val init_goal = Goal.init (Thm.cterm_of ctxt' fixed_goal)
in
(singleton (Variable.export ctxt' ctxt) o Goal.conclude) (the (SINGLE tac init_goal))
end
end
local
val OO_rules = @{thms left_total_OO left_unique_OO right_total_OO right_unique_OO bi_total_OO
bi_unique_OO}
in
fun parametrize_class_constraint ctxt0 pcr_def constraint =
let
fun generate_transfer_rule pcr_def constraint goal ctxt =
let
val (fixed_goal, ctxt') = yield_singleton (Variable.import_terms true) goal ctxt
val init_goal = Goal.init (Thm.cterm_of ctxt' fixed_goal)
val rules = Transfer.get_transfer_raw ctxt'
val rules = constraint :: OO_rules @ rules
val tac =
K (Local_Defs.unfold0_tac ctxt' [pcr_def]) THEN' REPEAT_ALL_NEW (resolve_tac ctxt' rules)
in
(singleton (Variable.export ctxt' ctxt) o Goal.conclude) (the (SINGLE (tac 1) init_goal))
end
fun make_goal pcr_def constr =
let
val pred_name =
(fst o dest_Const o strip_args 1 o HOLogic.dest_Trueprop o Thm.prop_of) constr
val arg = (fst o Logic.dest_equals o Thm.prop_of) pcr_def
in
HOLogic.mk_Trueprop ((Const (pred_name, (fastype_of arg) --> HOLogic.boolT)) $ arg)
end
val check_assms =
let
val right_names = ["right_total", "right_unique", "left_total", "left_unique", "bi_total",
"bi_unique"]
fun is_right_name name = member op= right_names (Long_Name.base_name name)
fun is_trivial_assm (Const (name, _) $ Var (_, _)) = is_right_name name
| is_trivial_assm (Const (name, _) $ Free (_, _)) = is_right_name name
| is_trivial_assm _ = false
in
fn thm =>
let
val prems = map HOLogic.dest_Trueprop (Thm.prems_of thm)
val thm_name =
(Long_Name.base_name o fst o dest_Const o strip_args 1 o HOLogic.dest_Trueprop o Thm.concl_of) thm
val non_trivial_assms = filter_out is_trivial_assm prems
in
if null non_trivial_assms then ()
else
Pretty.block ([Pretty.str "Non-trivial assumptions in ",
Pretty.str thm_name,
Pretty.str " transfer rule found:",
Pretty.brk 1] @
Pretty.commas (map (Syntax.pretty_term ctxt0) non_trivial_assms))
|> Pretty.string_of
|> warning
end
end
val goal = make_goal pcr_def constraint
val thm = generate_transfer_rule pcr_def constraint goal ctxt0
val _ = check_assms thm
in
thm
end
end
local
val id_unfold = (Conv.rewr_conv (mk_meta_eq @{thm id_def}))
in
fun generate_parametric_id lthy rty id_transfer_rule =
let
(* this cannot raise an exception here, because define_pcrel would already have raised it *)
val (quot_thm, _, ctxt') = Lifting_Term.prove_param_quot_thm lthy rty
val parametrized_relator =
singleton (Variable.export_terms ctxt' lthy) (quot_thm_crel quot_thm)
val id_transfer =
@{thm id_transfer}
|> Thm.incr_indexes (Term.maxidx_of_term parametrized_relator + 1)
|> Conv.fconv_rule(HOLogic.Trueprop_conv (Conv.arg_conv id_unfold then_conv Conv.arg1_conv id_unfold))
val var = hd (Term.add_vars (Thm.prop_of id_transfer) [])
val inst = [(#1 var, Thm.cterm_of lthy parametrized_relator)]
val id_par_thm = infer_instantiate lthy inst id_transfer
in
Lifting_Def.generate_parametric_transfer_rule lthy id_transfer_rule id_par_thm
end
handle Lifting_Term.MERGE_TRANSFER_REL msg =>
let
val error_msg = cat_lines
["Generation of a parametric transfer rule for the abs. or the rep. function failed.",
"A non-parametric version will be used.",
(Pretty.string_of (Pretty.block
[Pretty.str "Reason:", Pretty.brk 2, msg]))]
in
(warning error_msg; id_transfer_rule)
end
end
local
fun rewrite_first_Domainp_arg rewr_thm thm = Conv.fconv_rule (Conv.concl_conv ~1 (HOLogic.Trueprop_conv
(Conv.arg1_conv (Conv.arg_conv (Conv.rewr_conv rewr_thm))))) thm
fun fold_Domainp_pcrel pcrel_def thm =
let
val ct =
thm |> Thm.cprop_of |> Drule.strip_imp_concl
|> Thm.dest_arg |> Thm.dest_arg1 |> Thm.dest_arg
val pcrel_def = Thm.incr_indexes (Thm.maxidx_of_cterm ct + 1) pcrel_def
val thm' = Thm.instantiate (Thm.match (ct, Thm.rhs_of pcrel_def)) thm
handle Pattern.MATCH => raise CTERM ("fold_Domainp_pcrel", [ct, Thm.rhs_of pcrel_def])
in
rewrite_first_Domainp_arg (Thm.symmetric pcrel_def) thm'
end
fun reduce_Domainp ctxt rules thm =
let
val goal = thm |> Thm.prems_of |> hd
val var = goal |> HOLogic.dest_Trueprop |> dest_comb |> snd |> dest_Var
val reduced_assm =
reduce_goal [var] goal (TRY (REPEAT_ALL_NEW (resolve_tac ctxt rules) 1)) ctxt
in
reduced_assm RS thm
end
in
fun parametrize_domain dom_thm (pcr_info : Lifting_Info.pcr) ctxt0 =
let
fun reduce_first_assm ctxt rules thm =
let
val goal = thm |> Thm.prems_of |> hd
val reduced_assm =
reduce_goal [] goal (TRY (REPEAT_ALL_NEW (resolve_tac ctxt rules) 1)) ctxt
in
reduced_assm RS thm
end
val pcr_cr_met_eq = #pcr_cr_eq pcr_info RS @{thm eq_reflection}
val pcr_Domainp_eq = rewrite_first_Domainp_arg (Thm.symmetric pcr_cr_met_eq) dom_thm
val pcrel_def = #pcrel_def pcr_info
val pcr_Domainp_par_left_total =
(dom_thm RS @{thm pcr_Domainp_par_left_total})
|> fold_Domainp_pcrel pcrel_def
|> reduce_first_assm ctxt0 (Lifting_Info.get_reflexivity_rules ctxt0)
val pcr_Domainp_par =
(dom_thm RS @{thm pcr_Domainp_par})
|> fold_Domainp_pcrel pcrel_def
|> reduce_Domainp ctxt0 (Transfer.get_relator_domain ctxt0)
val pcr_Domainp =
(dom_thm RS @{thm pcr_Domainp})
|> fold_Domainp_pcrel pcrel_def
val thms =
[("domain", [pcr_Domainp], @{attributes [transfer_domain_rule]}),
("domain_par", [pcr_Domainp_par], @{attributes [transfer_domain_rule]}),
("domain_par_left_total", [pcr_Domainp_par_left_total], @{attributes [transfer_domain_rule]}),
("domain_eq", [pcr_Domainp_eq], @{attributes [transfer_domain_rule]})]
in
thms
end
fun parametrize_total_domain left_total pcrel_def ctxt =
let
val thm =
(left_total RS @{thm pcr_Domainp_total})
|> fold_Domainp_pcrel pcrel_def
|> reduce_Domainp ctxt (Transfer.get_relator_domain ctxt)
in
[("domain", [thm], @{attributes [transfer_domain_rule]})]
end
end
fun get_pcrel_info ctxt qty_full_name =
#pcr_info (the (Lifting_Info.lookup_quotients ctxt qty_full_name))
fun get_Domainp_thm quot_thm =
the (get_first (try(curry op RS quot_thm)) [@{thm eq_onp_to_Domainp}, @{thm Quotient_to_Domainp}])
fun notes names thms =
let
val notes =
if names then map (fn (name, thms, attrs) => ((name, []), [(thms, attrs)])) thms
else map_filter (fn (_, thms, attrs) => if null attrs then NONE
else SOME (Binding.empty_atts, [(thms, attrs)])) thms
in
Local_Theory.notes notes #> snd
end
fun map_thms map_name map_thm thms =
map (fn (name, thms, attr) => (map_name name, map map_thm thms, attr)) thms
(*
Sets up the Lifting package by a quotient theorem.
quot_thm - a quotient theorem (Quotient R Abs Rep T)
opt_reflp_thm - an optional theorem stating that the relation R from quot_thm is reflexive
(in the form "reflp R")
opt_par_thm - an optional parametricity theorem for R
*)
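(* A hedged ML-level sketch of a direct call (the theorem names Quotient_int and
reflp_intrel are hypothetical; default_config is the configuration also used by
setup_lifting_cmd below):

     setup_by_quotient default_config @{thm Quotient_int}
       (SOME @{thm reflp_intrel}) NONE lthy

Here Quotient_int is assumed to have the form "Quotient intrel Abs_Integ Rep_Integ cr_int"
and reflp_intrel the form "reflp intrel"; the result pairs the generated
<type>.lifting bundle binding with the updated local theory. *)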
fun setup_by_quotient (config: config) quot_thm opt_reflp_thm opt_par_thm lthy0 =
let
(**)
val quot_thm = Morphism.thm (Local_Theory.target_morphism lthy0) quot_thm
(**)
val (rty, qty) = quot_thm_rty_qty quot_thm
- val induct_attr = Attrib.internal (K (Induct.induct_type (fst (dest_Type qty))))
+ val induct_attr = Attrib.internal \<^here> (K (Induct.induct_type (fst (dest_Type qty))))
val qty_full_name = (fst o dest_Type) qty
val qty_name = (Binding.name o Long_Name.base_name) qty_full_name
val qualify = Binding.qualify_name true qty_name
val notes1 = case opt_reflp_thm of
SOME reflp_thm =>
let
val thms =
[("abs_induct", @{thms Quotient_total_abs_induct}, [induct_attr]),
("abs_eq_iff", @{thms Quotient_total_abs_eq_iff}, [])]
in map_thms qualify (fn thm => [quot_thm, reflp_thm] MRSL thm) thms end
| NONE =>
let val thms = [("abs_induct", @{thms Quotient_abs_induct}, [induct_attr])]
in map_thms qualify (fn thm => quot_thm RS thm) thms end
val dom_thm = get_Domainp_thm quot_thm
fun setup_transfer_rules_nonpar notes =
let
val notes1 =
case opt_reflp_thm of
SOME reflp_thm =>
let
val thms =
[("id_abs_transfer",@{thms Quotient_id_abs_transfer}, @{attributes [transfer_rule]}),
("left_total", @{thms Quotient_left_total}, @{attributes [transfer_rule]}),
("bi_total", @{thms Quotient_bi_total}, @{attributes [transfer_rule]})]
in
map_thms qualify (fn thm => [quot_thm, reflp_thm] MRSL thm) thms
end
| NONE => map_thms qualify I [("domain", [dom_thm], @{attributes [transfer_domain_rule]})]
val notes2 = map_thms qualify (fn thm => quot_thm RS thm)
[("rel_eq_transfer", @{thms Quotient_rel_eq_transfer}, @{attributes [transfer_rule]}),
("right_unique", @{thms Quotient_right_unique}, @{attributes [transfer_rule]}),
("right_total", @{thms Quotient_right_total}, @{attributes [transfer_rule]})]
in
notes2 @ notes1 @ notes
end
fun generate_parametric_rel_eq ctxt transfer_rule opt_param_thm =
(case opt_param_thm of
NONE => transfer_rule
| SOME param_thm =>
(Lifting_Def.generate_parametric_transfer_rule ctxt transfer_rule param_thm
handle Lifting_Term.MERGE_TRANSFER_REL msg =>
error ("Generation of a parametric transfer rule for the quotient relation failed:\n"
^ Pretty.string_of msg)))
fun setup_transfer_rules_par ctxt notes =
let
val pcrel_info = the (get_pcrel_info ctxt qty_full_name)
val pcrel_def = #pcrel_def pcrel_info
val notes1 =
case opt_reflp_thm of
SOME reflp_thm =>
let
val left_total = ([quot_thm, reflp_thm] MRSL @{thm Quotient_left_total})
val bi_total = ([quot_thm, reflp_thm] MRSL @{thm Quotient_bi_total})
val domain_thms = parametrize_total_domain left_total pcrel_def ctxt
val id_abs_transfer = generate_parametric_id ctxt rty
(Lifting_Term.parametrize_transfer_rule ctxt
([quot_thm, reflp_thm] MRSL @{thm Quotient_id_abs_transfer}))
val left_total = parametrize_class_constraint ctxt pcrel_def left_total
val bi_total = parametrize_class_constraint ctxt pcrel_def bi_total
val thms =
[("id_abs_transfer", [id_abs_transfer], @{attributes [transfer_rule]}),
("left_total", [left_total], @{attributes [transfer_rule]}),
("bi_total", [bi_total], @{attributes [transfer_rule]})]
in
map_thms qualify I thms @ map_thms qualify I domain_thms
end
| NONE =>
let
val thms = parametrize_domain dom_thm pcrel_info ctxt
in
map_thms qualify I thms
end
val rel_eq_transfer = generate_parametric_rel_eq ctxt
(Lifting_Term.parametrize_transfer_rule ctxt (quot_thm RS @{thm Quotient_rel_eq_transfer}))
opt_par_thm
val right_unique = parametrize_class_constraint ctxt pcrel_def
(quot_thm RS @{thm Quotient_right_unique})
val right_total = parametrize_class_constraint ctxt pcrel_def
(quot_thm RS @{thm Quotient_right_total})
val notes2 = map_thms qualify I
[("rel_eq_transfer", [rel_eq_transfer], @{attributes [transfer_rule]}),
("right_unique", [right_unique], @{attributes [transfer_rule]}),
("right_total", [right_total], @{attributes [transfer_rule]})]
in
notes2 @ notes1 @ notes
end
fun setup_rules lthy =
let
val thms =
if is_some (get_pcrel_info lthy qty_full_name)
then setup_transfer_rules_par lthy notes1
else setup_transfer_rules_nonpar notes1
in notes (#notes config) thms lthy end
in
lthy0
|> setup_lifting_infr config quot_thm opt_reflp_thm
||> setup_rules
end
(*
Sets up the Lifting package by a typedef theorem.
gen_code - flag whether an abstract type given by typedef_thm should be registered
as an abstract type in the code generator
typedef_thm - a typedef theorem (type_definition Rep Abs S)
*)
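(* A hedged Isar-level sketch of the usual route into this function, via the
setup_lifting command defined below (the type "dlist" and its carrier set are
only an illustration; type_definition_dlist is the theorem generated by typedef):

     typedef 'a dlist = "{xs::'a list. distinct xs}"
       by (rule exI[of _ "[]"]) simp
     setup_lifting type_definition_dlist
*)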
fun setup_by_typedef_thm config typedef_thm lthy0 =
let
val (_ $ rep_fun $ _ $ typedef_set) = (HOLogic.dest_Trueprop o Thm.prop_of) typedef_thm
val (T_def, lthy1) = define_crel config rep_fun lthy0
(**)
val T_def = Morphism.thm (Local_Theory.target_morphism lthy1) T_def
(**)
val quot_thm = case typedef_set of
Const (\<^const_name>\<open>top\<close>, _) =>
[typedef_thm, T_def] MRSL @{thm UNIV_typedef_to_Quotient}
| Const (\<^const_name>\<open>Collect\<close>, _) $ Abs (_, _, _) =>
[typedef_thm, T_def] MRSL @{thm open_typedef_to_Quotient}
| _ =>
[typedef_thm, T_def] MRSL @{thm typedef_to_Quotient}
val (rty, qty) = quot_thm_rty_qty quot_thm
val qty_full_name = (fst o dest_Type) qty
val qty_name = (Binding.name o Long_Name.base_name) qty_full_name
val qualify = Binding.qualify_name true qty_name
val opt_reflp_thm =
case typedef_set of
Const (\<^const_name>\<open>top\<close>, _) =>
SOME ((typedef_thm RS @{thm UNIV_typedef_to_equivp}) RS @{thm equivp_reflp2})
| _ => NONE
val dom_thm = get_Domainp_thm quot_thm
fun setup_transfer_rules_nonpar notes =
let
val notes1 =
case opt_reflp_thm of
SOME reflp_thm =>
let
val thms =
[("id_abs_transfer",@{thms Quotient_id_abs_transfer}, @{attributes [transfer_rule]}),
("left_total", @{thms Quotient_left_total}, @{attributes [transfer_rule]}),
("bi_total", @{thms Quotient_bi_total}, @{attributes [transfer_rule]})]
in
map_thms qualify (fn thm => [quot_thm, reflp_thm] MRSL thm) thms
end
| NONE =>
map_thms qualify I [("domain", [dom_thm], @{attributes [transfer_domain_rule]})]
val thms =
[("rep_transfer", @{thms typedef_rep_transfer}, @{attributes [transfer_rule]}),
("left_unique", @{thms typedef_left_unique}, @{attributes [transfer_rule]}),
("right_unique", @{thms typedef_right_unique}, @{attributes [transfer_rule]}),
("right_total", @{thms typedef_right_total}, @{attributes [transfer_rule]}),
("bi_unique", @{thms typedef_bi_unique}, @{attributes [transfer_rule]})]
in
map_thms qualify (fn thm => [typedef_thm, T_def] MRSL thm) thms @ notes1 @ notes
end
fun setup_transfer_rules_par ctxt notes =
let
val pcrel_info = (the (get_pcrel_info ctxt qty_full_name))
val pcrel_def = #pcrel_def pcrel_info
val notes1 =
case opt_reflp_thm of
SOME reflp_thm =>
let
val left_total = ([quot_thm, reflp_thm] MRSL @{thm Quotient_left_total})
val bi_total = ([quot_thm, reflp_thm] MRSL @{thm Quotient_bi_total})
val domain_thms = parametrize_total_domain left_total pcrel_def ctxt
val left_total = parametrize_class_constraint ctxt pcrel_def left_total
val bi_total = parametrize_class_constraint ctxt pcrel_def bi_total
val id_abs_transfer = generate_parametric_id ctxt rty
(Lifting_Term.parametrize_transfer_rule ctxt
([quot_thm, reflp_thm] MRSL @{thm Quotient_id_abs_transfer}))
val thms =
[("left_total", [left_total], @{attributes [transfer_rule]}),
("bi_total", [bi_total], @{attributes [transfer_rule]}),
("id_abs_transfer",[id_abs_transfer], @{attributes [transfer_rule]})]
in
map_thms qualify I thms @ map_thms qualify I domain_thms
end
| NONE =>
let
val thms = parametrize_domain dom_thm pcrel_info ctxt
in
map_thms qualify I thms
end
val notes2 = map_thms qualify (fn thm => generate_parametric_id ctxt rty
(Lifting_Term.parametrize_transfer_rule ctxt ([typedef_thm, T_def] MRSL thm)))
[("rep_transfer", @{thms typedef_rep_transfer}, @{attributes [transfer_rule]})];
val notes3 =
map_thms qualify
(fn thm => parametrize_class_constraint ctxt pcrel_def ([typedef_thm, T_def] MRSL thm))
[("left_unique", @{thms typedef_left_unique}, @{attributes [transfer_rule]}),
("right_unique", @{thms typedef_right_unique},@{attributes [transfer_rule]}),
("bi_unique", @{thms typedef_bi_unique}, @{attributes [transfer_rule]}),
("right_total", @{thms typedef_right_total}, @{attributes [transfer_rule]})]
in
notes3 @ notes2 @ notes1 @ notes
end
val notes1 = [(Binding.prefix_name "Quotient_" qty_name, [quot_thm], [])]
fun setup_rules lthy =
let
val thms =
if is_some (get_pcrel_info lthy qty_full_name)
then setup_transfer_rules_par lthy notes1
else setup_transfer_rules_nonpar notes1
in notes (#notes config) thms lthy end
in
lthy1
|> setup_lifting_infr config quot_thm opt_reflp_thm
||> setup_rules
end
fun setup_lifting_cmd xthm opt_reflp_xthm opt_par_xthm lthy =
let
val input_thm = singleton (Attrib.eval_thms lthy) xthm
val input_term = (HOLogic.dest_Trueprop o Thm.prop_of) input_thm
handle TERM _ => error "Unsupported type of a theorem. Only Quotient or type_definition are supported."
fun sanity_check_reflp_thm reflp_thm =
let
val reflp_tm = (HOLogic.dest_Trueprop o Thm.prop_of) reflp_thm
handle TERM _ => error "Invalid form of the reflexivity theorem. Use \"reflp R\"."
in
case reflp_tm of
\<^Const_>\<open>reflp_on _ for \<^Const>\<open>top_class.top _\<close> _\<close> => ()
| _ => error "Invalid form of the reflexivity theorem. Use \"reflp R\"."
end
fun check_qty qty = if not (is_Type qty)
then error "The abstract type must be a type constructor."
else ()
fun setup_quotient () =
let
val opt_reflp_thm = Option.map (singleton (Attrib.eval_thms lthy)) opt_reflp_xthm
val _ = if is_some opt_reflp_thm then sanity_check_reflp_thm (the opt_reflp_thm) else ()
val opt_par_thm = Option.map (singleton (Attrib.eval_thms lthy)) opt_par_xthm
val _ = check_qty (snd (quot_thm_rty_qty input_thm))
in
setup_by_quotient default_config input_thm opt_reflp_thm opt_par_thm lthy |> snd
end
fun setup_typedef () =
let
val qty = (range_type o fastype_of o hd o get_args 2) input_term
val _ = check_qty qty
in
case opt_reflp_xthm of
SOME _ => error "The reflexivity theorem cannot be specified if the type_definition theorem is used."
| NONE => (
case opt_par_xthm of
SOME _ => error "The parametricity theorem cannot be specified if the type_definition theorem is used."
| NONE => setup_by_typedef_thm default_config input_thm lthy |> snd
)
end
in
case input_term of
(Const (\<^const_name>\<open>Quotient\<close>, _) $ _ $ _ $ _ $ _) => setup_quotient ()
| (Const (\<^const_name>\<open>type_definition\<close>, _) $ _ $ _ $ _) => setup_typedef ()
| _ => error "Unsupported type of a theorem. Only Quotient or type_definition are supported."
end
val _ =
Outer_Syntax.local_theory \<^command_keyword>\<open>setup_lifting\<close>
"setup lifting infrastructure"
(Parse.thm -- Scan.option Parse.thm
-- Scan.option (\<^keyword>\<open>parametric\<close> |-- Parse.!!! Parse.thm) >>
(fn ((xthm, opt_reflp_xthm), opt_par_xthm) =>
setup_lifting_cmd xthm opt_reflp_xthm opt_par_xthm))
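(* A hedged sketch of the concrete syntax accepted by the parser above; the
theorem names are assumptions, not part of this file. Both the reflexivity
theorem and the "parametric" theorem are optional (Scan.option):

     setup_lifting Quotient_int reflp_intrel parametric int_rel_parametric
*)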
(* restoring lifting infrastructure *)
local
exception PCR_ERROR of Pretty.T list
in
fun lifting_restore_sanity_check ctxt (qinfo:Lifting_Info.quotient) =
let
val quot_thm = (#quot_thm qinfo)
val _ = quot_thm_sanity_check ctxt quot_thm
val pcr_info_err =
(case #pcr_info qinfo of
SOME pcr =>
let
val pcrel_def = #pcrel_def pcr
val pcr_cr_eq = #pcr_cr_eq pcr
val (def_lhs, _) = Logic.dest_equals (Thm.prop_of pcrel_def)
handle TERM _ => raise PCR_ERROR [Pretty.block
[Pretty.str "The pcr definiton theorem is not a plain meta equation:",
Pretty.brk 1,
Thm.pretty_thm ctxt pcrel_def]]
val pcr_const_def = head_of def_lhs
val (eq_lhs, eq_rhs) = HOLogic.dest_eq (HOLogic.dest_Trueprop (Thm.prop_of pcr_cr_eq))
handle TERM _ => raise PCR_ERROR [Pretty.block
[Pretty.str "The pcr_cr equation theorem is not a plain equation:",
Pretty.brk 1,
Thm.pretty_thm ctxt pcr_cr_eq]]
val (pcr_const_eq, eqs) = strip_comb eq_lhs
fun is_eq (Const (\<^const_name>\<open>HOL.eq\<close>, _)) = true
| is_eq _ = false
fun eq_Const (Const (name1, _)) (Const (name2, _)) = (name1 = name2)
| eq_Const _ _ = false
val all_eqs = if not (forall is_eq eqs) then
[Pretty.block
[Pretty.str "Arguments of the lhs of the pcr_cr equation theorem are not only equalities:",
Pretty.brk 1,
Thm.pretty_thm ctxt pcr_cr_eq]]
else []
val pcr_consts_not_equal = if not (eq_Const pcr_const_def pcr_const_eq) then
[Pretty.block
[Pretty.str "Parametrized correspondence relation constants in pcr_def and pcr_cr_eq are not equal:",
Pretty.brk 1,
Syntax.pretty_term ctxt pcr_const_def,
Pretty.brk 1,
Pretty.str "vs.",
Pretty.brk 1,
Syntax.pretty_term ctxt pcr_const_eq]]
else []
val crel = quot_thm_crel quot_thm
val cr_consts_not_equal = if not (eq_Const crel eq_rhs) then
[Pretty.block
[Pretty.str "Correspondence relation constants in the Quotient theorem and pcr_cr_eq are not equal:",
Pretty.brk 1,
Syntax.pretty_term ctxt crel,
Pretty.brk 1,
Pretty.str "vs.",
Pretty.brk 1,
Syntax.pretty_term ctxt eq_rhs]]
else []
in
all_eqs @ pcr_consts_not_equal @ cr_consts_not_equal
end
| NONE => [])
val errs = pcr_info_err
in
if null errs then () else raise PCR_ERROR errs
end
handle PCR_ERROR errs => error (cat_lines (["Sanity check failed:"]
@ (map (Pretty.string_of o Pretty.item o single) errs)))
end
(*
Registers the data in qinfo in the Lifting infrastructure.
*)
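(* A hedged ML-level sketch (Quotient_foo is a hypothetical theorem; the second
argument is a generic context, as supplied by declaration attributes):

     lifting_restore {quot_thm = @{thm Quotient_foo}, pcr_info = NONE} (Context.Proof ctxt)
*)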
fun lifting_restore qinfo ctxt =
let
val _ = lifting_restore_sanity_check (Context.proof_of ctxt) qinfo
val (_, qty) = quot_thm_rty_qty (#quot_thm qinfo)
val qty_full_name = (fst o dest_Type) qty
val stored_qinfo = Lifting_Info.lookup_quotients (Context.proof_of ctxt) qty_full_name
in
if is_some (stored_qinfo) andalso not (Lifting_Info.quotient_eq (qinfo, (the stored_qinfo)))
then error (Pretty.string_of
(Pretty.block
[Pretty.str "Lifting is already setup for the type",
Pretty.brk 1,
Pretty.quote (Syntax.pretty_typ (Context.proof_of ctxt) qty)]))
else Lifting_Info.update_quotients qty_full_name qinfo ctxt
end
val parse_opt_pcr =
Scan.optional (Attrib.thm -- Attrib.thm >>
(fn (pcrel_def, pcr_cr_eq) => SOME {pcrel_def = pcrel_def, pcr_cr_eq = pcr_cr_eq})) NONE
val lifting_restore_attribute_setup =
Attrib.setup \<^binding>\<open>lifting_restore\<close>
((Attrib.thm -- parse_opt_pcr) >>
(fn (quot_thm, opt_pcr) =>
let val qinfo = { quot_thm = quot_thm, pcr_info = opt_pcr}
in Thm.declaration_attribute (K (lifting_restore qinfo)) end))
"restoring lifting infrastructure"
val _ = Theory.setup lifting_restore_attribute_setup
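(* A hedged sketch of the attribute syntax set up above (all names are hypothetical).
Following the parser, it takes a Quotient theorem and, optionally, the pcr
definition together with the pcr_cr_eq theorem; the theorem the attribute is
attached to is ignored (Thm.declaration_attribute (K ...)):

     declare Quotient_foo[lifting_restore Quotient_foo pcr_foo_def foo.pcr_cr_eq]
*)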
fun lifting_restore_internal bundle_name ctxt =
let
val restore_info = Lifting_Info.lookup_restore_data (Context.proof_of ctxt) bundle_name
in
case restore_info of
SOME restore_info =>
ctxt
|> lifting_restore (#quotient restore_info)
|> fold_rev Transfer.transfer_raw_add (Item_Net.content (#transfer_rules restore_info))
| NONE => ctxt
end
val lifting_restore_internal_attribute_setup =
Attrib.setup \<^binding>\<open>lifting_restore_internal\<close>
(Scan.lift Parse.string >>
(fn name => Thm.declaration_attribute (K (lifting_restore_internal name))))
"restoring lifting infrastructure; internal attribute; not meant to be used directly by regular users"
val _ = Theory.setup lifting_restore_internal_attribute_setup
(* lifting_forget *)
val monotonicity_names = [\<^const_name>\<open>right_unique\<close>, \<^const_name>\<open>left_unique\<close>, \<^const_name>\<open>right_total\<close>,
\<^const_name>\<open>left_total\<close>, \<^const_name>\<open>bi_unique\<close>, \<^const_name>\<open>bi_total\<close>]
fun fold_transfer_rel f (Const (\<^const_name>\<open>Transfer.Rel\<close>, _) $ rel $ _ $ _) = f rel
| fold_transfer_rel f (Const (\<^const_name>\<open>HOL.eq\<close>, _) $
(Const (\<^const_name>\<open>Domainp\<close>, _) $ rel) $ _) = f rel
| fold_transfer_rel f (Const (name, _) $ rel) =
if member op= monotonicity_names name then f rel else f \<^term>\<open>undefined\<close>
| fold_transfer_rel f _ = f \<^term>\<open>undefined\<close>
fun filter_transfer_rules_by_rel transfer_rel transfer_rules =
let
val transfer_rel_name = transfer_rel |> dest_Const |> fst;
fun has_transfer_rel thm =
let
val concl = thm |> Thm.concl_of |> HOLogic.dest_Trueprop
in
member op= (fold_transfer_rel (fn tm => Term.add_const_names tm []) concl) transfer_rel_name
end
handle TERM _ => false
in
filter has_transfer_rel transfer_rules
end
type restore_data = {quotient : Lifting_Info.quotient, transfer_rules: thm Item_Net.T}
fun get_transfer_rel (qinfo : Lifting_Info.quotient) =
let
fun get_pcrel pcr_def = pcr_def |> Thm.concl_of |> Logic.dest_equals |> fst |> head_of
in
if is_some (#pcr_info qinfo)
then get_pcrel (#pcrel_def (the (#pcr_info qinfo)))
else quot_thm_crel (#quot_thm qinfo)
end
fun pointer_of_bundle_name bundle_name ctxt =
let
val bundle = Bundle.read ctxt bundle_name
fun err () = error "The provided bundle is not a lifting bundle"
in
(case bundle of
[(_, [arg_src])] =>
let
val (name, _) = Token.syntax (Scan.lift Parse.string) arg_src ctxt
handle ERROR _ => err ()
in name end
| _ => err ())
end
fun pointer_of_bundle_binding ctxt binding = Name_Space.full_name (Name_Space.naming_of
(Context.Theory (Proof_Context.theory_of ctxt))) binding
fun lifting_forget pointer lthy =
let
fun get_transfer_rules_to_delete qinfo ctxt =
let
val transfer_rel = get_transfer_rel qinfo
in
filter_transfer_rules_by_rel transfer_rel (Transfer.get_transfer_raw ctxt)
end
in
case Lifting_Info.lookup_restore_data lthy pointer of
SOME restore_info =>
let
val qinfo = #quotient restore_info
val quot_thm = #quot_thm qinfo
val transfer_rules = get_transfer_rules_to_delete qinfo lthy
in
- Local_Theory.declaration {syntax = false, pervasive = true}
+ Local_Theory.declaration {syntax = false, pervasive = true, pos = \<^here>}
(K (fold (Transfer.transfer_raw_del) transfer_rules #> Lifting_Info.delete_quotients quot_thm))
lthy
end
| NONE => error "The lifting bundle refers to non-existent restore data."
end
fun lifting_forget_cmd bundle_name lthy =
lifting_forget (pointer_of_bundle_name bundle_name lthy) lthy
val _ =
Outer_Syntax.local_theory \<^command_keyword>\<open>lifting_forget\<close>
"unsetup Lifting and Transfer for the given lifting bundle"
(Parse.name_position >> lifting_forget_cmd)
(* lifting_update *)
fun update_transfer_rules pointer lthy =
let
fun new_transfer_rules ({ quotient = qinfo, ... }:Lifting_Info.restore_data) lthy =
let
val transfer_rel = get_transfer_rel qinfo
val transfer_rules = filter_transfer_rules_by_rel transfer_rel (Transfer.get_transfer_raw lthy)
in
fn phi => fold_rev
(Item_Net.update o Morphism.thm phi) transfer_rules Thm.item_net
end
in
case Lifting_Info.lookup_restore_data lthy pointer of
SOME refresh_data =>
- Local_Theory.declaration {syntax = false, pervasive = true}
+ Local_Theory.declaration {syntax = false, pervasive = true, pos = \<^here>}
(fn phi => Lifting_Info.add_transfer_rules_in_restore_data pointer
(new_transfer_rules refresh_data lthy phi)) lthy
| NONE => error "The lifting bundle refers to non-existent restore data."
end
fun lifting_update_cmd bundle_name lthy =
update_transfer_rules (pointer_of_bundle_name bundle_name lthy) lthy
val _ =
Outer_Syntax.local_theory \<^command_keyword>\<open>lifting_update\<close>
"add newly introduced transfer rules to a bundle storing the state of Lifting and Transfer"
(Parse.name_position >> lifting_update_cmd)
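(* A hedged Isar-level sketch of the two commands above. "int.lifting" is a
hypothetical bundle name of the shape produced by lifting_bundle, i.e.
<base name of the type>.lifting:

     lifting_update int.lifting
     lifting_forget int.lifting

lifting_update stores the transfer rules introduced since the setup in the
bundle's restore data; lifting_forget deletes the stored transfer rules and
the quotient data for the type. *)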
end
diff --git a/src/HOL/Tools/Quotient/quotient_def.ML b/src/HOL/Tools/Quotient/quotient_def.ML
--- a/src/HOL/Tools/Quotient/quotient_def.ML
+++ b/src/HOL/Tools/Quotient/quotient_def.ML
@@ -1,210 +1,210 @@
(* Title: HOL/Tools/Quotient/quotient_def.ML
Author: Cezary Kaliszyk and Christian Urban
Definitions for constants on quotient types.
*)
signature QUOTIENT_DEF =
sig
val add_quotient_def:
((binding * mixfix) * Attrib.binding) * (term * term) -> thm ->
local_theory -> Quotient_Info.quotconsts * local_theory
val quotient_def:
(binding * typ option * mixfix) option * (Attrib.binding * (term * term)) ->
local_theory -> Proof.state
val quotient_def_cmd:
(binding * string option * mixfix) option * (Attrib.binding * (string * string)) ->
local_theory -> Proof.state
end;
structure Quotient_Def: QUOTIENT_DEF =
struct
(** Interface and Syntax Setup **)
(* Generation of the code certificate from the rsp theorem *)
open Lifting_Util
infix 0 MRSL
(* The ML-interface for a quotient definition takes
as arguments:
- an optional binding and mixfix annotation
- attributes
- the new constant as term
- the rhs of the definition as term
- respectfulness theorem for the rhs
It stores the qconst_info in the quotconsts data slot.
Restriction: at the moment, the left- and right-hand
sides of the definition must be constants.
*)
fun error_msg bind str =
let
val name = Binding.name_of bind
val pos = Position.here (Binding.pos_of bind)
in
error ("Head of quotient_definition " ^
quote str ^ " differs from declaration " ^ name ^ pos)
end
fun add_quotient_def ((var, (name, atts)), (lhs, rhs)) rsp_thm lthy =
let
val rty = fastype_of rhs
val qty = fastype_of lhs
val absrep_trm =
Quotient_Term.absrep_fun lthy Quotient_Term.AbsF (rty, qty) $ rhs
val prop = Syntax.check_term lthy (Logic.mk_equals (lhs, absrep_trm))
val (_, prop') = Local_Defs.cert_def lthy (K []) prop
val (_, newrhs) = Local_Defs.abs_def prop'
val ((qconst, (_ , def)), lthy') =
Local_Theory.define (var, ((Thm.def_binding_optional (#1 var) name, atts), newrhs)) lthy
fun qconst_data phi =
Quotient_Info.transform_quotconsts phi {qconst = qconst, rconst = rhs, def = def}
fun qualify defname suffix = Binding.name suffix
|> Binding.qualify true defname
val lhs_name = Binding.name_of (#1 var)
val rsp_thm_name = qualify lhs_name "rsp"
val lthy'' = lthy'
- |> Local_Theory.declaration {syntax = false, pervasive = true}
+ |> Local_Theory.declaration {syntax = false, pervasive = true, pos = \<^here>}
(fn phi =>
(case qconst_data phi of
qcinfo as {qconst = Const (c, _), ...} =>
Quotient_Info.update_quotconsts (c, qcinfo)
| _ => I))
|> (snd oo Local_Theory.note)
((rsp_thm_name, @{attributes [quot_respect]}), [rsp_thm])
in
(qconst_data Morphism.identity, lthy'')
end
fun mk_readable_rsp_thm_eq tm ctxt =
let
val ctm = Thm.cterm_of ctxt tm
fun abs_conv2 cv = Conv.abs_conv (Conv.abs_conv (cv o #2) o #2) ctxt
fun erase_quants ctxt' ctm' =
case (Thm.term_of ctm') of
Const (\<^const_name>\<open>HOL.eq\<close>, _) $ _ $ _ => Conv.all_conv ctm'
| _ => (Conv.binder_conv (erase_quants o #2) ctxt' then_conv
Conv.rewr_conv @{thm fun_eq_iff[symmetric, THEN eq_reflection]}) ctm'
val norm_fun_eq = abs_conv2 erase_quants then_conv Thm.eta_conversion
fun simp_arrows_conv ctm =
let
val unfold_conv = Conv.rewrs_conv
[@{thm rel_fun_eq_eq_onp[THEN eq_reflection]}, @{thm rel_fun_eq_rel[THEN eq_reflection]},
@{thm rel_fun_def[THEN eq_reflection]}]
val left_conv = simp_arrows_conv then_conv Conv.try_conv norm_fun_eq
fun binop_conv2 cv1 cv2 = Conv.combination_conv (Conv.arg_conv cv1) cv2
in
case (Thm.term_of ctm) of
Const (\<^const_name>\<open>rel_fun\<close>, _) $ _ $ _ =>
(binop_conv2 left_conv simp_arrows_conv then_conv unfold_conv) ctm
| _ => Conv.all_conv ctm
end
val unfold_ret_val_invs = Conv.bottom_conv
(K (Conv.try_conv (Conv.rewr_conv @{thm eq_onp_same_args[THEN eq_reflection]}))) ctxt
val simp_conv = Conv.arg_conv (Conv.fun2_conv simp_arrows_conv)
val univq_conv = Conv.rewr_conv @{thm HOL.all_simps(6)[symmetric, THEN eq_reflection]}
val univq_prenex_conv = Conv.top_conv (K (Conv.try_conv univq_conv)) ctxt
val beta_conv = Thm.beta_conversion true
val eq_thm =
(simp_conv then_conv univq_prenex_conv then_conv beta_conv then_conv unfold_ret_val_invs) ctm
in
Object_Logic.rulify ctxt (eq_thm RS Drule.equal_elim_rule2)
end
fun gen_quotient_def prep_var parse_term (raw_var, (attr, (raw_lhs, raw_rhs))) lthy =
let
val (opt_var, ctxt) =
(case raw_var of
NONE => (NONE, lthy)
| SOME var => prep_var var lthy |>> SOME)
val lhs_constraints = (case opt_var of SOME (_, SOME T, _) => [T] | _ => [])
fun prep_term Ts = parse_term ctxt #> fold Type.constraint Ts #> Syntax.check_term ctxt;
val lhs = prep_term lhs_constraints raw_lhs
val rhs = prep_term [] raw_rhs
val (lhs_str, lhs_ty) = dest_Free lhs handle TERM _ => error "Constant already defined"
val _ = if null (strip_abs_vars rhs) then () else error "The definiens cannot be an abstraction"
val _ = if is_Const rhs then () else warning "The definiens is not a constant"
val var =
(case opt_var of
NONE => (Binding.name lhs_str, NoSyn)
| SOME (binding, _, mx) =>
if Variable.check_name binding = lhs_str then (binding, mx)
else error_msg binding lhs_str);
fun try_to_prove_refl thm =
let
val lhs_eq =
thm
|> Thm.prop_of
|> Logic.dest_implies
|> fst
|> strip_all_body
|> try HOLogic.dest_Trueprop
in
case lhs_eq of
SOME (Const (\<^const_name>\<open>HOL.eq\<close>, _) $ _ $ _) => SOME (@{thm refl} RS thm)
| SOME _ => (case body_type (fastype_of lhs) of
Type (typ_name, _) =>
\<^try>\<open>
#equiv_thm (the (Quotient_Info.lookup_quotients lthy typ_name))
RS @{thm Equiv_Relations.equivp_reflp} RS thm\<close>
| _ => NONE
)
| _ => NONE
end
val rsp_rel = Quotient_Term.equiv_relation lthy (fastype_of rhs, lhs_ty)
val internal_rsp_tm = HOLogic.mk_Trueprop (Syntax.check_term lthy (rsp_rel $ rhs $ rhs))
val readable_rsp_thm_eq = mk_readable_rsp_thm_eq internal_rsp_tm lthy
val maybe_proven_rsp_thm = try_to_prove_refl readable_rsp_thm_eq
val (readable_rsp_tm, _) = Logic.dest_implies (Thm.prop_of readable_rsp_thm_eq)
fun after_qed thm_list lthy =
let
val internal_rsp_thm =
case thm_list of
[] => the maybe_proven_rsp_thm
| [[thm]] => Goal.prove ctxt [] [] internal_rsp_tm
(fn _ =>
resolve_tac ctxt [readable_rsp_thm_eq] 1 THEN
Proof_Context.fact_tac ctxt [thm] 1)
in
snd (add_quotient_def ((var, attr), (lhs, rhs)) internal_rsp_thm lthy)
end
in
case maybe_proven_rsp_thm of
SOME _ => Proof.theorem NONE after_qed [] lthy
| NONE => Proof.theorem NONE after_qed [[(readable_rsp_tm,[])]] lthy
end
val quotient_def = gen_quotient_def Proof_Context.cert_var (K I)
val quotient_def_cmd = gen_quotient_def Proof_Context.read_var Syntax.parse_term
(* command syntax *)
val _ =
Outer_Syntax.local_theory_to_proof \<^command_keyword>\<open>quotient_definition\<close>
"definition for constants over the quotient type"
(Scan.option Parse_Spec.constdecl --
Parse.!!! (Parse_Spec.opt_thm_name ":" -- (Parse.term -- (\<^keyword>\<open>is\<close> |-- Parse.term)))
>> quotient_def_cmd);
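(* A hedged sketch of the concrete syntax parsed above (the constant zero_int, the
quotient type int and the raw term are assumptions borrowed from the usual
construction of the integers from pairs of naturals):

     quotient_definition "zero_int :: int"
       is "(0::nat, 0::nat)" .

If try_to_prove_refl succeeds there is nothing left to prove and the proof can be
closed immediately; otherwise the respectfulness goal is presented to the user. *)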
end;
diff --git a/src/HOL/Tools/Quotient/quotient_type.ML b/src/HOL/Tools/Quotient/quotient_type.ML
--- a/src/HOL/Tools/Quotient/quotient_type.ML
+++ b/src/HOL/Tools/Quotient/quotient_type.ML
@@ -1,355 +1,355 @@
(* Title: HOL/Tools/Quotient/quotient_type.ML
Author: Cezary Kaliszyk and Christian Urban
Definition of a quotient type.
*)
signature QUOTIENT_TYPE =
sig
val add_quotient_type: {overloaded: bool} ->
((string list * binding * mixfix) * (typ * term * bool) *
((binding * binding) option * thm option)) * thm -> local_theory ->
Quotient_Info.quotients * local_theory
val quotient_type: {overloaded: bool} ->
(string list * binding * mixfix) * (typ * term * bool) *
((binding * binding) option * thm option) -> Proof.context -> Proof.state
val quotient_type_cmd: {overloaded: bool} ->
(((((string list * binding) * mixfix) * string) * (bool * string)) *
(binding * binding) option) * (Facts.ref * Token.src list) option ->
Proof.context -> Proof.state
end;
structure Quotient_Type: QUOTIENT_TYPE =
struct
(*** definition of quotient types ***)
val mem_def1 = @{lemma "y \<in> Collect S \<Longrightarrow> S y" by simp}
val mem_def2 = @{lemma "S y \<Longrightarrow> y \<in> Collect S" by simp}
(* constructs the term {c. EX (x::rty). rel x x \<and> c = Collect (rel x)} *)
fun typedef_term rel rty lthy =
let
val [x, c] =
[("x", rty), ("c", HOLogic.mk_setT rty)]
|> Variable.variant_frees lthy [rel]
|> map Free
in
HOLogic.Collect_const (HOLogic.mk_setT rty) $ (lambda c (HOLogic.exists_const rty $
lambda x (HOLogic.mk_conj (rel $ x $ x,
HOLogic.mk_eq (c, HOLogic.Collect_const rty $ (rel $ x))))))
end
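(* For instance, with rty = "nat * nat" and rel = intrel (an assumed example
relation), this builds, up to bound variable names, the term

     {c. \<exists>x::nat * nat. intrel x x \<and> c = Collect (intrel x)}

i.e. the set of non-empty equivalence classes that serves as the carrier set of
the typedef issued by typedef_make below. *)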
(* makes the new type definition and proves non-emptiness *)
fun typedef_make overloaded (vs, qty_name, mx, rel, rty) equiv_thm lthy =
let
fun typedef_tac ctxt =
EVERY1 (map (resolve_tac ctxt o single) [@{thm part_equivp_typedef}, equiv_thm])
in
Typedef.add_typedef overloaded (qty_name, map (rpair dummyS) vs, mx)
(typedef_term rel rty lthy) NONE typedef_tac lthy
end
(* tactic to prove the quot_type theorem for the new type *)
fun typedef_quot_type_tac ctxt equiv_thm ((_, typedef_info): Typedef.info) =
let
val rep_thm = #Rep typedef_info RS mem_def1
val rep_inv = #Rep_inverse typedef_info
val abs_inv = #Abs_inverse typedef_info
val rep_inj = #Rep_inject typedef_info
in
(resolve_tac ctxt @{thms quot_type.intro} THEN' RANGE [
resolve_tac ctxt [equiv_thm],
resolve_tac ctxt [rep_thm],
resolve_tac ctxt [rep_inv],
resolve_tac ctxt [abs_inv] THEN' resolve_tac ctxt [mem_def2] THEN' assume_tac ctxt,
resolve_tac ctxt [rep_inj]]) 1
end
(* proves the quot_type theorem for the new type *)
fun typedef_quot_type_thm (rel, abs, rep, equiv_thm, typedef_info) lthy =
let
val quot_type_const = Const (\<^const_name>\<open>quot_type\<close>,
fastype_of rel --> fastype_of abs --> fastype_of rep --> \<^typ>\<open>bool\<close>)
val goal = HOLogic.mk_Trueprop (quot_type_const $ rel $ abs $ rep)
in
Goal.prove lthy [] [] goal
(fn {context = ctxt, ...} => typedef_quot_type_tac ctxt equiv_thm typedef_info)
end
open Lifting_Util
infix 0 MRSL
fun define_cr_rel equiv_thm abs_fun lthy =
let
fun force_type_of_rel rel forced_ty =
let
val thy = Proof_Context.theory_of lthy
val rel_ty = (domain_type o fastype_of) rel
val ty_inst = Sign.typ_match thy (rel_ty, forced_ty) Vartab.empty
in
Envir.subst_term_types ty_inst rel
end
val (rty, qty) = (dest_funT o fastype_of) abs_fun
val abs_fun_graph = HOLogic.mk_eq(abs_fun $ Bound 1, Bound 0)
val Abs_body = (case (HOLogic.dest_Trueprop o Thm.prop_of) equiv_thm of
Const (\<^const_name>\<open>equivp\<close>, _) $ _ => abs_fun_graph
| Const (\<^const_name>\<open>part_equivp\<close>, _) $ rel =>
HOLogic.mk_conj (force_type_of_rel rel rty $ Bound 1 $ Bound 1, abs_fun_graph)
| _ => error "unsupported equivalence theorem"
)
val def_term = Abs ("x", rty, Abs ("y", qty, Abs_body));
val qty_name = (Binding.name o Long_Name.base_name o fst o dest_Type) qty
val cr_rel_name = Binding.prefix_name "cr_" qty_name
val (fixed_def_term, lthy') = yield_singleton (Variable.importT_terms) def_term lthy
val ((_, (_ , def_thm)), lthy'') =
Local_Theory.define ((cr_rel_name, NoSyn), ((Thm.def_binding cr_rel_name, []), fixed_def_term)) lthy'
in
(def_thm, lthy'')
end;
fun setup_lifting_package quot3_thm equiv_thm opt_par_thm lthy =
let
val (_ $ _ $ abs_fun $ _) = (HOLogic.dest_Trueprop o Thm.prop_of) quot3_thm
val (T_def, lthy') = define_cr_rel equiv_thm abs_fun lthy
val (rty, qty) = (dest_funT o fastype_of) abs_fun
val qty_name = (Binding.name o Long_Name.base_name o fst o dest_Type) qty
val quotient_thm_name = Binding.prefix_name "Quotient_" qty_name
val (reflp_thm, quot_thm) =
(case (HOLogic.dest_Trueprop o Thm.prop_of) equiv_thm of
Const (\<^const_name>\<open>equivp\<close>, _) $ _ =>
(SOME (equiv_thm RS @{thm equivp_reflp2}),
[quot3_thm, T_def, equiv_thm] MRSL @{thm Quotient3_to_Quotient_equivp})
| Const (\<^const_name>\<open>part_equivp\<close>, _) $ _ =>
(NONE, [quot3_thm, T_def] MRSL @{thm Quotient3_to_Quotient})
| _ => error "unsupported equivalence theorem")
val config = { notes = true }
in
lthy'
|> Lifting_Setup.setup_by_quotient config quot_thm reflp_thm opt_par_thm
|> snd
|> (snd oo Local_Theory.note) ((quotient_thm_name, []), [quot_thm])
end
fun init_quotient_infr quot_thm equiv_thm opt_par_thm lthy =
let
val (_ $ rel $ abs $ rep) = (HOLogic.dest_Trueprop o Thm.prop_of) quot_thm
val (qtyp, rtyp) = (dest_funT o fastype_of) rep
val qty_full_name = (fst o dest_Type) qtyp
fun quotients phi =
Quotient_Info.transform_quotients phi
{qtyp = qtyp, rtyp = rtyp, equiv_rel = rel, equiv_thm = equiv_thm,
quot_thm = quot_thm}
fun abs_rep phi =
Quotient_Info.transform_abs_rep phi {abs = abs, rep = rep}
in
lthy
- |> Local_Theory.declaration {syntax = false, pervasive = true} (fn phi =>
+ |> Local_Theory.declaration {syntax = false, pervasive = true, pos = \<^here>} (fn phi =>
Quotient_Info.update_quotients (qty_full_name, quotients phi) #>
Quotient_Info.update_abs_rep (qty_full_name, abs_rep phi))
|> setup_lifting_package quot_thm equiv_thm opt_par_thm
end
(* main function for constructing a quotient type *)
fun add_quotient_type overloaded
(((vs, qty_name, mx), (rty, rel, partial), (opt_morphs, opt_par_thm)), equiv_thm) lthy =
let
val part_equiv =
if partial
then equiv_thm
else equiv_thm RS @{thm equivp_implies_part_equivp}
(* generates the typedef *)
val ((_, typedef_info), lthy1) =
typedef_make overloaded (vs, qty_name, mx, rel, rty) part_equiv lthy
(* abs and rep functions from the typedef *)
val Abs_ty = #abs_type (#1 typedef_info)
val Rep_ty = #rep_type (#1 typedef_info)
val Abs_name = #Abs_name (#1 typedef_info)
val Rep_name = #Rep_name (#1 typedef_info)
val Abs_const = Const (Abs_name, Rep_ty --> Abs_ty)
val Rep_const = Const (Rep_name, Abs_ty --> Rep_ty)
(* more useful abs and rep definitions *)
val abs_const = Const (\<^const_name>\<open>quot_type.abs\<close>,
(rty --> rty --> \<^typ>\<open>bool\<close>) --> (Rep_ty --> Abs_ty) --> rty --> Abs_ty)
val rep_const = Const (\<^const_name>\<open>quot_type.rep\<close>, (Abs_ty --> Rep_ty) --> Abs_ty --> rty)
val abs_trm = abs_const $ rel $ Abs_const
val rep_trm = rep_const $ Rep_const
val (rep_name, abs_name) =
(case opt_morphs of
NONE => (Binding.prefix_name "rep_" qty_name, Binding.prefix_name "abs_" qty_name)
| SOME morphs => morphs)
val ((_, (_, abs_def)), lthy2) = lthy1
|> Local_Theory.define ((abs_name, NoSyn), ((Thm.def_binding abs_name, []), abs_trm))
val ((_, (_, rep_def)), lthy3) = lthy2
|> Local_Theory.define ((rep_name, NoSyn), ((Thm.def_binding rep_name, []), rep_trm))
(* quot_type theorem *)
val quot_thm = typedef_quot_type_thm (rel, Abs_const, Rep_const, part_equiv, typedef_info) lthy3
(* quotient theorem *)
val quotient_thm_name = Binding.prefix_name "Quotient3_" qty_name
val quotient_thm =
(quot_thm RS @{thm quot_type.Quotient})
|> fold_rule lthy3 [abs_def, rep_def]
(* name equivalence theorem *)
val equiv_thm_name = Binding.suffix_name "_equivp" qty_name
(* storing the quotients *)
val quotients = {qtyp = Abs_ty, rtyp = rty, equiv_rel = rel, equiv_thm = equiv_thm,
quot_thm = quotient_thm}
val lthy4 = lthy3
|> init_quotient_infr quotient_thm equiv_thm opt_par_thm
|> (snd oo Local_Theory.note)
((equiv_thm_name,
if partial then [] else @{attributes [quot_equiv]}),
[equiv_thm])
|> (snd oo Local_Theory.note)
((quotient_thm_name, @{attributes [quot_thm]}), [quotient_thm])
in
(quotients, lthy4)
end
(* sanity checks for the quotient type specifications *)
fun sanity_check ((vs, qty_name, _), (rty, rel, _), _) =
let
val rty_tfreesT = map fst (Term.add_tfreesT rty [])
val rel_tfrees = map fst (Term.add_tfrees rel [])
val rel_frees = map fst (Term.add_frees rel [])
val rel_vars = Term.add_vars rel []
val rel_tvars = Term.add_tvars rel []
val qty_str = Binding.print qty_name ^ ": "
val illegal_rel_vars =
if null rel_vars andalso null rel_tvars then []
else [qty_str ^ "illegal schematic variable(s) in the relation."]
val dup_vs =
(case duplicates (op =) vs of
[] => []
| dups => [qty_str ^ "duplicate type variable(s) on the lhs: " ^ commas_quote dups])
val extra_rty_tfrees =
(case subtract (op =) vs rty_tfreesT of
[] => []
| extras => [qty_str ^ "extra type variable(s) on the lhs: " ^ commas_quote extras])
val extra_rel_tfrees =
(case subtract (op =) vs rel_tfrees of
[] => []
| extras => [qty_str ^ "extra type variable(s) in the relation: " ^ commas_quote extras])
val illegal_rel_frees =
(case rel_frees of
[] => []
| xs => [qty_str ^ "illegal variable(s) in the relation: " ^ commas_quote xs])
val errs = illegal_rel_vars @ dup_vs @ extra_rty_tfrees @ extra_rel_tfrees @ illegal_rel_frees
in
if null errs then () else error (cat_lines errs)
end
(* check for existence of map functions *)
fun map_check ctxt (_, (rty, _, _), _) =
let
fun map_check_aux rty warns =
(case rty of
Type (_, []) => warns
| Type (s, _) =>
if Symtab.defined (Functor.entries ctxt) s then warns else s :: warns
| _ => warns)
val warns = map_check_aux rty []
in
if null warns then ()
else warning ("No map function defined for " ^ commas warns ^
". This will cause problems later on.")
end
(*** interface and syntax setup ***)
(* the ML-interface takes a tuple consisting of:
- the name of the quotient type
- its free type variables (first argument)
- its mixfix annotation
- the type to be quotiented
- the partial flag (a boolean)
- the relation by which the type is quotiented
- optional names of the morphisms (rep/abs)
- an optional parametricity theorem for the relation (the "parametric" option)
it opens a proof state in which one has to show that the
relation is an equivalence (or partial equivalence) relation
*)
fun quotient_type overloaded quot lthy =
let
(* sanity check *)
val _ = sanity_check quot
val _ = map_check lthy quot
fun mk_goal (rty, rel, partial) =
let
val equivp_ty = ([rty, rty] ---> \<^typ>\<open>bool\<close>) --> \<^typ>\<open>bool\<close>
val const =
if partial then \<^const_name>\<open>part_equivp\<close> else \<^const_name>\<open>equivp\<close>
in
HOLogic.mk_Trueprop (Const (const, equivp_ty) $ rel)
end
val goal = (mk_goal o #2) quot
fun after_qed [[thm]] = (snd oo add_quotient_type overloaded) (quot, thm)
in
Proof.theorem NONE after_qed [[(goal, [])]] lthy
end
fun quotient_type_cmd overloaded spec lthy =
let
fun parse_spec ((((((vs, qty_name), mx), rty_str), (partial, rel_str)), opt_morphs), opt_par_xthm) lthy =
let
val rty = Syntax.read_typ lthy rty_str
val tmp_lthy1 = Variable.declare_typ rty lthy
val rel =
Syntax.parse_term tmp_lthy1 rel_str
|> Type.constraint (rty --> rty --> \<^typ>\<open>bool\<close>)
|> Syntax.check_term tmp_lthy1
val tmp_lthy2 = Variable.declare_term rel tmp_lthy1
val opt_par_thm = Option.map (singleton (Attrib.eval_thms lthy)) opt_par_xthm
in
(((vs, qty_name, mx), (rty, rel, partial), (opt_morphs, opt_par_thm)), tmp_lthy2)
end
val (spec', _) = parse_spec spec lthy
in
quotient_type overloaded spec' lthy
end
(* command syntax *)
val _ =
Outer_Syntax.local_theory_to_proof \<^command_keyword>\<open>quotient_type\<close>
"quotient type definitions (require equivalence proofs)"
(* FIXME Parse.type_args_constrained and standard treatment of sort constraints *)
(Parse_Spec.overloaded -- (Parse.type_args -- Parse.binding --
Parse.opt_mixfix -- (\<^keyword>\<open>=\<close> |-- Parse.typ) -- (\<^keyword>\<open>/\<close> |--
Scan.optional (Parse.reserved "partial" -- \<^keyword>\<open>:\<close> >> K true) false -- Parse.term) --
Scan.option (\<^keyword>\<open>morphisms\<close> |-- Parse.!!! (Parse.binding -- Parse.binding)) --
Scan.option (\<^keyword>\<open>parametric\<close> |-- Parse.!!! Parse.thm))
>> (fn (overloaded, spec) => quotient_type_cmd {overloaded = overloaded} spec))
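(* A hedged example of the concrete syntax accepted above, following the classic
construction of the integers as a quotient of pairs of naturals (intrel,
intrel_parametric and intrel_equivp are assumed facts, not defined in this file):

     quotient_type int = "nat \<times> nat" / "intrel"
       morphisms Rep_Integ Abs_Integ
       parametric intrel_parametric
       by (rule intrel_equivp)

The opened goal is "equivp intrel", or "part_equivp intrel" when the "partial:"
flag is given. *)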
end
diff --git a/src/HOL/Tools/Transfer/transfer_bnf.ML b/src/HOL/Tools/Transfer/transfer_bnf.ML
--- a/src/HOL/Tools/Transfer/transfer_bnf.ML
+++ b/src/HOL/Tools/Transfer/transfer_bnf.ML
@@ -1,281 +1,281 @@
(* Title: HOL/Tools/Transfer/transfer_bnf.ML
Author: Ondrej Kuncar, TU Muenchen
Setup for Transfer for types that are BNF.
*)
signature TRANSFER_BNF =
sig
exception NO_PRED_DATA of unit
val base_name_of_bnf: BNF_Def.bnf -> binding
val type_name_of_bnf: BNF_Def.bnf -> string
val lookup_defined_pred_data: Proof.context -> string -> Transfer.pred_data
val bnf_only_type_ctr: (BNF_Def.bnf -> 'a -> 'a) -> BNF_Def.bnf -> 'a -> 'a
end
structure Transfer_BNF : TRANSFER_BNF =
struct
open BNF_Util
open BNF_Def
open BNF_FP_Util
open BNF_FP_Def_Sugar
exception NO_PRED_DATA of unit
(* util functions *)
fun base_name_of_bnf bnf = Binding.name (Binding.name_of (name_of_bnf bnf))
fun mk_Domainp P =
let
val PT = fastype_of P
val argT = hd (binder_types PT)
in
Const (\<^const_name>\<open>Domainp\<close>, PT --> argT --> HOLogic.boolT) $ P
end
fun type_name_of_bnf bnf = T_of_bnf bnf |> dest_Type |> fst
fun bnf_only_type_ctr f bnf = if is_Type (T_of_bnf bnf) then f bnf else I
fun bnf_of_fp_sugar (fp_sugar:fp_sugar) = nth (#bnfs (#fp_res fp_sugar)) (#fp_res_index fp_sugar)
fun fp_sugar_only_type_ctr f fp_sugars =
(case filter (is_Type o T_of_bnf o bnf_of_fp_sugar) fp_sugars of
[] => I
| fp_sugars' => f fp_sugars')
(* relation constraints - bi_total & co. *)
fun mk_relation_constraint name arg =
(Const (name, fastype_of arg --> HOLogic.boolT)) $ arg
fun side_constraint_tac bnf constr_defs ctxt =
let
val thms = constr_defs @ map mk_sym [rel_eq_of_bnf bnf, rel_conversep_of_bnf bnf,
rel_OO_of_bnf bnf]
in
SELECT_GOAL (Local_Defs.unfold0_tac ctxt thms) THEN' rtac ctxt (rel_mono_of_bnf bnf)
THEN_ALL_NEW assume_tac ctxt
end
fun bi_constraint_tac constr_iff sided_constr_intros ctxt =
SELECT_GOAL (Local_Defs.unfold0_tac ctxt [constr_iff]) THEN'
CONJ_WRAP' (fn thm => rtac ctxt thm THEN_ALL_NEW
(REPEAT_DETERM o etac ctxt conjE THEN' assume_tac ctxt)) sided_constr_intros
fun generate_relation_constraint_goal ctxt bnf constraint_def =
let
val constr_name =
constraint_def |> Thm.prop_of |> HOLogic.dest_Trueprop |> fst o HOLogic.dest_eq
|> head_of |> fst o dest_Const
val live = live_of_bnf bnf
val (((As, Bs), Ds), ctxt') = ctxt
|> mk_TFrees live
||>> mk_TFrees live
||>> mk_TFrees' (map Type.sort_of_atyp (deads_of_bnf bnf))
val relator = mk_rel_of_bnf Ds As Bs bnf
val relsT = map2 mk_pred2T As Bs
val (args, ctxt'') = Ctr_Sugar_Util.mk_Frees "R" relsT ctxt'
val concl = HOLogic.mk_Trueprop (mk_relation_constraint constr_name (list_comb (relator, args)))
val assms = map (HOLogic.mk_Trueprop o (mk_relation_constraint constr_name)) args
val goal = Logic.list_implies (assms, concl)
in
(goal, ctxt'')
end
fun prove_relation_side_constraint ctxt bnf constraint_def =
let
val (goal, ctxt') = generate_relation_constraint_goal ctxt bnf constraint_def
in
Goal.prove_sorry ctxt' [] [] goal (fn {context = goal_ctxt, ...} =>
side_constraint_tac bnf [constraint_def] goal_ctxt 1)
|> Thm.close_derivation \<^here>
|> singleton (Variable.export ctxt' ctxt)
|> Drule.zero_var_indexes
end
fun prove_relation_bi_constraint ctxt bnf constraint_def side_constraints =
let
val (goal, ctxt') = generate_relation_constraint_goal ctxt bnf constraint_def
in
Goal.prove_sorry ctxt' [] [] goal (fn {context = goal_ctxt, ...} =>
bi_constraint_tac constraint_def side_constraints goal_ctxt 1)
|> Thm.close_derivation \<^here>
|> singleton (Variable.export ctxt' ctxt)
|> Drule.zero_var_indexes
end
val defs =
[("left_total_rel", @{thm left_total_alt_def}), ("right_total_rel", @{thm right_total_alt_def}),
("left_unique_rel", @{thm left_unique_alt_def}), ("right_unique_rel", @{thm right_unique_alt_def})]
fun prove_relation_constraints bnf ctxt =
let
val transfer_attr = @{attributes [transfer_rule]}
val Tname = base_name_of_bnf bnf
val defs = map (apsnd (prove_relation_side_constraint ctxt bnf)) defs
val bi_total = prove_relation_bi_constraint ctxt bnf @{thm bi_total_alt_def}
[snd (nth defs 0), snd (nth defs 1)]
val bi_unique = prove_relation_bi_constraint ctxt bnf @{thm bi_unique_alt_def}
[snd (nth defs 2), snd (nth defs 3)]
val defs = ("bi_total_rel", bi_total) :: ("bi_unique_rel", bi_unique) :: defs
in
maps (fn (a, thm) => [((Binding.qualify_name true Tname a, []), [([thm], transfer_attr)])]) defs
end
(* relator_eq *)
fun relator_eq bnf =
[(Binding.empty_atts, [([rel_eq_of_bnf bnf], @{attributes [relator_eq]})])]
(* transfer rules *)
fun bnf_transfer_rules bnf =
let
val transfer_rules = map_transfer_of_bnf bnf :: pred_transfer_of_bnf bnf
:: rel_transfer_of_bnf bnf :: set_transfer_of_bnf bnf
val transfer_attr = @{attributes [transfer_rule]}
in
map (fn thm => (Binding.empty_atts, [([thm],transfer_attr)])) transfer_rules
end
(* Domainp theorem for predicator *)
fun Domainp_tac bnf pred_def ctxt =
let
val n = live_of_bnf bnf
val set_map's = set_map_of_bnf bnf
in
EVERY' [rtac ctxt ext, SELECT_GOAL (Local_Defs.unfold0_tac ctxt [@{thm Domainp.simps},
in_rel_of_bnf bnf, pred_def]), rtac ctxt iffI,
REPEAT_DETERM o eresolve_tac ctxt [exE, conjE, CollectE], hyp_subst_tac ctxt,
CONJ_WRAP' (fn set_map => EVERY' [rtac ctxt ballI, dtac ctxt (set_map RS equalityD1 RS set_mp),
etac ctxt imageE, dtac ctxt set_rev_mp, assume_tac ctxt,
REPEAT_DETERM o eresolve_tac ctxt [CollectE, @{thm case_prodE}],
hyp_subst_tac ctxt, rtac ctxt @{thm iffD2[OF arg_cong2[of _ _ _ _ Domainp, OF refl fst_conv]]},
etac ctxt @{thm DomainPI}]) set_map's,
REPEAT_DETERM o etac ctxt conjE,
REPEAT_DETERM o resolve_tac ctxt [exI, (refl RS conjI), rotate_prems 1 conjI],
rtac ctxt refl, rtac ctxt (box_equals OF [map_cong0_of_bnf bnf, map_comp_of_bnf bnf RS sym,
map_id_of_bnf bnf]),
REPEAT_DETERM_N n o (EVERY' [rtac ctxt @{thm box_equals[OF _ sym[OF o_apply] sym[OF id_apply]]},
rtac ctxt @{thm fst_conv}]), rtac ctxt CollectI,
CONJ_WRAP' (fn set_map => EVERY' [rtac ctxt (set_map RS @{thm ord_eq_le_trans}),
REPEAT_DETERM o resolve_tac ctxt [@{thm image_subsetI}, CollectI, @{thm case_prodI}],
dtac ctxt (rotate_prems 1 bspec), assume_tac ctxt,
etac ctxt @{thm DomainpE}, etac ctxt @{thm someI}]) set_map's
]
end
fun prove_Domainp_rel ctxt bnf pred_def =
let
val live = live_of_bnf bnf
val (((As, Bs), Ds), ctxt') = ctxt
|> mk_TFrees live
||>> mk_TFrees live
||>> mk_TFrees' (map Type.sort_of_atyp (deads_of_bnf bnf))
val relator = mk_rel_of_bnf Ds As Bs bnf
val relsT = map2 mk_pred2T As Bs
val (args, ctxt'') = Ctr_Sugar_Util.mk_Frees "R" relsT ctxt'
val lhs = mk_Domainp (list_comb (relator, args))
val rhs = Term.list_comb (mk_pred_of_bnf Ds As bnf, map mk_Domainp args)
val goal = HOLogic.mk_eq (lhs, rhs) |> HOLogic.mk_Trueprop
in
Goal.prove_sorry ctxt'' [] [] goal (fn {context = goal_ctxt, ...} =>
Domainp_tac bnf pred_def goal_ctxt 1)
|> Thm.close_derivation \<^here>
|> singleton (Variable.export ctxt'' ctxt)
|> Drule.zero_var_indexes
end
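(* The proved goal has the shape
"Domainp (rel_F R1 ... Rn) = pred_F (Domainp R1) ... (Domainp Rn)";
for lists this is roughly "Domainp (list_all2 R) = list_all (Domainp R)". *)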
fun predicator bnf lthy =
let
val pred_def = pred_set_of_bnf bnf
val Domainp_rel = prove_Domainp_rel lthy bnf pred_def
val rel_eq_onp = rel_eq_onp_of_bnf bnf
val Domainp_rel_thm_name = Binding.qualify_name true (base_name_of_bnf bnf) "Domainp_rel"
val pred_data = Transfer.mk_pred_data pred_def rel_eq_onp []
val type_name = type_name_of_bnf bnf
val relator_domain_attr = @{attributes [relator_domain]}
val notes = [((Domainp_rel_thm_name, []), [([Domainp_rel], relator_domain_attr)])]
in
lthy
|> Local_Theory.notes notes
|> snd
- |> Local_Theory.declaration {syntax = false, pervasive = true}
+ |> Local_Theory.declaration {syntax = false, pervasive = true, pos = \<^here>}
(fn phi => Transfer.update_pred_data type_name (Transfer.morph_pred_data phi pred_data))
end
(* BNF interpretation *)
fun transfer_bnf_interpretation bnf lthy =
let
val dead = dead_of_bnf bnf
val constr_notes = if dead > 0 then [] else prove_relation_constraints bnf lthy
val relator_eq_notes = if dead > 0 then [] else relator_eq bnf
val transfer_rule_notes = if dead > 0 then [] else bnf_transfer_rules bnf
in
lthy
|> Local_Theory.notes (constr_notes @ relator_eq_notes @ transfer_rule_notes)
|> snd
|> predicator bnf
end
val _ =
Theory.setup
(bnf_interpretation transfer_plugin (bnf_only_type_ctr transfer_bnf_interpretation))
(* simplification rules for the predicator *)
fun lookup_defined_pred_data ctxt name =
case Transfer.lookup_pred_data ctxt name of
SOME data => data
| NONE => raise NO_PRED_DATA ()
(* fp_sugar interpretation *)
fun fp_sugar_transfer_rules (fp_sugar:fp_sugar) =
let
val fp_ctr_sugar = #fp_ctr_sugar fp_sugar
val transfer_rules = #ctr_transfers fp_ctr_sugar @ #case_transfers fp_ctr_sugar
@ #disc_transfers fp_ctr_sugar @ #sel_transfers fp_ctr_sugar
@ these (Option.map #co_rec_transfers (#fp_co_induct_sugar fp_sugar))
val transfer_attr = @{attributes [transfer_rule]}
in
map (fn thm => (Binding.empty_atts, [([thm], transfer_attr)])) transfer_rules
end
fun register_pred_injects fp_sugar lthy =
let
val pred_injects = #pred_injects (#fp_bnf_sugar fp_sugar)
val type_name = type_name_of_bnf (#fp_bnf fp_sugar)
val pred_data = lookup_defined_pred_data lthy type_name
|> Transfer.update_pred_simps pred_injects
in
lthy
- |> Local_Theory.declaration {syntax = false, pervasive = true}
+ |> Local_Theory.declaration {syntax = false, pervasive = true, pos = \<^here>}
(fn phi => Transfer.update_pred_data type_name (Transfer.morph_pred_data phi pred_data))
end
fun transfer_fp_sugars_interpretation fp_sugar lthy =
let
val lthy = register_pred_injects fp_sugar lthy
val transfer_rules_notes = fp_sugar_transfer_rules fp_sugar
in
lthy
|> Local_Theory.notes transfer_rules_notes
|> snd
end
val _ =
Theory.setup (fp_sugars_interpretation transfer_plugin
(fp_sugar_only_type_ctr (fold transfer_fp_sugars_interpretation)))
end
diff --git a/src/HOL/Tools/functor.ML b/src/HOL/Tools/functor.ML
--- a/src/HOL/Tools/functor.ML
+++ b/src/HOL/Tools/functor.ML
@@ -1,281 +1,281 @@
(* Title: HOL/Tools/functor.ML
Author: Florian Haftmann, TU Muenchen
Functorial structure of types.
*)
signature FUNCTOR =
sig
val find_atomic: Proof.context -> typ -> (typ * (bool * bool)) list
val construct_mapper: Proof.context -> (string * bool -> term)
-> bool -> typ -> typ -> term
val functor_: string option -> term -> local_theory -> Proof.state
val functor_cmd: string option -> string -> Proof.context -> Proof.state
type entry
val entries: Proof.context -> entry list Symtab.table
end;
structure Functor : FUNCTOR =
struct
(* bookkeeping *)
val compN = "comp";
val idN = "id";
val compositionalityN = "compositionality";
val identityN = "identity";
type entry = { mapper: term, variances: (sort * (bool * bool)) list,
comp: thm, id: thm };
structure Data = Generic_Data
(
type T = entry list Symtab.table
val empty = Symtab.empty
fun merge data = Symtab.merge (K true) data
);
val entries = Data.get o Context.Proof;
(* type analysis *)
fun term_with_typ ctxt T t =
Envir.subst_term_types
(Sign.typ_match (Proof_Context.theory_of ctxt) (fastype_of t, T) Vartab.empty) t;
fun find_atomic ctxt T =
let
val variances_of = Option.map #variances o try hd o Symtab.lookup_list (entries ctxt);
fun add_variance is_contra T =
AList.map_default (op =) (T, (false, false))
((if is_contra then apsnd else apfst) (K true));
fun analyze' is_contra (_, (co, contra)) T =
(if co then analyze is_contra T else I)
#> (if contra then analyze (not is_contra) T else I)
and analyze is_contra (T as Type (tyco, Ts)) = (case variances_of tyco
of NONE => add_variance is_contra T
| SOME variances => fold2 (analyze' is_contra) variances Ts)
| analyze is_contra T = add_variance is_contra T;
in analyze false T [] end;
fun construct_mapper ctxt atomic =
let
val lookup = hd o Symtab.lookup_list (entries ctxt);
fun constructs is_contra (_, (co, contra)) T T' =
(if co then [construct is_contra T T'] else [])
@ (if contra then [construct (not is_contra) T T'] else [])
and construct is_contra (T as Type (tyco, Ts)) (T' as Type (_, Ts')) =
let
val { mapper = raw_mapper, variances, ... } = lookup tyco;
val args = maps (fn (arg_pattern, (T, T')) =>
constructs is_contra arg_pattern T T')
(variances ~~ (Ts ~~ Ts'));
val (U, U') = if is_contra then (T', T) else (T, T');
val mapper = term_with_typ ctxt (map fastype_of args ---> U --> U') raw_mapper;
in list_comb (mapper, args) end
| construct is_contra (TFree (v, _)) (TFree _) = atomic (v, is_contra);
in construct end;
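(* For example, given registered mappers "map" for 'a list and "map_option" for
'a option, constructing a mapper from "'a list option" to "'b list option" with
an atomic function f for the covariant variable 'a yields, roughly, the term
"map_option (map f)". *)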
(* mapper properties *)
val compositionality_ss =
simpset_of (put_simpset HOL_basic_ss \<^context> addsimps [Simpdata.mk_eq @{thm comp_def}]);
fun make_comp_prop ctxt variances (tyco, mapper) =
let
val sorts = map fst variances
val (((vs3, vs2), vs1), _) = ctxt
|> Variable.invent_types sorts
||>> Variable.invent_types sorts
||>> Variable.invent_types sorts
val (Ts1, Ts2, Ts3) = (map TFree vs1, map TFree vs2, map TFree vs3);
fun mk_argT ((T, T'), (_, (co, contra))) =
(if co then [(T --> T')] else [])
@ (if contra then [(T' --> T)] else []);
val contras = maps (fn (_, (co, contra)) =>
(if co then [false] else []) @ (if contra then [true] else [])) variances;
val Ts21 = maps mk_argT ((Ts2 ~~ Ts1) ~~ variances);
val Ts32 = maps mk_argT ((Ts3 ~~ Ts2) ~~ variances);
fun invents n k nctxt =
let
val names = Name.invent nctxt n k;
in (names, fold Name.declare names nctxt) end;
val ((names21, names32), nctxt) = Variable.names_of ctxt
|> invents "f" (length Ts21)
||>> invents "f" (length Ts32);
val T1 = Type (tyco, Ts1);
val T2 = Type (tyco, Ts2);
val T3 = Type (tyco, Ts3);
val (args21, args32) = (names21 ~~ Ts21, names32 ~~ Ts32);
val args31 = map2 (fn is_contra => fn ((f21, T21), (f32, T32)) =>
if not is_contra then
HOLogic.mk_comp (Free (f21, T21), Free (f32, T32))
else
HOLogic.mk_comp (Free (f32, T32), Free (f21, T21))
) contras (args21 ~~ args32)
fun mk_mapper T T' args = list_comb
(term_with_typ ctxt (map fastype_of args ---> T --> T') mapper, args);
val mapper21 = mk_mapper T2 T1 (map Free args21);
val mapper32 = mk_mapper T3 T2 (map Free args32);
val mapper31 = mk_mapper T3 T1 args31;
val eq1 = (HOLogic.mk_Trueprop o HOLogic.mk_eq)
(HOLogic.mk_comp (mapper21, mapper32), mapper31);
val x = Free (the_single (Name.invent nctxt (Long_Name.base_name tyco) 1), T3)
val eq2 = (HOLogic.mk_Trueprop o HOLogic.mk_eq)
(mapper21 $ (mapper32 $ x), mapper31 $ x);
val comp_prop = fold_rev Logic.all (map Free (args21 @ args32)) eq1;
val compositionality_prop = fold_rev Logic.all (map Free (args21 @ args32) @ [x]) eq2;
fun prove_compositionality ctxt comp_thm =
Goal.prove_sorry ctxt [] [] compositionality_prop
(K (ALLGOALS (Method.insert_tac ctxt [@{thm fun_cong} OF [comp_thm]]
THEN' Simplifier.asm_lr_simp_tac (put_simpset compositionality_ss ctxt)
THEN_ALL_NEW (Goal.assume_rule_tac ctxt))));
in (comp_prop, prove_compositionality) end;
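(* For a covariant one-argument type such as 'a list with mapper "map", the
generated goals are roughly
  comp:             "map f o map g = map (f o g)"
  compositionality: "map f (map g x) = map (f o g) x"
with the composition reversed at contravariant positions. *)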
val identity_ss =
simpset_of (put_simpset HOL_basic_ss \<^context> addsimps [Simpdata.mk_eq @{thm id_def}]);
fun make_id_prop ctxt variances (tyco, mapper) =
let
val (vs, _) = Variable.invent_types (map fst variances) ctxt;
val Ts = map TFree vs;
fun bool_num b = if b then 1 else 0;
fun mk_argT (T, (_, (co, contra))) =
replicate (bool_num co + bool_num contra) T
val arg_Ts = maps mk_argT (Ts ~~ variances)
val T = Type (tyco, Ts);
val head = term_with_typ ctxt (map (fn T => T --> T) arg_Ts ---> T --> T) mapper;
val lhs1 = list_comb (head, map (HOLogic.id_const) arg_Ts);
val lhs2 = list_comb (head, map (fn arg_T => Abs ("x", arg_T, Bound 0)) arg_Ts);
val rhs = HOLogic.id_const T;
val (id_prop, identity_prop) =
apply2 (HOLogic.mk_Trueprop o HOLogic.mk_eq o rpair rhs) (lhs1, lhs2);
fun prove_identity ctxt id_thm =
Goal.prove_sorry ctxt [] [] identity_prop
(K (ALLGOALS (Method.insert_tac ctxt [id_thm] THEN'
Simplifier.asm_lr_simp_tac (put_simpset identity_ss ctxt))));
in (id_prop, prove_identity) end;
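(* Continuing the 'a list example, the two generated goals are roughly
  id:       "map id = id"
  identity: "map (%x. x) = id"
where the second form avoids the "id" constant in argument position. *)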
(* analyzing and registering mappers *)
fun consume _ _ [] = (false, [])
| consume eq x (ys as z :: zs) = if eq (x, z) then (true, zs) else (false, ys);
fun split_mapper_typ "fun" T =
let
val (Ts', T') = strip_type T;
val (Ts'', T'') = split_last Ts';
val (Ts''', T''') = split_last Ts'';
in (Ts''', T''', T'' --> T') end
| split_mapper_typ _ T =
let
val (Ts', T') = strip_type T;
val (Ts'', T'') = split_last Ts';
in (Ts'', T'', T') end;
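(* For example, for the list mapper "map :: ('a => 'b) => 'a list => 'b list"
this returns (["'a => 'b"], "'a list", "'b list"); the "fun" case strips one
more argument because the mapped type is itself a function type. *)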
fun analyze_mapper ctxt input_mapper =
let
val T = fastype_of input_mapper;
val _ = Type.no_tvars T;
val _ =
if null (subtract (op =) (Term.add_tfreesT T []) (Term.add_tfrees input_mapper []))
then ()
else error ("Illegal additional type variable(s) in term: " ^ Syntax.string_of_term ctxt input_mapper);
val _ =
if null (Term.add_vars (singleton
(Variable.export_terms (Proof_Context.augment input_mapper ctxt) ctxt) input_mapper) [])
then ()
else error ("Illegal locally free variable(s) in term: "
^ Syntax.string_of_term ctxt input_mapper);
val mapper = singleton (Variable.polymorphic ctxt) input_mapper;
val _ =
if null (Term.add_tfreesT (fastype_of mapper) []) then ()
else error ("Illegal locally fixed type variable(s) in type: " ^ Syntax.string_of_typ ctxt T);
fun add_tycos (Type (tyco, Ts)) = insert (op =) tyco #> fold add_tycos Ts
| add_tycos _ = I;
val tycos = add_tycos T [];
val tyco = if tycos = ["fun"] then "fun"
else case remove (op =) "fun" tycos
of [tyco] => tyco
| _ => error ("Bad number of type constructors: " ^ Syntax.string_of_typ ctxt T);
in (mapper, T, tyco) end;
fun analyze_variances ctxt tyco T =
let
fun bad_typ () = error ("Bad mapper type: " ^ Syntax.string_of_typ ctxt T);
val (Ts, T1, T2) = split_mapper_typ tyco T
handle List.Empty => bad_typ ();
val _ =
apply2 ((fn tyco' => if tyco' = tyco then () else bad_typ ()) o fst o dest_Type) (T1, T2)
handle TYPE _ => bad_typ ();
val (vs1, vs2) =
apply2 (map dest_TFree o snd o dest_Type) (T1, T2)
handle TYPE _ => bad_typ ();
val _ = if has_duplicates (eq_fst (op =)) (vs1 @ vs2)
then bad_typ () else ();
fun check_variance_pair (var1 as (_, sort1), var2 as (_, sort2)) =
let
val coT = TFree var1 --> TFree var2;
val contraT = TFree var2 --> TFree var1;
val sort = Sign.inter_sort (Proof_Context.theory_of ctxt) (sort1, sort2);
in
consume (op =) coT
##>> consume (op =) contraT
#>> pair sort
end;
val (variances, left_variances) = fold_map check_variance_pair (vs1 ~~ vs2) Ts;
val _ = if null left_variances then () else bad_typ ();
in variances end;
fun gen_functor prep_term some_prfx raw_mapper lthy =
let
val (mapper, T, tyco) = analyze_mapper lthy (prep_term lthy raw_mapper);
val prfx = the_default (Long_Name.base_name tyco) some_prfx;
val variances = analyze_variances lthy tyco T;
val (comp_prop, prove_compositionality) = make_comp_prop lthy variances (tyco, mapper);
val (id_prop, prove_identity) = make_id_prop lthy variances (tyco, mapper);
val qualify = Binding.qualify true prfx o Binding.name;
fun mapper_declaration comp_thm id_thm phi context =
let
val typ_instance = Sign.typ_instance (Context.theory_of context);
val mapper' = Morphism.term phi mapper;
val T_T' = apply2 fastype_of (mapper, mapper');
val vars = Term.add_vars mapper' [];
in
if null vars andalso typ_instance T_T' andalso typ_instance (swap T_T')
then (Data.map o Symtab.cons_list) (tyco,
{ mapper = mapper', variances = variances,
comp = Morphism.thm phi comp_thm, id = Morphism.thm phi id_thm }) context
else context
end;
fun after_qed [single_comp_thm, single_id_thm] lthy =
lthy
|> Local_Theory.note ((qualify compN, []), single_comp_thm)
||>> Local_Theory.note ((qualify idN, []), single_id_thm)
|-> (fn ((_, [comp_thm]), (_, [id_thm])) => fn lthy =>
lthy
|> Local_Theory.note ((qualify compositionalityN, []),
[prove_compositionality lthy comp_thm])
|> snd
|> Local_Theory.note ((qualify identityN, []),
[prove_identity lthy id_thm])
|> snd
- |> Local_Theory.declaration {syntax = false, pervasive = false}
+ |> Local_Theory.declaration {syntax = false, pervasive = false, pos = \<^here>}
(mapper_declaration comp_thm id_thm))
in
lthy
|> Proof.theorem NONE after_qed (map (fn t => [(t, [])]) [comp_prop, id_prop])
end
val functor_ = gen_functor Syntax.check_term;
val functor_cmd = gen_functor Syntax.read_term;
val _ =
Outer_Syntax.local_theory_to_proof \<^command_keyword>\<open>functor\<close>
"register operations managing the functorial structure of a type"
(Scan.option (Parse.name --| \<^keyword>\<open>:\<close>) -- Parse.term >> uncurry functor_cmd);
end;
diff --git a/src/HOL/Tools/groebner.ML b/src/HOL/Tools/groebner.ML
--- a/src/HOL/Tools/groebner.ML
+++ b/src/HOL/Tools/groebner.ML
@@ -1,986 +1,986 @@
(* Title: HOL/Tools/groebner.ML
Author: Amine Chaieb, TU Muenchen
*)
signature GROEBNER =
sig
val ring_and_ideal_conv:
{idom: thm list, ring: cterm list * thm list, field: cterm list * thm list,
vars: cterm list, semiring: cterm list * thm list, ideal : thm list} ->
(cterm -> Rat.rat) -> (Rat.rat -> cterm) ->
conv -> conv ->
{ring_conv: Proof.context -> conv,
simple_ideal: cterm list -> cterm -> cterm ord -> cterm list,
multi_ideal: cterm list -> cterm list -> cterm list -> (cterm * cterm) list,
poly_eq_ss: simpset, unwind_conv: Proof.context -> conv}
val ring_tac: thm list -> thm list -> Proof.context -> int -> tactic
val ideal_tac: thm list -> thm list -> Proof.context -> int -> tactic
val algebra_tac: thm list -> thm list -> Proof.context -> int -> tactic
end
structure Groebner : GROEBNER =
struct
val concl = Thm.cprop_of #> Thm.dest_arg;
fun is_binop ct ct' =
(case Thm.term_of ct' of
c $ _ $ _ => Thm.term_of ct aconv c
| _ => false);
fun dest_binary ct ct' =
if is_binop ct ct' then Thm.dest_binop ct'
else raise CTERM ("dest_binary: bad binop", [ct, ct'])
val denominator_rat = Rat.dest #> snd #> Rat.of_int;
fun int_of_rat a =
case Rat.dest a of (i,1) => i | _ => error "int_of_rat: not an int";
val lcm_rat = fn x => fn y => Rat.of_int (Integer.lcm (int_of_rat x) (int_of_rat y));
val (eqF_intr, eqF_elim) =
let val [th1,th2] = @{thms PFalse}
in (fn th => th COMP th2, fn th => th COMP th1) end;
val (PFalse, PFalse') =
let val PFalse_eq = nth @{thms simp_thms} 13
in (PFalse_eq RS iffD1, PFalse_eq RS iffD2) end;
(* Type for recording history, i.e. how a polynomial was obtained. *)
datatype history =
Start of int
| Mmul of (Rat.rat * int list) * history
| Add of history * history;
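(* For instance, "Add (Mmul ((c, m), Start 0), Start 1)" records that a
polynomial was obtained by multiplying input polynomial 0 by the monomial
(c, m) and adding input polynomial 1; resolve_proof below flattens such a
history into cofactors for the input polynomials. *)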
(* Monomial ordering. *)
fun morder_lt m1 m2 =
let fun lexorder l1 l2 =
case (l1,l2) of
([],[]) => false
| (x1::o1,x2::o2) => x1 > x2 orelse x1 = x2 andalso lexorder o1 o2
| _ => error "morder: inconsistent monomial lengths"
val n1 = Integer.sum m1
val n2 = Integer.sum m2 in
n1 < n2 orelse n1 = n2 andalso lexorder m1 m2
end;
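(* Monomials are exponent vectors over a fixed variable list: over [x, y],
[1, 0] stands for x and [1, 1] for x*y.  The order is graded, comparing total
degree first, so e.g. morder_lt [1, 0] [1, 1] holds; ties are broken by the
lexicographic comparison in lexorder. *)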
(* Arithmetic on canonical polynomials. *)
fun grob_neg l = map (fn (c,m) => (Rat.neg c,m)) l;
fun grob_add l1 l2 =
case (l1,l2) of
([],l2) => l2
| (l1,[]) => l1
| ((c1,m1)::o1,(c2,m2)::o2) =>
if m1 = m2 then
let val c = c1 + c2 val rest = grob_add o1 o2 in
if c = @0 then rest else (c,m1)::rest end
else if morder_lt m2 m1 then (c1,m1)::(grob_add o1 l2)
else (c2,m2)::(grob_add l1 o2);
fun grob_sub l1 l2 = grob_add l1 (grob_neg l2);
fun grob_mmul (c1,m1) (c2,m2) = (c1 * c2, ListPair.map (op +) (m1, m2));
fun grob_cmul cm pol = map (grob_mmul cm) pol;
fun grob_mul l1 l2 =
case l1 of
[] => []
| (h1::t1) => grob_add (grob_cmul h1 l2) (grob_mul t1 l2);
fun grob_inv l =
case l of
[(c,vs)] => if (forall (fn x => x = 0) vs) then
if c = @0 then error "grob_inv: division by zero"
else [(@1 / c,vs)]
else error "grob_inv: non-constant divisor polynomial"
| _ => error "grob_inv: non-constant divisor polynomial";
fun grob_div l1 l2 =
case l2 of
[(c,l)] => if (forall (fn x => x = 0) l) then
if c = @0 then error "grob_div: division by zero"
else grob_cmul (@1 / c,l) l1
else error "grob_div: non-constant divisor polynomial"
| _ => error "grob_div: non-constant divisor polynomial";
fun grob_pow vars l n =
if n < 0 then error "grob_pow: negative power"
else if n = 0 then [(@1,map (K 0) vars)]
else grob_mul l (grob_pow vars l (n - 1));
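(* A canonical polynomial is a list of (coefficient, exponent vector) pairs
with nonzero coefficients, kept in decreasing monomial order; over [x, y] the
polynomial 2*x*y + 3 is, roughly, [(2, [1, 1]), (3, [0, 0])] with coefficients
of type Rat.rat.  grob_add merges two such lists, dropping terms whose
coefficients cancel. *)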
(* Monomial division operation. *)
fun mdiv (c1,m1) (c2,m2) =
(c1 / c2,
map2 (fn n1 => fn n2 => if n1 < n2 then error "mdiv" else n1 - n2) m1 m2);
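(* Division is componentwise on exponents and fails if any exponent would
become negative; e.g. mdiv (6, [2, 1]) (2, [1, 0]) gives (3, [1, 1]),
i.e. 6*x^2*y divided by 2*x is 3*x*y. *)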
(* Lowest common multiple of two monomials. *)
fun mlcm (_,m1) (_,m2) = (@1, ListPair.map Int.max (m1, m2));
(* Reduce monomial cm by polynomial pol, returning replacement for cm. *)
fun reduce1 cm (pol,hpol) =
case pol of
[] => error "reduce1"
| cm1::cms => ((let val (c,m) = mdiv cm cm1 in
(grob_cmul (~ c, m) cms,
Mmul ((~ c,m),hpol)) end)
handle ERROR _ => error "reduce1");
(* Try this for all polynomials in a basis. *)
fun tryfind f l =
case l of
[] => error "tryfind"
| (h::t) => ((f h) handle ERROR _ => tryfind f t);
fun reduceb cm basis = tryfind (fn p => reduce1 cm p) basis;
(* Reduction of a polynomial (always picking largest monomial possible). *)
fun reduce basis (pol,hist) =
case pol of
[] => (pol,hist)
| cm::ptl => ((let val (q,hnew) = reduceb cm basis in
reduce basis (grob_add q ptl,Add(hnew,hist)) end)
handle (ERROR _) =>
(let val (q,hist') = reduce basis (ptl,hist) in
(cm::q,hist') end));
(* Check for orthogonality w.r.t. LCM. *)
fun orthogonal l p1 p2 =
snd l = snd(grob_mmul (hd p1) (hd p2));
(* Compute S-polynomial of two polynomials. *)
fun spoly cm ph1 ph2 =
case (ph1,ph2) of
(([],h),_) => ([],h)
| (_,([],h)) => ([],h)
| ((cm1::ptl1,his1),(cm2::ptl2,his2)) =>
(grob_sub (grob_cmul (mdiv cm cm1) ptl1)
(grob_cmul (mdiv cm cm2) ptl2),
Add(Mmul(mdiv cm cm1,his1),
Mmul(mdiv (~ (fst cm),snd cm) cm2,his2)));
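(* Given the lcm cm of the two leading monomials, the S-polynomial is roughly
(cm / lt p1) * tail p1 - (cm / lt p2) * tail p2, which cancels the leading
terms of the two polynomials; the accompanying history records both
multiplications so that a certificate can be reconstructed later. *)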
(* Make a polynomial monic. *)
fun monic (pol,hist) =
if null pol then (pol,hist) else
let val (c',m') = hd pol in
(map (fn (c,m) => (c / c',m)) pol,
Mmul((@1 / c',map (K 0) m'),hist)) end;
(* The most popular heuristic is to order critical pairs by LCM monomial. *)
fun forder ((_,m1),_) ((_,m2),_) = morder_lt m1 m2;
fun poly_lt p q =
case (p,q) of
(_,[]) => false
| ([],_) => true
| ((c1,m1)::o1,(c2,m2)::o2) =>
c1 < c2 orelse
c1 = c2 andalso ((morder_lt m1 m2) orelse m1 = m2 andalso poly_lt o1 o2);
fun align ((p,hp),(q,hq)) =
if poly_lt p q then ((p,hp),(q,hq)) else ((q,hq),(p,hp));
fun poly_eq p1 p2 =
eq_list (fn ((c1, m1), (c2, m2)) => c1 = c2 andalso (m1: int list) = m2) (p1, p2);
fun memx ((p1,_),(p2,_)) ppairs =
not (exists (fn ((q1,_),(q2,_)) => poly_eq p1 q1 andalso poly_eq p2 q2) ppairs);
(* Buchberger's second criterion. *)
fun criterion2 basis (lcm,((p1,h1),(p2,h2))) opairs =
exists (fn g => not(poly_eq (fst g) p1) andalso not(poly_eq (fst g) p2) andalso
can (mdiv lcm) (hd(fst g)) andalso
not(memx (align (g,(p1,h1))) (map snd opairs)) andalso
not(memx (align (g,(p2,h2))) (map snd opairs))) basis;
(* Test for hitting constant polynomial. *)
fun constant_poly p =
length p = 1 andalso forall (fn x => x = 0) (snd(hd p));
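(* Reaching a nonzero constant polynomial means that 1 lies in the generated
ideal, i.e. the input equations are inconsistent; grobner_refute below searches
the computed basis for such an element and returns its history. *)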
(* Grobner basis algorithm. *)
(* FIXME: try to get rid of mergesort? *)
fun merge ord l1 l2 =
case l1 of
[] => l2
| h1::t1 =>
case l2 of
[] => l1
| h2::t2 => if ord h1 h2 then h1::(merge ord t1 l2)
else h2::(merge ord l1 t2);
fun mergesort ord l =
let
fun mergepairs l1 l2 =
case (l1,l2) of
([s],[]) => s
| (l,[]) => mergepairs [] l
| (l,[s1]) => mergepairs (s1::l) []
| (l,(s1::s2::ss)) => mergepairs ((merge ord s1 s2)::l) ss
in if null l then [] else mergepairs [] (map (fn x => [x]) l)
end;
fun grobner_basis basis pairs =
case pairs of
[] => basis
| (l,(p1,p2))::opairs =>
let val (sph as (sp,_)) = monic (reduce basis (spoly l p1 p2))
in
if null sp orelse criterion2 basis (l,(p1,p2)) opairs
then grobner_basis basis opairs
else if constant_poly sp then grobner_basis (sph::basis) []
else
let
val rawcps = map (fn p => (mlcm (hd(fst p)) (hd sp),align(p,sph)))
basis
val newcps = filter (fn (l,(p,q)) => not(orthogonal l (fst p) (fst q)))
rawcps
in grobner_basis (sph::basis)
(merge forder opairs (mergesort forder newcps))
end
end;
(* Interreduce initial polynomials. *)
fun grobner_interreduce rpols ipols =
case ipols of
[] => map monic (rev rpols)
| p::ps => let val p' = reduce (rpols @ ps) p in
if null (fst p') then grobner_interreduce rpols ps
else grobner_interreduce (p'::rpols) ps end;
(* Overall function. *)
fun grobner pols =
let val npols = map_index (fn (n, p) => (p, Start n)) pols
val phists = filter (fn (p,_) => not (null p)) npols
val bas = grobner_interreduce [] (map monic phists)
val prs0 = map_product pair bas bas
val prs1 = filter (fn ((x,_),(y,_)) => poly_lt x y) prs0
val prs2 = map (fn (p,q) => (mlcm (hd(fst p)) (hd(fst q)),(p,q))) prs1
val prs3 =
filter (fn (l,(p,q)) => not(orthogonal l (fst p) (fst q))) prs2 in
grobner_basis bas (mergesort forder prs3) end;
(* Get proof of contradiction from Grobner basis. *)
fun find p l =
case l of
[] => error "find"
| (h::t) => if p(h) then h else find p t;
fun grobner_refute pols =
let val gb = grobner pols in
snd(find (fn (p,_) => length p = 1 andalso forall (fn x=> x=0) (snd(hd p))) gb)
end;
(* Turn proof into a certificate as sum of multipliers. *)
(* In principle this is very inefficient: in a heavily shared proof it may *)
(* recompute the same subresult many times; memoizing subproofs in a cache would avoid this. *)
fun resolve_proof vars prf =
case prf of
Start(~1) => []
| Start m => [(m,[(@1,map (K 0) vars)])]
| Mmul(pol,lin) =>
let val lis = resolve_proof vars lin in
map (fn (n,p) => (n,grob_cmul pol p)) lis end
| Add(lin1,lin2) =>
let val lis1 = resolve_proof vars lin1
val lis2 = resolve_proof vars lin2
val dom = distinct (op =) (union (op =) (map fst lis1) (map fst lis2))
in
map (fn n => let val a = these (AList.lookup (op =) lis1 n)
val b = these (AList.lookup (op =) lis2 n)
in (n,grob_add a b) end) dom end;
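(* The result is an association list from input-polynomial index to cofactor:
roughly, the derived polynomial equals the sum over all (n, q) in the list of
q multiplied by input polynomial n. *)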
(* Run the procedure and produce Weak Nullstellensatz certificate. *)
fun grobner_weak vars pols =
let val cert = resolve_proof vars (grobner_refute pols)
val l =
fold_rev (fold_rev (lcm_rat o denominator_rat o fst) o snd) cert @1 in
(l,map (fn (i,p) => (i,map (fn (d,m) => (l * d,m)) p)) cert) end;
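(* Roughly, the returned pair (l, cert) satisfies l = sum of q_i * pol_i for
the cofactors q_i recorded in cert, where l is a common denominator chosen so
that the scaled cofactors are denominator-free. *)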
(* Prove a polynomial is in ideal generated by others, using Grobner basis. *)
fun grobner_ideal vars pols pol =
let val (pol',h) = reduce (grobner pols) (grob_neg pol,Start(~1)) in
if not (null pol') then error "grobner_ideal: not in the ideal" else
resolve_proof vars h end;
(* Produce Strong Nullstellensatz certificate for a power of pol. *)
fun grobner_strong vars pols pol =
let val vars' = \<^cterm>\<open>True\<close>::vars
val grob_z = [(@1, 1::(map (K 0) vars))]
val grob_1 = [(@1, (map (K 0) vars'))]
fun augment p = map (fn (c,m) => (c,0::m)) p
val pols' = map augment pols
val pol' = augment pol
val allpols = (grob_sub (grob_mul grob_z pol') grob_1)::pols'
val (l,cert) = grobner_weak vars' allpols
val d = fold (fold (Integer.max o hd o snd) o snd) cert 0
fun transform_monomial (c,m) =
grob_cmul (c,tl m) (grob_pow vars pol (d - hd m))
fun transform_polynomial q = fold_rev (grob_add o transform_monomial) q []
val cert' = map (fn (c,q) => (c-1,transform_polynomial q))
(filter (fn (k,_) => k <> 0) cert) in
(d,l,cert') end;
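(* This is the Rabinowitsch trick: a fresh variable z and the polynomial
z * pol - 1 are added before running grobner_weak, and the resulting
certificate is rearranged into (d, l, cert) witnessing, roughly,
l * pol^d = sum of q_i * pol_i with the q_i taken from cert. *)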
(* Overall parametrized universal procedure for (semi)rings. *)
(* We return an ideal_conv and the actual ring prover. *)
fun refute_disj rfn tm =
case Thm.term_of tm of
Const(\<^const_name>\<open>HOL.disj\<close>,_)$_$_ =>
Drule.compose
(refute_disj rfn (Thm.dest_arg tm), 2,
Drule.compose (refute_disj rfn (Thm.dest_arg1 tm), 2, disjE))
| _ => rfn tm ;
val notnotD = @{thm notnotD};
fun mk_binop ct x y = Thm.apply (Thm.apply ct x) y
fun is_neg t =
case Thm.term_of t of
(Const(\<^const_name>\<open>Not\<close>,_)$_) => true
| _ => false;
fun is_eq t =
case Thm.term_of t of
(Const(\<^const_name>\<open>HOL.eq\<close>,_)$_$_) => true
| _ => false;
fun end_itlist f l =
case l of
[] => error "end_itlist"
| [x] => x
| (h::t) => f h (end_itlist f t);
val list_mk_binop = fn b => end_itlist (mk_binop b);
val list_dest_binop = fn b =>
let fun h acc t =
((let val (l,r) = dest_binary b t in h (h acc r) l end)
handle CTERM _ => (t::acc)) (* Why was this previously "handle _ =>"? *)
in h []
end;
val strip_exists =
let fun h (acc, t) =
case Thm.term_of t of
\<^Const_>\<open>Ex _ for \<open>Abs _\<close>\<close> =>
h (Thm.dest_abs_global (Thm.dest_arg t) |>> (fn v => v::acc))
| _ => (acc,t)
in fn t => h ([],t)
end;
fun is_forall t =
case Thm.term_of t of
(Const(\<^const_name>\<open>All\<close>,_)$Abs(_,_,_)) => true
| _ => false;
val nnf_simps = @{thms nnf_simps};
fun weak_dnf_conv ctxt =
Simplifier.rewrite (put_simpset HOL_basic_ss ctxt addsimps @{thms weak_dnf_simps});
val initial_ss =
simpset_of (put_simpset HOL_basic_ss \<^context>
addsimps nnf_simps
addsimps [not_all, not_ex]
addsimps map (fn th => th RS sym) (@{thms ex_simps} @ @{thms all_simps}));
fun initial_conv ctxt =
Simplifier.rewrite (put_simpset initial_ss ctxt);
val specl = fold_rev (fn x => fn th => Thm.instantiate' [] [SOME x] (th RS spec));
val cTrp = \<^cterm>\<open>Trueprop\<close>;
val cConj = \<^cterm>\<open>HOL.conj\<close>;
val (cNot,false_tm) = (\<^cterm>\<open>Not\<close>, \<^cterm>\<open>False\<close>);
val assume_Trueprop = Thm.apply cTrp #> Thm.assume;
val list_mk_conj = list_mk_binop cConj;
val conjs = list_dest_binop cConj;
val mk_neg = Thm.apply cNot;
fun striplist dest =
let
fun h acc x = case try dest x of
SOME (a,b) => h (h acc b) a
| NONE => x::acc
in h [] end;
fun list_mk_binop b = foldr1 (fn (s,t) => Thm.apply (Thm.apply b s) t);
val eq_commute = mk_meta_eq @{thm eq_commute};
fun sym_conv eq =
let val (l,r) = Thm.dest_binop eq
in Thm.instantiate' [SOME (Thm.ctyp_of_cterm l)] [SOME l, SOME r] eq_commute
end;
(* FIXME: copied from cqe.ML -- complex QE *)
fun conjuncts ct =
case Thm.term_of ct of
\<^term>\<open>HOL.conj\<close>$_$_ => (Thm.dest_arg1 ct)::(conjuncts (Thm.dest_arg ct))
| _ => [ct];
fun fold1 f = foldr1 (uncurry f);
fun mk_conj_tab th =
let fun h acc th =
case Thm.prop_of th of
\<^term>\<open>Trueprop\<close>$(\<^term>\<open>HOL.conj\<close>$_$_) =>
h (h acc (th RS conjunct2)) (th RS conjunct1)
| \<^term>\<open>Trueprop\<close>$p => (p,th)::acc
in fold (Termtab.insert Thm.eq_thm) (h [] th) Termtab.empty end;
fun is_conj (\<^term>\<open>HOL.conj\<close>$_$_) = true
| is_conj _ = false;
fun prove_conj tab cjs =
case cjs of
[c] => if is_conj (Thm.term_of c) then prove_conj tab (conjuncts c) else tab c
| c::cs => conjI OF [prove_conj tab [c], prove_conj tab cs];
fun conj_ac_rule eq =
let
val (l,r) = Thm.dest_equals eq
val ctabl = mk_conj_tab (Thm.assume (Thm.apply \<^cterm>\<open>Trueprop\<close> l))
val ctabr = mk_conj_tab (Thm.assume (Thm.apply \<^cterm>\<open>Trueprop\<close> r))
fun tabl c = the (Termtab.lookup ctabl (Thm.term_of c))
fun tabr c = the (Termtab.lookup ctabr (Thm.term_of c))
val thl = prove_conj tabl (conjuncts r) |> implies_intr_hyps
val thr = prove_conj tabr (conjuncts l) |> implies_intr_hyps
val eqI = Thm.instantiate' [] [SOME l, SOME r] @{thm iffI}
in Thm.implies_elim (Thm.implies_elim eqI thl) thr |> mk_meta_eq end;
(* END FIXME. *)
(* Conversion for the equivalence of existential statements whose
EX quantifiers are arranged in different orders. *)
fun ext ctxt T = Thm.cterm_of ctxt (Const (\<^const_name>\<open>Ex\<close>, (T --> \<^typ>\<open>bool\<close>) --> \<^typ>\<open>bool\<close>))
fun mk_ex ctxt v t = Thm.apply (ext ctxt (Thm.typ_of_cterm v)) (Thm.lambda v t)
fun choose v th th' = case Thm.concl_of th of
\<^term>\<open>Trueprop\<close> $ (Const(\<^const_name>\<open>Ex\<close>,_)$_) =>
let
val p = (funpow 2 Thm.dest_arg o Thm.cprop_of) th
val T = Thm.dest_ctyp0 (Thm.ctyp_of_cterm p)
val th0 = Conv.fconv_rule (Thm.beta_conversion true)
(Thm.instantiate' [SOME T] [SOME p, (SOME o Thm.dest_arg o Thm.cprop_of) th'] exE)
val pv = (Thm.rhs_of o Thm.beta_conversion true)
(Thm.apply \<^cterm>\<open>Trueprop\<close> (Thm.apply p v))
val th1 = Thm.forall_intr v (Thm.implies_intr pv th')
in Thm.implies_elim (Thm.implies_elim th0 th) th1 end
| _ => error "" (* FIXME ? *)
fun simple_choose ctxt v th =
choose v (Thm.assume ((Thm.apply \<^cterm>\<open>Trueprop\<close> o mk_ex ctxt v)
(Thm.dest_arg (hd (Thm.chyps_of th))))) th
fun mkexi v th =
let
val p = Thm.lambda v (Thm.dest_arg (Thm.cprop_of th))
in Thm.implies_elim
(Conv.fconv_rule (Thm.beta_conversion true)
(Thm.instantiate' [SOME (Thm.ctyp_of_cterm v)] [SOME p, SOME v] @{thm exI}))
th
end
fun ex_eq_conv ctxt t =
let
val (p0,q0) = Thm.dest_binop t
val (vs',P) = strip_exists p0
val (vs,_) = strip_exists q0
val th = Thm.assume (Thm.apply \<^cterm>\<open>Trueprop\<close> P)
val th1 = implies_intr_hyps (fold (simple_choose ctxt) vs' (fold mkexi vs th))
val th2 = implies_intr_hyps (fold (simple_choose ctxt) vs (fold mkexi vs' th))
val p = (Thm.dest_arg o Thm.dest_arg1 o Thm.cprop_of) th1
val q = (Thm.dest_arg o Thm.dest_arg o Thm.cprop_of) th1
in Thm.implies_elim (Thm.implies_elim (Thm.instantiate' [] [SOME p, SOME q] iffI) th1) th2
|> mk_meta_eq
end;
fun getname v = case Thm.term_of v of
Free(s,_) => s
| Var ((s,_),_) => s
| _ => "x"
fun mk_eq s t = Thm.apply (Thm.apply \<^cterm>\<open>(\<equiv>) :: bool \<Rightarrow> _\<close> s) t
fun mk_exists ctxt v th = Drule.arg_cong_rule (ext ctxt (Thm.typ_of_cterm v))
(Thm.abstract_rule (getname v) v th)
fun simp_ex_conv ctxt =
Simplifier.rewrite (put_simpset HOL_basic_ss ctxt addsimps @{thms simp_thms(39)})
fun free_in v t = Cterms.defined (Cterms.build (Drule.add_frees_cterm t)) v;
val vsubst = let
fun vsubst (t,v) tm =
(Thm.rhs_of o Thm.beta_conversion false) (Thm.apply (Thm.lambda v tm) t)
in fold vsubst end;
(** main **)
fun ring_and_ideal_conv
{vars = _, semiring = (sr_ops, _), ring = (r_ops, _),
field = (f_ops, _), idom, ideal}
dest_const mk_const ring_eq_conv ring_normalize_conv =
let
val [add_pat, mul_pat, pow_pat, zero_tm, one_tm] = sr_ops;
val [ring_add_tm, ring_mul_tm, ring_pow_tm] =
map Thm.dest_fun2 [add_pat, mul_pat, pow_pat];
val (ring_sub_tm, ring_neg_tm) =
(case r_ops of
[sub_pat, neg_pat] => (Thm.dest_fun2 sub_pat, Thm.dest_fun neg_pat)
|_ => (\<^cterm>\<open>True\<close>, \<^cterm>\<open>True\<close>));
val (field_div_tm, field_inv_tm) =
(case f_ops of
[div_pat, inv_pat] => (Thm.dest_fun2 div_pat, Thm.dest_fun inv_pat)
| _ => (\<^cterm>\<open>True\<close>, \<^cterm>\<open>True\<close>));
val [idom_thm, neq_thm] = idom;
val [idl_sub, idl_add0] =
if length ideal = 2 then ideal else [eq_commute, eq_commute]
fun ring_dest_neg t =
let val (l,r) = Thm.dest_comb t
in if Term.could_unify(Thm.term_of l, Thm.term_of ring_neg_tm) then r
else raise CTERM ("ring_dest_neg", [t])
end
fun field_dest_inv t =
let val (l,r) = Thm.dest_comb t in
if Term.could_unify (Thm.term_of l, Thm.term_of field_inv_tm) then r
else raise CTERM ("field_dest_inv", [t])
end
val ring_dest_add = dest_binary ring_add_tm;
val ring_mk_add = mk_binop ring_add_tm;
val ring_dest_sub = dest_binary ring_sub_tm;
val ring_dest_mul = dest_binary ring_mul_tm;
val ring_mk_mul = mk_binop ring_mul_tm;
val field_dest_div = dest_binary field_div_tm;
val ring_dest_pow = dest_binary ring_pow_tm;
val ring_mk_pow = mk_binop ring_pow_tm ;
fun grobvars tm acc =
if can dest_const tm then acc
else if can ring_dest_neg tm then grobvars (Thm.dest_arg tm) acc
else if can ring_dest_pow tm then grobvars (Thm.dest_arg1 tm) acc
else if can ring_dest_add tm orelse can ring_dest_sub tm
orelse can ring_dest_mul tm
then grobvars (Thm.dest_arg1 tm) (grobvars (Thm.dest_arg tm) acc)
else if can field_dest_inv tm
then
let val gvs = grobvars (Thm.dest_arg tm) []
in if null gvs then acc else tm::acc
end
else if can field_dest_div tm then
let val lvs = grobvars (Thm.dest_arg1 tm) acc
val gvs = grobvars (Thm.dest_arg tm) []
in if null gvs then lvs else tm::acc
end
else tm::acc ;
fun grobify_term vars tm =
((if not (member (op aconvc) vars tm) then raise CTERM ("Not a variable", [tm]) else
[(@1, map (fn i => if i aconvc tm then 1 else 0) vars)])
handle CTERM _ =>
((let val x = dest_const tm
in if x = @0 then [] else [(x,map (K 0) vars)]
end)
handle ERROR _ =>
((grob_neg(grobify_term vars (ring_dest_neg tm)))
handle CTERM _ =>
(
(grob_inv(grobify_term vars (field_dest_inv tm)))
handle CTERM _ =>
((let val (l,r) = ring_dest_add tm
in grob_add (grobify_term vars l) (grobify_term vars r)
end)
handle CTERM _ =>
((let val (l,r) = ring_dest_sub tm
in grob_sub (grobify_term vars l) (grobify_term vars r)
end)
handle CTERM _ =>
((let val (l,r) = ring_dest_mul tm
in grob_mul (grobify_term vars l) (grobify_term vars r)
end)
handle CTERM _ =>
( (let val (l,r) = field_dest_div tm
in grob_div (grobify_term vars l) (grobify_term vars r)
end)
handle CTERM _ =>
((let val (l,r) = ring_dest_pow tm
in grob_pow vars (grobify_term vars l) ((Thm.term_of #> HOLogic.dest_number #> snd) r)
end)
handle CTERM _ => error "grobify_term: unknown or invalid term")))))))));
val eq_tm = idom_thm |> concl |> Thm.dest_arg |> Thm.dest_arg |> Thm.dest_fun2;
val dest_eq = dest_binary eq_tm;
fun grobify_equation vars tm =
let val (l,r) = dest_binary eq_tm tm
in grob_sub (grobify_term vars l) (grobify_term vars r)
end;
fun grobify_equations tm =
let
val cjs = conjs tm
val rawvars =
fold_rev (fn eq => fn a => grobvars (Thm.dest_arg1 eq) (grobvars (Thm.dest_arg eq) a)) cjs []
val vars = sort Thm.term_ord (distinct (op aconvc) rawvars)
in (vars,map (grobify_equation vars) cjs)
end;
val holify_polynomial =
let fun holify_varpow (v,n) =
if n = 1 then v else ring_mk_pow v (Numeral.mk_cnumber \<^ctyp>\<open>nat\<close> n) (* FIXME *)
fun holify_monomial vars (c,m) =
let val xps = map holify_varpow (filter (fn (_,n) => n <> 0) (vars ~~ m))
in end_itlist ring_mk_mul (mk_const c :: xps)
end
fun holify_polynomial vars p =
if null p then mk_const @0
else end_itlist ring_mk_add (map (holify_monomial vars) p)
in holify_polynomial
end ;
fun idom_rule ctxt = simplify (put_simpset HOL_basic_ss ctxt addsimps [idom_thm]);
fun prove_nz n = eqF_elim
(ring_eq_conv(mk_binop eq_tm (mk_const n) (mk_const @0)));
val neq_01 = prove_nz @1;
fun neq_rule n th = [prove_nz n, th] MRS neq_thm;
fun mk_add th1 = Thm.combination (Drule.arg_cong_rule ring_add_tm th1);
fun refute ctxt tm =
if tm aconvc false_tm then assume_Trueprop tm else
((let
val (nths0,eths0) = List.partition (is_neg o concl) (HOLogic.conj_elims ctxt (assume_Trueprop tm))
val nths = filter (is_eq o Thm.dest_arg o concl) nths0
val eths = filter (is_eq o concl) eths0
in
if null eths then
let
val th1 = end_itlist (fn th1 => fn th2 => idom_rule ctxt (HOLogic.conj_intr ctxt th1 th2)) nths
val th2 =
Conv.fconv_rule
((Conv.arg_conv #> Conv.arg_conv) (Conv.binop_conv ring_normalize_conv)) th1
val conc = th2 |> concl |> Thm.dest_arg
val (l,_) = conc |> dest_eq
in Thm.implies_intr (Thm.apply cTrp tm)
(Thm.equal_elim (Drule.arg_cong_rule cTrp (eqF_intr th2))
(HOLogic.mk_obj_eq (Thm.reflexive l)))
end
else
let
val (vars,l,cert,noteqth) =(
if null nths then
let val (vars,pols) = grobify_equations(list_mk_conj(map concl eths))
val (l,cert) = grobner_weak vars pols
in (vars,l,cert,neq_01)
end
else
let
val nth = end_itlist (fn th1 => fn th2 => idom_rule ctxt (HOLogic.conj_intr ctxt th1 th2)) nths
val (vars,pol::pols) =
grobify_equations(list_mk_conj(Thm.dest_arg(concl nth)::map concl eths))
val (deg,l,cert) = grobner_strong vars pols pol
val th1 =
Conv.fconv_rule ((Conv.arg_conv o Conv.arg_conv) (Conv.binop_conv ring_normalize_conv)) nth
val th2 = funpow deg (idom_rule ctxt o HOLogic.conj_intr ctxt th1) neq_01
in (vars,l,cert,th2)
end)
val cert_pos = map (fn (i,p) => (i,filter (fn (c,_) => c > @0) p)) cert
val cert_neg = map (fn (i,p) => (i,map (fn (c,m) => (~ c,m))
(filter (fn (c,_) => c < @0) p))) cert
val herts_pos = map (fn (i,p) => (i,holify_polynomial vars p)) cert_pos
val herts_neg = map (fn (i,p) => (i,holify_polynomial vars p)) cert_neg
fun thm_fn pols =
if null pols then Thm.reflexive(mk_const @0) else
end_itlist mk_add
(map (fn (i,p) => Drule.arg_cong_rule (Thm.apply ring_mul_tm p)
(nth eths i |> mk_meta_eq)) pols)
val th1 = thm_fn herts_pos
val th2 = thm_fn herts_neg
val th3 = HOLogic.conj_intr ctxt (HOLogic.mk_obj_eq (mk_add (Thm.symmetric th1) th2)) noteqth
val th4 =
Conv.fconv_rule ((Conv.arg_conv o Conv.arg_conv o Conv.binop_conv) ring_normalize_conv)
(neq_rule l th3)
val (l, _) = dest_eq(Thm.dest_arg(concl th4))
in Thm.implies_intr (Thm.apply cTrp tm)
(Thm.equal_elim (Drule.arg_cong_rule cTrp (eqF_intr th4))
(HOLogic.mk_obj_eq (Thm.reflexive l)))
end
end) handle ERROR _ => raise CTERM ("Groebner-refute: unable to refute",[tm]))
fun ring ctxt tm =
let
fun mk_forall x p =
let
val T = Thm.typ_of_cterm x;
val all = Thm.cterm_of ctxt (Const (\<^const_name>\<open>All\<close>, (T --> \<^typ>\<open>bool\<close>) --> \<^typ>\<open>bool\<close>))
in Thm.apply all (Thm.lambda x p) end
val avs = Cterms.build (Drule.add_frees_cterm tm)
val P' = fold mk_forall (Cterms.list_set_rev avs) tm
val th1 = initial_conv ctxt (mk_neg P')
val (evs,bod) = strip_exists(concl th1) in
if is_forall bod then raise CTERM("ring: non-universal formula",[tm])
else
let
val th1a = weak_dnf_conv ctxt bod
val boda = concl th1a
val th2a = refute_disj (refute ctxt) boda
val th2b = [HOLogic.mk_obj_eq th1a, (th2a COMP notI) COMP PFalse'] MRS trans
val th2 = fold (fn v => fn th => (Thm.forall_intr v th) COMP allI) evs (th2b RS PFalse)
val th3 =
Thm.equal_elim
(Simplifier.rewrite (put_simpset HOL_basic_ss ctxt addsimps [not_ex RS sym])
(th2 |> Thm.cprop_of)) th2
in specl (Cterms.list_set_rev avs)
([[[HOLogic.mk_obj_eq th1, th3 RS PFalse'] MRS trans] MRS PFalse] MRS notnotD)
end
end
fun ideal tms tm ord =
let
val rawvars = fold_rev grobvars (tm::tms) []
val vars = sort ord (distinct (fn (x,y) => (Thm.term_of x) aconv (Thm.term_of y)) rawvars)
val pols = map (grobify_term vars) tms
val pol = grobify_term vars tm
val cert = grobner_ideal vars pols pol
in map_range (fn n => these (AList.lookup (op =) cert n) |> holify_polynomial vars)
(length pols)
end
fun poly_eq_conv t =
let val (a,b) = Thm.dest_binop t
in Conv.fconv_rule (Conv.arg_conv (Conv.arg1_conv ring_normalize_conv))
(Thm.instantiate' [] [SOME a, SOME b] idl_sub)
end
val poly_eq_simproc =
let
fun proc ct =
let val th = poly_eq_conv ct
in if Thm.is_reflexive th then NONE else SOME th end
in
Simplifier.cert_simproc (Thm.theory_of_thm idl_sub) "poly_eq_simproc"
{lhss = [Thm.term_of (Thm.lhs_of idl_sub)],
- proc = fn _ => fn _ => proc}
+ proc = Morphism.entity (fn _ => fn _ => proc)}
end;
val poly_eq_ss =
simpset_of (put_simpset HOL_basic_ss \<^context>
addsimps @{thms simp_thms}
addsimprocs [poly_eq_simproc])
local
fun is_defined v t =
let
val mons = striplist(dest_binary ring_add_tm) t
in member (op aconvc) mons v andalso
forall (fn m => v aconvc m
orelse not(Cterms.defined (Cterms.build (Drule.add_frees_cterm m)) v)) mons
end
fun isolate_variable vars tm =
let
val th = poly_eq_conv tm
val th' = (sym_conv then_conv poly_eq_conv) tm
val (v,th1) =
case find_first(fn v=> is_defined v (Thm.dest_arg1 (Thm.rhs_of th))) vars of
SOME v => (v,th')
| NONE => (the (find_first
(fn v => is_defined v (Thm.dest_arg1 (Thm.rhs_of th'))) vars) ,th)
val th2 = Thm.transitive th1
(Thm.instantiate' [] [(SOME o Thm.dest_arg1 o Thm.rhs_of) th1, SOME v]
idl_add0)
in Conv.fconv_rule(funpow 2 Conv.arg_conv ring_normalize_conv) th2
end
in
fun unwind_polys_conv ctxt tm =
let
val (vars,bod) = strip_exists tm
val cjs = striplist (dest_binary \<^cterm>\<open>HOL.conj\<close>) bod
val th1 = (the (get_first (try (isolate_variable vars)) cjs)
handle Option.Option => raise CTERM ("unwind_polys_conv",[tm]))
val eq = Thm.lhs_of th1
val bod' = list_mk_binop \<^cterm>\<open>HOL.conj\<close> (eq::(remove (op aconvc) eq cjs))
val th2 = conj_ac_rule (mk_eq bod bod')
val th3 =
Thm.transitive th2
(Drule.binop_cong_rule \<^cterm>\<open>HOL.conj\<close> th1
(Thm.reflexive (Thm.dest_arg (Thm.rhs_of th2))))
val v = Thm.dest_arg1(Thm.dest_arg1(Thm.rhs_of th3))
val th4 = Conv.fconv_rule (Conv.arg_conv (simp_ex_conv ctxt)) (mk_exists ctxt v th3)
val th5 = ex_eq_conv ctxt (mk_eq tm (fold (mk_ex ctxt) (remove op aconvc v vars) (Thm.lhs_of th4)))
in Thm.transitive th5 (fold (mk_exists ctxt) (remove op aconvc v vars) th4)
end;
end
local
fun scrub_var v m =
let
val ps = striplist ring_dest_mul m
val ps' = remove op aconvc v ps
in if null ps' then one_tm else fold1 ring_mk_mul ps'
end
fun find_multipliers v mons =
let
val mons1 = filter (fn m => free_in v m) mons
val mons2 = map (scrub_var v) mons1
in if null mons2 then zero_tm else fold1 ring_mk_add mons2
end
fun isolate_monomials vars tm =
let
val (vmons, cmons) =
List.partition (fn m =>
let val frees = Cterms.build (Drule.add_frees_cterm m)
in exists (Cterms.defined frees) vars end) (striplist ring_dest_add tm)
val cofactors = map (fn v => find_multipliers v vmons) vars
val cnc = if null cmons then zero_tm
else Thm.apply ring_neg_tm
(list_mk_binop ring_add_tm cmons)
in (cofactors,cnc)
end;
fun isolate_variables evs ps eq =
let
val vars = filter (fn v => free_in v eq) evs
val (qs,p) = isolate_monomials vars eq
val rs = ideal (qs @ ps) p Thm.term_ord
in (eq, take (length qs) rs ~~ vars)
end;
fun subst_in_poly i p = Thm.rhs_of (ring_normalize_conv (vsubst i p));
in
fun solve_idealism evs ps eqs =
if null evs then [] else
let
val (eq,cfs) = get_first (try (isolate_variables evs ps)) eqs |> the
val evs' = subtract op aconvc evs (map snd cfs)
val eqs' = map (subst_in_poly cfs) (remove op aconvc eq eqs)
in cfs @ solve_idealism evs' ps eqs'
end;
end;
in {ring_conv = ring, simple_ideal = ideal, multi_ideal = solve_idealism,
poly_eq_ss = poly_eq_ss, unwind_conv = unwind_polys_conv}
end;
fun find_term tm ctxt =
(case Thm.term_of tm of
Const (\<^const_name>\<open>HOL.eq\<close>, T) $ _ $ _ =>
if domain_type T = HOLogic.boolT then find_args tm ctxt
else (Thm.dest_arg tm, ctxt)
| Const (\<^const_name>\<open>Not\<close>, _) $ _ => find_term (Thm.dest_arg tm) ctxt
| Const (\<^const_name>\<open>All\<close>, _) $ _ => find_body (Thm.dest_arg tm) ctxt
| Const (\<^const_name>\<open>Ex\<close>, _) $ _ => find_body (Thm.dest_arg tm) ctxt
| Const (\<^const_name>\<open>HOL.conj\<close>, _) $ _ $ _ => find_args tm ctxt
| Const (\<^const_name>\<open>HOL.disj\<close>, _) $ _ $ _ => find_args tm ctxt
| Const (\<^const_name>\<open>HOL.implies\<close>, _) $ _ $ _ => find_args tm ctxt
| \<^term>\<open>Pure.imp\<close> $_$_ => find_args tm ctxt
| Const("(==)",_)$_$_ => find_args tm ctxt (* FIXME proper const name *)
| \<^term>\<open>Trueprop\<close>$_ => find_term (Thm.dest_arg tm) ctxt
| _ => raise TERM ("find_term", []))
and find_args tm ctxt =
let val (t, u) = Thm.dest_binop tm
in (find_term t ctxt handle TERM _ => find_term u ctxt) end
and find_body b ctxt =
let val ((_, b'), ctxt') = Variable.dest_abs_cterm b ctxt
in find_term b' ctxt' end;
fun get_ring_ideal_convs ctxt form =
case \<^try>\<open>find_term form ctxt\<close> of
NONE => NONE
| SOME (tm, ctxt') =>
(case Semiring_Normalizer.match ctxt' tm of
NONE => NONE
| SOME (res as (theory, {is_const = _, dest_const,
mk_const, conv = ring_eq_conv})) =>
SOME (ring_and_ideal_conv theory
dest_const (mk_const (Thm.ctyp_of_cterm tm)) (ring_eq_conv ctxt')
(Semiring_Normalizer.semiring_normalize_wrapper ctxt' res)))
fun presimplify ctxt add_thms del_thms =
asm_full_simp_tac (put_simpset HOL_basic_ss ctxt
addsimps (Named_Theorems.get ctxt \<^named_theorems>\<open>algebra\<close>)
delsimps del_thms addsimps add_thms);
fun ring_tac add_ths del_ths ctxt =
Object_Logic.full_atomize_tac ctxt
THEN' presimplify ctxt add_ths del_ths
THEN' CSUBGOAL (fn (p, i) =>
resolve_tac ctxt [let val form = Object_Logic.dest_judgment ctxt p
in case get_ring_ideal_convs ctxt form of
NONE => Thm.reflexive form
| SOME thy => #ring_conv thy ctxt form
end] i
handle TERM _ => no_tac
| CTERM _ => no_tac
| THM _ => no_tac);
local
fun lhs t = case Thm.term_of t of
Const(\<^const_name>\<open>HOL.eq\<close>,_)$_$_ => Thm.dest_arg1 t
| _=> raise CTERM ("ideal_tac - lhs",[t])
fun exitac _ NONE = no_tac
| exitac ctxt (SOME y) =
resolve_tac ctxt [Thm.instantiate' [SOME (Thm.ctyp_of_cterm y)] [NONE,SOME y] exI] 1
val claset = claset_of \<^context>
in
fun ideal_tac add_ths del_ths ctxt =
presimplify ctxt add_ths del_ths
THEN'
CSUBGOAL (fn (p, i) =>
case get_ring_ideal_convs ctxt p of
NONE => no_tac
| SOME thy =>
let
fun poly_exists_tac {asms = asms, concl = concl, prems = prems,
params = _, context = ctxt, schematics = _} =
let
val (evs,bod) = strip_exists (Thm.dest_arg concl)
val ps = map_filter (try (lhs o Thm.dest_arg)) asms
val cfs = (map swap o #multi_ideal thy evs ps)
(map Thm.dest_arg1 (conjuncts bod))
val ws = map (exitac ctxt o AList.lookup op aconvc cfs) evs
in EVERY (rev ws) THEN Method.insert_tac ctxt prems 1
THEN ring_tac add_ths del_ths ctxt 1
end
in
clarify_tac (put_claset claset ctxt) i
THEN Object_Logic.full_atomize_tac ctxt i
THEN asm_full_simp_tac (put_simpset (#poly_eq_ss thy) ctxt) i
THEN clarify_tac (put_claset claset ctxt) i
THEN (REPEAT (CONVERSION (#unwind_conv thy ctxt) i))
THEN SUBPROOF poly_exists_tac ctxt i
end
handle TERM _ => no_tac
| CTERM _ => no_tac
| THM _ => no_tac);
end;
fun algebra_tac add_ths del_ths ctxt i =
ring_tac add_ths del_ths ctxt i ORELSE ideal_tac add_ths del_ths ctxt i
end;
diff --git a/src/HOL/Tools/inductive.ML b/src/HOL/Tools/inductive.ML
--- a/src/HOL/Tools/inductive.ML
+++ b/src/HOL/Tools/inductive.ML
@@ -1,1318 +1,1318 @@
(* Title: HOL/Tools/inductive.ML
Author: Lawrence C Paulson, Cambridge University Computer Laboratory
Author: Stefan Berghofer and Markus Wenzel, TU Muenchen
(Co)Inductive Definition module for HOL.
Features:
* least or greatest fixedpoints
* mutually recursive definitions
* definitions involving arbitrary monotone operators
* automatically proves introduction and elimination rules
Introduction rules have the form
[| M Pj ti, ..., Q x, ... |] ==> Pk t
where M is some monotone operator (usually the identity),
Q x is any side condition on the free variables,
ti, t are any terms, and
Pj, Pk are two of the predicates being defined in mutual recursion.
*)
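(* A concrete instance: the introduction rule "even n ==> even (Suc (Suc n))"
has this shape with M the identity, no side condition Q, and Pj = Pk = even. *)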
signature INDUCTIVE =
sig
type result =
{preds: term list, elims: thm list, raw_induct: thm,
induct: thm, inducts: thm list, intrs: thm list, eqs: thm list}
val transform_result: morphism -> result -> result
type info = {names: string list, coind: bool} * result
val the_inductive: Proof.context -> term -> info
val the_inductive_global: Proof.context -> string -> info
val print_inductives: bool -> Proof.context -> unit
val get_monos: Proof.context -> thm list
val mono_add: attribute
val mono_del: attribute
val mk_cases_tac: Proof.context -> tactic
val mk_cases: Proof.context -> term -> thm
val inductive_forall_def: thm
val rulify: Proof.context -> thm -> thm
val inductive_cases: (Attrib.binding * term list) list -> local_theory ->
(string * thm list) list * local_theory
val inductive_cases_cmd: (Attrib.binding * string list) list -> local_theory ->
(string * thm list) list * local_theory
val ind_cases_rules: Proof.context ->
string list -> (binding * string option * mixfix) list -> thm list
val inductive_simps: (Attrib.binding * term list) list -> local_theory ->
(string * thm list) list * local_theory
val inductive_simps_cmd: (Attrib.binding * string list) list -> local_theory ->
(string * thm list) list * local_theory
type flags =
{quiet_mode: bool, verbose: bool, alt_name: binding, coind: bool,
no_elim: bool, no_ind: bool, skip_mono: bool}
val add_inductive:
flags -> ((binding * typ) * mixfix) list ->
(string * typ) list -> (Attrib.binding * term) list -> thm list -> local_theory ->
result * local_theory
val add_inductive_cmd: bool -> bool ->
(binding * string option * mixfix) list ->
(binding * string option * mixfix) list ->
Specification.multi_specs_cmd ->
(Facts.ref * Token.src list) list ->
local_theory -> result * local_theory
val arities_of: thm -> (string * int) list
val params_of: thm -> term list
val partition_rules: thm -> thm list -> (string * thm list) list
val partition_rules': thm -> (thm * 'a) list -> (string * (thm * 'a) list) list
val unpartition_rules: thm list -> (string * 'a list) list -> 'a list
val infer_intro_vars: theory -> thm -> int -> thm list -> term list list
val inductive_internals: bool Config.T
val select_disj_tac: Proof.context -> int -> int -> int -> tactic
type add_ind_def =
flags ->
term list -> (Attrib.binding * term) list -> thm list ->
term list -> (binding * mixfix) list ->
local_theory -> result * local_theory
val declare_rules: binding -> bool -> bool -> binding -> string list -> term list ->
thm list -> binding list -> Token.src list list -> (thm * string list * int) list ->
thm list -> thm -> local_theory -> thm list * thm list * thm list * thm * thm list * local_theory
val add_ind_def: add_ind_def
val gen_add_inductive: add_ind_def -> flags ->
((binding * typ) * mixfix) list -> (string * typ) list -> (Attrib.binding * term) list ->
thm list -> local_theory -> result * local_theory
val gen_add_inductive_cmd: add_ind_def -> bool -> bool ->
(binding * string option * mixfix) list ->
(binding * string option * mixfix) list ->
Specification.multi_specs_cmd -> (Facts.ref * Token.src list) list ->
local_theory -> result * local_theory
val gen_ind_decl: add_ind_def -> bool -> (local_theory -> local_theory) parser
end;
structure Inductive: INDUCTIVE =
struct
(** theory context references **)
val inductive_forall_def = @{thm HOL.induct_forall_def};
val inductive_conj_def = @{thm HOL.induct_conj_def};
val inductive_conj = @{thms induct_conj};
val inductive_atomize = @{thms induct_atomize};
val inductive_rulify = @{thms induct_rulify};
val inductive_rulify_fallback = @{thms induct_rulify_fallback};
val simp_thms1 =
map mk_meta_eq
@{lemma "(\<not> True) = False" "(\<not> False) = True"
"(True \<longrightarrow> P) = P" "(False \<longrightarrow> P) = True"
"(P \<and> True) = P" "(True \<and> P) = P"
by (fact simp_thms)+};
val simp_thms2 =
map mk_meta_eq [@{thm inf_fun_def}, @{thm inf_bool_def}] @ simp_thms1;
val simp_thms3 =
@{thms le_rel_bool_arg_iff if_False if_True conj_ac
le_fun_def le_bool_def sup_fun_def sup_bool_def simp_thms
if_bool_eq_disj all_simps ex_simps imp_conjL};
(** misc utilities **)
val inductive_internals = Attrib.setup_config_bool \<^binding>\<open>inductive_internals\<close> (K false);
fun message quiet_mode s = if quiet_mode then () else writeln s;
fun clean_message ctxt quiet_mode s =
if Config.get ctxt quick_and_dirty then () else message quiet_mode s;
fun coind_prefix true = "co"
| coind_prefix false = "";
fun log (b: int) m n = if m >= n then 0 else 1 + log b (b * m) n;
fun make_bool_args f g [] i = []
| make_bool_args f g (x :: xs) i =
(if i mod 2 = 0 then f x else g x) :: make_bool_args f g xs (i div 2);
fun make_bool_args' xs =
make_bool_args (K \<^term>\<open>False\<close>) (K \<^term>\<open>True\<close>) xs;
fun arg_types_of k c = drop k (binder_types (fastype_of c));
fun find_arg T x [] = raise Fail "find_arg"
| find_arg T x ((p as (_, (SOME _, _))) :: ps) =
apsnd (cons p) (find_arg T x ps)
| find_arg T x ((p as (U, (NONE, y))) :: ps) =
if (T: typ) = U then (y, (U, (SOME x, y)) :: ps)
else apsnd (cons p) (find_arg T x ps);
fun make_args Ts xs =
map (fn (T, (NONE, ())) => Const (\<^const_name>\<open>undefined\<close>, T) | (_, (SOME t, ())) => t)
(fold (fn (t, T) => snd o find_arg T t) xs (map (rpair (NONE, ())) Ts));
fun make_args' Ts xs Us =
fst (fold_map (fn T => find_arg T ()) Us (Ts ~~ map (pair NONE) xs));
fun dest_predicate cs params t =
let
val k = length params;
val (c, ts) = strip_comb t;
val (xs, ys) = chop k ts;
val i = find_index (fn c' => c' = c) cs;
in
if xs = params andalso i >= 0 then
SOME (c, i, ys, chop (length ys) (arg_types_of k c))
else NONE
end;
fun mk_names a 0 = []
| mk_names a 1 = [a]
| mk_names a n = map (fn i => a ^ string_of_int i) (1 upto n);
fun select_disj_tac ctxt =
let
fun tacs 1 1 = []
| tacs _ 1 = [resolve_tac ctxt @{thms disjI1}]
| tacs n i = resolve_tac ctxt @{thms disjI2} :: tacs (n - 1) (i - 1);
in fn n => fn i => EVERY' (tacs n i) end;
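(* For example, "select_disj_tac ctxt 3 2" proves the second disjunct of a
three-way disjunction by applying disjI2 once and then disjI1, since
disjunctions are nested to the right. *)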
(** context data **)
type result =
{preds: term list, elims: thm list, raw_induct: thm,
induct: thm, inducts: thm list, intrs: thm list, eqs: thm list};
fun transform_result phi {preds, elims, raw_induct: thm, induct, inducts, intrs, eqs} =
let
val term = Morphism.term phi;
val thm = Morphism.thm phi;
val fact = Morphism.fact phi;
in
{preds = map term preds, elims = fact elims, raw_induct = thm raw_induct,
induct = thm induct, inducts = fact inducts, intrs = fact intrs, eqs = fact eqs}
end;
type info = {names: string list, coind: bool} * result;
val empty_infos =
Item_Net.init (op = o apply2 (#names o fst)) (#preds o snd)
val empty_equations =
Item_Net.init Thm.eq_thm_prop
(single o fst o HOLogic.dest_eq o HOLogic.dest_Trueprop o Thm.prop_of);
datatype data = Data of
{infos: info Item_Net.T,
monos: thm list,
equations: thm Item_Net.T};
fun make_data (infos, monos, equations) =
Data {infos = infos, monos = monos, equations = equations};
structure Data = Generic_Data
(
type T = data;
val empty = make_data (empty_infos, [], empty_equations);
fun merge (Data {infos = infos1, monos = monos1, equations = equations1},
Data {infos = infos2, monos = monos2, equations = equations2}) =
make_data (Item_Net.merge (infos1, infos2),
Thm.merge_thms (monos1, monos2),
Item_Net.merge (equations1, equations2));
);
fun map_data f =
Data.map (fn Data {infos, monos, equations} => make_data (f (infos, monos, equations)));
fun rep_data ctxt = Data.get (Context.Proof ctxt) |> (fn Data rep => rep);
fun print_inductives verbose ctxt =
let
val {infos, monos, ...} = rep_data ctxt;
val space = Consts.space_of (Proof_Context.consts_of ctxt);
val consts =
Item_Net.content infos
|> maps (fn ({names, ...}, result) => map (rpair result) names)
in
[Pretty.block
(Pretty.breaks
(Pretty.str "(co)inductives:" ::
map (Pretty.mark_str o #1)
(Name_Space.markup_entries verbose ctxt space consts))),
Pretty.big_list "monotonicity rules:" (map (Thm.pretty_thm_item ctxt) monos)]
end |> Pretty.writeln_chunks;
(* inductive info *)
fun the_inductive ctxt term =
Item_Net.retrieve (#infos (rep_data ctxt)) term
|> the_single
|> apsnd (transform_result (Morphism.transfer_morphism' ctxt))
fun the_inductive_global ctxt name =
#infos (rep_data ctxt)
|> Item_Net.content
|> filter (fn ({names, ...}, _) => member op = names name)
|> the_single
|> apsnd (transform_result (Morphism.transfer_morphism' ctxt))
fun put_inductives info =
map_data (fn (infos, monos, equations) =>
(Item_Net.update (apsnd (transform_result Morphism.trim_context_morphism) info) infos,
monos, equations));
(* monotonicity rules *)
fun get_monos ctxt =
#monos (rep_data ctxt)
|> map (Thm.transfer' ctxt);
fun mk_mono ctxt thm =
let
fun eq_to_mono thm' = thm' RS (thm' RS @{thm eq_to_mono});
fun dest_less_concl thm = dest_less_concl (thm RS @{thm le_funD})
handle THM _ => thm RS @{thm le_boolD}
in
(case Thm.concl_of thm of
Const (\<^const_name>\<open>Pure.eq\<close>, _) $ _ $ _ => eq_to_mono (HOLogic.mk_obj_eq thm)
| _ $ (Const (\<^const_name>\<open>HOL.eq\<close>, _) $ _ $ _) => eq_to_mono thm
| _ $ (Const (\<^const_name>\<open>Orderings.less_eq\<close>, _) $ _ $ _) =>
dest_less_concl (Seq.hd (REPEAT (FIRSTGOAL
(resolve_tac ctxt [@{thm le_funI}, @{thm le_boolI'}])) thm))
| _ => thm)
end handle THM _ => error ("Bad monotonicity theorem:\n" ^ Thm.string_of_thm ctxt thm);
val mono_add =
Thm.declaration_attribute (fn thm => fn context =>
map_data (fn (infos, monos, equations) =>
(infos, Thm.add_thm (Thm.trim_context (mk_mono (Context.proof_of context) thm)) monos,
equations)) context);
val mono_del =
Thm.declaration_attribute (fn thm => fn context =>
map_data (fn (infos, monos, equations) =>
(infos, Thm.del_thm (mk_mono (Context.proof_of context) thm) monos, equations)) context);
val _ =
Theory.setup
(Attrib.setup \<^binding>\<open>mono\<close> (Attrib.add_del mono_add mono_del)
"declaration of monotonicity rule");
(* equations *)
fun retrieve_equations ctxt =
Item_Net.retrieve (#equations (rep_data ctxt))
#> map (Thm.transfer' ctxt);
val equation_add_permissive =
Thm.declaration_attribute (fn thm =>
map_data (fn (infos, monos, equations) =>
(infos, monos, perhaps (try (Item_Net.update (Thm.trim_context thm))) equations)));
(** process rules **)
local
fun err_in_rule ctxt name t msg =
error (cat_lines ["Ill-formed introduction rule " ^ Binding.print name,
Syntax.string_of_term ctxt t, msg]);
fun err_in_prem ctxt name t p msg =
error (cat_lines ["Ill-formed premise", Syntax.string_of_term ctxt p,
"in introduction rule " ^ Binding.print name, Syntax.string_of_term ctxt t, msg]);
val bad_concl = "Conclusion of introduction rule must be an inductive predicate";
val bad_ind_occ = "Inductive predicate occurs in argument of inductive predicate";
val bad_app = "Inductive predicate must be applied to parameter(s) ";
fun atomize_term thy = Raw_Simplifier.rewrite_term thy inductive_atomize [];
in
fun check_rule ctxt cs params ((binding, att), rule) =
let
val params' = Term.variant_frees rule (Logic.strip_params rule);
val frees = rev (map Free params');
val concl = subst_bounds (frees, Logic.strip_assums_concl rule);
val prems = map (curry subst_bounds frees) (Logic.strip_assums_hyp rule);
val rule' = Logic.list_implies (prems, concl);
val aprems = map (atomize_term (Proof_Context.theory_of ctxt)) prems;
val arule = fold_rev (Logic.all o Free) params' (Logic.list_implies (aprems, concl));
fun check_ind err t =
(case dest_predicate cs params t of
NONE => err (bad_app ^
commas (map (Syntax.string_of_term ctxt) params))
| SOME (_, _, ys, _) =>
if exists (fn c => exists (fn t => Logic.occs (c, t)) ys) cs
then err bad_ind_occ else ());
fun check_prem' prem t =
if member (op =) cs (head_of t) then
check_ind (err_in_prem ctxt binding rule prem) t
else
(case t of
Abs (_, _, t) => check_prem' prem t
| t $ u => (check_prem' prem t; check_prem' prem u)
| _ => ());
fun check_prem (prem, aprem) =
if can HOLogic.dest_Trueprop aprem then check_prem' prem prem
else err_in_prem ctxt binding rule prem "Non-atomic premise";
val _ =
(case concl of
Const (\<^const_name>\<open>Trueprop\<close>, _) $ t =>
if member (op =) cs (head_of t) then
(check_ind (err_in_rule ctxt binding rule') t;
List.app check_prem (prems ~~ aprems))
else err_in_rule ctxt binding rule' bad_concl
| _ => err_in_rule ctxt binding rule' bad_concl);
in
((binding, att), arule)
end;
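(* check_rule enforces the well-formedness conditions on introduction rules: the
conclusion must be an atomic application of one of the new predicates "cs" to the fixed
parameters, the predicates must not occur in arguments of themselves (bad_ind_occ), and
every premise must be atomic after atomization ("Non-atomic premise" otherwise). *)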
fun rulify ctxt =
hol_simplify ctxt inductive_conj
#> hol_simplify ctxt inductive_rulify
#> hol_simplify ctxt inductive_rulify_fallback
#> Simplifier.norm_hhf ctxt;
end;
(** proofs for (co)inductive predicates **)
(* prove monotonicity *)
fun prove_mono quiet_mode skip_mono predT fp_fun monos ctxt =
(message (quiet_mode orelse skip_mono andalso Config.get ctxt quick_and_dirty)
" Proving monotonicity ...";
(if skip_mono then Goal.prove_sorry else Goal.prove_future) ctxt
[] []
(HOLogic.mk_Trueprop
(\<^Const>\<open>monotone_on predT predT for
\<^Const>\<open>top \<^Type>\<open>set predT\<close>\<close> \<^Const>\<open>less_eq predT\<close> \<^Const>\<open>less_eq predT\<close> fp_fun\<close>))
(fn _ => EVERY [resolve_tac ctxt @{thms monoI} 1,
REPEAT (resolve_tac ctxt [@{thm le_funI}, @{thm le_boolI'}] 1),
REPEAT (FIRST
[assume_tac ctxt 1,
resolve_tac ctxt (map (mk_mono ctxt) monos @ get_monos ctxt) 1,
eresolve_tac ctxt @{thms le_funE} 1,
dresolve_tac ctxt @{thms le_boolD} 1])]));
(* prove introduction rules *)
fun prove_intrs quiet_mode coind mono fp_def k intr_ts rec_preds_defs ctxt ctxt' =
let
val _ = clean_message ctxt quiet_mode " Proving the introduction rules ...";
val unfold = funpow k (fn th => th RS fun_cong)
(mono RS (fp_def RS
(if coind then @{thm def_gfp_unfold} else @{thm def_lfp_unfold})));
val rules = [refl, TrueI, @{lemma "\<not> False" by (rule notI)}, exI, conjI];
val intrs = map_index (fn (i, intr) =>
Goal.prove_sorry ctxt [] [] intr (fn _ => EVERY
[rewrite_goals_tac ctxt rec_preds_defs,
resolve_tac ctxt [unfold RS iffD2] 1,
select_disj_tac ctxt (length intr_ts) (i + 1) 1,
(*Not ares_tac, since refl must be tried before any equality assumptions;
backtracking may occur if the premises have extra variables!*)
DEPTH_SOLVE_1 (resolve_tac ctxt rules 1 APPEND assume_tac ctxt 1)])
|> singleton (Proof_Context.export ctxt ctxt')) intr_ts
in (intrs, unfold) end;
(* prove elimination rules *)
fun prove_elims quiet_mode cs params intr_ts intr_names unfold rec_preds_defs ctxt ctxt''' =
let
val _ = clean_message ctxt quiet_mode " Proving the elimination rules ...";
val ([pname], ctxt') = Variable.variant_fixes ["P"] ctxt;
val P = HOLogic.mk_Trueprop (Free (pname, HOLogic.boolT));
fun dest_intr r =
(the (dest_predicate cs params (HOLogic.dest_Trueprop (Logic.strip_assums_concl r))),
Logic.strip_assums_hyp r, Logic.strip_params r);
val intrs = map dest_intr intr_ts ~~ intr_names;
val rules1 = [disjE, exE, FalseE];
val rules2 = [conjE, FalseE, @{lemma "\<not> True \<Longrightarrow> R" by (rule notE [OF _ TrueI])}];
fun prove_elim c =
let
val Ts = arg_types_of (length params) c;
val (anames, ctxt'') = Variable.variant_fixes (mk_names "a" (length Ts)) ctxt';
val frees = map Free (anames ~~ Ts);
fun mk_elim_prem ((_, _, us, _), ts, params') =
Logic.list_all (params',
Logic.list_implies (map (HOLogic.mk_Trueprop o HOLogic.mk_eq)
(frees ~~ us) @ ts, P));
val c_intrs = filter (equal c o #1 o #1 o #1) intrs;
val prems = HOLogic.mk_Trueprop (list_comb (c, params @ frees)) ::
map mk_elim_prem (map #1 c_intrs)
in
(Goal.prove_sorry ctxt'' [] prems P
(fn {context = ctxt4, prems} => EVERY
[cut_tac (hd prems) 1,
rewrite_goals_tac ctxt4 rec_preds_defs,
dresolve_tac ctxt4 [unfold RS iffD1] 1,
REPEAT (FIRSTGOAL (eresolve_tac ctxt4 rules1)),
REPEAT (FIRSTGOAL (eresolve_tac ctxt4 rules2)),
EVERY (map (fn prem =>
DEPTH_SOLVE_1 (assume_tac ctxt4 1 ORELSE
resolve_tac ctxt [rewrite_rule ctxt4 rec_preds_defs prem, conjI] 1))
(tl prems))])
|> singleton (Proof_Context.export ctxt'' ctxt'''),
map #2 c_intrs, length Ts)
end
in map prove_elim cs end;
(* prove simplification equations *)
fun prove_eqs quiet_mode cs params intr_ts intrs
(elims: (thm * bstring list * int) list) ctxt ctxt'' = (* FIXME ctxt'' ?? *)
let
val _ = clean_message ctxt quiet_mode " Proving the simplification rules ...";
fun dest_intr r =
(the (dest_predicate cs params (HOLogic.dest_Trueprop (Logic.strip_assums_concl r))),
Logic.strip_assums_hyp r, Logic.strip_params r);
val intr_ts' = map dest_intr intr_ts;
fun prove_eq c (elim: thm * 'a * 'b) =
let
val Ts = arg_types_of (length params) c;
val (anames, ctxt') = Variable.variant_fixes (mk_names "a" (length Ts)) ctxt;
val frees = map Free (anames ~~ Ts);
val c_intrs = filter (equal c o #1 o #1 o #1) (intr_ts' ~~ intrs);
fun mk_intr_conj (((_, _, us, _), ts, params'), _) =
let
fun list_ex ([], t) = t
| list_ex ((a, T) :: vars, t) =
HOLogic.exists_const T $ Abs (a, T, list_ex (vars, t));
val conjs = map2 (curry HOLogic.mk_eq) frees us @ map HOLogic.dest_Trueprop ts;
in
list_ex (params', if null conjs then \<^term>\<open>True\<close> else foldr1 HOLogic.mk_conj conjs)
end;
val lhs = list_comb (c, params @ frees);
val rhs =
if null c_intrs then \<^term>\<open>False\<close>
else foldr1 HOLogic.mk_disj (map mk_intr_conj c_intrs);
val eq = HOLogic.mk_Trueprop (HOLogic.mk_eq (lhs, rhs));
fun prove_intr1 (i, _) = Subgoal.FOCUS_PREMS (fn {context = ctxt'', params, prems, ...} =>
select_disj_tac ctxt'' (length c_intrs) (i + 1) 1 THEN
EVERY (replicate (length params) (resolve_tac ctxt'' @{thms exI} 1)) THEN
(if null prems then resolve_tac ctxt'' @{thms TrueI} 1
else
let
val (prems', last_prem) = split_last prems;
in
EVERY (map (fn prem =>
(resolve_tac ctxt'' @{thms conjI} 1 THEN resolve_tac ctxt'' [prem] 1)) prems')
THEN resolve_tac ctxt'' [last_prem] 1
end)) ctxt' 1;
fun prove_intr2 (((_, _, us, _), ts, params'), intr) =
EVERY (replicate (length params') (eresolve_tac ctxt' @{thms exE} 1)) THEN
(if null ts andalso null us then resolve_tac ctxt' [intr] 1
else
EVERY (replicate (length ts + length us - 1) (eresolve_tac ctxt' @{thms conjE} 1)) THEN
Subgoal.FOCUS_PREMS (fn {context = ctxt'', prems, ...} =>
let
val (eqs, prems') = chop (length us) prems;
val rew_thms = map (fn th => th RS @{thm eq_reflection}) eqs;
in
rewrite_goal_tac ctxt'' rew_thms 1 THEN
resolve_tac ctxt'' [intr] 1 THEN
EVERY (map (fn p => resolve_tac ctxt'' [p] 1) prems')
end) ctxt' 1);
in
Goal.prove_sorry ctxt' [] [] eq (fn _ =>
resolve_tac ctxt' @{thms iffI} 1 THEN
eresolve_tac ctxt' [#1 elim] 1 THEN
EVERY (map_index prove_intr1 c_intrs) THEN
(if null c_intrs then eresolve_tac ctxt' @{thms FalseE} 1
else
let val (c_intrs', last_c_intr) = split_last c_intrs in
EVERY (map (fn ci => eresolve_tac ctxt' @{thms disjE} 1 THEN prove_intr2 ci) c_intrs')
THEN prove_intr2 last_c_intr
end))
|> rulify ctxt'
|> singleton (Proof_Context.export ctxt' ctxt'')
end;
in
map2 prove_eq cs elims
end;
(* derivation of simplified elimination rules *)
local
(*delete needless equality assumptions*)
val refl_thin = Goal.prove_global \<^theory>\<open>HOL\<close> [] [] \<^prop>\<open>\<And>P. a = a \<Longrightarrow> P \<Longrightarrow> P\<close>
(fn {context = ctxt, ...} => assume_tac ctxt 1);
val elim_rls = [asm_rl, FalseE, refl_thin, conjE, exE];
fun elim_tac ctxt = REPEAT o eresolve_tac ctxt elim_rls;
fun simp_case_tac ctxt i =
EVERY' [elim_tac ctxt,
asm_full_simp_tac ctxt,
elim_tac ctxt,
REPEAT o bound_hyp_subst_tac ctxt] i;
in
fun mk_cases_tac ctxt = ALLGOALS (simp_case_tac ctxt) THEN prune_params_tac ctxt;
fun mk_cases ctxt prop =
let
fun err msg =
error (Pretty.string_of (Pretty.block
[Pretty.str msg, Pretty.fbrk, Syntax.pretty_term ctxt prop]));
val elims = Induct.find_casesP ctxt prop;
val cprop = Thm.cterm_of ctxt prop;
fun mk_elim rl =
Thm.implies_intr cprop
(Tactic.rule_by_tactic ctxt (mk_cases_tac ctxt) (Thm.assume cprop RS rl))
|> singleton (Proof_Context.export (Proof_Context.augment prop ctxt) ctxt);
in
(case get_first (try mk_elim) elims of
SOME r => r
| NONE => err "Proposition not an inductive predicate:")
end;
end;
(* inductive_cases *)
fun gen_inductive_cases prep_att prep_prop args lthy =
let
val thmss =
map snd args
|> burrow (grouped 10 Par_List.map_independent (mk_cases lthy o prep_prop lthy));
val facts =
map2 (fn ((a, atts), _) => fn thms => ((a, map (prep_att lthy) atts), [(thms, [])]))
args thmss;
in lthy |> Local_Theory.notes facts end;
val inductive_cases = gen_inductive_cases (K I) Syntax.check_prop;
val inductive_cases_cmd = gen_inductive_cases Attrib.check_src Syntax.read_prop;
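(* Usage sketch (illustrative, with hypothetical fact and predicate names):
  inductive_cases evenE [elim]: "even (Suc (Suc n))"
derives a simplified elimination rule for the given instance via mk_cases. *)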
(* ind_cases *)
fun ind_cases_rules ctxt raw_props raw_fixes =
let
val (props, ctxt') = Specification.read_props raw_props raw_fixes ctxt;
val rules = Proof_Context.export ctxt' ctxt (map (mk_cases ctxt') props);
in rules end;
val _ =
Theory.setup
(Method.setup \<^binding>\<open>ind_cases\<close>
(Scan.lift (Scan.repeat1 Parse.prop -- Parse.for_fixes) >>
(fn (props, fixes) => fn ctxt =>
Method.erule ctxt 0 (ind_cases_rules ctxt props fixes)))
"case analysis for inductive definitions, based on simplified elimination rule");
(* derivation of simplified equation *)
fun mk_simp_eq ctxt prop =
let
val thy = Proof_Context.theory_of ctxt;
val ctxt' = Proof_Context.augment prop ctxt;
val lhs_of = fst o HOLogic.dest_eq o HOLogic.dest_Trueprop o Thm.prop_of;
val substs =
retrieve_equations ctxt (HOLogic.dest_Trueprop prop)
|> map_filter
(fn eq => SOME (Pattern.match thy (lhs_of eq, HOLogic.dest_Trueprop prop)
(Vartab.empty, Vartab.empty), eq)
handle Pattern.MATCH => NONE);
val (subst, eq) =
(case substs of
[s] => s
| _ => error
("equations matching pattern " ^ Syntax.string_of_term ctxt prop ^ " is not unique"));
val inst =
map (fn v => (fst v, Thm.cterm_of ctxt' (Envir.subst_term subst (Var v))))
(Term.add_vars (lhs_of eq) []);
in
infer_instantiate ctxt' inst eq
|> Conv.fconv_rule (Conv.arg_conv (Conv.arg_conv (Simplifier.full_rewrite ctxt')))
|> singleton (Proof_Context.export ctxt' ctxt)
end
(* inductive simps *)
fun gen_inductive_simps prep_att prep_prop args lthy =
let
val facts = args |> map (fn ((a, atts), props) =>
((a, map (prep_att lthy) atts),
map (Thm.no_attributes o single o mk_simp_eq lthy o prep_prop lthy) props));
in lthy |> Local_Theory.notes facts end;
val inductive_simps = gen_inductive_simps (K I) Syntax.check_prop;
val inductive_simps_cmd = gen_inductive_simps Attrib.check_src Syntax.read_prop;
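(* Usage sketch (hypothetical names):
  inductive_simps even_simps: "even 0" "even (Suc n)"
produces one simplification equation per given instance via mk_simp_eq. *)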
(* prove induction rule *)
fun prove_indrule quiet_mode cs argTs bs xs rec_const params intr_ts mono
fp_def rec_preds_defs ctxt ctxt''' = (* FIXME ctxt''' ?? *)
let
val _ = clean_message ctxt quiet_mode " Proving the induction rule ...";
(* predicates for induction rule *)
val (pnames, ctxt') = Variable.variant_fixes (mk_names "P" (length cs)) ctxt;
val preds =
map2 (curry Free) pnames
(map (fn c => arg_types_of (length params) c ---> HOLogic.boolT) cs);
(* transform an introduction rule into a premise for induction rule *)
fun mk_ind_prem r =
let
fun subst s =
(case dest_predicate cs params s of
SOME (_, i, ys, (_, Ts)) =>
let
val k = length Ts;
val bs = map Bound (k - 1 downto 0);
val P = list_comb (nth preds i, map (incr_boundvars k) ys @ bs);
val Q =
fold_rev Term.abs (mk_names "x" k ~~ Ts)
(HOLogic.mk_binop \<^const_name>\<open>HOL.induct_conj\<close>
(list_comb (incr_boundvars k s, bs), P));
in (Q, case Ts of [] => SOME (s, P) | _ => NONE) end
| NONE =>
(case s of
t $ u => (fst (subst t) $ fst (subst u), NONE)
| Abs (a, T, t) => (Abs (a, T, fst (subst t)), NONE)
| _ => (s, NONE)));
fun mk_prem s prems =
(case subst s of
(_, SOME (t, u)) => t :: u :: prems
| (t, _) => t :: prems);
val SOME (_, i, ys, _) =
dest_predicate cs params (HOLogic.dest_Trueprop (Logic.strip_assums_concl r));
in
fold_rev (Logic.all o Free) (Logic.strip_params r)
(Logic.list_implies (map HOLogic.mk_Trueprop (fold_rev mk_prem
(map HOLogic.dest_Trueprop (Logic.strip_assums_hyp r)) []),
HOLogic.mk_Trueprop (list_comb (nth preds i, ys))))
end;
val ind_prems = map mk_ind_prem intr_ts;
(* make conclusions for induction rules *)
val Tss = map (binder_types o fastype_of) preds;
val (xnames, ctxt'') = Variable.variant_fixes (mk_names "x" (length (flat Tss))) ctxt';
val mutual_ind_concl =
HOLogic.mk_Trueprop (foldr1 HOLogic.mk_conj
(map (fn (((xnames, Ts), c), P) =>
let val frees = map Free (xnames ~~ Ts)
in HOLogic.mk_imp (list_comb (c, params @ frees), list_comb (P, frees)) end)
(unflat Tss xnames ~~ Tss ~~ cs ~~ preds)));
(* make predicate for instantiation of abstract induction rule *)
val ind_pred =
fold_rev lambda (bs @ xs) (foldr1 HOLogic.mk_conj
(map_index (fn (i, P) => fold_rev (curry HOLogic.mk_imp)
(make_bool_args HOLogic.mk_not I bs i)
(list_comb (P, make_args' argTs xs (binder_types (fastype_of P))))) preds));
val ind_concl =
HOLogic.mk_Trueprop
(HOLogic.mk_binrel \<^const_name>\<open>Orderings.less_eq\<close> (rec_const, ind_pred));
val raw_fp_induct = mono RS (fp_def RS @{thm def_lfp_induct});
val induct = Goal.prove_sorry ctxt'' [] ind_prems ind_concl
(fn {context = ctxt3, prems} => EVERY
[rewrite_goals_tac ctxt3 [inductive_conj_def],
DETERM (resolve_tac ctxt3 [raw_fp_induct] 1),
REPEAT (resolve_tac ctxt3 [@{thm le_funI}, @{thm le_boolI}] 1),
rewrite_goals_tac ctxt3 simp_thms2,
(*This disjE separates out the introduction rules*)
REPEAT (FIRSTGOAL (eresolve_tac ctxt3 [disjE, exE, FalseE])),
(*Now break down the individual cases. No disjE here in case
some premise involves disjunction.*)
REPEAT (FIRSTGOAL (eresolve_tac ctxt3 [conjE] ORELSE' bound_hyp_subst_tac ctxt3)),
REPEAT (FIRSTGOAL
(resolve_tac ctxt3 [conjI, impI] ORELSE'
(eresolve_tac ctxt3 [notE] THEN' assume_tac ctxt3))),
EVERY (map (fn prem =>
DEPTH_SOLVE_1 (assume_tac ctxt3 1 ORELSE
resolve_tac ctxt3
[rewrite_rule ctxt3 (inductive_conj_def :: rec_preds_defs @ simp_thms2) prem,
conjI, refl] 1)) prems)]);
val lemma = Goal.prove_sorry ctxt'' [] []
(Logic.mk_implies (ind_concl, mutual_ind_concl)) (fn {context = ctxt3, ...} => EVERY
[rewrite_goals_tac ctxt3 rec_preds_defs,
REPEAT (EVERY
[REPEAT (resolve_tac ctxt3 [conjI, impI] 1),
REPEAT (eresolve_tac ctxt3 [@{thm le_funE}, @{thm le_boolE}] 1),
assume_tac ctxt3 1,
rewrite_goals_tac ctxt3 simp_thms1,
assume_tac ctxt3 1])]);
in singleton (Proof_Context.export ctxt'' ctxt''') (induct RS lemma) end;
(* prove coinduction rule *)
fun If_const T = Const (\<^const_name>\<open>If\<close>, HOLogic.boolT --> T --> T --> T);
fun mk_If p t f = let val T = fastype_of t in If_const T $ p $ t $ f end;
fun prove_coindrule quiet_mode preds cs argTs bs xs params intr_ts mono
fp_def rec_preds_defs ctxt ctxt''' = (* FIXME ctxt''' ?? *)
let
val _ = clean_message ctxt quiet_mode " Proving the coinduction rule ...";
val n = length cs;
val (ns, xss) = map_split (fn pred =>
make_args' argTs xs (arg_types_of (length params) pred) |> `length) preds;
val xTss = map (map fastype_of) xss;
val (Rs_names, names_ctxt) = Variable.variant_fixes (mk_names "X" n) ctxt;
val Rs = map2 (fn name => fn Ts => Free (name, Ts ---> \<^typ>\<open>bool\<close>)) Rs_names xTss;
val Rs_applied = map2 (curry list_comb) Rs xss;
val preds_applied = map2 (curry list_comb) (map (fn p => list_comb (p, params)) preds) xss;
val abstract_list = fold_rev (absfree o dest_Free);
val bss = map (make_bool_args
(fn b => HOLogic.mk_eq (b, \<^term>\<open>False\<close>))
(fn b => HOLogic.mk_eq (b, \<^term>\<open>True\<close>)) bs) (0 upto n - 1);
val eq_undefinedss = map (fn ys => map (fn x =>
HOLogic.mk_eq (x, Const (\<^const_name>\<open>undefined\<close>, fastype_of x)))
(subtract (op =) ys xs)) xss;
val R =
@{fold 3} (fn bs => fn eqs => fn R => fn t => if null bs andalso null eqs then R else
mk_If (Library.foldr1 HOLogic.mk_conj (bs @ eqs)) R t)
bss eq_undefinedss Rs_applied \<^term>\<open>False\<close>
|> abstract_list (bs @ xs);
fun subst t =
(case dest_predicate cs params t of
SOME (_, i, ts, (_, Us)) =>
let
val l = length Us;
val bs = map Bound (l - 1 downto 0);
val args = map (incr_boundvars l) ts @ bs
in
HOLogic.mk_disj (list_comb (nth Rs i, args),
list_comb (nth preds i, params @ args))
|> fold_rev absdummy Us
end
| NONE =>
(case t of
t1 $ t2 => subst t1 $ subst t2
| Abs (x, T, u) => Abs (x, T, subst u)
| _ => t));
fun mk_coind_prem r =
let
val SOME (_, i, ts, (Ts, _)) =
dest_predicate cs params (HOLogic.dest_Trueprop (Logic.strip_assums_concl r));
val ps =
map HOLogic.mk_eq (make_args' argTs xs Ts ~~ ts) @
map (subst o HOLogic.dest_Trueprop) (Logic.strip_assums_hyp r);
in
(i, fold_rev (fn (x, T) => fn P => HOLogic.exists_const T $ Abs (x, T, P))
(Logic.strip_params r)
(if null ps then \<^term>\<open>True\<close> else foldr1 HOLogic.mk_conj ps))
end;
fun mk_prem i Ps = Logic.mk_implies
((nth Rs_applied i, Library.foldr1 HOLogic.mk_disj Ps) |> @{apply 2} HOLogic.mk_Trueprop)
|> fold_rev Logic.all (nth xss i);
val prems = map mk_coind_prem intr_ts |> AList.group (op =) |> sort (int_ord o apply2 fst)
|> map (uncurry mk_prem);
val concl = @{map 3} (fn xs =>
Ctr_Sugar_Util.list_all_free xs oo curry HOLogic.mk_imp) xss Rs_applied preds_applied
|> Library.foldr1 HOLogic.mk_conj |> HOLogic.mk_Trueprop;
val pred_defs_sym = if null rec_preds_defs then [] else map2 (fn n => fn thm =>
funpow n (fn thm => thm RS @{thm meta_fun_cong}) thm RS @{thm Pure.symmetric})
ns rec_preds_defs;
val simps = simp_thms3 @ pred_defs_sym;
val simprocs = [Simplifier.the_simproc ctxt "HOL.defined_All"];
val simplify = asm_full_simplify (Ctr_Sugar_Util.ss_only simps ctxt addsimprocs simprocs);
val coind = (mono RS (fp_def RS @{thm def_coinduct}))
|> infer_instantiate' ctxt [SOME (Thm.cterm_of ctxt R)]
|> simplify;
fun idx_of t = find_index (fn R =>
R = the_single (subtract (op =) (preds @ params) (map Free (Term.add_frees t [])))) Rs;
val coind_concls = HOLogic.dest_Trueprop (Thm.concl_of coind) |> HOLogic.dest_conj
|> map (fn t => (idx_of t, t)) |> sort (int_ord o @{apply 2} fst) |> map snd;
val reorder_bound_goals = map_filter (fn (t, u) => if t aconv u then NONE else
SOME (HOLogic.mk_Trueprop (HOLogic.mk_eq (t, u))))
((HOLogic.dest_Trueprop concl |> HOLogic.dest_conj) ~~ coind_concls);
val reorder_bound_thms = map (fn goal => Goal.prove_sorry ctxt [] [] goal
(fn {context = ctxt, prems = _} =>
HEADGOAL (EVERY' [resolve_tac ctxt [iffI],
REPEAT_DETERM o resolve_tac ctxt [allI, impI],
REPEAT_DETERM o dresolve_tac ctxt [spec], eresolve_tac ctxt [mp], assume_tac ctxt,
REPEAT_DETERM o resolve_tac ctxt [allI, impI],
REPEAT_DETERM o dresolve_tac ctxt [spec], eresolve_tac ctxt [mp], assume_tac ctxt])))
reorder_bound_goals;
val coinduction = Goal.prove_sorry ctxt [] prems concl (fn {context = ctxt, prems = CIH} =>
HEADGOAL (full_simp_tac
(Ctr_Sugar_Util.ss_only (simps @ reorder_bound_thms) ctxt addsimprocs simprocs) THEN'
resolve_tac ctxt [coind]) THEN
ALLGOALS (REPEAT_ALL_NEW (REPEAT_DETERM o resolve_tac ctxt [allI, impI, conjI] THEN'
REPEAT_DETERM o eresolve_tac ctxt [exE, conjE] THEN'
dresolve_tac ctxt (map simplify CIH) THEN'
REPEAT_DETERM o (assume_tac ctxt ORELSE'
eresolve_tac ctxt [conjE] ORELSE' dresolve_tac ctxt [spec, mp]))))
in
coinduction
|> length cs = 1 ? (Object_Logic.rulify ctxt #> rotate_prems ~1)
|> singleton (Proof_Context.export names_ctxt ctxt''')
end
(** specification of (co)inductive predicates **)
fun mk_ind_def quiet_mode skip_mono alt_name coind cs intr_ts monos params cnames_syn lthy =
let
val fp_name = if coind then \<^const_name>\<open>Inductive.gfp\<close> else \<^const_name>\<open>Inductive.lfp\<close>;
val argTs = fold (combine (op =) o arg_types_of (length params)) cs [];
val k = log 2 1 (length cs);
val predT = replicate k HOLogic.boolT ---> argTs ---> HOLogic.boolT;
val p :: xs =
map Free (Variable.variant_frees lthy intr_ts
(("p", predT) :: (mk_names "x" (length argTs) ~~ argTs)));
val bs =
map Free (Variable.variant_frees lthy (p :: xs @ intr_ts)
(map (rpair HOLogic.boolT) (mk_names "b" k)));
fun subst t =
(case dest_predicate cs params t of
SOME (_, i, ts, (Ts, Us)) =>
let
val l = length Us;
val zs = map Bound (l - 1 downto 0);
in
fold_rev (Term.abs o pair "z") Us
(list_comb (p,
make_bool_args' bs i @ make_args argTs
((map (incr_boundvars l) ts ~~ Ts) @ (zs ~~ Us))))
end
| NONE =>
(case t of
t1 $ t2 => subst t1 $ subst t2
| Abs (x, T, u) => Abs (x, T, subst u)
| _ => t));
(* transform an introduction rule into a conjunction *)
(* [| p_i t; ... |] ==> p_j u *)
(* is transformed into *)
(* b_j & x_j = u & p b_j t & ... *)
fun transform_rule r =
let
val SOME (_, i, ts, (Ts, _)) =
dest_predicate cs params (HOLogic.dest_Trueprop (Logic.strip_assums_concl r));
val ps =
make_bool_args HOLogic.mk_not I bs i @
map HOLogic.mk_eq (make_args' argTs xs Ts ~~ ts) @
map (subst o HOLogic.dest_Trueprop) (Logic.strip_assums_hyp r);
in
fold_rev (fn (x, T) => fn P => HOLogic.exists_const T $ Abs (x, T, P))
(Logic.strip_params r)
(if null ps then \<^term>\<open>True\<close> else foldr1 HOLogic.mk_conj ps)
end;
(* make a disjunction of all introduction rules *)
val fp_fun =
fold_rev lambda (p :: bs @ xs)
(if null intr_ts then \<^term>\<open>False\<close>
else foldr1 HOLogic.mk_disj (map transform_rule intr_ts));
(* add definition of recursive predicates to theory *)
val is_auxiliary = length cs > 1;
val rec_binding =
if Binding.is_empty alt_name then Binding.conglomerate (map #1 cnames_syn) else alt_name;
val rec_name = Binding.name_of rec_binding;
val internals = Config.get lthy inductive_internals;
val ((rec_const, (_, fp_def)), lthy') = lthy
|> is_auxiliary ? Proof_Context.concealed
|> Local_Theory.define
((rec_binding, case cnames_syn of [(_, mx)] => mx | _ => NoSyn),
((Thm.make_def_binding internals rec_binding, @{attributes [nitpick_unfold]}),
fold_rev lambda params
(Const (fp_name, (predT --> predT) --> predT) $ fp_fun)))
||> Proof_Context.restore_naming lthy;
val fp_def' =
Simplifier.rewrite (put_simpset HOL_basic_ss lthy' addsimps [fp_def])
(Thm.cterm_of lthy' (list_comb (rec_const, params)));
val specs =
if is_auxiliary then
map_index (fn (i, ((b, mx), c)) =>
let
val Ts = arg_types_of (length params) c;
val xs =
map Free (Variable.variant_frees lthy' intr_ts (mk_names "x" (length Ts) ~~ Ts));
in
((b, mx),
((Thm.make_def_binding internals b, []), fold_rev lambda (params @ xs)
(list_comb (rec_const, params @ make_bool_args' bs i @
make_args argTs (xs ~~ Ts)))))
end) (cnames_syn ~~ cs)
else [];
val (consts_defs, lthy'') = lthy'
|> fold_map Local_Theory.define specs;
val preds = (case cs of [_] => [rec_const] | _ => map #1 consts_defs);
val (_, ctxt'') = Variable.add_fixes (map (fst o dest_Free) params) lthy'';
val mono = prove_mono quiet_mode skip_mono predT fp_fun monos ctxt'';
val (_, lthy''') = lthy''
|> Local_Theory.note
((if internals
then Binding.qualify true rec_name (Binding.name "mono")
else Binding.empty, []),
Proof_Context.export ctxt'' lthy'' [mono]);
in
(lthy''', Proof_Context.transfer (Proof_Context.theory_of lthy''') ctxt'',
rec_binding, mono, fp_def', map (#2 o #2) consts_defs,
list_comb (rec_const, params), preds, argTs, bs, xs)
end;
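(* Note on the encoding: for mutual (co)inductive definitions, mk_ind_def introduces a
single fixpoint constant over the union of all argument types plus k boolean selector
arguments, where k is roughly log2 of the number of predicates (cf. "log 2 1 (length cs)"
above); the individual predicates are then defined as instances of that constant with
fixed boolean arguments, via the auxiliary definitions collected in "specs". *)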
fun declare_rules rec_binding coind no_ind spec_name cnames
preds intrs intr_bindings intr_atts elims eqs raw_induct lthy =
let
val rec_name = Binding.name_of rec_binding;
fun rec_qualified qualified = Binding.qualify qualified rec_name;
val intr_names = map Binding.name_of intr_bindings;
val ind_case_names =
if forall (equal "") intr_names then []
else [Attrib.case_names intr_names];
val induct =
if coind then
(raw_induct,
[Attrib.case_names [rec_name],
Attrib.case_conclusion (rec_name, intr_names),
Attrib.consumes (1 - Thm.nprems_of raw_induct),
- Attrib.internal (K (Induct.coinduct_pred (hd cnames)))])
+ Attrib.internal \<^here> (K (Induct.coinduct_pred (hd cnames)))])
else if no_ind orelse length cnames > 1 then
(raw_induct, ind_case_names @ [Attrib.consumes (~ (Thm.nprems_of raw_induct))])
else
(raw_induct RSN (2, rev_mp),
ind_case_names @ [Attrib.consumes (~ (Thm.nprems_of raw_induct))]);
val (intrs', lthy1) =
lthy |>
Spec_Rules.add spec_name
(if coind then Spec_Rules.Co_Inductive else Spec_Rules.Inductive) preds intrs |>
Local_Theory.notes
(map (rec_qualified false) intr_bindings ~~ intr_atts ~~
map (fn th => [([th], @{attributes [Pure.intro?]})]) intrs) |>>
map (hd o snd);
val (((_, elims'), (_, [induct'])), lthy2) =
lthy1 |>
Local_Theory.note ((rec_qualified true (Binding.name "intros"), []), intrs') ||>>
fold_map (fn (name, (elim, cases, k)) =>
Local_Theory.note
((Binding.qualify true (Long_Name.base_name name) (Binding.name "cases"),
((if forall (equal "") cases then [] else [Attrib.case_names cases]) @
[Attrib.consumes (1 - Thm.nprems_of elim), Attrib.constraints k,
- Attrib.internal (K (Induct.cases_pred name))] @ @{attributes [Pure.elim?]})),
+ Attrib.internal \<^here> (K (Induct.cases_pred name))] @ @{attributes [Pure.elim?]})),
[elim]) #>
apfst (hd o snd)) (if null elims then [] else cnames ~~ elims) ||>>
Local_Theory.note
((rec_qualified true (Binding.name (coind_prefix coind ^ "induct")), #2 induct),
[rulify lthy1 (#1 induct)]);
val (eqs', lthy3) = lthy2 |>
fold_map (fn (name, eq) => Local_Theory.note
((Binding.qualify true (Long_Name.base_name name) (Binding.name "simps"),
- [Attrib.internal (K equation_add_permissive)]), [eq])
+ [Attrib.internal \<^here> (K equation_add_permissive)]), [eq])
#> apfst (hd o snd))
(if null eqs then [] else (cnames ~~ eqs))
val (inducts, lthy4) =
if no_ind orelse coind then ([], lthy3)
else
let val inducts = cnames ~~ Project_Rule.projects lthy3 (1 upto length cnames) induct' in
lthy3 |>
Local_Theory.notes [((rec_qualified true (Binding.name "inducts"), []),
inducts |> map (fn (name, th) => ([th],
ind_case_names @
[Attrib.consumes (1 - Thm.nprems_of th),
- Attrib.internal (K (Induct.induct_pred name))])))] |>> snd o hd
+ Attrib.internal \<^here> (K (Induct.induct_pred name))])))] |>> snd o hd
end;
in (intrs', elims', eqs', induct', inducts, lthy4) end;
type flags =
{quiet_mode: bool, verbose: bool, alt_name: binding, coind: bool,
no_elim: bool, no_ind: bool, skip_mono: bool};
type add_ind_def =
flags ->
term list -> (Attrib.binding * term) list -> thm list ->
term list -> (binding * mixfix) list ->
local_theory -> result * local_theory;
fun add_ind_def {quiet_mode, verbose, alt_name, coind, no_elim, no_ind, skip_mono}
cs intros monos params cnames_syn lthy =
let
val _ = null cnames_syn andalso error "No inductive predicates given";
val names = map (Binding.name_of o fst) cnames_syn;
val _ = message (quiet_mode andalso not verbose)
("Proofs for " ^ coind_prefix coind ^ "inductive predicate(s) " ^ commas_quote names);
val spec_name = Binding.conglomerate (map #1 cnames_syn);
val cnames = map (Local_Theory.full_name lthy o #1) cnames_syn; (* FIXME *)
val ((intr_names, intr_atts), intr_ts) =
apfst split_list (split_list (map (check_rule lthy cs params) intros));
val (lthy1, lthy2, rec_binding, mono, fp_def, rec_preds_defs, rec_const, preds,
argTs, bs, xs) = mk_ind_def quiet_mode skip_mono alt_name coind cs intr_ts
monos params cnames_syn lthy;
val (intrs, unfold) = prove_intrs quiet_mode coind mono fp_def (length bs + length xs)
intr_ts rec_preds_defs lthy2 lthy1;
val elims =
if no_elim then []
else
prove_elims quiet_mode cs params intr_ts (map Binding.name_of intr_names)
unfold rec_preds_defs lthy2 lthy1;
val raw_induct = zero_var_indexes
(if no_ind then Drule.asm_rl
else if coind then
prove_coindrule quiet_mode preds cs argTs bs xs params intr_ts mono fp_def
rec_preds_defs lthy2 lthy1
else
prove_indrule quiet_mode cs argTs bs xs rec_const params intr_ts mono fp_def
rec_preds_defs lthy2 lthy1);
val eqs =
if no_elim then [] else prove_eqs quiet_mode cs params intr_ts intrs elims lthy2 lthy1;
val elims' = map (fn (th, ns, i) => (rulify lthy1 th, ns, i)) elims;
val intrs' = map (rulify lthy1) intrs;
val (intrs'', elims'', eqs', induct, inducts, lthy3) =
declare_rules rec_binding coind no_ind
spec_name cnames preds intrs' intr_names intr_atts elims' eqs raw_induct lthy1;
val result =
{preds = preds,
intrs = intrs'',
elims = elims'',
raw_induct = rulify lthy3 raw_induct,
induct = induct,
inducts = inducts,
eqs = eqs'};
val lthy4 = lthy3
- |> Local_Theory.declaration {syntax = false, pervasive = false} (fn phi =>
+ |> Local_Theory.declaration {syntax = false, pervasive = false, pos = \<^here>} (fn phi =>
let val result' = transform_result phi result;
in put_inductives ({names = cnames, coind = coind}, result') end);
in (result, lthy4) end;
(* external interfaces *)
fun gen_add_inductive mk_def
flags cnames_syn pnames spec monos lthy =
let
(* abbrevs *)
val (_, ctxt1) = Variable.add_fixes (map (Binding.name_of o fst o fst) cnames_syn) lthy;
fun get_abbrev ((name, atts), t) =
if can (Logic.strip_assums_concl #> Logic.dest_equals) t then
let
val _ = Binding.is_empty name andalso null atts orelse
error "Abbreviations may not have names or attributes";
val ((x, T), rhs) = Local_Defs.abs_def (snd (Local_Defs.cert_def ctxt1 (K []) t));
val var =
(case find_first (fn ((c, _), _) => Binding.name_of c = x) cnames_syn of
NONE => error ("Undeclared head of abbreviation " ^ quote x)
| SOME ((b, T'), mx) =>
if T <> T' then error ("Bad type specification for abbreviation " ^ quote x)
else (b, mx));
in SOME (var, rhs) end
else NONE;
val abbrevs = map_filter get_abbrev spec;
val bs = map (Binding.name_of o fst o fst) abbrevs;
(* predicates *)
val pre_intros = filter_out (is_some o get_abbrev) spec;
val cnames_syn' = filter_out (member (op =) bs o Binding.name_of o fst o fst) cnames_syn;
val cs = map (Free o apfst Binding.name_of o fst) cnames_syn';
val ps = map Free pnames;
val (_, ctxt2) = lthy |> Variable.add_fixes (map (Binding.name_of o fst o fst) cnames_syn');
val ctxt3 = ctxt2 |> fold (snd oo Local_Defs.fixed_abbrev) abbrevs;
val expand = Assumption.export_term ctxt3 lthy #> Proof_Context.cert_term lthy;
fun close_rule r =
fold (Logic.all o Free) (fold_aterms
(fn t as Free (v as (s, _)) =>
if Variable.is_fixed ctxt1 s orelse
member (op =) ps t then I else insert (op =) v
| _ => I) r []) r;
val intros = map (apsnd (Syntax.check_term lthy #> close_rule #> expand)) pre_intros;
val preds = map (fn ((c, _), mx) => (c, mx)) cnames_syn';
in
lthy
|> mk_def flags cs intros monos ps preds
||> fold (snd oo Local_Theory.abbrev Syntax.mode_default) abbrevs
end;
fun gen_add_inductive_cmd mk_def verbose coind cnames_syn pnames_syn intro_srcs raw_monos lthy =
let
val ((vars, intrs), _) = lthy
|> Proof_Context.set_mode Proof_Context.mode_abbrev
|> Specification.read_multi_specs (cnames_syn @ pnames_syn) intro_srcs;
val (cs, ps) = chop (length cnames_syn) vars;
val monos = Attrib.eval_thms lthy raw_monos;
val flags =
{quiet_mode = false, verbose = verbose, alt_name = Binding.empty,
coind = coind, no_elim = false, no_ind = false, skip_mono = false};
in
lthy
|> gen_add_inductive mk_def flags cs (map (apfst Binding.name_of o fst) ps) intrs monos
end;
val add_inductive = gen_add_inductive add_ind_def;
val add_inductive_cmd = gen_add_inductive_cmd add_ind_def;
(* read off arities of inductive predicates from raw induction rule *)
fun arities_of induct =
map (fn (_ $ t $ u) =>
(fst (dest_Const (head_of t)), length (snd (strip_comb u))))
(HOLogic.dest_conj (HOLogic.dest_Trueprop (Thm.concl_of induct)));
(* read off parameters of inductive predicate from raw induction rule *)
fun params_of induct =
let
val (_ $ t $ u :: _) = HOLogic.dest_conj (HOLogic.dest_Trueprop (Thm.concl_of induct));
val (_, ts) = strip_comb t;
val (_, us) = strip_comb u;
in
List.take (ts, length ts - length us)
end;
val pname_of_intr =
Thm.concl_of #> HOLogic.dest_Trueprop #> head_of #> dest_Const #> fst;
(* partition introduction rules according to predicate name *)
fun gen_partition_rules f induct intros =
fold_rev (fn r => AList.map_entry op = (pname_of_intr (f r)) (cons r)) intros
(map (rpair [] o fst) (arities_of induct));
val partition_rules = gen_partition_rules I;
fun partition_rules' induct = gen_partition_rules fst induct;
fun unpartition_rules intros xs =
fold_map (fn r => AList.map_entry_yield op = (pname_of_intr r)
(fn x :: xs => (x, xs)) #>> the) intros xs |> fst;
(* infer order of variables in intro rules from order of quantifiers in elim rule *)
fun infer_intro_vars thy elim arity intros =
let
val _ :: cases = Thm.prems_of elim;
val used = map (fst o fst) (Term.add_vars (Thm.prop_of elim) []);
fun mtch (t, u) =
let
val params = Logic.strip_params t;
val vars =
map (Var o apfst (rpair 0))
(Name.variant_list used (map fst params) ~~ map snd params);
val ts =
map (curry subst_bounds (rev vars))
(List.drop (Logic.strip_assums_hyp t, arity));
val us = Logic.strip_imp_prems u;
val tab =
fold (Pattern.first_order_match thy) (ts ~~ us) (Vartab.empty, Vartab.empty);
in
map (Envir.subst_term tab) vars
end
in
map (mtch o apsnd Thm.prop_of) (cases ~~ intros)
end;
(** outer syntax **)
fun gen_ind_decl mk_def coind =
Parse.vars -- Parse.for_fixes --
Scan.optional Parse_Spec.where_multi_specs [] --
Scan.optional (\<^keyword>\<open>monos\<close> |-- Parse.!!! Parse.thms1) []
>> (fn (((preds, params), specs), monos) =>
(snd o gen_add_inductive_cmd mk_def true coind preds params specs monos));
val ind_decl = gen_ind_decl add_ind_def;
val _ =
Outer_Syntax.local_theory \<^command_keyword>\<open>inductive\<close> "define inductive predicates"
(ind_decl false);
val _ =
Outer_Syntax.local_theory \<^command_keyword>\<open>coinductive\<close> "define coinductive predicates"
(ind_decl true);
val _ =
Outer_Syntax.local_theory \<^command_keyword>\<open>inductive_cases\<close>
"create simplified instances of elimination rules"
(Parse.and_list1 Parse_Spec.simple_specs >> (snd oo inductive_cases_cmd));
val _ =
Outer_Syntax.local_theory \<^command_keyword>\<open>inductive_simps\<close>
"create simplification rules for inductive predicates"
(Parse.and_list1 Parse_Spec.simple_specs >> (snd oo inductive_simps_cmd));
val _ =
Outer_Syntax.command \<^command_keyword>\<open>print_inductives\<close>
"print (co)inductive definitions and monotonicity rules"
(Parse.opt_bang >> (fn b => Toplevel.keep (print_inductives b o Toplevel.context_of)));
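(* Usage sketch of the commands registered above (hypothetical predicate name):
  inductive even :: "nat \<Rightarrow> bool" where
    zero: "even 0"
  | step: "even n \<Longrightarrow> even (Suc (Suc n))"
"coinductive" follows the same syntax; a trailing "monos" section may supply additional
monotonicity rules. *)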
end;
diff --git a/src/HOL/Tools/inductive_set.ML b/src/HOL/Tools/inductive_set.ML
--- a/src/HOL/Tools/inductive_set.ML
+++ b/src/HOL/Tools/inductive_set.ML
@@ -1,560 +1,560 @@
(* Title: HOL/Tools/inductive_set.ML
Author: Stefan Berghofer, TU Muenchen
Wrapper for defining inductive sets using package for inductive predicates,
including infrastructure for converting between predicates and sets.
*)
signature INDUCTIVE_SET =
sig
val to_set_att: thm list -> attribute
val to_pred_att: thm list -> attribute
val to_pred : thm list -> Context.generic -> thm -> thm
val pred_set_conv_att: attribute
val add_inductive:
Inductive.flags ->
((binding * typ) * mixfix) list ->
(string * typ) list ->
(Attrib.binding * term) list -> thm list ->
local_theory -> Inductive.result * local_theory
val add_inductive_cmd: bool -> bool ->
(binding * string option * mixfix) list ->
(binding * string option * mixfix) list ->
Specification.multi_specs_cmd -> (Facts.ref * Token.src list) list ->
local_theory -> Inductive.result * local_theory
val mono_add: attribute
val mono_del: attribute
end;
structure Inductive_Set: INDUCTIVE_SET =
struct
(***********************************************************************************)
(* simplifies (%x y. (x, y) : S & P x y) to (%x y. (x, y) : S Int {(x, y). P x y}) *)
(* and (%x y. (x, y) : S | P x y) to (%x y. (x, y) : S Un {(x, y). P x y}) *)
(* used for converting "strong" (co)induction rules *)
(***********************************************************************************)
val anyt = Free ("t", TFree ("'t", []));
fun strong_ind_simproc tab =
Simplifier.make_simproc \<^context> "strong_ind"
{lhss = [\<^term>\<open>x::'a::{}\<close>],
proc = fn _ => fn ctxt => fn ct =>
let
fun close p t f =
let val vs = Term.add_vars t []
in Thm.instantiate' [] (rev (map (SOME o Thm.cterm_of ctxt o Var) vs))
(p (fold (Logic.all o Var) vs t) f)
end;
fun mkop \<^const_name>\<open>HOL.conj\<close> T x =
SOME (Const (\<^const_name>\<open>Lattices.inf\<close>, T --> T --> T), x)
| mkop \<^const_name>\<open>HOL.disj\<close> T x =
SOME (Const (\<^const_name>\<open>Lattices.sup\<close>, T --> T --> T), x)
| mkop _ _ _ = NONE;
fun mk_collect p T t =
let val U = HOLogic.dest_setT T
in HOLogic.Collect_const U $
HOLogic.mk_ptupleabs (HOLogic.flat_tuple_paths p) U HOLogic.boolT t
end;
fun decomp (Const (s, _) $ ((m as Const (\<^const_name>\<open>Set.member\<close>,
Type (_, [_, Type (_, [T, _])]))) $ p $ S) $ u) =
mkop s T (m, p, S, mk_collect p T (head_of u))
| decomp (Const (s, _) $ u $ ((m as Const (\<^const_name>\<open>Set.member\<close>,
Type (_, [_, Type (_, [T, _])]))) $ p $ S)) =
mkop s T (m, p, mk_collect p T (head_of u), S)
| decomp _ = NONE;
val simp =
full_simp_tac
(put_simpset HOL_basic_ss ctxt addsimps @{thms mem_Collect_eq case_prod_conv}) 1;
fun mk_rew t = (case strip_abs_vars t of
[] => NONE
| xs => (case decomp (strip_abs_body t) of
NONE => NONE
| SOME (bop, (m, p, S, S')) =>
SOME (close (Goal.prove ctxt [] [])
(Logic.mk_equals (t, fold_rev Term.abs xs (m $ p $ (bop $ S $ S'))))
(K (EVERY
[resolve_tac ctxt [eq_reflection] 1,
REPEAT (resolve_tac ctxt @{thms ext} 1),
resolve_tac ctxt @{thms iffI} 1,
EVERY [eresolve_tac ctxt @{thms conjE} 1,
resolve_tac ctxt @{thms IntI} 1, simp, simp,
eresolve_tac ctxt @{thms IntE} 1,
resolve_tac ctxt @{thms conjI} 1, simp, simp] ORELSE
EVERY [eresolve_tac ctxt @{thms disjE} 1,
resolve_tac ctxt @{thms UnI1} 1, simp,
resolve_tac ctxt @{thms UnI2} 1, simp,
eresolve_tac ctxt @{thms UnE} 1,
resolve_tac ctxt @{thms disjI1} 1, simp,
resolve_tac ctxt @{thms disjI2} 1, simp]])))
handle ERROR _ => NONE))
in
(case strip_comb (Thm.term_of ct) of
(h as Const (name, _), ts) =>
if Symtab.defined tab name then
let val rews = map mk_rew ts
in
if forall is_none rews then NONE
else SOME (fold (fn th1 => fn th2 => Thm.combination th2 th1)
(map2 (fn SOME r => K r | NONE => Thm.reflexive o Thm.cterm_of ctxt)
rews ts) (Thm.reflexive (Thm.cterm_of ctxt h)))
end
else NONE
| _ => NONE)
end};
(* only eta contract terms occurring as arguments of functions satisfying p *)
fun eta_contract p =
let
fun eta b (Abs (a, T, body)) =
(case eta b body of
body' as (f $ Bound 0) =>
if Term.is_dependent f orelse not b then Abs (a, T, body')
else incr_boundvars ~1 f
| body' => Abs (a, T, body'))
| eta b (t $ u) = eta b t $ eta (p (head_of t)) u
| eta b t = t
in eta false end;
fun eta_contract_thm ctxt p =
Conv.fconv_rule (Conv.then_conv (Thm.beta_conversion true, fn ct =>
Thm.transitive (Thm.eta_conversion ct)
(Thm.symmetric (Thm.eta_conversion (Thm.cterm_of ctxt (eta_contract p (Thm.term_of ct)))))));
(***********************************************************)
(* rules for converting between predicate and set notation *)
(*                                                         *)
(* rules for converting predicates to sets have the form   *)
(*   P (%x y. (x, y) : s) = (%x y. (x, y) : S s)           *)
(*                                                         *)
(* rules for converting sets to predicates have the form   *)
(*   S {(x, y). p x y} = {(x, y). P p x y}                 *)
(*                                                         *)
(* where s and p are parameters                            *)
(***********************************************************)
structure Data = Generic_Data
(
type T =
{(* rules for converting predicates to sets *)
to_set_simps: thm list,
(* rules for converting sets to predicates *)
to_pred_simps: thm list,
(* arities of functions of type t set => ... => u set *)
set_arities: (typ * (int list list option list * int list list option)) list Symtab.table,
(* arities of functions of type (t => ... => bool) => u => ... => bool *)
pred_arities: (typ * (int list list option list * int list list option)) list Symtab.table};
val empty = {to_set_simps = [], to_pred_simps = [],
set_arities = Symtab.empty, pred_arities = Symtab.empty};
fun merge
({to_set_simps = to_set_simps1, to_pred_simps = to_pred_simps1,
set_arities = set_arities1, pred_arities = pred_arities1},
{to_set_simps = to_set_simps2, to_pred_simps = to_pred_simps2,
set_arities = set_arities2, pred_arities = pred_arities2}) : T =
{to_set_simps = Thm.merge_thms (to_set_simps1, to_set_simps2),
to_pred_simps = Thm.merge_thms (to_pred_simps1, to_pred_simps2),
set_arities = Symtab.merge_list (op =) (set_arities1, set_arities2),
pred_arities = Symtab.merge_list (op =) (pred_arities1, pred_arities2)};
);
fun name_type_of (Free p) = SOME p
| name_type_of (Const p) = SOME p
| name_type_of _ = NONE;
fun map_type f (Free (s, T)) = Free (s, f T)
| map_type f (Var (ixn, T)) = Var (ixn, f T)
| map_type f _ = error "map_type";
fun find_most_specific is_inst f eq xs T =
find_first (fn U => is_inst (T, f U)
andalso forall (fn U' => eq (f U, f U') orelse not
(is_inst (T, f U') andalso is_inst (f U', f U)))
xs) xs;
fun lookup_arity thy arities (s, T) = case Symtab.lookup arities s of
NONE => NONE
| SOME xs => find_most_specific (Sign.typ_instance thy) fst (op =) xs T;
fun lookup_rule thy f rules = find_most_specific
(swap #> Pattern.matches thy) (f #> fst) (op aconv) rules;
fun infer_arities thy arities (optf, t) fs = case strip_comb t of
(Abs (_, _, u), []) => infer_arities thy arities (NONE, u) fs
| (Abs _, _) => infer_arities thy arities (NONE, Envir.beta_norm t) fs
| (u, ts) => (case Option.map (lookup_arity thy arities) (name_type_of u) of
SOME (SOME (_, (arity, _))) =>
(fold (infer_arities thy arities) (arity ~~ List.take (ts, length arity)) fs
handle General.Subscript => error "infer_arities: bad term")
| _ => fold (infer_arities thy arities) (map (pair NONE) ts)
(case optf of
NONE => fs
| SOME f => AList.update op = (u, the_default f
(Option.map (fn g => inter (op =) g f) (AList.lookup op = fs u))) fs));
(**************************************************************)
(* derive the to_pred equation from the to_set equation      *)
(*                                                            *)
(* 1. instantiate each set parameter with {(x, y). p x y}     *)
(* 2. apply %P. {(x, y). P x y} to both sides of the equation *)
(* 3. simplify                                                *)
(**************************************************************)
fun mk_to_pred_inst ctxt fs =
map (fn (x, ps) =>
let
val (Ts, T) = strip_type (fastype_of x);
val U = HOLogic.dest_setT T;
val x' = map_type
(K (Ts @ HOLogic.strip_ptupleT ps U ---> HOLogic.boolT)) x;
in
(dest_Var x,
Thm.cterm_of ctxt (fold_rev (Term.abs o pair "x") Ts
(HOLogic.Collect_const U $
HOLogic.mk_ptupleabs ps U HOLogic.boolT
(list_comb (x', map Bound (length Ts - 1 downto 0))))))
end) fs;
fun mk_to_pred_eq ctxt p fs optfs' T thm =
let
val insts = mk_to_pred_inst ctxt fs;
val thm' = Thm.instantiate (TVars.empty, Vars.make insts) thm;
val thm'' =
(case optfs' of
NONE => thm' RS sym
| SOME fs' =>
let
val U = HOLogic.dest_setT (body_type T);
val Ts = HOLogic.strip_ptupleT fs' U;
val arg_cong' = Thm.incr_indexes (Thm.maxidx_of thm + 1) arg_cong;
val (Var (arg_cong_f, _), _) = arg_cong' |> Thm.concl_of |>
dest_comb |> snd |> strip_comb |> snd |> hd |> dest_comb;
in
thm' RS (infer_instantiate ctxt [(arg_cong_f,
Thm.cterm_of ctxt (Abs ("P", Ts ---> HOLogic.boolT,
HOLogic.Collect_const U $ HOLogic.mk_ptupleabs fs' U
HOLogic.boolT (Bound 0))))] arg_cong' RS sym)
end)
in
Simplifier.simplify
(put_simpset HOL_basic_ss ctxt addsimps @{thms mem_Collect_eq case_prod_conv}
addsimprocs [\<^simproc>\<open>Collect_mem\<close>]) thm''
|> zero_var_indexes |> eta_contract_thm ctxt (equal p)
end;
(**** declare rules for converting predicates to sets ****)
exception Malformed of string;
fun add context thm (tab as {to_set_simps, to_pred_simps, set_arities, pred_arities}) =
(case Thm.prop_of thm of
Const (\<^const_name>\<open>Trueprop\<close>, _) $ (Const (\<^const_name>\<open>HOL.eq\<close>, Type (_, [T, _])) $ lhs $ rhs) =>
(case body_type T of
\<^typ>\<open>bool\<close> =>
let
val thy = Context.theory_of context;
val ctxt = Context.proof_of context;
fun factors_of t fs = case strip_abs_body t of
Const (\<^const_name>\<open>Set.member\<close>, _) $ u $ S =>
if is_Free S orelse is_Var S then
let val ps = HOLogic.flat_tuple_paths u
in (SOME ps, (S, ps) :: fs) end
else (NONE, fs)
| _ => (NONE, fs);
val (h, ts) = strip_comb lhs
val (pfs, fs) = fold_map factors_of ts [];
val ((h', ts'), fs') = (case rhs of
Abs _ => (case strip_abs_body rhs of
Const (\<^const_name>\<open>Set.member\<close>, _) $ u $ S =>
(strip_comb S, SOME (HOLogic.flat_tuple_paths u))
| _ => raise Malformed "member symbol on right-hand side expected")
| _ => (strip_comb rhs, NONE))
in
case (name_type_of h, name_type_of h') of
(SOME (s, T), SOME (s', T')) =>
if exists (fn (U, _) =>
Sign.typ_instance thy (T', U) andalso
Sign.typ_instance thy (U, T'))
(Symtab.lookup_list set_arities s')
then
(if Context_Position.is_really_visible ctxt then
warning ("Ignoring conversion rule for operator " ^ s')
else (); tab)
else
{to_set_simps = Thm.trim_context thm :: to_set_simps,
to_pred_simps =
Thm.trim_context (mk_to_pred_eq ctxt h fs fs' T' thm) :: to_pred_simps,
set_arities = Symtab.insert_list op = (s',
(T', (map (AList.lookup op = fs) ts', fs'))) set_arities,
pred_arities = Symtab.insert_list op = (s,
(T, (pfs, fs'))) pred_arities}
| _ => raise Malformed "set / predicate constant expected"
end
| _ => raise Malformed "equation between predicates expected")
| _ => raise Malformed "equation expected")
handle Malformed msg =>
let
val ctxt = Context.proof_of context
val _ =
if Context_Position.is_really_visible ctxt then
warning ("Ignoring malformed set / predicate conversion rule: " ^ msg ^
"\n" ^ Thm.string_of_thm ctxt thm)
else ();
in tab end;
val pred_set_conv_att = Thm.declaration_attribute
(fn thm => fn ctxt => Data.map (add ctxt thm) ctxt);
(**** convert theorem in set notation to predicate notation ****)
fun is_pred tab t =
case Option.map (Symtab.lookup tab o fst) (name_type_of t) of
SOME (SOME _) => true | _ => false;
fun to_pred_simproc rules =
let val rules' = map mk_meta_eq rules
in
Simplifier.make_simproc \<^context> "to_pred"
{lhss = [anyt],
proc = fn _ => fn ctxt => fn ct =>
lookup_rule (Proof_Context.theory_of ctxt)
(Thm.prop_of #> Logic.dest_equals) rules' (Thm.term_of ct)}
end;
fun to_pred_proc thy rules t =
case lookup_rule thy I rules t of
NONE => NONE
| SOME (lhs, rhs) =>
SOME (Envir.subst_term
(Pattern.match thy (lhs, t) (Vartab.empty, Vartab.empty)) rhs);
fun to_pred thms context thm =
let
val thy = Context.theory_of context;
val ctxt = Context.proof_of context;
val {to_pred_simps, set_arities, pred_arities, ...} =
fold (add context) thms (Data.get context);
val fs = filter (is_Var o fst)
(infer_arities thy set_arities (NONE, Thm.prop_of thm) []);
(* instantiate each set parameter with {(x, y). p x y} *)
val insts = mk_to_pred_inst ctxt fs
in
thm |>
Thm.instantiate (TVars.empty, Vars.make insts) |>
Simplifier.full_simplify (put_simpset HOL_basic_ss ctxt addsimprocs
[to_pred_simproc
(@{thm mem_Collect_eq} :: @{thm case_prod_conv} :: map (Thm.transfer thy) to_pred_simps)]) |>
eta_contract_thm ctxt (is_pred pred_arities) |>
Rule_Cases.save thm
end;
val to_pred_att = Thm.rule_attribute [] o to_pred;
(**** convert theorem in predicate notation to set notation ****)
fun to_set thms context thm =
let
val thy = Context.theory_of context;
val ctxt = Context.proof_of context;
val {to_set_simps, pred_arities, ...} =
fold (add context) thms (Data.get context);
val fs = filter (is_Var o fst)
(infer_arities thy pred_arities (NONE, Thm.prop_of thm) []);
(* instantiate each predicate parameter with %x y. (x, y) : s *)
val insts = map (fn (x, ps) =>
let
val Ts = binder_types (fastype_of x);
val l = length Ts;
val k = length ps;
val (Rs, Us) = chop (l - k - 1) Ts;
val T = HOLogic.mk_ptupleT ps Us;
val x' = map_type (K (Rs ---> HOLogic.mk_setT T)) x
in
(dest_Var x,
Thm.cterm_of ctxt (fold_rev (Term.abs o pair "x") Ts
(HOLogic.mk_mem (HOLogic.mk_ptuple ps T (map Bound (k downto 0)),
list_comb (x', map Bound (l - 1 downto k + 1))))))
end) fs;
in
thm |>
Thm.instantiate (TVars.empty, Vars.make insts) |>
Simplifier.full_simplify (put_simpset HOL_basic_ss ctxt addsimps to_set_simps
addsimprocs [strong_ind_simproc pred_arities, \<^simproc>\<open>Collect_mem\<close>]) |>
Rule_Cases.save thm
end;
val to_set_att = Thm.rule_attribute [] o to_set;
(**** definition of inductive sets ****)
fun add_ind_set_def
{quiet_mode, verbose, alt_name, coind, no_elim, no_ind, skip_mono}
cs intros monos params cnames_syn lthy =
let
val thy = Proof_Context.theory_of lthy;
val {set_arities, pred_arities, to_pred_simps, ...} =
Data.get (Context.Proof lthy);
fun infer (Abs (_, _, t)) = infer t
| infer (Const (\<^const_name>\<open>Set.member\<close>, _) $ t $ u) =
infer_arities thy set_arities (SOME (HOLogic.flat_tuple_paths t), u)
| infer (t $ u) = infer t #> infer u
| infer _ = I;
val new_arities = filter_out
(fn (x as Free (_, T), _) => member (op =) params x andalso length (binder_types T) > 0
| _ => false) (fold (snd #> infer) intros []);
val params' = map (fn x =>
(case AList.lookup op = new_arities x of
SOME fs =>
let
val T = HOLogic.dest_setT (fastype_of x);
val Ts = HOLogic.strip_ptupleT fs T;
val x' = map_type (K (Ts ---> HOLogic.boolT)) x
in
(x, (x',
(HOLogic.Collect_const T $
HOLogic.mk_ptupleabs fs T HOLogic.boolT x',
fold_rev (Term.abs o pair "x") Ts
(HOLogic.mk_mem
(HOLogic.mk_ptuple fs T (map Bound (length fs downto 0)), x)))))
end
| NONE => (x, (x, (x, x))))) params;
val (params1, (params2, params3)) =
params' |> map snd |> split_list ||> split_list;
val paramTs = map fastype_of params;
(* equations for converting sets to predicates *)
val ((cs', cs_info), eqns) = cs |> map (fn c as Free (s, T) =>
let
val fs = the_default [] (AList.lookup op = new_arities c);
val (Us, U) = strip_type T |> apsnd HOLogic.dest_setT;
val _ = Us = paramTs orelse error (Pretty.string_of (Pretty.chunks
[Pretty.str "Argument types",
Pretty.block (Pretty.commas (map (Syntax.pretty_typ lthy) Us)),
Pretty.str ("of " ^ s ^ " do not agree with types"),
Pretty.block (Pretty.commas (map (Syntax.pretty_typ lthy) paramTs)),
Pretty.str "of declared parameters"]));
val Ts = HOLogic.strip_ptupleT fs U;
val c' = Free (s ^ "p",
map fastype_of params1 @ Ts ---> HOLogic.boolT)
in
((c', (fs, U, Ts)),
(list_comb (c, params2),
HOLogic.Collect_const U $ HOLogic.mk_ptupleabs fs U HOLogic.boolT
(list_comb (c', params1))))
end) |> split_list |>> split_list;
val eqns' = eqns @
map (Thm.prop_of #> HOLogic.dest_Trueprop #> HOLogic.dest_eq)
(@{thm mem_Collect_eq} :: @{thm case_prod_conv} :: to_pred_simps);
(* predicate version of the introduction rules *)
val intros' =
map (fn (name_atts, t) => (name_atts,
t |>
map_aterms (fn u =>
(case AList.lookup op = params' u of
SOME (_, (u', _)) => u'
| NONE => u)) |>
Pattern.rewrite_term thy [] [to_pred_proc thy eqns'] |>
eta_contract (member op = cs' orf is_pred pred_arities))) intros;
val cnames_syn' = map (fn (b, _) => (Binding.suffix_name "p" b, NoSyn)) cnames_syn;
val monos' = map (to_pred [] (Context.Proof lthy)) monos;
val ({preds, intrs, elims, raw_induct, eqs, ...}, lthy1) =
Inductive.add_ind_def
{quiet_mode = quiet_mode, verbose = verbose, alt_name = Binding.empty,
coind = coind, no_elim = no_elim, no_ind = no_ind, skip_mono = skip_mono}
cs' intros' monos' params1 cnames_syn' lthy;
(* define inductive sets using previously defined predicates *)
val (defs, lthy2) = lthy1
|> fold_map Local_Theory.define
(map (fn (((b, mx), (fs, U, _)), p) =>
((b, mx), ((Thm.def_binding b, []),
fold_rev lambda params (HOLogic.Collect_const U $
HOLogic.mk_ptupleabs fs U HOLogic.boolT (list_comb (p, params3))))))
(cnames_syn ~~ cs_info ~~ preds));
(* prove theorems for converting predicate to set notation *)
val lthy3 = fold
(fn (((p, c as Free (s, _)), (fs, U, Ts)), (_, (_, def))) => fn lthy =>
let val conv_thm =
Goal.prove lthy (map (fst o dest_Free) params) []
(HOLogic.mk_Trueprop (HOLogic.mk_eq
(list_comb (p, params3),
fold_rev (Term.abs o pair "x") Ts
(HOLogic.mk_mem (HOLogic.mk_ptuple fs U (map Bound (length fs downto 0)),
list_comb (c, params))))))
(K (REPEAT (resolve_tac lthy @{thms ext} 1) THEN
simp_tac (put_simpset HOL_basic_ss lthy addsimps
[def, @{thm mem_Collect_eq}, @{thm case_prod_conv}]) 1))
in
lthy |> Local_Theory.note ((Binding.name (s ^ "p_" ^ s ^ "_eq"),
- [Attrib.internal (K pred_set_conv_att)]),
+ [Attrib.internal \<^here> (K pred_set_conv_att)]),
[conv_thm]) |> snd
end) (preds ~~ cs ~~ cs_info ~~ defs) lthy2;
(* convert theorems to set notation *)
val rec_name =
if Binding.is_empty alt_name then Binding.conglomerate (map #1 cnames_syn) else alt_name;
val cnames = map (Local_Theory.full_name lthy3 o #1) cnames_syn; (* FIXME *)
val spec_name = Binding.conglomerate (map #1 cnames_syn);
val (intr_names, intr_atts) = split_list (map fst intros);
val raw_induct' = to_set [] (Context.Proof lthy3) raw_induct;
val (intrs', elims', eqs', induct, inducts, lthy4) =
Inductive.declare_rules rec_name coind no_ind spec_name cnames (map fst defs)
(map (to_set [] (Context.Proof lthy3)) intrs) intr_names intr_atts
(map (fn th => (to_set [] (Context.Proof lthy3) th,
map (fst o fst) (fst (Rule_Cases.get th)),
Rule_Cases.get_constraints th)) elims)
(map (to_set [] (Context.Proof lthy3)) eqs) raw_induct' lthy3;
in
({intrs = intrs', elims = elims', induct = induct, inducts = inducts,
raw_induct = raw_induct', preds = map fst defs, eqs = eqs'},
lthy4)
end;
val add_inductive = Inductive.gen_add_inductive add_ind_set_def;
val add_inductive_cmd = Inductive.gen_add_inductive_cmd add_ind_set_def;
fun mono_att att =
Thm.declaration_attribute (fn thm => fn context =>
Thm.attribute_declaration att (to_pred [] context thm) context);
val mono_add = mono_att Inductive.mono_add;
val mono_del = mono_att Inductive.mono_del;
(** package setup **)
(* attributes *)
val _ =
Theory.setup
(Attrib.setup \<^binding>\<open>pred_set_conv\<close> (Scan.succeed pred_set_conv_att)
"declare rules for converting between predicate and set notation" #>
Attrib.setup \<^binding>\<open>to_set\<close> (Attrib.thms >> to_set_att)
"convert rule to set notation" #>
Attrib.setup \<^binding>\<open>to_pred\<close> (Attrib.thms >> to_pred_att)
"convert rule to predicate notation" #>
Attrib.setup \<^binding>\<open>mono_set\<close> (Attrib.add_del mono_add mono_del)
"declare of monotonicity rule for set operators");
(* commands *)
val ind_set_decl = Inductive.gen_ind_decl add_ind_set_def;
val _ =
Outer_Syntax.local_theory \<^command_keyword>\<open>inductive_set\<close> "define inductive sets"
(ind_set_decl false);
val _ =
Outer_Syntax.local_theory \<^command_keyword>\<open>coinductive_set\<close> "define coinductive sets"
(ind_set_decl true);
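(* Usage sketch (hypothetical set name):
  inductive_set Even :: "nat set" where
    "0 \<in> Even"
  | "n \<in> Even \<Longrightarrow> Suc (Suc n) \<in> Even"
internally this defines the predicate "Evenp" and derives the set version from it. *)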
end;
diff --git a/src/HOL/Tools/semiring_normalizer.ML b/src/HOL/Tools/semiring_normalizer.ML
--- a/src/HOL/Tools/semiring_normalizer.ML
+++ b/src/HOL/Tools/semiring_normalizer.ML
@@ -1,885 +1,885 @@
(* Title: HOL/Tools/semiring_normalizer.ML
Author: Amine Chaieb, TU Muenchen
Normalization of expressions in semirings.
*)
signature SEMIRING_NORMALIZER =
sig
type entry
val match: Proof.context -> cterm -> entry option
val the_semiring: Proof.context -> thm -> cterm list * thm list
val the_ring: Proof.context -> thm -> cterm list * thm list
val the_field: Proof.context -> thm -> cterm list * thm list
val the_idom: Proof.context -> thm -> thm list
val the_ideal: Proof.context -> thm -> thm list
val declare: thm -> {semiring: term list * thm list, ring: term list * thm list,
field: term list * thm list, idom: thm list, ideal: thm list} ->
local_theory -> local_theory
val semiring_normalize_conv: Proof.context -> conv
val semiring_normalize_ord_conv: Proof.context -> cterm ord -> conv
val semiring_normalize_wrapper: Proof.context -> entry -> conv
val semiring_normalize_ord_wrapper: Proof.context -> entry -> cterm ord -> conv
val semiring_normalizers_conv: cterm list -> cterm list * thm list
-> cterm list * thm list -> cterm list * thm list ->
(cterm -> bool) * conv * conv * conv -> cterm ord ->
{add: Proof.context -> conv,
mul: Proof.context -> conv,
neg: Proof.context -> conv,
main: Proof.context -> conv,
pow: Proof.context -> conv,
sub: Proof.context -> conv}
val semiring_normalizers_ord_wrapper: Proof.context -> entry -> cterm ord ->
{add: Proof.context -> conv,
mul: Proof.context -> conv,
neg: Proof.context -> conv,
main: Proof.context -> conv,
pow: Proof.context -> conv,
sub: Proof.context -> conv}
end
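(* Added usage sketch: once a structure has been registered via "declare",
Semiring_Normalizer.semiring_normalize_conv ctxt ct is expected to return an
equational theorem rewriting the polynomial cterm ct into normal form; if no
declared structure matches, the conversion falls back to reflexivity (see
semiring_normalize_ord_conv below). *)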
structure Semiring_Normalizer: SEMIRING_NORMALIZER =
struct
(** data **)
type entry =
{vars: cterm list,
semiring: cterm list * thm list,
ring: cterm list * thm list,
field: cterm list * thm list,
idom: thm list,
ideal: thm list} *
{is_const: cterm -> bool,
dest_const: cterm -> Rat.rat,
mk_const: ctyp -> Rat.rat -> cterm,
conv: Proof.context -> cterm -> thm};
structure Data = Generic_Data
(
type T = (thm * entry) list;
val empty = [];
fun merge data = AList.merge Thm.eq_thm (K true) data;
);
fun the_rules ctxt = fst o the o AList.lookup Thm.eq_thm (Data.get (Context.Proof ctxt))
val the_semiring = #semiring oo the_rules
val the_ring = #ring oo the_rules
val the_field = #field oo the_rules
val the_idom = #idom oo the_rules
val the_ideal = #ideal oo the_rules
fun match ctxt tm =
let
fun match_inst
({vars, semiring = (sr_ops, sr_rules),
ring = (r_ops, r_rules), field = (f_ops, f_rules), idom, ideal},
fns) pat =
let
fun h instT =
let
val substT = Thm.instantiate (instT, Vars.empty);
val substT_cterm = Drule.cterm_rule substT;
val vars' = map substT_cterm vars;
val semiring' = (map substT_cterm sr_ops, map substT sr_rules);
val ring' = (map substT_cterm r_ops, map substT r_rules);
val field' = (map substT_cterm f_ops, map substT f_rules);
val idom' = map substT idom;
val ideal' = map substT ideal;
val result = ({vars = vars', semiring = semiring',
ring = ring', field = field', idom = idom', ideal = ideal'}, fns);
in SOME result end
in (case try Thm.match (pat, tm) of
NONE => NONE
| SOME (instT, _) => h instT)
end;
fun match_struct (_,
entry as ({semiring = (sr_ops, _), ring = (r_ops, _), field = (f_ops, _), ...}, _): entry) =
get_first (match_inst entry) (sr_ops @ r_ops @ f_ops);
in get_first match_struct (Data.get (Context.Proof ctxt)) end;
(* extra-logical functions *)
val semiring_norm_ss =
simpset_of (put_simpset HOL_basic_ss \<^context> addsimps @{thms semiring_norm});
val semiring_funs =
{is_const = can HOLogic.dest_number o Thm.term_of,
dest_const = (fn ct =>
Rat.of_int (snd
(HOLogic.dest_number (Thm.term_of ct)
handle TERM _ => error "ring_dest_const"))),
mk_const = (fn cT => fn x => Numeral.mk_cnumber cT
(case Rat.dest x of (i, 1) => i | _ => error "int_of_rat: bad int")),
conv = (fn ctxt =>
Simplifier.rewrite (put_simpset semiring_norm_ss ctxt)
then_conv Simplifier.rewrite (put_simpset HOL_basic_ss ctxt addsimps @{thms numeral_One}))};
val divide_const = Thm.cterm_of \<^context> (Logic.varify_global \<^term>\<open>(/)\<close>);
val [divide_tvar] = Term.add_tvars (Thm.term_of divide_const) [];
val field_funs =
let
fun numeral_is_const ct =
case Thm.term_of ct of
Const (\<^const_name>\<open>Rings.divide\<close>,_) $ a $ b =>
can HOLogic.dest_number a andalso can HOLogic.dest_number b
| Const (\<^const_name>\<open>Fields.inverse\<close>,_)$t => can HOLogic.dest_number t
| t => can HOLogic.dest_number t
fun dest_const ct = ((case Thm.term_of ct of
Const (\<^const_name>\<open>Rings.divide\<close>,_) $ a $ b=>
Rat.make (snd (HOLogic.dest_number a), snd (HOLogic.dest_number b))
| Const (\<^const_name>\<open>Fields.inverse\<close>,_)$t =>
Rat.inv (Rat.of_int (snd (HOLogic.dest_number t)))
| t => Rat.of_int (snd (HOLogic.dest_number t)))
handle TERM _ => error "ring_dest_const")
fun mk_const cT x =
let val (a, b) = Rat.dest x
in if b = 1 then Numeral.mk_cnumber cT a
else Thm.apply
(Thm.apply
(Thm.instantiate_cterm (TVars.make1 (divide_tvar, cT), Vars.empty) divide_const)
(Numeral.mk_cnumber cT a))
(Numeral.mk_cnumber cT b)
end
in
{is_const = numeral_is_const,
dest_const = dest_const,
mk_const = mk_const,
conv = Numeral_Simprocs.field_comp_conv}
end;
(* logical content *)
val semiringN = "semiring";
val ringN = "ring";
val fieldN = "field";
val idomN = "idom";
fun declare raw_key
{semiring = raw_semiring0, ring = raw_ring0, field = raw_field0, idom = raw_idom, ideal = raw_ideal}
lthy =
let
val ctxt' = fold Proof_Context.augment (fst raw_semiring0 @ fst raw_ring0 @ fst raw_field0) lthy;
val prepare_ops = apfst (Variable.export_terms ctxt' lthy #> map (Thm.cterm_of lthy));
val raw_semiring = prepare_ops raw_semiring0;
val raw_ring = prepare_ops raw_ring0;
val raw_field = prepare_ops raw_field0;
in
- lthy |> Local_Theory.declaration {syntax = false, pervasive = false} (fn phi => fn context =>
+ lthy |> Local_Theory.declaration {syntax = false, pervasive = false, pos = \<^here>} (fn phi => fn context =>
let
val ctxt = Context.proof_of context;
val key = Morphism.thm phi raw_key;
fun transform_ops_rules (ops, rules) =
(map (Morphism.cterm phi) ops, Morphism.fact phi rules);
val (sr_ops, sr_rules) = transform_ops_rules raw_semiring;
val (r_ops, r_rules) = transform_ops_rules raw_ring;
val (f_ops, f_rules) = transform_ops_rules raw_field;
val idom = Morphism.fact phi raw_idom;
val ideal = Morphism.fact phi raw_ideal;
fun check kind name xs n =
null xs orelse length xs = n orelse
error ("Expected " ^ string_of_int n ^ " " ^ kind ^ " for " ^ name);
val check_ops = check "operations";
val check_rules = check "rules";
val _ =
check_ops semiringN sr_ops 5 andalso
check_rules semiringN sr_rules 36 andalso
check_ops ringN r_ops 2 andalso
check_rules ringN r_rules 2 andalso
check_ops fieldN f_ops 2 andalso
check_rules fieldN f_rules 2 andalso
check_rules idomN idom 2;
val mk_meta = Local_Defs.meta_rewrite_rule ctxt;
val sr_rules' = map mk_meta sr_rules;
val r_rules' = map mk_meta r_rules;
val f_rules' = map mk_meta f_rules;
fun rule i = nth sr_rules' (i - 1);
val (cx, cy) = Thm.dest_binop (hd sr_ops);
val cz = rule 34 |> Thm.rhs_of |> Thm.dest_arg |> Thm.dest_arg;
val cn = rule 36 |> Thm.rhs_of |> Thm.dest_arg |> Thm.dest_arg;
val ((clx, crx), (cly, cry)) =
rule 13 |> Thm.rhs_of |> Thm.dest_binop |> apply2 Thm.dest_binop;
val ((ca, cb), (cc, cd)) =
rule 20 |> Thm.lhs_of |> Thm.dest_binop |> apply2 Thm.dest_binop;
val cm = rule 1 |> Thm.rhs_of |> Thm.dest_arg;
val (cp, cq) = rule 26 |> Thm.lhs_of |> Thm.dest_binop |> apply2 Thm.dest_arg;
val vars = [ca, cb, cc, cd, cm, cn, cp, cq, cx, cy, cz, clx, crx, cly, cry];
val semiring = (sr_ops, sr_rules');
val ring = (r_ops, r_rules');
val field = (f_ops, f_rules');
val ideal' = map (Thm.symmetric o mk_meta) ideal
in
context
|> Data.map (AList.update Thm.eq_thm (key,
({vars = vars, semiring = semiring, ring = ring, field = field, idom = idom, ideal = ideal'},
(if null f_ops then semiring_funs else field_funs))))
end)
end;
(** auxiliary **)
fun is_comb ct =
(case Thm.term_of ct of
_ $ _ => true
| _ => false);
val concl = Thm.cprop_of #> Thm.dest_arg;
fun is_binop ct ct' =
(case Thm.term_of ct' of
c $ _ $ _ => Thm.term_of ct aconv c
| _ => false);
fun dest_binop ct ct' =
if is_binop ct ct' then Thm.dest_binop ct'
else raise CTERM ("dest_binop: bad binop", [ct, ct'])
fun inst_thm inst =
Thm.instantiate (TVars.empty, Vars.make (map (apfst (dest_Var o Thm.term_of)) inst));
val dest_number = Thm.term_of #> HOLogic.dest_number #> snd;
val is_number = can dest_number;
fun numeral01_conv ctxt =
Simplifier.rewrite (put_simpset HOL_basic_ss ctxt addsimps [@{thm numeral_One}]);
fun zero1_numeral_conv ctxt =
Simplifier.rewrite (put_simpset HOL_basic_ss ctxt addsimps [@{thm numeral_One} RS sym]);
fun zerone_conv ctxt cv =
zero1_numeral_conv ctxt then_conv cv then_conv numeral01_conv ctxt;
val nat_add_ss = simpset_of
(put_simpset HOL_basic_ss \<^context>
addsimps @{thms arith_simps} @ @{thms diff_nat_numeral} @ @{thms rel_simps}
@ @{thms if_False if_True Nat.add_0 add_Suc add_numeral_left Suc_eq_plus1}
@ map (fn th => th RS sym) @{thms numerals});
fun nat_add_conv ctxt =
zerone_conv ctxt (Simplifier.rewrite (put_simpset nat_add_ss ctxt));
val zeron_tm = \<^cterm>\<open>0::nat\<close>;
val onen_tm = \<^cterm>\<open>1::nat\<close>;
val true_tm = \<^cterm>\<open>True\<close>;
(** normalizing conversions **)
(* core conversion *)
fun semiring_normalizers_conv vars (sr_ops, sr_rules) (r_ops, r_rules) (f_ops, f_rules)
(is_semiring_constant, semiring_add_conv, semiring_mul_conv, semiring_pow_conv) =
let
val [pthm_02, pthm_03, pthm_04, pthm_05, pthm_07, pthm_08,
pthm_09, pthm_10, pthm_11, pthm_12, pthm_13, pthm_14, pthm_15, pthm_16,
pthm_17, pthm_18, pthm_19, pthm_21, pthm_22, pthm_23, pthm_24,
pthm_25, pthm_26, pthm_27, pthm_28, pthm_29, pthm_30, pthm_31, pthm_32,
pthm_33, pthm_34, pthm_35, pthm_36, pthm_37, pthm_38, _] = sr_rules;
val [ca, cb, cc, cd, cm, cn, cp, cq, cx, cy, cz, clx, crx, cly, cry] = vars;
val [add_pat, mul_pat, pow_pat, zero_tm, one_tm] = sr_ops;
val [add_tm, mul_tm, pow_tm] = map (Thm.dest_fun o Thm.dest_fun) [add_pat, mul_pat, pow_pat];
val dest_add = dest_binop add_tm
val dest_mul = dest_binop mul_tm
fun dest_pow tm =
let val (l,r) = dest_binop pow_tm tm
in if is_number r then (l,r) else raise CTERM ("dest_pow",[tm])
end;
val is_add = is_binop add_tm
val is_mul = is_binop mul_tm
val (neg_mul, sub_add, sub_tm, neg_tm, dest_sub, cx', cy') =
(case (r_ops, r_rules) of
([sub_pat, neg_pat], [neg_mul, sub_add]) =>
let
val sub_tm = Thm.dest_fun (Thm.dest_fun sub_pat)
val neg_tm = Thm.dest_fun neg_pat
val dest_sub = dest_binop sub_tm
in (neg_mul, sub_add, sub_tm, neg_tm, dest_sub, neg_mul |> concl |> Thm.dest_arg,
sub_add |> concl |> Thm.dest_arg |> Thm.dest_arg)
end
| _ => (TrueI, TrueI, true_tm, true_tm, (fn t => (t,t)), true_tm, true_tm));
val (divide_inverse, divide_tm, inverse_tm) =
(case (f_ops, f_rules) of
([divide_pat, inverse_pat], [div_inv, _]) =>
let val div_tm = funpow 2 Thm.dest_fun divide_pat
val inv_tm = Thm.dest_fun inverse_pat
in (div_inv, div_tm, inv_tm)
end
| _ => (TrueI, true_tm, true_tm));
in fn variable_ord =>
let
(* Conversion for "x^n * x^m", with either x^n = x and/or x^m = x possible. *)
(* Also deals with "const * const", but both terms must involve powers of *)
(* the same variable, or both be constants, or behaviour may be incorrect. *)
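(* Added illustrative sketch: for "x ^ n * x ^ m" the rule pthm_29 is instantiated
to yield "x ^ (n + m)" and nat_add_conv then evaluates the exponent sum; the
degenerate shapes "x ^ n * x", "x * x ^ m" and "x * x" are intended to be covered
by pthm_31, pthm_30 and pthm_32 respectively. *)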
fun powvar_mul_conv ctxt tm =
let
val (l,r) = dest_mul tm
in if is_semiring_constant l andalso is_semiring_constant r
then semiring_mul_conv tm
else
((let
val (lx,ln) = dest_pow l
in
((let val (_, rn) = dest_pow r
val th1 = inst_thm [(cx,lx),(cp,ln),(cq,rn)] pthm_29
val (tm1,tm2) = Thm.dest_comb(concl th1) in
Thm.transitive th1 (Drule.arg_cong_rule tm1 (nat_add_conv ctxt tm2)) end)
handle CTERM _ =>
(let val th1 = inst_thm [(cx,lx),(cq,ln)] pthm_31
val (tm1,tm2) = Thm.dest_comb(concl th1) in
Thm.transitive th1 (Drule.arg_cong_rule tm1 (nat_add_conv ctxt tm2)) end)) end)
handle CTERM _ =>
((let val (rx,rn) = dest_pow r
val th1 = inst_thm [(cx,rx),(cq,rn)] pthm_30
val (tm1,tm2) = Thm.dest_comb(concl th1) in
Thm.transitive th1 (Drule.arg_cong_rule tm1 (nat_add_conv ctxt tm2)) end)
handle CTERM _ => inst_thm [(cx,l)] pthm_32
))
end;
(* Remove "1 * m" from a monomial, and just leave m. *)
fun monomial_deone th =
(let val (l,r) = dest_mul(concl th) in
if l aconvc one_tm
then Thm.transitive th (inst_thm [(ca,r)] pthm_13) else th end)
handle CTERM _ => th;
(* Conversion for "(monomial)^n", where n is a numeral. *)
fun monomial_pow_conv ctxt =
let
fun monomial_pow tm bod ntm =
if not(is_comb bod)
then Thm.reflexive tm
else
if is_semiring_constant bod
then semiring_pow_conv tm
else
let
val (lopr,r) = Thm.dest_comb bod
in if not(is_comb lopr)
then Thm.reflexive tm
else
let
val (opr,l) = Thm.dest_comb lopr
in
if opr aconvc pow_tm andalso is_number r
then
let val th1 = inst_thm [(cx,l),(cp,r),(cq,ntm)] pthm_34
val (l,r) = Thm.dest_comb(concl th1)
in Thm.transitive th1 (Drule.arg_cong_rule l (nat_add_conv ctxt r))
end
else
if opr aconvc mul_tm
then
let
val th1 = inst_thm [(cx,l),(cy,r),(cq,ntm)] pthm_33
val (xy,z) = Thm.dest_comb(concl th1)
val (x,y) = Thm.dest_comb xy
val thl = monomial_pow y l ntm
val thr = monomial_pow z r ntm
in Thm.transitive th1 (Thm.combination (Drule.arg_cong_rule x thl) thr)
end
else Thm.reflexive tm
end
end
in fn tm =>
let
val (lopr,r) = Thm.dest_comb tm
val (opr,l) = Thm.dest_comb lopr
in if not (opr aconvc pow_tm) orelse not(is_number r)
then raise CTERM ("monomial_pow_conv", [tm])
else if r aconvc zeron_tm
then inst_thm [(cx,l)] pthm_35
else if r aconvc onen_tm
then inst_thm [(cx,l)] pthm_36
else monomial_deone(monomial_pow tm l r)
end
end;
(* Multiplication of canonical monomials. *)
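(* Added illustrative sketch: canonical monomials are multiplied by merging their
variable-power factors according to variable_ord, e.g. "(x ^ 2 * y) * (x * z)" is
intended to yield "x ^ 3 * (y * z)"; powers of the same variable are combined by
powvar_mul_conv. *)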
fun monomial_mul_conv ctxt =
let
fun powvar tm =
if is_semiring_constant tm then one_tm
else
((let val (lopr,r) = Thm.dest_comb tm
val (opr,l) = Thm.dest_comb lopr
in if opr aconvc pow_tm andalso is_number r then l
else raise CTERM ("monomial_mul_conv",[tm]) end)
handle CTERM _ => tm) (* FIXME !? *)
fun vorder x y =
if x aconvc y then 0
else
if x aconvc one_tm then ~1
else if y aconvc one_tm then 1
else if is_less (variable_ord (x, y)) then ~1 else 1
fun monomial_mul tm l r =
((let val (lx,ly) = dest_mul l val vl = powvar lx
in
((let
val (rx,ry) = dest_mul r
val vr = powvar rx
val ord = vorder vl vr
in
if ord = 0
then
let
val th1 = inst_thm [(clx,lx),(cly,ly),(crx,rx),(cry,ry)] pthm_15
val (tm1,tm2) = Thm.dest_comb(concl th1)
val (tm3,tm4) = Thm.dest_comb tm1
val th2 = Drule.fun_cong_rule (Drule.arg_cong_rule tm3 (powvar_mul_conv ctxt tm4)) tm2
val th3 = Thm.transitive th1 th2
val (tm5,tm6) = Thm.dest_comb(concl th3)
val (tm7,tm8) = Thm.dest_comb tm6
val th4 = monomial_mul tm6 (Thm.dest_arg tm7) tm8
in Thm.transitive th3 (Drule.arg_cong_rule tm5 th4)
end
else
let val th0 = if ord < 0 then pthm_16 else pthm_17
val th1 = inst_thm [(clx,lx),(cly,ly),(crx,rx),(cry,ry)] th0
val (tm1,tm2) = Thm.dest_comb(concl th1)
val (tm3,tm4) = Thm.dest_comb tm2
in Thm.transitive th1 (Drule.arg_cong_rule tm1 (monomial_mul tm2 (Thm.dest_arg tm3) tm4))
end
end)
handle CTERM _ =>
(let val vr = powvar r val ord = vorder vl vr
in
if ord = 0 then
let
val th1 = inst_thm [(clx,lx),(cly,ly),(crx,r)] pthm_18
val (tm1,tm2) = Thm.dest_comb(concl th1)
val (tm3,tm4) = Thm.dest_comb tm1
val th2 = Drule.fun_cong_rule (Drule.arg_cong_rule tm3 (powvar_mul_conv ctxt tm4)) tm2
in Thm.transitive th1 th2
end
else
if ord < 0 then
let val th1 = inst_thm [(clx,lx),(cly,ly),(crx,r)] pthm_19
val (tm1,tm2) = Thm.dest_comb(concl th1)
val (tm3,tm4) = Thm.dest_comb tm2
in Thm.transitive th1 (Drule.arg_cong_rule tm1 (monomial_mul tm2 (Thm.dest_arg tm3) tm4))
end
else inst_thm [(ca,l),(cb,r)] pthm_09
end)) end)
handle CTERM _ =>
(let val vl = powvar l in
((let
val (rx,ry) = dest_mul r
val vr = powvar rx
val ord = vorder vl vr
in if ord = 0 then
let val th1 = inst_thm [(clx,l),(crx,rx),(cry,ry)] pthm_21
val (tm1,tm2) = Thm.dest_comb(concl th1)
val (tm3,tm4) = Thm.dest_comb tm1
in Thm.transitive th1 (Drule.fun_cong_rule (Drule.arg_cong_rule tm3 (powvar_mul_conv ctxt tm4)) tm2)
end
else if ord > 0 then
let val th1 = inst_thm [(clx,l),(crx,rx),(cry,ry)] pthm_22
val (tm1,tm2) = Thm.dest_comb(concl th1)
val (tm3,tm4) = Thm.dest_comb tm2
in Thm.transitive th1 (Drule.arg_cong_rule tm1 (monomial_mul tm2 (Thm.dest_arg tm3) tm4))
end
else Thm.reflexive tm
end)
handle CTERM _ =>
(let val vr = powvar r
val ord = vorder vl vr
in if ord = 0 then powvar_mul_conv ctxt tm
else if ord > 0 then inst_thm [(ca,l),(cb,r)] pthm_09
else Thm.reflexive tm
end)) end))
in fn tm => let val (l,r) = dest_mul tm in monomial_deone(monomial_mul tm l r)
end
end;
(* Multiplication by monomial of a polynomial. *)
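(* Added note: a monomial is distributed over a sum of monomials, e.g.
"x * (y + z)" is rewritten via pthm_37 into "x * y + x * z", with each resulting
product normalised by monomial_mul_conv. *)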
fun polynomial_monomial_mul_conv ctxt =
let
fun pmm_conv tm =
let val (l,r) = dest_mul tm
in
((let val (y,z) = dest_add r
val th1 = inst_thm [(cx,l),(cy,y),(cz,z)] pthm_37
val (tm1,tm2) = Thm.dest_comb(concl th1)
val (tm3,tm4) = Thm.dest_comb tm1
val th2 =
Thm.combination (Drule.arg_cong_rule tm3 (monomial_mul_conv ctxt tm4)) (pmm_conv tm2)
in Thm.transitive th1 th2
end)
handle CTERM _ => monomial_mul_conv ctxt tm)
end
in pmm_conv
end;
(* Addition of two monomials identical except for constant multiples. *)
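(* Added note: adds two monomials that differ only in their constant coefficient,
e.g. "2 * x + 3 * x" is intended to collapse to "5 * x" (the coefficient sum is
computed by semiring_add_conv); if the resulting coefficient is zero, the whole
monomial is rewritten to zero via pthm_11. *)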
fun monomial_add_conv tm =
let val (l,r) = dest_add tm
in if is_semiring_constant l andalso is_semiring_constant r
then semiring_add_conv tm
else
let val th1 =
if is_mul l andalso is_semiring_constant(Thm.dest_arg1 l)
then if is_mul r andalso is_semiring_constant(Thm.dest_arg1 r) then
inst_thm [(ca,Thm.dest_arg1 l),(cm,Thm.dest_arg r), (cb,Thm.dest_arg1 r)] pthm_02
else inst_thm [(ca,Thm.dest_arg1 l),(cm,r)] pthm_03
else if is_mul r andalso is_semiring_constant(Thm.dest_arg1 r)
then inst_thm [(cm,l),(ca,Thm.dest_arg1 r)] pthm_04
else inst_thm [(cm,r)] pthm_05
val (tm1,tm2) = Thm.dest_comb(concl th1)
val (tm3,tm4) = Thm.dest_comb tm1
val th2 = Drule.arg_cong_rule tm3 (semiring_add_conv tm4)
val th3 = Thm.transitive th1 (Drule.fun_cong_rule th2 tm2)
val tm5 = concl th3
in
if (Thm.dest_arg1 tm5) aconvc zero_tm
then Thm.transitive th3 (inst_thm [(ca,Thm.dest_arg tm5)] pthm_11)
else monomial_deone th3
end
end;
(* Ordering on monomials. *)
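(* Added note: morder below compares monomials first by total degree and then
lexicographically via variable_ord on the individual variable powers, so e.g.
"x * y ^ 2" (degree 3) is intended to rank before "x * y" (degree 2) in a
normalised polynomial. *)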
fun striplist dest =
let fun strip x acc =
((let val (l,r) = dest x in
strip l (strip r acc) end)
handle CTERM _ => x::acc) (* FIXME !? *)
in fn x => strip x []
end;
fun powervars tm =
let val ptms = striplist dest_mul tm
in if is_semiring_constant (hd ptms) then tl ptms else ptms
end;
val num_0 = 0;
val num_1 = 1;
fun dest_varpow tm =
((let val (x,n) = dest_pow tm in (x,dest_number n) end)
handle CTERM _ =>
(tm,(if is_semiring_constant tm then num_0 else num_1)));
val morder =
let fun lexorder ls =
case ls of
([],[]) => 0
| (_ ,[]) => ~1
| ([], _) => 1
| (((x1,n1)::vs1),((x2,n2)::vs2)) =>
(case variable_ord (x1, x2) of
LESS => 1
| GREATER => ~1
| EQUAL =>
if n1 < n2 then ~1
else if n2 < n1 then 1
else lexorder (vs1, vs2))
in fn tm1 => fn tm2 =>
let val vdegs1 = map dest_varpow (powervars tm1)
val vdegs2 = map dest_varpow (powervars tm2)
val deg1 = fold (Integer.add o snd) vdegs1 num_0
val deg2 = fold (Integer.add o snd) vdegs2 num_0
in if deg1 < deg2 then ~1 else if deg1 > deg2 then 1
else lexorder (vdegs1, vdegs2)
end
end;
(* Addition of two polynomials. *)
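(* Added note: two polynomials in normal form are added by merging their monomial
summands in morder order; summands of equal order are combined via
monomial_add_conv, and zero summands are dropped by dezero_rule. *)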
fun polynomial_add_conv ctxt =
let
fun dezero_rule th =
let
val tm = concl th
in
if not(is_add tm) then th else
let val (lopr,r) = Thm.dest_comb tm
val l = Thm.dest_arg lopr
in
if l aconvc zero_tm
then Thm.transitive th (inst_thm [(ca,r)] pthm_07) else
if r aconvc zero_tm
then Thm.transitive th (inst_thm [(ca,l)] pthm_08) else th
end
end
fun padd tm =
let
val (l,r) = dest_add tm
in
if l aconvc zero_tm then inst_thm [(ca,r)] pthm_07
else if r aconvc zero_tm then inst_thm [(ca,l)] pthm_08
else
if is_add l
then
let val (a,b) = dest_add l
in
if is_add r then
let val (c,d) = dest_add r
val ord = morder a c
in
if ord = 0 then
let val th1 = inst_thm [(ca,a),(cb,b),(cc,c),(cd,d)] pthm_23
val (tm1,tm2) = Thm.dest_comb(concl th1)
val (tm3,tm4) = Thm.dest_comb tm1
val th2 = Drule.arg_cong_rule tm3 (monomial_add_conv tm4)
in dezero_rule (Thm.transitive th1 (Thm.combination th2 (padd tm2)))
end
else (* ord <> 0*)
let val th1 =
if ord > 0 then inst_thm [(ca,a),(cb,b),(cc,r)] pthm_24
else inst_thm [(ca,l),(cc,c),(cd,d)] pthm_25
val (tm1,tm2) = Thm.dest_comb(concl th1)
in dezero_rule (Thm.transitive th1 (Drule.arg_cong_rule tm1 (padd tm2)))
end
end
else (* not (is_add r)*)
let val ord = morder a r
in
if ord = 0 then
let val th1 = inst_thm [(ca,a),(cb,b),(cc,r)] pthm_26
val (tm1,tm2) = Thm.dest_comb(concl th1)
val (tm3,tm4) = Thm.dest_comb tm1
val th2 = Drule.fun_cong_rule (Drule.arg_cong_rule tm3 (monomial_add_conv tm4)) tm2
in dezero_rule (Thm.transitive th1 th2)
end
else (* ord <> 0*)
if ord > 0 then
let val th1 = inst_thm [(ca,a),(cb,b),(cc,r)] pthm_24
val (tm1,tm2) = Thm.dest_comb(concl th1)
in dezero_rule (Thm.transitive th1 (Drule.arg_cong_rule tm1 (padd tm2)))
end
else dezero_rule (inst_thm [(ca,l),(cc,r)] pthm_27)
end
end
else (* not (is_add l)*)
if is_add r then
let val (c,d) = dest_add r
val ord = morder l c
in
if ord = 0 then
let val th1 = inst_thm [(ca,l),(cc,c),(cd,d)] pthm_28
val (tm1,tm2) = Thm.dest_comb(concl th1)
val (tm3,tm4) = Thm.dest_comb tm1
val th2 = Drule.fun_cong_rule (Drule.arg_cong_rule tm3 (monomial_add_conv tm4)) tm2
in dezero_rule (Thm.transitive th1 th2)
end
else
if ord > 0 then Thm.reflexive tm
else
let val th1 = inst_thm [(ca,l),(cc,c),(cd,d)] pthm_25
val (tm1,tm2) = Thm.dest_comb(concl th1)
in dezero_rule (Thm.transitive th1 (Drule.arg_cong_rule tm1 (padd tm2)))
end
end
else
let val ord = morder l r
in
if ord = 0 then monomial_add_conv tm
else if ord > 0 then dezero_rule(Thm.reflexive tm)
else dezero_rule (inst_thm [(ca,l),(cc,r)] pthm_27)
end
end
in padd
end;
(* Multiplication of two polynomials. *)
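(* Added note: products are computed by rearranging with pthm_09, distributing
with pthm_10, multiplying the pieces via polynomial_monomial_mul_conv and
re-normalising the partial sums with polynomial_add_conv; multiplication by 0 or 1
is short-circuited via pthm_11 -- pthm_14. *)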
fun polynomial_mul_conv ctxt =
let
fun pmul tm =
let val (l,r) = dest_mul tm
in
if not(is_add l) then polynomial_monomial_mul_conv ctxt tm
else
if not(is_add r) then
let val th1 = inst_thm [(ca,l),(cb,r)] pthm_09
in Thm.transitive th1 (polynomial_monomial_mul_conv ctxt (concl th1))
end
else
let val (a,b) = dest_add l
val th1 = inst_thm [(ca,a),(cb,b),(cc,r)] pthm_10
val (tm1,tm2) = Thm.dest_comb(concl th1)
val (tm3,tm4) = Thm.dest_comb tm1
val th2 = Drule.arg_cong_rule tm3 (polynomial_monomial_mul_conv ctxt tm4)
val th3 = Thm.transitive th1 (Thm.combination th2 (pmul tm2))
in Thm.transitive th3 (polynomial_add_conv ctxt (concl th3))
end
end
in fn tm =>
let val (l,r) = dest_mul tm
in
if l aconvc zero_tm then inst_thm [(ca,r)] pthm_11
else if r aconvc zero_tm then inst_thm [(ca,l)] pthm_12
else if l aconvc one_tm then inst_thm [(ca,r)] pthm_13
else if r aconvc one_tm then inst_thm [(ca,l)] pthm_14
else pmul tm
end
end;
(* Power of polynomial (optimized for the monomial and trivial cases). *)
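(* Added note: num_conv unfolds a positive numeral n into "Suc (n - 1)" (by running
nat_add_conv backwards), which lets polynomial_pow_conv peel off one factor at a
time via pthm_38; exponents 0 and 1 are again handled directly by pthm_35 and
pthm_36. *)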
fun num_conv ctxt n =
nat_add_conv ctxt (Thm.apply \<^cterm>\<open>Suc\<close> (Numeral.mk_cnumber \<^ctyp>\<open>nat\<close> (dest_number n - 1)))
|> Thm.symmetric;
fun polynomial_pow_conv ctxt =
let
fun ppow tm =
let val (l,n) = dest_pow tm
in
if n aconvc zeron_tm then inst_thm [(cx,l)] pthm_35
else if n aconvc onen_tm then inst_thm [(cx,l)] pthm_36
else
let val th1 = num_conv ctxt n
val th2 = inst_thm [(cx,l),(cq,Thm.dest_arg (concl th1))] pthm_38
val (tm1,tm2) = Thm.dest_comb(concl th2)
val th3 = Thm.transitive th2 (Drule.arg_cong_rule tm1 (ppow tm2))
val th4 = Thm.transitive (Drule.arg_cong_rule (Thm.dest_fun tm) th1) th3
in Thm.transitive th4 (polynomial_mul_conv ctxt (concl th4))
end
end
in fn tm =>
if is_add(Thm.dest_arg1 tm) then ppow tm else monomial_pow_conv ctxt tm
end;
(* Negation. *)
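(* Added note: "- p" is intended to be rewritten via neg_mul into a product with a
negated unit constant, which is then normalised by semiring_mul_conv and
distributed over p by polynomial_monomial_mul_conv. *)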
fun polynomial_neg_conv ctxt tm =
let val (l,r) = Thm.dest_comb tm in
if not (l aconvc neg_tm) then raise CTERM ("polynomial_neg_conv",[tm]) else
let val th1 = inst_thm [(cx', r)] neg_mul
val th2 = Thm.transitive th1 (Conv.arg1_conv semiring_mul_conv (concl th1))
in Thm.transitive th2 (polynomial_monomial_mul_conv ctxt (concl th2))
end
end;
(* Subtraction. *)
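(* Added note: "p - q" is rewritten via sub_add into "p + (- q)"; the negation is
normalised by polynomial_neg_conv and the sum by polynomial_add_conv. *)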
fun polynomial_sub_conv ctxt tm =
let val (l,r) = dest_sub tm
val th1 = inst_thm [(cx', l), (cy', r)] sub_add
val (tm1,tm2) = Thm.dest_comb(concl th1)
val th2 = Drule.arg_cong_rule tm1 (polynomial_neg_conv ctxt tm2)
in Thm.transitive th1 (Thm.transitive th2 (polynomial_add_conv ctxt (concl th2)))
end;
(* Conversion from HOL term. *)
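(* Added note: polynomial_conv is the top-level traversal -- it recurses through
negation, inverse, division, powers, sums, products and differences, dispatching
to the specialised conversions above, and leaves any unrecognised term unchanged
(Thm.reflexive). *)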
fun polynomial_conv ctxt tm =
if is_semiring_constant tm then semiring_add_conv tm
else if not(is_comb tm) then Thm.reflexive tm
else
let val (lopr,r) = Thm.dest_comb tm
in if lopr aconvc neg_tm then
let val th1 = Drule.arg_cong_rule lopr (polynomial_conv ctxt r)
in Thm.transitive th1 (polynomial_neg_conv ctxt (concl th1))
end
else if lopr aconvc inverse_tm then
let val th1 = Drule.arg_cong_rule lopr (polynomial_conv ctxt r)
in Thm.transitive th1 (semiring_mul_conv (concl th1))
end
else
if not(is_comb lopr) then Thm.reflexive tm
else
let val (opr,l) = Thm.dest_comb lopr
in if opr aconvc pow_tm andalso is_number r
then
let val th1 = Drule.fun_cong_rule (Drule.arg_cong_rule opr (polynomial_conv ctxt l)) r
in Thm.transitive th1 (polynomial_pow_conv ctxt (concl th1))
end
else if opr aconvc divide_tm
then
let val th1 = Thm.combination (Drule.arg_cong_rule opr (polynomial_conv ctxt l))
(polynomial_conv ctxt r)
val th2 = (Conv.rewr_conv divide_inverse then_conv polynomial_mul_conv ctxt)
(Thm.rhs_of th1)
in Thm.transitive th1 th2
end
else
if opr aconvc add_tm orelse opr aconvc mul_tm orelse opr aconvc sub_tm
then
let val th1 =
Thm.combination
(Drule.arg_cong_rule opr (polynomial_conv ctxt l)) (polynomial_conv ctxt r)
val f = if opr aconvc add_tm then polynomial_add_conv ctxt
else if opr aconvc mul_tm then polynomial_mul_conv ctxt
else polynomial_sub_conv ctxt
in Thm.transitive th1 (f (concl th1))
end
else Thm.reflexive tm
end
end;
in
{main = polynomial_conv,
add = polynomial_add_conv,
mul = polynomial_mul_conv,
pow = polynomial_pow_conv,
neg = polynomial_neg_conv,
sub = polynomial_sub_conv}
end
end;
val nat_exp_ss =
simpset_of
(put_simpset HOL_basic_ss \<^context>
addsimps (@{thms eval_nat_numeral} @ @{thms diff_nat_numeral} @ @{thms arith_simps} @ @{thms rel_simps})
addsimps [@{thm Let_def}, @{thm if_False}, @{thm if_True}, @{thm Nat.add_0}, @{thm add_Suc}]);
(* various normalizing conversions *)
fun semiring_normalizers_ord_wrapper ctxt ({vars, semiring, ring, field, idom, ideal},
{conv, dest_const, mk_const, is_const}) term_ord =
let
val pow_conv =
Conv.arg_conv (Simplifier.rewrite (put_simpset nat_exp_ss ctxt))
then_conv Simplifier.rewrite
(put_simpset HOL_basic_ss ctxt addsimps [nth (snd semiring) 31, nth (snd semiring) 34])
then_conv conv ctxt
val dat = (is_const, conv ctxt, conv ctxt, pow_conv)
in semiring_normalizers_conv vars semiring ring field dat term_ord end;
fun semiring_normalize_ord_wrapper ctxt
({vars, semiring, ring, field, idom, ideal}, {conv, dest_const, mk_const, is_const}) term_ord =
#main (semiring_normalizers_ord_wrapper ctxt
({vars = vars, semiring = semiring, ring = ring, field = field, idom = idom, ideal = ideal},
{conv = conv, dest_const = dest_const, mk_const = mk_const, is_const = is_const}) term_ord) ctxt;
fun semiring_normalize_wrapper ctxt data =
semiring_normalize_ord_wrapper ctxt data Thm.term_ord;
fun semiring_normalize_ord_conv ctxt ord tm =
(case match ctxt tm of
NONE => Thm.reflexive tm
| SOME res => semiring_normalize_ord_wrapper ctxt res ord tm);
fun semiring_normalize_conv ctxt = semiring_normalize_ord_conv ctxt Thm.term_ord;
end;
diff --git a/src/HOL/Tools/typedef.ML b/src/HOL/Tools/typedef.ML
--- a/src/HOL/Tools/typedef.ML
+++ b/src/HOL/Tools/typedef.ML
@@ -1,373 +1,373 @@
(* Title: HOL/Tools/typedef.ML
Author: Markus Wenzel and Stefan Berghofer, TU Muenchen
Gordon/HOL-style type definitions: create a new syntactic type
represented by a non-empty set.
*)
signature TYPEDEF =
sig
type info =
{rep_type: typ, abs_type: typ, Rep_name: string, Abs_name: string, axiom_name: string} *
{inhabited: thm, type_definition: thm, Rep: thm, Rep_inverse: thm, Abs_inverse: thm,
Rep_inject: thm, Abs_inject: thm, Rep_cases: thm, Abs_cases: thm,
Rep_induct: thm, Abs_induct: thm}
val transform_info: morphism -> info -> info
val get_info: Proof.context -> string -> info list
val get_info_global: theory -> string -> info list
val interpretation: (string -> local_theory -> local_theory) -> theory -> theory
type bindings = {Rep_name: binding, Abs_name: binding, type_definition_name: binding}
val default_bindings: binding -> bindings
val make_bindings: binding -> bindings option -> bindings
val make_morphisms: binding -> (binding * binding) option -> bindings
val overloaded: bool Config.T
val add_typedef: {overloaded: bool} -> binding * (string * sort) list * mixfix ->
term -> bindings option -> (Proof.context -> tactic) -> local_theory ->
(string * info) * local_theory
val add_typedef_global: {overloaded: bool} -> binding * (string * sort) list * mixfix ->
term -> bindings option -> (Proof.context -> tactic) -> theory ->
(string * info) * theory
val typedef: {overloaded: bool} -> binding * (string * sort) list * mixfix ->
term -> bindings option -> local_theory -> Proof.state
val typedef_cmd: {overloaded: bool} -> binding * (string * string option) list * mixfix ->
string -> bindings option -> local_theory -> Proof.state
end;
structure Typedef: TYPEDEF =
struct
(** type definitions **)
(* theory data *)
type info =
(*global part*)
{rep_type: typ, abs_type: typ, Rep_name: string, Abs_name: string, axiom_name: string} *
(*local part*)
{inhabited: thm, type_definition: thm, Rep: thm, Rep_inverse: thm, Abs_inverse: thm,
Rep_inject: thm, Abs_inject: thm, Rep_cases: thm, Abs_cases: thm,
Rep_induct: thm, Abs_induct: thm};
fun transform_info phi (info: info) =
let
val thm = Morphism.thm phi;
val (global_info, {inhabited, type_definition, Rep, Rep_inverse, Abs_inverse,
Rep_inject, Abs_inject, Rep_cases, Abs_cases, Rep_induct, Abs_induct}) = info;
in
(global_info,
{inhabited = thm inhabited, type_definition = thm type_definition,
Rep = thm Rep, Rep_inverse = thm Rep_inverse, Abs_inverse = thm Abs_inverse,
Rep_inject = thm Rep_inject, Abs_inject = thm Abs_inject,
Rep_cases = thm Rep_cases, Abs_cases = thm Abs_cases,
Rep_induct = thm Rep_induct, Abs_induct = thm Abs_induct})
end;
structure Data = Generic_Data
(
type T = info list Symtab.table;
val empty = Symtab.empty;
fun merge data = Symtab.merge_list (K true) data;
);
fun get_info_generic context =
Symtab.lookup_list (Data.get context) #>
map (transform_info (Morphism.transfer_morphism'' context));
val get_info = get_info_generic o Context.Proof;
val get_info_global = get_info_generic o Context.Theory;
fun put_info name info =
Data.map (Symtab.cons_list (name, transform_info Morphism.trim_context_morphism info));
(* global interpretation *)
structure Typedef_Plugin = Plugin(type T = string);
val typedef_plugin = Plugin_Name.declare_setup \<^binding>\<open>typedef\<close>;
fun interpretation f =
Typedef_Plugin.interpretation typedef_plugin
(fn name => fn lthy =>
lthy
|> Local_Theory.map_background_naming
(Name_Space.root_path #> Name_Space.add_path (Long_Name.qualifier name))
|> f name
|> Local_Theory.restore_background_naming lthy);
(* primitive typedef axiomatization -- for fresh typedecl *)
val typedef_overloaded = Attrib.setup_config_bool \<^binding>\<open>typedef_overloaded\<close> (K false);
fun mk_inhabited A =
let val T = HOLogic.dest_setT (Term.fastype_of A)
in HOLogic.mk_Trueprop (HOLogic.exists_const T $ Abs ("x", T, HOLogic.mk_mem (Bound 0, A))) end;
fun mk_typedef newT oldT RepC AbsC A =
let
val typedefC =
Const (\<^const_name>\<open>type_definition\<close>,
(newT --> oldT) --> (oldT --> newT) --> HOLogic.mk_setT oldT --> HOLogic.boolT);
in Logic.mk_implies (mk_inhabited A, HOLogic.mk_Trueprop (typedefC $ RepC $ AbsC $ A)) end;
fun primitive_typedef {overloaded} type_definition_name newT oldT Rep_name Abs_name A lthy =
let
(* errors *)
fun show_names pairs = commas_quote (map fst pairs);
val lhs_tfrees = Term.add_tfreesT newT [];
val rhs_tfrees = Term.add_tfreesT oldT [];
val _ =
(case fold (remove (op =)) lhs_tfrees rhs_tfrees of
[] => ()
| extras => error ("Extra type variables in representing set: " ^ show_names extras));
val _ =
(case Term.add_frees A [] of [] =>
[]
| xs => error ("Illegal variables in representing set: " ^ show_names xs));
(* axiomatization *)
val ((RepC, AbsC), consts_lthy) = lthy
|> Local_Theory.background_theory_result
(Sign.declare_const lthy ((Rep_name, newT --> oldT), NoSyn) ##>>
Sign.declare_const lthy ((Abs_name, oldT --> newT), NoSyn));
val const_dep = Theory.const_dep (Proof_Context.theory_of consts_lthy);
val defs_context = Proof_Context.defs_context consts_lthy;
val A_consts = fold_aterms (fn Const c => insert (op =) (const_dep c) | _ => I) A [];
val A_types =
(fold_types o fold_subtypes) (fn Type t => insert (op =) (Theory.type_dep t) | _ => I) A [];
val typedef_deps = A_consts @ A_types;
val newT_dep = Theory.type_dep (dest_Type newT);
val ((axiom_name, axiom), axiom_lthy) = consts_lthy
|> Local_Theory.background_theory_result
(Thm.add_axiom consts_lthy (type_definition_name, mk_typedef newT oldT RepC AbsC A) ##>
Theory.add_deps defs_context "" newT_dep typedef_deps ##>
Theory.add_deps defs_context "" (const_dep (dest_Const RepC)) [newT_dep] ##>
Theory.add_deps defs_context "" (const_dep (dest_Const AbsC)) [newT_dep]);
val axiom_defs = Theory.defs_of (Proof_Context.theory_of axiom_lthy);
val newT_deps = maps #2 (Defs.get_deps axiom_defs (#1 newT_dep));
val _ =
if null newT_deps orelse overloaded orelse Config.get lthy typedef_overloaded then ()
else
error (Pretty.string_of (Pretty.chunks
[Pretty.paragraph
(Pretty.text "Type definition with open dependencies, use" @
[Pretty.brk 1, Pretty.str "\"", Pretty.keyword1 "typedef", Pretty.brk 1,
Pretty.str "(", Pretty.keyword2 "overloaded", Pretty.str ")\"", Pretty.brk 1] @
Pretty.text "or enable configuration option \"typedef_overloaded\" in the context."),
Pretty.block [Pretty.str " Type:", Pretty.brk 2, Syntax.pretty_typ axiom_lthy newT],
Pretty.block (Pretty.str " Deps:" :: Pretty.brk 2 ::
Pretty.commas
(map (Defs.pretty_entry (Proof_Context.defs_context axiom_lthy)) newT_deps))]))
in ((RepC, AbsC, axiom_name, axiom), axiom_lthy) end;
(* derived bindings *)
type bindings = {Rep_name: binding, Abs_name: binding, type_definition_name: binding};
fun prefix_binding prfx name =
Binding.reset_pos (Binding.qualify_name false name (prfx ^ Binding.name_of name));
fun qualify_binding name = Binding.qualify false (Binding.name_of name);
fun default_bindings name =
{Rep_name = prefix_binding "Rep_" name,
Abs_name = prefix_binding "Abs_" name,
type_definition_name = prefix_binding "type_definition_" name};
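(* Added note: for a typedef named "foo" these defaults yield the bindings
Rep_foo, Abs_foo and type_definition_foo. *)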
fun make_bindings name NONE = default_bindings name
| make_bindings _ (SOME bindings) = bindings;
fun make_morphisms name NONE = default_bindings name
| make_morphisms name (SOME (Rep_name, Abs_name)) =
{Rep_name = qualify_binding name Rep_name,
Abs_name = qualify_binding name Abs_name,
type_definition_name = #type_definition_name (default_bindings name)};
(* prepare_typedef *)
fun prepare_typedef prep_term overloaded (name, raw_args, mx) raw_set opt_bindings lthy =
let
(* rhs *)
val tmp_ctxt = lthy |> fold (Variable.declare_typ o TFree) raw_args;
val set = prep_term tmp_ctxt raw_set;
val tmp_ctxt' = tmp_ctxt |> Variable.declare_term set;
val setT = Term.fastype_of set;
val oldT = HOLogic.dest_setT setT handle TYPE _ =>
error ("Not a set type: " ^ quote (Syntax.string_of_typ lthy setT));
val bname = Binding.name_of name;
val goal = mk_inhabited set;
val goal_pat = mk_inhabited (Var (the_default (bname, 0) (Lexicon.read_variable bname), setT));
(* lhs *)
val args = map (Proof_Context.check_tfree tmp_ctxt') raw_args;
val (newT, typedecl_lthy) = lthy
|> Typedecl.typedecl {final = false} (name, args, mx)
||> Variable.declare_term set;
val Type (full_name, _) = newT;
(* axiomatization *)
val {Rep_name, Abs_name, type_definition_name} = make_bindings name opt_bindings;
val ((RepC, AbsC, axiom_name, typedef), typedef_lthy) = typedecl_lthy
|> primitive_typedef overloaded type_definition_name newT oldT Rep_name Abs_name set;
val alias_lthy = typedef_lthy
|> Local_Theory.const_alias Rep_name (#1 (Term.dest_Const RepC))
|> Local_Theory.const_alias Abs_name (#1 (Term.dest_Const AbsC));
(* result *)
fun note ((b, atts), th) =
Local_Theory.note ((b, atts), [th]) #>> (fn (_, [th']) => th');
fun typedef_result inhabited lthy1 =
let
val ((_, [type_definition]), lthy2) = lthy1
|> Local_Theory.note ((type_definition_name, []), [inhabited RS typedef]);
fun make th = Goal.norm_result lthy2 (type_definition RS th);
val (((((((((Rep, Rep_inverse), Abs_inverse), Rep_inject), Abs_inject), Rep_cases),
Abs_cases), Rep_induct), Abs_induct), lthy3) = lthy2
|> note ((Rep_name, []), make @{thm type_definition.Rep})
||>> note ((Binding.suffix_name "_inverse" Rep_name, []),
make @{thm type_definition.Rep_inverse})
||>> note ((Binding.suffix_name "_inverse" Abs_name, []),
make @{thm type_definition.Abs_inverse})
||>> note ((Binding.suffix_name "_inject" Rep_name, []),
make @{thm type_definition.Rep_inject})
||>> note ((Binding.suffix_name "_inject" Abs_name, []),
make @{thm type_definition.Abs_inject})
||>> note ((Binding.suffix_name "_cases" Rep_name,
[Attrib.case_names [Binding.name_of Rep_name],
- Attrib.internal (K (Induct.cases_pred full_name))]),
+ Attrib.internal \<^here> (K (Induct.cases_pred full_name))]),
make @{thm type_definition.Rep_cases})
||>> note ((Binding.suffix_name "_cases" Abs_name,
[Attrib.case_names [Binding.name_of Abs_name],
- Attrib.internal (K (Induct.cases_type full_name))]),
+ Attrib.internal \<^here> (K (Induct.cases_type full_name))]),
make @{thm type_definition.Abs_cases})
||>> note ((Binding.suffix_name "_induct" Rep_name,
[Attrib.case_names [Binding.name_of Rep_name],
- Attrib.internal (K (Induct.induct_pred full_name))]),
+ Attrib.internal \<^here> (K (Induct.induct_pred full_name))]),
make @{thm type_definition.Rep_induct})
||>> note ((Binding.suffix_name "_induct" Abs_name,
[Attrib.case_names [Binding.name_of Abs_name],
- Attrib.internal (K (Induct.induct_type full_name))]),
+ Attrib.internal \<^here> (K (Induct.induct_type full_name))]),
make @{thm type_definition.Abs_induct});
val info =
({rep_type = oldT, abs_type = newT, Rep_name = #1 (Term.dest_Const RepC),
Abs_name = #1 (Term.dest_Const AbsC), axiom_name = axiom_name},
{inhabited = inhabited, type_definition = type_definition,
Rep = Rep, Rep_inverse = Rep_inverse, Abs_inverse = Abs_inverse,
Rep_inject = Rep_inject, Abs_inject = Abs_inject, Rep_cases = Rep_cases,
Abs_cases = Abs_cases, Rep_induct = Rep_induct, Abs_induct = Abs_induct});
in
lthy3
- |> Local_Theory.declaration {syntax = false, pervasive = true}
+ |> Local_Theory.declaration {syntax = false, pervasive = true, pos = \<^here>}
(fn phi => put_info full_name (transform_info phi info))
|> Typedef_Plugin.data Plugin_Name.default_filter full_name
|> pair (full_name, info)
end;
in ((goal, goal_pat, typedef_result), alias_lthy) end
handle ERROR msg =>
cat_error msg ("The error(s) above occurred in typedef " ^ Binding.print name);
(* add_typedef: tactic interface *)
fun add_typedef overloaded typ set opt_bindings tac lthy =
let
val ((goal, _, typedef_result), lthy') =
prepare_typedef Syntax.check_term overloaded typ set opt_bindings lthy;
val inhabited = Goal.prove lthy' [] [] goal (tac o #context) |> Goal.norm_result lthy';
in typedef_result inhabited lthy' end;
fun add_typedef_global overloaded typ set opt_bindings tac =
Named_Target.theory_map_result (apsnd o transform_info)
(add_typedef overloaded typ set opt_bindings tac)
(* typedef: proof interface *)
local
fun gen_typedef prep_term prep_constraint overloaded (b, raw_args, mx) set opt_bindings lthy =
let
val args = map (apsnd (prep_constraint lthy)) raw_args;
val ((goal, goal_pat, typedef_result), lthy') =
prepare_typedef prep_term overloaded (b, args, mx) set opt_bindings lthy;
fun after_qed [[th]] = snd o typedef_result th;
in Proof.theorem NONE after_qed [[(goal, [goal_pat])]] lthy' end;
in
val typedef = gen_typedef Syntax.check_term (K I);
val typedef_cmd = gen_typedef Syntax.read_term Typedecl.read_constraint;
end;
(** outer syntax **)
val _ =
Outer_Syntax.local_theory_to_proof \<^command_keyword>\<open>typedef\<close>
"HOL type definition (requires non-emptiness proof)"
(Parse_Spec.overloaded -- Parse.type_args_constrained -- Parse.binding -- Parse.opt_mixfix --
(\<^keyword>\<open>=\<close> |-- Parse.term) --
Scan.option (\<^keyword>\<open>morphisms\<close> |-- Parse.!!! (Parse.binding -- Parse.binding))
>> (fn (((((overloaded, vs), t), mx), A), opt_morphs) => fn lthy =>
typedef_cmd {overloaded = overloaded} (t, vs, mx) A
(SOME (make_morphisms t opt_morphs)) lthy));
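(* Added note (illustrative only): a typical invocation of this command is
typedef nonempty = "{n::nat. 0 < n}" morphisms Rep_ne Abs_ne <proof>
where <proof> discharges the non-emptiness goal produced by mk_inhabited above. *)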
val overloaded = typedef_overloaded;
(** theory export **)
val _ =
(Theory.setup o Thy_Info.add_presentation) (fn context => fn thy =>
if Export_Theory.export_enabled context then
let
val parent_spaces = map Sign.type_space (Theory.parents_of thy);
val typedefs =
Name_Space.dest_table (#types (Type.rep_tsig (Sign.tsig_of thy)))
|> maps (fn (name, _) =>
if exists (fn space => Name_Space.declared space name) parent_spaces then []
else
get_info_global thy name
|> map (fn ({rep_type, abs_type, Rep_name, Abs_name, axiom_name}, _) =>
(name, (rep_type, (abs_type, (Rep_name, (Abs_name, axiom_name)))))));
val encode =
let open XML.Encode Term_XML.Encode
in list (pair string (pair typ (pair typ (pair string (pair string string))))) end;
in
if null typedefs then ()
else Export_Theory.export_body thy "typedefs" (encode typedefs)
end
else ());
end;
diff --git a/src/HOL/Topological_Spaces.thy b/src/HOL/Topological_Spaces.thy
--- a/src/HOL/Topological_Spaces.thy
+++ b/src/HOL/Topological_Spaces.thy
@@ -1,3970 +1,3959 @@
(* Title: HOL/Topological_Spaces.thy
Author: Brian Huffman
Author: Johannes Hölzl
*)
section \<open>Topological Spaces\<close>
theory Topological_Spaces
imports Main
begin
named_theorems continuous_intros "structural introduction rules for continuity"
subsection \<open>Topological space\<close>
class "open" =
fixes "open" :: "'a set \<Rightarrow> bool"
class topological_space = "open" +
assumes open_UNIV [simp, intro]: "open UNIV"
assumes open_Int [intro]: "open S \<Longrightarrow> open T \<Longrightarrow> open (S \<inter> T)"
assumes open_Union [intro]: "\<forall>S\<in>K. open S \<Longrightarrow> open (\<Union>K)"
begin
definition closed :: "'a set \<Rightarrow> bool"
where "closed S \<longleftrightarrow> open (- S)"
lemma open_empty [continuous_intros, intro, simp]: "open {}"
using open_Union [of "{}"] by simp
lemma open_Un [continuous_intros, intro]: "open S \<Longrightarrow> open T \<Longrightarrow> open (S \<union> T)"
using open_Union [of "{S, T}"] by simp
lemma open_UN [continuous_intros, intro]: "\<forall>x\<in>A. open (B x) \<Longrightarrow> open (\<Union>x\<in>A. B x)"
using open_Union [of "B ` A"] by simp
lemma open_Inter [continuous_intros, intro]: "finite S \<Longrightarrow> \<forall>T\<in>S. open T \<Longrightarrow> open (\<Inter>S)"
by (induction set: finite) auto
lemma open_INT [continuous_intros, intro]: "finite A \<Longrightarrow> \<forall>x\<in>A. open (B x) \<Longrightarrow> open (\<Inter>x\<in>A. B x)"
using open_Inter [of "B ` A"] by simp
lemma openI:
assumes "\<And>x. x \<in> S \<Longrightarrow> \<exists>T. open T \<and> x \<in> T \<and> T \<subseteq> S"
shows "open S"
proof -
have "open (\<Union>{T. open T \<and> T \<subseteq> S})" by auto
moreover have "\<Union>{T. open T \<and> T \<subseteq> S} = S" by (auto dest!: assms)
ultimately show "open S" by simp
qed
lemma open_subopen: "open S \<longleftrightarrow> (\<forall>x\<in>S. \<exists>T. open T \<and> x \<in> T \<and> T \<subseteq> S)"
by (auto intro: openI)
lemma closed_empty [continuous_intros, intro, simp]: "closed {}"
unfolding closed_def by simp
lemma closed_Un [continuous_intros, intro]: "closed S \<Longrightarrow> closed T \<Longrightarrow> closed (S \<union> T)"
unfolding closed_def by auto
lemma closed_UNIV [continuous_intros, intro, simp]: "closed UNIV"
unfolding closed_def by simp
lemma closed_Int [continuous_intros, intro]: "closed S \<Longrightarrow> closed T \<Longrightarrow> closed (S \<inter> T)"
unfolding closed_def by auto
lemma closed_INT [continuous_intros, intro]: "\<forall>x\<in>A. closed (B x) \<Longrightarrow> closed (\<Inter>x\<in>A. B x)"
unfolding closed_def by auto
lemma closed_Inter [continuous_intros, intro]: "\<forall>S\<in>K. closed S \<Longrightarrow> closed (\<Inter>K)"
unfolding closed_def uminus_Inf by auto
lemma closed_Union [continuous_intros, intro]: "finite S \<Longrightarrow> \<forall>T\<in>S. closed T \<Longrightarrow> closed (\<Union>S)"
by (induct set: finite) auto
lemma closed_UN [continuous_intros, intro]:
"finite A \<Longrightarrow> \<forall>x\<in>A. closed (B x) \<Longrightarrow> closed (\<Union>x\<in>A. B x)"
using closed_Union [of "B ` A"] by simp
lemma open_closed: "open S \<longleftrightarrow> closed (- S)"
by (simp add: closed_def)
lemma closed_open: "closed S \<longleftrightarrow> open (- S)"
by (rule closed_def)
lemma open_Diff [continuous_intros, intro]: "open S \<Longrightarrow> closed T \<Longrightarrow> open (S - T)"
by (simp add: closed_open Diff_eq open_Int)
lemma closed_Diff [continuous_intros, intro]: "closed S \<Longrightarrow> open T \<Longrightarrow> closed (S - T)"
by (simp add: open_closed Diff_eq closed_Int)
lemma open_Compl [continuous_intros, intro]: "closed S \<Longrightarrow> open (- S)"
by (simp add: closed_open)
lemma closed_Compl [continuous_intros, intro]: "open S \<Longrightarrow> closed (- S)"
by (simp add: open_closed)
lemma open_Collect_neg: "closed {x. P x} \<Longrightarrow> open {x. \<not> P x}"
unfolding Collect_neg_eq by (rule open_Compl)
lemma open_Collect_conj:
assumes "open {x. P x}" "open {x. Q x}"
shows "open {x. P x \<and> Q x}"
using open_Int[OF assms] by (simp add: Int_def)
lemma open_Collect_disj:
assumes "open {x. P x}" "open {x. Q x}"
shows "open {x. P x \<or> Q x}"
using open_Un[OF assms] by (simp add: Un_def)
lemma open_Collect_ex: "(\<And>i. open {x. P i x}) \<Longrightarrow> open {x. \<exists>i. P i x}"
using open_UN[of UNIV "\<lambda>i. {x. P i x}"] unfolding Collect_ex_eq by simp
lemma open_Collect_imp: "closed {x. P x} \<Longrightarrow> open {x. Q x} \<Longrightarrow> open {x. P x \<longrightarrow> Q x}"
unfolding imp_conv_disj by (intro open_Collect_disj open_Collect_neg)
lemma open_Collect_const: "open {x. P}"
by (cases P) auto
lemma closed_Collect_neg: "open {x. P x} \<Longrightarrow> closed {x. \<not> P x}"
unfolding Collect_neg_eq by (rule closed_Compl)
lemma closed_Collect_conj:
assumes "closed {x. P x}" "closed {x. Q x}"
shows "closed {x. P x \<and> Q x}"
using closed_Int[OF assms] by (simp add: Int_def)
lemma closed_Collect_disj:
assumes "closed {x. P x}" "closed {x. Q x}"
shows "closed {x. P x \<or> Q x}"
using closed_Un[OF assms] by (simp add: Un_def)
lemma closed_Collect_all: "(\<And>i. closed {x. P i x}) \<Longrightarrow> closed {x. \<forall>i. P i x}"
using closed_INT[of UNIV "\<lambda>i. {x. P i x}"] by (simp add: Collect_all_eq)
lemma closed_Collect_imp: "open {x. P x} \<Longrightarrow> closed {x. Q x} \<Longrightarrow> closed {x. P x \<longrightarrow> Q x}"
unfolding imp_conv_disj by (intro closed_Collect_disj closed_Collect_neg)
lemma closed_Collect_const: "closed {x. P}"
by (cases P) auto
end
subsection \<open>Hausdorff and other separation properties\<close>
class t0_space = topological_space +
assumes t0_space: "x \<noteq> y \<Longrightarrow> \<exists>U. open U \<and> \<not> (x \<in> U \<longleftrightarrow> y \<in> U)"
class t1_space = topological_space +
assumes t1_space: "x \<noteq> y \<Longrightarrow> \<exists>U. open U \<and> x \<in> U \<and> y \<notin> U"
instance t1_space \<subseteq> t0_space
by standard (fast dest: t1_space)
context t1_space begin
lemma separation_t1: "x \<noteq> y \<longleftrightarrow> (\<exists>U. open U \<and> x \<in> U \<and> y \<notin> U)"
using t1_space[of x y] by blast
lemma closed_singleton [iff]: "closed {a}"
proof -
let ?T = "\<Union>{S. open S \<and> a \<notin> S}"
have "open ?T"
by (simp add: open_Union)
also have "?T = - {a}"
by (auto simp add: set_eq_iff separation_t1)
finally show "closed {a}"
by (simp only: closed_def)
qed
lemma closed_insert [continuous_intros, simp]:
assumes "closed S"
shows "closed (insert a S)"
proof -
from closed_singleton assms have "closed ({a} \<union> S)"
by (rule closed_Un)
then show "closed (insert a S)"
by simp
qed
lemma finite_imp_closed: "finite S \<Longrightarrow> closed S"
by (induct pred: finite) simp_all
end
text \<open>T2 spaces are also known as Hausdorff spaces.\<close>
class t2_space = topological_space +
assumes hausdorff: "x \<noteq> y \<Longrightarrow> \<exists>U V. open U \<and> open V \<and> x \<in> U \<and> y \<in> V \<and> U \<inter> V = {}"
instance t2_space \<subseteq> t1_space
by standard (fast dest: hausdorff)
lemma (in t2_space) separation_t2: "x \<noteq> y \<longleftrightarrow> (\<exists>U V. open U \<and> open V \<and> x \<in> U \<and> y \<in> V \<and> U \<inter> V = {})"
using hausdorff [of x y] by blast
lemma (in t0_space) separation_t0: "x \<noteq> y \<longleftrightarrow> (\<exists>U. open U \<and> \<not> (x \<in> U \<longleftrightarrow> y \<in> U))"
using t0_space [of x y] by blast
text \<open>A classical separation axiom for topological spaces, the T3 axiom -- also called regularity:
if a point is not in a closed set, then there are open sets separating them.\<close>
class t3_space = t2_space +
assumes t3_space: "closed S \<Longrightarrow> y \<notin> S \<Longrightarrow> \<exists>U V. open U \<and> open V \<and> y \<in> U \<and> S \<subseteq> V \<and> U \<inter> V = {}"
text \<open>A classical separation axiom for topological spaces, the T4 axiom -- also called normality:
if two closed sets are disjoint, then there are open sets separating them.\<close>
class t4_space = t2_space +
assumes t4_space: "closed S \<Longrightarrow> closed T \<Longrightarrow> S \<inter> T = {} \<Longrightarrow> \<exists>U V. open U \<and> open V \<and> S \<subseteq> U \<and> T \<subseteq> V \<and> U \<inter> V = {}"
text \<open>T4 is stronger than T3, and weaker than the metric space axioms.\<close>
instance t4_space \<subseteq> t3_space
proof
fix S and y::'a assume "closed S" "y \<notin> S"
then show "\<exists>U V. open U \<and> open V \<and> y \<in> U \<and> S \<subseteq> V \<and> U \<inter> V = {}"
using t4_space[of "{y}" S] by auto
qed
text \<open>A perfect space is a topological space with no isolated points.\<close>
class perfect_space = topological_space +
assumes not_open_singleton: "\<not> open {x}"
lemma (in perfect_space) UNIV_not_singleton: "UNIV \<noteq> {x}"
for x::'a
by (metis (no_types) open_UNIV not_open_singleton)
subsection \<open>Generators for topologies\<close>
inductive generate_topology :: "'a set set \<Rightarrow> 'a set \<Rightarrow> bool" for S :: "'a set set"
where
UNIV: "generate_topology S UNIV"
| Int: "generate_topology S (a \<inter> b)" if "generate_topology S a" and "generate_topology S b"
| UN: "generate_topology S (\<Union>K)" if "(\<And>k. k \<in> K \<Longrightarrow> generate_topology S k)"
| Basis: "generate_topology S s" if "s \<in> S"
hide_fact (open) UNIV Int UN Basis
lemma generate_topology_Union:
"(\<And>k. k \<in> I \<Longrightarrow> generate_topology S (K k)) \<Longrightarrow> generate_topology S (\<Union>k\<in>I. K k)"
using generate_topology.UN [of "K ` I"] by auto
lemma topological_space_generate_topology: "class.topological_space (generate_topology S)"
by standard (auto intro: generate_topology.intros)
subsection \<open>Order topologies\<close>
class order_topology = order + "open" +
assumes open_generated_order: "open = generate_topology (range (\<lambda>a. {..< a}) \<union> range (\<lambda>a. {a <..}))"
begin
subclass topological_space
unfolding open_generated_order
by (rule topological_space_generate_topology)
lemma open_greaterThan [continuous_intros, simp]: "open {a <..}"
unfolding open_generated_order by (auto intro: generate_topology.Basis)
lemma open_lessThan [continuous_intros, simp]: "open {..< a}"
unfolding open_generated_order by (auto intro: generate_topology.Basis)
lemma open_greaterThanLessThan [continuous_intros, simp]: "open {a <..< b}"
unfolding greaterThanLessThan_eq by (simp add: open_Int)
end
class linorder_topology = linorder + order_topology
lemma closed_atMost [continuous_intros, simp]: "closed {..a}"
for a :: "'a::linorder_topology"
by (simp add: closed_open)
lemma closed_atLeast [continuous_intros, simp]: "closed {a..}"
for a :: "'a::linorder_topology"
by (simp add: closed_open)
lemma closed_atLeastAtMost [continuous_intros, simp]: "closed {a..b}"
for a b :: "'a::linorder_topology"
proof -
have "{a .. b} = {a ..} \<inter> {.. b}"
by auto
then show ?thesis
by (simp add: closed_Int)
qed
lemma (in order) less_separate:
assumes "x < y"
shows "\<exists>a b. x \<in> {..< a} \<and> y \<in> {b <..} \<and> {..< a} \<inter> {b <..} = {}"
proof (cases "\<exists>z. x < z \<and> z < y")
case True
then obtain z where "x < z \<and> z < y" ..
then have "x \<in> {..< z} \<and> y \<in> {z <..} \<and> {z <..} \<inter> {..< z} = {}"
by auto
then show ?thesis by blast
next
case False
with \<open>x < y\<close> have "x \<in> {..< y}" "y \<in> {x <..}" "{x <..} \<inter> {..< y} = {}"
by auto
then show ?thesis by blast
qed
instance linorder_topology \<subseteq> t2_space
proof
fix x y :: 'a
show "x \<noteq> y \<Longrightarrow> \<exists>U V. open U \<and> open V \<and> x \<in> U \<and> y \<in> V \<and> U \<inter> V = {}"
using less_separate [of x y] less_separate [of y x]
by (elim neqE; metis open_lessThan open_greaterThan Int_commute)
qed
lemma (in linorder_topology) open_right:
assumes "open S" "x \<in> S"
and gt_ex: "x < y"
shows "\<exists>b>x. {x ..< b} \<subseteq> S"
using assms unfolding open_generated_order
proof induct
case UNIV
then show ?case by blast
next
case (Int A B)
then obtain a b where "a > x" "{x ..< a} \<subseteq> A" "b > x" "{x ..< b} \<subseteq> B"
by auto
then show ?case
by (auto intro!: exI[of _ "min a b"])
next
case UN
then show ?case by blast
next
case Basis
then show ?case
by (fastforce intro: exI[of _ y] gt_ex)
qed
lemma (in linorder_topology) open_left:
assumes "open S" "x \<in> S"
and lt_ex: "y < x"
shows "\<exists>b<x. {b <.. x} \<subseteq> S"
using assms unfolding open_generated_order
proof induction
case UNIV
then show ?case by blast
next
case (Int A B)
then obtain a b where "a < x" "{a <.. x} \<subseteq> A" "b < x" "{b <.. x} \<subseteq> B"
by auto
then show ?case
by (auto intro!: exI[of _ "max a b"])
next
case UN
then show ?case by blast
next
case Basis
then show ?case
by (fastforce intro: exI[of _ y] lt_ex)
qed
subsection \<open>Setup some topologies\<close>
subsubsection \<open>Boolean is an order topology\<close>
class discrete_topology = topological_space +
assumes open_discrete: "\<And>A. open A"
instance discrete_topology < t2_space
proof
fix x y :: 'a
assume "x \<noteq> y"
then show "\<exists>U V. open U \<and> open V \<and> x \<in> U \<and> y \<in> V \<and> U \<inter> V = {}"
by (intro exI[of _ "{_}"]) (auto intro!: open_discrete)
qed
instantiation bool :: linorder_topology
begin
definition open_bool :: "bool set \<Rightarrow> bool"
where "open_bool = generate_topology (range (\<lambda>a. {..< a}) \<union> range (\<lambda>a. {a <..}))"
instance
by standard (rule open_bool_def)
end
instance bool :: discrete_topology
proof
fix A :: "bool set"
have *: "{False <..} = {True}" "{..< True} = {False}"
by auto
have "A = UNIV \<or> A = {} \<or> A = {False <..} \<or> A = {..< True}"
using subset_UNIV[of A] unfolding UNIV_bool * by blast
then show "open A"
by auto
qed
instantiation nat :: linorder_topology
begin
definition open_nat :: "nat set \<Rightarrow> bool"
where "open_nat = generate_topology (range (\<lambda>a. {..< a}) \<union> range (\<lambda>a. {a <..}))"
instance
by standard (rule open_nat_def)
end
instance nat :: discrete_topology
proof
fix A :: "nat set"
have "open {n}" for n :: nat
proof (cases n)
case 0
moreover have "{0} = {..<1::nat}"
by auto
ultimately show ?thesis
by auto
next
case (Suc n')
then have "{n} = {..<Suc n} \<inter> {n' <..}"
by auto
with Suc show ?thesis
by (auto intro: open_lessThan open_greaterThan)
qed
then have "open (\<Union>a\<in>A. {a})"
by (intro open_UN) auto
then show "open A"
by simp
qed
instantiation int :: linorder_topology
begin
definition open_int :: "int set \<Rightarrow> bool"
where "open_int = generate_topology (range (\<lambda>a. {..< a}) \<union> range (\<lambda>a. {a <..}))"
instance
by standard (rule open_int_def)
end
instance int :: discrete_topology
proof
fix A :: "int set"
have "{..<i + 1} \<inter> {i-1 <..} = {i}" for i :: int
by auto
then have "open {i}" for i :: int
using open_Int[OF open_lessThan[of "i + 1"] open_greaterThan[of "i - 1"]] by auto
then have "open (\<Union>a\<in>A. {a})"
by (intro open_UN) auto
then show "open A"
by simp
qed
subsubsection \<open>Topological filters\<close>
definition (in topological_space) nhds :: "'a \<Rightarrow> 'a filter"
where "nhds a = (INF S\<in>{S. open S \<and> a \<in> S}. principal S)"
definition (in topological_space) at_within :: "'a \<Rightarrow> 'a set \<Rightarrow> 'a filter"
("at (_)/ within (_)" [1000, 60] 60)
where "at a within s = inf (nhds a) (principal (s - {a}))"
abbreviation (in topological_space) at :: "'a \<Rightarrow> 'a filter" ("at")
where "at x \<equiv> at x within (CONST UNIV)"
abbreviation (in order_topology) at_right :: "'a \<Rightarrow> 'a filter"
where "at_right x \<equiv> at x within {x <..}"
abbreviation (in order_topology) at_left :: "'a \<Rightarrow> 'a filter"
where "at_left x \<equiv> at x within {..< x}"
lemma (in topological_space) nhds_generated_topology:
"open = generate_topology T \<Longrightarrow> nhds x = (INF S\<in>{S\<in>T. x \<in> S}. principal S)"
unfolding nhds_def
proof (safe intro!: antisym INF_greatest)
fix S
assume "generate_topology T S" "x \<in> S"
then show "(INF S\<in>{S \<in> T. x \<in> S}. principal S) \<le> principal S"
by induct
(auto intro: INF_lower order_trans simp: inf_principal[symmetric] simp del: inf_principal)
qed (auto intro!: INF_lower intro: generate_topology.intros)
lemma (in topological_space) eventually_nhds:
"eventually P (nhds a) \<longleftrightarrow> (\<exists>S. open S \<and> a \<in> S \<and> (\<forall>x\<in>S. P x))"
unfolding nhds_def by (subst eventually_INF_base) (auto simp: eventually_principal)
lemma eventually_eventually:
"eventually (\<lambda>y. eventually P (nhds y)) (nhds x) = eventually P (nhds x)"
by (auto simp: eventually_nhds)
lemma (in topological_space) eventually_nhds_in_open:
"open s \<Longrightarrow> x \<in> s \<Longrightarrow> eventually (\<lambda>y. y \<in> s) (nhds x)"
by (subst eventually_nhds) blast
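(* Illustrative sketch (not part of the original theory): eventually_nhds_in_open
   instantiated at a concrete point and open set, here on the type nat. *)
lemma "\<forall>\<^sub>F m in nhds (2::nat). m \<in> {..<4}"
  by (intro eventually_nhds_in_open) (auto intro: open_lessThan)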
lemma (in topological_space) eventually_nhds_x_imp_x: "eventually P (nhds x) \<Longrightarrow> P x"
by (subst (asm) eventually_nhds) blast
lemma (in topological_space) nhds_neq_bot [simp]: "nhds a \<noteq> bot"
by (simp add: trivial_limit_def eventually_nhds)
lemma (in t1_space) t1_space_nhds: "x \<noteq> y \<Longrightarrow> (\<forall>\<^sub>F x in nhds x. x \<noteq> y)"
by (drule t1_space) (auto simp: eventually_nhds)
lemma (in topological_space) nhds_discrete_open: "open {x} \<Longrightarrow> nhds x = principal {x}"
by (auto simp: nhds_def intro!: antisym INF_greatest INF_lower2[of "{x}"])
lemma (in discrete_topology) nhds_discrete: "nhds x = principal {x}"
by (simp add: nhds_discrete_open open_discrete)
lemma (in discrete_topology) at_discrete: "at x within S = bot"
unfolding at_within_def nhds_discrete by simp
lemma (in discrete_topology) tendsto_discrete:
"filterlim (f :: 'b \<Rightarrow> 'a) (nhds y) F \<longleftrightarrow> eventually (\<lambda>x. f x = y) F"
by (auto simp: nhds_discrete filterlim_principal)
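(* Illustrative sketches (not part of the original theory): on a discrete type
   such as nat the neighbourhood filter of a point constrains only the point
   itself, so convergence along a filter collapses to "eventually equal". *)
lemma "eventually P (nhds (m::nat)) \<longleftrightarrow> P m"
  by (simp add: nhds_discrete eventually_principal)
lemma "filterlim X (nhds (l::nat)) sequentially \<longleftrightarrow> (\<forall>\<^sub>F n in sequentially. X n = l)"
  by (rule tendsto_discrete)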
lemma (in topological_space) at_within_eq:
"at x within s = (INF S\<in>{S. open S \<and> x \<in> S}. principal (S \<inter> s - {x}))"
unfolding nhds_def at_within_def
by (subst INF_inf_const2[symmetric]) (auto simp: Diff_Int_distrib)
lemma (in topological_space) eventually_at_filter:
"eventually P (at a within s) \<longleftrightarrow> eventually (\<lambda>x. x \<noteq> a \<longrightarrow> x \<in> s \<longrightarrow> P x) (nhds a)"
by (simp add: at_within_def eventually_inf_principal imp_conjL[symmetric] conj_commute)
lemma (in topological_space) at_le: "s \<subseteq> t \<Longrightarrow> at x within s \<le> at x within t"
unfolding at_within_def by (intro inf_mono) auto
lemma (in topological_space) eventually_at_topological:
"eventually P (at a within s) \<longleftrightarrow> (\<exists>S. open S \<and> a \<in> S \<and> (\<forall>x\<in>S. x \<noteq> a \<longrightarrow> x \<in> s \<longrightarrow> P x))"
by (simp add: eventually_nhds eventually_at_filter)
lemma eventually_at_in_open:
assumes "open A" "x \<in> A"
shows "eventually (\<lambda>y. y \<in> A - {x}) (at x)"
using assms eventually_at_topological by blast
lemma eventually_at_in_open':
assumes "open A" "x \<in> A"
shows "eventually (\<lambda>y. y \<in> A) (at x)"
using assms eventually_at_topological by blast
lemma (in topological_space) at_within_open: "a \<in> S \<Longrightarrow> open S \<Longrightarrow> at a within S = at a"
unfolding filter_eq_iff eventually_at_topological by (metis open_Int Int_iff UNIV_I)
lemma (in topological_space) at_within_open_NO_MATCH:
"a \<in> s \<Longrightarrow> open s \<Longrightarrow> NO_MATCH UNIV s \<Longrightarrow> at a within s = at a"
by (simp only: at_within_open)
lemma (in topological_space) at_within_open_subset:
"a \<in> S \<Longrightarrow> open S \<Longrightarrow> S \<subseteq> T \<Longrightarrow> at a within T = at a"
by (metis at_le at_within_open dual_order.antisym subset_UNIV)
lemma (in topological_space) at_within_nhd:
assumes "x \<in> S" "open S" "T \<inter> S - {x} = U \<inter> S - {x}"
shows "at x within T = at x within U"
unfolding filter_eq_iff eventually_at_filter
proof (intro allI eventually_subst)
have "eventually (\<lambda>x. x \<in> S) (nhds x)"
using \<open>x \<in> S\<close> \<open>open S\<close> by (auto simp: eventually_nhds)
then show "\<forall>\<^sub>F n in nhds x. (n \<noteq> x \<longrightarrow> n \<in> T \<longrightarrow> P n) = (n \<noteq> x \<longrightarrow> n \<in> U \<longrightarrow> P n)" for P
by eventually_elim (insert \<open>T \<inter> S - {x} = U \<inter> S - {x}\<close>, blast)
qed
lemma (in topological_space) at_within_empty [simp]: "at a within {} = bot"
unfolding at_within_def by simp
lemma (in topological_space) at_within_union:
"at x within (S \<union> T) = sup (at x within S) (at x within T)"
unfolding filter_eq_iff eventually_sup eventually_at_filter
by (auto elim!: eventually_rev_mp)
lemma (in topological_space) at_eq_bot_iff: "at a = bot \<longleftrightarrow> open {a}"
unfolding trivial_limit_def eventually_at_topological
by (metis UNIV_I empty_iff is_singletonE is_singletonI' singleton_iff)
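(* Illustrative sketch (not part of the original theory): on a discrete type
   such as int every singleton is open, so by at_eq_bot_iff the punctured
   neighbourhood filter is trivial. *)
lemma "at (i::int) = bot"
  by (simp add: at_eq_bot_iff open_discrete)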
lemma (in t1_space) eventually_neq_at_within:
"eventually (\<lambda>w. w \<noteq> x) (at z within A)"
by (smt (verit, ccfv_threshold) eventually_True eventually_at_topological separation_t1)
lemma (in perfect_space) at_neq_bot [simp]: "at a \<noteq> bot"
by (simp add: at_eq_bot_iff not_open_singleton)
lemma (in order_topology) nhds_order:
"nhds x = inf (INF a\<in>{x <..}. principal {..< a}) (INF a\<in>{..< x}. principal {a <..})"
proof -
have 1: "{S \<in> range lessThan \<union> range greaterThan. x \<in> S} =
(\<lambda>a. {..< a}) ` {x <..} \<union> (\<lambda>a. {a <..}) ` {..< x}"
by auto
show ?thesis
by (simp only: nhds_generated_topology[OF open_generated_order] INF_union 1 INF_image comp_def)
qed
lemma (in topological_space) filterlim_at_within_If:
assumes "filterlim f G (at x within (A \<inter> {x. P x}))"
and "filterlim g G (at x within (A \<inter> {x. \<not>P x}))"
shows "filterlim (\<lambda>x. if P x then f x else g x) G (at x within A)"
proof (rule filterlim_If)
note assms(1)
also have "at x within (A \<inter> {x. P x}) = inf (nhds x) (principal (A \<inter> Collect P - {x}))"
by (simp add: at_within_def)
also have "A \<inter> Collect P - {x} = (A - {x}) \<inter> Collect P"
by blast
also have "inf (nhds x) (principal \<dots>) = inf (at x within A) (principal (Collect P))"
by (simp add: at_within_def inf_assoc)
finally show "filterlim f G (inf (at x within A) (principal (Collect P)))" .
next
note assms(2)
also have "at x within (A \<inter> {x. \<not> P x}) = inf (nhds x) (principal (A \<inter> {x. \<not> P x} - {x}))"
by (simp add: at_within_def)
also have "A \<inter> {x. \<not> P x} - {x} = (A - {x}) \<inter> {x. \<not> P x}"
by blast
also have "inf (nhds x) (principal \<dots>) = inf (at x within A) (principal {x. \<not> P x})"
by (simp add: at_within_def inf_assoc)
finally show "filterlim g G (inf (at x within A) (principal {x. \<not> P x}))" .
qed
lemma (in topological_space) filterlim_at_If:
assumes "filterlim f G (at x within {x. P x})"
and "filterlim g G (at x within {x. \<not>P x})"
shows "filterlim (\<lambda>x. if P x then f x else g x) G (at x)"
using assms by (intro filterlim_at_within_If) simp_all
lemma (in linorder_topology) at_within_order:
assumes "UNIV \<noteq> {x}"
shows "at x within s =
inf (INF a\<in>{x <..}. principal ({..< a} \<inter> s - {x}))
(INF a\<in>{..< x}. principal ({a <..} \<inter> s - {x}))"
proof (cases "{x <..} = {}" "{..< x} = {}" rule: case_split [case_product case_split])
case True_True
have "UNIV = {..< x} \<union> {x} \<union> {x <..}"
by auto
with assms True_True show ?thesis
by auto
qed (auto simp del: inf_principal simp: at_within_def nhds_order Int_Diff
inf_principal[symmetric] INF_inf_const2 inf_sup_aci[where 'a="'a filter"])
lemma (in linorder_topology) at_left_eq:
"y < x \<Longrightarrow> at_left x = (INF a\<in>{..< x}. principal {a <..< x})"
by (subst at_within_order)
(auto simp: greaterThan_Int_greaterThan greaterThanLessThan_eq[symmetric] min.absorb2 INF_constant
intro!: INF_lower2 inf_absorb2)
lemma (in linorder_topology) eventually_at_left:
"y < x \<Longrightarrow> eventually P (at_left x) \<longleftrightarrow> (\<exists>b<x. \<forall>y>b. y < x \<longrightarrow> P y)"
unfolding at_left_eq
by (subst eventually_INF_base) (auto simp: eventually_principal Ball_def)
lemma (in linorder_topology) at_right_eq:
"x < y \<Longrightarrow> at_right x = (INF a\<in>{x <..}. principal {x <..< a})"
by (subst at_within_order)
(auto simp: lessThan_Int_lessThan greaterThanLessThan_eq[symmetric] max.absorb2 INF_constant Int_commute
intro!: INF_lower2 inf_absorb1)
lemma (in linorder_topology) eventually_at_right:
"x < y \<Longrightarrow> eventually P (at_right x) \<longleftrightarrow> (\<exists>b>x. \<forall>y>x. y < b \<longrightarrow> P y)"
unfolding at_right_eq
by (subst eventually_INF_base) (auto simp: eventually_principal Ball_def)
lemma eventually_at_right_less: "\<forall>\<^sub>F y in at_right (x::'a::{linorder_topology, no_top}). x < y"
using gt_ex[of x] eventually_at_right[of x] by auto
lemma trivial_limit_at_right_top: "at_right (top::_::{order_top,linorder_topology}) = bot"
by (auto simp: filter_eq_iff eventually_at_topological)
lemma trivial_limit_at_left_bot: "at_left (bot::_::{order_bot,linorder_topology}) = bot"
by (auto simp: filter_eq_iff eventually_at_topological)
lemma trivial_limit_at_left_real [simp]: "\<not> trivial_limit (at_left x)"
for x :: "'a::{no_bot,dense_order,linorder_topology}"
using lt_ex [of x]
by safe (auto simp add: trivial_limit_def eventually_at_left dest: dense)
lemma trivial_limit_at_right_real [simp]: "\<not> trivial_limit (at_right x)"
for x :: "'a::{no_top,dense_order,linorder_topology}"
using gt_ex[of x]
by safe (auto simp add: trivial_limit_def eventually_at_right dest: dense)
lemma (in linorder_topology) at_eq_sup_left_right: "at x = sup (at_left x) (at_right x)"
by (auto simp: eventually_at_filter filter_eq_iff eventually_sup
elim: eventually_elim2 eventually_mono)
lemma (in linorder_topology) eventually_at_split:
"eventually P (at x) \<longleftrightarrow> eventually P (at_left x) \<and> eventually P (at_right x)"
by (subst at_eq_sup_left_right) (simp add: eventually_sup)
lemma (in order_topology) eventually_at_leftI:
assumes "\<And>x. x \<in> {a<..<b} \<Longrightarrow> P x" "a < b"
shows "eventually P (at_left b)"
using assms unfolding eventually_at_topological by (intro exI[of _ "{a<..}"]) auto
lemma (in order_topology) eventually_at_rightI:
assumes "\<And>x. x \<in> {a<..<b} \<Longrightarrow> P x" "a < b"
shows "eventually P (at_right a)"
using assms unfolding eventually_at_topological by (intro exI[of _ "{..<b}"]) auto
lemma eventually_filtercomap_nhds:
"eventually P (filtercomap f (nhds x)) \<longleftrightarrow> (\<exists>S. open S \<and> x \<in> S \<and> (\<forall>x. f x \<in> S \<longrightarrow> P x))"
unfolding eventually_filtercomap eventually_nhds by auto
lemma eventually_filtercomap_at_topological:
"eventually P (filtercomap f (at A within B)) \<longleftrightarrow>
(\<exists>S. open S \<and> A \<in> S \<and> (\<forall>x. f x \<in> S \<inter> B - {A} \<longrightarrow> P x))" (is "?lhs = ?rhs")
unfolding at_within_def filtercomap_inf eventually_inf_principal filtercomap_principal
eventually_filtercomap_nhds eventually_principal by blast
lemma eventually_at_right_field:
"eventually P (at_right x) \<longleftrightarrow> (\<exists>b>x. \<forall>y>x. y < b \<longrightarrow> P y)"
for x :: "'a::{linordered_field, linorder_topology}"
using linordered_field_no_ub[rule_format, of x]
by (auto simp: eventually_at_right)
lemma eventually_at_left_field:
"eventually P (at_left x) \<longleftrightarrow> (\<exists>b<x. \<forall>y>b. y < x \<longrightarrow> P y)"
for x :: "'a::{linordered_field, linorder_topology}"
using linordered_field_no_lb[rule_format, of x]
by (auto simp: eventually_at_left)
lemma filtermap_nhds_eq_imp_filtermap_at_eq:
assumes "filtermap f (nhds z) = nhds (f z)"
assumes "eventually (\<lambda>x. f x = f z \<longrightarrow> x = z) (at z)"
shows "filtermap f (at z) = at (f z)"
proof (rule filter_eqI)
fix P :: "'a \<Rightarrow> bool"
have "eventually P (filtermap f (at z)) \<longleftrightarrow> (\<forall>\<^sub>F x in nhds z. x \<noteq> z \<longrightarrow> P (f x))"
by (simp add: eventually_filtermap eventually_at_filter)
also have "\<dots> \<longleftrightarrow> (\<forall>\<^sub>F x in nhds z. f x \<noteq> f z \<longrightarrow> P (f x))"
by (rule eventually_cong [OF assms(2)[unfolded eventually_at_filter]]) auto
also have "\<dots> \<longleftrightarrow> (\<forall>\<^sub>F x in filtermap f (nhds z). x \<noteq> f z \<longrightarrow> P x)"
by (simp add: eventually_filtermap)
also have "filtermap f (nhds z) = nhds (f z)"
by (rule assms)
also have "(\<forall>\<^sub>F x in nhds (f z). x \<noteq> f z \<longrightarrow> P x) \<longleftrightarrow> (\<forall>\<^sub>F x in at (f z). P x)"
by (simp add: eventually_at_filter)
finally show "eventually P (filtermap f (at z)) = eventually P (at (f z))" .
qed
subsubsection \<open>Tendsto\<close>
abbreviation (in topological_space)
tendsto :: "('b \<Rightarrow> 'a) \<Rightarrow> 'a \<Rightarrow> 'b filter \<Rightarrow> bool" (infixr "\<longlongrightarrow>" 55)
where "(f \<longlongrightarrow> l) F \<equiv> filterlim f (nhds l) F"
definition (in t2_space) Lim :: "'f filter \<Rightarrow> ('f \<Rightarrow> 'a) \<Rightarrow> 'a"
where "Lim A f = (THE l. (f \<longlongrightarrow> l) A)"
lemma (in topological_space) tendsto_eq_rhs: "(f \<longlongrightarrow> x) F \<Longrightarrow> x = y \<Longrightarrow> (f \<longlongrightarrow> y) F"
by simp
named_theorems tendsto_intros "introduction rules for tendsto"
setup \<open>
Global_Theory.add_thms_dynamic (\<^binding>\<open>tendsto_eq_intros\<close>,
fn context =>
Named_Theorems.get (Context.proof_of context) \<^named_theorems>\<open>tendsto_intros\<close>
|> map_filter (try (fn thm => @{thm tendsto_eq_rhs} OF [thm])))
\<close>
context topological_space begin
lemma tendsto_def:
"(f \<longlongrightarrow> l) F \<longleftrightarrow> (\<forall>S. open S \<longrightarrow> l \<in> S \<longrightarrow> eventually (\<lambda>x. f x \<in> S) F)"
unfolding nhds_def filterlim_INF filterlim_principal by auto
lemma tendsto_cong: "(f \<longlongrightarrow> c) F \<longleftrightarrow> (g \<longlongrightarrow> c) F" if "eventually (\<lambda>x. f x = g x) F"
by (rule filterlim_cong [OF refl refl that])
lemma tendsto_mono: "F \<le> F' \<Longrightarrow> (f \<longlongrightarrow> l) F' \<Longrightarrow> (f \<longlongrightarrow> l) F"
unfolding tendsto_def le_filter_def by fast
lemma tendsto_ident_at [tendsto_intros, simp, intro]: "((\<lambda>x. x) \<longlongrightarrow> a) (at a within s)"
by (auto simp: tendsto_def eventually_at_topological)
lemma tendsto_const [tendsto_intros, simp, intro]: "((\<lambda>x. k) \<longlongrightarrow> k) F"
by (simp add: tendsto_def)
lemma filterlim_at:
"(LIM x F. f x :> at b within s) \<longleftrightarrow> eventually (\<lambda>x. f x \<in> s \<and> f x \<noteq> b) F \<and> (f \<longlongrightarrow> b) F"
by (simp add: at_within_def filterlim_inf filterlim_principal conj_commute)
lemma (in -)
assumes "filterlim f (nhds L) F"
shows tendsto_imp_filterlim_at_right:
"eventually (\<lambda>x. f x > L) F \<Longrightarrow> filterlim f (at_right L) F"
and tendsto_imp_filterlim_at_left:
"eventually (\<lambda>x. f x < L) F \<Longrightarrow> filterlim f (at_left L) F"
using assms by (auto simp: filterlim_at elim: eventually_mono)
lemma filterlim_at_withinI:
assumes "filterlim f (nhds c) F"
assumes "eventually (\<lambda>x. f x \<in> A - {c}) F"
shows "filterlim f (at c within A) F"
using assms by (simp add: filterlim_at)
lemma filterlim_atI:
assumes "filterlim f (nhds c) F"
assumes "eventually (\<lambda>x. f x \<noteq> c) F"
shows "filterlim f (at c) F"
using assms by (intro filterlim_at_withinI) simp_all
lemma topological_tendstoI:
"(\<And>S. open S \<Longrightarrow> l \<in> S \<Longrightarrow> eventually (\<lambda>x. f x \<in> S) F) \<Longrightarrow> (f \<longlongrightarrow> l) F"
by (auto simp: tendsto_def)
lemma topological_tendstoD:
"(f \<longlongrightarrow> l) F \<Longrightarrow> open S \<Longrightarrow> l \<in> S \<Longrightarrow> eventually (\<lambda>x. f x \<in> S) F"
by (auto simp: tendsto_def)
lemma tendsto_bot [simp]: "(f \<longlongrightarrow> a) bot"
by (simp add: tendsto_def)
lemma tendsto_eventually: "eventually (\<lambda>x. f x = l) net \<Longrightarrow> ((\<lambda>x. f x) \<longlongrightarrow> l) net"
by (rule topological_tendstoI) (auto elim: eventually_mono)
(* Contributed by Dominique Unruh *)
lemma tendsto_principal_singleton[simp]:
shows "(f \<longlongrightarrow> f x) (principal {x})"
unfolding tendsto_def eventually_principal by simp
end
lemma (in topological_space) filterlim_within_subset:
"filterlim f l (at x within S) \<Longrightarrow> T \<subseteq> S \<Longrightarrow> filterlim f l (at x within T)"
by (blast intro: filterlim_mono at_le)
lemmas tendsto_within_subset = filterlim_within_subset
lemma (in order_topology) order_tendsto_iff:
"(f \<longlongrightarrow> x) F \<longleftrightarrow> (\<forall>l<x. eventually (\<lambda>x. l < f x) F) \<and> (\<forall>u>x. eventually (\<lambda>x. f x < u) F)"
by (auto simp: nhds_order filterlim_inf filterlim_INF filterlim_principal)
lemma (in order_topology) order_tendstoI:
"(\<And>a. a < y \<Longrightarrow> eventually (\<lambda>x. a < f x) F) \<Longrightarrow> (\<And>a. y < a \<Longrightarrow> eventually (\<lambda>x. f x < a) F) \<Longrightarrow>
(f \<longlongrightarrow> y) F"
by (auto simp: order_tendsto_iff)
lemma (in order_topology) order_tendstoD:
assumes "(f \<longlongrightarrow> y) F"
shows "a < y \<Longrightarrow> eventually (\<lambda>x. a < f x) F"
and "y < a \<Longrightarrow> eventually (\<lambda>x. f x < a) F"
using assms by (auto simp: order_tendsto_iff)
lemma (in linorder_topology) tendsto_max[tendsto_intros]:
assumes X: "(X \<longlongrightarrow> x) net"
and Y: "(Y \<longlongrightarrow> y) net"
shows "((\<lambda>x. max (X x) (Y x)) \<longlongrightarrow> max x y) net"
proof (rule order_tendstoI)
fix a
assume "a < max x y"
then show "eventually (\<lambda>x. a < max (X x) (Y x)) net"
using order_tendstoD(1)[OF X, of a] order_tendstoD(1)[OF Y, of a]
by (auto simp: less_max_iff_disj elim: eventually_mono)
next
fix a
assume "max x y < a"
then show "eventually (\<lambda>x. max (X x) (Y x) < a) net"
using order_tendstoD(2)[OF X, of a] order_tendstoD(2)[OF Y, of a]
by (auto simp: eventually_conj_iff)
qed
lemma (in linorder_topology) tendsto_min[tendsto_intros]:
assumes X: "(X \<longlongrightarrow> x) net"
and Y: "(Y \<longlongrightarrow> y) net"
shows "((\<lambda>x. min (X x) (Y x)) \<longlongrightarrow> min x y) net"
proof (rule order_tendstoI)
fix a
assume "a < min x y"
then show "eventually (\<lambda>x. a < min (X x) (Y x)) net"
using order_tendstoD(1)[OF X, of a] order_tendstoD(1)[OF Y, of a]
by (auto simp: eventually_conj_iff)
next
fix a
assume "min x y < a"
then show "eventually (\<lambda>x. min (X x) (Y x) < a) net"
using order_tendstoD(2)[OF X, of a] order_tendstoD(2)[OF Y, of a]
by (auto simp: min_less_iff_disj elim: eventually_mono)
qed
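(* Illustrative sketch (not part of the original theory): the tendsto_intros
   rules compose, e.g. tendsto_max and tendsto_const show that clamping a
   convergent function from below at a constant c preserves the limit. *)
lemma
  fixes f :: "'b \<Rightarrow> 'a::linorder_topology"
  assumes "(f \<longlongrightarrow> l) F"
  shows "((\<lambda>x. max (f x) c) \<longlongrightarrow> max l c) F"
  by (intro tendsto_max assms tendsto_const)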
lemma (in order_topology)
assumes "a < b"
shows at_within_Icc_at_right: "at a within {a..b} = at_right a"
and at_within_Icc_at_left: "at b within {a..b} = at_left b"
using order_tendstoD(2)[OF tendsto_ident_at assms, of "{a<..}"]
using order_tendstoD(1)[OF tendsto_ident_at assms, of "{..<b}"]
by (auto intro!: order_class.order_antisym filter_leI
simp: eventually_at_filter less_le
elim: eventually_elim2)
lemma (in order_topology) at_within_Icc_at: "a < x \<Longrightarrow> x < b \<Longrightarrow> at x within {a..b} = at x"
by (rule at_within_open_subset[where S="{a<..<b}"]) auto
lemma (in t2_space) tendsto_unique:
assumes "F \<noteq> bot"
and "(f \<longlongrightarrow> a) F"
and "(f \<longlongrightarrow> b) F"
shows "a = b"
proof (rule ccontr)
assume "a \<noteq> b"
obtain U V where "open U" "open V" "a \<in> U" "b \<in> V" "U \<inter> V = {}"
using hausdorff [OF \<open>a \<noteq> b\<close>] by fast
have "eventually (\<lambda>x. f x \<in> U) F"
using \<open>(f \<longlongrightarrow> a) F\<close> \<open>open U\<close> \<open>a \<in> U\<close> by (rule topological_tendstoD)
moreover
have "eventually (\<lambda>x. f x \<in> V) F"
using \<open>(f \<longlongrightarrow> b) F\<close> \<open>open V\<close> \<open>b \<in> V\<close> by (rule topological_tendstoD)
ultimately
have "eventually (\<lambda>x. False) F"
proof eventually_elim
case (elim x)
then have "f x \<in> U \<inter> V" by simp
with \<open>U \<inter> V = {}\<close> show ?case by simp
qed
with \<open>\<not> trivial_limit F\<close> show "False"
by (simp add: trivial_limit_def)
qed
lemma (in t2_space) tendsto_const_iff:
fixes a b :: 'a
assumes "\<not> trivial_limit F"
shows "((\<lambda>x. a) \<longlongrightarrow> b) F \<longleftrightarrow> a = b"
by (auto intro!: tendsto_unique [OF assms tendsto_const])
lemma (in t2_space) tendsto_unique':
assumes "F \<noteq> bot"
shows "\<exists>\<^sub>\<le>\<^sub>1l. (f \<longlongrightarrow> l) F"
using Uniq_def assms local.tendsto_unique by fastforce
lemma Lim_in_closed_set:
assumes "closed S" "eventually (\<lambda>x. f(x) \<in> S) F" "F \<noteq> bot" "(f \<longlongrightarrow> l) F"
shows "l \<in> S"
proof (rule ccontr)
assume "l \<notin> S"
with \<open>closed S\<close> have "open (- S)" "l \<in> - S"
by (simp_all add: open_Compl)
with assms(4) have "eventually (\<lambda>x. f x \<in> - S) F"
by (rule topological_tendstoD)
with assms(2) have "eventually (\<lambda>x. False) F"
by (rule eventually_elim2) simp
with assms(3) show "False"
by (simp add: eventually_False)
qed
lemma (in t3_space) nhds_closed:
assumes "x \<in> A" and "open A"
shows "\<exists>A'. x \<in> A' \<and> closed A' \<and> A' \<subseteq> A \<and> eventually (\<lambda>y. y \<in> A') (nhds x)"
proof -
from assms have "\<exists>U V. open U \<and> open V \<and> x \<in> U \<and> - A \<subseteq> V \<and> U \<inter> V = {}"
by (intro t3_space) auto
then obtain U V where UV: "open U" "open V" "x \<in> U" "-A \<subseteq> V" "U \<inter> V = {}"
by auto
have "eventually (\<lambda>y. y \<in> U) (nhds x)"
using \<open>open U\<close> and \<open>x \<in> U\<close> by (intro eventually_nhds_in_open)
hence "eventually (\<lambda>y. y \<in> -V) (nhds x)"
by eventually_elim (use UV in auto)
with UV show ?thesis by (intro exI[of _ "-V"]) auto
qed
lemma (in order_topology) increasing_tendsto:
assumes bdd: "eventually (\<lambda>n. f n \<le> l) F"
and en: "\<And>x. x < l \<Longrightarrow> eventually (\<lambda>n. x < f n) F"
shows "(f \<longlongrightarrow> l) F"
using assms by (intro order_tendstoI) (auto elim!: eventually_mono)
lemma (in order_topology) decreasing_tendsto:
assumes bdd: "eventually (\<lambda>n. l \<le> f n) F"
and en: "\<And>x. l < x \<Longrightarrow> eventually (\<lambda>n. f n < x) F"
shows "(f \<longlongrightarrow> l) F"
using assms by (intro order_tendstoI) (auto elim!: eventually_mono)
lemma (in order_topology) tendsto_sandwich:
assumes ev: "eventually (\<lambda>n. f n \<le> g n) net" "eventually (\<lambda>n. g n \<le> h n) net"
assumes lim: "(f \<longlongrightarrow> c) net" "(h \<longlongrightarrow> c) net"
shows "(g \<longlongrightarrow> c) net"
proof (rule order_tendstoI)
fix a
show "a < c \<Longrightarrow> eventually (\<lambda>x. a < g x) net"
using order_tendstoD[OF lim(1), of a] ev by (auto elim: eventually_elim2)
next
fix a
show "c < a \<Longrightarrow> eventually (\<lambda>x. g x < a) net"
using order_tendstoD[OF lim(2), of a] ev by (auto elim: eventually_elim2)
qed
lemma (in t1_space) limit_frequently_eq:
assumes "F \<noteq> bot"
and "frequently (\<lambda>x. f x = c) F"
and "(f \<longlongrightarrow> d) F"
shows "d = c"
proof (rule ccontr)
assume "d \<noteq> c"
from t1_space[OF this] obtain U where "open U" "d \<in> U" "c \<notin> U"
by blast
with assms have "eventually (\<lambda>x. f x \<in> U) F"
unfolding tendsto_def by blast
then have "eventually (\<lambda>x. f x \<noteq> c) F"
by eventually_elim (insert \<open>c \<notin> U\<close>, blast)
with assms(2) show False
unfolding frequently_def by contradiction
qed
lemma (in t1_space) tendsto_imp_eventually_ne:
assumes "(f \<longlongrightarrow> c) F" "c \<noteq> c'"
shows "eventually (\<lambda>z. f z \<noteq> c') F"
proof (cases "F=bot")
case True
thus ?thesis by auto
next
case False
show ?thesis
proof (rule ccontr)
assume "\<not> eventually (\<lambda>z. f z \<noteq> c') F"
then have "frequently (\<lambda>z. f z = c') F"
by (simp add: frequently_def)
from limit_frequently_eq[OF False this \<open>(f \<longlongrightarrow> c) F\<close>] and \<open>c \<noteq> c'\<close> show False
by contradiction
qed
qed
lemma (in linorder_topology) tendsto_le:
assumes F: "\<not> trivial_limit F"
and x: "(f \<longlongrightarrow> x) F"
and y: "(g \<longlongrightarrow> y) F"
and ev: "eventually (\<lambda>x. g x \<le> f x) F"
shows "y \<le> x"
proof (rule ccontr)
assume "\<not> y \<le> x"
with less_separate[of x y] obtain a b where xy: "x < a" "b < y" "{..<a} \<inter> {b<..} = {}"
by (auto simp: not_le)
then have "eventually (\<lambda>x. f x < a) F" "eventually (\<lambda>x. b < g x) F"
using x y by (auto intro: order_tendstoD)
with ev have "eventually (\<lambda>x. False) F"
by eventually_elim (insert xy, fastforce)
with F show False
by (simp add: eventually_False)
qed
lemma (in linorder_topology) tendsto_lowerbound:
assumes x: "(f \<longlongrightarrow> x) F"
and ev: "eventually (\<lambda>i. a \<le> f i) F"
and F: "\<not> trivial_limit F"
shows "a \<le> x"
using F x tendsto_const ev by (rule tendsto_le)
lemma (in linorder_topology) tendsto_upperbound:
assumes x: "(f \<longlongrightarrow> x) F"
and ev: "eventually (\<lambda>i. a \<ge> f i) F"
and F: "\<not> trivial_limit F"
shows "a \<ge> x"
by (rule tendsto_le [OF F tendsto_const x ev])
lemma filterlim_at_within_not_equal:
fixes f::"'a \<Rightarrow> 'b::t2_space"
assumes "filterlim f (at a within s) F"
shows "eventually (\<lambda>w. f w\<in>s \<and> f w \<noteq>b) F"
proof (cases "a=b")
case True
then show ?thesis using assms by (simp add: filterlim_at)
next
case False
from hausdorff[OF this] obtain U V where UV:"open U" "open V" "a \<in> U" "b \<in> V" "U \<inter> V = {}"
by auto
have "(f \<longlongrightarrow> a) F" using assms filterlim_at by auto
then have "\<forall>\<^sub>F x in F. f x \<in> U" using UV unfolding tendsto_def by auto
moreover have "\<forall>\<^sub>F x in F. f x \<in> s \<and> f x\<noteq>a" using assms filterlim_at by auto
ultimately show ?thesis
apply eventually_elim
using UV by auto
qed
subsubsection \<open>Rules about \<^const>\<open>Lim\<close>\<close>
lemma tendsto_Lim: "\<not> trivial_limit net \<Longrightarrow> (f \<longlongrightarrow> l) net \<Longrightarrow> Lim net f = l"
unfolding Lim_def using tendsto_unique [of net f] by auto
lemma Lim_ident_at: "\<not> trivial_limit (at x within s) \<Longrightarrow> Lim (at x within s) (\<lambda>x. x) = x"
by (rule tendsto_Lim[OF _ tendsto_ident_at]) auto
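(* Illustrative sketch (not part of the original theory): combined with the
   simp rule at_neq_bot, Lim_ident_at computes the limit of the identity at
   any point of a perfect T2 space. *)
lemma "Lim (at (x::'a::{perfect_space,t2_space})) (\<lambda>y. y) = x"
  by (simp add: Lim_ident_at)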
lemma Lim_cong:
assumes "eventually (\<lambda>x. f x = g x) F" "F = G"
shows "Lim F f = Lim G g"
proof (cases "(\<exists>c. (f \<longlongrightarrow> c) F) \<and> F \<noteq> bot")
case True
then obtain c where c: "(f \<longlongrightarrow> c) F"
by blast
hence "Lim F f = c"
using True by (intro tendsto_Lim) auto
moreover have "(f \<longlongrightarrow> c) F \<longleftrightarrow> (g \<longlongrightarrow> c) G"
using assms by (intro filterlim_cong) auto
with True c assms have "Lim G g = c"
by (intro tendsto_Lim) auto
ultimately show ?thesis
by simp
next
case False
show ?thesis
proof (cases "F = bot")
case True
thus ?thesis using assms
by (auto simp: Topological_Spaces.Lim_def)
next
case False
have "(f \<longlongrightarrow> c) F \<longleftrightarrow> (g \<longlongrightarrow> c) G" for c
using assms by (intro filterlim_cong) auto
thus ?thesis
by (auto simp: Topological_Spaces.Lim_def)
qed
qed
lemma eventually_Lim_ident_at:
"(\<forall>\<^sub>F y in at x within X. P (Lim (at x within X) (\<lambda>x. x)) y) \<longleftrightarrow>
(\<forall>\<^sub>F y in at x within X. P x y)" for x::"'a::t2_space"
by (cases "at x within X = bot") (auto simp: Lim_ident_at)
lemma filterlim_at_bot_at_right:
fixes f :: "'a::linorder_topology \<Rightarrow> 'b::linorder"
assumes mono: "\<And>x y. Q x \<Longrightarrow> Q y \<Longrightarrow> x \<le> y \<Longrightarrow> f x \<le> f y"
and bij: "\<And>x. P x \<Longrightarrow> f (g x) = x" "\<And>x. P x \<Longrightarrow> Q (g x)"
and Q: "eventually Q (at_right a)"
and bound: "\<And>b. Q b \<Longrightarrow> a < b"
and P: "eventually P at_bot"
shows "filterlim f at_bot (at_right a)"
proof -
from P obtain x where x: "\<And>y. y \<le> x \<Longrightarrow> P y"
unfolding eventually_at_bot_linorder by auto
show ?thesis
proof (intro filterlim_at_bot_le[THEN iffD2] allI impI)
fix z
assume "z \<le> x"
with x have "P z" by auto
have "eventually (\<lambda>x. x \<le> g z) (at_right a)"
using bound[OF bij(2)[OF \<open>P z\<close>]]
unfolding eventually_at_right[OF bound[OF bij(2)[OF \<open>P z\<close>]]]
by (auto intro!: exI[of _ "g z"])
with Q show "eventually (\<lambda>x. f x \<le> z) (at_right a)"
by eventually_elim (metis bij \<open>P z\<close> mono)
qed
qed
lemma filterlim_at_top_at_left:
fixes f :: "'a::linorder_topology \<Rightarrow> 'b::linorder"
assumes mono: "\<And>x y. Q x \<Longrightarrow> Q y \<Longrightarrow> x \<le> y \<Longrightarrow> f x \<le> f y"
and bij: "\<And>x. P x \<Longrightarrow> f (g x) = x" "\<And>x. P x \<Longrightarrow> Q (g x)"
and Q: "eventually Q (at_left a)"
and bound: "\<And>b. Q b \<Longrightarrow> b < a"
and P: "eventually P at_top"
shows "filterlim f at_top (at_left a)"
proof -
from P obtain x where x: "\<And>y. x \<le> y \<Longrightarrow> P y"
unfolding eventually_at_top_linorder by auto
show ?thesis
proof (intro filterlim_at_top_ge[THEN iffD2] allI impI)
fix z
assume "x \<le> z"
with x have "P z" by auto
have "eventually (\<lambda>x. g z \<le> x) (at_left a)"
using bound[OF bij(2)[OF \<open>P z\<close>]]
unfolding eventually_at_left[OF bound[OF bij(2)[OF \<open>P z\<close>]]]
by (auto intro!: exI[of _ "g z"])
with Q show "eventually (\<lambda>x. z \<le> f x) (at_left a)"
by eventually_elim (metis bij \<open>P z\<close> mono)
qed
qed
lemma filterlim_split_at:
"filterlim f F (at_left x) \<Longrightarrow> filterlim f F (at_right x) \<Longrightarrow>
filterlim f F (at x)"
for x :: "'a::linorder_topology"
by (subst at_eq_sup_left_right) (rule filterlim_sup)
lemma filterlim_at_split:
"filterlim f F (at x) \<longleftrightarrow> filterlim f F (at_left x) \<and> filterlim f F (at_right x)"
for x :: "'a::linorder_topology"
by (subst at_eq_sup_left_right) (simp add: filterlim_def filtermap_sup)
lemma eventually_nhds_top:
fixes P :: "'a :: {order_top,linorder_topology} \<Rightarrow> bool"
and b :: 'a
assumes "b < top"
shows "eventually P (nhds top) \<longleftrightarrow> (\<exists>b<top. (\<forall>z. b < z \<longrightarrow> P z))"
unfolding eventually_nhds
proof safe
fix S :: "'a set"
assume "open S" "top \<in> S"
note open_left[OF this \<open>b < top\<close>]
moreover assume "\<forall>s\<in>S. P s"
ultimately show "\<exists>b<top. \<forall>z>b. P z"
by (auto simp: subset_eq Ball_def)
next
fix b
assume "b < top" "\<forall>z>b. P z"
then show "\<exists>S. open S \<and> top \<in> S \<and> (\<forall>xa\<in>S. P xa)"
by (intro exI[of _ "{b <..}"]) auto
qed
lemma tendsto_at_within_iff_tendsto_nhds:
"(g \<longlongrightarrow> g l) (at l within S) \<longleftrightarrow> (g \<longlongrightarrow> g l) (inf (nhds l) (principal S))"
unfolding tendsto_def eventually_at_filter eventually_inf_principal
by (intro ext all_cong imp_cong) (auto elim!: eventually_mono)
subsection \<open>Limits on sequences\<close>
abbreviation (in topological_space)
LIMSEQ :: "[nat \<Rightarrow> 'a, 'a] \<Rightarrow> bool" ("((_)/ \<longlonglongrightarrow> (_))" [60, 60] 60)
where "X \<longlonglongrightarrow> L \<equiv> (X \<longlongrightarrow> L) sequentially"
abbreviation (in t2_space) lim :: "(nat \<Rightarrow> 'a) \<Rightarrow> 'a"
where "lim X \<equiv> Lim sequentially X"
definition (in topological_space) convergent :: "(nat \<Rightarrow> 'a) \<Rightarrow> bool"
where "convergent X = (\<exists>L. X \<longlonglongrightarrow> L)"
lemma lim_def: "lim X = (THE L. X \<longlonglongrightarrow> L)"
unfolding Lim_def ..
lemma lim_explicit:
"f \<longlonglongrightarrow> f0 \<longleftrightarrow> (\<forall>S. open S \<longrightarrow> f0 \<in> S \<longrightarrow> (\<exists>N. \<forall>n\<ge>N. f n \<in> S))"
unfolding tendsto_def eventually_sequentially by auto
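(* Illustrative sketch (not part of the original theory): on a discrete type
   such as nat, tendsto_discrete and eventually_sequentially reduce convergence
   of a sequence to being eventually constant. *)
lemma "(X \<longlonglongrightarrow> (l::nat)) \<longleftrightarrow> (\<exists>N. \<forall>n\<ge>N. X n = l)"
  by (simp add: tendsto_discrete eventually_sequentially)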
subsection \<open>Monotone sequences and subsequences\<close>
text \<open>
Definition of monotonicity.
The use of disjunction here complicates proofs considerably.
One alternative is to add a Boolean argument to indicate the direction; a sketch of that variant appears after the definitions below.
Another is to develop the notions of increasing and decreasing first.
\<close>
definition monoseq :: "(nat \<Rightarrow> 'a::order) \<Rightarrow> bool"
where "monoseq X \<longleftrightarrow> (\<forall>m. \<forall>n\<ge>m. X m \<le> X n) \<or> (\<forall>m. \<forall>n\<ge>m. X n \<le> X m)"
abbreviation incseq :: "(nat \<Rightarrow> 'a::order) \<Rightarrow> bool"
where "incseq X \<equiv> mono X"
lemma incseq_def: "incseq X \<longleftrightarrow> (\<forall>m. \<forall>n\<ge>m. X n \<ge> X m)"
unfolding mono_def ..
abbreviation decseq :: "(nat \<Rightarrow> 'a::order) \<Rightarrow> bool"
where "decseq X \<equiv> antimono X"
lemma decseq_def: "decseq X \<longleftrightarrow> (\<forall>m. \<forall>n\<ge>m. X n \<le> X m)"
unfolding antimono_def ..
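(* Illustrative sketch (hypothetical, not part of the library): the
   Boolean-argument alternative mentioned in the text above could be phrased
   as follows, with the flag selecting the direction of monotonicity. *)
definition monoseq' :: "bool \<Rightarrow> (nat \<Rightarrow> 'a::order) \<Rightarrow> bool"
  where "monoseq' up X \<longleftrightarrow> (if up then incseq X else decseq X)"
lemma "monoseq' up X \<Longrightarrow> monoseq X"
  by (auto simp: monoseq'_def monoseq_def incseq_def decseq_def split: if_splits)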
subsubsection \<open>Definition of subsequence\<close>
(* For compatibility with the old "subseq" *)
lemma strict_mono_leD: "strict_mono r \<Longrightarrow> m \<le> n \<Longrightarrow> r m \<le> r n"
by (erule (1) monoD [OF strict_mono_mono])
lemma strict_mono_id: "strict_mono id"
by (simp add: strict_mono_def)
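(* Illustrative sketch (not part of the original theory): the successor
   function is a strictly monotone reindexing, i.e. dropping the first element
   of a sequence yields a subsequence. *)
lemma "strict_mono Suc"
  by (simp add: strict_mono_def)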
lemma incseq_SucI: "(\<And>n. X n \<le> X (Suc n)) \<Longrightarrow> incseq X"
using lift_Suc_mono_le[of X] by (auto simp: incseq_def)
lemma incseqD: "incseq f \<Longrightarrow> i \<le> j \<Longrightarrow> f i \<le> f j"
by (auto simp: incseq_def)
lemma incseq_SucD: "incseq A \<Longrightarrow> A i \<le> A (Suc i)"
using incseqD[of A i "Suc i"] by auto
lemma incseq_Suc_iff: "incseq f \<longleftrightarrow> (\<forall>n. f n \<le> f (Suc n))"
by (auto intro: incseq_SucI dest: incseq_SucD)
lemma incseq_const[simp, intro]: "incseq (\<lambda>x. k)"
unfolding incseq_def by auto
lemma decseq_SucI: "(\<And>n. X (Suc n) \<le> X n) \<Longrightarrow> decseq X"
using order.lift_Suc_mono_le[OF dual_order, of X] by (auto simp: decseq_def)
lemma decseqD: "decseq f \<Longrightarrow> i \<le> j \<Longrightarrow> f j \<le> f i"
by (auto simp: decseq_def)
lemma decseq_SucD: "decseq A \<Longrightarrow> A (Suc i) \<le> A i"
using decseqD[of A i "Suc i"] by auto
lemma decseq_Suc_iff: "decseq f \<longleftrightarrow> (\<forall>n. f (Suc n) \<le> f n)"
by (auto intro: decseq_SucI dest: decseq_SucD)
lemma decseq_const[simp, intro]: "decseq (\<lambda>x. k)"
unfolding decseq_def by auto
lemma monoseq_iff: "monoseq X \<longleftrightarrow> incseq X \<or> decseq X"
unfolding monoseq_def incseq_def decseq_def ..
lemma monoseq_Suc: "monoseq X \<longleftrightarrow> (\<forall>n. X n \<le> X (Suc n)) \<or> (\<forall>n. X (Suc n) \<le> X n)"
unfolding monoseq_iff incseq_Suc_iff decseq_Suc_iff ..
lemma monoI1: "\<forall>m. \<forall>n \<ge> m. X m \<le> X n \<Longrightarrow> monoseq X"
by (simp add: monoseq_def)
lemma monoI2: "\<forall>m. \<forall>n \<ge> m. X n \<le> X m \<Longrightarrow> monoseq X"
by (simp add: monoseq_def)
lemma mono_SucI1: "\<forall>n. X n \<le> X (Suc n) \<Longrightarrow> monoseq X"
by (simp add: monoseq_Suc)
lemma mono_SucI2: "\<forall>n. X (Suc n) \<le> X n \<Longrightarrow> monoseq X"
by (simp add: monoseq_Suc)
lemma monoseq_minus:
fixes a :: "nat \<Rightarrow> 'a::ordered_ab_group_add"
assumes "monoseq a"
shows "monoseq (\<lambda> n. - a n)"
proof (cases "\<forall>m. \<forall>n \<ge> m. a m \<le> a n")
case True
then have "\<forall>m. \<forall>n \<ge> m. - a n \<le> - a m" by auto
then show ?thesis by (rule monoI2)
next
case False
then have "\<forall>m. \<forall>n \<ge> m. - a m \<le> - a n"
using \<open>monoseq a\<close>[unfolded monoseq_def] by auto
then show ?thesis by (rule monoI1)
qed
subsubsection \<open>Subsequence (alternative definition, e.g. Hoskins)\<close>
-lemma strict_mono_Suc_iff: "strict_mono f \<longleftrightarrow> (\<forall>n. f n < f (Suc n))"
-proof (intro iffI strict_monoI)
- assume *: "\<forall>n. f n < f (Suc n)"
- fix m n :: nat assume "m < n"
- thus "f m < f n"
- by (induction rule: less_Suc_induct) (use * in auto)
-qed (auto simp: strict_mono_def)
-
-lemma strict_mono_add: "strict_mono (\<lambda>n::'a::linordered_semidom. n + k)"
- by (auto simp: strict_mono_def)
-
text \<open>For any sequence, there is a monotonic subsequence.\<close>
lemma seq_monosub:
fixes s :: "nat \<Rightarrow> 'a::linorder"
shows "\<exists>f. strict_mono f \<and> monoseq (\<lambda>n. (s (f n)))"
proof (cases "\<forall>n. \<exists>p>n. \<forall>m\<ge>p. s m \<le> s p")
case True
then have "\<exists>f. \<forall>n. (\<forall>m\<ge>f n. s m \<le> s (f n)) \<and> f n < f (Suc n)"
by (intro dependent_nat_choice) (auto simp: conj_commute)
then obtain f :: "nat \<Rightarrow> nat"
where f: "strict_mono f" and mono: "\<And>n m. f n \<le> m \<Longrightarrow> s m \<le> s (f n)"
by (auto simp: strict_mono_Suc_iff)
then have "incseq f"
unfolding strict_mono_Suc_iff incseq_Suc_iff by (auto intro: less_imp_le)
then have "monoseq (\<lambda>n. s (f n))"
by (auto simp add: incseq_def intro!: mono monoI2)
with f show ?thesis
by auto
next
case False
then obtain N where N: "p > N \<Longrightarrow> \<exists>m>p. s p < s m" for p
by (force simp: not_le le_less)
have "\<exists>f. \<forall>n. N < f n \<and> f n < f (Suc n) \<and> s (f n) \<le> s (f (Suc n))"
proof (intro dependent_nat_choice)
fix x
assume "N < x" with N[of x]
show "\<exists>y>N. x < y \<and> s x \<le> s y"
by (auto intro: less_trans)
qed auto
then show ?thesis
by (auto simp: monoseq_iff incseq_Suc_iff strict_mono_Suc_iff)
qed
lemma seq_suble:
assumes sf: "strict_mono (f :: nat \<Rightarrow> nat)"
shows "n \<le> f n"
proof (induct n)
case 0
show ?case by simp
next
case (Suc n)
with sf [unfolded strict_mono_Suc_iff, rule_format, of n] have "n < f (Suc n)"
by arith
then show ?case by arith
qed
lemma eventually_subseq:
"strict_mono r \<Longrightarrow> eventually P sequentially \<Longrightarrow> eventually (\<lambda>n. P (r n)) sequentially"
unfolding eventually_sequentially by (metis seq_suble le_trans)
lemma not_eventually_sequentiallyD:
assumes "\<not> eventually P sequentially"
shows "\<exists>r::nat\<Rightarrow>nat. strict_mono r \<and> (\<forall>n. \<not> P (r n))"
proof -
from assms have "\<forall>n. \<exists>m\<ge>n. \<not> P m"
unfolding eventually_sequentially by (simp add: not_less)
then obtain r where "\<And>n. r n \<ge> n" "\<And>n. \<not> P (r n)"
by (auto simp: choice_iff)
then show ?thesis
by (auto intro!: exI[of _ "\<lambda>n. r (((Suc \<circ> r) ^^ Suc n) 0)"]
simp: less_eq_Suc_le strict_mono_Suc_iff)
qed
lemma sequentially_offset:
assumes "eventually (\<lambda>i. P i) sequentially"
shows "eventually (\<lambda>i. P (i + k)) sequentially"
using assms by (rule eventually_sequentially_seg [THEN iffD2])
lemma seq_offset_neg:
"(f \<longlongrightarrow> l) sequentially \<Longrightarrow> ((\<lambda>i. f(i - k)) \<longlongrightarrow> l) sequentially"
apply (erule filterlim_compose)
apply (simp add: filterlim_def le_sequentially eventually_filtermap eventually_sequentially, arith)
done
lemma filterlim_subseq: "strict_mono f \<Longrightarrow> filterlim f sequentially sequentially"
unfolding filterlim_iff by (metis eventually_subseq)
lemma strict_mono_o: "strict_mono r \<Longrightarrow> strict_mono s \<Longrightarrow> strict_mono (r \<circ> s)"
unfolding strict_mono_def by simp
lemma strict_mono_compose: "strict_mono r \<Longrightarrow> strict_mono s \<Longrightarrow> strict_mono (\<lambda>x. r (s x))"
using strict_mono_o[of r s] by (simp add: o_def)
lemma incseq_imp_monoseq: "incseq X \<Longrightarrow> monoseq X"
by (simp add: incseq_def monoseq_def)
lemma decseq_imp_monoseq: "decseq X \<Longrightarrow> monoseq X"
by (simp add: decseq_def monoseq_def)
lemma decseq_eq_incseq: "decseq X = incseq (\<lambda>n. - X n)"
for X :: "nat \<Rightarrow> 'a::ordered_ab_group_add"
by (simp add: decseq_def incseq_def)
lemma INT_decseq_offset:
assumes "decseq F"
shows "(\<Inter>i. F i) = (\<Inter>i\<in>{n..}. F i)"
proof safe
fix x i
assume x: "x \<in> (\<Inter>i\<in>{n..}. F i)"
show "x \<in> F i"
proof cases
from x have "x \<in> F n" by auto
also assume "i \<le> n" with \<open>decseq F\<close> have "F n \<subseteq> F i"
unfolding decseq_def by simp
finally show ?thesis .
qed (insert x, simp)
qed auto
lemma LIMSEQ_const_iff: "(\<lambda>n. k) \<longlonglongrightarrow> l \<longleftrightarrow> k = l"
for k l :: "'a::t2_space"
using trivial_limit_sequentially by (rule tendsto_const_iff)
lemma LIMSEQ_SUP: "incseq X \<Longrightarrow> X \<longlonglongrightarrow> (SUP i. X i :: 'a::{complete_linorder,linorder_topology})"
by (intro increasing_tendsto)
(auto simp: SUP_upper less_SUP_iff incseq_def eventually_sequentially intro: less_le_trans)
lemma LIMSEQ_INF: "decseq X \<Longrightarrow> X \<longlonglongrightarrow> (INF i. X i :: 'a::{complete_linorder,linorder_topology})"
by (intro decreasing_tendsto)
(auto simp: INF_lower INF_less_iff decseq_def eventually_sequentially intro: le_less_trans)
lemma LIMSEQ_ignore_initial_segment: "f \<longlonglongrightarrow> a \<Longrightarrow> (\<lambda>n. f (n + k)) \<longlonglongrightarrow> a"
unfolding tendsto_def by (subst eventually_sequentially_seg[where k=k])
lemma LIMSEQ_offset: "(\<lambda>n. f (n + k)) \<longlonglongrightarrow> a \<Longrightarrow> f \<longlonglongrightarrow> a"
unfolding tendsto_def
by (subst (asm) eventually_sequentially_seg[where k=k])
lemma LIMSEQ_Suc: "f \<longlonglongrightarrow> l \<Longrightarrow> (\<lambda>n. f (Suc n)) \<longlonglongrightarrow> l"
by (drule LIMSEQ_ignore_initial_segment [where k="Suc 0"]) simp
lemma LIMSEQ_imp_Suc: "(\<lambda>n. f (Suc n)) \<longlonglongrightarrow> l \<Longrightarrow> f \<longlonglongrightarrow> l"
by (rule LIMSEQ_offset [where k="Suc 0"]) simp
lemma LIMSEQ_lessThan_iff_atMost:
shows "(\<lambda>n. f {..<n}) \<longlonglongrightarrow> x \<longleftrightarrow> (\<lambda>n. f {..n}) \<longlonglongrightarrow> x"
apply (subst filterlim_sequentially_Suc [symmetric])
apply (simp only: lessThan_Suc_atMost)
done
lemma (in t2_space) LIMSEQ_Uniq: "\<exists>\<^sub>\<le>\<^sub>1l. X \<longlonglongrightarrow> l"
by (simp add: tendsto_unique')
lemma (in t2_space) LIMSEQ_unique: "X \<longlonglongrightarrow> a \<Longrightarrow> X \<longlonglongrightarrow> b \<Longrightarrow> a = b"
using trivial_limit_sequentially by (rule tendsto_unique)
lemma LIMSEQ_le_const: "X \<longlonglongrightarrow> x \<Longrightarrow> \<exists>N. \<forall>n\<ge>N. a \<le> X n \<Longrightarrow> a \<le> x"
for a x :: "'a::linorder_topology"
by (simp add: eventually_at_top_linorder tendsto_lowerbound)
lemma LIMSEQ_le: "X \<longlonglongrightarrow> x \<Longrightarrow> Y \<longlonglongrightarrow> y \<Longrightarrow> \<exists>N. \<forall>n\<ge>N. X n \<le> Y n \<Longrightarrow> x \<le> y"
for x y :: "'a::linorder_topology"
using tendsto_le[of sequentially Y y X x] by (simp add: eventually_sequentially)
lemma LIMSEQ_le_const2: "X \<longlonglongrightarrow> x \<Longrightarrow> \<exists>N. \<forall>n\<ge>N. X n \<le> a \<Longrightarrow> x \<le> a"
for a x :: "'a::linorder_topology"
by (rule LIMSEQ_le[of X x "\<lambda>n. a"]) auto
lemma Lim_bounded: "f \<longlonglongrightarrow> l \<Longrightarrow> \<forall>n\<ge>M. f n \<le> C \<Longrightarrow> l \<le> C"
for l :: "'a::linorder_topology"
by (intro LIMSEQ_le_const2) auto
lemma Lim_bounded2:
fixes f :: "nat \<Rightarrow> 'a::linorder_topology"
assumes lim:"f \<longlonglongrightarrow> l" and ge: "\<forall>n\<ge>N. f n \<ge> C"
shows "l \<ge> C"
using ge
by (intro tendsto_le[OF trivial_limit_sequentially lim tendsto_const])
(auto simp: eventually_sequentially)
lemma lim_mono:
fixes X Y :: "nat \<Rightarrow> 'a::linorder_topology"
assumes "\<And>n. N \<le> n \<Longrightarrow> X n \<le> Y n"
and "X \<longlonglongrightarrow> x"
and "Y \<longlonglongrightarrow> y"
shows "x \<le> y"
using assms(1) by (intro LIMSEQ_le[OF assms(2,3)]) auto
lemma Sup_lim:
fixes a :: "'a::{complete_linorder,linorder_topology}"
assumes "\<And>n. b n \<in> s"
and "b \<longlonglongrightarrow> a"
shows "a \<le> Sup s"
by (metis Lim_bounded assms complete_lattice_class.Sup_upper)
lemma Inf_lim:
fixes a :: "'a::{complete_linorder,linorder_topology}"
assumes "\<And>n. b n \<in> s"
and "b \<longlonglongrightarrow> a"
shows "Inf s \<le> a"
by (metis Lim_bounded2 assms complete_lattice_class.Inf_lower)
lemma SUP_Lim:
fixes X :: "nat \<Rightarrow> 'a::{complete_linorder,linorder_topology}"
assumes inc: "incseq X"
and l: "X \<longlonglongrightarrow> l"
shows "(SUP n. X n) = l"
using LIMSEQ_SUP[OF inc] tendsto_unique[OF trivial_limit_sequentially l]
by simp
lemma INF_Lim:
fixes X :: "nat \<Rightarrow> 'a::{complete_linorder,linorder_topology}"
assumes dec: "decseq X"
and l: "X \<longlonglongrightarrow> l"
shows "(INF n. X n) = l"
using LIMSEQ_INF[OF dec] tendsto_unique[OF trivial_limit_sequentially l]
by simp
lemma convergentD: "convergent X \<Longrightarrow> \<exists>L. X \<longlonglongrightarrow> L"
by (simp add: convergent_def)
lemma convergentI: "X \<longlonglongrightarrow> L \<Longrightarrow> convergent X"
by (auto simp add: convergent_def)
lemma convergent_LIMSEQ_iff: "convergent X \<longleftrightarrow> X \<longlonglongrightarrow> lim X"
by (auto intro: theI LIMSEQ_unique simp add: convergent_def lim_def)
lemma convergent_const: "convergent (\<lambda>n. c)"
by (rule convergentI) (rule tendsto_const)
lemma monoseq_le:
"monoseq a \<Longrightarrow> a \<longlonglongrightarrow> x \<Longrightarrow>
(\<forall>n. a n \<le> x) \<and> (\<forall>m. \<forall>n\<ge>m. a m \<le> a n) \<or>
(\<forall>n. x \<le> a n) \<and> (\<forall>m. \<forall>n\<ge>m. a n \<le> a m)"
for x :: "'a::linorder_topology"
by (metis LIMSEQ_le_const LIMSEQ_le_const2 decseq_def incseq_def monoseq_iff)
lemma LIMSEQ_subseq_LIMSEQ: "X \<longlonglongrightarrow> L \<Longrightarrow> strict_mono f \<Longrightarrow> (X \<circ> f) \<longlonglongrightarrow> L"
unfolding comp_def by (rule filterlim_compose [of X, OF _ filterlim_subseq])
lemma convergent_subseq_convergent: "convergent X \<Longrightarrow> strict_mono f \<Longrightarrow> convergent (X \<circ> f)"
by (auto simp: convergent_def intro: LIMSEQ_subseq_LIMSEQ)
lemma limI: "X \<longlonglongrightarrow> L \<Longrightarrow> lim X = L"
by (rule tendsto_Lim) (rule trivial_limit_sequentially)
lemma lim_le: "convergent f \<Longrightarrow> (\<And>n. f n \<le> x) \<Longrightarrow> lim f \<le> x"
for x :: "'a::linorder_topology"
using LIMSEQ_le_const2[of f "lim f" x] by (simp add: convergent_LIMSEQ_iff)
lemma lim_const [simp]: "lim (\<lambda>m. a) = a"
by (simp add: limI)
subsubsection \<open>Increasing and decreasing sequences\<close>
lemma incseq_le: "incseq X \<Longrightarrow> X \<longlonglongrightarrow> L \<Longrightarrow> X n \<le> L"
for L :: "'a::linorder_topology"
by (metis incseq_def LIMSEQ_le_const)
lemma decseq_ge: "decseq X \<Longrightarrow> X \<longlonglongrightarrow> L \<Longrightarrow> L \<le> X n"
for L :: "'a::linorder_topology"
by (metis decseq_def LIMSEQ_le_const2)
subsection \<open>First countable topologies\<close>
class first_countable_topology = topological_space +
assumes first_countable_basis:
"\<exists>A::nat \<Rightarrow> 'a set. (\<forall>i. x \<in> A i \<and> open (A i)) \<and> (\<forall>S. open S \<and> x \<in> S \<longrightarrow> (\<exists>i. A i \<subseteq> S))"
lemma (in first_countable_topology) countable_basis_at_decseq:
obtains A :: "nat \<Rightarrow> 'a set" where
"\<And>i. open (A i)" "\<And>i. x \<in> (A i)"
"\<And>S. open S \<Longrightarrow> x \<in> S \<Longrightarrow> eventually (\<lambda>i. A i \<subseteq> S) sequentially"
proof atomize_elim
from first_countable_basis[of x] obtain A :: "nat \<Rightarrow> 'a set"
where nhds: "\<And>i. open (A i)" "\<And>i. x \<in> A i"
and incl: "\<And>S. open S \<Longrightarrow> x \<in> S \<Longrightarrow> \<exists>i. A i \<subseteq> S"
by auto
define F where "F n = (\<Inter>i\<le>n. A i)" for n
show "\<exists>A. (\<forall>i. open (A i)) \<and> (\<forall>i. x \<in> A i) \<and>
(\<forall>S. open S \<longrightarrow> x \<in> S \<longrightarrow> eventually (\<lambda>i. A i \<subseteq> S) sequentially)"
proof (safe intro!: exI[of _ F])
fix i
show "open (F i)"
using nhds(1) by (auto simp: F_def)
show "x \<in> F i"
using nhds(2) by (auto simp: F_def)
next
fix S
assume "open S" "x \<in> S"
from incl[OF this] obtain i where "F i \<subseteq> S"
unfolding F_def by auto
moreover have "\<And>j. i \<le> j \<Longrightarrow> F j \<subseteq> F i"
by (simp add: Inf_superset_mono F_def image_mono)
ultimately show "eventually (\<lambda>i. F i \<subseteq> S) sequentially"
by (auto simp: eventually_sequentially)
qed
qed
lemma (in first_countable_topology) nhds_countable:
obtains X :: "nat \<Rightarrow> 'a set"
where "decseq X" "\<And>n. open (X n)" "\<And>n. x \<in> X n" "nhds x = (INF n. principal (X n))"
proof -
from first_countable_basis obtain A :: "nat \<Rightarrow> 'a set"
where *: "\<And>n. x \<in> A n" "\<And>n. open (A n)" "\<And>S. open S \<Longrightarrow> x \<in> S \<Longrightarrow> \<exists>i. A i \<subseteq> S"
by metis
show thesis
proof
show "decseq (\<lambda>n. \<Inter>i\<le>n. A i)"
by (simp add: antimono_iff_le_Suc atMost_Suc)
show "x \<in> (\<Inter>i\<le>n. A i)" "\<And>n. open (\<Inter>i\<le>n. A i)" for n
using * by auto
with * show "nhds x = (INF n. principal (\<Inter>i\<le>n. A i))"
unfolding nhds_def
apply (intro INF_eq)
apply fastforce
apply blast
done
qed
qed
lemma (in first_countable_topology) countable_basis:
obtains A :: "nat \<Rightarrow> 'a set" where
"\<And>i. open (A i)" "\<And>i. x \<in> A i"
"\<And>F. (\<forall>n. F n \<in> A n) \<Longrightarrow> F \<longlonglongrightarrow> x"
proof atomize_elim
obtain A :: "nat \<Rightarrow> 'a set" where *:
"\<And>i. open (A i)"
"\<And>i. x \<in> A i"
"\<And>S. open S \<Longrightarrow> x \<in> S \<Longrightarrow> eventually (\<lambda>i. A i \<subseteq> S) sequentially"
by (rule countable_basis_at_decseq) blast
have "eventually (\<lambda>n. F n \<in> S) sequentially"
if "\<forall>n. F n \<in> A n" "open S" "x \<in> S" for F S
using *(3)[of S] that by (auto elim: eventually_mono simp: subset_eq)
with * show "\<exists>A. (\<forall>i. open (A i)) \<and> (\<forall>i. x \<in> A i) \<and> (\<forall>F. (\<forall>n. F n \<in> A n) \<longrightarrow> F \<longlonglongrightarrow> x)"
by (intro exI[of _ A]) (auto simp: tendsto_def)
qed
lemma (in first_countable_topology) sequentially_imp_eventually_nhds_within:
assumes "\<forall>f. (\<forall>n. f n \<in> s) \<and> f \<longlonglongrightarrow> a \<longrightarrow> eventually (\<lambda>n. P (f n)) sequentially"
shows "eventually P (inf (nhds a) (principal s))"
proof (rule ccontr)
obtain A :: "nat \<Rightarrow> 'a set" where *:
"\<And>i. open (A i)"
"\<And>i. a \<in> A i"
"\<And>F. \<forall>n. F n \<in> A n \<Longrightarrow> F \<longlonglongrightarrow> a"
by (rule countable_basis) blast
assume "\<not> ?thesis"
with * have "\<exists>F. \<forall>n. F n \<in> s \<and> F n \<in> A n \<and> \<not> P (F n)"
unfolding eventually_inf_principal eventually_nhds
by (intro choice) fastforce
then obtain F where F: "\<forall>n. F n \<in> s" and "\<forall>n. F n \<in> A n" and F': "\<forall>n. \<not> P (F n)"
by blast
with * have "F \<longlonglongrightarrow> a"
by auto
then have "eventually (\<lambda>n. P (F n)) sequentially"
using assms F by simp
then show False
by (simp add: F')
qed
lemma (in first_countable_topology) eventually_nhds_within_iff_sequentially:
"eventually P (inf (nhds a) (principal s)) \<longleftrightarrow>
(\<forall>f. (\<forall>n. f n \<in> s) \<and> f \<longlonglongrightarrow> a \<longrightarrow> eventually (\<lambda>n. P (f n)) sequentially)"
proof (safe intro!: sequentially_imp_eventually_nhds_within)
assume "eventually P (inf (nhds a) (principal s))"
then obtain S where "open S" "a \<in> S" "\<forall>x\<in>S. x \<in> s \<longrightarrow> P x"
by (auto simp: eventually_inf_principal eventually_nhds)
moreover
fix f
assume "\<forall>n. f n \<in> s" "f \<longlonglongrightarrow> a"
ultimately show "eventually (\<lambda>n. P (f n)) sequentially"
by (auto dest!: topological_tendstoD elim: eventually_mono)
qed
lemma (in first_countable_topology) eventually_nhds_iff_sequentially:
"eventually P (nhds a) \<longleftrightarrow> (\<forall>f. f \<longlonglongrightarrow> a \<longrightarrow> eventually (\<lambda>n. P (f n)) sequentially)"
using eventually_nhds_within_iff_sequentially[of P a UNIV] by simp
(* Thanks to Sébastien Gouëzel *)
lemma Inf_as_limit:
fixes A::"'a::{linorder_topology, first_countable_topology, complete_linorder} set"
assumes "A \<noteq> {}"
shows "\<exists>u. (\<forall>n. u n \<in> A) \<and> u \<longlonglongrightarrow> Inf A"
proof (cases "Inf A \<in> A")
case True
show ?thesis
by (rule exI[of _ "\<lambda>n. Inf A"], auto simp add: True)
next
case False
obtain y where "y \<in> A" using assms by auto
then have "Inf A < y" using False Inf_lower less_le by auto
obtain F :: "nat \<Rightarrow> 'a set" where F: "\<And>i. open (F i)" "\<And>i. Inf A \<in> F i"
"\<And>u. (\<forall>n. u n \<in> F n) \<Longrightarrow> u \<longlonglongrightarrow> Inf A"
by (metis first_countable_topology_class.countable_basis)
define u where "u = (\<lambda>n. SOME z. z \<in> F n \<and> z \<in> A)"
have "\<exists>z. z \<in> U \<and> z \<in> A" if "Inf A \<in> U" "open U" for U
proof -
obtain b where "b > Inf A" "{Inf A ..<b} \<subseteq> U"
using open_right[OF \<open>open U\<close> \<open>Inf A \<in> U\<close> \<open>Inf A < y\<close>] by auto
obtain z where "z < b" "z \<in> A"
using \<open>Inf A < b\<close> Inf_less_iff by auto
then have "z \<in> {Inf A ..<b}"
by (simp add: Inf_lower)
then show ?thesis using \<open>z \<in> A\<close> \<open>{Inf A ..<b} \<subseteq> U\<close> by auto
qed
then have *: "u n \<in> F n \<and> u n \<in> A" for n
using \<open>Inf A \<in> F n\<close> \<open>open (F n)\<close> unfolding u_def by (metis (no_types, lifting) someI_ex)
then have "u \<longlonglongrightarrow> Inf A" using F(3) by simp
then show ?thesis using * by auto
qed
lemma tendsto_at_iff_sequentially:
"(f \<longlongrightarrow> a) (at x within s) \<longleftrightarrow> (\<forall>X. (\<forall>i. X i \<in> s - {x}) \<longrightarrow> X \<longlonglongrightarrow> x \<longrightarrow> ((f \<circ> X) \<longlonglongrightarrow> a))"
for f :: "'a::first_countable_topology \<Rightarrow> _"
unfolding filterlim_def[of _ "nhds a"] le_filter_def eventually_filtermap
at_within_def eventually_nhds_within_iff_sequentially comp_def
by metis
lemma approx_from_above_dense_linorder:
fixes x::"'a::{dense_linorder, linorder_topology, first_countable_topology}"
assumes "x < y"
shows "\<exists>u. (\<forall>n. u n > x) \<and> (u \<longlonglongrightarrow> x)"
proof -
obtain A :: "nat \<Rightarrow> 'a set" where A: "\<And>i. open (A i)" "\<And>i. x \<in> A i"
"\<And>F. (\<forall>n. F n \<in> A n) \<Longrightarrow> F \<longlonglongrightarrow> x"
by (metis first_countable_topology_class.countable_basis)
define u where "u = (\<lambda>n. SOME z. z \<in> A n \<and> z > x)"
have "\<exists>z. z \<in> U \<and> x < z" if "x \<in> U" "open U" for U
using open_right[OF \<open>open U\<close> \<open>x \<in> U\<close> \<open>x < y\<close>]
by (meson atLeastLessThan_iff dense less_imp_le subset_eq)
then have *: "u n \<in> A n \<and> x < u n" for n
using \<open>x \<in> A n\<close> \<open>open (A n)\<close> unfolding u_def by (metis (no_types, lifting) someI_ex)
then have "u \<longlonglongrightarrow> x" using A(3) by simp
then show ?thesis using * by auto
qed
lemma approx_from_below_dense_linorder:
fixes x::"'a::{dense_linorder, linorder_topology, first_countable_topology}"
assumes "x > y"
shows "\<exists>u. (\<forall>n. u n < x) \<and> (u \<longlonglongrightarrow> x)"
proof -
obtain A :: "nat \<Rightarrow> 'a set" where A: "\<And>i. open (A i)" "\<And>i. x \<in> A i"
"\<And>F. (\<forall>n. F n \<in> A n) \<Longrightarrow> F \<longlonglongrightarrow> x"
by (metis first_countable_topology_class.countable_basis)
define u where "u = (\<lambda>n. SOME z. z \<in> A n \<and> z < x)"
have "\<exists>z. z \<in> U \<and> z < x" if "x \<in> U" "open U" for U
using open_left[OF \<open>open U\<close> \<open>x \<in> U\<close> \<open>x > y\<close>]
by (meson dense greaterThanAtMost_iff less_imp_le subset_eq)
then have *: "u n \<in> A n \<and> u n < x" for n
using \<open>x \<in> A n\<close> \<open>open (A n)\<close> unfolding u_def by (metis (no_types, lifting) someI_ex)
then have "u \<longlonglongrightarrow> x" using A(3) by simp
then show ?thesis using * by auto
qed
subsection \<open>Function limit at a point\<close>
abbreviation LIM :: "('a::topological_space \<Rightarrow> 'b::topological_space) \<Rightarrow> 'a \<Rightarrow> 'b \<Rightarrow> bool"
("((_)/ \<midarrow>(_)/\<rightarrow> (_))" [60, 0, 60] 60)
where "f \<midarrow>a\<rightarrow> L \<equiv> (f \<longlongrightarrow> L) (at a)"
lemma tendsto_within_open: "a \<in> S \<Longrightarrow> open S \<Longrightarrow> (f \<longlongrightarrow> l) (at a within S) \<longleftrightarrow> (f \<midarrow>a\<rightarrow> l)"
by (simp add: tendsto_def at_within_open[where S = S])
lemma tendsto_within_open_NO_MATCH:
"a \<in> S \<Longrightarrow> NO_MATCH UNIV S \<Longrightarrow> open S \<Longrightarrow> (f \<longlongrightarrow> l)(at a within S) \<longleftrightarrow> (f \<longlongrightarrow> l)(at a)"
for f :: "'a::topological_space \<Rightarrow> 'b::topological_space"
using tendsto_within_open by blast
lemma LIM_const_not_eq[tendsto_intros]: "k \<noteq> L \<Longrightarrow> \<not> (\<lambda>x. k) \<midarrow>a\<rightarrow> L"
for a :: "'a::perfect_space" and k L :: "'b::t2_space"
by (simp add: tendsto_const_iff)
lemmas LIM_not_zero = LIM_const_not_eq [where L = 0]
lemma LIM_const_eq: "(\<lambda>x. k) \<midarrow>a\<rightarrow> L \<Longrightarrow> k = L"
for a :: "'a::perfect_space" and k L :: "'b::t2_space"
by (simp add: tendsto_const_iff)
lemma LIM_unique: "f \<midarrow>a\<rightarrow> L \<Longrightarrow> f \<midarrow>a\<rightarrow> M \<Longrightarrow> L = M"
for a :: "'a::perfect_space" and L M :: "'b::t2_space"
using at_neq_bot by (rule tendsto_unique)
lemma LIM_Uniq: "\<exists>\<^sub>\<le>\<^sub>1L::'b::t2_space. f \<midarrow>a\<rightarrow> L"
for a :: "'a::perfect_space"
by (auto simp add: Uniq_def LIM_unique)
text \<open>Limits are equal for functions equal except at limit point.\<close>
lemma LIM_equal: "\<forall>x. x \<noteq> a \<longrightarrow> f x = g x \<Longrightarrow> (f \<midarrow>a\<rightarrow> l) \<longleftrightarrow> (g \<midarrow>a\<rightarrow> l)"
by (simp add: tendsto_def eventually_at_topological)
lemma LIM_cong: "a = b \<Longrightarrow> (\<And>x. x \<noteq> b \<Longrightarrow> f x = g x) \<Longrightarrow> l = m \<Longrightarrow> (f \<midarrow>a\<rightarrow> l) \<longleftrightarrow> (g \<midarrow>b\<rightarrow> m)"
by (simp add: LIM_equal)
lemma tendsto_cong_limit: "(f \<longlongrightarrow> l) F \<Longrightarrow> k = l \<Longrightarrow> (f \<longlongrightarrow> k) F"
by simp
lemma tendsto_at_iff_tendsto_nhds: "g \<midarrow>l\<rightarrow> g l \<longleftrightarrow> (g \<longlongrightarrow> g l) (nhds l)"
unfolding tendsto_def eventually_at_filter
by (intro ext all_cong imp_cong) (auto elim!: eventually_mono)
lemma tendsto_compose: "g \<midarrow>l\<rightarrow> g l \<Longrightarrow> (f \<longlongrightarrow> l) F \<Longrightarrow> ((\<lambda>x. g (f x)) \<longlongrightarrow> g l) F"
unfolding tendsto_at_iff_tendsto_nhds by (rule filterlim_compose[of g])
lemma tendsto_compose_eventually:
"g \<midarrow>l\<rightarrow> m \<Longrightarrow> (f \<longlongrightarrow> l) F \<Longrightarrow> eventually (\<lambda>x. f x \<noteq> l) F \<Longrightarrow> ((\<lambda>x. g (f x)) \<longlongrightarrow> m) F"
by (rule filterlim_compose[of g _ "at l"]) (auto simp add: filterlim_at)
lemma LIM_compose_eventually:
assumes "f \<midarrow>a\<rightarrow> b"
and "g \<midarrow>b\<rightarrow> c"
and "eventually (\<lambda>x. f x \<noteq> b) (at a)"
shows "(\<lambda>x. g (f x)) \<midarrow>a\<rightarrow> c"
using assms(2,1,3) by (rule tendsto_compose_eventually)
lemma tendsto_compose_filtermap: "((g \<circ> f) \<longlongrightarrow> T) F \<longleftrightarrow> (g \<longlongrightarrow> T) (filtermap f F)"
by (simp add: filterlim_def filtermap_filtermap comp_def)
lemma tendsto_compose_at:
assumes f: "(f \<longlongrightarrow> y) F" and g: "(g \<longlongrightarrow> z) (at y)" and fg: "eventually (\<lambda>w. f w = y \<longrightarrow> g y = z) F"
shows "((g \<circ> f) \<longlongrightarrow> z) F"
proof -
have "(\<forall>\<^sub>F a in F. f a \<noteq> y) \<or> g y = z"
using fg by force
moreover have "(g \<longlongrightarrow> z) (filtermap f F) \<or> \<not> (\<forall>\<^sub>F a in F. f a \<noteq> y)"
by (metis (no_types) filterlim_atI filterlim_def tendsto_mono f g)
ultimately show ?thesis
by (metis (no_types) f filterlim_compose filterlim_filtermap g tendsto_at_iff_tendsto_nhds tendsto_compose_filtermap)
qed
lemma tendsto_nhds_iff: "(f \<longlongrightarrow> (c :: 'a :: t1_space)) (nhds x) \<longleftrightarrow> f \<midarrow>x\<rightarrow> c \<and> f x = c"
proof safe
assume lim: "(f \<longlongrightarrow> c) (nhds x)"
show "f x = c"
proof (rule ccontr)
assume "f x \<noteq> c"
hence "c \<noteq> f x"
by auto
then obtain A where A: "open A" "c \<in> A" "f x \<notin> A"
by (subst (asm) separation_t1) auto
with lim obtain B where "open B" "x \<in> B" "\<And>x. x \<in> B \<Longrightarrow> f x \<in> A"
unfolding tendsto_def eventually_nhds by metis
with \<open>f x \<notin> A\<close> show False
by blast
qed
show "(f \<longlongrightarrow> c) (at x)"
using lim by (rule filterlim_mono) (auto simp: at_within_def)
next
assume "f \<midarrow>x\<rightarrow> f x" "c = f x"
thus "(f \<longlongrightarrow> f x) (nhds x)"
unfolding tendsto_def eventually_at_filter by (fast elim: eventually_mono)
qed
subsubsection \<open>Relation of \<open>LIM\<close> and \<open>LIMSEQ\<close>\<close>
lemma (in first_countable_topology) sequentially_imp_eventually_within:
"(\<forall>f. (\<forall>n. f n \<in> s \<and> f n \<noteq> a) \<and> f \<longlonglongrightarrow> a \<longrightarrow> eventually (\<lambda>n. P (f n)) sequentially) \<Longrightarrow>
eventually P (at a within s)"
unfolding at_within_def
by (intro sequentially_imp_eventually_nhds_within) auto
lemma (in first_countable_topology) sequentially_imp_eventually_at:
"(\<forall>f. (\<forall>n. f n \<noteq> a) \<and> f \<longlonglongrightarrow> a \<longrightarrow> eventually (\<lambda>n. P (f n)) sequentially) \<Longrightarrow> eventually P (at a)"
using sequentially_imp_eventually_within [where s=UNIV] by simp
lemma LIMSEQ_SEQ_conv:
"(\<forall>S. (\<forall>n. S n \<noteq> a) \<and> S \<longlonglongrightarrow> a \<longrightarrow> (\<lambda>n. X (S n)) \<longlonglongrightarrow> L) \<longleftrightarrow> X \<midarrow>a\<rightarrow> L" (is "?lhs=?rhs")
for a :: "'a::first_countable_topology" and L :: "'b::topological_space"
proof
assume ?lhs then show ?rhs
by (simp add: sequentially_imp_eventually_within tendsto_def)
next
assume ?rhs then show ?lhs
using tendsto_compose_eventually eventuallyI by blast
qed
lemma sequentially_imp_eventually_at_left:
fixes a :: "'a::{linorder_topology,first_countable_topology}"
assumes b[simp]: "b < a"
and *: "\<And>f. (\<And>n. b < f n) \<Longrightarrow> (\<And>n. f n < a) \<Longrightarrow> incseq f \<Longrightarrow> f \<longlonglongrightarrow> a \<Longrightarrow>
eventually (\<lambda>n. P (f n)) sequentially"
shows "eventually P (at_left a)"
proof (safe intro!: sequentially_imp_eventually_within)
fix X
assume X: "\<forall>n. X n \<in> {..< a} \<and> X n \<noteq> a" "X \<longlonglongrightarrow> a"
show "eventually (\<lambda>n. P (X n)) sequentially"
proof (rule ccontr)
assume neg: "\<not> ?thesis"
have "\<exists>s. \<forall>n. (\<not> P (X (s n)) \<and> b < X (s n)) \<and> (X (s n) \<le> X (s (Suc n)) \<and> Suc (s n) \<le> s (Suc n))"
(is "\<exists>s. ?P s")
proof (rule dependent_nat_choice)
have "\<not> eventually (\<lambda>n. b < X n \<longrightarrow> P (X n)) sequentially"
by (intro not_eventually_impI neg order_tendstoD(1) [OF X(2) b])
then show "\<exists>x. \<not> P (X x) \<and> b < X x"
by (auto dest!: not_eventuallyD)
next
fix x n
have "\<not> eventually (\<lambda>n. Suc x \<le> n \<longrightarrow> b < X n \<longrightarrow> X x < X n \<longrightarrow> P (X n)) sequentially"
using X
by (intro not_eventually_impI order_tendstoD(1)[OF X(2)] eventually_ge_at_top neg) auto
then show "\<exists>n. (\<not> P (X n) \<and> b < X n) \<and> (X x \<le> X n \<and> Suc x \<le> n)"
by (auto dest!: not_eventuallyD)
qed
then obtain s where "?P s" ..
with X have "b < X (s n)"
and "X (s n) < a"
and "incseq (\<lambda>n. X (s n))"
and "(\<lambda>n. X (s n)) \<longlonglongrightarrow> a"
and "\<not> P (X (s n))"
for n
by (auto simp: strict_mono_Suc_iff Suc_le_eq incseq_Suc_iff
intro!: LIMSEQ_subseq_LIMSEQ[OF \<open>X \<longlonglongrightarrow> a\<close>, unfolded comp_def])
from *[OF this(1,2,3,4)] this(5) show False
by auto
qed
qed
lemma tendsto_at_left_sequentially:
fixes a b :: "'b::{linorder_topology,first_countable_topology}"
assumes "b < a"
assumes *: "\<And>S. (\<And>n. S n < a) \<Longrightarrow> (\<And>n. b < S n) \<Longrightarrow> incseq S \<Longrightarrow> S \<longlonglongrightarrow> a \<Longrightarrow>
(\<lambda>n. X (S n)) \<longlonglongrightarrow> L"
shows "(X \<longlongrightarrow> L) (at_left a)"
using assms by (simp add: tendsto_def [where l=L] sequentially_imp_eventually_at_left)
lemma sequentially_imp_eventually_at_right:
fixes a b :: "'a::{linorder_topology,first_countable_topology}"
assumes b[simp]: "a < b"
assumes *: "\<And>f. (\<And>n. a < f n) \<Longrightarrow> (\<And>n. f n < b) \<Longrightarrow> decseq f \<Longrightarrow> f \<longlonglongrightarrow> a \<Longrightarrow>
eventually (\<lambda>n. P (f n)) sequentially"
shows "eventually P (at_right a)"
proof (safe intro!: sequentially_imp_eventually_within)
fix X
assume X: "\<forall>n. X n \<in> {a <..} \<and> X n \<noteq> a" "X \<longlonglongrightarrow> a"
show "eventually (\<lambda>n. P (X n)) sequentially"
proof (rule ccontr)
assume neg: "\<not> ?thesis"
have "\<exists>s. \<forall>n. (\<not> P (X (s n)) \<and> X (s n) < b) \<and> (X (s (Suc n)) \<le> X (s n) \<and> Suc (s n) \<le> s (Suc n))"
(is "\<exists>s. ?P s")
proof (rule dependent_nat_choice)
have "\<not> eventually (\<lambda>n. X n < b \<longrightarrow> P (X n)) sequentially"
by (intro not_eventually_impI neg order_tendstoD(2) [OF X(2) b])
then show "\<exists>x. \<not> P (X x) \<and> X x < b"
by (auto dest!: not_eventuallyD)
next
fix x n
have "\<not> eventually (\<lambda>n. Suc x \<le> n \<longrightarrow> X n < b \<longrightarrow> X n < X x \<longrightarrow> P (X n)) sequentially"
using X
by (intro not_eventually_impI order_tendstoD(2)[OF X(2)] eventually_ge_at_top neg) auto
then show "\<exists>n. (\<not> P (X n) \<and> X n < b) \<and> (X n \<le> X x \<and> Suc x \<le> n)"
by (auto dest!: not_eventuallyD)
qed
then obtain s where "?P s" ..
with X have "a < X (s n)"
and "X (s n) < b"
and "decseq (\<lambda>n. X (s n))"
and "(\<lambda>n. X (s n)) \<longlonglongrightarrow> a"
and "\<not> P (X (s n))"
for n
by (auto simp: strict_mono_Suc_iff Suc_le_eq decseq_Suc_iff
intro!: LIMSEQ_subseq_LIMSEQ[OF \<open>X \<longlonglongrightarrow> a\<close>, unfolded comp_def])
from *[OF this(1,2,3,4)] this(5) show False
by auto
qed
qed
lemma tendsto_at_right_sequentially:
fixes a :: "_ :: {linorder_topology, first_countable_topology}"
assumes "a < b"
and *: "\<And>S. (\<And>n. a < S n) \<Longrightarrow> (\<And>n. S n < b) \<Longrightarrow> decseq S \<Longrightarrow> S \<longlonglongrightarrow> a \<Longrightarrow>
(\<lambda>n. X (S n)) \<longlonglongrightarrow> L"
shows "(X \<longlongrightarrow> L) (at_right a)"
using assms by (simp add: tendsto_def [where l=L] sequentially_imp_eventually_at_right)
subsection \<open>Continuity\<close>
subsubsection \<open>Continuity on a set\<close>
definition continuous_on :: "'a set \<Rightarrow> ('a::topological_space \<Rightarrow> 'b::topological_space) \<Rightarrow> bool"
where "continuous_on s f \<longleftrightarrow> (\<forall>x\<in>s. (f \<longlongrightarrow> f x) (at x within s))"
lemma continuous_on_cong [cong]:
"s = t \<Longrightarrow> (\<And>x. x \<in> t \<Longrightarrow> f x = g x) \<Longrightarrow> continuous_on s f \<longleftrightarrow> continuous_on t g"
unfolding continuous_on_def
by (intro ball_cong filterlim_cong) (auto simp: eventually_at_filter)
lemma continuous_on_cong_simp:
"s = t \<Longrightarrow> (\<And>x. x \<in> t =simp=> f x = g x) \<Longrightarrow> continuous_on s f \<longleftrightarrow> continuous_on t g"
unfolding simp_implies_def by (rule continuous_on_cong)
lemma continuous_on_topological:
"continuous_on s f \<longleftrightarrow>
(\<forall>x\<in>s. \<forall>B. open B \<longrightarrow> f x \<in> B \<longrightarrow> (\<exists>A. open A \<and> x \<in> A \<and> (\<forall>y\<in>s. y \<in> A \<longrightarrow> f y \<in> B)))"
unfolding continuous_on_def tendsto_def eventually_at_topological by metis
lemma continuous_on_open_invariant:
"continuous_on s f \<longleftrightarrow> (\<forall>B. open B \<longrightarrow> (\<exists>A. open A \<and> A \<inter> s = f -` B \<inter> s))"
proof safe
fix B :: "'b set"
assume "continuous_on s f" "open B"
then have "\<forall>x\<in>f -` B \<inter> s. (\<exists>A. open A \<and> x \<in> A \<and> s \<inter> A \<subseteq> f -` B)"
by (auto simp: continuous_on_topological subset_eq Ball_def imp_conjL)
then obtain A where "\<forall>x\<in>f -` B \<inter> s. open (A x) \<and> x \<in> A x \<and> s \<inter> A x \<subseteq> f -` B"
unfolding bchoice_iff ..
then show "\<exists>A. open A \<and> A \<inter> s = f -` B \<inter> s"
by (intro exI[of _ "\<Union>x\<in>f -` B \<inter> s. A x"]) auto
next
assume B: "\<forall>B. open B \<longrightarrow> (\<exists>A. open A \<and> A \<inter> s = f -` B \<inter> s)"
show "continuous_on s f"
unfolding continuous_on_topological
proof safe
fix x B
assume "x \<in> s" "open B" "f x \<in> B"
with B obtain A where A: "open A" "A \<inter> s = f -` B \<inter> s"
by auto
with \<open>x \<in> s\<close> \<open>f x \<in> B\<close> show "\<exists>A. open A \<and> x \<in> A \<and> (\<forall>y\<in>s. y \<in> A \<longrightarrow> f y \<in> B)"
by (intro exI[of _ A]) auto
qed
qed
lemma continuous_on_open_vimage:
"open s \<Longrightarrow> continuous_on s f \<longleftrightarrow> (\<forall>B. open B \<longrightarrow> open (f -` B \<inter> s))"
unfolding continuous_on_open_invariant
by (metis open_Int Int_absorb Int_commute[of s] Int_assoc[of _ _ s])
corollary continuous_imp_open_vimage:
assumes "continuous_on s f" "open s" "open B" "f -` B \<subseteq> s"
shows "open (f -` B)"
by (metis assms continuous_on_open_vimage le_iff_inf)
corollary open_vimage[continuous_intros]:
assumes "open s"
and "continuous_on UNIV f"
shows "open (f -` s)"
using assms by (simp add: continuous_on_open_vimage [OF open_UNIV])
lemma continuous_on_closed_invariant:
"continuous_on s f \<longleftrightarrow> (\<forall>B. closed B \<longrightarrow> (\<exists>A. closed A \<and> A \<inter> s = f -` B \<inter> s))"
proof -
have *: "(\<And>A. P A \<longleftrightarrow> Q (- A)) \<Longrightarrow> (\<forall>A. P A) \<longleftrightarrow> (\<forall>A. Q A)"
for P Q :: "'b set \<Rightarrow> bool"
by (metis double_compl)
show ?thesis
unfolding continuous_on_open_invariant
by (intro *) (auto simp: open_closed[symmetric])
qed
lemma continuous_on_closed_vimage:
"closed s \<Longrightarrow> continuous_on s f \<longleftrightarrow> (\<forall>B. closed B \<longrightarrow> closed (f -` B \<inter> s))"
unfolding continuous_on_closed_invariant
by (metis closed_Int Int_absorb Int_commute[of s] Int_assoc[of _ _ s])
corollary closed_vimage_Int[continuous_intros]:
assumes "closed s"
and "continuous_on t f"
and t: "closed t"
shows "closed (f -` s \<inter> t)"
using assms by (simp add: continuous_on_closed_vimage [OF t])
corollary closed_vimage[continuous_intros]:
assumes "closed s"
and "continuous_on UNIV f"
shows "closed (f -` s)"
using closed_vimage_Int [OF assms] by simp
lemma continuous_on_empty [simp]: "continuous_on {} f"
by (simp add: continuous_on_def)
lemma continuous_on_sing [simp]: "continuous_on {x} f"
by (simp add: continuous_on_def at_within_def)
lemma continuous_on_open_Union:
"(\<And>s. s \<in> S \<Longrightarrow> open s) \<Longrightarrow> (\<And>s. s \<in> S \<Longrightarrow> continuous_on s f) \<Longrightarrow> continuous_on (\<Union>S) f"
unfolding continuous_on_def
by safe (metis open_Union at_within_open UnionI)
lemma continuous_on_open_UN:
"(\<And>s. s \<in> S \<Longrightarrow> open (A s)) \<Longrightarrow> (\<And>s. s \<in> S \<Longrightarrow> continuous_on (A s) f) \<Longrightarrow>
continuous_on (\<Union>s\<in>S. A s) f"
by (rule continuous_on_open_Union) auto
lemma continuous_on_open_Un:
"open s \<Longrightarrow> open t \<Longrightarrow> continuous_on s f \<Longrightarrow> continuous_on t f \<Longrightarrow> continuous_on (s \<union> t) f"
using continuous_on_open_Union [of "{s,t}"] by auto
lemma continuous_on_closed_Un:
"closed s \<Longrightarrow> closed t \<Longrightarrow> continuous_on s f \<Longrightarrow> continuous_on t f \<Longrightarrow> continuous_on (s \<union> t) f"
by (auto simp add: continuous_on_closed_vimage closed_Un Int_Un_distrib)
lemma continuous_on_closed_Union:
assumes "finite I"
"\<And>i. i \<in> I \<Longrightarrow> closed (U i)"
"\<And>i. i \<in> I \<Longrightarrow> continuous_on (U i) f"
shows "continuous_on (\<Union> i \<in> I. U i) f"
using assms
by (induction I) (auto intro!: continuous_on_closed_Un)
lemma continuous_on_If:
assumes closed: "closed s" "closed t"
and cont: "continuous_on s f" "continuous_on t g"
and P: "\<And>x. x \<in> s \<Longrightarrow> \<not> P x \<Longrightarrow> f x = g x" "\<And>x. x \<in> t \<Longrightarrow> P x \<Longrightarrow> f x = g x"
shows "continuous_on (s \<union> t) (\<lambda>x. if P x then f x else g x)"
(is "continuous_on _ ?h")
proof-
from P have "\<forall>x\<in>s. f x = ?h x" "\<forall>x\<in>t. g x = ?h x"
by auto
with cont have "continuous_on s ?h" "continuous_on t ?h"
by simp_all
with closed show ?thesis
by (rule continuous_on_closed_Un)
qed
lemma continuous_on_cases:
"closed s \<Longrightarrow> closed t \<Longrightarrow> continuous_on s f \<Longrightarrow> continuous_on t g \<Longrightarrow>
\<forall>x. (x\<in>s \<and> \<not> P x) \<or> (x \<in> t \<and> P x) \<longrightarrow> f x = g x \<Longrightarrow>
continuous_on (s \<union> t) (\<lambda>x. if P x then f x else g x)"
by (rule continuous_on_If) auto
lemma continuous_on_id[continuous_intros,simp]: "continuous_on s (\<lambda>x. x)"
unfolding continuous_on_def by fast
lemma continuous_on_id'[continuous_intros,simp]: "continuous_on s id"
unfolding continuous_on_def id_def by fast
lemma continuous_on_const[continuous_intros,simp]: "continuous_on s (\<lambda>x. c)"
unfolding continuous_on_def by auto
lemma continuous_on_subset: "continuous_on s f \<Longrightarrow> t \<subseteq> s \<Longrightarrow> continuous_on t f"
unfolding continuous_on_def
by (metis subset_eq tendsto_within_subset)
lemma continuous_on_compose[continuous_intros]:
"continuous_on s f \<Longrightarrow> continuous_on (f ` s) g \<Longrightarrow> continuous_on s (g \<circ> f)"
unfolding continuous_on_topological by simp metis
lemma continuous_on_compose2:
"continuous_on t g \<Longrightarrow> continuous_on s f \<Longrightarrow> f ` s \<subseteq> t \<Longrightarrow> continuous_on s (\<lambda>x. g (f x))"
using continuous_on_compose[of s f g] continuous_on_subset by (force simp add: comp_def)
lemma continuous_on_generate_topology:
assumes *: "open = generate_topology X"
and **: "\<And>B. B \<in> X \<Longrightarrow> \<exists>C. open C \<and> C \<inter> A = f -` B \<inter> A"
shows "continuous_on A f"
unfolding continuous_on_open_invariant
proof safe
fix B :: "'a set"
assume "open B"
then show "\<exists>C. open C \<and> C \<inter> A = f -` B \<inter> A"
unfolding *
proof induct
case (UN K)
then obtain C where "\<And>k. k \<in> K \<Longrightarrow> open (C k)" "\<And>k. k \<in> K \<Longrightarrow> C k \<inter> A = f -` k \<inter> A"
by metis
then show ?case
by (intro exI[of _ "\<Union>k\<in>K. C k"]) blast
qed (auto intro: **)
qed
lemma continuous_onI_mono:
fixes f :: "'a::linorder_topology \<Rightarrow> 'b::{dense_order,linorder_topology}"
assumes "open (f`A)"
and mono: "\<And>x y. x \<in> A \<Longrightarrow> y \<in> A \<Longrightarrow> x \<le> y \<Longrightarrow> f x \<le> f y"
shows "continuous_on A f"
proof (rule continuous_on_generate_topology[OF open_generated_order], safe)
have monoD: "\<And>x y. x \<in> A \<Longrightarrow> y \<in> A \<Longrightarrow> f x < f y \<Longrightarrow> x < y"
by (auto simp: not_le[symmetric] mono)
have "\<exists>x. x \<in> A \<and> f x < b \<and> a < x" if a: "a \<in> A" and fa: "f a < b" for a b
proof -
obtain y where "f a < y" "{f a ..< y} \<subseteq> f`A"
using open_right[OF \<open>open (f`A)\<close>, of "f a" b] a fa
by auto
obtain z where z: "f a < z" "z < min b y"
using dense[of "f a" "min b y"] \<open>f a < y\<close> \<open>f a < b\<close> by auto
then obtain c where "z = f c" "c \<in> A"
using \<open>{f a ..< y} \<subseteq> f`A\<close>[THEN subsetD, of z] by (auto simp: less_imp_le)
with a z show ?thesis
by (auto intro!: exI[of _ c] simp: monoD)
qed
then show "\<exists>C. open C \<and> C \<inter> A = f -` {..<b} \<inter> A" for b
by (intro exI[of _ "(\<Union>x\<in>{x\<in>A. f x < b}. {..< x})"])
(auto intro: le_less_trans[OF mono] less_imp_le)
have "\<exists>x. x \<in> A \<and> b < f x \<and> x < a" if a: "a \<in> A" and fa: "b < f a" for a b
proof -
note a fa
moreover
obtain y where "y < f a" "{y <.. f a} \<subseteq> f`A"
using open_left[OF \<open>open (f`A)\<close>, of "f a" b] a fa
by auto
then obtain z where z: "max b y < z" "z < f a"
using dense[of "max b y" "f a"] \<open>y < f a\<close> \<open>b < f a\<close> by auto
then obtain c where "z = f c" "c \<in> A"
using \<open>{y <.. f a} \<subseteq> f`A\<close>[THEN subsetD, of z] by (auto simp: less_imp_le)
with a z show ?thesis
by (auto intro!: exI[of _ c] simp: monoD)
qed
then show "\<exists>C. open C \<and> C \<inter> A = f -` {b <..} \<inter> A" for b
by (intro exI[of _ "(\<Union>x\<in>{x\<in>A. b < f x}. {x <..})"])
(auto intro: less_le_trans[OF _ mono] less_imp_le)
qed
lemma continuous_on_IccI:
"\<lbrakk>(f \<longlongrightarrow> f a) (at_right a);
(f \<longlongrightarrow> f b) (at_left b);
(\<And>x. a < x \<Longrightarrow> x < b \<Longrightarrow> f \<midarrow>x\<rightarrow> f x); a < b\<rbrakk> \<Longrightarrow>
continuous_on {a .. b} f"
for a::"'a::linorder_topology"
using at_within_open[of _ "{a<..<b}"]
by (auto simp: continuous_on_def at_within_Icc_at_right at_within_Icc_at_left le_less
at_within_Icc_at)
lemma
fixes a b::"'a::linorder_topology"
assumes "continuous_on {a .. b} f" "a < b"
shows continuous_on_Icc_at_rightD: "(f \<longlongrightarrow> f a) (at_right a)"
and continuous_on_Icc_at_leftD: "(f \<longlongrightarrow> f b) (at_left b)"
using assms
by (auto simp: at_within_Icc_at_right at_within_Icc_at_left continuous_on_def
dest: bspec[where x=a] bspec[where x=b])
lemma continuous_on_discrete [simp]:
"continuous_on A (f :: 'a :: discrete_topology \<Rightarrow> _)"
by (auto simp: continuous_on_def at_discrete)
lemma continuous_on_of_nat [continuous_intros]:
assumes "continuous_on A f"
shows "continuous_on A (\<lambda>n. of_nat (f n))"
using continuous_on_compose[OF assms continuous_on_discrete[of _ of_nat]]
by (simp add: o_def)
lemma continuous_on_of_int [continuous_intros]:
assumes "continuous_on A f"
shows "continuous_on A (\<lambda>n. of_int (f n))"
using continuous_on_compose[OF assms continuous_on_discrete[of _ of_int]]
by (simp add: o_def)
subsubsection \<open>Continuity at a point\<close>
definition continuous :: "'a::t2_space filter \<Rightarrow> ('a \<Rightarrow> 'b::topological_space) \<Rightarrow> bool"
where "continuous F f \<longleftrightarrow> (f \<longlongrightarrow> f (Lim F (\<lambda>x. x))) F"
lemma continuous_bot[continuous_intros, simp]: "continuous bot f"
unfolding continuous_def by auto
lemma continuous_trivial_limit: "trivial_limit net \<Longrightarrow> continuous net f"
by simp
lemma continuous_within: "continuous (at x within s) f \<longleftrightarrow> (f \<longlongrightarrow> f x) (at x within s)"
by (cases "trivial_limit (at x within s)") (auto simp add: Lim_ident_at continuous_def)
lemma continuous_within_topological:
"continuous (at x within s) f \<longleftrightarrow>
(\<forall>B. open B \<longrightarrow> f x \<in> B \<longrightarrow> (\<exists>A. open A \<and> x \<in> A \<and> (\<forall>y\<in>s. y \<in> A \<longrightarrow> f y \<in> B)))"
unfolding continuous_within tendsto_def eventually_at_topological by metis
lemma continuous_within_compose[continuous_intros]:
"continuous (at x within s) f \<Longrightarrow> continuous (at (f x) within f ` s) g \<Longrightarrow>
continuous (at x within s) (g \<circ> f)"
by (simp add: continuous_within_topological) metis
lemma continuous_within_compose2:
"continuous (at x within s) f \<Longrightarrow> continuous (at (f x) within f ` s) g \<Longrightarrow>
continuous (at x within s) (\<lambda>x. g (f x))"
using continuous_within_compose[of x s f g] by (simp add: comp_def)
lemma continuous_at: "continuous (at x) f \<longleftrightarrow> f \<midarrow>x\<rightarrow> f x"
using continuous_within[of x UNIV f] by simp
lemma continuous_ident[continuous_intros, simp]: "continuous (at x within S) (\<lambda>x. x)"
unfolding continuous_within by (rule tendsto_ident_at)
lemma continuous_id[continuous_intros, simp]: "continuous (at x within S) id"
by (simp add: id_def)
lemma continuous_const[continuous_intros, simp]: "continuous F (\<lambda>x. c)"
unfolding continuous_def by (rule tendsto_const)
lemma continuous_on_eq_continuous_within:
"continuous_on s f \<longleftrightarrow> (\<forall>x\<in>s. continuous (at x within s) f)"
unfolding continuous_on_def continuous_within ..
lemma continuous_discrete [simp]:
"continuous (at x within A) (f :: 'a :: discrete_topology \<Rightarrow> _)"
by (auto simp: continuous_def at_discrete)
abbreviation isCont :: "('a::t2_space \<Rightarrow> 'b::topological_space) \<Rightarrow> 'a \<Rightarrow> bool"
where "isCont f a \<equiv> continuous (at a) f"
lemma isCont_def: "isCont f a \<longleftrightarrow> f \<midarrow>a\<rightarrow> f a"
by (rule continuous_at)
lemma isContD: "isCont f x \<Longrightarrow> f \<midarrow>x\<rightarrow> f x"
by (simp add: isCont_def)
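text \<open>A small illustrative example of \<open>isCont\<close>: the identity map is continuous at
  every point, directly by \<open>continuous_ident\<close> above.\<close>
lemma "isCont (\<lambda>x. x) a"
  by (rule continuous_ident)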
lemma isCont_cong:
assumes "eventually (\<lambda>x. f x = g x) (nhds x)"
shows "isCont f x \<longleftrightarrow> isCont g x"
proof -
from assms have [simp]: "f x = g x"
by (rule eventually_nhds_x_imp_x)
from assms have "eventually (\<lambda>x. f x = g x) (at x)"
by (auto simp: eventually_at_filter elim!: eventually_mono)
with assms have "isCont f x \<longleftrightarrow> isCont g x" unfolding isCont_def
by (intro filterlim_cong) (auto elim!: eventually_mono)
with assms show ?thesis by simp
qed
lemma continuous_at_imp_continuous_at_within: "isCont f x \<Longrightarrow> continuous (at x within s) f"
by (auto intro: tendsto_mono at_le simp: continuous_at continuous_within)
lemma continuous_on_eq_continuous_at: "open s \<Longrightarrow> continuous_on s f \<longleftrightarrow> (\<forall>x\<in>s. isCont f x)"
by (simp add: continuous_on_def continuous_at at_within_open[of _ s])
lemma continuous_within_open: "a \<in> A \<Longrightarrow> open A \<Longrightarrow> continuous (at a within A) f \<longleftrightarrow> isCont f a"
by (simp add: at_within_open_NO_MATCH)
lemma continuous_at_imp_continuous_on: "\<forall>x\<in>s. isCont f x \<Longrightarrow> continuous_on s f"
by (auto intro: continuous_at_imp_continuous_at_within simp: continuous_on_eq_continuous_within)
lemma isCont_o2: "isCont f a \<Longrightarrow> isCont g (f a) \<Longrightarrow> isCont (\<lambda>x. g (f x)) a"
unfolding isCont_def by (rule tendsto_compose)
lemma continuous_at_compose[continuous_intros]: "isCont f a \<Longrightarrow> isCont g (f a) \<Longrightarrow> isCont (g \<circ> f) a"
unfolding o_def by (rule isCont_o2)
lemma isCont_tendsto_compose: "isCont g l \<Longrightarrow> (f \<longlongrightarrow> l) F \<Longrightarrow> ((\<lambda>x. g (f x)) \<longlongrightarrow> g l) F"
unfolding isCont_def by (rule tendsto_compose)
lemma continuous_on_tendsto_compose:
assumes f_cont: "continuous_on s f"
and g: "(g \<longlongrightarrow> l) F"
and l: "l \<in> s"
and ev: "\<forall>\<^sub>Fx in F. g x \<in> s"
shows "((\<lambda>x. f (g x)) \<longlongrightarrow> f l) F"
proof -
from f_cont l have f: "(f \<longlongrightarrow> f l) (at l within s)"
by (simp add: continuous_on_def)
have i: "((\<lambda>x. if g x = l then f l else f (g x)) \<longlongrightarrow> f l) F"
by (rule filterlim_If)
(auto intro!: filterlim_compose[OF f] eventually_conj tendsto_mono[OF _ g]
simp: filterlim_at eventually_inf_principal eventually_mono[OF ev])
show ?thesis
by (rule filterlim_cong[THEN iffD1[OF _ i]]) auto
qed
lemma continuous_within_compose3:
"isCont g (f x) \<Longrightarrow> continuous (at x within s) f \<Longrightarrow> continuous (at x within s) (\<lambda>x. g (f x))"
using continuous_at_imp_continuous_at_within continuous_within_compose2 by blast
lemma at_within_isCont_imp_nhds:
fixes f:: "'a:: {t2_space,perfect_space} \<Rightarrow> 'b:: t2_space"
assumes "\<forall>\<^sub>F w in at z. f w = g w" "isCont f z" "isCont g z"
shows "\<forall>\<^sub>F w in nhds z. f w = g w"
proof -
have "g \<midarrow>z\<rightarrow> f z"
using assms isContD tendsto_cong by blast
moreover have "g \<midarrow>z\<rightarrow> g z" using \<open>isCont g z\<close> using isCont_def by blast
ultimately have "f z=g z" using LIM_unique by auto
moreover have "\<forall>\<^sub>F x in nhds z. x \<noteq> z \<longrightarrow> f x = g x"
using assms unfolding eventually_at_filter by auto
ultimately show ?thesis
by (auto elim:eventually_mono)
qed
lemma filtermap_nhds_open_map':
assumes cont: "isCont f a"
and "open A" "a \<in> A"
and open_map: "\<And>S. open S \<Longrightarrow> S \<subseteq> A \<Longrightarrow> open (f ` S)"
shows "filtermap f (nhds a) = nhds (f a)"
unfolding filter_eq_iff
proof safe
fix P
assume "eventually P (filtermap f (nhds a))"
then obtain S where S: "open S" "a \<in> S" "\<forall>x\<in>S. P (f x)"
by (auto simp: eventually_filtermap eventually_nhds)
show "eventually P (nhds (f a))"
unfolding eventually_nhds
proof (rule exI [of _ "f ` (A \<inter> S)"], safe)
show "open (f ` (A \<inter> S))"
using S by (intro open_Int assms) auto
show "f a \<in> f ` (A \<inter> S)"
using assms S by auto
show "P (f x)" if "x \<in> A" "x \<in> S" for x
using S that by auto
qed
qed (metis filterlim_iff tendsto_at_iff_tendsto_nhds isCont_def eventually_filtermap cont)
lemma filtermap_nhds_open_map:
assumes cont: "isCont f a"
and open_map: "\<And>S. open S \<Longrightarrow> open (f`S)"
shows "filtermap f (nhds a) = nhds (f a)"
using cont filtermap_nhds_open_map' open_map by blast
lemma continuous_at_split:
"continuous (at x) f \<longleftrightarrow> continuous (at_left x) f \<and> continuous (at_right x) f"
for x :: "'a::linorder_topology"
by (simp add: continuous_within filterlim_at_split)
lemma continuous_on_max [continuous_intros]:
fixes f g :: "'a::topological_space \<Rightarrow> 'b::linorder_topology"
shows "continuous_on A f \<Longrightarrow> continuous_on A g \<Longrightarrow> continuous_on A (\<lambda>x. max (f x) (g x))"
by (auto simp: continuous_on_def intro!: tendsto_max)
lemma continuous_on_min [continuous_intros]:
fixes f g :: "'a::topological_space \<Rightarrow> 'b::linorder_topology"
shows "continuous_on A f \<Longrightarrow> continuous_on A g \<Longrightarrow> continuous_on A (\<lambda>x. min (f x) (g x))"
by (auto simp: continuous_on_def intro!: tendsto_min)
lemma continuous_max [continuous_intros]:
fixes f :: "'a::t2_space \<Rightarrow> 'b::linorder_topology"
shows "\<lbrakk>continuous F f; continuous F g\<rbrakk> \<Longrightarrow> continuous F (\<lambda>x. (max (f x) (g x)))"
by (simp add: tendsto_max continuous_def)
lemma continuous_min [continuous_intros]:
fixes f :: "'a::t2_space \<Rightarrow> 'b::linorder_topology"
shows "\<lbrakk>continuous F f; continuous F g\<rbrakk> \<Longrightarrow> continuous F (\<lambda>x. (min (f x) (g x)))"
by (simp add: tendsto_min continuous_def)
text \<open>
The following open/closed Collect lemmas are ported from
Sébastien Gouëzel's \<open>Ergodic_Theory\<close>.
\<close>
lemma open_Collect_neq:
fixes f g :: "'a::topological_space \<Rightarrow> 'b::t2_space"
assumes f: "continuous_on UNIV f" and g: "continuous_on UNIV g"
shows "open {x. f x \<noteq> g x}"
proof (rule openI)
fix t
assume "t \<in> {x. f x \<noteq> g x}"
then obtain U V where *: "open U" "open V" "f t \<in> U" "g t \<in> V" "U \<inter> V = {}"
by (auto simp add: separation_t2)
with open_vimage[OF \<open>open U\<close> f] open_vimage[OF \<open>open V\<close> g]
show "\<exists>T. open T \<and> t \<in> T \<and> T \<subseteq> {x. f x \<noteq> g x}"
by (intro exI[of _ "f -` U \<inter> g -` V"]) auto
qed
lemma closed_Collect_eq:
fixes f g :: "'a::topological_space \<Rightarrow> 'b::t2_space"
assumes f: "continuous_on UNIV f" and g: "continuous_on UNIV g"
shows "closed {x. f x = g x}"
using open_Collect_neq[OF f g] by (simp add: closed_def Collect_neg_eq)
lemma open_Collect_less:
fixes f g :: "'a::topological_space \<Rightarrow> 'b::linorder_topology"
assumes f: "continuous_on UNIV f" and g: "continuous_on UNIV g"
shows "open {x. f x < g x}"
proof (rule openI)
fix t
assume t: "t \<in> {x. f x < g x}"
show "\<exists>T. open T \<and> t \<in> T \<and> T \<subseteq> {x. f x < g x}"
proof (cases "\<exists>z. f t < z \<and> z < g t")
case True
then obtain z where "f t < z \<and> z < g t" by blast
then show ?thesis
using open_vimage[OF _ f, of "{..< z}"] open_vimage[OF _ g, of "{z <..}"]
by (intro exI[of _ "f -` {..<z} \<inter> g -` {z<..}"]) auto
next
case False
then have *: "{g t ..} = {f t <..}" "{..< g t} = {.. f t}"
using t by (auto intro: leI)
show ?thesis
using open_vimage[OF _ f, of "{..< g t}"] open_vimage[OF _ g, of "{f t <..}"] t
apply (intro exI[of _ "f -` {..< g t} \<inter> g -` {f t<..}"])
apply (simp add: open_Int)
apply (auto simp add: *)
done
qed
qed
lemma closed_Collect_le:
fixes f g :: "'a :: topological_space \<Rightarrow> 'b::linorder_topology"
assumes f: "continuous_on UNIV f"
and g: "continuous_on UNIV g"
shows "closed {x. f x \<le> g x}"
using open_Collect_less [OF g f]
by (simp add: closed_def Collect_neg_eq[symmetric] not_le)
subsubsection \<open>Open-cover compactness\<close>
context topological_space
begin
definition compact :: "'a set \<Rightarrow> bool" where
compact_eq_Heine_Borel: (* This name is used for backwards compatibility *)
"compact S \<longleftrightarrow> (\<forall>C. (\<forall>c\<in>C. open c) \<and> S \<subseteq> \<Union>C \<longrightarrow> (\<exists>D\<subseteq>C. finite D \<and> S \<subseteq> \<Union>D))"
lemma compactI:
assumes "\<And>C. \<forall>t\<in>C. open t \<Longrightarrow> s \<subseteq> \<Union>C \<Longrightarrow> \<exists>C'. C' \<subseteq> C \<and> finite C' \<and> s \<subseteq> \<Union>C'"
shows "compact s"
unfolding compact_eq_Heine_Borel using assms by metis
lemma compact_empty[simp]: "compact {}"
by (auto intro!: compactI)
lemma compactE: (*related to COMPACT_IMP_HEINE_BOREL in HOL Light*)
assumes "compact S" "S \<subseteq> \<Union>\<T>" "\<And>B. B \<in> \<T> \<Longrightarrow> open B"
obtains \<T>' where "\<T>' \<subseteq> \<T>" "finite \<T>'" "S \<subseteq> \<Union>\<T>'"
by (meson assms compact_eq_Heine_Borel)
lemma compactE_image:
assumes "compact S"
and opn: "\<And>T. T \<in> C \<Longrightarrow> open (f T)"
and S: "S \<subseteq> (\<Union>c\<in>C. f c)"
obtains C' where "C' \<subseteq> C" and "finite C'" and "S \<subseteq> (\<Union>c\<in>C'. f c)"
apply (rule compactE[OF \<open>compact S\<close> S])
using opn apply force
by (metis finite_subset_image)
lemma compact_Int_closed [intro]:
assumes "compact S"
and "closed T"
shows "compact (S \<inter> T)"
proof (rule compactI)
fix C
assume C: "\<forall>c\<in>C. open c"
assume cover: "S \<inter> T \<subseteq> \<Union>C"
from C \<open>closed T\<close> have "\<forall>c\<in>C \<union> {- T}. open c"
by auto
moreover from cover have "S \<subseteq> \<Union>(C \<union> {- T})"
by auto
ultimately have "\<exists>D\<subseteq>C \<union> {- T}. finite D \<and> S \<subseteq> \<Union>D"
using \<open>compact S\<close> unfolding compact_eq_Heine_Borel by auto
then obtain D where "D \<subseteq> C \<union> {- T} \<and> finite D \<and> S \<subseteq> \<Union>D" ..
then show "\<exists>D\<subseteq>C. finite D \<and> S \<inter> T \<subseteq> \<Union>D"
by (intro exI[of _ "D - {-T}"]) auto
qed
lemma compact_diff: "\<lbrakk>compact S; open T\<rbrakk> \<Longrightarrow> compact(S - T)"
by (simp add: Diff_eq compact_Int_closed open_closed)
lemma inj_setminus: "inj_on uminus (A::'a set set)"
by (auto simp: inj_on_def)
subsection \<open>Finite intersection property\<close>
lemma compact_fip:
"compact U \<longleftrightarrow>
(\<forall>A. (\<forall>a\<in>A. closed a) \<longrightarrow> (\<forall>B \<subseteq> A. finite B \<longrightarrow> U \<inter> \<Inter>B \<noteq> {}) \<longrightarrow> U \<inter> \<Inter>A \<noteq> {})"
(is "_ \<longleftrightarrow> ?R")
proof (safe intro!: compact_eq_Heine_Borel[THEN iffD2])
fix A
assume "compact U"
assume A: "\<forall>a\<in>A. closed a" "U \<inter> \<Inter>A = {}"
assume fin: "\<forall>B \<subseteq> A. finite B \<longrightarrow> U \<inter> \<Inter>B \<noteq> {}"
from A have "(\<forall>a\<in>uminus`A. open a) \<and> U \<subseteq> \<Union>(uminus`A)"
by auto
with \<open>compact U\<close> obtain B where "B \<subseteq> A" "finite (uminus`B)" "U \<subseteq> \<Union>(uminus`B)"
unfolding compact_eq_Heine_Borel by (metis subset_image_iff)
with fin[THEN spec, of B] show False
by (auto dest: finite_imageD intro: inj_setminus)
next
fix A
assume ?R
assume "\<forall>a\<in>A. open a" "U \<subseteq> \<Union>A"
then have "U \<inter> \<Inter>(uminus`A) = {}" "\<forall>a\<in>uminus`A. closed a"
by auto
with \<open>?R\<close> obtain B where "B \<subseteq> A" "finite (uminus`B)" "U \<inter> \<Inter>(uminus`B) = {}"
by (metis subset_image_iff)
then show "\<exists>T\<subseteq>A. finite T \<and> U \<subseteq> \<Union>T"
by (auto intro!: exI[of _ B] inj_setminus dest: finite_imageD)
qed
lemma compact_imp_fip:
assumes "compact S"
and "\<And>T. T \<in> F \<Longrightarrow> closed T"
and "\<And>F'. finite F' \<Longrightarrow> F' \<subseteq> F \<Longrightarrow> S \<inter> (\<Inter>F') \<noteq> {}"
shows "S \<inter> (\<Inter>F) \<noteq> {}"
using assms unfolding compact_fip by auto
lemma compact_imp_fip_image:
assumes "compact s"
and P: "\<And>i. i \<in> I \<Longrightarrow> closed (f i)"
and Q: "\<And>I'. finite I' \<Longrightarrow> I' \<subseteq> I \<Longrightarrow> (s \<inter> (\<Inter>i\<in>I'. f i) \<noteq> {})"
shows "s \<inter> (\<Inter>i\<in>I. f i) \<noteq> {}"
proof -
from P have "\<forall>i \<in> f ` I. closed i"
by blast
moreover have "\<forall>A. finite A \<and> A \<subseteq> f ` I \<longrightarrow> (s \<inter> (\<Inter>A) \<noteq> {})"
by (metis Q finite_subset_image)
ultimately show "s \<inter> (\<Inter>(f ` I)) \<noteq> {}"
by (metis \<open>compact s\<close> compact_imp_fip)
qed
end
lemma (in t2_space) compact_imp_closed:
assumes "compact s"
shows "closed s"
unfolding closed_def
proof (rule openI)
fix y
assume "y \<in> - s"
let ?C = "\<Union>x\<in>s. {u. open u \<and> x \<in> u \<and> eventually (\<lambda>y. y \<notin> u) (nhds y)}"
have "s \<subseteq> \<Union>?C"
proof
fix x
assume "x \<in> s"
with \<open>y \<in> - s\<close> have "x \<noteq> y" by clarsimp
then have "\<exists>u v. open u \<and> open v \<and> x \<in> u \<and> y \<in> v \<and> u \<inter> v = {}"
by (rule hausdorff)
with \<open>x \<in> s\<close> show "x \<in> \<Union>?C"
unfolding eventually_nhds by auto
qed
then obtain D where "D \<subseteq> ?C" and "finite D" and "s \<subseteq> \<Union>D"
by (rule compactE [OF \<open>compact s\<close>]) auto
from \<open>D \<subseteq> ?C\<close> have "\<forall>x\<in>D. eventually (\<lambda>y. y \<notin> x) (nhds y)"
by auto
with \<open>finite D\<close> have "eventually (\<lambda>y. y \<notin> \<Union>D) (nhds y)"
by (simp add: eventually_ball_finite)
with \<open>s \<subseteq> \<Union>D\<close> have "eventually (\<lambda>y. y \<notin> s) (nhds y)"
by (auto elim!: eventually_mono)
then show "\<exists>t. open t \<and> y \<in> t \<and> t \<subseteq> - s"
by (simp add: eventually_nhds subset_eq)
qed
lemma compact_continuous_image:
assumes f: "continuous_on s f"
and s: "compact s"
shows "compact (f ` s)"
proof (rule compactI)
fix C
assume "\<forall>c\<in>C. open c" and cover: "f`s \<subseteq> \<Union>C"
with f have "\<forall>c\<in>C. \<exists>A. open A \<and> A \<inter> s = f -` c \<inter> s"
unfolding continuous_on_open_invariant by blast
then obtain A where A: "\<forall>c\<in>C. open (A c) \<and> A c \<inter> s = f -` c \<inter> s"
unfolding bchoice_iff ..
with cover have "\<And>c. c \<in> C \<Longrightarrow> open (A c)" "s \<subseteq> (\<Union>c\<in>C. A c)"
by (fastforce simp add: subset_eq set_eq_iff)+
from compactE_image[OF s this] obtain D where "D \<subseteq> C" "finite D" "s \<subseteq> (\<Union>c\<in>D. A c)" .
with A show "\<exists>D \<subseteq> C. finite D \<and> f`s \<subseteq> \<Union>D"
by (intro exI[of _ D]) (fastforce simp add: subset_eq set_eq_iff)+
qed
lemma continuous_on_inv:
fixes f :: "'a::topological_space \<Rightarrow> 'b::t2_space"
assumes "continuous_on s f"
and "compact s"
and "\<forall>x\<in>s. g (f x) = x"
shows "continuous_on (f ` s) g"
unfolding continuous_on_topological
proof (clarsimp simp add: assms(3))
fix x :: 'a and B :: "'a set"
assume "x \<in> s" and "open B" and "x \<in> B"
have 1: "\<forall>x\<in>s. f x \<in> f ` (s - B) \<longleftrightarrow> x \<in> s - B"
using assms(3) by (auto, metis)
have "continuous_on (s - B) f"
using \<open>continuous_on s f\<close> Diff_subset
by (rule continuous_on_subset)
moreover have "compact (s - B)"
using \<open>open B\<close> and \<open>compact s\<close>
unfolding Diff_eq by (intro compact_Int_closed closed_Compl)
ultimately have "compact (f ` (s - B))"
by (rule compact_continuous_image)
then have "closed (f ` (s - B))"
by (rule compact_imp_closed)
then have "open (- f ` (s - B))"
by (rule open_Compl)
moreover have "f x \<in> - f ` (s - B)"
using \<open>x \<in> s\<close> and \<open>x \<in> B\<close> by (simp add: 1)
moreover have "\<forall>y\<in>s. f y \<in> - f ` (s - B) \<longrightarrow> y \<in> B"
by (simp add: 1)
ultimately show "\<exists>A. open A \<and> f x \<in> A \<and> (\<forall>y\<in>s. f y \<in> A \<longrightarrow> y \<in> B)"
by fast
qed
lemma continuous_on_inv_into:
fixes f :: "'a::topological_space \<Rightarrow> 'b::t2_space"
assumes s: "continuous_on s f" "compact s"
and f: "inj_on f s"
shows "continuous_on (f ` s) (the_inv_into s f)"
by (rule continuous_on_inv[OF s]) (auto simp: the_inv_into_f_f[OF f])
lemma (in linorder_topology) compact_attains_sup:
assumes "compact S" "S \<noteq> {}"
shows "\<exists>s\<in>S. \<forall>t\<in>S. t \<le> s"
proof (rule classical)
assume "\<not> (\<exists>s\<in>S. \<forall>t\<in>S. t \<le> s)"
then obtain t where t: "\<forall>s\<in>S. t s \<in> S" and "\<forall>s\<in>S. s < t s"
by (metis not_le)
then have "\<And>s. s\<in>S \<Longrightarrow> open {..< t s}" "S \<subseteq> (\<Union>s\<in>S. {..< t s})"
by auto
with \<open>compact S\<close> obtain C where "C \<subseteq> S" "finite C" and C: "S \<subseteq> (\<Union>s\<in>C. {..< t s})"
by (metis compactE_image)
with \<open>S \<noteq> {}\<close> have Max: "Max (t`C) \<in> t`C" and "\<forall>s\<in>t`C. s \<le> Max (t`C)"
by (auto intro!: Max_in)
with C have "S \<subseteq> {..< Max (t`C)}"
by (auto intro: less_le_trans simp: subset_eq)
with t Max \<open>C \<subseteq> S\<close> show ?thesis
by fastforce
qed
lemma (in linorder_topology) compact_attains_inf:
assumes "compact S" "S \<noteq> {}"
shows "\<exists>s\<in>S. \<forall>t\<in>S. s \<le> t"
proof (rule classical)
assume "\<not> (\<exists>s\<in>S. \<forall>t\<in>S. s \<le> t)"
then obtain t where t: "\<forall>s\<in>S. t s \<in> S" and "\<forall>s\<in>S. t s < s"
by (metis not_le)
then have "\<And>s. s\<in>S \<Longrightarrow> open {t s <..}" "S \<subseteq> (\<Union>s\<in>S. {t s <..})"
by auto
with \<open>compact S\<close> obtain C where "C \<subseteq> S" "finite C" and C: "S \<subseteq> (\<Union>s\<in>C. {t s <..})"
by (metis compactE_image)
with \<open>S \<noteq> {}\<close> have Min: "Min (t`C) \<in> t`C" and "\<forall>s\<in>t`C. Min (t`C) \<le> s"
by (auto intro!: Min_in)
with C have "S \<subseteq> {Min (t`C) <..}"
by (auto intro: le_less_trans simp: subset_eq)
with t Min \<open>C \<subseteq> S\<close> show ?thesis
by fastforce
qed
lemma continuous_attains_sup:
fixes f :: "'a::topological_space \<Rightarrow> 'b::linorder_topology"
shows "compact s \<Longrightarrow> s \<noteq> {} \<Longrightarrow> continuous_on s f \<Longrightarrow> (\<exists>x\<in>s. \<forall>y\<in>s. f y \<le> f x)"
using compact_attains_sup[of "f ` s"] compact_continuous_image[of s f] by auto
lemma continuous_attains_inf:
fixes f :: "'a::topological_space \<Rightarrow> 'b::linorder_topology"
shows "compact s \<Longrightarrow> s \<noteq> {} \<Longrightarrow> continuous_on s f \<Longrightarrow> (\<exists>x\<in>s. \<forall>y\<in>s. f x \<le> f y)"
using compact_attains_inf[of "f ` s"] compact_continuous_image[of s f] by auto
subsection \<open>Connectedness\<close>
context topological_space
begin
definition "connected S \<longleftrightarrow>
\<not> (\<exists>A B. open A \<and> open B \<and> S \<subseteq> A \<union> B \<and> A \<inter> B \<inter> S = {} \<and> A \<inter> S \<noteq> {} \<and> B \<inter> S \<noteq> {})"
lemma connectedI:
"(\<And>A B. open A \<Longrightarrow> open B \<Longrightarrow> A \<inter> U \<noteq> {} \<Longrightarrow> B \<inter> U \<noteq> {} \<Longrightarrow> A \<inter> B \<inter> U = {} \<Longrightarrow> U \<subseteq> A \<union> B \<Longrightarrow> False)
\<Longrightarrow> connected U"
by (auto simp: connected_def)
lemma connected_empty [simp]: "connected {}"
by (auto intro!: connectedI)
lemma connected_sing [simp]: "connected {x}"
by (auto intro!: connectedI)
lemma connectedD:
"connected A \<Longrightarrow> open U \<Longrightarrow> open V \<Longrightarrow> U \<inter> V \<inter> A = {} \<Longrightarrow> A \<subseteq> U \<union> V \<Longrightarrow> U \<inter> A = {} \<or> V \<inter> A = {}"
by (auto simp: connected_def)
end
lemma connected_closed:
"connected s \<longleftrightarrow>
\<not> (\<exists>A B. closed A \<and> closed B \<and> s \<subseteq> A \<union> B \<and> A \<inter> B \<inter> s = {} \<and> A \<inter> s \<noteq> {} \<and> B \<inter> s \<noteq> {})"
apply (simp add: connected_def del: ex_simps, safe)
apply (drule_tac x="-A" in spec)
apply (drule_tac x="-B" in spec)
apply (fastforce simp add: closed_def [symmetric])
apply (drule_tac x="-A" in spec)
apply (drule_tac x="-B" in spec)
apply (fastforce simp add: open_closed [symmetric])
done
lemma connected_closedD:
"\<lbrakk>connected s; A \<inter> B \<inter> s = {}; s \<subseteq> A \<union> B; closed A; closed B\<rbrakk> \<Longrightarrow> A \<inter> s = {} \<or> B \<inter> s = {}"
by (simp add: connected_closed)
lemma connected_Union:
assumes cs: "\<And>s. s \<in> S \<Longrightarrow> connected s"
and ne: "\<Inter>S \<noteq> {}"
shows "connected(\<Union>S)"
proof (rule connectedI)
fix A B
assume A: "open A" and B: "open B" and Alap: "A \<inter> \<Union>S \<noteq> {}" and Blap: "B \<inter> \<Union>S \<noteq> {}"
and disj: "A \<inter> B \<inter> \<Union>S = {}" and cover: "\<Union>S \<subseteq> A \<union> B"
have disjs:"\<And>s. s \<in> S \<Longrightarrow> A \<inter> B \<inter> s = {}"
using disj by auto
obtain sa where sa: "sa \<in> S" "A \<inter> sa \<noteq> {}"
using Alap by auto
obtain sb where sb: "sb \<in> S" "B \<inter> sb \<noteq> {}"
using Blap by auto
obtain x where x: "\<And>s. s \<in> S \<Longrightarrow> x \<in> s"
using ne by auto
then have "x \<in> \<Union>S"
using \<open>sa \<in> S\<close> by blast
then have "x \<in> A \<or> x \<in> B"
using cover by auto
then show False
using cs [unfolded connected_def]
by (metis A B IntI Sup_upper sa sb disjs x cover empty_iff subset_trans)
qed
lemma connected_Un: "connected s \<Longrightarrow> connected t \<Longrightarrow> s \<inter> t \<noteq> {} \<Longrightarrow> connected (s \<union> t)"
using connected_Union [of "{s,t}"] by auto
lemma connected_diff_open_from_closed:
assumes st: "s \<subseteq> t"
and tu: "t \<subseteq> u"
and s: "open s"
and t: "closed t"
and u: "connected u"
and ts: "connected (t - s)"
shows "connected(u - s)"
proof (rule connectedI)
fix A B
assume AB: "open A" "open B" "A \<inter> (u - s) \<noteq> {}" "B \<inter> (u - s) \<noteq> {}"
and disj: "A \<inter> B \<inter> (u - s) = {}"
and cover: "u - s \<subseteq> A \<union> B"
then consider "A \<inter> (t - s) = {}" | "B \<inter> (t - s) = {}"
using st ts tu connectedD [of "t-s" "A" "B"] by auto
then show False
proof cases
case 1
then have "(A - t) \<inter> (B \<union> s) \<inter> u = {}"
using disj st by auto
moreover have "u \<subseteq> (A - t) \<union> (B \<union> s)"
using 1 cover by auto
ultimately show False
using connectedD [of u "A - t" "B \<union> s"] AB s t 1 u by auto
next
case 2
then have "(A \<union> s) \<inter> (B - t) \<inter> u = {}"
using disj st by auto
moreover have "u \<subseteq> (A \<union> s) \<union> (B - t)"
using 2 cover by auto
ultimately show False
using connectedD [of u "A \<union> s" "B - t"] AB s t 2 u by auto
qed
qed
lemma connected_iff_const:
fixes S :: "'a::topological_space set"
shows "connected S \<longleftrightarrow> (\<forall>P::'a \<Rightarrow> bool. continuous_on S P \<longrightarrow> (\<exists>c. \<forall>s\<in>S. P s = c))"
proof safe
fix P :: "'a \<Rightarrow> bool"
assume "connected S" "continuous_on S P"
then have "\<And>b. \<exists>A. open A \<and> A \<inter> S = P -` {b} \<inter> S"
unfolding continuous_on_open_invariant by (simp add: open_discrete)
from this[of True] this[of False]
obtain t f where "open t" "open f" and *: "f \<inter> S = P -` {False} \<inter> S" "t \<inter> S = P -` {True} \<inter> S"
by meson
then have "t \<inter> S = {} \<or> f \<inter> S = {}"
by (intro connectedD[OF \<open>connected S\<close>]) auto
then show "\<exists>c. \<forall>s\<in>S. P s = c"
proof (rule disjE)
assume "t \<inter> S = {}"
then show ?thesis
unfolding * by (intro exI[of _ False]) auto
next
assume "f \<inter> S = {}"
then show ?thesis
unfolding * by (intro exI[of _ True]) auto
qed
next
assume P: "\<forall>P::'a \<Rightarrow> bool. continuous_on S P \<longrightarrow> (\<exists>c. \<forall>s\<in>S. P s = c)"
show "connected S"
proof (rule connectedI)
fix A B
assume *: "open A" "open B" "A \<inter> S \<noteq> {}" "B \<inter> S \<noteq> {}" "A \<inter> B \<inter> S = {}" "S \<subseteq> A \<union> B"
have "continuous_on S (\<lambda>x. x \<in> A)"
unfolding continuous_on_open_invariant
proof safe
fix C :: "bool set"
have "C = UNIV \<or> C = {True} \<or> C = {False} \<or> C = {}"
using subset_UNIV[of C] unfolding UNIV_bool by auto
with * show "\<exists>T. open T \<and> T \<inter> S = (\<lambda>x. x \<in> A) -` C \<inter> S"
by (intro exI[of _ "(if True \<in> C then A else {}) \<union> (if False \<in> C then B else {})"]) auto
qed
from P[rule_format, OF this] obtain c where "\<And>s. s \<in> S \<Longrightarrow> (s \<in> A) = c"
by blast
with * show False
by (cases c) auto
qed
qed
lemma connectedD_const: "connected S \<Longrightarrow> continuous_on S P \<Longrightarrow> \<exists>c. \<forall>s\<in>S. P s = c"
for P :: "'a::topological_space \<Rightarrow> bool"
by (auto simp: connected_iff_const)
lemma connectedI_const:
"(\<And>P::'a::topological_space \<Rightarrow> bool. continuous_on S P \<Longrightarrow> \<exists>c. \<forall>s\<in>S. P s = c) \<Longrightarrow> connected S"
by (auto simp: connected_iff_const)
lemma connected_local_const:
assumes "connected A" "a \<in> A" "b \<in> A"
and *: "\<forall>a\<in>A. eventually (\<lambda>b. f a = f b) (at a within A)"
shows "f a = f b"
proof -
obtain S where S: "\<And>a. a \<in> A \<Longrightarrow> a \<in> S a" "\<And>a. a \<in> A \<Longrightarrow> open (S a)"
"\<And>a x. a \<in> A \<Longrightarrow> x \<in> S a \<Longrightarrow> x \<in> A \<Longrightarrow> f a = f x"
using * unfolding eventually_at_topological by metis
let ?P = "\<Union>b\<in>{b\<in>A. f a = f b}. S b" and ?N = "\<Union>b\<in>{b\<in>A. f a \<noteq> f b}. S b"
have "?P \<inter> A = {} \<or> ?N \<inter> A = {}"
using \<open>connected A\<close> S \<open>a\<in>A\<close>
by (intro connectedD) (auto, metis)
then show "f a = f b"
proof
assume "?N \<inter> A = {}"
then have "\<forall>x\<in>A. f a = f x"
using S(1) by auto
with \<open>b\<in>A\<close> show ?thesis by auto
next
assume "?P \<inter> A = {}" then show ?thesis
using \<open>a \<in> A\<close> S(1)[of a] by auto
qed
qed
lemma (in linorder_topology) connectedD_interval:
assumes "connected U"
and xy: "x \<in> U" "y \<in> U"
and "x \<le> z" "z \<le> y"
shows "z \<in> U"
proof -
have eq: "{..<z} \<union> {z<..} = - {z}"
by auto
have "\<not> connected U" if "z \<notin> U" "x < z" "z < y"
using xy that
apply (simp only: connected_def simp_thms)
apply (rule_tac exI[of _ "{..< z}"])
apply (rule_tac exI[of _ "{z <..}"])
apply (auto simp add: eq)
done
with assms show "z \<in> U"
by (metis less_le)
qed
lemma (in linorder_topology) not_in_connected_cases:
assumes conn: "connected S"
assumes nbdd: "x \<notin> S"
assumes ne: "S \<noteq> {}"
obtains "bdd_above S" "\<And>y. y \<in> S \<Longrightarrow> x \<ge> y" | "bdd_below S" "\<And>y. y \<in> S \<Longrightarrow> x \<le> y"
proof -
obtain s where "s \<in> S" using ne by blast
{
assume "s \<le> x"
have "False" if "x \<le> y" "y \<in> S" for y
using connectedD_interval[OF conn \<open>s \<in> S\<close> \<open>y \<in> S\<close> \<open>s \<le> x\<close> \<open>x \<le> y\<close>] \<open>x \<notin> S\<close>
by simp
then have wit: "y \<in> S \<Longrightarrow> x \<ge> y" for y
using le_cases by blast
then have "bdd_above S"
by (rule local.bdd_aboveI)
note this wit
} moreover {
assume "x \<le> s"
have "False" if "x \<ge> y" "y \<in> S" for y
using connectedD_interval[OF conn \<open>y \<in> S\<close> \<open>s \<in> S\<close> \<open>x \<ge> y\<close> \<open>s \<ge> x\<close> ] \<open>x \<notin> S\<close>
by simp
then have wit: "y \<in> S \<Longrightarrow> x \<le> y" for y
using le_cases by blast
then have "bdd_below S"
by (rule bdd_belowI)
note this wit
} ultimately show ?thesis
by (meson le_cases that)
qed
lemma connected_continuous_image:
assumes *: "continuous_on s f"
and "connected s"
shows "connected (f ` s)"
proof (rule connectedI_const)
fix P :: "'b \<Rightarrow> bool"
assume "continuous_on (f ` s) P"
then have "continuous_on s (P \<circ> f)"
by (rule continuous_on_compose[OF *])
from connectedD_const[OF \<open>connected s\<close> this] show "\<exists>c. \<forall>s\<in>f ` s. P s = c"
by auto
qed
lemma connected_Un_UN:
assumes "connected A" "\<And>X. X \<in> B \<Longrightarrow> connected X" "\<And>X. X \<in> B \<Longrightarrow> A \<inter> X \<noteq> {}"
shows "connected (A \<union> \<Union>B)"
proof (rule connectedI_const)
fix f :: "'a \<Rightarrow> bool"
assume f: "continuous_on (A \<union> \<Union>B) f"
have "connected A" "continuous_on A f"
by (auto intro: assms continuous_on_subset[OF f(1)])
from connectedD_const[OF this] obtain c where c: "\<And>x. x \<in> A \<Longrightarrow> f x = c"
by metis
have "f x = c" if "x \<in> X" "X \<in> B" for x X
proof -
have "connected X" "continuous_on X f"
using that by (auto intro: assms continuous_on_subset[OF f])
from connectedD_const[OF this] obtain c' where c': "\<And>x. x \<in> X \<Longrightarrow> f x = c'"
by metis
from assms(3) and that obtain y where "y \<in> A \<inter> X"
by auto
with c[of y] c'[of y] c'[of x] that show ?thesis
by auto
qed
with c show "\<exists>c. \<forall>x\<in>A \<union> \<Union> B. f x = c"
by (intro exI[of _ c]) auto
qed
section \<open>Linear Continuum Topologies\<close>
class linear_continuum_topology = linorder_topology + linear_continuum
begin
lemma Inf_notin_open:
assumes A: "open A"
and bnd: "\<forall>a\<in>A. x < a"
shows "Inf A \<notin> A"
proof
assume "Inf A \<in> A"
then obtain b where "b < Inf A" "{b <.. Inf A} \<subseteq> A"
using open_left[of A "Inf A" x] assms by auto
with dense[of b "Inf A"] obtain c where "c < Inf A" "c \<in> A"
by (auto simp: subset_eq)
then show False
using cInf_lower[OF \<open>c \<in> A\<close>] bnd
by (metis not_le less_imp_le bdd_belowI)
qed
lemma Sup_notin_open:
assumes A: "open A"
and bnd: "\<forall>a\<in>A. a < x"
shows "Sup A \<notin> A"
proof
assume "Sup A \<in> A"
with assms obtain b where "Sup A < b" "{Sup A ..< b} \<subseteq> A"
using open_right[of A "Sup A" x] by auto
with dense[of "Sup A" b] obtain c where "Sup A < c" "c \<in> A"
by (auto simp: subset_eq)
then show False
using cSup_upper[OF \<open>c \<in> A\<close>] bnd
by (metis less_imp_le not_le bdd_aboveI)
qed
end
instance linear_continuum_topology \<subseteq> perfect_space
proof
fix x :: 'a
obtain y where "x < y \<or> y < x"
using ex_gt_or_lt [of x] ..
with Inf_notin_open[of "{x}" y] Sup_notin_open[of "{x}" y] show "\<not> open {x}"
by auto
qed
lemma connectedI_interval:
fixes U :: "'a :: linear_continuum_topology set"
assumes *: "\<And>x y z. x \<in> U \<Longrightarrow> y \<in> U \<Longrightarrow> x \<le> z \<Longrightarrow> z \<le> y \<Longrightarrow> z \<in> U"
shows "connected U"
proof (rule connectedI)
{
fix A B
assume "open A" "open B" "A \<inter> B \<inter> U = {}" "U \<subseteq> A \<union> B"
fix x y
assume "x < y" "x \<in> A" "y \<in> B" "x \<in> U" "y \<in> U"
let ?z = "Inf (B \<inter> {x <..})"
have "x \<le> ?z" "?z \<le> y"
using \<open>y \<in> B\<close> \<open>x < y\<close> by (auto intro: cInf_lower cInf_greatest)
with \<open>x \<in> U\<close> \<open>y \<in> U\<close> have "?z \<in> U"
by (rule *)
moreover have "?z \<notin> B \<inter> {x <..}"
using \<open>open B\<close> by (intro Inf_notin_open) auto
ultimately have "?z \<in> A"
using \<open>x \<le> ?z\<close> \<open>A \<inter> B \<inter> U = {}\<close> \<open>x \<in> A\<close> \<open>U \<subseteq> A \<union> B\<close> by auto
have "\<exists>b\<in>B. b \<in> A \<and> b \<in> U" if "?z < y"
proof -
obtain a where "?z < a" "{?z ..< a} \<subseteq> A"
using open_right[OF \<open>open A\<close> \<open>?z \<in> A\<close> \<open>?z < y\<close>] by auto
moreover obtain b where "b \<in> B" "x < b" "b < min a y"
using cInf_less_iff[of "B \<inter> {x <..}" "min a y"] \<open>?z < a\<close> \<open>?z < y\<close> \<open>x < y\<close> \<open>y \<in> B\<close>
by auto
moreover have "?z \<le> b"
using \<open>b \<in> B\<close> \<open>x < b\<close>
by (intro cInf_lower) auto
moreover have "b \<in> U"
using \<open>x \<le> ?z\<close> \<open>?z \<le> b\<close> \<open>b < min a y\<close>
by (intro *[OF \<open>x \<in> U\<close> \<open>y \<in> U\<close>]) (auto simp: less_imp_le)
ultimately show ?thesis
by (intro bexI[of _ b]) auto
qed
then have False
using \<open>?z \<le> y\<close> \<open>?z \<in> A\<close> \<open>y \<in> B\<close> \<open>y \<in> U\<close> \<open>A \<inter> B \<inter> U = {}\<close>
unfolding le_less by blast
}
note not_disjoint = this
fix A B assume AB: "open A" "open B" "U \<subseteq> A \<union> B" "A \<inter> B \<inter> U = {}"
moreover assume "A \<inter> U \<noteq> {}" then obtain x where x: "x \<in> U" "x \<in> A" by auto
moreover assume "B \<inter> U \<noteq> {}" then obtain y where y: "y \<in> U" "y \<in> B" by auto
moreover note not_disjoint[of B A y x] not_disjoint[of A B x y]
ultimately show False
by (cases x y rule: linorder_cases) auto
qed
lemma connected_iff_interval: "connected U \<longleftrightarrow> (\<forall>x\<in>U. \<forall>y\<in>U. \<forall>z. x \<le> z \<longrightarrow> z \<le> y \<longrightarrow> z \<in> U)"
for U :: "'a::linear_continuum_topology set"
by (auto intro: connectedI_interval dest: connectedD_interval)
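text \<open>Informal reading (not part of the formal development): over a linear continuum the
  connected sets are exactly the order-theoretic intervals, i.e. the sets closed under
  betweenness; on the reals, for example, these are precisely the usual intervals.\<close>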
lemma connected_UNIV[simp]: "connected (UNIV::'a::linear_continuum_topology set)"
by (simp add: connected_iff_interval)
lemma connected_Ioi[simp]: "connected {a<..}"
for a :: "'a::linear_continuum_topology"
by (auto simp: connected_iff_interval)
lemma connected_Ici[simp]: "connected {a..}"
for a :: "'a::linear_continuum_topology"
by (auto simp: connected_iff_interval)
lemma connected_Iio[simp]: "connected {..<a}"
for a :: "'a::linear_continuum_topology"
by (auto simp: connected_iff_interval)
lemma connected_Iic[simp]: "connected {..a}"
for a :: "'a::linear_continuum_topology"
by (auto simp: connected_iff_interval)
lemma connected_Ioo[simp]: "connected {a<..<b}"
for a b :: "'a::linear_continuum_topology"
unfolding connected_iff_interval by auto
lemma connected_Ioc[simp]: "connected {a<..b}"
for a b :: "'a::linear_continuum_topology"
by (auto simp: connected_iff_interval)
lemma connected_Ico[simp]: "connected {a..<b}"
for a b :: "'a::linear_continuum_topology"
by (auto simp: connected_iff_interval)
lemma connected_Icc[simp]: "connected {a..b}"
for a b :: "'a::linear_continuum_topology"
by (auto simp: connected_iff_interval)
lemma connected_contains_Ioo:
fixes A :: "'a :: linorder_topology set"
assumes "connected A" "a \<in> A" "b \<in> A" shows "{a <..< b} \<subseteq> A"
using connectedD_interval[OF assms] by (simp add: subset_eq Ball_def less_imp_le)
lemma connected_contains_Icc:
fixes A :: "'a::linorder_topology set"
assumes "connected A" "a \<in> A" "b \<in> A"
shows "{a..b} \<subseteq> A"
proof
fix x assume "x \<in> {a..b}"
then have "x = a \<or> x = b \<or> x \<in> {a<..<b}"
by auto
then show "x \<in> A"
using assms connected_contains_Ioo[of A a b] by auto
qed
subsection \<open>Intermediate Value Theorem\<close>
lemma IVT':
fixes f :: "'a::linear_continuum_topology \<Rightarrow> 'b::linorder_topology"
assumes y: "f a \<le> y" "y \<le> f b" "a \<le> b"
and *: "continuous_on {a .. b} f"
shows "\<exists>x. a \<le> x \<and> x \<le> b \<and> f x = y"
proof -
have "connected {a..b}"
unfolding connected_iff_interval by auto
from connected_continuous_image[OF * this, THEN connectedD_interval, of "f a" "f b" y] y
show ?thesis
by (auto simp add: atLeastAtMost_def atLeast_def atMost_def)
qed
lemma IVT2':
fixes f :: "'a :: linear_continuum_topology \<Rightarrow> 'b :: linorder_topology"
assumes y: "f b \<le> y" "y \<le> f a" "a \<le> b"
and *: "continuous_on {a .. b} f"
shows "\<exists>x. a \<le> x \<and> x \<le> b \<and> f x = y"
proof -
have "connected {a..b}"
unfolding connected_iff_interval by auto
from connected_continuous_image[OF * this, THEN connectedD_interval, of "f b" "f a" y] y
show ?thesis
by (auto simp add: atLeastAtMost_def atLeast_def atMost_def)
qed
lemma IVT:
fixes f :: "'a::linear_continuum_topology \<Rightarrow> 'b::linorder_topology"
shows "f a \<le> y \<Longrightarrow> y \<le> f b \<Longrightarrow> a \<le> b \<Longrightarrow> (\<forall>x. a \<le> x \<and> x \<le> b \<longrightarrow> isCont f x) \<Longrightarrow>
\<exists>x. a \<le> x \<and> x \<le> b \<and> f x = y"
by (rule IVT') (auto intro: continuous_at_imp_continuous_on)
lemma IVT2:
fixes f :: "'a::linear_continuum_topology \<Rightarrow> 'b::linorder_topology"
shows "f b \<le> y \<Longrightarrow> y \<le> f a \<Longrightarrow> a \<le> b \<Longrightarrow> (\<forall>x. a \<le> x \<and> x \<le> b \<longrightarrow> isCont f x) \<Longrightarrow>
\<exists>x. a \<le> x \<and> x \<le> b \<and> f x = y"
by (rule IVT2') (auto intro: continuous_at_imp_continuous_on)
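text \<open>Worked example (informal sketch): for the continuous function f with f x = x * x - 2
  on the real interval from 1 to 2 we have f 1 = -1 <= 0 and f 2 = 2 >= 0, so the lemma IVT
  above yields some x in that interval with f x = 0, i.e. a square root of 2.\<close>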
lemma continuous_inj_imp_mono:
fixes f :: "'a::linear_continuum_topology \<Rightarrow> 'b::linorder_topology"
assumes x: "a < x" "x < b"
and cont: "continuous_on {a..b} f"
and inj: "inj_on f {a..b}"
shows "(f a < f x \<and> f x < f b) \<or> (f b < f x \<and> f x < f a)"
proof -
note I = inj_on_eq_iff[OF inj]
{
assume "f x < f a" "f x < f b"
then obtain s t where "x \<le> s" "s \<le> b" "a \<le> t" "t \<le> x" "f s = f t" "f x < f s"
using IVT'[of f x "min (f a) (f b)" b] IVT2'[of f x "min (f a) (f b)" a] x
by (auto simp: continuous_on_subset[OF cont] less_imp_le)
with x I have False by auto
}
moreover
{
assume "f a < f x" "f b < f x"
then obtain s t where "x \<le> s" "s \<le> b" "a \<le> t" "t \<le> x" "f s = f t" "f s < f x"
using IVT'[of f a "max (f a) (f b)" x] IVT2'[of f b "max (f a) (f b)" x] x
by (auto simp: continuous_on_subset[OF cont] less_imp_le)
with x I have False by auto
}
ultimately show ?thesis
using I[of a x] I[of x b] x less_trans[OF x]
by (auto simp add: le_less less_imp_neq neq_iff)
qed
lemma continuous_at_Sup_mono:
fixes f :: "'a::{linorder_topology,conditionally_complete_linorder} \<Rightarrow>
'b::{linorder_topology,conditionally_complete_linorder}"
assumes "mono f"
and cont: "continuous (at_left (Sup S)) f"
and S: "S \<noteq> {}" "bdd_above S"
shows "f (Sup S) = (SUP s\<in>S. f s)"
proof (rule antisym)
have f: "(f \<longlongrightarrow> f (Sup S)) (at_left (Sup S))"
using cont unfolding continuous_within .
show "f (Sup S) \<le> (SUP s\<in>S. f s)"
proof cases
assume "Sup S \<in> S"
then show ?thesis
by (rule cSUP_upper) (auto intro: bdd_above_image_mono S \<open>mono f\<close>)
next
assume "Sup S \<notin> S"
from \<open>S \<noteq> {}\<close> obtain s where "s \<in> S"
by auto
with \<open>Sup S \<notin> S\<close> S have "s < Sup S"
unfolding less_le by (blast intro: cSup_upper)
show ?thesis
proof (rule ccontr)
assume "\<not> ?thesis"
with order_tendstoD(1)[OF f, of "SUP s\<in>S. f s"] obtain b where "b < Sup S"
and *: "\<And>y. b < y \<Longrightarrow> y < Sup S \<Longrightarrow> (SUP s\<in>S. f s) < f y"
by (auto simp: not_le eventually_at_left[OF \<open>s < Sup S\<close>])
with \<open>S \<noteq> {}\<close> obtain c where "c \<in> S" "b < c"
using less_cSupD[of S b] by auto
with \<open>Sup S \<notin> S\<close> S have "c < Sup S"
unfolding less_le by (blast intro: cSup_upper)
from *[OF \<open>b < c\<close> \<open>c < Sup S\<close>] cSUP_upper[OF \<open>c \<in> S\<close> bdd_above_image_mono[of f]]
show False
by (auto simp: assms)
qed
qed
qed (intro cSUP_least \<open>mono f\<close>[THEN monoD] cSup_upper S)
lemma continuous_at_Sup_antimono:
fixes f :: "'a::{linorder_topology,conditionally_complete_linorder} \<Rightarrow>
'b::{linorder_topology,conditionally_complete_linorder}"
assumes "antimono f"
and cont: "continuous (at_left (Sup S)) f"
and S: "S \<noteq> {}" "bdd_above S"
shows "f (Sup S) = (INF s\<in>S. f s)"
proof (rule antisym)
have f: "(f \<longlongrightarrow> f (Sup S)) (at_left (Sup S))"
using cont unfolding continuous_within .
show "(INF s\<in>S. f s) \<le> f (Sup S)"
proof cases
assume "Sup S \<in> S"
then show ?thesis
by (intro cINF_lower) (auto intro: bdd_below_image_antimono S \<open>antimono f\<close>)
next
assume "Sup S \<notin> S"
from \<open>S \<noteq> {}\<close> obtain s where "s \<in> S"
by auto
with \<open>Sup S \<notin> S\<close> S have "s < Sup S"
unfolding less_le by (blast intro: cSup_upper)
show ?thesis
proof (rule ccontr)
assume "\<not> ?thesis"
with order_tendstoD(2)[OF f, of "INF s\<in>S. f s"] obtain b where "b < Sup S"
and *: "\<And>y. b < y \<Longrightarrow> y < Sup S \<Longrightarrow> f y < (INF s\<in>S. f s)"
by (auto simp: not_le eventually_at_left[OF \<open>s < Sup S\<close>])
with \<open>S \<noteq> {}\<close> obtain c where "c \<in> S" "b < c"
using less_cSupD[of S b] by auto
with \<open>Sup S \<notin> S\<close> S have "c < Sup S"
unfolding less_le by (blast intro: cSup_upper)
from *[OF \<open>b < c\<close> \<open>c < Sup S\<close>] cINF_lower[OF bdd_below_image_antimono, of f S c] \<open>c \<in> S\<close>
show False
by (auto simp: assms)
qed
qed
qed (intro cINF_greatest \<open>antimono f\<close>[THEN antimonoD] cSup_upper S)
lemma continuous_at_Inf_mono:
fixes f :: "'a::{linorder_topology,conditionally_complete_linorder} \<Rightarrow>
'b::{linorder_topology,conditionally_complete_linorder}"
assumes "mono f"
and cont: "continuous (at_right (Inf S)) f"
and S: "S \<noteq> {}" "bdd_below S"
shows "f (Inf S) = (INF s\<in>S. f s)"
proof (rule antisym)
have f: "(f \<longlongrightarrow> f (Inf S)) (at_right (Inf S))"
using cont unfolding continuous_within .
show "(INF s\<in>S. f s) \<le> f (Inf S)"
proof cases
assume "Inf S \<in> S"
then show ?thesis
by (rule cINF_lower[rotated]) (auto intro: bdd_below_image_mono S \<open>mono f\<close>)
next
assume "Inf S \<notin> S"
from \<open>S \<noteq> {}\<close> obtain s where "s \<in> S"
by auto
with \<open>Inf S \<notin> S\<close> S have "Inf S < s"
unfolding less_le by (blast intro: cInf_lower)
show ?thesis
proof (rule ccontr)
assume "\<not> ?thesis"
with order_tendstoD(2)[OF f, of "INF s\<in>S. f s"] obtain b where "Inf S < b"
and *: "\<And>y. Inf S < y \<Longrightarrow> y < b \<Longrightarrow> f y < (INF s\<in>S. f s)"
by (auto simp: not_le eventually_at_right[OF \<open>Inf S < s\<close>])
with \<open>S \<noteq> {}\<close> obtain c where "c \<in> S" "c < b"
using cInf_lessD[of S b] by auto
with \<open>Inf S \<notin> S\<close> S have "Inf S < c"
unfolding less_le by (blast intro: cInf_lower)
from *[OF \<open>Inf S < c\<close> \<open>c < b\<close>] cINF_lower[OF bdd_below_image_mono[of f] \<open>c \<in> S\<close>]
show False
by (auto simp: assms)
qed
qed
qed (intro cINF_greatest \<open>mono f\<close>[THEN monoD] cInf_lower \<open>bdd_below S\<close> \<open>S \<noteq> {}\<close>)
lemma continuous_at_Inf_antimono:
fixes f :: "'a::{linorder_topology,conditionally_complete_linorder} \<Rightarrow>
'b::{linorder_topology,conditionally_complete_linorder}"
assumes "antimono f"
and cont: "continuous (at_right (Inf S)) f"
and S: "S \<noteq> {}" "bdd_below S"
shows "f (Inf S) = (SUP s\<in>S. f s)"
proof (rule antisym)
have f: "(f \<longlongrightarrow> f (Inf S)) (at_right (Inf S))"
using cont unfolding continuous_within .
show "f (Inf S) \<le> (SUP s\<in>S. f s)"
proof cases
assume "Inf S \<in> S"
then show ?thesis
by (rule cSUP_upper) (auto intro: bdd_above_image_antimono S \<open>antimono f\<close>)
next
assume "Inf S \<notin> S"
from \<open>S \<noteq> {}\<close> obtain s where "s \<in> S"
by auto
with \<open>Inf S \<notin> S\<close> S have "Inf S < s"
unfolding less_le by (blast intro: cInf_lower)
show ?thesis
proof (rule ccontr)
assume "\<not> ?thesis"
with order_tendstoD(1)[OF f, of "SUP s\<in>S. f s"] obtain b where "Inf S < b"
and *: "\<And>y. Inf S < y \<Longrightarrow> y < b \<Longrightarrow> (SUP s\<in>S. f s) < f y"
by (auto simp: not_le eventually_at_right[OF \<open>Inf S < s\<close>])
with \<open>S \<noteq> {}\<close> obtain c where "c \<in> S" "c < b"
using cInf_lessD[of S b] by auto
with \<open>Inf S \<notin> S\<close> S have "Inf S < c"
unfolding less_le by (blast intro: cInf_lower)
from *[OF \<open>Inf S < c\<close> \<open>c < b\<close>] cSUP_upper[OF \<open>c \<in> S\<close> bdd_above_image_antimono[of f]]
show False
by (auto simp: assms)
qed
qed
qed (intro cSUP_least \<open>antimono f\<close>[THEN antimonoD] cInf_lower S)
subsection \<open>Uniform spaces\<close>
class uniformity =
fixes uniformity :: "('a \<times> 'a) filter"
begin
abbreviation uniformity_on :: "'a set \<Rightarrow> ('a \<times> 'a) filter"
where "uniformity_on s \<equiv> inf uniformity (principal (s\<times>s))"
end
lemma uniformity_Abort:
"uniformity =
Filter.abstract_filter (\<lambda>u. Code.abort (STR ''uniformity is not executable'') (\<lambda>u. uniformity))"
by simp
class open_uniformity = "open" + uniformity +
assumes open_uniformity:
"\<And>U. open U \<longleftrightarrow> (\<forall>x\<in>U. eventually (\<lambda>(x', y). x' = x \<longrightarrow> y \<in> U) uniformity)"
begin
subclass topological_space
by standard (force elim: eventually_mono eventually_elim2 simp: split_beta' open_uniformity)+
end
class uniform_space = open_uniformity +
assumes uniformity_refl: "eventually E uniformity \<Longrightarrow> E (x, x)"
and uniformity_sym: "eventually E uniformity \<Longrightarrow> eventually (\<lambda>(x, y). E (y, x)) uniformity"
and uniformity_trans:
"eventually E uniformity \<Longrightarrow>
\<exists>D. eventually D uniformity \<and> (\<forall>x y z. D (x, y) \<longrightarrow> D (y, z) \<longrightarrow> E (x, z))"
begin
lemma uniformity_bot: "uniformity \<noteq> bot"
using uniformity_refl by auto
lemma uniformity_trans':
"eventually E uniformity \<Longrightarrow>
eventually (\<lambda>((x, y), (y', z)). y = y' \<longrightarrow> E (x, z)) (uniformity \<times>\<^sub>F uniformity)"
by (drule uniformity_trans) (auto simp add: eventually_prod_same)
lemma uniformity_transE:
assumes "eventually E uniformity"
obtains D where "eventually D uniformity" "\<And>x y z. D (x, y) \<Longrightarrow> D (y, z) \<Longrightarrow> E (x, z)"
using uniformity_trans [OF assms] by auto
lemma eventually_nhds_uniformity:
"eventually P (nhds x) \<longleftrightarrow> eventually (\<lambda>(x', y). x' = x \<longrightarrow> P y) uniformity"
(is "_ \<longleftrightarrow> ?N P x")
unfolding eventually_nhds
proof safe
assume *: "?N P x"
have "?N (?N P) x" if "?N P x" for x
proof -
from that obtain D where ev: "eventually D uniformity"
and D: "D (a, b) \<Longrightarrow> D (b, c) \<Longrightarrow> case (a, c) of (x', y) \<Rightarrow> x' = x \<longrightarrow> P y" for a b c
by (rule uniformity_transE) simp
from ev show ?thesis
by eventually_elim (insert ev D, force elim: eventually_mono split: prod.split)
qed
then have "open {x. ?N P x}"
by (simp add: open_uniformity)
then show "\<exists>S. open S \<and> x \<in> S \<and> (\<forall>x\<in>S. P x)"
by (intro exI[of _ "{x. ?N P x}"]) (auto dest: uniformity_refl simp: *)
qed (force simp add: open_uniformity elim: eventually_mono)
subsubsection \<open>Totally bounded sets\<close>
definition totally_bounded :: "'a set \<Rightarrow> bool"
where "totally_bounded S \<longleftrightarrow>
(\<forall>E. eventually E uniformity \<longrightarrow> (\<exists>X. finite X \<and> (\<forall>s\<in>S. \<exists>x\<in>X. E (x, s))))"
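text \<open>Informal reading: a set S is totally bounded if for every entourage E there is a
  finite set X such that each point of S is E-close to some point of X.  In a metric space
  this is expected to specialise to the usual finite e-net condition: for every e > 0
  finitely many balls of radius e suffice to cover S.\<close>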
lemma totally_bounded_empty[iff]: "totally_bounded {}"
by (auto simp add: totally_bounded_def)
lemma totally_bounded_subset: "totally_bounded S \<Longrightarrow> T \<subseteq> S \<Longrightarrow> totally_bounded T"
by (fastforce simp add: totally_bounded_def)
lemma totally_bounded_Union[intro]:
assumes M: "finite M" "\<And>S. S \<in> M \<Longrightarrow> totally_bounded S"
shows "totally_bounded (\<Union>M)"
unfolding totally_bounded_def
proof safe
fix E
assume "eventually E uniformity"
with M obtain X where "\<forall>S\<in>M. finite (X S) \<and> (\<forall>s\<in>S. \<exists>x\<in>X S. E (x, s))"
by (metis totally_bounded_def)
with \<open>finite M\<close> show "\<exists>X. finite X \<and> (\<forall>s\<in>\<Union>M. \<exists>x\<in>X. E (x, s))"
by (intro exI[of _ "\<Union>S\<in>M. X S"]) force
qed
subsubsection \<open>Cauchy filter\<close>
definition cauchy_filter :: "'a filter \<Rightarrow> bool"
where "cauchy_filter F \<longleftrightarrow> F \<times>\<^sub>F F \<le> uniformity"
definition Cauchy :: "(nat \<Rightarrow> 'a) \<Rightarrow> bool"
where Cauchy_uniform: "Cauchy X = cauchy_filter (filtermap X sequentially)"
lemma Cauchy_uniform_iff:
"Cauchy X \<longleftrightarrow> (\<forall>P. eventually P uniformity \<longrightarrow> (\<exists>N. \<forall>n\<ge>N. \<forall>m\<ge>N. P (X n, X m)))"
unfolding Cauchy_uniform cauchy_filter_def le_filter_def eventually_prod_same
eventually_filtermap eventually_sequentially
proof safe
let ?U = "\<lambda>P. eventually P uniformity"
{
fix P
assume "?U P" "\<forall>P. ?U P \<longrightarrow> (\<exists>Q. (\<exists>N. \<forall>n\<ge>N. Q (X n)) \<and> (\<forall>x y. Q x \<longrightarrow> Q y \<longrightarrow> P (x, y)))"
then obtain Q N where "\<And>n. n \<ge> N \<Longrightarrow> Q (X n)" "\<And>x y. Q x \<Longrightarrow> Q y \<Longrightarrow> P (x, y)"
by metis
then show "\<exists>N. \<forall>n\<ge>N. \<forall>m\<ge>N. P (X n, X m)"
by blast
next
fix P
assume "?U P" and P: "\<forall>P. ?U P \<longrightarrow> (\<exists>N. \<forall>n\<ge>N. \<forall>m\<ge>N. P (X n, X m))"
then obtain Q where "?U Q" and Q: "\<And>x y z. Q (x, y) \<Longrightarrow> Q (y, z) \<Longrightarrow> P (x, z)"
by (auto elim: uniformity_transE)
then have "?U (\<lambda>x. Q x \<and> (\<lambda>(x, y). Q (y, x)) x)"
unfolding eventually_conj_iff by (simp add: uniformity_sym)
from P[rule_format, OF this]
obtain N where N: "\<And>n m. n \<ge> N \<Longrightarrow> m \<ge> N \<Longrightarrow> Q (X n, X m) \<and> Q (X m, X n)"
by auto
show "\<exists>Q. (\<exists>N. \<forall>n\<ge>N. Q (X n)) \<and> (\<forall>x y. Q x \<longrightarrow> Q y \<longrightarrow> P (x, y))"
proof (safe intro!: exI[of _ "\<lambda>x. \<forall>n\<ge>N. Q (x, X n) \<and> Q (X n, x)"] exI[of _ N] N)
fix x y
assume "\<forall>n\<ge>N. Q (x, X n) \<and> Q (X n, x)" "\<forall>n\<ge>N. Q (y, X n) \<and> Q (X n, y)"
then have "Q (x, X N)" "Q (X N, y)" by auto
then show "P (x, y)"
by (rule Q)
qed
}
qed
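text \<open>Informal reading (sketch): for the uniformity of a metric space the characterisation
  above should unfold to the classical formulation, namely that X is Cauchy iff for every
  e > 0 there is an N such that any two elements of the sequence beyond index N are within
  distance e of each other.\<close>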
lemma nhds_imp_cauchy_filter:
assumes *: "F \<le> nhds x"
shows "cauchy_filter F"
proof -
have "F \<times>\<^sub>F F \<le> nhds x \<times>\<^sub>F nhds x"
by (intro prod_filter_mono *)
also have "\<dots> \<le> uniformity"
unfolding le_filter_def eventually_nhds_uniformity eventually_prod_same
proof safe
fix P
assume "eventually P uniformity"
then obtain Ql where ev: "eventually Ql uniformity"
and "Ql (x, y) \<Longrightarrow> Ql (y, z) \<Longrightarrow> P (x, z)" for x y z
by (rule uniformity_transE) simp
with ev[THEN uniformity_sym]
show "\<exists>Q. eventually (\<lambda>(x', y). x' = x \<longrightarrow> Q y) uniformity \<and>
(\<forall>x y. Q x \<longrightarrow> Q y \<longrightarrow> P (x, y))"
by (rule_tac exI[of _ "\<lambda>y. Ql (y, x) \<and> Ql (x, y)"]) (fastforce elim: eventually_elim2)
qed
finally show ?thesis
by (simp add: cauchy_filter_def)
qed
lemma LIMSEQ_imp_Cauchy: "X \<longlonglongrightarrow> x \<Longrightarrow> Cauchy X"
unfolding Cauchy_uniform filterlim_def by (intro nhds_imp_cauchy_filter)
lemma Cauchy_subseq_Cauchy:
assumes "Cauchy X" "strict_mono f"
shows "Cauchy (X \<circ> f)"
unfolding Cauchy_uniform comp_def filtermap_filtermap[symmetric] cauchy_filter_def
by (rule order_trans[OF _ \<open>Cauchy X\<close>[unfolded Cauchy_uniform cauchy_filter_def]])
(intro prod_filter_mono filtermap_mono filterlim_subseq[OF \<open>strict_mono f\<close>, unfolded filterlim_def])
lemma convergent_Cauchy: "convergent X \<Longrightarrow> Cauchy X"
unfolding convergent_def by (erule exE, erule LIMSEQ_imp_Cauchy)
definition complete :: "'a set \<Rightarrow> bool"
where complete_uniform: "complete S \<longleftrightarrow>
(\<forall>F \<le> principal S. F \<noteq> bot \<longrightarrow> cauchy_filter F \<longrightarrow> (\<exists>x\<in>S. F \<le> nhds x))"
lemma (in uniform_space) cauchy_filter_complete_converges:
assumes "cauchy_filter F" "complete A" "F \<le> principal A" "F \<noteq> bot"
shows "\<exists>c. F \<le> nhds c"
using assms unfolding complete_uniform by blast
end
subsubsection \<open>Uniformly continuous functions\<close>
definition uniformly_continuous_on :: "'a set \<Rightarrow> ('a::uniform_space \<Rightarrow> 'b::uniform_space) \<Rightarrow> bool"
where uniformly_continuous_on_uniformity: "uniformly_continuous_on s f \<longleftrightarrow>
(LIM (x, y) (uniformity_on s). (f x, f y) :> uniformity)"
lemma uniformly_continuous_onD:
"uniformly_continuous_on s f \<Longrightarrow> eventually E uniformity \<Longrightarrow>
eventually (\<lambda>(x, y). x \<in> s \<longrightarrow> y \<in> s \<longrightarrow> E (f x, f y)) uniformity"
by (simp add: uniformly_continuous_on_uniformity filterlim_iff
eventually_inf_principal split_beta' mem_Times_iff imp_conjL)
lemma uniformly_continuous_on_const[continuous_intros]: "uniformly_continuous_on s (\<lambda>x. c)"
by (auto simp: uniformly_continuous_on_uniformity filterlim_iff uniformity_refl)
lemma uniformly_continuous_on_id[continuous_intros]: "uniformly_continuous_on s (\<lambda>x. x)"
by (auto simp: uniformly_continuous_on_uniformity filterlim_def)
lemma uniformly_continuous_on_compose:
"uniformly_continuous_on s g \<Longrightarrow> uniformly_continuous_on (g`s) f \<Longrightarrow>
uniformly_continuous_on s (\<lambda>x. f (g x))"
using filterlim_compose[of "\<lambda>(x, y). (f x, f y)" uniformity
"uniformity_on (g`s)" "\<lambda>(x, y). (g x, g y)" "uniformity_on s"]
by (simp add: split_beta' uniformly_continuous_on_uniformity
filterlim_inf filterlim_principal eventually_inf_principal mem_Times_iff)
lemma uniformly_continuous_imp_continuous:
assumes f: "uniformly_continuous_on s f"
shows "continuous_on s f"
by (auto simp: filterlim_iff eventually_at_filter eventually_nhds_uniformity continuous_on_def
elim: eventually_mono dest!: uniformly_continuous_onD[OF f])
section \<open>Product Topology\<close>
subsection \<open>Product is a topological space\<close>
instantiation prod :: (topological_space, topological_space) topological_space
begin
definition open_prod_def[code del]:
"open (S :: ('a \<times> 'b) set) \<longleftrightarrow>
(\<forall>x\<in>S. \<exists>A B. open A \<and> open B \<and> x \<in> A \<times> B \<and> A \<times> B \<subseteq> S)"
lemma open_prod_elim:
assumes "open S" and "x \<in> S"
obtains A B where "open A" and "open B" and "x \<in> A \<times> B" and "A \<times> B \<subseteq> S"
using assms unfolding open_prod_def by fast
lemma open_prod_intro:
assumes "\<And>x. x \<in> S \<Longrightarrow> \<exists>A B. open A \<and> open B \<and> x \<in> A \<times> B \<and> A \<times> B \<subseteq> S"
shows "open S"
using assms unfolding open_prod_def by fast
instance
proof
show "open (UNIV :: ('a \<times> 'b) set)"
unfolding open_prod_def by auto
next
fix S T :: "('a \<times> 'b) set"
assume "open S" "open T"
show "open (S \<inter> T)"
proof (rule open_prod_intro)
fix x
assume x: "x \<in> S \<inter> T"
from x have "x \<in> S" by simp
obtain Sa Sb where A: "open Sa" "open Sb" "x \<in> Sa \<times> Sb" "Sa \<times> Sb \<subseteq> S"
using \<open>open S\<close> and \<open>x \<in> S\<close> by (rule open_prod_elim)
from x have "x \<in> T" by simp
obtain Ta Tb where B: "open Ta" "open Tb" "x \<in> Ta \<times> Tb" "Ta \<times> Tb \<subseteq> T"
using \<open>open T\<close> and \<open>x \<in> T\<close> by (rule open_prod_elim)
let ?A = "Sa \<inter> Ta" and ?B = "Sb \<inter> Tb"
have "open ?A \<and> open ?B \<and> x \<in> ?A \<times> ?B \<and> ?A \<times> ?B \<subseteq> S \<inter> T"
using A B by (auto simp add: open_Int)
then show "\<exists>A B. open A \<and> open B \<and> x \<in> A \<times> B \<and> A \<times> B \<subseteq> S \<inter> T"
by fast
qed
next
fix K :: "('a \<times> 'b) set set"
assume "\<forall>S\<in>K. open S"
then show "open (\<Union>K)"
unfolding open_prod_def by fast
qed
end
declare [[code abort: "open :: ('a::topological_space \<times> 'b::topological_space) set \<Rightarrow> bool"]]
lemma open_Times: "open S \<Longrightarrow> open T \<Longrightarrow> open (S \<times> T)"
unfolding open_prod_def by auto
lemma fst_vimage_eq_Times: "fst -` S = S \<times> UNIV"
by auto
lemma snd_vimage_eq_Times: "snd -` S = UNIV \<times> S"
by auto
lemma open_vimage_fst: "open S \<Longrightarrow> open (fst -` S)"
by (simp add: fst_vimage_eq_Times open_Times)
lemma open_vimage_snd: "open S \<Longrightarrow> open (snd -` S)"
by (simp add: snd_vimage_eq_Times open_Times)
lemma closed_vimage_fst: "closed S \<Longrightarrow> closed (fst -` S)"
unfolding closed_open vimage_Compl [symmetric]
by (rule open_vimage_fst)
lemma closed_vimage_snd: "closed S \<Longrightarrow> closed (snd -` S)"
unfolding closed_open vimage_Compl [symmetric]
by (rule open_vimage_snd)
lemma closed_Times: "closed S \<Longrightarrow> closed T \<Longrightarrow> closed (S \<times> T)"
proof -
have "S \<times> T = (fst -` S) \<inter> (snd -` T)"
by auto
then show "closed S \<Longrightarrow> closed T \<Longrightarrow> closed (S \<times> T)"
by (simp add: closed_vimage_fst closed_vimage_snd closed_Int)
qed
lemma subset_fst_imageI: "A \<times> B \<subseteq> S \<Longrightarrow> y \<in> B \<Longrightarrow> A \<subseteq> fst ` S"
unfolding image_def subset_eq by force
lemma subset_snd_imageI: "A \<times> B \<subseteq> S \<Longrightarrow> x \<in> A \<Longrightarrow> B \<subseteq> snd ` S"
unfolding image_def subset_eq by force
lemma open_image_fst:
assumes "open S"
shows "open (fst ` S)"
proof (rule openI)
fix x
assume "x \<in> fst ` S"
then obtain y where "(x, y) \<in> S"
by auto
then obtain A B where "open A" "open B" "x \<in> A" "y \<in> B" "A \<times> B \<subseteq> S"
using \<open>open S\<close> unfolding open_prod_def by auto
from \<open>A \<times> B \<subseteq> S\<close> \<open>y \<in> B\<close> have "A \<subseteq> fst ` S"
by (rule subset_fst_imageI)
with \<open>open A\<close> \<open>x \<in> A\<close> have "open A \<and> x \<in> A \<and> A \<subseteq> fst ` S"
by simp
then show "\<exists>T. open T \<and> x \<in> T \<and> T \<subseteq> fst ` S" ..
qed
lemma open_image_snd:
assumes "open S"
shows "open (snd ` S)"
proof (rule openI)
fix y
assume "y \<in> snd ` S"
then obtain x where "(x, y) \<in> S"
by auto
then obtain A B where "open A" "open B" "x \<in> A" "y \<in> B" "A \<times> B \<subseteq> S"
using \<open>open S\<close> unfolding open_prod_def by auto
from \<open>A \<times> B \<subseteq> S\<close> \<open>x \<in> A\<close> have "B \<subseteq> snd ` S"
by (rule subset_snd_imageI)
with \<open>open B\<close> \<open>y \<in> B\<close> have "open B \<and> y \<in> B \<and> B \<subseteq> snd ` S"
by simp
then show "\<exists>T. open T \<and> y \<in> T \<and> T \<subseteq> snd ` S" ..
qed
lemma nhds_prod: "nhds (a, b) = nhds a \<times>\<^sub>F nhds b"
unfolding nhds_def
proof (subst prod_filter_INF, auto intro!: antisym INF_greatest simp: principal_prod_principal)
fix S T
assume "open S" "a \<in> S" "open T" "b \<in> T"
then show "(INF x \<in> {S. open S \<and> (a, b) \<in> S}. principal x) \<le> principal (S \<times> T)"
by (intro INF_lower) (auto intro!: open_Times)
next
fix S'
assume "open S'" "(a, b) \<in> S'"
then obtain S T where "open S" "a \<in> S" "open T" "b \<in> T" "S \<times> T \<subseteq> S'"
by (auto elim: open_prod_elim)
then show "(INF x \<in> {S. open S \<and> a \<in> S}. INF y \<in> {S. open S \<and> b \<in> S}.
principal (x \<times> y)) \<le> principal S'"
by (auto intro!: INF_lower2)
qed
subsubsection \<open>Continuity of operations\<close>
lemma tendsto_fst [tendsto_intros]:
assumes "(f \<longlongrightarrow> a) F"
shows "((\<lambda>x. fst (f x)) \<longlongrightarrow> fst a) F"
proof (rule topological_tendstoI)
fix S
assume "open S" and "fst a \<in> S"
then have "open (fst -` S)" and "a \<in> fst -` S"
by (simp_all add: open_vimage_fst)
with assms have "eventually (\<lambda>x. f x \<in> fst -` S) F"
by (rule topological_tendstoD)
then show "eventually (\<lambda>x. fst (f x) \<in> S) F"
by simp
qed
lemma tendsto_snd [tendsto_intros]:
assumes "(f \<longlongrightarrow> a) F"
shows "((\<lambda>x. snd (f x)) \<longlongrightarrow> snd a) F"
proof (rule topological_tendstoI)
fix S
assume "open S" and "snd a \<in> S"
then have "open (snd -` S)" and "a \<in> snd -` S"
by (simp_all add: open_vimage_snd)
with assms have "eventually (\<lambda>x. f x \<in> snd -` S) F"
by (rule topological_tendstoD)
then show "eventually (\<lambda>x. snd (f x) \<in> S) F"
by simp
qed
lemma tendsto_Pair [tendsto_intros]:
assumes "(f \<longlongrightarrow> a) F" and "(g \<longlongrightarrow> b) F"
shows "((\<lambda>x. (f x, g x)) \<longlongrightarrow> (a, b)) F"
unfolding nhds_prod using assms by (rule filterlim_Pair)
lemma continuous_fst[continuous_intros]: "continuous F f \<Longrightarrow> continuous F (\<lambda>x. fst (f x))"
unfolding continuous_def by (rule tendsto_fst)
lemma continuous_snd[continuous_intros]: "continuous F f \<Longrightarrow> continuous F (\<lambda>x. snd (f x))"
unfolding continuous_def by (rule tendsto_snd)
lemma continuous_Pair[continuous_intros]:
"continuous F f \<Longrightarrow> continuous F g \<Longrightarrow> continuous F (\<lambda>x. (f x, g x))"
unfolding continuous_def by (rule tendsto_Pair)
lemma continuous_on_fst[continuous_intros]:
"continuous_on s f \<Longrightarrow> continuous_on s (\<lambda>x. fst (f x))"
unfolding continuous_on_def by (auto intro: tendsto_fst)
lemma continuous_on_snd[continuous_intros]:
"continuous_on s f \<Longrightarrow> continuous_on s (\<lambda>x. snd (f x))"
unfolding continuous_on_def by (auto intro: tendsto_snd)
lemma continuous_on_Pair[continuous_intros]:
"continuous_on s f \<Longrightarrow> continuous_on s g \<Longrightarrow> continuous_on s (\<lambda>x. (f x, g x))"
unfolding continuous_on_def by (auto intro: tendsto_Pair)
lemma continuous_on_swap[continuous_intros]: "continuous_on A prod.swap"
by (simp add: prod.swap_def continuous_on_fst continuous_on_snd
continuous_on_Pair continuous_on_id)
lemma continuous_on_swap_args:
assumes "continuous_on (A\<times>B) (\<lambda>(x,y). d x y)"
shows "continuous_on (B\<times>A) (\<lambda>(x,y). d y x)"
proof -
have "(\<lambda>(x,y). d y x) = (\<lambda>(x,y). d x y) \<circ> prod.swap"
by force
then show ?thesis
by (metis assms continuous_on_compose continuous_on_swap product_swap)
qed
lemma isCont_fst [simp]: "isCont f a \<Longrightarrow> isCont (\<lambda>x. fst (f x)) a"
by (fact continuous_fst)
lemma isCont_snd [simp]: "isCont f a \<Longrightarrow> isCont (\<lambda>x. snd (f x)) a"
by (fact continuous_snd)
lemma isCont_Pair [simp]: "\<lbrakk>isCont f a; isCont g a\<rbrakk> \<Longrightarrow> isCont (\<lambda>x. (f x, g x)) a"
by (fact continuous_Pair)
lemma continuous_on_compose_Pair:
assumes f: "continuous_on (Sigma A B) (\<lambda>(a, b). f a b)"
assumes g: "continuous_on C g"
assumes h: "continuous_on C h"
assumes subset: "\<And>c. c \<in> C \<Longrightarrow> g c \<in> A" "\<And>c. c \<in> C \<Longrightarrow> h c \<in> B (g c)"
shows "continuous_on C (\<lambda>c. f (g c) (h c))"
using continuous_on_compose2[OF f continuous_on_Pair[OF g h]] subset
by auto
subsubsection \<open>Connectedness of products\<close>
proposition connected_Times:
assumes S: "connected S" and T: "connected T"
shows "connected (S \<times> T)"
proof (rule connectedI_const)
fix P::"'a \<times> 'b \<Rightarrow> bool"
assume P[THEN continuous_on_compose2, continuous_intros]: "continuous_on (S \<times> T) P"
have "continuous_on S (\<lambda>s. P (s, t))" if "t \<in> T" for t
by (auto intro!: continuous_intros that)
from connectedD_const[OF S this]
obtain c1 where c1: "\<And>s t. t \<in> T \<Longrightarrow> s \<in> S \<Longrightarrow> P (s, t) = c1 t"
by metis
moreover
have "continuous_on T (\<lambda>t. P (s, t))" if "s \<in> S" for s
by (auto intro!: continuous_intros that)
from connectedD_const[OF T this]
obtain c2 where "\<And>s t. t \<in> T \<Longrightarrow> s \<in> S \<Longrightarrow> P (s, t) = c2 s"
by metis
ultimately show "\<exists>c. \<forall>s\<in>S \<times> T. P s = c"
by auto
qed
corollary connected_Times_eq [simp]:
"connected (S \<times> T) \<longleftrightarrow> S = {} \<or> T = {} \<or> connected S \<and> connected T" (is "?lhs = ?rhs")
proof
assume L: ?lhs
show ?rhs
proof cases
assume "S \<noteq> {} \<and> T \<noteq> {}"
moreover
have "connected (fst ` (S \<times> T))" "connected (snd ` (S \<times> T))"
using continuous_on_fst continuous_on_snd continuous_on_id
by (blast intro: connected_continuous_image [OF _ L])+
ultimately show ?thesis
by auto
qed auto
qed (auto simp: connected_Times)
subsubsection \<open>Separation axioms\<close>
instance prod :: (t0_space, t0_space) t0_space
proof
fix x y :: "'a \<times> 'b"
assume "x \<noteq> y"
then have "fst x \<noteq> fst y \<or> snd x \<noteq> snd y"
by (simp add: prod_eq_iff)
then show "\<exists>U. open U \<and> (x \<in> U) \<noteq> (y \<in> U)"
by (fast dest: t0_space elim: open_vimage_fst open_vimage_snd)
qed
instance prod :: (t1_space, t1_space) t1_space
proof
fix x y :: "'a \<times> 'b"
assume "x \<noteq> y"
then have "fst x \<noteq> fst y \<or> snd x \<noteq> snd y"
by (simp add: prod_eq_iff)
then show "\<exists>U. open U \<and> x \<in> U \<and> y \<notin> U"
by (fast dest: t1_space elim: open_vimage_fst open_vimage_snd)
qed
instance prod :: (t2_space, t2_space) t2_space
proof
fix x y :: "'a \<times> 'b"
assume "x \<noteq> y"
then have "fst x \<noteq> fst y \<or> snd x \<noteq> snd y"
by (simp add: prod_eq_iff)
then show "\<exists>U V. open U \<and> open V \<and> x \<in> U \<and> y \<in> V \<and> U \<inter> V = {}"
by (fast dest: hausdorff elim: open_vimage_fst open_vimage_snd)
qed
lemma isCont_swap[continuous_intros]: "isCont prod.swap a"
using continuous_on_eq_continuous_within continuous_on_swap by blast
lemma open_diagonal_complement:
"open {(x,y) |x y. x \<noteq> (y::('a::t2_space))}"
proof -
have "open {(x, y). x \<noteq> (y::'a)}"
unfolding split_def by (intro open_Collect_neq continuous_intros)
also have "{(x, y). x \<noteq> (y::'a)} = {(x, y) |x y. x \<noteq> (y::'a)}"
by auto
finally show ?thesis .
qed
lemma closed_diagonal:
"closed {y. \<exists> x::('a::t2_space). y = (x,x)}"
proof -
have "{y. \<exists> x::'a. y = (x,x)} = UNIV - {(x,y) | x y. x \<noteq> y}" by auto
then show ?thesis using open_diagonal_complement closed_Diff by auto
qed
lemma open_superdiagonal:
"open {(x,y) | x y. x > (y::'a::{linorder_topology})}"
proof -
have "open {(x, y). x > (y::'a)}"
unfolding split_def by (intro open_Collect_less continuous_intros)
also have "{(x, y). x > (y::'a)} = {(x, y) |x y. x > (y::'a)}"
by auto
finally show ?thesis .
qed
lemma closed_subdiagonal:
"closed {(x,y) | x y. x \<le> (y::'a::{linorder_topology})}"
proof -
have "{(x,y) | x y. x \<le> (y::'a)} = UNIV - {(x,y) | x y. x > (y::'a)}" by auto
then show ?thesis using open_superdiagonal closed_Diff by auto
qed
lemma open_subdiagonal:
"open {(x,y) | x y. x < (y::'a::{linorder_topology})}"
proof -
have "open {(x, y). x < (y::'a)}"
unfolding split_def by (intro open_Collect_less continuous_intros)
also have "{(x, y). x < (y::'a)} = {(x, y) |x y. x < (y::'a)}"
by auto
finally show ?thesis .
qed
lemma closed_superdiagonal:
"closed {(x,y) | x y. x \<ge> (y::('a::{linorder_topology}))}"
proof -
have "{(x,y) | x y. x \<ge> (y::'a)} = UNIV - {(x,y) | x y. x < y}" by auto
then show ?thesis using open_subdiagonal closed_Diff by auto
qed
end
diff --git a/src/Provers/order_tac.ML b/src/Provers/order_tac.ML
--- a/src/Provers/order_tac.ML
+++ b/src/Provers/order_tac.ML
@@ -1,434 +1,435 @@
signature REIFY_TABLE =
sig
type table
val empty : table
val get_var : term -> table -> (int * table)
val get_term : int -> table -> term option
end
structure Reifytab: REIFY_TABLE =
struct
type table = (int * (term * int) list)
val empty = (0, [])
fun get_var t (max_var, tis) =
(case AList.lookup Envir.aeconv tis t of
SOME v => (v, (max_var, tis))
| NONE => (max_var, (max_var + 1, (t, max_var) :: tis))
)
fun get_term v (_, tis) = Library.find_first (fn (_, v2) => v = v2) tis
|> Option.map fst
end
signature LOGIC_SIGNATURE =
sig
val mk_Trueprop : term -> term
val dest_Trueprop : term -> term
val Trueprop_conv : conv -> conv
val Not : term
val conj : term
val disj : term
val notI : thm (* (P \<Longrightarrow> False) \<Longrightarrow> \<not> P *)
val ccontr : thm (* (\<not> P \<Longrightarrow> False) \<Longrightarrow> P *)
val conjI : thm (* P \<Longrightarrow> Q \<Longrightarrow> P \<and> Q *)
val conjE : thm (* P \<and> Q \<Longrightarrow> (P \<Longrightarrow> Q \<Longrightarrow> R) \<Longrightarrow> R *)
val disjE : thm (* P \<or> Q \<Longrightarrow> (P \<Longrightarrow> R) \<Longrightarrow> (Q \<Longrightarrow> R) \<Longrightarrow> R *)
val not_not_conv : conv (* \<not> (\<not> P) \<equiv> P *)
val de_Morgan_conj_conv : conv (* \<not> (P \<and> Q) \<equiv> \<not> P \<or> \<not> Q *)
val de_Morgan_disj_conv : conv (* \<not> (P \<or> Q) \<equiv> \<not> P \<and> \<not> Q *)
val conj_disj_distribL_conv : conv (* P \<and> (Q \<or> R) \<equiv> (P \<and> Q) \<or> (P \<and> R) *)
val conj_disj_distribR_conv : conv (* (Q \<or> R) \<and> P \<equiv> (Q \<and> P) \<or> (R \<and> P) *)
end
(* Control tracing output of the solver. *)
val order_trace_cfg = Attrib.setup_config_bool @{binding "order_trace"} (K false)
(* In partial orders, literals of the form \<not> x < y will force the order solver to perform case
distinctions, which leads to an exponential blowup of the runtime. The split limit controls
the number of literals of this form that are passed to the solver.
*)
val order_split_limit_cfg = Attrib.setup_config_int @{binding "order_split_limit"} (K 8)
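(* Both options are ordinary configuration options, so they can be adjusted locally in a
   proof, e.g. (usage sketch):  using [[order_trace = true, order_split_limit = 12]]  *)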
datatype order_kind = Order | Linorder
type order_literal = (bool * Order_Procedure.order_atom)
type order_context = {
kind : order_kind,
ops : term list, thms : (string * thm) list, conv_thms : (string * thm) list
}
signature BASE_ORDER_TAC =
sig
val tac :
(order_literal Order_Procedure.fm -> Order_Procedure.prf_trm option)
-> order_context -> thm list
-> Proof.context -> int -> tactic
end
functor Base_Order_Tac(
structure Logic_Sig : LOGIC_SIGNATURE; val excluded_types : typ list) : BASE_ORDER_TAC =
struct
open Order_Procedure
fun expect _ (SOME x) = x
| expect f NONE = f ()
fun list_curry0 f = (fn [] => f, 0)
fun list_curry1 f = (fn [x] => f x, 1)
fun list_curry2 f = (fn [x, y] => f x y, 2)
fun dereify_term consts reifytab t =
let
fun dereify_term' (App (t1, t2)) = (dereify_term' t1) $ (dereify_term' t2)
| dereify_term' (Const s) =
AList.lookup (op =) consts s
|> expect (fn () => raise TERM ("Const " ^ s ^ " not in", map snd consts))
| dereify_term' (Var v) = Reifytab.get_term (integer_of_int v) reifytab |> the
in
dereify_term' t
end
fun dereify_order_fm (eq, le, lt) reifytab t =
let
val consts = [
("eq", eq), ("le", le), ("lt", lt),
("Not", Logic_Sig.Not), ("disj", Logic_Sig.disj), ("conj", Logic_Sig.conj)
]
in
dereify_term consts reifytab t
end
fun strip_AppP t =
let fun strip (AppP (f, s), ss) = strip (f, s::ss)
| strip x = x
in strip (t, []) end
fun replay_conv convs cvp =
let
val convs = convs @
[("all_conv", list_curry0 Conv.all_conv)] @
map (apsnd list_curry1) [
("atom_conv", I),
("neg_atom_conv", I),
("arg_conv", Conv.arg_conv)] @
map (apsnd list_curry2) [
("combination_conv", Conv.combination_conv),
("then_conv", curry (op then_conv))]
fun lookup_conv convs c = AList.lookup (op =) convs c
|> expect (fn () => error ("Can't replay conversion: " ^ c))
fun rp_conv t =
(case strip_AppP t ||> map rp_conv of
(PThm c, cvs) =>
let val (conv, arity) = lookup_conv convs c
in if arity = length cvs
then conv cvs
else error ("Expected " ^ Int.toString arity ^ " arguments for conversion " ^
c ^ " but got " ^ (length cvs |> Int.toString) ^ " arguments")
end
| _ => error "Unexpected constructor in conversion proof")
in
rp_conv cvp
end
fun replay_prf_trm replay_conv dereify ctxt thmtab assmtab p =
let
fun replay_prf_trm' _ (PThm s) =
AList.lookup (op =) thmtab s
|> expect (fn () => error ("Cannot replay theorem: " ^ s))
| replay_prf_trm' assmtab (Appt (p, t)) =
replay_prf_trm' assmtab p
|> Drule.infer_instantiate' ctxt [SOME (Thm.cterm_of ctxt (dereify t))]
| replay_prf_trm' assmtab (AppP (p1, p2)) =
apply2 (replay_prf_trm' assmtab) (p2, p1) |> op COMP
| replay_prf_trm' assmtab (AbsP (reified_t, p)) =
let
val t = dereify reified_t
val t_thm = Logic_Sig.mk_Trueprop t |> Thm.cterm_of ctxt |> Assumption.assume ctxt
val rp = replay_prf_trm' (Termtab.update (Thm.prop_of t_thm, t_thm) assmtab) p
in
Thm.implies_intr (Thm.cprop_of t_thm) rp
end
| replay_prf_trm' assmtab (Bound reified_t) =
let
val t = dereify reified_t |> Logic_Sig.mk_Trueprop
in
Termtab.lookup assmtab t
|> expect (fn () => raise TERM ("Assumption not found:", t::Termtab.keys assmtab))
end
| replay_prf_trm' assmtab (Conv (t, cp, p)) =
let
val thm = replay_prf_trm' assmtab (Bound t)
val conv = Logic_Sig.Trueprop_conv (replay_conv cp)
val conv_thm = Conv.fconv_rule conv thm
val conv_term = Thm.prop_of conv_thm
in
replay_prf_trm' (Termtab.update (conv_term, conv_thm) assmtab) p
end
in
replay_prf_trm' assmtab p
end
fun replay_order_prf_trm ord_ops {thms = thms, conv_thms = conv_thms, ...} ctxt reifytab assmtab =
let
val thmtab = thms @ [
("conjE", Logic_Sig.conjE), ("conjI", Logic_Sig.conjI), ("disjE", Logic_Sig.disjE)
]
val convs = map (apsnd list_curry0) (
map (apsnd Conv.rewr_conv) conv_thms @
[
("not_not_conv", Logic_Sig.not_not_conv),
("de_Morgan_conj_conv", Logic_Sig.de_Morgan_conj_conv),
("de_Morgan_disj_conv", Logic_Sig.de_Morgan_disj_conv),
("conj_disj_distribR_conv", Logic_Sig.conj_disj_distribR_conv),
("conj_disj_distribL_conv", Logic_Sig.conj_disj_distribL_conv)
])
val dereify = dereify_order_fm ord_ops reifytab
in
replay_prf_trm (replay_conv convs) dereify ctxt thmtab assmtab
end
fun strip_Not (nt $ t) = if nt = Logic_Sig.Not then t else nt $ t
| strip_Not t = t
fun limit_not_less [_, _, lt] ctxt decomp_prems =
let
val thy = Proof_Context.theory_of ctxt
val trace = Config.get ctxt order_trace_cfg
val limit = Config.get ctxt order_split_limit_cfg
fun is_not_less_term t =
case try (strip_Not o Logic_Sig.dest_Trueprop) t of
SOME (binop $ _ $ _) => Pattern.matches thy (lt, binop)
| NONE => false
val not_less_prems = filter (is_not_less_term o Thm.prop_of o fst) decomp_prems
val _ = if trace andalso length not_less_prems > limit
then tracing "order split limit exceeded"
else ()
in
filter_out (is_not_less_term o Thm.prop_of o fst) decomp_prems @
take limit not_less_prems
end
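(* Decompose a (possibly negated) premise of the form "t1 REL t2", where REL is one of the
   order operations eq/le/lt, into its polarity, relation kind, argument pair, and the
   environment obtained by matching the operation pattern against the premise.  Premises
   over excluded types yield NONE. *)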
fun decomp [eq, le, lt] ctxt t =
let
fun is_excluded t = exists (fn ty => ty = fastype_of t) excluded_types
fun decomp'' (binop $ t1 $ t2) =
let
open Order_Procedure
val thy = Proof_Context.theory_of ctxt
fun try_match pat = try (Pattern.match thy (pat, binop)) (Vartab.empty, Vartab.empty)
in if is_excluded t1 then NONE
else case (try_match eq, try_match le, try_match lt) of
(SOME env, _, _) => SOME (true, EQ, (t1, t2), env)
| (_, SOME env, _) => SOME (true, LEQ, (t1, t2), env)
| (_, _, SOME env) => SOME (true, LESS, (t1, t2), env)
| _ => NONE
end
| decomp'' _ = NONE
fun decomp' (nt $ t) =
if nt = Logic_Sig.Not
then decomp'' t |> Option.map (fn (b, c, p, e) => (not b, c, p, e))
else decomp'' (nt $ t)
| decomp' t = decomp'' t
in
try Logic_Sig.dest_Trueprop t |> Option.mapPartial decomp'
end
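(* Group the premises by compatible instantiations of the order operations: an edge i -> j
   is added whenever environment i is subsumed by environment j, and for every maximal
   strongly connected component the indices of all environments below it are returned, so
   that each group can be handled by a single solver invocation. *)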
fun maximal_envs envs =
let
fun test_opt p (SOME x) = p x
| test_opt _ NONE = false
fun leq_env (tyenv1, tenv1) (tyenv2, tenv2) =
Vartab.forall (fn (v, ty) =>
Vartab.lookup tyenv2 v |> test_opt (fn ty2 => ty2 = ty)) tyenv1
andalso
Vartab.forall (fn (v, (ty, t)) =>
Vartab.lookup tenv2 v |> test_opt (fn (ty2, t2) => ty2 = ty andalso t2 aconv t)) tenv1
fun fold_env (i, env) es = fold_index (fn (i2, env2) => fn es =>
if i = i2 then es else if leq_env env env2 then (i, i2) :: es else es) envs es
val env_order = fold_index fold_env envs []
val graph = fold_index (fn (i, env) => fn g => Int_Graph.new_node (i, env) g)
envs Int_Graph.empty
val graph = fold Int_Graph.add_edge env_order graph
val strong_conns = Int_Graph.strong_conn graph
val maximals =
filter (fn comp => length comp = length (Int_Graph.all_succs graph comp)) strong_conns
in
map (Int_Graph.all_preds graph) maximals
end
fun order_tac raw_order_proc octxt simp_prems =
Subgoal.FOCUS (fn {prems=prems, context=ctxt, ...} =>
let
val trace = Config.get ctxt order_trace_cfg
fun these' _ [] = []
| these' f (x :: xs) = case f x of NONE => these' f xs | SOME y => (x, y) :: these' f xs
val prems = simp_prems @ prems
|> filter (fn p => null (Term.add_vars (Thm.prop_of p) []))
|> map (Conv.fconv_rule Thm.eta_conversion)
val decomp_prems = these' (decomp (#ops octxt) ctxt o Thm.prop_of) prems
fun env_of (_, (_, _, _, env)) = env
val env_groups = maximal_envs (map env_of decomp_prems)
fun order_tac' (_, []) = no_tac
| order_tac' (env, decomp_prems) =
let
val [eq, le, lt] = #ops octxt |> map (Envir.eta_contract o Envir.subst_term env)
val decomp_prems = case #kind octxt of
Order => limit_not_less (#ops octxt) ctxt decomp_prems
| _ => decomp_prems
fun reify_prem (_, (b, ctor, (x, y), _)) (ps, reifytab) =
(Reifytab.get_var x ##>> Reifytab.get_var y) reifytab
|>> (fn vp => (b, ctor (apply2 Int_of_integer vp)) :: ps)
val (reified_prems, reifytab) = fold_rev reify_prem decomp_prems ([], Reifytab.empty)
val reified_prems_conj = foldl1 (fn (x, a) => And (x, a)) (map Atom reified_prems)
val prems_conj_thm = map fst decomp_prems
|> foldl1 (fn (x, a) => Logic_Sig.conjI OF [x, a])
|> Conv.fconv_rule Thm.eta_conversion
val prems_conj = prems_conj_thm |> Thm.prop_of
val proof = raw_order_proc reified_prems_conj
val pretty_term_list =
Pretty.list "" "" o map (Syntax.pretty_term (Config.put show_types true ctxt))
val pretty_thm_list = Pretty.list "" "" o map (Thm.pretty_thm ctxt)
fun pretty_type_of t = Pretty.block [ Pretty.str "::", Pretty.brk 1,
Pretty.quote (Syntax.pretty_typ ctxt (Term.fastype_of t)) ]
fun pretty_trace () =
[ ("order kind:", Pretty.str (@{make_string} (#kind octxt)))
, ("order operators:", Pretty.block [ pretty_term_list [eq, le, lt], Pretty.brk 1
, pretty_type_of le ])
, ("premises:", pretty_thm_list prems)
, ("selected premises:", pretty_thm_list (map fst decomp_prems))
, ("reified premises:", Pretty.str (@{make_string} reified_prems))
, ("contradiction:", Pretty.str (@{make_string} (Option.isSome proof)))
] |> map (fn (t, pp) => Pretty.block [Pretty.str t, Pretty.brk 1, pp])
|> Pretty.big_list "order solver called with the parameters"
val _ = if trace then tracing (Pretty.string_of (pretty_trace ())) else ()
val assmtab = Termtab.make [(prems_conj, prems_conj_thm)]
val replay = replay_order_prf_trm (eq, le, lt) octxt ctxt reifytab assmtab
in
case proof of
NONE => no_tac
| SOME p => SOLVED' (resolve_tac ctxt [replay p]) 1
end
in
map (fn is => ` (env_of o hd) (map (nth decomp_prems) is) |> order_tac') env_groups
|> FIRST
end)
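(* Reduce the goal to deriving False: a negated conclusion is attacked with notI, any other
   conclusion with ccontr, so that the order solver merely has to refute the premises. *)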
val ad_absurdum_tac = SUBGOAL (fn (A, i) =>
case try (Logic_Sig.dest_Trueprop o Logic.strip_assums_concl) A of
SOME (nt $ _) =>
if nt = Logic_Sig.Not
then resolve0_tac [Logic_Sig.notI] i
else resolve0_tac [Logic_Sig.ccontr] i
| _ => resolve0_tac [Logic_Sig.ccontr] i)
fun tac raw_order_proc octxt simp_prems ctxt =
ad_absurdum_tac THEN' order_tac raw_order_proc octxt simp_prems ctxt
end
functor Order_Tac(structure Base_Tac : BASE_ORDER_TAC) = struct
fun order_context_eq ({kind = kind1, ops = ops1, ...}, {kind = kind2, ops = ops2, ...}) =
kind1 = kind2 andalso eq_list (op aconv) (ops1, ops2)
fun order_data_eq (x, y) = order_context_eq (fst x, fst y)
structure Data = Generic_Data(
type T = (order_context * (order_context -> thm list -> Proof.context -> int -> tactic)) list
val empty = []
fun merge data = Library.merge order_data_eq data
)
fun declare (octxt as {kind = kind, raw_proc = raw_proc, ...}) lthy =
- lthy |> Local_Theory.declaration {syntax = false, pervasive = false} (fn phi => fn context =>
- let
- val ops = map (Morphism.term phi) (#ops octxt)
- val thms = map (fn (s, thm) => (s, Morphism.thm phi thm)) (#thms octxt)
- val conv_thms = map (fn (s, thm) => (s, Morphism.thm phi thm)) (#conv_thms octxt)
- val octxt' = {kind = kind, ops = ops, thms = thms, conv_thms = conv_thms}
- in
- context |> Data.map (Library.insert order_data_eq (octxt', raw_proc))
- end)
+ lthy |> Local_Theory.declaration {syntax = false, pervasive = false, pos = \<^here>}
+ (fn phi => fn context =>
+ let
+ val ops = map (Morphism.term phi) (#ops octxt)
+ val thms = map (fn (s, thm) => (s, Morphism.thm phi thm)) (#thms octxt)
+ val conv_thms = map (fn (s, thm) => (s, Morphism.thm phi thm)) (#conv_thms octxt)
+ val octxt' = {kind = kind, ops = ops, thms = thms, conv_thms = conv_thms}
+ in
+ context |> Data.map (Library.insert order_data_eq (octxt', raw_proc))
+ end)
fun declare_order {
ops = {eq = eq, le = le, lt = lt},
thms = {
trans = trans, (* x \<le> y \<Longrightarrow> y \<le> z \<Longrightarrow> x \<le> z *)
refl = refl, (* x \<le> x *)
eqD1 = eqD1, (* x = y \<Longrightarrow> x \<le> y *)
eqD2 = eqD2, (* x = y \<Longrightarrow> y \<le> x *)
antisym = antisym, (* x \<le> y \<Longrightarrow> y \<le> x \<Longrightarrow> x = y *)
contr = contr (* \<not> P \<Longrightarrow> P \<Longrightarrow> R *)
},
conv_thms = {
less_le = less_le, (* x < y \<equiv> x \<le> y \<and> x \<noteq> y *)
nless_le = nless_le (* \<not> a < b \<equiv> \<not> a \<le> b \<or> a = b *)
}
} =
declare {
kind = Order,
ops = [eq, le, lt],
thms = [("trans", trans), ("refl", refl), ("eqD1", eqD1), ("eqD2", eqD2),
("antisym", antisym), ("contr", contr)],
conv_thms = [("less_le", less_le), ("nless_le", nless_le)],
raw_proc = Base_Tac.tac Order_Procedure.po_contr_prf
}
fun declare_linorder {
ops = {eq = eq, le = le, lt = lt},
thms = {
trans = trans, (* x \<le> y \<Longrightarrow> y \<le> z \<Longrightarrow> x \<le> z *)
refl = refl, (* x \<le> x *)
eqD1 = eqD1, (* x = y \<Longrightarrow> x \<le> y *)
eqD2 = eqD2, (* x = y \<Longrightarrow> y \<le> x *)
antisym = antisym, (* x \<le> y \<Longrightarrow> y \<le> x \<Longrightarrow> x = y *)
contr = contr (* \<not> P \<Longrightarrow> P \<Longrightarrow> R *)
},
conv_thms = {
less_le = less_le, (* x < y \<equiv> x \<le> y \<and> x \<noteq> y *)
nless_le = nless_le, (* \<not> x < y \<equiv> y \<le> x *)
nle_le = nle_le (* \<not> a \<le> b \<equiv> b \<le> a \<and> b \<noteq> a *)
}
} =
declare {
kind = Linorder,
ops = [eq, le, lt],
thms = [("trans", trans), ("refl", refl), ("eqD1", eqD1), ("eqD2", eqD2),
("antisym", antisym), ("contr", contr)],
conv_thms = [("less_le", less_le), ("nless_le", nless_le), ("nle_le", nle_le)],
raw_proc = Base_Tac.tac Order_Procedure.lo_contr_prf
}
(* Try to solve the goal by calling the order solver with each of the declared orders. *)
fun tac simp_prems ctxt =
let fun app_tac (octxt, tac0) = CHANGED o tac0 octxt simp_prems ctxt
in FIRST' (map app_tac (Data.get (Context.Proof ctxt))) end
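(* Usage sketch (not part of this file): an object-logic typically wraps "tac" into a proof
   method via Method.setup, so that the orders registered with declare_order/declare_linorder
   become available to a method such as HOL's "order". *)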
end
diff --git a/src/Pure/Admin/build_release.scala b/src/Pure/Admin/build_release.scala
--- a/src/Pure/Admin/build_release.scala
+++ b/src/Pure/Admin/build_release.scala
@@ -1,951 +1,951 @@
/* Title: Pure/Admin/build_release.scala
Author: Makarius
Build full Isabelle distribution from repository.
*/
package isabelle
object Build_Release {
/** release context **/
private def execute(dir: Path, script: String): Unit =
Isabelle_System.bash(script, cwd = dir.file).check
private def execute_tar(dir: Path, args: String, strip: Boolean = false): Process_Result =
Isabelle_System.gnutar(args, dir = dir, strip = strip).check
private def bash_java_opens(args: String*): String =
Bash.strings(args.toList.flatMap(arg => List("--add-opens", arg + "=ALL-UNNAMED")))
object Release_Context {
def apply(
target_dir: Path,
release_name: String = "",
progress: Progress = new Progress
): Release_Context = {
val date = Date.now()
val dist_name = proper_string(release_name) getOrElse ("Isabelle_" + Date.Format.date(date))
val dist_dir = (target_dir + Path.explode("dist-" + dist_name)).absolute
new Release_Context(release_name, dist_name, dist_dir, progress)
}
}
class Release_Context private[Build_Release](
val release_name: String,
val dist_name: String,
val dist_dir: Path,
val progress: Progress
) {
override def toString: String = dist_name
val isabelle: Path = Path.explode(dist_name)
val isabelle_dir: Path = dist_dir + isabelle
val isabelle_archive: Path = dist_dir + isabelle.tar.gz
val isabelle_library_archive: Path = dist_dir + Path.explode(dist_name + "_library.tar.gz")
def other_isabelle(dir: Path, suffix: String = "-build"): Other_Isabelle =
Other_Isabelle(dir + isabelle,
isabelle_identifier = dist_name + suffix,
progress = progress)
def make_announce(id: String): Unit = {
if (release_name.isEmpty) {
File.write(isabelle_dir + Path.explode("ANNOUNCE"),
"""
IMPORTANT NOTE
==============
This is a snapshot of Isabelle/""" + id + """ from the repository.
""")
}
}
def make_contrib(): Unit = {
Isabelle_System.make_directory(Components.contrib(isabelle_dir))
File.write(Components.contrib(isabelle_dir, name = "README"),
"""This directory contains add-on components that contribute to the main
Isabelle distribution. Separate licensing conditions apply, see each
directory individually.
""")
}
def bundle_info(platform: Platform.Family.Value): Bundle_Info =
platform match {
case Platform.Family.linux_arm =>
Bundle_Info(platform, "Linux (ARM)", dist_name + "_linux_arm.tar.gz")
case Platform.Family.linux => Bundle_Info(platform, "Linux", dist_name + "_linux.tar.gz")
case Platform.Family.macos => Bundle_Info(platform, "macOS", dist_name + "_macos.tar.gz")
case Platform.Family.windows => Bundle_Info(platform, "Windows", dist_name + ".exe")
}
}
sealed case class Bundle_Info(
platform: Platform.Family.Value,
platform_description: String,
name: String
) {
def path: Path = Path.explode(name)
}
/** release archive **/
val ISABELLE: Path = Path.basic("Isabelle")
val ISABELLE_ID: Path = Path.explode("etc/ISABELLE_ID")
val ISABELLE_TAGS: Path = Path.explode("etc/ISABELLE_TAGS")
val ISABELLE_IDENTIFIER: Path = Path.explode("etc/ISABELLE_IDENTIFIER")
object Release_Archive {
def make(bytes: Bytes, rename: String = ""): Release_Archive = {
Isabelle_System.with_tmp_dir("build_release")(dir =>
Isabelle_System.with_tmp_file("archive", ext = "tar.gz") { archive_path =>
val isabelle_dir = Isabelle_System.make_directory(dir + ISABELLE)
Bytes.write(archive_path, bytes)
execute_tar(isabelle_dir, "-xzf " + File.bash_path(archive_path), strip = true)
val id = File.read(isabelle_dir + ISABELLE_ID)
val tags = File.read(isabelle_dir + ISABELLE_TAGS)
val identifier = File.read(isabelle_dir + ISABELLE_IDENTIFIER)
val (bytes1, identifier1) =
if (rename.isEmpty || rename == identifier) (bytes, identifier)
else {
File.write(isabelle_dir + ISABELLE_IDENTIFIER, rename)
Isabelle_System.move_file(isabelle_dir, dir + Path.basic(rename))
execute_tar(dir, "-czf " + File.bash_path(archive_path) + " " + Bash.string(rename))
(Bytes.read(archive_path), rename)
}
new Release_Archive(bytes1, id, tags, identifier1)
}
)
}
def read(path: Path, rename: String = ""): Release_Archive =
make(Bytes.read(path), rename = rename)
def get(
url: String,
rename: String = "",
progress: Progress = new Progress
): Release_Archive = {
val bytes =
if (Path.is_wellformed(url)) Bytes.read(Path.explode(url))
else Isabelle_System.download(url, progress = progress).bytes
make(bytes, rename = rename)
}
}
case class Release_Archive private[Build_Release](
bytes: Bytes, id: String, tags: String, identifier: String) {
override def toString: String = identifier
}
/** generated content **/
/* bundled components */
class Bundled(platform: Option[Platform.Family.Value] = None) {
def detect(s: String): Boolean =
s.startsWith("#bundled") && !s.startsWith("#bundled ")
def apply(name: String): String =
"#bundled" + (platform match { case None => "" case Some(plat) => "-" + plat }) + ":" + name
private val Pattern1 = ("""^#bundled:(.*)$""").r
private val Pattern2 = ("""^#bundled-(.*):(.*)$""").r
def unapply(s: String): Option[String] =
s match {
case Pattern1(name) => Some(name)
case Pattern2(Platform.Family(plat), name) if platform == Some(plat) => Some(name)
case _ => None
}
}
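  /* Example of the line format handled above (the component name is made up for
     illustration): a platform-independent entry reads "#bundled:foo-1.0", a
     platform-specific one e.g. "#bundled-linux:foo-1.0". */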
def record_bundled_components(dir: Path): Unit = {
val catalogs =
List("main", "bundled").map((_, new Bundled())) :::
Platform.Family.list.flatMap(platform =>
List(platform.toString, "bundled-" + platform.toString).
map((_, new Bundled(platform = Some(platform)))))
File.append(Components.Directory(dir).components,
terminate_lines("#bundled components" ::
(for {
(catalog, bundled) <- catalogs.iterator
path = Components.admin(dir) + Path.basic(catalog)
if path.is_file
line <- split_lines(File.read(path))
if line.nonEmpty && !line.startsWith("#")
} yield bundled(line)).toList))
}
def get_bundled_components(dir: Path, platform: Platform.Family.Value): (List[String], String) = {
val Bundled = new Bundled(platform = Some(platform))
val components =
for { Bundled(name) <- Components.Directory(dir).read_components() } yield name
val jdk_component =
components.find(_.startsWith("jdk")) getOrElse error("Missing jdk component")
(components, jdk_component)
}
def activate_components(
dir: Path, platform: Platform.Family.Value, more_names: List[String]): Unit = {
def contrib_name(name: String): String =
Components.contrib(name = name).implode
val Bundled = new Bundled(platform = Some(platform))
val component_dir = Components.Directory(dir)
component_dir.write_components(
component_dir.read_components().flatMap(line =>
line match {
case Bundled(name) =>
if (Components.Directory(Components.contrib(dir, name)).ok) Some(contrib_name(name))
else None
case _ => if (Bundled.detect(line)) None else Some(line)
}) ::: more_names.map(contrib_name))
}

/** build release **/

/* build heaps */
private def build_heaps(
options: Options,
platform: Platform.Family.Value,
build_sessions: List[String],
local_dir: Path,
progress: Progress = new Progress,
): Unit = {
val server_option = "build_host_" + platform.toString
val server = options.string(server_option)
progress.echo("Building heaps for " + commas_quote(build_sessions) +
" (" + server_option + " = " + quote(server) + ") ...")
val ssh =
if (server.nonEmpty) SSH.open_session(options, server)
else if (Platform.family == platform) SSH.Local
else error("Undefined option " + server_option + ": cannot build heaps")
try {
Isabelle_System.with_tmp_file("tmp", ext = "tar") { local_tmp_tar =>
execute_tar(local_dir, "-cf " + File.bash_path(local_tmp_tar) + " .")
ssh.with_tmp_dir { remote_dir =>
val remote_tmp_tar = remote_dir + Path.basic("tmp.tar")
ssh.write_file(remote_tmp_tar, local_tmp_tar)
val build_command =
- "bin/isabelle build -o system_heaps -b -- " + Bash.strings(build_sessions)
+ "bin/isabelle build -o parallel_proofs=0 -o system_heaps -b -- " + Bash.strings(build_sessions)
def system_apple(b: Boolean): String =
"""{ echo "ML_system_apple = """ + b + """" > "$(bin/isabelle getenv -b ISABELLE_HOME_USER)/etc/preferences"; }"""
val build_script =
List(
"cd " + File.bash_path(remote_dir),
"tar -xf tmp.tar",
"""mkdir -p "$(bin/isabelle getenv -b ISABELLE_HOME_USER)/etc" """,
system_apple(false),
build_command,
system_apple(true),
build_command,
"tar -cf tmp.tar heaps")
ssh.execute(build_script.mkString(" && "), settings = false).check
ssh.read_file(remote_tmp_tar, local_tmp_tar)
}
execute_tar(local_dir, "-xvf " + File.bash_path(local_tmp_tar))
.out_lines.sorted.foreach(progress.echo(_))
}
}
finally { ssh.close() }
}
/* Isabelle application */
def make_isabelle_options(path: Path, options: List[String], line_ending: String = "\n"): Unit = {
val title = "# Java runtime options"
File.write(path, (title :: options).map(_ + line_ending).mkString)
}
def make_isabelle_app(
platform: Platform.Family.Value,
isabelle_target: Path,
isabelle_name: String,
jdk_component: String,
classpath: List[Path],
dock_icon: Boolean = false): Unit = {
val script = """#!/usr/bin/env bash
#
# Author: Makarius
#
# Main Isabelle application script.
# minimal Isabelle environment
ISABELLE_HOME="$(cd "$(dirname "$0")"; cd "$(pwd -P)/../.."; pwd)"
source "$ISABELLE_HOME/lib/scripts/isabelle-platform"
#paranoia settings -- avoid intrusion of alien options
unset "_JAVA_OPTIONS"
unset "JAVA_TOOL_OPTIONS"
#paranoia settings -- avoid problems of Java/Swing versus XIM/IBus etc.
unset XMODIFIERS
COMPONENT="$ISABELLE_HOME/contrib/""" + jdk_component + """"
source "$COMPONENT/etc/settings"
# main
declare -a JAVA_OPTIONS=($(grep -v '^#' "$ISABELLE_HOME/Isabelle.options"))
"$ISABELLE_HOME/bin/isabelle" env "$ISABELLE_HOME/lib/scripts/java-gui-setup"
exec "$ISABELLE_JDK_HOME/bin/java" \
"-Disabelle.root=$ISABELLE_HOME" "${JAVA_OPTIONS[@]}" \
-classpath """" + classpath.map(p => "$ISABELLE_HOME/" + p.implode).mkString(":") + """" \
"-splash:$ISABELLE_HOME/lib/logo/isabelle.gif" \
""" + (if (dock_icon) """"-Xdock:icon=$ISABELLE_HOME/lib/logo/isabelle_transparent-128.png" \
""" else "") + """isabelle.jedit.JEdit_Main "$@"
"""
val script_path = isabelle_target + Path.explode("lib/scripts/Isabelle_app")
File.write(script_path, script)
File.set_executable(script_path, true)
val component_dir = isabelle_target + Path.explode("contrib/Isabelle_app")
Isabelle_System.move_file(
component_dir + Path.explode(Platform.Family.standard(platform)) + Path.explode("Isabelle"),
isabelle_target + Path.explode(isabelle_name))
Isabelle_System.rm_tree(component_dir)
}
def make_isabelle_plist(path: Path, isabelle_name: String, isabelle_rev: String): Unit = {
File.write(path, """<?xml version="1.0" ?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
<key>CFBundleDevelopmentRegion</key>
<string>English</string>
<key>CFBundleIconFile</key>
<string>isabelle.icns</string>
<key>CFBundleIdentifier</key>
<string>de.tum.in.isabelle</string>
<key>CFBundleDisplayName</key>
<string>""" + isabelle_name + """</string>
<key>CFBundleInfoDictionaryVersion</key>
<string>6.0</string>
<key>CFBundleName</key>
<string>""" + isabelle_name + """</string>
<key>CFBundlePackageType</key>
<string>APPL</string>
<key>CFBundleShortVersionString</key>
<string>""" + isabelle_name + """</string>
<key>CFBundleSignature</key>
<string>????</string>
<key>CFBundleVersion</key>
<string>""" + isabelle_rev + """</string>
<key>NSHumanReadableCopyright</key>
<string></string>
<key>LSMinimumSystemVersion</key>
<string>10.11</string>
<key>LSApplicationCategoryType</key>
<string>public.app-category.developer-tools</string>
<key>NSHighResolutionCapable</key>
<string>true</string>
<key>NSSupportsAutomaticGraphicsSwitching</key>
<string>true</string>
<key>CFBundleDocumentTypes</key>
<array>
<dict>
<key>CFBundleTypeExtensions</key>
<array>
<string>thy</string>
</array>
<key>CFBundleTypeIconFile</key>
<string>theory.icns</string>
<key>CFBundleTypeName</key>
<string>Isabelle theory file</string>
<key>CFBundleTypeRole</key>
<string>Editor</string>
<key>LSTypeIsPackage</key>
<false/>
</dict>
</array>
</dict>
</plist>
""")
}
/* NEWS */
private def make_news(other_isabelle: Other_Isabelle): Unit = {
val news_file = other_isabelle.isabelle_home + Path.explode("NEWS")
val doc_dir = other_isabelle.isabelle_home + Path.explode("doc")
val fonts_dir = Isabelle_System.make_directory(doc_dir + Path.explode("fonts"))
Isabelle_Fonts.make_entries(getenv = other_isabelle.getenv, hidden = true).
foreach(entry => Isabelle_System.copy_file(entry.path, fonts_dir))
HTML.write_document(doc_dir, "NEWS.html",
List(HTML.title("NEWS")),
List(
HTML.chapter("NEWS"),
HTML.source(Symbol.decode(File.read(news_file)))))
}
/* main */
def use_release_archive(
context: Release_Context,
archive: Release_Archive,
id: String = ""
): Unit = {
if (id.nonEmpty && id != archive.id) {
error("Mismatch of release identification " + id + " vs. archive " + archive.id)
}
if (!context.isabelle_archive.is_file || Bytes.read(context.isabelle_archive) != archive.bytes) {
Bytes.write(context.isabelle_archive, archive.bytes)
}
}
def build_release_archive(
context: Release_Context,
version: String,
parallel_jobs: Int = 1
): Unit = {
val progress = context.progress
val hg = Mercurial.self_repository()
val id =
try { hg.id(version) }
catch { case ERROR(msg) => cat_error("Bad repository version: " + version, msg) }
if (context.isabelle_archive.is_file) {
progress.echo_warning("Found existing release archive: " + context.isabelle_archive)
use_release_archive(context, Release_Archive.read(context.isabelle_archive), id = id)
}
else {
progress.echo_warning("Preparing release " + context.dist_name + " ...")
Isabelle_System.new_directory(context.dist_dir)
hg.archive(File.standard_path(context.isabelle_dir), rev = id)
for (name <- List(".hg_archival.txt", ".hgtags", ".hgignore", "README_REPOSITORY")) {
(context.isabelle_dir + Path.explode(name)).file.delete
}
File.write(context.isabelle_dir + ISABELLE_ID, id)
File.write(context.isabelle_dir + ISABELLE_TAGS, hg.tags(rev = id))
File.write(context.isabelle_dir + ISABELLE_IDENTIFIER, context.dist_name)
context.make_announce(id)
context.make_contrib()
execute(context.isabelle_dir, """find . -print | xargs chmod -f u+rw""")
record_bundled_components(context.isabelle_dir)
/* build tools and documentation */
val other_isabelle = context.other_isabelle(context.dist_dir)
other_isabelle.init(echo = true)
try {
other_isabelle.bash(
"bin/isabelle build_doc -a -o system_heaps -j " + parallel_jobs, echo = true).check
}
catch { case ERROR(msg) => cat_error("Failed to build documentation:", msg) }
make_news(other_isabelle)
for (name <- List("Admin", "browser_info", "heaps")) {
Isabelle_System.rm_tree(other_isabelle.isabelle_home + Path.explode(name))
}
other_isabelle.cleanup()
progress.echo_warning("Creating release archive " + context.isabelle_archive + " ...")
execute(context.dist_dir, """chmod -R a+r . && chmod -R u+w . && chmod -R g=o .""")
execute(context.dist_dir,
"""find . -type f "(" -name "*.thy" -o -name "*.ML" -o -name "*.scala" ")" -print | xargs chmod -f u-w""")
execute_tar(context.dist_dir, "-czf " +
File.bash_path(context.isabelle_archive) + " " + Bash.string(context.dist_name))
}
}
def default_platform_families: List[Platform.Family.Value] = Platform.Family.list0
def build_release(
options: Options,
context: Release_Context,
afp_rev: String = "",
platform_families: List[Platform.Family.Value] = default_platform_families,
more_components: List[Path] = Nil,
website: Option[Path] = None,
build_sessions: List[String] = Nil,
build_library: Boolean = false,
parallel_jobs: Int = 1
): Unit = {
val progress = context.progress
/* release directory */
val archive = Release_Archive.read(context.isabelle_archive)
for (path <- List(context.isabelle, ISABELLE)) {
Isabelle_System.rm_tree(context.dist_dir + path)
}
Isabelle_System.with_tmp_file("archive", ext = "tar.gz") { archive_path =>
Bytes.write(archive_path, archive.bytes)
val extract =
List("README", "NEWS", "ANNOUNCE", "COPYRIGHT", "CONTRIBUTORS", "doc").
map(name => context.dist_name + "/" + name)
execute_tar(context.dist_dir,
"-xzf " + File.bash_path(archive_path) + " " + Bash.strings(extract))
}
Isabelle_System.symlink(Path.explode(context.dist_name), context.dist_dir + ISABELLE)
/* make application bundles */
val bundle_infos = platform_families.map(context.bundle_info)
for (bundle_info <- bundle_infos) {
val isabelle_name = context.dist_name
val platform = bundle_info.platform
progress.echo("\nApplication bundle for " + platform)
Isabelle_System.with_tmp_dir("build_release") { tmp_dir =>
// release archive
execute_tar(tmp_dir, "-xzf " + File.bash_path(context.isabelle_archive))
val other_isabelle = context.other_isabelle(tmp_dir)
val isabelle_target = other_isabelle.isabelle_home
// bundled components
progress.echo("Bundled components:")
val contrib_dir = Components.contrib(isabelle_target)
val (bundled_components, jdk_component) =
get_bundled_components(isabelle_target, platform)
for (name <- bundled_components) {
Components.resolve(Components.default_components_base, name,
target_dir = Some(contrib_dir),
copy_dir = Some(context.dist_dir + Path.explode("contrib")),
progress = progress)
}
val more_components_names =
more_components.map(Components.unpack(contrib_dir, _, progress = progress))
activate_components(isabelle_target, platform, more_components_names)
// Java parameters
val java_options: List[String] =
(for {
variable <-
List(
"ISABELLE_JAVA_SYSTEM_OPTIONS",
"JEDIT_JAVA_SYSTEM_OPTIONS",
"JEDIT_JAVA_OPTIONS")
opt <- Word.explode(other_isabelle.getenv(variable))
}
yield {
val s = "-Dapple.awt.application.name="
if (opt.startsWith(s)) s + isabelle_name else opt
}) ::: List("-Disabelle.jedit_server=" + isabelle_name)
val classpath: List[Path] = {
val base = isabelle_target.absolute
val classpath1 = Path.split(other_isabelle.getenv("ISABELLE_CLASSPATH"))
val classpath2 = Path.split(other_isabelle.getenv("ISABELLE_SETUP_CLASSPATH"))
(classpath1 ::: classpath2).map { path =>
val abs_path = path.absolute
File.relative_path(base, abs_path) match {
case Some(rel_path) => rel_path
case None => error("Bad classpath element: " + abs_path)
}
}
}
val jedit_options = Path.explode("src/Tools/jEdit/etc/options")
val jedit_props =
Path.explode(other_isabelle.getenv("JEDIT_HOME") + "/properties/jEdit.props")
// build heaps
if (build_sessions.nonEmpty) {
build_heaps(options, platform, build_sessions, isabelle_target, progress = progress)
}
// application bundling
Components.clean_base(contrib_dir, platforms = List(platform))
platform match {
case Platform.Family.linux_arm | Platform.Family.linux =>
File.change(isabelle_target + jedit_options) {
_.replaceAll("jedit_reset_font_size : int =.*", "jedit_reset_font_size : int = 24")
}
File.change(isabelle_target + jedit_props) {
_.replaceAll("console.fontsize=.*", "console.fontsize=18")
.replaceAll("helpviewer.fontsize=.*", "helpviewer.fontsize=18")
.replaceAll("metal.primary.fontsize=.*", "metal.primary.fontsize=18")
.replaceAll("metal.secondary.fontsize=.*", "metal.secondary.fontsize=18")
.replaceAll("view.fontsize=.*", "view.fontsize=24")
.replaceAll("view.gutter.fontsize=.*", "view.gutter.fontsize=16")
}
make_isabelle_options(
isabelle_target + Path.explode("Isabelle.options"), java_options)
make_isabelle_app(platform, isabelle_target, isabelle_name, jdk_component, classpath)
progress.echo("Packaging " + bundle_info.name + " ...")
execute_tar(tmp_dir,
"-czf " + File.bash_path(context.dist_dir + bundle_info.path) + " " +
Bash.string(isabelle_name))
case Platform.Family.macos =>
File.change(isabelle_target + jedit_props) {
_.replaceAll("delete-line.shortcut=.*", "delete-line.shortcut=C+d")
.replaceAll("delete.shortcut2=.*", "delete.shortcut2=A+d")
}
// macOS application bundle
val app_contents = isabelle_target + Path.explode("Contents")
for (icon <- List("lib/logo/isabelle.icns", "lib/logo/theory.icns")) {
Isabelle_System.copy_file(isabelle_target + Path.explode(icon),
Isabelle_System.make_directory(app_contents + Path.explode("Resources")))
}
make_isabelle_plist(
app_contents + Path.explode("Info.plist"), isabelle_name, archive.id)
make_isabelle_app(platform, isabelle_target, isabelle_name, jdk_component,
classpath, dock_icon = true)
val isabelle_options = Path.explode("Isabelle.options")
make_isabelle_options(
isabelle_target + isabelle_options,
java_options ::: List("-Disabelle.app=true"))
// application archive
progress.echo("Packaging " + bundle_info.name + " ...")
val isabelle_app = Path.explode(isabelle_name + ".app")
Isabelle_System.move_file(tmp_dir + Path.explode(isabelle_name),
tmp_dir + isabelle_app)
execute_tar(tmp_dir,
"-czf " + File.bash_path(context.dist_dir + bundle_info.path) + " " +
File.bash_path(isabelle_app))
case Platform.Family.windows =>
File.change(isabelle_target + jedit_props) {
_.replaceAll("foldPainter=.*", "foldPainter=Square")
}
// application launcher
Isabelle_System.move_file(isabelle_target + Path.explode("contrib/windows_app"), tmp_dir)
val app_template = Path.explode("~~/Admin/Windows/launch4j")
make_isabelle_options(
isabelle_target + Path.explode(isabelle_name + ".l4j.ini"),
java_options, line_ending = "\r\n")
val isabelle_xml = Path.explode("isabelle.xml")
val isabelle_exe = bundle_info.path
File.write(tmp_dir + isabelle_xml,
File.read(app_template + isabelle_xml)
.replace("{ISABELLE_NAME}", isabelle_name)
.replace("{OUTFILE}", File.platform_path(isabelle_target + isabelle_exe))
.replace("{ICON}",
File.platform_path(app_template + Path.explode("isabelle_transparent.ico")))
.replace("{SPLASH}",
File.platform_path(app_template + Path.explode("isabelle.bmp")))
.replace("{CLASSPATH}",
cat_lines(classpath.map(cp =>
" <cp>%EXEDIR%\\" + File.platform_path(cp).replace('/', '\\') + "</cp>")))
.replace("\\jdk\\", "\\" + jdk_component + "\\"))
val java_opts =
bash_java_opens(
"java.base/java.io",
"java.base/java.lang",
"java.base/java.lang.reflect",
"java.base/java.text",
"java.base/java.util",
"java.desktop/java.awt.font")
val launch4j_jar =
Path.explode("windows_app/launch4j-" + Platform.family + "/launch4j.jar")
execute(tmp_dir,
cat_lines(List(
"export LAUNCH4J=" + File.bash_platform_path(launch4j_jar),
"isabelle java " + java_opts + " -jar \"$LAUNCH4J\" isabelle.xml")))
Isabelle_System.copy_file(app_template + Path.explode("manifest.xml"),
isabelle_target + isabelle_exe.ext("manifest"))
// Cygwin setup
val cygwin_template = Path.explode("~~/Admin/Windows/Cygwin")
Isabelle_System.copy_file(cygwin_template + Path.explode("Cygwin-Terminal.bat"),
isabelle_target)
val cygwin_mirror =
File.read(isabelle_target + Path.explode("contrib/cygwin/isabelle/cygwin_mirror"))
val cygwin_bat = Path.explode("Cygwin-Setup.bat")
File.write(isabelle_target + cygwin_bat,
File.read(cygwin_template + cygwin_bat).replace("{MIRROR}", cygwin_mirror))
File.set_executable(isabelle_target + cygwin_bat, true)
for (name <- List("isabelle/postinstall", "isabelle/rebaseall")) {
val path = Path.explode(name)
Isabelle_System.copy_file(cygwin_template + path,
isabelle_target + Path.explode("contrib/cygwin") + path)
}
execute(isabelle_target,
"""find . -type f -not -name "*.exe" -not -name "*.dll" """ +
(if (Platform.is_macos) "-perm +100" else "-executable") +
" -print0 > contrib/cygwin/isabelle/executables")
execute(isabelle_target,
"""find . -type l -exec echo "{}" ";" -exec readlink "{}" ";" """ +
"""> contrib/cygwin/isabelle/symlinks""")
execute(isabelle_target, """find . -type l -exec rm "{}" ";" """)
File.write(isabelle_target + Path.explode("contrib/cygwin/isabelle/uninitialized"), "")
// executable archive (self-extracting 7z)
val archive_name = isabelle_name + ".7z"
val exe_archive = tmp_dir + Path.explode(archive_name)
exe_archive.file.delete
progress.echo("Packaging " + archive_name + " ...")
execute(tmp_dir,
"7z -y -bd a " + File.bash_path(exe_archive) + " " + Bash.string(isabelle_name))
if (!exe_archive.is_file) error("Failed to create archive: " + exe_archive)
val sfx_exe = tmp_dir + Path.explode("windows_app/7zsd_All_x64.sfx")
val sfx_txt =
File.read(Path.explode("~~/Admin/Windows/Installer/sfx.txt"))
.replace("{ISABELLE_NAME}", isabelle_name)
Bytes.write(context.dist_dir + isabelle_exe,
Bytes.read(sfx_exe) + Bytes(sfx_txt) + Bytes.read(exe_archive))
File.set_executable(context.dist_dir + isabelle_exe, true)
}
other_isabelle.cleanup()
}
progress.echo("DONE")
}
/* minimal website */
for (dir <- website) {
val website_platform_bundles =
for {
bundle_info <- bundle_infos
if (context.dist_dir + bundle_info.path).is_file
} yield (bundle_info.name, bundle_info)
val isabelle_link =
HTML.link(Isabelle_System.isabelle_repository.changeset(archive.id),
HTML.text("Isabelle/" + archive.id))
val afp_link =
HTML.link(Isabelle_System.afp_repository.changeset(afp_rev),
HTML.text("AFP/" + afp_rev))
HTML.write_document(dir, "index.html",
List(HTML.title(context.dist_name)),
List(
HTML.section(context.dist_name),
HTML.subsection("Downloads"),
HTML.itemize(
List(HTML.link(context.dist_name + ".tar.gz", HTML.text("Source archive"))) ::
website_platform_bundles.map({ case (bundle, bundle_info) =>
List(HTML.link(bundle, HTML.text(bundle_info.platform_description + " bundle"))) })),
HTML.subsection("Repositories"),
HTML.itemize(
List(List(isabelle_link)) ::: (if (afp_rev == "") Nil else List(List(afp_link))))))
Isabelle_System.copy_file(context.isabelle_archive, dir)
for ((bundle, _) <- website_platform_bundles) {
Isabelle_System.copy_file(context.dist_dir + Path.explode(bundle), dir)
}
}
/* HTML library */
if (build_library) {
if (context.isabelle_library_archive.is_file) {
progress.echo_warning("Library archive already exists: " + context.isabelle_library_archive)
}
else {
Isabelle_System.with_tmp_dir("build_release") { tmp_dir =>
val bundle =
context.dist_dir + Path.explode(context.dist_name + "_" + Platform.family + ".tar.gz")
execute_tar(tmp_dir, "-xzf " + File.bash_path(bundle))
val other_isabelle = context.other_isabelle(tmp_dir, suffix = "")
Isabelle_System.make_directory(other_isabelle.etc)
File.write(other_isabelle.etc_settings, "ML_OPTIONS=\"--minheap 1000 --maxheap 4000\"\n")
other_isabelle.bash("bin/isabelle build -f -j " + parallel_jobs +
" -o browser_info -o document=pdf -o document_variants=document:outline=/proof,/ML" +
" -o system_heaps -c -a -d '~~/src/Benchmarks'", echo = true).check
other_isabelle.isabelle_home_user.file.delete
execute(tmp_dir, "chmod -R a+r " + Bash.string(context.dist_name))
execute(tmp_dir, "chmod -R g=o " + Bash.string(context.dist_name))
execute_tar(tmp_dir, "-czf " + File.bash_path(context.isabelle_library_archive) +
" " + Bash.string(context.dist_name + "/browser_info"))
}
}
}
}

/** command line entry point **/

def main(args: Array[String]): Unit = {
Command_Line.tool {
var afp_rev = ""
var target_dir = Path.current
var release_name = ""
var source_archive = ""
var website: Option[Path] = None
var build_sessions: List[String] = Nil
var more_components: List[Path] = Nil
var parallel_jobs = 1
var build_library = false
var options = Options.init()
var platform_families = default_platform_families
var rev = ""
val getopts = Getopts("""
Usage: Admin/build_release [OPTIONS]
Options are:
-A REV corresponding AFP changeset id
-D DIR target directory (default ".")
-R RELEASE explicit release name
-S ARCHIVE use existing source archive (file or URL)
-W WEBSITE produce minimal website in given directory
-b SESSIONS build platform-specific session images (separated by commas)
-c ARCHIVE clean bundling with additional component .tar.gz archive
-j INT maximum number of parallel jobs (default 1)
-l build library
-o OPTION override Isabelle system OPTION (via NAME=VAL or NAME)
-p NAMES platform families (default: """ + quote(default_platform_families.mkString(",")) + """)
-r REV Mercurial changeset id (default: ARCHIVE or RELEASE or tip)
Build Isabelle release in base directory, using the local repository clone.
""",
"A:" -> (arg => afp_rev = arg),
"D:" -> (arg => target_dir = Path.explode(arg)),
"R:" -> (arg => release_name = arg),
"S:" -> (arg => source_archive = arg),
"W:" -> (arg => website = Some(Path.explode(arg))),
"b:" -> (arg => build_sessions = space_explode(',', arg)),
"c:" -> (arg =>
{
val path = Path.explode(arg)
Components.Archive.get_name(path.file_name)
more_components = more_components ::: List(path)
}),
"j:" -> (arg => parallel_jobs = Value.Int.parse(arg)),
"l" -> (_ => build_library = true),
"o:" -> (arg => options = options + arg),
"p:" -> (arg => platform_families = space_explode(',', arg).map(Platform.Family.parse)),
"r:" -> (arg => rev = arg))
val more_args = getopts(args)
if (more_args.nonEmpty) getopts.usage()
if (platform_families.contains(Platform.Family.windows) && !Isabelle_System.bash("7z i").ok)
error("Building for windows requires 7z")
val progress = new Console_Progress()
def make_context(name: String): Release_Context =
Release_Context(target_dir, release_name = name, progress = progress)
val context =
if (source_archive.isEmpty) {
val context = make_context(release_name)
val version = proper_string(rev) orElse proper_string(release_name) getOrElse "tip"
build_release_archive(context, version, parallel_jobs = parallel_jobs)
context
}
else {
val archive =
Release_Archive.get(source_archive, rename = release_name, progress = progress)
val context = make_context(archive.identifier)
Isabelle_System.make_directory(context.dist_dir)
use_release_archive(context, archive, id = rev)
context
}
build_release(options, context, afp_rev = afp_rev, platform_families = platform_families,
more_components = more_components, build_sessions = build_sessions,
build_library = build_library, parallel_jobs = parallel_jobs, website = website)
}
}
}
diff --git a/src/Pure/Admin/isabelle_cronjob.scala b/src/Pure/Admin/isabelle_cronjob.scala
--- a/src/Pure/Admin/isabelle_cronjob.scala
+++ b/src/Pure/Admin/isabelle_cronjob.scala
@@ -1,645 +1,646 @@
/* Title: Pure/Admin/isabelle_cronjob.scala
Author: Makarius
Main entry point for administrative cronjob at TUM.
*/
package isabelle
import java.nio.file.Files
import scala.annotation.tailrec
object Isabelle_Cronjob {
/* global resources: owned by main cronjob */
val backup = "lxbroy10:cronjob"
val main_dir: Path = Path.explode("~/cronjob")
val main_state_file: Path = main_dir + Path.explode("run/main.state")
val build_release_log: Path = main_dir + Path.explode("run/build_release.log")
val current_log: Path = main_dir + Path.explode("run/main.log") // owned by log service
val cumulative_log: Path = main_dir + Path.explode("log/main.log") // owned by log service
val isabelle_repos: Path = main_dir + Path.explode("isabelle")
val afp_repos: Path = main_dir + Path.explode("AFP")
val mailman_archives_dir = Path.explode("~/cronjob/Mailman")
val build_log_dirs =
List(Path.explode("~/log"), Path.explode("~/afp/log"), Path.explode("~/cronjob/log"))

/** logger tasks **/

sealed case class Logger_Task(name: String = "", body: Logger => Unit)
/* init and exit */
def get_rev(): String = Mercurial.repository(isabelle_repos).id()
def get_afp_rev(): String = Mercurial.repository(afp_repos).id()
val init: Logger_Task =
Logger_Task("init",
{ logger =>
Isabelle_Devel.make_index()
Mercurial.setup_repository(Isabelle_System.afp_repository.root, afp_repos)
File.write(logger.log_dir + Build_Log.log_filename("isabelle_identify", logger.start_date),
Build_Log.Identify.content(logger.start_date, Some(get_rev()), Some(get_afp_rev())))
Isabelle_System.bash(
File.bash_path(Component_Rsync.local_program) +
""" -a --include="*/" --include="plain_identify*" --exclude="*" """ +
Bash.string(backup + "/log/.") + " " + File.bash_path(main_dir) + "/log/.").check
- if (!Isabelle_Devel.cronjob_log.is_file)
+ if (!Isabelle_Devel.cronjob_log.is_file) {
Files.createSymbolicLink(Isabelle_Devel.cronjob_log.java_path, current_log.java_path)
+ }
})
val exit: Logger_Task =
Logger_Task("exit",
{ logger =>
Isabelle_System.bash(
File.bash_path(Component_Rsync.local_program) +
" -a " + File.bash_path(main_dir) + "/log/." + " " + Bash.string(backup) + "/log/.")
.check
})
/* Mailman archives */
val mailman_archives: Logger_Task =
Logger_Task("mailman_archives",
{ logger =>
Mailman.Isabelle_Users.download(mailman_archives_dir)
Mailman.Isabelle_Dev.download(mailman_archives_dir)
})
/* build release */
val build_release: Logger_Task =
Logger_Task("build_release", { logger =>
build_release_log.file.delete
Isabelle_Devel.release_snapshot(logger.options, get_rev(), get_afp_rev(),
progress = new File_Progress(build_release_log))
})
/* remote build_history */
sealed case class Item(
known: Boolean,
isabelle_version: String,
afp_version: Option[String],
pull_date: Date
) {
def unknown: Boolean = !known
def versions: (String, Option[String]) = (isabelle_version, afp_version)
def known_versions(rev: String, afp_rev: Option[String]): Boolean =
known && rev != "" && isabelle_version == rev &&
(afp_rev.isEmpty || afp_rev.get != "" && afp_version == afp_rev)
}
def recent_items(
db: SQL.Database,
days: Int,
rev: String,
afp_rev: Option[String],
sql: PostgreSQL.Source
): List[Item] = {
val afp = afp_rev.isDefined
db.execute_query_statement(
Build_Log.Data.select_recent_versions(
days = days, rev = rev, afp_rev = afp_rev, sql = SQL.where(sql)),
List.from[Item],
{ res =>
val known = res.bool(Build_Log.Data.known)
val isabelle_version = res.string(Build_Log.Prop.isabelle_version)
val afp_version = if (afp) proper_string(res.string(Build_Log.Prop.afp_version)) else None
val pull_date = res.date(Build_Log.Data.pull_date(afp))
Item(known, isabelle_version, afp_version, pull_date)
})
}
def unknown_runs(items: List[Item]): List[List[Item]] = {
val (run, rest) = Library.take_prefix[Item](_.unknown, items.dropWhile(_.known))
if (run.nonEmpty) run :: unknown_runs(rest) else Nil
}
sealed case class Remote_Build(
description: String,
host: String,
user: String = "",
port: Int = 0,
historic: Boolean = false,
history: Int = 0,
history_base: String = "build_history_base",
components_base: String = Components.dynamic_components_base,
clean_components: Boolean = true,
java_heap: String = "",
options: String = "",
args: String = "",
afp: Boolean = false,
bulky: Boolean = false,
more_hosts: List[String] = Nil,
detect: PostgreSQL.Source = "",
active: () => Boolean = () => true
) {
def open_session(options: Options): SSH.Session =
SSH.open_session(options, host = host, user = user, port = port)
def sql: PostgreSQL.Source =
SQL.and(
Build_Log.Prop.build_engine.equal(Build_History.engine),
Build_Log.Prop.build_host.member(host :: more_hosts),
if_proper(detect, SQL.enclose(detect)))
def profile: Build_Status.Profile =
Build_Status.Profile(description, history = history, afp = afp, bulky = bulky, sql = sql)
def pick(
options: Options,
rev: String = "",
filter: Item => Boolean = _ => true
): Option[(String, Option[String])] = {
val afp_rev = if (afp) Some(get_afp_rev()) else None
val store = Build_Log.store(options)
using(store.open_database()) { db =>
def pick_days(days: Int, gap: Int): Option[(String, Option[String])] = {
val items = recent_items(db, days, rev, afp_rev, sql).filter(filter)
def runs = unknown_runs(items).filter(run => run.length >= gap)
if (historic || items.exists(_.known_versions(rev, afp_rev))) {
val longest_run =
runs.foldLeft(List.empty[Item]) {
case (item1, item2) => if (item1.length >= item2.length) item1 else item2
}
if (longest_run.isEmpty) None
else Some(longest_run(longest_run.length / 2).versions)
}
else if (rev != "") Some((rev, afp_rev))
else runs.flatten.headOption.map(_.versions)
}
pick_days(options.int("build_log_history") max history, 2) orElse
pick_days(200, 5) orElse
pick_days(2000, 1)
}
}
def build_history_options: String =
" -h " + Bash.string(host) + " " +
if_proper(java_heap,
"-e 'ISABELLE_TOOL_JAVA_OPTIONS=\"$ISABELLE_TOOL_JAVA_OPTIONS -Xmx" + java_heap + "\"' ") +
options
}
val remote_builds_old: List[Remote_Build] =
List(
Remote_Build("macOS 10.15 Catalina", "laramac01", user = "makarius",
options = "-m32 -M4 -e ISABELLE_GHC_SETUP=true -p pide_session=false",
args = "-a -d '~~/src/Benchmarks'"),
Remote_Build("Linux A", "i21of4", user = "i21isatest",
options = "-m32 -M1x4,2,4" +
" -e ISABELLE_OCAML=ocaml -e ISABELLE_OCAMLC=ocamlc -e ISABELLE_OCAML_SETUP=true" +
" -e ISABELLE_GHC_SETUP=true" +
" -e ISABELLE_MLTON=mlton" +
" -e ISABELLE_SMLNJ=sml" +
" -e ISABELLE_SWIPL=swipl",
args = "-a -d '~~/src/Benchmarks'"),
Remote_Build("Linux A", "lxbroy9",
java_heap = "2g", options = "-m32 -B -M1x2,2", args = "-N -g timing"),
Remote_Build("Linux Benchmarks", "lxbroy5", historic = true, history = 90,
java_heap = "2g",
options = "-m32 -B -M1x2,2 -t Benchmarks" +
" -e ISABELLE_GHC=ghc -e ISABELLE_MLTON=mlton -e ISABELLE_OCAML=ocaml" +
" -e ISABELLE_OCAMLC=ocamlc -e ISABELLE_OCAMLFIND=ocamlfind -e ISABELLE_SMLNJ=sml" +
" -e ISABELLE_SWIPL=swipl",
args = "-N -a -d '~~/src/Benchmarks'",
detect = Build_Log.Prop.build_tags.toString + " = " + SQL.string("Benchmarks")),
Remote_Build("macOS 10.14 Mojave (Old)", "lapnipkow3",
options = "-m32 -M1,2 -e ISABELLE_GHC_SETUP=true -p pide_session=false",
args = "-a -d '~~/src/Benchmarks'"),
Remote_Build("AFP old bulky", "lrzcloud1",
options = "-m64 -M6 -U30000 -s10 -t AFP",
args = "-g large -g slow",
afp = true,
bulky = true,
detect = Build_Log.Prop.build_tags.toString + " = " + SQL.string("AFP")),
Remote_Build("AFP old", "lxbroy7",
args = "-N -X large -X slow",
afp = true,
detect = Build_Log.Prop.build_tags.toString + " = " + SQL.string("AFP")),
Remote_Build("Poly/ML 5.7 Linux", "lxbroy8",
history_base = "37074e22e8be",
options = "-m32 -B -M1x2,2 -t polyml-5.7 -i 'init_component /home/isabelle/contrib/polyml-5.7'",
args = "-N -g timing",
detect = Build_Log.Prop.build_tags.toString + " = " + SQL.string("polyml-5.7") + " AND " +
Build_Log.Settings.ML_OPTIONS + " <> " + SQL.string("-H 500")),
Remote_Build("Poly/ML 5.7.1 Linux", "lxbroy8",
history_base = "a9d5b59c3e12",
options = "-m32 -B -M1x2,2 -t polyml-5.7.1-pre2 -i 'init_component /home/isabelle/contrib/polyml-test-905dae2ebfda'",
args = "-N -g timing",
detect =
Build_Log.Prop.build_tags.toString + " = " + SQL.string("polyml-5.7.1-pre1") + " OR " +
Build_Log.Prop.build_tags + " = " + SQL.string("polyml-5.7.1-pre2")),
Remote_Build("Poly/ML 5.7 macOS", "macbroy2",
history_base = "37074e22e8be",
options = "-m32 -B -M1x4,4 -t polyml-5.7 -i 'init_component /home/isabelle/contrib/polyml-5.7'",
args = "-a",
detect = Build_Log.Prop.build_tags.toString + " = " + SQL.string("polyml-5.7")),
Remote_Build("Poly/ML 5.7.1 macOS", "macbroy2",
history_base = "a9d5b59c3e12",
options = "-m32 -B -M1x4,4 -t polyml-5.7.1-pre2 -i 'init_component /home/isabelle/contrib/polyml-test-905dae2ebfda'",
args = "-a",
detect =
Build_Log.Prop.build_tags.toString + " = " + SQL.string("polyml-5.7.1-pre1") + " OR " +
Build_Log.Prop.build_tags + " = " + SQL.string("polyml-5.7.1-pre2")),
Remote_Build("macOS", "macbroy2",
options = "-m32 -M8" +
" -e ISABELLE_GHC=ghc -e ISABELLE_MLTON=mlton -e ISABELLE_OCAML=ocaml" +
" -e ISABELLE_OCAMLC=ocamlc -e ISABELLE_OCAML_SETUP=true" +
" -e ISABELLE_OPAM_ROOT=\"$ISABELLE_HOME/opam\"" +
" -e ISABELLE_SMLNJ=/home/isabelle/smlnj/110.85/bin/sml" +
" -p pide_session=false",
args = "-a",
detect = Build_Log.Prop.build_tags.undefined,
history_base = "2c0f24e927dd"),
Remote_Build("macOS, quick_and_dirty", "macbroy2",
options = "-m32 -M8 -t quick_and_dirty -p pide_session=false", args = "-a -o quick_and_dirty",
detect = Build_Log.Prop.build_tags.toString + " = " + SQL.string("quick_and_dirty"),
history_base = "2c0f24e927dd"),
Remote_Build("macOS, skip_proofs", "macbroy2",
options = "-m32 -M8 -t skip_proofs -p pide_session=false", args = "-a -o skip_proofs",
detect = Build_Log.Prop.build_tags.toString + " = " + SQL.string("skip_proofs"),
history_base = "2c0f24e927dd"),
Remote_Build("Poly/ML test", "lxbroy8",
options = "-m32 -B -M1x2,2 -t polyml-test -i 'init_component /home/isabelle/contrib/polyml-5.7-20170217'",
args = "-N -g timing",
detect = Build_Log.Prop.build_tags.toString + " = " + SQL.string("polyml-test")),
Remote_Build("macOS 10.12 Sierra", "macbroy30", options = "-m32 -M2 -p pide_session=false", args = "-a",
detect = Build_Log.Prop.build_start.toString + " > date '2017-03-03'"),
Remote_Build("macOS 10.10 Yosemite", "macbroy31", options = "-m32 -M2 -p pide_session=false", args = "-a"),
Remote_Build("macOS 10.8 Mountain Lion", "macbroy30", options = "-m32 -M2", args = "-a",
detect = Build_Log.Prop.build_start.toString + " < date '2017-03-03'")) :::
{
- for { (n, hosts) <- List(1 -> List("lxbroy6"), 2 -> List("lxbroy8", "lxbroy5")) }
+ for ((n, hosts) <- List(1 -> List("lxbroy6"), 2 -> List("lxbroy8", "lxbroy5")))
yield {
Remote_Build("AFP old", host = hosts.head, more_hosts = hosts.tail,
options = "-m32 -M1x2 -t AFP -P" + n +
" -e ISABELLE_GHC=ghc" +
" -e ISABELLE_MLTON=mlton" +
" -e ISABELLE_OCAML=ocaml -e ISABELLE_OCAMLC=ocamlc -e ISABELLE_OCAMLFIND=ocamlfind" +
" -e ISABELLE_SMLNJ=sml",
args = "-N -X large -X slow",
afp = true,
detect = Build_Log.Prop.build_tags.toString + " = " + SQL.string("AFP"))
}
}
val remote_builds1: List[List[Remote_Build]] = {
List(
List(Remote_Build("Linux A", "augsburg1",
options = "-m32 -B -M4" +
" -e ISABELLE_OCAML=ocaml -e ISABELLE_OCAMLC=ocamlc -e ISABELLE_OCAMLFIND=ocamlfind" +
" -e ISABELLE_GHC_SETUP=true" +
" -e ISABELLE_MLTON=mlton -e ISABELLE_MLTON_OPTIONS=" +
" -e ISABELLE_SMLNJ=sml" +
" -e ISABELLE_SWIPL=swipl",
args = "-a -d '~~/src/Benchmarks'")),
List(Remote_Build("Linux B", "lxbroy10", historic = true, history = 90,
options = "-m32 -B -M1x4,2,4,6", args = "-N -g timing")),
List(Remote_Build("macOS 10.13 High Sierra", "lapbroy68",
options = "-m32 -B -M1,2,4 -e ISABELLE_GHC_SETUP=true -p pide_session=false",
args = "-a -d '~~/src/Benchmarks'")),
List(
Remote_Build("macOS 11 Big Sur", "mini1",
options = "-m32 -B -M1x2,2,4 -p pide_session=false" +
" -e ISABELLE_OCAML=ocaml -e ISABELLE_OCAMLC=ocamlc -e ISABELLE_OCAML_SETUP=true" +
" -e ISABELLE_GHC_SETUP=true" +
" -e ISABELLE_MLTON=/usr/local/bin/mlton -e ISABELLE_MLTON_OPTIONS=" +
" -e ISABELLE_SMLNJ=/usr/local/smlnj/bin/sml" +
" -e ISABELLE_SWIPL=/usr/local/bin/swipl",
args = "-a -d '~~/src/Benchmarks'")),
List(
Remote_Build("macOS 10.14 Mojave", "mini2",
options = "-m32 -B -M1x2,2,4 -p pide_session=false" +
" -e ISABELLE_OCAML=ocaml -e ISABELLE_OCAMLC=ocamlc -e ISABELLE_OCAML_SETUP=true" +
" -e ISABELLE_GHC_SETUP=true" +
" -e ISABELLE_MLTON=/usr/local/bin/mlton -e ISABELLE_MLTON_OPTIONS=" +
" -e ISABELLE_SMLNJ=/usr/local/smlnj/bin/sml" +
" -e ISABELLE_SWIPL=/usr/local/bin/swipl",
args = "-a -d '~~/src/Benchmarks'"),
Remote_Build("macOS, quick_and_dirty", "mini2",
options = "-m32 -M4 -t quick_and_dirty -p pide_session=false",
args = "-a -o quick_and_dirty",
detect = Build_Log.Prop.build_tags.toString + " = " + SQL.string("quick_and_dirty")),
Remote_Build("macOS, skip_proofs", "mini2",
options = "-m32 -M4 -t skip_proofs -p pide_session=false", args = "-a -o skip_proofs",
detect = Build_Log.Prop.build_tags.toString + " = " + SQL.string("skip_proofs"))),
List(
Remote_Build("macOS 12 Monterey", "monterey", user = "makarius",
options = "-m32 -M4 -e ISABELLE_GHC_SETUP=true -p pide_session=false",
args = "-a -d '~~/src/Benchmarks'")),
List(
Remote_Build("Windows", "vmnipkow9", historic = true, history = 90,
components_base = "/cygdrive/d/isatest/contrib",
options = "-m32 -M4" +
" -e ISABELLE_OCAML=ocaml -e ISABELLE_OCAMLC=ocamlc -e ISABELLE_OCAML_SETUP=true" +
" -e ISABELLE_GHC_SETUP=true" +
" -e ISABELLE_SMLNJ=/usr/local/smlnj-110.81/bin/sml",
args = "-a",
detect =
Build_Log.Settings.ML_PLATFORM.toString + " = " + SQL.string("x86-windows") + " OR " +
Build_Log.Settings.ML_PLATFORM + " = " + SQL.string("x86_64_32-windows")),
Remote_Build("Windows", "vmnipkow9", historic = true, history = 90,
components_base = "/cygdrive/d/isatest/contrib",
options = "-m64 -M4" +
" -C /cygdrive/d/isatest/contrib" +
" -e ISABELLE_OCAML=ocaml -e ISABELLE_OCAMLC=ocamlc -e ISABELLE_OCAML_SETUP=true" +
" -e ISABELLE_GHC_SETUP=true" +
" -e ISABELLE_SMLNJ=/usr/local/smlnj-110.81/bin/sml",
args = "-a",
detect = Build_Log.Settings.ML_PLATFORM.toString + " = " + SQL.string("x86_64-windows"))))
}
val remote_builds2: List[List[Remote_Build]] =
List(
List(
Remote_Build("AFP", "lrzcloud2",
java_heap = "8g",
options = "-m32 -M1x5 -t AFP" +
" -e ISABELLE_GHC=ghc" +
" -e ISABELLE_MLTON=mlton -e ISABELLE_MLTON_OPTIONS=" +
" -e ISABELLE_OCAML=ocaml -e ISABELLE_OCAMLC=ocamlc -e ISABELLE_OCAMLFIND=ocamlfind" +
" -e ISABELLE_SMLNJ=sml",
args = "-a -X large -X slow",
afp = true,
detect = Build_Log.Prop.build_tags.toString + " = " + SQL.string("AFP"),
active = () => Date.now().unix_epoch_day % 2 == 0),
Remote_Build("AFP", "lrzcloud2",
java_heap = "8g",
options = "-m64 -M8 -U30000 -s10 -t AFP",
args = "-g large -g slow",
afp = true,
bulky = true,
detect = Build_Log.Prop.build_tags.toString + " = " + SQL.string("AFP"),
active = () => Date.now().unix_epoch_day % 2 == 1)))
def remote_build_history(
rev: String,
afp_rev: Option[String],
i: Int,
r: Remote_Build
) : Logger_Task = {
val task_name = "build_history-" + r.host
Logger_Task(task_name,
{ logger =>
using(r.open_session(logger.options)) { ssh =>
val results =
Build_History.remote_build(ssh,
isabelle_repos,
isabelle_repos.ext(r.host),
isabelle_identifier = "cronjob_build_history",
components_base = r.components_base,
clean_platform = r.clean_components,
clean_archives = r.clean_components,
rev = rev,
afp_repos = if (afp_rev.isDefined) Some(afp_repos) else None,
afp_rev = afp_rev.getOrElse(""),
options =
" -N " + Bash.string(task_name) + (if (i < 0) "" else "_" + (i + 1).toString) +
" -f " + r.build_history_options,
args = "-o timeout=10800 " + r.args)
for ((log_name, bytes) <- results) {
logger.log(Date.now(), log_name)
Bytes.write(logger.log_dir + Path.explode(log_name), bytes)
}
}
})
}
val build_status_profiles: List[Build_Status.Profile] =
(remote_builds_old :: remote_builds1 ::: remote_builds2).flatten.map(_.profile)

/** task logging **/

object Log_Service {
def apply(options: Options, progress: Progress = new Progress): Log_Service =
new Log_Service(options, progress)
}
class Log_Service private(val options: Options, progress: Progress) {
current_log.file.delete
private val thread: Consumer_Thread[String] =
Consumer_Thread.fork("cronjob: logger", daemon = true)(
consume =
{ (text: String) =>
// critical
File.append(current_log, text + "\n")
File.append(cumulative_log, text + "\n")
progress.echo(text)
true
})
def shutdown(): Unit = { thread.shutdown() }
val hostname: String = Isabelle_System.hostname()
def log(date: Date, task_name: String, msg: String): Unit =
if (task_name != "") {
thread.send(
"[" + Build_Log.print_date(date) + ", " + hostname + ", " + task_name + "]: " + msg)
}
def start_logger(start_date: Date, task_name: String): Logger =
new Logger(this, start_date, task_name)
def run_task(start_date: Date, task: Logger_Task): Unit = {
val logger = start_logger(start_date, task.name)
val res = Exn.capture { task.body(logger) }
val end_date = Date.now()
val err =
res match {
case Exn.Res(_) => None
case Exn.Exn(exn) =>
Output.writeln("Exception trace for " + quote(task.name) + ":")
exn.printStackTrace()
val first_line = split_lines(Exn.message(exn)).headOption getOrElse "exception"
Some(first_line)
}
logger.log_end(end_date, err)
}
def fork_task(start_date: Date, task: Logger_Task): Task =
new Task(task.name, run_task(start_date, task))
}
class Logger private[Isabelle_Cronjob](
log_service: Log_Service,
val start_date: Date,
val task_name: String
) {
def options: Options = log_service.options
def log(date: Date, msg: String): Unit = log_service.log(date, task_name, msg)
def log_end(end_date: Date, err: Option[String]): Unit = {
val elapsed_time = end_date.time - start_date.time
val msg =
(if (err.isEmpty) "finished" else "ERROR " + err.get) +
(if (elapsed_time.seconds < 3.0) "" else " (" + elapsed_time.message_hms + " elapsed time)")
log(end_date, msg)
}
val log_dir = Isabelle_System.make_directory(main_dir + Build_Log.log_subdir(start_date))
log(start_date, "started")
}
class Task private[Isabelle_Cronjob](name: String, body: => Unit) {
private val future: Future[Unit] = Future.thread("cronjob: " + name) { body }
def is_finished: Boolean = future.is_finished
}

/** cronjob **/

def cronjob(progress: Progress, exclude_task: Set[String]): Unit = {
/* soft lock */
val still_running =
try { Some(File.read(main_state_file)) }
catch { case ERROR(_) => None }
still_running match {
case None | Some("") =>
case Some(running) =>
error("Isabelle cronjob appears to be still running: " + running)
}
/* log service */
val log_service = Log_Service(Options.init(), progress = progress)
def run(start_date: Date, task: Logger_Task): Unit = log_service.run_task(start_date, task)
def run_now(task: Logger_Task): Unit = run(Date.now(), task)
/* structured tasks */
def SEQ(tasks: List[Logger_Task]): Logger_Task = Logger_Task(body = _ =>
for (task <- tasks.iterator if !exclude_task(task.name) || task.name == "") run_now(task))
def PAR(tasks: List[Logger_Task]): Logger_Task =
Logger_Task(body =
{ _ =>
@tailrec def join(running: List[Task]): Unit = {
running.partition(_.is_finished) match {
case (Nil, Nil) =>
case (Nil, _ :: _) => Time.seconds(0.5).sleep(); join(running)
case (_ :: _, remaining) => join(remaining)
}
}
val start_date = Date.now()
val running =
for (task <- tasks if !exclude_task(task.name))
yield log_service.fork_task(start_date, task)
join(running)
})
/* repository structure */
val hg = Mercurial.repository(isabelle_repos)
val hg_graph = hg.graph()
def history_base_filter(r: Remote_Build): Item => Boolean = {
val base_rev = hg.id(r.history_base)
val nodes = hg_graph.all_succs(List(base_rev)).toSet
(item: Item) => nodes(item.isabelle_version)
}
/* main */
val main_start_date = Date.now()
File.write(main_state_file, main_start_date.toString + " " + log_service.hostname)
run(main_start_date,
Logger_Task("isabelle_cronjob", logger =>
run_now(
SEQ(List(
init,
PAR(List(mailman_archives, build_release)),
PAR(
List(remote_builds1, remote_builds2).map(remote_builds =>
SEQ(List(
PAR(remote_builds.map(_.filter(_.active())).map(seq =>
SEQ(
for {
(r, i) <- (if (seq.length <= 1) seq.map((_, -1)) else seq.zipWithIndex)
(rev, afp_rev) <- r.pick(logger.options, hg.id(), history_base_filter(r))
} yield remote_build_history(rev, afp_rev, i, r)))),
Logger_Task("build_log_database",
logger =>
Build_Log.build_log_database(logger.options, build_log_dirs,
vacuum = true, ml_statistics = true,
snapshot = Some(Isabelle_Devel.build_log_snapshot))),
Logger_Task("build_status",
logger => Isabelle_Devel.build_status(logger.options)))))),
exit)))))
log_service.shutdown()
main_state_file.file.delete
}

/** command line entry point **/

def main(args: Array[String]): Unit = {
Command_Line.tool {
var force = false
var verbose = false
var exclude_task = Set.empty[String]
val getopts = Getopts("""
Usage: Admin/cronjob/main [OPTIONS]
Options are:
-f apply force to do anything
-v verbose
-x NAME exclude tasks with this name
""",
"f" -> (_ => force = true),
"v" -> (_ => verbose = true),
"x:" -> (arg => exclude_task += arg))
val more_args = getopts(args)
if (more_args.nonEmpty) getopts.usage()
val progress = if (verbose) new Console_Progress() else new Progress
if (force) cronjob(progress, exclude_task)
else error("Need to apply force to do anything")
}
}
}
diff --git a/src/Pure/General/basics.ML b/src/Pure/General/basics.ML
--- a/src/Pure/General/basics.ML
+++ b/src/Pure/General/basics.ML
@@ -1,131 +1,135 @@
(* Title: Pure/General/basics.ML
Author: Florian Haftmann and Makarius, TU Muenchen
Fundamental concepts.
*)
infix 1 |> |-> |>> ||> ||>>
infix 1 #> #-> #>> ##> ##>>
signature BASICS =
sig
(*functions*)
val |> : 'a * ('a -> 'b) -> 'b
val |-> : ('c * 'a) * ('c -> 'a -> 'b) -> 'b
val |>> : ('a * 'c) * ('a -> 'b) -> 'b * 'c
val ||> : ('c * 'a) * ('a -> 'b) -> 'c * 'b
val ||>> : ('c * 'a) * ('a -> 'd * 'b) -> ('c * 'd) * 'b
val #> : ('a -> 'b) * ('b -> 'c) -> 'a -> 'c
val #-> : ('a -> 'c * 'b) * ('c -> 'b -> 'd) -> 'a -> 'd
val #>> : ('a -> 'c * 'b) * ('c -> 'd) -> 'a -> 'd * 'b
val ##> : ('a -> 'c * 'b) * ('b -> 'd) -> 'a -> 'c * 'd
val ##>> : ('a -> 'c * 'b) * ('b -> 'e * 'd) -> 'a -> ('c * 'e) * 'd
val ` : ('b -> 'a) -> 'b -> 'a * 'b
val tap: ('b -> 'a) -> 'b -> 'b
(*options*)
val is_some: 'a option -> bool
val is_none: 'a option -> bool
val the: 'a option -> 'a
val these: 'a list option -> 'a list
val the_list: 'a option -> 'a list
val the_default: 'a -> 'a option -> 'a
val perhaps: ('a -> 'a option) -> 'a -> 'a
val merge_options: 'a option * 'a option -> 'a option
+ val join_options: ('a * 'a -> 'a) -> 'a option * 'a option -> 'a option
val eq_option: ('a * 'b -> bool) -> 'a option * 'b option -> bool
(*partiality*)
val try: ('a -> 'b) -> 'a -> 'b option
val can: ('a -> 'b) -> 'a -> bool
(*lists*)
val cons: 'a -> 'a list -> 'a list
val append: 'a list -> 'a list -> 'a list
val fold: ('a -> 'b -> 'b) -> 'a list -> 'b -> 'b
val fold_rev: ('a -> 'b -> 'b) -> 'a list -> 'b -> 'b
val fold_map: ('a -> 'b -> 'c * 'b) -> 'a list -> 'b -> 'c list * 'b
end;
structure Basics: BASICS =
struct
(* functions *)
(*application and structured results*)
fun x |> f = f x;
fun (x, y) |-> f = f x y;
fun (x, y) |>> f = (f x, y);
fun (x, y) ||> f = (x, f y);
fun (x, y) ||>> f = let val (z, y') = f y in ((x, z), y') end;
(*composition and structured results*)
fun (f #> g) x = x |> f |> g;
fun (f #-> g) x = x |> f |-> g;
fun (f #>> g) x = x |> f |>> g;
fun (f ##> g) x = x |> f ||> g;
fun (f ##>> g) x = x |> f ||>> g;
(*result views*)
fun `f = fn x => (f x, x);
fun tap f = fn x => (f x; x);
(* options *)
fun is_some (SOME _) = true
| is_some NONE = false;
fun is_none (SOME _) = false
| is_none NONE = true;
fun the (SOME x) = x
| the NONE = raise Option.Option;
fun these (SOME x) = x
| these NONE = [];
fun the_list (SOME x) = [x]
| the_list NONE = []
fun the_default x (SOME y) = y
| the_default x NONE = x;
fun perhaps f x = the_default x (f x);
fun merge_options (x, y) = if is_some x then x else y;
+fun join_options f (SOME x, SOME y) = SOME (f (x, y))
+ | join_options _ args = merge_options args;
+
fun eq_option eq (SOME x, SOME y) = eq (x, y)
| eq_option _ (NONE, NONE) = true
| eq_option _ _ = false;
(* partiality *)
fun try f x = SOME (f x)
handle exn => if Exn.is_interrupt exn then Exn.reraise exn else NONE;
fun can f x = is_some (try f x);
(* lists *)
fun cons x xs = x :: xs;
fun append xs ys = xs @ ys;
fun fold _ [] y = y
| fold f (x :: xs) y = fold f xs (f x y);
fun fold_rev _ [] y = y
| fold_rev f (x :: xs) y = f x (fold_rev f xs y);
fun fold_map _ [] y = ([], y)
| fold_map f (x :: xs) y =
let
val (x', y') = f x y;
val (xs', y'') = fold_map f xs y';
in (x' :: xs', y'') end;
end;
open Basics;
diff --git a/src/Pure/Isar/args.ML b/src/Pure/Isar/args.ML
--- a/src/Pure/Isar/args.ML
+++ b/src/Pure/Isar/args.ML
@@ -1,189 +1,190 @@
(* Title: Pure/Isar/args.ML
Author: Markus Wenzel, TU Muenchen
Quasi-inner syntax based on outer tokens: concrete argument syntax of
attributes, methods etc.
*)
signature ARGS =
sig
val context: Proof.context context_parser
val theory: theory context_parser
val symbolic: Token.T parser
val $$$ : string -> string parser
val add: string parser
val del: string parser
val colon: string parser
val query: string parser
val bang: string parser
val query_colon: string parser
val bang_colon: string parser
val parens: 'a parser -> 'a parser
val bracks: 'a parser -> 'a parser
val mode: string -> bool parser
val maybe: 'a parser -> 'a option parser
val name_token: Token.T parser
val name: string parser
val name_position: (string * Position.T) parser
val cartouche_inner_syntax: string parser
val cartouche_input: Input.source parser
val binding: binding parser
val alt_name: string parser
val liberal_name: string parser
val var: indexname parser
val internal_source: Token.src parser
val internal_name: Token.name_value parser
val internal_typ: typ parser
val internal_term: term parser
val internal_fact: thm list parser
- val internal_attribute: (morphism -> attribute) parser
- val internal_declaration: declaration parser
+ val internal_attribute: attribute Morphism.entity parser
+ val internal_declaration: Morphism.declaration_entity parser
val named_source: (Token.T -> Token.src) -> Token.src parser
val named_typ: (string -> typ) -> typ parser
val named_term: (string -> term) -> term parser
val named_fact: (string -> string option * thm list) -> thm list parser
- val named_attribute: (string * Position.T -> morphism -> attribute) ->
- (morphism -> attribute) parser
- val embedded_declaration: (Input.source -> declaration) -> declaration parser
+ val named_attribute: (string * Position.T -> attribute Morphism.entity) ->
+ attribute Morphism.entity parser
+ val embedded_declaration: (Input.source -> Morphism.declaration_entity) ->
+ Morphism.declaration_entity parser
val typ_abbrev: typ context_parser
val typ: typ context_parser
val term: term context_parser
val term_pattern: term context_parser
val term_abbrev: term context_parser
val prop: term context_parser
val type_name: {proper: bool, strict: bool} -> string context_parser
val const: {proper: bool, strict: bool} -> string context_parser
val goal_spec: ((int -> tactic) -> tactic) context_parser
end;
structure Args: ARGS =
struct

(** argument scanners **)

(* context *)
fun context x = (Scan.state >> Context.proof_of) x;
fun theory x = (Scan.state >> Context.theory_of) x;
(* basic *)
val ident = Parse.token
(Parse.short_ident || Parse.long_ident || Parse.sym_ident || Parse.term_var ||
Parse.type_ident || Parse.type_var || Parse.number);
val string = Parse.token Parse.string;
val alt_string = Parse.token (Parse.alt_string || Parse.cartouche);
val symbolic = Parse.token (Parse.keyword_with Token.ident_or_symbolic);
fun $$$ x =
(ident || Parse.token Parse.keyword) :|-- (fn tok =>
let val y = Token.content_of tok in
if x = y
then (Token.assign (SOME (Token.Literal (false, Markup.quasi_keyword))) tok; Scan.succeed x)
else Scan.fail
end);
val add = $$$ "add";
val del = $$$ "del";
val colon = $$$ ":";
val query = $$$ "?";
val bang = $$$ "!";
val query_colon = $$$ "?" ^^ $$$ ":";
val bang_colon = $$$ "!" ^^ $$$ ":";
fun parens scan = $$$ "(" |-- scan --| $$$ ")";
fun bracks scan = $$$ "[" |-- scan --| $$$ "]";
fun mode s = Scan.optional (parens ($$$ s) >> K true) false;
fun maybe scan = $$$ "_" >> K NONE || scan >> SOME;
val name_token = ident || string;
val name = name_token >> Token.content_of;
val name_position = name_token >> (Input.source_content o Token.input_of);
val cartouche = Parse.token Parse.cartouche;
val cartouche_inner_syntax = cartouche >> Token.inner_syntax_of;
val cartouche_input = cartouche >> Token.input_of;
val binding = Parse.input name >> (Binding.make o Input.source_content);
val alt_name = alt_string >> Token.content_of;
val liberal_name = (symbolic >> Token.content_of) || name;
val var =
(ident >> Token.content_of) :|-- (fn x =>
(case Lexicon.read_variable x of SOME v => Scan.succeed v | NONE => Scan.fail));
(* values *)
fun value dest = Scan.some (fn arg =>
(case Token.get_value arg of SOME v => (SOME (dest v) handle Match => NONE) | NONE => NONE));
val internal_source = value (fn Token.Source src => src);
val internal_name = value (fn Token.Name (a, _) => a);
val internal_typ = value (fn Token.Typ T => T);
val internal_term = value (fn Token.Term t => t);
val internal_fact = value (fn Token.Fact (_, ths) => ths);
val internal_attribute = value (fn Token.Attribute att => att);
val internal_declaration = value (fn Token.Declaration decl => decl);
fun named_source read =
internal_source || name_token >> Token.evaluate Token.Source read;
fun named_typ read =
internal_typ ||
Parse.token Parse.embedded >> Token.evaluate Token.Typ (read o Token.inner_syntax_of);
fun named_term read =
internal_term ||
Parse.token Parse.embedded >> Token.evaluate Token.Term (read o Token.inner_syntax_of);
fun named_fact get =
internal_fact ||
name_token >> Token.evaluate Token.Fact (get o Token.content_of) >> #2 ||
alt_string >> Token.evaluate Token.Fact (get o Token.inner_syntax_of) >> #2;
fun named_attribute att =
internal_attribute ||
name_token >>
Token.evaluate Token.Attribute (fn tok => att (Token.content_of tok, Token.pos_of tok));
fun embedded_declaration read =
internal_declaration ||
Parse.token Parse.embedded >> Token.evaluate Token.Declaration (read o Token.input_of);
(* terms and types *)
val typ_abbrev = Scan.peek (named_typ o Proof_Context.read_typ_abbrev o Context.proof_of);
val typ = Scan.peek (named_typ o Syntax.read_typ o Context.proof_of);
val term = Scan.peek (named_term o Syntax.read_term o Context.proof_of);
val term_pattern = Scan.peek (named_term o Proof_Context.read_term_pattern o Context.proof_of);
val term_abbrev = Scan.peek (named_term o Proof_Context.read_term_abbrev o Context.proof_of);
val prop = Scan.peek (named_term o Syntax.read_prop o Context.proof_of);
(* type and constant names *)
fun type_name flags =
Scan.peek (named_typ o Proof_Context.read_type_name flags o Context.proof_of)
>> (fn Type (c, _) => c | TFree (a, _) => a | _ => "");
fun const flags =
Scan.peek (named_term o Proof_Context.read_const flags o Context.proof_of)
>> (fn Const (c, _) => c | Free (x, _) => x | _ => "");
(* improper method arguments *)
val from_to =
Parse.nat -- ($$$ "-" |-- Parse.nat) >> (fn (i, j) => fn tac => Seq.INTERVAL tac i j) ||
Parse.nat --| $$$ "-" >> (fn i => fn tac => fn st => Seq.INTERVAL tac i (Thm.nprems_of st) st) ||
Parse.nat >> (fn i => fn tac => tac i) ||
$$$ "!" >> K ALLGOALS;
val goal = Parse.keyword_improper "[" |-- Parse.!!! (from_to --| Parse.keyword_improper "]");
fun goal_spec x = Scan.lift (Scan.optional goal (fn tac => tac 1)) x;
end;
diff --git a/src/Pure/Isar/attrib.ML b/src/Pure/Isar/attrib.ML
--- a/src/Pure/Isar/attrib.ML
+++ b/src/Pure/Isar/attrib.ML
@@ -1,646 +1,660 @@
(* Title: Pure/Isar/attrib.ML
Author: Markus Wenzel, TU Muenchen
Symbolic representation of attributes -- with name and syntax.
*)
signature ATTRIB =
sig
type thms = Attrib.thms
type fact = Attrib.fact
val print_attributes: bool -> Proof.context -> unit
val attribute_space: Context.generic -> Name_Space.T
val define_global: binding -> (Token.src -> attribute) -> string -> theory -> string * theory
val define: binding -> (Token.src -> attribute) -> string -> local_theory -> string * local_theory
val check_name_generic: Context.generic -> xstring * Position.T -> string
val check_name: Proof.context -> xstring * Position.T -> string
val check_src: Proof.context -> Token.src -> Token.src
val attribs: Token.src list context_parser
val opt_attribs: Token.src list context_parser
val pretty_attribs: Proof.context -> Token.src list -> Pretty.T list
val pretty_binding: Proof.context -> Attrib.binding -> string -> Pretty.T list
val attribute: Proof.context -> Token.src -> attribute
val attribute_global: theory -> Token.src -> attribute
val attribute_cmd: Proof.context -> Token.src -> attribute
val attribute_cmd_global: theory -> Token.src -> attribute
val map_specs: ('a list -> 'att list) ->
(('c * 'a list) * 'b) list -> (('c * 'att list) * 'b) list
val map_facts: ('a list -> 'att list) ->
(('c * 'a list) * ('d * 'a list) list) list ->
(('c * 'att list) * ('d * 'att list) list) list
val map_facts_refs: ('a list -> 'att list) -> ('b -> 'fact) ->
(('c * 'a list) * ('b * 'a list) list) list ->
(('c * 'att list) * ('fact * 'att list) list) list
val trim_context_binding: Attrib.binding -> Attrib.binding
val trim_context_thms: thms -> thms
val trim_context_fact: fact -> fact
val global_notes: string -> fact list -> theory ->
(string * thm list) list * theory
val local_notes: string -> fact list -> Proof.context ->
(string * thm list) list * Proof.context
val generic_notes: string -> fact list -> Context.generic ->
(string * thm list) list * Context.generic
val lazy_notes: string -> binding * thm list lazy -> Context.generic -> Context.generic
val eval_thms: Proof.context -> (Facts.ref * Token.src list) list -> thm list
val attribute_syntax: attribute context_parser -> Token.src -> attribute
val setup: binding -> attribute context_parser -> string -> theory -> theory
val local_setup: binding -> attribute context_parser -> string ->
local_theory -> local_theory
val attribute_setup: bstring * Position.T -> Input.source -> string ->
local_theory -> local_theory
- val internal: (morphism -> attribute) -> Token.src
- val internal_declaration: declaration -> thms
+ val internal: Position.T -> (morphism -> attribute) -> Token.src
+ val internal_declaration: Position.T -> Morphism.declaration_entity -> thms
val add_del: attribute -> attribute -> attribute context_parser
val thm: thm context_parser
val thms: thm list context_parser
val multi_thm: thm list context_parser
val transform_facts: morphism -> fact list -> fact list
val partial_evaluation: Proof.context -> fact list -> fact list
val print_options: bool -> Proof.context -> unit
val config_bool: binding -> (Context.generic -> bool) -> bool Config.T * (theory -> theory)
val config_int: binding -> (Context.generic -> int) -> int Config.T * (theory -> theory)
val config_real: binding -> (Context.generic -> real) -> real Config.T * (theory -> theory)
val config_string: binding -> (Context.generic -> string) -> string Config.T * (theory -> theory)
val setup_config_bool: binding -> (Context.generic -> bool) -> bool Config.T
val setup_config_int: binding -> (Context.generic -> int) -> int Config.T
val setup_config_real: binding -> (Context.generic -> real) -> real Config.T
val setup_config_string: binding -> (Context.generic -> string) -> string Config.T
val option_bool: string * Position.T -> bool Config.T * (theory -> theory)
val option_int: string * Position.T -> int Config.T * (theory -> theory)
val option_real: string * Position.T -> real Config.T * (theory -> theory)
val option_string: string * Position.T -> string Config.T * (theory -> theory)
val setup_option_bool: string * Position.T -> bool Config.T
val setup_option_int: string * Position.T -> int Config.T
val setup_option_real: string * Position.T -> real Config.T
val setup_option_string: string * Position.T -> string Config.T
val consumes: int -> Token.src
val constraints: int -> Token.src
val cases_open: Token.src
val case_names: string list -> Token.src
val case_conclusion: string * string list -> Token.src
end;
structure Attrib: sig type binding = Attrib.binding include ATTRIB end =
struct
open Attrib;
(** named attributes **)
(* theory data *)
structure Attributes = Generic_Data
(
type T = ((Token.src -> attribute) * string) Name_Space.table;
val empty : T = Name_Space.empty_table Markup.attributeN;
fun merge data : T = Name_Space.merge_tables data;
);
val ops_attributes = {get_data = Attributes.get, put_data = Attributes.put};
val get_attributes = Attributes.get o Context.Proof;
fun print_attributes verbose ctxt =
let
val attribs = get_attributes ctxt;
fun prt_attr (name, (_, "")) = Pretty.mark_str name
| prt_attr (name, (_, comment)) =
Pretty.block
(Pretty.mark_str name :: Pretty.str ":" :: Pretty.brk 2 :: Pretty.text comment);
in
[Pretty.big_list "attributes:" (map prt_attr (Name_Space.markup_table verbose ctxt attribs))]
|> Pretty.writeln_chunks
end;
val attribute_space = Name_Space.space_of_table o Attributes.get;
(* define *)
fun define_global binding att comment =
Entity.define_global ops_attributes binding (att, comment);
fun define binding att comment =
Entity.define ops_attributes binding (att, comment);
(* check *)
fun check_name_generic context = #1 o Name_Space.check context (Attributes.get context);
val check_name = check_name_generic o Context.Proof;
fun check_src ctxt src =
let
val _ =
if Token.checked_src src then ()
else Context_Position.report ctxt (#1 (Token.range_of src)) Markup.language_attribute;
in #1 (Token.check_src ctxt get_attributes src) end;
val attribs =
Args.context -- Scan.lift Parse.attribs
>> (fn (ctxt, srcs) => map (check_src ctxt) srcs);
val opt_attribs = Scan.optional attribs [];
(* pretty printing *)
fun pretty_attribs _ [] = []
| pretty_attribs ctxt srcs = [Pretty.enum "," "[" "]" (map (Token.pretty_src ctxt) srcs)];
fun pretty_binding ctxt (b, atts) sep =
(case (Binding.is_empty b, null atts) of
(true, true) => []
| (false, true) => [Pretty.block [Binding.pretty b, Pretty.str sep]]
| (true, false) => [Pretty.block (pretty_attribs ctxt atts @ [Pretty.str sep])]
| (false, false) =>
[Pretty.block
(Binding.pretty b :: Pretty.brk 1 :: pretty_attribs ctxt atts @ [Pretty.str sep])]);
(* get attributes *)
fun attribute_generic context =
let val table = Attributes.get context in
fn src =>
let
val name = #1 (Token.name_of_src src);
val label = Long_Name.qualify Markup.attributeN name;
val att = #1 (Name_Space.get table name) src;
- in Position.setmp_thread_data_label label att end
+ in Position.setmp_thread_data_label label att : attribute end
end;
val attribute = attribute_generic o Context.Proof;
val attribute_global = attribute_generic o Context.Theory;
fun attribute_cmd ctxt = attribute ctxt o check_src ctxt;
fun attribute_cmd_global thy = attribute_global thy o check_src (Proof_Context.init_global thy);
(* attributed declarations *)
fun map_specs f = map (apfst (apsnd f));
fun map_facts f = map (apfst (apsnd f) o apsnd (map (apsnd f)));
fun map_facts_refs f g = map_facts f #> map (apsnd (map (apfst g)));
-(* fact expressions *)
+(* implicit context *)
-val trim_context_binding: Attrib.binding -> Attrib.binding = apsnd (map Token.trim_context_src);
-val trim_context_thms: thms -> thms = (map o apfst o map) Thm.trim_context;
+val trim_context_binding: Attrib.binding -> Attrib.binding =
+ apsnd ((map o map) Token.trim_context);
+
+val trim_context_thms: thms -> thms =
+ map (fn (thms, atts) => (map Thm.trim_context thms, (map o map) Token.trim_context atts));
+
fun trim_context_fact (binding, thms) = (trim_context_binding binding, trim_context_thms thms);
+
+(* fact expressions *)
+
fun global_notes kind facts thy = thy |>
Global_Theory.note_thmss kind (map_facts (map (attribute_global thy)) facts);
fun local_notes kind facts ctxt = ctxt |>
Proof_Context.note_thmss kind (map_facts (map (attribute ctxt)) facts);
fun generic_notes kind facts context = context |>
Context.mapping_result (global_notes kind facts) (local_notes kind facts);
fun lazy_notes kind arg =
Context.mapping (Global_Theory.add_thms_lazy kind arg) (Proof_Context.add_thms_lazy kind arg);
fun eval_thms ctxt srcs = ctxt
|> Proof_Context.note_thmss ""
(map_facts_refs
(map (attribute_cmd ctxt)) (Proof_Context.get_fact ctxt) [(Binding.empty_atts, srcs)])
|> fst |> maps snd;
(* attribute setup *)
fun attribute_syntax scan src (context, th) =
let val (a, context') = Token.syntax_generic scan src context in a (context', th) end;
fun setup binding scan comment = define_global binding (attribute_syntax scan) comment #> snd;
fun local_setup binding scan comment = define binding (attribute_syntax scan) comment #> snd;
fun attribute_setup binding source comment =
ML_Context.expression (Input.pos_of source)
(ML_Lex.read
("Theory.local_setup (Attrib.local_setup (" ^ ML_Syntax.make_binding binding ^ ") (") @
ML_Lex.read_source source @ ML_Lex.read (") " ^ ML_Syntax.print_string comment ^ ")"))
|> Context.proof_map;
(* internal attribute *)
-val _ = Theory.setup
- (setup (Binding.make ("attribute", \<^here>))
- (Scan.lift Args.internal_attribute >> Morphism.form)
- "internal attribute");
-
-fun internal_name ctxt name =
+fun make_name ctxt name =
Token.make_src (name, Position.none) [] |> check_src ctxt |> hd;
-val internal_attribute_name =
- internal_name (Context.the_local_context ()) "attribute";
+local
-fun internal att =
- internal_attribute_name ::
- [Token.make_string ("<attribute>", Position.none) |> Token.assign (SOME (Token.Attribute att))];
+val internal_binding = Binding.make ("attribute", \<^here>);
-fun internal_declaration decl =
- [([Drule.dummy_thm], [internal (fn phi => Thm.declaration_attribute (K (decl phi)))])];
+val _ = Theory.setup
+ (setup internal_binding
+ (Scan.lift Args.internal_attribute >> Morphism.form ||
+ Scan.lift Args.internal_declaration >> (Thm.declaration_attribute o K o Morphism.form))
+ "internal attribute");
+
+val internal_name = make_name (Context.the_local_context ()) (Binding.name_of internal_binding);
+fun internal_source name value = [internal_name, Token.assign (SOME value) (Token.make_string name)];
+
+in
+
+fun internal pos arg =
+ internal_source ("internal", pos) (Token.Attribute (Morphism.entity arg));
+
+fun internal_declaration pos arg =
+ [([Drule.dummy_thm], [internal_source ("declaration", pos) (Token.Declaration arg)])];
+
+end;
(* add/del syntax *)
fun add_del add del = Scan.lift (Args.add >> K add || Args.del >> K del || Scan.succeed add);
(** parsing attributed theorems **)
local
val fact_name =
Parse.position Args.internal_fact >> (fn (_, pos) => ("<fact>", pos)) || Args.name_position;
fun gen_thm pick = Scan.depend (fn context =>
let
val get = Proof_Context.get_fact_generic context;
val get_fact = get o Facts.Fact;
fun get_named is_sel pos name =
let val (a, ths) = get (Facts.Named ((name, pos), NONE))
in (if is_sel then NONE else a, ths) end;
in
Parse.$$$ "[" |-- Scan.pass context attribs --| Parse.$$$ "]" >> (fn srcs =>
let
val atts = map (attribute_generic context) srcs;
val (th', context') = fold (uncurry o Thm.apply_attribute) atts (Drule.dummy_thm, context);
in (context', pick ("", Position.none) [th']) end)
||
(Scan.ahead Args.alt_name -- Args.named_fact get_fact
>> (fn (s, fact) => ("", Facts.Fact s, fact)) ||
Scan.ahead (fact_name -- Scan.option Parse.thm_sel) :|--
(fn ((name, pos), sel) =>
Args.named_fact (get_named (is_some sel) pos) --| Scan.option Parse.thm_sel
>> (fn fact => (name, Facts.Named ((name, pos), sel), fact))))
-- Scan.pass context opt_attribs >> (fn ((name, thmref, fact), srcs) =>
let
val ths = Facts.select thmref fact;
val atts = map (attribute_generic context) srcs;
val (ths', context') =
fold_map (curry (fold (uncurry o Thm.apply_attribute) atts)) ths context;
in (context', pick (name, Facts.ref_pos thmref) ths') end)
end);
in
val thm = gen_thm Facts.the_single;
val multi_thm = gen_thm (K I);
val thms = Scan.repeats multi_thm;
end;
(* transform fact expressions *)
fun transform_facts phi = map (fn ((a, atts), bs) =>
((Morphism.binding phi a, (map o map) (Token.transform phi) atts),
bs |> map (fn (ths, btts) => (Morphism.fact phi ths, (map o map) (Token.transform phi) btts))));
(** partial evaluation -- observing rule/declaration/mixed attributes **)
(*NB: result length may change due to rearrangement of symbolic expression*)
local
fun apply_att src (context, th) =
let
val src1 = map Token.init_assignable src;
val result = attribute_generic context src1 (context, th);
val src2 = map Token.closure src1;
in (src2, result) end;
fun err msg src =
let val (name, pos) = Token.name_of_src src
in error (msg ^ " " ^ quote name ^ Position.here pos) end;
fun eval src ((th, dyn), (decls, context)) =
(case (apply_att src (context, th), dyn) of
((_, (NONE, SOME th')), NONE) => ((th', NONE), (decls, context))
| ((_, (NONE, SOME _)), SOME _) => err "Mixed dynamic attribute followed by static rule" src
| ((src', (SOME context', NONE)), NONE) =>
let
val decls' =
(case decls of
[] => [(th, [src'])]
| (th2, srcs2) :: rest =>
if Thm.eq_thm_strict (th, th2)
then ((th2, src' :: srcs2) :: rest)
else (th, [src']) :: (th2, srcs2) :: rest);
in ((th, NONE), (decls', context')) end
| ((src', (opt_context', opt_th')), _) =>
let
val context' = the_default context opt_context';
val th' = the_default th opt_th';
val dyn' =
(case dyn of
NONE => SOME (th, [src'])
| SOME (dyn_th, srcs) => SOME (dyn_th, src' :: srcs));
in ((th', dyn'), (decls, context')) end);
in
fun partial_evaluation ctxt facts =
(facts, Context.Proof (Context_Position.not_really ctxt)) |->
fold_map (fn ((b, more_atts), fact) => fn context =>
let
val (fact', (decls, context')) =
(fact, ([], context)) |-> fold_map (fn (ths, atts) => fn res1 =>
(ths, res1) |-> fold_map (fn th => fn res2 =>
let
val ((th', dyn'), res3) = fold eval (atts @ more_atts) ((th, NONE), res2);
val th_atts' =
(case dyn' of
NONE => (th', [])
| SOME (dyn_th', atts') => (dyn_th', rev atts'));
in (th_atts', res3) end))
|>> flat;
val decls' = rev (map (apsnd rev) decls);
val facts' =
if eq_list (eq_fst Thm.eq_thm_strict) (decls', fact') then
[((b, []), map2 (fn (th, atts1) => fn (_, atts2) => (th, atts1 @ atts2)) decls' fact')]
else if null decls' then [((b, []), fact')]
else [(Binding.empty_atts, decls'), ((b, []), fact')];
in (facts', context') end)
|> fst |> flat |> map (apsnd (map (apfst single)))
|> filter_out (fn (b, fact) => Binding.is_empty_atts b andalso forall (null o #2) fact);
end;
(** configuration options **)
(* naming *)
structure Configs = Theory_Data
(
type T = Config.value Config.T Symtab.table;
val empty = Symtab.empty;
fun merge data = Symtab.merge (K true) data;
);
fun print_options verbose ctxt =
let
fun prt (name, config) =
let val value = Config.get ctxt config in
Pretty.block [Pretty.mark_str name, Pretty.str (": " ^ Config.print_type value ^ " ="),
Pretty.brk 1, Pretty.str (Config.print_value value)]
end;
val space = attribute_space (Context.Proof ctxt);
val configs =
Name_Space.markup_entries verbose ctxt space
(Symtab.dest (Configs.get (Proof_Context.theory_of ctxt)));
in Pretty.writeln (Pretty.big_list "configuration options" (map prt configs)) end;
(* concrete syntax *)
local
val equals = Parse.$$$ "=";
fun scan_value (Config.Bool _) =
equals -- Args.$$$ "false" >> K (Config.Bool false) ||
equals -- Args.$$$ "true" >> K (Config.Bool true) ||
Scan.succeed (Config.Bool true)
| scan_value (Config.Int _) = equals |-- Parse.int >> Config.Int
| scan_value (Config.Real _) = equals |-- Parse.real >> Config.Real
| scan_value (Config.String _) = equals |-- Args.name >> Config.String;
fun scan_config thy config =
let val config_type = Config.get_global thy config
in scan_value config_type >> (K o Thm.declaration_attribute o K o Config.put_generic config) end;
fun register binding config thy =
let val name = Sign.full_name thy binding in
thy
- |> setup binding (Scan.lift (scan_config thy config) >> Morphism.form) "configuration option"
+ |> setup binding (Scan.lift (scan_config thy config) >> Morphism.form_entity)
+ "configuration option"
|> Configs.map (Symtab.update (name, config))
end;
fun declare make coerce binding default =
let
val name = Binding.name_of binding;
val pos = Binding.pos_of binding;
val config_value = Config.declare (name, pos) (make o default);
val config = coerce config_value;
in (config, register binding config_value) end;
in
fun register_config config =
register (Binding.make (Config.name_of config, Config.pos_of config)) config;
val register_config_bool = register_config o Config.bool_value;
val register_config_int = register_config o Config.int_value;
val register_config_real = register_config o Config.real_value;
val register_config_string = register_config o Config.string_value;
val config_bool = declare Config.Bool Config.bool;
val config_int = declare Config.Int Config.int;
val config_real = declare Config.Real Config.real;
val config_string = declare Config.String Config.string;
end;
(* implicit setup *)
local
fun setup_config declare_config binding default =
let
val (config, setup) = declare_config binding default;
val _ = Theory.setup setup;
in config end;
in
val setup_config_bool = setup_config config_bool;
val setup_config_int = setup_config config_int;
val setup_config_string = setup_config config_string;
val setup_config_real = setup_config config_real;
end;
(* system options *)
local
fun declare_option coerce (name, pos) =
let
val config = Config.declare_option (name, pos);
in (coerce config, register_config config) end;
fun setup_option coerce (name, pos) =
let
val config = Config.declare_option (name, pos);
val _ = Theory.setup (register_config config);
in coerce config end;
in
val option_bool = declare_option Config.bool;
val option_int = declare_option Config.int;
val option_real = declare_option Config.real;
val option_string = declare_option Config.string;
val setup_option_bool = setup_option Config.bool;
val setup_option_int = setup_option Config.int;
val setup_option_real = setup_option Config.real;
val setup_option_string = setup_option Config.string;
end;
(* theory setup *)
val _ = Theory.setup
(setup \<^binding>\<open>tagged\<close> (Scan.lift (Args.name -- Args.name) >> Thm.tag) "tagged theorem" #>
setup \<^binding>\<open>untagged\<close> (Scan.lift Args.name >> Thm.untag) "untagged theorem" #>
setup \<^binding>\<open>kind\<close> (Scan.lift Args.name >> Thm.kind) "theorem kind" #>
setup \<^binding>\<open>THEN\<close>
(Scan.lift (Scan.optional (Args.bracks Parse.nat) 1) -- thm
>> (fn (i, B) => Thm.rule_attribute [B] (fn _ => fn A => A RSN (i, B))))
"resolution with rule" #>
setup \<^binding>\<open>OF\<close>
(thms >> (fn Bs => Thm.rule_attribute Bs (fn _ => fn A => A OF Bs)))
"rule resolved with facts" #>
setup \<^binding>\<open>rename_abs\<close>
(Scan.lift (Scan.repeat (Args.maybe Args.name)) >> (fn vs =>
Thm.rule_attribute [] (K (Drule.rename_bvars' vs))))
"rename bound variables in abstractions" #>
setup \<^binding>\<open>unfolded\<close>
(thms >> (fn ths =>
Thm.rule_attribute ths (fn context => Local_Defs.unfold (Context.proof_of context) ths)))
"unfolded definitions" #>
setup \<^binding>\<open>folded\<close>
(thms >> (fn ths =>
Thm.rule_attribute ths (fn context => Local_Defs.fold (Context.proof_of context) ths)))
"folded definitions" #>
setup \<^binding>\<open>consumes\<close>
(Scan.lift (Scan.optional Parse.int 1) >> Rule_Cases.consumes)
"number of consumed facts" #>
setup \<^binding>\<open>constraints\<close>
(Scan.lift Parse.nat >> Rule_Cases.constraints)
"number of equality constraints" #>
setup \<^binding>\<open>cases_open\<close>
(Scan.succeed Rule_Cases.cases_open)
"rule with open parameters" #>
setup \<^binding>\<open>case_names\<close>
(Scan.lift (Scan.repeat (Args.name --
Scan.optional (Parse.$$$ "[" |-- Scan.repeat1 (Args.maybe Args.name) --| Parse.$$$ "]") []))
>> (fn cs =>
Rule_Cases.cases_hyp_names
(map #1 cs)
(map (map (the_default Rule_Cases.case_hypsN) o #2) cs)))
"named rule cases" #>
setup \<^binding>\<open>case_conclusion\<close>
(Scan.lift (Args.name -- Scan.repeat Args.name) >> Rule_Cases.case_conclusion)
"named conclusion of rule cases" #>
setup \<^binding>\<open>params\<close>
(Scan.lift (Parse.and_list1 (Scan.repeat Args.name)) >> Rule_Cases.params)
"named rule parameters" #>
setup \<^binding>\<open>rule_format\<close>
(Scan.lift (Args.mode "no_asm")
>> (fn true => Object_Logic.rule_format_no_asm | false => Object_Logic.rule_format))
"result put into canonical rule format" #>
setup \<^binding>\<open>elim_format\<close>
(Scan.succeed (Thm.rule_attribute [] (K Tactic.make_elim)))
"destruct rule turned into elimination rule format" #>
setup \<^binding>\<open>no_vars\<close>
(Scan.succeed (Thm.rule_attribute [] (Variable.import_vars o Context.proof_of)))
"imported schematic variables" #>
setup \<^binding>\<open>atomize\<close>
(Scan.succeed Object_Logic.declare_atomize) "declaration of atomize rule" #>
setup \<^binding>\<open>rulify\<close>
(Scan.succeed Object_Logic.declare_rulify) "declaration of rulify rule" #>
setup \<^binding>\<open>rotated\<close>
(Scan.lift (Scan.optional Parse.int 1
>> (fn n => Thm.rule_attribute [] (fn _ => rotate_prems n)))) "rotated theorem premises" #>
setup \<^binding>\<open>defn\<close>
(add_del Local_Defs.defn_add Local_Defs.defn_del)
"declaration of definitional transformations" #>
setup \<^binding>\<open>abs_def\<close>
(Scan.succeed (Thm.rule_attribute [] (Local_Defs.abs_def_rule o Context.proof_of)))
"abstract over free variables of definitional theorem" #>
register_config_bool Goal.quick_and_dirty #>
register_config_bool Ast.trace #>
register_config_bool Ast.stats #>
register_config_bool Printer.show_brackets #>
register_config_bool Printer.show_sorts #>
register_config_bool Printer.show_types #>
register_config_bool Printer.show_markup #>
register_config_bool Printer.show_structs #>
register_config_bool Printer.show_question_marks #>
register_config_bool Syntax.ambiguity_warning #>
register_config_int Syntax.ambiguity_limit #>
register_config_bool Syntax_Trans.eta_contract #>
register_config_bool Name_Space.names_long #>
register_config_bool Name_Space.names_short #>
register_config_bool Name_Space.names_unique #>
register_config_int ML_Print_Depth.print_depth #>
register_config_string ML_Env.ML_environment #>
register_config_bool ML_Env.ML_read_global #>
register_config_bool ML_Env.ML_write_global #>
register_config_bool ML_Options.source_trace #>
register_config_bool ML_Options.exception_trace #>
register_config_bool ML_Options.exception_debugger #>
register_config_bool ML_Options.debugger #>
register_config_bool Proof_Context.show_abbrevs #>
register_config_int Goal_Display.goals_limit #>
register_config_bool Goal_Display.show_main_goal #>
register_config_bool Thm.show_consts #>
register_config_bool Thm.show_hyps #>
register_config_bool Thm.show_tags #>
register_config_bool Pattern.unify_trace_failure #>
register_config_int Unify.trace_bound #>
register_config_int Unify.search_bound #>
register_config_bool Unify.trace_simp #>
register_config_bool Unify.trace_types #>
register_config_int Raw_Simplifier.simp_depth_limit #>
register_config_int Raw_Simplifier.simp_trace_depth_limit #>
register_config_bool Raw_Simplifier.simp_debug #>
register_config_bool Raw_Simplifier.simp_trace #>
register_config_bool Local_Defs.unfold_abs_def);
(* internal source *)
local
-val internal = internal_name (Context.the_local_context ());
+val make_name0 = make_name (Context.the_local_context ());
-val consumes_name = internal "consumes";
-val constraints_name = internal "constraints";
-val cases_open_name = internal "cases_open";
-val case_names_name = internal "case_names";
-val case_conclusion_name = internal "case_conclusion";
-
-fun make_string s = Token.make_string (s, Position.none);
+val consumes_name = make_name0 "consumes";
+val constraints_name = make_name0 "constraints";
+val cases_open_name = make_name0 "cases_open";
+val case_names_name = make_name0 "case_names";
+val case_conclusion_name = make_name0 "case_conclusion";
in
fun consumes i = consumes_name :: Token.make_int i;
fun constraints i = constraints_name :: Token.make_int i;
val cases_open = [cases_open_name];
-fun case_names names = case_names_name :: map make_string names;
-fun case_conclusion (name, names) = case_conclusion_name :: map make_string (name :: names);
+fun case_names names = case_names_name :: map Token.make_string0 names;
+fun case_conclusion (name, names) = case_conclusion_name :: map Token.make_string0 (name :: names);
end;
end;
\ No newline at end of file
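Illustration (not part of the diff): with the revised signatures above, Attrib.internal and Attrib.internal_declaration now take an explicit Position.T, typically \<^here> at the call site. A minimal sketch using only operations visible in this diff (Thm.rule_attribute, Morphism.declaration_entity); my_internal_src and my_internal_decl are made-up names:
(* ad-hoc rule attribute packaged as an internal attribute source, tagged with a position *)
val my_internal_src =
  Attrib.internal \<^here> (fn _ => Thm.rule_attribute [] (fn _ => fn th => th));
(* wrap a declaration entity as Attrib.thms, e.g. for augmenting a bundle *)
fun my_internal_decl (decl: Morphism.declaration_entity) =
  Attrib.internal_declaration \<^here> decl;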
diff --git a/src/Pure/Isar/bundle.ML b/src/Pure/Isar/bundle.ML
--- a/src/Pure/Isar/bundle.ML
+++ b/src/Pure/Isar/bundle.ML
@@ -1,236 +1,251 @@
(* Title: Pure/Isar/bundle.ML
Author: Makarius
Bundled declarations (notes etc.).
*)
signature BUNDLE =
sig
type name = string
val bundle_space: Context.generic -> Name_Space.T
val check: Proof.context -> xstring * Position.T -> string
val get: Proof.context -> name -> Attrib.thms
val read: Proof.context -> xstring * Position.T -> Attrib.thms
val extern: Proof.context -> string -> xstring
val print_bundles: bool -> Proof.context -> unit
val bundle: binding * Attrib.thms ->
(binding * typ option * mixfix) list -> local_theory -> local_theory
val bundle_cmd: binding * (Facts.ref * Token.src list) list ->
(binding * string option * mixfix) list -> local_theory -> local_theory
val init: binding -> theory -> local_theory
val unbundle: name list -> local_theory -> local_theory
val unbundle_cmd: (xstring * Position.T) list -> local_theory -> local_theory
val includes: name list -> Proof.context -> Proof.context
val includes_cmd: (xstring * Position.T) list -> Proof.context -> Proof.context
val include_: name list -> Proof.state -> Proof.state
val include_cmd: (xstring * Position.T) list -> Proof.state -> Proof.state
val including: name list -> Proof.state -> Proof.state
val including_cmd: (xstring * Position.T) list -> Proof.state -> Proof.state
end;
structure Bundle: BUNDLE =
struct
(** context data **)
structure Data = Generic_Data
(
type T = Attrib.thms Name_Space.table * Attrib.thms option;
val empty : T = (Name_Space.empty_table Markup.bundleN, NONE);
fun merge ((tab1, target1), (tab2, target2)) =
(Name_Space.merge_tables (tab1, tab2), merge_options (target1, target2));
);
(* bundles *)
type name = string
val get_all_generic = #1 o Data.get;
val get_all = get_all_generic o Context.Proof;
val bundle_space = Name_Space.space_of_table o #1 o Data.get;
fun check ctxt = #1 o Name_Space.check (Context.Proof ctxt) (get_all ctxt);
val get = Name_Space.get o get_all_generic o Context.Proof;
fun read ctxt = get ctxt o check ctxt;
fun extern ctxt = Name_Space.extern ctxt (Name_Space.space_of_table (get_all ctxt));
-fun define_bundle (b, bundle) context =
- let
- val bundle' = Attrib.trim_context_thms bundle;
- val (name, bundles') = Name_Space.define context true (b, bundle') (get_all_generic context);
- val context' = (Data.map o apfst o K) bundles' context;
- in (name, context') end;
-
(* target -- bundle under construction *)
fun the_target thy =
#2 (Data.get (Context.Theory thy))
|> \<^if_none>\<open>error "Missing bundle target"\<close>;
val reset_target = (Context.theory_map o Data.map o apsnd o K) NONE;
val set_target = Context.theory_map o Data.map o apsnd o K o SOME o Attrib.trim_context_thms;
fun augment_target thms =
Local_Theory.background_theory (fn thy => set_target (the_target thy @ thms) thy);
(* print bundles *)
fun pretty_bundle ctxt (markup_name, bundle) =
let
val prt_thm = Pretty.cartouche o Thm.pretty_thm ctxt;
fun prt_thm_attribs atts th =
Pretty.block (Pretty.breaks (prt_thm th :: Attrib.pretty_attribs ctxt atts));
fun prt_thms (ths, []) = map prt_thm ths
| prt_thms (ths, atts) = map (prt_thm_attribs atts) ths;
in
Pretty.block ([Pretty.keyword1 "bundle", Pretty.str " ", Pretty.mark_str markup_name] @
(if null bundle then [] else Pretty.fbreaks (Pretty.str " =" :: maps prt_thms bundle)))
end;
fun print_bundles verbose ctxt =
Pretty.writeln_chunks
(map (pretty_bundle ctxt) (Name_Space.markup_table verbose ctxt (get_all ctxt)));
(** define bundle **)
+(* context and morphisms *)
+
+val trim_context_bundle =
+ map (fn (fact, atts) => (map Thm.trim_context fact, (map o map) Token.trim_context atts));
+
+fun transfer_bundle thy =
+ map (fn (fact, atts) => (map (Thm.transfer thy) fact, (map o map) (Token.transfer thy) atts));
+
fun transform_bundle phi =
map (fn (fact, atts) => (Morphism.fact phi fact, (map o map) (Token.transform phi) atts));
+fun define_bundle (b, bundle) context =
+ let
+ val (name, bundles') = get_all_generic context
+ |> Name_Space.define context true (b, trim_context_bundle bundle);
+ val context' = (Data.map o apfst o K) bundles' context;
+ in (name, context') end;
+
(* command *)
local
fun gen_bundle prep_fact prep_att add_fixes (binding, raw_bundle) raw_fixes lthy =
let
val (_, ctxt') = add_fixes raw_fixes lthy;
val bundle0 = raw_bundle
|> map (fn (fact, atts) => (prep_fact ctxt' fact, map (prep_att ctxt') atts));
val bundle =
- Attrib.partial_evaluation ctxt' [(Binding.empty_atts, bundle0)] |> map snd |> flat
- |> transform_bundle (Proof_Context.export_morphism ctxt' lthy);
+ Attrib.partial_evaluation ctxt' [(Binding.empty_atts, bundle0)]
+ |> maps #2
+ |> transform_bundle (Proof_Context.export_morphism ctxt' lthy)
+ |> trim_context_bundle;
in
- lthy |> Local_Theory.declaration {syntax = false, pervasive = true}
- (fn phi => #2 o define_bundle (Morphism.binding phi binding, transform_bundle phi bundle))
+ lthy |> Local_Theory.declaration {syntax = false, pervasive = true, pos = Binding.pos_of binding}
+ (fn phi => fn context =>
+ let val psi = Morphism.set_trim_context'' context phi
+ in #2 (define_bundle (Morphism.binding psi binding, transform_bundle psi bundle) context) end)
end;
in
val bundle = gen_bundle (K I) (K I) Proof_Context.add_fixes;
val bundle_cmd = gen_bundle Proof_Context.get_fact Attrib.check_src Proof_Context.add_fixes_cmd;
end;
(* target *)
local
fun bad_operation _ = error "Not possible in bundle target";
fun conclude invisible binding =
Local_Theory.background_theory_result (fn thy =>
thy
|> invisible ? Context_Position.set_visible_global false
|> Context.Theory
|> define_bundle (binding, the_target thy)
||> (Context.the_theory
#> invisible ? Context_Position.restore_visible_global thy
#> reset_target));
fun pretty binding lthy =
let
val bundle = the_target (Proof_Context.theory_of lthy);
val (name, lthy') = lthy
|> Local_Theory.raw_theory (Context_Position.set_visible_global false)
|> conclude true binding;
val thy_ctxt' = Proof_Context.init_global (Proof_Context.theory_of lthy');
val markup_name =
Name_Space.markup_extern thy_ctxt' (Name_Space.space_of_table (get_all thy_ctxt')) name;
in [pretty_bundle lthy' (markup_name, bundle)] end;
fun bundle_notes kind facts lthy =
let
val bundle = facts
|> maps (fn ((_, more_atts), thms) => map (fn (ths, atts) => (ths, atts @ more_atts)) thms);
in
lthy
|> augment_target (transform_bundle (Local_Theory.standard_morphism_theory lthy) bundle)
|> Generic_Target.standard_notes (op <>) kind facts
|> Attrib.local_notes kind facts
end;
-fun bundle_declaration decl lthy =
+fun bundle_declaration pos decl lthy =
lthy
- |> (augment_target o Attrib.internal_declaration)
+ |> (augment_target o Attrib.internal_declaration pos)
(Morphism.transform (Local_Theory.standard_morphism_theory lthy) decl)
|> Generic_Target.standard_declaration (K true) decl;
in
fun init binding thy =
thy
|> Local_Theory.init
{background_naming = Sign.naming_of thy,
setup = set_target [] #> Proof_Context.init_global,
conclude = conclude false binding #> #2}
{define = bad_operation,
notes = bundle_notes,
abbrev = bad_operation,
- declaration = K bundle_declaration,
+ declaration = fn {pos, ...} => bundle_declaration pos,
theory_registration = bad_operation,
locale_dependency = bad_operation,
- pretty = pretty binding}
+ pretty = pretty binding};
end;
(** activate bundles **)
local
fun gen_activate notes prep_bundle args ctxt =
- let val decls = maps (prep_bundle ctxt) args in
+ let
+ val thy = Proof_Context.theory_of ctxt;
+ val decls = maps (prep_bundle ctxt) args |> transfer_bundle thy;
+ in
ctxt
|> Context_Position.set_visible false
|> notes [(Binding.empty_atts, decls)] |> #2
|> Context_Position.restore_visible ctxt
end;
fun gen_unbundle prep_bundle = gen_activate Local_Theory.notes prep_bundle;
fun gen_includes prep_bundle = gen_activate (Attrib.local_notes "") prep_bundle;
fun gen_include prep_bundle bs =
Proof.assert_forward #> Proof.map_context (gen_includes prep_bundle bs) #> Proof.reset_facts;
fun gen_including prep_bundle bs =
Proof.assert_backward #> Proof.map_context (gen_includes prep_bundle bs)
in
val unbundle = gen_unbundle get;
val unbundle_cmd = gen_unbundle read;
val includes = gen_includes get;
val includes_cmd = gen_includes read;
val include_ = gen_include get;
val include_cmd = gen_include read;
val including = gen_including get;
val including_cmd = gen_including read;
end;
end;
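Illustration (not part of the diff): the Bundle.bundle and Bundle.includes_cmd entries of the signature above can also be used programmatically. A minimal sketch with made-up names (my_simps, declare_my_bundle, with_my_bundle); the fact list is assumed to be already evaluated, so no attribute sources are attached:
(* define a bundle "my_simps" from the given theorems, with no extra fixes *)
fun declare_my_bundle (thms: thm list) (lthy: local_theory) =
  Bundle.bundle (Binding.name "my_simps", [(thms, [])]) [] lthy;
(* activate that bundle in a proof context, checking the name as the Isar command does *)
fun with_my_bundle (ctxt: Proof.context) =
  Bundle.includes_cmd [("my_simps", Position.none)] ctxt;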
diff --git a/src/Pure/Isar/class.ML b/src/Pure/Isar/class.ML
--- a/src/Pure/Isar/class.ML
+++ b/src/Pure/Isar/class.ML
@@ -1,841 +1,846 @@
(* Title: Pure/Isar/class.ML
Author: Florian Haftmann, TU Muenchen
Type classes derived from primitive axclasses and locales.
*)
signature CLASS =
sig
(*classes*)
val is_class: theory -> class -> bool
val these_params: theory -> sort -> (string * (class * (string * typ))) list
val base_sort: theory -> class -> sort
val rules: theory -> class -> thm option * thm
val these_defs: theory -> sort -> thm list
val these_operations: theory -> sort -> (string * (class * ((typ * term) * bool))) list
val print_classes: Proof.context -> unit
val init: class -> theory -> Proof.context
val begin: class list -> sort -> Proof.context -> Proof.context
val const: class -> (binding * mixfix) * term -> term list * term list ->
local_theory -> local_theory
val abbrev: class -> Syntax.mode -> (binding * mixfix) * term -> local_theory ->
(term * term) * local_theory
val redeclare_operations: theory -> sort -> Proof.context -> Proof.context
val class_prefix: string -> string
val register: class -> class list -> ((string * typ) * (string * typ)) list ->
sort -> morphism -> morphism -> thm option -> thm option -> thm -> theory -> theory
(*instances*)
val instantiation: string list * (string * sort) list * sort -> theory -> local_theory
val instantiation_instance: (local_theory -> local_theory)
-> local_theory -> Proof.state
val prove_instantiation_instance: (Proof.context -> tactic)
-> local_theory -> local_theory
val prove_instantiation_exit: (Proof.context -> tactic)
-> local_theory -> theory
val prove_instantiation_exit_result: (morphism -> 'a -> 'b)
-> (Proof.context -> 'b -> tactic) -> 'a -> local_theory -> 'b * theory
val read_multi_arity: theory -> xstring list * xstring list * xstring
-> string list * (string * sort) list * sort
val instantiation_cmd: xstring list * xstring list * xstring -> theory -> local_theory
val instance_arity_cmd: xstring list * xstring list * xstring -> theory -> Proof.state
val theory_map_result: string list * (string * sort) list * sort
-> (morphism -> 'a -> 'b) -> (local_theory -> 'a * local_theory)
-> (Proof.context -> 'b -> tactic) -> theory -> 'b * theory
(*subclasses*)
val classrel: class * class -> theory -> Proof.state
val classrel_cmd: xstring * xstring -> theory -> Proof.state
val register_subclass: class * class -> morphism option -> Element.witness option
-> morphism -> local_theory -> local_theory
(*tactics*)
val intro_classes_tac: Proof.context -> thm list -> tactic
val standard_intro_classes_tac: Proof.context -> thm list -> tactic
(*diagnostics*)
val pretty_specification: theory -> class -> Pretty.T list
end;
structure Class: CLASS =
struct
(** class data **)
datatype class_data = Class_Data of {
(* static part *)
consts: (string * string) list
(*locale parameter ~> constant name*),
base_sort: sort,
base_morph: morphism
(*static part of canonical morphism*),
export_morph: morphism,
assm_intro: thm option,
of_class: thm,
axiom: thm option,
(* dynamic part *)
defs: thm Item_Net.T,
operations: (string * (class * ((typ * term) * bool))) list
(* n.b.
params = logical parameters of class
operations = operations participating in user-space type system
*)
};
fun make_class_data ((consts, base_sort, base_morph, export_morph, assm_intro, of_class, axiom),
(defs, operations)) =
Class_Data {consts = consts, base_sort = base_sort,
base_morph = base_morph, export_morph = export_morph, assm_intro = assm_intro,
of_class = of_class, axiom = axiom, defs = defs, operations = operations};
fun map_class_data f (Class_Data {consts, base_sort, base_morph, export_morph, assm_intro,
of_class, axiom, defs, operations}) =
make_class_data (f ((consts, base_sort, base_morph, export_morph, assm_intro, of_class, axiom),
(defs, operations)));
fun merge_class_data _ (Class_Data {consts = consts,
base_sort = base_sort, base_morph = base_morph, export_morph = export_morph, assm_intro = assm_intro,
of_class = of_class, axiom = axiom, defs = defs1, operations = operations1},
Class_Data {consts = _, base_sort = _, base_morph = _, export_morph = _, assm_intro = _,
of_class = _, axiom = _, defs = defs2, operations = operations2}) =
make_class_data ((consts, base_sort, base_morph, export_morph, assm_intro, of_class, axiom),
(Item_Net.merge (defs1, defs2),
AList.merge (op =) (K true) (operations1, operations2)));
structure Class_Data = Theory_Data
(
type T = class_data Graph.T
val empty = Graph.empty;
val merge = Graph.join merge_class_data;
);
(* queries *)
fun lookup_class_data thy class =
(case try (Graph.get_node (Class_Data.get thy)) class of
SOME (Class_Data data) => SOME data
| NONE => NONE);
fun the_class_data thy class =
lookup_class_data thy class
|> \<^if_none>\<open>error ("Undeclared class " ^ quote class)\<close>;
val is_class = is_some oo lookup_class_data;
val ancestry = Graph.all_succs o Class_Data.get;
val heritage = Graph.all_preds o Class_Data.get;
fun these_params thy =
let
fun params class =
let
val const_typs = (#params o Axclass.get_info thy) class;
val const_names = (#consts o the_class_data thy) class;
in
(map o apsnd)
(fn c => (class, (c, (the o AList.lookup (op =) const_typs) c))) const_names
end;
in maps params o ancestry thy end;
val base_sort = #base_sort oo the_class_data;
fun rules thy class =
let val {axiom, of_class, ...} = the_class_data thy class
in (axiom, of_class) end;
fun all_assm_intros thy =
Graph.fold (fn (_, (Class_Data {assm_intro, ...}, _)) => fold (insert Thm.eq_thm)
(the_list assm_intro)) (Class_Data.get thy) [];
fun these_defs thy = maps (Item_Net.content o #defs o the_class_data thy) o ancestry thy;
fun these_operations thy = maps (#operations o the_class_data thy) o ancestry thy;
val base_morphism = #base_morph oo the_class_data;
fun morphism thy class =
- (case Element.eq_morphism thy (these_defs thy [class]) of
- SOME eq_morph => base_morphism thy class $> eq_morph
- | NONE => base_morphism thy class);
+ Morphism.set_context thy
+ (base_morphism thy class $>
+ Morphism.default (Element.eq_morphism (these_defs thy [class])));
val export_morphism = #export_morph oo the_class_data;
fun pretty_param ctxt (c, ty) =
Pretty.block
[Name_Space.pretty ctxt (Proof_Context.const_space ctxt) c, Pretty.str " ::",
Pretty.brk 1, Syntax.pretty_typ ctxt ty];
fun print_classes ctxt =
let
val thy = Proof_Context.theory_of ctxt;
val algebra = Sign.classes_of thy;
val class_space = Proof_Context.class_space ctxt;
val type_space = Proof_Context.type_space ctxt;
val arities =
Symtab.build (Sorts.arities_of algebra |> Symtab.fold (fn (tyco, arities) =>
fold (fn (class, _) => Symtab.map_default (class, []) (insert (op =) tyco)) arities));
fun prt_supersort class =
Syntax.pretty_sort ctxt (Sign.minimize_sort thy (Sign.super_classes thy class));
fun prt_arity class tyco =
let
val Ss = Sorts.mg_domain algebra tyco [class];
in Syntax.pretty_arity ctxt (tyco, Ss, [class]) end;
fun prt_param (c, ty) = pretty_param ctxt (c, Type.strip_sorts_dummy ty);
fun prt_entry class =
Pretty.block
([Pretty.keyword1 "class", Pretty.brk 1,
Name_Space.pretty ctxt class_space class, Pretty.str ":", Pretty.fbrk,
Pretty.block [Pretty.str "supersort: ", prt_supersort class]] @
(case (these o Option.map #params o try (Axclass.get_info thy)) class of
[] => []
| params =>
[Pretty.fbrk, Pretty.big_list "parameters:" (map prt_param params)]) @
(case (these o Symtab.lookup arities) class of
[] => []
| ars =>
[Pretty.fbrk, Pretty.big_list "instances:"
(map (prt_arity class) (sort (Name_Space.extern_ord ctxt type_space) ars))]));
in
Sorts.all_classes algebra
|> sort (Name_Space.extern_ord ctxt class_space)
|> map prt_entry
|> Pretty.writeln_chunks2
end;
(* updaters *)
fun register class sups params base_sort base_morph export_morph
some_axiom some_assm_intro of_class thy =
let
val operations = map (fn (v_ty as (_, ty), (c, _)) =>
(c, (class, ((ty, Free v_ty), false)))) params;
val add_class = Graph.new_node (class,
make_class_data (((map o apply2) fst params, base_sort,
- base_morph, export_morph, Option.map Thm.trim_context some_assm_intro,
- Thm.trim_context of_class, Option.map Thm.trim_context some_axiom),
+ Morphism.reset_context base_morph,
+ Morphism.reset_context export_morph,
+ Option.map Thm.trim_context some_assm_intro,
+ Thm.trim_context of_class,
+ Option.map Thm.trim_context some_axiom),
(Thm.item_net, operations)))
#> fold (curry Graph.add_edge class) sups;
in Class_Data.map add_class thy end;
fun activate_defs class thms thy =
- (case Element.eq_morphism thy thms of
+ (case Element.eq_morphism thms of
SOME eq_morph =>
- fold (fn cls => fn thy =>
- Context.theory_map
- (Locale.amend_registration
- {inst = (cls, base_morphism thy cls),
- mixin = SOME (eq_morph, true),
- export = export_morphism thy cls}) thy) (heritage thy [class]) thy
+ fold (fn cls => fn thy' =>
+ (Context.theory_map o Locale.amend_registration)
+ {inst = (cls, base_morphism thy' cls),
+ mixin = SOME (eq_morph, true),
+ export = export_morphism thy' cls} thy') (heritage thy [class]) thy
| NONE => thy);
fun register_operation class (c, t) input_only thy =
let
val base_sort = base_sort thy class;
val prep_typ = map_type_tfree
(fn (v, sort) => if Name.aT = v
then TFree (v, base_sort) else TVar ((v, 0), sort));
val t' = map_types prep_typ t;
val ty' = Term.fastype_of t';
in
thy
|> (Class_Data.map o Graph.map_node class o map_class_data o apsnd o apsnd)
(cons (c, (class, ((ty', t'), input_only))))
end;
fun register_def class def_thm thy =
let
val sym_thm = Thm.trim_context (Thm.symmetric def_thm)
in
thy
|> (Class_Data.map o Graph.map_node class o map_class_data o apsnd o apfst)
(Item_Net.update sym_thm)
|> activate_defs class [sym_thm]
end;
(** classes and class target **)
(* class context syntax *)
fun make_rewrite t c_ty =
let
val vs = strip_abs_vars t;
val vts = map snd vs
|> Name.invent_names Name.context Name.uu
|> map (fn (v, T) => Var ((v, 0), T));
in (betapplys (t, vts), betapplys (Const c_ty, vts)) end;
fun these_unchecks thy =
these_operations thy
#> map_filter (fn (c, (_, ((ty, t), input_only))) =>
if input_only then NONE else SOME (make_rewrite t (c, ty)));
fun these_unchecks_reversed thy =
these_operations thy
#> map (fn (c, (_, ((ty, t), _))) => (Const (c, ty), t));
fun redeclare_const thy c =
let val b = Long_Name.base_name c
in Sign.intern_const thy b = c ? Variable.declare_const (b, c) end;
fun synchronize_class_syntax sort base_sort ctxt =
let
val thy = Proof_Context.theory_of ctxt;
val algebra = Sign.classes_of thy;
val operations = these_operations thy sort;
fun subst_class_typ sort = map_type_tfree (K (TVar ((Name.aT, 0), sort)));
val primary_constraints =
(map o apsnd) (subst_class_typ base_sort o fst o fst o snd) operations;
val secondary_constraints =
(map o apsnd) (fn (class, ((ty, _), _)) => subst_class_typ [class] ty) operations;
fun improve (c, ty) =
(case AList.lookup (op =) primary_constraints c of
SOME ty' =>
(case try (Type.raw_match (ty', ty)) Vartab.empty of
SOME tyenv =>
(case Vartab.lookup tyenv (Name.aT, 0) of
SOME (_, ty' as TVar (vi, sort)) =>
if Type_Infer.is_param vi andalso Sorts.sort_le algebra (base_sort, sort)
then SOME (ty', Term.aT base_sort)
else NONE
| _ => NONE)
| NONE => NONE)
| NONE => NONE);
fun subst (c, _) = Option.map (fst o snd) (AList.lookup (op =) operations c);
val unchecks = these_unchecks thy sort;
in
ctxt
|> fold (redeclare_const thy o fst) primary_constraints
|> Overloading.map_improvable_syntax (K {primary_constraints = primary_constraints,
secondary_constraints = secondary_constraints, improve = improve, subst = subst,
no_subst_in_abbrev_mode = true, unchecks = unchecks})
|> Overloading.set_primary_constraints
end;
fun synchronize_class_syntax_target class lthy =
lthy
|> Local_Theory.map_contexts
(K (synchronize_class_syntax [class] (base_sort (Proof_Context.theory_of lthy) class)));
fun redeclare_operations thy sort =
fold (redeclare_const thy o fst) (these_operations thy sort);
fun begin sort base_sort ctxt =
ctxt
|> Variable.declare_term (Logic.mk_type (Term.aT base_sort))
|> synchronize_class_syntax sort base_sort
|> Overloading.activate_improvable_syntax;
fun init class thy =
thy
|> Locale.init class
|> begin [class] (base_sort thy class);
(* class target *)
val class_prefix = Logic.const_of_class o Long_Name.base_name;
fun guess_morphism_identity (b, rhs) phi1 phi2 =
let
(*FIXME proper concept to identify morphism instead of educated guess*)
val name_of_binding = Name_Space.full_name Name_Space.global_naming;
val n1 = (name_of_binding o Morphism.binding phi1) b;
val n2 = (name_of_binding o Morphism.binding phi2) b;
val rhs1 = Morphism.term phi1 rhs;
val rhs2 = Morphism.term phi2 rhs;
in n1 = n2 andalso Term.aconv_untyped (rhs1, rhs2) end;
fun target_const class phi0 prmode (b, rhs) lthy =
let
- val export = Variable.export_morphism lthy (Local_Theory.target_of lthy);
+ val export =
+ Morphism.set_context' lthy (Variable.export_morphism lthy (Local_Theory.target_of lthy));
val guess_identity = guess_morphism_identity (b, rhs) export;
val guess_canonical = guess_morphism_identity (b, rhs) (export $> phi0);
in
lthy
|> Generic_Target.locale_target_const class
(not o (guess_identity orf guess_canonical)) prmode ((b, NoSyn), rhs)
end;
local
fun dangling_params_for lthy class (type_params, term_params) =
let
val class_param_names =
map fst (these_params (Proof_Context.theory_of lthy) [class]);
val dangling_term_params =
subtract (fn (v, Free (w, _)) => v = w | _ => false) class_param_names term_params;
in (type_params, dangling_term_params) end;
fun global_def (b, eq) thy =
let
val ((_, def_thm), thy') = thy |> Thm.add_def_global false false (b, eq);
val def_thm' = def_thm |> Thm.forall_intr_frees |> Thm.forall_elim_vars 0 |> Thm.varifyT_global;
val (_, thy'') = thy' |> Global_Theory.store_thm (b, def_thm');
in (def_thm', thy'') end;
fun canonical_const class phi dangling_params ((b, mx), rhs) thy =
let
val b_def = Binding.suffix_name "_dict" b;
val c = Sign.full_name thy b;
val ty = map Term.fastype_of dangling_params ---> Term.fastype_of rhs;
val def_eq = Logic.mk_equals (list_comb (Const (c, ty), dangling_params), rhs)
|> map_types Type.strip_sorts;
in
thy
|> Sign.declare_const_global ((b, Type.strip_sorts ty), mx)
|> snd
|> global_def (b_def, def_eq)
|-> (fn def_thm => register_def class def_thm)
|> null dangling_params ? register_operation class (c, rhs) false
|> Sign.add_const_constraint (c, SOME ty)
end;
in
fun const class ((b, mx), lhs) params lthy =
let
val phi = morphism (Proof_Context.theory_of lthy) class;
val dangling_params = map (Morphism.term phi) (uncurry append (dangling_params_for lthy class params));
in
lthy
|> target_const class phi Syntax.mode_default (b, lhs)
|> Local_Theory.raw_theory (canonical_const class phi dangling_params
((Morphism.binding phi b, if null dangling_params then mx else NoSyn), Morphism.term phi lhs))
|> Generic_Target.standard_const (fn (this, other) => other <> 0 andalso this <> other)
Syntax.mode_default ((b, if null dangling_params then NoSyn else mx), lhs)
|> synchronize_class_syntax_target class
end;
end;
local
fun canonical_abbrev class phi prmode with_syntax ((b, mx), rhs) thy =
let
val c = Sign.full_name thy b;
val constrain = map_atyps (fn T as TFree (v, _) =>
if v = Name.aT then TFree (v, [class]) else T | T => T);
val rhs' = map_types constrain rhs;
in
thy
|> Sign.add_abbrev (#1 prmode) (b, Logic.varify_types_global rhs')
|> snd
|> with_syntax ? Sign.notation true prmode [(Const (c, fastype_of rhs), mx)]
|> with_syntax ? register_operation class (c, rhs)
(#1 prmode = Print_Mode.input)
|> Sign.add_const_constraint (c, SOME (fastype_of rhs'))
end;
fun canonical_abbrev_target class phi prmode ((b, mx), rhs) lthy =
let
val thy = Proof_Context.theory_of lthy;
val preprocess = perhaps (try (Pattern.rewrite_term_top thy (these_unchecks thy [class]) []));
val (global_rhs, (_, (_, term_params))) =
Generic_Target.export_abbrev lthy preprocess rhs;
val mx' = Generic_Target.check_mixfix_global (b, null term_params) mx;
in
lthy
|> Local_Theory.raw_theory (canonical_abbrev class phi prmode (null term_params)
((Morphism.binding phi b, mx'), Logic.unvarify_types_global global_rhs))
end;
fun further_abbrev_target class phi prmode (b, mx) rhs params =
Generic_Target.background_abbrev (b, rhs) (snd params)
#-> (fn (lhs, _) => target_const class phi prmode (b, lhs)
#> Generic_Target.standard_const (fn (this, other) => other <> 0 andalso this <> other) prmode ((b, mx), lhs))
in
fun abbrev class prmode ((b, mx), rhs) lthy =
let
val thy = Proof_Context.theory_of lthy;
val phi = morphism thy class;
val rhs_generic =
perhaps (try (Pattern.rewrite_term_top thy (these_unchecks_reversed thy [class]) [])) rhs;
in
lthy
|> canonical_abbrev_target class phi prmode ((b, mx), rhs)
|> Generic_Target.abbrev (further_abbrev_target class phi) prmode ((b, mx), rhs_generic)
||> synchronize_class_syntax_target class
end;
end;
(* subclasses *)
fun register_subclass (sub, sup) some_dep_morph some_witn export lthy =
let
val thy = Proof_Context.theory_of lthy;
- val intros = (snd o rules thy) sup :: map_filter I
- [Option.map (Drule.export_without_context_open o Element.conclude_witness lthy) some_witn,
- (fst o rules thy) sub];
+ val conclude_witness =
+ Thm.trim_context o Drule.export_without_context_open o Element.conclude_witness lthy;
+ val intros =
+ (snd o rules thy) sup ::
+ map_filter I [Option.map conclude_witness some_witn, (fst o rules thy) sub];
val classrel =
Goal.prove_sorry_global thy [] [] (Logic.mk_classrel (sub, sup))
(fn {context = ctxt, ...} => EVERY (map (TRYALL o resolve_tac ctxt o single) intros));
val diff_sort = Sign.complete_sort thy [sup]
|> subtract (op =) (Sign.complete_sort thy [sub])
|> filter (is_class thy);
val add_dependency =
(case some_dep_morph of
SOME dep_morph =>
Generic_Target.locale_dependency sub
{inst = (sup, dep_morph $> Element.satisfy_morphism (the_list some_witn)),
mixin = NONE, export = export}
| NONE => I);
in
lthy
|> Local_Theory.raw_theory
(Axclass.add_classrel classrel
#> Class_Data.map (Graph.add_edge (sub, sup))
#> activate_defs sub (these_defs thy diff_sort))
|> add_dependency
|> synchronize_class_syntax_target sub
end;
local
fun gen_classrel mk_prop classrel thy =
let
fun after_qed results =
Proof_Context.background_theory ((fold o fold) Axclass.add_classrel results);
in
thy
|> Proof_Context.init_global
|> Proof.theorem NONE after_qed [[(mk_prop thy classrel, [])]]
end;
in
val classrel =
gen_classrel (Logic.mk_classrel oo Axclass.cert_classrel);
val classrel_cmd =
gen_classrel (Logic.mk_classrel oo Axclass.read_classrel);
end; (*local*)
(** instantiation target **)
(* bookkeeping *)
datatype instantiation = Instantiation of {
arities: string list * (string * sort) list * sort,
params: ((string * string) * (string * typ)) list
(*(instantiation parameter, type constructor), (local instantiation parameter, typ)*)
}
fun make_instantiation (arities, params) =
Instantiation {arities = arities, params = params};
val empty_instantiation = make_instantiation (([], [], []), []);
structure Instantiation = Proof_Data
(
type T = instantiation;
fun init _ = empty_instantiation;
);
val get_instantiation =
(fn Instantiation data => data) o Instantiation.get o Local_Theory.target_of;
fun map_instantiation f =
(Local_Theory.target o Instantiation.map)
(fn Instantiation {arities, params} => make_instantiation (f (arities, params)));
fun the_instantiation lthy =
(case get_instantiation lthy of
{arities = ([], [], []), ...} => error "No instantiation target"
| data => data);
val instantiation_params = #params o get_instantiation;
fun instantiation_param lthy b = instantiation_params lthy
|> find_first (fn (_, (v, _)) => Binding.name_of b = v)
|> Option.map (fst o fst);
fun read_multi_arity thy (raw_tycos, raw_sorts, raw_sort) =
let
val ctxt = Proof_Context.init_global thy;
val all_arities = map (fn raw_tyco => Proof_Context.read_arity ctxt
(raw_tyco, raw_sorts, raw_sort)) raw_tycos;
val tycos = map #1 all_arities;
val (_, sorts, sort) = hd all_arities;
val vs = Name.invent_names Name.context Name.aT sorts;
in (tycos, vs, sort) end;
(* syntax *)
fun synchronize_inst_syntax ctxt =
let
val Instantiation {params, ...} = Instantiation.get ctxt;
val lookup_inst_param = Axclass.lookup_inst_param
(Sign.consts_of (Proof_Context.theory_of ctxt)) params;
fun subst (c, ty) =
(case lookup_inst_param (c, ty) of
SOME (v_ty as (_, ty)) => SOME (ty, Free v_ty)
| NONE => NONE);
val unchecks =
map (fn ((c, _), v_ty as (_, ty)) => (Free v_ty, Const (c, ty))) params;
in
ctxt
|> Overloading.map_improvable_syntax (fn {primary_constraints, improve, ...} =>
{primary_constraints = primary_constraints, secondary_constraints = [],
improve = improve, subst = subst, no_subst_in_abbrev_mode = false,
unchecks = unchecks})
end;
fun resort_terms ctxt algebra consts constraints ts =
let
fun matchings (Const (c_ty as (c, _))) =
(case constraints c of
NONE => I
| SOME sorts =>
fold2 (curry (Sorts.meet_sort algebra)) (Consts.typargs consts c_ty) sorts)
| matchings _ = I;
val tvartab = Vartab.build ((fold o fold_aterms) matchings ts)
handle Sorts.CLASS_ERROR e => error (Sorts.class_error (Context.Proof ctxt) e);
val inst = map_type_tvar
(fn (vi, sort) => TVar (vi, the_default sort (Vartab.lookup tvartab vi)));
in if Vartab.is_empty tvartab then ts else (map o map_types) inst ts end;
(* target *)
fun define_overloaded (c, U) b (b_def, rhs) lthy =
let
val name = Binding.name_of b;
val pos = Binding.pos_of b;
val _ =
if Context_Position.is_reported lthy pos then
Position.report_text pos Markup.class_parameter
(Pretty.string_of
(Pretty.block [Pretty.keyword1 "class", Pretty.brk 1,
Pretty.str "parameter", Pretty.brk 1, pretty_param lthy (c, U)]))
else ();
in
lthy |> Local_Theory.background_theory_result
(Axclass.declare_overloaded (c, U) ##>> Axclass.define_overloaded b_def (c, rhs))
||> (map_instantiation o apsnd) (filter_out (fn (_, (v', _)) => v' = name))
||> Local_Theory.map_contexts (K synchronize_inst_syntax)
end;
fun foundation (((b, U), mx), (b_def, rhs)) params lthy =
(case instantiation_param lthy b of
SOME c =>
if Mixfix.is_empty mx then lthy |> define_overloaded (c, U) b (b_def, rhs)
else error ("Illegal mixfix syntax for overloaded constant " ^ quote c)
| NONE => lthy |> Generic_Target.theory_target_foundation (((b, U), mx), (b_def, rhs)) params);
fun pretty lthy =
let
val {arities = (tycos, vs, sort), params} = the_instantiation lthy;
fun pr_arity tyco = Syntax.pretty_arity lthy (tyco, map snd vs, sort);
fun pr_param ((c, _), (v, ty)) =
Pretty.block (Pretty.breaks
[Pretty.str v, Pretty.str "==", Proof_Context.pretty_const lthy c,
Pretty.str "::", Syntax.pretty_typ lthy ty]);
in
[Pretty.block
(Pretty.fbreaks (Pretty.keyword1 "instantiation" :: map pr_arity tycos @ map pr_param params))]
end;
fun conclude lthy =
let
val (tycos, vs, sort) = #arities (the_instantiation lthy);
val thy = Proof_Context.theory_of lthy;
val _ = tycos |> List.app (fn tyco =>
if Sign.of_sort thy (Type (tyco, map TFree vs), sort) then ()
else error ("Missing instance proof for type " ^ quote (Proof_Context.markup_type lthy tyco)));
in lthy end;
fun registration thy_ctxt {inst, mixin, export} lthy =
lthy
|> Generic_Target.theory_registration
{inst = inst,
mixin = mixin,
export = export $> Proof_Context.export_morphism lthy thy_ctxt}
(*handle fixed types variables on target context properly*);
fun instantiation (tycos, vs, sort) thy =
let
val _ = if null tycos then error "At least one arity must be given" else ();
val class_params = these_params thy (filter (can (Axclass.get_info thy)) sort);
fun get_param tyco (param, (_, (c, ty))) =
if can (Axclass.param_of_inst thy) (c, tyco)
then NONE else SOME ((c, tyco),
(param ^ "_" ^ Long_Name.base_name tyco, map_atyps (K (Type (tyco, map TFree vs))) ty));
val params = map_product get_param tycos class_params |> map_filter I;
val _ = if null params andalso forall (fn tyco => can (Sign.arity_sorts thy tyco) sort) tycos
then error "No parameters and no pending instance proof obligations in instantiation."
else ();
val primary_constraints = map (apsnd
(map_atyps (K (TVar ((Name.aT, 0), [])))) o snd o snd) class_params;
val algebra = Sign.classes_of thy
|> fold (fn tyco => Sorts.add_arities (Context.Theory thy)
(tyco, map (fn class => (class, map snd vs)) sort)) tycos;
val consts = Sign.consts_of thy;
val improve_constraints = AList.lookup (op =)
(map (fn (_, (class, (c, _))) => (c, [[class]])) class_params);
fun resort_check ctxt ts = resort_terms ctxt algebra consts improve_constraints ts;
val lookup_inst_param = Axclass.lookup_inst_param consts params;
fun improve (c, ty) =
(case lookup_inst_param (c, ty) of
SOME (_, ty') => if Sign.typ_instance thy (ty', ty) then SOME (ty, ty') else NONE
| NONE => NONE);
in
thy
|> Local_Theory.init
{background_naming = Sign.naming_of thy,
setup = Proof_Context.init_global
#> Instantiation.put (make_instantiation ((tycos, vs, sort), params))
#> fold (Variable.declare_typ o TFree) vs
#> fold (Variable.declare_names o Free o snd) params
#> (Overloading.map_improvable_syntax) (K {primary_constraints = primary_constraints,
secondary_constraints = [], improve = improve, subst = K NONE,
no_subst_in_abbrev_mode = false, unchecks = []})
#> Overloading.activate_improvable_syntax
#> Context.proof_map (Syntax_Phases.term_check 0 "resorting" resort_check)
#> synchronize_inst_syntax,
conclude = conclude}
{define = Generic_Target.define foundation,
notes = Generic_Target.notes Generic_Target.theory_target_notes,
abbrev = Generic_Target.abbrev Generic_Target.theory_target_abbrev,
declaration = K Generic_Target.theory_declaration,
theory_registration = registration (Proof_Context.init_global thy),
locale_dependency = fn _ => error "Not possible in instantiation target",
pretty = pretty}
end;
fun instantiation_cmd arities thy =
instantiation (read_multi_arity thy arities) thy;
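(* Illustration (not part of the sources): at the Isar level this target is entered
   as, e.g. (assuming HOL's class "plus" and the product type):

     instantiation prod :: (plus, plus) plus
     begin
       definition "p + q = (fst p + fst q, snd p + snd q)"
       instance ..
     end
*)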
fun gen_instantiation_instance do_proof after_qed lthy =
let
val (tycos, vs, sort) = (#arities o the_instantiation) lthy;
val arities_proof = maps (fn tyco => Logic.mk_arities (tyco, map snd vs, sort)) tycos;
fun after_qed' results =
Local_Theory.background_theory (fold (Axclass.add_arity o Thm.varifyT_global) results)
#> after_qed;
in
lthy
|> do_proof after_qed' arities_proof
end;
val instantiation_instance = gen_instantiation_instance (fn after_qed => fn ts =>
Proof.theorem NONE (after_qed o map the_single) (map (fn t => [(t, [])]) ts));
fun prove_instantiation_instance tac = gen_instantiation_instance (fn after_qed =>
fn ts => fn lthy => after_qed (map (fn t => Goal.prove lthy [] [] t
(fn {context, ...} => tac context)) ts) lthy) I;
fun prove_instantiation_exit tac = prove_instantiation_instance tac
#> Local_Theory.exit_global;
fun prove_instantiation_exit_result f tac x lthy =
let
val morph = Proof_Context.export_morphism lthy
(Proof_Context.init_global (Proof_Context.theory_of lthy));
val y = f morph x;
in
lthy
|> prove_instantiation_exit (fn ctxt => tac ctxt y)
|> pair y
end;
fun theory_map_result arities f g tac =
instantiation arities
#> g
#-> prove_instantiation_exit_result f tac;
(* simplified instantiation interface with no class parameter *)
fun instance_arity_cmd raw_arities thy =
let
val (tycos, vs, sort) = read_multi_arity thy raw_arities;
val sorts = map snd vs;
val arities = maps (fn tyco => Logic.mk_arities (tyco, sorts, sort)) tycos;
fun after_qed results =
Proof_Context.background_theory ((fold o fold) Axclass.add_arity results);
in
thy
|> Proof_Context.init_global
|> Proof.theorem NONE after_qed (map (fn t => [(t, [])]) arities)
end;
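(* Illustration (not part of the sources): this parameterless variant corresponds to
   a bare Isar "instance" statement, e.g. for a hypothetical unary type constructor
   "foo" and a class "c" without parameters:

     instance foo :: (type) c
       <proof>
*)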
(** tactics and methods **)
fun intro_classes_tac ctxt facts st =
let
val thy = Proof_Context.theory_of ctxt;
val classes = Sign.all_classes thy;
val class_trivs = map (Thm.class_triv thy) classes;
val class_intros = map_filter (try (#intro o Axclass.get_info thy)) classes;
val assm_intros = all_assm_intros thy;
in
Method.intros_tac ctxt (class_trivs @ class_intros @ assm_intros) facts st
end;
fun standard_intro_classes_tac ctxt facts st =
if null facts andalso not (Thm.no_prems st) then
(intro_classes_tac ctxt [] ORELSE
Locale.intro_locales_tac {strict = true, eager = true} ctxt []) st
else no_tac st;
fun standard_tac ctxt facts =
HEADGOAL (Method.some_rule_tac ctxt [] facts) ORELSE
standard_intro_classes_tac ctxt facts;
val _ = Theory.setup
(Method.setup \<^binding>\<open>intro_classes\<close> (Scan.succeed (METHOD o intro_classes_tac))
"back-chain introduction rules of classes" #>
Method.setup \<^binding>\<open>standard\<close> (Scan.succeed (METHOD o standard_tac))
"standard proof step: Pure intro/elim rule or class introduction");
(** diagnostics **)
fun pretty_specification thy class =
if is_class thy class then
let
val class_ctxt = init class thy;
val prt_class = Name_Space.pretty class_ctxt (Proof_Context.class_space class_ctxt);
val super_classes = Sign.minimize_sort thy (Sign.super_classes thy class);
val fix_args =
#params (Axclass.get_info thy class)
|> map (fn (c, T) => (Binding.name (Long_Name.base_name c), SOME T, NoSyn));
val fixes = if null fix_args then [] else [Element.Fixes fix_args];
val assumes = Locale.hyp_spec_of thy class;
val header =
[Pretty.keyword1 "class", Pretty.brk 1, prt_class class, Pretty.str " =", Pretty.brk 1] @
Pretty.separate " +" (map prt_class super_classes) @
(if null super_classes then [] else [Pretty.str " +"]);
val body =
if null fixes andalso null assumes then []
else
maps (Element.pretty_ctxt_no_attribs class_ctxt) (fixes @ assumes)
|> maps (fn prt => [Pretty.fbrk, prt]);
in if null body then [] else [Pretty.block (header @ body)] end
else [];
end;
diff --git a/src/Pure/Isar/class_declaration.ML b/src/Pure/Isar/class_declaration.ML
--- a/src/Pure/Isar/class_declaration.ML
+++ b/src/Pure/Isar/class_declaration.ML
@@ -1,398 +1,401 @@
(* Title: Pure/Isar/class_declaration.ML
Author: Florian Haftmann, TU Muenchen
Declaring classes and subclass relations.
*)
signature CLASS_DECLARATION =
sig
val class: binding -> Bundle.name list -> class list ->
Element.context_i list -> theory -> string * local_theory
val class_cmd: binding -> (xstring * Position.T) list -> xstring list ->
Element.context list -> theory -> string * local_theory
val prove_subclass: tactic -> class ->
local_theory -> local_theory
val subclass: class -> local_theory -> Proof.state
val subclass_cmd: xstring -> local_theory -> Proof.state
end;
structure Class_Declaration: CLASS_DECLARATION =
struct
(** class definitions **)
local
(* calculating class-related rules including canonical interpretation *)
fun calculate thy class sups base_sort param_map assm_axiom =
let
val thy_ctxt = Proof_Context.init_global thy;
val certT = Thm.trim_context_ctyp o Thm.global_ctyp_of thy;
val cert = Thm.trim_context_cterm o Thm.global_cterm_of thy;
+ val conclude_witness = Thm.trim_context o Element.conclude_witness thy_ctxt;
(* instantiation of canonical interpretation *)
val a_tfree = (Name.aT, base_sort);
val a_type = TFree a_tfree;
val param_map_const = (map o apsnd) Const param_map;
val param_map_inst =
Frees.build (param_map |> fold (fn (x, (c, T)) =>
let val T' = map_atyps (K a_type) T
in Frees.add ((x, T'), cert (Const (c, T'))) end));
val const_morph =
Element.instantiate_normalize_morphism (TFrees.empty, param_map_inst);
val typ_morph =
Element.instantiate_normalize_morphism
(TFrees.make1 (a_tfree, certT (Term.aT [class])), Frees.empty)
val (([raw_props], _, [(_, raw_inst_morph)], _, export_morph), _) = thy_ctxt
|> Expression.cert_goal_expression ([(class, (("", false),
(Expression.Named param_map_const, [])))], []);
val (props, inst_morph) =
if null param_map
then (raw_props |> map (Morphism.term typ_morph),
raw_inst_morph $> typ_morph)
else (raw_props, raw_inst_morph); (*FIXME proper handling in
locale.ML / expression.ML would be desirable*)
(* witness for canonical interpretation *)
val some_prop = try the_single props;
val some_witn = Option.map (fn prop =>
let
val sup_axioms = map_filter (fst o Class.rules thy) sups;
val loc_intro_tac =
(case Locale.intros_of thy class of
(_, NONE) => all_tac
| (_, SOME intro) => ALLGOALS (resolve_tac thy_ctxt [intro]));
val tac = loc_intro_tac
THEN ALLGOALS (Proof_Context.fact_tac thy_ctxt (sup_axioms @ the_list assm_axiom));
in Element.prove_witness thy_ctxt prop tac end) some_prop;
- val some_axiom = Option.map (Element.conclude_witness thy_ctxt) some_witn;
+ val some_axiom = Option.map conclude_witness some_witn;
(* canonical interpretation *)
val base_morph = inst_morph
$> Morphism.binding_morphism "class_binding" (Binding.prefix false (Class.class_prefix class))
$> Element.satisfy_morphism (the_list some_witn);
- val eq_morph = Element.eq_morphism thy (Class.these_defs thy sups);
+ val eq_morph =
+ Element.eq_morphism (Class.these_defs thy sups)
+ |> Option.map (Morphism.set_context thy);
(* assm_intro *)
fun prove_assm_intro thm =
let
val ((_, [thm']), _) = Variable.import true [thm] thy_ctxt;
val const_eq_morph =
(case eq_morph of
SOME eq_morph => const_morph $> eq_morph
| NONE => const_morph);
val thm'' = Morphism.thm const_eq_morph thm';
in
Goal.prove_sorry_global thy [] [] (Thm.prop_of thm'')
(fn {context = ctxt, ...} => ALLGOALS (Proof_Context.fact_tac ctxt [thm'']))
end;
val some_assm_intro = Option.map prove_assm_intro (fst (Locale.intros_of thy class));
(* of_class *)
val of_class_prop_concl = Logic.mk_of_class (a_type, class);
val of_class_prop =
(case some_prop of
NONE => of_class_prop_concl
| SOME prop => Logic.mk_implies (Morphism.term const_morph
((map_types o map_atyps) (K a_type) prop), of_class_prop_concl));
val sup_of_classes = map (snd o Class.rules thy) sups;
val loc_axiom_intros = map Drule.export_without_context_open (Locale.axioms_of thy class);
val axclass_intro = #intro (Axclass.get_info thy class);
val base_sort_trivs = Thm.of_sort (Thm.global_ctyp_of thy a_type, base_sort);
fun tac ctxt =
REPEAT (SOMEGOAL
(match_tac ctxt (axclass_intro :: sup_of_classes @ loc_axiom_intros @ base_sort_trivs)
ORELSE' assume_tac ctxt));
val of_class = Goal.prove_sorry_global thy [] [] of_class_prop (tac o #context);
in (base_morph, eq_morph, export_morph, some_axiom, some_assm_intro, of_class) end;
(* reading and processing class specifications *)
fun prep_class_elems prep_decl ctxt sups raw_elems =
let
(* user space type system: only permits 'a type variable, improves towards 'a *)
val thy = Proof_Context.theory_of ctxt;
val algebra = Sign.classes_of thy;
val inter_sort = curry (Sorts.inter_sort algebra);
val proto_base_sort =
if null sups then Sign.defaultS thy
else fold inter_sort (map (Class.base_sort thy) sups) [];
val is_param = member (op =) (map fst (Class.these_params thy sups));
val base_constraints = (map o apsnd)
(map_type_tfree (K (TVar ((Name.aT, 0), proto_base_sort))) o fst o fst o snd)
(Class.these_operations thy sups);
val singleton_fixate = burrow_types (fn Ts =>
let
val tfrees = fold Term.add_tfreesT Ts [];
val inferred_sort =
(fold o fold_atyps) (fn TVar (_, S) => inter_sort S | _ => I) Ts [];
val fixate_sort =
(case tfrees of
[] => inferred_sort
| [(a, S)] =>
if a <> Name.aT then
error ("No type variable other than " ^ Name.aT ^ " allowed in class specification")
else if Sorts.sort_le algebra (S, inferred_sort) then S
else
error ("Type inference imposes additional sort constraint " ^
Syntax.string_of_sort_global thy inferred_sort ^
" of type parameter " ^ Name.aT ^ " of sort " ^
Syntax.string_of_sort_global thy S)
| _ => error "Multiple type variables in class specification");
val fixateT = Term.aT fixate_sort;
in
(map o map_atyps)
(fn T as TVar (xi, _) => if Type_Infer.is_param xi then fixateT else T | T => T) Ts
end);
fun unify_params ts =
let
val param_Ts = (fold o fold_aterms)
(fn Free (v, T) => if is_param v then fold_atyps (insert (op =)) T else I | _ => I) ts [];
val param_namesT = map_filter (try (fst o dest_TVar)) param_Ts;
val param_T = if null param_namesT then NONE
else SOME (case get_first (try dest_TFree) param_Ts of
SOME v_sort => TFree v_sort |
NONE => TVar (hd param_namesT, proto_base_sort));
in case param_T of
NONE => ts |
SOME T => map (subst_TVars (map (rpair T) param_namesT)) ts
end;
(* preprocessing elements, retrieving base sort from type-checked elements *)
val raw_supexpr =
(map (fn sup => (sup, (("", false), (Expression.Positional [], [])))) sups, []);
val init_class_body =
fold (Proof_Context.add_const_constraint o apsnd SOME) base_constraints
#> Class.redeclare_operations thy sups
#> Context.proof_map (Syntax_Phases.term_check 0 "singleton_fixate" (K singleton_fixate));
val ((raw_supparams, _, raw_inferred_elems, _), _) =
ctxt
|> Context.proof_map (Syntax_Phases.term_check 0 "unify_params" (K unify_params))
|> prep_decl raw_supexpr init_class_body raw_elems;
fun filter_element (Element.Fixes []) = NONE
| filter_element (e as Element.Fixes _) = SOME e
| filter_element (Element.Constrains []) = NONE
| filter_element (e as Element.Constrains _) = SOME e
| filter_element (Element.Assumes []) = NONE
| filter_element (e as Element.Assumes _) = SOME e
| filter_element (Element.Defines _) =
error ("\"defines\" element not allowed in class specification.")
| filter_element (Element.Notes _) =
error ("\"notes\" element not allowed in class specification.")
| filter_element (Element.Lazy_Notes _) =
error ("\"notes\" element not allowed in class specification.");
val inferred_elems = map_filter filter_element raw_inferred_elems;
fun fold_element_types f (Element.Fixes fxs) = fold (fn (_, SOME T, _) => f T) fxs
| fold_element_types f (Element.Constrains cnstrs) = fold (f o snd) cnstrs
| fold_element_types f (Element.Assumes assms) = fold (fold (fn (t, ts) =>
fold_types f t #> (fold o fold_types) f ts) o snd) assms;
val base_sort =
if null inferred_elems then proto_base_sort
else
(case (fold o fold_element_types) Term.add_tfreesT inferred_elems [] of
[] => error "No type variable in class specification"
| [(_, sort)] => sort
| _ => error "Multiple type variables in class specification");
val supparams = map (fn ((c, T), _) =>
(c, map_atyps (K (Term.aT base_sort)) T)) raw_supparams;
val supparam_names = map fst supparams;
fun mk_param ((c, _), _) = Free (c, (the o AList.lookup (op =) supparams) c);
val supexpr = (map (fn sup => (sup, (("", false),
(Expression.Positional (map (SOME o mk_param) (Locale.params_of thy sup)), [])))) sups,
map (fn (c, T) => (Binding.name c, SOME T, NoSyn)) supparams);
in (base_sort, supparam_names, supexpr, inferred_elems) end;
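(* Illustration (not part of the sources): the checks above confine class specifications
   to the single type variable 'a, e.g. (hypothetical class):

     class semigroup =
       fixes mult :: "'a => 'a => 'a"
       assumes assoc: "mult (mult x y) z = mult x (mult y z)"

   whereas any specification mentioning a second type variable such as 'b is rejected. *)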
val cert_class_elems = prep_class_elems Expression.cert_declaration;
val read_class_elems = prep_class_elems Expression.cert_read_declaration;
fun prep_class_spec prep_class prep_include prep_class_elems ctxt raw_supclasses raw_includes raw_elems =
let
val thy = Proof_Context.theory_of ctxt;
(* prepare import *)
val inter_sort = curry (Sorts.inter_sort (Sign.classes_of thy));
val sups = Sign.minimize_sort thy (map (prep_class ctxt) raw_supclasses);
val _ =
(case filter_out (Class.is_class thy) sups of
[] => ()
| no_classes => error ("No (proper) classes: " ^ commas_quote no_classes));
val raw_supparams = (map o apsnd) (snd o snd) (Class.these_params thy sups);
val raw_supparam_names = map fst raw_supparams;
val _ =
if has_duplicates (op =) raw_supparam_names then
error ("Duplicate parameter(s) in superclasses: " ^
(commas_quote (duplicates (op =) raw_supparam_names)))
else ();
(* infer types and base sort *)
val includes = map (prep_include ctxt) raw_includes;
val includes_ctxt = Bundle.includes includes ctxt;
val (base_sort, supparam_names, supexpr, inferred_elems) = prep_class_elems includes_ctxt sups raw_elems;
val sup_sort = inter_sort base_sort sups;
(* process elements as class specification *)
val class_ctxt = Class.begin sups base_sort includes_ctxt;
val ((_, _, syntax_elems, _), _) = class_ctxt
|> Expression.cert_declaration supexpr I inferred_elems;
fun check_vars e vs =
if null vs then
error ("No type variable in part of specification element " ^
Pretty.string_of (Pretty.chunks (Element.pretty_ctxt class_ctxt e)))
else ();
fun check_element (e as Element.Fixes fxs) =
List.app (fn (_, SOME T, _) => check_vars e (Term.add_tfreesT T [])) fxs
| check_element (e as Element.Assumes assms) =
List.app (fn (_, ts_pss) =>
List.app (fn (t, _) => check_vars e (Term.add_tfrees t [])) ts_pss) assms
| check_element _ = ();
val _ = List.app check_element syntax_elems;
fun fork_syn (Element.Fixes xs) =
fold_map (fn (c, ty, syn) => cons (c, syn) #> pair (c, ty, NoSyn)) xs
#>> Element.Fixes
| fork_syn x = pair x;
val (elems, global_syntax) = fold_map fork_syn syntax_elems [];
in (((sups, supparam_names), (sup_sort, base_sort, supexpr)), (includes, elems, global_syntax)) end;
val cert_class_spec = prep_class_spec (K I) (K I) cert_class_elems;
val read_class_spec = prep_class_spec Proof_Context.read_class Bundle.check read_class_elems;
(* class establishment *)
fun add_consts class base_sort sups supparam_names global_syntax thy =
let
(*FIXME simplify*)
val supconsts = supparam_names
|> AList.make (snd o the o AList.lookup (op =) (Class.these_params thy sups))
|> (map o apsnd o apsnd o map_atyps) (K (Term.aT [class]));
val all_params = Locale.params_of thy class;
val raw_params = (snd o chop (length supparam_names)) all_params;
fun add_const ((raw_c, raw_ty), _) thy =
let
val b = Binding.name raw_c;
val c = Sign.full_name thy b;
val ty = map_atyps (K (Term.aT base_sort)) raw_ty;
val ty0 = Type.strip_sorts ty;
val ty' = map_atyps (K (Term.aT [class])) ty0;
val syn = (the_default NoSyn o AList.lookup Binding.eq_name global_syntax) b;
in
thy
|> Sign.declare_const_global ((b, ty0), syn)
|> snd
|> pair ((Variable.check_name b, ty), (c, ty'))
end;
in
thy
|> Sign.add_path (Class.class_prefix class)
|> fold_map add_const raw_params
||> Sign.restore_naming thy
|-> (fn params => pair (supconsts @ (map o apfst) fst params, params))
end;
fun adjungate_axclass bname class base_sort sups supsort supparam_names global_syntax thy =
let
(*FIXME simplify*)
fun globalize param_map = map_aterms
(fn Free (v, ty) => Const ((fst o the o AList.lookup (op =) param_map) v, ty)
| t => t);
val raw_pred = Locale.intros_of thy class
|> fst
|> Option.map (Logic.unvarify_global o Logic.strip_imp_concl o Thm.prop_of);
fun get_axiom thy =
(case #axioms (Axclass.get_info thy class) of
[] => NONE
| [thm] => SOME thm);
in
thy
|> add_consts class base_sort sups supparam_names global_syntax |-> (fn (param_map, params) =>
Axclass.define_class (bname, supsort)
(map (fst o snd) params)
[(Binding.empty_atts, Option.map (globalize param_map) raw_pred |> the_list)]
#> snd
#> `get_axiom
#-> (fn assm_axiom => fold (Sign.add_const_constraint o apsnd SOME o snd) params
#> pair (param_map, params, assm_axiom)))
end;
fun gen_class prep_class_spec b raw_includes raw_supclasses raw_elems thy =
let
val class = Sign.full_name thy b;
val ctxt = Proof_Context.init_global thy;
val (((sups, supparam_names), (supsort, base_sort, supexpr)), (includes, elems, global_syntax)) =
prep_class_spec ctxt raw_supclasses raw_includes raw_elems;
val of_class_binding = Binding.qualify_name true b "intro_of_class";
in
thy
|> Expression.add_locale b (Binding.qualify true "class" b) includes supexpr elems
|> snd |> Local_Theory.exit_global
|> adjungate_axclass b class base_sort sups supsort supparam_names global_syntax
|-> (fn (param_map, params, assm_axiom) =>
`(fn thy => calculate thy class sups base_sort param_map assm_axiom)
#-> (fn (base_morph, eq_morph, export_morph, some_axiom, some_assm_intro, of_class) =>
Context.theory_map (Locale.add_registration
{inst = (class, base_morph),
mixin = Option.map (rpair true) eq_morph,
export = export_morph})
#> Class.register class sups params base_sort base_morph export_morph some_axiom some_assm_intro of_class
#> Global_Theory.store_thm (of_class_binding, of_class)))
|> snd
|> Named_Target.init includes class
|> pair class
end;
in
val class = gen_class cert_class_spec;
val class_cmd = gen_class read_class_spec;
end; (*local*)
(** subclass relations **)
local
fun gen_subclass prep_class do_proof raw_sup lthy =
let
val thy = Proof_Context.theory_of lthy;
val proto_sup = prep_class thy raw_sup;
val proto_sub =
(case Named_Target.class_of lthy of
SOME class => class
| NONE => error "Not in a class target");
val (sub, sup) = Axclass.cert_classrel thy (proto_sub, proto_sup);
val expr = ([(sup, (("", false), (Expression.Positional [], [])))], []);
val (([props], _, deps, _, export), goal_ctxt) =
Expression.cert_goal_expression expr lthy;
val some_prop = try the_single props;
val some_dep_morph = try the_single (map snd deps);
fun after_qed some_wit =
Class.register_subclass (sub, sup) some_dep_morph some_wit export;
in do_proof after_qed some_prop goal_ctxt end;
fun user_proof after_qed some_prop =
Element.witness_proof (after_qed o try the_single o the_single)
[the_list some_prop];
fun tactic_proof tac after_qed some_prop ctxt =
after_qed (Option.map
(fn prop => Element.prove_witness ctxt prop tac) some_prop) ctxt;
in
fun prove_subclass tac = gen_subclass (K I) (tactic_proof tac);
fun subclass x = gen_subclass (K I) user_proof x;
fun subclass_cmd x =
gen_subclass (Proof_Context.read_class o Proof_Context.init_global) user_proof x;
end; (*local*)
end;
diff --git a/src/Pure/Isar/code.ML b/src/Pure/Isar/code.ML
--- a/src/Pure/Isar/code.ML
+++ b/src/Pure/Isar/code.ML
@@ -1,1583 +1,1570 @@
(* Title: Pure/Isar/code.ML
Author: Florian Haftmann, TU Muenchen
Abstract executable ingredients of theory. Management of data
dependent on executable ingredients as synchronized cache; purged
on any change of underlying executable ingredients.
*)
signature CODE =
sig
(*constants*)
val check_const: theory -> term -> string
val read_const: theory -> string -> string
val string_of_const: theory -> string -> string
val args_number: theory -> string -> int
(*constructor sets*)
val constrset_of_consts: theory -> (string * typ) list
-> string * ((string * sort) list * (string * ((string * sort) list * typ list)) list)
(*code equations and certificates*)
val assert_eqn: theory -> thm * bool -> thm * bool
val assert_abs_eqn: theory -> string option -> thm -> thm * (string * string)
type cert
val constrain_cert: theory -> sort list -> cert -> cert
val conclude_cert: cert -> cert
val typargs_deps_of_cert: theory -> cert -> (string * sort) list * (string * typ list) list
val equations_of_cert: theory -> cert -> ((string * sort) list * typ)
* (((string option * term) list * (string option * term)) * (thm option * bool)) list option
val pretty_cert: theory -> cert -> Pretty.T list
(*executable code*)
type constructors
type abs_type
val type_interpretation: (string -> theory -> theory) -> theory -> theory
val datatype_interpretation: (string * constructors -> theory -> theory) -> theory -> theory
val abstype_interpretation: (string * abs_type -> theory -> theory) -> theory -> theory
val declare_datatype_global: (string * typ) list -> theory -> theory
val declare_datatype_cmd: string list -> theory -> theory
val declare_abstype: thm -> local_theory -> local_theory
val declare_abstype_global: thm -> theory -> theory
val declare_default_eqns: (thm * bool) list -> local_theory -> local_theory
val declare_default_eqns_global: (thm * bool) list -> theory -> theory
val declare_eqns: (thm * bool) list -> local_theory -> local_theory
val declare_eqns_global: (thm * bool) list -> theory -> theory
val add_eqn_global: thm * bool -> theory -> theory
val del_eqn_global: thm -> theory -> theory
val declare_abstract_eqn: thm -> local_theory -> local_theory
val declare_abstract_eqn_global: thm -> theory -> theory
val declare_aborting_global: string -> theory -> theory
val declare_unimplemented_global: string -> theory -> theory
val declare_case_global: thm -> theory -> theory
val declare_undefined_global: string -> theory -> theory
val get_type: theory -> string -> constructors * bool
val get_type_of_constr_or_abstr: theory -> string -> (string * bool) option
val is_constr: theory -> string -> bool
val is_abstr: theory -> string -> bool
val get_cert: Proof.context -> ((thm * bool) list -> (thm * bool) list option) list
-> string -> cert
type case_schema
val get_case_schema: theory -> string -> case_schema option
val get_case_cong: theory -> string -> thm option
val is_undefined: theory -> string -> bool
val print_codesetup: theory -> unit
end;
signature CODE_DATA_ARGS =
sig
type T
val empty: T
end;
signature CODE_DATA =
sig
type T
val change: theory option -> (T -> T) -> T
val change_yield: theory option -> (T -> 'a * T) -> 'a * T
end;
signature PRIVATE_CODE =
sig
include CODE
val declare_data: Any.T -> serial
val change_yield_data: serial * ('a -> Any.T) * (Any.T -> 'a)
-> theory -> ('a -> 'b * 'a) -> 'b * 'a
end;
structure Code : PRIVATE_CODE =
struct
(** auxiliary **)
(* printing *)
fun string_of_typ thy =
Syntax.string_of_typ (Config.put show_sorts true (Syntax.init_pretty_global thy));
fun string_of_const thy c =
let val ctxt = Proof_Context.init_global thy in
case Axclass.inst_of_param thy c of
SOME (c, tyco) =>
Proof_Context.extern_const ctxt c ^ " " ^ enclose "[" "]"
(Proof_Context.extern_type ctxt tyco)
| NONE => Proof_Context.extern_const ctxt c
end;
(* constants *)
fun const_typ thy = Type.strip_sorts o Sign.the_const_type thy;
fun args_number thy = length o binder_types o const_typ thy;
fun devarify ty =
let
val tys = build (fold_atyps (fn TVar vi_sort => AList.update (op =) vi_sort) ty);
val vs = Name.invent Name.context Name.aT (length tys);
val mapping = map2 (fn v => fn (vi, sort) => (vi, TFree (v, sort))) vs tys;
in Term.typ_subst_TVars mapping ty end;
fun typscheme thy (c, ty) =
(map dest_TFree (Sign.const_typargs thy (c, ty)), Type.strip_sorts ty);
fun typscheme_equiv (ty1, ty2) =
Type.raw_instance (devarify ty1, ty2) andalso Type.raw_instance (devarify ty2, ty1);
fun check_bare_const thy t = case try dest_Const t
of SOME c_ty => c_ty
| NONE => error ("Not a constant: " ^ Syntax.string_of_term_global thy t);
fun check_unoverload thy (c, ty) =
let
val c' = Axclass.unoverload_const thy (c, ty);
val ty_decl = const_typ thy c';
in
if typscheme_equiv (ty_decl, Logic.varifyT_global ty)
then c'
else
error ("Type\n" ^ string_of_typ thy ty ^
"\nof constant " ^ quote c ^
"\nis too specific compared to declared type\n" ^
string_of_typ thy ty_decl)
end;
fun check_const thy = check_unoverload thy o check_bare_const thy;
fun read_bare_const thy = check_bare_const thy o Syntax.read_term_global thy;
fun read_const thy = check_unoverload thy o read_bare_const thy;
(** executable specifications **)
(* types *)
datatype type_spec = Constructors of {
constructors: (string * ((string * sort) list * typ list)) list,
case_combinators: string list}
| Abstractor of {
abs_rep: thm,
abstractor: string * ((string * sort) list * typ),
projection: string,
more_abstract_functions: string list};
fun concrete_constructors_of (Constructors {constructors, ...}) =
constructors
| concrete_constructors_of _ =
[];
fun constructors_of (Constructors {constructors, ...}) =
(constructors, false)
| constructors_of (Abstractor {abstractor = (co, (vs, ty)), ...}) =
([(co, (vs, [ty]))], true);
fun case_combinators_of (Constructors {case_combinators, ...}) =
case_combinators
| case_combinators_of (Abstractor _) =
[];
fun add_case_combinator c (vs, Constructors {constructors, case_combinators}) =
(vs, Constructors {constructors = constructors,
case_combinators = insert (op =) c case_combinators});
fun projection_of (Constructors _) =
NONE
| projection_of (Abstractor {projection, ...}) =
SOME projection;
fun abstract_functions_of (Constructors _) =
[]
| abstract_functions_of (Abstractor {more_abstract_functions, projection, ...}) =
projection :: more_abstract_functions;
fun add_abstract_function c (vs, Abstractor {abs_rep, abstractor, projection, more_abstract_functions}) =
(vs, Abstractor {abs_rep = abs_rep, abstractor = abstractor, projection = projection,
more_abstract_functions = insert (op =) c more_abstract_functions});
fun join_same_types' (Constructors {constructors, case_combinators = case_combinators1},
Constructors {case_combinators = case_combinators2, ...}) =
Constructors {constructors = constructors,
case_combinators = merge (op =) (case_combinators1, case_combinators2)}
| join_same_types' (Abstractor {abs_rep, abstractor, projection, more_abstract_functions = more_abstract_functions1},
Abstractor {more_abstract_functions = more_abstract_functions2, ...}) =
Abstractor {abs_rep = abs_rep, abstractor = abstractor, projection = projection,
more_abstract_functions = merge (op =) (more_abstract_functions1, more_abstract_functions2)};
fun join_same_types ((vs, spec1), (_, spec2)) = (vs, join_same_types' (spec1, spec2));
(* functions *)
datatype fun_spec =
Eqns of bool * (thm * bool) list
| Proj of term * (string * string)
| Abstr of thm * (string * string);
val unimplemented = Eqns (true, []);
fun is_unimplemented (Eqns (true, [])) = true
| is_unimplemented _ = false;
fun is_default (Eqns (true, _)) = true
| is_default _ = false;
val aborting = Eqns (false, []);
fun associated_abstype (Proj (_, tyco_abs)) = SOME tyco_abs
| associated_abstype (Abstr (_, tyco_abs)) = SOME tyco_abs
| associated_abstype _ = NONE;
(* cases *)
type case_schema = int * (int * (string * int) option list);
datatype case_spec =
No_Case
| Case of {schema: case_schema, tycos: string list, cong: thm}
| Undefined;
fun associated_datatypes (Case {tycos, schema = (_, (_, raw_cos)), ...}) = (tycos, map fst (map_filter I raw_cos))
| associated_datatypes _ = ([], []);
(** background theory data store **)
(* historized declaration data *)
structure History =
struct
type 'a T = {
entry: 'a,
suppressed: bool, (*incompatible entries are merely suppressed after theory merge, not deleted*)
history: serial list (*explicit trace of declaration history supports non-monotonic declarations*)
} Symtab.table;
fun some_entry (SOME {suppressed = false, entry, ...}) = SOME entry
| some_entry _ = NONE;
fun lookup table =
Symtab.lookup table #> some_entry;
fun register key entry table =
if is_some (Symtab.lookup table key)
then Symtab.map_entry key
(fn {history, ...} => {entry = entry, suppressed = false, history = serial () :: history}) table
else Symtab.update (key, {entry = entry, suppressed = false, history = [serial ()]}) table;
fun modify_entry key f = Symtab.map_entry key
(fn {entry, suppressed, history} => {entry = f entry, suppressed = suppressed, history = history});
fun all table = Symtab.dest table
|> map_filter (fn (key, {entry, suppressed = false, ...}) => SOME (key, entry) | _ => NONE);
local
fun merge_history join_same
({entry = entry1, history = history1, ...}, {entry = entry2, history = history2, ...}) =
let
val history = merge (op =) (history1, history2);
val entry = if hd history1 = hd history2 then join_same (entry1, entry2)
else if hd history = hd history1 then entry1 else entry2;
in {entry = entry, suppressed = false, history = history} end;
in
fun join join_same tables = Symtab.join (K (merge_history join_same)) tables;
fun suppress key = Symtab.map_entry key
(fn {entry, history, ...} => {entry = entry, suppressed = true, history = history});
fun suppress_except f = Symtab.map (fn key => fn {entry, suppressed, history} =>
{entry = entry, suppressed = suppressed orelse (not o f) (key, entry), history = history});
end;
end;
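(* Minimal usage sketch (not part of the sources): entries are keyed by constant or
   type name; re-registration extends the explicit history, so after a theory merge
   the later declaration wins.  The key "c" and the specs below are placeholders.

     Symtab.empty
     |> History.register "c" spec1
     |> History.register "c" spec2
     |> (fn table => History.lookup table "c")    (*yields SOME spec2*)
*)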
datatype specs = Specs of {
types: ((string * sort) list * type_spec) History.T,
pending_eqns: (thm * bool) list Symtab.table,
functions: fun_spec History.T,
cases: case_spec History.T
};
fun types_of (Specs {types, ...}) = types;
fun pending_eqns_of (Specs {pending_eqns, ...}) = pending_eqns;
fun functions_of (Specs {functions, ...}) = functions;
fun cases_of (Specs {cases, ...}) = cases;
fun make_specs (types, ((pending_eqns, functions), cases)) =
Specs {types = types, pending_eqns = pending_eqns,
functions = functions, cases = cases};
val empty_specs =
make_specs (Symtab.empty, ((Symtab.empty, Symtab.empty), Symtab.empty));
fun map_specs f (Specs {types = types, pending_eqns = pending_eqns,
functions = functions, cases = cases}) =
make_specs (f (types, ((pending_eqns, functions), cases)));
fun merge_specs (Specs {types = types1, pending_eqns = _,
functions = functions1, cases = cases1},
Specs {types = types2, pending_eqns = _,
functions = functions2, cases = cases2}) =
let
val types = History.join join_same_types (types1, types2);
val all_types = map (snd o snd) (History.all types);
fun check_abstype (c, fun_spec) = case associated_abstype fun_spec of
NONE => true
| SOME (tyco, abs) => (case History.lookup types tyco of
NONE => false
| SOME (_, Constructors _) => false
| SOME (_, Abstractor {abstractor = (abs', _), projection, more_abstract_functions, ...}) =>
abs = abs' andalso (c = projection orelse member (op =) more_abstract_functions c));
fun check_datatypes (_, case_spec) =
let
val (tycos, required_constructors) = associated_datatypes case_spec;
val allowed_constructors =
tycos
|> maps (these o Option.map (concrete_constructors_of o snd) o History.lookup types)
|> map fst;
in subset (op =) (required_constructors, allowed_constructors) end;
val all_constructors =
maps (fst o constructors_of) all_types;
val functions = History.join fst (functions1, functions2)
|> fold (History.suppress o fst) all_constructors
|> History.suppress_except check_abstype;
val cases = History.join fst (cases1, cases2)
|> History.suppress_except check_datatypes;
in make_specs (types, ((Symtab.empty, functions), cases)) end;
val map_types = map_specs o apfst;
val map_pending_eqns = map_specs o apsnd o apfst o apfst;
val map_functions = map_specs o apsnd o apfst o apsnd;
val map_cases = map_specs o apsnd o apsnd;
(* data slots dependent on executable code *)
(*private copy avoids potential conflict of table exceptions*)
structure Datatab = Table(type key = int val ord = int_ord);
local
type kind = {empty: Any.T};
val kinds = Synchronized.var "Code_Data" (Datatab.empty: kind Datatab.table);
fun invoke f k =
(case Datatab.lookup (Synchronized.value kinds) k of
SOME kind => f kind
| NONE => raise Fail "Invalid code data identifier");
in
fun declare_data empty =
let
val k = serial ();
val kind = {empty = empty};
val _ = Synchronized.change kinds (Datatab.update (k, kind));
in k end;
fun invoke_init k = invoke (fn kind => #empty kind) k;
end; (*local*)
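(* Sketch (not part of the sources): a minimal realization of the CODE_DATA interface
   on top of declare_data/change_yield_data, written outside this structure against
   the PRIVATE_CODE signature above.  Any.T is the universal exception type, so
   embedding works via a local exception constructor. *)
(*
functor Code_Data(Data: CODE_DATA_ARGS): CODE_DATA =
struct
  type T = Data.T;
  exception Data of T;
  fun dest (Data x) = x;
  val kind = Code.declare_data (Data Data.empty);
  fun change_yield (SOME thy) f = Code.change_yield_data (kind, Data, dest) thy f
    | change_yield NONE f = f Data.empty;
  fun change some_thy f = snd (change_yield some_thy (pair () o f));
end;
*)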
(* global theory store *)
local
type data = Any.T Datatab.table;
-fun make_dataref thy =
- (Context.theory_long_name thy,
- Synchronized.var "code data" (NONE : (data * Context.theory_id) option));
+fun make_dataref () =
+ Synchronized.var "code data" (NONE : (data * Context.theory_id) option);
structure Code_Data = Theory_Data
(
type T = specs * (string * (data * Context.theory_id) option Synchronized.var);
- val empty = (empty_specs, make_dataref (Context.the_global_context ()));
+ val empty =
+ (empty_specs, (Context.theory_long_name (Context.the_global_context ()), make_dataref ()));
fun merge ((specs1, dataref), (specs2, _)) =
(merge_specs (specs1, specs2), dataref);
);
fun init_dataref thy =
- if #1 (#2 (Code_Data.get thy)) = Context.theory_long_name thy then NONE
- else SOME ((Code_Data.map o apsnd) (fn _ => make_dataref thy) thy)
+ let val thy_name = Context.theory_long_name thy in
+ if #1 (#2 (Code_Data.get thy)) = thy_name then NONE
+ else SOME ((Code_Data.map o apsnd) (K (thy_name, make_dataref ())) thy)
+ end;
in
val _ = Theory.setup (Theory.at_begin init_dataref);
(* access to executable specifications *)
val specs_of : theory -> specs = fst o Code_Data.get;
fun modify_specs f thy =
- Code_Data.map (fn (specs, _) => (f specs, make_dataref thy)) thy;
+ let val thy_name = Context.theory_long_name thy
+ in Code_Data.map (fn (specs, _) => (f specs, (thy_name, make_dataref ()))) thy end;
(* access to data dependent on executable specifications *)
fun change_yield_data (kind, mk, dest) theory f =
let
val dataref = #2 (#2 (Code_Data.get theory));
val (datatab, thy_id) = case Synchronized.value dataref
of SOME (datatab, thy_id) =>
if Context.eq_thy_id (Context.theory_id theory, thy_id)
then (datatab, thy_id)
else (Datatab.empty, Context.theory_id theory)
| NONE => (Datatab.empty, Context.theory_id theory)
val data = case Datatab.lookup datatab kind
of SOME data => data
| NONE => invoke_init kind;
val result as (_, data') = f (dest data);
val _ = Synchronized.change dataref
((K o SOME) (Datatab.update (kind, mk data') datatab, thy_id));
in result end;
end; (*local*)
(* pending function equations *)
(* Ideally, *all* equations implementing a function would be treated as
*one* atomic declaration; unfortunately, we cannot implement this:
the too-well-established declaration interface consists of Isar attributes,
which operate on *one* single theorem. Hence we treat such Isar
declarations as "pending" and historize them as proper declarations
at the end of each theory. *)
fun modify_pending_eqns c f specs =
let
val existing_eqns = case History.lookup (functions_of specs) c of
SOME (Eqns (false, eqns)) => eqns
| _ => [];
in
specs
|> map_pending_eqns (Symtab.map_default (c, existing_eqns) f)
end;
fun register_fun_spec c spec =
map_pending_eqns (Symtab.delete_safe c)
#> map_functions (History.register c spec);
fun lookup_fun_spec specs c =
case Symtab.lookup (pending_eqns_of specs) c of
SOME eqns => Eqns (false, eqns)
| NONE => (case History.lookup (functions_of specs) c of
SOME spec => spec
| NONE => unimplemented);
fun lookup_proper_fun_spec specs c =
let
val spec = lookup_fun_spec specs c
in
if is_unimplemented spec then NONE else SOME spec
end;
fun all_fun_specs specs =
map_filter (fn c => Option.map (pair c) (lookup_proper_fun_spec specs c))
(union (op =)
((Symtab.keys o pending_eqns_of) specs)
((Symtab.keys o functions_of) specs));
fun historize_pending_fun_specs thy =
let
val pending_eqns = (pending_eqns_of o specs_of) thy;
in if Symtab.is_empty pending_eqns
then
NONE
else
thy
|> modify_specs (map_functions
(Symtab.fold (fn (c, eqs) => History.register c (Eqns (false, eqs))) pending_eqns)
#> map_pending_eqns (K Symtab.empty))
|> SOME
end;
val _ = Theory.setup (Theory.at_end historize_pending_fun_specs);
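(* Illustration (not part of the sources): a user-level declaration, e.g. via the
   [code] attribute, merely appends to pending_eqns for the respective constant;
   only the Theory.at_end hook above turns the accumulated equations into a single
   History entry, so all [code] declarations within one theory are historized as
   one proper declaration per constant. *)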
(** foundation **)
(* types *)
fun no_constr thy s (c, ty) = error ("Not a datatype constructor:\n" ^ string_of_const thy c
^ " :: " ^ string_of_typ thy ty ^ "\n" ^ enclose "(" ")" s);
fun analyze_constructor thy (c, ty) =
let
val _ = Thm.global_cterm_of thy (Const (c, ty));
val ty_decl = devarify (const_typ thy c);
fun last_typ c_ty ty =
let
val tfrees = Term.add_tfreesT ty [];
val (tyco, vs) = (apsnd o map) dest_TFree (dest_Type (body_type ty))
handle TYPE _ => no_constr thy "bad type" c_ty
val _ = if tyco = "fun" then no_constr thy "bad type" c_ty else ();
val _ =
if has_duplicates (eq_fst (op =)) vs
then no_constr thy "duplicate type variables in datatype" c_ty else ();
val _ =
if length tfrees <> length vs
then no_constr thy "type variables missing in datatype" c_ty else ();
in (tyco, vs) end;
val (tyco, _) = last_typ (c, ty) ty_decl;
val (_, vs) = last_typ (c, ty) ty;
in ((tyco, map snd vs), (c, (map fst vs, ty))) end;
fun constrset_of_consts thy consts =
let
val _ = map (fn (c, _) => if (is_some o Axclass.class_of_param thy) c
then error ("Is a class parameter: " ^ string_of_const thy c) else ()) consts;
val raw_constructors = map (analyze_constructor thy) consts;
val tyco = case distinct (op =) (map (fst o fst) raw_constructors)
of [tyco] => tyco
| [] => error "Empty constructor set"
| tycos => error ("Different type constructors in constructor set: " ^ commas_quote tycos)
val vs = Name.invent Name.context Name.aT (Sign.arity_number thy tyco);
fun inst vs' (c, (vs, ty)) =
let
val the_v = the o AList.lookup (op =) (vs ~~ vs');
val ty' = map_type_tfree (fn (v, _) => TFree (the_v v, [])) ty;
val (vs'', ty'') = typscheme thy (c, ty');
in (c, (vs'', binder_types ty'')) end;
val constructors = map (inst vs o snd) raw_constructors;
in (tyco, (map (rpair []) vs, constructors)) end;
fun lookup_vs_type_spec thy = History.lookup ((types_of o specs_of) thy);
type constructors =
(string * sort) list * (string * ((string * sort) list * typ list)) list;
fun get_type thy tyco = case lookup_vs_type_spec thy tyco
of SOME (vs, type_spec) => apfst (pair vs) (constructors_of type_spec)
| NONE => Sign.arity_number thy tyco
|> Name.invent Name.context Name.aT
|> map (rpair [])
|> rpair []
|> rpair false;
type abs_type =
(string * sort) list * {abs_rep: thm, abstractor: string * ((string * sort) list * typ), projection: string};
fun get_abstype_spec thy tyco = case lookup_vs_type_spec thy tyco of
SOME (vs, Abstractor {abs_rep, abstractor, projection, ...}) =>
- (vs, {abs_rep = abs_rep, abstractor = abstractor, projection = projection})
+ (vs, {abs_rep = Thm.transfer thy abs_rep, abstractor = abstractor, projection = projection})
| _ => error ("Not an abstract type: " ^ tyco);
fun get_type_of_constr_or_abstr thy c =
case (body_type o const_typ thy) c
of Type (tyco, _) => let val ((_, cos), abstract) = get_type thy tyco
in if member (op =) (map fst cos) c then SOME (tyco, abstract) else NONE end
| _ => NONE;
fun is_constr thy c = case get_type_of_constr_or_abstr thy c
of SOME (_, false) => true
| _ => false;
fun is_abstr thy c = case get_type_of_constr_or_abstr thy c
of SOME (_, true) => true
| _ => false;
(* bare code equations *)
(* convention for variables:
?x ?'a for free-floating theorems (e.g. in the data store)
?x 'a for certificates
x 'a for final representation of equations
*)
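(* Illustration (not part of the sources), for a hypothetical equation over "f":
     data store:            f ?x == ?x    at type ?'a
     certificate:           f ?x == ?x    at fixed type 'a
     final representation:  f x == x      at fixed type 'a
*)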
exception BAD_THM of string;
fun bad_thm msg = raise BAD_THM msg;
datatype strictness = Silent | Liberal | Strict
fun handle_strictness thm_of f strictness thy x = SOME (f x)
handle BAD_THM msg => case strictness of
Silent => NONE
| Liberal => (warning (msg ^ ", in theorem:\n" ^ Thm.string_of_thm_global thy (thm_of x)); NONE)
| Strict => error (msg ^ ", in theorem:\n" ^ Thm.string_of_thm_global thy (thm_of x));
fun is_linear thm =
let
val (_, args) = (strip_comb o fst o Logic.dest_equals o Thm.plain_prop_of) thm
in
not (has_duplicates (op =) ((fold o fold_aterms)
(fn Var (v, _) => cons v | _ => I) args []))
end;
fun check_decl_ty thy (c, ty) =
let
val ty_decl = const_typ thy c;
in if typscheme_equiv (ty_decl, ty) then ()
else bad_thm ("Type\n" ^ string_of_typ thy ty
^ "\nof constant " ^ quote c
^ "\nis too specific compared to declared type\n"
^ string_of_typ thy ty_decl)
end;
fun check_eqn thy {allow_nonlinear, allow_consts, allow_pats} thm (lhs, rhs) =
let
fun vars_of t = fold_aterms (fn Var (v, _) => insert (op =) v
| Free _ => bad_thm "Illegal free variable"
| _ => I) t [];
fun tvars_of t = fold_term_types (fn _ =>
fold_atyps (fn TVar (v, _) => insert (op =) v
| TFree _ => bad_thm "Illegal free type variable")) t [];
val lhs_vs = vars_of lhs;
val rhs_vs = vars_of rhs;
val lhs_tvs = tvars_of lhs;
val rhs_tvs = tvars_of rhs;
val _ = if null (subtract (op =) lhs_vs rhs_vs)
then ()
else bad_thm "Free variables on right hand side of equation";
val _ = if null (subtract (op =) lhs_tvs rhs_tvs)
then ()
else bad_thm "Free type variables on right hand side of equation";
val (head, args) = strip_comb lhs;
val (c, ty) = case head
of Const (c_ty as (_, ty)) => (Axclass.unoverload_const thy c_ty, ty)
| _ => bad_thm "Equation not headed by constant";
fun check _ (Abs _) = bad_thm "Abstraction on left hand side of equation"
| check 0 (Var _) = ()
| check _ (Var _) = bad_thm "Variable with application on left hand side of equation"
| check n (t1 $ t2) = (check (n+1) t1; check 0 t2)
| check n (Const (c_ty as (c, ty))) =
if allow_pats then let
val c' = Axclass.unoverload_const thy c_ty
in if n = (length o binder_types) ty
then if allow_consts orelse is_constr thy c'
then ()
else bad_thm (quote c ^ " is not a constructor, on left hand side of equation")
else bad_thm ("Partially applied constant " ^ quote c ^ " on left hand side of equation")
end else bad_thm ("Pattern not allowed here, but constant " ^ quote c ^ " encountered on left hand side of equation")
val _ = map (check 0) args;
val _ = if allow_nonlinear orelse is_linear thm then ()
else bad_thm "Duplicate variables on left hand side of equation";
val _ = if (is_none o Axclass.class_of_param thy) c then ()
else bad_thm "Overloaded constant as head in equation";
val _ = if not (is_constr thy c) then ()
else bad_thm "Constructor as head in equation";
val _ = if not (is_abstr thy c) then ()
else bad_thm "Abstractor as head in equation";
val _ = check_decl_ty thy (c, ty);
val _ = case strip_type ty of
(Type (tyco, _) :: _, _) => (case lookup_vs_type_spec thy tyco of
SOME (_, type_spec) => (case projection_of type_spec of
SOME proj =>
if c = proj
then bad_thm "Projection as head in equation"
else ()
| _ => ())
| _ => ())
| _ => ();
in () end;
local
fun raw_assert_eqn thy check_patterns (thm, proper) =
let
val (lhs, rhs) = (Logic.dest_equals o Thm.plain_prop_of) thm
handle TERM _ => bad_thm "Not an equation"
| THM _ => bad_thm "Not a proper equation";
val _ = check_eqn thy {allow_nonlinear = not proper,
allow_consts = not (proper andalso check_patterns), allow_pats = true} thm (lhs, rhs);
in (thm, proper) end;
fun raw_assert_abs_eqn thy some_tyco thm =
let
val (full_lhs, rhs) = (Logic.dest_equals o Thm.plain_prop_of) thm
handle TERM _ => bad_thm "Not an equation"
| THM _ => bad_thm "Not a proper equation";
val (proj_t, lhs) = dest_comb full_lhs
handle TERM _ => bad_thm "Not an abstract equation";
val (proj, ty) = dest_Const proj_t
handle TERM _ => bad_thm "Not an abstract equation";
val (tyco, Ts) = (dest_Type o domain_type) ty
handle TERM _ => bad_thm "Not an abstract equation"
| TYPE _ => bad_thm "Not an abstract equation";
val _ = case some_tyco of SOME tyco' => if tyco = tyco' then ()
else bad_thm ("Abstract type mismatch:" ^ quote tyco ^ " vs. " ^ quote tyco')
| NONE => ();
val (vs, proj', (abs', _)) = case lookup_vs_type_spec thy tyco
of SOME (vs, Abstractor spec) => (vs, #projection spec, #abstractor spec)
| _ => bad_thm ("Not an abstract type: " ^ tyco);
val _ = if proj = proj' then ()
else bad_thm ("Projection mismatch: " ^ quote proj ^ " vs. " ^ quote proj');
val _ = check_eqn thy {allow_nonlinear = false,
allow_consts = false, allow_pats = false} thm (lhs, rhs);
val _ = if ListPair.all (fn (T, (_, sort)) => Sign.of_sort thy (T, sort)) (Ts, vs) then ()
else error ("Type arguments do not satisfy sort constraints of abstype certificate.");
in (thm, (tyco, abs')) end;
in
fun generic_assert_eqn strictness thy check_patterns eqn =
handle_strictness fst (raw_assert_eqn thy check_patterns) strictness thy eqn;
fun generic_assert_abs_eqn strictness thy check_patterns thm =
handle_strictness I (raw_assert_abs_eqn thy check_patterns) strictness thy thm;
end;
fun assert_eqn thy = the o generic_assert_eqn Strict thy true;
fun assert_abs_eqn thy some_tyco = the o generic_assert_abs_eqn Strict thy some_tyco;
val head_eqn = dest_Const o fst o strip_comb o fst o Logic.dest_equals o Thm.plain_prop_of;
fun const_typ_eqn thy thm =
let
val (c, ty) = head_eqn thm;
val c' = Axclass.unoverload_const thy (c, ty);
(*permissive wrt. overloaded constants!*)
in (c', ty) end;
fun const_eqn thy = fst o const_typ_eqn thy;
fun const_abs_eqn thy = Axclass.unoverload_const thy o dest_Const o fst o strip_comb o snd
o dest_comb o fst o Logic.dest_equals o Thm.plain_prop_of;
fun mk_proj tyco vs ty abs rep =
let
val ty_abs = Type (tyco, map TFree vs);
val xarg = Var (("x", 0), ty);
in Logic.mk_equals (Const (rep, ty_abs --> ty) $ (Const (abs, ty --> ty_abs) $ xarg), xarg) end;
(* technical transformations of code equations *)
fun meta_rewrite thy = Local_Defs.meta_rewrite_rule (Proof_Context.init_global thy);
fun same_arity thy thms =
let
val lhs_rhss = map (Logic.dest_equals o Thm.plain_prop_of) thms;
val k = fold (Integer.max o length o snd o strip_comb o fst) lhs_rhss 0;
fun expand_eta (lhs, rhs) thm =
let
val l = k - length (snd (strip_comb lhs));
val (raw_vars, _) = Term.strip_abs_eta l rhs;
val vars = burrow_fst (Name.variant_list (map (fst o fst) (Term.add_vars lhs [])))
raw_vars;
fun expand (v, ty) thm = Drule.fun_cong_rule thm
(Thm.global_cterm_of thy (Var ((v, 0), ty)));
in
thm
|> fold expand vars
|> Conv.fconv_rule Drule.beta_eta_conversion
end;
in map2 expand_eta lhs_rhss thms end;
fun mk_desymbolization pre post mk vs =
let
val names = map (pre o fst o fst) vs
|> map (Name.desymbolize (SOME false))
|> Name.variant_list []
|> map post;
in map_filter (fn (((v, i), x), v') =>
if v = v' andalso i = 0 then NONE
else SOME (((v, i), x), mk ((v', 0), x))) (vs ~~ names)
end;
fun desymbolize_tvars thy thms =
let
val tvs = build (fold (Term.add_tvars o Thm.prop_of) thms);
val instT =
mk_desymbolization (unprefix "'") (prefix "'") (Thm.global_ctyp_of thy o TVar) tvs;
in map (Thm.instantiate (TVars.make instT, Vars.empty)) thms end;
fun desymbolize_vars thy thm =
let
val vs = Term.add_vars (Thm.prop_of thm) [];
val inst = mk_desymbolization I I (Thm.global_cterm_of thy o Var) vs;
in Thm.instantiate (TVars.empty, Vars.make inst) thm end;
fun canonize_thms thy = desymbolize_tvars thy #> same_arity thy #> map (desymbolize_vars thy);
(* preparation and classification of code equations *)
fun prep_eqn strictness thy =
apfst (meta_rewrite thy)
#> generic_assert_eqn strictness thy false
#> Option.map (fn eqn => (const_eqn thy (fst eqn), eqn));
fun prep_eqns strictness thy =
map_filter (prep_eqn strictness thy)
#> AList.group (op =);
fun prep_abs_eqn strictness thy =
meta_rewrite thy
#> generic_assert_abs_eqn strictness thy NONE
#> Option.map (fn abs_eqn => (const_abs_eqn thy (fst abs_eqn), abs_eqn));
fun prep_maybe_abs_eqn thy raw_thm =
let
val thm = meta_rewrite thy raw_thm;
val some_abs_thm = generic_assert_abs_eqn Silent thy NONE thm;
in case some_abs_thm of
SOME (thm, tyco) => SOME (const_abs_eqn thy thm, ((thm, true), SOME tyco))
| NONE => generic_assert_eqn Liberal thy false (thm, false)
|> Option.map (fn (thm, _) => (const_eqn thy thm, ((thm, is_linear thm), NONE)))
end;
(* abstype certificates *)
local
fun raw_abstype_cert thy proto_thm =
let
val thm = (Axclass.unoverload (Proof_Context.init_global thy) o meta_rewrite thy) proto_thm;
val (lhs, rhs) = Logic.dest_equals (Thm.plain_prop_of thm)
handle TERM _ => bad_thm "Not an equation"
| THM _ => bad_thm "Not a proper equation";
val ((abs, raw_ty), ((rep, rep_ty), param)) = (apsnd (apfst dest_Const o dest_comb)
o apfst dest_Const o dest_comb) lhs
handle TERM _ => bad_thm "Not an abstype certificate";
val _ = apply2 (fn c => if (is_some o Axclass.class_of_param thy) c
then error ("Is a class parameter: " ^ string_of_const thy c) else ()) (abs, rep);
val _ = check_decl_ty thy (abs, raw_ty);
val _ = check_decl_ty thy (rep, rep_ty);
val _ = if length (binder_types raw_ty) = 1
then ()
else bad_thm "Bad type for abstract constructor";
val _ = (fst o dest_Var) param
handle TERM _ => bad_thm "Not an abstype certificate";
val _ = if param = rhs then () else bad_thm "Not an abstype certificate";
val ((tyco, sorts), (abs, (vs, ty'))) =
analyze_constructor thy (abs, devarify raw_ty);
val ty = domain_type ty';
val (vs', _) = typscheme thy (abs, ty');
in (tyco, (vs ~~ sorts, ((abs, (vs', ty)), (rep, thm)))) end;
in
fun check_abstype_cert strictness thy proto_thm =
handle_strictness I (raw_abstype_cert thy) strictness thy proto_thm;
end;
(* code equation certificates *)
fun build_head thy (c, ty) =
Thm.global_cterm_of thy (Logic.mk_equals (Free ("HEAD", ty), Const (c, ty)));
fun get_head thy cert_thm =
let
val [head] = Thm.chyps_of cert_thm;
val (_, Const (c, ty)) = (Logic.dest_equals o Thm.term_of) head;
in (typscheme thy (c, ty), head) end;
fun typscheme_projection thy =
typscheme thy o dest_Const o fst o dest_comb o fst o Logic.dest_equals;
fun typscheme_abs thy =
typscheme thy o dest_Const o fst o strip_comb o snd o dest_comb o fst o Logic.dest_equals o Thm.prop_of;
fun constrain_thm thy vs sorts thm =
let
val mapping = map2 (fn (v, sort) => fn sort' =>
(v, Sorts.inter_sort (Sign.classes_of thy) (sort, sort'))) vs sorts;
val instT =
TVars.build
(fold2 (fn (v, sort) => fn (_, sort') =>
TVars.add (((v, 0), sort), Thm.global_ctyp_of thy (TFree (v, sort')))) vs mapping);
val subst = (Term.map_types o map_type_tfree)
(fn (v, _) => TFree (v, the (AList.lookup (op =) mapping v)));
in
thm
|> Thm.varifyT_global
|> Thm.instantiate (instT, Vars.empty)
|> pair subst
end;
fun concretify_abs thy tyco abs_thm =
let
val (_, {abstractor = (c_abs, _), abs_rep, ...}) = get_abstype_spec thy tyco;
val lhs = (fst o Logic.dest_equals o Thm.prop_of) abs_thm
val ty = fastype_of lhs;
val ty_abs = (fastype_of o snd o dest_comb) lhs;
val abs = Thm.global_cterm_of thy (Const (c_abs, ty --> ty_abs));
val raw_concrete_thm = Drule.transitive_thm OF [Thm.symmetric abs_rep, Thm.combination (Thm.reflexive abs) abs_thm];
in (c_abs, (Thm.varifyT_global o zero_var_indexes) raw_concrete_thm) end;
fun add_rhss_of_eqn thy t =
let
val (args, rhs) = (apfst (snd o strip_comb) o Logic.dest_equals) t;
fun add_const (Const (c, ty)) = insert (op =) (c, Sign.const_typargs thy (c, ty))
| add_const _ = I
val add_consts = fold_aterms add_const
in add_consts rhs o fold add_consts args end;
val dest_eqn = apfst (snd o strip_comb) o Logic.dest_equals o Logic.unvarify_global;
abstype cert = Nothing of thm
| Equations of thm * bool list
| Projection of term * string
| Abstract of thm * string
with
fun dummy_thm ctxt c =
let
val thy = Proof_Context.theory_of ctxt;
val raw_ty = devarify (const_typ thy c);
val (vs, _) = typscheme thy (c, raw_ty);
val sortargs = case Axclass.class_of_param thy c
of SOME class => [[class]]
| NONE => (case get_type_of_constr_or_abstr thy c
of SOME (tyco, _) => (map snd o fst o the)
(AList.lookup (op =) ((snd o fst o get_type thy) tyco) c)
| NONE => replicate (length vs) []);
val the_sort = the o AList.lookup (op =) (map fst vs ~~ sortargs);
val ty = map_type_tfree (fn (v, _) => TFree (v, the_sort v)) raw_ty
val chead = build_head thy (c, ty);
in Thm.weaken chead Drule.dummy_thm end;
fun nothing_cert ctxt c = Nothing (dummy_thm ctxt c);
fun cert_of_eqns ctxt c [] = Equations (dummy_thm ctxt c, [])
| cert_of_eqns ctxt c raw_eqns =
let
val thy = Proof_Context.theory_of ctxt;
val eqns = burrow_fst (canonize_thms thy) raw_eqns;
val _ = map (assert_eqn thy) eqns;
val (thms, propers) = split_list eqns;
val _ = map (fn thm => if c = const_eqn thy thm then ()
else error ("Wrong head of code equation,\nexpected constant "
^ string_of_const thy c ^ "\n" ^ Thm.string_of_thm_global thy thm)) thms;
val tvars_of = build_rev o Term.add_tvarsT;
val vss = map (tvars_of o snd o head_eqn) thms;
val inter_sorts =
build o fold (curry (Sorts.inter_sort (Sign.classes_of thy)) o snd);
val sorts = map_transpose inter_sorts vss;
val vts = Name.invent_names Name.context Name.aT sorts;
fun instantiate vs =
Thm.instantiate (TVars.make (vs ~~ map (Thm.ctyp_of ctxt o TFree) vts), Vars.empty);
val thms' = map2 instantiate vss thms;
val head_thm = Thm.symmetric (Thm.assume (build_head thy (head_eqn (hd thms'))));
fun head_conv ct = if can Thm.dest_comb ct
then Conv.fun_conv head_conv ct
else Conv.rewr_conv head_thm ct;
val rewrite_head = Conv.fconv_rule (Conv.arg1_conv head_conv);
val cert_thm = Conjunction.intr_balanced (map rewrite_head thms');
in Equations (cert_thm, propers) end;
fun cert_of_proj ctxt proj tyco =
let
val thy = Proof_Context.theory_of ctxt
val (vs, {abstractor = (abs, (_, ty)), projection = proj', ...}) = get_abstype_spec thy tyco;
val _ = if proj = proj' then () else
error ("Wrong head of projection,\nexpected constant " ^ string_of_const thy proj);
in Projection (mk_proj tyco vs ty abs proj, tyco) end;
fun cert_of_abs ctxt tyco c raw_abs_thm =
let
val thy = Proof_Context.theory_of ctxt;
val abs_thm = singleton (canonize_thms thy) raw_abs_thm;
val _ = assert_abs_eqn thy (SOME tyco) abs_thm;
val _ = if c = const_abs_eqn thy abs_thm then ()
else error ("Wrong head of abstract code equation,\nexpected constant "
^ string_of_const thy c ^ "\n" ^ Thm.string_of_thm_global thy abs_thm);
in Abstract (Thm.legacy_freezeT abs_thm, tyco) end;
fun constrain_cert_thm thy sorts cert_thm =
let
val ((vs, _), head) = get_head thy cert_thm;
val (subst, cert_thm') = cert_thm
|> Thm.implies_intr head
|> constrain_thm thy vs sorts;
val head' = Thm.term_of head
|> subst
|> Thm.global_cterm_of thy;
val cert_thm'' = cert_thm'
|> Thm.elim_implies (Thm.assume head');
in cert_thm'' end;
fun constrain_cert thy sorts (Nothing cert_thm) =
Nothing (constrain_cert_thm thy sorts cert_thm)
| constrain_cert thy sorts (Equations (cert_thm, propers)) =
Equations (constrain_cert_thm thy sorts cert_thm, propers)
| constrain_cert _ _ (cert as Projection _) =
cert
| constrain_cert thy sorts (Abstract (abs_thm, tyco)) =
Abstract (snd (constrain_thm thy (fst (typscheme_abs thy abs_thm)) sorts abs_thm), tyco);
fun conclude_cert (Nothing cert_thm) =
- Nothing (Thm.close_derivation \<^here> cert_thm)
+ Nothing (Thm.close_derivation \<^here> cert_thm |> Thm.trim_context)
| conclude_cert (Equations (cert_thm, propers)) =
- Equations (Thm.close_derivation \<^here> cert_thm, propers)
+ Equations (Thm.close_derivation \<^here> cert_thm |> Thm.trim_context, propers)
| conclude_cert (cert as Projection _) =
cert
| conclude_cert (Abstract (abs_thm, tyco)) =
- Abstract (Thm.close_derivation \<^here> abs_thm, tyco);
+ Abstract (Thm.close_derivation \<^here> abs_thm |> Thm.trim_context, tyco);
fun typscheme_of_cert thy (Nothing cert_thm) =
fst (get_head thy cert_thm)
| typscheme_of_cert thy (Equations (cert_thm, _)) =
fst (get_head thy cert_thm)
| typscheme_of_cert thy (Projection (proj, _)) =
typscheme_projection thy proj
| typscheme_of_cert thy (Abstract (abs_thm, _)) =
typscheme_abs thy abs_thm;
fun typargs_deps_of_cert thy (Nothing cert_thm) =
let
val vs = (fst o fst) (get_head thy cert_thm);
in (vs, []) end
| typargs_deps_of_cert thy (Equations (cert_thm, propers)) =
let
val vs = (fst o fst) (get_head thy cert_thm);
val equations = if null propers then [] else
Thm.prop_of cert_thm
|> Logic.dest_conjunction_balanced (length propers);
in (vs, build (fold (add_rhss_of_eqn thy) equations)) end
| typargs_deps_of_cert thy (Projection (t, _)) =
(fst (typscheme_projection thy t), add_rhss_of_eqn thy t [])
| typargs_deps_of_cert thy (Abstract (abs_thm, tyco)) =
let
val vs = fst (typscheme_abs thy abs_thm);
val (_, concrete_thm) = concretify_abs thy tyco abs_thm;
in (vs, add_rhss_of_eqn thy (Logic.unvarify_types_global (Thm.prop_of concrete_thm)) []) end;
fun equations_of_cert thy (cert as Nothing _) =
(typscheme_of_cert thy cert, NONE)
| equations_of_cert thy (cert as Equations (cert_thm, propers)) =
let
val tyscm = typscheme_of_cert thy cert;
val thms = if null propers then [] else
cert_thm
+ |> Thm.transfer thy
|> Local_Defs.expand [snd (get_head thy cert_thm)]
|> Thm.varifyT_global
|> Conjunction.elim_balanced (length propers);
fun abstractions (args, rhs) = (map (pair NONE) args, (NONE, rhs));
in (tyscm, SOME (map (abstractions o dest_eqn o Thm.prop_of) thms ~~ (map SOME thms ~~ propers))) end
| equations_of_cert thy (Projection (t, tyco)) =
let
val (_, {abstractor = (abs, _), ...}) = get_abstype_spec thy tyco;
val tyscm = typscheme_projection thy t;
val t' = Logic.varify_types_global t;
fun abstractions (args, rhs) = (map (pair (SOME abs)) args, (NONE, rhs));
in (tyscm, SOME [((abstractions o dest_eqn) t', (NONE, true))]) end
| equations_of_cert thy (Abstract (abs_thm, tyco)) =
let
val tyscm = typscheme_abs thy abs_thm;
- val (abs, concrete_thm) = concretify_abs thy tyco abs_thm;
+ val (abs, concrete_thm) = concretify_abs thy tyco (Thm.transfer thy abs_thm);
fun abstractions (args, rhs) = (map (pair NONE) args, (SOME abs, rhs));
in
(tyscm, SOME [((abstractions o dest_eqn o Thm.prop_of) concrete_thm,
(SOME (Thm.varifyT_global abs_thm), true))])
end;
fun pretty_cert _ (Nothing _) =
[]
| pretty_cert thy (cert as Equations _) =
(map_filter
(Option.map (Thm.pretty_thm_global thy o
Axclass.overload (Proof_Context.init_global thy)) o fst o snd)
o these o snd o equations_of_cert thy) cert
| pretty_cert thy (Projection (t, _)) =
[Syntax.pretty_term_global thy (Logic.varify_types_global t)]
| pretty_cert thy (Abstract (abs_thm, _)) =
[(Thm.pretty_thm_global thy o
Axclass.overload (Proof_Context.init_global thy) o Thm.varifyT_global) abs_thm];
end;
(* code certificate access with preprocessing *)
fun eqn_conv conv ct =
let
fun lhs_conv ct = if can Thm.dest_comb ct
then Conv.combination_conv lhs_conv conv ct
else Conv.all_conv ct;
in Conv.combination_conv (Conv.arg_conv lhs_conv) conv ct end;
fun rewrite_eqn conv ctxt =
singleton (Variable.trade (K (map (Conv.fconv_rule (conv (Simplifier.rewrite ctxt))))) ctxt)
fun apply_functrans ctxt functrans =
let
fun trace_eqns s eqns = (Pretty.writeln o Pretty.chunks)
(Pretty.str s :: map (Thm.pretty_thm ctxt o fst) eqns);
val tracing = if Config.get ctxt simp_trace then trace_eqns else (K o K) ();
in
tap (tracing "before function transformation")
#> (perhaps o perhaps_loop o perhaps_apply) functrans
#> tap (tracing "after function transformation")
end;
fun preprocess conv ctxt =
rewrite_eqn conv ctxt
#> Axclass.unoverload ctxt;
fun get_cert ctxt functrans c =
case lookup_proper_fun_spec (specs_of (Proof_Context.theory_of ctxt)) c of
NONE => nothing_cert ctxt c
| SOME (Eqns (_, eqns)) => eqns
|> (map o apfst) (Thm.transfer' ctxt)
|> apply_functrans ctxt functrans
|> (map o apfst) (preprocess eqn_conv ctxt)
|> cert_of_eqns ctxt c
| SOME (Proj (_, (tyco, _))) => cert_of_proj ctxt c tyco
| SOME (Abstr (abs_thm, (tyco, _))) => abs_thm
|> Thm.transfer' ctxt
|> preprocess Conv.arg_conv ctxt
|> cert_of_abs ctxt tyco c;
(* case certificates *)
local
fun raw_case_cert thm =
let
val ((head, raw_case_expr), cases) = (apfst Logic.dest_equals
o apsnd Logic.dest_conjunctions o Logic.dest_implies o Thm.plain_prop_of) thm;
val _ = case head of Free _ => ()
| Var _ => ()
| _ => raise TERM ("case_cert", []);
val ([(case_var, _)], case_expr) = Term.strip_abs_eta 1 raw_case_expr;
val (Const (case_const, _), raw_params) = strip_comb case_expr;
val n = find_index (fn Free (v, _) => v = case_var | _ => false) raw_params;
val _ = if n = ~1 then raise TERM ("case_cert", []) else ();
val params = map (fst o dest_Var) (nth_drop n raw_params);
fun dest_case t =
let
val (head' $ t_co, rhs) = Logic.dest_equals t;
val _ = if head' = head then () else raise TERM ("case_cert", []);
val (Const (co, _), args) = strip_comb t_co;
val (Var (param, _), args') = strip_comb rhs;
val _ = if args' = args then () else raise TERM ("case_cert", []);
in (param, co) end;
fun analyze_cases cases =
let
val co_list = build (fold (AList.update (op =) o dest_case) cases);
in map (AList.lookup (op =) co_list) params end;
fun analyze_let t =
let
val (head' $ arg, Var (param', _) $ arg') = Logic.dest_equals t;
val _ = if head' = head then () else raise TERM ("case_cert", []);
val _ = if arg' = arg then () else raise TERM ("case_cert", []);
val _ = if [param'] = params then () else raise TERM ("case_cert", []);
in [] end;
fun analyze (cases as [let_case]) =
(analyze_cases cases handle Bind => analyze_let let_case)
| analyze cases = analyze_cases cases;
in (case_const, (n, analyze cases)) end;
in
fun case_cert thm = raw_case_cert thm
handle Bind => error "bad case certificate"
| TERM _ => error "bad case certificate";
end;
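(*
  Expected shape of a case certificate, as reconstructed from raw_case_cert
  above (an informal sketch, not an authoritative specification):

    f == (%x. case_const ?f1 ... x ... ?fn)
      ==> (f (Co1 args1) == ?f1 args1 &&& ... &&& f (Con argsn) == ?fn argsn)

  where f is a Free or Var, each Coi is a constructor applied to its
  arguments, and each right-hand side applies the corresponding case
  parameter to the same arguments; the single-premise "let" form
  f arg == ?f1 arg is accepted as well.
*)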
fun lookup_case_spec thy = History.lookup ((cases_of o specs_of) thy);
fun get_case_schema thy c = case lookup_case_spec thy c of
SOME (Case {schema, ...}) => SOME schema
| _ => NONE;
fun get_case_cong thy c = case lookup_case_spec thy c of
SOME (Case {cong, ...}) => SOME cong
| _ => NONE;
fun is_undefined thy c = case lookup_case_spec thy c of
SOME Undefined => true
| _ => false;
(* diagnostic *)
fun print_codesetup thy =
let
val ctxt = Proof_Context.init_global thy;
val specs = specs_of thy;
fun pretty_equations const thms =
(Pretty.block o Pretty.fbreaks)
(Pretty.str (string_of_const thy const) :: map (Thm.pretty_thm_item ctxt) thms);
fun pretty_function (const, Eqns (_, eqns)) =
pretty_equations const (map fst eqns)
| pretty_function (const, Proj (proj, _)) = Pretty.block
[Pretty.str (string_of_const thy const), Pretty.fbrk, Syntax.pretty_term ctxt proj]
| pretty_function (const, Abstr (thm, _)) = pretty_equations const [thm];
fun pretty_typ (tyco, vs) = Pretty.str
(string_of_typ thy (Type (tyco, map TFree vs)));
fun pretty_type_spec (typ, (cos, abstract)) = if null cos
then pretty_typ typ
else (Pretty.block o Pretty.breaks) (
pretty_typ typ
:: Pretty.str "="
:: (if abstract then [Pretty.str "(abstract)"] else [])
@ separate (Pretty.str "|") (map (fn (c, (_, [])) => Pretty.str (string_of_const thy c)
| (c, (_, tys)) =>
(Pretty.block o Pretty.breaks)
(Pretty.str (string_of_const thy c)
:: Pretty.str "of"
:: map (Pretty.quote o Syntax.pretty_typ_global thy) tys)) cos)
);
fun pretty_case_param NONE = "<ignored>"
| pretty_case_param (SOME (c, _)) = string_of_const thy c
fun pretty_case (const, Case {schema = (_, (_, [])), ...}) =
Pretty.str (string_of_const thy const)
| pretty_case (const, Case {schema = (_, (_, cos)), ...}) =
(Pretty.block o Pretty.breaks) [
Pretty.str (string_of_const thy const), Pretty.str "with",
(Pretty.block o Pretty.commas o map (Pretty.str o pretty_case_param)) cos]
| pretty_case (const, Undefined) =
(Pretty.block o Pretty.breaks) [
Pretty.str (string_of_const thy const), Pretty.str "<undefined>"];
val functions = all_fun_specs specs
|> sort (string_ord o apply2 fst);
val types = History.all (types_of specs)
|> map (fn (tyco, (vs, spec)) =>
((tyco, vs), constructors_of spec))
|> sort (string_ord o apply2 (fst o fst));
val cases = History.all (cases_of specs)
|> filter (fn (_, No_Case) => false | _ => true)
|> sort (string_ord o apply2 fst);
in
Pretty.writeln_chunks [
Pretty.block (
Pretty.str "types:" :: Pretty.fbrk
:: (Pretty.fbreaks o map pretty_type_spec) types
),
Pretty.block (
Pretty.str "functions:" :: Pretty.fbrk
:: (Pretty.fbreaks o map pretty_function) functions
),
Pretty.block (
Pretty.str "cases:" :: Pretty.fbrk
:: (Pretty.fbreaks o map pretty_case) cases
)
]
end;
(** declaration of executable ingredients **)
(* plugins for dependent applications *)
structure Codetype_Plugin = Plugin(type T = string);
val codetype_plugin = Plugin_Name.declare_setup \<^binding>\<open>codetype\<close>;
fun type_interpretation f =
Codetype_Plugin.interpretation codetype_plugin
(fn tyco => Local_Theory.background_theory
(fn thy =>
thy
|> Sign.root_path
|> Sign.add_path (Long_Name.qualifier tyco)
|> f tyco
|> Sign.restore_naming thy));
fun datatype_interpretation f =
type_interpretation (fn tyco => fn thy =>
case get_type thy tyco of
(spec, false) => f (tyco, spec) thy
| (_, true) => thy
);
fun abstype_interpretation f =
type_interpretation (fn tyco => fn thy =>
case try (get_abstype_spec thy) tyco of
SOME spec => f (tyco, spec) thy
| NONE => thy
);
fun register_tyco_for_plugin tyco =
Named_Target.theory_map (Codetype_Plugin.data_default tyco);
(* abstract code declarations *)
-local
-
-fun generic_code_declaration strictness lift_phi f x =
- Local_Theory.declaration
- {syntax = false, pervasive = false}
+fun code_declaration strictness lift_phi f x =
+ Local_Theory.declaration {syntax = false, pervasive = false, pos = Position.thread_data ()}
(fn phi => Context.mapping (f strictness (lift_phi phi x)) I);
-in
-
-fun silent_code_declaration lift_phi = generic_code_declaration Silent lift_phi;
-fun code_declaration lift_phi = generic_code_declaration Liberal lift_phi;
-
-end;
-
(* types *)
fun invalidate_constructors_of (_, type_spec) =
fold (fn (c, _) => History.register c unimplemented) (fst (constructors_of type_spec));
fun invalidate_abstract_functions_of (_, type_spec) =
fold (fn c => History.register c unimplemented) (abstract_functions_of type_spec);
fun invalidate_case_combinators_of (_, type_spec) =
fold (fn c => History.register c No_Case) (case_combinators_of type_spec);
fun register_type (tyco, vs_typ_spec) specs =
let
val olds = the_list (History.lookup (types_of specs) tyco);
in
specs
|> map_functions (fold invalidate_abstract_functions_of olds
#> invalidate_constructors_of vs_typ_spec)
|> map_cases (fold invalidate_case_combinators_of olds)
|> map_types (History.register tyco vs_typ_spec)
end;
fun declare_datatype_global proto_constrs thy =
let
fun unoverload_const_typ (c, ty) =
(Axclass.unoverload_const thy (c, ty), ty);
val constrs = map unoverload_const_typ proto_constrs;
val (tyco, (vs, cos)) = constrset_of_consts thy constrs;
in
thy
|> modify_specs (register_type
(tyco, (vs, Constructors {constructors = cos, case_combinators = []})))
|> register_tyco_for_plugin tyco
end;
fun declare_datatype_cmd raw_constrs thy =
declare_datatype_global (map (read_bare_const thy) raw_constrs) thy;
fun generic_declare_abstype strictness proto_thm thy =
case check_abstype_cert strictness thy proto_thm of
SOME (tyco, (vs, (abstractor as (abs, (_, ty)), (proj, abs_rep)))) =>
thy
|> modify_specs (register_type
- (tyco, (vs, Abstractor {abstractor = abstractor, projection = proj, abs_rep = abs_rep, more_abstract_functions = []}))
+ (tyco, (vs, Abstractor {abstractor = abstractor, projection = proj,
+ abs_rep = Thm.trim_context abs_rep, more_abstract_functions = []}))
#> register_fun_spec proj
(Proj (Logic.varify_types_global (mk_proj tyco vs ty abs proj), (tyco, abs))))
|> register_tyco_for_plugin tyco
| NONE => thy;
val declare_abstype_global = generic_declare_abstype Strict;
-
-val declare_abstype =
- code_declaration Morphism.thm generic_declare_abstype;
+val declare_abstype = code_declaration Liberal Morphism.thm generic_declare_abstype;
(* functions *)
(*
strictness wrt. shape of theorem propositions:
* default equations: silent
* using declarations and attributes: warnings (after morphism application!)
* using global declarations (... -> thy -> thy): strict
* internal processing after storage: strict
*)
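(* Concretely, in the definitions below: generic_declare_eqns true is used
   with Silent for default equations (declare_default_eqns[_global]),
   generic_declare_eqns false with Strict for declare_eqns_global and with
   Liberal for declare_eqns, and the code attribute parser further down uses
   Liberal throughout. *)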
local
fun subsumptive_add thy verbose (thm, proper) eqns =
let
val args_of = drop_prefix is_Var o rev o snd o strip_comb
o Term.map_types Type.strip_sorts o fst o Logic.dest_equals o Thm.plain_prop_of
o Thm.transfer thy;
val args = args_of thm;
val incr_idx = Logic.incr_indexes ([], [], Thm.maxidx_of thm + 1);
fun matches_args args' =
let
val k = length args' - length args
in if k >= 0
then Pattern.matchess thy (args, (map incr_idx o drop k) args')
else false
end;
fun drop (thm', proper') = if (proper orelse not proper')
andalso matches_args (args_of thm') then
(if verbose then warning ("Code generator: dropping subsumed code equation\n" ^
Thm.string_of_thm_global thy thm') else (); true)
else false;
in (thm |> Thm.close_derivation \<^here> |> Thm.trim_context, proper) :: filter_out drop eqns end;
fun add_eqn_for (c, eqn) thy =
thy |> modify_specs (modify_pending_eqns c (subsumptive_add thy true eqn));
fun add_eqns_for default (c, proto_eqns) thy =
thy |> modify_specs (fn specs =>
if is_default (lookup_fun_spec specs c) orelse not default
then
let
val eqns = []
|> fold_rev (subsumptive_add thy (not default)) proto_eqns;
in specs |> register_fun_spec c (Eqns (default, eqns)) end
else specs);
fun add_abstract_for (c, (thm, tyco_abs as (tyco, _))) =
- modify_specs (register_fun_spec c (Abstr (Thm.close_derivation \<^here> thm, tyco_abs))
+ modify_specs (register_fun_spec c (Abstr (Thm.close_derivation \<^here> thm |> Thm.trim_context, tyco_abs))
#> map_types (History.modify_entry tyco (add_abstract_function c)))
in
fun generic_declare_eqns default strictness raw_eqns thy =
fold (add_eqns_for default) (prep_eqns strictness thy raw_eqns) thy;
fun generic_add_eqn strictness raw_eqn thy =
fold add_eqn_for (the_list (prep_eqn strictness thy raw_eqn)) thy;
fun generic_declare_abstract_eqn strictness raw_abs_eqn thy =
fold add_abstract_for (the_list (prep_abs_eqn strictness thy raw_abs_eqn)) thy;
fun add_maybe_abs_eqn_liberal thm thy =
case prep_maybe_abs_eqn thy thm
of SOME (c, (eqn, NONE)) => add_eqn_for (c, eqn) thy
| SOME (c, ((thm, _), SOME tyco)) => add_abstract_for (c, (thm, tyco)) thy
| NONE => thy;
end;
val declare_default_eqns_global = generic_declare_eqns true Silent;
-
-val declare_default_eqns =
- silent_code_declaration (map o apfst o Morphism.thm) (generic_declare_eqns true);
+val declare_default_eqns = code_declaration Silent (map o apfst o Morphism.thm) (generic_declare_eqns true);
val declare_eqns_global = generic_declare_eqns false Strict;
-
-val declare_eqns =
- code_declaration (map o apfst o Morphism.thm) (generic_declare_eqns false);
+val declare_eqns = code_declaration Liberal (map o apfst o Morphism.thm) (generic_declare_eqns false);
val add_eqn_global = generic_add_eqn Strict;
fun del_eqn_global thm thy =
case prep_eqn Liberal thy (thm, false) of
SOME (c, (thm, _)) =>
modify_specs (modify_pending_eqns c (filter_out (fn (thm', _) => Thm.eq_thm_prop (thm, thm')))) thy
| NONE => thy;
val declare_abstract_eqn_global = generic_declare_abstract_eqn Strict;
-
-val declare_abstract_eqn =
- code_declaration Morphism.thm generic_declare_abstract_eqn;
+val declare_abstract_eqn = code_declaration Liberal Morphism.thm generic_declare_abstract_eqn;
fun declare_aborting_global c =
modify_specs (register_fun_spec c aborting);
fun declare_unimplemented_global c =
modify_specs (register_fun_spec c unimplemented);
(* cases *)
fun case_cong thy case_const (num_args, (pos, _)) =
let
val ([x, y], ctxt) = fold_map Name.variant ["A", "A'"] Name.context;
val (zs, _) = fold_map Name.variant (replicate (num_args - 1) "") ctxt;
val (ws, vs) = chop pos zs;
val T = devarify (const_typ thy case_const);
val Ts = binder_types T;
val T_cong = nth Ts pos;
fun mk_prem z = Free (z, T_cong);
fun mk_concl z = list_comb (Const (case_const, T), map2 (curry Free) (ws @ z :: vs) Ts);
val (prem, concl) = apply2 Logic.mk_equals (apply2 mk_prem (x, y), apply2 mk_concl (x, y));
in
Goal.prove_sorry_global thy (x :: y :: zs) [prem] concl
(fn {context = ctxt', prems} =>
Simplifier.rewrite_goals_tac ctxt' prems
THEN ALLGOALS (Proof_Context.fact_tac ctxt' [Drule.reflexive_thm]))
end;
fun declare_case_global thm thy =
let
val (case_const, (k, cos)) = case_cert thm;
fun get_type_of_constr c = case get_type_of_constr_or_abstr thy c of
SOME (c, false) => SOME c
| _ => NONE;
val cos_with_tycos =
(map_filter o Option.map) (fn c => (c, get_type_of_constr c)) cos;
val _ = case map_filter (fn (c, NONE) => SOME c | _ => NONE) cos_with_tycos of
[] => ()
| cs => error ("Non-constructor(s) in case certificate: " ^ commas_quote cs);
val tycos = distinct (op =) (map_filter snd cos_with_tycos);
val schema = (1 + Int.max (1, length cos),
(k, (map o Option.map) (fn c => (c, args_number thy c)) cos));
val cong = case_cong thy case_const schema;
in
thy
|> modify_specs (map_cases (History.register case_const
(Case {schema = schema, tycos = tycos, cong = cong}))
#> map_types (fold (fn tyco => History.modify_entry tyco
(add_case_combinator case_const)) tycos))
end;
fun declare_undefined_global c =
(modify_specs o map_cases) (History.register c Undefined);
(* attributes *)
fun code_attribute f = Thm.declaration_attribute
(fn thm => Context.mapping (f thm) I);
fun code_thm_attribute g f =
Scan.lift (g |-- Scan.succeed (code_attribute f));
fun code_const_attribute g f =
Scan.lift (g -- Args.colon) |-- Scan.repeat1 Args.term
>> (fn ts => code_attribute (K (fold (fn t => fn thy => f ((check_const thy o Logic.unvarify_types_global) t) thy) ts)));
val _ = Theory.setup
(let
val code_attribute_parser =
code_thm_attribute (Args.$$$ "equation")
(fn thm => generic_add_eqn Liberal (thm, true))
|| code_thm_attribute (Args.$$$ "nbe")
(fn thm => generic_add_eqn Liberal (thm, false))
|| code_thm_attribute (Args.$$$ "abstract")
(generic_declare_abstract_eqn Liberal)
|| code_thm_attribute (Args.$$$ "abstype")
(generic_declare_abstype Liberal)
|| code_thm_attribute Args.del
del_eqn_global
|| code_const_attribute (Args.$$$ "abort")
declare_aborting_global
|| code_const_attribute (Args.$$$ "drop")
declare_unimplemented_global
|| Scan.succeed (code_attribute
add_maybe_abs_eqn_liberal);
in
Attrib.setup \<^binding>\<open>code\<close> code_attribute_parser
"declare theorems for code generation"
end);
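(* Isar-level usage sketch for the "code" attribute set up above (fact names
   are illustrative):

     declare foo_simps [code equation]
     declare foo_unfold [code nbe]
     lemma [code abstype]: "Abs_T (Rep_T x) = x"
     declare obsolete_eq [code del]

   With no explicit argument, [code] falls through to
   add_maybe_abs_eqn_liberal, which decides between a plain and an abstract
   code equation. *)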
end; (*struct*)
(* type-safe interfaces for data dependent on executable code *)
functor Code_Data(Data: CODE_DATA_ARGS): CODE_DATA =
struct
type T = Data.T;
exception Data of T;
fun dest (Data x) = x
val kind = Code.declare_data (Data Data.empty);
val data_op = (kind, Data, dest);
fun change_yield (SOME thy) f = Code.change_yield_data data_op thy f
| change_yield NONE f = f Data.empty
fun change some_thy f = snd (change_yield some_thy (pair () o f));
end;
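(* Hypothetical instantiation sketch (structure name and payload invented):

     structure My_Code_Data = Code_Data
     (
       type T = term list;
       val empty = [];
     );

   Clients then read and update their slot via My_Code_Data.change_yield
   (passing SOME thy, or NONE for the empty default) and My_Code_Data.change. *)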
structure Code : CODE = struct open Code; end;
diff --git a/src/Pure/Isar/element.ML b/src/Pure/Isar/element.ML
--- a/src/Pure/Isar/element.ML
+++ b/src/Pure/Isar/element.ML
@@ -1,473 +1,494 @@
(* Title: Pure/Isar/element.ML
Author: Makarius
Explicit data structures for some Isar language elements, with derived
logical operations.
*)
signature ELEMENT =
sig
type ('typ, 'term) obtain = binding * ((binding * 'typ option * mixfix) list * 'term list)
type obtains = (string, string) obtain list
type obtains_i = (typ, term) obtain list
datatype ('typ, 'term) stmt =
Shows of (Attrib.binding * ('term * 'term list) list) list |
Obtains of ('typ, 'term) obtain list
type statement = (string, string) stmt
type statement_i = (typ, term) stmt
datatype ('typ, 'term, 'fact) ctxt =
Fixes of (binding * 'typ option * mixfix) list |
Constrains of (string * 'typ) list |
Assumes of (Attrib.binding * ('term * 'term list) list) list |
Defines of (Attrib.binding * ('term * 'term list)) list |
Notes of string * (Attrib.binding * ('fact * Token.src list) list) list |
Lazy_Notes of string * (binding * 'fact lazy)
type context = (string, string, Facts.ref) ctxt
type context_i = (typ, term, thm list) ctxt
val map_ctxt: {binding: binding -> binding, typ: 'typ -> 'a, term: 'term -> 'b,
pattern: 'term -> 'b, fact: 'fact -> 'c, attrib: Token.src -> Token.src} ->
('typ, 'term, 'fact) ctxt -> ('a, 'b, 'c) ctxt
val map_ctxt_attrib: (Token.src -> Token.src) ->
('typ, 'term, 'fact) ctxt -> ('typ, 'term, 'fact) ctxt
+ val trim_context_ctxt: context_i -> context_i
+ val transfer_ctxt: theory -> context_i -> context_i
val transform_ctxt: morphism -> context_i -> context_i
val pretty_stmt: Proof.context -> statement_i -> Pretty.T list
val pretty_ctxt: Proof.context -> context_i -> Pretty.T list
val pretty_ctxt_no_attribs: Proof.context -> context_i -> Pretty.T list
val pretty_statement: Proof.context -> string -> thm -> Pretty.T
type witness
val prove_witness: Proof.context -> term -> tactic -> witness
val witness_proof: (witness list list -> Proof.context -> Proof.context) ->
term list list -> Proof.context -> Proof.state
val witness_proof_eqs: (witness list list -> thm list -> Proof.context -> Proof.context) ->
term list list -> term list -> Proof.context -> Proof.state
val witness_local_proof: (witness list list -> Proof.state -> Proof.state) ->
string -> term list list -> Proof.context -> Proof.state -> Proof.state
val witness_local_proof_eqs: (witness list list -> thm list -> Proof.state -> Proof.state) ->
string -> term list list -> term list -> Proof.context -> Proof.state -> Proof.state
val transform_witness: morphism -> witness -> witness
val conclude_witness: Proof.context -> witness -> thm
val pretty_witness: Proof.context -> witness -> Pretty.T
val instantiate_normalize_morphism: ctyp TFrees.table * cterm Frees.table -> morphism
val satisfy_morphism: witness list -> morphism
- val eq_term_morphism: theory -> term list -> morphism option
- val eq_morphism: theory -> thm list -> morphism option
+ val eq_term_morphism: term list -> morphism option
+ val eq_morphism: thm list -> morphism option
val init: context_i -> Context.generic -> Context.generic
val activate_i: context_i -> Proof.context -> context_i * Proof.context
val activate: (typ, term, Facts.ref) ctxt -> Proof.context -> context_i * Proof.context
end;
structure Element: ELEMENT =
struct
(** language elements **)
(* statement *)
type ('typ, 'term) obtain = binding * ((binding * 'typ option * mixfix) list * 'term list);
type obtains = (string, string) obtain list;
type obtains_i = (typ, term) obtain list;
datatype ('typ, 'term) stmt =
Shows of (Attrib.binding * ('term * 'term list) list) list |
Obtains of ('typ, 'term) obtain list;
type statement = (string, string) stmt;
type statement_i = (typ, term) stmt;
(* context *)
datatype ('typ, 'term, 'fact) ctxt =
Fixes of (binding * 'typ option * mixfix) list |
Constrains of (string * 'typ) list |
Assumes of (Attrib.binding * ('term * 'term list) list) list |
Defines of (Attrib.binding * ('term * 'term list)) list |
Notes of string * (Attrib.binding * ('fact * Token.src list) list) list |
Lazy_Notes of string * (binding * 'fact lazy);
type context = (string, string, Facts.ref) ctxt;
type context_i = (typ, term, thm list) ctxt;
fun map_ctxt {binding, typ, term, pattern, fact, attrib} =
fn Fixes fixes => Fixes (fixes |> map (fn (x, T, mx) => (binding x, Option.map typ T, mx)))
| Constrains xs => Constrains (xs |> map (fn (x, T) =>
(Variable.check_name (binding (Binding.name x)), typ T)))
| Assumes asms => Assumes (asms |> map (fn ((a, atts), propps) =>
((binding a, map attrib atts), propps |> map (fn (t, ps) => (term t, map pattern ps)))))
| Defines defs => Defines (defs |> map (fn ((a, atts), (t, ps)) =>
((binding a, map attrib atts), (term t, map pattern ps))))
| Notes (kind, facts) => Notes (kind, facts |> map (fn ((a, atts), bs) =>
((binding a, map attrib atts), bs |> map (fn (ths, btts) => (fact ths, map attrib btts)))))
| Lazy_Notes (kind, (a, ths)) => Lazy_Notes (kind, (binding a, Lazy.map fact ths));
fun map_ctxt_attrib attrib =
map_ctxt {binding = I, typ = I, term = I, pattern = I, fact = I, attrib = attrib};
+val trim_context_ctxt: context_i -> context_i = map_ctxt
+ {binding = I, typ = I, term = I, pattern = I,
+ fact = map Thm.trim_context,
+ attrib = map Token.trim_context};
+
+fun transfer_ctxt thy: context_i -> context_i = map_ctxt
+ {binding = I, typ = I, term = I, pattern = I,
+ fact = map (Thm.transfer thy),
+ attrib = map (Token.transfer thy)};
+
fun transform_ctxt phi = map_ctxt
{binding = Morphism.binding phi,
typ = Morphism.typ phi,
term = Morphism.term phi,
pattern = Morphism.term phi,
fact = Morphism.fact phi,
attrib = map (Token.transform phi)};
(** pretty printing **)
fun pretty_items _ _ [] = []
| pretty_items keyword sep (x :: ys) =
Pretty.block [Pretty.keyword2 keyword, Pretty.brk 1, x] ::
map (fn y => Pretty.block [Pretty.str " ", Pretty.keyword2 sep, Pretty.brk 1, y]) ys;
(* pretty_stmt *)
fun pretty_stmt ctxt =
let
val prt_typ = Pretty.quote o Syntax.pretty_typ ctxt;
val prt_term = Pretty.quote o Syntax.pretty_term ctxt;
val prt_terms = separate (Pretty.keyword2 "and") o map prt_term;
val prt_binding = Attrib.pretty_binding ctxt;
val prt_name = Proof_Context.pretty_name ctxt;
fun prt_show (a, ts) =
Pretty.block (Pretty.breaks (prt_binding a ":" @ prt_terms (map fst ts)));
fun prt_var (x, SOME T, _) = Pretty.block
[prt_name (Binding.name_of x), Pretty.str " ::", Pretty.brk 1, prt_typ T]
| prt_var (x, NONE, _) = prt_name (Binding.name_of x);
val prt_vars = separate (Pretty.keyword2 "and") o map prt_var;
fun prt_obtain (_, ([], props)) = Pretty.block (Pretty.breaks (prt_terms props))
| prt_obtain (_, (vars, props)) = Pretty.block (Pretty.breaks
(prt_vars vars @ [Pretty.keyword2 "where"] @ prt_terms props));
in
fn Shows shows => pretty_items "shows" "and" (map prt_show shows)
| Obtains obtains => pretty_items "obtains" "|" (map prt_obtain obtains)
end;
(* pretty_ctxt *)
fun gen_pretty_ctxt show_attribs ctxt =
let
val prt_typ = Pretty.quote o Syntax.pretty_typ ctxt;
val prt_term = Pretty.quote o Syntax.pretty_term ctxt;
val prt_thm = Pretty.cartouche o Thm.pretty_thm ctxt;
val prt_name = Proof_Context.pretty_name ctxt;
fun prt_binding (b, atts) =
Attrib.pretty_binding ctxt (b, if show_attribs then atts else []);
fun prt_fact (ths, atts) =
if not show_attribs orelse null atts then map prt_thm ths
else
Pretty.enclose "(" ")" (Pretty.breaks (map prt_thm ths)) ::
Attrib.pretty_attribs ctxt atts;
fun prt_mixfix NoSyn = []
| prt_mixfix mx = [Pretty.brk 2, Mixfix.pretty_mixfix mx];
fun prt_fix (x, SOME T, mx) = Pretty.block (prt_name (Binding.name_of x) :: Pretty.str " ::" ::
Pretty.brk 1 :: prt_typ T :: prt_mixfix mx)
| prt_fix (x, NONE, mx) = Pretty.block (prt_name (Binding.name_of x) :: prt_mixfix mx);
fun prt_constrain (x, T) = prt_fix (Binding.name x, SOME T, NoSyn);
fun prt_asm (a, ts) =
Pretty.block (Pretty.breaks (prt_binding a ":" @ map (prt_term o fst) ts));
fun prt_def (a, (t, _)) =
Pretty.block (Pretty.breaks (prt_binding a ":" @ [prt_term t]));
fun prt_note (a, ths) =
Pretty.block (Pretty.breaks (flat (prt_binding a " =" :: map prt_fact ths)));
fun notes_kind "" = "notes"
| notes_kind kind = "notes " ^ kind;
in
fn Fixes fixes => pretty_items "fixes" "and" (map prt_fix fixes)
| Constrains xs => pretty_items "constrains" "and" (map prt_constrain xs)
| Assumes asms => pretty_items "assumes" "and" (map prt_asm asms)
| Defines defs => pretty_items "defines" "and" (map prt_def defs)
| Notes (kind, facts) => pretty_items (notes_kind kind) "and" (map prt_note facts)
| Lazy_Notes (kind, (a, ths)) =>
pretty_items (notes_kind kind) "and" [prt_note ((a, []), [(Lazy.force ths, [])])]
end;
val pretty_ctxt = gen_pretty_ctxt true;
val pretty_ctxt_no_attribs = gen_pretty_ctxt false;
(* pretty_statement *)
local
fun standard_elim ctxt th =
(case Object_Logic.elim_concl ctxt th of
SOME C =>
let
val thesis = Var ((Auto_Bind.thesisN, Thm.maxidx_of th + 1), fastype_of C);
val insts = (TVars.empty, Vars.make1 (Term.dest_Var C, Thm.cterm_of ctxt thesis));
val th' = Thm.instantiate insts th;
in (th', true) end
| NONE => (th, false));
fun thm_name ctxt kind th prts =
let val head =
if Thm.has_name_hint th then
Pretty.block [Pretty.keyword1 kind, Pretty.brk 1,
Proof_Context.pretty_name ctxt (Long_Name.base_name (Thm.get_name_hint th)), Pretty.str ":"]
else Pretty.keyword1 kind
in Pretty.block (Pretty.fbreaks (head :: prts)) end;
fun obtain prop ctxt =
let
val ((ps, prop'), ctxt') = Variable.focus NONE prop ctxt;
fun fix (x, T) = (Binding.name (Variable.revert_fixed ctxt' x), SOME T, NoSyn);
val xs = map (fix o #2) ps;
val As = Logic.strip_imp_prems prop';
in ((Binding.empty, (xs, As)), ctxt') end;
in
fun pretty_statement ctxt kind raw_th =
let
val (th, is_elim) = standard_elim ctxt (Raw_Simplifier.norm_hhf ctxt raw_th);
val ((_, [th']), ctxt') = Variable.import true [th] (Variable.set_body true ctxt);
val prop = Thm.prop_of th';
val (prems, concl) = Logic.strip_horn prop;
val concl_term = Object_Logic.drop_judgment ctxt concl;
val (assumes, cases) =
chop_suffix (fn prem => is_elim andalso concl aconv Logic.strip_assums_concl prem) prems;
val is_thesis = if null cases then K false else fn v => v aconv concl_term;
val fixes =
rev (fold_aterms (fn v as Free (x, T) =>
if Variable.is_newly_fixed ctxt' ctxt x andalso not (is_thesis v)
then insert (op =) (Variable.revert_fixed ctxt' x, T) else I | _ => I) prop []);
in
pretty_ctxt ctxt' (Fixes (map (fn (x, T) => (Binding.name x, SOME T, NoSyn)) fixes)) @
pretty_ctxt ctxt' (Assumes (map (fn t => (Binding.empty_atts, [(t, [])])) assumes)) @
(if null cases then pretty_stmt ctxt' (Shows [(Binding.empty_atts, [(concl, [])])])
else
let val (clauses, ctxt'') = fold_map obtain cases ctxt'
in pretty_stmt ctxt'' (Obtains clauses) end)
end |> thm_name ctxt kind raw_th;
end;
(** logical operations **)
(* witnesses -- hypotheses as protected facts *)
datatype witness = Witness of term * thm;
val mark_witness = Logic.protect;
fun witness_prop (Witness (t, _)) = t;
fun witness_hyps (Witness (_, th)) = Thm.hyps_of th;
fun map_witness f (Witness witn) = Witness (f witn);
fun transform_witness phi = map_witness (fn (t, th) => (Morphism.term phi t, Morphism.thm phi th));
fun prove_witness ctxt t tac =
Witness (t,
Goal.prove ctxt [] [] (mark_witness t)
(fn _ => resolve_tac ctxt [Drule.protectI] 1 THEN tac)
- |> Thm.close_derivation \<^here>);
+ |> Thm.close_derivation \<^here>
+ |> Thm.trim_context);
local
val refine_witness =
Proof.refine_singleton (Method.Basic (fn ctxt => CONTEXT_TACTIC o
K (ALLGOALS (CONJUNCTS (ALLGOALS (CONJUNCTS (TRYALL (resolve_tac ctxt [Drule.protectI]))))))));
fun gen_witness_proof proof after_qed wit_propss eq_props =
let
val propss =
(map o map) (fn prop => (mark_witness prop, [])) wit_propss @
[map (rpair []) eq_props];
fun after_qed' thmss =
- let val (wits, eqs) = split_last ((map o map) (Thm.close_derivation \<^here>) thmss);
+ let
+ val (wits, eqs) =
+ split_last ((map o map) (Thm.close_derivation \<^here> #> Thm.trim_context) thmss);
in after_qed ((map2 o map2) (curry Witness) wit_propss wits) eqs end;
in proof after_qed' propss #> refine_witness end;
fun proof_local cmd goal_ctxt after_qed propp =
let
fun after_qed' (result_ctxt, results) state' =
after_qed (burrow (Proof_Context.export result_ctxt (Proof.context_of state')) results) state';
in
Proof.map_context (K goal_ctxt) #>
Proof.internal_goal (K (K ())) (Proof_Context.get_mode goal_ctxt) true cmd
NONE after_qed' [] [] (map (pair Binding.empty_atts) propp) #> snd
end;
in
fun witness_proof after_qed wit_propss =
gen_witness_proof (Proof.theorem NONE) (fn wits => fn _ => after_qed wits)
wit_propss [];
val witness_proof_eqs = gen_witness_proof (Proof.theorem NONE);
fun witness_local_proof after_qed cmd wit_propss goal_ctxt =
gen_witness_proof (proof_local cmd goal_ctxt)
(fn wits => fn _ => after_qed wits) wit_propss [];
fun witness_local_proof_eqs after_qed cmd wit_propss eq_props goal_ctxt =
gen_witness_proof (proof_local cmd goal_ctxt) after_qed wit_propss eq_props;
end;
fun conclude_witness ctxt (Witness (_, th)) =
- Goal.conclude th
+ Goal.conclude (Thm.transfer' ctxt th)
|> Raw_Simplifier.norm_hhf_protect ctxt
|> Thm.close_derivation \<^here>;
fun pretty_witness ctxt witn =
let val prt_term = Pretty.quote o Syntax.pretty_term ctxt in
Pretty.block (prt_term (witness_prop witn) ::
(if Config.get ctxt show_hyps then [Pretty.brk 2, Pretty.list "[" "]"
(map prt_term (witness_hyps witn))] else []))
end;
(* instantiate frees, with beta normalization *)
fun instantiate_normalize_morphism insts =
Morphism.instantiate_frees_morphism insts $>
Morphism.term_morphism "beta_norm" Envir.beta_norm $>
Morphism.thm_morphism "beta_conversion" (Conv.fconv_rule (Thm.beta_conversion true));
(* satisfy hypotheses *)
local
val norm_term = Envir.beta_eta_contract;
val norm_conv = Drule.beta_eta_conversion;
val norm_cterm = Thm.rhs_of o norm_conv;
fun find_witness witns hyp =
(case find_first (fn Witness (t, _) => hyp aconv t) witns of
NONE =>
let val hyp' = norm_term hyp
in find_first (fn Witness (t, _) => hyp' aconv norm_term t) witns end
| some => some);
fun compose_witness (Witness (_, th)) r =
let
- val th' = Goal.conclude th;
+ val th' = Goal.conclude (Thm.transfer (Thm.theory_of_thm r) th);
val A = Thm.cprem_of r 1;
in
Thm.implies_elim
(Conv.gconv_rule norm_conv 1 r)
(Conv.fconv_rule norm_conv
(Thm.instantiate (Thm.match (apply2 norm_cterm (Thm.cprop_of th', A))) th'))
end;
in
fun satisfy_thm witns thm =
(Thm.chyps_of thm, thm) |-> fold (fn hyp =>
(case find_witness witns (Thm.term_of hyp) of
NONE => I
| SOME w => Thm.implies_intr hyp #> compose_witness w));
val satisfy_morphism = Morphism.thm_morphism "Element.satisfy" o satisfy_thm;
end;
(* rewriting with equalities *)
(* for activating declarations only *)
-fun eq_term_morphism _ [] = NONE
- | eq_term_morphism thy props =
+fun eq_term_morphism [] = NONE
+ | eq_term_morphism props =
let
- fun decomp_simp prop =
+ fun decomp_simp ctxt prop =
let
- val ctxt = Proof_Context.init_global thy;
val _ = Logic.no_prems prop orelse
error ("Bad conditional rewrite rule " ^ Syntax.string_of_term ctxt prop);
- val lhsrhs = Logic.dest_equals prop
- handle TERM _ => error ("Rewrite rule not a meta-equality " ^ Syntax.string_of_term ctxt prop);
- in lhsrhs end;
+ in
+ Logic.dest_equals prop handle TERM _ =>
+ error ("Rewrite rule not a meta-equality " ^ Syntax.string_of_term ctxt prop)
+ end;
+ fun rewrite_term thy =
+ let val ctxt = Proof_Context.init_global thy
+ in Pattern.rewrite_term thy (map (decomp_simp ctxt) props) [] end;
val phi =
Morphism.morphism "Element.eq_term_morphism"
{binding = [],
typ = [],
- term = [Pattern.rewrite_term thy (map decomp_simp props) []],
- fact = [fn _ => error "Illegal application of Element.eq_term_morphism"]};
+ term = [rewrite_term o Morphism.the_theory],
+ fact = [fn _ => fn _ => error "Illegal application of Element.eq_term_morphism"]};
in SOME phi end;
-fun eq_morphism _ [] = NONE
- | eq_morphism thy thms =
+fun eq_morphism [] = NONE
+ | eq_morphism thms =
let
- (* FIXME proper context!? *)
- fun rewrite th = rewrite_rule (Proof_Context.init_global (Thm.theory_of_thm th)) thms th;
+ val thms0 = map Thm.trim_context thms;
+ fun rewrite_term thy =
+ Raw_Simplifier.rewrite_term thy (map (Thm.transfer thy) thms0) [];
+ fun rewrite thy =
+ Raw_Simplifier.rewrite_rule (Proof_Context.init_global thy) (map (Thm.transfer thy) thms0);
val phi =
Morphism.morphism "Element.eq_morphism"
{binding = [],
typ = [],
- term = [Raw_Simplifier.rewrite_term thy thms []],
- fact = [map rewrite]};
+ term = [rewrite_term o Morphism.the_theory],
+ fact = [map o rewrite o Morphism.the_theory]};
in SOME phi end;
(** activate in context **)
(* init *)
fun init (Fixes fixes) = Context.map_proof (Proof_Context.add_fixes fixes #> #2)
| init (Constrains _) = I
| init (Assumes asms) = Context.map_proof (fn ctxt =>
let
val asms' = Attrib.map_specs (map (Attrib.attribute ctxt)) asms;
val (_, ctxt') = ctxt
|> fold Proof_Context.augment (maps (map #1 o #2) asms')
|> Proof_Context.add_assms Assumption.assume_export asms';
in ctxt' end)
| init (Defines defs) = Context.map_proof (fn ctxt =>
let
val defs' = Attrib.map_specs (map (Attrib.attribute ctxt)) defs;
val asms = defs' |> map (fn (b, (t, ps)) =>
let val (_, t') = Local_Defs.cert_def ctxt (K []) t (* FIXME adapt ps? *)
in (t', (b, [(t', ps)])) end);
val (_, ctxt') = ctxt
|> fold Proof_Context.augment (map #1 asms)
|> Proof_Context.add_assms Local_Defs.def_export (map #2 asms);
in ctxt' end)
| init (Notes (kind, facts)) = Attrib.generic_notes kind facts #> #2
| init (Lazy_Notes (kind, ths)) = Attrib.lazy_notes kind ths;
(* activate *)
fun activate_i elem ctxt =
let
val elem' =
(case (map_ctxt_attrib o map) Token.init_assignable elem of
Defines defs =>
Defines (defs |> map (fn ((a, atts), (t, ps)) =>
((Thm.def_binding_optional
(Binding.name (#1 (#1 (Local_Defs.cert_def ctxt (K []) t)))) a, atts),
(t, ps))))
| e => e);
val ctxt' = Context.proof_map (init elem') ctxt;
in ((map_ctxt_attrib o map) Token.closure elem', ctxt') end;
fun activate raw_elem ctxt =
let val elem = raw_elem |> map_ctxt
{binding = I,
typ = I,
term = I,
pattern = I,
fact = Proof_Context.get_fact ctxt,
attrib = Attrib.check_src ctxt}
in activate_i elem ctxt end;
end;
diff --git a/src/Pure/Isar/entity.ML b/src/Pure/Isar/entity.ML
--- a/src/Pure/Isar/entity.ML
+++ b/src/Pure/Isar/entity.ML
@@ -1,58 +1,59 @@
(* Title: Pure/Isar/entity.ML
Author: Makarius
Entity definitions within a global or local theory context.
*)
signature ENTITY =
sig
type 'a data_ops =
{get_data: Context.generic -> 'a Name_Space.table,
put_data: 'a Name_Space.table -> Context.generic -> Context.generic}
val define_global: 'a data_ops -> binding -> 'a -> theory -> string * theory
val define: 'a data_ops -> binding -> 'a -> local_theory -> string * local_theory
end;
structure Entity: ENTITY =
struct
(* context data *)
type 'a data_ops =
{get_data: Context.generic -> 'a Name_Space.table,
put_data: 'a Name_Space.table -> Context.generic -> Context.generic};
(* global definition (foundation) *)
fun define_global {get_data, put_data} b x thy =
let
val context = Context.Theory thy;
val (name, data') = Name_Space.define context true (b, x) (get_data context);
in (name, Context.the_theory (put_data data' context)) end;
(* local definition *)
fun alias {get_data, put_data} binding name =
- Local_Theory.declaration {syntax = false, pervasive = false} (fn phi => fn context =>
- let
- val naming = Name_Space.naming_of context;
- val binding' = Morphism.binding phi binding;
- val data' = Name_Space.alias_table naming binding' name (get_data context);
- in put_data data' context end);
+ Local_Theory.declaration {syntax = false, pervasive = false, pos = Binding.pos_of binding}
+ (fn phi => fn context =>
+ let
+ val naming = Name_Space.naming_of context;
+ val binding' = Morphism.binding phi binding;
+ val data' = Name_Space.alias_table naming binding' name (get_data context);
+ in put_data data' context end);
fun transfer {get_data, put_data} ctxt =
let
val data0 = get_data (Context.Theory (Proof_Context.theory_of ctxt));
val data' = Name_Space.merge_tables (data0, get_data (Context.Proof ctxt));
in Context.proof_map (put_data data') ctxt end;
fun define ops binding x =
Local_Theory.background_theory_result (define_global ops binding x)
#-> (fn name =>
Local_Theory.map_contexts (K (transfer ops))
#> alias ops binding name
#> pair name);
end;
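(* Usage sketch (hypothetical client code): package generic context data as
   data_ops and define named entities through it, e.g.

     structure Data = Generic_Data
     (
       type T = thm Name_Space.table;
       val empty = Name_Space.empty_table "my_entity";
       val merge = Name_Space.merge_tables;
     );

     val ops: thm Entity.data_ops = {get_data = Data.get, put_data = Data.put};
     fun define_my_entity b th = Entity.define ops b th;
*)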
diff --git a/src/Pure/Isar/expression.ML b/src/Pure/Isar/expression.ML
--- a/src/Pure/Isar/expression.ML
+++ b/src/Pure/Isar/expression.ML
@@ -1,880 +1,881 @@
(* Title: Pure/Isar/expression.ML
Author: Clemens Ballarin, TU Muenchen
Locale expressions and user interface layer of locales.
*)
signature EXPRESSION =
sig
(* Locale expressions *)
datatype 'term map = Positional of 'term option list | Named of (string * 'term) list
type 'term rewrites = (Attrib.binding * 'term) list
type ('name, 'term) expr = ('name * ((string * bool) * ('term map * 'term rewrites))) list
type expression_i = (string, term) expr * (binding * typ option * mixfix) list
type expression = (xstring * Position.T, string) expr * (binding * string option * mixfix) list
(* Processing of context statements *)
val cert_statement: Element.context_i list -> Element.statement_i ->
Proof.context -> (Attrib.binding * (term * term list) list) list * Proof.context
val read_statement: Element.context list -> Element.statement ->
Proof.context -> (Attrib.binding * (term * term list) list) list * Proof.context
(* Declaring locales *)
val cert_declaration: expression_i -> (Proof.context -> Proof.context) ->
Element.context_i list ->
Proof.context -> (((string * typ) * mixfix) list * (string * morphism) list
* Element.context_i list * Proof.context) * ((string * typ) list * Proof.context)
val cert_read_declaration: expression_i -> (Proof.context -> Proof.context) ->
Element.context list ->
Proof.context -> (((string * typ) * mixfix) list * (string * morphism) list
* Element.context_i list * Proof.context) * ((string * typ) list * Proof.context)
(*FIXME*)
val read_declaration: expression -> (Proof.context -> Proof.context) -> Element.context list ->
Proof.context -> (((string * typ) * mixfix) list * (string * morphism) list
* Element.context_i list * Proof.context) * ((string * typ) list * Proof.context)
val add_locale: binding -> binding -> Bundle.name list ->
expression_i -> Element.context_i list -> theory -> string * local_theory
val add_locale_cmd: binding -> binding -> (xstring * Position.T) list ->
expression -> Element.context list -> theory -> string * local_theory
(* Processing of locale expressions *)
val cert_goal_expression: expression_i -> Proof.context ->
(term list list * term list list * (string * morphism) list * (Attrib.binding * term) list list * morphism) * Proof.context
val read_goal_expression: expression -> Proof.context ->
(term list list * term list list * (string * morphism) list * (Attrib.binding * term) list list * morphism) * Proof.context
end;
structure Expression : EXPRESSION =
struct
datatype ctxt = datatype Element.ctxt;
(*** Expressions ***)
datatype 'term map =
Positional of 'term option list |
Named of (string * 'term) list;
type 'term rewrites = (Attrib.binding * 'term) list;
type ('name, 'term) expr = ('name * ((string * bool) * ('term map * 'term rewrites))) list;
type expression_i = (string, term) expr * (binding * typ option * mixfix) list;
type expression = (xstring * Position.T, string) expr * (binding * string option * mixfix) list;
(** Internalise locale names in expr **)
fun check_expr thy instances = map (apfst (Locale.check thy)) instances;
(** Parameters of expression **)
(*Sanity check of instantiations and extraction of implicit parameters.
The latter only occurs iff strict = false.
Positional instantiations are extended to match full length of parameter list
of instantiated locale.*)
fun parameters_of thy strict (expr, fixed) =
let
val ctxt = Proof_Context.init_global thy;
fun reject_dups message xs =
(case duplicates (op =) xs of
[] => ()
| dups => error (message ^ commas dups));
fun parm_eq ((p1, mx1), (p2, mx2)) =
p1 = p2 andalso
(Mixfix.equal (mx1, mx2) orelse
error ("Conflicting syntax for parameter " ^ quote p1 ^ " in expression" ^
Position.here_list [Mixfix.pos_of mx1, Mixfix.pos_of mx2]));
fun params_loc loc = Locale.params_of thy loc |> map (apfst #1);
fun params_inst (loc, (prfx, (Positional insts, eqns))) =
let
val ps = params_loc loc;
val d = length ps - length insts;
val insts' =
if d < 0 then
error ("More arguments than parameters in instantiation of locale " ^
quote (Locale.markup_name ctxt loc))
else insts @ replicate d NONE;
val ps' = (ps ~~ insts') |>
map_filter (fn (p, NONE) => SOME p | (_, SOME _) => NONE);
in (ps', (loc, (prfx, (Positional insts', eqns)))) end
| params_inst (loc, (prfx, (Named insts, eqns))) =
let
val _ =
reject_dups "Duplicate instantiation of the following parameter(s): "
(map fst insts);
val ps' = (insts, params_loc loc) |-> fold (fn (p, _) => fn ps =>
if AList.defined (op =) ps p then AList.delete (op =) p ps
else error (quote p ^ " not a parameter of instantiated expression"));
in (ps', (loc, (prfx, (Named insts, eqns)))) end;
fun params_expr is =
let
val (is', ps') = fold_map (fn i => fn ps =>
let
val (ps', i') = params_inst i;
val ps'' = distinct parm_eq (ps @ ps');
in (i', ps'') end) is []
in (ps', is') end;
val (implicit, expr') = params_expr expr;
val implicit' = map #1 implicit;
val fixed' = map (Variable.check_name o #1) fixed;
val _ = reject_dups "Duplicate fixed parameter(s): " fixed';
val implicit'' =
if strict then []
else
let
val _ =
reject_dups
"Parameter(s) declared simultaneously in expression and for clause: "
(implicit' @ fixed');
in map (fn (x, mx) => (Binding.name x, NONE, mx)) implicit end;
in (expr', implicit'' @ fixed) end;
(** Read instantiation **)
(* Parse positional or named instantiation *)
local
fun prep_inst prep_term ctxt parms (Positional insts) =
(insts ~~ parms) |> map
(fn (NONE, p) => Free (p, dummyT)
| (SOME t, _) => prep_term ctxt t)
| prep_inst prep_term ctxt parms (Named insts) =
parms |> map (fn p =>
(case AList.lookup (op =) insts p of
SOME t => prep_term ctxt t |
NONE => Free (p, dummyT)));
in
fun parse_inst x = prep_inst Syntax.parse_term x;
fun make_inst x = prep_inst (K I) x;
end;
(* Instantiation morphism *)
fun inst_morphism params ((prfx, mandatory), insts') ctxt =
let
(* parameters *)
val parm_types = map #2 params;
val type_parms = fold Term.add_tfreesT parm_types [];
(* type inference *)
val parm_types' = map (Type_Infer.paramify_vars o Logic.varifyT_global) parm_types;
val type_parms' = fold Term.add_tvarsT parm_types' [];
val checked =
(map (Logic.mk_type o TVar) type_parms' @ map2 Type.constraint parm_types' insts')
|> Syntax.check_terms (Config.put Type_Infer.object_logic false ctxt)
val (type_parms'', insts'') = chop (length type_parms') checked;
(* context *)
val ctxt' = fold Proof_Context.augment checked ctxt;
val certT = Thm.trim_context_ctyp o Thm.ctyp_of ctxt';
val cert = Thm.trim_context_cterm o Thm.cterm_of ctxt';
(* instantiation *)
val instT =
TFrees.build
(fold2 (fn v => fn T => not (TFree v = T) ? TFrees.add (v, T))
type_parms (map Logic.dest_type type_parms''));
val cert_inst =
Frees.build
(fold2 (fn v => fn t => not (Free v = t) ? Frees.add (v, cert t))
(map #1 params ~~ map (Term_Subst.instantiateT_frees instT) parm_types) insts'');
in
(Element.instantiate_normalize_morphism (TFrees.map (K certT) instT, cert_inst) $>
Morphism.binding_morphism "Expression.inst" (Binding.prefix mandatory prfx), ctxt')
end;
(*** Locale processing ***)
(** Parsing **)
fun parse_elem prep_typ prep_term ctxt =
Element.map_ctxt
{binding = I,
typ = prep_typ ctxt,
term = prep_term (Proof_Context.set_mode Proof_Context.mode_schematic ctxt),
pattern = prep_term (Proof_Context.set_mode Proof_Context.mode_pattern ctxt),
fact = I,
attrib = I};
fun prepare_stmt prep_prop prep_obtains ctxt stmt =
(case stmt of
Element.Shows raw_shows =>
raw_shows |> (map o apsnd o map) (fn (t, ps) =>
(prep_prop (Proof_Context.set_mode Proof_Context.mode_schematic ctxt) t,
map (prep_prop (Proof_Context.set_mode Proof_Context.mode_pattern ctxt)) ps))
| Element.Obtains raw_obtains =>
let
val ((_, thesis), thesis_ctxt) = Obtain.obtain_thesis ctxt;
val obtains = prep_obtains thesis_ctxt thesis raw_obtains;
in map (fn (b, t) => ((b, []), [(t, [])])) obtains end);
(** Simultaneous type inference: instantiations + elements + statement **)
local
fun mk_type T = (Logic.mk_type T, []);
fun mk_term t = (t, []);
fun mk_propp (p, pats) = (Type.constraint propT p, pats);
fun dest_type (T, []) = Logic.dest_type T;
fun dest_term (t, []) = t;
fun dest_propp (p, pats) = (p, pats);
fun extract_inst (_, (_, ts)) = map mk_term ts;
fun restore_inst ((l, (p, _)), cs) = (l, (p, map dest_term cs));
fun extract_eqns es = map (mk_term o snd) es;
fun restore_eqns (es, cs) = map2 (fn (b, _) => fn c => (b, dest_term c)) es cs;
fun extract_elem (Fixes fixes) = map (#2 #> the_list #> map mk_type) fixes
| extract_elem (Constrains csts) = map (#2 #> single #> map mk_type) csts
| extract_elem (Assumes asms) = map (#2 #> map mk_propp) asms
| extract_elem (Defines defs) = map (fn (_, (t, ps)) => [mk_propp (t, ps)]) defs
| extract_elem (Notes _) = []
| extract_elem (Lazy_Notes _) = [];
fun restore_elem (Fixes fixes, css) =
(fixes ~~ css) |> map (fn ((x, _, mx), cs) =>
(x, cs |> map dest_type |> try hd, mx)) |> Fixes
| restore_elem (Constrains csts, css) =
(csts ~~ css) |> map (fn ((x, _), cs) =>
(x, cs |> map dest_type |> hd)) |> Constrains
| restore_elem (Assumes asms, css) =
(asms ~~ css) |> map (fn ((b, _), cs) => (b, map dest_propp cs)) |> Assumes
| restore_elem (Defines defs, css) =
(defs ~~ css) |> map (fn ((b, _), [c]) => (b, dest_propp c)) |> Defines
| restore_elem (elem as Notes _, _) = elem
| restore_elem (elem as Lazy_Notes _, _) = elem;
fun prep (_, pats) (ctxt, t :: ts) =
let val ctxt' = Proof_Context.augment t ctxt
in
((t, Syntax.check_props (Proof_Context.set_mode Proof_Context.mode_pattern ctxt') pats),
(ctxt', ts))
end;
fun check cs ctxt =
let
val (cs', (ctxt', _)) = fold_map prep cs
(ctxt, Syntax.check_terms
(Proof_Context.set_mode Proof_Context.mode_schematic ctxt) (map fst cs));
in (cs', ctxt') end;
in
fun check_autofix insts eqnss elems concl ctxt =
let
val inst_cs = map extract_inst insts;
val eqns_cs = map extract_eqns eqnss;
val elem_css = map extract_elem elems;
val concl_cs = (map o map) mk_propp (map snd concl);
(* Type inference *)
val (inst_cs' :: eqns_cs' :: css', ctxt') =
(fold_burrow o fold_burrow) check (inst_cs :: eqns_cs :: elem_css @ [concl_cs]) ctxt;
val (elem_css', [concl_cs']) = chop (length elem_css) css';
in
((map restore_inst (insts ~~ inst_cs'),
map restore_eqns (eqnss ~~ eqns_cs'),
map restore_elem (elems ~~ elem_css'),
map fst concl ~~ concl_cs'), ctxt')
end;
end;
(** Prepare locale elements **)
fun declare_elem prep_var (Fixes fixes) ctxt =
let val (vars, _) = fold_map prep_var fixes ctxt
in ctxt |> Proof_Context.add_fixes vars |> snd end
| declare_elem prep_var (Constrains csts) ctxt =
ctxt |> fold_map (fn (x, T) => prep_var (Binding.name x, SOME T, NoSyn)) csts |> snd
| declare_elem _ (Assumes _) ctxt = ctxt
| declare_elem _ (Defines _) ctxt = ctxt
| declare_elem _ (Notes _) ctxt = ctxt
| declare_elem _ (Lazy_Notes _) ctxt = ctxt;
(** Finish locale elements **)
fun finish_inst ctxt (loc, (prfx, inst)) =
let
val thy = Proof_Context.theory_of ctxt;
val (morph, _) = inst_morphism (map #1 (Locale.params_of thy loc)) (prfx, inst) ctxt;
in (loc, morph) end;
fun finish_fixes (parms: (string * typ) list) = map (fn (binding, _, mx) =>
let val x = Binding.name_of binding
in (binding, AList.lookup (op =) parms x, mx) end);
local
fun closeup _ _ false elem = elem
| closeup (outer_ctxt, ctxt) parms true elem =
let
(* FIXME consider closing in syntactic phase -- before type checking *)
fun close_frees t =
let
val rev_frees =
Term.fold_aterms (fn Free (x, T) =>
if Variable.is_fixed outer_ctxt x orelse AList.defined (op =) parms x then I
else insert (op =) (x, T) | _ => I) t [];
in fold (Logic.all o Free) rev_frees t end;
fun no_binds [] = []
| no_binds _ = error "Illegal term bindings in context element";
in
(case elem of
Assumes asms => Assumes (asms |> map (fn (a, propps) =>
(a, map (fn (t, ps) => (close_frees t, no_binds ps)) propps)))
| Defines defs => Defines (defs |> map (fn ((name, atts), (t, ps)) =>
let val ((c, _), t') = Local_Defs.cert_def ctxt (K []) (close_frees t)
in ((Thm.def_binding_optional (Binding.name c) name, atts), (t', no_binds ps)) end))
| e => e)
end;
in
fun finish_elem _ parms _ (Fixes fixes) = Fixes (finish_fixes parms fixes)
| finish_elem _ _ _ (Constrains _) = Constrains []
| finish_elem ctxts parms do_close (Assumes asms) = closeup ctxts parms do_close (Assumes asms)
| finish_elem ctxts parms do_close (Defines defs) = closeup ctxts parms do_close (Defines defs)
| finish_elem _ _ _ (elem as Notes _) = elem
| finish_elem _ _ _ (elem as Lazy_Notes _) = elem;
end;
(** Process full context statement: instantiations + elements + statement **)
(* Interleave incremental parsing and type inference over entire parsed stretch. *)
local
fun abs_def ctxt =
Thm.cterm_of ctxt #> Assumption.assume ctxt #> Local_Defs.abs_def_rule ctxt #> Thm.prop_of;
fun prep_full_context_statement
parse_typ parse_prop prep_obtains prep_var_elem prep_inst prep_eqns prep_attr prep_var_inst prep_expr
{strict, do_close, fixed_frees} raw_import init_body raw_elems raw_stmt ctxt1 =
let
val thy = Proof_Context.theory_of ctxt1;
val (raw_insts, fixed) = parameters_of thy strict (apfst (prep_expr thy) raw_import);
fun prep_insts_cumulative (loc, (prfx, (inst, eqns))) (i, insts, eqnss, ctxt) =
let
val params = map #1 (Locale.params_of thy loc);
val inst' = prep_inst ctxt (map #1 params) inst;
val parm_types' =
params |> map (#2 #> Logic.varifyT_global #>
Term.map_type_tvar (fn ((x, _), S) => TVar ((x, i), S)) #>
Type_Infer.paramify_vars);
val inst'' = map2 Type.constraint parm_types' inst';
val insts' = insts @ [(loc, (prfx, inst''))];
val ((insts'', _, _, _), ctxt2) = check_autofix insts' [] [] [] ctxt;
val inst''' = insts'' |> List.last |> snd |> snd;
val (inst_morph, _) = inst_morphism params (prfx, inst''') ctxt;
val ctxt' = Locale.activate_declarations (loc, inst_morph) ctxt2
handle ERROR msg => if null eqns then error msg else
(Locale.tracing ctxt1
(msg ^ "\nFalling back to reading rewrites clause before activation.");
ctxt2);
val attrss = map (apsnd (map (prep_attr ctxt)) o fst) eqns;
val eqns' = (prep_eqns ctxt' o map snd) eqns;
val eqnss' = [attrss ~~ eqns'];
val ((_, [eqns''], _, _), _) = check_autofix insts'' eqnss' [] [] ctxt';
val rewrite_morph = eqns'
|> map (abs_def ctxt')
|> Variable.export_terms ctxt' ctxt
- |> Element.eq_term_morphism (Proof_Context.theory_of ctxt)
- |> the_default Morphism.identity;
+ |> Element.eq_term_morphism
+ |> Morphism.default;
val ctxt'' = Locale.activate_declarations (loc, inst_morph $> rewrite_morph) ctxt;
val eqnss' = eqnss @ [attrss ~~ Variable.export_terms ctxt' ctxt eqns'];
in (i + 1, insts', eqnss', ctxt'') end;
fun prep_elem raw_elem ctxt =
let
val ctxt' = ctxt
|> Context_Position.set_visible false
|> declare_elem prep_var_elem raw_elem
|> Context_Position.restore_visible ctxt;
val elems' = parse_elem parse_typ parse_prop ctxt' raw_elem;
in (elems', ctxt') end;
val fors = fold_map prep_var_inst fixed ctxt1 |> fst;
val ctxt2 = ctxt1 |> Proof_Context.add_fixes fors |> snd;
val (_, insts', eqnss', ctxt3) = fold prep_insts_cumulative raw_insts (0, [], [], ctxt2);
fun prep_stmt elems ctxt =
check_autofix insts' [] elems (prepare_stmt parse_prop prep_obtains ctxt raw_stmt) ctxt;
val _ =
if fixed_frees then ()
else
(case fold (fold (Variable.add_frees ctxt3) o snd o snd) insts' [] of
[] => ()
| frees => error ("Illegal free variables in expression: " ^
commas_quote (map (Syntax.string_of_term ctxt3 o Free) (rev frees))));
val ((insts, _, elems', concl), ctxt4) = ctxt3
|> init_body
|> fold_map prep_elem raw_elems
|-> prep_stmt;
(* parameters from expression and elements *)
val xs = maps (fn Fixes fixes => map (Variable.check_name o #1) fixes | _ => [])
(Fixes fors :: elems');
val (parms, ctxt5) = fold_map Proof_Context.inferred_param xs ctxt4;
val fors' = finish_fixes parms fors;
val fixed = map (fn (b, SOME T, mx) => ((Binding.name_of b, T), mx)) fors';
val deps = map (finish_inst ctxt5) insts;
val elems'' = map (finish_elem (ctxt1, ctxt5) parms do_close) elems';
in ((fixed, deps, eqnss', elems'', concl), (parms, ctxt5)) end;
in
fun cert_full_context_statement x =
prep_full_context_statement (K I) (K I) Obtain.cert_obtains
Proof_Context.cert_var make_inst Syntax.check_props (K I) Proof_Context.cert_var (K I) x;
fun cert_read_full_context_statement x =
prep_full_context_statement Syntax.parse_typ Syntax.parse_prop Obtain.parse_obtains
Proof_Context.read_var make_inst Syntax.check_props (K I) Proof_Context.cert_var (K I) x;
fun read_full_context_statement x =
prep_full_context_statement Syntax.parse_typ Syntax.parse_prop Obtain.parse_obtains
Proof_Context.read_var parse_inst Syntax.read_props Attrib.check_src Proof_Context.read_var check_expr x;
end;
(* Context statement: elements + statement *)
local
fun prep_statement prep activate raw_elems raw_stmt ctxt =
let
val ((_, _, _, elems, concl), _) =
prep {strict = true, do_close = false, fixed_frees = true}
([], []) I raw_elems raw_stmt ctxt;
val ctxt' = ctxt
|> Proof_Context.set_stmt true
|> fold_map activate elems |> #2
|> Proof_Context.restore_stmt ctxt;
in (concl, ctxt') end;
in
fun cert_statement x = prep_statement cert_full_context_statement Element.activate_i x;
fun read_statement x = prep_statement read_full_context_statement Element.activate x;
end;
(* Locale declaration: import + elements *)
fun fix_params params =
Proof_Context.add_fixes (map (fn ((x, T), mx) => (Binding.name x, SOME T, mx)) params) #> snd;
local
fun prep_declaration prep activate raw_import init_body raw_elems ctxt =
let
val ((fixed, deps, eqnss, elems, _), (parms, ctxt0)) =
prep {strict = false, do_close = true, fixed_frees = false}
raw_import init_body raw_elems (Element.Shows []) ctxt;
val _ = null (flat eqnss) orelse error "Illegal rewrites clause(s) in declaration of locale";
(* Declare parameters and imported facts *)
val ctxt' = ctxt
|> fix_params fixed
|> fold (Context.proof_map o Locale.activate_facts NONE) deps;
val (elems', ctxt'') = ctxt'
|> Proof_Context.set_stmt true
|> fold_map activate elems
||> Proof_Context.restore_stmt ctxt';
in ((fixed, deps, elems', ctxt''), (parms, ctxt0)) end;
in
fun cert_declaration x = prep_declaration cert_full_context_statement Element.activate_i x;
fun cert_read_declaration x = prep_declaration cert_read_full_context_statement Element.activate x;
fun read_declaration x = prep_declaration read_full_context_statement Element.activate x;
end;
(* Locale expression to set up a goal *)
local
fun props_of thy (name, morph) =
let val (asm, defs) = Locale.specification_of thy name
in map (Morphism.term morph) (the_list asm @ defs) end;
fun prep_goal_expression prep expression ctxt =
let
val thy = Proof_Context.theory_of ctxt;
val ((fixed, deps, eqnss, _, _), _) =
prep {strict = true, do_close = true, fixed_frees = true} expression I []
(Element.Shows []) ctxt;
(* proof obligations *)
val propss = map (props_of thy) deps;
val eq_propss = (map o map) snd eqnss;
val goal_ctxt = ctxt
|> fix_params fixed
|> (fold o fold) Proof_Context.augment (propss @ eq_propss);
val export = Proof_Context.export_morphism goal_ctxt ctxt;
val exp_fact = Drule.zero_var_indexes_list o map Thm.strip_shyps o Morphism.fact export;
val exp_term = Term_Subst.zero_var_indexes o Morphism.term export;
val exp_typ = Logic.type_map exp_term;
val export' =
Morphism.morphism "Expression.prep_goal"
- {binding = [], typ = [exp_typ], term = [exp_term], fact = [exp_fact]};
+ {binding = [], typ = [K exp_typ], term = [K exp_term], fact = [K exp_fact]};
in ((propss, eq_propss, deps, eqnss, export'), goal_ctxt) end;
in
fun cert_goal_expression x = prep_goal_expression cert_full_context_statement x;
fun read_goal_expression x = prep_goal_expression read_full_context_statement x;
end;
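(* Hedged usage sketch (editorial, not part of the original sources): given an
   expression and a context, e.g.

     val ((propss, eq_propss, deps, eqnss, export), goal_ctxt) =
       read_goal_expression expression ctxt;

   propss are the proof obligations of the locale instances, eq_propss the
   rewrite equations, and export the morphism back into ctxt. *)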
(*** Locale declarations ***)
(* extract specification text *)
val norm_term = Envir.beta_norm oo Term.subst_atomic;
fun bind_def ctxt eq (env, eqs) =
let
val _ = Local_Defs.cert_def ctxt (K []) eq;
val ((y, T), b) = Local_Defs.abs_def eq;
val b' = norm_term env b;
fun err msg = error (msg ^ ": " ^ quote y);
in
(case filter (fn (Free (y', _), _) => y = y' | _ => false) env of
[] => ((Free (y, T), b') :: env, eq :: eqs)
| dups =>
if forall (fn (_, b'') => b' aconv b'') dups then (env, eqs)
else err "Attempt to redefine variable")
end;
(* text has the following structure:
(((exts, exts'), (ints, ints')), (xs, env, defs))
where
exts: external assumptions (terms in assumes elements)
exts': ditto, normalised wrt. env
ints: internal assumptions (terms in assumptions from insts)
ints': ditto, normalised wrt. env
xs: the free variables in exts' and ints' and rhss of definitions;
this includes parameters except defined parameters
env: list of term pairs encoding substitutions, where the first term
is a free variable; substitutions represent defines elements and
the rhs is normalised wrt. the previous env
defs: the equations from the defines elements
*)
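(* Editorial example (not part of the original sources): the empty accumulator
   that eval below starts from is

     ((([], []), ([], [])), ([], []))

   i.e. no external or internal assumptions recorded yet, and an empty
   env/defs part. *)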
fun eval_text _ _ (Fixes _) text = text
| eval_text _ _ (Constrains _) text = text
| eval_text _ is_ext (Assumes asms)
(((exts, exts'), (ints, ints')), (env, defs)) =
let
val ts = maps (map #1 o #2) asms;
val ts' = map (norm_term env) ts;
val spec' =
if is_ext then ((exts @ ts, exts' @ ts'), (ints, ints'))
else ((exts, exts'), (ints @ ts, ints' @ ts'));
in (spec', (env, defs)) end
| eval_text ctxt _ (Defines defs) (spec, binds) =
(spec, fold (bind_def ctxt o #1 o #2) defs binds)
| eval_text _ _ (Notes _) text = text
| eval_text _ _ (Lazy_Notes _) text = text;
fun eval_inst ctxt (loc, morph) text =
let
val thy = Proof_Context.theory_of ctxt;
val (asm, defs) = Locale.specification_of thy loc;
val asm' = Option.map (Morphism.term morph) asm;
val defs' = map (Morphism.term morph) defs;
val text' =
text |>
(if is_some asm then
eval_text ctxt false (Assumes [(Binding.empty_atts, [(the asm', [])])])
else I) |>
(if not (null defs) then
eval_text ctxt false (Defines (map (fn def => (Binding.empty_atts, (def, []))) defs'))
else I)
(* FIXME clone from locale.ML *)
in text' end;
fun eval_elem ctxt elem text =
eval_text ctxt true elem text;
fun eval ctxt deps elems =
let
val text' = fold (eval_inst ctxt) deps ((([], []), ([], [])), ([], []));
val ((spec, (_, defs))) = fold (eval_elem ctxt) elems text';
in (spec, defs) end;
(* axiomsN: name of theorem set with destruct rules for locale predicates,
also name suffix of delta predicates and assumptions. *)
val axiomsN = "axioms";
local
(* introN: name of theorems for introduction rules of locale and
delta predicates *)
val introN = "intro";
fun atomize_spec ctxt ts =
let
val t = Logic.mk_conjunction_balanced ts;
val body = Object_Logic.atomize_term ctxt t;
val bodyT = Term.fastype_of body;
in
if bodyT = propT
then (t, propT, Thm.reflexive (Thm.cterm_of ctxt t))
else (body, bodyT, Object_Logic.atomize ctxt (Thm.cterm_of ctxt t))
end;
(* achieve plain syntax for locale predicates (without "PROP") *)
fun aprop_tr' n c =
let
val c' = Lexicon.mark_const c;
fun tr' (_: Proof.context) T args =
if T <> dummyT andalso length args = n
then Syntax.const "_aprop" $ Term.list_comb (Syntax.const c', args)
else raise Match;
in (c', tr') end;
(* define one predicate including its intro rule and axioms
- binding: predicate name
- parms: locale parameters
- defs: thms representing substitutions from defines elements
- ts: terms representing locale assumptions (not normalised wrt. defs)
- norm_ts: terms representing locale assumptions (normalised wrt. defs)
- thy: the theory
*)
fun def_pred binding parms defs ts norm_ts thy =
let
val name = Sign.full_name thy binding;
val thy_ctxt = Proof_Context.init_global thy;
val (body, bodyT, body_eq) = atomize_spec thy_ctxt norm_ts;
val env = Names.build (Names.add_free_names body);
val xs = filter (Names.defined env o #1) parms;
val Ts = map #2 xs;
val type_tfrees = TFrees.build (fold TFrees.add_tfreesT Ts);
val extra_tfrees =
TFrees.build (TFrees.add_tfrees_unless (TFrees.defined type_tfrees) body)
|> TFrees.keys |> map TFree;
val predT = map Term.itselfT extra_tfrees ---> Ts ---> bodyT;
val args = map Logic.mk_type extra_tfrees @ map Free xs;
val head = Term.list_comb (Const (name, predT), args);
val statement = Object_Logic.ensure_propT thy_ctxt head;
val ([pred_def], defs_thy) =
thy
|> bodyT = propT ? Sign.typed_print_translation [aprop_tr' (length args) name]
|> Sign.declare_const_global ((binding, predT), NoSyn) |> snd
|> Global_Theory.add_defs false [((Thm.def_binding binding, Logic.mk_equals (head, body)), [])];
val defs_ctxt = Proof_Context.init_global defs_thy |> Variable.declare_term head;
val intro = Goal.prove_global defs_thy [] norm_ts statement
(fn {context = ctxt, ...} =>
rewrite_goals_tac ctxt [pred_def] THEN
compose_tac defs_ctxt (false, body_eq RS Drule.equal_elim_rule1, 1) 1 THEN
compose_tac defs_ctxt
(false,
Conjunction.intr_balanced (map (Thm.assume o Thm.cterm_of defs_ctxt) norm_ts), 0) 1);
val conjuncts =
(Drule.equal_elim_rule2 OF
[body_eq, rewrite_rule defs_ctxt [pred_def] (Thm.assume (Thm.cterm_of defs_ctxt statement))])
|> Conjunction.elim_balanced (length ts);
val (_, axioms_ctxt) = defs_ctxt
|> Assumption.add_assumes (maps Thm.chyps_of (defs @ conjuncts));
val axioms = ts ~~ conjuncts |> map (fn (t, ax) =>
Element.prove_witness axioms_ctxt t
(rewrite_goals_tac axioms_ctxt defs THEN compose_tac axioms_ctxt (false, ax, 0) 1));
in ((statement, intro, axioms), defs_thy) end;
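(* Hedged call sketch (editorial, not part of the original sources):
   define_preds below invokes this roughly as

     val ((statement, intro, axioms), thy') =
       thy |> def_pred abinding parms defs' exts exts';

   yielding the predicate statement, its intro rule and the witness axioms
   together with the extended theory. *)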
in
(* main predicate definition function *)
fun define_preds binding parms (((exts, exts'), (ints, ints')), defs) thy =
let
val ctxt = Proof_Context.init_global thy;
val defs' = map (Thm.cterm_of ctxt #> Assumption.assume ctxt #> Drule.abs_def) defs;
- val (a_pred, a_intro, a_axioms, thy'') =
+ val (a_pred, a_intro, a_axioms, thy2) =
if null exts then (NONE, NONE, [], thy)
else
let
val abinding =
if null ints then binding else Binding.suffix_name ("_" ^ axiomsN) binding;
- val ((statement, intro, axioms), thy') =
+ val ((statement, intro, axioms), thy1) =
thy
|> def_pred abinding parms defs' exts exts';
- val ((_, [intro']), thy'') =
- thy'
+ val ((_, [intro']), thy2) =
+ thy1
|> Sign.qualified_path true abinding
|> Global_Theory.note_thms ""
((Binding.name introN, []), [([intro], [Locale.unfold_add])])
- ||> Sign.restore_naming thy';
- in (SOME statement, SOME intro', axioms, thy'') end;
- val (b_pred, b_intro, b_axioms, thy'''') =
- if null ints then (NONE, NONE, [], thy'')
+ ||> Sign.restore_naming thy1;
+ in (SOME statement, SOME intro', axioms, thy2) end;
+ val (b_pred, b_intro, b_axioms, thy4) =
+ if null ints then (NONE, NONE, [], thy2)
else
let
- val ((statement, intro, axioms), thy''') =
- thy''
+ val ((statement, intro, axioms), thy3) =
+ thy2
|> def_pred binding parms defs' (ints @ the_list a_pred) (ints' @ the_list a_pred);
- val ctxt''' = Proof_Context.init_global thy''';
- val ([(_, [intro']), _], thy'''') =
- thy'''
+ val conclude_witness =
+ Drule.export_without_context o Element.conclude_witness (Proof_Context.init_global thy3);
+ val ([(_, [intro']), _], thy4) =
+ thy3
|> Sign.qualified_path true binding
|> Global_Theory.note_thmss ""
[((Binding.name introN, []), [([intro], [Locale.intro_add])]),
((Binding.name axiomsN, []),
- [(map (Drule.export_without_context o Element.conclude_witness ctxt''') axioms,
- [])])]
- ||> Sign.restore_naming thy''';
- in (SOME statement, SOME intro', axioms, thy'''') end;
- in ((a_pred, a_intro, a_axioms), (b_pred, b_intro, b_axioms), thy'''') end;
+ [(map conclude_witness axioms, [])])]
+ ||> Sign.restore_naming thy3;
+ in (SOME statement, SOME intro', axioms, thy4) end;
+ in ((a_pred, a_intro, a_axioms), (b_pred, b_intro, b_axioms), thy4) end;
end;
local
fun assumes_to_notes (Assumes asms) axms =
fold_map (fn (a, spec) => fn axs =>
let val (ps, qs) = chop (length spec) axs
in ((a, [(ps, [])]), qs) end) asms axms
|> apfst (curry Notes "")
| assumes_to_notes e axms = (e, axms);
fun defines_to_notes ctxt (Defines defs) =
Notes ("", map (fn (a, (def, _)) =>
(a, [([Assumption.assume ctxt (Thm.cterm_of ctxt def)],
- [(Attrib.internal o K) Locale.witness_add])])) defs)
+ [Attrib.internal @{here} (K Locale.witness_add)])])) defs)
| defines_to_notes _ e = e;
val is_hyp = fn Assumes _ => true | Defines _ => true | _ => false;
fun gen_add_locale prep_include prep_decl
binding raw_predicate_binding raw_includes raw_import raw_body thy =
let
val name = Sign.full_name thy binding;
val _ = Locale.defined thy name andalso
error ("Duplicate definition of locale " ^ quote name);
val ctxt = Proof_Context.init_global thy;
val includes = map (prep_include ctxt) raw_includes;
val ((fixed, deps, body_elems, _), (parms, ctxt')) =
ctxt
|> Bundle.includes includes
|> prep_decl raw_import I raw_body;
val text as (((_, exts'), _), defs) = eval ctxt' deps body_elems;
val type_tfrees = TFrees.build (fold (TFrees.add_tfreesT o #2) parms);
val extra_tfrees =
TFrees.build (fold (TFrees.add_tfrees_unless (TFrees.defined type_tfrees)) exts')
|> TFrees.keys;
val _ =
if null extra_tfrees then ()
else warning ("Additional type variable(s) in locale specification " ^
Binding.print binding ^ ": " ^
commas (map (Syntax.string_of_typ ctxt' o TFree) extra_tfrees));
val predicate_binding =
if Binding.is_empty raw_predicate_binding then binding
else raw_predicate_binding;
val ((a_statement, a_intro, a_axioms), (b_statement, b_intro, b_axioms), thy') =
define_preds predicate_binding parms text thy;
val pred_ctxt = Proof_Context.init_global thy';
val a_satisfy = Element.satisfy_morphism a_axioms;
val b_satisfy = Element.satisfy_morphism b_axioms;
val params = fixed @
maps (fn Fixes fixes =>
map (fn (b, SOME T, mx) => ((Binding.name_of b, T), mx)) fixes | _ => []) body_elems;
val asm = if is_some b_statement then b_statement else a_statement;
val hyp_spec = filter is_hyp body_elems;
val notes =
if is_some asm then
[("", [((Binding.suffix_name ("_" ^ axiomsN) binding, []),
[([Assumption.assume pred_ctxt (Thm.cterm_of pred_ctxt (the asm))],
- [(Attrib.internal o K) Locale.witness_add])])])]
+ [Attrib.internal @{here} (K Locale.witness_add)])])])]
else [];
val notes' =
body_elems
+ |> map (Element.transfer_ctxt thy')
|> map (defines_to_notes pred_ctxt)
|> map (Element.transform_ctxt a_satisfy)
|> (fn elems =>
fold_map assumes_to_notes elems (map (Element.conclude_witness pred_ctxt) a_axioms))
|> fst
|> map (Element.transform_ctxt b_satisfy)
|> map_filter (fn Notes notes => SOME notes | _ => NONE);
val deps' = map (fn (l, morph) => (l, morph $> b_satisfy)) deps;
val axioms = map (Element.conclude_witness pred_ctxt) b_axioms;
val loc_ctxt = thy'
|> Locale.register_locale binding (extra_tfrees, params)
(asm, rev defs) (a_intro, b_intro) axioms hyp_spec [] (rev notes) (rev deps')
|> Named_Target.init includes name
|> fold (fn (kind, facts) => Local_Theory.notes_kind kind facts #> snd) notes';
in (name, loc_ctxt) end;
in
val add_locale = gen_add_locale (K I) cert_declaration;
val add_locale_cmd = gen_add_locale Bundle.check read_declaration;
end;
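(* Editorial illustration (hypothetical, not from the original sources):
   add_locale_cmd is used by the Isar "locale" command, so a theory text such
   as

     locale preorder =
       fixes le :: "'a => 'a => bool"
       assumes refl: "le x x"

   is elaborated by read_declaration, define_preds and Locale.register_locale
   above. *)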
end;
diff --git a/src/Pure/Isar/generic_target.ML b/src/Pure/Isar/generic_target.ML
--- a/src/Pure/Isar/generic_target.ML
+++ b/src/Pure/Isar/generic_target.ML
@@ -1,479 +1,481 @@
(* Title: Pure/Isar/generic_target.ML
Author: Makarius
Author: Florian Haftmann, TU Muenchen
Common target infrastructure.
*)
signature GENERIC_TARGET =
sig
(*auxiliary*)
val export_abbrev: Proof.context ->
(term -> term) -> term -> term * ((string * sort) list * (term list * term list))
val check_mixfix: Proof.context -> binding * (string * sort) list -> mixfix -> mixfix
val check_mixfix_global: binding * bool -> mixfix -> mixfix
(*background primitives*)
val background_foundation: ((binding * typ) * mixfix) * (binding * term) ->
term list * term list -> local_theory -> (term * thm) * local_theory
- val background_declaration: declaration -> local_theory -> local_theory
+ val background_declaration: Morphism.declaration_entity -> local_theory -> local_theory
val background_abbrev: binding * term -> term list -> local_theory -> (term * term) * local_theory
val add_foundation_interpretation: (binding * (term * term list) -> Context.generic -> Context.generic) ->
theory -> theory
(*nested local theories primitives*)
val standard_facts: local_theory -> Proof.context -> Attrib.fact list -> Attrib.fact list
val standard_notes: (int * int -> bool) -> string -> Attrib.fact list ->
local_theory -> local_theory
- val standard_declaration: (int * int -> bool) ->
- (morphism -> Context.generic -> Context.generic) -> local_theory -> local_theory
+ val standard_declaration: (int * int -> bool) -> Morphism.declaration_entity ->
+ local_theory -> local_theory
val standard_const: (int * int -> bool) -> Syntax.mode -> (binding * mixfix) * term ->
local_theory -> local_theory
val local_interpretation: Locale.registration ->
local_theory -> local_theory
(*lifting target primitives to local theory operations*)
val define: (((binding * typ) * mixfix) * (binding * term) ->
term list * term list -> local_theory -> (term * thm) * local_theory) ->
bool -> (binding * mixfix) * (Attrib.binding * term) -> local_theory ->
(term * (string * thm)) * local_theory
val notes:
(string -> Attrib.fact list -> Attrib.fact list -> local_theory -> local_theory) ->
string -> Attrib.fact list -> local_theory -> (string * thm list) list * local_theory
val abbrev: (Syntax.mode -> binding * mixfix -> term ->
term list * term list -> local_theory -> local_theory) ->
Syntax.mode -> (binding * mixfix) * term -> local_theory -> (term * term) * local_theory
(*theory target primitives*)
val theory_target_foundation: ((binding * typ) * mixfix) * (binding * term) ->
term list * term list -> local_theory -> (term * thm) * local_theory
val theory_target_notes: string -> Attrib.fact list -> Attrib.fact list ->
local_theory -> local_theory
val theory_target_abbrev: Syntax.mode -> (binding * mixfix) -> term -> term list * term list ->
local_theory -> local_theory
(*theory target operations*)
val theory_abbrev: Syntax.mode -> (binding * mixfix) * term ->
local_theory -> (term * term) * local_theory
- val theory_declaration: declaration -> local_theory -> local_theory
+ val theory_declaration: Morphism.declaration_entity -> local_theory -> local_theory
val theory_registration: Locale.registration -> local_theory -> local_theory
(*locale target primitives*)
val locale_target_notes: string -> string -> Attrib.fact list -> Attrib.fact list ->
local_theory -> local_theory
val locale_target_abbrev: string -> Syntax.mode ->
(binding * mixfix) -> term -> term list * term list -> local_theory -> local_theory
- val locale_target_declaration: string -> bool -> declaration -> local_theory -> local_theory
+ val locale_target_declaration: string -> {syntax: bool, pos: Position.T} ->
+ Morphism.declaration_entity -> local_theory -> local_theory
val locale_target_const: string -> (morphism -> bool) -> Syntax.mode ->
(binding * mixfix) * term -> local_theory -> local_theory
(*locale operations*)
val locale_abbrev: string -> Syntax.mode -> (binding * mixfix) * term ->
local_theory -> (term * term) * local_theory
- val locale_declaration: string -> {syntax: bool, pervasive: bool} -> declaration ->
- local_theory -> local_theory
+ val locale_declaration: string -> {syntax: bool, pervasive: bool, pos: Position.T} ->
+ Morphism.declaration_entity -> local_theory -> local_theory
val locale_const: string -> Syntax.mode -> (binding * mixfix) * term ->
local_theory -> local_theory
val locale_dependency: string -> Locale.registration ->
local_theory -> local_theory
end
structure Generic_Target: GENERIC_TARGET =
struct
(** consts **)
fun export_abbrev lthy preprocess rhs =
let
val thy_ctxt = Proof_Context.init_global (Proof_Context.theory_of lthy);
val rhs' = rhs
|> Assumption.export_term lthy (Local_Theory.target_of lthy)
|> preprocess;
val term_params =
map Free (sort (Variable.fixed_ord lthy o apply2 #1) (Variable.add_fixed lthy rhs' []));
val u = fold_rev lambda term_params rhs';
val global_rhs = singleton (Variable.polymorphic thy_ctxt) u;
val type_tfrees = TFrees.build (TFrees.add_tfreesT (Term.fastype_of u));
val extra_tfrees =
TFrees.build (TFrees.add_tfrees_unless (TFrees.defined type_tfrees) u)
|> TFrees.keys;
val type_params = map (Logic.mk_type o TFree) extra_tfrees;
in (global_rhs, (extra_tfrees, (type_params, term_params))) end;
fun check_mixfix ctxt (b, extra_tfrees) mx =
if null extra_tfrees then mx
else
(if Context_Position.is_visible ctxt then
warning
("Additional type variable(s) in specification of " ^ Binding.print b ^ ": " ^
commas (map (Syntax.string_of_typ ctxt o TFree) extra_tfrees) ^
(if Mixfix.is_empty mx then ""
else "\nDropping mixfix syntax " ^ Pretty.string_of (Mixfix.pretty_mixfix mx)))
else (); NoSyn);
fun check_mixfix_global (b, no_params) mx =
if no_params orelse Mixfix.is_empty mx then mx
else
(warning ("Dropping global mixfix syntax: " ^ Binding.print b ^ " " ^
Pretty.string_of (Mixfix.pretty_mixfix mx)); NoSyn);
fun same_const (Const (c, _), Const (c', _)) = c = c'
| same_const (t $ _, t' $ _) = same_const (t, t')
| same_const (_, _) = false;
-fun const_decl phi_pred prmode ((b, mx), rhs) phi context =
+fun const_decl phi_pred prmode ((b, mx), rhs) = Morphism.entity (fn phi => fn context =>
if phi_pred phi then
let
val b' = Morphism.binding phi b;
val rhs' = Morphism.term phi rhs;
val same_shape = Term.aconv_untyped (rhs, rhs');
val same_stem = same_shape orelse same_const (rhs, rhs');
val const_alias =
if same_shape then
(case rhs' of
Const (c, T) =>
let
val thy = Context.theory_of context;
val ctxt = Context.proof_of context;
in
(case Type_Infer_Context.const_type ctxt c of
SOME T' => if Sign.typ_equiv thy (T, T') then SOME c else NONE
| NONE => NONE)
end
| _ => NONE)
else NONE;
in
(case const_alias of
SOME c =>
context
|> Context.mapping (Sign.const_alias b' c) (Proof_Context.const_alias b' c)
- |> Morphism.form (Proof_Context.generic_notation true prmode [(rhs', mx)])
+ |> Morphism.form_entity (Proof_Context.generic_notation true prmode [(rhs', mx)])
| NONE =>
context
|> Proof_Context.generic_add_abbrev Print_Mode.internal
(b', Term.close_schematic_term rhs')
|-> (fn (const as Const (c, _), _) => same_stem ?
(Proof_Context.generic_revert_abbrev (#1 prmode) c #>
same_shape ?
- Morphism.form (Proof_Context.generic_notation true prmode [(const, mx)]))))
+ Morphism.form_entity (Proof_Context.generic_notation true prmode [(const, mx)]))))
end
- else context;
+ else context);
(** background primitives **)
structure Foundation_Interpretations = Theory_Data
(
- type T = (binding * (term * term list) -> Context.generic -> Context.generic) Inttab.table;
- val empty = Inttab.empty;
- val merge = Inttab.merge (K true);
+ type T = ((binding * (term * term list) -> Context.generic -> Context.generic) * stamp) list
+ val empty = [];
+ val merge = Library.merge (eq_snd (op =));
);
fun add_foundation_interpretation f =
- Foundation_Interpretations.map (Inttab.update_new (serial (), f));
+ Foundation_Interpretations.map (cons (f, stamp ()));
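(* Hedged usage sketch (editorial, not part of the original sources): a theory
   setup may register a (here vacuous) hook, e.g.

     Generic_Target.add_foundation_interpretation
       (fn (_, (_, _)) => fn context => context) thy;

   foundation_interpretation below applies every registered hook to the
   (binding, (const, params)) of a new foundational constant. *)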
fun foundation_interpretation binding_const_params lthy =
let
val interps = Foundation_Interpretations.get (Proof_Context.theory_of lthy);
- val interp = Inttab.fold (fn (_, f) => f binding_const_params) interps;
+ val interp = fold (fn (f, _) => f binding_const_params) interps;
in
lthy
|> Local_Theory.background_theory (Context.theory_map interp)
|> Local_Theory.map_contexts (K (Context.proof_map interp))
end;
fun background_foundation (((b, U), mx), (b_def, rhs)) (type_params, term_params) lthy =
let
val params = type_params @ term_params;
val target_params = type_params
@ take_prefix is_Free (Variable.export_terms lthy (Local_Theory.target_of lthy) term_params);
val mx' = check_mixfix_global (b, null params) mx;
val (const, lthy2) = lthy
|> Local_Theory.background_theory_result (Sign.declare_const lthy ((b, U), mx'));
val lhs = Term.list_comb (const, params);
val ((_, def), lthy3) = lthy2
|> Local_Theory.background_theory_result
(Thm.add_def (Proof_Context.defs_context lthy2) false false
(Thm.def_binding_optional b b_def, Logic.mk_equals (lhs, rhs)))
||> foundation_interpretation (b, (const, target_params));
in ((lhs, def), lthy3) end;
fun background_declaration decl lthy =
let
fun theory_decl context =
Local_Theory.standard_form lthy
(Proof_Context.init_global (Context.theory_of context)) decl context;
in Local_Theory.background_theory (Context.theory_map theory_decl) lthy end;
fun background_abbrev (b, global_rhs) params =
Local_Theory.background_theory_result (Sign.add_abbrev Print_Mode.internal (b, global_rhs))
#>> apply2 (fn t => Term.list_comb (Logic.unvarify_global t, params))
(** nested local theories primitives **)
fun standard_facts lthy ctxt =
Attrib.transform_facts (Local_Theory.standard_morphism lthy ctxt);
fun standard_notes pred kind facts lthy =
Local_Theory.map_contexts (fn level => fn ctxt =>
if pred (Local_Theory.level lthy, level)
then Attrib.local_notes kind (standard_facts lthy ctxt facts) ctxt |> snd
else ctxt) lthy;
fun standard_declaration pred decl lthy =
Local_Theory.map_contexts (fn level => fn ctxt =>
if pred (Local_Theory.level lthy, level)
then Context.proof_map (Local_Theory.standard_form lthy ctxt decl) ctxt
else ctxt) lthy;
fun standard_const pred prmode ((b, mx), rhs) =
standard_declaration pred (const_decl (K true) prmode ((b, mx), rhs));
fun standard_registration pred registration lthy =
Local_Theory.map_contexts (fn level =>
if pred (Local_Theory.level lthy, level)
then Context.proof_map (Locale.add_registration registration)
else I) lthy;
val local_interpretation = standard_registration (fn (n, level) => level >= n - 1);
(** lifting target primitives to local theory operations **)
(* define *)
fun define foundation internal ((b, mx), ((b_def, atts), rhs)) lthy =
let
val thy_ctxt = Proof_Context.init_global (Proof_Context.theory_of lthy);
(*term and type parameters*)
val ((defs, _), rhs') = Thm.cterm_of lthy rhs
|> Local_Defs.export_cterm lthy thy_ctxt ||> Thm.term_of;
val xs = Variable.add_fixed lthy rhs' [];
val T = Term.fastype_of rhs;
val type_tfrees = TFrees.build (TFrees.add_tfreesT T #> fold (TFrees.add_tfreesT o #2) xs);
val extra_tfrees =
TFrees.build (rhs |> TFrees.add_tfrees_unless (TFrees.defined type_tfrees))
|> TFrees.keys;
val mx' = check_mixfix lthy (b, extra_tfrees) mx;
val type_params = map (Logic.mk_type o TFree) extra_tfrees;
val term_params = map Free (sort (Variable.fixed_ord lthy o apply2 #1) xs);
val params = type_params @ term_params;
val U = map Term.fastype_of params ---> T;
(*foundation*)
val ((lhs', global_def), lthy2) = lthy
|> foundation (((b, U), mx'), (b_def, rhs')) (type_params, term_params);
(*local definition*)
val ([(lhs, (_, local_def))], lthy3) = lthy2
|> Context_Position.set_visible false
|> Local_Defs.define [((b, NoSyn), (Binding.empty_atts, lhs'))]
||> Context_Position.restore_visible lthy2;
(*result*)
val def =
Thm.transitive local_def global_def
|> Local_Defs.contract lthy3 defs (Thm.cterm_of lthy3 (Logic.mk_equals (lhs, rhs)));
val ([(res_name, [res])], lthy4) = lthy3
|> Local_Theory.notes [((if internal then Binding.empty else b_def, atts), [([def], [])])];
in ((lhs, (res_name, res)), lthy4) end;
(* notes *)
local
fun import_export_proof ctxt (name, raw_th) =
let
val thy_ctxt = Proof_Context.init_global (Proof_Context.theory_of ctxt);
(*export assumes/defines*)
val th = Goal.norm_result ctxt raw_th;
val ((defs, asms), th') = Local_Defs.export ctxt thy_ctxt th;
val asms' = map (rewrite_rule ctxt (Drule.norm_hhf_eqs @ defs)) asms;
(*export fixes*)
val tfrees =
TFrees.build (Thm.fold_terms {hyps = true} TFrees.add_tfrees th')
|> TFrees.keys |> map TFree;
val frees =
Frees.build (Thm.fold_terms {hyps = true} Frees.add_frees th')
|> Frees.list_set_rev |> map Free;
val (th'' :: vs) =
(th' :: map (Drule.mk_term o Thm.cterm_of ctxt) (map Logic.mk_type tfrees @ frees))
|> Variable.export ctxt thy_ctxt
|> Drule.zero_var_indexes_list;
(*thm definition*)
val result = Global_Theory.name_thm Global_Theory.official1 name th'';
(*import fixes*)
val (tvars, vars) =
chop (length tfrees) (map (Thm.term_of o Drule.dest_term) vs)
|>> map Logic.dest_type;
val instT =
TVars.build (fold2 (fn a => fn b =>
(case a of TVar v => TVars.add (v, b) | _ => I)) tvars tfrees);
val cinstT = TVars.map (K (Thm.ctyp_of ctxt)) instT;
val cinst =
Vars.build
(fold2 (fn v => fn t =>
(case v of
Var (xi, T) =>
Vars.add ((xi, Term_Subst.instantiateT instT T),
Thm.cterm_of ctxt (Term.map_types (Term_Subst.instantiateT instT) t))
| _ => I)) vars frees);
val result' = Thm.instantiate (cinstT, cinst) result;
(*import assumes/defines*)
val result'' =
(fold (curry op COMP) asms' result'
handle THM _ => raise THM ("Failed to re-import result", 0, result' :: asms'))
|> Local_Defs.contract ctxt defs (Thm.cprop_of th)
|> Goal.norm_result ctxt
|> Global_Theory.name_thm Global_Theory.unofficial2 name;
in (result'', result) end;
fun bind_name lthy b =
(Local_Theory.full_name lthy b, Binding.default_pos_of b);
fun map_facts f = map (apsnd (map (apfst (map f))));
in
fun notes target_notes kind facts lthy =
let
val facts' = facts
|> map (fn (a, bs) =>
(a, Global_Theory.burrow_fact (Global_Theory.name_multi (bind_name lthy (fst a))) bs))
|> map_facts (import_export_proof lthy);
val local_facts = map_facts #1 facts';
val global_facts = map_facts #2 facts';
in
lthy
|> target_notes kind global_facts (Attrib.partial_evaluation lthy local_facts)
|> Attrib.local_notes kind local_facts
end;
end;
(* abbrev *)
fun abbrev target_abbrev prmode ((b, mx), rhs) lthy =
let
val (global_rhs, (extra_tfrees, (type_params, term_params))) = export_abbrev lthy I rhs;
val mx' = check_mixfix lthy (b, extra_tfrees) mx;
in
lthy
|> target_abbrev prmode (b, mx') global_rhs (type_params, term_params)
|> Context_Position.set_visible false
|> Proof_Context.add_abbrev Print_Mode.internal (b, rhs) |> snd
|> Local_Defs.fixed_abbrev ((b, NoSyn), rhs)
||> Context_Position.restore_visible lthy
end;
(** theory target primitives **)
fun theory_target_foundation (((b, U), mx), (b_def, rhs)) (type_params, term_params) =
background_foundation (((b, U), mx), (b_def, rhs)) (type_params, term_params)
#-> (fn (lhs, def) => standard_const (op <>) Syntax.mode_default ((b, mx), lhs)
#> pair (lhs, def));
fun theory_target_notes kind global_facts local_facts =
Local_Theory.background_theory (Attrib.global_notes kind global_facts #> snd)
#> standard_notes (op <>) kind local_facts;
fun theory_target_abbrev prmode (b, mx) global_rhs params =
Local_Theory.background_theory_result
(Sign.add_abbrev (#1 prmode) (b, global_rhs) #->
(fn (lhs, _) => (* FIXME type_params!? *)
Sign.notation true prmode
[(lhs, check_mixfix_global (b, null (snd params)) mx)] #> pair lhs))
#-> (fn lhs =>
standard_const (op <>) prmode
((b, if null (snd params) then NoSyn else mx),
Term.list_comb (Logic.unvarify_global lhs, snd params)));
(** theory operations **)
val theory_abbrev = abbrev theory_target_abbrev;
fun theory_declaration decl =
background_declaration decl #> standard_declaration (K true) decl;
fun target_registration lthy {inst, mixin, export} =
{inst = inst, mixin = mixin,
export = export $> Proof_Context.export_morphism lthy (Local_Theory.target_of lthy)};
fun theory_registration registration lthy =
lthy
|> (Local_Theory.raw_theory o Context.theory_map)
(Locale.add_registration (target_registration lthy registration))
|> standard_registration (K true) registration;
(** locale target primitives **)
fun locale_target_notes locale kind global_facts local_facts =
Local_Theory.background_theory
(Attrib.global_notes kind (Attrib.map_facts (K []) global_facts) #> snd) #>
(fn lthy => lthy |>
Local_Theory.target (fn ctxt => ctxt |>
Locale.add_facts locale kind (standard_facts lthy ctxt local_facts))) #>
standard_notes (fn (this, other) => other <> 0 andalso this <> other) kind local_facts;
-fun locale_target_declaration locale syntax decl lthy = lthy
+fun locale_target_declaration locale params decl lthy = lthy
|> Local_Theory.target (fn ctxt => ctxt |>
- Locale.add_declaration locale syntax
+ Locale.add_declaration locale params
(Morphism.transform (Local_Theory.standard_morphism lthy ctxt) decl));
fun locale_target_const locale phi_pred prmode ((b, mx), rhs) =
- locale_target_declaration locale true (const_decl phi_pred prmode ((b, mx), rhs))
+ locale_target_declaration locale {syntax = true, pos = Binding.pos_of b}
+ (const_decl phi_pred prmode ((b, mx), rhs));
(** locale operations **)
-fun locale_declaration locale {syntax, pervasive} decl =
+fun locale_declaration locale {syntax, pervasive, pos} decl =
pervasive ? background_declaration decl
- #> locale_target_declaration locale syntax decl
+ #> locale_target_declaration locale {syntax = syntax, pos = pos} decl
#> standard_declaration (fn (_, other) => other <> 0) decl;
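(* Hedged usage sketch (editorial, not from the original sources): after the
   signature change above, a client supplies an explicit position and a
   morphism entity, e.g. a vacuous declaration for some locale name loc:

     Generic_Target.locale_declaration loc
       {syntax = false, pervasive = false, pos = Position.thread_data ()}
       (Morphism.entity (fn _ => fn context => context));
*)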
fun locale_const locale prmode ((b, mx), rhs) =
locale_target_const locale (K true) prmode ((b, mx), rhs)
#> standard_const (fn (this, other) => other <> 0 andalso this <> other) prmode ((b, mx), rhs);
fun locale_dependency loc registration lthy =
lthy
|> Local_Theory.raw_theory (Locale.add_dependency loc registration)
|> standard_registration (K true) registration;
(** locale abbreviations **)
fun locale_target_abbrev locale prmode (b, mx) global_rhs params =
background_abbrev (b, global_rhs) (snd params)
#-> (fn (lhs, _) => locale_const locale prmode ((b, mx), lhs));
fun locale_abbrev locale = abbrev (locale_target_abbrev locale);
end;
diff --git a/src/Pure/Isar/interpretation.ML b/src/Pure/Isar/interpretation.ML
--- a/src/Pure/Isar/interpretation.ML
+++ b/src/Pure/Isar/interpretation.ML
@@ -1,245 +1,245 @@
(* Title: Pure/Isar/interpretation.ML
Author: Clemens Ballarin, TU Muenchen
Author: Florian Haftmann, TU Muenchen
Locale interpretation.
*)
signature INTERPRETATION =
sig
type 'a defines = (Attrib.binding * ((binding * mixfix) * 'a)) list
(*interpretation in proofs*)
val interpret: Expression.expression_i -> Proof.state -> Proof.state
val interpret_cmd: Expression.expression -> Proof.state -> Proof.state
(*interpretation in local theories*)
val interpretation: Expression.expression_i -> local_theory -> Proof.state
val interpretation_cmd: Expression.expression -> local_theory -> Proof.state
(*interpretation into global theories*)
val global_interpretation: Expression.expression_i ->
term defines -> local_theory -> Proof.state
val global_interpretation_cmd: Expression.expression ->
string defines -> local_theory -> Proof.state
(*interpretation between locales*)
val sublocale: Expression.expression_i ->
term defines -> local_theory -> Proof.state
val sublocale_cmd: Expression.expression ->
string defines -> local_theory -> Proof.state
val global_sublocale: string -> Expression.expression_i ->
term defines -> theory -> Proof.state
val global_sublocale_cmd: xstring * Position.T -> Expression.expression ->
string defines -> theory -> Proof.state
(*mixed Isar interface*)
val isar_interpretation: Expression.expression_i -> local_theory -> Proof.state
val isar_interpretation_cmd: Expression.expression -> local_theory -> Proof.state
end;
structure Interpretation : INTERPRETATION =
struct
(** common interpretation machinery **)
type 'a defines = (Attrib.binding * ((binding * mixfix) * 'a)) list
(* reading of locale expressions with rewrite morphisms *)
local
fun augment_with_def prep_term ((name, atts), ((b, mx), raw_rhs)) lthy =
let
val rhs = prep_term lthy raw_rhs;
val lthy' = Variable.declare_term rhs lthy;
val ((_, (_, def)), lthy'') =
Local_Theory.define ((b, mx), ((Thm.def_binding_optional b name, atts), rhs)) lthy';
in (Thm.symmetric def, lthy'') end;
fun augment_with_defs _ [] _ = pair []
(*quasi-inhomogeneous type: definitions demand local theory rather than bare proof context*)
| augment_with_defs prep_term raw_defs deps =
Local_Theory.begin_nested
#> snd
#> fold Locale.activate_declarations deps
#> fold_map (augment_with_def prep_term) raw_defs
#> Local_Theory.end_nested_result Morphism.fact;
fun prep_interpretation prep_expr prep_term
expression raw_defs initial_ctxt =
let
val ((propss, eq_propss, deps, eqnss, export), expr_ctxt) = prep_expr expression initial_ctxt;
val (def_eqns, def_ctxt) =
augment_with_defs prep_term raw_defs deps expr_ctxt;
val export' = Proof_Context.export_morphism def_ctxt expr_ctxt;
in (((propss, eq_propss, deps, eqnss, export, export'), def_eqns), def_ctxt) end;
in
fun cert_interpretation expression =
prep_interpretation Expression.cert_goal_expression Syntax.check_term expression;
fun read_interpretation expression =
prep_interpretation Expression.read_goal_expression Syntax.read_term expression;
end;
(* interpretation machinery *)
local
fun abs_def_rule eqns ctxt =
(map (Local_Defs.abs_def_rule ctxt) (maps snd eqns), ctxt);
fun note_eqns_register note add_registration
deps eqnss witss def_eqns thms export export' ctxt =
let
val factss = thms
|> unflat ((map o map) #1 eqnss)
- |> map2 (map2 (fn b => fn eq => (b, [([Morphism.thm export eq], [])]))) ((map o map) #1 eqnss);
+ |> map2 (map2 (fn b => fn eq =>
+ (b, [([Morphism.thm export (Thm.transfer' ctxt eq)], [])]))) ((map o map) #1 eqnss);
val (eqnss', ctxt') =
fold_map (fn facts => note Thm.theoremK facts #-> abs_def_rule) factss ctxt;
val defs = (Binding.empty_atts, [(map (Morphism.thm (export' $> export)) def_eqns, [])]);
val (eqns', ctxt'') = ctxt' |> note Thm.theoremK [defs] |-> abs_def_rule;
+ val transform_witness = Element.transform_witness (Morphism.set_trim_context' ctxt' export');
val deps' =
(deps ~~ witss) |> map (fn ((dep, morph), wits) =>
- (dep, morph $> Element.satisfy_morphism (map (Element.transform_witness export') wits)));
+ (dep, morph $> Element.satisfy_morphism (map transform_witness wits)));
fun register (dep, eqns) ctxt =
ctxt |> add_registration
{inst = dep,
- mixin =
- Option.map (rpair true)
- (Element.eq_morphism (Proof_Context.theory_of ctxt) (eqns @ eqns')),
+ mixin = Option.map (rpair true) (Element.eq_morphism (eqns @ eqns')),
export = export};
in ctxt'' |> fold register (deps' ~~ eqnss') end;
in
fun generic_interpretation prep_interpretation setup_proof note add_registration
expression raw_defs initial_ctxt =
let
val (((propss, eq_propss, deps, eqnss, export, export'), def_eqns), goal_ctxt) =
prep_interpretation expression raw_defs initial_ctxt;
fun after_qed witss eqns =
note_eqns_register note add_registration deps eqnss witss def_eqns eqns export export';
in setup_proof after_qed propss (flat eq_propss) goal_ctxt end;
end;
(** interfaces **)
(* interpretation in proofs *)
local
fun setup_proof state after_qed propss eqns goal_ctxt =
Element.witness_local_proof_eqs
(fn witss => fn eqns => Proof.map_context (after_qed witss eqns) #> Proof.reset_facts)
"interpret" propss eqns goal_ctxt state;
fun add_registration_proof registration ctxt = ctxt
|> Proof_Context.set_stmt false
|> Context.proof_map (Locale.add_registration registration)
|> Proof_Context.restore_stmt ctxt;
fun gen_interpret prep_interpretation expression state =
Proof.assert_forward_or_chain state
|> Proof.context_of
|> generic_interpretation prep_interpretation (setup_proof state)
Attrib.local_notes add_registration_proof expression [];
in
val interpret = gen_interpret cert_interpretation;
val interpret_cmd = gen_interpret read_interpretation;
end;
(* interpretation in local theories *)
val add_registration_local_theory =
Named_Target.revoke_reinitializability oo Generic_Target.local_interpretation;
fun interpretation expression =
generic_interpretation cert_interpretation Element.witness_proof_eqs
Local_Theory.notes_kind add_registration_local_theory expression [];
fun interpretation_cmd expression =
generic_interpretation read_interpretation Element.witness_proof_eqs
Local_Theory.notes_kind add_registration_local_theory expression [];
(* interpretation into global theories *)
fun global_interpretation expression =
generic_interpretation cert_interpretation Element.witness_proof_eqs
Local_Theory.notes_kind Local_Theory.theory_registration expression;
fun global_interpretation_cmd expression =
generic_interpretation read_interpretation Element.witness_proof_eqs
Local_Theory.notes_kind Local_Theory.theory_registration expression;
(* interpretation between locales *)
fun sublocale expression =
generic_interpretation cert_interpretation Element.witness_proof_eqs
Local_Theory.notes_kind Local_Theory.locale_dependency expression;
fun sublocale_cmd expression =
generic_interpretation read_interpretation Element.witness_proof_eqs
Local_Theory.notes_kind Local_Theory.locale_dependency expression;
local
fun gen_global_sublocale prep_loc prep_interpretation
raw_locale expression raw_defs thy =
let
val lthy = Named_Target.init [] (prep_loc thy raw_locale) thy;
fun setup_proof after_qed =
Element.witness_proof_eqs
(fn wits => fn eqs => after_qed wits eqs #> Local_Theory.exit);
in
lthy |>
generic_interpretation prep_interpretation setup_proof
Local_Theory.notes_kind Local_Theory.locale_dependency expression raw_defs
end;
in
fun global_sublocale expression =
gen_global_sublocale (K I) cert_interpretation expression;
fun global_sublocale_cmd raw_expression =
gen_global_sublocale Locale.check read_interpretation raw_expression;
end;
(* mixed Isar interface *)
local
fun register_or_activate lthy =
if Named_Target.is_theory lthy
then Local_Theory.theory_registration
else add_registration_local_theory;
fun gen_isar_interpretation prep_interpretation expression lthy =
generic_interpretation prep_interpretation Element.witness_proof_eqs
Local_Theory.notes_kind (register_or_activate lthy) expression [] lthy;
in
fun isar_interpretation expression =
gen_isar_interpretation cert_interpretation expression;
fun isar_interpretation_cmd raw_expression =
gen_isar_interpretation read_interpretation raw_expression;
end;
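(* Editorial illustration (hypothetical, not from the original sources): entry
   points such as isar_interpretation_cmd back Isar-level interpretation
   commands, e.g.

     interpretation nat_le: preorder "\<lambda>m n::nat. m \<le> n"
       by unfold_locales simp

   where the proof obligations are exactly the propss set up by
   generic_interpretation. *)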
end;
diff --git a/src/Pure/Isar/isar_cmd.ML b/src/Pure/Isar/isar_cmd.ML
--- a/src/Pure/Isar/isar_cmd.ML
+++ b/src/Pure/Isar/isar_cmd.ML
@@ -1,301 +1,302 @@
(* Title: Pure/Isar/isar_cmd.ML
Author: Markus Wenzel, TU Muenchen
Miscellaneous Isar commands.
*)
signature ISAR_CMD =
sig
val setup: Input.source -> theory -> theory
val local_setup: Input.source -> Proof.context -> Proof.context
val parse_ast_translation: Input.source -> theory -> theory
val parse_translation: Input.source -> theory -> theory
val print_translation: Input.source -> theory -> theory
val typed_print_translation: Input.source -> theory -> theory
val print_ast_translation: Input.source -> theory -> theory
val translations: (xstring * string) Syntax.trrule list -> theory -> theory
val no_translations: (xstring * string) Syntax.trrule list -> theory -> theory
val oracle: bstring * Position.range -> Input.source -> theory -> theory
val declaration: {syntax: bool, pervasive: bool} -> Input.source -> local_theory -> local_theory
val simproc_setup: string * Position.T -> string list -> Input.source ->
local_theory -> local_theory
val qed: Method.text_range option -> Toplevel.transition -> Toplevel.transition
val terminal_proof: Method.text_range * Method.text_range option ->
Toplevel.transition -> Toplevel.transition
val default_proof: Toplevel.transition -> Toplevel.transition
val immediate_proof: Toplevel.transition -> Toplevel.transition
val done_proof: Toplevel.transition -> Toplevel.transition
val skip_proof: Toplevel.transition -> Toplevel.transition
val ml_diag: bool -> Input.source -> Toplevel.transition -> Toplevel.transition
val diag_state: Proof.context -> Toplevel.state
val diag_goal: Proof.context -> {context: Proof.context, facts: thm list, goal: thm}
val pretty_theorems: bool -> Toplevel.state -> Pretty.T list
val print_stmts: string list * (Facts.ref * Token.src list) list
-> Toplevel.transition -> Toplevel.transition
val print_thms: string list * (Facts.ref * Token.src list) list
-> Toplevel.transition -> Toplevel.transition
val print_prfs: bool -> string list * (Facts.ref * Token.src list) list option
-> Toplevel.transition -> Toplevel.transition
val print_prop: (string list * string) -> Toplevel.transition -> Toplevel.transition
val print_term: (string list * string) -> Toplevel.transition -> Toplevel.transition
val print_type: (string list * (string * string option)) ->
Toplevel.transition -> Toplevel.transition
end;
structure Isar_Cmd: ISAR_CMD =
struct
(** theory declarations **)
(* generic setup *)
fun setup source =
ML_Context.expression (Input.pos_of source)
(ML_Lex.read "Theory.setup (" @ ML_Lex.read_source source @ ML_Lex.read ")")
|> Context.theory_map;
fun local_setup source =
ML_Context.expression (Input.pos_of source)
(ML_Lex.read "Theory.local_setup (" @ ML_Lex.read_source source @ ML_Lex.read ")")
|> Context.proof_map;
(* translation functions *)
fun parse_ast_translation source =
ML_Context.expression (Input.pos_of source)
(ML_Lex.read "Theory.setup (Sign.parse_ast_translation (" @
ML_Lex.read_source source @ ML_Lex.read "))")
|> Context.theory_map;
fun parse_translation source =
ML_Context.expression (Input.pos_of source)
(ML_Lex.read "Theory.setup (Sign.parse_translation (" @
ML_Lex.read_source source @ ML_Lex.read "))")
|> Context.theory_map;
fun print_translation source =
ML_Context.expression (Input.pos_of source)
(ML_Lex.read "Theory.setup (Sign.print_translation (" @
ML_Lex.read_source source @ ML_Lex.read "))")
|> Context.theory_map;
fun typed_print_translation source =
ML_Context.expression (Input.pos_of source)
(ML_Lex.read "Theory.setup (Sign.typed_print_translation (" @
ML_Lex.read_source source @ ML_Lex.read "))")
|> Context.theory_map;
fun print_ast_translation source =
ML_Context.expression (Input.pos_of source)
(ML_Lex.read "Theory.setup (Sign.print_ast_translation (" @
ML_Lex.read_source source @ ML_Lex.read "))")
|> Context.theory_map;
(* translation rules *)
fun read_trrules thy raw_rules =
let
val ctxt = Proof_Context.init_global thy;
val read_root =
#1 o dest_Type o Proof_Context.read_type_name {proper = true, strict = false} ctxt;
in
raw_rules
|> map (Syntax.map_trrule (fn (r, s) => Syntax_Phases.parse_ast_pattern ctxt (read_root r, s)))
end;
fun translations args thy = Sign.add_trrules (read_trrules thy args) thy;
fun no_translations args thy = Sign.del_trrules (read_trrules thy args) thy;
(* oracles *)
fun oracle (name, range) source =
ML_Context.expression (Input.pos_of source)
(ML_Lex.read "val " @
ML_Lex.read_range range name @
ML_Lex.read
(" = snd (Context.>>> (Context.map_theory_result (Thm.add_oracle (" ^
ML_Syntax.make_binding (name, #1 range) ^ ", ") @
ML_Lex.read_source source @
ML_Lex.read "))))")
|> Context.theory_map;
(* declarations *)
fun declaration {syntax, pervasive} source =
ML_Context.expression (Input.pos_of source)
(ML_Lex.read
("Theory.local_setup (Local_Theory.declaration {syntax = " ^
- Bool.toString syntax ^ ", pervasive = " ^ Bool.toString pervasive ^ "} (") @
+ Bool.toString syntax ^ ", pervasive = " ^ Bool.toString pervasive ^
+ ", pos = " ^ ML_Syntax.print_position (Position.thread_data ()) ^ "} (") @
ML_Lex.read_source source @ ML_Lex.read "))")
|> Context.proof_map;
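(* Editorial sketch (not part of the original sources): for
   {syntax = false, pervasive = true} the ML text assembled above reads roughly

     Theory.local_setup (Local_Theory.declaration
       {syntax = false, pervasive = true, pos = <position literal>} (
         <user source> ))

   where the position literal is printed from Position.thread_data (). *)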
(* simprocs *)
fun simproc_setup name lhss source =
ML_Context.expression (Input.pos_of source)
(ML_Lex.read
("Theory.local_setup (Simplifier.define_simproc_cmd (" ^
ML_Syntax.make_binding name ^ ") {lhss = " ^ ML_Syntax.print_strings lhss ^
", proc = (") @ ML_Lex.read_source source @ ML_Lex.read ")})")
|> Context.proof_map;
(* local endings *)
fun local_qed m = Toplevel.proof (Proof.local_qed (m, true));
val local_terminal_proof = Toplevel.proof o Proof.local_future_terminal_proof;
val local_default_proof = Toplevel.proof Proof.local_default_proof;
val local_immediate_proof = Toplevel.proof Proof.local_immediate_proof;
val local_done_proof = Toplevel.proof Proof.local_done_proof;
val local_skip_proof = Toplevel.proof' Proof.local_skip_proof;
(* global endings *)
fun global_qed m = Toplevel.end_proof (K (Proof.global_qed (m, true)));
val global_terminal_proof = Toplevel.end_proof o K o Proof.global_future_terminal_proof;
val global_default_proof = Toplevel.end_proof (K Proof.global_default_proof);
val global_immediate_proof = Toplevel.end_proof (K Proof.global_immediate_proof);
val global_skip_proof = Toplevel.end_proof Proof.global_skip_proof;
val global_done_proof = Toplevel.end_proof (K Proof.global_done_proof);
(* common endings *)
fun qed m = local_qed m o global_qed m;
fun terminal_proof m = local_terminal_proof m o global_terminal_proof m;
val default_proof = local_default_proof o global_default_proof;
val immediate_proof = local_immediate_proof o global_immediate_proof;
val done_proof = local_done_proof o global_done_proof;
val skip_proof = local_skip_proof o global_skip_proof;
(* diagnostic ML evaluation *)
structure Diag_State = Proof_Data
(
type T = Toplevel.state option;
fun init _ = NONE;
);
fun ml_diag verbose source = Toplevel.keep (fn state =>
let
val opt_ctxt =
try Toplevel.generic_theory_of state
|> Option.map (Context.proof_of #> Diag_State.put (SOME state));
val flags = ML_Compiler.verbose verbose ML_Compiler.flags;
in ML_Context.eval_source_in opt_ctxt flags source end);
fun diag_state ctxt =
(case Diag_State.get ctxt of
SOME st => st
| NONE => Toplevel.make_state NONE);
val diag_goal = Proof.goal o Toplevel.proof_of o diag_state;
val _ = Theory.setup
(ML_Antiquotation.value (Binding.qualify true "Isar" \<^binding>\<open>state\<close>)
(Scan.succeed "Isar_Cmd.diag_state ML_context") #>
ML_Antiquotation.value (Binding.qualify true "Isar" \<^binding>\<open>goal\<close>)
(Scan.succeed "Isar_Cmd.diag_goal ML_context"));
(* theorems of theory or proof context *)
fun pretty_theorems verbose st =
if Toplevel.is_proof st then
Proof_Context.pretty_local_facts verbose (Toplevel.context_of st)
else
let
val ctxt = Toplevel.context_of st;
val prev_thys =
(case Toplevel.previous_theory_of st of
SOME thy => [thy]
| NONE => Theory.parents_of (Proof_Context.theory_of ctxt));
in Proof_Display.pretty_theorems_diff verbose prev_thys ctxt end;
(* print theorems, terms, types etc. *)
local
fun string_of_stmts ctxt args =
Attrib.eval_thms ctxt args
|> map (Element.pretty_statement ctxt Thm.theoremK)
|> Pretty.chunks2 |> Pretty.string_of;
fun string_of_thms ctxt args =
Pretty.string_of (Proof_Context.pretty_fact ctxt ("", Attrib.eval_thms ctxt args));
fun string_of_prfs full state arg =
Pretty.string_of
(case arg of
NONE =>
let
val {context = ctxt, goal = thm} = Proof.simple_goal (Toplevel.proof_of state);
val thy = Proof_Context.theory_of ctxt;
val prf = Thm.proof_of thm;
val prop = Thm.full_prop_of thm;
val prf' = Proofterm.rewrite_proof_notypes ([], []) prf;
in
Proof_Syntax.pretty_proof ctxt
(if full then Proofterm.reconstruct_proof thy prop prf' else prf')
end
| SOME srcs =>
let
val ctxt = Toplevel.context_of state;
val pretty_proof = Proof_Syntax.pretty_standard_proof_of ctxt full;
in Pretty.chunks (map pretty_proof (Attrib.eval_thms ctxt srcs)) end);
fun string_of_prop ctxt s =
let
val prop = Syntax.read_prop ctxt s;
val ctxt' = Proof_Context.augment prop ctxt;
in Pretty.string_of (Pretty.quote (Syntax.pretty_term ctxt' prop)) end;
fun string_of_term ctxt s =
let
val t = Syntax.read_term ctxt s;
val T = Term.type_of t;
val ctxt' = Proof_Context.augment t ctxt;
in
Pretty.string_of
(Pretty.block [Pretty.quote (Syntax.pretty_term ctxt' t), Pretty.fbrk,
Pretty.str "::", Pretty.brk 1, Pretty.quote (Syntax.pretty_typ ctxt' T)])
end;
fun string_of_type ctxt (s, NONE) =
let val T = Syntax.read_typ ctxt s
in Pretty.string_of (Pretty.quote (Syntax.pretty_typ ctxt T)) end
| string_of_type ctxt (s1, SOME s2) =
let
val ctxt' = Config.put show_sorts true ctxt;
val raw_T = Syntax.parse_typ ctxt' s1;
val S = Syntax.read_sort ctxt' s2;
val T =
Syntax.check_term ctxt'
(Logic.mk_type raw_T |> Type.constraint (Term.itselfT (Type_Infer.anyT S)))
|> Logic.dest_type;
in Pretty.string_of (Pretty.quote (Syntax.pretty_typ ctxt' T)) end;
fun print_item string_of (modes, arg) = Toplevel.keep (fn state =>
Print_Mode.with_modes modes (fn () => writeln (string_of state arg)) ());
in
val print_stmts = print_item (string_of_stmts o Toplevel.context_of);
val print_thms = print_item (string_of_thms o Toplevel.context_of);
val print_prfs = print_item o string_of_prfs;
val print_prop = print_item (string_of_prop o Toplevel.context_of);
val print_term = print_item (string_of_term o Toplevel.context_of);
val print_type = print_item (string_of_type o Toplevel.context_of);
end;
end;
diff --git a/src/Pure/Isar/local_defs.ML b/src/Pure/Isar/local_defs.ML
--- a/src/Pure/Isar/local_defs.ML
+++ b/src/Pure/Isar/local_defs.ML
@@ -1,270 +1,270 @@
(* Title: Pure/Isar/local_defs.ML
Author: Makarius
Local definitions.
*)
signature LOCAL_DEFS =
sig
val cert_def: Proof.context -> (string -> Position.T list) -> term -> (string * typ) * term
val abs_def: term -> (string * typ) * term
val expand: cterm list -> thm -> thm
val def_export: Assumption.export
val define: ((binding * mixfix) * (Thm.binding * term)) list -> Proof.context ->
(term * (string * thm)) list * Proof.context
val fixed_abbrev: (binding * mixfix) * term -> Proof.context ->
(term * term) * Proof.context
val export: Proof.context -> Proof.context -> thm -> (thm list * thm list) * thm
val export_cterm: Proof.context -> Proof.context -> cterm -> (thm list * thm list) * cterm
val contract: Proof.context -> thm list -> cterm -> thm -> thm
val print_rules: Proof.context -> unit
val defn_add: attribute
val defn_del: attribute
val meta_rewrite_conv: Proof.context -> conv
val meta_rewrite_rule: Proof.context -> thm -> thm
val abs_def_rule: Proof.context -> thm -> thm
val unfold_abs_def: bool Config.T
val unfold: Proof.context -> thm list -> thm -> thm
val unfold_goals: Proof.context -> thm list -> thm -> thm
val unfold_tac: Proof.context -> thm list -> tactic
val unfold0: Proof.context -> thm list -> thm -> thm
val unfold0_goals: Proof.context -> thm list -> thm -> thm
val unfold0_tac: Proof.context -> thm list -> tactic
val fold: Proof.context -> thm list -> thm -> thm
val fold_tac: Proof.context -> thm list -> tactic
val derived_def: Proof.context -> (string -> Position.T list) -> {conditional: bool} ->
term -> ((string * typ) * term) * (Proof.context -> thm -> thm)
end;
structure Local_Defs: LOCAL_DEFS =
struct
(** primitive definitions **)
(* prepare defs *)
fun cert_def ctxt get_pos eq =
let
fun err msg =
cat_error msg ("The error(s) above occurred in definition:\n" ^
quote (Syntax.string_of_term ctxt eq));
val ((lhs, _), args, eq') = eq
|> Sign.no_vars ctxt
|> Primitive_Defs.dest_def ctxt
{check_head = Term.is_Free,
check_free_lhs = not o Variable.is_fixed ctxt,
check_free_rhs = if Variable.is_body ctxt then K true else Variable.is_fixed ctxt,
check_tfree = K true}
handle TERM (msg, _) => err msg | ERROR msg => err msg;
val _ =
Context_Position.reports ctxt
(maps (fn Free (x, _) => Syntax_Phases.reports_of_scope (get_pos x) | _ => []) args);
in (Term.dest_Free (Term.head_of lhs), eq') end;
val abs_def = Primitive_Defs.abs_def #>> Term.dest_Free;
fun mk_def ctxt args =
let
val (bs, rhss) = split_list args;
val Ts = map Term.fastype_of rhss;
val (xs, _) = ctxt
|> Context_Position.set_visible false
|> Proof_Context.add_fixes (map2 (fn b => fn T => (b, SOME T, NoSyn)) bs Ts);
val lhss = ListPair.map Free (xs, Ts);
in map Logic.mk_equals (lhss ~~ rhss) end;
(* export defs *)
val head_of_def =
Term.dest_Free o Term.head_of o #1 o Logic.dest_equals o Term.strip_all_body;
(*
[x, x \<equiv> a]
:
B x
-----------
B a
*)
fun expand defs =
Drule.implies_intr_list defs
#> Drule.generalize
(Names.empty, Names.build (fold (Names.add_set o #1 o head_of_def o Thm.term_of) defs))
#> funpow (length defs) (fn th => Drule.reflexive_thm RS th);
val expand_term = Envir.expand_term_defs dest_Free o map (abs_def o Thm.term_of);
fun def_export _ defs = (expand defs, expand_term defs);
(* define *)
fun define defs ctxt =
let
val ((xs, mxs), specs) = defs |> split_list |>> split_list;
val (bs, rhss) = specs |> split_list;
val eqs = mk_def ctxt (xs ~~ rhss);
val lhss = map (fst o Logic.dest_equals) eqs;
in
ctxt
|> Proof_Context.add_fixes (map2 (fn x => fn mx => (x, NONE, mx)) xs mxs) |> #2
|> fold Variable.declare_term eqs
|> Proof_Context.add_assms def_export (map2 (fn b => fn eq => (b, [(eq, [])])) bs eqs)
|>> map2 (fn lhs => fn (name, [th]) => (lhs, (name, th))) lhss
end;
(* fixed_abbrev *)
fun fixed_abbrev ((x, mx), rhs) ctxt =
let
val T = Term.fastype_of rhs;
val ([x'], ctxt') = ctxt
|> Variable.declare_term rhs
|> Proof_Context.add_fixes [(x, SOME T, mx)];
val lhs = Free (x', T);
val _ = cert_def ctxt' (K []) (Logic.mk_equals (lhs, rhs));
fun abbrev_export _ _ = (I, Envir.expand_term_defs dest_Free [((x', T), rhs)]);
val (_, ctxt'') = Assumption.add_assms abbrev_export [] ctxt';
in ((lhs, rhs), ctxt'') end;
(* specific export -- result based on educated guessing *)
(*
[xs, xs \<equiv> as]
:
B xs
--------------
B as
*)
fun export inner outer th =
let
val defs_asms =
Assumption.local_assms_of inner outer
|> filter_out (Drule.is_sort_constraint o Thm.term_of)
|> map (Thm.assume #> (fn asm =>
(case try (head_of_def o Thm.prop_of) asm of
NONE => (asm, false)
| SOME x =>
let val t = Free x in
(case try (Assumption.export_term inner outer) t of
NONE => (asm, false)
| SOME u =>
if t aconv u then (asm, false)
else (Drule.abs_def (Variable.gen_all outer asm), true))
end)));
- in (apply2 (map #1) (List.partition #2 defs_asms), Assumption.export false inner outer th) end;
+ in (apply2 (map #1) (List.partition #2 defs_asms), Assumption.export inner outer th) end;
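(* Editorial concrete instance (not from the original sources): with a single
   local definition  x == a  among the assumptions of the inner context,
   export inner outer th returns (([def_thm], []), th'), where def_thm is the
   abs_def form of  x == a  and th' is th with x expanded to a, as pictured
   above. *)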
(*
[xs, xs \<equiv> as]
:
TERM b xs
-------------- and --------------
TERM b as b xs \<equiv> b as
*)
fun export_cterm inner outer ct =
export inner outer (Drule.mk_term ct) ||> Drule.dest_term;
fun contract ctxt defs ct th =
th COMP (Raw_Simplifier.rewrite ctxt true defs ct COMP_INCR Drule.equal_elim_rule2);
(** derived definitions **)
(* transformation via rewrite rules *)
structure Rules = Generic_Data
(
type T = thm list;
val empty = [];
val merge = Thm.merge_thms;
);
fun print_rules ctxt =
Pretty.writeln (Pretty.big_list "definitional rewrite rules:"
(map (Thm.pretty_thm_item ctxt) (Rules.get (Context.Proof ctxt))));
val defn_add = Thm.declaration_attribute (Rules.map o Thm.add_thm o Thm.trim_context);
val defn_del = Thm.declaration_attribute (Rules.map o Thm.del_thm);
(* meta rewrite rules *)
fun meta_rewrite_conv ctxt =
Raw_Simplifier.rewrite_cterm (false, false, false) (K (K NONE))
(ctxt
|> Raw_Simplifier.init_simpset (Rules.get (Context.Proof ctxt))
|> Raw_Simplifier.add_eqcong Drule.equals_cong); (*protect meta-level equality*)
val meta_rewrite_rule = Conv.fconv_rule o meta_rewrite_conv;
fun abs_def_rule ctxt = meta_rewrite_rule ctxt #> Drule.abs_def;
(* unfold object-level rules *)
val unfold_abs_def = Config.declare_bool ("unfold_abs_def", \<^here>) (K true);
local
fun gen_unfold rewrite ctxt rews =
let val meta_rews = map (meta_rewrite_rule ctxt) rews in
if Config.get ctxt unfold_abs_def then
rewrite ctxt meta_rews #>
rewrite ctxt (map (perhaps (try Drule.abs_def)) meta_rews)
else rewrite ctxt meta_rews
end;
val no_unfold_abs_def = Config.put unfold_abs_def false;
in
val unfold = gen_unfold Raw_Simplifier.rewrite_rule;
val unfold_goals = gen_unfold Raw_Simplifier.rewrite_goals_rule;
val unfold_tac = PRIMITIVE oo unfold_goals;
val unfold0 = unfold o no_unfold_abs_def;
val unfold0_goals = unfold_goals o no_unfold_abs_def;
val unfold0_tac = unfold_tac o no_unfold_abs_def;
end
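(* Editorial note (sketch, not part of the original sources): the unfold0 variants above
   coincide with the plain variants run with the unfold_abs_def option disabled, e.g.: *)
fun example_unfold0 ctxt rews = unfold (Config.put unfold_abs_def false ctxt) rews;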
(* fold object-level rules *)
fun fold ctxt rews = Raw_Simplifier.fold_rule ctxt (map (meta_rewrite_rule ctxt) rews);
fun fold_tac ctxt rews = Raw_Simplifier.fold_goals_tac ctxt (map (meta_rewrite_rule ctxt) rews);
(* derived defs -- potentially within the object-logic *)
fun derived_def ctxt get_pos {conditional} prop =
let
val ((c, T), rhs) = prop
|> Thm.cterm_of ctxt
|> meta_rewrite_conv ctxt
|> (snd o Logic.dest_equals o Thm.prop_of)
|> conditional ? Logic.strip_imp_concl
|> (abs_def o #2 o cert_def ctxt get_pos);
fun prove def_ctxt0 def =
let
val def_ctxt = Proof_Context.augment prop def_ctxt0;
val def_thm =
Goal.prove def_ctxt [] [] prop
(fn {context = goal_ctxt, ...} =>
ALLGOALS
(CONVERSION (meta_rewrite_conv goal_ctxt) THEN'
rewrite_goal_tac goal_ctxt [def] THEN'
resolve_tac goal_ctxt [Drule.reflexive_thm]))
handle ERROR msg => cat_error msg "Failed to prove definitional specification";
in
def_thm
|> singleton (Proof_Context.export def_ctxt def_ctxt0)
|> Drule.zero_var_indexes
end;
in (((c, T), rhs), prove) end;
end;
diff --git a/src/Pure/Isar/local_theory.ML b/src/Pure/Isar/local_theory.ML
--- a/src/Pure/Isar/local_theory.ML
+++ b/src/Pure/Isar/local_theory.ML
@@ -1,436 +1,440 @@
(* Title: Pure/Isar/local_theory.ML
Author: Makarius
Local theory operations, with abstract target context.
*)
type local_theory = Proof.context;
type generic_theory = Context.generic;
structure Attrib =
struct
type binding = binding * Token.src list;
type thms = (thm list * Token.src list) list;
type fact = binding * thms;
end;
structure Locale =
struct
type registration = {inst: string * morphism, mixin: (morphism * bool) option, export: morphism};
end;
signature LOCAL_THEORY =
sig
type operations
val assert: local_theory -> local_theory
val level: Proof.context -> int
val map_contexts: (int -> Proof.context -> Proof.context) -> local_theory -> local_theory
val background_naming_of: local_theory -> Name_Space.naming
val map_background_naming: (Name_Space.naming -> Name_Space.naming) ->
local_theory -> local_theory
val restore_background_naming: local_theory -> local_theory -> local_theory
val full_name: local_theory -> binding -> string
val new_group: local_theory -> local_theory
val reset_group: local_theory -> local_theory
val standard_morphism: local_theory -> Proof.context -> morphism
val standard_morphism_theory: local_theory -> morphism
- val standard_form: local_theory -> Proof.context -> (morphism -> 'a) -> 'a
+ val standard_form: local_theory -> Proof.context -> 'a Morphism.entity -> 'a
val raw_theory_result: (theory -> 'a * theory) -> local_theory -> 'a * local_theory
val raw_theory: (theory -> theory) -> local_theory -> local_theory
val background_theory_result: (theory -> 'a * theory) -> local_theory -> 'a * local_theory
val background_theory: (theory -> theory) -> local_theory -> local_theory
val target_of: local_theory -> Proof.context
val target: (Proof.context -> Proof.context) -> local_theory -> local_theory
val target_morphism: local_theory -> morphism
val propagate_ml_env: generic_theory -> generic_theory
val touch_ml_env: generic_theory -> generic_theory
val operations_of: local_theory -> operations
val define: (binding * mixfix) * (Attrib.binding * term) -> local_theory ->
(term * (string * thm)) * local_theory
val define_internal: (binding * mixfix) * (Attrib.binding * term) -> local_theory ->
(term * (string * thm)) * local_theory
val note: Attrib.binding * thm list -> local_theory -> (string * thm list) * local_theory
val notes: Attrib.fact list -> local_theory -> (string * thm list) list * local_theory
val notes_kind: string -> Attrib.fact list -> local_theory ->
(string * thm list) list * local_theory
val abbrev: Syntax.mode -> (binding * mixfix) * term -> local_theory ->
(term * term) * local_theory
- val declaration: {syntax: bool, pervasive: bool} -> declaration -> local_theory -> local_theory
+ val declaration: {syntax: bool, pervasive: bool, pos: Position.T} -> Morphism.declaration ->
+ local_theory -> local_theory
val theory_registration: Locale.registration -> local_theory -> local_theory
val locale_dependency: Locale.registration -> local_theory -> local_theory
val pretty: local_theory -> Pretty.T list
val add_thms_dynamic: binding * (Context.generic -> thm list) -> local_theory -> local_theory
val set_defsort: sort -> local_theory -> local_theory
val syntax: bool -> Syntax.mode -> (string * typ * mixfix) list ->
local_theory -> local_theory
val syntax_cmd: bool -> Syntax.mode -> (string * string * mixfix) list ->
local_theory -> local_theory
val type_notation: bool -> Syntax.mode -> (typ * mixfix) list -> local_theory -> local_theory
val type_notation_cmd: bool -> Syntax.mode -> (string * mixfix) list ->
local_theory -> local_theory
val notation: bool -> Syntax.mode -> (term * mixfix) list -> local_theory -> local_theory
val notation_cmd: bool -> Syntax.mode -> (string * mixfix) list -> local_theory -> local_theory
val type_alias: binding -> string -> local_theory -> local_theory
val const_alias: binding -> string -> local_theory -> local_theory
val init: {background_naming: Name_Space.naming, setup: theory -> Proof.context,
conclude: local_theory -> Proof.context} -> operations -> theory -> local_theory
val exit: local_theory -> Proof.context
val exit_global: local_theory -> theory
val exit_result: (morphism -> 'a -> 'b) -> 'a * local_theory -> 'b * Proof.context
val exit_result_global: (morphism -> 'a -> 'b) -> 'a * local_theory -> 'b * theory
val begin_nested: local_theory -> Binding.scope * local_theory
val end_nested: local_theory -> local_theory
val end_nested_result: (morphism -> 'a -> 'b) -> 'a * local_theory -> 'b * local_theory
end;
signature PRIVATE_LOCAL_THEORY =
sig
include LOCAL_THEORY
val reset: local_theory -> local_theory
end
structure Local_Theory: PRIVATE_LOCAL_THEORY =
struct
(** local theory data **)
(* type lthy *)
type operations =
{define: bool -> (binding * mixfix) * (Attrib.binding * term) -> local_theory ->
(term * (string * thm)) * local_theory,
notes: string -> Attrib.fact list -> local_theory -> (string * thm list) list * local_theory,
abbrev: Syntax.mode -> (binding * mixfix) * term -> local_theory ->
(term * term) * local_theory,
- declaration: {syntax: bool, pervasive: bool} -> declaration -> local_theory -> local_theory,
+ declaration: {syntax: bool, pervasive: bool, pos: Position.T} -> Morphism.declaration_entity ->
+ local_theory -> local_theory,
theory_registration: Locale.registration -> local_theory -> local_theory,
locale_dependency: Locale.registration -> local_theory -> local_theory,
pretty: local_theory -> Pretty.T list};
type lthy =
{background_naming: Name_Space.naming,
operations: operations,
conclude: Proof.context -> Proof.context,
target: Proof.context};
fun make_lthy (background_naming, operations, conclude, target) : lthy =
{background_naming = background_naming, operations = operations,
conclude = conclude, target = target};
(* context data *)
structure Data = Proof_Data
(
type T = lthy list;
fun init _ = [];
);
(* nested structure *)
val level = length o Data.get; (*1: main target at bottom, >= 2: nested target context*)
fun assert lthy =
if level lthy = 0 then error "Missing local theory context" else lthy;
fun assert_bottom lthy =
let
val _ = assert lthy;
in
if level lthy > 1 then error "Not at bottom of local theory nesting"
else lthy
end;
fun assert_not_bottom lthy =
let
val _ = assert lthy;
in
if level lthy = 1 then error "Already at bottom of local theory nesting"
else lthy
end;
val bottom_of = List.last o Data.get o assert;
val top_of = hd o Data.get o assert;
fun map_top f =
assert #>
Data.map (fn {background_naming, operations, conclude, target} :: parents =>
make_lthy (f (background_naming, operations, conclude, target)) :: parents);
fun reset lthy = #target (top_of lthy) |> Data.put (Data.get lthy);
fun map_contexts f lthy =
let val n = level lthy in
lthy |> (Data.map o map_index)
(fn (i, {background_naming, operations, conclude, target}) =>
make_lthy (background_naming, operations, conclude,
target
|> Context_Position.set_visible false
|> f (n - i - 1)
|> Context_Position.restore_visible target))
|> f n
end;
(* naming for background theory *)
val background_naming_of = #background_naming o top_of;
fun map_background_naming f =
map_top (fn (background_naming, operations, conclude, target) =>
(f background_naming, operations, conclude, target));
val restore_background_naming = map_background_naming o K o background_naming_of;
val full_name = Name_Space.full_name o background_naming_of;
val new_group = map_background_naming Name_Space.new_group;
val reset_group = map_background_naming Name_Space.reset_group;
(* standard morphisms *)
fun standard_morphism lthy ctxt =
- Proof_Context.norm_export_morphism lthy ctxt $>
- Morphism.binding_morphism "Local_Theory.standard_binding"
- (Name_Space.transform_binding (Proof_Context.naming_of lthy));
+ Morphism.set_context' lthy
+ (Proof_Context.export_morphism lthy ctxt $>
+ Morphism.thm_morphism' "Local_Theory.standard" (Goal.norm_result o Proof_Context.init_global) $>
+ Morphism.binding_morphism "Local_Theory.standard_binding"
+ (Name_Space.transform_binding (Proof_Context.naming_of lthy)));
fun standard_morphism_theory lthy =
standard_morphism lthy (Proof_Context.init_global (Proof_Context.theory_of lthy));
fun standard_form lthy ctxt x =
Morphism.form (Morphism.transform (standard_morphism lthy ctxt) x);
(* background theory *)
fun raw_theory_result f lthy =
let
val (res, thy') = f (Proof_Context.theory_of lthy);
val lthy' = map_contexts (K (Proof_Context.transfer thy')) lthy;
in (res, lthy') end;
fun raw_theory f = #2 o raw_theory_result (f #> pair ());
fun background_theory_result f lthy =
let
val naming =
background_naming_of lthy
|> Name_Space.transform_naming (Proof_Context.naming_of lthy);
in
lthy |> raw_theory_result (fn thy =>
thy
|> Sign.map_naming (K naming)
|> f
||> Sign.restore_naming thy)
end;
fun background_theory f = #2 o background_theory_result (f #> pair ());
(* target contexts *)
val target_of = #target o bottom_of;
fun target f lthy =
let
val ctxt = target_of lthy;
val ctxt' = ctxt
|> Context_Position.set_visible false
|> f
|> Context_Position.restore_visible ctxt;
val thy' = Proof_Context.theory_of ctxt';
in map_contexts (fn 0 => K ctxt' | _ => Proof_Context.transfer thy') lthy end;
fun target_morphism lthy = standard_morphism lthy (target_of lthy);
fun propagate_ml_env (context as Context.Proof lthy) =
let val inherit = ML_Env.inherit [context] in
lthy
|> background_theory (Context.theory_map inherit)
|> map_contexts (K (Context.proof_map inherit))
|> Context.Proof
end
| propagate_ml_env context = context;
fun touch_ml_env context =
if Context.enabled_tracing () then
(case context of
Context.Theory _ => ML_Env.touch context
| Context.Proof _ => context)
else context;
(** operations **)
val operations_of = #operations o top_of;
fun operation f lthy = f (operations_of lthy) lthy;
fun operation1 f x = operation (fn ops => f ops x);
fun operation2 f x y = operation (fn ops => f ops x y);
(* primitives *)
val pretty = operation #pretty;
val abbrev = operation2 #abbrev;
val define = operation2 #define false;
val define_internal = operation2 #define true;
val notes_kind = operation2 #notes;
-val declaration = operation2 #declaration;
+fun declaration args = operation2 #declaration args o Morphism.entity;
val theory_registration = operation1 #theory_registration;
fun locale_dependency registration =
assert_bottom #> operation1 #locale_dependency registration;
(* theorems *)
val notes = notes_kind "";
fun note (a, ths) = notes [(a, [(ths, [])])] #>> the_single;
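(* Editorial sketch (hypothetical client code, not from the original diff): registering a
   single fact under an illustrative binding "my_fact" in a local theory via "note". *)
fun example_note th lthy =
  lthy |> note ((Binding.name "my_fact", []), [th]);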
fun add_thms_dynamic (binding, f) lthy =
lthy
|> background_theory_result (fn thy => thy
|> Global_Theory.add_thms_dynamic' (Sign.inherit_naming thy lthy) (binding, f))
|-> (fn name =>
map_contexts (fn _ => fn ctxt =>
Proof_Context.transfer_facts (Proof_Context.theory_of ctxt) ctxt) #>
- declaration {syntax = false, pervasive = false} (fn phi =>
+ declaration {syntax = false, pervasive = false, pos = Binding.pos_of binding} (fn phi =>
let val binding' = Morphism.binding phi binding in
Context.mapping
(Global_Theory.alias_fact binding' name)
(Proof_Context.alias_fact binding' name)
end));
(* default sort *)
fun set_defsort S =
- declaration {syntax = true, pervasive = false}
+ declaration {syntax = true, pervasive = false, pos = Position.thread_data ()}
(K (Context.mapping (Sign.set_defsort S) (Proof_Context.set_defsort S)));
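(* Editorial sketch (assumption, not part of the original sources): a trivial use of
   set_defsort, resetting the default sort of a local theory to the background theory's
   default sort. *)
fun example_reset_defsort lthy =
  set_defsort (Sign.defaultS (Proof_Context.theory_of lthy)) lthy;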
(* syntax *)
fun gen_syntax prep_type add mode raw_args lthy =
let
val args = map (fn (c, T, mx) => (c, prep_type lthy T, mx)) raw_args;
val args' = map (fn (c, T, mx) => (c, T, Mixfix.reset_pos mx)) args;
val _ = lthy |> Context_Position.is_visible lthy ? Proof_Context.syntax add mode args;
in
- declaration {syntax = true, pervasive = false}
+ declaration {syntax = true, pervasive = false, pos = Position.thread_data ()}
(fn _ => Proof_Context.generic_syntax add mode args') lthy
end;
val syntax = gen_syntax (K I);
val syntax_cmd = gen_syntax Proof_Context.read_typ_syntax;
(* notation *)
local
fun gen_type_notation prep_type add mode raw_args lthy =
let
val prepare = prep_type lthy #> Logic.type_map (Assumption.export_term lthy (target_of lthy));
val args = map (apfst prepare) raw_args;
val args' = map (apsnd Mixfix.reset_pos) args;
val _ = lthy |> Context_Position.is_visible lthy ? Proof_Context.type_notation add mode args;
in
- declaration {syntax = true, pervasive = false}
+ declaration {syntax = true, pervasive = false, pos = Position.thread_data ()}
(Proof_Context.generic_type_notation add mode args') lthy
end;
fun gen_notation prep_const add mode raw_args lthy =
let
val prepare = prep_const lthy #> Assumption.export_term lthy (target_of lthy);
val args = map (apfst prepare) raw_args;
val args' = map (apsnd Mixfix.reset_pos) args;
val _ = lthy |> Context_Position.is_visible lthy ? Proof_Context.notation add mode args;
in
- declaration {syntax = true, pervasive = false}
+ declaration {syntax = true, pervasive = false, pos = Position.thread_data ()}
(Proof_Context.generic_notation add mode args') lthy
end;
in
val type_notation = gen_type_notation (K I);
val type_notation_cmd =
gen_type_notation (Proof_Context.read_type_name {proper = true, strict = false});
val notation = gen_notation (K I);
val notation_cmd = gen_notation (Proof_Context.read_const {proper = false, strict = false});
end;
(* name space aliases *)
fun syntax_alias global_alias local_alias b name =
- declaration {syntax = true, pervasive = false} (fn phi =>
+ declaration {syntax = true, pervasive = false, pos = Position.thread_data ()} (fn phi =>
let val b' = Morphism.binding phi b
in Context.mapping (global_alias b' name) (local_alias b' name) end);
val type_alias = syntax_alias Sign.type_alias Proof_Context.type_alias;
val const_alias = syntax_alias Sign.const_alias Proof_Context.const_alias;
(** manage targets **)
(* main target *)
fun init_target background_naming conclude operations target =
Data.map (K [make_lthy (background_naming, operations, conclude, target)]) target
fun init {background_naming, setup, conclude} operations thy =
thy
|> Sign.change_begin
|> setup
|> init_target background_naming (conclude #> target_of #> Sign.change_end_local) operations;
val exit_of = #conclude o bottom_of;
fun exit lthy = exit_of lthy (assert_bottom lthy);
val exit_global = Proof_Context.theory_of o exit;
fun exit_result decl (x, lthy) =
let
val ctxt = exit lthy;
val phi = standard_morphism lthy ctxt;
in (decl phi x, ctxt) end;
fun exit_result_global decl (x, lthy) =
let
val thy = exit_global lthy;
val thy_ctxt = Proof_Context.init_global thy;
val phi = standard_morphism lthy thy_ctxt;
in (decl phi x, thy) end;
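(* Editorial sketch (hypothetical use, not from the original diff): leaving a local theory
   while exporting a result term t into the background theory, using the standard
   morphism applied by exit_result_global. *)
fun example_exit_term (t, lthy) = exit_result_global Morphism.term (t, lthy);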
(* nested targets *)
fun begin_nested lthy =
let
val _ = assert lthy;
val (scope, target) = Proof_Context.new_scope lthy;
val entry = make_lthy (background_naming_of lthy, operations_of lthy,
Proof_Context.restore_naming lthy, target);
val lthy' = Data.map (cons entry) target;
in (scope, lthy') end;
fun end_nested lthy =
let
val _ = assert_not_bottom lthy;
val ({conclude, ...} :: rest) = Data.get lthy;
in lthy |> Data.put rest |> reset |> conclude end;
fun end_nested_result decl (x, lthy) =
let
val outer_lthy = end_nested lthy;
val phi = Proof_Context.export_morphism lthy outer_lthy;
in (decl phi x, outer_lthy) end;
end;
diff --git a/src/Pure/Isar/locale.ML b/src/Pure/Isar/locale.ML
--- a/src/Pure/Isar/locale.ML
+++ b/src/Pure/Isar/locale.ML
@@ -1,786 +1,804 @@
(* Title: Pure/Isar/locale.ML
Author: Clemens Ballarin, TU Muenchen
Locales -- managed Isar proof contexts, based on Pure predicates.
Draws basic ideas from Florian Kammueller's original version of
locales, but uses the richer infrastructure of Isar instead of the raw
meta-logic. Furthermore, structured composition of contexts (with merge
and instantiation) is provided, as well as type-inference of the
signature parts and predicate definitions of the specification text.
Interpretation enables the transfer of declarations and theorems to other
contexts, namely those defined by theories, structured proofs and locales
themselves.
A comprehensive account of locales is available:
[1] Clemens Ballarin. Locales: a module system for mathematical theories.
Journal of Automated Reasoning, 52(2):123-153, 2014.
See also:
[2] Clemens Ballarin. Locales and Locale Expressions in Isabelle/Isar.
In Stefano Berardi et al., Types for Proofs and Programs: International
Workshop, TYPES 2003, Torino, Italy, LNCS 3085, pages 34-50, 2004.
[3] Clemens Ballarin. Interpretation of Locales in Isabelle: Managing
Dependencies between Locales. Technical Report TUM-I0607, Technische
Universitaet Muenchen, 2006.
[4] Clemens Ballarin. Interpretation of Locales in Isabelle: Theories and
Proof Contexts. In J.M. Borwein and W.M. Farmer, MKM 2006, LNAI 4108,
pages 31-43, 2006.
*)
signature LOCALE =
sig
(* Locale specification *)
val register_locale: binding ->
(string * sort) list * ((string * typ) * mixfix) list ->
term option * term list ->
thm option * thm option -> thm list ->
Element.context_i list ->
- declaration list ->
+ Morphism.declaration_entity list ->
(string * Attrib.fact list) list ->
(string * morphism) list -> theory -> theory
val locale_space: theory -> Name_Space.T
val intern: theory -> xstring -> string
val check: theory -> xstring * Position.T -> string
val extern: theory -> string -> xstring
val markup_name: Proof.context -> string -> string
val pretty_name: Proof.context -> string -> Pretty.T
val defined: theory -> string -> bool
val parameters_of: theory -> string -> (string * sort) list * ((string * typ) * mixfix) list
val params_of: theory -> string -> ((string * typ) * mixfix) list
val intros_of: theory -> string -> thm option * thm option
val axioms_of: theory -> string -> thm list
val instance_of: theory -> string -> morphism -> term list
val specification_of: theory -> string -> term option * term list
val hyp_spec_of: theory -> string -> Element.context_i list
(* Storing results *)
val add_facts: string -> string -> Attrib.fact list -> Proof.context -> Proof.context
- val add_declaration: string -> bool -> declaration -> Proof.context -> Proof.context
+ val add_declaration: string -> {syntax: bool, pos: Position.T} ->
+ Morphism.declaration_entity -> Proof.context -> Proof.context
(* Activation *)
val activate_facts: morphism option -> string * morphism -> Context.generic -> Context.generic
val activate_declarations: string * morphism -> Proof.context -> Proof.context
val init: string -> theory -> Proof.context
(* Reasoning about locales *)
val get_witnesses: Proof.context -> thm list
val get_intros: Proof.context -> thm list
val get_unfolds: Proof.context -> thm list
val witness_add: attribute
val intro_add: attribute
val unfold_add: attribute
val intro_locales_tac: {strict: bool, eager: bool} -> Proof.context -> thm list -> tactic
(* Registrations and dependencies *)
type registration = {inst: string * morphism, mixin: (morphism * bool) option, export: morphism}
val amend_registration: registration -> Context.generic -> Context.generic
val add_registration: registration -> Context.generic -> Context.generic
val registrations_of: Context.generic -> string -> (string * morphism) list
val add_dependency: string -> registration -> theory -> theory
(* Diagnostic *)
val get_locales: theory -> string list
val locale_notes: theory -> string -> (string * Attrib.fact list) list
val pretty_locales: theory -> bool -> Pretty.T
val pretty_locale: theory -> bool -> string -> Pretty.T
val pretty_registrations: Proof.context -> string -> Pretty.T
val pretty_locale_deps: theory -> {name: string, parents: string list, body: Pretty.T} list
type locale_dependency =
{source: string, target: string, prefix: (string * bool) list, morphism: morphism,
pos: Position.T, serial: serial}
val dest_dependencies: theory list -> theory -> locale_dependency list
val tracing : Proof.context -> string -> unit
end;
structure Locale: LOCALE =
struct
datatype ctxt = datatype Element.ctxt;
(*** Locales ***)
type dep = {name: string, morphisms: morphism * morphism, pos: Position.T, serial: serial};
fun eq_dep (dep1: dep, dep2: dep) = #serial dep1 = #serial dep2;
+fun transfer_dep thy ({name, morphisms, pos, serial}: dep) : dep =
+ {name = name, morphisms = apply2 (Morphism.set_context thy) morphisms, pos = pos, serial = serial};
+
fun make_dep (name, morphisms) : dep =
- {name = name, morphisms = morphisms, pos = Position.thread_data (), serial = serial ()};
+ {name = name,
+ morphisms = apply2 Morphism.reset_context morphisms,
+ pos = Position.thread_data (),
+ serial = serial ()};
(*table of mixin lists; within each list, mixins appear in reverse order of declaration;
lists are indexed by registration/dependency serial,
and entries for empty lists may be omitted*)
type mixin = (morphism * bool) * serial;
type mixins = mixin list Inttab.table;
fun lookup_mixins (mixins: mixins) serial' = Inttab.lookup_list mixins serial';
val merge_mixins: mixins * mixins -> mixins = Inttab.merge_list (eq_snd op =);
-fun insert_mixin serial' mixin : mixins -> mixins = Inttab.cons_list (serial', (mixin, serial ()));
+fun insert_mixin serial' (morph, b) : mixins -> mixins =
+ Inttab.cons_list (serial', ((Morphism.reset_context morph, b), serial ()));
fun rename_mixin (old, new) (mixins: mixins) =
(case Inttab.lookup mixins old of
NONE => mixins
| SOME mixin => Inttab.delete old mixins |> Inttab.update_new (new, mixin));
fun compose_mixins (mixins: mixin list) =
fold_rev Morphism.compose (map (fst o fst) mixins) Morphism.identity;
datatype locale = Loc of {
(* static part *)
(*type and term parameters*)
parameters: (string * sort) list * ((string * typ) * mixfix) list,
(*assumptions (as a single predicate expression) and defines*)
spec: term option * term list,
intros: thm option * thm option,
axioms: thm list,
(*diagnostic device: theorem part of hypothetical body as specified by the user*)
hyp_spec: Element.context_i list,
(* dynamic part *)
(*syntax declarations*)
- syntax_decls: (declaration * serial) list,
+ syntax_decls: (Morphism.declaration_entity * serial) list,
(*theorem declarations*)
notes: ((string * Attrib.fact list) * serial) list,
(*locale dependencies (sublocale relation) in reverse order*)
dependencies: dep list,
(*mixin part of dependencies*)
mixins: mixins
};
fun mk_locale ((parameters, spec, intros, axioms, hyp_spec),
((syntax_decls, notes), (dependencies, mixins))) =
Loc {parameters = parameters, spec = spec, intros = intros, axioms = axioms, hyp_spec = hyp_spec,
syntax_decls = syntax_decls, notes = notes, dependencies = dependencies, mixins = mixins};
fun map_locale f (Loc {parameters, spec, intros, axioms, hyp_spec,
syntax_decls, notes, dependencies, mixins}) =
mk_locale (f ((parameters, spec, intros, axioms, hyp_spec),
((syntax_decls, notes), (dependencies, mixins))));
fun merge_locale
(Loc {parameters, spec, intros, axioms, hyp_spec, syntax_decls, notes, dependencies, mixins},
Loc {syntax_decls = syntax_decls', notes = notes',
dependencies = dependencies', mixins = mixins', ...}) =
mk_locale
((parameters, spec, intros, axioms, hyp_spec),
((merge (eq_snd op =) (syntax_decls, syntax_decls'),
merge (eq_snd op =) (notes, notes')),
(merge eq_dep (dependencies, dependencies'),
(merge_mixins (mixins, mixins')))));
structure Locales = Theory_Data
(
type T = locale Name_Space.table;
val empty : T = Name_Space.empty_table Markup.localeN;
val merge = Name_Space.join_tables (K merge_locale);
);
val locale_space = Name_Space.space_of_table o Locales.get;
val intern = Name_Space.intern o locale_space;
fun check thy = #1 o Name_Space.check (Context.Theory thy) (Locales.get thy);
val _ = Theory.setup
(ML_Antiquotation.inline_embedded \<^binding>\<open>locale\<close>
(Args.theory -- Scan.lift Parse.embedded_position >>
(ML_Syntax.print_string o uncurry check)));
fun extern thy = Name_Space.extern (Proof_Context.init_global thy) (locale_space thy);
fun markup_extern ctxt =
Name_Space.markup_extern ctxt (locale_space (Proof_Context.theory_of ctxt));
fun markup_name ctxt name = markup_extern ctxt name |-> Markup.markup;
fun pretty_name ctxt name = markup_extern ctxt name |> Pretty.mark_str;
val get_locale = Name_Space.lookup o Locales.get;
val defined = is_some oo get_locale;
fun the_locale thy name =
(case get_locale thy name of
SOME (Loc loc) => loc
| NONE => error ("Unknown locale " ^ quote name));
fun register_locale
binding parameters spec intros axioms hyp_spec syntax_decls notes dependencies thy =
thy |> Locales.map (Name_Space.define (Context.Theory thy) true
(binding,
mk_locale ((parameters, spec, (apply2 o Option.map) Thm.trim_context intros,
- map Thm.trim_context axioms, hyp_spec),
- ((map (fn decl => (decl, serial ())) syntax_decls, map (fn n => (n, serial ())) notes),
+ map Thm.trim_context axioms,
+ map Element.trim_context_ctxt hyp_spec),
+ ((map (fn decl => (Morphism.entity_reset_context decl, serial ())) syntax_decls,
+ map (fn (a, facts) => ((a, map Attrib.trim_context_fact facts), serial ())) notes),
(map (fn (name, morph) => make_dep (name, (morph, Morphism.identity))) dependencies,
Inttab.empty)))) #> snd);
(* FIXME Morphism.identity *)
fun change_locale name =
Locales.map o Name_Space.map_table_entry name o map_locale o apsnd;
(** Primitive operations **)
fun parameters_of thy = #parameters o the_locale thy;
val params_of = #2 oo parameters_of;
fun intros_of thy = (apply2 o Option.map) (Thm.transfer thy) o #intros o the_locale thy;
fun axioms_of thy = map (Thm.transfer thy) o #axioms o the_locale thy;
fun instance_of thy name morph = params_of thy name |>
- map (Morphism.term morph o Free o #1);
+ map (Morphism.term (Morphism.set_context thy morph) o Free o #1);
fun specification_of thy = #spec o the_locale thy;
-fun hyp_spec_of thy = #hyp_spec o the_locale thy;
+fun hyp_spec_of thy = map (Element.transfer_ctxt thy) o #hyp_spec o the_locale thy;
-fun dependencies_of thy = #dependencies o the_locale thy;
+fun dependencies_of thy = map (transfer_dep thy) o #dependencies o the_locale thy;
-fun mixins_of thy name serial = lookup_mixins (#mixins (the_locale thy name)) serial;
+fun mixins_of thy name serial =
+ lookup_mixins (#mixins (the_locale thy name)) serial
+ |> (map o apfst o apfst) (Morphism.set_context thy);
(* Print instance and qualifiers *)
fun pretty_reg_inst ctxt qs (name, ts) =
let
fun print_qual (qual, mandatory) = qual ^ (if mandatory then "" else "?");
fun prt_quals qs = Pretty.str (space_implode "." (map print_qual qs));
val prt_term = Pretty.quote o Syntax.pretty_term ctxt;
fun prt_term' t =
if Config.get ctxt show_types
then Pretty.block [prt_term t, Pretty.brk 1, Pretty.str "::",
Pretty.brk 1, (Pretty.quote o Syntax.pretty_typ ctxt) (type_of t)]
else prt_term t;
fun prt_inst ts =
Pretty.block (Pretty.breaks (pretty_name ctxt name :: map prt_term' ts));
in
(case qs of
[] => prt_inst ts
| qs => Pretty.block [prt_quals qs, Pretty.brk 1, Pretty.str ":", Pretty.brk 1, prt_inst ts])
end;
fun pretty_reg ctxt export (name, morph) =
let
val thy = Proof_Context.theory_of ctxt;
val morph' = morph $> export;
val qs = Morphism.binding_prefix morph';
val ts = instance_of thy name morph';
in pretty_reg_inst ctxt qs (name, ts) end;
(*** Identifiers: activated locales in theory or proof context ***)
type idents = term list list Symtab.table; (* name ~> instance (grouped by name) *)
val empty_idents : idents = Symtab.empty;
val insert_idents = Symtab.insert_list (eq_list (op aconv));
val merge_idents = Symtab.merge_list (eq_list (op aconv));
fun redundant_ident thy idents (name, instance) =
exists (fn pat => Pattern.matchess thy (pat, instance)) (Symtab.lookup_list idents name);
structure Idents = Generic_Data
(
type T = idents;
val empty = empty_idents;
val merge = merge_idents;
);
(** Resolve locale dependencies in a depth-first fashion **)
local
val roundup_bound = 120;
fun add thy depth stem export (name, morph) (deps, marked) =
if depth > roundup_bound
then error "Roundup bound exceeded (sublocale relation probably not terminating)."
else
let
val instance = instance_of thy name (morph $> stem $> export);
in
if redundant_ident thy marked (name, instance) then (deps, marked)
else
let
(*no inheritance of mixins, regardless of requests by clients*)
val dependencies =
dependencies_of thy name |> map (fn dep as {morphisms = (morph', export'), ...} =>
(#name dep, morph' $> export' $> compose_mixins (mixins_of thy name (#serial dep))));
val marked' = insert_idents (name, instance) marked;
val (deps', marked'') =
fold_rev (add thy (depth + 1) (morph $> stem) export) dependencies
([], marked');
in ((name, morph $> stem) :: deps' @ deps, marked'') end
end;
in
(* Note that while identifiers always have the external (exported) view, activate_dep
is presented with the internal view. *)
fun roundup thy activate_dep export (name, morph) (marked, input) =
let
(* Find all dependencies including new ones (which are dependencies enriching
existing registrations). *)
val (dependencies, marked') =
add thy 0 Morphism.identity export (name, morph) ([], empty_idents);
(* Filter out fragments from marked; these won't be activated. *)
val dependencies' = filter_out (fn (name, morph) =>
redundant_ident thy marked (name, instance_of thy name (morph $> export))) dependencies;
in
(merge_idents (marked, marked'), input |> fold_rev activate_dep dependencies')
end;
end;
(*** Registrations: interpretations in theories or proof contexts ***)
val total_ident_ord = prod_ord fast_string_ord (list_ord Term_Ord.fast_term_ord);
structure Idtab = Table(type key = string * term list val ord = total_ident_ord);
type reg = {morphisms: morphism * morphism, pos: Position.T, serial: serial};
val eq_reg: reg * reg -> bool = op = o apply2 #serial;
(* FIXME consolidate with locale dependencies, consider one data slot only *)
structure Global_Registrations = Theory_Data'
(
(*registrations, indexed by locale name and instance;
unique registration serial points to mixin list*)
type T = reg Idtab.table * mixins;
val empty: T = (Idtab.empty, Inttab.empty);
fun merge args =
let
val ctxt0 = Syntax.init_pretty_global (#1 (hd args));
fun recursive_merge ((regs1, mixins1), (regs2, mixins2)) : T =
(Idtab.merge eq_reg (regs1, regs2), merge_mixins (mixins1, mixins2))
handle Idtab.DUP id =>
(*distinct interpretations with same base: merge their mixins*)
let
val reg1 = Idtab.lookup regs1 id |> the;
val reg2 = Idtab.lookup regs2 id |> the;
val reg2' =
{morphisms = #morphisms reg2,
pos = Position.thread_data (),
serial = #serial reg1};
val regs2' = Idtab.update (id, reg2') regs2;
val mixins2' = rename_mixin (#serial reg2, #serial reg1) mixins2;
val _ =
warning ("Removed duplicate interpretation after retrieving its mixins" ^
Position.here_list [#pos reg1, #pos reg2] ^ ":\n " ^
Pretty.string_of (pretty_reg_inst ctxt0 [] id));
in recursive_merge ((regs1, mixins1), (regs2', mixins2')) end;
in Library.foldl1 recursive_merge (map #2 args) end;
);
structure Local_Registrations = Proof_Data
(
type T = Global_Registrations.T;
val init = Global_Registrations.get;
);
val get_registrations = Context.cases Global_Registrations.get Local_Registrations.get;
fun map_registrations f (Context.Theory thy) = Context.Theory (Global_Registrations.map f thy)
| map_registrations f (Context.Proof ctxt) = Context.Proof (Local_Registrations.map f ctxt);
(* Primitive operations *)
fun add_reg thy export (name, morph) =
let
- val reg = {morphisms = (morph, export), pos = Position.thread_data (), serial = serial ()};
+ val reg =
+ {morphisms = (Morphism.reset_context morph, Morphism.reset_context export),
+ pos = Position.thread_data (),
+ serial = serial ()};
val id = (name, instance_of thy name (morph $> export));
in (map_registrations o apfst) (Idtab.insert (K false) (id, reg)) end;
fun add_mixin serial' mixin =
(* registration to be amended, identified by its serial id *)
(map_registrations o apsnd) (insert_mixin serial' mixin);
val get_regs = #1 o get_registrations;
fun get_mixins context (name, morph) =
let
val thy = Context.theory_of context;
val (regs, mixins) = get_registrations context;
in
(case Idtab.lookup regs (name, instance_of thy name morph) of
NONE => []
| SOME {serial, ...} => lookup_mixins mixins serial)
end;
fun collect_mixins context (name, morph) =
let
val thy = Context.theory_of context;
in
roundup thy (fn dep => fn mixins => merge (eq_snd op =) (mixins, get_mixins context dep))
Morphism.identity (name, morph)
(insert_idents (name, instance_of thy name morph) empty_idents, [])
|> snd |> filter (snd o fst) (* only inheritable mixins *)
|> (fn x => merge (eq_snd op =) (x, get_mixins context (name, morph)))
|> compose_mixins
end;
(*** Activate context elements of locale ***)
fun activate_err msg kind (name, morph) context =
cat_error msg ("The above error(s) occurred while activating " ^ kind ^ " of locale instance\n" ^
(pretty_reg (Context.proof_of context) Morphism.identity (name, morph) |>
Pretty.string_of));
fun init_element elem context =
context
|> Context.mapping I (Thm.unchecked_hyps #> Context_Position.not_really)
|> Element.init elem
|> Context.mapping I (fn ctxt =>
let val ctxt0 = Context.proof_of context
in ctxt |> Context_Position.restore_visible ctxt0 |> Thm.restore_hyps ctxt0 end);
(* Potentially lazy notes *)
fun make_notes kind = map (fn ((b, atts), facts) =>
if null atts andalso forall (null o #2) facts
then Lazy_Notes (kind, (b, Lazy.value (maps #1 facts)))
else Notes (kind, [((b, atts), facts)]));
fun locale_notes thy loc =
fold (cons o #1) (#notes (the_locale thy loc)) [];
fun lazy_notes thy loc =
locale_notes thy loc
|> maps (fn (kind, notes) => make_notes kind notes);
fun consolidate_notes elems =
elems
|> map_filter (fn Lazy_Notes (_, (_, ths)) => SOME ths | _ => NONE)
|> Lazy.consolidate
|> ignore;
fun force_notes (Lazy_Notes (kind, (b, ths))) = Notes (kind, [((b, []), [(Lazy.force ths, [])])])
| force_notes elem = elem;
(* Declarations, facts and entire locale content *)
val trace_locales =
Attrib.setup_config_bool (Binding.make ("trace_locales", \<^here>)) (K false);
fun tracing context msg =
if Config.get context trace_locales then Output.tracing msg else ();
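(* Editorial note (usage sketch, assumption): trace_locales is an ordinary boolean
   configuration attribute, so activation tracing can be enabled from ML via Config.put
   (and typically from Isar via declare [[trace_locales]]). *)
fun example_enable_locale_tracing ctxt = Config.put trace_locales true ctxt;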
fun trace kind (name, morph) context =
tracing (Context.proof_of context) ("Activating " ^ kind ^ " of " ^
(pretty_reg (Context.proof_of context) Morphism.identity (name, morph) |> Pretty.string_of));
fun activate_syntax_decls (name, morph) context =
let
val _ = trace "syntax" (name, morph) context;
val thy = Context.theory_of context;
val {syntax_decls, ...} = the_locale thy name;
+ val form_syntax_decl =
+ Morphism.form o Morphism.transform morph o Morphism.entity_set_context thy;
in
- context
- |> fold_rev (fn (decl, _) => decl morph) syntax_decls
+ fold_rev (form_syntax_decl o #1) syntax_decls context
handle ERROR msg => activate_err msg "syntax" (name, morph) context
end;
-fun activate_notes activ_elem transfer context export' (name, morph) input =
+fun activate_notes activ_elem context export' (name, morph) input =
let
val thy = Context.theory_of context;
val mixin =
(case export' of
NONE => Morphism.identity
| SOME export => collect_mixins context (name, morph $> export) $> export);
- val morph' = transfer input $> morph $> mixin;
+ val morph' = Morphism.set_context thy (morph $> mixin);
val notes' = map (Element.transform_ctxt morph') (lazy_notes thy name);
in
(notes', input) |-> fold (fn elem => fn res =>
- activ_elem (Element.transform_ctxt (transfer res) elem) res)
+ activ_elem (Element.transfer_ctxt thy elem) res)
end handle ERROR msg => activate_err msg "facts" (name, morph) context;
-fun activate_notes_trace activ_elem transfer context export' (name, morph) context' =
+fun activate_notes_trace activ_elem context export' (name, morph) context' =
let
val _ = trace "facts" (name, morph) context';
in
- activate_notes activ_elem transfer context export' (name, morph) context'
+ activate_notes activ_elem context export' (name, morph) context'
end;
-fun activate_all name thy activ_elem transfer (marked, input) =
+fun activate_all name thy activ_elem (marked, input) =
let
val {parameters = (_, params), spec = (asm, defs), ...} = the_locale thy name;
val input' = input |>
(not (null params) ?
activ_elem (Fixes (map (fn ((x, T), mx) => (Binding.name x, SOME T, mx)) params))) |>
(* FIXME type parameters *)
(case asm of SOME A => activ_elem (Assumes [(Binding.empty_atts, [(A, [])])]) | _ => I) |>
(not (null defs) ?
activ_elem (Defines (map (fn def => (Binding.empty_atts, (def, []))) defs)));
- val activate = activate_notes activ_elem transfer (Context.Theory thy) NONE;
+ val activate = activate_notes activ_elem (Context.Theory thy) NONE;
in
roundup thy activate Morphism.identity (name, Morphism.identity) (marked, input')
end;
(** Public activation functions **)
fun activate_facts export dep context =
context
|> Context_Position.set_visible_generic false
|> pair (Idents.get context)
|> roundup (Context.theory_of context)
- (activate_notes_trace init_element Morphism.transfer_morphism'' context export)
- (the_default Morphism.identity export) dep
+ (activate_notes_trace init_element context export)
+ (Morphism.default export) dep
|-> Idents.put
|> Context_Position.restore_visible_generic context;
fun activate_declarations dep = Context.proof_map (fn context =>
context
|> Context_Position.set_visible_generic false
|> pair (Idents.get context)
|> roundup (Context.theory_of context) activate_syntax_decls Morphism.identity dep
|-> Idents.put
|> Context_Position.restore_visible_generic context);
fun init name thy =
let
val context = Context.Proof (Proof_Context.init_global thy);
val marked = Idents.get context;
in
context
|> Context_Position.set_visible_generic false
|> pair empty_idents
- |> activate_all name thy init_element Morphism.transfer_morphism''
+ |> activate_all name thy init_element
|-> (fn marked' => Idents.put (merge_idents (marked, marked')))
|> Context_Position.restore_visible_generic context
|> Context.proof_of
end;
(*** Add and extend registrations ***)
type registration = Locale.registration;
fun amend_registration {mixin = NONE, ...} context = context
| amend_registration {inst = (name, morph), mixin = SOME mixin, export} context =
let
val thy = Context.theory_of context;
val ctxt = Context.proof_of context;
val regs = get_regs context;
val base = instance_of thy name (morph $> export);
val serial' =
(case Idtab.lookup regs (name, base) of
NONE =>
error ("No interpretation of locale " ^ quote (markup_name ctxt name) ^
" with\nparameter instantiation " ^
space_implode " " (map (quote o Syntax.string_of_term_global thy) base) ^
" available")
| SOME {serial = serial', ...} => serial');
in
add_mixin serial' mixin context
end;
(* Note that a registration that would be subsumed by an existing one will not be
generated, and it will not be possible to amend it. *)
fun add_registration {inst = (name, base_morph), mixin, export} context =
let
val thy = Context.theory_of context;
val pos_morph = Morphism.binding_morphism "position" (Binding.set_pos (Position.thread_data ()));
val mix_morph = (case mixin of NONE => base_morph | SOME (mix, _) => base_morph $> mix);
val inst = instance_of thy name mix_morph;
val idents = Idents.get context;
in
if redundant_ident thy idents (name, inst) then context (* FIXME amend mixins? *)
else
(idents, context)
(* add new registrations with inherited mixins *)
|> roundup thy (add_reg thy export) export (name, mix_morph) |> #2
(* add mixin *)
|> amend_registration {inst = (name, mix_morph), mixin = mixin, export = export}
(* activate import hierarchy as far as not already active *)
|> activate_facts (SOME export) (name, mix_morph $> pos_morph)
end;
(*** Dependencies ***)
fun registrations_of context loc =
Idtab.fold_rev (fn ((name, _), {morphisms, ...}) =>
name = loc ? cons (name, morphisms)) (get_regs context) []
(*with inherited mixins*)
|> map (fn (name, (base, export)) =>
(name, base $> (collect_mixins context (name, base $> export)) $> export));
fun add_dependency loc {inst = (name, morph), mixin, export} thy =
let
val dep = make_dep (name, (morph, export));
val add_dep =
apfst (cons dep) #>
apsnd (case mixin of NONE => I | SOME mixin => insert_mixin (#serial dep) mixin);
val thy' = change_locale loc (apsnd add_dep) thy;
val context' = Context.Theory thy';
val (_, regs) =
fold_rev (roundup thy' cons export)
(registrations_of context' loc) (Idents.get context', []);
in
fold_rev (fn inst => Context.theory_map (add_registration {inst = inst, mixin = NONE, export = export}))
regs thy'
end;
(*** Storing results ***)
fun add_facts loc kind facts ctxt =
if null facts then ctxt
else
let
val stored_notes = ((kind, map Attrib.trim_context_fact facts), serial ());
val applied_notes = make_notes kind facts;
- fun apply_notes morph = applied_notes |> fold (fn elem => fn context =>
- let val elem' = Element.transform_ctxt (Morphism.transfer_morphism'' context $> morph) elem
- in Element.init elem' context end);
- val apply_registrations = Context.theory_map (fn context =>
- fold_rev (apply_notes o #2) (registrations_of context loc) context);
+ fun apply_notes morph = applied_notes |> fold (fn elem => fn thy =>
+ let val elem' = Element.transform_ctxt (Morphism.set_context thy morph) elem
+ in Context.theory_map (Element.init elem') thy end);
+ fun apply_registrations thy =
+ fold_rev (apply_notes o #2) (registrations_of (Context.Theory thy) loc) thy;
in
ctxt
|> Attrib.local_notes kind facts |> #2
|> Proof_Context.background_theory
((change_locale loc o apfst o apsnd) (cons stored_notes) #> apply_registrations)
end;
-fun add_declaration loc syntax decl =
- syntax ?
- Proof_Context.background_theory ((change_locale loc o apfst o apfst) (cons (decl, serial ())))
- #> add_facts loc "" [(Binding.empty_atts, Attrib.internal_declaration decl)];
+fun add_declaration loc {syntax, pos} decl =
+ let val decl0 = Morphism.entity_reset_context decl in
+ syntax ?
+ Proof_Context.background_theory ((change_locale loc o apfst o apfst) (cons (decl0, serial ())))
+ #> add_facts loc "" [(Binding.empty_atts, Attrib.internal_declaration pos decl0)]
+ end;
(*** Reasoning about locales ***)
(* Storage for witnesses, intro and unfold rules *)
structure Thms = Generic_Data
(
type T = thm Item_Net.T * thm Item_Net.T * thm Item_Net.T;
val empty = (Thm.item_net, Thm.item_net, Thm.item_net);
fun merge ((witnesses1, intros1, unfolds1), (witnesses2, intros2, unfolds2)) =
(Item_Net.merge (witnesses1, witnesses2),
Item_Net.merge (intros1, intros2),
Item_Net.merge (unfolds1, unfolds2));
);
fun get_thms which ctxt =
map (Thm.transfer' ctxt) (which (Thms.get (Context.Proof ctxt)));
val get_witnesses = get_thms (Item_Net.content o #1);
val get_intros = get_thms (Item_Net.content o #2);
val get_unfolds = get_thms (Item_Net.content o #3);
val witness_add =
Thm.declaration_attribute (fn th =>
Thms.map (fn (x, y, z) => (Item_Net.update (Thm.trim_context th) x, y, z)));
val intro_add =
Thm.declaration_attribute (fn th =>
Thms.map (fn (x, y, z) => (x, Item_Net.update (Thm.trim_context th) y, z)));
val unfold_add =
Thm.declaration_attribute (fn th =>
Thms.map (fn (x, y, z) => (x, y, Item_Net.update (Thm.trim_context th) z)));
(* Tactics *)
fun intro_locales_tac {strict, eager} ctxt =
(if strict then Method.intros_tac else Method.try_intros_tac) ctxt
(get_witnesses ctxt @ get_intros ctxt @ (if eager then get_unfolds ctxt else []));
val _ = Theory.setup
(Method.setup \<^binding>\<open>intro_locales\<close>
(Scan.succeed (METHOD o intro_locales_tac {strict = false, eager = false}))
"back-chain introduction rules of locales without unfolding predicates" #>
Method.setup \<^binding>\<open>unfold_locales\<close>
(Scan.succeed (METHOD o intro_locales_tac {strict = false, eager = true}))
"back-chain all introduction rules of locales");
(*** diagnostic commands and interfaces ***)
fun get_locales thy = map #1 (Name_Space.dest_table (Locales.get thy));
fun pretty_locales thy verbose =
Pretty.block
(Pretty.breaks
(Pretty.str "locales:" ::
map (Pretty.mark_str o #1)
(Name_Space.markup_table verbose (Proof_Context.init_global thy) (Locales.get thy))));
fun pretty_locale thy show_facts name =
let
val locale_ctxt = init name thy;
fun cons_elem (elem as Notes _) = show_facts ? cons elem
| cons_elem (elem as Lazy_Notes _) = show_facts ? cons elem
| cons_elem elem = cons elem;
val elems =
- activate_all name thy cons_elem (K (Morphism.transfer_morphism thy)) (empty_idents, [])
+ activate_all name thy cons_elem (empty_idents, [])
|> snd |> rev
|> tap consolidate_notes
|> map force_notes;
in
Pretty.block (Pretty.keyword1 "locale" :: Pretty.brk 1 :: pretty_name locale_ctxt name ::
maps (fn elem => [Pretty.fbrk, Pretty.chunks (Element.pretty_ctxt locale_ctxt elem)]) elems)
end;
fun pretty_registrations ctxt name =
(case registrations_of (Context.Proof ctxt) name of
[] => Pretty.str "no interpretations"
| regs => Pretty.big_list "interpretations:" (map (pretty_reg ctxt Morphism.identity) (rev regs)));
fun pretty_locale_deps thy =
let
fun make_node name =
{name = name,
parents = map #name (dependencies_of thy name),
body = pretty_locale thy false name};
val names = sort_strings (Name_Space.fold_table (cons o #1) (Locales.get thy) []);
in map make_node names end;
type locale_dependency =
{source: string, target: string, prefix: (string * bool) list, morphism: morphism,
pos: Position.T, serial: serial};
fun dest_dependencies prev_thys thy =
let
fun remove_prev loc prev_thy deps =
(case get_locale prev_thy loc of
NONE => deps
| SOME (Loc {dependencies = prev_deps, ...}) =>
if eq_list eq_dep (prev_deps, deps) then []
else subtract eq_dep prev_deps deps);
fun result loc (dep: dep) =
let val morphism = op $> (#morphisms dep) in
{source = #name dep,
target = loc,
prefix = Morphism.binding_prefix morphism,
morphism = morphism,
pos = #pos dep,
serial = #serial dep}
end;
fun add (loc, Loc {dependencies = deps, ...}) =
fold (cons o result loc) (fold (remove_prev loc) prev_thys deps);
in
Name_Space.fold_table add (Locales.get thy) []
|> sort (int_ord o apply2 #serial)
end;
end;
diff --git a/src/Pure/Isar/method.ML b/src/Pure/Isar/method.ML
--- a/src/Pure/Isar/method.ML
+++ b/src/Pure/Isar/method.ML
@@ -1,829 +1,837 @@
(* Title: Pure/Isar/method.ML
Author: Markus Wenzel, TU Muenchen
Isar proof methods.
*)
signature METHOD =
sig
type method = thm list -> context_tactic
val CONTEXT_METHOD: (thm list -> context_tactic) -> method
val METHOD: (thm list -> tactic) -> method
val fail: method
val succeed: method
val insert_tac: Proof.context -> thm list -> int -> tactic
val insert: thm list -> method
val SIMPLE_METHOD: tactic -> method
val SIMPLE_METHOD': (int -> tactic) -> method
val SIMPLE_METHOD'': ((int -> tactic) -> tactic) -> (int -> tactic) -> method
val goal_cases_tac: string list -> context_tactic
val cheating: bool -> method
val intro: Proof.context -> thm list -> method
val elim: Proof.context -> thm list -> method
val unfold: thm list -> Proof.context -> method
val fold: thm list -> Proof.context -> method
val atomize: bool -> Proof.context -> method
val this: Proof.context -> method
val fact: thm list -> Proof.context -> method
val assm_tac: Proof.context -> int -> tactic
val all_assm_tac: Proof.context -> tactic
val assumption: Proof.context -> method
val rule_trace: bool Config.T
val trace: Proof.context -> thm list -> unit
val rule_tac: Proof.context -> thm list -> thm list -> int -> tactic
val some_rule_tac: Proof.context -> thm list -> thm list -> int -> tactic
val intros_tac: Proof.context -> thm list -> thm list -> tactic
val try_intros_tac: Proof.context -> thm list -> thm list -> tactic
val rule: Proof.context -> thm list -> method
val erule: Proof.context -> int -> thm list -> method
val drule: Proof.context -> int -> thm list -> method
val frule: Proof.context -> int -> thm list -> method
val method_space: Context.generic -> Name_Space.T
val set_tactic: (morphism -> thm list -> tactic) -> Context.generic -> Context.generic
val clean_facts: thm list -> thm list
val set_facts: thm list -> Proof.context -> Proof.context
val get_facts: Proof.context -> thm list
type combinator_info
val no_combinator_info: combinator_info
datatype combinator = Then | Then_All_New | Orelse | Try | Repeat1 | Select_Goals of int
datatype text =
Source of Token.src |
Basic of Proof.context -> method |
Combinator of combinator_info * combinator * text list
val map_source: (Token.src -> Token.src) -> text -> text
val primitive_text: (Proof.context -> thm -> thm) -> text
val succeed_text: text
val standard_text: text
val this_text: text
val done_text: text
val sorry_text: bool -> text
val finish_text: text option * bool -> text
val print_methods: bool -> Proof.context -> unit
val check_name: Proof.context -> xstring * Position.T -> string
val check_src: Proof.context -> Token.src -> Token.src
val check_text: Proof.context -> text -> text
val checked_text: text -> bool
val method_syntax: (Proof.context -> method) context_parser ->
Token.src -> Proof.context -> method
val setup: binding -> (Proof.context -> method) context_parser -> string -> theory -> theory
val local_setup: binding -> (Proof.context -> method) context_parser -> string ->
local_theory -> local_theory
val method_setup: bstring * Position.T -> Input.source -> string -> local_theory -> local_theory
val method: Proof.context -> Token.src -> Proof.context -> method
val method_closure: Proof.context -> Token.src -> Token.src
val closure: bool Config.T
val method_cmd: Proof.context -> Token.src -> Proof.context -> method
val detect_closure_state: thm -> bool
val STATIC: (unit -> unit) -> context_tactic
val RUNTIME: context_tactic -> context_tactic
val sleep: Time.time -> context_tactic
val evaluate: text -> Proof.context -> method
val evaluate_runtime: text -> Proof.context -> method
type modifier = {init: Proof.context -> Proof.context, attribute: attribute, pos: Position.T}
val modifier: attribute -> Position.T -> modifier
val old_section_parser: bool Config.T
val sections: modifier parser list -> unit context_parser
type text_range = text * Position.range
val text: text_range option -> text option
val position: text_range option -> Position.T
val reports_of: text_range -> Position.report list
val report: text_range -> unit
val parser: int -> text_range parser
val parse: text_range parser
val parse_by: ((text_range * text_range option) * Position.report list) parser
val read: Proof.context -> Token.src -> text
val read_closure: Proof.context -> Token.src -> text * Token.src
val read_closure_input: Proof.context -> Input.source -> text * Token.src
val text_closure: text context_parser
end;
structure Method: METHOD =
struct
(** proof methods **)
(* type method *)
type method = thm list -> context_tactic;
fun CONTEXT_METHOD tac : method =
fn facts => CONTEXT_TACTIC (ALLGOALS Goal.conjunction_tac) #> Seq.maps_results (tac facts);
fun METHOD tac : method =
fn facts => CONTEXT_TACTIC (ALLGOALS Goal.conjunction_tac THEN tac facts);
val fail = METHOD (K no_tac);
val succeed = METHOD (K all_tac);
(* insert facts *)
fun insert_tac _ [] _ = all_tac
| insert_tac ctxt facts i =
EVERY (map (fn r => resolve_tac ctxt [Thm.forall_intr_vars r COMP_INCR revcut_rl] i) facts);
fun insert thms =
CONTEXT_METHOD (fn _ => fn (ctxt, st) =>
st |> ALLGOALS (insert_tac ctxt thms) |> TACTIC_CONTEXT ctxt);
fun SIMPLE_METHOD tac =
CONTEXT_METHOD (fn facts => fn (ctxt, st) =>
st |> (ALLGOALS (insert_tac ctxt facts) THEN tac) |> TACTIC_CONTEXT ctxt);
fun SIMPLE_METHOD'' quant tac =
CONTEXT_METHOD (fn facts => fn (ctxt, st) =>
st |> quant (insert_tac ctxt facts THEN' tac) |> TACTIC_CONTEXT ctxt);
val SIMPLE_METHOD' = SIMPLE_METHOD'' HEADGOAL;
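(* Editorial sketch (hypothetical user-space code, not part of the original sources): a
   theory elsewhere could wrap a single-goal tactic as a proof method via SIMPLE_METHOD',
   using the Method.setup interface declared in the signature above; the binding
   "single_rule" is an illustrative assumption. *)
val _ =
  Theory.setup
    (Method.setup \<^binding>\<open>single_rule\<close>
      (Attrib.thm >> (fn th => fn ctxt => SIMPLE_METHOD' (resolve_tac ctxt [th])))
      "resolve the first subgoal with the given rule (editorial example)");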
(* goals as cases *)
fun goal_cases_tac case_names : context_tactic =
fn (ctxt, st) =>
let
val cases =
(if null case_names then map string_of_int (1 upto Thm.nprems_of st) else case_names)
|> map (rpair [] o rpair [])
|> Rule_Cases.make_common ctxt (Thm.prop_of (Rule_Cases.internalize_params st));
in CONTEXT_CASES cases all_tac (ctxt, st) end;
(* cheating *)
fun cheating int = CONTEXT_METHOD (fn _ => fn (ctxt, st) =>
if int orelse Config.get ctxt quick_and_dirty then
TACTIC_CONTEXT ctxt (ALLGOALS (Skip_Proof.cheat_tac ctxt) st)
else error "Cheating requires quick_and_dirty mode!");
(* unfold intro/elim rules *)
fun intro ctxt ths = SIMPLE_METHOD' (CHANGED_PROP o REPEAT_ALL_NEW (match_tac ctxt ths));
fun elim ctxt ths = SIMPLE_METHOD' (CHANGED_PROP o REPEAT_ALL_NEW (ematch_tac ctxt ths));
(* unfold/fold definitions *)
fun unfold_meth ths ctxt = SIMPLE_METHOD (CHANGED_PROP (Local_Defs.unfold_tac ctxt ths));
fun fold_meth ths ctxt = SIMPLE_METHOD (CHANGED_PROP (Local_Defs.fold_tac ctxt ths));
(* atomize rule statements *)
fun atomize false ctxt =
SIMPLE_METHOD' (CHANGED_PROP o Object_Logic.atomize_prems_tac ctxt)
| atomize true ctxt =
Context_Tactic.CONTEXT_TACTIC o
K (HEADGOAL (CHANGED_PROP o Object_Logic.full_atomize_tac ctxt));
(* this -- resolve facts directly *)
fun this ctxt = METHOD (EVERY o map (HEADGOAL o resolve_tac ctxt o single));
(* fact -- composition by facts from context *)
fun fact [] ctxt = SIMPLE_METHOD' (Proof_Context.some_fact_tac ctxt)
| fact rules ctxt = SIMPLE_METHOD' (Proof_Context.fact_tac ctxt rules);
(* assumption *)
local
fun cond_rtac ctxt cond rule = SUBGOAL (fn (prop, i) =>
if cond (Logic.strip_assums_concl prop)
then resolve_tac ctxt [rule] i else no_tac);
in
fun assm_tac ctxt =
assume_tac ctxt APPEND'
Goal.assume_rule_tac ctxt APPEND'
cond_rtac ctxt (can Logic.dest_equals) Drule.reflexive_thm APPEND'
cond_rtac ctxt (can Logic.dest_term) Drule.termI;
fun all_assm_tac ctxt =
let
fun tac i st =
if i > Thm.nprems_of st then all_tac st
else ((assm_tac ctxt i THEN tac i) ORELSE tac (i + 1)) st;
in tac 1 end;
fun assumption ctxt = METHOD (HEADGOAL o
(fn [] => assm_tac ctxt
| [fact] => solve_tac ctxt [fact]
| _ => K no_tac));
fun finish immed ctxt =
METHOD (K ((if immed then all_assm_tac ctxt else all_tac) THEN flexflex_tac ctxt));
end;
(* rule etc. -- single-step refinements *)
val rule_trace = Attrib.setup_config_bool \<^binding>\<open>rule_trace\<close> (fn _ => false);
fun trace ctxt rules =
if Config.get ctxt rule_trace andalso not (null rules) then
Pretty.big_list "rules:" (map (Thm.pretty_thm_item ctxt) rules)
|> Pretty.string_of |> tracing
else ();
local
fun gen_rule_tac tac ctxt rules facts =
(fn i => fn st =>
if null facts then tac ctxt rules i st
else
Seq.maps (fn rule => (tac ctxt o single) rule i st)
(Drule.multi_resolves (SOME ctxt) facts rules))
THEN_ALL_NEW Goal.norm_hhf_tac ctxt;
fun gen_arule_tac tac ctxt j rules facts =
EVERY' (gen_rule_tac tac ctxt rules facts :: replicate j (assume_tac ctxt));
fun gen_some_rule_tac tac ctxt arg_rules facts = SUBGOAL (fn (goal, i) =>
let
val rules =
if not (null arg_rules) then arg_rules
else flat (Context_Rules.find_rules ctxt false facts goal);
in trace ctxt rules; tac ctxt rules facts i end);
fun meth tac x y = METHOD (HEADGOAL o tac x y);
fun meth' tac x y z = METHOD (HEADGOAL o tac x y z);
in
val rule_tac = gen_rule_tac resolve_tac;
val rule = meth rule_tac;
val some_rule_tac = gen_some_rule_tac rule_tac;
val some_rule = meth some_rule_tac;
val erule = meth' (gen_arule_tac eresolve_tac);
val drule = meth' (gen_arule_tac dresolve_tac);
val frule = meth' (gen_arule_tac forward_tac);
end;
(* intros_tac -- pervasive search spanned by intro rules *)
fun gen_intros_tac goals ctxt intros facts =
goals (insert_tac ctxt facts THEN'
REPEAT_ALL_NEW (resolve_tac ctxt intros))
THEN Tactic.distinct_subgoals_tac;
val intros_tac = gen_intros_tac ALLGOALS;
val try_intros_tac = gen_intros_tac TRYALL;
(** method syntax **)
(* context data *)
structure Data = Generic_Data
(
type T =
{methods: ((Token.src -> Proof.context -> method) * string) Name_Space.table,
ml_tactic: (morphism -> thm list -> tactic) option,
facts: thm list option};
val empty : T =
{methods = Name_Space.empty_table Markup.methodN, ml_tactic = NONE, facts = NONE};
fun merge
({methods = methods1, ml_tactic = ml_tactic1, facts = facts1},
{methods = methods2, ml_tactic = ml_tactic2, facts = facts2}) : T =
{methods = Name_Space.merge_tables (methods1, methods2),
ml_tactic = merge_options (ml_tactic1, ml_tactic2),
facts = merge_options (facts1, facts2)};
);
fun map_data f = Data.map (fn {methods, ml_tactic, facts} =>
let val (methods', ml_tactic', facts') = f (methods, ml_tactic, facts)
in {methods = methods', ml_tactic = ml_tactic', facts = facts'} end);
val get_methods = #methods o Data.get;
val ops_methods =
{get_data = get_methods,
put_data = fn methods => map_data (fn (_, ml_tactic, facts) => (methods, ml_tactic, facts))};
val method_space = Name_Space.space_of_table o get_methods;
(* ML tactic *)
fun set_tactic ml_tactic = map_data (fn (methods, _, facts) => (methods, SOME ml_tactic, facts));
fun the_tactic context =
#ml_tactic (Data.get context)
|> \<^if_none>\<open>raise Fail "Undefined ML tactic"\<close>;
val parse_tactic =
Scan.state :|-- (fn context =>
Scan.lift (Args.embedded_declaration (fn source =>
let
val tac =
context
|> ML_Context.expression (Input.pos_of source)
(ML_Lex.read "Context.>> (Method.set_tactic (fn morphism: Morphism.morphism => fn facts: thm list => (" @
ML_Lex.read_source source @ ML_Lex.read ")))")
|> the_tactic;
in
- fn phi => set_tactic (fn _ => Context.setmp_generic_context (SOME context) (tac phi))
- end)) >> (fn decl => Morphism.form (the_tactic (Morphism.form decl context))));
+ Morphism.entity (fn phi =>
+ set_tactic (fn _ => Context.setmp_generic_context (SOME context) (tac phi)))
+ end)) >> (fn decl => Morphism.form_entity (the_tactic (Morphism.form decl context))));
(* method facts *)
val clean_facts = filter_out Thm.is_dummy;
fun set_facts facts =
(Context.proof_map o map_data)
(fn (methods, ml_tactic, _) => (methods, ml_tactic, SOME (clean_facts facts)));
val get_facts_generic = these o #facts o Data.get;
val get_facts = get_facts_generic o Context.Proof;
val _ =
Theory.setup
(Global_Theory.add_thms_dynamic (Binding.make ("method_facts", \<^here>), get_facts_generic));
(* method text *)
datatype combinator_info = Combinator_Info of {keywords: Position.T list};
fun combinator_info keywords = Combinator_Info {keywords = keywords};
val no_combinator_info = combinator_info [];
datatype combinator = Then | Then_All_New | Orelse | Try | Repeat1 | Select_Goals of int;
datatype text =
Source of Token.src |
Basic of Proof.context -> method |
Combinator of combinator_info * combinator * text list;
fun map_source f (Source src) = Source (f src)
| map_source _ (Basic meth) = Basic meth
| map_source f (Combinator (info, comb, txts)) = Combinator (info, comb, map (map_source f) txts);
fun primitive_text r = Basic (SIMPLE_METHOD o PRIMITIVE o r);
val succeed_text = Basic (K succeed);
val standard_text = Source (Token.make_src ("standard", Position.none) []);
val this_text = Basic this;
val done_text = Basic (K (SIMPLE_METHOD all_tac));
fun sorry_text int = Basic (fn _ => cheating int);
fun finish_text (NONE, immed) = Basic (finish immed)
| finish_text (SOME txt, immed) =
Combinator (no_combinator_info, Then, [txt, Basic (finish immed)]);
(* method definitions *)
fun print_methods verbose ctxt =
let
val meths = get_methods (Context.Proof ctxt);
fun prt_meth (name, (_, "")) = Pretty.mark_str name
| prt_meth (name, (_, comment)) =
Pretty.block
(Pretty.mark_str name :: Pretty.str ":" :: Pretty.brk 2 :: Pretty.text comment);
in
[Pretty.big_list "methods:" (map prt_meth (Name_Space.markup_table verbose ctxt meths))]
|> Pretty.writeln_chunks
end;
(* define *)
fun define_global binding meth comment =
Entity.define_global ops_methods binding (meth, comment);
fun define binding meth comment =
Entity.define ops_methods binding (meth, comment);
(* check *)
fun check_name ctxt =
let val context = Context.Proof ctxt
in #1 o Name_Space.check context (get_methods context) end;
fun check_src ctxt =
#1 o Token.check_src ctxt (get_methods o Context.Proof);
fun check_text ctxt (Source src) = Source (check_src ctxt src)
| check_text _ (Basic m) = Basic m
| check_text ctxt (Combinator (x, y, body)) = Combinator (x, y, map (check_text ctxt) body);
fun checked_text (Source src) = Token.checked_src src
| checked_text (Basic _) = true
| checked_text (Combinator (_, _, body)) = forall checked_text body;
val _ = Theory.setup
(ML_Antiquotation.inline_embedded \<^binding>\<open>method\<close>
(Args.context -- Scan.lift Parse.embedded_position >>
(ML_Syntax.print_string o uncurry check_name)));
(* method setup *)
fun method_syntax scan src ctxt : method =
let val (m, ctxt') = Token.syntax scan src ctxt in m ctxt' end;
fun setup binding scan comment = define_global binding (method_syntax scan) comment #> snd;
fun local_setup binding scan comment = define binding (method_syntax scan) comment #> snd;
fun method_setup binding source comment =
ML_Context.expression (Input.pos_of source)
(ML_Lex.read
("Theory.local_setup (Method.local_setup (" ^ ML_Syntax.make_binding binding ^ ") (") @
ML_Lex.read_source source @ ML_Lex.read (")" ^ ML_Syntax.print_string comment ^ ")"))
|> Context.proof_map;
(* prepare methods *)
fun method ctxt =
let val table = get_methods (Context.Proof ctxt)
in fn src => #1 (Name_Space.get table (#1 (Token.name_of_src src))) src end;
fun method_closure ctxt src =
let
val src' = map Token.init_assignable src;
val ctxt' = Context_Position.not_really ctxt;
val _ = Seq.pull (method ctxt' src' ctxt' [] (ctxt', Goal.protect 0 Drule.dummy_thm));
in map Token.closure src' end;
val closure = Config.declare_bool ("Method.closure", \<^here>) (K true);
fun method_cmd ctxt =
check_src ctxt #>
Config.get ctxt closure ? method_closure ctxt #>
method ctxt;
(* static vs. runtime state *)
fun detect_closure_state st =
(case try Logic.dest_term (Thm.concl_of (perhaps (try Goal.conclude) st)) of
NONE => false
| SOME t => Term.is_dummy_pattern t);
fun STATIC test : context_tactic =
fn (ctxt, st) =>
if detect_closure_state st then (test (); Seq.single (Seq.Result (ctxt, st))) else Seq.empty;
fun RUNTIME (tac: context_tactic) (ctxt, st) =
if detect_closure_state st then Seq.empty else tac (ctxt, st);
fun sleep t = RUNTIME (fn ctxt_st => (OS.Process.sleep t; Seq.single (Seq.Result ctxt_st)));
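(*informal note: method_closure above evaluates a method against a dummy goal built from
  Drule.dummy_thm, which detect_closure_state recognizes via the dummy pattern; STATIC checks
  therefore run only during closure formation, while RUNTIME tactics (e.g. sleep) are
  suppressed in that phase and only fire on real goal states*)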
(* evaluate method text *)
local
val op THEN = Seq.THEN;
fun BYPASS_CONTEXT (tac: tactic) =
fn result =>
(case result of
Seq.Error _ => Seq.single result
| Seq.Result (ctxt, st) => tac st |> TACTIC_CONTEXT ctxt);
val preparation = BYPASS_CONTEXT (ALLGOALS Goal.conjunction_tac);
fun RESTRICT_GOAL i n method =
BYPASS_CONTEXT (PRIMITIVE (Goal.restrict i n)) THEN
method THEN
BYPASS_CONTEXT (PRIMITIVE (Goal.unrestrict i));
fun SELECT_GOAL method i = RESTRICT_GOAL i 1 method;
fun (method1 THEN_ALL_NEW method2) i (result : context_state Seq.result) =
(case result of
Seq.Error _ => Seq.single result
| Seq.Result (_, st) =>
result |> method1 i
|> Seq.maps (fn result' =>
(case result' of
Seq.Error _ => Seq.single result'
| Seq.Result (_, st') =>
result' |> Seq.INTERVAL method2 i (i + Thm.nprems_of st' - Thm.nprems_of st))))
fun COMBINATOR1 comb [meth] = comb meth
| COMBINATOR1 _ _ = raise Fail "Method combinator requires exactly one argument";
fun combinator Then = Seq.EVERY
| combinator Then_All_New =
(fn [] => Seq.single
| methods =>
preparation THEN (foldl1 (op THEN_ALL_NEW) (map SELECT_GOAL methods) 1))
| combinator Orelse = Seq.FIRST
| combinator Try = COMBINATOR1 Seq.TRY
| combinator Repeat1 = COMBINATOR1 Seq.REPEAT1
| combinator (Select_Goals n) =
COMBINATOR1 (fn method => preparation THEN RESTRICT_GOAL 1 n method);
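(*informal note on combinator semantics: Then chains methods sequentially; Then_All_New (the
  structured ";") applies each following method to the subgoals emerging from the previous one;
  Orelse takes the first success; Try and Repeat1 are the postfix "?" and "+"; Select_Goals n
  restricts evaluation to the first n subgoals, as in "method[n]"*)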
in
fun evaluate text ctxt0 facts =
let
val ctxt = set_facts facts ctxt0;
fun eval0 m = Seq.single #> Seq.maps_results (m facts);
fun eval (Basic m) = eval0 (m ctxt)
| eval (Source src) = eval0 (method_cmd ctxt src ctxt)
| eval (Combinator (_, c, txts)) = combinator c (map eval txts);
in eval text o Seq.Result end;
end;
fun evaluate_runtime text ctxt =
let
val text' =
text |> (map_source o map o Token.map_facts)
(fn SOME name =>
(case Proof_Context.lookup_fact ctxt name of
SOME {dynamic = true, thms} => K thms
| _ => I)
| NONE => I);
val ctxt' = Config.put closure false ctxt;
in fn facts => RUNTIME (fn st => evaluate text' ctxt' facts st) end;
(** concrete syntax **)
(* type modifier *)
type modifier =
{init: Proof.context -> Proof.context, attribute: attribute, pos: Position.T};
fun modifier attribute pos : modifier = {init = I, attribute = attribute, pos = pos};
(* sections *)
val old_section_parser = Config.declare_bool ("Method.old_section_parser", \<^here>) (K false);
local
fun thms ss =
Scan.repeats (Scan.unless (Scan.lift (Scan.first ss)) Attrib.multi_thm);
fun app {init, attribute, pos = _} ths context =
fold_map (Thm.apply_attribute attribute) ths (Context.map_proof init context);
fun section ss = Scan.depend (fn context => (Scan.first ss -- Scan.pass context (thms ss)) :|--
(fn (m, ths) => Scan.succeed (swap (app m ths context))));
in
fun old_sections ss = Scan.repeat (section ss) >> K ();
end;
local
fun sect (modifier : modifier parser) = Scan.depend (fn context =>
Scan.ahead Parse.not_eof -- Scan.trace modifier -- Scan.repeat (Scan.unless modifier Parse.thm)
>> (fn ((tok0, ({init, attribute, pos}, modifier_toks)), xthms) =>
let
val decl =
(case Token.get_value tok0 of
SOME (Token.Declaration decl) => decl
| _ =>
let
val ctxt = Context.proof_of context;
val prep_att = Attrib.check_src ctxt #> map (Token.assign NONE);
val thms =
map (fn (a, bs) => (Proof_Context.get_fact ctxt a, map prep_att bs)) xthms;
val facts =
Attrib.partial_evaluation ctxt [((Binding.name "dummy", []), thms)]
- |> map (fn (_, bs) => ((Binding.empty, [Attrib.internal (K attribute)]), bs));
+ |> map (fn (_, bs) =>
+ ((Binding.empty, [Attrib.internal pos (K attribute)]),
+ Attrib.trim_context_thms bs));
- fun decl phi =
- Context.mapping I init #>
- Attrib.generic_notes "" (Attrib.transform_facts phi facts) #> snd;
+ val decl =
+ Morphism.entity (fn phi => fn context =>
+ let val psi = Morphism.set_context'' context phi in
+ context
+ |> Context.mapping I init
+ |> Attrib.generic_notes "" (Attrib.transform_facts psi facts)
+ |> snd
+ end);
val modifier_report =
(#1 (Token.range_of modifier_toks),
Position.entity_markup Markup.method_modifierN ("", pos));
val _ =
Context_Position.reports ctxt (modifier_report :: Token.reports_of_value tok0);
val _ = Token.assign (SOME (Token.Declaration decl)) tok0;
in decl end);
in (Morphism.form decl context, decl) end));
in
fun sections ss =
Args.context :|-- (fn ctxt =>
if Config.get ctxt old_section_parser then old_sections ss
else Scan.repeat (sect (Scan.first ss)) >> K ());
end;
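(*informal note: a "section" is a modifier keyword followed by theorems, e.g. the add:/del:
  style modifiers of Simplifier-like methods; each modifier's attribute is applied to the
  given theorems and the result is declared in the context via Attrib.generic_notes*)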
(* extra rule methods *)
fun xrule_meth meth =
Scan.lift (Scan.optional (Args.parens Parse.nat) 0) -- Attrib.thms >>
(fn (n, ths) => fn ctxt => meth ctxt n ths);
(* text range *)
type text_range = text * Position.range;
fun text NONE = NONE
| text (SOME (txt, _)) = SOME txt;
fun position NONE = Position.none
| position (SOME (_, (pos, _))) = pos;
(* reports *)
local
fun keyword_positions (Source _) = []
| keyword_positions (Basic _) = []
| keyword_positions (Combinator (Combinator_Info {keywords}, _, texts)) =
keywords @ maps keyword_positions texts;
in
fun reports_of ((text, (pos, _)): text_range) =
(pos, Markup.language_method) ::
maps (fn p => map (pair p) (Markup.keyword3 :: Completion.suppress_abbrevs ""))
(keyword_positions text);
fun report text_range =
if Context_Position.reports_enabled0 ()
then Position.reports (reports_of text_range) else ();
end;
(* parser *)
local
fun is_symid_meth s =
s <> "|" andalso s <> "?" andalso s <> "+" andalso Token.ident_or_symbolic s;
in
fun parser pri =
let
val meth_name = Parse.token Parse.name;
fun meth5 x =
(meth_name >> (Source o single) ||
Scan.ahead Parse.cartouche |-- Parse.not_eof >> (fn tok =>
Source (Token.make_src ("cartouche", Position.none) [tok])) ||
Parse.$$$ "(" |-- Parse.!!! (meth0 --| Parse.$$$ ")")) x
and meth4 x =
(meth5 -- Parse.position (Parse.$$$ "?")
>> (fn (m, (_, pos)) => Combinator (combinator_info [pos], Try, [m])) ||
meth5 -- Parse.position (Parse.$$$ "+")
>> (fn (m, (_, pos)) => Combinator (combinator_info [pos], Repeat1, [m])) ||
meth5 -- (Parse.position (Parse.$$$ "[") --
Scan.optional Parse.nat 1 -- Parse.position (Parse.$$$ "]"))
>> (fn (m, (((_, pos1), n), (_, pos2))) =>
Combinator (combinator_info [pos1, pos2], Select_Goals n, [m])) ||
meth5) x
and meth3 x =
(meth_name ::: Parse.args1 is_symid_meth >> Source ||
meth4) x
and meth2 x =
(Parse.enum1_positions "," meth3
>> (fn ([m], _) => m | (ms, ps) => Combinator (combinator_info ps, Then, ms))) x
and meth1 x =
(Parse.enum1_positions ";" meth2
>> (fn ([m], _) => m | (ms, ps) => Combinator (combinator_info ps, Then_All_New, ms))) x
and meth0 x =
(Parse.enum1_positions "|" meth1
>> (fn ([m], _) => m | (ms, ps) => Combinator (combinator_info ps, Orelse, ms))) x;
val meth =
nth [meth0, meth1, meth2, meth3, meth4, meth5] pri
handle General.Subscript => raise Fail ("Bad method parser priority " ^ string_of_int pri);
in Scan.trace meth >> (fn (m, toks) => (m, Token.range_of toks)) end;
val parse = parser 4;
end;
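(*informal note: operator precedence of the method language, weakest first: "|" (Orelse),
  ";" (Then_All_New), "," (Then), then the postfix "?", "+", "[n]"; e.g.
  "rule conjI, simp | blast" parses as Orelse [Then [rule conjI, simp], blast]*)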
val parse_by =
Parse.$$$ "by" |-- parse -- Scan.option parse
>> (fn (m1, m2) => ((m1, m2), maps reports_of (m1 :: the_list m2)));
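(*informal note: parse_by reads the arguments of the "by" command -- an initial method plus an
  optional terminal method, e.g. "by (induct n) auto"*)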
(* read method text *)
fun read ctxt src =
(case Scan.read Token.stopper (Parse.!!! (parser 0 --| Scan.ahead Parse.eof)) src of
SOME (text, range) =>
if checked_text text then text
else (report (text, range); check_text ctxt text)
| NONE => error ("Failed to parse method" ^ Position.here (#1 (Token.range_of src))));
fun read_closure ctxt src0 =
let
val src1 = map Token.init_assignable src0;
val text = read ctxt src1 |> map_source (method_closure ctxt);
val src2 = map Token.closure src1;
in (text, src2) end;
fun read_closure_input ctxt =
let val keywords = Keyword.no_major_keywords (Thy_Header.get_keywords' ctxt)
in Parse.read_embedded ctxt keywords (Scan.many Token.not_eof) #> read_closure ctxt end;
val text_closure =
Args.context -- Scan.lift (Parse.token Parse.embedded) >> (fn (ctxt, tok) =>
(case Token.get_value tok of
SOME (Token.Source src) => read ctxt src
| _ =>
let
val (text, src) = read_closure_input ctxt (Token.input_of tok);
val _ = Token.assign (SOME (Token.Source src)) tok;
in text end));
(* theory setup *)
val _ = Theory.setup
(setup \<^binding>\<open>fail\<close> (Scan.succeed (K fail)) "force failure" #>
setup \<^binding>\<open>succeed\<close> (Scan.succeed (K succeed)) "succeed" #>
setup \<^binding>\<open>sleep\<close> (Scan.lift Parse.real >> (fn s => fn _ => fn _ => sleep (seconds s)))
"succeed after delay (in seconds)" #>
setup \<^binding>\<open>-\<close> (Scan.succeed (K (SIMPLE_METHOD all_tac)))
"insert current facts, nothing else" #>
setup \<^binding>\<open>goal_cases\<close> (Scan.lift (Scan.repeat Args.name_token) >> (fn names => fn _ =>
CONTEXT_METHOD (fn _ => fn (ctxt, st) =>
(case drop (Thm.nprems_of st) names of
[] => NONE
| bad =>
if detect_closure_state st then NONE
else
SOME (fn () => ("Excessive case name(s): " ^ commas_quote (map Token.content_of bad) ^
Position.here (#1 (Token.range_of bad)))))
|> (fn SOME msg => Seq.single (Seq.Error msg)
| NONE => goal_cases_tac (map Token.content_of names) (ctxt, st)))))
"bind cases for goals" #>
setup \<^binding>\<open>subproofs\<close> (text_closure >> (Context_Tactic.SUBPROOFS ooo evaluate_runtime))
"apply proof method to subproofs with closed derivation" #>
setup \<^binding>\<open>insert\<close> (Attrib.thms >> (K o insert))
"insert theorems, ignoring facts" #>
setup \<^binding>\<open>intro\<close> (Attrib.thms >> (fn ths => fn ctxt => intro ctxt ths))
"repeatedly apply introduction rules" #>
setup \<^binding>\<open>elim\<close> (Attrib.thms >> (fn ths => fn ctxt => elim ctxt ths))
"repeatedly apply elimination rules" #>
setup \<^binding>\<open>unfold\<close> (Attrib.thms >> unfold_meth) "unfold definitions" #>
setup \<^binding>\<open>fold\<close> (Attrib.thms >> fold_meth) "fold definitions" #>
setup \<^binding>\<open>atomize\<close> (Scan.lift (Args.mode "full") >> atomize)
"present local premises as object-level statements" #>
setup \<^binding>\<open>rule\<close> (Attrib.thms >> (fn ths => fn ctxt => some_rule ctxt ths))
"apply some intro/elim rule" #>
setup \<^binding>\<open>erule\<close> (xrule_meth erule) "apply rule in elimination manner (improper)" #>
setup \<^binding>\<open>drule\<close> (xrule_meth drule) "apply rule in destruct manner (improper)" #>
setup \<^binding>\<open>frule\<close> (xrule_meth frule) "apply rule in forward manner (improper)" #>
setup \<^binding>\<open>this\<close> (Scan.succeed this) "apply current facts as rules" #>
setup \<^binding>\<open>fact\<close> (Attrib.thms >> fact) "composition by facts from context" #>
setup \<^binding>\<open>assumption\<close> (Scan.succeed assumption)
"proof by assumption, preferring facts" #>
setup \<^binding>\<open>rename_tac\<close> (Args.goal_spec -- Scan.lift (Scan.repeat1 Args.name) >>
(fn (quant, xs) => K (SIMPLE_METHOD'' quant (rename_tac xs))))
"rename parameters of goal" #>
setup \<^binding>\<open>rotate_tac\<close> (Args.goal_spec -- Scan.lift (Scan.optional Parse.int 1) >>
(fn (quant, i) => K (SIMPLE_METHOD'' quant (rotate_tac i))))
"rotate assumptions of goal" #>
setup \<^binding>\<open>tactic\<close> (parse_tactic >> (K o METHOD))
"ML tactic as proof method" #>
setup \<^binding>\<open>raw_tactic\<close> (parse_tactic >> (fn tac => fn _ => Context_Tactic.CONTEXT_TACTIC o tac))
"ML tactic as raw proof method" #>
setup \<^binding>\<open>use\<close>
(Attrib.thms -- (Scan.lift (Parse.$$$ "in") |-- text_closure) >>
(fn (thms, text) => fn ctxt => fn _ => evaluate_runtime text ctxt thms))
"indicate method facts and context for method expression");
(*final declarations of this structure!*)
val unfold = unfold_meth;
val fold = fold_meth;
end;
val CONTEXT_METHOD = Method.CONTEXT_METHOD;
val METHOD = Method.METHOD;
val SIMPLE_METHOD = Method.SIMPLE_METHOD;
val SIMPLE_METHOD' = Method.SIMPLE_METHOD';
val SIMPLE_METHOD'' = Method.SIMPLE_METHOD'';
diff --git a/src/Pure/Isar/proof.ML b/src/Pure/Isar/proof.ML
--- a/src/Pure/Isar/proof.ML
+++ b/src/Pure/Isar/proof.ML
@@ -1,1364 +1,1364 @@
(* Title: Pure/Isar/proof.ML
Author: Markus Wenzel, TU Muenchen
The Isar/VM proof language interpreter: maintains a structured flow of
context elements, goals, refinements, and facts.
*)
signature PROOF =
sig
type context = Proof.context
type method = Method.method
type state
val init: context -> state
val level: state -> int
val assert_bottom: bool -> state -> state
val context_of: state -> context
val theory_of: state -> theory
val map_context: (context -> context) -> state -> state
val map_context_result : (context -> 'a * context) -> state -> 'a * state
val map_contexts: (context -> context) -> state -> state
val propagate_ml_env: state -> state
val report_improper: state -> unit
val the_facts: state -> thm list
val the_fact: state -> thm
val set_facts: thm list -> state -> state
val reset_facts: state -> state
val improper_reset_facts: state -> state
val assert_forward: state -> state
val assert_chain: state -> state
val assert_forward_or_chain: state -> state
val assert_backward: state -> state
val assert_no_chain: state -> state
val enter_forward: state -> state
val enter_chain: state -> state
val enter_backward: state -> state
val using_facts: thm list -> state -> state
val pretty_state: state -> Pretty.T list
val refine: Method.text -> state -> state Seq.result Seq.seq
val refine_end: Method.text -> state -> state Seq.result Seq.seq
val refine_singleton: Method.text -> state -> state
val refine_insert: thm list -> state -> state
val refine_primitive: (Proof.context -> thm -> thm) -> state -> state
val goal_finished: state -> bool
val raw_goal: state -> {context: context, facts: thm list, goal: thm}
val goal: state -> {context: context, facts: thm list, goal: thm}
val simple_goal: state -> {context: context, goal: thm}
val let_bind: (term list * term) list -> state -> state
val let_bind_cmd: (string list * string) list -> state -> state
val write: Syntax.mode -> (term * mixfix) list -> state -> state
val write_cmd: Syntax.mode -> (string * mixfix) list -> state -> state
val fix: (binding * typ option * mixfix) list -> state -> state
val fix_cmd: (binding * string option * mixfix) list -> state -> state
val assm: Assumption.export -> (binding * typ option * mixfix) list ->
(term * term list) list list -> (Thm.binding * (term * term list) list) list ->
state -> state
val assm_cmd: Assumption.export -> (binding * string option * mixfix) list ->
(string * string list) list list -> (Attrib.binding * (string * string list) list) list ->
state -> state
val assume: (binding * typ option * mixfix) list ->
(term * term list) list list -> (Thm.binding * (term * term list) list) list ->
state -> state
val assume_cmd: (binding * string option * mixfix) list ->
(string * string list) list list -> (Attrib.binding * (string * string list) list) list ->
state -> state
val presume: (binding * typ option * mixfix) list ->
(term * term list) list list -> (Thm.binding * (term * term list) list) list ->
state -> state
val presume_cmd: (binding * string option * mixfix) list ->
(string * string list) list list -> (Attrib.binding * (string * string list) list) list ->
state -> state
val chain: state -> state
val chain_facts: thm list -> state -> state
val note_thmss: (Thm.binding * (thm list * attribute list) list) list -> state -> state
val note_thmss_cmd: (Attrib.binding * (Facts.ref * Token.src list) list) list -> state -> state
val from_thmss: ((thm list * attribute list) list) list -> state -> state
val from_thmss_cmd: ((Facts.ref * Token.src list) list) list -> state -> state
val with_thmss: ((thm list * attribute list) list) list -> state -> state
val with_thmss_cmd: ((Facts.ref * Token.src list) list) list -> state -> state
val supply: (Thm.binding * (thm list * attribute list) list) list -> state -> state
val supply_cmd: (Attrib.binding * (Facts.ref * Token.src list) list) list -> state -> state
val using: ((thm list * attribute list) list) list -> state -> state
val using_cmd: ((Facts.ref * Token.src list) list) list -> state -> state
val unfolding: ((thm list * attribute list) list) list -> state -> state
val unfolding_cmd: ((Facts.ref * Token.src list) list) list -> state -> state
val case_: Thm.binding * ((string * Position.T) * binding option list) -> state -> state
val case_cmd: Attrib.binding * ((string * Position.T) * binding option list) -> state -> state
val define: (binding * typ option * mixfix) list ->
(binding * typ option * mixfix) list ->
(Thm.binding * (term * term list) list) list -> state -> state
val define_cmd: (binding * string option * mixfix) list ->
(binding * string option * mixfix) list ->
(Attrib.binding * (string * string list) list) list -> state -> state
val begin_block: state -> state
val next_block: state -> state
val end_block: state -> state
val begin_notepad: context -> state
val end_notepad: state -> context
val is_notepad: state -> bool
val reset_notepad: state -> state
val proof: Method.text_range option -> state -> state Seq.result Seq.seq
val defer: int -> state -> state
val prefer: int -> state -> state
val apply: Method.text_range -> state -> state Seq.result Seq.seq
val apply_end: Method.text_range -> state -> state Seq.result Seq.seq
val local_qed: Method.text_range option * bool -> state -> state
val theorem: Method.text option -> (thm list list -> context -> context) ->
(term * term list) list list -> context -> state
val theorem_cmd: Method.text option -> (thm list list -> context -> context) ->
(string * string list) list list -> context -> state
val global_qed: Method.text_range option * bool -> state -> context
val schematic_goal: state -> bool
val is_relevant: state -> bool
val local_terminal_proof: Method.text_range * Method.text_range option -> state -> state
val local_default_proof: state -> state
val local_immediate_proof: state -> state
val local_skip_proof: bool -> state -> state
val local_done_proof: state -> state
val global_terminal_proof: Method.text_range * Method.text_range option -> state -> context
val global_default_proof: state -> context
val global_immediate_proof: state -> context
val global_skip_proof: bool -> state -> context
val global_done_proof: state -> context
val internal_goal: (context -> (string * string) * (string * thm list) list -> unit) ->
Proof_Context.mode -> bool -> string -> Method.text option ->
(context * thm list list -> state -> state) ->
(binding * typ option * mixfix) list ->
(Thm.binding * (term * term list) list) list ->
(Thm.binding * (term * term list) list) list -> state -> thm list * state
val have: bool -> Method.text option -> (context * thm list list -> state -> state) ->
(binding * typ option * mixfix) list ->
(Thm.binding * (term * term list) list) list ->
(Thm.binding * (term * term list) list) list -> bool -> state -> thm list * state
val have_cmd: bool -> Method.text option -> (context * thm list list -> state -> state) ->
(binding * string option * mixfix) list ->
(Attrib.binding * (string * string list) list) list ->
(Attrib.binding * (string * string list) list) list -> bool -> state -> thm list * state
val show: bool -> Method.text option -> (context * thm list list -> state -> state) ->
(binding * typ option * mixfix) list ->
(Thm.binding * (term * term list) list) list ->
(Thm.binding * (term * term list) list) list -> bool -> state -> thm list * state
val show_cmd: bool -> Method.text option -> (context * thm list list -> state -> state) ->
(binding * string option * mixfix) list ->
(Attrib.binding * (string * string list) list) list ->
(Attrib.binding * (string * string list) list) list -> bool -> state -> thm list * state
val future_proof: (state -> ('a * context) future) -> state -> 'a future * state
val local_future_terminal_proof: Method.text_range * Method.text_range option -> state -> state
val global_future_terminal_proof: Method.text_range * Method.text_range option -> state -> context
end;
structure Proof: PROOF =
struct
type context = Proof.context;
type method = Method.method;
(** proof state **)
(* datatype state *)
datatype mode = Forward | Chain | Backward;
datatype state =
State of node Stack.T
and node =
Node of
{context: context,
facts: (thm list * bool) option,
mode: mode,
goal: goal option}
and goal =
Goal of
{statement: (string * Position.T) * term list list * term,
(*goal kind and statement (starting with vars), initial proposition*)
using: thm list, (*goal facts*)
goal: thm, (*subgoals \<Longrightarrow> statement*)
before_qed: Method.text option,
after_qed:
(context * thm list list -> state -> state) *
(context * thm list list -> context -> context)};
val _ =
ML_system_pp (fn _ => fn _ => fn _: state =>
Pretty.to_polyml (Pretty.str "<Proof.state>"));
fun make_goal (statement, using, goal, before_qed, after_qed) =
Goal {statement = statement, using = using, goal = goal,
before_qed = before_qed, after_qed = after_qed};
fun make_node (context, facts, mode, goal) =
Node {context = context, facts = facts, mode = mode, goal = goal};
fun map_node f (Node {context, facts, mode, goal}) =
make_node (f (context, facts, mode, goal));
val init_context =
Proof_Context.set_stmt true #>
Proof_Context.map_naming (K Name_Space.local_naming);
fun init ctxt =
State (Stack.init (make_node (init_context ctxt, NONE, Forward, NONE)));
fun top (State stack) = Stack.top stack |> (fn Node node => node);
fun map_top f (State stack) = State (Stack.map_top (map_node f) stack);
fun map_all f (State stack) = State (Stack.map_all (map_node f) stack);
(** basic proof state operations **)
(* block structure *)
fun open_block (State stack) = State (Stack.push stack);
fun close_block (State stack) = State (Stack.pop stack)
handle List.Empty => error "Unbalanced block parentheses";
fun level (State stack) = Stack.level stack;
fun assert_bottom b state =
let val b' = level state <= 2 in
if b andalso not b' then error "Not at bottom of proof"
else if not b andalso b' then error "Already at bottom of proof"
else state
end;
(* context *)
val context_of = #context o top;
val theory_of = Proof_Context.theory_of o context_of;
fun map_context f =
map_top (fn (ctxt, facts, mode, goal) => (f ctxt, facts, mode, goal));
fun map_context_result f state =
f (context_of state) ||> (fn ctxt => map_context (K ctxt) state);
fun map_contexts f = map_all (fn (ctxt, facts, mode, goal) => (f ctxt, facts, mode, goal));
fun propagate_ml_env state = map_contexts
(Context.proof_map (ML_Env.inherit [Context.Proof (context_of state)])) state;
(* facts *)
fun report_improper state =
Context_Position.report (context_of state) (Position.thread_data ()) Markup.improper;
val get_facts = #facts o top;
fun the_facts state =
(case get_facts state of
SOME (facts, proper) => (if proper then () else report_improper state; facts)
| NONE => error "No current facts available");
fun the_fact state =
(case the_facts state of
[thm] => thm
| _ => error "Single theorem expected");
fun put_facts index facts =
map_top (fn (ctxt, _, mode, goal) => (ctxt, facts, mode, goal)) #>
map_context (Proof_Context.put_thms index (Auto_Bind.thisN, Option.map #1 facts));
fun set_facts thms = put_facts false (SOME (thms, true));
val reset_facts = put_facts false NONE;
fun improper_reset_facts state =
(case get_facts state of
SOME (thms, _) => put_facts false (SOME (thms, false)) state
| NONE => state);
fun these_factss more_facts (named_factss, state) =
(named_factss, state |> set_facts (maps snd named_factss @ more_facts));
fun export_facts inner outer =
(case get_facts inner of
NONE => reset_facts outer
| SOME (thms, proper) =>
let val thms' = Proof_Context.export (context_of inner) (context_of outer) thms
in put_facts true (SOME (thms', proper)) outer end);
(* mode *)
val get_mode = #mode o top;
fun put_mode mode = map_top (fn (ctxt, facts, _, goal) => (ctxt, facts, mode, goal));
val mode_name = (fn Forward => "state" | Chain => "chain" | Backward => "prove");
fun assert_mode pred state =
let val mode = get_mode state in
if pred mode then state
else error ("Illegal application of proof command in " ^ quote (mode_name mode) ^ " mode")
end;
val assert_forward = assert_mode (fn mode => mode = Forward);
val assert_chain = assert_mode (fn mode => mode = Chain);
val assert_forward_or_chain = assert_mode (fn mode => mode = Forward orelse mode = Chain);
val assert_backward = assert_mode (fn mode => mode = Backward);
val assert_no_chain = assert_mode (fn mode => mode <> Chain);
val enter_forward = put_mode Forward;
val enter_chain = put_mode Chain;
val enter_backward = put_mode Backward;
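(*informal note: the three modes correspond to the Isar display "proof (state)",
  "proof (chain)" and "proof (prove)"; commands assert the expected mode before acting,
  e.g. "have"/"show" require Forward or Chain, while "apply" requires Backward*)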
(* current goal *)
fun current_goal state =
(case top state of
{context = ctxt, goal = SOME (Goal goal), ...} => (ctxt, goal)
| _ => error "No current goal");
fun assert_current_goal g state =
let val g' = can current_goal state in
if g andalso not g' then error "No goal in this block"
else if not g andalso g' then error "Goal present in this block"
else state
end;
fun put_goal goal = map_top (fn (ctxt, facts, mode, _) => (ctxt, facts, mode, goal));
val set_goal = put_goal o SOME;
val reset_goal = put_goal NONE;
val before_qed = #before_qed o #2 o current_goal;
(* nested goal *)
fun map_goal f (State stack) =
(case Stack.dest stack of
(Node {context = ctxt, facts, mode, goal = SOME goal}, node :: nodes) =>
let
val Goal {statement, using, goal, before_qed, after_qed} = goal;
val (ctxt', statement', using', goal', before_qed', after_qed') =
f (ctxt, statement, using, goal, before_qed, after_qed);
val goal' = make_goal (statement', using', goal', before_qed', after_qed');
in State (Stack.make (make_node (ctxt', facts, mode, SOME goal')) (node :: nodes)) end
| (top_node, node :: nodes) =>
let
val State stack' = map_goal f (State (Stack.make node nodes));
val (node', nodes') = Stack.dest stack';
in State (Stack.make top_node (node' :: nodes')) end
| _ => State stack);
fun provide_goal goal =
map_goal (fn (ctxt, statement, using, _, before_qed, after_qed) =>
(ctxt, statement, using, goal, before_qed, after_qed));
fun using_facts using =
map_goal (fn (ctxt, statement, _, goal, before_qed, after_qed) =>
(ctxt, statement, using, goal, before_qed, after_qed));
fun find_goal state =
try current_goal state
|> \<^if_none>\<open>find_goal (close_block state handle ERROR _ => error "No proof goal")\<close>;
(** pretty_state **)
local
fun pretty_sep prts [] = prts
| pretty_sep [] prts = prts
| pretty_sep prts1 prts2 = prts1 @ [Pretty.str ""] @ prts2;
in
fun pretty_state state =
let
val {context = ctxt, facts, mode, goal = _} = top state;
val verbose = Config.get ctxt Proof_Context.verbose;
val prt_facts = Proof_Display.pretty_goal_facts ctxt;
fun prt_goal (SOME (_, goal)) =
let
val {statement = (_, propss, _), using, goal, before_qed = _, after_qed = _} = goal;
val head = [Proof_Display.pretty_goal_header goal];
val body = Goal_Display.pretty_goals ctxt goal;
val foot = Proof_Display.pretty_goal_inst ctxt propss goal;
in
pretty_sep
(prt_facts "using" (if mode <> Backward orelse null using then NONE else SOME using))
(head @ body @ foot)
end
| prt_goal NONE = [];
val prt_ctxt =
if verbose orelse mode = Forward then Proof_Context.pretty_context ctxt
else if mode = Backward then Proof_Context.pretty_ctxt ctxt
else [];
val position_markup = Position.markup (Position.thread_data ()) Markup.position;
in
[Pretty.block
[Pretty.mark_str (position_markup, "proof"), Pretty.str (" (" ^ mode_name mode ^ ")")]] @
(if null prt_ctxt then [] else prt_ctxt @ [Pretty.str ""]) @
(if verbose orelse mode = Forward then
pretty_sep (prt_facts "" (Option.map #1 facts)) (prt_goal (try find_goal state))
else if mode = Chain then prt_facts "picking" (Option.map #1 facts)
else prt_goal (try find_goal state))
end;
end;
(** proof steps **)
(* refine via method *)
local
fun apply_method text ctxt state =
find_goal state |> (fn (goal_ctxt, {statement, using, goal, before_qed, after_qed}) =>
Method.evaluate text ctxt using (goal_ctxt, goal)
|> Seq.map_result (fn (goal_ctxt', goal') =>
state |> map_goal (K (goal_ctxt', statement, using, goal', before_qed, after_qed))));
in
fun refine text state = apply_method text (context_of state) state;
fun refine_end text state = apply_method text (#1 (find_goal state)) state;
fun refine_singleton text = refine text #> Seq.the_result "";
fun refine_insert ths =
refine_singleton (Method.Basic (K (Method.insert ths)));
fun refine_primitive r =
refine_singleton (Method.Basic (fn ctxt => fn _ => CONTEXT_TACTIC (PRIMITIVE (r ctxt))));
end;
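(*informal note: refine applies a method text to the pending goal, threading the goal facts
  ("using") through Method.evaluate; refine_end evaluates in the goal context rather than the
  current proof context, and refine_singleton commits to a single result*)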
(* refine via sub-proof *)
local
fun finish_tac _ 0 = K all_tac
| finish_tac ctxt n =
Goal.norm_hhf_tac ctxt THEN'
SUBGOAL (fn (goal, i) =>
if can Logic.unprotect (Logic.strip_assums_concl goal) then
eresolve_tac ctxt [Drule.protectI] i THEN finish_tac ctxt (n - 1) i
else finish_tac ctxt (n - 1) (i + 1));
fun goal_tac ctxt rule =
Goal.norm_hhf_tac ctxt THEN'
resolve_tac ctxt [rule] THEN'
finish_tac ctxt (Thm.nprems_of rule);
fun FINDGOAL tac st =
let fun find i n = if i > n then Seq.fail else Seq.APPEND (tac i, find (i + 1) n)
in find 1 (Thm.nprems_of st) st end;
fun protect_prem i th =
Thm.bicompose NONE {flatten = false, match = false, incremented = true}
(false, Drule.incr_indexes th Drule.protectD, 1) i th
|> Seq.hd;
fun protect_prems th =
fold_rev protect_prem (1 upto Thm.nprems_of th) th;
in
fun refine_goals print_rule result_ctxt result state =
let
val (goal_ctxt, {goal, ...}) = find_goal state;
fun refine rule st =
(print_rule goal_ctxt rule; FINDGOAL (goal_tac goal_ctxt rule) st);
in
result
|> map (Raw_Simplifier.norm_hhf result_ctxt #> protect_prems)
- |> Proof_Context.goal_export result_ctxt goal_ctxt
+ |> Proof_Context.export_goal result_ctxt goal_ctxt
|> (fn rules => Seq.lift provide_goal (EVERY (map refine rules) goal) state)
end;
end;
(* conclude goal *)
fun conclude_goal ctxt goal propss =
let
val thy = Proof_Context.theory_of ctxt;
val _ =
Context.subthy_id (Thm.theory_id goal, Context.theory_id thy) orelse
error "Bad background theory of goal state";
val _ = Thm.no_prems goal orelse error (Proof_Display.string_of_goal ctxt goal);
fun err_lost () = error ("Lost goal structure:\n" ^ Thm.string_of_thm ctxt goal);
val th =
(Goal.conclude (Thm.close_derivation \<^here> goal) handle THM _ => err_lost ())
|> Drule.flexflex_unique (SOME ctxt)
|> Thm.check_shyps ctxt
|> Thm.check_hyps (Context.Proof ctxt);
val goal_propss = filter_out null propss;
val results =
Conjunction.elim_balanced (length goal_propss) th
|> map2 Conjunction.elim_balanced (map length goal_propss)
handle THM _ => err_lost ();
val matcher =
Unify.matcher (Context.Proof ctxt)
(map (Soft_Type_System.purge ctxt) (flat goal_propss)) (map Thm.prop_of (flat results));
val _ =
is_none matcher andalso error ("Proved a different theorem:\n" ^ Thm.string_of_thm ctxt th);
fun recover_result ([] :: pss) thss = [] :: recover_result pss thss
| recover_result (_ :: pss) (ths :: thss) = ths :: recover_result pss thss
| recover_result [] [] = []
| recover_result _ _ = err_lost ();
in recover_result propss results end;
val finished_goal_error = "Failed to finish proof";
fun finished_goal pos state =
let val (ctxt, {goal, ...}) = find_goal state in
if Thm.no_prems goal then Seq.Result state
else
Seq.Error (fn () =>
finished_goal_error ^ Position.here pos ^ ":\n" ^
Proof_Display.string_of_goal ctxt goal)
end;
(* goal views -- corresponding to methods *)
val goal_finished = Thm.no_prems o #goal o #2 o find_goal;
fun raw_goal state =
let val (ctxt, {using, goal, ...}) = find_goal state
in {context = ctxt, facts = using, goal = goal} end;
val goal = raw_goal o refine_insert [];
fun simple_goal state =
let
val (_, {using, ...}) = find_goal state;
val (ctxt, {goal, ...}) = find_goal (refine_insert using state);
in {context = ctxt, goal = goal} end;
fun method_error kind pos state =
Seq.single (Proof_Display.method_error kind pos (raw_goal state));
(*** structured proof commands ***)
(** context elements **)
(* let bindings *)
local
fun gen_bind prep_terms raw_binds =
assert_forward #> map_context (fn ctxt =>
let
fun prep_bind (raw_pats, t) ctxt1 =
let
val T = Term.fastype_of t;
val ctxt2 = Variable.declare_term t ctxt1;
val pats = prep_terms (Proof_Context.set_mode Proof_Context.mode_pattern ctxt2) T raw_pats;
val binds = Proof_Context.simult_matches ctxt2 (t, pats);
in (binds, ctxt2) end;
val ts = prep_terms ctxt dummyT (map snd raw_binds);
val (binds, ctxt') = apfst flat (fold_map prep_bind (map fst raw_binds ~~ ts) ctxt);
val binds' = map #1 binds ~~ Variable.exportT_terms ctxt' ctxt (map #2 binds);
val ctxt'' =
ctxt
|> fold Variable.declare_term (map #2 binds')
|> fold Proof_Context.bind_term binds';
val _ = Variable.warn_extra_tfrees ctxt ctxt'';
in ctxt'' end)
#> reset_facts;
fun read_terms ctxt T =
map (Syntax.parse_term ctxt #> Type.constraint T) #> Syntax.check_terms ctxt;
in
val let_bind = gen_bind (fn ctxt => fn _ => map (Proof_Context.cert_term ctxt));
val let_bind_cmd = gen_bind read_terms;
end;
(* concrete syntax *)
local
fun read_arg (c, mx) ctxt =
(case Proof_Context.read_const {proper = false, strict = false} ctxt c of
Free (x, _) =>
let
val ctxt' =
ctxt |> is_none (Variable.default_type ctxt x) ?
Variable.declare_constraints (Free (x, Mixfix.default_constraint mx));
val t = Free (#1 (Proof_Context.inferred_param x ctxt'));
in ((t, mx), ctxt') end
| t => ((t, mx), ctxt));
fun gen_write prep_arg mode args =
assert_forward
#> map_context (fold_map prep_arg args #-> Proof_Context.notation true mode)
#> reset_facts;
in
val write = gen_write pair;
val write_cmd = gen_write read_arg;
end;
(* fix *)
local
fun gen_fix add_fixes raw_fixes =
assert_forward
#> map_context (snd o add_fixes raw_fixes)
#> reset_facts;
in
val fix = gen_fix Proof_Context.add_fixes;
val fix_cmd = gen_fix Proof_Context.add_fixes_cmd;
end;
(* assume *)
local
fun gen_assume prep_statement prep_att export raw_fixes raw_prems raw_concls state =
let
val ctxt = context_of state;
val bindings = map (apsnd (map (prep_att ctxt)) o fst) raw_concls;
val {fixes = params, assumes = prems_propss, shows = concl_propss, result_binds, text, ...} =
#1 (prep_statement raw_fixes raw_prems (map snd raw_concls) ctxt);
val propss = (map o map) (Logic.close_prop params (flat prems_propss)) concl_propss;
in
state
|> assert_forward
|> map_context_result (fn ctxt =>
ctxt
|> Proof_Context.augment text
|> fold Variable.maybe_bind_term result_binds
|> fold_burrow (Assumption.add_assms export o map (Thm.cterm_of ctxt)) propss
|-> (fn premss =>
Proof_Context.note_thmss "" (bindings ~~ (map o map) (fn th => ([th], [])) premss)))
|> these_factss [] |> #2
end;
in
val assm = gen_assume Proof_Context.cert_statement (K I);
val assm_cmd = gen_assume Proof_Context.read_statement Attrib.attribute_cmd;
val assume = assm Assumption.assume_export;
val assume_cmd = assm_cmd Assumption.assume_export;
val presume = assm Assumption.presume_export;
val presume_cmd = assm_cmd Assumption.presume_export;
end;
(** facts **)
(* chain *)
val chain =
assert_forward
#> (fn state => set_facts (Method.clean_facts (the_facts state)) state)
#> enter_chain;
fun chain_facts facts =
set_facts facts
#> chain;
(* note etc. *)
fun empty_bindings args = map (pair Binding.empty_atts) args;
local
fun gen_thmss more_facts opt_chain opt_result prep_atts prep_fact args state =
state
|> assert_forward
|> map_context_result (fn ctxt => ctxt |> Proof_Context.note_thmss ""
(Attrib.map_facts_refs (map (prep_atts ctxt)) (prep_fact ctxt) args))
|> these_factss (more_facts state)
||> opt_chain
|> opt_result;
in
val note_thmss = gen_thmss (K []) I #2 (K I) (K I);
val note_thmss_cmd = gen_thmss (K []) I #2 Attrib.attribute_cmd Proof_Context.get_fact;
val from_thmss = gen_thmss (K []) chain #2 (K I) (K I) o empty_bindings;
val from_thmss_cmd =
gen_thmss (K []) chain #2 Attrib.attribute_cmd Proof_Context.get_fact o empty_bindings;
val with_thmss = gen_thmss the_facts chain #2 (K I) (K I) o empty_bindings;
val with_thmss_cmd =
gen_thmss the_facts chain #2 Attrib.attribute_cmd Proof_Context.get_fact o empty_bindings;
val local_results = gen_thmss (K []) I I (K I) (K I) o map (apsnd Thm.simple_fact);
end;
(* facts during goal refinement *)
local
fun gen_supply prep_att prep_fact args state =
state
|> assert_backward
|> map_context (fn ctxt => ctxt |> Proof_Context.note_thmss ""
(Attrib.map_facts_refs (map (prep_att ctxt)) (prep_fact ctxt) args) |> snd);
in
val supply = gen_supply (K I) (K I);
val supply_cmd = gen_supply Attrib.attribute_cmd Proof_Context.get_fact;
end;
(* using/unfolding *)
local
fun gen_using f g prep_att prep_fact args state =
state
|> assert_backward
|> map_context_result
(fn ctxt => ctxt |> Proof_Context.note_thmss ""
(Attrib.map_facts_refs (map (prep_att ctxt)) (prep_fact ctxt) (empty_bindings args)))
|> (fn (named_facts, state') =>
state' |> map_goal (fn (goal_ctxt, statement, using, goal, before_qed, after_qed) =>
let
val ctxt = context_of state';
val ths = maps snd named_facts;
in (goal_ctxt, statement, f ctxt ths using, g ctxt ths goal, before_qed, after_qed) end));
fun append_using _ ths using = using @ filter_out Thm.is_dummy ths;
fun unfold_using ctxt ths = map (Local_Defs.unfold ctxt ths);
val unfold_goals = Local_Defs.unfold_goals;
in
val using = gen_using append_using (K (K I)) (K I) (K I);
val using_cmd = gen_using append_using (K (K I)) Attrib.attribute_cmd Proof_Context.get_fact;
val unfolding = gen_using unfold_using unfold_goals (K I) (K I);
val unfolding_cmd = gen_using unfold_using unfold_goals Attrib.attribute_cmd Proof_Context.get_fact;
end;
(* case *)
local
fun gen_case internal prep_att ((raw_binding, raw_atts), ((name, pos), xs)) state =
let
val ctxt = context_of state;
val binding = if Binding.is_empty raw_binding then Binding.make (name, pos) else raw_binding;
val atts = map (prep_att ctxt) raw_atts;
val (asms, state') = state |> map_context_result (fn ctxt =>
ctxt |> Proof_Context.apply_case (Proof_Context.check_case ctxt internal (name, pos) xs));
val assumptions =
asms |> map (fn (a, ts) => ((Binding.qualify_name true binding a, []), map (rpair []) ts));
in
state'
|> assume [] [] assumptions
|> map_context (fold Variable.unbind_term Auto_Bind.no_facts)
|> `the_facts |-> (fn thms => note_thmss [((binding, atts), [(thms, [])])])
end;
in
val case_ = gen_case true (K I);
val case_cmd = gen_case false Attrib.attribute_cmd;
end;
(* define *)
local
fun gen_define prep_stmt prep_att raw_decls raw_fixes raw_defs state =
let
val _ = assert_forward state;
val ctxt = context_of state;
(*vars*)
val ({vars, propss, result_binds, ...}, vars_ctxt) =
prep_stmt (raw_decls @ raw_fixes) (map snd raw_defs) ctxt;
val (decls, fixes) = chop (length raw_decls) vars;
val show_term = Syntax.string_of_term vars_ctxt;
(*defs*)
fun match_defs (((b, _, mx), (_, Free (x, T))) :: more_decls) ((((y, U), t), _) :: more_defs) =
if x = y then ((b, mx), (Binding.empty_atts, t)) :: match_defs more_decls more_defs
else
error ("Mismatch of declaration " ^ show_term (Free (x, T)) ^ " wrt. definition " ^
show_term (Free (y, U)))
| match_defs [] [] = []
| match_defs more_decls more_defs =
error ("Mismatch of declarations " ^ commas (map (show_term o #2 o #2) more_decls) ^
(if null more_decls then "" else " ") ^
"wrt. definitions " ^ commas (map (show_term o Free o #1 o #1) more_defs));
val derived_def = Local_Defs.derived_def ctxt (K []) {conditional = false};
val defs1 = map (derived_def o Logic.close_prop (map #2 fixes) []) (flat propss);
val defs2 = match_defs decls defs1;
val (defs3, defs_ctxt) = Local_Defs.define defs2 ctxt;
(*notes*)
val def_thmss =
map (fn (((_, prove), ((b, _), _)), (_, (_, th))) => (b, prove defs_ctxt th))
(defs1 ~~ defs2 ~~ defs3)
|> unflat (map snd raw_defs);
val notes =
map2 (fn ((a, raw_atts), _) => fn def_thms =>
((Thm.def_binding_optional (Binding.conglomerate (map #1 def_thms)) a,
map (prep_att defs_ctxt) raw_atts), map (fn (_, th) => ([th], [])) def_thms))
raw_defs def_thmss;
in
state
|> map_context (K defs_ctxt #> fold Variable.bind_term result_binds)
|> note_thmss notes
end;
in
val define = gen_define Proof_Context.cert_stmt (K I);
val define_cmd = gen_define Proof_Context.read_stmt Attrib.attribute_cmd;
end;
(** proof structure **)
(* blocks *)
val begin_block =
assert_forward
#> open_block
#> reset_goal
#> open_block;
val next_block =
assert_forward
#> close_block
#> open_block
#> reset_goal
#> reset_facts;
fun end_block state =
state
|> assert_forward
|> assert_bottom false
|> close_block
|> assert_current_goal false
|> close_block
|> export_facts state;
(* global notepad *)
val begin_notepad =
init
#> open_block
#> map_context (Variable.set_body true)
#> open_block;
val end_notepad =
assert_forward
#> assert_bottom true
#> close_block
#> assert_current_goal false
#> close_block
#> context_of;
fun get_notepad_context (State stack) =
let
fun escape [Node {goal = SOME _, ...}, Node {goal = NONE, ...}] = NONE
| escape [Node {goal = SOME _, ...}] = NONE
| escape [Node {goal = NONE, context = ctxt, ...}] = SOME ctxt
| escape [] = NONE
| escape (_ :: rest) = escape rest;
in escape (op :: (Stack.dest stack)) end;
val is_notepad = is_some o get_notepad_context;
fun reset_notepad state =
begin_notepad (the_default (context_of state) (get_notepad_context state));
(* sub-proofs *)
fun proof opt_text =
Seq.APPEND
(assert_backward
#> refine (the_default Method.standard_text (Method.text opt_text))
#> Seq.map_result
(using_facts []
#> enter_forward
#> open_block
#> reset_goal),
method_error "initial" (Method.position opt_text));
fun end_proof bot (prev_pos, (opt_text, immed)) =
let
val (finish_text, terminal_pos, finished_pos) =
(case opt_text of
NONE => (Method.finish_text (NONE, immed), Position.none, prev_pos)
| SOME (text, (pos, end_pos)) => (Method.finish_text (SOME text, immed), pos, end_pos));
in
Seq.APPEND (fn state =>
state
|> assert_forward
|> assert_bottom bot
|> close_block
|> assert_current_goal true
|> using_facts []
|> `before_qed |-> (refine o the_default Method.succeed_text)
|> Seq.maps_results (refine finish_text),
method_error "terminal" terminal_pos)
#> Seq.maps_results (Seq.single o finished_goal finished_pos)
end;
fun check_result msg sq =
(case Seq.pull sq of
NONE => error msg
| SOME (s, _) => s);
(* unstructured refinement *)
fun defer i =
assert_no_chain #>
refine_singleton (Method.Basic (fn _ => METHOD (fn _ => ASSERT_SUBGOAL defer_tac i)));
fun prefer i =
assert_no_chain #>
refine_singleton (Method.Basic (fn _ => METHOD (fn _ => ASSERT_SUBGOAL prefer_tac i)));
fun apply (text, (pos, _)) =
Seq.APPEND (assert_backward #> refine text #> Seq.map_result (using_facts []),
method_error "" pos);
fun apply_end (text, (pos, _)) =
Seq.APPEND (assert_forward #> refine_end text, method_error "" pos);
(** goals **)
(* generic goals *)
local
val is_var =
can (dest_TVar o Logic.dest_type o Logic.dest_term) orf
can (dest_Var o Logic.dest_term);
fun implicit_vars props =
let
val var_props = take_prefix is_var props;
val explicit_vars = fold Term.add_vars var_props [];
in
fold Term.add_vars props [] |> map_filter (fn v =>
if member (op =) explicit_vars v then NONE else SOME (Logic.mk_term (Var v)))
end;
fun refine_terms n =
refine_singleton (Method.Basic (fn ctxt => CONTEXT_TACTIC o
K (HEADGOAL (PRECISE_CONJUNCTS n
(HEADGOAL (CONJUNCTS (ALLGOALS (resolve_tac ctxt [Drule.termI]))))))));
in
fun generic_goal kind before_qed after_qed goal_ctxt goal_propss result_binds state =
let
val chaining = can assert_chain state;
val pos = Position.thread_data ();
val goal_props = flat goal_propss;
val goal_vars = implicit_vars goal_props;
val propss' = goal_vars :: goal_propss;
val goal_propss' = filter_out null propss';
val goal =
Logic.mk_conjunction_balanced (map Logic.mk_conjunction_balanced goal_propss')
|> Thm.cterm_of goal_ctxt
|> Thm.weaken_sorts' goal_ctxt;
val statement = ((kind, pos), propss', Thm.term_of goal);
val after_qed' = after_qed |>> (fn after_local => fn results =>
map_context (fold Variable.maybe_bind_term result_binds) #> after_local results);
in
state
|> assert_forward_or_chain
|> enter_forward
|> open_block
|> enter_backward
|> map_context
(K goal_ctxt
#> init_context
#> Variable.set_body true
#> Proof_Context.auto_bind_goal goal_props)
|> set_goal (make_goal (statement, [], Goal.init goal, before_qed, after_qed'))
|> chaining ? (`the_facts #-> using_facts)
|> reset_facts
|> not (null goal_vars) ? refine_terms (length goal_propss')
|> forall null goal_propss' ? refine_singleton (Method.Basic Method.assumption)
end;
fun generic_qed state =
let
val (goal_ctxt, goal_config) = current_goal state;
val {statement = (_, propss, _), goal, after_qed, ...} = goal_config;
val results = tl (conclude_goal goal_ctxt goal propss);
val res =
{goal_ctxt = goal_ctxt,
goal_config = goal_config,
after_qed = after_qed,
results = results};
in state |> close_block |> pair res end;
end;
(* local goals *)
fun local_goal print_results prep_statement prep_att strict_asm
kind before_qed after_qed raw_fixes raw_assumes raw_shows state =
let
val ctxt = context_of state;
val add_assumes =
Assumption.add_assms
(if strict_asm then Assumption.assume_export else Assumption.presume_export);
(*params*)
val ({fixes = params, assumes = assumes_propss, shows = shows_propss,
result_binds, result_text, text}, params_ctxt) = ctxt
|> prep_statement raw_fixes (map snd raw_assumes) (map snd raw_shows);
(*prems*)
val (assumes_bindings, shows_bindings) =
apply2 (map (apsnd (map (prep_att ctxt)) o fst)) (raw_assumes, raw_shows);
val (that_fact, goal_ctxt) = params_ctxt
|> fold Proof_Context.augment (text :: flat (assumes_propss @ shows_propss))
|> fold_burrow add_assumes ((map o map) (Thm.cterm_of params_ctxt) assumes_propss)
|> (fn (premss, ctxt') =>
let
val prems = Assumption.local_prems_of ctxt' ctxt;
val ctxt'' =
ctxt'
|> not (null assumes_propss) ?
(snd o Proof_Context.note_thms ""
((Binding.name Auto_Bind.thatN, []), [(prems, [])]))
|> (snd o Proof_Context.note_thmss ""
(assumes_bindings ~~ map (map (fn th => ([th], []))) premss))
in (prems, ctxt'') end);
(*result*)
val results_bindings = map (apfst Binding.default_pos) shows_bindings;
fun after_qed' (result_ctxt, results) state' =
let
val ctxt' = context_of state';
val export0 =
- Assumption.export false result_ctxt (Proof_Context.augment result_text ctxt') #>
+ Assumption.export result_ctxt (Proof_Context.augment result_text ctxt') #>
fold_rev (fn (x, v) => Thm.forall_intr_name (x, Thm.cterm_of params_ctxt v)) params #>
Raw_Simplifier.norm_hhf_protect ctxt';
val export = map export0 #> Variable.export result_ctxt ctxt';
in
state'
|> map_context (Proof_Context.augment result_text)
|> local_results (results_bindings ~~ burrow export results)
|-> (fn res => tap (fn st => print_results (context_of st) ((kind, ""), res) : unit))
|> after_qed (result_ctxt, results)
end;
in
state
|> generic_goal kind before_qed (after_qed', K I) goal_ctxt shows_propss result_binds
|> pair that_fact
end;
fun local_qeds arg =
end_proof false arg #> Seq.map_result
(generic_qed #-> (fn {after_qed, goal_ctxt, results, ...} => #1 after_qed (goal_ctxt, results)));
fun local_qed arg =
local_qeds (Position.none, arg) #> Seq.the_result finished_goal_error;
(* global goals *)
fun global_goal prep_propp before_qed after_qed propp ctxt =
let
val (propss, binds) =
prep_propp (Proof_Context.set_mode Proof_Context.mode_schematic ctxt) propp;
val goal_ctxt = ctxt
|> (fold o fold) Proof_Context.augment propss
|> fold Variable.bind_term binds;
fun after_qed' (result_ctxt, results) ctxt' = ctxt'
|> Proof_Context.restore_naming ctxt
|> after_qed (burrow (Proof_Context.export result_ctxt ctxt') results);
in
ctxt
|> init
|> generic_goal "" before_qed (K I, after_qed') goal_ctxt propss []
end;
val theorem = global_goal Proof_Context.cert_propp;
val theorem_cmd = global_goal Proof_Context.read_propp;
fun global_qeds arg =
end_proof true arg #> Seq.map_result
(generic_qed #> (fn ({goal_ctxt, goal_config, after_qed, results}, state) =>
((goal_ctxt, Goal goal_config), #2 after_qed (goal_ctxt, results) (context_of state))));
fun global_goal_inst ((goal_ctxt, Goal goal), result_ctxt: context) =
let
val {statement = (_, propss, _), goal, ...} = goal;
val _ =
(case Proof_Display.pretty_goal_inst goal_ctxt propss goal of
[] => ()
| prts => Output.state (Pretty.string_of (Pretty.chunks prts)));
in result_ctxt end;
fun global_qed arg =
global_qeds (Position.none, arg) #> Seq.the_result finished_goal_error #> global_goal_inst;
(* relevant proof states *)
fun schematic_goal state =
let val (_, {statement = (_, _, prop), ...}) = find_goal state
in Term.is_schematic prop end;
fun is_relevant state =
(case try find_goal state of
NONE => true
| SOME (_, {statement = (_, _, prop), goal, ...}) =>
Term.is_schematic prop orelse not (Logic.protect prop aconv Thm.concl_of goal));
(* terminal proof steps *)
local
fun terminal_proof qeds initial terminal state =
let
val ctxt = context_of state;
val check_closure = Method.check_text ctxt #> Method.map_source (Method.method_closure ctxt);
val initial' = apfst check_closure initial;
val terminal' = (apfst o Option.map o apfst) check_closure terminal;
in
if Goal.skip_proofs_enabled () andalso not (is_relevant state) then
state
|> proof (SOME (Method.sorry_text true, #2 initial'))
|> Seq.maps_results (qeds (#2 (#2 initial), (NONE, #2 terminal)))
else
state
|> proof (SOME initial')
|> Seq.maps_results (qeds (#2 (#2 initial), terminal'))
end |> Seq.the_result "";
in
fun local_terminal_proof (text, opt_text) = terminal_proof local_qeds text (opt_text, true);
val local_default_proof = local_terminal_proof ((Method.standard_text, Position.no_range), NONE);
val local_immediate_proof = local_terminal_proof ((Method.this_text, Position.no_range), NONE);
val local_done_proof = terminal_proof local_qeds (Method.done_text, Position.no_range) (NONE, false);
fun global_terminal_proof (text, opt_text) =
terminal_proof global_qeds text (opt_text, true) #> global_goal_inst;
val global_default_proof = global_terminal_proof ((Method.standard_text, Position.no_range), NONE);
val global_immediate_proof = global_terminal_proof ((Method.this_text, Position.no_range), NONE);
val global_done_proof =
terminal_proof global_qeds (Method.done_text, Position.no_range) (NONE, false) #> global_goal_inst;
end;
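(*informal note: these implement the terminal Isar steps -- ".." is *_default_proof (standard
  rule), "." is *_immediate_proof (this), and "done" is *_done_proof; when skip_proofs is
  enabled and the goal is not relevant (no schematics, no prior refinement), the initial
  method is replaced by a sorry step*)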
(* skip proofs *)
fun local_skip_proof int state =
local_terminal_proof ((Method.sorry_text int, Position.no_range), NONE) state before
Skip_Proof.report (context_of state);
fun global_skip_proof int state =
global_terminal_proof ((Method.sorry_text int, Position.no_range), NONE) state before
Skip_Proof.report (context_of state);
(* common goal statements *)
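(*"have" states a new local claim; "show" additionally checks, in interactive
  mode, that the stated proposition can refine some pending goal by running an
  invisible test proof, and raises an informative error otherwise.*)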
fun internal_goal print_results mode =
local_goal print_results
(fn a => fn b => fn c => Proof_Context.cert_statement a b c o Proof_Context.set_mode mode) (K I);
local
fun gen_have prep_statement prep_att strict_asm before_qed after_qed fixes assumes shows int =
local_goal (Proof_Display.print_results int (Position.thread_data ()))
prep_statement prep_att strict_asm "have" before_qed after_qed fixes assumes shows;
fun gen_show prep_statement prep_att strict_asm before_qed after_qed fixes assumes shows int state =
let
val testing = Unsynchronized.ref false;
val rule = Unsynchronized.ref (NONE: thm option);
fun fail_msg ctxt =
"Local statement fails to refine any pending goal" ::
(case ! rule of NONE => [] | SOME th => [Proof_Display.string_of_rule ctxt "Failed" th])
|> cat_lines;
val pos = Position.thread_data ();
fun print_results ctxt res =
if ! testing then ()
else Proof_Display.print_results int pos ctxt res;
fun print_rule ctxt th =
if ! testing then rule := SOME th
else if int then
Proof_Display.string_of_rule ctxt "Successful" th
|> Markup.markup Markup.text_fold
|> Output.state
else ();
val test_proof =
local_skip_proof true
|> Unsynchronized.setmp testing true
|> Exn.interruptible_capture;
fun after_qed' (result_ctxt, results) state' = state'
|> refine_goals print_rule result_ctxt (flat results)
|> check_result "Failed to refine any pending goal"
|> after_qed (result_ctxt, results);
in
state
|> local_goal print_results prep_statement prep_att strict_asm
"show" before_qed after_qed' fixes assumes shows
||> int ? (fn goal_state =>
(case test_proof (map_context (Context_Position.set_visible false) goal_state) of
Exn.Res _ => goal_state
| Exn.Exn exn => raise Exn.EXCEPTIONS ([exn, ERROR (fail_msg (context_of goal_state))])))
end;
in
val have = gen_have Proof_Context.cert_statement (K I);
val have_cmd = gen_have Proof_Context.read_statement Attrib.attribute_cmd;
val show = gen_show Proof_Context.cert_statement (K I);
val show_cmd = gen_show Proof_Context.read_statement Attrib.attribute_cmd;
end;
(** future proofs **)
(* full proofs *)
local
structure Result = Proof_Data
(
type T = thm option;
fun init _ = NONE;
);
fun the_result ctxt =
Result.get ctxt |> \<^if_none>\<open>error "No result of forked proof"\<close>;
val set_result = Result.put o SOME;
val reset_result = Result.put NONE;
in
fun future_proof fork_proof state =
let
val _ = assert_backward state;
val (goal_ctxt, goal) = find_goal state;
val {statement as (kind, _, prop), using, goal, before_qed, after_qed} = goal;
val _ = is_relevant state andalso error "Cannot fork relevant proof";
val after_qed' =
(fn (_, [[th]]) => map_context (set_result th),
fn (_, [[th]]) => set_result th);
val result_ctxt =
state
|> map_context reset_result
|> map_goal (K (goal_ctxt, (kind, [[], [prop]], prop), using, goal, before_qed, after_qed'))
|> fork_proof;
val future_thm = Future.map (the_result o snd) result_ctxt;
val finished_goal = Goal.protect 0 (Goal.future_result goal_ctxt future_thm prop);
val state' =
state
|> map_goal (K (goal_ctxt, statement, using, finished_goal, NONE, after_qed));
in (Future.map fst result_ctxt, state') end;
end;
(* terminal proofs *)
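(*Terminal proof steps are forked into the background only if
  Future.proofs_enabled 3 holds, proof terms are disabled, and the pending
  goal is not relevant (see is_relevant above); otherwise the proof runs
  inline.*)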
local
fun future_terminal_proof proof1 proof2 done state =
if Future.proofs_enabled 3 andalso
not (Proofterm.proofs_enabled ()) andalso
not (is_relevant state)
then
state |> future_proof (fn state' =>
let val pos = Position.thread_data () in
Execution.fork {name = "Proof.future_terminal_proof", pos = pos, pri = ~1}
(fn () => ((), Timing.protocol "by" pos proof2 state'))
end) |> snd |> done
else proof1 state;
in
fun local_future_terminal_proof meths =
future_terminal_proof
(local_terminal_proof meths)
(local_terminal_proof meths #> context_of) local_done_proof;
fun global_future_terminal_proof meths =
future_terminal_proof
(global_terminal_proof meths)
(global_terminal_proof meths) global_done_proof;
end;
end;
diff --git a/src/Pure/Isar/proof_context.ML b/src/Pure/Isar/proof_context.ML
--- a/src/Pure/Isar/proof_context.ML
+++ b/src/Pure/Isar/proof_context.ML
@@ -1,1676 +1,1672 @@
(* Title: Pure/Isar/proof_context.ML
Author: Markus Wenzel, TU Muenchen
The key concept of Isar proof contexts: elevates primitive local
reasoning Gamma |- phi to a structured concept, with generic context
elements. See also structure Variable and Assumption.
*)
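(*
  Editorial sketch, not part of the original source: how the operations
  declared in the signature below are typically combined.  The theory value
  thy and the fact list ths are assumed to exist; Binding.name and NoSyn are
  the usual Pure combinators.

    val ctxt0 = Proof_Context.init_global thy;
    val ([x], ctxt1) =
      Proof_Context.add_fixes [(Binding.name "x", NONE, NoSyn)] ctxt0;
    (*facts ths proved within ctxt1 are generalized over x on export:*)
    val ths' = Proof_Context.export ctxt1 ctxt0 ths;
*)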
signature PROOF_CONTEXT =
sig
val theory_of: Proof.context -> theory
val init_global: theory -> Proof.context
val get_global: {long: bool} -> theory -> string -> Proof.context
type mode
val mode_default: mode
val mode_pattern: mode
val mode_schematic: mode
val mode_abbrev: mode
val set_mode: mode -> Proof.context -> Proof.context
val get_mode: Proof.context -> mode
val restore_mode: Proof.context -> Proof.context -> Proof.context
val abbrev_mode: Proof.context -> bool
val syn_of: Proof.context -> Syntax.syntax
val tsig_of: Proof.context -> Type.tsig
val set_defsort: sort -> Proof.context -> Proof.context
val default_sort: Proof.context -> indexname -> sort
val arity_sorts: Proof.context -> string -> sort -> sort list
val consts_of: Proof.context -> Consts.T
val set_syntax_mode: Syntax.mode -> Proof.context -> Proof.context
val restore_syntax_mode: Proof.context -> Proof.context -> Proof.context
val map_naming: (Name_Space.naming -> Name_Space.naming) -> Proof.context -> Proof.context
val naming_of: Proof.context -> Name_Space.naming
val restore_naming: Proof.context -> Proof.context -> Proof.context
val full_name: Proof.context -> binding -> string
val get_scope: Proof.context -> Binding.scope option
val new_scope: Proof.context -> Binding.scope * Proof.context
val private_scope: Binding.scope -> Proof.context -> Proof.context
val private: Position.T -> Proof.context -> Proof.context
val qualified_scope: Binding.scope -> Proof.context -> Proof.context
val qualified: Position.T -> Proof.context -> Proof.context
val concealed: Proof.context -> Proof.context
val class_space: Proof.context -> Name_Space.T
val type_space: Proof.context -> Name_Space.T
val const_space: Proof.context -> Name_Space.T
val defs_context: Proof.context -> Defs.context
val intern_class: Proof.context -> xstring -> string
val intern_type: Proof.context -> xstring -> string
val intern_const: Proof.context -> xstring -> string
val extern_class: Proof.context -> string -> xstring
val markup_class: Proof.context -> string -> string
val pretty_class: Proof.context -> string -> Pretty.T
val extern_type: Proof.context -> string -> xstring
val markup_type: Proof.context -> string -> string
val pretty_type: Proof.context -> string -> Pretty.T
val extern_const: Proof.context -> string -> xstring
val markup_const: Proof.context -> string -> string
val pretty_const: Proof.context -> string -> Pretty.T
val transfer: theory -> Proof.context -> Proof.context
val transfer_facts: theory -> Proof.context -> Proof.context
val background_theory: (theory -> theory) -> Proof.context -> Proof.context
val background_theory_result: (theory -> 'a * theory) -> Proof.context -> 'a * Proof.context
val facts_of: Proof.context -> Facts.T
val facts_of_fact: Proof.context -> string -> Facts.T
val markup_extern_fact: Proof.context -> string -> Markup.T list * xstring
val augment: term -> Proof.context -> Proof.context
val print_name: Proof.context -> string -> string
val pretty_name: Proof.context -> string -> Pretty.T
val pretty_term_abbrev: Proof.context -> term -> Pretty.T
val pretty_fact: Proof.context -> string * thm list -> Pretty.T
val check_class: Proof.context -> xstring * Position.T -> class * Position.report list
val read_class: Proof.context -> string -> class
val read_typ: Proof.context -> string -> typ
val read_typ_syntax: Proof.context -> string -> typ
val read_typ_abbrev: Proof.context -> string -> typ
val cert_typ: Proof.context -> typ -> typ
val cert_typ_syntax: Proof.context -> typ -> typ
val cert_typ_abbrev: Proof.context -> typ -> typ
val infer_type: Proof.context -> string * typ -> typ
val inferred_param: string -> Proof.context -> (string * typ) * Proof.context
val inferred_fixes: Proof.context -> (string * typ) list * Proof.context
val check_type_name: {proper: bool, strict: bool} -> Proof.context ->
xstring * Position.T -> typ * Position.report list
val read_type_name: {proper: bool, strict: bool} -> Proof.context -> string -> typ
val consts_completion_message: Proof.context -> xstring * Position.T list -> string
val check_const: {proper: bool, strict: bool} -> Proof.context ->
xstring * Position.T list -> term * Position.report list
val read_const: {proper: bool, strict: bool} -> Proof.context -> string -> term
val read_arity: Proof.context -> xstring * string list * string -> arity
val cert_arity: Proof.context -> arity -> arity
val allow_dummies: Proof.context -> Proof.context
val prepare_sortsT: Proof.context -> typ list -> string list * typ list
val prepare_sorts: Proof.context -> term list -> string list * term list
val check_tfree: Proof.context -> string * sort -> string * sort
val read_term_pattern: Proof.context -> string -> term
val read_term_schematic: Proof.context -> string -> term
val read_term_abbrev: Proof.context -> string -> term
val show_abbrevs: bool Config.T
val expand_abbrevs: Proof.context -> term -> term
val cert_term: Proof.context -> term -> term
val cert_prop: Proof.context -> term -> term
val def_type: Proof.context -> indexname -> typ option
val standard_typ_check: Proof.context -> typ list -> typ list
val standard_term_check_finish: Proof.context -> term list -> term list
val standard_term_uncheck: Proof.context -> term list -> term list
- val goal_export: Proof.context -> Proof.context -> thm list -> thm list
+ val export_: {goal: bool} -> Proof.context -> Proof.context -> thm list -> thm list
val export: Proof.context -> Proof.context -> thm list -> thm list
+ val export_goal: Proof.context -> Proof.context -> thm list -> thm list
val export_morphism: Proof.context -> Proof.context -> morphism
- val norm_export_morphism: Proof.context -> Proof.context -> morphism
val auto_bind_goal: term list -> Proof.context -> Proof.context
val auto_bind_facts: term list -> Proof.context -> Proof.context
val simult_matches: Proof.context -> term * term list -> (indexname * term) list
val maybe_bind_term: indexname * term option -> Proof.context -> Proof.context
val bind_term: indexname * term -> Proof.context -> Proof.context
val cert_propp: Proof.context -> (term * term list) list list ->
(term list list * (indexname * term) list)
val read_propp: Proof.context -> (string * string list) list list ->
(term list list * (indexname * term) list)
val fact_tac: Proof.context -> thm list -> int -> tactic
val some_fact_tac: Proof.context -> int -> tactic
val lookup_fact: Proof.context -> string -> {dynamic: bool, thms: thm list} option
val dynamic_facts_dummy: bool Config.T
val get_fact_generic: Context.generic -> Facts.ref -> string option * thm list
val get_fact: Proof.context -> Facts.ref -> thm list
val get_fact_single: Proof.context -> Facts.ref -> thm
val get_thms: Proof.context -> xstring -> thm list
val get_thm: Proof.context -> xstring -> thm
val is_stmt: Proof.context -> bool
val set_stmt: bool -> Proof.context -> Proof.context
val restore_stmt: Proof.context -> Proof.context -> Proof.context
val add_thms_dynamic: binding * (Context.generic -> thm list) ->
Proof.context -> string * Proof.context
val add_thms_lazy: string -> (binding * thm list lazy) -> Proof.context -> Proof.context
val note_thms: string -> Thm.binding * (thm list * attribute list) list ->
Proof.context -> (string * thm list) * Proof.context
val note_thmss: string -> (Thm.binding * (thm list * attribute list) list) list ->
Proof.context -> (string * thm list) list * Proof.context
val put_thms: bool -> string * thm list option -> Proof.context -> Proof.context
val alias_fact: binding -> string -> Proof.context -> Proof.context
val read_var: binding * string option * mixfix -> Proof.context ->
(binding * typ option * mixfix) * Proof.context
val cert_var: binding * typ option * mixfix -> Proof.context ->
(binding * typ option * mixfix) * Proof.context
val add_fixes: (binding * typ option * mixfix) list -> Proof.context ->
string list * Proof.context
val add_fixes_cmd: (binding * string option * mixfix) list -> Proof.context ->
string list * Proof.context
val add_assms: Assumption.export ->
(Thm.binding * (term * term list) list) list ->
Proof.context -> (string * thm list) list * Proof.context
val add_assms_cmd: Assumption.export ->
(Thm.binding * (string * string list) list) list ->
Proof.context -> (string * thm list) list * Proof.context
val dest_cases: Proof.context option -> Proof.context -> (string * Rule_Cases.T) list
val update_cases: (string * Rule_Cases.T option) list -> Proof.context -> Proof.context
val apply_case: Rule_Cases.T -> Proof.context -> (string * term list) list * Proof.context
val check_case: Proof.context -> bool ->
string * Position.T -> binding option list -> Rule_Cases.T
val check_syntax_const: Proof.context -> string * Position.T -> string
val syntax: bool -> Syntax.mode -> (string * typ * mixfix) list ->
Proof.context -> Proof.context
val generic_syntax: bool -> Syntax.mode -> (string * typ * mixfix) list ->
Context.generic -> Context.generic
val type_notation: bool -> Syntax.mode -> (typ * mixfix) list -> Proof.context -> Proof.context
val notation: bool -> Syntax.mode -> (term * mixfix) list -> Proof.context -> Proof.context
val generic_type_notation: bool -> Syntax.mode -> (typ * mixfix) list -> morphism ->
Context.generic -> Context.generic
val generic_notation: bool -> Syntax.mode -> (term * mixfix) list -> morphism ->
Context.generic -> Context.generic
val type_alias: binding -> string -> Proof.context -> Proof.context
val const_alias: binding -> string -> Proof.context -> Proof.context
val add_const_constraint: string * typ option -> Proof.context -> Proof.context
val add_abbrev: string -> binding * term -> Proof.context -> (term * term) * Proof.context
val revert_abbrev: string -> string -> Proof.context -> Proof.context
val generic_add_abbrev: string -> binding * term -> Context.generic ->
(term * term) * Context.generic
val generic_revert_abbrev: string -> string -> Context.generic -> Context.generic
type stmt =
{vars: ((binding * typ option * mixfix) * (string * term)) list,
propss: term list list,
binds: (indexname * term) list,
result_binds: (indexname * term) list}
val cert_stmt: (binding * typ option * mixfix) list -> (term * term list) list list ->
Proof.context -> stmt * Proof.context
val read_stmt: (binding * string option * mixfix) list -> (string * string list) list list ->
Proof.context -> stmt * Proof.context
type statement =
{fixes: (string * term) list,
assumes: term list list,
shows: term list list,
result_binds: (indexname * term option) list,
text: term,
result_text: term}
val cert_statement: (binding * typ option * mixfix) list ->
(term * term list) list list -> (term * term list) list list -> Proof.context ->
statement * Proof.context
val read_statement: (binding * string option * mixfix) list ->
(string * string list) list list -> (string * string list) list list -> Proof.context ->
statement * Proof.context
val print_syntax: Proof.context -> unit
val print_abbrevs: bool -> Proof.context -> unit
val pretty_term_bindings: Proof.context -> Pretty.T list
val pretty_local_facts: bool -> Proof.context -> Pretty.T list
val print_local_facts: bool -> Proof.context -> unit
val pretty_cases: Proof.context -> Pretty.T list
val print_cases_proof: Proof.context -> Proof.context -> string
val debug: bool Config.T
val verbose: bool Config.T
val pretty_ctxt: Proof.context -> Pretty.T list
val pretty_context: Proof.context -> Pretty.T list
end;
structure Proof_Context: PROOF_CONTEXT =
struct
val theory_of = Proof_Context.theory_of;
val init_global = Proof_Context.init_global;
val get_global = Proof_Context.get_global;
(** inner syntax mode **)
datatype mode =
Mode of
{pattern: bool, (*pattern binding schematic variables*)
schematic: bool, (*term referencing loose schematic variables*)
abbrev: bool}; (*abbrev mode -- no normalization*)
fun make_mode (pattern, schematic, abbrev) =
Mode {pattern = pattern, schematic = schematic, abbrev = abbrev};
val mode_default = make_mode (false, false, false);
val mode_pattern = make_mode (true, false, false);
val mode_schematic = make_mode (false, true, false);
val mode_abbrev = make_mode (false, false, true);
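(*The mode is selected via set_mode and determines how terms are read and
  certified below: e.g. read_term_pattern = read_term_mode mode_pattern (see
  "read_term"), and loose schematic variables are rejected unless
  mode_schematic is active (see "local abbreviations").*)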
(** Isar proof context information **)
type cases = Rule_Cases.T Name_Space.table;
val empty_cases: cases = Name_Space.empty_table Markup.caseN;
datatype data =
Data of
{mode: mode, (*inner syntax mode*)
syntax: Local_Syntax.T, (*local syntax*)
tsig: Type.tsig * Type.tsig, (*local/global type signature -- local name space / defsort only*)
consts: Consts.T * Consts.T, (*local/global consts -- local name space / abbrevs only*)
facts: Facts.T, (*local facts, based on initial global facts*)
cases: cases}; (*named case contexts*)
fun make_data (mode, syntax, tsig, consts, facts, cases) =
Data {mode = mode, syntax = syntax, tsig = tsig, consts = consts, facts = facts, cases = cases};
structure Data = Proof_Data
(
type T = data;
fun init thy =
make_data (mode_default,
Local_Syntax.init thy,
(Type.change_ignore (Sign.tsig_of thy), Sign.tsig_of thy),
(Consts.change_ignore (Sign.consts_of thy), Sign.consts_of thy),
Global_Theory.facts_of thy,
empty_cases);
);
fun rep_data ctxt = Data.get ctxt |> (fn Data rep => rep);
fun map_data_result f ctxt =
let
val Data {mode, syntax, tsig, consts, facts, cases} = Data.get ctxt;
val (res, data') = f (mode, syntax, tsig, consts, facts, cases) ||> make_data;
in (res, Data.put data' ctxt) end;
fun map_data f = snd o map_data_result (pair () o f);
fun set_mode mode = map_data (fn (_, syntax, tsig, consts, facts, cases) =>
(mode, syntax, tsig, consts, facts, cases));
fun map_syntax f =
map_data (fn (mode, syntax, tsig, consts, facts, cases) =>
(mode, f syntax, tsig, consts, facts, cases));
fun map_syntax_idents f ctxt =
let val (opt_idents', syntax') = f (#syntax (rep_data ctxt)) in
ctxt
|> map_syntax (K syntax')
|> (case opt_idents' of NONE => I | SOME idents' => Syntax_Trans.put_idents idents')
end;
fun map_tsig f =
map_data (fn (mode, syntax, tsig, consts, facts, cases) =>
(mode, syntax, f tsig, consts, facts, cases));
fun map_consts f =
map_data (fn (mode, syntax, tsig, consts, facts, cases) =>
(mode, syntax, tsig, f consts, facts, cases));
fun map_facts_result f =
map_data_result (fn (mode, syntax, tsig, consts, facts, cases) =>
let val (res, facts') = f facts
in (res, (mode, syntax, tsig, consts, facts', cases)) end);
fun map_facts f = snd o map_facts_result (pair () o f);
fun map_cases f =
map_data (fn (mode, syntax, tsig, consts, facts, cases) =>
(mode, syntax, tsig, consts, facts, f cases));
val get_mode = #mode o rep_data;
val restore_mode = set_mode o get_mode;
val abbrev_mode = get_mode #> (fn Mode {abbrev, ...} => abbrev);
val syntax_of = #syntax o rep_data;
val syn_of = Local_Syntax.syn_of o syntax_of;
val set_syntax_mode = map_syntax o Local_Syntax.set_mode;
val restore_syntax_mode = map_syntax o Local_Syntax.restore_mode o syntax_of;
val tsig_of = #1 o #tsig o rep_data;
val set_defsort = map_tsig o apfst o Type.set_defsort;
fun default_sort ctxt = the_default (Type.defaultS (tsig_of ctxt)) o Variable.def_sort ctxt;
fun arity_sorts ctxt = Type.arity_sorts (Context.Proof ctxt) (tsig_of ctxt);
val consts_of = #1 o #consts o rep_data;
val cases_of = #cases o rep_data;
(* naming *)
val naming_of = Name_Space.naming_of o Context.Proof;
val map_naming = Context.proof_map o Name_Space.map_naming;
val restore_naming = map_naming o K o naming_of;
val full_name = Name_Space.full_name o naming_of;
val get_scope = Name_Space.get_scope o naming_of;
fun new_scope ctxt =
let
val (scope, naming') = Name_Space.new_scope (naming_of ctxt);
val ctxt' = map_naming (K naming') ctxt;
in (scope, ctxt') end;
val private_scope = map_naming o Name_Space.private_scope;
val private = map_naming o Name_Space.private;
val qualified_scope = map_naming o Name_Space.qualified_scope;
val qualified = map_naming o Name_Space.qualified;
val concealed = map_naming Name_Space.concealed;
(* name spaces *)
val class_space = Type.class_space o tsig_of;
val type_space = Type.type_space o tsig_of;
val const_space = Consts.space_of o consts_of;
fun defs_context ctxt = (ctxt, (const_space ctxt, type_space ctxt));
val intern_class = Name_Space.intern o class_space;
val intern_type = Name_Space.intern o type_space;
val intern_const = Name_Space.intern o const_space;
fun extern_class ctxt = Name_Space.extern ctxt (class_space ctxt);
fun extern_type ctxt = Name_Space.extern ctxt (type_space ctxt);
fun extern_const ctxt = Name_Space.extern ctxt (const_space ctxt);
fun markup_class ctxt c = Name_Space.markup_extern ctxt (class_space ctxt) c |-> Markup.markup;
fun markup_type ctxt c = Name_Space.markup_extern ctxt (type_space ctxt) c |-> Markup.markup;
fun markup_const ctxt c = Name_Space.markup_extern ctxt (const_space ctxt) c |-> Markup.markup;
fun pretty_class ctxt c = Name_Space.markup_extern ctxt (class_space ctxt) c |> Pretty.mark_str;
fun pretty_type ctxt c = Name_Space.markup_extern ctxt (type_space ctxt) c |> Pretty.mark_str;
fun pretty_const ctxt c = Name_Space.markup_extern ctxt (const_space ctxt) c |> Pretty.mark_str;
(* theory transfer *)
fun transfer_syntax thy ctxt = ctxt |>
map_syntax (Local_Syntax.rebuild thy) |>
map_tsig (fn tsig as (local_tsig, global_tsig) =>
let val thy_tsig = Sign.tsig_of thy in
if Type.eq_tsig (thy_tsig, global_tsig) then tsig
else (Type.merge_tsig (Context.Proof ctxt) (local_tsig, thy_tsig), thy_tsig) (*historic merge order*)
end) |>
map_consts (fn consts as (local_consts, global_consts) =>
let val thy_consts = Sign.consts_of thy in
if Consts.eq_consts (thy_consts, global_consts) then consts
else (Consts.merge (local_consts, thy_consts), thy_consts) (*historic merge order*)
end);
fun transfer thy = Context.raw_transfer thy #> transfer_syntax thy;
fun transfer_facts thy =
map_facts (fn local_facts => Facts.merge (Global_Theory.facts_of thy, local_facts));
fun background_theory f ctxt = transfer (f (theory_of ctxt)) ctxt;
fun background_theory_result f ctxt =
let val (res, thy') = f (theory_of ctxt)
in (res, ctxt |> transfer thy') end;
(* hybrid facts *)
val facts_of = #facts o rep_data;
fun facts_of_fact ctxt name =
let
val local_facts = facts_of ctxt;
val global_facts = Global_Theory.facts_of (theory_of ctxt);
in
if Facts.defined local_facts name
then local_facts else global_facts
end;
fun markup_extern_fact ctxt name =
let
val facts = facts_of_fact ctxt name;
val (markup, xname) = Facts.markup_extern ctxt facts name;
val markups =
if Facts.is_dynamic facts name then [markup, Markup.dynamic_fact name]
else [markup];
in (markups, xname) end;
(* augment context by implicit term declarations *)
fun augment t ctxt = ctxt
|> Variable.add_fixes_implicit t
|> Variable.declare_term t
|> Soft_Type_System.augment t;
(** pretty printing **)
val print_name = Token.print_name o Thy_Header.get_keywords';
val pretty_name = Pretty.str oo print_name;
fun pretty_term_abbrev ctxt = Syntax.pretty_term (set_mode mode_abbrev ctxt);
fun pretty_fact_name ctxt a =
Pretty.block [Pretty.marks_str (markup_extern_fact ctxt a), Pretty.str ":"];
fun pretty_fact ctxt =
let
val pretty_thm = Thm.pretty_thm ctxt;
val pretty_thms = map (Thm.pretty_thm_item ctxt);
in
fn ("", [th]) => pretty_thm th
| ("", ths) => Pretty.blk (0, Pretty.fbreaks (pretty_thms ths))
| (a, [th]) => Pretty.block [pretty_fact_name ctxt a, Pretty.brk 1, pretty_thm th]
| (a, ths) => Pretty.block (Pretty.fbreaks (pretty_fact_name ctxt a :: pretty_thms ths))
end;
(** prepare types **)
(* classes *)
fun check_class ctxt (xname, pos) =
let
val tsig = tsig_of ctxt;
val class_space = Type.class_space tsig;
val name = Type.cert_class tsig (Name_Space.intern class_space xname)
handle TYPE (msg, _, _) =>
error (msg ^ Position.here pos ^
Completion.markup_report
[Name_Space.completion (Context.Proof ctxt) class_space (K true) (xname, pos)]);
val reports =
if Context_Position.is_reported ctxt pos
then [(pos, Name_Space.markup class_space name)] else [];
in (name, reports) end;
fun read_class ctxt text =
let
val source = Syntax.read_input text;
val (c, reports) = check_class ctxt (Input.source_content source);
val _ = Context_Position.reports ctxt reports;
in c end;
(* types *)
fun read_typ_mode mode ctxt s =
Syntax.read_typ (Type.set_mode mode ctxt) s;
val read_typ = read_typ_mode Type.mode_default;
val read_typ_syntax = read_typ_mode Type.mode_syntax;
val read_typ_abbrev = read_typ_mode Type.mode_abbrev;
fun cert_typ_mode mode ctxt T =
Type.cert_typ_mode mode (tsig_of ctxt) T
handle TYPE (msg, _, _) => error msg;
val cert_typ = cert_typ_mode Type.mode_default;
val cert_typ_syntax = cert_typ_mode Type.mode_syntax;
val cert_typ_abbrev = cert_typ_mode Type.mode_abbrev;
(** prepare terms and propositions **)
(* inferred types of parameters *)
fun infer_type ctxt x =
Term.fastype_of (singleton (Syntax.check_terms (set_mode mode_schematic ctxt)) (Free x));
fun inferred_param x ctxt =
let val p = (x, infer_type ctxt (x, dummyT))
in (p, ctxt |> Variable.declare_term (Free p)) end;
fun inferred_fixes ctxt =
fold_map inferred_param (map #2 (Variable.dest_fixes ctxt)) ctxt;
(* type names *)
fun check_type_name {proper, strict} ctxt (c, pos) =
if Lexicon.is_tid c then
if proper then error ("Not a type constructor: " ^ quote c ^ Position.here pos)
else
let
val reports =
if Context_Position.is_reported ctxt pos
then [(pos, Markup.tfree)] else [];
in (TFree (c, default_sort ctxt (c, ~1)), reports) end
else
let
val ((d, reports), decl) = Type.check_decl (Context.Proof ctxt) (tsig_of ctxt) (c, pos);
fun err () = error ("Bad type name: " ^ quote d ^ Position.here pos);
val args =
(case decl of
Type.LogicalType n => n
| Type.Abbreviation (vs, _, _) => if strict then err () else length vs
| Type.Nonterminal => if strict then err () else 0);
in (Type (d, replicate args dummyT), reports) end;
fun read_type_name flags ctxt text =
let
val source = Syntax.read_input text;
val (T, reports) = check_type_name flags ctxt (Input.source_content source);
val _ = Context_Position.reports ctxt reports;
in T end;
(* constant names *)
fun consts_completion_message ctxt (c, ps) =
ps |> map (fn pos =>
Name_Space.completion (Context.Proof ctxt) (Consts.space_of (consts_of ctxt)) (K true) (c, pos))
|> Completion.markup_report;
fun check_const {proper, strict} ctxt (c, ps) =
let
val _ =
Name.reject_internal (c, ps) handle ERROR msg =>
error (msg ^ consts_completion_message ctxt (c, ps));
fun err msg = error (msg ^ Position.here_list ps);
val consts = consts_of ctxt;
val fixed = if proper then NONE else Variable.lookup_fixed ctxt c;
val (t, reports) =
(case (fixed, Variable.lookup_const ctxt c) of
(SOME x, NONE) =>
let
val reports = ps
|> filter (Context_Position.is_reported ctxt)
|> map (fn pos =>
(pos, Markup.name x (if Name.is_skolem x then Markup.skolem else Markup.free)));
in (Free (x, infer_type ctxt (x, dummyT)), reports) end
| (_, SOME d) =>
let
val T = Consts.type_scheme consts d handle TYPE (msg, _, _) => err msg;
val reports = ps
|> filter (Context_Position.is_reported ctxt)
|> map (fn pos => (pos, Name_Space.markup (Consts.space_of consts) d));
in (Const (d, T), reports) end
| _ => Consts.check_const (Context.Proof ctxt) consts (c, ps));
val _ =
(case (strict, t) of
(true, Const (d, _)) =>
(ignore (Consts.the_const consts d) handle TYPE (msg, _, _) => err msg)
| _ => ());
in (t, reports) end;
fun read_const flags ctxt text =
let
val source = Syntax.read_input text;
val (c, pos) = Input.source_content source;
val (t, reports) = check_const flags ctxt (c, [pos]);
val _ = Context_Position.reports ctxt reports;
in t end;
(* type arities *)
local
fun prep_arity prep_tycon prep_sort ctxt (t, Ss, S) =
let val arity = (prep_tycon ctxt t, map (prep_sort ctxt) Ss, prep_sort ctxt S)
in Type.add_arity (Context.Proof ctxt) arity (tsig_of ctxt); arity end;
in
val read_arity =
prep_arity ((#1 o dest_Type) oo read_type_name {proper = true, strict = true}) Syntax.read_sort;
val cert_arity = prep_arity (K I) (Type.cert_sort o tsig_of);
end;
(* read_term *)
fun read_term_mode mode ctxt = Syntax.read_term (set_mode mode ctxt);
val read_term_pattern = read_term_mode mode_pattern;
val read_term_schematic = read_term_mode mode_schematic;
val read_term_abbrev = read_term_mode mode_abbrev;
(* local abbreviations *)
local
fun certify_consts ctxt =
Consts.certify (Context.Proof ctxt) (tsig_of ctxt)
(not (abbrev_mode ctxt)) (consts_of ctxt);
fun expand_binds ctxt =
let
val Mode {pattern, schematic, ...} = get_mode ctxt;
fun reject_schematic (t as Var _) =
error ("Unbound schematic variable: " ^ Syntax.string_of_term ctxt t)
| reject_schematic (Abs (_, _, t)) = reject_schematic t
| reject_schematic (t $ u) = (reject_schematic t; reject_schematic u)
| reject_schematic _ = ();
in
if pattern then I
else Variable.expand_binds ctxt #> (if schematic then I else tap reject_schematic)
end;
in
fun expand_abbrevs ctxt = certify_consts ctxt #> expand_binds ctxt;
end;
val show_abbrevs = Config.declare_bool ("show_abbrevs", \<^here>) (K true);
fun contract_abbrevs ctxt t =
let
val thy = theory_of ctxt;
val consts = consts_of ctxt;
val Mode {abbrev, ...} = get_mode ctxt;
val retrieve = Consts.retrieve_abbrevs consts (print_mode_value () @ [""]);
fun match_abbrev u = Option.map #1 (get_first (Pattern.match_rew thy u) (retrieve u));
in
if abbrev orelse not (Config.get ctxt show_abbrevs) orelse not (can Term.type_of t) then t
else Pattern.rewrite_term_top thy [] [match_abbrev] t
end;
(* patterns *)
fun prepare_patternT ctxt T =
let
val Mode {pattern, schematic, ...} = get_mode ctxt;
val _ =
pattern orelse schematic orelse
T |> Term.exists_subtype
(fn T as TVar (xi, _) =>
not (Type_Infer.is_param xi) andalso
error ("Illegal schematic type variable: " ^ Syntax.string_of_typ ctxt T)
| _ => false)
in T end;
local
val dummies = Config.declare_bool ("Proof_Context.dummies", \<^here>) (K false);
fun check_dummies ctxt t =
if Config.get ctxt dummies then t
else Term.no_dummy_patterns t handle TERM _ => error "Illegal dummy pattern(s) in term";
fun prepare_dummies ts = #1 (fold_map Term.replace_dummy_patterns ts 1);
in
val allow_dummies = Config.put dummies true;
fun prepare_patterns ctxt =
let val Mode {pattern, ...} = get_mode ctxt in
Type_Infer.fixate ctxt pattern #>
pattern ? Variable.polymorphic ctxt #>
(map o Term.map_types) (prepare_patternT ctxt) #>
(if pattern then prepare_dummies else map (check_dummies ctxt))
end;
end;
(* sort constraints *)
local
fun prepare_sorts_env ctxt tys =
let
val tsig = tsig_of ctxt;
val defaultS = Type.defaultS tsig;
val dummy_var = ("'_dummy_", ~1);
fun constraint (xi, raw_S) env =
let val (ps, S) = Term_Position.decode_positionS raw_S in
if xi = dummy_var orelse S = dummyS then env
else
Vartab.insert (op =) (xi, Type.minimize_sort tsig S) env
handle Vartab.DUP _ =>
error ("Inconsistent sort constraints for type variable " ^
quote (Term.string_of_vname' xi) ^ Position.here_list ps)
end;
val env =
Vartab.build (tys |> (fold o fold_atyps)
(fn TFree (x, S) => constraint ((x, ~1), S)
| TVar v => constraint v
| _ => I));
fun get_sort xi raw_S =
if xi = dummy_var then
Type.minimize_sort tsig (#2 (Term_Position.decode_positionS raw_S))
else
(case (Vartab.lookup env xi, Variable.def_sort ctxt xi) of
(NONE, NONE) => defaultS
| (NONE, SOME S) => S
| (SOME S, NONE) => S
| (SOME S, SOME S') =>
if Type.eq_sort tsig (S, S') then S'
else
error ("Sort constraint " ^ Syntax.string_of_sort ctxt S ^
" inconsistent with default " ^ Syntax.string_of_sort ctxt S' ^
" for type variable " ^ quote (Term.string_of_vname' xi)));
fun add_report S pos reports =
if Position.is_reported pos andalso not (AList.defined (op =) reports pos) then
(pos, Position.reported_text pos Markup.sorting (Syntax.string_of_sort ctxt S)) :: reports
else reports;
fun get_sort_reports xi raw_S =
let
val ps = #1 (Term_Position.decode_positionS raw_S);
val S = get_sort xi raw_S handle ERROR msg => error (msg ^ Position.here_list ps);
in fold (add_report S) ps end;
val reports =
(fold o fold_atyps)
(fn T =>
if Term_Position.is_positionT T then I
else
(case T of
TFree (x, raw_S) => get_sort_reports (x, ~1) raw_S
| TVar (xi, raw_S) => get_sort_reports xi raw_S
| _ => I)) tys [];
in (map #2 reports, get_sort) end;
fun replace_sortsT get_sort =
map_atyps
(fn T =>
if Term_Position.is_positionT T then T
else
(case T of
TFree (x, raw_S) => TFree (x, get_sort (x, ~1) raw_S)
| TVar (xi, raw_S) => TVar (xi, get_sort xi raw_S)
| _ => T));
in
fun prepare_sortsT ctxt tys =
let val (sorting_report, get_sort) = prepare_sorts_env ctxt tys
in (sorting_report, map (replace_sortsT get_sort) tys) end;
fun prepare_sorts ctxt tms =
let
val tys = rev ((fold o fold_types) cons tms []);
val (sorting_report, get_sort) = prepare_sorts_env ctxt tys;
in (sorting_report, (map o map_types) (replace_sortsT get_sort) tms) end;
fun check_tfree ctxt v =
let
val (sorting_report, [TFree a]) = prepare_sortsT ctxt [TFree v];
val _ = if Context_Position.reports_enabled ctxt then Output.report sorting_report else ();
in a end;
end;
(* certify terms *)
local
fun gen_cert prop ctxt t =
t
|> expand_abbrevs ctxt
|> (fn t' =>
#1 (Sign.certify' prop (Context.Proof ctxt) false (consts_of ctxt) (theory_of ctxt) t')
handle TYPE (msg, _, _) => error msg | TERM (msg, _) => error msg);
in
val cert_term = gen_cert false;
val cert_prop = gen_cert true;
end;
(* check/uncheck *)
fun def_type ctxt =
let val Mode {pattern, ...} = get_mode ctxt
in Variable.def_type ctxt pattern end;
fun standard_typ_check ctxt =
map (cert_typ_mode (Type.get_mode ctxt) ctxt #> prepare_patternT ctxt);
val standard_term_check_finish = prepare_patterns;
fun standard_term_uncheck ctxt = map (contract_abbrevs ctxt);
(** export results **)
-fun common_export is_goal inner outer =
- map (Assumption.export is_goal inner outer) #>
+fun export_ goal inner outer =
+ map (Assumption.export_ goal inner outer) #>
Variable.export inner outer;
-val goal_export = common_export true;
-val export = common_export false;
+val export = export_ {goal = false};
+val export_goal = export_ {goal = true};
fun export_morphism inner outer =
Assumption.export_morphism inner outer $>
Variable.export_morphism inner outer;
-fun norm_export_morphism inner outer =
- export_morphism inner outer $>
- Morphism.thm_morphism "Proof_Context.norm_export" (Goal.norm_result outer);
-
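(*The former goal_export now corresponds to export_goal = export_ {goal = true};
  the removed norm_export_morphism (export_morphism composed with
  Goal.norm_result) has no direct counterpart here, so callers presumably
  apply that normalization themselves where still needed.*)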
(** term bindings **)
(* auto bindings *)
fun auto_bind f props ctxt = fold Variable.maybe_bind_term (f ctxt props) ctxt;
val auto_bind_goal = auto_bind Auto_Bind.goal;
val auto_bind_facts = auto_bind Auto_Bind.facts;
(* match bindings *)
fun simult_matches ctxt (t, pats) =
(case Seq.pull (Unify.matchers (Context.Proof ctxt) (map (rpair t) pats)) of
NONE => error "Pattern match failed!"
| SOME (env, _) => Vartab.fold (fn (v, (_, t)) => cons (v, t)) (Envir.term_env env) []);
fun maybe_bind_term (xi, t) ctxt =
ctxt
|> Variable.maybe_bind_term (xi, Option.map (cert_term (set_mode mode_default ctxt)) t);
val bind_term = maybe_bind_term o apsnd SOME;
(* propositions with patterns *)
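(*Each proposition may be accompanied by patterns that are matched against it
  simultaneously (simult_matches); the resulting assignments become term
  bindings, which underlies Isar's "(is ...)" patterns.*)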
local
fun prep_propp prep_props ctxt raw_args =
let
val props = prep_props ctxt (maps (map fst) raw_args);
val props_ctxt = fold Variable.declare_term props ctxt;
val patss = maps (map (prep_props (set_mode mode_pattern props_ctxt) o snd)) raw_args;
val propps = unflat raw_args (props ~~ patss);
val binds = (maps o maps) (simult_matches props_ctxt) propps;
in (map (map fst) propps, binds) end;
in
val cert_propp = prep_propp (map o cert_prop);
val read_propp = prep_propp Syntax.read_props;
end;
(** theorems **)
(* fact_tac *)
local
fun comp_hhf_tac ctxt th i st =
PRIMSEQ (Thm.bicompose (SOME ctxt) {flatten = true, match = false, incremented = true}
(false, Drule.lift_all ctxt (Thm.cprem_of st i) th, 0) i) st;
fun comp_incr_tac _ [] _ = no_tac
| comp_incr_tac ctxt (th :: ths) i =
(fn st => comp_hhf_tac ctxt (Drule.incr_indexes st th) i st) APPEND
comp_incr_tac ctxt ths i;
val vacuous_facts = [Drule.termI];
in
fun potential_facts ctxt prop =
let
val body = Term.strip_all_body prop;
val vacuous =
filter (fn th => Term.could_unify (body, Thm.concl_of th)) vacuous_facts
|> map (rpair Position.none);
in Facts.could_unify (facts_of ctxt) body @ vacuous end;
fun fact_tac ctxt facts = Goal.norm_hhf_tac ctxt THEN' comp_incr_tac ctxt facts;
fun some_fact_tac ctxt =
SUBGOAL (fn (goal, i) => fact_tac ctxt (map #1 (potential_facts ctxt goal)) i);
end;
(* lookup facts *)
fun lookup_fact ctxt name =
let
val context = Context.Proof ctxt;
val thy = Proof_Context.theory_of ctxt;
in
(case Facts.lookup context (facts_of ctxt) name of
NONE => Facts.lookup context (Global_Theory.facts_of thy) name
| some => some)
end;
(* retrieve facts *)
val dynamic_facts_dummy = Config.declare_bool ("dynamic_facts_dummy_", \<^here>) (K false);
local
fun retrieve_global context =
Facts.retrieve context (Global_Theory.facts_of (Context.theory_of context));
fun retrieve_generic (context as Context.Proof ctxt) arg =
(Facts.retrieve context (facts_of ctxt) arg handle ERROR local_msg =>
(retrieve_global context arg handle ERROR _ => error local_msg))
| retrieve_generic context arg = retrieve_global context arg;
fun retrieve pick context (Facts.Fact s) =
let
val ctxt = Context.the_proof context;
val pos = Syntax.read_input_pos s;
val prop =
Syntax.read_prop (ctxt |> set_mode mode_default |> allow_dummies) s
|> singleton (Variable.polymorphic ctxt);
fun err ps msg =
error (msg ^ Position.here_list (pos :: ps) ^ ":\n" ^ Syntax.string_of_term ctxt prop);
val (prop', _) = Term.replace_dummy_patterns prop (Variable.maxidx_of ctxt + 1);
fun prove th = Goal.prove ctxt [] [] prop' (K (ALLGOALS (fact_tac ctxt [th])));
val results = map_filter (try (apfst prove)) (potential_facts ctxt prop');
val (thm, thm_pos) =
(case distinct (eq_fst Thm.eq_thm_prop) results of
[res] => res
| [] => err [] "Failed to retrieve literal fact"
| dups => err (distinct (op =) (map #2 dups)) "Ambiguous specification of literal fact");
val markup = Position.entity_markup Markup.literal_factN ("", thm_pos);
val _ = Context_Position.report_generic context pos markup;
in pick true ("", thm_pos) [thm] end
| retrieve pick context (Facts.Named ((xname, pos), sel)) =
let
val thy = Context.theory_of context;
fun immediate thms = {name = xname, dynamic = false, thms = map (Thm.transfer thy) thms};
val {name, dynamic, thms} =
(case xname of
"" => immediate [Drule.dummy_thm]
| "_" => immediate [Drule.asm_rl]
| "nothing" => immediate []
| _ => retrieve_generic context (xname, pos));
val thms' =
if dynamic andalso Config.get_generic context dynamic_facts_dummy
then [Drule.free_dummy_thm]
else Facts.select (Facts.Named ((name, pos), sel)) thms;
in pick (dynamic andalso is_none sel) (name, pos) thms' end;
in
val get_fact_generic =
retrieve (fn dynamic => fn (name, _) => fn thms =>
(if dynamic then SOME name else NONE, thms));
val get_fact = retrieve (K (K I)) o Context.Proof;
val get_fact_single = retrieve (K Facts.the_single) o Context.Proof;
fun get_thms ctxt = get_fact ctxt o Facts.named;
fun get_thm ctxt = get_fact_single ctxt o Facts.named;
end;
(* inner statement mode *)
val inner_stmt = Config.declare_bool ("inner_stmt", \<^here>) (K false);
fun is_stmt ctxt = Config.get ctxt inner_stmt;
val set_stmt = Config.put inner_stmt;
val restore_stmt = set_stmt o is_stmt;
(* facts *)
fun add_thms_dynamic arg ctxt =
ctxt |> map_facts_result (Facts.add_dynamic (Context.Proof ctxt) arg);
local
fun add_facts {index} arg ctxt = ctxt
|> map_facts_result (Facts.add_static (Context.Proof ctxt) {strict = false, index = index} arg);
fun update_facts flags (b, SOME ths) ctxt = ctxt |> add_facts flags (b, Lazy.value ths) |> #2
| update_facts _ (b, NONE) ctxt = ctxt |> map_facts (Facts.del (full_name ctxt b));
fun bind_name ctxt b = (full_name ctxt b, Binding.default_pos_of b);
in
fun add_thms_lazy kind (b, ths) ctxt =
let
val name_pos = bind_name ctxt b;
val ths' =
Global_Theory.check_thms_lazy ths
|> Lazy.map_finished
(Global_Theory.name_thms Global_Theory.unofficial1 name_pos #> map (Thm.kind_rule kind));
val (_, ctxt') = add_facts {index = is_stmt ctxt} (b, ths') ctxt;
in ctxt' end;
fun note_thms kind ((b, more_atts), facts) ctxt =
let
val (name, pos) = bind_name ctxt b;
val facts' = facts
|> Global_Theory.burrow_fact (Global_Theory.name_thms Global_Theory.unofficial1 (name, pos));
fun app (ths, atts) =
fold_map (Thm.proof_attributes (surround (Thm.kind kind) (atts @ more_atts))) ths;
val (res, ctxt') = fold_map app facts' ctxt;
val thms = Global_Theory.name_thms Global_Theory.unofficial2 (name, pos) (flat res);
val (_, ctxt'') = ctxt' |> add_facts {index = is_stmt ctxt} (b, Lazy.value thms);
in ((name, thms), ctxt'') end;
val note_thmss = fold_map o note_thms;
fun put_thms index thms ctxt = ctxt
|> map_naming (K Name_Space.local_naming)
|> Context_Position.set_visible false
|> update_facts {index = index} (apfst Binding.name thms)
|> Context_Position.restore_visible ctxt
|> restore_naming ctxt;
end;
fun alias_fact b c ctxt = map_facts (Facts.alias (naming_of ctxt) b c) ctxt;
(** basic logical entities **)
(* variables *)
fun declare_var (x, opt_T, mx) ctxt =
let val T = (case opt_T of SOME T => T | NONE => Mixfix.default_constraint mx)
in (T, ctxt |> Variable.declare_constraints (Free (x, T))) end;
fun add_syntax vars ctxt =
map_syntax_idents (Local_Syntax.add_syntax ctxt (map (pair Local_Syntax.Fixed) vars)) ctxt;
fun check_var internal b =
let
val x = Variable.check_name b;
val check = if internal then Name.reject_skolem else Name.reject_internal;
val _ =
if can check (x, []) andalso Symbol_Pos.is_identifier x then ()
else error ("Bad name: " ^ Binding.print b);
in x end;
local
fun check_mixfix ctxt (b, T, mx) =
let
val ([x], ctxt') = Variable.add_fixes_binding [Binding.reset_pos b] ctxt;
val mx' = Mixfix.reset_pos mx;
val _ = add_syntax [(x, T, if Context_Position.reports_enabled ctxt then mx else mx')] ctxt';
in mx' end;
fun prep_var prep_typ internal (b, raw_T, mx) ctxt =
let
val x = check_var internal b;
fun cond_tvars T =
if internal then T
else Type.no_tvars T handle TYPE (msg, _, _) => error msg;
val opt_T = Option.map (cond_tvars o prep_typ ctxt) raw_T;
val (T, ctxt') = ctxt |> declare_var (x, opt_T, mx);
val mx' = if Mixfix.is_empty mx then mx else check_mixfix ctxt' (b, T, mx);
in ((b, SOME T, mx'), ctxt') end;
in
val read_var = prep_var Syntax.read_typ false;
val cert_var = prep_var cert_typ true;
end;
(* syntax *)
fun check_syntax_const ctxt (c, pos) =
if is_some (Syntax.lookup_const (syn_of ctxt) c) then c
else error ("Unknown syntax const: " ^ quote c ^ Position.here pos);
fun syntax add mode args ctxt =
let val args' = map (pair Local_Syntax.Const) args
in ctxt |> map_syntax (#2 o Local_Syntax.update_modesyntax ctxt add mode args') end;
fun generic_syntax add mode args =
Context.mapping (Sign.syntax add mode args) (syntax add mode args);
(* notation *)
local
fun type_syntax (Type (c, args), mx) =
SOME (Local_Syntax.Type, (Lexicon.mark_type c, Mixfix.make_type (length args), mx))
| type_syntax _ = NONE;
fun const_syntax _ (Free (x, T), mx) = SOME (Local_Syntax.Fixed, (x, T, mx))
| const_syntax ctxt (Const (c, _), mx) =
(case try (Consts.type_scheme (consts_of ctxt)) c of
SOME T => SOME (Local_Syntax.Const, (Lexicon.mark_const c, T, mx))
| NONE => NONE)
| const_syntax _ _ = NONE;
fun gen_notation make_syntax add mode args ctxt =
ctxt |> map_syntax_idents
(Local_Syntax.update_modesyntax ctxt add mode (map_filter (make_syntax ctxt) args));
in
val type_notation = gen_notation (K type_syntax);
val notation = gen_notation const_syntax;
fun generic_type_notation add mode args phi =
let
val args' = args |> map_filter (fn (T, mx) =>
let
val T' = Morphism.typ phi T;
val similar = (case (T, T') of (Type (c, _), Type (c', _)) => c = c' | _ => false);
in if similar then SOME (T', mx) else NONE end);
in Context.mapping (Sign.type_notation add mode args') (type_notation add mode args') end;
fun generic_notation add mode args phi =
let
val args' = args |> map_filter (fn (t, mx) =>
let val t' = Morphism.term phi t
in if Term.aconv_untyped (t, t') then SOME (t', mx) else NONE end);
in Context.mapping (Sign.notation add mode args') (notation add mode args') end;
end;
(* aliases *)
fun type_alias b c ctxt = (map_tsig o apfst) (Type.type_alias (naming_of ctxt) b c) ctxt;
fun const_alias b c ctxt = (map_consts o apfst) (Consts.alias (naming_of ctxt) b c) ctxt;
(* local constants *)
fun add_const_constraint (c, opt_T) ctxt =
let
fun prepT raw_T =
let val T = cert_typ ctxt raw_T
in cert_term ctxt (Const (c, T)); T end;
in ctxt |> (map_consts o apfst) (Consts.constrain (c, Option.map prepT opt_T)) end;
fun add_abbrev mode (b, raw_t) ctxt =
let
val t0 = cert_term (ctxt |> set_mode mode_abbrev) raw_t
handle ERROR msg => cat_error msg ("in constant abbreviation " ^ Binding.print b);
val [t] = Variable.exportT_terms (Variable.declare_term t0 ctxt) ctxt [t0];
val ((lhs, rhs), consts') = consts_of ctxt
|> Consts.abbreviate (Context.Proof ctxt) (tsig_of ctxt) mode (b, t);
in
ctxt
|> (map_consts o apfst) (K consts')
|> Variable.declare_term rhs
|> pair (lhs, rhs)
end;
fun revert_abbrev mode c = (map_consts o apfst) (Consts.revert_abbrev mode c);
fun generic_add_abbrev mode arg =
Context.mapping_result (Sign.add_abbrev mode arg) (add_abbrev mode arg);
fun generic_revert_abbrev mode arg =
Context.mapping (Sign.revert_abbrev mode arg) (revert_abbrev mode arg);
(* fixes *)
local
fun gen_fixes prep_var raw_vars ctxt =
let
val (vars, _) = fold_map prep_var raw_vars ctxt;
val (xs, ctxt') = Variable.add_fixes_binding (map #1 vars) ctxt;
val _ =
Context_Position.reports ctxt'
(flat (map2 (fn x => fn pos =>
[(pos, Variable.markup ctxt' x), (pos, Variable.markup_entity_def ctxt' x)])
xs (map (Binding.pos_of o #1) vars)));
val vars' = map2 (fn x => fn (_, opt_T, mx) => (x, opt_T, mx)) xs vars;
val (Ts, ctxt'') = fold_map declare_var vars' ctxt';
val vars'' = map2 (fn T => fn (x, _, mx) => (x, T, mx)) Ts vars';
in (xs, add_syntax vars'' ctxt'') end;
in
val add_fixes = gen_fixes cert_var;
val add_fixes_cmd = gen_fixes read_var;
end;
(** assumptions **)
local
fun gen_assms prep_propp exp args ctxt =
let
val (propss, binds) = prep_propp ctxt (map snd args);
val props = flat propss;
in
ctxt
|> fold Variable.declare_term props
|> tap (Variable.warn_extra_tfrees ctxt)
|> fold_burrow (Assumption.add_assms exp o map (Thm.cterm_of ctxt)) propss
|-> (fn premss =>
auto_bind_facts props
#> fold Variable.bind_term binds
#> note_thmss "" (map fst args ~~ map (map (fn th => ([th], []))) premss))
end;
in
val add_assms = gen_assms cert_propp;
val add_assms_cmd = gen_assms read_propp;
end;
(** cases **)
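(*Named cases record fixes, assumptions and term bindings for later
  invocation: check_case instantiates the case parameters, and
  print_cases_proof (below) turns the available cases into a sendback proof
  outline skeleton.*)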
fun dest_cases prev_ctxt ctxt =
let
val serial_of = #serial oo (Name_Space.the_entry o Name_Space.space_of_table);
val ignored =
(case prev_ctxt of
NONE => Intset.empty
| SOME ctxt0 =>
let val cases0 = cases_of ctxt0 in
Intset.build (cases0 |> Name_Space.fold_table (fn (a, _) =>
Intset.insert (serial_of cases0 a)))
end);
val cases = cases_of ctxt;
in
Name_Space.fold_table (fn (a, c) =>
let val i = serial_of cases a
in not (Intset.member ignored i) ? cons (i, (a, c)) end) cases []
|> sort (int_ord o apply2 #1) |> map #2
end;
local
fun drop_schematic (b as (xi, SOME t)) = if Term.exists_subterm is_Var t then (xi, NONE) else b
| drop_schematic b = b;
fun update_case _ ("", _) cases = cases
| update_case _ (name, NONE) cases = Name_Space.del_table name cases
| update_case context (name, SOME c) cases =
#2 (Name_Space.define context false (Binding.name name, c) cases);
fun fix (b, T) ctxt =
let val ([x], ctxt') = add_fixes [(b, SOME T, NoSyn)] ctxt
in (Free (x, T), ctxt') end;
in
fun update_cases args ctxt =
let val context = Context.Proof ctxt |> Name_Space.map_naming (K Name_Space.global_naming);
in map_cases (fold (update_case context) args) ctxt end;
fun case_result c ctxt =
let
val Rule_Cases.Case {fixes, ...} = c;
val (ts, ctxt') = ctxt |> fold_map fix fixes;
val Rule_Cases.Case {assumes, binds, cases, ...} = Rule_Cases.apply ts c;
in
ctxt'
|> fold (maybe_bind_term o drop_schematic) binds
|> update_cases (map (apsnd SOME) cases)
|> pair (assumes, (binds, cases))
end;
val apply_case = apfst fst oo case_result;
fun check_case ctxt internal (name, pos) param_specs =
let
val (_, Rule_Cases.Case {fixes, assumes, binds, cases}) =
Name_Space.check (Context.Proof ctxt) (cases_of ctxt) (name, pos);
val _ = List.app (fn NONE => () | SOME b => ignore (check_var internal b)) param_specs;
fun replace (opt_x :: xs) ((y, T) :: ys) = (the_default y opt_x, T) :: replace xs ys
| replace [] ys = ys
| replace (_ :: _) [] =
error ("Too many parameters for case " ^ quote name ^ Position.here pos);
val fixes' = replace param_specs fixes;
val binds' = map drop_schematic binds;
in
if null (fold (Term.add_tvarsT o snd) fixes []) andalso
null (fold (fold Term.add_vars o snd) assumes []) then
Rule_Cases.Case {fixes = fixes', assumes = assumes, binds = binds', cases = cases}
else error ("Illegal schematic variable(s) in case " ^ quote name ^ Position.here pos)
end;
end;
(* structured statements *)
type stmt =
{vars: ((binding * typ option * mixfix) * (string * term)) list,
propss: term list list,
binds: (indexname * term) list,
result_binds: (indexname * term) list};
type statement =
{fixes: (string * term) list,
assumes: term list list,
shows: term list list,
result_binds: (indexname * term option) list,
text: term,
result_text: term};
local
fun export_binds ctxt' ctxt params binds =
let
val rhss =
map (the_list o Option.map (Logic.close_term params) o snd) binds
|> burrow (Variable.export_terms ctxt' ctxt)
|> map (try the_single);
in map fst binds ~~ rhss end;
fun prep_stmt prep_var prep_propp raw_vars raw_propps ctxt =
let
val (vars, vars_ctxt) = fold_map prep_var raw_vars ctxt;
val xs = map (Variable.check_name o #1) vars;
val (xs', fixes_ctxt) = add_fixes vars vars_ctxt;
val (propss, binds) = prep_propp fixes_ctxt raw_propps;
val (ps, params_ctxt) = fixes_ctxt
|> (fold o fold) Variable.declare_term propss
|> fold_map inferred_param xs';
val params = xs ~~ map Free ps;
val vars' = map2 (fn (b, _, mx) => fn (_, T) => (b, SOME T, mx)) vars ps;
val binds' = binds
|> map (apsnd SOME)
|> export_binds params_ctxt ctxt params
|> map (apsnd the);
val _ = Variable.warn_extra_tfrees fixes_ctxt params_ctxt;
val result : stmt =
{vars = vars' ~~ params, propss = propss, binds = binds, result_binds = binds'};
in (result, params_ctxt) end;
fun prep_statement prep_var prep_propp raw_fixes raw_assumes raw_shows ctxt =
let
val ((fixes, (assumes, shows), binds), ctxt') = ctxt
|> prep_stmt prep_var prep_propp raw_fixes (raw_assumes @ raw_shows)
|-> (fn {vars, propss, binds, ...} =>
fold Variable.bind_term binds #>
pair (map #2 vars, chop (length raw_assumes) propss, binds));
val binds' =
(Auto_Bind.facts ctxt' (flat shows) @
(case try List.last (flat shows) of
NONE => []
| SOME prop => map (apsnd (SOME o Auto_Bind.abs_params prop)) binds))
|> export_binds ctxt' ctxt fixes;
val text = Logic.close_prop fixes (flat assumes) (Logic.mk_conjunction_list (flat shows));
val text' = singleton (Variable.export_terms ctxt' ctxt) text;
val result : statement =
{fixes = fixes,
assumes = assumes,
shows = shows,
result_binds = binds',
text = text,
result_text = text'};
in (result, ctxt') end;
in
val cert_stmt = prep_stmt cert_var cert_propp;
val read_stmt = prep_stmt read_var read_propp;
val cert_statement = prep_statement cert_var cert_propp;
val read_statement = prep_statement read_var read_propp;
end;
(** print context information **)
(* local syntax *)
val print_syntax = Syntax.print_syntax o syn_of;
(* abbreviations *)
fun pretty_abbrevs verbose show_globals ctxt =
let
val space = const_space ctxt;
val (constants, global_constants) =
apply2 (#constants o Consts.dest) (#consts (rep_data ctxt));
val globals = Symtab.make global_constants;
fun add_abbr (_, (_, NONE)) = I
| add_abbr (c, (T, SOME t)) =
if not show_globals andalso Symtab.defined globals c then I
else cons (c, Logic.mk_equals (Const (c, T), t));
val abbrevs = Name_Space.markup_entries verbose ctxt space (fold add_abbr constants []);
in
if null abbrevs then []
else [Pretty.big_list "abbreviations:" (map (pretty_term_abbrev ctxt o #2) abbrevs)]
end;
fun print_abbrevs verbose = Pretty.writeln_chunks o pretty_abbrevs verbose true;
(* term bindings *)
fun pretty_term_bindings ctxt =
let
val binds = Variable.binds_of ctxt;
fun prt_bind (xi, (T, t)) = pretty_term_abbrev ctxt (Logic.mk_equals (Var (xi, T), t));
in
if Vartab.is_empty binds then []
else [Pretty.big_list "term bindings:" (map prt_bind (Vartab.dest binds))]
end;
(* local facts *)
fun pretty_local_facts verbose ctxt =
let
val facts = facts_of ctxt;
val props = map #1 (Facts.props facts);
val local_facts =
(if null props then [] else [("<unnamed>", props)]) @
Facts.dest_static verbose [Global_Theory.facts_of (theory_of ctxt)] facts;
in
if null local_facts then []
else
[Pretty.big_list "local facts:"
(map #1 (sort_by (#1 o #2) (map (`(pretty_fact ctxt)) local_facts)))]
end;
fun print_local_facts verbose ctxt =
Pretty.writeln_chunks (pretty_local_facts verbose ctxt);
(* named local contexts *)
local
fun pretty_case (name, (fixes, ((asms, (lets, cs)), ctxt))) =
let
val prt_name = pretty_name ctxt;
val prt_term = Syntax.pretty_term ctxt;
fun prt_let (xi, t) = Pretty.block
[Pretty.quote (prt_term (Var (xi, Term.fastype_of t))), Pretty.str " =", Pretty.brk 1,
Pretty.quote (prt_term t)];
fun prt_asm (a, ts) =
Pretty.block (Pretty.breaks
((if a = "" then [] else [prt_name a, Pretty.str ":"]) @
map (Pretty.quote o prt_term) ts));
fun prt_sect _ _ _ [] = []
| prt_sect head sep prt xs =
[Pretty.block (Pretty.breaks (head ::
flat (separate sep (map (single o prt) xs))))];
in
Pretty.block
(prt_name name :: Pretty.str ":" :: Pretty.fbrk ::
Pretty.fbreaks
(prt_sect (Pretty.keyword1 "fix") [] (prt_name o Binding.name_of o fst) fixes @
prt_sect (Pretty.keyword1 "let") [Pretty.keyword2 "and"] prt_let
(map_filter (fn (xi, SOME t) => SOME (xi, t) | _ => NONE) lets) @
(if forall (null o #2) asms then []
else prt_sect (Pretty.keyword1 "assume") [Pretty.keyword2 "and"] prt_asm asms) @
prt_sect (Pretty.str "subcases:") [] (prt_name o fst) cs))
end;
in
fun pretty_cases ctxt =
let
val cases =
dest_cases NONE ctxt |> map (fn (name, c as Rule_Cases.Case {fixes, ...}) =>
(name, (fixes, case_result c ctxt)));
in
if null cases then []
else [Pretty.big_list "cases:" (map pretty_case cases)]
end;
end;
fun print_cases_proof ctxt0 ctxt =
let
fun trim_name x = if Name.is_internal x then Name.clean x else "_";
val trim_names = map trim_name #> drop_suffix (equal "_");
fun print_case name xs =
(case trim_names xs of
[] => print_name ctxt name
| xs' => enclose "(" ")" (space_implode " " (map (print_name ctxt) (name :: xs'))));
fun is_case x t =
x = Rule_Cases.case_conclN andalso not (Term.exists_subterm Term.is_Var t);
fun indentation depth = prefix (Symbol.spaces (2 * depth));
fun print_proof depth (name, Rule_Cases.Case {fixes, binds, cases, ...}) =
let
val indent = indentation depth;
val head = indent ("case " ^ print_case name (map (Binding.name_of o #1) fixes));
val tail =
if null cases then
let val concl =
if exists (fn ((x, _), SOME t) => is_case x t | _ => false) binds
then Rule_Cases.case_conclN else Auto_Bind.thesisN
in indent ("then show ?" ^ concl ^ " sorry") end
else print_proofs depth cases;
in head ^ "\n" ^ tail end
and print_proofs 0 [] = ""
| print_proofs depth cases =
let
val indent = indentation depth;
val body = map (print_proof (depth + 1)) cases |> separate (indent "next")
in
if depth = 0 then body @ [indent "qed"]
else if length cases = 1 then body
else indent "{" :: body @ [indent "}"]
end |> cat_lines;
in
(case print_proofs 0 (dest_cases (SOME ctxt0) ctxt) of
"" => ""
| s => "Proof outline with cases:\n" ^ Active.sendback_markup_command s)
end;
(* core context *)
val debug = Config.declare_bool ("Proof_Context.debug", \<^here>) (K false);
val verbose = Config.declare_bool ("Proof_Context.verbose", \<^here>) (K false);
fun pretty_ctxt ctxt =
if not (Config.get ctxt debug) then []
else
let
val prt_term = Syntax.pretty_term ctxt;
(*structures*)
val {structs, ...} = Syntax_Trans.get_idents ctxt;
val prt_structs =
if null structs then []
else [Pretty.block (Pretty.str "structures:" :: Pretty.brk 1 ::
Pretty.commas (map Pretty.str structs))];
(*fixes*)
fun prt_fix (x, x') =
if x = x' then Pretty.str x
else Pretty.block [Pretty.str x, Pretty.str " =", Pretty.brk 1, prt_term (Syntax.free x')];
val fixes =
filter_out ((Name.is_internal orf member (op =) structs) o #1)
(Variable.dest_fixes ctxt);
val prt_fixes =
if null fixes then []
else [Pretty.block (Pretty.str "fixed variables:" :: Pretty.brk 1 ::
Pretty.commas (map prt_fix fixes))];
(*assumptions*)
val prt_assms =
(case Assumption.all_prems_of ctxt of
[] => []
| prems => [Pretty.big_list "assumptions:" [pretty_fact ctxt ("", prems)]]);
in prt_structs @ prt_fixes @ prt_assms end;
(* main context *)
fun pretty_context ctxt =
let
val verbose = Config.get ctxt verbose;
fun verb f x = if verbose then f (x ()) else [];
val prt_term = Syntax.pretty_term ctxt;
val prt_typ = Syntax.pretty_typ ctxt;
val prt_sort = Syntax.pretty_sort ctxt;
(*theory*)
val pretty_thy = Pretty.block
[Pretty.str "theory:", Pretty.brk 1, Context.pretty_thy (theory_of ctxt)];
(*defaults*)
fun prt_atom prt prtT (x, X) = Pretty.block
[prt x, Pretty.str " ::", Pretty.brk 1, prtT X];
fun prt_var (x, ~1) = prt_term (Syntax.free x)
| prt_var xi = prt_term (Syntax.var xi);
fun prt_varT (x, ~1) = prt_typ (TFree (x, []))
| prt_varT xi = prt_typ (TVar (xi, []));
val prt_defT = prt_atom prt_var prt_typ;
val prt_defS = prt_atom prt_varT prt_sort;
val (types, sorts) = Variable.constraints_of ctxt;
in
verb single (K pretty_thy) @
pretty_ctxt ctxt @
verb (pretty_abbrevs true false) (K ctxt) @
verb pretty_term_bindings (K ctxt) @
verb (pretty_local_facts true) (K ctxt) @
verb pretty_cases (K ctxt) @
verb single (fn () => Pretty.big_list "type constraints:" (map prt_defT (Vartab.dest types))) @
verb single (fn () => Pretty.big_list "default sorts:" (map prt_defS (Vartab.dest sorts)))
end;
end;
val show_abbrevs = Proof_Context.show_abbrevs;
diff --git a/src/Pure/Isar/spec_rules.ML b/src/Pure/Isar/spec_rules.ML
--- a/src/Pure/Isar/spec_rules.ML
+++ b/src/Pure/Isar/spec_rules.ML
@@ -1,188 +1,189 @@
(* Title: Pure/Isar/spec_rules.ML
Author: Makarius
Rules that characterize specifications, with optional name and
rough classification.
NB: In the face of arbitrary morphisms, the original shape of
specifications may get lost.
*)
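(*Illustrative usage sketch, not part of the original file: a definitional
  package might record its characteristic equations roughly as

    lthy |> Spec_Rules.add b Spec_Rules.equational [lhs] eq_thms

  where "b", "lhs" and "eq_thms" are hypothetical placeholders for the binding,
  the defined term and its equations; the recorded rules can later be looked up
  for a term "t" via "Spec_Rules.retrieve ctxt t".*)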
signature SPEC_RULES =
sig
datatype recursion =
Primrec of string list | Recdef | Primcorec of string list | Corec | Unknown_Recursion
val recursion_ord: recursion ord
val encode_recursion: recursion XML.Encode.T
datatype rough_classification = Equational of recursion | Inductive | Co_Inductive | Unknown
val rough_classification_ord: rough_classification ord
val equational_primrec: string list -> rough_classification
val equational_recdef: rough_classification
val equational_primcorec: string list -> rough_classification
val equational_corec: rough_classification
val equational: rough_classification
val is_equational: rough_classification -> bool
val is_inductive: rough_classification -> bool
val is_co_inductive: rough_classification -> bool
val is_relational: rough_classification -> bool
val is_unknown: rough_classification -> bool
val encode_rough_classification: rough_classification XML.Encode.T
type spec_rule =
{pos: Position.T,
name: string,
rough_classification: rough_classification,
terms: term list,
rules: thm list}
val get: Proof.context -> spec_rule list
val get_global: theory -> spec_rule list
val dest_theory: theory -> spec_rule list
val retrieve: Proof.context -> term -> spec_rule list
val retrieve_global: theory -> term -> spec_rule list
val add: binding -> rough_classification -> term list -> thm list -> local_theory -> local_theory
val add_global: binding -> rough_classification -> term list -> thm list -> theory -> theory
end;
structure Spec_Rules: SPEC_RULES =
struct
(* recursion *)
datatype recursion =
Primrec of string list | Recdef | Primcorec of string list | Corec | Unknown_Recursion;
val recursion_index =
fn Primrec _ => 0 | Recdef => 1 | Primcorec _ => 2 | Corec => 3 | Unknown_Recursion => 4;
fun recursion_ord (Primrec Ts1, Primrec Ts2) = list_ord fast_string_ord (Ts1, Ts2)
| recursion_ord (Primcorec Ts1, Primcorec Ts2) = list_ord fast_string_ord (Ts1, Ts2)
| recursion_ord rs = int_ord (apply2 recursion_index rs);
val encode_recursion =
let open XML.Encode in
variant
[fn Primrec a => ([], list string a),
fn Recdef => ([], []),
fn Primcorec a => ([], list string a),
fn Corec => ([], []),
fn Unknown_Recursion => ([], [])]
end;
(* rough classification *)
datatype rough_classification = Equational of recursion | Inductive | Co_Inductive | Unknown;
fun rough_classification_ord (Equational r1, Equational r2) = recursion_ord (r1, r2)
| rough_classification_ord cs =
int_ord (apply2 (fn Equational _ => 0 | Inductive => 1 | Co_Inductive => 2 | Unknown => 3) cs);
val equational_primrec = Equational o Primrec;
val equational_recdef = Equational Recdef;
val equational_primcorec = Equational o Primcorec;
val equational_corec = Equational Corec;
val equational = Equational Unknown_Recursion;
val is_equational = fn Equational _ => true | _ => false;
val is_inductive = fn Inductive => true | _ => false;
val is_co_inductive = fn Co_Inductive => true | _ => false;
val is_relational = is_inductive orf is_co_inductive;
val is_unknown = fn Unknown => true | _ => false;
val encode_rough_classification =
let open XML.Encode in
variant
[fn Equational r => ([], encode_recursion r),
fn Inductive => ([], []),
fn Co_Inductive => ([], []),
fn Unknown => ([], [])]
end;
(* rules *)
type spec_rule =
{pos: Position.T,
name: string,
rough_classification: rough_classification,
terms: term list,
rules: thm list};
fun eq_spec (specs: spec_rule * spec_rule) =
(op =) (apply2 #name specs) andalso
is_equal (rough_classification_ord (apply2 #rough_classification specs)) andalso
eq_list (op aconv) (apply2 #terms specs) andalso
eq_list Thm.eq_thm_prop (apply2 #rules specs);
fun map_spec_rules f ({pos, name, rough_classification, terms, rules}: spec_rule) : spec_rule =
{pos = pos, name = name, rough_classification = rough_classification, terms = terms,
rules = map f rules};
-structure Rules = Generic_Data
+structure Data = Generic_Data
(
type T = spec_rule Item_Net.T;
val empty : T = Item_Net.init eq_spec #terms;
val merge = Item_Net.merge;
);
(* get *)
fun get_generic imports context =
let
val thy = Context.theory_of context;
val transfer = Global_Theory.transfer_theories thy;
fun imported spec =
- imports |> exists (fn thy => Item_Net.member (Rules.get (Context.Theory thy)) spec);
+ imports |> exists (fn thy => Item_Net.member (Data.get (Context.Theory thy)) spec);
in
- Item_Net.content (Rules.get context)
+ Item_Net.content (Data.get context)
|> filter_out imported
|> (map o map_spec_rules) transfer
end;
val get = get_generic [] o Context.Proof;
val get_global = get_generic [] o Context.Theory;
fun dest_theory thy = rev (get_generic (Theory.parents_of thy) (Context.Theory thy));
(* retrieve *)
fun retrieve_generic context =
- Item_Net.retrieve (Rules.get context)
+ Item_Net.retrieve (Data.get context)
#> (map o map_spec_rules) (Thm.transfer'' context);
val retrieve = retrieve_generic o Context.Proof;
val retrieve_global = retrieve_generic o Context.Theory;
(* add *)
fun add b rough_classification terms rules lthy =
- let val thms0 = map Thm.trim_context (map (Drule.mk_term o Thm.cterm_of lthy) terms @ rules) in
- lthy |> Local_Theory.declaration {syntax = false, pervasive = true}
+ let
+ val n = length terms;
+ val thms0 = map Thm.trim_context (map (Drule.mk_term o Thm.cterm_of lthy) terms @ rules);
+ in
+ lthy |> Local_Theory.declaration {syntax = false, pervasive = true, pos = Binding.pos_of b}
(fn phi => fn context =>
let
+ val psi = Morphism.set_trim_context'' context phi;
val pos = Position.thread_data ();
- val name = Name_Space.full_name (Name_Space.naming_of context) (Morphism.binding phi b);
+ val name = Name_Space.full_name (Name_Space.naming_of context) (Morphism.binding psi b);
val (terms', rules') =
- map (Thm.transfer (Context.theory_of context)) thms0
- |> Morphism.fact phi
- |> chop (length terms)
- |>> map (Thm.term_of o Drule.dest_term)
- ||> map Thm.trim_context;
+ chop n (Morphism.fact psi thms0)
+ |>> map (Thm.term_of o Drule.dest_term);
in
- context |> (Rules.map o Item_Net.update)
+ context |> (Data.map o Item_Net.update)
{pos = pos, name = name, rough_classification = rough_classification,
terms = terms', rules = rules'}
end)
end;
fun add_global b rough_classification terms rules thy =
- thy |> (Context.theory_map o Rules.map o Item_Net.update)
+ thy |> (Context.theory_map o Data.map o Item_Net.update)
{pos = Position.thread_data (),
name = Sign.full_name thy b,
rough_classification = rough_classification,
terms = terms,
rules = map Thm.trim_context rules};
end;
diff --git a/src/Pure/Isar/specification.ML b/src/Pure/Isar/specification.ML
--- a/src/Pure/Isar/specification.ML
+++ b/src/Pure/Isar/specification.ML
@@ -1,446 +1,444 @@
(* Title: Pure/Isar/specification.ML
Author: Makarius
Derived local theory specifications --- with type-inference and
toplevel polymorphism.
*)
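(*Illustrative sketch, not part of the original file: at the Isar level a
  specification such as

    definition foo :: "nat => nat" where "foo n = n + 1"

  (with hypothetical constant "foo") is processed by definition_cmd below, which
  elaborates the specification, introduces the constant, and notes the defining
  theorem, here named "foo_def".*)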
signature SPECIFICATION =
sig
val read_props: string list -> (binding * string option * mixfix) list -> Proof.context ->
term list * Proof.context
val check_spec_open: (binding * typ option * mixfix) list ->
(binding * typ option * mixfix) list -> term list -> term -> Proof.context ->
((binding * typ option * mixfix) list * string list * (string -> Position.T list) * term) *
Proof.context
val read_spec_open: (binding * string option * mixfix) list ->
(binding * string option * mixfix) list -> string list -> string -> Proof.context ->
((binding * typ option * mixfix) list * string list * (string -> Position.T list) * term) *
Proof.context
type multi_specs =
((Attrib.binding * term) * term list * (binding * typ option * mixfix) list) list
type multi_specs_cmd =
((Attrib.binding * string) * string list * (binding * string option * mixfix) list) list
val check_multi_specs: (binding * typ option * mixfix) list -> multi_specs -> Proof.context ->
(((binding * typ) * mixfix) list * (Attrib.binding * term) list) * Proof.context
val read_multi_specs: (binding * string option * mixfix) list -> multi_specs_cmd -> Proof.context ->
(((binding * typ) * mixfix) list * (Attrib.binding * term) list) * Proof.context
val axiomatization: (binding * typ option * mixfix) list ->
(binding * typ option * mixfix) list -> term list ->
(Attrib.binding * term) list -> theory -> (term list * thm list) * theory
val axiomatization_cmd: (binding * string option * mixfix) list ->
(binding * string option * mixfix) list -> string list ->
(Attrib.binding * string) list -> theory -> (term list * thm list) * theory
val axiom: Attrib.binding * term -> theory -> thm * theory
val definition: (binding * typ option * mixfix) option ->
(binding * typ option * mixfix) list -> term list -> Attrib.binding * term ->
local_theory -> (term * (string * thm)) * local_theory
val definition_cmd: (binding * string option * mixfix) option ->
(binding * string option * mixfix) list -> string list -> Attrib.binding * string ->
bool -> local_theory -> (term * (string * thm)) * local_theory
val abbreviation: Syntax.mode -> (binding * typ option * mixfix) option ->
(binding * typ option * mixfix) list -> term -> bool -> local_theory -> local_theory
val abbreviation_cmd: Syntax.mode -> (binding * string option * mixfix) option ->
(binding * string option * mixfix) list -> string -> bool -> local_theory -> local_theory
val alias: binding * string -> local_theory -> local_theory
val alias_cmd: binding * (xstring * Position.T) -> local_theory -> local_theory
val type_alias: binding * string -> local_theory -> local_theory
val type_alias_cmd: binding * (xstring * Position.T) -> local_theory -> local_theory
val theorems: string ->
(Attrib.binding * Attrib.thms) list ->
(binding * typ option * mixfix) list ->
bool -> local_theory -> (string * thm list) list * local_theory
val theorems_cmd: string ->
(Attrib.binding * (Facts.ref * Token.src list) list) list ->
(binding * string option * mixfix) list ->
bool -> local_theory -> (string * thm list) list * local_theory
val theorem: bool -> string -> Method.text option ->
(thm list list -> local_theory -> local_theory) -> Attrib.binding ->
string list -> Element.context_i list -> Element.statement_i ->
bool -> local_theory -> Proof.state
val theorem_cmd: bool -> string -> Method.text option ->
(thm list list -> local_theory -> local_theory) -> Attrib.binding ->
(xstring * Position.T) list -> Element.context list -> Element.statement ->
bool -> local_theory -> Proof.state
val schematic_theorem: bool -> string -> Method.text option ->
(thm list list -> local_theory -> local_theory) -> Attrib.binding ->
string list -> Element.context_i list -> Element.statement_i ->
bool -> local_theory -> Proof.state
val schematic_theorem_cmd: bool -> string -> Method.text option ->
(thm list list -> local_theory -> local_theory) -> Attrib.binding ->
(xstring * Position.T) list -> Element.context list -> Element.statement ->
bool -> local_theory -> Proof.state
end;
structure Specification: SPECIFICATION =
struct
(* prepare propositions *)
fun read_props raw_props raw_fixes ctxt =
let
val (_, ctxt1) = ctxt |> Proof_Context.add_fixes_cmd raw_fixes;
val props1 = map (Syntax.parse_prop ctxt1) raw_props;
val (props2, ctxt2) = ctxt1 |> fold_map Variable.fix_dummy_patterns props1;
val props3 = Syntax.check_props ctxt2 props2;
val ctxt3 = ctxt2 |> fold Variable.declare_term props3;
in (props3, ctxt3) end;
(* prepare specification *)
fun get_positions ctxt x =
let
fun get Cs (Const ("_type_constraint_", C) $ t) = get (C :: Cs) t
| get Cs (Free (y, T)) =
if x = y then
map_filter Term_Position.decode_positionT
(T :: map (Type.constraint_type ctxt) Cs)
else []
| get _ (t $ u) = get [] t @ get [] u
| get _ (Abs (_, _, t)) = get [] t
| get _ _ = [];
in get [] end;
local
fun prep_decls prep_var raw_vars ctxt =
let
val (vars, ctxt') = fold_map prep_var raw_vars ctxt;
val (xs, ctxt'') = ctxt'
|> Context_Position.set_visible false
|> Proof_Context.add_fixes vars
||> Context_Position.restore_visible ctxt';
val _ =
Context_Position.reports ctxt''
(map (Binding.pos_of o #1) vars ~~
map (Variable.markup_entity_def ctxt'' ##> Properties.remove Markup.kindN) xs);
in ((vars, xs), ctxt'') end;
fun close_form ctxt ys prems concl =
let
val xs = rev (fold (Variable.add_free_names ctxt) (prems @ [concl]) (rev ys));
val pos_props = Logic.strip_imp_concl concl :: Logic.strip_imp_prems concl @ prems;
fun get_pos x = maps (get_positions ctxt x) pos_props;
val _ = Context_Position.reports ctxt (maps (Syntax_Phases.reports_of_scope o get_pos) xs);
in Logic.close_prop_constraint (Variable.default_type ctxt) (xs ~~ xs) prems concl end;
fun dummy_frees ctxt xs tss =
let
val names =
Variable.names_of ((fold o fold) Variable.declare_term tss ctxt)
|> fold Name.declare xs;
val (tss', _) = (fold_map o fold_map) Term.free_dummy_patterns tss names;
in tss' end;
fun prep_spec_open prep_var parse_prop raw_vars raw_params raw_prems raw_concl ctxt =
let
val ((vars, xs), vars_ctxt) = prep_decls prep_var raw_vars ctxt;
val (ys, params_ctxt) = vars_ctxt |> fold_map prep_var raw_params |-> Proof_Context.add_fixes;
val props =
map (parse_prop params_ctxt) (raw_concl :: raw_prems)
|> singleton (dummy_frees params_ctxt (xs @ ys));
val concl :: prems = Syntax.check_props params_ctxt props;
val spec = Logic.list_implies (prems, concl);
val spec_ctxt = Variable.declare_term spec params_ctxt;
fun get_pos x = maps (get_positions spec_ctxt x) props;
in ((vars, xs, get_pos, spec), spec_ctxt) end;
fun prep_specs prep_var parse_prop prep_att raw_vars raw_specss ctxt =
let
val ((vars, xs), vars_ctxt) = prep_decls prep_var raw_vars ctxt;
val propss0 =
raw_specss |> map (fn ((_, raw_concl), raw_prems, raw_params) =>
let val (ys, ctxt') = vars_ctxt |> fold_map prep_var raw_params |-> Proof_Context.add_fixes
in (ys, map (pair ctxt') (raw_concl :: raw_prems)) end);
val props =
burrow (grouped 10 Par_List.map_independent (uncurry parse_prop)) (map #2 propss0)
|> dummy_frees vars_ctxt xs
|> map2 (fn (ys, _) => fn concl :: prems => close_form vars_ctxt ys prems concl) propss0;
val specs = Syntax.check_props vars_ctxt props;
val specs_ctxt = vars_ctxt |> fold Variable.declare_term specs;
val ps = specs_ctxt |> fold_map Proof_Context.inferred_param xs |> fst;
val params = map2 (fn (b, _, mx) => fn (_, T) => ((b, T), mx)) vars ps;
val name_atts: Attrib.binding list =
map (fn ((name, atts), _) => (name, map (prep_att ctxt) atts)) (map #1 raw_specss);
in ((params, name_atts ~~ specs), specs_ctxt) end;
in
val check_spec_open = prep_spec_open Proof_Context.cert_var (K I);
val read_spec_open = prep_spec_open Proof_Context.read_var Syntax.parse_prop;
type multi_specs =
((Attrib.binding * term) * term list * (binding * typ option * mixfix) list) list;
type multi_specs_cmd =
((Attrib.binding * string) * string list * (binding * string option * mixfix) list) list;
fun check_multi_specs xs specs =
prep_specs Proof_Context.cert_var (K I) (K I) xs specs;
fun read_multi_specs xs specs =
prep_specs Proof_Context.read_var Syntax.parse_prop Attrib.check_src xs specs;
end;
(* axiomatization -- within global theory *)
fun gen_axioms prep_stmt prep_att raw_decls raw_fixes raw_prems raw_concls thy =
let
(*specification*)
val ({vars, propss = [prems, concls], ...}, vars_ctxt) =
Proof_Context.init_global thy
|> prep_stmt (raw_decls @ raw_fixes) ((map o map) (rpair []) [raw_prems, map snd raw_concls]);
val (decls, fixes) = chop (length raw_decls) vars;
val frees =
rev ((fold o fold) (Variable.add_frees vars_ctxt) [prems, concls] [])
|> map (fn (x, T) => (x, Free (x, T)));
val close = Logic.close_prop (map #2 fixes @ frees) prems;
val specs =
map ((apsnd o map) (prep_att vars_ctxt) o fst) raw_concls ~~ map close concls;
val spec_name =
Binding.conglomerate (if null decls then map (#1 o #1) specs else map (#1 o #1) decls);
(*consts*)
val (consts, consts_thy) = thy
|> fold_map (fn ((b, _, mx), (_, t)) => Theory.specify_const ((b, Term.type_of t), mx)) decls;
val subst = Term.subst_atomic (map (#2 o #2) decls ~~ consts);
(*axioms*)
val (axioms, axioms_thy) =
(specs, consts_thy) |-> fold_map (fn ((b, atts), prop) =>
Thm.add_axiom_global (b, subst prop) #>> (fn (_, th) => ((b, atts), [([th], [])])));
(*facts*)
val (facts, facts_lthy) = axioms_thy
|> Named_Target.theory_init
|> Spec_Rules.add spec_name Spec_Rules.Unknown consts (maps (maps #1 o #2) axioms)
|> Local_Theory.notes axioms;
in ((consts, map (the_single o #2) facts), Local_Theory.exit_global facts_lthy) end;
val axiomatization = gen_axioms Proof_Context.cert_stmt (K I);
val axiomatization_cmd = gen_axioms Proof_Context.read_stmt Attrib.check_src;
fun axiom (b, ax) = axiomatization [] [] [] [(b, ax)] #>> (hd o snd);
(* definition *)
fun gen_def prep_spec prep_att raw_var raw_params raw_prems ((a, raw_atts), raw_spec) int lthy =
let
val atts = map (prep_att lthy) raw_atts;
val ((vars, xs, get_pos, spec), _) = lthy
|> prep_spec (the_list raw_var) raw_params raw_prems raw_spec;
val (((x, T), rhs), prove) = Local_Defs.derived_def lthy get_pos {conditional = true} spec;
val _ = Name.reject_internal (x, []);
val (b, mx) =
(case (vars, xs) of
([], []) => (Binding.make (x, (case get_pos x of [] => Position.none | p :: _ => p)), NoSyn)
| ([(b, _, mx)], [y]) =>
if x = y then (b, mx)
else
error ("Head of definition " ^ quote x ^ " differs from declaration " ^ quote y ^
Position.here (Binding.pos_of b)));
val name = Thm.def_binding_optional b a;
val ((lhs, (_, raw_th)), lthy2) = lthy
|> Local_Theory.define_internal ((b, mx), ((Binding.suffix_name "_raw" name, []), rhs));
- val th = prove lthy2 raw_th;
- val lthy3 = lthy2 |> Spec_Rules.add name Spec_Rules.equational [lhs] [th];
+ val ([(def_name, [th])], lthy3) = lthy2
+ |> Local_Theory.notes [((name, atts), [([prove lthy2 raw_th], [])])];
- val ([(def_name, [th'])], lthy4) = lthy3
- |> Local_Theory.notes [((name, atts), [([th], [])])];
+ val lthy4 = lthy3
+ |> Spec_Rules.add name Spec_Rules.equational [lhs] [th]
+ |> Code.declare_default_eqns [(th, true)];
- val lthy5 = lthy4
- |> Code.declare_default_eqns [(th', true)];
-
- val lhs' = Morphism.term (Local_Theory.target_morphism lthy5) lhs;
+ val lhs' = Morphism.term (Local_Theory.target_morphism lthy4) lhs;
val _ =
- Proof_Display.print_consts int (Position.thread_data ()) lthy5
+ Proof_Display.print_consts int (Position.thread_data ()) lthy4
(Frees.defined (Frees.build (Frees.add_frees lhs'))) [(x, T)];
- in ((lhs, (def_name, th')), lthy5) end;
+ in ((lhs, (def_name, th)), lthy4) end;
fun definition xs ys As B = gen_def check_spec_open (K I) xs ys As B false;
val definition_cmd = gen_def read_spec_open Attrib.check_src;
(* abbreviation *)
fun gen_abbrev prep_spec mode raw_var raw_params raw_spec int lthy =
let
val lthy1 = lthy |> Proof_Context.set_syntax_mode mode;
val ((vars, xs, get_pos, spec), _) = lthy
|> Proof_Context.set_mode Proof_Context.mode_abbrev
|> prep_spec (the_list raw_var) raw_params [] raw_spec;
val ((x, T), rhs) = Local_Defs.abs_def (#2 (Local_Defs.cert_def lthy1 get_pos spec));
val _ = Name.reject_internal (x, []);
val (b, mx) =
(case (vars, xs) of
([], []) => (Binding.make (x, (case get_pos x of [] => Position.none | p :: _ => p)), NoSyn)
| ([(b, _, mx)], [y]) =>
if x = y then (b, mx)
else
error ("Head of abbreviation " ^ quote x ^ " differs from declaration " ^ quote y ^
Position.here (Binding.pos_of b)));
val lthy2 = lthy1
|> Local_Theory.abbrev mode ((b, mx), rhs) |> snd
|> Proof_Context.restore_syntax_mode lthy;
val _ = Proof_Display.print_consts int (Position.thread_data ()) lthy2 (K false) [(x, T)];
in lthy2 end;
val abbreviation = gen_abbrev check_spec_open;
val abbreviation_cmd = gen_abbrev read_spec_open;
(* alias *)
fun gen_alias decl check (b, arg) lthy =
let
val (c, reports) = check {proper = true, strict = false} lthy arg;
val _ = Context_Position.reports lthy reports;
in decl b c lthy end;
val alias =
gen_alias Local_Theory.const_alias (K (K (fn c => (c, []))));
val alias_cmd =
gen_alias Local_Theory.const_alias
(fn flags => fn ctxt => fn (c, pos) =>
apfst (#1 o dest_Const) (Proof_Context.check_const flags ctxt (c, [pos])));
val type_alias =
gen_alias Local_Theory.type_alias (K (K (fn c => (c, []))));
val type_alias_cmd =
gen_alias Local_Theory.type_alias (apfst (#1 o dest_Type) ooo Proof_Context.check_type_name);
(* fact statements *)
local
fun gen_theorems prep_fact prep_att add_fixes
kind raw_facts raw_fixes int lthy =
let
val facts = raw_facts |> map (fn ((name, atts), bs) =>
((name, map (prep_att lthy) atts),
bs |> map (fn (b, more_atts) => (prep_fact lthy b, map (prep_att lthy) more_atts))));
val (_, ctxt') = add_fixes raw_fixes lthy;
val facts' = facts
|> Attrib.partial_evaluation ctxt'
|> Attrib.transform_facts (Proof_Context.export_morphism ctxt' lthy);
val (res, lthy') = lthy |> Local_Theory.notes_kind kind facts';
val _ = Proof_Display.print_results int (Position.thread_data ()) lthy' ((kind, ""), res);
in (res, lthy') end;
in
val theorems = gen_theorems (K I) (K I) Proof_Context.add_fixes;
val theorems_cmd = gen_theorems Proof_Context.get_fact Attrib.check_src Proof_Context.add_fixes_cmd;
end;
(* complex goal statements *)
local
fun prep_statement prep_att prep_stmt raw_elems raw_stmt ctxt =
let
val (stmt, elems_ctxt) = prep_stmt raw_elems raw_stmt ctxt;
val prems = Assumption.local_prems_of elems_ctxt ctxt;
val stmt_ctxt = fold (fold (Proof_Context.augment o fst) o snd) stmt elems_ctxt;
in
(case raw_stmt of
Element.Shows _ =>
let val stmt' = Attrib.map_specs (map prep_att) stmt
in (([], prems, stmt', NONE), stmt_ctxt) end
| Element.Obtains raw_obtains =>
let
val asms_ctxt = stmt_ctxt
|> fold (fn ((name, _), asm) =>
snd o Proof_Context.add_assms Assumption.assume_export
[((name, [Context_Rules.intro_query NONE]), asm)]) stmt;
val that = Assumption.local_prems_of asms_ctxt stmt_ctxt;
val ([(_, that')], that_ctxt) = asms_ctxt
|> Proof_Context.set_stmt true
|> Proof_Context.note_thmss "" [((Binding.name Auto_Bind.thatN, []), [(that, [])])]
||> Proof_Context.restore_stmt asms_ctxt;
val stmt' = [(Binding.empty_atts, [(#2 (#1 (Obtain.obtain_thesis ctxt)), [])])];
in ((Obtain.obtains_attribs raw_obtains, prems, stmt', SOME that'), that_ctxt) end)
end;
fun gen_theorem schematic bundle_includes prep_att prep_stmt
long kind before_qed after_qed (name, raw_atts) raw_includes raw_elems raw_concl int lthy =
let
val _ = Local_Theory.assert lthy;
val elems = raw_elems |> map (Element.map_ctxt_attrib (prep_att lthy));
val ((more_atts, prems, stmt, facts), goal_ctxt) = lthy
|> bundle_includes raw_includes
|> prep_statement (prep_att lthy) prep_stmt elems raw_concl;
val atts = more_atts @ map (prep_att lthy) raw_atts;
val pos = Position.thread_data ();
fun after_qed' results goal_ctxt' =
let
val results' =
burrow (map (Goal.norm_result lthy) o Proof_Context.export goal_ctxt' lthy) results;
val (res, lthy') =
if forall (Binding.is_empty_atts o fst) stmt then (map (pair "") results', lthy)
else
Local_Theory.notes_kind kind
(map2 (fn (b, _) => fn ths => (b, [(ths, [])])) stmt results') lthy;
val lthy'' =
if Binding.is_empty_atts (name, atts) then
(Proof_Display.print_results int pos lthy' ((kind, ""), res); lthy')
else
let
val ([(res_name, _)], lthy'') =
Local_Theory.notes_kind kind [((name, atts), [(maps #2 res, [])])] lthy';
val _ = Proof_Display.print_results int pos lthy' ((kind, res_name), res);
in lthy'' end;
in after_qed results' lthy'' end;
val prems_name = if long then Auto_Bind.assmsN else Auto_Bind.thatN;
in
goal_ctxt
|> not (null prems) ?
(Proof_Context.note_thmss "" [((Binding.name prems_name, []), [(prems, [])])] #> snd)
|> Proof.theorem before_qed after_qed' (map snd stmt)
|> (case facts of NONE => I | SOME ths => Proof.refine_insert ths)
|> tap (fn state => not schematic andalso Proof.schematic_goal state andalso
error "Illegal schematic goal statement")
end;
in
val theorem =
gen_theorem false Bundle.includes (K I) Expression.cert_statement;
val theorem_cmd =
gen_theorem false Bundle.includes_cmd Attrib.check_src Expression.read_statement;
val schematic_theorem =
gen_theorem true Bundle.includes (K I) Expression.cert_statement;
val schematic_theorem_cmd =
gen_theorem true Bundle.includes_cmd Attrib.check_src Expression.read_statement;
end;
end;
diff --git a/src/Pure/Isar/token.ML b/src/Pure/Isar/token.ML
--- a/src/Pure/Isar/token.ML
+++ b/src/Pure/Isar/token.ML
@@ -1,802 +1,839 @@
(* Title: Pure/Isar/token.ML
Author: Markus Wenzel, TU Muenchen
Outer token syntax for Isabelle/Isar.
*)
signature TOKEN =
sig
datatype kind =
(*immediate source*)
Command | Keyword | Ident | Long_Ident | Sym_Ident | Var | Type_Ident | Type_Var | Nat |
Float | Space |
(*delimited content*)
String | Alt_String | Cartouche |
Control of Antiquote.control |
Comment of Comment.kind option |
(*special content*)
Error of string | EOF
val control_kind: kind
val str_of_kind: kind -> string
type file = {src_path: Path.T, lines: string list, digest: SHA1.digest, pos: Position.T}
type T
type src = T list
type name_value = {name: string, kind: string, print: Proof.context -> Markup.T * xstring}
datatype value =
Source of src |
Literal of bool * Markup.T |
Name of name_value * morphism |
Typ of typ |
Term of term |
Fact of string option * thm list |
- Attribute of morphism -> attribute |
- Declaration of declaration |
+ Attribute of attribute Morphism.entity |
+ Declaration of Morphism.declaration_entity |
Files of file Exn.result list |
Output of XML.body option
val pos_of: T -> Position.T
val adjust_offsets: (int -> int option) -> T -> T
val eof: T
val is_eof: T -> bool
val not_eof: T -> bool
val stopper: T Scan.stopper
val kind_of: T -> kind
val is_kind: kind -> T -> bool
val get_control: T -> Antiquote.control option
val is_command: T -> bool
val keyword_with: (string -> bool) -> T -> bool
val is_command_modifier: T -> bool
val ident_with: (string -> bool) -> T -> bool
val is_proper: T -> bool
val is_comment: T -> bool
val is_informal_comment: T -> bool
val is_formal_comment: T -> bool
val is_document_marker: T -> bool
val is_ignored: T -> bool
val is_begin_ignore: T -> bool
val is_end_ignore: T -> bool
val is_error: T -> bool
val is_space: T -> bool
val is_blank: T -> bool
val is_newline: T -> bool
val range_of: T list -> Position.range
val core_range_of: T list -> Position.range
val content_of: T -> string
val source_of: T -> string
val input_of: T -> Input.source
val inner_syntax_of: T -> string
val keyword_markup: bool * Markup.T -> string -> Markup.T
val completion_report: T -> Position.report_text list
val reports: Keyword.keywords -> T -> Position.report_text list
val markups: Keyword.keywords -> T -> Markup.T list
val unparse: T -> string
val print: T -> string
val text_of: T -> string * string
val file_source: file -> Input.source
val get_files: T -> file Exn.result list
val put_files: file Exn.result list -> T -> T
val get_output: T -> XML.body option
val put_output: XML.body -> T -> T
val get_value: T -> value option
val reports_of_value: T -> Position.report list
val name_value: name_value -> value
val get_name: T -> name_value option
val declare_maxidx: T -> Proof.context -> Proof.context
val map_facts: (string option -> thm list -> thm list) -> T -> T
- val trim_context_src: src -> src
+ val trim_context: T -> T
+ val transfer: theory -> T -> T
val transform: morphism -> T -> T
val init_assignable: T -> T
val assign: value option -> T -> T
val evaluate: ('a -> value) -> (T -> 'a) -> T -> 'a
val closure: T -> T
val pretty_value: Proof.context -> T -> Pretty.T
val name_of_src: src -> string * Position.T
val args_of_src: src -> T list
val checked_src: src -> bool
val check_src: Proof.context -> (Proof.context -> 'a Name_Space.table) -> src -> src * 'a
val pretty_src: Proof.context -> src -> Pretty.T
val ident_or_symbolic: string -> bool
val read_cartouche: Symbol_Pos.T list -> T
val tokenize: Keyword.keywords -> {strict: bool} -> Symbol_Pos.T list -> T list
val explode: Keyword.keywords -> Position.T -> string -> T list
val explode0: Keyword.keywords -> string -> T list
val print_name: Keyword.keywords -> string -> string
val print_properties: Keyword.keywords -> Properties.T -> string
val make: (int * int) * string -> Position.T -> T * Position.T
val make_string: string * Position.T -> T
+ val make_string0: string -> T
val make_int: int -> T list
val make_src: string * Position.T -> T list -> src
type 'a parser = T list -> 'a * T list
type 'a context_parser = Context.generic * T list -> 'a * (Context.generic * T list)
val syntax_generic: 'a context_parser -> src -> Context.generic -> 'a * Context.generic
val syntax: 'a context_parser -> src -> Proof.context -> 'a * Proof.context
end;
structure Token: TOKEN =
struct
(** tokens **)
(* token kind *)
datatype kind =
(*immediate source*)
Command | Keyword | Ident | Long_Ident | Sym_Ident | Var | Type_Ident | Type_Var | Nat |
Float | Space |
(*delimited content*)
String | Alt_String | Cartouche |
Control of Antiquote.control |
Comment of Comment.kind option |
(*special content*)
Error of string | EOF;
val control_kind = Control Antiquote.no_control;
fun equiv_kind kind kind' =
(case (kind, kind') of
(Control _, Control _) => true
| (Error _, Error _) => true
| _ => kind = kind');
val str_of_kind =
fn Command => "command"
| Keyword => "keyword"
| Ident => "identifier"
| Long_Ident => "long identifier"
| Sym_Ident => "symbolic identifier"
| Var => "schematic variable"
| Type_Ident => "type variable"
| Type_Var => "schematic type variable"
| Nat => "natural number"
| Float => "floating-point number"
| Space => "white space"
| String => "quoted string"
| Alt_String => "back-quoted string"
| Cartouche => "text cartouche"
| Control _ => "control cartouche"
| Comment NONE => "informal comment"
| Comment (SOME _) => "formal comment"
| Error _ => "bad input"
| EOF => "end-of-input";
val immediate_kinds =
Vector.fromList
[Command, Keyword, Ident, Long_Ident, Sym_Ident, Var, Type_Ident, Type_Var, Nat, Float, Space];
val delimited_kind =
(fn String => true
| Alt_String => true
| Cartouche => true
| Control _ => true
| Comment _ => true
| _ => false);
(* datatype token *)
(*The value slot assigns an (optional) internal value to a token,
usually as a side-effect of special scanner setup (see also
args.ML). Note that an assignable ref designates an intermediate
state of internalization -- it is NOT meant to persist.*)
type file = {src_path: Path.T, lines: string list, digest: SHA1.digest, pos: Position.T};
type name_value = {name: string, kind: string, print: Proof.context -> Markup.T * xstring};
datatype T = Token of (Symbol_Pos.text * Position.range) * (kind * string) * slot
and slot =
Slot |
Value of value option |
Assignable of value option Unsynchronized.ref
and value =
Source of T list |
Literal of bool * Markup.T |
Name of name_value * morphism |
Typ of typ |
Term of term |
Fact of string option * thm list | (*optional name for dynamic fact, i.e. fact "variable"*)
- Attribute of morphism -> attribute |
- Declaration of declaration |
+ Attribute of attribute Morphism.entity |
+ Declaration of Morphism.declaration_entity |
Files of file Exn.result list |
Output of XML.body option;
type src = T list;
(* position *)
fun pos_of (Token ((_, (pos, _)), _, _)) = pos;
fun end_pos_of (Token ((_, (_, pos)), _, _)) = pos;
fun adjust_offsets adjust (Token ((x, range), y, z)) =
Token ((x, apply2 (Position.adjust_offsets adjust) range), y, z);
(* stopper *)
fun mk_eof pos = Token (("", (pos, Position.none)), (EOF, ""), Slot);
val eof = mk_eof Position.none;
fun is_eof (Token (_, (EOF, _), _)) = true
| is_eof _ = false;
val not_eof = not o is_eof;
val stopper =
Scan.stopper (fn [] => eof | toks => mk_eof (end_pos_of (List.last toks))) is_eof;
(* kind of token *)
fun kind_of (Token (_, (k, _), _)) = k;
fun is_kind k (Token (_, (k', _), _)) = equiv_kind k k';
fun get_control tok =
(case kind_of tok of Control control => SOME control | _ => NONE);
val is_command = is_kind Command;
fun keyword_with pred (Token (_, (Keyword, x), _)) = pred x
| keyword_with _ _ = false;
val is_command_modifier = keyword_with (fn x => x = "private" orelse x = "qualified");
fun ident_with pred (Token (_, (Ident, x), _)) = pred x
| ident_with _ _ = false;
fun is_ignored (Token (_, (Space, _), _)) = true
| is_ignored (Token (_, (Comment NONE, _), _)) = true
| is_ignored _ = false;
fun is_proper (Token (_, (Space, _), _)) = false
| is_proper (Token (_, (Comment _, _), _)) = false
| is_proper _ = true;
fun is_comment (Token (_, (Comment _, _), _)) = true
| is_comment _ = false;
fun is_informal_comment (Token (_, (Comment NONE, _), _)) = true
| is_informal_comment _ = false;
fun is_formal_comment (Token (_, (Comment (SOME _), _), _)) = true
| is_formal_comment _ = false;
fun is_document_marker (Token (_, (Comment (SOME Comment.Marker), _), _)) = true
| is_document_marker _ = false;
fun is_begin_ignore (Token (_, (Comment NONE, "<"), _)) = true
| is_begin_ignore _ = false;
fun is_end_ignore (Token (_, (Comment NONE, ">"), _)) = true
| is_end_ignore _ = false;
fun is_error (Token (_, (Error _, _), _)) = true
| is_error _ = false;
(* blanks and newlines -- space tokens obey lines *)
fun is_space (Token (_, (Space, _), _)) = true
| is_space _ = false;
fun is_blank (Token (_, (Space, x), _)) = not (String.isSuffix "\n" x)
| is_blank _ = false;
fun is_newline (Token (_, (Space, x), _)) = String.isSuffix "\n" x
| is_newline _ = false;
(* range of tokens *)
fun range_of (toks as tok :: _) =
let val pos' = end_pos_of (List.last toks)
in Position.range (pos_of tok, pos') end
| range_of [] = Position.no_range;
val core_range_of =
drop_prefix is_ignored #> drop_suffix is_ignored #> range_of;
(* token content *)
fun content_of (Token (_, (_, x), _)) = x;
fun source_of (Token ((source, _), _, _)) = source;
fun input_of (Token ((source, range), (kind, _), _)) =
Input.source (delimited_kind kind) source range;
fun inner_syntax_of tok =
let val x = content_of tok
in if YXML.detect x then x else Syntax.implode_input (input_of tok) end;
(* markup reports *)
local
val token_kind_markup =
fn Var => (Markup.var, "")
| Type_Ident => (Markup.tfree, "")
| Type_Var => (Markup.tvar, "")
| String => (Markup.string, "")
| Alt_String => (Markup.alt_string, "")
| Cartouche => (Markup.cartouche, "")
| Control _ => (Markup.cartouche, "")
| Comment _ => (Markup.comment, "")
| Error msg => (Markup.bad (), msg)
| _ => (Markup.empty, "");
fun keyword_reports tok = map (fn markup => ((pos_of tok, markup), ""));
fun command_markups keywords x =
if Keyword.is_theory_end keywords x then [Markup.keyword2 |> Markup.keyword_properties]
else
(if Keyword.is_proof_asm keywords x then [Markup.keyword3]
else if Keyword.is_improper keywords x then [Markup.keyword1, Markup.improper]
else [Markup.keyword1])
|> map Markup.command_properties;
in
fun keyword_markup (important, keyword) x =
if important orelse Symbol.is_ascii_identifier x then keyword else Markup.delimiter;
fun completion_report tok =
if is_kind Keyword tok
then map (fn m => ((pos_of tok, m), "")) (Completion.suppress_abbrevs (content_of tok))
else [];
fun reports keywords tok =
if is_command tok then
keyword_reports tok (command_markups keywords (content_of tok))
else if is_kind Keyword tok then
keyword_reports tok
[keyword_markup (false, Markup.keyword2 |> Markup.keyword_properties) (content_of tok)]
else
let
val pos = pos_of tok;
val (m, text) = token_kind_markup (kind_of tok);
val deleted = Symbol_Pos.explode_deleted (source_of tok, pos);
in ((pos, m), text) :: map (fn p => ((p, Markup.delete), "")) deleted end;
fun markups keywords = map (#2 o #1) o reports keywords;
end;
(* unparse *)
fun unparse (Token (_, (kind, x), _)) =
(case kind of
String => Symbol_Pos.quote_string_qq x
| Alt_String => Symbol_Pos.quote_string_bq x
| Cartouche => cartouche x
| Control control => Symbol_Pos.content (Antiquote.control_symbols control)
| Comment NONE => enclose "(*" "*)" x
| EOF => ""
| _ => x);
fun print tok = Markup.markups (markups Keyword.empty_keywords tok) (unparse tok);
fun text_of tok =
let
val k = str_of_kind (kind_of tok);
val ms = markups Keyword.empty_keywords tok;
val s = unparse tok;
in
if s = "" then (k, "")
else if size s < 40 andalso not (exists_string (fn c => c = "\n") s)
then (k ^ " " ^ Markup.markups ms s, "")
else (k, Markup.markups ms s)
end;
(** associated values **)
(* inlined file content *)
fun file_source (file: file) =
let
val text = cat_lines (#lines file);
val end_pos = Position.symbol_explode text (#pos file);
in Input.source true text (Position.range (#pos file, end_pos)) end;
fun get_files (Token (_, _, Value (SOME (Files files)))) = files
| get_files _ = [];
fun put_files [] tok = tok
| put_files files (Token (x, y, Slot)) = Token (x, y, Value (SOME (Files files)))
| put_files _ tok = raise Fail ("Cannot put inlined files here" ^ Position.here (pos_of tok));
(* document output *)
fun get_output (Token (_, _, Value (SOME (Output output)))) = output
| get_output _ = NONE;
fun put_output output (Token (x, y, Slot)) = Token (x, y, Value (SOME (Output (SOME output))))
| put_output _ tok = raise Fail ("Cannot put document output here" ^ Position.here (pos_of tok));
(* access values *)
fun get_value (Token (_, _, Value v)) = v
| get_value _ = NONE;
fun map_value f (Token (x, y, Value (SOME v))) = Token (x, y, Value (SOME (f v)))
| map_value _ tok = tok;
(* reports of value *)
fun get_assignable_value (Token (_, _, Assignable r)) = ! r
| get_assignable_value (Token (_, _, Value v)) = v
| get_assignable_value _ = NONE;
fun reports_of_value tok =
(case get_assignable_value tok of
SOME (Literal markup) =>
let
val pos = pos_of tok;
val x = content_of tok;
in
if Position.is_reported pos then
map (pair pos) (keyword_markup markup x :: Completion.suppress_abbrevs x)
else []
end
| _ => []);
(* name value *)
fun name_value a = Name (a, Morphism.identity);
fun get_name tok =
(case get_assignable_value tok of
SOME (Name (a, _)) => SOME a
| _ => NONE);
(* maxidx *)
fun declare_maxidx tok =
(case get_value tok of
SOME (Source src) => fold declare_maxidx src
| SOME (Typ T) => Variable.declare_maxidx (Term.maxidx_of_typ T)
| SOME (Term t) => Variable.declare_maxidx (Term.maxidx_of_term t)
| SOME (Fact (_, ths)) => fold (Variable.declare_maxidx o Thm.maxidx_of) ths
| SOME (Attribute _) => I (* FIXME !? *)
| SOME (Declaration decl) =>
(fn ctxt =>
let val ctxt' = Context.proof_map (Morphism.form decl) ctxt
in Variable.declare_maxidx (Variable.maxidx_of ctxt') ctxt end)
| _ => I);
(* fact values *)
fun map_facts f =
map_value (fn v =>
(case v of
Source src => Source (map (map_facts f) src)
| Fact (a, ths) => Fact (a, f a ths)
| _ => v));
-val trim_context_src = (map o map_facts) (K (map Thm.trim_context));
+
+(* implicit context *)
+
+local
+
+fun context thm_context morphism_context attribute_context declaration_context =
+ let
+ fun token_context tok = map_value
+ (fn Source src => Source (map token_context src)
+ | Fact (a, ths) => Fact (a, map thm_context ths)
+ | Name (a, phi) => Name (a, morphism_context phi)
+ | Attribute a => Attribute (attribute_context a)
+ | Declaration a => Declaration (declaration_context a)
+ | v => v) tok;
+ in token_context end;
+
+in
+
+val trim_context =
+ context Thm.trim_context Morphism.reset_context
+ Morphism.entity_reset_context Morphism.entity_reset_context;
+
+fun transfer thy =
+ context (Thm.transfer thy) (Morphism.set_context thy)
+ (Morphism.entity_set_context thy) (Morphism.entity_set_context thy);
+
+end;
(* transform *)
fun transform phi =
map_value (fn v =>
(case v of
Source src => Source (map (transform phi) src)
| Literal _ => v
| Name (a, psi) => Name (a, psi $> phi)
| Typ T => Typ (Morphism.typ phi T)
| Term t => Term (Morphism.term phi t)
| Fact (a, ths) => Fact (a, Morphism.fact phi ths)
| Attribute att => Attribute (Morphism.transform phi att)
| Declaration decl => Declaration (Morphism.transform phi decl)
| Files _ => v
| Output _ => v));
(* static binding *)
(*1st stage: initialize assignable slots*)
fun init_assignable tok =
(case tok of
Token (x, y, Slot) => Token (x, y, Assignable (Unsynchronized.ref NONE))
| Token (_, _, Value _) => tok
| Token (_, _, Assignable r) => (r := NONE; tok));
(*2nd stage: assign values as side-effect of scanning*)
fun assign v tok =
(case tok of
Token (x, y, Slot) => Token (x, y, Value v)
| Token (_, _, Value _) => tok
| Token (_, _, Assignable r) => (r := v; tok));
fun evaluate mk eval arg =
let val x = eval arg in (assign (SOME (mk x)) arg; x) end;
(*3rd stage: static closure of final values*)
fun closure (Token (x, y, Assignable (Unsynchronized.ref v))) = Token (x, y, Value v)
| closure tok = tok;
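(*Illustrative sketch, not part of the original file: the three stages above are
  typically composed as

    tok |> init_assignable |> assign (SOME v) |> closure

  where "tok" is a freshly scanned token and "v" some value determined during
  parsing; the result carries an immutable Value slot.*)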
(* pretty *)
+fun pretty_keyword3 tok =
+ let val props = Position.properties_of (pos_of tok)
+ in Pretty.mark_str (Markup.properties props Markup.keyword3, unparse tok) end;
+
fun pretty_value ctxt tok =
(case get_value tok of
SOME (Literal markup) =>
let val x = content_of tok
in Pretty.mark_str (keyword_markup markup x, x) end
| SOME (Name ({print, ...}, _)) => Pretty.quote (Pretty.mark_str (print ctxt))
| SOME (Typ T) => Syntax.pretty_typ ctxt T
| SOME (Term t) => Syntax.pretty_term ctxt t
| SOME (Fact (_, ths)) =>
Pretty.enclose "(" ")" (Pretty.breaks (map (Pretty.cartouche o Thm.pretty_thm ctxt) ths))
+ | SOME (Attribute _) => pretty_keyword3 tok
+ | SOME (Declaration _) => pretty_keyword3 tok
| _ => Pretty.marks_str (markups Keyword.empty_keywords tok, unparse tok));
(* src *)
fun dest_src ([]: src) = raise Fail "Empty token source"
| dest_src (head :: args) = (head, args);
fun name_of_src src =
let
val head = #1 (dest_src src);
val name =
(case get_name head of
SOME {name, ...} => name
| NONE => content_of head);
in (name, pos_of head) end;
val args_of_src = #2 o dest_src;
fun pretty_src ctxt src =
let
val (head, args) = dest_src src;
val prt_name =
(case get_name head of
SOME {print, ...} => Pretty.mark_str (print ctxt)
| NONE => Pretty.str (content_of head));
in Pretty.block (Pretty.breaks (Pretty.quote prt_name :: map (pretty_value ctxt) args)) end;
fun checked_src (head :: _) = is_some (get_name head)
| checked_src [] = true;
fun check_src ctxt get_table src =
let
val (head, args) = dest_src src;
val table = get_table ctxt;
in
(case get_name head of
SOME {name, ...} => (src, Name_Space.get table name)
| NONE =>
let
val pos = pos_of head;
val (name, x) = Name_Space.check (Context.Proof ctxt) table (content_of head, pos);
val _ = Context_Position.report ctxt pos Markup.operator;
val kind = Name_Space.kind_of (Name_Space.space_of_table table);
fun print ctxt' =
Name_Space.markup_extern ctxt' (Name_Space.space_of_table (get_table ctxt')) name;
val value = name_value {name = name, kind = kind, print = print};
val head' = closure (assign (SOME value) head);
in (head' :: args, x) end)
end;
(** scanners **)
open Basic_Symbol_Pos;
val err_prefix = "Outer lexical error: ";
fun !!! msg = Symbol_Pos.!!! (fn () => err_prefix ^ msg);
(* scan symbolic idents *)
val scan_symid =
Scan.many1 (Symbol.is_symbolic_char o Symbol_Pos.symbol) ||
Scan.one (Symbol.is_symbolic o Symbol_Pos.symbol) >> single;
fun is_symid str =
(case try Symbol.explode str of
SOME [s] => Symbol.is_symbolic s orelse Symbol.is_symbolic_char s
| SOME ss => forall Symbol.is_symbolic_char ss
| _ => false);
fun ident_or_symbolic "begin" = false
| ident_or_symbolic ":" = true
| ident_or_symbolic "::" = true
| ident_or_symbolic s = Symbol_Pos.is_identifier s orelse is_symid s;
(* scan cartouche *)
val scan_cartouche =
Symbol_Pos.scan_pos --
((Symbol_Pos.scan_cartouche err_prefix >> Symbol_Pos.cartouche_content) -- Symbol_Pos.scan_pos);
(* scan space *)
fun space_symbol (s, _) = Symbol.is_blank s andalso s <> "\n";
val scan_space =
Scan.many1 space_symbol @@@ Scan.optional ($$$ "\n") [] ||
Scan.many space_symbol @@@ $$$ "\n";
(* scan comment *)
val scan_comment =
Symbol_Pos.scan_pos -- (Symbol_Pos.scan_comment_body err_prefix -- Symbol_Pos.scan_pos);
(** token sources **)
local
fun token_leq ((_, syms1), (_, syms2)) = length syms1 <= length syms2;
fun token k ss =
Token ((Symbol_Pos.implode ss, Symbol_Pos.range ss), (k, Symbol_Pos.content ss), Slot);
fun token_range k (pos1, (ss, pos2)) =
Token (Symbol_Pos.implode_range (pos1, pos2) ss, (k, Symbol_Pos.content ss), Slot);
fun scan_token keywords = !!! "bad input"
(Symbol_Pos.scan_string_qq err_prefix >> token_range String ||
Symbol_Pos.scan_string_bq err_prefix >> token_range Alt_String ||
scan_comment >> token_range (Comment NONE) ||
Comment.scan_outer >> (fn (k, ss) => token (Comment (SOME k)) ss) ||
scan_cartouche >> token_range Cartouche ||
Antiquote.scan_control err_prefix >> (fn control =>
token (Control control) (Antiquote.control_symbols control)) ||
scan_space >> token Space ||
(Scan.max token_leq
(Scan.max token_leq
(Scan.literal (Keyword.major_keywords keywords) >> pair Command)
(Scan.literal (Keyword.minor_keywords keywords) >> pair Keyword))
(Lexicon.scan_longid >> pair Long_Ident ||
Lexicon.scan_id >> pair Ident ||
Lexicon.scan_var >> pair Var ||
Lexicon.scan_tid >> pair Type_Ident ||
Lexicon.scan_tvar >> pair Type_Var ||
Symbol_Pos.scan_float >> pair Float ||
Symbol_Pos.scan_nat >> pair Nat ||
scan_symid >> pair Sym_Ident) >> uncurry token));
fun recover msg =
(Symbol_Pos.recover_string_qq ||
Symbol_Pos.recover_string_bq ||
Symbol_Pos.recover_cartouche ||
Symbol_Pos.recover_comment ||
Scan.one (Symbol.not_eof o Symbol_Pos.symbol) >> single)
>> (single o token (Error msg));
in
fun make_source keywords {strict} =
let
val scan_strict = Scan.bulk (scan_token keywords);
val scan = if strict then scan_strict else Scan.recover scan_strict recover;
in Source.source Symbol_Pos.stopper scan end;
fun read_cartouche syms =
(case Scan.read Symbol_Pos.stopper (scan_cartouche >> token_range Cartouche) syms of
SOME tok => tok
| NONE => error ("Single cartouche expected" ^ Position.here (#1 (Symbol_Pos.range syms))));
end;
(* explode *)
fun tokenize keywords strict syms =
Source.of_list syms |> make_source keywords strict |> Source.exhaust;
fun explode keywords pos text =
Symbol_Pos.explode (text, pos) |> tokenize keywords {strict = false};
fun explode0 keywords = explode keywords Position.none;
(* print names in parsable form *)
fun print_name keywords name =
((case explode keywords Position.none name of
[tok] => not (member (op =) [Ident, Long_Ident, Sym_Ident, Nat] (kind_of tok))
| _ => true) ? Symbol_Pos.quote_string_qq) name;
fun print_properties keywords =
map (apply2 (print_name keywords) #> (fn (a, b) => a ^ " = " ^ b)) #> commas #> enclose "[" "]";
(* make *)
fun make ((k, n), s) pos =
let
val pos' = Position.shift_offsets {remove_id = false} n pos;
val range = Position.range (pos, pos');
val tok =
if 0 <= k andalso k < Vector.length immediate_kinds then
Token ((s, range), (Vector.nth immediate_kinds k, s), Slot)
else
(case explode Keyword.empty_keywords pos s of
[tok] => tok
| _ => Token ((s, range), (Error (err_prefix ^ "exactly one token expected"), s), Slot))
in (tok, pos') end;
fun make_string (s, pos) =
let
val Token ((x, _), y, z) = #1 (make ((~1, 0), Symbol_Pos.quote_string_qq s) Position.none);
val pos' = Position.no_range_position pos;
in Token ((x, (pos', pos')), y, z) end;
+fun make_string0 s = make_string (s, Position.none);
+
val make_int = explode Keyword.empty_keywords Position.none o signed_string_of_int;
fun make_src a args = make_string a :: args;
(** parsers **)
type 'a parser = T list -> 'a * T list;
type 'a context_parser = Context.generic * T list -> 'a * (Context.generic * T list);
(* wrapped syntax *)
-fun syntax_generic scan src context =
+fun syntax_generic scan src0 context =
let
+ val src = map (transfer (Context.theory_of context)) src0;
val (name, pos) = name_of_src src;
val old_reports = maps reports_of_value src;
val args1 = map init_assignable (args_of_src src);
fun reported_text () =
if Context_Position.reports_enabled_generic context then
let val new_reports = maps (reports_of_value o closure) args1 in
if old_reports <> new_reports then
map (fn (p, m) => Position.reported_text p m "") new_reports
else []
end
else [];
in
(case Scan.error (Scan.finite' stopper (Scan.option scan)) (context, args1) of
(SOME x, (context', [])) =>
let val _ = Output.report (reported_text ())
in (x, context') end
| (_, (context', args2)) =>
let
val print_name =
(case get_name (hd src) of
NONE => quote name
| SOME {kind, print, ...} =>
let
val ctxt' = Context.proof_of context';
val (markup, xname) = print ctxt';
in plain_words kind ^ " " ^ quote (Markup.markup markup xname) end);
val print_args =
if null args2 then "" else ":\n " ^ space_implode " " (map print args2);
in
error ("Bad arguments for " ^ print_name ^ Position.here pos ^ print_args ^
Markup.markup_report (implode (reported_text ())))
end)
end;
fun syntax scan src = apsnd Context.the_proof o syntax_generic scan src o Context.Proof;
end;
type 'a parser = 'a Token.parser;
type 'a context_parser = 'a Token.context_parser;
diff --git a/src/Pure/assumption.ML b/src/Pure/assumption.ML
--- a/src/Pure/assumption.ML
+++ b/src/Pure/assumption.ML
@@ -1,151 +1,161 @@
(* Title: Pure/assumption.ML
Author: Makarius
Context assumptions, parameterized by export rules.
*)
signature ASSUMPTION =
sig
type export = bool -> cterm list -> (thm -> thm) * (term -> term)
val assume_export: export
val presume_export: export
val assume: Proof.context -> cterm -> thm
val assume_hyps: cterm -> Proof.context -> thm * Proof.context
val all_assms_of: Proof.context -> cterm list
val all_prems_of: Proof.context -> thm list
val local_assms_of: Proof.context -> Proof.context -> cterm list
val local_prems_of: Proof.context -> Proof.context -> thm list
val add_assms: export -> cterm list -> Proof.context -> thm list * Proof.context
val add_assumes: cterm list -> Proof.context -> thm list * Proof.context
- val export: bool -> Proof.context -> Proof.context -> thm -> thm
val export_term: Proof.context -> Proof.context -> term -> term
+ val export_: {goal: bool} -> Proof.context -> Proof.context -> thm -> thm
+ val export: Proof.context -> Proof.context -> thm -> thm
+ val export_goal: Proof.context -> Proof.context -> thm -> thm
val export_morphism: Proof.context -> Proof.context -> morphism
end;
structure Assumption: ASSUMPTION =
struct
(** basic rules **)
type export = bool -> cterm list -> (thm -> thm) * (term -> term);
(*
[A]
:
B
--------
#A \<Longrightarrow> B
*)
fun assume_export is_goal asms =
(if is_goal then Drule.implies_intr_protected asms else Drule.implies_intr_list asms, fn t => t);
(*
[A]
:
B
-------
A \<Longrightarrow> B
*)
fun presume_export _ = assume_export false;
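(*Illustrative example, not part of the original file: for asms = [A] and a fact
  B derived from it, the export function obtained via "assume_export true asms"
  turns B into the protected implication "#A ==> B", whereas "assume_export
  false asms" (and presume_export) yield the plain "A ==> B".*)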
fun assume ctxt = Raw_Simplifier.norm_hhf ctxt o Thm.assume;
fun assume_hyps ct ctxt =
let val (th, ctxt') = Thm.assume_hyps ct ctxt
in (Raw_Simplifier.norm_hhf ctxt' th, ctxt') end;
(** local context data **)
datatype data = Data of
{assms: (export * cterm list) list, (*assumes: A \<Longrightarrow> _*)
prems: thm list}; (*prems: A |- norm_hhf A*)
fun make_data (assms, prems) = Data {assms = assms, prems = prems};
val empty_data = make_data ([], []);
structure Data = Proof_Data
(
type T = data;
fun init _ = empty_data;
);
fun map_data f = Data.map (fn Data {assms, prems} => make_data (f (assms, prems)));
fun rep_data ctxt = Data.get ctxt |> (fn Data rep => rep);
(* all assumptions *)
val all_assumptions_of = #assms o rep_data;
val all_assms_of = maps #2 o all_assumptions_of;
val all_prems_of = #prems o rep_data;
(* local assumptions *)
local
fun drop_prefix eq (args as (x :: xs, y :: ys)) =
if eq (x, y) then drop_prefix eq (xs, ys) else args
| drop_prefix _ args = args;
fun check_result ctxt kind term_of res =
(case res of
([], rest) => rest
| (bad :: _, _) =>
raise Fail ("Outer context disagrees on " ^ kind ^ ": " ^
Syntax.string_of_term ctxt (term_of bad)));
in
fun local_assumptions_of inner outer =
drop_prefix (eq_snd (eq_list Thm.aconvc)) (apply2 all_assumptions_of (outer, inner))
|>> maps #2
|> check_result outer "assumption" Thm.term_of;
val local_assms_of = maps #2 oo local_assumptions_of;
fun local_prems_of inner outer =
drop_prefix Thm.eq_thm_prop (apply2 all_prems_of (outer, inner))
|> check_result outer "premise" Thm.prop_of;
end;
(* add assumptions *)
fun add_assms export new_asms ctxt =
let val (new_prems, ctxt') = fold_map assume_hyps new_asms ctxt in
ctxt'
|> map_data (fn (asms, prems) => (asms @ [(export, new_asms)], prems @ new_prems))
|> pair new_prems
end;
val add_assumes = add_assms assume_export;
(* export *)
-fun export is_goal inner outer =
+fun export_term inner outer =
+ fold_rev (fn (e, As) => #2 (e false As)) (local_assumptions_of inner outer);
+
+fun export_thm is_goal inner outer =
+ fold_rev (fn (e, As) => #1 (e is_goal As)) (local_assumptions_of inner outer);
+
+fun export_ {goal} inner outer =
Raw_Simplifier.norm_hhf_protect inner #>
- fold_rev (fn (e, As) => #1 (e is_goal As)) (local_assumptions_of inner outer) #>
+ export_thm goal inner outer #>
Raw_Simplifier.norm_hhf_protect outer;
-fun export_term inner outer =
- fold_rev (fn (e, As) => #2 (e false As)) (local_assumptions_of inner outer);
+val export = export_ {goal = false};
+val export_goal = export_ {goal = true};
fun export_morphism inner outer =
let
- val thm = export false inner outer;
+ val export0 = export_thm false inner outer;
+ fun thm thy =
+ let val norm = norm_hhf_protect (Proof_Context.init_global thy)
+ in norm #> export0 #> norm end;
val term = export_term inner outer;
val typ = Logic.type_map term;
in
- Morphism.transfer_morphism' inner $>
- Morphism.transfer_morphism' outer $>
Morphism.morphism "Assumption.export"
- {binding = [], typ = [typ], term = [term], fact = [map thm]}
+ {binding = [], typ = [K typ], term = [K term], fact = [map o thm o Morphism.the_theory]}
+ |> Morphism.set_context (Proof_Context.theory_of inner)
end;
end;
diff --git a/src/Pure/context.ML b/src/Pure/context.ML
--- a/src/Pure/context.ML
+++ b/src/Pure/context.ML
@@ -1,827 +1,836 @@
(* Title: Pure/context.ML
Author: Markus Wenzel, TU Muenchen
Generic theory contexts with unique identity, arbitrarily typed data,
monotonic development graph and history support. Generic proof
contexts with arbitrarily typed data.
Firm naming conventions:
thy, thy', thy1, thy2: theory
ctxt, ctxt', ctxt1, ctxt2: Proof.context
context: Context.generic
*)
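(*Illustrative sketch, not part of the original file: a generic context is
  usually eliminated via "cases" or the total projections declared below, e.g.
  the hypothetical helper

    fun ctxt_of (context: Context.generic) =
      Context.cases Proof_Context.init_global I context;

  which coincides with Context.proof_of.*)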
signature BASIC_CONTEXT =
sig
type theory
exception THEORY of string * theory list
structure Proof: sig type context end
structure Proof_Context:
sig
val theory_of: Proof.context -> theory
val init_global: theory -> Proof.context
val get_global: {long: bool} -> theory -> string -> Proof.context
end
end;
signature CONTEXT =
sig
include BASIC_CONTEXT
(*theory data*)
type data_kind = int
val data_kinds: unit -> (data_kind * Position.T) list
(*theory context*)
type id = int
type theory_id
val theory_id: theory -> theory_id
val data_timing: bool Unsynchronized.ref
val parents_of: theory -> theory list
val ancestors_of: theory -> theory list
val theory_id_ord: theory_id ord
val theory_id_name: {long: bool} -> theory_id -> string
val theory_long_name: theory -> string
val theory_base_name: theory -> string
val theory_name: {long: bool} -> theory -> string
val theory_identifier: theory -> id
val PureN: string
val pretty_thy: theory -> Pretty.T
val pretty_abbrev_thy: theory -> Pretty.T
val get_theory: {long: bool} -> theory -> string -> theory
val eq_thy_id: theory_id * theory_id -> bool
val eq_thy: theory * theory -> bool
val proper_subthy_id: theory_id * theory_id -> bool
val proper_subthy: theory * theory -> bool
val subthy_id: theory_id * theory_id -> bool
val subthy: theory * theory -> bool
val join_thys: theory list -> theory
val begin_thy: string -> theory list -> theory
val finish_thy: theory -> theory
val theory_data_sizeof1: theory -> (Position.T * int) list
(*proof context*)
val raw_transfer: theory -> Proof.context -> Proof.context
(*certificate*)
datatype certificate = Certificate of theory | Certificate_Id of theory_id
val certificate_theory: certificate -> theory
val certificate_theory_id: certificate -> theory_id
val eq_certificate: certificate * certificate -> bool
val join_certificate: certificate * certificate -> certificate
+ val join_certificate_theory: theory * theory -> theory
(*generic context*)
datatype generic = Theory of theory | Proof of Proof.context
val theory_tracing: bool Unsynchronized.ref
val proof_tracing: bool Unsynchronized.ref
val enabled_tracing: unit -> bool
val finish_tracing: unit ->
{contexts: (generic * Position.T) list,
active_contexts: int,
active_theories: int,
active_proofs: int,
total_contexts: int,
total_theories: int,
total_proofs: int}
val cases: (theory -> 'a) -> (Proof.context -> 'a) -> generic -> 'a
val mapping: (theory -> theory) -> (Proof.context -> Proof.context) -> generic -> generic
val mapping_result: (theory -> 'a * theory) -> (Proof.context -> 'a * Proof.context) ->
generic -> 'a * generic
val the_theory: generic -> theory
val the_proof: generic -> Proof.context
val map_theory: (theory -> theory) -> generic -> generic
val map_proof: (Proof.context -> Proof.context) -> generic -> generic
val map_theory_result: (theory -> 'a * theory) -> generic -> 'a * generic
val map_proof_result: (Proof.context -> 'a * Proof.context) -> generic -> 'a * generic
val theory_map: (generic -> generic) -> theory -> theory
val proof_map: (generic -> generic) -> Proof.context -> Proof.context
val theory_of: generic -> theory (*total*)
val proof_of: generic -> Proof.context (*total*)
(*thread data*)
val get_generic_context: unit -> generic option
val put_generic_context: generic option -> unit
val setmp_generic_context: generic option -> ('a -> 'b) -> 'a -> 'b
val the_generic_context: unit -> generic
val the_global_context: unit -> theory
val the_local_context: unit -> Proof.context
val >> : (generic -> generic) -> unit
val >>> : (generic -> 'a * generic) -> 'a
end;
signature PRIVATE_CONTEXT =
sig
include CONTEXT
structure Theory_Data:
sig
val declare: Position.T -> Any.T -> ((theory * Any.T) list -> Any.T) -> data_kind
val get: data_kind -> (Any.T -> 'a) -> theory -> 'a
val put: data_kind -> ('a -> Any.T) -> 'a -> theory -> theory
end
structure Proof_Data:
sig
val declare: (theory -> Any.T) -> data_kind
val get: data_kind -> (Any.T -> 'a) -> Proof.context -> 'a
val put: data_kind -> ('a -> Any.T) -> 'a -> Proof.context -> Proof.context
end
end;
structure Context: PRIVATE_CONTEXT =
struct
(*** type definitions ***)
(* context data *)
(*private copy avoids potential conflict of table exceptions*)
structure Datatab = Table(type key = int val ord = int_ord);
type data_kind = int;
val data_kind = Counter.make ();
(* theory identity *)
type id = int;
val new_id = Counter.make ();
abstype theory_id =
Thy_Id of
{id: id, (*identifier*)
ids: Intset.T, (*cumulative identifiers -- symbolic body content*)
name: string, (*official theory name*)
stage: int} (*index for anonymous updates*)
with
fun rep_theory_id (Thy_Id args) = args;
val make_theory_id = Thy_Id;
end;
(* theory allocation state *)
type state = {stage: int} Synchronized.var;
fun make_state () : state =
Synchronized.var "Context.state" {stage = 0};
fun next_stage (state: state) =
Synchronized.change_result state (fn {stage} => (stage + 1, {stage = stage + 1}));
(* theory and proof context *)
datatype theory =
Thy_Undef
| Thy of
(*allocation state*)
state *
(*identity*)
{theory_id: theory_id,
theory_token: theory Unsynchronized.ref,
theory_token_pos: Position.T} *
(*ancestry*)
{parents: theory list, (*immediate predecessors*)
ancestors: theory list} * (*all predecessors -- canonical reverse order*)
(*data*)
Any.T Datatab.table; (*body content*)
datatype proof =
Prf_Undef
| Prf of
(*identity*)
proof Unsynchronized.ref * (*token*)
Position.T * (*token_pos*)
theory *
(*data*)
Any.T Datatab.table;
structure Proof = struct type context = proof end;
datatype generic = Theory of theory | Proof of Proof.context;
(* heap allocations *)
val theory_tracing = Unsynchronized.ref false;
val proof_tracing = Unsynchronized.ref false;
fun enabled_tracing () = ! theory_tracing orelse ! proof_tracing;
local
fun cons_tokens var token =
Synchronized.change var (fn (n, tokens) => (n + 1, Weak.weak (SOME token) :: tokens));
fun finish_tokens var =
Synchronized.change_result var (fn (n, tokens) =>
let
val tokens' = filter Unsynchronized.weak_active tokens;
val results = map_filter Unsynchronized.weak_peek tokens';
in ((n, results), (n, tokens')) end);
fun make_token guard var token0 =
if ! guard then
let
val token = Unsynchronized.ref (! token0);
val pos = Position.thread_data ();
fun assign res = (token := res; cons_tokens var token; res);
in (token, pos, assign) end
else (token0, Position.none, I);
val theory_tokens = Synchronized.var "theory_tokens" (0, []: theory Unsynchronized.weak_ref list);
val proof_tokens = Synchronized.var "proof_tokens" (0, []: proof Unsynchronized.weak_ref list);
val theory_token0 = Unsynchronized.ref Thy_Undef;
val proof_token0 = Unsynchronized.ref Prf_Undef;
in
fun theory_token () = make_token theory_tracing theory_tokens theory_token0;
fun proof_token () = make_token proof_tracing proof_tokens proof_token0;
fun finish_tracing () =
let
val _ = ML_Heap.full_gc ();
val (total_theories, token_theories) = finish_tokens theory_tokens;
val (total_proofs, token_proofs) = finish_tokens proof_tokens;
fun cons1 (thy as Thy (_, {theory_token_pos, ...}, _, _)) = cons (Theory thy, theory_token_pos)
| cons1 _ = I;
fun cons2 (ctxt as Prf (_, proof_token_pos, _, _)) = cons (Proof ctxt, proof_token_pos)
| cons2 _ = I;
val contexts = build (fold cons1 token_theories #> fold cons2 token_proofs);
val active_theories = fold (fn (Theory _, _) => Integer.add 1 | _ => I) contexts 0;
val active_proofs = fold (fn (Proof _, _) => Integer.add 1 | _ => I) contexts 0;
in
{contexts = contexts,
active_contexts = active_theories + active_proofs,
active_theories = active_theories,
active_proofs = active_proofs,
total_contexts = total_theories + total_proofs,
total_theories = total_theories,
total_proofs = total_proofs}
end;
end;
(*** theory operations ***)
fun rep_theory (Thy args) = args;
exception THEORY of string * theory list;
val state_of = #1 o rep_theory;
val theory_identity = #2 o rep_theory;
val theory_id = #theory_id o theory_identity;
val identity_of = rep_theory_id o theory_id;
val ancestry_of = #3 o rep_theory;
val data_of = #4 o rep_theory;
fun make_ancestry parents ancestors = {parents = parents, ancestors = ancestors};
fun stage_final stage = stage = 0;
val theory_id_stage = #stage o rep_theory_id;
val theory_id_final = stage_final o theory_id_stage;
val theory_id_ord = int_ord o apply2 (#id o rep_theory_id);
fun theory_id_name {long} thy_id =
let val name = #name (rep_theory_id thy_id)
in if long then name else Long_Name.base_name name end;
val theory_long_name = #name o identity_of;
val theory_base_name = Long_Name.base_name o theory_long_name;
fun theory_name {long} = if long then theory_long_name else theory_base_name;
val theory_identifier = #id o identity_of;
val parents_of = #parents o ancestry_of;
val ancestors_of = #ancestors o ancestry_of;
(* names *)
val PureN = "Pure";
fun display_name thy_id =
let
val name = theory_id_name {long = false} thy_id;
val final = theory_id_final thy_id;
in if final then name else name ^ ":" ^ string_of_int (theory_id_stage thy_id) end;
fun display_names thy =
let
val name = display_name (theory_id thy);
val ancestor_names = map theory_long_name (ancestors_of thy);
in rev (name :: ancestor_names) end;
val pretty_thy = Pretty.str_list "{" "}" o display_names;
val _ = ML_system_pp (fn _ => fn _ => Pretty.to_polyml o pretty_thy);
fun pretty_abbrev_thy thy =
let
val names = display_names thy;
val n = length names;
val abbrev = if n > 5 then "..." :: List.drop (names, n - 5) else names;
in Pretty.str_list "{" "}" abbrev end;
fun get_theory long thy name =
if theory_name long thy <> name then
(case find_first (fn thy' => theory_name long thy' = name) (ancestors_of thy) of
SOME thy' => thy'
| NONE => error ("Unknown ancestor theory " ^ quote name))
else if theory_id_final (theory_id thy) then thy
else error ("Unfinished theory " ^ quote name);
(* identity *)
fun merge_ids thys =
fold (identity_of #> (fn {id, ids, ...} => fn acc => Intset.merge (acc, ids) |> Intset.insert id))
thys Intset.empty;
val eq_thy_id = op = o apply2 (#id o rep_theory_id);
val eq_thy = op = o apply2 (#id o identity_of);
val proper_subthy_id = apply2 rep_theory_id #> (fn ({id, ...}, {ids, ...}) => Intset.member ids id);
val proper_subthy = proper_subthy_id o apply2 theory_id;
fun subthy_id p = eq_thy_id p orelse proper_subthy_id p;
val subthy = subthy_id o apply2 theory_id;
(* consistent ancestors *)
fun eq_thy_consistent (thy1, thy2) =
eq_thy (thy1, thy2) orelse
(theory_base_name thy1 = theory_base_name thy2 andalso
raise THEORY ("Duplicate theory name", [thy1, thy2]));
fun extend_ancestors thy thys =
if member eq_thy_consistent thys thy then
raise THEORY ("Duplicate theory node", thy :: thys)
else thy :: thys;
val merge_ancestors = merge eq_thy_consistent;
val eq_ancestry =
apply2 ancestry_of #>
(fn ({parents, ancestors}, {parents = parents', ancestors = ancestors'}) =>
eq_list eq_thy (parents, parents') andalso eq_list eq_thy (ancestors, ancestors'));
(** theory data **)
(* data kinds and access methods *)
val data_timing = Unsynchronized.ref false;
local
type kind =
{pos: Position.T,
empty: Any.T,
merge: (theory * Any.T) list -> Any.T};
val kinds = Synchronized.var "Theory_Data" (Datatab.empty: kind Datatab.table);
fun the_kind k =
(case Datatab.lookup (Synchronized.value kinds) k of
SOME kind => kind
| NONE => raise Fail "Invalid theory data identifier");
in
fun data_kinds () =
Datatab.fold_rev (fn (k, {pos, ...}) => cons (k, pos)) (Synchronized.value kinds) [];
val invoke_pos = #pos o the_kind;
val invoke_empty = #empty o the_kind;
fun invoke_merge kind args =
if ! data_timing then
Timing.cond_timeit true ("Theory_Data.merge" ^ Position.here (#pos kind))
(fn () => #merge kind args)
else #merge kind args;
fun declare_data pos empty merge =
let
val k = data_kind ();
val kind = {pos = pos, empty = empty, merge = merge};
val _ = Synchronized.change kinds (Datatab.update (k, kind));
in k end;
fun lookup_data k thy = Datatab.lookup (data_of thy) k;
fun get_data k thy =
(case lookup_data k thy of
SOME x => x
| NONE => invoke_empty k);
fun merge_data [] = Datatab.empty
| merge_data [thy] = data_of thy
| merge_data thys =
let
fun merge (k, kind) data =
(case map_filter (fn thy => lookup_data k thy |> Option.map (pair thy)) thys of
[] => data
| [(_, x)] => Datatab.default (k, x) data
| args => Datatab.update (k, invoke_merge kind args) data);
in Datatab.fold merge (Synchronized.value kinds) (data_of (hd thys)) end;
end;
(** build theories **)
(* create theory *)
fun create_thy state ids name stage ancestry data =
let
val theory_id = make_theory_id {id = new_id (), ids = ids, name = name, stage = stage};
val (token, pos, assign) = theory_token ();
val identity = {theory_id = theory_id, theory_token = token, theory_token_pos = pos};
in assign (Thy (state, identity, ancestry, data)) end;
(* primitives *)
val pre_pure_thy =
let
val state = make_state ();
val stage = next_stage state;
in create_thy state Intset.empty PureN stage (make_ancestry [] []) Datatab.empty end;
local
fun change_thy finish f thy =
let
val {name, stage, ...} = identity_of thy;
val Thy (state, _, ancestry, data) = thy;
val ancestry' =
if stage_final stage
then make_ancestry [thy] (extend_ancestors thy (ancestors_of thy))
else ancestry;
val ids' = merge_ids [thy];
val stage' = if finish then 0 else next_stage state;
val data' = f data;
in create_thy state ids' name stage' ancestry' data' end;
in
val update_thy = change_thy false;
val finish_thy = change_thy true I;
end;
(* join: unfinished theory nodes *)
fun join_thys [] = raise List.Empty
| join_thys thys =
let
val thy0 = hd thys;
val name0 = theory_long_name thy0;
val state0 = state_of thy0;
fun ok thy =
not (theory_id_final (theory_id thy)) andalso
theory_long_name thy = name0 andalso
eq_ancestry (thy0, thy);
val _ =
(case filter_out ok thys of
[] => ()
| bad => raise THEORY ("Cannot join theories", bad));
val stage = next_stage state0;
val ids = merge_ids thys;
val data = merge_data thys;
in create_thy state0 ids name0 stage (ancestry_of thy0) data end;
(* merge: finished theory nodes *)
fun make_parents thys =
let val thys' = distinct eq_thy thys
in thys' |> filter_out (fn thy => exists (fn thy' => proper_subthy (thy, thy')) thys') end;
fun begin_thy name imports =
if name = "" then error ("Bad theory name: " ^ quote name)
else if null imports then error "Missing theory imports"
else
let
val parents = make_parents imports;
val ancestors =
Library.foldl1 merge_ancestors (map ancestors_of parents)
|> fold extend_ancestors parents;
val ancestry = make_ancestry parents ancestors;
val state = make_state ();
val stage = next_stage state;
val ids = merge_ids parents;
val data = merge_data parents;
in create_thy state ids name stage ancestry data |> tap finish_thy end;
(* theory data *)
structure Theory_Data =
struct
val declare = declare_data;
fun get k dest thy = dest (get_data k thy);
fun put k make x = update_thy (Datatab.update (k, make x));
fun sizeof1 k thy =
Datatab.lookup (data_of thy) k |> Option.map ML_Heap.sizeof1;
end;
fun theory_data_sizeof1 thy =
build (data_of thy |> Datatab.fold_rev (fn (k, _) =>
(case Theory_Data.sizeof1 k thy of
NONE => I
| SOME n => (cons (invoke_pos k, n)))));
(*** proof context ***)
(* proof data kinds *)
local
val kinds = Synchronized.var "Proof_Data" (Datatab.empty: (theory -> Any.T) Datatab.table);
fun init_data thy =
Synchronized.value kinds |> Datatab.map (fn _ => fn init => init thy);
fun init_new_data thy =
Synchronized.value kinds |> Datatab.fold (fn (k, init) => fn data =>
if Datatab.defined data k then data
else Datatab.update (k, init thy) data);
fun init_fallback k thy =
(case Datatab.lookup (Synchronized.value kinds) k of
SOME init => init thy
| NONE => raise Fail "Invalid proof data identifier");
in
fun raw_transfer thy' (ctxt as Prf (_, _, thy, data)) =
if eq_thy (thy, thy') then ctxt
else if proper_subthy (thy, thy') then
let
val (token', pos', assign) = proof_token ();
val data' = init_new_data thy' data;
in assign (Prf (token', pos', thy', data')) end
else error "Cannot transfer proof context: not a super theory";
structure Proof_Context =
struct
fun theory_of (Prf (_, _, thy, _)) = thy;
fun init_global thy =
let val (token, pos, assign) = proof_token ()
in assign (Prf (token, pos, thy, init_data thy)) end;
fun get_global long thy name = init_global (get_theory long thy name);
end;
structure Proof_Data =
struct
fun declare init =
let
val k = data_kind ();
val _ = Synchronized.change kinds (Datatab.update (k, init));
in k end;
fun get k dest (Prf (_, _, thy, data)) =
(case Datatab.lookup data k of
SOME x => x
| NONE => init_fallback k thy) |> dest;
fun put k make x (Prf (_, _, thy, data)) =
let
val (token', pos', assign) = proof_token ();
val data' = Datatab.update (k, make x) data;
in assign (Prf (token', pos', thy, data')) end;
end;
end;
(*** theory certificate ***)
datatype certificate = Certificate of theory | Certificate_Id of theory_id;
fun certificate_theory (Certificate thy) = thy
| certificate_theory (Certificate_Id thy_id) =
error ("No content for theory certificate " ^ display_name thy_id);
fun certificate_theory_id (Certificate thy) = theory_id thy
| certificate_theory_id (Certificate_Id thy_id) = thy_id;
fun eq_certificate (Certificate thy1, Certificate thy2) = eq_thy (thy1, thy2)
| eq_certificate (Certificate_Id thy_id1, Certificate_Id thy_id2) = eq_thy_id (thy_id1, thy_id2)
| eq_certificate _ = false;
+fun err_join (thy_id1, thy_id2) =
+ error ("Cannot join unrelated theory certificates " ^
+ display_name thy_id1 ^ " and " ^ display_name thy_id2);
+
fun join_certificate (cert1, cert2) =
let val (thy_id1, thy_id2) = apply2 certificate_theory_id (cert1, cert2) in
if eq_thy_id (thy_id1, thy_id2) then (case cert1 of Certificate _ => cert1 | _ => cert2)
else if proper_subthy_id (thy_id2, thy_id1) then cert1
else if proper_subthy_id (thy_id1, thy_id2) then cert2
- else
- error ("Cannot join unrelated theory certificates " ^
- display_name thy_id1 ^ " and " ^ display_name thy_id2)
+ else err_join (thy_id1, thy_id2)
end;
+fun join_certificate_theory (thy1, thy2) =
+ let val (thy_id1, thy_id2) = apply2 theory_id (thy1, thy2) in
+ if subthy_id (thy_id2, thy_id1) then thy1
+ else if proper_subthy_id (thy_id1, thy_id2) then thy2
+ else err_join (thy_id1, thy_id2)
+ end;
(*** generic context ***)
fun cases f _ (Theory thy) = f thy
| cases _ g (Proof prf) = g prf;
fun mapping f g = cases (Theory o f) (Proof o g);
fun mapping_result f g = cases (apsnd Theory o f) (apsnd Proof o g);
val the_theory = cases I (fn _ => error "Ill-typed context: theory expected");
val the_proof = cases (fn _ => error "Ill-typed context: proof expected") I;
fun map_theory f = Theory o f o the_theory;
fun map_proof f = Proof o f o the_proof;
fun map_theory_result f = apsnd Theory o f o the_theory;
fun map_proof_result f = apsnd Proof o f o the_proof;
fun theory_map f = the_theory o f o Theory;
fun proof_map f = the_proof o f o Proof;
val theory_of = cases I Proof_Context.theory_of;
val proof_of = cases Proof_Context.init_global I;
(** thread data **)
local val generic_context_var = Thread_Data.var () : generic Thread_Data.var in
fun get_generic_context () = Thread_Data.get generic_context_var;
val put_generic_context = Thread_Data.put generic_context_var;
fun setmp_generic_context opt_context = Thread_Data.setmp generic_context_var opt_context;
fun the_generic_context () =
(case get_generic_context () of
SOME context => context
| _ => error "Unknown context");
val the_global_context = theory_of o the_generic_context;
val the_local_context = proof_of o the_generic_context;
end;
fun >>> f =
let
val (res, context') = f (the_generic_context ());
val _ = put_generic_context (SOME context');
in res end;
nonfix >>;
fun >> f = >>> (fn context => ((), f context));
val _ = put_generic_context (SOME (Theory pre_pure_thy));
end;
structure Basic_Context: BASIC_CONTEXT = Context;
open Basic_Context;
(*** type-safe interfaces for data declarations ***)
(** theory data **)
signature THEORY_DATA'_ARGS =
sig
type T
val empty: T
val merge: (theory * T) list -> T
end;
signature THEORY_DATA_ARGS =
sig
type T
val empty: T
val merge: T * T -> T
end;
signature THEORY_DATA =
sig
type T
val get: theory -> T
val put: T -> theory -> theory
val map: (T -> T) -> theory -> theory
end;
functor Theory_Data'(Data: THEORY_DATA'_ARGS): THEORY_DATA =
struct
type T = Data.T;
exception Data of T;
val kind =
let val pos = Position.thread_data () in
Context.Theory_Data.declare
pos
(Data Data.empty)
(Data o Data.merge o map (fn (thy, Data x) => (thy, x)))
end;
val get = Context.Theory_Data.get kind (fn Data x => x);
val put = Context.Theory_Data.put kind Data;
fun map f thy = put (f (get thy)) thy;
end;
functor Theory_Data(Data: THEORY_DATA_ARGS): THEORY_DATA =
Theory_Data'
(
type T = Data.T;
val empty = Data.empty;
fun merge args = Library.foldl (fn (a, (_, b)) => Data.merge (a, b)) (#2 (hd args), tl args)
);
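(*editor's illustrative sketch, not part of the Isabelle sources: a typical
  instantiation of Theory_Data, here a hypothetical table of named terms --
    structure My_Terms = Theory_Data
    (
      type T = term Symtab.table;
      val empty = Symtab.empty;
      val merge = Symtab.merge (op aconv);
    );
    val lookup_term = Symtab.lookup o My_Terms.get;
    fun declare_term name t = My_Terms.map (Symtab.update (name, t));
*)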
(** proof data **)
signature PROOF_DATA_ARGS =
sig
type T
val init: theory -> T
end;
signature PROOF_DATA =
sig
type T
val get: Proof.context -> T
val put: T -> Proof.context -> Proof.context
val map: (T -> T) -> Proof.context -> Proof.context
end;
functor Proof_Data(Data: PROOF_DATA_ARGS): PROOF_DATA =
struct
type T = Data.T;
exception Data of T;
val kind = Context.Proof_Data.declare (Data o Data.init);
val get = Context.Proof_Data.get kind (fn Data x => x);
val put = Context.Proof_Data.put kind Data;
fun map f prf = put (f (get prf)) prf;
end;
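(*editor's illustrative sketch, not part of the Isabelle sources: Proof_Data is
  initialized from the background theory of each new proof context, e.g. a
  hypothetical boolean flag --
    structure My_Flag = Proof_Data (type T = bool fun init _ = false);
    val enable_flag = My_Flag.put true;
    val flag_enabled = My_Flag.get;
*)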
(** generic data **)
signature GENERIC_DATA_ARGS =
sig
type T
val empty: T
val merge: T * T -> T
end;
signature GENERIC_DATA =
sig
type T
val get: Context.generic -> T
val put: T -> Context.generic -> Context.generic
val map: (T -> T) -> Context.generic -> Context.generic
end;
functor Generic_Data(Data: GENERIC_DATA_ARGS): GENERIC_DATA =
struct
structure Thy_Data = Theory_Data(Data);
structure Prf_Data = Proof_Data(type T = Data.T val init = Thy_Data.get);
type T = Data.T;
fun get (Context.Theory thy) = Thy_Data.get thy
| get (Context.Proof prf) = Prf_Data.get prf;
fun put x (Context.Theory thy) = Context.Theory (Thy_Data.put x thy)
| put x (Context.Proof prf) = Context.Proof (Prf_Data.put x prf);
fun map f ctxt = put (f (get ctxt)) ctxt;
end;
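(*editor's illustrative sketch, not part of the Isabelle sources: Generic_Data
  covers theory and proof contexts uniformly via Context.generic, e.g. a
  hypothetical collection of rules --
    structure My_Rules = Generic_Data
    (
      type T = thm list;
      val empty = [];
      val merge = Thm.merge_thms;
    );
    fun add_rule th = My_Rules.map (Thm.add_thm th);
*)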
(*hide private interface*)
structure Context: CONTEXT = Context;
diff --git a/src/Pure/drule.ML b/src/Pure/drule.ML
--- a/src/Pure/drule.ML
+++ b/src/Pure/drule.ML
@@ -1,838 +1,842 @@
(* Title: Pure/drule.ML
Author: Lawrence C Paulson, Cambridge University Computer Laboratory
Derived rules and other operations on theorems.
*)
infix 0 RL RLN MRS OF COMP INCR_COMP COMP_INCR;
signature BASIC_DRULE =
sig
val mk_implies: cterm * cterm -> cterm
val list_implies: cterm list * cterm -> cterm
val strip_imp_prems: cterm -> cterm list
val strip_imp_concl: cterm -> cterm
val cprems_of: thm -> cterm list
val forall_intr_list: cterm list -> thm -> thm
val forall_elim_list: cterm list -> thm -> thm
val lift_all: Proof.context -> cterm -> thm -> thm
val implies_elim_list: thm -> thm list -> thm
val implies_intr_list: cterm list -> thm -> thm
val instantiate_normalize: ctyp TVars.table * cterm Vars.table -> thm -> thm
val instantiate'_normalize: ctyp option list -> cterm option list -> thm -> thm
val infer_instantiate_types: Proof.context -> ((indexname * typ) * cterm) list -> thm -> thm
val infer_instantiate: Proof.context -> (indexname * cterm) list -> thm -> thm
val infer_instantiate': Proof.context -> cterm option list -> thm -> thm
val zero_var_indexes_list: thm list -> thm list
val zero_var_indexes: thm -> thm
val implies_intr_hyps: thm -> thm
val rotate_prems: int -> thm -> thm
val rearrange_prems: int list -> thm -> thm
val RLN: thm list * (int * thm list) -> thm list
val RL: thm list * thm list -> thm list
val MRS: thm list * thm -> thm
val OF: thm * thm list -> thm
val COMP: thm * thm -> thm
val INCR_COMP: thm * thm -> thm
val COMP_INCR: thm * thm -> thm
val size_of_thm: thm -> int
val reflexive_thm: thm
val symmetric_thm: thm
val transitive_thm: thm
val extensional: thm -> thm
val asm_rl: thm
val cut_rl: thm
val revcut_rl: thm
val thin_rl: thm
end;
signature DRULE =
sig
include BASIC_DRULE
val outer_params: term -> (string * typ) list
val generalize: Names.set * Names.set -> thm -> thm
val list_comb: cterm * cterm list -> cterm
val strip_comb: cterm -> cterm * cterm list
val beta_conv: cterm -> cterm -> cterm
val flexflex_unique: Proof.context option -> thm -> thm
val export_without_context: thm -> thm
val export_without_context_open: thm -> thm
val store_thm: binding -> thm -> thm
val store_standard_thm: binding -> thm -> thm
val store_thm_open: binding -> thm -> thm
val store_standard_thm_open: binding -> thm -> thm
val multi_resolve: Proof.context option -> thm list -> thm -> thm Seq.seq
val multi_resolves: Proof.context option -> thm list -> thm list -> thm Seq.seq
val compose: thm * int * thm -> thm
val equals_cong: thm
val imp_cong: thm
val swap_prems_eq: thm
val imp_cong_rule: thm -> thm -> thm
val arg_cong_rule: cterm -> thm -> thm
val binop_cong_rule: cterm -> thm -> thm -> thm
val fun_cong_rule: thm -> cterm -> thm
val beta_eta_conversion: cterm -> thm
val eta_contraction_rule: thm -> thm
val norm_hhf_eq: thm
val norm_hhf_eqs: thm list
- val is_norm_hhf: term -> bool
+ val is_norm_hhf: {protect: bool} -> term -> bool
val norm_hhf: theory -> term -> term
val norm_hhf_cterm: Proof.context -> cterm -> cterm
val protect: cterm -> cterm
val protectI: thm
val protectD: thm
val protect_cong: thm
val implies_intr_protected: cterm list -> thm -> thm
val termI: thm
val mk_term: cterm -> thm
val dest_term: thm -> cterm
val cterm_rule: (thm -> thm) -> cterm -> cterm
val add_frees_cterm: cterm -> Cterms.set -> Cterms.set
val add_vars_cterm: cterm -> Cterms.set -> Cterms.set
val dummy_thm: thm
val free_dummy_thm: thm
val is_sort_constraint: term -> bool
val sort_constraintI: thm
val sort_constraint_eq: thm
val with_subgoal: int -> (thm -> thm) -> thm -> thm
val comp_no_flatten: thm * int -> int -> thm -> thm
val rename_bvars: (string * string) list -> thm -> thm
val rename_bvars': string option list -> thm -> thm
val incr_indexes: thm -> thm -> thm
val incr_indexes2: thm -> thm -> thm -> thm
val triv_forall_equality: thm
val distinct_prems_rl: thm
val equal_intr_rule: thm
val equal_elim_rule1: thm
val equal_elim_rule2: thm
val remdups_rl: thm
val abs_def: thm -> thm
end;
structure Drule: DRULE =
struct
(** some cterm->cterm operations: faster than calling cterm_of! **)
(* A1\<Longrightarrow>...An\<Longrightarrow>B goes to [A1,...,An], where B is not an implication *)
fun strip_imp_prems ct =
let val (cA, cB) = Thm.dest_implies ct
in cA :: strip_imp_prems cB end
handle TERM _ => [];
(* A1\<Longrightarrow>...An\<Longrightarrow>B goes to B, where B is not an implication *)
fun strip_imp_concl ct =
(case Thm.term_of ct of
Const ("Pure.imp", _) $ _ $ _ => strip_imp_concl (Thm.dest_arg ct)
| _ => ct);
(*The premises of a theorem, as a cterm list*)
val cprems_of = strip_imp_prems o Thm.cprop_of;
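(*editor's note, not part of the Isabelle sources: for a proposition
  A \<Longrightarrow> B \<Longrightarrow> C, strip_imp_prems yields [A, B] and strip_imp_concl yields C*)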
fun certify t = Thm.global_cterm_of (Context.the_global_context ()) t;
val implies = certify Logic.implies;
fun mk_implies (A, B) = Thm.apply (Thm.apply implies A) B;
(*cterm version of list_implies: [A1,...,An], B goes to \<lbrakk>A1;...;An\<rbrakk>\<Longrightarrow>B *)
fun list_implies([], B) = B
| list_implies(A::As, B) = mk_implies (A, list_implies(As,B));
(*cterm version of list_comb: maps (f, [t1,...,tn]) to f(t1,...,tn) *)
fun list_comb (f, []) = f
| list_comb (f, t::ts) = list_comb (Thm.apply f t, ts);
(*cterm version of strip_comb: maps f(t1,...,tn) to (f, [t1,...,tn]) *)
fun strip_comb ct =
let
fun stripc (p as (ct, cts)) =
let val (ct1, ct2) = Thm.dest_comb ct
in stripc (ct1, ct2 :: cts) end handle CTERM _ => p
in stripc (ct, []) end;
(*Beta-conversion for cterms, where x is an abstraction. Simply returns the rhs
of the meta-equality returned by the beta_conversion rule.*)
fun beta_conv x y =
Thm.dest_arg (Thm.cprop_of (Thm.beta_conversion false (Thm.apply x y)));
(** Standardization of rules **)
(*Generalization over a list of variables*)
val forall_intr_list = fold_rev Thm.forall_intr;
fun outer_params t =
let val vs = Term.strip_all_vars t
in Name.variant_list [] (map (Name.clean o #1) vs) ~~ map #2 vs end;
(*lift vars wrt. outermost goal parameters
-- reverses the effect of gen_all modulo higher-order unification*)
fun lift_all ctxt raw_goal raw_th =
let
val thy = Proof_Context.theory_of ctxt;
val goal = Thm.transfer_cterm thy raw_goal;
val th = Thm.transfer thy raw_th;
val maxidx = Thm.maxidx_of th;
val ps = outer_params (Thm.term_of goal)
|> map (fn (x, T) => Var ((x, maxidx + 1), Logic.incr_tvar (maxidx + 1) T));
val Ts = map Term.fastype_of ps;
val inst =
Vars.build (th |> (Thm.fold_terms {hyps = false} o Term.fold_aterms)
(fn t => fn inst =>
(case t of
Var (xi, T) =>
if Vars.defined inst (xi, T) then inst
else
let val ct = Thm.cterm_of ctxt (Term.list_comb (Var (xi, Ts ---> T), ps))
in Vars.add ((xi, T), ct) inst end
| _ => inst)));
in
th
|> Thm.instantiate (TVars.empty, inst)
|> fold_rev (Thm.forall_intr o Thm.cterm_of ctxt) ps
end;
(*direct generalization*)
fun generalize names th = Thm.generalize names (Thm.maxidx_of th + 1) th;
(*specialization over a list of cterms*)
val forall_elim_list = fold Thm.forall_elim;
(*maps A1,...,An |- B to \<lbrakk>A1;...;An\<rbrakk> \<Longrightarrow> B*)
val implies_intr_list = fold_rev Thm.implies_intr;
(*maps \<lbrakk>A1;...;An\<rbrakk> \<Longrightarrow> B and [A1,...,An] to B*)
fun implies_elim_list impth ths = fold Thm.elim_implies ths impth;
(*Reset Var indexes to zero, renaming to preserve distinctness*)
fun zero_var_indexes_list [] = []
| zero_var_indexes_list ths =
let
val (instT, inst) =
Term_Subst.zero_var_indexes_inst Name.context (map Thm.full_prop_of ths);
val tvars = TVars.build (fold Thm.add_tvars ths);
val the_tvar = the o TVars.lookup tvars;
val instT' = instT |> TVars.map (fn v => fn TVar (b, _) => Thm.rename_tvar b (the_tvar v));
val vars = Vars.build (fold (Thm.add_vars o Thm.instantiate (instT', Vars.empty)) ths);
val the_var = the o Vars.lookup vars;
val inst' =
inst |> Vars.map (fn v => fn Var (b, _) => Thm.var (b, Thm.ctyp_of_cterm (the_var v)));
in map (Thm.adjust_maxidx_thm ~1 o Thm.instantiate (instT', inst')) ths end;
val zero_var_indexes = singleton zero_var_indexes_list;
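(*editor's note, not part of the Isabelle sources: e.g. a rule mentioning ?x3
  and ?y7 is instantiated so that both schematic variables carry index 0,
  renaming them apart where necessary*)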
(** Standard form of object-rule: no hypotheses, flexflex constraints,
Frees, or outer quantifiers; all generality expressed by Vars of index 0.**)
(*Discharge all hypotheses.*)
fun implies_intr_hyps th = fold Thm.implies_intr (Thm.chyps_of th) th;
(*Squash a theorem's flexflex constraints provided it can be done uniquely.
This step can lose information.*)
fun flexflex_unique opt_ctxt th =
if null (Thm.tpairs_of th) then th
else
(case distinct Thm.eq_thm (Seq.list_of (Thm.flexflex_rule opt_ctxt th)) of
[th] => th
| [] => raise THM ("flexflex_unique: impossible constraints", 0, [th])
| _ => raise THM ("flexflex_unique: multiple unifiers", 0, [th]));
(* old-style export without context *)
val export_without_context_open =
implies_intr_hyps
#> Thm.forall_intr_frees
#> `Thm.maxidx_of
#-> (fn maxidx =>
Thm.forall_elim_vars (maxidx + 1)
#> Thm.strip_shyps
#> zero_var_indexes
#> Thm.varifyT_global);
val export_without_context =
flexflex_unique NONE
#> export_without_context_open
#> Thm.close_derivation \<^here>;
(*Rotates a rule's premises to the left by k*)
fun rotate_prems 0 = I
| rotate_prems k = Thm.permute_prems 0 k;
fun with_subgoal i f = rotate_prems (i - 1) #> f #> rotate_prems (1 - i);
(*Permute prems, where the i-th position in the argument list (counting from 0)
gives the position within the original thm to be transferred to position i.
Any remaining trailing positions are left unchanged.*)
val rearrange_prems =
let
fun rearr new [] thm = thm
| rearr new (p :: ps) thm =
rearr (new + 1)
(map (fn q => if new <= q andalso q < p then q + 1 else q) ps)
(Thm.permute_prems (new + 1) (new - p) (Thm.permute_prems new (p - new) thm))
in rearr 0 end;
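(*editor's note, not part of the Isabelle sources: e.g. rearrange_prems [1, 0]
  turns \<lbrakk>A; B; C\<rbrakk> \<Longrightarrow> D into \<lbrakk>B; A; C\<rbrakk> \<Longrightarrow> D -- position 0 of the result takes
  original premise 1, position 1 takes original premise 0, and the trailing
  premise C stays in place*)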
(*Resolution: multiple arguments, multiple results*)
local
fun res opt_ctxt th i rule =
(Thm.biresolution opt_ctxt false [(false, th)] i rule handle THM _ => Seq.empty)
|> Seq.map Thm.solve_constraints;
fun multi_res _ _ [] rule = Seq.single rule
| multi_res opt_ctxt i (th :: ths) rule =
Seq.maps (res opt_ctxt th i) (multi_res opt_ctxt (i + 1) ths rule);
in
fun multi_resolve opt_ctxt = multi_res opt_ctxt 1;
fun multi_resolves opt_ctxt facts rules =
Seq.maps (multi_resolve opt_ctxt facts) (Seq.of_list rules);
end;
(*For joining lists of rules*)
fun thas RLN (i, thbs) =
let
val resolve = Thm.biresolution NONE false (map (pair false) thas) i
fun resb thb = Seq.list_of (resolve thb) handle THM _ => []
in maps resb thbs |> map Thm.solve_constraints end;
fun thas RL thbs = thas RLN (1, thbs);
(*Isar-style multi-resolution*)
fun bottom_rl OF rls =
(case Seq.chop 2 (multi_resolve NONE rls bottom_rl) of
([th], _) => Thm.solve_constraints th
| ([], _) => raise THM ("OF: no unifiers", 0, bottom_rl :: rls)
| _ => raise THM ("OF: multiple unifiers", 0, bottom_rl :: rls));
(*Resolve a list of rules against bottom_rl from right to left;
makes proof trees*)
fun rls MRS bottom_rl = bottom_rl OF rls;
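(*editor's note, not part of the Isabelle sources: e.g. th OF [th1, th2]
  resolves th1 and th2 against the first two premises of th and fails unless
  the result is unique*)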
(*compose Q and \<lbrakk>...,Qi,Q(i+1),...\<rbrakk> \<Longrightarrow> R to \<lbrakk>...,Q(i+1),...\<rbrakk> \<Longrightarrow> R
with no lifting or renaming! Q may contain \<Longrightarrow> or meta-quantifiers.
ALWAYS deletes premise i *)
fun compose (tha, i, thb) =
Thm.bicompose NONE {flatten = true, match = false, incremented = false} (false, tha, 0) i thb
|> Seq.list_of |> distinct Thm.eq_thm
|> (fn [th] => Thm.solve_constraints th
| _ => raise THM ("compose: unique result expected", i, [tha, thb]));
(** theorem equality **)
(*Useful "distance" function for BEST_FIRST*)
val size_of_thm = size_of_term o Thm.full_prop_of;
(*** Meta-Rewriting Rules ***)
val read_prop = certify o Simple_Syntax.read_prop;
fun store_thm name th =
Context.>>> (Context.map_theory_result (Global_Theory.store_thm (name, th)));
fun store_thm_open name th =
Context.>>> (Context.map_theory_result (Global_Theory.store_thm_open (name, th)));
fun store_standard_thm name th = store_thm name (export_without_context th);
fun store_standard_thm_open name th = store_thm_open name (export_without_context_open th);
val reflexive_thm =
let val cx = certify (Var(("x",0),TVar(("'a",0),[])))
in store_standard_thm_open (Binding.make ("reflexive", \<^here>)) (Thm.reflexive cx) end;
val symmetric_thm =
let
val xy = read_prop "x::'a \<equiv> y::'a";
val thm = Thm.implies_intr xy (Thm.symmetric (Thm.assume xy));
in store_standard_thm_open (Binding.make ("symmetric", \<^here>)) thm end;
val transitive_thm =
let
val xy = read_prop "x::'a \<equiv> y::'a";
val yz = read_prop "y::'a \<equiv> z::'a";
val xythm = Thm.assume xy;
val yzthm = Thm.assume yz;
val thm = Thm.implies_intr yz (Thm.transitive xythm yzthm);
in store_standard_thm_open (Binding.make ("transitive", \<^here>)) thm end;
fun extensional eq =
let val eq' =
Thm.abstract_rule "x" (Thm.dest_arg (fst (Thm.dest_equals (Thm.cprop_of eq)))) eq
in Thm.equal_elim (Thm.eta_conversion (Thm.cprop_of eq')) eq' end;
val equals_cong =
store_standard_thm_open (Binding.make ("equals_cong", \<^here>))
(Thm.reflexive (read_prop "x::'a \<equiv> y::'a"));
val imp_cong =
let
val ABC = read_prop "A \<Longrightarrow> B::prop \<equiv> C::prop"
val AB = read_prop "A \<Longrightarrow> B"
val AC = read_prop "A \<Longrightarrow> C"
val A = read_prop "A"
in
store_standard_thm_open (Binding.make ("imp_cong", \<^here>))
(Thm.implies_intr ABC (Thm.equal_intr
(Thm.implies_intr AB (Thm.implies_intr A
(Thm.equal_elim (Thm.implies_elim (Thm.assume ABC) (Thm.assume A))
(Thm.implies_elim (Thm.assume AB) (Thm.assume A)))))
(Thm.implies_intr AC (Thm.implies_intr A
(Thm.equal_elim (Thm.symmetric (Thm.implies_elim (Thm.assume ABC) (Thm.assume A)))
(Thm.implies_elim (Thm.assume AC) (Thm.assume A)))))))
end;
val swap_prems_eq =
let
val ABC = read_prop "A \<Longrightarrow> B \<Longrightarrow> C"
val BAC = read_prop "B \<Longrightarrow> A \<Longrightarrow> C"
val A = read_prop "A"
val B = read_prop "B"
in
store_standard_thm_open (Binding.make ("swap_prems_eq", \<^here>))
(Thm.equal_intr
(Thm.implies_intr ABC (Thm.implies_intr B (Thm.implies_intr A
(Thm.implies_elim (Thm.implies_elim (Thm.assume ABC) (Thm.assume A)) (Thm.assume B)))))
(Thm.implies_intr BAC (Thm.implies_intr A (Thm.implies_intr B
(Thm.implies_elim (Thm.implies_elim (Thm.assume BAC) (Thm.assume B)) (Thm.assume A))))))
end;
val imp_cong_rule = Thm.combination o Thm.combination (Thm.reflexive implies);
fun arg_cong_rule ct th = Thm.combination (Thm.reflexive ct) th; (*AP_TERM in LCF/HOL*)
fun fun_cong_rule th ct = Thm.combination th (Thm.reflexive ct); (*AP_THM in LCF/HOL*)
fun binop_cong_rule ct th1 th2 = Thm.combination (arg_cong_rule ct th1) th2;
fun beta_eta_conversion ct =
let val thm = Thm.beta_conversion true ct
in Thm.transitive thm (Thm.eta_conversion (Thm.rhs_of thm)) end;
(*Contract all eta-redexes in the theorem, lest they give rise to needless abstractions*)
fun eta_contraction_rule th =
Thm.equal_elim (Thm.eta_conversion (Thm.cprop_of th)) th;
(* abs_def *)
(*
f ?x1 ... ?xn \<equiv> u
--------------------
f \<equiv> \<lambda>x1 ... xn. u
*)
local
fun contract_lhs th =
Thm.transitive (Thm.symmetric (beta_eta_conversion (#1 (Thm.dest_equals (Thm.cprop_of th))))) th;
fun var_args ct =
(case try Thm.dest_comb ct of
SOME (f, arg) =>
(case Thm.term_of arg of
Var ((x, _), _) => update (eq_snd (op aconvc)) (x, arg) (var_args f)
| _ => [])
| NONE => []);
in
fun abs_def th =
let
val th' = contract_lhs th;
val args = var_args (Thm.lhs_of th');
in contract_lhs (fold (uncurry Thm.abstract_rule) args th') end;
end;
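(*editor's note, not part of the Isabelle sources: e.g. a definitional equation
  of the shape f ?x ?y \<equiv> u ?x ?y is turned into f \<equiv> \<lambda>x y. u x y*)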
(*** Some useful meta-theorems ***)
(*The rule V/V, obtains assumption solving for eresolve_tac*)
val asm_rl =
store_standard_thm_open (Binding.make ("asm_rl", \<^here>))
(Thm.trivial (read_prop "?psi"));
(*Meta-level cut rule: \<lbrakk>V \<Longrightarrow> W; V\<rbrakk> \<Longrightarrow> W *)
val cut_rl =
store_standard_thm_open (Binding.make ("cut_rl", \<^here>))
(Thm.trivial (read_prop "?psi \<Longrightarrow> ?theta"));
(*Generalized elim rule for one conclusion; cut_rl with reversed premises:
\<lbrakk>PROP V; PROP V \<Longrightarrow> PROP W\<rbrakk> \<Longrightarrow> PROP W *)
val revcut_rl =
let
val V = read_prop "V";
val VW = read_prop "V \<Longrightarrow> W";
in
store_standard_thm_open (Binding.make ("revcut_rl", \<^here>))
(Thm.implies_intr V
(Thm.implies_intr VW (Thm.implies_elim (Thm.assume VW) (Thm.assume V))))
end;
(*for deleting an unwanted assumption*)
val thin_rl =
let
val V = read_prop "V";
val W = read_prop "W";
val thm = Thm.implies_intr V (Thm.implies_intr W (Thm.assume W));
in store_standard_thm_open (Binding.make ("thin_rl", \<^here>)) thm end;
(* (\<And>x. PROP ?V) \<equiv> PROP ?V -- allows removal of redundant parameters *)
val triv_forall_equality =
let
val V = read_prop "V";
val QV = read_prop "\<And>x::'a. V";
val x = certify (Free ("x", Term.aT []));
in
store_standard_thm_open (Binding.make ("triv_forall_equality", \<^here>))
(Thm.equal_intr (Thm.implies_intr QV (Thm.forall_elim x (Thm.assume QV)))
(Thm.implies_intr V (Thm.forall_intr x (Thm.assume V))))
end;
(* (PROP ?Phi \<Longrightarrow> PROP ?Phi \<Longrightarrow> PROP ?Psi) \<Longrightarrow>
(PROP ?Phi \<Longrightarrow> PROP ?Psi)
*)
val distinct_prems_rl =
let
val AAB = read_prop "Phi \<Longrightarrow> Phi \<Longrightarrow> Psi";
val A = read_prop "Phi";
in
store_standard_thm_open (Binding.make ("distinct_prems_rl", \<^here>))
(implies_intr_list [AAB, A]
(implies_elim_list (Thm.assume AAB) [Thm.assume A, Thm.assume A]))
end;
(* \<lbrakk>PROP ?phi \<Longrightarrow> PROP ?psi; PROP ?psi \<Longrightarrow> PROP ?phi\<rbrakk>
\<Longrightarrow> PROP ?phi \<equiv> PROP ?psi
Introduction rule for \<equiv> as a meta-theorem.
*)
val equal_intr_rule =
let
val PQ = read_prop "phi \<Longrightarrow> psi";
val QP = read_prop "psi \<Longrightarrow> phi";
in
store_standard_thm_open (Binding.make ("equal_intr_rule", \<^here>))
(Thm.implies_intr PQ
(Thm.implies_intr QP (Thm.equal_intr (Thm.assume PQ) (Thm.assume QP))))
end;
(* PROP ?phi \<equiv> PROP ?psi \<Longrightarrow> PROP ?phi \<Longrightarrow> PROP ?psi *)
val equal_elim_rule1 =
let
val eq = read_prop "phi::prop \<equiv> psi::prop";
val P = read_prop "phi";
in
store_standard_thm_open (Binding.make ("equal_elim_rule1", \<^here>))
(Thm.equal_elim (Thm.assume eq) (Thm.assume P) |> implies_intr_list [eq, P])
end;
(* PROP ?psi \<equiv> PROP ?phi \<Longrightarrow> PROP ?phi \<Longrightarrow> PROP ?psi *)
val equal_elim_rule2 =
store_standard_thm_open (Binding.make ("equal_elim_rule2", \<^here>))
(symmetric_thm RS equal_elim_rule1);
(* PROP ?phi \<Longrightarrow> PROP ?phi \<Longrightarrow> PROP ?psi \<Longrightarrow> PROP ?psi *)
val remdups_rl =
let
val P = read_prop "phi";
val Q = read_prop "psi";
val thm = implies_intr_list [P, P, Q] (Thm.assume Q);
in store_standard_thm_open (Binding.make ("remdups_rl", \<^here>)) thm end;
(** embedded terms and types **)
local
val A = certify (Free ("A", propT));
val axiom = Thm.unvarify_axiom (Context.the_global_context ());
val prop_def = axiom "Pure.prop_def";
val term_def = axiom "Pure.term_def";
val sort_constraint_def = axiom "Pure.sort_constraint_def";
val C = Thm.lhs_of sort_constraint_def;
val T = Thm.dest_arg C;
val CA = mk_implies (C, A);
in
(* protect *)
val protect = Thm.apply (certify Logic.protectC);
val protectI =
store_standard_thm (Binding.concealed (Binding.make ("protectI", \<^here>)))
(Thm.equal_elim (Thm.symmetric prop_def) (Thm.assume A));
val protectD =
store_standard_thm (Binding.concealed (Binding.make ("protectD", \<^here>)))
(Thm.equal_elim prop_def (Thm.assume (protect A)));
val protect_cong =
store_standard_thm_open (Binding.make ("protect_cong", \<^here>))
(Thm.reflexive (protect A));
fun implies_intr_protected asms th =
let val asms' = map protect asms in
implies_elim_list
(implies_intr_list asms th)
(map (fn asm' => Thm.assume asm' RS protectD) asms')
|> implies_intr_list asms'
end;
(* term *)
val termI =
store_standard_thm (Binding.concealed (Binding.make ("termI", \<^here>)))
(Thm.equal_elim (Thm.symmetric term_def) (Thm.forall_intr A (Thm.trivial A)));
fun mk_term ct =
let
val cT = Thm.ctyp_of_cterm ct;
val T = Thm.typ_of cT;
in
Thm.instantiate (TVars.make1 ((("'a", 0), []), cT), Vars.make1 ((("x", 0), T), ct)) termI
end;
fun dest_term th =
let val cprop = strip_imp_concl (Thm.cprop_of th) in
if can Logic.dest_term (Thm.term_of cprop) then
Thm.dest_arg cprop
else raise THM ("dest_term", 0, [th])
end;
fun cterm_rule f = dest_term o f o mk_term;
val add_frees_cterm = Cterms.add_frees o mk_term;
val add_vars_cterm = Cterms.add_vars o mk_term;
val dummy_thm = mk_term (certify Term.dummy_prop);
val free_dummy_thm = Thm.tag_free_dummy dummy_thm;
(* sort_constraint *)
fun is_sort_constraint (Const ("Pure.sort_constraint", _) $ Const ("Pure.type", _)) = true
| is_sort_constraint _ = false;
val sort_constraintI =
store_standard_thm (Binding.concealed (Binding.make ("sort_constraintI", \<^here>)))
(Thm.equal_elim (Thm.symmetric sort_constraint_def) (mk_term T));
val sort_constraint_eq =
store_standard_thm (Binding.concealed (Binding.make ("sort_constraint_eq", \<^here>)))
(Thm.equal_intr
(Thm.implies_intr CA (Thm.implies_elim (Thm.assume CA)
(Thm.unvarify_global (Context.the_global_context ()) sort_constraintI)))
(implies_intr_list [A, C] (Thm.assume A)));
end;
(* HHF normalization *)
(* (PROP ?phi \<Longrightarrow> (\<And>x. PROP ?psi x)) \<equiv> (\<And>x. PROP ?phi \<Longrightarrow> PROP ?psi x) *)
val norm_hhf_eq =
let
val aT = TFree ("'a", []);
val x = Free ("x", aT);
val phi = Free ("phi", propT);
val psi = Free ("psi", aT --> propT);
val cx = certify x;
val cphi = certify phi;
val lhs = certify (Logic.mk_implies (phi, Logic.all x (psi $ x)));
val rhs = certify (Logic.all x (Logic.mk_implies (phi, psi $ x)));
in
Thm.equal_intr
(Thm.implies_elim (Thm.assume lhs) (Thm.assume cphi)
|> Thm.forall_elim cx
|> Thm.implies_intr cphi
|> Thm.forall_intr cx
|> Thm.implies_intr lhs)
(Thm.implies_elim
(Thm.assume rhs |> Thm.forall_elim cx) (Thm.assume cphi)
|> Thm.forall_intr cx
|> Thm.implies_intr cphi
|> Thm.implies_intr rhs)
|> store_standard_thm_open (Binding.make ("norm_hhf_eq", \<^here>))
end;
val norm_hhf_prop = Logic.dest_equals (Thm.prop_of norm_hhf_eq);
val norm_hhf_eqs = [norm_hhf_eq, sort_constraint_eq];
-fun is_norm_hhf (Const ("Pure.sort_constraint", _)) = false
- | is_norm_hhf (Const ("Pure.imp", _) $ _ $ (Const ("Pure.all", _) $ _)) = false
- | is_norm_hhf (Abs _ $ _) = false
- | is_norm_hhf (t $ u) = is_norm_hhf t andalso is_norm_hhf u
- | is_norm_hhf (Abs (_, _, t)) = is_norm_hhf t
- | is_norm_hhf _ = true;
+fun is_norm_hhf {protect} =
+ let
+ fun is_norm (Const ("Pure.sort_constraint", _)) = false
+ | is_norm (Const ("Pure.imp", _) $ _ $ (Const ("Pure.all", _) $ _)) = false
+ | is_norm (Const ("Pure.prop", _) $ t) = protect orelse is_norm t
+ | is_norm (Abs _ $ _) = false
+ | is_norm (t $ u) = is_norm t andalso is_norm u
+ | is_norm (Abs (_, _, t)) = is_norm t
+ | is_norm _ = true;
+ in is_norm end;
fun norm_hhf thy t =
- if is_norm_hhf t then t
+ if is_norm_hhf {protect = false} t then t
else Pattern.rewrite_term thy [norm_hhf_prop] [] t;
fun norm_hhf_cterm ctxt raw_ct =
let
val thy = Proof_Context.theory_of ctxt;
val ct = Thm.transfer_cterm thy raw_ct;
val t = Thm.term_of ct;
- in if is_norm_hhf t then ct else Thm.cterm_of ctxt (norm_hhf thy t) end;
+ in if is_norm_hhf {protect = false} t then ct else Thm.cterm_of ctxt (norm_hhf thy t) end;
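(*editor's note, not part of the Isabelle sources: norm_hhf moves parameters in
  front of assumptions, e.g. A \<Longrightarrow> (\<And>x. B x) is rewritten to \<And>x. A \<Longrightarrow> B x as
  stated by norm_hhf_eq; terms already in HHF form are returned unchanged*)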
(* var indexes *)
fun incr_indexes th = Thm.incr_indexes (Thm.maxidx_of th + 1);
fun incr_indexes2 th1 th2 =
Thm.incr_indexes (Int.max (Thm.maxidx_of th1, Thm.maxidx_of th2) + 1);
local
(*compose Q and \<lbrakk>Q1,Q2,...,Qk\<rbrakk> \<Longrightarrow> R to \<lbrakk>Q2,...,Qk\<rbrakk> \<Longrightarrow> R getting unique result*)
fun comp incremented th1 th2 =
Thm.bicompose NONE {flatten = true, match = false, incremented = incremented}
(false, th1, 0) 1 th2
|> Seq.list_of |> distinct Thm.eq_thm
|> (fn [th] => Thm.solve_constraints th | _ => raise THM ("COMP", 1, [th1, th2]));
in
fun th1 COMP th2 = comp false th1 th2;
fun th1 INCR_COMP th2 = comp true (incr_indexes th2 th1) th2;
fun th1 COMP_INCR th2 = comp true th1 (incr_indexes th1 th2);
end;
fun comp_no_flatten (th, n) i rule =
(case distinct Thm.eq_thm (Seq.list_of
(Thm.bicompose NONE {flatten = false, match = false, incremented = true}
(false, th, n) i (incr_indexes th rule))) of
[th'] => Thm.solve_constraints th'
| [] => raise THM ("comp_no_flatten", i, [th, rule])
| _ => raise THM ("comp_no_flatten: unique result expected", i, [th, rule]));
(** variations on Thm.instantiate **)
fun instantiate_normalize instpair th =
Thm.adjust_maxidx_thm ~1 (Thm.instantiate instpair th COMP_INCR asm_rl);
fun instantiate'_normalize Ts ts th =
Thm.adjust_maxidx_thm ~1 (Thm.instantiate' Ts ts th COMP_INCR asm_rl);
(*instantiation with type-inference for variables*)
fun infer_instantiate_types _ [] th = th
| infer_instantiate_types ctxt args raw_th =
let
val thy = Proof_Context.theory_of ctxt;
val th = Thm.transfer thy raw_th;
fun infer ((xi, T), cu) (tyenv, maxidx) =
let
val _ = Thm.ctyp_of ctxt T;
val _ = Thm.transfer_cterm thy cu;
val U = Thm.typ_of_cterm cu;
val maxidx' = maxidx
|> Integer.max (#2 xi)
|> Term.maxidx_typ T
|> Integer.max (Thm.maxidx_of_cterm cu);
val (tyenv', maxidx'') = Sign.typ_unify thy (T, U) (tyenv, maxidx')
handle Type.TUNIFY =>
let
val t = Var (xi, T);
val u = Thm.term_of cu;
in
raise THM ("infer_instantiate_types: type " ^
Syntax.string_of_typ ctxt (Envir.norm_type tyenv T) ^ " of variable " ^
Syntax.string_of_term ctxt (Term.map_types (Envir.norm_type tyenv) t) ^
"\ncannot be unified with type " ^
Syntax.string_of_typ ctxt (Envir.norm_type tyenv U) ^ " of term " ^
Syntax.string_of_term ctxt (Term.map_types (Envir.norm_type tyenv) u),
0, [th])
end;
in (tyenv', maxidx'') end;
val (tyenv, _) = fold infer args (Vartab.empty, 0);
val instT =
TVars.build (tyenv |> Vartab.fold (fn (xi, (S, T)) =>
TVars.add ((xi, S), Thm.ctyp_of ctxt (Envir.norm_type tyenv T))));
val inst =
Vars.build (args |> fold (fn ((xi, T), cu) =>
Vars.add ((xi, Envir.norm_type tyenv T),
Thm.instantiate_cterm (instT, Vars.empty) (Thm.transfer_cterm thy cu))));
in instantiate_normalize (instT, inst) th end
handle CTERM (msg, _) => raise THM (msg, 0, [raw_th])
| TERM (msg, _) => raise THM (msg, 0, [raw_th])
| TYPE (msg, _, _) => raise THM (msg, 0, [raw_th]);
fun infer_instantiate _ [] th = th
| infer_instantiate ctxt args th =
let
val vars = Term.add_vars (Thm.full_prop_of th) [];
val dups = duplicates (eq_fst op =) vars;
val _ = null dups orelse
raise THM ("infer_instantiate: inconsistent types for variables " ^
commas_quote (map (Syntax.string_of_term (Config.put show_types true ctxt) o Var) dups),
0, [th]);
val args' = args |> map_filter (fn (xi, cu) =>
AList.lookup (op =) vars xi |> Option.map (fn T => ((xi, T), cu)));
in infer_instantiate_types ctxt args' th end;
fun infer_instantiate' ctxt args th =
let
val vars = build_rev (Term.add_vars (Thm.full_prop_of th));
val args' = zip_options vars args
handle ListPair.UnequalLengths =>
raise THM ("infer_instantiate': more instantiations than variables in thm", 0, [th]);
in infer_instantiate_types ctxt args' th end;
(** renaming of bound variables **)
(* replace bound variables x_i in thm by y_i *)
(* where vs = [(x_1, y_1), ..., (x_n, y_n)] *)
fun rename_bvars [] thm = thm
| rename_bvars vs thm =
let
fun rename (Abs (x, T, t)) = Abs (AList.lookup (op =) vs x |> the_default x, T, rename t)
| rename (t $ u) = rename t $ rename u
| rename a = a;
in Thm.renamed_prop (rename (Thm.prop_of thm)) thm end;
(* renaming in left-to-right order *)
fun rename_bvars' xs thm =
let
fun rename [] t = ([], t)
| rename (x' :: xs) (Abs (x, T, t)) =
let val (xs', t') = rename xs t
in (xs', Abs (the_default x x', T, t')) end
| rename xs (t $ u) =
let
val (xs', t') = rename xs t;
val (xs'', u') = rename xs' u;
in (xs'', t' $ u') end
| rename xs a = (xs, a);
in
(case rename xs (Thm.prop_of thm) of
([], prop') => Thm.renamed_prop prop' thm
| _ => error "More names than abstractions in theorem")
end;
end;
structure Basic_Drule: BASIC_DRULE = Drule;
open Basic_Drule;
diff --git a/src/Pure/ex/Def.thy b/src/Pure/ex/Def.thy
--- a/src/Pure/ex/Def.thy
+++ b/src/Pure/ex/Def.thy
@@ -1,112 +1,106 @@
(* Title: Pure/ex/Def.thy
Author: Makarius
Primitive constant definition, without fact definition;
automatic expansion via Simplifier (simproc).
*)
theory Def
imports Pure
keywords "def" :: thy_defn
begin
ML \<open>
signature DEF =
sig
val get_def: Proof.context -> cterm -> thm option
val def: (binding * typ option * mixfix) option ->
(binding * typ option * mixfix) list -> term -> local_theory -> term * local_theory
val def_cmd: (binding * string option * mixfix) option ->
(binding * string option * mixfix) list -> string -> local_theory -> term * local_theory
end;
structure Def: DEF =
struct
(* context data *)
-type def = {lhs: term, mk_eq: morphism -> thm};
+type def = {lhs: term, eq: thm};
val eq_def : def * def -> bool = op aconv o apply2 #lhs;
-fun transform_def phi ({lhs, mk_eq}: def) =
- {lhs = Morphism.term phi lhs, mk_eq = Morphism.transform phi mk_eq};
+fun transform_def phi ({lhs, eq}: def) =
+ {lhs = Morphism.term phi lhs, eq = Morphism.thm phi eq};
structure Data = Generic_Data
(
type T = def Item_Net.T;
val empty : T = Item_Net.init eq_def (single o #lhs);
val merge = Item_Net.merge;
);
fun declare_def lhs eq lthy =
- let
- val eq0 = Thm.trim_context eq;
- val def: def = {lhs = lhs, mk_eq = fn phi => Morphism.thm phi eq0};
- in
- lthy |> Local_Theory.declaration {syntax = false, pervasive = true}
- (fn phi => (Data.map o Item_Net.update) (transform_def phi def))
+ let val def0: def = {lhs = lhs, eq = Thm.trim_context eq} in
+ lthy |> Local_Theory.declaration {syntax = false, pervasive = true, pos = \<^here>}
+ (fn phi => fn context =>
+ let val psi = Morphism.set_trim_context'' context phi
+ in (Data.map o Item_Net.update) (transform_def psi def0) context end)
end;
fun get_def ctxt ct =
let
val thy = Proof_Context.theory_of ctxt;
val data = Data.get (Context.Proof ctxt);
val t = Thm.term_of ct;
- fun match_def {lhs, mk_eq} =
+ fun match_def {lhs, eq} =
if Pattern.matches thy (lhs, t) then
- let
- val inst = Thm.match (Thm.cterm_of ctxt lhs, ct);
- val eq =
- Morphism.form mk_eq
- |> Thm.transfer thy
- |> Thm.instantiate inst;
- in SOME eq end
+ let val inst = Thm.match (Thm.cterm_of ctxt lhs, ct)
+ in SOME (Thm.instantiate inst (Thm.transfer thy eq)) end
else NONE;
in Item_Net.retrieve_matching data t |> get_first match_def end;
(* simproc setup *)
val _ =
(Theory.setup o Named_Target.theory_map)
(Simplifier.define_simproc \<^binding>\<open>expand_def\<close>
{lhss = [Free ("x", TFree ("'a", []))], proc = K get_def});
(* Isar command *)
fun gen_def prep_spec raw_var raw_params raw_spec lthy =
let
val ((vars, xs, get_pos, spec), _) = lthy
|> prep_spec (the_list raw_var) raw_params [] raw_spec;
val (((x, _), rhs), prove) = Local_Defs.derived_def lthy get_pos {conditional = false} spec;
val _ = Name.reject_internal (x, []);
val (b, mx) =
(case (vars, xs) of
([], []) => (Binding.make (x, (case get_pos x of [] => Position.none | p :: _ => p)), NoSyn)
| ([(b, _, mx)], [y]) =>
if x = y then (b, mx)
else
error ("Head of definition " ^ quote x ^ " differs from declaration " ^ quote y ^
Position.here (Binding.pos_of b)));
val ((lhs, (_, eq)), lthy') = lthy
|> Local_Theory.define_internal ((b, mx), (Binding.empty_atts, rhs));
(*sanity check for original specification*)
val _: thm = prove lthy' eq;
in (lhs, declare_def lhs eq lthy') end;
val def = gen_def Specification.check_spec_open;
val def_cmd = gen_def Specification.read_spec_open;
val _ =
Outer_Syntax.local_theory \<^command_keyword>\<open>def\<close>
"primitive constant definition, without fact definition"
(Scan.option Parse_Spec.constdecl -- Parse.prop -- Parse.for_fixes
>> (fn ((decl, spec), params) => #2 o def_cmd decl params spec));
end;
\<close>
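(*editor's illustrative sketch, not part of the Isabelle sources: the command
  might be invoked roughly as
    def \<open>K \<equiv> \<lambda>x y. x\<close>
  introducing the constant K without registering the defining equation as a
  named fact; the expand_def simproc unfolds it on demand*)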
end
diff --git a/src/Pure/goal.ML b/src/Pure/goal.ML
--- a/src/Pure/goal.ML
+++ b/src/Pure/goal.ML
@@ -1,336 +1,336 @@
(* Title: Pure/goal.ML
Author: Makarius
Goals in tactical theorem proving, with support for forked proofs.
*)
signature BASIC_GOAL =
sig
val quick_and_dirty: bool Config.T
val SELECT_GOAL: tactic -> int -> tactic
val PREFER_GOAL: tactic -> int -> tactic
val CONJUNCTS: tactic -> int -> tactic
val PRECISE_CONJUNCTS: int -> tactic -> int -> tactic
end;
signature GOAL =
sig
include BASIC_GOAL
val init: cterm -> thm
val protect: int -> thm -> thm
val conclude: thm -> thm
val check_finished: Proof.context -> thm -> thm
val finish: Proof.context -> thm -> thm
val norm_result: Proof.context -> thm -> thm
val skip_proofs_enabled: unit -> bool
val future_result: Proof.context -> thm future -> term -> thm
val prove_internal: Proof.context -> cterm list -> cterm -> (thm list -> tactic) -> thm
val prove_common: Proof.context -> int option -> string list -> term list -> term list ->
({prems: thm list, context: Proof.context} -> tactic) -> thm list
val prove_future: Proof.context -> string list -> term list -> term ->
({prems: thm list, context: Proof.context} -> tactic) -> thm
val prove: Proof.context -> string list -> term list -> term ->
({prems: thm list, context: Proof.context} -> tactic) -> thm
val prove_global_future: theory -> string list -> term list -> term ->
({prems: thm list, context: Proof.context} -> tactic) -> thm
val prove_global: theory -> string list -> term list -> term ->
({prems: thm list, context: Proof.context} -> tactic) -> thm
val prove_sorry: Proof.context -> string list -> term list -> term ->
({prems: thm list, context: Proof.context} -> tactic) -> thm
val prove_sorry_global: theory -> string list -> term list -> term ->
({prems: thm list, context: Proof.context} -> tactic) -> thm
val restrict: int -> int -> thm -> thm
val unrestrict: int -> thm -> thm
val conjunction_tac: int -> tactic
val precise_conjunction_tac: int -> int -> tactic
val recover_conjunction_tac: tactic
val norm_hhf_tac: Proof.context -> int -> tactic
val assume_rule_tac: Proof.context -> int -> tactic
end;
structure Goal: GOAL =
struct
(** goals **)
(*
-------- (init)
C \<Longrightarrow> #C
*)
fun init C = Thm.instantiate (TVars.empty, Vars.make1 ((("A", 0), propT), C)) Drule.protectI;
(*
A1 \<Longrightarrow> ... \<Longrightarrow> An \<Longrightarrow> C
------------------------ (protect n)
A1 \<Longrightarrow> ... \<Longrightarrow> An \<Longrightarrow> #C
*)
fun protect n th = Drule.comp_no_flatten (th, n) 1 Drule.protectI;
(*
A \<Longrightarrow> ... \<Longrightarrow> #C
---------------- (conclude)
A \<Longrightarrow> ... \<Longrightarrow> C
*)
fun conclude th = Drule.comp_no_flatten (th, Thm.nprems_of th) 1 Drule.protectD;
(*
#C
--- (finish)
C
*)
fun check_finished ctxt th =
if Thm.no_prems th then th
else raise THM ("Proof failed.\n" ^ Goal_Display.string_of_goal ctxt th, 0, [th]);
fun finish ctxt = check_finished ctxt #> conclude;
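(*editor's note, not part of the Isabelle sources: init wraps the conclusion
  into the protected form C \<Longrightarrow> #C so that tactics cannot interfere with it;
  once all premises are solved, finish = check_finished #> conclude removes the
  protection marker and yields the plain theorem C*)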
(** results **)
(* normal form *)
fun norm_result ctxt =
Drule.flexflex_unique (SOME ctxt)
#> Raw_Simplifier.norm_hhf_protect ctxt
#> Thm.strip_shyps
#> Drule.zero_var_indexes;
(* scheduling parameters *)
fun skip_proofs_enabled () =
let val skip = Options.default_bool "skip_proofs" in
if Proofterm.proofs_enabled () andalso skip then
(warning "Proof terms enabled -- cannot skip proofs"; false)
else skip
end;
(* future_result *)
fun future_result ctxt result prop =
let
val assms = Assumption.all_assms_of ctxt;
val As = map Thm.term_of assms;
val frees = Frees.build (fold Frees.add_frees (prop :: As));
val xs = Frees.fold_rev (cons o Thm.cterm_of ctxt o Free o #1) frees [];
val tfrees = TFrees.build (fold TFrees.add_tfrees (prop :: As));
val Ts = Names.build (TFrees.fold (Names.add_set o #1 o #1) tfrees);
val instT =
TVars.build (tfrees |> TFrees.fold (fn ((a, S), _) =>
TVars.add (((a, 0), S), Thm.ctyp_of ctxt (TFree (a, S)))));
val global_prop =
Logic.list_implies (As, prop)
|> Frees.fold_rev (Logic.all o Free o #1) frees
|> Logic.varify_types_global
|> Thm.cterm_of ctxt
|> Thm.weaken_sorts' ctxt;
val global_result = result |> Future.map
(Drule.flexflex_unique (SOME ctxt) #>
Drule.implies_intr_list assms #>
Drule.forall_intr_list xs #>
Thm.adjust_maxidx_thm ~1 #>
Thm.generalize (Ts, Names.empty) 0 #>
Thm.strip_shyps #>
Thm.solve_constraints);
val local_result =
Thm.future global_result global_prop
|> Thm.close_derivation \<^here>
|> Thm.instantiate (instT, Vars.empty)
|> Drule.forall_elim_list xs
|> fold (Thm.elim_implies o Thm.assume) assms
|> Thm.solve_constraints;
in local_result end;
(** tactical theorem proving **)
(* prove_internal -- minimal checks, no normalization of result! *)
fun prove_internal ctxt casms cprop tac =
(case SINGLE (tac (map (Assumption.assume ctxt) casms)) (init cprop) of
SOME th => Drule.implies_intr_list casms (finish ctxt th)
| NONE => error "Tactic failed");
(* prove variations *)
fun prove_common ctxt fork_pri xs asms props tac =
let
val thy = Proof_Context.theory_of ctxt;
val schematic = exists Term.is_schematic props;
val immediate = is_none fork_pri;
val future = Future.proofs_enabled 1 andalso not (Proofterm.proofs_enabled ());
val skip = not immediate andalso not schematic andalso future andalso skip_proofs_enabled ();
val pos = Position.thread_data ();
fun err msg =
cat_error msg
("The error(s) above occurred for the goal statement" ^ Position.here pos ^ ":\n" ^
Syntax.string_of_term ctxt (Logic.list_implies (asms, Logic.mk_conjunction_list props)));
fun cert_safe t = Thm.cterm_of ctxt (Envir.beta_norm (Term.no_dummy_patterns t))
handle TERM (msg, _) => err msg | TYPE (msg, _, _) => err msg;
val casms = map cert_safe asms;
val cprops = map cert_safe props;
val (prems, ctxt') = ctxt
|> Variable.add_fixes_direct xs
|> fold Variable.declare_term (asms @ props)
|> Assumption.add_assumes casms
||> Variable.set_body true;
val stmt = Thm.weaken_sorts' ctxt' (Conjunction.mk_conjunction_balanced cprops);
fun tac' args st =
if skip then ALLGOALS (Skip_Proof.cheat_tac ctxt) st before Skip_Proof.report ctxt
else tac args st;
fun result () =
(case SINGLE (tac' {prems = prems, context = ctxt'}) (init stmt) of
NONE => err "Tactic failed"
| SOME st =>
let
val _ =
Context.subthy_id (Thm.theory_id st, Context.theory_id thy) orelse
err "Bad background theory of goal state";
val res =
(finish ctxt' st
|> Drule.flexflex_unique (SOME ctxt')
|> Thm.check_shyps ctxt'
|> Thm.check_hyps (Context.Proof ctxt'))
handle THM (msg, _, _) => err msg | ERROR msg => err msg;
in
if is_none (Unify.matcher (Context.Proof ctxt') [Thm.term_of stmt] [Thm.prop_of res])
then err ("Proved a different theorem: " ^ Syntax.string_of_term ctxt' (Thm.prop_of res))
else res
end);
val res =
if immediate orelse schematic orelse not future orelse skip then result ()
else
future_result ctxt'
(Execution.fork {name = "Goal.prove", pos = Position.thread_data (), pri = the fork_pri}
result)
(Thm.term_of stmt);
in
res
|> Thm.close_derivation \<^here>
|> Conjunction.elim_balanced (length props)
- |> map (Assumption.export false ctxt' ctxt)
+ |> map (Assumption.export ctxt' ctxt)
|> Variable.export ctxt' ctxt
|> map Drule.zero_var_indexes
end;
fun prove_future_pri ctxt pri xs asms prop tac =
hd (prove_common ctxt (SOME pri) xs asms [prop] tac);
fun prove_future ctxt = prove_future_pri ctxt ~1;
fun prove ctxt xs asms prop tac = hd (prove_common ctxt NONE xs asms [prop] tac);
fun prove_global_future thy xs asms prop tac =
Drule.export_without_context (prove_future (Proof_Context.init_global thy) xs asms prop tac);
fun prove_global thy xs asms prop tac =
Drule.export_without_context (prove (Proof_Context.init_global thy) xs asms prop tac);
(* skip proofs *)
val quick_and_dirty = Config.declare_option_bool ("quick_and_dirty", \<^here>);
fun prove_sorry ctxt xs asms prop tac =
if Config.get ctxt quick_and_dirty then
prove ctxt xs asms prop (fn _ => ALLGOALS (Skip_Proof.cheat_tac ctxt))
else (if Future.proofs_enabled 1 then prove_future_pri ctxt ~2 else prove ctxt) xs asms prop tac;
fun prove_sorry_global thy xs asms prop tac =
Drule.export_without_context
(prove_sorry (Proof_Context.init_global thy) xs asms prop tac);
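(*Illustrative sketch (editorial addition, not part of the original sources):
  a typical invocation of "prove" above, assuming a context ctxt in which the
  name "A" can still be fixed.  The goal A \<Longrightarrow> A is closed by assumption; the
  exported result is the schematic theorem ?A \<Longrightarrow> ?A.*)
fun prove_trivial_example ctxt =
  let val A = Free ("A", propT) in
    prove ctxt ["A"] [] (Logic.mk_implies (A, A))
      (fn {context = goal_ctxt, prems = _} => assume_tac goal_ctxt 1)
  end;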
(** goal structure **)
(* rearrange subgoals *)
fun restrict i n st =
if i < 1 orelse n < 1 orelse i + n - 1 > Thm.nprems_of st
then raise THM ("Goal.restrict", i, [st])
else rotate_prems (i - 1) st |> protect n;
fun unrestrict i = conclude #> rotate_prems (1 - i);
(*with structural marker*)
fun SELECT_GOAL tac i st =
if Thm.nprems_of st = 1 andalso i = 1 then tac st
else (PRIMITIVE (restrict i 1) THEN tac THEN PRIMITIVE (unrestrict i)) st;
(*without structural marker*)
fun PREFER_GOAL tac i st =
if i < 1 orelse i > Thm.nprems_of st then Seq.empty
else (PRIMITIVE (rotate_prems (i - 1)) THEN tac THEN PRIMITIVE (rotate_prems (1 - i))) st;
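(*Illustrative sketch (editorial addition): SELECT_GOAL restricts a whole-state
  tactic to a single subgoal, e.g. solving exactly subgoal i by assumption.*)
fun assume_goal_tac ctxt = SELECT_GOAL (ALLGOALS (assume_tac ctxt));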
(* multiple goals *)
fun precise_conjunction_tac 0 i = eq_assume_tac i
| precise_conjunction_tac 1 i = SUBGOAL (K all_tac) i
| precise_conjunction_tac n i = PRIMITIVE (Drule.with_subgoal i (Conjunction.curry_balanced n));
val adhoc_conjunction_tac = REPEAT_ALL_NEW
(SUBGOAL (fn (goal, i) =>
if can Logic.dest_conjunction goal then resolve0_tac [Conjunction.conjunctionI] i
else no_tac));
val conjunction_tac = SUBGOAL (fn (goal, i) =>
precise_conjunction_tac (length (Logic.dest_conjunctions goal)) i ORELSE
TRY (adhoc_conjunction_tac i));
val recover_conjunction_tac = PRIMITIVE (fn th =>
Conjunction.uncurry_balanced (Thm.nprems_of th) th);
fun PRECISE_CONJUNCTS n tac =
SELECT_GOAL (precise_conjunction_tac n 1
THEN tac
THEN recover_conjunction_tac);
fun CONJUNCTS tac =
SELECT_GOAL (conjunction_tac 1
THEN tac
THEN recover_conjunction_tac);
(* hhf normal form *)
fun norm_hhf_tac ctxt =
resolve_tac ctxt [Drule.asm_rl] (*cheap approximation -- thanks to builtin Logic.flatten_params*)
THEN' SUBGOAL (fn (t, i) =>
- if Drule.is_norm_hhf t then all_tac
+ if Drule.is_norm_hhf {protect = false} t then all_tac
else rewrite_goal_tac ctxt Drule.norm_hhf_eqs i);
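(*Illustrative sketch (editorial addition): putting every subgoal of the goal
  state into HHF normal form.*)
fun norm_hhf_all_tac ctxt = ALLGOALS (norm_hhf_tac ctxt);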
(* non-atomic goal assumptions *)
fun non_atomic (Const ("Pure.imp", _) $ _ $ _) = true
| non_atomic (Const ("Pure.all", _) $ _) = true
| non_atomic _ = false;
fun assume_rule_tac ctxt = norm_hhf_tac ctxt THEN' CSUBGOAL (fn (goal, i) =>
let
val ((_, goal'), ctxt') = Variable.focus_cterm NONE goal ctxt;
val goal'' = Drule.cterm_rule (singleton (Variable.export ctxt' ctxt)) goal';
val Rs = filter (non_atomic o Thm.term_of) (Drule.strip_imp_prems goal'');
val tacs = Rs |> map (fn R =>
eresolve_tac ctxt [Raw_Simplifier.norm_hhf ctxt (Thm.trivial R)] THEN_ALL_NEW assume_tac ctxt);
in fold_rev (curry op APPEND') tacs (K no_tac) i end);
end;
structure Basic_Goal: BASIC_GOAL = Goal;
open Basic_Goal;
diff --git a/src/Pure/morphism.ML b/src/Pure/morphism.ML
--- a/src/Pure/morphism.ML
+++ b/src/Pure/morphism.ML
@@ -1,175 +1,260 @@
(* Title: Pure/morphism.ML
Author: Makarius
Abstract morphisms on formal entities.
*)
infix 1 $>
signature BASIC_MORPHISM =
sig
type morphism
- type declaration = morphism -> Context.generic -> Context.generic
val $> : morphism * morphism -> morphism
end
signature MORPHISM =
sig
include BASIC_MORPHISM
exception MORPHISM of string * exn
+ val the_theory: theory option -> theory
+ val set_context: theory -> morphism -> morphism
+ val set_context': Proof.context -> morphism -> morphism
+ val set_context'': Context.generic -> morphism -> morphism
+ val reset_context: morphism -> morphism
val morphism: string ->
- {binding: (binding -> binding) list,
- typ: (typ -> typ) list,
- term: (term -> term) list,
- fact: (thm list -> thm list) list} -> morphism
+ {binding: (theory option -> binding -> binding) list,
+ typ: (theory option -> typ -> typ) list,
+ term: (theory option -> term -> term) list,
+ fact: (theory option -> thm list -> thm list) list} -> morphism
val is_identity: morphism -> bool
+ val is_empty: morphism -> bool
val pretty: morphism -> Pretty.T
val binding: morphism -> binding -> binding
val binding_prefix: morphism -> (string * bool) list
val typ: morphism -> typ -> typ
val term: morphism -> term -> term
val fact: morphism -> thm list -> thm list
val thm: morphism -> thm -> thm
val cterm: morphism -> cterm -> cterm
val identity: morphism
+ val default: morphism option -> morphism
val compose: morphism -> morphism -> morphism
- val transform: morphism -> (morphism -> 'a) -> morphism -> 'a
- val form: (morphism -> 'a) -> 'a
+ type 'a entity
+ val entity: (morphism -> 'a) -> 'a entity
+ val entity_reset_context: 'a entity -> 'a entity
+ val entity_set_context: theory -> 'a entity -> 'a entity
+ val entity_set_context': Proof.context -> 'a entity -> 'a entity
+ val entity_set_context'': Context.generic -> 'a entity -> 'a entity
+ val transform: morphism -> 'a entity -> 'a entity
+ val transform_reset_context: morphism -> 'a entity -> 'a entity
+ val form: 'a entity -> 'a
+ val form_entity: (morphism -> 'a) -> 'a
+ type declaration = morphism -> Context.generic -> Context.generic
+ type declaration_entity = (Context.generic -> Context.generic) entity
val binding_morphism: string -> (binding -> binding) -> morphism
+ val typ_morphism': string -> (theory -> typ -> typ) -> morphism
val typ_morphism: string -> (typ -> typ) -> morphism
+ val term_morphism': string -> (theory -> term -> term) -> morphism
val term_morphism: string -> (term -> term) -> morphism
+ val fact_morphism': string -> (theory -> thm list -> thm list) -> morphism
val fact_morphism: string -> (thm list -> thm list) -> morphism
+ val thm_morphism': string -> (theory -> thm -> thm) -> morphism
val thm_morphism: string -> (thm -> thm) -> morphism
val transfer_morphism: theory -> morphism
val transfer_morphism': Proof.context -> morphism
val transfer_morphism'': Context.generic -> morphism
val trim_context_morphism: morphism
+ val set_trim_context: theory -> morphism -> morphism
+ val set_trim_context': Proof.context -> morphism -> morphism
+ val set_trim_context'': Context.generic -> morphism -> morphism
val instantiate_frees_morphism: ctyp TFrees.table * cterm Frees.table -> morphism
val instantiate_morphism: ctyp TVars.table * cterm Vars.table -> morphism
end;
structure Morphism: MORPHISM =
struct
(* named functions *)
-type 'a funs = (string * ('a -> 'a)) list;
+type 'a funs = (string * (theory option -> 'a -> 'a)) list;
exception MORPHISM of string * exn;
-fun app (name, f) x = f x
+fun app context (name, f) x = f context x
handle exn =>
if Exn.is_interrupt exn then Exn.reraise exn else raise MORPHISM (name, exn);
-fun apply fs = fold_rev app fs;
+
+(* optional context *)
+
+fun the_theory (SOME thy) = thy
+ | the_theory NONE = raise Fail "Morphism lacks theory context";
+
+fun join_transfer (SOME thy) = Thm.join_transfer thy
+ | join_transfer NONE = I;
+
+val join_context = join_options Context.join_certificate_theory;
(* type morphism *)
datatype morphism = Morphism of
- {names: string list,
+ {context: theory option,
+ names: string list,
binding: binding funs,
typ: typ funs,
term: term funs,
fact: thm list funs};
-type declaration = morphism -> Context.generic -> Context.generic;
+fun rep (Morphism args) = args;
+
+fun apply which phi =
+ let val args = rep phi
+ in fold_rev (app (#context args)) (which args) end;
+
+fun put_context context (Morphism {context = _, names, binding, typ, term, fact}) =
+ Morphism {context = context, names = names, binding = binding, typ = typ, term = term, fact = fact};
+
+val set_context = put_context o SOME;
+val set_context' = set_context o Proof_Context.theory_of;
+val set_context'' = set_context o Context.theory_of;
+val reset_context = put_context NONE;
fun morphism a {binding, typ, term, fact} =
Morphism {
+ context = NONE,
names = if a = "" then [] else [a],
binding = map (pair a) binding,
typ = map (pair a) typ,
term = map (pair a) term,
fact = map (pair a) fact};
(*syntactic test only!*)
-fun is_identity (Morphism {names, binding, typ, term, fact}) =
+fun is_identity (Morphism {context = _, names, binding, typ, term, fact}) =
null names andalso null binding andalso null typ andalso null term andalso null fact;
-fun pretty (Morphism {names, ...}) = Pretty.enum ";" "{" "}" (map Pretty.str (rev names));
+fun is_empty phi = is_none (#context (rep phi)) andalso is_identity phi;
+
+fun pretty phi = Pretty.enum ";" "{" "}" (map Pretty.str (rev (#names (rep phi))));
val _ = ML_system_pp (fn _ => fn _ => Pretty.to_polyml o pretty);
-fun binding (Morphism {binding, ...}) = apply binding;
+val binding = apply #binding;
fun binding_prefix morph = Binding.name "x" |> binding morph |> Binding.prefix_of;
-fun typ (Morphism {typ, ...}) = apply typ;
-fun term (Morphism {term, ...}) = apply term;
-fun fact (Morphism {fact, ...}) = apply fact;
+val typ = apply #typ;
+val term = apply #term;
+fun fact phi = map (join_transfer (#context (rep phi))) #> apply #fact phi;
val thm = singleton o fact;
val cterm = Drule.cterm_rule o thm;
(* morphism combinators *)
val identity = morphism "" {binding = [], typ = [], term = [], fact = []};
-fun compose
- (Morphism {names = names1, binding = binding1, typ = typ1, term = term1, fact = fact1})
- (Morphism {names = names2, binding = binding2, typ = typ2, term = term2, fact = fact2}) =
- Morphism {
- names = names1 @ names2,
- binding = binding1 @ binding2,
- typ = typ1 @ typ2,
- term = term1 @ term2,
- fact = fact1 @ fact2};
+val default = the_default identity;
+
+fun compose phi1 phi2 =
+ if is_empty phi1 then phi2
+ else if is_empty phi2 then phi1
+ else
+ let
+ val {context = context1, names = names1, binding = binding1,
+ typ = typ1, term = term1, fact = fact1} = rep phi1;
+ val {context = context2, names = names2, binding = binding2,
+ typ = typ2, term = term2, fact = fact2} = rep phi2;
+ in
+ Morphism {
+ context = join_context (context1, context2),
+ names = names1 @ names2,
+ binding = binding1 @ binding2,
+ typ = typ1 @ typ2,
+ term = term1 @ term2,
+ fact = fact1 @ fact2}
+ end;
fun phi1 $> phi2 = compose phi2 phi1;
-fun transform phi f = fn psi => f (phi $> psi);
-fun form f = f identity;
+
+(* abstract entities *)
+
+datatype 'a entity = Entity of (morphism -> 'a) * morphism;
+fun entity f = Entity (f, identity);
+
+fun entity_morphism g (Entity (f, phi)) = Entity (f, g phi);
+fun entity_reset_context a = entity_morphism reset_context a;
+fun entity_set_context thy a = entity_morphism (set_context thy) a;
+fun entity_set_context' ctxt a = entity_morphism (set_context' ctxt) a;
+fun entity_set_context'' context a = entity_morphism (set_context'' context) a;
+
+fun transform phi = entity_morphism (compose phi);
+fun transform_reset_context phi = entity_morphism (reset_context o compose phi);
+
+fun form (Entity (f, phi)) = f phi;
+fun form_entity f = f identity;
+
+type declaration = morphism -> Context.generic -> Context.generic;
+type declaration_entity = (Context.generic -> Context.generic) entity;
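(*Illustrative sketch (editorial addition, not part of this changeset): a
  declaration entity that does nothing; "transform" composes a morphism into
  it, and "form" finally applies the pending morphism.*)
val trivial_decl: declaration_entity = entity (fn _ => I);
fun form_trivial phi = form (transform phi trivial_decl);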
(* concrete morphisms *)
-fun binding_morphism a binding = morphism a {binding = [binding], typ = [], term = [], fact = []};
-fun typ_morphism a typ = morphism a {binding = [], typ = [typ], term = [], fact = []};
-fun term_morphism a term = morphism a {binding = [], typ = [], term = [term], fact = []};
-fun fact_morphism a fact = morphism a {binding = [], typ = [], term = [], fact = [fact]};
-fun thm_morphism a thm = morphism a {binding = [], typ = [], term = [], fact = [map thm]};
+fun binding_morphism a binding = morphism a {binding = [K binding], typ = [], term = [], fact = []};
+fun typ_morphism' a typ = morphism a {binding = [], typ = [typ o the_theory], term = [], fact = []};
+fun typ_morphism a typ = morphism a {binding = [], typ = [K typ], term = [], fact = []};
+fun term_morphism' a term = morphism a {binding = [], typ = [], term = [term o the_theory], fact = []};
+fun term_morphism a term = morphism a {binding = [], typ = [], term = [K term], fact = []};
+fun fact_morphism' a fact = morphism a {binding = [], typ = [], term = [], fact = [fact o the_theory]};
+fun fact_morphism a fact = morphism a {binding = [], typ = [], term = [], fact = [K fact]};
+fun thm_morphism' a thm = morphism a {binding = [], typ = [], term = [], fact = [map o thm o the_theory]};
+fun thm_morphism a thm = morphism a {binding = [], typ = [], term = [], fact = [K (map thm)]};
-val transfer_morphism = thm_morphism "transfer" o Thm.join_transfer;
+fun transfer_morphism thy = fact_morphism "transfer" I |> set_context thy;
val transfer_morphism' = transfer_morphism o Proof_Context.theory_of;
val transfer_morphism'' = transfer_morphism o Context.theory_of;
val trim_context_morphism = thm_morphism "trim_context" Thm.trim_context;
+fun set_trim_context thy phi = set_context thy phi $> trim_context_morphism;
+val set_trim_context' = set_trim_context o Proof_Context.theory_of;
+val set_trim_context'' = set_trim_context o Context.theory_of;
+
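(*Illustrative sketch (editorial addition, not part of this changeset): building
  and applying one of the concrete morphisms above; the result is the binding
  "foo_bar".*)
val binding_example =
  binding (binding_morphism "prefix_example" (Binding.prefix_name "foo_"))
    (Binding.name "bar");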
(* instantiate *)
fun instantiate_frees_morphism (cinstT, cinst) =
if TFrees.is_empty cinstT andalso Frees.is_empty cinst then identity
else
let
val instT = TFrees.map (K Thm.typ_of) cinstT;
val inst = Frees.map (K Thm.term_of) cinst;
in
morphism "instantiate_frees"
{binding = [],
typ =
if TFrees.is_empty instT then []
- else [Term_Subst.instantiateT_frees instT],
- term = [Term_Subst.instantiate_frees (instT, inst)],
- fact = [map (Thm.instantiate_frees (cinstT, cinst))]}
+ else [K (Term_Subst.instantiateT_frees instT)],
+ term = [K (Term_Subst.instantiate_frees (instT, inst))],
+ fact = [K (map (Thm.instantiate_frees (cinstT, cinst)))]}
end;
fun instantiate_morphism (cinstT, cinst) =
if TVars.is_empty cinstT andalso Vars.is_empty cinst then identity
else
let
val instT = TVars.map (K Thm.typ_of) cinstT;
val inst = Vars.map (K Thm.term_of) cinst;
in
morphism "instantiate"
{binding = [],
typ =
if TVars.is_empty instT then []
- else [Term_Subst.instantiateT instT],
- term = [Term_Subst.instantiate (instT, inst)],
- fact = [map (Thm.instantiate (cinstT, cinst))]}
+ else [K (Term_Subst.instantiateT instT)],
+ term = [K (Term_Subst.instantiate (instT, inst))],
+ fact = [K (map (Thm.instantiate (cinstT, cinst)))]}
end;
end;
structure Basic_Morphism: BASIC_MORPHISM = Morphism;
open Basic_Morphism;
diff --git a/src/Pure/raw_simplifier.ML b/src/Pure/raw_simplifier.ML
--- a/src/Pure/raw_simplifier.ML
+++ b/src/Pure/raw_simplifier.ML
@@ -1,1456 +1,1456 @@
(* Title: Pure/raw_simplifier.ML
Author: Tobias Nipkow and Stefan Berghofer, TU Muenchen
Higher-order Simplification.
*)
infix 4
addsimps delsimps addsimprocs delsimprocs
setloop addloop delloop
setSSolver addSSolver setSolver addSolver;
signature BASIC_RAW_SIMPLIFIER =
sig
val simp_depth_limit: int Config.T
val simp_trace_depth_limit: int Config.T
val simp_debug: bool Config.T
val simp_trace: bool Config.T
type cong_name = bool * string
type rrule
val mk_rrules: Proof.context -> thm list -> rrule list
val eq_rrule: rrule * rrule -> bool
type proc
type solver
val mk_solver: string -> (Proof.context -> int -> tactic) -> solver
type simpset
val empty_ss: simpset
val merge_ss: simpset * simpset -> simpset
val dest_ss: simpset ->
{simps: (string * thm) list,
procs: (string * term list) list,
congs: (cong_name * thm) list,
weak_congs: cong_name list,
loopers: string list,
unsafe_solvers: string list,
safe_solvers: string list}
type simproc
val eq_simproc: simproc * simproc -> bool
val cert_simproc: theory -> string ->
- {lhss: term list, proc: morphism -> Proof.context -> cterm -> thm option} -> simproc
+ {lhss: term list, proc: (Proof.context -> cterm -> thm option) Morphism.entity} -> simproc
val transform_simproc: morphism -> simproc -> simproc
val simpset_of: Proof.context -> simpset
val put_simpset: simpset -> Proof.context -> Proof.context
val simpset_map: Proof.context -> (Proof.context -> Proof.context) -> simpset -> simpset
val map_theory_simpset: (Proof.context -> Proof.context) -> theory -> theory
val empty_simpset: Proof.context -> Proof.context
val clear_simpset: Proof.context -> Proof.context
val addsimps: Proof.context * thm list -> Proof.context
val delsimps: Proof.context * thm list -> Proof.context
val addsimprocs: Proof.context * simproc list -> Proof.context
val delsimprocs: Proof.context * simproc list -> Proof.context
val setloop: Proof.context * (Proof.context -> int -> tactic) -> Proof.context
val addloop: Proof.context * (string * (Proof.context -> int -> tactic)) -> Proof.context
val delloop: Proof.context * string -> Proof.context
val setSSolver: Proof.context * solver -> Proof.context
val addSSolver: Proof.context * solver -> Proof.context
val setSolver: Proof.context * solver -> Proof.context
val addSolver: Proof.context * solver -> Proof.context
val rewrite_rule: Proof.context -> thm list -> thm -> thm
val rewrite_goals_rule: Proof.context -> thm list -> thm -> thm
val rewrite_goals_tac: Proof.context -> thm list -> tactic
val rewrite_goal_tac: Proof.context -> thm list -> int -> tactic
val prune_params_tac: Proof.context -> tactic
val fold_rule: Proof.context -> thm list -> thm -> thm
val fold_goals_tac: Proof.context -> thm list -> tactic
val norm_hhf: Proof.context -> thm -> thm
val norm_hhf_protect: Proof.context -> thm -> thm
end;
signature RAW_SIMPLIFIER =
sig
include BASIC_RAW_SIMPLIFIER
exception SIMPLIFIER of string * thm list
type trace_ops
val set_trace_ops: trace_ops -> theory -> theory
val subgoal_tac: Proof.context -> int -> tactic
val loop_tac: Proof.context -> int -> tactic
val solvers: Proof.context -> solver list * solver list
val map_ss: (Proof.context -> Proof.context) -> Context.generic -> Context.generic
val prems_of: Proof.context -> thm list
val add_simp: thm -> Proof.context -> Proof.context
val del_simp: thm -> Proof.context -> Proof.context
val flip_simp: thm -> Proof.context -> Proof.context
val init_simpset: thm list -> Proof.context -> Proof.context
val add_eqcong: thm -> Proof.context -> Proof.context
val del_eqcong: thm -> Proof.context -> Proof.context
val add_cong: thm -> Proof.context -> Proof.context
val del_cong: thm -> Proof.context -> Proof.context
val mksimps: Proof.context -> thm -> thm list
val set_mksimps: (Proof.context -> thm -> thm list) -> Proof.context -> Proof.context
val set_mkcong: (Proof.context -> thm -> thm) -> Proof.context -> Proof.context
val set_mksym: (Proof.context -> thm -> thm option) -> Proof.context -> Proof.context
val set_mkeqTrue: (Proof.context -> thm -> thm option) -> Proof.context -> Proof.context
val set_term_ord: term ord -> Proof.context -> Proof.context
val set_subgoaler: (Proof.context -> int -> tactic) -> Proof.context -> Proof.context
val solver: Proof.context -> solver -> int -> tactic
val default_mk_sym: Proof.context -> thm -> thm option
val add_prems: thm list -> Proof.context -> Proof.context
val set_reorient: (Proof.context -> term list -> term -> term -> bool) ->
Proof.context -> Proof.context
val set_solvers: solver list -> Proof.context -> Proof.context
val rewrite_cterm: bool * bool * bool ->
(Proof.context -> thm -> thm option) -> Proof.context -> conv
val rewrite_term: theory -> thm list -> (term -> term option) list -> term -> term
val rewrite_thm: bool * bool * bool ->
(Proof.context -> thm -> thm option) -> Proof.context -> thm -> thm
val generic_rewrite_goal_tac: bool * bool * bool ->
(Proof.context -> tactic) -> Proof.context -> int -> tactic
val rewrite: Proof.context -> bool -> thm list -> conv
end;
structure Raw_Simplifier: RAW_SIMPLIFIER =
struct
(** datatype simpset **)
(* congruence rules *)
type cong_name = bool * string;
fun cong_name (Const (a, _)) = SOME (true, a)
| cong_name (Free (a, _)) = SOME (false, a)
| cong_name _ = NONE;
structure Congtab = Table(type key = cong_name val ord = prod_ord bool_ord fast_string_ord);
(* rewrite rules *)
type rrule =
{thm: thm, (*the rewrite rule*)
name: string, (*name of theorem from which rewrite rule was extracted*)
lhs: term, (*the left-hand side*)
elhs: cterm, (*the eta-contracted lhs*)
extra: bool, (*extra variables outside of elhs*)
fo: bool, (*use first-order matching*)
perm: bool}; (*the rewrite rule is permutative*)
fun trim_context_rrule ({thm, name, lhs, elhs, extra, fo, perm}: rrule) =
{thm = Thm.trim_context thm, name = name, lhs = lhs, elhs = Thm.trim_context_cterm elhs,
extra = extra, fo = fo, perm = perm};
(*
Remarks:
- elhs is used for matching,
lhs only for preservation of bound variable names;
- fo is set iff
either elhs is first-order (no Var is applied),
in which case fo-matching is complete,
or elhs is not a pattern,
in which case there is nothing better to do;
*)
fun eq_rrule ({thm = thm1, ...}: rrule, {thm = thm2, ...}: rrule) =
Thm.eq_thm_prop (thm1, thm2);
(* FIXME: it seems that the conditions on extra variables are too liberal if
prems are nonempty: does solving the prems really guarantee instantiation of
all its Vars? Better: a dynamic check each time a rule is applied.
*)
fun rewrite_rule_extra_vars prems elhs erhs =
let
val elhss = elhs :: prems;
val tvars = TVars.build (fold TVars.add_tvars elhss);
val vars = Vars.build (fold Vars.add_vars elhss);
in
erhs |> Term.exists_type (Term.exists_subtype
(fn TVar v => not (TVars.defined tvars v) | _ => false)) orelse
erhs |> Term.exists_subterm
(fn Var v => not (Vars.defined vars v) | _ => false)
end;
fun rrule_extra_vars elhs thm =
rewrite_rule_extra_vars [] (Thm.term_of elhs) (Thm.full_prop_of thm);
fun mk_rrule2 {thm, name, lhs, elhs, perm} =
let
val t = Thm.term_of elhs;
val fo = Pattern.first_order t orelse not (Pattern.pattern t);
val extra = rrule_extra_vars elhs thm;
in {thm = thm, name = name, lhs = lhs, elhs = elhs, extra = extra, fo = fo, perm = perm} end;
(*simple test for looping rewrite rules and stupid orientations*)
fun default_reorient ctxt prems lhs rhs =
rewrite_rule_extra_vars prems lhs rhs
orelse
is_Var (head_of lhs)
orelse
(* turns t = x around, which causes a headache if x is a local variable -
usually it is very useful :-(
is_Free rhs andalso not(is_Free lhs) andalso not(Logic.occs(rhs,lhs))
andalso not(exists_subterm is_Var lhs)
orelse
*)
exists (fn t => Logic.occs (lhs, t)) (rhs :: prems)
orelse
null prems andalso Pattern.matches (Proof_Context.theory_of ctxt) (lhs, rhs)
(*the condition "null prems" is necessary because conditional rewrites
with extra variables in the conditions may terminate although
the rhs is an instance of the lhs; example: ?m < ?n \<Longrightarrow> f ?n \<equiv> f ?m *)
orelse
is_Const lhs andalso not (is_Const rhs);
(* simplification procedures *)
datatype proc =
Proc of
{name: string,
lhs: term,
proc: Proof.context -> cterm -> thm option,
stamp: stamp};
fun eq_proc (Proc {stamp = stamp1, ...}, Proc {stamp = stamp2, ...}) = stamp1 = stamp2;
(* solvers *)
datatype solver =
Solver of
{name: string,
solver: Proof.context -> int -> tactic,
id: stamp};
fun mk_solver name solver = Solver {name = name, solver = solver, id = stamp ()};
fun solver_name (Solver {name, ...}) = name;
fun solver ctxt (Solver {solver = tac, ...}) = tac ctxt;
fun eq_solver (Solver {id = id1, ...}, Solver {id = id2, ...}) = (id1 = id2);
(* simplification sets *)
(*A simpset contains data required during conversion:
rules: discrimination net of rewrite rules;
prems: current premises;
depth: simp_depth and exceeded flag;
congs: association list of congruence rules and
a list of `weak' congruence constants.
A congruence is `weak' if it avoids normalization of some argument.
procs: discrimination net of simplification procedures
(functions that prove rewrite rules on the fly);
mk_rews:
mk: turn simplification thms into rewrite rules;
mk_cong: prepare congruence rules;
mk_sym: turn \<equiv> around;
mk_eq_True: turn P into P \<equiv> True;
term_ord: for ordered rewriting;*)
datatype simpset =
Simpset of
{rules: rrule Net.net,
prems: thm list,
depth: int * bool Unsynchronized.ref} *
{congs: thm Congtab.table * cong_name list,
procs: proc Net.net,
mk_rews:
{mk: Proof.context -> thm -> thm list,
mk_cong: Proof.context -> thm -> thm,
mk_sym: Proof.context -> thm -> thm option,
mk_eq_True: Proof.context -> thm -> thm option,
reorient: Proof.context -> term list -> term -> term -> bool},
term_ord: term ord,
subgoal_tac: Proof.context -> int -> tactic,
loop_tacs: (string * (Proof.context -> int -> tactic)) list,
solvers: solver list * solver list};
fun internal_ss (Simpset (_, ss2)) = ss2;
fun make_ss1 (rules, prems, depth) = {rules = rules, prems = prems, depth = depth};
fun map_ss1 f {rules, prems, depth} = make_ss1 (f (rules, prems, depth));
fun make_ss2 (congs, procs, mk_rews, term_ord, subgoal_tac, loop_tacs, solvers) =
{congs = congs, procs = procs, mk_rews = mk_rews, term_ord = term_ord,
subgoal_tac = subgoal_tac, loop_tacs = loop_tacs, solvers = solvers};
fun map_ss2 f {congs, procs, mk_rews, term_ord, subgoal_tac, loop_tacs, solvers} =
make_ss2 (f (congs, procs, mk_rews, term_ord, subgoal_tac, loop_tacs, solvers));
fun make_simpset (args1, args2) = Simpset (make_ss1 args1, make_ss2 args2);
fun dest_ss (Simpset ({rules, ...}, {congs, procs, loop_tacs, solvers, ...})) =
{simps = Net.entries rules
|> map (fn {name, thm, ...} => (name, thm)),
procs = Net.entries procs
|> map (fn Proc {name, lhs, stamp, ...} => ((name, lhs), stamp))
|> partition_eq (eq_snd op =)
|> map (fn ps => (fst (fst (hd ps)), map (snd o fst) ps)),
congs = congs |> fst |> Congtab.dest,
weak_congs = congs |> snd,
loopers = map fst loop_tacs,
unsafe_solvers = map solver_name (#1 solvers),
safe_solvers = map solver_name (#2 solvers)};
(* empty *)
fun init_ss depth mk_rews term_ord subgoal_tac solvers =
make_simpset ((Net.empty, [], depth),
((Congtab.empty, []), Net.empty, mk_rews, term_ord, subgoal_tac, [], solvers));
fun default_mk_sym _ th = SOME (th RS Drule.symmetric_thm);
val empty_ss =
init_ss (0, Unsynchronized.ref false)
{mk = fn _ => fn th => if can Logic.dest_equals (Thm.concl_of th) then [th] else [],
mk_cong = K I,
mk_sym = default_mk_sym,
mk_eq_True = K (K NONE),
reorient = default_reorient}
Term_Ord.term_ord (K (K no_tac)) ([], []);
(* merge *) (*NOTE: ignores some fields of 2nd simpset*)
fun merge_ss (ss1, ss2) =
if pointer_eq (ss1, ss2) then ss1
else
let
val Simpset ({rules = rules1, prems = prems1, depth = depth1},
{congs = (congs1, weak1), procs = procs1, mk_rews, term_ord, subgoal_tac,
loop_tacs = loop_tacs1, solvers = (unsafe_solvers1, solvers1)}) = ss1;
val Simpset ({rules = rules2, prems = prems2, depth = depth2},
{congs = (congs2, weak2), procs = procs2, mk_rews = _, term_ord = _, subgoal_tac = _,
loop_tacs = loop_tacs2, solvers = (unsafe_solvers2, solvers2)}) = ss2;
val rules' = Net.merge eq_rrule (rules1, rules2);
val prems' = Thm.merge_thms (prems1, prems2);
val depth' = if #1 depth1 < #1 depth2 then depth2 else depth1;
val congs' = Congtab.merge (K true) (congs1, congs2);
val weak' = merge (op =) (weak1, weak2);
val procs' = Net.merge eq_proc (procs1, procs2);
val loop_tacs' = AList.merge (op =) (K true) (loop_tacs1, loop_tacs2);
val unsafe_solvers' = merge eq_solver (unsafe_solvers1, unsafe_solvers2);
val solvers' = merge eq_solver (solvers1, solvers2);
in
make_simpset ((rules', prems', depth'), ((congs', weak'), procs',
mk_rews, term_ord, subgoal_tac, loop_tacs', (unsafe_solvers', solvers')))
end;
(** context data **)
structure Simpset = Generic_Data
(
type T = simpset;
val empty = empty_ss;
val merge = merge_ss;
);
val simpset_of = Simpset.get o Context.Proof;
fun map_simpset f = Context.proof_map (Simpset.map f);
fun map_simpset1 f = map_simpset (fn Simpset (ss1, ss2) => Simpset (map_ss1 f ss1, ss2));
fun map_simpset2 f = map_simpset (fn Simpset (ss1, ss2) => Simpset (ss1, map_ss2 f ss2));
fun put_simpset ss = map_simpset (K ss);
fun simpset_map ctxt f ss = ctxt |> put_simpset ss |> f |> simpset_of;
val empty_simpset = put_simpset empty_ss;
fun map_theory_simpset f thy =
let
val ctxt' = f (Proof_Context.init_global thy);
val thy' = Proof_Context.theory_of ctxt';
in Context.theory_map (Simpset.map (K (simpset_of ctxt'))) thy' end;
fun map_ss f = Context.mapping (map_theory_simpset (f o Context_Position.not_really)) f;
val clear_simpset =
map_simpset (fn Simpset ({depth, ...}, {mk_rews, term_ord, subgoal_tac, solvers, ...}) =>
init_ss depth mk_rews term_ord subgoal_tac solvers);
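(*Illustrative sketch (editorial addition): inspecting a context's simpset via
  dest_ss, e.g. to list the names of its rewrite rules.*)
fun simp_rule_names ctxt = map #1 (#simps (dest_ss (simpset_of ctxt)));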
(* accessors for tactics *)
fun subgoal_tac ctxt = (#subgoal_tac o internal_ss o simpset_of) ctxt ctxt;
fun loop_tac ctxt =
FIRST' (map (fn (_, tac) => tac ctxt) (rev ((#loop_tacs o internal_ss o simpset_of) ctxt)));
val solvers = #solvers o internal_ss o simpset_of
(* simp depth *)
(*
The simp_depth_limit is meant to abort infinite recursion of the simplifier
early but should not terminate "normal" executions.
As of 2017, 25 would suffice; 40 builds in a safety margin.
*)
val simp_depth_limit = Config.declare_int ("simp_depth_limit", \<^here>) (K 40);
val simp_trace_depth_limit = Config.declare_int ("simp_trace_depth_limit", \<^here>) (K 1);
fun inc_simp_depth ctxt =
ctxt |> map_simpset1 (fn (rules, prems, (depth, exceeded)) =>
(rules, prems,
(depth + 1,
if depth = Config.get ctxt simp_trace_depth_limit
then Unsynchronized.ref false else exceeded)));
fun simp_depth ctxt =
let val Simpset ({depth = (depth, _), ...}, _) = simpset_of ctxt
in depth end;
(* diagnostics *)
exception SIMPLIFIER of string * thm list;
val simp_debug = Config.declare_bool ("simp_debug", \<^here>) (K false);
val simp_trace = Config.declare_bool ("simp_trace", \<^here>) (K false);
fun cond_warning ctxt msg =
if Context_Position.is_really_visible ctxt then warning (msg ()) else ();
fun cond_tracing' ctxt flag msg =
if Config.get ctxt flag then
let
val Simpset ({depth = (depth, exceeded), ...}, _) = simpset_of ctxt;
val depth_limit = Config.get ctxt simp_trace_depth_limit;
in
if depth > depth_limit then
if ! exceeded then () else (tracing "simp_trace_depth_limit exceeded!"; exceeded := true)
else (tracing (enclose "[" "]" (string_of_int depth) ^ msg ()); exceeded := false)
end
else ();
fun cond_tracing ctxt = cond_tracing' ctxt simp_trace;
fun print_term ctxt s t =
s ^ "\n" ^ Syntax.string_of_term ctxt t;
fun print_thm ctxt s (name, th) =
print_term ctxt (if name = "" then s else s ^ " " ^ quote name ^ ":") (Thm.full_prop_of th);
(** simpset operations **)
(* prems *)
fun prems_of ctxt =
let val Simpset ({prems, ...}, _) = simpset_of ctxt in prems end;
fun add_prems ths =
map_simpset1 (fn (rules, prems, depth) => (rules, ths @ prems, depth));
(* maintain simp rules *)
fun del_rrule loud (rrule as {thm, elhs, ...}) ctxt =
ctxt |> map_simpset1 (fn (rules, prems, depth) =>
(Net.delete_term eq_rrule (Thm.term_of elhs, rrule) rules, prems, depth))
handle Net.DELETE =>
(if not loud then ()
else cond_warning ctxt
(fn () => print_thm ctxt "Rewrite rule not in simpset:" ("", thm));
ctxt);
fun insert_rrule (rrule as {thm, name, ...}) ctxt =
(cond_tracing ctxt (fn () => print_thm ctxt "Adding rewrite rule" (name, thm));
ctxt |> map_simpset1 (fn (rules, prems, depth) =>
let
val rrule2 as {elhs, ...} = mk_rrule2 rrule;
val rules' = Net.insert_term eq_rrule (Thm.term_of elhs, trim_context_rrule rrule2) rules;
in (rules', prems, depth) end)
handle Net.INSERT =>
(cond_warning ctxt (fn () => print_thm ctxt "Ignoring duplicate rewrite rule:" ("", thm));
ctxt));
val vars_set = Vars.build o Vars.add_vars;
local
fun vperm (Var _, Var _) = true
| vperm (Abs (_, _, s), Abs (_, _, t)) = vperm (s, t)
| vperm (t1 $ t2, u1 $ u2) = vperm (t1, u1) andalso vperm (t2, u2)
| vperm (t, u) = (t = u);
fun var_perm (t, u) = vperm (t, u) andalso Vars.eq_set (apply2 vars_set (t, u));
in
fun decomp_simp thm =
let
val prop = Thm.prop_of thm;
val prems = Logic.strip_imp_prems prop;
val concl = Drule.strip_imp_concl (Thm.cprop_of thm);
val (lhs, rhs) = Thm.dest_equals concl handle TERM _ =>
raise SIMPLIFIER ("Rewrite rule not a meta-equality", [thm]);
val elhs = Thm.dest_arg (Thm.cprop_of (Thm.eta_conversion lhs));
val erhs = Envir.eta_contract (Thm.term_of rhs);
val perm =
var_perm (Thm.term_of elhs, erhs) andalso
not (Thm.term_of elhs aconv erhs) andalso
not (is_Var (Thm.term_of elhs));
in (prems, Thm.term_of lhs, elhs, Thm.term_of rhs, perm) end;
end;
fun decomp_simp' thm =
let val (_, lhs, _, rhs, _) = decomp_simp thm in
if Thm.nprems_of thm > 0 then raise SIMPLIFIER ("Bad conditional rewrite rule", [thm])
else (lhs, rhs)
end;
fun mk_eq_True ctxt (thm, name) =
let val Simpset (_, {mk_rews = {mk_eq_True, ...}, ...}) = simpset_of ctxt in
(case mk_eq_True ctxt thm of
NONE => []
| SOME eq_True =>
let val (_, lhs, elhs, _, _) = decomp_simp eq_True;
in [{thm = eq_True, name = name, lhs = lhs, elhs = elhs, perm = false}] end)
end;
(*create the rewrite rule and possibly also the eq_True variant,
in case there are extra vars on the rhs*)
fun rrule_eq_True ctxt thm name lhs elhs rhs thm2 =
let val rrule = {thm = thm, name = name, lhs = lhs, elhs = elhs, perm = false} in
if rewrite_rule_extra_vars [] lhs rhs then
mk_eq_True ctxt (thm2, name) @ [rrule]
else [rrule]
end;
fun mk_rrule ctxt (thm, name) =
let val (prems, lhs, elhs, rhs, perm) = decomp_simp thm in
if perm then [{thm = thm, name = name, lhs = lhs, elhs = elhs, perm = true}]
else
(*weak test for loops*)
if rewrite_rule_extra_vars prems lhs rhs orelse is_Var (Thm.term_of elhs)
then mk_eq_True ctxt (thm, name)
else rrule_eq_True ctxt thm name lhs elhs rhs thm
end |> map (fn {thm, name, lhs, elhs, perm} =>
{thm = Thm.trim_context thm, name = name, lhs = lhs,
elhs = Thm.trim_context_cterm elhs, perm = perm});
fun orient_rrule ctxt (thm, name) =
let
val (prems, lhs, elhs, rhs, perm) = decomp_simp thm;
val Simpset (_, {mk_rews = {reorient, mk_sym, ...}, ...}) = simpset_of ctxt;
in
if perm then [{thm = thm, name = name, lhs = lhs, elhs = elhs, perm = true}]
else if reorient ctxt prems lhs rhs then
if reorient ctxt prems rhs lhs
then mk_eq_True ctxt (thm, name)
else
(case mk_sym ctxt thm of
NONE => []
| SOME thm' =>
let val (_, lhs', elhs', rhs', _) = decomp_simp thm'
in rrule_eq_True ctxt thm' name lhs' elhs' rhs' thm end)
else rrule_eq_True ctxt thm name lhs elhs rhs thm
end;
fun extract_rews ctxt sym thms =
let
val Simpset (_, {mk_rews = {mk, ...}, ...}) = simpset_of ctxt;
val mk =
if sym then fn ctxt => fn th => (mk ctxt th) RL [Drule.symmetric_thm]
else mk
in maps (fn thm => map (rpair (Thm.get_name_hint thm)) (mk ctxt thm)) thms
end;
fun extract_safe_rrules ctxt thm =
maps (orient_rrule ctxt) (extract_rews ctxt false [thm]);
fun mk_rrules ctxt thms =
let
val rews = extract_rews ctxt false thms
val raw_rrules = flat (map (mk_rrule ctxt) rews)
in map mk_rrule2 raw_rrules end
(* add/del rules explicitly *)
local
fun comb_simps ctxt comb mk_rrule sym thms =
let val rews = extract_rews ctxt sym (map (Thm.transfer' ctxt) thms);
in fold (fold comb o mk_rrule) rews ctxt end;
(*
This code checks whether the symmetric version of a rule is already in the simpset.
However, the variable names in the two versions of the rule may differ.
Thus the current test modulo eq_rrule is too weak to be useful
and needs to be refined.
fun present ctxt rules (rrule as {thm, elhs, ...}) =
(Net.insert_term eq_rrule (Thm.term_of elhs, trim_context_rrule rrule) rules;
false)
handle Net.INSERT =>
(cond_warning ctxt
(fn () => print_thm ctxt "Symmetric rewrite rule already in simpset:" ("", thm));
true);
fun sym_present ctxt thms =
let
val rews = extract_rews ctxt true (map (Thm.transfer' ctxt) thms);
val rrules = map mk_rrule2 (flat(map (mk_rrule ctxt) rews))
val Simpset({rules, ...},_) = simpset_of ctxt
in exists (present ctxt rules) rrules end
*)
in
fun ctxt addsimps thms =
comb_simps ctxt insert_rrule (mk_rrule ctxt) false thms;
fun addsymsimps ctxt thms =
comb_simps ctxt insert_rrule (mk_rrule ctxt) true thms;
fun ctxt delsimps thms =
comb_simps ctxt (del_rrule true) (map mk_rrule2 o mk_rrule ctxt) false thms;
fun delsimps_quiet ctxt thms =
comb_simps ctxt (del_rrule false) (map mk_rrule2 o mk_rrule ctxt) false thms;
fun add_simp thm ctxt = ctxt addsimps [thm];
(*
with check for presence of symmetric version:
if sym_present ctxt [thm]
then (cond_warning ctxt (fn () => print_thm ctxt "Ignoring rewrite rule:" ("", thm)); ctxt)
else ctxt addsimps [thm];
*)
fun del_simp thm ctxt = ctxt delsimps [thm];
fun flip_simp thm ctxt = addsymsimps (delsimps_quiet ctxt [thm]) [thm];
end;
fun init_simpset thms ctxt = ctxt
|> Context_Position.set_visible false
|> empty_simpset
|> fold add_simp thms
|> Context_Position.restore_visible ctxt;
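(*Illustrative sketch (editorial addition): a context whose simpset contains
  exactly the given meta-equalities, starting from the empty simpset
  (init_simpset above does the same with context visibility suppressed).*)
fun simpset_of_thms eqs ctxt = empty_simpset ctxt addsimps eqs;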
(* congs *)
local
fun is_full_cong_prems [] [] = true
| is_full_cong_prems [] _ = false
| is_full_cong_prems (p :: prems) varpairs =
(case Logic.strip_assums_concl p of
Const ("Pure.eq", _) $ lhs $ rhs =>
let val (x, xs) = strip_comb lhs and (y, ys) = strip_comb rhs in
is_Var x andalso forall is_Bound xs andalso
not (has_duplicates (op =) xs) andalso xs = ys andalso
member (op =) varpairs (x, y) andalso
is_full_cong_prems prems (remove (op =) (x, y) varpairs)
end
| _ => false);
fun is_full_cong thm =
let
val prems = Thm.prems_of thm and concl = Thm.concl_of thm;
val (lhs, rhs) = Logic.dest_equals concl;
val (f, xs) = strip_comb lhs and (g, ys) = strip_comb rhs;
in
f = g andalso not (has_duplicates (op =) (xs @ ys)) andalso length xs = length ys andalso
is_full_cong_prems prems (xs ~~ ys)
end;
fun mk_cong ctxt =
let val Simpset (_, {mk_rews = {mk_cong = f, ...}, ...}) = simpset_of ctxt
in f ctxt end;
in
fun add_eqcong thm ctxt = ctxt |> map_simpset2
(fn (congs, procs, mk_rews, term_ord, subgoal_tac, loop_tacs, solvers) =>
let
val (lhs, _) = Logic.dest_equals (Thm.concl_of thm)
handle TERM _ => raise SIMPLIFIER ("Congruence not a meta-equality", [thm]);
(*val lhs = Envir.eta_contract lhs;*)
val a = the (cong_name (head_of lhs)) handle Option.Option =>
raise SIMPLIFIER ("Congruence must start with a constant or free variable", [thm]);
val (xs, weak) = congs;
val xs' = Congtab.update (a, Thm.trim_context thm) xs;
val weak' = if is_full_cong thm then weak else a :: weak;
in ((xs', weak'), procs, mk_rews, term_ord, subgoal_tac, loop_tacs, solvers) end);
fun del_eqcong thm ctxt = ctxt |> map_simpset2
(fn (congs, procs, mk_rews, term_ord, subgoal_tac, loop_tacs, solvers) =>
let
val (lhs, _) = Logic.dest_equals (Thm.concl_of thm)
handle TERM _ => raise SIMPLIFIER ("Congruence not a meta-equality", [thm]);
(*val lhs = Envir.eta_contract lhs;*)
val a = the (cong_name (head_of lhs)) handle Option.Option =>
raise SIMPLIFIER ("Congruence must start with a constant", [thm]);
val (xs, _) = congs;
val xs' = Congtab.delete_safe a xs;
val weak' = Congtab.fold (fn (a, th) => if is_full_cong th then I else insert (op =) a) xs' [];
in ((xs', weak'), procs, mk_rews, term_ord, subgoal_tac, loop_tacs, solvers) end);
fun add_cong thm ctxt = add_eqcong (mk_cong ctxt thm) ctxt;
fun del_cong thm ctxt = del_eqcong (mk_cong ctxt thm) ctxt;
end;
(* simprocs *)
datatype simproc =
Simproc of
{name: string,
lhss: term list,
- proc: morphism -> Proof.context -> cterm -> thm option,
+ proc: (Proof.context -> cterm -> thm option) Morphism.entity,
stamp: stamp};
fun eq_simproc (Simproc {stamp = stamp1, ...}, Simproc {stamp = stamp2, ...}) = stamp1 = stamp2;
fun cert_simproc thy name {lhss, proc} =
Simproc {name = name, lhss = map (Sign.cert_term thy) lhss, proc = proc, stamp = stamp ()};
fun transform_simproc phi (Simproc {name, lhss, proc, stamp}) =
Simproc
{name = name,
lhss = map (Morphism.term phi) lhss,
- proc = Morphism.transform phi proc,
+ proc = Morphism.transform_reset_context phi proc,
stamp = stamp};
local
fun add_proc (proc as Proc {name, lhs, ...}) ctxt =
(cond_tracing ctxt (fn () =>
print_term ctxt ("Adding simplification procedure " ^ quote name ^ " for") lhs);
ctxt |> map_simpset2
(fn (congs, procs, mk_rews, term_ord, subgoal_tac, loop_tacs, solvers) =>
(congs, Net.insert_term eq_proc (lhs, proc) procs,
mk_rews, term_ord, subgoal_tac, loop_tacs, solvers))
handle Net.INSERT =>
(cond_warning ctxt (fn () => "Ignoring duplicate simplification procedure " ^ quote name);
ctxt));
fun del_proc (proc as Proc {name, lhs, ...}) ctxt =
ctxt |> map_simpset2
(fn (congs, procs, mk_rews, term_ord, subgoal_tac, loop_tacs, solvers) =>
(congs, Net.delete_term eq_proc (lhs, proc) procs,
mk_rews, term_ord, subgoal_tac, loop_tacs, solvers))
handle Net.DELETE =>
(cond_warning ctxt (fn () => "Simplification procedure " ^ quote name ^ " not in simpset");
ctxt);
fun prep_procs (Simproc {name, lhss, proc, stamp}) =
lhss |> map (fn lhs => Proc {name = name, lhs = lhs, proc = Morphism.form proc, stamp = stamp});
in
fun ctxt addsimprocs ps = fold (fold add_proc o prep_procs) ps ctxt;
fun ctxt delsimprocs ps = fold (fold del_proc o prep_procs) ps ctxt;
end;
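(*Illustrative sketch (editorial addition, not part of this changeset): a
  simproc certified via the new Morphism.entity interface; its procedure always
  gives up (NONE) and its only pattern is the propositional free variable "x".*)
fun dummy_simproc thy =
  cert_simproc thy "dummy"
    {lhss = [Free ("x", propT)],
     proc = Morphism.entity (fn _ => fn _ => fn _ => NONE)};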
(* mk_rews *)
local
fun map_mk_rews f =
map_simpset2 (fn (congs, procs, mk_rews, term_ord, subgoal_tac, loop_tacs, solvers) =>
let
val {mk, mk_cong, mk_sym, mk_eq_True, reorient} = mk_rews;
val (mk', mk_cong', mk_sym', mk_eq_True', reorient') =
f (mk, mk_cong, mk_sym, mk_eq_True, reorient);
val mk_rews' = {mk = mk', mk_cong = mk_cong', mk_sym = mk_sym', mk_eq_True = mk_eq_True',
reorient = reorient'};
in (congs, procs, mk_rews', term_ord, subgoal_tac, loop_tacs, solvers) end);
in
fun mksimps ctxt =
let val Simpset (_, {mk_rews = {mk, ...}, ...}) = simpset_of ctxt
in mk ctxt end;
fun set_mksimps mk = map_mk_rews (fn (_, mk_cong, mk_sym, mk_eq_True, reorient) =>
(mk, mk_cong, mk_sym, mk_eq_True, reorient));
fun set_mkcong mk_cong = map_mk_rews (fn (mk, _, mk_sym, mk_eq_True, reorient) =>
(mk, mk_cong, mk_sym, mk_eq_True, reorient));
fun set_mksym mk_sym = map_mk_rews (fn (mk, mk_cong, _, mk_eq_True, reorient) =>
(mk, mk_cong, mk_sym, mk_eq_True, reorient));
fun set_mkeqTrue mk_eq_True = map_mk_rews (fn (mk, mk_cong, mk_sym, _, reorient) =>
(mk, mk_cong, mk_sym, mk_eq_True, reorient));
fun set_reorient reorient = map_mk_rews (fn (mk, mk_cong, mk_sym, mk_eq_True, _) =>
(mk, mk_cong, mk_sym, mk_eq_True, reorient));
end;
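(*Illustrative sketch (editorial addition): installing a mk_rews "mk" function
  that passes each theorem through unchanged as a candidate rewrite rule.*)
fun plain_mksimps ctxt = set_mksimps (fn _ => fn th => [th]) ctxt;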
(* term_ord *)
fun set_term_ord term_ord =
map_simpset2 (fn (congs, procs, mk_rews, _, subgoal_tac, loop_tacs, solvers) =>
(congs, procs, mk_rews, term_ord, subgoal_tac, loop_tacs, solvers));
(* tactics *)
fun set_subgoaler subgoal_tac =
map_simpset2 (fn (congs, procs, mk_rews, term_ord, _, loop_tacs, solvers) =>
(congs, procs, mk_rews, term_ord, subgoal_tac, loop_tacs, solvers));
fun ctxt setloop tac = ctxt |>
map_simpset2 (fn (congs, procs, mk_rews, term_ord, subgoal_tac, _, solvers) =>
(congs, procs, mk_rews, term_ord, subgoal_tac, [("", tac)], solvers));
fun ctxt addloop (name, tac) = ctxt |>
map_simpset2 (fn (congs, procs, mk_rews, term_ord, subgoal_tac, loop_tacs, solvers) =>
(congs, procs, mk_rews, term_ord, subgoal_tac,
AList.update (op =) (name, tac) loop_tacs, solvers));
fun ctxt delloop name = ctxt |>
map_simpset2 (fn (congs, procs, mk_rews, term_ord, subgoal_tac, loop_tacs, solvers) =>
(congs, procs, mk_rews, term_ord, subgoal_tac,
(if AList.defined (op =) loop_tacs name then ()
else cond_warning ctxt (fn () => "No such looper in simpset: " ^ quote name);
AList.delete (op =) name loop_tacs), solvers));
fun ctxt setSSolver solver = ctxt |> map_simpset2
(fn (congs, procs, mk_rews, term_ord, subgoal_tac, loop_tacs, (unsafe_solvers, _)) =>
(congs, procs, mk_rews, term_ord, subgoal_tac, loop_tacs, (unsafe_solvers, [solver])));
fun ctxt addSSolver solver = ctxt |> map_simpset2 (fn (congs, procs, mk_rews, term_ord,
subgoal_tac, loop_tacs, (unsafe_solvers, solvers)) => (congs, procs, mk_rews, term_ord,
subgoal_tac, loop_tacs, (unsafe_solvers, insert eq_solver solver solvers)));
fun ctxt setSolver solver = ctxt |> map_simpset2 (fn (congs, procs, mk_rews, term_ord,
subgoal_tac, loop_tacs, (_, solvers)) => (congs, procs, mk_rews, term_ord,
subgoal_tac, loop_tacs, ([solver], solvers)));
fun ctxt addSolver solver = ctxt |> map_simpset2 (fn (congs, procs, mk_rews, term_ord,
subgoal_tac, loop_tacs, (unsafe_solvers, solvers)) => (congs, procs, mk_rews, term_ord,
subgoal_tac, loop_tacs, (insert eq_solver solver unsafe_solvers, solvers)));
fun set_solvers solvers = map_simpset2 (fn (congs, procs, mk_rews, term_ord,
subgoal_tac, loop_tacs, _) => (congs, procs, mk_rews, term_ord,
subgoal_tac, loop_tacs, (solvers, solvers)));
(* trace operations *)
type trace_ops =
{trace_invoke: {depth: int, term: term} -> Proof.context -> Proof.context,
trace_apply: {unconditional: bool, term: term, thm: thm, rrule: rrule} ->
Proof.context -> (Proof.context -> (thm * term) option) -> (thm * term) option};
structure Trace_Ops = Theory_Data
(
type T = trace_ops;
val empty: T =
{trace_invoke = fn _ => fn ctxt => ctxt,
trace_apply = fn _ => fn ctxt => fn cont => cont ctxt};
fun merge (trace_ops, _) = trace_ops;
);
val set_trace_ops = Trace_Ops.put;
val trace_ops = Trace_Ops.get o Proof_Context.theory_of;
fun trace_invoke args ctxt = #trace_invoke (trace_ops ctxt) args ctxt;
fun trace_apply args ctxt = #trace_apply (trace_ops ctxt) args ctxt;
(** rewriting **)
(*
Uses conversions, see:
L C Paulson, A higher-order implementation of rewriting,
Science of Computer Programming 3 (1983), pages 119-149.
*)
fun check_conv ctxt msg thm thm' =
let
val thm'' = Thm.transitive thm thm' handle THM _ =>
let
val nthm' =
Thm.transitive (Thm.symmetric (Drule.beta_eta_conversion (Thm.lhs_of thm'))) thm'
in Thm.transitive thm nthm' handle THM _ =>
let
val nthm =
Thm.transitive thm (Drule.beta_eta_conversion (Thm.rhs_of thm))
in Thm.transitive nthm nthm' end
end
val _ =
if msg then cond_tracing ctxt (fn () => print_thm ctxt "SUCCEEDED" ("", thm'))
else ();
in SOME thm'' end
handle THM _ =>
let
val _ $ _ $ prop0 = Thm.prop_of thm;
val _ =
cond_tracing ctxt (fn () =>
print_thm ctxt "Proved wrong theorem (bad subgoaler?)" ("", thm') ^ "\n" ^
print_term ctxt "Should have proved:" prop0);
in NONE end;
(* mk_procrule *)
fun mk_procrule ctxt thm =
let
val (prems, lhs, elhs, rhs, _) = decomp_simp thm
val thm' = Thm.close_derivation \<^here> thm;
in
if rewrite_rule_extra_vars prems lhs rhs
then (cond_warning ctxt (fn () => print_thm ctxt "Extra vars on rhs:" ("", thm)); [])
else [mk_rrule2 {thm = thm', name = "", lhs = lhs, elhs = elhs, perm = false}]
end;
(* rewritec: conversion to apply the meta simpset to a term *)
(*Since the rewriting strategy is bottom-up, we avoid re-normalizing already
normalized terms by carrying around the rhs of the rewrite rule just
applied. This is called the `skeleton'. It is decomposed in parallel
with the term. Once a Var is encountered, the corresponding term is
already in normal form.
skel0 is a dummy skeleton that enforces complete normalization.*)
val skel0 = Bound 0;
(*Use rhs as skeleton only if the lhs does not contain unnormalized bits.
The latter may happen iff there are weak congruence rules for constants
in the lhs.*)
fun uncond_skel ((_, weak), (lhs, rhs)) =
if null weak then rhs (*optimization*)
else if exists_subterm
(fn Const (a, _) => member (op =) weak (true, a)
| Free (a, _) => member (op =) weak (false, a)
| _ => false) lhs then skel0
else rhs;
(*Behaves like an unconditional rule if the rhs does not contain vars that are absent from the lhs.
Otherwise those vars may become instantiated with unnormalized terms
while the premises are solved.*)
fun cond_skel (args as (_, (lhs, rhs))) =
if Vars.subset (vars_set rhs, vars_set lhs) then uncond_skel args
else skel0;
(*
Rewriting -- we try in order:
(1) beta reduction
(2) unconditional rewrite rules
(3) conditional rewrite rules
(4) simplification procedures
IMPORTANT: rewrite rules must not introduce new Vars or TVars!
*)
fun rewritec (prover, maxt) ctxt t =
let
val thy = Proof_Context.theory_of ctxt;
val Simpset ({rules, ...}, {congs, procs, term_ord, ...}) = simpset_of ctxt;
val eta_thm = Thm.eta_conversion t;
val eta_t' = Thm.rhs_of eta_thm;
val eta_t = Thm.term_of eta_t';
fun rew rrule =
let
val {thm = thm0, name, lhs, elhs = elhs0, extra, fo, perm} = rrule;
val thm = Thm.transfer thy thm0;
val elhs = Thm.transfer_cterm thy elhs0;
val prop = Thm.prop_of thm;
val (rthm, elhs') =
if maxt = ~1 orelse not extra then (thm, elhs)
else (Thm.incr_indexes (maxt + 1) thm, Thm.incr_indexes_cterm (maxt + 1) elhs);
val insts =
if fo then Thm.first_order_match (elhs', eta_t')
else Thm.match (elhs', eta_t');
val thm' = Thm.instantiate insts (Thm.rename_boundvars lhs eta_t rthm);
val prop' = Thm.prop_of thm';
val unconditional = Logic.no_prems prop';
val (lhs', rhs') = Logic.dest_equals (Logic.strip_imp_concl prop');
val trace_args = {unconditional = unconditional, term = eta_t, thm = thm', rrule = rrule};
in
if perm andalso is_greater_equal (term_ord (rhs', lhs'))
then
(cond_tracing ctxt (fn () =>
print_thm ctxt "Cannot apply permutative rewrite rule" (name, thm) ^ "\n" ^
print_thm ctxt "Term does not become smaller:" ("", thm'));
NONE)
else
(cond_tracing ctxt (fn () =>
print_thm ctxt "Applying instance of rewrite rule" (name, thm));
if unconditional
then
(cond_tracing ctxt (fn () => print_thm ctxt "Rewriting:" ("", thm'));
trace_apply trace_args ctxt (fn ctxt' =>
let
val lr = Logic.dest_equals prop;
val SOME thm'' = check_conv ctxt' false eta_thm thm';
in SOME (thm'', uncond_skel (congs, lr)) end))
else
(cond_tracing ctxt (fn () => print_thm ctxt "Trying to rewrite:" ("", thm'));
if simp_depth ctxt > Config.get ctxt simp_depth_limit
then (cond_tracing ctxt (fn () => "simp_depth_limit exceeded - giving up"); NONE)
else
trace_apply trace_args ctxt (fn ctxt' =>
(case prover ctxt' thm' of
NONE => (cond_tracing ctxt' (fn () => print_thm ctxt' "FAILED" ("", thm')); NONE)
| SOME thm2 =>
(case check_conv ctxt' true eta_thm thm2 of
NONE => NONE
| SOME thm2' =>
let
val concl = Logic.strip_imp_concl prop;
val lr = Logic.dest_equals concl;
in SOME (thm2', cond_skel (congs, lr)) end)))))
end;
fun rews [] = NONE
| rews (rrule :: rrules) =
let val opt = rew rrule handle Pattern.MATCH => NONE
in (case opt of NONE => rews rrules | some => some) end;
fun sort_rrules rrs =
let
fun is_simple ({thm, ...}: rrule) =
(case Thm.prop_of thm of
Const ("Pure.eq", _) $ _ $ _ => true
| _ => false);
fun sort [] (re1, re2) = re1 @ re2
| sort (rr :: rrs) (re1, re2) =
if is_simple rr
then sort rrs (rr :: re1, re2)
else sort rrs (re1, rr :: re2);
in sort rrs ([], []) end;
fun proc_rews [] = NONE
| proc_rews (Proc {name, proc, lhs, ...} :: ps) =
if Pattern.matches (Proof_Context.theory_of ctxt) (lhs, Thm.term_of t) then
(cond_tracing' ctxt simp_debug (fn () =>
print_term ctxt ("Trying procedure " ^ quote name ^ " on:") eta_t);
(case proc ctxt eta_t' of
NONE => (cond_tracing' ctxt simp_debug (fn () => "FAILED"); proc_rews ps)
| SOME raw_thm =>
(cond_tracing ctxt (fn () =>
print_thm ctxt ("Procedure " ^ quote name ^ " produced rewrite rule:")
("", raw_thm));
(case rews (mk_procrule ctxt raw_thm) of
NONE =>
(cond_tracing ctxt (fn () =>
print_term ctxt ("IGNORED result of simproc " ^ quote name ^
" -- does not match") (Thm.term_of t));
proc_rews ps)
| some => some))))
else proc_rews ps;
in
(case eta_t of
Abs _ $ _ => SOME (Thm.transitive eta_thm (Thm.beta_conversion false eta_t'), skel0)
| _ =>
(case rews (sort_rrules (Net.match_term rules eta_t)) of
NONE => proc_rews (Net.match_term procs eta_t)
| some => some))
end;
(* conversion to apply a congruence rule to a term *)
fun congc prover ctxt maxt cong t =
let
val rthm = Thm.incr_indexes (maxt + 1) cong;
val rlhs = fst (Thm.dest_equals (Drule.strip_imp_concl (Thm.cprop_of rthm)));
val insts = Thm.match (rlhs, t)
(* Thm.match can raise Pattern.MATCH;
is handled when congc is called *)
val thm' =
Thm.instantiate insts (Thm.rename_boundvars (Thm.term_of rlhs) (Thm.term_of t) rthm);
val _ =
cond_tracing ctxt (fn () => print_thm ctxt "Applying congruence rule:" ("", thm'));
fun err (msg, thm) = (cond_tracing ctxt (fn () => print_thm ctxt msg ("", thm)); NONE);
in
(case prover thm' of
NONE => err ("Congruence proof failed. Could not prove", thm')
| SOME thm2 =>
(case check_conv ctxt true (Drule.beta_eta_conversion t) thm2 of
NONE => err ("Congruence proof failed. Should not have proved", thm2)
| SOME thm2' =>
if op aconv (apply2 Thm.term_of (Thm.dest_equals (Thm.cprop_of thm2')))
then NONE else SOME thm2'))
end;
val vA = (("A", 0), propT);
val vB = (("B", 0), propT);
val vC = (("C", 0), propT);
fun transitive1 NONE NONE = NONE
| transitive1 (SOME thm1) NONE = SOME thm1
| transitive1 NONE (SOME thm2) = SOME thm2
| transitive1 (SOME thm1) (SOME thm2) = SOME (Thm.transitive thm1 thm2);
fun transitive2 thm = transitive1 (SOME thm);
fun transitive3 thm = transitive1 thm o SOME;
fun bottomc ((simprem, useprem, mutsimp), prover, maxidx) =
let
fun botc skel ctxt t =
if is_Var skel then NONE
else
(case subc skel ctxt t of
some as SOME thm1 =>
(case rewritec (prover, maxidx) ctxt (Thm.rhs_of thm1) of
SOME (thm2, skel2) =>
transitive2 (Thm.transitive thm1 thm2)
(botc skel2 ctxt (Thm.rhs_of thm2))
| NONE => some)
| NONE =>
(case rewritec (prover, maxidx) ctxt t of
SOME (thm2, skel2) => transitive2 thm2
(botc skel2 ctxt (Thm.rhs_of thm2))
| NONE => NONE))
and try_botc ctxt t =
(case botc skel0 ctxt t of
SOME trec1 => trec1
| NONE => Thm.reflexive t)
and subc skel ctxt t0 =
let val Simpset (_, {congs, ...}) = simpset_of ctxt in
(case Thm.term_of t0 of
Abs (a, _, _) =>
let
val ((v, t'), ctxt') = Variable.dest_abs_cterm t0 ctxt;
val skel' = (case skel of Abs (_, _, sk) => sk | _ => skel0);
in
(case botc skel' ctxt' t' of
SOME thm => SOME (Thm.abstract_rule a v thm)
| NONE => NONE)
end
| t $ _ =>
(case t of
Const ("Pure.imp", _) $ _ => impc t0 ctxt
| Abs _ =>
let val thm = Thm.beta_conversion false t0
in
(case subc skel0 ctxt (Thm.rhs_of thm) of
NONE => SOME thm
| SOME thm' => SOME (Thm.transitive thm thm'))
end
| _ =>
let
fun appc () =
let
val (tskel, uskel) =
(case skel of
tskel $ uskel => (tskel, uskel)
| _ => (skel0, skel0));
val (ct, cu) = Thm.dest_comb t0;
in
(case botc tskel ctxt ct of
SOME thm1 =>
(case botc uskel ctxt cu of
SOME thm2 => SOME (Thm.combination thm1 thm2)
| NONE => SOME (Thm.combination thm1 (Thm.reflexive cu)))
| NONE =>
(case botc uskel ctxt cu of
SOME thm1 => SOME (Thm.combination (Thm.reflexive ct) thm1)
| NONE => NONE))
end;
val (h, ts) = strip_comb t;
in
(case cong_name h of
SOME a =>
(case Congtab.lookup (fst congs) a of
NONE => appc ()
| SOME cong =>
(*post processing: some partial applications h t1 ... tj, j <= length ts,
may be a redex. Example: map (\<lambda>x. x) = (\<lambda>xs. xs) wrt map_cong*)
(let
val thm = congc (prover ctxt) ctxt maxidx cong t0;
val t = the_default t0 (Option.map Thm.rhs_of thm);
val (cl, cr) = Thm.dest_comb t
val dVar = Var(("", 0), dummyT)
val skel =
list_comb (h, replicate (length ts) dVar)
in
(case botc skel ctxt cl of
NONE => thm
| SOME thm' =>
transitive3 thm (Thm.combination thm' (Thm.reflexive cr)))
end handle Pattern.MATCH => appc ()))
| _ => appc ())
end)
| _ => NONE)
end
and impc ct ctxt =
if mutsimp then mut_impc0 [] ct [] [] ctxt
else nonmut_impc ct ctxt
and rules_of_prem prem ctxt =
if maxidx_of_term (Thm.term_of prem) <> ~1
then
(cond_tracing ctxt (fn () =>
print_term ctxt "Cannot add premise as rewrite rule because it contains (type) unknowns:"
(Thm.term_of prem));
(([], NONE), ctxt))
else
let val (asm, ctxt') = Thm.assume_hyps prem ctxt
in ((extract_safe_rrules ctxt' asm, SOME asm), ctxt') end
and add_rrules (rrss, asms) ctxt =
(fold o fold) insert_rrule rrss ctxt |> add_prems (map_filter I asms)
and disch r prem eq =
let
val (lhs, rhs) = Thm.dest_equals (Thm.cprop_of eq);
val eq' =
Thm.implies_elim
(Thm.instantiate (TVars.empty, Vars.make3 (vA, prem) (vB, lhs) (vC, rhs))
Drule.imp_cong)
(Thm.implies_intr prem eq);
in
if not r then eq'
else
let
val (prem', concl) = Thm.dest_implies lhs;
val (prem'', _) = Thm.dest_implies rhs;
in
Thm.transitive
(Thm.transitive
(Thm.instantiate (TVars.empty, Vars.make3 (vA, prem') (vB, prem) (vC, concl))
Drule.swap_prems_eq)
eq')
(Thm.instantiate (TVars.empty, Vars.make3 (vA, prem) (vB, prem'') (vC, concl))
Drule.swap_prems_eq)
end
end
and rebuild [] _ _ _ _ eq = eq
| rebuild (prem :: prems) concl (_ :: rrss) (_ :: asms) ctxt eq =
let
val ctxt' = add_rrules (rev rrss, rev asms) ctxt;
val concl' =
Drule.mk_implies (prem, the_default concl (Option.map Thm.rhs_of eq));
val dprem = Option.map (disch false prem);
in
(case rewritec (prover, maxidx) ctxt' concl' of
NONE => rebuild prems concl' rrss asms ctxt (dprem eq)
| SOME (eq', _) =>
transitive2 (fold (disch false) prems (the (transitive3 (dprem eq) eq')))
(mut_impc0 (rev prems) (Thm.rhs_of eq') (rev rrss) (rev asms) ctxt))
end
and mut_impc0 prems concl rrss asms ctxt =
let
val prems' = strip_imp_prems concl;
val ((rrss', asms'), ctxt') = fold_map rules_of_prem prems' ctxt |>> split_list;
in
mut_impc (prems @ prems') (strip_imp_concl concl) (rrss @ rrss')
(asms @ asms') [] [] [] [] ctxt' ~1 ~1
end
and mut_impc [] concl [] [] prems' rrss' asms' eqns ctxt changed k =
transitive1 (fold (fn (eq1, prem) => fn eq2 => transitive1 eq1
(Option.map (disch false prem) eq2)) (eqns ~~ prems') NONE)
(if changed > 0 then
mut_impc (rev prems') concl (rev rrss') (rev asms')
[] [] [] [] ctxt ~1 changed
else rebuild prems' concl rrss' asms' ctxt
(botc skel0 (add_rrules (rev rrss', rev asms') ctxt) concl))
| mut_impc (prem :: prems) concl (rrs :: rrss) (asm :: asms)
prems' rrss' asms' eqns ctxt changed k =
(case (if k = 0 then NONE else botc skel0 (add_rrules
(rev rrss' @ rrss, rev asms' @ asms) ctxt) prem) of
NONE => mut_impc prems concl rrss asms (prem :: prems')
(rrs :: rrss') (asm :: asms') (NONE :: eqns) ctxt changed
(if k = 0 then 0 else k - 1)
| SOME eqn =>
let
val prem' = Thm.rhs_of eqn;
val tprems = map Thm.term_of prems;
val i = 1 + fold Integer.max (map (fn p =>
find_index (fn q => q aconv p) tprems) (Thm.hyps_of eqn)) ~1;
val ((rrs', asm'), ctxt') = rules_of_prem prem' ctxt;
in
mut_impc prems concl rrss asms (prem' :: prems')
(rrs' :: rrss') (asm' :: asms')
(SOME (fold_rev (disch true)
(take i prems)
(Drule.imp_cong_rule eqn (Thm.reflexive (Drule.list_implies
(drop i prems, concl))))) :: eqns)
ctxt' (length prems') ~1
end)
(*legacy code -- only for backwards compatibility*)
and nonmut_impc ct ctxt =
let
val (prem, conc) = Thm.dest_implies ct;
val thm1 = if simprem then botc skel0 ctxt prem else NONE;
val prem1 = the_default prem (Option.map Thm.rhs_of thm1);
val ctxt1 =
if not useprem then ctxt
else
let val ((rrs, asm), ctxt') = rules_of_prem prem1 ctxt
in add_rrules ([rrs], [asm]) ctxt' end;
in
(case botc skel0 ctxt1 conc of
NONE =>
(case thm1 of
NONE => NONE
| SOME thm1' => SOME (Drule.imp_cong_rule thm1' (Thm.reflexive conc)))
| SOME thm2 =>
let val thm2' = disch false prem1 thm2 in
(case thm1 of
NONE => SOME thm2'
| SOME thm1' =>
SOME (Thm.transitive (Drule.imp_cong_rule thm1' (Thm.reflexive conc)) thm2'))
end)
end;
in try_botc end;
(* Meta-rewriting: rewrites t to u and returns the theorem t \<equiv> u *)
(*
Parameters:
mode = (simplify A,
use A in simplifying B,
use prems of B (if B is again a meta-impl.) to simplify A)
when simplifying A \<Longrightarrow> B
prover: how to solve premises in conditional rewrites and congruences
*)
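(*Illustrative correspondence with the entry points defined below and in
  simplifier.ML:
    (false, false, false)  -- rewrite / simp_tac / simplify
    (true, false, false)   -- full_rewrite / full_simp_tac / full_simplify
    (false, true, false)   -- asm_rewrite / asm_simp_tac / asm_simplify
    (true, true, true)     -- asm_full_rewrite / asm_full_simp_tac / asm_full_simplify*)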
fun rewrite_cterm mode prover raw_ctxt raw_ct =
let
val thy = Proof_Context.theory_of raw_ctxt;
val ct = raw_ct
|> Thm.transfer_cterm thy
|> Thm.adjust_maxidx_cterm ~1;
val maxidx = Thm.maxidx_of_cterm ct;
val ctxt =
raw_ctxt
|> Variable.set_body true
|> Context_Position.set_visible false
|> inc_simp_depth
|> (fn ctxt => trace_invoke {depth = simp_depth ctxt, term = Thm.term_of ct} ctxt);
val _ =
cond_tracing ctxt (fn () =>
print_term ctxt "SIMPLIFIER INVOKED ON THE FOLLOWING TERM:" (Thm.term_of ct));
in
ct
|> bottomc (mode, Option.map (Drule.flexflex_unique (SOME ctxt)) oo prover, maxidx) ctxt
|> Thm.solve_constraints
end;
val simple_prover =
SINGLE o (fn ctxt => ALLGOALS (resolve_tac ctxt (prems_of ctxt)));
fun rewrite _ _ [] = Thm.reflexive
| rewrite ctxt full thms =
rewrite_cterm (full, false, false) simple_prover (init_simpset thms ctxt);
fun rewrite_rule ctxt = Conv.fconv_rule o rewrite ctxt true;
(*simple term rewriting -- no proof*)
fun rewrite_term thy rules procs =
Pattern.rewrite_term thy (map decomp_simp' rules) procs;
fun rewrite_thm mode prover ctxt = Conv.fconv_rule (rewrite_cterm mode prover ctxt);
(*Rewrite the subgoals of a proof state (represented by a theorem)*)
fun rewrite_goals_rule ctxt thms th =
Conv.fconv_rule (Conv.prems_conv ~1 (rewrite_cterm (true, true, true) simple_prover
(init_simpset thms ctxt))) th;
(** meta-rewriting tactics **)
(*Rewrite all subgoals*)
fun rewrite_goals_tac ctxt defs = PRIMITIVE (rewrite_goals_rule ctxt defs);
(*Rewrite one subgoal*)
fun generic_rewrite_goal_tac mode prover_tac ctxt i thm =
if 0 < i andalso i <= Thm.nprems_of thm then
Seq.single (Conv.gconv_rule (rewrite_cterm mode (SINGLE o prover_tac) ctxt) i thm)
else Seq.empty;
fun rewrite_goal_tac ctxt thms =
generic_rewrite_goal_tac (true, false, false) (K no_tac) (init_simpset thms ctxt);
(*Prunes all redundant parameters from the proof state by rewriting.*)
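(*Drule.triv_forall_equality: (\<And>x. PROP V) \<equiv> PROP V -- parameters that do not
  occur in the subgoal body are thereby eliminated*)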
fun prune_params_tac ctxt = rewrite_goals_tac ctxt [Drule.triv_forall_equality];
(* for folding definitions, handling critical pairs *)
(*The depth of nesting in a term*)
fun term_depth (Abs (_, _, t)) = 1 + term_depth t
| term_depth (f $ t) = 1 + Int.max (term_depth f, term_depth t)
| term_depth _ = 0;
val lhs_of_thm = #1 o Logic.dest_equals o Thm.prop_of;
(*folding should handle critical pairs! E.g. K \<equiv> Inl 0, S \<equiv> Inr (Inl 0)
Returns longest lhs first to avoid folding its subexpressions.*)
fun sort_lhs_depths defs =
let val keylist = AList.make (term_depth o lhs_of_thm) defs
val keys = sort_distinct (rev_order o int_ord) (map #2 keylist)
in map (AList.find (op =) keylist) keys end;
val rev_defs = sort_lhs_depths o map Thm.symmetric;
fun fold_rule ctxt defs = fold (rewrite_rule ctxt) (rev_defs defs);
fun fold_goals_tac ctxt defs = EVERY (map (rewrite_goals_tac ctxt) (rev_defs defs));
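(*Worked example for the critical-pair comment above: with K \<equiv> Inl 0 and
  S \<equiv> Inr (Inl 0), rev_defs sorts the reversed equations by decreasing lhs depth,
  so Inr (Inl 0) is folded to S before Inl 0 is folded to K; in the opposite
  order the subterm Inl 0 inside Inr (Inl 0) would already have become K,
  leaving Inr K, which no longer matches the definition of S.*)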
(* HHF normal form: \<And> before \<Longrightarrow>, outermost \<And> generalized *)
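(*the characteristic rule (Drule.norm_hhf_eq) rewrites
    (PROP A \<Longrightarrow> (\<And>x. PROP B x))  to  (\<And>x. PROP A \<Longrightarrow> PROP B x)*)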
local
-fun gen_norm_hhf ss ctxt0 th0 =
+fun gen_norm_hhf protect ss ctxt0 th0 =
let
val (ctxt, th) = Thm.join_transfer_context (ctxt0, th0);
val th' =
- if Drule.is_norm_hhf (Thm.prop_of th) then th
+ if Drule.is_norm_hhf protect (Thm.prop_of th) then th
else
Conv.fconv_rule (rewrite_cterm (true, false, false) (K (K NONE)) (put_simpset ss ctxt)) th;
in th' |> Thm.adjust_maxidx_thm ~1 |> Variable.gen_all ctxt end;
val hhf_ss =
Context.the_local_context ()
|> init_simpset Drule.norm_hhf_eqs
|> simpset_of;
val hhf_protect_ss =
Context.the_local_context ()
|> init_simpset Drule.norm_hhf_eqs
|> add_eqcong Drule.protect_cong
|> simpset_of;
in
-val norm_hhf = gen_norm_hhf hhf_ss;
-val norm_hhf_protect = gen_norm_hhf hhf_protect_ss;
+val norm_hhf = gen_norm_hhf {protect = false} hhf_ss;
+val norm_hhf_protect = gen_norm_hhf {protect = true} hhf_protect_ss;
end;
end;
structure Basic_Meta_Simplifier: BASIC_RAW_SIMPLIFIER = Raw_Simplifier;
open Basic_Meta_Simplifier;
diff --git a/src/Pure/simplifier.ML b/src/Pure/simplifier.ML
--- a/src/Pure/simplifier.ML
+++ b/src/Pure/simplifier.ML
@@ -1,433 +1,434 @@
(* Title: Pure/simplifier.ML
Author: Tobias Nipkow and Markus Wenzel, TU Muenchen
Generic simplifier, suitable for most logics (see also
raw_simplifier.ML for the actual meta-level rewriting engine).
*)
signature BASIC_SIMPLIFIER =
sig
include BASIC_RAW_SIMPLIFIER
val simp_tac: Proof.context -> int -> tactic
val asm_simp_tac: Proof.context -> int -> tactic
val full_simp_tac: Proof.context -> int -> tactic
val asm_lr_simp_tac: Proof.context -> int -> tactic
val asm_full_simp_tac: Proof.context -> int -> tactic
val safe_simp_tac: Proof.context -> int -> tactic
val safe_asm_simp_tac: Proof.context -> int -> tactic
val safe_full_simp_tac: Proof.context -> int -> tactic
val safe_asm_lr_simp_tac: Proof.context -> int -> tactic
val safe_asm_full_simp_tac: Proof.context -> int -> tactic
val simplify: Proof.context -> thm -> thm
val asm_simplify: Proof.context -> thm -> thm
val full_simplify: Proof.context -> thm -> thm
val asm_lr_simplify: Proof.context -> thm -> thm
val asm_full_simplify: Proof.context -> thm -> thm
end;
signature SIMPLIFIER =
sig
include BASIC_SIMPLIFIER
val map_ss: (Proof.context -> Proof.context) -> Context.generic -> Context.generic
val attrib: (thm -> Proof.context -> Proof.context) -> attribute
val simp_add: attribute
val simp_del: attribute
val simp_flip: attribute
val cong_add: attribute
val cong_del: attribute
val check_simproc: Proof.context -> xstring * Position.T -> string
val the_simproc: Proof.context -> string -> simproc
type 'a simproc_spec = {lhss: 'a list, proc: morphism -> Proof.context -> cterm -> thm option}
val make_simproc: Proof.context -> string -> term simproc_spec -> simproc
val define_simproc: binding -> term simproc_spec -> local_theory -> local_theory
val define_simproc_cmd: binding -> string simproc_spec -> local_theory -> local_theory
val pretty_simpset: bool -> Proof.context -> Pretty.T
val default_mk_sym: Proof.context -> thm -> thm option
val prems_of: Proof.context -> thm list
val add_simp: thm -> Proof.context -> Proof.context
val del_simp: thm -> Proof.context -> Proof.context
val init_simpset: thm list -> Proof.context -> Proof.context
val add_eqcong: thm -> Proof.context -> Proof.context
val del_eqcong: thm -> Proof.context -> Proof.context
val add_cong: thm -> Proof.context -> Proof.context
val del_cong: thm -> Proof.context -> Proof.context
val add_prems: thm list -> Proof.context -> Proof.context
val mksimps: Proof.context -> thm -> thm list
val set_mksimps: (Proof.context -> thm -> thm list) -> Proof.context -> Proof.context
val set_mkcong: (Proof.context -> thm -> thm) -> Proof.context -> Proof.context
val set_mksym: (Proof.context -> thm -> thm option) -> Proof.context -> Proof.context
val set_mkeqTrue: (Proof.context -> thm -> thm option) -> Proof.context -> Proof.context
val set_term_ord: term ord -> Proof.context -> Proof.context
val set_subgoaler: (Proof.context -> int -> tactic) -> Proof.context -> Proof.context
type trace_ops
val set_trace_ops: trace_ops -> theory -> theory
val rewrite: Proof.context -> conv
val asm_rewrite: Proof.context -> conv
val full_rewrite: Proof.context -> conv
val asm_lr_rewrite: Proof.context -> conv
val asm_full_rewrite: Proof.context -> conv
val cong_modifiers: Method.modifier parser list
val simp_modifiers': Method.modifier parser list
val simp_modifiers: Method.modifier parser list
val method_setup: Method.modifier parser list -> theory -> theory
val unsafe_solver_tac: Proof.context -> int -> tactic
val unsafe_solver: solver
val safe_solver_tac: Proof.context -> int -> tactic
val safe_solver: solver
end;
structure Simplifier: SIMPLIFIER =
struct
open Raw_Simplifier;
(** declarations **)
(* attributes *)
fun attrib f = Thm.declaration_attribute (map_ss o f);
val simp_add = attrib add_simp;
val simp_del = attrib del_simp;
val simp_flip = attrib flip_simp;
val cong_add = attrib add_cong;
val cong_del = attrib del_cong;
(** named simprocs **)
structure Simprocs = Generic_Data
(
type T = simproc Name_Space.table;
val empty : T = Name_Space.empty_table "simproc";
fun merge data : T = Name_Space.merge_tables data;
);
(* get simprocs *)
val get_simprocs = Simprocs.get o Context.Proof;
fun check_simproc ctxt = Name_Space.check (Context.Proof ctxt) (get_simprocs ctxt) #> #1;
val the_simproc = Name_Space.get o get_simprocs;
val _ = Theory.setup
(ML_Antiquotation.value_embedded \<^binding>\<open>simproc\<close>
(Args.context -- Scan.lift Parse.embedded_position
>> (fn (ctxt, name) =>
"Simplifier.the_simproc ML_context " ^ ML_Syntax.print_string (check_simproc ctxt name))));
(* define simprocs *)
type 'a simproc_spec = {lhss: 'a list, proc: morphism -> Proof.context -> cterm -> thm option};
fun make_simproc ctxt name {lhss, proc} =
let
val ctxt' = fold Proof_Context.augment lhss ctxt;
val lhss' = Variable.export_terms ctxt' ctxt lhss;
in
- cert_simproc (Proof_Context.theory_of ctxt) name {lhss = lhss', proc = proc}
+ cert_simproc (Proof_Context.theory_of ctxt) name
+ {lhss = lhss', proc = Morphism.entity proc}
end;
local
fun def_simproc prep b {lhss, proc} lthy =
let
val simproc =
make_simproc lthy (Local_Theory.full_name lthy b) {lhss = prep lthy lhss, proc = proc};
in
- lthy |> Local_Theory.declaration {syntax = false, pervasive = false} (fn phi => fn context =>
- let
- val b' = Morphism.binding phi b;
- val simproc' = transform_simproc phi simproc;
- in
- context
- |> Simprocs.map (#2 o Name_Space.define context true (b', simproc'))
- |> map_ss (fn ctxt => ctxt addsimprocs [simproc'])
- end)
+ lthy |> Local_Theory.declaration {syntax = false, pervasive = false, pos = Binding.pos_of b}
+ (fn phi => fn context =>
+ let
+ val b' = Morphism.binding phi b;
+ val simproc' = transform_simproc phi simproc;
+ in
+ context
+ |> Simprocs.map (#2 o Name_Space.define context true (b', simproc'))
+ |> map_ss (fn ctxt => ctxt addsimprocs [simproc'])
+ end)
end;
in
val define_simproc = def_simproc Syntax.check_terms;
val define_simproc_cmd = def_simproc Syntax.read_terms;
end;
(** congruence rule to protect foundational terms of local definitions **)
local
-fun make_cong ctxt = Thm.close_derivation \<^here> o Thm.reflexive
- o Thm.cterm_of ctxt o Logic.varify_global o list_comb;
-
-fun add_cong (const_binding, (const, target_params)) gthy =
- if null target_params
- then gthy
+fun add_foundation_cong (binding, (const, target_params)) gthy =
+ if null target_params then gthy
else
let
- val cong = make_cong (Context.proof_of gthy) (const, target_params)
- val cong_binding = Binding.qualify_name true const_binding "cong"
+ val thy = Context.theory_of gthy;
+ val cong =
+ list_comb (const, target_params)
+ |> Logic.varify_global
+ |> Thm.global_cterm_of thy
+ |> Thm.reflexive
+ |> Thm.close_derivation \<^here>;
+ val cong_binding = Binding.qualify_name true binding "cong";
in
gthy
- |> Attrib.generic_notes Thm.theoremK
- [((cong_binding, []), [([cong], [])])]
- |> snd
+ |> Attrib.generic_notes Thm.theoremK [((cong_binding, []), [([cong], [])])]
+ |> #2
end;
-in
+val _ = Theory.setup (Generic_Target.add_foundation_interpretation add_foundation_cong);
-val _ = Theory.setup (Generic_Target.add_foundation_interpretation add_cong);
-
-end;
+in end;
(** pretty_simpset **)
fun pretty_simpset verbose ctxt =
let
val pretty_term = Syntax.pretty_term ctxt;
val pretty_thm = Thm.pretty_thm ctxt;
val pretty_thm_item = Thm.pretty_thm_item ctxt;
fun pretty_simproc (name, lhss) =
Pretty.block
(Pretty.mark_str name :: Pretty.str ":" :: Pretty.fbrk ::
Pretty.fbreaks (map (Pretty.item o single o pretty_term) lhss));
fun pretty_cong_name (const, name) =
pretty_term ((if const then Const else Free) (name, dummyT));
fun pretty_cong (name, thm) =
Pretty.block [pretty_cong_name name, Pretty.str ":", Pretty.brk 1, pretty_thm thm];
val {simps, procs, congs, loopers, unsafe_solvers, safe_solvers, ...} =
dest_ss (simpset_of ctxt);
val simprocs =
Name_Space.markup_entries verbose ctxt (Name_Space.space_of_table (get_simprocs ctxt)) procs;
in
[Pretty.big_list "simplification rules:" (map (pretty_thm_item o #2) simps),
Pretty.big_list "simplification procedures:" (map pretty_simproc simprocs),
Pretty.big_list "congruences:" (map pretty_cong congs),
Pretty.strs ("loopers:" :: map quote loopers),
Pretty.strs ("unsafe solvers:" :: map quote unsafe_solvers),
Pretty.strs ("safe solvers:" :: map quote safe_solvers)]
|> Pretty.chunks
end;
(** simplification tactics and rules **)
fun solve_all_tac solvers ctxt =
let
val subgoal_tac = Raw_Simplifier.subgoal_tac (Raw_Simplifier.set_solvers solvers ctxt);
val solve_tac = subgoal_tac THEN_ALL_NEW (K no_tac);
in DEPTH_SOLVE (solve_tac 1) end;
(*NOTE: may instantiate unknowns that appear also in other subgoals*)
fun generic_simp_tac safe mode ctxt =
let
val loop_tac = Raw_Simplifier.loop_tac ctxt;
val (unsafe_solvers, solvers) = Raw_Simplifier.solvers ctxt;
val solve_tac = FIRST' (map (Raw_Simplifier.solver ctxt)
(rev (if safe then solvers else unsafe_solvers)));
fun simp_loop_tac i =
Raw_Simplifier.generic_rewrite_goal_tac mode (solve_all_tac unsafe_solvers) ctxt i THEN
(solve_tac i ORELSE TRY ((loop_tac THEN_ALL_NEW simp_loop_tac) i));
in PREFER_GOAL (simp_loop_tac 1) end;
local
fun simp rew mode ctxt thm =
let
val (unsafe_solvers, _) = Raw_Simplifier.solvers ctxt;
val tacf = solve_all_tac (rev unsafe_solvers);
fun prover s th = Option.map #1 (Seq.pull (tacf s th));
in rew mode prover ctxt thm end;
in
val simp_thm = simp Raw_Simplifier.rewrite_thm;
val simp_cterm = simp Raw_Simplifier.rewrite_cterm;
end;
(* tactics *)
val simp_tac = generic_simp_tac false (false, false, false);
val asm_simp_tac = generic_simp_tac false (false, true, false);
val full_simp_tac = generic_simp_tac false (true, false, false);
val asm_lr_simp_tac = generic_simp_tac false (true, true, false);
val asm_full_simp_tac = generic_simp_tac false (true, true, true);
(*not totally safe: may instantiate unknowns that appear also in other subgoals*)
val safe_simp_tac = generic_simp_tac true (false, false, false);
val safe_asm_simp_tac = generic_simp_tac true (false, true, false);
val safe_full_simp_tac = generic_simp_tac true (true, false, false);
val safe_asm_lr_simp_tac = generic_simp_tac true (true, true, false);
val safe_asm_full_simp_tac = generic_simp_tac true (true, true, true);
(* conversions *)
val simplify = simp_thm (false, false, false);
val asm_simplify = simp_thm (false, true, false);
val full_simplify = simp_thm (true, false, false);
val asm_lr_simplify = simp_thm (true, true, false);
val asm_full_simplify = simp_thm (true, true, true);
val rewrite = simp_cterm (false, false, false);
val asm_rewrite = simp_cterm (false, true, false);
val full_rewrite = simp_cterm (true, false, false);
val asm_lr_rewrite = simp_cterm (true, true, false);
val asm_full_rewrite = simp_cterm (true, true, true);
(** concrete syntax of attributes **)
(* add / del *)
val simpN = "simp";
val flipN = "flip"
val congN = "cong";
val onlyN = "only";
val no_asmN = "no_asm";
val no_asm_useN = "no_asm_use";
val no_asm_simpN = "no_asm_simp";
val asm_lrN = "asm_lr";
(* simprocs *)
local
val add_del =
(Args.del -- Args.colon >> K (op delsimprocs) ||
Scan.option (Args.add -- Args.colon) >> K (op addsimprocs))
- >> (fn f => fn simproc => fn phi => Thm.declaration_attribute
- (K (Raw_Simplifier.map_ss (fn ctxt => f (ctxt, [transform_simproc phi simproc])))));
+ >> (fn f => fn simproc => Morphism.entity (fn phi => Thm.declaration_attribute
+ (K (Raw_Simplifier.map_ss (fn ctxt => f (ctxt, [transform_simproc phi simproc]))))));
in
val simproc_att =
(Args.context -- Scan.lift add_del) :|-- (fn (ctxt, decl) =>
Scan.repeat1 (Scan.lift (Args.named_attribute (decl o the_simproc ctxt o check_simproc ctxt))))
>> (fn atts => Thm.declaration_attribute (fn th =>
fold (fn att => Thm.attribute_declaration (Morphism.form att) th) atts));
end;
(* conversions *)
local
fun conv_mode x =
((Args.parens (Args.$$$ no_asmN) >> K simplify ||
Args.parens (Args.$$$ no_asm_simpN) >> K asm_simplify ||
Args.parens (Args.$$$ no_asm_useN) >> K full_simplify ||
Scan.succeed asm_full_simplify) |> Scan.lift) x;
in
val simplified = conv_mode -- Attrib.thms >>
(fn (f, ths) => Thm.rule_attribute ths (fn context =>
f ((if null ths then I else Raw_Simplifier.clear_simpset)
(Context.proof_of context) addsimps ths)));
end;
(* setup attributes *)
val _ = Theory.setup
(Attrib.setup \<^binding>\<open>simp\<close> (Attrib.add_del simp_add simp_del)
"declaration of Simplifier rewrite rule" #>
Attrib.setup \<^binding>\<open>cong\<close> (Attrib.add_del cong_add cong_del)
"declaration of Simplifier congruence rule" #>
Attrib.setup \<^binding>\<open>simproc\<close> simproc_att
"declaration of simplification procedures" #>
Attrib.setup \<^binding>\<open>simplified\<close> simplified "simplified rule");
(** method syntax **)
val cong_modifiers =
[Args.$$$ congN -- Args.colon >> K (Method.modifier cong_add \<^here>),
Args.$$$ congN -- Args.add -- Args.colon >> K (Method.modifier cong_add \<^here>),
Args.$$$ congN -- Args.del -- Args.colon >> K (Method.modifier cong_del \<^here>)];
val simp_modifiers =
[Args.$$$ simpN -- Args.colon >> K (Method.modifier simp_add \<^here>),
Args.$$$ simpN -- Args.add -- Args.colon >> K (Method.modifier simp_add \<^here>),
Args.$$$ simpN -- Args.del -- Args.colon >> K (Method.modifier simp_del \<^here>),
Args.$$$ simpN -- Args.$$$ flipN -- Args.colon >> K (Method.modifier simp_flip \<^here>),
Args.$$$ simpN -- Args.$$$ onlyN -- Args.colon >>
K {init = Raw_Simplifier.clear_simpset, attribute = simp_add, pos = \<^here>}]
@ cong_modifiers;
val simp_modifiers' =
[Args.add -- Args.colon >> K (Method.modifier simp_add \<^here>),
Args.del -- Args.colon >> K (Method.modifier simp_del \<^here>),
Args.$$$ flipN -- Args.colon >> K (Method.modifier simp_flip \<^here>),
Args.$$$ onlyN -- Args.colon >>
K {init = Raw_Simplifier.clear_simpset, attribute = simp_add, pos = \<^here>}]
@ cong_modifiers;
val simp_options =
(Args.parens (Args.$$$ no_asmN) >> K simp_tac ||
Args.parens (Args.$$$ no_asm_simpN) >> K asm_simp_tac ||
Args.parens (Args.$$$ no_asm_useN) >> K full_simp_tac ||
Args.parens (Args.$$$ asm_lrN) >> K asm_lr_simp_tac ||
Scan.succeed asm_full_simp_tac);
fun simp_method more_mods meth =
Scan.lift simp_options --|
Method.sections (more_mods @ simp_modifiers') >>
(fn tac => fn ctxt => METHOD (fn facts => meth ctxt tac facts));
(** setup **)
fun method_setup more_mods =
Method.setup \<^binding>\<open>simp\<close>
(simp_method more_mods (fn ctxt => fn tac => fn facts =>
HEADGOAL (Method.insert_tac ctxt facts THEN'
(CHANGED_PROP oo tac) ctxt)))
"simplification" #>
Method.setup \<^binding>\<open>simp_all\<close>
(simp_method more_mods (fn ctxt => fn tac => fn facts =>
ALLGOALS (Method.insert_tac ctxt facts) THEN
(CHANGED_PROP o PARALLEL_ALLGOALS o tac) ctxt))
"simplification (all goals)";
fun unsafe_solver_tac ctxt =
FIRST' [resolve_tac ctxt (Drule.reflexive_thm :: Raw_Simplifier.prems_of ctxt), assume_tac ctxt];
val unsafe_solver = mk_solver "Pure unsafe" unsafe_solver_tac;
(*no premature instantiation of variables during simplification*)
fun safe_solver_tac ctxt =
FIRST' [match_tac ctxt (Drule.reflexive_thm :: Raw_Simplifier.prems_of ctxt), eq_assume_tac];
val safe_solver = mk_solver "Pure safe" safe_solver_tac;
val _ =
Theory.setup
(method_setup [] #> Context.theory_map (map_ss (fn ctxt =>
empty_simpset ctxt
setSSolver safe_solver
setSolver unsafe_solver
|> set_subgoaler asm_simp_tac)));
end;
structure Basic_Simplifier: BASIC_SIMPLIFIER = Simplifier;
open Basic_Simplifier;
diff --git a/src/Pure/variable.ML b/src/Pure/variable.ML
--- a/src/Pure/variable.ML
+++ b/src/Pure/variable.ML
@@ -1,789 +1,788 @@
(* Title: Pure/variable.ML
Author: Makarius
Fixed type/term variables and polymorphic term abbreviations.
*)
signature VARIABLE =
sig
val names_of: Proof.context -> Name.context
val binds_of: Proof.context -> (typ * term) Vartab.table
val maxidx_of: Proof.context -> int
val constraints_of: Proof.context -> typ Vartab.table * sort Vartab.table
val is_declared: Proof.context -> string -> bool
val check_name: binding -> string
val default_type: Proof.context -> string -> typ option
val def_type: Proof.context -> bool -> indexname -> typ option
val def_sort: Proof.context -> indexname -> sort option
val declare_maxidx: int -> Proof.context -> Proof.context
val declare_names: term -> Proof.context -> Proof.context
val declare_constraints: term -> Proof.context -> Proof.context
val declare_internal: term -> Proof.context -> Proof.context
val declare_term: term -> Proof.context -> Proof.context
val declare_typ: typ -> Proof.context -> Proof.context
val declare_prf: Proofterm.proof -> Proof.context -> Proof.context
val declare_thm: thm -> Proof.context -> Proof.context
val variant_frees: Proof.context -> term list -> (string * 'a) list -> (string * 'a) list
val bind_term: indexname * term -> Proof.context -> Proof.context
val unbind_term: indexname -> Proof.context -> Proof.context
val maybe_bind_term: indexname * term option -> Proof.context -> Proof.context
val expand_binds: Proof.context -> term -> term
val lookup_const: Proof.context -> string -> string option
val is_const: Proof.context -> string -> bool
val declare_const: string * string -> Proof.context -> Proof.context
val next_bound: string * typ -> Proof.context -> term * Proof.context
val revert_bounds: Proof.context -> term -> term
val is_body: Proof.context -> bool
val set_body: bool -> Proof.context -> Proof.context
val restore_body: Proof.context -> Proof.context -> Proof.context
val improper_fixes: Proof.context -> Proof.context
val restore_proper_fixes: Proof.context -> Proof.context -> Proof.context
val is_improper: Proof.context -> string -> bool
val is_fixed: Proof.context -> string -> bool
val is_newly_fixed: Proof.context -> Proof.context -> string -> bool
val fixed_ord: Proof.context -> string ord
val intern_fixed: Proof.context -> string -> string
val lookup_fixed: Proof.context -> string -> string option
val revert_fixed: Proof.context -> string -> string
val markup_fixed: Proof.context -> string -> Markup.T
val markup: Proof.context -> string -> Markup.T
val markup_entity_def: Proof.context -> string -> Markup.T
val dest_fixes: Proof.context -> (string * string) list
val add_fixed_names: Proof.context -> term -> string list -> string list
val add_fixed: Proof.context -> term -> (string * typ) list -> (string * typ) list
val add_newly_fixed: Proof.context -> Proof.context ->
term -> (string * typ) list -> (string * typ) list
val add_free_names: Proof.context -> term -> string list -> string list
val add_frees: Proof.context -> term -> (string * typ) list -> (string * typ) list
val add_fixes_binding: binding list -> Proof.context -> string list * Proof.context
val add_fixes: string list -> Proof.context -> string list * Proof.context
val add_fixes_direct: string list -> Proof.context -> Proof.context
val add_fixes_implicit: term -> Proof.context -> Proof.context
val fix_dummy_patterns: term -> Proof.context -> term * Proof.context
val variant_fixes: string list -> Proof.context -> string list * Proof.context
val gen_all: Proof.context -> thm -> thm
val export_terms: Proof.context -> Proof.context -> term list -> term list
val exportT_terms: Proof.context -> Proof.context -> term list -> term list
val exportT: Proof.context -> Proof.context -> thm list -> thm list
val export_prf: Proof.context -> Proof.context -> Proofterm.proof -> Proofterm.proof
val export: Proof.context -> Proof.context -> thm list -> thm list
val export_morphism: Proof.context -> Proof.context -> morphism
val invent_types: sort list -> Proof.context -> (string * sort) list * Proof.context
val importT_inst: term list -> Proof.context -> typ TVars.table * Proof.context
val import_inst: bool -> term list -> Proof.context ->
(typ TVars.table * term Vars.table) * Proof.context
val importT_terms: term list -> Proof.context -> term list * Proof.context
val import_terms: bool -> term list -> Proof.context -> term list * Proof.context
val importT: thm list -> Proof.context -> (ctyp TVars.table * thm list) * Proof.context
val import_prf: bool -> Proofterm.proof -> Proof.context -> Proofterm.proof * Proof.context
val import: bool -> thm list -> Proof.context ->
((ctyp TVars.table * cterm Vars.table) * thm list) * Proof.context
val import_vars: Proof.context -> thm -> thm
val tradeT: (Proof.context -> thm list -> thm list) -> Proof.context -> thm list -> thm list
val trade: (Proof.context -> thm list -> thm list) -> Proof.context -> thm list -> thm list
val dest_abs: term -> Proof.context -> ((string * typ) * term) * Proof.context
val dest_abs_cterm: cterm -> Proof.context -> (cterm * cterm) * Proof.context
val dest_all: term -> Proof.context -> ((string * typ) * term) * Proof.context
val dest_all_cterm: cterm -> Proof.context -> (cterm * cterm) * Proof.context
val is_bound_focus: Proof.context -> bool
val set_bound_focus: bool -> Proof.context -> Proof.context
val restore_bound_focus: Proof.context -> Proof.context -> Proof.context
val focus_params: binding list option -> term -> Proof.context ->
(string list * (string * typ) list) * Proof.context
val focus: binding list option -> term -> Proof.context ->
((string * (string * typ)) list * term) * Proof.context
val focus_cterm: binding list option -> cterm -> Proof.context ->
((string * cterm) list * cterm) * Proof.context
val focus_subgoal: binding list option -> int -> thm -> Proof.context ->
((string * cterm) list * cterm) * Proof.context
val warn_extra_tfrees: Proof.context -> Proof.context -> unit
val polymorphic_types: Proof.context -> term list -> (indexname * sort) list * term list
val polymorphic: Proof.context -> term list -> term list
end;
structure Variable: VARIABLE =
struct
(** local context data **)
type fixes = (string * bool) Name_Space.table;
val empty_fixes: fixes = Name_Space.empty_table Markup.fixedN;
datatype data = Data of
{names: Name.context, (*type/term variable names*)
consts: string Symtab.table, (*consts within the local scope*)
bounds: int * ((string * typ) * string) list, (*next index, internal name, type, external name*)
fixes: fixes, (*term fixes -- global name space, intern ~> extern*)
binds: (typ * term) Vartab.table, (*term bindings*)
type_occs: string list Symtab.table, (*type variables -- possibly within term variables*)
maxidx: int, (*maximum var index*)
constraints:
typ Vartab.table * (*type constraints*)
sort Vartab.table}; (*default sorts*)
fun make_data (names, consts, bounds, fixes, binds, type_occs, maxidx, constraints) =
Data {names = names, consts = consts, bounds = bounds, fixes = fixes, binds = binds,
type_occs = type_occs, maxidx = maxidx, constraints = constraints};
val empty_data =
make_data (Name.context, Symtab.empty, (0, []), empty_fixes, Vartab.empty,
Symtab.empty, ~1, (Vartab.empty, Vartab.empty));
structure Data = Proof_Data
(
type T = data;
fun init _ = empty_data;
);
fun map_data f =
Data.map (fn Data {names, consts, bounds, fixes, binds, type_occs, maxidx, constraints} =>
make_data (f (names, consts, bounds, fixes, binds, type_occs, maxidx, constraints)));
fun map_names f =
map_data (fn (names, consts, bounds, fixes, binds, type_occs, maxidx, constraints) =>
(f names, consts, bounds, fixes, binds, type_occs, maxidx, constraints));
fun map_consts f =
map_data (fn (names, consts, bounds, fixes, binds, type_occs, maxidx, constraints) =>
(names, f consts, bounds, fixes, binds, type_occs, maxidx, constraints));
fun map_bounds f =
map_data (fn (names, consts, bounds, fixes, binds, type_occs, maxidx, constraints) =>
(names, consts, f bounds, fixes, binds, type_occs, maxidx, constraints));
fun map_fixes f =
map_data (fn (names, consts, bounds, fixes, binds, type_occs, maxidx, constraints) =>
(names, consts, bounds, f fixes, binds, type_occs, maxidx, constraints));
fun map_binds f =
map_data (fn (names, consts, bounds, fixes, binds, type_occs, maxidx, constraints) =>
(names, consts, bounds, fixes, f binds, type_occs, maxidx, constraints));
fun map_type_occs f =
map_data (fn (names, consts, bounds, fixes, binds, type_occs, maxidx, constraints) =>
(names, consts, bounds, fixes, binds, f type_occs, maxidx, constraints));
fun map_maxidx f =
map_data (fn (names, consts, bounds, fixes, binds, type_occs, maxidx, constraints) =>
(names, consts, bounds, fixes, binds, type_occs, f maxidx, constraints));
fun map_constraints f =
map_data (fn (names, consts, bounds, fixes, binds, type_occs, maxidx, constraints) =>
(names, consts, bounds, fixes, binds, type_occs, maxidx, f constraints));
fun rep_data ctxt = Data.get ctxt |> (fn Data rep => rep);
val names_of = #names o rep_data;
val fixes_of = #fixes o rep_data;
val fixes_space = Name_Space.space_of_table o fixes_of;
val binds_of = #binds o rep_data;
val type_occs_of = #type_occs o rep_data;
val maxidx_of = #maxidx o rep_data;
val constraints_of = #constraints o rep_data;
val is_declared = Name.is_declared o names_of;
val check_name = Name_Space.base_name o tap Binding.check;
(** declarations **)
(* default sorts and types *)
fun default_type ctxt x = Vartab.lookup (#1 (constraints_of ctxt)) (x, ~1);
fun def_type ctxt pattern xi =
let val {binds, constraints = (types, _), ...} = rep_data ctxt in
(case Vartab.lookup types xi of
NONE =>
if pattern then NONE
else Vartab.lookup binds xi |> Option.map (Type.mark_polymorphic o #1)
| some => some)
end;
val def_sort = Vartab.lookup o #2 o constraints_of;
(* maxidx *)
val declare_maxidx = map_maxidx o Integer.max;
(* names *)
fun declare_type_names t =
map_names (fold_types (fold_atyps Term.declare_typ_names) t) #>
map_maxidx (fold_types Term.maxidx_typ t);
fun declare_names t =
declare_type_names t #>
map_names (fold_aterms Term.declare_term_frees t) #>
map_maxidx (Term.maxidx_term t);
(* type occurrences *)
fun decl_type_occsT T = fold_atyps (fn TFree (a, _) => Symtab.default (a, []) | _ => I) T;
val decl_type_occs = fold_term_types
(fn Free (x, _) => fold_atyps (fn TFree (a, _) => Symtab.insert_list (op =) (a, x) | _ => I)
| _ => decl_type_occsT);
val declare_type_occsT = map_type_occs o fold_types decl_type_occsT;
val declare_type_occs = map_type_occs o decl_type_occs;
(* constraints *)
fun constrain_tvar (xi, raw_S) =
let val S = #2 (Term_Position.decode_positionS raw_S)
in if S = dummyS then Vartab.delete_safe xi else Vartab.update (xi, S) end;
fun declare_constraints t = map_constraints (fn (types, sorts) =>
let
val types' = fold_aterms
(fn Free (x, T) => Vartab.update ((x, ~1), T)
| Var v => Vartab.update v
| _ => I) t types;
val sorts' = (fold_types o fold_atyps)
(fn TFree (x, S) => constrain_tvar ((x, ~1), S)
| TVar v => constrain_tvar v
| _ => I) t sorts;
in (types', sorts') end)
#> declare_type_occsT t
#> declare_type_names t;
(* common declarations *)
fun declare_internal t =
declare_names t #>
declare_type_occs t #>
Thm.declare_term_sorts t;
fun declare_term t =
declare_internal t #>
declare_constraints t;
val declare_typ = declare_term o Logic.mk_type;
val declare_prf =
Proofterm.fold_proof_terms_types declare_internal (declare_internal o Logic.mk_type);
val declare_thm = Thm.fold_terms {hyps = true} declare_internal;
(* renaming term/type frees *)
fun variant_frees ctxt ts frees =
let
val names = names_of (fold declare_names ts ctxt);
val xs = fst (fold_map Name.variant (map #1 frees) names);
in xs ~~ map snd frees end;
(** term bindings **)
fun bind_term ((x, i), t) =
let
val u = Term.close_schematic_term t;
val U = Term.fastype_of u;
in declare_term u #> map_binds (Vartab.update ((x, i), (U, u))) end;
val unbind_term = map_binds o Vartab.delete_safe;
fun maybe_bind_term (xi, SOME t) = bind_term (xi, t)
| maybe_bind_term (xi, NONE) = unbind_term xi;
fun expand_binds ctxt =
let
val binds = binds_of ctxt;
val get = fn Var (xi, _) => Vartab.lookup binds xi | _ => NONE;
in Envir.beta_norm o Envir.expand_term get end;
(** consts **)
val lookup_const = Symtab.lookup o #consts o rep_data;
val is_const = is_some oo lookup_const;
val declare_fixed = map_consts o Symtab.delete_safe;
val declare_const = map_consts o Symtab.update;
(** bounds **)
fun inc_bound (a, T) ctxt =
let
val b = Name.bound (#1 (#bounds (rep_data ctxt)));
val ctxt' = ctxt |> map_bounds (fn (next, bounds) => (next + 1, ((b, T), a) :: bounds));
in (Free (b, T), ctxt') end;
fun next_bound a ctxt =
let val (x as Free (b, _), ctxt') = inc_bound a ctxt
in if Name.is_declared (names_of ctxt') b then inc_bound a ctxt' else (x, ctxt') end;
fun revert_bounds ctxt t =
(case #2 (#bounds (rep_data ctxt)) of
[] => t
| bounds =>
let
val names = Term.declare_term_names t (names_of ctxt);
val xs = rev (#1 (fold_map Name.variant (rev (map #2 bounds)) names));
fun substs (((b, T), _), x') =
let fun subst U = (Free (b, U), Syntax_Trans.mark_bound_abs (x', U))
in [subst T, subst (Type_Annotation.ignore_type T)] end;
in Term.subst_atomic (maps substs (bounds ~~ xs)) t end);
(** fixes **)
(* inner body mode *)
val inner_body = Config.declare_bool ("inner_body", \<^here>) (K false);
val is_body = Config.apply inner_body;
val set_body = Config.put inner_body;
val restore_body = set_body o is_body;
(* proper mode *)
val proper_fixes = Config.declare_bool ("proper_fixes", \<^here>) (K true);
val improper_fixes = Config.put proper_fixes false;
val restore_proper_fixes = Config.put proper_fixes o Config.apply proper_fixes;
fun is_improper ctxt x =
(case Name_Space.lookup (fixes_of ctxt) x of
SOME (_, proper) => not proper
| NONE => false);
(* specialized name space *)
val is_fixed = Name_Space.defined o fixes_of;
fun is_newly_fixed inner outer = is_fixed inner andf (not o is_fixed outer);
val fixed_ord = Name_Space.entry_ord o fixes_space;
val intern_fixed = Name_Space.intern o fixes_space;
fun lookup_fixed ctxt x =
let val x' = intern_fixed ctxt x
in if is_fixed ctxt x' then SOME x' else NONE end;
fun revert_fixed ctxt x =
(case Name_Space.lookup (fixes_of ctxt) x of
SOME (x', _) => if intern_fixed ctxt x' = x then x' else x
| NONE => x);
fun markup_fixed ctxt x =
Name_Space.markup (fixes_space ctxt) x
|> Markup.name (revert_fixed ctxt x);
fun markup ctxt x =
if is_improper ctxt x then Markup.improper
else if Name.is_skolem x then Markup.skolem
else Markup.free;
val markup_entity_def = Name_Space.markup_def o fixes_space;
fun dest_fixes ctxt =
Name_Space.fold_table (fn (x, (y, _)) => cons (y, x)) (fixes_of ctxt) []
|> sort (Name_Space.entry_ord (fixes_space ctxt) o apply2 #2);
(* collect variables *)
fun add_free_names ctxt =
fold_aterms (fn Free (x, _) => not (is_fixed ctxt x) ? insert (op =) x | _ => I);
fun add_frees ctxt =
fold_aterms (fn Free (x, T) => not (is_fixed ctxt x) ? insert (op =) (x, T) | _ => I);
fun add_fixed_names ctxt =
fold_aterms (fn Free (x, _) => is_fixed ctxt x ? insert (op =) x | _ => I);
fun add_fixed ctxt =
fold_aterms (fn Free (x, T) => is_fixed ctxt x ? insert (op =) (x, T) | _ => I);
fun add_newly_fixed ctxt' ctxt =
fold_aterms (fn Free (x, T) => is_newly_fixed ctxt' ctxt x ? insert (op =) (x, T) | _ => I);
(* declarations *)
local
fun err_dups dups =
error ("Duplicate fixed variable(s): " ^ commas (map Binding.print dups));
fun new_fixed ((x, x'), pos) ctxt =
if is_some (lookup_fixed ctxt x') then err_dups [Binding.make (x, pos)]
else
let
val proper = Config.get ctxt proper_fixes;
val context = Context.Proof ctxt
|> Name_Space.map_naming (K Name_Space.global_naming)
|> Context_Position.set_visible_generic false;
in
ctxt
|> map_fixes
(Name_Space.define context true (Binding.make (x', pos), (x, proper)) #> snd #>
x <> "" ? Name_Space.alias_table Name_Space.global_naming (Binding.make (x, pos)) x')
|> declare_fixed x
|> declare_constraints (Syntax.free x')
end;
fun new_fixes names' args =
map_names (K names') #>
fold new_fixed args #>
pair (map (#2 o #1) args);
in
fun add_fixes_binding bs ctxt =
let
val _ =
(case filter (Name.is_skolem o Binding.name_of) bs of
[] => ()
| bads => error ("Illegal internal Skolem constant(s): " ^ commas (map Binding.print bads)));
val _ =
(case duplicates (op = o apply2 Binding.name_of) bs of
[] => ()
| dups => err_dups dups);
val xs = map check_name bs;
val names = names_of ctxt;
val (xs', names') =
if is_body ctxt then fold_map Name.variant xs names |>> map Name.skolem
else (xs, fold Name.declare xs names);
in ctxt |> new_fixes names' ((xs ~~ xs') ~~ map Binding.pos_of bs) end;
fun variant_names ctxt raw_xs =
let
val names = names_of ctxt;
val xs = map (fn x => Name.clean x |> Name.is_internal x ? Name.internal) raw_xs;
val (xs', names') = fold_map Name.variant xs names |>> (is_body ctxt ? map Name.skolem);
in (names', xs ~~ xs') end;
fun variant_fixes xs ctxt =
let val (names', vs) = variant_names ctxt xs;
in ctxt |> new_fixes names' (map (rpair Position.none) vs) end;
fun bound_fixes xs ctxt =
let
val (names', vs) = variant_names ctxt (map #1 xs);
val (ys, ctxt') = fold_map next_bound (map2 (fn (x', _) => fn (_, T) => (x', T)) vs xs) ctxt;
val fixes = map2 (fn (x, _) => fn Free (y, _) => ((x, y), Position.none)) vs ys;
in ctxt' |> new_fixes names' fixes end;
end;
val add_fixes = add_fixes_binding o map Binding.name;
fun add_fixes_direct xs ctxt = ctxt
|> set_body false
|> (snd o add_fixes xs)
|> restore_body ctxt;
fun add_fixes_implicit t ctxt = ctxt
|> not (is_body ctxt) ? add_fixes_direct (rev (add_free_names ctxt t []));
(* dummy patterns *)
fun fix_dummy_patterns (Const ("Pure.dummy_pattern", T)) ctxt =
let val ([x], ctxt') = ctxt |> set_body true |> add_fixes [Name.uu_] ||> restore_body ctxt
in (Free (x, T), ctxt') end
| fix_dummy_patterns (Abs (x, T, b)) ctxt =
let val (b', ctxt') = fix_dummy_patterns b ctxt
in (Abs (x, T, b'), ctxt') end
| fix_dummy_patterns (t $ u) ctxt =
let
val (t', ctxt') = fix_dummy_patterns t ctxt;
val (u', ctxt'') = fix_dummy_patterns u ctxt';
in (t' $ u', ctxt'') end
| fix_dummy_patterns a ctxt = (a, ctxt);
(** export -- generalize type/term variables (beware of closure sizes) **)
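(*gen_all eliminates the outermost \<And>-parameters of a theorem by instantiating
  them with schematic variables of the same names, indexed above the maxidx of
  theorem and context*)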
fun gen_all ctxt th =
let
val i = Thm.maxidx_thm th (maxidx_of ctxt) + 1;
fun gen (x, T) = Thm.forall_elim (Thm.cterm_of ctxt (Var ((x, i), T)));
in fold gen (Drule.outer_params (Thm.prop_of th)) th end;
fun export_inst inner outer =
let
val declared_outer = is_declared outer;
val still_fixed = not o is_newly_fixed inner outer;
val gen_fixes =
Names.build (fixes_of inner |> Name_Space.fold_table (fn (y, _) =>
not (is_fixed outer y) ? Names.add_set y));
val type_occs_inner = type_occs_of inner;
fun gen_fixesT ts =
Names.build (fold decl_type_occs ts type_occs_inner |> Symtab.fold (fn (a, xs) =>
if declared_outer a orelse exists still_fixed xs
then I else Names.add_set a));
in (gen_fixesT, gen_fixes) end;
fun exportT_inst inner outer = #1 (export_inst inner outer);
fun exportT_terms inner outer =
let
val mk_tfrees = exportT_inst inner outer;
val maxidx = maxidx_of outer;
in
fn ts => ts |> map
(Term_Subst.generalize (mk_tfrees ts, Names.empty)
(fold (Term.fold_types Term.maxidx_typ) ts maxidx + 1))
end;
fun export_terms inner outer =
let
val (mk_tfrees, tfrees) = export_inst inner outer;
val maxidx = maxidx_of outer;
in
fn ts => ts |> map
(Term_Subst.generalize (mk_tfrees ts, tfrees)
(fold Term.maxidx_term ts maxidx + 1))
end;
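(*Rough illustration: exporting a term from an inner context that fixes x into
  an outer context where x is not fixed generalizes the Free x (and any type
  frees introduced only in the inner context) to schematic variables, indexed
  above the maxidx of the outer context.*)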
fun export_prf inner outer prf =
let
val (mk_tfrees, frees) = export_inst (declare_prf prf inner) outer;
val tfrees = mk_tfrees [];
val maxidx = maxidx_of outer;
val idx = Proofterm.maxidx_proof prf maxidx + 1;
val gen_term = Term_Subst.generalize_same (tfrees, frees) idx;
val gen_typ = Term_Subst.generalizeT_same tfrees idx;
in Same.commit (Proofterm.map_proof_terms_same gen_term gen_typ) prf end;
fun gen_export (mk_tfrees, frees) maxidx ths =
let
val tfrees = mk_tfrees (map Thm.full_prop_of ths);
val idx = fold Thm.maxidx_thm ths maxidx + 1;
in map (Thm.generalize (tfrees, frees) idx) ths end;
fun exportT inner outer = gen_export (exportT_inst inner outer, Names.empty) (maxidx_of outer);
fun export inner outer = gen_export (export_inst inner outer) (maxidx_of outer);
fun export_morphism inner outer =
let
val fact = export inner outer;
val term = singleton (export_terms inner outer);
val typ = Logic.type_map term;
in
- Morphism.transfer_morphism' inner $>
- Morphism.transfer_morphism' outer $>
- Morphism.morphism "Variable.export" {binding = [], typ = [typ], term = [term], fact = [fact]}
+ Morphism.morphism "Variable.export"
+ {binding = [], typ = [K typ], term = [K term], fact = [K fact]}
end;
(** import -- fix schematic type/term variables **)
fun invent_types Ss ctxt =
let
val tfrees = Name.invent (names_of ctxt) Name.aT (length Ss) ~~ Ss;
val ctxt' = fold (declare_constraints o Logic.mk_type o TFree) tfrees ctxt;
in (tfrees, ctxt') end;
fun importT_inst ts ctxt =
let
val tvars = TVars.build (fold TVars.add_tvars ts) |> TVars.list_set;
val (tfrees, ctxt') = invent_types (map #2 tvars) ctxt;
val instT = TVars.build (fold2 (fn a => fn b => TVars.add (a, TFree b)) tvars tfrees);
in (instT, ctxt') end;
fun import_inst is_open ts ctxt =
let
val ren = Name.clean #> (if is_open then I else Name.internal);
val (instT, ctxt') = importT_inst ts ctxt;
val vars =
Vars.build (fold Vars.add_vars ts) |> Vars.list_set
|> map (apsnd (Term_Subst.instantiateT instT));
val (ys, ctxt'') = variant_fixes (map (ren o #1 o #1) vars) ctxt';
val inst = Vars.build (fold2 (fn (x, T) => fn y => Vars.add ((x, T), Free (y, T))) vars ys);
in ((instT, inst), ctxt'') end;
fun importT_terms ts ctxt =
let val (instT, ctxt') = importT_inst ts ctxt
in (map (Term_Subst.instantiate (instT, Vars.empty)) ts, ctxt') end;
fun import_terms is_open ts ctxt =
let val (inst, ctxt') = import_inst is_open ts ctxt
in (map (Term_Subst.instantiate inst) ts, ctxt') end;
fun importT ths ctxt =
let
val (instT, ctxt') = importT_inst (map Thm.full_prop_of ths) ctxt;
val instT' = TVars.map (K (Thm.ctyp_of ctxt')) instT;
val ths' = map (Thm.instantiate (instT', Vars.empty)) ths;
in ((instT', ths'), ctxt') end;
fun import_prf is_open prf ctxt =
let
val ts = rev (Proofterm.fold_proof_terms_types cons (cons o Logic.mk_type) prf []);
val (insts, ctxt') = import_inst is_open ts ctxt;
in (Proofterm.instantiate insts prf, ctxt') end;
fun import is_open ths ctxt =
let
val ((instT, inst), ctxt') = import_inst is_open (map Thm.full_prop_of ths) ctxt;
val instT' = TVars.map (K (Thm.ctyp_of ctxt')) instT;
val inst' = Vars.map (K (Thm.cterm_of ctxt')) inst;
val ths' = map (Thm.instantiate (instT', inst')) ths;
in (((instT', inst'), ths'), ctxt') end;
fun import_vars ctxt th =
let val ((_, [th']), _) = ctxt |> set_body false |> import true [th];
in th' end;
(* import/export *)
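(*gen_trade imports the given theorems (fixing their schematic variables),
  applies the operation in the augmented context, and exports the results back
  to the original context, turning the fixed frees into schematics again*)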
fun gen_trade imp exp f ctxt ths =
let val ((_, ths'), ctxt') = imp ths ctxt
in exp ctxt' ctxt (f ctxt' ths') end;
val tradeT = gen_trade importT exportT;
val trade = gen_trade (import true) export;
(* destruct binders *)
local
fun gen_dest_abs exn dest term_of arg ctxt =
(case term_of arg of
Abs (a, T, _) =>
let
val (x, ctxt') = yield_singleton bound_fixes (a, T) ctxt;
val res = dest x arg handle Term.USED_FREE _ =>
raise Fail ("Bad context: clash of fresh free for bound: " ^
Syntax.string_of_term ctxt (Free (x, T)) ^ " vs. " ^
Syntax.string_of_term ctxt' (Free (x, T)));
in (res, ctxt') end
| _ => raise exn ("dest_abs", [arg]));
in
val dest_abs = gen_dest_abs TERM Term.dest_abs_fresh I;
val dest_abs_cterm = gen_dest_abs CTERM Thm.dest_abs_fresh Thm.term_of;
fun dest_all t ctxt =
(case t of
Const ("Pure.all", _) $ u => dest_abs u ctxt
| _ => raise TERM ("dest_all", [t]));
fun dest_all_cterm ct ctxt =
(case Thm.term_of ct of
Const ("Pure.all", _) $ _ => dest_abs_cterm (Thm.dest_arg ct) ctxt
| _ => raise CTERM ("dest_all", [ct]));
end;
(* focus on outermost parameters: \<And>x y z. B *)
val bound_focus = Config.declare_bool ("bound_focus", \<^here>) (K false);
val is_bound_focus = Config.apply bound_focus;
val set_bound_focus = Config.put bound_focus;
val restore_bound_focus = set_bound_focus o is_bound_focus;
fun focus_params bindings t ctxt =
let
val ps = Term.variant_frees t (Term.strip_all_vars t); (*as they are printed :-*)
val (xs, Ts) = split_list ps;
val (xs', ctxt') =
(case bindings of
SOME bs => ctxt |> set_body true |> add_fixes_binding bs ||> restore_body ctxt
| NONE => if is_bound_focus ctxt then bound_fixes ps ctxt else variant_fixes xs ctxt);
val ps' = xs' ~~ Ts;
val ctxt'' = ctxt' |> fold (declare_constraints o Free) ps';
in ((xs, ps'), ctxt'') end;
fun focus bindings t ctxt =
let
val ((xs, ps), ctxt') = focus_params bindings t ctxt;
val t' = Term.subst_bounds (rev (map Free ps), Term.strip_all_body t);
in (((xs ~~ ps), t'), ctxt') end;
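(*Rough illustration: applying focus NONE to a goal of the form \<And>x y. B x y
  fixes variant frees for x and y, declares their type constraints, and returns
  the body with the bound variables replaced by those frees.*)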
fun forall_elim_prop t prop =
Thm.beta_conversion false (Thm.apply (Thm.dest_arg prop) t)
|> Thm.cprop_of |> Thm.dest_arg;
fun focus_cterm bindings goal ctxt =
let
val ((xs, ps), ctxt') = focus_params bindings (Thm.term_of goal) ctxt;
val ps' = map (Thm.cterm_of ctxt' o Free) ps;
val goal' = fold forall_elim_prop ps' goal;
in ((xs ~~ ps', goal'), ctxt') end;
fun focus_subgoal bindings i st =
let
val all_vars = Vars.build (Thm.fold_terms {hyps = false} Vars.add_vars st);
in
Vars.fold (unbind_term o #1 o #1) all_vars #>
Vars.fold (declare_constraints o Var o #1) all_vars #>
focus_cterm bindings (Thm.cprem_of st i)
end;
(** implicit polymorphism **)
(* warn_extra_tfrees *)
fun warn_extra_tfrees ctxt1 ctxt2 =
let
fun occs_typ a = Term.exists_subtype (fn TFree (b, _) => a = b | _ => false);
fun occs_free a x =
(case def_type ctxt1 false (x, ~1) of
SOME T => if occs_typ a T then I else cons (a, x)
| NONE => cons (a, x));
val occs1 = type_occs_of ctxt1;
val occs2 = type_occs_of ctxt2;
val extras = Symtab.fold (fn (a, xs) =>
if Symtab.defined occs1 a then I else fold (occs_free a) xs) occs2 [];
val tfrees = map #1 extras |> sort_distinct string_ord;
val frees = map #2 extras |> sort_distinct string_ord;
in
if null extras orelse not (Context_Position.is_visible ctxt2) then ()
else warning ("Introduced fixed type variable(s): " ^ commas tfrees ^ " in " ^
space_implode " or " (map quote frees))
end;
(* polymorphic terms *)
fun polymorphic_types ctxt ts =
let
val ctxt' = fold declare_term ts ctxt;
val occs = type_occs_of ctxt;
val occs' = type_occs_of ctxt';
val types =
Names.build (occs' |> Symtab.fold (fn (a, _) =>
if Symtab.defined occs a then I else Names.add_set a));
val idx = maxidx_of ctxt' + 1;
val Ts' = (fold o fold_types o fold_atyps)
(fn T as TFree _ =>
(case Term_Subst.generalizeT types idx T of TVar v => insert (op =) v | _ => I)
| _ => I) ts [];
val ts' = map (Term_Subst.generalize (types, Names.empty) idx) ts;
in (rev Ts', ts') end;
fun polymorphic ctxt ts = snd (polymorphic_types ctxt ts);
end;
diff --git a/src/ZF/OrdQuant.thy b/src/ZF/OrdQuant.thy
--- a/src/ZF/OrdQuant.thy
+++ b/src/ZF/OrdQuant.thy
@@ -1,362 +1,360 @@
(* Title: ZF/OrdQuant.thy
Authors: Krzysztof Grabczewski and L C Paulson
*)
section \<open>Special quantifiers\<close>
theory OrdQuant imports Ordinal begin
subsection \<open>Quantifiers and union operator for ordinals\<close>
definition
(* Ordinal Quantifiers *)
oall :: "[i, i \<Rightarrow> o] \<Rightarrow> o" where
"oall(A, P) \<equiv> \<forall>x. x<A \<longrightarrow> P(x)"
definition
oex :: "[i, i \<Rightarrow> o] \<Rightarrow> o" where
"oex(A, P) \<equiv> \<exists>x. x<A \<and> P(x)"
definition
(* Ordinal Union *)
OUnion :: "[i, i \<Rightarrow> i] \<Rightarrow> i" where
"OUnion(i,B) \<equiv> {z: \<Union>x\<in>i. B(x). Ord(i)}"
syntax
"_oall" :: "[idt, i, o] \<Rightarrow> o" (\<open>(3\<forall>_<_./ _)\<close> 10)
"_oex" :: "[idt, i, o] \<Rightarrow> o" (\<open>(3\<exists>_<_./ _)\<close> 10)
"_OUNION" :: "[idt, i, i] \<Rightarrow> i" (\<open>(3\<Union>_<_./ _)\<close> 10)
translations
"\<forall>x<a. P" \<rightleftharpoons> "CONST oall(a, \<lambda>x. P)"
"\<exists>x<a. P" \<rightleftharpoons> "CONST oex(a, \<lambda>x. P)"
"\<Union>x<a. B" \<rightleftharpoons> "CONST OUnion(a, \<lambda>x. B)"
subsubsection \<open>simplification of the new quantifiers\<close>
(*MOST IMPORTANT that this is added to the simpset BEFORE Ord_atomize
is proved. Ord_atomize would convert this rule to
x < 0 \<Longrightarrow> P(x) \<equiv> True, which causes dire effects!*)
lemma [simp]: "(\<forall>x<0. P(x))"
by (simp add: oall_def)
lemma [simp]: "\<not>(\<exists>x<0. P(x))"
by (simp add: oex_def)
lemma [simp]: "(\<forall>x<succ(i). P(x)) <-> (Ord(i) \<longrightarrow> P(i) \<and> (\<forall>x<i. P(x)))"
apply (simp add: oall_def le_iff)
apply (blast intro: lt_Ord2)
done
lemma [simp]: "(\<exists>x<succ(i). P(x)) <-> (Ord(i) \<and> (P(i) | (\<exists>x<i. P(x))))"
apply (simp add: oex_def le_iff)
apply (blast intro: lt_Ord2)
done
subsubsection \<open>Union over ordinals\<close>
lemma Ord_OUN [intro,simp]:
"\<lbrakk>\<And>x. x<A \<Longrightarrow> Ord(B(x))\<rbrakk> \<Longrightarrow> Ord(\<Union>x<A. B(x))"
by (simp add: OUnion_def ltI Ord_UN)
lemma OUN_upper_lt:
"\<lbrakk>a<A; i < b(a); Ord(\<Union>x<A. b(x))\<rbrakk> \<Longrightarrow> i < (\<Union>x<A. b(x))"
by (unfold OUnion_def lt_def, blast)
lemma OUN_upper_le:
"\<lbrakk>a<A; i\<le>b(a); Ord(\<Union>x<A. b(x))\<rbrakk> \<Longrightarrow> i \<le> (\<Union>x<A. b(x))"
apply (unfold OUnion_def, auto)
apply (rule UN_upper_le)
apply (auto simp add: lt_def)
done
lemma Limit_OUN_eq: "Limit(i) \<Longrightarrow> (\<Union>x<i. x) = i"
by (simp add: OUnion_def Limit_Union_eq Limit_is_Ord)
(* No < version of this theorem: consider that @{term"(\<Union>i\<in>nat.i)=nat"}! *)
lemma OUN_least:
"(\<And>x. x<A \<Longrightarrow> B(x) \<subseteq> C) \<Longrightarrow> (\<Union>x<A. B(x)) \<subseteq> C"
by (simp add: OUnion_def UN_least ltI)
lemma OUN_least_le:
"\<lbrakk>Ord(i); \<And>x. x<A \<Longrightarrow> b(x) \<le> i\<rbrakk> \<Longrightarrow> (\<Union>x<A. b(x)) \<le> i"
by (simp add: OUnion_def UN_least_le ltI Ord_0_le)
lemma le_implies_OUN_le_OUN:
"\<lbrakk>\<And>x. x<A \<Longrightarrow> c(x) \<le> d(x)\<rbrakk> \<Longrightarrow> (\<Union>x<A. c(x)) \<le> (\<Union>x<A. d(x))"
by (blast intro: OUN_least_le OUN_upper_le le_Ord2 Ord_OUN)
lemma OUN_UN_eq:
"(\<And>x. x \<in> A \<Longrightarrow> Ord(B(x)))
\<Longrightarrow> (\<Union>z < (\<Union>x\<in>A. B(x)). C(z)) = (\<Union>x\<in>A. \<Union>z < B(x). C(z))"
by (simp add: OUnion_def)
lemma OUN_Union_eq:
"(\<And>x. x \<in> X \<Longrightarrow> Ord(x))
\<Longrightarrow> (\<Union>z < \<Union>(X). C(z)) = (\<Union>x\<in>X. \<Union>z < x. C(z))"
by (simp add: OUnion_def)
(*So that rule_format will get rid of this quantifier...*)
lemma atomize_oall [symmetric, rulify]:
"(\<And>x. x<A \<Longrightarrow> P(x)) \<equiv> Trueprop (\<forall>x<A. P(x))"
by (simp add: oall_def atomize_all atomize_imp)
subsubsection \<open>universal quantifier for ordinals\<close>
lemma oallI [intro!]:
"\<lbrakk>\<And>x. x<A \<Longrightarrow> P(x)\<rbrakk> \<Longrightarrow> \<forall>x<A. P(x)"
by (simp add: oall_def)
lemma ospec: "\<lbrakk>\<forall>x<A. P(x); x<A\<rbrakk> \<Longrightarrow> P(x)"
by (simp add: oall_def)
lemma oallE:
"\<lbrakk>\<forall>x<A. P(x); P(x) \<Longrightarrow> Q; \<not>x<A \<Longrightarrow> Q\<rbrakk> \<Longrightarrow> Q"
by (simp add: oall_def, blast)
lemma rev_oallE [elim]:
"\<lbrakk>\<forall>x<A. P(x); \<not>x<A \<Longrightarrow> Q; P(x) \<Longrightarrow> Q\<rbrakk> \<Longrightarrow> Q"
by (simp add: oall_def, blast)
(*Trivial rewrite rule. @{term"(\<forall>x<a. P)<->P"} holds only if a is not 0!*)
lemma oall_simp [simp]: "(\<forall>x<a. True) <-> True"
by blast
(*Congruence rule for rewriting*)
lemma oall_cong [cong]:
"\<lbrakk>a=a'; \<And>x. x<a' \<Longrightarrow> P(x) <-> P'(x)\<rbrakk>
\<Longrightarrow> oall(a, \<lambda>x. P(x)) <-> oall(a', \<lambda>x. P'(x))"
by (simp add: oall_def)
subsubsection \<open>existential quantifier for ordinals\<close>
lemma oexI [intro]:
"\<lbrakk>P(x); x<A\<rbrakk> \<Longrightarrow> \<exists>x<A. P(x)"
apply (simp add: oex_def, blast)
done
(*Not of the general form for such rules... *)
lemma oexCI:
"\<lbrakk>\<forall>x<A. \<not>P(x) \<Longrightarrow> P(a); a<A\<rbrakk> \<Longrightarrow> \<exists>x<A. P(x)"
apply (simp add: oex_def, blast)
done
lemma oexE [elim!]:
"\<lbrakk>\<exists>x<A. P(x); \<And>x. \<lbrakk>x<A; P(x)\<rbrakk> \<Longrightarrow> Q\<rbrakk> \<Longrightarrow> Q"
apply (simp add: oex_def, blast)
done
lemma oex_cong [cong]:
"\<lbrakk>a=a'; \<And>x. x<a' \<Longrightarrow> P(x) <-> P'(x)\<rbrakk>
\<Longrightarrow> oex(a, \<lambda>x. P(x)) <-> oex(a', \<lambda>x. P'(x))"
apply (simp add: oex_def cong add: conj_cong)
done
subsubsection \<open>Rules for Ordinal-Indexed Unions\<close>
lemma OUN_I [intro]: "\<lbrakk>a<i; b \<in> B(a)\<rbrakk> \<Longrightarrow> b: (\<Union>z<i. B(z))"
by (unfold OUnion_def lt_def, blast)
lemma OUN_E [elim!]:
"\<lbrakk>b \<in> (\<Union>z<i. B(z)); \<And>a.\<lbrakk>b \<in> B(a); a<i\<rbrakk> \<Longrightarrow> R\<rbrakk> \<Longrightarrow> R"
apply (unfold OUnion_def lt_def, blast)
done
lemma OUN_iff: "b \<in> (\<Union>x<i. B(x)) <-> (\<exists>x<i. b \<in> B(x))"
by (unfold OUnion_def oex_def lt_def, blast)
lemma OUN_cong [cong]:
"\<lbrakk>i=j; \<And>x. x<j \<Longrightarrow> C(x)=D(x)\<rbrakk> \<Longrightarrow> (\<Union>x<i. C(x)) = (\<Union>x<j. D(x))"
by (simp add: OUnion_def lt_def OUN_iff)
lemma lt_induct:
"\<lbrakk>i<k; \<And>x.\<lbrakk>x<k; \<forall>y<x. P(y)\<rbrakk> \<Longrightarrow> P(x)\<rbrakk> \<Longrightarrow> P(i)"
apply (simp add: lt_def oall_def)
apply (erule conjE)
apply (erule Ord_induct, assumption, blast)
done
subsection \<open>Quantification over a class\<close>
definition
"rall" :: "[i\<Rightarrow>o, i\<Rightarrow>o] \<Rightarrow> o" where
"rall(M, P) \<equiv> \<forall>x. M(x) \<longrightarrow> P(x)"
definition
"rex" :: "[i\<Rightarrow>o, i\<Rightarrow>o] \<Rightarrow> o" where
"rex(M, P) \<equiv> \<exists>x. M(x) \<and> P(x)"
syntax
"_rall" :: "[pttrn, i\<Rightarrow>o, o] \<Rightarrow> o" (\<open>(3\<forall>_[_]./ _)\<close> 10)
"_rex" :: "[pttrn, i\<Rightarrow>o, o] \<Rightarrow> o" (\<open>(3\<exists>_[_]./ _)\<close> 10)
translations
"\<forall>x[M]. P" \<rightleftharpoons> "CONST rall(M, \<lambda>x. P)"
"\<exists>x[M]. P" \<rightleftharpoons> "CONST rex(M, \<lambda>x. P)"
subsubsection\<open>Relativized universal quantifier\<close>
lemma rallI [intro!]: "\<lbrakk>\<And>x. M(x) \<Longrightarrow> P(x)\<rbrakk> \<Longrightarrow> \<forall>x[M]. P(x)"
by (simp add: rall_def)
lemma rspec: "\<lbrakk>\<forall>x[M]. P(x); M(x)\<rbrakk> \<Longrightarrow> P(x)"
by (simp add: rall_def)
(*Instantiates x first: better for automatic theorem proving?*)
lemma rev_rallE [elim]:
"\<lbrakk>\<forall>x[M]. P(x); \<not> M(x) \<Longrightarrow> Q; P(x) \<Longrightarrow> Q\<rbrakk> \<Longrightarrow> Q"
by (simp add: rall_def, blast)
lemma rallE: "\<lbrakk>\<forall>x[M]. P(x); P(x) \<Longrightarrow> Q; \<not> M(x) \<Longrightarrow> Q\<rbrakk> \<Longrightarrow> Q"
by blast
(*Trivial rewrite rule; (\<forall>x[M]. P) <-> P holds only if the class M is nonempty!*)
lemma rall_triv [simp]: "(\<forall>x[M]. P) \<longleftrightarrow> ((\<exists>x. M(x)) \<longrightarrow> P)"
by (simp add: rall_def)
(*Congruence rule for rewriting*)
lemma rall_cong [cong]:
"(\<And>x. M(x) \<Longrightarrow> P(x) <-> P'(x)) \<Longrightarrow> (\<forall>x[M]. P(x)) <-> (\<forall>x[M]. P'(x))"
by (simp add: rall_def)
subsubsection\<open>Relativized existential quantifier\<close>
lemma rexI [intro]: "\<lbrakk>P(x); M(x)\<rbrakk> \<Longrightarrow> \<exists>x[M]. P(x)"
by (simp add: rex_def, blast)
(*The best argument order when there is only one M(x)*)
lemma rev_rexI: "\<lbrakk>M(x); P(x)\<rbrakk> \<Longrightarrow> \<exists>x[M]. P(x)"
by blast
(*Not of the general form for such rules... *)
lemma rexCI: "\<lbrakk>\<forall>x[M]. \<not>P(x) \<Longrightarrow> P(a); M(a)\<rbrakk> \<Longrightarrow> \<exists>x[M]. P(x)"
by blast
lemma rexE [elim!]: "\<lbrakk>\<exists>x[M]. P(x); \<And>x. \<lbrakk>M(x); P(x)\<rbrakk> \<Longrightarrow> Q\<rbrakk> \<Longrightarrow> Q"
by (simp add: rex_def, blast)
(*We do not even have (\<exists>x[M]. True) <-> True unless the class M is nonempty!*)
lemma rex_triv [simp]: "(\<exists>x[M]. P) \<longleftrightarrow> ((\<exists>x. M(x)) \<and> P)"
by (simp add: rex_def)
lemma rex_cong [cong]:
"(\<And>x. M(x) \<Longrightarrow> P(x) <-> P'(x)) \<Longrightarrow> (\<exists>x[M]. P(x)) <-> (\<exists>x[M]. P'(x))"
by (simp add: rex_def cong: conj_cong)
lemma rall_is_ball [simp]: "(\<forall>x[\<lambda>z. z\<in>A]. P(x)) <-> (\<forall>x\<in>A. P(x))"
by blast
lemma rex_is_bex [simp]: "(\<exists>x[\<lambda>z. z\<in>A]. P(x)) <-> (\<exists>x\<in>A. P(x))"
by blast
lemma atomize_rall: "(\<And>x. M(x) \<Longrightarrow> P(x)) \<equiv> Trueprop (\<forall>x[M]. P(x))"
by (simp add: rall_def atomize_all atomize_imp)
declare atomize_rall [symmetric, rulify]
lemma rall_simps1:
"(\<forall>x[M]. P(x) \<and> Q) <-> (\<forall>x[M]. P(x)) \<and> ((\<forall>x[M]. False) | Q)"
"(\<forall>x[M]. P(x) | Q) <-> ((\<forall>x[M]. P(x)) | Q)"
"(\<forall>x[M]. P(x) \<longrightarrow> Q) <-> ((\<exists>x[M]. P(x)) \<longrightarrow> Q)"
"(\<not>(\<forall>x[M]. P(x))) <-> (\<exists>x[M]. \<not>P(x))"
by blast+
lemma rall_simps2:
"(\<forall>x[M]. P \<and> Q(x)) <-> ((\<forall>x[M]. False) | P) \<and> (\<forall>x[M]. Q(x))"
"(\<forall>x[M]. P | Q(x)) <-> (P | (\<forall>x[M]. Q(x)))"
"(\<forall>x[M]. P \<longrightarrow> Q(x)) <-> (P \<longrightarrow> (\<forall>x[M]. Q(x)))"
by blast+
lemmas rall_simps [simp] = rall_simps1 rall_simps2
lemma rall_conj_distrib:
"(\<forall>x[M]. P(x) \<and> Q(x)) <-> ((\<forall>x[M]. P(x)) \<and> (\<forall>x[M]. Q(x)))"
by blast
lemma rex_simps1:
"(\<exists>x[M]. P(x) \<and> Q) <-> ((\<exists>x[M]. P(x)) \<and> Q)"
"(\<exists>x[M]. P(x) | Q) <-> (\<exists>x[M]. P(x)) | ((\<exists>x[M]. True) \<and> Q)"
"(\<exists>x[M]. P(x) \<longrightarrow> Q) <-> ((\<forall>x[M]. P(x)) \<longrightarrow> ((\<exists>x[M]. True) \<and> Q))"
"(\<not>(\<exists>x[M]. P(x))) <-> (\<forall>x[M]. \<not>P(x))"
by blast+
lemma rex_simps2:
"(\<exists>x[M]. P \<and> Q(x)) <-> (P \<and> (\<exists>x[M]. Q(x)))"
"(\<exists>x[M]. P | Q(x)) <-> ((\<exists>x[M]. True) \<and> P) | (\<exists>x[M]. Q(x))"
"(\<exists>x[M]. P \<longrightarrow> Q(x)) <-> (((\<forall>x[M]. False) | P) \<longrightarrow> (\<exists>x[M]. Q(x)))"
by blast+
lemmas rex_simps [simp] = rex_simps1 rex_simps2
lemma rex_disj_distrib:
"(\<exists>x[M]. P(x) | Q(x)) <-> ((\<exists>x[M]. P(x)) | (\<exists>x[M]. Q(x)))"
by blast
subsubsection\<open>One-point rule for bounded quantifiers\<close>
lemma rex_triv_one_point1 [simp]: "(\<exists>x[M]. x=a) <-> (M(a))"
by blast
lemma rex_triv_one_point2 [simp]: "(\<exists>x[M]. a=x) <-> (M(a))"
by blast
lemma rex_one_point1 [simp]: "(\<exists>x[M]. x=a \<and> P(x)) <-> (M(a) \<and> P(a))"
by blast
lemma rex_one_point2 [simp]: "(\<exists>x[M]. a=x \<and> P(x)) <-> (M(a) \<and> P(a))"
by blast
lemma rall_one_point1 [simp]: "(\<forall>x[M]. x=a \<longrightarrow> P(x)) <-> (M(a) \<longrightarrow> P(a))"
by blast
lemma rall_one_point2 [simp]: "(\<forall>x[M]. a=x \<longrightarrow> P(x)) <-> (M(a) \<longrightarrow> P(a))"
by blast
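(*Illustrative example (editorial addition): with the one-point rules installed as
  simp rules, an equation-guarded relativized quantifier collapses in a single
  simplifier step.*)
lemma "(\<exists>x[M]. x=a \<and> P(x)) \<Longrightarrow> M(a)"
by simp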
subsubsection\<open>Sets as Classes\<close>
definition
setclass :: "[i,i] \<Rightarrow> o" (\<open>##_\<close> [40] 40) where
"setclass(A) \<equiv> \<lambda>x. x \<in> A"
lemma setclass_iff [simp]: "setclass(A,x) <-> x \<in> A"
by (simp add: setclass_def)
lemma rall_setclass_is_ball [simp]: "(\<forall>x[##A]. P(x)) <-> (\<forall>x\<in>A. P(x))"
by auto
lemma rex_setclass_is_bex [simp]: "(\<exists>x[##A]. P(x)) <-> (\<exists>x\<in>A. P(x))"
by auto
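(*Illustrative example (editorial addition): ##A turns the set A into a class
  predicate, so quantification over the class ##A coincides with bounded
  quantification over the set A.*)
lemma "(\<exists>x[##A]. P(x)) \<Longrightarrow> (\<exists>x\<in>A. P(x))"
by simp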
ML
\<open>
val Ord_atomize =
atomize ([(\<^const_name>\<open>oall\<close>, @{thms ospec}), (\<^const_name>\<open>rall\<close>, @{thms rspec})] @
ZF_conn_pairs, ZF_mem_pairs);
\<close>
declaration \<open>fn _ =>
Simplifier.map_ss (Simplifier.set_mksimps (fn ctxt =>
map mk_eq o Ord_atomize o Variable.gen_all ctxt))
\<close>
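(*Note (editorial addition): the declaration above changes mksimps so that premises
  are generalized and atomized, with bounded (oall) and relativized (rall)
  quantifiers broken up via ospec/rspec, before being turned into rewrite rules;
  this parallels the ZF_atomize setup in pair.thy.*)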
text \<open>Setting up the one-point-rule simproc\<close>
simproc_setup defined_rex ("\<exists>x[M]. P(x) \<and> Q(x)") = \<open>
- fn _ => Quantifier1.rearrange_Bex
- (fn ctxt => unfold_tac ctxt @{thms rex_def})
+ K (Quantifier1.rearrange_Bex (fn ctxt => unfold_tac ctxt @{thms rex_def}))
\<close>
simproc_setup defined_rall ("\<forall>x[M]. P(x) \<longrightarrow> Q(x)") = \<open>
- fn _ => Quantifier1.rearrange_Ball
- (fn ctxt => unfold_tac ctxt @{thms rall_def})
+ K (Quantifier1.rearrange_Ball (fn ctxt => unfold_tac ctxt @{thms rall_def}))
\<close>
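(*Illustrative example (editorial addition): the simprocs reorder the body of a
  bounded quantifier so that the one-point rules still apply when the defining
  equation is not the leading conjunct.*)
lemma "(\<exists>x[M]. P(x) \<and> x=a) \<Longrightarrow> P(a)"
by simp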
end
diff --git a/src/ZF/pair.thy b/src/ZF/pair.thy
--- a/src/ZF/pair.thy
+++ b/src/ZF/pair.thy
@@ -1,185 +1,183 @@
(* Title: ZF/pair.thy
Author: Lawrence C Paulson, Cambridge University Computer Laboratory
Copyright 1992 University of Cambridge
*)
section\<open>Ordered Pairs\<close>
theory pair imports upair
begin
ML_file \<open>simpdata.ML\<close>
setup \<open>
map_theory_simpset
(Simplifier.set_mksimps (fn ctxt => map mk_eq o ZF_atomize o Variable.gen_all ctxt)
#> Simplifier.add_cong @{thm if_weak_cong})
\<close>
ML \<open>val ZF_ss = simpset_of \<^context>\<close>
simproc_setup defined_Bex ("\<exists>x\<in>A. P(x) \<and> Q(x)") = \<open>
- fn _ => Quantifier1.rearrange_Bex
- (fn ctxt => unfold_tac ctxt @{thms Bex_def})
+ K (Quantifier1.rearrange_Bex (fn ctxt => unfold_tac ctxt @{thms Bex_def}))
\<close>
simproc_setup defined_Ball ("\<forall>x\<in>A. P(x) \<longrightarrow> Q(x)") = \<open>
- fn _ => Quantifier1.rearrange_Ball
- (fn ctxt => unfold_tac ctxt @{thms Ball_def})
+ K (Quantifier1.rearrange_Ball (fn ctxt => unfold_tac ctxt @{thms Ball_def}))
\<close>
(** Lemmas for showing that \<langle>a,b\<rangle> uniquely determines a and b **)
lemma singleton_eq_iff [iff]: "{a} = {b} \<longleftrightarrow> a=b"
by (rule extension [THEN iff_trans], blast)
lemma doubleton_eq_iff: "{a,b} = {c,d} \<longleftrightarrow> (a=c \<and> b=d) | (a=d \<and> b=c)"
by (rule extension [THEN iff_trans], blast)
lemma Pair_iff [simp]: "\<langle>a,b\<rangle> = \<langle>c,d\<rangle> \<longleftrightarrow> a=c \<and> b=d"
by (simp add: Pair_def doubleton_eq_iff, blast)
lemmas Pair_inject = Pair_iff [THEN iffD1, THEN conjE, elim!]
lemmas Pair_inject1 = Pair_iff [THEN iffD1, THEN conjunct1]
lemmas Pair_inject2 = Pair_iff [THEN iffD1, THEN conjunct2]
lemma Pair_not_0: "\<langle>a,b\<rangle> \<noteq> 0"
unfolding Pair_def
apply (blast elim: equalityE)
done
lemmas Pair_neq_0 = Pair_not_0 [THEN notE, elim!]
declare sym [THEN Pair_neq_0, elim!]
lemma Pair_neq_fst: "\<langle>a,b\<rangle>=a \<Longrightarrow> P"
proof (unfold Pair_def)
assume eq: "{{a, a}, {a, b}} = a"
have "{a, a} \<in> {{a, a}, {a, b}}" by (rule consI1)
hence "{a, a} \<in> a" by (simp add: eq)
moreover have "a \<in> {a, a}" by (rule consI1)
ultimately show "P" by (rule mem_asym)
qed
lemma Pair_neq_snd: "\<langle>a,b\<rangle>=b \<Longrightarrow> P"
proof (unfold Pair_def)
assume eq: "{{a, a}, {a, b}} = b"
have "{a, b} \<in> {{a, a}, {a, b}}" by blast
hence "{a, b} \<in> b" by (simp add: eq)
moreover have "b \<in> {a, b}" by blast
ultimately show "P" by (rule mem_asym)
qed
subsection\<open>Sigma: Disjoint Union of a Family of Sets\<close>
text\<open>Generalizes Cartesian product\<close>
lemma Sigma_iff [simp]: "\<langle>a,b\<rangle>: Sigma(A,B) \<longleftrightarrow> a \<in> A \<and> b \<in> B(a)"
by (simp add: Sigma_def)
lemma SigmaI [TC,intro!]: "\<lbrakk>a \<in> A; b \<in> B(a)\<rbrakk> \<Longrightarrow> \<langle>a,b\<rangle> \<in> Sigma(A,B)"
by simp
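(*Illustrative example (editorial addition): with a constant second component,
  Sigma is the ordinary Cartesian product, written A*B below.*)
lemma "\<langle>a,b\<rangle> \<in> Sigma(A, \<lambda>_. B) \<longleftrightarrow> a \<in> A \<and> b \<in> B"
by simp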
lemmas SigmaD1 = Sigma_iff [THEN iffD1, THEN conjunct1]
lemmas SigmaD2 = Sigma_iff [THEN iffD1, THEN conjunct2]
(*The general elimination rule*)
lemma SigmaE [elim!]:
"\<lbrakk>c \<in> Sigma(A,B);
\<And>x y.\<lbrakk>x \<in> A; y \<in> B(x); c=\<langle>x,y\<rangle>\<rbrakk> \<Longrightarrow> P
\<rbrakk> \<Longrightarrow> P"
by (unfold Sigma_def, blast)
lemma SigmaE2 [elim!]:
"\<lbrakk>\<langle>a,b\<rangle> \<in> Sigma(A,B);
\<lbrakk>a \<in> A; b \<in> B(a)\<rbrakk> \<Longrightarrow> P
\<rbrakk> \<Longrightarrow> P"
by (unfold Sigma_def, blast)
lemma Sigma_cong:
"\<lbrakk>A=A'; \<And>x. x \<in> A' \<Longrightarrow> B(x)=B'(x)\<rbrakk> \<Longrightarrow>
Sigma(A,B) = Sigma(A',B')"
by (simp add: Sigma_def)
(*Sigma_cong, Pi_cong NOT given to Addcongs: they cause
flex-flex pairs and the "Check your prover" error. Most
Sigmas and Pis are abbreviated as * or -> *)
lemma Sigma_empty1 [simp]: "Sigma(0,B) = 0"
by blast
lemma Sigma_empty2 [simp]: "A*0 = 0"
by blast
lemma Sigma_empty_iff: "A*B=0 \<longleftrightarrow> A=0 | B=0"
by blast
subsection\<open>Projections \<^term>\<open>fst\<close> and \<^term>\<open>snd\<close>\<close>
lemma fst_conv [simp]: "fst(\<langle>a,b\<rangle>) = a"
by (simp add: fst_def)
lemma snd_conv [simp]: "snd(\<langle>a,b\<rangle>) = b"
by (simp add: snd_def)
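(*Illustrative example (editorial addition): the projection equations compose, e.g.
  on nested pairs.*)
lemma "snd(snd(\<langle>a, \<langle>b,c\<rangle>\<rangle>)) = c"
by simp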
lemma fst_type [TC]: "p \<in> Sigma(A,B) \<Longrightarrow> fst(p) \<in> A"
by auto
lemma snd_type [TC]: "p \<in> Sigma(A,B) \<Longrightarrow> snd(p) \<in> B(fst(p))"
by auto
lemma Pair_fst_snd_eq: "a \<in> Sigma(A,B) \<Longrightarrow> \<langle>fst(a),snd(a)\<rangle> = a"
by auto
subsection\<open>The Eliminator, \<^term>\<open>split\<close>\<close>
(*A META-equality, so that it applies to higher types as well...*)
lemma split [simp]: "split(\<lambda>x y. c(x,y), \<langle>a,b\<rangle>) \<equiv> c(a,b)"
by (simp add: split_def)
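(*Illustrative example (editorial addition): on a concrete pair, split simply
  applies its body to the two components.*)
lemma "split(\<lambda>x y. \<langle>y,x\<rangle>, \<langle>a,b\<rangle>) = \<langle>b,a\<rangle>"
by simp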
lemma split_type [TC]:
"\<lbrakk>p \<in> Sigma(A,B);
\<And>x y.\<lbrakk>x \<in> A; y \<in> B(x)\<rbrakk> \<Longrightarrow> c(x,y):C(\<langle>x,y\<rangle>)
\<rbrakk> \<Longrightarrow> split(\<lambda>x y. c(x,y), p) \<in> C(p)"
by (erule SigmaE, auto)
lemma expand_split:
"u \<in> A*B \<Longrightarrow>
R(split(c,u)) \<longleftrightarrow> (\<forall>x\<in>A. \<forall>y\<in>B. u = \<langle>x,y\<rangle> \<longrightarrow> R(c(x,y)))"
by (auto simp add: split_def)
subsection\<open>A version of \<^term>\<open>split\<close> for Formulae: Result Type \<^typ>\<open>o\<close>\<close>
lemma splitI: "R(a,b) \<Longrightarrow> split(R, \<langle>a,b\<rangle>)"
by (simp add: split_def)
lemma splitE:
"\<lbrakk>split(R,z); z \<in> Sigma(A,B);
\<And>x y. \<lbrakk>z = \<langle>x,y\<rangle>; R(x,y)\<rbrakk> \<Longrightarrow> P
\<rbrakk> \<Longrightarrow> P"
by (auto simp add: split_def)
lemma splitD: "split(R,\<langle>a,b\<rangle>) \<Longrightarrow> R(a,b)"
by (simp add: split_def)
text \<open>
\bigskip Complex rules for Sigma.
\<close>
lemma split_paired_Bex_Sigma [simp]:
"(\<exists>z \<in> Sigma(A,B). P(z)) \<longleftrightarrow> (\<exists>x \<in> A. \<exists>y \<in> B(x). P(\<langle>x,y\<rangle>))"
by blast
lemma split_paired_Ball_Sigma [simp]:
"(\<forall>z \<in> Sigma(A,B). P(z)) \<longleftrightarrow> (\<forall>x \<in> A. \<forall>y \<in> B(x). P(\<langle>x,y\<rangle>))"
by blast
end
