diff --git a/Admin/components/components.sha1 b/Admin/components/components.sha1 --- a/Admin/components/components.sha1 +++ b/Admin/components/components.sha1 @@ -1,491 +1,494 @@ 59a71e08c34ff01f3f5c4af00db5e16369527eb7 Haskabelle-2013.tar.gz 23a96ff4951d72f4024b6e8843262eda988bc151 Haskabelle-2014.tar.gz eccff31931fb128c1dd522cfc85495c9b66e67af Haskabelle-2015.tar.gz ed740867925dcf58692c8d3e350c28e3b4d4a60f Isabelle_app-20210126.tar.gz 8ee375cfc38972f080dbc78f07b68dac03efe968 ProofGeneral-3.7.1.1.tar.gz 847b52c0676b5eb0fbf0476f64fc08c2d72afd0c ProofGeneral-4.1.tar.gz 8e0b2b432755ef11d964e20637d1bc567d1c0477 ProofGeneral-4.2-1.tar.gz 51e1e0f399e934020565b2301358452c0bcc8a5e ProofGeneral-4.2-2.tar.gz 8472221c876a430cde325841ce52893328302712 ProofGeneral-4.2.tar.gz ce750fb7f26f6f51c03c6e78096a57b8eaf11d21 apache-commons-20211211.tar.gz fbe83b522cb37748ac1b3c943ad71704fdde2f82 bash_process-1.1.1.tar.gz bb9ef498cd594b4289221b96146d529c899da209 bash_process-1.1.tar.gz 81250148f8b89ac3587908fb20645081d7f53207 bash_process-1.2.1.tar.gz 97b2491382130a841b3bbaebdcf8720c4d4fb227 bash_process-1.2.2.tar.gz 5c5b7c18cc1dc2a4d22b997dac196da09eaca868 bash_process-1.2.3-1.tar.gz 48b01bd9436e243ffcb7297f08b498d0c0875ed9 bash_process-1.2.3.tar.gz 11815d5f3af0de9022e903ed8702c136591f06fe bash_process-1.2.4-1.tar.gz 729486311833e4eff0fbf2d8041dddad520ca88c bash_process-1.2.4-2.tar.gz 7ae9ec8aab2d8a811842d9dc67d8bf6c179e11ee bash_process-1.2.4.tar.gz 9e21f447bfa0431ae5097301d553dd6df3c58218 bash_process-1.2.tar.gz a65ce644b6094d41e9f991ef851cf05eff5dd0a9 bib2xhtml-20171221.tar.gz 4085dd6060a32d7e0d2e3f874c463a9964fd409b bib2xhtml-20190409.tar.gz f92cff635dfba5d4d77f469307369226c868542c cakeml-2.0.tar.gz e7ffe4238b61a3c1ee87aca4421e7a612e09b836 ci-extras-1.tar.gz e880f31f59bd403fb72fcd3b5afb413c3831a21c csdp-6.1-1.tar.gz 2659100ba8e28e7cb0ecb554178ee5315d4a87f5 csdp-6.1.1.tar.gz a2bd94f4f9281dc70dfda66cf28016c2ffef7ed7 csdp-6.1.tar.gz ec17080269737e4a97b4424a379924c09b338ca2 csdp-6.2.0.tar.gz 70105fd6fbfd1a868383fc510772b95234325d31 csdp-6.x.tar.gz 2f6417b8e96a0e4e8354fe0f1a253c18fb55d9a7 cvc3-2.4.1.tar.gz d70bfbe63590153c07709dea7084fbc39c669841 cvc4-1.5-1.tar.gz 541eac340464c5d34b70bb163ae277cc8829c40f cvc4-1.5-2.tar.gz 1a44895d2a440091a15cc92d7f77a06a2e432507 cvc4-1.5-3.tar.gz c0d8d5929b00e113752d8bf5d11241cd3bccafce cvc4-1.5-4.tar.gz ffb0d4739c10eb098eb092baef13eccf94a79bad cvc4-1.5-5.tar.gz 3682476dc5e915cf260764fa5b86f1ebdab57507 cvc4-1.5.tar.gz a5e02b5e990da4275dc5d4480c3b72fc73160c28 cvc4-1.5pre-1.tar.gz 4d9658fd2688ae8ac78da8fdfcbf85960f871b71 cvc4-1.5pre-2.tar.gz b01fdb93f2dc2b8bcfd41c6091d91b37d6e240f9 cvc4-1.5pre-3.tar.gz 76ff6103b8560f0e2778bbfbdb05f5fa18f850b7 cvc4-1.5pre-4.tar.gz 03aec2ec5757301c9df149f115d1f4f1d2cafd9e cvc4-1.5pre.tar.gz e99560d0b7cb9bafde2b0ec1a3a95af315918a25 cvc4-1.8.tar.gz 842d9526f37b928cf9e22f141884365129990d63 cygwin-20130110.tar.gz cb3b0706d208f104b800267697204f6d82f7b48a cygwin-20130114.tar.gz 3b44cca04855016d5f8cfb5101b2e0579ab80197 cygwin-20130117.tar.gz 1fde9ddf0fa4f398965113d0c0c4f0e97c78d008 cygwin-20130716.tar.gz a03735a53c2963eb0b453f6a7282d3419f28bf38 cygwin-20130916.tar.gz 7470125fc46e24ee188bdaacc6d560e01b6fa839 cygwin-20140520.tar.gz db4dedae026981c5f001be283180abc1962b79ad cygwin-20140521.tar.gz acbc4bf161ad21e96ecfe506266ccdbd288f8a6f cygwin-20140530.tar.gz 3dc680d9eb85276e8c3e9f6057dad0efe2d5aa41 cygwin-20140626.tar.gz 8e562dfe57a2f894f9461f4addedb88afa108152 cygwin-20140725.tar.gz 238d8e30e8e22495b7ea3f5ec36e852e97fe8bbf cygwin-20140813.tar.gz 
629b8fbe35952d1551cd2a7ff08db697f6dff870 cygwin-20141024.tar.gz ce93d0b3b2743c4f4e5bba30c2889b3b7bc22f2c cygwin-20150410.tar.gz fa712dd5ec66ad16add1779d68aa171ff5694064 cygwin-20151210.tar.gz 056b843d5a3b69ecf8a52c06f2ce6e696dd275f9 cygwin-20151221.tar.gz 44f3a530f727e43a9413226c2423c9ca3e4c0cf5 cygwin-20161002.tar.gz dd56dd16d861fc6e1a008bf5e9da6f33ed6eb820 cygwin-20161022.tar.gz d9ad7aae99d54e3b9813151712eb88a441613f04 cygwin-20161024.tar.gz f8eb6a0f722e3cfe3775d1204c5c7063ee1f008e cygwin-20170828.tar.gz c22048912b010a5a0b4f2a3eb4d318d6953761e4 cygwin-20170930.tar.gz 5a3919e665947b820fd7f57787280c7512be3782 cygwin-20180604.tar.gz 2aa049170e8088de59bd70eed8220f552093932d cygwin-20190320.tar.gz fb898e263fcf6f847d97f564fe49ea0760bb453f cygwin-20190322.tar.gz cd01fac0ab4fdb50a2bbb6416da3f15a4d540da1 cygwin-20190524.tar.gz caa616fbab14c1fce790a87db5c4758c1322cf28 cygwin-20200116.tar.gz f053a9ab01f0be9cb456560f7eff66a8e7ba2fd2 cygwin-20200323.tar.gz 0107343cd2562618629f73b2581168f0045c3234 cygwin-20201002.tar.gz a3d481401b633c0ee6abf1da07d75da94076574c cygwin-20201130.tar.gz 5b1820b87b25d8f2d237515d9854e3ce54ee331b cygwin-20211002.tar.gz 5dff30be394d88dd83ea584fa6f8063bdcdc21fd cygwin-20211004.tar.gz fffaae24da4d274d34b8dc79a76b478b87ec31dd cygwin-20211007.tar.gz 0fe549949a025d65d52d6deca30554de8fca3b6e e-1.5.tar.gz 2e293256a134eb8e5b1a283361b15eb812fbfbf1 e-1.6-1.tar.gz e1919e72416cbd7ac8de5455caba8901acc7b44d e-1.6-2.tar.gz b98a98025d1f7e560ca6864a53296137dae736b4 e-1.6.tar.gz c11b25c919e2ec44fe2b6ac2086337b456344e97 e-1.8.tar.gz a895a96ec7e6fcc275114bb9b4c92b20fac73dba e-2.0-1.tar.gz 2ebd7e3067a2cdae3cb8b073345827013978d74b e-2.0-2.tar.gz fac44556dd16f666a2c186be30aa6d8c67228bb9 e-2.0-3.tar.gz 5d36fb62912cfcff7f3b99a6266c578aafc288b7 e-2.0-4.tar.gz 3223c51c0b16fe00ced4ae903041fff858e61742 e-2.0-5.tar.gz 6b962a6b4539b7ca4199977973c61a8c98a492e8 e-2.0.tar.gz 66449a7b68b7d85a7189e10735a81069356123b6 e-2.5-1.tar.gz 813b66ca151d7a39b5cacb39ab52acabc2a54845 e-2.5.tar.gz 6e63f9f354b8c06035952845b987080699a12d55 e-2.6-1.tar.gz a3bebab5df4294dac2dd7fd2065a94df00e0b3ff e-2.6.tar.gz 6d34b18ca0aa1e10bab6413045d079188c0e2dfb exec_process-1.0.1.tar.gz 8b9bffd10e396d965e815418295f2ee2849bea75 exec_process-1.0.2.tar.gz e6aada354da11e533af2dee3dcdd96c06479b053 exec_process-1.0.3.tar.gz ae7ee5becb26512f18c609e83b34612918bae5f0 exec_process-1.0.tar.gz 7a4b46752aa60c1ee6c53a2c128dedc8255a4568 flatlaf-0.46-1.tar.gz ed5cbc216389b655dac21a19e770a02a96867b85 flatlaf-0.46.tar.gz d37b38b9a27a6541c644e22eeebe9a339282173d flatlaf-1.0-rc1.tar.gz dac46ce81cee10fb36a9d39b414dec7b7b671545 flatlaf-1.0-rc2.tar.gz d94e6da7299004890c04a7b395a3f2d381a3281e flatlaf-1.0-rc3.tar.gz 7ca3e6a8c9bd837990e64d89e7fa07a7e7cf78ff flatlaf-1.0.tar.gz 9908e5ab721f1c0035c0ab04dc7ad0bd00a8db27 flatlaf-1.2.tar.gz 9534b721b7b78344f3225067ee4df28a5440b87e flatlaf-1.6.4.tar.gz 212a0f1f867511722024cc60156fd71872a16f92 flatlaf-1.6.tar.gz +6d4dbb6f2bde5804298d9008e3edceb0b9ee20ae flatlaf-2.4.tar.gz f339234ec18369679be0095264e0c0af7762f351 gnu-utils-20210414.tar.gz 71259aa46134e6cf2c6473b4fc408051b3336490 gnu-utils-20211030.tar.gz 683acd94761ef460cca1a628f650355370de5afb hol-light-bundle-0.5-126.tar.gz 511fa8df8be88eb0500032bbd17742d33bdd4636 hugo-0.88.1.tar.gz 989234b3799fe8750f3c24825d1f717c24fb0214 idea-icons-20210508.tar.gz 20b53cfc3ffc5b15c1eabc91846915b49b4c0367 isabelle_fonts-20151021.tar.gz 736844204b2ef83974cd9f0a215738b767958c41 isabelle_fonts-20151104.tar.gz 9502c1aea938021f154adadff254c5c55da344bd isabelle_fonts-20151106.tar.gz 
f5c63689a394b974ac0d365debda577c6fa31c07 isabelle_fonts-20151107.tar.gz 812101680b75f7fa9ee8e138ea6314fa4824ea2d isabelle_fonts-20151229.tar.gz 2730e1475c7d655655882e75743e0b451725a274 isabelle_fonts-20151231.tar.gz 1f004a6bf20088a7e8f1b3d4153aa85de6fc1091 isabelle_fonts-20160101.tar.gz 379d51ef3b71452dac34ba905def3daa8b590f2e isabelle_fonts-20160102.tar.gz 878536aab1eaf1a52da560c20bb41ab942971fa3 isabelle_fonts-20160227.tar.gz 8ff0eedf0191d808ecc58c6b3149a4697f29ab21 isabelle_fonts-20160812-1.tar.gz 9283e3b0b4c7239f57b18e076ec8bb21021832cb isabelle_fonts-20160812.tar.gz 620cffeb125e198b91a716da116f754d6cc8174b isabelle_fonts-20160830.tar.gz b70690c85c05d0ca5bc29287abd20142f6ddcfb0 isabelle_fonts-20171222.tar.gz c17c482e411bbaf992498041a3e1dea80336aaa6 isabelle_fonts-20171230.tar.gz 3affbb306baff37c360319b21cbaa2cc96ebb282 isabelle_fonts-20180113.tar.gz bee32019e5d7cf096ef2ea1d836c732e9a7628cc isabelle_fonts-20181124.tar.gz f249bc2c85bd2af9eee509de17187a766b74ab86 isabelle_fonts-20181129.tar.gz 928b5320073d04d93bcc5bc4347b6d01632b9d45 isabelle_fonts-20190210.tar.gz dfcdf9a757b9dc36cee87f82533b43c58ba84abe isabelle_fonts-20190309.tar.gz 95e3acf038df7fdeeacd8b4769930e6f57bf3692 isabelle_fonts-20190406.tar.gz dabcf5085d67c99159007007ff0e9bf775e423d1 isabelle_fonts-20190409.tar.gz 76827987c70051719e117138858930d42041f57d isabelle_fonts-20190717.tar.gz abc8aea3ae471f9313917008ac90e5c1c99e17da isabelle_fonts-20210317.tar.gz 3ff9195aab574fc75ca3b77af0adb33f9b6d7b74 isabelle_fonts-20210318.tar.gz b166b4bd583b6442a5d75eab06f7adbb66919d6d isabelle_fonts-20210319.tar.gz 9467ad54a9ac10a6e7e8db5458d8d2a5516eba96 isabelle_fonts-20210321.tar.gz 1f7a0b9829ecac6552b21e995ad0f0ac168634f3 isabelle_fonts-20210322.tar.gz 667000ce6dd6ea3c2d11601a41c206060468807d isabelle_fonts-20211004.tar.gz 916adccd2f40c55116b68b92ce1eccb24d4dd9a2 isabelle_setup-20210630.tar.gz c611e363287fcc9bdd93c33bef85fa4e66cd3f37 isabelle_setup-20210701.tar.gz a0e7527448ef0f7ce164a38a50dc26e98de3cad6 isabelle_setup-20210709.tar.gz e413706694b0968245ee15183af2d464814ce0a4 isabelle_setup-20210711.tar.gz d2c9fd7b73457a460111edd6eb93a133272935fb isabelle_setup-20210715.tar.gz a5f478ba1088f67c2c86dc2fa7764b6d884e5ae5 isabelle_setup-20210716-1.tar.gz 79fad009cb22aa5e7cb4aed3c810ad5f61790293 isabelle_setup-20210716.tar.gz 692a39f716998e556ec9559c9ca362fc8fc9d5b6 isabelle_setup-20210717-1.tar.gz 7322d6d84d75c486a58ed36630431db4499e3232 isabelle_setup-20210717-2.tar.gz 14f8508bcae9140815bb23e430e26d2cbc504b81 isabelle_setup-20210717.tar.gz ca801d5c380ea896ee32b309ff19ae5f34538963 isabelle_setup-20210718.tar.gz ac9739e38e4fbbfce1a71a0987a57b22f83922d3 isabelle_setup-20210724-1.tar.gz 4554679cc8ea31e539655810a14d14216b383d0e isabelle_setup-20210724-2.tar.gz 127a75ae33e97480d352087fcb9b47a632d77169 isabelle_setup-20210724.tar.gz 309909ec6d43ae460338e9af54c1b2a48adcb1ec isabelle_setup-20210726.tar.gz a14ce46c62c64c3413f3cc9239242e33570d0f3d isabelle_setup-20210922.tar.gz b22066a9dcde6f813352dcf6404ac184440a22df isabelle_setup-20211109.tar.gz 91c5d29e9fa40aee015e8e65ffea043e218c2fc5 isabelle_setup-20220323.tar.gz 056979bd1c08eb9d0d12cc1118b4ff70bfe2d594 isabelle_setup-20220701.tar.gz 0b2206f914336dec4923dd0479d8cee4b904f544 jdk-11+28.tar.gz e12574d838ed55ef2845acf1152329572ab0cc56 jdk-11.0.10+9.tar.gz 3e05213cad47dbef52804fe329395db9b4e57f39 jdk-11.0.2+9.tar.gz 06ac8993b5bebd02c70f1bd18ce13075f01115f3 jdk-11.0.3+7.tar.gz e7e3cc9b0550c1e5d71197ad8c30f92b622d7183 jdk-11.0.4+11.tar.gz 49007a84a2643a204ce4406770dfd574b97880d9 jdk-11.0.5+10.tar.gz 
3c250e98eb82f98afc6744ddc9170d293f0677e1 jdk-11.0.6+10.tar.gz 76cf7a141e15db30bd975089c65c833b58092aa7 jdk-11.0.9+11.tar.gz 71d19df63816e9be1c4c5eb44aea7a44cfadb319 jdk-11.tar.gz 72455a2fdb6cced9cd563f4d5d6134f7a6c34913 jdk-15.0.1+9.tar.gz e8ae300e61b0b121018456d50010b555bc96ce10 jdk-15.0.2+7.tar.gz a426a32ad34014953c0f7d4cc6f44199572e1c38 jdk-17+35.tar.gz 85707cfe369d0d32accbe3d96a0730c87e8639b5 jdk-17.0.1+12.tar.gz 699ab2d723b2f1df151a7dbcbdf33ddad36c7978 jdk-17.0.2+8.tar.gz 8d83e433c1419e0c0cc5fd1762903d11b4a5752c jdk-6u31.tar.gz 38d2d2a91c66714c18430e136e7e5191af3996e6 jdk-7u11.tar.gz d765bc4ad2f34d494429b2a8c1563c49db224944 jdk-7u13.tar.gz 13a265e4b706ece26fdfa6fc9f4a3dd1366016d2 jdk-7u21.tar.gz 5080274f8721a18111a7f614793afe6c88726739 jdk-7u25.tar.gz dd24d63afd6d17b29ec9cb2b2464d4ff2e02de2c jdk-7u40.tar.gz ec740ee9ffd43551ddf1e5b91641405116af6291 jdk-7u6.tar.gz 71b629b2ce83dbb69967c4785530afce1bec3809 jdk-7u60.tar.gz e119f4cbfa2a39a53b9578d165d0dc44b59527b7 jdk-7u65.tar.gz d6d1c42989433839fe64f34eb77298ef6627aed4 jdk-7u67.tar.gz b66039bc6dc2bdb2992133743005e1e4fc58ae24 jdk-7u72.tar.gz d980055694ddfae430ee001c7ee877d535e97252 jdk-7u76.tar.gz baa6de37bb6f7a104ce5fe6506bca3d2572d601a jdk-7u80.tar.gz 7d5b152ac70f720bb9e783fa45ecadcf95069584 jdk-7u9.tar.gz baf275a68d3f799a841932e4e9a95a1a604058ae jdk-8u102.tar.gz 5442f1015a0657259be0590b04572cd933431df7 jdk-8u11.tar.gz 741de6a4a805a0f9fb917d1845409e99346c2747 jdk-8u112.tar.gz ae7df8bd0c18eb40237cf54cc28933f4893b9c92 jdk-8u121.tar.gz 51531a3a0c16e180ed95cb7d2bd680c2ec0aa553 jdk-8u131.tar.gz e45edcf184f608d6f4a7b966d65a5d3289462693 jdk-8u144.tar.gz 264e806b9300a4fb3b6e15ba0e2c664d4ea698c8 jdk-8u152.tar.gz 84b04d877a2ea3a4e2082297b540e14f76722bc5 jdk-8u162.tar.gz 87303a0de3fd595aa3857c8f7cececa036d6ed18 jdk-8u172.tar.gz 9ae0338a5277d8749b4b4c7e65fc627319d98b27 jdk-8u181.tar.gz cfecb1383faaf027ffbabfcd77a0b6a6521e0969 jdk-8u20.tar.gz 44ffeeae219782d40ce6822b580e608e72fd4c76 jdk-8u31.tar.gz c95ebf7777beb3e7ef10c0cf3f734cb78f9828e4 jdk-8u5.tar.gz 4132cf52d5025bf330d53b96a5c6466fef432377 jdk-8u51.tar.gz 74df343671deba03be7caa49de217d78b693f817 jdk-8u60.tar.gz dfb087bd64c3e5da79430e0ba706b9abc559c090 jdk-8u66.tar.gz 2ac389babd15aa5ddd1a424c1509e1c459e6fbb1 jdk-8u72.tar.gz caa0cf65481b6207f66437576643f41dabae3c83 jdk-8u92.tar.gz 778fd85c827ec49d2d658a832d20e63916186b0d jedit-20210715.tar.gz beb99f2cb0bd4e595c5c597d3970c46aa21616e4 jedit-20210717.tar.gz 33dd96cd83f2c6a26c035b7a0ee57624655224c5 jedit-20210724.tar.gz 0e4fd4d66388ddc760fa5fbd8d4a9a3b77cf59c7 jedit-20210802.tar.gz 258d527819583d740a3aa52dfef630eed389f8c6 jedit-20211019.tar.gz f4f3fcbd54488297a5d2fcd23a2595912d5ba80b jedit-20211103.tar.gz 44775a22f42a9d665696bfb49e53c79371c394b0 jedit_build-20111217.tar.gz a242a688810f2bccf24587b0062ce8027bf77fa2 jedit_build-20120304.tar.gz 4c948dee53f74361c097c08f49a1a5ff9b17bd1d jedit_build-20120307.tar.gz 9c221fe71af8a063fcffcce21672a97aea0a8d5b jedit_build-20120313.tar.gz ed72630f307729df08fdedb095f0af8725f81b9c jedit_build-20120327.tar.gz 6425f622625024c1de27f3730d6811f6370a19cd jedit_build-20120414.tar.gz 7b012f725ec1cc102dc259df178d511cc7890bba jedit_build-20120813.tar.gz 8e1d36f5071e3def2cb281f7fefe9f52352cb88f jedit_build-20120903.tar.gz 8fa0c67f59beba369ab836562eed4e56382f672a jedit_build-20121201.tar.gz 06e9be2627ebb95c45a9bcfa025d2eeef086b408 jedit_build-20130104.tar.gz c85c0829b8170f25aa65ec6852f505ce2a50639b jedit_build-20130628.tar.gz 5de3e399be2507f684b49dfd13da45228214bbe4 jedit_build-20130905.tar.gz 
87136818fd5528d97288f5b06bd30c787229eb0d jedit_build-20130910.tar.gz c63189cbe39eb8104235a0928f579d9523de78a9 jedit_build-20130925.tar.gz 65cc13054be20d3a60474d406797c32a976d7db7 jedit_build-20130926.tar.gz 30ca171f745adf12b65c798c660ac77f9c0f9b4b jedit_build-20131106.tar.gz 054c1300128f8abd0f46a3e92c756ccdb96ff2af jedit_build-20140405.tar.gz 4a963665537ea66c69de4d761846541ebdbf69f2 jedit_build-20140511.tar.gz a9d637a30f6a87a3583f265da51e63e3619cff52 jedit_build-20140722.tar.gz f29391c53d85715f8454e1aaa304fbccc352928f jedit_build-20141018.tar.gz d7206d4c9d14d3f4c8115422b7391ffbcc6e80b4 jedit_build-20141026.tar.gz f15d36abc1780875a46b6dbd4568e43b776d5db6 jedit_build-20141104.tar.gz 14ce124c897abfa23713928dc034d6ef0e1c5031 jedit_build-20150228.tar.gz b5f7115384c167559211768eb5fe98138864473b jedit_build-20151023.tar.gz 8ba7b6791be788f316427cdcd805daeaa6935190 jedit_build-20151124.tar.gz c70c5a6c565d435a09a8639f8afd3de360708e1c jedit_build-20160330.tar.gz d4e1496c257659cf15458d718f4663cdd95a404e jedit_build-20161024.tar.gz d806c1c26b571b5b4ef05ea11e8b9cf936518e06 jedit_build-20170319.tar.gz 7bcb202e13358dd750e964b2f747664428b5d8b3 jedit_build-20180417.tar.gz 23c8a05687d05a6937f7d600ac3aa19e3ce59c9c jedit_build-20180504.tar.gz 9c64ee0705e5284b507ca527196081979d689519 jedit_build-20181025.tar.gz cfa65bf8720b9b798ffa0986bafbc8437f44f758 jedit_build-20181026.tar.gz 847492b75b38468268f9ea424d27d53f2d95cef4 jedit_build-20181203.tar.gz 536a38ed527115b4bf2545a2137ec57b6ffad718 jedit_build-20190120.tar.gz 58b9f03e5ec0b85f8123c31f5d8092dae5803773 jedit_build-20190130.tar.gz ec0aded5f2655e2de8bc4427106729e797584f2f jedit_build-20190224.tar.gz 1e53598a02ec8d8736b15f480cbe2c84767a7827 jedit_build-20190508.tar.gz b9c6f49d3f6ebe2e85a50595ce7412d01a4314ac jedit_build-20190717.tar.gz 1c753beb93e92e95e99e8ead23a68346bd1af44a jedit_build-20200610.tar.gz 533b1ee6459f59bcbe4f09e214ad2cb990fb6952 jedit_build-20200908.tar.gz f9966b5ed26740bb5b8bddbfe947fcefaea43d4d jedit_build-20201223.tar.gz 0bdbd36eda5992396e9c6b66aa24259d4dd7559c jedit_build-20210201.tar.gz a0744f1948abdde4bfb51dd4769b619e7444baf1 jedit_build-20210510-1.tar.gz 837d6c8f72ecb21ad59a2544c69aadc9f05684c6 jedit_build-20210510.tar.gz 7bdae3d24b10261f6cb277446cf9ecab6062bd6f jedit_build-20210708.tar.gz 0bd2bc2d9a491ba5fc8dd99df27c04f11a72e8fa jfreechart-1.0.14-1.tar.gz 8122526f1fc362ddae1a328bdbc2152853186fee jfreechart-1.0.14.tar.gz d911f63a5c9b4c7335bb73f805cb1711ce017a84 jfreechart-1.5.0.tar.gz d84b7d8ef273afec55284327fca7dd20f5ecb77a jfreechart-1.5.1.tar.gz 6fa0c221ef55919b684449f0111a8112358e94ff jfreechart-1.5.3.tar.gz c8a19a36adf6cefa779d85f22ded2f4654e68ea5 jortho-1.0-1.tar.gz 2155e0bdbd29cd3d2905454de2e7203b9661d239 jortho-1.0-2.tar.gz ffe179867cf5ffaabbb6bb096db9bdc0d7110065 jortho-1.0.tar.gz 6c737137cc597fc920943783382e928ea79e3feb kodkodi-1.2.16.tar.gz afb04f4048a87bb888fe7b05b0139cb060c7925b kodkodi-1.5.2-1.tar.gz 5f95c96bb99927f3a026050f85bd056f37a9189e kodkodi-1.5.2.tar.gz 0634a946b216f7f07f1a0f7e28cf345daa28828f kodkodi-1.5.3.tar.gz 52e95b3493d71902f9df89d0bb59d0046a5f0c63 kodkodi-1.5.4-1.tar.gz 267189c637de26cf304d699cfa95389da002b250 kodkodi-1.5.4.tar.gz 3ecdade953bb455ed2907952be287d7e5cf6533b kodkodi-1.5.5.tar.gz 8aa939f5127290eb9a99952d375be9ffbf90c43b kodkodi-1.5.6-1.tar.gz 6b12bf3f40b16fae8ff22aa39171fa018d107cb3 kodkodi-1.5.6.tar.gz c8b2e632f3ab959a4e037833a45e6360c8b72a99 kodkodi-1.5.7.tar.gz 377e36efb8608e6c828c7718d890e97fde2006a4 linux_app-20131007.tar.gz 759848095e2ad506083d92b5646947e3c32f27a0 linux_app-20191223.tar.gz 
1a449ce69ac874e21804595d16aaaf5a0d0d0c10 linux_app-20200110.tar.gz 0aab4f73ff7f5e36f33276547e10897e1e56fb1d macos_app-20130716.tar.gz ad5d0e640ce3609a885cecab645389a2204e03bb macos_app-20150916.tar.gz 400af57ec5cd51f96928d9de00d077524a6fe316 macos_app-20181205.tar.gz 3bc42b8e22f0be5ec5614f1914066164c83498f8 macos_app-20181208.tar.gz 5fb1a2d21b220d0e588790c0203ac87c10ed0870 minisat-2.2.1-1.tar.gz ae76bfaade3bf72ff6b2d3aafcd52fa45609fcd1 minisat-2.2.1.tar.gz eda10c62da927a842c0a8881f726eac85e1cb4f7 naproche-20210122.tar.gz edcb517b7578db4eec1b6573b624f291776e11f6 naproche-20210124.tar.gz d858eb0ede6aea6b8cc40de63bd3a17f8f9f5300 naproche-20210129.tar.gz 810ee0f35adada9bf970c33fd80b986ab2255bf3 naproche-20210201.tar.gz 37bb6d934cfaf157efcadb349a0244d145ce15b0 naproche-20211211.tar.gz d098dd0873b1720a77dc4e060267f9a6c93f341a naproche-2d99afe5c349.tar.gz 4a4e56fd03b7ba4edd38046f853873a90cf55d1a naproche-4ad61140062f.tar.gz 77252e0b40f89825b9b5935f9f0c4cd5d4e7012a naproche-6d0d76ce2f2a.tar.gz 9c02ecf93863c3289002c5e5ac45a83e2505984c naproche-755224402e36.tar.gz e1b34e8f54e7e5844873612635444fed434718a1 naproche-7d0947a91dd5.tar.gz 26df569cee9c2fd91b9ac06714afd43f3b37a1dd nunchaku-0.3.tar.gz e573f2cbb57eb7b813ed5908753cfe2cb41033ca nunchaku-0.5.tar.gz 3d7b7690dfd09e25ad56e64b519f61f06e3ab706 old_vampire-4.2.2.tar.gz fe57793aca175336deea4f5e9c0d949a197850ac opam-1.2.2.tar.gz eb499a18e7040ca0fe1ca824c9dcb2087c47c9ba opam-2.0.3-1.tar.gz 002f74c9e65e650de2638bf54d7b012b8de76c28 opam-2.0.3.tar.gz ddb3b438430d9565adbf5e3d913bd52af8337511 opam-2.0.6.tar.gz fc66802c169f44511d3be30435eb89a11e635742 opam-2.0.7.tar.gz 108e947d17e9aa6170872614492d8f647802f483 opam-2.1.0.tar.gz f8d0218371457eabe2b4214427d9570de92ed861 pdfjs-2.12.313.tar.gz +aa7fc4a3d2cbd6c8744ddfeefd863828ea602bcd pdfjs-2.14.305.tar.gz 1c8cb6a8f4cbeaedce2d6d1ba8fc7e2ab3663aeb polyml-5.4.1.tar.gz a3f9c159a0ee9a63b7a5d0c835ed9c2c908f8b56 polyml-5.5.0-1.tar.gz 7d604a99355efbfc1459d80db3279ffa7ade3e39 polyml-5.5.0-2.tar.gz b3d776e6744f0cd2773d467bc2cfe1de3d1ca2fd polyml-5.5.0-3.tar.gz 1812e9fa6d163f63edb93e37d1217640a166cf3e polyml-5.5.0.tar.gz 36f5b8224f484721749682a3655c796a55a2718d polyml-5.5.1-1.tar.gz 36f78f27291a9ceb13bf1120b62a45625afd44a6 polyml-5.5.1.tar.gz a588640dbf5da9ae15455b02ef709764a48637dc polyml-5.5.2-1.tar.gz 4b690390946f7bfb777b89eb16d6f08987cca12f polyml-5.5.2-2.tar.gz 5b31ad8556e41dfd6d5e85f407818be399aa3d2a polyml-5.5.2-3.tar.gz 532f6e8814752aeb406c62fabcfd2cc05f8a7ca8 polyml-5.5.2.tar.gz 1c53f699d35c0db6c7cf4ea51f2310adbd1d0dc5 polyml-5.5.3-20150820.tar.gz b4b624fb5f34d1dc814fb4fb469fafd7d7ea018a polyml-5.5.3-20150908.tar.gz b668e1f43a41608a8eb365c5e19db6c54c72748a polyml-5.5.3-20150911.tar.gz 1f5cd9b1390dab13861f90dfc06d4180cc107587 polyml-5.5.3-20150916.tar.gz f78896e588e8ebb4da57bf0c95210b0f0fa9e551 polyml-5.6-1.tar.gz 21fa0592b7dfd23269063f42604438165630c0f0 polyml-5.6-2.tar.gz 03ba81e595fa6d6df069532d67ad3195c37d9046 polyml-5.6-20151123.tar.gz 822f489c18e38ce5ef979ec21dccce4473e09be6 polyml-5.6-20151206.tar.gz bd6a448f0e0d5787747f4f30ca661f9c1868e4a7 polyml-5.6-20151223.tar.gz 5b70c12c95a90d858f90c1945011289944ea8e17 polyml-5.6-20160118.tar.gz 5b19dc93082803b82aa553a5cfb3e914606c0ffd polyml-5.6.tar.gz 80b923fca3533bf291ff9da991f2262a98b68cc4 polyml-5.7-20170217.tar.gz 381a70cecf0fdee47f6842e2bdb5107ed52adab6 polyml-5.7.1-1.tar.gz 39dac33b569ac66f76126b8f4edc6d9227bd8a63 polyml-5.7.1-2.tar.gz 0b896ccc35bd3f2541cd55e6f0ed14637ed9fc68 polyml-5.7.1-4.tar.gz 262450ac9966abebae2e1d4f9ae703cfe0f5d8d9 polyml-5.7.1-5.tar.gz 
1aeb57877d694db7fe4d4395287cddf3bc77710b polyml-5.7.1-6.tar.gz e3e7e20b1e0e5d5d68df4cd4caa1e1a7410d46b6 polyml-5.7.1-7.tar.gz 1430533c09b17f8be73798a47a5f409d43a04cf4 polyml-5.7.1-8.tar.gz 171b5783b88522a35e4822b19ef8ba838c04f494 polyml-5.7.1.tar.gz 5fbcab1da2b5eb97f24da2590ece189d55b3a105 polyml-5.7.tar.gz 51e024225b460900da5279f0b91b217085f98cf9 polyml-5.8-20190220.tar.gz 20a83fa58d497b533150defe39bcd4540529b25f polyml-5.8-20190306.tar.gz 9f0e9cd10df4c3383b063eb076e8b698ca50c3d0 polyml-5.8.1-20191101.tar.gz f46deb909d645ac8c140968e4d32b5763beb9add polyml-5.8.1-20191113.tar.gz 36a40a981b57daae0463d14940a8edf6fa1af179 polyml-5.8.1-20191114.tar.gz 525b05536b08c11a1eae943fe6818a8622326084 polyml-5.8.1-20191124.tar.gz 9043828803483ca14df64488dff014ad050a6d34 polyml-5.8.1-20200228.tar.gz 1186607e2c43b77db86731f12fbedb531ca50a21 polyml-5.8.1-20200708.tar.gz 22ae16bf7850e73b903d2ca8eb506da05b441cf3 polyml-5.8.1.tar.gz cb8e85387315f62dcfc6b21ec378186e58068f76 polyml-5.8.2.tar.gz d1fd6eced69dc1df7226432fcb824568e0994ff2 polyml-5.8.tar.gz fb40145228f84513a9b083b54678a7d61b9c34c4 polyml-5.9-5d4caa8f7148.tar.gz 0f1c903b043acf7b221821d8b6374b3f943a122b polyml-5.9-610a153b941d.tar.gz 5f00a47b8f5180b33e68fcc6c343b061957a0a98 polyml-5.9-960de0cd0795.tar.gz 7056b285af67902b32f5049349a064f073f05860 polyml-5.9-cc80e2b43c38.tar.gz 0c396bd6b46ff11a2432b91aab2be0248bd9b0a4 polyml-5.9.tar.gz 49f1adfacdd6d29fa9f72035d94a31eaac411a97 polyml-test-0a6ebca445fc.tar.gz 2a8c4421e0a03c0d6ad556b3c36c34eb11568adb polyml-test-1236652ebd55.tar.gz 8e83fb5088cf265902b8da753a8eac5fe3f6a14b polyml-test-159dc81efc3b.tar.gz b80c17398293d0c8f8d9923427176efb33cf2d89 polyml-test-15c840d48c9a.tar.gz a0064c157a59e2706e18512a49a6dca914fa17fc polyml-test-1b2dcf8f5202.tar.gz 4e6543dbbb2b2aa402fd61428e1c045c48f18b47 polyml-test-79534495ee94.tar.gz 853ab0e9ff2b73790cc80a2d36cbff8b03e50a8e polyml-test-7a7b742897e9.tar.gz 85bfda83d138e936fdafd68ed3627b1058e5c2c3 polyml-test-7e49fce62e3d.tar.gz c629cd499a724bbe37b962f727e4ff340c50299d polyml-test-8529546198aa.tar.gz 7df4857d73dbc9edda25a6ad329e47639e70fadf polyml-test-8fda4fd22441.tar.gz 2b7c02b67feb2f44dda6938a7244f4257e7c580c polyml-test-905dae2ebfda.tar.gz 3dfdc58e5d9b28f038a725e05c9c2f2ce0bb2632 polyml-test-a3cfdf648da-1.tar.gz e2f075b0cc709f4f7f6492b725362f9010b2c6d1 polyml-test-a3cfdf648da-2.tar.gz 33568f69ce813b7405386ddbefa14ad0342bb8f0 polyml-test-a3cfdf648da.tar.gz 4bedaac4f1fb9a9199aa63695735063c47059003 polyml-test-a444f281ccec.tar.gz f3031692edcc5d8028a42861e4e40779f0f9d3e1 polyml-test-b68438d33c69.tar.gz cb2318cff6ea9293cd16a4435a4fe28ad9dbe0b8 polyml-test-cf46747fee61.tar.gz 67ffed2f98864721bdb1e87f0ef250e4c69e6160 polyml-test-d68c6736402e.tar.gz b4ceeaac47f3baae41c2491a8368b03217946166 polyml-test-e7a662f8f9c4.tar.gz 609c7d09d3ed01156ff91261e801e2403ff93729 polyml-test-e8d82343b692.tar.gz b6d87466e9b44e8ef4a2fac74c96b139080a506a polyml-test-f54aa41240d0.tar.gz d365f3fc11c2427cafc62b3c79951880a1476ebb polyml-test-f86ae3dc1686.tar.gz a619177143fea42a464f49bb864665407c07a16c polyml-test-fb4f42af00fa.tar.gz 53123dc011b2d4b4e8fe307f3c9fa355718ad01a postgresql-42.1.1.tar.gz 3a5d31377ec07a5069957f5477a4848cfc89a594 postgresql-42.1.4.tar.gz 7d6ef4320d5163ceb052eb83c1cb3968f099a422 postgresql-42.2.18.tar.gz e7cd5c7955e9eb5ce8cd07feb97230b23d2eec40 postgresql-42.2.2.tar.gz 1aaa38429dc9aa7b1095394d9a7ba3465f8d6e04 postgresql-42.2.24.tar.gz 231b33c9c3c27d47e3ba01b399103d70509e0731 postgresql-42.2.5.tar.gz 6335fbc0658e447b5b9bc48c9ad36e33a05bb72b postgresql-42.2.9.tar.gz 
+f84c7ecafb07a0d763f1d70edc54f7c43c2e8c63 postgresql-42.4.0.tar.gz f132329ca1045858ef456cc08b197c9eeea6881b postgresql-9.4.1212.tar.gz f042bba5fb82c7eb8aee99f92eb6ec38c8a067f7 python-3.10.4.tar.gz 0885e1f1d8feaca78d2f204b6487e6eec6dfab4b scala-2.10.0.tar.gz f7dc7a4e1aea46408fd6e44b8cfacb33af61afbc scala-2.10.1.tar.gz 207e4916336335386589c918c5e3f3dcc14698f2 scala-2.10.2.tar.gz 21c8ee274ffa471ab54d4196ecd827bf3d43e591 scala-2.10.3.tar.gz d4688ddaf83037ca43b5bf271325fc53ae70e3aa scala-2.10.4.tar.gz 44d12297a78988ffd34363535e6a8e0d94c1d8b5 scala-2.11.0.tar.gz 14f20de82b25215a5e055631fb147356400625e6 scala-2.11.1.tar.gz 4fe9590d08e55760b86755d3fab750e90ac6c380 scala-2.11.2.tar.gz 27a296495b2167148de06314ed9a942f2dbe23fe scala-2.11.4.tar.gz 4b24326541161ce65424293ca9da3e7c2c6ab452 scala-2.11.5.tar.gz e7cf20e3b27c894c6127c7a37042c1667f57385e scala-2.11.6.tar.gz 4810c1b00719115df235be1c5991aa6ea7186134 scala-2.11.7.tar.gz 3eca4b80710996fff87ed1340dcea2c5f6ebf4f7 scala-2.11.8.tar.gz 0004e53f885fb165b50c95686dec40d99ab0bdbd scala-2.12.0.tar.gz 059cbdc58d36e3ac1fffcccd9139ecd34f271882 scala-2.12.10.tar.gz 82056106aa6fd37c159ea76d16096c20a749cccd scala-2.12.11.tar.gz fe7ff585acffaad7f0dd4a1d079134d15c26ed0d scala-2.12.12.tar.gz 74a8c3dab3a25a87357996ab3e95d825dc820fd0 scala-2.12.2.tar.gz d66796a68ec3254b46b17b1f8ee5bcc56a93aacf scala-2.12.3.tar.gz 1636556167dff2c191baf502c23f12e09181ef78 scala-2.12.4.tar.gz 8171f494bba54fb0d01c887f889ab8fde7171c2a scala-2.12.5.tar.gz 54c1b06fa2c5f6c2ab3d391ef342c0532cd7f392 scala-2.12.6.tar.gz 02358f00acc138371324b6248fdb62eed791c6bd scala-2.12.7.tar.gz 201c05ae9cc382ee6c08af49430e426f6bbe0d5a scala-2.12.8.tar.gz a0622fe75c3482ba7dc3ce74d58583b648a1ff0d scala-2.13.4-1.tar.gz ec53cce3c5edda1145ec5d13924a5f9418995c15 scala-2.13.4.tar.gz caedd48ae65db9d116a0e1712eec3a66fe95c712 scala-2.13.5-1.tar.gz f51981baf34c020ad103b262f81796c37abcaa4a scala-2.13.5.tar.gz 0a7cab09dec357dab7819273f2542ff1c3ea0968 scala-2.13.6.tar.gz 1f8532dba290c6b2ef364632f3f92e71da93baba scala-2.13.7.tar.gz b447017e81600cc5e30dd61b5d4962f6da01aa80 scala-2.8.1.final.tar.gz 5659440f6b86db29f0c9c0de7249b7e24a647126 scala-2.9.2.tar.gz 97c5b73011f4d6438b616e5940e6d759034f5414 scala-3.1.3.tar.gz abe7a3b50da529d557a478e9f631a22429418a67 smbc-0.4.1.tar.gz cbd491c0feba1d21019d05564e76dd04f592ccb4 spass-3.8ds-1.tar.gz edaa1268d82203067657aabcf0371ce7d4b579b9 spass-3.8ds-2.tar.gz 43b5afbcad575ab6817d2289756ca22fd2ef43a9 spass-3.8ds.tar.gz b016a785f1f78855c00d351ff598355c3b87450f sqlite-jdbc-3.18.0-1.tar.gz b85b5bc071a59ef2a8326ceb1617d5a9a5be41cf sqlite-jdbc-3.18.0.tar.gz e56117a67ab01fb24c7fc054ede3160cefdac5f8 sqlite-jdbc-3.20.0.tar.gz 27aeac6a91353d69f0438837798ac4ae6f9ff8c5 sqlite-jdbc-3.23.1.tar.gz 4d17611857fa3a93944c1f159c0fd2a161967aaf sqlite-jdbc-3.27.2.1.tar.gz 806be457eb79408fcc5a72aeca3f64b2d89a6b63 sqlite-jdbc-3.30.1.tar.gz cba2b194114216b226d75d49a70d1bd12b141ac8 sqlite-jdbc-3.32.3.2.tar.gz 29306acd6ce9f4c87032b2c271c6df035fe7d4d3 sqlite-jdbc-3.34.0.tar.gz 8a2ca4d02cfedbfe4dad4490f1ed3ddba33a009a sqlite-jdbc-3.36.0.3.tar.gz 8d20968603f45a2c640081df1ace6a8b0527452a sqlite-jdbc-3.8.11.2.tar.gz 2369f06e8d095f9ba26df938b1a96000e535afff ssh-java-20161009.tar.gz a2335d28b5b95d8d26500a53f1a9303fc5beaf36 ssh-java-20190323.tar.gz fdc415284e031ee3eb2f65828cbc6945736fe995 stack-1.9.1.tar.gz 6e19948ff4a821e2052fc9b3ddd9ae343f4fcdbb stack-1.9.3.tar.gz f969443705aa8619e93af5b34ea98d15cd7efaf1 stack-2.1.3.tar.gz ebd0221d038966aa8bde075f1b0189ff867b02ca stack-2.5.1.tar.gz 
fa2d882ec45cbc8c7d2f3838b705a8316696dc66 stack-2.7.3.tar.gz 1f4a2053cc1f34fa36c4d9d2ac906ad4ebc863fd sumatra_pdf-2.1.1.tar.gz 601e08d048d8e50b0729429c8928b667d9b6bde9 sumatra_pdf-2.3.2.tar.gz 14d46c2eb1a34821703da59d543433f581e91df3 sumatra_pdf-2.4.tar.gz 44d67b6742919ce59a42368fc60e2afa210a3e42 sumatra_pdf-2.5.2.tar.gz 89719a13bc92810730a430973684629426ed1b2a sumatra_pdf-3.0.tar.gz f5afcc82f8e734665d38867e99475d3ad0d5ed15 sumatra_pdf-3.1.1.tar.gz a45eca5c1277f42f87bb8dc12a3074ccf5488221 sumatra_pdf-3.1.2-1.tar.gz 3b3239b2e6f8062b90d819f3703e30a50f4fa1e7 sumatra_pdf-3.1.2-2.tar.gz 8486387f61557147ec06b1f637117c017c8f0528 sumatra_pdf-3.1.2.tar.gz e8648878f908e93d64a393231ab21fdac976a9c2 sumatra_pdf-3.3.3.tar.gz 869ea6d8ea35c8ba68d7fcb028f16b2b7064c5fd vampire-1.0.tar.gz 399f687b56575b93e730f68c91c989cb48aa34d8 vampire-4.2.2.tar.gz 0402978ca952f08eea73e483b694928ac402a304 vampire-4.5.1-1.tar.gz 26d9d171e169c6420a08aa99eda03ef5abb9c545 vampire-4.5.1.tar.gz 4571c042efd6fc3097e105a528826959acd888a3 vampire-4.6.tar.gz 98c5c79fef7256db9f64c8feea2edef0a789ce46 verit-2016post.tar.gz 52ba18a6c96b53c5ae9b179d5a805a0c08f1da6d verit-2020.10-rmx-1.tar.gz b6706e74e20e14038e9b38f0acdb5639a134246a verit-2020.10-rmx.tar.gz d33e1e36139e86b9e9a48d8b46a6f90d7863a51c verit-2021.06-rmx-1.tar.gz c11d1120fcefaec79f099fe2be05b03cd2aed8b9 verit-2021.06-rmx.tar.gz b576fd5d89767c1067541d4839fb749c6a68d22c verit-2021.06.1-rmx.tar.gz 19c6e5677b0a26cbc5805da79d00d06a66b7a671 verit-2021.06.2-rmx.tar.gz c4666a6d8080b5e376b50471fd2d9edeb1f9c988 vscode_extension-20220324.tar.gz 86c952d739d1eb868be88898982d4870a3d8c2dc vscode_extension-20220325.tar.gz 67b271186631f84efd97246bf85f6d8cfaa5edfd vscodium-1.65.2.tar.gz 81d21dfd0ea5c58f375301f5166be9dbf8921a7a windows_app-20130716.tar.gz fe15e1079cf5ad86f3cbab4553722a0d20002d11 windows_app-20130905.tar.gz e6a43b7b3b21295853bd2a63b27ea20bd6102f5f windows_app-20130906.tar.gz 8fe004aead867d4c82425afac481142bd3f01fb0 windows_app-20130908.tar.gz d273abdc7387462f77a127fa43095eed78332b5c windows_app-20130909.tar.gz c368908584e2bca38b3bcb20431d0c69399fc2f0 windows_app-20131130.tar.gz c3f5285481a95fde3c1961595b4dd0311ee7ac1f windows_app-20131201.tar.gz 14807afcf69e50d49663d5b48f4b103f30ae842b windows_app-20150821.tar.gz ed106181510e825bf959025d8e0a2fc3f78e7a3f windows_app-20180417.tar.gz e809e4ab0d33cb413a7c47dd947e7dbdfcca1c24 windows_app-20181002.tar.gz 9e96ba128a0617a9020a178781df49d48c997e19 windows_app-20181006.tar.gz 1c36a840320dfa9bac8af25fc289a4df5ea3eccb xz-java-1.2-1.tar.gz 2ae13aa17d0dc95ce254a52f1dba10929763a10d xz-java-1.2.tar.gz c22196148fcace5443a933238216cff5112948df xz-java-1.5.tar.gz 4368ee09154dff42666a8c87e072261745619e51 xz-java-1.6.tar.gz 63f5fa09e92a895cb9aea27d7142abc86c487d25 xz-java-1.8.tar.gz a06875bdadd653627a68d2083c5178c1264d8fc6 xz-java-1.9.tar.gz 4530a1aa6f4498ee3d78d6000fa71a3f63bd077f yices-1.0.28.tar.gz 3a8f77822278fe9250890e357248bc678d8fac95 z3-3.2-1.tar.gz 12ae71acde43bd7bed1e005c43034b208c0cba4c z3-3.2.tar.gz d94a716502c8503d63952bcb4d4176fac8b28704 z3-4.0.tar.gz 86e721296c400ada440e4a9ce11b9e845eec9e25 z3-4.3.0.tar.gz a8917c31b31c182edeec0aaa48870844960c8a61 z3-4.3.2pre-1.tar.gz 06b30757ff23aefbc30479785c212685ffd39f4d z3-4.3.2pre.tar.gz ed37c451b9b748901295898bf713b24d22cc8c17 z3-4.4.0_4.4.1.tar.gz 93e7e4bddc6afcf87fe2b6656cfcb1b1acd0a4f8 z3-4.4.0pre-1.tar.gz b1bc411c2083fc01577070b56b94514676f53854 z3-4.4.0pre-2.tar.gz 4c366ab255d2e9343fb635d44d4d55ddd24c76d0 z3-4.4.0pre-3.tar.gz 517ba7b94c1985416c5b411c8ae84456367eb231 z3-4.4.0pre.tar.gz 
6e5d7a65757cac970eb5ad28cd62130c99f42c23 z3-4.4.1.tar.gz aa20745f0b03e606b1a4149598e0c7572b63c657 z3-4.8.3.tar.gz 9dfeb39c87393af7b6a34118507637aa53aca05e zipperposition-2.0-1.tar.gz b884c60653002a7811e3b652ae0515e825d98667 zipperposition-2.0.tar.gz b129ec4f8a4474953ec107536298ee08a01fbebc zipperposition-2.1-1.tar.gz 5f53a77efb5cbe9d0c95d74a1588cc923bd711a7 zipperposition-2.1.tar.gz diff --git a/Admin/components/main b/Admin/components/main --- a/Admin/components/main +++ b/Admin/components/main @@ -1,36 +1,36 @@ #main components for repository clones or release bundles gnu-utils-20211030 apache-commons-20211211 bash_process-1.2.4-2 bib2xhtml-20190409 csdp-6.1.1 cvc4-1.8 e-2.6-1 -flatlaf-1.6.4 +flatlaf-2.4 idea-icons-20210508 isabelle_fonts-20211004 isabelle_setup-20220701 jdk-17.0.2+8 jedit-20211103 jfreechart-1.5.3 jortho-1.0-2 kodkodi-1.5.7 minisat-2.2.1-1 nunchaku-0.5 opam-2.0.7 -pdfjs-2.12.313 +pdfjs-2.14.305 polyml-test-15c840d48c9a -postgresql-42.2.24 +postgresql-42.4.0 scala-3.1.3 smbc-0.4.1 spass-3.8ds-2 sqlite-jdbc-3.36.0.3 ssh-java-20190323 stack-2.7.3 vampire-4.6 verit-2021.06.2-rmx vscode_extension-20220325 vscodium-1.65.2 xz-java-1.9 z3-4.4.0_4.4.1 zipperposition-2.1-1 diff --git a/etc/build.props b/etc/build.props --- a/etc/build.props +++ b/etc/build.props @@ -1,306 +1,307 @@ title = Isabelle/Scala module = $ISABELLE_HOME/lib/classes/isabelle.jar main = isabelle.jedit.JEdit_Main resources = \ lib/services/java.nio.charset.spi.CharsetProvider:META-INF/services/ \ lib/logo/isabelle_transparent-32.gif:isabelle/ \ lib/logo/isabelle_transparent.gif:isabelle/ sources = \ src/HOL/SPARK/Tools/spark.scala \ src/HOL/Tools/ATP/system_on_tptp.scala \ src/HOL/Tools/Mirabelle/mirabelle.scala \ src/HOL/Tools/Nitpick/kodkod.scala \ src/Pure/Admin/afp.scala \ src/Pure/Admin/build_csdp.scala \ src/Pure/Admin/build_cygwin.scala \ src/Pure/Admin/build_doc.scala \ src/Pure/Admin/build_e.scala \ src/Pure/Admin/build_fonts.scala \ src/Pure/Admin/build_history.scala \ src/Pure/Admin/build_jcef.scala \ src/Pure/Admin/build_jdk.scala \ src/Pure/Admin/build_jedit.scala \ src/Pure/Admin/build_log.scala \ src/Pure/Admin/build_minisat.scala \ src/Pure/Admin/build_pdfjs.scala \ src/Pure/Admin/build_polyml.scala \ src/Pure/Admin/build_release.scala \ src/Pure/Admin/build_scala.scala \ src/Pure/Admin/build_spass.scala \ src/Pure/Admin/build_sqlite.scala \ src/Pure/Admin/build_status.scala \ src/Pure/Admin/build_vampire.scala \ src/Pure/Admin/build_verit.scala \ src/Pure/Admin/build_zipperposition.scala \ src/Pure/Admin/check_sources.scala \ src/Pure/Admin/ci_build_benchmark.scala \ src/Pure/Admin/ci_profile.scala \ src/Pure/Admin/isabelle_cronjob.scala \ src/Pure/Admin/isabelle_devel.scala \ src/Pure/Admin/jenkins.scala \ src/Pure/Admin/other_isabelle.scala \ src/Pure/Concurrent/consumer_thread.scala \ src/Pure/Concurrent/counter.scala \ src/Pure/Concurrent/delay.scala \ src/Pure/Concurrent/event_timer.scala \ src/Pure/Concurrent/future.scala \ src/Pure/Concurrent/isabelle_thread.scala \ src/Pure/Concurrent/mailbox.scala \ src/Pure/Concurrent/par_list.scala \ src/Pure/Concurrent/synchronized.scala \ src/Pure/GUI/color_value.scala \ src/Pure/GUI/desktop_app.scala \ src/Pure/GUI/gui.scala \ src/Pure/GUI/gui_thread.scala \ src/Pure/GUI/popup.scala \ src/Pure/GUI/wrap_panel.scala \ src/Pure/General/antiquote.scala \ src/Pure/General/base64.scala \ src/Pure/General/bytes.scala \ src/Pure/General/cache.scala \ src/Pure/General/codepoint.scala \ src/Pure/General/comment.scala \ src/Pure/General/completion.scala \ 
src/Pure/General/csv.scala \ src/Pure/General/date.scala \ src/Pure/General/exn.scala \ src/Pure/General/file.scala \ src/Pure/General/file_watcher.scala \ src/Pure/General/graph.scala \ src/Pure/General/graph_display.scala \ src/Pure/General/graphics_file.scala \ src/Pure/General/http.scala \ src/Pure/General/json.scala \ src/Pure/General/json_api.scala \ src/Pure/General/linear_set.scala \ src/Pure/General/logger.scala \ src/Pure/General/long_name.scala \ src/Pure/General/mailman.scala \ src/Pure/General/mercurial.scala \ src/Pure/General/multi_map.scala \ src/Pure/General/output.scala \ src/Pure/General/path.scala \ src/Pure/General/position.scala \ src/Pure/General/pretty.scala \ src/Pure/General/properties.scala \ src/Pure/General/rdf.scala \ src/Pure/General/rsync.scala \ src/Pure/General/scan.scala \ src/Pure/General/sha1.scala \ src/Pure/General/sql.scala \ src/Pure/General/ssh.scala \ src/Pure/General/symbol.scala \ src/Pure/General/time.scala \ src/Pure/General/timing.scala \ src/Pure/General/untyped.scala \ src/Pure/General/url.scala \ src/Pure/General/utf8.scala \ src/Pure/General/uuid.scala \ src/Pure/General/value.scala \ src/Pure/General/word.scala \ src/Pure/General/xz.scala \ src/Pure/Isar/document_structure.scala \ src/Pure/Isar/keyword.scala \ src/Pure/Isar/line_structure.scala \ src/Pure/Isar/outer_syntax.scala \ src/Pure/Isar/parse.scala \ src/Pure/Isar/token.scala \ src/Pure/ML/ml_console.scala \ src/Pure/ML/ml_lex.scala \ src/Pure/ML/ml_process.scala \ src/Pure/ML/ml_profiling.scala \ src/Pure/ML/ml_statistics.scala \ src/Pure/ML/ml_syntax.scala \ src/Pure/PIDE/byte_message.scala \ src/Pure/PIDE/command.scala \ src/Pure/PIDE/command_span.scala \ src/Pure/PIDE/document.scala \ src/Pure/PIDE/document_id.scala \ src/Pure/PIDE/document_status.scala \ src/Pure/PIDE/editor.scala \ src/Pure/PIDE/headless.scala \ src/Pure/PIDE/line.scala \ src/Pure/PIDE/markup.scala \ src/Pure/PIDE/markup_tree.scala \ src/Pure/PIDE/protocol.scala \ src/Pure/PIDE/protocol_handlers.scala \ src/Pure/PIDE/protocol_message.scala \ src/Pure/PIDE/prover.scala \ src/Pure/PIDE/query_operation.scala \ src/Pure/PIDE/rendering.scala \ src/Pure/PIDE/resources.scala \ src/Pure/PIDE/session.scala \ src/Pure/PIDE/text.scala \ src/Pure/PIDE/xml.scala \ src/Pure/PIDE/yxml.scala \ src/Pure/ROOT.scala \ src/Pure/System/bash.scala \ + src/Pure/System/classpath.scala \ src/Pure/System/command_line.scala \ src/Pure/System/components.scala \ src/Pure/System/executable.scala \ src/Pure/System/getopts.scala \ src/Pure/System/isabelle_charset.scala \ src/Pure/System/isabelle_fonts.scala \ src/Pure/System/isabelle_platform.scala \ src/Pure/System/isabelle_process.scala \ src/Pure/System/isabelle_system.scala \ src/Pure/System/isabelle_tool.scala \ src/Pure/System/java_statistics.scala \ src/Pure/System/linux.scala \ src/Pure/System/mingw.scala \ src/Pure/System/numa.scala \ src/Pure/System/options.scala \ src/Pure/System/platform.scala \ src/Pure/System/posix_interrupt.scala \ src/Pure/System/process_result.scala \ src/Pure/System/progress.scala \ src/Pure/System/scala.scala \ src/Pure/System/system_channel.scala \ src/Pure/System/tty_loop.scala \ src/Pure/Thy/bibtex.scala \ src/Pure/Thy/document_build.scala \ src/Pure/Thy/export.scala \ src/Pure/Thy/export_theory.scala \ src/Pure/Thy/file_format.scala \ src/Pure/Thy/html.scala \ src/Pure/Thy/latex.scala \ src/Pure/Thy/presentation.scala \ src/Pure/Thy/sessions.scala \ src/Pure/Thy/thy_element.scala \ src/Pure/Thy/thy_header.scala \ src/Pure/Thy/thy_syntax.scala \ 
src/Pure/Tools/build.scala \ src/Pure/Tools/build_docker.scala \ src/Pure/Tools/build_job.scala \ src/Pure/Tools/check_keywords.scala \ src/Pure/Tools/debugger.scala \ src/Pure/Tools/doc.scala \ src/Pure/Tools/dump.scala \ src/Pure/Tools/flarum.scala \ src/Pure/Tools/fontforge.scala \ src/Pure/Tools/java_monitor.scala \ src/Pure/Tools/logo.scala \ src/Pure/Tools/mkroot.scala \ src/Pure/Tools/phabricator.scala \ src/Pure/Tools/print_operation.scala \ src/Pure/Tools/profiling_report.scala \ src/Pure/Tools/scala_build.scala \ src/Pure/Tools/scala_project.scala \ src/Pure/Tools/server.scala \ src/Pure/Tools/server_commands.scala \ src/Pure/Tools/simplifier_trace.scala \ src/Pure/Tools/spell_checker.scala \ src/Pure/Tools/sync.scala \ src/Pure/Tools/task_statistics.scala \ src/Pure/Tools/update.scala \ src/Pure/Tools/update_cartouches.scala \ src/Pure/Tools/update_comments.scala \ src/Pure/Tools/update_header.scala \ src/Pure/Tools/update_then.scala \ src/Pure/Tools/update_theorems.scala \ src/Pure/library.scala \ src/Pure/pure_thy.scala \ src/Pure/term.scala \ src/Pure/term_xml.scala \ src/Pure/thm_name.scala \ src/Tools/Graphview/graph_file.scala \ src/Tools/Graphview/graph_panel.scala \ src/Tools/Graphview/graphview.scala \ src/Tools/Graphview/layout.scala \ src/Tools/Graphview/main_panel.scala \ src/Tools/Graphview/metrics.scala \ src/Tools/Graphview/model.scala \ src/Tools/Graphview/mutator.scala \ src/Tools/Graphview/mutator_dialog.scala \ src/Tools/Graphview/mutator_event.scala \ src/Tools/Graphview/popups.scala \ src/Tools/Graphview/shapes.scala \ src/Tools/Graphview/tree_panel.scala \ src/Tools/VSCode/src/build_vscode_extension.scala \ src/Tools/VSCode/src/build_vscodium.scala \ src/Tools/VSCode/src/channel.scala \ src/Tools/VSCode/src/dynamic_output.scala \ src/Tools/VSCode/src/language_server.scala \ src/Tools/VSCode/src/lsp.scala \ src/Tools/VSCode/src/preview_panel.scala \ src/Tools/VSCode/src/state_panel.scala \ src/Tools/VSCode/src/vscode_main.scala \ src/Tools/VSCode/src/vscode_model.scala \ src/Tools/VSCode/src/vscode_rendering.scala \ src/Tools/VSCode/src/vscode_resources.scala \ src/Tools/VSCode/src/vscode_spell_checker.scala \ src/Tools/jEdit/src/active.scala \ src/Tools/jEdit/src/base_plugin.scala \ src/Tools/jEdit/src/completion_popup.scala \ src/Tools/jEdit/src/context_menu.scala \ src/Tools/jEdit/src/debugger_dockable.scala \ src/Tools/jEdit/src/dockable.scala \ src/Tools/jEdit/src/document_model.scala \ src/Tools/jEdit/src/document_view.scala \ src/Tools/jEdit/src/documentation_dockable.scala \ src/Tools/jEdit/src/fold_handling.scala \ src/Tools/jEdit/src/font_info.scala \ src/Tools/jEdit/src/graphview_dockable.scala \ src/Tools/jEdit/src/info_dockable.scala \ src/Tools/jEdit/src/isabelle.scala \ src/Tools/jEdit/src/isabelle_encoding.scala \ src/Tools/jEdit/src/isabelle_export.scala \ src/Tools/jEdit/src/isabelle_options.scala \ src/Tools/jEdit/src/isabelle_session.scala \ src/Tools/jEdit/src/isabelle_vfs.scala \ src/Tools/jEdit/src/jedit_bibtex.scala \ src/Tools/jEdit/src/jedit_editor.scala \ src/Tools/jEdit/src/jedit_lib.scala \ src/Tools/jEdit/src/jedit_main.scala \ src/Tools/jEdit/src/jedit_options.scala \ src/Tools/jEdit/src/jedit_plugins.scala \ src/Tools/jEdit/src/jedit_rendering.scala \ src/Tools/jEdit/src/jedit_resources.scala \ src/Tools/jEdit/src/jedit_sessions.scala \ src/Tools/jEdit/src/jedit_spell_checker.scala \ src/Tools/jEdit/src/keymap_merge.scala \ src/Tools/jEdit/src/main_plugin.scala \ src/Tools/jEdit/src/monitor_dockable.scala \ 
src/Tools/jEdit/src/output_dockable.scala \ src/Tools/jEdit/src/pide_docking_framework.scala \ src/Tools/jEdit/src/pretty_text_area.scala \ src/Tools/jEdit/src/pretty_tooltip.scala \ src/Tools/jEdit/src/process_indicator.scala \ src/Tools/jEdit/src/protocol_dockable.scala \ src/Tools/jEdit/src/query_dockable.scala \ src/Tools/jEdit/src/raw_output_dockable.scala \ src/Tools/jEdit/src/rich_text_area.scala \ src/Tools/jEdit/src/session_build.scala \ src/Tools/jEdit/src/simplifier_trace_dockable.scala \ src/Tools/jEdit/src/simplifier_trace_window.scala \ src/Tools/jEdit/src/sledgehammer_dockable.scala \ src/Tools/jEdit/src/state_dockable.scala \ src/Tools/jEdit/src/status_widget.scala \ src/Tools/jEdit/src/symbols_dockable.scala \ src/Tools/jEdit/src/syntax_style.scala \ src/Tools/jEdit/src/syslog_dockable.scala \ src/Tools/jEdit/src/text_overview.scala \ src/Tools/jEdit/src/text_structure.scala \ src/Tools/jEdit/src/theories_dockable.scala \ src/Tools/jEdit/src/timing_dockable.scala \ src/Tools/jEdit/src/token_markup.scala services = \ isabelle.Bash$Handler \ isabelle.Bibtex$File_Format \ isabelle.Document_Build$Build_Engine \ isabelle.Document_Build$LuaLaTeX_Engine \ isabelle.Document_Build$PDFLaTeX_Engine \ isabelle.ML_Statistics$Handler \ isabelle.Print_Operation$Handler \ isabelle.Scala$Handler \ isabelle.Scala_Functions \ isabelle.Server_Commands \ isabelle.Sessions$File_Format \ isabelle.Simplifier_Trace$Handler \ isabelle.Tools \ isabelle.jedit.JEdit_Plugin0 \ isabelle.jedit.JEdit_Plugin1 \ isabelle.nitpick.Kodkod$Handler \ isabelle.nitpick.Scala_Functions \ isabelle.spark.SPARK$Load_Command1 \ isabelle.spark.SPARK$Load_Command2 diff --git a/src/Doc/Prog_Prove/Types_and_funs.thy b/src/Doc/Prog_Prove/Types_and_funs.thy --- a/src/Doc/Prog_Prove/Types_and_funs.thy +++ b/src/Doc/Prog_Prove/Types_and_funs.thy @@ -1,604 +1,605 @@ (*<*) theory Types_and_funs imports Main begin (*>*) text\ \vspace{-5ex} \section{Type and Function Definitions} Type synonyms are abbreviations for existing types, for example \index{string@\string\}\ type_synonym string = "char list" text\ Type synonyms are expanded after parsing and are not present in internal representation and output. They are mere conveniences for the reader. \subsection{Datatypes} \label{sec:datatypes} The general form of a datatype definition looks like this: \begin{quote} \begin{tabular}{@ {}rclcll} \indexed{\isacom{datatype}}{datatype} \('a\<^sub>1,\,'a\<^sub>n)t\ & = & $C_1 \ \"\\tau_{1,1}\"\ \dots \"\\tau_{1,n_1}\"\$ \\ & $|$ & \dots \\ & $|$ & $C_k \ \"\\tau_{k,1}\"\ \dots \"\\tau_{k,n_k}\"\$ \end{tabular} \end{quote} It introduces the constructors \ $C_i :: \tau_{i,1}\Rightarrow \cdots \Rightarrow \tau_{i,n_i} \Rightarrow$~\('a\<^sub>1,\,'a\<^sub>n)t\ \ and expresses that any value of this type is built from these constructors in a unique manner. Uniqueness is implied by the following properties of the constructors: \begin{itemize} \item \emph{Distinctness:} $C_i\ \ldots \neq C_j\ \dots$ \quad if $i \neq j$ \item \emph{Injectivity:} \begin{tabular}[t]{l} $(C_i \ x_1 \dots x_{n_i} = C_i \ y_1 \dots y_{n_i}) =$\\ $(x_1 = y_1 \land \dots \land x_{n_i} = y_{n_i})$ \end{tabular} \end{itemize} The fact that any value of the datatype is built from the constructors implies the \concept{structural induction}\index{induction} rule: to show $P~x$ for all $x$ of type \('a\<^sub>1,\,'a\<^sub>n)t\, one needs to show $P(C_i\ x_1 \dots x_{n_i})$ (for each $i$) assuming $P(x_j)$ for all $j$ where $\tau_{i,j} =$~\('a\<^sub>1,\,'a\<^sub>n)t\. 
Distinctness and injectivity are applied automatically by \auto\ and other proof methods. Induction must be applied explicitly. Like in functional programming languages, datatype values can be taken apart with case expressions\index{case expression}\index{case expression@\case ... of\}, for example \begin{quote} \noquotes{@{term[source] "(case xs of [] \ 0 | x # _ \ Suc x)"}} \end{quote} Case expressions must be enclosed in parentheses. As an example of a datatype beyond \<^typ>\nat\ and \list\, consider binary trees: \ datatype 'a tree = Tip | Node "'a tree" 'a "'a tree" text\with a mirror function:\ fun mirror :: "'a tree \ 'a tree" where "mirror Tip = Tip" | "mirror (Node l a r) = Node (mirror r) a (mirror l)" text\The following lemma illustrates induction:\ lemma "mirror(mirror t) = t" apply(induction t) txt\yields @{subgoals[display]} The induction step contains two induction hypotheses, one for each subtree. An application of \auto\ finishes the proof. A very simple but also very useful datatype is the predefined @{datatype[display] option}\index{option@\option\}\index{None@\<^const>\None\}\index{Some@\<^const>\Some\} Its sole purpose is to add a new element \<^const>\None\ to an existing type \<^typ>\'a\. To make sure that \<^const>\None\ is distinct from all the elements of \<^typ>\'a\, you wrap them up in \<^const>\Some\ and call the new type \<^typ>\'a option\. A typical application is a lookup function on a list of key-value pairs, often called an association list: \ (*<*) apply auto done (*>*) fun lookup :: "('a * 'b) list \ 'a \ 'b option" where "lookup [] x = None" | "lookup ((a,b) # ps) x = (if a = x then Some b else lookup ps x)" text\ Note that \\\<^sub>1 * \\<^sub>2\ is the type of pairs, also written \\\<^sub>1 \ \\<^sub>2\. Pairs can be taken apart either by pattern matching (as above) or with the projection functions \<^const>\fst\ and \<^const>\snd\: @{thm fst_conv[of x y]} and @{thm snd_conv[of x y]}. Tuples are simulated by pairs nested to the right: \<^term>\(a,b,c)\ is short for \(a, (b, c))\ and \\\<^sub>1 \ \\<^sub>2 \ \\<^sub>3\ is short for \\\<^sub>1 \ (\\<^sub>2 \ \\<^sub>3)\. \subsection{Definitions} Non-recursive functions can be defined as in the following example: \index{definition@\isacom{definition}}\ definition sq :: "nat \ nat" where "sq n = n * n" text\Such definitions do not allow pattern matching but only \f x\<^sub>1 \ x\<^sub>n = t\, where \f\ does not occur in \t\. \subsection{Abbreviations} Abbreviations are similar to definitions: \index{abbreviation@\isacom{abbreviation}}\ abbreviation sq' :: "nat \ nat" where "sq' n \ n * n" text\The key difference is that \<^const>\sq'\ is only syntactic sugar: after parsing, \<^term>\sq' t\ is replaced by \mbox{\<^term>\t*t\}; before printing, every occurrence of \<^term>\u*u\ is replaced by \mbox{\<^term>\sq' u\}. Internally, \<^const>\sq'\ does not exist. This is the advantage of abbreviations over definitions: definitions need to be expanded explicitly (\autoref{sec:rewr-defs}) whereas abbreviations are already expanded upon parsing. However, abbreviations should be introduced sparingly: if abused, they can lead to a confusing discrepancy between the internal and external view of a term. The ASCII representation of \\\ is \texttt{==} or \xsymbol{equiv}. \subsection{Recursive Functions} \label{sec:recursive-funs} Recursive functions are defined with \indexed{\isacom{fun}}{fun} by pattern matching over datatype constructors. The order of equations matters, as in functional programming languages. 
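To see that the order of equations matters, consider the following sketch (the function \is_zero\ is hypothetical, added purely for illustration):

fun is_zero :: "nat ⇒ bool" where
"is_zero 0 = True" |
"is_zero _ = False"

The second equation applies only where the first does not match: \fun\ resolves the overlap sequentially and derives the non-overlapping rule \is_zero (Suc n) = False\.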
However, all HOL functions must be total. This simplifies the logic --- terms are always defined --- but means that recursive functions must terminate. Otherwise one could define a function \<^prop>\f n = f n + (1::nat)\ and conclude \mbox{\<^prop>\(0::nat) = 1\} by subtracting \<^term>\f n\ on both sides. Isabelle's automatic termination checker requires that the arguments of recursive calls on the right-hand side must be strictly smaller than the arguments on the left-hand side. In the simplest case, this means that one fixed argument position decreases in size with each recursive call. The size is measured as the number of constructors (excluding 0-ary ones, e.g., \Nil\). Lexicographic combinations are also recognized. In more complicated situations, the user may have to prove termination by hand. For details see~@{cite Krauss}. Functions defined with \isacom{fun} come with their own induction schema that mirrors the recursion schema and is derived from the termination order. For example, \ fun div2 :: "nat \ nat" where "div2 0 = 0" | "div2 (Suc 0) = 0" | "div2 (Suc(Suc n)) = Suc(div2 n)" text\does not just define \<^const>\div2\ but also proves a customized induction rule: \[ \inferrule{ \mbox{@{thm (prem 1) div2.induct}}\\ \mbox{@{thm (prem 2) div2.induct}}\\ \mbox{@{thm (prem 3) div2.induct}}} {\mbox{@{thm (concl) div2.induct[of _ "m"]}}} \] This customized induction rule can simplify inductive proofs. For example, \ lemma "div2 n = n div 2" apply(induction n rule: div2.induct) txt\(where the infix \div\ is the predefined division operation) yields the subgoals @{subgoals[display,margin=65]} An application of \auto\ finishes the proof. Had we used ordinary structural induction on \n\, the proof would have needed an additional case analysis in the induction step. This example leads to the following induction heuristic: \begin{quote} \emph{Let \f\ be a recursive function. If the definition of \f\ is more complicated than having one equation for each constructor of some datatype, then properties of \f\ are best proved via \f.induct\.\index{inductionrule@\.induct\}} \end{quote} The general case is often called \concept{computation induction}, because the induction follows the (terminating!) computation. For every defining equation \begin{quote} \f(e) = \ f(r\<^sub>1) \ f(r\<^sub>k) \\ \end{quote} where \f(r\<^sub>i)\, \i=1\k\, are all the recursive calls, the induction rule \f.induct\ contains one premise of the form \begin{quote} \P(r\<^sub>1) \ \ \ P(r\<^sub>k) \ P(e)\ \end{quote} If \f :: \\<^sub>1 \ \ \ \\<^sub>n \ \\ then \f.induct\ is applied like this: \begin{quote} \isacom{apply}\(induction x\<^sub>1 \ x\<^sub>n rule: f.induct)\\index{inductionrule@\induction ... rule:\} \end{quote} where typically there is a call \f x\<^sub>1 \ x\<^sub>n\ in the goal. But note that the induction rule does not mention \f\ at all, except in its name, and is applicable independently of \f\. \subsection*{Exercises} \begin{exercise} Starting from the type \'a tree\ defined in the text, define a function \contents ::\ \<^typ>\'a tree \ 'a list\ that collects all values in a tree in a list, in any order, without removing duplicates. Then define a function \sum_tree ::\ \<^typ>\nat tree \ nat\ that sums up all values in a tree of natural numbers and prove \<^prop>\sum_tree t = sum_list(contents t)\ -(where \<^const>\sum_list\ is predefined). +where \<^const>\sum_list\ is predefined by the equations +@{thm sum_list.Nil[where 'a=nat]} and +@{thm sum_list.Cons}. 
\end{exercise} \begin{exercise} Define the two functions \pre_order\ and \post_order\ of type @{typ "'a tree \ 'a list"} that traverse a tree and collect all stored values in the respective order in a list. Prove \<^prop>\pre_order (mirror t) = rev (post_order t)\. \end{exercise} \begin{exercise} Define a function \intersperse ::\ \<^typ>\'a \ 'a list \ 'a list\ such that \intersperse a [x\<^sub>1, ..., x\<^sub>n] = [x\<^sub>1, a, x\<^sub>2, a, ..., a, x\<^sub>n]\. Now prove that \<^prop>\map f (intersperse a xs) = intersperse (f a) (map f xs)\. \end{exercise} \section{Induction Heuristics}\index{induction heuristics} We have already noted that theorems about recursive functions are proved by induction. In case the function has more than one argument, we have followed the following heuristic in the proofs about the append function: \begin{quote} \emph{Perform induction on argument number $i$\\ if the function is defined by recursion on argument number $i$.} \end{quote} The key heuristic, and the main point of this section, is to \emph{generalize the goal before induction}. The reason is simple: if the goal is too specific, the induction hypothesis is too weak to allow the induction step to go through. Let us illustrate the idea with an example. Function \<^const>\rev\ has quadratic worst-case running time because it calls append for each element of the list and append is linear in its first argument. A linear time version of \<^const>\rev\ requires an extra argument where the result is accumulated gradually, using only~\#\: \ (*<*) apply auto done (*>*) fun itrev :: "'a list \ 'a list \ 'a list" where "itrev [] ys = ys" | "itrev (x#xs) ys = itrev xs (x#ys)" text\The behaviour of \<^const>\itrev\ is simple: it reverses its first argument by stacking its elements onto the second argument, and it returns that second argument when the first one becomes empty. Note that \<^const>\itrev\ is tail-recursive: it can be compiled into a loop; no stack is necessary for executing it. -Naturally, we would like to show that \<^const>\itrev\ does indeed reverse -its first argument provided the second one is empty: +Naturally, we would like to show that \<^const>\itrev\ reverses its first argument: \ lemma "itrev xs [] = rev xs" txt\There is no choice as to the induction variable: \ apply(induction xs) apply(auto) txt\ Unfortunately, this attempt does not prove the induction step: @{subgoals[display,margin=70]} The induction hypothesis is too weak. The fixed argument,~\<^term>\[]\, prevents it from rewriting the conclusion. This example suggests a heuristic: \begin{quote} \emph{Generalize goals for induction by replacing constants by variables.} \end{quote} Of course one cannot do this naively: \<^prop>\itrev xs ys = rev xs\ is just not true. The correct generalization is \ (*<*)oops(*>*) lemma "itrev xs ys = rev xs @ ys" (*<*)apply(induction xs, auto)(*>*) txt\ If \ys\ is replaced by \<^term>\[]\, the right-hand side simplifies to \<^term>\rev xs\, as required. In this instance it was easy to guess the right generalization. Other situations can require a good deal of creativity. Although we now have two variables, only \xs\ is suitable for induction, and we repeat our proof attempt. Unfortunately, we are still not there: @{subgoals[display,margin=65]} The induction hypothesis is still too weak, but this time it takes no intuition to generalize: the problem is that the \ys\ in the induction hypothesis is fixed, but the induction hypothesis needs to be applied with \<^term>\a # ys\ instead of \ys\. 
Hence we prove the theorem for all \ys\ instead of a fixed one. We can instruct induction to perform this generalization for us by adding \arbitrary: ys\\index{arbitrary@\arbitrary:\}. \ (*<*)oops lemma "itrev xs ys = rev xs @ ys" (*>*) apply(induction xs arbitrary: ys) -txt\The induction hypothesis in the induction step is now universally quantified over \ys\: +txt\The induction hypothesis is now universally quantified over \ys\: @{subgoals[display,margin=65]} Thus the proof succeeds: \ apply(auto) done text\ This leads to another heuristic for generalization: \begin{quote} \emph{Generalize induction by generalizing all free variables\\ {\em(except the induction variable itself)}.} \end{quote} Generalization is best performed with \arbitrary: y\<^sub>1 \ y\<^sub>k\. This heuristic prevents trivial failures like the one above. However, it should not be applied blindly. It is not always required, and the additional quantifiers can complicate matters in some cases. The variables that need to be quantified are typically those that change in recursive calls. \subsection*{Exercises} \begin{exercise} Write a tail-recursive variant of the \add\ function on \<^typ>\nat\: \<^term>\itadd :: nat \ nat \ nat\. Tail-recursive means that in the recursive case, \itadd\ needs to call itself directly: \mbox{\<^term>\itadd (Suc m) n\} \= itadd \\. Prove \<^prop>\itadd m n = add m n\. \end{exercise} \section{Simplification} So far we have talked a lot about simplifying terms without explaining the concept. \conceptidx{Simplification}{simplification} means \begin{itemize} \item using equations $l = r$ from left to right (only), \item as long as possible. \end{itemize} To emphasize the directionality, equations that have been given the \simp\ attribute are called \conceptidx{simplification rules}{simplification rule}. Logically, they are still symmetric, but proofs by simplification use them only in the left-to-right direction. The proof tool that performs simplifications is called the \concept{simplifier}. It is the basis of \auto\ and other related proof methods. The idea of simplification is best explained by an example. Given the simplification rules \[ \begin{array}{rcl@ {\quad}l} \<^term>\0 + n::nat\ &\=\& \n\ & (1) \\ \<^term>\(Suc m) + n\ &\=\& \<^term>\Suc (m + n)\ & (2) \\ \(Suc m \ Suc n)\ &\=\& \(m \ n)\ & (3)\\ \(0 \ m)\ &\=\& \<^const>\True\ & (4) \end{array} \] the formula \<^prop>\0 + Suc 0 \ Suc 0 + x\ is simplified to \<^const>\True\ as follows: \[ \begin{array}{r@ {}c@ {}l@ {\quad}l} \(0 + Suc 0\ & \leq & \Suc 0 + x)\ & \stackrel{(1)}{=} \\ \(Suc 0\ & \leq & \Suc 0 + x)\ & \stackrel{(2)}{=} \\ \(Suc 0\ & \leq & \Suc (0 + x))\ & \stackrel{(3)}{=} \\ \(0\ & \leq & \0 + x)\ & \stackrel{(4)}{=} \\[1ex] & \<^const>\True\ \end{array} \] Simplification is often also called \concept{rewriting} and simplification rules \conceptidx{rewrite rules}{rewrite rule}. \subsection{Simplification Rules} The attribute \simp\ declares theorems to be simplification rules, which the simplifier will use automatically. In addition, \isacom{datatype} and \isacom{fun} commands implicitly declare some simplification rules: \isacom{datatype} the distinctness and injectivity rules, \isacom{fun} the defining equations. Definitions are not declared as simplification rules automatically! Nearly any theorem can become a simplification rule. The simplifier will try to transform it into an equation. For example, the theorem \<^prop>\\ P\ is turned into \<^prop>\P = False\. 
Only equations that really simplify, like \<^prop>\rev (rev xs) = xs\ and \<^prop>\xs @ [] = xs\, should be declared as simplification rules. Equations that may be counterproductive as simplification rules should only be used in specific proof steps (see \autoref{sec:simp} below). Distributivity laws, for example, alter the structure of terms and can produce an exponential blow-up. \subsection{Conditional Simplification Rules} Simplification rules can be conditional. Before applying such a rule, the simplifier will first try to prove the preconditions, again by simplification. For example, given the simplification rules \begin{quote} \<^prop>\p(0::nat) = True\\\ \<^prop>\p(x) \ f(x) = g(x)\, \end{quote} the term \<^term>\f(0::nat)\ simplifies to \<^term>\g(0::nat)\ but \<^term>\f(1::nat)\ does not simplify because \<^prop>\p(1::nat)\ is not provable. \subsection{Termination} Simplification can run forever, for example if both \<^prop>\f x = g x\ and \<^prop>\g x = f x\ are simplification rules. It is the user's responsibility not to include simplification rules that can lead to nontermination, either on their own or in combination with other simplification rules. The right-hand side of a simplification rule should always be ``simpler'' than the left-hand side --- in some sense. But since termination is undecidable, such a check cannot be automated completely and Isabelle makes little attempt to detect nontermination. When conditional simplification rules are applied, their preconditions are proved first. Hence all preconditions need to be simpler than the left-hand side of the conclusion. For example \begin{quote} \<^prop>\n < m \ (n < Suc m) = True\ \end{quote} is suitable as a simplification rule: both \<^prop>\n < m\ and \<^const>\True\ are simpler than \mbox{\<^prop>\n < Suc m\}. But \begin{quote} \<^prop>\Suc n < m \ (n < m) = True\ \end{quote} leads to nontermination: when trying to rewrite \<^prop>\n < m\ to \<^const>\True\ one first has to prove \mbox{\<^prop>\Suc n < m\}, which can be rewritten to \<^const>\True\ provided \<^prop>\Suc(Suc n) < m\, \emph{ad infinitum}. \subsection{The \indexed{\simp\}{simp} Proof Method} \label{sec:simp} So far we have only used the proof method \auto\. Method \simp\ is the key component of \auto\, but \auto\ can do much more. In some cases, \auto\ is overeager and modifies the proof state too much. In such cases the more predictable \simp\ method should be used. Given a goal \begin{quote} \1. \ P\<^sub>1; \; P\<^sub>m \ \ C\ \end{quote} the command \begin{quote} \isacom{apply}\(simp add: th\<^sub>1 \ th\<^sub>n)\ \end{quote} simplifies the assumptions \P\<^sub>i\ and the conclusion \C\ using \begin{itemize} \item all simplification rules, including the ones coming from \isacom{datatype} and \isacom{fun}, \item the additional lemmas \th\<^sub>1 \ th\<^sub>n\, and \item the assumptions. \end{itemize} In addition to or instead of \add\ there is also \del\ for removing simplification rules temporarily. Both are optional. Method \auto\ can be modified similarly: \begin{quote} \isacom{apply}\(auto simp add: \ simp del: \)\ \end{quote} Here the modifiers are \simp add\ and \simp del\ instead of just \add\ and \del\ because \auto\ does not just perform simplification. Note that \simp\ acts only on subgoal 1, while \auto\ acts on all subgoals. There is also \simp_all\, which applies \simp\ to all subgoals.
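To illustrate (a small sketch; both statements are proved by default simplification rules), a lemma with two subgoals can be closed by a single \simp_all\, where plain \simp\ would have to be applied once per subgoal:
\
lemma "0 + n = (n::nat)" and "rev (rev ys) = ys"
  by simp_all

text\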
\subsection{Rewriting with Definitions} \label{sec:rewr-defs} Definitions introduced by the command \isacom{definition} can also be used as simplification rules, but by default they are not: the simplifier does not expand them automatically. Definitions are intended for introducing abstract concepts and not merely as abbreviations. Of course, we need to expand the definition initially, but once we have proved enough abstract properties of the new constant, we can forget its original definition. This style makes proofs more robust: if the definition has to be changed, only the proofs of the abstract properties will be affected. The definition of a function \f\ is a theorem named \f_def\ and can be added to a call of \simp\ like any other theorem: \begin{quote} \isacom{apply}\(simp add: f_def)\ \end{quote} In particular, let-expressions can be unfolded by making @{thm[source] Let_def} a simplification rule. \subsection{Case Splitting With \simp\} Goals containing if-expressions are automatically split into two cases by \simp\ using the rule \begin{quote} \<^prop>\P(if A then s else t) = ((A \ P(s)) \ (~A \ P(t)))\ \end{quote} For example, \simp\ can prove \begin{quote} \<^prop>\(A \ B) = (if A then B else False)\ \end{quote} because both \<^prop>\A \ (A & B) = B\ and \<^prop>\~A \ (A & B) = False\ simplify to \<^const>\True\. We can split case-expressions similarly. For \nat\ the rule looks like this: @{prop[display,margin=65,indent=4]"P(case e of 0 \ a | Suc n \ b n) = ((e = 0 \ P a) \ (\n. e = Suc n \ P(b n)))"} Case expressions are not split automatically by \simp\, but \simp\ can be instructed to do so: \begin{quote} \isacom{apply}\(simp split: nat.split)\ \end{quote} splits all case-expressions over natural numbers. For an arbitrary datatype \t\ it is \t.split\\index{split@\.split\} instead of @{thm[source] nat.split}. Method \auto\ can be modified in exactly the same way. The modifier \indexed{\split:\}{split} can be followed by multiple names. Splitting if or case-expressions in the assumptions requires \split: if_splits\ or \split: t.splits\. \ifsem\else \subsection{Rewriting \let\ and Numerals} Let-expressions (\<^term>\let x = s in t\) can be expanded explicitly with the simplification rule @{thm[source] Let_def}. The simplifier will not expand \let\s automatically in many cases. Numerals of type \<^typ>\nat\ can be converted to \<^const>\Suc\ terms with the simplification rule @{thm[source] numeral_eq_Suc}. This is required, for example, when a function that is defined by pattern matching with \<^const>\Suc\ is applied to a numeral: if \f\ is defined by \f 0 = ...\ and \f (Suc n) = ...\, the simplifier cannot simplify \f 2\ unless \2\ is converted to \<^term>\Suc(Suc 0)\ (via @{thm[source] numeral_eq_Suc}). \fi \subsection*{Exercises} \exercise\label{exe:tree0} Define a datatype \tree0\ of binary tree skeletons which do not store any information, neither in the inner nodes nor in the leaves. Define a function \nodes :: tree0 \ nat\ that counts the number of all nodes (inner nodes and leaves) in such a tree. Consider the following recursive function: \ (*<*) datatype tree0 = Tip | Node tree0 tree0 (*>*) fun explode :: "nat \ tree0 \ tree0" where "explode 0 t = t" | "explode (Suc n) t = explode n (Node t t)" text \ Find an equation expressing the size of a tree after exploding it (\noquotes{@{term [source] "nodes (explode n t)"}}) as a function of \<^term>\nodes t\ and \n\. Prove your equation. 
You may use the usual arithmetic operators, including the exponentiation operator ``\^\''. For example, \noquotes{@{prop [source] "2 ^ 2 = 4"}}. Hint: simplifying with the list of theorems @{thm[source] algebra_simps} takes care of common algebraic properties of the arithmetic operators. \endexercise \exercise Define arithmetic expressions in one variable over integers (type \<^typ>\int\) as a data type: \ datatype exp = Var | Const int | Add exp exp | Mult exp exp text\ Define a function \noquotes{@{term [source]"eval :: exp \ int \ int"}} such that \<^term>\eval e x\ evaluates \e\ at the value \x\. A polynomial can be represented as a list of coefficients, starting with the constant. For example, \<^term>\[4, 2, -1, 3::int]\ represents the polynomial $4 + 2x - x^2 + 3x^3$. Define a function \noquotes{@{term [source] "evalp :: int list \ int \ int"}} that evaluates a polynomial at the given value. Define a function \noquotes{@{term[source] "coeffs :: exp \ int list"}} that transforms an expression into a polynomial. This may require auxiliary functions. Prove that \coeffs\ preserves the value of the expression: \mbox{\<^prop>\evalp (coeffs e) x = eval e x\.} Hint: consider the hint in Exercise~\ref{exe:tree0}. \endexercise \ (*<*) end (*>*) diff --git a/src/Doc/System/Presentation.thy b/src/Doc/System/Presentation.thy --- a/src/Doc/System/Presentation.thy +++ b/src/Doc/System/Presentation.thy @@ -1,203 +1,202 @@ (*:maxLineLen=78:*) theory Presentation imports Base begin chapter \Presenting theories \label{ch:present}\ text \ Isabelle provides several ways to present the outcome of formal developments, including WWW-based browsable libraries or actual printable documents. Presentation is centered around the concept of \<^emph>\sessions\ (\chref{ch:session}). The global session structure is that of a tree, with Isabelle Pure at its root, further object-logics derived (e.g.\ HOLCF from HOL, and HOL from Pure), and application sessions further on in the hierarchy. The command-line tools @{tool_ref mkroot} and @{tool_ref build} provide the primary means for managing Isabelle sessions, including options for presentation: ``\<^verbatim>\document=pdf\'' generates PDF output from the theory session, and ``\<^verbatim>\document_output=dir\'' emits a copy of the document sources with the PDF into the given directory (relative to the session directory). Alternatively, @{tool_ref document} may be used to turn the generated {\LaTeX} sources of a session (exports from its build database) into PDF. \ section \Generating HTML browser information \label{sec:info}\ text \ As a side-effect of building sessions, Isabelle is able to generate theory browsing information, including HTML documents that show the theory sources and the relationship with its ancestors and descendants. Besides the HTML file that is generated for every theory, Isabelle stores links to all theories of a session in an index file. As a second hierarchy, groups of sessions are organized as \<^emph>\chapters\, with a separate index. Note that the implicit tree structure of the session build hierarchy is \<^emph>\not\ relevant for the presentation. \<^medskip> To generate theory browsing information for an existing session, just invoke @{tool build} with suitable options: @{verbatim [display] \isabelle build -o browser_info -v -c FOL\} The presentation output will appear in a sub-directory \<^path>\$ISABELLE_BROWSER_INFO\, according to the chapter and session name. 
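For example (an illustrative path, assuming the chapter and the session are both called \<^verbatim>\FOL\ and default settings are in place), the main index produced by the above invocation would be found at @{verbatim [display] \$ISABELLE_BROWSER_INFO/FOL/FOL/index.html\}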
Many Isabelle sessions (such as \<^session>\HOL-Library\ in \<^dir>\~~/src/HOL/Library\) also provide theory documents in PDF. These are prepared automatically as well if enabled like this: @{verbatim [display] \isabelle build -o browser_info -o document -v -c HOL-Library\} Enabling both browser info and document preparation simultaneously causes an appropriate ``document'' link to be included in the HTML index. Documents may be generated independently of browser information as well, see \secref{sec:tool-document} for further details. \<^bigskip> The theory browsing information is stored in the directory determined by the @{setting_ref ISABELLE_BROWSER_INFO} setting, with sub-directory structure according to the chapter and session name. In order to present Isabelle applications on the web, the corresponding subdirectory from @{setting ISABELLE_BROWSER_INFO} can be put on a WWW server. \ section \Preparing session root directories \label{sec:tool-mkroot}\ text \ The @{tool_def mkroot} tool configures a given directory as session root, with some \<^verbatim>\ROOT\ file and optional document source directory. Its usage is: @{verbatim [display] \Usage: isabelle mkroot [OPTIONS] [DIRECTORY] Options are: -A LATEX provide author in LaTeX notation (default: user name) -I init Mercurial repository and add generated files -T LATEX provide title in LaTeX notation (default: session name) -n NAME alternative session name (default: directory base name) Prepare session root directory (default: current directory). \} The results are placed in the given directory \dir\, which refers to the current directory by default. The @{tool mkroot} tool is conservative in the sense that it does not overwrite existing files or directories. Earlier attempts to generate a session root need to be deleted manually. The generated session template will be accompanied by a formal document, with \DIRECTORY\\<^verbatim>\/document/root.tex\ as its {\LaTeX} entry point (see also \chref{ch:present}). Options \<^verbatim>\-T\ and \<^verbatim>\-A\ specify the document title and author explicitly, using {\LaTeX} source notation. Option \<^verbatim>\-I\ initializes a Mercurial repository in the target directory, and adds all generated files (without commit). Option \<^verbatim>\-n\ specifies an alternative session name; otherwise the base name of the given directory is used. \<^medskip> The implicit Isabelle settings variable @{setting ISABELLE_LOGIC} specifies the parent session. \ subsubsection \Examples\ text \ Produce session \<^verbatim>\Test\ within a separate directory of the same name: @{verbatim [display] \isabelle mkroot Test && isabelle build -D Test\} \<^medskip> Upgrade the current directory into a session ROOT with document preparation, and build it: @{verbatim [display] \isabelle mkroot && isabelle build -D .\} \ section \Preparing Isabelle session documents \label{sec:tool-document}\ text \ The @{tool_def document} tool prepares logic session documents. Its usage is: @{verbatim [display] \Usage: isabelle document [OPTIONS] SESSION Options are: -O DIR output directory for LaTeX sources and resulting PDF -P DIR output directory for resulting PDF -S DIR output directory for LaTeX sources -V verbose latex -d DIR include session directory -o OPTION override Isabelle system OPTION (via NAME=VAL or NAME) -v verbose build Prepare the theory document of a session.\} Generated {\LaTeX} sources are taken from the session build database: @{tool_ref build} is invoked beforehand to ensure that it is up-to-date. 
Further files are generated on the spot, notably essential Isabelle style files, and \<^verbatim>\session.tex\ to input all theory sources from the session (excluding imports from other sessions). \<^medskip> Options \<^verbatim>\-d\, \<^verbatim>\-o\, \<^verbatim>\-v\ have the same meaning as for @{tool build}. \<^medskip> Option \<^verbatim>\-V\ prints full output of {\LaTeX} tools. \<^medskip> Option \<^verbatim>\-O\~\dir\ specifies the output directory for generated {\LaTeX} sources and the result PDF file. Options \<^verbatim>\-P\ and \<^verbatim>\-S\ only refer to the PDF and sources, respectively. For example, for output directory ``\<^verbatim>\output\'' and the default document variant ``\<^verbatim>\document\'', the generated document sources are placed into the subdirectory \<^verbatim>\output/document/\ and the resulting PDF into \<^verbatim>\output/document.pdf\. \<^medskip> Isabelle is usually smart enough to create the PDF from the given \<^verbatim>\root.tex\ and optional \<^verbatim>\root.bib\ (bibliography) and \<^verbatim>\root.idx\ (index) using standard {\LaTeX} tools. Actual command-lines are given by settings @{setting_ref ISABELLE_LUALATEX} (or @{setting_ref ISABELLE_PDFLATEX}), @{setting_ref ISABELLE_BIBTEX}, @{setting_ref ISABELLE_MAKEINDEX}: these variables are used without quoting in shell scripts, and thus may contain additional options. The system option @{system_option_def "document_build"} specifies an alternative build engine, e.g.\ within the session \<^verbatim>\ROOT\ file as ``\<^verbatim>\options [document_build = pdflatex]\''. The following standard engines are available: \<^item> \<^verbatim>\lualatex\ (default) uses the shell command \<^verbatim>\$ISABELLE_LUALATEX\ on the main \<^verbatim>\root.tex\ file, with further runs of \<^verbatim>\$ISABELLE_BIBTEX\ and \<^verbatim>\$ISABELLE_MAKEINDEX\ as required. \<^item> \<^verbatim>\pdflatex\ uses \<^verbatim>\$ISABELLE_PDFLATEX\ instead of \<^verbatim>\$ISABELLE_LUALATEX\, and the other tools as above. \<^item> \<^verbatim>\build\ invokes an executable script of the same name in a private directory containing all \isakeyword{document\_files} and other generated document sources. The script is invoked as ``\<^verbatim>\./build pdf\~\name\'' for the document variant name; it needs to produce a corresponding \name\\<^verbatim>\.pdf\ file by arbitrary means on its own. Further engines can be defined by add-on components in Isabelle/Scala (\secref{sec:scala-build}), providing a service class derived from - \<^scala_type>\isabelle.Document_Build.Engine\. Available classes are listed - in \<^scala>\isabelle.Document_Build.engines\. + \<^scala_type>\isabelle.Document_Build.Engine\. \ subsubsection \Examples\ text \ Produce the document from session \<^verbatim>\FOL\ with full verbosity, and a copy in the current directory (subdirectory \<^verbatim>\document\ and file \<^verbatim>\document.pdf\): @{verbatim [display] \isabelle document -v -V -O. FOL\} \ end diff --git a/src/HOL/Analysis/Infinite_Products.thy b/src/HOL/Analysis/Infinite_Products.thy --- a/src/HOL/Analysis/Infinite_Products.thy +++ b/src/HOL/Analysis/Infinite_Products.thy @@ -1,1813 +1,1837 @@ (*File: HOL/Analysis/Infinite_Products.thy Author: Manuel Eberl & LC Paulson Basic results about convergence and absolute convergence of infinite products and their connection to summability.
*) section \Infinite Products\ theory Infinite_Products imports Topology_Euclidean_Space Complex_Transcendental begin subsection\<^marker>\tag unimportant\ \Preliminaries\ lemma sum_le_prod: fixes f :: "'a \ 'b :: linordered_semidom" assumes "\x. x \ A \ f x \ 0" shows "sum f A \ (\x\A. 1 + f x)" using assms proof (induction A rule: infinite_finite_induct) case (insert x A) from insert.hyps have "sum f A + f x * (\x\A. 1) \ (\x\A. 1 + f x) + f x * (\x\A. 1 + f x)" by (intro add_mono insert mult_left_mono prod_mono) (auto intro: insert.prems) with insert.hyps show ?case by (simp add: algebra_simps) qed simp_all lemma prod_le_exp_sum: fixes f :: "'a \ real" assumes "\x. x \ A \ f x \ 0" shows "prod (\x. 1 + f x) A \ exp (sum f A)" using assms proof (induction A rule: infinite_finite_induct) case (insert x A) have "(1 + f x) * (\x\A. 1 + f x) \ exp (f x) * exp (sum f A)" using insert.prems by (intro mult_mono insert prod_nonneg exp_ge_add_one_self) auto with insert.hyps show ?case by (simp add: algebra_simps exp_add) qed simp_all lemma lim_ln_1_plus_x_over_x_at_0: "(\x::real. ln (1 + x) / x) \0\ 1" proof (rule lhopital) show "(\x::real. ln (1 + x)) \0\ 0" by (rule tendsto_eq_intros refl | simp)+ have "eventually (\x::real. x \ {-1/2<..<1/2}) (nhds 0)" by (rule eventually_nhds_in_open) auto hence *: "eventually (\x::real. x \ {-1/2<..<1/2}) (at 0)" by (rule filter_leD [rotated]) (simp_all add: at_within_def) show "eventually (\x::real. ((\x. ln (1 + x)) has_field_derivative inverse (1 + x)) (at x)) (at 0)" using * by eventually_elim (auto intro!: derivative_eq_intros simp: field_simps) show "eventually (\x::real. ((\x. x) has_field_derivative 1) (at x)) (at 0)" using * by eventually_elim (auto intro!: derivative_eq_intros simp: field_simps) show "\\<^sub>F x in at 0. x \ 0" by (auto simp: at_within_def eventually_inf_principal) show "(\x::real. inverse (1 + x) / 1) \0\ 1" by (rule tendsto_eq_intros refl | simp)+ qed auto subsection\Definitions and basic properties\ definition\<^marker>\tag important\ raw_has_prod :: "[nat \ 'a::{t2_space, comm_semiring_1}, nat, 'a] \ bool" where "raw_has_prod f M p \ (\n. \i\n. f (i+M)) \ p \ p \ 0" text\The nonzero and zero cases, as in \emph{Complex Analysis} by Joseph Bak and Donald J.Newman, page 241\ text\<^marker>\tag important\ \%whitespace\ definition\<^marker>\tag important\ has_prod :: "(nat \ 'a::{t2_space, comm_semiring_1}) \ 'a \ bool" (infixr "has'_prod" 80) where "f has_prod p \ raw_has_prod f 0 p \ (\i q. p = 0 \ f i = 0 \ raw_has_prod f (Suc i) q)" definition\<^marker>\tag important\ convergent_prod :: "(nat \ 'a :: {t2_space,comm_semiring_1}) \ bool" where "convergent_prod f \ \M p. raw_has_prod f M p" definition\<^marker>\tag important\ prodinf :: "(nat \ 'a::{t2_space, comm_semiring_1}) \ 'a" (binder "\" 10) where "prodinf f = (THE p. f has_prod p)" lemmas prod_defs = raw_has_prod_def has_prod_def convergent_prod_def prodinf_def lemma has_prod_subst[trans]: "f = g \ g has_prod z \ f has_prod z" by simp lemma has_prod_cong: "(\n. f n = g n) \ f has_prod c \ g has_prod c" by presburger lemma raw_has_prod_nonzero [simp]: "\ raw_has_prod f M 0" by (simp add: raw_has_prod_def) lemma raw_has_prod_eq_0: fixes f :: "nat \ 'a::{semidom,t2_space}" assumes p: "raw_has_prod f m p" and i: "f i = 0" "i \ m" shows "p = 0" proof - have eq0: "(\k\n. f (k+m)) = 0" if "i - m \ n" for n proof - have "\k\n. f (k + m) = 0" using i that by auto then show ?thesis by auto qed have "(\n. \i\n. 
f (i + m)) \ 0" by (rule LIMSEQ_offset [where k = "i-m"]) (simp add: eq0) with p show ?thesis unfolding raw_has_prod_def using LIMSEQ_unique by blast qed lemma raw_has_prod_Suc: "raw_has_prod f (Suc M) a \ raw_has_prod (\n. f (Suc n)) M a" unfolding raw_has_prod_def by auto lemma has_prod_0_iff: "f has_prod 0 \ (\i. f i = 0 \ (\p. raw_has_prod f (Suc i) p))" by (simp add: has_prod_def) lemma has_prod_unique2: fixes f :: "nat \ 'a::{semidom,t2_space}" assumes "f has_prod a" "f has_prod b" shows "a = b" using assms by (auto simp: has_prod_def raw_has_prod_eq_0) (meson raw_has_prod_def sequentially_bot tendsto_unique) lemma has_prod_unique: fixes f :: "nat \ 'a :: {semidom,t2_space}" shows "f has_prod s \ s = prodinf f" by (simp add: has_prod_unique2 prodinf_def the_equality) lemma convergent_prod_altdef: fixes f :: "nat \ 'a :: {t2_space,comm_semiring_1}" shows "convergent_prod f \ (\M L. (\n\M. f n \ 0) \ (\n. \i\n. f (i+M)) \ L \ L \ 0)" proof assume "convergent_prod f" then obtain M L where *: "(\n. \i\n. f (i+M)) \ L" "L \ 0" by (auto simp: prod_defs) have "f i \ 0" if "i \ M" for i proof assume "f i = 0" have **: "eventually (\n. (\i\n. f (i+M)) = 0) sequentially" using eventually_ge_at_top[of "i - M"] proof eventually_elim case (elim n) with \f i = 0\ and \i \ M\ show ?case by (auto intro!: bexI[of _ "i - M"] prod_zero) qed have "(\n. (\i\n. f (i+M))) \ 0" unfolding filterlim_iff by (auto dest!: eventually_nhds_x_imp_x intro!: eventually_mono[OF **]) from tendsto_unique[OF _ this *(1)] and *(2) show False by simp qed with * show "(\M L. (\n\M. f n \ 0) \ (\n. \i\n. f (i+M)) \ L \ L \ 0)" by blast qed (auto simp: prod_defs) +lemma raw_has_prod_norm: + fixes a :: "'a ::real_normed_field" + assumes "raw_has_prod f M a" + shows "raw_has_prod (\n. norm (f n)) M (norm a)" + using assms by (auto simp: raw_has_prod_def prod_norm tendsto_norm) + +lemma has_prod_norm: + fixes a :: "'a ::real_normed_field" + assumes f: "f has_prod a" + shows "(\n. norm (f n)) has_prod (norm a)" + using f [unfolded has_prod_def] +proof (elim disjE exE conjE) + assume f0: "raw_has_prod f 0 a" + then show "(\n. norm (f n)) has_prod norm a" + using has_prod_def raw_has_prod_norm by blast +next + fix i p + assume "a = 0" and "f i = 0" and p: "raw_has_prod f (Suc i) p" + then have "Ex (raw_has_prod (\n. norm (f n)) (Suc i))" + using raw_has_prod_norm by blast + then show ?thesis + by (metis \a = 0\ \f i = 0\ has_prod_0_iff norm_zero) +qed + subsection\Absolutely convergent products\ definition\<^marker>\tag important\ abs_convergent_prod :: "(nat \ _) \ bool" where "abs_convergent_prod f \ convergent_prod (\i. 1 + norm (f i - 1))" lemma abs_convergent_prodI: assumes "convergent (\n. \i\n. 1 + norm (f i - 1))" shows "abs_convergent_prod f" proof - from assms obtain L where L: "(\n. \i\n. 1 + norm (f i - 1)) \ L" by (auto simp: convergent_def) have "L \ 1" proof (rule tendsto_le) show "eventually (\n. (\i\n. 1 + norm (f i - 1)) \ 1) sequentially" proof (intro always_eventually allI) fix n have "(\i\n. 1 + norm (f i - 1)) \ (\i\n. 1)" by (intro prod_mono) auto thus "(\i\n. 1 + norm (f i - 1)) \ 1" by simp qed qed (use L in simp_all) hence "L \ 0" by auto with L show ?thesis unfolding abs_convergent_prod_def prod_defs by (intro exI[of _ "0::nat"] exI[of _ L]) auto qed lemma fixes f :: "nat \ 'a :: {topological_semigroup_mult,t2_space,idom}" assumes "convergent_prod f" shows convergent_prod_imp_convergent: "convergent (\n. \i\n. f i)" and convergent_prod_to_zero_iff [simp]: "(\n. \i\n. f i) \ 0 \ (\i. 
f i = 0)" proof - from assms obtain M L where M: "\n. n \ M \ f n \ 0" and "(\n. \i\n. f (i + M)) \ L" and "L \ 0" by (auto simp: convergent_prod_altdef) note this(2) also have "(\n. \i\n. f (i + M)) = (\n. \i=M..M+n. f i)" by (intro ext prod.reindex_bij_witness[of _ "\n. n - M" "\n. n + M"]) auto finally have "(\n. (\ii=M..M+n. f i)) \ (\in. (\ii=M..M+n. f i)) = (\n. (\i\{..{M..M+n}. f i))" by (subst prod.union_disjoint) auto also have "(\n. {.. {M..M+n}) = (\n. {..n+M})" by auto finally have lim: "(\n. prod f {..n}) \ prod f {..n. \i\n. f i)" by (auto simp: convergent_def) show "(\n. \i\n. f i) \ 0 \ (\i. f i = 0)" proof assume "\i. f i = 0" then obtain i where "f i = 0" by auto moreover with M have "i < M" by (cases "i < M") auto ultimately have "(\in. \i\n. f i) \ 0" by simp next assume "(\n. \i\n. f i) \ 0" from tendsto_unique[OF _ this lim] and \L \ 0\ show "\i. f i = 0" by auto qed qed lemma convergent_prod_iff_nz_lim: fixes f :: "nat \ 'a :: {topological_semigroup_mult,t2_space,idom}" assumes "\i. f i \ 0" shows "convergent_prod f \ (\L. (\n. \i\n. f i) \ L \ L \ 0)" (is "?lhs \ ?rhs") proof assume ?lhs then show ?rhs using assms convergentD convergent_prod_imp_convergent convergent_prod_to_zero_iff by blast next assume ?rhs then show ?lhs unfolding prod_defs by (rule_tac x=0 in exI) auto qed lemma\<^marker>\tag important\ convergent_prod_iff_convergent: fixes f :: "nat \ 'a :: {topological_semigroup_mult,t2_space,idom}" assumes "\i. f i \ 0" shows "convergent_prod f \ convergent (\n. \i\n. f i) \ lim (\n. \i\n. f i) \ 0" by (force simp: convergent_prod_iff_nz_lim assms convergent_def limI) lemma bounded_imp_convergent_prod: fixes a :: "nat \ real" assumes 1: "\n. a n \ 1" and bounded: "\n. (\i\n. a i) \ B" shows "convergent_prod a" proof - have "bdd_above (range(\n. \i\n. a i))" by (meson bdd_aboveI2 bounded) moreover have "incseq (\n. \i\n. a i)" unfolding mono_def by (metis 1 prod_mono2 atMost_subset_iff dual_order.trans finite_atMost zero_le_one) ultimately obtain p where p: "(\n. \i\n. a i) \ p" using LIMSEQ_incseq_SUP by blast then have "p \ 0" by (metis "1" not_one_le_zero prod_ge_1 LIMSEQ_le_const) with 1 p show ?thesis by (metis convergent_prod_iff_nz_lim not_one_le_zero) qed lemma abs_convergent_prod_altdef: fixes f :: "nat \ 'a :: {one,real_normed_vector}" shows "abs_convergent_prod f \ convergent (\n. \i\n. 1 + norm (f i - 1))" proof assume "abs_convergent_prod f" thus "convergent (\n. \i\n. 1 + norm (f i - 1))" by (auto simp: abs_convergent_prod_def intro!: convergent_prod_imp_convergent) qed (auto intro: abs_convergent_prodI) lemma Weierstrass_prod_ineq: fixes f :: "'a \ real" assumes "\x. x \ A \ f x \ {0..1}" shows "1 - sum f A \ (\x\A. 1 - f x)" using assms proof (induction A rule: infinite_finite_induct) case (insert x A) from insert.hyps and insert.prems have "1 - sum f A + f x * (\x\A. 1 - f x) \ (\x\A. 1 - f x) + f x * (\x\A. 1)" by (intro insert.IH add_mono mult_left_mono prod_mono) auto with insert.hyps show ?case by (simp add: algebra_simps) qed simp_all lemma norm_prod_minus1_le_prod_minus1: fixes f :: "nat \ 'a :: {real_normed_div_algebra,comm_ring_1}" shows "norm (prod (\n. 1 + f n) A - 1) \ prod (\n. 1 + norm (f n)) A - 1" proof (induction A rule: infinite_finite_induct) case (insert x A) from insert.hyps have "norm ((\n\insert x A. 1 + f n) - 1) = norm ((\n\A. 1 + f n) - 1 + f x * (\n\A. 1 + f n))" by (simp add: algebra_simps) also have "\ \ norm ((\n\A. 1 + f n) - 1) + norm (f x * (\n\A. 
1 + f n))" by (rule norm_triangle_ineq) also have "norm (f x * (\n\A. 1 + f n)) = norm (f x) * (\x\A. norm (1 + f x))" by (simp add: prod_norm norm_mult) also have "(\x\A. norm (1 + f x)) \ (\x\A. norm (1::'a) + norm (f x))" by (intro prod_mono norm_triangle_ineq ballI conjI) auto also have "norm (1::'a) = 1" by simp also note insert.IH also have "(\n\A. 1 + norm (f n)) - 1 + norm (f x) * (\x\A. 1 + norm (f x)) = (\n\insert x A. 1 + norm (f n)) - 1" using insert.hyps by (simp add: algebra_simps) finally show ?case by - (simp_all add: mult_left_mono) qed simp_all lemma convergent_prod_imp_ev_nonzero: fixes f :: "nat \ 'a :: {t2_space,comm_semiring_1}" assumes "convergent_prod f" shows "eventually (\n. f n \ 0) sequentially" using assms by (auto simp: eventually_at_top_linorder convergent_prod_altdef) lemma convergent_prod_imp_LIMSEQ: fixes f :: "nat \ 'a :: {real_normed_field}" assumes "convergent_prod f" shows "f \ 1" proof - from assms obtain M L where L: "(\n. \i\n. f (i+M)) \ L" "\n. n \ M \ f n \ 0" "L \ 0" by (auto simp: convergent_prod_altdef) hence L': "(\n. \i\Suc n. f (i+M)) \ L" by (subst filterlim_sequentially_Suc) have "(\n. (\i\Suc n. f (i+M)) / (\i\n. f (i+M))) \ L / L" using L L' by (intro tendsto_divide) simp_all also from L have "L / L = 1" by simp also have "(\n. (\i\Suc n. f (i+M)) / (\i\n. f (i+M))) = (\n. f (n + Suc M))" using assms L by (auto simp: fun_eq_iff atMost_Suc) finally show ?thesis by (rule LIMSEQ_offset) qed lemma abs_convergent_prod_imp_summable: fixes f :: "nat \ 'a :: real_normed_div_algebra" assumes "abs_convergent_prod f" shows "summable (\i. norm (f i - 1))" proof - from assms have "convergent (\n. \i\n. 1 + norm (f i - 1))" unfolding abs_convergent_prod_def by (rule convergent_prod_imp_convergent) then obtain L where L: "(\n. \i\n. 1 + norm (f i - 1)) \ L" unfolding convergent_def by blast have "convergent (\n. \i\n. norm (f i - 1))" proof (rule Bseq_monoseq_convergent) have "eventually (\n. (\i\n. 1 + norm (f i - 1)) < L + 1) sequentially" using L(1) by (rule order_tendstoD) simp_all hence "\\<^sub>F x in sequentially. norm (\i\x. norm (f i - 1)) \ L + 1" proof eventually_elim case (elim n) have "norm (\i\n. norm (f i - 1)) = (\i\n. norm (f i - 1))" unfolding real_norm_def by (intro abs_of_nonneg sum_nonneg) simp_all also have "\ \ (\i\n. 1 + norm (f i - 1))" by (rule sum_le_prod) auto also have "\ < L + 1" by (rule elim) finally show ?case by simp qed thus "Bseq (\n. \i\n. norm (f i - 1))" by (rule BfunI) next show "monoseq (\n. \i\n. norm (f i - 1))" by (rule mono_SucI1) auto qed thus "summable (\i. norm (f i - 1))" by (simp add: summable_iff_convergent') qed lemma summable_imp_abs_convergent_prod: fixes f :: "nat \ 'a :: real_normed_div_algebra" assumes "summable (\i. norm (f i - 1))" shows "abs_convergent_prod f" proof (intro abs_convergent_prodI Bseq_monoseq_convergent) show "monoseq (\n. \i\n. 1 + norm (f i - 1))" by (intro mono_SucI1) (auto simp: atMost_Suc algebra_simps intro!: mult_nonneg_nonneg prod_nonneg) next show "Bseq (\n. \i\n. 1 + norm (f i - 1))" proof (rule Bseq_eventually_mono) show "eventually (\n. norm (\i\n. 1 + norm (f i - 1)) \ norm (exp (\i\n. norm (f i - 1)))) sequentially" by (intro always_eventually allI) (auto simp: abs_prod exp_sum intro!: prod_mono) next from assms have "(\n. \i\n. norm (f i - 1)) \ (\i. norm (f i - 1))" using sums_def_le by blast hence "(\n. exp (\i\n. norm (f i - 1))) \ exp (\i. norm (f i - 1))" by (rule tendsto_exp) hence "convergent (\n. exp (\i\n. 
norm (f i - 1)))" by (rule convergentI) thus "Bseq (\n. exp (\i\n. norm (f i - 1)))" by (rule convergent_imp_Bseq) qed qed theorem abs_convergent_prod_conv_summable: fixes f :: "nat \ 'a :: real_normed_div_algebra" shows "abs_convergent_prod f \ summable (\i. norm (f i - 1))" by (blast intro: abs_convergent_prod_imp_summable summable_imp_abs_convergent_prod) lemma abs_convergent_prod_imp_LIMSEQ: fixes f :: "nat \ 'a :: {comm_ring_1,real_normed_div_algebra}" assumes "abs_convergent_prod f" shows "f \ 1" proof - from assms have "summable (\n. norm (f n - 1))" by (rule abs_convergent_prod_imp_summable) from summable_LIMSEQ_zero[OF this] have "(\n. f n - 1) \ 0" by (simp add: tendsto_norm_zero_iff) from tendsto_add[OF this tendsto_const[of 1]] show ?thesis by simp qed lemma abs_convergent_prod_imp_ev_nonzero: fixes f :: "nat \ 'a :: {comm_ring_1,real_normed_div_algebra}" assumes "abs_convergent_prod f" shows "eventually (\n. f n \ 0) sequentially" proof - from assms have "f \ 1" by (rule abs_convergent_prod_imp_LIMSEQ) hence "eventually (\n. dist (f n) 1 < 1) at_top" by (auto simp: tendsto_iff) thus ?thesis by eventually_elim auto qed subsection\<^marker>\tag unimportant\ \Ignoring initial segments\ lemma convergent_prod_offset: assumes "convergent_prod (\n. f (n + m))" shows "convergent_prod f" proof - from assms obtain M L where "(\n. \k\n. f (k + (M + m))) \ L" "L \ 0" by (auto simp: prod_defs add.assoc) thus "convergent_prod f" unfolding prod_defs by blast qed lemma abs_convergent_prod_offset: assumes "abs_convergent_prod (\n. f (n + m))" shows "abs_convergent_prod f" using assms unfolding abs_convergent_prod_def by (rule convergent_prod_offset) lemma raw_has_prod_ignore_initial_segment: fixes f :: "nat \ 'a :: real_normed_field" assumes "raw_has_prod f M p" "N \ M" obtains q where "raw_has_prod f N q" proof - have p: "(\n. \k\n. f (k + M)) \ p" and "p \ 0" using assms by (auto simp: raw_has_prod_def) then have nz: "\n. n \ M \ f n \ 0" using assms by (auto simp: raw_has_prod_eq_0) define C where "C = (\k 0" by (auto simp: C_def) from p have "(\i. \k\i + (N-M). f (k + M)) \ p" by (rule LIMSEQ_ignore_initial_segment) also have "(\i. \k\i + (N-M). f (k + M)) = (\n. C * (\k\n. f (k + N)))" proof (rule ext, goal_cases) case (1 n) have "{..n+(N-M)} = {..<(N-M)} \ {(N-M)..n+(N-M)}" by auto also have "(\k\\. f (k + M)) = C * (\k=(N-M)..n+(N-M). f (k + M))" unfolding C_def by (rule prod.union_disjoint) auto also have "(\k=(N-M)..n+(N-M). f (k + M)) = (\k\n. f (k + (N-M) + M))" by (intro ext prod.reindex_bij_witness[of _ "\k. k + (N-M)" "\k. k - (N-M)"]) auto finally show ?case using \N \ M\ by (simp add: add_ac) qed finally have "(\n. C * (\k\n. f (k + N)) / C) \ p / C" by (intro tendsto_divide tendsto_const) auto hence "(\n. \k\n. f (k + N)) \ p / C" by simp moreover from \p \ 0\ have "p / C \ 0" by simp ultimately show ?thesis using raw_has_prod_def that by blast qed corollary\<^marker>\tag unimportant\ convergent_prod_ignore_initial_segment: fixes f :: "nat \ 'a :: real_normed_field" assumes "convergent_prod f" shows "convergent_prod (\n. f (n + m))" using assms unfolding convergent_prod_def apply clarify apply (erule_tac N="M+m" in raw_has_prod_ignore_initial_segment) apply (auto simp add: raw_has_prod_def add_ac) done corollary\<^marker>\tag unimportant\ convergent_prod_ignore_nonzero_segment: fixes f :: "nat \ 'a :: real_normed_field" assumes f: "convergent_prod f" and nz: "\i. i \ M \ f i \ 0" shows "\p. 
raw_has_prod f M p" using convergent_prod_ignore_initial_segment [OF f] by (metis convergent_LIMSEQ_iff convergent_prod_iff_convergent le_add_same_cancel2 nz prod_defs(1) zero_order(1)) corollary\<^marker>\tag unimportant\ abs_convergent_prod_ignore_initial_segment: assumes "abs_convergent_prod f" shows "abs_convergent_prod (\n. f (n + m))" using assms unfolding abs_convergent_prod_def by (rule convergent_prod_ignore_initial_segment) subsection\More elementary properties\ theorem abs_convergent_prod_imp_convergent_prod: fixes f :: "nat \ 'a :: {real_normed_div_algebra,complete_space,comm_ring_1}" assumes "abs_convergent_prod f" shows "convergent_prod f" proof - from assms have "eventually (\n. f n \ 0) sequentially" by (rule abs_convergent_prod_imp_ev_nonzero) then obtain N where N: "f n \ 0" if "n \ N" for n by (auto simp: eventually_at_top_linorder) let ?P = "\n. \i\n. f (i + N)" and ?Q = "\n. \i\n. 1 + norm (f (i + N) - 1)" have "Cauchy ?P" proof (rule CauchyI', goal_cases) case (1 \) from assms have "abs_convergent_prod (\n. f (n + N))" by (rule abs_convergent_prod_ignore_initial_segment) hence "Cauchy ?Q" unfolding abs_convergent_prod_def by (intro convergent_Cauchy convergent_prod_imp_convergent) from CauchyD[OF this 1] obtain M where M: "norm (?Q m - ?Q n) < \" if "m \ M" "n \ M" for m n by blast show ?case proof (rule exI[of _ M], safe, goal_cases) case (1 m n) have "dist (?P m) (?P n) = norm (?P n - ?P m)" by (simp add: dist_norm norm_minus_commute) also from 1 have "{..n} = {..m} \ {m<..n}" by auto hence "norm (?P n - ?P m) = norm (?P m * (\k\{m<..n}. f (k + N)) - ?P m)" by (subst prod.union_disjoint [symmetric]) (auto simp: algebra_simps) also have "\ = norm (?P m * ((\k\{m<..n}. f (k + N)) - 1))" by (simp add: algebra_simps) also have "\ = (\k\m. norm (f (k + N))) * norm ((\k\{m<..n}. f (k + N)) - 1)" by (simp add: norm_mult prod_norm) also have "\ \ ?Q m * ((\k\{m<..n}. 1 + norm (f (k + N) - 1)) - 1)" using norm_prod_minus1_le_prod_minus1[of "\k. f (k + N) - 1" "{m<..n}"] norm_triangle_ineq[of 1 "f k - 1" for k] by (intro mult_mono prod_mono ballI conjI norm_prod_minus1_le_prod_minus1 prod_nonneg) auto also have "\ = ?Q m * (\k\{m<..n}. 1 + norm (f (k + N) - 1)) - ?Q m" by (simp add: algebra_simps) also have "?Q m * (\k\{m<..n}. 1 + norm (f (k + N) - 1)) = (\k\{..m}\{m<..n}. 1 + norm (f (k + N) - 1))" by (rule prod.union_disjoint [symmetric]) auto also from 1 have "{..m}\{m<..n} = {..n}" by auto also have "?Q n - ?Q m \ norm (?Q n - ?Q m)" by simp also from 1 have "\ < \" by (intro M) auto finally show ?case . qed qed hence conv: "convergent ?P" by (rule Cauchy_convergent) then obtain L where L: "?P \ L" by (auto simp: convergent_def) have "L \ 0" proof assume [simp]: "L = 0" from tendsto_norm[OF L] have limit: "(\n. \k\n. norm (f (k + N))) \ 0" by (simp add: prod_norm) from assms have "(\n. f (n + N)) \ 1" by (intro abs_convergent_prod_imp_LIMSEQ abs_convergent_prod_ignore_initial_segment) hence "eventually (\n. norm (f (n + N) - 1) < 1) sequentially" by (auto simp: tendsto_iff dist_norm) then obtain M0 where M0: "norm (f (n + N) - 1) < 1" if "n \ M0" for n by (auto simp: eventually_at_top_linorder) { fix M assume M: "M \ M0" with M0 have M: "norm (f (n + N) - 1) < 1" if "n \ M" for n using that by simp have "(\n. \k\n. 1 - norm (f (k+M+N) - 1)) \ 0" proof (rule tendsto_sandwich) show "eventually (\n. (\k\n. 
1 - norm (f (k+M+N) - 1)) \ 0) sequentially" using M by (intro always_eventually prod_nonneg allI ballI) (auto intro: less_imp_le) have "norm (1::'a) - norm (f (i + M + N) - 1) \ norm (f (i + M + N))" for i using norm_triangle_ineq3[of "f (i + M + N)" 1] by simp thus "eventually (\n. (\k\n. 1 - norm (f (k+M+N) - 1)) \ (\k\n. norm (f (k+M+N)))) at_top" using M by (intro always_eventually allI prod_mono ballI conjI) (auto intro: less_imp_le) define C where "C = (\k 0" by (auto simp: C_def) from L have "(\n. norm (\k\n+M. f (k + N))) \ 0" by (intro LIMSEQ_ignore_initial_segment) (simp add: tendsto_norm_zero_iff) also have "(\n. norm (\k\n+M. f (k + N))) = (\n. C * (\k\n. norm (f (k + M + N))))" proof (rule ext, goal_cases) case (1 n) have "{..n+M} = {.. {M..n+M}" by auto also have "norm (\k\\. f (k + N)) = C * norm (\k=M..n+M. f (k + N))" unfolding C_def by (subst prod.union_disjoint) (auto simp: norm_mult prod_norm) also have "(\k=M..n+M. f (k + N)) = (\k\n. f (k + N + M))" by (intro prod.reindex_bij_witness[of _ "\i. i + M" "\i. i - M"]) auto finally show ?case by (simp add: add_ac prod_norm) qed finally have "(\n. C * (\k\n. norm (f (k + M + N))) / C) \ 0 / C" by (intro tendsto_divide tendsto_const) auto thus "(\n. \k\n. norm (f (k + M + N))) \ 0" by simp qed simp_all have "1 - (\i. norm (f (i + M + N) - 1)) \ 0" proof (rule tendsto_le) show "eventually (\n. 1 - (\k\n. norm (f (k+M+N) - 1)) \ (\k\n. 1 - norm (f (k+M+N) - 1))) at_top" using M by (intro always_eventually allI Weierstrass_prod_ineq) (auto intro: less_imp_le) show "(\n. \k\n. 1 - norm (f (k+M+N) - 1)) \ 0" by fact show "(\n. 1 - (\k\n. norm (f (k + M + N) - 1))) \ 1 - (\i. norm (f (i + M + N) - 1))" by (intro tendsto_intros summable_LIMSEQ' summable_ignore_initial_segment abs_convergent_prod_imp_summable assms) qed simp_all hence "(\i. norm (f (i + M + N) - 1)) \ 1" by simp also have "\ + (\ii. norm (f (i + N) - 1))" by (intro suminf_split_initial_segment [symmetric] summable_ignore_initial_segment abs_convergent_prod_imp_summable assms) finally have "1 + (\i (\i. norm (f (i + N) - 1))" by simp } note * = this have "1 + (\i. norm (f (i + N) - 1)) \ (\i. norm (f (i + N) - 1))" proof (rule tendsto_le) show "(\M. 1 + (\i 1 + (\i. norm (f (i + N) - 1))" by (intro tendsto_intros summable_LIMSEQ summable_ignore_initial_segment abs_convergent_prod_imp_summable assms) show "eventually (\M. 1 + (\i (\i. norm (f (i + N) - 1))) at_top" using eventually_ge_at_top[of M0] by eventually_elim (use * in auto) qed simp_all thus False by simp qed with L show ?thesis by (auto simp: prod_defs) qed lemma raw_has_prod_cases: fixes f :: "nat \ 'a :: {idom,topological_semigroup_mult,t2_space}" assumes "raw_has_prod f M p" obtains i where "in. \i\n. f (i + M)) \ p" "p \ 0" using assms unfolding raw_has_prod_def by blast+ then have "(\n. prod f {..i\n. f (i + M))) \ prod f {..i\n. f (i + M)) = prod f {..n+M}" for n proof - have "{..n+M} = {.. {M..n+M}" by auto then have "prod f {..n+M} = prod f {.. = prod f {..i\n. f (i + M))" by (metis (mono_tags, lifting) add.left_neutral atMost_atLeast0 prod.shift_bounds_cl_nat_ivl) finally show ?thesis by metis qed ultimately have "(\n. prod f {..n}) \ prod f {..i 0" using \p \ 0\ assms that by (auto simp: raw_has_prod_def) then show thesis using that by blast qed corollary convergent_prod_offset_0: fixes f :: "nat \ 'a :: {idom,topological_semigroup_mult,t2_space}" assumes "convergent_prod f" "\i. f i \ 0" shows "\p. 
raw_has_prod f 0 p" using assms convergent_prod_def raw_has_prod_cases by blast lemma prodinf_eq_lim: fixes f :: "nat \ 'a :: {idom,topological_semigroup_mult,t2_space}" assumes "convergent_prod f" "\i. f i \ 0" shows "prodinf f = lim (\n. \i\n. f i)" using assms convergent_prod_offset_0 [OF assms] by (simp add: prod_defs lim_def) (metis (no_types) assms(1) convergent_prod_to_zero_iff) lemma has_prod_one[simp, intro]: "(\n. 1) has_prod 1" unfolding prod_defs by auto lemma convergent_prod_one[simp, intro]: "convergent_prod (\n. 1)" unfolding prod_defs by auto lemma prodinf_cong: "(\n. f n = g n) \ prodinf f = prodinf g" by presburger lemma convergent_prod_cong: fixes f g :: "nat \ 'a::{field,topological_semigroup_mult,t2_space}" assumes ev: "eventually (\x. f x = g x) sequentially" and f: "\i. f i \ 0" and g: "\i. g i \ 0" shows "convergent_prod f = convergent_prod g" proof - from assms obtain N where N: "\n\N. f n = g n" by (auto simp: eventually_at_top_linorder) define C where "C = (\k 0" by (simp add: f) have *: "eventually (\n. prod f {..n} = C * prod g {..n}) sequentially" using eventually_ge_at_top[of N] proof eventually_elim case (elim n) then have "{..n} = {.. {N..n}" by auto also have "prod f \ = prod f {.. {N..n})" by (intro prod.union_disjoint [symmetric]) auto also from elim have "{.. {N..n} = {..n}" by auto finally show "prod f {..n} = C * prod g {..n}" . qed then have cong: "convergent (\n. prod f {..n}) = convergent (\n. C * prod g {..n})" by (rule convergent_cong) show ?thesis proof assume cf: "convergent_prod f" with f have "\ (\n. prod f {..n}) \ 0" by simp then have "\ (\n. prod g {..n}) \ 0" using * \C \ 0\ filterlim_cong by fastforce then show "convergent_prod g" by (metis convergent_mult_const_iff \C \ 0\ cong cf convergent_LIMSEQ_iff convergent_prod_iff_convergent convergent_prod_imp_convergent g) next assume cg: "convergent_prod g" have "\a. C * a \ 0 \ (\n. prod g {..n}) \ a" by (metis (no_types) \C \ 0\ cg convergent_prod_iff_nz_lim divide_eq_0_iff g nonzero_mult_div_cancel_right) then show "convergent_prod f" using "*" tendsto_mult_left filterlim_cong by (fastforce simp add: convergent_prod_iff_nz_lim f) qed qed lemma has_prod_finite: fixes f :: "nat \ 'a::{semidom,t2_space}" assumes [simp]: "finite N" and f: "\n. n \ N \ f n = 1" shows "f has_prod (\n\N. f n)" proof - have eq: "prod f {..n + Suc (Max N)} = prod f N" for n proof (rule prod.mono_neutral_right) show "N \ {..n + Suc (Max N)}" by (auto simp: le_Suc_eq trans_le_add2) show "\i\{..n + Suc (Max N)} - N. f i = 1" using f by blast qed auto show ?thesis proof (cases "\n\N. f n \ 0") case True then have "prod f N \ 0" by simp moreover have "(\n. prod f {..n}) \ prod f N" by (rule LIMSEQ_offset[of _ "Suc (Max N)"]) (simp add: eq atLeast0LessThan del: add_Suc_right) ultimately show ?thesis by (simp add: raw_has_prod_def has_prod_def) next case False then obtain k where "k \ N" "f k = 0" by auto let ?Z = "{n \ N. f n = 0}" have maxge: "Max ?Z \ n" if "f n = 0" for n using Max_ge [of ?Z] \finite N\ \f n = 0\ by (metis (mono_tags) Collect_mem_eq f finite_Collect_conjI mem_Collect_eq zero_neq_one) let ?q = "prod f {Suc (Max ?Z)..Max N}" have [simp]: "?q \ 0" using maxge Suc_n_not_le_n le_trans by force have eq: "(\i\n + Max N. f (Suc (i + Max ?Z))) = ?q" for n proof - have "(\i\n + Max N. f (Suc (i + Max ?Z))) = prod f {Suc (Max ?Z)..n + Max N + Suc (Max ?Z)}" proof (rule prod.reindex_cong [where l = "\i. i + Suc (Max ?Z)", THEN sym]) show "{Suc (Max ?Z)..n + Max N + Suc (Max ?Z)} = (\i. 
i + Suc (Max ?Z)) ` {..n + Max N}" using le_Suc_ex by fastforce qed (auto simp: inj_on_def) also have "\ = ?q" by (rule prod.mono_neutral_right) (use Max.coboundedI [OF \finite N\] f in \force+\) finally show ?thesis . qed have q: "raw_has_prod f (Suc (Max ?Z)) ?q" proof (simp add: raw_has_prod_def) show "(\n. \i\n. f (Suc (i + Max ?Z))) \ ?q" by (rule LIMSEQ_offset[of _ "(Max N)"]) (simp add: eq) qed show ?thesis unfolding has_prod_def proof (intro disjI2 exI conjI) show "prod f N = 0" using \f k = 0\ \k \ N\ \finite N\ prod_zero by blast show "f (Max ?Z) = 0" using Max_in [of ?Z] \finite N\ \f k = 0\ \k \ N\ by auto qed (use q in auto) qed qed corollary\<^marker>\tag unimportant\ has_prod_0: fixes f :: "nat \ 'a::{semidom,t2_space}" assumes "\n. f n = 1" shows "f has_prod 1" by (simp add: assms has_prod_cong) lemma prodinf_zero[simp]: "prodinf (\n. 1::'a::real_normed_field) = 1" using has_prod_unique by force lemma convergent_prod_finite: fixes f :: "nat \ 'a::{idom,t2_space}" assumes "finite N" "\n. n \ N \ f n = 1" shows "convergent_prod f" proof - have "\n p. raw_has_prod f n p" using assms has_prod_def has_prod_finite by blast then show ?thesis by (simp add: convergent_prod_def) qed lemma has_prod_If_finite_set: fixes f :: "nat \ 'a::{idom,t2_space}" shows "finite A \ (\r. if r \ A then f r else 1) has_prod (\r\A. f r)" using has_prod_finite[of A "(\r. if r \ A then f r else 1)"] by simp lemma has_prod_If_finite: fixes f :: "nat \ 'a::{idom,t2_space}" shows "finite {r. P r} \ (\r. if P r then f r else 1) has_prod (\r | P r. f r)" using has_prod_If_finite_set[of "{r. P r}"] by simp lemma convergent_prod_If_finite_set[simp, intro]: fixes f :: "nat \ 'a::{idom,t2_space}" shows "finite A \ convergent_prod (\r. if r \ A then f r else 1)" by (simp add: convergent_prod_finite) lemma convergent_prod_If_finite[simp, intro]: fixes f :: "nat \ 'a::{idom,t2_space}" shows "finite {r. P r} \ convergent_prod (\r. if P r then f r else 1)" using convergent_prod_def has_prod_If_finite has_prod_def by fastforce lemma has_prod_single: fixes f :: "nat \ 'a::{idom,t2_space}" shows "(\r. if r = i then f r else 1) has_prod f i" using has_prod_If_finite[of "\r. r = i"] by simp context fixes f :: "nat \ 'a :: real_normed_field" begin lemma convergent_prod_imp_has_prod: assumes "convergent_prod f" shows "\p. f has_prod p" proof - obtain M p where p: "raw_has_prod f M p" using assms convergent_prod_def by blast then have "p \ 0" using raw_has_prod_nonzero by blast with p have fnz: "f i \ 0" if "i \ M" for i using raw_has_prod_eq_0 that by blast define C where "C = (\nn\M. f n \ 0") case True then have "C \ 0" by (simp add: C_def) then show ?thesis by (meson True assms convergent_prod_offset_0 fnz has_prod_def nat_le_linear) next case False let ?N = "GREATEST n. f n = 0" have 0: "f ?N = 0" using fnz False by (metis (mono_tags, lifting) GreatestI_ex_nat nat_le_linear) have "f i \ 0" if "i > ?N" for i by (metis (mono_tags, lifting) Greatest_le_nat fnz leD linear that) then have "\p. raw_has_prod f (Suc ?N) p" using assms by (auto simp: intro!: convergent_prod_ignore_nonzero_segment) then show ?thesis unfolding has_prod_def using 0 by blast qed qed lemma convergent_prod_has_prod [intro]: shows "convergent_prod f \ f has_prod (prodinf f)" unfolding prodinf_def by (metis convergent_prod_imp_has_prod has_prod_unique theI') lemma convergent_prod_LIMSEQ: shows "convergent_prod f \ (\n. \i\n. 
f i) \ prodinf f" by (metis convergent_LIMSEQ_iff convergent_prod_has_prod convergent_prod_imp_convergent convergent_prod_to_zero_iff raw_has_prod_eq_0 has_prod_def prodinf_eq_lim zero_le) theorem has_prod_iff: "f has_prod x \ convergent_prod f \ prodinf f = x" proof assume "f has_prod x" then show "convergent_prod f \ prodinf f = x" apply safe using convergent_prod_def has_prod_def apply blast using has_prod_unique by blast qed auto lemma convergent_prod_has_prod_iff: "convergent_prod f \ f has_prod prodinf f" by (auto simp: has_prod_iff convergent_prod_has_prod) lemma prodinf_finite: assumes N: "finite N" and f: "\n. n \ N \ f n = 1" shows "prodinf f = (\n\N. f n)" using has_prod_finite[OF assms, THEN has_prod_unique] by simp end subsection\<^marker>\tag unimportant\ \Infinite products on ordered topological monoids\ lemma LIMSEQ_prod_0: fixes f :: "nat \ 'a::{semidom,topological_space}" assumes "f i = 0" shows "(\n. prod f {..n}) \ 0" proof (subst tendsto_cong) show "\\<^sub>F n in sequentially. prod f {..n} = 0" proof show "prod f {..n} = 0" if "n \ i" for n using that assms by auto qed qed auto lemma LIMSEQ_prod_nonneg: fixes f :: "nat \ 'a::{linordered_semidom,linorder_topology}" assumes 0: "\n. 0 \ f n" and a: "(\n. prod f {..n}) \ a" shows "a \ 0" by (simp add: "0" prod_nonneg LIMSEQ_le_const [OF a]) context fixes f :: "nat \ 'a::{linordered_semidom,linorder_topology}" begin lemma has_prod_le: assumes f: "f has_prod a" and g: "g has_prod b" and le: "\n. 0 \ f n \ f n \ g n" shows "a \ b" proof (cases "a=0 \ b=0") case True then show ?thesis proof assume [simp]: "a=0" have "b \ 0" proof (rule LIMSEQ_prod_nonneg) show "(\n. prod g {..n}) \ b" using g by (auto simp: has_prod_def raw_has_prod_def LIMSEQ_prod_0) qed (use le order_trans in auto) then show ?thesis by auto next assume [simp]: "b=0" then obtain i where "g i = 0" using g by (auto simp: prod_defs) then have "f i = 0" using antisym le by force then have "a=0" using f by (auto simp: prod_defs LIMSEQ_prod_0 LIMSEQ_unique) then show ?thesis by auto qed next case False then show ?thesis using assms unfolding has_prod_def raw_has_prod_def by (force simp: LIMSEQ_prod_0 intro!: LIMSEQ_le prod_mono) qed lemma prodinf_le: assumes f: "f has_prod a" and g: "g has_prod b" and le: "\n. 0 \ f n \ f n \ g n" shows "prodinf f \ prodinf g" using has_prod_le [OF assms] has_prod_unique f g by blast end lemma prod_le_prodinf: fixes f :: "nat \ 'a::{linordered_idom,linorder_topology}" assumes "f has_prod a" "\i. 0 \ f i" "\i. i\n \ 1 \ f i" shows "prod f {.. prodinf f" by(rule has_prod_le[OF has_prod_If_finite_set]) (use assms has_prod_unique in auto) lemma prodinf_nonneg: fixes f :: "nat \ 'a::{linordered_idom,linorder_topology}" assumes "f has_prod a" "\i. 1 \ f i" shows "1 \ prodinf f" using prod_le_prodinf[of f a 0] assms by (metis order_trans prod_ge_1 zero_le_one) lemma prodinf_le_const: fixes f :: "nat \ real" assumes "convergent_prod f" "\n. prod f {.. x" shows "prodinf f \ x" by (metis lessThan_Suc_atMost assms convergent_prod_LIMSEQ LIMSEQ_le_const2) lemma prodinf_eq_one_iff [simp]: fixes f :: "nat \ real" assumes f: "convergent_prod f" and ge1: "\n. 1 \ f n" shows "prodinf f = 1 \ (\n. f n = 1)" proof assume "prodinf f = 1" then have "(\n. \i 1" using convergent_prod_LIMSEQ[of f] assms by (simp add: LIMSEQ_lessThan_iff_atMost) then have "\i. (\n\{i}. f n) \ 1" proof (rule LIMSEQ_le_const) have "1 \ prod f n" for n by (simp add: ge1 prod_ge_1) have "prod f {..\n. 
1 \ prod f n\ \prodinf f = 1\ antisym f convergent_prod_has_prod ge1 order_trans prod_le_prodinf zero_le_one) then have "(\n\{i}. f n) \ prod f {.. Suc i" for i n by (metis mult.left_neutral order_refl prod.cong prod.neutral_const prod.lessThan_Suc) then show "\N. \n\N. (\n\{i}. f n) \ prod f {..n. f n = 1" by (auto intro!: antisym) qed (metis prodinf_zero fun_eq_iff) lemma prodinf_pos_iff: fixes f :: "nat \ real" assumes "convergent_prod f" "\n. 1 \ f n" shows "1 < prodinf f \ (\i. 1 < f i)" using prod_le_prodinf[of f 1] prodinf_eq_one_iff by (metis convergent_prod_has_prod assms less_le prodinf_nonneg) lemma less_1_prodinf2: fixes f :: "nat \ real" assumes "convergent_prod f" "\n. 1 \ f n" "1 < f i" shows "1 < prodinf f" proof - have "1 < (\n \ prodinf f" by (intro prod_le_prodinf) (use assms order_trans zero_le_one in \blast+\) finally show ?thesis . qed lemma less_1_prodinf: fixes f :: "nat \ real" shows "\convergent_prod f; \n. 1 < f n\ \ 1 < prodinf f" by (intro less_1_prodinf2[where i=1]) (auto intro: less_imp_le) lemma prodinf_nonzero: fixes f :: "nat \ 'a :: {idom,topological_semigroup_mult,t2_space}" assumes "convergent_prod f" "\i. f i \ 0" shows "prodinf f \ 0" by (metis assms convergent_prod_offset_0 has_prod_unique raw_has_prod_def has_prod_def) lemma less_0_prodinf: fixes f :: "nat \ real" assumes f: "convergent_prod f" and 0: "\i. f i > 0" shows "0 < prodinf f" proof - have "prodinf f \ 0" by (metis assms less_irrefl prodinf_nonzero) moreover have "0 < (\n 0" using convergent_prod_LIMSEQ [OF f] LIMSEQ_prod_nonneg 0 less_le by blast ultimately show ?thesis by auto qed lemma prod_less_prodinf2: fixes f :: "nat \ real" assumes f: "convergent_prod f" and 1: "\m. m\n \ 1 \ f m" and 0: "\m. 0 < f m" and i: "n \ i" "1 < f i" shows "prod f {.. prod f {.. prodinf f" using prod_le_prodinf[of f _ "Suc i"] by (meson "0" "1" Suc_leD convergent_prod_has_prod f \n \ i\ le_trans less_eq_real_def) ultimately show ?thesis by (metis le_less_trans mult.commute not_le prod.lessThan_Suc) qed lemma prod_less_prodinf: fixes f :: "nat \ real" assumes f: "convergent_prod f" and 1: "\m. m\n \ 1 < f m" and 0: "\m. 0 < f m" shows "prod f {.. real" assumes pos: "\n. 1 \ f n" and le: "\n. (\i x" shows "\p. raw_has_prod f 0 p" unfolding raw_has_prod_def add_0_right proof (rule exI LIMSEQ_incseq_SUP conjI)+ show "bdd_above (range (\n. prod f {..n}))" by (metis bdd_aboveI2 le lessThan_Suc_atMost) then have "(SUP i. prod f {..i}) > 0" by (metis UNIV_I cSUP_upper less_le_trans pos prod_pos zero_less_one) then show "(SUP i. prod f {..i}) \ 0" by auto show "incseq (\n. prod f {..n})" using pos order_trans [OF zero_le_one] by (auto simp: mono_def intro!: prod_mono2) qed lemma convergent_prodI_nonneg_bounded: fixes f :: "nat \ real" assumes "\n. 1 \ f n" "\n. (\i x" shows "convergent_prod f" using convergent_prod_def raw_has_prodI_bounded [OF assms] by blast subsection\<^marker>\tag unimportant\ \Infinite products on topological spaces\ context fixes f g :: "nat \ 'a::{t2_space,topological_semigroup_mult,idom}" begin lemma raw_has_prod_mult: "\raw_has_prod f M a; raw_has_prod g M b\ \ raw_has_prod (\n. f n * g n) M (a * b)" by (force simp add: prod.distrib tendsto_mult raw_has_prod_def) lemma has_prod_mult_nz: "\f has_prod a; g has_prod b; a \ 0; b \ 0\ \ (\n. f n * g n) has_prod (a * b)" by (simp add: raw_has_prod_mult has_prod_def) end context fixes f g :: "nat \ 'a::real_normed_field" begin lemma has_prod_mult: assumes f: "f has_prod a" and g: "g has_prod b" shows "(\n. 
f n * g n) has_prod (a * b)" using f [unfolded has_prod_def] proof (elim disjE exE conjE) assume f0: "raw_has_prod f 0 a" show ?thesis using g [unfolded has_prod_def] proof (elim disjE exE conjE) assume g0: "raw_has_prod g 0 b" with f0 show ?thesis by (force simp add: has_prod_def prod.distrib tendsto_mult raw_has_prod_def) next fix j q assume "b = 0" and "g j = 0" and q: "raw_has_prod g (Suc j) q" obtain p where p: "raw_has_prod f (Suc j) p" using f0 raw_has_prod_ignore_initial_segment by blast then have "Ex (raw_has_prod (\n. f n * g n) (Suc j))" using q raw_has_prod_mult by blast then show ?thesis using \b = 0\ \g j = 0\ has_prod_0_iff by fastforce qed next fix i p assume "a = 0" and "f i = 0" and p: "raw_has_prod f (Suc i) p" show ?thesis using g [unfolded has_prod_def] proof (elim disjE exE conjE) assume g0: "raw_has_prod g 0 b" obtain q where q: "raw_has_prod g (Suc i) q" using g0 raw_has_prod_ignore_initial_segment by blast then have "Ex (raw_has_prod (\n. f n * g n) (Suc i))" using raw_has_prod_mult p by blast then show ?thesis using \a = 0\ \f i = 0\ has_prod_0_iff by fastforce next fix j q assume "b = 0" and "g j = 0" and q: "raw_has_prod g (Suc j) q" obtain p' where p': "raw_has_prod f (Suc (max i j)) p'" by (metis raw_has_prod_ignore_initial_segment max_Suc_Suc max_def p) moreover obtain q' where q': "raw_has_prod g (Suc (max i j)) q'" by (metis raw_has_prod_ignore_initial_segment max.cobounded2 max_Suc_Suc q) ultimately show ?thesis using \b = 0\ by (simp add: has_prod_def) (metis \f i = 0\ \g j = 0\ raw_has_prod_mult max_def) qed qed lemma convergent_prod_mult: assumes f: "convergent_prod f" and g: "convergent_prod g" shows "convergent_prod (\n. f n * g n)" unfolding convergent_prod_def proof - obtain M p N q where p: "raw_has_prod f M p" and q: "raw_has_prod g N q" using convergent_prod_def f g by blast+ then obtain p' q' where p': "raw_has_prod f (max M N) p'" and q': "raw_has_prod g (max M N) q'" by (meson raw_has_prod_ignore_initial_segment max.cobounded1 max.cobounded2) then show "\M p. raw_has_prod (\n. f n * g n) M p" using raw_has_prod_mult by blast qed lemma prodinf_mult: "convergent_prod f \ convergent_prod g \ prodinf f * prodinf g = (\n. f n * g n)" by (intro has_prod_unique has_prod_mult convergent_prod_has_prod) end context fixes f :: "'i \ nat \ 'a::real_normed_field" and I :: "'i set" begin lemma has_prod_prod: "(\i. i \ I \ (f i) has_prod (x i)) \ (\n. \i\I. f i n) has_prod (\i\I. x i)" by (induct I rule: infinite_finite_induct) (auto intro!: has_prod_mult) lemma prodinf_prod: "(\i. i \ I \ convergent_prod (f i)) \ (\n. \i\I. f i n) = (\i\I. \n. f i n)" using has_prod_unique[OF has_prod_prod, OF convergent_prod_has_prod] by simp lemma convergent_prod_prod: "(\i. i \ I \ convergent_prod (f i)) \ convergent_prod (\n. \i\I. f i n)" using convergent_prod_has_prod_iff has_prod_prod prodinf_prod by force end subsection\<^marker>\tag unimportant\ \Infinite summability on real normed fields\ context fixes f :: "nat \ 'a::real_normed_field" begin lemma raw_has_prod_Suc_iff: "raw_has_prod f M (a * f M) \ raw_has_prod (\n. f (Suc n)) M a \ f M \ 0" proof - have "raw_has_prod f M (a * f M) \ (\i. \j\Suc i. f (j+M)) \ a * f M \ a * f M \ 0" by (subst filterlim_sequentially_Suc) (simp add: raw_has_prod_def) also have "\ \ (\i. (\j\i. f (Suc j + M)) * f M) \ a * f M \ a * f M \ 0" by (simp add: ac_simps atMost_Suc_eq_insert_0 image_Suc_atMost prod.atLeast1_atMost_eq lessThan_Suc_atMost del: prod.cl_ivl_Suc) also have "\ \ raw_has_prod (\n. 
f (Suc n)) M a \ f M \ 0" proof safe assume tends: "(\i. (\j\i. f (Suc j + M)) * f M) \ a * f M" and 0: "a * f M \ 0" with tendsto_divide[OF tends tendsto_const, of "f M"] show "raw_has_prod (\n. f (Suc n)) M a" by (simp add: raw_has_prod_def) qed (auto intro: tendsto_mult_right simp: raw_has_prod_def) finally show ?thesis . qed lemma has_prod_Suc_iff: assumes "f 0 \ 0" shows "(\n. f (Suc n)) has_prod a \ f has_prod (a * f 0)" proof (cases "a = 0") case True then show ?thesis proof (simp add: has_prod_def, safe) fix i x assume "f (Suc i) = 0" and "raw_has_prod (\n. f (Suc n)) (Suc i) x" then obtain y where "raw_has_prod f (Suc (Suc i)) y" by (metis (no_types) raw_has_prod_eq_0 Suc_n_not_le_n raw_has_prod_Suc_iff raw_has_prod_ignore_initial_segment raw_has_prod_nonzero linear) then show "\i. f i = 0 \ Ex (raw_has_prod f (Suc i))" using \f (Suc i) = 0\ by blast next fix i x assume "f i = 0" and x: "raw_has_prod f (Suc i) x" then obtain j where j: "i = Suc j" by (metis assms not0_implies_Suc) moreover have "\ y. raw_has_prod (\n. f (Suc n)) i y" using x by (auto simp: raw_has_prod_def) then show "\i. f (Suc i) = 0 \ Ex (raw_has_prod (\n. f (Suc n)) (Suc i))" using \f i = 0\ j by blast qed next case False then show ?thesis by (auto simp: has_prod_def raw_has_prod_Suc_iff assms) qed lemma convergent_prod_Suc_iff [simp]: shows "convergent_prod (\n. f (Suc n)) = convergent_prod f" proof assume "convergent_prod f" then obtain M L where M_nz:"\n\M. f n \ 0" and M_L:"(\n. \i\n. f (i + M)) \ L" and "L \ 0" unfolding convergent_prod_altdef by auto have "(\n. \i\n. f (Suc (i + M))) \ L / f M" proof - have "(\n. \i\{0..Suc n}. f (i + M)) \ L" using M_L apply (subst (asm) filterlim_sequentially_Suc[symmetric]) using atLeast0AtMost by auto then have "(\n. f M * (\i\{0..n}. f (Suc (i + M)))) \ L" apply (subst (asm) prod.atLeast0_atMost_Suc_shift) by simp then have "(\n. (\i\{0..n}. f (Suc (i + M)))) \ L/f M" apply (drule_tac tendsto_divide) using M_nz[rule_format,of M,simplified] by auto then show ?thesis unfolding atLeast0AtMost . qed then show "convergent_prod (\n. f (Suc n))" unfolding convergent_prod_altdef apply (rule_tac exI[where x=M]) apply (rule_tac exI[where x="L/f M"]) using M_nz \L\0\ by auto next assume "convergent_prod (\n. f (Suc n))" then obtain M where "\L. (\n\M. f (Suc n) \ 0) \ (\n. \i\n. f (Suc (i + M))) \ L \ L \ 0" unfolding convergent_prod_altdef by auto then show "convergent_prod f" unfolding convergent_prod_altdef apply (rule_tac exI[where x="Suc M"]) using Suc_le_D by auto qed lemma raw_has_prod_inverse: assumes "raw_has_prod f M a" shows "raw_has_prod (\n. inverse (f n)) M (inverse a)" using assms unfolding raw_has_prod_def by (auto dest: tendsto_inverse simp: prod_inversef [symmetric]) lemma has_prod_inverse: assumes "f has_prod a" shows "(\n. inverse (f n)) has_prod (inverse a)" using assms raw_has_prod_inverse unfolding has_prod_def by auto lemma convergent_prod_inverse: assumes "convergent_prod f" shows "convergent_prod (\n. inverse (f n))" using assms unfolding convergent_prod_def by (blast intro: raw_has_prod_inverse elim: ) end context fixes f :: "nat \ 'a::real_normed_field" begin lemma raw_has_prod_Suc_iff': "raw_has_prod f M a \ raw_has_prod (\n. f (Suc n)) M (a / f M) \ f M \ 0" by (metis raw_has_prod_eq_0 add.commute add.left_neutral raw_has_prod_Suc_iff raw_has_prod_nonzero le_add1 nonzero_mult_div_cancel_right times_divide_eq_left) lemma has_prod_divide: "f has_prod a \ g has_prod b \ (\n. 
f n / g n) has_prod (a / b)" unfolding divide_inverse by (intro has_prod_inverse has_prod_mult) lemma convergent_prod_divide: assumes f: "convergent_prod f" and g: "convergent_prod g" shows "convergent_prod (\n. f n / g n)" using f g has_prod_divide has_prod_iff by blast lemma prodinf_divide: "convergent_prod f \ convergent_prod g \ prodinf f / prodinf g = (\n. f n / g n)" by (intro has_prod_unique has_prod_divide convergent_prod_has_prod) lemma prodinf_inverse: "convergent_prod f \ (\n. inverse (f n)) = inverse (\n. f n)" by (intro has_prod_unique [symmetric] has_prod_inverse convergent_prod_has_prod) lemma has_prod_Suc_imp: assumes "(\n. f (Suc n)) has_prod a" shows "f has_prod (a * f 0)" proof - have "f has_prod (a * f 0)" when "raw_has_prod (\n. f (Suc n)) 0 a" apply (cases "f 0=0") using that unfolding has_prod_def raw_has_prod_Suc by (auto simp add: raw_has_prod_Suc_iff) moreover have "f has_prod (a * f 0)" when "(\i q. a = 0 \ f (Suc i) = 0 \ raw_has_prod (\n. f (Suc n)) (Suc i) q)" proof - from that obtain i q where "a = 0" "f (Suc i) = 0" "raw_has_prod (\n. f (Suc n)) (Suc i) q" by auto then show ?thesis unfolding has_prod_def by (auto intro!:exI[where x="Suc i"] simp:raw_has_prod_Suc) qed ultimately show "f has_prod (a * f 0)" using assms unfolding has_prod_def by auto qed lemma has_prod_iff_shift: assumes "\i. i < n \ f i \ 0" shows "(\i. f (i + n)) has_prod a \ f has_prod (a * (\ii. f (Suc i + n)) has_prod a \ (\i. f (i + n)) has_prod (a * f n)" by (subst has_prod_Suc_iff) auto with Suc show ?case by (simp add: ac_simps) qed corollary\<^marker>\tag unimportant\ has_prod_iff_shift': assumes "\i. i < n \ f i \ 0" shows "(\i. f (i + n)) has_prod (a / (\i f has_prod a" by (simp add: assms has_prod_iff_shift) lemma has_prod_one_iff_shift: assumes "\i. i < n \ f i = 1" shows "(\i. f (i+n)) has_prod a \ (\i. f i) has_prod a" by (simp add: assms has_prod_iff_shift) lemma convergent_prod_iff_shift [simp]: shows "convergent_prod (\i. f (i + n)) \ convergent_prod f" apply safe using convergent_prod_offset apply blast using convergent_prod_ignore_initial_segment convergent_prod_def by blast lemma has_prod_split_initial_segment: assumes "f has_prod a" "\i. i < n \ f i \ 0" shows "(\i. f (i + n)) has_prod (a / (\ii. i < n \ f i \ 0" shows "(\i. f (i + n)) = (\i. f i) / (\ii. i < n \ f i \ 0" shows "prodinf f = (\i. f (i + n)) * (\i 0" shows "(\n. f (Suc n)) = prodinf f / f 0" using prodinf_split_initial_segment[of 1] assms by simp end context fixes f :: "nat \ 'a::real_normed_field" begin lemma convergent_prod_inverse_iff [simp]: "convergent_prod (\n. inverse (f n)) \ convergent_prod f" by (auto dest: convergent_prod_inverse) lemma convergent_prod_const_iff [simp]: fixes c :: "'a :: {real_normed_field}" shows "convergent_prod (\_. c) \ c = 1" proof assume "convergent_prod (\_. c)" then show "c = 1" using convergent_prod_imp_LIMSEQ LIMSEQ_unique by blast next assume "c = 1" then show "convergent_prod (\_. c)" by auto qed lemma has_prod_power: "f has_prod a \ (\i. f i ^ n) has_prod (a ^ n)" by (induction n) (auto simp: has_prod_mult) lemma convergent_prod_power: "convergent_prod f \ convergent_prod (\i. f i ^ n)" by (induction n) (auto simp: convergent_prod_mult) lemma prodinf_power: "convergent_prod f \ prodinf (\i. f i ^ n) = prodinf f ^ n" by (metis has_prod_unique convergent_prod_imp_has_prod has_prod_power) end subsection\Exponentials and logarithms\ context fixes f :: "nat \ 'a::{real_normed_field,banach}" begin lemma sums_imp_has_prod_exp: assumes "f sums s" shows "raw_has_prod (\i. 
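
In conventional notation, the shift lemmas above (has_prod_iff_shift, prodinf_split_initial_segment, prodinf_split_head) amount to the telescoping identity

\[ \prod_{i=0}^{\infty} f(i) \;=\; \Bigl(\prod_{i<n} f(i)\Bigr)\cdot\prod_{i=0}^{\infty} f(i+n) \qquad\text{provided } f(i)\neq 0 \text{ for all } i<n, \]

with the head/tail special case \( \prod_{i=0}^{\infty} f(i+1) = \bigl(\prod_{i=0}^{\infty} f(i)\bigr)/f(0) \) when \( f(0)\neq 0 \).
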
exp (f i)) 0 (exp s)" using assms continuous_on_exp [of UNIV "\x::'a. x"] using continuous_on_tendsto_compose [of UNIV exp "(\n. sum f {..n})" s] by (simp add: prod_defs sums_def_le exp_sum) lemma convergent_prod_exp: assumes "summable f" shows "convergent_prod (\i. exp (f i))" using sums_imp_has_prod_exp assms unfolding summable_def convergent_prod_def by blast lemma prodinf_exp: assumes "summable f" shows "prodinf (\i. exp (f i)) = exp (suminf f)" proof - have "f sums suminf f" using assms by blast then have "(\i. exp (f i)) has_prod exp (suminf f)" by (simp add: has_prod_def sums_imp_has_prod_exp) then show ?thesis by (rule has_prod_unique [symmetric]) qed end theorem convergent_prod_iff_summable_real: fixes a :: "nat \ real" assumes "\n. a n > 0" shows "convergent_prod (\k. 1 + a k) \ summable a" (is "?lhs = ?rhs") proof assume ?lhs then obtain p where "raw_has_prod (\k. 1 + a k) 0 p" by (metis assms add_less_same_cancel2 convergent_prod_offset_0 not_one_less_zero) then have to_p: "(\n. \k\n. 1 + a k) \ p" by (auto simp: raw_has_prod_def) moreover have le: "(\k\n. a k) \ (\k\n. 1 + a k)" for n by (rule sum_le_prod) (use assms less_le in force) have "(\k\n. 1 + a k) \ p" for n proof (rule incseq_le [OF _ to_p]) show "incseq (\n. \k\n. 1 + a k)" using assms by (auto simp: mono_def order.strict_implies_order intro!: prod_mono2) qed with le have "(\k\n. a k) \ p" for n by (metis order_trans) with assms bounded_imp_summable show ?rhs by (metis not_less order.asym) next assume R: ?rhs have "(\k\n. 1 + a k) \ exp (suminf a)" for n proof - have "(\k\n. 1 + a k) \ exp (\k\n. a k)" for n by (rule prod_le_exp_sum) (use assms less_le in force) moreover have "exp (\k\n. a k) \ exp (suminf a)" for n unfolding exp_le_cancel_iff by (meson sum_le_suminf R assms finite_atMost less_eq_real_def) ultimately show ?thesis by (meson order_trans) qed then obtain L where L: "(\n. \k\n. 1 + a k) \ L" by (metis assms bounded_imp_convergent_prod convergent_prod_iff_nz_lim le_add_same_cancel1 le_add_same_cancel2 less_le not_le zero_le_one) moreover have "L \ 0" proof assume "L = 0" with L have "(\n. \k\n. 1 + a k) \ 0" by simp moreover have "(\k\n. 1 + a k) > 1" for n by (simp add: assms less_1_prod) ultimately show False by (meson Lim_bounded2 not_one_le_zero less_imp_le) qed ultimately show ?lhs using assms convergent_prod_iff_nz_lim by (metis add_less_same_cancel1 less_le not_le zero_less_one) qed lemma exp_suminf_prodinf_real: fixes f :: "nat \ real" assumes ge0:"\n. f n \ 0" and ac: "abs_convergent_prod (\n. exp (f n))" shows "prodinf (\i. exp (f i)) = exp (suminf f)" proof - have "summable f" using ac unfolding abs_convergent_prod_conv_summable proof (elim summable_comparison_test') fix n have "\f n\ = f n" by (simp add: ge0) also have "\ \ exp (f n) - 1" by (metis diff_diff_add exp_ge_add_one_self ge_iff_diff_ge_0) finally show "norm (f n) \ norm (exp (f n) - 1)" by simp qed then show ?thesis by (simp add: prodinf_exp) qed lemma has_prod_imp_sums_ln_real: fixes f :: "nat \ real" assumes "raw_has_prod f 0 p" and 0: "\x. f x > 0" shows "(\i. ln (f i)) sums (ln p)" proof - have "p > 0" using assms unfolding prod_defs by (metis LIMSEQ_prod_nonneg less_eq_real_def) then show ?thesis using assms continuous_on_ln [of "{0<..}" "\x. x"] using continuous_on_tendsto_compose [of "{0<..}" ln "(\n. prod f {..n})" p] by (auto simp: prod_defs sums_def_le ln_prod order_tendstoD) qed lemma summable_ln_real: fixes f :: "nat \ real" assumes f: "convergent_prod f" and 0: "\x. f x > 0" shows "summable (\i. 
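
The equivalence just proved hinges on the elementary sandwich supplied by sum_le_prod and prod_le_exp_sum: for \(a_k \ge 0\),

\[ \sum_{k\le n} a_k \;\le\; \prod_{k\le n}(1+a_k) \;\le\; \exp\Bigl(\sum_{k\le n} a_k\Bigr), \]

the right-hand bound because \(1+x \le e^x\). Both the partial sums and the partial products are increasing, so one sequence is bounded exactly when the other is, and for monotone real sequences boundedness is convergence.
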
ln (f i))" proof - obtain M p where "raw_has_prod f M p" using f convergent_prod_def by blast then consider i where "i real" assumes f: "convergent_prod f" and 0: "\x. f x > 0" shows "suminf (\i. ln (f i)) = ln (prodinf f)" proof - have "f has_prod prodinf f" by (simp add: f has_prod_iff) then have "raw_has_prod f 0 (prodinf f)" by (metis "0" has_prod_def less_irrefl) then have "(\i. ln (f i)) sums ln (prodinf f)" using "0" has_prod_imp_sums_ln_real by blast then show ?thesis by (rule sums_unique [symmetric]) qed lemma prodinf_exp_real: fixes f :: "nat \ real" assumes f: "convergent_prod f" and 0: "\x. f x > 0" shows "prodinf f = exp (suminf (\i. ln (f i)))" by (simp add: "0" f less_0_prodinf suminf_ln_real) theorem Ln_prodinf_complex: fixes z :: "nat \ complex" assumes z: "\j. z j \ 0" and \: "\ \ 0" shows "((\n. \j\n. z j) \ \) \ (\k. (\n. (\j\n. Ln (z j))) \ Ln \ + of_int k * (of_real(2*pi) * \))" (is "?lhs = ?rhs") proof assume L: ?lhs have pnz: "(\j\n. z j) \ 0" for n using z by auto define \ where "\ \ Arg \ + 2*pi" then have "\ > pi" using Arg_def mpi_less_Im_Ln by fastforce have \_eq: "\ = cmod \ * exp (\ * \)" using Arg_def Arg_eq \ unfolding \_def by (simp add: algebra_simps exp_add) define \ where "\ \ \n. THE t. is_Arg (\j\n. z j) t \ t \ {\-pi<..\+pi}" have uniq: "\!s. is_Arg (\j\n. z j) s \ s \ {\-pi<..\+pi}" for n using Argument_exists_unique [OF pnz] by metis have \: "is_Arg (\j\n. z j) (\ n)" and \_interval: "\ n \ {\-pi<..\+pi}" for n unfolding \_def using theI' [OF uniq] by metis+ have \_pos: "\j. \ j > 0" using \_interval \\ > pi\ by simp (meson diff_gt_0_iff_gt less_trans) have "(\j\n. z j) = cmod (\j\n. z j) * exp (\ * \ n)" for n using \ by (auto simp: is_Arg_def) then have eq: "(\n. \j\n. z j) = (\n. cmod (\j\n. z j) * exp (\ * \ n))" by simp then have "(\n. (cmod (\j\n. z j)) * exp (\ * (\ n))) \ \" using L by force then obtain k where k: "(\j. \ j - of_int (k j) * (2 * pi)) \ \" using L by (subst (asm) \_eq) (auto simp add: eq z \ polar_convergence) moreover have "\\<^sub>F n in sequentially. k n = 0" proof - have *: "kj = 0" if "dist (vj - real_of_int kj * 2) V < 1" "vj \ {V - 1<..V + 1}" for kj vj V using that by (auto simp: dist_norm) have "\\<^sub>F j in sequentially. dist (\ j - of_int (k j) * (2 * pi)) \ < pi" using tendstoD [OF k] pi_gt_zero by blast then show ?thesis proof (rule eventually_mono) fix j assume d: "dist (\ j - real_of_int (k j) * (2 * pi)) \ < pi" show "k j = 0" by (rule * [of "\ j/pi" _ "\/pi"]) (use \_interval [of j] d in \simp_all add: divide_simps dist_norm\) qed qed ultimately have \to\: "\ \ \" apply (simp only: tendsto_def) apply (erule all_forward imp_forward asm_rl)+ apply (drule (1) eventually_conj) apply (auto elim: eventually_mono) done then have to0: "(\n. \\ (Suc n) - \ n\) \ 0" by (metis (full_types) diff_self filterlim_sequentially_Suc tendsto_diff tendsto_rabs_zero) have "\k. Im (\j\n. Ln (z j)) - of_int k * (2*pi) = \ n" for n proof (rule is_Arg_exp_diff_2pi) show "is_Arg (exp (\j\n. Ln (z j))) (\ n)" using pnz \ by (simp add: is_Arg_def exp_sum prod_norm) qed then have "\k. (\j\n. Im (Ln (z j))) = \ n + of_int k * (2*pi)" for n by (simp add: algebra_simps) then obtain k where k: "\n. (\j\n. Im (Ln (z j))) = \ n + of_int (k n) * (2*pi)" by metis obtain K where "\\<^sub>F n in sequentially. k n = K" proof - have k_le: "(2*pi) * \k (Suc n) - k n\ \ \\ (Suc n) - \ n\ + \Im (Ln (z (Suc n)))\" for n proof - have "(\j\Suc n. Im (Ln (z j))) - (\j\n. 
Im (Ln (z j))) = Im (Ln (z (Suc n)))" by simp then show ?thesis using k [of "Suc n"] k [of n] by (auto simp: abs_if algebra_simps) qed have "z \ 1" using L \ convergent_prod_iff_nz_lim z by (blast intro: convergent_prod_imp_LIMSEQ) with z have "(\n. Ln (z n)) \ Ln 1" using isCont_tendsto_compose [OF continuous_at_Ln] nonpos_Reals_one_I by blast then have "(\n. Ln (z n)) \ 0" by simp then have "(\n. \Im (Ln (z (Suc n)))\) \ 0" by (metis LIMSEQ_unique \z \ 1\ continuous_at_Ln filterlim_sequentially_Suc isCont_tendsto_compose nonpos_Reals_one_I tendsto_Im tendsto_rabs_zero_iff zero_complex.simps(2)) then have "\\<^sub>F n in sequentially. \Im (Ln (z (Suc n)))\ < 1" by (simp add: order_tendsto_iff) moreover have "\\<^sub>F n in sequentially. \\ (Suc n) - \ n\ < 1" using to0 by (simp add: order_tendsto_iff) ultimately have "\\<^sub>F n in sequentially. (2*pi) * \k (Suc n) - k n\ < 1 + 1" proof (rule eventually_elim2) fix n assume "\Im (Ln (z (Suc n)))\ < 1" and "\\ (Suc n) - \ n\ < 1" with k_le [of n] show "2 * pi * real_of_int \k (Suc n) - k n\ < 1 + 1" by linarith qed then have "\\<^sub>F n in sequentially. real_of_int\k (Suc n) - k n\ < 1" proof (rule eventually_mono) fix n :: "nat" assume "2 * pi * \k (Suc n) - k n\ < 1 + 1" then have "\k (Suc n) - k n\ < 2 / (2*pi)" by (simp add: field_simps) also have "... < 1" using pi_ge_two by auto finally show "real_of_int \k (Suc n) - k n\ < 1" . qed then obtain N where N: "\n. n\N \ \k (Suc n) - k n\ = 0" using eventually_sequentially less_irrefl of_int_abs by fastforce have "k (N+i) = k N" for i proof (induction i) case (Suc i) with N [of "N+i"] show ?case by auto qed simp then have "\n. n\N \ k n = k N" using le_Suc_ex by auto then show ?thesis by (force simp add: eventually_sequentially intro: that) qed with \to\ have "(\n. (\j\n. Im (Ln (z j)))) \ \ + of_int K * (2*pi)" by (simp add: k tendsto_add tendsto_mult tendsto_eventually) moreover have "(\n. (\k\n. Re (Ln (z k)))) \ Re (Ln \)" using assms continuous_imp_tendsto [OF isCont_ln tendsto_norm [OF L]] by (simp add: o_def flip: prod_norm ln_prod) ultimately show ?rhs by (rule_tac x="K+1" in exI) (auto simp: tendsto_complex_iff \_def Arg_def assms algebra_simps) next assume ?rhs then obtain r where r: "(\n. (\k\n. Ln (z k))) \ Ln \ + of_int r * (of_real(2*pi) * \)" .. have "(\n. exp (\k\n. Ln (z k))) \ \" using assms continuous_imp_tendsto [OF isCont_exp r] exp_integer_2pi [of r] by (simp add: o_def exp_add algebra_simps) moreover have "exp (\k\n. Ln (z k)) = (\k\n. z k)" for n by (simp add: exp_sum add_eq_0_iff assms) ultimately show ?lhs by auto qed text\Prop 17.2 of Bak and Newman, Complex Analysis, p.242\ proposition convergent_prod_iff_summable_complex: fixes z :: "nat \ complex" assumes "\k. z k \ 0" shows "convergent_prod (\k. z k) \ summable (\k. Ln (z k))" (is "?lhs = ?rhs") proof assume ?lhs then obtain p where p: "(\n. \k\n. z k) \ p" and "p \ 0" using convergent_prod_LIMSEQ prodinf_nonzero add_eq_0_iff assms by fastforce then show ?rhs using Ln_prodinf_complex assms by (auto simp: prodinf_nonzero summable_def sums_def_le) next assume R: ?rhs have "(\k\n. z k) = exp (\k\n. Ln (z k))" for n by (simp add: exp_sum add_eq_0_iff assms) then have "(\n. \k\n. z k) \ exp (suminf (\k. 
Ln (z k)))" using continuous_imp_tendsto [OF isCont_exp summable_LIMSEQ' [OF R]] by (simp add: o_def) then show ?lhs by (subst convergent_prod_iff_convergent) (auto simp: convergent_def tendsto_Lim assms add_eq_0_iff) qed text\Prop 17.3 of Bak and Newman, Complex Analysis\ proposition summable_imp_convergent_prod_complex: fixes z :: "nat \ complex" assumes z: "summable (\k. norm (z k))" and non0: "\k. z k \ -1" shows "convergent_prod (\k. 1 + z k)" proof - note if_cong [cong] power_Suc [simp del] obtain N where N: "\k. k\N \ norm (z k) < 1/2" using summable_LIMSEQ_zero [OF z] by (metis diff_zero dist_norm half_gt_zero_iff less_numeral_extra(1) lim_sequentially tendsto_norm_zero_iff) have "norm (Ln (1 + z k)) \ 2 * norm (z k)" if "k \ N" for k proof (cases "z k = 0") case False let ?f = "\i. cmod ((- 1) ^ i * z k ^ i / of_nat (Suc i))" have normf: "norm (?f n) \ (1 / 2) ^ n" for n proof - have "norm (?f n) = cmod (z k) ^ n / cmod (1 + of_nat n)" by (auto simp: norm_divide norm_mult norm_power) also have "\ \ cmod (z k) ^ n" by (auto simp: field_split_simps mult_le_cancel_left1 in_Reals_norm) also have "\ \ (1 / 2) ^ n" using N [OF that] by (simp add: power_mono) finally show "norm (?f n) \ (1 / 2) ^ n" . qed have summablef: "summable ?f" by (intro normf summable_comparison_test' [OF summable_geometric [of "1/2"]]) auto have "(\n. (- 1) ^ Suc n / of_nat n * z k ^ n) sums Ln (1 + z k)" using Ln_series [of "z k"] N that by fastforce then have *: "(\i. z k * (((- 1) ^ i * z k ^ i) / (Suc i))) sums Ln (1 + z k)" using sums_split_initial_segment [where n= 1] by (force simp: power_Suc mult_ac) then have "norm (Ln (1 + z k)) = norm (suminf (\i. z k * (((- 1) ^ i * z k ^ i) / (Suc i))))" using sums_unique by force also have "\ = norm (z k * suminf (\i. ((- 1) ^ i * z k ^ i) / (Suc i)))" apply (subst suminf_mult) using * False by (auto simp: sums_summable intro: summable_mult_D [of "z k"]) also have "\ = norm (z k) * norm (suminf (\i. ((- 1) ^ i * z k ^ i) / (Suc i)))" by (simp add: norm_mult) also have "\ \ norm (z k) * suminf (\i. norm (((- 1) ^ i * z k ^ i) / (Suc i)))" by (intro mult_left_mono summable_norm summablef) auto also have "\ \ norm (z k) * suminf (\i. (1/2) ^ i)" by (intro mult_left_mono suminf_le) (use summable_geometric [of "1/2"] summablef normf in auto) also have "\ \ norm (z k) * 2" using suminf_geometric [of "1/2::real"] by simp finally show ?thesis by (simp add: mult_ac) qed simp then have "summable (\k. Ln (1 + z k))" by (metis summable_comparison_test summable_mult z) with non0 show ?thesis by (simp add: add_eq_0_iff convergent_prod_iff_summable_complex) qed lemma summable_Ln_complex: fixes z :: "nat \ complex" assumes "convergent_prod z" "\k. z k \ 0" shows "summable (\k. Ln (z k))" using convergent_prod_def assms convergent_prod_iff_summable_complex by blast subsection\<^marker>\tag unimportant\ \Embeddings from the reals into some complete real normed field\ lemma tendsto_eq_of_real_lim: assumes "(\n. of_real (f n) :: 'a::{complete_space,real_normed_field}) \ q" shows "q = of_real (lim f)" proof - have "convergent (\n. of_real (f n) :: 'a)" using assms convergent_def by blast then have "convergent f" unfolding convergent_def by (simp add: convergent_eq_Cauchy Cauchy_def) then show ?thesis by (metis LIMSEQ_unique assms convergentD sequentially_bot tendsto_Lim tendsto_of_real) qed lemma tendsto_eq_of_real: assumes "(\n. 
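
The crux of summable_imp_convergent_prod_complex above is the logarithm estimate: for \(\lVert z\rVert \le 1/2\), factoring \(z\) out of the series \(\operatorname{Ln}(1+z)=\sum_{i\ge 1}(-1)^{i+1}z^i/i\) gives \(\operatorname{Ln}(1+z)=z\sum_{i\ge 0}(-1)^i z^i/(i+1)\), hence

\[ \lVert\operatorname{Ln}(1+z)\rVert \;\le\; \lVert z\rVert\sum_{i\ge 0}2^{-i} \;=\; 2\,\lVert z\rVert . \]

Summability of \(\sum_k\lVert z_k\rVert\) therefore transfers to \(\sum_k \operatorname{Ln}(1+z_k)\) by comparison, and the product converges by the preceding proposition.
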
of_real (f n) :: 'a::{complete_space,real_normed_field}) \ q" obtains r where "q = of_real r" using tendsto_eq_of_real_lim assms by blast lemma has_prod_of_real_iff [simp]: "(\n. of_real (f n) :: 'a::{complete_space,real_normed_field}) has_prod of_real c \ f has_prod c" (is "?lhs = ?rhs") proof assume ?lhs then show ?rhs apply (auto simp: prod_defs LIMSEQ_prod_0 tendsto_of_real_iff simp flip: of_real_prod) using tendsto_eq_of_real by (metis of_real_0 tendsto_of_real_iff) next assume ?rhs with tendsto_of_real_iff show ?lhs by (fastforce simp: prod_defs simp flip: of_real_prod) qed end diff --git a/src/HOL/Data_Structures/Set2_Join.thy b/src/HOL/Data_Structures/Set2_Join.thy --- a/src/HOL/Data_Structures/Set2_Join.thy +++ b/src/HOL/Data_Structures/Set2_Join.thy @@ -1,376 +1,377 @@ (* Author: Tobias Nipkow *) section "Join-Based Implementation of Sets" theory Set2_Join imports Isin2 begin text \This theory implements the set operations \insert\, \delete\, \union\, \inter\section and \diff\erence. The implementation is based on binary search trees. All operations are reduced to a single operation \join l x r\ that joins two BSTs \l\ and \r\ and an element \x\ such that \l < x < r\. The theory is based on theory \<^theory>\HOL-Data_Structures.Tree2\ where nodes have an additional field. This field is ignored here but it means that this theory can be instantiated with red-black trees (see theory \<^file>\Set2_Join_RBT.thy\) and other balanced trees. This approach is very concrete and fixes the type of trees. Alternatively, one could assume some abstract type \<^typ>\'t\ of trees with suitable decomposition and recursion operators on it.\ locale Set2_Join = fixes join :: "('a::linorder*'b) tree \ 'a \ ('a*'b) tree \ ('a*'b) tree" fixes inv :: "('a*'b) tree \ bool" assumes set_join: "set_tree (join l a r) = set_tree l \ {a} \ set_tree r" assumes bst_join: "bst (Node l (a, b) r) \ bst (join l a r)" assumes inv_Leaf: "inv \\" assumes inv_join: "\ inv l; inv r \ \ inv (join l a r)" assumes inv_Node: "\ inv (Node l (a,b) r) \ \ inv l \ inv r" begin declare set_join [simp] Let_def[simp] subsection "\split_min\" fun split_min :: "('a*'b) tree \ 'a \ ('a*'b) tree" where "split_min (Node l (a, _) r) = (if l = Leaf then (a,r) else let (m,l') = split_min l in (m, join l' a r))" lemma split_min_set: "\ split_min t = (m,t'); t \ Leaf \ \ m \ set_tree t \ set_tree t = {m} \ set_tree t'" proof(induction t arbitrary: t' rule: tree2_induct) case Node thus ?case by(auto split: prod.splits if_splits dest: inv_Node) next case Leaf thus ?case by simp qed lemma split_min_bst: "\ split_min t = (m,t'); bst t; t \ Leaf \ \ bst t' \ (\x \ set_tree t'. 
m < x)" proof(induction t arbitrary: t' rule: tree2_induct) case Node thus ?case by(fastforce simp: split_min_set bst_join split: prod.splits if_splits) next case Leaf thus ?case by simp qed lemma split_min_inv: "\ split_min t = (m,t'); inv t; t \ Leaf \ \ inv t'" proof(induction t arbitrary: t' rule: tree2_induct) case Node thus ?case by(auto simp: inv_join split: prod.splits if_splits dest: inv_Node) next case Leaf thus ?case by simp qed subsection "\join2\" -definition join2 :: "('a*'b) tree \ ('a*'b) tree \ ('a*'b) tree" where -"join2 l r = (if r = Leaf then l else let (m,r') = split_min r in join l m r')" +fun join2 :: "('a*'b) tree \ ('a*'b) tree \ ('a*'b) tree" where +"join2 l \\ = l" | +"join2 l r = (let (m,r') = split_min r in join l m r')" lemma set_join2[simp]: "set_tree (join2 l r) = set_tree l \ set_tree r" -by(simp add: join2_def split_min_set split: prod.split) +by(cases r)(simp_all add: split_min_set split: prod.split) lemma bst_join2: "\ bst l; bst r; \x \ set_tree l. \y \ set_tree r. x < y \ \ bst (join2 l r)" -by(simp add: join2_def bst_join split_min_set split_min_bst split: prod.split) +by(cases r)(simp_all add: bst_join split_min_set split_min_bst split: prod.split) lemma inv_join2: "\ inv l; inv r \ \ inv (join2 l r)" -by(simp add: join2_def inv_join split_min_set split_min_inv split: prod.split) +by(cases r)(simp_all add: inv_join split_min_set split_min_inv split: prod.split) subsection "\split\" fun split :: "('a*'b)tree \ 'a \ ('a*'b)tree \ bool \ ('a*'b)tree" where "split Leaf k = (Leaf, False, Leaf)" | "split (Node l (a, _) r) x = (case cmp x a of LT \ let (l1,b,l2) = split l x in (l1, b, join l2 a r) | GT \ let (r1,b,r2) = split r x in (join l a r1, b, r2) | EQ \ (l, True, r))" lemma split: "split t x = (l,b,r) \ bst t \ set_tree l = {a \ set_tree t. a < x} \ set_tree r = {a \ set_tree t. 
x < a} \ (b = (x \ set_tree t)) \ bst l \ bst r" proof(induction t arbitrary: l b r rule: tree2_induct) case Leaf thus ?case by simp next case (Node y a b z l c r) consider (LT) l1 xin l2 where "(l1,xin,l2) = split y x" and "split \y, (a, b), z\ x = (l1, xin, join l2 a z)" and "cmp x a = LT" | (GT) r1 xin r2 where "(r1,xin,r2) = split z x" and "split \y, (a, b), z\ x = (join y a r1, xin, r2)" and "cmp x a = GT" | (EQ) "split \y, (a, b), z\ x = (y, True, z)" and "cmp x a = EQ" by (force split: cmp_val.splits prod.splits if_splits) thus ?case proof cases case (LT l1 xin l2) with Node.IH(1)[OF \(l1,xin,l2) = split y x\[symmetric]] Node.prems show ?thesis by (force intro!: bst_join) next case (GT r1 xin r2) with Node.IH(2)[OF \(r1,xin,r2) = split z x\[symmetric]] Node.prems show ?thesis by (force intro!: bst_join) next case EQ with Node.prems show ?thesis by auto qed qed lemma split_inv: "split t x = (l,b,r) \ inv t \ inv l \ inv r" proof(induction t arbitrary: l b r rule: tree2_induct) case Leaf thus ?case by simp next case Node thus ?case by(force simp: inv_join split!: prod.splits if_splits dest!: inv_Node) qed declare split.simps[simp del] subsection "\insert\" definition insert :: "'a \ ('a*'b) tree \ ('a*'b) tree" where "insert x t = (let (l,_,r) = split t x in join l x r)" lemma set_tree_insert: "bst t \ set_tree (insert x t) = {x} \ set_tree t" by(auto simp add: insert_def split split: prod.split) lemma bst_insert: "bst t \ bst (insert x t)" by(auto simp add: insert_def bst_join dest: split split: prod.split) lemma inv_insert: "inv t \ inv (insert x t)" by(force simp: insert_def inv_join dest: split_inv split: prod.split) subsection "\delete\" definition delete :: "'a \ ('a*'b) tree \ ('a*'b) tree" where "delete x t = (let (l,_,r) = split t x in join2 l r)" lemma set_tree_delete: "bst t \ set_tree (delete x t) = set_tree t - {x}" by(auto simp: delete_def split split: prod.split) lemma bst_delete: "bst t \ bst (delete x t)" by(force simp add: delete_def intro: bst_join2 dest: split split: prod.split) lemma inv_delete: "inv t \ inv (delete x t)" by(force simp: delete_def inv_join2 dest: split_inv split: prod.split) subsection "\union\" fun union :: "('a*'b)tree \ ('a*'b)tree \ ('a*'b)tree" where "union t1 t2 = (if t1 = Leaf then t2 else if t2 = Leaf then t1 else case t1 of Node l1 (a, _) r1 \ let (l2,_ ,r2) = split t2 a; l' = union l1 l2; r' = union r1 r2 in join l' a r')" declare union.simps [simp del] lemma set_tree_union: "bst t2 \ set_tree (union t1 t2) = set_tree t1 \ set_tree t2" proof(induction t1 t2 rule: union.induct) case (1 t1 t2) then show ?case by (auto simp: union.simps[of t1 t2] split split: tree.split prod.split) qed lemma bst_union: "\ bst t1; bst t2 \ \ bst (union t1 t2)" proof(induction t1 t2 rule: union.induct) case (1 t1 t2) thus ?case by(fastforce simp: union.simps[of t1 t2] set_tree_union split intro!: bst_join split: tree.split prod.split) qed lemma inv_union: "\ inv t1; inv t2 \ \ inv (union t1 t2)" proof(induction t1 t2 rule: union.induct) case (1 t1 t2) thus ?case by(auto simp:union.simps[of t1 t2] inv_join split_inv split!: tree.split prod.split dest: inv_Node) qed subsection "\inter\" fun inter :: "('a*'b)tree \ ('a*'b)tree \ ('a*'b)tree" where "inter t1 t2 = (if t1 = Leaf then Leaf else if t2 = Leaf then Leaf else case t1 of Node l1 (a, _) r1 \ let (l2,b,r2) = split t2 a; l' = inter l1 l2; r' = inter r1 r2 in if b then join l' a r' else join2 l' r')" declare inter.simps [simp del] lemma set_tree_inter: "\ bst t1; bst t2 \ \ set_tree (inter t1 t2) = set_tree 
t1 \ set_tree t2" proof(induction t1 t2 rule: inter.induct) case (1 t1 t2) show ?case proof (cases t1 rule: tree2_cases) case Leaf thus ?thesis by (simp add: inter.simps) next case [simp]: (Node l1 a _ r1) show ?thesis proof (cases "t2 = Leaf") case True thus ?thesis by (simp add: inter.simps) next case False let ?L1 = "set_tree l1" let ?R1 = "set_tree r1" have *: "a \ ?L1 \ ?R1" using \bst t1\ by (fastforce) obtain l2 b r2 where sp: "split t2 a = (l2,b,r2)" using prod_cases3 by blast let ?L2 = "set_tree l2" let ?R2 = "set_tree r2" let ?A = "if b then {a} else {}" have t2: "set_tree t2 = ?L2 \ ?R2 \ ?A" and **: "?L2 \ ?R2 = {}" "a \ ?L2 \ ?R2" "?L1 \ ?R2 = {}" "?L2 \ ?R1 = {}" using split[OF sp] \bst t1\ \bst t2\ by (force, force, force, force, force) have IHl: "set_tree (inter l1 l2) = set_tree l1 \ set_tree l2" using "1.IH"(1)[OF _ False _ _ sp[symmetric]] "1.prems"(1,2) split[OF sp] by simp have IHr: "set_tree (inter r1 r2) = set_tree r1 \ set_tree r2" using "1.IH"(2)[OF _ False _ _ sp[symmetric]] "1.prems"(1,2) split[OF sp] by simp have "set_tree t1 \ set_tree t2 = (?L1 \ ?R1 \ {a}) \ (?L2 \ ?R2 \ ?A)" by(simp add: t2) also have "\ = (?L1 \ ?L2) \ (?R1 \ ?R2) \ ?A" using * ** by auto also have "\ = set_tree (inter t1 t2)" using IHl IHr sp inter.simps[of t1 t2] False by(simp) finally show ?thesis by simp qed qed qed lemma bst_inter: "\ bst t1; bst t2 \ \ bst (inter t1 t2)" proof(induction t1 t2 rule: inter.induct) case (1 t1 t2) thus ?case by(fastforce simp: inter.simps[of t1 t2] set_tree_inter split intro!: bst_join bst_join2 split: tree.split prod.split) qed lemma inv_inter: "\ inv t1; inv t2 \ \ inv (inter t1 t2)" proof(induction t1 t2 rule: inter.induct) case (1 t1 t2) thus ?case by(auto simp: inter.simps[of t1 t2] inv_join inv_join2 split_inv split!: tree.split prod.split dest: inv_Node) qed subsection "\diff\" fun diff :: "('a*'b)tree \ ('a*'b)tree \ ('a*'b)tree" where "diff t1 t2 = (if t1 = Leaf then Leaf else if t2 = Leaf then t1 else case t2 of Node l2 (a, _) r2 \ let (l1,_,r1) = split t1 a; l' = diff l1 l2; r' = diff r1 r2 in join2 l' r')" declare diff.simps [simp del] lemma set_tree_diff: "\ bst t1; bst t2 \ \ set_tree (diff t1 t2) = set_tree t1 - set_tree t2" proof(induction t1 t2 rule: diff.induct) case (1 t1 t2) show ?case proof (cases t2 rule: tree2_cases) case Leaf thus ?thesis by (simp add: diff.simps) next case [simp]: (Node l2 a _ r2) show ?thesis proof (cases "t1 = Leaf") case True thus ?thesis by (simp add: diff.simps) next case False let ?L2 = "set_tree l2" let ?R2 = "set_tree r2" obtain l1 b r1 where sp: "split t1 a = (l1,b,r1)" using prod_cases3 by blast let ?L1 = "set_tree l1" let ?R1 = "set_tree r1" let ?A = "if b then {a} else {}" have t1: "set_tree t1 = ?L1 \ ?R1 \ ?A" and **: "a \ ?L1 \ ?R1" "?L1 \ ?R2 = {}" "?L2 \ ?R1 = {}" using split[OF sp] \bst t1\ \bst t2\ by (force, force, force, force) have IHl: "set_tree (diff l1 l2) = set_tree l1 - set_tree l2" using "1.IH"(1)[OF False _ _ _ sp[symmetric]] "1.prems"(1,2) split[OF sp] by simp have IHr: "set_tree (diff r1 r2) = set_tree r1 - set_tree r2" using "1.IH"(2)[OF False _ _ _ sp[symmetric]] "1.prems"(1,2) split[OF sp] by simp have "set_tree t1 - set_tree t2 = (?L1 \ ?R1) - (?L2 \ ?R2 \ {a})" by(simp add: t1) also have "\ = (?L1 - ?L2) \ (?R1 - ?R2)" using ** by auto also have "\ = set_tree (diff t1 t2)" using IHl IHr sp diff.simps[of t1 t2] False by(simp) finally show ?thesis by simp qed qed qed lemma bst_diff: "\ bst t1; bst t2 \ \ bst (diff t1 t2)" proof(induction t1 t2 rule: diff.induct) case (1 t1 t2) 
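
union, inter and diff all follow one divide-and-conquer scheme: split one tree at the root of the other, recurse on the two halves, and reassemble with join when the root element is kept, or join2 when it is dropped. A Scala sketch of split and two of the operations, reusing Tree, join and join2 from the sketch above (again illustrative only):

  // split t at x: (elements < x, x present?, elements > x); t must be a BST
  def split[A](t: Tree[A], x: A)(implicit ord: Ordering[A]): (Tree[A], Boolean, Tree[A]) =
    t match {
      case Leaf => (Leaf, false, Leaf)
      case Node(l, a, r) =>
        if (ord.lt(x, a)) { val (l1, b, l2) = split(l, x); (l1, b, join(l2, a, r)) }
        else if (ord.lt(a, x)) { val (r1, b, r2) = split(r, x); (join(l, a, r1), b, r2) }
        else (l, true, r)
    }

  def union[A: Ordering](t1: Tree[A], t2: Tree[A]): Tree[A] = (t1, t2) match {
    case (Leaf, _) => t2
    case (_, Leaf) => t1
    case (Node(l1, a, r1), _) =>
      val (l2, _, r2) = split(t2, a)
      join(union(l1, l2), a, union(r1, r2))  // the root element is always kept
  }

  def inter[A: Ordering](t1: Tree[A], t2: Tree[A]): Tree[A] = (t1, t2) match {
    case (Leaf, _) | (_, Leaf) => Leaf
    case (Node(l1, a, r1), _) =>
      val (l2, b, r2) = split(t2, a)
      val (li, ri) = (inter(l1, l2), inter(r1, r2))
      if (b) join(li, a, ri) else join2(li, ri)  // keep a only if both sides have it
  }

diff has the same shape as inter, splitting t1 at the root of t2 and always reassembling with join2.
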
thus ?case by(fastforce simp: diff.simps[of t1 t2] set_tree_diff split intro!: bst_join bst_join2 split: tree.split prod.split) qed lemma inv_diff: "\ inv t1; inv t2 \ \ inv (diff t1 t2)" proof(induction t1 t2 rule: diff.induct) case (1 t1 t2) thus ?case by(auto simp: diff.simps[of t1 t2] inv_join inv_join2 split_inv split!: tree.split prod.split dest: inv_Node) qed text \Locale \<^locale>\Set2_Join\ implements locale \<^locale>\Set2\:\ sublocale Set2 where empty = Leaf and insert = insert and delete = delete and isin = isin and union = union and inter = inter and diff = diff and set = set_tree and invar = "\t. inv t \ bst t" proof (standard, goal_cases) case 1 show ?case by (simp) next case 2 thus ?case by(simp add: isin_set_tree) next case 3 thus ?case by (simp add: set_tree_insert) next case 4 thus ?case by (simp add: set_tree_delete) next case 5 thus ?case by (simp add: inv_Leaf) next case 6 thus ?case by (simp add: bst_insert inv_insert) next case 7 thus ?case by (simp add: bst_delete inv_delete) next case 8 thus ?case by(simp add: set_tree_union) next case 9 thus ?case by(simp add: set_tree_inter) next case 10 thus ?case by(simp add: set_tree_diff) next case 11 thus ?case by (simp add: bst_union inv_union) next case 12 thus ?case by (simp add: bst_inter inv_inter) next case 13 thus ?case by (simp add: bst_diff inv_diff) qed end interpretation unbal: Set2_Join where join = "\l x r. Node l (x, ()) r" and inv = "\t. True" proof (standard, goal_cases) case 1 show ?case by simp next case 2 thus ?case by simp next case 3 thus ?case by simp next case 4 thus ?case by simp next case 5 thus ?case by simp qed end \ No newline at end of file diff --git a/src/HOL/Library/Infinite_Set.thy b/src/HOL/Library/Infinite_Set.thy --- a/src/HOL/Library/Infinite_Set.thy +++ b/src/HOL/Library/Infinite_Set.thy @@ -1,614 +1,618 @@ (* Title: HOL/Library/Infinite_Set.thy Author: Stephan Merz *) section \Infinite Sets and Related Concepts\ theory Infinite_Set imports Main begin subsection \The set of natural numbers is infinite\ lemma infinite_nat_iff_unbounded_le: "infinite S \ (\m. \n\m. n \ S)" for S :: "nat set" using frequently_cofinite[of "\x. x \ S"] by (simp add: cofinite_eq_sequentially frequently_def eventually_sequentially) lemma infinite_nat_iff_unbounded: "infinite S \ (\m. \n>m. n \ S)" for S :: "nat set" using frequently_cofinite[of "\x. x \ S"] by (simp add: cofinite_eq_sequentially frequently_def eventually_at_top_dense) lemma finite_nat_iff_bounded: "finite S \ (\k. S \ {.. (\k. S \ {.. k})" for S :: "nat set" using infinite_nat_iff_unbounded[of S] by (simp add: subset_eq) (metis not_le) lemma finite_nat_bounded: "finite S \ \k. S \ {.. For a set of natural numbers to be infinite, it is enough to know that for any number larger than some \k\, there is some larger number that is an element of the set. \ lemma unbounded_k_infinite: "\m>k. \n>m. 
n \ S \ infinite (S::nat set)" apply (clarsimp simp add: finite_nat_set_iff_bounded) apply (drule_tac x="Suc (max m k)" in spec) using less_Suc_eq apply fastforce done lemma nat_not_finite: "finite (UNIV::nat set) \ R" by simp lemma range_inj_infinite: fixes f :: "nat \ 'a" assumes "inj f" shows "infinite (range f)" proof assume "finite (range f)" from this assms have "finite (UNIV::nat set)" by (rule finite_imageD) then show False by simp qed subsection \The set of integers is also infinite\ lemma infinite_int_iff_infinite_nat_abs: "infinite S \ infinite ((nat \ abs) ` S)" for S :: "int set" proof (unfold Not_eq_iff, rule iffI) assume "finite ((nat \ abs) ` S)" then have "finite (nat ` (abs ` S))" by (simp add: image_image cong: image_cong) moreover have "inj_on nat (abs ` S)" by (rule inj_onI) auto ultimately have "finite (abs ` S)" by (rule finite_imageD) then show "finite S" by (rule finite_image_absD) qed simp proposition infinite_int_iff_unbounded_le: "infinite S \ (\m. \n. \n\ \ m \ n \ S)" for S :: "int set" by (simp add: infinite_int_iff_infinite_nat_abs infinite_nat_iff_unbounded_le o_def image_def) (metis abs_ge_zero nat_le_eq_zle le_nat_iff) proposition infinite_int_iff_unbounded: "infinite S \ (\m. \n. \n\ > m \ n \ S)" for S :: "int set" by (simp add: infinite_int_iff_infinite_nat_abs infinite_nat_iff_unbounded o_def image_def) (metis (full_types) nat_le_iff nat_mono not_le) proposition finite_int_iff_bounded: "finite S \ (\k. abs ` S \ {.. (\k. abs ` S \ {.. k})" for S :: "int set" using infinite_int_iff_unbounded[of S] by (simp add: subset_eq) (metis not_le) subsection \Infinitely Many and Almost All\ text \ We often need to reason about the existence of infinitely many (resp., all but finitely many) objects satisfying some predicate, so we introduce corresponding binders and their proof rules. \ lemma not_INFM [simp]: "\ (INFM x. P x) \ (MOST x. \ P x)" by (rule not_frequently) lemma not_MOST [simp]: "\ (MOST x. P x) \ (INFM x. \ P x)" by (rule not_eventually) lemma INFM_const [simp]: "(INFM x::'a. P) \ P \ infinite (UNIV::'a set)" by (simp add: frequently_const_iff) lemma MOST_const [simp]: "(MOST x::'a. P) \ P \ finite (UNIV::'a set)" by (simp add: eventually_const_iff) lemma INFM_imp_distrib: "(INFM x. P x \ Q x) \ ((MOST x. P x) \ (INFM x. Q x))" by (rule frequently_imp_iff) lemma MOST_imp_iff: "MOST x. P x \ (MOST x. P x \ Q x) \ (MOST x. Q x)" by (auto intro: eventually_rev_mp eventually_mono) lemma INFM_conjI: "INFM x. P x \ MOST x. Q x \ INFM x. P x \ Q x" by (rule frequently_rev_mp[of P]) (auto elim: eventually_mono) text \Properties of quantifiers with injective functions.\ lemma INFM_inj: "INFM x. P (f x) \ inj f \ INFM x. P x" using finite_vimageI[of "{x. P x}" f] by (auto simp: frequently_cofinite) lemma MOST_inj: "MOST x. P x \ inj f \ MOST x. P (f x)" using finite_vimageI[of "{x. \ P x}" f] by (auto simp: eventually_cofinite) text \Properties of quantifiers with singletons.\ lemma not_INFM_eq [simp]: "\ (INFM x. x = a)" "\ (INFM x. a = x)" unfolding frequently_cofinite by simp_all lemma MOST_neq [simp]: "MOST x. x \ a" "MOST x. a \ x" unfolding eventually_cofinite by simp_all lemma INFM_neq [simp]: "(INFM x::'a. x \ a) \ infinite (UNIV::'a set)" "(INFM x::'a. a \ x) \ infinite (UNIV::'a set)" unfolding frequently_cofinite by simp_all lemma MOST_eq [simp]: "(MOST x::'a. x = a) \ finite (UNIV::'a set)" "(MOST x::'a. a = x) \ finite (UNIV::'a set)" unfolding eventually_cofinite by simp_all lemma MOST_eq_imp: "MOST x. x = a \ P x" "MOST x. 
a = x \ P x" unfolding eventually_cofinite by simp_all text \Properties of quantifiers over the naturals.\ lemma MOST_nat: "(\\<^sub>\n. P n) \ (\m. \n>m. P n)" for P :: "nat \ bool" by (auto simp add: eventually_cofinite finite_nat_iff_bounded_le subset_eq simp flip: not_le) lemma MOST_nat_le: "(\\<^sub>\n. P n) \ (\m. \n\m. P n)" for P :: "nat \ bool" by (auto simp add: eventually_cofinite finite_nat_iff_bounded subset_eq simp flip: not_le) lemma INFM_nat: "(\\<^sub>\n. P n) \ (\m. \n>m. P n)" for P :: "nat \ bool" by (simp add: frequently_cofinite infinite_nat_iff_unbounded) lemma INFM_nat_le: "(\\<^sub>\n. P n) \ (\m. \n\m. P n)" for P :: "nat \ bool" by (simp add: frequently_cofinite infinite_nat_iff_unbounded_le) lemma MOST_INFM: "infinite (UNIV::'a set) \ MOST x::'a. P x \ INFM x::'a. P x" by (simp add: eventually_frequently) lemma MOST_Suc_iff: "(MOST n. P (Suc n)) \ (MOST n. P n)" by (simp add: cofinite_eq_sequentially) lemma MOST_SucI: "MOST n. P n \ MOST n. P (Suc n)" and MOST_SucD: "MOST n. P (Suc n) \ MOST n. P n" by (simp_all add: MOST_Suc_iff) lemma MOST_ge_nat: "MOST n::nat. m \ n" by (simp add: cofinite_eq_sequentially) \ \legacy names\ lemma Inf_many_def: "Inf_many P \ infinite {x. P x}" by (fact frequently_cofinite) lemma Alm_all_def: "Alm_all P \ \ (INFM x. \ P x)" by simp lemma INFM_iff_infinite: "(INFM x. P x) \ infinite {x. P x}" by (fact frequently_cofinite) lemma MOST_iff_cofinite: "(MOST x. P x) \ finite {x. \ P x}" by (fact eventually_cofinite) lemma INFM_EX: "(\\<^sub>\x. P x) \ (\x. P x)" by (fact frequently_ex) lemma ALL_MOST: "\x. P x \ \\<^sub>\x. P x" by (fact always_eventually) lemma INFM_mono: "\\<^sub>\x. P x \ (\x. P x \ Q x) \ \\<^sub>\x. Q x" by (fact frequently_elim1) lemma MOST_mono: "\\<^sub>\x. P x \ (\x. P x \ Q x) \ \\<^sub>\x. Q x" by (fact eventually_mono) lemma INFM_disj_distrib: "(\\<^sub>\x. P x \ Q x) \ (\\<^sub>\x. P x) \ (\\<^sub>\x. Q x)" by (fact frequently_disj_iff) lemma MOST_rev_mp: "\\<^sub>\x. P x \ \\<^sub>\x. P x \ Q x \ \\<^sub>\x. Q x" by (fact eventually_rev_mp) lemma MOST_conj_distrib: "(\\<^sub>\x. P x \ Q x) \ (\\<^sub>\x. P x) \ (\\<^sub>\x. Q x)" by (fact eventually_conj_iff) lemma MOST_conjI: "MOST x. P x \ MOST x. Q x \ MOST x. P x \ Q x" by (fact eventually_conj) lemma INFM_finite_Bex_distrib: "finite A \ (INFM y. \x\A. P x y) \ (\x\A. INFM y. P x y)" by (fact frequently_bex_finite_distrib) lemma MOST_finite_Ball_distrib: "finite A \ (MOST y. \x\A. P x y) \ (\x\A. MOST y. P x y)" by (fact eventually_ball_finite_distrib) lemma INFM_E: "INFM x. P x \ (\x. P x \ thesis) \ thesis" by (fact frequentlyE) lemma MOST_I: "(\x. P x) \ MOST x. P x" by (rule eventuallyI) lemmas MOST_iff_finiteNeg = MOST_iff_cofinite subsection \Enumeration of an Infinite Set\ text \The set's element type must be wellordered (e.g. the natural numbers).\ text \ Could be generalized to \<^prop>\enumerate' S n = (SOME t. t \ s \ finite {s\S. s < t} \ card {s\S. s < t} = n)\. \ primrec (in wellorder) enumerate :: "'a set \ nat \ 'a" where enumerate_0: "enumerate S 0 = (LEAST n. n \ S)" | enumerate_Suc: "enumerate S (Suc n) = enumerate (S - {LEAST n. 
n \ S}) n" lemma enumerate_Suc': "enumerate S (Suc n) = enumerate (S - {enumerate S 0}) n" by simp lemma enumerate_in_set: "infinite S \ enumerate S n \ S" proof (induct n arbitrary: S) case 0 then show ?case by (fastforce intro: LeastI dest!: infinite_imp_nonempty) next case (Suc n) then show ?case by simp (metis DiffE infinite_remove) qed declare enumerate_0 [simp del] enumerate_Suc [simp del] lemma enumerate_step: "infinite S \ enumerate S n < enumerate S (Suc n)" proof (induction n arbitrary: S) case 0 then have "enumerate S 0 \ enumerate S (Suc 0)" by (simp add: enumerate_0 Least_le enumerate_in_set) moreover have "enumerate (S - {enumerate S 0}) 0 \ S - {enumerate S 0}" by (meson "0.prems" enumerate_in_set infinite_remove) then have "enumerate S 0 \ enumerate (S - {enumerate S 0}) 0" by auto ultimately show ?case by (simp add: enumerate_Suc') next case (Suc n) then show ?case by (simp add: enumerate_Suc') qed lemma enumerate_mono: "m < n \ infinite S \ enumerate S m < enumerate S n" by (induct m n rule: less_Suc_induct) (auto intro: enumerate_step) lemma enumerate_mono_iff [simp]: "infinite S \ enumerate S m < enumerate S n \ m < n" by (metis enumerate_mono less_asym less_linear) +lemma enumerate_mono_le_iff [simp]: + "infinite S \ enumerate S m \ enumerate S n \ m \ n" + by (meson enumerate_mono_iff not_le) + lemma le_enumerate: assumes S: "infinite S" shows "n \ enumerate S n" using S proof (induct n) case 0 then show ?case by simp next case (Suc n) then have "n \ enumerate S n" by simp also note enumerate_mono[of n "Suc n", OF _ \infinite S\] finally show ?case by simp qed lemma infinite_enumerate: assumes fS: "infinite S" shows "\r::nat\nat. strict_mono r \ (\n. r n \ S)" unfolding strict_mono_def using enumerate_in_set[OF fS] enumerate_mono[of _ _ S] fS by blast lemma enumerate_Suc'': fixes S :: "'a::wellorder set" assumes "infinite S" shows "enumerate S (Suc n) = (LEAST s. s \ S \ enumerate S n < s)" using assms proof (induct n arbitrary: S) case 0 then have "\s \ S. enumerate S 0 \ s" by (auto simp: enumerate.simps intro: Least_le) then show ?case unfolding enumerate_Suc' enumerate_0[of "S - {enumerate S 0}"] by (intro arg_cong[where f = Least] ext) auto next case (Suc n S) show ?case using enumerate_mono[OF zero_less_Suc \infinite S\, of n] \infinite S\ apply (subst (1 2) enumerate_Suc') apply (subst Suc) apply (use \infinite S\ in simp) apply (intro arg_cong[where f = Least] ext) apply (auto simp flip: enumerate_Suc') done qed lemma enumerate_Ex: fixes S :: "nat set" assumes S: "infinite S" and s: "s \ S" shows "\n. enumerate S n = s" using s proof (induct s rule: less_induct) case (less s) show ?case proof (cases "\y\S. y < s") case True let ?y = "Max {s'\S. s' < s}" from True have y: "\x. ?y < x \ (\s'\S. s' < s \ s' < x)" by (subst Max_less_iff) auto then have y_in: "?y \ {s'\S. s' < s}" by (intro Max_in) auto with less.hyps[of ?y] obtain n where "enumerate S n = ?y" by auto with S have "enumerate S (Suc n) = s" by (auto simp: y less enumerate_Suc'' intro!: Least_equality) then show ?thesis by auto next case False then have "\t\S. s \ t" by auto with \s \ S\ show ?thesis by (auto intro!: exI[of _ 0] Least_equality simp: enumerate_0) qed qed lemma inj_enumerate: fixes S :: "'a::wellorder set" assumes S: "infinite S" shows "inj (enumerate S)" unfolding inj_on_def proof clarsimp show "\x y. 
enumerate S x = enumerate S y \ x = y" by (metis neq_iff enumerate_mono[OF _ \infinite S\]) qed text \To generalise this, we'd need a condition that all initial segments were finite\ lemma bij_enumerate: fixes S :: "nat set" assumes S: "infinite S" shows "bij_betw (enumerate S) UNIV S" proof - have "\s \ S. \i. enumerate S i = s" using enumerate_Ex[OF S] by auto moreover note \infinite S\ inj_enumerate ultimately show ?thesis unfolding bij_betw_def by (auto intro: enumerate_in_set) qed lemma fixes S :: "nat set" assumes S: "infinite S" shows range_enumerate: "range (enumerate S) = S" and strict_mono_enumerate: "strict_mono (enumerate S)" by (auto simp add: bij_betw_imp_surj_on bij_enumerate assms strict_mono_def) text \A pair of weird and wonderful lemmas from HOL Light.\ lemma finite_transitivity_chain: assumes "finite A" and R: "\x. \ R x x" "\x y z. \R x y; R y z\ \ R x z" and A: "\x. x \ A \ \y. y \ A \ R x y" shows "A = {}" using \finite A\ A proof (induct A) case empty then show ?case by simp next case (insert a A) have False using R(1)[of a] R(2)[of _ a] insert(3,4) by blast thus ?case .. qed corollary Union_maximal_sets: assumes "finite \" shows "\{T \ \. \U\\. \ T \ U} = \\" (is "?lhs = ?rhs") proof show "?lhs \ ?rhs" by force show "?rhs \ ?lhs" proof (rule Union_subsetI) fix S assume "S \ \" have "{T \ \. S \ T} = {}" if "\ (\y. y \ {T \ \. \U\\. \ T \ U} \ S \ y)" proof - have \
: "\<And>x. x \<in> \<F> \<and> S \<subset> x \<Longrightarrow> \<exists>y. y \<in> \<F> \<and> S \<subset> y \<and> x \<subset> y" using that by (blast intro: dual_order.trans psubset_imp_subset) show ?thesis proof (rule finite_transitivity_chain [of _ "\<lambda>T U. S \<subset> T \<and> T \<subset> U"]) qed (use assms in \<open>auto intro: \<section>
\) qed with \S \ \\ show "\y. y \ {T \ \. \U\\. \ T \ U} \ S \ y" by blast qed qed subsection \Properties of @{term enumerate} on finite sets\ lemma finite_enumerate_in_set: "\finite S; n < card S\ \ enumerate S n \ S" proof (induction n arbitrary: S) case 0 then show ?case by (metis all_not_in_conv card.empty enumerate.simps(1) not_less0 wellorder_Least_lemma(1)) next case (Suc n) show ?case using Suc.prems Suc.IH [of "S - {LEAST n. n \ S}"] apply (simp add: enumerate.simps) by (metis Diff_empty Diff_insert0 Suc_lessD card.remove less_Suc_eq) qed lemma finite_enumerate_step: "\finite S; Suc n < card S\ \ enumerate S n < enumerate S (Suc n)" proof (induction n arbitrary: S) case 0 then have "enumerate S 0 \ enumerate S (Suc 0)" by (simp add: Least_le enumerate.simps(1) finite_enumerate_in_set) moreover have "enumerate (S - {enumerate S 0}) 0 \ S - {enumerate S 0}" by (metis 0 Suc_lessD Suc_less_eq card_Suc_Diff1 enumerate_in_set finite_enumerate_in_set) then have "enumerate S 0 \ enumerate (S - {enumerate S 0}) 0" by auto ultimately show ?case by (simp add: enumerate_Suc') next case (Suc n) then show ?case by (simp add: enumerate_Suc' finite_enumerate_in_set) qed lemma finite_enumerate_mono: "\m < n; finite S; n < card S\ \ enumerate S m < enumerate S n" by (induct m n rule: less_Suc_induct) (auto intro: finite_enumerate_step) lemma finite_enumerate_mono_iff [simp]: "\finite S; m < card S; n < card S\ \ enumerate S m < enumerate S n \ m < n" by (metis finite_enumerate_mono less_asym less_linear) lemma finite_le_enumerate: assumes "finite S" "n < card S" shows "n \ enumerate S n" using assms proof (induction n) case 0 then show ?case by simp next case (Suc n) then have "n \ enumerate S n" by simp also note finite_enumerate_mono[of n "Suc n", OF _ \finite S\] finally show ?case using Suc.prems(2) Suc_leI by blast qed lemma finite_enumerate: assumes fS: "finite S" shows "\r::nat\nat. strict_mono_on {.. (\n S)" unfolding strict_mono_def using finite_enumerate_in_set[OF fS] finite_enumerate_mono[of _ _ S] fS by (metis lessThan_iff strict_mono_on_def) lemma finite_enumerate_Suc'': fixes S :: "'a::wellorder set" assumes "finite S" "Suc n < card S" shows "enumerate S (Suc n) = (LEAST s. s \ S \ enumerate S n < s)" using assms proof (induction n arbitrary: S) case 0 then have "\s \ S. enumerate S 0 \ s" by (auto simp: enumerate.simps intro: Least_le) then show ?case unfolding enumerate_Suc' enumerate_0[of "S - {enumerate S 0}"] by (metis Diff_iff dual_order.strict_iff_order singletonD singletonI) next case (Suc n S) then have "Suc n < card (S - {enumerate S 0})" using Suc.prems(2) finite_enumerate_in_set by force then show ?case apply (subst (1 2) enumerate_Suc') apply (simp add: Suc) apply (intro arg_cong[where f = Least] HOL.ext) using finite_enumerate_mono[OF zero_less_Suc \finite S\, of n] Suc.prems by (auto simp flip: enumerate_Suc') qed lemma finite_enumerate_initial_segment: fixes S :: "'a::wellorder set" assumes "finite S" and n: "n < card (S \ {.. {.. S \ n < s) = (LEAST n. n \ S)" proof (rule Least_equality) have "\t. t \ S \ t < s" by (metis "0" card_gt_0_iff disjoint_iff_not_equal lessThan_iff) then show "(LEAST n. n \ S) \ S \ (LEAST n. n \ S) < s" by (meson LeastI Least_le le_less_trans) qed (simp add: Least_le) then show ?case by (auto simp: enumerate_0) next case (Suc n) then have less_card: "Suc n < card S" by (meson assms(1) card_mono inf_sup_ord(1) leD le_less_linear order.trans) obtain T where T: "T \ {s \ S. 
enumerate S n < s}" by (metis Infinite_Set.enumerate_step enumerate_in_set finite_enumerate_in_set finite_enumerate_step less_card mem_Collect_eq) have "(LEAST x. x \ S \ x < s \ enumerate S n < x) = (LEAST x. x \ S \ enumerate S n < x)" (is "_ = ?r") proof (intro Least_equality conjI) show "?r \ S" by (metis (mono_tags, lifting) LeastI mem_Collect_eq T) have "\ s \ ?r" using not_less_Least [of _ "\x. x \ S \ enumerate S n < x"] Suc assms by (metis (mono_tags, lifting) Int_Collect Suc_lessD finite_Int finite_enumerate_in_set finite_enumerate_step lessThan_def less_le_trans) then show "?r < s" by auto show "enumerate S n < ?r" by (metis (no_types, lifting) LeastI mem_Collect_eq T) qed (auto simp: Least_le) then show ?case using Suc assms by (simp add: finite_enumerate_Suc'' less_card) qed lemma finite_enumerate_Ex: fixes S :: "'a::wellorder set" assumes S: "finite S" and s: "s \ S" shows "\ny\S. y < s") case True let ?T = "S \ {..finite S\]) from True have y: "\x. Max ?T < x \ (\s'\S. s' < s \ s' < x)" by (subst Max_less_iff) (auto simp: \finite ?T\) then have y_in: "Max ?T \ {s'\S. s' < s}" using Max_in \finite ?T\ by fastforce with less.IH[of "Max ?T" ?T] obtain n where n: "enumerate ?T n = Max ?T" "n < card ?T" using \finite ?T\ by blast then have "Suc n < card S" using TS less_trans_Suc by blast with S n have "enumerate S (Suc n) = s" by (subst finite_enumerate_Suc'') (auto simp: y finite_enumerate_initial_segment less finite_enumerate_Suc'' intro!: Least_equality) then show ?thesis using \Suc n < card S\ by blast next case False then have "\t\S. s \ t" by auto moreover have "0 < card S" using card_0_eq less.prems by blast ultimately show ?thesis using \s \ S\ by (auto intro!: exI[of _ 0] Least_equality simp: enumerate_0) qed qed lemma finite_enum_subset: assumes "\i. i < card X \ enumerate X i = enumerate Y i" and "finite X" "finite Y" "card X \ card Y" shows "X \ Y" by (metis assms finite_enumerate_Ex finite_enumerate_in_set less_le_trans subsetI) lemma finite_enum_ext: assumes "\i. i < card X \ enumerate X i = enumerate Y i" and "finite X" "finite Y" "card X = card Y" shows "X = Y" by (intro antisym finite_enum_subset) (auto simp: assms) lemma finite_bij_enumerate: fixes S :: "'a::wellorder set" assumes S: "finite S" shows "bij_betw (enumerate S) {..n m. \n \ m; n < card S; m < card S\ \ enumerate S n \ enumerate S m" using finite_enumerate_mono[OF _ \finite S\] by (auto simp: neq_iff) then have "inj_on (enumerate S) {..s \ S. \ifinite S\ ultimately show ?thesis unfolding bij_betw_def by (auto intro: finite_enumerate_in_set) qed lemma ex_bij_betw_strict_mono_card: fixes M :: "'a::wellorder set" assumes "finite M" obtains h where "bij_betw h {..Lexicographic order on lists\ +text \This version prioritises length and can yield wellorderings\ + theory List_Lenlexorder imports Main begin instantiation list :: (ord) ord begin definition list_less_def: "xs < ys \ (xs, ys) \ lenlex {(u, v). u < v}" definition list_le_def: "(xs :: _ list) \ ys \ xs < ys \ xs = ys" instance .. end instance list :: (order) order proof have tr: "trans {(u, v::'a). u < v}" using trans_def by fastforce have \
: False if "(xs,ys) \<in> lenlex {(u, v). u < v}" "(ys,xs) \<in> lenlex {(u, v). u < v}" for xs ys :: "'a list" proof - have "(xs,xs) \<in> lenlex {(u, v). u < v}" using that transD [OF lenlex_transI [OF tr]] by blast then show False by (meson case_prodD lenlex_irreflexive less_irrefl mem_Collect_eq) qed show "xs \<le> xs" for xs :: "'a list" by (simp add: list_le_def) show "xs \<le> zs" if "xs \<le> ys" and "ys \<le> zs" for xs ys zs :: "'a list" using that transD [OF lenlex_transI [OF tr]] by (auto simp add: list_le_def list_less_def) show "xs = ys" if "xs \<le> ys" "ys \<le> xs" for xs ys :: "'a list" using \<section>
that list_le_def list_less_def by blast show "xs < ys \<longleftrightarrow> xs \<le> ys \<and> \<not> ys \<le> xs" for xs ys :: "'a list" by (auto simp add: list_less_def list_le_def dest: \<section>
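
The order just defined compares length first and is lexicographic only between lists of equal length; this is what makes the wellorder instance below possible, since the plain lexicographic order is not wellfounded on lists of naturals ([1] > [0,1] > [0,0,1] > ... is an infinite descending chain). A Scala sketch of the same comparison (names illustrative):

  // length-first, then pointwise lexicographic on equal lengths
  def lenlex[A](implicit ord: Ordering[A]): Ordering[List[A]] =
    new Ordering[List[A]] {
      def compare(xs: List[A], ys: List[A]): Int = {
        val c = Integer.compare(xs.length, ys.length)
        if (c != 0) c
        else xs.iterator.zip(ys.iterator)
          .map { case (x, y) => ord.compare(x, y) }
          .find(_ != 0)
          .getOrElse(0)
      }
    }

  val lt = lenlex[Int].lt _
  lt(List(9, 9), List(1, 2, 3))    // true: length 2 before length 3
  lt(List(1, 2, 3), List(1, 3, 0)) // true: equal length, 2 < 3 decides
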
) qed instance list :: (linorder) linorder proof fix xs ys :: "'a list" have "total (lenlex {(u, v::'a). u < v})" by (rule total_lenlex) (auto simp: total_on_def) then show "xs \ ys \ ys \ xs" by (auto simp add: total_on_def list_le_def list_less_def) qed +instance list :: (wellorder) wellorder +proof + fix P :: "'a list \ bool" and a + assume "\x. (\y. y < x \ P y) \ P x" + then show "P a" + unfolding list_less_def by (metis wf_lenlex wf_induct wf_lenlex wf) +qed + instantiation list :: (linorder) distrib_lattice begin definition "(inf :: 'a list \ _) = min" definition "(sup :: 'a list \ _) = max" instance by standard (auto simp add: inf_list_def sup_list_def max_min_distrib2) end lemma not_less_Nil [simp]: "\ x < []" by (simp add: list_less_def) lemma Nil_less_Cons [simp]: "[] < a # x" by (simp add: list_less_def) lemma Cons_less_Cons: "a # x < b # y \ length x < length y \ length x = length y \ (a < b \ a = b \ x < y)" using lenlex_length by (fastforce simp: list_less_def Cons_lenlex_iff) lemma le_Nil [simp]: "x \ [] \ x = []" unfolding list_le_def by (cases x) auto lemma Nil_le_Cons [simp]: "[] \ x" unfolding list_le_def by (cases x) auto lemma Cons_le_Cons: "a # x \ b # y \ length x < length y \ length x = length y \ (a < b \ a = b \ x \ y)" by (auto simp: list_le_def Cons_less_Cons) instantiation list :: (order) order_bot begin definition "bot = []" instance by standard (simp add: bot_list_def) end end diff --git a/src/HOL/ex/Word_Lsb_Msb.thy b/src/HOL/ex/Word_Lsb_Msb.thy --- a/src/HOL/ex/Word_Lsb_Msb.thy +++ b/src/HOL/ex/Word_Lsb_Msb.thy @@ -1,121 +1,146 @@ theory Word_Lsb_Msb imports "HOL-Library.Word" begin class word = ring_bit_operations + fixes word_length :: \'a itself \ nat\ assumes word_length_positive [simp]: \0 < word_length TYPE('a)\ - and possible_bit_msb [simp]: \possible_bit TYPE('a) (word_length TYPE('a) - Suc 0)\ - and not_possible_bit_length [simp]: \\ possible_bit TYPE('a) (word_length TYPE('a))\ + and possible_bit_msb: \possible_bit TYPE('a) (word_length TYPE('a) - Suc 0)\ + and not_possible_bit_length: \\ possible_bit TYPE('a) (word_length TYPE('a))\ begin lemma word_length_not_0 [simp]: \word_length TYPE('a) \ 0\ using word_length_positive by simp +lemma possible_bit_iff_less_word_length: + \possible_bit TYPE('a) n \ n < word_length TYPE('a)\ (is \?P \ ?Q\) +proof + assume \?P\ + show ?Q + proof (rule ccontr) + assume \\ n < word_length TYPE('a)\ + then have \word_length TYPE('a) \ n\ + by simp + with \?P\ have \possible_bit TYPE('a) (word_length TYPE('a))\ + by (rule possible_bit_less_imp) + with not_possible_bit_length show False .. 
+ qed +next + assume \?Q\ + then have \n \ word_length TYPE('a) - Suc 0\ + by simp + with possible_bit_msb show ?P + by (rule possible_bit_less_imp) +qed + end instantiation word :: (len) word begin definition word_length_word :: \'a word itself \ nat\ where [simp, code_unfold]: \word_length_word _ = LENGTH('a)\ instance by standard simp_all end context word begin context includes bit_operations_syntax begin abbreviation lsb :: \'a \ bool\ where \lsb \ odd\ definition msb :: \'a \ bool\ where \msb w = bit w (word_length TYPE('a) - Suc 0)\ lemma not_msb_0 [simp]: \\ msb 0\ by (simp add: msb_def) lemma msb_minus_1 [simp]: \msb (- 1)\ - by (simp add: msb_def) + by (simp add: msb_def possible_bit_iff_less_word_length) lemma msb_1_iff [simp]: \msb 1 \ word_length TYPE('a) = 1\ by (auto simp add: msb_def bit_simps le_less) lemma msb_minus_iff [simp]: \msb (- w) \ \ msb (w - 1)\ - by (simp add: msb_def bit_simps) + by (simp add: msb_def bit_simps possible_bit_iff_less_word_length) lemma msb_not_iff [simp]: \msb (NOT w) \ \ msb w\ - by (simp add: msb_def bit_simps) + by (simp add: msb_def bit_simps possible_bit_iff_less_word_length) lemma msb_and_iff [simp]: \msb (v AND w) \ msb v \ msb w\ by (simp add: msb_def bit_simps) lemma msb_or_iff [simp]: \msb (v OR w) \ msb v \ msb w\ by (simp add: msb_def bit_simps) lemma msb_xor_iff [simp]: \msb (v XOR w) \ \ (msb v \ msb w)\ by (simp add: msb_def bit_simps) lemma msb_exp_iff [simp]: \msb (2 ^ n) \ n = word_length TYPE('a) - Suc 0\ - by (simp add: msb_def bit_simps) + by (simp add: msb_def bit_simps possible_bit_iff_less_word_length) lemma msb_mask_iff [simp]: \msb (mask n) \ word_length TYPE('a) \ n\ - by (simp add: msb_def bit_simps less_diff_conv2 Suc_le_eq less_Suc_eq_le) + by (simp add: msb_def bit_simps less_diff_conv2 Suc_le_eq less_Suc_eq_le possible_bit_iff_less_word_length) lemma msb_set_bit_iff [simp]: \msb (set_bit n w) \ n = word_length TYPE('a) - Suc 0 \ msb w\ by (simp add: set_bit_eq_or ac_simps) lemma msb_unset_bit_iff [simp]: \msb (unset_bit n w) \ n \ word_length TYPE('a) - Suc 0 \ msb w\ by (simp add: unset_bit_eq_and_not ac_simps) lemma msb_flip_bit_iff [simp]: \msb (flip_bit n w) \ (n \ word_length TYPE('a) - Suc 0 \ msb w)\ by (auto simp add: flip_bit_eq_xor) lemma msb_push_bit_iff: \msb (push_bit n w) \ n < word_length TYPE('a) \ bit w (word_length TYPE('a) - Suc n)\ - by (simp add: msb_def bit_simps le_diff_conv2 Suc_le_eq) + by (simp add: msb_def bit_simps le_diff_conv2 Suc_le_eq possible_bit_iff_less_word_length) -(*lemma msb_drop_bit_iff [simp]: +lemma msb_drop_bit_iff [simp]: \msb (drop_bit n w) \ n = 0 \ msb w\ - apply (cases n) - apply simp_all - apply (auto simp add: msb_def bit_simps) - oops*) + by (cases n) + (auto simp add: msb_def bit_simps possible_bit_iff_less_word_length intro!: impossible_bit) lemma msb_take_bit_iff [simp]: \msb (take_bit n w) \ word_length TYPE('a) \ n \ msb w\ by (simp add: take_bit_eq_mask ac_simps) -(*lemma msb_signed_take_bit_iff: - \msb (signed_take_bit n w) \ P w n\ - unfolding signed_take_bit_def - apply (simp add: signed_take_bit_def not_le) - apply auto - oops*) +lemma msb_signed_take_bit_iff: + \msb (signed_take_bit n w) \ bit w (min n (word_length TYPE('a) - Suc 0))\ + by (simp add: msb_def bit_simps possible_bit_iff_less_word_length) + +definition signed_drop_bit :: \nat \ 'a \ 'a\ + where \signed_drop_bit n w = drop_bit n w + OR (of_bool (bit w (word_length TYPE('a) - Suc 0)) * NOT (mask (word_length TYPE('a) - Suc n)))\ + +lemma msb_signed_drop_bit_iff [simp]: + \msb 
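
signed_drop_bit, as just defined, is an arithmetic shift right: it drops the n low bits and replicates the old most significant bit into the vacated high positions, which is exactly why msb is preserved (the msb_signed_drop_bit_iff lemma whose statement follows). A Scala sketch on Int-encoded words; the explicit width parameter and the masking are illustrative assumptions, valid for 0 <= n < width < 32:

  def bit(w: Int, n: Int): Boolean = ((w >>> n) & 1) == 1
  def msb(w: Int, width: Int): Boolean = bit(w, width - 1)

  // drop n low bits, then OR in NOT (mask (width - 1 - n)) if the msb was set
  def signedDropBit(w: Int, n: Int, width: Int): Int = {
    val word    = w & ((1 << width) - 1)   // truncate to `width` bits
    val dropped = word >>> n               // drop_bit n w
    val fill    =
      if (msb(w, width)) ((1 << width) - 1) & ~((1 << (width - 1 - n)) - 1)
      else 0
    dropped | fill
  }

  msb(signedDropBit(0x80, 2, 8), 8) == msb(0x80, 8)  // true: 0x80 becomes 0xE0
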
+lemma msb_signed_drop_bit_iff [simp]: + \<open>msb (signed_drop_bit n w) \<longleftrightarrow> msb w\<close> + by (simp add: signed_drop_bit_def bit_simps not_le not_less) + (simp add: msb_def) end end end diff --git a/src/Pure/Admin/build_pdfjs.scala b/src/Pure/Admin/build_pdfjs.scala --- a/src/Pure/Admin/build_pdfjs.scala +++ b/src/Pure/Admin/build_pdfjs.scala @@ -1,103 +1,103 @@ /* Title: Pure/Admin/build_pdfjs.scala Author: Makarius Build Isabelle component for Mozilla PDF.js. See also: - https://github.com/mozilla/pdf.js - https://github.com/mozilla/pdf.js/releases - https://github.com/mozilla/pdf.js/wiki/Setup-PDF.js-in-a-website */ package isabelle object Build_PDFjs { /* build pdfjs component */ val default_url = "https://github.com/mozilla/pdf.js/releases/download" - val default_version = "2.12.313" + val default_version = "2.14.305" def build_pdfjs( base_url: String = default_url, version: String = default_version, target_dir: Path = Path.current, progress: Progress = new Progress ): Unit = { Isabelle_System.require_command("unzip", test = "-h") /* component name */ val component = "pdfjs-" + version val component_dir = Isabelle_System.new_directory(target_dir + Path.basic(component)) progress.echo("Component " + component_dir) /* download */ val download_url = base_url + "/v" + version Isabelle_System.with_tmp_file("archive", ext = "zip") { archive_file => - Isabelle_System.download_file(download_url + "/pdfjs-" + version + "-dist.zip", + Isabelle_System.download_file(download_url + "/pdfjs-" + version + "-legacy-dist.zip", archive_file, progress = progress) Isabelle_System.bash("unzip -x " + File.bash_path(archive_file), cwd = component_dir.file).check } /* settings */ val etc_dir = Isabelle_System.make_directory(component_dir + Path.basic("etc")) File.write(etc_dir + Path.basic("settings"), """# -*- shell-script -*- :mode=shellscript: ISABELLE_PDFJS_HOME="$COMPONENT" """) /* README */ File.write(component_dir + Path.basic("README"), """This is PDF.js from """ + download_url + """ Makarius """ + Date.Format.date(Date.now()) + "\n") } /* Isabelle tool wrapper */ val isabelle_tool = Isabelle_Tool("build_pdfjs", "build component for Mozilla PDF.js", Scala_Project.here, { args => var target_dir = Path.current var base_url = default_url var version = default_version val getopts = Getopts(""" Usage: isabelle build_pdfjs [OPTIONS] Options are: -D DIR target directory (default ".") -U URL download URL (default: """" + default_url + """") -V VERSION version (default: """" + default_version + """") Build component for PDF.js. """, "D:" -> (arg => target_dir = Path.explode(arg)), "U:" -> (arg => base_url = arg), "V:" -> (arg => version = arg)) val more_args = getopts(args) if (more_args.nonEmpty) getopts.usage() val progress = new Console_Progress() build_pdfjs(base_url = base_url, version = version, target_dir = target_dir, progress = progress) }) } diff --git a/src/Pure/General/bytes.scala b/src/Pure/General/bytes.scala --- a/src/Pure/General/bytes.scala +++ b/src/Pure/General/bytes.scala @@ -1,207 +1,206 @@ /* Title: Pure/General/bytes.scala Author: Makarius Immutable byte vectors versus UTF8 strings.
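As a quick orientation (an informal usage sketch, not part of this change;
all names are defined below):

  val b = Bytes("hello")                             // UTF-8 encoded content
  assert(b.text == "hello")                          // permissive UTF-8 decoding
  assert(Bytes.decode_base64(b.encode_base64) == b)  // base64 round-trip
  assert((b + Bytes(", world")).text == "hello, world")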
*/ package isabelle import java.io.{File => JFile, ByteArrayOutputStream, ByteArrayInputStream, OutputStream, InputStream, FileInputStream, FileOutputStream} import java.net.URL import org.tukaani.xz.{XZInputStream, XZOutputStream} object Bytes { val empty: Bytes = new Bytes(Array[Byte](), 0, 0) def apply(s: CharSequence): Bytes = { val str = s.toString if (str.isEmpty) empty else { val b = UTF8.bytes(str) new Bytes(b, 0, b.length) } } def apply(a: Array[Byte]): Bytes = apply(a, 0, a.length) def apply(a: Array[Byte], offset: Int, length: Int): Bytes = if (length == 0) empty else { val b = new Array[Byte](length) System.arraycopy(a, offset, b, 0, length) new Bytes(b, 0, b.length) } val newline: Bytes = apply("\n") /* base64 */ def decode_base64(s: String): Bytes = { val a = Base64.decode(s) new Bytes(a, 0, a.length) } /* read */ def read_stream(stream: InputStream, limit: Int = Integer.MAX_VALUE, hint: Int = 1024): Bytes = if (limit == 0) empty else { val out_size = (if (limit == Integer.MAX_VALUE) hint else limit) max 1024 val out = new ByteArrayOutputStream(out_size) val buf = new Array[Byte](8192) var m = 0 - var cont = true - while (cont) { + while ({ m = stream.read(buf, 0, buf.length min (limit - out.size)) if (m != -1) out.write(buf, 0, m) - cont = (m != -1 && limit > out.size) - } + m != -1 && limit > out.size + }) () new Bytes(out.toByteArray, 0, out.size) } def read(file: JFile): Bytes = { val length = file.length val limit = if (length < 0 || length > Integer.MAX_VALUE) Integer.MAX_VALUE else length.toInt using(new FileInputStream(file))(read_stream(_, limit = limit)) } def read(path: Path): Bytes = read(path.file) def read(url: URL): Bytes = using(url.openStream)(read_stream(_)) /* write */ def write(file: JFile, bytes: Bytes): Unit = using(new FileOutputStream(file))(bytes.write_stream(_)) def write(path: Path, bytes: Bytes): Unit = write(path.file, bytes) } final class Bytes private( protected val bytes: Array[Byte], protected val offset: Int, val length: Int) extends CharSequence { /* equality */ override def equals(that: Any): Boolean = { that match { case other: Bytes => if (this eq other) true else if (length != other.length) false else (0 until length).forall(i => bytes(offset + i) == other.bytes(other.offset + i)) case _ => false } } private lazy val hash: Int = { var h = 0 for (i <- offset until offset + length) { val b = bytes(i).asInstanceOf[Int] & 0xFF h = 31 * h + b } h } override def hashCode(): Int = hash /* content */ lazy val sha1_digest: SHA1.Digest = SHA1.digest(bytes) def is_empty: Boolean = length == 0 def iterator: Iterator[Byte] = for (i <- (offset until (offset + length)).iterator) yield bytes(i) def array: Array[Byte] = { val a = new Array[Byte](length) System.arraycopy(bytes, offset, a, 0, length) a } def text: String = UTF8.decode_permissive(this) def encode_base64: String = { val b = if (offset == 0 && length == bytes.length) bytes else Bytes(bytes, offset, length).bytes Base64.encode(b) } def maybe_encode_base64: (Boolean, String) = { val s = text if (this == Bytes(s)) (false, s) else (true, encode_base64) } override def toString: String = "Bytes(" + length + ")" def proper: Option[Bytes] = if (is_empty) None else Some(this) def proper_text: Option[String] = if (is_empty) None else Some(text) def +(other: Bytes): Bytes = if (other.is_empty) this else if (is_empty) other else { val new_bytes = new Array[Byte](length + other.length) System.arraycopy(bytes, offset, new_bytes, 0, length) System.arraycopy(other.bytes, other.offset, new_bytes, length, 
other.length) new Bytes(new_bytes, 0, new_bytes.length) } /* CharSequence operations */ def charAt(i: Int): Char = if (0 <= i && i < length) (bytes(offset + i).asInstanceOf[Int] & 0xFF).asInstanceOf[Char] else throw new IndexOutOfBoundsException def subSequence(i: Int, j: Int): Bytes = { if (0 <= i && i <= j && j <= length) new Bytes(bytes, offset + i, j - i) else throw new IndexOutOfBoundsException } def trim_line: Bytes = if (length >= 2 && charAt(length - 2) == 13 && charAt(length - 1) == 10) subSequence(0, length - 2) else if (length >= 1 && (charAt(length - 1) == 13 || charAt(length - 1) == 10)) subSequence(0, length - 1) else this /* streams */ def stream(): ByteArrayInputStream = new ByteArrayInputStream(bytes, offset, length) def write_stream(stream: OutputStream): Unit = stream.write(bytes, offset, length) /* XZ data compression */ def uncompress(cache: XZ.Cache = XZ.Cache()): Bytes = using(new XZInputStream(stream(), cache))(Bytes.read_stream(_, hint = length)) def compress(options: XZ.Options = XZ.options(), cache: XZ.Cache = XZ.Cache()): Bytes = { val result = new ByteArrayOutputStream(length) using(new XZOutputStream(result, options, cache))(write_stream(_)) new Bytes(result.toByteArray, 0, result.size) } def maybe_compress( options: XZ.Options = XZ.options(), cache: XZ.Cache = XZ.Cache() ) : (Boolean, Bytes) = { val compressed = compress(options = options, cache = cache) if (compressed.length < length) (true, compressed) else (false, this) } } diff --git a/src/Pure/General/file.scala b/src/Pure/General/file.scala --- a/src/Pure/General/file.scala +++ b/src/Pure/General/file.scala @@ -1,330 +1,336 @@ /* Title: Pure/General/file.scala Author: Makarius File-system operations. */ package isabelle import java.io.{BufferedWriter, OutputStreamWriter, FileOutputStream, BufferedOutputStream, OutputStream, InputStream, FileInputStream, BufferedInputStream, BufferedReader, InputStreamReader, File => JFile, IOException} import java.nio.file.{StandardOpenOption, Path => JPath, Files, SimpleFileVisitor, FileVisitOption, FileVisitResult} import java.nio.file.attribute.BasicFileAttributes -import java.net.{URL, MalformedURLException} +import java.net.{URI, URL, MalformedURLException} import java.util.zip.{GZIPInputStream, GZIPOutputStream} import java.util.EnumSet import org.tukaani.xz.{XZInputStream, XZOutputStream} import scala.collection.mutable object File { /* standard path (Cygwin or Posix) */ def standard_path(path: Path): String = path.expand.implode def standard_path(platform_path: String): String = isabelle.setup.Environment.standard_path(platform_path) def standard_path(file: JFile): String = standard_path(file.getPath) def standard_url(name: String): String = try { val url = new URL(name) if (url.getProtocol == "file" && Url.is_wellformed_file(name)) standard_path(Url.parse_file(name)) else name } catch { case _: MalformedURLException => standard_path(name) } /* platform path (Windows or Posix) */ def platform_path(standard_path: String): String = isabelle.setup.Environment.platform_path(standard_path) def platform_path(path: Path): String = platform_path(standard_path(path)) def platform_file(path: Path): JFile = new JFile(platform_path(path)) /* platform files */ def absolute(file: JFile): JFile = file.toPath.toAbsolutePath.normalize.toFile def absolute_name(file: JFile): String = absolute(file).getPath def canonical(file: JFile): JFile = file.getCanonicalFile def canonical_name(file: JFile): String = canonical(file).getPath def path(file: JFile): Path = 
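/* A hedged aside (example values assumed, Cygwin-on-Windows setting): the
   conversions above translate between "standard" Posix notation and the
   platform notation, roughly

     File.standard_path("C:\\Users\\me")         // "/cygdrive/c/Users/me" (assumed)
     File.platform_path("/cygdrive/c/Users/me")  // "C:\\Users\\me" (assumed)
*/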
Path.explode(standard_path(file)) def pwd(): Path = path(Path.current.absolute_file) + def uri(file: JFile): URI = file.toURI + def uri(path: Path): URI = path.file.toURI + + def url(file: JFile): URL = uri(file).toURL + def url(path: Path): URL = url(path.file) + /* relative paths */ def relative_path(base: Path, other: Path): Option[Path] = { val base_path = base.java_path val other_path = other.java_path if (other_path.startsWith(base_path)) Some(path(base_path.relativize(other_path).toFile)) else None } /* bash path */ def bash_path(path: Path): String = Bash.string(standard_path(path)) def bash_path(file: JFile): String = Bash.string(standard_path(file)) def bash_platform_path(path: Path): String = Bash.string(platform_path(path)) /* directory entries */ def check_dir(path: Path): Path = if (path.is_dir) path else error("No such directory: " + path) def check_file(path: Path): Path = if (path.is_file) path else error("No such file: " + path) /* directory content */ def read_dir(dir: Path): List[String] = { if (!dir.is_dir) error("No such directory: " + dir.toString) val files = dir.file.listFiles if (files == null) Nil else files.toList.map(_.getName).sorted } def get_dir(dir: Path): String = read_dir(dir).filter(name => (dir + Path.basic(name)).is_dir) match { case List(entry) => entry case dirs => error("Exactly one directory entry expected: " + commas_quote(dirs.sorted)) } def find_files( start: JFile, pred: JFile => Boolean = _ => true, include_dirs: Boolean = false, follow_links: Boolean = false ): List[JFile] = { val result = new mutable.ListBuffer[JFile] def check(file: JFile): Unit = if (pred(file)) result += file if (start.isFile) check(start) else if (start.isDirectory) { val options = if (follow_links) EnumSet.of(FileVisitOption.FOLLOW_LINKS) else EnumSet.noneOf(classOf[FileVisitOption]) Files.walkFileTree(start.toPath, options, Integer.MAX_VALUE, new SimpleFileVisitor[JPath] { override def preVisitDirectory( path: JPath, attrs: BasicFileAttributes ): FileVisitResult = { if (include_dirs) check(path.toFile) FileVisitResult.CONTINUE } override def visitFile( path: JPath, attrs: BasicFileAttributes ): FileVisitResult = { val file = path.toFile if (include_dirs || !file.isDirectory) check(file) FileVisitResult.CONTINUE } } ) } result.toList } /* read */ def read(file: JFile): String = Bytes.read(file).text def read(path: Path): String = read(path.file) def read_stream(reader: BufferedReader): String = { val output = new StringBuilder(100) var c = -1 while ({ c = reader.read; c != -1 }) output += c.toChar reader.close() output.toString } def read_stream(stream: InputStream): String = read_stream(new BufferedReader(new InputStreamReader(stream, UTF8.charset))) def read_gzip(file: JFile): String = read_stream(new GZIPInputStream(new BufferedInputStream(new FileInputStream(file)))) def read_gzip(path: Path): String = read_gzip(path.file) def read_xz(file: JFile): String = read_stream(new XZInputStream(new BufferedInputStream(new FileInputStream(file)))) def read_xz(path: Path): String = read_xz(path.file) /* read lines */ def read_line(reader: BufferedReader): Option[String] = { val line = try { reader.readLine} catch { case _: IOException => null } Option(line).map(Library.trim_line) } def read_lines(reader: BufferedReader, progress: String => Unit): List[String] = { val result = new mutable.ListBuffer[String] var line: Option[String] = None while ({ line = read_line(reader); line.isDefined }) { progress(line.get) result += line.get } reader.close() result.toList } /* write */ 
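// A hedged usage sketch (hypothetical file name, not part of the change):
// the write operations below pair with the read operations above, and the
// new File.uri/File.url helpers give the corresponding URI/URL view:
//
//   val path = Path.explode("$ISABELLE_TMP_PREFIX/example.txt")  // hypothetical
//   File.write(path, "hello")
//   assert(File.read(path) == "hello")
//   val url = File.url(path)  // "file:" URL via File.uri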
def writer(file: JFile): BufferedWriter = new BufferedWriter(new OutputStreamWriter(new FileOutputStream(file), UTF8.charset)) def write_file( file: JFile, text: String, make_stream: OutputStream => OutputStream ): Unit = { val stream = make_stream(new FileOutputStream(file)) using(new BufferedWriter(new OutputStreamWriter(stream, UTF8.charset)))(_.append(text)) } def write(file: JFile, text: String): Unit = write_file(file, text, s => s) def write(path: Path, text: String): Unit = write(path.file, text) def write_gzip(file: JFile, text: String): Unit = write_file(file, text, (s: OutputStream) => new GZIPOutputStream(new BufferedOutputStream(s))) def write_gzip(path: Path, text: String): Unit = write_gzip(path.file, text) def write_xz(file: JFile, text: String, options: XZ.Options): Unit = File.write_file(file, text, s => new XZOutputStream(new BufferedOutputStream(s), options)) def write_xz(file: JFile, text: String): Unit = write_xz(file, text, XZ.options()) def write_xz(path: Path, text: String, options: XZ.Options): Unit = write_xz(path.file, text, options) def write_xz(path: Path, text: String): Unit = write_xz(path, text, XZ.options()) def write_backup(path: Path, text: String): Unit = { if (path.is_file) Isabelle_System.move_file(path, path.backup) write(path, text) } def write_backup2(path: Path, text: String): Unit = { if (path.is_file) Isabelle_System.move_file(path, path.backup2) write(path, text) } /* append */ def append(file: JFile, text: String): Unit = Files.write(file.toPath, UTF8.bytes(text), StandardOpenOption.APPEND, StandardOpenOption.CREATE) def append(path: Path, text: String): Unit = append(path.file, text) /* change */ def change( path: Path, init: Boolean = false, strict: Boolean = false )(f: String => String): Unit = { if (!path.is_file && init) write(path, "") val x = read(path) val y = f(x) if (x != y) write(path, y) else if (strict) error("Unchanged file: " + path) } def change_lines(path: Path, init: Boolean = false, strict: Boolean = false)( f: List[String] => List[String]): Unit = change(path, init = init, strict = strict)(text => cat_lines(f(split_lines(text)))) /* eq */ def eq(file1: JFile, file2: JFile): Boolean = try { Files.isSameFile(file1.toPath, file2.toPath) } catch { case ERROR(_) => false } def eq(path1: Path, path2: Path): Boolean = eq(path1.file, path2.file) /* eq_content */ def eq_content(file1: JFile, file2: JFile): Boolean = if (eq(file1, file2)) true else if (file1.length != file2.length) false else Bytes.read(file1) == Bytes.read(file2) def eq_content(path1: Path, path2: Path): Boolean = eq_content(path1.file, path2.file) /* permissions */ def is_executable(path: Path): Boolean = { if (Platform.is_windows) Isabelle_System.bash("test -x " + bash_path(path)).check.ok else path.file.canExecute } def set_executable(path: Path, flag: Boolean): Unit = { if (Platform.is_windows && flag) Isabelle_System.chmod("a+x", path) else if (Platform.is_windows) Isabelle_System.chmod("a-x", path) else path.file.setExecutable(flag, false) } /* content */ object Content { def apply(path: Path, content: Bytes): Content_Bytes = new Content_Bytes(path, content) def apply(path: Path, content: String): Content_String = new Content_String(path, content) def apply(path: Path, content: XML.Body): Content_XML = new Content_XML(path, content) } trait Content { def path: Path def write(dir: Path): Unit override def toString: String = path.toString } final class Content_Bytes private[File](val path: Path, val content: Bytes) extends Content { def write(dir: Path): Unit = 
{ val full_path = dir + path Isabelle_System.make_directory(full_path.expand.dir) Bytes.write(full_path, content) } } final class Content_String private[File](val path: Path, val content: String) extends Content { def write(dir: Path): Unit = { val full_path = dir + path Isabelle_System.make_directory(full_path.expand.dir) File.write(full_path, content) } } final class Content_XML private[File](val path: Path, val content: XML.Body) { def output(out: XML.Body => String): Content_String = new Content_String(path, out(content)) } } diff --git a/src/Pure/General/sha1.scala b/src/Pure/General/sha1.scala --- a/src/Pure/General/sha1.scala +++ b/src/Pure/General/sha1.scala @@ -1,55 +1,54 @@ /* Title: Pure/General/sha1.scala Author: Makarius SHA-1 message digest according to RFC 3174. */ package isabelle import java.io.{File => JFile, FileInputStream} import java.security.MessageDigest import isabelle.setup.{Build => Setup_Build} object SHA1 { final class Digest private[SHA1](rep: String) { override def toString: String = rep override def hashCode: Int = rep.hashCode override def equals(that: Any): Boolean = that match { case other: Digest => rep == other.toString case _ => false } def shasum(name: String): String = rep + " " + name } def fake_digest(rep: String): Digest = new Digest(rep) def make_digest(body: MessageDigest => Unit): Digest = { val digest_body = new Setup_Build.Digest_Body { def apply(sha: MessageDigest): Unit = body(sha)} new Digest(Setup_Build.make_digest(digest_body)) } def digest(file: JFile): Digest = make_digest(sha => using(new FileInputStream(file)) { stream => val buf = new Array[Byte](65536) var m = 0 - var cont = true - while (cont) { + while ({ m = stream.read(buf, 0, buf.length) if (m != -1) sha.update(buf, 0, m) - cont = (m != -1) - } + m != -1 + }) () }) def digest(path: Path): Digest = digest(path.file) def digest(bytes: Array[Byte]): Digest = make_digest(_.update(bytes)) def digest(bytes: Bytes): Digest = bytes.sha1_digest def digest(string: String): Digest = digest(Bytes(string)) def digest_set(digests: List[Digest]): Digest = digest(cat_lines(digests.map(_.toString).sorted)) val digest_length: Int = digest("").toString.length } diff --git a/src/Pure/System/classpath.scala b/src/Pure/System/classpath.scala new file mode 100644 --- /dev/null +++ b/src/Pure/System/classpath.scala @@ -0,0 +1,95 @@ +/* Title: Pure/System/classpath.scala + Author: Makarius + +Java classpath and Scala services. 
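A usage sketch (informal; assumes the default java.class.path plus jars that
announce Isabelle/Scala services in their manifest):

  val classpath = Classpath()  // static jars from java.class.path
  val functions = classpath.make_services(classOf[Scala.Functions])
  // one instance per registered service that subclasses Scala.Functions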
+*/ + +package isabelle + + +import java.io.{File => JFile} +import java.nio.file.Files +import java.net.URLClassLoader + +import scala.jdk.CollectionConverters._ + + +object Classpath { + abstract class Service + type Service_Class = Class[Service] + + def apply( + jar_files: List[JFile] = Nil, + jar_contents: List[File.Content_Bytes] = Nil): Classpath = + { + val jar_files0 = + for { + s <- space_explode(JFile.pathSeparatorChar, System.getProperty("java.class.path", "")) + if s.nonEmpty + } yield File.absolute(new JFile(s)) + + val jar_files1 = + jar_files.flatMap(start => File.find_files(start, _.getName.endsWith(".jar"))) + .map(File.absolute) + + val tmp_jars = + for (jar <- jar_contents) yield { + val tmp_jar = Files.createTempFile("jar", "jar").toFile + tmp_jar.deleteOnExit() + Bytes.write(tmp_jar, jar.content) + tmp_jar + } + new Classpath(jar_files0 ::: jar_files1, tmp_jars) + } +} + +class Classpath private(static_jars: List[JFile], dynamic_jars: List[JFile]) { + def jars: List[JFile] = static_jars ::: dynamic_jars + override def toString: String = jars.mkString("Classpath(", ", ", ")") + + def platform_path: String = jars.map(_.getPath).mkString(JFile.pathSeparator) + + val class_loader: ClassLoader = + { + val this_class_loader = this.getClass.getClassLoader + if (dynamic_jars.isEmpty) this_class_loader + else { + new URLClassLoader(dynamic_jars.map(File.url).toArray, this_class_loader) { + override def finalize(): Unit = { + for (jar <- dynamic_jars) { + try { jar.delete() } + catch { case _: Throwable => } + } + } + } + } + } + + private def init_services(where: String, names: List[String]): List[Classpath.Service_Class] = { + for (name <- names) yield { + def err(msg: String): Nothing = + error("Bad Isabelle/Scala service " + quote(name) + " in " + where + "\n" + msg) + try { Class.forName(name, true, class_loader).asInstanceOf[Classpath.Service_Class] } + catch { + case _: ClassNotFoundException => err("Class not found") + case exn: Throwable => err(Exn.message(exn)) + } + } + } + + val services: List[Classpath.Service_Class] = + { + val variable = "ISABELLE_SCALA_SERVICES" + val services_env = + init_services(quote(variable), space_explode(':', Isabelle_System.getenv_strict(variable))) + val services_jars = + jars.flatMap(jar => + init_services(File.standard_path(jar), + isabelle.setup.Build.get_services(jar.toPath).asScala.toList)) + services_env ::: services_jars + } + + def make_services[C](c: Class[C]): List[C] = + for { c1 <- services if Library.is_subclass(c1, c) } + yield c1.getDeclaredConstructor().newInstance().asInstanceOf[C] +} diff --git a/src/Pure/System/isabelle_system.scala b/src/Pure/System/isabelle_system.scala --- a/src/Pure/System/isabelle_system.scala +++ b/src/Pure/System/isabelle_system.scala @@ -1,506 +1,476 @@ /* Title: Pure/System/isabelle_system.scala Author: Makarius Miscellaneous Isabelle system operations. 
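A usage sketch (informal; the environment value is an example only):

  Isabelle_System.init()  // idempotent: settings plus classpath
  val home = Isabelle_System.getenv_strict("ISABELLE_HOME")  // error if undefined
  val services = Isabelle_System.make_services(classOf[Scala.Functions])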
*/ package isabelle import java.util.{Map => JMap, HashMap} import java.io.{File => JFile, IOException} import java.nio.file.{Path => JPath, Files, SimpleFileVisitor, FileVisitResult, StandardCopyOption, FileSystemException} import java.nio.file.attribute.BasicFileAttributes -import scala.jdk.CollectionConverters._ - object Isabelle_System { /* settings environment */ def settings(putenv: List[(String, String)] = Nil): JMap[String, String] = { val env0 = isabelle.setup.Environment.settings() if (putenv.isEmpty) env0 else { val env = new HashMap(env0) for ((a, b) <- putenv) env.put(a, b) env } } def getenv(name: String, env: JMap[String, String] = settings()): String = Option(env.get(name)).getOrElse("") def getenv_strict(name: String, env: JMap[String, String] = settings()): String = proper_string(getenv(name, env)) getOrElse error("Undefined Isabelle environment variable: " + quote(name)) /* services */ - abstract class Service + type Service = Classpath.Service - @volatile private var _services: Option[List[Class[Service]]] = None + @volatile private var _classpath: Option[Classpath] = None - def services(): List[Class[Service]] = { - if (_services.isEmpty) init() // unsynchronized check - _services.get + def classpath(): Classpath = { + if (_classpath.isEmpty) init() // unsynchronized check + _classpath.get } - def make_services[C](c: Class[C]): List[C] = - for { c1 <- services() if Library.is_subclass(c1, c) } - yield c1.getDeclaredConstructor().newInstance().asInstanceOf[C] + def make_services[C](c: Class[C]): List[C] = classpath().make_services(c) - /* init settings + services */ - - private def init_services(where: String, names: List[String]): List[Class[Service]] = { - for (name <- names) yield { - def err(msg: String): Nothing = - error("Bad Isabelle/Scala service " + quote(name) + " in " + where + "\n" + msg) - try { Class.forName(name).asInstanceOf[Class[Service]] } - catch { - case _: ClassNotFoundException => err("Class not found") - case exn: Throwable => err(Exn.message(exn)) - } - } - } - - def init_services_env(): List[Class[Service]] = - { - val variable = "ISABELLE_SCALA_SERVICES" - init_services(quote(variable), space_explode(':', getenv_strict(variable))) - } - - def init_services_jar(jar: Path): List[Class[Service]] = - init_services(jar.toString, isabelle.setup.Build.get_services(jar.java_path).asScala.toList) - - def init_services_jar(platform_jar: String): List[Class[Service]] = - init_services_jar(Path.explode(File.standard_path(platform_jar))) + /* init settings + classpath */ def init(isabelle_root: String = "", cygwin_root: String = ""): Unit = { isabelle.setup.Environment.init(isabelle_root, cygwin_root) synchronized { - if (_services.isEmpty) { - _services = Some(init_services_env() ::: Scala.get_classpath().flatMap(init_services_jar)) - } + if (_classpath.isEmpty) _classpath = Some(Classpath()) } } /* getetc -- static distribution parameters */ def getetc(name: String, root: Path = Path.ISABELLE_HOME): Option[String] = { val path = root + Path.basic("etc") + Path.basic(name) if (path.is_file) { Library.trim_split_lines(File.read(path)) match { case Nil => None case List(s) => Some(s) case _ => error("Single line expected in " + path.absolute) } } else None } /* Isabelle distribution identification */ def isabelle_id(root: Path = Path.ISABELLE_HOME): String = getetc("ISABELLE_ID", root = root) orElse Mercurial.archive_id(root) orElse Mercurial.id_repository(root, rev = "") getOrElse error("Failed to identify Isabelle distribution " + root.expand) object 
Isabelle_Id extends Scala.Fun_String("isabelle_id") { val here = Scala_Project.here def apply(arg: String): String = isabelle_id() } def isabelle_tags(root: Path = Path.ISABELLE_HOME): String = getetc("ISABELLE_TAGS", root = root) orElse Mercurial.archive_tags(root) getOrElse { if (Mercurial.is_repository(root)) { val hg = Mercurial.repository(root) hg.tags(rev = hg.parent()) } else "" } def export_isabelle_identifier(isabelle_identifier: String): String = "export ISABELLE_IDENTIFIER=" + Bash.string(isabelle_identifier) + "\n" def isabelle_identifier(): Option[String] = proper_string(getenv("ISABELLE_IDENTIFIER")) def isabelle_heading(): String = isabelle_identifier() match { case None => "" case Some(version) => " (" + version + ")" } def isabelle_name(): String = getenv_strict("ISABELLE_NAME") def identification(): String = "Isabelle" + (try { "/" + isabelle_id () } catch { case ERROR(_) => "" }) + isabelle_heading() /** file-system operations **/ /* scala functions */ private def apply_paths( args: List[String], fun: PartialFunction[List[Path], Unit] ): List[String] = { fun(args.map(Path.explode)) Nil } private def apply_paths1(args: List[String], fun: Path => Unit): List[String] = apply_paths(args, { case List(path) => fun(path) }) private def apply_paths2(args: List[String], fun: (Path, Path) => Unit): List[String] = apply_paths(args, { case List(path1, path2) => fun(path1, path2) }) private def apply_paths3(args: List[String], fun: (Path, Path, Path) => Unit): List[String] = apply_paths(args, { case List(path1, path2, path3) => fun(path1, path2, path3) }) /* permissions */ def chmod(arg: String, path: Path): Unit = bash("chmod " + arg + " " + File.bash_path(path)).check def chown(arg: String, path: Path): Unit = bash("chown " + arg + " " + File.bash_path(path)).check /* directories */ def make_directory(path: Path): Path = { if (!path.is_dir) { try { Files.createDirectories(path.java_path) } catch { case ERROR(_) => error("Failed to create directory: " + path.absolute) } } path } def new_directory(path: Path): Path = if (path.is_dir) error("Directory already exists: " + path.absolute) else make_directory(path) def copy_dir(dir1: Path, dir2: Path): Unit = { val res = bash("cp -a " + File.bash_path(dir1) + " " + File.bash_path(dir2)) if (!res.ok) { cat_error("Failed to copy directory " + dir1.absolute + " to " + dir2.absolute, res.err) } } def with_copy_dir[A](dir1: Path, dir2: Path)(body: => A): A = { if (dir2.is_file || dir2.is_dir) error("Directory already exists: " + dir2.absolute) else { try { copy_dir(dir1, dir2); body } finally { rm_tree(dir2 ) } } } object Make_Directory extends Scala.Fun_Strings("make_directory") { val here = Scala_Project.here def apply(args: List[String]): List[String] = apply_paths1(args, make_directory) } object Copy_Dir extends Scala.Fun_Strings("copy_dir") { val here = Scala_Project.here def apply(args: List[String]): List[String] = apply_paths2(args, copy_dir) } /* copy files */ def copy_file(src: JFile, dst: JFile): Unit = { val target = if (dst.isDirectory) new JFile(dst, src.getName) else dst if (!File.eq(src, target)) { try { Files.copy(src.toPath, target.toPath, StandardCopyOption.COPY_ATTRIBUTES, StandardCopyOption.REPLACE_EXISTING) } catch { case ERROR(msg) => cat_error("Failed to copy file " + File.path(src).absolute + " to " + File.path(dst).absolute, msg) } } } def copy_file(src: Path, dst: Path): Unit = copy_file(src.file, dst.file) def copy_file_base(base_dir: Path, src: Path, target_dir: Path): Unit = { val src1 = src.expand val src1_dir = 
src1.dir if (!src1.starts_basic) error("Illegal path specification " + src1 + " beyond base directory") copy_file(base_dir + src1, Isabelle_System.make_directory(target_dir + src1_dir)) } object Copy_File extends Scala.Fun_Strings("copy_file") { val here = Scala_Project.here def apply(args: List[String]): List[String] = apply_paths2(args, copy_file) } object Copy_File_Base extends Scala.Fun_Strings("copy_file_base") { val here = Scala_Project.here def apply(args: List[String]): List[String] = apply_paths3(args, copy_file_base) } /* move files */ def move_file(src: JFile, dst: JFile): Unit = { val target = if (dst.isDirectory) new JFile(dst, src.getName) else dst if (!File.eq(src, target)) Files.move(src.toPath, target.toPath, StandardCopyOption.REPLACE_EXISTING) } def move_file(src: Path, dst: Path): Unit = move_file(src.file, dst.file) /* symbolic link */ def symlink(src: Path, dst: Path, force: Boolean = false, native: Boolean = false): Unit = { val src_file = src.file val dst_file = dst.file val target = if (dst_file.isDirectory) new JFile(dst_file, src_file.getName) else dst_file if (force) target.delete def cygwin_link(): Unit = { if (native) { error("Failed to create native symlink on Windows: " + quote(src_file.toString) + "\n(but it could work as Administrator)") } else isabelle.setup.Environment.cygwin_link(File.standard_path(src), target) } try { Files.createSymbolicLink(target.toPath, src_file.toPath) } catch { case _: UnsupportedOperationException if Platform.is_windows => cygwin_link() case _: FileSystemException if Platform.is_windows => cygwin_link() } } /* tmp files */ def isabelle_tmp_prefix(): JFile = { val path = Path.explode("$ISABELLE_TMP_PREFIX") path.file.mkdirs // low-level mkdirs to avoid recursion via Isabelle environment File.platform_file(path) } def tmp_file(name: String, ext: String = "", base_dir: JFile = isabelle_tmp_prefix()): JFile = { val suffix = if (ext == "") "" else "." 
+ ext val file = Files.createTempFile(base_dir.toPath, name, suffix).toFile file.deleteOnExit() file } def with_tmp_file[A](name: String, ext: String = "")(body: Path => A): A = { val file = tmp_file(name, ext) try { body(File.path(file)) } finally { file.delete } } /* tmp dirs */ def rm_tree(root: JFile): Unit = { root.delete if (root.isDirectory) { Files.walkFileTree(root.toPath, new SimpleFileVisitor[JPath] { override def visitFile(file: JPath, attrs: BasicFileAttributes): FileVisitResult = { try { Files.deleteIfExists(file) } catch { case _: IOException => } FileVisitResult.CONTINUE } override def postVisitDirectory(dir: JPath, e: IOException): FileVisitResult = { if (e == null) { try { Files.deleteIfExists(dir) } catch { case _: IOException => } FileVisitResult.CONTINUE } else throw e } } ) } } def rm_tree(root: Path): Unit = rm_tree(root.file) object Rm_Tree extends Scala.Fun_Strings("rm_tree") { val here = Scala_Project.here def apply(args: List[String]): List[String] = apply_paths1(args, rm_tree) } def tmp_dir(name: String, base_dir: JFile = isabelle_tmp_prefix()): JFile = { val dir = Files.createTempDirectory(base_dir.toPath, name).toFile dir.deleteOnExit() dir } def with_tmp_dir[A](name: String)(body: Path => A): A = { val dir = tmp_dir(name) try { body(File.path(dir)) } finally { rm_tree(dir) } } /* quasi-atomic update of directory */ def update_directory(dir: Path, f: Path => Unit): Unit = { val new_dir = dir.ext("new") val old_dir = dir.ext("old") rm_tree(new_dir) rm_tree(old_dir) f(new_dir) if (dir.is_dir) move_file(dir, old_dir) move_file(new_dir, dir) rm_tree(old_dir) } /** external processes **/ /* GNU bash */ def bash(script: String, description: String = "", cwd: JFile = null, env: JMap[String, String] = settings(), redirect: Boolean = false, input: String = "", progress_stdout: String => Unit = (_: String) => (), progress_stderr: String => Unit = (_: String) => (), watchdog: Option[Bash.Watchdog] = None, strict: Boolean = true, cleanup: () => Unit = () => () ): Process_Result = { Bash.process(script, description = description, cwd = cwd, env = env, redirect = redirect, cleanup = cleanup). 
result(input = input, progress_stdout = progress_stdout, progress_stderr = progress_stderr, watchdog = watchdog, strict = strict) } /* command-line tools */ def require_command(cmd: String, test: String = "--version"): Unit = { if (!bash(Bash.string(cmd) + " " + test).ok) error("Missing system command: " + quote(cmd)) } private lazy val gnutar_check: Boolean = try { bash("tar --version").check.out.containsSlice("GNU tar") || error("") } catch { case ERROR(_) => false } def gnutar( args: String, dir: Path = Path.current, original_owner: Boolean = false, strip: Int = 0, redirect: Boolean = false ): Process_Result = { val options = (if (dir.is_current) "" else "-C " + File.bash_path(dir) + " ") + (if (original_owner) "" else "--owner=root --group=staff ") + (if (strip <= 0) "" else "--strip-components=" + strip + " ") if (gnutar_check) bash("tar " + options + args, redirect = redirect) else error("Expected to find GNU tar executable") } def make_patch(base_dir: Path, src: Path, dst: Path, diff_options: String = ""): String = { with_tmp_file("patch") { patch => Isabelle_System.bash( "diff -ru " + diff_options + " -- " + File.bash_path(src) + " " + File.bash_path(dst) + " > " + File.bash_path(patch), cwd = base_dir.file).check_rc(_ <= 1) File.read(patch) } } def hostname(): String = bash("hostname -s").check.out def open(arg: String): Unit = bash("exec \"$ISABELLE_OPEN\" " + Bash.string(arg) + " >/dev/null 2>/dev/null &") def pdf_viewer(arg: Path): Unit = bash("exec \"$PDF_VIEWER\" " + File.bash_path(arg) + " >/dev/null 2>/dev/null &") def open_external_file(name: String): Boolean = { val ext = Library.take_suffix((c: Char) => c != '.', name.toList)._2.mkString val external = ext.nonEmpty && Library.space_explode(':', getenv("ISABELLE_EXTERNAL_FILES")).contains(ext) if (external) { if (ext == "pdf" && Path.is_wellformed(name)) pdf_viewer(Path.explode(name)) else open(name) } external } /** Isabelle resources **/ /* repository clone with Admin */ def admin(): Boolean = Path.explode("~~/Admin").is_dir /* default logic */ def default_logic(args: String*): String = { args.find(_ != "") match { case Some(logic) => logic case None => getenv_strict("ISABELLE_LOGIC") } } /* download file */ def download(url_name: String, progress: Progress = new Progress): HTTP.Content = { val url = Url(url_name) progress.echo("Getting " + quote(url_name)) try { HTTP.Client.get(url) } catch { case ERROR(msg) => cat_error("Failed to download " + quote(url_name), msg) } } def download_file(url_name: String, file: Path, progress: Progress = new Progress): Unit = Bytes.write(file, download(url_name, progress = progress).bytes) object Download extends Scala.Fun("download", thread = true) { val here = Scala_Project.here override def invoke(args: List[Bytes]): List[Bytes] = args.map(url => download(url.text).bytes) } /* repositories */ val isabelle_repository: Mercurial.Server = Mercurial.Server("https://isabelle.sketis.net/repos/isabelle") val afp_repository: Mercurial.Server = Mercurial.Server("https://isabelle.sketis.net/repos/afp-devel") def official_releases(): List[String] = Library.trim_split_lines( isabelle_repository.read_file(Path.explode("Admin/Release/official"))) } diff --git a/src/Pure/System/scala.scala b/src/Pure/System/scala.scala --- a/src/Pure/System/scala.scala +++ b/src/Pure/System/scala.scala @@ -1,390 +1,384 @@ /* Title: Pure/System/scala.scala Author: Makarius Support for Scala at runtime. 
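A sketch of providing a function that ML can invoke (hypothetical names; the
service class still has to be announced via ISABELLE_SCALA_SERVICES or a jar
manifest, see Classpath):

  object Reverse extends Scala.Fun_String("reverse") {
    val here = Scala_Project.here
    def apply(arg: String): String = arg.reverse
  }
  class My_Functions extends Scala.Functions(Reverse)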
*/ package isabelle import java.io.{File => JFile, PrintStream, ByteArrayOutputStream, OutputStream} import scala.collection.mutable import scala.annotation.tailrec import dotty.tools.dotc.CompilationUnit import dotty.tools.dotc.ast.Trees.PackageDef import dotty.tools.dotc.ast.untpd import dotty.tools.dotc.core.Contexts.{Context => CompilerContext} import dotty.tools.dotc.core.NameOps.moduleClassName import dotty.tools.dotc.core.{Phases, StdNames} import dotty.tools.dotc.interfaces import dotty.tools.dotc.reporting.{Diagnostic, ConsoleReporter} import dotty.tools.dotc.util.{SourceFile, SourcePosition, NoSourcePosition} import dotty.tools.repl import dotty.tools.repl.{ReplCompiler, ReplDriver} object Scala { /** registered functions **/ abstract class Fun(val name: String, val thread: Boolean = false) { override def toString: String = name def single: Boolean = false def bytes: Boolean = false def position: Properties.T = here.position def here: Scala_Project.Here def invoke(args: List[Bytes]): List[Bytes] } trait Single_Fun extends Fun { override def single: Boolean = true } trait Bytes_Fun extends Fun { override def bytes: Boolean = true } abstract class Fun_Strings(name: String, thread: Boolean = false) extends Fun(name, thread = thread) { override def invoke(args: List[Bytes]): List[Bytes] = apply(args.map(_.text)).map(Bytes.apply) def apply(args: List[String]): List[String] } abstract class Fun_String(name: String, thread: Boolean = false) extends Fun_Strings(name, thread = thread) with Single_Fun { override def apply(args: List[String]): List[String] = List(apply(Library.the_single(args))) def apply(arg: String): String } abstract class Fun_Bytes(name: String, thread: Boolean = false) extends Fun(name, thread = thread) with Single_Fun with Bytes_Fun { override def invoke(args: List[Bytes]): List[Bytes] = List(apply(Library.the_single(args))) def apply(arg: Bytes): Bytes } val encode_fun: XML.Encode.T[Fun] = { fun => import XML.Encode._ pair(string, pair(pair(bool, bool), properties))( fun.name, ((fun.single, fun.bytes), fun.position)) } class Functions(val functions: Fun*) extends Isabelle_System.Service lazy val functions: List[Fun] = Isabelle_System.make_services(classOf[Functions]).flatMap(_.functions) /** demo functions **/ object Echo extends Fun_String("echo") { val here = Scala_Project.here def apply(arg: String): String = arg } object Sleep extends Fun_String("sleep") { val here = Scala_Project.here def apply(seconds: String): String = { val t = seconds match { case Value.Double(s) => Time.seconds(s) case _ => error("Malformed argument: " + quote(seconds)) } val t0 = Time.now() t.sleep() val t1 = Time.now() (t1 - t0).toString } } /** compiler **/ - def get_classpath(): List[String] = - space_explode(JFile.pathSeparatorChar, System.getProperty("java.class.path", "")) - .filter(_.nonEmpty) - object Compiler { object Message { object Kind extends Enumeration { val error, warning, info, other = Value } private val Header = """^--.* (Error|Warning|Info): .*$""".r val header_kind: String => Kind.Value = { case "Error" => Kind.error case "Warning" => Kind.warning case "Info" => Kind.info case _ => Kind.other } // see compiler/src/dotty/tools/dotc/reporting/MessageRendering.scala def split(str: String): List[Message] = { var kind = Kind.other val text = new mutable.StringBuilder val result = new mutable.ListBuffer[Message] def flush(): Unit = { if (text.nonEmpty) { result += Message(kind, text.toString) } kind = Kind.other text.clear() } for (line <- Library.trim_split_lines(str)) { 
line match { case Header(k) => flush(); kind = header_kind(k) case _ => if (line.startsWith("-- ")) flush() } if (text.nonEmpty) { text += '\n' } text ++= line } flush() result.toList } } sealed case class Message(kind: Message.Kind.Value, text: String) { def is_error: Boolean = kind == Message.Kind.error override def toString: String = text } sealed case class Result( state: repl.State, messages: List[Message], unit: Option[CompilationUnit] = None ) { val errors: List[String] = messages.flatMap(msg => if (msg.is_error) Some(msg.text) else None) def ok: Boolean = errors.isEmpty def check_state: repl.State = if (ok) state else error(cat_lines(errors)) override def toString: String = if (ok) "Result(ok)" else "Result(error)" } def context( settings: List[String] = Nil, - jar_dirs: List[JFile] = Nil, + jar_files: List[JFile] = Nil, class_loader: Option[ClassLoader] = None ): Context = { val isabelle_settings = Word.explode(Isabelle_System.getenv_strict("ISABELLE_SCALAC_OPTIONS")) - - def find_jars(dir: JFile): List[String] = - File.find_files(dir, file => file.getName.endsWith(".jar")). - map(File.absolute_name) - - val classpath = (get_classpath() ::: jar_dirs.flatMap(find_jars)).mkString(JFile.pathSeparator) - val settings1 = isabelle_settings ::: settings ::: List("-classpath", classpath) - new Context(settings1, class_loader) + val classpath = Classpath(jar_files = jar_files) + new Context(isabelle_settings ::: settings, classpath, class_loader) } class Context private [Compiler]( - val settings: List[String], + _settings: List[String], + val classpath: Classpath, val class_loader: Option[ClassLoader] = None ) { + def settings: List[String] = + _settings ::: List("-classpath", classpath.platform_path) + private val out_stream = new ByteArrayOutputStream(1024) private val out = new PrintStream(out_stream) private val driver: ReplDriver = new ReplDriver(settings.toArray, out, class_loader) def init_state: repl.State = driver.initialState def compile(source: String, state: repl.State = init_state): Result = { out.flush() out_stream.reset() val state1 = driver.run(source)(state) out.flush() val messages = Message.split(out_stream.toString(UTF8.charset)) out_stream.reset() Result(state1, messages) } } } object Toplevel extends Fun_String("scala_toplevel") { val here = Scala_Project.here def apply(source: String): String = { val errors = try { Compiler.context().compile(source).errors.map("Scala error: " + _) } catch { case ERROR(msg) => List(msg) } locally { import XML.Encode._; YXML.string_of_body(list(string)(errors)) } } } /** interpreter thread **/ object Interpreter { /* requests */ sealed abstract class Request case class Execute(command: (Compiler.Context, repl.State) => repl.State) extends Request case object Shutdown extends Request /* known interpreters */ private val known = Synchronized(Set.empty[Interpreter]) def add(interpreter: Interpreter): Unit = known.change(_ + interpreter) def del(interpreter: Interpreter): Unit = known.change(_ - interpreter) def get[A](which: PartialFunction[Interpreter, A]): Option[A] = known.value.collectFirst(which) } class Interpreter(context: Compiler.Context, out: OutputStream = Console.out) { interpreter => private val running = Synchronized[Option[Thread]](None) def running_thread(thread: Thread): Boolean = running.value.contains(thread) def interrupt_thread(): Unit = running.change({ opt => opt.foreach(_.interrupt()); opt }) private var state = context.init_state private lazy val thread: Consumer_Thread[Interpreter.Request] = 
Consumer_Thread.fork("Scala.Interpreter") { case Interpreter.Execute(command) => try { running.change(_ => Some(Thread.currentThread())) state = command(context, state) } finally { running.change(_ => None) Exn.Interrupt.dispose() } true case Interpreter.Shutdown => Interpreter.del(interpreter) false } def shutdown(): Unit = { thread.send(Interpreter.Shutdown) interrupt_thread() thread.shutdown() } def execute(command: (Compiler.Context, repl.State) => repl.State): Unit = thread.send(Interpreter.Execute(command)) def reset(): Unit = thread.send(Interpreter.Execute((context, _) => context.init_state)) Interpreter.add(interpreter) thread } /** invoke Scala functions from ML **/ /* invoke function */ object Tag extends Enumeration { val NULL, OK, ERROR, FAIL, INTERRUPT = Value } def function_thread(name: String): Boolean = functions.find(fun => fun.name == name) match { case Some(fun) => fun.thread case None => false } def function_body(name: String, args: List[Bytes]): (Tag.Value, List[Bytes]) = functions.find(fun => fun.name == name) match { case Some(fun) => Exn.capture { fun.invoke(args) } match { case Exn.Res(null) => (Tag.NULL, Nil) case Exn.Res(res) => (Tag.OK, res) case Exn.Exn(Exn.Interrupt()) => (Tag.INTERRUPT, Nil) case Exn.Exn(e) => (Tag.ERROR, List(Bytes(Exn.message(e)))) } case None => (Tag.FAIL, List(Bytes("Unknown Isabelle/Scala function: " + quote(name)))) } /* protocol handler */ class Handler extends Session.Protocol_Handler { private var session: Session = null private var futures = Map.empty[String, Future[Unit]] override def init(session: Session): Unit = synchronized { this.session = session } override def exit(): Unit = synchronized { for ((id, future) <- futures) cancel(id, future) futures = Map.empty } private def result(id: String, tag: Scala.Tag.Value, res: List[Bytes]): Unit = synchronized { if (futures.isDefinedAt(id)) { session.protocol_command_raw("Scala.result", Bytes(id) :: Bytes(tag.id.toString) :: res) futures -= id } } private def cancel(id: String, future: Future[Unit]): Unit = { future.cancel() result(id, Scala.Tag.INTERRUPT, Nil) } private def invoke_scala(msg: Prover.Protocol_Output): Boolean = synchronized { msg.properties match { case Markup.Invoke_Scala(name, id) => def body(): Unit = { val (tag, res) = Scala.function_body(name, msg.chunks) result(id, tag, res) } val future = if (Scala.function_thread(name)) { Future.thread(name = Isabelle_Thread.make_name(base = "invoke_scala"))(body()) } else Future.fork(body()) futures += (id -> future) true case _ => false } } private def cancel_scala(msg: Prover.Protocol_Output): Boolean = synchronized { msg.properties match { case Markup.Cancel_Scala(id) => futures.get(id) match { case Some(future) => cancel(id, future) case None => } true case _ => false } } override val functions: Session.Protocol_Functions = List( Markup.Invoke_Scala.name -> invoke_scala, Markup.Cancel_Scala.name -> cancel_scala) } } class Scala_Functions extends Scala.Functions( Scala.Echo, Scala.Sleep, Scala.Toplevel, Scala_Build.Scala_Fun, Base64.Decode, Base64.Encode, XZ.Compress, XZ.Uncompress, Doc.Doc_Names, Bibtex.Check_Database, Isabelle_System.Make_Directory, Isabelle_System.Copy_Dir, Isabelle_System.Copy_File, Isabelle_System.Copy_File_Base, Isabelle_System.Rm_Tree, Isabelle_System.Download, Isabelle_System.Isabelle_Id, Isabelle_Tool.Isabelle_Tools, isabelle.atp.SystemOnTPTP.List_Systems, isabelle.atp.SystemOnTPTP.Run_System) diff --git a/src/Pure/Thy/document_build.scala b/src/Pure/Thy/document_build.scala --- 
a/src/Pure/Thy/document_build.scala +++ b/src/Pure/Thy/document_build.scala @@ -1,494 +1,493 @@ /* Title: Pure/Thy/document_build.scala Author: Makarius Build theory document (PDF) from session database. */ package isabelle object Document_Build { /* document variants */ abstract class Document_Name { def name: String def path: Path = Path.basic(name) override def toString: String = name } object Document_Variant { def parse(opt: String): Document_Variant = Library.space_explode('=', opt) match { case List(name) => Document_Variant(name, Latex.Tags.empty) case List(name, tags) => Document_Variant(name, Latex.Tags(tags)) case _ => error("Malformed document variant: " + quote(opt)) } } sealed case class Document_Variant(name: String, tags: Latex.Tags) extends Document_Name { def print: String = if (tags.toString.isEmpty) name else name + "=" + tags.toString } sealed case class Document_Input(name: String, sources: SHA1.Digest) extends Document_Name sealed case class Document_Output(name: String, sources: SHA1.Digest, log_xz: Bytes, pdf: Bytes) extends Document_Name { def log: String = log_xz.uncompress().text def log_lines: List[String] = split_lines(log) def write(db: SQL.Database, session_name: String): Unit = write_document(db, session_name, this) def write(dir: Path): Path = { val path = dir + Path.basic(name).pdf Isabelle_System.make_directory(path.expand.dir) Bytes.write(path, pdf) path } } /* SQL data model */ object Data { val session_name = SQL.Column.string("session_name").make_primary_key val name = SQL.Column.string("name").make_primary_key val sources = SQL.Column.string("sources") val log_xz = SQL.Column.bytes("log_xz") val pdf = SQL.Column.bytes("pdf") val table = SQL.Table("isabelle_documents", List(session_name, name, sources, log_xz, pdf)) def where_equal(session_name: String, name: String = ""): SQL.Source = "WHERE " + Data.session_name.equal(session_name) + (if (name == "") "" else " AND " + Data.name.equal(name)) } def read_documents(db: SQL.Database, session_name: String): List[Document_Input] = { val select = Data.table.select(List(Data.name, Data.sources), Data.where_equal(session_name)) db.using_statement(select)(stmt => stmt.execute_query().iterator({ res => val name = res.string(Data.name) val sources = res.string(Data.sources) Document_Input(name, SHA1.fake_digest(sources)) }).toList) } def read_document( db: SQL.Database, session_name: String, name: String ): Option[Document_Output] = { val select = Data.table.select(sql = Data.where_equal(session_name, name)) db.using_statement(select)({ stmt => val res = stmt.execute_query() if (res.next()) { val name = res.string(Data.name) val sources = res.string(Data.sources) val log_xz = res.bytes(Data.log_xz) val pdf = res.bytes(Data.pdf) Some(Document_Output(name, SHA1.fake_digest(sources), log_xz, pdf)) } else None }) } def write_document(db: SQL.Database, session_name: String, doc: Document_Output): Unit = { db.using_statement(Data.table.insert()){ stmt => stmt.string(1) = session_name stmt.string(2) = doc.name stmt.string(3) = doc.sources.toString stmt.bytes(4) = doc.log_xz stmt.bytes(5) = doc.pdf stmt.execute() } } /* context */ val texinputs: Path = Path.explode("~~/lib/texinputs") val isabelle_styles: List[Path] = List("isabelle.sty", "isabellesym.sty", "pdfsetup.sty", "railsetup.sty"). 
map(name => texinputs + Path.basic(name)) def context( session: String, deps: Sessions.Deps, db_context: Sessions.Database_Context, progress: Progress = new Progress ): Context = { val structure = deps.sessions_structure val info = structure(session) val base = deps(session) val hierarchy = deps.sessions_structure.build_hierarchy(session) val classpath = db_context.get_classpath(structure, session) new Context(info, base, hierarchy, db_context, classpath, progress) } final class Context private[Document_Build]( info: Sessions.Info, base: Sessions.Base, hierarchy: List[String], db_context: Sessions.Database_Context, val classpath: List[File.Content_Bytes], val progress: Progress = new Progress ) { /* session info */ def session: String = info.name def options: Options = info.options def document_bibliography: Boolean = options.bool("document_bibliography") def document_logo: Option[String] = options.string("document_logo") match { case "" => None case "_" => Some("") case name => Some(name) } def document_build: String = options.string("document_build") def get_engine(): Engine = { val name = document_build - engines.find(_.name == name).getOrElse(error("Bad document_build engine " + quote(name))) + Classpath(jar_contents = classpath).make_services(classOf[Engine]) + .find(_.name == name).getOrElse(error("Bad document_build engine " + quote(name))) } def get_export(theory: String, name: String): Export.Entry = db_context.get_export(hierarchy, theory, name) /* document content */ def documents: List[Document_Variant] = info.documents def session_theories: List[Document.Node.Name] = base.session_theories def document_theories: List[Document.Node.Name] = session_theories ::: base.document_theories lazy val document_latex: List[File.Content_XML] = for (name <- document_theories) yield { val path = Path.basic(tex_name(name)) val content = YXML.parse_body(get_export(name.theory, Export.DOCUMENT_LATEX).text) File.Content(path, content) } lazy val session_graph: File.Content = { val path = Presentation.session_graph_path val content = graphview.Graph_File.make_pdf(options, base.session_graph_display) File.Content(path, content) } lazy val session_tex: File.Content = { val path = Path.basic("session.tex") val content = Library.terminate_lines( base.session_theories.map(name => "\\input{" + tex_name(name) + "}")) File.Content(path, content) } lazy val isabelle_logo: Option[File.Content] = { document_logo.map(logo_name => Isabelle_System.with_tmp_file("logo", ext = "pdf") { tmp_path => Logo.create_logo(logo_name, output_file = tmp_path, quiet = true) val path = Path.basic("isabelle_logo.pdf") val content = Bytes.read(tmp_path) File.Content(path, content) }) } /* document directory */ def prepare_directory( dir: Path, doc: Document_Variant, latex_output: Latex.Output ): Directory = { val doc_dir = Isabelle_System.make_directory(dir + Path.basic(doc.name)) /* actual sources: with SHA1 digest */ isabelle_styles.foreach(Isabelle_System.copy_file(_, doc_dir)) val comment_latex = options.bool("document_comment_latex") if (!comment_latex) { Isabelle_System.copy_file(texinputs + Path.basic("comment.sty"), doc_dir) } doc.tags.sty(comment_latex).write(doc_dir) for ((base_dir, src) <- info.document_files) { Isabelle_System.copy_file_base(info.dir + base_dir, src, doc_dir) } session_tex.write(doc_dir) for (content <- document_latex) { content.output(latex_output(_, file_pos = content.path.implode_symbolic)) .write(doc_dir) } val root_name1 = "root_" + doc.name val root_name = if ((doc_dir + 
Path.explode(root_name1).tex).is_file) root_name1 else "root" val digests1 = List(doc.print, document_logo.toString, document_build).map(SHA1.digest) val digests2 = File.find_files(doc_dir.file, follow_links = true).map(SHA1.digest) val sources = SHA1.digest_set(digests1 ::: digests2) /* derived material: without SHA1 digest */ isabelle_logo.foreach(_.write(doc_dir)) session_graph.write(doc_dir) Directory(doc_dir, doc, root_name, sources) } def old_document(directory: Directory): Option[Document_Output] = for { old_doc <- db_context.input_database(session)(read_document(_, _, directory.doc.name)) if old_doc.sources == directory.sources } yield old_doc } sealed case class Directory( doc_dir: Path, doc: Document_Variant, root_name: String, sources: SHA1.Digest ) { def root_name_script(ext: String = ""): String = Bash.string(if (ext.isEmpty) root_name else root_name + "." + ext) def conditional_script(ext: String, exe: String, after: String = ""): String = "if [ -f " + root_name_script(ext) + " ]\n" + "then\n" + " " + exe + " " + root_name_script() + "\n" + (if (after.isEmpty) "" else " " + after) + "fi\n" def log_errors(): List[String] = Latex.latex_errors(doc_dir, root_name) ::: Bibtex.bibtex_errors(doc_dir, root_name) def make_document(log: List[String], errors: List[String]): Document_Output = { val root_pdf = Path.basic(root_name).pdf val result_pdf = doc_dir + root_pdf if (errors.nonEmpty) { val errors1 = errors ::: List("Failed to build document " + quote(doc.name)) throw new Build_Error(log, Exn.cat_message(errors1: _*)) } else if (!result_pdf.is_file) { val message = "Bad document result: expected to find " + root_pdf throw new Build_Error(log, message) } else { val log_xz = Bytes(cat_lines(log)).compress() val pdf = Bytes.read(result_pdf) Document_Output(doc.name, sources, log_xz, pdf) } } } /* build engines */ - lazy val engines: List[Engine] = Isabelle_System.make_services(classOf[Engine]) - abstract class Engine(val name: String) extends Isabelle_System.Service { override def toString: String = name def prepare_directory(context: Context, dir: Path, doc: Document_Variant): Directory def build_document(context: Context, directory: Directory, verbose: Boolean): Document_Output } abstract class Bash_Engine(name: String) extends Engine(name) { def prepare_directory(context: Context, dir: Path, doc: Document_Variant): Directory = context.prepare_directory(dir, doc, new Latex.Output(context.options)) def use_pdflatex: Boolean = false def latex_script(context: Context, directory: Directory): String = (if (use_pdflatex) "$ISABELLE_PDFLATEX" else "$ISABELLE_LUALATEX") + " " + directory.root_name_script() + "\n" def bibtex_script(context: Context, directory: Directory, latex: Boolean = false): String = { val ext = if (context.document_bibliography) "aux" else "bib" directory.conditional_script(ext, "$ISABELLE_BIBTEX", after = if (latex) latex_script(context, directory) else "") } def makeindex_script(context: Context, directory: Directory, latex: Boolean = false): String = directory.conditional_script("idx", "$ISABELLE_MAKEINDEX", after = if (latex) latex_script(context, directory) else "") def use_build_script: Boolean = false def build_script(context: Context, directory: Directory): String = { val has_build_script = (directory.doc_dir + Path.explode("build")).is_file if (!use_build_script && has_build_script) { error("Unexpected document build script for option document_build=" + quote(context.document_build)) } else if (use_build_script && !has_build_script) error("Missing document 
build script") else if (has_build_script) "./build pdf " + Bash.string(directory.doc.name) else { "set -e\n" + latex_script(context, directory) + bibtex_script(context, directory, latex = true) + makeindex_script(context, directory) + latex_script(context, directory) + makeindex_script(context, directory, latex = true) } } def build_document( context: Context, directory: Directory, verbose: Boolean ): Document_Output = { val result = context.progress.bash( build_script(context, directory), cwd = directory.doc_dir.file, echo = verbose, watchdog = Time.seconds(0.5)) val log = result.out_lines ::: result.err_lines val errors = (if (result.ok) Nil else List(result.err)) ::: directory.log_errors() directory.make_document(log, errors) } } class LuaLaTeX_Engine extends Bash_Engine("lualatex") class PDFLaTeX_Engine extends Bash_Engine("pdflatex") { override def use_pdflatex: Boolean = true } class Build_Engine extends Bash_Engine("build") { override def use_build_script: Boolean = true } /* build documents */ def tex_name(name: Document.Node.Name): String = name.theory_base_name + ".tex" class Build_Error(val log_lines: List[String], val message: String) extends Exn.User_Error(message) def build_documents( context: Context, output_sources: Option[Path] = None, output_pdf: Option[Path] = None, verbose: Boolean = false ): List[Document_Output] = { val progress = context.progress val engine = context.get_engine() val documents = for (doc <- context.documents) yield { Isabelle_System.with_tmp_dir("document") { tmp_dir => progress.echo("Preparing " + context.session + "/" + doc.name + " ...") val start = Time.now() output_sources.foreach(engine.prepare_directory(context, _, doc)) val directory = engine.prepare_directory(context, tmp_dir, doc) val document = context.old_document(directory) getOrElse engine.build_document(context, directory, verbose) val stop = Time.now() val timing = stop - start progress.echo("Finished " + context.session + "/" + doc.name + " (" + timing.message_hms + " elapsed time)") document } } for (dir <- output_pdf; doc <- documents) { val path = doc.write(dir) progress.echo("Document at " + path.absolute) } documents } /* Isabelle tool wrapper */ val isabelle_tool = Isabelle_Tool("document", "prepare session theory document", Scala_Project.here, { args => var output_sources: Option[Path] = None var output_pdf: Option[Path] = None var verbose_latex = false var dirs: List[Path] = Nil var options = Options.init() var verbose_build = false val getopts = Getopts(""" Usage: isabelle document [OPTIONS] SESSION Options are: -O DIR output directory for LaTeX sources and resulting PDF -P DIR output directory for resulting PDF -S DIR output directory for LaTeX sources -V verbose latex -d DIR include session directory -o OPTION override Isabelle system OPTION (via NAME=VAL or NAME) -v verbose build Prepare the theory document of a session. 
""", "O:" -> (arg => { val dir = Path.explode(arg) output_sources = Some(dir) output_pdf = Some(dir) }), "P:" -> (arg => { output_pdf = Some(Path.explode(arg)) }), "S:" -> (arg => { output_sources = Some(Path.explode(arg)) }), "V" -> (_ => verbose_latex = true), "d:" -> (arg => dirs = dirs ::: List(Path.explode(arg))), "o:" -> (arg => options = options + arg), "v" -> (_ => verbose_build = true)) val more_args = getopts(args) val session = more_args match { case List(a) => a case _ => getopts.usage() } val progress = new Console_Progress(verbose = verbose_build) val store = Sessions.store(options) progress.interrupt_handler { val res = Build.build(options, selection = Sessions.Selection.session(session), dirs = dirs, progress = progress, verbose = verbose_build) if (!res.ok) error("Failed to build session " + quote(session)) val deps = Sessions.load_structure(options + "document=pdf", dirs = dirs). selection_deps(Sessions.Selection.session(session)) if (output_sources.isEmpty && output_pdf.isEmpty) { progress.echo_warning("No output directory") } using(store.open_database_context()) { db_context => build_documents(context(session, deps, db_context, progress = progress), output_sources = output_sources, output_pdf = output_pdf, verbose = verbose_latex) } } }) } diff --git a/src/Pure/Thy/sessions.scala b/src/Pure/Thy/sessions.scala --- a/src/Pure/Thy/sessions.scala +++ b/src/Pure/Thy/sessions.scala @@ -1,1521 +1,1520 @@ /* Title: Pure/Thy/sessions.scala Author: Makarius Cumulative session information. */ package isabelle import java.io.{File => JFile} import java.nio.ByteBuffer import java.nio.channels.FileChannel import java.nio.file.StandardOpenOption import scala.collection.immutable.{SortedSet, SortedMap} import scala.collection.mutable object Sessions { /* session and theory names */ val ROOTS: Path = Path.explode("ROOTS") val ROOT: Path = Path.explode("ROOT") val roots_name: String = "ROOTS" val root_name: String = "ROOT" val theory_import: String = "Pure.Sessions" val UNSORTED = "Unsorted" val DRAFT = "Draft" def is_pure(name: String): Boolean = name == Thy_Header.PURE def exclude_session(name: String): Boolean = name == "" || name == DRAFT def exclude_theory(name: String): Boolean = name == root_name || name == "README" || name == "index" || name == "bib" /* ROOTS file format */ class File_Format extends isabelle.File_Format { val format_name: String = roots_name val file_ext = "" override def detect(name: String): Boolean = Thy_Header.split_file_name(name) match { case Some((_, file_name)) => file_name == roots_name case None => false } override def theory_suffix: String = "ROOTS_file" override def theory_content(name: String): String = """theory "ROOTS" imports Pure begin ROOTS_file """ + Outer_Syntax.quote_string(name) + """ end""" } /* base info and source dependencies */ sealed case class Base( pos: Position.T = Position.none, session_directories: Map[JFile, String] = Map.empty, global_theories: Map[String, String] = Map.empty, session_theories: List[Document.Node.Name] = Nil, document_theories: List[Document.Node.Name] = Nil, loaded_theories: Graph[String, Outer_Syntax] = Graph.string, used_theories: List[(Document.Node.Name, Options)] = Nil, load_commands: Map[Document.Node.Name, List[Command_Span.Span]] = Map.empty, known_theories: Map[String, Document.Node.Entry] = Map.empty, known_loaded_files: Map[String, List[Path]] = Map.empty, overall_syntax: Outer_Syntax = Outer_Syntax.empty, imported_sources: List[(Path, SHA1.Digest)] = Nil, sources: List[(Path, SHA1.Digest)] = Nil, 
session_graph_display: Graph_Display.Graph = Graph_Display.empty_graph, errors: List[String] = Nil ) { override def toString: String = "Sessions.Base(loaded_theories = " + loaded_theories.size + ", used_theories = " + used_theories.length + ")" def theory_qualifier(name: String): String = global_theories.getOrElse(name, Long_Name.qualifier(name)) def theory_qualifier(name: Document.Node.Name): String = theory_qualifier(name.theory) def loaded_theory(name: String): Boolean = loaded_theories.defined(name) def loaded_theory(name: Document.Node.Name): Boolean = loaded_theory(name.theory) def loaded_theory_syntax(name: String): Option[Outer_Syntax] = if (loaded_theory(name)) Some(loaded_theories.get_node(name)) else None def loaded_theory_syntax(name: Document.Node.Name): Option[Outer_Syntax] = loaded_theory_syntax(name.theory) def theory_syntax(name: Document.Node.Name): Outer_Syntax = loaded_theory_syntax(name) getOrElse overall_syntax def node_syntax(nodes: Document.Nodes, name: Document.Node.Name): Outer_Syntax = nodes(name).syntax orElse loaded_theory_syntax(name) getOrElse overall_syntax } sealed case class Deps(sessions_structure: Structure, session_bases: Map[String, Base]) { override def toString: String = "Sessions.Deps(" + sessions_structure + ")" def is_empty: Boolean = session_bases.isEmpty def apply(name: String): Base = session_bases(name) def get(name: String): Option[Base] = session_bases.get(name) def imported_sources(name: String): List[SHA1.Digest] = session_bases(name).imported_sources.map(_._2) def sources(name: String): List[SHA1.Digest] = session_bases(name).sources.map(_._2) def errors: List[String] = (for { (name, base) <- session_bases.iterator if base.errors.nonEmpty } yield cat_lines(base.errors) + "\nThe error(s) above occurred in session " + quote(name) + Position.here(base.pos) ).toList def check_errors: Deps = errors match { case Nil => this case errs => error(cat_lines(errs)) } } def deps(sessions_structure: Structure, progress: Progress = new Progress, inlined_files: Boolean = false, verbose: Boolean = false, list_files: Boolean = false, check_keywords: Set[String] = Set.empty ): Deps = { var cache_sources = Map.empty[JFile, SHA1.Digest] def check_sources(paths: List[Path]): List[(Path, SHA1.Digest)] = { for { path <- paths file = path.file if cache_sources.isDefinedAt(file) || file.isFile } yield { cache_sources.get(file) match { case Some(digest) => (path, digest) case None => val digest = SHA1.digest(file) cache_sources = cache_sources + (file -> digest) (path, digest) } } } val session_bases = sessions_structure.imports_topological_order.foldLeft(Map("" -> sessions_structure.bootstrap)) { case (session_bases, session_name) => progress.expose_interrupt() val info = sessions_structure(session_name) try { val deps_base = info.deps_base(session_bases) val resources = new Resources(sessions_structure, deps_base) if (verbose || list_files) { val groups = if (info.groups.isEmpty) "" else info.groups.mkString(" (", " ", ")") progress.echo("Session " + info.chapter_session + groups) } val dependencies = resources.session_dependencies(info) val overall_syntax = dependencies.overall_syntax val session_theories = dependencies.theories.filter(name => deps_base.theory_qualifier(name) == session_name) val theory_files = dependencies.theories.map(_.path) dependencies.load_commands val (load_commands, load_commands_errors) = try { if (inlined_files) (dependencies.load_commands, Nil) else (Nil, Nil) } catch { case ERROR(msg) => (Nil, List(msg)) } val loaded_files = 
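/* Illustrative sketch (not part of this patch): check_sources above caches
   SHA1 digests per java.io.File, so files shared between sessions are hashed
   at most once per deps run.  The same idea in isolation, with
   java.security.MessageDigest standing in for SHA1.digest -- digest_file and
   cache are hypothetical names:

   import java.io.File
   import java.nio.file.Files
   import java.security.MessageDigest
   import scala.collection.mutable

   val cache = mutable.Map.empty[File, String]

   def digest_file(file: File): String =
     cache.getOrElseUpdate(file, {
       val md = MessageDigest.getInstance("SHA-1")
       md.digest(Files.readAllBytes(file.toPath))
         .map(b => "%02x".format(b)).mkString  // hex rendering of the raw digest
     })
*/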
load_commands.map({ case (name, spans) => dependencies.loaded_files(name, spans) }) val session_files = (theory_files ::: loaded_files.flatMap(_._2) ::: info.document_files.map(file => info.dir + file._1 + file._2)).map(_.expand) val imported_files = if (inlined_files) dependencies.imported_files else Nil if (list_files) progress.echo(cat_lines(session_files.map(_.implode).sorted.map(" " + _))) if (check_keywords.nonEmpty) { Check_Keywords.check_keywords( progress, overall_syntax.keywords, check_keywords, theory_files) } val session_graph_display: Graph_Display.Graph = { def session_node(name: String): Graph_Display.Node = Graph_Display.Node("[" + name + "]", "session." + name) def node(name: Document.Node.Name): Graph_Display.Node = { val qualifier = deps_base.theory_qualifier(name) if (qualifier == info.name) Graph_Display.Node(name.theory_base_name, "theory." + name.theory) else session_node(qualifier) } val required_sessions = dependencies.loaded_theories.all_preds(dependencies.theories.map(_.theory)) .map(theory => deps_base.theory_qualifier(theory)) .filter(name => name != info.name && sessions_structure.defined(name)) val required_subgraph = sessions_structure.imports_graph .restrict(sessions_structure.imports_graph.all_preds(required_sessions).toSet) .transitive_closure .restrict(required_sessions.toSet) .transitive_reduction_acyclic val graph0 = required_subgraph.topological_order.foldLeft(Graph_Display.empty_graph) { case (g, session) => val a = session_node(session) val bs = required_subgraph.imm_preds(session).toList.map(session_node) bs.foldLeft((a :: bs).foldLeft(g)(_.default_node(_, Nil)))(_.add_edge(_, a)) } dependencies.entries.foldLeft(graph0) { case (g, entry) => val a = node(entry.name) val bs = entry.header.imports.map(node).filterNot(_ == a) bs.foldLeft((a :: bs).foldLeft(g)(_.default_node(_, Nil)))(_.add_edge(_, a)) } } val known_theories = dependencies.entries.iterator.map(entry => entry.name.theory -> entry). 
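/* Illustrative sketch (not part of this patch): session_graph_display above is
   assembled by folding nodes and edges into an immutable graph, defaulting
   every node before connecting it so that isolated nodes survive.  The same
   fold pattern over a plain adjacency map -- Node, add_edge and make_graph are
   hypothetical:

   type Node = String
   type Graph = Map[Node, Set[Node]]  // node -> immediate predecessors

   def default_node(g: Graph, n: Node): Graph =
     if (g.contains(n)) g else g + (n -> Set.empty)

   def add_edge(g: Graph, from: Node, to: Node): Graph = {
     val g1 = default_node(default_node(g, from), to)
     g1 + (to -> (g1(to) + from))
   }

   def make_graph(edges: List[(Node, Node)]): Graph =
     edges.foldLeft(Map.empty: Graph) { case (g, (a, b)) => add_edge(g, a, b) }
*/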
foldLeft(deps_base.known_theories)(_ + _) val known_loaded_files = deps_base.known_loaded_files ++ loaded_files val import_errors = { val known_sessions = sessions_structure.imports_requirements(List(session_name)).toSet for { name <- dependencies.theories qualifier = deps_base.theory_qualifier(name) if !known_sessions(qualifier) } yield "Bad import of theory " + quote(name.toString) + ": need to include sessions " + quote(qualifier) + " in ROOT" } val document_errors = info.document_theories.flatMap( { case (thy, pos) => val build_hierarchy = if (sessions_structure.build_graph.defined(session_name)) { sessions_structure.build_hierarchy(session_name) } else Nil def err(msg: String): Option[String] = Some(msg + " " + quote(thy) + Position.here(pos)) known_theories.get(thy).map(_.name) match { case None => err("Unknown document theory") case Some(name) => val qualifier = deps_base.theory_qualifier(name) if (session_theories.contains(name)) { err("Redundant document theory from this session:") } else if (build_hierarchy.contains(qualifier)) None else if (dependencies.theories.contains(name)) None else err("Document theory from other session not imported properly:") } }) val document_theories = info.document_theories.map({ case (thy, _) => known_theories(thy).name }) val dir_errors = { val ok = info.dirs.map(_.canonical_file).toSet val bad = (for { name <- session_theories.iterator path = name.master_dir_path if !ok(path.canonical_file) path1 = File.relative_path(info.dir.canonical, path).getOrElse(path) } yield (path1, name)).toList val bad_dirs = (for { (path1, _) <- bad } yield path1.toString).distinct.sorted val errs1 = for { (path1, name) <- bad } yield "Implicit use of directory " + path1 + " for theory " + quote(name.toString) val errs2 = if (bad_dirs.isEmpty) Nil else List("Implicit use of session directories: " + commas(bad_dirs)) val errs3 = for (p <- info.dirs if !p.is_dir) yield "No such directory: " + p val errs4 = (for { name <- session_theories.iterator name1 <- resources.find_theory_node(name.theory) if name.node != name1.node } yield "Incoherent theory file import:\n " + name.path + " vs. 
\n " + name1.path) .toList errs1 ::: errs2 ::: errs3 ::: errs4 } val sources_errors = for (p <- session_files if !p.is_file) yield "No such file: " + p val path_errors = try { Path.check_case_insensitive(session_files ::: imported_files); Nil } catch { case ERROR(msg) => List(msg) } val bibtex_errors = try { info.bibtex_entries; Nil } catch { case ERROR(msg) => List(msg) } val base = Base( pos = info.pos, session_directories = sessions_structure.session_directories, global_theories = sessions_structure.global_theories, session_theories = session_theories, document_theories = document_theories, loaded_theories = dependencies.loaded_theories, used_theories = dependencies.theories_adjunct, load_commands = load_commands.toMap, known_theories = known_theories, known_loaded_files = known_loaded_files, overall_syntax = overall_syntax, imported_sources = check_sources(imported_files), sources = check_sources(session_files), session_graph_display = session_graph_display, errors = dependencies.errors ::: load_commands_errors ::: import_errors ::: document_errors ::: dir_errors ::: sources_errors ::: path_errors ::: bibtex_errors) session_bases + (info.name -> base) } catch { case ERROR(msg) => cat_error(msg, "The error(s) above occurred in session " + quote(info.name) + Position.here(info.pos)) } } Deps(sessions_structure, session_bases) } /* base info */ sealed case class Base_Info( session: String, sessions_structure: Structure, errors: List[String], base: Base, infos: List[Info] ) { def check: Base_Info = if (errors.isEmpty) this else error(cat_lines(errors)) } def base_info(options: Options, session: String, progress: Progress = new Progress, dirs: List[Path] = Nil, include_sessions: List[String] = Nil, session_ancestor: Option[String] = None, session_requirements: Boolean = false ): Base_Info = { val full_sessions = load_structure(options, dirs = dirs) val selected_sessions = full_sessions.selection(Selection(sessions = session :: session_ancestor.toList)) val info = selected_sessions(session) val ancestor = session_ancestor orElse info.parent val (session1, infos1) = if (session_requirements && ancestor.isDefined) { val deps = Sessions.deps(selected_sessions, progress = progress) val base = deps(session) val ancestor_loaded = deps.get(ancestor.get) match { case Some(ancestor_base) if !selected_sessions.imports_requirements(List(ancestor.get)).contains(session) => ancestor_base.loaded_theories.defined _ case _ => error("Bad ancestor " + quote(ancestor.get) + " for session " + quote(session)) } val required_theories = for { thy <- base.loaded_theories.keys if !ancestor_loaded(thy) && base.theory_qualifier(thy) != session } yield thy if (required_theories.isEmpty) (ancestor.get, Nil) else { val other_name = info.name + "_requirements(" + ancestor.get + ")" Isabelle_System.isabelle_tmp_prefix() (other_name, List( make_info(info.options, dir_selected = false, dir = Path.explode("$ISABELLE_TMP_PREFIX"), chapter = info.chapter, Session_Entry( pos = info.pos, name = other_name, groups = info.groups, path = ".", parent = ancestor, description = "Required theory imports from other sessions", options = Nil, imports = info.deps, directories = Nil, theories = List((Nil, required_theories.map(thy => ((thy, Position.none), false)))), document_theories = Nil, document_files = Nil, export_files = Nil, export_classpath = Nil)))) } } else (session, Nil) val full_sessions1 = if (infos1.isEmpty) full_sessions else load_structure(options, dirs = dirs, infos = infos1) val selected_sessions1 = 
full_sessions1.selection(Selection(sessions = session1 :: session :: include_sessions)) val deps1 = Sessions.deps(selected_sessions1, progress = progress) Base_Info(session1, full_sessions1, deps1.errors, deps1(session1), infos1) } /* cumulative session info */ sealed case class Info( name: String, chapter: String, dir_selected: Boolean, pos: Position.T, groups: List[String], dir: Path, parent: Option[String], description: String, directories: List[Path], options: Options, imports: List[String], theories: List[(Options, List[(String, Position.T)])], global_theories: List[String], document_theories: List[(String, Position.T)], document_files: List[(Path, Path)], export_files: List[(Path, Int, List[String])], export_classpath: List[String], meta_digest: SHA1.Digest ) { def chapter_session: String = chapter + "/" + name def relative_path(info1: Info): String = if (name == info1.name) "" else if (chapter == info1.chapter) "../" + info1.name + "/" else "../../" + info1.chapter_session + "/" def deps: List[String] = parent.toList ::: imports def deps_base(session_bases: String => Base): Base = { val parent_base = session_bases(parent.getOrElse("")) val imports_bases = imports.map(session_bases) parent_base.copy( known_theories = (for { base <- imports_bases.iterator (_, entry) <- base.known_theories.iterator } yield (entry.name.theory -> entry)).foldLeft(parent_base.known_theories)(_ + _), known_loaded_files = imports_bases.iterator.map(_.known_loaded_files). foldLeft(parent_base.known_loaded_files)(_ ++ _)) } def dirs: List[Path] = dir :: directories def timeout_ignored: Boolean = !options.bool("timeout_build") || Time.seconds(options.real("timeout")) < Time.ms(1) def timeout: Time = Time.seconds(options.real("timeout") * options.real("timeout_scale")) def document_enabled: Boolean = options.string("document") match { case "" | "false" => false case "pdf" | "true" => true case doc => error("Bad document specification " + quote(doc)) } def document_variants: List[Document_Build.Document_Variant] = { val variants = Library.space_explode(':', options.string("document_variants")). 
map(Document_Build.Document_Variant.parse) val dups = Library.duplicates(variants.map(_.name)) if (dups.nonEmpty) error("Duplicate document variants: " + commas_quote(dups)) variants } def documents: List[Document_Build.Document_Variant] = { val variants = document_variants if (!document_enabled || document_files.isEmpty) Nil else variants } def document_output: Option[Path] = options.string("document_output") match { case "" => None case s => Some(dir + Path.explode(s)) } def browser_info: Boolean = options.bool("browser_info") lazy val bibtex_entries: List[Text.Info[String]] = (for { (document_dir, file) <- document_files.iterator if Bibtex.is_bibtex(file.file_name) info <- Bibtex.entries(File.read(dir + document_dir + file)).iterator } yield info).toList def record_proofs: Boolean = options.int("record_proofs") >= 2 def is_afp: Boolean = chapter == AFP.chapter def is_afp_bulky: Boolean = is_afp && groups.exists(AFP.groups_bulky.contains) } def make_info( options: Options, dir_selected: Boolean, dir: Path, chapter: String, entry: Session_Entry ): Info = { try { val name = entry.name if (exclude_session(name)) error("Bad session name") if (is_pure(name) && entry.parent.isDefined) error("Illegal parent session") if (!is_pure(name) && !entry.parent.isDefined) error("Missing parent session") val session_path = dir + Path.explode(entry.path) val directories = entry.directories.map(dir => session_path + Path.explode(dir)) val session_options = options ++ entry.options val theories = entry.theories.map({ case (opts, thys) => (session_options ++ opts, thys.map({ case ((thy, pos), _) => if (exclude_theory(thy)) error("Bad theory name " + quote(thy) + Position.here(pos)) else (thy, pos) })) }) val global_theories = for { (_, thys) <- entry.theories; ((thy, pos), global) <- thys if global } yield { val thy_name = Path.explode(thy).file_name if (Long_Name.is_qualified(thy_name)) error("Bad qualified name for global theory " + quote(thy_name) + Position.here(pos)) else thy_name } val conditions = theories.flatMap(thys => space_explode(',', thys._1.string("condition"))).distinct.sorted. 
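/* Illustrative sketch (not part of this patch): document_variants above
   rejects duplicate variant names via Library.duplicates, which preserves the
   order of first occurrence.  A minimal stand-alone version with the same
   behaviour -- a hypothetical helper, not the library function itself:

   def duplicates[A](xs: List[A]): List[A] = {
     def dups(rest: List[A], found: List[A]): List[A] = rest match {
       case Nil => Nil
       case x :: tail =>
         if (!found.contains(x) && tail.contains(x)) x :: dups(tail, x :: found)
         else dups(tail, found)
     }
     dups(xs, Nil)
   }
   // duplicates(List("a", "b", "a", "c", "b", "a")) == List("a", "b")
*/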
map(x => (x, Isabelle_System.getenv(x) != "")) val document_files = entry.document_files.map({ case (s1, s2) => (Path.explode(s1), Path.explode(s2)) }) val export_files = entry.export_files.map({ case (dir, prune, pats) => (Path.explode(dir), prune, pats) }) val meta_digest = SHA1.digest( (name, chapter, entry.parent, entry.directories, entry.options, entry.imports, entry.theories_no_position, conditions, entry.document_theories_no_position, entry.document_files) .toString) Info(name, chapter, dir_selected, entry.pos, entry.groups, session_path, entry.parent, entry.description, directories, session_options, entry.imports, theories, global_theories, entry.document_theories, document_files, export_files, entry.export_classpath, meta_digest) } catch { case ERROR(msg) => error(msg + "\nThe error(s) above occurred in session entry " + quote(entry.name) + Position.here(entry.pos)) } } object Selection { val empty: Selection = Selection() val all: Selection = Selection(all_sessions = true) def session(session: String): Selection = Selection(sessions = List(session)) } sealed case class Selection( requirements: Boolean = false, all_sessions: Boolean = false, base_sessions: List[String] = Nil, exclude_session_groups: List[String] = Nil, exclude_sessions: List[String] = Nil, session_groups: List[String] = Nil, sessions: List[String] = Nil ) { def ++ (other: Selection): Selection = Selection( requirements = requirements || other.requirements, all_sessions = all_sessions || other.all_sessions, base_sessions = Library.merge(base_sessions, other.base_sessions), exclude_session_groups = Library.merge(exclude_session_groups, other.exclude_session_groups), exclude_sessions = Library.merge(exclude_sessions, other.exclude_sessions), session_groups = Library.merge(session_groups, other.session_groups), sessions = Library.merge(sessions, other.sessions)) } object Structure { val empty: Structure = make(Nil) def make(infos: List[Info]): Structure = { def add_edges( graph: Graph[String, Info], kind: String, edges: Info => Iterable[String] ) : Graph[String, Info] = { def add_edge(pos: Position.T, name: String, g: Graph[String, Info], parent: String) = { if (!g.defined(parent)) error("Bad " + kind + " session " + quote(parent) + " for " + quote(name) + Position.here(pos)) try { g.add_edge_acyclic(parent, name) } catch { case exn: Graph.Cycles[_] => error(cat_lines(exn.cycles.map(cycle => "Cyclic session dependency of " + cycle.map(c => quote(c.toString)).mkString(" via "))) + Position.here(pos)) } } graph.iterator.foldLeft(graph) { case (g, (name, (info, _))) => edges(info).foldLeft(g)(add_edge(info.pos, name, _, _)) } } val info_graph = infos.foldLeft(Graph.string[Info]) { case (graph, info) => if (graph.defined(info.name)) error("Duplicate session " + quote(info.name) + Position.here(info.pos) + Position.here(graph.get_node(info.name).pos)) else graph.new_node(info.name, info) } val build_graph = add_edges(info_graph, "parent", _.parent) val imports_graph = add_edges(build_graph, "imports", _.imports) val session_positions: List[(String, Position.T)] = (for ((name, (info, _)) <- info_graph.iterator) yield (name, info.pos)).toList val session_directories: Map[JFile, String] = (for { session <- imports_graph.topological_order.iterator info = info_graph.get_node(session) dir <- info.dirs.iterator } yield (info, dir)).foldLeft(Map.empty[JFile, String]) { case (dirs, (info, dir)) => val session = info.name val canonical_dir = dir.canonical_file dirs.get(canonical_dir) match { case Some(session1) => val info1 = 
info_graph.get_node(session1) error("Duplicate use of directory " + dir + "\n for session " + quote(session1) + Position.here(info1.pos) + "\n vs. session " + quote(session) + Position.here(info.pos)) case None => dirs + (canonical_dir -> session) } } val global_theories: Map[String, String] = (for { session <- imports_graph.topological_order.iterator info = info_graph.get_node(session) thy <- info.global_theories.iterator } yield (info, thy)).foldLeft(Thy_Header.bootstrap_global_theories.toMap) { case (global, (info, thy)) => val qualifier = info.name global.get(thy) match { case Some(qualifier1) if qualifier != qualifier1 => error("Duplicate global theory " + quote(thy) + Position.here(info.pos)) case _ => global + (thy -> qualifier) } } new Structure( session_positions, session_directories, global_theories, build_graph, imports_graph) } } final class Structure private[Sessions]( val session_positions: List[(String, Position.T)], val session_directories: Map[JFile, String], val global_theories: Map[String, String], val build_graph: Graph[String, Info], val imports_graph: Graph[String, Info] ) { sessions_structure => def bootstrap: Base = Base( session_directories = session_directories, global_theories = global_theories, overall_syntax = Thy_Header.bootstrap_syntax) def dest_session_directories: List[(String, String)] = for ((file, session) <- session_directories.toList) yield (File.standard_path(file), session) lazy val chapters: SortedMap[String, List[Info]] = build_graph.iterator.foldLeft(SortedMap.empty[String, List[Info]]) { case (chs, (_, (info, _))) => chs + (info.chapter -> (info :: chs.getOrElse(info.chapter, Nil))) } def build_graph_display: Graph_Display.Graph = Graph_Display.make_graph(build_graph) def imports_graph_display: Graph_Display.Graph = Graph_Display.make_graph(imports_graph) def defined(name: String): Boolean = imports_graph.defined(name) def apply(name: String): Info = imports_graph.get_node(name) def get(name: String): Option[Info] = if (defined(name)) Some(apply(name)) else None def theory_qualifier(name: String): String = global_theories.getOrElse(name, Long_Name.qualifier(name)) def check_sessions(names: List[String]): Unit = { val bad_sessions = SortedSet(names.filterNot(defined): _*).toList if (bad_sessions.nonEmpty) error("Undefined session(s): " + commas_quote(bad_sessions)) } def check_sessions(sel: Selection): Unit = check_sessions(sel.base_sessions ::: sel.exclude_sessions ::: sel.sessions) private def selected(graph: Graph[String, Info], sel: Selection): List[String] = { check_sessions(sel) val select_group = sel.session_groups.toSet val select_session = sel.sessions.toSet ++ imports_graph.all_succs(sel.base_sessions) val selected0 = if (sel.all_sessions) graph.keys else { (for { (name, (info, _)) <- graph.iterator if info.dir_selected || select_session(name) || info.groups.exists(select_group) } yield name).toList } if (sel.requirements) (graph.all_preds(selected0).toSet -- selected0).toList else selected0 } def selection(sel: Selection): Structure = { check_sessions(sel) val excluded = { val exclude_group = sel.exclude_session_groups.toSet val exclude_group_sessions = (for { (name, (info, _)) <- imports_graph.iterator if imports_graph.get_node(name).groups.exists(exclude_group) } yield name).toList imports_graph.all_succs(exclude_group_sessions ::: sel.exclude_sessions).toSet } def restrict(graph: Graph[String, Info]): Graph[String, Info] = { val sessions = graph.all_preds(selected(graph, sel)).filterNot(excluded) 
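/* Illustrative sketch (not part of this patch): `selected` above interprets
   Selection.requirements as "all predecessors of the chosen sessions, minus
   the chosen sessions themselves".  The same computation over an abstract
   transitive-predecessor function -- apply_requirements and preds are
   hypothetical:

   def apply_requirements(
     selected0: List[String],
     preds: List[String] => List[String]  // transitive closure, start nodes included
   ): List[String] =
     (preds(selected0).toSet -- selected0).toList
*/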
graph.restrict(graph.all_preds(sessions).toSet) } new Structure( session_positions, session_directories, global_theories, restrict(build_graph), restrict(imports_graph)) } def selection(session: String): Structure = selection(Selection.session(session)) def selection_deps( selection: Selection, progress: Progress = new Progress, loading_sessions: Boolean = false, inlined_files: Boolean = false, verbose: Boolean = false ): Deps = { val deps = Sessions.deps(sessions_structure.selection(selection), progress = progress, inlined_files = inlined_files, verbose = verbose) if (loading_sessions) { val selection_size = deps.sessions_structure.build_graph.size if (selection_size > 1) progress.echo("Loading " + selection_size + " sessions ...") } deps } def build_selection(sel: Selection): List[String] = selected(build_graph, sel) def build_descendants(ss: List[String]): List[String] = build_graph.all_succs(ss) def build_requirements(ss: List[String]): List[String] = build_graph.all_preds_rev(ss) def build_topological_order: List[String] = build_graph.topological_order def build_hierarchy(session: String): List[String] = build_graph.all_preds(List(session)) def imports_selection(sel: Selection): List[String] = selected(imports_graph, sel) def imports_descendants(ss: List[String]): List[String] = imports_graph.all_succs(ss) def imports_requirements(ss: List[String]): List[String] = imports_graph.all_preds_rev(ss) def imports_topological_order: List[String] = imports_graph.topological_order def imports_hierarchy(session: String): List[String] = imports_graph.all_preds(List(session)) def bibtex_entries: List[(String, List[String])] = build_topological_order.flatMap(name => apply(name).bibtex_entries match { case Nil => None case entries => Some(name -> entries.map(_.info)) }) override def toString: String = imports_graph.keys_iterator.mkString("Sessions.Structure(", ", ", ")") } /* parser */ private val CHAPTER = "chapter" private val SESSION = "session" private val IN = "in" private val DESCRIPTION = "description" private val DIRECTORIES = "directories" private val OPTIONS = "options" private val SESSIONS = "sessions" private val THEORIES = "theories" private val GLOBAL = "global" private val DOCUMENT_THEORIES = "document_theories" private val DOCUMENT_FILES = "document_files" private val EXPORT_FILES = "export_files" private val EXPORT_CLASSPATH = "export_classpath" val root_syntax: Outer_Syntax = Outer_Syntax.empty + "(" + ")" + "+" + "," + "=" + "[" + "]" + GLOBAL + IN + (CHAPTER, Keyword.THY_DECL) + (SESSION, Keyword.THY_DECL) + (DESCRIPTION, Keyword.QUASI_COMMAND) + (DIRECTORIES, Keyword.QUASI_COMMAND) + (OPTIONS, Keyword.QUASI_COMMAND) + (SESSIONS, Keyword.QUASI_COMMAND) + (THEORIES, Keyword.QUASI_COMMAND) + (DOCUMENT_THEORIES, Keyword.QUASI_COMMAND) + (DOCUMENT_FILES, Keyword.QUASI_COMMAND) + (EXPORT_FILES, Keyword.QUASI_COMMAND) + (EXPORT_CLASSPATH, Keyword.QUASI_COMMAND) abstract class Entry sealed case class Chapter(name: String) extends Entry sealed case class Session_Entry( pos: Position.T, name: String, groups: List[String], path: String, parent: Option[String], description: String, options: List[Options.Spec], imports: List[String], directories: List[String], theories: List[(List[Options.Spec], List[((String, Position.T), Boolean)])], document_theories: List[(String, Position.T)], document_files: List[(String, String)], export_files: List[(String, Int, List[String])], export_classpath: List[String] ) extends Entry { def theories_no_position: List[(List[Options.Spec], List[(String, 
Boolean)])] = theories.map({ case (a, b) => (a, b.map({ case ((c, _), d) => (c, d) })) }) def document_theories_no_position: List[String] = document_theories.map(_._1) } private object Parsers extends Options.Parsers { private val chapter: Parser[Chapter] = { val chapter_name = atom("chapter name", _.is_name) command(CHAPTER) ~! chapter_name ^^ { case _ ~ a => Chapter(a) } } private val session_entry: Parser[Session_Entry] = { val option = option_name ~ opt($$$("=") ~! option_value ^^ { case _ ~ x => x }) ^^ { case x ~ y => (x, y) } val options = $$$("[") ~> rep1sep(option, $$$(",")) <~ $$$("]") val theory_entry = position(theory_name) ~ opt_keyword(GLOBAL) ^^ { case x ~ y => (x, y) } val theories = $$$(THEORIES) ~! ((options | success(Nil)) ~ rep1(theory_entry)) ^^ { case _ ~ (x ~ y) => (x, y) } val in_path = $$$("(") ~! ($$$(IN) ~ path ~ $$$(")")) ^^ { case _ ~ (_ ~ x ~ _) => x } val document_theories = $$$(DOCUMENT_THEORIES) ~! rep1(position(name)) ^^ { case _ ~ x => x } val document_files = $$$(DOCUMENT_FILES) ~! ((in_path | success("document")) ~ rep1(path)) ^^ { case _ ~ (x ~ y) => y.map((x, _)) } val prune = $$$("[") ~! (nat ~ $$$("]")) ^^ { case _ ~ (x ~ _) => x } | success(0) val export_files = $$$(EXPORT_FILES) ~! ((in_path | success("export")) ~ prune ~ rep1(embedded)) ^^ { case _ ~ (x ~ y ~ z) => (x, y, z) } val export_classpath = $$$(EXPORT_CLASSPATH) ~! (rep1(embedded) | success(List("*:classpath/*.jar"))) ^^ { case _ ~ x => x } command(SESSION) ~! (position(session_name) ~ (($$$("(") ~! (rep1(name) <~ $$$(")")) ^^ { case _ ~ x => x }) | success(Nil)) ~ (($$$(IN) ~! path ^^ { case _ ~ x => x }) | success(".")) ~ ($$$("=") ~! (opt(session_name ~! $$$("+") ^^ { case x ~ _ => x }) ~ (($$$(DESCRIPTION) ~! text ^^ { case _ ~ x => x }) | success("")) ~ (($$$(OPTIONS) ~! options ^^ { case _ ~ x => x }) | success(Nil)) ~ (($$$(SESSIONS) ~! rep1(session_name) ^^ { case _ ~ x => x }) | success(Nil)) ~ (($$$(DIRECTORIES) ~! 
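/* Illustrative sketch (not part of this patch): the SESSION grammar above is
   written with Isabelle's token-level combinators ($$$, ~!, opt, rep1, ^^).
   An analogous, heavily simplified grammar using the separate
   scala-parser-combinators library -- the Entry shape and Root_Parser are
   hypothetical and cover only a fragment of the real syntax:

   import scala.util.parsing.combinator.RegexParsers

   object Root_Parser extends RegexParsers {
     case class Entry(name: String, parent: Option[String], theories: List[String])

     val name: Parser[String] = """[A-Za-z0-9_-]+""".r
     val entry: Parser[Entry] =
       "session" ~> name ~ ("=" ~> opt(name <~ "+")) ~
         opt("theories" ~> rep1(name)) ^^ {
           case a ~ b ~ thys => Entry(a, b, thys.getOrElse(Nil))
         }

     def parse_entry(s: String): Entry =
       parseAll(entry, s) match {
         case Success(res, _) => res
         case bad => sys.error(bad.toString)  // mirrors error(bad.toString) above
       }
   }
   // Root_Parser.parse_entry("session Foo = Pure + theories A B")
*/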
rep1(path) ^^ { case _ ~ x => x }) | success(Nil)) ~ rep(theories) ~ (opt(document_theories) ^^ (x => x.getOrElse(Nil))) ~ (rep(document_files) ^^ (x => x.flatten)) ~ rep(export_files) ~ opt(export_classpath)))) ^^ { case _ ~ ((a, pos) ~ b ~ c ~ (_ ~ (d ~ e ~ f ~ g ~ h ~ i ~ j ~ k ~ l ~ m))) => Session_Entry(pos, a, b, c, d, e, f, g, h, i, j, k, l, m.getOrElse(Nil)) } } def parse_root(path: Path): List[Entry] = { val toks = Token.explode(root_syntax.keywords, File.read(path)) val start = Token.Pos.file(path.implode) parse_all(rep(chapter | session_entry), Token.reader(toks, start)) match { case Success(result, _) => result case bad => error(bad.toString) } } } def parse_root(path: Path): List[Entry] = Parsers.parse_root(path) def parse_root_entries(path: Path): List[Session_Entry] = for (entry <- Parsers.parse_root(path) if entry.isInstanceOf[Session_Entry]) yield entry.asInstanceOf[Session_Entry] def read_root(options: Options, select: Boolean, path: Path): List[Info] = { var entry_chapter = UNSORTED val infos = new mutable.ListBuffer[Info] parse_root(path).foreach { case Chapter(name) => entry_chapter = name case entry: Session_Entry => infos += make_info(options, select, path.dir, entry_chapter, entry) } infos.toList } def parse_roots(roots: Path): List[String] = { for { line <- split_lines(File.read(roots)) if !(line == "" || line.startsWith("#")) } yield line } /* load sessions from certain directories */ def is_session_dir(dir: Path): Boolean = (dir + ROOT).is_file || (dir + ROOTS).is_file def check_session_dir(dir: Path): Path = if (is_session_dir(dir)) File.pwd() + dir.expand else error("Bad session root directory (missing ROOT or ROOTS): " + dir.expand.toString) def directories(dirs: List[Path], select_dirs: List[Path]): List[(Boolean, Path)] = { val default_dirs = Components.directories().filter(is_session_dir) for { (select, dir) <- (default_dirs ::: dirs).map((false, _)) ::: select_dirs.map((true, _)) } yield (select, dir.canonical) } def load_structure( options: Options, dirs: List[Path] = Nil, select_dirs: List[Path] = Nil, infos: List[Info] = Nil ): Structure = { def load_dir(select: Boolean, dir: Path): List[(Boolean, Path)] = load_root(select, dir) ::: load_roots(select, dir) def load_root(select: Boolean, dir: Path): List[(Boolean, Path)] = { val root = dir + ROOT if (root.is_file) List((select, root)) else Nil } def load_roots(select: Boolean, dir: Path): List[(Boolean, Path)] = { val roots = dir + ROOTS if (roots.is_file) { for { entry <- parse_roots(roots) dir1 = try { check_session_dir(dir + Path.explode(entry)) } catch { case ERROR(msg) => error(msg + "\nThe error(s) above occurred in session catalog " + roots.toString) } res <- load_dir(select, dir1) } yield res } else Nil } val roots = for { (select, dir) <- directories(dirs, select_dirs) res <- load_dir(select, check_session_dir(dir)) } yield res val unique_roots = roots.foldLeft(Map.empty[JFile, (Boolean, Path)]) { case (m, (select, path)) => val file = path.canonical_file m.get(file) match { case None => m + (file -> (select, path)) case Some((select1, path1)) => m + (file -> (select1 || select, path1)) } }.toList.map(_._2) Structure.make(unique_roots.flatMap(p => read_root(options, p._1, p._2)) ::: infos) } /* Isabelle tool wrapper */ val isabelle_tool = Isabelle_Tool("sessions", "explore structure of Isabelle sessions", Scala_Project.here, { args => var base_sessions: List[String] = Nil var select_dirs: List[Path] = Nil var requirements = false var exclude_session_groups: List[String] = Nil var all_sessions = 
false var dirs: List[Path] = Nil var session_groups: List[String] = Nil var exclude_sessions: List[String] = Nil val getopts = Getopts(""" Usage: isabelle sessions [OPTIONS] [SESSIONS ...] Options are: -B NAME include session NAME and all descendants -D DIR include session directory and select its sessions -R refer to requirements of selected sessions -X NAME exclude sessions from group NAME and all descendants -a select all sessions -d DIR include session directory -g NAME select session group NAME -x NAME exclude session NAME and all descendants Explore the structure of Isabelle sessions and print result names in topological order (on stdout). """, "B:" -> (arg => base_sessions = base_sessions ::: List(arg)), "D:" -> (arg => select_dirs = select_dirs ::: List(Path.explode(arg))), "R" -> (_ => requirements = true), "X:" -> (arg => exclude_session_groups = exclude_session_groups ::: List(arg)), "a" -> (_ => all_sessions = true), "d:" -> (arg => dirs = dirs ::: List(Path.explode(arg))), "g:" -> (arg => session_groups = session_groups ::: List(arg)), "x:" -> (arg => exclude_sessions = exclude_sessions ::: List(arg))) val sessions = getopts(args) val options = Options.init() val selection = Selection(requirements = requirements, all_sessions = all_sessions, base_sessions = base_sessions, exclude_session_groups = exclude_session_groups, exclude_sessions = exclude_sessions, session_groups = session_groups, sessions = sessions) val sessions_structure = load_structure(options, dirs = dirs, select_dirs = select_dirs).selection(selection) for (name <- sessions_structure.imports_topological_order) { Output.writeln(name, stdout = true) } }) /** heap file with SHA1 digest **/ private val sha1_prefix = "SHA1:" def read_heap_digest(heap: Path): Option[String] = { if (heap.is_file) { using(FileChannel.open(heap.java_path, StandardOpenOption.READ)) { file => val len = file.size val n = sha1_prefix.length + SHA1.digest_length if (len >= n) { file.position(len - n) val buf = ByteBuffer.allocate(n) var i = 0 var m = 0 - var cont = true - while (cont) { + while ({ m = file.read(buf) if (m != -1) i += m - cont = (m != -1 && n > i) - } + m != -1 && n > i + }) () if (i == n) { val prefix = new String(buf.array(), 0, sha1_prefix.length, UTF8.charset) val s = new String(buf.array(), sha1_prefix.length, SHA1.digest_length, UTF8.charset) if (prefix == sha1_prefix) Some(s) else None } else None } else None } } else None } def write_heap_digest(heap: Path): String = read_heap_digest(heap) match { case None => val s = SHA1.digest(heap).toString File.append(heap, sha1_prefix + s) s case Some(s) => s } /** persistent store **/ object Session_Info { val session_name = SQL.Column.string("session_name").make_primary_key // Build_Log.Session_Info val session_timing = SQL.Column.bytes("session_timing") val command_timings = SQL.Column.bytes("command_timings") val theory_timings = SQL.Column.bytes("theory_timings") val ml_statistics = SQL.Column.bytes("ml_statistics") val task_statistics = SQL.Column.bytes("task_statistics") val errors = SQL.Column.bytes("errors") val build_log_columns = List(session_name, session_timing, command_timings, theory_timings, ml_statistics, task_statistics, errors) // Build.Session_Info val sources = SQL.Column.string("sources") val input_heaps = SQL.Column.string("input_heaps") val output_heap = SQL.Column.string("output_heap") val return_code = SQL.Column.int("return_code") val build_columns = List(sources, input_heaps, output_heap, return_code) val table = SQL.Table("isabelle_session_info", 
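/* Illustrative sketch (not part of this patch): read_heap_digest above scans
   for a trailing "SHA1:<digest>" marker that write_heap_digest appends to the
   heap file, and the hunk rewrites its read loop into the Scala 3
   `while ({...}) ()` do-while idiom.  A simplified stand-alone reader using
   RandomAccessFile instead of FileChannel -- trailing_digest is hypothetical:

   import java.io.RandomAccessFile

   val sha1_prefix = "SHA1:"
   val digest_length = 40  // hex characters of a SHA-1 digest

   def trailing_digest(path: String): Option[String] = {
     val file = new RandomAccessFile(path, "r")
     try {
       val n = sha1_prefix.length + digest_length
       if (file.length >= n) {
         file.seek(file.length - n)
         val buf = new Array[Byte](n)
         file.readFully(buf)
         val s = new String(buf, "US-ASCII")
         if (s.startsWith(sha1_prefix)) Some(s.substring(sha1_prefix.length))
         else None
       }
       else None
     }
     finally { file.close() }
   }
*/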
build_log_columns ::: build_columns) } class Database_Context private[Sessions]( val store: Sessions.Store, database_server: Option[SQL.Database] ) extends AutoCloseable { def cache: Term.Cache = store.cache def close(): Unit = database_server.foreach(_.close()) def output_database[A](session: String)(f: SQL.Database => A): A = database_server match { case Some(db) => f(db) case None => using(store.open_database(session, output = true))(f) } def input_database[A](session: String)(f: (SQL.Database, String) => Option[A]): Option[A] = database_server match { case Some(db) => f(db, session) case None => store.try_open_database(session) match { case Some(db) => using(db)(f(_, session)) case None => None } } def read_export( sessions: List[String], theory_name: String, name: String ): Option[Export.Entry] = { val attempts = database_server match { case Some(db) => sessions.view.map(session_name => Export.Entry_Name(session = session_name, theory = theory_name, name = name) .read(db, store.cache)) case None => sessions.view.map(session_name => store.try_open_database(session_name) match { case Some(db) => using(db) { _ => Export.Entry_Name(session = session_name, theory = theory_name, name = name) .read(db, store.cache) } case None => None }) } attempts.collectFirst({ case Some(entry) => entry }) } def get_export( session_hierarchy: List[String], theory_name: String, name: String): Export.Entry = read_export(session_hierarchy, theory_name, name) getOrElse Export.empty_entry(theory_name, name) def get_classpath(structure: Structure, session: String): List[File.Content_Bytes] = { (for { name <- structure.build_requirements(List(session)) patterns = structure(name).export_classpath if patterns.nonEmpty } yield { input_database(name)((db, _) => db.transaction { val matcher = Export.make_matcher(patterns) val res = for { entry_name <- Export.read_entry_names(db, name) if matcher(entry_name) entry <- entry_name.read(db, store.cache) } yield File.Content(entry.entry_name.make_path(), entry.uncompressed) Some(res) } ).getOrElse(Nil) }).flatten } override def toString: String = { val s = database_server match { case Some(db) => db.toString case None => "input_dirs = " + store.input_dirs.map(_.absolute).mkString(", ") } "Database_Context(" + s + ")" } } def store(options: Options, cache: Term.Cache = Term.Cache.make()): Store = new Store(options, cache) class Store private[Sessions](val options: Options, val cache: Term.Cache) { store => override def toString: String = "Store(output_dir = " + output_dir.absolute + ")" /* directories */ val system_output_dir: Path = Path.explode("$ISABELLE_HEAPS_SYSTEM/$ML_IDENTIFIER") val user_output_dir: Path = Path.explode("$ISABELLE_HEAPS/$ML_IDENTIFIER") def system_heaps: Boolean = options.bool("system_heaps") val output_dir: Path = if (system_heaps) system_output_dir else user_output_dir val input_dirs: List[Path] = if (system_heaps) List(system_output_dir) else List(user_output_dir, system_output_dir) def presentation_dir: Path = if (system_heaps) Path.explode("$ISABELLE_BROWSER_INFO_SYSTEM") else Path.explode("$ISABELLE_BROWSER_INFO") /* file names */ def heap(name: String): Path = Path.basic(name) def database(name: String): Path = Path.basic("log") + Path.basic(name).ext("db") def log(name: String): Path = Path.basic("log") + Path.basic(name) def log_gz(name: String): Path = log(name).ext("gz") def output_heap(name: String): Path = output_dir + heap(name) def output_database(name: String): Path = output_dir + database(name) def output_log(name: String): Path = 
output_dir + log(name) def output_log_gz(name: String): Path = output_dir + log_gz(name) def prepare_output_dir(): Unit = Isabelle_System.make_directory(output_dir + Path.basic("log")) /* heap */ def find_heap(name: String): Option[Path] = input_dirs.map(_ + heap(name)).find(_.is_file) def find_heap_digest(name: String): Option[String] = find_heap(name).flatMap(read_heap_digest) def the_heap(name: String): Path = find_heap(name) getOrElse error("Missing heap image for session " + quote(name) + " -- expected in:\n" + cat_lines(input_dirs.map(dir => " " + dir.expand.implode))) /* database */ def find_database(name: String): Option[Path] = input_dirs.map(_ + database(name)).find(_.is_file) def database_server: Boolean = options.bool("build_database_server") def open_database_server(): SQL.Database = PostgreSQL.open_database( user = options.string("build_database_user"), password = options.string("build_database_password"), database = options.string("build_database_name"), host = options.string("build_database_host"), port = options.int("build_database_port"), ssh = options.proper_string("build_database_ssh_host").map(ssh_host => SSH.open_session(options, host = ssh_host, user = options.string("build_database_ssh_user"), port = options.int("build_database_ssh_port"))), ssh_close = true) def open_database_context(): Database_Context = new Database_Context(store, if (database_server) Some(open_database_server()) else None) def try_open_database(name: String, output: Boolean = false): Option[SQL.Database] = { def check(db: SQL.Database): Option[SQL.Database] = if (output || session_info_exists(db)) Some(db) else { db.close(); None } if (database_server) check(open_database_server()) else if (output) Some(SQLite.open_database(output_database(name))) else { (for { dir <- input_dirs.view path = dir + database(name) if path.is_file db <- check(SQLite.open_database(path)) } yield db).headOption } } def open_database(name: String, output: Boolean = false): SQL.Database = try_open_database(name, output = output) getOrElse error("Missing build database for session " + quote(name)) def clean_output(name: String): (Boolean, Boolean) = { val relevant_db = database_server && { try_open_database(name) match { case Some(db) => try { db.transaction { val relevant_db = session_info_defined(db, name) init_session_info(db, name) relevant_db } } finally { db.close() } case None => false } } val del = for { dir <- (if (system_heaps) List(user_output_dir, system_output_dir) else List(user_output_dir)) file <- List(heap(name), database(name), log(name), log_gz(name)) path = dir + file if path.is_file } yield path.file.delete val relevant = relevant_db || del.nonEmpty val ok = del.forall(b => b) (relevant, ok) } /* SQL database content */ def read_bytes(db: SQL.Database, name: String, column: SQL.Column): Bytes = db.using_statement(Session_Info.table.select(List(column), Session_Info.session_name.where_equal(name))) { stmt => val res = stmt.execute_query() if (!res.next()) Bytes.empty else res.bytes(column) } def read_properties(db: SQL.Database, name: String, column: SQL.Column): List[Properties.T] = Properties.uncompress(read_bytes(db, name, column), cache = cache) /* session info */ def init_session_info(db: SQL.Database, name: String): Unit = { db.transaction { db.create_table(Session_Info.table) db.using_statement( Session_Info.table.delete(Session_Info.session_name.where_equal(name)))(_.execute()) db.create_table(Export.Data.table) db.using_statement( 
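/* Illustrative sketch (not part of this patch): try_open_database above probes
   the input directories lazily and keeps the first database that really
   contains session info, closing rejected candidates.  The same
   first-match-with-cleanup pattern in isolation -- first_valid, open and valid
   are hypothetical:

   def first_valid[A <: AutoCloseable](paths: List[String])(
       open: String => A, valid: A => Boolean): Option[A] =
     (for {
       path <- paths.view                              // lazy: stop at first hit
       a = open(path)
       b <- if (valid(a)) Some(a) else { a.close(); None }
     } yield b).headOption
*/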
Export.Data.table.delete(Export.Data.session_name.where_equal(name)))(_.execute()) db.create_table(Document_Build.Data.table) db.using_statement( Document_Build.Data.table.delete( Document_Build.Data.session_name.where_equal(name)))(_.execute()) } } def session_info_exists(db: SQL.Database): Boolean = { val tables = db.tables tables.contains(Session_Info.table.name) && tables.contains(Export.Data.table.name) } def session_info_defined(db: SQL.Database, name: String): Boolean = db.transaction { session_info_exists(db) && { db.using_statement( Session_Info.table.select(List(Session_Info.session_name), Session_Info.session_name.where_equal(name)))(stmt => stmt.execute_query().next()) } } def write_session_info( db: SQL.Database, name: String, build_log: Build_Log.Session_Info, build: Build.Session_Info ): Unit = { db.transaction { db.using_statement(Session_Info.table.insert()) { stmt => stmt.string(1) = name stmt.bytes(2) = Properties.encode(build_log.session_timing) stmt.bytes(3) = Properties.compress(build_log.command_timings, cache = cache.xz) stmt.bytes(4) = Properties.compress(build_log.theory_timings, cache = cache.xz) stmt.bytes(5) = Properties.compress(build_log.ml_statistics, cache = cache.xz) stmt.bytes(6) = Properties.compress(build_log.task_statistics, cache = cache.xz) stmt.bytes(7) = Build_Log.compress_errors(build_log.errors, cache = cache.xz) stmt.string(8) = build.sources stmt.string(9) = cat_lines(build.input_heaps) stmt.string(10) = build.output_heap getOrElse "" stmt.int(11) = build.return_code stmt.execute() } } } def read_session_timing(db: SQL.Database, name: String): Properties.T = Properties.decode(read_bytes(db, name, Session_Info.session_timing), cache = cache) def read_command_timings(db: SQL.Database, name: String): List[Properties.T] = read_properties(db, name, Session_Info.command_timings) def read_theory_timings(db: SQL.Database, name: String): List[Properties.T] = read_properties(db, name, Session_Info.theory_timings) def read_ml_statistics(db: SQL.Database, name: String): List[Properties.T] = read_properties(db, name, Session_Info.ml_statistics) def read_task_statistics(db: SQL.Database, name: String): List[Properties.T] = read_properties(db, name, Session_Info.task_statistics) def read_theories(db: SQL.Database, name: String): List[String] = read_theory_timings(db, name).flatMap(Markup.Name.unapply) def read_errors(db: SQL.Database, name: String): List[String] = Build_Log.uncompress_errors(read_bytes(db, name, Session_Info.errors), cache = cache) def read_build(db: SQL.Database, name: String): Option[Build.Session_Info] = { if (db.tables.contains(Session_Info.table.name)) { db.using_statement(Session_Info.table.select(Session_Info.build_columns, Session_Info.session_name.where_equal(name))) { stmt => val res = stmt.execute_query() if (!res.next()) None else { Some( Build.Session_Info( res.string(Session_Info.sources), split_lines(res.string(Session_Info.input_heaps)), res.string(Session_Info.output_heap) match { case "" => None case s => Some(s) }, res.int(Session_Info.return_code))) } } } else None } } } diff --git a/src/Pure/library.scala b/src/Pure/library.scala --- a/src/Pure/library.scala +++ b/src/Pure/library.scala @@ -1,293 +1,292 @@ /* Title: Pure/library.scala Author: Makarius Basic library. 
*/ package isabelle import scala.annotation.tailrec import scala.collection.mutable import scala.util.matching.Regex object Library { /* resource management */ def using[A <: AutoCloseable, B](a: A)(f: A => B): B = { try { f(a) } finally { if (a != null) a.close() } } /* integers */ private val small_int = 10000 private lazy val small_int_table = { val array = new Array[String](small_int) for (i <- 0 until small_int) array(i) = i.toString array } def is_small_int(s: String): Boolean = { val len = s.length 1 <= len && len <= 4 && s.forall(c => '0' <= c && c <= '9') && (len == 1 || s(0) != '0') } def signed_string_of_long(i: Long): String = if (0 <= i && i < small_int) small_int_table(i.toInt) else i.toString def signed_string_of_int(i: Int): String = if (0 <= i && i < small_int) small_int_table(i) else i.toString /* separated chunks */ def separate[A](s: A, list: List[A]): List[A] = { val result = new mutable.ListBuffer[A] var first = true for (x <- list) { if (first) { first = false result += x } else { result += s result += x } } result.toList } def separated_chunks(sep: Char => Boolean, source: CharSequence): Iterator[CharSequence] = new Iterator[CharSequence] { private val end = source.length private def next_chunk(i: Int): Option[(CharSequence, Int)] = { if (i < end) { var j = i - var cont = true - while (cont) { + while ({ j += 1 - cont = (j < end && !sep(source.charAt(j))) - } + j < end && !sep(source.charAt(j)) + }) () Some((source.subSequence(i + 1, j), j)) } else None } private var state: Option[(CharSequence, Int)] = if (end == 0) None else next_chunk(-1) def hasNext: Boolean = state.isDefined def next(): CharSequence = state match { case Some((s, i)) => state = next_chunk(i); s case None => Iterator.empty.next() } } def space_explode(sep: Char, str: String): List[String] = separated_chunks(_ == sep, str).map(_.toString).toList /* lines */ def terminate_lines(lines: IterableOnce[String]): String = lines.iterator.mkString("", "\n", "\n") def cat_lines(lines: IterableOnce[String]): String = lines.iterator.mkString("\n") def split_lines(str: String): List[String] = space_explode('\n', str) def prefix_lines(prfx: String, str: String): String = isabelle.setup.Library.prefix_lines(prfx, str) def indent_lines(n: Int, str: String): String = prefix_lines(Symbol.spaces(n), str) def first_line(source: CharSequence): String = { val lines = separated_chunks(_ == '\n', source) if (lines.hasNext) lines.next().toString else "" } def trim_line(s: String): String = isabelle.setup.Library.trim_line(s) def trim_split_lines(s: String): List[String] = split_lines(trim_line(s)).map(trim_line) def encode_lines(s: String): String = s.replace('\n', '\u000b') def decode_lines(s: String): String = s.replace('\u000b', '\n') /* strings */ def make_string(f: StringBuilder => Unit, capacity: Int = 16): String = { val s = new StringBuilder(capacity) f(s) s.toString } def try_unprefix(prfx: String, s: String): Option[String] = if (s.startsWith(prfx)) Some(s.substring(prfx.length)) else None def try_unsuffix(sffx: String, s: String): Option[String] = if (s.endsWith(sffx)) Some(s.substring(0, s.length - sffx.length)) else None def perhaps_unprefix(prfx: String, s: String): String = try_unprefix(prfx, s) getOrElse s def perhaps_unsuffix(sffx: String, s: String): String = try_unsuffix(sffx, s) getOrElse s def isolate_substring(s: String): String = new String(s.toCharArray) def strip_ansi_color(s: String): String = s.replaceAll("\u001b\\[\\d+m", "") /* quote */ def single_quote(s: String): String = "'" + s + "'" def 
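/* Illustrative sketch (not part of this patch): the loop hunks in this patch
   (separated_chunks here, read_heap_digest in sessions.scala) replace the
   `var cont = true; while (cont) {...}` encoding by the Scala 3 do-while idiom
   `while ({ body; condition }) ()`, where the block runs first and its Boolean
   value decides the next round.  A stand-alone example with the same shape --
   chunks is hypothetical and mimics space_explode for a single separator:

   def chunks(s: String, sep: Char): List[String] = {
     val buf = scala.collection.mutable.ListBuffer.empty[String]
     var i = 0
     while ({
       val j = s.indexOf(sep, i)
       if (j < 0) { buf += s.substring(i); false }        // last chunk: stop
       else { buf += s.substring(i, j); i = j + 1; true } // continue after sep
     }) ()
     buf.toList
   }
   // chunks("a,b,,c", ',') == List("a", "b", "", "c")
*/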
quote(s: String): String = "\"" + s + "\"" def try_unquote(s: String): Option[String] = if (s.startsWith("\"") && s.endsWith("\"")) Some(s.substring(1, s.length - 1)) else None def perhaps_unquote(s: String): String = try_unquote(s) getOrElse s def commas(ss: Iterable[String]): String = ss.iterator.mkString(", ") def commas_quote(ss: Iterable[String]): String = ss.iterator.map(quote).mkString(", ") /* CharSequence */ class Reverse(text: CharSequence, start: Int, end: Int) extends CharSequence { require(0 <= start && start <= end && end <= text.length, "bad reverse range") def this(text: CharSequence) = this(text, 0, text.length) def length: Int = end - start def charAt(i: Int): Char = text.charAt(end - i - 1) def subSequence(i: Int, j: Int): CharSequence = if (0 <= i && i <= j && j <= length) new Reverse(text, end - j, end - i) else throw new IndexOutOfBoundsException override def toString: String = { val buf = new StringBuilder(length) for (i <- 0 until length) buf.append(charAt(i)) buf.toString } } class Line_Termination(text: CharSequence) extends CharSequence { def length: Int = text.length + 1 def charAt(i: Int): Char = if (i == text.length) '\n' else text.charAt(i) def subSequence(i: Int, j: Int): CharSequence = if (j == text.length + 1) new Line_Termination(text.subSequence(i, j - 1)) else text.subSequence(i, j) override def toString: String = text.toString + "\n" } /* regular expressions */ def make_regex(s: String): Option[Regex] = try { Some(new Regex(s)) } catch { case ERROR(_) => None } def is_regex_meta(c: Char): Boolean = """()[]{}\^$|?*+.<>-=!""".contains(c) def escape_regex(s: String): String = if (s.exists(is_regex_meta)) { (for (c <- s.iterator) yield { if (is_regex_meta(c)) "\\" + c.toString else c.toString }).mkString } else s /* lists */ def take_prefix[A](pred: A => Boolean, xs: List[A]): (List[A], List[A]) = (xs.takeWhile(pred), xs.dropWhile(pred)) def take_suffix[A](pred: A => Boolean, xs: List[A]): (List[A], List[A]) = { val rev_xs = xs.reverse (rev_xs.dropWhile(pred).reverse, rev_xs.takeWhile(pred).reverse) } def trim[A](pred: A => Boolean, xs: List[A]): List[A] = take_suffix(pred, take_prefix(pred, xs)._2)._1 def member[A, B](xs: List[A])(x: B): Boolean = xs.contains(x) def insert[A](x: A)(xs: List[A]): List[A] = if (xs.contains(x)) xs else x :: xs def remove[A, B](x: B)(xs: List[A]): List[A] = if (member(xs)(x)) xs.filterNot(_ == x) else xs def update[A](x: A)(xs: List[A]): List[A] = x :: remove(x)(xs) def merge[A](xs: List[A], ys: List[A]): List[A] = if (xs.eq(ys)) xs else if (xs.isEmpty) ys else ys.foldRight(xs)(Library.insert(_)(_)) def distinct[A](xs: List[A], eq: (A, A) => Boolean = (x: A, y: A) => x == y): List[A] = { val result = new mutable.ListBuffer[A] xs.foreach(x => if (!result.exists(y => eq(x, y))) result += x) result.toList } def duplicates[A](lst: List[A], eq: (A, A) => Boolean = (x: A, y: A) => x == y): List[A] = { val result = new mutable.ListBuffer[A] @tailrec def dups(rest: List[A]): Unit = rest match { case Nil => case x :: xs => if (!result.exists(y => eq(x, y)) && xs.exists(y => eq(x, y))) result += x dups(xs) } dups(lst) result.toList } def replicate[A](n: Int, a: A): List[A] = if (n < 0) throw new IllegalArgumentException else if (n == 0) Nil else { val res = new mutable.ListBuffer[A] (1 to n).foreach(_ => res += a) res.toList } def the_single[A](xs: List[A]): A = xs match { case List(x) => x case _ => error("Single argument expected") } /* proper values */ def proper_bool(b: Boolean): Option[Boolean] = if (!b) None else Some(b) def 
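/* Illustrative sketch (not part of this patch): escape_regex above prefixes
   every regex metacharacter with a backslash, so arbitrary strings can be
   embedded into patterns, while make_regex returns None instead of raising on
   a malformed pattern.  Hypothetical usage combining both ideas:

   import scala.util.matching.Regex

   def contains_literal(text: String, literal: String): Boolean = {
     val meta = """()[]{}\^$|?*+.<>-=!"""  // same metacharacters as is_regex_meta
     val escaped =
       literal.flatMap(c => if (meta.contains(c)) "\\" + c else c.toString)
     new Regex(escaped).findFirstIn(text).isDefined
   }
   // contains_literal("price is $5 (net)", "$5 (net)") == true
*/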
proper_string(s: String): Option[String] = if (s == null || s == "") None else Some(s) def proper_list[A](list: List[A]): Option[List[A]] = if (list == null || list.isEmpty) None else Some(list) /* reflection */ def is_subclass[A, B](a: Class[A], b: Class[B]): Boolean = { import scala.language.existentials @tailrec def subclass(c: Class[_]): Boolean = { c == b || { val d = c.getSuperclass; d != null && subclass(d) } } subclass(a) } } diff --git a/src/Tools/VSCode/src/dynamic_output.scala b/src/Tools/VSCode/src/dynamic_output.scala --- a/src/Tools/VSCode/src/dynamic_output.scala +++ b/src/Tools/VSCode/src/dynamic_output.scala @@ -1,88 +1,88 @@ /* Title: Tools/VSCode/src/dynamic_output.scala Author: Makarius Dynamic output, depending on caret focus: messages, state etc. */ package isabelle.vscode import isabelle._ object Dynamic_Output { sealed case class State(do_update: Boolean = true, output: List[XML.Tree] = Nil) { def handle_update( resources: VSCode_Resources, channel: Channel, restriction: Option[Set[Command]] ): State = { val st1 = resources.get_caret() match { case None => copy(output = Nil) case Some(caret) => val snapshot = caret.model.snapshot() if (do_update && !snapshot.is_outdated) { snapshot.current_command(caret.node_name, caret.offset) match { case None => copy(output = Nil) case Some(command) => copy(output = if (restriction.isEmpty || restriction.get.contains(command)) Rendering.output_messages(snapshot.command_results(command)) else output) } } else this } if (st1.output != output) { val context = new Presentation.Entity_Context { override def make_ref(props: Properties.T, body: XML.Body): Option[XML.Elem] = for { thy_file <- Position.Def_File.unapply(props) def_line <- Position.Def_Line.unapply(props) source <- resources.source_file(thy_file) - uri = Path.explode(source).absolute_file.toURI + uri = File.uri(Path.explode(source).absolute_file) } yield HTML.link(uri.toString + "#" + def_line, body) } val elements = Presentation.elements2.copy(entity = Markup.Elements.full) val html = Presentation.make_html(context, elements, Pretty.separate(st1.output)) channel.write(LSP.Dynamic_Output(HTML.source(html).toString)) } st1 } } def apply(server: Language_Server): Dynamic_Output = new Dynamic_Output(server) } class Dynamic_Output private(server: Language_Server) { private val state = Synchronized(Dynamic_Output.State()) private def handle_update(restriction: Option[Set[Command]]): Unit = state.change(_.handle_update(server.resources, server.channel, restriction)) /* main */ private val main = Session.Consumer[Any](getClass.getName) { case changed: Session.Commands_Changed => handle_update(if (changed.assignment) None else Some(changed.commands)) case Session.Caret_Focus => handle_update(None) } def init(): Unit = { server.session.commands_changed += main server.session.caret_focus += main handle_update(None) } def exit(): Unit = { server.session.commands_changed -= main server.session.caret_focus -= main } } diff --git a/src/Tools/VSCode/src/language_server.scala b/src/Tools/VSCode/src/language_server.scala --- a/src/Tools/VSCode/src/language_server.scala +++ b/src/Tools/VSCode/src/language_server.scala @@ -1,545 +1,545 @@ /* Title: Tools/VSCode/src/language_server.scala Author: Makarius Server for VS Code Language Server Protocol 2.0/3.0, see also https://github.com/Microsoft/language-server-protocol https://github.com/Microsoft/language-server-protocol/blob/master/protocol.md PIDE protocol extensions depend on system option "vscode_pide_extensions". 
*/ package isabelle.vscode import isabelle._ import java.io.{PrintStream, OutputStream, File => JFile} import scala.annotation.tailrec import scala.collection.mutable object Language_Server { type Editor = isabelle.Editor[Unit] /* Isabelle tool wrapper */ private lazy val default_logic = Isabelle_System.getenv("ISABELLE_LOGIC") val isabelle_tool = Isabelle_Tool("vscode_server", "VSCode Language Server for PIDE", Scala_Project.here, { args => try { var logic_ancestor: Option[String] = None var log_file: Option[Path] = None var logic_requirements = false var dirs: List[Path] = Nil var include_sessions: List[String] = Nil var logic = default_logic var modes: List[String] = Nil var no_build = false var options = Options.init() var verbose = false val getopts = Getopts(""" Usage: isabelle vscode_server [OPTIONS] Options are: -A NAME ancestor session for option -R (default: parent) -L FILE logging on FILE -R NAME build image with requirements from other sessions -d DIR include session directory -i NAME include session in name-space of theories -l NAME logic session name (default ISABELLE_LOGIC=""" + quote(default_logic) + """) -m MODE add print mode for output -n no build of session image on startup -o OPTION override Isabelle system OPTION (via NAME=VAL or NAME) -v verbose logging Run the VSCode Language Server protocol (JSON RPC) over stdin/stdout. """, "A:" -> (arg => logic_ancestor = Some(arg)), "L:" -> (arg => log_file = Some(Path.explode(File.standard_path(arg)))), "R:" -> (arg => { logic = arg; logic_requirements = true }), "d:" -> (arg => dirs = dirs ::: List(Path.explode(File.standard_path(arg)))), "i:" -> (arg => include_sessions = include_sessions ::: List(arg)), "l:" -> (arg => logic = arg), "m:" -> (arg => modes = arg :: modes), "n" -> (_ => no_build = true), "o:" -> (arg => options = options + arg), "v" -> (_ => verbose = true)) val more_args = getopts(args) if (more_args.nonEmpty) getopts.usage() val log = Logger.make(log_file) val channel = new Channel(System.in, System.out, log, verbose) val server = new Language_Server(channel, options, session_name = logic, session_dirs = dirs, include_sessions = include_sessions, session_ancestor = logic_ancestor, session_requirements = logic_requirements, session_no_build = no_build, modes = modes, log = log) // prevent spurious garbage on the main protocol channel val orig_out = System.out try { - System.setOut(new PrintStream(new OutputStream { def write(n: Int): Unit = {} })) + System.setOut(new PrintStream(OutputStream.nullOutputStream())) server.start() } finally { System.setOut(orig_out) } } catch { case exn: Throwable => val channel = new Channel(System.in, System.out, No_Logger) channel.error_message(Exn.message(exn)) throw(exn) } }) } class Language_Server( val channel: Channel, options: Options, session_name: String = Language_Server.default_logic, include_sessions: List[String] = Nil, session_dirs: List[Path] = Nil, session_ancestor: Option[String] = None, session_requirements: Boolean = false, session_no_build: Boolean = false, modes: List[String] = Nil, log: Logger = No_Logger ) { server => /* prover session */ private val session_ = Synchronized(None: Option[Session]) def session: Session = session_.value getOrElse error("Server inactive") def resources: VSCode_Resources = session.resources.asInstanceOf[VSCode_Resources] def rendering_offset(node_pos: Line.Node_Position): Option[(VSCode_Rendering, Text.Offset)] = for { model <- resources.get_model(new JFile(node_pos.name)) rendering = model.rendering() offset <- 
model.content.doc.offset(node_pos.pos) } yield (rendering, offset) private val dynamic_output = Dynamic_Output(server) /* input from client or file-system */ private val file_watcher: File_Watcher = File_Watcher(sync_documents, options.seconds("vscode_load_delay")) private val delay_input: Delay = Delay.last(options.seconds("vscode_input_delay"), channel.Error_Logger) { resources.flush_input(session, channel) } private val delay_load: Delay = Delay.last(options.seconds("vscode_load_delay"), channel.Error_Logger) { val (invoke_input, invoke_load) = resources.resolve_dependencies(session, editor, file_watcher) if (invoke_input) delay_input.invoke() if (invoke_load) delay_load.invoke() } private def close_document(file: JFile): Unit = { if (resources.close_model(file)) { file_watcher.register_parent(file) sync_documents(Set(file)) delay_input.invoke() delay_output.invoke() } } private def sync_documents(changed: Set[JFile]): Unit = { resources.sync_models(changed) delay_input.invoke() delay_output.invoke() } private def change_document( file: JFile, version: Long, changes: List[LSP.TextDocumentChange] ): Unit = { val norm_changes = new mutable.ListBuffer[LSP.TextDocumentChange] @tailrec def norm(chs: List[LSP.TextDocumentChange]): Unit = { if (chs.nonEmpty) { val (full_texts, rest1) = chs.span(_.range.isEmpty) val (edits, rest2) = rest1.span(_.range.nonEmpty) norm_changes ++= full_texts norm_changes ++= edits.sortBy(_.range.get.start)(Line.Position.Ordering).reverse norm(rest2) } } norm(changes) norm_changes.foreach(change => resources.change_model(session, editor, file, version, change.text, change.range)) delay_input.invoke() delay_output.invoke() } /* caret handling */ private val delay_caret_update: Delay = Delay.last(options.seconds("vscode_input_delay"), channel.Error_Logger) { session.caret_focus.post(Session.Caret_Focus) } private def update_caret(caret: Option[(JFile, Line.Position)]): Unit = { resources.update_caret(caret) delay_caret_update.invoke() delay_input.invoke() } /* preview */ private lazy val preview_panel = new Preview_Panel(resources) private lazy val delay_preview: Delay = Delay.last(options.seconds("vscode_output_delay"), channel.Error_Logger) { if (preview_panel.flush(channel)) delay_preview.invoke() } private def request_preview(file: JFile, column: Int): Unit = { preview_panel.request(file, column) delay_preview.invoke() } /* output to client */ private val delay_output: Delay = Delay.last(options.seconds("vscode_output_delay"), channel.Error_Logger) { if (resources.flush_output(channel)) delay_output.invoke() } def update_output(changed_nodes: Iterable[JFile]): Unit = { resources.update_output(changed_nodes) delay_output.invoke() } def update_output_visible(): Unit = { resources.update_output_visible() delay_output.invoke() } private val prover_output = Session.Consumer[Session.Commands_Changed](getClass.getName) { case changed => update_output(changed.nodes.toList.map(resources.node_file(_))) } private val syslog_messages = Session.Consumer[Prover.Output](getClass.getName) { case output => channel.log_writeln(resources.output_xml(output.message)) } /* init and exit */ def init(id: LSP.Id): Unit = { def reply_ok(msg: String): Unit = { channel.write(LSP.Initialize.reply(id, "")) channel.writeln(msg) } def reply_error(msg: String): Unit = { channel.write(LSP.Initialize.reply(id, msg)) channel.error_message(msg) } val try_session = try { val base_info = Sessions.base_info( options, session_name, dirs = session_dirs, include_sessions = include_sessions, 
session_ancestor = session_ancestor, session_requirements = session_requirements).check def build(no_build: Boolean = false): Build.Results = Build.build(options, selection = Sessions.Selection.session(base_info.session), build_heap = true, no_build = no_build, dirs = session_dirs, infos = base_info.infos) if (!session_no_build && !build(no_build = true).ok) { val start_msg = "Build started for Isabelle/" + base_info.session + " ..." val fail_msg = "Session build failed -- prover process remains inactive!" val progress = channel.progress(verbose = true) progress.echo(start_msg); channel.writeln(start_msg) if (!build().ok) { progress.echo(fail_msg); error(fail_msg) } } val resources = new VSCode_Resources(options, base_info, log) { override def commit(change: Session.Change): Unit = if (change.deps_changed || undefined_blobs(change.version.nodes).nonEmpty) delay_load.invoke() } val session_options = options.bool("editor_output_state") = true val session = new Session(session_options, resources) Some((base_info, session)) } catch { case ERROR(msg) => reply_error(msg); None } for ((base_info, session) <- try_session) { session_.change(_ => Some(session)) session.commands_changed += prover_output session.syslog_messages += syslog_messages dynamic_output.init() try { Isabelle_Process.start(session, options, base_info.sessions_structure, Sessions.store(options), modes = modes, logic = base_info.session).await_startup() reply_ok("Welcome to Isabelle/" + base_info.session + Isabelle_System.isabelle_heading()) } catch { case ERROR(msg) => reply_error(msg) } } } def shutdown(id: LSP.Id): Unit = { def reply(err: String): Unit = channel.write(LSP.Shutdown.reply(id, err)) session_.change({ case Some(session) => session.commands_changed -= prover_output session.syslog_messages -= syslog_messages dynamic_output.exit() delay_load.revoke() file_watcher.shutdown() delay_input.revoke() delay_output.revoke() delay_caret_update.revoke() delay_preview.revoke() val result = session.stop() if (result.ok) reply("") else reply("Prover shutdown failed: " + result.rc) None case None => reply("Prover inactive") None }) } def exit(): Unit = { log("\n") sys.exit(if (session_.value.isEmpty) Process_Result.RC.ok else Process_Result.RC.failure) } /* completion */ def completion(id: LSP.Id, node_pos: Line.Node_Position): Unit = { val result = (for ((rendering, offset) <- rendering_offset(node_pos)) yield rendering.completion(node_pos, offset)) getOrElse Nil channel.write(LSP.Completion.reply(id, result)) } /* spell-checker dictionary */ def update_dictionary(include: Boolean, permanent: Boolean): Unit = { for { spell_checker <- resources.spell_checker.get caret <- resources.get_caret() rendering = caret.model.rendering() range = rendering.before_caret_range(caret.offset) Text.Info(_, word) <- Spell_Checker.current_word(rendering, range) } { spell_checker.update(word, include, permanent) update_output_visible() } } def reset_dictionary(): Unit = { for (spell_checker <- resources.spell_checker.get) { spell_checker.reset() update_output_visible() } } /* hover */ def hover(id: LSP.Id, node_pos: Line.Node_Position): Unit = { val result = for { (rendering, offset) <- rendering_offset(node_pos) info <- rendering.tooltips(VSCode_Rendering.tooltip_elements, Text.Range(offset, offset + 1)) } yield { val range = rendering.model.content.doc.range(info.range) val contents = info.info.map(t => LSP.MarkedString(resources.output_pretty_tooltip(List(t)))) (range, contents) } channel.write(LSP.Hover.reply(id, result)) } /* goto definition */ 
def goto_definition(id: LSP.Id, node_pos: Line.Node_Position): Unit = { val result = (for ((rendering, offset) <- rendering_offset(node_pos)) yield rendering.hyperlinks(Text.Range(offset, offset + 1))) getOrElse Nil channel.write(LSP.GotoDefinition.reply(id, result)) } /* document highlights */ def document_highlights(id: LSP.Id, node_pos: Line.Node_Position): Unit = { val result = (for ((rendering, offset) <- rendering_offset(node_pos)) yield { val model = rendering.model rendering.caret_focus_ranges(Text.Range(offset, offset + 1), model.content.text_range) .map(r => LSP.DocumentHighlight.text(model.content.doc.range(r))) }) getOrElse Nil channel.write(LSP.DocumentHighlights.reply(id, result)) } /* main loop */ def start(): Unit = { log("Server started " + Date.now()) def handle(json: JSON.T): Unit = { try { json match { case LSP.Initialize(id) => init(id) case LSP.Initialized(()) => case LSP.Shutdown(id) => shutdown(id) case LSP.Exit(()) => exit() case LSP.DidOpenTextDocument(file, _, version, text) => change_document(file, version, List(LSP.TextDocumentChange(None, text))) delay_load.invoke() case LSP.DidChangeTextDocument(file, version, changes) => change_document(file, version, changes) case LSP.DidCloseTextDocument(file) => close_document(file) case LSP.Completion(id, node_pos) => completion(id, node_pos) case LSP.Include_Word(()) => update_dictionary(true, false) case LSP.Include_Word_Permanently(()) => update_dictionary(true, true) case LSP.Exclude_Word(()) => update_dictionary(false, false) case LSP.Exclude_Word_Permanently(()) => update_dictionary(false, true) case LSP.Reset_Words(()) => reset_dictionary() case LSP.Hover(id, node_pos) => hover(id, node_pos) case LSP.GotoDefinition(id, node_pos) => goto_definition(id, node_pos) case LSP.DocumentHighlights(id, node_pos) => document_highlights(id, node_pos) case LSP.Caret_Update(caret) => update_caret(caret) case LSP.State_Init(()) => State_Panel.init(server) case LSP.State_Exit(id) => State_Panel.exit(id) case LSP.State_Locate(id) => State_Panel.locate(id) case LSP.State_Update(id) => State_Panel.update(id) case LSP.State_Auto_Update(id, enabled) => State_Panel.auto_update(id, enabled) case LSP.Preview_Request(file, column) => request_preview(file, column) case _ => if (!LSP.ResponseMessage.is_empty(json)) log("### IGNORED") } } catch { case exn: Throwable => channel.log_error_message(Exn.message(exn)) } } @tailrec def loop(): Unit = { channel.read() match { case Some(json) => json match { case bulk: List[_] => bulk.foreach(handle) case _ => handle(json) } loop() case None => log("### TERMINATE") } } loop() } /* abstract editor operations */ object editor extends Language_Server.Editor { /* session */ override def session: Session = server.session override def flush(): Unit = resources.flush_input(session, channel) override def invoke(): Unit = delay_input.invoke() /* current situation */ override def current_node(context: Unit): Option[Document.Node.Name] = resources.get_caret().map(_.model.node_name) override def current_node_snapshot(context: Unit): Option[Document.Snapshot] = resources.get_caret().map(_.model.snapshot()) override def node_snapshot(name: Document.Node.Name): Document.Snapshot = { resources.get_model(name) match { case Some(model) => model.snapshot() case None => session.snapshot(name) } } def current_command(snapshot: Document.Snapshot): Option[Command] = { resources.get_caret() match { case Some(caret) => snapshot.current_command(caret.node_name, caret.offset) case None => None } } override def 
current_command(context: Unit, snapshot: Document.Snapshot): Option[Command] = current_command(snapshot) /* overlays */ override def node_overlays(name: Document.Node.Name): Document.Node.Overlays = resources.node_overlays(name) override def insert_overlay(command: Command, fn: String, args: List[String]): Unit = resources.insert_overlay(command, fn, args) override def remove_overlay(command: Command, fn: String, args: List[String]): Unit = resources.remove_overlay(command, fn, args) /* hyperlinks */ override def hyperlink_command( focus: Boolean, snapshot: Document.Snapshot, id: Document_ID.Generic, offset: Symbol.Offset = 0 ): Option[Hyperlink] = { if (snapshot.is_outdated) None else snapshot.find_command_position(id, offset).map(node_pos => new Hyperlink { def follow(unit: Unit): Unit = channel.write(LSP.Caret_Update(node_pos, focus)) }) } /* dispatcher thread */ override def assert_dispatcher[A](body: => A): A = session.assert_dispatcher(body) override def require_dispatcher[A](body: => A): A = session.require_dispatcher(body) override def send_dispatcher(body: => Unit): Unit = session.send_dispatcher(body) override def send_wait_dispatcher(body: => Unit): Unit = session.send_wait_dispatcher(body) } } diff --git a/src/Tools/VSCode/src/state_panel.scala b/src/Tools/VSCode/src/state_panel.scala --- a/src/Tools/VSCode/src/state_panel.scala +++ b/src/Tools/VSCode/src/state_panel.scala @@ -1,131 +1,131 @@ /* Title: Tools/VSCode/src/state_panel.scala Author: Makarius Show proof state. */ package isabelle.vscode import isabelle._ object State_Panel { private val make_id = Counter.make() private val instances = Synchronized(Map.empty[Counter.ID, State_Panel]) def init(server: Language_Server): Unit = { val instance = new State_Panel(server) instances.change(_ + (instance.id -> instance)) instance.init() } def exit(id: Counter.ID): Unit = { instances.change(map => map.get(id) match { case None => map case Some(instance) => instance.exit(); map - id }) } def locate(id: Counter.ID): Unit = instances.value.get(id).foreach(state => state.server.editor.send_dispatcher(state.locate())) def update(id: Counter.ID): Unit = instances.value.get(id).foreach(state => state.server.editor.send_dispatcher(state.update())) def auto_update(id: Counter.ID, enabled: Boolean): Unit = instances.value.get(id).foreach(state => state.server.editor.send_dispatcher(state.auto_update(Some(enabled)))) } class State_Panel private(val server: Language_Server) { /* output */ val id: Counter.ID = State_Panel.make_id() private def output(content: String): Unit = server.channel.write(LSP.State_Output(id, content, auto_update_enabled.value)) /* query operation */ private val output_active = Synchronized(true) private val print_state = new Query_Operation(server.editor, (), "print_state", _ => (), (_, _, body) => if (output_active.value && body.nonEmpty){ val context = new Presentation.Entity_Context { override def make_ref(props: Properties.T, body: XML.Body): Option[XML.Elem] = for { thy_file <- Position.Def_File.unapply(props) def_line <- Position.Def_Line.unapply(props) source <- server.resources.source_file(thy_file) - uri = Path.explode(source).absolute_file.toURI + uri = File.uri(Path.explode(source).absolute_file) } yield HTML.link(uri.toString + "#" + def_line, body) } val elements = Presentation.elements2.copy(entity = Markup.Elements.full) val html = Presentation.make_html(context, elements, Pretty.separate(body)) output(HTML.source(html).toString) }) def locate(): Unit = print_state.locate_query() def update(): Unit = 
{ server.editor.current_node_snapshot(()) match { case Some(snapshot) => (server.editor.current_command((), snapshot), print_state.get_location) match { case (Some(command1), Some(command2)) if command1.id == command2.id => case _ => print_state.apply_query(Nil) } case None => } } /* auto update */ private val auto_update_enabled = Synchronized(true) def auto_update(set: Option[Boolean] = None): Unit = { val enabled = auto_update_enabled.guarded_access(a => set match { case None => Some((a, a)) case Some(b) => Some((b, b)) }) if (enabled) update() } /* main */ private val main = Session.Consumer[Any](getClass.getName) { case changed: Session.Commands_Changed => if (changed.assignment) auto_update() case Session.Caret_Focus => auto_update() } def init(): Unit = { server.session.commands_changed += main server.session.caret_focus += main server.editor.send_wait_dispatcher { print_state.activate() } server.editor.send_dispatcher { auto_update() } } def exit(): Unit = { output_active.change(_ => false) server.session.commands_changed -= main server.session.caret_focus -= main server.editor.send_wait_dispatcher { print_state.deactivate() } } } diff --git a/src/Tools/jEdit/jedit_main/scala_console.scala b/src/Tools/jEdit/jedit_main/scala_console.scala --- a/src/Tools/jEdit/jedit_main/scala_console.scala +++ b/src/Tools/jEdit/jedit_main/scala_console.scala @@ -1,144 +1,144 @@ /* Title: Tools/jEdit/jedit_main/scala_console.scala Author: Makarius Scala instance of Console plugin. */ package isabelle.jedit_main import isabelle._ import isabelle.jedit._ import console.{Console, ConsolePane, Shell, Output} import org.gjt.sp.jedit.JARClassLoader import java.io.OutputStream object Scala_Console { class Interpreter(context: Scala.Compiler.Context, val console: Console) extends Scala.Interpreter(context) def console_interpreter(console: Console): Option[Interpreter] = Scala.Interpreter.get { case int: Interpreter if int.console == console => int } def running_interpreter(): Interpreter = { val self = Thread.currentThread() Scala.Interpreter.get { case int: Interpreter if int.running_thread(self) => int } .getOrElse(error("Bad Scala interpreter thread")) } def running_console(): Console = running_interpreter().console val init = """ import isabelle._ import isabelle.jedit._ val console = isabelle.jedit_main.Scala_Console.running_console() val view = console.getView() """ } class Scala_Console extends Shell("Scala") { /* global state -- owned by GUI thread */ @volatile private var global_console: Console = null @volatile private var global_out: Output = null @volatile private var global_err: Output = null private val console_stream = new OutputStream { val buf = new StringBuilder(100) override def flush(): Unit = { val s = buf.synchronized { val s = buf.toString; buf.clear(); s } val str = UTF8.decode_permissive(s) GUI_Thread.later { if (global_out == null) java.lang.System.out.print(str) else global_out.writeAttrs(null, str) } Time.seconds(0.01).sleep() // FIXME ad-hoc delay to avoid losing output } override def close(): Unit = flush() def write(byte: Int): Unit = { val c = byte.toChar buf.synchronized { buf.append(c) } if (c == '\n') flush() } } private def with_console[A](console: Console, out: Output, err: Output)(e: => A): A = { global_console = console global_out = out global_err = if (err == null) out else err try { scala.Console.withErr(console_stream) { scala.Console.withOut(console_stream) { e } } } finally { console_stream.flush() global_console = null global_out = null global_err = null } } /* 
jEdit console methods */ override def openConsole(console: Console): Unit = { val context = Scala.Compiler.context( - jar_dirs = JEdit_Lib.directories, + jar_files = JEdit_Lib.directories, class_loader = Some(new JARClassLoader)) val interpreter = new Scala_Console.Interpreter(context, console) interpreter.execute((context, state) => context.compile(Scala_Console.init, state = state).state) } override def closeConsole(console: Console): Unit = Scala_Console.console_interpreter(console).foreach(_.shutdown()) override def printInfoMessage(out: Output): Unit = { out.print(null, "This shell evaluates Isabelle/Scala expressions.\n\n" + "The contents of package isabelle and isabelle.jedit are imported.\n" + "The following special toplevel bindings are provided:\n" + " view -- current jEdit/Swing view (e.g. view.getBuffer, view.getTextArea)\n" + " console -- jEdit Console plugin\n" + " PIDE -- Isabelle/PIDE plugin (e.g. PIDE.session, PIDE.snapshot, PIDE.rendering)\n") } override def printPrompt(console: Console, out: Output): Unit = { out.writeAttrs(ConsolePane.colorAttributes(console.getInfoColor), "scala>") out.writeAttrs(ConsolePane.colorAttributes(console.getPlainColor), " ") } override def execute( console: Console, input: String, out: Output, err: Output, command: String ): Unit = { Scala_Console.console_interpreter(console).foreach(interpreter => interpreter.execute { (context, state) => val result = with_console(console, out, err) { context.compile(command, state) } GUI_Thread.later { val diag = if (err == null) out else err for (message <- result.messages) { val color = if (message.is_error) console.getErrorColor else null diag.print(color, message.text + "\n") } Option(err).foreach(_.commandDone()) out.commandDone() } result.state }) } override def stop(console: Console): Unit = Scala_Console.console_interpreter(console).foreach(_.shutdown()) } diff --git a/src/Tools/jEdit/src/jedit_lib.scala b/src/Tools/jEdit/src/jedit_lib.scala --- a/src/Tools/jEdit/src/jedit_lib.scala +++ b/src/Tools/jEdit/src/jedit_lib.scala @@ -1,358 +1,358 @@ /* Title: Tools/jEdit/src/jedit_lib.scala Author: Makarius Misc library functions for jEdit. 
*/ package isabelle.jedit import isabelle._ import java.io.{File => JFile} import java.awt.{Component, Container, GraphicsEnvironment, Point, Rectangle, Dimension, Toolkit} import java.awt.event.{InputEvent, KeyEvent, KeyListener} import javax.swing.{Icon, ImageIcon, JWindow, SwingUtilities} import scala.util.parsing.input.CharSequenceReader import scala.jdk.CollectionConverters._ import org.gjt.sp.jedit.{jEdit, Buffer, View, GUIUtilities, Debug, EditPane} import org.gjt.sp.jedit.io.{FileVFS, VFSManager} import org.gjt.sp.jedit.gui.{KeyEventWorkaround, KeyEventTranslator} import org.gjt.sp.jedit.buffer.{JEditBuffer, LineManager} import org.gjt.sp.jedit.textarea.{JEditTextArea, TextArea, TextAreaPainter} object JEdit_Lib { /* jEdit directories */ def directories: List[JFile] = (Option(jEdit.getSettingsDirectory).toList ::: List(jEdit.getJEditHome)).map(new JFile(_)) /* window geometry measurement */ private lazy val dummy_window = new JWindow final case class Window_Geometry(width: Int, height: Int, inner_width: Int, inner_height: Int) { def deco_width: Int = width - inner_width def deco_height: Int = height - inner_height } def window_geometry(outer: Container, inner: Component): Window_Geometry = { GUI_Thread.require {} val old_content = dummy_window.getContentPane dummy_window.setContentPane(outer) dummy_window.pack dummy_window.revalidate() val geometry = Window_Geometry( dummy_window.getWidth, dummy_window.getHeight, inner.getWidth, inner.getHeight) dummy_window.setContentPane(old_content) geometry } /* files */ def is_file(name: String): Boolean = VFSManager.getVFSForPath(name).isInstanceOf[FileVFS] def check_file(name: String): Option[JFile] = if (is_file(name)) Some(new JFile(name)) else None /* buffers */ def buffer_text(buffer: JEditBuffer): String = buffer_lock(buffer) { buffer.getText(0, buffer.getLength) } def buffer_reader(buffer: JEditBuffer): CharSequenceReader = Scan.char_reader(buffer.getSegment(0, buffer.getLength)) def buffer_mode(buffer: JEditBuffer): String = { val mode = buffer.getMode if (mode == null) "" else { val name = mode.getName if (name == null) "" else name } } def buffer_line_manager(buffer: JEditBuffer): LineManager = Untyped.get[LineManager](buffer, "lineMgr") def buffer_name(buffer: Buffer): String = buffer.getSymlinkPath def buffer_file(buffer: Buffer): Option[JFile] = check_file(buffer_name(buffer)) def buffer_undo_in_progress[A](buffer: JEditBuffer, body: => A): A = { val undo_in_progress = buffer.isUndoInProgress def set(b: Boolean): Unit = Untyped.set[Boolean](buffer, "undoInProgress", b) try { set(true); body } finally { set(undo_in_progress) } } /* main jEdit components */ def jedit_buffers(): Iterator[Buffer] = jEdit.getBufferManager().getBuffers().asScala.iterator def jedit_buffer(name: String): Option[Buffer] = jedit_buffers().find(buffer => buffer_name(buffer) == name) def jedit_buffer(name: Document.Node.Name): Option[Buffer] = jedit_buffer(name.node) def jedit_views(): Iterator[View] = jEdit.getViewManager().getViews().asScala.iterator def jedit_view(view: View = null): View = if (view == null) jEdit.getActiveView() else view def jedit_edit_panes(view: View): Iterator[EditPane] = if (view == null) Iterator.empty else view.getEditPanes().iterator.filter(_ != null) def jedit_text_areas(view: View): Iterator[JEditTextArea] = if (view == null) Iterator.empty else view.getEditPanes().iterator.filter(_ != null).map(_.getTextArea).filter(_ != null) def jedit_text_areas(): Iterator[JEditTextArea] = jedit_views().flatMap(jedit_text_areas) def 
jedit_text_areas(buffer: JEditBuffer): Iterator[JEditTextArea] = jedit_text_areas().filter(_.getBuffer == buffer) def buffer_lock[A](buffer: JEditBuffer)(body: => A): A = { try { buffer.readLock(); body } finally { buffer.readUnlock() } } def buffer_edit[A](buffer: JEditBuffer)(body: => A): A = { try { buffer.beginCompoundEdit(); body } finally { buffer.endCompoundEdit() } } /* get text */ def get_text(buffer: JEditBuffer, range: Text.Range): Option[String] = try { Some(buffer.getText(range.start, range.length)) } catch { case _: ArrayIndexOutOfBoundsException => None } /* point range */ def point_range(buffer: JEditBuffer, offset: Text.Offset): Text.Range = if (offset < 0) Text.Range.offside else buffer_lock(buffer) { def text(i: Text.Offset): Char = buffer.getText(i, 1).charAt(0) try { val c = text(offset) if (Character.isHighSurrogate(c) && Character.isLowSurrogate(text(offset + 1))) Text.Range(offset, offset + 2) else if (Character.isLowSurrogate(c) && Character.isHighSurrogate(text(offset - 1))) Text.Range(offset - 1, offset + 1) else Text.Range(offset, offset + 1) } catch { case _: ArrayIndexOutOfBoundsException => Text.Range(offset, offset + 1) } } /* text ranges */ def buffer_range(buffer: JEditBuffer): Text.Range = Text.Range(0, buffer.getLength) def line_range(buffer: JEditBuffer, line: Int): Text.Range = Text.Range(buffer.getLineStartOffset(line), buffer.getLineEndOffset(line) min buffer.getLength) def caret_range(text_area: TextArea): Text.Range = point_range(text_area.getBuffer, text_area.getCaretPosition) def visible_range(text_area: TextArea): Option[Text.Range] = { val buffer = text_area.getBuffer val n = text_area.getVisibleLines if (n > 0) { val start = text_area.getScreenLineStartOffset(0) val raw_end = text_area.getScreenLineEndOffset(n - 1) val end = if (raw_end >= 0) raw_end min buffer.getLength else buffer.getLength Some(Text.Range(start, end)) } else None } def invalidate_range(text_area: TextArea, range: Text.Range): Unit = { val buffer = text_area.getBuffer buffer_range(buffer).try_restrict(range) match { case Some(range1) if !range1.is_singularity => try { text_area.invalidateLineRange( buffer.getLineOfOffset(range1.start), buffer.getLineOfOffset(range1.stop)) } catch { case _: ArrayIndexOutOfBoundsException => } case _ => } } def invalidate(text_area: TextArea): Unit = { val visible_lines = text_area.getVisibleLines if (visible_lines > 0) text_area.invalidateScreenLineRange(0, visible_lines) } /* graphics range */ case class Gfx_Range(x: Int, y: Int, length: Int) // NB: jEdit always normalizes \r\n and \r to \n // NB: last line lacks \n def gfx_range(text_area: TextArea, range: Text.Range): Option[Gfx_Range] = { val metric = pretty_metric(text_area.getPainter) val char_width = (metric.unit * metric.average).round.toInt val buffer = text_area.getBuffer val end = buffer.getLength val stop = range.stop val (p, q, r) = try { val p = text_area.offsetToXY(range.start) val (q, r) = if (get_text(buffer, Text.Range(stop - 1, stop)) == Some("\n")) (text_area.offsetToXY(stop - 1), char_width) else if (stop >= end) (text_area.offsetToXY(end), char_width * (stop - end)) else (text_area.offsetToXY(stop), 0) (p, q, r) } catch { case _: ArrayIndexOutOfBoundsException => (null, null, 0) } if (p != null && q != null && p.x < q.x + r && p.y == q.y) Some(Gfx_Range(p.x, p.y, q.x + r - p.x)) else None } /* pixel range */ def pixel_range(text_area: TextArea, x: Int, y: Int): Option[Text.Range] = { // coordinates wrt. 
inner painter component val painter = text_area.getPainter if (0 <= x && x < painter.getWidth && 0 <= y && y < painter.getHeight) { val offset = text_area.xyToOffset(x, y, false) if (offset >= 0) { val range = point_range(text_area.getBuffer, offset) gfx_range(text_area, range) match { case Some(g) if g.x <= x && x < g.x + g.length => Some(range) case _ => None } } else None } else None } /* pretty text metric */ abstract class Pretty_Metric extends Pretty.Metric { def average: Double } def pretty_metric(painter: TextAreaPainter): Pretty_Metric = new Pretty_Metric { def string_width(s: String): Double = painter.getFont.getStringBounds(s, painter.getFontRenderContext).getWidth val unit: Double = string_width(Symbol.space) max 1.0 val average: Double = string_width("mix") / (3 * unit) def apply(s: String): Double = if (s == "\n") 1.0 else string_width(s) / unit } /* icons */ def load_icon(name: String): Icon = { val name1 = if (name.startsWith("idea-icons/")) { - val file = Path.explode("$ISABELLE_IDEA_ICONS").file.toURI.toASCIIString + val file = File.uri(Path.explode("$ISABELLE_IDEA_ICONS")).toASCIIString "jar:" + file + "!/" + name } else name val icon = GUIUtilities.loadIcon(name1) if (icon.getIconWidth < 0 || icon.getIconHeight < 0) error("Bad icon: " + name) else icon } def load_image_icon(name: String): ImageIcon = load_icon(name) match { case icon: ImageIcon => icon case _ => error("Bad image icon: " + name) } /* key event handling */ def request_focus_view(alt_view: View = null): Unit = { val view = if (alt_view != null) alt_view else jEdit.getActiveView() if (view != null) { val text_area = view.getTextArea if (text_area != null) text_area.requestFocus() } } def propagate_key(view: View, evt: KeyEvent): Unit = { if (view != null && !evt.isConsumed) view.getInputHandler().processKeyEvent(evt, View.ACTION_BAR, false) } def key_listener( key_typed: KeyEvent => Unit = _ => (), key_pressed: KeyEvent => Unit = _ => (), key_released: KeyEvent => Unit = _ => () ): KeyListener = { def process_key_event(evt0: KeyEvent, handle: KeyEvent => Unit): Unit = { val evt = KeyEventWorkaround.processKeyEvent(evt0) if (evt != null) handle(evt) } new KeyListener { def keyTyped(evt: KeyEvent): Unit = process_key_event(evt, key_typed) def keyPressed(evt: KeyEvent): Unit = process_key_event(evt, key_pressed) def keyReleased(evt: KeyEvent): Unit = process_key_event(evt, key_released) } } def special_key(evt: KeyEvent): Boolean = { // cf. 5.2.0/jEdit/org/gjt/sp/jedit/gui/KeyEventWorkaround.java val mod = evt.getModifiersEx (mod & InputEvent.CTRL_DOWN_MASK) != 0 && (mod & InputEvent.ALT_DOWN_MASK) == 0 || (mod & InputEvent.CTRL_DOWN_MASK) == 0 && (mod & InputEvent.ALT_DOWN_MASK) != 0 && !Debug.ALT_KEY_PRESSED_DISABLED || (mod & InputEvent.META_DOWN_MASK) != 0 } def command_modifier(evt: InputEvent): Boolean = (evt.getModifiersEx & Toolkit.getDefaultToolkit.getMenuShortcutKeyMaskEx) != 0 def shift_modifier(evt: InputEvent): Boolean = (evt.getModifiersEx & InputEvent.SHIFT_DOWN_MASK) != 0 def modifier_string(evt: InputEvent): String = KeyEventTranslator.getModifierString(evt) match { case null => "" case s => s } }
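The separated_chunks hunk in Library.scala above replaces a boolean-flag loop with the while ({ ...; cond }) () encoding: the block runs first and its final expression is the loop condition, the standard substitute for do-while since Scala 3 dropped that statement. A minimal standalone sketch of the idiom, with names chosen here only for illustration:

object Do_While_Demo {
  def main(args: Array[String]): Unit = {
    val source = "a,bb,,ccc"
    val end = source.length
    val i = -1      // position just before the first chunk
    var j = i
    // the body executes at least once, then repeats while the condition holds
    while ({
      j += 1
      j < end && source.charAt(j) != ','
    }) ()
    println(source.substring(i + 1, j))   // prints "a"
  }
}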
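The language_server.scala hunk swaps a hand-rolled no-op OutputStream for java.io.OutputStream.nullOutputStream() (available since Java 11), keeping stray output away from the JSON-RPC channel on stdout. A sketch of the same guard in isolation; the helper name is illustrative and not part of the diff:

import java.io.{OutputStream, PrintStream}

object Silent_Stdout {
  // Discard anything printed to System.out while body runs, so that a
  // protocol channel on stdout cannot be corrupted by stray println calls.
  def apply[A](body: => A): A = {
    val orig_out = System.out
    System.setOut(new PrintStream(OutputStream.nullOutputStream()))
    try body finally System.setOut(orig_out)
  }
}

Silent_Stdout(server.start()) would then mirror the try/finally block in the tool wrapper above.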
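Three hunks above (dynamic_output.scala, state_panel.scala, jedit_lib.scala) route URI construction through a single isabelle.File.uri helper instead of calling java.io.File.toURI directly. The helper's definition is not part of this diff; the following is only a plausible reconstruction, inferred from call sites that pass a java.io.File and use the result as a java.net.URI:

import java.io.{File => JFile}
import java.net.URI

object File_Uri {
  // Hypothetical stand-in for isabelle.File.uri; the real implementation
  // may differ. Path.toUri yields an absolute file: URI with special
  // characters percent-encoded.
  def uri(file: JFile): URI = file.toPath.toUri
}

For example, File_Uri.uri(new JFile("/tmp/a b")).toASCIIString yields "file:///tmp/a%20b", matching the toASCIIString use in load_icon above.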